repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
Nu3001/external_chromium_org
|
refs/heads/master
|
tools/win/link_limiter/build_link_limiter.py
|
169
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import glob
import os
import shutil
import subprocess
import sys
import tempfile
BUILD_DIR = 'build'
def run_with_vsvars(cmd, tmpdir=None):
  """Runs |cmd| in a shell primed with the Visual Studio 2010 environment.

  Writes a throwaway .bat file that calls vsvars32.bat, optionally cd's to
  |tmpdir|, then runs |cmd|, capturing stdout.

  Args:
    cmd: Command line to execute (a single cmd.exe string).
    tmpdir: Optional directory to cd into before running |cmd|.

  Returns:
    A (returncode, stdout_text) tuple.
  """
  fd, filename = tempfile.mkstemp('.bat', text=True)
  with os.fdopen(fd, 'w') as f:
    print >> f, '@echo off'
    # vsvars32.bat sets PATH/INCLUDE/LIB for the VS 2010 toolchain.
    print >> f, r'call "%VS100COMNTOOLS%\vsvars32.bat"'
    if tmpdir:
      print >> f, r'cd %s' % tmpdir
    print >> f, cmd
  try:
    # shell=True so cmd.exe interprets the generated .bat file.
    p = subprocess.Popen([filename], shell=True, stdout=subprocess.PIPE,
                         universal_newlines=True)
    out, _ = p.communicate()
    return p.returncode, out
  finally:
    # Always remove the temp batch file, even if Popen fails.
    os.unlink(filename)
def get_vc_dir():
  """Returns VCINSTALLDIR as reported by a vsvars32-initialized shell.

  Returns:
    The directory string, or None when it cannot be determined.
  """
  prefix = 'VCINSTALLDIR='
  _, out = run_with_vsvars('echo VCINSTALLDIR=%VCINSTALLDIR%')
  for line in out.splitlines():  # pylint: disable-msg=E1103
    if line.startswith(prefix):
      return line[len(prefix):]
  return None
def build(infile):
  """Compiles |infile| to BUILD_DIR/limiter.exe when out of date.

  Exits the process with status 1 if compilation fails.

  Args:
    infile: Path of the C++ source file, relative to this script's directory.

  Returns:
    The path of the built executable.
  """
  if not os.path.exists(BUILD_DIR):
    os.makedirs(BUILD_DIR)
  outfile = 'limiter.exe'
  outpath = os.path.join(BUILD_DIR, outfile)
  cpptime = os.path.getmtime(infile)
  # Rebuild only when the source is newer than the existing binary.
  if not os.path.exists(outpath) or cpptime > os.path.getmtime(outpath):
    print 'Building %s...' % outfile
    # cl runs inside BUILD_DIR, hence the '..' prefix on the source path.
    rc, out = run_with_vsvars(
        'cl /nologo /Ox /Zi /W4 /WX /D_UNICODE /DUNICODE'
        ' /D_CRT_SECURE_NO_WARNINGS /EHsc %s /link /out:%s'
        % (os.path.join('..', infile), outfile), BUILD_DIR)
    if rc:
      print out
      print 'Failed to build %s' % outfile
      sys.exit(1)
  else:
    print '%s already built' % outfile
  return outpath
def main():
  """Builds limiter.exe and shim copies named after lib.exe/link.exe.

  Passing 'clean' as the last argument removes all build outputs instead.

  Returns:
    0 on success, 1 when the VC install directory or link.exe is missing.
  """
  # Switch to our own dir so BUILD_DIR and *.exe paths resolve predictably.
  os.chdir(os.path.dirname(os.path.abspath(__file__)))

  if sys.argv[-1] == 'clean':
    if os.path.exists(BUILD_DIR):
      shutil.rmtree(BUILD_DIR)
    for exe in glob.glob('*.exe'):
      os.unlink(exe)
    return 0

  vcdir = os.environ.get('VCINSTALLDIR')
  if not vcdir:
    vcdir = get_vc_dir()
    if not vcdir:
      print 'Could not get VCINSTALLDIR. Run vsvars32.bat?'
      return 1
    # Make the VC tools reachable for the rest of this process.
    os.environ['PATH'] += (';' + os.path.join(vcdir, 'bin') +
                           ';' + os.path.join(vcdir, r'..\Common7\IDE'))

  # Verify that we can find link.exe.
  link = os.path.join(vcdir, 'bin', 'link.exe')
  if not os.path.exists(link):
    print 'link.exe not found at %s' % link
    return 1

  exe_name = build('limiter.cc')
  # The limiter inspects its own name, so copy it over both tool names.
  for shim_exe in ('lib.exe', 'link.exe'):
    newpath = '%s__LIMITER.exe' % shim_exe
    shutil.copyfile(exe_name, newpath)
    print '%s shim built. Use with msbuild like: "/p:LinkToolExe=%s"' \
        % (shim_exe, os.path.abspath(newpath))
  return 0


if __name__ == '__main__':
  sys.exit(main())
|
Natim/sentry
|
refs/heads/master
|
src/sentry/management/commands/createuser.py
|
22
|
"""
sentry.management.commands.createuser
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import getpass
import sys
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand, CommandError, make_option
from sentry.models import (
Organization, OrganizationMember, OrganizationMemberType, User
)
class Command(BaseCommand):
    """Management command that creates a Sentry user interactively or via flags."""

    help = 'Creates a new user'

    option_list = BaseCommand.option_list + (
        make_option('--email', dest='email'),
        make_option('--superuser', dest='is_superuser', action='store_true', default=None),
        make_option('--password', dest='password', default=None),
        make_option('--no-superuser', dest='is_superuser', action='store_false', default=None),
        make_option('--no-password', dest='nopassword', action='store_true', default=False),
        make_option('--no-input', dest='noinput', action='store_true', default=False),
    )

    def _get_field(self, field_name):
        """Returns the User model field, used to reuse its validation rules."""
        return User._meta.get_field(field_name)

    def get_email(self):
        """Prompts for an email; raises CommandError when blank or invalid."""
        raw_value = raw_input('Email: ')
        if not raw_value:
            raise CommandError('Invalid email address: This field cannot be blank')
        field = self._get_field('email')
        try:
            return field.clean(raw_value, None)
        except ValidationError as e:
            raise CommandError('Invalid email address: %s' % '; '.join(e.messages))

    def get_password(self):
        """Prompts for a password (no echo); raises CommandError when invalid."""
        raw_value = getpass.getpass()
        field = self._get_field('password')
        try:
            return field.clean(raw_value, None)
        except ValidationError as e:
            raise CommandError('Invalid password: %s' % '; '.join(e.messages))

    def get_superuser(self):
        """Asks whether the new user should be a superuser; defaults to no."""
        if raw_input('Should this user be a superuser? [yN] ').lower() == 'y':
            return True
        return False

    def handle(self, **options):
        """Creates the user, prompting for any values not supplied as options."""
        email = options['email']
        is_superuser = options['is_superuser']
        password = options['password']

        # Interactive prompts fill in whatever the flags did not provide.
        if not options['noinput']:
            try:
                if not email:
                    email = self.get_email()
                if not (password or options['nopassword']):
                    password = self.get_password()
                if is_superuser is None:
                    is_superuser = self.get_superuser()
            except KeyboardInterrupt:
                self.stderr.write("\nOperation cancelled.")
                sys.exit(1)

        if not email:
            raise CommandError('Invalid or missing email address')

        # A password is mandatory unless --no-password was passed explicitly.
        if not options['nopassword'] and not password:
            raise CommandError('No password set and --no-password not passed')

        # Superusers are also made staff so they can access the admin.
        user = User(
            email=email,
            username=email,
            is_superuser=is_superuser,
            is_staff=is_superuser,
            is_active=True,
        )
        if password:
            user.set_password(password)
        user.save()

        self.stdout.write('User created: %s' % (email,))

        # TODO(dcramer): kill this when we improve flows
        if settings.SENTRY_SINGLE_ORGANIZATION:
            org = Organization.get_default()
            OrganizationMember.objects.create(
                organization=org,
                user=user,
                type=OrganizationMemberType.OWNER,
                has_global_access=user.is_superuser,
            )
            self.stdout.write('Added to organization: %s' % (org.slug,))
|
jonnary/keystone
|
refs/heads/master
|
keystone/common/sql/migrate_repo/versions/066_fixup_service_name_value.py
|
14
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
import sqlalchemy as sql
def upgrade(migrate_engine):
    """Backfills an empty 'name' key into each service row's JSON extra blob.

    Rows whose serialized 'extra' column lacks a 'name' get name='' so that
    later code can rely on the key existing.
    """
    meta = sql.MetaData()
    meta.bind = migrate_engine
    service_table = sql.Table('service', meta, autoload=True)
    services = list(service_table.select().execute())

    for service in services:
        if service.extra is not None:
            extra_dict = jsonutils.loads(service.extra)
        else:
            extra_dict = {}
        # Skip records that already have a name set.
        if extra_dict.get('name') is not None:
            continue
        # Default the name to empty string
        extra_dict['name'] = ''
        new_values = {
            'extra': jsonutils.dumps(extra_dict),
        }
        f = service_table.c.id == service.id
        update = service_table.update().where(f).values(new_values)
        migrate_engine.execute(update)
|
qqshfox/protobuf-with-as3
|
refs/heads/pb-2.3.0-pbas3-2.3
|
gtest/test/gtest_filter_unittest.py
|
69
|
#!/usr/bin/env python
#
# Copyright 2005 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test test filters.
A user can specify which test(s) in a Google Test program to run via either
the GTEST_FILTER environment variable or the --gtest_filter flag.
This script tests such functionality by invoking
gtest_filter_unittest_ (a program written with Google Test) with different
environments and command line flags.
Note that test sharding may also influence which tests are filtered. Therefore,
we test that here also.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import sets
import gtest_test_utils
# Constants.
IS_WINDOWS = os.name == 'nt'
# The environment variable for specifying the test filters.
FILTER_ENV_VAR = 'GTEST_FILTER'
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'
# The command line flag for specifying the test filters.
FILTER_FLAG = 'gtest_filter'
# The command line flag for including disabled tests.
ALSO_RUN_DISABED_TESTS_FLAG = 'gtest_also_run_disabled_tests'
# Command to run the gtest_filter_unittest_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_filter_unittest_')
# Regex for determining whether parameterized tests are enabled in the binary.
PARAM_TEST_REGEX = re.compile(r'/ParamTest')
# Regex for parsing test case names from Google Test's output.
TEST_CASE_REGEX = re.compile(r'^\[\-+\] \d+ tests? from (\w+(/\w+)?)')
# Regex for parsing test names from Google Test's output.
TEST_REGEX = re.compile(r'^\[\s*RUN\s*\].*\.(\w+(/\w+)?)')
# Full names of all tests in gtest_filter_unittests_.
PARAM_TESTS = [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
'SeqQ/ParamTest.TestX/0',
'SeqQ/ParamTest.TestX/1',
'SeqQ/ParamTest.TestY/0',
'SeqQ/ParamTest.TestY/1',
]
DISABLED_TESTS = [
'BarTest.DISABLED_TestFour',
'BarTest.DISABLED_TestFive',
'BazTest.DISABLED_TestC',
'DISABLED_FoobarTest.Test1',
'DISABLED_FoobarTest.DISABLED_Test2',
'DISABLED_FoobarbazTest.TestA',
]
# All the non-disabled tests.
ACTIVE_TESTS = [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB',
'HasDeathTest.Test1',
'HasDeathTest.Test2',
] + PARAM_TESTS
param_tests_present = None
# Utilities.
def SetEnvVar(env_var, value):
  """Sets the env variable to 'value'; unsets it when 'value' is None."""
  if value is None:
    # Removing a variable that is not set is a no-op.
    os.environ.pop(env_var, None)
  else:
    os.environ[env_var] = value
def RunAndReturnOutput(args = None):
  """Runs the test program and returns its output.

  Args:
    args: Optional list of extra command-line arguments for the binary.
  """
  return gtest_test_utils.Subprocess([COMMAND] + (args or [])).output
def RunAndExtractTestList(args = None):
  """Runs the test program and returns its exit code and a list of tests run.

  Args:
    args: Optional list of extra command-line arguments for the binary.

  Returns:
    A ([full_test_names], exit_code) tuple, parsed from the binary's output.
  """
  p = gtest_test_utils.Subprocess([COMMAND] + (args or []))
  tests_run = []
  test_case = ''
  test = ''
  for line in p.output.split('\n'):
    match = TEST_CASE_REGEX.match(line)
    if match is not None:
      # A '[----] N tests from ...' banner announces the current test case;
      # subsequent '[ RUN ]' lines belong to it.
      test_case = match.group(1)
    else:
      match = TEST_REGEX.match(line)
      if match is not None:
        test = match.group(1)
        tests_run.append(test_case + '.' + test)
  return (tests_run, p.exit_code)
def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
  """Runs the given function and arguments in a modified environment.

  Applies the extra_env overrides to os.environ, invokes function, and
  restores the previous value (or absence) of each overridden variable.

  Args:
    extra_env: Dict of environment variables to set for the call.
    function: Callable to invoke while the overrides are in effect.
    *args, **kwargs: Forwarded to function.

  Returns:
    Whatever function returns.
  """
  # Snapshot outside the try: if it sat inside and raised, the finally
  # clause would hit a NameError on original_env.
  original_env = os.environ.copy()
  try:
    os.environ.update(extra_env)
    return function(*args, **kwargs)
  finally:
    # Restore only the keys we touched; iterating the dict directly is
    # equivalent to the old iterkeys() and portable across Python versions.
    for key in extra_env:
      if key in original_env:
        os.environ[key] = original_env[key]
      else:
        del os.environ[key]
def RunWithSharding(total_shards, shard_index, command):
  """Runs a test program shard and returns exit code and a list of tests run.

  Args:
    total_shards: Value for the GTEST_TOTAL_SHARDS environment variable.
    shard_index: Value for the GTEST_SHARD_INDEX environment variable.
    command: Extra command-line arguments passed through to the binary.
  """
  extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index),
               TOTAL_SHARDS_ENV_VAR: str(total_shards)}
  return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command)
# The unit test.
class GTestFilterUnitTest(gtest_test_utils.TestCase):
  """Tests GTEST_FILTER env variable or --gtest_filter flag to filter tests."""

  # Utilities.

  def AssertSetEqual(self, lhs, rhs):
    """Asserts that two sets are equal."""
    for elem in lhs:
      self.assert_(elem in rhs, '%s in %s' % (elem, rhs))
    for elem in rhs:
      self.assert_(elem in lhs, '%s in %s' % (elem, lhs))

  def AssertPartitionIsValid(self, set_var, list_of_sets):
    """Asserts that list_of_sets is a valid partition of set_var."""
    full_partition = []
    for slice_var in list_of_sets:
      full_partition.extend(slice_var)
    # Same total size and same members means no element was dropped or
    # duplicated across the slices.
    self.assertEqual(len(set_var), len(full_partition))
    self.assertEqual(sets.Set(set_var), sets.Set(full_partition))

  def AdjustForParameterizedTests(self, tests_to_run):
    """Adjust tests_to_run in case value parameterized tests are disabled."""
    global param_tests_present
    if not param_tests_present:
      return list(sets.Set(tests_to_run) - sets.Set(PARAM_TESTS))
    else:
      return tests_to_run

  def RunAndVerify(self, gtest_filter, tests_to_run):
    """Checks that the binary runs correct set of tests for the given filter."""
    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)

    # First, tests using GTEST_FILTER.
    # Windows removes empty variables from the environment when passing it
    # to a new process. This means it is impossible to pass an empty filter
    # into a process using the GTEST_FILTER environment variable. However,
    # we can still test the case when the variable is not supplied (i.e.,
    # gtest_filter is None).
    # pylint: disable-msg=C6403
    if not IS_WINDOWS or gtest_filter != '':
      SetEnvVar(FILTER_ENV_VAR, gtest_filter)
      tests_run = RunAndExtractTestList()[0]
      SetEnvVar(FILTER_ENV_VAR, None)
      self.AssertSetEqual(tests_run, tests_to_run)
    # pylint: enable-msg=C6403

    # Next, tests using --gtest_filter.
    if gtest_filter is None:
      args = []
    else:
      args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)]
    tests_run = RunAndExtractTestList(args)[0]
    self.AssertSetEqual(tests_run, tests_to_run)

  def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,
                               args=None, check_exit_0=False):
    """Checks that binary runs correct tests for the given filter and shard.

    Runs all shards of gtest_filter_unittest_ with the given filter, and
    verifies that the right set of tests were run. The union of tests run
    on each shard should be identical to tests_to_run, without duplicates.

    Args:
      gtest_filter: A filter to apply to the tests.
      total_shards: A total number of shards to split test run into.
      tests_to_run: A set of tests expected to run.
      args : Arguments to pass to the to the test binary.
      check_exit_0: When set to a true value, make sure that all shards
        return 0.
    """
    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)

    # Windows removes empty variables from the environment when passing it
    # to a new process. This means it is impossible to pass an empty filter
    # into a process using the GTEST_FILTER environment variable. However,
    # we can still test the case when the variable is not supplied (i.e.,
    # gtest_filter is None).
    # pylint: disable-msg=C6403
    if not IS_WINDOWS or gtest_filter != '':
      SetEnvVar(FILTER_ENV_VAR, gtest_filter)
      partition = []
      for i in range(0, total_shards):
        (tests_run, exit_code) = RunWithSharding(total_shards, i, args)
        if check_exit_0:
          self.assertEqual(0, exit_code)
        partition.append(tests_run)

      self.AssertPartitionIsValid(tests_to_run, partition)
      SetEnvVar(FILTER_ENV_VAR, None)
    # pylint: enable-msg=C6403

  def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run):
    """Checks that the binary runs correct set of tests for the given filter.

    Runs gtest_filter_unittest_ with the given filter, and enables
    disabled tests. Verifies that the right set of tests were run.

    Args:
      gtest_filter: A filter to apply to the tests.
      tests_to_run: A set of tests expected to run.
    """
    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)

    # Construct the command line.
    args = ['--%s' % ALSO_RUN_DISABED_TESTS_FLAG]
    if gtest_filter is not None:
      args.append('--%s=%s' % (FILTER_FLAG, gtest_filter))

    tests_run = RunAndExtractTestList(args)[0]
    self.AssertSetEqual(tests_run, tests_to_run)

  def setUp(self):
    """Sets up test case.

    Determines whether value-parameterized tests are enabled in the binary and
    sets the flags accordingly.
    """
    # Probe the binary only once; the result is cached at module level.
    global param_tests_present
    if param_tests_present is None:
      param_tests_present = PARAM_TEST_REGEX.search(
          RunAndReturnOutput()) is not None

  def testDefaultBehavior(self):
    """Tests the behavior of not specifying the filter."""
    self.RunAndVerify(None, ACTIVE_TESTS)

  def testDefaultBehaviorWithShards(self):
    """Tests the behavior without the filter, with sharding enabled."""
    self.RunAndVerifyWithSharding(None, 1, ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, 2, ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) - 1, ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS), ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) + 1, ACTIVE_TESTS)

  def testEmptyFilter(self):
    """Tests an empty filter."""
    self.RunAndVerify('', [])
    self.RunAndVerifyWithSharding('', 1, [])
    self.RunAndVerifyWithSharding('', 2, [])

  def testBadFilter(self):
    """Tests a filter that matches nothing."""
    self.RunAndVerify('BadFilter', [])
    self.RunAndVerifyAllowingDisabled('BadFilter', [])

  def testFullName(self):
    """Tests filtering by full name."""
    self.RunAndVerify('FooTest.Xyz', ['FooTest.Xyz'])
    self.RunAndVerifyAllowingDisabled('FooTest.Xyz', ['FooTest.Xyz'])
    self.RunAndVerifyWithSharding('FooTest.Xyz', 5, ['FooTest.Xyz'])

  def testUniversalFilters(self):
    """Tests filters that match everything."""
    self.RunAndVerify('*', ACTIVE_TESTS)
    self.RunAndVerify('*.*', ACTIVE_TESTS)
    self.RunAndVerifyWithSharding('*.*', len(ACTIVE_TESTS) - 3, ACTIVE_TESTS)
    self.RunAndVerifyAllowingDisabled('*', ACTIVE_TESTS + DISABLED_TESTS)
    self.RunAndVerifyAllowingDisabled('*.*', ACTIVE_TESTS + DISABLED_TESTS)

  def testFilterByTestCase(self):
    """Tests filtering by test case name."""
    self.RunAndVerify('FooTest.*', ['FooTest.Abc', 'FooTest.Xyz'])

    BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB']
    self.RunAndVerify('BazTest.*', BAZ_TESTS)
    self.RunAndVerifyAllowingDisabled('BazTest.*',
                                      BAZ_TESTS + ['BazTest.DISABLED_TestC'])

  def testFilterByTest(self):
    """Tests filtering by test name."""
    self.RunAndVerify('*.TestOne', ['BarTest.TestOne', 'BazTest.TestOne'])

  def testFilterDisabledTests(self):
    """Select only the disabled tests to run."""
    self.RunAndVerify('DISABLED_FoobarTest.Test1', [])
    self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1',
                                      ['DISABLED_FoobarTest.Test1'])

    self.RunAndVerify('*DISABLED_*', [])
    self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS)

    self.RunAndVerify('*.DISABLED_*', [])
    self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [
        'BarTest.DISABLED_TestFour',
        'BarTest.DISABLED_TestFive',
        'BazTest.DISABLED_TestC',
        'DISABLED_FoobarTest.DISABLED_Test2',
        ])

    self.RunAndVerify('DISABLED_*', [])
    self.RunAndVerifyAllowingDisabled('DISABLED_*', [
        'DISABLED_FoobarTest.Test1',
        'DISABLED_FoobarTest.DISABLED_Test2',
        'DISABLED_FoobarbazTest.TestA',
        ])

  def testWildcardInTestCaseName(self):
    """Tests using wildcard in the test case name."""
    self.RunAndVerify('*a*.*', [
        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',

        'BazTest.TestOne',
        'BazTest.TestA',
        'BazTest.TestB',

        'HasDeathTest.Test1',
        'HasDeathTest.Test2', ] + PARAM_TESTS)

  def testWildcardInTestName(self):
    """Tests using wildcard in the test name."""
    self.RunAndVerify('*.*A*', ['FooTest.Abc', 'BazTest.TestA'])

  def testFilterWithoutDot(self):
    """Tests a filter that has no '.' in it."""
    self.RunAndVerify('*z*', [
        'FooTest.Xyz',

        'BazTest.TestOne',
        'BazTest.TestA',
        'BazTest.TestB',
        ])

  def testTwoPatterns(self):
    """Tests filters that consist of two patterns."""
    self.RunAndVerify('Foo*.*:*A*', [
        'FooTest.Abc',
        'FooTest.Xyz',

        'BazTest.TestA',
        ])

    # An empty pattern + a non-empty one
    self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA'])

  def testThreePatterns(self):
    """Tests filters that consist of three patterns."""
    self.RunAndVerify('*oo*:*A*:*One', [
        'FooTest.Abc',
        'FooTest.Xyz',

        'BarTest.TestOne',

        'BazTest.TestOne',
        'BazTest.TestA',
        ])

    # The 2nd pattern is empty.
    self.RunAndVerify('*oo*::*One', [
        'FooTest.Abc',
        'FooTest.Xyz',

        'BarTest.TestOne',

        'BazTest.TestOne',
        ])

    # The last 2 patterns are empty.
    self.RunAndVerify('*oo*::', [
        'FooTest.Abc',
        'FooTest.Xyz',
        ])

  def testNegativeFilters(self):
    """Tests filters with a '-' section excluding tests."""
    self.RunAndVerify('*-HasDeathTest.Test1', [
        'FooTest.Abc',
        'FooTest.Xyz',

        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',

        'BazTest.TestOne',
        'BazTest.TestA',
        'BazTest.TestB',

        'HasDeathTest.Test2',
        ] + PARAM_TESTS)

    self.RunAndVerify('*-FooTest.Abc:HasDeathTest.*', [
        'FooTest.Xyz',

        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',

        'BazTest.TestOne',
        'BazTest.TestA',
        'BazTest.TestB',
        ] + PARAM_TESTS)

    self.RunAndVerify('BarTest.*-BarTest.TestOne', [
        'BarTest.TestTwo',
        'BarTest.TestThree',
        ])

    # Tests without leading '*'.
    self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:HasDeathTest.*', [
        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',

        'BazTest.TestOne',
        'BazTest.TestA',
        'BazTest.TestB',
        ] + PARAM_TESTS)

    # Value parameterized tests.
    self.RunAndVerify('*/*', PARAM_TESTS)

    # Value parameterized tests filtering by the sequence name.
    self.RunAndVerify('SeqP/*', [
        'SeqP/ParamTest.TestX/0',
        'SeqP/ParamTest.TestX/1',
        'SeqP/ParamTest.TestY/0',
        'SeqP/ParamTest.TestY/1',
        ])

    # Value parameterized tests filtering by the test name.
    self.RunAndVerify('*/0', [
        'SeqP/ParamTest.TestX/0',
        'SeqP/ParamTest.TestY/0',
        'SeqQ/ParamTest.TestX/0',
        'SeqQ/ParamTest.TestY/0',
        ])

  def testFlagOverridesEnvVar(self):
    """Tests that the filter flag overrides the filtering env. variable."""
    SetEnvVar(FILTER_ENV_VAR, 'Foo*')
    args = ['--%s=%s' % (FILTER_FLAG, '*One')]
    tests_run = RunAndExtractTestList(args)[0]
    SetEnvVar(FILTER_ENV_VAR, None)

    self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne'])

  def testShardStatusFileIsCreated(self):
    """Tests that the shard file is created if specified in the environment."""
    shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
                                     'shard_status_file')
    self.assert_(not os.path.exists(shard_status_file))

    extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
    try:
      InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)
    finally:
      # The binary should have touched the status file regardless of outcome.
      self.assert_(os.path.exists(shard_status_file))
      os.remove(shard_status_file)

  def testShardStatusFileIsCreatedWithListTests(self):
    """Tests that the shard file is created with --gtest_list_tests."""
    shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
                                     'shard_status_file2')
    self.assert_(not os.path.exists(shard_status_file))

    extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
    try:
      InvokeWithModifiedEnv(extra_env,
                            RunAndReturnOutput,
                            ['--gtest_list_tests'])
    finally:
      self.assert_(os.path.exists(shard_status_file))
      os.remove(shard_status_file)

  def testShardingWorksWithDeathTests(self):
    """Tests integration with death tests and sharding."""
    gtest_filter = 'HasDeathTest.*:SeqP/*'
    expected_tests = [
        'HasDeathTest.Test1',
        'HasDeathTest.Test2',

        'SeqP/ParamTest.TestX/0',
        'SeqP/ParamTest.TestX/1',
        'SeqP/ParamTest.TestY/0',
        'SeqP/ParamTest.TestY/1',
        ]

    for flag in ['--gtest_death_test_style=threadsafe',
                 '--gtest_death_test_style=fast']:
      self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,
                                    check_exit_0=True, args=[flag])
      self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,
                                    check_exit_0=True, args=[flag])


if __name__ == '__main__':
  gtest_test_utils.Main()
|
tschorsch/nstor
|
refs/heads/master
|
src/buildings/bindings/callbacks_list.py
|
664
|
# Table of callback template argument lists consumed by the ns-3 'buildings'
# module Python bindings. Presumably auto-generated by the bindings scanner —
# do not hand-edit without confirming the generator. TODO confirm provenance.
callback_classes = [
    ['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
|
crafty78/ansible
|
refs/heads/devel
|
contrib/inventory/lxc_inventory.py
|
89
|
#!/usr/bin/env python
#
# (c) 2015-16 Florian Haas, hastexo Professional Services GmbH
# <florian@hastexo.com>
# Based in part on:
# libvirt_lxc.py, (c) 2013, Michael Scherer <misc@zarb.org>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
Ansible inventory script for LXC containers. Requires Python
bindings for LXC API.
In LXC, containers can be grouped by setting the lxc.group option,
which may be found more than once in a container's
configuration. So, we enumerate all containers, fetch their list
of groups, and then build the dictionary in the way Ansible expects
it.
"""
from __future__ import print_function
import sys
import lxc
import json
def build_dict():
    """Returns a dictionary keyed to the defined LXC groups. All
    containers, including the ones not in any group, are included in the
    "all" group."""
    # Map every container to its group list, implicitly adding each one
    # to the 'all' group.
    memberships = {}
    for name in lxc.list_containers():
        groups = lxc.Container(name).get_config_item('lxc.group') or []
        memberships[name] = ['all'] + groups

    # Collect the distinct group names across all containers.
    group_names = set()
    for groups in memberships.values():
        group_names.update(groups)

    # Build one inventory entry per group, listing its member hosts.
    inventory = {}
    for group in group_names:
        hosts = [name for name, groups in memberships.items()
                 if group in groups]
        inventory[group] = {'hosts': hosts,
                            'vars': {'ansible_connection': 'lxc'}}
    return inventory
def main(argv):
    """Returns a JSON dictionary as expected by Ansible"""
    inventory = build_dict()
    argc = len(argv)
    if argc == 2 and argv[1] == '--list':
        # Full inventory: groups, hosts and group vars.
        json.dump(inventory, sys.stdout)
    elif argc == 3 and argv[1] == '--host':
        # Per-host vars; every container uses the lxc connection plugin.
        json.dump({'ansible_connection': 'lxc'}, sys.stdout)
    else:
        print("Need an argument, either --list or --host <host>", file=sys.stderr)


if __name__ == '__main__':
    main(sys.argv)
|
FHannes/intellij-community
|
refs/heads/master
|
python/testData/mover/lastComment1_afterDown.py
|
10
|
def f():
if True:
a = 1
else:
a = 2
#comment <caret>
|
ktosiek/spacewalk
|
refs/heads/debian-repos
|
client/solaris/smartpm/smart/backends/rpm/header.py
|
3
|
#
# Copyright (c) 2004 Conectiva, Inc.
#
# Written by Gustavo Niemeyer <niemeyer@conectiva.com>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from smart.backends.rpm.rpmver import splitarch
from smart.util.strtools import globdistance
from smart.cache import Loader, PackageInfo
from smart.channel import FileChannel
from smart.backends.rpm.base import *
from smart.progress import Progress
from smart import *
import locale
import stat
import rpm
import os, sys
try:
import rpmhelper
except ImportError:
rpmhelper = None
# Custom RPM tag numbers (outside the standard tag range) used by Smart's
# channel metadata headers.
CRPMTAG_FILENAME          = 1000000
CRPMTAG_FILESIZE          = 1000001
CRPMTAG_MD5               = 1000005
CRPMTAG_SHA1              = 1000006
CRPMTAG_DIRECTORY         = 1000010
CRPMTAG_BINARY            = 1000011

CRPMTAG_UPDATE_SUMMARY    = 1000020
CRPMTAG_UPDATE_IMPORTANCE = 1000021
CRPMTAG_UPDATE_DATE       = 1000022
CRPMTAG_UPDATE_URL        = 1000023

# Encodings tried, in order, when decoding strings from RPM headers.
ENCODINGS = ["utf8", "iso-8859-1"]
class RPMHeaderPackageInfo(PackageInfo):
    """PackageInfo whose data comes from an RPM header fetched on demand."""

    class LazyHeader(object):
        # Non-data descriptor: the first access asks the loader for the
        # header and stores it as an instance attribute, which shadows the
        # descriptor on later accesses.
        def __get__(self, obj, type):
            obj._h = obj._loader.getHeader(obj._package)
            return obj._h

    _h = LazyHeader()

    def __init__(self, package, loader, order=0):
        PackageInfo.__init__(self, package, order)
        self._loader = loader
        # Lazily-built {path: mode} map; populated by getPathList().
        self._path = None

    def getReferenceURLs(self):
        """Returns the upstream URL from the header, if present."""
        url = self._h[rpm.RPMTAG_URL]
        if url:
            return [url]
        return []

    def getURLs(self):
        """Returns the download URL(s) for the package file itself."""
        url = self._loader.getURL()
        if url:
            return [os.path.join(url, self._loader.getFileName(self))]
        return []

    def getSize(self, url):
        return self._loader.getSize(self)

    def getMD5(self, url):
        return self._loader.getMD5(self)

    def getInstalledSize(self):
        return self._h[rpm.RPMTAG_SIZE]

    def getDescription(self):
        """Returns the description, decoded with the first working encoding."""
        s = self._h[rpm.RPMTAG_DESCRIPTION]
        for encoding in ENCODINGS:
            try:
                s = s.decode(encoding)
            except UnicodeDecodeError:
                continue
            break
        else:
            # for/else: no encoding succeeded, fall back to empty.
            s = ""
        return s

    def getSummary(self):
        """Returns the summary, decoded with the first working encoding."""
        s = self._h[rpm.RPMTAG_SUMMARY]
        for encoding in ENCODINGS:
            try:
                s = s.decode(encoding)
            except UnicodeDecodeError:
                continue
            break
        else:
            s = ""
        return s

    def getGroup(self):
        """Returns the package group, decoded with the first working encoding."""
        s = self._loader.getGroup(self._package)
        for encoding in ENCODINGS:
            try:
                s = s.decode(encoding)
            except UnicodeDecodeError:
                continue
            break
        else:
            s = ""
        return s

    def getPathList(self):
        """Returns the package's file paths, caching a {path: mode} map."""
        if self._path is None:
            paths = self._h[rpm.RPMTAG_OLDFILENAMES]
            modes = self._h[rpm.RPMTAG_FILEMODES]
            if modes:
                self._path = {}
                for i in range(len(paths)):
                    self._path[paths[i]] = modes[i]
            else:
                # No mode data in the header; record paths with mode 0.
                self._path = dict.fromkeys(paths, 0)
        return self._path.keys()

    # NOTE(review): the pathIs* predicates read self._path directly and so
    # appear to assume getPathList() has been called first — confirm callers.

    def pathIsDir(self, path):
        return stat.S_ISDIR(self._path[path])

    def pathIsLink(self, path):
        return stat.S_ISLNK(self._path[path])

    def pathIsFile(self, path):
        return stat.S_ISREG(self._path[path])

    def pathIsSpecial(self, path):
        # Anything that is not a directory, symlink or regular file.
        mode = self._path[path]
        return not (stat.S_ISDIR(mode) or
                    stat.S_ISLNK(mode) or
                    stat.S_ISREG(mode))
class RPMHeaderLoader(Loader):
    """Base loader that turns raw rpm headers into cache packages.

    Subclasses supply getHeaders() yielding (header, offset) pairs;
    load() parses the dependency tags out of each header and registers
    the resulting package, keyed by its offset, with the cache.
    """

    __stateversion__ = Loader.__stateversion__+1

    # Mask of the version-comparison bits of a dependency flags word,
    # and the mapping from those bits to relation strings.
    COMPFLAGS = rpm.RPMSENSE_EQUAL|rpm.RPMSENSE_GREATER|rpm.RPMSENSE_LESS

    COMPMAP = { rpm.RPMSENSE_EQUAL: "=",
                rpm.RPMSENSE_LESS: "<",
                rpm.RPMSENSE_GREATER: ">",
                rpm.RPMSENSE_EQUAL|rpm.RPMSENSE_LESS: "<=",
                rpm.RPMSENSE_EQUAL|rpm.RPMSENSE_GREATER: ">=" }

    def __init__(self):
        Loader.__init__(self)
        self._infoorder = 0   # relative priority of this loader's info
        self._offsets = {}    # header offset -> package
        self._groups = {}     # package -> raw rpm group string

    def getHeaders(self, prog):
        """Yield (header, offset) pairs; overridden by subclasses."""
        return []

    def getInfo(self, pkg):
        return RPMHeaderPackageInfo(pkg, self, self._infoorder)

    def getGroup(self, pkg):
        return self._groups[pkg]

    def reset(self):
        Loader.reset(self)
        self._offsets.clear()
        self._groups.clear()

    def load(self):
        """Build a package (with provides/requires/obsoletes/conflicts)
        for every binary header of an installable architecture.

        Numeric tag constants are used instead of rpm.RPMTAG_* attribute
        lookups for speed; the symbolic names are kept in comments.
        """
        CM = self.COMPMAP
        CF = self.COMPFLAGS
        Pkg = RPMPackage
        Prv = RPMProvides
        NPrv = RPMNameProvides
        PreReq = RPMPreRequires
        Req = RPMRequires
        Obs = RPMObsoletes
        Cnf = RPMConflicts
        prog = iface.getProgress(self._cache)
        for h, offset in self.getHeaders(prog):
            if h[1106]: # RPMTAG_SOURCEPACKAGE
                continue
            arch = h[1022] # RPMTAG_ARCH
            if rpm.archscore(arch) == 0:
                # Architecture not installable on this machine.
                continue
            name = h[1000] # RPMTAG_NAME
            epoch = h[1003] # RPMTAG_EPOCH
            if epoch and epoch != "0":
                # RPMTAG_VERSION, RPMTAG_RELEASE
                version = "%s:%s-%s" % (epoch, h[1001], h[1002])
            else:
                # RPMTAG_VERSION, RPMTAG_RELEASE
                version = "%s-%s" % (h[1001], h[1002])
            versionarch = "%s@%s" % (version, arch)
            n = h[1047] # RPMTAG_PROVIDENAME
            v = h[1113] # RPMTAG_PROVIDEVERSION
            prvdict = {}
            for i in range(len(n)):
                ni = n[i]
                if not ni.startswith("config("):
                    vi = v[i]
                    # Strip a redundant zero epoch prefix.
                    if vi and vi[:2] == "0:":
                        vi = vi[2:]
                    if ni == name and vi == version:
                        # The package's own name provide carries the arch.
                        prvdict[(NPrv, intern(ni), versionarch)] = True
                    else:
                        prvdict[(Prv, intern(ni), vi or None)] = True
            prvargs = prvdict.keys()
            n = h[1049] # RPMTAG_REQUIRENAME
            if n:
                f = h[1048] # RPMTAG_REQUIREFLAGS
                v = h[1050] # RPMTAG_REQUIREVERSION
                reqdict = {}
                for i in range(len(n)):
                    ni = n[i]
                    if ni[:7] not in ("rpmlib(", "config("):
                        vi = v[i] or None
                        if vi and vi[:2] == "0:":
                            vi = vi[2:]
                        r = CM.get(f[i]&CF)
                        # Skip requirements exactly satisfied by the
                        # package's own "=" provides.
                        if ((r is not None and r != "=") or
                            ((Prv, ni, vi) not in prvdict)):
                            # RPMSENSE_PREREQ |
                            # RPMSENSE_SCRIPT_PRE |
                            # RPMSENSE_SCRIPT_PREUN |
                            # RPMSENSE_SCRIPT_POST |
                            # RPMSENSE_SCRIPT_POSTUN == 7744
                            reqdict[(f[i]&7744 and PreReq or Req,
                                     intern(ni), r, vi)] = True
                reqargs = reqdict.keys()
            else:
                reqargs = None
            n = h[1054] # RPMTAG_CONFLICTNAME
            if n:
                f = h[1053] # RPMTAG_CONFLICTFLAGS
                # FIXME (20050321): Solaris rpm 4.1 hack
                if type(f) == int:
                    f = [f]
                v = h[1055] # RPMTAG_CONFLICTVERSION
                cnfargs = []
                for i in range(len(n)):
                    vi = v[i] or None
                    if vi and vi[:2] == "0:":
                        vi = vi[2:]
                    cnfargs.append((Cnf, n[i], CM.get(f[i]&CF), vi))
            else:
                cnfargs = []
            # Every package implicitly obsoletes its own older versions.
            obstup = (Obs, name, '<', versionarch)
            n = h[1090] # RPMTAG_OBSOLETENAME
            if n:
                f = h[1114] # RPMTAG_OBSOLETEFLAGS
                # FIXME (20050321): Solaris rpm 4.1 hack
                if type(f) == int:
                    f = [f]
                v = h[1115] # RPMTAG_OBSOLETEVERSION
                upgargs = []
                for i in range(len(n)):
                    vi = v[i] or None
                    if vi and vi[:2] == "0:":
                        vi = vi[2:]
                    upgargs.append((Obs, n[i], CM.get(f[i]&CF), vi))
                # Obsoleted packages also conflict with this one.
                cnfargs.extend(upgargs)
                upgargs.append(obstup)
            else:
                upgargs = [obstup]
            pkg = self.buildPackage((Pkg, name, versionarch),
                                    prvargs, reqargs, upgargs, cnfargs)
            pkg.loaders[self] = offset
            self._offsets[offset] = pkg
            self._groups[pkg] = intern(h[rpm.RPMTAG_GROUP])

    def search(self, searcher):
        """Match every loaded package against the searcher's criteria.

        A perfect ratio (1) short-circuits: the package is reported and
        the remaining, more expensive criteria are skipped.
        """
        for h, offset in self.getHeaders(Progress()):
            pkg = self._offsets.get(offset)
            if not pkg:
                continue
            ratio = 0
            if searcher.url:
                refurl = h[rpm.RPMTAG_URL]
                if refurl:
                    for url, cutoff in searcher.url:
                        _, newratio = globdistance(url, refurl, cutoff)
                        if newratio > ratio:
                            ratio = newratio
                            if ratio == 1:
                                break
            if ratio == 1:
                searcher.addResult(pkg, ratio)
                continue
            if searcher.path:
                paths = h[rpm.RPMTAG_OLDFILENAMES]
                if paths:
                    for spath, cutoff in searcher.path:
                        for path in paths:
                            _, newratio = globdistance(spath, path, cutoff)
                            if newratio > ratio:
                                ratio = newratio
                                if ratio == 1:
                                    break
                        else:
                            continue
                        # Inner break reached a perfect match; stop.
                        break
            if ratio == 1:
                searcher.addResult(pkg, ratio)
                continue
            if searcher.group:
                group = self._groups[pkg]
                for pat in searcher.group:
                    if pat.search(group):
                        ratio = 1
                        break
            if ratio == 1:
                searcher.addResult(pkg, ratio)
                continue
            if searcher.summary:
                summary = h[rpm.RPMTAG_SUMMARY]
                for pat in searcher.summary:
                    if pat.search(summary):
                        ratio = 1
                        break
            if ratio == 1:
                searcher.addResult(pkg, ratio)
                continue
            if searcher.description:
                description = h[rpm.RPMTAG_DESCRIPTION]
                for pat in searcher.description:
                    if pat.search(description):
                        ratio = 1
                        break
            if ratio:
                searcher.addResult(pkg, ratio)
class RPMHeaderListLoader(RPMHeaderLoader):
    """Loader for rpm "header list" files (concatenated raw headers).

    When the rpm module supports readHeaderFromFD(), headers are
    streamed from the file on demand; otherwise the whole list is kept
    in memory (_hdl) and the *HDL accessor variants are patched in.
    """

    def __init__(self, filename, baseurl, count=None):
        RPMHeaderLoader.__init__(self)
        self._filename = filename
        self._baseurl = baseurl
        self._count = count   # known header count, or None to estimate
        self._checkRPM()

    def __getstate__(self):
        # The in-memory header list is not picklable; it is rebuilt by
        # _checkRPM() on restore.
        state = RPMHeaderLoader.__getstate__(self)
        if "_hdl" in state:
            del state["_hdl"]
        return state

    def __setstate__(self, state):
        RPMHeaderLoader.__setstate__(self, state)
        self._checkRPM()

    def _checkRPM(self):
        """Patch in the in-memory fallbacks when readHeaderFromFD is absent."""
        if not hasattr(rpm, "readHeaderFromFD"):
            # Warn at most a few times across runs; the counter is
            # persisted in sysconf.
            if (not hasattr(self.__class__, "WARNED") and
                sysconf.get("no-rpm-readHeaderFromFD", 0) < 3):
                self.__class__.WARNED = True
                sysconf.set("no-rpm-readHeaderFromFD",
                            sysconf.get("no-rpm-readHeaderFromFD", 0)+1)
                iface.warning(_("Your rpm module has no support for "
                                "readHeaderFromFD()!\n"
                                "As a consequence, Smart will consume "
                                "extra memory."))
            self.__class__.getHeaders = self.getHeadersHDL
            self.__class__.getHeader = self.getHeaderHDL
            self.__class__.loadFileProvides = self.loadFileProvidesHDL
            self._hdl = rpm.readHeaderListFromFile(self._filename)

    def getLoadSteps(self):
        if self._count is None:
            if hasattr(rpm, "readHeaderFromFD"):
                # ~2500 bytes per header is used as a progress estimate.
                return os.path.getsize(self._filename)/2500
            else:
                return len(rpm.readHeaderListFromFile(self._filename))
        return self._count

    def getHeaders(self, prog):
        file = open(self._filename)
        lastoffset = mod = 0
        h, offset = rpm.readHeaderFromFD(file.fileno())
        if self._count:
            # Exact count known: one progress tick per header.
            while h:
                yield h, offset
                h, offset = rpm.readHeaderFromFD(file.fileno())
                if offset:
                    prog.add(1)
                    prog.show()
        else:
            # Count estimated from file size: tick in ~2500-byte units,
            # carrying the remainder (mod) between iterations.
            while h:
                yield h, offset
                h, offset = rpm.readHeaderFromFD(file.fileno())
                if offset:
                    div, mod = divmod(offset-lastoffset+mod, 2500)
                    lastoffset = offset
                    prog.add(div)
                    prog.show()
        file.close()

    def getHeadersHDL(self, prog):
        # In-memory fallback: the list index doubles as the offset.
        for offset, h in enumerate(self._hdl):
            yield h, offset
            prog.add(1)
            prog.show()

    def getHeader(self, pkg):
        file = open(self._filename)
        file.seek(pkg.loaders[self])
        h, offset = rpm.readHeaderFromFD(file.fileno())
        file.close()
        return h

    def getHeaderHDL(self, pkg):
        return self._hdl[pkg.loaders[self]]

    def getURL(self):
        return self._baseurl

    def getFileName(self, info):
        # Standard NEVRA-style binary rpm file name.
        h = info._h
        return "%s-%s-%s.%s.rpm" % (h[rpm.RPMTAG_NAME],
                                    h[rpm.RPMTAG_VERSION],
                                    h[rpm.RPMTAG_RELEASE],
                                    h[rpm.RPMTAG_ARCH])

    def getSize(self, info):
        # Plain header lists carry no payload size information.
        return None

    def getMD5(self, info):
        # Plain header lists carry no digest information.
        return None

    def loadFileProvides(self, fndict):
        file = open(self._filename)
        h, offset = rpm.readHeaderFromFD(file.fileno())
        bfp = self.buildFileProvides
        while h:
            for fn in h[1027]: # RPMTAG_OLDFILENAMES
                fn = fndict.get(fn)
                if fn and offset in self._offsets:
                    bfp(self._offsets[offset], (RPMProvides, fn, None))
            h, offset = rpm.readHeaderFromFD(file.fileno())
        file.close()

    def loadFileProvidesHDL(self, fndict):
        bfp = self.buildFileProvides
        for offset, h in enumerate(self._hdl):
            for fn in h[1027]: # RPMTAG_OLDFILENAMES
                fn = fndict.get(fn)
                if fn and offset in self._offsets:
                    bfp(self._offsets[offset], (RPMProvides, fn, None))
class RPMPackageListLoader(RPMHeaderListLoader):
    """Header-list loader whose headers carry smart's CRPMTAG_* extras
    (payload file name, directory, size, and md5)."""

    def getFileName(self, info):
        h = info._h
        filename = h[CRPMTAG_FILENAME]
        if not filename:
            raise Error, _("Package list with no CRPMTAG_FILENAME tag")
        directory = h[CRPMTAG_DIRECTORY]
        if directory:
            filename = os.path.join(directory, filename)
        return filename

    def getSize(self, info):
        return info._h[CRPMTAG_FILESIZE]

    def getMD5(self, info):
        return info._h[CRPMTAG_MD5]
class URPMILoader(RPMHeaderListLoader):
def __init__(self, filename, baseurl, listfile):
RPMHeaderListLoader.__init__(self, filename, baseurl)
self._prefix = {}
if listfile:
for entry in open(listfile):
if entry[:2] == "./":
entry = entry[2:]
dirname, basename = os.path.split(entry.rstrip())
self._prefix[basename] = dirname
def getFileName(self, info):
h = info._h
filename = h[CRPMTAG_FILENAME]
if not filename:
raise Error, _("Package list with no CRPMTAG_FILENAME tag")
if filename in self._prefix:
filename = os.path.join(self._prefix[filename], filename)
return filename
def getSize(self, info):
return info._h[CRPMTAG_FILESIZE]
def getMD5(self, info):
return None
class RPMDBLoader(RPMHeaderLoader):
    """Loader for the locally installed rpm database."""

    def __init__(self):
        RPMHeaderLoader.__init__(self)
        self.setInstalled(True)
        # Prefer metadata from repository loaders over the installed DB.
        self._infoorder = -100

    def getLoadSteps(self):
        # The match count is unknown up front; the total is grown
        # incrementally inside getHeaders().
        return 1

    def getHeaders(self, prog):
        # FIXME (20050321): Solaris rpm 4.1 hack
        if sys.platform[:5] == "sunos":
            rpm.addMacro("_dbPath", sysconf.get("rpm-root", "/"))
            ts = rpm.TransactionSet()
        else:
            ts = rpm.ts(sysconf.get("rpm-root", "/"))
        mi = ts.dbMatch()
        for h in mi:
            # gpg-pubkey pseudo-packages are not real packages.
            if h[1000] != "gpg-pubkey": # RPMTAG_NAME
                yield h, mi.instance()
                prog.addTotal(1)
                prog.add(1)
                prog.show()
        prog.add(1)

    # Pick the getHeader implementation once at class-definition time,
    # depending on whether the optional rpmhelper module is available.
    if rpmhelper:
        def getHeader(self, pkg):
            # FIXME (20050321): Solaris rpm 4.1 hack
            if sys.platform[:5] == "sunos":
                rpm.addMacro("_dbPath", sysconf.get("rpm-root", "/"))
                ts = rpm.TransactionSet()
            else:
                ts = rpm.ts(sysconf.get("rpm-root", "/"))
            mi = rpmhelper.dbMatch(ts, 0, pkg.loaders[self])
            return mi.next()
    else:
        def getHeader(self, pkg):
            # FIXME (20050321): Solaris rpm 4.1 hack
            if sys.platform[:5] == "sunos":
                rpm.addMacro("_dbPath", sysconf.get("rpm-root", "/"))
                ts = rpm.TransactionSet()
            else:
                ts = rpm.ts(sysconf.get("rpm-root", "/"))
            mi = ts.dbMatch(0, pkg.loaders[self])
            return mi.next()

    def getURL(self):
        # Installed packages have no download location.
        return None

    def getFileName(self, info):
        return None

    def getSize(self, info):
        return None

    def getMD5(self, info):
        return None

    def loadFileProvides(self, fndict):
        # FIXME (20050321): Solaris rpm 4.1 hack
        if sys.platform[:5] == "sunos":
            rpm.addMacro("_dbPath", sysconf.get("rpm-root", "/"))
            ts = rpm.TransactionSet()
        else:
            ts = rpm.ts(sysconf.get("rpm-root", "/"))
        bfp = self.buildFileProvides
        for fn in fndict:
            mi = ts.dbMatch(1117, fn) # RPMTAG_BASENAMES
            try:
                h = mi.next()
                while h:
                    i = mi.instance()
                    if i in self._offsets:
                        bfp(self._offsets[i], (RPMProvides, fn, None))
                    h = mi.next()
            except StopIteration:
                pass
class RPMDirLoader(RPMHeaderLoader):
def __init__(self, dir, filename=None):
RPMHeaderLoader.__init__(self)
self._dir = os.path.abspath(dir)
if filename:
self._filenames = [filename]
else:
self._filenames = [x for x in os.listdir(dir)
if x.endswith(".rpm") and
not x.endswith(".src.rpm")]
def getLoadSteps(self):
return len(self._filenames)
def getHeaders(self, prog):
# FIXME (20050321): Solaris rpm 4.1 hack
if sys.platform[:5] == "sunos":
rpm.addMacro("_dbPath", sysconf.get("rpm-root", "/"))
ts = rpm.TransactionSet()
else:
ts = rpm.ts()
for i, filename in enumerate(self._filenames):
filepath = os.path.join(self._dir, filename)
file = open(filepath)
try:
h = ts.hdrFromFdno(file.fileno())
except rpm.error, e:
iface.error("%s: %s" % (os.path.basename(filepath), e))
else:
yield (h, i)
file.close()
prog.add(1)
prog.show()
def getHeader(self, pkg):
filename = self._filenames[pkg.loaders[self]]
filepath = os.path.join(self._dir, filename)
file = open(filepath)
# FIXME (20050321): Solaris rpm 4.1 hack
if sys.platform[:5] == "sunos":
rpm.addMacro("_dbPath", sysconf.get("rpm-root", "/"))
ts = rpm.TransactionSet()
else:
ts = rpm.ts()
try:
h = ts.hdrFromFdno(file.fileno())
except rpm.error, e:
iface.error("%s: %s" % (os.path.basename(filepath), e))
h = None
file.close()
return h
def getURL(self):
return "file:///"
def getFileName(self, info):
pkg = info.getPackage()
filename = self._filenames[pkg.loaders[self]]
filepath = os.path.join(self._dir, filename)
while filepath.startswith("/"):
filepath = filepath[1:]
return filepath
def getSize(self, info):
pkg = info.getPackage()
filename = self._filenames[pkg.loaders[self]]
return os.path.getsize(os.path.join(self._dir, filename))
def getMD5(self, info):
# Could compute it now, but why?
return None
def loadFileProvides(self, fndict):
# FIXME (20050321): Solaris rpm 4.1 hack
if sys.platform[:5] == "sunos":
rpm.addMacro("_dbPath", sysconf.get("rpm-root", "/"))
ts = rpm.TransactionSet()
else:
ts = rpm.ts()
bfp = self.buildFileProvides
for i, filename in enumerate(self._filenames):
if i not in self._offsets:
continue
filepath = os.path.join(self._dir, filename)
file = open(filepath)
try:
h = ts.hdrFromFdno(file.fileno())
except rpm.error, e:
file.close()
iface.error("%s: %s" % (os.path.basename(filepath), e))
else:
file.close()
# FIXME (20050321): Solaris rpm 4.1 hack
f = h[1027] # RPMTAG_OLDFILENAMES
if f == None: f = []
for fn in f:
fn = fndict.get(fn)
if fn:
bfp(self._offsets[i], (RPMProvides, fn, None))
class RPMFileChannel(FileChannel):
    """Channel serving a single local .rpm file."""

    def fetch(self, fetcher, progress):
        # The file's mtime doubles as a cheap digest: rebuild the loader
        # only when the file has changed since the last fetch.
        digest = os.path.getmtime(self._filename)
        if digest == self._digest:
            return True
        self.removeLoaders()
        dirname, basename = os.path.split(self._filename)
        loader = RPMDirLoader(dirname, basename)
        loader.setChannel(self)
        self._loaders.append(loader)
        self._digest = digest
        return True
def createFileChannel(filename):
    """Hook: wrap a local binary .rpm path in an RPMFileChannel.

    Returns None for anything that is not a binary rpm (including
    source rpms), so other hooks may claim the file instead.
    """
    if not filename.endswith(".rpm") or filename.endswith(".src.rpm"):
        return None
    return RPMFileChannel(filename)
# Expose this module's factory to the generic file-channel machinery.
hooks.register("create-file-channel", createFileChannel)

# vim:ts=4:sw=4:et
|
yuanming-hu/taichi
|
refs/heads/master
|
python/taichi/lang/ops.py
|
1
|
import builtins
import ctypes
import functools
import math
import operator as _bt_ops_mod # bt for builtin
import traceback
from taichi.core.util import ti_core as _ti_core
from taichi.lang import impl, matrix
from taichi.lang.exception import TaichiSyntaxError
from taichi.lang.expr import Expr, make_expr_group
from taichi.lang.util import cook_dtype, is_taichi_class, taichi_scope
unary_ops = []
def stack_info():
    """Return a formatted, user-level stack trace as a string.

    The outermost Taichi-internal frames are dropped, everything up to
    and including the 'taichi_ast_generator' frame is skipped, and the
    last few (internal) lines of the formatted text are trimmed.
    """
    frames = traceback.extract_stack()[3:-1]
    for idx, frame in enumerate(frames):
        if 'taichi_ast_generator' in frame:
            frames = frames[idx + 1:]
            break
    text = ''.join(traceback.format_list(frames))
    # remove the confusing last line
    return '\n'.join(text.split('\n')[:-5]) + '\n'
def is_taichi_expr(a):
    """Return True when a is a Taichi Expr instance."""
    return isinstance(a, Expr)


def wrap_if_not_expr(a):
    """Promote a plain Python value to an Expr; pass Exprs through."""
    # Sentinel local read by Taichi's traceback-filtering machinery.
    _taichi_skip_traceback = 1
    return Expr(a) if not is_taichi_expr(a) else a
def unary(foo):
    """Decorator for unary ops: apply element-wise on Taichi classes
    (matrices/vectors), directly on scalars/exprs otherwise.

    The wrapper is registered in unary_ops, matching how binary(),
    ternary() and writeback_binary() register theirs (previously the
    unary_ops list was never populated).
    """
    @functools.wraps(foo)
    def imp_foo(x):
        # Sentinel local read by Taichi's traceback-filtering machinery.
        _taichi_skip_traceback = 2
        return foo(x)

    @functools.wraps(foo)
    def wrapped(a):
        _taichi_skip_traceback = 1
        if is_taichi_class(a):
            return a.element_wise_unary(imp_foo)
        else:
            return imp_foo(a)

    unary_ops.append(wrapped)
    return wrapped
binary_ops = []
def binary(foo):
    """Decorator for binary ops: dispatch element-wise when either
    operand is a Taichi class, with arguments reversed for the
    right-hand case so foo always sees (a, b) in the original order.
    """
    @functools.wraps(foo)
    def imp_foo(x, y):
        # Sentinel local read by Taichi's traceback-filtering machinery.
        _taichi_skip_traceback = 2
        return foo(x, y)

    @functools.wraps(foo)
    def rev_foo(x, y):
        _taichi_skip_traceback = 2
        return foo(y, x)

    @functools.wraps(foo)
    def wrapped(a, b):
        _taichi_skip_traceback = 1
        if is_taichi_class(a):
            return a.element_wise_binary(imp_foo, b)
        elif is_taichi_class(b):
            return b.element_wise_binary(rev_foo, a)
        else:
            return imp_foo(a, b)

    binary_ops.append(wrapped)
    return wrapped
# Registry list for ternary operator wrappers; populated by @ternary.
ternary_ops = []


def ternary(foo):
    """Decorator for ternary ops: dispatch element-wise on the first
    argument that is a Taichi class; each permuted helper restores the
    original (a, b, c) argument order for foo.
    """
    @functools.wraps(foo)
    def abc_foo(a, b, c):
        # Sentinel local read by Taichi's traceback-filtering machinery.
        _taichi_skip_traceback = 2
        return foo(a, b, c)

    @functools.wraps(foo)
    def bac_foo(b, a, c):
        _taichi_skip_traceback = 2
        return foo(a, b, c)

    @functools.wraps(foo)
    def cab_foo(c, a, b):
        _taichi_skip_traceback = 2
        return foo(a, b, c)

    @functools.wraps(foo)
    def wrapped(a, b, c):
        _taichi_skip_traceback = 1
        if is_taichi_class(a):
            return a.element_wise_ternary(abc_foo, b, c)
        elif is_taichi_class(b):
            return b.element_wise_ternary(bac_foo, a, c)
        elif is_taichi_class(c):
            return c.element_wise_ternary(cab_foo, a, b)
        else:
            return abc_foo(a, b, c)

    ternary_ops.append(wrapped)
    return wrapped
# Registry list for augmented-assignment style (writeback) operators.
writeback_binary_ops = []


def writeback_binary(foo):
    """Decorator for in-place (atomic/assign) ops.

    The left operand must be writable; assigning a Taichi class into a
    scalar expression is rejected with a syntax error.
    """
    @functools.wraps(foo)
    def imp_foo(x, y):
        # Sentinel local read by Taichi's traceback-filtering machinery.
        _taichi_skip_traceback = 2
        return foo(x, wrap_if_not_expr(y))

    @functools.wraps(foo)
    def wrapped(a, b):
        _taichi_skip_traceback = 1
        if is_taichi_class(a):
            return a.element_wise_writeback_binary(imp_foo, b)
        elif is_taichi_class(b):
            raise TaichiSyntaxError(
                f'cannot augassign taichi class {type(b)} to scalar expr')
        else:
            return imp_foo(a, b)

    writeback_binary_ops.append(wrapped)
    return wrapped
def cast(obj, dtype):
    """Value-cast obj to dtype (numeric conversion).

    Taichi classes cast element-wise via their own cast(); everything
    else goes through the IR value_cast node.
    """
    _taichi_skip_traceback = 1
    dtype = cook_dtype(dtype)
    if is_taichi_class(obj):
        # TODO: unify with element_wise_unary
        return obj.cast(dtype)
    else:
        return Expr(_ti_core.value_cast(Expr(obj).ptr, dtype))


def bit_cast(obj, dtype):
    """Reinterpret obj's bits as dtype (no numeric conversion).

    Only scalar expressions are supported; Taichi classes are rejected.
    """
    _taichi_skip_traceback = 1
    dtype = cook_dtype(dtype)
    if is_taichi_class(obj):
        raise ValueError('Cannot apply bit_cast on Taichi classes')
    else:
        return Expr(_ti_core.bits_cast(Expr(obj).ptr, dtype))
def _unary_operation(taichi_op, python_op, a):
    """Lower to the Taichi IR op for Exprs, else run the Python op."""
    _taichi_skip_traceback = 1
    if is_taichi_expr(a):
        return Expr(taichi_op(a.ptr), tb=stack_info())
    else:
        return python_op(a)


def _binary_operation(taichi_op, python_op, a, b):
    """Lower to the Taichi IR op when either operand is an Expr
    (promoting the other), else run the Python op."""
    _taichi_skip_traceback = 1
    if is_taichi_expr(a) or is_taichi_expr(b):
        a, b = wrap_if_not_expr(a), wrap_if_not_expr(b)
        return Expr(taichi_op(a.ptr, b.ptr), tb=stack_info())
    else:
        return python_op(a, b)


def _ternary_operation(taichi_op, python_op, a, b, c):
    """Lower to the Taichi IR op when any operand is an Expr
    (promoting the rest), else run the Python op."""
    _taichi_skip_traceback = 1
    if is_taichi_expr(a) or is_taichi_expr(b) or is_taichi_expr(c):
        a, b, c = wrap_if_not_expr(a), wrap_if_not_expr(b), wrap_if_not_expr(c)
        return Expr(taichi_op(a.ptr, b.ptr, c.ptr), tb=stack_info())
    else:
        return python_op(a, b, c)
# ---------------------------------------------------------------------------
# Unary operators.  Inside a kernel each call lowers to the corresponding
# Taichi IR expression; outside one it falls back to the plain Python
# implementation (second argument of _unary_operation).
# ---------------------------------------------------------------------------


@unary
def neg(a):
    return _unary_operation(_ti_core.expr_neg, _bt_ops_mod.neg, a)


@unary
def sin(a):
    return _unary_operation(_ti_core.expr_sin, math.sin, a)


@unary
def cos(a):
    return _unary_operation(_ti_core.expr_cos, math.cos, a)


@unary
def asin(a):
    return _unary_operation(_ti_core.expr_asin, math.asin, a)


@unary
def acos(a):
    return _unary_operation(_ti_core.expr_acos, math.acos, a)


@unary
def sqrt(a):
    return _unary_operation(_ti_core.expr_sqrt, math.sqrt, a)


@unary
def rsqrt(a):
    # Reciprocal square root: 1 / sqrt(a).
    def _rsqrt(a):
        return 1 / math.sqrt(a)
    return _unary_operation(_ti_core.expr_rsqrt, _rsqrt, a)


@unary
def floor(a):
    return _unary_operation(_ti_core.expr_floor, math.floor, a)


@unary
def ceil(a):
    return _unary_operation(_ti_core.expr_ceil, math.ceil, a)


@unary
def tan(a):
    return _unary_operation(_ti_core.expr_tan, math.tan, a)


@unary
def tanh(a):
    return _unary_operation(_ti_core.expr_tanh, math.tanh, a)


@unary
def exp(a):
    return _unary_operation(_ti_core.expr_exp, math.exp, a)


@unary
def log(a):
    # Natural logarithm.
    return _unary_operation(_ti_core.expr_log, math.log, a)


@unary
def abs(a):
    # Intentionally shadows builtins.abs at module scope.
    return _unary_operation(_ti_core.expr_abs, builtins.abs, a)


@unary
def bit_not(a):
    return _unary_operation(_ti_core.expr_bit_not, _bt_ops_mod.invert, a)


@unary
def logical_not(a):
    return _unary_operation(_ti_core.expr_logic_not, lambda x: int(not x), a)


def random(dtype=float):
    """Return a random-number expression; only meaningful in a kernel."""
    dtype = cook_dtype(dtype)
    x = Expr(_ti_core.make_rand_expr(dtype))
    return impl.expr_init(x)
# NEXT: add matpow(self, power)

# ---------------------------------------------------------------------------
# Binary operators.  Note that pow/max/min (like abs above) intentionally
# shadow the Python builtins at module scope; comparison ops return -1 for
# true (all bits set) rather than 1, matching Taichi's convention.
# ---------------------------------------------------------------------------


@binary
def add(a, b):
    return _binary_operation(_ti_core.expr_add, _bt_ops_mod.add, a, b)


@binary
def sub(a, b):
    return _binary_operation(_ti_core.expr_sub, _bt_ops_mod.sub, a, b)


@binary
def mul(a, b):
    return _binary_operation(_ti_core.expr_mul, _bt_ops_mod.mul, a, b)


@binary
def mod(a, b):
    def expr_python_mod(a, b):
        # a % b = a - (a // b) * b
        quotient = Expr(_ti_core.expr_floordiv(a, b))
        multiply = Expr(_ti_core.expr_mul(b, quotient.ptr))
        return _ti_core.expr_sub(a, multiply.ptr)

    return _binary_operation(expr_python_mod, _bt_ops_mod.mod, a, b)


@binary
def pow(a, b):
    return _binary_operation(_ti_core.expr_pow, _bt_ops_mod.pow, a, b)


@binary
def floordiv(a, b):
    return _binary_operation(_ti_core.expr_floordiv, _bt_ops_mod.floordiv, a,
                             b)


@binary
def truediv(a, b):
    return _binary_operation(_ti_core.expr_truediv, _bt_ops_mod.truediv, a, b)


@binary
def max(a, b):
    return _binary_operation(_ti_core.expr_max, builtins.max, a, b)


@binary
def min(a, b):
    return _binary_operation(_ti_core.expr_min, builtins.min, a, b)


@binary
def atan2(a, b):
    return _binary_operation(_ti_core.expr_atan2, math.atan2, a, b)


@binary
def raw_div(a, b):
    # C-style division: integer division for two ints, float otherwise.
    def c_div(a, b):
        if isinstance(a, int) and isinstance(b, int):
            return a // b
        else:
            return a / b

    return _binary_operation(_ti_core.expr_div, c_div, a, b)


@binary
def raw_mod(a, b):
    # C-style remainder (truncated toward zero, sign follows a).
    def c_mod(a, b):
        return a - b * int(float(a) / b)

    return _binary_operation(_ti_core.expr_mod, c_mod, a, b)


@binary
def cmp_lt(a, b):
    return _binary_operation(_ti_core.expr_cmp_lt, lambda a, b: -int(a < b), a,
                             b)


@binary
def cmp_le(a, b):
    return _binary_operation(_ti_core.expr_cmp_le, lambda a, b: -int(a <= b),
                             a, b)


@binary
def cmp_gt(a, b):
    return _binary_operation(_ti_core.expr_cmp_gt, lambda a, b: -int(a > b), a,
                             b)


@binary
def cmp_ge(a, b):
    return _binary_operation(_ti_core.expr_cmp_ge, lambda a, b: -int(a >= b),
                             a, b)


@binary
def cmp_eq(a, b):
    return _binary_operation(_ti_core.expr_cmp_eq, lambda a, b: -int(a == b),
                             a, b)


@binary
def cmp_ne(a, b):
    return _binary_operation(_ti_core.expr_cmp_ne, lambda a, b: -int(a != b),
                             a, b)


@binary
def bit_or(a, b):
    return _binary_operation(_ti_core.expr_bit_or, _bt_ops_mod.or_, a, b)


@binary
def bit_and(a, b):
    return _binary_operation(_ti_core.expr_bit_and, _bt_ops_mod.and_, a, b)


@binary
def bit_xor(a, b):
    return _binary_operation(_ti_core.expr_bit_xor, _bt_ops_mod.xor, a, b)


@binary
def bit_shl(a, b):
    return _binary_operation(_ti_core.expr_bit_shl, _bt_ops_mod.lshift, a, b)


@binary
def bit_sar(a, b):
    # Arithmetic (sign-extending) right shift.
    return _binary_operation(_ti_core.expr_bit_sar, _bt_ops_mod.rshift, a, b)


@taichi_scope
@binary
def bit_shr(a, b):
    # Logical right shift; only available inside a kernel.
    return _binary_operation(_ti_core.expr_bit_shr, _bt_ops_mod.rshift, a, b)


# We don't have logic_and/or instructions yet:
logical_or = bit_or
logical_and = bit_and
@ternary
def select(cond, a, b):
    """Element-wise select: a where cond is true, b elsewhere."""
    # TODO: systematically resolve `-1 = True` problem by introducing u1:
    # Double logical-not normalizes cond to 0/-1 regardless of input.
    cond = logical_not(logical_not(cond))

    def py_select(cond, a, b):
        return a * cond + b * (1 - cond)

    return _ternary_operation(_ti_core.expr_select, py_select, cond, a, b)
# ---------------------------------------------------------------------------
# In-place (writeback) operators.  Each atomic op returns the OLD value of
# the destination, captured via expr_init.
# ---------------------------------------------------------------------------


@writeback_binary
def atomic_add(a, b):
    return impl.expr_init(
        Expr(_ti_core.expr_atomic_add(a.ptr, b.ptr), tb=stack_info()))


@writeback_binary
def atomic_sub(a, b):
    return impl.expr_init(
        Expr(_ti_core.expr_atomic_sub(a.ptr, b.ptr), tb=stack_info()))


@writeback_binary
def atomic_min(a, b):
    return impl.expr_init(
        Expr(_ti_core.expr_atomic_min(a.ptr, b.ptr), tb=stack_info()))


@writeback_binary
def atomic_max(a, b):
    return impl.expr_init(
        Expr(_ti_core.expr_atomic_max(a.ptr, b.ptr), tb=stack_info()))


@writeback_binary
def atomic_and(a, b):
    return impl.expr_init(
        Expr(_ti_core.expr_atomic_bit_and(a.ptr, b.ptr), tb=stack_info()))


@writeback_binary
def atomic_or(a, b):
    return impl.expr_init(
        Expr(_ti_core.expr_atomic_bit_or(a.ptr, b.ptr), tb=stack_info()))


@writeback_binary
def atomic_xor(a, b):
    return impl.expr_init(
        Expr(_ti_core.expr_atomic_bit_xor(a.ptr, b.ptr), tb=stack_info()))


@writeback_binary
def assign(a, b):
    """Plain assignment a = b; returns a for chaining."""
    _ti_core.expr_assign(a.ptr, b.ptr, stack_info())
    return a
def ti_max(*args):
    """Return the maximum of one or more arguments.

    Delegates pairwise to the module-level binary ``max`` so both Taichi
    expressions and plain Python scalars are handled; recursion keeps
    the original right-to-left pairing.
    """
    assert len(args) >= 1
    if len(args) == 1:
        return args[0]
    if len(args) == 2:
        return max(args[0], args[1])
    return max(args[0], ti_max(*args[1:]))
def ti_min(*args):
    """Return the minimum of one or more arguments.

    Delegates pairwise to the module-level binary ``min`` so both Taichi
    expressions and plain Python scalars are handled; recursion keeps
    the original right-to-left pairing.
    """
    assert len(args) >= 1
    if len(args) == 1:
        return args[0]
    if len(args) == 2:
        return min(args[0], args[1])
    return min(args[0], ti_min(*args[1:]))
def ti_any(a):
    """Reduce: true when any element of a is nonzero (delegates to a.any())."""
    return a.any()


def ti_all(a):
    """Reduce: true when all elements of a are nonzero (delegates to a.all())."""
    return a.all()


def append(l, indices, val):
    """Append val to the dynamic SNode l at the given indices; returns
    the insertion result expression."""
    a = impl.expr_init(
        _ti_core.insert_append(l.snode.ptr, make_expr_group(indices),
                               Expr(val).ptr))
    return a
def external_func_call(func, args=None, outputs=None):
    """Insert a call to an external C function into the Taichi IR.

    Args:
        func: A ctypes-compatible function object/pointer.
        args: Sequence of input expressions (default: none).
        outputs: Sequence of output expressions (default: none).

    The previous mutable-list defaults were replaced with a None
    sentinel; caller-visible behavior is unchanged.
    """
    args = [] if args is None else args
    outputs = [] if outputs is None else outputs
    func_addr = ctypes.cast(func, ctypes.c_void_p).value
    _ti_core.insert_external_func_call(func_addr, '', make_expr_group(args),
                                       make_expr_group(outputs))
def asm(source, inputs=None, outputs=None):
    """Insert an inline source snippet as an external call (func addr 0).

    The previous mutable-list defaults were replaced with a None
    sentinel; caller-visible behavior is unchanged.
    """
    inputs = [] if inputs is None else inputs
    outputs = [] if outputs is None else outputs
    _ti_core.insert_external_func_call(0, source, make_expr_group(inputs),
                                       make_expr_group(outputs))
def is_active(l, indices):
    """Expression: whether the sparse SNode cell at indices is active."""
    return Expr(
        _ti_core.insert_is_active(l.snode.ptr, make_expr_group(indices)))


def activate(l, indices):
    """Activate the sparse SNode cell at indices."""
    _ti_core.insert_activate(l.snode.ptr, make_expr_group(indices))


def deactivate(l, indices):
    """Deactivate the sparse SNode cell at indices."""
    _ti_core.insert_deactivate(l.snode.ptr, make_expr_group(indices))


def length(l, indices):
    """Expression: current length of the dynamic SNode at indices."""
    return Expr(_ti_core.insert_len(l.snode.ptr, make_expr_group(indices)))
def rescale_index(a, b, I):
    """Rescales the grouped index 'I' of field 'a' to match the shape of
    field 'b', assuming the two shapes differ by integer factors.

    Parameters
    ----------
    a: ti.field(), ti.Vector.field, ti.Matrix.field()
        input taichi field
    b: ti.field(), ti.Vector.field, ti.Matrix.field()
        output taichi field
    I: ti.Vector()
        grouped loop index

    Returns
    -------
    Ib: ti.Vector()
        rescaled grouped loop index
    """
    # NOTE(review): the assert messages below are runtime strings (with a
    # pre-existing "arguement" typo) and are left byte-identical.
    assert isinstance(a, Expr) and a.is_global(), \
        f"first arguement must be a field"
    assert isinstance(b, Expr) and b.is_global(), \
        f"second arguement must be a field"
    assert isinstance(I, matrix.Matrix) and not I.is_global(), \
        f"third arguement must be a grouped index"
    Ib = I.copy()
    # Only the leading dimensions common to I, a and b are rescaled.
    for n in range(min(I.n, min(len(a.shape), len(b.shape)))):
        if a.shape[n] > b.shape[n]:
            # Downscale: integer-divide by the size ratio.
            Ib.entries[n] = I.entries[n] // (a.shape[n] // b.shape[n])
        if a.shape[n] < b.shape[n]:
            # Upscale: multiply by the size ratio.
            Ib.entries[n] = I.entries[n] * (b.shape[n] // a.shape[n])
    return Ib
def get_addr(f, indices):
    """Returns the address (for CUDA/x64) for index `indices` of field `f`.

    Currently, this function can only be called inside a kernel.

    Args:
        f (ti.field(), ti.Vector.field, ti.Matrix.field()): input taichi field
        indices (int, ti.Vector()): field indices

    Returns:
        ti.u64: The address of `f[indices]`.
    """
    return Expr(_ti_core.expr_get_addr(f.snode.ptr, make_expr_group(indices)))
|
af1rst/bite-project
|
refs/heads/master
|
tools/bugs/server/appengine/models/bugs/bug.py
|
17
|
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model for BITE bug data."""
__author__ = ('alexto@google.com (Alexis O. Torres)'
'jason.stredwick@gmail.com (Jason Stredwick)')
import logging
from google.appengine.ext import db
from bugs import kind
from bugs.providers.provider import Provider
from common.util import class_attr
# Allowed bug states.
class State(object):
  """Enumerates the lifecycle states a bug can be in."""
  ACTIVE = 'active'
  CLOSED = 'closed'
  RESOLVED = 'resolved'
  UNKNOWN = 'unknown'
class CreateError(Exception):
  """Raised when a new bug cannot be created and stored."""
  pass


class InvalidIdError(Exception):
  """Raised when no bug exists for a given datastore id."""
  pass


class UpdateError(Exception):
  """Raised when patching or storing an existing bug fails."""
  pass
class Bug(db.Model):
  """Models a Bug stored in AppEngine's Datastore.

  This data may be a reduced form of the bug's details as stored in the
  provider's database.

  Attributes:
    title: The bug's title.
    state: The current state of the bug; i.e. resolved, closed, or active.
    url: The url of the page the bug was filed against.
    summary: The bug's summary.
    added: When this instance of the bug was added to the BITE datastore.
    modified: When this instance of the bug was last modified in the BITE
        datastore.
    provider: Source provider of the bug information.
    bug_id: The ID of the bug within the provider's bug database.
    status: Status of the bug (eg. active, fixed, closed) when it
        was crawled.
    author: The user who first reported this bug; from provider.
    author_id: Identifies the user in the provider backend.
    reported_on: The date the bug was first opened; from provider.
    last_update: Date the bug was last updated; from provider.
    last_updater: The last user to update the bug; from provider.
    project: Name of the project this bug is associated with.
    priority: The bug's priority.
    details_link: A url/link to the bug in the provider's database.
    has_target_element: Whether or not a target element is attached.
    target_element: When specified, describes an element on the page the bug
        is associated with.
    has_screenshot: Whether a screenshot is attached.
    screenshot: Url to an associated screenshot.
    has_recording: True, if the bug has recorded script attached.
    recording_link: Link to recorded script.
  """
  # Bug Details
  title = db.StringProperty(required=False)
  state = db.StringProperty(required=False, default=State.UNKNOWN,
                            choices=(State.ACTIVE, State.RESOLVED,
                                     State.CLOSED, State.UNKNOWN))
  url = db.StringProperty(required=False, multiline=True, default='')
  summary = db.TextProperty(required=False)
  # added/modified are maintained automatically by the datastore.
  added = db.DateTimeProperty(required=False, auto_now_add=True)
  modified = db.DateTimeProperty(required=False, auto_now=True)

  # Provider Related Details
  provider = db.StringProperty(required=False,
                               default=Provider.DATASTORE,
                               choices=(Provider.DATASTORE,
                                        Provider.ISSUETRACKER))
  bug_id = db.StringProperty(required=False)
  status = db.StringProperty(required=False)
  author = db.StringProperty(required=False)
  author_id = db.StringProperty(required=False)
  reported_on = db.StringProperty(required=False)
  last_update = db.StringProperty(required=False)
  last_updater = db.StringProperty(required=False)
  project = db.StringProperty(required=False)
  priority = db.StringProperty(required=False)
  details_link = db.StringProperty(required=False)

  # Attachments; each has_* flag is derived from its value in Patch().
  has_target_element = db.BooleanProperty(required=False, default=False)
  target_element = db.TextProperty(required=False, default='')
  has_screenshot = db.BooleanProperty(required=False, default=False)
  screenshot = db.TextProperty(required=False, default='')
  has_recording = db.BooleanProperty(required=False, default=False)
  recording_link = db.TextProperty(required=False, default='')

  def Patch(self, obj):
    """Patch/update the model with data from the given object.

    For each property in the model, check if that property exists in the given
    data. If it exists then update the value for that property for the value
    in the given data. All properties in the given data will be ignored if
    that property does not exist in the model.

    Properties ignored by the patcher (that also exist in the model):
      added, modified, has_target_element, has_screenshot, has_recording

    Args:
      obj: The data to use to patch the model. (dict)

    Raise:
      db.Error: Raised if there is an error assigning the value from the given
          data to model.
      TypeError: Raised if the given object is not an object.
    """
    # TODO (jason.stredwick): Change to
    # auto_update_attr = class_attr.GetPODAttrs(Bug)
    # once the special cases have been resolved.
    special_props = ['target_element', 'has_target_element',
                     'screenshot', 'has_screenshot',
                     'recording_link', 'has_recording',
                     'added', 'modified']
    props = self.properties().keys()
    for key, value in obj.iteritems():
      if key in props and key not in special_props:
        setattr(self, key, value)

    # Handle special case properties.
    # Attachments: each has_* flag follows the truthiness of its value.
    if 'target_element' in obj:
      self.target_element = obj['target_element']
      if obj['target_element']:
        self.has_target_element = True
      else:
        self.has_target_element = False
    if 'screenshot' in obj:
      self.screenshot = obj['screenshot']
      if obj['screenshot']:
        self.has_screenshot = True
      else:
        self.has_screenshot = False
    if 'recording_link' in obj:
      self.recording_link = obj['recording_link']
      if obj['recording_link']:
        self.has_recording = True
      else:
        self.has_recording = False

    self.__Verify()

  def __Verify(self):
    """Determines if the bug is valid.

    Raises:
      db.Error: Raised if any bug property is invalid.
    """
    if not self.title:
      raise db.Error('Missing title; required.')
def Create(data):
  """Create a new bug entry.

  Args:
    data: An object used to create a new model. (dict)

  Returns:
    Return the newly created bug.

  Raises:
    CreateError: Raised if something goes wrong while creating a new bug.
  """
  try:
    bug = Bug()
    bug.Patch(data)
    bug.put()
  # NOTE(review): AssertionError is caught defensively; Patch/__Verify
  # document only TypeError and db.Error -- confirm whether it is needed.
  except (TypeError, db.Error, AssertionError), e:
    logging.error('bug.Create: Exception while creating bug: %s', e)
    raise CreateError('Failed to create a new bug.\n%s\n' % e)
  return bug
def Get(id):
  """Returns the bug model for the given id.

  Note: the parameter name shadows the builtin id(); renaming it would
  break keyword callers, so it is left as-is.

  Args:
    id: The id of the bug to retrieve. (integer)

  Returns:
    Returns the bug model. (Bug)

  Raises:
    InvalidIdError: Raised if the id does not match a stored bug.
  """
  try:
    bug = Bug.get_by_id(id)
    if not bug:
      # Raised here so both "not found" and datastore failures funnel
      # into the single handler below.
      raise InvalidIdError
  except (db.Error, InvalidIdError), e:
    logging.error('bug.Get: Exception while retrieving bug (%s): %s', id, e)
    raise InvalidIdError('Bug not found [id=%s].%s' % (id, e))
  return bug
def Update(bug, data):
"""Update the bug specified by the given id with the given data.
Args:
bug: The bug to update. (Bug)
data: An object used to update the model details. (dict)
Raises:
UpdateError: Raised if there was an error updating the bug.
"""
try:
bug.Patch(data)
bug.put()
except (TypeError, db.Error), e:
# tempdate is used to output the data into the log, but strip out the
# screenshot information due to size.
# TODO (jason.stredwick): Resolve how to store and access screenshots and
# remove tempdate once the screenshot data is no longer directly stored.
tempdata = data
if 'screenshot' in tempdata:
del tempdata['screenshot']
logging.error('bug.Update: Exception while updating bug (%s): %s. Given '
'data of %s', id, e, tempdata)
raise UpdateError('bug [id=%s] failed to update.\n%s\n' % (id, e))
|
KaiRo-at/socorro
|
refs/heads/master
|
webapp-django/crashstats/manage/tests/test_views.py
|
2
|
import json
import datetime
import os
import re
import urlparse
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User, Group, Permission
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.utils import timezone
import mock
from nose.tools import eq_, ok_, assert_raises
from eventlog.models import Log
from crashstats.symbols.models import SymbolsUpload
from crashstats.tokens.models import Token
from crashstats.supersearch.models import (
SuperSearchFields,
SuperSearchMissingFields,
)
from crashstats.crashstats import models
from crashstats.crashstats.tests.test_views import (
BaseTestViews,
Response
)
class TestViews(BaseTestViews):
def setUp(self):
    """Install a ProductVersions mock and prime its cache."""
    super(TestViews, self).setUp()

    def mocked_product_versions(**params):
        # Minimal single-version fixture for the WaterWolf product.
        hits = [
            {
                'is_featured': True,
                'throttle': 1.0,
                'end_date': 'string',
                'start_date': 'integer',
                'build_type': 'string',
                'product': 'WaterWolf',
                'version': '19.0',
                'has_builds': True
            }
        ]
        return {
            'hits': hits,
            'total': len(hits),
        }

    models.ProductVersions.implementation().get.side_effect = (
        mocked_product_versions
    )
    # prime the cache
    models.ProductVersions().get(active=True)
def _login(self, is_superuser=True):
    """Create a test user, sign it in, and store it on ``self.user``."""
    user = User.objects.create_user('kairo', 'kai@ro.com', 'secret')
    user.is_superuser = is_superuser
    # django doesn't set this unless the user has actually signed in
    user.last_login = datetime.datetime.utcnow()
    user.save()
    self.user = user
    assert self.client.login(username='kairo', password='secret')
def _create_permission(self, name='Mess Around', codename='mess_around'):
    """Return a new Permission attached to a bare crashstats content type."""
    content_type, _ = ContentType.objects.get_or_create(
        app_label='crashstats',
        model='',
    )
    return Permission.objects.create(
        content_type=content_type,
        name=name,
        codename=codename,
    )
def test_home_page_not_signed_in(self):
    """Anonymous and non-superuser visitors are kept off the manage home."""
    home_url = reverse('manage:home')
    response = self.client.get(home_url)
    assert response.status_code == 302
    # because the home also redirects to the first product page
    # we can't use assertRedirects
    eq_(
        urlparse.urlparse(response['location']).path,
        settings.LOGIN_URL
    )

    # if you're logged in, but not a superuser you'll get thrown
    # back on the home page with a message
    self._login(is_superuser=False)
    response = self.client.get(home_url, follow=True)
    assert response.status_code == 200
    ok_('You need to be a superuser to access this' in response.content)
def test_home_page_signed_in(self):
    """A signed-in superuser sees the home page with links to sub-pages."""
    self._login()
    # at the moment it just redirects
    home_url = reverse('manage:home')
    response = self.client.get(home_url)
    eq_(response.status_code, 200)

    # certain links on that page
    featured_versions_url = reverse('manage:featured_versions')
    ok_(featured_versions_url in response.content)
    fields_url = reverse('manage:fields')
    ok_(fields_url in response.content)
    users_url = reverse('manage:users')
    ok_(users_url in response.content)
    products_url = reverse('manage:products')
    ok_(products_url in response.content)
    releases_url = reverse('manage:releases')
    ok_(releases_url in response.content)
@mock.patch('requests.put')
@mock.patch('requests.get')
def test_featured_versions(self, rget, rput):
    """The featured-versions page lists versions and can update them."""
    self._login()
    url = reverse('manage:featured_versions')

    put_calls = []  # some mutable

    def mocked_put(url, **options):
        assert '/releases/featured' in url
        data = options['data']
        put_calls.append(data)
        return Response("true")

    rput.side_effect = mocked_put

    def mocked_product_versions(**params):
        today = datetime.date.today()
        tomorrow = today + datetime.timedelta(days=1)
        # Bug fix: each hit previously repeated the 'product' key in its
        # dict literal -- a silent no-op; the duplicates are removed.
        hits = [
            {
                'product': 'WaterWolf',
                'is_featured': True,
                'throttle': 90.0,
                'end_date': tomorrow,
                'release': 'Nightly',
                'version': '19.0.1',
                'has_builds': True,
                'start_date': today,
            },
            {
                'product': 'WaterWolf',
                'is_featured': False,
                'throttle': 33.333,
                'end_date': today,
                'release': 'Nightly',
                'version': '18.0.1',
                'has_builds': True,
                'start_date': today,
            },
        ]
        return {
            'hits': hits,
            'total': len(hits),
        }

    models.ProductVersions.implementation().get.side_effect = (
        mocked_product_versions
    )

    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('19.0.1' in response.content)
    ok_('18.0.1' in response.content)
    # also, note how the percentages are written out
    # (know thy fixtures)
    ok_('90%' in response.content)
    ok_('33.3%' in response.content)

    # only the featured version's checkbox should be pre-checked
    input_regex = re.compile('<input .*?>', re.M | re.DOTALL)
    checkboxes = [
        x for x in
        input_regex.findall(response.content)
        if 'type="checkbox"' in x
    ]
    eq_(len(checkboxes), 2)
    checkboxes_by_value = dict(
        (re.findall('value="(.*)"', x)[0], x)
        for x in checkboxes
    )
    ok_('checked' in checkboxes_by_value['19.0.1'])
    ok_('checked' not in checkboxes_by_value['18.0.1'])

    # post in a change
    update_url = reverse('manage:update_featured_versions')
    response = self.client.post(update_url, {
        'WaterWolf': '18.0.1'
    })
    eq_(response.status_code, 302)
    put_call = put_calls[0]
    eq_(put_call['WaterWolf'], '18.0.1')

    # check that it got logged
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'featured_versions.update')
    eq_(event.extra['success'], True)
    eq_(event.extra['data'], {'WaterWolf': ['18.0.1']})
def test_fields(self):
    """The fields page requires login; once signed in it renders."""
    url = reverse('manage:fields')
    # anonymous users get redirected to the login page
    eq_(self.client.get(url).status_code, 302)
    self._login()
    eq_(self.client.get(url).status_code, 200)
@mock.patch('requests.get')
def test_field_lookup(self, rget):
    """Field lookup requires login and a 'name' parameter, and proxies JSON."""
    url = reverse('manage:field_lookup')
    response = self.client.get(url)
    eq_(response.status_code, 302)
    self._login()
    response = self.client.get(url)
    # missing 'name' parameter
    eq_(response.status_code, 400)

    def mocked_get(url, params, **options):
        assert '/field' in url
        ok_('name' in params)
        eq_('Android_Display', params['name'])
        # canned middleware payload for the Android_Display field
        return Response("""
            {
              "name": "Android_Display",
              "product": null,
              "transforms": {
                "1.X processed json": "",
                "collector:raw json": "",
                "data name": "Android_Display",
                "database": "",
                "mdsw pipe dump": "",
                "pj transform": "",
                "processed json 2012": "",
                "processor transform": "",
                "ted's mdsw json": ""
              }
            }
        """)

    rget.side_effect = mocked_get
    response = self.client.get(url, {'name': 'Android_Display'})
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    eq_(data['product'], None)
    eq_(len(data['transforms']), 9)
def test_skiplist_link(self):
    """The admin home page links to the skiplist page."""
    self._login()
    response = self.client.get(reverse('manage:home'))
    assert response.status_code == 200
    ok_(reverse('manage:skiplist') in response.content)
def test_skiplist_admin_page(self):
    """The skiplist page requires login; once signed in it renders."""
    url = reverse('manage:skiplist')
    # anonymous users get redirected to the login page
    eq_(self.client.get(url).status_code, 302)
    self._login()
    eq_(self.client.get(url).status_code, 200)
@mock.patch('requests.get')
def test_skiplist_data(self, rget):
    """The skiplist data endpoint passes category/rule filters through."""
    self._login()

    def mocked_get(url, params, **options):
        assert '/skiplist' in url
        # Each branch mimics the middleware response for a particular
        # combination of the 'category' and 'rule' filters.
        if (
            'category' in params and 'suffix' == params['category'] and
            'rule' in params and 'Bar' == params['rule']
        ):
            return Response("""
                {
                  "hits": [
                    {"category": "suffix", "rule": "Bar"}
                  ],
                  "total": 1
                }
            """)
        elif 'category' in params and 'suffix' == params['category']:
            return Response("""
                {
                  "hits": [
                    {"category": "suffix", "rule": "Bar"},
                    {"category": "suffix", "rule": "Foo"}
                  ],
                  "total": 2
                }
            """)
        elif 'rule' in params and 'Bar' == params['rule']:
            return Response("""
                {
                  "hits": [
                    {"category": "prefix", "rule": "Bar"},
                    {"category": "suffix", "rule": "Bar"}
                  ],
                  "total": 2
                }
            """)
        else:
            return Response("""
                {
                  "hits": [
                    {"category": "prefix", "rule": "Bar"},
                    {"category": "prefix", "rule": "Foo"},
                    {"category": "suffix", "rule": "Bar"},
                    {"category": "suffix", "rule": "Foo"}
                  ],
                  "total": 4
                }
            """)

    rget.side_effect = mocked_get
    url = reverse('manage:skiplist_data')

    # no filters: everything comes back
    response = self.client.get(url)
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    expect = {
        'hits': [
            {'category': 'prefix', 'rule': 'Bar'},
            {'category': 'prefix', 'rule': 'Foo'},
            {'category': 'suffix', 'rule': 'Bar'},
            {'category': 'suffix', 'rule': 'Foo'}
        ],
        'total': 4
    }
    eq_(data, expect)

    # filter by category
    response = self.client.get(url, {'category': 'suffix'})
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    expect = {
        'hits': [
            {'category': 'suffix', 'rule': 'Bar'},
            {'category': 'suffix', 'rule': 'Foo'}
        ],
        'total': 2
    }
    eq_(data, expect)

    # filter by rule
    response = self.client.get(url, {'rule': 'Bar'})
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    expect = {
        'hits': [
            {'category': 'prefix', 'rule': 'Bar'},
            {'category': 'suffix', 'rule': 'Bar'},
        ],
        'total': 2
    }
    eq_(data, expect)

    # filter by rule and category
    response = self.client.get(
        url,
        {'rule': 'Bar', 'category': 'suffix'}
    )
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    expect = {
        'hits': [
            {'category': 'suffix', 'rule': 'Bar'},
        ],
        'total': 1
    }
    eq_(data, expect)
@mock.patch('requests.post')
def test_skiplist_add(self, rpost):
    """Adding a skiplist entry needs both category and rule, and is logged."""
    def mocked_post(url, **options):
        assert '/skiplist' in url, url
        ok_(options['data'].get('category'))
        ok_(options['data'].get('rule'))
        return Response("true")

    rpost.side_effect = mocked_post
    self._login()
    url = reverse('manage:skiplist_add')
    # neither
    response = self.client.post(url)
    eq_(response.status_code, 400)
    # only category
    response = self.client.post(url, {'category': 'suffix'})
    eq_(response.status_code, 400)
    # only rule
    response = self.client.post(url, {'rule': 'Foo'})
    eq_(response.status_code, 400)

    # both parameters present: succeeds
    response = self.client.post(
        url,
        {'rule': 'Foo', 'category': 'suffix'}
    )
    eq_(response.status_code, 200)
    eq_(json.loads(response.content), True)

    # check that it got logged
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'skiplist.add')
    eq_(event.extra['success'], True)
    eq_(event.extra['data'], {
        'category': 'suffix',
        'rule': 'Foo'
    })
@mock.patch('requests.delete')
def test_skiplist_delete(self, rdelete):
    """Deleting a skiplist entry needs both category and rule, and is logged."""
    def mocked_delete(url, params, **options):
        assert '/skiplist' in url, url
        ok_('category' in params)
        eq_('suffix', params['category'])
        ok_('rule' in params)
        eq_('Foo', params['rule'])
        return Response("true")

    rdelete.side_effect = mocked_delete
    self._login()
    url = reverse('manage:skiplist_delete')
    # neither
    response = self.client.post(url)
    eq_(response.status_code, 400)
    # only category
    response = self.client.post(url, {'category': 'suffix'})
    eq_(response.status_code, 400)
    # only rule
    response = self.client.post(url, {'rule': 'Foo'})
    eq_(response.status_code, 400)

    # both parameters present: succeeds
    response = self.client.post(
        url,
        {'rule': 'Foo', 'category': 'suffix'}
    )
    eq_(response.status_code, 200)
    eq_(json.loads(response.content), True)

    # check that it got logged
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'skiplist.delete')
    eq_(event.extra['success'], True)
    eq_(event.extra['data'], {
        'category': 'suffix',
        'rule': 'Foo'
    })
def test_users_page(self):
    """The users page requires login and shows existing groups."""
    url = reverse('manage:users')
    eq_(self.client.get(url).status_code, 302)
    self._login()
    eq_(self.client.get(url).status_code, 200)
    Group.objects.create(name='Wackos')
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('Wackos' in response.content)
def test_users_data(self):
    """The users data endpoint returns user details and group memberships."""
    url = reverse('manage:users_data')
    response = self.client.get(url)
    eq_(response.status_code, 302)
    self._login()
    response = self.client.get(url)
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    eq_(data['count'], 1)
    eq_(data['users'][0]['email'], self.user.email)
    eq_(data['users'][0]['id'], self.user.pk)
    eq_(data['users'][0]['is_superuser'], True)
    eq_(data['users'][0]['is_active'], True)
    eq_(data['users'][0]['groups'], [])

    # group memberships show up in the payload
    austrians = Group.objects.create(name='Austrians')
    self.user.groups.add(austrians)
    response = self.client.get(url)
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    groups = data['users'][0]['groups']
    group = groups[0]
    eq_(group['name'], 'Austrians')
    eq_(group['id'], austrians.pk)
def test_users_data_pagination(self):
    """Users data is batched by USERS_ADMIN_BATCH_SIZE, sorted by last_login."""
    url = reverse('manage:users_data')
    response = self.client.get(url)
    eq_(response.status_code, 302)
    self._login()
    # push our own user to the end of the last_login ordering
    self.user.last_login -= datetime.timedelta(days=365)
    self.user.save()
    now = timezone.now()
    for i in range(1, 101):  # 100 times, 1-100
        User.objects.create(
            username='user%03d' % i,
            email='user%03d@mozilla.com' % i,
            last_login=now - datetime.timedelta(days=i)
        )
    response = self.client.get(url)
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    eq_(data['count'], 101)
    # because it's sorted by last_login
    eq_(data['users'][0]['email'], 'user001@mozilla.com')
    eq_(len(data['users']), settings.USERS_ADMIN_BATCH_SIZE)
    eq_(data['page'], 1)
    eq_(data['batch_size'], settings.USERS_ADMIN_BATCH_SIZE)

    # let's go to page 2
    response = self.client.get(url, {'page': 2})
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    eq_(data['count'], 101)
    # because it's sorted by last_login
    eq_(data['users'][0]['email'], 'user011@mozilla.com')
    eq_(len(data['users']), settings.USERS_ADMIN_BATCH_SIZE)
    eq_(data['page'], 2)
    eq_(data['batch_size'], settings.USERS_ADMIN_BATCH_SIZE)

    # the last page holds the single remaining (oldest-login) user
    response = self.client.get(url, {'page': 11})
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    eq_(data['count'], 101)
    # because it's sorted by last_login
    eq_(data['users'][0]['email'], self.user.email)
    eq_(len(data['users']), 1)
    eq_(data['page'], 11)
    eq_(data['batch_size'], settings.USERS_ADMIN_BATCH_SIZE)
def test_users_data_pagination_bad_request(self):
    """Invalid ``page`` values are rejected with 400 Bad Request."""
    url = reverse('manage:users_data')
    self._login()
    eq_(self.client.get(url).status_code, 200)
    for bad_page in (0, -1, 'NaN'):
        response = self.client.get(url, {'page': bad_page})
        eq_(response.status_code, 400)
def test_users_data_filter(self):
    """Filtering users data by email, group, and active/superuser flags."""
    url = reverse('manage:users_data')
    self._login()
    group_a = Group.objects.create(name='Group A')
    group_b = Group.objects.create(name='Group B')

    def create_user(username, **kwargs):
        # every fixture user gets a username-derived email and a fresh login
        return User.objects.create(
            username=username,
            email=username + '@example.com',
            last_login=datetime.datetime.utcnow(),
            **kwargs
        )

    bob = create_user('bob')
    bob.groups.add(group_a)

    dick = create_user('dick')
    dick.groups.add(group_b)

    harry = create_user('harry')
    # Bug fix: harry was added to group_b twice; the second call was a
    # copy-paste no-op and has been removed.
    harry.groups.add(group_b)

    create_user('bill', is_active=False)

    # filter by email
    response = self.client.get(url, {'email': 'b'})
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    eq_(data['count'], 2)
    eq_(
        ['bill@example.com', 'bob@example.com'],
        [x['email'] for x in data['users']]
    )

    # filter by email and group
    response = self.client.get(url, {
        'email': 'b',
        'group': group_a.pk
    })
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    eq_(data['count'], 1)
    eq_(
        ['bob@example.com'],
        [x['email'] for x in data['users']]
    )

    # filter by active and superuser
    response = self.client.get(url, {
        'active': '1',
        'superuser': '-1'
    })
    eq_(response.status_code, 200)
    data = json.loads(response.content)
    eq_(data['count'], 3)
    eq_(
        ['harry@example.com', 'dick@example.com', 'bob@example.com'],
        [x['email'] for x in data['users']]
    )

    # don't send in junk
    response = self.client.get(url, {
        'group': 'xxx',
    })
    eq_(response.status_code, 400)
def test_edit_user(self):
    """Editing a user changes flags/groups and logs the diff."""
    group_a = Group.objects.create(name='Group A')
    group_b = Group.objects.create(name='Group B')
    bob = User.objects.create(
        username='bob',
        email='bob@example.com',
        is_active=False,
        is_superuser=True
    )
    bob.groups.add(group_a)
    url = reverse('manage:user', args=(bob.pk,))
    response = self.client.get(url)
    eq_(response.status_code, 302)
    self._login()
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('bob@example.com' in response.content)

    # flip every attribute: activate, demote from superuser, swap group
    response = self.client.post(url, {
        'groups': group_b.pk,
        'is_active': 'true',
        'is_superuser': ''
    })
    eq_(response.status_code, 302)

    # reload from database
    bob = User.objects.get(pk=bob.pk)
    ok_(bob.is_active)
    ok_(not bob.is_superuser)
    eq_(list(bob.groups.all()), [group_b])

    # check that the event got logged
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'user.edit')
    eq_(event.extra['id'], bob.pk)
    change = event.extra['change']
    eq_(change['is_superuser'], [True, False])
    eq_(change['is_active'], [False, True])
    eq_(change['groups'], [['Group A'], ['Group B']])
def test_groups(self):
    """The groups page requires login and lists groups with permissions."""
    url = reverse('manage:groups')
    eq_(self.client.get(url).status_code, 302)
    self._login()
    eq_(self.client.get(url).status_code, 200)
    wackos = Group.objects.create(name='Wackos')
    # Attach a known permission to it
    wackos.permissions.add(self._create_permission())
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('Wackos' in response.content)
    ok_('Mess Around' in response.content)
def test_group(self):
    """Creating, editing and deleting a group, with event logging each time."""
    url = reverse('manage:groups')
    self._login()
    ct = ContentType.objects.create(
        model='',
        app_label='crashstats.crashstats',
    )
    p1 = Permission.objects.create(
        name='Mess Around',
        codename='mess_around',
        content_type=ct
    )
    p2 = Permission.objects.create(
        name='Launch Missiles',
        codename='launch_missiles',
        content_type=ct
    )
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_(p1.name in response.content)
    ok_(p2.name in response.content)

    # create a group
    data = {
        'name': 'New Group',
        'permissions': [p2.id]
    }
    response = self.client.post(url, data)
    eq_(response.status_code, 302)
    group = Group.objects.get(name=data['name'])
    eq_(list(group.permissions.all()), [p2])

    # check that it got logged
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'group.add')
    eq_(event.extra, {
        'id': group.id,
        'name': 'New Group',
        'permissions': ['Launch Missiles']
    })

    # edit it
    edit_url = reverse('manage:group', args=(group.pk,))
    response = self.client.get(edit_url)
    eq_(response.status_code, 200)
    data = {
        'name': 'New New Group',
        'permissions': [p1.id]
    }
    response = self.client.post(edit_url, data)
    eq_(response.status_code, 302)
    group = Group.objects.get(name=data['name'])
    eq_(list(group.permissions.all()), [p1])

    # most recent event is the edit, with before/after diffs
    event, = Log.objects.all()[:1]
    eq_(event.user, self.user)
    eq_(event.action, 'group.edit')
    eq_(event.extra['change']['name'], ['New Group', 'New New Group'])
    eq_(event.extra['change']['permissions'], [
        ['Launch Missiles'],
        ['Mess Around']
    ])

    # delete it
    response = self.client.post(url, {'delete': group.pk})
    eq_(response.status_code, 302)
    ok_(not Group.objects.filter(name=data['name']))
    event, = Log.objects.all()[:1]
    eq_(event.user, self.user)
    eq_(event.action, 'group.delete')
    eq_(event.extra['name'], data['name'])
def test_analyze_model_fetches(self):
    """The model-fetches analysis page renders for a signed-in superuser."""
    self._login()
    response = self.client.get(reverse('manage:analyze_model_fetches'))
    eq_(response.status_code, 200)
def test_render_graphics_devices_page(self):
    """The graphics-devices page requires login; once signed in it renders."""
    url = reverse('manage:graphics_devices')
    eq_(self.client.get(url).status_code, 302)
    self._login()
    eq_(self.client.get(url).status_code, 200)
def test_graphics_devices_lookup(self):
    """Graphics-device lookup requires hex params and proxies the model hit."""
    self._login()
    url = reverse('manage:graphics_devices_lookup')

    def mocked_get(**params):
        # only the exact vendor/adapter pair used below is recognised
        if (
            'adapter_hex' in params and
            params['adapter_hex'] == 'xyz123' and
            'vendor_hex' in params and
            params['vendor_hex'] == 'abc123'
        ):
            return {
                "hits": [
                    {
                        "vendor_hex": "abc123",
                        "adapter_hex": "xyz123",
                        "vendor_name": "Logictech",
                        "adapter_name": "Webcamera"
                    }
                ],
                "total": 1
            }
        raise NotImplementedError(url)

    models.GraphicsDevices.implementation().get.side_effect = (
        mocked_get
    )

    # missing parameters: bad request
    response = self.client.get(url)
    eq_(response.status_code, 400)

    response = self.client.get(url, {
        'vendor_hex': 'abc123',
        'adapter_hex': 'xyz123',
    })
    eq_(response.status_code, 200)
    content = json.loads(response.content)
    eq_(content['total'], 1)
    eq_(
        content['hits'][0],
        {
            'vendor_hex': 'abc123',
            'adapter_hex': 'xyz123',
            'vendor_name': 'Logictech',
            'adapter_name': 'Webcamera'
        }
    )
def test_graphics_devices_edit(self):
    """Posting a device to the graphics-devices page stores and logs it."""
    self._login()
    url = reverse('manage:graphics_devices')

    def mocked_post(**payload):
        # the view forwards the form fields as a one-element payload list
        data = payload['data']
        eq_(
            data[0],
            {
                'vendor_hex': 'abc123',
                'adapter_hex': 'xyz123',
                'vendor_name': 'Logictech',
                'adapter_name': 'Webcamera'
            }
        )
        return True

    models.GraphicsDevices.implementation().post.side_effect = (
        mocked_post
    )

    data = {
        'vendor_hex': 'abc123',
        'adapter_hex': 'xyz123',
        'vendor_name': 'Logictech',
        'adapter_name': 'Webcamera'
    }
    response = self.client.post(url, data)
    eq_(response.status_code, 302)
    ok_(url in response['location'])

    # the addition is recorded in the event log
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'graphicsdevices.add')
    eq_(event.extra['payload'], [data])
    eq_(event.extra['success'], True)
def test_graphics_devices_csv_upload_pcidatabase_com(self):
    """Uploading a pcidatabase.com CSV parses 7 rows and logs the upload."""
    self._login()
    url = reverse('manage:graphics_devices')

    def mocked_post(**payload):
        # spot-check the first parsed row and the row count
        data = payload['data']
        eq_(
            data[0],
            {
                'vendor_hex': '0x0033',
                'adapter_hex': '0x002f',
                'vendor_name': 'Paradyne Corp.',
                'adapter_name': '.43 ieee 1394 controller'
            }
        )
        eq_(len(data), 7)
        return True

    models.GraphicsDevices.implementation().post.side_effect = (
        mocked_post
    )

    sample_file = os.path.join(
        os.path.dirname(__file__),
        'sample-graphics.csv'
    )
    with open(sample_file) as fp:
        response = self.client.post(url, {
            'file': fp,
            'database': 'pcidatabase.com',
        })
        eq_(response.status_code, 302)
        ok_(url in response['location'])

    # the upload is recorded in the event log
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'graphicsdevices.post')
    eq_(event.extra['success'], True)
    eq_(event.extra['database'], 'pcidatabase.com')
    eq_(event.extra['no_lines'], 7)
def test_graphics_devices_csv_upload_pci_ids(self):
    """Uploading a pci.ids file parses 6 rows and logs the upload."""
    self._login()
    url = reverse('manage:graphics_devices')

    def mocked_post(**payload):
        # spot-check the first parsed row and the row count
        data = payload['data']
        eq_(
            data[0],
            {
                'vendor_hex': '0x0010',
                'adapter_hex': '0x8139',
                'vendor_name': 'Allied Telesis, Inc',
                'adapter_name': 'AT-2500TX V3 Ethernet'
            }
        )
        eq_(len(data), 6)
        return True

    models.GraphicsDevices.implementation().post.side_effect = (
        mocked_post
    )

    sample_file = os.path.join(
        os.path.dirname(__file__),
        'sample-pci.ids'
    )
    with open(sample_file) as fp:
        response = self.client.post(url, {
            'file': fp,
            'database': 'pci.ids',
        })
        eq_(response.status_code, 302)
        ok_(url in response['location'])

    # the upload is recorded in the event log
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'graphicsdevices.post')
    eq_(event.extra['success'], True)
    eq_(event.extra['database'], 'pci.ids')
    eq_(event.extra['no_lines'], 6)
def test_symbols_uploads(self):
    """The symbols-uploads page requires login; once signed in it renders."""
    url = reverse('manage:symbols_uploads')
    eq_(self.client.get(url).status_code, 302)
    self._login()
    eq_(self.client.get(url).status_code, 200)
def test_supersearch_fields(self):
    """The supersearch fields page lists every mocked field and its type."""
    self._login()
    url = reverse('manage:supersearch_fields')

    def mocked_supersearchfields_get_fields(**params):
        # two representative fields with different query types
        return {
            'signature': {
                'name': 'signature',
                'namespace': 'processed_crash',
                'in_database_name': 'signature',
                'query_type': 'string',
                'form_field_choices': None,
                'permissions_needed': [],
                'default_value': None,
                'is_exposed': True,
                'is_returned': True,
                'is_mandatory': False,
            },
            'product': {
                'name': 'product',
                'namespace': 'processed_crash',
                'in_database_name': 'product',
                'query_type': 'enum',
                'form_field_choices': None,
                'permissions_needed': [],
                'default_value': None,
                'is_exposed': True,
                'is_returned': True,
                'is_mandatory': False,
            }
        }

    SuperSearchFields.implementation().get.side_effect = (
        mocked_supersearchfields_get_fields
    )
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('signature' in response.content)
    ok_('string' in response.content)
    ok_('product' in response.content)
    ok_('enum' in response.content)
def test_supersearch_fields_missing(self):
    """The missing-fields page shows every field the model reports missing."""
    self._login()
    url = reverse('manage:supersearch_fields_missing')

    def mocked_supersearchfields_get_fields(**params):
        # one known field; everything else counts as missing
        return {
            'product': {
                'name': 'product',
                'namespace': 'processed_crash',
                'in_database_name': 'product',
                'query_type': 'enum',
                'form_field_choices': None,
                'permissions_needed': [],
                'default_value': None,
                'is_exposed': True,
                'is_returned': True,
                'is_mandatory': False,
            }
        }

    def mocked_supersearchfields_get_missing_fields(**params):
        return {
            'hits': [
                'field_a',
                'namespace1.field_b',
                'namespace2.subspace1.field_c',
            ],
            'total': 3
        }

    SuperSearchFields.implementation().get.side_effect = (
        mocked_supersearchfields_get_fields
    )
    SuperSearchMissingFields.implementation().get.side_effect = (
        mocked_supersearchfields_get_missing_fields
    )

    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('field_a' in response.content)
    ok_('namespace1.field_b' in response.content)
    ok_('namespace2.subspace1.field_c' in response.content)
def test_supersearch_field(self):
    """The single-field form supports create, prefill, edit and unknown name."""
    self._login()
    url = reverse('manage:supersearch_field')

    def mocked_supersearchfields_get_fields(**params):
        return {
            'signature': {
                'name': 'signature',
                'namespace': 'processed_crash',
                'in_database_name': 'signature',
                'query_type': 'string',
                'form_field_choices': None,
                'permissions_needed': [],
                'default_value': None,
                'is_exposed': True,
                'is_returned': True,
                'is_mandatory': False,
            },
            'platform': {
                'name': 'platform',
                'namespace': 'processed_crash',
                'in_database_name': 'platform',
                'query_type': 'enum',
                'form_field_choices': None,
                'permissions_needed': [],
                'default_value': None,
                'is_exposed': True,
                'is_returned': True,
                'is_mandatory': False,
            }
        }

    SuperSearchFields.implementation().get.side_effect = (
        mocked_supersearchfields_get_fields
    )

    # Test when creating a new field.
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('signature' not in response.content)
    ok_('platform' not in response.content)

    # Test when creating a new field with some default values.
    response = self.client.get(
        url + '?full_name=namespace.subspace.field_z'
    )
    eq_(response.status_code, 200)
    ok_('field_z' in response.content)
    ok_('namespace.subspace' in response.content)
    ok_('namespace.subspace.field_z' not in response.content)

    # Test when editing an existing field.
    response = self.client.get(url, {'name': 'signature'})
    eq_(response.status_code, 200)
    ok_('signature' in response.content)
    ok_('string' in response.content)
    ok_('platform' not in response.content)

    # Test a missing field.
    response = self.client.get(url, {'name': 'unknown'})
    eq_(response.status_code, 400)
def test_supersearch_field_create(self):
    """Creating a supersearch field requires name and in_database_name."""
    self._login()
    url = reverse('manage:supersearch_field_create')

    def mocked_supersearchfields_get_fields(**params):
        return {}

    def mocked_supersearchfields_create_field(**params):
        assert 'name' in params
        assert 'in_database_name' in params
        return True

    SuperSearchFields.implementation().get.side_effect = (
        mocked_supersearchfields_get_fields
    )
    SuperSearchFields.implementation().create_field.side_effect = (
        mocked_supersearchfields_create_field
    )

    response = self.client.post(
        url,
        {
            'name': 'something',
            'in_database_name': 'something',
        }
    )
    eq_(response.status_code, 302)

    # the creation is recorded in the event log
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'supersearch_field.post')
    eq_(event.extra['name'], 'something')

    # missing either required field is a bad request
    response = self.client.post(url)
    eq_(response.status_code, 400)
    response = self.client.post(url, {'name': 'abcd'})
    eq_(response.status_code, 400)
    response = self.client.post(url, {'in_database_name': 'bar'})
    eq_(response.status_code, 400)
def test_supersearch_field_update(self):
    """Updating a field cleans empty choices/permissions before the PUT."""
    self._login()
    url = reverse('manage:supersearch_field_update')

    # Create a permission to test permission validation.
    ct = ContentType.objects.create(
        model='',
        app_label='crashstats.crashstats',
    )
    Permission.objects.create(
        name='I can haz permission!',
        codename='i.can.haz.permission',
        content_type=ct
    )

    def mocked_supersearchfields_get_fields(**params):
        return {}

    def mocked_supersearchfields_update_field(**data):
        ok_('name' in data)
        ok_('description' in data)
        ok_('is_returned' in data)
        ok_('form_field_choices' in data)
        ok_('permissions_needed' in data)
        # the view must strip the empty-string entries posted below
        ok_(not data['is_returned'])
        ok_('' not in data['form_field_choices'])
        eq_(
            data['permissions_needed'],
            ['crashstats.i.can.haz.permission']
        )
        return True

    SuperSearchFields.implementation().get.side_effect = (
        mocked_supersearchfields_get_fields
    )
    SuperSearchFields.implementation().update_field.side_effect = (
        mocked_supersearchfields_update_field
    )

    response = self.client.post(
        url,
        {
            'name': 'something',
            'in_database_name': 'something',
            'description': 'hello world',
            'is_returned': False,
            'form_field_choices': ['', 'a choice', 'another choice'],
            'permissions_needed': ['', 'crashstats.i.can.haz.permission'],
        }
    )
    eq_(response.status_code, 302)

    # the update is recorded in the event log
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'supersearch_field.put')
    eq_(event.extra['name'], 'something')

    # missing either required field is a bad request
    response = self.client.post(url)
    eq_(response.status_code, 400)
    response = self.client.post(url, {'name': 'foo'})
    eq_(response.status_code, 400)
    response = self.client.post(url, {'in_database_name': 'bar'})
    eq_(response.status_code, 400)
def test_supersearch_field_delete(self):
    """Deleting a supersearch field requires a name and logs the deletion."""
    self._login()
    url = reverse('manage:supersearch_field_delete')

    def mocked_supersearchfields_get_fields(**params):
        return {}

    def mocked_supersearchfields_delete_field(**params):
        assert 'name' in params
        return True

    SuperSearchFields.implementation().get.side_effect = (
        mocked_supersearchfields_get_fields
    )
    SuperSearchFields.implementation().delete_field.side_effect = (
        mocked_supersearchfields_delete_field
    )

    response = self.client.get(url, {'name': 'signature'})
    eq_(response.status_code, 302)

    # the deletion is recorded in the event log
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'supersearch_field.delete')
    eq_(event.extra['name'], 'signature')

    # a missing name is a bad request
    response = self.client.get(url)
    eq_(response.status_code, 400)
def test_create_product(self):
    """Creating a product rejects duplicates and logs successful additions."""
    def mocked_post(**options):
        eq_(options['product'], 'WaterCat')
        eq_(options['version'], '1.0')
        return True

    models.ProductVersions.implementation().post.side_effect = (
        mocked_post
    )
    self._login()
    url = reverse('manage:products')
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('value="1.0"' in response.content)

    # first attempt to create an existing combo
    response = self.client.post(url, {
        'product': 'WaterWolf',
        'initial_version': '1.0'
    })
    eq_(response.status_code, 200)
    ok_('WaterWolf already exists' in response.content)

    # now with a new unique product
    response = self.client.post(url, {
        'product': 'WaterCat',
        'initial_version': '1.0'
    })
    eq_(response.status_code, 302)

    # the addition is recorded in the event log
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'product.add')
    eq_(event.extra['product'], 'WaterCat')
@mock.patch('requests.post')
def test_create_release(self, rpost):
    """Creating a release validates build_id/throttle/beta_number, then posts."""
    def mocked_post(url, **options):
        assert '/releases/release/' in url, url
        data = options['data']
        eq_(data['product'], 'WaterCat')
        eq_(data['version'], '19.0')
        eq_(data['beta_number'], 1)
        eq_(data['throttle'], 0)
        return Response(True)

    rpost.side_effect = mocked_post
    self._login()
    url = reverse('manage:releases')
    response = self.client.get(url)
    eq_(response.status_code, 200)
    # there should be a dropdown with some known platforms
    ok_('value="Windows"' in response.content)
    ok_('value="Mac OS X"' in response.content)

    # first attempt to create with a product version that doesn't exist
    now = datetime.datetime.utcnow()
    data = {
        'product': 'WaterCat',
        'version': '99.9',
        'update_channel': 'beta',
        'build_id': now.strftime('%Y%m%d%H%M'),
        'platform': 'Windows',
        'beta_number': '0',
        'release_channel': 'Beta',
        'throttle': '1'
    }
    # set some bad values that won't pass validation
    data['throttle'] = 'xxx'
    data['beta_number'] = 'yyy'
    data['version'] = '19.0'
    data['build_id'] = 'XX'
    response = self.client.post(url, data)
    eq_(response.status_code, 200)
    ok_('Must start with YYYYMMDD' in response.content)
    # both 'throttle' and 'beta_number' fail number validation
    eq_(response.content.count('not a number'), 2)

    # a syntactically odd but date-parsable build id that's too old
    data['build_id'] = '20140101XXXXX'
    response = self.client.post(url, data)
    eq_(response.status_code, 200)
    ok_('Date older than 30 days' in response.content)

    # finally, all with good parameters
    data['beta_number'] = '1'
    data['throttle'] = '0'
    data['build_id'] = now.strftime('%Y%m%d%H%M')
    response = self.client.post(url, data)
    eq_(response.status_code, 302)

    # the addition is recorded in the event log
    event, = Log.objects.all()
    eq_(event.user, self.user)
    eq_(event.action, 'release.add')
    eq_(event.extra['product'], 'WaterCat')
@mock.patch('requests.post')
def test_create_release_with_null_beta_number(self, rpost):
    """A whitespace-only beta_number is posted to the middleware as None."""
    mock_calls = []

    def mocked_post(url, **options):
        assert '/releases/release/' in url, url
        mock_calls.append(url)
        data = options['data']
        eq_(data['beta_number'], None)
        return Response(True)

    rpost.side_effect = mocked_post
    self._login()
    now = datetime.datetime.utcnow()
    data = {
        'product': 'WaterWolf',
        'version': '99.9',
        'update_channel': 'beta',
        'build_id': now.strftime('%Y%m%d%H%M'),
        'platform': 'Windows',
        'beta_number': ' ',
        'release_channel': 'Beta',
        'throttle': '1'
    }
    url = reverse('manage:releases')
    response = self.client.post(url, data)
    eq_(response.status_code, 302)
    # make sure it really called the POST to /releases/release/
    ok_(mock_calls)
def test_view_events_page(self):
url = reverse('manage:events')
response = self.client.get(url)
eq_(response.status_code, 302)
self._login()
# this page will iterate over all unique possible Log actions
Log.objects.create(
user=self.user,
action='actionA'
)
Log.objects.create(
user=self.user,
action='actionB'
)
Log.objects.create(
user=self.user,
action='actionA'
)
response = self.client.get(url)
eq_(response.status_code, 200)
# for the action filter drop-downs
eq_(response.content.count('value="actionA"'), 1)
eq_(response.content.count('value="actionB"'), 1)
    def test_events_data(self):
        """The JSON events feed paginates and filters by user and action."""
        url = reverse('manage:events_data')
        response = self.client.get(url)
        eq_(response.status_code, 302)
        self._login()
        Log.objects.create(
            user=self.user,
            action='actionA',
            extra={'foo': True}
        )
        other_user = User.objects.create(
            username='other',
            email='other@email.com'
        )
        Log.objects.create(
            user=other_user,
            action='actionB',
            extra={'bar': False}
        )
        third_user = User.objects.create(
            username='third',
            email='third@user.com',
        )
        now = timezone.now()
        # create two whole pages' worth of older events (timestamps pushed
        # into the past) so that pagination kicks in
        for i in range(settings.EVENTS_ADMIN_BATCH_SIZE * 2):
            Log.objects.create(
                user=third_user,
                action='actionX',
                timestamp=now - datetime.timedelta(
                    seconds=i + 1
                )
            )
        response = self.client.get(url)
        eq_(response.status_code, 200)
        data = json.loads(response.content)
        eq_(data['count'], 2 + settings.EVENTS_ADMIN_BATCH_SIZE * 2)
        # the most recent should be "actionB"
        eq_(len(data['events']), settings.EVENTS_ADMIN_BATCH_SIZE)
        first = data['events'][0]
        eq_(first['action'], 'actionB')
        eq_(first['extra'], {'bar': False})
        # try to go to another page; bad page values are 400s
        response = self.client.get(url, {'page': 'xxx'})
        eq_(response.status_code, 400)
        response = self.client.get(url, {'page': '0'})
        eq_(response.status_code, 400)
        response = self.client.get(url, {'page': '2'})
        eq_(response.status_code, 200)
        data = json.loads(response.content)
        first = data['events'][0]
        # we should now be on one of the actionX events
        eq_(first['action'], 'actionX')
        # we can filter by user
        response = self.client.get(url, {'user': 'other'})
        eq_(response.status_code, 200)
        data = json.loads(response.content)
        eq_(data['count'], 1)
        # we can filter by action
        response = self.client.get(url, {'action': 'actionX'})
        eq_(response.status_code, 200)
        data = json.loads(response.content)
        eq_(data['count'], settings.EVENTS_ADMIN_BATCH_SIZE * 2)
def test_events_data_urls(self):
"""some logged events have a URL associated with them"""
self._login()
Log.objects.create(
user=self.user,
action='user.edit',
extra={'id': self.user.id}
)
group = Group.objects.create(name='Wackos')
Log.objects.create(
user=self.user,
action='group.add',
extra={'id': group.id}
)
Log.objects.create(
user=self.user,
action='group.edit',
extra={'id': group.id}
)
Log.objects.create(
user=self.user,
action='supersearch_field.post',
extra={'name': 'sig1'}
)
Log.objects.create(
user=self.user,
action='supersearch_field.put',
extra={'name': 'sig2'}
)
url = reverse('manage:events_data')
response = self.client.get(url)
data = json.loads(response.content)
eq_(data['count'], 5)
five, four, three, two, one = data['events']
eq_(one['url'], reverse('manage:user', args=(self.user.id,)))
eq_(two['url'], reverse('manage:group', args=(group.id,)))
eq_(three['url'], reverse('manage:group', args=(group.id,)))
eq_(four['url'], reverse('manage:supersearch_field') + '?name=sig1')
eq_(five['url'], reverse('manage:supersearch_field') + '?name=sig2')
def test_api_tokens(self):
permission = self._create_permission()
url = reverse('manage:api_tokens')
response = self.client.get(url)
# because we're not logged in
eq_(response.status_code, 302)
self._login()
response = self.client.get(url)
eq_(response.status_code, 200)
# expects some permissions to be available as dropdowns
ok_(
'<option value="%s">%s</option>' % (
permission.id,
permission.name
) in response.content
)
    def test_create_api_token(self):
        """Creating a token stores it, sets the expiry and logs the event."""
        self._login()
        user = User.objects.create_user(
            'user',
            'user@example.com',
            'secret'
        )
        permission = self._create_permission()
        # the user must belong to a group that has this permission
        wackos = Group.objects.create(name='Wackos')
        wackos.permissions.add(permission)
        user.groups.add(wackos)
        assert user.has_perm('crashstats.' + permission.codename)
        url = reverse('manage:api_tokens')
        # the email is deliberately upper-cased to prove the user lookup
        # is case-insensitive
        response = self.client.post(url, {
            'user': user.email.upper(),
            'permissions': [permission.id],
            'notes': 'Some notes',
            'expires': 7
        })
        eq_(response.status_code, 302)
        token = Token.objects.get(
            user=user,
            notes='Some notes',
        )
        eq_(list(token.permissions.all()), [permission])
        # expires is in the future, so the day delta is negative; negate it
        lasting = (timezone.now() - token.expires).days * -1
        eq_(lasting, 7)
        # a matching audit Log entry must have been written
        event, = Log.objects.all()
        eq_(event.user, self.user)
        eq_(event.action, 'api_token.create')
        eq_(event.extra['notes'], 'Some notes')
        ok_(event.extra['expires'])
        eq_(event.extra['expires_days'], 7)
        eq_(event.extra['permissions'], permission.name)
    def test_create_api_token_rejected(self):
        """Token creation is refused for bad, ambiguous or unfit users."""
        self._login()
        user = User.objects.create_user(
            'koala',
            'koala@example.com',
            'secret'
        )
        permission = self._create_permission()
        url = reverse('manage:api_tokens')
        # an email that matches nobody
        response = self.client.post(url, {
            'user': 'xxx',
            'permissions': [permission.id],
            'notes': '',
            'expires': 7
        })
        eq_(response.status_code, 200)
        ok_('No user found by that email address' in response.content)
        response = self.client.post(url, {
            'user': 'k',  # there will be two users whose email starts with k
            'permissions': [permission.id],
            'notes': '',
            'expires': 7
        })
        eq_(response.status_code, 200)
        ok_(
            'More than one user found by that email address'
            in response.content
        )
        # koala has no permissions at all yet
        response = self.client.post(url, {
            'user': 'koala@example',
            'permissions': [permission.id],
            'notes': '',
            'expires': 7
        })
        eq_(response.status_code, 200)
        ok_(
            'koala@example.com does not have the permission '
            '"Mess Around"'
            in response.content
        )
        ok_(
            'koala@example.com has no permissions!'
            in response.content
        )
        # suppose the user has some other permission, only
        permission2 = self._create_permission(
            'Do Things',
            'do_things'
        )
        group = Group.objects.create(name='Noobs')
        group.permissions.add(permission2)
        user.groups.add(group)
        assert user.has_perm('crashstats.do_things')
        # still lacks the requested permission; the error now lists the
        # permissions the user *does* have
        response = self.client.post(url, {
            'user': 'koala@example',
            'permissions': [permission.id],
            'notes': '',
            'expires': 7
        })
        eq_(response.status_code, 200)
        ok_(
            'koala@example.com does not have the permission '
            '"Mess Around"'
            in response.content
        )
        ok_(
            'Only permissions possible are: Do Things'
            in response.content
        )
        # you can't create a token for an inactive user
        user.is_active = False
        user.save()
        response = self.client.post(url, {
            'user': 'koala',
            'permissions': [permission.id],
            'notes': '',
            'expires': 7
        })
        eq_(response.status_code, 200)
        ok_(
            'koala@example.com is not an active user'
            in response.content
        )
    def test_api_tokens_data(self):
        """The JSON tokens feed supports paging and several filters."""
        url = reverse('manage:api_tokens_data')
        response = self.client.get(url)
        eq_(response.status_code, 302)
        self._login()
        # empty state first
        response = self.client.get(url)
        eq_(response.status_code, 200)
        result = json.loads(response.content)
        eq_(result['tokens'], [])
        eq_(result['count'], 0)
        eq_(result['page'], 1)
        eq_(result['batch_size'], settings.API_TOKENS_ADMIN_BATCH_SIZE)
        expires = timezone.now()
        expires += datetime.timedelta(
            days=settings.TOKENS_DEFAULT_EXPIRATION_DAYS
        )
        token = Token.objects.create(
            user=self.user,
            notes='Some notes',
            expires=expires
        )
        assert token.key  # automatically generated
        permission = self._create_permission()
        token.permissions.add(permission)
        response = self.client.get(url)
        eq_(response.status_code, 200)
        result = json.loads(response.content)
        # the exact JSON serialization of the one token we created
        expected_token = {
            'created': token.created.isoformat(),
            'notes': 'Some notes',
            'expires': expires.isoformat(),
            'id': token.id,
            'expired': False,
            'permissions': [permission.name],
            'user': self.user.email,
            'key': token.key,
        }
        eq_(result['tokens'], [expected_token])
        eq_(result['count'], 1)
        # mess with the page parameter
        response = self.client.get(url, {'page': '0'})
        eq_(response.status_code, 400)
        response = self.client.get(url, {'expired': 'junk'})
        eq_(response.status_code, 400)
        # filter by email (partial match)
        response = self.client.get(url, {'email': self.user.email[:5]})
        eq_(response.status_code, 200)
        result = json.loads(response.content)
        eq_(result['tokens'], [expected_token])
        eq_(result['count'], 1)
        response = self.client.get(url, {'user': 'junk'})
        eq_(response.status_code, 200)
        result = json.loads(response.content)
        eq_(result['tokens'], [])
        eq_(result['count'], 0)
        # filter by key (partial match)
        response = self.client.get(url, {'key': token.key[:5]})
        eq_(response.status_code, 200)
        result = json.loads(response.content)
        eq_(result['tokens'], [expected_token])
        eq_(result['count'], 1)
        response = self.client.get(url, {'key': 'junk'})
        eq_(response.status_code, 200)
        result = json.loads(response.content)
        eq_(result['tokens'], [])
        eq_(result['count'], 0)
        # filter by expired
        response = self.client.get(url, {'expired': 'no'})
        eq_(response.status_code, 200)
        result = json.loads(response.content)
        eq_(result['tokens'], [expected_token])
        eq_(result['count'], 1)
        response = self.client.get(url, {'expired': 'yes'})
        eq_(response.status_code, 200)
        result = json.loads(response.content)
        eq_(result['tokens'], [])
        eq_(result['count'], 0)
        # force the token into the past so it counts as expired
        token.expires = timezone.now() - datetime.timedelta(days=1)
        token.save()
        response = self.client.get(url, {'expired': 'yes'})
        eq_(response.status_code, 200)
        result = json.loads(response.content)
        expected_token['expires'] = token.expires.isoformat()
        expected_token['expired'] = True
        eq_(result['tokens'], [expected_token])
        eq_(result['count'], 1)
def test_api_tokens_delete(self):
url = reverse('manage:api_tokens_delete')
response = self.client.get(url)
eq_(response.status_code, 405)
response = self.client.post(url)
eq_(response.status_code, 302)
self._login()
response = self.client.post(url)
eq_(response.status_code, 400)
response = self.client.post(url, {'id': '99999'})
eq_(response.status_code, 404)
expires = timezone.now()
expires += datetime.timedelta(
days=settings.TOKENS_DEFAULT_EXPIRATION_DAYS
)
token = Token.objects.create(
user=self.user,
notes='Some notes',
expires=expires
)
assert token.key # automatically generated
permission = self._create_permission()
token.permissions.add(permission)
response = self.client.post(url, {'id': token.id})
eq_(response.status_code, 200) # it's AJAX
ok_(not Token.objects.all())
event, = Log.objects.all()
eq_(event.user, self.user)
eq_(event.action, 'api_token.delete')
eq_(event.extra['notes'], 'Some notes')
eq_(event.extra['user'], self.user.email)
eq_(event.extra['permissions'], permission.name)
def test_crash_me_now(self):
url = reverse('manage:crash_me_now')
response = self.client.get(url)
eq_(response.status_code, 302)
self._login()
response = self.client.get(url)
eq_(response.status_code, 200)
assert_raises(
NameError,
self.client.post,
url,
{
'exception_type': 'NameError',
'exception_value': 'Crash!'
}
)
    def test_symbols_uploads_data_pagination(self):
        """The symbols uploads JSON feed paginates and filters correctly."""
        url = reverse('manage:symbols_uploads_data')
        response = self.client.get(url)
        eq_(response.status_code, 302)
        self._login()
        # fill exactly one page with uploads belonging to another user
        other = User.objects.create(username='o', email='other@mozilla.com')
        for i in range(settings.SYMBOLS_UPLOADS_ADMIN_BATCH_SIZE):
            SymbolsUpload.objects.create(
                user=other,
                filename='file-%d.zip' % i,
                size=1000 + i,
                content='Some Content'
            )
        # add this last so it shows up first
        user = User.objects.create(username='user', email='user@mozilla.com')
        upload = SymbolsUpload.objects.create(
            user=user,
            filename='file.zip',
            size=123456,
            content='Some Content'
        )
        response = self.client.get(url)
        eq_(response.status_code, 200)
        data = json.loads(response.content)
        eq_(data['count'], settings.SYMBOLS_UPLOADS_ADMIN_BATCH_SIZE + 1)
        eq_(data['batch_size'], settings.SYMBOLS_UPLOADS_ADMIN_BATCH_SIZE)
        eq_(data['page'], 1)
        items = data['items']
        eq_(len(items), settings.SYMBOLS_UPLOADS_ADMIN_BATCH_SIZE)
        first, = items[:1]
        eq_(first['id'], upload.id)
        eq_(first['created'], upload.created.isoformat())
        eq_(first['filename'], 'file.zip')
        eq_(first['size'], 123456)
        eq_(first['url'], reverse('symbols:content', args=(first['id'],)))
        eq_(first['user'], {
            'email': user.email,
            'url': reverse('manage:user', args=(user.id,)),
            'id': user.id,
        })
        # let's go to page 2; only the one overflow item is left there
        response = self.client.get(url, {'page': 2})
        eq_(response.status_code, 200)
        data = json.loads(response.content)
        eq_(data['count'], settings.SYMBOLS_UPLOADS_ADMIN_BATCH_SIZE + 1)
        items = data['items']
        eq_(len(items), 1)
        eq_(data['page'], 2)
        # filter by user email (case-insensitive, partial match)
        response = self.client.get(url, {'email': user.email[:5].upper()})
        eq_(response.status_code, 200)
        data = json.loads(response.content)
        eq_(data['count'], 1)
        first, = data['items']
        eq_(first['user']['id'], user.id)
        # filter by filename (case-insensitive, partial match)
        response = self.client.get(url, {'filename': 'FILE.ZI'})
        eq_(response.status_code, 200)
        data = json.loads(response.content)
        eq_(data['count'], 1)
        first, = data['items']
        eq_(first['filename'], 'file.zip')
def test_symbols_uploads_data_pagination_bad_request(self):
url = reverse('manage:symbols_uploads_data')
self._login()
response = self.client.get(url)
eq_(response.status_code, 200)
response = self.client.get(url, {'page': 0})
eq_(response.status_code, 400)
response = self.client.get(url, {'page': -1})
eq_(response.status_code, 400)
response = self.client.get(url, {'page': 'NaN'})
eq_(response.status_code, 400)
|
kursitet/edx-ora2
|
refs/heads/master
|
openassessment/xblock/message_mixin.py
|
7
|
"""
Message step in the OpenAssessment XBlock.
"""
import datetime as dt
import pytz
from xblock.core import XBlock
class MessageMixin(object):
    """
    Message Mixin introduces all handlers for displaying the banner message

    MessageMixin is a Mixin for the OpenAssessmentBlock. Functions in the
    MessageMixin call into the OpenAssessmentBlock functions and will not work
    outside of OpenAssessmentBlock.
    """

    @XBlock.handler
    def render_message(self, data, suffix=''):
        """
        Render the message step.

        Args:
            data: Not used.

        Keyword Arguments:
            suffix: Not used.

        Returns:
            unicode: HTML content of the message banner.
        """
        # Retrieve the status of the workflow and information about deadlines.
        workflow = self.get_workflow_info()
        deadline_info = self._get_deadline_info()

        # Finds the canonical status of the workflow and the is_closed status
        # of the problem
        status = workflow.get('status')
        status_details = workflow.get('status_details', {})
        is_closed = deadline_info.get('general').get('is_closed')

        # Finds the status_information which describes the closed status of
        # the current step (defaults to submission)
        status_info = deadline_info.get(status, deadline_info.get("submission"))
        status_is_closed = status_info.get('is_closed')

        # Default context is empty
        context = {}

        # Default path leads to an "instruction-unavailable" block
        path = 'openassessmentblock/message/oa_message_unavailable.html'

        # Render the instruction message based on the status of the workflow
        # and the closed status. Order matters: completion wins over closure,
        # and closure wins over any in-progress step.
        if status == "done" or status == "waiting":
            path, context = self.render_message_complete(status_details)
        elif is_closed or status_is_closed:
            path, context = self.render_message_closed(status_info)
        elif status == "self":
            path, context = self.render_message_self(deadline_info)
        elif status == "peer":
            path, context = self.render_message_peer(deadline_info)
        elif status == "training":
            path, context = self.render_message_training(deadline_info)
        elif status is None:
            # No workflow yet: the learner has not submitted a response.
            path, context = self.render_message_open(deadline_info)

        return self.render_assessment(path, context)

    def render_message_complete(self, status_details):
        """
        Renders the "Complete" message state (Either Waiting or Done)

        Args:
            status_details (dict): detailed status information for each
                step of the workflow, used to describe what is waited on.

        Returns:
            The path (String) and context (dict) to render the "Complete" message template
        """
        context = {
            "waiting": self.get_waiting_details(status_details),
        }
        return 'openassessmentblock/message/oa_message_complete.html', context

    def render_message_training(self, deadline_info):
        """
        Renders the "Student-Training" message state

        Args:
            deadline_info (dict): The dictionary of boolean assessment near/closed states

        Returns:
            The path (String) and context (dict) to render the "Training" message template
        """
        approaching = deadline_info.get('training').get('approaching')
        context = {
            'approaching': approaching
        }
        return 'openassessmentblock/message/oa_message_training.html', context

    def render_message_closed(self, status_info):
        """
        Renders the "Closed" message state

        Args:
            status_info (dict): The dictionary describing the closed status of the current step

        Returns:
            The path (String) and context (dict) to render the "Closed" template
        """
        # reason == "start" means the step has not opened yet,
        # as opposed to being past its due date
        reason = status_info.get("reason")
        context = {
            "not_yet_open": (reason == "start")
        }
        return 'openassessmentblock/message/oa_message_closed.html', context

    def render_message_self(self, deadline_info):
        """
        Renders the "Self" message state

        Args:
            deadline_info (dict): The dictionary of boolean assessment near/closed states

        Returns:
            The path (String) and context (dict) to render the "Self" template
        """
        has_peer = 'peer-assessment' in self.assessment_steps
        self_info = deadline_info.get("self")
        context = {
            "has_peer": has_peer,
            "self_approaching": self_info.get("approaching"),
            "self_closed": self_info.get("is_closed"),
            "self_not_released": (self_info.get("reason") == "start")
        }
        return 'openassessmentblock/message/oa_message_self.html', context

    def render_message_peer(self, deadline_info):
        """
        Renders the "Peer" message state

        Args:
            deadline_info (dict): The dictionary of boolean assessment near/closed states

        Returns:
            The path (String) and context (dict) to render the "Peer" template
        """
        # Uses a static field in the XBlock to determine if the
        # PeerAssessment Block was able to pick up an assessment.
        waiting = self.no_peers
        has_self = 'self-assessment' in self.assessment_steps
        peer_info = deadline_info.get("peer")
        context = {
            "has_self": has_self,
            "waiting": waiting,
            "peer_approaching": peer_info.get("approaching"),
            "peer_closed": peer_info.get("is_closed"),
            "peer_not_released": (peer_info.get("reason") == "start")
        }
        return 'openassessmentblock/message/oa_message_peer.html', context

    def render_message_open(self, deadline_info):
        """
        Renders the "Open" message state

        Args:
            deadline_info (dict): The dictionary of boolean assessment near/closed states

        Returns:
            The path (String) and context (dict) to render the "Open" template
        """
        submission_approaching = deadline_info.get("submission").get("approaching")
        context = {
            "approaching": submission_approaching
        }
        return 'openassessmentblock/message/oa_message_open.html', context

    def _get_deadline_info(self):
        """
        Get detailed information about the standing of all deadlines.

        Args:
            None.

        Returns:
            dict with the following elements
                "submission" : dictionary on submission closure of this^ form
                "general" : dictionary on problem (all elements) closure of this^ form
                "peer": dictionary on peer closure of this^ form *If Assessment has a Peer Section*
                "self": dictionary on self closure of this^ form *If Assessment has a Self Section*

            this^ form:
                "is_closed": (bool) Indicating whether or not that section has closed
                "reason": (str) The reason that the section is closed (None if !is_closed)
                "approaching": (bool) Indicates whether or not the section deadline is within a day.
        """
        # Methods which use datetime.deltatime to figure out if a deadline is approaching.
        now = dt.datetime.utcnow().replace(tzinfo=pytz.utc)

        def _is_approaching(date):
            # Determines if the deadline is within one day of now. (Approaching = True if so)
            # NOTE(review): assumes `date` is always a tz-aware datetime;
            # confirm is_closed() never returns None for the due date.
            delta = date - now
            return delta.days == 0

        # problem_info has form (is_closed, reason, start_date, due_date)
        problem_info = self.is_closed()
        # submission_info has form (is_closed, reason, start_date, due_date)
        submission_info = self.is_closed("submission")

        # The information we will always pass on to the user. Adds additional
        # dicts on peer and self if applicable.
        deadline_info = {
            "submission": {
                "is_closed": submission_info[0],
                "reason": submission_info[1],
                "approaching": _is_approaching(submission_info[3])
            },
            "general": {
                "is_closed": problem_info[0],
                "reason": problem_info[1],
                "approaching": _is_approaching(problem_info[3])
            }
        }

        has_training = 'student-training' in self.assessment_steps
        if has_training:
            training_info = self.is_closed("student-training")
            training_dict = {
                "training": {
                    "is_closed": training_info[0],
                    "reason": training_info[1],
                    "approaching": _is_approaching(training_info[3])
                }
            }
            deadline_info.update(training_dict)

        has_peer = 'peer-assessment' in self.assessment_steps
        # peer_info has form (is_closed, reason, start_date, due_date)
        if has_peer:
            peer_info = self.is_closed("peer-assessment")
            peer_dict = {
                "peer": {
                    "is_closed": peer_info[0],
                    "reason": peer_info[1],
                    "approaching": _is_approaching(peer_info[3])
                }
            }
            deadline_info.update(peer_dict)

        has_self = 'self-assessment' in self.assessment_steps
        # self_info has form (is_closed, reason, start_date, due_date)
        if has_self:
            self_info = self.is_closed("self-assessment")
            self_dict = {
                "self": {
                    "is_closed": self_info[0],
                    "reason": self_info[1],
                    "approaching": _is_approaching(self_info[3])
                }
            }
            deadline_info.update(self_dict)

        return deadline_info
|
Reat0ide/plugin.video.pelisalacarta
|
refs/heads/master
|
lib/mechanize/_urllib2_fork.py
|
130
|
"""Fork of urllib2.
When reading this, don't assume that all code in here is reachable. Code in
the rest of mechanize may be used instead.
Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Python
Software Foundation; All Rights Reserved
Copyright 2002-2009 John J Lee <jjl@pobox.com>
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.1 licenses (see the file
COPYING.txt included with the distribution).
"""
# XXX issues:
# If an authentication error handler that tries to perform
# authentication for some reason but fails, how should the error be
# signalled? The client needs to know the HTTP error code. But if
# the handler knows that the problem was, e.g., that it didn't know
# that hash algo that requested in the challenge, it would be good to
# pass that information along to the client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation
# Possible extensions:
# complex proxies XXX not sure what exactly was meant by this
# abstract factory for opener
import copy
import base64
import httplib
import mimetools
import logging
import os
import posixpath
import random
import re
import socket
import sys
import time
import urllib
import urlparse
import bisect
# Prefer the C implementation of StringIO when available (Python 2).
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

# Hash helpers: hashlib exists on Python >= 2.5; fall back to the old
# standalone md5/sha modules on 2.4.
try:
    import hashlib
except ImportError:
    # python 2.4
    import md5
    import sha

    def sha1_digest(bytes):
        # Hex digest of *bytes* via the legacy sha module.
        return sha.new(bytes).hexdigest()

    def md5_digest(bytes):
        # Hex digest of *bytes* via the legacy md5 module.
        return md5.new(bytes).hexdigest()
else:
    def sha1_digest(bytes):
        # Hex digest of *bytes* via hashlib SHA-1.
        return hashlib.sha1(bytes).hexdigest()

    def md5_digest(bytes):
        # Hex digest of *bytes* via hashlib MD5.
        return hashlib.md5(bytes).hexdigest()

# socket._fileobject only grew the close= keyword in Python 2.5; probe for
# it once at import time and pick the matching wrapper.
try:
    socket._fileobject("fake socket", close=True)
except TypeError:
    # python <= 2.4
    create_readline_wrapper = socket._fileobject
else:
    def create_readline_wrapper(fh):
        # Wrap *fh* so closing the file object also closes fh.
        return socket._fileobject(fh, close=True)
# python 2.4 splithost has a bug in empty path component case, so carry
# our own copy instead of using urllib's.
_hostprog = None


def splithost(url):
    """splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
    # Compile the pattern lazily, once, and cache it in the module global.
    global _hostprog
    if _hostprog is None:
        import re
        _hostprog = re.compile('^//([^/?]*)(.*)$')

    found = _hostprog.match(url)
    if found is None:
        # Not a network-path reference: nothing to split off.
        return None, url
    return found.group(1, 2)
from urllib import (unwrap, unquote, splittype, quote,
addinfourl, splitport,
splitattr, ftpwrapper, splituser, splitpasswd, splitvalue)
# support for FileHandler, proxies via environment variables
from urllib import localhost, url2pathname, getproxies
from urllib2 import HTTPError, URLError
import _request
import _rfc3986
import _sockettimeout
from _clientcookie import CookieJar
from _response import closeable_response
# used in User-Agent header sent
__version__ = sys.version[:3]
# Module-level opener shared by urlopen(); created lazily on first use.
_opener = None
def urlopen(url, data=None, timeout=_sockettimeout._GLOBAL_DEFAULT_TIMEOUT):
    # Open *url* with the shared default opener, building it on demand.
    global _opener
    if _opener is None:
        _opener = build_opener()
    return _opener.open(url, data, timeout)
def install_opener(opener):
    # Replace the shared default opener used by urlopen().
    global _opener
    _opener = opener
# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$")
def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.

    """
    url = request.get_full_url()
    host = urlparse.urlparse(url)[1]
    if host == "":
        # Relative URL with no netloc: fall back to the Host header.
        host = request.get_header("Host", "")
    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()
class Request:
    """mechanize's fork of urllib2.Request: a single request.

    Carries the URL, optional POST data, normal and "unredirected" headers,
    plus the RFC 2965 origin-request-host / unverifiable flags used by the
    cookie handling code. Scheme and host are split off lazily and cached.
    """

    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False):
        # NOTE(review): the mutable default for `headers` looks safe here
        # because it is only iterated, never mutated; confirm no caller
        # relies on mutating the default.
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self.__original = unwrap(url)
        self.type = None
        # self.__r_type is what's left after doing the splittype
        self.host = None
        self.port = None
        self._tunnel_host = None
        self.data = data
        self.headers = {}
        for key, value in headers.items():
            self.add_header(key, value)
        self.unredirected_hdrs = {}
        if origin_req_host is None:
            origin_req_host = request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable

    def __getattr__(self, attr):
        # XXX this is a fallback mechanism to guard against these
        # methods getting called in a non-standard order. this may be
        # too complicated and/or unnecessary.
        # XXX should the __r_XXX attributes be public?
        if attr[:12] == '_Request__r_':
            name = attr[12:]
            if hasattr(Request, 'get_' + name):
                # Calling the getter populates the lazy attribute.
                getattr(self, 'get_' + name)()
                return getattr(self, attr)
        raise AttributeError, attr

    def get_method(self):
        # The HTTP method is implied by the presence of data.
        if self.has_data():
            return "POST"
        else:
            return "GET"

    # XXX these helper methods are lame

    def add_data(self, data):
        self.data = data

    def has_data(self):
        return self.data is not None

    def get_data(self):
        return self.data

    def get_full_url(self):
        return self.__original

    def get_type(self):
        # Lazily split off and cache the URL scheme.
        if self.type is None:
            self.type, self.__r_type = splittype(self.__original)
            if self.type is None:
                raise ValueError, "unknown url type: %s" % self.__original
        return self.type

    def get_host(self):
        # Lazily split off and cache the (unquoted) host.
        if self.host is None:
            self.host, self.__r_host = splithost(self.__r_type)
            if self.host:
                self.host = unquote(self.host)
        return self.host

    def get_selector(self):
        # The "selector" is the path part actually sent in the request line.
        scheme, authority, path, query, fragment = _rfc3986.urlsplit(
            self.__r_host)
        if path == "":
            path = "/"  # RFC 2616, section 3.2.2
        fragment = None  # RFC 3986, section 3.5
        return _rfc3986.urlunsplit([scheme, authority, path, query, fragment])

    def set_proxy(self, host, type):
        orig_host = self.get_host()
        if self.get_type() == 'https' and not self._tunnel_host:
            # HTTPS via a proxy uses CONNECT tunnelling: remember the
            # real host instead of rewriting the request.
            self._tunnel_host = orig_host
        else:
            self.type = type
            self.__r_host = self.__original
        self.host = host

    def has_proxy(self):
        """Private method."""
        # has non-HTTPS proxy
        return self.__r_host == self.__original

    def get_origin_req_host(self):
        return self.origin_req_host

    def is_unverifiable(self):
        return self.unverifiable

    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val

    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)

    def get_header(self, header_name, default=None):
        # Normal headers take precedence over unredirected ones.
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))

    def header_items(self):
        hdrs = self.unredirected_hdrs.copy()
        hdrs.update(self.headers)
        return hdrs.items()
class OpenerDirector:
    """Manages a chain of handlers and opens URLs through them.

    Handlers register via naming convention: a method named
    <protocol>_open, <protocol>_request, <protocol>_response or
    <protocol>_error_<code> is discovered by add_handler() and later
    dispatched to from _open()/error().
    """

    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', client_version)]
        # manage the individual handlers
        self.handlers = []
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        # Index every conventionally-named method of *handler* into the
        # appropriate dispatch table; duck-typed on add_parent.
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))

        added = False
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue

            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    # numeric HTTP status codes become int keys
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue

            handlers = lookup.setdefault(kind, [])
            if handlers:
                # keep each per-kind handler list sorted by handler_order
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            # the handlers must work in an specific order, the order
            # is specified in a Handler attribute
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could. Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)
            result = func(*args)
            if result is not None:
                return result

    def _open(self, req, data=None):
        # Try the default handlers first, then protocol-specific ones,
        # and finally the catch-all "unknown" handlers.
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result

        protocol = req.get_type()
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result

        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)

    def error(self, proto, *args):
        # Dispatch an error to the registered error handlers.
        if proto in ('http', 'https'):
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http'] # https is not different than http
            proto = args[2]  # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result

        if http_err:
            # fall back to the default HTTP error handler (usually raises)
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)
# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both
def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and when applicable, HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    import types
    def isclass(obj):
        # True for both old-style and new-style classes (Python 2).
        return isinstance(obj, (types.ClassType, type))

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(httplib, 'HTTPS'):
        # Only offer HTTPS when httplib was built with SSL support.
        default_classes.append(HTTPSHandler)
    skip = set()
    # Drop any default class the caller overrides with a subclass
    # (or with an instance of a subclass).
    for klass in default_classes:
        for check in handlers:
            if isclass(check):
                if issubclass(check, klass):
                    skip.add(klass)
            elif isinstance(check, klass):
                skip.add(klass)
    for klass in skip:
        default_classes.remove(klass)

    for klass in default_classes:
        opener.add_handler(klass())

    for h in handlers:
        if isclass(h):
            # Handler classes may be passed as well as instances.
            h = h()
        opener.add_handler(h)
    return opener
class BaseHandler:
    """Common base for protocol handlers managed by an OpenerDirector."""

    # Default sort position; OpenerDirector orders handlers by this value.
    handler_order = 500

    def add_parent(self, parent):
        """Remember the OpenerDirector this handler was registered with."""
        self.parent = parent

    def close(self):
        """No-op; kept only for backwards compatibility."""
        pass

    def __lt__(self, other):
        """Order handlers by handler_order; unaware classes sort after us."""
        if not hasattr(other, "handler_order"):
            # Preserve the old behavior of placing custom classes which
            # know nothing of handler_order after the default ones.
            return True
        return self.handler_order < other.handler_order
class HTTPErrorProcessor(BaseHandler):
    """Process HTTP error responses.

    The purpose of this handler is to allow other response processors a
    look-in by removing the call to parent.error() from
    AbstractHTTPHandler.

    For non-2xx error codes, this just passes the job on to the
    Handler.<proto>_error_<code> methods, via the OpenerDirector.error
    method.  Eventually, HTTPDefaultErrorHandler will raise an HTTPError
    if no other handler handles the error.
    """
    handler_order = 1000  # after all other processors

    def http_response(self, request, response):
        code, reason, headers = response.code, response.msg, response.info()
        # RFC 2616: any "2xx" code means the request was successfully
        # received, understood, and accepted; everything else is routed
        # through the OpenerDirector's error machinery.
        if code < 200 or code >= 300:
            # hardcoded 'http' is NOT a bug: OpenerDirector.error
            # treats https identically.
            response = self.parent.error(
                'http', request, response, code, reason, headers)
        return response

    https_response = http_response
class HTTPDefaultErrorHandler(BaseHandler):
    """Last-resort handler: turn an unhandled HTTP error into HTTPError."""

    def http_error_default(self, req, fp, code, msg, hdrs):
        # Why these error methods took the code, msg, headers args in the
        # first place rather than a response object, I don't know, but to
        # avoid multiple wrapping we reuse fp when it already is an
        # HTTPError (it is file-like).
        if not isinstance(fp, HTTPError):
            fp = HTTPError(req.get_full_url(), code, msg, hdrs, fp)
        # Sanity check: the error we raise must agree with the arguments
        # we were called with.
        assert code == fp.code
        assert msg == fp.msg
        assert hdrs == fp.hdrs
        raise fp
class HTTPRedirectHandler(BaseHandler):
    """Follow 3xx (and meta-refresh) redirects, with loop detection."""

    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4
    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10

    # Implementation notes:

    # To avoid the server sending us into an infinite loop, the request
    # object needs to track what URLs we have already seen.  Do this by
    # adding a handler-specific attribute to the Request object.  The value
    # of the dict is used to count the number of times the same URL has
    # been visited.  This is needed because visiting the same URL twice
    # does not necessarily imply a loop, thanks to state introduced by
    # cookies.

    # Always unhandled redirection codes:
    # 300 Multiple Choices: should not handle this here.
    # 304 Not Modified: no need to handle here: only of interest to caches
    #     that do conditional GETs
    # 305 Use Proxy: probably not worth dealing with here
    # 306 Unused: what was this for in the previous versions of protocol??

    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no-one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (code in (301, 302, 303, 307, "refresh") and m in ("GET", "HEAD")
            or code in (301, 302, 303, "refresh") and m == "POST"):
            # Strictly (according to RFC 2616), 301 or 302 in response
            # to a POST MUST NOT cause a redirection without confirmation
            # from the user (of urllib2, in this case).  In practice,
            # essentially all clients do redirect in this case, so we do
            # the same.
            # TODO: really refresh redirections should be visiting; tricky to fix
            new = _request.Request(
                newurl,
                headers=req.headers,
                origin_req_host=req.get_origin_req_host(),
                unverifiable=True,
                visit=False,
                timeout=req.timeout)
            # Remember the request that started the redirect chain.
            new._origin_req = getattr(req, "_origin_req", req)
            return new
        else:
            raise HTTPError(req.get_full_url(), code, msg, headers, fp)

    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        if 'location' in headers:
            newurl = headers.getheaders('location')[0]
        elif 'uri' in headers:
            newurl = headers.getheaders('uri')[0]
        else:
            # No redirect target at all; let other handlers deal with it.
            return
        newurl = _rfc3986.clean_url(newurl, "latin-1")
        # The target may be relative; resolve it against the request URL.
        newurl = _rfc3986.urljoin(req.get_full_url(), newurl)

        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return

        # loop detection
        # .redirect_dict has a key url if url was previously visited.
        if hasattr(req, 'redirect_dict'):
            visited = new.redirect_dict = req.redirect_dict
            if (visited.get(newurl, 0) >= self.max_repeats or
                len(visited) >= self.max_redirections):
                raise HTTPError(req.get_full_url(), code,
                                self.inf_msg + msg, headers, fp)
        else:
            visited = new.redirect_dict = req.redirect_dict = {}
        visited[newurl] = visited.get(newurl, 0) + 1

        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()

        return self.parent.open(new)

    http_error_301 = http_error_303 = http_error_307 = http_error_302
    http_error_refresh = http_error_302

    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"
def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.

    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme:

    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'

    The first three items of the returned tuple may be None.

    Examples of authority parsing:

    >>> _parse_proxy('proxy.example.com')
    (None, None, None, 'proxy.example.com')
    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')

    The authority component may optionally include userinfo (assumed to be
    username:password):

    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('joe:password@proxy.example.com:3128')
    (None, 'joe', 'password', 'proxy.example.com:3128')

    Same examples, but with URLs instead:

    >>> _parse_proxy('http://proxy.example.com/')
    ('http', None, None, 'proxy.example.com')
    >>> _parse_proxy('http://proxy.example.com:3128/')
    ('http', None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('http://joe:password@proxy.example.com/')
    ('http', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
    ('http', 'joe', 'password', 'proxy.example.com:3128')

    Everything after the authority is ignored:

    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')

    Test for no trailing '/' case:

    >>> _parse_proxy('http://joe:password@proxy.example.com')
    ('http', 'joe', 'password', 'proxy.example.com')

    """
    scheme, r_scheme = splittype(proxy)
    if not r_scheme.startswith("/"):
        # authority (plain host[:port], possibly with userinfo)
        scheme = None
        authority = proxy
    else:
        # URL
        if not r_scheme.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.
        # and 3.3.), path is empty or starts with '/'
        end = r_scheme.find("/", 2)
        if end == -1:
            # No path at all: the authority is the rest of the string.
            end = None
        authority = r_scheme[2:end]
    userinfo, hostport = splituser(authority)
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    else:
        user = password = None
    return scheme, user, password, hostport
class ProxyHandler(BaseHandler):
    """Route requests through the proxies given in a scheme->URL mapping."""

    # Proxies must be in front
    handler_order = 100

    def __init__(self, proxies=None, proxy_bypass=None):
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
        self.proxies = proxies
        # Dynamically create one <scheme>_open method per configured
        # scheme.  The lambda's default arguments freeze the per-scheme
        # values at definition time (avoiding late-binding closures).
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open: \
                    meth(r, proxy, type))
        if proxy_bypass is None:
            proxy_bypass = urllib.proxy_bypass
        self._proxy_bypass = proxy_bypass

    def proxy_open(self, req, proxy, type):
        orig_type = req.get_type()
        proxy_type, user, password, hostport = _parse_proxy(proxy)
        if proxy_type is None:
            # Proxy given as a bare authority: assume the request scheme.
            proxy_type = orig_type
        if req.get_host() and self._proxy_bypass(req.get_host()):
            # Host is on the bypass list; let other handlers open it.
            return None
        if user and password:
            user_pass = '%s:%s' % (unquote(user), unquote(password))
            creds = base64.b64encode(user_pass).strip()
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)
        if orig_type == proxy_type or orig_type == 'https':
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req)
class HTTPPasswordMgr:
    """Map (realm, URI prefix) pairs to (user, password) credentials."""

    def __init__(self):
        # {realm: {tuple_of_reduced_uris: (user, password)}}
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        # uri could be a single URI or a sequence
        if isinstance(uri, basestring):
            uri = [uri]
        if not realm in self.passwd:
            self.passwd[realm] = {}
        # Store the credentials under both the with-default-port and
        # without-default-port reduced forms so lookups match either.
        for default_port in True, False:
            reduced_uri = tuple(
                [self.reduce_uri(u, default_port) for u in uri])
            self.passwd[realm][reduced_uri] = (user, passwd)

    def find_user_password(self, realm, authuri):
        """Return (user, password) for realm/authuri, or (None, None)."""
        domains = self.passwd.get(realm, {})
        for default_port in True, False:
            reduced_authuri = self.reduce_uri(authuri, default_port)
            for uris, authinfo in domains.iteritems():
                for uri in uris:
                    if self.is_suburi(uri, reduced_authuri):
                        return authinfo
        return None, None

    def reduce_uri(self, uri, default_port=True):
        """Accept authority or URI and extract only the authority and path."""
        # note HTTP URLs do not have a userinfo component
        parts = urlparse.urlsplit(uri)
        if parts[1]:
            # URI
            scheme = parts[0]
            authority = parts[1]
            path = parts[2] or '/'
        else:
            # host or host:port
            scheme = None
            authority = uri
            path = '/'
        host, port = splitport(authority)
        if default_port and port is None and scheme is not None:
            # Normalise by appending the scheme's well-known port.
            dport = {"http": 80,
                     "https": 443,
                     }.get(scheme)
            if dport is not None:
                authority = "%s:%d" % (host, dport)
        return authority, path

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        # NOTE(review): commonprefix is character-based, not
        # path-segment-based, so base path '/foo' also matches
        # '/foobar' -- confirm whether that is intended.
        common = posixpath.commonprefix((base[1], test[1]))
        if len(common) == len(base[1]):
            return True
        return False
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
    """Password manager that falls back to a wildcard (None) realm."""

    def find_user_password(self, realm, authuri):
        # Try the exact realm first; if nothing matched, retry with the
        # default realm (registered under None).
        creds = HTTPPasswordMgr.find_user_password(self, realm, authuri)
        if creds[0] is None:
            creds = HTTPPasswordMgr.find_user_password(self, None, authuri)
        return creds
class AbstractBasicAuthHandler:
    """Shared machinery for HTTP Basic authentication (RFC 2617).

    Subclasses supply ``auth_header`` and wire up the relevant
    http_error_40x method.
    """

    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.

    # allow for double- and single-quoted realm values
    # (single quotes are a violation of the RFC, but appear in the wild)
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\'])(.*?)\\2', re.I)

    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # production).

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        # Expose the manager's add_password directly on the handler.
        self.add_password = self.passwd.add_password

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # host may be an authority (without userinfo) or a URL with an
        # authority
        # XXX could be multiple headers
        authreq = headers.get(authreq, None)
        if authreq:
            mo = AbstractBasicAuthHandler.rx.search(authreq)
            if mo:
                scheme, quote, realm = mo.groups()
                if scheme.lower() == 'basic':
                    return self.retry_http_basic_auth(host, req, realm)

    def retry_http_basic_auth(self, host, req, realm):
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is not None:
            raw = "%s:%s" % (user, pw)
            auth = 'Basic %s' % base64.b64encode(raw).strip()
            if req.headers.get(self.auth_header, None) == auth:
                # The same credentials were already rejected once;
                # retrying with them would loop forever.
                return None
            newreq = copy.copy(req)
            newreq.add_header(self.auth_header, auth)
            newreq.visit = False
            return self.parent.open(newreq)
        else:
            return None
class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Respond to 401 responses with HTTP Basic credentials."""

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        # The full request URL serves as the authority for the
        # password-manager lookup.
        return self.http_error_auth_reqed('www-authenticate',
                                          req.get_full_url(), req, headers)
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Respond to 407 responses with Basic credentials for the proxy."""

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo
        # component in authority.  Assume there isn't one, since urllib2
        # does not (and should not, RFC 3986 s. 3.2.1) support requests
        # for URLs containing userinfo.
        return self.http_error_auth_reqed('proxy-authenticate',
                                          req.get_host(), req, headers)
def randombytes(n):
    """Return n random bytes.

    Uses the operating system's entropy source via os.urandom() (the
    same /dev/urandom the old open()-based code read, but without
    leaking a file handle or reading a binary device in text mode).
    Falls back to the non-cryptographic random module only when the
    platform provides no OS-level source, mirroring the original
    behaviour.
    """
    try:
        return os.urandom(n)
    except NotImplementedError:
        # No OS randomness source available on this platform.
        return "".join(chr(random.randrange(0, 256)) for _ in range(n))
class AbstractDigestAuthHandler:
    """Shared machinery for HTTP Digest authentication (RFC 2617).

    Subclasses supply ``auth_header`` and wire up the relevant
    http_error_40x method.
    """

    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.
    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.
    # XXX qop="auth-int" support is shaky

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        # Expose the manager's add_password directly on the handler.
        self.add_password = self.passwd.add_password
        self.retried = 0
        self.nonce_count = 0
        self.last_nonce = None

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        """Retry the request with Digest credentials, or raise HTTPError
        after too many failed attempts."""
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time. Hm. Unless the Password Manager is
            # prompting for the information. Crap. This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.get_full_url(), 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)

    def retry_http_digest_auth(self, req, auth):
        # auth looks like: 'Digest realm="...", nonce="...", ...'
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(parse_http_list(challenge))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                # Identical credentials were already rejected; give up
                # rather than loop.
                return None
            newreq = copy.copy(req)
            newreq.add_unredirected_header(self.auth_header, auth_val)
            newreq.visit = False
            return self.parent.open(newreq)

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        dig = sha1_digest("%s:%s:%s:%s" % (self.nonce_count, nonce,
                                           time.ctime(), randombytes(8)))
        return dig[:16]

    def get_authorization(self, req, chal):
        """Build the Digest Authorization header value from challenge
        *chal*, or return None if the challenge is unusable or no
        credentials are known."""
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None

        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            # Unsupported digest algorithm.
            return None

        user, pw = self.passwd.find_user_password(realm, req.get_full_url())
        if user is None:
            return None

        # XXX not implemented yet
        if req.has_data():
            entdig = self.get_entity_digest(req.get_data(), chal)
        else:
            entdig = None

        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.get_selector())
        if qop == 'auth':
            # RFC 2617: the nc value must increase each time the same
            # nonce is reused, and reset for a fresh nonce.
            if nonce == self.last_nonce:
                self.nonce_count += 1
            else:
                self.nonce_count = 1
                self.last_nonce = nonce

            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            logger = logging.getLogger("mechanize.auth")
            logger.info("digest auth auth-int qop is not supported, not "
                        "handling digest authentication")
            return None

        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.get_selector(),
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            # Only reachable when qop == 'auth' (other values returned
            # None above), so ncvalue/cnonce are always bound here.
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base

    def get_algorithm_impls(self, algorithm):
        """Return (hash, key-digest) callables for *algorithm*; hash is
        None when the algorithm is not supported."""
        # algorithm should be case-insensitive according to RFC2617
        algorithm = algorithm.upper()
        # BUG FIX: H was previously left unbound for algorithms other
        # than MD5/SHA, so unknown algorithms raised UnboundLocalError
        # instead of letting get_authorization's "if H is None" check
        # reject the challenge.
        H = None
        if algorithm == 'MD5':
            H = md5_digest
        elif algorithm == 'SHA':
            H = sha1_digest
        # XXX MD5-sess
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069.

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'
    handler_order = 490  # before Basic auth

    def http_error_401(self, req, fp, code, msg, headers):
        # The netloc of the request URL is the authority for the lookup.
        authority = urlparse.urlparse(req.get_full_url())[1]
        response = self.http_error_auth_reqed('www-authenticate',
                                              authority, req, headers)
        self.reset_retry_count()
        return response
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """Respond to 407 responses with Digest credentials for the proxy."""

    auth_header = 'Proxy-Authorization'
    handler_order = 490  # before Basic auth

    def http_error_407(self, req, fp, code, msg, headers):
        # For proxy auth the request host is the lookup authority.
        response = self.http_error_auth_reqed('proxy-authenticate',
                                              req.get_host(), req, headers)
        self.reset_retry_count()
        return response
class AbstractHTTPHandler(BaseHandler):
    """Shared request-preparation and connection logic for HTTP(S)."""

    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        # Forwarded to the httplib connection in do_open().
        self._debuglevel = level

    def do_request_(self, request):
        """Fill in default headers (Content-type/length for POSTs, Host,
        and the opener's addheaders) before the request is sent."""
        host = request.get_host()
        if not host:
            raise URLError('no host given')

        if request.has_data():  # POST
            data = request.get_data()
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                request.add_unredirected_header(
                    'Content-length', '%d' % len(data))

        sel_host = host
        if request.has_proxy():
            # When proxied, the selector is a full URL; the Host header
            # must name the origin server, not the proxy.
            scheme, sel = splittype(request.get_selector())
            sel_host, sel_path = splithost(sel)

        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)

        return request

    def do_open(self, http_class, req):
        """Return an addinfourl object for the request, using http_class.

        http_class must implement the HTTPConnection API from httplib.
        The addinfourl return value is a file-like object.  It also
        has methods and attributes including:

        - info(): return a mimetools.Message object for the headers

        - geturl(): return the original request URL

        - code: HTTP status code
        """
        host_port = req.get_host()
        if not host_port:
            raise URLError('no host given')

        try:
            h = http_class(host_port, timeout=req.timeout)
        except TypeError:
            # Python < 2.6, no per-connection timeout support
            h = http_class(host_port)
        h.set_debuglevel(self._debuglevel)

        headers = dict(req.headers)
        headers.update(req.unredirected_hdrs)
        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        # Normalise header-name capitalisation (e.g. 'content-type'
        # becomes 'Content-Type').
        headers = dict(
            (name.title(), val) for name, val in headers.items())

        if req._tunnel_host:
            # HTTPS through a proxy needs CONNECT tunnelling support.
            if not hasattr(h, "set_tunnel"):
                if not hasattr(h, "_set_tunnel"):
                    raise URLError("HTTPS through proxy not supported "
                                   "(Python >= 2.6.4 required)")
                else:
                    # python 2.6
                    set_tunnel = h._set_tunnel
            else:
                set_tunnel = h.set_tunnel
            set_tunnel(req._tunnel_host)

        try:
            h.request(req.get_method(), req.get_selector(), req.data, headers)
            r = h.getresponse()
        except socket.error, err:  # XXX what error?
            raise URLError(err)

        # Pick apart the HTTPResponse object to get the addinfourl
        # object initialized properly.

        # Wrap the HTTPResponse object in socket's file object adapter
        # for Windows.  That adapter calls recv(), so delegate recv()
        # to read().  This weird wrapping allows the returned object to
        # have readline() and readlines() methods.

        # XXX It might be better to extract the read buffering code
        # out of socket._fileobject() and into a base class.

        r.recv = r.read
        fp = create_readline_wrapper(r)

        resp = closeable_response(fp, r.msg, req.get_full_url(),
                                  r.status, r.reason)
        return resp
class HTTPHandler(AbstractHTTPHandler):
    """Open http: URLs over httplib.HTTPConnection."""

    def http_open(self, req):
        # All real work happens in the shared AbstractHTTPHandler code.
        conn_class = httplib.HTTPConnection
        return self.do_open(conn_class, req)

    http_request = AbstractHTTPHandler.do_request_
if hasattr(httplib, 'HTTPS'):
    # HTTPS support is only defined when the underlying httplib was
    # built with SSL available.

    class HTTPSConnectionFactory:
        """Callable factory producing HTTPSConnection objects bound to a
        fixed client key/certificate pair."""

        def __init__(self, key_file, cert_file):
            self._key_file = key_file
            self._cert_file = cert_file

        def __call__(self, hostport):
            return httplib.HTTPSConnection(
                hostport,
                key_file=self._key_file, cert_file=self._cert_file)

    class HTTPSHandler(AbstractHTTPHandler):
        """Open https: URLs, optionally with per-URL client certificates
        looked up through a client_cert_manager."""

        def __init__(self, client_cert_manager=None):
            AbstractHTTPHandler.__init__(self)
            self.client_cert_manager = client_cert_manager

        def https_open(self, req):
            if self.client_cert_manager is not None:
                # Bake the key/cert for this URL into the connection
                # factory passed down to do_open().
                key_file, cert_file = self.client_cert_manager.find_key_cert(
                    req.get_full_url())
                conn_factory = HTTPSConnectionFactory(key_file, cert_file)
            else:
                conn_factory = httplib.HTTPSConnection
            return self.do_open(conn_factory, req)

        https_request = AbstractHTTPHandler.do_request_
class HTTPCookieProcessor(BaseHandler):
    """Handle HTTP cookies.

    Public attributes:

    cookiejar: CookieJar instance
    """

    def __init__(self, cookiejar=None):
        # Build a fresh jar per instance so handlers never accidentally
        # share cookie state.
        self.cookiejar = CookieJar() if cookiejar is None else cookiejar

    def http_request(self, request):
        # Outgoing: attach any cookies held for this request's URL.
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        # Incoming: record Set-Cookie headers from the response.
        self.cookiejar.extract_cookies(response, request)
        return response

    https_request = http_request
    https_response = http_response
class UnknownHandler(BaseHandler):
    """Fallback opener: reject URL schemes nothing else claimed."""

    def unknown_open(self, req):
        scheme = req.get_type()
        raise URLError('unknown url type: %s' % scheme)
def parse_keqv_list(l):
    """Parse a list of key=value strings where keys are not duplicated.

    Values surrounded by double quotes have the quotes stripped.  If a
    key appears more than once, the last occurrence wins.
    """
    parsed = {}
    for elt in l:
        k, v = elt.split('=', 1)
        # Length guard: an empty value ('key=') used to raise IndexError
        # on v[0], and a lone '"' would be stripped to nothing.  Only
        # strip when there are genuinely two surrounding quotes.
        if len(v) > 1 and v[0] == '"' and v[-1] == '"':
            v = v[1:-1]
        parsed[k] = v
    return parsed
def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.

    Only double-quotes count, not single-quotes.
    """
    items = []
    buf = ''
    in_escape = False
    in_quote = False
    for ch in s:
        if in_escape:
            # Escaped character: append literally (the backslash itself
            # was consumed when the escape started).
            buf += ch
            in_escape = False
        elif in_quote:
            if ch == '\\':
                in_escape = True
            else:
                if ch == '"':
                    in_quote = False
                buf += ch
        elif ch == ',':
            # Unquoted comma: element boundary.
            items.append(buf)
            buf = ''
        else:
            if ch == '"':
                in_quote = True
            buf += ch
    # Flush the trailing element, if any.
    if buf:
        items.append(buf)
    return [item.strip() for item in items]
class FileHandler(BaseHandler):
    """Open file: URLs, delegating non-local hosts to the FTP handler."""

    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        url = req.get_selector()
        if url[:2] == '//' and url[2:3] != '/':
            # file://host/... with a non-empty host: treat it as FTP.
            req.type = 'ftp'
            return self.parent.open(req)
        else:
            return self.open_local_file(req)

    # names for the localhost
    names = None

    def get_names(self):
        # Lazily computed and cached on the class so all instances share it.
        if FileHandler.names is None:
            try:
                FileHandler.names = (socket.gethostbyname('localhost'),
                                     socket.gethostbyname(socket.gethostname()))
            except socket.gaierror:
                FileHandler.names = (socket.gethostbyname('localhost'),)
        return FileHandler.names

    # not entirely sure what the rules are here
    def open_local_file(self, req):
        try:
            import email.utils as emailutils
        except ImportError:
            # python 2.4
            import email.Utils as emailutils
        import mimetypes
        host = req.get_host()
        file = req.get_selector()
        localfile = url2pathname(file)
        try:
            stats = os.stat(localfile)
            size = stats.st_size
            modified = emailutils.formatdate(stats.st_mtime, usegmt=True)
            mtype = mimetypes.guess_type(file)[0]
            # Synthesise HTTP-style headers from filesystem metadata.
            headers = mimetools.Message(StringIO(
                'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
                (mtype or 'text/plain', size, modified)))
            if host:
                host, port = splitport(host)
            # Only serve the file when the URL host is empty or resolves
            # to this machine (and carries no explicit port).
            if not host or \
                (not port and socket.gethostbyname(host) in self.get_names()):
                return addinfourl(open(localfile, 'rb'),
                                  headers, 'file:'+file)
        except OSError, msg:
            # urllib2 users shouldn't expect OSErrors coming from urlopen()
            raise URLError(msg)
        raise URLError('file not on local host')
class FTPHandler(BaseHandler):
    """Open ftp: URLs via ftplib, honouring user:password in the URL."""

    def ftp_open(self, req):
        import ftplib
        import mimetypes
        host = req.get_host()
        if not host:
            raise URLError('ftp error: no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        else:
            port = int(port)

        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')

        try:
            host = socket.gethostbyname(host)
        except socket.error, msg:
            raise URLError(msg)
        path, attrs = splitattr(req.get_selector())
        dirs = path.split('/')
        dirs = map(unquote, dirs)
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            # Absolute path: drop the empty leading component.
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
            # 'I' (binary) transfer when fetching a file, 'D' for a
            # directory listing.
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    # An explicit ;type= attribute in the URL wins.
                    type = value.upper()
            fp, retrlen = fw.retrfile(file, type)
            headers = ""
            mtype = mimetypes.guess_type(req.get_full_url())[0]
            if mtype:
                headers += "Content-type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-length: %d\n" % retrlen
            sf = StringIO(headers)
            headers = mimetools.Message(sf)
            return addinfourl(fp, headers, req.get_full_url())
        except ftplib.all_errors, msg:
            # Re-raise as URLError with the original traceback attached.
            raise URLError, ('ftp error: %s' % msg), sys.exc_info()[2]

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        try:
            fw = ftpwrapper(user, passwd, host, port, dirs, timeout)
        except TypeError:
            # Python < 2.6, no per-connection timeout support
            fw = ftpwrapper(user, passwd, host, port, dirs)
##        fw.ftp.set_debuglevel(1)
        return fw
class CacheFTPHandler(FTPHandler):
    """FTP handler that caches and reuses ftpwrapper connections.

    XXX would be nice to have pluggable cache strategies
    XXX this stuff is definitely not thread safe
    """

    def __init__(self):
        self.cache = {}     # key -> ftpwrapper connection
        self.timeout = {}   # key -> absolute expiry time (epoch seconds)
        self.soonest = 0    # earliest expiry among cached connections
        self.delay = 60     # per-connection keep-alive, seconds
        self.max_conns = 16

    def setTimeout(self, t):
        self.delay = t

    def setMaxConns(self, m):
        self.max_conns = m

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        """Return a cached connection for this endpoint, creating (and
        caching) one if necessary; refreshes the expiry either way."""
        key = user, host, port, '/'.join(dirs), timeout
        if key in self.cache:
            self.timeout[key] = time.time() + self.delay
        else:
            self.cache[key] = ftpwrapper(user, passwd, host, port, dirs, timeout)
            self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]

    def check_cache(self):
        # first check for old ones
        t = time.time()
        if self.soonest <= t:
            # Iterate over a snapshot: we delete entries as we go, which
            # is only safe on a copy.
            for k, v in list(self.timeout.items()):
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
            # BUG FIX: min() raises ValueError on an empty sequence,
            # which happened whenever every cached connection expired.
            self.soonest = min(self.timeout.values()) if self.timeout else 0

        # then check the size
        if len(self.cache) == self.max_conns:
            # Evict one connection expiring soonest.
            # NOTE(review): unlike the expiry path above, this eviction
            # does not close() the connection -- confirm whether that
            # leak is intended.
            for k, v in self.timeout.items():
                if v == self.soonest:
                    del self.cache[k]
                    del self.timeout[k]
                    break
            self.soonest = min(self.timeout.values()) if self.timeout else 0
|
cjcjameson/gpdb
|
refs/heads/master
|
src/test/tinc/tincrepo/mpp/gpdb/tests/package/compat/test_compatibility.py
|
9
|
"""
Copyright (c) 2004-Present Pivotal Software, Inc.
This program and the accompanying materials are made available under
the terms of the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import tinctest
from mpp.models import MPPTestCase
from mpp.lib.gppkg.gppkg import Gppkg
gppkg = Gppkg()
pkgname = 'plperl'
class CompatiblityMPPTestCase(MPPTestCase):
    # Package-compatibility checks for gppkg across GPDB versions.
    # NOTE(review): the class name is missing an "i" ("Compatiblity");
    # left unchanged because renaming would alter test discovery.

    def __init__(self, methodName):
        super(CompatiblityMPPTestCase, self).__init__(methodName)

    @classmethod
    def setUpClass(self):
        # NOTE: this is a classmethod, so 'self' is actually the class
        # object; the conventional parameter name would be 'cls'.
        super(CompatiblityMPPTestCase, self).setUpClass()
        # Start from a clean slate: remove any previously installed copy.
        gppkg.run_gppkg_uninstall(pkgname)

    def test_install_should_fail(self):
        """@product_version gpdb: [4.3.5.0 -]"""
        "Old package on the new database which is above the version of 4.3.5.0 should fail"
        # (The bare string above is a no-op statement; only the first
        # string is the docstring, which tinc parses for metadata.)
        gppkg = Gppkg()
        build_type = None
        if os.environ.get("BUILD_TYPE"):
            build_type = os.environ["BUILD_TYPE"]
        # Force an 'rc' build type so the version check is exercised.
        os.environ["BUILD_TYPE"] = 'rc'
        # Installing a package built for 4.3.4.0 must be rejected here.
        with self.assertRaisesRegexp(Exception, 'Failed to install'):
            gppkg.gppkg_install(product_version='4.3.4.0', gppkg=pkgname)
        # Restore the caller's BUILD_TYPE if one was set.
        if build_type is not None:
            os.environ["BUILD_TYPE"] = build_type
        existed, _ = gppkg.check_pkg_exists(pkgname)
        self.assertFalse(existed)
|
GiladE/birde
|
refs/heads/master
|
venv/lib/python2.7/site-packages/django/contrib/sessions/management/commands/clearsessions.py
|
79
|
from importlib import import_module
from django.conf import settings
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
    """Management command that purges expired sessions from the backend."""

    help = "Can be run as a cronjob or directly to clean out expired sessions (only with the database backend at the moment)."

    def handle_noargs(self, **options):
        # Resolve the configured session engine module at run time.
        engine_path = settings.SESSION_ENGINE
        engine = import_module(engine_path)
        try:
            engine.SessionStore.clear_expired()
        except NotImplementedError:
            # Not every backend implements clear_expired.
            self.stderr.write("Session engine '%s' doesn't support clearing "
                              "expired sessions.\n" % engine_path)
|
joerocklin/gem5
|
refs/heads/master
|
src/cpu/ozone/OzoneCPU.py
|
26
|
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Kevin Lim
from m5.defines import buildEnv
from m5.params import *
from BaseCPU import BaseCPU
from OzoneChecker import OzoneChecker
# Ozone out-of-order CPU model.  Every attribute below is an m5 Param
# declaration: the string is the parameter's help text, and values are
# supplied from simulation config scripts.
class DerivOzoneCPU(BaseCPU):
    type = 'DerivOzoneCPU'

    # Thread and overall pipeline width configuration.
    numThreads = Param.Unsigned("number of HW thread contexts")

    width = Param.Unsigned("Width")
    frontEndWidth = Param.Unsigned("Front end width")
    frontEndLatency = Param.Unsigned("Front end latency")
    backEndWidth = Param.Unsigned("Back end width")
    backEndSquashLatency = Param.Unsigned("Back end squash latency")
    backEndLatency = Param.Unsigned("Back end latency")
    maxInstBufferSize = Param.Unsigned("Maximum instruction buffer size")
    maxOutstandingMemOps = Param.Unsigned("Maximum number of outstanding memory operations")

    # Inter-stage communication delays (in cycles).
    decodeToFetchDelay = Param.Unsigned("Decode to fetch delay")
    renameToFetchDelay = Param.Unsigned("Rename to fetch delay")
    iewToFetchDelay = Param.Unsigned("Issue/Execute/Writeback to fetch "
                                     "delay")
    commitToFetchDelay = Param.Unsigned("Commit to fetch delay")
    fetchWidth = Param.Unsigned("Fetch width")

    renameToDecodeDelay = Param.Unsigned("Rename to decode delay")
    iewToDecodeDelay = Param.Unsigned("Issue/Execute/Writeback to decode "
                                      "delay")
    commitToDecodeDelay = Param.Unsigned("Commit to decode delay")
    fetchToDecodeDelay = Param.Unsigned("Fetch to decode delay")
    decodeWidth = Param.Unsigned("Decode width")

    iewToRenameDelay = Param.Unsigned("Issue/Execute/Writeback to rename "
                                      "delay")
    commitToRenameDelay = Param.Unsigned("Commit to rename delay")
    decodeToRenameDelay = Param.Unsigned("Decode to rename delay")
    renameWidth = Param.Unsigned("Rename width")

    commitToIEWDelay = Param.Unsigned("Commit to "
                                      "Issue/Execute/Writeback delay")
    renameToIEWDelay = Param.Unsigned("Rename to "
                                      "Issue/Execute/Writeback delay")
    issueToExecuteDelay = Param.Unsigned("Issue to execute delay (internal "
                                         "to the IEW stage)")

    # Per-functional-unit issue/execute widths.
    issueWidth = Param.Unsigned("Issue width")
    executeWidth = Param.Unsigned("Execute width")
    executeIntWidth = Param.Unsigned("Integer execute width")
    executeFloatWidth = Param.Unsigned("Floating point execute width")
    executeBranchWidth = Param.Unsigned("Branch execute width")
    executeMemoryWidth = Param.Unsigned("Memory execute width")

    iewToCommitDelay = Param.Unsigned("Issue/Execute/Writeback to commit "
                                      "delay")
    renameToROBDelay = Param.Unsigned("Rename to reorder buffer delay")
    commitWidth = Param.Unsigned("Commit width")
    squashWidth = Param.Unsigned("Squash width")

    # Branch predictor configuration ('local' or 'tournament').
    predType = Param.String("Type of branch predictor ('local', 'tournament')")
    localPredictorSize = Param.Unsigned("Size of local predictor")
    localCtrBits = Param.Unsigned("Bits per counter")
    localHistoryTableSize = Param.Unsigned("Size of local history table")
    localHistoryBits = Param.Unsigned("Bits for the local history")
    globalPredictorSize = Param.Unsigned("Size of global predictor")
    globalCtrBits = Param.Unsigned("Bits per counter")
    globalHistoryBits = Param.Unsigned("Bits of history")
    choicePredictorSize = Param.Unsigned("Size of choice predictor")
    choiceCtrBits = Param.Unsigned("Bits of choice counters")

    BTBEntries = Param.Unsigned("Number of BTB entries")
    BTBTagSize = Param.Unsigned("Size of the BTB tags, in bits")

    RASSize = Param.Unsigned("RAS size")

    # Load/store queue and memory-dependence (store set) structures.
    LQEntries = Param.Unsigned("Number of load queue entries")
    SQEntries = Param.Unsigned("Number of store queue entries")
    lsqLimits = Param.Bool(True, "LSQ size limits dispatch")
    LFSTSize = Param.Unsigned("Last fetched store table size")
    SSITSize = Param.Unsigned("Store set ID table size")

    # Backend structure sizes.
    numPhysIntRegs = Param.Unsigned("Number of physical integer registers")
    numPhysFloatRegs = Param.Unsigned("Number of physical floating point "
                                      "registers")
    numIQEntries = Param.Unsigned("Number of instruction queue entries")
    numROBEntries = Param.Unsigned("Number of reorder buffer entries")

    instShiftAmt = Param.Unsigned("Number of bits to shift instructions by")

    # If the CheckerCPU is brought back to useability in the OzoneCPU, create a
    # function here called addCheckerCpu() to create a non-NULL Checker and
    # connect its TLBs (if needed)
|
google/skia
|
refs/heads/main
|
infra/bots/recipe_modules/checkout/__init__.py
|
3
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Recipe modules this module depends on; the recipe engine injects each of
# these onto the module's API object.
DEPS = [
  'depot_tools/bot_update',
  'depot_tools/gclient',
  'depot_tools/git',
  'depot_tools/tryserver',
  'recipe_engine/context',
  'recipe_engine/file',
  'recipe_engine/path',
  'recipe_engine/properties',
  'recipe_engine/python',
  'recipe_engine/step',
  'run',
  'vars',
]
|
mammique/django
|
refs/heads/tp_alpha
|
django/contrib/gis/gdal/field.py
|
214
|
from ctypes import byref, c_int
from datetime import date, datetime, time
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import OGRException
from django.contrib.gis.gdal.prototypes import ds as capi
from django.utils.encoding import force_text
# For more information, see the OGR C API source code:
# http://www.gdal.org/ogr/ogr__api_8h.html
#
# The OGR_Fld_* routines are relevant here.
class Field(GDALBase):
    """
    This class wraps an OGR Field, and needs to be instantiated
    from a Feature object.
    """

    #### Python 'magic' routines ####
    def __init__(self, feat, index):
        """
        Initializes on the feature object and the integer index of
        the field within the feature.
        """
        # Setting the feature pointer and index.
        self._feat = feat
        self._index = index

        # Getting the pointer for this field.
        fld_ptr = capi.get_feat_field_defn(feat.ptr, index)
        if not fld_ptr:
            raise OGRException('Cannot create OGR Field, invalid pointer given.')
        self.ptr = fld_ptr

        # Setting the class depending upon the OGR Field Type (OFT).
        # NOTE: reassigning __class__ specializes this instance in place;
        # the lookup must happen after self.ptr is set because the `type`
        # property reads through the pointer.
        self.__class__ = OGRFieldTypes[self.type]

        # OFTReal with no precision should be an OFTInteger.
        if isinstance(self, OFTReal) and self.precision == 0:
            self.__class__ = OFTInteger
            self._double = True

    def __str__(self):
        "Returns the string representation of the Field."
        return str(self.value).strip()

    #### Field Methods ####
    def as_double(self):
        "Retrieves the Field's value as a double (float)."
        return capi.get_field_as_double(self._feat.ptr, self._index)

    def as_int(self):
        "Retrieves the Field's value as an integer."
        return capi.get_field_as_integer(self._feat.ptr, self._index)

    def as_string(self):
        "Retrieves the Field's value as a string."
        string = capi.get_field_as_string(self._feat.ptr, self._index)
        return force_text(string, encoding=self._feat.encoding, strings_only=True)

    def as_datetime(self):
        """
        Retrieves the Field's value as a tuple of date & time components.
        Note: the components are returned as raw ctypes c_int objects
        (callers read `.value`), not plain Python ints.
        """
        yy, mm, dd, hh, mn, ss, tz = [c_int() for i in range(7)]
        status = capi.get_field_as_datetime(
            self._feat.ptr, self._index, byref(yy), byref(mm), byref(dd),
            byref(hh), byref(mn), byref(ss), byref(tz))
        if status:
            return (yy, mm, dd, hh, mn, ss, tz)
        else:
            raise OGRException('Unable to retrieve date & time information from the field.')

    #### Field Properties ####
    @property
    def name(self):
        "Returns the name of this Field."
        name = capi.get_field_name(self.ptr)
        return force_text(name, encoding=self._feat.encoding, strings_only=True)

    @property
    def precision(self):
        "Returns the precision of this Field."
        return capi.get_field_precision(self.ptr)

    @property
    def type(self):
        "Returns the OGR type of this Field."
        return capi.get_field_type(self.ptr)

    @property
    def type_name(self):
        "Return the OGR field type name for this Field."
        return capi.get_field_type_name(self.type)

    @property
    def value(self):
        "Returns the value of this Field."
        # Default is to get the field as a string; subclasses override
        # this with properly-typed conversions.
        return self.as_string()

    @property
    def width(self):
        "Returns the width of this Field."
        return capi.get_field_width(self.ptr)
### The Field sub-classes for each OGR Field type. ###
class OFTInteger(Field):
    # True only when Field.__init__ rebound a precision-0 OFTReal to this
    # class; such fields must be fetched as doubles.
    _double = False

    @property
    def value(self):
        "Returns an integer contained in this field."
        # Read precision-0 OFTReal fields as doubles and cast via Python
        # int to avoid overflowing the C integer accessor; otherwise use
        # the native integer accessor.
        return int(self.as_double()) if self._double else self.as_int()

    @property
    def type(self):
        """
        GDAL uses OFTReals to represent OFTIntegers in created
        shapefiles -- forcing the type here since the underlying field
        type may actually be OFTReal.
        """
        return 0
class OFTReal(Field):
    @property
    def value(self):
        "Returns a float contained in this field."
        return self.as_double()

# String & Binary fields, just subclasses; they inherit the default
# string-based `value` property from Field.
class OFTString(Field): pass
class OFTWideString(Field): pass
class OFTBinary(Field): pass
# OFTDate, OFTTime, OFTDateTime fields.
class OFTDate(Field):
    @property
    def value(self):
        "Returns a Python `date` object for the OFTDate field."
        try:
            # Only the first three (year, month, day) components matter.
            yy, mm, dd = self.as_datetime()[:3]
            return date(yy.value, mm.value, dd.value)
        except (ValueError, OGRException):
            # Unset or malformed dates come back as None, not an error.
            return None
class OFTDateTime(Field):
    @property
    def value(self):
        "Returns a Python `datetime` object for this OFTDateTime field."
        # TODO: Adapt timezone information.
        #  See http://lists.osgeo.org/pipermail/gdal-dev/2006-February/007990.html
        #  The `tz` variable has values of: 0=unknown, 1=localtime (ambiguous),
        #  100=GMT, 104=GMT+1, 80=GMT-5, etc.
        try:
            yy, mm, dd, hh, mn, ss, tz = self.as_datetime()
            # tz is deliberately discarded (see TODO above); the returned
            # datetime is naive.
            return datetime(yy.value, mm.value, dd.value, hh.value, mn.value, ss.value)
        except (ValueError, OGRException):
            # Unset or malformed values yield None rather than raising.
            return None
class OFTTime(Field):
    @property
    def value(self):
        "Returns a Python `time` object for this OFTTime field."
        try:
            # Date components and timezone are irrelevant for a time field.
            _, _, _, hh, mn, ss, _ = self.as_datetime()
            return time(hh.value, mn.value, ss.value)
        except (ValueError, OGRException):
            # Unset or malformed values yield None rather than raising.
            return None
# List fields are also just subclasses; like the plain string/binary
# fields they fall back to Field's string-based `value` property.
class OFTIntegerList(Field): pass
class OFTRealList(Field): pass
class OFTStringList(Field): pass
class OFTWideStringList(Field): pass
# Class mapping dictionary for OFT Types and reverse mapping.
# Keys are the integer OGR field-type codes returned by the C API.
OGRFieldTypes = {
    0: OFTInteger,
    1: OFTIntegerList,
    2: OFTReal,
    3: OFTRealList,
    4: OFTString,
    5: OFTStringList,
    6: OFTWideString,
    7: OFTWideStringList,
    8: OFTBinary,
    9: OFTDate,
    10: OFTTime,
    11: OFTDateTime,
}
# Reverse mapping: Field subclass -> OGR type code.  A dict comprehension
# avoids materializing an intermediate list of tuples.
ROGRFieldTypes = {cls: num for num, cls in OGRFieldTypes.items()}
|
google-code/android-scripting
|
refs/heads/master
|
python/src/Lib/lib2to3/fixes/fix_raw_input.py
|
53
|
"""Fixer that changes raw_input(...) into input(...)."""
# Author: Andre Roberge
# Local imports
from .. import fixer_base
from ..fixer_util import Name
class FixRawInput(fixer_base.BaseFix):
    # Match any call of the form raw_input(...), binding the name leaf to
    # 'name' so transform() can swap it.
    PATTERN = """
              power< name='raw_input' trailer< '(' [any] ')' > any* >
              """

    def transform(self, node, results):
        # Replace the 'raw_input' leaf with 'input', preserving the
        # original node's prefix (leading whitespace and comments).
        name = results["name"]
        name.replace(Name("input", prefix=name.get_prefix()))
|
bsmrstu-warriors/Moytri--The-Drone-Aider
|
refs/heads/master
|
Lib/argparse.py
|
111
|
# Author: Steven J. Bethard <steven.bethard@gmail.com>.
"""Command-line parsing library
This module is an optparse-inspired command-line parsing library that:
- handles both optional and positional arguments
- produces highly informative usage messages
- supports parsers that dispatch to sub-parsers
The following is a simple usage example that sums integers from the
command-line and writes the result to a file::
parser = argparse.ArgumentParser(
description='sum the integers at the command line')
parser.add_argument(
'integers', metavar='int', nargs='+', type=int,
help='an integer to be summed')
parser.add_argument(
'--log', default=sys.stdout, type=argparse.FileType('w'),
help='the file where the sum should be written')
args = parser.parse_args()
args.log.write('%s' % sum(args.integers))
args.log.close()
The module contains the following public classes:
- ArgumentParser -- The main entry point for command-line parsing. As the
example above shows, the add_argument() method is used to populate
the parser with actions for optional and positional arguments. Then
the parse_args() method is invoked to convert the args at the
command-line into an object with attributes.
- ArgumentError -- The exception raised by ArgumentParser objects when
there are errors with the parser's actions. Errors raised while
parsing the command-line are caught by ArgumentParser and emitted
as command-line messages.
- FileType -- A factory for defining types of files to be created. As the
example above shows, instances of FileType are typically passed as
the type= argument of add_argument() calls.
- Action -- The base class for parser actions. Typically actions are
selected by passing strings like 'store_true' or 'append_const' to
the action= argument of add_argument(). However, for greater
customization of ArgumentParser actions, subclasses of Action may
be defined and passed as the action= argument.
- HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,
ArgumentDefaultsHelpFormatter -- Formatter classes which
may be passed as the formatter_class= argument to the
ArgumentParser constructor. HelpFormatter is the default,
RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser
not to change the formatting for help text, and
ArgumentDefaultsHelpFormatter adds information about argument defaults
to the help.
All other classes in this module are considered implementation details.
(Also note that HelpFormatter and RawDescriptionHelpFormatter are only
considered public as object names -- the API of the formatter objects is
still considered an implementation detail.)
"""
__version__ = '1.1'
__all__ = [
'ArgumentParser',
'ArgumentError',
'ArgumentTypeError',
'FileType',
'HelpFormatter',
'ArgumentDefaultsHelpFormatter',
'RawDescriptionHelpFormatter',
'RawTextHelpFormatter',
'Namespace',
'Action',
'ONE_OR_MORE',
'OPTIONAL',
'PARSER',
'REMAINDER',
'SUPPRESS',
'ZERO_OR_MORE',
]
import collections as _collections
import copy as _copy
import os as _os
import re as _re
import sys as _sys
import textwrap as _textwrap
from gettext import gettext as _
def _callable(obj):
return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
# Sentinel meaning "omit this value entirely" (help text, defaults, usage).
SUPPRESS = '==SUPPRESS=='

# nargs markers: zero-or-one, zero-or-more, one-or-more, sub-parser
# consumption, and "gather all remaining args" respectively.
OPTIONAL = '?'
ZERO_OR_MORE = '*'
ONE_OR_MORE = '+'
PARSER = 'A...'
REMAINDER = '...'

# Namespace attribute used internally to accumulate unrecognized arguments.
_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args'
# =============================
# Utility functions and classes
# =============================
class _AttributeHolder(object):
"""Abstract base class that provides __repr__.
The __repr__ method returns a string in the format::
ClassName(attr=name, attr=name, ...)
The attributes are determined either by a class-level attribute,
'_kwarg_names', or by inspecting the instance __dict__.
"""
def __repr__(self):
type_name = type(self).__name__
arg_strings = []
for arg in self._get_args():
arg_strings.append(repr(arg))
for name, value in self._get_kwargs():
arg_strings.append('%s=%r' % (name, value))
return '%s(%s)' % (type_name, ', '.join(arg_strings))
def _get_kwargs(self):
return sorted(self.__dict__.items())
def _get_args(self):
return []
def _ensure_value(namespace, name, value):
if getattr(namespace, name, None) is None:
setattr(namespace, name, value)
return getattr(namespace, name)
# ===============
# Formatting Help
# ===============

class HelpFormatter(object):
    """Formatter for generating usage messages and argument help strings.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def __init__(self,
                 prog,
                 indent_increment=2,
                 max_help_position=24,
                 width=None):

        # default setting for width: terminal width minus a small margin
        if width is None:
            try:
                width = int(_os.environ['COLUMNS'])
            except (KeyError, ValueError):
                width = 80
            width -= 2

        self._prog = prog
        self._indent_increment = indent_increment
        self._max_help_position = max_help_position
        self._width = width

        self._current_indent = 0
        self._level = 0
        self._action_max_length = 0

        self._root_section = self._Section(self, None)
        self._current_section = self._root_section

        self._whitespace_matcher = _re.compile(r'\s+')
        self._long_break_matcher = _re.compile(r'\n\n\n+')

    # ===============================
    # Section and indentation methods
    # ===============================
    def _indent(self):
        self._current_indent += self._indent_increment
        self._level += 1

    def _dedent(self):
        self._current_indent -= self._indent_increment
        assert self._current_indent >= 0, 'Indent decreased below 0.'
        self._level -= 1

    class _Section(object):

        def __init__(self, formatter, parent, heading=None):
            self.formatter = formatter
            self.parent = parent
            self.heading = heading
            self.items = []

        def format_help(self):
            # format the indented section
            if self.parent is not None:
                self.formatter._indent()
            join = self.formatter._join_parts
            # BUG FIX: the item functions were previously invoked in a
            # throwaway loop before the join below, running every item
            # twice (and re-running entire nested sections).  Each item
            # must be invoked exactly once, here.
            item_help = join([func(*args) for func, args in self.items])
            if self.parent is not None:
                self.formatter._dedent()

            # return nothing if the section was empty
            if not item_help:
                return ''

            # add the heading if the section was non-empty
            if self.heading is not SUPPRESS and self.heading is not None:
                current_indent = self.formatter._current_indent
                heading = '%*s%s:\n' % (current_indent, '', self.heading)
            else:
                heading = ''

            # join the section-initial newline, the heading and the help
            return join(['\n', heading, item_help, '\n'])

    def _add_item(self, func, args):
        self._current_section.items.append((func, args))

    # ========================
    # Message building methods
    # ========================
    def start_section(self, heading):
        self._indent()
        section = self._Section(self, self._current_section, heading)
        self._add_item(section.format_help, [])
        self._current_section = section

    def end_section(self):
        self._current_section = self._current_section.parent
        self._dedent()

    def add_text(self, text):
        if text is not SUPPRESS and text is not None:
            self._add_item(self._format_text, [text])

    def add_usage(self, usage, actions, groups, prefix=None):
        if usage is not SUPPRESS:
            args = usage, actions, groups, prefix
            self._add_item(self._format_usage, args)

    def add_argument(self, action):
        if action.help is not SUPPRESS:

            # find all invocations
            get_invocation = self._format_action_invocation
            invocations = [get_invocation(action)]
            for subaction in self._iter_indented_subactions(action):
                invocations.append(get_invocation(subaction))

            # update the maximum item length
            invocation_length = max([len(s) for s in invocations])
            action_length = invocation_length + self._current_indent
            self._action_max_length = max(self._action_max_length,
                                          action_length)

            # add the item to the list
            self._add_item(self._format_action, [action])

    def add_arguments(self, actions):
        for action in actions:
            self.add_argument(action)

    # =======================
    # Help-formatting methods
    # =======================
    def format_help(self):
        help = self._root_section.format_help()
        if help:
            help = self._long_break_matcher.sub('\n\n', help)
            help = help.strip('\n') + '\n'
        return help

    def _join_parts(self, part_strings):
        return ''.join([part
                        for part in part_strings
                        if part and part is not SUPPRESS])

    def _format_usage(self, usage, actions, groups, prefix):
        if prefix is None:
            prefix = _('usage: ')

        # if usage is specified, use that
        if usage is not None:
            usage = usage % dict(prog=self._prog)

        # if no optionals or positionals are available, usage is just prog
        elif usage is None and not actions:
            usage = '%(prog)s' % dict(prog=self._prog)

        # if optionals and positionals are available, calculate usage
        elif usage is None:
            prog = '%(prog)s' % dict(prog=self._prog)

            # split optionals from positionals
            optionals = []
            positionals = []
            for action in actions:
                if action.option_strings:
                    optionals.append(action)
                else:
                    positionals.append(action)

            # build full usage string
            format = self._format_actions_usage
            action_usage = format(optionals + positionals, groups)
            usage = ' '.join([s for s in [prog, action_usage] if s])

            # wrap the usage parts if it's too long
            text_width = self._width - self._current_indent
            if len(prefix) + len(usage) > text_width:

                # break usage into wrappable parts
                part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
                opt_usage = format(optionals, groups)
                pos_usage = format(positionals, groups)
                opt_parts = _re.findall(part_regexp, opt_usage)
                pos_parts = _re.findall(part_regexp, pos_usage)
                assert ' '.join(opt_parts) == opt_usage
                assert ' '.join(pos_parts) == pos_usage

                # helper for wrapping lines
                def get_lines(parts, indent, prefix=None):
                    lines = []
                    line = []
                    if prefix is not None:
                        line_len = len(prefix) - 1
                    else:
                        line_len = len(indent) - 1
                    for part in parts:
                        if line_len + 1 + len(part) > text_width:
                            lines.append(indent + ' '.join(line))
                            line = []
                            line_len = len(indent) - 1
                        line.append(part)
                        line_len += len(part) + 1
                    if line:
                        lines.append(indent + ' '.join(line))
                    if prefix is not None:
                        lines[0] = lines[0][len(indent):]
                    return lines

                # if prog is short, follow it with optionals or positionals
                if len(prefix) + len(prog) <= 0.75 * text_width:
                    indent = ' ' * (len(prefix) + len(prog) + 1)
                    if opt_parts:
                        lines = get_lines([prog] + opt_parts, indent, prefix)
                        lines.extend(get_lines(pos_parts, indent))
                    elif pos_parts:
                        lines = get_lines([prog] + pos_parts, indent, prefix)
                    else:
                        lines = [prog]

                # if prog is long, put it on its own line
                else:
                    indent = ' ' * len(prefix)
                    parts = opt_parts + pos_parts
                    lines = get_lines(parts, indent)
                    if len(lines) > 1:
                        lines = []
                        lines.extend(get_lines(opt_parts, indent))
                        lines.extend(get_lines(pos_parts, indent))
                    lines = [prog] + lines

                # join lines into usage
                usage = '\n'.join(lines)

        # prefix with 'usage:'
        return '%s%s\n\n' % (prefix, usage)

    def _format_actions_usage(self, actions, groups):
        # find group indices and identify actions in groups
        group_actions = set()
        inserts = {}
        for group in groups:
            try:
                start = actions.index(group._group_actions[0])
            except ValueError:
                continue
            else:
                end = start + len(group._group_actions)
                if actions[start:end] == group._group_actions:
                    for action in group._group_actions:
                        group_actions.add(action)
                    if not group.required:
                        if start in inserts:
                            inserts[start] += ' ['
                        else:
                            inserts[start] = '['
                        inserts[end] = ']'
                    else:
                        if start in inserts:
                            inserts[start] += ' ('
                        else:
                            inserts[start] = '('
                        inserts[end] = ')'
                    for i in range(start + 1, end):
                        inserts[i] = '|'

        # collect all actions format strings
        parts = []
        for i, action in enumerate(actions):

            # suppressed arguments are marked with None
            # remove | separators for suppressed arguments
            if action.help is SUPPRESS:
                parts.append(None)
                if inserts.get(i) == '|':
                    inserts.pop(i)
                elif inserts.get(i + 1) == '|':
                    inserts.pop(i + 1)

            # produce all arg strings
            elif not action.option_strings:
                part = self._format_args(action, action.dest)

                # if it's in a group, strip the outer []
                if action in group_actions:
                    if part[0] == '[' and part[-1] == ']':
                        part = part[1:-1]

                # add the action string to the list
                parts.append(part)

            # produce the first way to invoke the option in brackets
            else:
                option_string = action.option_strings[0]

                # if the Optional doesn't take a value, format is:
                #    -s or --long
                if action.nargs == 0:
                    part = '%s' % option_string

                # if the Optional takes a value, format is:
                #    -s ARGS or --long ARGS
                else:
                    default = action.dest.upper()
                    args_string = self._format_args(action, default)
                    part = '%s %s' % (option_string, args_string)

                # make it look optional if it's not required or in a group
                if not action.required and action not in group_actions:
                    part = '[%s]' % part

                # add the action string to the list
                parts.append(part)

        # insert things at the necessary indices
        for i in sorted(inserts, reverse=True):
            parts[i:i] = [inserts[i]]

        # join all the action items with spaces
        text = ' '.join([item for item in parts if item is not None])

        # clean up separators for mutually exclusive groups
        open = r'[\[(]'
        close = r'[\])]'
        text = _re.sub(r'(%s) ' % open, r'\1', text)
        text = _re.sub(r' (%s)' % close, r'\1', text)
        text = _re.sub(r'%s *%s' % (open, close), r'', text)
        text = _re.sub(r'\(([^|]*)\)', r'\1', text)
        text = text.strip()

        # return the text
        return text

    def _format_text(self, text):
        if '%(prog)' in text:
            text = text % dict(prog=self._prog)
        text_width = self._width - self._current_indent
        indent = ' ' * self._current_indent
        return self._fill_text(text, text_width, indent) + '\n\n'

    def _format_action(self, action):
        # determine the required width and the entry label
        help_position = min(self._action_max_length + 2,
                            self._max_help_position)
        help_width = self._width - help_position
        action_width = help_position - self._current_indent - 2
        action_header = self._format_action_invocation(action)

        # no help; start on same line and add a final newline
        if not action.help:
            tup = self._current_indent, '', action_header
            action_header = '%*s%s\n' % tup

        # short action name; start on the same line and pad two spaces
        elif len(action_header) <= action_width:
            tup = self._current_indent, '', action_width, action_header
            action_header = '%*s%-*s  ' % tup
            indent_first = 0

        # long action name; start on the next line
        else:
            tup = self._current_indent, '', action_header
            action_header = '%*s%s\n' % tup
            indent_first = help_position

        # collect the pieces of the action help
        parts = [action_header]

        # if there was help for the action, add lines of help text
        if action.help:
            help_text = self._expand_help(action)
            help_lines = self._split_lines(help_text, help_width)
            parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
            for line in help_lines[1:]:
                parts.append('%*s%s\n' % (help_position, '', line))

        # or add a newline if the description doesn't end with one
        elif not action_header.endswith('\n'):
            parts.append('\n')

        # if there are any sub-actions, add their help as well
        for subaction in self._iter_indented_subactions(action):
            parts.append(self._format_action(subaction))

        # return a single string
        return self._join_parts(parts)

    def _format_action_invocation(self, action):
        if not action.option_strings:
            metavar, = self._metavar_formatter(action, action.dest)(1)
            return metavar

        else:
            parts = []

            # if the Optional doesn't take a value, format is:
            #    -s, --long
            if action.nargs == 0:
                parts.extend(action.option_strings)

            # if the Optional takes a value, format is:
            #    -s ARGS, --long ARGS
            else:
                default = action.dest.upper()
                args_string = self._format_args(action, default)
                for option_string in action.option_strings:
                    parts.append('%s %s' % (option_string, args_string))

            return ', '.join(parts)

    def _metavar_formatter(self, action, default_metavar):
        if action.metavar is not None:
            result = action.metavar
        elif action.choices is not None:
            choice_strs = [str(choice) for choice in action.choices]
            result = '{%s}' % ','.join(choice_strs)
        else:
            result = default_metavar

        # The returned callable yields a tuple of `tuple_size` metavars,
        # repeating a single metavar as needed.
        def format(tuple_size):
            if isinstance(result, tuple):
                return result
            else:
                return (result, ) * tuple_size
        return format

    def _format_args(self, action, default_metavar):
        get_metavar = self._metavar_formatter(action, default_metavar)
        if action.nargs is None:
            result = '%s' % get_metavar(1)
        elif action.nargs == OPTIONAL:
            result = '[%s]' % get_metavar(1)
        elif action.nargs == ZERO_OR_MORE:
            result = '[%s [%s ...]]' % get_metavar(2)
        elif action.nargs == ONE_OR_MORE:
            result = '%s [%s ...]' % get_metavar(2)
        elif action.nargs == REMAINDER:
            result = '...'
        elif action.nargs == PARSER:
            result = '%s ...' % get_metavar(1)
        else:
            formats = ['%s' for _ in range(action.nargs)]
            result = ' '.join(formats) % get_metavar(action.nargs)
        return result

    def _expand_help(self, action):
        params = dict(vars(action), prog=self._prog)
        for name in list(params):
            if params[name] is SUPPRESS:
                del params[name]
        for name in list(params):
            if hasattr(params[name], '__name__'):
                params[name] = params[name].__name__
        if params.get('choices') is not None:
            choices_str = ', '.join([str(c) for c in params['choices']])
            params['choices'] = choices_str
        return self._get_help_string(action) % params

    def _iter_indented_subactions(self, action):
        try:
            get_subactions = action._get_subactions
        except AttributeError:
            pass
        else:
            self._indent()
            for subaction in get_subactions():
                yield subaction
            self._dedent()

    def _split_lines(self, text, width):
        text = self._whitespace_matcher.sub(' ', text).strip()
        return _textwrap.wrap(text, width)

    def _fill_text(self, text, width, indent):
        text = self._whitespace_matcher.sub(' ', text).strip()
        return _textwrap.fill(text, width, initial_indent=indent,
                              subsequent_indent=indent)

    def _get_help_string(self, action):
        return action.help
class RawDescriptionHelpFormatter(HelpFormatter):
    """Help message formatter which retains any formatting in descriptions.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _fill_text(self, text, width, indent):
        # Keep the author's own line breaks; only prepend the indent to
        # each line instead of re-wrapping.
        return ''.join([indent + line for line in text.splitlines(True)])
class RawTextHelpFormatter(RawDescriptionHelpFormatter):
    """Help message formatter which retains formatting of all help text.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _split_lines(self, text, width):
        # Do not re-wrap help text; honor the author's line breaks.
        return text.splitlines()
class ArgumentDefaultsHelpFormatter(HelpFormatter):
    """Help message formatter which adds default values to argument help.

    Only the name of this class is considered a public API. All the methods
    provided by the class are considered an implementation detail.
    """

    def _get_help_string(self, action):
        help = action.help
        # Append '(default: ...)' only when the author did not already
        # mention %(default) and the default is meaningful: optionals
        # always, positionals only with '?' or '*' nargs.
        if '%(default)' not in action.help:
            if action.default is not SUPPRESS:
                defaulting_nargs = [OPTIONAL, ZERO_OR_MORE]
                if action.option_strings or action.nargs in defaulting_nargs:
                    help += ' (default: %(default)s)'
        return help
# =====================
# Options and Arguments
# =====================
def _get_action_name(argument):
if argument is None:
return None
elif argument.option_strings:
return '/'.join(argument.option_strings)
elif argument.metavar not in (None, SUPPRESS):
return argument.metavar
elif argument.dest not in (None, SUPPRESS):
return argument.dest
else:
return None
class ArgumentError(Exception):
    """An error from creating or using an argument (optional or positional).

    The string value of this exception is the message, augmented with
    information about the argument that caused it.
    """

    def __init__(self, argument, message):
        self.argument_name = _get_action_name(argument)
        self.message = message

    def __str__(self):
        # Prefix the message with the offending argument's name when known.
        if self.argument_name is None:
            return '%(message)s' % dict(message=self.message)
        return 'argument %(argument_name)s: %(message)s' % dict(
            message=self.message, argument_name=self.argument_name)
# Raised by type= callables to signal a conversion failure; the parser
# turns it into a user-facing error message.
class ArgumentTypeError(Exception):
    """An error from trying to convert a command line string to a type."""
    pass
# ==============
# Action classes
# ==============
class Action(_AttributeHolder):
"""Information about how to convert command line strings to Python objects.
Action objects are used by an ArgumentParser to represent the information
needed to parse a single argument from one or more strings from the
command line. The keyword arguments to the Action constructor are also
all attributes of Action instances.
Keyword Arguments:
- option_strings -- A list of command-line option strings which
should be associated with this action.
- dest -- The name of the attribute to hold the created object(s)
- nargs -- The number of command-line arguments that should be
consumed. By default, one argument will be consumed and a single
value will be produced. Other values include:
- N (an integer) consumes N arguments (and produces a list)
- '?' consumes zero or one arguments
- '*' consumes zero or more arguments (and produces a list)
- '+' consumes one or more arguments (and produces a list)
Note that the difference between the default and nargs=1 is that
with the default, a single value will be produced, while with
nargs=1, a list containing a single value will be produced.
- const -- The value to be produced if the option is specified and the
option uses an action that takes no values.
- default -- The value to be produced if the option is not specified.
- type -- The type which the command-line arguments should be converted
to, should be one of 'string', 'int', 'float', 'complex' or a
callable object that accepts a single string argument. If None,
'string' is assumed.
- choices -- A container of values that should be allowed. If not None,
after a command-line argument has been converted to the appropriate
type, an exception will be raised if it is not a member of this
collection.
- required -- True if the action must always be specified at the
command line. This is only meaningful for optional command-line
arguments.
- help -- The help string describing the argument.
- metavar -- The name to be used for the option's argument with the
help string. If None, the 'dest' value will be used as the name.
"""
def __init__(self,
option_strings,
dest,
nargs=None,
const=None,
default=None,
type=None,
choices=None,
required=False,
help=None,
metavar=None):
self.option_strings = option_strings
self.dest = dest
self.nargs = nargs
self.const = const
self.default = default
self.type = type
self.choices = choices
self.required = required
self.help = help
self.metavar = metavar
def _get_kwargs(self):
names = [
'option_strings',
'dest',
'nargs',
'const',
'default',
'type',
'choices',
'help',
'metavar',
]
return [(name, getattr(self, name)) for name in names]
    def __call__(self, parser, namespace, values, option_string=None):
        # Abstract hook: concrete Action subclasses override this to
        # consume the converted values (e.g. store them on the namespace).
        raise NotImplementedError(_('.__call__() not defined'))
class _StoreAction(Action):
    """Default action: store the (converted) value on the namespace."""
    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        # storing zero values makes no sense for this action
        if nargs == 0:
            raise ValueError('nargs for store actions must be > 0; if you '
                             'have nothing to store, actions such as store '
                             'true or store const may be more appropriate')
        # const is only consulted when nargs is OPTIONAL ('?')
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        init_kwargs = dict(option_strings=option_strings,
                           dest=dest,
                           nargs=nargs,
                           const=const,
                           default=default,
                           type=type,
                           choices=choices,
                           required=required,
                           help=help,
                           metavar=metavar)
        super(_StoreAction, self).__init__(**init_kwargs)
    def __call__(self, parser, namespace, values, option_string=None):
        # overwrite any previous value for this destination
        setattr(namespace, self.dest, values)
class _StoreConstAction(Action):
    """Store a fixed constant on the namespace when the option appears."""
    def __init__(self,
                 option_strings,
                 dest,
                 const,
                 default=None,
                 required=False,
                 help=None,
                 metavar=None):
        # NOTE: metavar is accepted but intentionally not forwarded; the
        # action consumes no arguments (nargs=0) so none is displayed.
        sup_init = super(_StoreConstAction, self).__init__
        sup_init(option_strings=option_strings,
                 dest=dest,
                 nargs=0,
                 const=const,
                 default=default,
                 required=required,
                 help=help)
    def __call__(self, parser, namespace, values, option_string=None):
        # values is always the empty list here; store the constant instead
        setattr(namespace, self.dest, self.const)
class _StoreTrueAction(_StoreConstAction):
    """Boolean flag: store True when present (default is False)."""
    def __init__(self,
                 option_strings,
                 dest,
                 default=False,
                 required=False,
                 help=None):
        super(_StoreTrueAction, self).__init__(option_strings=option_strings,
                                               dest=dest,
                                               const=True,
                                               default=default,
                                               required=required,
                                               help=help)
class _StoreFalseAction(_StoreConstAction):
    """Boolean flag: store False when present (default is True)."""
    def __init__(self,
                 option_strings,
                 dest,
                 default=True,
                 required=False,
                 help=None):
        super(_StoreFalseAction, self).__init__(option_strings=option_strings,
                                                dest=dest,
                                                const=False,
                                                default=default,
                                                required=required,
                                                help=help)
class _AppendAction(Action):
    """Collect each occurrence's value into a list on the namespace."""
    def __init__(self,
                 option_strings,
                 dest,
                 nargs=None,
                 const=None,
                 default=None,
                 type=None,
                 choices=None,
                 required=False,
                 help=None,
                 metavar=None):
        # appending zero values makes no sense for this action
        if nargs == 0:
            raise ValueError('nargs for append actions must be > 0; if arg '
                             'strings are not supplying the value to append, '
                             'the append const action may be more appropriate')
        # const is only consulted when nargs is OPTIONAL ('?')
        if const is not None and nargs != OPTIONAL:
            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
        init_kwargs = dict(option_strings=option_strings,
                           dest=dest,
                           nargs=nargs,
                           const=const,
                           default=default,
                           type=type,
                           choices=choices,
                           required=required,
                           help=help,
                           metavar=metavar)
        super(_AppendAction, self).__init__(**init_kwargs)
    def __call__(self, parser, namespace, values, option_string=None):
        # copy first so a shared default list is never mutated in place
        collected = _copy.copy(_ensure_value(namespace, self.dest, []))
        collected.append(values)
        setattr(namespace, self.dest, collected)
class _AppendConstAction(Action):
    """Append a fixed constant to a list each time the option appears."""
    def __init__(self,
                 option_strings,
                 dest,
                 const,
                 default=None,
                 required=False,
                 help=None,
                 metavar=None):
        super(_AppendConstAction, self).__init__(option_strings=option_strings,
                                                 dest=dest,
                                                 nargs=0,
                                                 const=const,
                                                 default=default,
                                                 required=required,
                                                 help=help,
                                                 metavar=metavar)
    def __call__(self, parser, namespace, values, option_string=None):
        # copy first so a shared default list is never mutated in place
        collected = _copy.copy(_ensure_value(namespace, self.dest, []))
        collected.append(self.const)
        setattr(namespace, self.dest, collected)
class _CountAction(Action):
    """Count how many times the option appears (e.g. -vvv yields 3)."""
    def __init__(self,
                 option_strings,
                 dest,
                 default=None,
                 required=False,
                 help=None):
        super(_CountAction, self).__init__(option_strings=option_strings,
                                           dest=dest,
                                           nargs=0,
                                           default=default,
                                           required=required,
                                           help=help)
    def __call__(self, parser, namespace, values, option_string=None):
        # a missing or None attribute starts the count at zero
        setattr(namespace, self.dest,
                _ensure_value(namespace, self.dest, 0) + 1)
class _HelpAction(Action):
    """Print the parser's help text and exit the program."""
    def __init__(self,
                 option_strings,
                 dest=SUPPRESS,
                 default=SUPPRESS,
                 help=None):
        super(_HelpAction, self).__init__(option_strings=option_strings,
                                          dest=dest,
                                          default=default,
                                          nargs=0,
                                          help=help)
    def __call__(self, parser, namespace, values, option_string=None):
        parser.print_help()
        parser.exit()
class _VersionAction(Action):
    """Print version information via the help formatter, then exit."""
    def __init__(self,
                 option_strings,
                 version=None,
                 dest=SUPPRESS,
                 default=SUPPRESS,
                 help="show program's version number and exit"):
        super(_VersionAction, self).__init__(option_strings=option_strings,
                                             dest=dest,
                                             default=default,
                                             nargs=0,
                                             help=help)
        self.version = version
    def __call__(self, parser, namespace, values, option_string=None):
        # fall back to the deprecated parser-level version attribute
        text = self.version
        if text is None:
            text = parser.version
        formatter = parser._get_formatter()
        formatter.add_text(text)
        parser.exit(message=formatter.format_help())
class _SubParsersAction(Action):
    """Action that dispatches the remaining command line to a named
    sub-parser.  Registered under the 'parsers' action name and created
    indirectly via ArgumentParser.add_subparsers().
    """
    class _ChoicesPseudoAction(Action):
        # Placeholder action whose only purpose is to let the help
        # formatter list a sub-command name alongside its help text.
        def __init__(self, name, help):
            sup = super(_SubParsersAction._ChoicesPseudoAction, self)
            sup.__init__(option_strings=[], dest=name, help=help)
    def __init__(self,
                 option_strings,
                 prog,
                 parser_class,
                 dest=SUPPRESS,
                 help=None,
                 metavar=None):
        self._prog_prefix = prog
        self._parser_class = parser_class
        # ordered so sub-commands appear in help in registration order
        self._name_parser_map = _collections.OrderedDict()
        self._choices_actions = []
        super(_SubParsersAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=PARSER,
            choices=self._name_parser_map,
            help=help,
            metavar=metavar)
    def add_parser(self, name, **kwargs):
        """Create, register and return a new sub-parser for *name*."""
        # set prog from the existing prefix
        if kwargs.get('prog') is None:
            kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
        # create a pseudo-action to hold the choice help
        if 'help' in kwargs:
            help = kwargs.pop('help')
            choice_action = self._ChoicesPseudoAction(name, help)
            self._choices_actions.append(choice_action)
        # create the parser and add it to the map
        parser = self._parser_class(**kwargs)
        self._name_parser_map[name] = parser
        return parser
    def _get_subactions(self):
        # consumed by the help formatter to render the choice list
        return self._choices_actions
    def __call__(self, parser, namespace, values, option_string=None):
        # values is [<sub-command name>, <remaining arg strings>...]
        parser_name = values[0]
        arg_strings = values[1:]
        # set the parser name if requested
        if self.dest is not SUPPRESS:
            setattr(namespace, self.dest, parser_name)
        # select the parser
        try:
            parser = self._name_parser_map[parser_name]
        except KeyError:
            tup = parser_name, ', '.join(self._name_parser_map)
            msg = _('unknown parser %r (choices: %s)') % tup
            raise ArgumentError(self, msg)
        # parse all the remaining options into the namespace
        # store any unrecognized options on the object, so that the top
        # level parser can decide what to do with them
        namespace, arg_strings = parser.parse_known_args(arg_strings, namespace)
        if arg_strings:
            vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
            getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)
# ==============
# Type classes
# ==============
class FileType(object):
    """Callable factory that opens its string argument as a file.
    Intended for use as the type= argument to add_argument().  The special
    name '-' maps to stdin (read modes) or stdout (write modes).
    Keyword Arguments:
        - mode -- mode string passed to the builtin open() (default 'r')
        - bufsize -- buffer size passed to open() (default -1)
    """
    def __init__(self, mode='r', bufsize=-1):
        self._mode = mode
        self._bufsize = bufsize
    def __call__(self, string):
        # '-' is shorthand for the standard streams
        if string == '-':
            if 'r' in self._mode:
                return _sys.stdin
            if 'w' in self._mode:
                return _sys.stdout
            raise ValueError(_('argument "-" with mode %r') % self._mode)
        # anything else is treated as a filesystem path
        try:
            return open(string, self._mode, self._bufsize)
        except IOError as e:
            message = _("can't open '%s': %s")
            raise ArgumentTypeError(message % (string, e))
    def __repr__(self):
        # default bufsize (-1) is omitted from the display
        shown = [repr(arg)
                 for arg in (self._mode, self._bufsize)
                 if arg != -1]
        return '%s(%s)' % (type(self).__name__, ', '.join(shown))
# ===========================
# Optional and Positional Parsing
# ===========================
class Namespace(_AttributeHolder):
    """Simple record object whose attributes come from keyword arguments.
    Two namespaces compare equal when their attribute dicts are equal;
    instances are deliberately unhashable.
    """
    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)
    __hash__ = None
    def __eq__(self, other):
        return vars(self) == vars(other)
    def __ne__(self, other):
        return not (self == other)
    def __contains__(self, key):
        return key in self.__dict__
class _ActionsContainer(object):
    """Shared base for ArgumentParser and argument groups.
    Holds the action list, the type/action registries, parser-level
    defaults and the option-string conflict-handling machinery.
    """
    def __init__(self,
                 description,
                 prefix_chars,
                 argument_default,
                 conflict_handler):
        super(_ActionsContainer, self).__init__()
        self.description = description
        self.argument_default = argument_default
        self.prefix_chars = prefix_chars
        self.conflict_handler = conflict_handler
        # set up registries
        self._registries = {}
        # register actions
        self.register('action', None, _StoreAction)
        self.register('action', 'store', _StoreAction)
        self.register('action', 'store_const', _StoreConstAction)
        self.register('action', 'store_true', _StoreTrueAction)
        self.register('action', 'store_false', _StoreFalseAction)
        self.register('action', 'append', _AppendAction)
        self.register('action', 'append_const', _AppendConstAction)
        self.register('action', 'count', _CountAction)
        self.register('action', 'help', _HelpAction)
        self.register('action', 'version', _VersionAction)
        self.register('action', 'parsers', _SubParsersAction)
        # raise an exception if the conflict handler is invalid
        self._get_handler()
        # action storage
        self._actions = []
        self._option_string_actions = {}
        # groups
        self._action_groups = []
        self._mutually_exclusive_groups = []
        # defaults storage
        self._defaults = {}
        # determines whether an "option" looks like a negative number
        self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')
        # whether or not there are any optionals that look like negative
        # numbers -- uses a list so it can be shared and edited
        self._has_negative_number_optionals = []
    # ====================
    # Registration methods
    # ====================
    def register(self, registry_name, value, object):
        """Map *value* to *object* inside the named registry."""
        registry = self._registries.setdefault(registry_name, {})
        registry[value] = object
    def _registry_get(self, registry_name, value, default=None):
        # look up a registered object, falling back to *default*
        return self._registries[registry_name].get(value, default)
    # ==================================
    # Namespace default accessor methods
    # ==================================
    def set_defaults(self, **kwargs):
        """Set parser-level defaults, updating already-added actions too."""
        self._defaults.update(kwargs)
        # if these defaults match any existing arguments, replace
        # the previous default on the object with the new one
        for action in self._actions:
            if action.dest in kwargs:
                action.default = kwargs[action.dest]
    def get_default(self, dest):
        """Return the default for *dest*: first non-None action default,
        then the parser-level default, else None."""
        for action in self._actions:
            if action.dest == dest and action.default is not None:
                return action.default
        return self._defaults.get(dest, None)
    # =======================
    # Adding argument actions
    # =======================
    def add_argument(self, *args, **kwargs):
        """
        add_argument(dest, ..., name=value, ...)
        add_argument(option_string, option_string, ..., name=value, ...)
        """
        # if no positional args are supplied or only one is supplied and
        # it doesn't look like an option string, parse a positional
        # argument
        chars = self.prefix_chars
        if not args or len(args) == 1 and args[0][0] not in chars:
            if args and 'dest' in kwargs:
                raise ValueError('dest supplied twice for positional argument')
            kwargs = self._get_positional_kwargs(*args, **kwargs)
        # otherwise, we're adding an optional argument
        else:
            kwargs = self._get_optional_kwargs(*args, **kwargs)
        # if no default was supplied, use the parser-level default
        if 'default' not in kwargs:
            dest = kwargs['dest']
            if dest in self._defaults:
                kwargs['default'] = self._defaults[dest]
            elif self.argument_default is not None:
                kwargs['default'] = self.argument_default
        # create the action object, and add it to the parser
        action_class = self._pop_action_class(kwargs)
        if not _callable(action_class):
            raise ValueError('unknown action "%s"' % (action_class,))
        action = action_class(**kwargs)
        # raise an error if the action type is not callable
        type_func = self._registry_get('type', action.type, action.type)
        if not _callable(type_func):
            raise ValueError('%r is not callable' % (type_func,))
        # raise an error if the metavar does not match the type
        if hasattr(self, "_get_formatter"):
            try:
                self._get_formatter()._format_args(action, None)
            except TypeError:
                raise ValueError("length of metavar tuple does not match nargs")
        return self._add_action(action)
    def add_argument_group(self, *args, **kwargs):
        """Create an _ArgumentGroup tied to this container and return it."""
        group = _ArgumentGroup(self, *args, **kwargs)
        self._action_groups.append(group)
        return group
    def add_mutually_exclusive_group(self, **kwargs):
        """Create a _MutuallyExclusiveGroup tied to this container."""
        group = _MutuallyExclusiveGroup(self, **kwargs)
        self._mutually_exclusive_groups.append(group)
        return group
    def _add_action(self, action):
        # resolve any conflicts
        self._check_conflict(action)
        # add to actions list
        self._actions.append(action)
        action.container = self
        # index the action by any option strings it has
        for option_string in action.option_strings:
            self._option_string_actions[option_string] = action
        # set the flag if any option strings look like negative numbers
        for option_string in action.option_strings:
            if self._negative_number_matcher.match(option_string):
                if not self._has_negative_number_optionals:
                    self._has_negative_number_optionals.append(True)
        # return the created action
        return action
    def _remove_action(self, action):
        self._actions.remove(action)
    def _add_container_actions(self, container):
        """Copy *container*'s actions into this one, preserving groups.
        Used to implement the parents= argument of ArgumentParser."""
        # collect groups by titles
        title_group_map = {}
        for group in self._action_groups:
            if group.title in title_group_map:
                msg = _('cannot merge actions - two groups are named %r')
                raise ValueError(msg % (group.title))
            title_group_map[group.title] = group
        # map each action to its group
        group_map = {}
        for group in container._action_groups:
            # if a group with the title exists, use that, otherwise
            # create a new group matching the container's group
            if group.title not in title_group_map:
                title_group_map[group.title] = self.add_argument_group(
                    title=group.title,
                    description=group.description,
                    conflict_handler=group.conflict_handler)
            # map the actions to their new group
            for action in group._group_actions:
                group_map[action] = title_group_map[group.title]
        # add container's mutually exclusive groups
        # NOTE: if add_mutually_exclusive_group ever gains title= and
        # description= then this code will need to be expanded as above
        for group in container._mutually_exclusive_groups:
            mutex_group = self.add_mutually_exclusive_group(
                required=group.required)
            # map the actions to their new mutex group
            for action in group._group_actions:
                group_map[action] = mutex_group
        # add all actions to this container or their group
        for action in container._actions:
            group_map.get(action, self)._add_action(action)
    def _get_positional_kwargs(self, dest, **kwargs):
        """Normalize add_argument() keywords for a positional argument."""
        # make sure required is not specified
        if 'required' in kwargs:
            msg = _("'required' is an invalid argument for positionals")
            raise TypeError(msg)
        # mark positional arguments as required if at least one is
        # always required
        if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
            kwargs['required'] = True
        if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
            kwargs['required'] = True
        # return the keyword arguments with no option strings
        return dict(kwargs, dest=dest, option_strings=[])
    def _get_optional_kwargs(self, *args, **kwargs):
        """Normalize add_argument() keywords for an optional argument."""
        # determine short and long option strings
        option_strings = []
        long_option_strings = []
        for option_string in args:
            # error on strings that don't start with an appropriate prefix
            if not option_string[0] in self.prefix_chars:
                msg = _('invalid option string %r: '
                        'must start with a character %r')
                tup = option_string, self.prefix_chars
                raise ValueError(msg % tup)
            # strings starting with two prefix characters are long options
            option_strings.append(option_string)
            if option_string[0] in self.prefix_chars:
                if len(option_string) > 1:
                    if option_string[1] in self.prefix_chars:
                        long_option_strings.append(option_string)
        # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
        dest = kwargs.pop('dest', None)
        if dest is None:
            if long_option_strings:
                dest_option_string = long_option_strings[0]
            else:
                dest_option_string = option_strings[0]
            dest = dest_option_string.lstrip(self.prefix_chars)
            if not dest:
                msg = _('dest= is required for options like %r')
                raise ValueError(msg % option_string)
            dest = dest.replace('-', '_')
        # return the updated keyword arguments
        return dict(kwargs, dest=dest, option_strings=option_strings)
    def _pop_action_class(self, kwargs, default=None):
        # resolve the action= value (string, class, or None) via the registry
        action = kwargs.pop('action', default)
        return self._registry_get('action', action, action)
    def _get_handler(self):
        # determine function from conflict handler string
        handler_func_name = '_handle_conflict_%s' % self.conflict_handler
        try:
            return getattr(self, handler_func_name)
        except AttributeError:
            msg = _('invalid conflict_resolution value: %r')
            raise ValueError(msg % self.conflict_handler)
    def _check_conflict(self, action):
        # find all options that conflict with this option
        confl_optionals = []
        for option_string in action.option_strings:
            if option_string in self._option_string_actions:
                confl_optional = self._option_string_actions[option_string]
                confl_optionals.append((option_string, confl_optional))
        # resolve any conflicts
        if confl_optionals:
            conflict_handler = self._get_handler()
            conflict_handler(action, confl_optionals)
    def _handle_conflict_error(self, action, conflicting_actions):
        # conflict_handler='error': refuse duplicate option strings
        message = _('conflicting option string(s): %s')
        conflict_string = ', '.join([option_string
                                     for option_string, action
                                     in conflicting_actions])
        raise ArgumentError(action, message % conflict_string)
    def _handle_conflict_resolve(self, action, conflicting_actions):
        # conflict_handler='resolve': newer definitions win
        # remove all conflicting options
        for option_string, action in conflicting_actions:
            # remove the conflicting option
            action.option_strings.remove(option_string)
            self._option_string_actions.pop(option_string, None)
            # if the option now has no option string, remove it from the
            # container holding it
            if not action.option_strings:
                action.container._remove_action(action)
class _ArgumentGroup(_ActionsContainer):
    """A titled sub-grouping of arguments that shares its parent's state."""
    def __init__(self, container, title=None, description=None, **kwargs):
        # inherit any unspecified settings from the parent container
        kwargs.setdefault('conflict_handler', container.conflict_handler)
        kwargs.setdefault('prefix_chars', container.prefix_chars)
        kwargs.setdefault('argument_default', container.argument_default)
        super(_ArgumentGroup, self).__init__(description=description, **kwargs)
        # group attributes
        self.title = title
        self._group_actions = []
        # alias (do not copy) the parent's registries and storage so that
        # actions added through this group are visible parser-wide
        self._registries = container._registries
        self._actions = container._actions
        self._option_string_actions = container._option_string_actions
        self._defaults = container._defaults
        self._has_negative_number_optionals = \
            container._has_negative_number_optionals
        self._mutually_exclusive_groups = container._mutually_exclusive_groups
    def _add_action(self, action):
        action = super(_ArgumentGroup, self)._add_action(action)
        self._group_actions.append(action)
        return action
    def _remove_action(self, action):
        super(_ArgumentGroup, self)._remove_action(action)
        self._group_actions.remove(action)
class _MutuallyExclusiveGroup(_ArgumentGroup):
    """Group whose member options may not be combined on one command line."""
    def __init__(self, container, required=False):
        super(_MutuallyExclusiveGroup, self).__init__(container)
        self.required = required
        self._container = container
    def _add_action(self, action):
        # a required member would make the exclusivity check meaningless
        if action.required:
            raise ValueError(_('mutually exclusive arguments must be optional'))
        action = self._container._add_action(action)
        self._group_actions.append(action)
        return action
    def _remove_action(self, action):
        self._container._remove_action(action)
        self._group_actions.remove(action)
class ArgumentParser(_AttributeHolder, _ActionsContainer):
"""Object for parsing command line strings into Python objects.
Keyword Arguments:
- prog -- The name of the program (default: sys.argv[0])
- usage -- A usage message (default: auto-generated from arguments)
- description -- A description of what the program does
- epilog -- Text following the argument descriptions
- parents -- Parsers whose arguments should be copied into this one
- formatter_class -- HelpFormatter class for printing help messages
- prefix_chars -- Characters that prefix optional arguments
- fromfile_prefix_chars -- Characters that prefix files containing
additional arguments
- argument_default -- The default value for all arguments
- conflict_handler -- String indicating how to handle conflicts
    - add_help -- Add a -h/--help option
"""
    def __init__(self,
                 prog=None,
                 usage=None,
                 description=None,
                 epilog=None,
                 version=None,
                 parents=[],
                 formatter_class=HelpFormatter,
                 prefix_chars='-',
                 fromfile_prefix_chars=None,
                 argument_default=None,
                 conflict_handler='error',
                 add_help=True):
        # NOTE: the mutable default parents=[] is safe here only because
        # it is never mutated below -- it is iterated read-only.
        if version is not None:
            import warnings
            warnings.warn(
                """The "version" argument to ArgumentParser is deprecated. """
                """Please use """
                """"add_argument(..., action='version', version="N", ...)" """
                """instead""", DeprecationWarning)
        superinit = super(ArgumentParser, self).__init__
        superinit(description=description,
                  prefix_chars=prefix_chars,
                  argument_default=argument_default,
                  conflict_handler=conflict_handler)
        # default setting for prog
        if prog is None:
            prog = _os.path.basename(_sys.argv[0])
        self.prog = prog
        self.usage = usage
        self.epilog = epilog
        self.version = version
        self.formatter_class = formatter_class
        self.fromfile_prefix_chars = fromfile_prefix_chars
        self.add_help = add_help
        # the two standard display groups every parser starts with
        add_group = self.add_argument_group
        self._positionals = add_group(_('positional arguments'))
        self._optionals = add_group(_('optional arguments'))
        self._subparsers = None
        # register types
        def identity(string):
            return string
        self.register('type', None, identity)
        # add help and version arguments if necessary
        # (using explicit default to override global argument_default)
        default_prefix = '-' if '-' in prefix_chars else prefix_chars[0]
        if self.add_help:
            self.add_argument(
                default_prefix+'h', default_prefix*2+'help',
                action='help', default=SUPPRESS,
                help=_('show this help message and exit'))
        if self.version:
            self.add_argument(
                default_prefix+'v', default_prefix*2+'version',
                action='version', default=SUPPRESS,
                version=self.version,
                help=_("show program's version number and exit"))
        # add parent arguments and defaults
        for parent in parents:
            self._add_container_actions(parent)
            try:
                defaults = parent._defaults
            except AttributeError:
                pass
            else:
                self._defaults.update(defaults)
# =======================
# Pretty __repr__ methods
# =======================
def _get_kwargs(self):
names = [
'prog',
'usage',
'description',
'version',
'formatter_class',
'conflict_handler',
'add_help',
]
return [(name, getattr(self, name)) for name in names]
# ==================================
# Optional/Positional adding methods
# ==================================
    def add_subparsers(self, **kwargs):
        """Create and return the single _SubParsersAction for this parser.
        Only one subparsers action is allowed per parser."""
        if self._subparsers is not None:
            self.error(_('cannot have multiple subparser arguments'))
        # add the parser class to the arguments if it's not present
        kwargs.setdefault('parser_class', type(self))
        if 'title' in kwargs or 'description' in kwargs:
            title = _(kwargs.pop('title', 'subcommands'))
            description = _(kwargs.pop('description', None))
            self._subparsers = self.add_argument_group(title, description)
        else:
            self._subparsers = self._positionals
        # prog defaults to the usage message of this parser, skipping
        # optional arguments and with no "usage:" prefix
        if kwargs.get('prog') is None:
            formatter = self._get_formatter()
            positionals = self._get_positional_actions()
            groups = self._mutually_exclusive_groups
            formatter.add_usage(self.usage, positionals, groups, '')
            kwargs['prog'] = formatter.format_help().strip()
        # create the parsers action and add it to the positionals list
        parsers_class = self._pop_action_class(kwargs, 'parsers')
        action = parsers_class(option_strings=[], **kwargs)
        self._subparsers._add_action(action)
        # return the created parsers action
        return action
def _add_action(self, action):
if action.option_strings:
self._optionals._add_action(action)
else:
self._positionals._add_action(action)
return action
def _get_optional_actions(self):
return [action
for action in self._actions
if action.option_strings]
def _get_positional_actions(self):
return [action
for action in self._actions
if not action.option_strings]
# =====================================
# Command line argument parsing methods
# =====================================
def parse_args(self, args=None, namespace=None):
args, argv = self.parse_known_args(args, namespace)
if argv:
msg = _('unrecognized arguments: %s')
self.error(msg % ' '.join(argv))
return args
    def parse_known_args(self, args=None, namespace=None):
        """Parse known args, returning (namespace, leftover_arg_strings).
        Unlike parse_args(), unrecognized strings are returned instead of
        triggering an error."""
        # args default to the system args
        if args is None:
            args = _sys.argv[1:]
        # default Namespace built from parser defaults
        if namespace is None:
            namespace = Namespace()
        # add any action defaults that aren't present
        for action in self._actions:
            if action.dest is not SUPPRESS:
                if not hasattr(namespace, action.dest):
                    if action.default is not SUPPRESS:
                        default = action.default
                        # string defaults go through the type converter
                        # (Python 2: basestring covers str and unicode)
                        if isinstance(action.default, basestring):
                            default = self._get_value(action, default)
                        setattr(namespace, action.dest, default)
        # add any parser defaults that aren't present
        for dest in self._defaults:
            if not hasattr(namespace, dest):
                setattr(namespace, dest, self._defaults[dest])
        # parse the arguments and exit if there are any errors
        try:
            namespace, args = self._parse_known_args(args, namespace)
            if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
                args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
                delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
            return namespace, args
        except ArgumentError:
            # self.error() prints usage and exits the process
            err = _sys.exc_info()[1]
            self.error(str(err))
def _parse_known_args(self, arg_strings, namespace):
# replace arg strings that are file references
if self.fromfile_prefix_chars is not None:
arg_strings = self._read_args_from_files(arg_strings)
# map all mutually exclusive arguments to the other arguments
# they can't occur with
action_conflicts = {}
for mutex_group in self._mutually_exclusive_groups:
group_actions = mutex_group._group_actions
for i, mutex_action in enumerate(mutex_group._group_actions):
conflicts = action_conflicts.setdefault(mutex_action, [])
conflicts.extend(group_actions[:i])
conflicts.extend(group_actions[i + 1:])
# find all option indices, and determine the arg_string_pattern
# which has an 'O' if there is an option at an index,
# an 'A' if there is an argument, or a '-' if there is a '--'
option_string_indices = {}
arg_string_pattern_parts = []
arg_strings_iter = iter(arg_strings)
for i, arg_string in enumerate(arg_strings_iter):
# all args after -- are non-options
if arg_string == '--':
arg_string_pattern_parts.append('-')
for arg_string in arg_strings_iter:
arg_string_pattern_parts.append('A')
# otherwise, add the arg to the arg strings
# and note the index if it was an option
else:
option_tuple = self._parse_optional(arg_string)
if option_tuple is None:
pattern = 'A'
else:
option_string_indices[i] = option_tuple
pattern = 'O'
arg_string_pattern_parts.append(pattern)
# join the pieces together to form the pattern
arg_strings_pattern = ''.join(arg_string_pattern_parts)
        # converts arg strings to the appropriate type and then takes the action
seen_actions = set()
seen_non_default_actions = set()
def take_action(action, argument_strings, option_string=None):
seen_actions.add(action)
argument_values = self._get_values(action, argument_strings)
# error if this argument is not allowed with other previously
# seen arguments, assuming that actions that use the default
# value don't really count as "present"
if argument_values is not action.default:
seen_non_default_actions.add(action)
for conflict_action in action_conflicts.get(action, []):
if conflict_action in seen_non_default_actions:
msg = _('not allowed with argument %s')
action_name = _get_action_name(conflict_action)
raise ArgumentError(action, msg % action_name)
# take the action if we didn't receive a SUPPRESS value
# (e.g. from a default)
if argument_values is not SUPPRESS:
action(self, namespace, argument_values, option_string)
# function to convert arg_strings into an optional action
def consume_optional(start_index):
# get the optional identified at this index
option_tuple = option_string_indices[start_index]
action, option_string, explicit_arg = option_tuple
# identify additional optionals in the same arg string
# (e.g. -xyz is the same as -x -y -z if no args are required)
match_argument = self._match_argument
action_tuples = []
while True:
# if we found no optional action, skip it
if action is None:
extras.append(arg_strings[start_index])
return start_index + 1
# if there is an explicit argument, try to match the
# optional's string arguments to only this
if explicit_arg is not None:
arg_count = match_argument(action, 'A')
# if the action is a single-dash option and takes no
# arguments, try to parse more single-dash options out
# of the tail of the option string
chars = self.prefix_chars
if arg_count == 0 and option_string[1] not in chars:
action_tuples.append((action, [], option_string))
char = option_string[0]
option_string = char + explicit_arg[0]
new_explicit_arg = explicit_arg[1:] or None
optionals_map = self._option_string_actions
if option_string in optionals_map:
action = optionals_map[option_string]
explicit_arg = new_explicit_arg
else:
msg = _('ignored explicit argument %r')
raise ArgumentError(action, msg % explicit_arg)
                # if the action expects exactly one argument, we've
# successfully matched the option; exit the loop
elif arg_count == 1:
stop = start_index + 1
args = [explicit_arg]
action_tuples.append((action, args, option_string))
break
# error if a double-dash option did not use the
# explicit argument
else:
msg = _('ignored explicit argument %r')
raise ArgumentError(action, msg % explicit_arg)
# if there is no explicit argument, try to match the
# optional's string arguments with the following strings
# if successful, exit the loop
else:
start = start_index + 1
selected_patterns = arg_strings_pattern[start:]
arg_count = match_argument(action, selected_patterns)
stop = start + arg_count
args = arg_strings[start:stop]
action_tuples.append((action, args, option_string))
break
# add the Optional to the list and return the index at which
# the Optional's string args stopped
assert action_tuples
for action, args, option_string in action_tuples:
take_action(action, args, option_string)
return stop
# the list of Positionals left to be parsed; this is modified
# by consume_positionals()
positionals = self._get_positional_actions()
# function to convert arg_strings into positional actions
def consume_positionals(start_index):
# match as many Positionals as possible
match_partial = self._match_arguments_partial
selected_pattern = arg_strings_pattern[start_index:]
arg_counts = match_partial(positionals, selected_pattern)
# slice off the appropriate arg strings for each Positional
# and add the Positional and its args to the list
for action, arg_count in zip(positionals, arg_counts):
args = arg_strings[start_index: start_index + arg_count]
start_index += arg_count
take_action(action, args)
# slice off the Positionals that we just parsed and return the
# index at which the Positionals' string args stopped
positionals[:] = positionals[len(arg_counts):]
return start_index
# consume Positionals and Optionals alternately, until we have
# passed the last option string
extras = []
start_index = 0
if option_string_indices:
max_option_string_index = max(option_string_indices)
else:
max_option_string_index = -1
while start_index <= max_option_string_index:
# consume any Positionals preceding the next option
next_option_string_index = min([
index
for index in option_string_indices
if index >= start_index])
if start_index != next_option_string_index:
positionals_end_index = consume_positionals(start_index)
# only try to parse the next optional if we didn't consume
# the option string during the positionals parsing
if positionals_end_index > start_index:
start_index = positionals_end_index
continue
else:
start_index = positionals_end_index
# if we consumed all the positionals we could and we're not
# at the index of an option string, there were extra arguments
if start_index not in option_string_indices:
strings = arg_strings[start_index:next_option_string_index]
extras.extend(strings)
start_index = next_option_string_index
# consume the next optional and any arguments for it
start_index = consume_optional(start_index)
# consume any positionals following the last Optional
stop_index = consume_positionals(start_index)
# if we didn't consume all the argument strings, there were extras
extras.extend(arg_strings[stop_index:])
# if we didn't use all the Positional objects, there were too few
# arg strings supplied.
if positionals:
self.error(_('too few arguments'))
# make sure all required actions were present
for action in self._actions:
if action.required:
if action not in seen_actions:
name = _get_action_name(action)
self.error(_('argument %s is required') % name)
# make sure all required groups had one option present
for group in self._mutually_exclusive_groups:
if group.required:
for action in group._group_actions:
if action in seen_non_default_actions:
break
# if no actions were used, report the error
else:
names = [_get_action_name(action)
for action in group._group_actions
if action.help is not SUPPRESS]
msg = _('one of the arguments %s is required')
self.error(msg % ' '.join(names))
# return the updated namespace and the extra arguments
return namespace, extras
    def _read_args_from_files(self, arg_strings):
        """Expand any file-reference arguments in *arg_strings*.

        Each argument whose first character is in ``fromfile_prefix_chars``
        names a file; the file's lines are turned into arguments via
        convert_arg_line_to_args() and expanded recursively.  A new list
        is returned; the input list is not modified.
        """
        # expand arguments referencing files
        new_arg_strings = []
        for arg_string in arg_strings:
            # for regular arguments, just add them back into the list
            if arg_string[0] not in self.fromfile_prefix_chars:
                new_arg_strings.append(arg_string)
            # replace arguments referencing files with the file content
            else:
                try:
                    args_file = open(arg_string[1:])
                    try:
                        arg_strings = []
                        for arg_line in args_file.read().splitlines():
                            for arg in self.convert_arg_line_to_args(arg_line):
                                arg_strings.append(arg)
                        # a file may itself contain @file references,
                        # so expand the collected arguments recursively
                        arg_strings = self._read_args_from_files(arg_strings)
                        new_arg_strings.extend(arg_strings)
                    finally:
                        args_file.close()
                except IOError:
                    # unreadable file: report as a parser error (which exits)
                    err = _sys.exc_info()[1]
                    self.error(str(err))
        # return the modified argument list
        return new_arg_strings
    def convert_arg_line_to_args(self, arg_line):
        """Split one line of an arguments file into argument strings.

        The default treats the entire line as a single argument; subclasses
        may override this (e.g. to split on whitespace).
        """
        return [arg_line]
def _match_argument(self, action, arg_strings_pattern):
# match the pattern for this action to the arg strings
nargs_pattern = self._get_nargs_pattern(action)
match = _re.match(nargs_pattern, arg_strings_pattern)
# raise an exception if we weren't able to find a match
if match is None:
nargs_errors = {
None: _('expected one argument'),
OPTIONAL: _('expected at most one argument'),
ONE_OR_MORE: _('expected at least one argument'),
}
default = _('expected %s argument(s)') % action.nargs
msg = nargs_errors.get(action.nargs, default)
raise ArgumentError(action, msg)
# return the number of arguments matched
return len(match.group(1))
def _match_arguments_partial(self, actions, arg_strings_pattern):
# progressively shorten the actions list by slicing off the
# final actions until we find a match
result = []
for i in range(len(actions), 0, -1):
actions_slice = actions[:i]
pattern = ''.join([self._get_nargs_pattern(action)
for action in actions_slice])
match = _re.match(pattern, arg_strings_pattern)
if match is not None:
result.extend([len(string) for string in match.groups()])
break
# return the list of arg string counts
return result
    def _parse_optional(self, arg_string):
        """Determine whether *arg_string* names an optional argument.

        Returns None when the string should be treated as a positional,
        otherwise an ``(action, option_string, explicit_arg)`` tuple.
        *action* is None when the string looks like an option but is not
        defined on this parser (it may belong to a subparser); the checks
        below are order-sensitive.
        """
        # if it's an empty string, it was meant to be a positional
        if not arg_string:
            return None
        # if it doesn't start with a prefix, it was meant to be positional
        if not arg_string[0] in self.prefix_chars:
            return None
        # if the option string is present in the parser, return the action
        if arg_string in self._option_string_actions:
            action = self._option_string_actions[arg_string]
            return action, arg_string, None
        # if it's just a single character, it was meant to be positional
        if len(arg_string) == 1:
            return None
        # if the option string before the "=" is present, return the action
        if '=' in arg_string:
            option_string, explicit_arg = arg_string.split('=', 1)
            if option_string in self._option_string_actions:
                action = self._option_string_actions[option_string]
                return action, option_string, explicit_arg
        # search through all possible prefixes of the option string
        # and all actions in the parser for possible interpretations
        option_tuples = self._get_option_tuples(arg_string)
        # if multiple actions match, the option string was ambiguous
        if len(option_tuples) > 1:
            options = ', '.join([option_string
                for action, option_string, explicit_arg in option_tuples])
            tup = arg_string, options
            self.error(_('ambiguous option: %s could match %s') % tup)
        # if exactly one action matched, this segmentation is good,
        # so return the parsed action
        elif len(option_tuples) == 1:
            option_tuple, = option_tuples
            return option_tuple
        # if it was not found as an option, but it looks like a negative
        # number, it was meant to be positional
        # unless there are negative-number-like options
        if self._negative_number_matcher.match(arg_string):
            if not self._has_negative_number_optionals:
                return None
        # if it contains a space, it was meant to be a positional
        if ' ' in arg_string:
            return None
        # it was meant to be an optional but there is no such option
        # in this parser (though it might be a valid option in a subparser)
        return None, arg_string, None
    def _get_option_tuples(self, option_string):
        """Return every (action, option_string, explicit_arg) interpretation
        of *option_string* as an abbreviated or concatenated option.

        NOTE: the loop variable deliberately rebinds ``option_string`` to
        each registered option while the original value lives on in
        ``option_prefix`` / ``short_option_prefix``.
        """
        result = []
        # option strings starting with two prefix characters are only
        # split at the '='
        chars = self.prefix_chars
        if option_string[0] in chars and option_string[1] in chars:
            if '=' in option_string:
                option_prefix, explicit_arg = option_string.split('=', 1)
            else:
                option_prefix = option_string
                explicit_arg = None
            for option_string in self._option_string_actions:
                if option_string.startswith(option_prefix):
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, explicit_arg
                    result.append(tup)
        # single character options can be concatenated with their arguments
        # but multiple character options always have to have their argument
        # separate
        elif option_string[0] in chars and option_string[1] not in chars:
            option_prefix = option_string
            explicit_arg = None
            short_option_prefix = option_string[:2]
            short_explicit_arg = option_string[2:]
            for option_string in self._option_string_actions:
                if option_string == short_option_prefix:
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, short_explicit_arg
                    result.append(tup)
                elif option_string.startswith(option_prefix):
                    action = self._option_string_actions[option_string]
                    tup = action, option_string, explicit_arg
                    result.append(tup)
        # shouldn't ever get here
        else:
            self.error(_('unexpected option string: %s') % option_string)
        # return the collected option tuples
        return result
    def _get_nargs_pattern(self, action):
        """Build the regex fragment that matches this action's arguments.

        In the pattern language used by the parsing loop, 'A' stands for a
        plain argument string, 'O' for an option string, and '-' for a
        '--' separator.
        """
        # in all examples below, we have to allow for '--' args
        # which are represented as '-' in the pattern
        nargs = action.nargs
        # the default (None) is assumed to be a single argument
        if nargs is None:
            nargs_pattern = '(-*A-*)'
        # allow zero or one arguments
        elif nargs == OPTIONAL:
            nargs_pattern = '(-*A?-*)'
        # allow zero or more arguments
        elif nargs == ZERO_OR_MORE:
            nargs_pattern = '(-*[A-]*)'
        # allow one or more arguments
        elif nargs == ONE_OR_MORE:
            nargs_pattern = '(-*A[A-]*)'
        # allow any number of options or arguments
        elif nargs == REMAINDER:
            nargs_pattern = '([-AO]*)'
        # allow one argument followed by any number of options or arguments
        elif nargs == PARSER:
            nargs_pattern = '(-*A[-AO]*)'
        # all others should be integers
        else:
            nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
        # if this is an optional action, -- is not allowed
        if action.option_strings:
            nargs_pattern = nargs_pattern.replace('-*', '')
            nargs_pattern = nargs_pattern.replace('-', '')
        # return the pattern
        return nargs_pattern
# ========================
# Value conversion methods
# ========================
    def _get_values(self, action, arg_strings):
        """Convert *arg_strings* into the final value(s) for *action*.

        Applies the action's type conversion and choices check, collapsing
        the result to a single value or a list depending on ``nargs``.
        The branch order below is significant.
        """
        # for everything but PARSER args, strip out '--'
        if action.nargs not in [PARSER, REMAINDER]:
            arg_strings = [s for s in arg_strings if s != '--']
        # optional argument produces a default when not present
        if not arg_strings and action.nargs == OPTIONAL:
            if action.option_strings:
                value = action.const
            else:
                value = action.default
            # string defaults still go through type conversion
            if isinstance(value, basestring):
                value = self._get_value(action, value)
                self._check_value(action, value)
        # when nargs='*' on a positional, if there were no command-line
        # args, use the default if it is anything other than None
        elif (not arg_strings and action.nargs == ZERO_OR_MORE and
              not action.option_strings):
            if action.default is not None:
                value = action.default
            else:
                value = arg_strings
            self._check_value(action, value)
        # single argument or optional argument produces a single value
        elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
            arg_string, = arg_strings
            value = self._get_value(action, arg_string)
            self._check_value(action, value)
        # REMAINDER arguments convert all values, checking none
        elif action.nargs == REMAINDER:
            value = [self._get_value(action, v) for v in arg_strings]
        # PARSER arguments convert all values, but check only the first
        elif action.nargs == PARSER:
            value = [self._get_value(action, v) for v in arg_strings]
            self._check_value(action, value[0])
        # all other types of nargs produce a list
        else:
            value = [self._get_value(action, v) for v in arg_strings]
            for v in value:
                self._check_value(action, v)
        # return the converted value
        return value
def _get_value(self, action, arg_string):
type_func = self._registry_get('type', action.type, action.type)
if not _callable(type_func):
msg = _('%r is not callable')
raise ArgumentError(action, msg % type_func)
# convert the value to the appropriate type
try:
result = type_func(arg_string)
# ArgumentTypeErrors indicate errors
except ArgumentTypeError:
name = getattr(action.type, '__name__', repr(action.type))
msg = str(_sys.exc_info()[1])
raise ArgumentError(action, msg)
# TypeErrors or ValueErrors also indicate errors
except (TypeError, ValueError):
name = getattr(action.type, '__name__', repr(action.type))
msg = _('invalid %s value: %r')
raise ArgumentError(action, msg % (name, arg_string))
# return the converted value
return result
def _check_value(self, action, value):
# converted value must be one of the choices (if specified)
if action.choices is not None and value not in action.choices:
tup = value, ', '.join(map(repr, action.choices))
msg = _('invalid choice: %r (choose from %s)') % tup
raise ArgumentError(action, msg)
# =======================
# Help-formatting methods
# =======================
def format_usage(self):
formatter = self._get_formatter()
formatter.add_usage(self.usage, self._actions,
self._mutually_exclusive_groups)
return formatter.format_help()
def format_help(self):
formatter = self._get_formatter()
# usage
formatter.add_usage(self.usage, self._actions,
self._mutually_exclusive_groups)
# description
formatter.add_text(self.description)
# positionals, optionals and user-defined groups
for action_group in self._action_groups:
formatter.start_section(action_group.title)
formatter.add_text(action_group.description)
formatter.add_arguments(action_group._group_actions)
formatter.end_section()
# epilog
formatter.add_text(self.epilog)
# determine help from format above
return formatter.format_help()
    def format_version(self):
        """Deprecated: return the parser's version message as a string."""
        import warnings
        warnings.warn(
            'The format_version method is deprecated -- the "version" '
            'argument to ArgumentParser is no longer supported.',
            DeprecationWarning)
        formatter = self._get_formatter()
        formatter.add_text(self.version)
        return formatter.format_help()
    def _get_formatter(self):
        """Instantiate the help formatter class configured for this parser."""
        return self.formatter_class(prog=self.prog)
# =====================
# Help-printing methods
# =====================
def print_usage(self, file=None):
if file is None:
file = _sys.stdout
self._print_message(self.format_usage(), file)
def print_help(self, file=None):
if file is None:
file = _sys.stdout
self._print_message(self.format_help(), file)
    def print_version(self, file=None):
        """Deprecated: write the version message to *file*."""
        import warnings
        warnings.warn(
            'The print_version method is deprecated -- the "version" '
            'argument to ArgumentParser is no longer supported.',
            DeprecationWarning)
        self._print_message(self.format_version(), file)
def _print_message(self, message, file=None):
if message:
if file is None:
file = _sys.stderr
file.write(message)
# ===============
# Exiting methods
# ===============
    def exit(self, status=0, message=None):
        """Terminate the program with *status*, first printing *message*
        to stderr when one is given."""
        if message:
            self._print_message(message, _sys.stderr)
        _sys.exit(status)
    def error(self, message):
        """error(message: string)

        Prints a usage message incorporating the message to stderr and
        exits.

        If you override this in a subclass, it should not return -- it
        should either exit or raise an exception.
        """
        self.print_usage(_sys.stderr)
        # exit status 2 is the conventional code for command-line errors
        self.exit(2, _('%s: error: %s\n') % (self.prog, message))
|
agry/NGECore2
|
refs/heads/master
|
scripts/object/waypoint/base/waypoint_default.py
|
85615
|
import sys
def setup(core, object):
    """No-op setup hook for the default waypoint template."""
    return
|
wimnat/ansible
|
refs/heads/devel
|
test/lib/ansible_test/_internal/io.py
|
30
|
"""Functions for disk IO."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import errno
import io
import json
import os
from . import types as t
from .encoding import (
ENCODING,
to_bytes,
to_text,
)
def read_json_file(path):  # type: (t.AnyStr) -> t.Any
    """Deserialize and return the JSON document stored at the specified path."""
    text = read_text_file(path)
    return json.loads(text)
def read_text_file(path):  # type: (t.AnyStr) -> t.Text
    """Return the contents of the specified path decoded as text."""
    raw = read_binary_file(path)
    return to_text(raw)
def read_binary_file(path):  # type: (t.AnyStr) -> bytes
    """Return the contents of the specified path as bytes."""
    with open_binary_file(path) as binary_file:
        contents = binary_file.read()
    return contents
def make_dirs(path):  # type: (str) -> None
    """Create a directory at path, including any necessary parent directories."""
    try:
        os.makedirs(to_bytes(path))
    except OSError as ex:
        # tolerate a pre-existing directory; re-raise any other failure
        if ex.errno != errno.EEXIST:
            raise
def write_json_file(path,  # type: str
                    content,  # type: t.Union[t.List[t.Any], t.Dict[str, t.Any]]
                    create_directories=False,  # type: bool
                    formatted=True,  # type: bool
                    encoder=None,  # type: t.Optional[t.Callable[[t.Any], t.Any]]
                    ):  # type: (...) -> None
    """Write the given json content to the specified path, optionally creating missing directories."""
    # formatted output is pretty-printed and key-sorted; otherwise compact
    if formatted:
        indent = 4
        separators = (', ', ': ')
    else:
        indent = None
        separators = (',', ':')
    text_content = json.dumps(content, sort_keys=formatted, indent=indent,
                              separators=separators, cls=encoder) + '\n'
    write_text_file(path, text_content, create_directories=create_directories)
def write_text_file(path, content, create_directories=False):  # type: (str, str, bool) -> None
    """Write the given text content to the specified path, optionally creating missing directories."""
    if create_directories:
        make_dirs(os.path.dirname(path))
    encoded = to_bytes(content)
    with open_binary_file(path, 'wb') as out_file:
        out_file.write(encoded)
def open_text_file(path, mode='r'):  # type: (str, str) -> t.TextIO
    """Open the given path for text access."""
    # binary modes would bypass decoding; callers must use open_binary_file
    if 'b' in mode:
        raise Exception('mode cannot include "b" for text files: %s' % mode)
    # the path is converted to bytes via to_bytes before opening
    # noinspection PyTypeChecker
    return io.open(to_bytes(path), mode, encoding=ENCODING)
def open_binary_file(path, mode='rb'):  # type: (str, str) -> t.BinaryIO
    """Open the given path for binary access."""
    # text modes would decode the data; callers must use open_text_file
    if 'b' not in mode:
        raise Exception('mode must include "b" for binary files: %s' % mode)
    # the path is converted to bytes via to_bytes before opening
    # noinspection PyTypeChecker
    return io.open(to_bytes(path), mode)
class SortedSetEncoder(json.JSONEncoder):
    """Encode sets as sorted lists."""
    def default(self, obj):  # pylint: disable=method-hidden, arguments-differ
        """Return a JSON-serializable form of *obj*, converting sets to sorted lists."""
        if isinstance(obj, set):
            return sorted(obj)
        # bug fix: the original called super(SortedSetEncoder).default(self, obj),
        # which builds an *unbound* super object and raises AttributeError instead
        # of delegating to json.JSONEncoder.default (which raises the intended
        # TypeError for unserializable values)
        return super(SortedSetEncoder, self).default(obj)
|
rebost/django
|
refs/heads/master
|
tests/regressiontests/localflavor/fi/tests.py
|
13
|
from __future__ import unicode_literals
from django.contrib.localflavor.fi.forms import (FIZipCodeField,
FISocialSecurityNumber, FIMunicipalitySelect)
from django.test import SimpleTestCase
class FILocalFlavorTests(SimpleTestCase):
    """Tests for the Finnish (FI) localflavor form fields and widgets."""
    def test_FIMunicipalitySelect(self):
        """Rendering the municipality select marks the given value selected."""
        f = FIMunicipalitySelect()
        # expected widget output: one <option> per Finnish municipality,
        # with 'turku' pre-selected
        out = '''<select name="municipalities">
<option value="akaa">Akaa</option>
<option value="alajarvi">Alaj\xe4rvi</option>
<option value="alavieska">Alavieska</option>
<option value="alavus">Alavus</option>
<option value="artjarvi">Artj\xe4rvi</option>
<option value="asikkala">Asikkala</option>
<option value="askola">Askola</option>
<option value="aura">Aura</option>
<option value="brando">Br\xe4nd\xf6</option>
<option value="eckero">Ecker\xf6</option>
<option value="enonkoski">Enonkoski</option>
<option value="enontekio">Enonteki\xf6</option>
<option value="espoo">Espoo</option>
<option value="eura">Eura</option>
<option value="eurajoki">Eurajoki</option>
<option value="evijarvi">Evij\xe4rvi</option>
<option value="finstrom">Finstr\xf6m</option>
<option value="forssa">Forssa</option>
<option value="foglo">F\xf6gl\xf6</option>
<option value="geta">Geta</option>
<option value="haapajarvi">Haapaj\xe4rvi</option>
<option value="haapavesi">Haapavesi</option>
<option value="hailuoto">Hailuoto</option>
<option value="halsua">Halsua</option>
<option value="hamina">Hamina</option>
<option value="hammarland">Hammarland</option>
<option value="hankasalmi">Hankasalmi</option>
<option value="hanko">Hanko</option>
<option value="harjavalta">Harjavalta</option>
<option value="hartola">Hartola</option>
<option value="hattula">Hattula</option>
<option value="haukipudas">Haukipudas</option>
<option value="hausjarvi">Hausj\xe4rvi</option>
<option value="heinola">Heinola</option>
<option value="heinavesi">Hein\xe4vesi</option>
<option value="helsinki">Helsinki</option>
<option value="hirvensalmi">Hirvensalmi</option>
<option value="hollola">Hollola</option>
<option value="honkajoki">Honkajoki</option>
<option value="huittinen">Huittinen</option>
<option value="humppila">Humppila</option>
<option value="hyrynsalmi">Hyrynsalmi</option>
<option value="hyvinkaa">Hyvink\xe4\xe4</option>
<option value="hameenkoski">H\xe4meenkoski</option>
<option value="hameenkyro">H\xe4meenkyr\xf6</option>
<option value="hameenlinna">H\xe4meenlinna</option>
<option value="ii">Ii</option>
<option value="iisalmi">Iisalmi</option>
<option value="iitti">Iitti</option>
<option value="ikaalinen">Ikaalinen</option>
<option value="ilmajoki">Ilmajoki</option>
<option value="ilomantsi">Ilomantsi</option>
<option value="imatra">Imatra</option>
<option value="inari">Inari</option>
<option value="inkoo">Inkoo</option>
<option value="isojoki">Isojoki</option>
<option value="isokyro">Isokyr\xf6</option>
<option value="jalasjarvi">Jalasj\xe4rvi</option>
<option value="janakkala">Janakkala</option>
<option value="joensuu">Joensuu</option>
<option value="jokioinen">Jokioinen</option>
<option value="jomala">Jomala</option>
<option value="joroinen">Joroinen</option>
<option value="joutsa">Joutsa</option>
<option value="juankoski">Juankoski</option>
<option value="juuka">Juuka</option>
<option value="juupajoki">Juupajoki</option>
<option value="juva">Juva</option>
<option value="jyvaskyla">Jyv\xe4skyl\xe4</option>
<option value="jamijarvi">J\xe4mij\xe4rvi</option>
<option value="jamsa">J\xe4ms\xe4</option>
<option value="jarvenpaa">J\xe4rvenp\xe4\xe4</option>
<option value="kaarina">Kaarina</option>
<option value="kaavi">Kaavi</option>
<option value="kajaani">Kajaani</option>
<option value="kalajoki">Kalajoki</option>
<option value="kangasala">Kangasala</option>
<option value="kangasniemi">Kangasniemi</option>
<option value="kankaanpaa">Kankaanp\xe4\xe4</option>
<option value="kannonkoski">Kannonkoski</option>
<option value="kannus">Kannus</option>
<option value="karijoki">Karijoki</option>
<option value="karjalohja">Karjalohja</option>
<option value="karkkila">Karkkila</option>
<option value="karstula">Karstula</option>
<option value="karttula">Karttula</option>
<option value="karvia">Karvia</option>
<option value="kaskinen">Kaskinen</option>
<option value="kauhajoki">Kauhajoki</option>
<option value="kauhava">Kauhava</option>
<option value="kauniainen">Kauniainen</option>
<option value="kaustinen">Kaustinen</option>
<option value="keitele">Keitele</option>
<option value="kemi">Kemi</option>
<option value="kemijarvi">Kemij\xe4rvi</option>
<option value="keminmaa">Keminmaa</option>
<option value="kemionsaari">Kemi\xf6nsaari</option>
<option value="kempele">Kempele</option>
<option value="kerava">Kerava</option>
<option value="kerimaki">Kerim\xe4ki</option>
<option value="kesalahti">Kes\xe4lahti</option>
<option value="keuruu">Keuruu</option>
<option value="kihnio">Kihni\xf6</option>
<option value="kiikoinen">Kiikoinen</option>
<option value="kiiminki">Kiiminki</option>
<option value="kinnula">Kinnula</option>
<option value="kirkkonummi">Kirkkonummi</option>
<option value="kitee">Kitee</option>
<option value="kittila">Kittil\xe4</option>
<option value="kiuruvesi">Kiuruvesi</option>
<option value="kivijarvi">Kivij\xe4rvi</option>
<option value="kokemaki">Kokem\xe4ki</option>
<option value="kokkola">Kokkola</option>
<option value="kolari">Kolari</option>
<option value="konnevesi">Konnevesi</option>
<option value="kontiolahti">Kontiolahti</option>
<option value="korsnas">Korsn\xe4s</option>
<option value="koskitl">Koski Tl</option>
<option value="kotka">Kotka</option>
<option value="kouvola">Kouvola</option>
<option value="kristiinankaupunki">Kristiinankaupunki</option>
<option value="kruunupyy">Kruunupyy</option>
<option value="kuhmalahti">Kuhmalahti</option>
<option value="kuhmo">Kuhmo</option>
<option value="kuhmoinen">Kuhmoinen</option>
<option value="kumlinge">Kumlinge</option>
<option value="kuopio">Kuopio</option>
<option value="kuortane">Kuortane</option>
<option value="kurikka">Kurikka</option>
<option value="kustavi">Kustavi</option>
<option value="kuusamo">Kuusamo</option>
<option value="kylmakoski">Kylm\xe4koski</option>
<option value="kyyjarvi">Kyyj\xe4rvi</option>
<option value="karkola">K\xe4rk\xf6l\xe4</option>
<option value="karsamaki">K\xe4rs\xe4m\xe4ki</option>
<option value="kokar">K\xf6kar</option>
<option value="koylio">K\xf6yli\xf6</option>
<option value="lahti">Lahti</option>
<option value="laihia">Laihia</option>
<option value="laitila">Laitila</option>
<option value="lapinjarvi">Lapinj\xe4rvi</option>
<option value="lapinlahti">Lapinlahti</option>
<option value="lappajarvi">Lappaj\xe4rvi</option>
<option value="lappeenranta">Lappeenranta</option>
<option value="lapua">Lapua</option>
<option value="laukaa">Laukaa</option>
<option value="lavia">Lavia</option>
<option value="lemi">Lemi</option>
<option value="lemland">Lemland</option>
<option value="lempaala">Lemp\xe4\xe4l\xe4</option>
<option value="leppavirta">Lepp\xe4virta</option>
<option value="lestijarvi">Lestij\xe4rvi</option>
<option value="lieksa">Lieksa</option>
<option value="lieto">Lieto</option>
<option value="liminka">Liminka</option>
<option value="liperi">Liperi</option>
<option value="lohja">Lohja</option>
<option value="loimaa">Loimaa</option>
<option value="loppi">Loppi</option>
<option value="loviisa">Loviisa</option>
<option value="luhanka">Luhanka</option>
<option value="lumijoki">Lumijoki</option>
<option value="lumparland">Lumparland</option>
<option value="luoto">Luoto</option>
<option value="luumaki">Luum\xe4ki</option>
<option value="luvia">Luvia</option>
<option value="lansi-turunmaa">L\xe4nsi-Turunmaa</option>
<option value="maalahti">Maalahti</option>
<option value="maaninka">Maaninka</option>
<option value="maarianhamina">Maarianhamina</option>
<option value="marttila">Marttila</option>
<option value="masku">Masku</option>
<option value="merijarvi">Merij\xe4rvi</option>
<option value="merikarvia">Merikarvia</option>
<option value="miehikkala">Miehikk\xe4l\xe4</option>
<option value="mikkeli">Mikkeli</option>
<option value="muhos">Muhos</option>
<option value="multia">Multia</option>
<option value="muonio">Muonio</option>
<option value="mustasaari">Mustasaari</option>
<option value="muurame">Muurame</option>
<option value="mynamaki">Myn\xe4m\xe4ki</option>
<option value="myrskyla">Myrskyl\xe4</option>
<option value="mantsala">M\xe4nts\xe4l\xe4</option>
<option value="mantta-vilppula">M\xe4ntt\xe4-Vilppula</option>
<option value="mantyharju">M\xe4ntyharju</option>
<option value="naantali">Naantali</option>
<option value="nakkila">Nakkila</option>
<option value="nastola">Nastola</option>
<option value="nilsia">Nilsi\xe4</option>
<option value="nivala">Nivala</option>
<option value="nokia">Nokia</option>
<option value="nousiainen">Nousiainen</option>
<option value="nummi-pusula">Nummi-Pusula</option>
<option value="nurmes">Nurmes</option>
<option value="nurmijarvi">Nurmij\xe4rvi</option>
<option value="narpio">N\xe4rpi\xf6</option>
<option value="oravainen">Oravainen</option>
<option value="orimattila">Orimattila</option>
<option value="oripaa">Orip\xe4\xe4</option>
<option value="orivesi">Orivesi</option>
<option value="oulainen">Oulainen</option>
<option value="oulu">Oulu</option>
<option value="oulunsalo">Oulunsalo</option>
<option value="outokumpu">Outokumpu</option>
<option value="padasjoki">Padasjoki</option>
<option value="paimio">Paimio</option>
<option value="paltamo">Paltamo</option>
<option value="parikkala">Parikkala</option>
<option value="parkano">Parkano</option>
<option value="pedersore">Peders\xf6re</option>
<option value="pelkosenniemi">Pelkosenniemi</option>
<option value="pello">Pello</option>
<option value="perho">Perho</option>
<option value="pertunmaa">Pertunmaa</option>
<option value="petajavesi">Pet\xe4j\xe4vesi</option>
<option value="pieksamaki">Pieks\xe4m\xe4ki</option>
<option value="pielavesi">Pielavesi</option>
<option value="pietarsaari">Pietarsaari</option>
<option value="pihtipudas">Pihtipudas</option>
<option value="pirkkala">Pirkkala</option>
<option value="polvijarvi">Polvij\xe4rvi</option>
<option value="pomarkku">Pomarkku</option>
<option value="pori">Pori</option>
<option value="pornainen">Pornainen</option>
<option value="porvoo">Porvoo</option>
<option value="posio">Posio</option>
<option value="pudasjarvi">Pudasj\xe4rvi</option>
<option value="pukkila">Pukkila</option>
<option value="punkaharju">Punkaharju</option>
<option value="punkalaidun">Punkalaidun</option>
<option value="puolanka">Puolanka</option>
<option value="puumala">Puumala</option>
<option value="pyhtaa">Pyht\xe4\xe4</option>
<option value="pyhajoki">Pyh\xe4joki</option>
<option value="pyhajarvi">Pyh\xe4j\xe4rvi</option>
<option value="pyhanta">Pyh\xe4nt\xe4</option>
<option value="pyharanta">Pyh\xe4ranta</option>
<option value="palkane">P\xe4lk\xe4ne</option>
<option value="poytya">P\xf6yty\xe4</option>
<option value="raahe">Raahe</option>
<option value="raasepori">Raasepori</option>
<option value="raisio">Raisio</option>
<option value="rantasalmi">Rantasalmi</option>
<option value="ranua">Ranua</option>
<option value="rauma">Rauma</option>
<option value="rautalampi">Rautalampi</option>
<option value="rautavaara">Rautavaara</option>
<option value="rautjarvi">Rautj\xe4rvi</option>
<option value="reisjarvi">Reisj\xe4rvi</option>
<option value="riihimaki">Riihim\xe4ki</option>
<option value="ristiina">Ristiina</option>
<option value="ristijarvi">Ristij\xe4rvi</option>
<option value="rovaniemi">Rovaniemi</option>
<option value="ruokolahti">Ruokolahti</option>
<option value="ruovesi">Ruovesi</option>
<option value="rusko">Rusko</option>
<option value="raakkyla">R\xe4\xe4kkyl\xe4</option>
<option value="saarijarvi">Saarij\xe4rvi</option>
<option value="salla">Salla</option>
<option value="salo">Salo</option>
<option value="saltvik">Saltvik</option>
<option value="sastamala">Sastamala</option>
<option value="sauvo">Sauvo</option>
<option value="savitaipale">Savitaipale</option>
<option value="savonlinna">Savonlinna</option>
<option value="savukoski">Savukoski</option>
<option value="seinajoki">Sein\xe4joki</option>
<option value="sievi">Sievi</option>
<option value="siikainen">Siikainen</option>
<option value="siikajoki">Siikajoki</option>
<option value="siikalatva">Siikalatva</option>
<option value="siilinjarvi">Siilinj\xe4rvi</option>
<option value="simo">Simo</option>
<option value="sipoo">Sipoo</option>
<option value="siuntio">Siuntio</option>
<option value="sodankyla">Sodankyl\xe4</option>
<option value="soini">Soini</option>
<option value="somero">Somero</option>
<option value="sonkajarvi">Sonkaj\xe4rvi</option>
<option value="sotkamo">Sotkamo</option>
<option value="sottunga">Sottunga</option>
<option value="sulkava">Sulkava</option>
<option value="sund">Sund</option>
<option value="suomenniemi">Suomenniemi</option>
<option value="suomussalmi">Suomussalmi</option>
<option value="suonenjoki">Suonenjoki</option>
<option value="sysma">Sysm\xe4</option>
<option value="sakyla">S\xe4kyl\xe4</option>
<option value="taipalsaari">Taipalsaari</option>
<option value="taivalkoski">Taivalkoski</option>
<option value="taivassalo">Taivassalo</option>
<option value="tammela">Tammela</option>
<option value="tampere">Tampere</option>
<option value="tarvasjoki">Tarvasjoki</option>
<option value="tervo">Tervo</option>
<option value="tervola">Tervola</option>
<option value="teuva">Teuva</option>
<option value="tohmajarvi">Tohmaj\xe4rvi</option>
<option value="toholampi">Toholampi</option>
<option value="toivakka">Toivakka</option>
<option value="tornio">Tornio</option>
<option value="turku" selected="selected">Turku</option>
<option value="tuusniemi">Tuusniemi</option>
<option value="tuusula">Tuusula</option>
<option value="tyrnava">Tyrn\xe4v\xe4</option>
<option value="toysa">T\xf6ys\xe4</option>
<option value="ulvila">Ulvila</option>
<option value="urjala">Urjala</option>
<option value="utajarvi">Utaj\xe4rvi</option>
<option value="utsjoki">Utsjoki</option>
<option value="uurainen">Uurainen</option>
<option value="uusikaarlepyy">Uusikaarlepyy</option>
<option value="uusikaupunki">Uusikaupunki</option>
<option value="vaala">Vaala</option>
<option value="vaasa">Vaasa</option>
<option value="valkeakoski">Valkeakoski</option>
<option value="valtimo">Valtimo</option>
<option value="vantaa">Vantaa</option>
<option value="varkaus">Varkaus</option>
<option value="varpaisjarvi">Varpaisj\xe4rvi</option>
<option value="vehmaa">Vehmaa</option>
<option value="vesanto">Vesanto</option>
<option value="vesilahti">Vesilahti</option>
<option value="veteli">Veteli</option>
<option value="vierema">Vierem\xe4</option>
<option value="vihanti">Vihanti</option>
<option value="vihti">Vihti</option>
<option value="viitasaari">Viitasaari</option>
<option value="vimpeli">Vimpeli</option>
<option value="virolahti">Virolahti</option>
<option value="virrat">Virrat</option>
<option value="vardo">V\xe5rd\xf6</option>
<option value="vahakyro">V\xe4h\xe4kyr\xf6</option>
<option value="voyri-maksamaa">V\xf6yri-Maksamaa</option>
<option value="yli-ii">Yli-Ii</option>
<option value="ylitornio">Ylitornio</option>
<option value="ylivieska">Ylivieska</option>
<option value="ylojarvi">Yl\xf6j\xe4rvi</option>
<option value="ypaja">Yp\xe4j\xe4</option>
<option value="ahtari">\xc4ht\xe4ri</option>
<option value="aanekoski">\xc4\xe4nekoski</option>
</select>'''
        self.assertHTMLEqual(f.render('municipalities', 'turku'), out)
    def test_FIZipCodeField(self):
        """Zip codes must be exactly five digits."""
        error_format = ['Enter a zip code in the format XXXXX.']
        valid = {
            '20540': '20540',
            '20101': '20101',
        }
        invalid = {
            '20s40': error_format,
            '205401': error_format
        }
        self.assertFieldOutput(FIZipCodeField, valid, invalid)
    def test_FISocialSecurityNumber(self):
        """Finnish SSNs accept -, + or A century separators; the check
        character and digits are validated."""
        error_invalid = ['Enter a valid Finnish social security number.']
        valid = {
            '010101-0101': '010101-0101',
            '010101+0101': '010101+0101',
            '010101A0101': '010101A0101',
        }
        invalid = {
            '101010-0102': error_invalid,
            '10a010-0101': error_invalid,
            '101010-0\xe401': error_invalid,
            '101010b0101': error_invalid,
        }
        self.assertFieldOutput(FISocialSecurityNumber, valid, invalid)
|
mattvick/phantomjs
|
refs/heads/master
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/port/xvfbdriver.py
|
117
|
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import os
import re
import time
from webkitpy.port.server_process import ServerProcess
from webkitpy.port.driver import Driver
from webkitpy.common.system.file_lock import FileLock
_log = logging.getLogger(__name__)
class XvfbDriver(Driver):
    """Driver that runs the layout-test tool under a private Xvfb X server.

    Each worker claims its own free X display number (guarded by a file
    lock in /tmp) so that concurrent workers never share a display.
    """

    @staticmethod
    def check_xvfb(port):
        """Return True if an Xvfb binary is on PATH; log an error otherwise."""
        # BUG FIX: the original compared with "is 0", which tests object
        # identity and only works by accident of CPython's small-int cache.
        xvfb_found = port.host.executive.run_command(['which', 'Xvfb'], return_exit_code=True) == 0
        if not xvfb_found:
            _log.error("No Xvfb found. Cannot run layout tests.")
        return xvfb_found

    def __init__(self, *args, **kwargs):
        Driver.__init__(self, *args, **kwargs)
        self._guard_lock = None
        # Crashes tend to occur in the first tests a worker runs because the
        # Xvfb display is not ready yet; we sleep this long after launching it.
        self._startup_delay_secs = 1.0

    def _next_free_display(self):
        """Return the first display number in [0, 99) not used by a running X
        server and successfully lock it via a /tmp guard file.

        NOTE(review): returns None when all 99 candidates are taken; callers
        do not handle that case — confirm 99 workers is a safe upper bound.
        """
        running_pids = self._port.host.executive.run_command(['ps', '-eo', 'comm,command'])
        reserved_screens = set()
        for pid in running_pids.split('\n'):
            # Raw string avoids invalid-escape warnings for \s and \d.
            match = re.match(r'(X|Xvfb|Xorg)\s+.*\s:(?P<screen_number>\d+)', pid)
            if match:
                reserved_screens.add(int(match.group('screen_number')))
        for i in range(99):
            if i not in reserved_screens:
                _guard_lock_file = self._port.host.filesystem.join('/tmp', 'WebKitXvfb.lock.%i' % i)
                self._guard_lock = self._port.host.make_file_lock(_guard_lock_file)
                if self._guard_lock.acquire_lock():
                    return i

    def _start(self, pixel_tests, per_test_args):
        """Launch a fresh Xvfb on a free display and start the server process on it."""
        self.stop()
        # Use even displays for pixel tests and odd ones otherwise. When pixel tests are disabled,
        # DriverProxy creates two drivers, one for normal and the other for ref tests. Both have
        # the same worker number, so this prevents them from using the same Xvfb instance.
        display_id = self._next_free_display()
        self._lock_file = "/tmp/.X%d-lock" % display_id
        run_xvfb = ["Xvfb", ":%d" % display_id, "-screen", "0", "800x600x24", "-nolisten", "tcp"]
        with open(os.devnull, 'w') as devnull:
            self._xvfb_process = self._port.host.executive.popen(run_xvfb, stderr=devnull)
        # Crashes intend to occur occasionally in the first few tests that are run through each
        # worker because the Xvfb display isn't ready yet. Halting execution a bit should avoid that.
        time.sleep(self._startup_delay_secs)
        server_name = self._port.driver_name()
        environment = self._port.setup_environ_for_server(server_name)
        # We must do this here because the DISPLAY number depends on _worker_number.
        environment['DISPLAY'] = ":%d" % display_id
        self._driver_tempdir = self._port._filesystem.mkdtemp(prefix='%s-' % self._port.driver_name())
        environment['DUMPRENDERTREE_TEMP'] = str(self._driver_tempdir)
        environment['LOCAL_RESOURCE_ROOT'] = self._port.layout_tests_dir()
        # Currently on WebKit2, there is no API for setting the application
        # cache directory. Each worker should have its own and it should be
        # cleaned afterwards, so we set it to inside the temporary folder by
        # prepending XDG_CACHE_HOME with DUMPRENDERTREE_TEMP.
        environment['XDG_CACHE_HOME'] = self._port.host.filesystem.join(str(self._driver_tempdir), 'appcache')
        self._crashed_process_name = None
        self._crashed_pid = None
        self._server_process = self._port._server_process_constructor(self._port, server_name, self.cmd_line(pixel_tests, per_test_args), environment)
        self._server_process.start()

    def stop(self):
        """Stop the server process, kill our Xvfb, and release display resources."""
        super(XvfbDriver, self).stop()
        if self._guard_lock:
            self._guard_lock.release_lock()
            self._guard_lock = None
        if getattr(self, '_xvfb_process', None):
            self._port.host.executive.kill_process(self._xvfb_process.pid)
            self._xvfb_process = None
        # BUG FIX: _lock_file is only assigned inside _start(), but stop() can
        # run before any _start() (and _start() itself calls stop() first), so
        # guard the attribute access to avoid an AttributeError.
        if getattr(self, '_lock_file', None) and self._port.host.filesystem.exists(self._lock_file):
            self._port.host.filesystem.remove(self._lock_file)
|
liaowang11/alifuse
|
refs/heads/master
|
oss_xml_handler.py
|
4
|
#coding=utf8
from xml.dom import minidom
def get_tag_text(element, tag):
    """Return the concatenated text content of the first ``tag`` descendant.

    Joins the data of TEXT and CDATA child nodes of the first matching
    element. Returns "" when no element with that tag name exists.
    """
    nodes = element.getElementsByTagName(tag)
    if not nodes:
        return ""
    # The original shadowed the matched node with its own loop variable and
    # grew the result with repeated "+" (quadratic); join once instead.
    first = nodes[0]
    return "".join(
        child.data
        for child in first.childNodes
        if child.nodeType in (first.TEXT_NODE, first.CDATA_SECTION_NODE)
    )
class ErrorXml:
    """Parsed OSS error-response body (Python 2 module).

    Extracts Code, Message, Resource, RequestId and HostId from the XML.
    Missing tags yield "" (see get_tag_text).
    """
    def __init__(self, xml_string):
        self.xml = minidom.parseString(xml_string)
        self.code = get_tag_text(self.xml, 'Code')
        self.msg = get_tag_text(self.xml, 'Message')
        self.resource = get_tag_text(self.xml, 'Resource')
        self.request_id = get_tag_text(self.xml, 'RequestId')
        self.host_id = get_tag_text(self.xml, 'HostId')
    def show(self):
        # Debug helper: dump all parsed fields to stdout.
        print "Code: %s\nMessage: %s\nResource: %s\nRequestId: %s \nHostId: %s" % (self.code, self.msg, self.resource, self.request_id, self.host_id)
class Owner:
    """Owner element (<ID>, <DisplayName>) of a bucket or object listing."""
    def __init__(self, xml_element):
        self.element = xml_element
        self.id = get_tag_text(self.element, "ID")
        self.display_name = get_tag_text(self.element, "DisplayName")
    def show(self):
        # Debug helper: dump owner fields to stdout.
        print "ID: %s\nDisplayName: %s" % (self.id, self.display_name)
class Bucket:
    """Bucket element (<Name>, <CreationDate>) of a GetService listing."""
    def __init__(self, xml_element):
        self.element = xml_element
        self.name = get_tag_text(self.element, "Name")
        self.creation_date = get_tag_text(self.element, "CreationDate")
    def show(self):
        # Debug helper: dump bucket fields to stdout.
        print "Name: %s\nCreationDate: %s" % (self.name, self.creation_date)
class GetServiceXml:
    """Parsed GetService (ListAllMyBuckets) response: owner plus bucket list."""
    def __init__(self, xml_string):
        self.xml = minidom.parseString(xml_string)
        # Assumes exactly one <Owner> element; raises IndexError otherwise.
        self.owner = Owner(self.xml.getElementsByTagName('Owner')[0])
        self.buckets = self.xml.getElementsByTagName('Bucket')
        self.bucket_list = []
        for b in self.buckets:
            self.bucket_list.append(Bucket(b))
    def show(self):
        # Debug helper: print owner and every bucket to stdout.
        print "Owner:"
        self.owner.show()
        print "\nBucket list:"
        for b in self.bucket_list:
            b.show()
        print ""
    def list(self):
        """Return the buckets as a list of (name, creation_date) tuples."""
        bl = []
        for b in self.bucket_list:
            bl.append((b.name, b.creation_date))
        return bl
class Content:
    """One <Contents> entry of a bucket listing (key, mtime, etag, size, owner)."""
    def __init__(self, xml_element):
        self.element = xml_element
        self.key = get_tag_text(self.element, "Key")
        self.last_modified = get_tag_text(self.element, "LastModified")
        self.etag = get_tag_text(self.element, "ETag")
        self.size = get_tag_text(self.element, "Size")
        # Assumes each <Contents> carries exactly one nested <Owner>.
        self.owner = Owner(self.element.getElementsByTagName('Owner')[0])
        self.storage_class = get_tag_text(self.element, "StorageClass")
    def show(self):
        # Debug helper: dump all fields, then the owner, to stdout.
        print "Key: %s\nLastModified: %s\nETag: %s\nSize: %s\nStorageClass: %s" % (self.key, self.last_modified, self.etag, self.size, self.storage_class)
        self.owner.show()
class Part:
    """One <Part> entry of an object-group index.

    NOTE(review): this reads <PartName>/<PartSize>, but the sample XML in
    test_get_object_group_xml uses <ObjectName>/<ObjectSize>, so those two
    fields come back "" there — confirm the server's actual tag names.
    """
    def __init__(self, xml_element):
        self.element = xml_element
        self.part_num = get_tag_text(self.element, "PartNumber")
        self.object_name = get_tag_text(self.element, "PartName")
        self.object_size = get_tag_text(self.element, "PartSize")
        self.etag = get_tag_text(self.element, "ETag")
    def show(self):
        # Debug helper: dump part fields to stdout.
        print "PartNumber: %s\nPartName: %s\nPartSize: %s\nETag: %s\n" % (self.part_num, self.object_name, self.object_size, self.etag)
class PostObjectGroupXml:
    """Parsed response of a post-object-group request (bucket/key/size/etag)."""
    def __init__(self, xml_string):
        self.xml = minidom.parseString(xml_string)
        self.bucket = get_tag_text(self.xml, 'Bucket')
        self.key = get_tag_text(self.xml, 'Key')
        self.size = get_tag_text(self.xml, 'Size')
        self.etag = get_tag_text(self.xml, "ETag")
    def show(self):
        # Debug helper: dump the parsed fields to stdout.
        print "Post Object Group, Bucket: %s\nKey: %s\nSize: %s\nETag: %s" % (self.bucket, self.key, self.size, self.etag)
class GetObjectGroupIndexXml:
    """Parsed object-group index: bucket/key/etag/length plus a list of Parts.

    NOTE(review): the etag here is read from <Etag> (mixed case) while other
    classes read <ETag> — confirm which casing the service emits.
    """
    def __init__(self, xml_string):
        self.xml = minidom.parseString(xml_string)
        self.bucket = get_tag_text(self.xml, 'Bucket')
        self.key = get_tag_text(self.xml, 'Key')
        self.etag = get_tag_text(self.xml, 'Etag')
        self.file_length = get_tag_text(self.xml, 'FileLength')
        self.index_list = []
        index_lists = self.xml.getElementsByTagName('Part')
        for i in index_lists:
            self.index_list.append(Part(i))
    def list(self):
        """Return parts as (part_num, object_name, object_size, etag) tuples."""
        index_list = []
        for i in self.index_list:
            index_list.append((i.part_num, i.object_name, i.object_size, i.etag))
        return index_list
    def show(self):
        # Debug helper: dump header fields and each part to stdout.
        print "Bucket: %s\nObject: %s\nEtag: %s\nObjectSize: %s" % (self.bucket, self.key, self.etag, self.file_length)
        print "\nPart list:"
        for p in self.index_list:
            p.show()
class GetBucketXml:
    """Parsed GetBucket (ListObjects) response: paging fields, common
    prefixes, and the <Contents> entries."""
    def __init__(self, xml_string):
        self.xml = minidom.parseString(xml_string)
        # Paging / filtering header fields ("" when absent).
        self.name = get_tag_text(self.xml, 'Name')
        self.prefix = get_tag_text(self.xml, 'Prefix')
        self.marker = get_tag_text(self.xml, 'Marker')
        self.nextmarker = get_tag_text(self.xml, 'NextMarker')
        self.maxkeys = get_tag_text(self.xml, 'MaxKeys')
        self.delimiter = get_tag_text(self.xml, 'Delimiter')
        self.is_truncated = get_tag_text(self.xml, 'IsTruncated')
        # Common prefixes (directory-like groupings under the delimiter).
        self.prefix_list = []
        prefixes = self.xml.getElementsByTagName('CommonPrefixes')
        for p in prefixes:
            tag_txt = get_tag_text(p, "Prefix")
            self.prefix_list.append(tag_txt)
        # Object entries.
        self.content_list = []
        contents = self.xml.getElementsByTagName('Contents')
        for c in contents:
            self.content_list.append(Content(c))
    def show(self):
        # Debug helper: dump header fields, prefixes, and contents to stdout.
        print "Name: %s\nPrefix: %s\nMarker: %s\nNextMarker: %s\nMaxKeys: %s\nDelimiter: %s\nIsTruncated: %s" % (self.name, self.prefix, self.marker, self.nextmarker, self.maxkeys, self.delimiter, self.is_truncated)
        print "\nPrefix list:"
        for p in self.prefix_list:
            print p
        print "\nContent list:"
        for c in self.content_list:
            c.show()
        print ""
    def list(self):
        """Return (content_tuples, prefix_list); each content tuple is
        (key, last_modified, etag, size, owner_id, owner_display_name,
        storage_class)."""
        cl = []
        pl = []
        for c in self.content_list:
            cl.append((c.key, c.last_modified, c.etag, c.size, c.owner.id, c.owner.display_name, c.storage_class))
        for p in self.prefix_list:
            pl.append(p)
        return (cl, pl)
class GetBucketAclXml:
    """Parsed bucket ACL response: owner (or "" when absent) and grant string.

    NOTE(review): when <Owner> is absent, self.owner is the string "" and
    show() would fail on .id/.display_name — confirm that case never prints.
    """
    def __init__(self, xml_string):
        self.xml = minidom.parseString(xml_string)
        if len(self.xml.getElementsByTagName('Owner')) != 0:
            self.owner = Owner(self.xml.getElementsByTagName('Owner')[0])
        else:
            self.owner = ""
        self.grant = get_tag_text(self.xml, 'Grant')
    def show(self):
        # Debug helper: dump owner identity and grant to stdout.
        print "Owner Name: %s\nOwner ID: %s\nGrant: %s" % (self.owner.id, self.owner.display_name, self.grant)
def test_get_bucket_xml():
    # Smoke test: parse a canned ListBucketResult body and print its contents.
    body = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><ListBucketResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"><Name>sweet-memory</Name><Prefix>IMG</Prefix><Marker>IMG_0</Marker><MaxKeys>1000</MaxKeys><IsTruncated>false</IsTruncated><Contents><Key>IMG_2744.JPG</Key><LastModified>2011-03-04T06:20:37.000Z</LastModified><ETag>"a56047f218618a43a9b1c2dca2d8c592"</ETag><Size>220778</Size><Owner><ID>2cfd76976de6f4c6f4e05fcd02680c4ca619428123681589efcb203f29dce924</ID><DisplayName>sanbo_ustc</DisplayName></Owner><StorageClass>STANDARD</StorageClass></Contents><Contents><Key>IMG_2745.JPG</Key><LastModified>2011-03-04T06:20:39.000Z</LastModified><ETag>"511c0b52911bcd667338103c385741af"</ETag><Size>244612</Size><Owner><ID>2cfd76976de6f4c6f4e05fcd02680c4ca619428123681589efcb203f29dce924</ID><DisplayName>sanbo_ustc</DisplayName></Owner><StorageClass>STANDARD</StorageClass></Contents><Contents><Key>IMG_3344.JPG</Key><LastModified>2011-03-04T06:20:48.000Z</LastModified><ETag>"4ea11d796ecc742b216864dcf5dfd193"</ETag><Size>229211</Size><Owner><ID>2cfd76976de6f4c6f4e05fcd02680c4ca619428123681589efcb203f29dce924</ID><DisplayName>sanbo_ustc</DisplayName></Owner><StorageClass>STANDARD</StorageClass></Contents><Contents><Key>IMG_3387.JPG</Key><LastModified>2011-03-04T06:20:53.000Z</LastModified><ETag>"c32b5568ae4fb0a3421f0daba25ecfd4"</ETag><Size>460062</Size><Owner><ID>2cfd76976de6f4c6f4e05fcd02680c4ca619428123681589efcb203f29dce924</ID><DisplayName>sanbo_ustc</DisplayName></Owner><StorageClass>STANDARD</StorageClass></Contents><Contents><Key>IMG_3420.JPG</Key><LastModified>2011-03-04T06:20:25.000Z</LastModified><ETag>"edf010d2a8a4877ce0362b245fcc963b"</ETag><Size>174973</Size><Owner><ID>2cfd76976de6f4c6f4e05fcd02680c4ca619428123681589efcb203f29dce924</ID><DisplayName>sanbo_ustc</DisplayName></Owner><StorageClass>STANDARD</StorageClass></Contents><Contents><Key>中文.case</Key><LastModified>2011-03-04T06:20:26.000Z</LastModified><ETag>"7fd64eec21799ef048ed827cf6098f06"</ETag><Size>208134</Size><Owner><ID>2cfd76976de6f4c6f4e05fcd02680c4ca619428123681589efcb203f29dce924</ID><DisplayName>sanbo_ustc</DisplayName></Owner><StorageClass>STANDARD</StorageClass></Contents></ListBucketResult>"
    h = GetBucketXml(body)
    h.show()
    (fl, pl) = h.list()
    print "\nfile_list: ", fl
    print "prefix list: ", pl
def test_get_service_xml():
    # Smoke test: parse a canned ListAllMyBucketsResult body and print it.
    body = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><ListAllMyBucketsResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"><Owner><ID>2cfd76976de6f4c6f4e05fcd02680c4ca619428123681589efcb203f29dce924</ID><DisplayName>sanbo_ustc</DisplayName></Owner><Buckets><Bucket><Name>360buy</Name><CreationDate>2011-03-04T09:25:37.000Z</CreationDate></Bucket><Bucket><Name>aliyun-test-test</Name><CreationDate>2011-04-11T12:24:06.000Z</CreationDate></Bucket><Bucket><Name>irecoffee</Name><CreationDate>2011-03-04T06:14:56.000Z</CreationDate></Bucket><Bucket><Name>sweet-memory</Name><CreationDate>2011-04-12T11:56:04.000Z</CreationDate></Bucket></Buckets></ListAllMyBucketsResult>"
    h = GetServiceXml(body)
    h.show()
    print "\nbucket list: ", h.list()
def test_get_bucket_acl_xml():
    # Smoke test: parse a canned AccessControlPolicy body and print it.
    body = '<?xml version="1.0" ?><AccessControlPolicy><Owner><ID>61155b1e39dbca1d0d0f3c7faa32d9e8e9a90a9cd86edbd27d8eed5d0ad8ce82</ID><DisplayName>megjian</DisplayName></Owner><AccessControlList><Grant>public-read-write</Grant></AccessControlList></AccessControlPolicy>'
    h = GetBucketAclXml(body)
    h.show()
def test_get_object_group_xml():
    # Smoke test: parse a canned FileGroup index body and print it.
    # NOTE(review): this sample uses <ObjectName>/<ObjectSize>, while the Part
    # class reads <PartName>/<PartSize> — see the note on Part.
    body = '<?xml version="1.0" encoding="UTF-8"?><FileGroup><Bucket>ut_test_post_object_group</Bucket> <Key>ut_test_post_object_group</Key> <Etag>"91E8503F4DA1324E28434AA6B6E20D15"</Etag><FileLength>1073741824</FileLength> <FilePart><Part> <PartNumber>1</PartNumber> <ObjectName>4d37380c7149508bedf78dc7c5c68f55_test_post_object_group.txt_1</ObjectName><ObjectSize>10485760</ObjectSize><ETag>"A957A9F1EF44ED7D40CD5C738D113509"</ETag></Part><Part><PartNumber>2</PartNumber><ObjectName>7aa26b8da263589e875d179b87642691_test_post_object_group.txt_2</ObjectName><ObjectSize>10485760</ObjectSize><ETag>"A957A9F1EF44ED7D40CD5C738D113509"</ETag></Part><Part><PartNumber>3</PartNumber><ObjectName>28b0c8a9bd69469f76cd102d6e1b0f03_test_post_object_group.txt_3</ObjectName><ObjectSize>10485760</ObjectSize><ETag>"A957A9F1EF44ED7D40CD5C738D113509"</ETag></Part></FilePart></FileGroup>'
    h = GetObjectGroupIndexXml(body)
    h.show()
# Ad-hoc smoke tests: run this module directly to exercise every parser
# against the canned response bodies above (prints to stdout).
if __name__ == "__main__":
    test_get_bucket_xml()
    test_get_service_xml()
    test_get_bucket_acl_xml()
    test_get_object_group_xml()
|
klahnakoski/MoDevETL
|
refs/heads/master
|
pyLibrary/sql/redshift.py
|
4
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
# FOR WINDOWS INSTALL OF psycopg2
# http://stickpeople.com/projects/python/win-psycopg/2.6.0/psycopg2-2.6.0.win32-py2.7-pg9.4.1-release.exe
import psycopg2
from psycopg2.extensions import adapt
from pyLibrary import convert
from pyLibrary.debugs.logs import Log
from pyLibrary.meta import use_settings
from pyLibrary.queries import qb
from pyLibrary.sql import SQL
from pyLibrary.strings import expand_template
from pyLibrary.thread.threads import Lock
class Redshift(object):
    """Thin psycopg2 wrapper for Amazon Redshift (Python 2 module).

    Connects lazily on first execute(); serializes statement execution
    through an internal lock and reconnects after any failure.
    """
    @use_settings
    def __init__(self, host, user, password, database=None, port=5439, settings=None):
        # @use_settings packs the named parameters into `settings`; the
        # connection itself is opened lazily by _connect().
        self.settings=settings
        self.locker = Lock()
        self.connection = None
    def _connect(self):
        # Open a new psycopg2 connection from the stored settings.
        self.connection=psycopg2.connect(
            database=self.settings.database,
            user=self.settings.user,
            password=self.settings.password,
            host=self.settings.host,
            port=self.settings.port
        )
    def query(self, sql, param=None):
        """Alias for execute(); returns the fetched rows (or None)."""
        return self.execute(sql, param)
    def execute(
        self,
        command,
        param=None,
        retry=True  # WHEN False: RAISE ON FIRST FAILURE INSTEAD OF RETRYING
    ):
        """Run `command` (with `param` expanded into it), commit, and return
        fetched rows, or None for statements with no result set.

        NOTE(review): with retry=True a persistently failing command loops
        forever and the exception is never surfaced — confirm intended.
        """
        if param:
            command = expand_template(command, self.quote_param(param))
        output = None
        done = False
        while not done:
            try:
                with self.locker:
                    if not self.connection:
                        self._connect()
                    with Closer(self.connection.cursor()) as curs:
                        curs.execute(command)
                        if curs.rowcount >= 0:
                            output = curs.fetchall()
                    self.connection.commit()
                done = True
            except Exception, e:
                try:
                    self.connection.rollback()
                    # TODO: FIGURE OUT WHY rollback() DOES NOT HELP
                    self.connection.close()
                except Exception, f:
                    # Best-effort cleanup; the connection is rebuilt below.
                    pass
                self.connection = None
                self._connect()
                if not retry:
                    Log.error("Problem with command:\n{{command|indent}}", command= command, cause=e)
        return output
    def insert(self, table_name, record):
        """INSERT one dict-like `record` into `table_name`."""
        keys = record.keys()
        try:
            command = "INSERT INTO " + self.quote_column(table_name) + "(" + \
                      ",".join([self.quote_column(k) for k in keys]) + \
                      ") VALUES (" + \
                      ",".join([self.quote_value(record[k]) for k in keys]) + \
                      ")"
            self.execute(command)
        except Exception, e:
            Log.error("problem with record: {{record}}", record= record, cause=e)
    def insert_list(self, table_name, records):
        """Upsert-style bulk insert: delete rows with matching _id, then
        insert all `records` in a single multi-row INSERT."""
        if not records:
            return
        # Union of all keys across records; missing keys insert NULL.
        columns = set()
        for r in records:
            columns |= set(r.keys())
        columns = qb.sort(columns)
        try:
            self.execute(
                "DELETE FROM " + self.quote_column(table_name) + " WHERE _id IN {{ids}}",
                {"ids": self.quote_column([r["_id"] for r in records])}
            )
            command = \
                "INSERT INTO " + self.quote_column(table_name) + "(" + \
                ",".join([self.quote_column(k) for k in columns]) + \
                ") VALUES " + ",\n".join([
                    "(" + ",".join([self.quote_value(r.get(k, None)) for k in columns]) + ")"
                    for r in records
                ])
            self.execute(command)
        except Exception, e:
            Log.error("problem with insert", e)
    def quote_param(self, param):
        """Return `param` with every value SQL-quoted (SQL values pass through)."""
        output={}
        for k, v in param.items():
            if isinstance(v, SQL):
                output[k]=v.sql
            else:
                output[k]=self.quote_value(v)
        return output
    def quote_column(self, name):
        # Strings become a double-quoted identifier; any other iterable
        # becomes a parenthesized tuple of quoted VALUES (used for IN lists).
        if isinstance(name, basestring):
            return SQL('"' + name.replace('"', '""') + '"')
        return SQL("(" + (", ".join(self.quote_value(v) for v in name)) + ")")
    def quote_value(self, value):
        """Return `value` as a safely quoted SQL literal.

        NOTE(review): strings longer than 256 chars are silently truncated
        — confirm this matches the target column widths.
        """
        if value ==None:
            return SQL("NULL")
        if isinstance(value, list):
            json = convert.value2json(value)
            return self.quote_value(json)
        if isinstance(value, basestring) and len(value) > 256:
            value = value[:256]
        return SQL(adapt(value))
    def es_type2pg_type(self, es_type):
        """Map an Elasticsearch type name to a Postgres/Redshift column type."""
        return PG_TYPES.get(es_type, "character varying")
# Elasticsearch type name -> Postgres/Redshift column type; unknown types
# fall back to "character varying" (see Redshift.es_type2pg_type).
PG_TYPES = {
    "boolean": "boolean",
    "double": "double precision",
    "float": "double precision",
    "string": "VARCHAR",
    "long": "bigint"
}
class Closer(object):
    """Context manager that closes a resource on exit (errors suppressed)
    and transparently proxies attribute access to the wrapped resource."""
    def __init__(self, resource):
        self.resource=resource
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            self.resource.close()
        except Exception, e:
            # Deliberately best-effort: a failed close must not mask the
            # original exception (if any) propagating out of the block.
            pass
    def __getattr__(self, item):
        # Delegate everything else (execute, fetchall, ...) to the resource.
        return getattr(self.resource, item)
|
DmitryADP/diff_qc750
|
refs/heads/master
|
external/webkit/Tools/Scripts/webkitpy/layout_tests/port/mock_drt_unittest.py
|
15
|
#!/usr/bin/env python
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit tests for MockDRT."""
import sys
import unittest
from webkitpy.common import newstringio
from webkitpy.layout_tests.port import mock_drt
from webkitpy.layout_tests.port import factory
from webkitpy.layout_tests.port import port_testcase
from webkitpy.layout_tests.port import test
from webkitpy.tool import mocktool
mock_options = mocktool.MockOptions(use_apache=True,
configuration='Release')
class MockDRTPortTest(port_testcase.PortTestCase):
    """Runs the shared PortTestCase suite against MockDRTPort; the helper
    methods below exist only to verify that the no-op port APIs don't raise."""
    def make_port(self, options=mock_options):
        if sys.platform == 'win32':
            # We use this because the 'win' port doesn't work yet.
            return mock_drt.MockDRTPort(port_name='mock-chromium-win', options=options)
        return mock_drt.MockDRTPort(options=options)
    def test_default_worker_model(self):
        # only overriding the default test; we don't care about this one.
        pass
    def test_port_name_in_constructor(self):
        self.assertTrue(mock_drt.MockDRTPort(port_name='mock-test'))
    def test_acquire_http_lock(self):
        # Only checking that no exception is raised.
        self.make_port().acquire_http_lock()
    def test_release_http_lock(self):
        # Only checking that no exception is raised.
        self.make_port().release_http_lock()
    def test_check_build(self):
        port = self.make_port()
        self.assertTrue(port.check_build(True))
    def test_check_sys_deps(self):
        port = self.make_port()
        self.assertTrue(port.check_sys_deps(True))
    def test_start_helper(self):
        # Only checking that no exception is raised.
        self.make_port().start_helper()
    def test_start_http_server(self):
        # Only checking that no exception is raised.
        self.make_port().start_http_server()
    def test_start_websocket_server(self):
        # Only checking that no exception is raised.
        self.make_port().start_websocket_server()
    def test_stop_helper(self):
        # Only checking that no exception is raised.
        self.make_port().stop_helper()
    def test_stop_http_server(self):
        # Only checking that no exception is raised.
        self.make_port().stop_http_server()
    def test_stop_websocket_server(self):
        # Only checking that no exception is raised.
        self.make_port().stop_websocket_server()
class MockDRTTest(unittest.TestCase):
    """End-to-end tests for mock_drt.MockDRT: feed it DRT-protocol input on a
    fake stdin and compare the exact byte stream it writes to stdout."""
    def to_path(self, port, test_name):
        """Return the absolute filesystem path for a layout-test name."""
        return port._filesystem.join(port.layout_tests_dir(), test_name)
    def input_line(self, port, test_name, checksum=None):
        """Build one DRT input line: a test URL, optionally '<url>'<checksum>."""
        url = port.filename_to_uri(self.to_path(port, test_name))
        # FIXME: we shouldn't have to work around platform-specific issues
        # here.
        if url.startswith('file:////'):
            url = url[len('file:////') - 1:]
        if url.startswith('file:///'):
            url = url[len('file:///') - 1:]
        if checksum:
            return url + "'" + checksum + '\n'
        return url + '\n'
    def extra_args(self, pixel_tests):
        """Extra command-line args; '-' tells DRT to read tests from stdin."""
        if pixel_tests:
            return ['--pixel-tests', '-']
        return ['-']
    def make_drt(self, options, args, filesystem, stdin, stdout, stderr):
        # Overridden by MockChromiumDRTTest to build the Chromium variant.
        return mock_drt.MockDRT(options, args, filesystem, stdin, stdout, stderr)
    def make_input_output(self, port, test_name, pixel_tests,
                          expected_checksum, drt_output, drt_input=None):
        """Return (drt_input, drt_output), deriving whichever was not given
        from the port's expected results for the test."""
        path = self.to_path(port, test_name)
        if pixel_tests:
            if not expected_checksum:
                expected_checksum = port.expected_checksum(path)
            if not drt_input:
                drt_input = self.input_line(port, test_name, expected_checksum)
        text_output = port.expected_text(path)
        if not drt_output:
            drt_output = self.expected_output(port, test_name, pixel_tests,
                                              text_output, expected_checksum)
        return (drt_input, drt_output)
    def expected_output(self, port, test_name, pixel_tests, text_output, expected_checksum):
        """Golden stdout stream for a passing test (old-style DRT protocol)."""
        if pixel_tests and expected_checksum:
            return ['Content-Type: text/plain\n',
                    text_output,
                    '#EOF\n',
                    '\n',
                    'ActualHash: %s\n' % expected_checksum,
                    'ExpectedHash: %s\n' % expected_checksum,
                    '#EOF\n']
        else:
            return ['Content-Type: text/plain\n',
                    text_output,
                    '#EOF\n',
                    '#EOF\n']
    def assertTest(self, test_name, pixel_tests, expected_checksum=None,
                   drt_output=None, filesystem=None):
        """Drive one test through MockDRT and assert its exact stdout/stderr."""
        platform = 'test'
        filesystem = filesystem or test.unit_test_filesystem()
        port = factory.get(platform, filesystem=filesystem)
        drt_input, drt_output = self.make_input_output(port, test_name,
            pixel_tests, expected_checksum, drt_output)
        args = ['--platform', 'test'] + self.extra_args(pixel_tests)
        stdin = newstringio.StringIO(drt_input)
        stdout = newstringio.StringIO()
        stderr = newstringio.StringIO()
        options, args = mock_drt.parse_options(args)
        drt = self.make_drt(options, args, filesystem, stdin, stdout, stderr)
        res = drt.run()
        self.assertEqual(res, 0)
        # We use the StringIO.buflist here instead of getvalue() because
        # the StringIO might be a mix of unicode/ascii and 8-bit strings.
        self.assertEqual(stdout.buflist, drt_output)
        self.assertEqual(stderr.getvalue(), '')
    def test_main(self):
        """mock_drt.main with no input exits 0 and writes/touches nothing."""
        filesystem = test.unit_test_filesystem()
        stdin = newstringio.StringIO()
        stdout = newstringio.StringIO()
        stderr = newstringio.StringIO()
        res = mock_drt.main(['--platform', 'test'] + self.extra_args(False),
                            filesystem, stdin, stdout, stderr)
        self.assertEqual(res, 0)
        self.assertEqual(stdout.getvalue(), '')
        self.assertEqual(stderr.getvalue(), '')
        self.assertEqual(filesystem.written_files, {})
    def test_pixeltest_passes(self):
        # This also tests that we handle HTTP: test URLs properly.
        self.assertTest('http/tests/passes/text.html', True)
    def test_pixeltest__fails(self):
        """A checksum mismatch must emit both hashes plus the PNG payload."""
        self.assertTest('failures/expected/checksum.html', pixel_tests=True,
                        expected_checksum='wrong-checksum',
                        drt_output=['Content-Type: text/plain\n',
                                    'checksum-txt',
                                    '#EOF\n',
                                    '\n',
                                    'ActualHash: checksum-checksum\n',
                                    'ExpectedHash: wrong-checksum\n',
                                    'Content-Type: image/png\n',
                                    'Content-Length: 13\n',
                                    'checksum\x8a-png',
                                    '#EOF\n'])
    def test_textonly(self):
        self.assertTest('passes/image.html', False)
    def test_checksum_in_png(self):
        self.assertTest('passes/checksum_in_image.html', True)
class MockChromiumDRTTest(MockDRTTest):
    """Variant of MockDRTTest exercising the Chromium-style DRT protocol
    (#URL/#MD5 framing, PNG results written to a file instead of stdout)."""

    def extra_args(self, pixel_tests):
        """Chromium passes the pixel-result path on the command line."""
        if pixel_tests:
            return ['--pixel-tests=/tmp/png_result0.png']
        return []

    def make_drt(self, options, args, filesystem, stdin, stdout, stderr):
        options.chromium = True
        # We have to set these by hand because --platform test won't trigger
        # the Chromium code paths.
        options.pixel_path = '/tmp/png_result0.png'
        options.pixel_tests = True
        return mock_drt.MockChromiumDRT(options, args, filesystem, stdin, stdout, stderr)

    def input_line(self, port, test_name, checksum=None):
        """Chromium input lines are '<url> <timeout> [<checksum>]'."""
        url = port.filename_to_uri(self.to_path(port, test_name))
        if checksum:
            return url + ' 6000 ' + checksum + '\n'
        return url + ' 6000\n'

    def expected_output(self, port, test_name, pixel_tests, text_output, expected_checksum):
        """Golden stdout stream in the Chromium #URL/#MD5 framing."""
        url = port.filename_to_uri(self.to_path(port, test_name))
        if pixel_tests and expected_checksum:
            return ['#URL:%s\n' % url,
                    '#MD5:%s\n' % expected_checksum,
                    text_output,
                    '\n',
                    '#EOF\n']
        else:
            return ['#URL:%s\n' % url,
                    text_output,
                    '\n',
                    '#EOF\n']

    def test_pixeltest__fails(self):
        """On checksum mismatch the PNG is written to the pixel path, not stdout."""
        filesystem = test.unit_test_filesystem()
        self.assertTest('failures/expected/checksum.html', pixel_tests=True,
                        expected_checksum='wrong-checksum',
                        drt_output=['#URL:file:///test.checkout/LayoutTests/failures/expected/checksum.html\n',
                                    '#MD5:checksum-checksum\n',
                                    'checksum-txt',
                                    '\n',
                                    '#EOF\n'],
                        filesystem=filesystem)
        # FIX: assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(filesystem.written_files,
                         {'/tmp/png_result0.png': 'checksum\x8a-png'})

    def test_chromium_parse_options(self):
        options, args = mock_drt.parse_options(['--platform', 'chromium-mac',
                                                '--pixel-tests=/tmp/png_result0.png'])
        self.assertTrue(options.chromium)
        self.assertTrue(options.pixel_tests)
        # FIX: assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(options.pixel_path, '/tmp/png_result0.png')
# Allow running this test module directly via the standard unittest runner.
if __name__ == '__main__':
    unittest.main()
|
mlperf/training_results_v0.7
|
refs/heads/master
|
NVIDIA/benchmarks/transformer/implementations/pytorch/tests/utils.py
|
6
|
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import torch
from fairseq import utils
from fairseq.data import Dictionary
from fairseq.data.language_pair_dataset import collate
from fairseq.models import (
FairseqEncoder,
FairseqIncrementalDecoder,
FairseqModel,
)
from fairseq.tasks import FairseqTask
def dummy_dictionary(vocab_size, prefix='token_'):
    """Build a fairseq Dictionary holding ``vocab_size`` synthetic symbols
    named ``<prefix>0`` .. ``<prefix>{vocab_size-1}``."""
    dictionary = Dictionary()
    for index in range(vocab_size):
        dictionary.add_symbol('%s%d' % (prefix, index))
    dictionary.finalize(padding_factor=1)  # don't add extra padding symbols
    return dictionary
def dummy_dataloader(
    samples,
    padding_idx=1,
    eos_idx=2,
    batch_size=None,
):
    """Wrap ``samples`` in a DataLoader (batched with fairseq's collate) and
    return an iterator over its batches. ``batch_size`` defaults to one batch
    containing every sample."""
    if batch_size is None:
        batch_size = len(samples)
    # Assign sequential ids to samples that don't already carry one.
    for index, sample in enumerate(samples):
        sample.setdefault('id', index)
    loader = torch.utils.data.DataLoader(
        TestDataset(samples),
        batch_size=batch_size,
        collate_fn=(lambda batch: collate(batch, padding_idx, eos_idx)),
    )
    return iter(loader)
class TestDataset(torch.utils.data.Dataset):
    """Minimal map-style Dataset backed by an in-memory sequence."""

    def __init__(self, data):
        super().__init__()
        self.data = data

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        return self.data[index]
class TestTranslationTask(FairseqTask):
    """Minimal translation task for tests: holds fixed source/target
    dictionaries and a prebuilt model supplied by the caller."""
    def __init__(self, args, src_dict, tgt_dict, model):
        super().__init__(args)
        self.src_dict = src_dict
        self.tgt_dict = tgt_dict
        self.model = model
    @classmethod
    def setup_task(cls, args, src_dict=None, tgt_dict=None, model=None):
        """Factory hook matching the FairseqTask setup protocol."""
        return cls(args, src_dict, tgt_dict, model)
    def build_model(self, args):
        # Delegates to TestModel so tests get the dummy encoder/decoder pair.
        return TestModel.build_model(args, self)
    @property
    def source_dictionary(self):
        return self.src_dict
    @property
    def target_dictionary(self):
        return self.tgt_dict
class TestModel(FairseqModel):
    """Dummy encoder-decoder model pairing TestEncoder with
    TestIncrementalDecoder, built from a task's dictionaries."""
    def __init__(self, encoder, decoder):
        super().__init__(encoder, decoder)
    @classmethod
    def build_model(cls, args, task):
        encoder = TestEncoder(args, task.source_dictionary)
        decoder = TestIncrementalDecoder(args, task.target_dictionary)
        return cls(encoder, decoder)
class TestEncoder(FairseqEncoder):
    """Identity encoder: returns the source tokens unchanged so the decoder
    can treat them as the 'encoder output'."""
    def __init__(self, args, dictionary):
        super().__init__(dictionary)
        self.args = args
    def forward(self, src_tokens, src_lengths):
        # No computation; src_lengths is deliberately ignored.
        return src_tokens
    def reorder_encoder_out(self, encoder_out, new_order):
        # Beam-search support: reorder along the batch dimension.
        return encoder_out.index_select(0, new_order)
class TestIncrementalDecoder(FairseqIncrementalDecoder):
    """Scripted decoder for tests: emits probabilities taken directly from
    ``args.probs`` (bsz x steps x vocab) or built from ``args.beam_probs``
    (per-step values for eos and onward), plus random attention."""
    def __init__(self, args, dictionary):
        super().__init__(dictionary)
        # Exactly one of the two scripting mechanisms must be provided.
        assert hasattr(args, 'beam_probs') or hasattr(args, 'probs')
        args.max_decoder_positions = getattr(args, 'max_decoder_positions', 100)
        self.args = args
    def forward(self, prev_output_tokens, encoder_out, incremental_state=None):
        """Return (probs, attn); in incremental mode only the newest step."""
        if incremental_state is not None:
            # Incremental decoding only consumes the most recent token.
            prev_output_tokens = prev_output_tokens[:, -1:]
        bbsz = prev_output_tokens.size(0)
        vocab = len(self.dictionary)
        src_len = encoder_out.size(1)
        tgt_len = prev_output_tokens.size(1)
        # determine number of steps
        if incremental_state is not None:
            # cache step number
            step = utils.get_incremental_state(self, incremental_state, 'step')
            if step is None:
                step = 0
            utils.set_incremental_state(self, incremental_state, 'step', step + 1)
            steps = [step]
        else:
            steps = list(range(tgt_len))
        # define output in terms of raw probs
        if hasattr(self.args, 'probs'):
            assert self.args.probs.dim() == 3, \
                'expected probs to have size bsz*steps*vocab'
            probs = self.args.probs.index_select(1, torch.LongTensor(steps))
        else:
            probs = torch.FloatTensor(bbsz, len(steps), vocab).zero_()
            for i, step in enumerate(steps):
                # args.beam_probs gives the probability for every vocab element,
                # starting with eos, then unknown, and then the rest of the vocab
                if step < len(self.args.beam_probs):
                    probs[:, i, self.dictionary.eos():] = self.args.beam_probs[step]
                else:
                    probs[:, i, self.dictionary.eos()] = 1.0
        # random attention
        attn = torch.rand(bbsz, tgt_len, src_len)
        return probs, attn
    def get_normalized_probs(self, net_output, log_probs, _):
        # the decoder returns probabilities directly
        probs = net_output[0]
        if log_probs:
            return probs.log()
        else:
            return probs
    def max_positions(self):
        # Maximum target length this decoder supports (default 100).
        return self.args.max_decoder_positions
|
dkentw/robotframework
|
refs/heads/master
|
utest/model/test_testsuite.py
|
25
|
import unittest
from robot.utils.asserts import assert_equal, assert_true, assert_raises
from robot.model import TestSuite
class TestTestSuite(unittest.TestCase):
    """Unit tests for robot.model.TestSuite construction and mutation."""

    def setUp(self):
        self.suite = TestSuite(metadata={'M': 'V'})

    # NOTE(review): 'medatata' is a typo for 'metadata'; the name is kept
    # because a documentation-only change must not rename a test.
    def test_modify_medatata(self):
        self.suite.metadata['m'] = 'v'
        self.suite.metadata['n'] = 'w'
        # Expected result shows keys are matched case-insensitively:
        # assigning 'm' overwrote the value stored under 'M'.
        assert_equal(dict(self.suite.metadata), {'M': 'v', 'n': 'w'})

    def test_set_metadata(self):
        self.suite.metadata = {'a': '1', 'b': '1'}
        self.suite.metadata['A'] = '2'
        assert_equal(dict(self.suite.metadata), {'a': '2', 'b': '1'})

    def test_create_and_add_suite(self):
        # Both creation styles must end up parented to this suite.
        s1 = self.suite.suites.create(name='s1')
        s2 = TestSuite(name='s2')
        self.suite.suites.append(s2)
        assert_true(s1.parent is self.suite)
        assert_true(s2.parent is self.suite)
        assert_equal(list(self.suite.suites), [s1, s2])

    def test_reset_suites(self):
        s1 = TestSuite(name='s1')
        self.suite.suites = [s1]
        s2 = self.suite.suites.create(name='s2')
        assert_true(s1.parent is self.suite)
        assert_true(s2.parent is self.suite)
        assert_equal(list(self.suite.suites), [s1, s2])

    def test_suite_name(self):
        suite = TestSuite()
        assert_equal(suite.name, '')
        assert_equal(suite.suites.create(name='foo').name, 'foo')
        assert_equal(suite.suites.create(name='bar').name, 'bar')
        # An unnamed parent derives its name from its children.
        assert_equal(suite.name, 'foo & bar')
        assert_equal(suite.suites.create(name='zap').name, 'zap')
        assert_equal(suite.name, 'foo & bar & zap')
        # An explicit name overrides the derived one.
        suite.name = 'new name'
        assert_equal(suite.name, 'new name')

    def test_nested_subsuites(self):
        suite = TestSuite(name='top')
        sub1 = suite.suites.create(name='sub1')
        sub2 = sub1.suites.create(name='sub2')
        assert_equal(list(suite.suites), [sub1])
        assert_equal(list(sub1.suites), [sub2])

    def test_set_tags(self):
        suite = TestSuite()
        suite.tests.create()
        suite.tests.create(tags=['t1', 't2'])
        suite.set_tags(add='a', remove=['t2', 'nonex'])
        # Created after set_tags and without persist -> untouched.
        suite.tests.create()
        assert_equal(list(suite.tests[0].tags), ['a'])
        assert_equal(list(suite.tests[1].tags), ['a', 't1'])
        assert_equal(list(suite.tests[2].tags), [])

    def test_set_tags_also_to_new_child(self):
        suite = TestSuite()
        suite.tests.create()
        suite.set_tags(add='a', remove=['t2', 'nonex'], persist=True)
        suite.tests.create(tags=['t1', 't2'])
        suite.tests = list(suite.tests)
        suite.tests.create()
        suite.suites.create().tests.create()
        # With persist=True, tags apply to tests created afterwards too,
        # including those in newly created child suites.
        assert_equal(list(suite.tests[0].tags), ['a'])
        assert_equal(list(suite.tests[1].tags), ['a', 't1'])
        assert_equal(list(suite.tests[2].tags), ['a'])
        assert_equal(list(suite.suites[0].tests[0].tags), ['a'])

    def test_slots(self):
        # TestSuite restricts attributes, so ad-hoc ones must raise.
        assert_raises(AttributeError, setattr, self.suite, 'attr', 'value')
class TestSuiteId(unittest.TestCase):
    """Tests for the auto-generated hierarchical suite ids ('s1-s2-...')."""

    def test_one_suite(self):
        assert_equal(TestSuite().id, 's1')

    def test_sub_suites(self):
        parent = TestSuite()
        for i in range(10):
            # Child ids are 1-based and appended to the parent id.
            assert_equal(parent.suites.create().id, 's1-s%s' % (i+1))
        assert_equal(parent.suites[-1].suites.create().id, 's1-s10-s1')

    def test_id_is_dynamic(self):
        # Ids reflect the suite's *current* position in the tree,
        # not the position it had when created.
        suite = TestSuite()
        sub = suite.suites.create().suites.create()
        assert_equal(sub.id, 's1-s1-s1')
        suite.suites = [sub]
        assert_equal(sub.id, 's1-s1')
class TestStringRepresentation(unittest.TestCase):
    """str()/unicode() conversions of suites (this module targets Python 2)."""

    def setUp(self):
        self.empty = TestSuite()
        self.ascii = TestSuite(name='Kekkonen')
        self.non_ascii = TestSuite(name=u'hyv\xe4 nimi')

    def test_unicode(self):
        assert_equal(unicode(self.empty), '')
        assert_equal(unicode(self.ascii), 'Kekkonen')
        assert_equal(unicode(self.non_ascii), u'hyv\xe4 nimi')

    def test_str(self):
        assert_equal(str(self.empty), '')
        assert_equal(str(self.ascii), 'Kekkonen')
        # Non-ASCII characters degrade to '?' in the byte-string form.
        assert_equal(str(self.non_ascii), 'hyv? nimi')


if __name__ == '__main__':
    unittest.main()
|
dharmabumstead/ansible
|
refs/heads/devel
|
lib/ansible/plugins/lookup/nios_next_ip.py
|
30
|
#
# Copyright 2018 Red Hat | Ansible
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
# Plugin documentation consumed by `ansible-doc`.
DOCUMENTATION = """
---
lookup: nios_next_ip
version_added: "2.5"
short_description: Return the next available IP address for a network
description:
- Uses the Infoblox WAPI API to return the next available IP addresses
for a given network CIDR
requirements:
- infoblox_client
extends_documentation_fragment: nios
options:
_terms:
description: The CIDR network to retrieve the next addresses from
required: True
num:
description: The number of IP addresses to return
required: false
default: 1
"""

# Usage examples shown in the plugin documentation.
EXAMPLES = """
- name: return next available IP address for network 192.168.10.0/24
set_fact:
ipaddr: "{{ lookup('nios_next_ip', '192.168.10.0/24', provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
- name: return the next 3 available IP addresses for network 192.168.10.0/24
set_fact:
ipaddr: "{{ lookup('nios_next_ip', '192.168.10.0/24', num=3, provider={'host': 'nios01', 'username': 'admin', 'password': 'password'}) }}"
"""

# Description of the value the lookup returns.
RETURN = """
_list:
description:
- The list of next IP addresses available
returned: always
type: list
"""

from ansible.plugins.lookup import LookupBase
from ansible.module_utils.net_tools.nios.api import WapiLookup
from ansible.module_utils._text import to_text
from ansible.errors import AnsibleError
class LookupModule(LookupBase):
    """Return the next available IP address(es) for a network CIDR via the
    Infoblox WAPI ``next_available_ip`` function."""

    def run(self, terms, variables=None, **kwargs):
        try:
            network = terms[0]
        except IndexError:
            raise AnsibleError('missing argument in the form of A.B.C.D/E')

        provider = kwargs.pop('provider', {})
        wapi = WapiLookup(provider)

        found = wapi.get_object('network', {'network': network})
        if found is None:
            raise AnsibleError('unable to find network object %s' % network)

        count = kwargs.get('num', 1)
        try:
            # Ask the appliance for `count` free addresses on the network.
            avail = wapi.call_func(
                'next_available_ip', found[0]['_ref'], {'num': count})
            return [avail['ips']]
        except Exception as exc:
            # Surface any WAPI failure as a lookup error.
            raise AnsibleError(to_text(exc))
|
napkindrawing/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/f5/bigip_gtm_pool.py
|
33
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2017 F5 Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module maturity/support metadata consumed by Ansible tooling.
ANSIBLE_METADATA = {
    'status': ['preview'],
    'supported_by': 'community',
    'metadata_version': '1.0'
}

# Module documentation rendered by `ansible-doc`.
DOCUMENTATION = '''
---
module: bigip_gtm_pool
short_description: Manages F5 BIG-IP GTM pools.
description:
- Manages F5 BIG-IP GTM pools.
version_added: "2.4"
options:
state:
description:
- Pool member state. When C(present), ensures that the pool is
created and enabled. When C(absent), ensures that the pool is
removed from the system. When C(enabled) or C(disabled), ensures
that the pool is enabled or disabled (respectively) on the remote
device.
required: True
choices:
- present
- absent
- enabled
- disabled
preferred_lb_method:
description:
- The load balancing mode that the system tries first.
choices:
- round-robin
- return-to-dns
- ratio
- topology
- static-persistence
- global-availability
- virtual-server-capacity
- least-connections
- lowest-round-trip-time
- fewest-hops
- packet-rate
- cpu
- completion-rate
- quality-of-service
- kilobytes-per-second
- drop-packet
- fallback-ip
- virtual-server-score
alternate_lb_method:
description:
- The load balancing mode that the system tries if the
C(preferred_lb_method) is unsuccessful in picking a pool.
choices:
- round-robin
- return-to-dns
- none
- ratio
- topology
- static-persistence
- global-availability
- virtual-server-capacity
- packet-rate
- drop-packet
- fallback-ip
- virtual-server-score
fallback_lb_method:
description:
- The load balancing mode that the system tries if both the
C(preferred_lb_method) and C(alternate_lb_method)s are unsuccessful
in picking a pool.
choices:
- round-robin
- return-to-dns
- ratio
- topology
- static-persistence
- global-availability
- virtual-server-capacity
- least-connections
- lowest-round-trip-time
- fewest-hops
- packet-rate
- cpu
- completion-rate
- quality-of-service
- kilobytes-per-second
- drop-packet
- fallback-ip
- virtual-server-score
fallback_ip:
description:
- Specifies the IPv4, or IPv6 address of the server to which the system
directs requests when it cannot use one of its pools to do so.
Note that the system uses the fallback IP only if you select the
C(fallback_ip) load balancing method.
type:
description:
- The type of GTM pool that you want to create. On BIG-IP releases
prior to version 12, this parameter is not required. On later versions
of BIG-IP, this is a required parameter.
choices:
- a
- aaaa
- cname
- mx
- naptr
- srv
name:
description:
- Name of the GTM pool.
required: True
notes:
- Requires the f5-sdk Python package on the host. This is as easy as
pip install f5-sdk.
- Requires the netaddr Python package on the host. This is as easy as
pip install netaddr.
extends_documentation_fragment: f5
requirements:
- f5-sdk
- netaddr
author:
- Tim Rupp (@caphrim007)
'''
# Return-value documentation rendered by `ansible-doc`.
# Fix: corrected the "load balacing" typo in the fallback_ip description.
RETURN = '''
preferred_lb_method:
description: New preferred load balancing method for the pool.
returned: changed
type: string
sample: "topology"
alternate_lb_method:
description: New alternate load balancing method for the pool.
returned: changed
type: string
sample: "drop-packet"
fallback_lb_method:
description: New fallback load balancing method for the pool.
returned: changed
type: string
sample: "fewest-hops"
fallback_ip:
description: New fallback IP used when load balancing using the C(fallback_ip) method.
returned: changed
type: string
sample: "10.10.10.10"
'''
# Usage examples rendered by `ansible-doc`.
EXAMPLES = '''
- name: Create a GTM pool
bigip_gtm_pool:
server: "lb.mydomain.com"
user: "admin"
password: "secret"
name: "my_pool"
delegate_to: localhost
- name: Disable pool
bigip_gtm_pool:
server: "lb.mydomain.com"
user: "admin"
password: "secret"
state: "disabled"
name: "my_pool"
delegate_to: localhost
'''

from distutils.version import LooseVersion
from ansible.module_utils.f5_utils import (
    AnsibleF5Client,
    AnsibleF5Parameters,
    HAS_F5SDK,
    F5ModuleError,
    iControlUnexpectedHTTPError
)

# netaddr is optional at import time; main() reports a clean error if absent.
try:
    from netaddr import IPAddress, AddrFormatError
    HAS_NETADDR = True
except ImportError:
    HAS_NETADDR = False

import copy
class Parameters(AnsibleF5Parameters):
    """Maps between Ansible module options and BIG-IP REST API attributes."""

    # REST attribute name -> module option name.  All three fallback keys
    # collapse onto the single 'fallback_ip' option because different
    # BIG-IP versions expose different attribute names for it.
    api_map = {
        'loadBalancingMode': 'preferred_lb_method',
        'alternateMode': 'alternate_lb_method',
        'fallbackMode': 'fallback_lb_method',
        'verifyMemberAvailability': 'verify_member_availability',
        'fallbackIpv4': 'fallback_ip',
        'fallbackIpv6': 'fallback_ip',
        'fallbackIp': 'fallback_ip'
    }

    # Options whose differences between want/have trigger a device update.
    updatables = [
        'preferred_lb_method', 'alternate_lb_method', 'fallback_lb_method',
        'fallback_ip'
    ]

    # Options echoed back to the user in the module result.
    returnables = [
        'preferred_lb_method', 'alternate_lb_method', 'fallback_lb_method',
        'fallback_ip'
    ]

    # REST attribute names serialized by api_params().
    api_attributes = [
        'loadBalancingMode', 'alternateMode', 'fallbackMode', 'verifyMemberAvailability',
        'fallbackIpv4', 'fallbackIpv6', 'fallbackIp'
    ]

    def to_return(self):
        """Collect the returnable options as a plain, filtered dict."""
        result = {}
        for returnable in self.returnables:
            result[returnable] = getattr(self, returnable)
        result = self._filter_params(result)
        return result

    def api_params(self):
        """Serialize parameters under their REST attribute names for API calls."""
        result = {}
        for api_attribute in self.api_attributes:
            if self.api_map is not None and api_attribute in self.api_map:
                result[api_attribute] = getattr(self, self.api_map[api_attribute])
            else:
                result[api_attribute] = getattr(self, api_attribute)
        result = self._filter_params(result)
        return result

    @property
    def collection(self):
        # Maps the pool record type to the SDK collection attribute name
        # used on v12+ (e.g. type 'a' lives under `pools.a_s`).
        type_map = dict(
            a='a_s',
            aaaa='aaaas',
            cname='cnames',
            mx='mxs',
            naptr='naptrs',
            srv='srvs'
        )
        if self._values['type'] is None:
            return None
        wideip_type = self._values['type']
        return type_map[wideip_type]

    @property
    def type(self):
        if self._values['type'] is None:
            return None
        return str(self._values['type'])

    @property
    def verify_member_availability(self):
        # Normalize the boolean option to the API's 'enabled'/'disabled' strings.
        if self._values['verify_member_availability'] is None:
            return None
        elif self._values['verify_member_availability']:
            return 'enabled'
        else:
            return 'disabled'

    @property
    def fallback_ip(self):
        # Validate via netaddr; the literal 'any' is passed through untouched.
        if self._values['fallback_ip'] is None:
            return None
        if self._values['fallback_ip'] == 'any':
            return 'any'
        try:
            address = IPAddress(self._values['fallback_ip'])
            if address.version == 4:
                return str(address.ip)
            elif address.version == 6:
                return str(address.ip)
            return None
        except AddrFormatError:
            # NOTE(review): message says "IPv4" although IPv6 input is also
            # accepted above — consider rewording.
            raise F5ModuleError(
                'The provided fallback address is not a valid IPv4 address'
            )

    @property
    def state(self):
        # 'enabled' is just 'present' plus the enabled flag below.
        if self._values['state'] == 'enabled':
            return 'present'
        return self._values['state']

    @property
    def enabled(self):
        # Tri-state: True/False when derivable from state or stored flag,
        # None when unknown (e.g. parameters read back from the device).
        if self._values['state'] == 'disabled':
            return False
        elif self._values['state'] in ['present', 'enabled']:
            return True
        elif self._values['enabled'] is True:
            return True
        else:
            return None

    @property
    def disabled(self):
        # Mirror of `enabled`, also tri-state.
        if self._values['state'] == 'disabled':
            return True
        elif self._values['state'] in ['present', 'enabled']:
            return False
        elif self._values['disabled'] is True:
            return True
        else:
            return None
class ModuleManager(object):
    """Dispatches to the typed or untyped pool manager based on TMOS version."""

    def __init__(self, client):
        self.client = client

    def exec_module(self):
        # GTM endpoints only exist when the GTM module is provisioned.
        if not self.gtm_provisioned():
            raise F5ModuleError(
                "GTM must be provisioned to use this module."
            )
        # Pre-12 devices use a single untyped pool collection; 12+ splits
        # pools per record type.
        kind = 'untyped' if self.version_is_less_than_12() else 'typed'
        return self.get_manager(kind).exec_module()

    def get_manager(self, type):
        if type == 'typed':
            return TypedManager(self.client)
        elif type == 'untyped':
            return UntypedManager(self.client)

    def version_is_less_than_12(self):
        running = self.client.api.tmos_version
        return LooseVersion(running) < LooseVersion('12.0.0')

    def gtm_provisioned(self):
        db = self.client.api.tm.sys.dbs.db.load(
            name='provisioned.cpu.gtm'
        )
        return int(db.value) != 0
class BaseManager(object):
    """Shared create/update/delete workflow for typed and untyped pools.

    Subclasses provide the device I/O primitives (exists,
    read_current_from_device, *_on_device); this class provides the
    state machine around them.
    """

    def __init__(self, client):
        self.client = client
        # `have` holds device state (read lazily in update()); `want` the
        # desired state from module params; `changes` the reported delta.
        self.have = None
        self.want = Parameters(self.client.module.params)
        self.changes = Parameters()

    def _set_changed_options(self):
        """On create: report every explicitly supplied returnable option."""
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = Parameters(changed)

    def _update_changed_options(self):
        """Diff want vs have; returns True when an update is needed."""
        changed = {}
        for key in Parameters.updatables:
            if getattr(self.want, key) is not None:
                attr1 = getattr(self.want, key)
                attr2 = getattr(self.have, key)
                if attr1 != attr2:
                    changed[key] = attr1
        # Enable/disable transitions are tracked separately because `state`
        # is not an updatable option itself.
        if self.want.state == 'disabled' and self.have.enabled:
            changed['state'] = self.want.state
        elif self.want.state in ['present', 'enabled'] and self.have.disabled:
            changed['state'] = self.want.state
        if changed:
            self.changes = Parameters(changed)
            return True
        return False

    def exec_module(self):
        """Run the requested state change; return changed flag plus deltas."""
        changed = False
        result = dict()
        state = self.want.state
        try:
            # 'enabled' was already normalized to 'present' by Parameters.state.
            if state in ["present", "disabled"]:
                changed = self.present()
            elif state == "absent":
                changed = self.absent()
        except iControlUnexpectedHTTPError as e:
            # Convert raw REST errors to the module's error type.
            raise F5ModuleError(str(e))
        changes = self.changes.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        return result

    def present(self):
        if self.exists():
            return self.update()
        else:
            return self.create()

    def absent(self):
        if self.exists():
            return self.remove()
        return False

    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False

    def update(self):
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        # Honor check mode: report the change without touching the device.
        if self.client.check_mode:
            return True
        self.update_on_device()
        return True

    def create(self):
        self._set_changed_options()
        if self.client.check_mode:
            return True
        self.create_on_device()
        # Verify the device actually created the pool.
        if self.exists():
            return True
        else:
            raise F5ModuleError("Failed to create the GTM pool")

    def remove(self):
        if self.client.check_mode:
            return True
        self.remove_from_device()
        # Verify the device actually deleted the pool.
        if self.exists():
            raise F5ModuleError("Failed to delete the GTM pool")
        return True
class TypedManager(BaseManager):
    """Manager for BIG-IP >= 12.x, where GTM pools are split per record type."""

    def __init__(self, client):
        super(TypedManager, self).__init__(client)
        # v12+ REST paths require the record type, so fail fast without it.
        if self.want.type is None:
            raise F5ModuleError(
                "The 'type' option is required for BIG-IP instances "
                "greater than or equal to 12.x"
            )

    def present(self):
        types = [
            'a', 'aaaa', 'cname', 'mx', 'naptr', 'srv'
        ]
        if self.want.type is None:
            raise F5ModuleError(
                "A pool 'type' must be specified"
            )
        elif self.want.type not in types:
            raise F5ModuleError(
                "The specified pool type is invalid"
            )
        return super(TypedManager, self).present()

    def exists(self):
        # Navigate pools -> per-type collection (e.g. 'a_s') -> resource ('a');
        # both attribute names come from the Parameters properties.
        pools = self.client.api.tm.gtm.pools
        collection = getattr(pools, self.want.collection)
        resource = getattr(collection, self.want.type)
        result = resource.exists(
            name=self.want.name,
            partition=self.want.partition
        )
        return result

    def update_on_device(self):
        """Apply the wanted REST attributes to the existing pool."""
        params = self.want.api_params()
        pools = self.client.api.tm.gtm.pools
        collection = getattr(pools, self.want.collection)
        resource = getattr(collection, self.want.type)
        result = resource.load(
            name=self.want.name,
            partition=self.want.partition
        )
        result.modify(**params)

    def read_current_from_device(self):
        """Load the pool from the device and wrap its attrs in Parameters."""
        pools = self.client.api.tm.gtm.pools
        collection = getattr(pools, self.want.collection)
        resource = getattr(collection, self.want.type)
        result = resource.load(
            name=self.want.name,
            partition=self.want.partition
        )
        result = result.attrs
        return Parameters(result)

    def create_on_device(self):
        """Create the pool with the wanted REST attributes."""
        params = self.want.api_params()
        pools = self.client.api.tm.gtm.pools
        collection = getattr(pools, self.want.collection)
        resource = getattr(collection, self.want.type)
        resource.create(
            name=self.want.name,
            partition=self.want.partition,
            **params
        )

    def remove_from_device(self):
        """Delete the pool if it can be loaded."""
        pools = self.client.api.tm.gtm.pools
        collection = getattr(pools, self.want.collection)
        resource = getattr(collection, self.want.type)
        resource = resource.load(
            name=self.want.name,
            partition=self.want.partition
        )
        if resource:
            resource.delete()
class UntypedManager(BaseManager):
    """Manager for pre-12.x BIG-IPs, where all GTM pools share one collection."""

    def _pool_api(self):
        # Every untyped pool operation goes through this single SDK endpoint.
        return self.client.api.tm.gtm.pools.pool

    def exists(self):
        return self._pool_api().exists(
            name=self.want.name,
            partition=self.want.partition
        )

    def update_on_device(self):
        """Apply the wanted REST attributes to the existing pool."""
        params = self.want.api_params()
        pool = self._pool_api().load(
            name=self.want.name,
            partition=self.want.partition
        )
        pool.modify(**params)

    def read_current_from_device(self):
        """Load the pool from the device and wrap its attrs in Parameters."""
        pool = self._pool_api().load(
            name=self.want.name,
            partition=self.want.partition
        )
        return Parameters(pool.attrs)

    def create_on_device(self):
        """Create the pool with the wanted REST attributes."""
        params = self.want.api_params()
        self._pool_api().create(
            name=self.want.name,
            partition=self.want.partition,
            **params
        )

    def remove_from_device(self):
        """Delete the pool on the device."""
        pool = self._pool_api().load(
            name=self.want.name,
            partition=self.want.partition
        )
        pool.delete()
class ArgumentSpec(object):
    """Collects the module's argument spec, choice lists, and validation rules."""

    def __init__(self):
        self.states = ['absent', 'present', 'enabled', 'disabled']
        self.preferred_lb_methods = [
            'round-robin', 'return-to-dns', 'ratio', 'topology',
            'static-persistence', 'global-availability',
            'virtual-server-capacity', 'least-connections',
            'lowest-round-trip-time', 'fewest-hops', 'packet-rate', 'cpu',
            'completion-rate', 'quality-of-service', 'kilobytes-per-second',
            'drop-packet', 'fallback-ip', 'virtual-server-score'
        ]
        self.alternate_lb_methods = [
            'round-robin', 'return-to-dns', 'none', 'ratio', 'topology',
            'static-persistence', 'global-availability',
            'virtual-server-capacity', 'packet-rate', 'drop-packet',
            'fallback-ip', 'virtual-server-score'
        ]
        # Fallback accepts everything the preferred mode does, plus 'none'.
        self.fallback_lb_methods = self.preferred_lb_methods + ['none']
        self.types = [
            'a', 'aaaa', 'cname', 'mx', 'naptr', 'srv'
        ]
        self.supports_check_mode = True
        self.argument_spec = dict(
            name=dict(required=True),
            state=dict(
                default='present',
                choices=self.states,
            ),
            preferred_lb_method=dict(
                choices=self.preferred_lb_methods,
            ),
            fallback_lb_method=dict(
                choices=self.fallback_lb_methods,
            ),
            alternate_lb_method=dict(
                choices=self.alternate_lb_methods,
            ),
            fallback_ip=dict(),
            type=dict(
                choices=self.types
            )
        )
        # Selecting the fallback-ip LB mode requires an explicit fallback_ip.
        self.required_if = [
            ['preferred_lb_method', 'fallback-ip', ['fallback_ip']],
            ['fallback_lb_method', 'fallback-ip', ['fallback_ip']],
            ['alternate_lb_method', 'fallback-ip', ['fallback_ip']]
        ]
        self.f5_product_name = 'bigip'
def main():
    """Module entry point: check dependencies, build the client, run the manager."""
    if not HAS_F5SDK:
        raise F5ModuleError("The python f5-sdk module is required")
    if not HAS_NETADDR:
        raise F5ModuleError("The python netaddr module is required")

    spec = ArgumentSpec()
    client = AnsibleF5Client(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
        f5_product_name=spec.f5_product_name,
        required_if=spec.required_if
    )
    try:
        # Report results through the standard Ansible exit paths.
        results = ModuleManager(client).exec_module()
        client.module.exit_json(**results)
    except F5ModuleError as e:
        client.module.fail_json(msg=str(e))


if __name__ == '__main__':
    main()
|
Mitali-Sodhi/CodeLingo
|
refs/heads/master
|
Dataset/python/test_main.py
|
8
|
from __future__ import with_statement
import copy
from functools import partial
from operator import isMappingType
import os
import sys
from contextlib import contextmanager
from fudge import Fake, patched_context, with_fakes
from nose.tools import ok_, eq_
from fabric.decorators import hosts, roles, task
from fabric.context_managers import settings
from fabric.main import (parse_arguments, _escape_split,
load_fabfile as _load_fabfile, list_commands, _task_names,
COMMANDS_HEADER, NESTED_REMINDER)
import fabric.state
from fabric.state import _AttributeDict
from fabric.tasks import Task, WrappedCallableTask
from fabric.task_utils import _crawl, crawl, merge
from utils import mock_streams, eq_, FabricTest, fabfile, path_prefix, aborts
# Wrapper hiding the extra return value of the real load_fabfile so the
# older two-tuple call sites in this module keep working.
def load_fabfile(*args, **kwargs):
    return _load_fabfile(*args, **kwargs)[:2]


#
# Basic CLI stuff
#

def test_argument_parsing():
    # Each CLI spec parses to (name, args, kwargs, hosts, roles, exclude_hosts).
    for args, output in [
        # Basic
        ('abc', ('abc', [], {}, [], [], [])),
        # Arg
        ('ab:c', ('ab', ['c'], {}, [], [], [])),
        # Kwarg
        ('a:b=c', ('a', [], {'b':'c'}, [], [], [])),
        # Arg and kwarg
        ('a:b=c,d', ('a', ['d'], {'b':'c'}, [], [], [])),
        # Multiple kwargs
        ('a:b=c,d=e', ('a', [], {'b':'c','d':'e'}, [], [], [])),
        # Host
        ('abc:host=foo', ('abc', [], {}, ['foo'], [], [])),
        # Hosts with single host
        ('abc:hosts=foo', ('abc', [], {}, ['foo'], [], [])),
        # Hosts with multiple hosts
        # Note: in a real shell, one would need to quote or escape "foo;bar".
        # But in pure-Python that would get interpreted literally, so we don't.
        ('abc:hosts=foo;bar', ('abc', [], {}, ['foo', 'bar'], [], [])),
        # Exclude hosts
        ('abc:hosts=foo;bar,exclude_hosts=foo', ('abc', [], {}, ['foo', 'bar'], [], ['foo'])),
        ('abc:hosts=foo;bar,exclude_hosts=foo;bar', ('abc', [], {}, ['foo', 'bar'], [], ['foo','bar'])),
        # Empty string args
        ("task:x=y,z=", ('task', [], {'x': 'y', 'z': ''}, [], [], [])),
        ("task:foo,,x=y", ('task', ['foo', ''], {'x': 'y'}, [], [], [])),
    ]:
        yield eq_, parse_arguments([args]), [output]
def test_escaped_task_arg_split():
    """
    Allow backslashes to escape the task argument separator character
    """
    argstr = r"foo,bar\,biz\,baz,what comes after baz?"
    eq_(
        _escape_split(',', argstr),
        ['foo', 'bar,biz,baz', 'what comes after baz?']
    )

def test_escaped_task_kwarg_split():
    """
    Allow backslashes to escape the = in x=y task kwargs
    """
    argstr = r"cmd:arg,escaped\,arg,nota\=kwarg,regular=kwarg,escaped=regular\=kwarg"
    args = ['arg', 'escaped,arg', 'nota=kwarg']
    kwargs = {'regular': 'kwarg', 'escaped': 'regular=kwarg'}
    eq_(
        parse_arguments([argstr])[0],
        ('cmd', args, kwargs, [], [], []),
    )

#
# Host/role decorators
#

# Allow calling Task.get_hosts as a plain function; args appear to be
# (arg_hosts, arg_roles, arg_exclude_hosts[, env]) — inferred from call
# sites in this module.
def get_hosts(command, *args):
    return WrappedCallableTask(command).get_hosts(*args)

# Compare a command's resolved hosts against an expected list; `func`
# normalizes both sides (set() by default, i.e. order-insensitive).
def eq_hosts(command, host_list, env=None, func=set):
    eq_(func(get_hosts(command, [], [], [], env)), func(host_list))

# Order-sensitive variant: identity instead of set() normalization.
true_eq_hosts = partial(eq_hosts, func=lambda x: x)
def test_hosts_decorator_by_itself():
    """
    Use of @hosts only
    """
    host_list = ['a', 'b']
    @hosts(*host_list)
    def command():
        pass
    eq_hosts(command, host_list)

# Shared roledef fixture; 'b' appears in both roles to exercise deduping.
fake_roles = {
    'r1': ['a', 'b'],
    'r2': ['b', 'c']
}

def test_roles_decorator_by_itself():
    """
    Use of @roles only
    """
    @roles('r1')
    def command():
        pass
    eq_hosts(command, ['a', 'b'], env={'roledefs': fake_roles})

def test_hosts_and_roles_together():
    """
    Use of @roles and @hosts together results in union of both
    """
    @roles('r1', 'r2')
    @hosts('d')
    def command():
        pass
    eq_hosts(command, ['a', 'b', 'c', 'd'], env={'roledefs': fake_roles})

def test_host_role_merge_deduping():
    """
    Use of @roles and @hosts dedupes when merging
    """
    @roles('r1', 'r2')
    @hosts('a')
    def command():
        pass
    # Not ['a', 'a', 'b', 'c'] or etc
    true_eq_hosts(command, ['a', 'b', 'c'], env={'roledefs': fake_roles})

def test_host_role_merge_deduping_off():
    """
    Allow turning deduping off
    """
    @roles('r1', 'r2')
    @hosts('a')
    def command():
        pass
    with settings(dedupe_hosts=False):
        true_eq_hosts(
            command,
            # 'a' 1x host 1x role
            # 'b' 1x r1 1x r2
            ['a', 'a', 'b', 'b', 'c'],
            env={'roledefs': fake_roles}
        )

# Same shape as fake_roles but with tuples instead of lists.
tuple_roles = {
    'r1': ('a', 'b'),
    'r2': ('b', 'c'),
}
def test_roles_as_tuples():
    """
    Test that a list of roles as a tuple succeeds
    """
    @roles('r1')
    def command():
        pass
    eq_hosts(command, ['a', 'b'], env={'roledefs': tuple_roles})

def test_hosts_as_tuples():
    """
    Test that a list of hosts as a tuple succeeds
    """
    def command():
        pass
    eq_hosts(command, ['foo', 'bar'], env={'hosts': ('foo', 'bar')})

def test_hosts_decorator_overrides_env_hosts():
    """
    If @hosts is used it replaces any env.hosts value
    """
    @hosts('bar')
    def command():
        pass
    eq_hosts(command, ['bar'])
    # env.hosts supplies 'foo', but the decorator's list wins outright.
    assert 'foo' not in get_hosts(command, [], [], [], {'hosts': ['foo']})
def test_hosts_decorator_overrides_env_hosts_with_task_decorator_first():
    """
    If @hosts is used it replaces any env.hosts value even with @task
    """
    @task
    @hosts('bar')
    def command():
        pass
    eq_hosts(command, ['bar'])
    # Bug fix: get_hosts takes (arg_hosts, arg_roles, arg_exclude_hosts, env)
    # after the command (cf. the other call sites in this module).  The env
    # dict was previously passed as exclude_hosts, so env.hosts was never
    # consulted and the assertion passed vacuously.
    assert 'foo' not in get_hosts(command, [], [], [], {'hosts': ['foo']})

def test_hosts_decorator_overrides_env_hosts_with_task_decorator_last():
    @hosts('bar')
    @task
    def command():
        pass
    eq_hosts(command, ['bar'])
    # Same fix as above: add the missing empty exclude_hosts list.
    assert 'foo' not in get_hosts(command, [], [], [], {'hosts': ['foo']})
def test_hosts_stripped_env_hosts():
    """
    Make sure hosts defined in env.hosts are cleaned of extra spaces
    """
    def command():
        pass
    myenv = {'hosts': [' foo ', 'bar '], 'roles': [], 'exclude_hosts': []}
    eq_hosts(command, ['foo', 'bar'], myenv)

# Roledef fixture whose r1 entries carry surrounding whitespace.
spaced_roles = {
    'r1': [' a ', ' b '],
    'r2': ['b', 'c'],
}

def test_roles_stripped_env_hosts():
    """
    Make sure hosts defined in env.roles are cleaned of extra spaces
    """
    @roles('r1')
    def command():
        pass
    eq_hosts(command, ['a', 'b'], {'roledefs': spaced_roles})

def test_hosts_decorator_expands_single_iterable():
    """
    @hosts(iterable) should behave like @hosts(*iterable)
    """
    host_list = ['foo', 'bar']
    @hosts(host_list)
    def command():
        pass
    eq_(command.hosts, host_list)

def test_roles_decorator_expands_single_iterable():
    """
    @roles(iterable) should behave like @roles(*iterable)
    """
    role_list = ['foo', 'bar']
    @roles(role_list)
    def command():
        pass
    eq_(command.roles, role_list)

#
# Host exclusion
#

# Bare command used by the exclusion tests below.
def dummy(): pass

def test_get_hosts_excludes_cli_exclude_hosts_from_cli_hosts():
    assert 'foo' not in get_hosts(dummy, ['foo', 'bar'], [], ['foo'])

def test_get_hosts_excludes_cli_exclude_hosts_from_decorator_hosts():
    assert 'foo' not in get_hosts(hosts('foo', 'bar')(dummy), [], [], ['foo'])

def test_get_hosts_excludes_global_exclude_hosts_from_global_hosts():
    fake_env = {'hosts': ['foo', 'bar'], 'exclude_hosts': ['foo']}
    assert 'foo' not in get_hosts(dummy, [], [], [], fake_env)
#
# Basic role behavior
#

@aborts
def test_aborts_on_nonexistent_roles():
    """
    Aborts if any given roles aren't found
    """
    merge([], ['badrole'], [], {})

def test_accepts_non_list_hosts():
    """
    A string hosts value is wrapped into a single-item list, not rejected
    """
    assert merge('badhosts', [], [], {}) == ['badhosts']

# Role whose member list is produced lazily by a callable.
lazy_role = {'r1': lambda: ['a', 'b']}

def test_lazy_roles():
    """
    Roles may be callables returning lists, as well as regular lists
    """
    @roles('r1')
    def command():
        pass
    eq_hosts(command, ['a', 'b'], env={'roledefs': lazy_role})
#
# Fabfile loading
#

def run_load_fabfile(path, sys_path):
    """Load *path* with sys.path temporarily replaced by *sys_path* and
    verify the load leaves sys.path untouched.

    Fix: sys.path restoration now happens in a ``finally`` block, so a
    failing load or assertion no longer leaks the fake path into every
    subsequent test.
    """
    # Module-esque object
    fake_module = Fake().has_attr(__dict__={})
    # Fake __import__
    importer = Fake(callable=True).returns(fake_module)
    # Snapshot sys.path for restore
    orig_path = copy.copy(sys.path)
    try:
        # Update with fake path
        sys.path = sys_path
        # Test for side effects
        load_fabfile(path, importer=importer)
        eq_(sys.path, sys_path)
    finally:
        # Restore even on failure so later tests see the real sys.path.
        sys.path = orig_path
def test_load_fabfile_should_not_remove_real_path_elements():
    # Each case is (fabfile path, starting sys.path); loading must not
    # drop any pre-existing sys.path entries.
    for fabfile_path, sys_dot_path in (
        # Directory not in path
        ('subdir/fabfile.py', ['not_subdir']),
        ('fabfile.py', ['nope']),
        # Directory in path, but not at front
        ('subdir/fabfile.py', ['not_subdir', 'subdir']),
        ('fabfile.py', ['not_subdir', '']),
        ('fabfile.py', ['not_subdir', '', 'also_not_subdir']),
        # Directory in path, and at front already
        ('subdir/fabfile.py', ['subdir']),
        ('subdir/fabfile.py', ['subdir', 'not_subdir']),
        ('fabfile.py', ['', 'some_dir', 'some_other_dir']),
    ):
        yield run_load_fabfile, fabfile_path, sys_dot_path

#
# Namespacing and new-style tasks
#

class TestTaskAliases(FabricTest):
    """Tasks declared with aliases must be discoverable under every name."""

    def test_flat_alias(self):
        f = fabfile("flat_alias.py")
        with path_prefix(f):
            docs, funcs = load_fabfile(f)
            eq_(len(funcs), 2)
            ok_("foo" in funcs)
            ok_("foo_aliased" in funcs)

    def test_nested_alias(self):
        f = fabfile("nested_alias.py")
        with path_prefix(f):
            docs, funcs = load_fabfile(f)
            ok_("nested" in funcs)
            eq_(len(funcs["nested"]), 2)
            ok_("foo" in funcs["nested"])
            ok_("foo_aliased" in funcs["nested"])

    def test_flat_aliases(self):
        f = fabfile("flat_aliases.py")
        with path_prefix(f):
            docs, funcs = load_fabfile(f)
            eq_(len(funcs), 3)
            ok_("foo" in funcs)
            ok_("foo_aliased" in funcs)
            ok_("foo_aliased_two" in funcs)

    def test_nested_aliases(self):
        f = fabfile("nested_aliases.py")
        with path_prefix(f):
            docs, funcs = load_fabfile(f)
            ok_("nested" in funcs)
            eq_(len(funcs["nested"]), 3)
            ok_("foo" in funcs["nested"])
            ok_("foo_aliased" in funcs["nested"])
            ok_("foo_aliased_two" in funcs["nested"])
class TestNamespaces(FabricTest):
    # Tests for new-style task discovery and namespacing behavior of
    # load_fabfile().

    def setup(self):
        # Parent class preserves current env
        super(TestNamespaces, self).setup()
        # Reset new-style-tests flag so running tests via Fab itself doesn't
        # muck with it.
        import fabric.state
        if 'new_style_tasks' in fabric.state.env:
            del fabric.state.env['new_style_tasks']

    def test_implicit_discovery(self):
        """
        Default to automatically collecting all tasks in a fabfile module
        """
        implicit = fabfile("implicit_fabfile.py")
        with path_prefix(implicit):
            docs, funcs = load_fabfile(implicit)
            eq_(len(funcs), 2)
            ok_("foo" in funcs)
            ok_("bar" in funcs)

    def test_explicit_discovery(self):
        """
        If __all__ is present, only collect the tasks it specifies
        """
        explicit = fabfile("explicit_fabfile.py")
        with path_prefix(explicit):
            docs, funcs = load_fabfile(explicit)
            eq_(len(funcs), 1)
            ok_("foo" in funcs)
            ok_("bar" not in funcs)

    def test_should_load_decorated_tasks_only_if_one_is_found(self):
        """
        If any new-style tasks are found, *only* new-style tasks should load
        """
        module = fabfile('decorated_fabfile.py')
        with path_prefix(module):
            docs, funcs = load_fabfile(module)
            eq_(len(funcs), 1)
            ok_('foo' in funcs)

    def test_class_based_tasks_are_found_with_proper_name(self):
        """
        Wrapped new-style tasks should preserve their function names
        """
        module = fabfile('decorated_fabfile_with_classbased_task.py')
        # NOTE(review): this env import appears unused here.
        from fabric.state import env
        with path_prefix(module):
            docs, funcs = load_fabfile(module)
            eq_(len(funcs), 1)
            ok_('foo' in funcs)

    def test_class_based_tasks_are_found_with_variable_name(self):
        """
        A new-style tasks with undefined name attribute should use the instance
        variable name.
        """
        module = fabfile('classbased_task_fabfile.py')
        # NOTE(review): this env import appears unused here.
        from fabric.state import env
        with path_prefix(module):
            docs, funcs = load_fabfile(module)
            eq_(len(funcs), 1)
            ok_('foo' in funcs)
            eq_(funcs['foo'].name, 'foo')

    def test_recursion_steps_into_nontask_modules(self):
        """
        Recursive loading will continue through modules with no tasks
        """
        module = fabfile('deep')
        with path_prefix(module):
            docs, funcs = load_fabfile(module)
            eq_(len(funcs), 1)
            ok_('submodule.subsubmodule.deeptask' in _task_names(funcs))

    def test_newstyle_task_presence_skips_classic_task_modules(self):
        """
        Classic-task-only modules shouldn't add tasks if any new-style tasks exist
        """
        module = fabfile('deep')
        with path_prefix(module):
            docs, funcs = load_fabfile(module)
            eq_(len(funcs), 1)
            ok_('submodule.classic_task' not in _task_names(funcs))

    def test_task_decorator_plays_well_with_others(self):
        """
        @task, when inside @hosts/@roles, should not hide the decorated task.
        """
        module = fabfile('decorator_order')
        with path_prefix(module):
            docs, funcs = load_fabfile(module)
            # When broken, crawl() finds None for 'foo' instead.
            eq_(crawl('foo', funcs), funcs['foo'])
#
# --list output
#
def eq_output(docstring, format_, expected):
    """
    Assert that list_commands() output, joined with newlines, equals
    ``expected`` for the given module docstring and --list format.
    """
    actual = "\n".join(list_commands(docstring, format_))
    return eq_(actual, expected)
def list_output(module, format_, expected):
    # Load the named support fabfile, install its task map as
    # fabric.state.commands, and assert the --list output matches
    # ``expected`` for the given format ('short'/'normal'/'nested').
    module = fabfile(module)
    with path_prefix(module):
        docstring, tasks = load_fabfile(module)
        with patched_context(fabric.state, 'commands', tasks):
            eq_output(docstring, format_, expected)
def test_list_output():
    # Generator test covering each --list output format against a known
    # support fabfile.  Expected strings embed the exact indentation the
    # listing code emits.
    lead = ":\n\n    "
    normal_head = COMMANDS_HEADER + lead
    nested_head = COMMANDS_HEADER + NESTED_REMINDER + lead
    for desc, module, format_, expected in (
        ("shorthand (& with namespacing)", 'deep', 'short', "submodule.subsubmodule.deeptask"),
        ("normal (& with namespacing)", 'deep', 'normal', normal_head + "submodule.subsubmodule.deeptask"),
        ("normal (with docstring)", 'docstring', 'normal', normal_head + "foo  Foos!"),
        ("nested (leaf only)", 'deep', 'nested', nested_head + """submodule:
        subsubmodule:
            deeptask"""),
        ("nested (full)", 'tree', 'nested', nested_head + """build_docs
    deploy
    db:
        migrate
    system:
        install_package
        debian:
            update_apt"""),
    ):
        # Attach a per-case description so nose reports each yield clearly.
        list_output.description = "--list output: %s" % desc
        yield list_output, module, format_, expected
        del list_output.description
def name_to_task(name):
    """Wrap a bare name string in a Task object whose ``name`` is that string."""
    task = Task()
    task.name = name
    return task
def strings_to_tasks(d):
    """
    Recursively convert a nested dict of name strings into Task objects.

    Mapping values recurse (sub-namespaces); leaf values become Tasks via
    name_to_task().
    """
    converted = {}
    for key, value in d.iteritems():
        converted[key] = (
            strings_to_tasks(value) if isMappingType(value)
            else name_to_task(value)
        )
    return converted
def test_task_names():
    # Generator test: _task_names() should flatten a (possibly nested) task
    # mapping into dotted names, listing top-level tasks before nested ones
    # and sorting peers alphabetically.
    for desc, input_, output in (
        ('top level (single)', {'a': 5}, ['a']),
        ('top level (multiple, sorting)', {'a': 5, 'b': 6}, ['a', 'b']),
        ('just nested', {'a': {'b': 5}}, ['a.b']),
        ('mixed', {'a': 5, 'b': {'c': 6}}, ['a', 'b.c']),
        ('top level comes before nested', {'z': 5, 'b': {'c': 6}}, ['z', 'b.c']),
        ('peers sorted equally', {'z': 5, 'b': {'c': 6}, 'd': {'e': 7}}, ['z', 'b.c', 'd.e']),
        (
            'complex tree',
            {
                'z': 5,
                'b': {
                    'c': 6,
                    'd': {
                        'e': {
                            'f': '7'
                        }
                    },
                    'g': 8
                },
                'h': 9,
                'w': {
                    'y': 10
                }
            },
            ['h', 'z', 'b.c', 'b.g', 'b.d.e.f', 'w.y']
        ),
    ):
        eq_.description = "task name flattening: %s" % desc
        yield eq_, _task_names(strings_to_tasks(input_)), output
        del eq_.description
def test_crawl():
    # Generator test: _crawl() should resolve a dotted task name through a
    # nested mapping and return the leaf value.
    for desc, name, mapping, output in (
        ("base case", 'a', {'a': 5}, 5),
        ("one level", 'a.b', {'a': {'b': 5}}, 5),
        ("deep", 'a.b.c.d.e', {'a': {'b': {'c': {'d': {'e': 5}}}}}, 5),
        ("full tree", 'a.b.c', {'a': {'b': {'c': 5}, 'd': 6}, 'z': 7}, 5)
    ):
        eq_.description = "crawling dotted names: %s" % desc
        yield eq_, _crawl(name, mapping), output
        del eq_.description
def test_mapping_task_classes():
    """
    Task classes implementing the mapping interface shouldn't break --list
    """
    # A mapping-like Task must be listed as a task, not recursed into as a
    # namespace.
    list_output('mapping', 'normal', COMMANDS_HEADER + """:\n
    mapping_task""")
def test_default_task_listings():
    """
    @task(default=True) should cause task to also load under module's name
    """
    # Each --list format should show both the module-level default entry and
    # the fully-qualified task name (nested format shows only the leaf).
    for format_, expected in (
        ('short', """mymodule
mymodule.long_task_name"""),
        ('normal', COMMANDS_HEADER + """:\n
    mymodule
    mymodule.long_task_name"""),
        ('nested', COMMANDS_HEADER + NESTED_REMINDER + """:\n
    mymodule:
        long_task_name""")
    ):
        list_output.description = "Default task --list output: %s" % format_
        yield list_output, 'default_tasks', format_, expected
    del list_output.description
def test_default_task_loading():
    """
    crawl() should return default tasks where found, instead of module objs
    """
    fabfile_path = fabfile('default_tasks')
    docs, tasks = load_fabfile(fabfile_path)
    found = crawl('mymodule', tasks)
    ok_(isinstance(found, Task))
def test_aliases_appear_in_fab_list():
    """
    --list should include aliases
    """
    # Both the original name and its alias must show up in short listing.
    list_output('nested_alias', 'short', """nested.foo
nested.foo_aliased""")
|
proversity-org/edx-platform
|
refs/heads/master
|
openedx/tests/xblock_integration/test_recommender.py
|
4
|
"""
This test file will run through some XBlock test scenarios regarding the
recommender system
"""
import itertools
import json
import StringIO
import unittest
from copy import deepcopy
from django.conf import settings
from django.core.urlresolvers import reverse
from ddt import data, ddt
from lms.djangoapps.courseware.tests.factories import GlobalStaffFactory
from lms.djangoapps.courseware.tests.helpers import LoginEnrollmentTestCase
from nose.plugins.attrib import attr
from six import text_type
from openedx.core.lib.url_utils import quote_slashes
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
class TestRecommender(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
    """
    Check that Recommender state is saved properly
    """
    # Two ordinary (non-staff) test users; a global-staff user is created in
    # setUp().
    STUDENTS = [
        {'email': 'view@test.com', 'password': 'foo'},
        {'email': 'view2@test.com', 'password': 'foo'}
    ]
    # Display names of the two recommender XBlocks created in setUpClass();
    # tests exercise both to prove per-block state isolation.
    XBLOCK_NAMES = ['recommender', 'recommender_second']

    @classmethod
    def setUpClass(cls):
        """
        Build one course containing two recommender XBlocks, plus shared
        resource fixtures used by all subclasses.
        """
        # Nose runs setUpClass methods even if a class decorator says to skip
        # the class: https://github.com/nose-devs/nose/issues/946
        # So, skip the test class here if we are not in the LMS.
        if settings.ROOT_URLCONF != 'lms.urls':
            raise unittest.SkipTest('Test only valid in lms')
        super(TestRecommender, cls).setUpClass()
        cls.course = CourseFactory.create(
            display_name='Recommender_Test_Course'
        )
        # Suppress signals during bulk creation for speed.
        with cls.store.bulk_operations(cls.course.id, emit_signals=False):
            cls.chapter = ItemFactory.create(
                parent=cls.course, display_name='Overview'
            )
            cls.section = ItemFactory.create(
                parent=cls.chapter, display_name='Welcome'
            )
            cls.unit = ItemFactory.create(
                parent=cls.section, display_name='New Unit'
            )
            cls.xblock = ItemFactory.create(
                parent=cls.unit,
                category='recommender',
                display_name='recommender'
            )
            cls.xblock2 = ItemFactory.create(
                parent=cls.unit,
                category='recommender',
                display_name='recommender_second'
            )
        # Courseware URL used to verify pages still render after each event.
        cls.course_url = reverse(
            'courseware_section',
            kwargs={
                'course_id': text_type(cls.course.id),
                'chapter': 'Overview',
                'section': 'Welcome',
            }
        )
        # Resource URLs double as resource ids throughout the tests.
        cls.resource_urls = [
            (
                "https://courses.edx.org/courses/MITx/3.091X/"
                "2013_Fall/courseware/SP13_Week_4/"
                "SP13_Periodic_Trends_and_Bonding/"
            ),
            (
                "https://courses.edx.org/courses/MITx/3.091X/"
                "2013_Fall/courseware/SP13_Week_4/SP13_Covalent_Bonding/"
            )
        ]
        # Canonical resource payloads, keyed by their URL/id.
        cls.test_recommendations = {
            cls.resource_urls[0]: {
                "title": "Covalent bonding and periodic trends",
                "url": cls.resource_urls[0],
                "description": (
                    "http://people.csail.mit.edu/swli/edx/"
                    "recommendation/img/videopage1.png"
                ),
                "descriptionText": (
                    "short description for Covalent bonding "
                    "and periodic trends"
                )
            },
            cls.resource_urls[1]: {
                "title": "Polar covalent bonds and electronegativity",
                "url": cls.resource_urls[1],
                "description": (
                    "http://people.csail.mit.edu/swli/edx/"
                    "recommendation/img/videopage2.png"
                ),
                "descriptionText": (
                    "short description for Polar covalent "
                    "bonds and electronegativity"
                )
            }
        }

    def setUp(self):
        # Create and activate the student accounts, then log out so each test
        # starts unauthenticated.
        super(TestRecommender, self).setUp()
        for idx, student in enumerate(self.STUDENTS):
            username = "u{}".format(idx)
            self.create_account(username, student['email'], student['password'])
            self.activate_user(student['email'])
        self.logout()
        self.staff_user = GlobalStaffFactory()

    def get_handler_url(self, handler, xblock_name=None):
        """
        Get url for the specified xblock handler
        """
        if xblock_name is None:
            xblock_name = TestRecommender.XBLOCK_NAMES[0]
        return reverse('xblock_handler', kwargs={
            'course_id': text_type(self.course.id),
            'usage_id': quote_slashes(text_type(self.course.id.make_usage_key('recommender', xblock_name))),
            'handler': handler,
            'suffix': ''
        })

    def enroll_student(self, email, password):
        """
        Student login and enroll for the course
        """
        self.login(email, password)
        self.enroll(self.course, verify=True)

    def enroll_staff(self, staff):
        """
        Staff login and enroll for the course
        """
        email = staff.email
        # GlobalStaffFactory accounts use this fixed password.
        password = 'test'
        self.login(email, password)
        self.enroll(self.course, verify=True)

    def initialize_database_by_id(self, handler, resource_id, times, xblock_name=None):
        """
        Call a ajax event (vote, delete, endorse) on a resource by its id
        several times
        """
        if xblock_name is None:
            xblock_name = TestRecommender.XBLOCK_NAMES[0]
        url = self.get_handler_url(handler, xblock_name)
        for _ in range(times):
            self.client.post(url, json.dumps({'id': resource_id}), '')

    def call_event(self, handler, resource, xblock_name=None):
        """
        Call a ajax event (add, edit, flag, etc.) by specifying the resource
        it takes
        """
        if xblock_name is None:
            xblock_name = TestRecommender.XBLOCK_NAMES[0]
        url = self.get_handler_url(handler, xblock_name)
        return self.client.post(url, json.dumps(resource), '')

    def check_event_response_by_key(self, handler, resource, resp_key, resp_val, xblock_name=None):
        """
        Call the event specified by the handler with the resource, and check
        whether the key (resp_key) in response is as expected (resp_val)
        """
        if xblock_name is None:
            xblock_name = TestRecommender.XBLOCK_NAMES[0]
        resp = json.loads(self.call_event(handler, resource, xblock_name).content)
        self.assertEqual(resp[resp_key], resp_val)
        # Also verify the courseware page still renders after the event.
        self.assert_request_status_code(200, self.course_url)

    def check_event_response_by_http_status(self, handler, resource, http_status_code, xblock_name=None):
        """
        Call the event specified by the handler with the resource, and check
        whether the http_status in response is as expected
        """
        if xblock_name is None:
            xblock_name = TestRecommender.XBLOCK_NAMES[0]
        resp = self.call_event(handler, resource, xblock_name)
        self.assertEqual(resp.status_code, http_status_code)
        # Also verify the courseware page still renders after the event.
        self.assert_request_status_code(200, self.course_url)
@attr(shard=1)
class TestRecommenderCreateFromEmpty(TestRecommender):
    """
    Check whether we can add resources to an empty database correctly
    """
    def test_add_resource(self):
        """
        Verify the addition of new resource is handled correctly
        """
        self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'])
        # Check whether adding new resource is successful
        for resource_id, resource in self.test_recommendations.iteritems():
            for xblock_name in self.XBLOCK_NAMES:
                result = self.call_event('add_resource', resource, xblock_name)
                # A fresh resource starts with zero votes and echoes back its
                # id plus every submitted field.
                expected_result = {
                    'upvotes': 0,
                    'downvotes': 0,
                    'id': resource_id
                }
                for field in resource:
                    expected_result[field] = resource[field]
                self.assertDictEqual(json.loads(result.content), expected_result)
                self.assert_request_status_code(200, self.course_url)
@attr(shard=1)
class TestRecommenderResourceBase(TestRecommender):
    """Base helper class for tests with resources."""
    def setUp(self):
        super(TestRecommenderResourceBase, self).setUp()
        # Resource ids are the resource URLs themselves.
        self.resource_id = self.resource_urls[0]
        self.resource_id_second = self.resource_urls[1]
        self.non_existing_resource_id = 'An non-existing id'
        self.set_up_resources()

    def set_up_resources(self):
        """
        Set up resources and enroll staff
        """
        self.logout()
        self.enroll_staff(self.staff_user)
        # Add resources, assume correct here, tested in test_add_resource
        for resource, xblock_name in itertools.product(self.test_recommendations.values(), self.XBLOCK_NAMES):
            self.call_event('add_resource', resource, xblock_name)

    def generate_edit_resource(self, resource_id):
        """
        Based on the given resource (specified by resource_id), this function
        generate a new one for testing 'edit_resource' event
        """
        resource = {"id": resource_id}
        # Append " edited" to every field of the first fixture resource so
        # the edit payload differs from the stored values.
        edited_recommendations = {
            key: value + " edited" for key, value in self.test_recommendations[self.resource_id].iteritems()
        }
        resource.update(edited_recommendations)
        return resource
@attr(shard=1)
class TestRecommenderWithResources(TestRecommenderResourceBase):
    """
    Check whether we can add/edit/flag/export resources correctly
    """
    def test_add_redundant_resource(self):
        """
        Verify the addition of a redundant resource (url) is rejected
        """
        # Suffix variants ('#...' fragment and its %-encoding) must still be
        # recognized as the same URL -> HTTP 409 Conflict.
        for suffix in ['', '#IAmSuffix', '%23IAmSuffix']:
            resource = deepcopy(self.test_recommendations[self.resource_id])
            resource['url'] += suffix
            self.check_event_response_by_http_status('add_resource', resource, 409)

    def test_add_removed_resource(self):
        """
        Verify the addition of a removed resource (url) is rejected
        """
        self.call_event('remove_resource', {"id": self.resource_id, 'reason': ''})
        # Re-adding a removed resource (any suffix variant) -> 405.
        for suffix in ['', '#IAmSuffix', '%23IAmSuffix']:
            resource = deepcopy(self.test_recommendations[self.resource_id])
            resource['url'] += suffix
            self.check_event_response_by_http_status('add_resource', resource, 405)

    def test_edit_resource_non_existing(self):
        """
        Edit a non-existing resource
        """
        self.check_event_response_by_http_status(
            'edit_resource',
            self.generate_edit_resource(self.non_existing_resource_id),
            400
        )

    def test_edit_redundant_resource(self):
        """
        Check whether changing the url to the one of 'another' resource is
        rejected
        """
        for suffix in ['', '#IAmSuffix', '%23IAmSuffix']:
            resource = self.generate_edit_resource(self.resource_id)
            resource['url'] = self.resource_id_second + suffix
            self.check_event_response_by_http_status('edit_resource', resource, 409)

    def test_edit_removed_resource(self):
        """
        Check whether changing the url to the one of a removed resource is
        rejected
        """
        self.call_event('remove_resource', {"id": self.resource_id_second, 'reason': ''})
        for suffix in ['', '#IAmSuffix', '%23IAmSuffix']:
            resource = self.generate_edit_resource(self.resource_id)
            resource['url'] = self.resource_id_second + suffix
            self.check_event_response_by_http_status('edit_resource', resource, 405)

    def test_edit_resource(self):
        """
        Check whether changing the content of resource is successful
        """
        self.check_event_response_by_http_status(
            'edit_resource',
            self.generate_edit_resource(self.resource_id),
            200
        )

    def test_edit_resource_same_url(self):
        """
        Check whether changing the content (except for url) of resource is successful
        """
        resource = self.generate_edit_resource(self.resource_id)
        # Keeping the same URL (modulo suffix) must not trip the redundancy
        # check.
        for suffix in ['', '#IAmSuffix', '%23IAmSuffix']:
            resource['url'] = self.resource_id + suffix
            self.check_event_response_by_http_status('edit_resource', resource, 200)

    def test_edit_then_add_resource(self):
        """
        Check whether we can add back an edited resource
        """
        # Editing frees the original URL, so adding the original payload
        # should succeed again.
        self.call_event('edit_resource', self.generate_edit_resource(self.resource_id))
        # Test
        self.check_event_response_by_key(
            'add_resource',
            self.test_recommendations[self.resource_id],
            'id',
            self.resource_id
        )

    def test_edit_resources_in_different_xblocks(self):
        """
        Check whether changing the content of resource is successful in two
        different xblocks
        """
        resource = self.generate_edit_resource(self.resource_id)
        for xblock_name in self.XBLOCK_NAMES:
            self.check_event_response_by_http_status('edit_resource', resource, 200, xblock_name)

    def test_flag_resource_wo_reason(self):
        """
        Flag a resource as problematic, without providing the reason
        """
        resource = {'id': self.resource_id, 'isProblematic': True, 'reason': ''}
        # Test
        self.check_event_response_by_key('flag_resource', resource, 'reason', '')

    def test_flag_resource_w_reason(self):
        """
        Flag a resource as problematic, with providing the reason
        """
        resource = {'id': self.resource_id, 'isProblematic': True, 'reason': 'reason 0'}
        # Test
        self.check_event_response_by_key('flag_resource', resource, 'reason', 'reason 0')

    def test_flag_resource_change_reason(self):
        """
        Flag a resource as problematic twice, with different reasons
        """
        resource = {'id': self.resource_id, 'isProblematic': True, 'reason': 'reason 0'}
        self.call_event('flag_resource', resource)
        # Test
        resource['reason'] = 'reason 1'
        resp = json.loads(self.call_event('flag_resource', resource).content)
        # The response reports both the superseded and the new reason.
        self.assertEqual(resp['oldReason'], 'reason 0')
        self.assertEqual(resp['reason'], 'reason 1')
        self.assert_request_status_code(200, self.course_url)

    def test_flag_resources_in_different_xblocks(self):
        """
        Flag resources as problematic in two different xblocks
        """
        resource = {'id': self.resource_id, 'isProblematic': True, 'reason': 'reason 0'}
        # Test
        for xblock_name in self.XBLOCK_NAMES:
            self.check_event_response_by_key('flag_resource', resource, 'reason', 'reason 0', xblock_name)

    def test_flag_resources_by_different_users(self):
        """
        Different users can't see the flag result of each other
        """
        resource = {'id': self.resource_id, 'isProblematic': True, 'reason': 'reason 0'}
        self.call_event('flag_resource', resource)
        self.logout()
        self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'])
        # Test
        resp = json.loads(self.call_event('flag_resource', resource).content)
        # The second user won't see the reason provided by the first user
        self.assertNotIn('oldReason', resp)
        self.assertEqual(resp['reason'], 'reason 0')
        self.assert_request_status_code(200, self.course_url)

    def test_export_resources(self):
        """
        Test the function for exporting all resources from the Recommender.
        """
        self.call_event('remove_resource', {"id": self.resource_id, 'reason': ''})
        self.call_event('endorse_resource', {"id": self.resource_id_second, 'reason': ''})
        # Test
        resp = json.loads(self.call_event('export_resources', {}).content)
        # Removed resources leave 'recommendations' and land in
        # 'removed_recommendations'; endorsed ids are listed separately.
        self.assertIn(self.resource_id_second, resp['export']['recommendations'])
        self.assertNotIn(self.resource_id, resp['export']['recommendations'])
        self.assertIn(self.resource_id_second, resp['export']['endorsed_recommendation_ids'])
        self.assertIn(self.resource_id, resp['export']['removed_recommendations'])
        self.assert_request_status_code(200, self.course_url)
@attr(shard=1)
@ddt
class TestRecommenderVoteWithResources(TestRecommenderResourceBase):
    """
    Check whether we can vote resources correctly
    """
    @data(
        {'event': 'recommender_upvote'},
        {'event': 'recommender_downvote'}
    )
    def test_vote_resource_non_existing(self, test_case):
        """
        Vote a non-existing resource
        """
        resource = {"id": self.non_existing_resource_id, 'event': test_case['event']}
        self.check_event_response_by_http_status('handle_vote', resource, 400)

    @data(
        {'event': 'recommender_upvote', 'new_votes': 1},
        {'event': 'recommender_downvote', 'new_votes': -1}
    )
    def test_vote_resource_once(self, test_case):
        """
        Vote a resource
        """
        resource = {"id": self.resource_id, 'event': test_case['event']}
        self.check_event_response_by_key('handle_vote', resource, 'newVotes', test_case['new_votes'])

    @data(
        {'event': 'recommender_upvote', 'new_votes': 0},
        {'event': 'recommender_downvote', 'new_votes': 0}
    )
    def test_vote_resource_twice(self, test_case):
        """
        Vote a resource twice
        """
        # Voting the same way twice toggles the vote off (back to 0).
        resource = {"id": self.resource_id, 'event': test_case['event']}
        self.call_event('handle_vote', resource)
        # Test
        self.check_event_response_by_key('handle_vote', resource, 'newVotes', test_case['new_votes'])

    @data(
        {'event': 'recommender_upvote', 'new_votes': 1},
        {'event': 'recommender_downvote', 'new_votes': -1}
    )
    def test_vote_resource_thrice(self, test_case):
        """
        Vote a resource thrice
        """
        # Odd number of identical votes ends with the vote applied.
        resource = {"id": self.resource_id, 'event': test_case['event']}
        for _ in range(2):
            self.call_event('handle_vote', resource)
        # Test
        self.check_event_response_by_key('handle_vote', resource, 'newVotes', test_case['new_votes'])

    @data(
        {'event': 'recommender_upvote', 'event_second': 'recommender_downvote', 'new_votes': -1},
        {'event': 'recommender_downvote', 'event_second': 'recommender_upvote', 'new_votes': 1}
    )
    def test_switch_vote_resource(self, test_case):
        """
        Switch the vote of a resource
        """
        # Switching direction replaces the old vote (net swing of 2).
        resource = {"id": self.resource_id, 'event': test_case['event']}
        self.call_event('handle_vote', resource)
        # Test
        resource['event'] = test_case['event_second']
        self.check_event_response_by_key('handle_vote', resource, 'newVotes', test_case['new_votes'])

    @data(
        {'event': 'recommender_upvote', 'new_votes': 1},
        {'event': 'recommender_downvote', 'new_votes': -1}
    )
    def test_vote_different_resources(self, test_case):
        """
        Vote two different resources
        """
        # Votes are tracked per resource; the second resource starts fresh.
        resource = {"id": self.resource_id, 'event': test_case['event']}
        self.call_event('handle_vote', resource)
        # Test
        resource['id'] = self.resource_id_second
        self.check_event_response_by_key('handle_vote', resource, 'newVotes', test_case['new_votes'])

    @data(
        {'event': 'recommender_upvote', 'new_votes': 1},
        {'event': 'recommender_downvote', 'new_votes': -1}
    )
    def test_vote_resources_in_different_xblocks(self, test_case):
        """
        Vote two resources in two different xblocks
        """
        # Votes are tracked per XBlock instance as well.
        resource = {"id": self.resource_id, 'event': test_case['event']}
        self.call_event('handle_vote', resource)
        # Test
        self.check_event_response_by_key(
            'handle_vote', resource, 'newVotes', test_case['new_votes'], self.XBLOCK_NAMES[1]
        )

    @data(
        {'event': 'recommender_upvote', 'new_votes': 2},
        {'event': 'recommender_downvote', 'new_votes': -2}
    )
    def test_vote_resource_by_different_users(self, test_case):
        """
        Vote resource by two different users
        """
        # Two distinct users voting the same way accumulate (+/-2).
        resource = {"id": self.resource_id, 'event': test_case['event']}
        self.call_event('handle_vote', resource)
        self.logout()
        self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'])
        # Test
        self.check_event_response_by_key('handle_vote', resource, 'newVotes', test_case['new_votes'])
@attr(shard=1)
@ddt
class TestRecommenderStaffFeedbackWithResources(TestRecommenderResourceBase):
    """
    Check whether we can remove/endorse resources correctly
    """
    @data('remove_resource', 'endorse_resource')
    def test_remove_or_endorse_resource_non_existing(self, test_case):
        """
        Remove/endorse a non-existing resource
        """
        resource = {"id": self.non_existing_resource_id, 'reason': ''}
        self.check_event_response_by_http_status(test_case, resource, 400)

    @data(
        {'times': 1, 'key': 'status', 'val': 'endorsement'},
        {'times': 2, 'key': 'status', 'val': 'undo endorsement'},
        {'times': 3, 'key': 'status', 'val': 'endorsement'}
    )
    def test_endorse_resource_multiple_times(self, test_case):
        """
        Endorse a resource once/twice/thrice
        """
        # Endorsement toggles: odd call count endorses, even undoes.
        resource = {"id": self.resource_id, 'reason': ''}
        for _ in range(test_case['times'] - 1):
            self.call_event('endorse_resource', resource)
        # Test
        self.check_event_response_by_key('endorse_resource', resource, test_case['key'], test_case['val'])

    @data(
        {'times': 1, 'status': 200},
        {'times': 2, 'status': 400},
        {'times': 3, 'status': 400}
    )
    def test_remove_resource_multiple_times(self, test_case):
        """
        Remove a resource once/twice/thrice
        """
        # Removal is one-shot: subsequent removals of the same id -> 400.
        resource = {"id": self.resource_id, 'reason': ''}
        for _ in range(test_case['times'] - 1):
            self.call_event('remove_resource', resource)
        # Test
        self.check_event_response_by_http_status('remove_resource', resource, test_case['status'])

    @data(
        {'handler': 'remove_resource', 'status': 200},
        {'handler': 'endorse_resource', 'key': 'status', 'val': 'endorsement'}
    )
    def test_remove_or_endorse_different_resources(self, test_case):
        """
        Remove/endorse two different resources
        """
        self.call_event(test_case['handler'], {"id": self.resource_id, 'reason': ''})
        # Test
        resource = {"id": self.resource_id_second, 'reason': ''}
        if test_case['handler'] == 'remove_resource':
            self.check_event_response_by_http_status(test_case['handler'], resource, test_case['status'])
        else:
            self.check_event_response_by_key(test_case['handler'], resource, test_case['key'], test_case['val'])

    @data(
        {'handler': 'remove_resource', 'status': 200},
        {'handler': 'endorse_resource', 'key': 'status', 'val': 'endorsement'}
    )
    def test_remove_or_endorse_resources_in_different_xblocks(self, test_case):
        """
        Remove/endorse two resources in two different xblocks
        """
        # Same resource id, different XBlock: state is per-block, so the
        # second block's action behaves like a first-time action.
        self.call_event(test_case['handler'], {"id": self.resource_id, 'reason': ''})
        # Test
        resource = {"id": self.resource_id, 'reason': ''}
        if test_case['handler'] == 'remove_resource':
            self.check_event_response_by_http_status(
                test_case['handler'], resource, test_case['status'], self.XBLOCK_NAMES[1]
            )
        else:
            self.check_event_response_by_key(
                test_case['handler'], resource, test_case['key'], test_case['val'], self.XBLOCK_NAMES[1]
            )

    @data(
        {'handler': 'remove_resource', 'status': 400},
        {'handler': 'endorse_resource', 'status': 400}
    )
    def test_remove_or_endorse_resource_by_student(self, test_case):
        """
        Remove/endorse resource by a student
        """
        # Staff-only operations must be rejected for ordinary students.
        self.logout()
        self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'])
        # Test
        resource = {"id": self.resource_id, 'reason': ''}
        self.check_event_response_by_http_status(test_case['handler'], resource, test_case['status'])
@attr(shard=1)
@ddt
class TestRecommenderFileUploading(TestRecommender):
    """
    Check whether we can handle file uploading correctly
    """
    def setUp(self):
        super(TestRecommenderFileUploading, self).setUp()
        # Minimal valid import payload for the 'import_resources' handler.
        self.initial_configuration = {
            'flagged_accum_resources': {},
            'endorsed_recommendation_reasons': [],
            'endorsed_recommendation_ids': [],
            'removed_recommendations': {},
            'recommendations': self.test_recommendations[self.resource_urls[0]]
        }

    def attempt_upload_file_and_verify_result(self, test_case, event_name, content=None):
        """
        Running on a test case, creating a temp file, uploading it by
        calling the corresponding ajax event, and verifying that upload
        happens or is rejected as expected.
        """
        # Build the in-memory file: either raw bytes decoded from the hex
        # 'magic_number' (to fake a binary signature), or serialized JSON
        # content, or an empty file.
        if 'magic_number' in test_case:
            f_handler = StringIO.StringIO(test_case['magic_number'].decode('hex'))
        elif content is not None:
            f_handler = StringIO.StringIO(json.dumps(content, sort_keys=True))
        else:
            f_handler = StringIO.StringIO('')
        # Fake the attributes Django reads off an uploaded file object.
        f_handler.content_type = test_case['mimetypes']
        f_handler.name = 'file' + test_case['suffixes']
        url = self.get_handler_url(event_name)
        resp = self.client.post(url, {'file': f_handler})
        self.assertEqual(resp.status_code, test_case['status'])

    @data(
        {
            'suffixes': '.csv',
            'magic_number': 'ffff',
            'mimetypes': 'text/plain',
            'status': 415
        },  # Upload file with wrong extension name
        {
            'suffixes': '.gif',
            'magic_number': '89504e470d0a1a0a',
            'mimetypes': 'image/gif',
            'status': 415
        },  # Upload file with wrong magic number
        {
            'suffixes': '.jpg',
            'magic_number': '89504e470d0a1a0a',
            'mimetypes': 'image/jpeg',
            'status': 415
        },  # Upload file with wrong magic number
        {
            'suffixes': '.png',
            'magic_number': '474946383761',
            'mimetypes': 'image/png',
            'status': 415
        },  # Upload file with wrong magic number
        {
            'suffixes': '.jpg',
            'magic_number': '474946383761',
            'mimetypes': 'image/jpeg',
            'status': 415
        },  # Upload file with wrong magic number
        {
            'suffixes': '.png',
            'magic_number': 'ffd8ffd9',
            'mimetypes': 'image/png',
            'status': 415
        },  # Upload file with wrong magic number
        {
            'suffixes': '.gif',
            'magic_number': 'ffd8ffd9',
            'mimetypes': 'image/gif',
            'status': 415
        }
    )
    def test_upload_screenshot_wrong_file_type(self, test_case):
        """
        Verify the file uploading fails correctly when file with wrong type
        (extension/magic number) is provided
        """
        self.enroll_staff(self.staff_user)
        # Upload file with wrong extension name or magic number
        self.attempt_upload_file_and_verify_result(test_case, 'upload_screenshot')

    @data(
        {
            'suffixes': '.png',
            'magic_number': '89504e470d0a1a0a',
            'mimetypes': 'image/png',
            'status': 200
        },
        {
            'suffixes': '.gif',
            'magic_number': '474946383961',
            'mimetypes': 'image/gif',
            'status': 200
        },
        {
            'suffixes': '.gif',
            'magic_number': '474946383761',
            'mimetypes': 'image/gif',
            'status': 200
        },
        {
            'suffixes': '.jpg',
            'magic_number': 'ffd8ffd9',
            'mimetypes': 'image/jpeg',
            'status': 200
        }
    )
    def test_upload_screenshot_correct_file_type(self, test_case):
        """
        Verify the file type checking in the file uploading method is
        successful.
        """
        self.enroll_staff(self.staff_user)
        # Upload file with correct extension name and magic number
        self.attempt_upload_file_and_verify_result(test_case, 'upload_screenshot')

    @data(
        {
            'suffixes': '.json',
            'mimetypes': 'application/json',
            'status': 403
        }
    )
    def test_import_resources_by_student(self, test_case):
        """
        Test the function for importing all resources into the Recommender
        by a student.
        """
        # Import is staff-only; a student upload must be forbidden (403).
        self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'])
        self.attempt_upload_file_and_verify_result(test_case, 'import_resources', self.initial_configuration)

    @data(
        {
            'suffixes': '.csv',
            'mimetypes': 'application/json',
            'status': 415
        },  # Upload file with wrong extension name
        {
            'suffixes': '.json',
            'mimetypes': 'application/json',
            'status': 200
        }
    )
    def test_import_resources(self, test_case):
        """
        Test the function for importing all resources into the Recommender.
        """
        self.enroll_staff(self.staff_user)
        self.attempt_upload_file_and_verify_result(test_case, 'import_resources', self.initial_configuration)

    @data(
        {
            'suffixes': '.json',
            'mimetypes': 'application/json',
            'status': 415
        }
    )
    def test_import_resources_wrong_format(self, test_case):
        """
        Test the function for importing empty dictionary into the Recommender.
        This should fire an error.
        """
        self.enroll_staff(self.staff_user)
        self.attempt_upload_file_and_verify_result(test_case, 'import_resources', {})
|
stephane-martin/salt-debian-packaging
|
refs/heads/master
|
salt-2016.3.3/tests/unit/modules/win_disk_test.py
|
2
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Jayesh Kariya <jayeshk@saltstack.com>`
'''
# Import Python Libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
NO_MOCK,
NO_MOCK_REASON
)
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import Salt Libs
from salt.modules import win_disk
class MockKernel32(object):
    '''
    Stand-in for ctypes.windll.kernel32 used by win_disk.
    '''

    def __init__(self):
        pass

    @staticmethod
    def GetLogicalDrives():
        '''
        Mocked GetLogicalDrives: report a drive bitmask of 1 (only A:\\ set).
        '''
        return 1
class MockWindll(object):
    '''
    Mock windll class
    '''
    def __init__(self):
        # Expose a fake kernel32 so win_disk's
        # ctypes.windll.kernel32.GetLogicalDrives() chain resolves.
        self.kernel32 = MockKernel32()
class MockCtypes(object):
    '''
    Mock ctypes class
    '''
    def __init__(self):
        # Mirror the attribute chain win_disk uses: ctypes.windll.kernel32.
        self.windll = MockWindll()


# Patch the module under test to use the mock instead of real ctypes
# (ctypes.windll only exists on Windows).
win_disk.ctypes = MockCtypes()
@skipIf(NO_MOCK, NO_MOCK_REASON)
class WinDiskTestCase(TestCase):
    '''
    Test cases for salt.modules.win_disk
    '''
    # 'usage' function tests: 1

    def test_usage(self):
        '''
        Test if it return usage information for volumes mounted on this minion.
        '''
        # The mocked GetLogicalDrives bitmask of 1 means only drive A: is
        # present, so usage() should report a single 'A:\\' entry with no
        # size data.
        self.assertDictEqual(win_disk.usage(),
                             {'A:\\': {'available': None,
                                       '1K-blocks': None,
                                       'used': None,
                                       'capacity': None,
                                       'filesystem': 'A:\\'}})
if __name__ == '__main__':
    # Allow running this module directly through Salt's legacy test runner.
    from integration import run_tests
    run_tests(WinDiskTestCase, needs_daemon=False)
|
salguarnieri/intellij-community
|
refs/heads/master
|
python/testData/quickFixes/PyAugmentAssignmentQuickFixTest/references_after.py
|
83
|
# IDE quick-fix test data: expected file contents after the
# "convert to augmented assignment" fix rewrites `v = v + f`.
v = 0
f = 1
v += f
|
trabucayre/gnuradio
|
refs/heads/master
|
gr-blocks/python/blocks/parse_file_metadata.py
|
1
|
#!/usr/bin/env python
#
# Copyright 2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
import sys
from gnuradio import gr, blocks
import pmt
'''
sr Sample rate (samples/second)
time Time as uint64(secs), double(fractional secs)
type Type of data (see gr_file_types enum)
cplx is complex? (True or False)
strt Start of data (or size of header) in bytes
size Size of data in bytes
'''
# Size in bytes of the fixed (non-extra) metadata header.
HEADER_LENGTH = blocks.METADATA_HEADER_SIZE
# Map gr_file_types enum value -> human-readable type name.
ftype_to_string = {blocks.GR_FILE_BYTE: "bytes",
                   blocks.GR_FILE_SHORT: "short",
                   blocks.GR_FILE_INT: "int",
                   blocks.GR_FILE_LONG: "long",
                   blocks.GR_FILE_LONG_LONG: "long long",
                   blocks.GR_FILE_FLOAT: "float",
                   blocks.GR_FILE_DOUBLE: "double" }
# Map gr_file_types enum value -> per-item size in bytes.
ftype_to_size = {blocks.GR_FILE_BYTE: gr.sizeof_char,
                 blocks.GR_FILE_SHORT: gr.sizeof_short,
                 blocks.GR_FILE_INT: gr.sizeof_int,
                 blocks.GR_FILE_LONG: gr.sizeof_int,
                 blocks.GR_FILE_LONG_LONG: 2*gr.sizeof_int,
                 blocks.GR_FILE_FLOAT: gr.sizeof_float,
                 blocks.GR_FILE_DOUBLE: gr.sizeof_double}
def parse_header(p, VERBOSE=False):
    """Parse a file-metadata header PMT dictionary into a plain dict.

    Args:
        p: PMT dictionary read from a GNU Radio metadata file header.
        VERBOSE: when True, print each extracted field to stdout.

    Returns:
        dict with keys 'rx_rate', 'rx_time', 'size', 'type', 'cplx',
        'hdr_len', 'extra_len', 'has_extra', 'nitems' and 'nbytes'.

    Exits the process with status 1 if ``p`` is not a PMT dictionary or
    any required key is missing.
    """
    dump = pmt.PMT_NIL
    info = dict()
    if(pmt.is_dict(p) is False):
        sys.stderr.write("Header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)
    # GET FILE FORMAT VERSION NUMBER
    # (Only validated/printed; not stored in `info`.)
    if(pmt.dict_has_key(p, pmt.string_to_symbol("version"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("version"), dump)
        version = pmt.to_long(r)
        if(VERBOSE):
            print("Version Number: {0}".format(version))
    else:
        sys.stderr.write("Could not find key 'version': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT SAMPLE RATE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("rx_rate"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_rate"), dump)
        samp_rate = pmt.to_double(r)
        info["rx_rate"] = samp_rate
        if(VERBOSE):
            print("Sample Rate: {0:.2f} sps".format(samp_rate))
    else:
        # BUGFIX: the message used to say key 'sr'; the key looked up is
        # 'rx_rate'.
        sys.stderr.write("Could not find key 'rx_rate': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT TIME STAMP (uint64 seconds + double fractional seconds)
    if(pmt.dict_has_key(p, pmt.string_to_symbol("rx_time"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("rx_time"), dump)
        secs = pmt.tuple_ref(r, 0)
        fracs = pmt.tuple_ref(r, 1)
        secs = float(pmt.to_uint64(secs))
        fracs = pmt.to_double(fracs)
        t = secs + fracs
        info["rx_time"] = t
        if(VERBOSE):
            print("Seconds: {0:.6f}".format(t))
    else:
        # BUGFIX: message named 'time' while the key checked is 'rx_time'.
        sys.stderr.write("Could not find key 'rx_time': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT ITEM SIZE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("size"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("size"), dump)
        dsize = pmt.to_long(r)
        info["size"] = dsize
        if(VERBOSE):
            print("Item size: {0}".format(dsize))
    else:
        sys.stderr.write("Could not find key 'size': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT DATA TYPE
    if(pmt.dict_has_key(p, pmt.string_to_symbol("type"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("type"), dump)
        dtype = pmt.to_long(r)
        stype = ftype_to_string[dtype]
        info["type"] = stype
        if(VERBOSE):
            print("Data Type: {0} ({1})".format(stype, dtype))
    else:
        sys.stderr.write("Could not find key 'type': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT COMPLEX
    if(pmt.dict_has_key(p, pmt.string_to_symbol("cplx"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("cplx"), dump)
        cplx = pmt.to_bool(r)
        info["cplx"] = cplx
        if(VERBOSE):
            print("Complex? {0}".format(cplx))
    else:
        sys.stderr.write("Could not find key 'cplx': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT WHERE CURRENT SEGMENT STARTS
    if(pmt.dict_has_key(p, pmt.string_to_symbol("strt"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("strt"), dump)
        seg_start = pmt.to_uint64(r)
        info["hdr_len"] = seg_start
        info["extra_len"] = seg_start - HEADER_LENGTH
        info["has_extra"] = info["extra_len"] > 0
        if(VERBOSE):
            print("Header Length: {0} bytes".format(info["hdr_len"]))
            print("Extra Length: {0}".format((info["extra_len"])))
            print("Extra Header? {0}".format(info["has_extra"]))
    else:
        sys.stderr.write("Could not find key 'strt': invalid or corrupt data file.\n")
        sys.exit(1)
    # EXTRACT SIZE OF DATA
    if(pmt.dict_has_key(p, pmt.string_to_symbol("bytes"))):
        r = pmt.dict_ref(p, pmt.string_to_symbol("bytes"), dump)
        nbytes = pmt.to_uint64(r)
        # BUGFIX: use floor division -- '/' yields a float item count
        # under Python 3.
        nitems = nbytes // dsize
        info["nitems"] = nitems
        info["nbytes"] = nbytes
        if(VERBOSE):
            print("Size of Data: {0} bytes".format(nbytes))
            print("              {0} items".format(nitems))
    else:
        # BUGFIX: message named 'size' while the key checked is 'bytes'.
        sys.stderr.write("Could not find key 'bytes': invalid or corrupt data file.\n")
        sys.exit(1)
    return info
# IF THERE IS EXTRA DATA, PULL OUT THE DICTIONARY AND PARSE IT
def parse_extra_dict(p, info, VERBOSE=False):
    """Merge every key/value pair of the extra-header PMT dict into ``info``.

    Values are stored as raw PMTs; exits with status 1 if ``p`` is not a
    PMT dictionary.  Returns the updated ``info`` dict.
    """
    if(pmt.is_dict(p) is False):
        sys.stderr.write("Extra header is not a PMT dictionary: invalid or corrupt data file.\n")
        sys.exit(1)
    pair_list = pmt.dict_items(p)
    for idx in range(pmt.length(pair_list)):
        pair = pmt.nth(idx, pair_list)
        name = pmt.symbol_to_string(pmt.car(pair))
        value = pmt.cdr(pair)
        info[name] = value
        if(VERBOSE):
            print("{0}: {1}".format(name, value))
    return info
|
hbrunn/OCB
|
refs/heads/8.0
|
openerp/addons/base/tests/test_acl.py
|
338
|
import unittest2
from lxml import etree
import openerp
from openerp.tools.misc import mute_logger
from openerp.tests import common
# Test group that the demo user must NOT belong to at the start of each
# test; membership is toggled to verify group-based field restrictions.
GROUP_TECHNICAL_FEATURES = 'base.group_no_one'
class TestACL(common.TransactionCase):
    """Field-level access control tests: a field restricted to a group must
    disappear from fields_get/fields_view_get/read/write for users outside
    that group, and reappear once they join it."""
    def setUp(self):
        super(TestACL, self).setUp()
        self.res_currency = self.registry('res.currency')
        self.res_partner = self.registry('res.partner')
        self.res_users = self.registry('res.users')
        # uid of the demo user, resolved through ir.model.data.
        _, self.demo_uid = self.registry('ir.model.data').get_object_reference(self.cr, self.uid, 'base', 'user_demo')
        self.tech_group = self.registry('ir.model.data').get_object(self.cr, self.uid,
                                                                    *(GROUP_TECHNICAL_FEATURES.split('.')))
    def _set_field_groups(self, model, field_name, groups):
        """Temporarily restrict ``field_name`` on ``model`` to ``groups``.

        Also disables prefetch on the column so the restriction is actually
        exercised; everything is restored via addCleanup."""
        field = model._fields[field_name]
        column = model._columns[field_name]
        old_groups = field.groups
        old_prefetch = column._prefetch
        field.groups = groups
        column.groups = groups
        column._prefetch = False
        @self.addCleanup
        def cleanup():
            field.groups = old_groups
            column.groups = old_groups
            column._prefetch = old_prefetch
    def test_field_visibility_restriction(self):
        """Check that model-level ``groups`` parameter effectively restricts access to that
        field for users who do not belong to one of the explicitly allowed groups"""
        # Verify the test environment first
        original_fields = self.res_currency.fields_get(self.cr, self.demo_uid, [])
        form_view = self.res_currency.fields_view_get(self.cr, self.demo_uid, False, 'form')
        view_arch = etree.fromstring(form_view.get('arch'))
        has_tech_feat = self.res_users.has_group(self.cr, self.demo_uid, GROUP_TECHNICAL_FEATURES)
        self.assertFalse(has_tech_feat, "`demo` user should not belong to the restricted group before the test")
        self.assertTrue('accuracy' in original_fields, "'accuracy' field must be properly visible before the test")
        self.assertNotEquals(view_arch.xpath("//field[@name='accuracy']"), [],
                             "Field 'accuracy' must be found in view definition before the test")
        # restrict access to the field and check it's gone
        self._set_field_groups(self.res_currency, 'accuracy', GROUP_TECHNICAL_FEATURES)
        fields = self.res_currency.fields_get(self.cr, self.demo_uid, [])
        form_view = self.res_currency.fields_view_get(self.cr, self.demo_uid, False, 'form')
        view_arch = etree.fromstring(form_view.get('arch'))
        self.assertFalse('accuracy' in fields, "'accuracy' field should be gone")
        self.assertEquals(view_arch.xpath("//field[@name='accuracy']"), [],
                          "Field 'accuracy' must not be found in view definition")
        # Make demo user a member of the restricted group and check that the field is back
        self.tech_group.write({'users': [(4, self.demo_uid)]})
        has_tech_feat = self.res_users.has_group(self.cr, self.demo_uid, GROUP_TECHNICAL_FEATURES)
        fields = self.res_currency.fields_get(self.cr, self.demo_uid, [])
        form_view = self.res_currency.fields_view_get(self.cr, self.demo_uid, False, 'form')
        view_arch = etree.fromstring(form_view.get('arch'))
        #import pprint; pprint.pprint(fields); pprint.pprint(form_view)
        self.assertTrue(has_tech_feat, "`demo` user should now belong to the restricted group")
        self.assertTrue('accuracy' in fields, "'accuracy' field must be properly visible again")
        self.assertNotEquals(view_arch.xpath("//field[@name='accuracy']"), [],
                             "Field 'accuracy' must be found in view definition again")
        #cleanup: remove demo user from the group again ((3, id) = unlink)
        self.tech_group.write({'users': [(3, self.demo_uid)]})
    @mute_logger('openerp.models')
    def test_field_crud_restriction(self):
        "Read/Write RPC access to restricted field should be forbidden"
        # Verify the test environment first
        has_tech_feat = self.res_users.has_group(self.cr, self.demo_uid, GROUP_TECHNICAL_FEATURES)
        self.assertFalse(has_tech_feat, "`demo` user should not belong to the restricted group")
        self.assert_(self.res_partner.read(self.cr, self.demo_uid, [1], ['bank_ids']))
        self.assert_(self.res_partner.write(self.cr, self.demo_uid, [1], {'bank_ids': []}))
        # Now restrict access to the field and check it's forbidden
        self._set_field_groups(self.res_partner, 'bank_ids', GROUP_TECHNICAL_FEATURES)
        with self.assertRaises(openerp.osv.orm.except_orm):
            self.res_partner.read(self.cr, self.demo_uid, [1], ['bank_ids'])
        with self.assertRaises(openerp.osv.orm.except_orm):
            self.res_partner.write(self.cr, self.demo_uid, [1], {'bank_ids': []})
        # Add the restricted group, and check that it works again
        self.tech_group.write({'users': [(4, self.demo_uid)]})
        has_tech_feat = self.res_users.has_group(self.cr, self.demo_uid, GROUP_TECHNICAL_FEATURES)
        self.assertTrue(has_tech_feat, "`demo` user should now belong to the restricted group")
        self.assert_(self.res_partner.read(self.cr, self.demo_uid, [1], ['bank_ids']))
        self.assert_(self.res_partner.write(self.cr, self.demo_uid, [1], {'bank_ids': []}))
        #cleanup: remove demo user from the group again ((3, id) = unlink)
        self.tech_group.write({'users': [(3, self.demo_uid)]})
    @mute_logger('openerp.models')
    def test_fields_browse_restriction(self):
        """Test access to records having restricted fields"""
        self._set_field_groups(self.res_partner, 'email', GROUP_TECHNICAL_FEATURES)
        pid = self.res_partner.search(self.cr, self.demo_uid, [], limit=1)[0]
        part = self.res_partner.browse(self.cr, self.demo_uid, pid)
        # accessing fields must no raise exceptions...
        part.name
        # ... except if they are restricted
        with self.assertRaises(openerp.osv.orm.except_orm) as cm:
            with mute_logger('openerp.models'):
                part.email
        self.assertEqual(cm.exception.args[0], 'AccessError')
if __name__ == '__main__':
    # Allow running this test module standalone with unittest2.
    unittest2.main()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
orosp/ddiskit
|
refs/heads/master
|
setup.py
|
1
|
#!/usr/bin/python
from setuptools import setup
# Packaging metadata for ddiskit.  Installs the CLI script plus templates,
# profiles, signing keys, man page and config files into system paths.
setup(
    name="ddiskit",
    version="3.6",
    author="Petr Oros",
    author_email="poros@redhat.com",
    description=("Red Hat tool for Driver Update Disk creation"),
    license="GPLv3",
    url="https://github.com/orosp/ddiskit.git",
    # (target directory, [files]) pairs copied verbatim at install time.
    data_files=[('/usr/share/bash-completion/completions', ['ddiskit']),
                ('/usr/share/ddiskit/templates',
                 ['templates/spec', 'templates/config']),
                ('/usr/share/ddiskit/profiles', ['profiles/default']),
                ('/usr/share/ddiskit/profiles',
                 ['profiles/rh-testing', 'profiles/rh-release']),
                ('/usr/share/ddiskit/keyrings/rh-release',
                 ['keyrings/rh-release/fd431d51.key']),
                ('/usr/share/ddiskit', ['ddiskit.config']),
                ('/usr/share/man/man1', ['ddiskit.1']),
                ('/etc', ['etc/ddiskit.config']),
                ],
    scripts=['bin/ddiskit'],
)
|
p4datasystems/CarnotKE
|
refs/heads/master
|
jyhton/lib-python/2.7/genericpath.py
|
246
|
"""
Path operations common to more than one OS
Do not use directly. The OS specific modules import the appropriate
functions from this module themselves.
"""
import os
import stat
__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
'getsize', 'isdir', 'isfile']
# Does a path exist?
# This is false for dangling symbolic links on systems that support them.
def exists(path):
    """Return True if *path* exists; a dangling symlink reports False."""
    try:
        os.stat(path)
        return True
    except os.error:
        return False
# This follows symbolic links, so both islink() and isdir() can be true
# for the same path ono systems that support symlinks
def isfile(path):
    """Return True if *path* names a regular file (following symlinks)."""
    try:
        mode = os.stat(path).st_mode
    except os.error:
        return False
    return stat.S_ISREG(mode)
# Is a path a directory?
# This follows symbolic links, so both islink() and isdir()
# can be true for the same path on systems that support symlinks
def isdir(s):
    """Return true if the pathname refers to an existing directory."""
    try:
        mode = os.stat(s).st_mode
    except os.error:
        return False
    return stat.S_ISDIR(mode)
def getsize(filename):
    """Return the size of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_size
def getmtime(filename):
    """Return the last modification time of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_mtime
def getatime(filename):
    """Return the last access time of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_atime
def getctime(filename):
    """Return the metadata change time of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_ctime
# Return the longest prefix of all list elements.
def commonprefix(m):
    "Given a list of pathnames, returns the longest common leading component"
    if not m:
        return ''
    # A mismatch between the lexicographic min and max bounds the common
    # prefix of every element in between.
    lo = min(m)
    hi = max(m)
    for idx, (a, b) in enumerate(zip(lo, hi)):
        if a != b:
            return lo[:idx]
    return lo
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
# Generic implementation of splitext, to be parametrized with
# the separators
def _splitext(p, sep, altsep, extsep):
"""Split the extension from a pathname.
Extension is everything from the last dot to the end, ignoring
leading dots. Returns "(root, ext)"; ext may be empty."""
sepIndex = p.rfind(sep)
if altsep:
altsepIndex = p.rfind(altsep)
sepIndex = max(sepIndex, altsepIndex)
dotIndex = p.rfind(extsep)
if dotIndex > sepIndex:
# skip all leading dots
filenameIndex = sepIndex + 1
while filenameIndex < dotIndex:
if p[filenameIndex] != extsep:
return p[:dotIndex], p[dotIndex:]
filenameIndex += 1
return p, ''
|
hazrpg/calibre
|
refs/heads/master
|
src/calibre/ebooks/oeb/transforms/htmltoc.py
|
19
|
'''
HTML-TOC-adding transform.
'''
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2008, Marshall T. Vandegrift <llasram@gmail.com>'
from calibre.ebooks.oeb.base import XML, XHTML, XHTML_NS
from calibre.ebooks.oeb.base import XHTML_MIME, CSS_MIME
from calibre.ebooks.oeb.base import element, XPath
__all__ = ['HTMLTOCAdder']
DEFAULT_TITLE = __('Table of Contents')
STYLE_CSS = {
'nested': """
.calibre_toc_header {
text-align: center;
}
.calibre_toc_block {
margin-left: 1.2em;
text-indent: -1.2em;
}
.calibre_toc_block .calibre_toc_block {
margin-left: 2.4em;
}
.calibre_toc_block .calibre_toc_block .calibre_toc_block {
margin-left: 3.6em;
}
""",
'centered': """
.calibre_toc_header {
text-align: center;
}
.calibre_toc_block {
text-align: center;
}
body > .calibre_toc_block {
margin-top: 1.2em;
}
"""
}
class HTMLTOCAdder(object):
    """Transform that builds an in-line XHTML table of contents from the
    book's TOC metadata and inserts it into the spine."""
    def __init__(self, title=None, style='nested', position='end'):
        # title: heading for the generated page (defaults to DEFAULT_TITLE);
        # style: key into STYLE_CSS; position: 'end' appends non-linearly,
        # anything else prepends as the first linear spine item.
        self.title = title
        self.style = style
        self.position = position
    @classmethod
    def config(cls, cfg):
        # Register this transform's command-line options.
        group = cfg.add_group('htmltoc', _('HTML TOC generation options.'))
        group('toc_title', ['--toc-title'], default=None,
              help=_('Title for any generated in-line table of contents.'))
        return cfg
    @classmethod
    def generate(cls, opts):
        # Alternate constructor from parsed CLI options.
        return cls(title=opts.toc_title)
    def __call__(self, oeb, context):
        has_toc = getattr(getattr(oeb, 'toc', False), 'nodes', False)
        if 'toc' in oeb.guide:
            # Ensure toc pointed to in <guide> is in spine
            from calibre.ebooks.oeb.base import urlnormalize
            href = urlnormalize(oeb.guide['toc'].href)
            if href in oeb.manifest.hrefs:
                item = oeb.manifest.hrefs[href]
                # A usable existing TOC document contains at least one link;
                # in that case just make sure it is in the spine and stop.
                if (hasattr(item.data, 'xpath') and
                    XPath('//h:a[@href]')(item.data)):
                    if oeb.spine.index(item) < 0:
                        if self.position == 'end':
                            oeb.spine.add(item, linear=False)
                        else:
                            oeb.spine.insert(0, item, linear=True)
                    return
                elif has_toc:
                    # Unusable guide entry, but we can regenerate below.
                    oeb.guide.remove('toc')
            else:
                # Guide points at a file missing from the manifest.
                oeb.guide.remove('toc')
        if not has_toc:
            return
        oeb.logger.info('Generating in-line TOC...')
        title = self.title or oeb.translate(DEFAULT_TITLE)
        style = self.style
        if style not in STYLE_CSS:
            oeb.logger.error('Unknown TOC style %r' % style)
            style = 'nested'
        # Register the stylesheet for the generated TOC document.
        id, css_href = oeb.manifest.generate('tocstyle', 'tocstyle.css')
        oeb.manifest.add(id, css_href, CSS_MIME, data=STYLE_CSS[style])
        language = str(oeb.metadata.language[0])
        contents = element(None, XHTML('html'), nsmap={None: XHTML_NS},
                           attrib={XML('lang'): language})
        head = element(contents, XHTML('head'))
        htitle = element(head, XHTML('title'))
        htitle.text = title
        element(head, XHTML('link'), rel='stylesheet', type=CSS_MIME,
                href=css_href)
        body = element(contents, XHTML('body'),
                       attrib={'class': 'calibre_toc'})
        # NOTE(review): variable is named h1 but the element emitted is <h2>.
        h1 = element(body, XHTML('h2'),
                     attrib={'class': 'calibre_toc_header'})
        h1.text = title
        self.add_toc_level(body, oeb.toc)
        id, href = oeb.manifest.generate('contents', 'contents.xhtml')
        item = oeb.manifest.add(id, href, XHTML_MIME, data=contents)
        if self.position == 'end':
            oeb.spine.add(item, linear=False)
        else:
            oeb.spine.insert(0, item, linear=True)
        oeb.guide.add('toc', 'Table of Contents', href)
    def add_toc_level(self, elem, toc):
        # Recursively emit one nested <div class="calibre_toc_block"> per
        # TOC node, each containing its link and its children.
        for node in toc:
            block = element(elem, XHTML('div'),
                            attrib={'class': 'calibre_toc_block'})
            line = element(block, XHTML('a'),
                           attrib={'href': node.href,
                                   'class': 'calibre_toc_line'})
            line.text = node.title
            self.add_toc_level(block, node)
|
google/material-design-icons
|
refs/heads/master
|
update/venv/lib/python3.9/site-packages/pip/_internal/commands/check.py
|
6
|
import logging
from optparse import Values
from typing import Any, List
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.operations.check import (
check_package_set,
create_package_set_from_installed,
)
from pip._internal.utils.misc import write_output
logger = logging.getLogger(__name__)
class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""
    usage = """
      %prog [options]"""
    def run(self, options, args):
        # type: (Values, List[Any]) -> int
        """Report missing and conflicting requirements among installed
        distributions; return ERROR if anything (including metadata that
        failed to parse) is wrong, SUCCESS otherwise."""
        package_set, parsing_probs = create_package_set_from_installed()
        missing, conflicting = check_package_set(package_set)
        # Requirements that are declared but not installed at all.
        for project_name in missing:
            version = package_set[project_name].version
            for dependency in missing[project_name]:
                write_output(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[0],
                )
        # Requirements installed at an incompatible version.
        for project_name in conflicting:
            version = package_set[project_name].version
            for dep_name, dep_version, req in conflicting[project_name]:
                write_output(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name, version, req, dep_name, dep_version,
                )
        if missing or conflicting or parsing_probs:
            return ERROR
        else:
            write_output("No broken requirements found.")
            return SUCCESS
|
Dark-Hacker/horizon
|
refs/heads/master
|
openstack_dashboard/enabled/_1840_data_processing_jobs_panel.py
|
17
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# The slug of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'data_processing.jobs'
# The slug of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'project'
# The slug of the panel group the PANEL is associated with.
PANEL_GROUP = 'data_processing'
# Python panel class of the PANEL to be added.  The two adjacent string
# literals are implicitly concatenated into a single dotted import path.
ADD_PANEL = \
    ('openstack_dashboard.contrib.sahara.'
     'content.data_processing.jobs.panel.JobsPanel')
|
txomon/SpockBot
|
refs/heads/master
|
spock/plugins/loader.py
|
1
|
"""
Provides reasonably not-awful plugin loading
"""
import logging
from spock.plugins.core.settings import SettingsPlugin
logger = logging.getLogger('spock')
# Warning templates; the %s slots are (softness, ident[, extra detail]).
base_warn = "PluginLoader could not satisfy %s dependency for %s"
pl_warn = base_warn + ": %s"
class PluginLoader(object):
    """Resolves plugin dependencies and instantiates plugins.

    Plugins may declare ``pl_announce`` (idents they can provide) and
    ``pl_event`` (event idents they emit).  Instantiating a plugin is what
    registers its extensions via :meth:`provides`.
    """
    def __init__(self, **kwargs):
        self.announce = {}    # ident -> plugin class able to provide it
        self.extensions = {}  # ident -> provided extension object
        self.events = []      # event idents announced by plugins
        kwargs.get('settings_mixin', SettingsPlugin)(self, kwargs)
        self.fetch = self.requires('PloaderFetch')
        self.plugins = self.fetch.get_plugins()
        # Index everything the plugins announce before instantiating any,
        # so requires() can resolve dependencies on demand.
        for plugin in self.plugins:
            if hasattr(plugin, 'pl_announce'):
                for ident in plugin.pl_announce:
                    self.announce[ident] = plugin
            if hasattr(plugin, 'pl_event'):
                for ident in plugin.pl_event:
                    self.events.append(ident)
        event = self.requires('Event')
        self.reg_event_handler = event.reg_event_handler if event else None
        # Instantiate the remaining plugins; requires() may have popped
        # some of them early to satisfy dependencies.
        while self.plugins:
            plugin = self.plugins.pop()
            try:
                plugin(self, self.fetch.get_plugin_settings(plugin))
            except Exception:
                logger.exception('LOADER: Plugin %s failed to load',
                                 plugin.__name__)
    def requires(self, ident, soft=False, warning=None):
        """Return the extension registered under ``ident``.

        Instantiates the announcing plugin on demand.  Returns True for a
        known event ident and None (after logging a warning) when the
        dependency cannot be satisfied.
        """
        if ident not in self.extensions:
            if ident in self.announce:
                plugin = self.announce[ident]
                self.plugins.remove(plugin)
                plugin(self, self.fetch.get_plugin_settings(plugin))
            elif ident in self.events:
                return True
            else:
                softness = "soft" if soft else "hard"
                # Fix: logger.warn() is a deprecated alias of warning()
                # (removed in Python 3.13).
                if warning:
                    logger.warning(pl_warn, softness, ident, warning)
                else:
                    logger.warning(base_warn, softness, ident)
                return None
        return self.extensions[ident]
    def provides(self, ident, obj):
        """Register ``obj`` as the extension satisfying ``ident``."""
        self.extensions[ident] = obj
|
lavish205/olympia
|
refs/heads/master
|
src/olympia/bandwagon/views.py
|
1
|
import functools
import hashlib
import os
from django import http
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import PermissionDenied
from django.db.models import Q
from django.db.transaction import non_atomic_requests
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext, ugettext_lazy as _lazy
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.http import require_POST
from django_statsd.clients import statsd
from rest_framework import serializers
from rest_framework.viewsets import ModelViewSet
import olympia.core.logger
from olympia import amo
from olympia.access import acl
from olympia.accounts.utils import redirect_for_login
from olympia.accounts.views import AccountViewSet
from olympia.addons.models import Addon
from olympia.addons.views import BaseFilter
from olympia.amo import messages
from olympia.amo.decorators import (
allow_mine, json_view, login_required, post_required, write)
from olympia.amo.urlresolvers import reverse
from olympia.amo.utils import paginate, render, urlparams
from olympia.api.filters import OrderingAliasFilter
from olympia.api.permissions import (
AllOf, AllowReadOnlyIfPublic, AnyOf, PreventActionPermission)
from olympia.legacy_api.utils import addon_to_dict
from olympia.tags.models import Tag
from olympia.translations.query import order_by_translation
from olympia.users.models import UserProfile
from . import forms, tasks
from .models import (
SPECIAL_SLUGS, Collection, CollectionAddon, CollectionVote,
CollectionWatcher)
from .permissions import (
AllowCollectionAuthor, AllowCollectionContributor, AllowContentCurators)
from .serializers import (
CollectionAddonSerializer, CollectionSerializer,
CollectionWithAddonsSerializer)
log = olympia.core.logger.getLogger('z.collections')
@non_atomic_requests
def get_collection(request, username, slug):
    """Fetch a collection by owner username and slug.

    Special slugs owned by the requesting user are resolved through the
    user object instead of a database lookup.
    """
    is_special = slug in SPECIAL_SLUGS.values()
    if (is_special and request.user.is_authenticated() and
            request.user.username == username):
        return getattr(request.user, slug + '_collection')()
    return get_object_or_404(Collection.objects,
                             author__username=username, slug=slug)
def owner_required(f=None, require_owner=True):
    """Requires collection to be owned, by someone.

    Usable bare (@owner_required) or parameterized
    (@owner_required(require_owner=False)).  The wrapped view receives the
    resolved collection as its second positional argument.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(request, username, slug, *args, **kw):
            collection = get_collection(request, username, slug)
            allowed = acl.check_collection_ownership(
                request, collection, require_owner=require_owner)
            if not allowed:
                raise PermissionDenied
            return func(request, collection, username, slug, *args, **kw)
        return wrapper
    if f:
        return decorator(f)
    return decorator
@non_atomic_requests
def legacy_redirect(request, uuid, edit=False):
    """Redirect an old-style collection URL to its current location."""
    # Nicknames have a limit of 30, so len == 36 implies a uuid.
    lookup_field = 'uuid' if len(uuid) == 36 else 'nickname'
    collection = get_object_or_404(Collection.objects, **{lookup_field: uuid})
    if edit:
        return http.HttpResponseRedirect(collection.edit_url())
    destination = '%s?%s' % (collection.get_url_path(),
                             request.GET.urlencode())
    return http.HttpResponseRedirect(destination)
@non_atomic_requests
def legacy_directory_redirects(request, page):
    """Map legacy directory pages onto the current listing URLs."""
    sort_map = {'editors_picks': 'featured', 'popular': 'popular',
                'users': 'followers'}
    base = reverse('collections.list')
    loc = base
    if page in sort_map:
        loc = urlparams(base, sort=sort_map[page])
    elif request.user.is_authenticated():
        if page == 'mine':
            loc = reverse('collections.user', args=[request.user.username])
        elif page == 'favorites':
            loc = reverse('collections.following')
    return http.HttpResponseRedirect(loc)
@non_atomic_requests
def render_cat(request, template, data=None, extra=None):
    """Render a template with the search category preset to collections."""
    if data is None:
        data = {}
    if extra is None:
        extra = {}
    data['search_cat'] = 'collections'
    return render(request, template, data, **extra)
@non_atomic_requests
def collection_listing(request, base=None):
    # Featured, listed, non-empty collections for the current app (or
    # app-agnostic ones).  `base` is accepted for URLconf compatibility
    # but unused here.
    qs = (
        Collection.objects.listed()
        .filter(Q(application=request.APP.id) | Q(application=None))
        .filter(type=amo.COLLECTION_FEATURED)
        .exclude(addon_count=0)
    )
    # Counts are hard to cache automatically, and accuracy for this
    # one is less important. Remember it for 5 minutes.
    # NOTE(review): feeding str(qs.query) + '_count' to sha256 assumes
    # Python 2 byte strings -- revisit before any py3 port.
    countkey = hashlib.sha256(str(qs.query) + '_count').hexdigest()
    count = cache.get(countkey)
    if count is None:
        count = qs.count()
        cache.set(countkey, count, 300)
    collections = paginate(request, qs, count=count)
    return render_cat(request, 'bandwagon/impala/collection_listing.html',
                      {'collections': collections, 'src': 'co-hc-sidebar',
                       'dl_src': 'co-dp-sidebar'})
def get_votes(request, collections):
    """Map collection id -> CollectionVote cast by the requesting user."""
    if not request.user.is_authenticated():
        return {}
    ids = [c.id for c in collections]
    votes = CollectionVote.objects.filter(user=request.user,
                                          collection__in=ids)
    return {vote.collection_id: vote for vote in votes}
@allow_mine
@non_atomic_requests
def user_listing(request, username):
    """List a user's collections; only listed ones unless viewing your own."""
    author = get_object_or_404(UserProfile, username=username)
    qs = (Collection.objects.filter(author__username=username)
          .order_by('-created'))
    mine = (request.user.is_authenticated() and
            request.user.username == username)
    page = 'mine' if mine else 'user'
    if not mine:
        # Other people only get to see publicly listed collections.
        qs = qs.filter(listed=True)
    collections = paginate(request, qs)
    votes = get_votes(request, collections.object_list)
    return render_cat(request, 'bandwagon/user_listing.html',
                      {'collections': collections, 'collection_votes': votes,
                       'page': page, 'author': author})
class CollectionAddonFilter(BaseFilter):
    # Sort options for add-ons shown on a collection detail page.
    opts = (('added', _lazy(u'Added')),
            ('popular', _lazy(u'Popularity')),
            ('name', _lazy(u'Name')))
    def filter_added(self):
        # Order by when the add-on was added to the collection.
        return self.base_queryset.order_by('collectionaddon__created')
    def filter_name(self):
        # Alphabetical by translated name in the current locale.
        return order_by_translation(self.base_queryset, 'name')
    def filter_popular(self):
        # Most downloaded first.
        return self.base_queryset.order_by('-weekly_downloads')
@allow_mine
@non_atomic_requests
def collection_detail(request, username, slug):
    """Collection detail page with a sortable, paginated add-on list."""
    collection = get_collection(request, username, slug)
    # Unlisted collections are only visible to their owners/contributors.
    if not collection.listed:
        if not request.user.is_authenticated():
            return redirect_for_login(request)
        if not acl.check_collection_ownership(request, collection):
            raise PermissionDenied
    base = Addon.objects.valid() & collection.addons.all()
    filter = CollectionAddonFilter(request, base,
                                   key='sort', default='popular')
    notes = get_notes(collection)
    # Go directly to CollectionAddon for the count to avoid joins.
    count = CollectionAddon.objects.filter(
        Addon.objects.all().valid_q(
            amo.VALID_ADDON_STATUSES, prefix='addon__'),
        collection=collection.id)
    addons = paginate(request, filter.qs, per_page=15, count=count.count())
    # `perms` is defined in django.contrib.auth.context_processors. Gotcha!
    user_perms = {
        'view_stats': acl.check_ownership(
            request, collection, require_owner=False),
    }
    tags = Tag.objects.filter(
        id__in=collection.top_tags) if collection.top_tags else []
    return render_cat(request, 'bandwagon/collection_detail.html',
                      {'collection': collection, 'filter': filter,
                       'addons': addons, 'notes': notes,
                       'tags': tags, 'user_perms': user_perms})
@json_view(has_trans=True)
@allow_mine
@non_atomic_requests
def collection_detail_json(request, username, slug):
    """JSON representation of a collection and its valid add-ons."""
    collection = get_collection(request, username, slug)
    can_view = collection.listed or acl.check_collection_ownership(
        request, collection)
    if not can_view:
        raise PermissionDenied
    # We evaluate the QuerySet with `list` to work around bug 866454.
    addons_dict = [addon_to_dict(addon)
                   for addon in list(collection.addons.valid())]
    return {
        'name': collection.name,
        'url': collection.get_abs_url(),
        'iconUrl': collection.icon_url,
        'addons': addons_dict,
    }
def get_notes(collection, raw=False):
    # This might hurt in a big collection with lots of notes.
    # It's a generator so we don't evaluate anything by default.
    # NOTE: yields exactly ONE dict mapping addon_id -> comment; callers
    # must iterate (or call next()) to trigger the query.
    notes = CollectionAddon.objects.filter(collection=collection,
                                           comments__isnull=False)
    rv = {}
    for note in notes:
        # Watch out for comments in a language we didn't pick up.
        if note.comments:
            rv[note.addon_id] = (note.comments.localized_string if raw
                                 else note.comments)
    yield rv
@write
@login_required
def collection_vote(request, username, slug, direction):
    """Record an up/down vote; repeating the same vote cancels it."""
    collection = get_collection(request, username, slug)
    # Voting is POST-only; a GET just bounces back to the collection page.
    if request.method != 'POST':
        return http.HttpResponseRedirect(collection.get_url_path())
    vote = {'up': 1, 'down': -1}[direction]
    # Read from the 'default' (master) DB so we see our own prior writes.
    qs = (CollectionVote.objects.using('default')
          .filter(collection=collection, user=request.user))
    if qs:
        cv = qs[0]
        if vote == cv.vote:  # Double vote => cancel.
            cv.delete()
        else:
            cv.vote = vote
            cv.save(force_update=True)
    else:
        CollectionVote.objects.create(collection=collection, user=request.user,
                                      vote=vote)
    if request.is_ajax():
        return http.HttpResponse()
    else:
        return http.HttpResponseRedirect(collection.get_url_path())
def initial_data_from_request(request):
    """Default collection-form data: the requesting user and current app id."""
    return dict(author=request.user, application=request.APP.id)
def collection_message(request, collection, option):
    """Flash a success message after an 'add' or 'update' operation.

    Raises ValueError for any other option.
    """
    if option == 'add':
        title = ugettext('Collection created!')
        msg = ugettext(
            'Your new collection is shown below. You can '
            '<a href="%(url)s">edit additional settings</a> if you\'d '
            'like.'
        ) % {'url': collection.edit_url()}
    elif option == 'update':
        title = ugettext('Collection updated!')
        msg = ugettext(
            '<a href="%(url)s">View your collection</a> to see the changes.'
        ) % {'url': collection.get_url_path()}
    else:
        raise ValueError('Incorrect option "%s", '
                         'takes only "add" or "update".' % option)
    # message_safe=True because msg deliberately contains an HTML link.
    messages.success(request, title, msg, message_safe=True)
@write
@login_required
def add(request):
    """Displays/processes a form to create a collection."""
    ctx = {}
    if request.method == 'POST':
        form = forms.CollectionForm(
            request.POST, request.FILES,
            initial=initial_data_from_request(request))
        aform = forms.AddonsForm(request.POST)
        if form.is_valid():
            collection = form.save(default_locale=request.LANG)
            collection.save()
            # Attach the selected add-ons only if their own form validates.
            if aform.is_valid():
                aform.save(collection)
            collection_message(request, collection, 'add')
            statsd.incr('collections.created')
            log.info('Created collection %s' % collection.id)
            return http.HttpResponseRedirect(collection.get_url_path())
        else:
            # Re-render with the add-ons/comments the user had selected.
            ctx['addons'] = Addon.objects.filter(pk__in=aform.clean_addon())
            ctx['comments'] = aform.clean_addon_comment()
    else:
        form = forms.CollectionForm()
    ctx['form'] = form
    return render_cat(request, 'bandwagon/add.html', ctx)
@write
@login_required(redirect=False)
def ajax_new(request):
    """AJAX endpoint to create a collection seeded with one add-on."""
    collection_form = forms.CollectionForm(
        request.POST or None,
        initial=initial_data_from_request(request))
    if request.method == 'POST' and collection_form.is_valid():
        collection = collection_form.save()
        addon_id = request.POST['addon_id']
        collection.add_addon(Addon.objects.get(pk=addon_id))
        log.info('Created collection %s' % collection.id)
        url = '%s?addon_id=%s' % (reverse('collections.ajax_list'), addon_id)
        return http.HttpResponseRedirect(url)
    return render(request, 'bandwagon/ajax_new.html',
                  {'form': collection_form})
@login_required(redirect=False)
@non_atomic_requests
def ajax_list(request):
    """List the user's collections, annotated with add-on membership."""
    try:
        addon_id = int(request.GET['addon_id'])
    except (KeyError, ValueError):
        # Missing or non-numeric addon_id: nothing sensible to show.
        return http.HttpResponseBadRequest()
    collections = Collection.objects.owned_by(
        request.user).with_has_addon(addon_id)
    return render(request, 'bandwagon/ajax_list.html',
                  {'collections': order_by_translation(collections, 'name')})
@write
@login_required
@post_required
def collection_alter(request, username, slug, action):
    """Resolve the collection from the URL and apply an add-on action."""
    return change_addon(request, get_collection(request, username, slug),
                        action)
def change_addon(request, collection, action):
    """Apply `<action>_addon` (e.g. add/remove) on a collection.

    Requires collection ownership; bad or missing addon_id → 400.
    """
    if not acl.check_collection_ownership(request, collection):
        raise PermissionDenied
    try:
        addon = get_object_or_404(Addon.objects, pk=request.POST['addon_id'])
    except (ValueError, KeyError):
        return http.HttpResponseBadRequest()
    getattr(collection, action + '_addon')(addon)
    log.info(u'%s: %s %s to collection %s' %
             (request.user, action, addon.id, collection.id))
    if request.is_ajax():
        return http.HttpResponseRedirect(
            '%s?addon_id=%s' % (reverse('collections.ajax_list'), addon.id))
    return http.HttpResponseRedirect(collection.get_url_path())
@write
@login_required
@post_required
def ajax_collection_alter(request, action):
    """AJAX variant of collection_alter: the collection pk comes in POST."""
    try:
        pk = request.POST['id']
    except KeyError:
        return http.HttpResponseBadRequest()
    try:
        collection = get_object_or_404(Collection.objects, pk=pk)
    except ValueError:
        # Non-numeric pk.
        return http.HttpResponseBadRequest()
    return change_addon(request, collection, action)
@write
@login_required
# Contributors are allowed to *see* the page, but there is another
# permission check below to prevent them from doing any modifications.
@owner_required(require_owner=False)
def edit(request, collection, username, slug):
    """Display and process the collection edit page.

    Owners (and admins) get an editable form; contributors get a
    read-only page (form is None) and any POST from them is rejected.
    """
    is_admin = acl.action_allowed(request, amo.permissions.ADMIN_CURATION)
    if not acl.check_collection_ownership(
            request, collection, require_owner=True):
        # Read-only viewer: never accept a modification attempt.
        if request.method == 'POST':
            raise PermissionDenied
        form = None
    elif request.method == 'POST':
        initial = initial_data_from_request(request)
        if collection.author_id:  # Don't try to change the author.
            initial['author'] = collection.author
        form = forms.CollectionForm(request.POST, request.FILES,
                                    initial=initial,
                                    instance=collection)
        if form.is_valid():
            collection = form.save()
            collection_message(request, collection, 'update')
            log.info(u'%s edited collection %s' %
                     (request.user, collection.id))
            return http.HttpResponseRedirect(collection.edit_url())
    else:
        form = forms.CollectionForm(instance=collection)
    # Pinned to the 'default' database — presumably to read rows just
    # written in this request without replica lag; confirm with DB setup.
    qs = (CollectionAddon.objects.using('default')
          .filter(collection=collection))
    # Map addon id -> through-row so the template can show per-addon data.
    meta = {c.addon_id: c for c in qs}
    addons = collection.addons.all()
    comments = next(get_notes(collection, raw=True))
    data = {
        'collection': collection,
        'form': form,
        'username': username,
        'slug': slug,
        'meta': meta,
        'is_admin': is_admin,
        'addons': addons,
        'comments': comments
    }
    return render_cat(request, 'bandwagon/edit.html', data)
@write
@login_required
@owner_required(require_owner=False)
@post_required
def edit_addons(request, collection, username, slug):
    """Save the add-ons posted from the edit page, then bounce back."""
    if request.method == 'POST':
        addons_form = forms.AddonsForm(request.POST)
        if addons_form.is_valid():
            addons_form.save(collection)
            collection_message(request, collection, 'update')
            log.info(u'%s added add-ons to %s' %
                     (request.user, collection.id))
    # Always return to the add-ons section of the edit page.
    return http.HttpResponseRedirect(collection.edit_url() + '#addons-edit')
@write
@login_required
@owner_required
@post_required
def edit_privacy(request, collection, username, slug):
    """Toggle a collection between listed and unlisted."""
    collection.listed = not collection.listed
    collection.save()
    log.info(u'%s changed privacy on collection %s' % (request.user,
                                                       collection.id))
    return http.HttpResponseRedirect(collection.get_url_path())
@write
@login_required
def delete(request, username, slug):
    """Confirm and delete a collection owned by the requesting user.

    GET renders a confirmation page; a POST with sure=1 deletes the
    collection, any other POST redirects back to the collection page.
    Non-owners get PermissionDenied (and the attempt is logged).
    """
    collection = get_object_or_404(Collection, author__username=username,
                                   slug=slug)
    if not acl.check_collection_ownership(request, collection, True):
        log.info(u'%s is trying to delete collection %s'
                 % (request.user, collection.id))
        raise PermissionDenied
    data = dict(collection=collection, username=username, slug=slug)
    if request.method == 'POST':
        # Use .get() so a malformed POST without 'sure' does not raise
        # MultiValueDictKeyError (HTTP 500); treat it as "not sure".
        if request.POST.get('sure') == '1':
            collection.delete()
            log.info(u'%s deleted collection %s' %
                     (request.user, collection.id))
            url = reverse('collections.user', args=[username])
            return http.HttpResponseRedirect(url)
        else:
            return http.HttpResponseRedirect(collection.get_url_path())
    return render_cat(request, 'bandwagon/delete.html', data)
@require_POST
@write
@login_required
@owner_required
@json_view
@csrf_protect
def delete_icon(request, collection, username, slug):
    """Remove the collection's uploaded icon and clear its icon type."""
    log.debug(u"User deleted collection (%s) icon " % slug)
    icon_path = os.path.join(collection.get_img_dir(),
                             '%d.png' % collection.id)
    tasks.delete_icon(icon_path)
    collection.icontype = ''
    collection.save()
    if not request.is_ajax():
        messages.success(request, ugettext('Icon Deleted'))
        return http.HttpResponseRedirect(collection.edit_url())
    return {'icon': collection.icon_url}
@login_required
@post_required
@json_view
def watch(request, username, slug):
    """
    POST /collections/:user/:slug/watch to toggle the user's watching status.

    For ajax, return {watching: true|false}, reflecting the new value.
    Otherwise, redirect to the collection page.
    """
    collection = get_collection(request, username, slug)
    params = {'user': request.user, 'collection': collection}
    existing = CollectionWatcher.objects.using('default').filter(**params)
    if existing:
        existing.delete()
        watching = False
    else:
        CollectionWatcher.objects.create(**params)
        watching = True
    if request.is_ajax():
        return {'watching': watching}
    return http.HttpResponseRedirect(collection.get_url_path())
@login_required
@non_atomic_requests
def following(request):
    """List the collections the current user follows, newest first."""
    followed = (Collection.objects.filter(following__user=request.user)
                .order_by('-following__created'))
    collections = paginate(request, followed)
    return render_cat(request, 'bandwagon/user_listing.html',
                      {'collections': collections,
                       'votes': get_votes(request, collections.object_list),
                       'page': 'following'})
@login_required
@allow_mine
@non_atomic_requests
def mine(request, username=None, slug=None):
    """Dispatch "mine" URLs to the user listing or a collection detail."""
    if slug is not None:
        return collection_detail(request, username, slug)
    return user_listing(request, username)
class CollectionViewSet(ModelViewSet):
    """API viewset for a user's collections, nested under an account.

    Permission precedence: authors > contributors > content curators >
    read-only public access.
    """
    permission_classes = [
        AnyOf(
            # Collection authors can do everything.
            AllowCollectionAuthor,
            # Collection contributors can access the featured themes collection
            # (it's community-managed) and change its addons, but can't delete
            # or edit its details.
            AllOf(AllowCollectionContributor,
                  PreventActionPermission(('create', 'list', 'update',
                                           'destroy', 'partial_update'))),
            # Content curators can modify existing mozilla collections as they
            # see fit, but can't list or delete them.
            AllOf(AllowContentCurators,
                  PreventActionPermission(('create', 'destroy', 'list'))),
            # Everyone else can do read-only stuff, except list.
            AllOf(AllowReadOnlyIfPublic,
                  PreventActionPermission('list'))),
    ]
    lookup_field = 'slug'

    def get_account_viewset(self):
        # Lazily build and cache the nested AccountViewSet used to
        # resolve the user from the `user_pk` URL kwarg.
        if not hasattr(self, 'account_viewset'):
            self.account_viewset = AccountViewSet(
                request=self.request,
                permission_classes=[],  # We handled permissions already.
                kwargs={'pk': self.kwargs['user_pk']})
        return self.account_viewset

    def get_serializer_class(self):
        # ?with_addons is only honored for single-object retrieval.
        with_addons = ('with_addons' in self.request.GET and
                       self.action == 'retrieve')
        return (CollectionSerializer if not with_addons
                else CollectionWithAddonsSerializer)

    def get_queryset(self):
        # Collections owned by the account in the URL, newest change first.
        return Collection.objects.filter(
            author=self.get_account_viewset().get_object()).order_by(
            '-modified')

    def get_addons_queryset(self):
        collection_addons_viewset = CollectionAddonViewSet(
            request=self.request
        )
        # Set this to avoid a pointless lookup loop.
        collection_addons_viewset.collection_viewset = self
        # This needs to be 'list' to make the filtering work.
        collection_addons_viewset.action = 'list'
        qs = collection_addons_viewset.get_queryset()
        # Now limit and sort
        limit = settings.REST_FRAMEWORK['PAGE_SIZE']
        sort = collection_addons_viewset.ordering[0]
        return qs.order_by(sort)[:limit]
class TranslationAwareOrderingAliasFilter(OrderingAliasFilter):
    """Ordering filter that routes name sorts through translations."""

    def filter_queryset(self, request, queryset, view):
        ordering = self.get_ordering(request, queryset, view)
        if len(ordering) > 1:
            # We can't support multiple orderings easily because of
            # how order_by_translation works.
            raise serializers.ValidationError(
                'You can only specify one "sort" argument. Multiple '
                'orderings are not supported')
        order_by = ordering[0]
        if order_by in ('name', '-name'):
            # Name lives in a translated field, so use the special helper.
            return order_by_translation(queryset, order_by, Addon)
        return super(TranslationAwareOrderingAliasFilter,
                     self).filter_queryset(request, queryset, view)
class CollectionAddonViewSet(ModelViewSet):
    """API viewset for the add-ons belonging to a single collection."""
    permission_classes = []  # We don't need extra permissions.
    serializer_class = CollectionAddonSerializer
    lookup_field = 'addon'
    filter_backends = (TranslationAwareOrderingAliasFilter,)
    ordering_fields = ()
    # Public sort aliases -> actual ORM fields.
    ordering_field_aliases = {'popularity': 'addon__weekly_downloads',
                              'name': 'name',
                              'added': 'created'}
    ordering = ('-addon__weekly_downloads',)

    def get_collection_viewset(self):
        # Lazily build and cache the parent viewset resolving the
        # collection from the URL kwargs.
        if not hasattr(self, 'collection_viewset'):
            # CollectionViewSet's permission_classes are good for us.
            self.collection_viewset = CollectionViewSet(
                request=self.request,
                kwargs={'user_pk': self.kwargs['user_pk'],
                        'slug': self.kwargs['collection_slug']})
        return self.collection_viewset

    def get_object(self):
        self.lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
        lookup_value = self.kwargs.get(self.lookup_url_kwarg)
        # If the lookup is not a number, it's probably the slug instead.
        if lookup_value and not unicode(lookup_value).isdigit():
            self.lookup_field = '%s__slug' % self.lookup_field
        return super(CollectionAddonViewSet, self).get_object()

    def get_queryset(self):
        qs = CollectionAddon.objects.filter(
            collection=self.get_collection_viewset().get_object())
        filter_param = self.request.GET.get('filter')
        # We only filter the list action; detail views see everything.
        include_all_with_deleted = (filter_param == 'all_with_deleted' or
                                    self.action != 'list')
        # If deleted addons are requested, that implies all addons.
        include_all = filter_param == 'all' or include_all_with_deleted
        if not include_all:
            qs = qs.filter(
                addon__status=amo.STATUS_PUBLIC, addon__disabled_by_user=False)
        elif not include_all_with_deleted:
            qs = qs.exclude(addon__status=amo.STATUS_DELETED)
        return qs
|
ilyes14/scikit-learn
|
refs/heads/master
|
examples/svm/plot_svm_anova.py
|
250
|
"""
=================================================
SVM-Anova: SVM with univariate feature selection
=================================================
This example shows how to perform univariate feature before running a SVC
(support vector classifier) to improve the classification scores.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm, datasets, feature_selection, cross_validation
from sklearn.pipeline import Pipeline
###############################################################################
# Import some data to play with
digits = datasets.load_digits()
y = digits.target
# Throw away data, to be in the curse of dimension settings
y = y[:200]
X = digits.data[:200]
n_samples = len(y)
X = X.reshape((n_samples, -1))
# add 200 non-informative features
X = np.hstack((X, 2 * np.random.random((n_samples, 200))))
###############################################################################
# Create a feature-selection transform and an instance of SVM that we
# combine together to have an full-blown estimator
transform = feature_selection.SelectPercentile(feature_selection.f_classif)
clf = Pipeline([('anova', transform), ('svc', svm.SVC(C=1.0))])
###############################################################################
# Plot the cross-validation score as a function of percentile of features
score_means = list()
score_stds = list()
percentiles = (1, 3, 6, 10, 15, 20, 30, 40, 60, 80, 100)
for percentile in percentiles:
clf.set_params(anova__percentile=percentile)
# Compute cross-validation score using all CPUs
this_scores = cross_validation.cross_val_score(clf, X, y, n_jobs=1)
score_means.append(this_scores.mean())
score_stds.append(this_scores.std())
plt.errorbar(percentiles, score_means, np.array(score_stds))
plt.title(
'Performance of the SVM-Anova varying the percentile of features selected')
plt.xlabel('Percentile')
plt.ylabel('Prediction rate')
plt.axis('tight')
plt.show()
|
kanagasabapathi/python-for-android
|
refs/heads/master
|
python-modules/twisted/twisted/test/myrebuilder1.py
|
162
|
# NOTE(review): these deliberately minimal classes look like a fixture for
# module-rebuild tests — do not "modernize" them; confirm before changing.
class A:
    # Classic (old-style) class on Python 2.
    def a(self):
        return 'a'


# Only define B when ``object`` exists (new-style classes available);
# on interpreters without it the NameError path skips the definition.
try:
    object
except NameError:
    pass
else:
    class B(object, A):
        def b(self):
            return 'b'


class Inherit(A):
    # Overrides A.a and returns a different value on purpose.
    def a(self):
        return 'c'
|
zhangqiking/Data-Structure-Zoo
|
refs/heads/master
|
1-Algorithm Analysis/algorithms.py
|
6
|
""" 2: Algorithms
thomas moll 2015
"""
import time, random
def find_sequentially(arr, item):
    """ Sequential Search
    Complexity: O(n)

    Returns True if item is present in arr, otherwise False.
    """
    # BUG fix: enumerate() yields (index, value); the original unpacked
    # them in the wrong order and compared the item against the *index*.
    for value in arr:
        # Check each item in the list
        if item == value:  # Runs N times in the worst case
            return True
    return False
def binary_search(arr, item):
    """ Binary Search
    Complexity: O(log(n))
    Only works on sorted arrays
    """
    lo, hi = 0, len(arr) - 1
    # The [lo, hi] window shrinks by half every iteration.
    while lo <= hi:
        mid = (lo + hi) // 2
        if arr[mid] == item:
            return True
        if item < arr[mid]:
            hi = mid - 1
        else:
            lo = mid + 1
    return False
def array_equals(a, b):
    """ Checks to see that two arrays
    are completely equal, regardless of order
    Complexity: O(n^2)

    BUG fixes vs. the original: different lengths now compare unequal,
    and each element of b can only be matched once (multiset equality),
    so array_equals([1, 1, 2], [1, 2, 2]) is False.
    """
    if len(a) != len(b):
        return False
    matched = [False] * len(b)
    for x in a:
        # Find a not-yet-used element of b equal to x.
        for j, y in enumerate(b):
            if not matched[j] and x == y:
                matched[j] = True
                break
        else:
            # No unmatched partner found for x.
            return False
    return True
# Below are some speed tests comparing sequential to binary search
if __name__ == '__main__':
print 'Given an array of a million ordered ints...'
big_o_list = list(xrange(1000000))
item = random.randint(0, 1000000)
print 'Finding',item,'using sequential search'
t0 = time.time()
find_sequentially(big_o_list, item)
t1 = time.time()
total = t1-t0
print 'Found',item,'in',total,'MS'
item = random.randint(0, 1000000)
print 'Finding',item,'using binary search'
t2 = time.time()
binary_search(big_o_list, item)
t3 = time.time()
total = t2-t3
print 'Found',item,'in',total,'MS'
|
samueljohn/pelican-plugins
|
refs/heads/master
|
asciidoc_reader/__init__.py
|
72
|
from .asciidoc_reader import *
|
piyush82/icclab-rcb-web
|
refs/heads/master
|
virtualenv/lib/python2.7/site-packages/django/contrib/gis/geos/error.py
|
641
|
"""
This module houses the GEOS exceptions, specifically, GEOSException and
GEOSGeometryIndexError.
"""
class GEOSException(Exception):
    """The base GEOS exception, indicates a GEOS-related error."""
class GEOSIndexError(GEOSException, KeyError):
    """
    This exception is raised when an invalid index is encountered, and has
    the 'silent_variable_failure' attribute set to true. This ensures that
    Django's templates proceed to use the next lookup type gracefully when
    an Exception is raised. Fixes ticket #4740.
    """
    # "If, during the method lookup, a method raises an exception, the exception
    # will be propagated, unless the exception has an attribute
    # `silent_variable_failure` whose value is True." -- Django template docs.
    silent_variable_failure = True
|
debugger22/sympy
|
refs/heads/master
|
sympy/physics/quantum/tests/test_constants.py
|
130
|
from sympy import Float
from sympy.physics.quantum.constants import hbar
def test_hbar():
    """Sanity-check hbar's assumption flags and pinned numeric value."""
    assert hbar.is_commutative is True
    assert hbar.is_real is True
    assert hbar.is_positive is True
    assert hbar.is_negative is False
    assert hbar.is_irrational is True
    # Numeric value of the reduced Planck constant as pinned by the library.
    assert hbar.evalf() == Float(1.05457162e-34)
|
thundernet8/WRGameVideos-API
|
refs/heads/master
|
venv/lib/python2.7/site-packages/sqlalchemy/dialects/postgresql/psycopg2cffi.py
|
54
|
# dialects/postgresql/psycopg2cffi.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: postgresql+psycopg2cffi
:name: psycopg2cffi
:dbapi: psycopg2cffi
:connectstring: \
postgresql+psycopg2cffi://user:password@host:port/dbname\
[?key=value&key=value...]
:url: http://pypi.python.org/pypi/psycopg2cffi/
``psycopg2cffi`` is an adaptation of ``psycopg2``, using CFFI for the C
layer. This makes it suitable for use in e.g. PyPy. Documentation
is as per ``psycopg2``.
.. versionadded:: 1.0.0
.. seealso::
:mod:`sqlalchemy.dialects.postgresql.psycopg2`
"""
from .psycopg2 import PGDialect_psycopg2
class PGDialect_psycopg2cffi(PGDialect_psycopg2):
    """PostgreSQL dialect backed by the ``psycopg2cffi`` DBAPI.

    Behavior is inherited from the psycopg2 dialect; only the DBAPI
    import hooks and the feature-version table differ.
    """
    driver = 'psycopg2cffi'
    supports_unicode_statements = True

    # psycopg2cffi's first release is 2.5.0, but reports
    # __version__ as 2.4.4.  Subsequent releases seem to have
    # fixed this.

    # Minimum (major, minor, micro) DBAPI versions for each feature.
    FEATURE_VERSION_MAP = dict(
        native_json=(2, 4, 4),
        native_jsonb=(2, 7, 1),
        sane_multi_rowcount=(2, 4, 4),
        array_oid=(2, 4, 4),
        hstore_adapter=(2, 4, 4)
    )

    @classmethod
    def dbapi(cls):
        # Imported on demand, so this module loads without the driver.
        return __import__('psycopg2cffi')

    @classmethod
    def _psycopg2_extensions(cls):
        root = __import__('psycopg2cffi', fromlist=['extensions'])
        return root.extensions

    @classmethod
    def _psycopg2_extras(cls):
        root = __import__('psycopg2cffi', fromlist=['extras'])
        return root.extras


# Module-level name used when resolving this dialect by entry point.
dialect = PGDialect_psycopg2cffi
|
tumbl3w33d/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/check_point/cp_mgmt_publish.py
|
20
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Ansible module to manage Check Point Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: cp_mgmt_publish
short_description: All the changes done by this user will be seen by all users only after publish is called.
description:
- All the changes done by this user will be seen by all users only after publish is called.
- All operations are performed over Web Services API.
version_added: "2.9"
author: "Or Soffer (@chkp-orso)"
options:
uid:
description:
- Session unique identifier. Specify it to publish a different session than the one you currently use.
type: str
extends_documentation_fragment: checkpoint_commands
"""
EXAMPLES = """
- name: publish
cp_mgmt_publish:
"""
RETURN = """
cp_mgmt_publish:
description: The checkpoint publish output.
returned: always.
type: dict
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.checkpoint.checkpoint import checkpoint_argument_spec_for_commands, api_command
def main():
    """Build the Ansible module args and run the 'publish' API command."""
    argument_spec = dict(
        uid=dict(type='str')
    )
    argument_spec.update(checkpoint_argument_spec_for_commands)
    module = AnsibleModule(argument_spec=argument_spec)
    result = api_command(module, 'publish')
    module.exit_json(**result)
if __name__ == '__main__':
main()
|
HLFH/CouchPotatoServer
|
refs/heads/develop
|
libs/pyasn1/type/constraint.py
|
382
|
#
# ASN.1 subtype constraints classes.
#
# Constraints are relatively rare, but every ASN1 object
# is doing checks all the time for whether they have any
# constraints and whether they are applicable to the object.
#
# What we're going to do is define objects/functions that
# can be called unconditionally if they are present, and that
# are simply not present if there are no constraints.
#
# Original concept and code by Mike C. Fletcher.
#
import sys
from pyasn1.type import error
class AbstractConstraint:
    """Abstract base-class for constraint objects

    Constraints should be stored in a simple sequence in the
    namespace of their client Asn1Item sub-classes.
    """
    def __init__(self, *values):
        self._valueMap = {}
        self._setValues(values)
        self.__hashedValues = None  # lazily-computed hash cache

    def __call__(self, value, idx=None):
        # Delegate to the subclass check; re-raise with context about
        # which constraint object failed.
        try:
            self._testValue(value, idx)
        except error.ValueConstraintError:
            raise error.ValueConstraintError(
                '%s failed at: \"%s\"' % (self, sys.exc_info()[1])
            )
    def __repr__(self):
        return '%s(%s)' % (
            self.__class__.__name__,
            ', '.join([repr(x) for x in self._values])
        )
    # Legacy `cond and X or Y` idiom: identity short-circuits to True,
    # otherwise the stored values are compared against `other` directly.
    def __eq__(self, other):
        return self is other and True or self._values == other
    def __ne__(self, other): return self._values != other
    def __lt__(self, other): return self._values < other
    def __le__(self, other): return self._values <= other
    def __gt__(self, other): return self._values > other
    def __ge__(self, other): return self._values >= other
    # A constraint is truthy when it holds any values (py2/py3 spelling).
    if sys.version_info[0] <= 2:
        def __nonzero__(self): return bool(self._values)
    else:
        def __bool__(self): return bool(self._values)

    def __hash__(self):
        if self.__hashedValues is None:
            self.__hashedValues = hash((self.__class__.__name__, self._values))
        return self.__hashedValues

    def _setValues(self, values): self._values = values
    def _testValue(self, value, idx):
        # Subclasses override; the base always rejects.
        raise error.ValueConstraintError(value)

    # Constraints derivation logic
    def getValueMap(self): return self._valueMap
    def isSuperTypeOf(self, otherConstraint):
        return self in otherConstraint.getValueMap() or \
               otherConstraint is self or otherConstraint == self
    def isSubTypeOf(self, otherConstraint):
        return otherConstraint in self._valueMap or \
               otherConstraint is self or otherConstraint == self
class SingleValueConstraint(AbstractConstraint):
    """Value must be one of the defined values."""
    def _testValue(self, value, idx):
        # XXX index vals for performance?
        if value in self._values:
            return
        raise error.ValueConstraintError(value)
class ContainedSubtypeConstraint(AbstractConstraint):
    """Value must satisfy every constraint in the defined set."""
    def _testValue(self, value, idx):
        for constraint in self._values:
            constraint(value, idx)
class ValueRangeConstraint(AbstractConstraint):
    """Value must be within start and stop values (inclusive)"""
    def _testValue(self, value, idx):
        if value < self.start or value > self.stop:
            raise error.ValueConstraintError(value)

    def _setValues(self, values):
        # Expect exactly (start, stop) and validate their ordering.
        if len(values) != 2:
            raise error.PyAsn1Error(
                '%s: bad constraint values' % (self.__class__.__name__,)
            )
        self.start, self.stop = values
        if self.start > self.stop:
            raise error.PyAsn1Error(
                '%s: screwed constraint values (start > stop): %s > %s' % (
                    self.__class__.__name__,
                    self.start, self.stop
                )
            )
        AbstractConstraint._setValues(self, values)
class ValueSizeConstraint(ValueRangeConstraint):
    """len(value) must be within start and stop values (inclusive)."""
    def _testValue(self, value, idx):
        size = len(value)
        if not (self.start <= size <= self.stop):
            raise error.ValueConstraintError(value)
class PermittedAlphabetConstraint(SingleValueConstraint):
    """Every character of the value must come from the given alphabet."""
    def _setValues(self, values):
        # Flatten all given strings/sequences into one tuple of symbols.
        alphabet = ()
        for v in values:
            alphabet = alphabet + tuple(v)
        self._values = alphabet

    def _testValue(self, value, idx):
        for symbol in value:
            if symbol not in self._values:
                raise error.ValueConstraintError(value)
# This is a bit kludgy, meaning two op modes within a single constraint
class InnerTypeConstraint(AbstractConstraint):
    """Value must satisfy type and presence constraints.

    Two modes: a single constraint applied to every component, or a
    per-index mapping of (constraint, presence-status) pairs.
    """
    def _testValue(self, value, idx):
        if self.__singleTypeConstraint:
            self.__singleTypeConstraint(value)
        elif self.__multipleTypeConstraint:
            if idx not in self.__multipleTypeConstraint:
                raise error.ValueConstraintError(value)
            constraint, status = self.__multipleTypeConstraint[idx]
            if status == 'ABSENT':   # XXX presence is not checked!
                raise error.ValueConstraintError(value)
            constraint(value)

    def _setValues(self, values):
        self.__multipleTypeConstraint = {}
        self.__singleTypeConstraint = None
        for v in values:
            # Tuples select per-index mode: (idx, constraint, status).
            if isinstance(v, tuple):
                self.__multipleTypeConstraint[v[0]] = v[1], v[2]
            else:
                self.__singleTypeConstraint = v
        AbstractConstraint._setValues(self, values)
# Boolean ops on constraints
class ConstraintsExclusion(AbstractConstraint):
    """Value must NOT satisfy the single wrapped constraint."""
    def _testValue(self, value, idx):
        # Invert the wrapped constraint: its failure is our success.
        try:
            self._values[0](value, idx)
        except error.ValueConstraintError:
            return
        raise error.ValueConstraintError(value)

    def _setValues(self, values):
        if len(values) != 1:
            raise error.PyAsn1Error('Single constraint expected')
        AbstractConstraint._setValues(self, values)
class AbstractConstraintSet(AbstractConstraint):
    """Abstract base for sets of constraints (intersection/union)."""
    # Sequence-like access to the contained constraints.
    def __getitem__(self, idx): return self._values[idx]
    # `a + b` builds a new set of the same kind containing both.
    def __add__(self, value): return self.__class__(self, value)
    def __radd__(self, value): return self.__class__(self, value)

    def __len__(self): return len(self._values)

    # Constraints inclusion in sets
    def _setValues(self, values):
        self._values = values
        for v in values:
            self._valueMap[v] = 1
            self._valueMap.update(v.getValueMap())
class ConstraintsIntersection(AbstractConstraintSet):
    """Value must satisfy all constraints in the set."""
    def _testValue(self, value, idx):
        # Any single failure propagates as a ValueConstraintError.
        for constraint in self._values:
            constraint(value, idx)
class ConstraintsUnion(AbstractConstraintSet):
    """Value must satisfy at least one constraint in the set."""
    def _testValue(self, value, idx):
        for constraint in self._values:
            try:
                constraint(value, idx)
            except error.ValueConstraintError:
                continue
            # First success is enough.
            return
        raise error.ValueConstraintError(
            'all of %s failed for \"%s\"' % (self._values, value)
        )
# XXX
# add tests for type check
|
lisael/pg-django
|
refs/heads/master
|
tests/regressiontests/admin_inlines/tests.py
|
23
|
from __future__ import absolute_import
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.admin.helpers import InlineAdminForm
from django.contrib.auth.models import User, Permission
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
# local test models
from .admin import InnerInline
from .models import (Holder, Inner, Holder2, Inner2, Holder3, Inner3, Person,
OutfitItem, Fashionista, Teacher, Parent, Child, Author, Book, Profile,
ProfileCollection)
class TestInline(TestCase):
    """Integration tests for admin inline formsets: rendering, saving and
    regression cases, exercised through the fixture superuser."""
    urls = "regressiontests.admin_inlines.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        # One Holder with one Inner so the change view shows inline data.
        holder = Holder(dummy=13)
        holder.save()
        Inner(dummy=42, holder=holder).save()
        self.change_url = '/admin/admin_inlines/holder/%i/' % holder.id

        result = self.client.login(username='super', password='secret')
        self.assertEqual(result, True)

    def tearDown(self):
        self.client.logout()

    def test_can_delete(self):
        """
        can_delete should be passed to inlineformset factory.
        """
        response = self.client.get(self.change_url)
        inner_formset = response.context['inline_admin_formsets'][0].formset
        expected = InnerInline.can_delete
        actual = inner_formset.can_delete
        self.assertEqual(expected, actual, 'can_delete must be equal')

    def test_readonly_stacked_inline_label(self):
        """Bug #13174."""
        holder = Holder.objects.create(dummy=42)
        inner = Inner.objects.create(holder=holder, dummy=42, readonly='')
        response = self.client.get('/admin/admin_inlines/holder/%i/'
                                   % holder.id)
        self.assertContains(response, '<label>Inner readonly label:</label>')

    def test_many_to_many_inlines(self):
        "Autogenerated many-to-many inlines are displayed correctly (#13407)"
        response = self.client.get('/admin/admin_inlines/author/add/')
        # The heading for the m2m inline block uses the right text
        self.assertContains(response, '<h2>Author-book relationships</h2>')
        # The "add another" label is correct
        self.assertContains(response, 'Add another Author-Book Relationship')
        # The '+' is dropped from the autogenerated form prefix (Author_books+)
        self.assertContains(response, 'id="id_Author_books-TOTAL_FORMS"')

    def test_inline_primary(self):
        person = Person.objects.create(firstname='Imelda')
        item = OutfitItem.objects.create(name='Shoes')
        # Imelda likes shoes, but can't cary her own bags.
        data = {
            'shoppingweakness_set-TOTAL_FORMS': 1,
            'shoppingweakness_set-INITIAL_FORMS': 0,
            'shoppingweakness_set-MAX_NUM_FORMS': 0,
            '_save': u'Save',
            'person': person.id,
            'max_weight': 0,
            'shoppingweakness_set-0-item': item.id,
        }
        response = self.client.post('/admin/admin_inlines/fashionista/add/', data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(Fashionista.objects.filter(person__firstname='Imelda')), 1)

    def test_tabular_non_field_errors(self):
        """
        Ensure that non_field_errors are displayed correctly, including the
        right value for colspan. Refs #13510.
        """
        data = {
            'title_set-TOTAL_FORMS': 1,
            'title_set-INITIAL_FORMS': 0,
            'title_set-MAX_NUM_FORMS': 0,
            '_save': u'Save',
            'title_set-0-title1': 'a title',
            'title_set-0-title2': 'a different title',
        }
        response = self.client.post('/admin/admin_inlines/titlecollection/add/', data)
        # Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbock.
        self.assertContains(response, '<tr><td colspan="4"><ul class="errorlist"><li>The two titles must be the same</li></ul></td></tr>')

    def test_no_parent_callable_lookup(self):
        """Admin inline `readonly_field` shouldn't invoke parent ModelAdmin callable"""
        # Identically named callable isn't present in the parent ModelAdmin,
        # rendering of the add view shouldn't explode
        response = self.client.get('/admin/admin_inlines/novel/add/')
        self.assertEqual(response.status_code, 200)
        # View should have the child inlines section
        self.assertContains(response, '<div class="inline-group" id="chapter_set-group">')

    def test_callable_lookup(self):
        """Admin inline should invoke local callable when its name is listed in readonly_fields"""
        response = self.client.get('/admin/admin_inlines/poll/add/')
        self.assertEqual(response.status_code, 200)
        # Add parent object view should have the child inlines section
        self.assertContains(response, '<div class="inline-group" id="question_set-group">')
        # The right callabe should be used for the inline readonly_fields
        # column cells
        self.assertContains(response, '<p>Callable in QuestionInline</p>')

    def test_help_text(self):
        """
        Ensure that the inlines' model field help texts are displayed when
        using both the stacked and tabular layouts.
        Ref #8190.
        """
        response = self.client.get('/admin/admin_inlines/holder4/add/')
        self.assertContains(response, '<p class="help">Awesome stacked help text is awesome.</p>', 4)
        self.assertContains(response, '<img src="/static/admin/img/icon-unknown.gif" class="help help-tooltip" width="10" height="10" alt="(Awesome tabular help text is awesome.)" title="Awesome tabular help text is awesome." />', 1)

    def test_non_related_name_inline(self):
        """
        Ensure that multiple inlines with related_name='+' have correct form
        prefixes. Bug #16838.
        """
        response = self.client.get('/admin/admin_inlines/capofamiglia/add/')
        self.assertContains(response,
                '<input type="hidden" name="-1-0-id" id="id_-1-0-id" />')
        self.assertContains(response,
                '<input type="hidden" name="-1-0-capo_famiglia" '
                'id="id_-1-0-capo_famiglia" />')
        self.assertContains(response,
                '<input id="id_-1-0-name" type="text" class="vTextField" '
                'name="-1-0-name" maxlength="100" />', html=True)
        self.assertContains(response,
                '<input type="hidden" name="-2-0-id" id="id_-2-0-id" />')
        self.assertContains(response,
                '<input type="hidden" name="-2-0-capo_famiglia" '
                'id="id_-2-0-capo_famiglia" />')
        self.assertContains(response,
                '<input id="id_-2-0-name" type="text" class="vTextField" '
                'name="-2-0-name" maxlength="100" />', html=True)
class TestInlineMedia(TestCase):
    """Check that ModelAdmin/inline media (JS files) ends up in responses."""
    urls = "regressiontests.admin_inlines.urls"
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        result = self.client.login(username='super', password='secret')
        self.assertEqual(result, True)

    def tearDown(self):
        self.client.logout()

    def test_inline_media_only_base(self):
        # Only the parent ModelAdmin declares media here.
        holder = Holder(dummy=13)
        holder.save()
        Inner(dummy=42, holder=holder).save()
        change_url = '/admin/admin_inlines/holder/%i/' % holder.id
        response = self.client.get(change_url)
        self.assertContains(response, 'my_awesome_admin_scripts.js')

    def test_inline_media_only_inline(self):
        # Only the inline declares media here.
        holder = Holder3(dummy=13)
        holder.save()
        Inner3(dummy=42, holder=holder).save()
        change_url = '/admin/admin_inlines/holder3/%i/' % holder.id
        response = self.client.get(change_url)
        self.assertContains(response, 'my_awesome_inline_scripts.js')

    def test_all_inline_media(self):
        # Both parent and inline media must be merged into the page.
        holder = Holder2(dummy=13)
        holder.save()
        Inner2(dummy=42, holder=holder).save()
        change_url = '/admin/admin_inlines/holder2/%i/' % holder.id
        response = self.client.get(change_url)
        self.assertContains(response, 'my_awesome_admin_scripts.js')
        self.assertContains(response, 'my_awesome_inline_scripts.js')
class TestInlineAdminForm(TestCase):
    """Tests for the InlineAdminForm wrapper itself (no HTTP involved)."""
    urls = "regressiontests.admin_inlines.urls"

    def test_immutable_content_type(self):
        """Regression for #9362
        The problem depends only on InlineAdminForm and its "original"
        argument, so we can safely set the other arguments to None/{}. We just
        need to check that the content_type argument of Child isn't altered by
        the internals of the inline form."""
        sally = Teacher.objects.create(name='Sally')
        john = Parent.objects.create(name='John')
        joe = Child.objects.create(name='Joe', teacher=sally, parent=john)

        # Wrap the child in an InlineAdminForm; its content_type (presumably
        # a generic-relation pointer -- see the Child model) must still point
        # at Parent afterwards.
        iaf = InlineAdminForm(None, None, {}, {}, joe)
        parent_ct = ContentType.objects.get_for_model(Parent)
        self.assertEqual(iaf.original.content_type, parent_ct)
class TestInlinePermissions(TestCase):
    """
    Make sure the admin respects permissions for objects that are edited
    inline. Refs #8060.

    The parent models (Author, Holder2) are always editable by the test
    user; each test grants a different combination of add/change/delete
    permissions on the inline models (Book via the auto-created m2m
    intermediate, Inner2 via a FK) and checks what the admin renders.
    """
    urls = "regressiontests.admin_inlines.urls"

    def setUp(self):
        self.user = User(username='admin')
        self.user.is_staff = True
        self.user.is_active = True
        self.user.set_password('secret')
        self.user.save()

        self.author_ct = ContentType.objects.get_for_model(Author)
        self.holder_ct = ContentType.objects.get_for_model(Holder2)
        self.book_ct = ContentType.objects.get_for_model(Book)
        self.inner_ct = ContentType.objects.get_for_model(Inner2)

        # User always has permissions to add and change Authors, and Holders,
        # the main (parent) models of the inlines. Permissions on the inlines
        # vary per test.
        permission = Permission.objects.get(codename='add_author', content_type=self.author_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='change_author', content_type=self.author_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='add_holder2', content_type=self.holder_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='change_holder2', content_type=self.holder_ct)
        self.user.user_permissions.add(permission)

        author = Author.objects.create(pk=1, name=u'The Author')
        book = author.books.create(name=u'The inline Book')
        self.author_change_url = '/admin/admin_inlines/author/%i/' % author.id
        # Get the ID of the automatically created intermediate model for the Author-Book m2m
        author_book_auto_m2m_intermediate = Author.books.through.objects.get(author=author, book=book)
        self.author_book_auto_m2m_intermediate_id = author_book_auto_m2m_intermediate.pk

        holder = Holder2.objects.create(dummy=13)
        inner2 = Inner2.objects.create(dummy=42, holder=holder)
        self.holder_change_url = '/admin/admin_inlines/holder2/%i/' % holder.id
        self.inner2_id = inner2.id

        self.assertEqual(
            self.client.login(username='admin', password='secret'),
            True)

    def tearDown(self):
        self.client.logout()

    def test_inline_add_m2m_noperm(self):
        response = self.client.get('/admin/admin_inlines/author/add/')
        # No change permission on books, so no inline
        self.assertNotContains(response, '<h2>Author-book relationships</h2>')
        self.assertNotContains(response, 'Add another Author-Book Relationship')
        self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')

    def test_inline_add_fk_noperm(self):
        response = self.client.get('/admin/admin_inlines/holder2/add/')
        # No permissions on Inner2s, so no inline
        self.assertNotContains(response, '<h2>Inner2s</h2>')
        self.assertNotContains(response, 'Add another Inner2')
        self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')

    def test_inline_change_m2m_noperm(self):
        response = self.client.get(self.author_change_url)
        # No change permission on books, so no inline
        self.assertNotContains(response, '<h2>Author-book relationships</h2>')
        self.assertNotContains(response, 'Add another Author-Book Relationship')
        self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')

    def test_inline_change_fk_noperm(self):
        response = self.client.get(self.holder_change_url)
        # No permissions on Inner2s, so no inline
        self.assertNotContains(response, '<h2>Inner2s</h2>')
        self.assertNotContains(response, 'Add another Inner2')
        self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')

    def test_inline_add_m2m_add_perm(self):
        # Add permission alone is not enough for the auto-created m2m
        # intermediate: the admin requires change permission on it.
        permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get('/admin/admin_inlines/author/add/')
        # No change permission on Books, so no inline
        self.assertNotContains(response, '<h2>Author-book relationships</h2>')
        self.assertNotContains(response, 'Add another Author-Book Relationship')
        self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')

    def test_inline_add_fk_add_perm(self):
        permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get('/admin/admin_inlines/holder2/add/')
        # Add permission on inner2s, so we get the inline
        self.assertContains(response, '<h2>Inner2s</h2>')
        self.assertContains(response, 'Add another Inner2')
        self.assertContains(response, 'value="3" id="id_inner2_set-TOTAL_FORMS"')

    def test_inline_change_m2m_add_perm(self):
        permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.author_change_url)
        # No change permission on books, so no inline
        self.assertNotContains(response, '<h2>Author-book relationships</h2>')
        self.assertNotContains(response, 'Add another Author-Book Relationship')
        self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
        self.assertNotContains(response, 'id="id_Author_books-0-DELETE"')

    def test_inline_change_m2m_change_perm(self):
        permission = Permission.objects.get(codename='change_book', content_type=self.book_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.author_change_url)
        # We have change perm on books, so we can add/change/delete inlines
        self.assertContains(response, '<h2>Author-book relationships</h2>')
        self.assertContains(response, 'Add another Author-Book Relationship')
        self.assertContains(response, 'value="4" id="id_Author_books-TOTAL_FORMS"')
        self.assertContains(response, '<input type="hidden" name="Author_books-0-id" value="%i"' % self.author_book_auto_m2m_intermediate_id)
        self.assertContains(response, 'id="id_Author_books-0-DELETE"')

    def test_inline_change_fk_add_perm(self):
        permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.holder_change_url)
        # Add permission on inner2s, so we can add but not modify existing
        self.assertContains(response, '<h2>Inner2s</h2>')
        self.assertContains(response, 'Add another Inner2')
        # 3 extra forms only, not the existing instance form
        self.assertContains(response, 'value="3" id="id_inner2_set-TOTAL_FORMS"')
        self.assertNotContains(response, '<input type="hidden" name="inner2_set-0-id" value="%i"' % self.inner2_id)

    def test_inline_change_fk_change_perm(self):
        permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.holder_change_url)
        # Change permission on inner2s, so we can change existing but not add new
        self.assertContains(response, '<h2>Inner2s</h2>')
        # Just the one form for existing instances
        self.assertContains(response, 'value="1" id="id_inner2_set-TOTAL_FORMS"')
        self.assertContains(response, '<input type="hidden" name="inner2_set-0-id" value="%i"' % self.inner2_id)
        # max-num 0 means we can't add new ones
        self.assertContains(response, 'value="0" id="id_inner2_set-MAX_NUM_FORMS"')

    def test_inline_change_fk_add_change_perm(self):
        permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.holder_change_url)
        # Add/change perm, so we can add new and change existing
        self.assertContains(response, '<h2>Inner2s</h2>')
        # One form for existing instance and three extra for new
        self.assertContains(response, 'value="4" id="id_inner2_set-TOTAL_FORMS"')
        self.assertContains(response, '<input type="hidden" name="inner2_set-0-id" value="%i"' % self.inner2_id)

    def test_inline_change_fk_change_del_perm(self):
        permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.holder_change_url)
        # Change/delete perm on inner2s, so we can change/delete existing
        self.assertContains(response, '<h2>Inner2s</h2>')
        # One form for existing instance only, no new
        self.assertContains(response, 'value="1" id="id_inner2_set-TOTAL_FORMS"')
        self.assertContains(response, '<input type="hidden" name="inner2_set-0-id" value="%i"' % self.inner2_id)
        self.assertContains(response, 'id="id_inner2_set-0-DELETE"')

    def test_inline_change_fk_all_perms(self):
        permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
        self.user.user_permissions.add(permission)
        response = self.client.get(self.holder_change_url)
        # All perms on inner2s, so we can add/change/delete
        self.assertContains(response, '<h2>Inner2s</h2>')
        # One form for existing instance only, three for new
        self.assertContains(response, 'value="4" id="id_inner2_set-TOTAL_FORMS"')
        self.assertContains(response, '<input type="hidden" name="inner2_set-0-id" value="%i"' % self.inner2_id)
        self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
class SeleniumFirefoxTests(AdminSeleniumWebDriverTestCase):
    """
    Browser-level tests of the dynamic inline-formset JavaScript ("Add
    another", deleting rows, ID re-sequencing), driven through Firefox.

    Note: the deprecated ``failUnlessEqual`` alias has been replaced with
    its canonical spelling ``assertEqual`` throughout (same behavior).
    """
    webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
    fixtures = ['admin-views-users.xml']
    urls = "regressiontests.admin_inlines.urls"

    def test_add_inlines(self):
        """
        Ensure that the "Add another XXX" link correctly adds items to the
        inline form.
        """
        from selenium.common.exceptions import TimeoutException
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
            '/admin/admin_inlines/profilecollection/add/'))

        # Check that there's only one inline to start with and that it has the
        # correct ID.
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')), 1)
        self.assertEqual(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')[0].get_attribute('id'),
            'profile_set-0')
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-0 input[name=profile_set-0-first_name]')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-0 input[name=profile_set-0-last_name]')), 1)

        # Add an inline
        self.selenium.find_element_by_link_text('Add another Profile').click()

        # Check that the inline has been added, that it has the right id, and
        # that it contains the right fields.
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')), 2)
        self.assertEqual(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')[1].get_attribute('id'), 'profile_set-1')
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-1 input[name=profile_set-1-first_name]')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-1 input[name=profile_set-1-last_name]')), 1)

        # Let's add another one to be sure
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')), 3)
        self.assertEqual(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set')[2].get_attribute('id'), 'profile_set-2')
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-2 input[name=profile_set-2-first_name]')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '.dynamic-profile_set#profile_set-2 input[name=profile_set-2-last_name]')), 1)

        # Enter some data and click 'Save'
        self.selenium.find_element_by_name('profile_set-0-first_name').send_keys('0 first name 1')
        self.selenium.find_element_by_name('profile_set-0-last_name').send_keys('0 last name 2')
        self.selenium.find_element_by_name('profile_set-1-first_name').send_keys('1 first name 1')
        self.selenium.find_element_by_name('profile_set-1-last_name').send_keys('1 last name 2')
        self.selenium.find_element_by_name('profile_set-2-first_name').send_keys('2 first name 1')
        self.selenium.find_element_by_name('profile_set-2-last_name').send_keys('2 last name 2')
        self.selenium.find_element_by_xpath('//input[@value="Save"]').click()

        try:
            # Wait for the next page to be loaded.
            self.wait_loaded_tag('body')
        except TimeoutException:
            # IE7 occasionally returns an error "Internet Explorer cannot
            # display the webpage" and doesn't load the next page. We just
            # ignore it.
            pass

        # Check that the objects have been created in the database
        self.assertEqual(ProfileCollection.objects.all().count(), 1)
        self.assertEqual(Profile.objects.all().count(), 3)

    def test_delete_inlines(self):
        """
        Deleting dynamically added inline rows removes them and re-sequences
        the remaining rows' IDs.
        """
        self.admin_login(username='super', password='secret')
        self.selenium.get('%s%s' % (self.live_server_url,
            '/admin/admin_inlines/profilecollection/add/'))

        # Add a few inlines
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.selenium.find_element_by_link_text('Add another Profile').click()
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '#profile_set-group table tr.dynamic-profile_set')), 5)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-3')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-4')), 1)

        # Click on a few delete buttons
        self.selenium.find_element_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-1 td.delete a').click()
        self.selenium.find_element_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-2 td.delete a').click()
        # Verify that they're gone and that the IDs have been re-sequenced
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            '#profile_set-group table tr.dynamic-profile_set')), 3)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
        self.assertEqual(len(self.selenium.find_elements_by_css_selector(
            'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
class SeleniumChromeTests(SeleniumFirefoxTests):
    """Re-run the dynamic-inline Selenium suite against Chrome."""
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class SeleniumIETests(SeleniumFirefoxTests):
    """Re-run the dynamic-inline Selenium suite against Internet Explorer."""
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
|
dex4er/django
|
refs/heads/1.6.x
|
django/core/files/base.py
|
147
|
from __future__ import unicode_literals
import os
from io import BytesIO, StringIO, UnsupportedOperation
from django.utils.encoding import smart_text
from django.core.files.utils import FileProxyMixin
from django.utils import six
from django.utils.encoding import force_bytes, python_2_unicode_compatible
@python_2_unicode_compatible
class File(FileProxyMixin):
    """
    A wrapper around a Python file object, adding name/size/chunking helpers.

    File-like method access (read, seek, ...) is delegated to the wrapped
    object via FileProxyMixin.
    """
    # Default chunk size used by chunks(): 64 KiB.
    DEFAULT_CHUNK_SIZE = 64 * 2**10

    def __init__(self, file, name=None):
        self.file = file
        if name is None:
            # Fall back to the underlying object's name, if it has one.
            name = getattr(file, 'name', None)
        self.name = name
        # Only mirror the mode when the wrapped object exposes one.
        if hasattr(file, 'mode'):
            self.mode = file.mode

    def __str__(self):
        return smart_text(self.name or '')

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self or "None")

    def __bool__(self):
        # A File is truthy iff it has a (non-empty) name.
        return bool(self.name)

    def __nonzero__(self):      # Python 2 compatibility
        return type(self).__bool__(self)

    def __len__(self):
        return self.size

    def _get_size(self):
        # Cache the size; try, in order: the wrapped object's own size
        # attribute, the on-disk size, and finally a seek/tell round trip.
        if not hasattr(self, '_size'):
            if hasattr(self.file, 'size'):
                self._size = self.file.size
            elif hasattr(self.file, 'name') and os.path.exists(self.file.name):
                self._size = os.path.getsize(self.file.name)
            elif hasattr(self.file, 'tell') and hasattr(self.file, 'seek'):
                pos = self.file.tell()
                self.file.seek(0, os.SEEK_END)
                self._size = self.file.tell()
                self.file.seek(pos)     # restore the original position
            else:
                raise AttributeError("Unable to determine the file's size.")
        return self._size

    def _set_size(self, size):
        self._size = size

    size = property(_get_size, _set_size)

    def _get_closed(self):
        return not self.file or self.file.closed
    closed = property(_get_closed)

    def chunks(self, chunk_size=None):
        """
        Read the file and yield chunks of ``chunk_size`` bytes (defaults to
        ``File.DEFAULT_CHUNK_SIZE``).
        """
        if not chunk_size:
            chunk_size = self.DEFAULT_CHUNK_SIZE

        try:
            self.seek(0)
        except (AttributeError, UnsupportedOperation):
            # Unseekable streams are simply read from their current position.
            pass

        while True:
            data = self.read(chunk_size)
            if not data:
                break
            yield data

    def multiple_chunks(self, chunk_size=None):
        """
        Returns ``True`` if you can expect multiple chunks.

        NB: If a particular file representation is in memory, subclasses should
        always return ``False`` -- there's no good reason to read from memory in
        chunks.
        """
        if not chunk_size:
            chunk_size = self.DEFAULT_CHUNK_SIZE
        return self.size > chunk_size

    def __iter__(self):
        # Iterate over this file-like object by newlines.
        buffer_ = None
        for chunk in self.chunks():
            chunk_buffer = BytesIO(chunk)

            for line in chunk_buffer:
                if buffer_:
                    # A line was split across two chunks; stitch it together.
                    line = buffer_ + line
                    buffer_ = None

                # If this is the end of a line, yield; otherwise, wait for
                # the next round.
                # BUGFIX: iterating BytesIO yields bytes, so on Python 3
                # ``line[-1]`` is an int that can never equal a text string;
                # the original test ``line[-1] in ('\n', '\r')`` was always
                # False and lines were only flushed at the end.  Comparing a
                # one-byte *slice* against bytes literals is correct on both
                # Python 2 and Python 3.
                if line[-1:] in (b'\n', b'\r'):
                    yield line
                else:
                    buffer_ = line

        if buffer_ is not None:
            yield buffer_

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.close()

    def open(self, mode=None):
        if not self.closed:
            # Already open: just rewind.
            self.seek(0)
        elif self.name and os.path.exists(self.name):
            self.file = open(self.name, mode or self.mode)
        else:
            raise ValueError("The file cannot be reopened.")

    def close(self):
        self.file.close()
@python_2_unicode_compatible
class ContentFile(File):
    """
    A File-like wrapper around an in-memory string or bytestring, for use
    where a file object is expected but only raw content exists.
    """
    def __init__(self, content, name=None):
        # Pick the in-memory stream type that matches the content.  On
        # Python 2 everything is coerced to a bytestring; on Python 3 text
        # stays text (StringIO) and bytes stay bytes (BytesIO).
        if not six.PY3:
            content = force_bytes(content)
            stream = BytesIO(content)
        elif isinstance(content, six.text_type):
            stream = StringIO(content)
        else:
            stream = BytesIO(content)
        super(ContentFile, self).__init__(stream, name=name)
        self.size = len(content)

    def __str__(self):
        return 'Raw content'

    def __bool__(self):
        # In-memory content always counts as "present", even when empty.
        return True

    def __nonzero__(self):      # Python 2 compatibility
        return type(self).__bool__(self)

    def open(self, mode=None):
        # Nothing to reopen -- just rewind the in-memory stream.
        self.seek(0)

    def close(self):
        # Keep the buffer alive so the content can be re-read later.
        pass
|
darkwing/kuma
|
refs/heads/master
|
vendor/packages/pyflakes/test/test_api.py
|
6
|
"""
Tests for L{pyflakes.scripts.pyflakes}.
"""
import os
import sys
import shutil
import subprocess
import tempfile
from pyflakes.messages import UnusedImport
from pyflakes.reporter import Reporter
from pyflakes.api import (
checkPath,
checkRecursive,
iterSourceCode,
)
from pyflakes.test.harness import TestCase, skipIf
if sys.version_info < (3,):
from cStringIO import StringIO
else:
from io import StringIO
unichr = chr
def withStderrTo(stderr, f, *args, **kwargs):
    """
    Invoke C{f} while C{sys.stderr} is temporarily replaced by C{stderr}.

    The previous stderr is restored even if C{f} raises.
    """
    saved = sys.stderr
    sys.stderr = stderr
    try:
        return f(*args, **kwargs)
    finally:
        sys.stderr = saved
class Node(object):
    """
    Mock an AST node: records only a line number and a column offset.
    """
    def __init__(self, lineno, col_offset=0):
        self.lineno, self.col_offset = lineno, col_offset
class LoggingReporter(object):
    """
    Implementation of Reporter that just appends any error to a list.
    """

    def __init__(self, log):
        """
        Construct a C{LoggingReporter}.

        @param log: A list to append log messages to.
        """
        self.log = log

    def _record(self, *event):
        # Every event is stored as a plain tuple, tagged by its first item.
        self.log.append(event)

    def flake(self, message):
        # Warnings are recorded via their string form only.
        self._record('flake', str(message))

    def unexpectedError(self, filename, message):
        self._record('unexpectedError', filename, message)

    def syntaxError(self, filename, msg, lineno, offset, line):
        self._record('syntaxError', filename, msg, lineno, offset, line)
class TestIterSourceCode(TestCase):
    """
    Tests for L{iterSourceCode}.
    """

    def setUp(self):
        # Fresh scratch directory per test; removed again in tearDown.
        self.tempdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def makeEmptyFile(self, *parts):
        """
        Create an empty file under the scratch directory and return its path.

        @param parts: Path components relative to C{self.tempdir}.
        """
        assert parts
        fpath = os.path.join(self.tempdir, *parts)
        fd = open(fpath, 'a')
        fd.close()
        return fpath

    def test_emptyDirectory(self):
        """
        There are no Python files in an empty directory.
        """
        self.assertEqual(list(iterSourceCode([self.tempdir])), [])

    def test_singleFile(self):
        """
        If the directory contains one Python file, C{iterSourceCode} will find
        it.
        """
        childpath = self.makeEmptyFile('foo.py')
        self.assertEqual(list(iterSourceCode([self.tempdir])), [childpath])

    def test_onlyPythonSource(self):
        """
        Files that are not Python source files are not included.
        """
        self.makeEmptyFile('foo.pyc')
        self.assertEqual(list(iterSourceCode([self.tempdir])), [])

    def test_recurses(self):
        """
        If the Python files are hidden deep down in child directories, we will
        find them.
        """
        os.mkdir(os.path.join(self.tempdir, 'foo'))
        apath = self.makeEmptyFile('foo', 'a.py')
        os.mkdir(os.path.join(self.tempdir, 'bar'))
        bpath = self.makeEmptyFile('bar', 'b.py')
        cpath = self.makeEmptyFile('c.py')
        # Traversal order is not guaranteed, so compare sorted lists.
        self.assertEqual(
            sorted(iterSourceCode([self.tempdir])),
            sorted([apath, bpath, cpath]))

    def test_multipleDirectories(self):
        """
        L{iterSourceCode} can be given multiple directories. It will recurse
        into each of them.
        """
        foopath = os.path.join(self.tempdir, 'foo')
        barpath = os.path.join(self.tempdir, 'bar')
        os.mkdir(foopath)
        apath = self.makeEmptyFile('foo', 'a.py')
        os.mkdir(barpath)
        bpath = self.makeEmptyFile('bar', 'b.py')
        self.assertEqual(
            sorted(iterSourceCode([foopath, barpath])),
            sorted([apath, bpath]))

    def test_explicitFiles(self):
        """
        If one of the paths given to L{iterSourceCode} is not a directory but
        a file, it will include that in its output.
        """
        epath = self.makeEmptyFile('e.py')
        self.assertEqual(list(iterSourceCode([epath])),
                         [epath])
class TestReporter(TestCase):
    """
    Tests for L{Reporter}.
    """

    def test_syntaxError(self):
        """
        C{syntaxError} reports that there was a syntax error in the source
        file. It reports to the error stream and includes the filename, line
        number, error message, actual line of source and a caret pointing to
        where the error is.
        """
        err = StringIO()
        reporter = Reporter(None, err)
        reporter.syntaxError('foo.py', 'a problem', 3, 7, 'bad line of source')
        # The reported column is offset + 1; the caret is placed under the
        # offending character of the echoed source line.
        self.assertEqual(
            ("foo.py:3:8: a problem\n"
             "bad line of source\n"
             "       ^\n"),
            err.getvalue())

    def test_syntaxErrorNoOffset(self):
        """
        C{syntaxError} doesn't include a caret pointing to the error if
        C{offset} is passed as C{None}.
        """
        err = StringIO()
        reporter = Reporter(None, err)
        reporter.syntaxError('foo.py', 'a problem', 3, None,
                             'bad line of source')
        self.assertEqual(
            ("foo.py:3: a problem\n"
             "bad line of source\n"),
            err.getvalue())

    def test_multiLineSyntaxError(self):
        """
        If there's a multi-line syntax error, then we only report the last
        line. The offset is adjusted so that it is relative to the start of
        the last line.
        """
        err = StringIO()
        lines = [
            'bad line of source',
            'more bad lines of source',
        ]
        reporter = Reporter(None, err)
        reporter.syntaxError('foo.py', 'a problem', 3, len(lines[0]) + 7,
                             '\n'.join(lines))
        self.assertEqual(
            ("foo.py:3:7: a problem\n" +
             lines[-1] + "\n" +
             "      ^\n"),
            err.getvalue())

    def test_unexpectedError(self):
        """
        C{unexpectedError} reports an error processing a source file.
        """
        err = StringIO()
        reporter = Reporter(None, err)
        reporter.unexpectedError('source.py', 'error message')
        self.assertEqual('source.py: error message\n', err.getvalue())

    def test_flake(self):
        """
        C{flake} reports a code warning from Pyflakes. It is exactly the
        str() of a L{pyflakes.messages.Message}.
        """
        out = StringIO()
        reporter = Reporter(out, None)
        message = UnusedImport('foo.py', Node(42), 'bar')
        reporter.flake(message)
        self.assertEqual(out.getvalue(), "%s\n" % (message,))
class CheckTests(TestCase):
"""
Tests for L{check} and L{checkPath} which check a file for flakes.
"""
def makeTempFile(self, content):
"""
Make a temporary file containing C{content} and return a path to it.
"""
_, fpath = tempfile.mkstemp()
if not hasattr(content, 'decode'):
content = content.encode('ascii')
fd = open(fpath, 'wb')
fd.write(content)
fd.close()
return fpath
def assertHasErrors(self, path, errorList):
"""
Assert that C{path} causes errors.
@param path: A path to a file to check.
@param errorList: A list of errors expected to be printed to stderr.
"""
err = StringIO()
count = withStderrTo(err, checkPath, path)
self.assertEqual(
(count, err.getvalue()), (len(errorList), ''.join(errorList)))
def getErrors(self, path):
"""
Get any warnings or errors reported by pyflakes for the file at C{path}.
@param path: The path to a Python file on disk that pyflakes will check.
@return: C{(count, log)}, where C{count} is the number of warnings or
errors generated, and log is a list of those warnings, presented
as structured data. See L{LoggingReporter} for more details.
"""
log = []
reporter = LoggingReporter(log)
count = checkPath(path, reporter)
return count, log
def test_legacyScript(self):
from pyflakes.scripts import pyflakes as script_pyflakes
self.assertIs(script_pyflakes.checkPath, checkPath)
def test_missingTrailingNewline(self):
"""
Source which doesn't end with a newline shouldn't cause any
exception to be raised nor an error indicator to be returned by
L{check}.
"""
fName = self.makeTempFile("def foo():\n\tpass\n\t")
self.assertHasErrors(fName, [])
def test_checkPathNonExisting(self):
"""
L{checkPath} handles non-existing files.
"""
count, errors = self.getErrors('extremo')
self.assertEqual(count, 1)
self.assertEqual(
errors,
[('unexpectedError', 'extremo', 'No such file or directory')])
def test_multilineSyntaxError(self):
"""
Source which includes a syntax error which results in the raised
L{SyntaxError.text} containing multiple lines of source are reported
with only the last line of that source.
"""
source = """\
def foo():
'''
def bar():
pass
def baz():
'''quux'''
"""
# Sanity check - SyntaxError.text should be multiple lines, if it
# isn't, something this test was unprepared for has happened.
def evaluate(source):
exec(source)
try:
evaluate(source)
except SyntaxError:
e = sys.exc_info()[1]
self.assertTrue(e.text.count('\n') > 1)
else:
self.fail()
sourcePath = self.makeTempFile(source)
self.assertHasErrors(
sourcePath,
["""\
%s:8:11: invalid syntax
'''quux'''
^
""" % (sourcePath,)])
def test_eofSyntaxError(self):
"""
The error reported for source files which end prematurely causing a
syntax error reflects the cause for the syntax error.
"""
sourcePath = self.makeTempFile("def foo(")
self.assertHasErrors(
sourcePath,
["""\
%s:1:9: unexpected EOF while parsing
def foo(
^
""" % (sourcePath,)])
def test_eofSyntaxErrorWithTab(self):
"""
The error reported for source files which end prematurely causing a
syntax error reflects the cause for the syntax error.
"""
sourcePath = self.makeTempFile("if True:\n\tfoo =")
self.assertHasErrors(
sourcePath,
["""\
%s:2:7: invalid syntax
\tfoo =
\t ^
""" % (sourcePath,)])
def test_nonDefaultFollowsDefaultSyntaxError(self):
"""
Source which has a non-default argument following a default argument
should include the line number of the syntax error. However these
exceptions do not include an offset.
"""
source = """\
def foo(bar=baz, bax):
pass
"""
sourcePath = self.makeTempFile(source)
last_line = ' ^\n' if sys.version_info >= (3, 2) else ''
column = '8:' if sys.version_info >= (3, 2) else ''
self.assertHasErrors(
sourcePath,
["""\
%s:1:%s non-default argument follows default argument
def foo(bar=baz, bax):
%s""" % (sourcePath, column, last_line)])
def test_nonKeywordAfterKeywordSyntaxError(self):
"""
Source which has a non-keyword argument after a keyword argument should
include the line number of the syntax error. However these exceptions
do not include an offset.
"""
source = """\
foo(bar=baz, bax)
"""
sourcePath = self.makeTempFile(source)
last_line = ' ^\n' if sys.version_info >= (3, 2) else ''
column = '13:' if sys.version_info >= (3, 2) else ''
if sys.version_info >= (3, 5):
message = 'positional argument follows keyword argument'
else:
message = 'non-keyword arg after keyword arg'
self.assertHasErrors(
sourcePath,
["""\
%s:1:%s %s
foo(bar=baz, bax)
%s""" % (sourcePath, column, message, last_line)])
def test_invalidEscape(self):
"""
The invalid escape syntax raises ValueError in Python 2
"""
ver = sys.version_info
# ValueError: invalid \x escape
sourcePath = self.makeTempFile(r"foo = '\xyz'")
if ver < (3,):
decoding_error = "%s: problem decoding source\n" % (sourcePath,)
else:
last_line = ' ^\n' if ver >= (3, 2) else ''
# Column has been "fixed" since 3.2.4 and 3.3.1
col = 1 if ver >= (3, 3, 1) or ((3, 2, 4) <= ver < (3, 3)) else 2
decoding_error = """\
%s:1:7: (unicode error) 'unicodeescape' codec can't decode bytes \
in position 0-%d: truncated \\xXX escape
foo = '\\xyz'
%s""" % (sourcePath, col, last_line)
self.assertHasErrors(
sourcePath, [decoding_error])
@skipIf(sys.platform == 'win32', 'unsupported on Windows')
def test_permissionDenied(self):
"""
If the source file is not readable, this is reported on standard
error.
"""
sourcePath = self.makeTempFile('')
os.chmod(sourcePath, 0)
count, errors = self.getErrors(sourcePath)
self.assertEqual(count, 1)
self.assertEqual(
errors,
[('unexpectedError', sourcePath, "Permission denied")])
def test_pyflakesWarning(self):
"""
If the source file has a pyflakes warning, this is reported as a
'flake'.
"""
sourcePath = self.makeTempFile("import foo")
count, errors = self.getErrors(sourcePath)
self.assertEqual(count, 1)
self.assertEqual(
errors, [('flake', str(UnusedImport(sourcePath, Node(1), 'foo')))])
def test_encodedFileUTF8(self):
"""
If source file declares the correct encoding, no error is reported.
"""
SNOWMAN = unichr(0x2603)
source = ("""\
# coding: utf-8
x = "%s"
""" % SNOWMAN).encode('utf-8')
sourcePath = self.makeTempFile(source)
self.assertHasErrors(sourcePath, [])
def test_CRLFLineEndings(self):
    """
    Windows CR LF line endings parse without error.
    """
    path = self.makeTempFile("x = 42\r\n")
    self.assertHasErrors(path, [])
def test_misencodedFileUTF8(self):
    """
    Bytes that cannot be decoded under the declared encoding are reported
    on stderr.
    """
    snowman = unichr(0x2603)
    bad_source = ("""\
# coding: ascii
x = "%s"
""" % snowman).encode('utf-8')
    path = self.makeTempFile(bad_source)
    expected = "%s: problem decoding source\n" % (path,)
    self.assertHasErrors(path, [expected])
def test_misencodedFileUTF16(self):
    """
    UTF-16 bytes under an ascii coding declaration cannot be decoded and
    are reported on stderr.
    """
    snowman = unichr(0x2603)
    bad_source = ("""\
# coding: ascii
x = "%s"
""" % snowman).encode('utf-16')
    path = self.makeTempFile(bad_source)
    expected = "%s: problem decoding source\n" % (path,)
    self.assertHasErrors(path, [expected])
def test_checkRecursive(self):
    """
    L{checkRecursive} descends into each directory, finding Python files
    and reporting problems.
    """
    tempdir = tempfile.mkdtemp()
    os.mkdir(os.path.join(tempdir, 'foo'))
    file1 = os.path.join(tempdir, 'foo', 'bar.py')
    # Use context managers so the handles are closed (and flushed) even if
    # a write fails; the original open/write/close leaked on error.
    with open(file1, 'wb') as fd:
        fd.write("import baz\n".encode('ascii'))
    file2 = os.path.join(tempdir, 'baz.py')
    with open(file2, 'wb') as fd:
        fd.write("import contraband".encode('ascii'))
    log = []
    reporter = LoggingReporter(log)
    warnings = checkRecursive([tempdir], reporter)
    self.assertEqual(warnings, 2)
    self.assertEqual(
        sorted(log),
        sorted([('flake', str(UnusedImport(file1, Node(1), 'baz'))),
                ('flake',
                 str(UnusedImport(file2, Node(1), 'contraband')))]))
class IntegrationTests(TestCase):
    """
    Tests of the pyflakes script that actually spawn the script.
    """

    def setUp(self):
        self.tempdir = tempfile.mkdtemp()
        self.tempfilepath = os.path.join(self.tempdir, 'temp')

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def getPyflakesBinary(self):
        """
        Return the path to the pyflakes binary.
        """
        import pyflakes
        package_dir = os.path.dirname(pyflakes.__file__)
        return os.path.join(package_dir, '..', 'bin', 'pyflakes')

    def runPyflakes(self, paths, stdin=None):
        """
        Launch a subprocess running C{pyflakes}.

        @param paths: Command-line arguments to pass to pyflakes.
        @param stdin: Bytes to feed to the process on standard input, or
            C{None} to leave stdin unconnected.
        @return: C{(stdout, stderr, returncode)} of the completed pyflakes
            process.
        """
        env = dict(os.environ)
        # Propagate our sys.path so the child interpreter imports the same
        # pyflakes package that is under test.
        env['PYTHONPATH'] = os.pathsep.join(sys.path)
        command = [sys.executable, self.getPyflakesBinary()]
        command.extend(paths)
        if stdin:
            p = subprocess.Popen(command, env=env, stdin=subprocess.PIPE,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            (stdout, stderr) = p.communicate(stdin)
        else:
            p = subprocess.Popen(command, env=env,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            (stdout, stderr) = p.communicate()
        # communicate() already waited for the process, so the extra
        # p.wait() the original did was redundant.
        rv = p.returncode
        if sys.version_info >= (3,):
            stdout = stdout.decode('utf-8')
            stderr = stderr.decode('utf-8')
        return (stdout, stderr, rv)

    def test_goodFile(self):
        """
        When a Python source file is all good, the return code is zero and no
        messages are printed to either stdout or stderr.
        """
        # Create an empty (hence warning-free) source file.
        open(self.tempfilepath, 'a').close()
        d = self.runPyflakes([self.tempfilepath])
        self.assertEqual(d, ('', '', 0))

    def test_fileWithFlakes(self):
        """
        When a Python source file has warnings, the return code is non-zero
        and the warnings are printed to stdout.
        """
        with open(self.tempfilepath, 'wb') as fd:
            fd.write("import contraband\n".encode('ascii'))
        d = self.runPyflakes([self.tempfilepath])
        expected = UnusedImport(self.tempfilepath, Node(1), 'contraband')
        self.assertEqual(d, ("%s%s" % (expected, os.linesep), '', 1))

    def test_errors(self):
        """
        When pyflakes finds errors with the files it's given, (if they don't
        exist, say), then the return code is non-zero and the errors are
        printed to stderr.
        """
        d = self.runPyflakes([self.tempfilepath])
        error_msg = '%s: No such file or directory%s' % (self.tempfilepath,
                                                         os.linesep)
        self.assertEqual(d, ('', error_msg, 1))

    def test_readFromStdin(self):
        """
        If no arguments are passed to C{pyflakes} then it reads from stdin.
        """
        d = self.runPyflakes([], stdin='import contraband'.encode('ascii'))
        expected = UnusedImport('<stdin>', Node(1), 'contraband')
        self.assertEqual(d, ("%s%s" % (expected, os.linesep), '', 1))
|
rezasafi/spark
|
refs/heads/master
|
examples/src/main/python/mllib/multi_class_metrics_example.py
|
55
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# $example on$
from pyspark.mllib.classification import LogisticRegressionWithLBFGS
from pyspark.mllib.util import MLUtils
from pyspark.mllib.evaluation import MulticlassMetrics
# $example off$
from pyspark import SparkContext
if __name__ == "__main__":
    sc = SparkContext(appName="MultiClassMetricsExample")

    # Several of the methods available in scala are currently missing from pyspark
    # $example on$
    # Load training data in LIBSVM format
    data = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_multiclass_classification_data.txt")

    # Split data into training (60%) and test (40%)
    training, test = data.randomSplit([0.6, 0.4], seed=11)
    training.cache()

    # Run training algorithm to build the model
    model = LogisticRegressionWithLBFGS.train(training, numClasses=3)

    # Compute raw scores on the test set: (predicted label, true label) pairs
    predictionAndLabels = test.map(lambda lp: (float(model.predict(lp.features)), lp.label))

    # Instantiate metrics object
    metrics = MulticlassMetrics(predictionAndLabels)

    # Summary statistics for one label of interest (1.0); precision/recall/
    # fMeasure take the label as their argument.
    precision = metrics.precision(1.0)
    recall = metrics.recall(1.0)
    f1Score = metrics.fMeasure(1.0)
    print("Summary Stats")
    print("Precision = %s" % precision)
    print("Recall = %s" % recall)
    print("F1 Score = %s" % f1Score)

    # Statistics by class
    labels = data.map(lambda lp: lp.label).distinct().collect()
    for label in sorted(labels):
        print("Class %s precision = %s" % (label, metrics.precision(label)))
        print("Class %s recall = %s" % (label, metrics.recall(label)))
        print("Class %s F1 Measure = %s" % (label, metrics.fMeasure(label, beta=1.0)))

    # Weighted stats (averaged over classes, weighted by class frequency)
    print("Weighted recall = %s" % metrics.weightedRecall)
    print("Weighted precision = %s" % metrics.weightedPrecision)
    print("Weighted F(1) Score = %s" % metrics.weightedFMeasure())
    print("Weighted F(0.5) Score = %s" % metrics.weightedFMeasure(beta=0.5))
    print("Weighted false positive rate = %s" % metrics.weightedFalsePositiveRate)
    # $example off$
|
kblok/TelegramBotFriend
|
refs/heads/master
|
markovify_provider.py
|
1
|
"""Text provider based on Marcovify"""
import re
import markovify
import nltk
class MarkovifyProvider(object):
    """Markov-chain text generator backed by markovify.

    Builds a newline-delimited markov model from ``text_provider`` and
    generates replies seeded, when possible, by the words of an incoming
    message.
    """

    def __init__(self, language, text_provider):
        """
        :param language: nltk stopwords corpus language name (e.g. 'spanish').
        :param text_provider: object exposing get_text() returning the
            training corpus as a single newline-delimited string.
        """
        self.text_provider = text_provider
        self.load()
        self.language = language

    def load(self):
        """Build (or rebuild) the markovify model from the text provider."""
        self.model = markovify.NewlineText(self.text_provider.get_text())

    def is_stop_word(self, word):
        """Checks whether a word is a stop word in the configured language."""
        return word in nltk.corpus.stopwords.words(self.language)

    def get_message(self, text):
        """
        Returns a markovify text based on the text argument.

        It will try to generate a message seeded with the last non-stop
        word found in ``text``; if that fails it falls back to a random
        short sentence.  Returns None if the model cannot produce any
        sentence at all.
        """
        response = None
        word_regex = re.compile(r"(\w+)")  # fixed typo: was 'regex_wrods'
        words = word_regex.findall(text)
        # Walk the words from last to first, preferring meaningful words.
        for index in range(len(words) - 1, -1, -1):
            try:
                if not self.is_stop_word(words[index]):  # It should be some word
                    response = self.model.make_sentence_with_start(
                        words[index], tries=10).lower()
                    break
            except Exception:
                # markovify raises (or returns None, giving AttributeError)
                # when it cannot build a sentence from this seed word; just
                # try the next candidate.
                pass
        if response is None:
            # Bug fix: make_short_sentence may return None when the model
            # cannot produce a sentence within `tries`; the original code
            # crashed on None.lower() in that case.
            fallback = self.model.make_short_sentence(80, tries=10)
            if fallback is not None:
                response = fallback.lower()
        print(response)
        return response
|
defionscode/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/cloudengine/ce_ospf_vrf.py
|
43
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Standard Ansible module metadata: community-supported, interface still in
# preview (may change in later releases).
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_ospf_vrf
version_added: "2.4"
short_description: Manages configuration of an OSPF VPN instance on HUAWEI CloudEngine switches.
description:
- Manages configuration of an OSPF VPN instance on HUAWEI CloudEngine switches.
author: Yang yang (@CloudEngine Ansible)
options:
ospf:
description:
- The ID of the ospf process.
Valid values are an integer, 1 - 4294967295, the default value is 1.
required: true
route_id:
description:
- Specifies the ospf private route id.
Valid values are a string, formatted as an IP address
(i.e. "10.1.1.1") the length is 0 - 20.
vrf:
description:
- Specifies the vpn instance which uses ospf, length is 1 - 31.
Valid values are a string.
default: _public_
description:
description:
- Specifies the description information of ospf process.
bandwidth:
description:
- Specifies the reference bandwidth used to assign ospf cost.
Valid values are an integer, in Mbps, 1 - 2147483648, the default value is 100.
lsaalflag:
description:
- Specifies the mode of timer to calculate interval of arrive LSA.
If the parameter is set but no value is specified, the default will be used.
If true use general timer.
If false use intelligent timer.
type: bool
default: 'no'
lsaainterval:
description:
- Specifies the interval of arrive LSA when use the general timer.
Valid value is an integer, in millisecond, from 0 to 10000.
lsaamaxinterval:
description:
- Specifies the max interval of arrive LSA when use the intelligent timer.
Valid value is an integer, in millisecond, from 0 to 10000, the default value is 1000.
lsaastartinterval:
description:
- Specifies the start interval of arrive LSA when use the intelligent timer.
Valid value is an integer, in millisecond, from 0 to 10000, the default value is 500.
lsaaholdinterval:
description:
- Specifies the hold interval of arrive LSA when use the intelligent timer.
Valid value is an integer, in millisecond, from 0 to 10000, the default value is 500.
lsaointervalflag:
description:
- Specifies whether cancel the interval of LSA originate or not.
If the parameter is set but no value is specified, the default will be used.
If true, cancel the interval of LSA originate, i.e. the interval is 0.
If false, do not cancel the interval of LSA originate.
type: bool
default: 'no'
lsaointerval:
description:
- Specifies the interval of originate LSA .
Valid value is an integer, in second, from 0 to 10, the default value is 5.
lsaomaxinterval:
description:
- Specifies the max interval of originate LSA .
Valid value is an integer, in millisecond, from 1 to 10000, the default value is 5000.
lsaostartinterval:
description:
- Specifies the start interval of originate LSA .
Valid value is an integer, in millisecond, from 0 to 1000, the default value is 500.
lsaoholdinterval:
description:
- Specifies the hold interval of originate LSA .
Valid value is an integer, in millisecond, from 0 to 5000, the default value is 1000.
spfintervaltype:
description:
- Specifies the mode of timer which used to calculate SPF.
If the parameter is set but no value is specified, the default will be used.
If is intelligent-timer, then use intelligent timer.
If is timer, then use second level timer.
If is millisecond, then use millisecond level timer.
choices: ['intelligent-timer','timer','millisecond']
default: intelligent-timer
spfinterval:
description:
- Specifies the interval to calculate SPF when use second level timer.
Valid value is an integer, in second, from 1 to 10.
spfintervalmi:
description:
- Specifies the interval to calculate SPF when use millisecond level timer.
Valid value is an integer, in millisecond, from 1 to 10000.
spfmaxinterval:
description:
- Specifies the max interval to calculate SPF when use intelligent timer.
Valid value is an integer, in millisecond, from 1 to 20000, the default value is 5000.
spfstartinterval:
description:
- Specifies the start interval to calculate SPF when use intelligent timer.
Valid value is an integer, in millisecond, from 1 to 1000, the default value is 50.
spfholdinterval:
description:
- Specifies the hold interval to calculate SPF when use intelligent timer.
Valid value is an integer, in millisecond, from 1 to 5000, the default value is 200.
state:
description:
- Specify desired state of the resource.
choices: ['present', 'absent']
default: present
'''
EXAMPLES = '''
- name: ospf vrf module test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Configure ospf route id
ce_ospf_vrf:
ospf: 2
route_id: 2.2.2.2
lsaointervalflag: False
lsaointerval: 2
provider: "{{ cli }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {
"bandwidth": "100",
"description": null,
"lsaaholdinterval": "500",
"lsaainterval": null,
"lsaamaxinterval": "1000",
"lsaastartinterval": "500",
"lsaalflag": "False",
"lsaoholdinterval": "1000",
"lsaointerval": "2",
"lsaointervalflag": "False",
"lsaomaxinterval": "5000",
"lsaostartinterval": "500",
"process_id": "2",
"route_id": "2.2.2.2",
"spfholdinterval": "1000",
"spfinterval": null,
"spfintervalmi": null,
"spfintervaltype": "intelligent-timer",
"spfmaxinterval": "10000",
"spfstartinterval": "500",
"vrf": "_public_"
}
existing:
description: k/v pairs of existing configuration
returned: verbose mode
type: dict
sample: {
"bandwidthReference": "100",
"description": null,
"lsaArrivalFlag": "false",
"lsaArrivalHoldInterval": "500",
"lsaArrivalInterval": null,
"lsaArrivalMaxInterval": "1000",
"lsaArrivalStartInterval": "500",
"lsaOriginateHoldInterval": "1000",
"lsaOriginateInterval": "2",
"lsaOriginateIntervalFlag": "false",
"lsaOriginateMaxInterval": "5000",
"lsaOriginateStartInterval": "500",
"processId": "2",
"routerId": "2.2.2.2",
"spfScheduleHoldInterval": "1000",
"spfScheduleInterval": null,
"spfScheduleIntervalMillisecond": null,
"spfScheduleIntervalType": "intelligent-timer",
"spfScheduleMaxInterval": "10000",
"spfScheduleStartInterval": "500",
"vrfName": "_public_"
}
end_state:
description: k/v pairs of configuration after module execution
returned: verbose mode
type: dict
sample: {
"bandwidthReference": "100",
"description": null,
"lsaArrivalFlag": "false",
"lsaArrivalHoldInterval": "500",
"lsaArrivalInterval": null,
"lsaArrivalMaxInterval": "1000",
"lsaArrivalStartInterval": "500",
"lsaOriginateHoldInterval": "1000",
"lsaOriginateInterval": "2",
"lsaOriginateIntervalFlag": "false",
"lsaOriginateMaxInterval": "5000",
"lsaOriginateStartInterval": "500",
"processId": "2",
"routerId": "2.2.2.2",
"spfScheduleHoldInterval": "1000",
"spfScheduleInterval": null,
"spfScheduleIntervalMillisecond": null,
"spfScheduleIntervalType": "intelligent-timer",
"spfScheduleMaxInterval": "10000",
"spfScheduleStartInterval": "500",
"vrfName": "_public_"
}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["ospf 2"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: False
'''
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config, ce_argument_spec
CE_NC_GET_OSPF_VRF = """
<filter type="subtree">
<ospfv2 xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<ospfv2comm>
<ospfSites>
<ospfSite>
<processId></processId>
<routerId></routerId>
<vrfName></vrfName>
<description></description>
<bandwidthReference></bandwidthReference>
<lsaArrivalFlag></lsaArrivalFlag>
<lsaArrivalInterval></lsaArrivalInterval>
<lsaArrivalMaxInterval></lsaArrivalMaxInterval>
<lsaArrivalStartInterval></lsaArrivalStartInterval>
<lsaArrivalHoldInterval></lsaArrivalHoldInterval>
<lsaOriginateIntervalFlag></lsaOriginateIntervalFlag>
<lsaOriginateInterval></lsaOriginateInterval>
<lsaOriginateMaxInterval></lsaOriginateMaxInterval>
<lsaOriginateStartInterval></lsaOriginateStartInterval>
<lsaOriginateHoldInterval></lsaOriginateHoldInterval>
<spfScheduleIntervalType></spfScheduleIntervalType>
<spfScheduleInterval></spfScheduleInterval>
<spfScheduleIntervalMillisecond></spfScheduleIntervalMillisecond>
<spfScheduleMaxInterval></spfScheduleMaxInterval>
<spfScheduleStartInterval></spfScheduleStartInterval>
<spfScheduleHoldInterval></spfScheduleHoldInterval>
</ospfSite>
</ospfSites>
</ospfv2comm>
</ospfv2>
</filter>
"""
CE_NC_CREATE_OSPF_VRF = """
<ospfv2 xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<ospfv2comm>
<ospfSites>
<ospfSite operation="merge">
<processId>%s</processId>
%s
<vrfName>%s</vrfName>
<description>%s</description>
<bandwidthReference>%s</bandwidthReference>
<lsaArrivalFlag>%s</lsaArrivalFlag>
<lsaArrivalInterval>%s</lsaArrivalInterval>
<lsaArrivalMaxInterval>%s</lsaArrivalMaxInterval>
<lsaArrivalStartInterval>%s</lsaArrivalStartInterval>
<lsaArrivalHoldInterval>%s</lsaArrivalHoldInterval>
<lsaOriginateIntervalFlag>%s</lsaOriginateIntervalFlag>
<lsaOriginateInterval>%s</lsaOriginateInterval>
<lsaOriginateMaxInterval>%s</lsaOriginateMaxInterval>
<lsaOriginateStartInterval>%s</lsaOriginateStartInterval>
<lsaOriginateHoldInterval>%s</lsaOriginateHoldInterval>
<spfScheduleIntervalType>%s</spfScheduleIntervalType>
<spfScheduleInterval>%s</spfScheduleInterval>
<spfScheduleIntervalMillisecond>%s</spfScheduleIntervalMillisecond>
<spfScheduleMaxInterval>%s</spfScheduleMaxInterval>
<spfScheduleStartInterval>%s</spfScheduleStartInterval>
<spfScheduleHoldInterval>%s</spfScheduleHoldInterval>
</ospfSite>
</ospfSites>
</ospfv2comm>
</ospfv2>
"""
CE_NC_CREATE_ROUTE_ID = """
<routerId>%s</routerId>
"""
CE_NC_DELETE_OSPF = """
<ospfv2 xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<ospfv2comm>
<ospfSites>
<ospfSite operation="delete">
<processId>%s</processId>
<routerId>%s</routerId>
<vrfName>%s</vrfName>
</ospfSite>
</ospfSites>
</ospfv2comm>
</ospfv2>
"""
def build_config_xml(xmlstr):
    """Wrap a netconf payload fragment in a <config> element."""
    return '<config> %s </config>' % xmlstr
class OspfVrf(object):
"""
Manages configuration of an ospf instance.
"""
def __init__(self, argument_spec):
    """Build the AnsibleModule and cache every user-supplied parameter."""
    self.spec = argument_spec
    self.module = None
    self.init_module()
    # module input info
    self.ospf = self.module.params['ospf']
    self.route_id = self.module.params['route_id']
    self.vrf = self.module.params['vrf']
    self.description = self.module.params['description']
    self.bandwidth = self.module.params['bandwidth']
    # LSA arrival timer parameters
    self.lsaalflag = self.module.params['lsaalflag']
    self.lsaainterval = self.module.params['lsaainterval']
    self.lsaamaxinterval = self.module.params['lsaamaxinterval']
    self.lsaastartinterval = self.module.params['lsaastartinterval']
    self.lsaaholdinterval = self.module.params['lsaaholdinterval']
    # LSA originate timer parameters
    self.lsaointervalflag = self.module.params['lsaointervalflag']
    self.lsaointerval = self.module.params['lsaointerval']
    self.lsaomaxinterval = self.module.params['lsaomaxinterval']
    self.lsaostartinterval = self.module.params['lsaostartinterval']
    self.lsaoholdinterval = self.module.params['lsaoholdinterval']
    # SPF schedule timer parameters
    self.spfintervaltype = self.module.params['spfintervaltype']
    self.spfinterval = self.module.params['spfinterval']
    self.spfintervalmi = self.module.params['spfintervalmi']
    self.spfmaxinterval = self.module.params['spfmaxinterval']
    self.spfstartinterval = self.module.params['spfstartinterval']
    self.spfholdinterval = self.module.params['spfholdinterval']
    self.state = self.module.params['state']
    # ospf info
    self.ospf_info = dict()
    # state
    self.changed = False
    self.updates_cmd = list()
    self.results = dict()
    self.proposed = dict()
    self.existing = dict()
    self.end_state = dict()
    # per-feature dirty flags, set while diffing proposed vs existing config
    self.lsa_arrival_changed = False
    self.lsa_originate_changed = False
    self.spf_changed = False
    self.route_id_changed = False
    self.bandwidth_changed = False
    self.description_changed = False
    self.vrf_changed = False
def init_module(self):
    """Initialise the AnsibleModule wrapper (check mode supported)."""
    self.module = AnsibleModule(
        argument_spec=self.spec, supports_check_mode=True)
def check_response(self, xml_str, xml_name):
    """Fail the module run unless the netconf reply contains <ok/>."""
    succeeded = "<ok/>" in xml_str
    if not succeeded:
        self.module.fail_json(msg='Error: %s failed.' % xml_name)
def is_valid_ospf_process_id(self):
    """check whether the input ospf process id is valid"""
    process_id = self.ospf
    if not process_id.isdigit():
        return False
    return 1 <= int(process_id) <= 4294967295
def is_valid_ospf_route_id(self):
    """check is ipv4 addr is valid"""
    # A route id must be a dotted quad of decimal octets, each 0-255.
    if self.route_id.find('.') == -1:
        return False
    octets = self.route_id.split('.')
    if len(octets) != 4:
        return False
    for octet in octets:
        if not octet.isdigit() or int(octet) > 255:
            return False
    return True
def is_valid_vrf_name(self):
    """check whether the input ospf vrf name is valid"""
    name = self.vrf
    if not 1 <= len(name) <= 31:
        return False
    # '?' and spaces are rejected by the device CLI.
    return '?' not in name and ' ' not in name
def is_valid_description(self):
    """check whether the input ospf description is valid"""
    text = self.description
    if not 1 <= len(text) <= 80:
        return False
    return '?' not in text
@staticmethod
def _is_int_in_range(value, minimum, maximum):
    """Return True if ``value`` is a decimal string within [minimum, maximum].

    Shared implementation for every bandwidth/timer range check below.
    A ``None`` value (parameter not supplied) now returns False instead of
    raising AttributeError as the per-field copies did.
    """
    if value is None or not value.isdigit():
        return False
    return minimum <= int(value) <= maximum

def is_valid_bandwidth(self):
    """check whether the input ospf bandwidth reference is valid"""
    return self._is_int_in_range(self.bandwidth, 1, 2147483648)

def is_valid_lsa_arrival_interval(self):
    """check whether the input ospf lsa arrival interval is valid"""
    return self._is_int_in_range(self.lsaainterval, 0, 10000)

def isvalidlsamaxarrivalinterval(self):
    """check whether the input ospf lsa max arrival interval is valid"""
    return self._is_int_in_range(self.lsaamaxinterval, 1, 10000)

def isvalidlsastartarrivalinterval(self):
    """check whether the input ospf lsa start arrival interval is valid"""
    return self._is_int_in_range(self.lsaastartinterval, 0, 1000)

def isvalidlsaholdarrivalinterval(self):
    """check whether the input ospf lsa hold arrival interval is valid"""
    return self._is_int_in_range(self.lsaaholdinterval, 0, 5000)

def is_valid_lsa_originate_interval(self):
    """check whether the input ospf lsa originate interval is valid"""
    return self._is_int_in_range(self.lsaointerval, 0, 10)

def isvalidlsaoriginatemaxinterval(self):
    """check whether the input ospf lsa originate max interval is valid"""
    return self._is_int_in_range(self.lsaomaxinterval, 1, 10000)

def isvalidlsaostartinterval(self):
    """check whether the input ospf lsa originate start interval is valid"""
    return self._is_int_in_range(self.lsaostartinterval, 0, 1000)

def isvalidlsaoholdinterval(self):
    """check whether the input ospf lsa originate hold interval is valid"""
    return self._is_int_in_range(self.lsaoholdinterval, 1, 5000)

def is_valid_spf_interval(self):
    """check whether the input ospf spf interval is valid"""
    return self._is_int_in_range(self.spfinterval, 1, 10)

def is_valid_spf_milli_interval(self):
    """check whether the input ospf spf millisecond level interval is valid"""
    return self._is_int_in_range(self.spfintervalmi, 1, 10000)

def is_valid_spf_max_interval(self):
    """check whether the input ospf spf intelligent timer max interval is valid"""
    return self._is_int_in_range(self.spfmaxinterval, 1, 20000)

def is_valid_spf_start_interval(self):
    """check whether the input ospf spf intelligent timer start interval is valid"""
    return self._is_int_in_range(self.spfstartinterval, 1, 1000)

def is_valid_spf_hold_interval(self):
    """check whether the input ospf spf intelligent timer hold interval is valid"""
    return self._is_int_in_range(self.spfholdinterval, 1, 5000)
def _find_ospf_site(self):
    """Return the ospfSite dict whose processId matches self.ospf, or None.

    Shared lookup for every get_exist_* accessor below; the originals each
    duplicated this linear scan of self.ospf_info["ospfsite"].
    """
    if not self.ospf_info:
        return None
    for ospf_site in self.ospf_info["ospfsite"]:
        if ospf_site["processId"] == self.ospf:
            return ospf_site
    return None

def _get_ospf_site_field(self, field):
    """Return one field of the matching ospfSite entry, or None if absent."""
    ospf_site = self._find_ospf_site()
    if ospf_site is None:
        return None
    return ospf_site[field]

def is_route_id_exist(self):
    """is route id exist"""
    ospf_site = self._find_ospf_site()
    return ospf_site is not None and ospf_site["routerId"] == self.route_id

def get_exist_ospf_id(self):
    """get exist ospf process id"""
    return self._get_ospf_site_field("processId")

def get_exist_route(self):
    """get exist route id"""
    return self._get_ospf_site_field("routerId")

def get_exist_vrf(self):
    """get exist vrf"""
    return self._get_ospf_site_field("vrfName")

def get_exist_bandwidth(self):
    """get exist bandwidth"""
    return self._get_ospf_site_field("bandwidthReference")

def get_exist_lsa_a_interval(self):
    """get exist lsa arrival interval"""
    return self._get_ospf_site_field("lsaArrivalInterval")

def get_exist_lsa_a_interval_flag(self):
    """get exist lsa arrival interval flag"""
    return self._get_ospf_site_field("lsaArrivalFlag")

def get_exist_lsa_a_max_interval(self):
    """get exist lsa arrival max interval"""
    return self._get_ospf_site_field("lsaArrivalMaxInterval")

def get_exist_lsa_a_start_interval(self):
    """get exist lsa arrival start interval"""
    return self._get_ospf_site_field("lsaArrivalStartInterval")

def get_exist_lsa_a_hold_interval(self):
    """get exist lsa arrival hold interval"""
    return self._get_ospf_site_field("lsaArrivalHoldInterval")

def getexistlsaointerval(self):
    """get exist lsa originate interval"""
    return self._get_ospf_site_field("lsaOriginateInterval")

def getexistlsaointerval_flag(self):
    """get exist lsa originate interval flag"""
    return self._get_ospf_site_field("lsaOriginateIntervalFlag")

def getexistlsaomaxinterval(self):
    """get exist lsa originate max interval"""
    return self._get_ospf_site_field("lsaOriginateMaxInterval")

def getexistlsaostartinterval(self):
    """get exist lsa originate start interval"""
    return self._get_ospf_site_field("lsaOriginateStartInterval")

def getexistlsaoholdinterval(self):
    """get exist lsa originate hold interval"""
    return self._get_ospf_site_field("lsaOriginateHoldInterval")

def get_exist_spf_interval(self):
    """get exist spf second level timer interval"""
    return self._get_ospf_site_field("spfScheduleInterval")

def get_exist_spf_milli_interval(self):
    """get exist spf millisecond level timer interval"""
    return self._get_ospf_site_field("spfScheduleIntervalMillisecond")

def get_exist_spf_max_interval(self):
    """get exist spf max interval"""
    return self._get_ospf_site_field("spfScheduleMaxInterval")

def get_exist_spf_start_interval(self):
    """get exist spf start interval"""
    return self._get_ospf_site_field("spfScheduleStartInterval")

def get_exist_spf_hold_interval(self):
    """get exist spf hold interval"""
    return self._get_ospf_site_field("spfScheduleHoldInterval")

def get_exist_spf_interval_type(self):
    """get exist spf schedule interval type"""
    return self._get_ospf_site_field("spfScheduleIntervalType")

def is_ospf_exist(self):
    """is ospf exist"""
    return self._find_ospf_site() is not None

def get_exist_description(self):
    """get exist description"""
    return self._get_ospf_site_field("description")
def check_params(self):
    """Validate every module parameter, failing the module on the first error.

    Convention throughout: ``param == ''`` rejects an explicitly empty
    value, while a separate truthiness check runs the range/format
    validator only when the user actually supplied a value (None is
    skipped). All failures go through ``self.module.fail_json`` and never
    return. NOTE(review): several messages contain the duplicated word
    "should should" — left untouched here because error strings are
    runtime behavior.
    """
    # --- process id / router id / vrf / description / bandwidth ---
    if self.ospf == '':
        self.module.fail_json(
            msg='Error: The ospf process id should not be null.')
    if self.ospf:
        if not self.is_valid_ospf_process_id():
            self.module.fail_json(
                msg='Error: The ospf process id should between 1 - 4294967295.')
    if self.route_id == '':
        self.module.fail_json(
            msg='Error: The ospf route id length should not be null.')
    if self.route_id:
        if not self.is_valid_ospf_route_id():
            self.module.fail_json(
                msg='Error: The ospf route id length should between 0 - 20,i.e.10.1.1.1.')
    if self.vrf == '':
        self.module.fail_json(
            msg='Error: The ospf vpn instance length should not be null.')
    if self.vrf:
        if not self.is_valid_vrf_name():
            self.module.fail_json(
                msg='Error: The ospf vpn instance length should between 0 - 31,but can not contain " " or "?".')
    if self.description == '':
        self.module.fail_json(
            msg='Error: The ospf description should not be null.')
    if self.description:
        if not self.is_valid_description():
            self.module.fail_json(
                msg='Error: The ospf description length should between 1 - 80,but can not contain "?".')
    if self.bandwidth == '':
        self.module.fail_json(
            msg='Error: The ospf bandwidth reference should not be null.')
    if self.bandwidth:
        if not self.is_valid_bandwidth():
            self.module.fail_json(
                msg='Error: The ospf bandwidth reference should between 1 - 2147483648.')
    # --- LSA arrival: the non-intelligent interval and the intelligent
    #     (max/start/hold) timer set are mutually exclusive, selected by
    #     lsaalflag ---
    if self.lsaalflag is True:
        if not self.is_valid_lsa_arrival_interval():
            self.module.fail_json(
                msg='Error: The ospf lsa arrival interval should between 0 - 10000.')
        if self.lsaamaxinterval or self.lsaastartinterval or self.lsaaholdinterval:
            self.module.fail_json(
                msg='Error: Non-Intelligent Timer and Intelligent Timer Interval of '
                'lsa-arrival-interval can not configured at the same time.')
    if self.lsaalflag is False:
        if self.lsaainterval:
            self.module.fail_json(
                msg='Error: The parameter of lsa arrival interval command is invalid, '
                'because LSA arrival interval can not be config when the LSA arrival flag is not set.')
        if self.lsaamaxinterval == '' or self.lsaastartinterval == '' or self.lsaaholdinterval == '':
            self.module.fail_json(
                msg='Error: The ospf lsa arrival intervals should not be null.')
        if self.lsaamaxinterval:
            if not self.isvalidlsamaxarrivalinterval():
                self.module.fail_json(
                    msg='Error: The ospf lsa arrival max interval should between 1 - 10000.')
        if self.lsaastartinterval:
            if not self.isvalidlsastartarrivalinterval():
                self.module.fail_json(
                    msg='Error: The ospf lsa arrival start interval should between 1 - 1000.')
        if self.lsaaholdinterval:
            if not self.isvalidlsaholdarrivalinterval():
                self.module.fail_json(
                    msg='Error: The ospf lsa arrival hold interval should between 1 - 5000.')
    # --- LSA originate: the instant flag excludes every interval value ---
    if self.lsaointervalflag is True:
        if self.lsaointerval or self.lsaomaxinterval \
                or self.lsaostartinterval or self.lsaoholdinterval:
            self.module.fail_json(
                msg='Error: Interval for other-type and Instantly Flag '
                'of lsa-originate-interval can not configured at the same time.')
    if self.lsaointerval == '':
        self.module.fail_json(
            msg='Error: The ospf lsa originate interval should should not be null.')
    if self.lsaointerval:
        if not self.is_valid_lsa_originate_interval():
            self.module.fail_json(
                msg='Error: The ospf lsa originate interval should between 0 - 10 s.')
    if self.lsaomaxinterval == '' or self.lsaostartinterval == '' or self.lsaoholdinterval == '':
        self.module.fail_json(
            msg='Error: The ospf lsa originate intelligent intervals should should not be null.')
    if self.lsaomaxinterval:
        if not self.isvalidlsaoriginatemaxinterval():
            self.module.fail_json(
                msg='Error: The ospf lsa originate max interval should between 1 - 10000 ms.')
    if self.lsaostartinterval:
        if not self.isvalidlsaostartinterval():
            self.module.fail_json(
                msg='Error: The ospf lsa originate start interval should between 0 - 1000 ms.')
    if self.lsaoholdinterval:
        if not self.isvalidlsaoholdinterval():
            self.module.fail_json(
                msg='Error: The ospf lsa originate hold interval should between 1 - 5000 ms.')
    # --- SPF scheduling: exactly one of the three interval styles
    #     (intelligent-timer / timer / millisecond) may carry values ---
    if self.spfintervaltype == '':
        self.module.fail_json(
            msg='Error: The ospf spf interval type should should not be null.')
    if self.spfintervaltype == 'intelligent-timer':
        if self.spfinterval is not None or self.spfintervalmi is not None:
            self.module.fail_json(
                msg='Error: Interval second and interval millisecond '
                'of spf-schedule-interval can not configured if use intelligent timer.')
        if self.spfmaxinterval == '' or self.spfstartinterval == '' or self.spfholdinterval == '':
            self.module.fail_json(
                msg='Error: The ospf spf intelligent timer intervals should should not be null.')
        if self.spfmaxinterval and not self.is_valid_spf_max_interval():
            self.module.fail_json(
                msg='Error: The ospf spf max interval of intelligent timer should between 1 - 20000 ms.')
        if self.spfstartinterval and not self.is_valid_spf_start_interval():
            self.module.fail_json(
                msg='Error: The ospf spf start interval of intelligent timer should between 1 - 1000 ms.')
        if self.spfholdinterval and not self.is_valid_spf_hold_interval():
            self.module.fail_json(
                msg='Error: The ospf spf hold interval of intelligent timer should between 1 - 5000 ms.')
    if self.spfintervaltype == 'timer':
        if self.spfintervalmi is not None:
            self.module.fail_json(
                msg='Error: Interval second and interval millisecond '
                'of spf-schedule-interval can not configured at the same time.')
        if self.spfmaxinterval or self.spfstartinterval or self.spfholdinterval:
            self.module.fail_json(
                msg='Error: Interval second and interval intelligent '
                'of spf-schedule-interval can not configured at the same time.')
        if self.spfinterval == '' or self.spfinterval is None:
            self.module.fail_json(
                msg='Error: The ospf spf timer intervals should should not be null.')
        if not self.is_valid_spf_interval():
            self.module.fail_json(
                msg='Error: Interval second should between 1 - 10 s.')
    if self.spfintervaltype == 'millisecond':
        if self.spfinterval is not None:
            self.module.fail_json(
                msg='Error: Interval millisecond and interval second '
                'of spf-schedule-interval can not configured at the same time.')
        if self.spfmaxinterval or self.spfstartinterval or self.spfholdinterval:
            self.module.fail_json(
                msg='Error: Interval millisecond and interval intelligent '
                'of spf-schedule-interval can not configured at the same time.')
        if self.spfintervalmi == '' or self.spfintervalmi is None:
            self.module.fail_json(
                msg='Error: The ospf spf millisecond intervals should should not be null.')
        if not self.is_valid_spf_milli_interval():
            self.module.fail_json(
                msg='Error: Interval millisecond should between 1 - 10000 ms.')
def get_ospf_info(self):
    """Fetch OSPF process details from the device via NETCONF.

    Resets ``self.ospf_info["ospfsite"]`` and repopulates it with one
    dict per ospfSite element found in the device reply. Leaves the list
    empty when the device returns no configuration data.
    """
    self.ospf_info["ospfsite"] = list()
    getxmlstr = CE_NC_GET_OSPF_VRF
    xml_str = get_nc_config(self.module, getxmlstr)
    # presumably an empty '<data/>' reply means no OSPF config — TODO confirm
    if 'data/' in xml_str:
        return
    # Strip newlines and namespace declarations so the simple findall
    # paths below match without namespace prefixes.
    xml_str = xml_str.replace('\r', '').replace('\n', '').\
        replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
        replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
    root = ElementTree.fromstring(xml_str)
    # get the vpn address family and RD text
    ospf_sites = root.findall(
        "data/ospfv2/ospfv2comm/ospfSites/ospfSite")
    if ospf_sites:
        for ospf_site in ospf_sites:
            ospf_ele_info = dict()
            for ospf_site_ele in ospf_site:
                # Keep only the leaves this module manages.
                if ospf_site_ele.tag in ["processId", "routerId", "vrfName", "bandwidthReference",
                                         "description", "lsaArrivalInterval", "lsaArrivalMaxInterval",
                                         "lsaArrivalStartInterval", "lsaArrivalHoldInterval", "lsaArrivalFlag",
                                         "lsaOriginateInterval", "lsaOriginateMaxInterval",
                                         "lsaOriginateStartInterval", "lsaOriginateHoldInterval",
                                         "lsaOriginateIntervalFlag", "spfScheduleInterval",
                                         "spfScheduleIntervalMillisecond", "spfScheduleMaxInterval",
                                         "spfScheduleStartInterval", "spfScheduleHoldInterval",
                                         "spfScheduleIntervalType"]:
                    ospf_ele_info[
                        ospf_site_ele.tag] = ospf_site_ele.text
            self.ospf_info["ospfsite"].append(ospf_ele_info)
def get_proposed(self):
    """Record every module input parameter in ``self.proposed``."""
    # Table-driven copy; order matches the module's documented output.
    param_map = (
        ("process_id", self.ospf),
        ("route_id", self.route_id),
        ("vrf", self.vrf),
        ("description", self.description),
        ("bandwidth", self.bandwidth),
        ("lsaalflag", self.lsaalflag),
        ("lsaainterval", self.lsaainterval),
        ("lsaamaxinterval", self.lsaamaxinterval),
        ("lsaastartinterval", self.lsaastartinterval),
        ("lsaaholdinterval", self.lsaaholdinterval),
        ("lsaointervalflag", self.lsaointervalflag),
        ("lsaointerval", self.lsaointerval),
        ("lsaomaxinterval", self.lsaomaxinterval),
        ("lsaostartinterval", self.lsaostartinterval),
        ("lsaoholdinterval", self.lsaoholdinterval),
        ("spfintervaltype", self.spfintervaltype),
        ("spfinterval", self.spfinterval),
        ("spfintervalmi", self.spfintervalmi),
        ("spfmaxinterval", self.spfmaxinterval),
        ("spfstartinterval", self.spfstartinterval),
        ("spfholdinterval", self.spfholdinterval),
    )
    for key, value in param_map:
        self.proposed[key] = value
def operate_ospf_info(self):
    """Apply the requested OSPF changes to the device.

    For every attribute the method first takes the device's current
    value (falling back to the documented device default when absent),
    then overrides it with the user's value and marks the matching
    ``*_changed`` flag when state=present, or restores the default when
    state=absent. Deleting the route-id or vrf short-circuits with a
    full process delete and returns early. When at least one group
    changed, a single CE_NC_CREATE_OSPF_VRF config is pushed; otherwise
    ``self.changed`` is set False and nothing is sent.
    """
    config_route_id_xml = ''
    # --- seed every field with current device value or device default ---
    vrf = self.get_exist_vrf()
    if vrf is None:
        vrf = '_public_'
    description = self.get_exist_description()
    if description is None:
        description = ''
    bandwidth_reference = self.get_exist_bandwidth()
    if bandwidth_reference is None:
        bandwidth_reference = '100'
    lsa_in_interval = self.get_exist_lsa_a_interval()
    if lsa_in_interval is None:
        lsa_in_interval = ''
    lsa_arrival_max_interval = self.get_exist_lsa_a_max_interval()
    if lsa_arrival_max_interval is None:
        lsa_arrival_max_interval = '1000'
    lsa_arrival_start_interval = self.get_exist_lsa_a_start_interval()
    if lsa_arrival_start_interval is None:
        lsa_arrival_start_interval = '500'
    lsa_arrival_hold_interval = self.get_exist_lsa_a_hold_interval()
    if lsa_arrival_hold_interval is None:
        lsa_arrival_hold_interval = '500'
    lsa_originate_interval = self.getexistlsaointerval()
    if lsa_originate_interval is None:
        lsa_originate_interval = '5'
    lsa_originate_max_interval = self.getexistlsaomaxinterval()
    if lsa_originate_max_interval is None:
        lsa_originate_max_interval = '5000'
    lsa_originate_start_interval = self.getexistlsaostartinterval()
    if lsa_originate_start_interval is None:
        lsa_originate_start_interval = '500'
    lsa_originate_hold_interval = self.getexistlsaoholdinterval()
    if lsa_originate_hold_interval is None:
        lsa_originate_hold_interval = '1000'
    spf_interval = self.get_exist_spf_interval()
    if spf_interval is None:
        spf_interval = ''
    spf_interval_milli = self.get_exist_spf_milli_interval()
    if spf_interval_milli is None:
        spf_interval_milli = ''
    spf_max_interval = self.get_exist_spf_max_interval()
    if spf_max_interval is None:
        spf_max_interval = '5000'
    spf_start_interval = self.get_exist_spf_start_interval()
    if spf_start_interval is None:
        spf_start_interval = '50'
    spf_hold_interval = self.get_exist_spf_hold_interval()
    if spf_hold_interval is None:
        spf_hold_interval = '200'
    # --- route id: deleting it deletes the whole OSPF process ---
    if self.route_id:
        if self.state == 'present':
            if self.route_id != self.get_exist_route():
                self.route_id_changed = True
                config_route_id_xml = CE_NC_CREATE_ROUTE_ID % self.route_id
        else:
            if self.route_id != self.get_exist_route():
                self.module.fail_json(
                    msg='Error: The route id %s is not exist.' % self.route_id)
            self.route_id_changed = True
            configxmlstr = CE_NC_DELETE_OSPF % (
                self.ospf, self.get_exist_route(), self.get_exist_vrf())
            conf_str = build_config_xml(configxmlstr)
            recv_xml = set_nc_config(self.module, conf_str)
            self.check_response(recv_xml, "OPERATE_VRF_AF")
            self.changed = True
            return
    # --- vrf: deleting a non-default vrf also deletes the process ---
    if self.vrf != '_public_':
        if self.state == 'present':
            if self.vrf != self.get_exist_vrf():
                self.vrf_changed = True
                vrf = self.vrf
        else:
            if self.vrf != self.get_exist_vrf():
                self.module.fail_json(
                    msg='Error: The vrf %s is not exist.' % self.vrf)
            self.vrf_changed = True
            configxmlstr = CE_NC_DELETE_OSPF % (
                self.ospf, self.get_exist_route(), self.get_exist_vrf())
            conf_str = build_config_xml(configxmlstr)
            recv_xml = set_nc_config(self.module, conf_str)
            self.check_response(recv_xml, "OPERATE_VRF_AF")
            self.changed = True
            return
    # --- bandwidth reference (device default '100') ---
    if self.bandwidth:
        if self.state == 'present':
            if self.bandwidth != self.get_exist_bandwidth():
                self.bandwidth_changed = True
                bandwidth_reference = self.bandwidth
        else:
            if self.bandwidth != self.get_exist_bandwidth():
                self.module.fail_json(
                    msg='Error: The bandwidth %s is not exist.' % self.bandwidth)
            if self.get_exist_bandwidth() != '100':
                self.bandwidth_changed = True
                bandwidth_reference = '100'
    # --- description ---
    if self.description:
        if self.state == 'present':
            if self.description != self.get_exist_description():
                self.description_changed = True
                description = self.description
        else:
            if self.description != self.get_exist_description():
                self.module.fail_json(
                    msg='Error: The description %s is not exist.' % self.description)
            self.description_changed = True
            description = ''
    # --- LSA arrival: intelligent timer (flag False) vs plain interval ---
    if self.lsaalflag is False:
        lsa_in_interval = ''
        if self.state == 'present':
            if self.lsaamaxinterval:
                if self.lsaamaxinterval != self.get_exist_lsa_a_max_interval():
                    self.lsa_arrival_changed = True
                    lsa_arrival_max_interval = self.lsaamaxinterval
            if self.lsaastartinterval:
                if self.lsaastartinterval != self.get_exist_lsa_a_start_interval():
                    self.lsa_arrival_changed = True
                    lsa_arrival_start_interval = self.lsaastartinterval
            if self.lsaaholdinterval:
                if self.lsaaholdinterval != self.get_exist_lsa_a_hold_interval():
                    self.lsa_arrival_changed = True
                    lsa_arrival_hold_interval = self.lsaaholdinterval
        else:
            if self.lsaamaxinterval:
                if self.lsaamaxinterval != self.get_exist_lsa_a_max_interval():
                    self.module.fail_json(
                        msg='Error: The lsaamaxinterval %s is not exist.' % self.lsaamaxinterval)
                if self.get_exist_lsa_a_max_interval() != '1000':
                    lsa_arrival_max_interval = '1000'
                    self.lsa_arrival_changed = True
            if self.lsaastartinterval:
                if self.lsaastartinterval != self.get_exist_lsa_a_start_interval():
                    self.module.fail_json(
                        msg='Error: The lsaastartinterval %s is not exist.' % self.lsaastartinterval)
                if self.get_exist_lsa_a_start_interval() != '500':
                    lsa_arrival_start_interval = '500'
                    self.lsa_arrival_changed = True
            if self.lsaaholdinterval:
                if self.lsaaholdinterval != self.get_exist_lsa_a_hold_interval():
                    self.module.fail_json(
                        msg='Error: The lsaaholdinterval %s is not exist.' % self.lsaaholdinterval)
                if self.get_exist_lsa_a_hold_interval() != '500':
                    lsa_arrival_hold_interval = '500'
                    self.lsa_arrival_changed = True
    else:
        if self.state == 'present':
            lsaalflag = "false"
            if self.lsaalflag is True:
                lsaalflag = "true"
            if lsaalflag != self.get_exist_lsa_a_interval_flag():
                self.lsa_arrival_changed = True
                if self.lsaainterval is None:
                    self.module.fail_json(
                        msg='Error: The lsaainterval is not supplied.')
                else:
                    lsa_in_interval = self.lsaainterval
            else:
                if self.lsaainterval:
                    if self.lsaainterval != self.get_exist_lsa_a_interval():
                        self.lsa_arrival_changed = True
                        lsa_in_interval = self.lsaainterval
        else:
            if self.lsaainterval:
                if self.lsaainterval != self.get_exist_lsa_a_interval():
                    self.module.fail_json(
                        msg='Error: The lsaainterval %s is not exist.' % self.lsaainterval)
                self.lsaalflag = False
                lsa_in_interval = ''
                self.lsa_arrival_changed = True
    # --- LSA originate: intelligent timer (flag False) vs instant flag ---
    if self.lsaointervalflag is False:
        if self.state == 'present':
            if self.lsaomaxinterval:
                if self.lsaomaxinterval != self.getexistlsaomaxinterval():
                    self.lsa_originate_changed = True
                    lsa_originate_max_interval = self.lsaomaxinterval
            if self.lsaostartinterval:
                if self.lsaostartinterval != self.getexistlsaostartinterval():
                    self.lsa_originate_changed = True
                    lsa_originate_start_interval = self.lsaostartinterval
            if self.lsaoholdinterval:
                if self.lsaoholdinterval != self.getexistlsaoholdinterval():
                    self.lsa_originate_changed = True
                    lsa_originate_hold_interval = self.lsaoholdinterval
            if self.lsaointerval:
                if self.lsaointerval != self.getexistlsaointerval():
                    self.lsa_originate_changed = True
                    lsa_originate_interval = self.lsaointerval
        else:
            if self.lsaomaxinterval:
                if self.lsaomaxinterval != self.getexistlsaomaxinterval():
                    self.module.fail_json(
                        msg='Error: The lsaomaxinterval %s is not exist.' % self.lsaomaxinterval)
                if self.getexistlsaomaxinterval() != '5000':
                    lsa_originate_max_interval = '5000'
                    self.lsa_originate_changed = True
            if self.lsaostartinterval:
                if self.lsaostartinterval != self.getexistlsaostartinterval():
                    self.module.fail_json(
                        msg='Error: The lsaostartinterval %s is not exist.' % self.lsaostartinterval)
                if self.getexistlsaostartinterval() != '500':
                    lsa_originate_start_interval = '500'
                    self.lsa_originate_changed = True
            if self.lsaoholdinterval:
                if self.lsaoholdinterval != self.getexistlsaoholdinterval():
                    self.module.fail_json(
                        msg='Error: The lsaoholdinterval %s is not exist.' % self.lsaoholdinterval)
                if self.getexistlsaoholdinterval() != '1000':
                    lsa_originate_hold_interval = '1000'
                    self.lsa_originate_changed = True
            if self.lsaointerval:
                if self.lsaointerval != self.getexistlsaointerval():
                    self.module.fail_json(
                        msg='Error: The lsaointerval %s is not exist.' % self.lsaointerval)
                if self.getexistlsaointerval() != '5':
                    lsa_originate_interval = '5'
                    self.lsa_originate_changed = True
    else:
        if self.state == 'present':
            if self.getexistlsaointerval_flag() != 'true':
                self.lsa_originate_changed = True
                lsa_originate_interval = '5'
                lsa_originate_max_interval = '5000'
                lsa_originate_start_interval = '500'
                lsa_originate_hold_interval = '1000'
        else:
            if self.getexistlsaointerval_flag() == 'true':
                self.lsaointervalflag = False
                self.lsa_originate_changed = True
    # --- SPF scheduling; note that an 'absent' timer/millisecond type
    #     rewrites self.spfintervaltype to 'intelligent-timer' and then
    #     deliberately falls through into the intelligent-timer branch ---
    if self.spfintervaltype != self.get_exist_spf_interval_type():
        self.spf_changed = True
    if self.spfintervaltype == 'timer':
        if self.spfinterval:
            if self.state == 'present':
                if self.spfinterval != self.get_exist_spf_interval():
                    self.spf_changed = True
                    spf_interval = self.spfinterval
                    spf_interval_milli = ''
            else:
                if self.spfinterval != self.get_exist_spf_interval():
                    self.module.fail_json(
                        msg='Error: The spfinterval %s is not exist.' % self.spfinterval)
                self.spfintervaltype = 'intelligent-timer'
                spf_interval = ''
                self.spf_changed = True
    if self.spfintervaltype == 'millisecond':
        if self.spfintervalmi:
            if self.state == 'present':
                if self.spfintervalmi != self.get_exist_spf_milli_interval():
                    self.spf_changed = True
                    spf_interval_milli = self.spfintervalmi
                    spf_interval = ''
            else:
                if self.spfintervalmi != self.get_exist_spf_milli_interval():
                    self.module.fail_json(
                        msg='Error: The spfintervalmi %s is not exist.' % self.spfintervalmi)
                self.spfintervaltype = 'intelligent-timer'
                spf_interval_milli = ''
                self.spf_changed = True
    if self.spfintervaltype == 'intelligent-timer':
        spf_interval = ''
        spf_interval_milli = ''
        if self.spfmaxinterval:
            if self.state == 'present':
                if self.spfmaxinterval != self.get_exist_spf_max_interval():
                    self.spf_changed = True
                    spf_max_interval = self.spfmaxinterval
            else:
                if self.spfmaxinterval != self.get_exist_spf_max_interval():
                    self.module.fail_json(
                        msg='Error: The spfmaxinterval %s is not exist.' % self.spfmaxinterval)
                if self.get_exist_spf_max_interval() != '5000':
                    self.spf_changed = True
                    spf_max_interval = '5000'
        if self.spfstartinterval:
            if self.state == 'present':
                if self.spfstartinterval != self.get_exist_spf_start_interval():
                    self.spf_changed = True
                    spf_start_interval = self.spfstartinterval
            else:
                if self.spfstartinterval != self.get_exist_spf_start_interval():
                    self.module.fail_json(
                        msg='Error: The spfstartinterval %s is not exist.' % self.spfstartinterval)
                if self.get_exist_spf_start_interval() != '50':
                    self.spf_changed = True
                    spf_start_interval = '50'
        if self.spfholdinterval:
            if self.state == 'present':
                if self.spfholdinterval != self.get_exist_spf_hold_interval():
                    self.spf_changed = True
                    spf_hold_interval = self.spfholdinterval
            else:
                if self.spfholdinterval != self.get_exist_spf_hold_interval():
                    self.module.fail_json(
                        msg='Error: The spfholdinterval %s is not exist.' % self.spfholdinterval)
                if self.get_exist_spf_hold_interval() != '200':
                    self.spf_changed = True
                    spf_hold_interval = '200'
    # --- nothing changed: report idempotent and skip the config push ---
    if not self.description_changed and not self.vrf_changed and not self.lsa_arrival_changed \
            and not self.lsa_originate_changed and not self.spf_changed \
            and not self.route_id_changed and not self.bandwidth_changed:
        self.changed = False
        return
    else:
        self.changed = True
    # --- serialize booleans and push one combined NETCONF config ---
    lsaointervalflag = "false"
    lsaalflag = "false"
    if self.lsaointervalflag is True:
        lsaointervalflag = "true"
    if self.lsaalflag is True:
        lsaalflag = "true"
    configxmlstr = CE_NC_CREATE_OSPF_VRF % (
        self.ospf, config_route_id_xml, vrf,
        description, bandwidth_reference, lsaalflag,
        lsa_in_interval, lsa_arrival_max_interval, lsa_arrival_start_interval,
        lsa_arrival_hold_interval, lsaointervalflag, lsa_originate_interval,
        lsa_originate_max_interval, lsa_originate_start_interval, lsa_originate_hold_interval,
        self.spfintervaltype, spf_interval, spf_interval_milli,
        spf_max_interval, spf_start_interval, spf_hold_interval)
    conf_str = build_config_xml(configxmlstr)
    recv_xml = set_nc_config(self.module, conf_str)
    self.check_response(recv_xml, "OPERATE_VRF_AF")
def get_existing(self):
    """Refresh device OSPF data and record it as the 'existing' state."""
    self.get_ospf_info()
    sites = self.ospf_info["ospfsite"]
    self.existing['ospf_info'] = sites
def set_update_cmd(self):
    """Build the human-readable CLI command list mirroring the change.

    Appends to ``self.updates_cmd`` only the commands for attribute
    groups whose ``*_changed`` flag was set by operate_ospf_info. A
    process delete (absent + route-id change) emits a single
    ``undo ospf`` and returns early; otherwise the process-entry command
    is emitted first and per-attribute commands follow. Interval values
    are re-read from the (already refreshed) device state rather than
    from the module parameters.
    """
    if not self.changed:
        return
    # --- process entry command (with router-id / vpn-instance) ---
    if self.state == 'present':
        if self.vrf_changed:
            if self.vrf != '_public_':
                if self.route_id_changed:
                    self.updates_cmd.append(
                        'ospf %s router-id %s vpn-instance %s' % (self.ospf, self.route_id, self.vrf))
                else:
                    self.updates_cmd.append(
                        'ospf %s vpn-instance %s ' % (self.ospf, self.vrf))
            else:
                if self.route_id_changed:
                    self.updates_cmd.append(
                        'ospf %s router-id %s' % (self.ospf, self.route_id))
        else:
            if self.route_id_changed:
                if self.vrf != '_public_':
                    self.updates_cmd.append(
                        'ospf %s router-id %s vpn-instance %s' % (self.ospf, self.route_id, self.get_exist_vrf()))
                else:
                    self.updates_cmd.append(
                        'ospf %s router-id %s' % (self.ospf, self.route_id))
    else:
        if self.route_id_changed:
            # Route-id removal deletes the whole process; nothing else
            # to report.
            self.updates_cmd.append('undo ospf %s' % self.ospf)
            return
    self.updates_cmd.append('ospf %s' % self.ospf)
    # --- per-attribute commands, in fixed order ---
    if self.description:
        if self.state == 'present':
            if self.description_changed:
                self.updates_cmd.append(
                    'description %s' % self.description)
        else:
            if self.description_changed:
                self.updates_cmd.append('undo description')
    if self.bandwidth_changed:
        if self.state == 'present':
            if self.get_exist_bandwidth() != '100':
                self.updates_cmd.append(
                    'bandwidth-reference %s' % (self.get_exist_bandwidth()))
        else:
            self.updates_cmd.append('undo bandwidth-reference')
    if self.lsaalflag is True:
        if self.lsa_arrival_changed:
            if self.state == 'present':
                self.updates_cmd.append(
                    'lsa-arrival-interval %s' % (self.get_exist_lsa_a_interval()))
            else:
                self.updates_cmd.append(
                    'undo lsa-arrival-interval')
    if self.lsaalflag is False:
        if self.lsa_arrival_changed:
            if self.state == 'present':
                # Only report when the intervals deviate from defaults.
                if self.get_exist_lsa_a_max_interval() != '1000' \
                        or self.get_exist_lsa_a_start_interval() != '500'\
                        or self.get_exist_lsa_a_hold_interval() != '500':
                    self.updates_cmd.append('lsa-arrival-interval intelligent-timer %s %s %s'
                                            % (self.get_exist_lsa_a_max_interval(),
                                               self.get_exist_lsa_a_start_interval(),
                                               self.get_exist_lsa_a_hold_interval()))
            else:
                if self.get_exist_lsa_a_max_interval() == '1000' \
                        and self.get_exist_lsa_a_start_interval() == '500'\
                        and self.get_exist_lsa_a_hold_interval() == '500':
                    self.updates_cmd.append(
                        'undo lsa-arrival-interval')
    if self.lsaointervalflag is False:
        if self.lsa_originate_changed:
            if self.state == 'present':
                if self.getexistlsaointerval() != '5' \
                        or self.getexistlsaomaxinterval() != '5000' \
                        or self.getexistlsaostartinterval() != '500' \
                        or self.getexistlsaoholdinterval() != '1000':
                    self.updates_cmd.append('lsa-originate-interval other-type %s intelligent-timer %s %s %s'
                                            % (self.getexistlsaointerval(),
                                               self.getexistlsaomaxinterval(),
                                               self.getexistlsaostartinterval(),
                                               self.getexistlsaoholdinterval()))
            else:
                self.updates_cmd.append(
                    'undo lsa-originate-interval')
    if self.lsaointervalflag is True:
        if self.lsa_originate_changed:
            if self.state == 'present':
                self.updates_cmd.append('lsa-originate-interval 0 ')
            else:
                self.updates_cmd.append(
                    'undo lsa-originate-interval')
    if self.spfintervaltype == 'millisecond':
        if self.spf_changed:
            if self.state == 'present':
                self.updates_cmd.append(
                    'spf-schedule-interval millisecond %s' % self.get_exist_spf_milli_interval())
            else:
                self.updates_cmd.append(
                    'undo spf-schedule-interval')
    if self.spfintervaltype == 'timer':
        if self.spf_changed:
            if self.state == 'present':
                self.updates_cmd.append(
                    'spf-schedule-interval %s' % self.get_exist_spf_interval())
            else:
                self.updates_cmd.append(
                    'undo spf-schedule-interval')
    if self.spfintervaltype == 'intelligent-timer':
        if self.spf_changed:
            if self.state == 'present':
                if self.get_exist_spf_max_interval() != '5000' \
                        or self.get_exist_spf_start_interval() != '50' \
                        or self.get_exist_spf_hold_interval() != '200':
                    self.updates_cmd.append('spf-schedule-interval intelligent-timer %s %s %s'
                                            % (self.get_exist_spf_max_interval(),
                                               self.get_exist_spf_start_interval(),
                                               self.get_exist_spf_hold_interval()))
            else:
                self.updates_cmd.append(
                    'undo spf-schedule-interval')
def get_end_state(self):
    """Re-read device OSPF data and record it as the end state."""
    self.get_ospf_info()
    sites = self.ospf_info["ospfsite"]
    self.end_state['ospf_info'] = sites
def work(self):
    """Drive the module: validate, gather state, apply config, report."""
    # Fixed pipeline — order matters (existing state must be read before
    # operating, end state after).
    self.check_params()
    self.get_existing()
    self.get_proposed()
    self.operate_ospf_info()
    self.get_end_state()
    self.set_update_cmd()
    self.results.update(
        changed=self.changed,
        proposed=self.proposed,
        existing=self.existing,
        end_state=self.end_state,
        updates=self.updates_cmd if self.changed else list(),
    )
    self.module.exit_json(**self.results)
def main():
    """Module entry point: declare the argument spec and run the worker.

    All interval/bandwidth parameters are strings (validated later by
    check_params); the two *flag* parameters are booleans selecting
    between the intelligent-timer and plain-interval command forms.
    """
    argument_spec = dict(
        ospf=dict(required=True, type='str'),
        route_id=dict(required=False, type='str'),
        vrf=dict(required=False, type='str', default='_public_'),
        description=dict(required=False, type='str'),
        bandwidth=dict(required=False, type='str'),
        lsaalflag=dict(type='bool', default=False),
        lsaainterval=dict(required=False, type='str'),
        lsaamaxinterval=dict(required=False, type='str'),
        lsaastartinterval=dict(required=False, type='str'),
        lsaaholdinterval=dict(required=False, type='str'),
        lsaointervalflag=dict(type='bool', default=False),
        lsaointerval=dict(required=False, type='str'),
        lsaomaxinterval=dict(required=False, type='str'),
        lsaostartinterval=dict(required=False, type='str'),
        lsaoholdinterval=dict(required=False, type='str'),
        spfintervaltype=dict(required=False, default='intelligent-timer',
                             choices=['intelligent-timer', 'timer', 'millisecond']),
        spfinterval=dict(required=False, type='str'),
        spfintervalmi=dict(required=False, type='str'),
        spfmaxinterval=dict(required=False, type='str'),
        spfstartinterval=dict(required=False, type='str'),
        spfholdinterval=dict(required=False, type='str'),
        state=dict(required=False, choices=['present', 'absent'], default='present'),
    )
    # Add the common CloudEngine connection arguments.
    argument_spec.update(ce_argument_spec)
    module = OspfVrf(argument_spec)
    module.work()
if __name__ == '__main__':
main()
|
petteyg/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyUnresolvedReferencesInspection/builtinDerivedClassAttribute.py
|
83
|
class C(dict):
def foo(self):
pass
def bar(self):
self.foo() #pass
|
areski/django
|
refs/heads/master
|
django/contrib/gis/utils/wkt.py
|
589
|
"""
Utilities for manipulating Geometry WKT.
"""
from django.utils import six
def precision_wkt(geom, prec):
"""
Returns WKT text of the geometry according to the given precision (an
integer or a string). If the precision is an integer, then the decimal
places of coordinates WKT will be truncated to that number:
>>> from django.contrib.gis.geos import Point
>>> pnt = Point(5, 23)
>>> pnt.wkt
'POINT (5.0000000000000000 23.0000000000000000)'
>>> precision_wkt(pnt, 1)
'POINT (5.0 23.0)'
If the precision is a string, it must be valid Python format string
(e.g., '%20.7f') -- thus, you should know what you're doing.
"""
if isinstance(prec, int):
num_fmt = '%%.%df' % prec
elif isinstance(prec, six.string_types):
num_fmt = prec
else:
raise TypeError
# TODO: Support 3D geometries.
coord_fmt = ' '.join([num_fmt, num_fmt])
def formatted_coords(coords):
return ','.join(coord_fmt % c[:2] for c in coords)
def formatted_poly(poly):
return ','.join('(%s)' % formatted_coords(r) for r in poly)
def formatted_geom(g):
gtype = str(g.geom_type).upper()
yield '%s(' % gtype
if gtype == 'POINT':
yield formatted_coords((g.coords,))
elif gtype in ('LINESTRING', 'LINEARRING'):
yield formatted_coords(g.coords)
elif gtype in ('POLYGON', 'MULTILINESTRING'):
yield formatted_poly(g)
elif gtype == 'MULTIPOINT':
yield formatted_coords(g.coords)
elif gtype == 'MULTIPOLYGON':
yield ','.join('(%s)' % formatted_poly(p) for p in g)
elif gtype == 'GEOMETRYCOLLECTION':
yield ','.join(''.join(wkt for wkt in formatted_geom(child)) for child in g)
else:
raise TypeError
yield ')'
return ''.join(wkt for wkt in formatted_geom(geom))
|
evandavid/dodolipet
|
refs/heads/master
|
yowsup/layers/protocol_groups/protocolentities/iq_groups_participants_remove_success.py
|
4
|
from yowsup.structs import ProtocolTreeNode
from yowsup.layers.protocol_iq.protocolentities import ResultIqProtocolEntity
class SuccessRemoveParticipantsIqProtocolEntity(ResultIqProtocolEntity):
    '''
    Result IQ confirming that participants were removed from a group.

    Wire format:
    <iq type="result" from="{{group_jid}}" id="{{id}}">
        <remove type="success" participant="{{jid}}"></remove>
        <remove type="success" participant="{{jid}}"></remove>
    </iq>
    '''
    def __init__(self, _id, groupId, participantList):
        # The group jid is both the IQ's "from" and this entity's groupId.
        super(SuccessRemoveParticipantsIqProtocolEntity, self).__init__(_from = groupId, _id = _id)
        self.setProps(groupId, participantList)

    def setProps(self, groupId, participantList):
        # groupId: jid of the group; participantList: jids reported removed.
        self.groupId = groupId
        self.participantList = participantList
        self.action = 'remove'

    def getAction(self):
        # Always 'remove' for this entity type.
        return self.action

    def toProtocolTreeNode(self):
        # Start from the base result-IQ node, then append one
        # <remove type="success"> child per removed participant.
        node = super(SuccessRemoveParticipantsIqProtocolEntity, self).toProtocolTreeNode()
        participantNodes = [
            ProtocolTreeNode("remove", {
                "type": "success",
                "participant": participant
            })
            for participant in self.participantList
        ]
        node.addChildren(participantNodes)
        return node

    @staticmethod
    def fromProtocolTreeNode(node):
        # Parse the generic result IQ, then re-type the instance in place
        # (common yowsup idiom) and fill in the removal-specific fields.
        entity = ResultIqProtocolEntity.fromProtocolTreeNode(node)
        entity.__class__ = SuccessRemoveParticipantsIqProtocolEntity
        participantList = []
        for participantNode in node.getAllChildren():
            # Only children marked success count as removed participants.
            if participantNode["type"]=="success":
                participantList.append(participantNode["participant"])
        entity.setProps(node.getAttributeValue("from"), participantList)
        return entity
|
woqer/kappa
|
refs/heads/develop
|
kappa/__init__.py
|
3
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014, 2015 Mitch Garnaat
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.7.0'
|
thu-ml/zhusuan
|
refs/heads/master
|
examples/toy_examples/gaussian.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import numpy as np
from scipy import stats
import matplotlib.pyplot as plt
import tensorflow as tf
import zhusuan as zs
@zs.meta_bayesian_net()
def gaussian(n_x, stdev, n_particles):
    """Toy model: 'x' ~ Normal(0, stdev), drawn n_particles times.

    n_x is the dimensionality of x; group_ndims=1 treats the n_x
    dimensions as one event so their log-probs are summed.
    """
    bn = zs.BayesianNet()
    bn.normal('x', tf.zeros([n_x]), std=stdev, n_samples=n_particles,
              group_ndims=1)
    return bn
if __name__ == "__main__":
    # Seed the TF1 graph-level RNG so sampling runs are reproducible.
    tf.set_random_seed(1)
    # Define model parameters
    n_x = 1
    # n_x = 10
    # Per-dimension target stddevs: 1, 1/2, ..., 1/n_x.
    stdev = 1 / (np.arange(n_x, dtype=np.float32) + 1)
    # Define HMC parameters
    kernel_width = 0.1
    n_chains = 1000
    n_iters = 200
    burnin = n_iters // 2
    n_leapfrogs = 5
    # Build the computation graph
    model = gaussian(n_x, stdev, n_chains)
    # Boolean placeholders toggle step-size / mass adaptation per iteration.
    adapt_step_size = tf.placeholder(tf.bool, shape=[], name="adapt_step_size")
    adapt_mass = tf.placeholder(tf.bool, shape=[], name="adapt_mass")
    hmc = zs.HMC(step_size=1e-3, n_leapfrogs=n_leapfrogs,
                 adapt_step_size=adapt_step_size, adapt_mass=adapt_mass,
                 target_acceptance_rate=0.9)
    # One row of latent state per chain; updated in place by sample_op.
    x = tf.Variable(tf.zeros([n_chains, n_x]), trainable=False, name='x')
    sample_op, hmc_info = hmc.sample(model, {}, {'x': x})
    # Run the inference
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        samples = []
        print('Sampling...')
        for i in range(n_iters):
            # Adapt step size and mass only during the first half of burn-in.
            _, x_sample, acc, ss = sess.run(
                [sample_op, hmc_info.samples['x'], hmc_info.acceptance_rate,
                 hmc_info.updated_step_size],
                feed_dict={adapt_step_size: i < burnin // 2,
                           adapt_mass: i < burnin // 2})
            print('Sample {}: Acceptance rate = {}, updated step size = {}'
                  .format(i, np.mean(acc), ss))
            # Keep only post-burn-in samples.
            if i >= burnin:
                samples.append(x_sample)
        print('Finished.')
        # Stack chain samples into a single [(n_iters - burnin) * n_chains, n_x] array.
        samples = np.vstack(samples)
    # Check & plot the results
    print('Expected mean = {}'.format(np.zeros(n_x)))
    print('Sample mean = {}'.format(np.mean(samples, 0)))
    print('Expected stdev = {}'.format(stdev))
    print('Sample stdev = {}'.format(np.std(samples, 0)))
    print('Relative error of stdev = {}'.format(
        (np.std(samples, 0) - stdev) / stdev))
    def kde(xs, mu, batch_size):
        # Gaussian kernel density estimate of samples `mu` evaluated at
        # points `xs`, processed in batches to bound memory use.  Uses the
        # enclosing `kernel_width` as the kernel bandwidth.
        mu_n = len(mu)
        assert mu_n % batch_size == 0
        xs_row = np.expand_dims(xs, 1)
        ys = np.zeros(xs.shape)
        for b in range(mu_n // batch_size):
            mu_col = np.expand_dims(mu[b * batch_size:(b + 1) * batch_size], 0)
            ys += (1 / np.sqrt(2 * np.pi) / kernel_width) * \
                np.mean(np.exp((-0.5 / kernel_width ** 2) *
                               np.square(xs_row - mu_col)), 1)
        ys /= (mu_n / batch_size)
        return ys
    # Plot the estimated density against the true N(0, stdev^2) pdf (1-D only).
    if n_x == 1:
        xs = np.linspace(-5, 5, 1000)
        ys = kde(xs, np.squeeze(samples), n_chains)
        f, ax = plt.subplots()
        ax.plot(xs, ys)
        ax.plot(xs, stats.norm.pdf(xs, scale=stdev[0]))
        plt.show()
|
blaquee/volatility
|
refs/heads/master
|
volatility/plugins/overlays/windows/win10_x64_1AC738FB_vtypes.py
|
4
|
ntkrnlmp_types = {
'LIST_ENTRY64' : [ 0x10, {
'Flink' : [ 0x0, ['unsigned long long']],
'Blink' : [ 0x8, ['unsigned long long']],
} ],
'LIST_ENTRY32' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long']],
'Blink' : [ 0x4, ['unsigned long']],
} ],
'_KUSER_SHARED_DATA' : [ 0x708, {
'TickCountLowDeprecated' : [ 0x0, ['unsigned long']],
'TickCountMultiplier' : [ 0x4, ['unsigned long']],
'InterruptTime' : [ 0x8, ['_KSYSTEM_TIME']],
'SystemTime' : [ 0x14, ['_KSYSTEM_TIME']],
'TimeZoneBias' : [ 0x20, ['_KSYSTEM_TIME']],
'ImageNumberLow' : [ 0x2c, ['unsigned short']],
'ImageNumberHigh' : [ 0x2e, ['unsigned short']],
'NtSystemRoot' : [ 0x30, ['array', 260, ['wchar']]],
'MaxStackTraceDepth' : [ 0x238, ['unsigned long']],
'CryptoExponent' : [ 0x23c, ['unsigned long']],
'TimeZoneId' : [ 0x240, ['unsigned long']],
'LargePageMinimum' : [ 0x244, ['unsigned long']],
'AitSamplingValue' : [ 0x248, ['unsigned long']],
'AppCompatFlag' : [ 0x24c, ['unsigned long']],
'RNGSeedVersion' : [ 0x250, ['unsigned long long']],
'GlobalValidationRunlevel' : [ 0x258, ['unsigned long']],
'TimeZoneBiasStamp' : [ 0x25c, ['long']],
'NtBuildNumber' : [ 0x260, ['unsigned long']],
'NtProductType' : [ 0x264, ['Enumeration', dict(target = 'long', choices = {1: 'NtProductWinNt', 2: 'NtProductLanManNt', 3: 'NtProductServer'})]],
'ProductTypeIsValid' : [ 0x268, ['unsigned char']],
'Reserved0' : [ 0x269, ['array', 1, ['unsigned char']]],
'NativeProcessorArchitecture' : [ 0x26a, ['unsigned short']],
'NtMajorVersion' : [ 0x26c, ['unsigned long']],
'NtMinorVersion' : [ 0x270, ['unsigned long']],
'ProcessorFeatures' : [ 0x274, ['array', 64, ['unsigned char']]],
'Reserved1' : [ 0x2b4, ['unsigned long']],
'Reserved3' : [ 0x2b8, ['unsigned long']],
'TimeSlip' : [ 0x2bc, ['unsigned long']],
'AlternativeArchitecture' : [ 0x2c0, ['Enumeration', dict(target = 'long', choices = {0: 'StandardDesign', 1: 'NEC98x86', 2: 'EndAlternatives'})]],
'BootId' : [ 0x2c4, ['unsigned long']],
'SystemExpirationDate' : [ 0x2c8, ['_LARGE_INTEGER']],
'SuiteMask' : [ 0x2d0, ['unsigned long']],
'KdDebuggerEnabled' : [ 0x2d4, ['unsigned char']],
'MitigationPolicies' : [ 0x2d5, ['unsigned char']],
'NXSupportPolicy' : [ 0x2d5, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned char')]],
'SEHValidationPolicy' : [ 0x2d5, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned char')]],
'CurDirDevicesSkippedForDlls' : [ 0x2d5, ['BitField', dict(start_bit = 4, end_bit = 6, native_type='unsigned char')]],
'Reserved' : [ 0x2d5, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'Reserved6' : [ 0x2d6, ['array', 2, ['unsigned char']]],
'ActiveConsoleId' : [ 0x2d8, ['unsigned long']],
'DismountCount' : [ 0x2dc, ['unsigned long']],
'ComPlusPackage' : [ 0x2e0, ['unsigned long']],
'LastSystemRITEventTickCount' : [ 0x2e4, ['unsigned long']],
'NumberOfPhysicalPages' : [ 0x2e8, ['unsigned long']],
'SafeBootMode' : [ 0x2ec, ['unsigned char']],
'Reserved12' : [ 0x2ed, ['array', 3, ['unsigned char']]],
'SharedDataFlags' : [ 0x2f0, ['unsigned long']],
'DbgErrorPortPresent' : [ 0x2f0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DbgElevationEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'DbgVirtEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DbgInstallerDetectEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'DbgLkgEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DbgDynProcessorEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'DbgConsoleBrokerEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'DbgSecureBootEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'DbgMultiSessionSku' : [ 0x2f0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SpareBits' : [ 0x2f0, ['BitField', dict(start_bit = 9, end_bit = 32, native_type='unsigned long')]],
'DataFlagsPad' : [ 0x2f4, ['array', 1, ['unsigned long']]],
'TestRetInstruction' : [ 0x2f8, ['unsigned long long']],
'QpcFrequency' : [ 0x300, ['long long']],
'SystemCall' : [ 0x308, ['unsigned long']],
'SystemCallPad0' : [ 0x30c, ['unsigned long']],
'SystemCallPad' : [ 0x310, ['array', 2, ['unsigned long long']]],
'TickCount' : [ 0x320, ['_KSYSTEM_TIME']],
'TickCountQuad' : [ 0x320, ['unsigned long long']],
'ReservedTickCountOverlay' : [ 0x320, ['array', 3, ['unsigned long']]],
'TickCountPad' : [ 0x32c, ['array', 1, ['unsigned long']]],
'Cookie' : [ 0x330, ['unsigned long']],
'CookiePad' : [ 0x334, ['array', 1, ['unsigned long']]],
'ConsoleSessionForegroundProcessId' : [ 0x338, ['long long']],
'TimeUpdateLock' : [ 0x340, ['unsigned long long']],
'BaselineSystemTimeQpc' : [ 0x348, ['unsigned long long']],
'BaselineInterruptTimeQpc' : [ 0x350, ['unsigned long long']],
'QpcSystemTimeIncrement' : [ 0x358, ['unsigned long long']],
'QpcInterruptTimeIncrement' : [ 0x360, ['unsigned long long']],
'QpcSystemTimeIncrementShift' : [ 0x368, ['unsigned char']],
'QpcInterruptTimeIncrementShift' : [ 0x369, ['unsigned char']],
'UnparkedProcessorCount' : [ 0x36a, ['unsigned short']],
'EnclaveFeatureMask' : [ 0x36c, ['array', 4, ['unsigned long']]],
'Reserved8' : [ 0x37c, ['unsigned long']],
'UserModeGlobalLogger' : [ 0x380, ['array', 16, ['unsigned short']]],
'ImageFileExecutionOptions' : [ 0x3a0, ['unsigned long']],
'LangGenerationCount' : [ 0x3a4, ['unsigned long']],
'Reserved4' : [ 0x3a8, ['unsigned long long']],
'InterruptTimeBias' : [ 0x3b0, ['unsigned long long']],
'QpcBias' : [ 0x3b8, ['unsigned long long']],
'ActiveProcessorCount' : [ 0x3c0, ['unsigned long']],
'ActiveGroupCount' : [ 0x3c4, ['unsigned char']],
'Reserved9' : [ 0x3c5, ['unsigned char']],
'QpcData' : [ 0x3c6, ['unsigned short']],
'QpcBypassEnabled' : [ 0x3c6, ['unsigned char']],
'QpcShift' : [ 0x3c7, ['unsigned char']],
'TimeZoneBiasEffectiveStart' : [ 0x3c8, ['_LARGE_INTEGER']],
'TimeZoneBiasEffectiveEnd' : [ 0x3d0, ['_LARGE_INTEGER']],
'XState' : [ 0x3d8, ['_XSTATE_CONFIGURATION']],
} ],
'__unnamed_1080' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
} ],
'_ULARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
'u' : [ 0x0, ['__unnamed_1080']],
'QuadPart' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_1098' : [ 0x4, {
'LongFunction' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Persistent' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Private' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_109a' : [ 0x4, {
'Flags' : [ 0x0, ['unsigned long']],
's' : [ 0x0, ['__unnamed_1098']],
} ],
'_TP_CALLBACK_ENVIRON_V3' : [ 0x48, {
'Version' : [ 0x0, ['unsigned long']],
'Pool' : [ 0x8, ['pointer64', ['_TP_POOL']]],
'CleanupGroup' : [ 0x10, ['pointer64', ['_TP_CLEANUP_GROUP']]],
'CleanupGroupCancelCallback' : [ 0x18, ['pointer64', ['void']]],
'RaceDll' : [ 0x20, ['pointer64', ['void']]],
'ActivationContext' : [ 0x28, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'FinalizationCallback' : [ 0x30, ['pointer64', ['void']]],
'u' : [ 0x38, ['__unnamed_109a']],
'CallbackPriority' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'TP_CALLBACK_PRIORITY_HIGH', 1: 'TP_CALLBACK_PRIORITY_NORMAL', 2: 'TP_CALLBACK_PRIORITY_LOW', 3: 'TP_CALLBACK_PRIORITY_COUNT'})]],
'Size' : [ 0x40, ['unsigned long']],
} ],
'_TEB' : [ 0x1838, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'EnvironmentPointer' : [ 0x38, ['pointer64', ['void']]],
'ClientId' : [ 0x40, ['_CLIENT_ID']],
'ActiveRpcHandle' : [ 0x50, ['pointer64', ['void']]],
'ThreadLocalStoragePointer' : [ 0x58, ['pointer64', ['void']]],
'ProcessEnvironmentBlock' : [ 0x60, ['pointer64', ['_PEB']]],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['pointer64', ['void']]],
'Win32ThreadInfo' : [ 0x78, ['pointer64', ['void']]],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['pointer64', ['void']]],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'ReservedForDebuggerInstrumentation' : [ 0x110, ['array', 16, ['pointer64', ['void']]]],
'SystemReserved1' : [ 0x190, ['array', 38, ['pointer64', ['void']]]],
'ExceptionCode' : [ 0x2c0, ['long']],
'Padding0' : [ 0x2c4, ['array', 4, ['unsigned char']]],
'ActivationContextStackPointer' : [ 0x2c8, ['pointer64', ['_ACTIVATION_CONTEXT_STACK']]],
'InstrumentationCallbackSp' : [ 0x2d0, ['unsigned long long']],
'InstrumentationCallbackPreviousPc' : [ 0x2d8, ['unsigned long long']],
'InstrumentationCallbackPreviousSp' : [ 0x2e0, ['unsigned long long']],
'TxFsContext' : [ 0x2e8, ['unsigned long']],
'InstrumentationCallbackDisabled' : [ 0x2ec, ['unsigned char']],
'Padding1' : [ 0x2ed, ['array', 3, ['unsigned char']]],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID']],
'GdiCachedProcessHandle' : [ 0x7e8, ['pointer64', ['void']]],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['pointer64', ['void']]],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['pointer64', ['void']]]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['pointer64', ['void']]],
'glSectionInfo' : [ 0x1228, ['pointer64', ['void']]],
'glSection' : [ 0x1230, ['pointer64', ['void']]],
'glTable' : [ 0x1238, ['pointer64', ['void']]],
'glCurrentRC' : [ 0x1240, ['pointer64', ['void']]],
'glContext' : [ 0x1248, ['pointer64', ['void']]],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'Padding2' : [ 0x1254, ['array', 4, ['unsigned char']]],
'StaticUnicodeString' : [ 0x1258, ['_UNICODE_STRING']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['wchar']]],
'Padding3' : [ 0x1472, ['array', 6, ['unsigned char']]],
'DeallocationStack' : [ 0x1478, ['pointer64', ['void']]],
'TlsSlots' : [ 0x1480, ['array', 64, ['pointer64', ['void']]]],
'TlsLinks' : [ 0x1680, ['_LIST_ENTRY']],
'Vdm' : [ 0x1690, ['pointer64', ['void']]],
'ReservedForNtRpc' : [ 0x1698, ['pointer64', ['void']]],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['pointer64', ['void']]]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Padding4' : [ 0x16b4, ['array', 4, ['unsigned char']]],
'Instrumentation' : [ 0x16b8, ['array', 11, ['pointer64', ['void']]]],
'ActivityId' : [ 0x1710, ['_GUID']],
'SubProcessTag' : [ 0x1720, ['pointer64', ['void']]],
'PerflibData' : [ 0x1728, ['pointer64', ['void']]],
'EtwTraceData' : [ 0x1730, ['pointer64', ['void']]],
'WinSockData' : [ 0x1738, ['pointer64', ['void']]],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'CurrentIdealProcessor' : [ 0x1744, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0x1744, ['unsigned long']],
'ReservedPad0' : [ 0x1744, ['unsigned char']],
'ReservedPad1' : [ 0x1745, ['unsigned char']],
'ReservedPad2' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'Padding5' : [ 0x174c, ['array', 4, ['unsigned char']]],
'ReservedForPerf' : [ 0x1750, ['pointer64', ['void']]],
'ReservedForOle' : [ 0x1758, ['pointer64', ['void']]],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'Padding6' : [ 0x1764, ['array', 4, ['unsigned char']]],
'SavedPriorityState' : [ 0x1768, ['pointer64', ['void']]],
'ReservedForCodeCoverage' : [ 0x1770, ['unsigned long long']],
'ThreadPoolData' : [ 0x1778, ['pointer64', ['void']]],
'TlsExpansionSlots' : [ 0x1780, ['pointer64', ['pointer64', ['void']]]],
'DeallocationBStore' : [ 0x1788, ['pointer64', ['void']]],
'BStoreLimit' : [ 0x1790, ['pointer64', ['void']]],
'MuiGeneration' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['pointer64', ['void']]],
'pShimData' : [ 0x17a8, ['pointer64', ['void']]],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned short']],
'LowFragHeapDataSlot' : [ 0x17b2, ['unsigned short']],
'Padding7' : [ 0x17b4, ['array', 4, ['unsigned char']]],
'CurrentTransactionHandle' : [ 0x17b8, ['pointer64', ['void']]],
'ActiveFrame' : [ 0x17c0, ['pointer64', ['_TEB_ACTIVE_FRAME']]],
'FlsData' : [ 0x17c8, ['pointer64', ['void']]],
'PreferredLanguages' : [ 0x17d0, ['pointer64', ['void']]],
'UserPrefLanguages' : [ 0x17d8, ['pointer64', ['void']]],
'MergedPrefLanguages' : [ 0x17e0, ['pointer64', ['void']]],
'MuiImpersonation' : [ 0x17e8, ['unsigned long']],
'CrossTebFlags' : [ 0x17ec, ['unsigned short']],
'SpareCrossTebBits' : [ 0x17ec, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0x17ee, ['unsigned short']],
'SafeThunkCall' : [ 0x17ee, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0x17ee, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0x17ee, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0x17ee, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0x17ee, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0x17ee, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0x17ee, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0x17ee, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0x17ee, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0x17ee, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0x17ee, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SessionAware' : [ 0x17ee, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned short')]],
'LoadOwner' : [ 0x17ee, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned short')]],
'LoaderWorker' : [ 0x17ee, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0x17ee, ['BitField', dict(start_bit = 14, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0x17f0, ['pointer64', ['void']]],
'TxnScopeExitCallback' : [ 0x17f8, ['pointer64', ['void']]],
'TxnScopeContext' : [ 0x1800, ['pointer64', ['void']]],
'LockCount' : [ 0x1808, ['unsigned long']],
'WowTebOffset' : [ 0x180c, ['long']],
'ResourceRetValue' : [ 0x1810, ['pointer64', ['void']]],
'ReservedForWdf' : [ 0x1818, ['pointer64', ['void']]],
'ReservedForCrt' : [ 0x1820, ['unsigned long long']],
'EffectiveContainerId' : [ 0x1828, ['_GUID']],
} ],
'_LIST_ENTRY' : [ 0x10, {
'Flink' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'Blink' : [ 0x8, ['pointer64', ['_LIST_ENTRY']]],
} ],
'_SINGLE_LIST_ENTRY' : [ 0x8, {
'Next' : [ 0x0, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
} ],
'_RTL_SPLAY_LINKS' : [ 0x18, {
'Parent' : [ 0x0, ['pointer64', ['_RTL_SPLAY_LINKS']]],
'LeftChild' : [ 0x8, ['pointer64', ['_RTL_SPLAY_LINKS']]],
'RightChild' : [ 0x10, ['pointer64', ['_RTL_SPLAY_LINKS']]],
} ],
'_RTL_DYNAMIC_HASH_TABLE_CONTEXT' : [ 0x18, {
'ChainHead' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'PrevLinkage' : [ 0x8, ['pointer64', ['_LIST_ENTRY']]],
'Signature' : [ 0x10, ['unsigned long long']],
} ],
'_RTL_DYNAMIC_HASH_TABLE_ENUMERATOR' : [ 0x28, {
'HashEntry' : [ 0x0, ['_RTL_DYNAMIC_HASH_TABLE_ENTRY']],
'CurEntry' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'ChainHead' : [ 0x18, ['pointer64', ['_LIST_ENTRY']]],
'BucketIndex' : [ 0x20, ['unsigned long']],
} ],
'_RTL_DYNAMIC_HASH_TABLE' : [ 0x28, {
'Flags' : [ 0x0, ['unsigned long']],
'Shift' : [ 0x4, ['unsigned long']],
'TableSize' : [ 0x8, ['unsigned long']],
'Pivot' : [ 0xc, ['unsigned long']],
'DivisorMask' : [ 0x10, ['unsigned long']],
'NumEntries' : [ 0x14, ['unsigned long']],
'NonEmptyBuckets' : [ 0x18, ['unsigned long']],
'NumEnumerators' : [ 0x1c, ['unsigned long']],
'Directory' : [ 0x20, ['pointer64', ['void']]],
} ],
'_UNICODE_STRING' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned short']]],
} ],
'_STRING' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'__unnamed_1108' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_LARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
'u' : [ 0x0, ['__unnamed_1108']],
'QuadPart' : [ 0x0, ['long long']],
} ],
'_RTL_BITMAP' : [ 0x10, {
'SizeOfBitMap' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned long']]],
} ],
'_LUID' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_IMAGE_NT_HEADERS64' : [ 0x108, {
'Signature' : [ 0x0, ['unsigned long']],
'FileHeader' : [ 0x4, ['_IMAGE_FILE_HEADER']],
'OptionalHeader' : [ 0x18, ['_IMAGE_OPTIONAL_HEADER64']],
} ],
'_IMAGE_DOS_HEADER' : [ 0x40, {
'e_magic' : [ 0x0, ['unsigned short']],
'e_cblp' : [ 0x2, ['unsigned short']],
'e_cp' : [ 0x4, ['unsigned short']],
'e_crlc' : [ 0x6, ['unsigned short']],
'e_cparhdr' : [ 0x8, ['unsigned short']],
'e_minalloc' : [ 0xa, ['unsigned short']],
'e_maxalloc' : [ 0xc, ['unsigned short']],
'e_ss' : [ 0xe, ['unsigned short']],
'e_sp' : [ 0x10, ['unsigned short']],
'e_csum' : [ 0x12, ['unsigned short']],
'e_ip' : [ 0x14, ['unsigned short']],
'e_cs' : [ 0x16, ['unsigned short']],
'e_lfarlc' : [ 0x18, ['unsigned short']],
'e_ovno' : [ 0x1a, ['unsigned short']],
'e_res' : [ 0x1c, ['array', 4, ['unsigned short']]],
'e_oemid' : [ 0x24, ['unsigned short']],
'e_oeminfo' : [ 0x26, ['unsigned short']],
'e_res2' : [ 0x28, ['array', 10, ['unsigned short']]],
'e_lfanew' : [ 0x3c, ['long']],
} ],
'_RTL_RB_TREE' : [ 0x10, {
'Root' : [ 0x0, ['pointer64', ['_RTL_BALANCED_NODE']]],
'Min' : [ 0x8, ['pointer64', ['_RTL_BALANCED_NODE']]],
} ],
'_RTL_BALANCED_NODE' : [ 0x18, {
'Children' : [ 0x0, ['array', 2, ['pointer64', ['_RTL_BALANCED_NODE']]]],
'Left' : [ 0x0, ['pointer64', ['_RTL_BALANCED_NODE']]],
'Right' : [ 0x8, ['pointer64', ['_RTL_BALANCED_NODE']]],
'Red' : [ 0x10, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Balance' : [ 0x10, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned char')]],
'ParentValue' : [ 0x10, ['unsigned long long']],
} ],
'_RTL_AVL_TREE' : [ 0x8, {
'Root' : [ 0x0, ['pointer64', ['_RTL_BALANCED_NODE']]],
} ],
'_GUID' : [ 0x10, {
'Data1' : [ 0x0, ['unsigned long']],
'Data2' : [ 0x4, ['unsigned short']],
'Data3' : [ 0x6, ['unsigned short']],
'Data4' : [ 0x8, ['array', 8, ['unsigned char']]],
} ],
'_KPCR' : [ 0x6a80, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'GdtBase' : [ 0x0, ['pointer64', ['_KGDTENTRY64']]],
'TssBase' : [ 0x8, ['pointer64', ['_KTSS64']]],
'UserRsp' : [ 0x10, ['unsigned long long']],
'Self' : [ 0x18, ['pointer64', ['_KPCR']]],
'CurrentPrcb' : [ 0x20, ['pointer64', ['_KPRCB']]],
'LockArray' : [ 0x28, ['pointer64', ['_KSPIN_LOCK_QUEUE']]],
'Used_Self' : [ 0x30, ['pointer64', ['void']]],
'IdtBase' : [ 0x38, ['pointer64', ['_KIDTENTRY64']]],
'Unused' : [ 0x40, ['array', 2, ['unsigned long long']]],
'Irql' : [ 0x50, ['unsigned char']],
'SecondLevelCacheAssociativity' : [ 0x51, ['unsigned char']],
'ObsoleteNumber' : [ 0x52, ['unsigned char']],
'Fill0' : [ 0x53, ['unsigned char']],
'Unused0' : [ 0x54, ['array', 3, ['unsigned long']]],
'MajorVersion' : [ 0x60, ['unsigned short']],
'MinorVersion' : [ 0x62, ['unsigned short']],
'StallScaleFactor' : [ 0x64, ['unsigned long']],
'Unused1' : [ 0x68, ['array', 3, ['pointer64', ['void']]]],
'KernelReserved' : [ 0x80, ['array', 15, ['unsigned long']]],
'SecondLevelCacheSize' : [ 0xbc, ['unsigned long']],
'HalReserved' : [ 0xc0, ['array', 16, ['unsigned long']]],
'Unused2' : [ 0x100, ['unsigned long']],
'KdVersionBlock' : [ 0x108, ['pointer64', ['void']]],
'Unused3' : [ 0x110, ['pointer64', ['void']]],
'PcrAlign1' : [ 0x118, ['array', 24, ['unsigned long']]],
'Prcb' : [ 0x180, ['_KPRCB']],
} ],
'_KPRCB' : [ 0x6900, {
'MxCsr' : [ 0x0, ['unsigned long']],
'LegacyNumber' : [ 0x4, ['unsigned char']],
'ReservedMustBeZero' : [ 0x5, ['unsigned char']],
'InterruptRequest' : [ 0x6, ['unsigned char']],
'IdleHalt' : [ 0x7, ['unsigned char']],
'CurrentThread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'NextThread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'IdleThread' : [ 0x18, ['pointer64', ['_KTHREAD']]],
'NestingLevel' : [ 0x20, ['unsigned char']],
'ClockOwner' : [ 0x21, ['unsigned char']],
'PendingTickFlags' : [ 0x22, ['unsigned char']],
'PendingTick' : [ 0x22, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'PendingBackupTick' : [ 0x22, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IdleState' : [ 0x23, ['unsigned char']],
'Number' : [ 0x24, ['unsigned long']],
'RspBase' : [ 0x28, ['unsigned long long']],
'PrcbLock' : [ 0x30, ['unsigned long long']],
'PriorityState' : [ 0x38, ['pointer64', ['unsigned char']]],
'ProcessorState' : [ 0x40, ['_KPROCESSOR_STATE']],
'CpuType' : [ 0x5f0, ['unsigned char']],
'CpuID' : [ 0x5f1, ['unsigned char']],
'CpuStep' : [ 0x5f2, ['unsigned short']],
'CpuStepping' : [ 0x5f2, ['unsigned char']],
'CpuModel' : [ 0x5f3, ['unsigned char']],
'MHz' : [ 0x5f4, ['unsigned long']],
'HalReserved' : [ 0x5f8, ['array', 8, ['unsigned long long']]],
'MinorVersion' : [ 0x638, ['unsigned short']],
'MajorVersion' : [ 0x63a, ['unsigned short']],
'BuildType' : [ 0x63c, ['unsigned char']],
'CpuVendor' : [ 0x63d, ['unsigned char']],
'CoresPerPhysicalProcessor' : [ 0x63e, ['unsigned char']],
'LogicalProcessorsPerCore' : [ 0x63f, ['unsigned char']],
'ParentNode' : [ 0x640, ['pointer64', ['_KNODE']]],
'GroupSetMember' : [ 0x648, ['unsigned long long']],
'Group' : [ 0x650, ['unsigned char']],
'GroupIndex' : [ 0x651, ['unsigned char']],
'PrcbPad05' : [ 0x652, ['array', 2, ['unsigned char']]],
'InitialApicId' : [ 0x654, ['unsigned long']],
'ScbOffset' : [ 0x658, ['unsigned long']],
'ApicMask' : [ 0x65c, ['unsigned long']],
'AcpiReserved' : [ 0x660, ['pointer64', ['void']]],
'CFlushSize' : [ 0x668, ['unsigned long']],
'PrcbPad10' : [ 0x66c, ['unsigned long']],
'LockQueue' : [ 0x670, ['array', 17, ['_KSPIN_LOCK_QUEUE']]],
'PPLookasideList' : [ 0x780, ['array', 16, ['_PP_LOOKASIDE_LIST']]],
'PPNxPagedLookasideList' : [ 0x880, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PPNPagedLookasideList' : [ 0x1480, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PPPagedLookasideList' : [ 0x2080, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PrcbPad20' : [ 0x2c80, ['unsigned long long']],
'DeferredReadyListHead' : [ 0x2c88, ['_SINGLE_LIST_ENTRY']],
'MmPageFaultCount' : [ 0x2c90, ['long']],
'MmCopyOnWriteCount' : [ 0x2c94, ['long']],
'MmTransitionCount' : [ 0x2c98, ['long']],
'MmDemandZeroCount' : [ 0x2c9c, ['long']],
'MmPageReadCount' : [ 0x2ca0, ['long']],
'MmPageReadIoCount' : [ 0x2ca4, ['long']],
'MmDirtyPagesWriteCount' : [ 0x2ca8, ['long']],
'MmDirtyWriteIoCount' : [ 0x2cac, ['long']],
'MmMappedPagesWriteCount' : [ 0x2cb0, ['long']],
'MmMappedWriteIoCount' : [ 0x2cb4, ['long']],
'KeSystemCalls' : [ 0x2cb8, ['unsigned long']],
'KeContextSwitches' : [ 0x2cbc, ['unsigned long']],
'LdtSelector' : [ 0x2cc0, ['unsigned short']],
'PrcbPad40' : [ 0x2cc2, ['unsigned short']],
'CcFastReadNoWait' : [ 0x2cc4, ['unsigned long']],
'CcFastReadWait' : [ 0x2cc8, ['unsigned long']],
'CcFastReadNotPossible' : [ 0x2ccc, ['unsigned long']],
'CcCopyReadNoWait' : [ 0x2cd0, ['unsigned long']],
'CcCopyReadWait' : [ 0x2cd4, ['unsigned long']],
'CcCopyReadNoWaitMiss' : [ 0x2cd8, ['unsigned long']],
'IoReadOperationCount' : [ 0x2cdc, ['long']],
'IoWriteOperationCount' : [ 0x2ce0, ['long']],
'IoOtherOperationCount' : [ 0x2ce4, ['long']],
'IoReadTransferCount' : [ 0x2ce8, ['_LARGE_INTEGER']],
'IoWriteTransferCount' : [ 0x2cf0, ['_LARGE_INTEGER']],
'IoOtherTransferCount' : [ 0x2cf8, ['_LARGE_INTEGER']],
'PacketBarrier' : [ 0x2d00, ['long']],
'TargetCount' : [ 0x2d04, ['long']],
'IpiFrozen' : [ 0x2d08, ['unsigned long']],
'IsrDpcStats' : [ 0x2d10, ['pointer64', ['void']]],
'DeviceInterrupts' : [ 0x2d18, ['unsigned long']],
'LookasideIrpFloat' : [ 0x2d1c, ['long']],
'InterruptLastCount' : [ 0x2d20, ['unsigned long']],
'InterruptRate' : [ 0x2d24, ['unsigned long']],
'PrcbPad41' : [ 0x2d28, ['array', 22, ['unsigned long']]],
'DpcData' : [ 0x2d80, ['array', 2, ['_KDPC_DATA']]],
'DpcStack' : [ 0x2dd0, ['pointer64', ['void']]],
'MaximumDpcQueueDepth' : [ 0x2dd8, ['long']],
'DpcRequestRate' : [ 0x2ddc, ['unsigned long']],
'MinimumDpcRate' : [ 0x2de0, ['unsigned long']],
'DpcLastCount' : [ 0x2de4, ['unsigned long']],
'ThreadDpcEnable' : [ 0x2de8, ['unsigned char']],
'QuantumEnd' : [ 0x2de9, ['unsigned char']],
'DpcRoutineActive' : [ 0x2dea, ['unsigned char']],
'IdleSchedule' : [ 0x2deb, ['unsigned char']],
'DpcRequestSummary' : [ 0x2dec, ['long']],
'DpcRequestSlot' : [ 0x2dec, ['array', 2, ['short']]],
'NormalDpcState' : [ 0x2dec, ['short']],
'ThreadDpcState' : [ 0x2dee, ['short']],
'DpcNormalProcessingActive' : [ 0x2dec, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DpcNormalProcessingRequested' : [ 0x2dec, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'DpcNormalThreadSignal' : [ 0x2dec, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DpcNormalTimerExpiration' : [ 0x2dec, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'DpcNormalDpcPresent' : [ 0x2dec, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DpcNormalLocalInterrupt' : [ 0x2dec, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'DpcNormalSpare' : [ 0x2dec, ['BitField', dict(start_bit = 6, end_bit = 16, native_type='unsigned long')]],
'DpcThreadActive' : [ 0x2dec, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'DpcThreadRequested' : [ 0x2dec, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'DpcThreadSpare' : [ 0x2dec, ['BitField', dict(start_bit = 18, end_bit = 32, native_type='unsigned long')]],
'LastTimerHand' : [ 0x2df0, ['unsigned long']],
'LastTick' : [ 0x2df4, ['unsigned long']],
'ClockInterrupts' : [ 0x2df8, ['unsigned long']],
'ReadyScanTick' : [ 0x2dfc, ['unsigned long']],
'InterruptObject' : [ 0x2e00, ['array', 256, ['pointer64', ['void']]]],
'TimerTable' : [ 0x3600, ['_KTIMER_TABLE']],
'DpcGate' : [ 0x5800, ['_KGATE']],
'PrcbPad52' : [ 0x5818, ['pointer64', ['void']]],
'CallDpc' : [ 0x5820, ['_KDPC']],
'ClockKeepAlive' : [ 0x5860, ['long']],
'PrcbPad60' : [ 0x5864, ['array', 2, ['unsigned char']]],
'NmiActive' : [ 0x5866, ['unsigned short']],
'DpcWatchdogPeriod' : [ 0x5868, ['long']],
'DpcWatchdogCount' : [ 0x586c, ['long']],
'KeSpinLockOrdering' : [ 0x5870, ['long']],
'PrcbPad70' : [ 0x5874, ['array', 1, ['unsigned long']]],
'CachedPtes' : [ 0x5878, ['pointer64', ['void']]],
'WaitListHead' : [ 0x5880, ['_LIST_ENTRY']],
'WaitLock' : [ 0x5890, ['unsigned long long']],
'ReadySummary' : [ 0x5898, ['unsigned long']],
'AffinitizedSelectionMask' : [ 0x589c, ['long']],
'QueueIndex' : [ 0x58a0, ['unsigned long']],
'PrcbPad75' : [ 0x58a4, ['array', 3, ['unsigned long']]],
'TimerExpirationDpc' : [ 0x58b0, ['_KDPC']],
'ScbQueue' : [ 0x58f0, ['_RTL_RB_TREE']],
'DispatcherReadyListHead' : [ 0x5900, ['array', 32, ['_LIST_ENTRY']]],
'InterruptCount' : [ 0x5b00, ['unsigned long']],
'KernelTime' : [ 0x5b04, ['unsigned long']],
'UserTime' : [ 0x5b08, ['unsigned long']],
'DpcTime' : [ 0x5b0c, ['unsigned long']],
'InterruptTime' : [ 0x5b10, ['unsigned long']],
'AdjustDpcThreshold' : [ 0x5b14, ['unsigned long']],
'DebuggerSavedIRQL' : [ 0x5b18, ['unsigned char']],
'GroupSchedulingOverQuota' : [ 0x5b19, ['unsigned char']],
'DeepSleep' : [ 0x5b1a, ['unsigned char']],
'PrcbPad80' : [ 0x5b1b, ['array', 5, ['unsigned char']]],
'DpcTimeCount' : [ 0x5b20, ['unsigned long']],
'DpcTimeLimit' : [ 0x5b24, ['unsigned long']],
'PeriodicCount' : [ 0x5b28, ['unsigned long']],
'PeriodicBias' : [ 0x5b2c, ['unsigned long']],
'AvailableTime' : [ 0x5b30, ['unsigned long']],
'KeExceptionDispatchCount' : [ 0x5b34, ['unsigned long']],
'StartCycles' : [ 0x5b38, ['unsigned long long']],
'TaggedCyclesStart' : [ 0x5b40, ['unsigned long long']],
'TaggedCycles' : [ 0x5b48, ['array', 2, ['unsigned long long']]],
'GenerationTarget' : [ 0x5b58, ['unsigned long long']],
'AffinitizedCycles' : [ 0x5b60, ['unsigned long long']],
'PrcbPad81' : [ 0x5b68, ['array', 29, ['unsigned long']]],
'MmSpinLockOrdering' : [ 0x5bdc, ['long']],
'PageColor' : [ 0x5be0, ['unsigned long']],
'NodeColor' : [ 0x5be4, ['unsigned long']],
'NodeShiftedColor' : [ 0x5be8, ['unsigned long']],
'SecondaryColorMask' : [ 0x5bec, ['unsigned long']],
'PrcbPad83' : [ 0x5bf0, ['unsigned long']],
'CycleTime' : [ 0x5bf8, ['unsigned long long']],
'Cycles' : [ 0x5c00, ['array', 4, ['array', 2, ['unsigned long long']]]],
'PrcbPad84' : [ 0x5c40, ['array', 16, ['unsigned long']]],
'CcFastMdlReadNoWait' : [ 0x5c80, ['unsigned long']],
'CcFastMdlReadWait' : [ 0x5c84, ['unsigned long']],
'CcFastMdlReadNotPossible' : [ 0x5c88, ['unsigned long']],
'CcMapDataNoWait' : [ 0x5c8c, ['unsigned long']],
'CcMapDataWait' : [ 0x5c90, ['unsigned long']],
'CcPinMappedDataCount' : [ 0x5c94, ['unsigned long']],
'CcPinReadNoWait' : [ 0x5c98, ['unsigned long']],
'CcPinReadWait' : [ 0x5c9c, ['unsigned long']],
'CcMdlReadNoWait' : [ 0x5ca0, ['unsigned long']],
'CcMdlReadWait' : [ 0x5ca4, ['unsigned long']],
'CcLazyWriteHotSpots' : [ 0x5ca8, ['unsigned long']],
'CcLazyWriteIos' : [ 0x5cac, ['unsigned long']],
'CcLazyWritePages' : [ 0x5cb0, ['unsigned long']],
'CcDataFlushes' : [ 0x5cb4, ['unsigned long']],
'CcDataPages' : [ 0x5cb8, ['unsigned long']],
'CcLostDelayedWrites' : [ 0x5cbc, ['unsigned long']],
'CcFastReadResourceMiss' : [ 0x5cc0, ['unsigned long']],
'CcCopyReadWaitMiss' : [ 0x5cc4, ['unsigned long']],
'CcFastMdlReadResourceMiss' : [ 0x5cc8, ['unsigned long']],
'CcMapDataNoWaitMiss' : [ 0x5ccc, ['unsigned long']],
'CcMapDataWaitMiss' : [ 0x5cd0, ['unsigned long']],
'CcPinReadNoWaitMiss' : [ 0x5cd4, ['unsigned long']],
'CcPinReadWaitMiss' : [ 0x5cd8, ['unsigned long']],
'CcMdlReadNoWaitMiss' : [ 0x5cdc, ['unsigned long']],
'CcMdlReadWaitMiss' : [ 0x5ce0, ['unsigned long']],
'CcReadAheadIos' : [ 0x5ce4, ['unsigned long']],
'MmCacheTransitionCount' : [ 0x5ce8, ['long']],
'MmCacheReadCount' : [ 0x5cec, ['long']],
'MmCacheIoCount' : [ 0x5cf0, ['long']],
'PrcbPad91' : [ 0x5cf4, ['array', 3, ['unsigned long']]],
'PowerState' : [ 0x5d00, ['_PROCESSOR_POWER_STATE']],
'ScbList' : [ 0x5ed0, ['_LIST_ENTRY']],
'PrcbPad92' : [ 0x5ee0, ['array', 7, ['unsigned long']]],
'KeAlignmentFixupCount' : [ 0x5efc, ['unsigned long']],
'DpcWatchdogDpc' : [ 0x5f00, ['_KDPC']],
'DpcWatchdogTimer' : [ 0x5f40, ['_KTIMER']],
'Cache' : [ 0x5f80, ['array', 5, ['_CACHE_DESCRIPTOR']]],
'CacheCount' : [ 0x5fbc, ['unsigned long']],
'CachedCommit' : [ 0x5fc0, ['unsigned long']],
'CachedResidentAvailable' : [ 0x5fc4, ['unsigned long']],
'HyperPte' : [ 0x5fc8, ['pointer64', ['void']]],
'WheaInfo' : [ 0x5fd0, ['pointer64', ['void']]],
'EtwSupport' : [ 0x5fd8, ['pointer64', ['void']]],
'InterruptObjectPool' : [ 0x5fe0, ['_SLIST_HEADER']],
'HypercallPageList' : [ 0x5ff0, ['_SLIST_HEADER']],
'HypercallCachedPages' : [ 0x6000, ['pointer64', ['void']]],
'VirtualApicAssist' : [ 0x6008, ['pointer64', ['void']]],
'StatisticsPage' : [ 0x6010, ['pointer64', ['unsigned long long']]],
'PackageProcessorSet' : [ 0x6018, ['_KAFFINITY_EX']],
'SharedReadyQueueMask' : [ 0x60c0, ['unsigned long long']],
'SharedReadyQueue' : [ 0x60c8, ['pointer64', ['_KSHARED_READY_QUEUE']]],
'SharedQueueScanOwner' : [ 0x60d0, ['unsigned long']],
'ScanSiblingIndex' : [ 0x60d4, ['unsigned long']],
'CoreProcessorSet' : [ 0x60d8, ['unsigned long long']],
'ScanSiblingMask' : [ 0x60e0, ['unsigned long long']],
'LLCMask' : [ 0x60e8, ['unsigned long long']],
'CacheProcessorMask' : [ 0x60f0, ['array', 5, ['unsigned long long']]],
'ProcessorProfileControlArea' : [ 0x6118, ['pointer64', ['_PROCESSOR_PROFILE_CONTROL_AREA']]],
'ProfileEventIndexAddress' : [ 0x6120, ['pointer64', ['void']]],
'PrcbPad94' : [ 0x6128, ['array', 11, ['unsigned long long']]],
'SynchCounters' : [ 0x6180, ['_SYNCH_COUNTERS']],
'PteBitCache' : [ 0x6238, ['unsigned long long']],
'PteBitOffset' : [ 0x6240, ['unsigned long']],
'FsCounters' : [ 0x6248, ['_FILESYSTEM_DISK_COUNTERS']],
'VendorString' : [ 0x6258, ['array', 13, ['unsigned char']]],
'PrcbPad100' : [ 0x6265, ['array', 3, ['unsigned char']]],
'FeatureBits' : [ 0x6268, ['unsigned long long']],
'PrcbPad110' : [ 0x6270, ['unsigned long']],
'UpdateSignature' : [ 0x6278, ['_LARGE_INTEGER']],
'Context' : [ 0x6280, ['pointer64', ['_CONTEXT']]],
'ContextFlagsInit' : [ 0x6288, ['unsigned long']],
'ExtendedState' : [ 0x6290, ['pointer64', ['_XSAVE_AREA']]],
'IsrStack' : [ 0x6298, ['pointer64', ['void']]],
'EntropyTimingState' : [ 0x62a0, ['_KENTROPY_TIMING_STATE']],
'AbSelfIoBoostsList' : [ 0x63f0, ['_SINGLE_LIST_ENTRY']],
'AbPropagateBoostsList' : [ 0x63f8, ['_SINGLE_LIST_ENTRY']],
'AbDpc' : [ 0x6400, ['_KDPC']],
'IoIrpStackProfilerCurrent' : [ 0x6440, ['_IOP_IRP_STACK_PROFILER']],
'IoIrpStackProfilerPrevious' : [ 0x6494, ['_IOP_IRP_STACK_PROFILER']],
'LocalSharedReadyQueue' : [ 0x6500, ['_KSHARED_READY_QUEUE']],
'TimerExpirationTrace' : [ 0x6760, ['array', 16, ['_KTIMER_EXPIRATION_TRACE']]],
'TimerExpirationTraceCount' : [ 0x6860, ['unsigned long']],
'ExSaPageArray' : [ 0x6868, ['pointer64', ['void']]],
'Mailbox' : [ 0x6880, ['pointer64', ['_REQUEST_MAILBOX']]],
'RequestMailbox' : [ 0x68c0, ['array', 1, ['_REQUEST_MAILBOX']]],
} ],
'_KFLOATING_SAVE' : [ 0x4, {
'Dummy' : [ 0x0, ['unsigned long']],
} ],
'_SINGLE_LIST_ENTRY32' : [ 0x4, {
'Next' : [ 0x0, ['unsigned long']],
} ],
'_EXT_SET_PARAMETERS_V0' : [ 0x10, {
'Version' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'NoWakeTolerance' : [ 0x8, ['long long']],
} ],
'_PS_TRUSTLET_CREATE_ATTRIBUTES' : [ 0x18, {
'TrustletIdentity' : [ 0x0, ['unsigned long long']],
'Attributes' : [ 0x8, ['array', 1, ['_PS_TRUSTLET_ATTRIBUTE_DATA']]],
} ],
'_PS_TRUSTLET_ATTRIBUTE_DATA' : [ 0x10, {
'Header' : [ 0x0, ['_PS_TRUSTLET_ATTRIBUTE_HEADER']],
'Data' : [ 0x8, ['array', 1, ['unsigned long long']]],
} ],
'_PS_TRUSTLET_ATTRIBUTE_HEADER' : [ 0x8, {
'AttributeType' : [ 0x0, ['_PS_TRUSTLET_ATTRIBUTE_TYPE']],
'InstanceNumber' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'Reserved' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
} ],
'_TRUSTLET_MAILBOX_KEY' : [ 0x10, {
'SecretValue' : [ 0x0, ['array', 2, ['unsigned long long']]],
} ],
'_TRUSTLET_COLLABORATION_ID' : [ 0x10, {
'Value' : [ 0x0, ['array', 2, ['unsigned long long']]],
} ],
'_KPROCESS' : [ 0x2d8, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'ProfileListHead' : [ 0x18, ['_LIST_ENTRY']],
'DirectoryTableBase' : [ 0x28, ['unsigned long long']],
'ThreadListHead' : [ 0x30, ['_LIST_ENTRY']],
'ProcessLock' : [ 0x40, ['unsigned long']],
'Spare0' : [ 0x44, ['unsigned long']],
'DeepFreezeStartTime' : [ 0x48, ['unsigned long long']],
'Affinity' : [ 0x50, ['_KAFFINITY_EX']],
'ReadyListHead' : [ 0xf8, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x108, ['_SINGLE_LIST_ENTRY']],
'ActiveProcessors' : [ 0x110, ['_KAFFINITY_EX']],
'AutoAlignment' : [ 0x1b8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='long')]],
'DisableBoost' : [ 0x1b8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='long')]],
'DisableQuantum' : [ 0x1b8, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='long')]],
'DeepFreeze' : [ 0x1b8, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'TimerVirtualization' : [ 0x1b8, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'CheckStackExtents' : [ 0x1b8, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'SpareFlags0' : [ 0x1b8, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned long')]],
'ActiveGroupsMask' : [ 0x1b8, ['BitField', dict(start_bit = 8, end_bit = 28, native_type='unsigned long')]],
'ReservedFlags' : [ 0x1b8, ['BitField', dict(start_bit = 28, end_bit = 32, native_type='long')]],
'ProcessFlags' : [ 0x1b8, ['long']],
'BasePriority' : [ 0x1bc, ['unsigned char']],
'QuantumReset' : [ 0x1bd, ['unsigned char']],
'Visited' : [ 0x1be, ['unsigned char']],
'Flags' : [ 0x1bf, ['_KEXECUTE_OPTIONS']],
'ThreadSeed' : [ 0x1c0, ['array', 20, ['unsigned long']]],
'IdealNode' : [ 0x210, ['array', 20, ['unsigned short']]],
'IdealGlobalNode' : [ 0x238, ['unsigned short']],
'Spare1' : [ 0x23a, ['unsigned short']],
'StackCount' : [ 0x23c, ['_KSTACK_COUNT']],
'ProcessListEntry' : [ 0x240, ['_LIST_ENTRY']],
'CycleTime' : [ 0x250, ['unsigned long long']],
'ContextSwitches' : [ 0x258, ['unsigned long long']],
'SchedulingGroup' : [ 0x260, ['pointer64', ['_KSCHEDULING_GROUP']]],
'FreezeCount' : [ 0x268, ['unsigned long']],
'KernelTime' : [ 0x26c, ['unsigned long']],
'UserTime' : [ 0x270, ['unsigned long']],
'LdtFreeSelectorHint' : [ 0x274, ['unsigned short']],
'LdtTableLength' : [ 0x276, ['unsigned short']],
'LdtSystemDescriptor' : [ 0x278, ['_KGDTENTRY64']],
'LdtBaseAddress' : [ 0x288, ['pointer64', ['void']]],
'LdtProcessLock' : [ 0x290, ['_FAST_MUTEX']],
'InstrumentationCallback' : [ 0x2c8, ['pointer64', ['void']]],
'SecurePid' : [ 0x2d0, ['unsigned long long']],
} ],
'_KTHREAD' : [ 0x5d8, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'SListFaultAddress' : [ 0x18, ['pointer64', ['void']]],
'QuantumTarget' : [ 0x20, ['unsigned long long']],
'InitialStack' : [ 0x28, ['pointer64', ['void']]],
'StackLimit' : [ 0x30, ['pointer64', ['void']]],
'StackBase' : [ 0x38, ['pointer64', ['void']]],
'ThreadLock' : [ 0x40, ['unsigned long long']],
'CycleTime' : [ 0x48, ['unsigned long long']],
'CurrentRunTime' : [ 0x50, ['unsigned long']],
'ExpectedRunTime' : [ 0x54, ['unsigned long']],
'KernelStack' : [ 0x58, ['pointer64', ['void']]],
'StateSaveArea' : [ 0x60, ['pointer64', ['_XSAVE_FORMAT']]],
'SchedulingGroup' : [ 0x68, ['pointer64', ['_KSCHEDULING_GROUP']]],
'WaitRegister' : [ 0x70, ['_KWAIT_STATUS_REGISTER']],
'Running' : [ 0x71, ['unsigned char']],
'Alerted' : [ 0x72, ['array', 2, ['unsigned char']]],
'AutoBoostActive' : [ 0x74, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadyTransition' : [ 0x74, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'WaitNext' : [ 0x74, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SystemAffinityActive' : [ 0x74, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Alertable' : [ 0x74, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'UserStackWalkActive' : [ 0x74, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ApcInterruptRequest' : [ 0x74, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'QuantumEndMigrate' : [ 0x74, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'UmsDirectedSwitchEnable' : [ 0x74, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'TimerActive' : [ 0x74, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'SystemThread' : [ 0x74, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ProcessDetachActive' : [ 0x74, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'CalloutActive' : [ 0x74, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'ScbReadyQueue' : [ 0x74, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'ApcQueueable' : [ 0x74, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'ReservedStackInUse' : [ 0x74, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'UmsPerformingSyscall' : [ 0x74, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'TimerSuspended' : [ 0x74, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'SuspendedWaitMode' : [ 0x74, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'SuspendSchedulerApcWait' : [ 0x74, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'Reserved' : [ 0x74, ['BitField', dict(start_bit = 20, end_bit = 32, native_type='unsigned long')]],
'MiscFlags' : [ 0x74, ['long']],
'AutoAlignment' : [ 0x78, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DisableBoost' : [ 0x78, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ThreadFlagsSpare0' : [ 0x78, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'AlertedByThreadId' : [ 0x78, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'QuantumDonation' : [ 0x78, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'EnableStackSwap' : [ 0x78, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'GuiThread' : [ 0x78, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'DisableQuantum' : [ 0x78, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ChargeOnlySchedulingGroup' : [ 0x78, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'DeferPreemption' : [ 0x78, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'QueueDeferPreemption' : [ 0x78, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ForceDeferSchedule' : [ 0x78, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'SharedReadyQueueAffinity' : [ 0x78, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'FreezeCount' : [ 0x78, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'TerminationApcRequest' : [ 0x78, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'AutoBoostEntriesExhausted' : [ 0x78, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'KernelStackResident' : [ 0x78, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'CommitFailTerminateRequest' : [ 0x78, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'ProcessStackCountDecremented' : [ 0x78, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'ThreadFlagsSpare' : [ 0x78, ['BitField', dict(start_bit = 19, end_bit = 24, native_type='unsigned long')]],
'EtwStackTraceApcInserted' : [ 0x78, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'ThreadFlags' : [ 0x78, ['long']],
'Tag' : [ 0x7c, ['unsigned char']],
'SystemHeteroCpuPolicy' : [ 0x7d, ['unsigned char']],
'UserHeteroCpuPolicy' : [ 0x7e, ['BitField', dict(start_bit = 0, end_bit = 7, native_type='unsigned char')]],
'ExplicitSystemHeteroCpuPolicy' : [ 0x7e, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'Spare0' : [ 0x7f, ['unsigned char']],
'SystemCallNumber' : [ 0x80, ['unsigned long']],
'Spare10' : [ 0x84, ['unsigned long']],
'FirstArgument' : [ 0x88, ['pointer64', ['void']]],
'TrapFrame' : [ 0x90, ['pointer64', ['_KTRAP_FRAME']]],
'ApcState' : [ 0x98, ['_KAPC_STATE']],
'ApcStateFill' : [ 0x98, ['array', 43, ['unsigned char']]],
'Priority' : [ 0xc3, ['unsigned char']],
'UserIdealProcessor' : [ 0xc4, ['unsigned long']],
'WaitStatus' : [ 0xc8, ['long long']],
'WaitBlockList' : [ 0xd0, ['pointer64', ['_KWAIT_BLOCK']]],
'WaitListEntry' : [ 0xd8, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0xd8, ['_SINGLE_LIST_ENTRY']],
'Queue' : [ 0xe8, ['pointer64', ['_DISPATCHER_HEADER']]],
'Teb' : [ 0xf0, ['pointer64', ['void']]],
'RelativeTimerBias' : [ 0xf8, ['unsigned long long']],
'Timer' : [ 0x100, ['_KTIMER']],
'WaitBlock' : [ 0x140, ['array', 4, ['_KWAIT_BLOCK']]],
'WaitBlockFill4' : [ 0x140, ['array', 20, ['unsigned char']]],
'ContextSwitches' : [ 0x154, ['unsigned long']],
'WaitBlockFill5' : [ 0x140, ['array', 68, ['unsigned char']]],
'State' : [ 0x184, ['unsigned char']],
'Spare13' : [ 0x185, ['unsigned char']],
'WaitIrql' : [ 0x186, ['unsigned char']],
'WaitMode' : [ 0x187, ['unsigned char']],
'WaitBlockFill6' : [ 0x140, ['array', 116, ['unsigned char']]],
'WaitTime' : [ 0x1b4, ['unsigned long']],
'WaitBlockFill7' : [ 0x140, ['array', 164, ['unsigned char']]],
'KernelApcDisable' : [ 0x1e4, ['short']],
'SpecialApcDisable' : [ 0x1e6, ['short']],
'CombinedApcDisable' : [ 0x1e4, ['unsigned long']],
'WaitBlockFill8' : [ 0x140, ['array', 40, ['unsigned char']]],
'ThreadCounters' : [ 0x168, ['pointer64', ['_KTHREAD_COUNTERS']]],
'WaitBlockFill9' : [ 0x140, ['array', 88, ['unsigned char']]],
'XStateSave' : [ 0x198, ['pointer64', ['_XSTATE_SAVE']]],
'WaitBlockFill10' : [ 0x140, ['array', 136, ['unsigned char']]],
'Win32Thread' : [ 0x1c8, ['pointer64', ['void']]],
'WaitBlockFill11' : [ 0x140, ['array', 176, ['unsigned char']]],
'Ucb' : [ 0x1f0, ['pointer64', ['_UMS_CONTROL_BLOCK']]],
'Uch' : [ 0x1f8, ['pointer64', ['_KUMS_CONTEXT_HEADER']]],
'TebMappedLowVa' : [ 0x200, ['pointer64', ['void']]],
'QueueListEntry' : [ 0x208, ['_LIST_ENTRY']],
'NextProcessor' : [ 0x218, ['unsigned long']],
'NextProcessorNumber' : [ 0x218, ['BitField', dict(start_bit = 0, end_bit = 31, native_type='unsigned long')]],
'SharedReadyQueue' : [ 0x218, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'QueuePriority' : [ 0x21c, ['long']],
'Process' : [ 0x220, ['pointer64', ['_KPROCESS']]],
'UserAffinity' : [ 0x228, ['_GROUP_AFFINITY']],
'UserAffinityFill' : [ 0x228, ['array', 10, ['unsigned char']]],
'PreviousMode' : [ 0x232, ['unsigned char']],
'BasePriority' : [ 0x233, ['unsigned char']],
'PriorityDecrement' : [ 0x234, ['unsigned char']],
'ForegroundBoost' : [ 0x234, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned char')]],
'UnusualBoost' : [ 0x234, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'Preempted' : [ 0x235, ['unsigned char']],
'AdjustReason' : [ 0x236, ['unsigned char']],
'AdjustIncrement' : [ 0x237, ['unsigned char']],
'AffinityVersion' : [ 0x238, ['unsigned long long']],
'Affinity' : [ 0x240, ['_GROUP_AFFINITY']],
'AffinityFill' : [ 0x240, ['array', 10, ['unsigned char']]],
'ApcStateIndex' : [ 0x24a, ['unsigned char']],
'WaitBlockCount' : [ 0x24b, ['unsigned char']],
'IdealProcessor' : [ 0x24c, ['unsigned long']],
'NpxState' : [ 0x250, ['unsigned long long']],
'SavedApcState' : [ 0x258, ['_KAPC_STATE']],
'SavedApcStateFill' : [ 0x258, ['array', 43, ['unsigned char']]],
'WaitReason' : [ 0x283, ['unsigned char']],
'SuspendCount' : [ 0x284, ['unsigned char']],
'Saturation' : [ 0x285, ['unsigned char']],
'SListFaultCount' : [ 0x286, ['unsigned short']],
'SchedulerApc' : [ 0x288, ['_KAPC']],
'SchedulerApcFill0' : [ 0x288, ['array', 1, ['unsigned char']]],
'ResourceIndex' : [ 0x289, ['unsigned char']],
'SchedulerApcFill1' : [ 0x288, ['array', 3, ['unsigned char']]],
'QuantumReset' : [ 0x28b, ['unsigned char']],
'SchedulerApcFill2' : [ 0x288, ['array', 4, ['unsigned char']]],
'KernelTime' : [ 0x28c, ['unsigned long']],
'SchedulerApcFill3' : [ 0x288, ['array', 64, ['unsigned char']]],
'WaitPrcb' : [ 0x2c8, ['pointer64', ['_KPRCB']]],
'SchedulerApcFill4' : [ 0x288, ['array', 72, ['unsigned char']]],
'LegoData' : [ 0x2d0, ['pointer64', ['void']]],
'SchedulerApcFill5' : [ 0x288, ['array', 83, ['unsigned char']]],
'CallbackNestingLevel' : [ 0x2db, ['unsigned char']],
'UserTime' : [ 0x2dc, ['unsigned long']],
'SuspendEvent' : [ 0x2e0, ['_KEVENT']],
'ThreadListEntry' : [ 0x2f8, ['_LIST_ENTRY']],
'MutantListHead' : [ 0x308, ['_LIST_ENTRY']],
'AbEntrySummary' : [ 0x318, ['unsigned char']],
'AbWaitEntryCount' : [ 0x319, ['unsigned char']],
'Spare20' : [ 0x31a, ['unsigned short']],
'SecureThreadCookie' : [ 0x31c, ['unsigned long']],
'LockEntries' : [ 0x320, ['array', 6, ['_KLOCK_ENTRY']]],
'PropagateBoostsEntry' : [ 0x560, ['_SINGLE_LIST_ENTRY']],
'IoSelfBoostsEntry' : [ 0x568, ['_SINGLE_LIST_ENTRY']],
'PriorityFloorCounts' : [ 0x570, ['array', 16, ['unsigned char']]],
'PriorityFloorSummary' : [ 0x580, ['unsigned long']],
'AbCompletedIoBoostCount' : [ 0x584, ['long']],
'KeReferenceCount' : [ 0x588, ['short']],
'AbOrphanedEntrySummary' : [ 0x58a, ['unsigned char']],
'AbOwnedEntryCount' : [ 0x58b, ['unsigned char']],
'ForegroundLossTime' : [ 0x58c, ['unsigned long']],
'GlobalForegroundListEntry' : [ 0x590, ['_LIST_ENTRY']],
'ForegroundDpcStackListEntry' : [ 0x590, ['_SINGLE_LIST_ENTRY']],
'InGlobalForegroundList' : [ 0x598, ['unsigned long long']],
'ReadOperationCount' : [ 0x5a0, ['long long']],
'WriteOperationCount' : [ 0x5a8, ['long long']],
'OtherOperationCount' : [ 0x5b0, ['long long']],
'ReadTransferCount' : [ 0x5b8, ['long long']],
'WriteTransferCount' : [ 0x5c0, ['long long']],
'OtherTransferCount' : [ 0x5c8, ['long long']],
'QueuedScb' : [ 0x5d0, ['pointer64', ['_KSCB']]],
} ],
'_KSTACK_CONTROL' : [ 0x30, {
'StackBase' : [ 0x0, ['unsigned long long']],
'ActualLimit' : [ 0x8, ['unsigned long long']],
'StackExpansion' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Previous' : [ 0x10, ['_KERNEL_STACK_SEGMENT']],
} ],
'_KSPIN_LOCK_QUEUE' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_KSPIN_LOCK_QUEUE']]],
'Lock' : [ 0x8, ['pointer64', ['unsigned long long']]],
} ],
'_FAST_MUTEX' : [ 0x38, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x8, ['pointer64', ['void']]],
'Contention' : [ 0x10, ['unsigned long']],
'Event' : [ 0x18, ['_KEVENT']],
'OldIrql' : [ 0x30, ['unsigned long']],
} ],
'_KEVENT' : [ 0x18, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'__unnamed_1269' : [ 0x10, {
'Depth' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Sequence' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='unsigned long long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long long')]],
'NextEntry' : [ 0x8, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
} ],
'_SLIST_HEADER' : [ 0x10, {
'Alignment' : [ 0x0, ['unsigned long long']],
'Region' : [ 0x8, ['unsigned long long']],
'HeaderX64' : [ 0x0, ['__unnamed_1269']],
} ],
'_LOOKASIDE_LIST_EX' : [ 0x60, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE_POOL']],
} ],
'_SLIST_ENTRY' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_NPAGED_LOOKASIDE_LIST' : [ 0x80, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
} ],
'_PAGED_LOOKASIDE_LIST' : [ 0x80, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
} ],
'_IO_STATUS_BLOCK' : [ 0x10, {
'Status' : [ 0x0, ['long']],
'Pointer' : [ 0x0, ['pointer64', ['void']]],
'Information' : [ 0x8, ['unsigned long long']],
} ],
'_QUAD' : [ 0x8, {
'UseThisFieldToCopy' : [ 0x0, ['long long']],
'DoNotUseThisField' : [ 0x0, ['double']],
} ],
'_WORK_QUEUE_ITEM' : [ 0x20, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'WorkerRoutine' : [ 0x10, ['pointer64', ['void']]],
'Parameter' : [ 0x18, ['pointer64', ['void']]],
} ],
'_EXT_DELETE_PARAMETERS' : [ 0x18, {
'Version' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'DeleteCallback' : [ 0x8, ['pointer64', ['void']]],
'DeleteContext' : [ 0x10, ['pointer64', ['void']]],
} ],
'_EX_PUSH_LOCK' : [ 0x8, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_PP_LOOKASIDE_LIST' : [ 0x10, {
'P' : [ 0x0, ['pointer64', ['_GENERAL_LOOKASIDE']]],
'L' : [ 0x8, ['pointer64', ['_GENERAL_LOOKASIDE']]],
} ],
'_GENERAL_LOOKASIDE' : [ 0x80, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'SingleListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'AllocateHits' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'FreeHits' : [ 0x20, ['unsigned long']],
'Type' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPoolBase', 1: 'PagedPool', 2: 'NonPagedPoolBaseMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolBaseCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolBaseCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 516: 'NonPagedPoolNxCacheAligned', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 512: 'NonPagedPoolNx', 544: 'NonPagedPoolSessionNx', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x28, ['unsigned long']],
'Size' : [ 0x2c, ['unsigned long']],
'AllocateEx' : [ 0x30, ['pointer64', ['void']]],
'Allocate' : [ 0x30, ['pointer64', ['void']]],
'FreeEx' : [ 0x38, ['pointer64', ['void']]],
'Free' : [ 0x38, ['pointer64', ['void']]],
'ListEntry' : [ 0x40, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x50, ['unsigned long']],
'LastAllocateMisses' : [ 0x54, ['unsigned long']],
'LastAllocateHits' : [ 0x54, ['unsigned long']],
'Future' : [ 0x58, ['array', 2, ['unsigned long']]],
} ],
'_KNODE' : [ 0x100, {
'IdleNonParkedCpuSet' : [ 0x0, ['unsigned long long']],
'IdleSmtSet' : [ 0x8, ['unsigned long long']],
'IdleCpuSet' : [ 0x10, ['unsigned long long']],
'DeepIdleSet' : [ 0x40, ['unsigned long long']],
'IdleConstrainedSet' : [ 0x48, ['unsigned long long']],
'NonParkedSet' : [ 0x50, ['unsigned long long']],
'ParkLock' : [ 0x58, ['long']],
'Seed' : [ 0x5c, ['unsigned long']],
'SiblingMask' : [ 0x80, ['unsigned long']],
'Affinity' : [ 0x88, ['_GROUP_AFFINITY']],
'AffinityFill' : [ 0x88, ['array', 10, ['unsigned char']]],
'NodeNumber' : [ 0x92, ['unsigned short']],
'PrimaryNodeNumber' : [ 0x94, ['unsigned short']],
'Stride' : [ 0x96, ['unsigned char']],
'Spare0' : [ 0x97, ['unsigned char']],
'SharedReadyQueueLeaders' : [ 0x98, ['unsigned long long']],
'ProximityId' : [ 0xa0, ['unsigned long']],
'Lowest' : [ 0xa4, ['unsigned long']],
'Highest' : [ 0xa8, ['unsigned long']],
'MaximumProcessors' : [ 0xac, ['unsigned char']],
'Flags' : [ 0xad, ['_flags']],
'Spare10' : [ 0xae, ['unsigned char']],
'HeteroSets' : [ 0xb0, ['array', 5, ['_KHETERO_PROCESSOR_SET']]],
} ],
'_ENODE' : [ 0x540, {
'Ncb' : [ 0x0, ['_KNODE']],
'ExWorkQueues' : [ 0x100, ['array', 8, ['pointer64', ['_EX_WORK_QUEUE']]]],
'ExWorkQueue' : [ 0x140, ['_EX_WORK_QUEUE']],
'ExpThreadSetManagerEvent' : [ 0x410, ['_KEVENT']],
'ExpDeadlockTimer' : [ 0x428, ['_KTIMER']],
'ExpThreadReaperEvent' : [ 0x468, ['_KEVENT']],
'WaitBlocks' : [ 0x480, ['array', 3, ['_KWAIT_BLOCK']]],
'ExpWorkerThreadBalanceManagerPtr' : [ 0x510, ['pointer64', ['_ETHREAD']]],
'ExpWorkerSeed' : [ 0x518, ['unsigned long']],
'ExWorkerFullInit' : [ 0x51c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ExWorkerStructInit' : [ 0x51c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ExWorkerFlags' : [ 0x51c, ['unsigned long']],
} ],
'_HANDLE_TABLE' : [ 0x80, {
'NextHandleNeedingPool' : [ 0x0, ['unsigned long']],
'ExtraInfoPages' : [ 0x4, ['long']],
'TableCode' : [ 0x8, ['unsigned long long']],
'QuotaProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'HandleTableList' : [ 0x18, ['_LIST_ENTRY']],
'UniqueProcessId' : [ 0x28, ['unsigned long']],
'Flags' : [ 0x2c, ['unsigned long']],
'StrictFIFO' : [ 0x2c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'EnableHandleExceptions' : [ 0x2c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Rundown' : [ 0x2c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Duplicated' : [ 0x2c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'RaiseUMExceptionOnInvalidHandleClose' : [ 0x2c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'HandleContentionEvent' : [ 0x30, ['_EX_PUSH_LOCK']],
'HandleTableLock' : [ 0x38, ['_EX_PUSH_LOCK']],
'FreeLists' : [ 0x40, ['array', 1, ['_HANDLE_TABLE_FREE_LIST']]],
'ActualEntry' : [ 0x40, ['array', 32, ['unsigned char']]],
'DebugInfo' : [ 0x60, ['pointer64', ['_HANDLE_TRACE_DEBUG_INFO']]],
} ],
'_HANDLE_TABLE_ENTRY_INFO' : [ 0x4, {
'AuditMask' : [ 0x0, ['unsigned long']],
} ],
'_HANDLE_TABLE_ENTRY' : [ 0x10, {
'VolatileLowValue' : [ 0x0, ['long long']],
'LowValue' : [ 0x0, ['long long']],
'InfoTable' : [ 0x0, ['pointer64', ['_HANDLE_TABLE_ENTRY_INFO']]],
'HighValue' : [ 0x8, ['long long']],
'NextFreeHandleEntry' : [ 0x8, ['pointer64', ['_HANDLE_TABLE_ENTRY']]],
'LeafHandleValue' : [ 0x8, ['_EXHANDLE']],
'RefCountField' : [ 0x0, ['long long']],
'Unlocked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'RefCnt' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 17, native_type='unsigned long long')]],
'Attributes' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 20, native_type='unsigned long long')]],
'ObjectPointerBits' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 64, native_type='unsigned long long')]],
'GrantedAccessBits' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 25, native_type='unsigned long')]],
'NoRightsUpgrade' : [ 0x8, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'Spare1' : [ 0x8, ['BitField', dict(start_bit = 26, end_bit = 32, native_type='unsigned long')]],
'Spare2' : [ 0xc, ['unsigned long']],
} ],
'_EX_FAST_REF' : [ 0x8, {
'Object' : [ 0x0, ['pointer64', ['void']]],
'RefCnt' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_135c' : [ 0x2c, {
'InitialPrivilegeSet' : [ 0x0, ['_INITIAL_PRIVILEGE_SET']],
'PrivilegeSet' : [ 0x0, ['_PRIVILEGE_SET']],
} ],
'_ACCESS_STATE' : [ 0xa0, {
'OperationID' : [ 0x0, ['_LUID']],
'SecurityEvaluated' : [ 0x8, ['unsigned char']],
'GenerateAudit' : [ 0x9, ['unsigned char']],
'GenerateOnClose' : [ 0xa, ['unsigned char']],
'PrivilegesAllocated' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['unsigned long']],
'RemainingDesiredAccess' : [ 0x10, ['unsigned long']],
'PreviouslyGrantedAccess' : [ 0x14, ['unsigned long']],
'OriginalDesiredAccess' : [ 0x18, ['unsigned long']],
'SubjectSecurityContext' : [ 0x20, ['_SECURITY_SUBJECT_CONTEXT']],
'SecurityDescriptor' : [ 0x40, ['pointer64', ['void']]],
'AuxData' : [ 0x48, ['pointer64', ['void']]],
'Privileges' : [ 0x50, ['__unnamed_135c']],
'AuditPrivileges' : [ 0x7c, ['unsigned char']],
'ObjectName' : [ 0x80, ['_UNICODE_STRING']],
'ObjectTypeName' : [ 0x90, ['_UNICODE_STRING']],
} ],
'_AUX_ACCESS_DATA' : [ 0xe0, {
'PrivilegesUsed' : [ 0x0, ['pointer64', ['_PRIVILEGE_SET']]],
'GenericMapping' : [ 0x8, ['_GENERIC_MAPPING']],
'AccessesToAudit' : [ 0x18, ['unsigned long']],
'MaximumAuditMask' : [ 0x1c, ['unsigned long']],
'TransactionId' : [ 0x20, ['_GUID']],
'NewSecurityDescriptor' : [ 0x30, ['pointer64', ['void']]],
'ExistingSecurityDescriptor' : [ 0x38, ['pointer64', ['void']]],
'ParentSecurityDescriptor' : [ 0x40, ['pointer64', ['void']]],
'DeRefSecurityDescriptor' : [ 0x48, ['pointer64', ['void']]],
'SDLock' : [ 0x50, ['pointer64', ['void']]],
'AccessReasons' : [ 0x58, ['_ACCESS_REASONS']],
'GenerateStagingEvents' : [ 0xd8, ['unsigned char']],
} ],
'_OBJECT_HANDLE_INFORMATION' : [ 0x8, {
'HandleAttributes' : [ 0x0, ['unsigned long']],
'GrantedAccess' : [ 0x4, ['unsigned long']],
} ],
'_EPROCESS' : [ 0x788, {
'Pcb' : [ 0x0, ['_KPROCESS']],
'ProcessLock' : [ 0x2d8, ['_EX_PUSH_LOCK']],
'RundownProtect' : [ 0x2e0, ['_EX_RUNDOWN_REF']],
'UniqueProcessId' : [ 0x2e8, ['pointer64', ['void']]],
'ActiveProcessLinks' : [ 0x2f0, ['_LIST_ENTRY']],
'Flags2' : [ 0x300, ['unsigned long']],
'JobNotReallyActive' : [ 0x300, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AccountingFolded' : [ 0x300, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'NewProcessReported' : [ 0x300, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ExitProcessReported' : [ 0x300, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ReportCommitChanges' : [ 0x300, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'LastReportMemory' : [ 0x300, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ForceWakeCharge' : [ 0x300, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'CrossSessionCreate' : [ 0x300, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'NeedsHandleRundown' : [ 0x300, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'RefTraceEnabled' : [ 0x300, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'DisableDynamicCode' : [ 0x300, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'EmptyJobEvaluated' : [ 0x300, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'DefaultPagePriority' : [ 0x300, ['BitField', dict(start_bit = 12, end_bit = 15, native_type='unsigned long')]],
'PrimaryTokenFrozen' : [ 0x300, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessVerifierTarget' : [ 0x300, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'StackRandomizationDisabled' : [ 0x300, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'AffinityPermanent' : [ 0x300, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'AffinityUpdateEnable' : [ 0x300, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'PropagateNode' : [ 0x300, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'ExplicitAffinity' : [ 0x300, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'ProcessExecutionState' : [ 0x300, ['BitField', dict(start_bit = 22, end_bit = 24, native_type='unsigned long')]],
'DisallowStrippedImages' : [ 0x300, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'HighEntropyASLREnabled' : [ 0x300, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'ExtensionPointDisable' : [ 0x300, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'ForceRelocateImages' : [ 0x300, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'ProcessStateChangeRequest' : [ 0x300, ['BitField', dict(start_bit = 28, end_bit = 30, native_type='unsigned long')]],
'ProcessStateChangeInProgress' : [ 0x300, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'DisallowWin32kSystemCalls' : [ 0x300, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'Flags' : [ 0x304, ['unsigned long']],
'CreateReported' : [ 0x304, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoDebugInherit' : [ 0x304, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessExiting' : [ 0x304, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessDelete' : [ 0x304, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ControlFlowGuardEnabled' : [ 0x304, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'VmDeleted' : [ 0x304, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'OutswapEnabled' : [ 0x304, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Outswapped' : [ 0x304, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'FailFastOnCommitFail' : [ 0x304, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Wow64VaSpace4Gb' : [ 0x304, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'AddressSpaceInitialized' : [ 0x304, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
'SetTimerResolution' : [ 0x304, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x304, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'DeprioritizeViews' : [ 0x304, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WriteWatch' : [ 0x304, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessInSession' : [ 0x304, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'OverrideAddressSpace' : [ 0x304, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HasAddressSpace' : [ 0x304, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'LaunchPrefetched' : [ 0x304, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'Background' : [ 0x304, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'VmTopDown' : [ 0x304, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'ImageNotifyDone' : [ 0x304, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'PdeUpdateNeeded' : [ 0x304, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'VdmAllowed' : [ 0x304, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'ProcessRundown' : [ 0x304, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'ProcessInserted' : [ 0x304, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'DefaultIoPriority' : [ 0x304, ['BitField', dict(start_bit = 27, end_bit = 30, native_type='unsigned long')]],
'ProcessSelfDelete' : [ 0x304, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'SetTimerResolutionLink' : [ 0x304, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'CreateTime' : [ 0x308, ['_LARGE_INTEGER']],
'ProcessQuotaUsage' : [ 0x310, ['array', 2, ['unsigned long long']]],
'ProcessQuotaPeak' : [ 0x320, ['array', 2, ['unsigned long long']]],
'PeakVirtualSize' : [ 0x330, ['unsigned long long']],
'VirtualSize' : [ 0x338, ['unsigned long long']],
'SessionProcessLinks' : [ 0x340, ['_LIST_ENTRY']],
'ExceptionPortData' : [ 0x350, ['pointer64', ['void']]],
'ExceptionPortValue' : [ 0x350, ['unsigned long long']],
'ExceptionPortState' : [ 0x350, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long long')]],
'Token' : [ 0x358, ['_EX_FAST_REF']],
'WorkingSetPage' : [ 0x360, ['unsigned long long']],
'AddressCreationLock' : [ 0x368, ['_EX_PUSH_LOCK']],
'PageTableCommitmentLock' : [ 0x370, ['_EX_PUSH_LOCK']],
'RotateInProgress' : [ 0x378, ['pointer64', ['_ETHREAD']]],
'ForkInProgress' : [ 0x380, ['pointer64', ['_ETHREAD']]],
'CommitChargeJob' : [ 0x388, ['pointer64', ['_EJOB']]],
'CloneRoot' : [ 0x390, ['_RTL_AVL_TREE']],
'NumberOfPrivatePages' : [ 0x398, ['unsigned long long']],
'NumberOfLockedPages' : [ 0x3a0, ['unsigned long long']],
'Win32Process' : [ 0x3a8, ['pointer64', ['void']]],
'Job' : [ 0x3b0, ['pointer64', ['_EJOB']]],
'SectionObject' : [ 0x3b8, ['pointer64', ['void']]],
'SectionBaseAddress' : [ 0x3c0, ['pointer64', ['void']]],
'Cookie' : [ 0x3c8, ['unsigned long']],
'WorkingSetWatch' : [ 0x3d0, ['pointer64', ['_PAGEFAULT_HISTORY']]],
'Win32WindowStation' : [ 0x3d8, ['pointer64', ['void']]],
'InheritedFromUniqueProcessId' : [ 0x3e0, ['pointer64', ['void']]],
'LdtInformation' : [ 0x3e8, ['pointer64', ['void']]],
'OwnerProcessId' : [ 0x3f0, ['unsigned long long']],
'Peb' : [ 0x3f8, ['pointer64', ['_PEB']]],
'Session' : [ 0x400, ['pointer64', ['void']]],
'AweInfo' : [ 0x408, ['pointer64', ['void']]],
'QuotaBlock' : [ 0x410, ['pointer64', ['_EPROCESS_QUOTA_BLOCK']]],
'ObjectTable' : [ 0x418, ['pointer64', ['_HANDLE_TABLE']]],
'DebugPort' : [ 0x420, ['pointer64', ['void']]],
'WoW64Process' : [ 0x428, ['pointer64', ['_EWOW64PROCESS']]],
'DeviceMap' : [ 0x430, ['pointer64', ['void']]],
'EtwDataSource' : [ 0x438, ['pointer64', ['void']]],
'PageDirectoryPte' : [ 0x440, ['unsigned long long']],
'ImageFilePointer' : [ 0x448, ['pointer64', ['_FILE_OBJECT']]],
'ImageFileName' : [ 0x450, ['array', 15, ['unsigned char']]],
'PriorityClass' : [ 0x45f, ['unsigned char']],
'SecurityPort' : [ 0x460, ['pointer64', ['void']]],
'SeAuditProcessCreationInfo' : [ 0x468, ['_SE_AUDIT_PROCESS_CREATION_INFO']],
'JobLinks' : [ 0x470, ['_LIST_ENTRY']],
'HighestUserAddress' : [ 0x480, ['pointer64', ['void']]],
'ThreadListHead' : [ 0x488, ['_LIST_ENTRY']],
'ActiveThreads' : [ 0x498, ['unsigned long']],
'ImagePathHash' : [ 0x49c, ['unsigned long']],
'DefaultHardErrorProcessing' : [ 0x4a0, ['unsigned long']],
'LastThreadExitStatus' : [ 0x4a4, ['long']],
'PrefetchTrace' : [ 0x4a8, ['_EX_FAST_REF']],
'LockedPagesList' : [ 0x4b0, ['pointer64', ['void']]],
'ReadOperationCount' : [ 0x4b8, ['_LARGE_INTEGER']],
'WriteOperationCount' : [ 0x4c0, ['_LARGE_INTEGER']],
'OtherOperationCount' : [ 0x4c8, ['_LARGE_INTEGER']],
'ReadTransferCount' : [ 0x4d0, ['_LARGE_INTEGER']],
'WriteTransferCount' : [ 0x4d8, ['_LARGE_INTEGER']],
'OtherTransferCount' : [ 0x4e0, ['_LARGE_INTEGER']],
'CommitChargeLimit' : [ 0x4e8, ['unsigned long long']],
'CommitCharge' : [ 0x4f0, ['unsigned long long']],
'CommitChargePeak' : [ 0x4f8, ['unsigned long long']],
'Vm' : [ 0x500, ['_MMSUPPORT']],
'MmProcessLinks' : [ 0x5f8, ['_LIST_ENTRY']],
'ModifiedPageCount' : [ 0x608, ['unsigned long']],
'ExitStatus' : [ 0x60c, ['long']],
'VadRoot' : [ 0x610, ['_RTL_AVL_TREE']],
'VadHint' : [ 0x618, ['pointer64', ['void']]],
'VadCount' : [ 0x620, ['unsigned long long']],
'VadPhysicalPages' : [ 0x628, ['unsigned long long']],
'VadPhysicalPagesLimit' : [ 0x630, ['unsigned long long']],
'AlpcContext' : [ 0x638, ['_ALPC_PROCESS_CONTEXT']],
'TimerResolutionLink' : [ 0x658, ['_LIST_ENTRY']],
'TimerResolutionStackRecord' : [ 0x668, ['pointer64', ['_PO_DIAG_STACK_RECORD']]],
'RequestedTimerResolution' : [ 0x670, ['unsigned long']],
'SmallestTimerResolution' : [ 0x674, ['unsigned long']],
'ExitTime' : [ 0x678, ['_LARGE_INTEGER']],
'InvertedFunctionTable' : [ 0x680, ['pointer64', ['_INVERTED_FUNCTION_TABLE']]],
'InvertedFunctionTableLock' : [ 0x688, ['_EX_PUSH_LOCK']],
'ActiveThreadsHighWatermark' : [ 0x690, ['unsigned long']],
'LargePrivateVadCount' : [ 0x694, ['unsigned long']],
'ThreadListLock' : [ 0x698, ['_EX_PUSH_LOCK']],
'WnfContext' : [ 0x6a0, ['pointer64', ['void']]],
'Spare0' : [ 0x6a8, ['unsigned long long']],
'SignatureLevel' : [ 0x6b0, ['unsigned char']],
'SectionSignatureLevel' : [ 0x6b1, ['unsigned char']],
'Protection' : [ 0x6b2, ['_PS_PROTECTION']],
'HangCount' : [ 0x6b3, ['unsigned char']],
'Flags3' : [ 0x6b4, ['unsigned long']],
'Minimal' : [ 0x6b4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReplacingPageRoot' : [ 0x6b4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'DisableNonSystemFonts' : [ 0x6b4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'AuditNonSystemFontLoading' : [ 0x6b4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Crashed' : [ 0x6b4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'JobVadsAreTracked' : [ 0x6b4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'VadTrackingDisabled' : [ 0x6b4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'AuxiliaryProcess' : [ 0x6b4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SubsystemProcess' : [ 0x6b4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'IndirectCpuSets' : [ 0x6b4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'InPrivate' : [ 0x6b4, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ProhibitRemoteImageMap' : [ 0x6b4, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'ProhibitLowILImageMap' : [ 0x6b4, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'SignatureMitigationOptIn' : [ 0x6b4, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'DeviceAsid' : [ 0x6b8, ['long']],
'SvmData' : [ 0x6c0, ['pointer64', ['void']]],
'SvmProcessLock' : [ 0x6c8, ['_EX_PUSH_LOCK']],
'SvmLock' : [ 0x6d0, ['unsigned long long']],
'SvmProcessDeviceListHead' : [ 0x6d8, ['_LIST_ENTRY']],
'LastFreezeInterruptTime' : [ 0x6e8, ['unsigned long long']],
'DiskCounters' : [ 0x6f0, ['pointer64', ['_PROCESS_DISK_COUNTERS']]],
'PicoContext' : [ 0x6f8, ['pointer64', ['void']]],
'TrustletIdentity' : [ 0x700, ['unsigned long long']],
'KeepAliveCounter' : [ 0x708, ['unsigned long']],
'NoWakeKeepAliveCounter' : [ 0x70c, ['unsigned long']],
'HighPriorityFaultsAllowed' : [ 0x710, ['unsigned long']],
'EnergyValues' : [ 0x718, ['pointer64', ['_PROCESS_ENERGY_VALUES']]],
'VmContext' : [ 0x720, ['pointer64', ['void']]],
'SequenceNumber' : [ 0x728, ['unsigned long long']],
'CreateInterruptTime' : [ 0x730, ['unsigned long long']],
'CreateUnbiasedInterruptTime' : [ 0x738, ['unsigned long long']],
'TotalUnbiasedFrozenTime' : [ 0x740, ['unsigned long long']],
'LastAppStateUpdateTime' : [ 0x748, ['unsigned long long']],
'LastAppStateUptime' : [ 0x750, ['BitField', dict(start_bit = 0, end_bit = 61, native_type='unsigned long long')]],
'LastAppState' : [ 0x750, ['BitField', dict(start_bit = 61, end_bit = 64, native_type='unsigned long long')]],
'SharedCommitCharge' : [ 0x758, ['unsigned long long']],
'SharedCommitLock' : [ 0x760, ['_EX_PUSH_LOCK']],
'SharedCommitLinks' : [ 0x768, ['_LIST_ENTRY']],
'AllowedCpuSets' : [ 0x778, ['unsigned long long']],
'DefaultCpuSets' : [ 0x780, ['unsigned long long']],
'AllowedCpuSetsIndirect' : [ 0x778, ['pointer64', ['unsigned long long']]],
'DefaultCpuSetsIndirect' : [ 0x780, ['pointer64', ['unsigned long long']]],
} ],
'_EWOW64PROCESS' : [ 0x10, {
'Peb' : [ 0x0, ['pointer64', ['void']]],
'Machine' : [ 0x8, ['unsigned short']],
} ],
'_ETHREAD' : [ 0x7c0, {
'Tcb' : [ 0x0, ['_KTHREAD']],
'CreateTime' : [ 0x5d8, ['_LARGE_INTEGER']],
'ExitTime' : [ 0x5e0, ['_LARGE_INTEGER']],
'KeyedWaitChain' : [ 0x5e0, ['_LIST_ENTRY']],
'ChargeOnlySession' : [ 0x5f0, ['pointer64', ['void']]],
'PostBlockList' : [ 0x5f8, ['_LIST_ENTRY']],
'ForwardLinkShadow' : [ 0x5f8, ['pointer64', ['void']]],
'StartAddress' : [ 0x600, ['pointer64', ['void']]],
'TerminationPort' : [ 0x608, ['pointer64', ['_TERMINATION_PORT']]],
'ReaperLink' : [ 0x608, ['pointer64', ['_ETHREAD']]],
'KeyedWaitValue' : [ 0x608, ['pointer64', ['void']]],
'ActiveTimerListLock' : [ 0x610, ['unsigned long long']],
'ActiveTimerListHead' : [ 0x618, ['_LIST_ENTRY']],
'Cid' : [ 0x628, ['_CLIENT_ID']],
'KeyedWaitSemaphore' : [ 0x638, ['_KSEMAPHORE']],
'AlpcWaitSemaphore' : [ 0x638, ['_KSEMAPHORE']],
'ClientSecurity' : [ 0x658, ['_PS_CLIENT_SECURITY_CONTEXT']],
'IrpList' : [ 0x660, ['_LIST_ENTRY']],
'TopLevelIrp' : [ 0x670, ['unsigned long long']],
'DeviceToVerify' : [ 0x678, ['pointer64', ['_DEVICE_OBJECT']]],
'Win32StartAddress' : [ 0x680, ['pointer64', ['void']]],
'LegacyPowerObject' : [ 0x688, ['pointer64', ['void']]],
'ThreadListEntry' : [ 0x690, ['_LIST_ENTRY']],
'RundownProtect' : [ 0x6a0, ['_EX_RUNDOWN_REF']],
'ThreadLock' : [ 0x6a8, ['_EX_PUSH_LOCK']],
'ReadClusterSize' : [ 0x6b0, ['unsigned long']],
'MmLockOrdering' : [ 0x6b4, ['long']],
'CmLockOrdering' : [ 0x6b8, ['long']],
'CrossThreadFlags' : [ 0x6bc, ['unsigned long']],
'Terminated' : [ 0x6bc, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ThreadInserted' : [ 0x6bc, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HideFromDebugger' : [ 0x6bc, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ActiveImpersonationInfo' : [ 0x6bc, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'HardErrorsAreDisabled' : [ 0x6bc, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x6bc, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'SkipCreationMsg' : [ 0x6bc, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SkipTerminationMsg' : [ 0x6bc, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'CopyTokenOnOpen' : [ 0x6bc, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'ThreadIoPriority' : [ 0x6bc, ['BitField', dict(start_bit = 9, end_bit = 12, native_type='unsigned long')]],
'ThreadPagePriority' : [ 0x6bc, ['BitField', dict(start_bit = 12, end_bit = 15, native_type='unsigned long')]],
'RundownFail' : [ 0x6bc, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'UmsForceQueueTermination' : [ 0x6bc, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'IndirectCpuSets' : [ 0x6bc, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'ReservedCrossThreadFlags' : [ 0x6bc, ['BitField', dict(start_bit = 18, end_bit = 32, native_type='unsigned long')]],
'SameThreadPassiveFlags' : [ 0x6c0, ['unsigned long']],
'ActiveExWorker' : [ 0x6c0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'MemoryMaker' : [ 0x6c0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'StoreLockThread' : [ 0x6c0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ClonedThread' : [ 0x6c0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'KeyedEventInUse' : [ 0x6c0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'SelfTerminate' : [ 0x6c0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'RespectIoPriority' : [ 0x6c0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'ReservedSameThreadPassiveFlags' : [ 0x6c0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
'SameThreadApcFlags' : [ 0x6c4, ['unsigned long']],
'OwnsProcessAddressSpaceExclusive' : [ 0x6c4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'OwnsProcessAddressSpaceShared' : [ 0x6c4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'HardFaultBehavior' : [ 0x6c4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'StartAddressInvalid' : [ 0x6c4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'EtwCalloutActive' : [ 0x6c4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SuppressSymbolLoad' : [ 0x6c4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Prefetching' : [ 0x6c4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsVadExclusive' : [ 0x6c4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'SystemPagePriorityActive' : [ 0x6c5, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'SystemPagePriority' : [ 0x6c5, ['BitField', dict(start_bit = 1, end_bit = 4, native_type='unsigned char')]],
'CacheManagerActive' : [ 0x6c8, ['unsigned char']],
'DisablePageFaultClustering' : [ 0x6c9, ['unsigned char']],
'ActiveFaultCount' : [ 0x6ca, ['unsigned char']],
'LockOrderState' : [ 0x6cb, ['unsigned char']],
'AlpcMessageId' : [ 0x6d0, ['unsigned long long']],
'AlpcMessage' : [ 0x6d8, ['pointer64', ['void']]],
'AlpcReceiveAttributeSet' : [ 0x6d8, ['unsigned long']],
'ExitStatus' : [ 0x6e0, ['long']],
'AlpcWaitListEntry' : [ 0x6e8, ['_LIST_ENTRY']],
'CacheManagerCount' : [ 0x6f8, ['unsigned long']],
'IoBoostCount' : [ 0x6fc, ['unsigned long']],
'BoostList' : [ 0x700, ['_LIST_ENTRY']],
'DeboostList' : [ 0x710, ['_LIST_ENTRY']],
'BoostListLock' : [ 0x720, ['unsigned long long']],
'IrpListLock' : [ 0x728, ['unsigned long long']],
'ReservedForSynchTracking' : [ 0x730, ['pointer64', ['void']]],
'CmCallbackListHead' : [ 0x738, ['_SINGLE_LIST_ENTRY']],
'ActivityId' : [ 0x740, ['pointer64', ['_GUID']]],
'SeLearningModeListHead' : [ 0x748, ['_SINGLE_LIST_ENTRY']],
'VerifierContext' : [ 0x750, ['pointer64', ['void']]],
'KernelStackReference' : [ 0x758, ['unsigned long']],
'AdjustedClientToken' : [ 0x760, ['pointer64', ['void']]],
'WorkingOnBehalfClient' : [ 0x768, ['pointer64', ['void']]],
'PropertySet' : [ 0x770, ['_PS_PROPERTY_SET']],
'PicoContext' : [ 0x788, ['pointer64', ['void']]],
'UserFsBase' : [ 0x790, ['unsigned long']],
'UserGsBase' : [ 0x798, ['unsigned long long']],
'EnergyValues' : [ 0x7a0, ['pointer64', ['_THREAD_ENERGY_VALUES']]],
'CmCellReferences' : [ 0x7a8, ['unsigned long']],
'SelectedCpuSets' : [ 0x7b0, ['unsigned long long']],
'SelectedCpuSetsIndirect' : [ 0x7b0, ['pointer64', ['unsigned long long']]],
'Silo' : [ 0x7b8, ['pointer64', ['_EJOB']]],
} ],
'__unnamed_13c5' : [ 0x8, {
'MasterIrp' : [ 0x0, ['pointer64', ['_IRP']]],
'IrpCount' : [ 0x0, ['long']],
'SystemBuffer' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_13cb' : [ 0x10, {
'UserApcRoutine' : [ 0x0, ['pointer64', ['void']]],
'IssuingProcess' : [ 0x0, ['pointer64', ['void']]],
'UserApcContext' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_13cd' : [ 0x10, {
'AsynchronousParameters' : [ 0x0, ['__unnamed_13cb']],
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'__unnamed_13d6' : [ 0x58, {
'DeviceQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DriverContext' : [ 0x0, ['array', 4, ['pointer64', ['void']]]],
'Thread' : [ 0x20, ['pointer64', ['_ETHREAD']]],
'AuxiliaryBuffer' : [ 0x28, ['pointer64', ['unsigned char']]],
'ListEntry' : [ 0x30, ['_LIST_ENTRY']],
'CurrentStackLocation' : [ 0x40, ['pointer64', ['_IO_STACK_LOCATION']]],
'PacketType' : [ 0x40, ['unsigned long']],
'OriginalFileObject' : [ 0x48, ['pointer64', ['_FILE_OBJECT']]],
'IrpExtension' : [ 0x50, ['pointer64', ['void']]],
} ],
'__unnamed_13d8' : [ 0x58, {
'Overlay' : [ 0x0, ['__unnamed_13d6']],
'Apc' : [ 0x0, ['_KAPC']],
'CompletionKey' : [ 0x0, ['pointer64', ['void']]],
} ],
'_IRP' : [ 0xd0, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'AllocationProcessorNumber' : [ 0x4, ['unsigned short']],
'Reserved' : [ 0x6, ['unsigned short']],
'MdlAddress' : [ 0x8, ['pointer64', ['_MDL']]],
'Flags' : [ 0x10, ['unsigned long']],
'AssociatedIrp' : [ 0x18, ['__unnamed_13c5']],
'ThreadListEntry' : [ 0x20, ['_LIST_ENTRY']],
'IoStatus' : [ 0x30, ['_IO_STATUS_BLOCK']],
'RequestorMode' : [ 0x40, ['unsigned char']],
'PendingReturned' : [ 0x41, ['unsigned char']],
'StackCount' : [ 0x42, ['unsigned char']],
'CurrentLocation' : [ 0x43, ['unsigned char']],
'Cancel' : [ 0x44, ['unsigned char']],
'CancelIrql' : [ 0x45, ['unsigned char']],
'ApcEnvironment' : [ 0x46, ['unsigned char']],
'AllocationFlags' : [ 0x47, ['unsigned char']],
'UserIosb' : [ 0x48, ['pointer64', ['_IO_STATUS_BLOCK']]],
'UserEvent' : [ 0x50, ['pointer64', ['_KEVENT']]],
'Overlay' : [ 0x58, ['__unnamed_13cd']],
'CancelRoutine' : [ 0x68, ['pointer64', ['void']]],
'UserBuffer' : [ 0x70, ['pointer64', ['void']]],
'Tail' : [ 0x78, ['__unnamed_13d8']],
} ],
'__unnamed_13df' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'FileAttributes' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'EaLength' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_13e3' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'Parameters' : [ 0x18, ['pointer64', ['_NAMED_PIPE_CREATE_PARAMETERS']]],
} ],
'__unnamed_13e7' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'Parameters' : [ 0x18, ['pointer64', ['_MAILSLOT_CREATE_PARAMETERS']]],
} ],
'__unnamed_13e9' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Key' : [ 0x8, ['unsigned long']],
'ByteOffset' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_13ed' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'FileName' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
'FileInformationClass' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileUnusedInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileRenameInformationBypassAccessCheck', 57: 'FileLinkInformationBypassAccessCheck', 58: 
'FileVolumeNameInformation', 59: 'FileIdInformation', 60: 'FileIdExtdDirectoryInformation', 61: 'FileReplaceCompletionInformation', 62: 'FileHardLinkFullIdInformation', 63: 'FileIdExtdBothDirectoryInformation', 64: 'FileMaximumInformation'})]],
'FileIndex' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_13ef' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'CompletionFilter' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_13f1' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileUnusedInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileRenameInformationBypassAccessCheck', 57: 'FileLinkInformationBypassAccessCheck', 58: 
'FileVolumeNameInformation', 59: 'FileIdInformation', 60: 'FileIdExtdDirectoryInformation', 61: 'FileReplaceCompletionInformation', 62: 'FileHardLinkFullIdInformation', 63: 'FileIdExtdBothDirectoryInformation', 64: 'FileMaximumInformation'})]],
} ],
'__unnamed_13f3' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileUnusedInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileRenameInformationBypassAccessCheck', 57: 'FileLinkInformationBypassAccessCheck', 58: 
'FileVolumeNameInformation', 59: 'FileIdInformation', 60: 'FileIdExtdDirectoryInformation', 61: 'FileReplaceCompletionInformation', 62: 'FileHardLinkFullIdInformation', 63: 'FileIdExtdBothDirectoryInformation', 64: 'FileMaximumInformation'})]],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'ReplaceIfExists' : [ 0x18, ['unsigned char']],
'AdvanceOnly' : [ 0x19, ['unsigned char']],
'ClusterCount' : [ 0x18, ['unsigned long']],
'DeleteHandle' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_13f5' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'EaList' : [ 0x8, ['pointer64', ['void']]],
'EaListLength' : [ 0x10, ['unsigned long']],
'EaIndex' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_13f7' : [ 0x4, {
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_13fb' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FsInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileFsVolumeInformation', 2: 'FileFsLabelInformation', 3: 'FileFsSizeInformation', 4: 'FileFsDeviceInformation', 5: 'FileFsAttributeInformation', 6: 'FileFsControlInformation', 7: 'FileFsFullSizeInformation', 8: 'FileFsObjectIdInformation', 9: 'FileFsDriverPathInformation', 10: 'FileFsVolumeFlagsInformation', 11: 'FileFsSectorSizeInformation', 12: 'FileFsDataCopyInformation', 13: 'FileFsMetadataSizeInformation', 14: 'FileFsMaximumInformation'})]],
} ],
'__unnamed_13fd' : [ 0x20, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x8, ['unsigned long']],
'FsControlCode' : [ 0x10, ['unsigned long']],
'Type3InputBuffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_13ff' : [ 0x18, {
'Length' : [ 0x0, ['pointer64', ['_LARGE_INTEGER']]],
'Key' : [ 0x8, ['unsigned long']],
'ByteOffset' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1401' : [ 0x20, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x8, ['unsigned long']],
'IoControlCode' : [ 0x10, ['unsigned long']],
'Type3InputBuffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1403' : [ 0x10, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1405' : [ 0x10, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'SecurityDescriptor' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_1409' : [ 0x10, {
'Vpb' : [ 0x0, ['pointer64', ['_VPB']]],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_140d' : [ 0x8, {
'Srb' : [ 0x0, ['pointer64', ['_SCSI_REQUEST_BLOCK']]],
} ],
'__unnamed_1411' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'StartSid' : [ 0x8, ['pointer64', ['void']]],
'SidList' : [ 0x10, ['pointer64', ['_FILE_GET_QUOTA_INFORMATION']]],
'SidListLength' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1415' : [ 0x4, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusRelations', 1: 'EjectionRelations', 2: 'PowerRelations', 3: 'RemovalRelations', 4: 'TargetDeviceRelation', 5: 'SingleBusRelations', 6: 'TransportRelations'})]],
} ],
'__unnamed_1419' : [ 0x20, {
'InterfaceType' : [ 0x0, ['pointer64', ['_GUID']]],
'Size' : [ 0x8, ['unsigned short']],
'Version' : [ 0xa, ['unsigned short']],
'Interface' : [ 0x10, ['pointer64', ['_INTERFACE']]],
'InterfaceSpecificData' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_141d' : [ 0x8, {
'Capabilities' : [ 0x0, ['pointer64', ['_DEVICE_CAPABILITIES']]],
} ],
'__unnamed_1421' : [ 0x8, {
'IoResourceRequirementList' : [ 0x0, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
} ],
'__unnamed_1423' : [ 0x20, {
'WhichSpace' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x8, ['pointer64', ['void']]],
'Offset' : [ 0x10, ['unsigned long']],
'Length' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1425' : [ 0x1, {
'Lock' : [ 0x0, ['unsigned char']],
} ],
'__unnamed_1429' : [ 0x4, {
'IdType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusQueryDeviceID', 1: 'BusQueryHardwareIDs', 2: 'BusQueryCompatibleIDs', 3: 'BusQueryInstanceID', 4: 'BusQueryDeviceSerialNumber', 5: 'BusQueryContainerID'})]],
} ],
'__unnamed_142d' : [ 0x10, {
'DeviceTextType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceTextDescription', 1: 'DeviceTextLocationInformation'})]],
'LocaleId' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1431' : [ 0x10, {
'InPath' : [ 0x0, ['unsigned char']],
'Reserved' : [ 0x1, ['array', 3, ['unsigned char']]],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile', 4: 'DeviceUsageTypeBoot', 5: 'DeviceUsageTypePostDisplay'})]],
} ],
'__unnamed_1435' : [ 0x4, {
'PowerState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_1439' : [ 0x8, {
'PowerSequence' : [ 0x0, ['pointer64', ['_POWER_SEQUENCE']]],
} ],
'__unnamed_1441' : [ 0x20, {
'SystemContext' : [ 0x0, ['unsigned long']],
'SystemPowerStateContext' : [ 0x0, ['_SYSTEM_POWER_STATE_CONTEXT']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SystemPowerState', 1: 'DevicePowerState'})]],
'State' : [ 0x10, ['_POWER_STATE']],
'ShutdownType' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject', 8: 'PowerActionDisplayOff'})]],
} ],
'__unnamed_1445' : [ 0x10, {
'AllocatedResources' : [ 0x0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'AllocatedResourcesTranslated' : [ 0x8, ['pointer64', ['_CM_RESOURCE_LIST']]],
} ],
'__unnamed_1447' : [ 0x20, {
'ProviderId' : [ 0x0, ['unsigned long long']],
'DataPath' : [ 0x8, ['pointer64', ['void']]],
'BufferSize' : [ 0x10, ['unsigned long']],
'Buffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1449' : [ 0x20, {
'Argument1' : [ 0x0, ['pointer64', ['void']]],
'Argument2' : [ 0x8, ['pointer64', ['void']]],
'Argument3' : [ 0x10, ['pointer64', ['void']]],
'Argument4' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_144b' : [ 0x20, {
'Create' : [ 0x0, ['__unnamed_13df']],
'CreatePipe' : [ 0x0, ['__unnamed_13e3']],
'CreateMailslot' : [ 0x0, ['__unnamed_13e7']],
'Read' : [ 0x0, ['__unnamed_13e9']],
'Write' : [ 0x0, ['__unnamed_13e9']],
'QueryDirectory' : [ 0x0, ['__unnamed_13ed']],
'NotifyDirectory' : [ 0x0, ['__unnamed_13ef']],
'QueryFile' : [ 0x0, ['__unnamed_13f1']],
'SetFile' : [ 0x0, ['__unnamed_13f3']],
'QueryEa' : [ 0x0, ['__unnamed_13f5']],
'SetEa' : [ 0x0, ['__unnamed_13f7']],
'QueryVolume' : [ 0x0, ['__unnamed_13fb']],
'SetVolume' : [ 0x0, ['__unnamed_13fb']],
'FileSystemControl' : [ 0x0, ['__unnamed_13fd']],
'LockControl' : [ 0x0, ['__unnamed_13ff']],
'DeviceIoControl' : [ 0x0, ['__unnamed_1401']],
'QuerySecurity' : [ 0x0, ['__unnamed_1403']],
'SetSecurity' : [ 0x0, ['__unnamed_1405']],
'MountVolume' : [ 0x0, ['__unnamed_1409']],
'VerifyVolume' : [ 0x0, ['__unnamed_1409']],
'Scsi' : [ 0x0, ['__unnamed_140d']],
'QueryQuota' : [ 0x0, ['__unnamed_1411']],
'SetQuota' : [ 0x0, ['__unnamed_13f7']],
'QueryDeviceRelations' : [ 0x0, ['__unnamed_1415']],
'QueryInterface' : [ 0x0, ['__unnamed_1419']],
'DeviceCapabilities' : [ 0x0, ['__unnamed_141d']],
'FilterResourceRequirements' : [ 0x0, ['__unnamed_1421']],
'ReadWriteConfig' : [ 0x0, ['__unnamed_1423']],
'SetLock' : [ 0x0, ['__unnamed_1425']],
'QueryId' : [ 0x0, ['__unnamed_1429']],
'QueryDeviceText' : [ 0x0, ['__unnamed_142d']],
'UsageNotification' : [ 0x0, ['__unnamed_1431']],
'WaitWake' : [ 0x0, ['__unnamed_1435']],
'PowerSequence' : [ 0x0, ['__unnamed_1439']],
'Power' : [ 0x0, ['__unnamed_1441']],
'StartDevice' : [ 0x0, ['__unnamed_1445']],
'WMI' : [ 0x0, ['__unnamed_1447']],
'Others' : [ 0x0, ['__unnamed_1449']],
} ],
'_IO_STACK_LOCATION' : [ 0x48, {
'MajorFunction' : [ 0x0, ['unsigned char']],
'MinorFunction' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned char']],
'Control' : [ 0x3, ['unsigned char']],
'Parameters' : [ 0x8, ['__unnamed_144b']],
'DeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'CompletionRoutine' : [ 0x38, ['pointer64', ['void']]],
'Context' : [ 0x40, ['pointer64', ['void']]],
} ],
'__unnamed_1461' : [ 0x48, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Wcb' : [ 0x0, ['_WAIT_CONTEXT_BLOCK']],
} ],
'_DEVICE_OBJECT' : [ 0x150, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'ReferenceCount' : [ 0x4, ['long']],
'DriverObject' : [ 0x8, ['pointer64', ['_DRIVER_OBJECT']]],
'NextDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT']]],
'CurrentIrp' : [ 0x20, ['pointer64', ['_IRP']]],
'Timer' : [ 0x28, ['pointer64', ['_IO_TIMER']]],
'Flags' : [ 0x30, ['unsigned long']],
'Characteristics' : [ 0x34, ['unsigned long']],
'Vpb' : [ 0x38, ['pointer64', ['_VPB']]],
'DeviceExtension' : [ 0x40, ['pointer64', ['void']]],
'DeviceType' : [ 0x48, ['unsigned long']],
'StackSize' : [ 0x4c, ['unsigned char']],
'Queue' : [ 0x50, ['__unnamed_1461']],
'AlignmentRequirement' : [ 0x98, ['unsigned long']],
'DeviceQueue' : [ 0xa0, ['_KDEVICE_QUEUE']],
'Dpc' : [ 0xc8, ['_KDPC']],
'ActiveThreadCount' : [ 0x108, ['unsigned long']],
'SecurityDescriptor' : [ 0x110, ['pointer64', ['void']]],
'DeviceLock' : [ 0x118, ['_KEVENT']],
'SectorSize' : [ 0x130, ['unsigned short']],
'Spare1' : [ 0x132, ['unsigned short']],
'DeviceObjectExtension' : [ 0x138, ['pointer64', ['_DEVOBJ_EXTENSION']]],
'Reserved' : [ 0x140, ['pointer64', ['void']]],
} ],
'_KDPC' : [ 0x40, {
'TargetInfoAsUlong' : [ 0x0, ['unsigned long']],
'Type' : [ 0x0, ['unsigned char']],
'Importance' : [ 0x1, ['unsigned char']],
'Number' : [ 0x2, ['unsigned short']],
'DpcListEntry' : [ 0x8, ['_SINGLE_LIST_ENTRY']],
'ProcessorHistory' : [ 0x10, ['unsigned long long']],
'DeferredRoutine' : [ 0x18, ['pointer64', ['void']]],
'DeferredContext' : [ 0x20, ['pointer64', ['void']]],
'SystemArgument1' : [ 0x28, ['pointer64', ['void']]],
'SystemArgument2' : [ 0x30, ['pointer64', ['void']]],
'DpcData' : [ 0x38, ['pointer64', ['void']]],
} ],
'_IO_DRIVER_CREATE_CONTEXT' : [ 0x28, {
'Size' : [ 0x0, ['short']],
'ExtraCreateParameter' : [ 0x8, ['pointer64', ['_ECP_LIST']]],
'DeviceObjectHint' : [ 0x10, ['pointer64', ['void']]],
'TxnParameters' : [ 0x18, ['pointer64', ['_TXN_PARAMETER_BLOCK']]],
'SiloContext' : [ 0x20, ['pointer64', ['_EJOB']]],
} ],
'_EJOB' : [ 0x528, {
'Event' : [ 0x0, ['_KEVENT']],
'JobLinks' : [ 0x18, ['_LIST_ENTRY']],
'ProcessListHead' : [ 0x28, ['_LIST_ENTRY']],
'JobLock' : [ 0x38, ['_ERESOURCE']],
'TotalUserTime' : [ 0xa0, ['_LARGE_INTEGER']],
'TotalKernelTime' : [ 0xa8, ['_LARGE_INTEGER']],
'TotalCycleTime' : [ 0xb0, ['_LARGE_INTEGER']],
'ThisPeriodTotalUserTime' : [ 0xb8, ['_LARGE_INTEGER']],
'ThisPeriodTotalKernelTime' : [ 0xc0, ['_LARGE_INTEGER']],
'TotalContextSwitches' : [ 0xc8, ['unsigned long long']],
'TotalPageFaultCount' : [ 0xd0, ['unsigned long']],
'TotalProcesses' : [ 0xd4, ['unsigned long']],
'ActiveProcesses' : [ 0xd8, ['unsigned long']],
'TotalTerminatedProcesses' : [ 0xdc, ['unsigned long']],
'PerProcessUserTimeLimit' : [ 0xe0, ['_LARGE_INTEGER']],
'PerJobUserTimeLimit' : [ 0xe8, ['_LARGE_INTEGER']],
'MinimumWorkingSetSize' : [ 0xf0, ['unsigned long long']],
'MaximumWorkingSetSize' : [ 0xf8, ['unsigned long long']],
'LimitFlags' : [ 0x100, ['unsigned long']],
'ActiveProcessLimit' : [ 0x104, ['unsigned long']],
'Affinity' : [ 0x108, ['_KAFFINITY_EX']],
'AccessState' : [ 0x1b0, ['pointer64', ['_JOB_ACCESS_STATE']]],
'AccessStateQuotaReference' : [ 0x1b8, ['pointer64', ['void']]],
'UIRestrictionsClass' : [ 0x1c0, ['unsigned long']],
'EndOfJobTimeAction' : [ 0x1c4, ['unsigned long']],
'CompletionPort' : [ 0x1c8, ['pointer64', ['void']]],
'CompletionKey' : [ 0x1d0, ['pointer64', ['void']]],
'CompletionCount' : [ 0x1d8, ['unsigned long long']],
'SessionId' : [ 0x1e0, ['unsigned long']],
'SchedulingClass' : [ 0x1e4, ['unsigned long']],
'ReadOperationCount' : [ 0x1e8, ['unsigned long long']],
'WriteOperationCount' : [ 0x1f0, ['unsigned long long']],
'OtherOperationCount' : [ 0x1f8, ['unsigned long long']],
'ReadTransferCount' : [ 0x200, ['unsigned long long']],
'WriteTransferCount' : [ 0x208, ['unsigned long long']],
'OtherTransferCount' : [ 0x210, ['unsigned long long']],
'DiskIoInfo' : [ 0x218, ['_PROCESS_DISK_COUNTERS']],
'ProcessMemoryLimit' : [ 0x240, ['unsigned long long']],
'JobMemoryLimit' : [ 0x248, ['unsigned long long']],
'JobTotalMemoryLimit' : [ 0x250, ['unsigned long long']],
'PeakProcessMemoryUsed' : [ 0x258, ['unsigned long long']],
'PeakJobMemoryUsed' : [ 0x260, ['unsigned long long']],
'EffectiveAffinity' : [ 0x268, ['_KAFFINITY_EX']],
'EffectivePerProcessUserTimeLimit' : [ 0x310, ['_LARGE_INTEGER']],
'EffectiveMinimumWorkingSetSize' : [ 0x318, ['unsigned long long']],
'EffectiveMaximumWorkingSetSize' : [ 0x320, ['unsigned long long']],
'EffectiveProcessMemoryLimit' : [ 0x328, ['unsigned long long']],
'EffectiveProcessMemoryLimitJob' : [ 0x330, ['pointer64', ['_EJOB']]],
'EffectivePerProcessUserTimeLimitJob' : [ 0x338, ['pointer64', ['_EJOB']]],
'EffectiveDiskIoRateLimitJob' : [ 0x340, ['pointer64', ['_EJOB']]],
'EffectiveNetIoRateLimitJob' : [ 0x348, ['pointer64', ['_EJOB']]],
'EffectiveHeapAttributionJob' : [ 0x350, ['pointer64', ['_EJOB']]],
'EffectiveLimitFlags' : [ 0x358, ['unsigned long']],
'EffectiveSchedulingClass' : [ 0x35c, ['unsigned long']],
'EffectiveFreezeCount' : [ 0x360, ['unsigned long']],
'EffectiveBackgroundCount' : [ 0x364, ['unsigned long']],
'EffectiveSwapCount' : [ 0x368, ['unsigned long']],
'EffectiveNotificationLimitCount' : [ 0x36c, ['unsigned long']],
'EffectivePriorityClass' : [ 0x370, ['unsigned char']],
'PriorityClass' : [ 0x371, ['unsigned char']],
'NestingDepth' : [ 0x372, ['unsigned char']],
'Reserved1' : [ 0x373, ['array', 1, ['unsigned char']]],
'CompletionFilter' : [ 0x374, ['unsigned long']],
'WakeChannel' : [ 0x378, ['_WNF_STATE_NAME']],
'WakeInfo' : [ 0x378, ['_PS_WAKE_INFORMATION']],
'WakeFilter' : [ 0x3b0, ['_JOBOBJECT_WAKE_FILTER']],
'LowEdgeLatchFilter' : [ 0x3b8, ['unsigned long']],
'OwnedHighEdgeFilters' : [ 0x3bc, ['unsigned long']],
'NotificationLink' : [ 0x3c0, ['pointer64', ['_EJOB']]],
'CurrentJobMemoryUsed' : [ 0x3c8, ['unsigned long long']],
'NotificationInfo' : [ 0x3d0, ['pointer64', ['_JOB_NOTIFICATION_INFORMATION']]],
'NotificationInfoQuotaReference' : [ 0x3d8, ['pointer64', ['void']]],
'NotificationPacket' : [ 0x3e0, ['pointer64', ['_IO_MINI_COMPLETION_PACKET_USER']]],
'CpuRateControl' : [ 0x3e8, ['pointer64', ['_JOB_CPU_RATE_CONTROL']]],
'EffectiveSchedulingGroup' : [ 0x3f0, ['pointer64', ['void']]],
'ReadyTime' : [ 0x3f8, ['unsigned long long']],
'MemoryLimitsLock' : [ 0x400, ['_EX_PUSH_LOCK']],
'SiblingJobLinks' : [ 0x408, ['_LIST_ENTRY']],
'ChildJobListHead' : [ 0x418, ['_LIST_ENTRY']],
'ParentJob' : [ 0x428, ['pointer64', ['_EJOB']]],
'RootJob' : [ 0x430, ['pointer64', ['_EJOB']]],
'IteratorListHead' : [ 0x438, ['_LIST_ENTRY']],
'AncestorCount' : [ 0x448, ['unsigned long long']],
'Ancestors' : [ 0x450, ['pointer64', ['pointer64', ['_EJOB']]]],
'SessionObject' : [ 0x450, ['pointer64', ['void']]],
'Accounting' : [ 0x458, ['_EPROCESS_VALUES']],
'ShadowActiveProcessCount' : [ 0x4a8, ['unsigned long']],
'ActiveAuxiliaryProcessCount' : [ 0x4ac, ['unsigned long']],
'SequenceNumber' : [ 0x4b0, ['unsigned long']],
'TimerListLock' : [ 0x4b8, ['unsigned long long']],
'TimerListHead' : [ 0x4c0, ['_LIST_ENTRY']],
'ContainerId' : [ 0x4d0, ['_GUID']],
'Container' : [ 0x4e0, ['pointer64', ['_SILO_CONTEXT']]],
'PropertySet' : [ 0x4e8, ['_PS_PROPERTY_SET']],
'NetRateControl' : [ 0x500, ['pointer64', ['_JOB_NET_RATE_CONTROL']]],
'IoRateControl' : [ 0x508, ['pointer64', ['_JOB_IO_RATE_CONTROL']]],
'JobFlags' : [ 0x510, ['unsigned long']],
'CloseDone' : [ 0x510, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'MultiGroup' : [ 0x510, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'OutstandingNotification' : [ 0x510, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'NotificationInProgress' : [ 0x510, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'UILimits' : [ 0x510, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'CpuRateControlActive' : [ 0x510, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'OwnCpuRateControl' : [ 0x510, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Terminating' : [ 0x510, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'WorkingSetLock' : [ 0x510, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'JobFrozen' : [ 0x510, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Background' : [ 0x510, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'WakeNotificationAllocated' : [ 0x510, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WakeNotificationEnabled' : [ 0x510, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'WakeNotificationPending' : [ 0x510, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'LimitNotificationRequired' : [ 0x510, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'ZeroCountNotificationRequired' : [ 0x510, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'CycleTimeNotificationRequired' : [ 0x510, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'CycleTimeNotificationPending' : [ 0x510, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'TimersVirtualized' : [ 0x510, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'JobSwapped' : [ 0x510, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'ViolationDetected' : [ 0x510, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'EmptyJobNotified' : [ 0x510, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'NoSystemCharge' : [ 0x510, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'DropNoWakeCharges' : [ 0x510, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'NoWakeChargePolicyDecided' : [ 0x510, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'NetRateControlActive' : [ 0x510, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'OwnNetRateControl' : [ 0x510, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'IoRateControlActive' : [ 0x510, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'OwnIoRateControl' : [ 0x510, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'DisallowNewProcesses' : [ 0x510, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'SpareJobFlags' : [ 0x510, ['BitField', dict(start_bit = 30, end_bit = 32, native_type='unsigned long')]],
'EffectiveHighEdgeFilters' : [ 0x514, ['unsigned long']],
'EnergyValues' : [ 0x518, ['pointer64', ['_PROCESS_ENERGY_VALUES']]],
'SharedCommitCharge' : [ 0x520, ['unsigned long long']],
} ],
'_IO_PRIORITY_INFO' : [ 0x10, {
'Size' : [ 0x0, ['unsigned long']],
'ThreadPriority' : [ 0x4, ['unsigned long']],
'PagePriority' : [ 0x8, ['unsigned long']],
'IoPriority' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'IoPriorityVeryLow', 1: 'IoPriorityLow', 2: 'IoPriorityNormal', 3: 'IoPriorityHigh', 4: 'IoPriorityCritical', 5: 'MaxIoPriorityTypes'})]],
} ],
'_MDL' : [ 0x30, {
'Next' : [ 0x0, ['pointer64', ['_MDL']]],
'Size' : [ 0x8, ['short']],
'MdlFlags' : [ 0xa, ['short']],
'AllocationProcessorNumber' : [ 0xc, ['unsigned short']],
'Reserved' : [ 0xe, ['unsigned short']],
'Process' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'MappedSystemVa' : [ 0x18, ['pointer64', ['void']]],
'StartVa' : [ 0x20, ['pointer64', ['void']]],
'ByteCount' : [ 0x28, ['unsigned long']],
'ByteOffset' : [ 0x2c, ['unsigned long']],
} ],
'_EVENT_DATA_DESCRIPTOR' : [ 0x10, {
'Ptr' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'Type' : [ 0xc, ['unsigned char']],
'Reserved1' : [ 0xd, ['unsigned char']],
'Reserved2' : [ 0xe, ['unsigned short']],
} ],
'_EVENT_DESCRIPTOR' : [ 0x10, {
'Id' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned char']],
'Channel' : [ 0x3, ['unsigned char']],
'Level' : [ 0x4, ['unsigned char']],
'Opcode' : [ 0x5, ['unsigned char']],
'Task' : [ 0x6, ['unsigned short']],
'Keyword' : [ 0x8, ['unsigned long long']],
} ],
'_EVENT_RECORD' : [ 0x70, {
'EventHeader' : [ 0x0, ['_EVENT_HEADER']],
'BufferContext' : [ 0x50, ['_ETW_BUFFER_CONTEXT']],
'ExtendedDataCount' : [ 0x54, ['unsigned short']],
'UserDataLength' : [ 0x56, ['unsigned short']],
'ExtendedData' : [ 0x58, ['pointer64', ['_EVENT_HEADER_EXTENDED_DATA_ITEM']]],
'UserData' : [ 0x60, ['pointer64', ['void']]],
'UserContext' : [ 0x68, ['pointer64', ['void']]],
} ],
'_PERFINFO_GROUPMASK' : [ 0x20, {
'Masks' : [ 0x0, ['array', 8, ['unsigned long']]],
} ],
'_FILE_OBJECT' : [ 0xd8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'Vpb' : [ 0x10, ['pointer64', ['_VPB']]],
'FsContext' : [ 0x18, ['pointer64', ['void']]],
'FsContext2' : [ 0x20, ['pointer64', ['void']]],
'SectionObjectPointer' : [ 0x28, ['pointer64', ['_SECTION_OBJECT_POINTERS']]],
'PrivateCacheMap' : [ 0x30, ['pointer64', ['void']]],
'FinalStatus' : [ 0x38, ['long']],
'RelatedFileObject' : [ 0x40, ['pointer64', ['_FILE_OBJECT']]],
'LockOperation' : [ 0x48, ['unsigned char']],
'DeletePending' : [ 0x49, ['unsigned char']],
'ReadAccess' : [ 0x4a, ['unsigned char']],
'WriteAccess' : [ 0x4b, ['unsigned char']],
'DeleteAccess' : [ 0x4c, ['unsigned char']],
'SharedRead' : [ 0x4d, ['unsigned char']],
'SharedWrite' : [ 0x4e, ['unsigned char']],
'SharedDelete' : [ 0x4f, ['unsigned char']],
'Flags' : [ 0x50, ['unsigned long']],
'FileName' : [ 0x58, ['_UNICODE_STRING']],
'CurrentByteOffset' : [ 0x68, ['_LARGE_INTEGER']],
'Waiters' : [ 0x70, ['unsigned long']],
'Busy' : [ 0x74, ['unsigned long']],
'LastLock' : [ 0x78, ['pointer64', ['void']]],
'Lock' : [ 0x80, ['_KEVENT']],
'Event' : [ 0x98, ['_KEVENT']],
'CompletionContext' : [ 0xb0, ['pointer64', ['_IO_COMPLETION_CONTEXT']]],
'IrpListLock' : [ 0xb8, ['unsigned long long']],
'IrpList' : [ 0xc0, ['_LIST_ENTRY']],
'FileObjectExtension' : [ 0xd0, ['pointer64', ['void']]],
} ],
'_EX_RUNDOWN_REF' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_MM_PAGE_ACCESS_INFO_HEADER' : [ 0x48, {
'Link' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'MmPteAccessType', 1: 'MmCcReadAheadType', 2: 'MmPfnRepurposeType', 3: 'MmMaximumPageAccessType'})]],
'EmptySequenceNumber' : [ 0xc, ['unsigned long']],
'CurrentFileIndex' : [ 0xc, ['unsigned long']],
'CreateTime' : [ 0x10, ['unsigned long long']],
'EmptyTime' : [ 0x18, ['unsigned long long']],
'TempEntry' : [ 0x18, ['pointer64', ['_MM_PAGE_ACCESS_INFO']]],
'PageEntry' : [ 0x20, ['pointer64', ['_MM_PAGE_ACCESS_INFO']]],
'FileEntry' : [ 0x28, ['pointer64', ['unsigned long long']]],
'FirstFileEntry' : [ 0x30, ['pointer64', ['unsigned long long']]],
'Process' : [ 0x38, ['pointer64', ['_EPROCESS']]],
'SessionId' : [ 0x40, ['unsigned long']],
'PageFrameEntry' : [ 0x20, ['pointer64', ['unsigned long long']]],
'LastPageFrameEntry' : [ 0x28, ['pointer64', ['unsigned long long']]],
} ],
'_WHEA_ERROR_PACKET_V2' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned long']],
'Version' : [ 0x4, ['unsigned long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['_WHEA_ERROR_PACKET_FLAGS']],
'ErrorType' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrTypeProcessor', 1: 'WheaErrTypeMemory', 2: 'WheaErrTypePCIExpress', 3: 'WheaErrTypeNMI', 4: 'WheaErrTypePCIXBus', 5: 'WheaErrTypePCIXDevice', 6: 'WheaErrTypeGeneric'})]],
'ErrorSeverity' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'ErrorSourceId' : [ 0x18, ['unsigned long']],
'ErrorSourceType' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSrcTypeMCE', 1: 'WheaErrSrcTypeCMC', 2: 'WheaErrSrcTypeCPE', 3: 'WheaErrSrcTypeNMI', 4: 'WheaErrSrcTypePCIe', 5: 'WheaErrSrcTypeGeneric', 6: 'WheaErrSrcTypeINIT', 7: 'WheaErrSrcTypeBOOT', 8: 'WheaErrSrcTypeSCIGeneric', 9: 'WheaErrSrcTypeIPFMCA', 10: 'WheaErrSrcTypeIPFCMC', 11: 'WheaErrSrcTypeIPFCPE', 12: 'WheaErrSrcTypeMax'})]],
'NotifyType' : [ 0x20, ['_GUID']],
'Context' : [ 0x30, ['unsigned long long']],
'DataFormat' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'WheaDataFormatIPFSalRecord', 1: 'WheaDataFormatXPFMCA', 2: 'WheaDataFormatMemory', 3: 'WheaDataFormatPCIExpress', 4: 'WheaDataFormatNMIPort', 5: 'WheaDataFormatPCIXBus', 6: 'WheaDataFormatPCIXDevice', 7: 'WheaDataFormatGeneric', 8: 'WheaDataFormatMax'})]],
'Reserved1' : [ 0x3c, ['unsigned long']],
'DataOffset' : [ 0x40, ['unsigned long']],
'DataLength' : [ 0x44, ['unsigned long']],
'PshedDataOffset' : [ 0x48, ['unsigned long']],
'PshedDataLength' : [ 0x4c, ['unsigned long']],
} ],
'_WHEA_ERROR_RECORD' : [ 0xc8, {
'Header' : [ 0x0, ['_WHEA_ERROR_RECORD_HEADER']],
'SectionDescriptor' : [ 0x80, ['array', 1, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR']]],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR' : [ 0x48, {
'SectionOffset' : [ 0x0, ['unsigned long']],
'SectionLength' : [ 0x4, ['unsigned long']],
'Revision' : [ 0x8, ['_WHEA_REVISION']],
'ValidBits' : [ 0xa, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS']],
'Reserved' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS']],
'SectionType' : [ 0x10, ['_GUID']],
'FRUId' : [ 0x20, ['_GUID']],
'SectionSeverity' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'FRUText' : [ 0x34, ['array', 20, ['unsigned char']]],
} ],
'_FSRTL_ADVANCED_FCB_HEADER' : [ 0x68, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned char']],
'IsFastIoPossible' : [ 0x5, ['unsigned char']],
'Flags2' : [ 0x6, ['unsigned char']],
'Reserved' : [ 0x7, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned char')]],
'Version' : [ 0x7, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'Resource' : [ 0x8, ['pointer64', ['_ERESOURCE']]],
'PagingIoResource' : [ 0x10, ['pointer64', ['_ERESOURCE']]],
'AllocationSize' : [ 0x18, ['_LARGE_INTEGER']],
'FileSize' : [ 0x20, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x28, ['_LARGE_INTEGER']],
'FastMutex' : [ 0x30, ['pointer64', ['_FAST_MUTEX']]],
'FilterContexts' : [ 0x38, ['_LIST_ENTRY']],
'PushLock' : [ 0x48, ['_EX_PUSH_LOCK']],
'FileContextSupportPointer' : [ 0x50, ['pointer64', ['pointer64', ['void']]]],
'Oplock' : [ 0x58, ['pointer64', ['void']]],
'ReservedForRemote' : [ 0x58, ['pointer64', ['void']]],
'ReservedContext' : [ 0x60, ['pointer64', ['void']]],
} ],
'_iobuf' : [ 0x30, {
'_ptr' : [ 0x0, ['pointer64', ['unsigned char']]],
'_cnt' : [ 0x8, ['long']],
'_base' : [ 0x10, ['pointer64', ['unsigned char']]],
'_flag' : [ 0x18, ['long']],
'_file' : [ 0x1c, ['long']],
'_charbuf' : [ 0x20, ['long']],
'_bufsiz' : [ 0x24, ['long']],
'_tmpfname' : [ 0x28, ['pointer64', ['unsigned char']]],
} ],
'_TlgProvider_t' : [ 0x40, {
'LevelPlus1' : [ 0x0, ['unsigned long']],
'ProviderMetadataPtr' : [ 0x8, ['pointer64', ['unsigned short']]],
'KeywordAny' : [ 0x10, ['unsigned long long']],
'KeywordAll' : [ 0x18, ['unsigned long long']],
'RegHandle' : [ 0x20, ['unsigned long long']],
'EnableCallback' : [ 0x28, ['pointer64', ['void']]],
'CallbackContext' : [ 0x30, ['pointer64', ['void']]],
'AnnotationFunc' : [ 0x38, ['pointer64', ['void']]],
} ],
'_EVENT_FILTER_DESCRIPTOR' : [ 0x10, {
'Ptr' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'Type' : [ 0xc, ['unsigned long']],
} ],
'_TlgProviderMetadata_t' : [ 0x13, {
'Type' : [ 0x0, ['unsigned char']],
'ProviderId' : [ 0x1, ['_GUID']],
'RemainingSize' : [ 0x11, ['unsigned short']],
} ],
'_SID' : [ 0xc, {
'Revision' : [ 0x0, ['unsigned char']],
'SubAuthorityCount' : [ 0x1, ['unsigned char']],
'IdentifierAuthority' : [ 0x2, ['_SID_IDENTIFIER_AUTHORITY']],
'SubAuthority' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'__unnamed_164b' : [ 0x8, {
'Long' : [ 0x0, ['unsigned long long']],
'VolatileLong' : [ 0x0, ['unsigned long long']],
'Hard' : [ 0x0, ['_MMPTE_HARDWARE']],
'Flush' : [ 0x0, ['_HARDWARE_PTE']],
'Proto' : [ 0x0, ['_MMPTE_PROTOTYPE']],
'Soft' : [ 0x0, ['_MMPTE_SOFTWARE']],
'TimeStamp' : [ 0x0, ['_MMPTE_TIMESTAMP']],
'Trans' : [ 0x0, ['_MMPTE_TRANSITION']],
'Subsect' : [ 0x0, ['_MMPTE_SUBSECTION']],
'List' : [ 0x0, ['_MMPTE_LIST']],
} ],
'_MMPTE' : [ 0x8, {
'u' : [ 0x0, ['__unnamed_164b']],
} ],
'_EX_PUSH_LOCK_AUTO_EXPAND' : [ 0x10, {
'LocalLock' : [ 0x0, ['_EX_PUSH_LOCK']],
'State' : [ 0x8, ['_EX_PUSH_LOCK_AUTO_EXPAND_STATE']],
'Stats' : [ 0xc, ['unsigned long']],
} ],
'_ERESOURCE' : [ 0x68, {
'SystemResourcesList' : [ 0x0, ['_LIST_ENTRY']],
'OwnerTable' : [ 0x10, ['pointer64', ['_OWNER_ENTRY']]],
'ActiveCount' : [ 0x18, ['short']],
'Flag' : [ 0x1a, ['unsigned short']],
'ReservedLowFlags' : [ 0x1a, ['unsigned char']],
'WaiterPriority' : [ 0x1b, ['unsigned char']],
'SharedWaiters' : [ 0x20, ['_KWAIT_CHAIN']],
'ExclusiveWaiters' : [ 0x28, ['pointer64', ['_KEVENT']]],
'OwnerEntry' : [ 0x30, ['_OWNER_ENTRY']],
'ActiveEntries' : [ 0x40, ['unsigned long']],
'ContentionCount' : [ 0x44, ['unsigned long']],
'NumberOfSharedWaiters' : [ 0x48, ['unsigned long']],
'NumberOfExclusiveWaiters' : [ 0x4c, ['unsigned long']],
'Reserved2' : [ 0x50, ['pointer64', ['void']]],
'Address' : [ 0x58, ['pointer64', ['void']]],
'CreatorBackTraceIndex' : [ 0x58, ['unsigned long long']],
'SpinLock' : [ 0x60, ['unsigned long long']],
} ],
'_MI_CACHED_PTE' : [ 0x8, {
'GlobalTimeStamp' : [ 0x0, ['unsigned long']],
'PteIndex' : [ 0x4, ['unsigned long']],
'Long' : [ 0x0, ['long long']],
} ],
'_KLOCK_QUEUE_HANDLE' : [ 0x18, {
'LockQueue' : [ 0x0, ['_KSPIN_LOCK_QUEUE']],
'OldIrql' : [ 0x10, ['unsigned char']],
} ],
'_MMPFNLIST' : [ 0x28, {
'Total' : [ 0x0, ['unsigned long long']],
'ListName' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'ZeroedPageList', 1: 'FreePageList', 2: 'StandbyPageList', 3: 'ModifiedPageList', 4: 'ModifiedNoWritePageList', 5: 'BadPageList', 6: 'ActiveAndValid', 7: 'TransitionPage'})]],
'Flink' : [ 0x10, ['unsigned long long']],
'Blink' : [ 0x18, ['unsigned long long']],
'Lock' : [ 0x20, ['unsigned long long']],
} ],
'__unnamed_1683' : [ 0x8, {
'Flink' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 36, native_type='unsigned long long')]],
'NodeFlinkHigh' : [ 0x0, ['BitField', dict(start_bit = 36, end_bit = 64, native_type='unsigned long long')]],
'WsIndex' : [ 0x0, ['unsigned long long']],
'Event' : [ 0x0, ['pointer64', ['_KEVENT']]],
'Next' : [ 0x0, ['pointer64', ['void']]],
'VolatileNext' : [ 0x0, ['pointer64', ['void']]],
'KernelStackOwner' : [ 0x0, ['pointer64', ['_KTHREAD']]],
'NextStackPfn' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
} ],
'__unnamed_1687' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'ShortFlags' : [ 0x2, ['unsigned short']],
'VolatileShortFlags' : [ 0x2, ['unsigned short']],
} ],
'__unnamed_1689' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'e1' : [ 0x2, ['_MMPFNENTRY']],
'e2' : [ 0x0, ['__unnamed_1687']],
} ],
'__unnamed_1695' : [ 0x8, {
'PteFrame' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 36, native_type='unsigned long long')]],
'Channel' : [ 0x0, ['BitField', dict(start_bit = 36, end_bit = 38, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 38, end_bit = 39, native_type='unsigned long long')]],
'Unused2' : [ 0x0, ['BitField', dict(start_bit = 39, end_bit = 40, native_type='unsigned long long')]],
'Partition' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 50, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 50, end_bit = 52, native_type='unsigned long long')]],
'FileOnly' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 53, native_type='unsigned long long')]],
'PfnExists' : [ 0x0, ['BitField', dict(start_bit = 53, end_bit = 54, native_type='unsigned long long')]],
'PageIdentity' : [ 0x0, ['BitField', dict(start_bit = 54, end_bit = 57, native_type='unsigned long long')]],
'PrototypePte' : [ 0x0, ['BitField', dict(start_bit = 57, end_bit = 58, native_type='unsigned long long')]],
'PageColor' : [ 0x0, ['BitField', dict(start_bit = 58, end_bit = 64, native_type='unsigned long long')]],
'EntireField' : [ 0x0, ['unsigned long long']],
} ],
'_MMPFN' : [ 0x30, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'TreeNode' : [ 0x0, ['_RTL_BALANCED_NODE']],
'u1' : [ 0x0, ['__unnamed_1683']],
'PteAddress' : [ 0x8, ['pointer64', ['_MMPTE']]],
'VolatilePteAddress' : [ 0x8, ['pointer64', ['void']]],
'PteLong' : [ 0x8, ['unsigned long long']],
'OriginalPte' : [ 0x10, ['_MMPTE']],
'u2' : [ 0x18, ['_MIPFNBLINK']],
'u3' : [ 0x20, ['__unnamed_1689']],
'NodeBlinkLow' : [ 0x24, ['unsigned short']],
'Unused' : [ 0x26, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned char')]],
'VaType' : [ 0x26, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'ViewCount' : [ 0x27, ['unsigned char']],
'NodeFlinkLow' : [ 0x27, ['unsigned char']],
'u4' : [ 0x28, ['__unnamed_1695']],
} ],
'_MI_SYSTEM_PTE_TYPE' : [ 0x60, {
'Bitmap' : [ 0x0, ['_RTL_BITMAP_EX']],
'BasePte' : [ 0x10, ['pointer64', ['_MMPTE']]],
'Flags' : [ 0x18, ['unsigned long']],
'VaType' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'MiVaUnused', 1: 'MiVaSessionSpace', 2: 'MiVaProcessSpace', 3: 'MiVaBootLoaded', 4: 'MiVaPfnDatabase', 5: 'MiVaNonPagedPool', 6: 'MiVaPagedPool', 7: 'MiVaSpecialPoolPaged', 8: 'MiVaSystemCache', 9: 'MiVaSystemPtes', 10: 'MiVaHal', 11: 'MiVaSessionGlobalSpace', 12: 'MiVaDriverImages', 13: 'MiVaSpecialPoolNonPaged', 14: 'MiVaMaximumType', 15: 'MiVaSystemPtesLarge'})]],
'FailureCount' : [ 0x20, ['pointer64', ['unsigned long']]],
'PteFailures' : [ 0x28, ['unsigned long']],
'SpinLock' : [ 0x30, ['unsigned long long']],
'GlobalPushLock' : [ 0x30, ['pointer64', ['_EX_PUSH_LOCK']]],
'Vm' : [ 0x38, ['pointer64', ['_MMSUPPORT']]],
'TotalSystemPtes' : [ 0x40, ['unsigned long long']],
'Hint' : [ 0x48, ['unsigned long long']],
'CachedPtes' : [ 0x50, ['pointer64', ['_MI_CACHED_PTES']]],
'TotalFreeSystemPtes' : [ 0x58, ['unsigned long long']],
} ],
'_MMCLONE_DESCRIPTOR' : [ 0x50, {
'CloneNode' : [ 0x0, ['_RTL_BALANCED_NODE']],
'Next' : [ 0x0, ['pointer64', ['_MMCLONE_DESCRIPTOR']]],
'StartingCloneBlock' : [ 0x18, ['pointer64', ['_MMCLONE_BLOCK']]],
'EndingCloneBlock' : [ 0x20, ['pointer64', ['_MMCLONE_BLOCK']]],
'NumberOfPtes' : [ 0x28, ['unsigned long long']],
'NumberOfReferences' : [ 0x30, ['unsigned long long']],
'CloneHeader' : [ 0x38, ['pointer64', ['_MMCLONE_HEADER']]],
'NonPagedPoolQuotaCharge' : [ 0x40, ['unsigned long long']],
'NestingLevel' : [ 0x48, ['unsigned long long']],
} ],
'__unnamed_16c7' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'Long' : [ 0x0, ['unsigned long long']],
'e1' : [ 0x0, ['_MMWSLENTRY']],
'e2' : [ 0x0, ['_MMWSLE_FREE_ENTRY']],
} ],
'_MMWSLE' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_16c7']],
} ],
'_MMWSL' : [ 0x350, {
'FirstFree' : [ 0x0, ['unsigned long long']],
'FirstDynamic' : [ 0x8, ['unsigned long long']],
'LastEntry' : [ 0x10, ['unsigned long long']],
'NextSlot' : [ 0x18, ['unsigned long long']],
'LastInitializedWsle' : [ 0x20, ['unsigned long long']],
'NextAgingSlot' : [ 0x28, ['unsigned long long']],
'NextAccessClearingSlot' : [ 0x30, ['unsigned long long']],
'LastAccessClearingRemainder' : [ 0x38, ['unsigned long']],
'LastAgingRemainder' : [ 0x3c, ['unsigned long']],
'WsleSize' : [ 0x40, ['unsigned long']],
'NonDirectCount' : [ 0x48, ['unsigned long long']],
'LowestPagableAddress' : [ 0x50, ['pointer64', ['void']]],
'NonDirectHash' : [ 0x58, ['pointer64', ['_MMWSLE_NONDIRECT_HASH']]],
'HashTableStart' : [ 0x60, ['pointer64', ['_MMWSLE_HASH']]],
'HighestPermittedHashAddress' : [ 0x68, ['pointer64', ['_MMWSLE_HASH']]],
'ActiveWsleCounts' : [ 0x70, ['array', 16, ['unsigned long long']]],
'ActiveWsles' : [ 0xf0, ['array', 16, ['_MI_ACTIVE_WSLE_LISTHEAD']]],
'Wsle' : [ 0x1f0, ['pointer64', ['_MMWSLE']]],
'UserVaInfo' : [ 0x1f8, ['_MI_USER_VA_INFO']],
} ],
'_MMSUPPORT' : [ 0xf8, {
'WorkingSetLock' : [ 0x0, ['long']],
'ExitOutswapGate' : [ 0x8, ['pointer64', ['_KGATE']]],
'AccessLog' : [ 0x10, ['pointer64', ['void']]],
'WorkingSetExpansionLinks' : [ 0x18, ['_LIST_ENTRY']],
'AgeDistribution' : [ 0x28, ['array', 7, ['unsigned long long']]],
'MinimumWorkingSetSize' : [ 0x60, ['unsigned long long']],
'WorkingSetLeafSize' : [ 0x68, ['unsigned long long']],
'WorkingSetLeafPrivateSize' : [ 0x70, ['unsigned long long']],
'WorkingSetSize' : [ 0x78, ['unsigned long long']],
'WorkingSetPrivateSize' : [ 0x80, ['unsigned long long']],
'MaximumWorkingSetSize' : [ 0x88, ['unsigned long long']],
'ChargedWslePages' : [ 0x90, ['unsigned long long']],
'ActualWslePages' : [ 0x98, ['unsigned long long']],
'WorkingSetSizeOverhead' : [ 0xa0, ['unsigned long long']],
'PeakWorkingSetSize' : [ 0xa8, ['unsigned long long']],
'HardFaultCount' : [ 0xb0, ['unsigned long']],
'PartitionId' : [ 0xb4, ['unsigned short']],
'Pad0' : [ 0xb6, ['unsigned short']],
'VmWorkingSetList' : [ 0xb8, ['pointer64', ['_MMWSL']]],
'NextPageColor' : [ 0xc0, ['unsigned short']],
'LastTrimStamp' : [ 0xc2, ['unsigned short']],
'PageFaultCount' : [ 0xc4, ['unsigned long']],
'TrimmedPageCount' : [ 0xc8, ['unsigned long long']],
'Reserved0' : [ 0xd0, ['unsigned long long']],
'Flags' : [ 0xd8, ['_MMSUPPORT_FLAGS']],
'ReleasedCommitDebt' : [ 0xe0, ['unsigned long long']],
'WsSwapSupport' : [ 0xe8, ['pointer64', ['void']]],
'CommitReAcquireFailSupport' : [ 0xf0, ['pointer64', ['void']]],
} ],
'__unnamed_16e3' : [ 0x8, {
'ImageCommitment' : [ 0x0, ['unsigned long long']],
'CreatingProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
} ],
'__unnamed_16e7' : [ 0x8, {
'ImageInformation' : [ 0x0, ['pointer64', ['_MI_SECTION_IMAGE_INFORMATION']]],
'FirstMappedVa' : [ 0x0, ['pointer64', ['void']]],
} ],
'_SEGMENT' : [ 0x48, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SegmentFlags' : [ 0xc, ['_SEGMENT_FLAGS']],
'NumberOfCommittedPages' : [ 0x10, ['unsigned long long']],
'SizeOfSegment' : [ 0x18, ['unsigned long long']],
'ExtendInfo' : [ 0x20, ['pointer64', ['_MMEXTEND_INFO']]],
'BasedAddress' : [ 0x20, ['pointer64', ['void']]],
'SegmentLock' : [ 0x28, ['_EX_PUSH_LOCK']],
'u1' : [ 0x30, ['__unnamed_16e3']],
'u2' : [ 0x38, ['__unnamed_16e7']],
'PrototypePte' : [ 0x40, ['pointer64', ['_MMPTE']]],
} ],
'__unnamed_16ec' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MMSECTION_FLAGS']],
} ],
'__unnamed_16ef' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MMSECTION_FLAGS2']],
} ],
'__unnamed_16f9' : [ 0x10, {
'NumberOfSystemCacheViews' : [ 0x0, ['unsigned long']],
'ImageRelocationStartBit' : [ 0x0, ['unsigned long']],
'WritableUserReferences' : [ 0x4, ['long']],
'ImageRelocationSizeIn64k' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'Unused' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 25, native_type='unsigned long')]],
'SystemImage' : [ 0x4, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'StrongCode' : [ 0x4, ['BitField', dict(start_bit = 26, end_bit = 28, native_type='unsigned long')]],
'CantMove' : [ 0x4, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'BitMap' : [ 0x4, ['BitField', dict(start_bit = 29, end_bit = 31, native_type='unsigned long')]],
'ImageActive' : [ 0x4, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'FlushInProgressCount' : [ 0x8, ['unsigned long']],
'NumberOfSubsections' : [ 0x8, ['unsigned long']],
'SeImageStub' : [ 0x8, ['pointer64', ['_MI_IMAGE_SECURITY_REFERENCE']]],
} ],
'__unnamed_16fb' : [ 0x10, {
'e2' : [ 0x0, ['__unnamed_16f9']],
} ],
'_CONTROL_AREA' : [ 0x78, {
'Segment' : [ 0x0, ['pointer64', ['_SEGMENT']]],
'ListHead' : [ 0x8, ['_LIST_ENTRY']],
'NumberOfSectionReferences' : [ 0x18, ['unsigned long long']],
'NumberOfPfnReferences' : [ 0x20, ['unsigned long long']],
'NumberOfMappedViews' : [ 0x28, ['unsigned long long']],
'NumberOfUserReferences' : [ 0x30, ['unsigned long long']],
'u' : [ 0x38, ['__unnamed_16ec']],
'u1' : [ 0x3c, ['__unnamed_16ef']],
'FilePointer' : [ 0x40, ['_EX_FAST_REF']],
'ControlAreaLock' : [ 0x48, ['long']],
'ModifiedWriteCount' : [ 0x4c, ['unsigned long']],
'WaitList' : [ 0x50, ['pointer64', ['_MI_CONTROL_AREA_WAIT_BLOCK']]],
'u2' : [ 0x58, ['__unnamed_16fb']],
'LockedPages' : [ 0x68, ['unsigned long long']],
'FileObjectLock' : [ 0x70, ['_EX_PUSH_LOCK']],
} ],
'__unnamed_1709' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'VadFlags' : [ 0x0, ['_MMVAD_FLAGS']],
} ],
'__unnamed_170c' : [ 0x4, {
'LongFlags1' : [ 0x0, ['unsigned long']],
'VadFlags1' : [ 0x0, ['_MMVAD_FLAGS1']],
} ],
'_MMVAD_SHORT' : [ 0x40, {
'VadNode' : [ 0x0, ['_RTL_BALANCED_NODE']],
'NextVad' : [ 0x0, ['pointer64', ['_MMVAD_SHORT']]],
'StartingVpn' : [ 0x18, ['unsigned long']],
'EndingVpn' : [ 0x1c, ['unsigned long']],
'StartingVpnHigh' : [ 0x20, ['unsigned char']],
'EndingVpnHigh' : [ 0x21, ['unsigned char']],
'CommitChargeHigh' : [ 0x22, ['unsigned char']],
'SpareNT64VadUChar' : [ 0x23, ['unsigned char']],
'ReferenceCount' : [ 0x24, ['long']],
'PushLock' : [ 0x28, ['_EX_PUSH_LOCK']],
'u' : [ 0x30, ['__unnamed_1709']],
'u1' : [ 0x34, ['__unnamed_170c']],
'EventList' : [ 0x38, ['pointer64', ['_MI_VAD_EVENT_BLOCK']]],
} ],
'_MI_PARTITION' : [ 0x2740, {
'Core' : [ 0x0, ['_MI_PARTITION_CORE']],
'Modwriter' : [ 0x158, ['_MI_PARTITION_MODWRITES']],
'Store' : [ 0x430, ['_MI_PARTITION_STORES']],
'Segments' : [ 0x4c0, ['_MI_PARTITION_SEGMENTS']],
'PageLists' : [ 0x640, ['_MI_PARTITION_PAGE_LISTS']],
'Commit' : [ 0x1380, ['_MI_PARTITION_COMMIT']],
'Zeroing' : [ 0x1400, ['_MI_PARTITION_ZEROING']],
'PageCombine' : [ 0x1468, ['_MI_PAGE_COMBINING_SUPPORT']],
'WorkingSetControl' : [ 0x15f0, ['pointer64', ['void']]],
'WorkingSetExpansionHead' : [ 0x15f8, ['_MMWORKING_SET_EXPANSION_HEAD']],
'Vp' : [ 0x1640, ['_MI_VISIBLE_PARTITION']],
} ],
'_MM_STORE_KEY' : [ 0x8, {
'KeyLow' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 60, native_type='unsigned long long')]],
'KeyHigh' : [ 0x0, ['BitField', dict(start_bit = 60, end_bit = 64, native_type='unsigned long long')]],
'EntireKey' : [ 0x0, ['unsigned long long']],
} ],
'_MMPAGING_FILE' : [ 0x120, {
'Size' : [ 0x0, ['unsigned long long']],
'MaximumSize' : [ 0x8, ['unsigned long long']],
'MinimumSize' : [ 0x10, ['unsigned long long']],
'FreeSpace' : [ 0x18, ['unsigned long long']],
'PeakUsage' : [ 0x20, ['unsigned long long']],
'HighestPage' : [ 0x28, ['unsigned long long']],
'FreeReservationSpace' : [ 0x30, ['unsigned long long']],
'File' : [ 0x38, ['pointer64', ['_FILE_OBJECT']]],
'Entry' : [ 0x40, ['array', 2, ['pointer64', ['_MMMOD_WRITER_MDL_ENTRY']]]],
'PfnsToFree' : [ 0x50, ['_SLIST_HEADER']],
'PageFileName' : [ 0x60, ['_UNICODE_STRING']],
'Bitmaps' : [ 0x70, ['pointer64', ['_MI_PAGING_FILE_SPACE_BITMAPS']]],
'AllocationBitmapHint' : [ 0x78, ['unsigned long']],
'LargestAllocationCluster' : [ 0x7c, ['unsigned long']],
'RefreshAllocationCluster' : [ 0x80, ['unsigned long']],
'LastRefreshAllocationCluster' : [ 0x84, ['unsigned long']],
'ReservedClusterSizeAggregate' : [ 0x88, ['unsigned long']],
'MaximumRunLengthInBitmaps' : [ 0x8c, ['unsigned long']],
'BitmapsCacheLengthTree' : [ 0x90, ['_RTL_RB_TREE']],
'BitmapsCacheLocationTree' : [ 0xa0, ['_RTL_RB_TREE']],
'BitmapsCacheFreeList' : [ 0xb0, ['_LIST_ENTRY']],
'BitmapsCacheEntries' : [ 0xc0, ['pointer64', ['_MI_PAGEFILE_BITMAPS_CACHE_ENTRY']]],
'ToBeEvictedCount' : [ 0xc8, ['unsigned long']],
'HybridPriority' : [ 0xc8, ['unsigned long']],
'PageFileNumber' : [ 0xcc, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned short')]],
'WsSwapPagefile' : [ 0xcc, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'NoReservations' : [ 0xcc, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'VirtualStorePagefile' : [ 0xcc, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SwapSupported' : [ 0xcc, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'NodeInserted' : [ 0xcc, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'StackNotified' : [ 0xcc, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'Spare0' : [ 0xcc, ['BitField', dict(start_bit = 10, end_bit = 15, native_type='unsigned short')]],
'AdriftMdls' : [ 0xce, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Spare1' : [ 0xce, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
'IgnoreReservations' : [ 0xcf, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Spare2' : [ 0xcf, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
'PageHashPages' : [ 0xd0, ['unsigned long']],
'PageHashPagesPeak' : [ 0xd4, ['unsigned long']],
'PageHash' : [ 0xd8, ['pointer64', ['unsigned long']]],
'FileHandle' : [ 0xe0, ['pointer64', ['void']]],
'Lock' : [ 0xe8, ['unsigned long long']],
'LockOwner' : [ 0xf0, ['pointer64', ['_ETHREAD']]],
'FlowThroughReadRoot' : [ 0xf8, ['_RTL_AVL_TREE']],
'Partition' : [ 0x100, ['pointer64', ['_MI_PARTITION']]],
'FileObjectNode' : [ 0x108, ['_RTL_BALANCED_NODE']],
} ],
'tagSWITCH_CONTEXT' : [ 0x68, {
'Attribute' : [ 0x0, ['tagSWITCH_CONTEXT_ATTRIBUTE']],
'Data' : [ 0x18, ['tagSWITCH_CONTEXT_DATA']],
} ],
'_CMP_SILO_CONTEXT' : [ 0x20, {
'LockEntryHead' : [ 0x0, ['_LIST_ENTRY']],
'LockListUnderCleanup' : [ 0x10, ['unsigned char']],
'ContextLock' : [ 0x18, ['_EX_PUSH_LOCK']],
} ],
'__unnamed_1756' : [ 0xc, {
'Failure' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: '_None', 1: '_CmInitializeHive', 2: '_HvInitializeHive', 3: '_HvpBuildMap', 4: '_HvpBuildMapForLoaderHive', 5: '_HvpInitMap', 6: '_HvLoadHive', 7: '_HvpMapHiveImage', 8: '_HvpRecoverData', 9: '_CmpValidateHiveSecurityDescriptors', 10: '_HvpEnlistBinInMap', 11: '_CmCheckRegistry', 12: '_CmRegistryIO', 13: '_CmCheckRegistry2', 14: '_CmpCheckKey', 15: '_CmpCheckValueList', 16: '_HvCheckHive', 17: '_HvCheckBin', 18: '_HvpGetLogEntryDirtyVector', 19: '_HvpReadLogEntryHeader', 20: '_HvpReadLogEntry', 21: '_CmpMountPreloadedHives', 22: '_CmpLoadHiveThread'})]],
'Status' : [ 0x4, ['long']],
'Point' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1759' : [ 0x18, {
'Action' : [ 0x0, ['unsigned long']],
'Handle' : [ 0x8, ['pointer64', ['void']]],
'Status' : [ 0x10, ['long']],
} ],
'__unnamed_175b' : [ 0x8, {
'CheckStack' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_175f' : [ 0x20, {
'Cell' : [ 0x0, ['unsigned long']],
'CellPoint' : [ 0x8, ['pointer64', ['_CELL_DATA']]],
'RootPoint' : [ 0x10, ['pointer64', ['void']]],
'Index' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1761' : [ 0x18, {
'List' : [ 0x0, ['pointer64', ['_CELL_DATA']]],
'Index' : [ 0x8, ['unsigned long']],
'Cell' : [ 0xc, ['unsigned long']],
'CellPoint' : [ 0x10, ['pointer64', ['_CELL_DATA']]],
} ],
'__unnamed_1765' : [ 0x10, {
'Space' : [ 0x0, ['unsigned long']],
'MapPoint' : [ 0x4, ['unsigned long']],
'BinPoint' : [ 0x8, ['pointer64', ['_HBIN']]],
} ],
'__unnamed_1769' : [ 0x10, {
'Bin' : [ 0x0, ['pointer64', ['_HBIN']]],
'CellPoint' : [ 0x8, ['pointer64', ['_HCELL']]],
} ],
'__unnamed_176b' : [ 0x4, {
'FileOffset' : [ 0x0, ['unsigned long']],
} ],
'_HIVE_LOAD_FAILURE' : [ 0x160, {
'Hive' : [ 0x0, ['pointer64', ['_HHIVE']]],
'Index' : [ 0x8, ['unsigned long']],
'RecoverableIndex' : [ 0xc, ['unsigned long']],
'Locations' : [ 0x10, ['array', 8, ['__unnamed_1756']]],
'RecoverableLocations' : [ 0x70, ['array', 8, ['__unnamed_1756']]],
'RegistryIO' : [ 0xd0, ['__unnamed_1759']],
'CheckRegistry2' : [ 0xe8, ['__unnamed_175b']],
'CheckKey' : [ 0xf0, ['__unnamed_175f']],
'CheckValueList' : [ 0x110, ['__unnamed_1761']],
'CheckHive' : [ 0x128, ['__unnamed_1765']],
'CheckHive1' : [ 0x138, ['__unnamed_1765']],
'CheckBin' : [ 0x148, ['__unnamed_1769']],
'RecoverData' : [ 0x158, ['__unnamed_176b']],
} ],
'_PCW_COUNTER_DESCRIPTOR' : [ 0x8, {
'Id' : [ 0x0, ['unsigned short']],
'StructIndex' : [ 0x2, ['unsigned short']],
'Offset' : [ 0x4, ['unsigned short']],
'Size' : [ 0x6, ['unsigned short']],
} ],
'_PCW_REGISTRATION_INFORMATION' : [ 0x30, {
'Version' : [ 0x0, ['unsigned long']],
'Name' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
'CounterCount' : [ 0x10, ['unsigned long']],
'Counters' : [ 0x18, ['pointer64', ['_PCW_COUNTER_DESCRIPTOR']]],
'Callback' : [ 0x20, ['pointer64', ['void']]],
'CallbackContext' : [ 0x28, ['pointer64', ['void']]],
} ],
'_PCW_PROCESSOR_INFO' : [ 0xc0, {
'IdleTime' : [ 0x0, ['unsigned long long']],
'AvailableTime' : [ 0x8, ['unsigned long long']],
'UserTime' : [ 0x10, ['unsigned long long']],
'KernelTime' : [ 0x18, ['unsigned long long']],
'Interrupts' : [ 0x20, ['unsigned long']],
'DpcTime' : [ 0x28, ['unsigned long long']],
'InterruptTime' : [ 0x30, ['unsigned long long']],
'ClockInterrupts' : [ 0x38, ['unsigned long']],
'DpcCount' : [ 0x3c, ['unsigned long']],
'DpcRate' : [ 0x40, ['unsigned long']],
'C1Time' : [ 0x48, ['unsigned long long']],
'C2Time' : [ 0x50, ['unsigned long long']],
'C3Time' : [ 0x58, ['unsigned long long']],
'C1Transitions' : [ 0x60, ['unsigned long long']],
'C2Transitions' : [ 0x68, ['unsigned long long']],
'C3Transitions' : [ 0x70, ['unsigned long long']],
'StallTime' : [ 0x78, ['unsigned long long']],
'ParkingStatus' : [ 0x80, ['unsigned long']],
'CurrentFrequency' : [ 0x84, ['unsigned long']],
'PercentMaxFrequency' : [ 0x88, ['unsigned long']],
'StateFlags' : [ 0x8c, ['unsigned long']],
'NominalThroughput' : [ 0x90, ['unsigned long']],
'ActiveThroughput' : [ 0x94, ['unsigned long']],
'ScaledThroughput' : [ 0x98, ['unsigned long long']],
'ScaledKernelThroughput' : [ 0xa0, ['unsigned long long']],
'AverageIdleTime' : [ 0xa8, ['unsigned long long']],
'IdleBreakEvents' : [ 0xb0, ['unsigned long long']],
'PerformanceLimit' : [ 0xb8, ['unsigned long']],
'PerformanceLimitFlags' : [ 0xbc, ['unsigned long']],
} ],
'_PCW_DATA' : [ 0x10, {
'Data' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned long']],
} ],
'_SYNCH_COUNTERS' : [ 0xb8, {
'SpinLockAcquireCount' : [ 0x0, ['unsigned long']],
'SpinLockContentionCount' : [ 0x4, ['unsigned long']],
'SpinLockSpinCount' : [ 0x8, ['unsigned long']],
'IpiSendRequestBroadcastCount' : [ 0xc, ['unsigned long']],
'IpiSendRequestRoutineCount' : [ 0x10, ['unsigned long']],
'IpiSendSoftwareInterruptCount' : [ 0x14, ['unsigned long']],
'ExInitializeResourceCount' : [ 0x18, ['unsigned long']],
'ExReInitializeResourceCount' : [ 0x1c, ['unsigned long']],
'ExDeleteResourceCount' : [ 0x20, ['unsigned long']],
'ExecutiveResourceAcquiresCount' : [ 0x24, ['unsigned long']],
'ExecutiveResourceContentionsCount' : [ 0x28, ['unsigned long']],
'ExecutiveResourceReleaseExclusiveCount' : [ 0x2c, ['unsigned long']],
'ExecutiveResourceReleaseSharedCount' : [ 0x30, ['unsigned long']],
'ExecutiveResourceConvertsCount' : [ 0x34, ['unsigned long']],
'ExAcqResExclusiveAttempts' : [ 0x38, ['unsigned long']],
'ExAcqResExclusiveAcquiresExclusive' : [ 0x3c, ['unsigned long']],
'ExAcqResExclusiveAcquiresExclusiveRecursive' : [ 0x40, ['unsigned long']],
'ExAcqResExclusiveWaits' : [ 0x44, ['unsigned long']],
'ExAcqResExclusiveNotAcquires' : [ 0x48, ['unsigned long']],
'ExAcqResSharedAttempts' : [ 0x4c, ['unsigned long']],
'ExAcqResSharedAcquiresExclusive' : [ 0x50, ['unsigned long']],
'ExAcqResSharedAcquiresShared' : [ 0x54, ['unsigned long']],
'ExAcqResSharedAcquiresSharedRecursive' : [ 0x58, ['unsigned long']],
'ExAcqResSharedWaits' : [ 0x5c, ['unsigned long']],
'ExAcqResSharedNotAcquires' : [ 0x60, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAttempts' : [ 0x64, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresExclusive' : [ 0x68, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresShared' : [ 0x6c, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresSharedRecursive' : [ 0x70, ['unsigned long']],
'ExAcqResSharedStarveExclusiveWaits' : [ 0x74, ['unsigned long']],
'ExAcqResSharedStarveExclusiveNotAcquires' : [ 0x78, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAttempts' : [ 0x7c, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresExclusive' : [ 0x80, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresShared' : [ 0x84, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresSharedRecursive' : [ 0x88, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveWaits' : [ 0x8c, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveNotAcquires' : [ 0x90, ['unsigned long']],
'ExSetResOwnerPointerExclusive' : [ 0x94, ['unsigned long']],
'ExSetResOwnerPointerSharedNew' : [ 0x98, ['unsigned long']],
'ExSetResOwnerPointerSharedOld' : [ 0x9c, ['unsigned long']],
'ExTryToAcqExclusiveAttempts' : [ 0xa0, ['unsigned long']],
'ExTryToAcqExclusiveAcquires' : [ 0xa4, ['unsigned long']],
'ExBoostExclusiveOwner' : [ 0xa8, ['unsigned long']],
'ExBoostSharedOwners' : [ 0xac, ['unsigned long']],
'ExEtwSynchTrackingNotificationsCount' : [ 0xb0, ['unsigned long']],
'ExEtwSynchTrackingNotificationsAccountedCount' : [ 0xb4, ['unsigned long']],
} ],
'_ETW_PERF_COUNTERS' : [ 0x18, {
'TotalActiveSessions' : [ 0x0, ['long']],
'TotalBufferMemoryNonPagedPool' : [ 0x4, ['long']],
'TotalBufferMemoryPagedPool' : [ 0x8, ['long']],
'TotalGuidsEnabled' : [ 0xc, ['long']],
'TotalGuidsNotEnabled' : [ 0x10, ['long']],
'TotalGuidsPreEnabled' : [ 0x14, ['long']],
} ],
'_ETW_SESSION_PERF_COUNTERS' : [ 0x18, {
'BufferMemoryPagedPool' : [ 0x0, ['long']],
'BufferMemoryNonPagedPool' : [ 0x4, ['long']],
'EventsLoggedCount' : [ 0x8, ['unsigned long long']],
'EventsLost' : [ 0x10, ['long']],
'NumConsumers' : [ 0x14, ['long']],
} ],
'_FILESYSTEM_DISK_COUNTERS' : [ 0x10, {
'FsBytesRead' : [ 0x0, ['unsigned long long']],
'FsBytesWritten' : [ 0x8, ['unsigned long long']],
} ],
'_THERMAL_ZONE_COUNTERS' : [ 0xc, {
'Temperature' : [ 0x0, ['unsigned long']],
'ThrottleLimit' : [ 0x4, ['unsigned long']],
'ThrottleReasons' : [ 0x8, ['unsigned long']],
} ],
'_TEB32' : [ 0x1000, {
'NtTib' : [ 0x0, ['_NT_TIB32']],
'EnvironmentPointer' : [ 0x1c, ['unsigned long']],
'ClientId' : [ 0x20, ['_CLIENT_ID32']],
'ActiveRpcHandle' : [ 0x28, ['unsigned long']],
'ThreadLocalStoragePointer' : [ 0x2c, ['unsigned long']],
'ProcessEnvironmentBlock' : [ 0x30, ['unsigned long']],
'LastErrorValue' : [ 0x34, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x38, ['unsigned long']],
'CsrClientThread' : [ 0x3c, ['unsigned long']],
'Win32ThreadInfo' : [ 0x40, ['unsigned long']],
'User32Reserved' : [ 0x44, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xac, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0xc0, ['unsigned long']],
'CurrentLocale' : [ 0xc4, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0xc8, ['unsigned long']],
'ReservedForDebuggerInstrumentation' : [ 0xcc, ['array', 16, ['unsigned long']]],
'SystemReserved1' : [ 0x10c, ['array', 38, ['unsigned long']]],
'ExceptionCode' : [ 0x1a4, ['long']],
'ActivationContextStackPointer' : [ 0x1a8, ['unsigned long']],
'InstrumentationCallbackSp' : [ 0x1ac, ['unsigned long']],
'InstrumentationCallbackPreviousPc' : [ 0x1b0, ['unsigned long']],
'InstrumentationCallbackPreviousSp' : [ 0x1b4, ['unsigned long']],
'InstrumentationCallbackDisabled' : [ 0x1b8, ['unsigned char']],
'SpareBytes' : [ 0x1b9, ['array', 23, ['unsigned char']]],
'TxFsContext' : [ 0x1d0, ['unsigned long']],
'GdiTebBatch' : [ 0x1d4, ['_GDI_TEB_BATCH32']],
'RealClientId' : [ 0x6b4, ['_CLIENT_ID32']],
'GdiCachedProcessHandle' : [ 0x6bc, ['unsigned long']],
'GdiClientPID' : [ 0x6c0, ['unsigned long']],
'GdiClientTID' : [ 0x6c4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x6c8, ['unsigned long']],
'Win32ClientInfo' : [ 0x6cc, ['array', 62, ['unsigned long']]],
'glDispatchTable' : [ 0x7c4, ['array', 233, ['unsigned long']]],
'glReserved1' : [ 0xb68, ['array', 29, ['unsigned long']]],
'glReserved2' : [ 0xbdc, ['unsigned long']],
'glSectionInfo' : [ 0xbe0, ['unsigned long']],
'glSection' : [ 0xbe4, ['unsigned long']],
'glTable' : [ 0xbe8, ['unsigned long']],
'glCurrentRC' : [ 0xbec, ['unsigned long']],
'glContext' : [ 0xbf0, ['unsigned long']],
'LastStatusValue' : [ 0xbf4, ['unsigned long']],
'StaticUnicodeString' : [ 0xbf8, ['_STRING32']],
'StaticUnicodeBuffer' : [ 0xc00, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0xe0c, ['unsigned long']],
'TlsSlots' : [ 0xe10, ['array', 64, ['unsigned long']]],
'TlsLinks' : [ 0xf10, ['LIST_ENTRY32']],
'Vdm' : [ 0xf18, ['unsigned long']],
'ReservedForNtRpc' : [ 0xf1c, ['unsigned long']],
'DbgSsReserved' : [ 0xf20, ['array', 2, ['unsigned long']]],
'HardErrorMode' : [ 0xf28, ['unsigned long']],
'Instrumentation' : [ 0xf2c, ['array', 9, ['unsigned long']]],
'ActivityId' : [ 0xf50, ['_GUID']],
'SubProcessTag' : [ 0xf60, ['unsigned long']],
'PerflibData' : [ 0xf64, ['unsigned long']],
'EtwTraceData' : [ 0xf68, ['unsigned long']],
'WinSockData' : [ 0xf6c, ['unsigned long']],
'GdiBatchCount' : [ 0xf70, ['unsigned long']],
'CurrentIdealProcessor' : [ 0xf74, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0xf74, ['unsigned long']],
'ReservedPad0' : [ 0xf74, ['unsigned char']],
'ReservedPad1' : [ 0xf75, ['unsigned char']],
'ReservedPad2' : [ 0xf76, ['unsigned char']],
'IdealProcessor' : [ 0xf77, ['unsigned char']],
'GuaranteedStackBytes' : [ 0xf78, ['unsigned long']],
'ReservedForPerf' : [ 0xf7c, ['unsigned long']],
'ReservedForOle' : [ 0xf80, ['unsigned long']],
'WaitingOnLoaderLock' : [ 0xf84, ['unsigned long']],
'SavedPriorityState' : [ 0xf88, ['unsigned long']],
'ReservedForCodeCoverage' : [ 0xf8c, ['unsigned long']],
'ThreadPoolData' : [ 0xf90, ['unsigned long']],
'TlsExpansionSlots' : [ 0xf94, ['unsigned long']],
'MuiGeneration' : [ 0xf98, ['unsigned long']],
'IsImpersonating' : [ 0xf9c, ['unsigned long']],
'NlsCache' : [ 0xfa0, ['unsigned long']],
'pShimData' : [ 0xfa4, ['unsigned long']],
'HeapVirtualAffinity' : [ 0xfa8, ['unsigned short']],
'LowFragHeapDataSlot' : [ 0xfaa, ['unsigned short']],
'CurrentTransactionHandle' : [ 0xfac, ['unsigned long']],
'ActiveFrame' : [ 0xfb0, ['unsigned long']],
'FlsData' : [ 0xfb4, ['unsigned long']],
'PreferredLanguages' : [ 0xfb8, ['unsigned long']],
'UserPrefLanguages' : [ 0xfbc, ['unsigned long']],
'MergedPrefLanguages' : [ 0xfc0, ['unsigned long']],
'MuiImpersonation' : [ 0xfc4, ['unsigned long']],
'CrossTebFlags' : [ 0xfc8, ['unsigned short']],
'SpareCrossTebBits' : [ 0xfc8, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0xfca, ['unsigned short']],
'SafeThunkCall' : [ 0xfca, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0xfca, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0xfca, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0xfca, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0xfca, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0xfca, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0xfca, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0xfca, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0xfca, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0xfca, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0xfca, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SessionAware' : [ 0xfca, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned short')]],
'LoadOwner' : [ 0xfca, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned short')]],
'LoaderWorker' : [ 0xfca, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0xfca, ['BitField', dict(start_bit = 14, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0xfcc, ['unsigned long']],
'TxnScopeExitCallback' : [ 0xfd0, ['unsigned long']],
'TxnScopeContext' : [ 0xfd4, ['unsigned long']],
'LockCount' : [ 0xfd8, ['unsigned long']],
'WowTebOffset' : [ 0xfdc, ['long']],
'ResourceRetValue' : [ 0xfe0, ['unsigned long']],
'ReservedForWdf' : [ 0xfe4, ['unsigned long']],
'ReservedForCrt' : [ 0xfe8, ['unsigned long long']],
'EffectiveContainerId' : [ 0xff0, ['_GUID']],
} ],
'_TEB64' : [ 0x1838, {
'NtTib' : [ 0x0, ['_NT_TIB64']],
'EnvironmentPointer' : [ 0x38, ['unsigned long long']],
'ClientId' : [ 0x40, ['_CLIENT_ID64']],
'ActiveRpcHandle' : [ 0x50, ['unsigned long long']],
'ThreadLocalStoragePointer' : [ 0x58, ['unsigned long long']],
'ProcessEnvironmentBlock' : [ 0x60, ['unsigned long long']],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['unsigned long long']],
'Win32ThreadInfo' : [ 0x78, ['unsigned long long']],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['unsigned long long']],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'ReservedForDebuggerInstrumentation' : [ 0x110, ['array', 16, ['unsigned long long']]],
'SystemReserved1' : [ 0x190, ['array', 38, ['unsigned long long']]],
'ExceptionCode' : [ 0x2c0, ['long']],
'Padding0' : [ 0x2c4, ['array', 4, ['unsigned char']]],
'ActivationContextStackPointer' : [ 0x2c8, ['unsigned long long']],
'InstrumentationCallbackSp' : [ 0x2d0, ['unsigned long long']],
'InstrumentationCallbackPreviousPc' : [ 0x2d8, ['unsigned long long']],
'InstrumentationCallbackPreviousSp' : [ 0x2e0, ['unsigned long long']],
'TxFsContext' : [ 0x2e8, ['unsigned long']],
'InstrumentationCallbackDisabled' : [ 0x2ec, ['unsigned char']],
'Padding1' : [ 0x2ed, ['array', 3, ['unsigned char']]],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH64']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID64']],
'GdiCachedProcessHandle' : [ 0x7e8, ['unsigned long long']],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['unsigned long long']],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['unsigned long long']]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['unsigned long long']],
'glSectionInfo' : [ 0x1228, ['unsigned long long']],
'glSection' : [ 0x1230, ['unsigned long long']],
'glTable' : [ 0x1238, ['unsigned long long']],
'glCurrentRC' : [ 0x1240, ['unsigned long long']],
'glContext' : [ 0x1248, ['unsigned long long']],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'Padding2' : [ 0x1254, ['array', 4, ['unsigned char']]],
'StaticUnicodeString' : [ 0x1258, ['_STRING64']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['wchar']]],
'Padding3' : [ 0x1472, ['array', 6, ['unsigned char']]],
'DeallocationStack' : [ 0x1478, ['unsigned long long']],
'TlsSlots' : [ 0x1480, ['array', 64, ['unsigned long long']]],
'TlsLinks' : [ 0x1680, ['LIST_ENTRY64']],
'Vdm' : [ 0x1690, ['unsigned long long']],
'ReservedForNtRpc' : [ 0x1698, ['unsigned long long']],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['unsigned long long']]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Padding4' : [ 0x16b4, ['array', 4, ['unsigned char']]],
'Instrumentation' : [ 0x16b8, ['array', 11, ['unsigned long long']]],
'ActivityId' : [ 0x1710, ['_GUID']],
'SubProcessTag' : [ 0x1720, ['unsigned long long']],
'PerflibData' : [ 0x1728, ['unsigned long long']],
'EtwTraceData' : [ 0x1730, ['unsigned long long']],
'WinSockData' : [ 0x1738, ['unsigned long long']],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'CurrentIdealProcessor' : [ 0x1744, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0x1744, ['unsigned long']],
'ReservedPad0' : [ 0x1744, ['unsigned char']],
'ReservedPad1' : [ 0x1745, ['unsigned char']],
'ReservedPad2' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'Padding5' : [ 0x174c, ['array', 4, ['unsigned char']]],
'ReservedForPerf' : [ 0x1750, ['unsigned long long']],
'ReservedForOle' : [ 0x1758, ['unsigned long long']],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'Padding6' : [ 0x1764, ['array', 4, ['unsigned char']]],
'SavedPriorityState' : [ 0x1768, ['unsigned long long']],
'ReservedForCodeCoverage' : [ 0x1770, ['unsigned long long']],
'ThreadPoolData' : [ 0x1778, ['unsigned long long']],
'TlsExpansionSlots' : [ 0x1780, ['unsigned long long']],
'DeallocationBStore' : [ 0x1788, ['unsigned long long']],
'BStoreLimit' : [ 0x1790, ['unsigned long long']],
'MuiGeneration' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['unsigned long long']],
'pShimData' : [ 0x17a8, ['unsigned long long']],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned short']],
'LowFragHeapDataSlot' : [ 0x17b2, ['unsigned short']],
'Padding7' : [ 0x17b4, ['array', 4, ['unsigned char']]],
'CurrentTransactionHandle' : [ 0x17b8, ['unsigned long long']],
'ActiveFrame' : [ 0x17c0, ['unsigned long long']],
'FlsData' : [ 0x17c8, ['unsigned long long']],
'PreferredLanguages' : [ 0x17d0, ['unsigned long long']],
'UserPrefLanguages' : [ 0x17d8, ['unsigned long long']],
'MergedPrefLanguages' : [ 0x17e0, ['unsigned long long']],
'MuiImpersonation' : [ 0x17e8, ['unsigned long']],
'CrossTebFlags' : [ 0x17ec, ['unsigned short']],
'SpareCrossTebBits' : [ 0x17ec, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0x17ee, ['unsigned short']],
'SafeThunkCall' : [ 0x17ee, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0x17ee, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0x17ee, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0x17ee, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0x17ee, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0x17ee, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0x17ee, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0x17ee, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0x17ee, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0x17ee, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0x17ee, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SessionAware' : [ 0x17ee, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned short')]],
'LoadOwner' : [ 0x17ee, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned short')]],
'LoaderWorker' : [ 0x17ee, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0x17ee, ['BitField', dict(start_bit = 14, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0x17f0, ['unsigned long long']],
'TxnScopeExitCallback' : [ 0x17f8, ['unsigned long long']],
'TxnScopeContext' : [ 0x1800, ['unsigned long long']],
'LockCount' : [ 0x1808, ['unsigned long']],
'WowTebOffset' : [ 0x180c, ['long']],
'ResourceRetValue' : [ 0x1810, ['unsigned long long']],
'ReservedForWdf' : [ 0x1818, ['unsigned long long']],
'ReservedForCrt' : [ 0x1820, ['unsigned long long']],
'EffectiveContainerId' : [ 0x1828, ['_GUID']],
} ],
'_HV_X64_HYPERVISOR_FEATURES' : [ 0x10, {
'PartitionPrivileges' : [ 0x0, ['_HV_PARTITION_PRIVILEGE_MASK']],
'MaxSupportedCState' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'HpetNeededForC3PowerState_Deprecated' : [ 0x8, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'MwaitAvailable_Deprecated' : [ 0xc, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'GuestDebuggingAvailable' : [ 0xc, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PerformanceMonitorsAvailable' : [ 0xc, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'CpuDynamicPartitioningAvailable' : [ 0xc, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'XmmRegistersForFastHypercallAvailable' : [ 0xc, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'GuestIdleAvailable' : [ 0xc, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'HypervisorSleepStateSupportAvailable' : [ 0xc, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'NumaDistanceQueryAvailable' : [ 0xc, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'FrequencyRegsAvailable' : [ 0xc, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SyntheticMachineCheckAvailable' : [ 0xc, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'GuestCrashRegsAvailable' : [ 0xc, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'DebugRegsAvailable' : [ 0xc, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Npiep1Available' : [ 0xc, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'DisableHypervisorAvailable' : [ 0xc, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'ExtendedGvaRangesForFlushVirtualAddressListAvailable' : [ 0xc, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'FastHypercallOutputAvailable' : [ 0xc, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'SvmFeaturesAvailable' : [ 0xc, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'SintPollingModeAvailable' : [ 0xc, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HypercallMsrLockAvailable' : [ 0xc, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'Reserved1' : [ 0xc, ['BitField', dict(start_bit = 19, end_bit = 32, native_type='unsigned long')]],
} ],
'_HV_PARTITION_PRIVILEGE_MASK' : [ 0x8, {
'AsUINT64' : [ 0x0, ['unsigned long long']],
'AccessVpRunTimeReg' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'AccessPartitionReferenceCounter' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'AccessSynicRegs' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'AccessSyntheticTimerRegs' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'AccessIntrCtrlRegs' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'AccessHypercallMsrs' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'AccessVpIndex' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'AccessResetReg' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'AccessStatsReg' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'AccessPartitionReferenceTsc' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'AccessGuestIdleReg' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'AccessFrequencyRegs' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'AccessDebugRegs' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long long')]],
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 32, native_type='unsigned long long')]],
'CreatePartitions' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 33, native_type='unsigned long long')]],
'AccessPartitionId' : [ 0x0, ['BitField', dict(start_bit = 33, end_bit = 34, native_type='unsigned long long')]],
'AccessMemoryPool' : [ 0x0, ['BitField', dict(start_bit = 34, end_bit = 35, native_type='unsigned long long')]],
'AdjustMessageBuffers' : [ 0x0, ['BitField', dict(start_bit = 35, end_bit = 36, native_type='unsigned long long')]],
'PostMessages' : [ 0x0, ['BitField', dict(start_bit = 36, end_bit = 37, native_type='unsigned long long')]],
'SignalEvents' : [ 0x0, ['BitField', dict(start_bit = 37, end_bit = 38, native_type='unsigned long long')]],
'CreatePort' : [ 0x0, ['BitField', dict(start_bit = 38, end_bit = 39, native_type='unsigned long long')]],
'ConnectPort' : [ 0x0, ['BitField', dict(start_bit = 39, end_bit = 40, native_type='unsigned long long')]],
'AccessStats' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 41, native_type='unsigned long long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 41, end_bit = 43, native_type='unsigned long long')]],
'Debugging' : [ 0x0, ['BitField', dict(start_bit = 43, end_bit = 44, native_type='unsigned long long')]],
'CpuManagement' : [ 0x0, ['BitField', dict(start_bit = 44, end_bit = 45, native_type='unsigned long long')]],
'ConfigureProfiler' : [ 0x0, ['BitField', dict(start_bit = 45, end_bit = 46, native_type='unsigned long long')]],
'AccessVpExitTracing' : [ 0x0, ['BitField', dict(start_bit = 46, end_bit = 47, native_type='unsigned long long')]],
'EnableExtendedGvaRangesForFlushVirtualAddressList' : [ 0x0, ['BitField', dict(start_bit = 47, end_bit = 48, native_type='unsigned long long')]],
'AccessVsm' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 49, native_type='unsigned long long')]],
'AccessVpRegisters' : [ 0x0, ['BitField', dict(start_bit = 49, end_bit = 50, native_type='unsigned long long')]],
'UnusedBit' : [ 0x0, ['BitField', dict(start_bit = 50, end_bit = 51, native_type='unsigned long long')]],
'FastHypercallOutput' : [ 0x0, ['BitField', dict(start_bit = 51, end_bit = 52, native_type='unsigned long long')]],
'EnableExtendedHypercalls' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 53, native_type='unsigned long long')]],
'StartVirtualProcessor' : [ 0x0, ['BitField', dict(start_bit = 53, end_bit = 54, native_type='unsigned long long')]],
'Reserved3' : [ 0x0, ['BitField', dict(start_bit = 54, end_bit = 64, native_type='unsigned long long')]],
} ],
'_KTIMER_TABLE' : [ 0x2200, {
'TimerExpiry' : [ 0x0, ['array', 64, ['pointer64', ['_KTIMER']]]],
'TimerEntries' : [ 0x200, ['array', 256, ['_KTIMER_TABLE_ENTRY']]],
} ],
'_KTIMER_TABLE_ENTRY' : [ 0x20, {
'Lock' : [ 0x0, ['unsigned long long']],
'Entry' : [ 0x8, ['_LIST_ENTRY']],
'Time' : [ 0x18, ['_ULARGE_INTEGER']],
} ],
'_XSTATE_SAVE' : [ 0x38, {
'Prev' : [ 0x0, ['pointer64', ['_XSTATE_SAVE']]],
'Thread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Level' : [ 0x10, ['unsigned char']],
'XStateContext' : [ 0x18, ['_XSTATE_CONTEXT']],
} ],
'_XSAVE_AREA' : [ 0x240, {
'LegacyState' : [ 0x0, ['_XSAVE_FORMAT']],
'Header' : [ 0x200, ['_XSAVE_AREA_HEADER']],
} ],
'_KSHARED_READY_QUEUE' : [ 0x260, {
'Lock' : [ 0x0, ['unsigned long long']],
'ReadySummary' : [ 0x8, ['unsigned long']],
'ReadyListHead' : [ 0x10, ['array', 32, ['_LIST_ENTRY']]],
'RunningSummary' : [ 0x210, ['array', 64, ['unsigned char']]],
'Span' : [ 0x250, ['unsigned char']],
'LowProcIndex' : [ 0x251, ['unsigned char']],
'QueueIndex' : [ 0x252, ['unsigned char']],
'ProcCount' : [ 0x253, ['unsigned char']],
'ScanOwner' : [ 0x254, ['unsigned char']],
'Spare' : [ 0x255, ['array', 3, ['unsigned char']]],
'Affinity' : [ 0x258, ['unsigned long long']],
} ],
'_KEXCEPTION_FRAME' : [ 0x140, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5' : [ 0x20, ['unsigned long long']],
'Spare1' : [ 0x28, ['unsigned long long']],
'Xmm6' : [ 0x30, ['_M128A']],
'Xmm7' : [ 0x40, ['_M128A']],
'Xmm8' : [ 0x50, ['_M128A']],
'Xmm9' : [ 0x60, ['_M128A']],
'Xmm10' : [ 0x70, ['_M128A']],
'Xmm11' : [ 0x80, ['_M128A']],
'Xmm12' : [ 0x90, ['_M128A']],
'Xmm13' : [ 0xa0, ['_M128A']],
'Xmm14' : [ 0xb0, ['_M128A']],
'Xmm15' : [ 0xc0, ['_M128A']],
'TrapFrame' : [ 0xd0, ['unsigned long long']],
'OutputBuffer' : [ 0xd8, ['unsigned long long']],
'OutputLength' : [ 0xe0, ['unsigned long long']],
'Spare2' : [ 0xe8, ['unsigned long long']],
'MxCsr' : [ 0xf0, ['unsigned long long']],
'Rbp' : [ 0xf8, ['unsigned long long']],
'Rbx' : [ 0x100, ['unsigned long long']],
'Rdi' : [ 0x108, ['unsigned long long']],
'Rsi' : [ 0x110, ['unsigned long long']],
'R12' : [ 0x118, ['unsigned long long']],
'R13' : [ 0x120, ['unsigned long long']],
'R14' : [ 0x128, ['unsigned long long']],
'R15' : [ 0x130, ['unsigned long long']],
'Return' : [ 0x138, ['unsigned long long']],
} ],
'_KTRAP_FRAME' : [ 0x190, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5' : [ 0x20, ['unsigned long long']],
'PreviousMode' : [ 0x28, ['unsigned char']],
'PreviousIrql' : [ 0x29, ['unsigned char']],
'FaultIndicator' : [ 0x2a, ['unsigned char']],
'ExceptionActive' : [ 0x2b, ['unsigned char']],
'MxCsr' : [ 0x2c, ['unsigned long']],
'Rax' : [ 0x30, ['unsigned long long']],
'Rcx' : [ 0x38, ['unsigned long long']],
'Rdx' : [ 0x40, ['unsigned long long']],
'R8' : [ 0x48, ['unsigned long long']],
'R9' : [ 0x50, ['unsigned long long']],
'R10' : [ 0x58, ['unsigned long long']],
'R11' : [ 0x60, ['unsigned long long']],
'GsBase' : [ 0x68, ['unsigned long long']],
'GsSwap' : [ 0x68, ['unsigned long long']],
'Xmm0' : [ 0x70, ['_M128A']],
'Xmm1' : [ 0x80, ['_M128A']],
'Xmm2' : [ 0x90, ['_M128A']],
'Xmm3' : [ 0xa0, ['_M128A']],
'Xmm4' : [ 0xb0, ['_M128A']],
'Xmm5' : [ 0xc0, ['_M128A']],
'FaultAddress' : [ 0xd0, ['unsigned long long']],
'ContextRecord' : [ 0xd0, ['unsigned long long']],
'TimeStampCKCL' : [ 0xd0, ['unsigned long long']],
'Dr0' : [ 0xd8, ['unsigned long long']],
'Dr1' : [ 0xe0, ['unsigned long long']],
'Dr2' : [ 0xe8, ['unsigned long long']],
'Dr3' : [ 0xf0, ['unsigned long long']],
'Dr6' : [ 0xf8, ['unsigned long long']],
'Dr7' : [ 0x100, ['unsigned long long']],
'DebugControl' : [ 0x108, ['unsigned long long']],
'LastBranchToRip' : [ 0x110, ['unsigned long long']],
'LastBranchFromRip' : [ 0x118, ['unsigned long long']],
'LastExceptionToRip' : [ 0x120, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x128, ['unsigned long long']],
'SegDs' : [ 0x130, ['unsigned short']],
'SegEs' : [ 0x132, ['unsigned short']],
'SegFs' : [ 0x134, ['unsigned short']],
'SegGs' : [ 0x136, ['unsigned short']],
'TrapFrame' : [ 0x138, ['unsigned long long']],
'Rbx' : [ 0x140, ['unsigned long long']],
'Rdi' : [ 0x148, ['unsigned long long']],
'Rsi' : [ 0x150, ['unsigned long long']],
'Rbp' : [ 0x158, ['unsigned long long']],
'ErrorCode' : [ 0x160, ['unsigned long long']],
'ExceptionFrame' : [ 0x160, ['unsigned long long']],
'TimeStampKlog' : [ 0x160, ['unsigned long long']],
'Rip' : [ 0x168, ['unsigned long long']],
'SegCs' : [ 0x170, ['unsigned short']],
'Fill0' : [ 0x172, ['unsigned char']],
'Logging' : [ 0x173, ['unsigned char']],
'Fill1' : [ 0x174, ['array', 2, ['unsigned short']]],
'EFlags' : [ 0x178, ['unsigned long']],
'Fill2' : [ 0x17c, ['unsigned long']],
'Rsp' : [ 0x180, ['unsigned long long']],
'SegSs' : [ 0x188, ['unsigned short']],
'Fill3' : [ 0x18a, ['unsigned short']],
'Fill4' : [ 0x18c, ['unsigned long']],
} ],
'__unnamed_1866' : [ 0x8, {
'LegacyDeviceNode' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
'PendingDeviceRelations' : [ 0x0, ['pointer64', ['_DEVICE_RELATIONS']]],
'Information' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_1868' : [ 0x8, {
'NextResourceDeviceNode' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
} ],
'__unnamed_186c' : [ 0x20, {
'DockStatus' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DOCK_NOTDOCKDEVICE', 1: 'DOCK_QUIESCENT', 2: 'DOCK_ARRIVING', 3: 'DOCK_DEPARTING', 4: 'DOCK_EJECTIRP_COMPLETED'})]],
'ListEntry' : [ 0x8, ['_LIST_ENTRY']],
'SerialNumber' : [ 0x18, ['pointer64', ['unsigned short']]],
} ],
'_DEVICE_NODE' : [ 0x2c8, {
'Sibling' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
'Child' : [ 0x8, ['pointer64', ['_DEVICE_NODE']]],
'Parent' : [ 0x10, ['pointer64', ['_DEVICE_NODE']]],
'LastChild' : [ 0x18, ['pointer64', ['_DEVICE_NODE']]],
'PhysicalDeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'InstancePath' : [ 0x28, ['_UNICODE_STRING']],
'ServiceName' : [ 0x38, ['_UNICODE_STRING']],
'PendingIrp' : [ 0x48, ['pointer64', ['_IRP']]],
'FxDevice' : [ 0x50, ['pointer64', ['_POP_FX_DEVICE']]],
'FxDeviceLock' : [ 0x58, ['long']],
'FxRemoveEvent' : [ 0x60, ['_KEVENT']],
'FxActivationCount' : [ 0x78, ['long']],
'FxSleepCount' : [ 0x7c, ['long']],
'Plugin' : [ 0x80, ['pointer64', ['_POP_FX_PLUGIN']]],
'Level' : [ 0x88, ['unsigned long']],
'CurrentPowerState' : [ 0x8c, ['_POWER_STATE']],
'Notify' : [ 0x90, ['_PO_DEVICE_NOTIFY']],
'PoIrpManager' : [ 0xf8, ['_PO_IRP_MANAGER']],
'UniqueId' : [ 0x118, ['_UNICODE_STRING']],
'PowerFlags' : [ 0x128, ['unsigned long']],
'State' : [ 0x12c, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'PreviousState' : [ 0x130, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'StateHistory' : [ 0x134, ['array', -80, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]]],
'StateHistoryEntry' : [ 0x184, ['unsigned long']],
'CompletionStatus' : [ 0x188, ['long']],
'Flags' : [ 0x18c, ['unsigned long']],
'UserFlags' : [ 0x190, ['unsigned long']],
'Problem' : [ 0x194, ['unsigned long']],
'ProblemStatus' : [ 0x198, ['long']],
'ResourceList' : [ 0x1a0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'ResourceListTranslated' : [ 0x1a8, ['pointer64', ['_CM_RESOURCE_LIST']]],
'DuplicatePDO' : [ 0x1b0, ['pointer64', ['_DEVICE_OBJECT']]],
'ResourceRequirements' : [ 0x1b8, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'InterfaceType' : [ 0x1c0, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'ACPIBus', 18: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x1c4, ['unsigned long']],
'ChildInterfaceType' : [ 0x1c8, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'ACPIBus', 18: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'ChildBusNumber' : [ 0x1cc, ['unsigned long']],
'ChildBusTypeIndex' : [ 0x1d0, ['unsigned short']],
'RemovalPolicy' : [ 0x1d2, ['unsigned char']],
'HardwareRemovalPolicy' : [ 0x1d3, ['unsigned char']],
'TargetDeviceNotify' : [ 0x1d8, ['_LIST_ENTRY']],
'DeviceArbiterList' : [ 0x1e8, ['_LIST_ENTRY']],
'DeviceTranslatorList' : [ 0x1f8, ['_LIST_ENTRY']],
'NoTranslatorMask' : [ 0x208, ['unsigned short']],
'QueryTranslatorMask' : [ 0x20a, ['unsigned short']],
'NoArbiterMask' : [ 0x20c, ['unsigned short']],
'QueryArbiterMask' : [ 0x20e, ['unsigned short']],
'OverUsed1' : [ 0x210, ['__unnamed_1866']],
'OverUsed2' : [ 0x218, ['__unnamed_1868']],
'BootResources' : [ 0x220, ['pointer64', ['_CM_RESOURCE_LIST']]],
'BootResourcesTranslated' : [ 0x228, ['pointer64', ['_CM_RESOURCE_LIST']]],
'CapabilityFlags' : [ 0x230, ['unsigned long']],
'DockInfo' : [ 0x238, ['__unnamed_186c']],
'DisableableDepends' : [ 0x258, ['unsigned long']],
'PendedSetInterfaceState' : [ 0x260, ['_LIST_ENTRY']],
'LegacyBusListEntry' : [ 0x270, ['_LIST_ENTRY']],
'DriverUnloadRetryCount' : [ 0x280, ['unsigned long']],
'PreviousParent' : [ 0x288, ['pointer64', ['_DEVICE_NODE']]],
'DeletedChildren' : [ 0x290, ['long']],
'NumaNodeIndex' : [ 0x294, ['unsigned long']],
'ContainerID' : [ 0x298, ['_GUID']],
'OverrideFlags' : [ 0x2a8, ['unsigned char']],
'DeviceIdsHash' : [ 0x2ac, ['unsigned long']],
'RequiresUnloadedDriver' : [ 0x2b0, ['unsigned char']],
'PendingEjectRelations' : [ 0x2b8, ['pointer64', ['_PENDING_RELATIONS_LIST_ENTRY']]],
'StateFlags' : [ 0x2c0, ['unsigned long']],
} ],
'_MCGEN_TRACE_CONTEXT' : [ 0x48, {
'RegistrationHandle' : [ 0x0, ['unsigned long long']],
'Logger' : [ 0x8, ['unsigned long long']],
'MatchAnyKeyword' : [ 0x10, ['unsigned long long']],
'MatchAllKeyword' : [ 0x18, ['unsigned long long']],
'Flags' : [ 0x20, ['unsigned long']],
'IsEnabled' : [ 0x24, ['unsigned long']],
'Level' : [ 0x28, ['unsigned char']],
'Reserve' : [ 0x29, ['unsigned char']],
'EnableBitsCount' : [ 0x2a, ['unsigned short']],
'EnableBitMask' : [ 0x30, ['pointer64', ['unsigned long']]],
'EnableKeyWords' : [ 0x38, ['pointer64', ['unsigned long long']]],
'EnableLevel' : [ 0x40, ['pointer64', ['unsigned char']]],
} ],
'_PNP_DEVICE_COMPLETION_QUEUE' : [ 0x50, {
'DispatchedList' : [ 0x0, ['_LIST_ENTRY']],
'DispatchedCount' : [ 0x10, ['unsigned long']],
'CompletedList' : [ 0x18, ['_LIST_ENTRY']],
'CompletedSemaphore' : [ 0x28, ['_KSEMAPHORE']],
'SpinLock' : [ 0x48, ['unsigned long long']],
} ],
'_KSEMAPHORE' : [ 0x20, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'Limit' : [ 0x18, ['long']],
} ],
'_DEVOBJ_EXTENSION' : [ 0x68, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'PowerFlags' : [ 0x10, ['unsigned long']],
'Dope' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT_POWER_EXTENSION']]],
'ExtensionFlags' : [ 0x20, ['unsigned long']],
'DeviceNode' : [ 0x28, ['pointer64', ['void']]],
'AttachedTo' : [ 0x30, ['pointer64', ['_DEVICE_OBJECT']]],
'StartIoCount' : [ 0x38, ['long']],
'StartIoKey' : [ 0x3c, ['long']],
'StartIoFlags' : [ 0x40, ['unsigned long']],
'Vpb' : [ 0x48, ['pointer64', ['_VPB']]],
'DependencyNode' : [ 0x50, ['pointer64', ['void']]],
'InterruptContext' : [ 0x58, ['pointer64', ['void']]],
'VerifierContext' : [ 0x60, ['pointer64', ['void']]],
} ],
'_GROUP_AFFINITY' : [ 0x10, {
'Mask' : [ 0x0, ['unsigned long long']],
'Group' : [ 0x8, ['unsigned short']],
'Reserved' : [ 0xa, ['array', 3, ['unsigned short']]],
} ],
'_KAFFINITY_EX' : [ 0xa8, {
'Count' : [ 0x0, ['unsigned short']],
'Size' : [ 0x2, ['unsigned short']],
'Reserved' : [ 0x4, ['unsigned long']],
'Bitmap' : [ 0x8, ['array', 20, ['unsigned long long']]],
} ],
'_PNP_ASSIGN_RESOURCES_CONTEXT' : [ 0x10, {
'IncludeFailedDevices' : [ 0x0, ['unsigned long']],
'DeviceCount' : [ 0x4, ['unsigned long']],
'DeviceList' : [ 0x8, ['array', 1, ['pointer64', ['_DEVICE_OBJECT']]]],
} ],
'_PNP_RESOURCE_REQUEST' : [ 0x40, {
'PhysicalDevice' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x8, ['unsigned long']],
'AllocationType' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Priority' : [ 0x10, ['unsigned long']],
'Position' : [ 0x14, ['unsigned long']],
'ResourceRequirements' : [ 0x18, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'ReqList' : [ 0x20, ['pointer64', ['void']]],
'ResourceAssignment' : [ 0x28, ['pointer64', ['_CM_RESOURCE_LIST']]],
'TranslatedResourceAssignment' : [ 0x30, ['pointer64', ['_CM_RESOURCE_LIST']]],
'Status' : [ 0x38, ['long']],
} ],
'_IO_RESOURCE_REQUIREMENTS_LIST' : [ 0x48, {
'ListSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'ACPIBus', 18: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x8, ['unsigned long']],
'SlotNumber' : [ 0xc, ['unsigned long']],
'Reserved' : [ 0x10, ['array', 3, ['unsigned long']]],
'AlternativeLists' : [ 0x1c, ['unsigned long']],
'List' : [ 0x20, ['array', 1, ['_IO_RESOURCE_LIST']]],
} ],
'_EXCEPTION_RECORD64' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long long']],
'ExceptionAddress' : [ 0x10, ['unsigned long long']],
'NumberParameters' : [ 0x18, ['unsigned long']],
'__unusedAlignment' : [ 0x1c, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_EXCEPTION_RECORD32' : [ 0x50, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long']],
'ExceptionAddress' : [ 0xc, ['unsigned long']],
'NumberParameters' : [ 0x10, ['unsigned long']],
'ExceptionInformation' : [ 0x14, ['array', 15, ['unsigned long']]],
} ],
'_DBGKM_EXCEPTION64' : [ 0xa0, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD64']],
'FirstChance' : [ 0x98, ['unsigned long']],
} ],
'_DBGKM_EXCEPTION32' : [ 0x54, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD32']],
'FirstChance' : [ 0x50, ['unsigned long']],
} ],
'_X86_KTRAP_FRAME' : [ 0x8c, {
'DbgEbp' : [ 0x0, ['unsigned long']],
'DbgEip' : [ 0x4, ['unsigned long']],
'DbgArgMark' : [ 0x8, ['unsigned long']],
'DbgArgPointer' : [ 0xc, ['unsigned long']],
'TempSegCs' : [ 0x10, ['unsigned long']],
'TempEsp' : [ 0x14, ['unsigned long']],
'Dr0' : [ 0x18, ['unsigned long']],
'Dr1' : [ 0x1c, ['unsigned long']],
'Dr2' : [ 0x20, ['unsigned long']],
'Dr3' : [ 0x24, ['unsigned long']],
'Dr6' : [ 0x28, ['unsigned long']],
'Dr7' : [ 0x2c, ['unsigned long']],
'SegGs' : [ 0x30, ['unsigned long']],
'SegEs' : [ 0x34, ['unsigned long']],
'SegDs' : [ 0x38, ['unsigned long']],
'Edx' : [ 0x3c, ['unsigned long']],
'Ecx' : [ 0x40, ['unsigned long']],
'Eax' : [ 0x44, ['unsigned long']],
'PreviousPreviousMode' : [ 0x48, ['unsigned char']],
'EntropyQueueDpc' : [ 0x49, ['unsigned char']],
'Reserved' : [ 0x4a, ['array', 2, ['unsigned char']]],
'ExceptionList' : [ 0x4c, ['unsigned long']],
'SegFs' : [ 0x50, ['unsigned long']],
'Edi' : [ 0x54, ['unsigned long']],
'Esi' : [ 0x58, ['unsigned long']],
'Ebx' : [ 0x5c, ['unsigned long']],
'Ebp' : [ 0x60, ['unsigned long']],
'ErrCode' : [ 0x64, ['unsigned long']],
'Eip' : [ 0x68, ['unsigned long']],
'SegCs' : [ 0x6c, ['unsigned long']],
'EFlags' : [ 0x70, ['unsigned long']],
'HardwareEsp' : [ 0x74, ['unsigned long']],
'HardwareSegSs' : [ 0x78, ['unsigned long']],
'V86Es' : [ 0x7c, ['unsigned long']],
'V86Ds' : [ 0x80, ['unsigned long']],
'V86Fs' : [ 0x84, ['unsigned long']],
'V86Gs' : [ 0x88, ['unsigned long']],
} ],
'_X86_KTRAP_FRAME_BLUE' : [ 0x8c, {
'DbgEbp' : [ 0x0, ['unsigned long']],
'DbgEip' : [ 0x4, ['unsigned long']],
'DbgArgMark' : [ 0x8, ['unsigned long']],
'TempSegCs' : [ 0xc, ['unsigned short']],
'Logging' : [ 0xe, ['unsigned char']],
'FrameType' : [ 0xf, ['unsigned char']],
'TempEsp' : [ 0x10, ['unsigned long']],
'Dr0' : [ 0x14, ['unsigned long']],
'Dr1' : [ 0x18, ['unsigned long']],
'Dr2' : [ 0x1c, ['unsigned long']],
'Dr3' : [ 0x20, ['unsigned long']],
'Dr6' : [ 0x24, ['unsigned long']],
'Dr7' : [ 0x28, ['unsigned long']],
'SegGs' : [ 0x2c, ['unsigned long']],
'SegEs' : [ 0x30, ['unsigned long']],
'SegDs' : [ 0x34, ['unsigned long']],
'Edx' : [ 0x38, ['unsigned long']],
'Ecx' : [ 0x3c, ['unsigned long']],
'Eax' : [ 0x40, ['unsigned long']],
'PreviousPreviousMode' : [ 0x44, ['unsigned char']],
'EntropyQueueDpc' : [ 0x45, ['unsigned char']],
'Reserved' : [ 0x46, ['array', 2, ['unsigned char']]],
'MxCsr' : [ 0x48, ['unsigned long']],
'ExceptionList' : [ 0x4c, ['unsigned long']],
'SegFs' : [ 0x50, ['unsigned long']],
'Edi' : [ 0x54, ['unsigned long']],
'Esi' : [ 0x58, ['unsigned long']],
'Ebx' : [ 0x5c, ['unsigned long']],
'Ebp' : [ 0x60, ['unsigned long']],
'ErrCode' : [ 0x64, ['unsigned long']],
'Eip' : [ 0x68, ['unsigned long']],
'SegCs' : [ 0x6c, ['unsigned long']],
'EFlags' : [ 0x70, ['unsigned long']],
'HardwareEsp' : [ 0x74, ['unsigned long']],
'HardwareSegSs' : [ 0x78, ['unsigned long']],
'V86Es' : [ 0x7c, ['unsigned long']],
'V86Ds' : [ 0x80, ['unsigned long']],
'V86Fs' : [ 0x84, ['unsigned long']],
'V86Gs' : [ 0x88, ['unsigned long']],
} ],
'_DBGKD_LOAD_SYMBOLS64' : [ 0x28, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x8, ['unsigned long long']],
'ProcessId' : [ 0x10, ['unsigned long long']],
'CheckSum' : [ 0x18, ['unsigned long']],
'SizeOfImage' : [ 0x1c, ['unsigned long']],
'UnloadSymbols' : [ 0x20, ['unsigned char']],
} ],
'_DBGKD_LOAD_SYMBOLS32' : [ 0x18, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x4, ['unsigned long']],
'ProcessId' : [ 0x8, ['unsigned long']],
'CheckSum' : [ 0xc, ['unsigned long']],
'SizeOfImage' : [ 0x10, ['unsigned long']],
'UnloadSymbols' : [ 0x14, ['unsigned char']],
} ],
'_DBGKD_READ_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesRead' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesRead' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesWritten' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesWritten' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT64' : [ 0x10, {
'BreakPointAddress' : [ 0x0, ['unsigned long long']],
'BreakPointHandle' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT32' : [ 0x8, {
'BreakPointAddress' : [ 0x0, ['unsigned long']],
'BreakPointHandle' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO64' : [ 0x10, {
'IoAddress' : [ 0x0, ['unsigned long long']],
'DataSize' : [ 0x8, ['unsigned long']],
'DataValue' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO32' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'IoAddress' : [ 0x4, ['unsigned long']],
'DataValue' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED64' : [ 0x20, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long long']],
'DataValue' : [ 0x18, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED32' : [ 0x18, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long']],
'DataValue' : [ 0x14, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL32' : [ 0x4, {
'SpecialCall' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL64' : [ 0x8, {
'SpecialCall' : [ 0x0, ['unsigned long long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT32' : [ 0x8, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT64' : [ 0x10, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT64' : [ 0x20, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
'Calls' : [ 0xc, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0x10, ['unsigned long']],
'MinInstructions' : [ 0x14, ['unsigned long']],
'MaxInstructions' : [ 0x18, ['unsigned long']],
'TotalInstructions' : [ 0x1c, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT32' : [ 0x1c, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'Calls' : [ 0x8, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0xc, ['unsigned long']],
'MinInstructions' : [ 0x10, ['unsigned long']],
'MaxInstructions' : [ 0x14, ['unsigned long']],
'TotalInstructions' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1969' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT64']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO64']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED64']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL64']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT64']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT64']],
'GetVersion64' : [ 0x0, ['_DBGKD_GET_VERSION64']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
'GetSetBusData' : [ 0x0, ['_DBGKD_GET_SET_BUS_DATA']],
'FillMemory' : [ 0x0, ['_DBGKD_FILL_MEMORY']],
'QueryMemory' : [ 0x0, ['_DBGKD_QUERY_MEMORY']],
'SwitchPartition' : [ 0x0, ['_DBGKD_SWITCH_PARTITION']],
'GetContextEx' : [ 0x0, ['_DBGKD_CONTEXT_EX']],
'SetContextEx' : [ 0x0, ['_DBGKD_CONTEXT_EX']],
'WriteCustomBreakPoint' : [ 0x0, ['_DBGKD_WRITE_CUSTOM_BREAKPOINT']],
} ],
'_DBGKD_MANIPULATE_STATE64' : [ 0x38, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0x10, ['__unnamed_1969']],
} ],
'__unnamed_1970' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY32']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY32']],
'ReadMemory64' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory64' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT32']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO32']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED32']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL32']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT32']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT32']],
'GetVersion32' : [ 0x0, ['_DBGKD_GET_VERSION32']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
'GetContextEx' : [ 0x0, ['_DBGKD_CONTEXT_EX']],
'SetContextEx' : [ 0x0, ['_DBGKD_CONTEXT_EX']],
} ],
'_DBGKD_MANIPULATE_STATE32' : [ 0x34, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0xc, ['__unnamed_1970']],
} ],
'_DBGKD_READ_WRITE_MSR' : [ 0xc, {
'Msr' : [ 0x0, ['unsigned long']],
'DataValueLow' : [ 0x4, ['unsigned long']],
'DataValueHigh' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_BREAKPOINTEX' : [ 0x8, {
'BreakPointCount' : [ 0x0, ['unsigned long']],
'ContinueStatus' : [ 0x4, ['long']],
} ],
'_DBGKD_SEARCH_MEMORY' : [ 0x18, {
'SearchAddress' : [ 0x0, ['unsigned long long']],
'FoundAddress' : [ 0x0, ['unsigned long long']],
'SearchLength' : [ 0x8, ['unsigned long long']],
'PatternLength' : [ 0x10, ['unsigned long']],
} ],
'_DBGKD_RESTORE_BREAKPOINT' : [ 0x4, {
'BreakPointHandle' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_CONTINUE' : [ 0x4, {
'ContinueStatus' : [ 0x0, ['long']],
} ],
'_DBGKD_CONTINUE2' : [ 0x20, {
'ContinueStatus' : [ 0x0, ['long']],
'ControlSet' : [ 0x4, ['_AMD64_DBGKD_CONTROL_SET']],
'AnyControlSet' : [ 0x4, ['_DBGKD_ANY_CONTROL_SET']],
} ],
'_PEP_ACPI_RESOURCE' : [ 0x48, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PepAcpiMemory', 1: 'PepAcpiIoPort', 2: 'PepAcpiInterrupt', 3: 'PepAcpiGpioIo', 4: 'PepAcpiGpioInt', 5: 'PepAcpiSpbI2c', 6: 'PepAcpiSpbSpi', 7: 'PepAcpiSpbUart', 8: 'PepAcpiExtendedMemory', 9: 'PepAcpiExtendedIo'})]],
'IoMemory' : [ 0x0, ['_PEP_ACPI_IO_MEMORY_RESOURCE']],
'Interrupt' : [ 0x0, ['_PEP_ACPI_INTERRUPT_RESOURCE']],
'Gpio' : [ 0x0, ['_PEP_ACPI_GPIO_RESOURCE']],
'SpbI2c' : [ 0x0, ['_PEP_ACPI_SPB_I2C_RESOURCE']],
'SpbSpi' : [ 0x0, ['_PEP_ACPI_SPB_SPI_RESOURCE']],
'SpbUart' : [ 0x0, ['_PEP_ACPI_SPB_UART_RESOURCE']],
'ExtendedAddress' : [ 0x0, ['_PEP_ACPI_EXTENDED_ADDRESS']],
} ],
'_PEP_ACPI_IO_MEMORY_RESOURCE' : [ 0x20, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PepAcpiMemory', 1: 'PepAcpiIoPort', 2: 'PepAcpiInterrupt', 3: 'PepAcpiGpioIo', 4: 'PepAcpiGpioInt', 5: 'PepAcpiSpbI2c', 6: 'PepAcpiSpbSpi', 7: 'PepAcpiSpbUart', 8: 'PepAcpiExtendedMemory', 9: 'PepAcpiExtendedIo'})]],
'Information' : [ 0x4, ['unsigned char']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
'Alignment' : [ 0x18, ['unsigned long']],
'Length' : [ 0x1c, ['unsigned long']],
} ],
'_PEP_ACPI_INTERRUPT_RESOURCE' : [ 0x20, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PepAcpiMemory', 1: 'PepAcpiIoPort', 2: 'PepAcpiInterrupt', 3: 'PepAcpiGpioIo', 4: 'PepAcpiGpioInt', 5: 'PepAcpiSpbI2c', 6: 'PepAcpiSpbSpi', 7: 'PepAcpiSpbUart', 8: 'PepAcpiExtendedMemory', 9: 'PepAcpiExtendedIo'})]],
'InterruptType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'LevelSensitive', 1: 'Latched'})]],
'InterruptPolarity' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'InterruptPolarityUnknown', 1: 'InterruptRisingEdge', 2: 'InterruptFallingEdge', 3: 'InterruptActiveBothTriggerLow', 4: 'InterruptActiveBothTriggerHigh'})]],
'Flags' : [ 0xc, ['_PEP_ACPI_RESOURCE_FLAGS']],
'Count' : [ 0x10, ['unsigned char']],
'Pins' : [ 0x18, ['pointer64', ['unsigned long']]],
} ],
'_PEP_ACPI_GPIO_RESOURCE' : [ 0x48, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PepAcpiMemory', 1: 'PepAcpiIoPort', 2: 'PepAcpiInterrupt', 3: 'PepAcpiGpioIo', 4: 'PepAcpiGpioInt', 5: 'PepAcpiSpbI2c', 6: 'PepAcpiSpbSpi', 7: 'PepAcpiSpbUart', 8: 'PepAcpiExtendedMemory', 9: 'PepAcpiExtendedIo'})]],
'Flags' : [ 0x4, ['_PEP_ACPI_RESOURCE_FLAGS']],
'InterruptType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'LevelSensitive', 1: 'Latched'})]],
'InterruptPolarity' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'InterruptPolarityUnknown', 1: 'InterruptRisingEdge', 2: 'InterruptFallingEdge', 3: 'InterruptActiveBothTriggerLow', 4: 'InterruptActiveBothTriggerHigh'})]],
'PinConfig' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'PullDefault', 1: 'PullUp', 2: 'PullDown', 3: 'PullNone'})]],
'IoRestrictionType' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'IoRestrictionNone', 1: 'IoRestrictionInputOnly', 2: 'IoRestrictionOutputOnly', 3: 'IoRestrictionNoneAndPreserve'})]],
'DriveStrength' : [ 0x18, ['unsigned short']],
'DebounceTimeout' : [ 0x1a, ['unsigned short']],
'PinTable' : [ 0x20, ['pointer64', ['unsigned short']]],
'PinCount' : [ 0x28, ['unsigned short']],
'ResourceSourceIndex' : [ 0x2a, ['unsigned char']],
'ResourceSourceName' : [ 0x30, ['pointer64', ['_UNICODE_STRING']]],
'VendorData' : [ 0x38, ['pointer64', ['unsigned char']]],
'VendorDataLength' : [ 0x40, ['unsigned short']],
} ],
'_PEP_ACPI_SPB_I2C_RESOURCE' : [ 0x30, {
'SpbCommon' : [ 0x0, ['_PEP_ACPI_SPB_RESOURCE']],
'ConnectionSpeed' : [ 0x28, ['unsigned long']],
'SlaveAddress' : [ 0x2c, ['unsigned short']],
} ],
'_PEP_ACPI_SPB_UART_RESOURCE' : [ 0x38, {
'SpbCommon' : [ 0x0, ['_PEP_ACPI_SPB_RESOURCE']],
'BaudRate' : [ 0x28, ['unsigned long']],
'RxBufferSize' : [ 0x2c, ['unsigned short']],
'TxBufferSize' : [ 0x2e, ['unsigned short']],
'Parity' : [ 0x30, ['unsigned char']],
'LinesInUse' : [ 0x31, ['unsigned char']],
} ],
'_PEP_ACPI_SPB_SPI_RESOURCE' : [ 0x38, {
'SpbCommon' : [ 0x0, ['_PEP_ACPI_SPB_RESOURCE']],
'ConnectionSpeed' : [ 0x28, ['unsigned long']],
'DataBitLength' : [ 0x2c, ['unsigned char']],
'Phase' : [ 0x2d, ['unsigned char']],
'Polarity' : [ 0x2e, ['unsigned char']],
'DeviceSelection' : [ 0x30, ['unsigned short']],
} ],
'_PEP_ACPI_EXTENDED_ADDRESS' : [ 0x48, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PepAcpiMemory', 1: 'PepAcpiIoPort', 2: 'PepAcpiInterrupt', 3: 'PepAcpiGpioIo', 4: 'PepAcpiGpioInt', 5: 'PepAcpiSpbI2c', 6: 'PepAcpiSpbSpi', 7: 'PepAcpiSpbUart', 8: 'PepAcpiExtendedMemory', 9: 'PepAcpiExtendedIo'})]],
'Flags' : [ 0x4, ['_PEP_ACPI_RESOURCE_FLAGS']],
'ResourceFlags' : [ 0x8, ['unsigned char']],
'GeneralFlags' : [ 0x9, ['unsigned char']],
'TypeSpecificFlags' : [ 0xa, ['unsigned char']],
'RevisionId' : [ 0xb, ['unsigned char']],
'Reserved' : [ 0xc, ['unsigned char']],
'Granularity' : [ 0x10, ['unsigned long long']],
'MinimumAddress' : [ 0x18, ['unsigned long long']],
'MaximumAddress' : [ 0x20, ['unsigned long long']],
'TranslationAddress' : [ 0x28, ['unsigned long long']],
'AddressLength' : [ 0x30, ['unsigned long long']],
'TypeAttribute' : [ 0x38, ['unsigned long long']],
'DescriptorName' : [ 0x40, ['pointer64', ['_UNICODE_STRING']]],
} ],
'_PPM_PLATFORM_STATES' : [ 0x1c0, {
'StateCount' : [ 0x0, ['unsigned long']],
'InterfaceVersion' : [ 0x4, ['unsigned long']],
'ProcessorCount' : [ 0x8, ['unsigned long']],
'CoordinatedInterface' : [ 0xc, ['unsigned char']],
'IdleTest' : [ 0x10, ['pointer64', ['void']]],
'IdlePreExecute' : [ 0x18, ['pointer64', ['void']]],
'IdleComplete' : [ 0x20, ['pointer64', ['void']]],
'QueryPlatformStateResidency' : [ 0x28, ['pointer64', ['void']]],
'Accounting' : [ 0x30, ['pointer64', ['_PLATFORM_IDLE_ACCOUNTING']]],
'State' : [ 0x40, ['array', 1, ['_PPM_PLATFORM_STATE']]],
} ],
'_CPU_INFO' : [ 0x10, {
'AsUINT32' : [ 0x0, ['array', 4, ['unsigned long']]],
'Eax' : [ 0x0, ['unsigned long']],
'Ebx' : [ 0x4, ['unsigned long']],
'Ecx' : [ 0x8, ['unsigned long']],
'Edx' : [ 0xc, ['unsigned long']],
} ],
'_POP_PPM_PROFILE' : [ 0xb30, {
'Name' : [ 0x0, ['pointer64', ['unsigned short']]],
'Id' : [ 0x8, ['unsigned char']],
'Guid' : [ 0xc, ['_GUID']],
'Flags' : [ 0x1c, ['unsigned long']],
'Priority' : [ 0x20, ['unsigned char']],
'Settings' : [ 0x28, ['array', 2, ['_PPM_ENGINE_SETTINGS']]],
'StartTime' : [ 0xb08, ['unsigned long long']],
'Count' : [ 0xb10, ['unsigned long long']],
'MaxDuration' : [ 0xb18, ['unsigned long long']],
'MinDuration' : [ 0xb20, ['unsigned long long']],
'TotalDuration' : [ 0xb28, ['unsigned long long']],
} ],
'_PPM_ENGINE_SETTINGS' : [ 0x570, {
'ExplicitSetting' : [ 0x0, ['array', 2, ['_PPM_POLICY_SETTINGS_MASK']]],
'ThrottlingPolicy' : [ 0x10, ['unsigned char']],
'PerfTimeCheck' : [ 0x14, ['unsigned long']],
'PerfHistoryCount' : [ 0x18, ['array', 2, ['unsigned char']]],
'PerfMinPolicy' : [ 0x1a, ['array', 2, ['unsigned char']]],
'PerfMaxPolicy' : [ 0x1c, ['array', 2, ['unsigned char']]],
'PerfDecreaseTime' : [ 0x1e, ['array', 2, ['unsigned char']]],
'PerfIncreaseTime' : [ 0x20, ['array', 2, ['unsigned char']]],
'PerfDecreasePolicy' : [ 0x22, ['array', 2, ['unsigned char']]],
'PerfIncreasePolicy' : [ 0x24, ['array', 2, ['unsigned char']]],
'PerfDecreaseThreshold' : [ 0x26, ['array', 2, ['unsigned char']]],
'PerfIncreaseThreshold' : [ 0x28, ['array', 2, ['unsigned char']]],
'PerfBoostPolicy' : [ 0x2c, ['unsigned long']],
'PerfBoostMode' : [ 0x30, ['unsigned long']],
'PerfReductionTolerance' : [ 0x34, ['unsigned long']],
'EnergyPerfPreference' : [ 0x38, ['unsigned long']],
'AutonomousActivityWindow' : [ 0x3c, ['unsigned long']],
'AutonomousPreference' : [ 0x40, ['unsigned char']],
'LatencyHintPerf' : [ 0x41, ['array', 2, ['unsigned char']]],
'LatencyHintUnpark' : [ 0x43, ['array', 2, ['unsigned char']]],
'DutyCycling' : [ 0x45, ['unsigned char']],
'ParkingPerfState' : [ 0x46, ['array', 2, ['unsigned char']]],
'DistributeUtility' : [ 0x48, ['unsigned char']],
'CoreParkingOverUtilizationThreshold' : [ 0x49, ['unsigned char']],
'CoreParkingConcurrencyThreshold' : [ 0x4a, ['unsigned char']],
'CoreParkingHeadroomThreshold' : [ 0x4b, ['unsigned char']],
'CoreParkingDistributionThreshold' : [ 0x4c, ['unsigned char']],
'CoreParkingDecreasePolicy' : [ 0x4d, ['unsigned char']],
'CoreParkingIncreasePolicy' : [ 0x4e, ['unsigned char']],
'CoreParkingDecreaseTime' : [ 0x50, ['unsigned long']],
'CoreParkingIncreaseTime' : [ 0x54, ['unsigned long']],
'CoreParkingMinCores' : [ 0x58, ['array', 2, ['unsigned char']]],
'CoreParkingMaxCores' : [ 0x5a, ['array', 2, ['unsigned char']]],
'AllowScaling' : [ 0x5c, ['unsigned char']],
'IdleDisabled' : [ 0x5d, ['unsigned char']],
'IdleTimeCheck' : [ 0x60, ['unsigned long']],
'IdleDemotePercent' : [ 0x64, ['unsigned char']],
'IdlePromotePercent' : [ 0x65, ['unsigned char']],
'HeteroDecreaseTime' : [ 0x66, ['unsigned char']],
'HeteroIncreaseTime' : [ 0x67, ['unsigned char']],
'HeteroDecreaseThreshold' : [ 0x68, ['array', 640, ['unsigned char']]],
'HeteroIncreaseThreshold' : [ 0x2e8, ['array', 640, ['unsigned char']]],
'Class0FloorPerformance' : [ 0x568, ['unsigned char']],
'Class1InitialPerformance' : [ 0x569, ['unsigned char']],
} ],
'_POP_FX_COMPONENT_FLAGS' : [ 0x8, {
'Value' : [ 0x0, ['long']],
'Value2' : [ 0x4, ['long']],
'RefCount' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 30, native_type='unsigned long')]],
'Idling' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'Active' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'CriticalIdleOverride' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ResidentOverride' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Reserved' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_POP_FX_PERF_FLAGS' : [ 0x4, {
'Value' : [ 0x0, ['long']],
'Progress' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 27, native_type='unsigned long')]],
'Synchronicity' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 29, native_type='unsigned long')]],
'RequestPepCompleted' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'RequestSucceeded' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'NestedCallback' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_POP_FX_DEVICE_STATUS' : [ 0x4, {
'Value' : [ 0x0, ['long']],
'SystemTransition' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PepD0Notify' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'IdleTimerOn' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'IgnoreIdleTimeout' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'IrpInUse' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'IrpPending' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'DPNRDeviceNotified' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'DPNRReceivedFromPep' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'IrpFirstPendingIndex' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'IrpLastPendingIndex' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 32, native_type='unsigned long')]],
} ],
'_POP_RW_LOCK' : [ 0x10, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'Thread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
} ],
'_VOLUME_CACHE_MAP' : [ 0xd0, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteCode' : [ 0x2, ['short']],
'UseCount' : [ 0x4, ['unsigned long']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'VolumeCacheMapLinks' : [ 0x10, ['_LIST_ENTRY']],
'DirtyPages' : [ 0x20, ['unsigned long long']],
'LogHandleContext' : [ 0x28, ['_LOG_HANDLE_CONTEXT']],
'Flags' : [ 0xc0, ['unsigned long']],
'PagesQueuedToDisk' : [ 0xc4, ['unsigned long']],
'LoggedPagesQueuedToDisk' : [ 0xc8, ['unsigned long']],
} ],
'_SHARED_CACHE_MAP' : [ 0x210, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'OpenCount' : [ 0x4, ['unsigned long']],
'FileSize' : [ 0x8, ['_LARGE_INTEGER']],
'BcbList' : [ 0x10, ['_LIST_ENTRY']],
'SectionSize' : [ 0x20, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x28, ['_LARGE_INTEGER']],
'ValidDataGoal' : [ 0x30, ['_LARGE_INTEGER']],
'InitialVacbs' : [ 0x38, ['array', 4, ['pointer64', ['_VACB']]]],
'Vacbs' : [ 0x58, ['pointer64', ['pointer64', ['_VACB']]]],
'FileObjectFastRef' : [ 0x60, ['_EX_FAST_REF']],
'VacbLock' : [ 0x68, ['_EX_PUSH_LOCK']],
'DirtyPages' : [ 0x70, ['unsigned long']],
'LoggedStreamLinks' : [ 0x78, ['_LIST_ENTRY']],
'SharedCacheMapLinks' : [ 0x88, ['_LIST_ENTRY']],
'Flags' : [ 0x98, ['unsigned long']],
'Status' : [ 0x9c, ['long']],
'Mbcb' : [ 0xa0, ['pointer64', ['_MBCB']]],
'Section' : [ 0xa8, ['pointer64', ['void']]],
'CreateEvent' : [ 0xb0, ['pointer64', ['_KEVENT']]],
'WaitOnActiveCount' : [ 0xb8, ['pointer64', ['_KEVENT']]],
'PagesToWrite' : [ 0xc0, ['unsigned long']],
'BeyondLastFlush' : [ 0xc8, ['long long']],
'Callbacks' : [ 0xd0, ['pointer64', ['_CACHE_MANAGER_CALLBACKS']]],
'LazyWriteContext' : [ 0xd8, ['pointer64', ['void']]],
'PrivateList' : [ 0xe0, ['_LIST_ENTRY']],
'V1' : [ 0xf0, ['_LOGGED_STREAM_CALLBACK_V1']],
'V2' : [ 0xf0, ['_LOGGED_STREAM_CALLBACK_V2']],
'LargestLSN' : [ 0x100, ['_LARGE_INTEGER']],
'DirtyPageThreshold' : [ 0x108, ['unsigned long']],
'LazyWritePassCount' : [ 0x10c, ['unsigned long']],
'UninitializeEvent' : [ 0x110, ['pointer64', ['_CACHE_UNINITIALIZE_EVENT']]],
'BcbLock' : [ 0x118, ['_FAST_MUTEX']],
'LastUnmapBehindOffset' : [ 0x150, ['_LARGE_INTEGER']],
'Event' : [ 0x158, ['_KEVENT']],
'HighWaterMappingOffset' : [ 0x170, ['_LARGE_INTEGER']],
'PrivateCacheMap' : [ 0x178, ['_PRIVATE_CACHE_MAP']],
'WriteBehindWorkQueueEntry' : [ 0x1f0, ['pointer64', ['void']]],
'VolumeCacheMap' : [ 0x1f8, ['pointer64', ['_VOLUME_CACHE_MAP']]],
'ProcImagePathHash' : [ 0x200, ['unsigned long']],
'WritesInProgress' : [ 0x204, ['unsigned long']],
'AsyncReadRequestCount' : [ 0x208, ['unsigned long']],
} ],
'__unnamed_1a53' : [ 0x10, {
'FileOffset' : [ 0x0, ['_LARGE_INTEGER']],
'ActiveCount' : [ 0x0, ['unsigned short']],
'Links' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_VACB' : [ 0x28, {
'BaseAddress' : [ 0x0, ['pointer64', ['void']]],
'SharedCacheMap' : [ 0x8, ['pointer64', ['_SHARED_CACHE_MAP']]],
'Overlay' : [ 0x10, ['__unnamed_1a53']],
'ArrayHead' : [ 0x20, ['pointer64', ['_VACB_ARRAY_HEADER']]],
} ],
'__unnamed_1a77' : [ 0x8, {
'FileObject' : [ 0x0, ['pointer64', ['_FILE_OBJECT']]],
} ],
'__unnamed_1a79' : [ 0x8, {
'SharedCacheMap' : [ 0x0, ['pointer64', ['_SHARED_CACHE_MAP']]],
} ],
'__unnamed_1a7b' : [ 0x8, {
'Event' : [ 0x0, ['pointer64', ['_KEVENT']]],
} ],
'__unnamed_1a7d' : [ 0x4, {
'Reason' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1a7f' : [ 0x30, {
'SharedCacheMap' : [ 0x0, ['pointer64', ['_SHARED_CACHE_MAP']]],
'IoStatus' : [ 0x8, ['pointer64', ['_IO_STATUS_BLOCK']]],
'CallerWaitEvent' : [ 0x10, ['_KEVENT']],
'IsLowPriWriteBehind' : [ 0x28, ['unsigned char']],
} ],
'__unnamed_1a83' : [ 0x58, {
'SharedCacheMap' : [ 0x0, ['pointer64', ['_SHARED_CACHE_MAP']]],
'FileOffset' : [ 0x8, ['_LARGE_INTEGER']],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'Length' : [ 0x18, ['unsigned long']],
'PrefetchList' : [ 0x20, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
'PrefetchPagePriority' : [ 0x28, ['unsigned long']],
'Mdl' : [ 0x30, ['pointer64', ['_MDL']]],
'IoStatusBlock' : [ 0x38, ['pointer64', ['_IO_STATUS_BLOCK']]],
'CallbackContext' : [ 0x40, ['pointer64', ['_CC_ASYNC_READ_CONTEXT']]],
'OriginatingProcess' : [ 0x48, ['pointer64', ['_EPROCESS']]],
'RequestorMode' : [ 0x50, ['unsigned char']],
'NestingLevel' : [ 0x54, ['unsigned long']],
} ],
'__unnamed_1a85' : [ 0x58, {
'Read' : [ 0x0, ['__unnamed_1a77']],
'Write' : [ 0x0, ['__unnamed_1a79']],
'Event' : [ 0x0, ['__unnamed_1a7b']],
'Notification' : [ 0x0, ['__unnamed_1a7d']],
'LowPriWrite' : [ 0x0, ['__unnamed_1a7f']],
'AsyncRead' : [ 0x0, ['__unnamed_1a83']],
} ],
'_WORK_QUEUE_ENTRY' : [ 0x70, {
'WorkQueueLinks' : [ 0x0, ['_LIST_ENTRY']],
'Parameters' : [ 0x10, ['__unnamed_1a85']],
'Function' : [ 0x68, ['unsigned char']],
} ],
'_CC_EXTERNAL_CACHE_INFO' : [ 0x30, {
'Callback' : [ 0x0, ['pointer64', ['void']]],
'DirtyPageStatistics' : [ 0x8, ['_DIRTY_PAGE_STATISTICS']],
'Links' : [ 0x20, ['_LIST_ENTRY']],
} ],
'_LOG_HANDLE_CONTEXT' : [ 0x98, {
'LogHandle' : [ 0x0, ['pointer64', ['void']]],
'FlushToLsnRoutine' : [ 0x8, ['pointer64', ['void']]],
'QueryLogHandleInfoRoutine' : [ 0x10, ['pointer64', ['void']]],
'DirtyPageStatistics' : [ 0x18, ['_DIRTY_PAGE_STATISTICS']],
'DirtyPageThresholds' : [ 0x30, ['_DIRTY_PAGE_THRESHOLDS']],
'AdditionalPagesToWrite' : [ 0x68, ['unsigned long']],
'CcLWScanDPThreshold' : [ 0x6c, ['unsigned long']],
'LargestLsnForCurrentLWScan' : [ 0x70, ['_LARGE_INTEGER']],
'RelatedFileObject' : [ 0x78, ['pointer64', ['_FILE_OBJECT']]],
'LargestLsnFileObjectKey' : [ 0x80, ['unsigned long long']],
'LastLWTimeStamp' : [ 0x88, ['_LARGE_INTEGER']],
'Flags' : [ 0x90, ['unsigned long']],
} ],
'_MBCB' : [ 0xc0, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeIsInZone' : [ 0x2, ['short']],
'PagesToWrite' : [ 0x4, ['unsigned long']],
'DirtyPages' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'BitmapRanges' : [ 0x10, ['_LIST_ENTRY']],
'ResumeWritePage' : [ 0x20, ['long long']],
'MostRecentlyDirtiedPage' : [ 0x28, ['long long']],
'BitmapRange1' : [ 0x30, ['_BITMAP_RANGE']],
'BitmapRange2' : [ 0x60, ['_BITMAP_RANGE']],
'BitmapRange3' : [ 0x90, ['_BITMAP_RANGE']],
} ],
'_BITMAP_RANGE' : [ 0x30, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'BasePage' : [ 0x10, ['long long']],
'FirstDirtyPage' : [ 0x18, ['unsigned long']],
'LastDirtyPage' : [ 0x1c, ['unsigned long']],
'DirtyPages' : [ 0x20, ['unsigned long']],
'Bitmap' : [ 0x28, ['pointer64', ['unsigned long']]],
} ],
'VACB_LEVEL_ALLOCATION_LIST' : [ 0x20, {
'VacbLevelList' : [ 0x0, ['_LIST_ENTRY']],
'VacbLevelWithBcbListHeads' : [ 0x10, ['pointer64', ['void']]],
'VacbLevelsAllocated' : [ 0x18, ['unsigned long']],
} ],
'_VACB_LEVEL_REFERENCE' : [ 0x8, {
'Reference' : [ 0x0, ['long']],
'SpecialReference' : [ 0x4, ['long']],
} ],
'_CACHE_UNINITIALIZE_EVENT' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_CACHE_UNINITIALIZE_EVENT']]],
'Event' : [ 0x8, ['_KEVENT']],
} ],
'_HEAP_LIST_LOOKUP' : [ 0x38, {
'ExtendedLookup' : [ 0x0, ['pointer64', ['_HEAP_LIST_LOOKUP']]],
'ArraySize' : [ 0x8, ['unsigned long']],
'ExtraItem' : [ 0xc, ['unsigned long']],
'ItemCount' : [ 0x10, ['unsigned long']],
'OutOfRangeItems' : [ 0x14, ['unsigned long']],
'BaseIndex' : [ 0x18, ['unsigned long']],
'ListHead' : [ 0x20, ['pointer64', ['_LIST_ENTRY']]],
'ListsInUseUlong' : [ 0x28, ['pointer64', ['unsigned long']]],
'ListHints' : [ 0x30, ['pointer64', ['pointer64', ['_LIST_ENTRY']]]],
} ],
'_HEAP' : [ 0x298, {
'Segment' : [ 0x0, ['_HEAP_SEGMENT']],
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'SegmentSignature' : [ 0x10, ['unsigned long']],
'SegmentFlags' : [ 0x14, ['unsigned long']],
'SegmentListEntry' : [ 0x18, ['_LIST_ENTRY']],
'Heap' : [ 0x28, ['pointer64', ['_HEAP']]],
'BaseAddress' : [ 0x30, ['pointer64', ['void']]],
'NumberOfPages' : [ 0x38, ['unsigned long']],
'FirstEntry' : [ 0x40, ['pointer64', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x48, ['pointer64', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x50, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x54, ['unsigned long']],
'SegmentAllocatorBackTraceIndex' : [ 0x58, ['unsigned short']],
'Reserved' : [ 0x5a, ['unsigned short']],
'UCRSegmentList' : [ 0x60, ['_LIST_ENTRY']],
'Flags' : [ 0x70, ['unsigned long']],
'ForceFlags' : [ 0x74, ['unsigned long']],
'CompatibilityFlags' : [ 0x78, ['unsigned long']],
'EncodeFlagMask' : [ 0x7c, ['unsigned long']],
'Encoding' : [ 0x80, ['_HEAP_ENTRY']],
'Interceptor' : [ 0x90, ['unsigned long']],
'VirtualMemoryThreshold' : [ 0x94, ['unsigned long']],
'Signature' : [ 0x98, ['unsigned long']],
'SegmentReserve' : [ 0xa0, ['unsigned long long']],
'SegmentCommit' : [ 0xa8, ['unsigned long long']],
'DeCommitFreeBlockThreshold' : [ 0xb0, ['unsigned long long']],
'DeCommitTotalFreeThreshold' : [ 0xb8, ['unsigned long long']],
'TotalFreeSize' : [ 0xc0, ['unsigned long long']],
'MaximumAllocationSize' : [ 0xc8, ['unsigned long long']],
'ProcessHeapsListIndex' : [ 0xd0, ['unsigned short']],
'HeaderValidateLength' : [ 0xd2, ['unsigned short']],
'HeaderValidateCopy' : [ 0xd8, ['pointer64', ['void']]],
'NextAvailableTagIndex' : [ 0xe0, ['unsigned short']],
'MaximumTagIndex' : [ 0xe2, ['unsigned short']],
'TagEntries' : [ 0xe8, ['pointer64', ['_HEAP_TAG_ENTRY']]],
'UCRList' : [ 0xf0, ['_LIST_ENTRY']],
'AlignRound' : [ 0x100, ['unsigned long long']],
'AlignMask' : [ 0x108, ['unsigned long long']],
'VirtualAllocdBlocks' : [ 0x110, ['_LIST_ENTRY']],
'SegmentList' : [ 0x120, ['_LIST_ENTRY']],
'AllocatorBackTraceIndex' : [ 0x130, ['unsigned short']],
'NonDedicatedListLength' : [ 0x134, ['unsigned long']],
'BlocksIndex' : [ 0x138, ['pointer64', ['void']]],
'UCRIndex' : [ 0x140, ['pointer64', ['void']]],
'PseudoTagEntries' : [ 0x148, ['pointer64', ['_HEAP_PSEUDO_TAG_ENTRY']]],
'FreeLists' : [ 0x150, ['_LIST_ENTRY']],
'LockVariable' : [ 0x160, ['pointer64', ['_HEAP_LOCK']]],
'CommitRoutine' : [ 0x168, ['pointer64', ['void']]],
'FrontEndHeap' : [ 0x170, ['pointer64', ['void']]],
'FrontHeapLockCount' : [ 0x178, ['unsigned short']],
'FrontEndHeapType' : [ 0x17a, ['unsigned char']],
'RequestedFrontEndHeapType' : [ 0x17b, ['unsigned char']],
'FrontEndHeapUsageData' : [ 0x180, ['pointer64', ['unsigned short']]],
'FrontEndHeapMaximumIndex' : [ 0x188, ['unsigned short']],
'FrontEndHeapStatusBitmap' : [ 0x18a, ['array', 129, ['unsigned char']]],
'Counters' : [ 0x210, ['_HEAP_COUNTERS']],
'TuningParameters' : [ 0x288, ['_HEAP_TUNING_PARAMETERS']],
} ],
'__unnamed_1af3' : [ 0x68, {
'CriticalSection' : [ 0x0, ['_RTL_CRITICAL_SECTION']],
'Resource' : [ 0x0, ['_ERESOURCE']],
} ],
'_HEAP_LOCK' : [ 0x68, {
'Lock' : [ 0x0, ['__unnamed_1af3']],
} ],
'_HEAP_ENTRY' : [ 0x10, {
'UnpackedEntry' : [ 0x0, ['_HEAP_UNPACKED_ENTRY']],
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned char']],
'SmallTagIndex' : [ 0xb, ['unsigned char']],
'SubSegmentCode' : [ 0x8, ['unsigned long']],
'PreviousSize' : [ 0xc, ['unsigned short']],
'SegmentOffset' : [ 0xe, ['unsigned char']],
'LFHFlags' : [ 0xe, ['unsigned char']],
'UnusedBytes' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
'ExtendedEntry' : [ 0x0, ['_HEAP_EXTENDED_ENTRY']],
'Reserved' : [ 0x0, ['pointer64', ['void']]],
'FunctionIndex' : [ 0x8, ['unsigned short']],
'ContextValue' : [ 0xa, ['unsigned short']],
'InterceptorValue' : [ 0x8, ['unsigned long']],
'UnusedBytesLength' : [ 0xc, ['unsigned short']],
'EntryOffset' : [ 0xe, ['unsigned char']],
'ExtendedBlockSignature' : [ 0xf, ['unsigned char']],
'ReservedForAlignment' : [ 0x0, ['pointer64', ['void']]],
'Code1' : [ 0x8, ['unsigned long']],
'Code2' : [ 0xc, ['unsigned short']],
'Code3' : [ 0xe, ['unsigned char']],
'Code4' : [ 0xf, ['unsigned char']],
'Code234' : [ 0xc, ['unsigned long']],
'AgregateCode' : [ 0x8, ['unsigned long long']],
} ],
'_HEAP_SEGMENT' : [ 0x70, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'SegmentSignature' : [ 0x10, ['unsigned long']],
'SegmentFlags' : [ 0x14, ['unsigned long']],
'SegmentListEntry' : [ 0x18, ['_LIST_ENTRY']],
'Heap' : [ 0x28, ['pointer64', ['_HEAP']]],
'BaseAddress' : [ 0x30, ['pointer64', ['void']]],
'NumberOfPages' : [ 0x38, ['unsigned long']],
'FirstEntry' : [ 0x40, ['pointer64', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x48, ['pointer64', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x50, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x54, ['unsigned long']],
'SegmentAllocatorBackTraceIndex' : [ 0x58, ['unsigned short']],
'Reserved' : [ 0x5a, ['unsigned short']],
'UCRSegmentList' : [ 0x60, ['_LIST_ENTRY']],
} ],
'_HEAP_VIRTUAL_ALLOC_ENTRY' : [ 0x40, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtraStuff' : [ 0x10, ['_HEAP_ENTRY_EXTRA']],
'CommitSize' : [ 0x20, ['unsigned long long']],
'ReserveSize' : [ 0x28, ['unsigned long long']],
'BusyBlock' : [ 0x30, ['_HEAP_ENTRY']],
} ],
'_HEAP_FREE_ENTRY' : [ 0x20, {
'HeapEntry' : [ 0x0, ['_HEAP_ENTRY']],
'UnpackedEntry' : [ 0x0, ['_HEAP_UNPACKED_ENTRY']],
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned char']],
'SmallTagIndex' : [ 0xb, ['unsigned char']],
'SubSegmentCode' : [ 0x8, ['unsigned long']],
'PreviousSize' : [ 0xc, ['unsigned short']],
'SegmentOffset' : [ 0xe, ['unsigned char']],
'LFHFlags' : [ 0xe, ['unsigned char']],
'UnusedBytes' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
'ExtendedEntry' : [ 0x0, ['_HEAP_EXTENDED_ENTRY']],
'Reserved' : [ 0x0, ['pointer64', ['void']]],
'FunctionIndex' : [ 0x8, ['unsigned short']],
'ContextValue' : [ 0xa, ['unsigned short']],
'InterceptorValue' : [ 0x8, ['unsigned long']],
'UnusedBytesLength' : [ 0xc, ['unsigned short']],
'EntryOffset' : [ 0xe, ['unsigned char']],
'ExtendedBlockSignature' : [ 0xf, ['unsigned char']],
'ReservedForAlignment' : [ 0x0, ['pointer64', ['void']]],
'Code1' : [ 0x8, ['unsigned long']],
'Code2' : [ 0xc, ['unsigned short']],
'Code3' : [ 0xe, ['unsigned char']],
'Code4' : [ 0xf, ['unsigned char']],
'Code234' : [ 0xc, ['unsigned long']],
'AgregateCode' : [ 0x8, ['unsigned long long']],
'FreeList' : [ 0x10, ['_LIST_ENTRY']],
} ],
'__unnamed_1b46' : [ 0x4, {
'DataLength' : [ 0x0, ['short']],
'TotalLength' : [ 0x2, ['short']],
} ],
'__unnamed_1b48' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1b46']],
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1b4a' : [ 0x4, {
'Type' : [ 0x0, ['short']],
'DataInfoOffset' : [ 0x2, ['short']],
} ],
'__unnamed_1b4c' : [ 0x4, {
's2' : [ 0x0, ['__unnamed_1b4a']],
'ZeroInit' : [ 0x0, ['unsigned long']],
} ],
'_PORT_MESSAGE' : [ 0x28, {
'u1' : [ 0x0, ['__unnamed_1b48']],
'u2' : [ 0x4, ['__unnamed_1b4c']],
'ClientId' : [ 0x8, ['_CLIENT_ID']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x18, ['unsigned long']],
'ClientViewSize' : [ 0x20, ['unsigned long long']],
'CallbackId' : [ 0x20, ['unsigned long']],
} ],
'_ALPC_MESSAGE_ATTRIBUTES' : [ 0x8, {
'AllocatedAttributes' : [ 0x0, ['unsigned long']],
'ValidAttributes' : [ 0x4, ['unsigned long']],
} ],
'_ALPC_HANDLE_ENTRY' : [ 0x8, {
'Object' : [ 0x0, ['pointer64', ['void']]],
} ],
'_BLOB_TYPE' : [ 0x30, {
'ResourceId' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BLOB_TYPE_UNKNOWN', 1: 'BLOB_TYPE_CONNECTION_INFO', 2: 'BLOB_TYPE_MESSAGE', 3: 'BLOB_TYPE_SECURITY_CONTEXT', 4: 'BLOB_TYPE_SECTION', 5: 'BLOB_TYPE_REGION', 6: 'BLOB_TYPE_VIEW', 7: 'BLOB_TYPE_RESERVE', 8: 'BLOB_TYPE_DIRECT_TRANSFER', 9: 'BLOB_TYPE_HANDLE_DATA', 10: 'BLOB_TYPE_MAX_ID'})]],
'PoolTag' : [ 0x4, ['unsigned long']],
'LookasideIndex' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned long']],
'Counters' : [ 0x10, ['pointer64', ['_BLOB_COUNTERS']]],
'DeleteProcedure' : [ 0x18, ['pointer64', ['void']]],
'DestroyProcedure' : [ 0x20, ['pointer64', ['void']]],
'UsualSize' : [ 0x28, ['unsigned long long']],
} ],
'__unnamed_1b67' : [ 0x1, {
'ReferenceCache' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Lookaside' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Initializing' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Deleted' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
} ],
'__unnamed_1b69' : [ 0x1, {
's1' : [ 0x0, ['__unnamed_1b67']],
'Flags' : [ 0x0, ['unsigned char']],
} ],
'_BLOB' : [ 0x30, {
'ResourceList' : [ 0x0, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x0, ['_SLIST_ENTRY']],
'u1' : [ 0x10, ['__unnamed_1b69']],
'ResourceId' : [ 0x11, ['unsigned char']],
'CachedReferences' : [ 0x12, ['short']],
'ReferenceCount' : [ 0x18, ['long long']],
'Lock' : [ 0x20, ['_EX_PUSH_LOCK']],
} ],
'__unnamed_1b7b' : [ 0x4, {
'Internal' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Secure' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
} ],
'__unnamed_1b7d' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1b7b']],
} ],
'_KALPC_SECTION' : [ 0x48, {
'SectionObject' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned long long']],
'HandleTable' : [ 0x10, ['pointer64', ['_ALPC_HANDLE_TABLE']]],
'SectionHandle' : [ 0x18, ['pointer64', ['void']]],
'OwnerProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
'OwnerPort' : [ 0x28, ['pointer64', ['_ALPC_PORT']]],
'u1' : [ 0x30, ['__unnamed_1b7d']],
'NumberOfRegions' : [ 0x34, ['unsigned long']],
'RegionListHead' : [ 0x38, ['_LIST_ENTRY']],
} ],
'__unnamed_1b86' : [ 0x4, {
'Secure' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
} ],
'__unnamed_1b88' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1b86']],
} ],
'_KALPC_REGION' : [ 0x58, {
'RegionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Section' : [ 0x10, ['pointer64', ['_KALPC_SECTION']]],
'Offset' : [ 0x18, ['unsigned long long']],
'Size' : [ 0x20, ['unsigned long long']],
'ViewSize' : [ 0x28, ['unsigned long long']],
'u1' : [ 0x30, ['__unnamed_1b88']],
'NumberOfViews' : [ 0x34, ['unsigned long']],
'ViewListHead' : [ 0x38, ['_LIST_ENTRY']],
'ReadOnlyView' : [ 0x48, ['pointer64', ['_KALPC_VIEW']]],
'ReadWriteView' : [ 0x50, ['pointer64', ['_KALPC_VIEW']]],
} ],
'__unnamed_1b8e' : [ 0x4, {
'WriteAccess' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AutoRelease' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ForceUnlink' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
} ],
'__unnamed_1b90' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1b8e']],
} ],
'_KALPC_VIEW' : [ 0x60, {
'ViewListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Region' : [ 0x10, ['pointer64', ['_KALPC_REGION']]],
'OwnerPort' : [ 0x18, ['pointer64', ['_ALPC_PORT']]],
'OwnerProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
'Address' : [ 0x28, ['pointer64', ['void']]],
'Size' : [ 0x30, ['unsigned long long']],
'SecureViewHandle' : [ 0x38, ['pointer64', ['void']]],
'WriteAccessHandle' : [ 0x40, ['pointer64', ['void']]],
'u1' : [ 0x48, ['__unnamed_1b90']],
'NumberOfOwnerMessages' : [ 0x4c, ['unsigned long']],
'ProcessViewListEntry' : [ 0x50, ['_LIST_ENTRY']],
} ],
'_ALPC_COMMUNICATION_INFO' : [ 0x48, {
'ConnectionPort' : [ 0x0, ['pointer64', ['_ALPC_PORT']]],
'ServerCommunicationPort' : [ 0x8, ['pointer64', ['_ALPC_PORT']]],
'ClientCommunicationPort' : [ 0x10, ['pointer64', ['_ALPC_PORT']]],
'CommunicationList' : [ 0x18, ['_LIST_ENTRY']],
'HandleTable' : [ 0x28, ['_ALPC_HANDLE_TABLE']],
'CloseMessage' : [ 0x40, ['pointer64', ['_KALPC_MESSAGE']]],
} ],
'__unnamed_1bae' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned long')]],
'ConnectionPending' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ConnectionRefused' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Disconnected' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Closed' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'NoFlushOnClose' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ReturnExtendedInfo' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Waitable' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'DynamicSecurity' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Wow64CompletionList' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Lpc' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'LpcToLpc' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HasCompletionList' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'HadCompletionList' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'EnableCompletionList' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
} ],
'__unnamed_1bb0' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1bae']],
'State' : [ 0x0, ['unsigned long']],
} ],
'_ALPC_PORT' : [ 0x1d8, {
'PortListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CommunicationInfo' : [ 0x10, ['pointer64', ['_ALPC_COMMUNICATION_INFO']]],
'OwnerProcess' : [ 0x18, ['pointer64', ['_EPROCESS']]],
'CompletionPort' : [ 0x20, ['pointer64', ['void']]],
'CompletionKey' : [ 0x28, ['pointer64', ['void']]],
'CompletionPacketLookaside' : [ 0x30, ['pointer64', ['_ALPC_COMPLETION_PACKET_LOOKASIDE']]],
'PortContext' : [ 0x38, ['pointer64', ['void']]],
'StaticSecurity' : [ 0x40, ['_SECURITY_CLIENT_CONTEXT']],
'IncomingQueueLock' : [ 0x88, ['_EX_PUSH_LOCK']],
'MainQueue' : [ 0x90, ['_LIST_ENTRY']],
'LargeMessageQueue' : [ 0xa0, ['_LIST_ENTRY']],
'PendingQueueLock' : [ 0xb0, ['_EX_PUSH_LOCK']],
'PendingQueue' : [ 0xb8, ['_LIST_ENTRY']],
'DirectQueueLock' : [ 0xc8, ['_EX_PUSH_LOCK']],
'DirectQueue' : [ 0xd0, ['_LIST_ENTRY']],
'WaitQueueLock' : [ 0xe0, ['_EX_PUSH_LOCK']],
'WaitQueue' : [ 0xe8, ['_LIST_ENTRY']],
'Semaphore' : [ 0xf8, ['pointer64', ['_KSEMAPHORE']]],
'DummyEvent' : [ 0xf8, ['pointer64', ['_KEVENT']]],
'PortAttributes' : [ 0x100, ['_ALPC_PORT_ATTRIBUTES']],
'ResourceListLock' : [ 0x148, ['_EX_PUSH_LOCK']],
'ResourceListHead' : [ 0x150, ['_LIST_ENTRY']],
'PortObjectLock' : [ 0x160, ['_EX_PUSH_LOCK']],
'CompletionList' : [ 0x168, ['pointer64', ['_ALPC_COMPLETION_LIST']]],
'CallbackObject' : [ 0x170, ['pointer64', ['_CALLBACK_OBJECT']]],
'CallbackContext' : [ 0x178, ['pointer64', ['void']]],
'CanceledQueue' : [ 0x180, ['_LIST_ENTRY']],
'SequenceNo' : [ 0x190, ['long']],
'ReferenceNo' : [ 0x194, ['long']],
'ReferenceNoWait' : [ 0x198, ['pointer64', ['_PALPC_PORT_REFERENCE_WAIT_BLOCK']]],
'u1' : [ 0x1a0, ['__unnamed_1bb0']],
'TargetQueuePort' : [ 0x1a8, ['pointer64', ['_ALPC_PORT']]],
'TargetSequencePort' : [ 0x1b0, ['pointer64', ['_ALPC_PORT']]],
'CachedMessage' : [ 0x1b8, ['pointer64', ['_KALPC_MESSAGE']]],
'MainQueueLength' : [ 0x1c0, ['unsigned long']],
'LargeMessageQueueLength' : [ 0x1c4, ['unsigned long']],
'PendingQueueLength' : [ 0x1c8, ['unsigned long']],
'DirectQueueLength' : [ 0x1cc, ['unsigned long']],
'CanceledQueueLength' : [ 0x1d0, ['unsigned long']],
'WaitQueueLength' : [ 0x1d4, ['unsigned long']],
} ],
'_ALPC_COMPLETION_LIST' : [ 0xa0, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'OwnerProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'CompletionListLock' : [ 0x18, ['_EX_PUSH_LOCK']],
'Mdl' : [ 0x20, ['pointer64', ['_MDL']]],
'UserVa' : [ 0x28, ['pointer64', ['void']]],
'UserLimit' : [ 0x30, ['pointer64', ['void']]],
'DataUserVa' : [ 0x38, ['pointer64', ['void']]],
'SystemVa' : [ 0x40, ['pointer64', ['void']]],
'TotalSize' : [ 0x48, ['unsigned long long']],
'Header' : [ 0x50, ['pointer64', ['_ALPC_COMPLETION_LIST_HEADER']]],
'List' : [ 0x58, ['pointer64', ['void']]],
'ListSize' : [ 0x60, ['unsigned long long']],
'Bitmap' : [ 0x68, ['pointer64', ['void']]],
'BitmapSize' : [ 0x70, ['unsigned long long']],
'Data' : [ 0x78, ['pointer64', ['void']]],
'DataSize' : [ 0x80, ['unsigned long long']],
'BitmapLimit' : [ 0x88, ['unsigned long']],
'BitmapNextHint' : [ 0x8c, ['unsigned long']],
'ConcurrencyCount' : [ 0x90, ['unsigned long']],
'AttributeFlags' : [ 0x94, ['unsigned long']],
'AttributeSize' : [ 0x98, ['unsigned long']],
} ],
'_OBJECT_ATTRIBUTES' : [ 0x30, {
'Length' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x8, ['pointer64', ['void']]],
'ObjectName' : [ 0x10, ['pointer64', ['_UNICODE_STRING']]],
'Attributes' : [ 0x18, ['unsigned long']],
'SecurityDescriptor' : [ 0x20, ['pointer64', ['void']]],
'SecurityQualityOfService' : [ 0x28, ['pointer64', ['void']]],
} ],
'_OBJECT_TYPE' : [ 0xd8, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'Name' : [ 0x10, ['_UNICODE_STRING']],
'DefaultObject' : [ 0x20, ['pointer64', ['void']]],
'Index' : [ 0x28, ['unsigned char']],
'TotalNumberOfObjects' : [ 0x2c, ['unsigned long']],
'TotalNumberOfHandles' : [ 0x30, ['unsigned long']],
'HighWaterNumberOfObjects' : [ 0x34, ['unsigned long']],
'HighWaterNumberOfHandles' : [ 0x38, ['unsigned long']],
'TypeInfo' : [ 0x40, ['_OBJECT_TYPE_INITIALIZER']],
'TypeLock' : [ 0xb8, ['_EX_PUSH_LOCK']],
'Key' : [ 0xc0, ['unsigned long']],
'CallbackList' : [ 0xc8, ['_LIST_ENTRY']],
} ],
'_PALPC_PORT_REFERENCE_WAIT_BLOCK' : [ 0x20, {
'DesiredReferenceNoEvent' : [ 0x0, ['_KEVENT']],
'DesiredReferenceNo' : [ 0x18, ['long']],
} ],
'_PORT_MESSAGE32' : [ 0x18, {
'u1' : [ 0x0, ['__unnamed_1b48']],
'u2' : [ 0x4, ['__unnamed_1b4c']],
'ClientId' : [ 0x8, ['_CLIENT_ID32']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x10, ['unsigned long']],
'ClientViewSize' : [ 0x14, ['unsigned long']],
'CallbackId' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_1bd6' : [ 0x4, {
'QueueType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'QueuePortType' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 7, native_type='unsigned long')]],
'Canceled' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Ready' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'ReleaseMessage' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'SharedQuota' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ReplyWaitReply' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'OwnerPortReference' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'ReserveReference' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'ReceiverReference' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'ViewAttributeRetrieved' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'InDispatch' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
} ],
'__unnamed_1bd8' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1bd6']],
'State' : [ 0x0, ['unsigned long']],
} ],
'_KALPC_MESSAGE' : [ 0x108, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'PortQueue' : [ 0x10, ['pointer64', ['_ALPC_PORT']]],
'OwnerPort' : [ 0x18, ['pointer64', ['_ALPC_PORT']]],
'WaitingThread' : [ 0x20, ['pointer64', ['_ETHREAD']]],
'u1' : [ 0x28, ['__unnamed_1bd8']],
'SequenceNo' : [ 0x2c, ['long']],
'QuotaProcess' : [ 0x30, ['pointer64', ['_EPROCESS']]],
'QuotaBlock' : [ 0x30, ['pointer64', ['void']]],
'CancelSequencePort' : [ 0x38, ['pointer64', ['_ALPC_PORT']]],
'CancelQueuePort' : [ 0x40, ['pointer64', ['_ALPC_PORT']]],
'CancelSequenceNo' : [ 0x48, ['long']],
'CancelListEntry' : [ 0x50, ['_LIST_ENTRY']],
'Reserve' : [ 0x60, ['pointer64', ['_KALPC_RESERVE']]],
'MessageAttributes' : [ 0x68, ['_KALPC_MESSAGE_ATTRIBUTES']],
'DataUserVa' : [ 0xa8, ['pointer64', ['void']]],
'CommunicationInfo' : [ 0xb0, ['pointer64', ['_ALPC_COMMUNICATION_INFO']]],
'ConnectionPort' : [ 0xb8, ['pointer64', ['_ALPC_PORT']]],
'ServerThread' : [ 0xc0, ['pointer64', ['_ETHREAD']]],
'WakeReference' : [ 0xc8, ['pointer64', ['void']]],
'ExtensionBuffer' : [ 0xd0, ['pointer64', ['void']]],
'ExtensionBufferSize' : [ 0xd8, ['unsigned long long']],
'PortMessage' : [ 0xe0, ['_PORT_MESSAGE']],
} ],
'_ALPC_DISPATCH_CONTEXT' : [ 0x40, {
'PortObject' : [ 0x0, ['pointer64', ['_ALPC_PORT']]],
'Message' : [ 0x8, ['pointer64', ['_KALPC_MESSAGE']]],
'CommunicationInfo' : [ 0x10, ['pointer64', ['_ALPC_COMMUNICATION_INFO']]],
'TargetThread' : [ 0x18, ['pointer64', ['_ETHREAD']]],
'TargetPort' : [ 0x20, ['pointer64', ['_ALPC_PORT']]],
'DirectEvent' : [ 0x28, ['_KALPC_DIRECT_EVENT']],
'Flags' : [ 0x30, ['unsigned long']],
'TotalLength' : [ 0x34, ['unsigned short']],
'Type' : [ 0x36, ['unsigned short']],
'DataInfoOffset' : [ 0x38, ['unsigned short']],
'SignalCompletion' : [ 0x3a, ['unsigned char']],
'PostedToCompletionList' : [ 0x3b, ['unsigned char']],
} ],
'_REMOTE_PORT_VIEW' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'ViewSize' : [ 0x8, ['unsigned long long']],
'ViewBase' : [ 0x10, ['pointer64', ['void']]],
} ],
'_KALPC_RESERVE' : [ 0x28, {
'OwnerPort' : [ 0x0, ['pointer64', ['_ALPC_PORT']]],
'HandleTable' : [ 0x8, ['pointer64', ['_ALPC_HANDLE_TABLE']]],
'Handle' : [ 0x10, ['pointer64', ['void']]],
'Message' : [ 0x18, ['pointer64', ['_KALPC_MESSAGE']]],
'Active' : [ 0x20, ['long']],
} ],
'_KALPC_HANDLE_DATA' : [ 0x30, {
'Flags' : [ 0x0, ['unsigned long']],
'ObjectType' : [ 0x4, ['unsigned long']],
'DuplicateContext' : [ 0x8, ['_OB_DUPLICATE_OBJECT_STATE']],
} ],
'_KALPC_MESSAGE_ATTRIBUTES' : [ 0x40, {
'ClientContext' : [ 0x0, ['pointer64', ['void']]],
'ServerContext' : [ 0x8, ['pointer64', ['void']]],
'PortContext' : [ 0x10, ['pointer64', ['void']]],
'CancelPortContext' : [ 0x18, ['pointer64', ['void']]],
'SecurityData' : [ 0x20, ['pointer64', ['_KALPC_SECURITY_DATA']]],
'View' : [ 0x28, ['pointer64', ['_KALPC_VIEW']]],
'HandleData' : [ 0x30, ['pointer64', ['_KALPC_HANDLE_DATA']]],
'DirectEvent' : [ 0x38, ['_KALPC_DIRECT_EVENT']],
} ],
'__unnamed_1c1c' : [ 0x4, {
'Revoked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Impersonated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
} ],
'__unnamed_1c1e' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1c1c']],
} ],
'_KALPC_SECURITY_DATA' : [ 0x70, {
'HandleTable' : [ 0x0, ['pointer64', ['_ALPC_HANDLE_TABLE']]],
'ContextHandle' : [ 0x8, ['pointer64', ['void']]],
'OwningProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'OwnerPort' : [ 0x18, ['pointer64', ['_ALPC_PORT']]],
'DynamicSecurity' : [ 0x20, ['_SECURITY_CLIENT_CONTEXT']],
'u1' : [ 0x68, ['__unnamed_1c1e']],
} ],
'_KALPC_DIRECT_EVENT' : [ 0x8, {
'Event' : [ 0x0, ['unsigned long long']],
'Referenced' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 64, native_type='unsigned long long')]],
} ],
'_IO_MINI_COMPLETION_PACKET_USER' : [ 0x50, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'PacketType' : [ 0x10, ['unsigned long']],
'KeyContext' : [ 0x18, ['pointer64', ['void']]],
'ApcContext' : [ 0x20, ['pointer64', ['void']]],
'IoStatus' : [ 0x28, ['long']],
'IoStatusInformation' : [ 0x30, ['unsigned long long']],
'MiniPacketCallback' : [ 0x38, ['pointer64', ['void']]],
'Context' : [ 0x40, ['pointer64', ['void']]],
'Allocated' : [ 0x48, ['unsigned char']],
} ],
'_IOP_IRP_EXTENSION' : [ 0x30, {
'ExtensionFlags' : [ 0x0, ['unsigned short']],
'Allocated' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'PropagateId' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'SpareBits' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 16, native_type='unsigned short')]],
'TypesAllocated' : [ 0x2, ['unsigned short']],
'GenericExtension' : [ 0x4, ['array', 4, ['unsigned char']]],
'VerifierContext' : [ 0x8, ['pointer64', ['void']]],
'ActivityId' : [ 0x10, ['_GUID']],
'Timestamp' : [ 0x20, ['_LARGE_INTEGER']],
'ZeroingOffset' : [ 0x20, ['unsigned long']],
'FsTrackOffsetBlob' : [ 0x20, ['pointer64', ['_IO_IRP_EXT_TRACK_OFFSET_HEADER']]],
'FsTrackedOffset' : [ 0x28, ['long long']],
} ],
'_DRIVER_OBJECT' : [ 0x150, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x10, ['unsigned long']],
'DriverStart' : [ 0x18, ['pointer64', ['void']]],
'DriverSize' : [ 0x20, ['unsigned long']],
'DriverSection' : [ 0x28, ['pointer64', ['void']]],
'DriverExtension' : [ 0x30, ['pointer64', ['_DRIVER_EXTENSION']]],
'DriverName' : [ 0x38, ['_UNICODE_STRING']],
'HardwareDatabase' : [ 0x48, ['pointer64', ['_UNICODE_STRING']]],
'FastIoDispatch' : [ 0x50, ['pointer64', ['_FAST_IO_DISPATCH']]],
'DriverInit' : [ 0x58, ['pointer64', ['void']]],
'DriverStartIo' : [ 0x60, ['pointer64', ['void']]],
'DriverUnload' : [ 0x68, ['pointer64', ['void']]],
'MajorFunction' : [ 0x70, ['array', 28, ['pointer64', ['void']]]],
} ],
'_FILE_SEGMENT_ELEMENT' : [ 0x8, {
'Buffer' : [ 0x0, ['pointer64', ['void']]],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_RELATIVE_SYMLINK_INFO' : [ 0x20, {
'ExposedNamespaceLength' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'DeviceNameLength' : [ 0x4, ['unsigned short']],
'Reserved' : [ 0x6, ['unsigned short']],
'InteriorMountPoint' : [ 0x8, ['pointer64', ['_RELATIVE_SYMLINK_INFO']]],
'OpenedName' : [ 0x10, ['_UNICODE_STRING']],
} ],
'_ECP_LIST' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'EcpList' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_IOP_FILE_OBJECT_EXTENSION' : [ 0x50, {
'FoExtFlags' : [ 0x0, ['unsigned long']],
'FoExtPerTypeExtension' : [ 0x8, ['array', 8, ['pointer64', ['void']]]],
'FoIoPriorityHint' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'IopIoPriorityNotSet', 1: 'IopIoPriorityVeryLow', 2: 'IopIoPriorityLow', 3: 'IopIoPriorityNormal', 4: 'IopIoPriorityHigh', 5: 'IopIoPriorityCritical', 6: 'MaxIopIoPriorityTypes'})]],
} ],
'_OPEN_PACKET' : [ 0xc0, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'FinalStatus' : [ 0x10, ['long']],
'Information' : [ 0x18, ['unsigned long long']],
'ParseCheck' : [ 0x20, ['unsigned long']],
'RelatedFileObject' : [ 0x28, ['pointer64', ['_FILE_OBJECT']]],
'ReferencedDeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
'OriginalAttributes' : [ 0x30, ['pointer64', ['_OBJECT_ATTRIBUTES']]],
'AllocationSize' : [ 0x38, ['_LARGE_INTEGER']],
'CreateOptions' : [ 0x40, ['unsigned long']],
'FileAttributes' : [ 0x44, ['unsigned short']],
'ShareAccess' : [ 0x46, ['unsigned short']],
'EaBuffer' : [ 0x48, ['pointer64', ['void']]],
'EaLength' : [ 0x50, ['unsigned long']],
'Options' : [ 0x54, ['unsigned long']],
'Disposition' : [ 0x58, ['unsigned long']],
'BasicInformation' : [ 0x60, ['pointer64', ['_FILE_BASIC_INFORMATION']]],
'NetworkInformation' : [ 0x68, ['pointer64', ['_FILE_NETWORK_OPEN_INFORMATION']]],
'CreateFileType' : [ 0x70, ['Enumeration', dict(target = 'long', choices = {0: 'CreateFileTypeNone', 1: 'CreateFileTypeNamedPipe', 2: 'CreateFileTypeMailslot'})]],
'MailslotOrPipeParameters' : [ 0x78, ['pointer64', ['void']]],
'Override' : [ 0x80, ['unsigned char']],
'QueryOnly' : [ 0x81, ['unsigned char']],
'DeleteOnly' : [ 0x82, ['unsigned char']],
'FullAttributes' : [ 0x83, ['unsigned char']],
'LocalFileObject' : [ 0x88, ['pointer64', ['_DUMMY_FILE_OBJECT']]],
'InternalFlags' : [ 0x90, ['unsigned long']],
'AccessMode' : [ 0x94, ['unsigned char']],
'DriverCreateContext' : [ 0x98, ['_IO_DRIVER_CREATE_CONTEXT']],
} ],
'_ETW_SYSTEMTIME' : [ 0x10, {
'Year' : [ 0x0, ['unsigned short']],
'Month' : [ 0x2, ['unsigned short']],
'DayOfWeek' : [ 0x4, ['unsigned short']],
'Day' : [ 0x6, ['unsigned short']],
'Hour' : [ 0x8, ['unsigned short']],
'Minute' : [ 0xa, ['unsigned short']],
'Second' : [ 0xc, ['unsigned short']],
'Milliseconds' : [ 0xe, ['unsigned short']],
} ],
'_TIME_FIELDS' : [ 0x10, {
'Year' : [ 0x0, ['short']],
'Month' : [ 0x2, ['short']],
'Day' : [ 0x4, ['short']],
'Hour' : [ 0x6, ['short']],
'Minute' : [ 0x8, ['short']],
'Second' : [ 0xa, ['short']],
'Milliseconds' : [ 0xc, ['short']],
'Weekday' : [ 0xe, ['short']],
} ],
'__unnamed_1ce5' : [ 0x4, {
'MajorVersion' : [ 0x0, ['unsigned char']],
'MinorVersion' : [ 0x1, ['unsigned char']],
'SubVersion' : [ 0x2, ['unsigned char']],
'SubMinorVersion' : [ 0x3, ['unsigned char']],
} ],
'_TRACE_LOGFILE_HEADER' : [ 0x118, {
'BufferSize' : [ 0x0, ['unsigned long']],
'Version' : [ 0x4, ['unsigned long']],
'VersionDetail' : [ 0x4, ['__unnamed_1ce5']],
'ProviderVersion' : [ 0x8, ['unsigned long']],
'NumberOfProcessors' : [ 0xc, ['unsigned long']],
'EndTime' : [ 0x10, ['_LARGE_INTEGER']],
'TimerResolution' : [ 0x18, ['unsigned long']],
'MaximumFileSize' : [ 0x1c, ['unsigned long']],
'LogFileMode' : [ 0x20, ['unsigned long']],
'BuffersWritten' : [ 0x24, ['unsigned long']],
'LogInstanceGuid' : [ 0x28, ['_GUID']],
'StartBuffers' : [ 0x28, ['unsigned long']],
'PointerSize' : [ 0x2c, ['unsigned long']],
'EventsLost' : [ 0x30, ['unsigned long']],
'CpuSpeedInMHz' : [ 0x34, ['unsigned long']],
'LoggerName' : [ 0x38, ['pointer64', ['unsigned short']]],
'LogFileName' : [ 0x40, ['pointer64', ['unsigned short']]],
'TimeZone' : [ 0x48, ['_RTL_TIME_ZONE_INFORMATION']],
'BootTime' : [ 0xf8, ['_LARGE_INTEGER']],
'PerfFreq' : [ 0x100, ['_LARGE_INTEGER']],
'StartTime' : [ 0x108, ['_LARGE_INTEGER']],
'ReservedFlags' : [ 0x110, ['unsigned long']],
'BuffersLost' : [ 0x114, ['unsigned long']],
} ],
'_WMI_LOGGER_CONTEXT' : [ 0x390, {
'LoggerId' : [ 0x0, ['unsigned long']],
'BufferSize' : [ 0x4, ['unsigned long']],
'MaximumEventSize' : [ 0x8, ['unsigned long']],
'LoggerMode' : [ 0xc, ['unsigned long']],
'AcceptNewEvents' : [ 0x10, ['long']],
'EventMarker' : [ 0x14, ['array', 2, ['unsigned long']]],
'ErrorMarker' : [ 0x1c, ['unsigned long']],
'SizeMask' : [ 0x20, ['unsigned long']],
'GetCpuClock' : [ 0x28, ['pointer64', ['void']]],
'LoggerThread' : [ 0x30, ['pointer64', ['_ETHREAD']]],
'LoggerStatus' : [ 0x38, ['long']],
'FailureReason' : [ 0x3c, ['unsigned long']],
'BufferQueue' : [ 0x40, ['_ETW_BUFFER_QUEUE']],
'OverflowQueue' : [ 0x58, ['_ETW_BUFFER_QUEUE']],
'GlobalList' : [ 0x70, ['_LIST_ENTRY']],
'ProviderBinaryList' : [ 0x80, ['_LIST_ENTRY']],
'BatchedBufferList' : [ 0x90, ['pointer64', ['_WMI_BUFFER_HEADER']]],
'CurrentBuffer' : [ 0x90, ['_EX_FAST_REF']],
'LoggerName' : [ 0x98, ['_UNICODE_STRING']],
'LogFileName' : [ 0xa8, ['_UNICODE_STRING']],
'LogFilePattern' : [ 0xb8, ['_UNICODE_STRING']],
'NewLogFileName' : [ 0xc8, ['_UNICODE_STRING']],
'ClockType' : [ 0xd8, ['unsigned long']],
'LastFlushedBuffer' : [ 0xdc, ['unsigned long']],
'FlushTimer' : [ 0xe0, ['unsigned long']],
'FlushThreshold' : [ 0xe4, ['unsigned long']],
'ByteOffset' : [ 0xe8, ['_LARGE_INTEGER']],
'MinimumBuffers' : [ 0xf0, ['unsigned long']],
'BuffersAvailable' : [ 0xf4, ['long']],
'NumberOfBuffers' : [ 0xf8, ['long']],
'MaximumBuffers' : [ 0xfc, ['unsigned long']],
'EventsLost' : [ 0x100, ['unsigned long']],
'PeakBuffersCount' : [ 0x104, ['long']],
'BuffersWritten' : [ 0x108, ['unsigned long']],
'LogBuffersLost' : [ 0x10c, ['unsigned long']],
'RealTimeBuffersDelivered' : [ 0x110, ['unsigned long']],
'RealTimeBuffersLost' : [ 0x114, ['unsigned long']],
'SequencePtr' : [ 0x118, ['pointer64', ['long']]],
'LocalSequence' : [ 0x120, ['unsigned long']],
'InstanceGuid' : [ 0x124, ['_GUID']],
'MaximumFileSize' : [ 0x134, ['unsigned long']],
'FileCounter' : [ 0x138, ['long']],
'PoolType' : [ 0x13c, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPoolBase', 1: 'PagedPool', 2: 'NonPagedPoolBaseMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolBaseCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolBaseCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 516: 'NonPagedPoolNxCacheAligned', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 512: 'NonPagedPoolNx', 544: 'NonPagedPoolSessionNx', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'ReferenceTime' : [ 0x140, ['_ETW_REF_CLOCK']],
'CollectionOn' : [ 0x150, ['long']],
'ProviderInfoSize' : [ 0x154, ['unsigned long']],
'Consumers' : [ 0x158, ['_LIST_ENTRY']],
'NumConsumers' : [ 0x168, ['unsigned long']],
'TransitionConsumer' : [ 0x170, ['pointer64', ['_ETW_REALTIME_CONSUMER']]],
'RealtimeLogfileHandle' : [ 0x178, ['pointer64', ['void']]],
'RealtimeLogfileName' : [ 0x180, ['_UNICODE_STRING']],
'RealtimeWriteOffset' : [ 0x190, ['_LARGE_INTEGER']],
'RealtimeReadOffset' : [ 0x198, ['_LARGE_INTEGER']],
'RealtimeLogfileSize' : [ 0x1a0, ['_LARGE_INTEGER']],
'RealtimeLogfileUsage' : [ 0x1a8, ['unsigned long long']],
'RealtimeMaximumFileSize' : [ 0x1b0, ['unsigned long long']],
'RealtimeBuffersSaved' : [ 0x1b8, ['unsigned long']],
'RealtimeReferenceTime' : [ 0x1c0, ['_ETW_REF_CLOCK']],
'NewRTEventsLost' : [ 0x1d0, ['Enumeration', dict(target = 'long', choices = {0: 'EtwRtEventNoLoss', 1: 'EtwRtEventLost', 2: 'EtwRtBufferLost', 3: 'EtwRtBackupLost', 4: 'EtwRtEventLossMax'})]],
'LoggerEvent' : [ 0x1d8, ['_KEVENT']],
'FlushEvent' : [ 0x1f0, ['_KEVENT']],
'FlushTimeOutTimer' : [ 0x208, ['_KTIMER']],
'LoggerDpc' : [ 0x248, ['_KDPC']],
'LoggerMutex' : [ 0x288, ['_KMUTANT']],
'LoggerLock' : [ 0x2c0, ['_EX_PUSH_LOCK']],
'BufferListSpinLock' : [ 0x2c8, ['unsigned long long']],
'BufferListPushLock' : [ 0x2c8, ['_EX_PUSH_LOCK']],
'ClientSecurityContext' : [ 0x2d0, ['_SECURITY_CLIENT_CONTEXT']],
'TokenAccessInformation' : [ 0x318, ['pointer64', ['_TOKEN_ACCESS_INFORMATION']]],
'SecurityDescriptor' : [ 0x320, ['_EX_FAST_REF']],
'StartTime' : [ 0x328, ['_LARGE_INTEGER']],
'LogFileHandle' : [ 0x330, ['pointer64', ['void']]],
'BufferSequenceNumber' : [ 0x338, ['long long']],
'Flags' : [ 0x340, ['unsigned long']],
'Persistent' : [ 0x340, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AutoLogger' : [ 0x340, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'FsReady' : [ 0x340, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'RealTime' : [ 0x340, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow' : [ 0x340, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'KernelTrace' : [ 0x340, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'NoMoreEnable' : [ 0x340, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'StackTracing' : [ 0x340, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ErrorLogged' : [ 0x340, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'RealtimeLoggerContextFreed' : [ 0x340, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'PebsTracing' : [ 0x340, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'PmcCounters' : [ 0x340, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageAlignBuffers' : [ 0x340, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'StackLookasideListAllocated' : [ 0x340, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'SecurityTrace' : [ 0x340, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'SpareFlags1' : [ 0x340, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'SystemLoggerIndex' : [ 0x340, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long')]],
'StackCaching' : [ 0x340, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'SpareFlags2' : [ 0x340, ['BitField', dict(start_bit = 25, end_bit = 32, native_type='unsigned long')]],
'RequestFlag' : [ 0x344, ['unsigned long']],
'DbgRequestNewFile' : [ 0x344, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DbgRequestUpdateFile' : [ 0x344, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'DbgRequestFlush' : [ 0x344, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DbgRequestDisableRealtime' : [ 0x344, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'DbgRequestDisconnectConsumer' : [ 0x344, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DbgRequestConnectConsumer' : [ 0x344, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'DbgRequestNotifyConsumer' : [ 0x344, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'DbgRequestUpdateHeader' : [ 0x344, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'DbgRequestDeferredFlush' : [ 0x344, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'DbgRequestDeferredFlushTimer' : [ 0x344, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'DbgRequestFlushTimer' : [ 0x344, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'DbgRequestUpdateDebugger' : [ 0x344, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'DbgSpareRequestFlags' : [ 0x344, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
'HookIdMap' : [ 0x348, ['_RTL_BITMAP']],
'StackCache' : [ 0x358, ['pointer64', ['_ETW_STACK_CACHE']]],
'PmcData' : [ 0x360, ['pointer64', ['_ETW_PMC_SUPPORT']]],
'WinRtProviderBinaryList' : [ 0x368, ['_LIST_ENTRY']],
'ScratchArray' : [ 0x378, ['pointer64', ['pointer64', ['_WMI_BUFFER_HEADER']]]],
'DisallowedGuids' : [ 0x380, ['_DISALLOWED_GUIDS']],
} ],
'_ETW_PMC_SUPPORT' : [ 0x28, {
'Source' : [ 0x0, ['array', -16, ['Enumeration', dict(target = 'long', choices = {0: 'ProfileTime', 1: 'ProfileAlignmentFixup', 2: 'ProfileTotalIssues', 3: 'ProfilePipelineDry', 4: 'ProfileLoadInstructions', 5: 'ProfilePipelineFrozen', 6: 'ProfileBranchInstructions', 7: 'ProfileTotalNonissues', 8: 'ProfileDcacheMisses', 9: 'ProfileIcacheMisses', 10: 'ProfileCacheMisses', 11: 'ProfileBranchMispredictions', 12: 'ProfileStoreInstructions', 13: 'ProfileFpInstructions', 14: 'ProfileIntegerInstructions', 15: 'Profile2Issue', 16: 'Profile3Issue', 17: 'Profile4Issue', 18: 'ProfileSpecialInstructions', 19: 'ProfileTotalCycles', 20: 'ProfileIcacheIssues', 21: 'ProfileDcacheAccesses', 22: 'ProfileMemoryBarrierCycles', 23: 'ProfileLoadLinkedIssues', 24: 'ProfileMaximum'})]]],
'HookIdCount' : [ 0x10, ['unsigned long']],
'HookId' : [ 0x14, ['array', 4, ['unsigned short']]],
'CountersCount' : [ 0x1c, ['unsigned long']],
'ProcessorCtrs' : [ 0x20, ['array', 1, ['pointer64', ['_HAL_PMC_COUNTERS']]]],
} ],
'_ETW_SILODRIVERSTATE' : [ 0x13a8, {
'EtwpSecurityProviderGuidEntry' : [ 0x0, ['_ETW_GUID_ENTRY']],
'EtwpLoggerRundown' : [ 0x190, ['array', 64, ['pointer64', ['_EX_RUNDOWN_REF_CACHE_AWARE']]]],
'WmipLoggerContext' : [ 0x390, ['array', 64, ['pointer64', ['_WMI_LOGGER_CONTEXT']]]],
'EtwpGuidHashTable' : [ 0x590, ['array', 64, ['_ETW_HASH_BUCKET']]],
'EtwpSecurityLoggers' : [ 0x1390, ['array', 8, ['unsigned short']]],
'EtwpSecurityProviderEnableMask' : [ 0x13a0, ['unsigned char']],
'EtwpShutdownInProgress' : [ 0x13a1, ['unsigned char']],
'EtwpSecurityProviderPID' : [ 0x13a4, ['unsigned long']],
} ],
'_EX_RUNDOWN_REF_CACHE_AWARE' : [ 0x18, {
'RunRefs' : [ 0x0, ['pointer64', ['_EX_RUNDOWN_REF']]],
'PoolToFree' : [ 0x8, ['pointer64', ['void']]],
'RunRefSize' : [ 0x10, ['unsigned long']],
'Number' : [ 0x14, ['unsigned long']],
} ],
'_ETW_LOGGER_HANDLE' : [ 0x1, {
'DereferenceAndLeave' : [ 0x0, ['unsigned char']],
} ],
'_LUID_AND_ATTRIBUTES' : [ 0xc, {
'Luid' : [ 0x0, ['_LUID']],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_TOKEN' : [ 0x480, {
'TokenSource' : [ 0x0, ['_TOKEN_SOURCE']],
'TokenId' : [ 0x10, ['_LUID']],
'AuthenticationId' : [ 0x18, ['_LUID']],
'ParentTokenId' : [ 0x20, ['_LUID']],
'ExpirationTime' : [ 0x28, ['_LARGE_INTEGER']],
'TokenLock' : [ 0x30, ['pointer64', ['_ERESOURCE']]],
'ModifiedId' : [ 0x38, ['_LUID']],
'Privileges' : [ 0x40, ['_SEP_TOKEN_PRIVILEGES']],
'AuditPolicy' : [ 0x58, ['_SEP_AUDIT_POLICY']],
'SessionId' : [ 0x78, ['unsigned long']],
'UserAndGroupCount' : [ 0x7c, ['unsigned long']],
'RestrictedSidCount' : [ 0x80, ['unsigned long']],
'VariableLength' : [ 0x84, ['unsigned long']],
'DynamicCharged' : [ 0x88, ['unsigned long']],
'DynamicAvailable' : [ 0x8c, ['unsigned long']],
'DefaultOwnerIndex' : [ 0x90, ['unsigned long']],
'UserAndGroups' : [ 0x98, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'RestrictedSids' : [ 0xa0, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'PrimaryGroup' : [ 0xa8, ['pointer64', ['void']]],
'DynamicPart' : [ 0xb0, ['pointer64', ['unsigned long']]],
'DefaultDacl' : [ 0xb8, ['pointer64', ['_ACL']]],
'TokenType' : [ 0xc0, ['Enumeration', dict(target = 'long', choices = {1: 'TokenPrimary', 2: 'TokenImpersonation'})]],
'ImpersonationLevel' : [ 0xc4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'TokenFlags' : [ 0xc8, ['unsigned long']],
'TokenInUse' : [ 0xcc, ['unsigned char']],
'IntegrityLevelIndex' : [ 0xd0, ['unsigned long']],
'MandatoryPolicy' : [ 0xd4, ['unsigned long']],
'LogonSession' : [ 0xd8, ['pointer64', ['_SEP_LOGON_SESSION_REFERENCES']]],
'OriginatingLogonSession' : [ 0xe0, ['_LUID']],
'SidHash' : [ 0xe8, ['_SID_AND_ATTRIBUTES_HASH']],
'RestrictedSidHash' : [ 0x1f8, ['_SID_AND_ATTRIBUTES_HASH']],
'pSecurityAttributes' : [ 0x308, ['pointer64', ['_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION']]],
'Package' : [ 0x310, ['pointer64', ['void']]],
'Capabilities' : [ 0x318, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'CapabilityCount' : [ 0x320, ['unsigned long']],
'CapabilitiesHash' : [ 0x328, ['_SID_AND_ATTRIBUTES_HASH']],
'LowboxNumberEntry' : [ 0x438, ['pointer64', ['_SEP_LOWBOX_NUMBER_ENTRY']]],
'LowboxHandlesEntry' : [ 0x440, ['pointer64', ['_SEP_LOWBOX_HANDLES_ENTRY']]],
'pClaimAttributes' : [ 0x448, ['pointer64', ['_AUTHZBASEP_CLAIM_ATTRIBUTES_COLLECTION']]],
'TrustLevelSid' : [ 0x450, ['pointer64', ['void']]],
'TrustLinkedToken' : [ 0x458, ['pointer64', ['_TOKEN']]],
'IntegrityLevelSidValue' : [ 0x460, ['pointer64', ['void']]],
'TokenSidValues' : [ 0x468, ['pointer64', ['_SEP_SID_VALUES_BLOCK']]],
'IndexEntry' : [ 0x470, ['pointer64', ['_SEP_LUID_TO_INDEX_MAP_ENTRY']]],
'VariablePart' : [ 0x478, ['unsigned long long']],
} ],
'_SEP_LOGON_SESSION_REFERENCES' : [ 0xb0, {
'Next' : [ 0x0, ['pointer64', ['_SEP_LOGON_SESSION_REFERENCES']]],
'LogonId' : [ 0x8, ['_LUID']],
'BuddyLogonId' : [ 0x10, ['_LUID']],
'ReferenceCount' : [ 0x18, ['long long']],
'Flags' : [ 0x20, ['unsigned long']],
'pDeviceMap' : [ 0x28, ['pointer64', ['_DEVICE_MAP']]],
'Token' : [ 0x30, ['pointer64', ['void']]],
'AccountName' : [ 0x38, ['_UNICODE_STRING']],
'AuthorityName' : [ 0x48, ['_UNICODE_STRING']],
'LowBoxHandlesTable' : [ 0x58, ['_SEP_LOWBOX_HANDLES_TABLE']],
'SharedDataLock' : [ 0x68, ['_EX_PUSH_LOCK']],
'SharedClaimAttributes' : [ 0x70, ['pointer64', ['_AUTHZBASEP_CLAIM_ATTRIBUTES_COLLECTION']]],
'SharedSidValues' : [ 0x78, ['pointer64', ['_SEP_SID_VALUES_BLOCK']]],
'RevocationBlock' : [ 0x80, ['_OB_HANDLE_REVOCATION_BLOCK']],
'ServerSilo' : [ 0xa0, ['pointer64', ['_EJOB']]],
'SiblingAuthId' : [ 0xa8, ['_LUID']],
} ],
'_OBJECT_HEADER' : [ 0x38, {
'PointerCount' : [ 0x0, ['long long']],
'HandleCount' : [ 0x8, ['long long']],
'NextToFree' : [ 0x8, ['pointer64', ['void']]],
'Lock' : [ 0x10, ['_EX_PUSH_LOCK']],
'TypeIndex' : [ 0x18, ['unsigned char']],
'TraceFlags' : [ 0x19, ['unsigned char']],
'DbgRefTrace' : [ 0x19, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'DbgTracePermanent' : [ 0x19, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'InfoMask' : [ 0x1a, ['unsigned char']],
'Flags' : [ 0x1b, ['unsigned char']],
'NewObject' : [ 0x1b, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'KernelObject' : [ 0x1b, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'KernelOnlyAccess' : [ 0x1b, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'ExclusiveObject' : [ 0x1b, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'PermanentObject' : [ 0x1b, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'DefaultSecurityQuota' : [ 0x1b, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'SingleHandleEntry' : [ 0x1b, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'DeletedInline' : [ 0x1b, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'Spare' : [ 0x1c, ['unsigned long']],
'ObjectCreateInfo' : [ 0x20, ['pointer64', ['_OBJECT_CREATE_INFORMATION']]],
'QuotaBlockCharged' : [ 0x20, ['pointer64', ['void']]],
'SecurityDescriptor' : [ 0x28, ['pointer64', ['void']]],
'Body' : [ 0x30, ['_QUAD']],
} ],
'_OBJECT_HEADER_QUOTA_INFO' : [ 0x20, {
'PagedPoolCharge' : [ 0x0, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x4, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x8, ['unsigned long']],
'SecurityDescriptorQuotaBlock' : [ 0x10, ['pointer64', ['void']]],
'Reserved' : [ 0x18, ['unsigned long long']],
} ],
'_OBJECT_HEADER_PROCESS_INFO' : [ 0x10, {
'ExclusiveProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'Reserved' : [ 0x8, ['unsigned long long']],
} ],
'_OBJECT_HEADER_HANDLE_INFO' : [ 0x10, {
'HandleCountDataBase' : [ 0x0, ['pointer64', ['_OBJECT_HANDLE_COUNT_DATABASE']]],
'SingleEntry' : [ 0x0, ['_OBJECT_HANDLE_COUNT_ENTRY']],
} ],
'_OBJECT_HEADER_NAME_INFO' : [ 0x20, {
'Directory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'Name' : [ 0x8, ['_UNICODE_STRING']],
'ReferenceCount' : [ 0x18, ['long']],
} ],
'_OBJECT_HEADER_CREATOR_INFO' : [ 0x20, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'CreatorUniqueProcess' : [ 0x10, ['pointer64', ['void']]],
'CreatorBackTraceIndex' : [ 0x18, ['unsigned short']],
'Reserved' : [ 0x1a, ['unsigned short']],
} ],
'_OBJECT_HEADER_AUDIT_INFO' : [ 0x10, {
'SecurityDescriptor' : [ 0x0, ['pointer64', ['void']]],
'Reserved' : [ 0x8, ['unsigned long long']],
} ],
'_OBJECT_HEADER_HANDLE_REVOCATION_INFO' : [ 0x20, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'RevocationBlock' : [ 0x10, ['pointer64', ['_OB_HANDLE_REVOCATION_BLOCK']]],
'Padding1' : [ 0x18, ['array', 4, ['unsigned char']]],
'Padding2' : [ 0x1c, ['array', 4, ['unsigned char']]],
} ],
'_OBP_LOOKUP_CONTEXT' : [ 0x28, {
'Directory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'Object' : [ 0x8, ['pointer64', ['void']]],
'EntryLink' : [ 0x10, ['pointer64', ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]]],
'HashValue' : [ 0x18, ['unsigned long']],
'HashIndex' : [ 0x1c, ['unsigned short']],
'DirectoryLocked' : [ 0x1e, ['unsigned char']],
'LockedExclusive' : [ 0x1f, ['unsigned char']],
'LockStateSignature' : [ 0x20, ['unsigned long']],
} ],
'_OBJECT_DIRECTORY' : [ 0x158, {
'HashBuckets' : [ 0x0, ['array', 37, ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]]],
'Lock' : [ 0x128, ['_EX_PUSH_LOCK']],
'DeviceMap' : [ 0x130, ['pointer64', ['_DEVICE_MAP']]],
'ShadowDirectory' : [ 0x138, ['pointer64', ['_OBJECT_DIRECTORY']]],
'SessionId' : [ 0x140, ['unsigned long']],
'NamespaceEntry' : [ 0x148, ['pointer64', ['void']]],
'Flags' : [ 0x150, ['unsigned long']],
} ],
'_OBP_SILODRIVERSTATE' : [ 0x2e0, {
'SystemDeviceMap' : [ 0x0, ['pointer64', ['_DEVICE_MAP']]],
'SystemDosDeviceState' : [ 0x8, ['_OBP_SYSTEM_DOS_DEVICE_STATE']],
'DeviceMapLock' : [ 0x78, ['_EX_PUSH_LOCK']],
'PrivateNamespaceLookupTable' : [ 0x80, ['_OBJECT_NAMESPACE_LOOKUPTABLE']],
} ],
'_DEVICE_MAP' : [ 0x40, {
'DosDevicesDirectory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'GlobalDosDevicesDirectory' : [ 0x8, ['pointer64', ['_OBJECT_DIRECTORY']]],
'DosDevicesDirectoryHandle' : [ 0x10, ['pointer64', ['void']]],
'ReferenceCount' : [ 0x18, ['long']],
'DriveMap' : [ 0x1c, ['unsigned long']],
'DriveType' : [ 0x20, ['array', 32, ['unsigned char']]],
} ],
'_WHEAP_INFO_BLOCK' : [ 0x18, {
'ErrorSourceCount' : [ 0x0, ['unsigned long']],
'ErrorSourceTable' : [ 0x8, ['pointer64', ['_WHEAP_ERROR_SOURCE_TABLE']]],
'WorkQueue' : [ 0x10, ['pointer64', ['_WHEAP_WORK_QUEUE']]],
} ],
'_WHEAP_ERROR_SOURCE' : [ 0x428, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'FailedAllocations' : [ 0x10, ['unsigned long']],
'PlatformErrorSourceId' : [ 0x14, ['unsigned long']],
'ErrorCount' : [ 0x18, ['long']],
'RecordCount' : [ 0x1c, ['unsigned long']],
'RecordLength' : [ 0x20, ['unsigned long']],
'PoolTag' : [ 0x24, ['unsigned long']],
'Type' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSrcTypeMCE', 1: 'WheaErrSrcTypeCMC', 2: 'WheaErrSrcTypeCPE', 3: 'WheaErrSrcTypeNMI', 4: 'WheaErrSrcTypePCIe', 5: 'WheaErrSrcTypeGeneric', 6: 'WheaErrSrcTypeINIT', 7: 'WheaErrSrcTypeBOOT', 8: 'WheaErrSrcTypeSCIGeneric', 9: 'WheaErrSrcTypeIPFMCA', 10: 'WheaErrSrcTypeIPFCMC', 11: 'WheaErrSrcTypeIPFCPE', 12: 'WheaErrSrcTypeMax'})]],
'Records' : [ 0x30, ['pointer64', ['_WHEAP_ERROR_RECORD_WRAPPER']]],
'Context' : [ 0x38, ['pointer64', ['void']]],
'SectionCount' : [ 0x40, ['unsigned long']],
'SectionLength' : [ 0x44, ['unsigned long']],
'TickCountAtLastError' : [ 0x48, ['_LARGE_INTEGER']],
'AccumulatedErrors' : [ 0x50, ['unsigned long']],
'TotalErrors' : [ 0x54, ['unsigned long']],
'Deferred' : [ 0x58, ['unsigned char']],
'Descriptor' : [ 0x59, ['_WHEA_ERROR_SOURCE_DESCRIPTOR']],
} ],
'_WHEAP_ERROR_RECORD_WRAPPER' : [ 0xf0, {
'WorkEntry' : [ 0x0, ['_LIST_ENTRY']],
'Length' : [ 0x10, ['unsigned long']],
'ProcessorNumber' : [ 0x14, ['unsigned long']],
'Flags' : [ 0x18, ['_WHEAP_ERROR_RECORD_WRAPPER_FLAGS']],
'InUse' : [ 0x1c, ['long']],
'ErrorSource' : [ 0x20, ['pointer64', ['_WHEAP_ERROR_SOURCE']]],
'ErrorRecord' : [ 0x28, ['_WHEA_ERROR_RECORD']],
} ],
'_KSECONDARY_IDT_ENTRY' : [ 0x30, {
'SpinLock' : [ 0x0, ['unsigned long long']],
'ConnectLock' : [ 0x8, ['_KEVENT']],
'LineMasked' : [ 0x20, ['unsigned char']],
'InterruptList' : [ 0x28, ['pointer64', ['_KINTERRUPT']]],
} ],
'_WNF_STATE_NAME' : [ 0x8, {
'Data' : [ 0x0, ['array', 2, ['unsigned long']]],
} ],
'_PS_CLIENT_SECURITY_CONTEXT' : [ 0x8, {
'ImpersonationData' : [ 0x0, ['unsigned long long']],
'ImpersonationToken' : [ 0x0, ['pointer64', ['void']]],
'ImpersonationLevel' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long long')]],
'EffectiveOnly' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
} ],
'_DBGKD_ANY_CONTROL_SET' : [ 0x1c, {
'X86ControlSet' : [ 0x0, ['_X86_DBGKD_CONTROL_SET']],
'AlphaControlSet' : [ 0x0, ['unsigned long']],
'IA64ControlSet' : [ 0x0, ['_IA64_DBGKD_CONTROL_SET']],
'Amd64ControlSet' : [ 0x0, ['_AMD64_DBGKD_CONTROL_SET']],
'ArmControlSet' : [ 0x0, ['_ARM_DBGKD_CONTROL_SET']],
'Arm64ControlSet' : [ 0x0, ['_ARM64_DBGKD_CONTROL_SET']],
'ArmCeControlSet' : [ 0x0, ['_ARMCE_DBGKD_CONTROL_SET']],
'PpcControlSet' : [ 0x0, ['_PPC_DBGKD_CONTROL_SET']],
} ],
'_MI_VERIFIER_POOL_HEADER' : [ 0x8, {
'VerifierPoolEntry' : [ 0x0, ['pointer64', ['_VI_POOL_ENTRY']]],
} ],
'_POP_FX_PLUGIN' : [ 0xb8, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'Version' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x18, ['unsigned long long']],
'WorkQueue' : [ 0x20, ['_KQUEUE']],
'AcceptDeviceNotification' : [ 0x60, ['pointer64', ['void']]],
'AcceptProcessorNotification' : [ 0x68, ['pointer64', ['void']]],
'AcceptAcpiNotification' : [ 0x70, ['pointer64', ['void']]],
'WorkOrderCount' : [ 0x78, ['unsigned long']],
'WorkOrders' : [ 0x80, ['array', 1, ['_POP_FX_WORK_ORDER']]],
} ],
'_ARM_DBGKD_CONTROL_SET' : [ 0xc, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long']],
'CurrentSymbolEnd' : [ 0x8, ['unsigned long']],
} ],
'_LPCP_MESSAGE' : [ 0x50, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'FreeEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Reserved0' : [ 0x8, ['unsigned long']],
'SenderPort' : [ 0x10, ['pointer64', ['void']]],
'RepliedToThread' : [ 0x18, ['pointer64', ['_ETHREAD']]],
'PortContext' : [ 0x20, ['pointer64', ['void']]],
'Request' : [ 0x28, ['_PORT_MESSAGE']],
} ],
'_HARDWARE_PTE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'reserved0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 52, native_type='unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type='unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_ALPC_PORT_ATTRIBUTES' : [ 0x48, {
'Flags' : [ 0x0, ['unsigned long']],
'SecurityQos' : [ 0x4, ['_SECURITY_QUALITY_OF_SERVICE']],
'MaxMessageLength' : [ 0x10, ['unsigned long long']],
'MemoryBandwidth' : [ 0x18, ['unsigned long long']],
'MaxPoolUsage' : [ 0x20, ['unsigned long long']],
'MaxSectionSize' : [ 0x28, ['unsigned long long']],
'MaxViewSize' : [ 0x30, ['unsigned long long']],
'MaxTotalSectionSize' : [ 0x38, ['unsigned long long']],
'DupObjectTypes' : [ 0x40, ['unsigned long']],
'Reserved' : [ 0x44, ['unsigned long']],
} ],
'_MI_PARTITION_SEGMENTS' : [ 0x180, {
'DeleteSubsectionCleanup' : [ 0x0, ['_KEVENT']],
'UnusedSegmentCleanup' : [ 0x18, ['_KEVENT']],
'SubsectionDeletePtes' : [ 0x30, ['unsigned long long']],
'DereferenceSegmentHeader' : [ 0x38, ['_MMDEREFERENCE_SEGMENT_HEADER']],
'DeleteOnCloseList' : [ 0x68, ['_LIST_ENTRY']],
'DeleteOnCloseTimer' : [ 0x78, ['_KTIMER']],
'DeleteOnCloseTimerActive' : [ 0xb8, ['unsigned char']],
'DeleteOnCloseCount' : [ 0xbc, ['unsigned long']],
'UnusedSegmentList' : [ 0xc0, ['_LIST_ENTRY']],
'UnusedSubsectionList' : [ 0xd0, ['_LIST_ENTRY']],
'DeleteSubsectionList' : [ 0xe0, ['_LIST_ENTRY']],
'ControlAreaDeleteEvent' : [ 0xf0, ['_KEVENT']],
'ControlAreaDeleteList' : [ 0x108, ['_SINGLE_LIST_ENTRY']],
'SegmentListLock' : [ 0x140, ['long']],
} ],
'_KSTACK_COUNT' : [ 0x4, {
'Value' : [ 0x0, ['long']],
'State' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'StackCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
} ],
'_PS_TRUSTLET_ATTRIBUTE_TYPE' : [ 0x4, {
'Version' : [ 0x0, ['unsigned char']],
'DataCount' : [ 0x1, ['unsigned char']],
'SemanticType' : [ 0x2, ['unsigned char']],
'AccessRights' : [ 0x3, ['_PS_TRUSTLET_ATTRIBUTE_ACCESSRIGHTS']],
'AttributeType' : [ 0x0, ['unsigned long']],
} ],
'_KENTROPY_TIMING_STATE' : [ 0x150, {
'EntropyCount' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x4, ['array', 64, ['unsigned long']]],
'Dpc' : [ 0x108, ['_KDPC']],
'LastDeliveredBuffer' : [ 0x148, ['unsigned long']],
} ],
'_HEAP_UNPACKED_ENTRY' : [ 0x10, {
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned char']],
'SmallTagIndex' : [ 0xb, ['unsigned char']],
'SubSegmentCode' : [ 0x8, ['unsigned long']],
'PreviousSize' : [ 0xc, ['unsigned short']],
'SegmentOffset' : [ 0xe, ['unsigned char']],
'LFHFlags' : [ 0xe, ['unsigned char']],
'UnusedBytes' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
} ],
'_PEP_ACPI_SPB_RESOURCE' : [ 0x28, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PepAcpiMemory', 1: 'PepAcpiIoPort', 2: 'PepAcpiInterrupt', 3: 'PepAcpiGpioIo', 4: 'PepAcpiGpioInt', 5: 'PepAcpiSpbI2c', 6: 'PepAcpiSpbSpi', 7: 'PepAcpiSpbUart', 8: 'PepAcpiExtendedMemory', 9: 'PepAcpiExtendedIo'})]],
'Flags' : [ 0x4, ['_PEP_ACPI_RESOURCE_FLAGS']],
'TypeSpecificFlags' : [ 0x8, ['unsigned short']],
'ResourceSourceIndex' : [ 0xa, ['unsigned char']],
'ResourceSourceName' : [ 0x10, ['pointer64', ['_UNICODE_STRING']]],
'VendorData' : [ 0x18, ['pointer64', ['unsigned char']]],
'VendorDataLength' : [ 0x20, ['unsigned short']],
} ],
'_DISPATCHER_HEADER' : [ 0x18, {
'Lock' : [ 0x0, ['long']],
'LockNV' : [ 0x0, ['long']],
'Type' : [ 0x0, ['unsigned char']],
'Signalling' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'Reserved1' : [ 0x3, ['unsigned char']],
'TimerType' : [ 0x0, ['unsigned char']],
'TimerControlFlags' : [ 0x1, ['unsigned char']],
'Absolute' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Wake' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'EncodedTolerableDelay' : [ 0x1, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'Hand' : [ 0x2, ['unsigned char']],
'TimerMiscFlags' : [ 0x3, ['unsigned char']],
'Index' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 6, native_type='unsigned char')]],
'Inserted' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Expired' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'Timer2Type' : [ 0x0, ['unsigned char']],
'Timer2Flags' : [ 0x1, ['unsigned char']],
'Timer2Inserted' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Timer2Expiring' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Timer2CancelPending' : [ 0x1, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Timer2SetPending' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Timer2Running' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'Timer2Disabled' : [ 0x1, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Timer2ReservedFlags' : [ 0x1, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'Timer2Reserved1' : [ 0x2, ['unsigned char']],
'Timer2Reserved2' : [ 0x3, ['unsigned char']],
'QueueType' : [ 0x0, ['unsigned char']],
'QueueControlFlags' : [ 0x1, ['unsigned char']],
'Abandoned' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'DisableIncrement' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'QueueReservedControlFlags' : [ 0x1, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'QueueSize' : [ 0x2, ['unsigned char']],
'QueueReserved' : [ 0x3, ['unsigned char']],
'ThreadType' : [ 0x0, ['unsigned char']],
'ThreadReserved' : [ 0x1, ['unsigned char']],
'ThreadControlFlags' : [ 0x2, ['unsigned char']],
'CycleProfiling' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'CounterProfiling' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'GroupScheduling' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'AffinitySet' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Tagged' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'EnergyProfiling' : [ 0x2, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'ThreadReservedControlFlags' : [ 0x2, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'DebugActive' : [ 0x3, ['unsigned char']],
'ActiveDR7' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Instrumented' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Minimal' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Reserved4' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 6, native_type='unsigned char')]],
'UmsScheduled' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'UmsPrimary' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'MutantType' : [ 0x0, ['unsigned char']],
'MutantSize' : [ 0x1, ['unsigned char']],
'DpcActive' : [ 0x2, ['unsigned char']],
'MutantReserved' : [ 0x3, ['unsigned char']],
'SignalState' : [ 0x4, ['long']],
'WaitListHead' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_ETW_GUID_ENTRY' : [ 0x190, {
'GuidList' : [ 0x0, ['_LIST_ENTRY']],
'RefCount' : [ 0x10, ['long long']],
'Guid' : [ 0x18, ['_GUID']],
'RegListHead' : [ 0x28, ['_LIST_ENTRY']],
'SecurityDescriptor' : [ 0x38, ['pointer64', ['void']]],
'LastEnable' : [ 0x40, ['_ETW_LAST_ENABLE_INFO']],
'MatchId' : [ 0x40, ['unsigned long long']],
'ProviderEnableInfo' : [ 0x50, ['_TRACE_ENABLE_INFO']],
'EnableInfo' : [ 0x70, ['array', 8, ['_TRACE_ENABLE_INFO']]],
'FilterData' : [ 0x170, ['pointer64', ['_ETW_FILTER_HEADER']]],
'HostSilo' : [ 0x178, ['unsigned char']],
'Lock' : [ 0x180, ['_EX_PUSH_LOCK']],
'LockOwner' : [ 0x188, ['pointer64', ['_ETHREAD']]],
} ],
'_VI_POOL_ENTRY' : [ 0x20, {
'PageHeader' : [ 0x0, ['_VI_POOL_PAGE_HEADER']],
'InUse' : [ 0x0, ['_VI_POOL_ENTRY_INUSE']],
'NextFree' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_MM_PAGE_ACCESS_INFO' : [ 0x8, {
'Flags' : [ 0x0, ['_MM_PAGE_ACCESS_INFO_FLAGS']],
'FileOffset' : [ 0x0, ['unsigned long long']],
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'PointerProtoPte' : [ 0x0, ['pointer64', ['void']]],
} ],
'_MI_CONTROL_AREA_WAIT_BLOCK' : [ 0x28, {
'Next' : [ 0x0, ['pointer64', ['_MI_CONTROL_AREA_WAIT_BLOCK']]],
'WaitReason' : [ 0x8, ['unsigned long']],
'WaitResponse' : [ 0xc, ['unsigned long']],
'Gate' : [ 0x10, ['_KGATE']],
} ],
'_HEAP_COUNTERS' : [ 0x78, {
'TotalMemoryReserved' : [ 0x0, ['unsigned long long']],
'TotalMemoryCommitted' : [ 0x8, ['unsigned long long']],
'TotalMemoryLargeUCR' : [ 0x10, ['unsigned long long']],
'TotalSizeInVirtualBlocks' : [ 0x18, ['unsigned long long']],
'TotalSegments' : [ 0x20, ['unsigned long']],
'TotalUCRs' : [ 0x24, ['unsigned long']],
'CommittOps' : [ 0x28, ['unsigned long']],
'DeCommitOps' : [ 0x2c, ['unsigned long']],
'LockAcquires' : [ 0x30, ['unsigned long']],
'LockCollisions' : [ 0x34, ['unsigned long']],
'CommitRate' : [ 0x38, ['unsigned long']],
'DecommittRate' : [ 0x3c, ['unsigned long']],
'CommitFailures' : [ 0x40, ['unsigned long']],
'InBlockCommitFailures' : [ 0x44, ['unsigned long']],
'PollIntervalCounter' : [ 0x48, ['unsigned long']],
'DecommitsSinceLastCheck' : [ 0x4c, ['unsigned long']],
'HeapPollInterval' : [ 0x50, ['unsigned long']],
'AllocAndFreeOps' : [ 0x54, ['unsigned long']],
'AllocationIndicesActive' : [ 0x58, ['unsigned long']],
'InBlockDeccommits' : [ 0x5c, ['unsigned long']],
'InBlockDeccomitSize' : [ 0x60, ['unsigned long long']],
'HighWatermarkSize' : [ 0x68, ['unsigned long long']],
'LastPolledSize' : [ 0x70, ['unsigned long long']],
} ],
'_TraceLoggingMetadata_t' : [ 0x10, {
'Signature' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned short']],
'Version' : [ 0x6, ['unsigned char']],
'Flags' : [ 0x7, ['unsigned char']],
'Magic' : [ 0x8, ['unsigned long long']],
} ],
'_MI_VISIBLE_PARTITION' : [ 0x1100, {
'LowestPhysicalPage' : [ 0x0, ['unsigned long long']],
'HighestPhysicalPage' : [ 0x8, ['unsigned long long']],
'NumberOfPhysicalPages' : [ 0x10, ['unsigned long long']],
'NumberOfPagingFiles' : [ 0x18, ['unsigned long']],
'PagingFile' : [ 0x20, ['array', 16, ['pointer64', ['_MMPAGING_FILE']]]],
'AvailablePages' : [ 0xc0, ['unsigned long long']],
'ResidentAvailablePages' : [ 0x100, ['unsigned long long']],
'TotalCommittedPages' : [ 0x108, ['unsigned long long']],
'ModifiedPageListHead' : [ 0x140, ['_MMPFNLIST']],
'ModifiedNoWritePageListHead' : [ 0x180, ['_MMPFNLIST']],
'TotalCommitLimit' : [ 0x1a8, ['unsigned long long']],
'TotalPagesForPagingFile' : [ 0x1b0, ['unsigned long long']],
'VadPhysicalPages' : [ 0x1b8, ['unsigned long long']],
'ProcessLockedFilePages' : [ 0x1c0, ['unsigned long long']],
'ChargeCommitmentFailures' : [ 0x1c8, ['array', 4, ['unsigned long']]],
'PageTableBitmapPages' : [ 0x1d8, ['unsigned long long']],
'PageFileTraceIndex' : [ 0x1e0, ['long']],
'PageFileTraces' : [ 0x1e8, ['array', 32, ['_MI_PAGEFILE_TRACES']]],
} ],
'_OB_HANDLE_REVOCATION_BLOCK' : [ 0x20, {
'RevocationInfos' : [ 0x0, ['_LIST_ENTRY']],
'Lock' : [ 0x10, ['_EX_PUSH_LOCK']],
'Rundown' : [ 0x18, ['_EX_RUNDOWN_REF']],
} ],
'_SYSPTES_HEADER' : [ 0x118, {
'ListHead' : [ 0x0, ['array', 16, ['_LIST_ENTRY']]],
'Count' : [ 0x100, ['unsigned long long']],
'NumberOfEntries' : [ 0x108, ['unsigned long long']],
'NumberOfEntriesPeak' : [ 0x110, ['unsigned long long']],
} ],
'_EXCEPTION_RECORD' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['pointer64', ['_EXCEPTION_RECORD']]],
'ExceptionAddress' : [ 0x10, ['pointer64', ['void']]],
'NumberParameters' : [ 0x18, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_MI_ACTIVE_WSLE_LISTHEAD' : [ 0x10, {
'Flink' : [ 0x0, ['unsigned long long']],
'Blink' : [ 0x8, ['unsigned long long']],
} ],
'_PENDING_RELATIONS_LIST_ENTRY' : [ 0x70, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'WorkItem' : [ 0x10, ['_WORK_QUEUE_ITEM']],
'DeviceEvent' : [ 0x30, ['pointer64', ['_PNP_DEVICE_EVENT_ENTRY']]],
'DeviceObject' : [ 0x38, ['pointer64', ['_DEVICE_OBJECT']]],
'RelationsList' : [ 0x40, ['pointer64', ['_RELATION_LIST']]],
'EjectIrp' : [ 0x48, ['pointer64', ['_IRP']]],
'Lock' : [ 0x50, ['Enumeration', dict(target = 'long', choices = {0: 'IRPLOCK_CANCELABLE', 1: 'IRPLOCK_CANCEL_STARTED', 2: 'IRPLOCK_CANCEL_COMPLETE', 3: 'IRPLOCK_COMPLETED'})]],
'Problem' : [ 0x54, ['unsigned long']],
'ProfileChangingEject' : [ 0x58, ['unsigned char']],
'DisplaySafeRemovalDialog' : [ 0x59, ['unsigned char']],
'LightestSleepState' : [ 0x5c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DockInterface' : [ 0x60, ['pointer64', ['DOCK_INTERFACE']]],
'DequeuePending' : [ 0x68, ['unsigned char']],
'DeleteType' : [ 0x6c, ['Enumeration', dict(target = 'long', choices = {0: 'QueryRemoveDevice', 1: 'CancelRemoveDevice', 2: 'RemoveDevice', 3: 'SurpriseRemoveDevice', 4: 'EjectDevice', 5: 'RemoveFailedDevice', 6: 'RemoveUnstartedFailedDevice', 7: 'MaxDeviceDeleteType'})]],
} ],
'_PPM_PLATFORM_STATE' : [ 0x180, {
'Latency' : [ 0x0, ['unsigned long']],
'BreakEvenDuration' : [ 0x4, ['unsigned long']],
'VetoAccounting' : [ 0x8, ['_PPM_VETO_ACCOUNTING']],
'TransitionDebugger' : [ 0x30, ['unsigned char']],
'Platform' : [ 0x31, ['unsigned char']],
'DependencyListCount' : [ 0x34, ['unsigned long']],
'Processors' : [ 0x38, ['_KAFFINITY_EX']],
'Name' : [ 0xe0, ['_UNICODE_STRING']],
'DependencyLists' : [ 0xf0, ['pointer64', ['_PPM_SELECTION_DEPENDENCY']]],
'Synchronization' : [ 0xf8, ['_PPM_COORDINATED_SYNCHRONIZATION']],
'EnterTime' : [ 0x100, ['unsigned long long']],
'RefCount' : [ 0x140, ['long']],
'CacheAlign0' : [ 0x140, ['array', 64, ['unsigned char']]],
} ],
'_MI_SECTION_IMAGE_INFORMATION' : [ 0x48, {
'ExportedImageInformation' : [ 0x0, ['_SECTION_IMAGE_INFORMATION']],
'InternalImageInformation' : [ 0x40, ['_MI_EXTRA_IMAGE_INFORMATION']],
} ],
'_TOKEN_ACCESS_INFORMATION' : [ 0x58, {
'SidHash' : [ 0x0, ['pointer64', ['_SID_AND_ATTRIBUTES_HASH']]],
'RestrictedSidHash' : [ 0x8, ['pointer64', ['_SID_AND_ATTRIBUTES_HASH']]],
'Privileges' : [ 0x10, ['pointer64', ['_TOKEN_PRIVILEGES']]],
'AuthenticationId' : [ 0x18, ['_LUID']],
'TokenType' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {1: 'TokenPrimary', 2: 'TokenImpersonation'})]],
'ImpersonationLevel' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'MandatoryPolicy' : [ 0x28, ['_TOKEN_MANDATORY_POLICY']],
'Flags' : [ 0x2c, ['unsigned long']],
'AppContainerNumber' : [ 0x30, ['unsigned long']],
'PackageSid' : [ 0x38, ['pointer64', ['void']]],
'CapabilitiesHash' : [ 0x40, ['pointer64', ['_SID_AND_ATTRIBUTES_HASH']]],
'TrustLevelSid' : [ 0x48, ['pointer64', ['void']]],
'SecurityAttributes' : [ 0x50, ['pointer64', ['void']]],
} ],
'_CELL_DATA' : [ 0x50, {
'u' : [ 0x0, ['_u']],
} ],
'_INITIAL_PRIVILEGE_SET' : [ 0x2c, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 3, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_HEAP_TUNING_PARAMETERS' : [ 0x10, {
'CommittThresholdShift' : [ 0x0, ['unsigned long']],
'MaxPreCommittThreshold' : [ 0x8, ['unsigned long long']],
} ],
'_MMWSLE_NONDIRECT_HASH' : [ 0x10, {
'Key' : [ 0x0, ['pointer64', ['void']]],
'Index' : [ 0x8, ['unsigned long long']],
} ],
'_POP_FX_WORK_ORDER' : [ 0x38, {
'WorkItem' : [ 0x0, ['_WORK_QUEUE_ITEM']],
'WorkCount' : [ 0x20, ['long']],
'Context' : [ 0x28, ['pointer64', ['void']]],
'WatchdogTimerInfo' : [ 0x30, ['pointer64', ['_POP_FX_WORK_ORDER_WATCHDOG_INFO']]],
} ],
'_SEGMENT_FLAGS' : [ 0x4, {
'TotalNumberOfPtes4132' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 10, native_type='unsigned short')]],
'Spare0' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned short')]],
'LargePages' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned short')]],
'DebugSymbolsLoaded' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned short')]],
'WriteCombined' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned short')]],
'NoCache' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned short')]],
'Short0' : [ 0x0, ['unsigned short']],
'FloppyMedia' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'DefaultProtectionMask' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 6, native_type='unsigned char')]],
'Binary32' : [ 0x2, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'ContainsDebug' : [ 0x2, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'UChar1' : [ 0x2, ['unsigned char']],
'ForceCollision' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ImageSigningType' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 4, native_type='unsigned char')]],
'ImageSigningLevel' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'UChar2' : [ 0x3, ['unsigned char']],
} ],
'_KAPC' : [ 0x58, {
'Type' : [ 0x0, ['unsigned char']],
'SpareByte0' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'SpareByte1' : [ 0x3, ['unsigned char']],
'SpareLong0' : [ 0x4, ['unsigned long']],
'Thread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'ApcListEntry' : [ 0x10, ['_LIST_ENTRY']],
'KernelRoutine' : [ 0x20, ['pointer64', ['void']]],
'RundownRoutine' : [ 0x28, ['pointer64', ['void']]],
'NormalRoutine' : [ 0x30, ['pointer64', ['void']]],
'Reserved' : [ 0x20, ['array', 3, ['pointer64', ['void']]]],
'NormalContext' : [ 0x38, ['pointer64', ['void']]],
'SystemArgument1' : [ 0x40, ['pointer64', ['void']]],
'SystemArgument2' : [ 0x48, ['pointer64', ['void']]],
'ApcStateIndex' : [ 0x50, ['unsigned char']],
'ApcMode' : [ 0x51, ['unsigned char']],
'Inserted' : [ 0x52, ['unsigned char']],
} ],
'_KTIMER_EXPIRATION_TRACE' : [ 0x10, {
'InterruptTime' : [ 0x0, ['unsigned long long']],
'PerformanceCounter' : [ 0x8, ['_LARGE_INTEGER']],
} ],
'_POOL_TRACKER_BIG_PAGES' : [ 0x18, {
'Va' : [ 0x0, ['unsigned long long']],
'Key' : [ 0x8, ['unsigned long']],
'Pattern' : [ 0xc, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'PoolType' : [ 0xc, ['BitField', dict(start_bit = 8, end_bit = 20, native_type='unsigned long')]],
'SlushSize' : [ 0xc, ['BitField', dict(start_bit = 20, end_bit = 32, native_type='unsigned long')]],
'NumberOfBytes' : [ 0x10, ['unsigned long long']],
} ],
'tagSWITCH_CONTEXT_DATA' : [ 0x50, {
'ullOsMaxVersionTested' : [ 0x0, ['unsigned long long']],
'ulTargetPlatform' : [ 0x8, ['unsigned long']],
'ullContextMinimum' : [ 0x10, ['unsigned long long']],
'guPlatform' : [ 0x18, ['_GUID']],
'guMinPlatform' : [ 0x28, ['_GUID']],
'ulContextSource' : [ 0x38, ['unsigned long']],
'ulElementCount' : [ 0x3c, ['unsigned long']],
'guElements' : [ 0x40, ['array', 1, ['_GUID']]],
} ],
'_WHEAP_ERROR_SOURCE_TABLE' : [ 0x30, {
'Signature' : [ 0x0, ['unsigned long']],
'Count' : [ 0x4, ['long']],
'Items' : [ 0x8, ['_LIST_ENTRY']],
'InsertLock' : [ 0x18, ['_KEVENT']],
} ],
'_ETW_HASH_BUCKET' : [ 0x38, {
'ListHead' : [ 0x0, ['array', 3, ['_LIST_ENTRY']]],
'BucketLock' : [ 0x30, ['_EX_PUSH_LOCK']],
} ],
'_TEB_ACTIVE_FRAME' : [ 0x18, {
'Flags' : [ 0x0, ['unsigned long']],
'Previous' : [ 0x8, ['pointer64', ['_TEB_ACTIVE_FRAME']]],
'Context' : [ 0x10, ['pointer64', ['_TEB_ACTIVE_FRAME_CONTEXT']]],
} ],
'_FILE_GET_QUOTA_INFORMATION' : [ 0x14, {
'NextEntryOffset' : [ 0x0, ['unsigned long']],
'SidLength' : [ 0x4, ['unsigned long']],
'Sid' : [ 0x8, ['_SID']],
} ],
'_ACCESS_REASONS' : [ 0x80, {
'Data' : [ 0x0, ['array', 32, ['unsigned long']]],
} ],
'_CM_KEY_BODY' : [ 0x58, {
'Type' : [ 0x0, ['unsigned long']],
'KeyControlBlock' : [ 0x8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NotifyBlock' : [ 0x10, ['pointer64', ['_CM_NOTIFY_BLOCK']]],
'ProcessID' : [ 0x18, ['pointer64', ['void']]],
'KeyBodyList' : [ 0x20, ['_LIST_ENTRY']],
'Flags' : [ 0x30, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'HandleTags' : [ 0x30, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'KtmTrans' : [ 0x38, ['pointer64', ['void']]],
'KtmUow' : [ 0x40, ['pointer64', ['_GUID']]],
'ContextListHead' : [ 0x48, ['_LIST_ENTRY']],
} ],
'_KWAIT_BLOCK' : [ 0x30, {
'WaitListEntry' : [ 0x0, ['_LIST_ENTRY']],
'WaitType' : [ 0x10, ['unsigned char']],
'BlockState' : [ 0x11, ['unsigned char']],
'WaitKey' : [ 0x12, ['unsigned short']],
'SpareLong' : [ 0x14, ['long']],
'Thread' : [ 0x18, ['pointer64', ['_KTHREAD']]],
'NotificationQueue' : [ 0x18, ['pointer64', ['_KQUEUE']]],
'Object' : [ 0x20, ['pointer64', ['void']]],
'SparePtr' : [ 0x28, ['pointer64', ['void']]],
} ],
'_ARM64_DBGKD_CONTROL_SET' : [ 0x18, {
'Continue' : [ 0x0, ['unsigned long']],
'TraceFlag' : [ 0x4, ['unsigned long']],
'CurrentSymbolStart' : [ 0x8, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0x10, ['unsigned long long']],
} ],
'_MMPTE_PROTOTYPE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'DemandFillProto' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'HiberVerifyConverted' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned long long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Combined' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned long long')]],
'ProtoAddress' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='long long')]],
} ],
'_WHEA_ERROR_PACKET_FLAGS' : [ 0x4, {
'PreviousError' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HypervisorError' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Simulated' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'PlatformPfaControl' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'PlatformDirectedOffline' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_THERMAL_INFORMATION_EX' : [ 0x58, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'SamplingPeriod' : [ 0xc, ['unsigned long']],
'CurrentTemperature' : [ 0x10, ['unsigned long']],
'PassiveTripPoint' : [ 0x14, ['unsigned long']],
'ThermalStandbyTripPoint' : [ 0x18, ['unsigned long']],
'CriticalTripPoint' : [ 0x1c, ['unsigned long']],
'ActiveTripPointCount' : [ 0x20, ['unsigned char']],
'ActiveTripPoint' : [ 0x24, ['array', 10, ['unsigned long']]],
'S4TransitionTripPoint' : [ 0x4c, ['unsigned long']],
'MinimumThrottle' : [ 0x50, ['unsigned long']],
'OverThrottleThreshold' : [ 0x54, ['unsigned long']],
} ],
'__unnamed_1ed8' : [ 0x4, {
'FilePointerIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'HardFault' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Spare0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
} ],
'__unnamed_1eda' : [ 0x4, {
'FilePointerIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'HardFault' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Spare1' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
} ],
'_MM_PAGE_ACCESS_INFO_FLAGS' : [ 0x4, {
'File' : [ 0x0, ['__unnamed_1ed8']],
'Private' : [ 0x0, ['__unnamed_1eda']],
} ],
'_KTIMER2' : [ 0x88, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'RbNodes' : [ 0x18, ['array', 2, ['_RTL_BALANCED_NODE']]],
'ListEntry' : [ 0x18, ['_LIST_ENTRY']],
'DueTime' : [ 0x48, ['array', 2, ['unsigned long long']]],
'Period' : [ 0x58, ['long long']],
'Callback' : [ 0x60, ['pointer64', ['void']]],
'CallbackContext' : [ 0x68, ['pointer64', ['void']]],
'DisableCallback' : [ 0x70, ['pointer64', ['void']]],
'DisableContext' : [ 0x78, ['pointer64', ['void']]],
'AbsoluteSystemTime' : [ 0x80, ['unsigned char']],
'TypeFlags' : [ 0x81, ['unsigned char']],
'Unused' : [ 0x81, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IdleResilient' : [ 0x81, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'HighResolution' : [ 0x81, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'NoWake' : [ 0x81, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Unused1' : [ 0x81, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'CollectionIndex' : [ 0x82, ['array', 2, ['unsigned char']]],
} ],
'_VI_VERIFIER_ISSUE' : [ 0x20, {
'IssueType' : [ 0x0, ['unsigned long long']],
'Address' : [ 0x8, ['pointer64', ['void']]],
'Parameters' : [ 0x10, ['array', 2, ['unsigned long long']]],
} ],
'_MMSUBSECTION_FLAGS' : [ 0x4, {
'SubsectionAccessed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 6, native_type='unsigned short')]],
'StartingSector4132' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 16, native_type='unsigned short')]],
'SubsectionStatic' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'GlobalMemory' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'SubsectionMappedDirect' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'OnDereferenceList' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'SectorEndOffset' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 16, native_type='unsigned short')]],
} ],
'_EXCEPTION_POINTERS' : [ 0x10, {
'ExceptionRecord' : [ 0x0, ['pointer64', ['_EXCEPTION_RECORD']]],
'ContextRecord' : [ 0x8, ['pointer64', ['_CONTEXT']]],
} ],
'_KMUTANT' : [ 0x38, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'MutantListEntry' : [ 0x18, ['_LIST_ENTRY']],
'OwnerThread' : [ 0x28, ['pointer64', ['_KTHREAD']]],
'Abandoned' : [ 0x30, ['unsigned char']],
'ApcDisable' : [ 0x31, ['unsigned char']],
} ],
'_OBJECT_REF_INFO' : [ 0x28, {
'ObjectHeader' : [ 0x0, ['pointer64', ['_OBJECT_HEADER']]],
'NextRef' : [ 0x8, ['pointer64', ['void']]],
'ImageFileName' : [ 0x10, ['array', 16, ['unsigned char']]],
'NextPos' : [ 0x20, ['unsigned short']],
'MaxStacks' : [ 0x22, ['unsigned short']],
'StackInfo' : [ 0x24, ['array', 0, ['_OBJECT_REF_STACK_INFO']]],
} ],
'_HBIN' : [ 0x20, {
'Signature' : [ 0x0, ['unsigned long']],
'FileOffset' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['array', 2, ['unsigned long']]],
'TimeStamp' : [ 0x14, ['_LARGE_INTEGER']],
'Spare' : [ 0x1c, ['unsigned long']],
} ],
'_MI_IMAGE_SECURITY_REFERENCE' : [ 0x18, {
'DynamicRelocations' : [ 0x0, ['pointer64', ['void']]],
'SecurityContext' : [ 0x8, ['_IMAGE_SECURITY_CONTEXT']],
'StrongImageReference' : [ 0x10, ['unsigned long long']],
} ],
'_AUTHZBASEP_CLAIM_ATTRIBUTES_COLLECTION' : [ 0x260, {
'DeviceGroupsCount' : [ 0x0, ['unsigned long']],
'pDeviceGroups' : [ 0x8, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'RestrictedDeviceGroupsCount' : [ 0x10, ['unsigned long']],
'pRestrictedDeviceGroups' : [ 0x18, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'DeviceGroupsHash' : [ 0x20, ['_SID_AND_ATTRIBUTES_HASH']],
'RestrictedDeviceGroupsHash' : [ 0x130, ['_SID_AND_ATTRIBUTES_HASH']],
'pUserSecurityAttributes' : [ 0x240, ['pointer64', ['_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION']]],
'pDeviceSecurityAttributes' : [ 0x248, ['pointer64', ['_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION']]],
'pRestrictedUserSecurityAttributes' : [ 0x250, ['pointer64', ['_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION']]],
'pRestrictedDeviceSecurityAttributes' : [ 0x258, ['pointer64', ['_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION']]],
} ],
'_HEAP_TAG_ENTRY' : [ 0x48, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long long']],
'TagIndex' : [ 0x10, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x12, ['unsigned short']],
'TagName' : [ 0x14, ['array', 24, ['wchar']]],
} ],
'_SECURITY_QUALITY_OF_SERVICE' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'ImpersonationLevel' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'ContextTrackingMode' : [ 0x8, ['unsigned char']],
'EffectiveOnly' : [ 0x9, ['unsigned char']],
} ],
'_MMWSLE_FREE_ENTRY' : [ 0x8, {
'MustBeZero' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PreviousFree' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 28, native_type='unsigned long long')]],
'NextFree' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 64, native_type='unsigned long long')]],
} ],
'_NT_TIB' : [ 0x38, {
'ExceptionList' : [ 0x0, ['pointer64', ['_EXCEPTION_REGISTRATION_RECORD']]],
'StackBase' : [ 0x8, ['pointer64', ['void']]],
'StackLimit' : [ 0x10, ['pointer64', ['void']]],
'SubSystemTib' : [ 0x18, ['pointer64', ['void']]],
'FiberData' : [ 0x20, ['pointer64', ['void']]],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['pointer64', ['void']]],
'Self' : [ 0x30, ['pointer64', ['_NT_TIB']]],
} ],
'_LEARNING_MODE_DATA' : [ 0x8, {
'Settings' : [ 0x0, ['unsigned long']],
'Enabled' : [ 0x4, ['unsigned char']],
'PermissiveModeEnabled' : [ 0x5, ['unsigned char']],
} ],
'_WHEA_REVISION' : [ 0x2, {
'MinorRevision' : [ 0x0, ['unsigned char']],
'MajorRevision' : [ 0x1, ['unsigned char']],
'AsUSHORT' : [ 0x0, ['unsigned short']],
} ],
'_PPM_IDLE_STATES' : [ 0x418, {
'InterfaceVersion' : [ 0x0, ['unsigned char']],
'ForceIdle' : [ 0x1, ['unsigned char']],
'EstimateIdleDuration' : [ 0x2, ['unsigned char']],
'ExitLatencyTraceEnabled' : [ 0x3, ['unsigned char']],
'NonInterruptibleTransition' : [ 0x4, ['unsigned char']],
'UnaccountedTransition' : [ 0x5, ['unsigned char']],
'IdleDurationLimited' : [ 0x6, ['unsigned char']],
'ExitLatencyCountdown' : [ 0x8, ['unsigned long']],
'TargetState' : [ 0xc, ['unsigned long']],
'ActualState' : [ 0x10, ['unsigned long']],
'OldState' : [ 0x14, ['unsigned long']],
'OverrideIndex' : [ 0x18, ['unsigned long']],
'ProcessorIdleCount' : [ 0x1c, ['unsigned long']],
'Type' : [ 0x20, ['unsigned long']],
'ReasonFlags' : [ 0x24, ['unsigned short']],
'InitiateWakeStamp' : [ 0x28, ['unsigned long long']],
'PreviousStatus' : [ 0x30, ['long']],
'PreviousCancelReason' : [ 0x34, ['unsigned long']],
'PrimaryProcessorMask' : [ 0x38, ['_KAFFINITY_EX']],
'SecondaryProcessorMask' : [ 0xe0, ['_KAFFINITY_EX']],
'IdlePrepare' : [ 0x188, ['pointer64', ['void']]],
'IdlePreExecute' : [ 0x190, ['pointer64', ['void']]],
'IdleExecute' : [ 0x198, ['pointer64', ['void']]],
'IdlePreselect' : [ 0x1a0, ['pointer64', ['void']]],
'IdleTest' : [ 0x1a8, ['pointer64', ['void']]],
'IdleAvailabilityCheck' : [ 0x1b0, ['pointer64', ['void']]],
'IdleComplete' : [ 0x1b8, ['pointer64', ['void']]],
'IdleCancel' : [ 0x1c0, ['pointer64', ['void']]],
'IdleIsHalted' : [ 0x1c8, ['pointer64', ['void']]],
'IdleInitiateWake' : [ 0x1d0, ['pointer64', ['void']]],
'PrepareInfo' : [ 0x1d8, ['_PROCESSOR_IDLE_PREPARE_INFO']],
'DeepIdleSnapshot' : [ 0x230, ['_KAFFINITY_EX']],
'Tracing' : [ 0x2d8, ['pointer64', ['_PERFINFO_PPM_STATE_SELECTION']]],
'CoordinatedTracing' : [ 0x2e0, ['pointer64', ['_PERFINFO_PPM_STATE_SELECTION']]],
'ProcessorMenu' : [ 0x2e8, ['_PPM_SELECTION_MENU']],
'CoordinatedMenu' : [ 0x2f8, ['_PPM_SELECTION_MENU']],
'CoordinatedSelection' : [ 0x308, ['_PPM_COORDINATED_SELECTION']],
'State' : [ 0x320, ['array', 1, ['_PPM_IDLE_STATE']]],
} ],
'_PPM_VETO_ACCOUNTING' : [ 0x28, {
'VetoPresent' : [ 0x0, ['long']],
'VetoListHead' : [ 0x8, ['_LIST_ENTRY']],
'CsAccountingBlocks' : [ 0x18, ['unsigned char']],
'BlocksDrips' : [ 0x19, ['unsigned char']],
'PreallocatedVetoCount' : [ 0x1c, ['unsigned long']],
'PreallocatedVetoList' : [ 0x20, ['pointer64', ['_PPM_VETO_ENTRY']]],
} ],
'_PEB' : [ 0x7a0, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'IsPackagedProcess' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'IsAppContainer' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'IsProtectedProcessLight' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'Padding0' : [ 0x4, ['array', 4, ['unsigned char']]],
'Mutant' : [ 0x8, ['pointer64', ['void']]],
'ImageBaseAddress' : [ 0x10, ['pointer64', ['void']]],
'Ldr' : [ 0x18, ['pointer64', ['_PEB_LDR_DATA']]],
'ProcessParameters' : [ 0x20, ['pointer64', ['_RTL_USER_PROCESS_PARAMETERS']]],
'SubSystemData' : [ 0x28, ['pointer64', ['void']]],
'ProcessHeap' : [ 0x30, ['pointer64', ['void']]],
'FastPebLock' : [ 0x38, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'AtlThunkSListPtr' : [ 0x40, ['pointer64', ['void']]],
'IFEOKey' : [ 0x48, ['pointer64', ['void']]],
'CrossProcessFlags' : [ 0x50, ['unsigned long']],
'ProcessInJob' : [ 0x50, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x50, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x50, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x50, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ProcessUsingFTH' : [ 0x50, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ReservedBits0' : [ 0x50, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'Padding1' : [ 0x54, ['array', 4, ['unsigned char']]],
'KernelCallbackTable' : [ 0x58, ['pointer64', ['void']]],
'UserSharedInfoPtr' : [ 0x58, ['pointer64', ['void']]],
'SystemReserved' : [ 0x60, ['array', 1, ['unsigned long']]],
'AtlThunkSListPtr32' : [ 0x64, ['unsigned long']],
'ApiSetMap' : [ 0x68, ['pointer64', ['void']]],
'TlsExpansionCounter' : [ 0x70, ['unsigned long']],
'Padding2' : [ 0x74, ['array', 4, ['unsigned char']]],
'TlsBitmap' : [ 0x78, ['pointer64', ['void']]],
'TlsBitmapBits' : [ 0x80, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x88, ['pointer64', ['void']]],
'SparePvoid0' : [ 0x90, ['pointer64', ['void']]],
'ReadOnlyStaticServerData' : [ 0x98, ['pointer64', ['pointer64', ['void']]]],
'AnsiCodePageData' : [ 0xa0, ['pointer64', ['void']]],
'OemCodePageData' : [ 0xa8, ['pointer64', ['void']]],
'UnicodeCaseTableData' : [ 0xb0, ['pointer64', ['void']]],
'NumberOfProcessors' : [ 0xb8, ['unsigned long']],
'NtGlobalFlag' : [ 0xbc, ['unsigned long']],
'CriticalSectionTimeout' : [ 0xc0, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0xc8, ['unsigned long long']],
'HeapSegmentCommit' : [ 0xd0, ['unsigned long long']],
'HeapDeCommitTotalFreeThreshold' : [ 0xd8, ['unsigned long long']],
'HeapDeCommitFreeBlockThreshold' : [ 0xe0, ['unsigned long long']],
'NumberOfHeaps' : [ 0xe8, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0xec, ['unsigned long']],
'ProcessHeaps' : [ 0xf0, ['pointer64', ['pointer64', ['void']]]],
'GdiSharedHandleTable' : [ 0xf8, ['pointer64', ['void']]],
'ProcessStarterHelper' : [ 0x100, ['pointer64', ['void']]],
'GdiDCAttributeList' : [ 0x108, ['unsigned long']],
'Padding3' : [ 0x10c, ['array', 4, ['unsigned char']]],
'LoaderLock' : [ 0x110, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'OSMajorVersion' : [ 0x118, ['unsigned long']],
'OSMinorVersion' : [ 0x11c, ['unsigned long']],
'OSBuildNumber' : [ 0x120, ['unsigned short']],
'OSCSDVersion' : [ 0x122, ['unsigned short']],
'OSPlatformId' : [ 0x124, ['unsigned long']],
'ImageSubsystem' : [ 0x128, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0x12c, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0x130, ['unsigned long']],
'Padding4' : [ 0x134, ['array', 4, ['unsigned char']]],
'ActiveProcessAffinityMask' : [ 0x138, ['unsigned long long']],
'GdiHandleBuffer' : [ 0x140, ['array', 60, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x230, ['pointer64', ['void']]],
'TlsExpansionBitmap' : [ 0x238, ['pointer64', ['void']]],
'TlsExpansionBitmapBits' : [ 0x240, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x2c0, ['unsigned long']],
'Padding5' : [ 0x2c4, ['array', 4, ['unsigned char']]],
'AppCompatFlags' : [ 0x2c8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x2d0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x2d8, ['pointer64', ['void']]],
'AppCompatInfo' : [ 0x2e0, ['pointer64', ['void']]],
'CSDVersion' : [ 0x2e8, ['_UNICODE_STRING']],
'ActivationContextData' : [ 0x2f8, ['pointer64', ['_ACTIVATION_CONTEXT_DATA']]],
'ProcessAssemblyStorageMap' : [ 0x300, ['pointer64', ['_ASSEMBLY_STORAGE_MAP']]],
'SystemDefaultActivationContextData' : [ 0x308, ['pointer64', ['_ACTIVATION_CONTEXT_DATA']]],
'SystemAssemblyStorageMap' : [ 0x310, ['pointer64', ['_ASSEMBLY_STORAGE_MAP']]],
'MinimumStackCommit' : [ 0x318, ['unsigned long long']],
'FlsCallback' : [ 0x320, ['pointer64', ['_FLS_CALLBACK_INFO']]],
'FlsListHead' : [ 0x328, ['_LIST_ENTRY']],
'FlsBitmap' : [ 0x338, ['pointer64', ['void']]],
'FlsBitmapBits' : [ 0x340, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x350, ['unsigned long']],
'WerRegistrationData' : [ 0x358, ['pointer64', ['void']]],
'WerShipAssertPtr' : [ 0x360, ['pointer64', ['void']]],
'pUnused' : [ 0x368, ['pointer64', ['void']]],
'pImageHeaderHash' : [ 0x370, ['pointer64', ['void']]],
'TracingFlags' : [ 0x378, ['unsigned long']],
'HeapTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CritSecTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LibLoaderTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SpareTracingBits' : [ 0x378, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'Padding6' : [ 0x37c, ['array', 4, ['unsigned char']]],
'CsrServerReadOnlySharedMemoryBase' : [ 0x380, ['unsigned long long']],
'TppWorkerpListLock' : [ 0x388, ['unsigned long long']],
'TppWorkerpList' : [ 0x390, ['_LIST_ENTRY']],
'WaitOnAddressHashTable' : [ 0x3a0, ['array', 128, ['pointer64', ['void']]]],
} ],
'_HEAP_UCR_DESCRIPTOR' : [ 0x30, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SegmentEntry' : [ 0x10, ['_LIST_ENTRY']],
'Address' : [ 0x20, ['pointer64', ['void']]],
'Size' : [ 0x28, ['unsigned long long']],
} ],
'_ETW_REALTIME_CONSUMER' : [ 0x98, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'ProcessHandle' : [ 0x10, ['pointer64', ['void']]],
'ProcessObject' : [ 0x18, ['pointer64', ['_EPROCESS']]],
'NextNotDelivered' : [ 0x20, ['pointer64', ['void']]],
'RealtimeConnectContext' : [ 0x28, ['pointer64', ['void']]],
'DisconnectEvent' : [ 0x30, ['pointer64', ['_KEVENT']]],
'DataAvailableEvent' : [ 0x38, ['pointer64', ['_KEVENT']]],
'UserBufferCount' : [ 0x40, ['pointer64', ['unsigned long']]],
'UserBufferListHead' : [ 0x48, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
'BuffersLost' : [ 0x50, ['unsigned long']],
'EmptyBuffersCount' : [ 0x54, ['unsigned long']],
'LoggerId' : [ 0x58, ['unsigned short']],
'Flags' : [ 0x5a, ['unsigned char']],
'ShutDownRequested' : [ 0x5a, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'NewBuffersLost' : [ 0x5a, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Disconnected' : [ 0x5a, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Notified' : [ 0x5a, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Wow' : [ 0x5a, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ReservedBufferSpaceBitMap' : [ 0x60, ['_RTL_BITMAP']],
'ReservedBufferSpace' : [ 0x70, ['pointer64', ['unsigned char']]],
'ReservedBufferSpaceSize' : [ 0x78, ['unsigned long']],
'UserPagesAllocated' : [ 0x7c, ['unsigned long']],
'UserPagesReused' : [ 0x80, ['unsigned long']],
'EventsLostCount' : [ 0x88, ['pointer64', ['unsigned long']]],
'BuffersLostCount' : [ 0x90, ['pointer64', ['unsigned long']]],
} ],
'_POOL_DESCRIPTOR' : [ 0x1140, {
'PoolType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPoolBase', 1: 'PagedPool', 2: 'NonPagedPoolBaseMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolBaseCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolBaseCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 516: 'NonPagedPoolNxCacheAligned', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 512: 'NonPagedPoolNx', 544: 'NonPagedPoolSessionNx', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'PagedLock' : [ 0x8, ['_FAST_MUTEX']],
'NonPagedLock' : [ 0x8, ['unsigned long long']],
'RunningAllocs' : [ 0x40, ['long']],
'RunningDeAllocs' : [ 0x44, ['long']],
'TotalBigPages' : [ 0x48, ['long']],
'ThreadsProcessingDeferrals' : [ 0x4c, ['long']],
'TotalBytes' : [ 0x50, ['unsigned long long']],
'PoolIndex' : [ 0x80, ['unsigned long']],
'TotalPages' : [ 0xc0, ['long']],
'PendingFrees' : [ 0x100, ['_SINGLE_LIST_ENTRY']],
'PendingFreeDepth' : [ 0x108, ['long']],
'ListHeads' : [ 0x140, ['array', 256, ['_LIST_ENTRY']]],
} ],
'_TOKEN_MANDATORY_POLICY' : [ 0x4, {
'Policy' : [ 0x0, ['unsigned long']],
} ],
'_BLOB_COUNTERS' : [ 0x8, {
'CreatedObjects' : [ 0x0, ['unsigned long']],
'DeletedObjects' : [ 0x4, ['unsigned long']],
} ],
'_KGATE' : [ 0x18, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_WHEA_ERROR_RECORD_HEADER' : [ 0x80, {
'Signature' : [ 0x0, ['unsigned long']],
'Revision' : [ 0x4, ['_WHEA_REVISION']],
'SignatureEnd' : [ 0x6, ['unsigned long']],
'SectionCount' : [ 0xa, ['unsigned short']],
'Severity' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'ValidBits' : [ 0x10, ['_WHEA_ERROR_RECORD_HEADER_VALIDBITS']],
'Length' : [ 0x14, ['unsigned long']],
'Timestamp' : [ 0x18, ['_WHEA_TIMESTAMP']],
'PlatformId' : [ 0x20, ['_GUID']],
'PartitionId' : [ 0x30, ['_GUID']],
'CreatorId' : [ 0x40, ['_GUID']],
'NotifyType' : [ 0x50, ['_GUID']],
'RecordId' : [ 0x60, ['unsigned long long']],
'Flags' : [ 0x68, ['_WHEA_ERROR_RECORD_HEADER_FLAGS']],
'PersistenceInfo' : [ 0x6c, ['_WHEA_PERSISTENCE_INFO']],
'Reserved' : [ 0x74, ['array', 12, ['unsigned char']]],
} ],
'_ALPC_PROCESS_CONTEXT' : [ 0x20, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'ViewListHead' : [ 0x8, ['_LIST_ENTRY']],
'PagedPoolQuotaCache' : [ 0x18, ['unsigned long long']],
} ],
'_DRIVER_EXTENSION' : [ 0x50, {
'DriverObject' : [ 0x0, ['pointer64', ['_DRIVER_OBJECT']]],
'AddDevice' : [ 0x8, ['pointer64', ['void']]],
'Count' : [ 0x10, ['unsigned long']],
'ServiceKeyName' : [ 0x18, ['_UNICODE_STRING']],
'ClientDriverExtension' : [ 0x28, ['pointer64', ['_IO_CLIENT_EXTENSION']]],
'FsFilterCallbacks' : [ 0x30, ['pointer64', ['_FS_FILTER_CALLBACKS']]],
'KseCallbacks' : [ 0x38, ['pointer64', ['void']]],
'DvCallbacks' : [ 0x40, ['pointer64', ['void']]],
'VerifierContext' : [ 0x48, ['pointer64', ['void']]],
} ],
'_PRIVILEGE_SET' : [ 0x14, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 1, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_WHEAP_WORK_QUEUE' : [ 0x88, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'ListLock' : [ 0x10, ['unsigned long long']],
'ItemCount' : [ 0x18, ['long']],
'Dpc' : [ 0x20, ['_KDPC']],
'WorkItem' : [ 0x60, ['_WORK_QUEUE_ITEM']],
'WorkRoutine' : [ 0x80, ['pointer64', ['void']]],
} ],
'_CM_NOTIFY_BLOCK' : [ 0x58, {
'HiveList' : [ 0x0, ['_LIST_ENTRY']],
'PostList' : [ 0x10, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x20, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'KeyBody' : [ 0x28, ['pointer64', ['_CM_KEY_BODY']]],
'Filter' : [ 0x30, ['BitField', dict(start_bit = 0, end_bit = 30, native_type='unsigned long')]],
'WatchTree' : [ 0x30, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'NotifyPending' : [ 0x30, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubjectContext' : [ 0x38, ['_SECURITY_SUBJECT_CONTEXT']],
} ],
'_MI_EXTRA_IMAGE_INFORMATION' : [ 0x8, {
'SizeOfHeaders' : [ 0x0, ['unsigned long']],
'SizeOfImage' : [ 0x4, ['unsigned long']],
} ],
'_KINTERRUPT' : [ 0x100, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'InterruptListEntry' : [ 0x8, ['_LIST_ENTRY']],
'ServiceRoutine' : [ 0x18, ['pointer64', ['void']]],
'MessageServiceRoutine' : [ 0x20, ['pointer64', ['void']]],
'MessageIndex' : [ 0x28, ['unsigned long']],
'ServiceContext' : [ 0x30, ['pointer64', ['void']]],
'SpinLock' : [ 0x38, ['unsigned long long']],
'TickCount' : [ 0x40, ['unsigned long']],
'ActualLock' : [ 0x48, ['pointer64', ['unsigned long long']]],
'DispatchAddress' : [ 0x50, ['pointer64', ['void']]],
'Vector' : [ 0x58, ['unsigned long']],
'Irql' : [ 0x5c, ['unsigned char']],
'SynchronizeIrql' : [ 0x5d, ['unsigned char']],
'FloatingSave' : [ 0x5e, ['unsigned char']],
'Connected' : [ 0x5f, ['unsigned char']],
'Number' : [ 0x60, ['unsigned long']],
'ShareVector' : [ 0x64, ['unsigned char']],
'EmulateActiveBoth' : [ 0x65, ['unsigned char']],
'ActiveCount' : [ 0x66, ['unsigned short']],
'InternalState' : [ 0x68, ['long']],
'Mode' : [ 0x6c, ['Enumeration', dict(target = 'long', choices = {0: 'LevelSensitive', 1: 'Latched'})]],
'Polarity' : [ 0x70, ['Enumeration', dict(target = 'long', choices = {0: 'InterruptPolarityUnknown', 1: 'InterruptRisingEdge', 2: 'InterruptFallingEdge', 3: 'InterruptActiveBothTriggerLow', 4: 'InterruptActiveBothTriggerHigh'})]],
'ServiceCount' : [ 0x74, ['unsigned long']],
'DispatchCount' : [ 0x78, ['unsigned long']],
'PassiveEvent' : [ 0x80, ['pointer64', ['_KEVENT']]],
'TrapFrame' : [ 0x88, ['pointer64', ['_KTRAP_FRAME']]],
'DisconnectData' : [ 0x90, ['pointer64', ['void']]],
'ServiceThread' : [ 0x98, ['pointer64', ['_KTHREAD']]],
'ConnectionData' : [ 0xa0, ['pointer64', ['_INTERRUPT_CONNECTION_DATA']]],
'IntTrackEntry' : [ 0xa8, ['pointer64', ['void']]],
'IsrDpcStats' : [ 0xb0, ['_ISRDPCSTATS']],
'RedirectObject' : [ 0xf0, ['pointer64', ['void']]],
'Padding' : [ 0xf8, ['array', 8, ['unsigned char']]],
} ],
'_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION' : [ 0x30, {
'SecurityAttributeCount' : [ 0x0, ['unsigned long']],
'SecurityAttributesList' : [ 0x8, ['_LIST_ENTRY']],
'WorkingSecurityAttributeCount' : [ 0x18, ['unsigned long']],
'WorkingSecurityAttributesList' : [ 0x20, ['_LIST_ENTRY']],
} ],
'_IMAGE_FILE_HEADER' : [ 0x14, {
'Machine' : [ 0x0, ['unsigned short']],
'NumberOfSections' : [ 0x2, ['unsigned short']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'PointerToSymbolTable' : [ 0x8, ['unsigned long']],
'NumberOfSymbols' : [ 0xc, ['unsigned long']],
'SizeOfOptionalHeader' : [ 0x10, ['unsigned short']],
'Characteristics' : [ 0x12, ['unsigned short']],
} ],
'_MMEXTEND_INFO' : [ 0x10, {
'CommittedSize' : [ 0x0, ['unsigned long long']],
'ReferenceCount' : [ 0x8, ['unsigned long']],
} ],
'_STRING64' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['unsigned long long']],
} ],
'_HIVE_LIST_ENTRY' : [ 0x98, {
'FileName' : [ 0x0, ['pointer64', ['unsigned short']]],
'BaseName' : [ 0x8, ['pointer64', ['unsigned short']]],
'RegRootName' : [ 0x10, ['pointer64', ['unsigned short']]],
'CmHive' : [ 0x18, ['pointer64', ['_CMHIVE']]],
'HHiveFlags' : [ 0x20, ['unsigned long']],
'CmHiveFlags' : [ 0x24, ['unsigned long']],
'CmKcbCacheSize' : [ 0x28, ['unsigned long']],
'CmHive2' : [ 0x30, ['pointer64', ['_CMHIVE']]],
'HiveMounted' : [ 0x38, ['unsigned char']],
'ThreadFinished' : [ 0x39, ['unsigned char']],
'ThreadStarted' : [ 0x3a, ['unsigned char']],
'Allocate' : [ 0x3b, ['unsigned char']],
'WinPERequired' : [ 0x3c, ['unsigned char']],
'StartEvent' : [ 0x40, ['_KEVENT']],
'FinishedEvent' : [ 0x58, ['_KEVENT']],
'MountLock' : [ 0x70, ['_KEVENT']],
'FilePath' : [ 0x88, ['_UNICODE_STRING']],
} ],
'_HMAP_DIRECTORY' : [ 0x2000, {
'Directory' : [ 0x0, ['array', 1024, ['pointer64', ['_HMAP_TABLE']]]],
} ],
'_CONTEXT' : [ 0x4d0, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5Home' : [ 0x20, ['unsigned long long']],
'P6Home' : [ 0x28, ['unsigned long long']],
'ContextFlags' : [ 0x30, ['unsigned long']],
'MxCsr' : [ 0x34, ['unsigned long']],
'SegCs' : [ 0x38, ['unsigned short']],
'SegDs' : [ 0x3a, ['unsigned short']],
'SegEs' : [ 0x3c, ['unsigned short']],
'SegFs' : [ 0x3e, ['unsigned short']],
'SegGs' : [ 0x40, ['unsigned short']],
'SegSs' : [ 0x42, ['unsigned short']],
'EFlags' : [ 0x44, ['unsigned long']],
'Dr0' : [ 0x48, ['unsigned long long']],
'Dr1' : [ 0x50, ['unsigned long long']],
'Dr2' : [ 0x58, ['unsigned long long']],
'Dr3' : [ 0x60, ['unsigned long long']],
'Dr6' : [ 0x68, ['unsigned long long']],
'Dr7' : [ 0x70, ['unsigned long long']],
'Rax' : [ 0x78, ['unsigned long long']],
'Rcx' : [ 0x80, ['unsigned long long']],
'Rdx' : [ 0x88, ['unsigned long long']],
'Rbx' : [ 0x90, ['unsigned long long']],
'Rsp' : [ 0x98, ['unsigned long long']],
'Rbp' : [ 0xa0, ['unsigned long long']],
'Rsi' : [ 0xa8, ['unsigned long long']],
'Rdi' : [ 0xb0, ['unsigned long long']],
'R8' : [ 0xb8, ['unsigned long long']],
'R9' : [ 0xc0, ['unsigned long long']],
'R10' : [ 0xc8, ['unsigned long long']],
'R11' : [ 0xd0, ['unsigned long long']],
'R12' : [ 0xd8, ['unsigned long long']],
'R13' : [ 0xe0, ['unsigned long long']],
'R14' : [ 0xe8, ['unsigned long long']],
'R15' : [ 0xf0, ['unsigned long long']],
'Rip' : [ 0xf8, ['unsigned long long']],
'FltSave' : [ 0x100, ['_XSAVE_FORMAT']],
'Header' : [ 0x100, ['array', 2, ['_M128A']]],
'Legacy' : [ 0x120, ['array', 8, ['_M128A']]],
'Xmm0' : [ 0x1a0, ['_M128A']],
'Xmm1' : [ 0x1b0, ['_M128A']],
'Xmm2' : [ 0x1c0, ['_M128A']],
'Xmm3' : [ 0x1d0, ['_M128A']],
'Xmm4' : [ 0x1e0, ['_M128A']],
'Xmm5' : [ 0x1f0, ['_M128A']],
'Xmm6' : [ 0x200, ['_M128A']],
'Xmm7' : [ 0x210, ['_M128A']],
'Xmm8' : [ 0x220, ['_M128A']],
'Xmm9' : [ 0x230, ['_M128A']],
'Xmm10' : [ 0x240, ['_M128A']],
'Xmm11' : [ 0x250, ['_M128A']],
'Xmm12' : [ 0x260, ['_M128A']],
'Xmm13' : [ 0x270, ['_M128A']],
'Xmm14' : [ 0x280, ['_M128A']],
'Xmm15' : [ 0x290, ['_M128A']],
'VectorRegister' : [ 0x300, ['array', 26, ['_M128A']]],
'VectorControl' : [ 0x4a0, ['unsigned long long']],
'DebugControl' : [ 0x4a8, ['unsigned long long']],
'LastBranchToRip' : [ 0x4b0, ['unsigned long long']],
'LastBranchFromRip' : [ 0x4b8, ['unsigned long long']],
'LastExceptionToRip' : [ 0x4c0, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x4c8, ['unsigned long long']],
} ],
'_ALPC_HANDLE_TABLE' : [ 0x18, {
'Handles' : [ 0x0, ['pointer64', ['_ALPC_HANDLE_ENTRY']]],
'TotalHandles' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned long']],
'Lock' : [ 0x10, ['_EX_PUSH_LOCK']],
} ],
'__unnamed_1fa4' : [ 0x3a4, {
'XpfMceDescriptor' : [ 0x0, ['_WHEA_XPF_MCE_DESCRIPTOR']],
'XpfCmcDescriptor' : [ 0x0, ['_WHEA_XPF_CMC_DESCRIPTOR']],
'XpfNmiDescriptor' : [ 0x0, ['_WHEA_XPF_NMI_DESCRIPTOR']],
'IpfMcaDescriptor' : [ 0x0, ['_WHEA_IPF_MCA_DESCRIPTOR']],
'IpfCmcDescriptor' : [ 0x0, ['_WHEA_IPF_CMC_DESCRIPTOR']],
'IpfCpeDescriptor' : [ 0x0, ['_WHEA_IPF_CPE_DESCRIPTOR']],
'AerRootportDescriptor' : [ 0x0, ['_WHEA_AER_ROOTPORT_DESCRIPTOR']],
'AerEndpointDescriptor' : [ 0x0, ['_WHEA_AER_ENDPOINT_DESCRIPTOR']],
'AerBridgeDescriptor' : [ 0x0, ['_WHEA_AER_BRIDGE_DESCRIPTOR']],
'GenErrDescriptor' : [ 0x0, ['_WHEA_GENERIC_ERROR_DESCRIPTOR']],
} ],
'_WHEA_ERROR_SOURCE_DESCRIPTOR' : [ 0x3cc, {
'Length' : [ 0x0, ['unsigned long']],
'Version' : [ 0x4, ['unsigned long']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSrcTypeMCE', 1: 'WheaErrSrcTypeCMC', 2: 'WheaErrSrcTypeCPE', 3: 'WheaErrSrcTypeNMI', 4: 'WheaErrSrcTypePCIe', 5: 'WheaErrSrcTypeGeneric', 6: 'WheaErrSrcTypeINIT', 7: 'WheaErrSrcTypeBOOT', 8: 'WheaErrSrcTypeSCIGeneric', 9: 'WheaErrSrcTypeIPFMCA', 10: 'WheaErrSrcTypeIPFCMC', 11: 'WheaErrSrcTypeIPFCPE', 12: 'WheaErrSrcTypeMax'})]],
'State' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {1: 'WheaErrSrcStateStopped', 2: 'WheaErrSrcStateStarted'})]],
'MaxRawDataLength' : [ 0x10, ['unsigned long']],
'NumRecordsToPreallocate' : [ 0x14, ['unsigned long']],
'MaxSectionsPerRecord' : [ 0x18, ['unsigned long']],
'ErrorSourceId' : [ 0x1c, ['unsigned long']],
'PlatformErrorSourceId' : [ 0x20, ['unsigned long']],
'Flags' : [ 0x24, ['unsigned long']],
'Info' : [ 0x28, ['__unnamed_1fa4']],
} ],
'_MMPTE_HARDWARE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Dirty1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 52, native_type='unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type='unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_IO_COMPLETION_CONTEXT' : [ 0x10, {
'Port' : [ 0x0, ['pointer64', ['void']]],
'Key' : [ 0x8, ['pointer64', ['void']]],
} ],
'_EX_WORK_QUEUE' : [ 0x2d0, {
'WorkPriQueue' : [ 0x0, ['_KPRIQUEUE']],
'Node' : [ 0x2b0, ['pointer64', ['_ENODE']]],
'WorkItemsProcessed' : [ 0x2b8, ['unsigned long']],
'WorkItemsProcessedLastPass' : [ 0x2bc, ['unsigned long']],
'ThreadCount' : [ 0x2c0, ['long']],
'MinThreads' : [ 0x2c4, ['BitField', dict(start_bit = 0, end_bit = 31, native_type='long')]],
'TryFailed' : [ 0x2c4, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'MaxThreads' : [ 0x2c8, ['long']],
'QueueIndex' : [ 0x2cc, ['Enumeration', dict(target = 'long', choices = {0: 'ExPoolUntrusted', 1: 'ExPoolTrusted', 8: 'ExPoolMax'})]],
} ],
'_IOV_FORCED_PENDING_TRACE' : [ 0x200, {
'Irp' : [ 0x0, ['pointer64', ['_IRP']]],
'Thread' : [ 0x8, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x10, ['array', 62, ['pointer64', ['void']]]],
} ],
'_IOP_IRP_EXTENSION_STATUS' : [ 0xc, {
'Flags' : [ 0x0, ['unsigned long']],
'ActivityId' : [ 0x4, ['unsigned long']],
'IoTracking' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_SET_CONTEXT' : [ 0x4, {
'ContextFlags' : [ 0x0, ['unsigned long']],
} ],
'_VI_POOL_ENTRY_INUSE' : [ 0x20, {
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'CallingAddress' : [ 0x8, ['pointer64', ['void']]],
'NumberOfBytes' : [ 0x10, ['unsigned long long']],
'Tag' : [ 0x18, ['unsigned long long']],
} ],
'_MI_FLAGS' : [ 0x4, {
'EntireFlags' : [ 0x0, ['long']],
'VerifierEnabled' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'KernelVerifierEnabled' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LargePageKernel' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'StopOn4d' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'InitializationPhase' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 6, native_type='unsigned long')]],
'PageKernelStacks' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'CheckZeroPages' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ProcessorPrewalks' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'ProcessorPostwalks' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'CoverageBuild' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'AccessBitReplacementDisabled' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'CheckExecute' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'ZeroNonCachedByConverting' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'ZeroWriteCombinedByConverting' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'ProtectedPagesEnabled' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'StrongCodeGuarantees' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'HardCodeGuarantees' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'ExecutePagePrivilegeRequired' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'StrongPageIdentity' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'SecureRelocations' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
} ],
'_INTERFACE' : [ 0x20, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
} ],
'_ACL' : [ 0x8, {
'AclRevision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'AclSize' : [ 0x2, ['unsigned short']],
'AceCount' : [ 0x4, ['unsigned short']],
'Sbz2' : [ 0x6, ['unsigned short']],
} ],
'_PS_PROPERTY_SET' : [ 0x18, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Lock' : [ 0x10, ['unsigned long long']],
} ],
'_LAZY_WRITER' : [ 0x88, {
'ScanDpc' : [ 0x0, ['_KDPC']],
'ScanTimer' : [ 0x40, ['_KTIMER']],
'ScanActive' : [ 0x80, ['unsigned char']],
'OtherWork' : [ 0x81, ['unsigned char']],
'PendingTeardownScan' : [ 0x82, ['unsigned char']],
'PendingPeriodicScan' : [ 0x83, ['unsigned char']],
'PendingLowMemoryScan' : [ 0x84, ['unsigned char']],
'PendingPowerScan' : [ 0x85, ['unsigned char']],
'PendingCoalescingFlushScan' : [ 0x86, ['unsigned char']],
} ],
'_PI_BUS_EXTENSION' : [ 0x70, {
'Flags' : [ 0x0, ['unsigned long']],
'NumberCSNs' : [ 0x4, ['unsigned char']],
'ReadDataPort' : [ 0x8, ['pointer64', ['unsigned char']]],
'DataPortMapped' : [ 0x10, ['unsigned char']],
'AddressPort' : [ 0x18, ['pointer64', ['unsigned char']]],
'AddrPortMapped' : [ 0x20, ['unsigned char']],
'CommandPort' : [ 0x28, ['pointer64', ['unsigned char']]],
'CmdPortMapped' : [ 0x30, ['unsigned char']],
'NextSlotNumber' : [ 0x34, ['unsigned long']],
'DeviceList' : [ 0x38, ['_SINGLE_LIST_ENTRY']],
'CardList' : [ 0x40, ['_SINGLE_LIST_ENTRY']],
'PhysicalBusDevice' : [ 0x48, ['pointer64', ['_DEVICE_OBJECT']]],
'FunctionalBusDevice' : [ 0x50, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x58, ['pointer64', ['_DEVICE_OBJECT']]],
'BusNumber' : [ 0x60, ['unsigned long']],
'SystemPowerState' : [ 0x64, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DevicePowerState' : [ 0x68, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_DEVICE_DESCRIPTION' : [ 0x40, {
'Version' : [ 0x0, ['unsigned long']],
'Master' : [ 0x4, ['unsigned char']],
'ScatterGather' : [ 0x5, ['unsigned char']],
'DemandMode' : [ 0x6, ['unsigned char']],
'AutoInitialize' : [ 0x7, ['unsigned char']],
'Dma32BitAddresses' : [ 0x8, ['unsigned char']],
'IgnoreCount' : [ 0x9, ['unsigned char']],
'Reserved1' : [ 0xa, ['unsigned char']],
'Dma64BitAddresses' : [ 0xb, ['unsigned char']],
'BusNumber' : [ 0xc, ['unsigned long']],
'DmaChannel' : [ 0x10, ['unsigned long']],
'InterfaceType' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'ACPIBus', 18: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'DmaWidth' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'Width8Bits', 1: 'Width16Bits', 2: 'Width32Bits', 3: 'Width64Bits', 4: 'WidthNoWrap', 5: 'MaximumDmaWidth'})]],
'DmaSpeed' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'Compatible', 1: 'TypeA', 2: 'TypeB', 3: 'TypeC', 4: 'TypeF', 5: 'MaximumDmaSpeed'})]],
'MaximumLength' : [ 0x20, ['unsigned long']],
'DmaPort' : [ 0x24, ['unsigned long']],
'DmaAddressWidth' : [ 0x28, ['unsigned long']],
'DmaControllerInstance' : [ 0x2c, ['unsigned long']],
'DmaRequestLine' : [ 0x30, ['unsigned long']],
'DeviceAddress' : [ 0x38, ['_LARGE_INTEGER']],
} ],
'_PS_TRUSTLET_ATTRIBUTE_ACCESSRIGHTS' : [ 0x1, {
'Trustlet' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Ntos' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'WriteHandle' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'ReadHandle' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'AccessRights' : [ 0x0, ['unsigned char']],
} ],
'_SID_AND_ATTRIBUTES' : [ 0x10, {
'Sid' : [ 0x0, ['pointer64', ['void']]],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_SID_IDENTIFIER_AUTHORITY' : [ 0x6, {
'Value' : [ 0x0, ['array', 6, ['unsigned char']]],
} ],
'_PROCESS_DISK_COUNTERS' : [ 0x28, {
'BytesRead' : [ 0x0, ['unsigned long long']],
'BytesWritten' : [ 0x8, ['unsigned long long']],
'ReadOperationCount' : [ 0x10, ['unsigned long long']],
'WriteOperationCount' : [ 0x18, ['unsigned long long']],
'FlushOperationCount' : [ 0x20, ['unsigned long long']],
} ],
'_IO_WORKITEM' : [ 0x58, {
'WorkItem' : [ 0x0, ['_WORK_QUEUE_ITEM']],
'Routine' : [ 0x20, ['pointer64', ['void']]],
'IoObject' : [ 0x28, ['pointer64', ['void']]],
'Context' : [ 0x30, ['pointer64', ['void']]],
'WorkingOnBehalfClient' : [ 0x38, ['pointer64', ['void']]],
'Type' : [ 0x40, ['unsigned long']],
'ActivityId' : [ 0x44, ['_GUID']],
} ],
'_MMVAD_FLAGS' : [ 0x4, {
'VadType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned long')]],
'PreferredNode' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 14, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'PrivateMemory' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'PrivateFixup' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'ManySubsections' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Enclave' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'DeleteInProgress' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 32, native_type='unsigned long')]],
} ],
'_MMWSLE_HASH' : [ 0x8, {
'Index' : [ 0x0, ['unsigned long long']],
} ],
'_JOBOBJECT_WAKE_FILTER' : [ 0x8, {
'HighEdgeFilter' : [ 0x0, ['unsigned long']],
'LowEdgeFilter' : [ 0x4, ['unsigned long']],
} ],
'_UNEXPECTED_INTERRUPT' : [ 0x8, {
'PushImm' : [ 0x0, ['unsigned char']],
'Vector' : [ 0x1, ['unsigned char']],
'PushRbp' : [ 0x2, ['unsigned char']],
'JmpOp' : [ 0x3, ['unsigned char']],
'JmpOffset' : [ 0x4, ['long']],
} ],
'_STRING32' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['unsigned long']],
} ],
'_POP_FX_WORK_ORDER_WATCHDOG_INFO' : [ 0x88, {
'Timer' : [ 0x0, ['_KTIMER']],
'Dpc' : [ 0x40, ['_KDPC']],
'WorkOrder' : [ 0x80, ['pointer64', ['_POP_FX_WORK_ORDER']]],
} ],
'_MI_VAD_EVENT_BLOCK' : [ 0x40, {
'Next' : [ 0x0, ['pointer64', ['_MI_VAD_EVENT_BLOCK']]],
'WaitReason' : [ 0x8, ['unsigned long']],
'Gate' : [ 0x10, ['_KGATE']],
'SecureInfo' : [ 0x10, ['_MMADDRESS_LIST']],
'BitMap' : [ 0x10, ['_RTL_BITMAP_EX']],
'InPageSupport' : [ 0x10, ['pointer64', ['_MMINPAGE_SUPPORT']]],
'LargePage' : [ 0x10, ['pointer64', ['_MI_LARGEPAGE_MEMORY_INFO']]],
'CreatingThread' : [ 0x10, ['pointer64', ['_ETHREAD']]],
'PebTeb' : [ 0x10, ['_MI_SUB64K_FREE_RANGES']],
} ],
'_DBGKD_FILL_MEMORY' : [ 0x10, {
'Address' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned short']],
'PatternLength' : [ 0xe, ['unsigned short']],
} ],
'_HEAP_STOP_ON_VALUES' : [ 0x30, {
'AllocAddress' : [ 0x0, ['unsigned long long']],
'AllocTag' : [ 0x8, ['_HEAP_STOP_ON_TAG']],
'ReAllocAddress' : [ 0x10, ['unsigned long long']],
'ReAllocTag' : [ 0x18, ['_HEAP_STOP_ON_TAG']],
'FreeAddress' : [ 0x20, ['unsigned long long']],
'FreeTag' : [ 0x28, ['_HEAP_STOP_ON_TAG']],
} ],
'_HEAP_PSEUDO_TAG_ENTRY' : [ 0x10, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long long']],
} ],
'_CALL_HASH_ENTRY' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CallersAddress' : [ 0x10, ['pointer64', ['void']]],
'CallersCaller' : [ 0x18, ['pointer64', ['void']]],
'CallCount' : [ 0x20, ['unsigned long']],
} ],
'_SEP_LUID_TO_INDEX_MAP_ENTRY' : [ 0x38, {
'HashEntry' : [ 0x0, ['_RTL_DYNAMIC_HASH_TABLE_ENTRY']],
'ReferenceCount' : [ 0x18, ['long long']],
'Luid' : [ 0x20, ['unsigned long long']],
'IndexIntoGlobalSingletonTable' : [ 0x28, ['unsigned long long']],
'MarkedForDeletion' : [ 0x30, ['unsigned char']],
} ],
'_KTIMER2_COLLECTION' : [ 0x18, {
'Tree' : [ 0x0, ['_RTL_RB_TREE']],
'NextDueTime' : [ 0x10, ['unsigned long long']],
} ],
'_MIPFNBLINK' : [ 0x8, {
'Blink' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 36, native_type='unsigned long long')]],
'NodeBlinkHigh' : [ 0x0, ['BitField', dict(start_bit = 36, end_bit = 56, native_type='unsigned long long')]],
'TbFlushStamp' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 60, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 60, end_bit = 62, native_type='unsigned long long')]],
'PageBlinkDeleteBit' : [ 0x0, ['BitField', dict(start_bit = 62, end_bit = 63, native_type='unsigned long long')]],
'PageBlinkLockBit' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
'ShareCount' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 62, native_type='unsigned long long')]],
'PageShareCountDeleteBit' : [ 0x0, ['BitField', dict(start_bit = 62, end_bit = 63, native_type='unsigned long long')]],
'PageShareCountLockBit' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
'EntireField' : [ 0x0, ['unsigned long long']],
'Lock' : [ 0x0, ['long long']],
'LockNotUsed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 62, native_type='unsigned long long')]],
'DeleteBit' : [ 0x0, ['BitField', dict(start_bit = 62, end_bit = 63, native_type='unsigned long long')]],
'LockBit' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_VF_TRACKER_STAMP' : [ 0x10, {
'Thread' : [ 0x0, ['pointer64', ['void']]],
'Flags' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'OldIrql' : [ 0x9, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'NewIrql' : [ 0xa, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'Processor' : [ 0xb, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
} ],
'_VI_TRACK_IRQL' : [ 0x38, {
'Thread' : [ 0x0, ['pointer64', ['void']]],
'OldIrql' : [ 0x8, ['unsigned char']],
'NewIrql' : [ 0x9, ['unsigned char']],
'Processor' : [ 0xa, ['unsigned short']],
'TickCount' : [ 0xc, ['unsigned long']],
'StackTrace' : [ 0x10, ['array', 5, ['pointer64', ['void']]]],
} ],
'_MMCLONE_HEADER' : [ 0x18, {
'NumberOfPtes' : [ 0x0, ['unsigned long long']],
'NumberOfProcessReferences' : [ 0x8, ['unsigned long long']],
'ClonePtes' : [ 0x10, ['pointer64', ['_MMCLONE_BLOCK']]],
} ],
'_SESSION_LOWBOX_MAP' : [ 0x40, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SessionId' : [ 0x10, ['unsigned long']],
'LowboxMap' : [ 0x18, ['_SEP_LOWBOX_NUMBER_MAPPING']],
} ],
'_PROCESSOR_PROFILE_CONTROL_AREA' : [ 0x60, {
'PebsDsSaveArea' : [ 0x0, ['_PEBS_DS_SAVE_AREA']],
} ],
'_PEB_LDR_DATA' : [ 0x58, {
'Length' : [ 0x0, ['unsigned long']],
'Initialized' : [ 0x4, ['unsigned char']],
'SsHandle' : [ 0x8, ['pointer64', ['void']]],
'InLoadOrderModuleList' : [ 0x10, ['_LIST_ENTRY']],
'InMemoryOrderModuleList' : [ 0x20, ['_LIST_ENTRY']],
'InInitializationOrderModuleList' : [ 0x30, ['_LIST_ENTRY']],
'EntryInProgress' : [ 0x40, ['pointer64', ['void']]],
'ShutdownInProgress' : [ 0x48, ['unsigned char']],
'ShutdownThreadId' : [ 0x50, ['pointer64', ['void']]],
} ],
'_PNP_DEVICE_EVENT_ENTRY' : [ 0xb8, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Argument' : [ 0x10, ['unsigned long']],
'ArgumentStatus' : [ 0x14, ['long']],
'CallerEvent' : [ 0x18, ['pointer64', ['_KEVENT']]],
'Callback' : [ 0x20, ['pointer64', ['void']]],
'Context' : [ 0x28, ['pointer64', ['void']]],
'VetoType' : [ 0x30, ['pointer64', ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]]],
'VetoName' : [ 0x38, ['pointer64', ['_UNICODE_STRING']]],
'RefCount' : [ 0x40, ['unsigned long']],
'Lock' : [ 0x44, ['unsigned long']],
'Cancel' : [ 0x48, ['unsigned char']],
'Parent' : [ 0x50, ['pointer64', ['_PNP_DEVICE_EVENT_ENTRY']]],
'ActivityId' : [ 0x58, ['_GUID']],
'Data' : [ 0x68, ['_PLUGPLAY_EVENT_BLOCK']],
} ],
'_HEAP_STOP_ON_TAG' : [ 0x4, {
'HeapAndTagIndex' : [ 0x0, ['unsigned long']],
'TagIndex' : [ 0x0, ['unsigned short']],
'HeapIndex' : [ 0x2, ['unsigned short']],
} ],
'_PS_WAKE_INFORMATION' : [ 0x38, {
'NotificationChannel' : [ 0x0, ['unsigned long long']],
'WakeCounters' : [ 0x8, ['array', 5, ['unsigned long long']]],
'NoWakeCounter' : [ 0x30, ['unsigned long long']],
} ],
'_RH_OP_CONTEXT' : [ 0x48, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'OplockRequestIrp' : [ 0x10, ['pointer64', ['_IRP']]],
'OplockRequestFileObject' : [ 0x18, ['pointer64', ['_FILE_OBJECT']]],
'OplockRequestProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
'OplockOwnerThread' : [ 0x28, ['pointer64', ['_ETHREAD']]],
'Flags' : [ 0x30, ['unsigned long']],
'AtomicLinks' : [ 0x38, ['_LIST_ENTRY']],
} ],
'_DBGKD_GET_CONTEXT' : [ 0x4, {
'Unused' : [ 0x0, ['unsigned long']],
} ],
'_TEB_ACTIVE_FRAME_CONTEXT' : [ 0x10, {
'Flags' : [ 0x0, ['unsigned long']],
'FrameName' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_KWAIT_CHAIN' : [ 0x8, {
'Head' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
} ],
'_ISRDPCSTATS' : [ 0x40, {
'IsrTime' : [ 0x0, ['unsigned long long']],
'IsrTimeStart' : [ 0x8, ['unsigned long long']],
'IsrCount' : [ 0x10, ['unsigned long long']],
'DpcTime' : [ 0x18, ['unsigned long long']],
'DpcTimeStart' : [ 0x20, ['unsigned long long']],
'DpcCount' : [ 0x28, ['unsigned long long']],
'IsrActive' : [ 0x30, ['unsigned char']],
'Reserved' : [ 0x31, ['array', 15, ['unsigned char']]],
} ],
'_RTL_BITMAP_EX' : [ 0x10, {
'SizeOfBitMap' : [ 0x0, ['unsigned long long']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned long long']]],
} ],
'_MI_PARTITION_PAGE_LISTS' : [ 0xd40, {
'FreePagesByColor' : [ 0x0, ['array', 2, ['pointer64', ['_MMPFNLIST']]]],
'FreePageSlist' : [ 0x10, ['array', 2, ['pointer64', ['_SLIST_HEADER']]]],
'ZeroedPageListHead' : [ 0x40, ['_MMPFNLIST']],
'FreePageListHead' : [ 0x80, ['_MMPFNLIST']],
'StandbyPageListHead' : [ 0xc0, ['_MMPFNLIST']],
'StandbyPageListByPriority' : [ 0x100, ['array', 8, ['_MMPFNLIST']]],
'ModifiedPageListNoReservation' : [ 0x240, ['_MMPFNLIST']],
'ModifiedPageListByReservation' : [ 0x280, ['array', 16, ['_MMPFNLIST']]],
'MappedPageListHead' : [ 0x500, ['array', 16, ['_MMPFNLIST']]],
'BadPageListHead' : [ 0x780, ['_MMPFNLIST']],
'EnclavePageListHead' : [ 0x7c0, ['_MMPFNLIST']],
'PageLocationList' : [ 0x7e8, ['array', 8, ['pointer64', ['_MMPFNLIST']]]],
'StandbyRepurposedByPriority' : [ 0x828, ['array', 8, ['unsigned long']]],
'MappedPageListHeadEvent' : [ 0x848, ['array', 16, ['_KEVENT']]],
'DecayClusterTimerHeads' : [ 0x9c8, ['array', 4, ['_MI_DECAY_TIMER_LINK']]],
'DecayHand' : [ 0x9e8, ['unsigned long']],
'LastDecayHandUpdateTime' : [ 0x9f0, ['unsigned long long']],
'LastChanceLdwContext' : [ 0x9f8, ['_MI_LDW_WORK_CONTEXT']],
'AvailableEventsLock' : [ 0xa40, ['unsigned long long']],
'AvailablePageWaitStates' : [ 0xa48, ['array', 3, ['_MI_AVAILABLE_PAGE_WAIT_STATES']]],
'LowMemoryThreshold' : [ 0xaa8, ['unsigned long long']],
'HighMemoryThreshold' : [ 0xab0, ['unsigned long long']],
'TransitionPrivatePages' : [ 0xac0, ['unsigned long long']],
'StandbyListDiscard' : [ 0xac8, ['unsigned long']],
'FreeListDiscard' : [ 0xacc, ['unsigned char']],
'RebuildLargePagesInitialized' : [ 0xacd, ['unsigned char']],
'RebuildLargePagesItem' : [ 0xad0, ['_MI_REBUILD_LARGE_PAGES']],
'AddMemoryNotifyList' : [ 0xcf8, ['_LIST_ENTRY']],
'MirrorListLocks' : [ 0xd08, ['pointer64', ['void']]],
} ],
'_XSTATE_CONFIGURATION' : [ 0x330, {
'EnabledFeatures' : [ 0x0, ['unsigned long long']],
'EnabledVolatileFeatures' : [ 0x8, ['unsigned long long']],
'Size' : [ 0x10, ['unsigned long']],
'OptimizedSave' : [ 0x14, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CompactionEnabled' : [ 0x14, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Features' : [ 0x18, ['array', 64, ['_XSTATE_FEATURE']]],
'EnabledSupervisorFeatures' : [ 0x218, ['unsigned long long']],
'AlignedFeatures' : [ 0x220, ['unsigned long long']],
'AllFeatureSize' : [ 0x228, ['unsigned long']],
'AllFeatures' : [ 0x22c, ['array', 64, ['unsigned long']]],
} ],
'_CM_KEY_CONTROL_BLOCK' : [ 0x128, {
'RefCount' : [ 0x0, ['unsigned long']],
'ExtFlags' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'PrivateAlloc' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'Delete' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HiveUnloaded' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'Decommissioned' : [ 0x4, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'LockTablePresent' : [ 0x4, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'TotalLevels' : [ 0x4, ['BitField', dict(start_bit = 21, end_bit = 31, native_type='unsigned long')]],
'DelayedDeref' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DelayedClose' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Parking' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'KeyHash' : [ 0x10, ['_CM_KEY_HASH']],
'ConvKey' : [ 0x10, ['unsigned long']],
'NextHash' : [ 0x18, ['pointer64', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x20, ['pointer64', ['_HHIVE']]],
'KeyCell' : [ 0x28, ['unsigned long']],
'KcbPushlock' : [ 0x30, ['_EX_PUSH_LOCK']],
'Owner' : [ 0x38, ['pointer64', ['_KTHREAD']]],
'SharedCount' : [ 0x38, ['long']],
'SlotHint' : [ 0x40, ['unsigned long']],
'ParentKcb' : [ 0x48, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NameBlock' : [ 0x50, ['pointer64', ['_CM_NAME_CONTROL_BLOCK']]],
'CachedSecurity' : [ 0x58, ['pointer64', ['_CM_KEY_SECURITY_CACHE']]],
'ValueCache' : [ 0x60, ['_CACHED_CHILD_LIST']],
'IndexHint' : [ 0x70, ['pointer64', ['_CM_INDEX_HINT_BLOCK']]],
'HashKey' : [ 0x70, ['unsigned long']],
'SubKeyCount' : [ 0x70, ['unsigned long']],
'KeyBodyListHead' : [ 0x78, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x78, ['_LIST_ENTRY']],
'KeyBodyArray' : [ 0x88, ['array', 4, ['pointer64', ['_CM_KEY_BODY']]]],
'KcbLastWriteTime' : [ 0xa8, ['_LARGE_INTEGER']],
'KcbMaxNameLen' : [ 0xb0, ['unsigned short']],
'KcbMaxValueNameLen' : [ 0xb2, ['unsigned short']],
'KcbMaxValueDataLen' : [ 0xb4, ['unsigned long']],
'KcbUserFlags' : [ 0xb8, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'KcbVirtControlFlags' : [ 0xb8, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'KcbDebug' : [ 0xb8, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'Flags' : [ 0xb8, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'RealKeyName' : [ 0xc0, ['pointer64', ['unsigned char']]],
'KCBUoWListHead' : [ 0xc8, ['_LIST_ENTRY']],
'DelayQueueEntry' : [ 0xd8, ['_LIST_ENTRY']],
'Stolen' : [ 0xd8, ['pointer64', ['unsigned char']]],
'TransKCBOwner' : [ 0xe8, ['pointer64', ['_CM_TRANS']]],
'KCBLock' : [ 0xf0, ['_CM_INTENT_LOCK']],
'KeyLock' : [ 0x100, ['_CM_INTENT_LOCK']],
'TransValueCache' : [ 0x110, ['_CHILD_LIST']],
'TransValueListOwner' : [ 0x118, ['pointer64', ['_CM_TRANS']]],
'FullKCBName' : [ 0x120, ['pointer64', ['_UNICODE_STRING']]],
} ],
'_KLOCK_ENTRY' : [ 0x60, {
'TreeNode' : [ 0x0, ['_RTL_BALANCED_NODE']],
'FreeListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'EntryFlags' : [ 0x18, ['unsigned long']],
'EntryOffset' : [ 0x18, ['unsigned char']],
'ThreadLocalFlags' : [ 0x19, ['unsigned char']],
'WaitingBit' : [ 0x19, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Spare0' : [ 0x19, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
'AcquiredByte' : [ 0x1a, ['unsigned char']],
'AcquiredBit' : [ 0x1a, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'CrossThreadFlags' : [ 0x1b, ['unsigned char']],
'HeadNodeBit' : [ 0x1b, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IoPriorityBit' : [ 0x1b, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Spare1' : [ 0x1b, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'StaticState' : [ 0x18, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'AllFlags' : [ 0x18, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
'SpareFlags' : [ 0x1c, ['unsigned long']],
'LockState' : [ 0x20, ['_KLOCK_ENTRY_LOCK_STATE']],
'LockUnsafe' : [ 0x20, ['pointer64', ['void']]],
'CrossThreadReleasableAndBusyByte' : [ 0x20, ['unsigned char']],
'Reserved' : [ 0x21, ['array', 6, ['unsigned char']]],
'InTreeByte' : [ 0x27, ['unsigned char']],
'SessionState' : [ 0x28, ['pointer64', ['void']]],
'SessionId' : [ 0x28, ['unsigned long']],
'SessionPad' : [ 0x2c, ['unsigned long']],
'OwnerTree' : [ 0x30, ['_RTL_RB_TREE']],
'WaiterTree' : [ 0x40, ['_RTL_RB_TREE']],
'CpuPriorityKey' : [ 0x30, ['unsigned char']],
'EntryLock' : [ 0x50, ['unsigned long long']],
'AllBoosts' : [ 0x58, ['unsigned short']],
'IoBoost' : [ 0x58, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'CpuBoostsBitmap' : [ 0x58, ['BitField', dict(start_bit = 1, end_bit = 16, native_type='unsigned short')]],
'IoNormalPriorityWaiterCount' : [ 0x5a, ['unsigned short']],
'SparePad' : [ 0x5c, ['unsigned short']],
} ],
'_OBP_SYSTEM_DOS_DEVICE_STATE' : [ 0x6c, {
'GlobalDeviceMap' : [ 0x0, ['unsigned long']],
'LocalDeviceCount' : [ 0x4, ['array', 26, ['unsigned long']]],
} ],
'_MMPTE_SOFTWARE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFileReserved' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long long')]],
'PageFileAllocated' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long long')]],
'UsedPageTableEntries' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 24, native_type='unsigned long long')]],
'LocalPartition' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 32, native_type='unsigned long long')]],
'PageFileHigh' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_2080' : [ 0x10, {
'IoStatus' : [ 0x0, ['_IO_STATUS_BLOCK']],
} ],
'_MMMOD_WRITER_MDL_ENTRY' : [ 0x108, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'u' : [ 0x10, ['__unnamed_2080']],
'Irp' : [ 0x20, ['pointer64', ['_IRP']]],
'u1' : [ 0x28, ['_MODWRITER_FLAGS']],
'StoreWriteRefCount' : [ 0x2c, ['unsigned long']],
'StoreWriteCompletionApc' : [ 0x30, ['_KAPC']],
'ByteCount' : [ 0x88, ['unsigned long']],
'ChargedPages' : [ 0x8c, ['unsigned long']],
'PagingFile' : [ 0x90, ['pointer64', ['_MMPAGING_FILE']]],
'File' : [ 0x98, ['pointer64', ['_FILE_OBJECT']]],
'ControlArea' : [ 0xa0, ['pointer64', ['_CONTROL_AREA']]],
'FileResource' : [ 0xa8, ['pointer64', ['_ERESOURCE']]],
'WriteOffset' : [ 0xb0, ['_LARGE_INTEGER']],
'IssueTime' : [ 0xb8, ['_LARGE_INTEGER']],
'Partition' : [ 0xc0, ['pointer64', ['_MI_PARTITION']]],
'PointerMdl' : [ 0xc8, ['pointer64', ['_MDL']]],
'Mdl' : [ 0xd0, ['_MDL']],
'Page' : [ 0x100, ['array', 1, ['unsigned long long']]],
} ],
'_MI_PARTITION_COMMIT' : [ 0x80, {
'PeakCommitment' : [ 0x0, ['unsigned long long']],
'TotalCommitLimitMaximum' : [ 0x8, ['unsigned long long']],
'Popups' : [ 0x10, ['array', 2, ['long']]],
'LowCommitThreshold' : [ 0x18, ['unsigned long long']],
'HighCommitThreshold' : [ 0x20, ['unsigned long long']],
'EventLock' : [ 0x28, ['unsigned long long']],
'SystemCommitReserve' : [ 0x30, ['unsigned long long']],
'OverCommit' : [ 0x40, ['unsigned long long']],
} ],
'_NT_TIB32' : [ 0x1c, {
'ExceptionList' : [ 0x0, ['unsigned long']],
'StackBase' : [ 0x4, ['unsigned long']],
'StackLimit' : [ 0x8, ['unsigned long']],
'SubSystemTib' : [ 0xc, ['unsigned long']],
'FiberData' : [ 0x10, ['unsigned long']],
'Version' : [ 0x10, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x14, ['unsigned long']],
'Self' : [ 0x18, ['unsigned long']],
} ],
'_CM_RESOURCE_LIST' : [ 0x28, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['array', 1, ['_CM_FULL_RESOURCE_DESCRIPTOR']]],
} ],
'_TOKEN_PRIVILEGES' : [ 0x10, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Privileges' : [ 0x4, ['array', 1, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_POOL_TRACKER_TABLE' : [ 0x28, {
'Key' : [ 0x0, ['long']],
'NonPagedAllocs' : [ 0x4, ['unsigned long']],
'NonPagedFrees' : [ 0x8, ['unsigned long']],
'NonPagedBytes' : [ 0x10, ['unsigned long long']],
'PagedAllocs' : [ 0x18, ['unsigned long']],
'PagedFrees' : [ 0x1c, ['unsigned long']],
'PagedBytes' : [ 0x20, ['unsigned long long']],
} ],
'_CM_FULL_RESOURCE_DESCRIPTOR' : [ 0x24, {
'InterfaceType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'ACPIBus', 18: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x4, ['unsigned long']],
'PartialResourceList' : [ 0x8, ['_CM_PARTIAL_RESOURCE_LIST']],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS' : [ 0x4, {
'Primary' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ContainmentWarning' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Reset' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ThresholdExceeded' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ResourceNotAvailable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'LatentError' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_WMI_BUFFER_HEADER' : [ 0x48, {
'BufferSize' : [ 0x0, ['unsigned long']],
'SavedOffset' : [ 0x4, ['unsigned long']],
'CurrentOffset' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['long']],
'TimeStamp' : [ 0x10, ['_LARGE_INTEGER']],
'SequenceNumber' : [ 0x18, ['long long']],
'ClockType' : [ 0x20, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long long')]],
'Frequency' : [ 0x20, ['BitField', dict(start_bit = 3, end_bit = 64, native_type='unsigned long long')]],
'SlistEntry' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'NextBuffer' : [ 0x20, ['pointer64', ['_WMI_BUFFER_HEADER']]],
'ClientContext' : [ 0x28, ['_ETW_BUFFER_CONTEXT']],
'State' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'EtwBufferStateFree', 1: 'EtwBufferStateGeneralLogging', 2: 'EtwBufferStateCSwitch', 3: 'EtwBufferStateFlush', 4: 'EtwBufferStateMaximum'})]],
'Offset' : [ 0x30, ['unsigned long']],
'BufferFlag' : [ 0x34, ['unsigned short']],
'BufferType' : [ 0x36, ['unsigned short']],
'Padding1' : [ 0x38, ['array', 4, ['unsigned long']]],
'ReferenceTime' : [ 0x38, ['_ETW_REF_CLOCK']],
'GlobalEntry' : [ 0x38, ['_LIST_ENTRY']],
'Pointer0' : [ 0x38, ['pointer64', ['void']]],
'Pointer1' : [ 0x40, ['pointer64', ['void']]],
} ],
'_NT_TIB64' : [ 0x38, {
'ExceptionList' : [ 0x0, ['unsigned long long']],
'StackBase' : [ 0x8, ['unsigned long long']],
'StackLimit' : [ 0x10, ['unsigned long long']],
'SubSystemTib' : [ 0x18, ['unsigned long long']],
'FiberData' : [ 0x20, ['unsigned long long']],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['unsigned long long']],
'Self' : [ 0x30, ['unsigned long long']],
} ],
'_POWER_SEQUENCE' : [ 0xc, {
'SequenceD1' : [ 0x0, ['unsigned long']],
'SequenceD2' : [ 0x4, ['unsigned long']],
'SequenceD3' : [ 0x8, ['unsigned long']],
} ],
'_EPROCESS_VALUES' : [ 0x50, {
'KernelTime' : [ 0x0, ['unsigned long long']],
'UserTime' : [ 0x8, ['unsigned long long']],
'CycleTime' : [ 0x10, ['unsigned long long']],
'ContextSwitches' : [ 0x18, ['unsigned long long']],
'ReadOperationCount' : [ 0x20, ['long long']],
'WriteOperationCount' : [ 0x28, ['long long']],
'OtherOperationCount' : [ 0x30, ['long long']],
'ReadTransferCount' : [ 0x38, ['long long']],
'WriteTransferCount' : [ 0x40, ['long long']],
'OtherTransferCount' : [ 0x48, ['long long']],
} ],
'_PROCESSOR_POWER_STATE' : [ 0x1d0, {
'IdleStates' : [ 0x0, ['pointer64', ['_PPM_IDLE_STATES']]],
'IdleAccounting' : [ 0x8, ['pointer64', ['_PROC_IDLE_ACCOUNTING']]],
'IdleTimeLast' : [ 0x10, ['unsigned long long']],
'IdleTimeTotal' : [ 0x18, ['unsigned long long']],
'IdleTimeEntry' : [ 0x20, ['unsigned long long']],
'IdleTimeExpiration' : [ 0x28, ['unsigned long long']],
'NonInterruptibleTransition' : [ 0x30, ['unsigned char']],
'PepWokenTransition' : [ 0x31, ['unsigned char']],
'Class' : [ 0x32, ['unsigned char']],
'TargetIdleState' : [ 0x34, ['unsigned long']],
'IdlePolicy' : [ 0x38, ['_PROC_IDLE_POLICY']],
'Synchronization' : [ 0x40, ['_PPM_IDLE_SYNCHRONIZATION_STATE']],
'PerfFeedback' : [ 0x48, ['_PROC_FEEDBACK']],
'Hypervisor' : [ 0xd8, ['Enumeration', dict(target = 'long', choices = {0: 'ProcHypervisorNone', 1: 'ProcHypervisorPresent', 2: 'ProcHypervisorPower', 3: 'ProcHypervisorHvCounters'})]],
'LastSysTime' : [ 0xdc, ['unsigned long']],
'WmiDispatchPtr' : [ 0xe0, ['unsigned long long']],
'WmiInterfaceEnabled' : [ 0xe8, ['long']],
'FFHThrottleStateInfo' : [ 0xf0, ['_PPM_FFH_THROTTLE_STATE_INFO']],
'PerfActionDpc' : [ 0x110, ['_KDPC']],
'PerfActionMask' : [ 0x150, ['long']],
'HvIdleCheck' : [ 0x158, ['_PROC_IDLE_SNAP']],
'PerfCheck' : [ 0x168, ['pointer64', ['_PROC_PERF_CHECK']]],
'Domain' : [ 0x170, ['pointer64', ['_PROC_PERF_DOMAIN']]],
'PerfConstraint' : [ 0x178, ['pointer64', ['_PROC_PERF_CONSTRAINT']]],
'Concurrency' : [ 0x180, ['pointer64', ['_PPM_CONCURRENCY_ACCOUNTING']]],
'Load' : [ 0x188, ['pointer64', ['_PROC_PERF_LOAD']]],
'PerfHistory' : [ 0x190, ['pointer64', ['_PROC_PERF_HISTORY']]],
'GuaranteedPerformancePercent' : [ 0x198, ['unsigned char']],
'HvTargetState' : [ 0x199, ['unsigned char']],
'Parked' : [ 0x19a, ['unsigned char']],
'LatestPerformancePercent' : [ 0x19c, ['unsigned long']],
'AveragePerformancePercent' : [ 0x1a0, ['unsigned long']],
'LatestAffinitizedPercent' : [ 0x1a4, ['unsigned long']],
'RelativePerformance' : [ 0x1a8, ['unsigned long']],
'Utility' : [ 0x1ac, ['unsigned long']],
'AffinitizedUtility' : [ 0x1b0, ['unsigned long']],
'SnapTimeLast' : [ 0x1b8, ['unsigned long long']],
'EnergyConsumed' : [ 0x1b8, ['unsigned long long']],
'ActiveTime' : [ 0x1c0, ['unsigned long long']],
'TotalTime' : [ 0x1c8, ['unsigned long long']],
} ],
'_OBJECT_REF_STACK_INFO' : [ 0xc, {
'Sequence' : [ 0x0, ['unsigned long']],
'Index' : [ 0x4, ['unsigned short']],
'NumTraces' : [ 0x6, ['unsigned short']],
'Tag' : [ 0x8, ['unsigned long']],
} ],
'_PPC_DBGKD_CONTROL_SET' : [ 0xc, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long']],
'CurrentSymbolEnd' : [ 0x8, ['unsigned long']],
} ],
'_MMPFNENTRY' : [ 0x2, {
'PageLocation' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'WriteInProgress' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Modified' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ReadInProgress' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'CacheAttribute' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'Priority' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'OnProtectedStandby' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'InPageError' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SystemChargedPage' : [ 0x1, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'RemovalRequested' : [ 0x1, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'ParityError' : [ 0x1, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'_THREAD_ENERGY_VALUES' : [ 0x40, {
'Cycles' : [ 0x0, ['array', 4, ['array', 2, ['unsigned long long']]]],
} ],
'_PCW_CALLBACK_INFORMATION' : [ 0x28, {
'AddCounter' : [ 0x0, ['_PCW_COUNTER_INFORMATION']],
'RemoveCounter' : [ 0x0, ['_PCW_COUNTER_INFORMATION']],
'EnumerateInstances' : [ 0x0, ['_PCW_MASK_INFORMATION']],
'CollectData' : [ 0x0, ['_PCW_MASK_INFORMATION']],
} ],
'_CC_ASYNC_READ_CONTEXT' : [ 0x20, {
'CompletionRoutine' : [ 0x0, ['pointer64', ['void']]],
'Context' : [ 0x8, ['pointer64', ['void']]],
'Mdl' : [ 0x10, ['pointer64', ['_MDL']]],
'RequestorMode' : [ 0x18, ['unsigned char']],
'NestingLevel' : [ 0x1c, ['unsigned long']],
} ],
'_TOKEN_SOURCE' : [ 0x10, {
'SourceName' : [ 0x0, ['array', 8, ['unsigned char']]],
'SourceIdentifier' : [ 0x8, ['_LUID']],
} ],
'_CMHIVE' : [ 0x17a8, {
'Hive' : [ 0x0, ['_HHIVE']],
'FileHandles' : [ 0xa68, ['array', 6, ['pointer64', ['void']]]],
'NotifyList' : [ 0xa98, ['_LIST_ENTRY']],
'HiveList' : [ 0xaa8, ['_LIST_ENTRY']],
'PreloadedHiveList' : [ 0xab8, ['_LIST_ENTRY']],
'FailedUnloadList' : [ 0xac8, ['_LIST_ENTRY']],
'HiveRundown' : [ 0xad8, ['_EX_RUNDOWN_REF']],
'ParseCacheEntries' : [ 0xae0, ['_LIST_ENTRY']],
'KcbCacheTable' : [ 0xaf0, ['pointer64', ['_CM_KEY_HASH_TABLE_ENTRY']]],
'KcbCacheTableSize' : [ 0xaf8, ['unsigned long']],
'DeletedKcbTable' : [ 0xb00, ['pointer64', ['_CM_KEY_HASH_TABLE_ENTRY']]],
'DeletedKcbTableSize' : [ 0xb08, ['unsigned long']],
'Identity' : [ 0xb0c, ['unsigned long']],
'HiveLock' : [ 0xb10, ['pointer64', ['_FAST_MUTEX']]],
'WriterLock' : [ 0xb18, ['pointer64', ['_FAST_MUTEX']]],
'FlusherLock' : [ 0xb20, ['pointer64', ['_ERESOURCE']]],
'FlushDirtyVector' : [ 0xb28, ['_RTL_BITMAP']],
'FlushDirtyVectorSize' : [ 0xb38, ['unsigned long']],
'FlushLogEntry' : [ 0xb40, ['pointer64', ['unsigned char']]],
'FlushLogEntrySize' : [ 0xb48, ['unsigned long']],
'FlushHiveTruncated' : [ 0xb4c, ['unsigned long']],
'FlushBaseBlockDirty' : [ 0xb50, ['unsigned char']],
'CapturedUnreconciledVector' : [ 0xb58, ['_RTL_BITMAP']],
'CapturedUnreconciledVectorSize' : [ 0xb68, ['unsigned long']],
'UnreconciledOffsetArray' : [ 0xb70, ['pointer64', ['CMP_OFFSET_ARRAY']]],
'UnreconciledOffsetArrayCount' : [ 0xb78, ['unsigned long']],
'UnreconciledBaseBlock' : [ 0xb80, ['pointer64', ['_HBASE_BLOCK']]],
'SecurityLock' : [ 0xb88, ['_EX_PUSH_LOCK']],
'UseCount' : [ 0xb90, ['unsigned long']],
'LastShrinkHiveSize' : [ 0xb94, ['unsigned long']],
'ActualFileSize' : [ 0xb98, ['_LARGE_INTEGER']],
'LogFileSizes' : [ 0xba0, ['array', 2, ['_LARGE_INTEGER']]],
'FileFullPath' : [ 0xbb0, ['_UNICODE_STRING']],
'FileUserName' : [ 0xbc0, ['_UNICODE_STRING']],
'HiveRootPath' : [ 0xbd0, ['_UNICODE_STRING']],
'SecurityCount' : [ 0xbe0, ['unsigned long']],
'SecurityCacheSize' : [ 0xbe4, ['unsigned long']],
'SecurityHitHint' : [ 0xbe8, ['long']],
'SecurityCache' : [ 0xbf0, ['pointer64', ['_CM_KEY_SECURITY_CACHE_ENTRY']]],
'SecurityHash' : [ 0xbf8, ['array', 64, ['_LIST_ENTRY']]],
'UnloadEventCount' : [ 0xff8, ['unsigned long']],
'UnloadEventArray' : [ 0x1000, ['pointer64', ['pointer64', ['_KEVENT']]]],
'RootKcb' : [ 0x1008, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'Frozen' : [ 0x1010, ['unsigned char']],
'UnloadWorkItem' : [ 0x1018, ['pointer64', ['_CM_WORKITEM']]],
'UnloadWorkItemHolder' : [ 0x1020, ['_CM_WORKITEM']],
'GrowOnlyMode' : [ 0x1048, ['unsigned char']],
'GrowOffset' : [ 0x104c, ['unsigned long']],
'KcbConvertListHead' : [ 0x1050, ['_LIST_ENTRY']],
'CellRemapArray' : [ 0x1060, ['pointer64', ['_CM_CELL_REMAP_BLOCK']]],
'DirtyVectorLog' : [ 0x1068, ['_CM_DIRTY_VECTOR_LOG']],
'Flags' : [ 0x14f0, ['unsigned long']],
'TrustClassEntry' : [ 0x14f8, ['_LIST_ENTRY']],
'DirtyTime' : [ 0x1508, ['unsigned long long']],
'UnreconciledTime' : [ 0x1510, ['unsigned long long']],
'CmRm' : [ 0x1518, ['pointer64', ['_CM_RM']]],
'CmRmInitFailPoint' : [ 0x1520, ['unsigned long']],
'CmRmInitFailStatus' : [ 0x1524, ['long']],
'CreatorOwner' : [ 0x1528, ['pointer64', ['_KTHREAD']]],
'RundownThread' : [ 0x1530, ['pointer64', ['_KTHREAD']]],
'LastWriteTime' : [ 0x1538, ['_LARGE_INTEGER']],
'FlushQueue' : [ 0x1540, ['_HIVE_WRITE_WAIT_QUEUE']],
'ReconcileQueue' : [ 0x1558, ['_HIVE_WRITE_WAIT_QUEUE']],
'FlushFlags' : [ 0x1570, ['unsigned long']],
'FlushActive' : [ 0x1570, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReconcileActive' : [ 0x1570, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PrimaryFilePurged' : [ 0x1570, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DiskFileBad' : [ 0x1570, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'PrimaryFileSizeBeforeLastFlush' : [ 0x1574, ['unsigned long']],
'ReferenceCount' : [ 0x1578, ['long']],
'UnloadHistoryIndex' : [ 0x157c, ['long']],
'UnloadHistory' : [ 0x1580, ['array', 128, ['unsigned long']]],
'BootStart' : [ 0x1780, ['unsigned long']],
'UnaccessedStart' : [ 0x1784, ['unsigned long']],
'UnaccessedEnd' : [ 0x1788, ['unsigned long']],
'LoadedKeyCount' : [ 0x178c, ['unsigned long']],
'HandleClosePending' : [ 0x1790, ['unsigned long']],
'HandleClosePendingEvent' : [ 0x1798, ['_EX_PUSH_LOCK']],
'FinalFlushSucceeded' : [ 0x17a0, ['unsigned char']],
'FailedUnload' : [ 0x17a1, ['unsigned char']],
} ],
'_DBGKD_QUERY_MEMORY' : [ 0x18, {
'Address' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['unsigned long long']],
'AddressSpace' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
} ],
'_DIRTY_PAGE_THRESHOLDS' : [ 0x38, {
'DirtyPageThreshold' : [ 0x0, ['unsigned long long']],
'DirtyPageThresholdTop' : [ 0x8, ['unsigned long long']],
'DirtyPageThresholdBottom' : [ 0x10, ['unsigned long long']],
'DirtyPageTarget' : [ 0x18, ['unsigned long']],
'AggregateAvailablePages' : [ 0x20, ['unsigned long long']],
'AggregateDirtyPages' : [ 0x28, ['unsigned long long']],
'AvailableHistory' : [ 0x30, ['unsigned long']],
} ],
'DOCK_INTERFACE' : [ 0x30, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
'ProfileDepartureSetMode' : [ 0x20, ['pointer64', ['void']]],
'ProfileDepartureUpdate' : [ 0x28, ['pointer64', ['void']]],
} ],
'CMP_OFFSET_ARRAY' : [ 0x18, {
'FileOffset' : [ 0x0, ['unsigned long']],
'DataBuffer' : [ 0x8, ['pointer64', ['void']]],
'DataLength' : [ 0x10, ['unsigned long']],
} ],
'_MMSUPPORT_FLAGS' : [ 0x4, {
'WorkingSetType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'Reserved0' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 6, native_type='unsigned char')]],
'MaximumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'MinimumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'SessionMaster' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'TrimmerState' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned char')]],
'Reserved' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'PageStealers' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'MemoryPriority' : [ 0x2, ['unsigned char']],
'WsleDeleted' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'VmExiting' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ExpansionFailed' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SvmEnabled' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ForceAge' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'NewMaximum' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'CommitReleaseState' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
} ],
'_PPM_VETO_ENTRY' : [ 0x40, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'VetoReason' : [ 0x10, ['unsigned long']],
'ReferenceCount' : [ 0x14, ['unsigned long']],
'HitCount' : [ 0x18, ['unsigned long long']],
'LastActivationTime' : [ 0x20, ['unsigned long long']],
'TotalActiveTime' : [ 0x28, ['unsigned long long']],
'CsActivationTime' : [ 0x30, ['unsigned long long']],
'CsActiveTime' : [ 0x38, ['unsigned long long']],
} ],
'_IMAGE_OPTIONAL_HEADER64' : [ 0xf0, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'ImageBase' : [ 0x18, ['unsigned long long']],
'SectionAlignment' : [ 0x20, ['unsigned long']],
'FileAlignment' : [ 0x24, ['unsigned long']],
'MajorOperatingSystemVersion' : [ 0x28, ['unsigned short']],
'MinorOperatingSystemVersion' : [ 0x2a, ['unsigned short']],
'MajorImageVersion' : [ 0x2c, ['unsigned short']],
'MinorImageVersion' : [ 0x2e, ['unsigned short']],
'MajorSubsystemVersion' : [ 0x30, ['unsigned short']],
'MinorSubsystemVersion' : [ 0x32, ['unsigned short']],
'Win32VersionValue' : [ 0x34, ['unsigned long']],
'SizeOfImage' : [ 0x38, ['unsigned long']],
'SizeOfHeaders' : [ 0x3c, ['unsigned long']],
'CheckSum' : [ 0x40, ['unsigned long']],
'Subsystem' : [ 0x44, ['unsigned short']],
'DllCharacteristics' : [ 0x46, ['unsigned short']],
'SizeOfStackReserve' : [ 0x48, ['unsigned long long']],
'SizeOfStackCommit' : [ 0x50, ['unsigned long long']],
'SizeOfHeapReserve' : [ 0x58, ['unsigned long long']],
'SizeOfHeapCommit' : [ 0x60, ['unsigned long long']],
'LoaderFlags' : [ 0x68, ['unsigned long']],
'NumberOfRvaAndSizes' : [ 0x6c, ['unsigned long']],
'DataDirectory' : [ 0x70, ['array', 16, ['_IMAGE_DATA_DIRECTORY']]],
} ],
'_ALPC_COMPLETION_PACKET_LOOKASIDE' : [ 0x50, {
'Lock' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'ActiveCount' : [ 0xc, ['unsigned long']],
'PendingNullCount' : [ 0x10, ['unsigned long']],
'PendingCheckCompletionListCount' : [ 0x14, ['unsigned long']],
'PendingDelete' : [ 0x18, ['unsigned long']],
'FreeListHead' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'CompletionPort' : [ 0x28, ['pointer64', ['void']]],
'CompletionKey' : [ 0x30, ['pointer64', ['void']]],
'Entry' : [ 0x38, ['array', 1, ['_ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY']]],
} ],
'_TERMINATION_PORT' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_TERMINATION_PORT']]],
'Port' : [ 0x8, ['pointer64', ['void']]],
} ],
'_MEMORY_ALLOCATION_DESCRIPTOR' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'MemoryType' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'LoaderExceptionBlock', 1: 'LoaderSystemBlock', 2: 'LoaderFree', 3: 'LoaderBad', 4: 'LoaderLoadedProgram', 5: 'LoaderFirmwareTemporary', 6: 'LoaderFirmwarePermanent', 7: 'LoaderOsloaderHeap', 8: 'LoaderOsloaderStack', 9: 'LoaderSystemCode', 10: 'LoaderHalCode', 11: 'LoaderBootDriver', 12: 'LoaderConsoleInDriver', 13: 'LoaderConsoleOutDriver', 14: 'LoaderStartupDpcStack', 15: 'LoaderStartupKernelStack', 16: 'LoaderStartupPanicStack', 17: 'LoaderStartupPcrPage', 18: 'LoaderStartupPdrPage', 19: 'LoaderRegistryData', 20: 'LoaderMemoryData', 21: 'LoaderNlsData', 22: 'LoaderSpecialMemory', 23: 'LoaderBBTMemory', 24: 'LoaderZero', 25: 'LoaderXIPRom', 26: 'LoaderHALCachedMemory', 27: 'LoaderLargePageFiller', 28: 'LoaderErrorLogMemory', 29: 'LoaderVsmMemory', 30: 'LoaderFirmwareCode', 31: 'LoaderFirmwareData', 32: 'LoaderFirmwareReserved', 33: 'LoaderEnclaveMemory', 34: 'LoaderMaximum'})]],
'BasePage' : [ 0x18, ['unsigned long long']],
'PageCount' : [ 0x20, ['unsigned long long']],
} ],
'_CM_INTENT_LOCK' : [ 0x10, {
'OwnerCount' : [ 0x0, ['unsigned long']],
'OwnerTable' : [ 0x8, ['pointer64', ['pointer64', ['_CM_KCB_UOW']]]],
} ],
'_PROC_IDLE_ACCOUNTING' : [ 0x408, {
'StateCount' : [ 0x0, ['unsigned long']],
'TotalTransitions' : [ 0x4, ['unsigned long']],
'ResetCount' : [ 0x8, ['unsigned long']],
'AbortCount' : [ 0xc, ['unsigned long']],
'StartTime' : [ 0x10, ['unsigned long long']],
'PriorIdleTime' : [ 0x18, ['unsigned long long']],
'TimeUnit' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'PpmIdleBucketTimeInQpc', 1: 'PpmIdleBucketTimeIn100ns', 2: 'PpmIdleBucketTimeMaximum'})]],
'State' : [ 0x28, ['array', 1, ['_PROC_IDLE_STATE_ACCOUNTING']]],
} ],
'_THERMAL_INFORMATION' : [ 0x58, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0x10, ['unsigned long long']],
'SamplingPeriod' : [ 0x18, ['unsigned long']],
'CurrentTemperature' : [ 0x1c, ['unsigned long']],
'PassiveTripPoint' : [ 0x20, ['unsigned long']],
'CriticalTripPoint' : [ 0x24, ['unsigned long']],
'ActiveTripPointCount' : [ 0x28, ['unsigned char']],
'ActiveTripPoint' : [ 0x2c, ['array', 10, ['unsigned long']]],
} ],
'_SEP_LOWBOX_NUMBER_MAPPING' : [ 0x28, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'Bitmap' : [ 0x8, ['_RTL_BITMAP']],
'HashTable' : [ 0x18, ['pointer64', ['_RTL_DYNAMIC_HASH_TABLE']]],
'Active' : [ 0x20, ['unsigned char']],
} ],
'_MAPPED_FILE_SEGMENT' : [ 0x30, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SegmentFlags' : [ 0xc, ['_SEGMENT_FLAGS']],
'NumberOfCommittedPages' : [ 0x10, ['unsigned long long']],
'SizeOfSegment' : [ 0x18, ['unsigned long long']],
'ExtendInfo' : [ 0x20, ['pointer64', ['_MMEXTEND_INFO']]],
'BasedAddress' : [ 0x20, ['pointer64', ['void']]],
'SegmentLock' : [ 0x28, ['_EX_PUSH_LOCK']],
} ],
'_GDI_TEB_BATCH' : [ 0x4e8, {
'Offset' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 31, native_type='unsigned long')]],
'HasRenderingCommand' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'_MM_DRIVER_VERIFIER_DATA' : [ 0xa8, {
'Level' : [ 0x0, ['unsigned long']],
'RaiseIrqls' : [ 0x4, ['unsigned long']],
'AcquireSpinLocks' : [ 0x8, ['unsigned long']],
'SynchronizeExecutions' : [ 0xc, ['unsigned long']],
'AllocationsAttempted' : [ 0x10, ['unsigned long']],
'AllocationsSucceeded' : [ 0x14, ['unsigned long']],
'AllocationsSucceededSpecialPool' : [ 0x18, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x1c, ['unsigned long']],
'TrimRequests' : [ 0x20, ['unsigned long']],
'Trims' : [ 0x24, ['unsigned long']],
'AllocationsFailed' : [ 0x28, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x2c, ['unsigned long']],
'Loads' : [ 0x30, ['unsigned long']],
'Unloads' : [ 0x34, ['unsigned long']],
'UnTrackedPool' : [ 0x38, ['unsigned long']],
'UserTrims' : [ 0x3c, ['unsigned long']],
'CurrentPagedPoolAllocations' : [ 0x40, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x44, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x48, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x4c, ['unsigned long']],
'PagedBytes' : [ 0x50, ['unsigned long long']],
'NonPagedBytes' : [ 0x58, ['unsigned long long']],
'PeakPagedBytes' : [ 0x60, ['unsigned long long']],
'PeakNonPagedBytes' : [ 0x68, ['unsigned long long']],
'BurstAllocationsFailedDeliberately' : [ 0x70, ['unsigned long']],
'SessionTrims' : [ 0x74, ['unsigned long']],
'OptionChanges' : [ 0x78, ['unsigned long']],
'VerifyMode' : [ 0x7c, ['unsigned long']],
'PreviousBucketName' : [ 0x80, ['_UNICODE_STRING']],
'ExecutePoolTypes' : [ 0x90, ['unsigned long']],
'ExecutePageProtections' : [ 0x94, ['unsigned long']],
'ExecutePageMappings' : [ 0x98, ['unsigned long']],
'ExecuteWriteSections' : [ 0x9c, ['unsigned long']],
'SectionAlignmentFailures' : [ 0xa0, ['unsigned long']],
} ],
'_INVERTED_FUNCTION_TABLE' : [ 0x1810, {
'CurrentSize' : [ 0x0, ['unsigned long']],
'MaximumSize' : [ 0x4, ['unsigned long']],
'Epoch' : [ 0x8, ['unsigned long']],
'Overflow' : [ 0xc, ['unsigned char']],
'TableEntry' : [ 0x10, ['array', 256, ['_INVERTED_FUNCTION_TABLE_ENTRY']]],
} ],
'_VF_DRIVER_IO_CALLBACKS' : [ 0x100, {
'DriverInit' : [ 0x0, ['pointer64', ['void']]],
'DriverStartIo' : [ 0x8, ['pointer64', ['void']]],
'DriverUnload' : [ 0x10, ['pointer64', ['void']]],
'AddDevice' : [ 0x18, ['pointer64', ['void']]],
'MajorFunction' : [ 0x20, ['array', 28, ['pointer64', ['void']]]],
} ],
'_HIVE_WRITE_WAIT_QUEUE' : [ 0x18, {
'ActiveThread' : [ 0x0, ['pointer64', ['_ETHREAD']]],
'WaitList' : [ 0x8, ['pointer64', ['_HIVE_WAIT_PACKET']]],
'OwnerBoosted' : [ 0x10, ['unsigned long']],
} ],
'_VI_FAULT_TRACE' : [ 0x48, {
'Thread' : [ 0x0, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x8, ['array', 8, ['pointer64', ['void']]]],
} ],
'_TRIAGE_PNP_DEVICE_COMPLETION_QUEUE' : [ 0x10, {
'DispatchedList' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_GENERIC_MAPPING' : [ 0x10, {
'GenericRead' : [ 0x0, ['unsigned long']],
'GenericWrite' : [ 0x4, ['unsigned long']],
'GenericExecute' : [ 0x8, ['unsigned long']],
'GenericAll' : [ 0xc, ['unsigned long']],
} ],
'_OBJECT_HANDLE_COUNT_DATABASE' : [ 0x18, {
'CountEntries' : [ 0x0, ['unsigned long']],
'HandleCountEntries' : [ 0x8, ['array', 1, ['_OBJECT_HANDLE_COUNT_ENTRY']]],
} ],
'_UMS_CONTROL_BLOCK' : [ 0x90, {
'UmsContext' : [ 0x0, ['pointer64', ['_RTL_UMS_CONTEXT']]],
'CompletionListEntry' : [ 0x8, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
'CompletionListEvent' : [ 0x10, ['pointer64', ['_KEVENT']]],
'ServiceSequenceNumber' : [ 0x18, ['unsigned long']],
'UmsQueue' : [ 0x20, ['_KQUEUE']],
'QueueEntry' : [ 0x60, ['_LIST_ENTRY']],
'YieldingUmsContext' : [ 0x70, ['pointer64', ['_RTL_UMS_CONTEXT']]],
'YieldingParam' : [ 0x78, ['pointer64', ['void']]],
'UmsTeb' : [ 0x80, ['pointer64', ['void']]],
'UmsAssociatedQueue' : [ 0x20, ['pointer64', ['_KQUEUE']]],
'UmsQueueListEntry' : [ 0x28, ['pointer64', ['_LIST_ENTRY']]],
'UmsWaitEvent' : [ 0x30, ['_KEVENT']],
'StagingArea' : [ 0x48, ['pointer64', ['void']]],
'UmsPrimaryDeliveredContext' : [ 0x50, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'UmsAssociatedQueueUsed' : [ 0x50, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'UmsThreadParked' : [ 0x50, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'UmsFlags' : [ 0x50, ['unsigned long']],
'TebSelector' : [ 0x88, ['unsigned short']],
} ],
'_OWNER_ENTRY' : [ 0x10, {
'OwnerThread' : [ 0x0, ['unsigned long long']],
'IoPriorityBoosted' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OwnerReferenced' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'OwnerCount' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
'TableSize' : [ 0x8, ['unsigned long']],
} ],
'_KSYSTEM_TIME' : [ 0xc, {
'LowPart' : [ 0x0, ['unsigned long']],
'High1Time' : [ 0x4, ['long']],
'High2Time' : [ 0x8, ['long']],
} ],
'_ETIMER' : [ 0x138, {
'KeTimer' : [ 0x0, ['_KTIMER']],
'Lock' : [ 0x40, ['unsigned long long']],
'TimerApc' : [ 0x48, ['_KAPC']],
'TimerDpc' : [ 0xa0, ['_KDPC']],
'ActiveTimerListEntry' : [ 0xe0, ['_LIST_ENTRY']],
'Period' : [ 0xf0, ['unsigned long']],
'TimerFlags' : [ 0xf4, ['unsigned char']],
'ApcAssociated' : [ 0xf4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'FlushDpcs' : [ 0xf4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Paused' : [ 0xf4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Spare1' : [ 0xf4, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
'DueTimeType' : [ 0xf5, ['unsigned char']],
'Spare2' : [ 0xf6, ['unsigned short']],
'WakeReason' : [ 0xf8, ['pointer64', ['_DIAGNOSTIC_CONTEXT']]],
'WakeTimerListEntry' : [ 0x100, ['_LIST_ENTRY']],
'VirtualizedTimerCookie' : [ 0x110, ['pointer64', ['void']]],
'VirtualizedTimerLinks' : [ 0x118, ['_LIST_ENTRY']],
'DueTime' : [ 0x128, ['unsigned long long']],
'CoalescingWindow' : [ 0x130, ['unsigned long']],
} ],
'_OBJECT_DIRECTORY_ENTRY' : [ 0x18, {
'ChainLink' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]],
'Object' : [ 0x8, ['pointer64', ['void']]],
'HashValue' : [ 0x10, ['unsigned long']],
} ],
'_LOCK_TRACKER' : [ 0x90, {
'LockTrackerNode' : [ 0x0, ['_RTL_BALANCED_NODE']],
'Mdl' : [ 0x18, ['pointer64', ['_MDL']]],
'StartVa' : [ 0x20, ['pointer64', ['void']]],
'Count' : [ 0x28, ['unsigned long long']],
'Offset' : [ 0x30, ['unsigned long']],
'Length' : [ 0x34, ['unsigned long']],
'Page' : [ 0x38, ['unsigned long long']],
'StackTrace' : [ 0x40, ['array', 8, ['pointer64', ['void']]]],
'Who' : [ 0x80, ['unsigned long']],
'Process' : [ 0x88, ['pointer64', ['_EPROCESS']]],
} ],
'_MI_CACHED_PTES' : [ 0x48, {
'Bins' : [ 0x0, ['array', 8, ['_MI_CACHED_PTE']]],
'CachedPteCount' : [ 0x40, ['long']],
} ],
'_EXHANDLE' : [ 0x8, {
'TagBits' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'Index' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
'GenericHandleOverlay' : [ 0x0, ['pointer64', ['void']]],
'Value' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_2155' : [ 0x8, {
'Flags' : [ 0x0, ['_MMSECURE_FLAGS']],
'FlagsLong' : [ 0x0, ['unsigned long']],
'StartVa' : [ 0x0, ['pointer64', ['void']]],
} ],
'_MMADDRESS_LIST' : [ 0x10, {
'u1' : [ 0x0, ['__unnamed_2155']],
'EndVa' : [ 0x8, ['pointer64', ['void']]],
} ],
'_EX_PUSH_LOCK_AUTO_EXPAND_STATE' : [ 0x4, {
'Expanded' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Transitioning' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Pageable' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Value' : [ 0x0, ['unsigned long']],
} ],
'_XSTATE_FEATURE' : [ 0x8, {
'Offset' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_CONTEXT_EX' : [ 0xc, {
'Offset' : [ 0x0, ['unsigned long']],
'ByteCount' : [ 0x4, ['unsigned long']],
'BytesCopied' : [ 0x8, ['unsigned long']],
} ],
'_CM_DIRTY_VECTOR_LOG' : [ 0x488, {
'Next' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
'Log' : [ 0x8, ['array', 16, ['_CM_DIRTY_VECTOR_LOG_ENTRY']]],
} ],
'_ARBITER_INSTANCE' : [ 0x150, {
'Signature' : [ 0x0, ['unsigned long']],
'MutexEvent' : [ 0x8, ['pointer64', ['_KEVENT']]],
'Name' : [ 0x10, ['pointer64', ['unsigned short']]],
'OrderingName' : [ 0x18, ['pointer64', ['unsigned short']]],
'ResourceType' : [ 0x20, ['long']],
'Allocation' : [ 0x28, ['pointer64', ['_RTL_RANGE_LIST']]],
'PossibleAllocation' : [ 0x30, ['pointer64', ['_RTL_RANGE_LIST']]],
'OrderingList' : [ 0x38, ['_ARBITER_ORDERING_LIST']],
'ReservedList' : [ 0x48, ['_ARBITER_ORDERING_LIST']],
'ReferenceCount' : [ 0x58, ['long']],
'Interface' : [ 0x60, ['pointer64', ['_ARBITER_INTERFACE']]],
'AllocationStackMaxSize' : [ 0x68, ['unsigned long']],
'AllocationStack' : [ 0x70, ['pointer64', ['_ARBITER_ALLOCATION_STATE']]],
'UnpackRequirement' : [ 0x78, ['pointer64', ['void']]],
'PackResource' : [ 0x80, ['pointer64', ['void']]],
'UnpackResource' : [ 0x88, ['pointer64', ['void']]],
'ScoreRequirement' : [ 0x90, ['pointer64', ['void']]],
'TestAllocation' : [ 0x98, ['pointer64', ['void']]],
'RetestAllocation' : [ 0xa0, ['pointer64', ['void']]],
'CommitAllocation' : [ 0xa8, ['pointer64', ['void']]],
'RollbackAllocation' : [ 0xb0, ['pointer64', ['void']]],
'BootAllocation' : [ 0xb8, ['pointer64', ['void']]],
'QueryArbitrate' : [ 0xc0, ['pointer64', ['void']]],
'QueryConflict' : [ 0xc8, ['pointer64', ['void']]],
'AddReserved' : [ 0xd0, ['pointer64', ['void']]],
'StartArbiter' : [ 0xd8, ['pointer64', ['void']]],
'PreprocessEntry' : [ 0xe0, ['pointer64', ['void']]],
'AllocateEntry' : [ 0xe8, ['pointer64', ['void']]],
'GetNextAllocationRange' : [ 0xf0, ['pointer64', ['void']]],
'FindSuitableRange' : [ 0xf8, ['pointer64', ['void']]],
'AddAllocation' : [ 0x100, ['pointer64', ['void']]],
'BacktrackAllocation' : [ 0x108, ['pointer64', ['void']]],
'OverrideConflict' : [ 0x110, ['pointer64', ['void']]],
'InitializeRangeList' : [ 0x118, ['pointer64', ['void']]],
'TransactionInProgress' : [ 0x120, ['unsigned char']],
'TransactionEvent' : [ 0x128, ['pointer64', ['_KEVENT']]],
'Extension' : [ 0x130, ['pointer64', ['void']]],
'BusDeviceObject' : [ 0x138, ['pointer64', ['_DEVICE_OBJECT']]],
'ConflictCallbackContext' : [ 0x140, ['pointer64', ['void']]],
'ConflictCallback' : [ 0x148, ['pointer64', ['void']]],
} ],
'_MMVAD_FLAGS1' : [ 0x4, {
'CommitCharge' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 31, native_type='unsigned long')]],
'MemCommit' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_MI_SYSTEM_INFORMATION' : [ 0x1bc0, {
'Pools' : [ 0x0, ['_MI_POOL_STATE']],
'Sections' : [ 0x100, ['_MI_SECTION_STATE']],
'SystemImages' : [ 0x380, ['_MI_SYSTEM_IMAGE_STATE']],
'Sessions' : [ 0x440, ['_MI_SESSION_STATE']],
'Processes' : [ 0x4d0, ['_MI_PROCESS_STATE']],
'Hardware' : [ 0x530, ['_MI_HARDWARE_STATE']],
'SystemVa' : [ 0x600, ['_MI_SYSTEM_VA_STATE']],
'PageCombines' : [ 0x8c0, ['_MI_COMBINE_STATE']],
'Partitions' : [ 0xa60, ['_MI_PARTITION_STATE']],
'Shutdowns' : [ 0xac0, ['_MI_SHUTDOWN_STATE']],
'Errors' : [ 0xb40, ['_MI_ERROR_STATE']],
'AccessLog' : [ 0xc00, ['_MI_ACCESS_LOG_STATE']],
'Debugger' : [ 0xc80, ['_MI_DEBUGGER_STATE']],
'Standby' : [ 0xdc0, ['_MI_STANDBY_STATE']],
'SystemPtes' : [ 0xe80, ['_MI_SYSTEM_PTE_STATE']],
'IoPages' : [ 0x1000, ['_MI_IO_PAGE_STATE']],
'PagingIo' : [ 0x1060, ['_MI_PAGING_IO_STATE']],
'CommonPages' : [ 0x10b0, ['_MI_COMMON_PAGE_STATE']],
'Trims' : [ 0x1180, ['_MI_SYSTEM_TRIM_STATE']],
'ResTrack' : [ 0x11c0, ['_MI_RESAVAIL_TRACKER']],
'Cookie' : [ 0x1540, ['unsigned long long']],
'ZeroingDisabled' : [ 0x1548, ['long']],
'BootRegistryRuns' : [ 0x1550, ['pointer64', ['pointer64', ['void']]]],
'FullyInitialized' : [ 0x1558, ['unsigned char']],
'SafeBooted' : [ 0x1559, ['unsigned char']],
'LargePfnBitMap' : [ 0x1560, ['_RTL_BITMAP_EX']],
'TraceLogging' : [ 0x1570, ['pointer64', ['_TlgProvider_t']]],
'Vs' : [ 0x1580, ['_MI_VISIBLE_STATE']],
} ],
'_KDEVICE_QUEUE_ENTRY' : [ 0x18, {
'DeviceListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SortKey' : [ 0x10, ['unsigned long']],
'Inserted' : [ 0x14, ['unsigned char']],
} ],
'_PPM_SELECTION_DEPENDENCY' : [ 0x18, {
'Processor' : [ 0x0, ['unsigned long']],
'Menu' : [ 0x8, ['_PPM_SELECTION_MENU']],
} ],
'__unnamed_21cf' : [ 0x4, {
'UserData' : [ 0x0, ['unsigned long']],
'Next' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_21d1' : [ 0x4, {
'u' : [ 0x0, ['__unnamed_21cf']],
} ],
'__unnamed_21d3' : [ 0x4, {
'NewCell' : [ 0x0, ['__unnamed_21d1']],
} ],
'_HCELL' : [ 0x8, {
'Size' : [ 0x0, ['long']],
'u' : [ 0x4, ['__unnamed_21d3']],
} ],
'_MI_VISIBLE_STATE' : [ 0x640, {
'SpecialPool' : [ 0x0, ['_MI_SPECIAL_POOL']],
'SessionWsList' : [ 0x50, ['_LIST_ENTRY']],
'SessionIdBitmap' : [ 0x60, ['pointer64', ['_RTL_BITMAP']]],
'PagedPoolInfo' : [ 0x68, ['_MM_PAGED_POOL_INFO']],
'MaximumNonPagedPoolInPages' : [ 0xa0, ['unsigned long long']],
'SizeOfPagedPoolInPages' : [ 0xa8, ['unsigned long long']],
'SystemPteInfo' : [ 0xb0, ['_MI_SYSTEM_PTE_TYPE']],
'NonPagedPoolCommit' : [ 0x110, ['unsigned long long']],
'BootCommit' : [ 0x118, ['unsigned long long']],
'MdlPagesAllocated' : [ 0x120, ['unsigned long long']],
'SystemPageTableCommit' : [ 0x128, ['unsigned long long']],
'SpecialPagesInUse' : [ 0x130, ['unsigned long long']],
'WsOverheadPages' : [ 0x138, ['unsigned long long']],
'VadBitmapPages' : [ 0x140, ['unsigned long long']],
'ProcessCommit' : [ 0x148, ['unsigned long long']],
'SharedCommit' : [ 0x150, ['unsigned long long']],
'DriverCommit' : [ 0x158, ['long']],
'SystemWs' : [ 0x180, ['array', 3, ['_MMSUPPORT']]],
'MapCacheFailures' : [ 0x468, ['unsigned long']],
'PagefileHashPages' : [ 0x470, ['unsigned long long']],
'PteHeader' : [ 0x478, ['_SYSPTES_HEADER']],
'SessionSpecialPool' : [ 0x590, ['pointer64', ['_MI_SPECIAL_POOL']]],
'SystemVaTypeCount' : [ 0x598, ['array', 14, ['unsigned long long']]],
} ],
'_WHEA_GENERIC_ERROR_DESCRIPTOR' : [ 0x34, {
'Type' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned char']],
'Enabled' : [ 0x3, ['unsigned char']],
'ErrStatusBlockLength' : [ 0x4, ['unsigned long']],
'RelatedErrorSourceId' : [ 0x8, ['unsigned long']],
'ErrStatusAddressSpaceID' : [ 0xc, ['unsigned char']],
'ErrStatusAddressBitWidth' : [ 0xd, ['unsigned char']],
'ErrStatusAddressBitOffset' : [ 0xe, ['unsigned char']],
'ErrStatusAddressAccessSize' : [ 0xf, ['unsigned char']],
'ErrStatusAddress' : [ 0x10, ['_LARGE_INTEGER']],
'Notify' : [ 0x18, ['_WHEA_NOTIFICATION_DESCRIPTOR']],
} ],
'_HMAP_TABLE' : [ 0x5000, {
'Table' : [ 0x0, ['array', 512, ['_HMAP_ENTRY']]],
} ],
'_SEP_LOWBOX_HANDLES_ENTRY' : [ 0x38, {
'HashEntry' : [ 0x0, ['_RTL_DYNAMIC_HASH_TABLE_ENTRY']],
'ReferenceCount' : [ 0x18, ['long long']],
'PackageSid' : [ 0x20, ['pointer64', ['void']]],
'HandleCount' : [ 0x28, ['unsigned long']],
'Handles' : [ 0x30, ['pointer64', ['pointer64', ['void']]]],
} ],
'_PROC_PERF_CONSTRAINT' : [ 0x58, {
'Prcb' : [ 0x0, ['pointer64', ['_KPRCB']]],
'PerfContext' : [ 0x8, ['unsigned long long']],
'ProcCap' : [ 0x10, ['unsigned long']],
'ProcFloor' : [ 0x14, ['unsigned long']],
'PlatformCap' : [ 0x18, ['unsigned long']],
'ThermalCap' : [ 0x1c, ['unsigned long']],
'LimitReasons' : [ 0x20, ['unsigned long']],
'PlatformCapStartTime' : [ 0x28, ['unsigned long long']],
'TargetPercent' : [ 0x30, ['unsigned long']],
'SelectedPercent' : [ 0x34, ['unsigned long']],
'SelectedFrequency' : [ 0x38, ['unsigned long']],
'PreviousFrequency' : [ 0x3c, ['unsigned long']],
'PreviousPercent' : [ 0x40, ['unsigned long']],
'LatestFrequencyPercent' : [ 0x44, ['unsigned long']],
'SelectedState' : [ 0x48, ['unsigned long long']],
'Force' : [ 0x50, ['unsigned char']],
} ],
'__unnamed_21ef' : [ 0x20, {
'CallerCompletion' : [ 0x0, ['pointer64', ['void']]],
'CallerContext' : [ 0x8, ['pointer64', ['void']]],
'CallerDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'SystemWake' : [ 0x18, ['unsigned char']],
} ],
'__unnamed_21f2' : [ 0x10, {
'NotifyDevice' : [ 0x0, ['pointer64', ['_PO_DEVICE_NOTIFY']]],
'FxDeviceActivated' : [ 0x8, ['unsigned char']],
} ],
'_POP_IRP_DATA' : [ 0xf8, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'Irp' : [ 0x10, ['pointer64', ['_IRP']]],
'Pdo' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT']]],
'TargetDevice' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'CurrentDevice' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
'WatchdogStart' : [ 0x30, ['unsigned long long']],
'WatchdogTimer' : [ 0x38, ['_KTIMER']],
'WatchdogDpc' : [ 0x78, ['_KDPC']],
'MinorFunction' : [ 0xb8, ['unsigned char']],
'PowerStateType' : [ 0xbc, ['Enumeration', dict(target = 'long', choices = {0: 'SystemPowerState', 1: 'DevicePowerState'})]],
'PowerState' : [ 0xc0, ['_POWER_STATE']],
'WatchdogEnabled' : [ 0xc4, ['unsigned char']],
'FxDevice' : [ 0xc8, ['pointer64', ['_POP_FX_DEVICE']]],
'SystemTransition' : [ 0xd0, ['unsigned char']],
'NotifyPEP' : [ 0xd1, ['unsigned char']],
'Device' : [ 0xd8, ['__unnamed_21ef']],
'System' : [ 0xd8, ['__unnamed_21f2']],
} ],
'_MI_ERROR_STATE' : [ 0xb8, {
'BadMemoryEventEntry' : [ 0x0, ['_MI_BAD_MEMORY_EVENT_ENTRY']],
'ProbeRaises' : [ 0x38, ['_MI_PROBE_RAISE_TRACKER']],
'ForcedCommits' : [ 0x78, ['_MI_FORCED_COMMITS']],
'WsleFailures' : [ 0x80, ['array', 2, ['unsigned long']]],
'WsLinear' : [ 0x88, ['unsigned long']],
'PageHashErrors' : [ 0x8c, ['unsigned long']],
'CheckZeroCount' : [ 0x90, ['unsigned long']],
'ZeroedPageSingleBitErrorsDetected' : [ 0x94, ['long']],
'BadPagesDetected' : [ 0x98, ['long']],
'ScrubPasses' : [ 0x9c, ['long']],
'ScrubBadPagesFound' : [ 0xa0, ['long']],
'UserViewFailures' : [ 0xa4, ['unsigned long']],
'UserViewCollisionFailures' : [ 0xa8, ['unsigned long']],
'ResavailFailures' : [ 0xac, ['_MI_RESAVAIL_FAILURES']],
'PendingBadPages' : [ 0xb4, ['unsigned char']],
'InitFailure' : [ 0xb5, ['unsigned char']],
'StopBadMaps' : [ 0xb6, ['unsigned char']],
} ],
'_IMAGE_DATA_DIRECTORY' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_DEVICE_CAPABILITIES' : [ 0x40, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'DeviceD1' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeviceD2' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LockSupported' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EjectSupported' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Removable' : [ 0x4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DockDevice' : [ 0x4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'UniqueID' : [ 0x4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SilentInstall' : [ 0x4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'RawDeviceOK' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SurpriseRemovalOK' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'WakeFromD0' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'WakeFromD1' : [ 0x4, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WakeFromD2' : [ 0x4, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'WakeFromD3' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HardwareDisabled' : [ 0x4, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NonDynamic' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'WarmEjectSupported' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NoDisplayInUI' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Reserved1' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'WakeFromInterrupt' : [ 0x4, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'Reserved' : [ 0x4, ['BitField', dict(start_bit = 20, end_bit = 32, native_type='unsigned long')]],
'Address' : [ 0x8, ['unsigned long']],
'UINumber' : [ 0xc, ['unsigned long']],
'DeviceState' : [ 0x10, ['array', -28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]]],
'SystemWake' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceWake' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'D1Latency' : [ 0x34, ['unsigned long']],
'D2Latency' : [ 0x38, ['unsigned long']],
'D3Latency' : [ 0x3c, ['unsigned long']],
} ],
'_MI_USER_VA_INFO' : [ 0x158, {
'NumberOfCommittedPageTables' : [ 0x0, ['unsigned long']],
'HighestTopDownAllocationAddress' : [ 0x8, ['pointer64', ['void']]],
'VadCell' : [ 0x10, ['array', 2, ['_MI_VAD_ALLOCATION_CELL']]],
'VadBitMapCommitment' : [ 0x60, ['unsigned long']],
'MaximumLastVadBit' : [ 0x64, ['unsigned long']],
'VadsBeingDeleted' : [ 0x68, ['long']],
'NumberOfDebugEnclaves' : [ 0x6c, ['long']],
'PhysicalMappingCount' : [ 0x70, ['unsigned long long']],
'LastVadDeletionEvent' : [ 0x78, ['pointer64', ['_KEVENT']]],
'SubVadRanges' : [ 0x80, ['array', 3, ['_LIST_ENTRY']]],
'NumaAware' : [ 0xb0, ['unsigned char']],
'CloneNestingLevel' : [ 0xb8, ['unsigned long long']],
'PrivateFixupVadCount' : [ 0xc0, ['unsigned long long']],
'CfgBitMap' : [ 0xc8, ['array', 2, ['_MI_CFG_BITMAP_INFO']]],
'CommittedPageTableBufferForTopLevel' : [ 0xf8, ['array', 8, ['unsigned long']]],
'CommittedPageTableBitmaps' : [ 0x118, ['array', 3, ['_RTL_BITMAP']]],
'PageTableBitmapPages' : [ 0x148, ['array', 3, ['unsigned long']]],
} ],
'_PROC_FEEDBACK' : [ 0x90, {
'Lock' : [ 0x0, ['unsigned long long']],
'CyclesLast' : [ 0x8, ['unsigned long long']],
'CyclesActive' : [ 0x10, ['unsigned long long']],
'Counters' : [ 0x18, ['array', 2, ['pointer64', ['_PROC_FEEDBACK_COUNTER']]]],
'LastUpdateTime' : [ 0x28, ['unsigned long long']],
'UnscaledTime' : [ 0x30, ['unsigned long long']],
'UnaccountedTime' : [ 0x38, ['long long']],
'ScaledTime' : [ 0x40, ['array', 2, ['unsigned long long']]],
'UnaccountedKernelTime' : [ 0x50, ['unsigned long long']],
'PerformanceScaledKernelTime' : [ 0x58, ['unsigned long long']],
'UserTimeLast' : [ 0x60, ['unsigned long']],
'KernelTimeLast' : [ 0x64, ['unsigned long']],
'IdleGenerationNumberLast' : [ 0x68, ['unsigned long long']],
'HvActiveTimeLast' : [ 0x70, ['unsigned long long']],
'StallCyclesLast' : [ 0x78, ['unsigned long long']],
'StallTime' : [ 0x80, ['unsigned long long']],
'KernelTimesIndex' : [ 0x88, ['unsigned char']],
} ],
'_MI_PAGEFILE_BITMAPS_CACHE_ENTRY' : [ 0x38, {
'LengthTreeNode' : [ 0x0, ['_RTL_BALANCED_NODE']],
'FreeListEntry' : [ 0x0, ['_LIST_ENTRY']],
'LocationTreeNode' : [ 0x18, ['_RTL_BALANCED_NODE']],
'StartingIndex' : [ 0x30, ['unsigned long']],
'Length' : [ 0x34, ['unsigned long']],
} ],
'__unnamed_220f' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Alignment' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_2213' : [ 0x18, {
'MinimumVector' : [ 0x0, ['unsigned long']],
'MaximumVector' : [ 0x4, ['unsigned long']],
'AffinityPolicy' : [ 0x8, ['unsigned short']],
'Group' : [ 0xa, ['unsigned short']],
'PriorityPolicy' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'IrqPriorityUndefined', 1: 'IrqPriorityLow', 2: 'IrqPriorityNormal', 3: 'IrqPriorityHigh'})]],
'TargetedProcessors' : [ 0x10, ['unsigned long long']],
} ],
'__unnamed_2215' : [ 0x8, {
'MinimumChannel' : [ 0x0, ['unsigned long']],
'MaximumChannel' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_2217' : [ 0x10, {
'RequestLine' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'Channel' : [ 0x8, ['unsigned long']],
'TransferWidth' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_2219' : [ 0xc, {
'Data' : [ 0x0, ['array', 3, ['unsigned long']]],
} ],
'__unnamed_221b' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'MinBusNumber' : [ 0x4, ['unsigned long']],
'MaxBusNumber' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_221d' : [ 0xc, {
'Priority' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_221f' : [ 0x18, {
'Length40' : [ 0x0, ['unsigned long']],
'Alignment40' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_2221' : [ 0x18, {
'Length48' : [ 0x0, ['unsigned long']],
'Alignment48' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_2223' : [ 0x18, {
'Length64' : [ 0x0, ['unsigned long']],
'Alignment64' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_2225' : [ 0xc, {
'Class' : [ 0x0, ['unsigned char']],
'Type' : [ 0x1, ['unsigned char']],
'Reserved1' : [ 0x2, ['unsigned char']],
'Reserved2' : [ 0x3, ['unsigned char']],
'IdLowPart' : [ 0x4, ['unsigned long']],
'IdHighPart' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_2227' : [ 0x18, {
'Port' : [ 0x0, ['__unnamed_220f']],
'Memory' : [ 0x0, ['__unnamed_220f']],
'Interrupt' : [ 0x0, ['__unnamed_2213']],
'Dma' : [ 0x0, ['__unnamed_2215']],
'DmaV3' : [ 0x0, ['__unnamed_2217']],
'Generic' : [ 0x0, ['__unnamed_220f']],
'DevicePrivate' : [ 0x0, ['__unnamed_2219']],
'BusNumber' : [ 0x0, ['__unnamed_221b']],
'ConfigData' : [ 0x0, ['__unnamed_221d']],
'Memory40' : [ 0x0, ['__unnamed_221f']],
'Memory48' : [ 0x0, ['__unnamed_2221']],
'Memory64' : [ 0x0, ['__unnamed_2223']],
'Connection' : [ 0x0, ['__unnamed_2225']],
} ],
'_IO_RESOURCE_DESCRIPTOR' : [ 0x20, {
'Option' : [ 0x0, ['unsigned char']],
'Type' : [ 0x1, ['unsigned char']],
'ShareDisposition' : [ 0x2, ['unsigned char']],
'Spare1' : [ 0x3, ['unsigned char']],
'Flags' : [ 0x4, ['unsigned short']],
'Spare2' : [ 0x6, ['unsigned short']],
'u' : [ 0x8, ['__unnamed_2227']],
} ],
'_POP_THERMAL_ZONE' : [ 0x348, {
'PolicyDevice' : [ 0x0, ['_POP_POLICY_DEVICE']],
'Link' : [ 0x0, ['_LIST_ENTRY']],
'DeviceType' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceWakeAlarm', 9: 'PolicyDeviceFan', 10: 'PolicyCsBatterySaver', 11: 'PolicyImmediateDozeS4Predicted', 12: 'PolicyImmediateDozeS4PredictedNoWake', 13: 'PolicyDeviceMax'})]],
'Notification' : [ 0x18, ['pointer64', ['void']]],
'Name' : [ 0x20, ['_UNICODE_STRING']],
'Device' : [ 0x30, ['pointer64', ['_DEVICE_OBJECT']]],
'Irp' : [ 0x38, ['pointer64', ['_IRP']]],
'State' : [ 0x40, ['unsigned char']],
'Flags' : [ 0x41, ['unsigned char']],
'Removing' : [ 0x42, ['unsigned char']],
'Mode' : [ 0x43, ['unsigned char']],
'PendingMode' : [ 0x44, ['unsigned char']],
'ActivePoint' : [ 0x45, ['unsigned char']],
'PendingActivePoint' : [ 0x46, ['unsigned char']],
'Critical' : [ 0x47, ['unsigned char']],
'ThermalStandby' : [ 0x48, ['unsigned char']],
'OverThrottled' : [ 0x49, ['unsigned char']],
'HighPrecisionThrottle' : [ 0x4c, ['long']],
'Throttle' : [ 0x50, ['long']],
'PendingThrottle' : [ 0x54, ['long']],
'ThrottleReasons' : [ 0x58, ['unsigned long']],
'LastTime' : [ 0x60, ['unsigned long long']],
'SampleRate' : [ 0x68, ['unsigned long']],
'LastTemp' : [ 0x6c, ['unsigned long']],
'PassiveTimer' : [ 0x70, ['_KTIMER']],
'PassiveDpc' : [ 0xb0, ['_KDPC']],
'Info' : [ 0xf0, ['_THERMAL_INFORMATION_EX']],
'InfoLastUpdateTime' : [ 0x148, ['_LARGE_INTEGER']],
'Policy' : [ 0x150, ['_THERMAL_POLICY']],
'PolicyDriver' : [ 0x168, ['unsigned char']],
'LastActiveStartTime' : [ 0x170, ['unsigned long long']],
'LastPassiveStartTime' : [ 0x178, ['unsigned long long']],
'WorkItem' : [ 0x180, ['_WORK_QUEUE_ITEM']],
'Lock' : [ 0x1a0, ['_POP_RW_LOCK']],
'ZoneStopped' : [ 0x1b0, ['_KEVENT']],
'TemperatureUpdated' : [ 0x1c8, ['_KEVENT']],
'InstanceId' : [ 0x1e0, ['unsigned long']],
'TelemetryTracker' : [ 0x1e8, ['_POP_THERMAL_TELEMETRY_TRACKER']],
'Description' : [ 0x338, ['_UNICODE_STRING']],
} ],
'_MMPTE_LIST' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'OneEntry' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'filler0' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'filler1' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 28, native_type='unsigned long long')]],
'NextEntry' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 64, native_type='unsigned long long')]],
} ],
'_VI_POOL_PAGE_HEADER' : [ 0x18, {
'NextPage' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
'VerifierEntry' : [ 0x8, ['pointer64', ['void']]],
'Signature' : [ 0x10, ['unsigned long long']],
} ],
'_MI_REBUILD_LARGE_PAGES' : [ 0x228, {
'Active' : [ 0x0, ['long']],
'Timer' : [ 0x4, ['array', 64, ['array', 4, ['_MI_REBUILD_LARGE_PAGE_COUNTDOWN']]]],
'WorkItem' : [ 0x208, ['_WORK_QUEUE_ITEM']],
} ],
'_HANDLE_TRACE_DEBUG_INFO' : [ 0xf0, {
'RefCount' : [ 0x0, ['long']],
'TableSize' : [ 0x4, ['unsigned long']],
'BitMaskFlags' : [ 0x8, ['unsigned long']],
'CloseCompactionLock' : [ 0x10, ['_FAST_MUTEX']],
'CurrentStackIndex' : [ 0x48, ['unsigned long']],
'TraceDb' : [ 0x50, ['array', 1, ['_HANDLE_TRACE_DB_ENTRY']]],
} ],
'_HHIVE' : [ 0xa68, {
'Signature' : [ 0x0, ['unsigned long']],
'GetCellRoutine' : [ 0x8, ['pointer64', ['void']]],
'ReleaseCellRoutine' : [ 0x10, ['pointer64', ['void']]],
'Allocate' : [ 0x18, ['pointer64', ['void']]],
'Free' : [ 0x20, ['pointer64', ['void']]],
'FileWrite' : [ 0x28, ['pointer64', ['void']]],
'FileRead' : [ 0x30, ['pointer64', ['void']]],
'HiveLoadFailure' : [ 0x38, ['pointer64', ['void']]],
'BaseBlock' : [ 0x40, ['pointer64', ['_HBASE_BLOCK']]],
'DirtyVector' : [ 0x48, ['_RTL_BITMAP']],
'DirtyCount' : [ 0x58, ['unsigned long']],
'DirtyAlloc' : [ 0x5c, ['unsigned long']],
'UnreconciledVector' : [ 0x60, ['_RTL_BITMAP']],
'UnreconciledCount' : [ 0x70, ['unsigned long']],
'BaseBlockAlloc' : [ 0x74, ['unsigned long']],
'Cluster' : [ 0x78, ['unsigned long']],
'Flat' : [ 0x7c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ReadOnly' : [ 0x7c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'SystemCacheBacked' : [ 0x7c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Reserved' : [ 0x7c, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
'DirtyFlag' : [ 0x7d, ['unsigned char']],
'HvBinHeadersUse' : [ 0x80, ['unsigned long']],
'HvFreeCellsUse' : [ 0x84, ['unsigned long']],
'HvUsedCellsUse' : [ 0x88, ['unsigned long']],
'CmUsedCellsUse' : [ 0x8c, ['unsigned long']],
'HiveFlags' : [ 0x90, ['unsigned long']],
'CurrentLog' : [ 0x94, ['unsigned long']],
'CurrentLogSequence' : [ 0x98, ['unsigned long']],
'CurrentLogMinimumSequence' : [ 0x9c, ['unsigned long']],
'CurrentLogOffset' : [ 0xa0, ['unsigned long']],
'MinimumLogSequence' : [ 0xa4, ['unsigned long']],
'LogFileSizeCap' : [ 0xa8, ['unsigned long']],
'LogDataPresent' : [ 0xac, ['array', 2, ['unsigned char']]],
'PrimaryFileValid' : [ 0xae, ['unsigned char']],
'BaseBlockDirty' : [ 0xaf, ['unsigned char']],
'LastLogSwapTime' : [ 0xb0, ['_LARGE_INTEGER']],
'FirstLogFile' : [ 0xb8, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned short')]],
'SecondLogFile' : [ 0xb8, ['BitField', dict(start_bit = 3, end_bit = 6, native_type='unsigned short')]],
'HeaderRecovered' : [ 0xb8, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'LegacyRecoveryIndicated' : [ 0xb8, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'RecoveryInformationReserved' : [ 0xb8, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned short')]],
'RecoveryInformation' : [ 0xb8, ['unsigned short']],
'LogEntriesRecovered' : [ 0xba, ['array', 2, ['unsigned char']]],
'RefreshCount' : [ 0xbc, ['unsigned long']],
'StorageTypeCount' : [ 0xc0, ['unsigned long']],
'Version' : [ 0xc4, ['unsigned long']],
'ViewMap' : [ 0xc8, ['_HVIEW_MAP']],
'Storage' : [ 0x578, ['array', 2, ['_DUAL']]],
} ],
'_WHEA_XPF_NMI_DESCRIPTOR' : [ 0x3, {
'Type' : [ 0x0, ['unsigned short']],
'Enabled' : [ 0x2, ['unsigned char']],
} ],
'_ETW_FILTER_HEADER' : [ 0x48, {
'FilterFlags' : [ 0x0, ['long']],
'PidFilter' : [ 0x8, ['pointer64', ['_ETW_FILTER_PID']]],
'ExeFilter' : [ 0x10, ['pointer64', ['_ETW_FILTER_STRING_TOKEN']]],
'PkgIdFilter' : [ 0x18, ['pointer64', ['_ETW_FILTER_STRING_TOKEN']]],
'PkgAppIdFilter' : [ 0x20, ['pointer64', ['_ETW_FILTER_STRING_TOKEN']]],
'StackWalkFilter' : [ 0x28, ['pointer64', ['_ETW_PERFECT_HASH_FUNCTION']]],
'EventIdFilter' : [ 0x30, ['pointer64', ['_ETW_PERFECT_HASH_FUNCTION']]],
'PayloadFilter' : [ 0x38, ['pointer64', ['_ETW_PAYLOAD_FILTER']]],
'ProviderSideFilter' : [ 0x40, ['pointer64', ['_EVENT_FILTER_HEADER']]],
} ],
'_CM_WORKITEM' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Private' : [ 0x10, ['unsigned long']],
'WorkerRoutine' : [ 0x18, ['pointer64', ['void']]],
'Parameter' : [ 0x20, ['pointer64', ['void']]],
} ],
'_ETW_PAYLOAD_FILTER' : [ 0x58, {
'RefCount' : [ 0x0, ['long']],
'PayloadFilter' : [ 0x8, ['_AGGREGATED_PAYLOAD_FILTER']],
} ],
'_CM_TRANS' : [ 0xa8, {
'TransactionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'KCBUoWListHead' : [ 0x10, ['_LIST_ENTRY']],
'LazyCommitListEntry' : [ 0x20, ['_LIST_ENTRY']],
'KtmTrans' : [ 0x30, ['pointer64', ['void']]],
'CmRm' : [ 0x38, ['pointer64', ['_CM_RM']]],
'KtmEnlistmentObject' : [ 0x40, ['pointer64', ['_KENLISTMENT']]],
'KtmEnlistmentHandle' : [ 0x48, ['pointer64', ['void']]],
'KtmUow' : [ 0x50, ['_GUID']],
'StartLsn' : [ 0x60, ['unsigned long long']],
'TransState' : [ 0x68, ['unsigned long']],
'HiveCount' : [ 0x6c, ['unsigned long']],
'HiveArray' : [ 0x70, ['array', 7, ['pointer64', ['_CMHIVE']]]],
} ],
'_WHEA_ERROR_RECORD_HEADER_VALIDBITS' : [ 0x4, {
'PlatformId' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Timestamp' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PartitionId' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_CM_PARTIAL_RESOURCE_LIST' : [ 0x1c, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'PartialDescriptors' : [ 0x8, ['array', 1, ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_RTL_RANGE_LIST' : [ 0x20, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x10, ['unsigned long']],
'Count' : [ 0x14, ['unsigned long']],
'Stamp' : [ 0x18, ['unsigned long']],
} ],
'_RTL_TIME_ZONE_INFORMATION' : [ 0xac, {
'Bias' : [ 0x0, ['long']],
'StandardName' : [ 0x4, ['array', 32, ['wchar']]],
'StandardStart' : [ 0x44, ['_TIME_FIELDS']],
'StandardBias' : [ 0x54, ['long']],
'DaylightName' : [ 0x58, ['array', 32, ['wchar']]],
'DaylightStart' : [ 0x98, ['_TIME_FIELDS']],
'DaylightBias' : [ 0xa8, ['long']],
} ],
'_POP_THERMAL_TELEMETRY_TRACKER' : [ 0x150, {
'AccountingDisabled' : [ 0x0, ['unsigned char']],
'LastPassiveUpdateTime' : [ 0x8, ['unsigned long long']],
'TotalPassiveTime' : [ 0x10, ['array', 20, ['unsigned long long']]],
'PassiveTimeSnap' : [ 0xb0, ['array', 20, ['unsigned long long']]],
} ],
'_OBJECT_CREATE_INFORMATION' : [ 0x40, {
'Attributes' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x8, ['pointer64', ['void']]],
'ProbeMode' : [ 0x10, ['unsigned char']],
'PagedPoolCharge' : [ 0x14, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x18, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x1c, ['unsigned long']],
'SecurityDescriptor' : [ 0x20, ['pointer64', ['void']]],
'SecurityQos' : [ 0x28, ['pointer64', ['_SECURITY_QUALITY_OF_SERVICE']]],
'SecurityQualityOfService' : [ 0x30, ['_SECURITY_QUALITY_OF_SERVICE']],
} ],
'_HVIEW_MAP' : [ 0x4b0, {
'MappedLength' : [ 0x0, ['unsigned long']],
'Lock' : [ 0x8, ['_EX_PUSH_LOCK']],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'Directory' : [ 0x18, ['pointer64', ['_HVIEW_MAP_DIRECTORY']]],
'PagesCharged' : [ 0x20, ['unsigned long']],
'PinLog' : [ 0x28, ['_HVIEW_MAP_PIN_LOG']],
} ],
'_TRACE_ENABLE_INFO' : [ 0x20, {
'IsEnabled' : [ 0x0, ['unsigned long']],
'Level' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'LoggerId' : [ 0x6, ['unsigned short']],
'EnableProperty' : [ 0x8, ['unsigned long']],
'Reserved2' : [ 0xc, ['unsigned long']],
'MatchAnyKeyword' : [ 0x10, ['unsigned long long']],
'MatchAllKeyword' : [ 0x18, ['unsigned long long']],
} ],
'_DISALLOWED_GUIDS' : [ 0x10, {
'Count' : [ 0x0, ['unsigned short']],
'Guids' : [ 0x8, ['pointer64', ['_GUID']]],
} ],
'_HVIEW_MAP_DIRECTORY' : [ 0x400, {
'Tables' : [ 0x0, ['array', 128, ['pointer64', ['_HVIEW_MAP_TABLE']]]],
} ],
'_PO_DIAG_STACK_RECORD' : [ 0x10, {
'StackDepth' : [ 0x0, ['unsigned long']],
'Stack' : [ 0x8, ['array', 1, ['pointer64', ['void']]]],
} ],
'_SECTION_OBJECT_POINTERS' : [ 0x18, {
'DataSectionObject' : [ 0x0, ['pointer64', ['void']]],
'SharedCacheMap' : [ 0x8, ['pointer64', ['void']]],
'ImageSectionObject' : [ 0x10, ['pointer64', ['void']]],
} ],
'_SEP_AUDIT_POLICY' : [ 0x1f, {
'AdtTokenPolicy' : [ 0x0, ['_TOKEN_AUDIT_POLICY']],
'PolicySetStatus' : [ 0x1e, ['unsigned char']],
} ],
'__unnamed_229f' : [ 0x4, {
'SnapSharedExportsFailed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_22a1' : [ 0x20, {
'AllSharedExportThunks' : [ 0x0, ['_VF_TARGET_ALL_SHARED_EXPORT_THUNKS']],
'Flags' : [ 0x0, ['__unnamed_229f']],
} ],
'_VF_TARGET_DRIVER' : [ 0x38, {
'TreeNode' : [ 0x0, ['_VF_AVL_TREE_NODE']],
'u1' : [ 0x10, ['__unnamed_22a1']],
'VerifiedData' : [ 0x30, ['pointer64', ['_VF_TARGET_VERIFIED_DRIVER_DATA']]],
} ],
'__unnamed_22aa' : [ 0x14, {
'ClassGuid' : [ 0x0, ['_GUID']],
'SymbolicLinkName' : [ 0x10, ['array', 1, ['wchar']]],
} ],
'__unnamed_22ac' : [ 0x2, {
'DeviceId' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_22ae' : [ 0x10, {
'NotificationStructure' : [ 0x0, ['pointer64', ['void']]],
'DeviceId' : [ 0x8, ['array', 1, ['wchar']]],
} ],
'__unnamed_22b0' : [ 0x8, {
'Notification' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_22b2' : [ 0x8, {
'NotificationCode' : [ 0x0, ['unsigned long']],
'NotificationData' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_22b4' : [ 0x8, {
'VetoType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]],
'DeviceIdVetoNameBuffer' : [ 0x4, ['array', 1, ['wchar']]],
} ],
'__unnamed_22b6' : [ 0x10, {
'BlockedDriverGuid' : [ 0x0, ['_GUID']],
} ],
'__unnamed_22b8' : [ 0x2, {
'ParentId' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_22ba' : [ 0x20, {
'PowerSettingGuid' : [ 0x0, ['_GUID']],
'Flags' : [ 0x10, ['unsigned long']],
'SessionId' : [ 0x14, ['unsigned long']],
'DataLength' : [ 0x18, ['unsigned long']],
'Data' : [ 0x1c, ['array', 1, ['unsigned char']]],
} ],
'__unnamed_22bc' : [ 0x20, {
'DeviceClass' : [ 0x0, ['__unnamed_22aa']],
'TargetDevice' : [ 0x0, ['__unnamed_22ac']],
'InstallDevice' : [ 0x0, ['__unnamed_22ac']],
'CustomNotification' : [ 0x0, ['__unnamed_22ae']],
'ProfileNotification' : [ 0x0, ['__unnamed_22b0']],
'PowerNotification' : [ 0x0, ['__unnamed_22b2']],
'VetoNotification' : [ 0x0, ['__unnamed_22b4']],
'BlockedDriverNotification' : [ 0x0, ['__unnamed_22b6']],
'InvalidIDNotification' : [ 0x0, ['__unnamed_22b8']],
'PowerSettingNotification' : [ 0x0, ['__unnamed_22ba']],
'PropertyChangeNotification' : [ 0x0, ['__unnamed_22ac']],
'DeviceInstanceNotification' : [ 0x0, ['__unnamed_22ac']],
} ],
'_PLUGPLAY_EVENT_BLOCK' : [ 0x50, {
'EventGuid' : [ 0x0, ['_GUID']],
'EventCategory' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'HardwareProfileChangeEvent', 1: 'TargetDeviceChangeEvent', 2: 'DeviceClassChangeEvent', 3: 'CustomDeviceEvent', 4: 'DeviceInstallEvent', 5: 'DeviceArrivalEvent', 6: 'VetoEvent', 7: 'BlockedDriverEvent', 8: 'InvalidIDEvent', 9: 'DevicePropertyChangeEvent', 10: 'DeviceInstanceRemovalEvent', 11: 'DeviceInstanceStartedEvent', 12: 'MaxPlugEventCategory'})]],
'Result' : [ 0x18, ['pointer64', ['unsigned long']]],
'Flags' : [ 0x20, ['unsigned long']],
'TotalSize' : [ 0x24, ['unsigned long']],
'DeviceObject' : [ 0x28, ['pointer64', ['void']]],
'u' : [ 0x30, ['__unnamed_22bc']],
} ],
'_VF_SUSPECT_DRIVER_ENTRY' : [ 0x28, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'Loads' : [ 0x10, ['unsigned long']],
'Unloads' : [ 0x14, ['unsigned long']],
'BaseName' : [ 0x18, ['_UNICODE_STRING']],
} ],
'_MMPTE_TIMESTAMP' : [ 0x8, {
'MustBeZero' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long long')]],
'GlobalTimeStamp' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_SID_AND_ATTRIBUTES_HASH' : [ 0x110, {
'SidCount' : [ 0x0, ['unsigned long']],
'SidAttr' : [ 0x8, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'Hash' : [ 0x10, ['array', 32, ['unsigned long long']]],
} ],
'_XSTATE_CONTEXT' : [ 0x20, {
'Mask' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['unsigned long']],
'Area' : [ 0x10, ['pointer64', ['_XSAVE_AREA']]],
'Buffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'_PROCESSOR_IDLE_PREPARE_INFO' : [ 0x58, {
'Context' : [ 0x0, ['pointer64', ['void']]],
'Constraints' : [ 0x8, ['_PROCESSOR_IDLE_CONSTRAINTS']],
'DependencyCount' : [ 0x38, ['unsigned long']],
'DependencyUsed' : [ 0x3c, ['unsigned long']],
'DependencyArray' : [ 0x40, ['pointer64', ['_PROCESSOR_IDLE_DEPENDENCY']]],
'PlatformIdleStateIndex' : [ 0x48, ['unsigned long']],
'ProcessorIdleStateIndex' : [ 0x4c, ['unsigned long']],
'IdleSelectFailureMask' : [ 0x50, ['unsigned long']],
} ],
'_XSAVE_FORMAT' : [ 0x200, {
'ControlWord' : [ 0x0, ['unsigned short']],
'StatusWord' : [ 0x2, ['unsigned short']],
'TagWord' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'ErrorOpcode' : [ 0x6, ['unsigned short']],
'ErrorOffset' : [ 0x8, ['unsigned long']],
'ErrorSelector' : [ 0xc, ['unsigned short']],
'Reserved2' : [ 0xe, ['unsigned short']],
'DataOffset' : [ 0x10, ['unsigned long']],
'DataSelector' : [ 0x14, ['unsigned short']],
'Reserved3' : [ 0x16, ['unsigned short']],
'MxCsr' : [ 0x18, ['unsigned long']],
'MxCsr_Mask' : [ 0x1c, ['unsigned long']],
'FloatRegisters' : [ 0x20, ['array', 8, ['_M128A']]],
'XmmRegisters' : [ 0xa0, ['array', 16, ['_M128A']]],
'Reserved4' : [ 0x1a0, ['array', 96, ['unsigned char']]],
} ],
'__unnamed_22d8' : [ 0x1, {
'AsUCHAR' : [ 0x0, ['unsigned char']],
'NoDomainAccounting' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned char')]],
'DecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 5, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
} ],
'PROCESSOR_PERFSTATE_POLICY' : [ 0x1c, {
'Revision' : [ 0x0, ['unsigned long']],
'MaxThrottle' : [ 0x4, ['unsigned char']],
'MinThrottle' : [ 0x5, ['unsigned char']],
'BusyAdjThreshold' : [ 0x6, ['unsigned char']],
'Spare' : [ 0x7, ['unsigned char']],
'Flags' : [ 0x7, ['__unnamed_22d8']],
'TimeCheck' : [ 0x8, ['unsigned long']],
'IncreaseTime' : [ 0xc, ['unsigned long']],
'DecreaseTime' : [ 0x10, ['unsigned long']],
'IncreasePercent' : [ 0x14, ['unsigned long']],
'DecreasePercent' : [ 0x18, ['unsigned long']],
} ],
'_BUS_EXTENSION_LIST' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['void']]],
'BusExtension' : [ 0x8, ['pointer64', ['_PI_BUS_EXTENSION']]],
} ],
'_CACHED_CHILD_LIST' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'ValueList' : [ 0x8, ['unsigned long long']],
'RealKcb' : [ 0x8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
} ],
'_KDEVICE_QUEUE' : [ 0x28, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceListHead' : [ 0x8, ['_LIST_ENTRY']],
'Lock' : [ 0x18, ['unsigned long long']],
'Busy' : [ 0x20, ['unsigned char']],
'Reserved' : [ 0x20, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='long long')]],
'Hint' : [ 0x20, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='long long')]],
} ],
'_SYSTEM_POWER_STATE_CONTEXT' : [ 0x4, {
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'TargetSystemState' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 12, native_type='unsigned long')]],
'EffectiveSystemState' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 16, native_type='unsigned long')]],
'CurrentSystemState' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'IgnoreHibernationPath' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'PseudoTransition' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long')]],
'ContextAsUlong' : [ 0x0, ['unsigned long']],
} ],
'_MI_HARDWARE_STATE' : [ 0xa8, {
'NodeMask' : [ 0x0, ['unsigned long']],
'NodeGraph' : [ 0x8, ['pointer64', ['unsigned short']]],
'SystemNodeInformation' : [ 0x10, ['pointer64', ['_MI_SYSTEM_NODE_INFORMATION']]],
'NumaLastRangeIndex' : [ 0x18, ['unsigned long']],
'NumaMemoryRanges' : [ 0x20, ['pointer64', ['_HAL_NODE_RANGE']]],
'NumaTableCaptured' : [ 0x28, ['unsigned char']],
'NodeShift' : [ 0x29, ['unsigned char']],
'ChannelMemoryRanges' : [ 0x30, ['pointer64', ['_HAL_CHANNEL_MEMORY_RANGES']]],
'ChannelShift' : [ 0x38, ['unsigned char']],
'SecondLevelCacheSize' : [ 0x3c, ['unsigned long']],
'FirstLevelCacheSize' : [ 0x40, ['unsigned long']],
'PhysicalAddressBits' : [ 0x44, ['unsigned long']],
'AllMainMemoryMustBeCached' : [ 0x48, ['unsigned char']],
'TotalPagesAllowed' : [ 0x50, ['unsigned long long']],
'SecondaryColorMask' : [ 0x58, ['unsigned long']],
'SecondaryColors' : [ 0x5c, ['unsigned long']],
'FlushTbForAttributeChange' : [ 0x60, ['unsigned long']],
'FlushCacheForAttributeChange' : [ 0x64, ['unsigned long']],
'FlushCacheForPageAttributeChange' : [ 0x68, ['unsigned long']],
'CacheFlushPromoteThreshold' : [ 0x6c, ['unsigned long']],
'FlushTbThreshold' : [ 0x70, ['unsigned long long']],
'ZeroCostCounts' : [ 0x78, ['array', 2, ['_MI_ZERO_COST_COUNTS']]],
'PrimaryPfns' : [ 0x98, ['unsigned long long']],
'HighestPossiblePhysicalPage' : [ 0xa0, ['unsigned long long']],
} ],
'_PEBS_DS_SAVE_AREA' : [ 0x60, {
'BtsBufferBase' : [ 0x0, ['unsigned long long']],
'BtsIndex' : [ 0x8, ['unsigned long long']],
'BtsAbsoluteMaximum' : [ 0x10, ['unsigned long long']],
'BtsInterruptThreshold' : [ 0x18, ['unsigned long long']],
'PebsBufferBase' : [ 0x20, ['unsigned long long']],
'PebsIndex' : [ 0x28, ['unsigned long long']],
'PebsAbsoluteMaximum' : [ 0x30, ['unsigned long long']],
'PebsInterruptThreshold' : [ 0x38, ['unsigned long long']],
'PebsCounterReset0' : [ 0x40, ['unsigned long long']],
'PebsCounterReset1' : [ 0x48, ['unsigned long long']],
'PebsCounterReset2' : [ 0x50, ['unsigned long long']],
'PebsCounterReset3' : [ 0x58, ['unsigned long long']],
} ],
'_OBJECT_TYPE_INITIALIZER' : [ 0x78, {
'Length' : [ 0x0, ['unsigned short']],
'ObjectTypeFlags' : [ 0x2, ['unsigned char']],
'CaseInsensitive' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'UnnamedObjectsOnly' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'UseDefaultObject' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SecurityRequired' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'MaintainHandleCount' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'MaintainTypeList' : [ 0x2, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'SupportsObjectCallbacks' : [ 0x2, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'CacheAligned' : [ 0x2, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'ObjectTypeCode' : [ 0x4, ['unsigned long']],
'InvalidAttributes' : [ 0x8, ['unsigned long']],
'GenericMapping' : [ 0xc, ['_GENERIC_MAPPING']],
'ValidAccessMask' : [ 0x1c, ['unsigned long']],
'RetainAccess' : [ 0x20, ['unsigned long']],
'PoolType' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPoolBase', 1: 'PagedPool', 2: 'NonPagedPoolBaseMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolBaseCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolBaseCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 516: 'NonPagedPoolNxCacheAligned', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 512: 'NonPagedPoolNx', 544: 'NonPagedPoolSessionNx', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'DefaultPagedPoolCharge' : [ 0x28, ['unsigned long']],
'DefaultNonPagedPoolCharge' : [ 0x2c, ['unsigned long']],
'DumpProcedure' : [ 0x30, ['pointer64', ['void']]],
'OpenProcedure' : [ 0x38, ['pointer64', ['void']]],
'CloseProcedure' : [ 0x40, ['pointer64', ['void']]],
'DeleteProcedure' : [ 0x48, ['pointer64', ['void']]],
'ParseProcedure' : [ 0x50, ['pointer64', ['void']]],
'SecurityProcedure' : [ 0x58, ['pointer64', ['void']]],
'QueryNameProcedure' : [ 0x60, ['pointer64', ['void']]],
'OkayToCloseProcedure' : [ 0x68, ['pointer64', ['void']]],
'WaitObjectFlagMask' : [ 0x70, ['unsigned long']],
'WaitObjectFlagOffset' : [ 0x74, ['unsigned short']],
'WaitObjectPointerOffset' : [ 0x76, ['unsigned short']],
} ],
'__unnamed_231c' : [ 0x8, {
'Long' : [ 0x0, ['unsigned long long']],
'e1' : [ 0x0, ['_MI_DECAY_TIMER_LINKAGE']],
} ],
'_MI_DECAY_TIMER_LINK' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_231c']],
} ],
'_TRIAGE_PNP_DEVICE_COMPLETION_REQUEST' : [ 0x18, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'DeviceNode' : [ 0x10, ['pointer64', ['_TRIAGE_DEVICE_NODE']]],
} ],
'_KPROCESSOR_STATE' : [ 0x5b0, {
'SpecialRegisters' : [ 0x0, ['_KSPECIAL_REGISTERS']],
'ContextFrame' : [ 0xe0, ['_CONTEXT']],
} ],
'tagSWITCH_CONTEXT_ATTRIBUTE' : [ 0x18, {
'ulContextUpdateCounter' : [ 0x0, ['unsigned long long']],
'fAllowContextUpdate' : [ 0x8, ['long']],
'fEnableTrace' : [ 0xc, ['long']],
'EtwHandle' : [ 0x10, ['unsigned long long']],
} ],
'_IO_CLIENT_EXTENSION' : [ 0x10, {
'NextExtension' : [ 0x0, ['pointer64', ['_IO_CLIENT_EXTENSION']]],
'ClientIdentificationAddress' : [ 0x8, ['pointer64', ['void']]],
} ],
'_ETW_BUFFER_CONTEXT' : [ 0x4, {
'ProcessorNumber' : [ 0x0, ['unsigned char']],
'Alignment' : [ 0x1, ['unsigned char']],
'ProcessorIndex' : [ 0x0, ['unsigned short']],
'LoggerId' : [ 0x2, ['unsigned short']],
} ],
'_HEAP_EXTENDED_ENTRY' : [ 0x10, {
'Reserved' : [ 0x0, ['pointer64', ['void']]],
'FunctionIndex' : [ 0x8, ['unsigned short']],
'ContextValue' : [ 0xa, ['unsigned short']],
'InterceptorValue' : [ 0x8, ['unsigned long']],
'UnusedBytesLength' : [ 0xc, ['unsigned short']],
'EntryOffset' : [ 0xe, ['unsigned char']],
'ExtendedBlockSignature' : [ 0xf, ['unsigned char']],
} ],
'_MI_SYSTEM_VA_STATE' : [ 0x2c0, {
'SystemTablesLock' : [ 0x0, ['unsigned long long']],
'AvailableSystemCacheVa' : [ 0x8, ['unsigned long long']],
'DynamicBitMapSystemPtes' : [ 0x10, ['_MI_DYNAMIC_BITMAP']],
'DynamicBitMapDriverImages' : [ 0x60, ['array', 2, ['_MI_DYNAMIC_BITMAP']]],
'DynamicBitMapPagedPool' : [ 0x100, ['_MI_DYNAMIC_BITMAP']],
'DynamicBitMapSpecialPool' : [ 0x150, ['_MI_DYNAMIC_BITMAP']],
'DynamicBitMapSystemCache' : [ 0x1a0, ['_MI_DYNAMIC_BITMAP']],
'WorkingSetListHashStart' : [ 0x1f0, ['pointer64', ['_MMWSLE_HASH']]],
'WorkingSetListHashEnd' : [ 0x1f8, ['pointer64', ['_MMWSLE_HASH']]],
'WorkingSetListIndirectHashStart' : [ 0x200, ['pointer64', ['_MMWSLE_NONDIRECT_HASH']]],
'FreeSystemCacheVa' : [ 0x208, ['_KEVENT']],
'SystemVaLock' : [ 0x220, ['unsigned long long']],
'DeleteKvaLock' : [ 0x228, ['long']],
'FreeSystemCache' : [ 0x230, ['_MI_PTE_CHAIN_HEAD']],
'SystemCacheViewLock' : [ 0x248, ['unsigned long long']],
'UnusableWsles' : [ 0x250, ['array', 5, ['unsigned long long']]],
'PossibleWsles' : [ 0x278, ['array', 5, ['unsigned long long']]],
} ],
'_DIRTY_PAGE_STATISTICS' : [ 0x18, {
'DirtyPages' : [ 0x0, ['unsigned long long']],
'DirtyPagesLastScan' : [ 0x8, ['unsigned long long']],
'DirtyPagesScheduledLastScan' : [ 0x10, ['unsigned long']],
} ],
'_DBGKD_WRITE_CUSTOM_BREAKPOINT' : [ 0x18, {
'BreakPointAddress' : [ 0x0, ['unsigned long long']],
'BreakPointInstruction' : [ 0x8, ['unsigned long long']],
'BreakPointHandle' : [ 0x10, ['unsigned long']],
'BreakPointInstructionSize' : [ 0x14, ['unsigned char']],
'BreakPointInstructionAlignment' : [ 0x15, ['unsigned char']],
} ],
'_PROC_IDLE_SNAP' : [ 0x10, {
'Time' : [ 0x0, ['unsigned long long']],
'Idle' : [ 0x8, ['unsigned long long']],
} ],
'_KERNEL_STACK_SEGMENT' : [ 0x20, {
'StackBase' : [ 0x0, ['unsigned long long']],
'StackLimit' : [ 0x8, ['unsigned long long']],
'KernelStack' : [ 0x10, ['unsigned long long']],
'InitialStack' : [ 0x18, ['unsigned long long']],
} ],
'_KEXECUTE_OPTIONS' : [ 0x1, {
'ExecuteDisable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ExecuteEnable' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'DisableThunkEmulation' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Permanent' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ExecuteDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ImageDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'DisableExceptionChainValidation' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'ExecuteOptions' : [ 0x0, ['unsigned char']],
'ExecuteOptionsNV' : [ 0x0, ['unsigned char']],
} ],
'_SEP_TOKEN_PRIVILEGES' : [ 0x18, {
'Present' : [ 0x0, ['unsigned long long']],
'Enabled' : [ 0x8, ['unsigned long long']],
'EnabledByDefault' : [ 0x10, ['unsigned long long']],
} ],
'_WHEA_XPF_MCE_DESCRIPTOR' : [ 0x398, {
'Type' : [ 0x0, ['unsigned short']],
'Enabled' : [ 0x2, ['unsigned char']],
'NumberOfBanks' : [ 0x3, ['unsigned char']],
'Flags' : [ 0x4, ['_XPF_MCE_FLAGS']],
'MCG_Capability' : [ 0x8, ['unsigned long long']],
'MCG_GlobalControl' : [ 0x10, ['unsigned long long']],
'Banks' : [ 0x18, ['array', 32, ['_WHEA_XPF_MC_BANK_DESCRIPTOR']]],
} ],
'_MI_DEBUGGER_STATE' : [ 0x118, {
'TransientWrite' : [ 0x0, ['unsigned char']],
'CodePageEdited' : [ 0x1, ['unsigned char']],
'DebugPte' : [ 0x8, ['pointer64', ['_MMPTE']]],
'PoisonedTb' : [ 0x10, ['unsigned long']],
'InDebugger' : [ 0x14, ['long']],
'Pfns' : [ 0x18, ['array', 32, ['pointer64', ['void']]]],
} ],
'_MI_PROCESS_STATE' : [ 0x60, {
'ColorSeed' : [ 0x0, ['unsigned long']],
'CloneDereferenceEvent' : [ 0x8, ['_KEVENT']],
'CloneProtosSListHead' : [ 0x20, ['_SLIST_HEADER']],
'SystemDllBase' : [ 0x30, ['pointer64', ['void']]],
'RotatingUniprocessorNumber' : [ 0x38, ['long']],
'CriticalSectionTimeout' : [ 0x40, ['_LARGE_INTEGER']],
'ProcessList' : [ 0x48, ['_LIST_ENTRY']],
'SharedUserDataPte' : [ 0x58, ['pointer64', ['_MMPTE']]],
} ],
'_ARBITER_ALLOCATION_STATE' : [ 0x50, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'CurrentMinimum' : [ 0x10, ['unsigned long long']],
'CurrentMaximum' : [ 0x18, ['unsigned long long']],
'Entry' : [ 0x20, ['pointer64', ['_ARBITER_LIST_ENTRY']]],
'CurrentAlternative' : [ 0x28, ['pointer64', ['_ARBITER_ALTERNATIVE']]],
'AlternativeCount' : [ 0x30, ['unsigned long']],
'Alternatives' : [ 0x38, ['pointer64', ['_ARBITER_ALTERNATIVE']]],
'Flags' : [ 0x40, ['unsigned short']],
'RangeAttributes' : [ 0x42, ['unsigned char']],
'RangeAvailableAttributes' : [ 0x43, ['unsigned char']],
'WorkSpace' : [ 0x48, ['unsigned long long']],
} ],
'_VACB_ARRAY_HEADER' : [ 0x10, {
'VacbArrayIndex' : [ 0x0, ['unsigned long']],
'MappingCount' : [ 0x4, ['unsigned long']],
'HighestMappedIndex' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_MMWSLENTRY' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Hashed' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Direct' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 9, native_type='unsigned long long')]],
'Age' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 12, native_type='unsigned long long')]],
'VirtualPageNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type='unsigned long long')]],
'HighActiveFlink' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 52, native_type='unsigned long long')]],
'HighActiveBlink' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 56, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 64, native_type='unsigned long long')]],
} ],
'SYSTEM_POWER_CAPABILITIES' : [ 0x4c, {
'PowerButtonPresent' : [ 0x0, ['unsigned char']],
'SleepButtonPresent' : [ 0x1, ['unsigned char']],
'LidPresent' : [ 0x2, ['unsigned char']],
'SystemS1' : [ 0x3, ['unsigned char']],
'SystemS2' : [ 0x4, ['unsigned char']],
'SystemS3' : [ 0x5, ['unsigned char']],
'SystemS4' : [ 0x6, ['unsigned char']],
'SystemS5' : [ 0x7, ['unsigned char']],
'HiberFilePresent' : [ 0x8, ['unsigned char']],
'FullWake' : [ 0x9, ['unsigned char']],
'VideoDimPresent' : [ 0xa, ['unsigned char']],
'ApmPresent' : [ 0xb, ['unsigned char']],
'UpsPresent' : [ 0xc, ['unsigned char']],
'ThermalControl' : [ 0xd, ['unsigned char']],
'ProcessorThrottle' : [ 0xe, ['unsigned char']],
'ProcessorMinThrottle' : [ 0xf, ['unsigned char']],
'ProcessorMaxThrottle' : [ 0x10, ['unsigned char']],
'FastSystemS4' : [ 0x11, ['unsigned char']],
'Hiberboot' : [ 0x12, ['unsigned char']],
'WakeAlarmPresent' : [ 0x13, ['unsigned char']],
'AoAc' : [ 0x14, ['unsigned char']],
'DiskSpinDown' : [ 0x15, ['unsigned char']],
'HiberFileType' : [ 0x16, ['unsigned char']],
'AoAcConnectivitySupported' : [ 0x17, ['unsigned char']],
'spare3' : [ 0x18, ['array', 6, ['unsigned char']]],
'SystemBatteriesPresent' : [ 0x1e, ['unsigned char']],
'BatteriesAreShortTerm' : [ 0x1f, ['unsigned char']],
'BatteryScale' : [ 0x20, ['array', 3, ['BATTERY_REPORTING_SCALE']]],
'AcOnLineWake' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SoftLidWake' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'RtcWake' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MinDeviceWakeState' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DefaultLowLatencyWake' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_MI_REBUILD_LARGE_PAGE_COUNTDOWN' : [ 0x2, {
'SecondsLeft' : [ 0x0, ['unsigned char']],
'SecondsAssigned' : [ 0x1, ['unsigned char']],
} ],
'_DBGKD_SWITCH_PARTITION' : [ 0x4, {
'Partition' : [ 0x0, ['unsigned long']],
} ],
'_REQUEST_MAILBOX' : [ 0x40, {
'Next' : [ 0x0, ['pointer64', ['_REQUEST_MAILBOX']]],
'RequestSummary' : [ 0x8, ['unsigned long long']],
'RequestPacket' : [ 0x10, ['_KREQUEST_PACKET']],
'NodeTargetCountAddr' : [ 0x30, ['pointer64', ['long']]],
'NodeTargetCount' : [ 0x38, ['long']],
} ],
'_DBGKD_GET_VERSION32' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned short']],
'Flags' : [ 0x6, ['unsigned short']],
'KernBase' : [ 0x8, ['unsigned long']],
'PsLoadedModuleList' : [ 0xc, ['unsigned long']],
'MachineType' : [ 0x10, ['unsigned short']],
'ThCallbackStack' : [ 0x12, ['unsigned short']],
'NextCallback' : [ 0x14, ['unsigned short']],
'FramePointer' : [ 0x16, ['unsigned short']],
'KiCallUserMode' : [ 0x18, ['unsigned long']],
'KeUserCallbackDispatcher' : [ 0x1c, ['unsigned long']],
'BreakpointWithStatus' : [ 0x20, ['unsigned long']],
'DebuggerDataList' : [ 0x24, ['unsigned long']],
} ],
'_WHEA_XPF_CMC_DESCRIPTOR' : [ 0x3a4, {
'Type' : [ 0x0, ['unsigned short']],
'Enabled' : [ 0x2, ['unsigned char']],
'NumberOfBanks' : [ 0x3, ['unsigned char']],
'Reserved' : [ 0x4, ['unsigned long']],
'Notify' : [ 0x8, ['_WHEA_NOTIFICATION_DESCRIPTOR']],
'Banks' : [ 0x24, ['array', 32, ['_WHEA_XPF_MC_BANK_DESCRIPTOR']]],
} ],
'_WHEA_TIMESTAMP' : [ 0x8, {
'Seconds' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long long')]],
'Minutes' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long long')]],
'Hours' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long long')]],
'Precise' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 32, native_type='unsigned long long')]],
'Day' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 40, native_type='unsigned long long')]],
'Month' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 48, native_type='unsigned long long')]],
'Year' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 56, native_type='unsigned long long')]],
'Century' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 64, native_type='unsigned long long')]],
'AsLARGE_INTEGER' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'_PEB32' : [ 0x460, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'IsPackagedProcess' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'IsAppContainer' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'IsProtectedProcessLight' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x4, ['unsigned long']],
'ImageBaseAddress' : [ 0x8, ['unsigned long']],
'Ldr' : [ 0xc, ['unsigned long']],
'ProcessParameters' : [ 0x10, ['unsigned long']],
'SubSystemData' : [ 0x14, ['unsigned long']],
'ProcessHeap' : [ 0x18, ['unsigned long']],
'FastPebLock' : [ 0x1c, ['unsigned long']],
'AtlThunkSListPtr' : [ 0x20, ['unsigned long']],
'IFEOKey' : [ 0x24, ['unsigned long']],
'CrossProcessFlags' : [ 0x28, ['unsigned long']],
'ProcessInJob' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x28, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x28, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x28, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ProcessUsingFTH' : [ 0x28, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ReservedBits0' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'KernelCallbackTable' : [ 0x2c, ['unsigned long']],
'UserSharedInfoPtr' : [ 0x2c, ['unsigned long']],
'SystemReserved' : [ 0x30, ['array', 1, ['unsigned long']]],
'AtlThunkSListPtr32' : [ 0x34, ['unsigned long']],
'ApiSetMap' : [ 0x38, ['unsigned long']],
'TlsExpansionCounter' : [ 0x3c, ['unsigned long']],
'TlsBitmap' : [ 0x40, ['unsigned long']],
'TlsBitmapBits' : [ 0x44, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x4c, ['unsigned long']],
'SparePvoid0' : [ 0x50, ['unsigned long']],
'ReadOnlyStaticServerData' : [ 0x54, ['unsigned long']],
'AnsiCodePageData' : [ 0x58, ['unsigned long']],
'OemCodePageData' : [ 0x5c, ['unsigned long']],
'UnicodeCaseTableData' : [ 0x60, ['unsigned long']],
'NumberOfProcessors' : [ 0x64, ['unsigned long']],
'NtGlobalFlag' : [ 0x68, ['unsigned long']],
'CriticalSectionTimeout' : [ 0x70, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0x78, ['unsigned long']],
'HeapSegmentCommit' : [ 0x7c, ['unsigned long']],
'HeapDeCommitTotalFreeThreshold' : [ 0x80, ['unsigned long']],
'HeapDeCommitFreeBlockThreshold' : [ 0x84, ['unsigned long']],
'NumberOfHeaps' : [ 0x88, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0x8c, ['unsigned long']],
'ProcessHeaps' : [ 0x90, ['unsigned long']],
'GdiSharedHandleTable' : [ 0x94, ['unsigned long']],
'ProcessStarterHelper' : [ 0x98, ['unsigned long']],
'GdiDCAttributeList' : [ 0x9c, ['unsigned long']],
'LoaderLock' : [ 0xa0, ['unsigned long']],
'OSMajorVersion' : [ 0xa4, ['unsigned long']],
'OSMinorVersion' : [ 0xa8, ['unsigned long']],
'OSBuildNumber' : [ 0xac, ['unsigned short']],
'OSCSDVersion' : [ 0xae, ['unsigned short']],
'OSPlatformId' : [ 0xb0, ['unsigned long']],
'ImageSubsystem' : [ 0xb4, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0xb8, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0xbc, ['unsigned long']],
'ActiveProcessAffinityMask' : [ 0xc0, ['unsigned long']],
'GdiHandleBuffer' : [ 0xc4, ['array', 34, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x14c, ['unsigned long']],
'TlsExpansionBitmap' : [ 0x150, ['unsigned long']],
'TlsExpansionBitmapBits' : [ 0x154, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x1d4, ['unsigned long']],
'AppCompatFlags' : [ 0x1d8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x1e0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x1e8, ['unsigned long']],
'AppCompatInfo' : [ 0x1ec, ['unsigned long']],
'CSDVersion' : [ 0x1f0, ['_STRING32']],
'ActivationContextData' : [ 0x1f8, ['unsigned long']],
'ProcessAssemblyStorageMap' : [ 0x1fc, ['unsigned long']],
'SystemDefaultActivationContextData' : [ 0x200, ['unsigned long']],
'SystemAssemblyStorageMap' : [ 0x204, ['unsigned long']],
'MinimumStackCommit' : [ 0x208, ['unsigned long']],
'FlsCallback' : [ 0x20c, ['unsigned long']],
'FlsListHead' : [ 0x210, ['LIST_ENTRY32']],
'FlsBitmap' : [ 0x218, ['unsigned long']],
'FlsBitmapBits' : [ 0x21c, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x22c, ['unsigned long']],
'WerRegistrationData' : [ 0x230, ['unsigned long']],
'WerShipAssertPtr' : [ 0x234, ['unsigned long']],
'pUnused' : [ 0x238, ['unsigned long']],
'pImageHeaderHash' : [ 0x23c, ['unsigned long']],
'TracingFlags' : [ 0x240, ['unsigned long']],
'HeapTracingEnabled' : [ 0x240, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CritSecTracingEnabled' : [ 0x240, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LibLoaderTracingEnabled' : [ 0x240, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SpareTracingBits' : [ 0x240, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'CsrServerReadOnlySharedMemoryBase' : [ 0x248, ['unsigned long long']],
'TppWorkerpListLock' : [ 0x250, ['unsigned long']],
'TppWorkerpList' : [ 0x254, ['LIST_ENTRY32']],
'WaitOnAddressHashTable' : [ 0x25c, ['array', 128, ['unsigned long']]],
} ],
'_IO_IRP_EXT_TRACK_OFFSET_HEADER' : [ 0x10, {
'Validation' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'TrackedOffsetCallback' : [ 0x8, ['pointer64', ['void']]],
} ],
'_VPB' : [ 0x60, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned short']],
'VolumeLabelLength' : [ 0x6, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'RealDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'SerialNumber' : [ 0x18, ['unsigned long']],
'ReferenceCount' : [ 0x1c, ['unsigned long']],
'VolumeLabel' : [ 0x20, ['array', 32, ['wchar']]],
} ],
'_MI_SESSION_STATE' : [ 0x88, {
'SystemSession' : [ 0x0, ['_MMSESSION']],
'CodePageEdited' : [ 0x20, ['unsigned char']],
'DynamicVaBitBuffer' : [ 0x28, ['pointer64', ['unsigned long']]],
'DynamicVaBitBufferPages' : [ 0x30, ['unsigned long long']],
'DynamicPoolBitBuffer' : [ 0x38, ['pointer64', ['unsigned long']]],
'DynamicVaStart' : [ 0x40, ['pointer64', ['void']]],
'DynamicPtesBitBuffer' : [ 0x48, ['pointer64', ['unsigned long']]],
'IdLock' : [ 0x50, ['_EX_PUSH_LOCK']],
'DetachTimeStamp' : [ 0x58, ['unsigned long']],
'LeaderProcess' : [ 0x60, ['pointer64', ['_EPROCESS']]],
'InitializeLock' : [ 0x68, ['_EX_PUSH_LOCK']],
'WorkingSetList' : [ 0x70, ['pointer64', ['_MMWSL']]],
'WsHashStart' : [ 0x78, ['pointer64', ['_MMWSLE_HASH']]],
'WsHashEnd' : [ 0x80, ['pointer64', ['_MMWSLE_HASH']]],
} ],
'_CACHE_DESCRIPTOR' : [ 0xc, {
'Level' : [ 0x0, ['unsigned char']],
'Associativity' : [ 0x1, ['unsigned char']],
'LineSize' : [ 0x2, ['unsigned short']],
'Size' : [ 0x4, ['unsigned long']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'CacheUnified', 1: 'CacheInstruction', 2: 'CacheData', 3: 'CacheTrace'})]],
} ],
'_MMSESSION' : [ 0x20, {
'SystemSpaceViewLock' : [ 0x0, ['_EX_PUSH_LOCK']],
'SystemSpaceViewLockPointer' : [ 0x8, ['pointer64', ['_EX_PUSH_LOCK']]],
'ViewRoot' : [ 0x10, ['_RTL_AVL_TREE']],
'ViewCount' : [ 0x18, ['unsigned long']],
'BitmapFailures' : [ 0x1c, ['unsigned long']],
} ],
'_IOP_IRP_STACK_PROFILER' : [ 0x54, {
'Profile' : [ 0x0, ['array', 20, ['unsigned long']]],
'TotalIrps' : [ 0x50, ['unsigned long']],
} ],
'_FILE_BASIC_INFORMATION' : [ 0x28, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x20, ['unsigned long']],
} ],
'_OBJECT_NAMESPACE_LOOKUPTABLE' : [ 0x260, {
'HashBuckets' : [ 0x0, ['array', 37, ['_LIST_ENTRY']]],
'Lock' : [ 0x250, ['_EX_PUSH_LOCK']],
'NumberOfPrivateSpaces' : [ 0x258, ['unsigned long']],
} ],
'_SECURITY_SUBJECT_CONTEXT' : [ 0x20, {
'ClientToken' : [ 0x0, ['pointer64', ['void']]],
'ImpersonationLevel' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'PrimaryToken' : [ 0x10, ['pointer64', ['void']]],
'ProcessAuditId' : [ 0x18, ['pointer64', ['void']]],
} ],
'_MI_DECAY_TIMER_LINKAGE' : [ 0x8, {
'Spare0' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PreviousDecayPfn' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long long')]],
'Spare1' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 33, native_type='unsigned long long')]],
'NextDecayPfn' : [ 0x0, ['BitField', dict(start_bit = 33, end_bit = 64, native_type='unsigned long long')]],
} ],
'_EVENT_HEADER' : [ 0x50, {
'Size' : [ 0x0, ['unsigned short']],
'HeaderType' : [ 0x2, ['unsigned short']],
'Flags' : [ 0x4, ['unsigned short']],
'EventProperty' : [ 0x6, ['unsigned short']],
'ThreadId' : [ 0x8, ['unsigned long']],
'ProcessId' : [ 0xc, ['unsigned long']],
'TimeStamp' : [ 0x10, ['_LARGE_INTEGER']],
'ProviderId' : [ 0x18, ['_GUID']],
'EventDescriptor' : [ 0x28, ['_EVENT_DESCRIPTOR']],
'KernelTime' : [ 0x38, ['unsigned long']],
'UserTime' : [ 0x3c, ['unsigned long']],
'ProcessorTime' : [ 0x38, ['unsigned long long']],
'ActivityId' : [ 0x40, ['_GUID']],
} ],
'_PF_KERNEL_GLOBALS' : [ 0x60, {
'AccessBufferAgeThreshold' : [ 0x0, ['unsigned long long']],
'AccessBufferRef' : [ 0x8, ['_EX_RUNDOWN_REF']],
'AccessBufferExistsEvent' : [ 0x10, ['_KEVENT']],
'AccessBufferMax' : [ 0x28, ['unsigned long']],
'AccessBufferList' : [ 0x40, ['_SLIST_HEADER']],
'StreamSequenceNumber' : [ 0x50, ['long']],
'Flags' : [ 0x54, ['unsigned long']],
'ScenarioPrefetchCount' : [ 0x58, ['long']],
} ],
'_CM_KEY_HASH_TABLE_ENTRY' : [ 0x18, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Entry' : [ 0x10, ['pointer64', ['_CM_KEY_HASH']]],
} ],
'_ARBITER_QUERY_ARBITRATE_PARAMETERS' : [ 0x8, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
} ],
'__unnamed_23a3' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MI_PARTITION_FLAGS']],
} ],
'_MI_PARTITION_CORE' : [ 0x158, {
'PartitionId' : [ 0x0, ['unsigned short']],
'u' : [ 0x4, ['__unnamed_23a3']],
'ReferenceCount' : [ 0x8, ['unsigned long long']],
'ParentPartition' : [ 0x10, ['pointer64', ['_MI_PARTITION']]],
'ListEntry' : [ 0x18, ['_LIST_ENTRY']],
'NodeInformation' : [ 0x28, ['pointer64', ['_MI_NODE_INFORMATION']]],
'MdlPhysicalMemoryBlock' : [ 0x30, ['pointer64', ['_MDL']]],
'MemoryNodeRuns' : [ 0x38, ['pointer64', ['_PHYSICAL_MEMORY_DESCRIPTOR']]],
'MemoryBlockReferences' : [ 0x40, ['unsigned long long']],
'PfnUnmapWorkItem' : [ 0x48, ['_WORK_QUEUE_ITEM']],
'PfnUnmapActive' : [ 0x68, ['unsigned char']],
'PfnUnmapCount' : [ 0x70, ['unsigned long long']],
'PfnUnmapWaitList' : [ 0x78, ['pointer64', ['void']]],
'MemoryRuns' : [ 0x80, ['pointer64', ['_PHYSICAL_MEMORY_DESCRIPTOR']]],
'ExitEvent' : [ 0x88, ['_KEVENT']],
'SystemThreadHandles' : [ 0xa0, ['array', 5, ['pointer64', ['void']]]],
'PartitionObject' : [ 0xc8, ['pointer64', ['void']]],
'PartitionObjectHandle' : [ 0xd0, ['pointer64', ['void']]],
'DynamicMemoryPushLock' : [ 0xd8, ['_EX_PUSH_LOCK']],
'DynamicMemoryLock' : [ 0xe0, ['long']],
'TemporaryMemoryEvent' : [ 0xe8, ['_KEVENT']],
'MemoryEvents' : [ 0x100, ['array', 11, ['pointer64', ['_KEVENT']]]],
} ],
'_MI_PARTITION_MODWRITES' : [ 0x2d0, {
'AttemptForCantExtend' : [ 0x0, ['_MMPAGE_FILE_EXPANSION']],
'PageFileContract' : [ 0x60, ['_MMPAGE_FILE_EXPANSION']],
'NumberOfMappedMdls' : [ 0xc0, ['unsigned long long']],
'NumberOfMappedMdlsInUse' : [ 0xc8, ['long']],
'NumberOfMappedMdlsInUsePeak' : [ 0xcc, ['unsigned long']],
'MappedFileHeader' : [ 0xd0, ['_MMMOD_WRITER_LISTHEAD']],
'NeedMappedMdl' : [ 0xf8, ['unsigned char']],
'NeedPageFileMdl' : [ 0xf9, ['unsigned char']],
'TransitionInserted' : [ 0xfa, ['unsigned char']],
'LastModifiedWriteError' : [ 0xfc, ['long']],
'LastMappedWriteError' : [ 0x100, ['long']],
'MappedFileWriteSucceeded' : [ 0x104, ['unsigned long']],
'MappedWriteBurstCount' : [ 0x108, ['unsigned long']],
'LowPriorityModWritesOutstanding' : [ 0x10c, ['unsigned long']],
'BoostModWriteIoPriorityEvent' : [ 0x110, ['_KEVENT']],
'ModifiedWriterThreadPriority' : [ 0x128, ['long']],
'ModifiedPagesLowPriorityGoal' : [ 0x130, ['unsigned long long']],
'ModifiedPageWriterEvent' : [ 0x138, ['_KEVENT']],
'ModifiedWriterExitedEvent' : [ 0x150, ['_KEVENT']],
'WriteAllPagefilePages' : [ 0x168, ['long']],
'WriteAllMappedPages' : [ 0x16c, ['long']],
'MappedPageWriterEvent' : [ 0x170, ['_KEVENT']],
'ModWriteData' : [ 0x188, ['_MI_MODWRITE_DATA']],
'RescanPageFilesEvent' : [ 0x1c8, ['_KEVENT']],
'PagingFileHeader' : [ 0x1e0, ['_MMMOD_WRITER_LISTHEAD']],
'ModifiedPageWriterThread' : [ 0x208, ['pointer64', ['_ETHREAD']]],
'ModifiedPageWriterRundown' : [ 0x210, ['_EX_RUNDOWN_REF']],
'PagefileScanWorkItem' : [ 0x218, ['_WORK_QUEUE_ITEM']],
'PagefileScanCount' : [ 0x238, ['unsigned long']],
'ClusterWritesDisabled' : [ 0x23c, ['array', 2, ['long']]],
'NotifyStoreMemoryConditions' : [ 0x248, ['_KEVENT']],
'DelayMappedWrite' : [ 0x260, ['unsigned char']],
'PagefileReservationsEnabled' : [ 0x264, ['unsigned long']],
'PageFileCreationLock' : [ 0x268, ['_EX_PUSH_LOCK']],
'TrimPagefileWorkItem' : [ 0x270, ['_WORK_QUEUE_ITEM']],
'LastTrimPagefileTime' : [ 0x290, ['unsigned long long']],
'WsSwapPagefileContractWorkItem' : [ 0x298, ['_WORK_QUEUE_ITEM']],
'WsSwapPageFileContractionInProgress' : [ 0x2b8, ['long']],
'WorkingSetSwapLock' : [ 0x2c0, ['_EX_PUSH_LOCK']],
'WorkingSetInswapLock' : [ 0x2c8, ['long']],
} ],
'_ARBITER_BOOT_ALLOCATION_PARAMETERS' : [ 0x8, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
} ],
'_KPRIQUEUE' : [ 0x2b0, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'EntryListHead' : [ 0x18, ['array', 32, ['_LIST_ENTRY']]],
'CurrentCount' : [ 0x218, ['array', 32, ['long']]],
'MaximumCount' : [ 0x298, ['unsigned long']],
'ThreadListHead' : [ 0x2a0, ['_LIST_ENTRY']],
} ],
'__unnamed_23c0' : [ 0x4, {
'ChannelsHotCold' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'_MI_NODE_INFORMATION' : [ 0x538, {
'LargePageFreeCount' : [ 0x0, ['array', 2, ['unsigned long long']]],
'LargePages' : [ 0x10, ['array', 2, ['array', 2, ['array', 4, ['_LIST_ENTRY']]]]],
'LargePagesCount' : [ 0x110, ['array', 2, ['array', 2, ['array', 4, ['unsigned long long']]]]],
'StandbyPageList' : [ 0x190, ['array', 4, ['array', 8, ['_MMPFNLIST_SHORT']]]],
'FreeCount' : [ 0x490, ['array', 2, ['unsigned long long']]],
'TotalPages' : [ 0x4a0, ['array', 4, ['unsigned long long']]],
'TotalPagesEntireNode' : [ 0x4c0, ['unsigned long long']],
'MmShiftedColor' : [ 0x4c8, ['unsigned long']],
'Color' : [ 0x4cc, ['unsigned long']],
'ChannelFreeCount' : [ 0x4d0, ['array', 4, ['array', 2, ['unsigned long long']]]],
'Flags' : [ 0x510, ['__unnamed_23c0']],
'NodeLock' : [ 0x518, ['_EX_PUSH_LOCK']],
'ChannelStatus' : [ 0x520, ['unsigned char']],
'ChannelOrdering' : [ 0x521, ['array', 4, ['unsigned char']]],
'LockedChannelOrdering' : [ 0x525, ['array', 4, ['unsigned char']]],
'PowerAttribute' : [ 0x529, ['array', 4, ['unsigned char']]],
'LargePageLock' : [ 0x530, ['unsigned long long']],
} ],
'_EXCEPTION_REGISTRATION_RECORD' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_EXCEPTION_REGISTRATION_RECORD']]],
'Handler' : [ 0x8, ['pointer64', ['void']]],
} ],
'_WAITING_IRP' : [ 0x38, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'Irp' : [ 0x10, ['pointer64', ['_IRP']]],
'CompletionRoutine' : [ 0x18, ['pointer64', ['void']]],
'Context' : [ 0x20, ['pointer64', ['void']]],
'Event' : [ 0x28, ['pointer64', ['_KEVENT']]],
'Information' : [ 0x30, ['unsigned long']],
'BreakAllRH' : [ 0x34, ['unsigned char']],
} ],
'_ETW_FILTER_PID' : [ 0x24, {
'Count' : [ 0x0, ['unsigned long']],
'Pids' : [ 0x4, ['array', 8, ['unsigned long']]],
} ],
'_PPM_SELECTION_MENU' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'Entries' : [ 0x8, ['pointer64', ['_PPM_SELECTION_MENU_ENTRY']]],
} ],
'_VF_TARGET_ALL_SHARED_EXPORT_THUNKS' : [ 0x20, {
'SharedExportThunks' : [ 0x0, ['pointer64', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
'PoolSharedExportThunks' : [ 0x8, ['pointer64', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
'OrderDependentSharedExportThunks' : [ 0x10, ['pointer64', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
'XdvSharedExportThunks' : [ 0x18, ['pointer64', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
} ],
'_KSCHEDULING_GROUP' : [ 0x240, {
'Policy' : [ 0x0, ['_KSCHEDULING_GROUP_POLICY']],
'RelativeWeight' : [ 0x8, ['unsigned long']],
'ChildMinRate' : [ 0xc, ['unsigned long']],
'ChildMinWeight' : [ 0x10, ['unsigned long']],
'ChildTotalWeight' : [ 0x14, ['unsigned long']],
'QueryHistoryTimeStamp' : [ 0x18, ['unsigned long long']],
'NotificationCycles' : [ 0x20, ['long long']],
'SchedulingGroupList' : [ 0x28, ['_LIST_ENTRY']],
'Sibling' : [ 0x28, ['_LIST_ENTRY']],
'NotificationDpc' : [ 0x38, ['pointer64', ['_KDPC']]],
'ChildList' : [ 0x40, ['_LIST_ENTRY']],
'Parent' : [ 0x50, ['pointer64', ['_KSCHEDULING_GROUP']]],
'PerProcessor' : [ 0x80, ['array', 1, ['_KSCB']]],
} ],
'_ETW_REF_CLOCK' : [ 0x10, {
'StartTime' : [ 0x0, ['_LARGE_INTEGER']],
'StartPerfClock' : [ 0x8, ['_LARGE_INTEGER']],
} ],
'_OB_DUPLICATE_OBJECT_STATE' : [ 0x28, {
'SourceProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'SourceHandle' : [ 0x8, ['pointer64', ['void']]],
'Object' : [ 0x10, ['pointer64', ['void']]],
'TargetAccess' : [ 0x18, ['unsigned long']],
'ObjectInfo' : [ 0x1c, ['_HANDLE_TABLE_ENTRY_INFO']],
'HandleAttributes' : [ 0x20, ['unsigned long']],
} ],
'_MMPTE_SUBSECTION' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Unused0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 15, native_type='unsigned long long')]],
'ExecutePrivilege' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long long')]],
'SubsectionAddress' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='long long')]],
} ],
'_POWER_STATE' : [ 0x4, {
'SystemState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_MMWORKING_SET_EXPANSION_HEAD' : [ 0x10, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_POP_IRP_WORKER_ENTRY' : [ 0x30, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'Thread' : [ 0x10, ['pointer64', ['_ETHREAD']]],
'Irp' : [ 0x18, ['pointer64', ['_IRP']]],
'Device' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'Static' : [ 0x28, ['unsigned char']],
} ],
'_POP_POLICY_DEVICE' : [ 0x40, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'DeviceType' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceWakeAlarm', 9: 'PolicyDeviceFan', 10: 'PolicyCsBatterySaver', 11: 'PolicyImmediateDozeS4Predicted', 12: 'PolicyImmediateDozeS4PredictedNoWake', 13: 'PolicyDeviceMax'})]],
'Notification' : [ 0x18, ['pointer64', ['void']]],
'Name' : [ 0x20, ['_UNICODE_STRING']],
'Device' : [ 0x30, ['pointer64', ['_DEVICE_OBJECT']]],
'Irp' : [ 0x38, ['pointer64', ['_IRP']]],
} ],
'__unnamed_23f3' : [ 0x8, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'FileObject' : [ 0x0, ['pointer64', ['_FILE_OBJECT']]],
'RemoteImageFileObject' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'RemoteDataFileObject' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
} ],
'_SECTION' : [ 0x40, {
'SectionNode' : [ 0x0, ['_RTL_BALANCED_NODE']],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u1' : [ 0x28, ['__unnamed_23f3']],
'SizeOfSection' : [ 0x30, ['unsigned long long']],
'u' : [ 0x38, ['__unnamed_16ec']],
'InitialPageProtection' : [ 0x3c, ['BitField', dict(start_bit = 0, end_bit = 12, native_type='unsigned long')]],
'SessionId' : [ 0x3c, ['BitField', dict(start_bit = 12, end_bit = 31, native_type='unsigned long')]],
'NoValidationNeeded' : [ 0x3c, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_MI_SECTION_STATE' : [ 0x280, {
'SectionObjectPointersLock' : [ 0x0, ['long']],
'SectionExtendLock' : [ 0x8, ['_EX_PUSH_LOCK']],
'SectionExtendSetLock' : [ 0x10, ['_EX_PUSH_LOCK']],
'SectionBasedRoot' : [ 0x18, ['_RTL_AVL_TREE']],
'SectionBasedLock' : [ 0x20, ['_EX_PUSH_LOCK']],
'UnusedSubsectionPagedPool' : [ 0x28, ['unsigned long long']],
'UnusedSegmentForceFree' : [ 0x30, ['unsigned long']],
'DataSectionProtectionMask' : [ 0x34, ['unsigned long']],
'HighSectionBase' : [ 0x38, ['pointer64', ['void']]],
'PhysicalSubsection' : [ 0x40, ['_MSUBSECTION']],
'PhysicalControlArea' : [ 0xb0, ['_CONTROL_AREA']],
'DanglingExtentsPages' : [ 0x128, ['pointer64', ['_MMPFN']]],
'DanglingExtentsLock' : [ 0x130, ['long']],
'DanglingExtentsWorkItem' : [ 0x138, ['_WORK_QUEUE_ITEM']],
'DanglingExtentsWorkerActive' : [ 0x158, ['unsigned char']],
'PageFileSectionHead' : [ 0x160, ['_RTL_AVL_TREE']],
'PageFileSectionListSpinLock' : [ 0x168, ['long']],
'SharedSegmentCharges' : [ 0x170, ['_MI_CROSS_PARTITION_CHARGES']],
'SharedPageCombineCharges' : [ 0x198, ['_MI_CROSS_PARTITION_CHARGES']],
'ImageBias' : [ 0x1c0, ['unsigned long']],
'RelocateBitmapsLock' : [ 0x1c8, ['_EX_PUSH_LOCK']],
'ImageBitMap' : [ 0x1d0, ['_RTL_BITMAP']],
'ImageBias64Low' : [ 0x1e0, ['unsigned long']],
'ImageBias64High' : [ 0x1e4, ['unsigned long']],
'ImageBitMap64Low' : [ 0x1e8, ['_RTL_BITMAP']],
'ImageBitMap64High' : [ 0x1f8, ['_RTL_BITMAP']],
'ImageBitMapWow64Dll' : [ 0x208, ['_RTL_BITMAP']],
'ApiSetSection' : [ 0x218, ['pointer64', ['void']]],
'ApiSetSchema' : [ 0x220, ['pointer64', ['void']]],
'ApiSetSchemaSize' : [ 0x228, ['unsigned long long']],
'LostDataFiles' : [ 0x230, ['unsigned long']],
'LostDataPages' : [ 0x234, ['unsigned long']],
'ImageFailureReason' : [ 0x238, ['unsigned long']],
'CfgBitMapSection32' : [ 0x240, ['pointer64', ['_SECTION']]],
'CfgBitMapControlArea32' : [ 0x248, ['pointer64', ['_CONTROL_AREA']]],
'CfgBitMapSection64' : [ 0x250, ['pointer64', ['_SECTION']]],
'CfgBitMapControlArea64' : [ 0x258, ['pointer64', ['_CONTROL_AREA']]],
'ImageCfgFailure' : [ 0x260, ['unsigned long']],
'ImageValidationFailed' : [ 0x264, ['long']],
} ],
'_MI_PARTITION_FLAGS' : [ 0x4, {
'BeingDeleted' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ObjectInitialized' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PageListsInitialized' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'StoreReservedPagesCharged' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
} ],
'__unnamed_2400' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_2402' : [ 0x10, {
'Level' : [ 0x0, ['unsigned short']],
'Group' : [ 0x2, ['unsigned short']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_2404' : [ 0x10, {
'Group' : [ 0x0, ['unsigned short']],
'MessageCount' : [ 0x2, ['unsigned short']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_2406' : [ 0x10, {
'Raw' : [ 0x0, ['__unnamed_2404']],
'Translated' : [ 0x0, ['__unnamed_2402']],
} ],
'__unnamed_2408' : [ 0xc, {
'Channel' : [ 0x0, ['unsigned long']],
'Port' : [ 0x4, ['unsigned long']],
'Reserved1' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_240a' : [ 0xc, {
'Channel' : [ 0x0, ['unsigned long']],
'RequestLine' : [ 0x4, ['unsigned long']],
'TransferWidth' : [ 0x8, ['unsigned char']],
'Reserved1' : [ 0x9, ['unsigned char']],
'Reserved2' : [ 0xa, ['unsigned char']],
'Reserved3' : [ 0xb, ['unsigned char']],
} ],
'__unnamed_240c' : [ 0xc, {
'Start' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_240e' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_2410' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length40' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_2412' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length48' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_2414' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length64' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_2416' : [ 0x10, {
'Generic' : [ 0x0, ['__unnamed_2400']],
'Port' : [ 0x0, ['__unnamed_2400']],
'Interrupt' : [ 0x0, ['__unnamed_2402']],
'MessageInterrupt' : [ 0x0, ['__unnamed_2406']],
'Memory' : [ 0x0, ['__unnamed_2400']],
'Dma' : [ 0x0, ['__unnamed_2408']],
'DmaV3' : [ 0x0, ['__unnamed_240a']],
'DevicePrivate' : [ 0x0, ['__unnamed_2219']],
'BusNumber' : [ 0x0, ['__unnamed_240c']],
'DeviceSpecificData' : [ 0x0, ['__unnamed_240e']],
'Memory40' : [ 0x0, ['__unnamed_2410']],
'Memory48' : [ 0x0, ['__unnamed_2412']],
'Memory64' : [ 0x0, ['__unnamed_2414']],
'Connection' : [ 0x0, ['__unnamed_2225']],
} ],
'_CM_PARTIAL_RESOURCE_DESCRIPTOR' : [ 0x14, {
'Type' : [ 0x0, ['unsigned char']],
'ShareDisposition' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned short']],
'u' : [ 0x4, ['__unnamed_2416']],
} ],
'_OBJECT_HEADER_PADDING_INFO' : [ 0x4, {
'PaddingAmount' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_241e' : [ 0x4, {
'PhysicalAddress' : [ 0x0, ['unsigned long']],
'VirtualSize' : [ 0x0, ['unsigned long']],
} ],
'_IMAGE_SECTION_HEADER' : [ 0x28, {
'Name' : [ 0x0, ['array', 8, ['unsigned char']]],
'Misc' : [ 0x8, ['__unnamed_241e']],
'VirtualAddress' : [ 0xc, ['unsigned long']],
'SizeOfRawData' : [ 0x10, ['unsigned long']],
'PointerToRawData' : [ 0x14, ['unsigned long']],
'PointerToRelocations' : [ 0x18, ['unsigned long']],
'PointerToLinenumbers' : [ 0x1c, ['unsigned long']],
'NumberOfRelocations' : [ 0x20, ['unsigned short']],
'NumberOfLinenumbers' : [ 0x22, ['unsigned short']],
'Characteristics' : [ 0x24, ['unsigned long']],
} ],
'_INVERTED_FUNCTION_TABLE_ENTRY' : [ 0x18, {
'FunctionTable' : [ 0x0, ['pointer64', ['_IMAGE_RUNTIME_FUNCTION_ENTRY']]],
'DynamicTable' : [ 0x0, ['pointer64', ['_DYNAMIC_FUNCTION_TABLE']]],
'ImageBase' : [ 0x8, ['pointer64', ['void']]],
'SizeOfImage' : [ 0x10, ['unsigned long']],
'SizeOfTable' : [ 0x14, ['unsigned long']],
} ],
'_ARBITER_ADD_RESERVED_PARAMETERS' : [ 0x8, {
'ReserveDevice' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'_MM_PAGED_POOL_INFO' : [ 0x38, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'PagedPoolAllocationMap' : [ 0x8, ['_RTL_BITMAP_EX']],
'FirstPteForPagedPool' : [ 0x18, ['pointer64', ['_MMPTE']]],
'MaximumSize' : [ 0x20, ['unsigned long long']],
'PagedPoolHint' : [ 0x28, ['unsigned long long']],
'AllocatedPagedPool' : [ 0x30, ['unsigned long long']],
} ],
'__unnamed_2430' : [ 0x50, {
'CellData' : [ 0x0, ['_CELL_DATA']],
'List' : [ 0x0, ['array', 1, ['unsigned long long']]],
} ],
'_CM_CACHED_VALUE_INDEX' : [ 0x58, {
'CellIndex' : [ 0x0, ['unsigned long']],
'Data' : [ 0x8, ['__unnamed_2430']],
} ],
'_PPM_COORDINATED_SELECTION' : [ 0x18, {
'MaximumStates' : [ 0x0, ['unsigned long']],
'SelectedStates' : [ 0x4, ['unsigned long']],
'DefaultSelection' : [ 0x8, ['unsigned long']],
'Selection' : [ 0x10, ['pointer64', ['unsigned long']]],
} ],
'_DBGKD_QUERY_SPECIAL_CALLS' : [ 0x4, {
'NumberOfSpecialCalls' : [ 0x0, ['unsigned long']],
} ],
'_MI_PAGE_COMBINING_SUPPORT' : [ 0x188, {
'Partition' : [ 0x0, ['pointer64', ['_MI_PARTITION']]],
'ArbitraryPfnMapList' : [ 0x8, ['_LIST_ENTRY']],
'FreeCombinePoolItem' : [ 0x18, ['_MI_COMBINE_WORKITEM']],
'CombiningThreadCount' : [ 0x40, ['unsigned long']],
'CombinePageFreeList' : [ 0x48, ['_LIST_ENTRY']],
'CombineFreeListLock' : [ 0x58, ['unsigned long long']],
'CombinePageListHeads' : [ 0x60, ['array', 16, ['_MI_COMBINE_PAGE_LISTHEAD']]],
'PageCombineStats' : [ 0x160, ['_MI_PAGE_COMBINE_STATISTICS']],
} ],
'_VF_AVL_TREE_NODE' : [ 0x10, {
'p' : [ 0x0, ['pointer64', ['void']]],
'RangeSize' : [ 0x8, ['unsigned long long']],
} ],
'_POP_FX_DEVICE' : [ 0x278, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'Irp' : [ 0x10, ['pointer64', ['_IRP']]],
'IrpData' : [ 0x18, ['pointer64', ['_POP_IRP_DATA']]],
'Status' : [ 0x20, ['_POP_FX_DEVICE_STATUS']],
'PowerReqCall' : [ 0x24, ['long']],
'PowerNotReqCall' : [ 0x28, ['long']],
'DevNode' : [ 0x30, ['pointer64', ['_DEVICE_NODE']]],
'DpmContext' : [ 0x38, ['pointer64', ['PEPHANDLE__']]],
'Plugin' : [ 0x40, ['pointer64', ['_POP_FX_PLUGIN']]],
'PluginHandle' : [ 0x48, ['pointer64', ['PEPHANDLE__']]],
'AcpiPlugin' : [ 0x50, ['pointer64', ['_POP_FX_PLUGIN']]],
'AcpiPluginHandle' : [ 0x58, ['pointer64', ['PEPHANDLE__']]],
'DeviceObject' : [ 0x60, ['pointer64', ['_DEVICE_OBJECT']]],
'TargetDevice' : [ 0x68, ['pointer64', ['_DEVICE_OBJECT']]],
'Callbacks' : [ 0x70, ['_POP_FX_DRIVER_CALLBACKS']],
'DriverContext' : [ 0xa8, ['pointer64', ['void']]],
'AcpiLink' : [ 0xb0, ['_LIST_ENTRY']],
'DeviceId' : [ 0xc0, ['_UNICODE_STRING']],
'RemoveLock' : [ 0xd0, ['_IO_REMOVE_LOCK']],
'AcpiRemoveLock' : [ 0xf0, ['_IO_REMOVE_LOCK']],
'WorkOrder' : [ 0x110, ['_POP_FX_WORK_ORDER']],
'IdleLock' : [ 0x148, ['unsigned long long']],
'IdleTimer' : [ 0x150, ['_KTIMER']],
'IdleDpc' : [ 0x190, ['_KDPC']],
'IdleTimeout' : [ 0x1d0, ['unsigned long long']],
'IdleStamp' : [ 0x1d8, ['unsigned long long']],
'NextIrpDeviceObject' : [ 0x1e0, ['array', 2, ['pointer64', ['_DEVICE_OBJECT']]]],
'NextIrpPowerState' : [ 0x1f0, ['array', 2, ['_POWER_STATE']]],
'NextIrpCallerCompletion' : [ 0x1f8, ['array', 2, ['pointer64', ['void']]]],
'NextIrpCallerContext' : [ 0x208, ['array', 2, ['pointer64', ['void']]]],
'IrpCompleteEvent' : [ 0x218, ['_KEVENT']],
'PowerOnDumpDeviceCallback' : [ 0x230, ['pointer64', ['void']]],
'Accounting' : [ 0x238, ['_POP_FX_ACCOUNTING']],
'Flags' : [ 0x268, ['unsigned long']],
'ComponentCount' : [ 0x26c, ['unsigned long']],
'Components' : [ 0x270, ['pointer64', ['pointer64', ['_POP_FX_COMPONENT']]]],
} ],
'_PEP_ACPI_RESOURCE_FLAGS' : [ 0x4, {
'AsULong' : [ 0x0, ['unsigned long']],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Wake' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ResourceUsage' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SlaveMode' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'AddressingMode' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'SharedMode' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_2458' : [ 0x8, {
'IdleTime' : [ 0x0, ['unsigned long']],
'NonIdleTime' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_245a' : [ 0x8, {
'Disk' : [ 0x0, ['__unnamed_2458']],
} ],
'_DEVICE_OBJECT_POWER_EXTENSION' : [ 0x60, {
'IdleCount' : [ 0x0, ['unsigned long']],
'BusyCount' : [ 0x4, ['unsigned long']],
'BusyReference' : [ 0x8, ['unsigned long']],
'TotalBusyCount' : [ 0xc, ['unsigned long']],
'ConservationIdleTime' : [ 0x10, ['unsigned long']],
'PerformanceIdleTime' : [ 0x14, ['unsigned long']],
'DeviceObject' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT']]],
'IdleList' : [ 0x20, ['_LIST_ENTRY']],
'IdleType' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceIdleNormal', 1: 'DeviceIdleDisk'})]],
'IdleState' : [ 0x34, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'CurrentState' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'CoolingExtension' : [ 0x40, ['pointer64', ['_POP_COOLING_EXTENSION']]],
'Volume' : [ 0x48, ['_LIST_ENTRY']],
'Specific' : [ 0x58, ['__unnamed_245a']],
} ],
'_ARBITER_RETEST_ALLOCATION_PARAMETERS' : [ 0x18, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x8, ['unsigned long']],
'AllocateFrom' : [ 0x10, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_PROCESSOR_NUMBER' : [ 0x4, {
'Group' : [ 0x0, ['unsigned short']],
'Number' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
} ],
'_MI_COMBINE_STATE' : [ 0x1a0, {
'ActiveSpinLock' : [ 0x0, ['long']],
'CombiningThreadCount' : [ 0x4, ['unsigned long']],
'ActiveThreadTree' : [ 0x8, ['_RTL_AVL_TREE']],
'ZeroPageHashValue' : [ 0x10, ['unsigned long long']],
'CrossPartition' : [ 0x18, ['_MI_PAGE_COMBINING_SUPPORT']],
} ],
'_MMDEREFERENCE_SEGMENT_HEADER' : [ 0x30, {
'Semaphore' : [ 0x0, ['_KSEMAPHORE']],
'ListHead' : [ 0x20, ['_LIST_ENTRY']],
} ],
'_MI_TRIAGE_DUMP_DATA' : [ 0x38, {
'BadPageCount' : [ 0x0, ['unsigned long long']],
'BadPagesDetected' : [ 0x8, ['long']],
'ZeroedPageSingleBitErrorsDetected' : [ 0xc, ['long']],
'ScrubPasses' : [ 0x10, ['long']],
'ScrubBadPagesFound' : [ 0x14, ['long']],
'PageHashErrors' : [ 0x18, ['unsigned long']],
'FeatureBits' : [ 0x20, ['unsigned long long']],
'TimeZoneId' : [ 0x28, ['unsigned long']],
'Flags' : [ 0x2c, ['_MI_FLAGS']],
'VsmConnection' : [ 0x30, ['pointer64', ['void']]],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS' : [ 0x1, {
'FRUId' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'FRUText' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'AsUCHAR' : [ 0x0, ['unsigned char']],
} ],
'_FS_FILTER_CALLBACKS' : [ 0x68, {
'SizeOfFsFilterCallbacks' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'PreAcquireForSectionSynchronization' : [ 0x8, ['pointer64', ['void']]],
'PostAcquireForSectionSynchronization' : [ 0x10, ['pointer64', ['void']]],
'PreReleaseForSectionSynchronization' : [ 0x18, ['pointer64', ['void']]],
'PostReleaseForSectionSynchronization' : [ 0x20, ['pointer64', ['void']]],
'PreAcquireForCcFlush' : [ 0x28, ['pointer64', ['void']]],
'PostAcquireForCcFlush' : [ 0x30, ['pointer64', ['void']]],
'PreReleaseForCcFlush' : [ 0x38, ['pointer64', ['void']]],
'PostReleaseForCcFlush' : [ 0x40, ['pointer64', ['void']]],
'PreAcquireForModifiedPageWriter' : [ 0x48, ['pointer64', ['void']]],
'PostAcquireForModifiedPageWriter' : [ 0x50, ['pointer64', ['void']]],
'PreReleaseForModifiedPageWriter' : [ 0x58, ['pointer64', ['void']]],
'PostReleaseForModifiedPageWriter' : [ 0x60, ['pointer64', ['void']]],
} ],
'_KENLISTMENT' : [ 0x1e0, {
'cookie' : [ 0x0, ['unsigned long']],
'NamespaceLink' : [ 0x8, ['_KTMOBJECT_NAMESPACE_LINK']],
'EnlistmentId' : [ 0x30, ['_GUID']],
'Mutex' : [ 0x40, ['_KMUTANT']],
'NextSameTx' : [ 0x78, ['_LIST_ENTRY']],
'NextSameRm' : [ 0x88, ['_LIST_ENTRY']],
'ResourceManager' : [ 0x98, ['pointer64', ['_KRESOURCEMANAGER']]],
'Transaction' : [ 0xa0, ['pointer64', ['_KTRANSACTION']]],
'State' : [ 0xa8, ['Enumeration', dict(target = 'long', choices = {0: 'KEnlistmentUninitialized', 256: 'KEnlistmentActive', 258: 'KEnlistmentPrepared', 259: 'KEnlistmentInDoubt', 260: 'KEnlistmentCommitted', 261: 'KEnlistmentCommittedNotify', 262: 'KEnlistmentCommitRequested', 257: 'KEnlistmentPreparing', 264: 'KEnlistmentDelegated', 265: 'KEnlistmentDelegatedDisconnected', 266: 'KEnlistmentPrePreparing', 263: 'KEnlistmentAborted', 268: 'KEnlistmentRecovering', 269: 'KEnlistmentAborting', 270: 'KEnlistmentReadOnly', 271: 'KEnlistmentOutcomeUnavailable', 272: 'KEnlistmentOffline', 273: 'KEnlistmentPrePrepared', 274: 'KEnlistmentInitialized', 267: 'KEnlistmentForgotten'})]],
'Flags' : [ 0xac, ['unsigned long']],
'NotificationMask' : [ 0xb0, ['unsigned long']],
'Key' : [ 0xb8, ['pointer64', ['void']]],
'KeyRefCount' : [ 0xc0, ['unsigned long']],
'RecoveryInformation' : [ 0xc8, ['pointer64', ['void']]],
'RecoveryInformationLength' : [ 0xd0, ['unsigned long']],
'DynamicNameInformation' : [ 0xd8, ['pointer64', ['void']]],
'DynamicNameInformationLength' : [ 0xe0, ['unsigned long']],
'FinalNotification' : [ 0xe8, ['pointer64', ['_KTMNOTIFICATION_PACKET']]],
'SupSubEnlistment' : [ 0xf0, ['pointer64', ['_KENLISTMENT']]],
'SupSubEnlHandle' : [ 0xf8, ['pointer64', ['void']]],
'SubordinateTxHandle' : [ 0x100, ['pointer64', ['void']]],
'CrmEnlistmentEnId' : [ 0x108, ['_GUID']],
'CrmEnlistmentTmId' : [ 0x118, ['_GUID']],
'CrmEnlistmentRmId' : [ 0x128, ['_GUID']],
'NextHistory' : [ 0x138, ['unsigned long']],
'History' : [ 0x13c, ['array', 20, ['_KENLISTMENT_HISTORY']]],
} ],
'_ARBITER_INTERFACE' : [ 0x30, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
'ArbiterHandler' : [ 0x20, ['pointer64', ['void']]],
'Flags' : [ 0x28, ['unsigned long']],
} ],
'_DELAY_ACK_FO' : [ 0x18, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'OriginalFileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
} ],
'_IA64_DBGKD_CONTROL_SET' : [ 0x14, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long long']],
} ],
'_DEVICE_RELATIONS' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'Objects' : [ 0x8, ['array', 1, ['pointer64', ['_DEVICE_OBJECT']]]],
} ],
'_IMAGE_ROM_OPTIONAL_HEADER' : [ 0x38, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'BaseOfData' : [ 0x18, ['unsigned long']],
'BaseOfBss' : [ 0x1c, ['unsigned long']],
'GprMask' : [ 0x20, ['unsigned long']],
'CprMask' : [ 0x24, ['array', 4, ['unsigned long']]],
'GpValue' : [ 0x34, ['unsigned long']],
} ],
'_MI_CROSS_PARTITION_CHARGES' : [ 0x28, {
'CurrentCharges' : [ 0x0, ['unsigned long long']],
'ChargeFailures' : [ 0x8, ['unsigned long long']],
'ChargePeak' : [ 0x10, ['unsigned long long']],
'ChargeMinimum' : [ 0x18, ['unsigned long long']],
'ChargeMaximum' : [ 0x20, ['unsigned long long']],
} ],
'_MI_BAD_MEMORY_EVENT_ENTRY' : [ 0x38, {
'BugCheckCode' : [ 0x0, ['unsigned long']],
'Active' : [ 0x4, ['long']],
'Data' : [ 0x8, ['unsigned long']],
'PhysicalAddress' : [ 0x10, ['_LARGE_INTEGER']],
'WorkItem' : [ 0x18, ['_WORK_QUEUE_ITEM']],
} ],
'_ALPC_COMPLETION_LIST_HEADER' : [ 0x180, {
'StartMagic' : [ 0x0, ['unsigned long long']],
'TotalSize' : [ 0x8, ['unsigned long']],
'ListOffset' : [ 0xc, ['unsigned long']],
'ListSize' : [ 0x10, ['unsigned long']],
'BitmapOffset' : [ 0x14, ['unsigned long']],
'BitmapSize' : [ 0x18, ['unsigned long']],
'DataOffset' : [ 0x1c, ['unsigned long']],
'DataSize' : [ 0x20, ['unsigned long']],
'AttributeFlags' : [ 0x24, ['unsigned long']],
'AttributeSize' : [ 0x28, ['unsigned long']],
'State' : [ 0x40, ['_ALPC_COMPLETION_LIST_STATE']],
'LastMessageId' : [ 0x48, ['unsigned long']],
'LastCallbackId' : [ 0x4c, ['unsigned long']],
'PostCount' : [ 0x80, ['unsigned long']],
'ReturnCount' : [ 0xc0, ['unsigned long']],
'LogSequenceNumber' : [ 0x100, ['unsigned long']],
'UserLock' : [ 0x140, ['_RTL_SRWLOCK']],
'EndMagic' : [ 0x148, ['unsigned long long']],
} ],
'_IMAGE_DEBUG_DIRECTORY' : [ 0x1c, {
'Characteristics' : [ 0x0, ['unsigned long']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'MajorVersion' : [ 0x8, ['unsigned short']],
'MinorVersion' : [ 0xa, ['unsigned short']],
'Type' : [ 0xc, ['unsigned long']],
'SizeOfData' : [ 0x10, ['unsigned long']],
'AddressOfRawData' : [ 0x14, ['unsigned long']],
'PointerToRawData' : [ 0x18, ['unsigned long']],
} ],
'_WHEA_AER_ENDPOINT_DESCRIPTOR' : [ 0x20, {
'Type' : [ 0x0, ['unsigned short']],
'Enabled' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
'BusNumber' : [ 0x4, ['unsigned long']],
'Slot' : [ 0x8, ['_WHEA_PCI_SLOT_NUMBER']],
'DeviceControl' : [ 0xc, ['unsigned short']],
'Flags' : [ 0xe, ['_AER_ENDPOINT_DESCRIPTOR_FLAGS']],
'UncorrectableErrorMask' : [ 0x10, ['unsigned long']],
'UncorrectableErrorSeverity' : [ 0x14, ['unsigned long']],
'CorrectableErrorMask' : [ 0x18, ['unsigned long']],
'AdvancedCapsAndControl' : [ 0x1c, ['unsigned long']],
} ],
'_MI_SYSTEM_TRIM_STATE' : [ 0x40, {
'ExpansionLock' : [ 0x0, ['unsigned long long']],
'TrimInProgressCount' : [ 0x8, ['long']],
'PeriodicWorkingSetEvent' : [ 0x10, ['_KEVENT']],
'TrimAllPageFaultCount' : [ 0x28, ['array', 3, ['unsigned long']]],
} ],
'_ETW_WMITRACE_WORK' : [ 0xf0, {
'LoggerId' : [ 0x0, ['unsigned long']],
'SpareUlong' : [ 0x4, ['unsigned long']],
'LoggerName' : [ 0x8, ['array', 65, ['unsigned char']]],
'FileName' : [ 0x49, ['array', 129, ['unsigned char']]],
'MaximumFileSize' : [ 0xcc, ['unsigned long']],
'MinBuffers' : [ 0xd0, ['unsigned long']],
'MaxBuffers' : [ 0xd4, ['unsigned long']],
'BufferSize' : [ 0xd8, ['unsigned long']],
'Mode' : [ 0xdc, ['unsigned long']],
'FlushTimer' : [ 0xe0, ['unsigned long']],
'MatchAny' : [ 0x8, ['unsigned long long']],
'MatchAll' : [ 0x10, ['unsigned long long']],
'EnableProperty' : [ 0x18, ['unsigned long']],
'Guid' : [ 0x1c, ['_GUID']],
'Level' : [ 0x2c, ['unsigned char']],
'Status' : [ 0xe8, ['long']],
} ],
'_MI_ZERO_COST_COUNTS' : [ 0x10, {
'NativeSum' : [ 0x0, ['unsigned long long']],
'CachedSum' : [ 0x8, ['unsigned long long']],
} ],
'_CHILD_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_IO_RESOURCE_LIST' : [ 0x28, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'Descriptors' : [ 0x8, ['array', 1, ['_IO_RESOURCE_DESCRIPTOR']]],
} ],
'_ARMCE_DBGKD_CONTROL_SET' : [ 0xc, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long']],
'CurrentSymbolEnd' : [ 0x8, ['unsigned long']],
} ],
'_MI_RESAVAIL_TRACKER' : [ 0x380, {
'AllocateKernelStack' : [ 0x0, ['unsigned long long']],
'AllocateGrowKernelStack' : [ 0x8, ['unsigned long long']],
'FreeKernelStack' : [ 0x10, ['unsigned long long']],
'FreeKernelStackError' : [ 0x18, ['unsigned long long']],
'FreeGrowKernelStackError' : [ 0x20, ['unsigned long long']],
'AllocateCreateProcess' : [ 0x28, ['unsigned long long']],
'FreeCreateProcessError' : [ 0x30, ['unsigned long long']],
'FreeDeleteProcess' : [ 0x38, ['unsigned long long']],
'FreeCleanProcess' : [ 0x40, ['unsigned long long']],
'FreeCleanProcessError' : [ 0x48, ['unsigned long long']],
'AllocateAddProcessWsMetaPage' : [ 0x50, ['unsigned long long']],
'AllocateWsIncrease' : [ 0x58, ['unsigned long long']],
'FreeWsIncreaseError' : [ 0x60, ['unsigned long long']],
'FreeWsIncreaseErrorMax' : [ 0x68, ['unsigned long long']],
'FreeWsDecrease' : [ 0x70, ['unsigned long long']],
'AllocateWorkingSetPage' : [ 0x78, ['unsigned long long']],
'FreeWorkingSetPageError' : [ 0x80, ['unsigned long long']],
'FreeDeletePteRange' : [ 0x88, ['unsigned long long']],
'AllocatePageTablesForProcessMetadata' : [ 0x90, ['unsigned long long']],
'FreePageTablesForProcessMetadataError2' : [ 0x98, ['unsigned long long']],
'AllocatePageTablesForSystem' : [ 0xa0, ['unsigned long long']],
'FreePageTablesExcess' : [ 0xa8, ['unsigned long long']],
'FreeSystemVaPageTables' : [ 0xb0, ['unsigned long long']],
'FreeSessionVaPageTables' : [ 0xb8, ['unsigned long long']],
'AllocateCreateSession' : [ 0xc0, ['unsigned long long']],
'FreeSessionWsDereference' : [ 0xc8, ['unsigned long long']],
'FreeSessionDereference' : [ 0xd0, ['unsigned long long']],
'AllocateLockedSessionImage' : [ 0xd8, ['unsigned long long']],
'FreeLockedSessionImage' : [ 0xe0, ['unsigned long long']],
'FreeSessionImageConversion' : [ 0xe8, ['unsigned long long']],
'AllocateWsAdjustPageTable' : [ 0xf0, ['unsigned long long']],
'FreeWsAdjustPageTable' : [ 0xf8, ['unsigned long long']],
'FreeWsAdjustPageTableError' : [ 0x100, ['unsigned long long']],
'AllocateNoLowMemory' : [ 0x108, ['unsigned long long']],
'AllocatePagedPoolLockedDown' : [ 0x110, ['unsigned long long']],
'FreePagedPoolLockedDown' : [ 0x118, ['unsigned long long']],
'AllocateSystemBitmaps' : [ 0x120, ['unsigned long long']],
'FreeSystemBitmapsError' : [ 0x128, ['unsigned long long']],
'AllocateForMdl' : [ 0x130, ['unsigned long long']],
'FreeFromMdl' : [ 0x138, ['unsigned long long']],
'AllocateForMdlPartition' : [ 0x140, ['unsigned long long']],
'FreeFromMdlPartition' : [ 0x148, ['unsigned long long']],
'FreeMdlExcess' : [ 0x150, ['unsigned long long']],
'AllocateExpansionNonPagedPool' : [ 0x158, ['unsigned long long']],
'FreeExpansionNonPagedPool' : [ 0x160, ['unsigned long long']],
'AllocateVad' : [ 0x168, ['unsigned long long']],
'RemoveVad' : [ 0x170, ['unsigned long long']],
'FreeVad' : [ 0x178, ['unsigned long long']],
'AllocateContiguous' : [ 0x180, ['unsigned long long']],
'FreeContiguousPages' : [ 0x188, ['unsigned long long']],
'FreeContiguousError' : [ 0x190, ['unsigned long long']],
'FreeLargePageMemory' : [ 0x198, ['unsigned long long']],
'AllocateSystemWsles' : [ 0x1a0, ['unsigned long long']],
'FreeSystemWsles' : [ 0x1a8, ['unsigned long long']],
'AllocateSystemInitWs' : [ 0x1b0, ['unsigned long long']],
'AllocateSessionInitWs' : [ 0x1b8, ['unsigned long long']],
'FreeSessionInitWsError' : [ 0x1c0, ['unsigned long long']],
'AllocateSystemImage' : [ 0x1c8, ['unsigned long long']],
'AllocateSystemImageLoad' : [ 0x1d0, ['unsigned long long']],
'AllocateSessionSharedImage' : [ 0x1d8, ['unsigned long long']],
'FreeSystemImageInitCode' : [ 0x1e0, ['unsigned long long']],
'FreeSystemImageLargePageConversion' : [ 0x1e8, ['unsigned long long']],
'FreeSystemImageError' : [ 0x1f0, ['unsigned long long']],
'FreeSystemImageLoadExcess' : [ 0x1f8, ['unsigned long long']],
'FreeUnloadSystemImage' : [ 0x200, ['unsigned long long']],
'FreeReloadBootImageLarge' : [ 0x208, ['unsigned long long']],
'FreeIndependent' : [ 0x210, ['unsigned long long']],
'AllocateHotRemove' : [ 0x218, ['unsigned long long']],
'FreeHotAdd' : [ 0x220, ['unsigned long long']],
'AllocateBoot' : [ 0x228, ['unsigned long long']],
'FreeLoaderBlock' : [ 0x230, ['unsigned long long']],
'AllocateNonPagedSpecialPool' : [ 0x238, ['unsigned long long']],
'FreeNonPagedSpecialPoolError' : [ 0x240, ['unsigned long long']],
'FreeNonPagedSpecialPool' : [ 0x248, ['unsigned long long']],
'AllocateSharedSegmentPage' : [ 0x250, ['unsigned long long']],
'FreeSharedSegmentPage' : [ 0x258, ['unsigned long long']],
'AllocateZeroPage' : [ 0x260, ['unsigned long long']],
'FreeZeroPage' : [ 0x268, ['unsigned long long']],
'AllocateForPo' : [ 0x270, ['unsigned long long']],
'AllocateForPoForce' : [ 0x278, ['unsigned long long']],
'FreeForPo' : [ 0x280, ['unsigned long long']],
'AllocateThreadHardFaultBehavior' : [ 0x288, ['unsigned long long']],
'FreeThreadHardFaultBehavior' : [ 0x290, ['unsigned long long']],
'ObtainFaultCharges' : [ 0x298, ['unsigned long long']],
'FreeFaultCharges' : [ 0x2a0, ['unsigned long long']],
'AllocateStoreCharges' : [ 0x2a8, ['unsigned long long']],
'FreeStoreCharges' : [ 0x2b0, ['unsigned long long']],
'ObtainLockedPageCharge' : [ 0x2c0, ['unsigned long long']],
'FreeLockedPageCharge' : [ 0x300, ['unsigned long long']],
'AllocateStore' : [ 0x308, ['unsigned long long']],
'FreeStore' : [ 0x310, ['unsigned long long']],
'AllocateSystemImageProtos' : [ 0x318, ['unsigned long long']],
'FreeSystemImageProtos' : [ 0x320, ['unsigned long long']],
'AllocateModWriterCharge' : [ 0x328, ['unsigned long long']],
'FreeModWriterCharge' : [ 0x330, ['unsigned long long']],
'AllocateMappedWriterCharge' : [ 0x338, ['unsigned long long']],
'FreeMappedWriterCharge' : [ 0x340, ['unsigned long long']],
'AllocateRegistryCharges' : [ 0x348, ['unsigned long long']],
'FreeRegistryCharges' : [ 0x350, ['unsigned long long']],
} ],
'_WHEA_ERROR_RECORD_HEADER_FLAGS' : [ 0x4, {
'Recovered' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PreviousError' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Simulated' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_XSAVE_AREA_HEADER' : [ 0x40, {
'Mask' : [ 0x0, ['unsigned long long']],
'CompactionMask' : [ 0x8, ['unsigned long long']],
'Reserved2' : [ 0x10, ['array', 6, ['unsigned long long']]],
} ],
'_RTL_CRITICAL_SECTION' : [ 0x28, {
'DebugInfo' : [ 0x0, ['pointer64', ['_RTL_CRITICAL_SECTION_DEBUG']]],
'LockCount' : [ 0x8, ['long']],
'RecursionCount' : [ 0xc, ['long']],
'OwningThread' : [ 0x10, ['pointer64', ['void']]],
'LockSemaphore' : [ 0x18, ['pointer64', ['void']]],
'SpinCount' : [ 0x20, ['unsigned long long']],
} ],
'_PNP_DEVICE_COMPLETION_REQUEST' : [ 0x40, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'DeviceNode' : [ 0x10, ['pointer64', ['_DEVICE_NODE']]],
'Context' : [ 0x18, ['pointer64', ['void']]],
'CompletionState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'IrpPended' : [ 0x24, ['unsigned long']],
'Status' : [ 0x28, ['long']],
'Information' : [ 0x30, ['pointer64', ['void']]],
'ReferenceCount' : [ 0x38, ['long']],
} ],
'_MI_COMBINE_PAGE_LISTHEAD' : [ 0x10, {
'Table' : [ 0x0, ['_RTL_AVL_TREE']],
'Lock' : [ 0x8, ['long']],
} ],
'_PHYSICAL_MEMORY_DESCRIPTOR' : [ 0x20, {
'NumberOfRuns' : [ 0x0, ['unsigned long']],
'NumberOfPages' : [ 0x8, ['unsigned long long']],
'Run' : [ 0x10, ['array', 1, ['_PHYSICAL_MEMORY_RUN']]],
} ],
'_KTSS64' : [ 0x68, {
'Reserved0' : [ 0x0, ['unsigned long']],
'Rsp0' : [ 0x4, ['unsigned long long']],
'Rsp1' : [ 0xc, ['unsigned long long']],
'Rsp2' : [ 0x14, ['unsigned long long']],
'Ist' : [ 0x1c, ['array', 8, ['unsigned long long']]],
'Reserved1' : [ 0x5c, ['unsigned long long']],
'Reserved2' : [ 0x64, ['unsigned short']],
'IoMapBase' : [ 0x66, ['unsigned short']],
} ],
'__unnamed_24ce' : [ 0x8, {
'Start' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_24d0' : [ 0x8, {
'RangeCount' : [ 0x0, ['unsigned long']],
'SetBitCount' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_24d2' : [ 0x8, {
'Context1' : [ 0x0, ['unsigned long']],
'Context2' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_24d4' : [ 0x8, {
'DirtyVectorModifiedContext' : [ 0x0, ['__unnamed_24ce']],
'DirtyDataCaptureContext' : [ 0x0, ['__unnamed_24d0']],
'Raw' : [ 0x0, ['__unnamed_24d2']],
} ],
'_CM_DIRTY_VECTOR_LOG_ENTRY' : [ 0x48, {
'Thread' : [ 0x0, ['pointer64', ['_ETHREAD']]],
'Operation' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'DirtyVectorModified', 1: 'DirtyDataCaptureStart', 2: 'DirtyDataCaptureEnd'})]],
'Data' : [ 0xc, ['__unnamed_24d4']],
'Stack' : [ 0x18, ['array', 6, ['pointer64', ['void']]]],
} ],
'_MI_SYSTEM_NODE_INFORMATION' : [ 0x1a0, {
'PagedPoolSListHead' : [ 0x0, ['_SLIST_HEADER']],
'NonPagedPoolSListHead' : [ 0x10, ['array', 3, ['_SLIST_HEADER']]],
'NonPagedPoolSListHeadNx' : [ 0x40, ['array', 3, ['_SLIST_HEADER']]],
'CachedKernelStacks' : [ 0x70, ['array', 2, ['_CACHED_KSTACK_LIST']]],
'NonPagedBitMapMaximum' : [ 0xb0, ['unsigned long long']],
'DynamicBitMapNonPagedPool' : [ 0xb8, ['_MI_DYNAMIC_BITMAP']],
'NonPagedPoolLowestPage' : [ 0x108, ['unsigned long long']],
'NonPagedPoolHighestPage' : [ 0x110, ['unsigned long long']],
'AllocatedNonPagedPool' : [ 0x118, ['unsigned long long']],
'PartialLargePoolRegions' : [ 0x120, ['unsigned long long']],
'PagesInPartialLargePoolRegions' : [ 0x128, ['unsigned long long']],
'CachedNonPagedPoolCount' : [ 0x130, ['unsigned long long']],
'NonPagedPoolSpinLock' : [ 0x138, ['unsigned long long']],
'CachedNonPagedPool' : [ 0x140, ['pointer64', ['_MMPFN']]],
'NonPagedPoolFirstVa' : [ 0x148, ['pointer64', ['void']]],
'NonPagedPoolLastVa' : [ 0x150, ['pointer64', ['void']]],
'NonPagedBitMap' : [ 0x158, ['array', 3, ['_RTL_BITMAP_EX']]],
'NonPagedHint' : [ 0x188, ['array', 2, ['unsigned long long']]],
} ],
'_KLOCK_ENTRY_LOCK_STATE' : [ 0x10, {
'CrossThreadReleasable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Busy' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 63, native_type='unsigned long long')]],
'InTree' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
'LockState' : [ 0x0, ['pointer64', ['void']]],
'SessionState' : [ 0x8, ['pointer64', ['void']]],
'SessionId' : [ 0x8, ['unsigned long']],
'SessionPad' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_24e4' : [ 0x4, {
'FlushCompleting' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='long')]],
'FlushInProgress' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='long')]],
'Long' : [ 0x0, ['long']],
} ],
'_MI_PARTITION_STORES' : [ 0x80, {
'WriteAllStoreHintedPages' : [ 0x0, ['__unnamed_24e4']],
'VirtualPageFileNumber' : [ 0x4, ['unsigned long']],
'Registered' : [ 0x8, ['unsigned long']],
'ReadClusterSizeMax' : [ 0xc, ['unsigned long']],
'EvictFlushRequestCount' : [ 0x10, ['unsigned long']],
'ModifiedWriteDisableCount' : [ 0x14, ['unsigned long']],
'WriteIssueFailures' : [ 0x18, ['unsigned long']],
'EvictionThread' : [ 0x20, ['pointer64', ['_ETHREAD']]],
'EvictEvent' : [ 0x28, ['_KEVENT']],
'EvictFlushCompleteEvent' : [ 0x40, ['_KEVENT']],
'WriteSupportSListHead' : [ 0x60, ['_SLIST_HEADER']],
'EvictFlushLock' : [ 0x70, ['long']],
'ModifiedWriteFailedBitmap' : [ 0x78, ['pointer64', ['_RTL_BITMAP']]],
} ],
'_EVENT_FILTER_HEADER' : [ 0x18, {
'Id' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['array', 5, ['unsigned char']]],
'InstanceId' : [ 0x8, ['unsigned long long']],
'Size' : [ 0x10, ['unsigned long']],
'NextOffset' : [ 0x14, ['unsigned long']],
} ],
'_WAIT_CONTEXT_BLOCK' : [ 0x48, {
'WaitQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DmaWaitEntry' : [ 0x0, ['_LIST_ENTRY']],
'NumberOfChannels' : [ 0x10, ['unsigned long']],
'SyncCallback' : [ 0x14, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DmaContext' : [ 0x14, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Reserved' : [ 0x14, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
'DeviceRoutine' : [ 0x18, ['pointer64', ['void']]],
'DeviceContext' : [ 0x20, ['pointer64', ['void']]],
'NumberOfMapRegisters' : [ 0x28, ['unsigned long']],
'DeviceObject' : [ 0x30, ['pointer64', ['void']]],
'CurrentIrp' : [ 0x38, ['pointer64', ['void']]],
'BufferChainingDpc' : [ 0x40, ['pointer64', ['_KDPC']]],
} ],
'_CM_NAME_CONTROL_BLOCK' : [ 0x20, {
'Compressed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'RefCount' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'NameHash' : [ 0x8, ['_CM_NAME_HASH']],
'ConvKey' : [ 0x8, ['unsigned long']],
'NextHash' : [ 0x10, ['pointer64', ['_CM_KEY_HASH']]],
'NameLength' : [ 0x18, ['unsigned short']],
'Name' : [ 0x1a, ['array', 1, ['wchar']]],
} ],
'_u' : [ 0x50, {
'KeyNode' : [ 0x0, ['_CM_KEY_NODE']],
'KeyValue' : [ 0x0, ['_CM_KEY_VALUE']],
'KeySecurity' : [ 0x0, ['_CM_KEY_SECURITY']],
'KeyIndex' : [ 0x0, ['_CM_KEY_INDEX']],
'ValueData' : [ 0x0, ['_CM_BIG_DATA']],
'KeyList' : [ 0x0, ['array', 1, ['unsigned long']]],
'KeyString' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'_HBASE_BLOCK' : [ 0x1000, {
'Signature' : [ 0x0, ['unsigned long']],
'Sequence1' : [ 0x4, ['unsigned long']],
'Sequence2' : [ 0x8, ['unsigned long']],
'TimeStamp' : [ 0xc, ['_LARGE_INTEGER']],
'Major' : [ 0x14, ['unsigned long']],
'Minor' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['unsigned long']],
'Format' : [ 0x20, ['unsigned long']],
'RootCell' : [ 0x24, ['unsigned long']],
'Length' : [ 0x28, ['unsigned long']],
'Cluster' : [ 0x2c, ['unsigned long']],
'FileName' : [ 0x30, ['array', 64, ['unsigned char']]],
'RmId' : [ 0x70, ['_GUID']],
'LogId' : [ 0x80, ['_GUID']],
'Flags' : [ 0x90, ['unsigned long']],
'TmId' : [ 0x94, ['_GUID']],
'GuidSignature' : [ 0xa4, ['unsigned long']],
'LastReorganizeTime' : [ 0xa8, ['unsigned long long']],
'Reserved1' : [ 0xb0, ['array', 83, ['unsigned long']]],
'CheckSum' : [ 0x1fc, ['unsigned long']],
'Reserved2' : [ 0x200, ['array', 882, ['unsigned long']]],
'ThawTmId' : [ 0xfc8, ['_GUID']],
'ThawRmId' : [ 0xfd8, ['_GUID']],
'ThawLogId' : [ 0xfe8, ['_GUID']],
'BootType' : [ 0xff8, ['unsigned long']],
'BootRecover' : [ 0xffc, ['unsigned long']],
} ],
'_GENERAL_LOOKASIDE_POOL' : [ 0x60, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'SingleListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'AllocateHits' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'FreeHits' : [ 0x20, ['unsigned long']],
'Type' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPoolBase', 1: 'PagedPool', 2: 'NonPagedPoolBaseMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolBaseCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolBaseCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 516: 'NonPagedPoolNxCacheAligned', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 512: 'NonPagedPoolNx', 544: 'NonPagedPoolSessionNx', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x28, ['unsigned long']],
'Size' : [ 0x2c, ['unsigned long']],
'AllocateEx' : [ 0x30, ['pointer64', ['void']]],
'Allocate' : [ 0x30, ['pointer64', ['void']]],
'FreeEx' : [ 0x38, ['pointer64', ['void']]],
'Free' : [ 0x38, ['pointer64', ['void']]],
'ListEntry' : [ 0x40, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x50, ['unsigned long']],
'LastAllocateMisses' : [ 0x54, ['unsigned long']],
'LastAllocateHits' : [ 0x54, ['unsigned long']],
'Future' : [ 0x58, ['array', 2, ['unsigned long']]],
} ],
'_RTL_DYNAMIC_HASH_TABLE_ENTRY' : [ 0x18, {
'Linkage' : [ 0x0, ['_LIST_ENTRY']],
'Signature' : [ 0x10, ['unsigned long long']],
} ],
'_MI_SUB64K_FREE_RANGES' : [ 0x30, {
'BitMap' : [ 0x0, ['_RTL_BITMAP_EX']],
'ListEntry' : [ 0x10, ['_LIST_ENTRY']],
'Vad' : [ 0x20, ['pointer64', ['_MMVAD_SHORT']]],
'SubListIndex' : [ 0x28, ['unsigned short']],
'Hint' : [ 0x2a, ['unsigned short']],
'SetBits' : [ 0x2c, ['unsigned long']],
} ],
'_ETW_LAST_ENABLE_INFO' : [ 0x10, {
'EnableFlags' : [ 0x0, ['_LARGE_INTEGER']],
'LoggerId' : [ 0x8, ['unsigned short']],
'Level' : [ 0xa, ['unsigned char']],
'Enabled' : [ 0xb, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'InternalFlag' : [ 0xb, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
} ],
'_M128A' : [ 0x10, {
'Low' : [ 0x0, ['unsigned long long']],
'High' : [ 0x8, ['long long']],
} ],
'_HEAP_LOOKASIDE' : [ 0x40, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'LastTotalAllocates' : [ 0x24, ['unsigned long']],
'LastAllocateMisses' : [ 0x28, ['unsigned long']],
'Counters' : [ 0x2c, ['array', 2, ['unsigned long']]],
} ],
'_KTIMER' : [ 0x40, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'DueTime' : [ 0x18, ['_ULARGE_INTEGER']],
'TimerListEntry' : [ 0x20, ['_LIST_ENTRY']],
'Dpc' : [ 0x30, ['pointer64', ['_KDPC']]],
'Processor' : [ 0x38, ['unsigned long']],
'Period' : [ 0x3c, ['unsigned long']],
} ],
'_RTL_ATOM_TABLE' : [ 0x28, {
'Signature' : [ 0x0, ['unsigned long']],
'ReferenceCount' : [ 0x4, ['long']],
'PushLock' : [ 0x8, ['_EX_PUSH_LOCK']],
'ExHandleTable' : [ 0x10, ['pointer64', ['_HANDLE_TABLE']]],
'Flags' : [ 0x18, ['unsigned long']],
'NumberOfBuckets' : [ 0x1c, ['unsigned long']],
'Buckets' : [ 0x20, ['array', 1, ['pointer64', ['_RTL_ATOM_TABLE_ENTRY']]]],
} ],
'__unnamed_2522' : [ 0x18, {
'RequestedTime' : [ 0x0, ['unsigned long long']],
'ProgrammedTime' : [ 0x8, ['unsigned long long']],
'TimerInfo' : [ 0x10, ['pointer64', ['_DIAGNOSTIC_BUFFER']]],
} ],
'_POP_POWER_ACTION' : [ 0x110, {
'Updates' : [ 0x0, ['unsigned char']],
'State' : [ 0x1, ['unsigned char']],
'Shutdown' : [ 0x2, ['unsigned char']],
'Action' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject', 8: 'PowerActionDisplayOff'})]],
'LightestState' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Flags' : [ 0xc, ['unsigned long']],
'Status' : [ 0x10, ['long']],
'DeviceType' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceWakeAlarm', 9: 'PolicyDeviceFan', 10: 'PolicyCsBatterySaver', 11: 'PolicyImmediateDozeS4Predicted', 12: 'PolicyImmediateDozeS4PredictedNoWake', 13: 'PolicyDeviceMax'})]],
'DeviceTypeFlags' : [ 0x18, ['unsigned long']],
'IrpMinor' : [ 0x1c, ['unsigned char']],
'Waking' : [ 0x1d, ['unsigned char']],
'SystemState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'NextSystemState' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'EffectiveSystemState' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'CurrentSystemState' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ShutdownBugCode' : [ 0x30, ['pointer64', ['_POP_SHUTDOWN_BUG_CHECK']]],
'DevState' : [ 0x38, ['pointer64', ['_POP_DEVICE_SYS_STATE']]],
'HiberContext' : [ 0x40, ['pointer64', ['_POP_HIBER_CONTEXT']]],
'WakeTime' : [ 0x48, ['unsigned long long']],
'SleepTime' : [ 0x50, ['unsigned long long']],
'WakeFirstUnattendedTime' : [ 0x58, ['unsigned long long']],
'WakeAlarmSignaled' : [ 0x60, ['Enumeration', dict(target = 'long', choices = {0: 'PoAc', 1: 'PoDc', 2: 'PoHot', 3: 'PoConditionMaximum'})]],
'WakeAlarm' : [ 0x68, ['array', 3, ['__unnamed_2522']]],
'WakeAlarmPaused' : [ 0xb0, ['unsigned char']],
'WakeAlarmLastTime' : [ 0xb8, ['unsigned long long']],
'FilteredCapabilities' : [ 0xc0, ['SYSTEM_POWER_CAPABILITIES']],
} ],
'_CM_KEY_VALUE' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned short']],
'NameLength' : [ 0x2, ['unsigned short']],
'DataLength' : [ 0x4, ['unsigned long']],
'Data' : [ 0x8, ['unsigned long']],
'Type' : [ 0xc, ['unsigned long']],
'Flags' : [ 0x10, ['unsigned short']],
'Spare' : [ 0x12, ['unsigned short']],
'Name' : [ 0x14, ['array', 1, ['wchar']]],
} ],
'_CM_KEY_HASH' : [ 0x20, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer64', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x10, ['pointer64', ['_HHIVE']]],
'KeyCell' : [ 0x18, ['unsigned long']],
} ],
'_WHEA_IPF_CMC_DESCRIPTOR' : [ 0x4, {
'Type' : [ 0x0, ['unsigned short']],
'Enabled' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
} ],
'_PROCESSOR_IDLE_DEPENDENCY' : [ 0x8, {
'ProcessorIndex' : [ 0x0, ['unsigned long']],
'ExpectedState' : [ 0x4, ['unsigned char']],
'AllowDeeperStates' : [ 0x5, ['unsigned char']],
'LooseDependency' : [ 0x6, ['unsigned char']],
} ],
'_AMD64_DBGKD_CONTROL_SET' : [ 0x1c, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long long']],
'CurrentSymbolStart' : [ 0xc, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0x14, ['unsigned long long']],
} ],
'_KAPC_STATE' : [ 0x30, {
'ApcListHead' : [ 0x0, ['array', 2, ['_LIST_ENTRY']]],
'Process' : [ 0x20, ['pointer64', ['_KPROCESS']]],
'InProgressFlags' : [ 0x28, ['unsigned char']],
'KernelApcInProgress' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'SpecialApcInProgress' : [ 0x28, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'KernelApcPending' : [ 0x29, ['unsigned char']],
'UserApcPending' : [ 0x2a, ['unsigned char']],
} ],
'_PO_DEVICE_NOTIFY' : [ 0x68, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'PowerChildren' : [ 0x10, ['_LIST_ENTRY']],
'PowerParents' : [ 0x20, ['_LIST_ENTRY']],
'TargetDevice' : [ 0x30, ['pointer64', ['_DEVICE_OBJECT']]],
'OrderLevel' : [ 0x38, ['unsigned char']],
'DeviceObject' : [ 0x40, ['pointer64', ['_DEVICE_OBJECT']]],
'DeviceName' : [ 0x48, ['pointer64', ['unsigned short']]],
'DriverName' : [ 0x50, ['pointer64', ['unsigned short']]],
'ChildCount' : [ 0x58, ['unsigned long']],
'ActiveChild' : [ 0x5c, ['unsigned long']],
'ParentCount' : [ 0x60, ['unsigned long']],
'ActiveParent' : [ 0x64, ['unsigned long']],
} ],
'_CM_KEY_SECURITY_CACHE_ENTRY' : [ 0x10, {
'Cell' : [ 0x0, ['unsigned long']],
'CachedSecurity' : [ 0x8, ['pointer64', ['_CM_KEY_SECURITY_CACHE']]],
} ],
'_FS_FILTER_CALLBACK_DATA' : [ 0x40, {
'SizeOfFsFilterCallbackData' : [ 0x0, ['unsigned long']],
'Operation' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'Parameters' : [ 0x18, ['_FS_FILTER_PARAMETERS']],
} ],
'_GDI_TEB_BATCH32' : [ 0x4e0, {
'Offset' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 31, native_type='unsigned long')]],
'HasRenderingCommand' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'HDC' : [ 0x4, ['unsigned long']],
'Buffer' : [ 0x8, ['array', 310, ['unsigned long']]],
} ],
'_WHEA_AER_ROOTPORT_DESCRIPTOR' : [ 0x24, {
'Type' : [ 0x0, ['unsigned short']],
'Enabled' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
'BusNumber' : [ 0x4, ['unsigned long']],
'Slot' : [ 0x8, ['_WHEA_PCI_SLOT_NUMBER']],
'DeviceControl' : [ 0xc, ['unsigned short']],
'Flags' : [ 0xe, ['_AER_ROOTPORT_DESCRIPTOR_FLAGS']],
'UncorrectableErrorMask' : [ 0x10, ['unsigned long']],
'UncorrectableErrorSeverity' : [ 0x14, ['unsigned long']],
'CorrectableErrorMask' : [ 0x18, ['unsigned long']],
'AdvancedCapsAndControl' : [ 0x1c, ['unsigned long']],
'RootErrorCommand' : [ 0x20, ['unsigned long']],
} ],
'_PROC_IDLE_STATE_ACCOUNTING' : [ 0x3e0, {
'TotalTime' : [ 0x0, ['unsigned long long']],
'CancelCount' : [ 0x8, ['unsigned long']],
'FailureCount' : [ 0xc, ['unsigned long']],
'SuccessCount' : [ 0x10, ['unsigned long']],
'InvalidBucketIndex' : [ 0x14, ['unsigned long']],
'MinTime' : [ 0x18, ['unsigned long long']],
'MaxTime' : [ 0x20, ['unsigned long long']],
'SelectionStatistics' : [ 0x28, ['_PPM_SELECTION_STATISTICS']],
'IdleTimeBuckets' : [ 0xa0, ['array', 26, ['_PROC_IDLE_STATE_BUCKET']]],
} ],
'_IMAGE_SECURITY_CONTEXT' : [ 0x8, {
'PageHashes' : [ 0x0, ['pointer64', ['void']]],
'Value' : [ 0x0, ['unsigned long long']],
'SecurityBeingCreated' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long long')]],
'SecurityMandatory' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'PageHashPointer' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 64, native_type='unsigned long long')]],
} ],
'_KENLISTMENT_HISTORY' : [ 0x8, {
'Notification' : [ 0x0, ['unsigned long']],
'NewState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'KEnlistmentUninitialized', 256: 'KEnlistmentActive', 258: 'KEnlistmentPrepared', 259: 'KEnlistmentInDoubt', 260: 'KEnlistmentCommitted', 261: 'KEnlistmentCommittedNotify', 262: 'KEnlistmentCommitRequested', 257: 'KEnlistmentPreparing', 264: 'KEnlistmentDelegated', 265: 'KEnlistmentDelegatedDisconnected', 266: 'KEnlistmentPrePreparing', 263: 'KEnlistmentAborted', 268: 'KEnlistmentRecovering', 269: 'KEnlistmentAborting', 270: 'KEnlistmentReadOnly', 271: 'KEnlistmentOutcomeUnavailable', 272: 'KEnlistmentOffline', 273: 'KEnlistmentPrePrepared', 274: 'KEnlistmentInitialized', 267: 'KEnlistmentForgotten'})]],
} ],
'_FAST_IO_DISPATCH' : [ 0xe0, {
'SizeOfFastIoDispatch' : [ 0x0, ['unsigned long']],
'FastIoCheckIfPossible' : [ 0x8, ['pointer64', ['void']]],
'FastIoRead' : [ 0x10, ['pointer64', ['void']]],
'FastIoWrite' : [ 0x18, ['pointer64', ['void']]],
'FastIoQueryBasicInfo' : [ 0x20, ['pointer64', ['void']]],
'FastIoQueryStandardInfo' : [ 0x28, ['pointer64', ['void']]],
'FastIoLock' : [ 0x30, ['pointer64', ['void']]],
'FastIoUnlockSingle' : [ 0x38, ['pointer64', ['void']]],
'FastIoUnlockAll' : [ 0x40, ['pointer64', ['void']]],
'FastIoUnlockAllByKey' : [ 0x48, ['pointer64', ['void']]],
'FastIoDeviceControl' : [ 0x50, ['pointer64', ['void']]],
'AcquireFileForNtCreateSection' : [ 0x58, ['pointer64', ['void']]],
'ReleaseFileForNtCreateSection' : [ 0x60, ['pointer64', ['void']]],
'FastIoDetachDevice' : [ 0x68, ['pointer64', ['void']]],
'FastIoQueryNetworkOpenInfo' : [ 0x70, ['pointer64', ['void']]],
'AcquireForModWrite' : [ 0x78, ['pointer64', ['void']]],
'MdlRead' : [ 0x80, ['pointer64', ['void']]],
'MdlReadComplete' : [ 0x88, ['pointer64', ['void']]],
'PrepareMdlWrite' : [ 0x90, ['pointer64', ['void']]],
'MdlWriteComplete' : [ 0x98, ['pointer64', ['void']]],
'FastIoReadCompressed' : [ 0xa0, ['pointer64', ['void']]],
'FastIoWriteCompressed' : [ 0xa8, ['pointer64', ['void']]],
'MdlReadCompleteCompressed' : [ 0xb0, ['pointer64', ['void']]],
'MdlWriteCompleteCompressed' : [ 0xb8, ['pointer64', ['void']]],
'FastIoQueryOpen' : [ 0xc0, ['pointer64', ['void']]],
'ReleaseForModWrite' : [ 0xc8, ['pointer64', ['void']]],
'AcquireForCcFlush' : [ 0xd0, ['pointer64', ['void']]],
'ReleaseForCcFlush' : [ 0xd8, ['pointer64', ['void']]],
} ],
'_PERFINFO_PPM_STATE_SELECTION' : [ 0xc, {
'SelectedState' : [ 0x0, ['unsigned long']],
'VetoedStates' : [ 0x4, ['unsigned long']],
'VetoReason' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_KIDTENTRY64' : [ 0x10, {
'OffsetLow' : [ 0x0, ['unsigned short']],
'Selector' : [ 0x2, ['unsigned short']],
'IstIndex' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned short')]],
'Reserved0' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned short')]],
'Type' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned short')]],
'Dpl' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned short')]],
'Present' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned short')]],
'OffsetMiddle' : [ 0x6, ['unsigned short']],
'OffsetHigh' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['unsigned long']],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_CM_CELL_REMAP_BLOCK' : [ 0x8, {
'OldCell' : [ 0x0, ['unsigned long']],
'NewCell' : [ 0x4, ['unsigned long']],
} ],
'_PI_RESOURCE_ARBITER_ENTRY' : [ 0x70, {
'DeviceArbiterList' : [ 0x0, ['_LIST_ENTRY']],
'ResourceType' : [ 0x10, ['unsigned char']],
'ArbiterInterface' : [ 0x18, ['pointer64', ['_ARBITER_INTERFACE']]],
'DeviceNode' : [ 0x20, ['pointer64', ['_DEVICE_NODE']]],
'ResourceList' : [ 0x28, ['_LIST_ENTRY']],
'BestResourceList' : [ 0x38, ['_LIST_ENTRY']],
'BestConfig' : [ 0x48, ['_LIST_ENTRY']],
'ActiveArbiterList' : [ 0x58, ['_LIST_ENTRY']],
'State' : [ 0x68, ['unsigned char']],
'ResourcesChanged' : [ 0x69, ['unsigned char']],
} ],
'_SECURITY_DESCRIPTOR' : [ 0x28, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x8, ['pointer64', ['void']]],
'Group' : [ 0x10, ['pointer64', ['void']]],
'Sacl' : [ 0x18, ['pointer64', ['_ACL']]],
'Dacl' : [ 0x20, ['pointer64', ['_ACL']]],
} ],
'_MODWRITER_FLAGS' : [ 0x4, {
'KeepForever' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Networked' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'IoPriority' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 5, native_type='unsigned long')]],
'ModifiedStoreWrite' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
} ],
'_MI_PTE_CHAIN_HEAD' : [ 0x18, {
'Flink' : [ 0x0, ['_MMPTE']],
'Blink' : [ 0x8, ['_MMPTE']],
'PteBase' : [ 0x10, ['pointer64', ['_MMPTE']]],
} ],
'_PPM_SELECTION_MENU_ENTRY' : [ 0x18, {
'StrictDependency' : [ 0x0, ['unsigned char']],
'InitiatingState' : [ 0x1, ['unsigned char']],
'DependentState' : [ 0x2, ['unsigned char']],
'StateIndex' : [ 0x4, ['unsigned long']],
'Dependencies' : [ 0x8, ['unsigned long']],
'DependencyList' : [ 0x10, ['pointer64', ['_PPM_SELECTION_DEPENDENCY']]],
} ],
'_MI_PAGING_FILE_SPACE_BITMAPS' : [ 0x28, {
'RefCount' : [ 0x0, ['unsigned long']],
'Anchor' : [ 0x0, ['pointer64', ['_MI_PAGING_FILE_SPACE_BITMAPS']]],
'AllocationBitmap' : [ 0x8, ['_RTL_BITMAP']],
'ReservationBitmap' : [ 0x18, ['_RTL_BITMAP']],
'EvictedBitmap' : [ 0x18, ['_RTL_BITMAP']],
} ],
'_KQUEUE' : [ 0x40, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'EntryListHead' : [ 0x18, ['_LIST_ENTRY']],
'CurrentCount' : [ 0x28, ['unsigned long']],
'MaximumCount' : [ 0x2c, ['unsigned long']],
'ThreadListHead' : [ 0x30, ['_LIST_ENTRY']],
} ],
'_MI_COMBINE_WORKITEM' : [ 0x28, {
'NextEntry' : [ 0x0, ['pointer64', ['void']]],
'WorkItem' : [ 0x8, ['_WORK_QUEUE_ITEM']],
} ],
'_RTL_USER_PROCESS_PARAMETERS' : [ 0x410, {
'MaximumLength' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'DebugFlags' : [ 0xc, ['unsigned long']],
'ConsoleHandle' : [ 0x10, ['pointer64', ['void']]],
'ConsoleFlags' : [ 0x18, ['unsigned long']],
'StandardInput' : [ 0x20, ['pointer64', ['void']]],
'StandardOutput' : [ 0x28, ['pointer64', ['void']]],
'StandardError' : [ 0x30, ['pointer64', ['void']]],
'CurrentDirectory' : [ 0x38, ['_CURDIR']],
'DllPath' : [ 0x50, ['_UNICODE_STRING']],
'ImagePathName' : [ 0x60, ['_UNICODE_STRING']],
'CommandLine' : [ 0x70, ['_UNICODE_STRING']],
'Environment' : [ 0x80, ['pointer64', ['void']]],
'StartingX' : [ 0x88, ['unsigned long']],
'StartingY' : [ 0x8c, ['unsigned long']],
'CountX' : [ 0x90, ['unsigned long']],
'CountY' : [ 0x94, ['unsigned long']],
'CountCharsX' : [ 0x98, ['unsigned long']],
'CountCharsY' : [ 0x9c, ['unsigned long']],
'FillAttribute' : [ 0xa0, ['unsigned long']],
'WindowFlags' : [ 0xa4, ['unsigned long']],
'ShowWindowFlags' : [ 0xa8, ['unsigned long']],
'WindowTitle' : [ 0xb0, ['_UNICODE_STRING']],
'DesktopInfo' : [ 0xc0, ['_UNICODE_STRING']],
'ShellInfo' : [ 0xd0, ['_UNICODE_STRING']],
'RuntimeData' : [ 0xe0, ['_UNICODE_STRING']],
'CurrentDirectores' : [ 0xf0, ['array', 32, ['_RTL_DRIVE_LETTER_CURDIR']]],
'EnvironmentSize' : [ 0x3f0, ['unsigned long long']],
'EnvironmentVersion' : [ 0x3f8, ['unsigned long long']],
'PackageDependencyData' : [ 0x400, ['pointer64', ['void']]],
'ProcessGroupId' : [ 0x408, ['unsigned long']],
'LoaderThreads' : [ 0x40c, ['unsigned long']],
} ],
'_PHYSICAL_MEMORY_RUN' : [ 0x10, {
'BasePage' : [ 0x0, ['unsigned long long']],
'PageCount' : [ 0x8, ['unsigned long long']],
} ],
'_RTL_SRWLOCK' : [ 0x8, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_KTMOBJECT_NAMESPACE_LINK' : [ 0x28, {
'Links' : [ 0x0, ['_RTL_BALANCED_LINKS']],
'Expired' : [ 0x20, ['unsigned char']],
} ],
'_CACHE_MANAGER_CALLBACKS' : [ 0x20, {
'AcquireForLazyWrite' : [ 0x0, ['pointer64', ['void']]],
'ReleaseFromLazyWrite' : [ 0x8, ['pointer64', ['void']]],
'AcquireForReadAhead' : [ 0x10, ['pointer64', ['void']]],
'ReleaseFromReadAhead' : [ 0x18, ['pointer64', ['void']]],
} ],
'_PROC_PERF_LOAD' : [ 0x2, {
'BusyPercentage' : [ 0x0, ['unsigned char']],
'FrequencyPercentage' : [ 0x1, ['unsigned char']],
} ],
'_RTL_RANGE' : [ 0x28, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'UserData' : [ 0x10, ['pointer64', ['void']]],
'Owner' : [ 0x18, ['pointer64', ['void']]],
'Attributes' : [ 0x20, ['unsigned char']],
'Flags' : [ 0x21, ['unsigned char']],
} ],
'_LOCK_HEADER' : [ 0x20, {
'LockTree' : [ 0x0, ['_RTL_AVL_TREE']],
'Count' : [ 0x8, ['unsigned long long']],
'Lock' : [ 0x10, ['unsigned long long']],
'Valid' : [ 0x18, ['unsigned long']],
} ],
'_WHEA_IPF_MCA_DESCRIPTOR' : [ 0x4, {
'Type' : [ 0x0, ['unsigned short']],
'Enabled' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
} ],
'_MMSECTION_FLAGS2' : [ 0x4, {
'PartitionId' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 10, native_type='unsigned long')]],
'NumberOfChildViews' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 32, native_type='unsigned long')]],
} ],
'_KSPECIAL_REGISTERS' : [ 0xe0, {
'Cr0' : [ 0x0, ['unsigned long long']],
'Cr2' : [ 0x8, ['unsigned long long']],
'Cr3' : [ 0x10, ['unsigned long long']],
'Cr4' : [ 0x18, ['unsigned long long']],
'KernelDr0' : [ 0x20, ['unsigned long long']],
'KernelDr1' : [ 0x28, ['unsigned long long']],
'KernelDr2' : [ 0x30, ['unsigned long long']],
'KernelDr3' : [ 0x38, ['unsigned long long']],
'KernelDr6' : [ 0x40, ['unsigned long long']],
'KernelDr7' : [ 0x48, ['unsigned long long']],
'Gdtr' : [ 0x50, ['_KDESCRIPTOR']],
'Idtr' : [ 0x60, ['_KDESCRIPTOR']],
'Tr' : [ 0x70, ['unsigned short']],
'Ldtr' : [ 0x72, ['unsigned short']],
'MxCsr' : [ 0x74, ['unsigned long']],
'DebugControl' : [ 0x78, ['unsigned long long']],
'LastBranchToRip' : [ 0x80, ['unsigned long long']],
'LastBranchFromRip' : [ 0x88, ['unsigned long long']],
'LastExceptionToRip' : [ 0x90, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x98, ['unsigned long long']],
'Cr8' : [ 0xa0, ['unsigned long long']],
'MsrGsBase' : [ 0xa8, ['unsigned long long']],
'MsrGsSwap' : [ 0xb0, ['unsigned long long']],
'MsrStar' : [ 0xb8, ['unsigned long long']],
'MsrLStar' : [ 0xc0, ['unsigned long long']],
'MsrCStar' : [ 0xc8, ['unsigned long long']],
'MsrSyscallMask' : [ 0xd0, ['unsigned long long']],
'Xcr0' : [ 0xd8, ['unsigned long long']],
} ],
'_SYSTEM_POWER_POLICY' : [ 0xe8, {
'Revision' : [ 0x0, ['unsigned long']],
'PowerButton' : [ 0x4, ['POWER_ACTION_POLICY']],
'SleepButton' : [ 0x10, ['POWER_ACTION_POLICY']],
'LidClose' : [ 0x1c, ['POWER_ACTION_POLICY']],
'LidOpenWake' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Reserved' : [ 0x2c, ['unsigned long']],
'Idle' : [ 0x30, ['POWER_ACTION_POLICY']],
'IdleTimeout' : [ 0x3c, ['unsigned long']],
'IdleSensitivity' : [ 0x40, ['unsigned char']],
'DynamicThrottle' : [ 0x41, ['unsigned char']],
'Spare2' : [ 0x42, ['array', 2, ['unsigned char']]],
'MinSleep' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MaxSleep' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ReducedLatencySleep' : [ 0x4c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'WinLogonFlags' : [ 0x50, ['unsigned long']],
'Spare3' : [ 0x54, ['unsigned long']],
'DozeS4Timeout' : [ 0x58, ['unsigned long']],
'BroadcastCapacityResolution' : [ 0x5c, ['unsigned long']],
'DischargePolicy' : [ 0x60, ['array', 4, ['SYSTEM_POWER_LEVEL']]],
'VideoTimeout' : [ 0xc0, ['unsigned long']],
'VideoDimDisplay' : [ 0xc4, ['unsigned char']],
'VideoReserved' : [ 0xc8, ['array', 3, ['unsigned long']]],
'SpindownTimeout' : [ 0xd4, ['unsigned long']],
'OptimizeForPower' : [ 0xd8, ['unsigned char']],
'FanThrottleTolerance' : [ 0xd9, ['unsigned char']],
'ForcedThrottle' : [ 0xda, ['unsigned char']],
'MinThrottle' : [ 0xdb, ['unsigned char']],
'OverThrottled' : [ 0xdc, ['POWER_ACTION_POLICY']],
} ],
'_PEB64' : [ 0x7a0, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'IsPackagedProcess' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'IsAppContainer' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'IsProtectedProcessLight' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'Padding0' : [ 0x4, ['array', 4, ['unsigned char']]],
'Mutant' : [ 0x8, ['unsigned long long']],
'ImageBaseAddress' : [ 0x10, ['unsigned long long']],
'Ldr' : [ 0x18, ['unsigned long long']],
'ProcessParameters' : [ 0x20, ['unsigned long long']],
'SubSystemData' : [ 0x28, ['unsigned long long']],
'ProcessHeap' : [ 0x30, ['unsigned long long']],
'FastPebLock' : [ 0x38, ['unsigned long long']],
'AtlThunkSListPtr' : [ 0x40, ['unsigned long long']],
'IFEOKey' : [ 0x48, ['unsigned long long']],
'CrossProcessFlags' : [ 0x50, ['unsigned long']],
'ProcessInJob' : [ 0x50, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x50, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x50, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x50, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ProcessUsingFTH' : [ 0x50, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ReservedBits0' : [ 0x50, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'Padding1' : [ 0x54, ['array', 4, ['unsigned char']]],
'KernelCallbackTable' : [ 0x58, ['unsigned long long']],
'UserSharedInfoPtr' : [ 0x58, ['unsigned long long']],
'SystemReserved' : [ 0x60, ['array', 1, ['unsigned long']]],
'AtlThunkSListPtr32' : [ 0x64, ['unsigned long']],
'ApiSetMap' : [ 0x68, ['unsigned long long']],
'TlsExpansionCounter' : [ 0x70, ['unsigned long']],
'Padding2' : [ 0x74, ['array', 4, ['unsigned char']]],
'TlsBitmap' : [ 0x78, ['unsigned long long']],
'TlsBitmapBits' : [ 0x80, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x88, ['unsigned long long']],
'SparePvoid0' : [ 0x90, ['unsigned long long']],
'ReadOnlyStaticServerData' : [ 0x98, ['unsigned long long']],
'AnsiCodePageData' : [ 0xa0, ['unsigned long long']],
'OemCodePageData' : [ 0xa8, ['unsigned long long']],
'UnicodeCaseTableData' : [ 0xb0, ['unsigned long long']],
'NumberOfProcessors' : [ 0xb8, ['unsigned long']],
'NtGlobalFlag' : [ 0xbc, ['unsigned long']],
'CriticalSectionTimeout' : [ 0xc0, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0xc8, ['unsigned long long']],
'HeapSegmentCommit' : [ 0xd0, ['unsigned long long']],
'HeapDeCommitTotalFreeThreshold' : [ 0xd8, ['unsigned long long']],
'HeapDeCommitFreeBlockThreshold' : [ 0xe0, ['unsigned long long']],
'NumberOfHeaps' : [ 0xe8, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0xec, ['unsigned long']],
'ProcessHeaps' : [ 0xf0, ['unsigned long long']],
'GdiSharedHandleTable' : [ 0xf8, ['unsigned long long']],
'ProcessStarterHelper' : [ 0x100, ['unsigned long long']],
'GdiDCAttributeList' : [ 0x108, ['unsigned long']],
'Padding3' : [ 0x10c, ['array', 4, ['unsigned char']]],
'LoaderLock' : [ 0x110, ['unsigned long long']],
'OSMajorVersion' : [ 0x118, ['unsigned long']],
'OSMinorVersion' : [ 0x11c, ['unsigned long']],
'OSBuildNumber' : [ 0x120, ['unsigned short']],
'OSCSDVersion' : [ 0x122, ['unsigned short']],
'OSPlatformId' : [ 0x124, ['unsigned long']],
'ImageSubsystem' : [ 0x128, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0x12c, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0x130, ['unsigned long']],
'Padding4' : [ 0x134, ['array', 4, ['unsigned char']]],
'ActiveProcessAffinityMask' : [ 0x138, ['unsigned long long']],
'GdiHandleBuffer' : [ 0x140, ['array', 60, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x230, ['unsigned long long']],
'TlsExpansionBitmap' : [ 0x238, ['unsigned long long']],
'TlsExpansionBitmapBits' : [ 0x240, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x2c0, ['unsigned long']],
'Padding5' : [ 0x2c4, ['array', 4, ['unsigned char']]],
'AppCompatFlags' : [ 0x2c8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x2d0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x2d8, ['unsigned long long']],
'AppCompatInfo' : [ 0x2e0, ['unsigned long long']],
'CSDVersion' : [ 0x2e8, ['_STRING64']],
'ActivationContextData' : [ 0x2f8, ['unsigned long long']],
'ProcessAssemblyStorageMap' : [ 0x300, ['unsigned long long']],
'SystemDefaultActivationContextData' : [ 0x308, ['unsigned long long']],
'SystemAssemblyStorageMap' : [ 0x310, ['unsigned long long']],
'MinimumStackCommit' : [ 0x318, ['unsigned long long']],
'FlsCallback' : [ 0x320, ['unsigned long long']],
'FlsListHead' : [ 0x328, ['LIST_ENTRY64']],
'FlsBitmap' : [ 0x338, ['unsigned long long']],
'FlsBitmapBits' : [ 0x340, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x350, ['unsigned long']],
'WerRegistrationData' : [ 0x358, ['unsigned long long']],
'WerShipAssertPtr' : [ 0x360, ['unsigned long long']],
'pUnused' : [ 0x368, ['unsigned long long']],
'pImageHeaderHash' : [ 0x370, ['unsigned long long']],
'TracingFlags' : [ 0x378, ['unsigned long']],
'HeapTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CritSecTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LibLoaderTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SpareTracingBits' : [ 0x378, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'Padding6' : [ 0x37c, ['array', 4, ['unsigned char']]],
'CsrServerReadOnlySharedMemoryBase' : [ 0x380, ['unsigned long long']],
'TppWorkerpListLock' : [ 0x388, ['unsigned long long']],
'TppWorkerpList' : [ 0x390, ['LIST_ENTRY64']],
'WaitOnAddressHashTable' : [ 0x3a0, ['array', 128, ['unsigned long long']]],
} ],
'_PS_PROTECTION' : [ 0x1, {
'Level' : [ 0x0, ['unsigned char']],
'Type' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'Audit' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Signer' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
} ],
'_MSUBSECTION' : [ 0x70, {
'Core' : [ 0x0, ['_SUBSECTION']],
'SubsectionNode' : [ 0x38, ['_RTL_BALANCED_NODE']],
'DereferenceList' : [ 0x50, ['_LIST_ENTRY']],
'NumberOfMappedViews' : [ 0x60, ['unsigned long long']],
'NumberOfPfnReferences' : [ 0x68, ['unsigned long']],
} ],
'_SE_AUDIT_PROCESS_CREATION_INFO' : [ 0x8, {
'ImageFileName' : [ 0x0, ['pointer64', ['_OBJECT_NAME_INFORMATION']]],
} ],
'_HEAP_ENTRY_EXTRA' : [ 0x10, {
'AllocatorBackTraceIndex' : [ 0x0, ['unsigned short']],
'TagIndex' : [ 0x2, ['unsigned short']],
'Settable' : [ 0x8, ['unsigned long long']],
'ZeroInit' : [ 0x0, ['unsigned long long']],
'ZeroInit1' : [ 0x8, ['unsigned long long']],
} ],
'_VF_POOL_TRACE' : [ 0x80, {
'Address' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned long long']],
'Thread' : [ 0x10, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x18, ['array', 13, ['pointer64', ['void']]]],
} ],
'__unnamed_25ee' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MM_SESSION_SPACE_FLAGS']],
} ],
'_MM_SESSION_SPACE' : [ 0x1f40, {
'ReferenceCount' : [ 0x0, ['long']],
'u' : [ 0x4, ['__unnamed_25ee']],
'SessionId' : [ 0x8, ['unsigned long']],
'ProcessReferenceToSession' : [ 0xc, ['long']],
'ProcessList' : [ 0x10, ['_LIST_ENTRY']],
'SessionPageDirectoryIndex' : [ 0x20, ['unsigned long long']],
'NonPagablePages' : [ 0x28, ['unsigned long long']],
'CommittedPages' : [ 0x30, ['unsigned long long']],
'PagedPoolStart' : [ 0x38, ['pointer64', ['void']]],
'PagedPoolEnd' : [ 0x40, ['pointer64', ['void']]],
'SessionObject' : [ 0x48, ['pointer64', ['void']]],
'SessionObjectHandle' : [ 0x50, ['pointer64', ['void']]],
'SessionPoolAllocationFailures' : [ 0x58, ['array', 4, ['unsigned long']]],
'ImageTree' : [ 0x68, ['_RTL_AVL_TREE']],
'LocaleId' : [ 0x70, ['unsigned long']],
'AttachCount' : [ 0x74, ['unsigned long']],
'AttachGate' : [ 0x78, ['_KGATE']],
'WsListEntry' : [ 0x90, ['_LIST_ENTRY']],
'Lookaside' : [ 0xc0, ['array', 21, ['_GENERAL_LOOKASIDE']]],
'Session' : [ 0xb40, ['_MMSESSION']],
'PagedPoolInfo' : [ 0xb60, ['_MM_PAGED_POOL_INFO']],
'Vm' : [ 0xb98, ['_MMSUPPORT']],
'Wsle' : [ 0xc90, ['pointer64', ['_MMWSLE']]],
'DriverUnload' : [ 0xc98, ['_MI_SESSION_DRIVER_UNLOAD']],
'PagedPool' : [ 0xcc0, ['_POOL_DESCRIPTOR']],
'PageDirectory' : [ 0x1e00, ['_MMPTE']],
'SessionVaLock' : [ 0x1e08, ['_EX_PUSH_LOCK']],
'DynamicVaBitMap' : [ 0x1e10, ['_RTL_BITMAP']],
'DynamicVaHint' : [ 0x1e20, ['unsigned long']],
'SpecialPool' : [ 0x1e28, ['_MI_SPECIAL_POOL']],
'SessionPteLock' : [ 0x1e78, ['_EX_PUSH_LOCK']],
'PoolBigEntriesInUse' : [ 0x1e80, ['long']],
'PagedPoolPdeCount' : [ 0x1e84, ['unsigned long']],
'SpecialPoolPdeCount' : [ 0x1e88, ['unsigned long']],
'DynamicSessionPdeCount' : [ 0x1e8c, ['unsigned long']],
'SystemPteInfo' : [ 0x1e90, ['_MI_SYSTEM_PTE_TYPE']],
'PoolTrackTableExpansion' : [ 0x1ef0, ['pointer64', ['void']]],
'PoolTrackTableExpansionSize' : [ 0x1ef8, ['unsigned long long']],
'PoolTrackBigPages' : [ 0x1f00, ['pointer64', ['void']]],
'PoolTrackBigPagesSize' : [ 0x1f08, ['unsigned long long']],
'IoState' : [ 0x1f10, ['Enumeration', dict(target = 'long', choices = {1: 'IoSessionStateCreated', 2: 'IoSessionStateInitialized', 3: 'IoSessionStateConnected', 4: 'IoSessionStateDisconnected', 5: 'IoSessionStateDisconnectedLoggedOn', 6: 'IoSessionStateLoggedOn', 7: 'IoSessionStateLoggedOff', 8: 'IoSessionStateTerminated', 9: 'IoSessionStateMax'})]],
'IoStateSequence' : [ 0x1f14, ['unsigned long']],
'IoNotificationEvent' : [ 0x1f18, ['_KEVENT']],
'ServerSilo' : [ 0x1f30, ['pointer64', ['_EJOB']]],
'CreateTime' : [ 0x1f38, ['unsigned long long']],
} ],
'_MMPAGE_FILE_EXPANSION' : [ 0x60, {
'Segment' : [ 0x0, ['pointer64', ['_SEGMENT']]],
'DereferenceList' : [ 0x8, ['_LIST_ENTRY']],
'Partition' : [ 0x18, ['pointer64', ['_MI_PARTITION']]],
'RequestedExpansionSize' : [ 0x20, ['unsigned long long']],
'ActualExpansion' : [ 0x28, ['unsigned long long']],
'Event' : [ 0x30, ['_KEVENT']],
'InProgress' : [ 0x48, ['long']],
'u' : [ 0x4c, ['_MMPAGE_FILE_EXPANSION_FLAGS']],
'ActiveEntry' : [ 0x50, ['pointer64', ['pointer64', ['void']]]],
'AttemptForCantExtend' : [ 0x58, ['unsigned char']],
'PageFileContract' : [ 0x59, ['unsigned char']],
} ],
'_WHEA_XPF_MC_BANK_DESCRIPTOR' : [ 0x1c, {
'BankNumber' : [ 0x0, ['unsigned char']],
'ClearOnInitialization' : [ 0x1, ['unsigned char']],
'StatusDataFormat' : [ 0x2, ['unsigned char']],
'Flags' : [ 0x3, ['_XPF_MC_BANK_FLAGS']],
'ControlMsr' : [ 0x4, ['unsigned long']],
'StatusMsr' : [ 0x8, ['unsigned long']],
'AddressMsr' : [ 0xc, ['unsigned long']],
'MiscMsr' : [ 0x10, ['unsigned long']],
'ControlData' : [ 0x14, ['unsigned long long']],
} ],
'__unnamed_25ff' : [ 0x4, {
'LongFlags2' : [ 0x0, ['unsigned long']],
'VadFlags2' : [ 0x0, ['_MMVAD_FLAGS2']],
} ],
'__unnamed_2603' : [ 0x8, {
'SequentialVa' : [ 0x0, ['_MI_VAD_SEQUENTIAL_INFO']],
'ExtendedInfo' : [ 0x0, ['pointer64', ['_MMEXTEND_INFO']]],
} ],
'_MMVAD' : [ 0x88, {
'Core' : [ 0x0, ['_MMVAD_SHORT']],
'u2' : [ 0x40, ['__unnamed_25ff']],
'Subsection' : [ 0x48, ['pointer64', ['_SUBSECTION']]],
'FirstPrototypePte' : [ 0x50, ['pointer64', ['_MMPTE']]],
'LastContiguousPte' : [ 0x58, ['pointer64', ['_MMPTE']]],
'ViewLinks' : [ 0x60, ['_LIST_ENTRY']],
'VadsProcess' : [ 0x70, ['pointer64', ['_EPROCESS']]],
'u4' : [ 0x78, ['__unnamed_2603']],
'FileObject' : [ 0x80, ['pointer64', ['_FILE_OBJECT']]],
} ],
'_SEP_SID_VALUES_BLOCK' : [ 0x20, {
'BlockLength' : [ 0x0, ['unsigned long']],
'ReferenceCount' : [ 0x8, ['long long']],
'SidCount' : [ 0x10, ['unsigned long']],
'SidValuesStart' : [ 0x18, ['unsigned long long']],
} ],
'_MI_PARTITION_STATE' : [ 0x60, {
'PartitionLock' : [ 0x0, ['unsigned long long']],
'PartitionIdLock' : [ 0x8, ['_EX_PUSH_LOCK']],
'InitialPartitionIdBits' : [ 0x10, ['unsigned long long']],
'PartitionList' : [ 0x18, ['_LIST_ENTRY']],
'PartitionIdBitmap' : [ 0x28, ['pointer64', ['_RTL_BITMAP']]],
'InitialPartitionIdBitmap' : [ 0x30, ['_RTL_BITMAP']],
'TempPartitionPointers' : [ 0x40, ['array', 1, ['pointer64', ['_MI_PARTITION']]]],
'Partition' : [ 0x48, ['pointer64', ['pointer64', ['_MI_PARTITION']]]],
'TotalPagesInChildPartitions' : [ 0x50, ['unsigned long long']],
'CrossPartitionDenials' : [ 0x58, ['unsigned long']],
} ],
'_MMMOD_WRITER_LISTHEAD' : [ 0x28, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Gate' : [ 0x10, ['_KGATE']],
'Event' : [ 0x10, ['_KEVENT']],
} ],
'_CM_RM' : [ 0x88, {
'RmListEntry' : [ 0x0, ['_LIST_ENTRY']],
'TransactionListHead' : [ 0x10, ['_LIST_ENTRY']],
'TmHandle' : [ 0x20, ['pointer64', ['void']]],
'Tm' : [ 0x28, ['pointer64', ['void']]],
'RmHandle' : [ 0x30, ['pointer64', ['void']]],
'KtmRm' : [ 0x38, ['pointer64', ['void']]],
'RefCount' : [ 0x40, ['unsigned long']],
'ContainerNum' : [ 0x44, ['unsigned long']],
'ContainerSize' : [ 0x48, ['unsigned long long']],
'CmHive' : [ 0x50, ['pointer64', ['_CMHIVE']]],
'LogFileObject' : [ 0x58, ['pointer64', ['void']]],
'MarshallingContext' : [ 0x60, ['pointer64', ['void']]],
'RmFlags' : [ 0x68, ['unsigned long']],
'LogStartStatus1' : [ 0x6c, ['long']],
'LogStartStatus2' : [ 0x70, ['long']],
'BaseLsn' : [ 0x78, ['unsigned long long']],
'RmLock' : [ 0x80, ['pointer64', ['_ERESOURCE']]],
} ],
'_NONOPAQUE_OPLOCK' : [ 0xa0, {
'IrpExclusiveOplock' : [ 0x0, ['pointer64', ['_IRP']]],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'ExclusiveOplockOwner' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'ExclusiveOplockOwnerThread' : [ 0x18, ['pointer64', ['_ETHREAD']]],
'WaiterPriority' : [ 0x20, ['unsigned char']],
'IrpOplocksR' : [ 0x28, ['_LIST_ENTRY']],
'IrpOplocksRH' : [ 0x38, ['_LIST_ENTRY']],
'RHBreakQueue' : [ 0x48, ['_LIST_ENTRY']],
'WaitingIrps' : [ 0x58, ['_LIST_ENTRY']],
'DelayAckFileObjectQueue' : [ 0x68, ['_LIST_ENTRY']],
'AtomicQueue' : [ 0x78, ['_LIST_ENTRY']],
'DeleterParentKey' : [ 0x88, ['pointer64', ['_GUID']]],
'OplockState' : [ 0x90, ['unsigned long']],
'FastMutex' : [ 0x98, ['pointer64', ['_FAST_MUTEX']]],
} ],
'_MI_LARGEPAGE_MEMORY_INFO' : [ 0x28, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'ColoredPageInfoBase' : [ 0x10, ['pointer64', ['_COLORED_PAGE_INFO']]],
'PagesNeedZeroing' : [ 0x18, ['unsigned long']],
'LargeImageBias' : [ 0x1c, ['unsigned char']],
'Spare' : [ 0x1d, ['array', 3, ['unsigned char']]],
'ActualImageViewSize' : [ 0x20, ['unsigned long long']],
} ],
'_OBJECT_HANDLE_COUNT_ENTRY' : [ 0x10, {
'Process' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'HandleCount' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'LockCount' : [ 0x8, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_PROCESS_ENERGY_VALUES' : [ 0x90, {
'Cycles' : [ 0x0, ['array', 4, ['array', 2, ['unsigned long long']]]],
'DiskEnergy' : [ 0x40, ['unsigned long long']],
'NetworkTailEnergy' : [ 0x48, ['unsigned long long']],
'MBBTailEnergy' : [ 0x50, ['unsigned long long']],
'NetworkTxRxBytes' : [ 0x58, ['unsigned long long']],
'MBBTxRxBytes' : [ 0x60, ['unsigned long long']],
'Foreground' : [ 0x68, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'WindowInformation' : [ 0x68, ['unsigned long']],
'PixelArea' : [ 0x6c, ['unsigned long']],
'PixelReportTimestamp' : [ 0x70, ['long long']],
'PixelTime' : [ 0x78, ['unsigned long long']],
'ForegroundReportTimestamp' : [ 0x80, ['long long']],
'ForegroundTime' : [ 0x88, ['unsigned long long']],
} ],
'_CLIENT_ID' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['pointer64', ['void']]],
'UniqueThread' : [ 0x8, ['pointer64', ['void']]],
} ],
'_WHEA_MEMORY_ERROR_SECTION' : [ 0x49, {
'ValidBits' : [ 0x0, ['_WHEA_MEMORY_ERROR_SECTION_VALIDBITS']],
'ErrorStatus' : [ 0x8, ['_WHEA_ERROR_STATUS']],
'PhysicalAddress' : [ 0x10, ['unsigned long long']],
'PhysicalAddressMask' : [ 0x18, ['unsigned long long']],
'Node' : [ 0x20, ['unsigned short']],
'Card' : [ 0x22, ['unsigned short']],
'Module' : [ 0x24, ['unsigned short']],
'Bank' : [ 0x26, ['unsigned short']],
'Device' : [ 0x28, ['unsigned short']],
'Row' : [ 0x2a, ['unsigned short']],
'Column' : [ 0x2c, ['unsigned short']],
'BitPosition' : [ 0x2e, ['unsigned short']],
'RequesterId' : [ 0x30, ['unsigned long long']],
'ResponderId' : [ 0x38, ['unsigned long long']],
'TargetId' : [ 0x40, ['unsigned long long']],
'ErrorType' : [ 0x48, ['unsigned char']],
} ],
'_MI_COMMON_PAGE_STATE' : [ 0x98, {
'PageOfOnesPfn' : [ 0x0, ['pointer64', ['_MMPFN']]],
'PageOfOnes' : [ 0x8, ['unsigned long long']],
'DummyPagePfn' : [ 0x10, ['pointer64', ['_MMPFN']]],
'DummyPage' : [ 0x18, ['unsigned long long']],
'PageOfZeroes' : [ 0x20, ['unsigned long long']],
'ZeroMapping' : [ 0x28, ['pointer64', ['void']]],
'OnesMapping' : [ 0x30, ['pointer64', ['void']]],
'BitmapGapFrames' : [ 0x38, ['array', 4, ['unsigned long long']]],
'PfnGapFrames' : [ 0x58, ['array', 4, ['unsigned long long']]],
'PageTableOfZeroes' : [ 0x78, ['unsigned long long']],
'PdeOfZeroes' : [ 0x80, ['_MMPTE']],
'PageTableOfOnes' : [ 0x88, ['unsigned long long']],
'PdeOfOnes' : [ 0x90, ['_MMPTE']],
} ],
'_KWAIT_STATUS_REGISTER' : [ 0x1, {
'Flags' : [ 0x0, ['unsigned char']],
'State' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'Affinity' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Priority' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'Apc' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'UserApc' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Alert' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'_VI_DEADLOCK_RESOURCE' : [ 0xf8, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'VfDeadlockUnknown', 1: 'VfDeadlockMutex', 2: 'VfDeadlockMutexAbandoned', 3: 'VfDeadlockFastMutex', 4: 'VfDeadlockFastMutexUnsafe', 5: 'VfDeadlockSpinLock', 6: 'VfDeadlockInStackQueuedSpinLock', 7: 'VfDeadlockUnusedSpinLock', 8: 'VfDeadlockEresource', 9: 'VfDeadlockTypeMaximum'})]],
'NodeCount' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'RecursionCount' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'ResourceAddress' : [ 0x8, ['pointer64', ['void']]],
'ThreadOwner' : [ 0x10, ['pointer64', ['_VI_DEADLOCK_THREAD']]],
'ResourceList' : [ 0x18, ['_LIST_ENTRY']],
'HashChainList' : [ 0x28, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x28, ['_LIST_ENTRY']],
'StackTrace' : [ 0x38, ['array', 8, ['pointer64', ['void']]]],
'LastAcquireTrace' : [ 0x78, ['array', 8, ['pointer64', ['void']]]],
'LastReleaseTrace' : [ 0xb8, ['array', 8, ['pointer64', ['void']]]],
} ],
'_MMPFNLIST_SHORT' : [ 0x18, {
'Total' : [ 0x0, ['unsigned long long']],
'Flink' : [ 0x8, ['unsigned long long']],
'Blink' : [ 0x10, ['unsigned long long']],
} ],
'_DBGKD_GET_SET_BUS_DATA' : [ 0x14, {
'BusDataType' : [ 0x0, ['unsigned long']],
'BusNumber' : [ 0x4, ['unsigned long']],
'SlotNumber' : [ 0x8, ['unsigned long']],
'Offset' : [ 0xc, ['unsigned long']],
'Length' : [ 0x10, ['unsigned long']],
} ],
'_MMSECTION_FLAGS' : [ 0x4, {
'BeingDeleted' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'BeingCreated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'BeingPurged' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'NoModifiedWriting' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'FailAllIo' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Based' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'File' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'AttemptingDelete' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'PrefetchCreated' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'PhysicalMemory' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Reserve' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'Commit' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WasPurged' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'UserReference' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'GlobalMemory' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'DeleteOnClose' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'FilePointerNull' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'PreferredNode' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 26, native_type='unsigned long')]],
'GlobalOnlyPerSession' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'UserWritable' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'SystemVaAllocated' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'PreferredFsCompressionBoundary' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'UsingFileExtents' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_SECURITY_CLIENT_CONTEXT' : [ 0x48, {
'SecurityQos' : [ 0x0, ['_SECURITY_QUALITY_OF_SERVICE']],
'ClientToken' : [ 0x10, ['pointer64', ['void']]],
'DirectlyAccessClientToken' : [ 0x18, ['unsigned char']],
'DirectAccessEffectiveOnly' : [ 0x19, ['unsigned char']],
'ServerIsRemote' : [ 0x1a, ['unsigned char']],
'ClientTokenControl' : [ 0x1c, ['_TOKEN_CONTROL']],
} ],
'_MI_VAD_ALLOCATION_CELL' : [ 0x28, {
'AllocationBitMap' : [ 0x0, ['_RTL_BITMAP']],
'BitMapHint' : [ 0x10, ['unsigned long']],
'LastAllocationSize' : [ 0x14, ['unsigned long']],
'LastAllocationSizeHint' : [ 0x18, ['unsigned long']],
'LowestBottomUpVadBit' : [ 0x1c, ['unsigned long']],
'LowestBottomUpAllocationAddress' : [ 0x20, ['pointer64', ['void']]],
} ],
'_MI_REVERSE_VIEW_MAP' : [ 0x28, {
'ViewLinks' : [ 0x0, ['_LIST_ENTRY']],
'SystemCacheVa' : [ 0x10, ['pointer64', ['void']]],
'SessionViewVa' : [ 0x10, ['pointer64', ['void']]],
'VadsProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'Type' : [ 0x10, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long long')]],
'Subsection' : [ 0x18, ['pointer64', ['_SUBSECTION']]],
'SubsectionType' : [ 0x18, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'SectionOffset' : [ 0x20, ['unsigned long long']],
} ],
'_IO_SECURITY_CONTEXT' : [ 0x18, {
'SecurityQos' : [ 0x0, ['pointer64', ['_SECURITY_QUALITY_OF_SERVICE']]],
'AccessState' : [ 0x8, ['pointer64', ['_ACCESS_STATE']]],
'DesiredAccess' : [ 0x10, ['unsigned long']],
'FullCreateOptions' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_2647' : [ 0x38, {
'Mdl' : [ 0x0, ['_MDL']],
'Page' : [ 0x30, ['array', 1, ['unsigned long long']]],
} ],
'_MI_PAGEFILE_TRACES' : [ 0x78, {
'Status' : [ 0x0, ['long']],
'PartitionId' : [ 0x4, ['unsigned short']],
'Priority' : [ 0x6, ['unsigned char']],
'IrpPriority' : [ 0x7, ['unsigned char']],
'ReservationWrite' : [ 0x8, ['unsigned char']],
'CurrentTime' : [ 0x10, ['_LARGE_INTEGER']],
'AvailablePages' : [ 0x18, ['unsigned long long']],
'ModifiedPagesTotal' : [ 0x20, ['unsigned long long']],
'ModifiedPagefilePages' : [ 0x28, ['unsigned long long']],
'ModifiedNoWritePages' : [ 0x30, ['unsigned long long']],
'ModifiedPagefileNoReservationPages' : [ 0x38, ['unsigned long long']],
'MdlHack' : [ 0x40, ['__unnamed_2647']],
} ],
'_PROC_PERF_DOMAIN' : [ 0x190, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'Master' : [ 0x10, ['pointer64', ['_KPRCB']]],
'Members' : [ 0x18, ['_KAFFINITY_EX']],
'ProcessorCount' : [ 0xc0, ['unsigned long']],
'Class' : [ 0xc4, ['unsigned char']],
'Spare' : [ 0xc5, ['array', 3, ['unsigned char']]],
'Processors' : [ 0xc8, ['pointer64', ['_PROC_PERF_CONSTRAINT']]],
'GetFFHThrottleState' : [ 0xd0, ['pointer64', ['void']]],
'TimeWindowHandler' : [ 0xd8, ['pointer64', ['void']]],
'BoostPolicyHandler' : [ 0xe0, ['pointer64', ['void']]],
'BoostModeHandler' : [ 0xe8, ['pointer64', ['void']]],
'EnergyPerfPreferenceHandler' : [ 0xf0, ['pointer64', ['void']]],
'AutonomousActivityWindowHandler' : [ 0xf8, ['pointer64', ['void']]],
'AutonomousModeHandler' : [ 0x100, ['pointer64', ['void']]],
'ReinitializeHandler' : [ 0x108, ['pointer64', ['void']]],
'PerfSelectionHandler' : [ 0x110, ['pointer64', ['void']]],
'PerfControlHandler' : [ 0x118, ['pointer64', ['void']]],
'MaxFrequency' : [ 0x120, ['unsigned long']],
'NominalFrequency' : [ 0x124, ['unsigned long']],
'MaxPercent' : [ 0x128, ['unsigned long']],
'MinPerfPercent' : [ 0x12c, ['unsigned long']],
'MinThrottlePercent' : [ 0x130, ['unsigned long']],
'MinimumRelativePerformance' : [ 0x138, ['unsigned long long']],
'NominalRelativePerformance' : [ 0x140, ['unsigned long long']],
'Coordination' : [ 0x148, ['unsigned char']],
'HardPlatformCap' : [ 0x149, ['unsigned char']],
'AffinitizeControl' : [ 0x14a, ['unsigned char']],
'EfficientThrottle' : [ 0x14b, ['unsigned char']],
'AutonomousMode' : [ 0x14c, ['unsigned char']],
'SelectedPercent' : [ 0x150, ['unsigned long']],
'SelectedFrequency' : [ 0x154, ['unsigned long']],
'DesiredPercent' : [ 0x158, ['unsigned long']],
'MaxPolicyPercent' : [ 0x15c, ['unsigned long']],
'MinPolicyPercent' : [ 0x160, ['unsigned long']],
'ConstrainedMaxPercent' : [ 0x164, ['unsigned long']],
'ConstrainedMinPercent' : [ 0x168, ['unsigned long']],
'GuaranteedPercent' : [ 0x16c, ['unsigned long']],
'TolerancePercent' : [ 0x170, ['unsigned long']],
'SelectedState' : [ 0x178, ['unsigned long long']],
'PerfChangeTime' : [ 0x180, ['unsigned long long']],
'PerfChangeIntervalCount' : [ 0x188, ['unsigned long']],
'Force' : [ 0x18c, ['unsigned char']],
'ProvideGuidance' : [ 0x18d, ['unsigned char']],
} ],
'_X86_DBGKD_CONTROL_SET' : [ 0x10, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long']],
'CurrentSymbolStart' : [ 0x8, ['unsigned long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long']],
} ],
'_HVIEW_MAP_TABLE' : [ 0x800, {
'Entries' : [ 0x0, ['array', 64, ['_HVIEW_MAP_ENTRY']]],
} ],
'_HANDLE_TRACE_DB_ENTRY' : [ 0xa0, {
'ClientId' : [ 0x0, ['_CLIENT_ID']],
'Handle' : [ 0x10, ['pointer64', ['void']]],
'Type' : [ 0x18, ['unsigned long']],
'StackTrace' : [ 0x20, ['array', 16, ['pointer64', ['void']]]],
} ],
'_WHEA_IPF_CPE_DESCRIPTOR' : [ 0x4, {
'Type' : [ 0x0, ['unsigned short']],
'Enabled' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
} ],
'_DUMMY_FILE_OBJECT' : [ 0x110, {
'ObjectHeader' : [ 0x0, ['_OBJECT_HEADER']],
'FileObjectBody' : [ 0x38, ['array', 216, ['unsigned char']]],
} ],
'_TRIAGE_9F_PNP' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'CompletionQueue' : [ 0x8, ['pointer64', ['_TRIAGE_PNP_DEVICE_COMPLETION_QUEUE']]],
'DelayedWorkQueue' : [ 0x10, ['pointer64', ['_TRIAGE_EX_WORK_QUEUE']]],
} ],
'_RELATION_LIST' : [ 0x10, {
'DeviceObjectList' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT_LIST']]],
'Sorted' : [ 0x8, ['unsigned char']],
} ],
'_IO_TIMER' : [ 0x30, {
'Type' : [ 0x0, ['short']],
'TimerFlag' : [ 0x2, ['short']],
'TimerList' : [ 0x8, ['_LIST_ENTRY']],
'TimerRoutine' : [ 0x18, ['pointer64', ['void']]],
'Context' : [ 0x20, ['pointer64', ['void']]],
'DeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'_MI_STANDBY_STATE' : [ 0xc0, {
'TransitionSharedPages' : [ 0x0, ['unsigned long long']],
'TransitionSharedPagesPeak' : [ 0x8, ['array', 3, ['unsigned long long']]],
'FirstDecayPage' : [ 0x20, ['unsigned long long']],
'PfnDecayFreeSList' : [ 0x30, ['_SLIST_HEADER']],
'PfnRepurposeLog' : [ 0x40, ['pointer64', ['_MM_PAGE_ACCESS_INFO_HEADER']]],
'AllocatePfnRepurposeDpc' : [ 0x48, ['_KDPC']],
} ],
'_MI_ACCESS_LOG_STATE' : [ 0x80, {
'CcAccessLog' : [ 0x0, ['pointer64', ['_MM_PAGE_ACCESS_INFO_HEADER']]],
'Enabled' : [ 0x8, ['unsigned long']],
'DisableAccessLogging' : [ 0x10, ['_WORK_QUEUE_ITEM']],
'MinLoggingPriority' : [ 0x30, ['unsigned long']],
'AccessLoggingLock' : [ 0x40, ['unsigned long long']],
} ],
'_ETW_BUFFER_QUEUE' : [ 0x18, {
'QueueHead' : [ 0x0, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
'QueueTail' : [ 0x8, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
'QueueEntry' : [ 0x10, ['_SINGLE_LIST_ENTRY']],
} ],
'_ARBITER_TEST_ALLOCATION_PARAMETERS' : [ 0x18, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x8, ['unsigned long']],
'AllocateFrom' : [ 0x10, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_MI_SPECIAL_POOL' : [ 0x50, {
'Lock' : [ 0x0, ['unsigned long long']],
'Paged' : [ 0x8, ['_MI_PTE_CHAIN_HEAD']],
'NonPaged' : [ 0x20, ['_MI_PTE_CHAIN_HEAD']],
'PagesInUse' : [ 0x38, ['unsigned long long']],
'SpecialPoolPdes' : [ 0x40, ['_RTL_BITMAP']],
} ],
'_LOGGED_STREAM_CALLBACK_V2' : [ 0x8, {
'LogHandleContext' : [ 0x0, ['pointer64', ['_LOG_HANDLE_CONTEXT']]],
} ],
'_ARBITER_QUERY_CONFLICT_PARAMETERS' : [ 0x20, {
'PhysicalDeviceObject' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'ConflictingResource' : [ 0x8, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'ConflictCount' : [ 0x10, ['pointer64', ['unsigned long']]],
'Conflicts' : [ 0x18, ['pointer64', ['pointer64', ['_ARBITER_CONFLICT_INFO']]]],
} ],
'_POP_CURRENT_BROADCAST' : [ 0x18, {
'InProgress' : [ 0x0, ['unsigned char']],
'SystemContext' : [ 0x4, ['_SYSTEM_POWER_STATE_CONTEXT']],
'PowerAction' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject', 8: 'PowerActionDisplayOff'})]],
'DeviceState' : [ 0x10, ['pointer64', ['_POP_DEVICE_SYS_STATE']]],
} ],
'PEPHANDLE__' : [ 0x4, {
'unused' : [ 0x0, ['long']],
} ],
'__unnamed_2691' : [ 0x4, {
'BaseMiddle' : [ 0x0, ['unsigned char']],
'Flags1' : [ 0x1, ['unsigned char']],
'Flags2' : [ 0x2, ['unsigned char']],
'BaseHigh' : [ 0x3, ['unsigned char']],
} ],
'__unnamed_2695' : [ 0x4, {
'BaseMiddle' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned long')]],
'Dpl' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned long')]],
'Present' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'LimitHigh' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'System' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'LongMode' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'DefaultBig' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'Granularity' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'BaseHigh' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_KGDTENTRY64' : [ 0x10, {
'LimitLow' : [ 0x0, ['unsigned short']],
'BaseLow' : [ 0x2, ['unsigned short']],
'Bytes' : [ 0x4, ['__unnamed_2691']],
'Bits' : [ 0x4, ['__unnamed_2695']],
'BaseUpper' : [ 0x8, ['unsigned long']],
'MustBeZero' : [ 0xc, ['unsigned long']],
'DataLow' : [ 0x0, ['long long']],
'DataHigh' : [ 0x8, ['long long']],
} ],
'_PNP_DEVICE_EVENT_LIST' : [ 0x88, {
'Status' : [ 0x0, ['long']],
'EventQueueMutex' : [ 0x8, ['_KMUTANT']],
'Lock' : [ 0x40, ['_FAST_MUTEX']],
'List' : [ 0x78, ['_LIST_ENTRY']],
} ],
'_IOV_IRP_TRACE' : [ 0x80, {
'Irp' : [ 0x0, ['pointer64', ['_IRP']]],
'Thread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'KernelApcDisable' : [ 0x10, ['short']],
'SpecialApcDisable' : [ 0x12, ['short']],
'CombinedApcDisable' : [ 0x10, ['unsigned long']],
'Irql' : [ 0x14, ['unsigned char']],
'StackTrace' : [ 0x18, ['array', 13, ['pointer64', ['void']]]],
} ],
'_MAILSLOT_CREATE_PARAMETERS' : [ 0x18, {
'MailslotQuota' : [ 0x0, ['unsigned long']],
'MaximumMessageSize' : [ 0x4, ['unsigned long']],
'ReadTimeout' : [ 0x8, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x10, ['unsigned char']],
} ],
'_PO_IRP_MANAGER' : [ 0x20, {
'DeviceIrpQueue' : [ 0x0, ['_PO_IRP_QUEUE']],
'SystemIrpQueue' : [ 0x10, ['_PO_IRP_QUEUE']],
} ],
'_DYNAMIC_FUNCTION_TABLE' : [ 0x58, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'FunctionTable' : [ 0x10, ['pointer64', ['_IMAGE_RUNTIME_FUNCTION_ENTRY']]],
'TimeStamp' : [ 0x18, ['_LARGE_INTEGER']],
'MinimumAddress' : [ 0x20, ['unsigned long long']],
'MaximumAddress' : [ 0x28, ['unsigned long long']],
'BaseAddress' : [ 0x30, ['unsigned long long']],
'Callback' : [ 0x38, ['pointer64', ['void']]],
'Context' : [ 0x40, ['pointer64', ['void']]],
'OutOfProcessCallbackDll' : [ 0x48, ['pointer64', ['unsigned short']]],
'Type' : [ 0x50, ['Enumeration', dict(target = 'long', choices = {0: 'RF_SORTED', 1: 'RF_UNSORTED', 2: 'RF_CALLBACK', 3: 'RF_KERNEL_DYNAMIC'})]],
'EntryCount' : [ 0x54, ['unsigned long']],
} ],
'_SEP_LOWBOX_HANDLES_TABLE' : [ 0x10, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'HashTable' : [ 0x8, ['pointer64', ['_RTL_DYNAMIC_HASH_TABLE']]],
} ],
'_PPM_FFH_THROTTLE_STATE_INFO' : [ 0x20, {
'EnableLogging' : [ 0x0, ['unsigned char']],
'MismatchCount' : [ 0x4, ['unsigned long']],
'Initialized' : [ 0x8, ['unsigned char']],
'LastValue' : [ 0x10, ['unsigned long long']],
'LastLogTickCount' : [ 0x18, ['_LARGE_INTEGER']],
} ],
'_PROC_IDLE_POLICY' : [ 0x6, {
'PromotePercent' : [ 0x0, ['unsigned char']],
'DemotePercent' : [ 0x1, ['unsigned char']],
'PromotePercentBase' : [ 0x2, ['unsigned char']],
'DemotePercentBase' : [ 0x3, ['unsigned char']],
'AllowScaling' : [ 0x4, ['unsigned char']],
'ForceLightIdle' : [ 0x5, ['unsigned char']],
} ],
'_CLIENT_ID64' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['unsigned long long']],
'UniqueThread' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_26b7' : [ 0x4, {
'PercentLevel' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_26b9' : [ 0x4, {
'Type' : [ 0x0, ['unsigned long']],
} ],
'_POP_ACTION_TRIGGER' : [ 0x18, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceWakeAlarm', 9: 'PolicyDeviceFan', 10: 'PolicyCsBatterySaver', 11: 'PolicyImmediateDozeS4Predicted', 12: 'PolicyImmediateDozeS4PredictedNoWake', 13: 'PolicyDeviceMax'})]],
'Flags' : [ 0x4, ['unsigned long']],
'Wait' : [ 0x8, ['pointer64', ['_POP_TRIGGER_WAIT']]],
'Battery' : [ 0x10, ['__unnamed_26b7']],
'Button' : [ 0x10, ['__unnamed_26b9']],
} ],
'_KDPC_DATA' : [ 0x28, {
'DpcList' : [ 0x0, ['_KDPC_LIST']],
'DpcLock' : [ 0x10, ['unsigned long long']],
'DpcQueueDepth' : [ 0x18, ['long']],
'DpcCount' : [ 0x1c, ['unsigned long']],
'ActiveDpc' : [ 0x20, ['pointer64', ['_KDPC']]],
} ],
'_NAMED_PIPE_CREATE_PARAMETERS' : [ 0x28, {
'NamedPipeType' : [ 0x0, ['unsigned long']],
'ReadMode' : [ 0x4, ['unsigned long']],
'CompletionMode' : [ 0x8, ['unsigned long']],
'MaximumInstances' : [ 0xc, ['unsigned long']],
'InboundQuota' : [ 0x10, ['unsigned long']],
'OutboundQuota' : [ 0x14, ['unsigned long']],
'DefaultTimeout' : [ 0x18, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x20, ['unsigned char']],
} ],
'_CM_BIG_DATA' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_KSCB' : [ 0x198, {
'GenerationCycles' : [ 0x0, ['unsigned long long']],
'MinQuotaCycleTarget' : [ 0x8, ['unsigned long long']],
'MaxQuotaCycleTarget' : [ 0x10, ['unsigned long long']],
'RankCycleTarget' : [ 0x18, ['unsigned long long']],
'LongTermCycles' : [ 0x20, ['unsigned long long']],
'LastReportedCycles' : [ 0x28, ['unsigned long long']],
'OverQuotaHistory' : [ 0x30, ['unsigned long long']],
'ReadyTime' : [ 0x38, ['unsigned long long']],
'InsertTime' : [ 0x40, ['unsigned long long']],
'PerProcessorList' : [ 0x48, ['_LIST_ENTRY']],
'QueueNode' : [ 0x58, ['_RTL_BALANCED_NODE']],
'Inserted' : [ 0x70, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'MaxOverQuota' : [ 0x70, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'MinOverQuota' : [ 0x70, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'RankBias' : [ 0x70, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'SoftCap' : [ 0x70, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'Spare1' : [ 0x70, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
'Depth' : [ 0x71, ['unsigned char']],
'ReadySummary' : [ 0x72, ['unsigned short']],
'Rank' : [ 0x74, ['unsigned long']],
'ReadyListHead' : [ 0x78, ['array', 16, ['_LIST_ENTRY']]],
'ChildScbQueue' : [ 0x178, ['_RTL_RB_TREE']],
'Parent' : [ 0x188, ['pointer64', ['_KSCB']]],
'Root' : [ 0x190, ['pointer64', ['_KSCB']]],
} ],
'__unnamed_26c8' : [ 0x10, {
'UserData' : [ 0x0, ['pointer64', ['void']]],
'Owner' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_26c9' : [ 0x10, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_RTLP_RANGE_LIST_ENTRY' : [ 0x38, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'Allocated' : [ 0x10, ['__unnamed_26c8']],
'Merged' : [ 0x10, ['__unnamed_26c9']],
'Attributes' : [ 0x20, ['unsigned char']],
'PublicFlags' : [ 0x21, ['unsigned char']],
'PrivateFlags' : [ 0x22, ['unsigned short']],
'ListEntry' : [ 0x28, ['_LIST_ENTRY']],
} ],
'_ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY' : [ 0x18, {
'ListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Packet' : [ 0x8, ['pointer64', ['_IO_MINI_COMPLETION_PACKET_USER']]],
'Lookaside' : [ 0x10, ['pointer64', ['_ALPC_COMPLETION_PACKET_LOOKASIDE']]],
} ],
'_PROC_PERF_HISTORY' : [ 0x24, {
'Count' : [ 0x0, ['unsigned long']],
'Slot' : [ 0x4, ['unsigned long']],
'UtilityTotal' : [ 0x8, ['unsigned long']],
'AffinitizedUtilityTotal' : [ 0xc, ['unsigned long']],
'FrequencyTotal' : [ 0x10, ['unsigned long']],
'TaggedPercentTotal' : [ 0x14, ['array', 2, ['unsigned long']]],
'HistoryList' : [ 0x1c, ['array', 1, ['_PROC_PERF_HISTORY_ENTRY']]],
} ],
'_MI_PARTITION_ZEROING' : [ 0x68, {
'PageEvent' : [ 0x0, ['_KEVENT']],
'ThreadActive' : [ 0x18, ['unsigned char']],
'ZeroFreePageSlistMinimum' : [ 0x1c, ['long']],
'FirstReservedZeroingPte' : [ 0x20, ['pointer64', ['_MMPTE']]],
'RebalanceZeroFreeWorkItem' : [ 0x28, ['_WORK_QUEUE_ITEM']],
'ThreadCount' : [ 0x48, ['long']],
'Gate' : [ 0x50, ['_KGATE']],
} ],
'_IMAGE_RUNTIME_FUNCTION_ENTRY' : [ 0xc, {
'BeginAddress' : [ 0x0, ['unsigned long']],
'EndAddress' : [ 0x4, ['unsigned long']],
'UnwindInfoAddress' : [ 0x8, ['unsigned long']],
'UnwindData' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_26d8' : [ 0x2, {
'AsUSHORT' : [ 0x0, ['unsigned short']],
'AllowScaling' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Disabled' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 16, native_type='unsigned short')]],
} ],
'PROCESSOR_IDLESTATE_POLICY' : [ 0x20, {
'Revision' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['__unnamed_26d8']],
'PolicyCount' : [ 0x4, ['unsigned long']],
'Policy' : [ 0x8, ['array', 3, ['PROCESSOR_IDLESTATE_INFO']]],
} ],
'_ACTIVATION_CONTEXT_STACK' : [ 0x28, {
'ActiveFrame' : [ 0x0, ['pointer64', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'FrameListCache' : [ 0x8, ['_LIST_ENTRY']],
'Flags' : [ 0x18, ['unsigned long']],
'NextCookieSequenceNumber' : [ 0x1c, ['unsigned long']],
'StackId' : [ 0x20, ['unsigned long']],
} ],
'_RTL_DRIVE_LETTER_CURDIR' : [ 0x18, {
'Flags' : [ 0x0, ['unsigned short']],
'Length' : [ 0x2, ['unsigned short']],
'TimeStamp' : [ 0x4, ['unsigned long']],
'DosPath' : [ 0x8, ['_STRING']],
} ],
'_PPM_IDLE_SYNCHRONIZATION_STATE' : [ 0x4, {
'AsLong' : [ 0x0, ['long']],
'RefCount' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='long')]],
'State' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_PPM_CONCURRENCY_ACCOUNTING' : [ 0x28, {
'Lock' : [ 0x0, ['unsigned long long']],
'Processors' : [ 0x8, ['unsigned long']],
'ActiveProcessors' : [ 0xc, ['unsigned long']],
'LastUpdateTime' : [ 0x10, ['unsigned long long']],
'TotalTime' : [ 0x18, ['unsigned long long']],
'AccumulatedTime' : [ 0x20, ['array', 1, ['unsigned long long']]],
} ],
'_DIAGNOSTIC_CONTEXT' : [ 0x20, {
'CallerType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'KernelRequester', 1: 'UserProcessRequester', 2: 'UserSharedServiceRequester'})]],
'Process' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'ServiceTag' : [ 0x10, ['unsigned long']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'ReasonSize' : [ 0x18, ['unsigned long long']],
} ],
'__unnamed_26f0' : [ 0x4, {
'MissedEtwRegistration' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_26f2' : [ 0x4, {
'Flags' : [ 0x0, ['__unnamed_26f0']],
'Whole' : [ 0x0, ['unsigned long']],
} ],
'_VF_TARGET_VERIFIED_DRIVER_DATA' : [ 0x110, {
'SuspectDriverEntry' : [ 0x0, ['pointer64', ['_VF_SUSPECT_DRIVER_ENTRY']]],
'WMICallback' : [ 0x8, ['pointer64', ['void']]],
'EtwHandlesListHead' : [ 0x10, ['_LIST_ENTRY']],
'u1' : [ 0x20, ['__unnamed_26f2']],
'Signature' : [ 0x28, ['unsigned long long']],
'PoolPageHeaders' : [ 0x30, ['_SLIST_HEADER']],
'PoolTrackers' : [ 0x40, ['_SLIST_HEADER']],
'CurrentPagedPoolAllocations' : [ 0x50, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x54, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x58, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x5c, ['unsigned long']],
'PagedBytes' : [ 0x60, ['unsigned long long']],
'NonPagedBytes' : [ 0x68, ['unsigned long long']],
'PeakPagedBytes' : [ 0x70, ['unsigned long long']],
'PeakNonPagedBytes' : [ 0x78, ['unsigned long long']],
'RaiseIrqls' : [ 0x80, ['unsigned long']],
'AcquireSpinLocks' : [ 0x84, ['unsigned long']],
'SynchronizeExecutions' : [ 0x88, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x8c, ['unsigned long']],
'AllocationsFailed' : [ 0x90, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x94, ['unsigned long']],
'LockedBytes' : [ 0x98, ['unsigned long long']],
'PeakLockedBytes' : [ 0xa0, ['unsigned long long']],
'MappedLockedBytes' : [ 0xa8, ['unsigned long long']],
'PeakMappedLockedBytes' : [ 0xb0, ['unsigned long long']],
'MappedIoSpaceBytes' : [ 0xb8, ['unsigned long long']],
'PeakMappedIoSpaceBytes' : [ 0xc0, ['unsigned long long']],
'PagesForMdlBytes' : [ 0xc8, ['unsigned long long']],
'PeakPagesForMdlBytes' : [ 0xd0, ['unsigned long long']],
'ContiguousMemoryBytes' : [ 0xd8, ['unsigned long long']],
'PeakContiguousMemoryBytes' : [ 0xe0, ['unsigned long long']],
'ContiguousMemoryListHead' : [ 0xe8, ['_LIST_ENTRY']],
'ExecutePoolTypes' : [ 0xf8, ['unsigned long']],
'ExecutePageProtections' : [ 0xfc, ['unsigned long']],
'ExecutePageMappings' : [ 0x100, ['unsigned long']],
'ExecuteWriteSections' : [ 0x104, ['unsigned long']],
'SectionAlignmentFailures' : [ 0x108, ['unsigned long']],
} ],
'_TRIAGE_DEVICE_NODE' : [ 0x58, {
'Sibling' : [ 0x0, ['pointer64', ['_TRIAGE_DEVICE_NODE']]],
'Child' : [ 0x8, ['pointer64', ['_TRIAGE_DEVICE_NODE']]],
'Parent' : [ 0x10, ['pointer64', ['_TRIAGE_DEVICE_NODE']]],
'LastChild' : [ 0x18, ['pointer64', ['_TRIAGE_DEVICE_NODE']]],
'PhysicalDeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'InstancePath' : [ 0x28, ['_UNICODE_STRING']],
'ServiceName' : [ 0x38, ['_UNICODE_STRING']],
'PendingIrp' : [ 0x48, ['pointer64', ['_IRP']]],
'FxDevice' : [ 0x50, ['pointer64', ['_TRIAGE_POP_FX_DEVICE']]],
} ],
'_PRIVATE_CACHE_MAP' : [ 0x78, {
'NodeTypeCode' : [ 0x0, ['short']],
'Flags' : [ 0x0, ['_PRIVATE_CACHE_MAP_FLAGS']],
'ReadAheadMask' : [ 0x4, ['unsigned long']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'FileOffset1' : [ 0x10, ['_LARGE_INTEGER']],
'BeyondLastByte1' : [ 0x18, ['_LARGE_INTEGER']],
'FileOffset2' : [ 0x20, ['_LARGE_INTEGER']],
'BeyondLastByte2' : [ 0x28, ['_LARGE_INTEGER']],
'SequentialReadCount' : [ 0x30, ['unsigned long']],
'ReadAheadLength' : [ 0x34, ['unsigned long']],
'ReadAheadOffset' : [ 0x38, ['_LARGE_INTEGER']],
'ReadAheadBeyondLastByte' : [ 0x40, ['_LARGE_INTEGER']],
'PrevReadAheadBeyondLastByte' : [ 0x48, ['unsigned long long']],
'ReadAheadSpinLock' : [ 0x50, ['unsigned long long']],
'PipelinedReadAheadRequestSize' : [ 0x58, ['unsigned long']],
'ReadAheadGrowth' : [ 0x5c, ['unsigned long']],
'PrivateLinks' : [ 0x60, ['_LIST_ENTRY']],
'ReadAheadWorkItem' : [ 0x70, ['pointer64', ['void']]],
} ],
'_CM_KEY_NODE' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'LastWriteTime' : [ 0x4, ['_LARGE_INTEGER']],
'AccessBits' : [ 0xc, ['unsigned long']],
'Parent' : [ 0x10, ['unsigned long']],
'SubKeyCounts' : [ 0x14, ['array', 2, ['unsigned long']]],
'SubKeyLists' : [ 0x1c, ['array', 2, ['unsigned long']]],
'ValueList' : [ 0x24, ['_CHILD_LIST']],
'ChildHiveReference' : [ 0x1c, ['_CM_KEY_REFERENCE']],
'Security' : [ 0x2c, ['unsigned long']],
'Class' : [ 0x30, ['unsigned long']],
'MaxNameLen' : [ 0x34, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'UserFlags' : [ 0x34, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'VirtControlFlags' : [ 0x34, ['BitField', dict(start_bit = 20, end_bit = 24, native_type='unsigned long')]],
'Debug' : [ 0x34, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'MaxClassLen' : [ 0x38, ['unsigned long']],
'MaxValueNameLen' : [ 0x3c, ['unsigned long']],
'MaxValueDataLen' : [ 0x40, ['unsigned long']],
'WorkVar' : [ 0x44, ['unsigned long']],
'NameLength' : [ 0x48, ['unsigned short']],
'ClassLength' : [ 0x4a, ['unsigned short']],
'Name' : [ 0x4c, ['array', 1, ['wchar']]],
} ],
'_AER_ROOTPORT_DESCRIPTOR_FLAGS' : [ 0x2, {
'UncorrectableErrorMaskRW' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'UncorrectableErrorSeverityRW' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'CorrectableErrorMaskRW' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'AdvancedCapsAndControlRW' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'RootErrorCommandRW' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 16, native_type='unsigned short')]],
'AsUSHORT' : [ 0x0, ['unsigned short']],
} ],
'_MI_SYSTEM_IMAGE_STATE' : [ 0xc0, {
'FixupLock' : [ 0x0, ['long']],
'FixupList' : [ 0x8, ['_LIST_ENTRY']],
'LoadLock' : [ 0x18, ['_KMUTANT']],
'FirstLoadEver' : [ 0x50, ['unsigned char']],
'LargePageAll' : [ 0x51, ['unsigned char']],
'LastPage' : [ 0x58, ['unsigned long long']],
'LargePageList' : [ 0x60, ['_LIST_ENTRY']],
'BeingDeleted' : [ 0x70, ['pointer64', ['_KLDR_DATA_TABLE_ENTRY']]],
'MappingRangesPushLock' : [ 0x78, ['_EX_PUSH_LOCK']],
'MappingRanges' : [ 0x80, ['array', 2, ['pointer64', ['_MI_DRIVER_VA']]]],
'PageCount' : [ 0x90, ['unsigned long long']],
'PageCounts' : [ 0x98, ['_MM_SYSTEM_PAGE_COUNTS']],
'CollidedLock' : [ 0xa8, ['_EX_PUSH_LOCK']],
'ErrataPte' : [ 0xb0, ['pointer64', ['_MMPTE']]],
'ErrataPteMapped' : [ 0xb8, ['unsigned long']],
} ],
'_PTE_TRACKER' : [ 0x80, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Mdl' : [ 0x10, ['pointer64', ['_MDL']]],
'Count' : [ 0x18, ['unsigned long long']],
'SystemVa' : [ 0x20, ['pointer64', ['void']]],
'StartVa' : [ 0x28, ['pointer64', ['void']]],
'Offset' : [ 0x30, ['unsigned long']],
'Length' : [ 0x34, ['unsigned long']],
'Page' : [ 0x38, ['unsigned long long']],
'IoMapping' : [ 0x40, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Matched' : [ 0x40, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'CacheAttribute' : [ 0x40, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'GuardPte' : [ 0x40, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Spare' : [ 0x40, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'StackTrace' : [ 0x48, ['array', 7, ['pointer64', ['void']]]],
} ],
'_HV_GET_CELL_CONTEXT' : [ 0x4, {
'Cell' : [ 0x0, ['unsigned long']],
'IsInTempBin' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'_KTHREAD_COUNTERS' : [ 0x1a8, {
'WaitReasonBitMap' : [ 0x0, ['unsigned long long']],
'UserData' : [ 0x8, ['pointer64', ['_THREAD_PERFORMANCE_DATA']]],
'Flags' : [ 0x10, ['unsigned long']],
'ContextSwitches' : [ 0x14, ['unsigned long']],
'CycleTimeBias' : [ 0x18, ['unsigned long long']],
'HardwareCounters' : [ 0x20, ['unsigned long long']],
'HwCounter' : [ 0x28, ['array', 16, ['_COUNTER_READING']]],
} ],
'_SHARED_CACHE_MAP_LIST_CURSOR' : [ 0x18, {
'SharedCacheMapLinks' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x10, ['unsigned long']],
} ],
'__unnamed_2724' : [ 0x2, {
'SignatureLevel' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned short')]],
'SignatureType' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 7, native_type='unsigned short')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 16, native_type='unsigned short')]],
'EntireField' : [ 0x0, ['unsigned short']],
} ],
'_KLDR_DATA_TABLE_ENTRY' : [ 0xa0, {
'InLoadOrderLinks' : [ 0x0, ['_LIST_ENTRY']],
'ExceptionTable' : [ 0x10, ['pointer64', ['void']]],
'ExceptionTableSize' : [ 0x18, ['unsigned long']],
'GpValue' : [ 0x20, ['pointer64', ['void']]],
'NonPagedDebugInfo' : [ 0x28, ['pointer64', ['_NON_PAGED_DEBUG_INFO']]],
'DllBase' : [ 0x30, ['pointer64', ['void']]],
'EntryPoint' : [ 0x38, ['pointer64', ['void']]],
'SizeOfImage' : [ 0x40, ['unsigned long']],
'FullDllName' : [ 0x48, ['_UNICODE_STRING']],
'BaseDllName' : [ 0x58, ['_UNICODE_STRING']],
'Flags' : [ 0x68, ['unsigned long']],
'LoadCount' : [ 0x6c, ['unsigned short']],
'u1' : [ 0x6e, ['__unnamed_2724']],
'SectionPointer' : [ 0x70, ['pointer64', ['void']]],
'CheckSum' : [ 0x78, ['unsigned long']],
'CoverageSectionSize' : [ 0x7c, ['unsigned long']],
'CoverageSection' : [ 0x80, ['pointer64', ['void']]],
'LoadedImports' : [ 0x88, ['pointer64', ['void']]],
'Spare' : [ 0x90, ['pointer64', ['void']]],
'SizeOfImageNotRounded' : [ 0x98, ['unsigned long']],
'TimeDateStamp' : [ 0x9c, ['unsigned long']],
} ],
'_DBGKD_GET_VERSION64' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned char']],
'KdSecondaryVersion' : [ 0x5, ['unsigned char']],
'Flags' : [ 0x6, ['unsigned short']],
'MachineType' : [ 0x8, ['unsigned short']],
'MaxPacketType' : [ 0xa, ['unsigned char']],
'MaxStateChange' : [ 0xb, ['unsigned char']],
'MaxManipulate' : [ 0xc, ['unsigned char']],
'Simulation' : [ 0xd, ['unsigned char']],
'Unused' : [ 0xe, ['array', 1, ['unsigned short']]],
'KernBase' : [ 0x10, ['unsigned long long']],
'PsLoadedModuleList' : [ 0x18, ['unsigned long long']],
'DebuggerDataList' : [ 0x20, ['unsigned long long']],
} ],
'_PROC_FEEDBACK_COUNTER' : [ 0x30, {
'InstantaneousRead' : [ 0x0, ['pointer64', ['void']]],
'DifferentialRead' : [ 0x0, ['pointer64', ['void']]],
'LastActualCount' : [ 0x8, ['unsigned long long']],
'LastReferenceCount' : [ 0x10, ['unsigned long long']],
'CachedValue' : [ 0x18, ['unsigned long']],
'Affinitized' : [ 0x20, ['unsigned char']],
'Differential' : [ 0x21, ['unsigned char']],
'Scaling' : [ 0x22, ['unsigned char']],
'Context' : [ 0x28, ['unsigned long long']],
} ],
'_PPM_COORDINATED_SYNCHRONIZATION' : [ 0x4, {
'AsLong' : [ 0x0, ['long']],
'EnterProcessor' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 12, native_type='unsigned long')]],
'ExitProcessor' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 24, native_type='unsigned long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 26, native_type='unsigned long')]],
'Entered' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'EntryPriority' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 32, native_type='unsigned long')]],
} ],
'_MI_PAGING_IO_STATE' : [ 0x50, {
'PageFileHead' : [ 0x0, ['_RTL_AVL_TREE']],
'PageFileHeadSpinLock' : [ 0x8, ['long']],
'PrefetchSeekThreshold' : [ 0xc, ['long']],
'InPageSupportSListHead' : [ 0x10, ['array', 2, ['_SLIST_HEADER']]],
'InPageSupportSListMinimum' : [ 0x30, ['array', 2, ['unsigned char']]],
'InPageSinglePages' : [ 0x34, ['unsigned long']],
'DelayPageFaults' : [ 0x38, ['long']],
'FileCompressionBoundary' : [ 0x3c, ['unsigned long']],
'MdlsAdjusted' : [ 0x40, ['unsigned char']],
} ],
'_PROCESSOR_PLATFORM_STATE_RESIDENCIES' : [ 0x18, {
'Count' : [ 0x0, ['unsigned long']],
'States' : [ 0x8, ['array', 1, ['_PROCESSOR_PLATFORM_STATE_RESIDENCY']]],
} ],
'_MI_FORCED_COMMITS' : [ 0x8, {
'Regular' : [ 0x0, ['unsigned long']],
'Wrap' : [ 0x4, ['unsigned long']],
} ],
'_HMAP_ENTRY' : [ 0x28, {
'BlockOffset' : [ 0x0, ['unsigned long long']],
'PermanentBinAddress' : [ 0x8, ['unsigned long long']],
'TemporaryBinAddress' : [ 0x10, ['unsigned long long']],
'TemporaryBinRundown' : [ 0x18, ['_EX_RUNDOWN_REF']],
'MemAlloc' : [ 0x20, ['unsigned long']],
} ],
'_RTL_ATOM_TABLE_ENTRY' : [ 0x30, {
'HashLink' : [ 0x0, ['pointer64', ['_RTL_ATOM_TABLE_ENTRY']]],
'HandleIndex' : [ 0x8, ['unsigned short']],
'Atom' : [ 0xa, ['unsigned short']],
'Reference' : [ 0x10, ['_RTL_ATOM_TABLE_REFERENCE']],
'NameLength' : [ 0x28, ['unsigned char']],
'Name' : [ 0x2a, ['array', 1, ['wchar']]],
} ],
'_PLATFORM_IDLE_ACCOUNTING' : [ 0x400, {
'ResetCount' : [ 0x0, ['unsigned long']],
'StateCount' : [ 0x4, ['unsigned long']],
'DeepSleepCount' : [ 0x8, ['unsigned long']],
'TimeUnit' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'PpmIdleBucketTimeInQpc', 1: 'PpmIdleBucketTimeIn100ns', 2: 'PpmIdleBucketTimeMaximum'})]],
'StartTime' : [ 0x10, ['unsigned long long']],
'State' : [ 0x18, ['array', 1, ['_PLATFORM_IDLE_STATE_ACCOUNTING']]],
} ],
'_TXN_PARAMETER_BLOCK' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'TxFsContext' : [ 0x2, ['unsigned short']],
'TransactionObject' : [ 0x8, ['pointer64', ['void']]],
} ],
'_DUAL' : [ 0x278, {
'Length' : [ 0x0, ['unsigned long']],
'Map' : [ 0x8, ['pointer64', ['_HMAP_DIRECTORY']]],
'SmallDir' : [ 0x10, ['pointer64', ['_HMAP_TABLE']]],
'Guard' : [ 0x18, ['unsigned long']],
'FreeDisplay' : [ 0x20, ['array', 24, ['_FREE_DISPLAY']]],
'FreeBins' : [ 0x260, ['_LIST_ENTRY']],
'FreeSummary' : [ 0x270, ['unsigned long']],
} ],
'_MI_VAD_SEQUENTIAL_INFO' : [ 0x8, {
'Length' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 12, native_type='unsigned long long')]],
'Vpn' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_2753' : [ 0x4, {
'ImagePteOffset' : [ 0x0, ['unsigned long']],
'TossPage' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_2756' : [ 0x4, {
'e1' : [ 0x0, ['_MMINPAGE_FLAGS']],
'LongFlags' : [ 0x0, ['unsigned long']],
} ],
'_MMINPAGE_SUPPORT' : [ 0x1b0, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'ListHead' : [ 0x10, ['_LIST_ENTRY']],
'Event' : [ 0x20, ['_KEVENT']],
'CollidedEvent' : [ 0x38, ['_KEVENT']],
'IoStatus' : [ 0x50, ['_IO_STATUS_BLOCK']],
'ReadOffset' : [ 0x60, ['_LARGE_INTEGER']],
'ApcState' : [ 0x68, ['_KAPC_STATE']],
'Thread' : [ 0x98, ['pointer64', ['_ETHREAD']]],
'LockedProtoPfn' : [ 0xa0, ['pointer64', ['_MMPFN']]],
'PteContents' : [ 0xa8, ['_MMPTE']],
'WaitCount' : [ 0xb0, ['long']],
'ByteCount' : [ 0xb4, ['unsigned long']],
'u3' : [ 0xb8, ['__unnamed_2753']],
'u1' : [ 0xbc, ['__unnamed_2756']],
'FilePointer' : [ 0xc0, ['pointer64', ['_FILE_OBJECT']]],
'ControlArea' : [ 0xc8, ['pointer64', ['_CONTROL_AREA']]],
'Subsection' : [ 0xc8, ['pointer64', ['_SUBSECTION']]],
'Autoboost' : [ 0xd0, ['pointer64', ['void']]],
'FaultingAddress' : [ 0xd8, ['pointer64', ['void']]],
'PointerPte' : [ 0xe0, ['pointer64', ['_MMPTE']]],
'BasePte' : [ 0xe8, ['pointer64', ['_MMPTE']]],
'Pfn' : [ 0xf0, ['pointer64', ['_MMPFN']]],
'PrefetchMdl' : [ 0xf8, ['pointer64', ['_MDL']]],
'Mdl' : [ 0x100, ['_MDL']],
'Page' : [ 0x130, ['array', 16, ['unsigned long long']]],
'FlowThrough' : [ 0x130, ['_MMINPAGE_SUPPORT_FLOW_THROUGH']],
} ],
'_HAL_NODE_RANGE' : [ 0x10, {
'PageFrameIndex' : [ 0x0, ['unsigned long long']],
'Node' : [ 0x8, ['unsigned long']],
} ],
'_MMCLONE_BLOCK' : [ 0x20, {
'ProtoPte' : [ 0x0, ['_MMPTE']],
'PaddingFor16ByteAlignment' : [ 0x8, ['unsigned long long']],
'CloneCommitCount' : [ 0x10, ['unsigned long long']],
'u1' : [ 0x10, ['_MI_CLONE_BLOCK_FLAGS']],
'CloneRefCount' : [ 0x18, ['unsigned long long']],
} ],
'_PS_TRUSTLET_TKSESSION_ID' : [ 0x20, {
'SessionId' : [ 0x0, ['array', 4, ['unsigned long long']]],
} ],
'_PNP_DEVICE_ACTION_ENTRY' : [ 0x48, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'DeviceObject' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'RequestType' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'AssignResources', 1: 'ClearDeviceProblem', 2: 'ClearProblem', 3: 'ClearEjectProblem', 4: 'HaltDevice', 5: 'QueryPowerRelations', 6: 'Rebalance', 7: 'ReenumerateBootDevices', 8: 'ReenumerateDeviceOnly', 9: 'ReenumerateDeviceTree', 10: 'ReenumerateRootDevices', 11: 'RequeryDeviceState', 12: 'ResetDevice', 13: 'ResourceRequirementsChanged', 14: 'RestartEnumeration', 15: 'SetDeviceProblem', 16: 'StartDevice', 17: 'StartSystemDevicesPass0', 18: 'StartSystemDevicesPass1', 19: 'NotifyTransportRelationsChange', 20: 'NotifyEjectionRelationsChange', 21: 'ConfigureDevice', 22: 'ConfigureDeviceClass', 23: 'ConfigureDeviceExtensions', 24: 'ConfigureDeviceReset'})]],
'ReorderingBarrier' : [ 0x1c, ['unsigned char']],
'RequestArgument' : [ 0x20, ['unsigned long long']],
'CompletionEvent' : [ 0x28, ['pointer64', ['_KEVENT']]],
'CompletionStatus' : [ 0x30, ['pointer64', ['long']]],
'ActivityId' : [ 0x38, ['_GUID']],
} ],
'_SEP_LOWBOX_NUMBER_ENTRY' : [ 0x38, {
'HashEntry' : [ 0x0, ['_RTL_DYNAMIC_HASH_TABLE_ENTRY']],
'ReferenceCount' : [ 0x18, ['long long']],
'PackageSid' : [ 0x20, ['pointer64', ['void']]],
'LowboxNumber' : [ 0x28, ['unsigned long']],
'AtomTable' : [ 0x30, ['pointer64', ['void']]],
} ],
'_MI_LDW_WORK_CONTEXT' : [ 0x38, {
'WorkItem' : [ 0x0, ['_WORK_QUEUE_ITEM']],
'FileObject' : [ 0x20, ['pointer64', ['_FILE_OBJECT']]],
'ErrorStatus' : [ 0x28, ['long']],
'Active' : [ 0x2c, ['long']],
'FreeWhenDone' : [ 0x30, ['unsigned char']],
} ],
'_MI_CFG_BITMAP_INFO' : [ 0x18, {
'BaseAddress' : [ 0x0, ['pointer64', ['void']]],
'RegionSize' : [ 0x8, ['unsigned long long']],
'BitmapVad' : [ 0x10, ['pointer64', ['_MMVAD']]],
} ],
'_COUNTER_READING' : [ 0x18, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PMCCounter', 1: 'MaxHardwareCounterType'})]],
'Index' : [ 0x4, ['unsigned long']],
'Start' : [ 0x8, ['unsigned long long']],
'Total' : [ 0x10, ['unsigned long long']],
} ],
'_MI_SHUTDOWN_STATE' : [ 0x80, {
'CrashDumpInitialized' : [ 0x0, ['unsigned char']],
'ConnectedStandbyActive' : [ 0x1, ['unsigned char']],
'SystemShutdown' : [ 0x4, ['unsigned long']],
'ShutdownFlushInProgress' : [ 0x8, ['long']],
'ResumeItem' : [ 0x10, ['_MI_RESUME_WORKITEM']],
'MirrorHoldsPfn' : [ 0x48, ['pointer64', ['_ETHREAD']]],
'MirroringActive' : [ 0x50, ['unsigned long']],
'MirrorBitMaps' : [ 0x58, ['array', 2, ['_RTL_BITMAP_EX']]],
'CrashDumpPte' : [ 0x78, ['pointer64', ['_MMPTE']]],
} ],
'_SECTION_IMAGE_INFORMATION' : [ 0x40, {
'TransferAddress' : [ 0x0, ['pointer64', ['void']]],
'ZeroBits' : [ 0x8, ['unsigned long']],
'MaximumStackSize' : [ 0x10, ['unsigned long long']],
'CommittedStackSize' : [ 0x18, ['unsigned long long']],
'SubSystemType' : [ 0x20, ['unsigned long']],
'SubSystemMinorVersion' : [ 0x24, ['unsigned short']],
'SubSystemMajorVersion' : [ 0x26, ['unsigned short']],
'SubSystemVersion' : [ 0x24, ['unsigned long']],
'MajorOperatingSystemVersion' : [ 0x28, ['unsigned short']],
'MinorOperatingSystemVersion' : [ 0x2a, ['unsigned short']],
'OperatingSystemVersion' : [ 0x28, ['unsigned long']],
'ImageCharacteristics' : [ 0x2c, ['unsigned short']],
'DllCharacteristics' : [ 0x2e, ['unsigned short']],
'Machine' : [ 0x30, ['unsigned short']],
'ImageContainsCode' : [ 0x32, ['unsigned char']],
'ImageFlags' : [ 0x33, ['unsigned char']],
'ComPlusNativeReady' : [ 0x33, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ComPlusILOnly' : [ 0x33, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ImageDynamicallyRelocated' : [ 0x33, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'ImageMappedFlat' : [ 0x33, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'BaseBelow4gb' : [ 0x33, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ComPlusPrefer32bit' : [ 0x33, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Reserved' : [ 0x33, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'LoaderFlags' : [ 0x34, ['unsigned long']],
'ImageFileSize' : [ 0x38, ['unsigned long']],
'CheckSum' : [ 0x3c, ['unsigned long']],
} ],
'_ETW_REG_ENTRY' : [ 0x70, {
'RegList' : [ 0x0, ['_LIST_ENTRY']],
'GroupRegList' : [ 0x10, ['_LIST_ENTRY']],
'GuidEntry' : [ 0x20, ['pointer64', ['_ETW_GUID_ENTRY']]],
'GroupEntry' : [ 0x28, ['pointer64', ['_ETW_GUID_ENTRY']]],
'ReplyQueue' : [ 0x30, ['pointer64', ['_ETW_REPLY_QUEUE']]],
'ReplySlot' : [ 0x30, ['array', 4, ['pointer64', ['_ETW_QUEUE_ENTRY']]]],
'Caller' : [ 0x30, ['pointer64', ['void']]],
'SessionId' : [ 0x38, ['unsigned long']],
'Process' : [ 0x50, ['pointer64', ['_EPROCESS']]],
'CallbackContext' : [ 0x50, ['pointer64', ['void']]],
'Callback' : [ 0x58, ['pointer64', ['void']]],
'Index' : [ 0x60, ['unsigned short']],
'Flags' : [ 0x62, ['unsigned char']],
'DbgKernelRegistration' : [ 0x62, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'DbgUserRegistration' : [ 0x62, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'DbgReplyRegistration' : [ 0x62, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'DbgClassicRegistration' : [ 0x62, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'DbgSessionSpaceRegistration' : [ 0x62, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'DbgModernRegistration' : [ 0x62, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'DbgClosed' : [ 0x62, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'DbgInserted' : [ 0x62, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'EnableMask' : [ 0x63, ['unsigned char']],
'GroupEnableMask' : [ 0x64, ['unsigned char']],
'UseDescriptorType' : [ 0x65, ['unsigned char']],
'Traits' : [ 0x68, ['pointer64', ['_ETW_PROVIDER_TRAITS']]],
} ],
'_LPCP_PORT_OBJECT' : [ 0x100, {
'ConnectionPort' : [ 0x0, ['pointer64', ['_LPCP_PORT_OBJECT']]],
'ConnectedPort' : [ 0x8, ['pointer64', ['_LPCP_PORT_OBJECT']]],
'MsgQueue' : [ 0x10, ['_LPCP_PORT_QUEUE']],
'Creator' : [ 0x30, ['_CLIENT_ID']],
'ClientSectionBase' : [ 0x40, ['pointer64', ['void']]],
'ServerSectionBase' : [ 0x48, ['pointer64', ['void']]],
'PortContext' : [ 0x50, ['pointer64', ['void']]],
'ClientThread' : [ 0x58, ['pointer64', ['_ETHREAD']]],
'SecurityQos' : [ 0x60, ['_SECURITY_QUALITY_OF_SERVICE']],
'StaticSecurity' : [ 0x70, ['_SECURITY_CLIENT_CONTEXT']],
'LpcReplyChainHead' : [ 0xb8, ['_LIST_ENTRY']],
'LpcDataInfoChainHead' : [ 0xc8, ['_LIST_ENTRY']],
'ServerProcess' : [ 0xd8, ['pointer64', ['_EPROCESS']]],
'MappingProcess' : [ 0xd8, ['pointer64', ['_EPROCESS']]],
'MaxMessageLength' : [ 0xe0, ['unsigned short']],
'MaxConnectionInfoLength' : [ 0xe2, ['unsigned short']],
'Flags' : [ 0xe4, ['unsigned long']],
'WaitEvent' : [ 0xe8, ['_KEVENT']],
} ],
'_HVIEW_MAP_PIN_LOG' : [ 0x488, {
'Next' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
'Entries' : [ 0x8, ['array', 16, ['_HVIEW_MAP_PIN_LOG_ENTRY']]],
} ],
'_ARBITER_LIST_ENTRY' : [ 0x60, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'AlternativeCount' : [ 0x10, ['unsigned long']],
'Alternatives' : [ 0x18, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'PhysicalDeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'RequestSource' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Flags' : [ 0x2c, ['unsigned long']],
'WorkSpace' : [ 0x30, ['long long']],
'InterfaceType' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'ACPIBus', 18: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'SlotNumber' : [ 0x3c, ['unsigned long']],
'BusNumber' : [ 0x40, ['unsigned long']],
'Assignment' : [ 0x48, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
'SelectedAlternative' : [ 0x50, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'Result' : [ 0x58, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterResultSuccess', 1: 'ArbiterResultExternalConflict', 2: 'ArbiterResultNullRequest', -1: 'ArbiterResultUndefined'})]],
} ],
'_MI_PROBE_RAISE_TRACKER' : [ 0x40, {
'UserRangeInKernel' : [ 0x0, ['unsigned long']],
'FaultFailed' : [ 0x4, ['unsigned long']],
'WriteFaultFailed' : [ 0x8, ['unsigned long']],
'LargePageFailed' : [ 0xc, ['unsigned long']],
'UserAccessToKernelPte' : [ 0x10, ['unsigned long']],
'BadPageLocation' : [ 0x14, ['unsigned long']],
'InsufficientCharge' : [ 0x18, ['unsigned long']],
'PageTableCharge' : [ 0x1c, ['unsigned long']],
'NoPhysicalMapping' : [ 0x20, ['unsigned long']],
'NoIoReference' : [ 0x24, ['unsigned long']],
'ProbeFailed' : [ 0x28, ['unsigned long']],
'PteIsZero' : [ 0x2c, ['unsigned long']],
'StrongCodeWrite' : [ 0x30, ['unsigned long']],
'ReducedCloneCommitChargeFailed' : [ 0x34, ['unsigned long']],
'CopyOnWriteAtDispatchNoPages' : [ 0x38, ['unsigned long']],
'EnclavePageFailed' : [ 0x3c, ['unsigned long']],
} ],
'_ETW_PROVIDER_TRAITS' : [ 0x20, {
'Node' : [ 0x0, ['_RTL_BALANCED_NODE']],
'ReferenceCount' : [ 0x18, ['unsigned long']],
'Traits' : [ 0x1c, ['array', 1, ['unsigned char']]],
} ],
'_INTERRUPT_CONNECTION_DATA' : [ 0x60, {
'Count' : [ 0x0, ['unsigned long']],
'Vectors' : [ 0x8, ['array', 1, ['_INTERRUPT_VECTOR_DATA']]],
} ],
'_MI_CLONE_BLOCK_FLAGS' : [ 0x8, {
'ActualCloneCommit' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 59, native_type='unsigned long long')]],
'CloneProtection' : [ 0x0, ['BitField', dict(start_bit = 59, end_bit = 64, native_type='unsigned long long')]],
} ],
'_LDR_DATA_TABLE_ENTRY' : [ 0x118, {
'InLoadOrderLinks' : [ 0x0, ['_LIST_ENTRY']],
'InMemoryOrderLinks' : [ 0x10, ['_LIST_ENTRY']],
'InInitializationOrderLinks' : [ 0x20, ['_LIST_ENTRY']],
'DllBase' : [ 0x30, ['pointer64', ['void']]],
'EntryPoint' : [ 0x38, ['pointer64', ['void']]],
'SizeOfImage' : [ 0x40, ['unsigned long']],
'FullDllName' : [ 0x48, ['_UNICODE_STRING']],
'BaseDllName' : [ 0x58, ['_UNICODE_STRING']],
'FlagGroup' : [ 0x68, ['array', 4, ['unsigned char']]],
'Flags' : [ 0x68, ['unsigned long']],
'PackagedBinary' : [ 0x68, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'MarkedForRemoval' : [ 0x68, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ImageDll' : [ 0x68, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'LoadNotificationsSent' : [ 0x68, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'TelemetryEntryProcessed' : [ 0x68, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ProcessStaticImport' : [ 0x68, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'InLegacyLists' : [ 0x68, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'InIndexes' : [ 0x68, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ShimDll' : [ 0x68, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'InExceptionTable' : [ 0x68, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ReservedFlags1' : [ 0x68, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
'LoadInProgress' : [ 0x68, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'LoadConfigProcessed' : [ 0x68, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'EntryProcessed' : [ 0x68, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'ProtectDelayLoad' : [ 0x68, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ReservedFlags3' : [ 0x68, ['BitField', dict(start_bit = 16, end_bit = 18, native_type='unsigned long')]],
'DontCallForThreads' : [ 0x68, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'ProcessAttachCalled' : [ 0x68, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'ProcessAttachFailed' : [ 0x68, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'CorDeferredValidate' : [ 0x68, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'CorImage' : [ 0x68, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'DontRelocate' : [ 0x68, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'CorILOnly' : [ 0x68, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'ReservedFlags5' : [ 0x68, ['BitField', dict(start_bit = 25, end_bit = 28, native_type='unsigned long')]],
'Redirected' : [ 0x68, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'ReservedFlags6' : [ 0x68, ['BitField', dict(start_bit = 29, end_bit = 31, native_type='unsigned long')]],
'CompatDatabaseProcessed' : [ 0x68, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'ObsoleteLoadCount' : [ 0x6c, ['unsigned short']],
'TlsIndex' : [ 0x6e, ['unsigned short']],
'HashLinks' : [ 0x70, ['_LIST_ENTRY']],
'TimeDateStamp' : [ 0x80, ['unsigned long']],
'EntryPointActivationContext' : [ 0x88, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'Lock' : [ 0x90, ['pointer64', ['void']]],
'DdagNode' : [ 0x98, ['pointer64', ['_LDR_DDAG_NODE']]],
'NodeModuleLink' : [ 0xa0, ['_LIST_ENTRY']],
'LoadContext' : [ 0xb0, ['pointer64', ['_LDRP_LOAD_CONTEXT']]],
'ParentDllBase' : [ 0xb8, ['pointer64', ['void']]],
'SwitchBackContext' : [ 0xc0, ['pointer64', ['void']]],
'BaseAddressIndexNode' : [ 0xc8, ['_RTL_BALANCED_NODE']],
'MappingInfoIndexNode' : [ 0xe0, ['_RTL_BALANCED_NODE']],
'OriginalBase' : [ 0xf8, ['unsigned long long']],
'LoadTime' : [ 0x100, ['_LARGE_INTEGER']],
'BaseNameHashValue' : [ 0x108, ['unsigned long']],
'LoadReason' : [ 0x10c, ['Enumeration', dict(target = 'long', choices = {0: 'LoadReasonStaticDependency', 1: 'LoadReasonStaticForwarderDependency', 2: 'LoadReasonDynamicForwarderDependency', 3: 'LoadReasonDelayloadDependency', 4: 'LoadReasonDynamicLoad', 5: 'LoadReasonAsImageLoad', 6: 'LoadReasonAsDataLoad', -1: 'LoadReasonUnknown'})]],
'ImplicitPathOptions' : [ 0x110, ['unsigned long']],
'ReferenceCount' : [ 0x114, ['unsigned long']],
} ],
'_CACHED_KSTACK_LIST' : [ 0x20, {
'SListHead' : [ 0x0, ['_SLIST_HEADER']],
'MinimumFree' : [ 0x10, ['long']],
'Misses' : [ 0x14, ['unsigned long']],
'MissesLast' : [ 0x18, ['unsigned long']],
'AllStacksInUse' : [ 0x1c, ['unsigned long']],
} ],
'_MMINPAGE_FLAGS' : [ 0x4, {
'InjectRetry' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'GetExtents' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'CrossThreadPadding' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'PrefetchSystemVmType' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned char')]],
'VaPrefetchReadBlock' : [ 0x1, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'CollidedFlowThrough' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ForceCollisions' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'InPageExpanded' : [ 0x1, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'IssuedAtLowPriority' : [ 0x1, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'FaultFromStore' : [ 0x1, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'PagePriority' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'PerformRelocations' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ClusteredPagePriority' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 7, native_type='unsigned char')]],
'MakeClusterValid' : [ 0x2, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'ZeroLastPage' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'UserFault' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'StandbyProtectionNeeded' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'PteChanged' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'PageFileFault' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'PageFilePageHashActive' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'CoalescedIo' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'VmLockNotNeeded' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'_MI_DRIVER_VA' : [ 0x28, {
'Next' : [ 0x0, ['pointer64', ['_MI_DRIVER_VA']]],
'PointerPte' : [ 0x8, ['pointer64', ['_MMPTE']]],
'BitMap' : [ 0x10, ['_RTL_BITMAP']],
'Hint' : [ 0x20, ['unsigned long']],
} ],
'_LDR_DDAG_NODE' : [ 0x50, {
'Modules' : [ 0x0, ['_LIST_ENTRY']],
'ServiceTagList' : [ 0x10, ['pointer64', ['_LDR_SERVICE_TAG_RECORD']]],
'LoadCount' : [ 0x18, ['unsigned long']],
'LoadWhileUnloadingCount' : [ 0x1c, ['unsigned long']],
'LowestLink' : [ 0x20, ['unsigned long']],
'Dependencies' : [ 0x28, ['_LDRP_CSLIST']],
'IncomingDependencies' : [ 0x30, ['_LDRP_CSLIST']],
'State' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'LdrModulesPlaceHolder', 1: 'LdrModulesMapping', 2: 'LdrModulesMapped', 3: 'LdrModulesWaitingForDependencies', 4: 'LdrModulesSnapping', 5: 'LdrModulesSnapped', 6: 'LdrModulesCondensed', 7: 'LdrModulesReadyToInit', 8: 'LdrModulesInitializing', 9: 'LdrModulesReadyToRun', '\xfb': 'LdrModulesMerged', '\xfd': 'LdrModulesSnapError', '\xfc': 'LdrModulesInitError', -1: 'LdrModulesUnloading', '\xfe': 'LdrModulesUnloaded'})]],
'CondenseLink' : [ 0x40, ['_SINGLE_LIST_ENTRY']],
'PreorderNumber' : [ 0x48, ['unsigned long']],
} ],
'_POP_DEVICE_SYS_STATE' : [ 0x1d0, {
'IrpMinor' : [ 0x0, ['unsigned char']],
'SystemState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SpinLock' : [ 0x8, ['unsigned long long']],
'Thread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'AbortEvent' : [ 0x18, ['pointer64', ['_KEVENT']]],
'ReadySemaphore' : [ 0x20, ['pointer64', ['_KSEMAPHORE']]],
'FinishedSemaphore' : [ 0x28, ['pointer64', ['_KSEMAPHORE']]],
'Order' : [ 0x30, ['_PO_DEVICE_NOTIFY_ORDER']],
'Pending' : [ 0x1a8, ['_LIST_ENTRY']],
'Status' : [ 0x1b8, ['long']],
'FailedDevice' : [ 0x1c0, ['pointer64', ['_DEVICE_OBJECT']]],
'Waking' : [ 0x1c8, ['unsigned char']],
'Cancelled' : [ 0x1c9, ['unsigned char']],
'IgnoreErrors' : [ 0x1ca, ['unsigned char']],
'IgnoreNotImplemented' : [ 0x1cb, ['unsigned char']],
'TimeRefreshLockAcquired' : [ 0x1cc, ['unsigned char']],
} ],
'_KHETERO_PROCESSOR_SET' : [ 0x10, {
'PreferredMask' : [ 0x0, ['unsigned long long']],
'AvailableMask' : [ 0x8, ['unsigned long long']],
} ],
'_VF_KE_CRITICAL_REGION_TRACE' : [ 0x40, {
'Thread' : [ 0x0, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x8, ['array', 7, ['pointer64', ['void']]]],
} ],
'_LOGGED_STREAM_CALLBACK_V1' : [ 0x10, {
'LogHandle' : [ 0x0, ['pointer64', ['void']]],
'FlushToLsnRoutine' : [ 0x8, ['pointer64', ['void']]],
} ],
'_DIAGNOSTIC_BUFFER' : [ 0x28, {
'Size' : [ 0x0, ['unsigned long long']],
'CallerType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'KernelRequester', 1: 'UserProcessRequester', 2: 'UserSharedServiceRequester'})]],
'ProcessImageNameOffset' : [ 0x10, ['unsigned long long']],
'ProcessId' : [ 0x18, ['unsigned long']],
'ServiceTag' : [ 0x1c, ['unsigned long']],
'DeviceDescriptionOffset' : [ 0x10, ['unsigned long long']],
'DevicePathOffset' : [ 0x18, ['unsigned long long']],
'ReasonOffset' : [ 0x20, ['unsigned long long']],
} ],
'_KWAIT_CHAIN_ENTRY' : [ 0x20, {
'ListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Event' : [ 0x8, ['_KEVENT']],
} ],
'__unnamed_27d1' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'SubsectionFlags' : [ 0x0, ['_MMSUBSECTION_FLAGS']],
} ],
'__unnamed_27d3' : [ 0x4, {
'NumberOfChildViews' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_27d5' : [ 0x4, {
'AlignmentNoAccessPtes' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 31, native_type='unsigned long')]],
'DirtyPages' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_SUBSECTION' : [ 0x38, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'SubsectionBase' : [ 0x8, ['pointer64', ['_MMPTE']]],
'NextSubsection' : [ 0x10, ['pointer64', ['_SUBSECTION']]],
'GlobalPerSessionHead' : [ 0x18, ['_RTL_AVL_TREE']],
'CreationWaitList' : [ 0x18, ['pointer64', ['_MI_SUBSECTION_WAIT_BLOCK']]],
'SessionDriverProtos' : [ 0x18, ['pointer64', ['_MI_PER_SESSION_PROTOS']]],
'u' : [ 0x20, ['__unnamed_27d1']],
'StartingSector' : [ 0x24, ['unsigned long']],
'NumberOfFullSectors' : [ 0x28, ['unsigned long']],
'PtesInSubsection' : [ 0x2c, ['unsigned long']],
'u1' : [ 0x30, ['__unnamed_27d3']],
'UnusedPtes' : [ 0x34, ['BitField', dict(start_bit = 0, end_bit = 31, native_type='unsigned long')]],
'DirtyPages' : [ 0x34, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'u2' : [ 0x34, ['__unnamed_27d5']],
} ],
'_CLIENT_ID32' : [ 0x8, {
'UniqueProcess' : [ 0x0, ['unsigned long']],
'UniqueThread' : [ 0x4, ['unsigned long']],
} ],
'_CM_KEY_INDEX' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_VI_DEADLOCK_THREAD' : [ 0x38, {
'Thread' : [ 0x0, ['pointer64', ['_KTHREAD']]],
'CurrentSpinNode' : [ 0x8, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'CurrentOtherNode' : [ 0x10, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'ListEntry' : [ 0x18, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x18, ['_LIST_ENTRY']],
'NodeCount' : [ 0x28, ['unsigned long']],
'PagingCount' : [ 0x2c, ['unsigned long']],
'ThreadUsesEresources' : [ 0x30, ['unsigned char']],
} ],
'_PPM_IDLE_STATE' : [ 0xf8, {
'DomainMembers' : [ 0x0, ['_KAFFINITY_EX']],
'Name' : [ 0xa8, ['_UNICODE_STRING']],
'Latency' : [ 0xb8, ['unsigned long']],
'BreakEvenDuration' : [ 0xbc, ['unsigned long']],
'Power' : [ 0xc0, ['unsigned long']],
'StateFlags' : [ 0xc4, ['unsigned long']],
'VetoAccounting' : [ 0xc8, ['_PPM_VETO_ACCOUNTING']],
'StateType' : [ 0xf0, ['unsigned char']],
'InterruptsEnabled' : [ 0xf1, ['unsigned char']],
'Interruptible' : [ 0xf2, ['unsigned char']],
'ContextRetained' : [ 0xf3, ['unsigned char']],
'CacheCoherent' : [ 0xf4, ['unsigned char']],
'WakesSpuriously' : [ 0xf5, ['unsigned char']],
'PlatformOnly' : [ 0xf6, ['unsigned char']],
'NoCState' : [ 0xf7, ['unsigned char']],
} ],
'_KRESOURCEMANAGER' : [ 0x250, {
'NotificationAvailable' : [ 0x0, ['_KEVENT']],
'cookie' : [ 0x18, ['unsigned long']],
'State' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'KResourceManagerUninitialized', 1: 'KResourceManagerOffline', 2: 'KResourceManagerOnline'})]],
'Flags' : [ 0x20, ['unsigned long']],
'Mutex' : [ 0x28, ['_KMUTANT']],
'NamespaceLink' : [ 0x60, ['_KTMOBJECT_NAMESPACE_LINK']],
'RmId' : [ 0x88, ['_GUID']],
'NotificationQueue' : [ 0x98, ['_KQUEUE']],
'NotificationMutex' : [ 0xd8, ['_KMUTANT']],
'EnlistmentHead' : [ 0x110, ['_LIST_ENTRY']],
'EnlistmentCount' : [ 0x120, ['unsigned long']],
'NotificationRoutine' : [ 0x128, ['pointer64', ['void']]],
'Key' : [ 0x130, ['pointer64', ['void']]],
'ProtocolListHead' : [ 0x138, ['_LIST_ENTRY']],
'PendingPropReqListHead' : [ 0x148, ['_LIST_ENTRY']],
'CRMListEntry' : [ 0x158, ['_LIST_ENTRY']],
'Tm' : [ 0x168, ['pointer64', ['_KTM']]],
'Description' : [ 0x170, ['_UNICODE_STRING']],
'Enlistments' : [ 0x180, ['_KTMOBJECT_NAMESPACE']],
'CompletionBinding' : [ 0x228, ['_KRESOURCEMANAGER_COMPLETION_BINDING']],
} ],
'_MI_SYSTEM_PTE_STATE' : [ 0x180, {
'DeadPteTrackerSListHead' : [ 0x0, ['_SLIST_HEADER']],
'PteTrackerLock' : [ 0x10, ['unsigned long long']],
'MdlTrackerLookaside' : [ 0x40, ['_NPAGED_LOOKASIDE_LIST']],
'PteTrackingBitmap' : [ 0xc0, ['_RTL_BITMAP_EX']],
'CachedPteHeads' : [ 0xd0, ['pointer64', ['_MI_CACHED_PTES']]],
'SystemViewPteInfo' : [ 0xd8, ['_MI_SYSTEM_PTE_TYPE']],
'KernelStackPages' : [ 0x138, ['unsigned char']],
'QueuedStacks' : [ 0x140, ['_SLIST_HEADER']],
'StackGrowthFailures' : [ 0x150, ['unsigned long']],
'TrackPtesAborted' : [ 0x154, ['unsigned char']],
'AdjustCounter' : [ 0x155, ['unsigned char']],
'QueuedStacksWorkItem' : [ 0x158, ['_MI_QUEUED_DEADSTACK_WORKITEM']],
} ],
'_HANDLE_TABLE_FREE_LIST' : [ 0x40, {
'FreeListLock' : [ 0x0, ['_EX_PUSH_LOCK']],
'FirstFreeHandleEntry' : [ 0x8, ['pointer64', ['_HANDLE_TABLE_ENTRY']]],
'LastFreeHandleEntry' : [ 0x10, ['pointer64', ['_HANDLE_TABLE_ENTRY']]],
'HandleCount' : [ 0x18, ['long']],
'HighWaterMark' : [ 0x1c, ['unsigned long']],
'Reserved' : [ 0x20, ['array', 8, ['unsigned long']]],
} ],
'_WHEAP_ERROR_RECORD_WRAPPER_FLAGS' : [ 0x4, {
'Preallocated' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'FromPersistentStore' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PlatformPfaControl' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'PlatformDirectedOffline' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_AGGREGATED_PAYLOAD_FILTER' : [ 0x50, {
'MagicValue' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 12, native_type='unsigned short')]],
'DescriptorVersion' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 16, native_type='unsigned short')]],
'Size' : [ 0x2, ['unsigned short']],
'PredicateCount' : [ 0x4, ['unsigned short']],
'Reserved' : [ 0x6, ['unsigned short']],
'HashedEventIdBitmap' : [ 0x8, ['unsigned long long']],
'ProviderGuid' : [ 0x10, ['_GUID']],
'EachEventTableOffset' : [ 0x20, ['unsigned short']],
'EachEventTableLength' : [ 0x22, ['unsigned short']],
'PayloadDecoderTableOffset' : [ 0x24, ['unsigned short']],
'PayloadDecoderTableLength' : [ 0x26, ['unsigned short']],
'EventFilterTableOffset' : [ 0x28, ['unsigned short']],
'EventFilterTableLength' : [ 0x2a, ['unsigned short']],
'UNICODEStringTableOffset' : [ 0x2c, ['unsigned short']],
'UNICODEStringTableLength' : [ 0x2e, ['unsigned short']],
'ANSIStringTableOffset' : [ 0x30, ['unsigned short']],
'ANSIStringTableLength' : [ 0x32, ['unsigned short']],
'PredicateTable' : [ 0x38, ['array', 1, ['_EVENT_PAYLOAD_PREDICATE']]],
} ],
'_GDI_TEB_BATCH64' : [ 0x4e8, {
'Offset' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 31, native_type='unsigned long')]],
'HasRenderingCommand' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'_PPM_POLICY_SETTINGS_MASK' : [ 0x8, {
'Value' : [ 0x0, ['unsigned long long']],
'PerfDecreaseTime' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PerfIncreaseTime' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PerfDecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'PerfIncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'PerfDecreaseThreshold' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'PerfIncreaseThreshold' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'PerfMinPolicy' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'PerfMaxPolicy' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'PerfTimeCheck' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'PerfBoostPolicy' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'PerfBoostMode' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'AllowThrottling' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PerfHistoryCount' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'ParkingPerfState' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'LatencyHintPerf' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'LatencyHintUnpark' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'CoreParkingMinCores' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'CoreParkingMaxCores' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'CoreParkingDecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'CoreParkingIncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'CoreParkingDecreaseTime' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'CoreParkingIncreaseTime' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'CoreParkingOverUtilizationThreshold' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'CoreParkingDistributeUtility' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'CoreParkingConcurrencyThreshold' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'CoreParkingHeadroomThreshold' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'CoreParkingDistributionThreshold' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'IdleAllowScaling' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'IdleDisable' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'IdleTimeCheck' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'IdleDemoteThreshold' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'IdlePromoteThreshold' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'HeteroDecreaseTime' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'HeteroIncreaseTime' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HeteroDecreaseThreshold' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'HeteroIncreaseThreshold' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Class0FloorPerformance' : [ 0x4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Class1InitialPerformance' : [ 0x4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'EnergyPerfPreference' : [ 0x4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'AutonomousActivityWindow' : [ 0x4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'AutonomousMode' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'DutyCycling' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Spare' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_2805' : [ 0x4, {
'NodeSize' : [ 0x0, ['unsigned long']],
'UseLookaside' : [ 0x0, ['unsigned long']],
} ],
'_VF_AVL_TREE' : [ 0x20, {
'NodeRangeSize' : [ 0x0, ['unsigned long long']],
'NodeCount' : [ 0x8, ['unsigned long long']],
'Tables' : [ 0x10, ['pointer64', ['_VF_AVL_TABLE']]],
'TablesNo' : [ 0x18, ['unsigned long']],
'u1' : [ 0x1c, ['__unnamed_2805']],
} ],
'_FILE_NETWORK_OPEN_INFORMATION' : [ 0x38, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'AllocationSize' : [ 0x20, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x28, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x30, ['unsigned long']],
} ],
'_POP_FX_ACCOUNTING' : [ 0x30, {
'Lock' : [ 0x0, ['unsigned long long']],
'Active' : [ 0x8, ['unsigned char']],
'DripsRequiredState' : [ 0xc, ['unsigned long']],
'Level' : [ 0x10, ['long']],
'ActiveStamp' : [ 0x18, ['long long']],
'CsActiveTime' : [ 0x20, ['unsigned long long']],
'CriticalActiveTime' : [ 0x28, ['long long']],
} ],
'_MI_RESUME_WORKITEM' : [ 0x38, {
'ResumeCompleteEvent' : [ 0x0, ['_KEVENT']],
'WorkItem' : [ 0x18, ['_WORK_QUEUE_ITEM']],
} ],
'_WHEA_MEMORY_ERROR_SECTION_VALIDBITS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PhysicalAddress' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'PhysicalAddressMask' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Node' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Card' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Module' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Bank' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'Device' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Row' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Column' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'BitPosition' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'RequesterId' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'ResponderId' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long long')]],
'TargetId' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 64, native_type='unsigned long long')]],
'ValidBits' : [ 0x0, ['unsigned long long']],
} ],
'_POP_TRIGGER_WAIT' : [ 0x38, {
'Event' : [ 0x0, ['_KEVENT']],
'Status' : [ 0x18, ['long']],
'Link' : [ 0x20, ['_LIST_ENTRY']],
'Trigger' : [ 0x30, ['pointer64', ['_POP_ACTION_TRIGGER']]],
} ],
'_AER_ENDPOINT_DESCRIPTOR_FLAGS' : [ 0x2, {
'UncorrectableErrorMaskRW' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'UncorrectableErrorSeverityRW' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'CorrectableErrorMaskRW' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'AdvancedCapsAndControlRW' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 16, native_type='unsigned short')]],
'AsUSHORT' : [ 0x0, ['unsigned short']],
} ],
'_ISR_THUNK' : [ 0x8, {
'PushImm' : [ 0x0, ['unsigned char']],
'Vector' : [ 0x1, ['unsigned char']],
'PushRbp' : [ 0x2, ['unsigned char']],
'JmpOp' : [ 0x3, ['unsigned char']],
'JmpOffset' : [ 0x4, ['long']],
} ],
'_TRIAGE_EX_WORK_QUEUE' : [ 0x2b0, {
'WorkPriQueue' : [ 0x0, ['_KPRIQUEUE']],
} ],
'_HEAP_FREE_ENTRY_EXTRA' : [ 0x4, {
'TagIndex' : [ 0x0, ['unsigned short']],
'FreeBackTraceIndex' : [ 0x2, ['unsigned short']],
} ],
'_PROC_PERF_HISTORY_ENTRY' : [ 0x8, {
'Utility' : [ 0x0, ['unsigned short']],
'AffinitizedUtility' : [ 0x2, ['unsigned short']],
'Frequency' : [ 0x4, ['unsigned char']],
'TaggedPercent' : [ 0x5, ['array', 2, ['unsigned char']]],
} ],
'_POP_FX_COMPONENT' : [ 0x100, {
'Id' : [ 0x0, ['_GUID']],
'Index' : [ 0x10, ['unsigned long']],
'WorkOrder' : [ 0x18, ['_POP_FX_WORK_ORDER']],
'Device' : [ 0x50, ['pointer64', ['_POP_FX_DEVICE']]],
'Flags' : [ 0x58, ['_POP_FX_COMPONENT_FLAGS']],
'Resident' : [ 0x60, ['long']],
'ActiveEvent' : [ 0x68, ['_KEVENT']],
'IdleLock' : [ 0x80, ['unsigned long long']],
'IdleConditionComplete' : [ 0x88, ['long']],
'IdleStateComplete' : [ 0x8c, ['long']],
'IdleStamp' : [ 0x90, ['unsigned long long']],
'CurrentIdleState' : [ 0x98, ['unsigned long']],
'IdleStateCount' : [ 0x9c, ['unsigned long']],
'IdleStates' : [ 0xa0, ['pointer64', ['_POP_FX_IDLE_STATE']]],
'DeepestWakeableIdleState' : [ 0xa8, ['unsigned long']],
'ProviderCount' : [ 0xac, ['unsigned long']],
'Providers' : [ 0xb0, ['pointer64', ['_POP_FX_PROVIDER']]],
'IdleProviderCount' : [ 0xb8, ['unsigned long']],
'DependentCount' : [ 0xbc, ['unsigned long']],
'Dependents' : [ 0xc0, ['pointer64', ['_POP_FX_DEPENDENT']]],
'Accounting' : [ 0xc8, ['_POP_FX_ACCOUNTING']],
'Performance' : [ 0xf8, ['pointer64', ['_POP_FX_PERF_INFO']]],
} ],
'_PEP_CRASHDUMP_INFORMATION' : [ 0x10, {
'DeviceHandle' : [ 0x0, ['pointer64', ['PEPHANDLE__']]],
'DeviceContext' : [ 0x8, ['pointer64', ['void']]],
} ],
'_POP_FX_DRIVER_CALLBACKS' : [ 0x38, {
'ComponentActive' : [ 0x0, ['pointer64', ['void']]],
'ComponentIdle' : [ 0x8, ['pointer64', ['void']]],
'ComponentIdleState' : [ 0x10, ['pointer64', ['void']]],
'DevicePowerRequired' : [ 0x18, ['pointer64', ['void']]],
'DevicePowerNotRequired' : [ 0x20, ['pointer64', ['void']]],
'PowerControl' : [ 0x28, ['pointer64', ['void']]],
'ComponentCriticalTransition' : [ 0x30, ['pointer64', ['void']]],
} ],
'_PROVIDER_BINARY_ENTRY' : [ 0x38, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'ConsumersNotified' : [ 0x10, ['unsigned char']],
'Spare' : [ 0x11, ['array', 3, ['unsigned char']]],
'DebugIdSize' : [ 0x14, ['unsigned long']],
'DebugId' : [ 0x18, ['_CVDD']],
} ],
'_VI_DEADLOCK_GLOBALS' : [ 0x8180, {
'TimeAcquire' : [ 0x0, ['long long']],
'TimeRelease' : [ 0x8, ['long long']],
'ResourceDatabase' : [ 0x10, ['pointer64', ['_LIST_ENTRY']]],
'ResourceDatabaseCount' : [ 0x18, ['unsigned long long']],
'ResourceAddressRange' : [ 0x20, ['array', 1023, ['_VF_ADDRESS_RANGE']]],
'ThreadDatabase' : [ 0x4010, ['pointer64', ['_LIST_ENTRY']]],
'ThreadDatabaseCount' : [ 0x4018, ['unsigned long long']],
'ThreadAddressRange' : [ 0x4020, ['array', 1023, ['_VF_ADDRESS_RANGE']]],
'AllocationFailures' : [ 0x8010, ['unsigned long']],
'NodesTrimmedBasedOnAge' : [ 0x8014, ['unsigned long']],
'NodesTrimmedBasedOnCount' : [ 0x8018, ['unsigned long']],
'NodesSearched' : [ 0x801c, ['unsigned long']],
'MaxNodesSearched' : [ 0x8020, ['unsigned long']],
'SequenceNumber' : [ 0x8024, ['unsigned long']],
'RecursionDepthLimit' : [ 0x8028, ['unsigned long']],
'SearchedNodesLimit' : [ 0x802c, ['unsigned long']],
'DepthLimitHits' : [ 0x8030, ['unsigned long']],
'SearchLimitHits' : [ 0x8034, ['unsigned long']],
'StackLimitHits' : [ 0x8038, ['unsigned long']],
'ABC_ACB_Skipped' : [ 0x803c, ['unsigned long']],
'OutOfOrderReleases' : [ 0x8040, ['unsigned long']],
'NodesReleasedOutOfOrder' : [ 0x8044, ['unsigned long']],
'TotalReleases' : [ 0x8048, ['unsigned long']],
'RootNodesDeleted' : [ 0x804c, ['unsigned long']],
'ForgetHistoryCounter' : [ 0x8050, ['unsigned long']],
'Instigator' : [ 0x8058, ['pointer64', ['void']]],
'NumberOfParticipants' : [ 0x8060, ['unsigned long']],
'Participant' : [ 0x8068, ['array', 32, ['pointer64', ['_VI_DEADLOCK_NODE']]]],
'ChildrenCountWatermark' : [ 0x8168, ['long']],
'StackType' : [ 0x816c, ['Enumeration', dict(target = 'long', choices = {0: 'BugcheckStackLimits', 1: 'DPCStackLimits', 2: 'ExpandedStackLimits', 3: 'NormalStackLimits', 4: 'Win32kStackLimits', 5: 'SwapBusyStackLimits', 6: 'IsrStackLimits', 7: 'MaximumStackLimits'})]],
'StackLowLimit' : [ 0x8170, ['unsigned long long']],
'StackHighLimit' : [ 0x8178, ['unsigned long long']],
} ],
'_KTM' : [ 0x3c0, {
'cookie' : [ 0x0, ['unsigned long']],
'Mutex' : [ 0x8, ['_KMUTANT']],
'State' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'KKtmUninitialized', 1: 'KKtmInitialized', 2: 'KKtmRecovering', 3: 'KKtmOnline', 4: 'KKtmRecoveryFailed', 5: 'KKtmOffline'})]],
'NamespaceLink' : [ 0x48, ['_KTMOBJECT_NAMESPACE_LINK']],
'TmIdentity' : [ 0x70, ['_GUID']],
'Flags' : [ 0x80, ['unsigned long']],
'VolatileFlags' : [ 0x84, ['unsigned long']],
'LogFileName' : [ 0x88, ['_UNICODE_STRING']],
'LogFileObject' : [ 0x98, ['pointer64', ['_FILE_OBJECT']]],
'MarshallingContext' : [ 0xa0, ['pointer64', ['void']]],
'LogManagementContext' : [ 0xa8, ['pointer64', ['void']]],
'Transactions' : [ 0xb0, ['_KTMOBJECT_NAMESPACE']],
'ResourceManagers' : [ 0x158, ['_KTMOBJECT_NAMESPACE']],
'LsnOrderedMutex' : [ 0x200, ['_KMUTANT']],
'LsnOrderedList' : [ 0x238, ['_LIST_ENTRY']],
'CommitVirtualClock' : [ 0x248, ['_LARGE_INTEGER']],
'CommitVirtualClockMutex' : [ 0x250, ['_FAST_MUTEX']],
'BaseLsn' : [ 0x288, ['_CLS_LSN']],
'CurrentReadLsn' : [ 0x290, ['_CLS_LSN']],
'LastRecoveredLsn' : [ 0x298, ['_CLS_LSN']],
'TmRmHandle' : [ 0x2a0, ['pointer64', ['void']]],
'TmRm' : [ 0x2a8, ['pointer64', ['_KRESOURCEMANAGER']]],
'LogFullNotifyEvent' : [ 0x2b0, ['_KEVENT']],
'CheckpointWorkItem' : [ 0x2c8, ['_WORK_QUEUE_ITEM']],
'CheckpointTargetLsn' : [ 0x2e8, ['_CLS_LSN']],
'LogFullCompletedWorkItem' : [ 0x2f0, ['_WORK_QUEUE_ITEM']],
'LogWriteResource' : [ 0x310, ['_ERESOURCE']],
'LogFlags' : [ 0x378, ['unsigned long']],
'LogFullStatus' : [ 0x37c, ['long']],
'RecoveryStatus' : [ 0x380, ['long']],
'LastCheckBaseLsn' : [ 0x388, ['_CLS_LSN']],
'RestartOrderedList' : [ 0x390, ['_LIST_ENTRY']],
'OfflineWorkItem' : [ 0x3a0, ['_WORK_QUEUE_ITEM']],
} ],
'_MM_SYSTEM_PAGE_COUNTS' : [ 0x10, {
'SystemCodePage' : [ 0x0, ['unsigned long']],
'SystemDriverPage' : [ 0x4, ['unsigned long']],
'TotalSystemCodePages' : [ 0x8, ['long']],
'TotalSystemDriverPages' : [ 0xc, ['long']],
} ],
'_MI_MODWRITE_DATA' : [ 0x40, {
'PagesLoad' : [ 0x0, ['long long']],
'PagesAverage' : [ 0x8, ['unsigned long long']],
'AverageAvailablePages' : [ 0x10, ['unsigned long long']],
'PagesWritten' : [ 0x18, ['unsigned long long']],
'WritesIssued' : [ 0x20, ['unsigned long']],
'IgnoredReservationsCount' : [ 0x24, ['unsigned long']],
'FreedReservationsCount' : [ 0x28, ['unsigned long']],
'WriteBurstCount' : [ 0x2c, ['unsigned long']],
'IgnoreReservationsStartTime' : [ 0x30, ['unsigned long long']],
'ReservationClusterInfo' : [ 0x38, ['_MI_RESERVATION_CLUSTER_INFO']],
'IgnoreReservations' : [ 0x3c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Spare' : [ 0x3c, ['BitField', dict(start_bit = 1, end_bit = 16, native_type='unsigned short')]],
'Spare1' : [ 0x3e, ['unsigned short']],
} ],
'_PLATFORM_IDLE_STATE_ACCOUNTING' : [ 0x3e8, {
'CancelCount' : [ 0x0, ['unsigned long']],
'FailureCount' : [ 0x4, ['unsigned long']],
'SuccessCount' : [ 0x8, ['unsigned long']],
'MaxTime' : [ 0x10, ['unsigned long long']],
'MinTime' : [ 0x18, ['unsigned long long']],
'TotalTime' : [ 0x20, ['unsigned long long']],
'InvalidBucketIndex' : [ 0x28, ['unsigned long']],
'SelectionStatistics' : [ 0x30, ['_PPM_SELECTION_STATISTICS']],
'IdleTimeBuckets' : [ 0xa8, ['array', 26, ['_PROC_IDLE_STATE_BUCKET']]],
} ],
'_KTRANSACTION' : [ 0x2d8, {
'OutcomeEvent' : [ 0x0, ['_KEVENT']],
'cookie' : [ 0x18, ['unsigned long']],
'Mutex' : [ 0x20, ['_KMUTANT']],
'TreeTx' : [ 0x58, ['pointer64', ['_KTRANSACTION']]],
'GlobalNamespaceLink' : [ 0x60, ['_KTMOBJECT_NAMESPACE_LINK']],
'TmNamespaceLink' : [ 0x88, ['_KTMOBJECT_NAMESPACE_LINK']],
'UOW' : [ 0xb0, ['_GUID']],
'State' : [ 0xc0, ['Enumeration', dict(target = 'long', choices = {0: 'KTransactionUninitialized', 1: 'KTransactionActive', 2: 'KTransactionPreparing', 3: 'KTransactionPrepared', 4: 'KTransactionInDoubt', 5: 'KTransactionCommitted', 6: 'KTransactionAborted', 7: 'KTransactionDelegated', 8: 'KTransactionPrePreparing', 9: 'KTransactionForgotten', 10: 'KTransactionRecovering', 11: 'KTransactionPrePrepared'})]],
'Flags' : [ 0xc4, ['unsigned long']],
'EnlistmentHead' : [ 0xc8, ['_LIST_ENTRY']],
'EnlistmentCount' : [ 0xd8, ['unsigned long']],
'RecoverableEnlistmentCount' : [ 0xdc, ['unsigned long']],
'PrePrepareRequiredEnlistmentCount' : [ 0xe0, ['unsigned long']],
'PrepareRequiredEnlistmentCount' : [ 0xe4, ['unsigned long']],
'OutcomeRequiredEnlistmentCount' : [ 0xe8, ['unsigned long']],
'PendingResponses' : [ 0xec, ['unsigned long']],
'SuperiorEnlistment' : [ 0xf0, ['pointer64', ['_KENLISTMENT']]],
'LastLsn' : [ 0xf8, ['_CLS_LSN']],
'PromotedEntry' : [ 0x100, ['_LIST_ENTRY']],
'PromoterTransaction' : [ 0x110, ['pointer64', ['_KTRANSACTION']]],
'PromotePropagation' : [ 0x118, ['pointer64', ['void']]],
'IsolationLevel' : [ 0x120, ['unsigned long']],
'IsolationFlags' : [ 0x124, ['unsigned long']],
'Timeout' : [ 0x128, ['_LARGE_INTEGER']],
'Description' : [ 0x130, ['_UNICODE_STRING']],
'RollbackThread' : [ 0x140, ['pointer64', ['_KTHREAD']]],
'RollbackWorkItem' : [ 0x148, ['_WORK_QUEUE_ITEM']],
'RollbackDpc' : [ 0x168, ['_KDPC']],
'RollbackTimer' : [ 0x1a8, ['_KTIMER']],
'LsnOrderedEntry' : [ 0x1e8, ['_LIST_ENTRY']],
'Outcome' : [ 0x1f8, ['Enumeration', dict(target = 'long', choices = {0: 'KTxOutcomeUninitialized', 1: 'KTxOutcomeUndetermined', 2: 'KTxOutcomeCommitted', 3: 'KTxOutcomeAborted', 4: 'KTxOutcomeUnavailable'})]],
'Tm' : [ 0x200, ['pointer64', ['_KTM']]],
'CommitReservation' : [ 0x208, ['long long']],
'TransactionHistory' : [ 0x210, ['array', 10, ['_KTRANSACTION_HISTORY']]],
'TransactionHistoryCount' : [ 0x260, ['unsigned long']],
'DTCPrivateInformation' : [ 0x268, ['pointer64', ['void']]],
'DTCPrivateInformationLength' : [ 0x270, ['unsigned long']],
'DTCPrivateInformationMutex' : [ 0x278, ['_KMUTANT']],
'PromotedTxSelfHandle' : [ 0x2b0, ['pointer64', ['void']]],
'PendingPromotionCount' : [ 0x2b8, ['unsigned long']],
'PromotionCompletedEvent' : [ 0x2c0, ['_KEVENT']],
} ],
'_PRIVATE_CACHE_MAP_FLAGS' : [ 0x4, {
'DontUse' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'ReadAheadActive' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'ReadAheadEnabled' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'PagePriority' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 21, native_type='unsigned long')]],
'PipelineReadAheads' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Available' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long')]],
} ],
'_CM_KCB_UOW' : [ 0x60, {
'TransactionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'KCBLock' : [ 0x10, ['pointer64', ['_CM_INTENT_LOCK']]],
'KeyLock' : [ 0x18, ['pointer64', ['_CM_INTENT_LOCK']]],
'KCBListEntry' : [ 0x20, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x30, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'Transaction' : [ 0x38, ['pointer64', ['_CM_TRANS']]],
'UoWState' : [ 0x40, ['unsigned long']],
'ActionType' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'UoWAddThisKey', 1: 'UoWAddChildKey', 2: 'UoWDeleteThisKey', 3: 'UoWDeleteChildKey', 4: 'UoWSetValueNew', 5: 'UoWSetValueExisting', 6: 'UoWDeleteValue', 7: 'UoWSetKeyUserFlags', 8: 'UoWSetLastWriteTime', 9: 'UoWSetSecurityDescriptor', 10: 'UoWRenameSubKey', 11: 'UoWRenameOldSubKey', 12: 'UoWRenameNewSubKey', 13: 'UoWIsolation', 14: 'UoWInvalid'})]],
'StorageType' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'Stable', 1: 'Volatile', 2: 'InvalidStorage'})]],
'ChildKCB' : [ 0x50, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'VolatileKeyCell' : [ 0x50, ['unsigned long']],
'OldValueCell' : [ 0x50, ['unsigned long']],
'NewValueCell' : [ 0x54, ['unsigned long']],
'UserFlags' : [ 0x50, ['unsigned long']],
'LastWriteTime' : [ 0x50, ['_LARGE_INTEGER']],
'TxSecurityCell' : [ 0x50, ['unsigned long']],
'OldChildKCB' : [ 0x50, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NewChildKCB' : [ 0x58, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'OtherChildKCB' : [ 0x50, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'ThisVolatileKeyCell' : [ 0x58, ['unsigned long']],
} ],
'_MMPTE_TRANSITION' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 64, native_type='unsigned long long')]],
} ],
'_PROCESSOR_IDLE_CONSTRAINTS' : [ 0x30, {
'TotalTime' : [ 0x0, ['unsigned long long']],
'IdleTime' : [ 0x8, ['unsigned long long']],
'ExpectedIdleDuration' : [ 0x10, ['unsigned long long']],
'MaxIdleDuration' : [ 0x18, ['unsigned long long']],
'OverrideState' : [ 0x20, ['unsigned long']],
'TimeCheck' : [ 0x24, ['unsigned long']],
'PromotePercent' : [ 0x28, ['unsigned char']],
'DemotePercent' : [ 0x29, ['unsigned char']],
'Parked' : [ 0x2a, ['unsigned char']],
'Interruptible' : [ 0x2b, ['unsigned char']],
'PlatformIdle' : [ 0x2c, ['unsigned char']],
'ExpectedWakeReason' : [ 0x2d, ['unsigned char']],
} ],
'_KREQUEST_PACKET' : [ 0x20, {
'CurrentPacket' : [ 0x0, ['array', 3, ['pointer64', ['void']]]],
'WorkerRoutine' : [ 0x18, ['pointer64', ['void']]],
} ],
'_VF_WATCHDOG_IRP' : [ 0x20, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Irp' : [ 0x10, ['pointer64', ['_IRP']]],
'DueTickCount' : [ 0x18, ['unsigned long']],
'Inserted' : [ 0x1c, ['unsigned char']],
'TrackedStackLocation' : [ 0x1d, ['unsigned char']],
'CancelTimeoutTicks' : [ 0x1e, ['unsigned short']],
} ],
'_MMVAD_FLAGS2' : [ 0x4, {
'FileOffset' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'Large' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'TrimBehind' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'Inherit' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'NoValidationNeeded' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'PrivateDemandZero' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 32, native_type='unsigned long')]],
} ],
'_flags' : [ 0x1, {
'Removable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'GroupAssigned' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'GroupCommitted' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'GroupAssignmentFixed' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Fill' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
} ],
'__unnamed_2880' : [ 0x8, {
'Head' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long long')]],
'Tail' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 48, native_type='unsigned long long')]],
'ActiveThreadCount' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_2882' : [ 0x8, {
's1' : [ 0x0, ['__unnamed_2880']],
'Value' : [ 0x0, ['long long']],
} ],
'_ALPC_COMPLETION_LIST_STATE' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_2882']],
} ],
'_CM_KEY_SECURITY_CACHE' : [ 0x38, {
'Cell' : [ 0x0, ['unsigned long']],
'ConvKey' : [ 0x4, ['unsigned long']],
'List' : [ 0x8, ['_LIST_ENTRY']],
'DescriptorLength' : [ 0x18, ['unsigned long']],
'RealRefCount' : [ 0x1c, ['unsigned long']],
'Descriptor' : [ 0x20, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_CM_NAME_HASH' : [ 0x18, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer64', ['_CM_NAME_HASH']]],
'NameLength' : [ 0x10, ['unsigned short']],
'Name' : [ 0x12, ['array', 1, ['wchar']]],
} ],
'_PROC_IDLE_STATE_BUCKET' : [ 0x20, {
'TotalTime' : [ 0x0, ['unsigned long long']],
'MinTime' : [ 0x8, ['unsigned long long']],
'MaxTime' : [ 0x10, ['unsigned long long']],
'Count' : [ 0x18, ['unsigned long']],
} ],
'_PO_IRP_QUEUE' : [ 0x10, {
'CurrentIrp' : [ 0x0, ['pointer64', ['_IRP']]],
'PendingIrpList' : [ 0x8, ['pointer64', ['_IRP']]],
} ],
'_MMSECURE_FLAGS' : [ 0x4, {
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadWrite' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SecNoChange' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'NoDelete' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'RequiresPteReversal' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ExclusiveSecure' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 12, native_type='unsigned long')]],
} ],
'__unnamed_2897' : [ 0x4, {
'Active' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OnlyTryAcquireUsed' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ReleasedOutOfOrder' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SequenceNumber' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'Whole' : [ 0x0, ['unsigned long']],
} ],
'_VI_DEADLOCK_NODE' : [ 0xd0, {
'Parent' : [ 0x0, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'ChildrenList' : [ 0x8, ['_LIST_ENTRY']],
'SiblingsList' : [ 0x18, ['_LIST_ENTRY']],
'ResourceList' : [ 0x28, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x28, ['_LIST_ENTRY']],
'Root' : [ 0x38, ['pointer64', ['_VI_DEADLOCK_RESOURCE']]],
'ThreadEntry' : [ 0x40, ['pointer64', ['_VI_DEADLOCK_THREAD']]],
'u1' : [ 0x48, ['__unnamed_2897']],
'ChildrenCount' : [ 0x4c, ['long']],
'StackTrace' : [ 0x50, ['array', 8, ['pointer64', ['void']]]],
'ParentStackTrace' : [ 0x90, ['array', 8, ['pointer64', ['void']]]],
} ],
'PROCESSOR_IDLESTATE_INFO' : [ 0x8, {
'TimeCheck' : [ 0x0, ['unsigned long']],
'DemotePercent' : [ 0x4, ['unsigned char']],
'PromotePercent' : [ 0x5, ['unsigned char']],
'Spare' : [ 0x6, ['array', 2, ['unsigned char']]],
} ],
'_KTMOBJECT_NAMESPACE' : [ 0xa8, {
'Table' : [ 0x0, ['_RTL_AVL_TABLE']],
'Mutex' : [ 0x68, ['_KMUTANT']],
'LinksOffset' : [ 0xa0, ['unsigned short']],
'GuidOffset' : [ 0xa2, ['unsigned short']],
'Expired' : [ 0xa4, ['unsigned char']],
} ],
'_LPCP_PORT_QUEUE' : [ 0x20, {
'NonPagedPortQueue' : [ 0x0, ['pointer64', ['_LPCP_NONPAGED_PORT_QUEUE']]],
'Semaphore' : [ 0x8, ['pointer64', ['_KSEMAPHORE']]],
'ReceiveHead' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_CM_KEY_REFERENCE' : [ 0x10, {
'KeyCell' : [ 0x0, ['unsigned long']],
'KeyHive' : [ 0x8, ['pointer64', ['_HHIVE']]],
} ],
'SYSTEM_POWER_LEVEL' : [ 0x18, {
'Enable' : [ 0x0, ['unsigned char']],
'Spare' : [ 0x1, ['array', 3, ['unsigned char']]],
'BatteryLevel' : [ 0x4, ['unsigned long']],
'PowerPolicy' : [ 0x8, ['POWER_ACTION_POLICY']],
'MinSystemState' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_HVIEW_MAP_ENTRY' : [ 0x20, {
'ViewStart' : [ 0x0, ['pointer64', ['void']]],
'IsPinned' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Bcb' : [ 0x8, ['pointer64', ['void']]],
'PinnedPages' : [ 0x10, ['unsigned long long']],
'Size' : [ 0x18, ['unsigned long']],
} ],
'_OBJECT_DUMP_CONTROL' : [ 0x10, {
'Stream' : [ 0x0, ['pointer64', ['void']]],
'Detail' : [ 0x8, ['unsigned long']],
} ],
'_POP_COOLING_EXTENSION' : [ 0x90, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'RequestListHead' : [ 0x10, ['_LIST_ENTRY']],
'Lock' : [ 0x20, ['_POP_RW_LOCK']],
'DeviceObject' : [ 0x30, ['pointer64', ['_DEVICE_OBJECT']]],
'NotificationEntry' : [ 0x38, ['pointer64', ['void']]],
'Enabled' : [ 0x40, ['unsigned char']],
'ActiveEngaged' : [ 0x41, ['unsigned char']],
'ThrottleLimit' : [ 0x42, ['unsigned char']],
'UpdatingToCurrent' : [ 0x43, ['unsigned char']],
'RemovalFlushEvent' : [ 0x48, ['pointer64', ['_KEVENT']]],
'PnpFlushEvent' : [ 0x50, ['pointer64', ['_KEVENT']]],
'Interface' : [ 0x58, ['_THERMAL_COOLING_INTERFACE']],
} ],
'_EVENT_PAYLOAD_PREDICATE' : [ 0x18, {
'FieldIndex' : [ 0x0, ['unsigned short']],
'CompareOp' : [ 0x2, ['unsigned short']],
'Value' : [ 0x8, ['array', 2, ['unsigned long long']]],
} ],
'_EVENT_HEADER_EXTENDED_DATA_ITEM' : [ 0x10, {
'Reserved1' : [ 0x0, ['unsigned short']],
'ExtType' : [ 0x2, ['unsigned short']],
'Linkage' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Reserved2' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 16, native_type='unsigned short')]],
'DataSize' : [ 0x6, ['unsigned short']],
'DataPtr' : [ 0x8, ['unsigned long long']],
} ],
'_CM_INDEX' : [ 0x8, {
'Cell' : [ 0x0, ['unsigned long']],
'NameHint' : [ 0x4, ['array', 4, ['unsigned char']]],
'HashKey' : [ 0x4, ['unsigned long']],
} ],
'_VF_ADDRESS_RANGE' : [ 0x10, {
'Start' : [ 0x0, ['pointer64', ['unsigned char']]],
'End' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_OBJECT_SYMBOLIC_LINK' : [ 0x20, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LinkTarget' : [ 0x8, ['_UNICODE_STRING']],
'DosDeviceDriveIndex' : [ 0x18, ['unsigned long']],
'Flags' : [ 0x1c, ['unsigned long']],
} ],
'_LPCP_NONPAGED_PORT_QUEUE' : [ 0x28, {
'Semaphore' : [ 0x0, ['_KSEMAPHORE']],
'BackPointer' : [ 0x20, ['pointer64', ['_LPCP_PORT_OBJECT']]],
} ],
'_THERMAL_POLICY' : [ 0x18, {
'Version' : [ 0x0, ['unsigned long']],
'WaitForUpdate' : [ 0x4, ['unsigned char']],
'Hibernate' : [ 0x5, ['unsigned char']],
'Critical' : [ 0x6, ['unsigned char']],
'ThermalStandby' : [ 0x7, ['unsigned char']],
'ActivationReasons' : [ 0x8, ['unsigned long']],
'PassiveLimit' : [ 0xc, ['unsigned long']],
'ActiveLevel' : [ 0x10, ['unsigned long']],
'OverThrottled' : [ 0x14, ['unsigned char']],
} ],
'_KRESOURCEMANAGER_COMPLETION_BINDING' : [ 0x28, {
'NotificationListHead' : [ 0x0, ['_LIST_ENTRY']],
'Port' : [ 0x10, ['pointer64', ['void']]],
'Key' : [ 0x18, ['unsigned long long']],
'BindingProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
} ],
'_VF_TRACKER' : [ 0x10, {
'TrackerFlags' : [ 0x0, ['unsigned long']],
'TrackerSize' : [ 0x4, ['unsigned long']],
'TrackerIndex' : [ 0x8, ['unsigned long']],
'TraceDepth' : [ 0xc, ['unsigned long']],
} ],
'_CALL_PERFORMANCE_DATA' : [ 0x408, {
'SpinLock' : [ 0x0, ['unsigned long long']],
'HashTable' : [ 0x8, ['array', 64, ['_LIST_ENTRY']]],
} ],
'_RTL_UMS_CONTEXT' : [ 0x520, {
'Link' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Context' : [ 0x10, ['_CONTEXT']],
'Teb' : [ 0x4e0, ['pointer64', ['void']]],
'UserContext' : [ 0x4e8, ['pointer64', ['void']]],
'ScheduledThread' : [ 0x4f0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Suspended' : [ 0x4f0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'VolatileContext' : [ 0x4f0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Terminated' : [ 0x4f0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'DebugActive' : [ 0x4f0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'RunningOnSelfThread' : [ 0x4f0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'DenyRunningOnSelfThread' : [ 0x4f0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Flags' : [ 0x4f0, ['long']],
'KernelUpdateLock' : [ 0x4f8, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long long')]],
'PrimaryClientID' : [ 0x4f8, ['BitField', dict(start_bit = 2, end_bit = 64, native_type='unsigned long long')]],
'ContextLock' : [ 0x4f8, ['unsigned long long']],
'PrimaryUmsContext' : [ 0x500, ['pointer64', ['_RTL_UMS_CONTEXT']]],
'SwitchCount' : [ 0x508, ['unsigned long']],
'KernelYieldCount' : [ 0x50c, ['unsigned long']],
'MixedYieldCount' : [ 0x510, ['unsigned long']],
'YieldCount' : [ 0x514, ['unsigned long']],
} ],
'_ARBITER_ALTERNATIVE' : [ 0x40, {
'Minimum' : [ 0x0, ['unsigned long long']],
'Maximum' : [ 0x8, ['unsigned long long']],
'Length' : [ 0x10, ['unsigned long long']],
'Alignment' : [ 0x18, ['unsigned long long']],
'Priority' : [ 0x20, ['long']],
'Flags' : [ 0x24, ['unsigned long']],
'Descriptor' : [ 0x28, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'Reserved' : [ 0x30, ['array', 3, ['unsigned long']]],
} ],
'_MI_AVAILABLE_PAGE_WAIT_STATES' : [ 0x20, {
'Event' : [ 0x0, ['_KEVENT']],
'EventSets' : [ 0x18, ['unsigned long']],
} ],
'_WHEA_ERROR_STATUS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['unsigned long long']],
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long long')]],
'Address' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long long')]],
'Control' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long long')]],
'Data' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long long')]],
'Responder' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long long')]],
'Requester' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long long')]],
'FirstError' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long long')]],
'Overflow' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 64, native_type='unsigned long long')]],
} ],
'_WHEA_PERSISTENCE_INFO' : [ 0x8, {
'Signature' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Length' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 40, native_type='unsigned long long')]],
'Identifier' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 56, native_type='unsigned long long')]],
'Attributes' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 58, native_type='unsigned long long')]],
'DoNotLog' : [ 0x0, ['BitField', dict(start_bit = 58, end_bit = 59, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 59, end_bit = 64, native_type='unsigned long long')]],
'AsULONGLONG' : [ 0x0, ['unsigned long long']],
} ],
'_COLORED_PAGE_INFO' : [ 0x18, {
'BeingZeroed' : [ 0x0, ['long']],
'Processor' : [ 0x4, ['unsigned long']],
'PagesQueued' : [ 0x8, ['unsigned long long']],
'PfnAllocation' : [ 0x10, ['pointer64', ['_MMPFN']]],
} ],
'_TRIAGE_9F_POWER' : [ 0x20, {
'Signature' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'IrpList' : [ 0x8, ['pointer64', ['_LIST_ENTRY']]],
'ThreadList' : [ 0x10, ['pointer64', ['_LIST_ENTRY']]],
'DelayedWorkQueue' : [ 0x18, ['pointer64', ['_TRIAGE_EX_WORK_QUEUE']]],
} ],
'_MI_POOL_STATE' : [ 0xf8, {
'MaximumNonPagedPoolThreshold' : [ 0x0, ['unsigned long long']],
'NonPagedPoolSListMaximum' : [ 0x8, ['array', 3, ['unsigned long']]],
'AllocatedNonPagedPool' : [ 0x18, ['unsigned long long']],
'BadPoolHead' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'HighEventSets' : [ 0x28, ['unsigned long']],
'HighEventSetsValid' : [ 0x2c, ['unsigned char']],
'PoolFailures' : [ 0x30, ['array', 3, ['array', 3, ['unsigned long']]]],
'PoolFailureReasons' : [ 0x54, ['_MI_POOL_FAILURE_REASONS']],
'LowPagedPoolThreshold' : [ 0x80, ['unsigned long long']],
'HighPagedPoolThreshold' : [ 0x88, ['unsigned long long']],
'PagedPoolSListMaximum' : [ 0x90, ['unsigned long']],
'PreemptiveTrims' : [ 0x94, ['array', 4, ['unsigned long']]],
'SpecialPagesInUsePeak' : [ 0xa8, ['unsigned long long']],
'SpecialPoolRejected' : [ 0xb0, ['array', 9, ['unsigned long']]],
'SpecialPagesNonPaged' : [ 0xd8, ['unsigned long long']],
'SpecialPoolPdes' : [ 0xe0, ['long']],
'SessionSpecialPoolPdesMax' : [ 0xe4, ['unsigned long']],
'TotalPagedPoolQuota' : [ 0xe8, ['unsigned long long']],
'TotalNonPagedPoolQuota' : [ 0xf0, ['unsigned long long']],
} ],
'_STACK_TABLE' : [ 0x8088, {
'NumStackTraces' : [ 0x0, ['unsigned short']],
'TraceCapacity' : [ 0x2, ['unsigned short']],
'StackTrace' : [ 0x8, ['array', 16, ['pointer64', ['_OBJECT_REF_TRACE']]]],
'StackTableHash' : [ 0x88, ['array', 16381, ['unsigned short']]],
} ],
'_POP_POWER_SETTING_VALUES' : [ 0x13c, {
'StructureSize' : [ 0x0, ['unsigned long']],
'PopPolicy' : [ 0x4, ['_SYSTEM_POWER_POLICY']],
'CurrentAcDcPowerState' : [ 0xec, ['Enumeration', dict(target = 'long', choices = {0: 'PoAc', 1: 'PoDc', 2: 'PoHot', 3: 'PoConditionMaximum'})]],
'AwayModeEnabled' : [ 0xf0, ['unsigned char']],
'AwayModeEngaged' : [ 0xf1, ['unsigned char']],
'AwayModePolicyAllowed' : [ 0xf2, ['unsigned char']],
'AwayModeIgnoreUserPresent' : [ 0xf4, ['long']],
'AwayModeIgnoreAction' : [ 0xf8, ['long']],
'DisableFastS4' : [ 0xfc, ['unsigned char']],
'DisableStandbyStates' : [ 0xfd, ['unsigned char']],
'UnattendSleepTimeout' : [ 0x100, ['unsigned long']],
'DiskIgnoreTime' : [ 0x104, ['unsigned long']],
'DeviceIdlePolicy' : [ 0x108, ['unsigned long']],
'VideoDimTimeout' : [ 0x10c, ['unsigned long']],
'VideoNormalBrightness' : [ 0x110, ['unsigned long']],
'VideoDimBrightness' : [ 0x114, ['unsigned long']],
'AlsOffset' : [ 0x118, ['unsigned long']],
'AlsEnabled' : [ 0x11c, ['unsigned long']],
'EsBrightness' : [ 0x120, ['unsigned long']],
'SwitchShutdownForced' : [ 0x124, ['unsigned char']],
'SystemCoolingPolicy' : [ 0x128, ['unsigned long']],
'MediaBufferingEngaged' : [ 0x12c, ['unsigned char']],
'OffloadedAudio' : [ 0x12d, ['unsigned char']],
'NonOffloadedAudio' : [ 0x12e, ['unsigned char']],
'FullscreenVideoPlayback' : [ 0x12f, ['unsigned char']],
'EsBatteryThreshold' : [ 0x130, ['unsigned long']],
'EsUserAwaySetting' : [ 0x134, ['unsigned char']],
'WiFiInStandby' : [ 0x138, ['unsigned long']],
} ],
'_CM_INDEX_HINT_BLOCK' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'HashKey' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_TOKEN_CONTROL' : [ 0x28, {
'TokenId' : [ 0x0, ['_LUID']],
'AuthenticationId' : [ 0x8, ['_LUID']],
'ModifiedId' : [ 0x10, ['_LUID']],
'TokenSource' : [ 0x18, ['_TOKEN_SOURCE']],
} ],
'_KUMS_CONTEXT_HEADER' : [ 0x70, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'StackTop' : [ 0x20, ['pointer64', ['void']]],
'StackSize' : [ 0x28, ['unsigned long long']],
'RspOffset' : [ 0x30, ['unsigned long long']],
'Rip' : [ 0x38, ['unsigned long long']],
'FltSave' : [ 0x40, ['pointer64', ['_XSAVE_FORMAT']]],
'Volatile' : [ 0x48, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Reserved' : [ 0x48, ['BitField', dict(start_bit = 1, end_bit = 64, native_type='unsigned long long')]],
'Flags' : [ 0x48, ['unsigned long long']],
'TrapFrame' : [ 0x50, ['pointer64', ['_KTRAP_FRAME']]],
'ExceptionFrame' : [ 0x58, ['pointer64', ['_KEXCEPTION_FRAME']]],
'SourceThread' : [ 0x60, ['pointer64', ['_KTHREAD']]],
'Return' : [ 0x68, ['unsigned long long']],
} ],
'_DEFERRED_WRITE' : [ 0x48, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'BytesToWrite' : [ 0x10, ['unsigned long']],
'DeferredWriteLinks' : [ 0x18, ['_LIST_ENTRY']],
'Event' : [ 0x28, ['pointer64', ['_KEVENT']]],
'PostRoutine' : [ 0x30, ['pointer64', ['void']]],
'Context1' : [ 0x38, ['pointer64', ['void']]],
'Context2' : [ 0x40, ['pointer64', ['void']]],
} ],
'__unnamed_2911' : [ 0x4, {
'DeviceNumber' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long')]],
'FunctionNumber' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_2913' : [ 0x4, {
'bits' : [ 0x0, ['__unnamed_2911']],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_WHEA_PCI_SLOT_NUMBER' : [ 0x4, {
'u' : [ 0x0, ['__unnamed_2913']],
} ],
'_MI_RESAVAIL_FAILURES' : [ 0x8, {
'Wrap' : [ 0x0, ['unsigned long']],
'NoCharge' : [ 0x4, ['unsigned long']],
} ],
'_MI_IO_PAGE_STATE' : [ 0x58, {
'IoPfnLock' : [ 0x0, ['unsigned long long']],
'IoPfnRoot' : [ 0x8, ['array', 3, ['_RTL_AVL_TREE']]],
'UnusedCachedMaps' : [ 0x20, ['_LIST_ENTRY']],
'OldestCacheFlushTimeStamp' : [ 0x30, ['unsigned long']],
'IoCacheStats' : [ 0x38, ['_MI_IO_CACHE_STATS']],
} ],
'_ARBITER_ORDERING_LIST' : [ 0x10, {
'Count' : [ 0x0, ['unsigned short']],
'Maximum' : [ 0x2, ['unsigned short']],
'Orderings' : [ 0x8, ['pointer64', ['_ARBITER_ORDERING']]],
} ],
'_VF_AVL_TABLE' : [ 0xc0, {
'RtlTable' : [ 0x0, ['_RTL_AVL_TABLE']],
'ReservedNode' : [ 0x68, ['pointer64', ['_VF_AVL_TREE_NODE']]],
'NodeToFree' : [ 0x70, ['pointer64', ['void']]],
'Lock' : [ 0x80, ['long']],
} ],
'_XPF_MC_BANK_FLAGS' : [ 0x1, {
'ClearOnInitializationRW' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ControlDataRW' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'AsUCHAR' : [ 0x0, ['unsigned char']],
} ],
'_TOKEN_AUDIT_POLICY' : [ 0x1e, {
'PerUserPolicy' : [ 0x0, ['array', 30, ['unsigned char']]],
} ],
'_TRIAGE_POP_FX_DEVICE' : [ 0x38, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'Irp' : [ 0x10, ['pointer64', ['_IRP']]],
'IrpData' : [ 0x18, ['pointer64', ['_TRIAGE_POP_IRP_DATA']]],
'Status' : [ 0x20, ['long']],
'PowerReqCall' : [ 0x24, ['long']],
'PowerNotReqCall' : [ 0x28, ['long']],
'DeviceNode' : [ 0x30, ['pointer64', ['_TRIAGE_DEVICE_NODE']]],
} ],
'__unnamed_292f' : [ 0x10, {
'EndingOffset' : [ 0x0, ['pointer64', ['_LARGE_INTEGER']]],
'ResourceToRelease' : [ 0x8, ['pointer64', ['pointer64', ['_ERESOURCE']]]],
} ],
'__unnamed_2931' : [ 0x8, {
'ResourceToRelease' : [ 0x0, ['pointer64', ['_ERESOURCE']]],
} ],
'__unnamed_2937' : [ 0x10, {
'SyncType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SyncTypeOther', 1: 'SyncTypeCreateSection'})]],
'PageProtection' : [ 0x4, ['unsigned long']],
'OutputInformation' : [ 0x8, ['pointer64', ['_FS_FILTER_SECTION_SYNC_OUTPUT']]],
} ],
'__unnamed_293b' : [ 0x10, {
'NotificationType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NotifyTypeCreate', 1: 'NotifyTypeRetired'})]],
'SafeToRecurse' : [ 0x8, ['unsigned char']],
} ],
'__unnamed_293d' : [ 0x28, {
'Argument1' : [ 0x0, ['pointer64', ['void']]],
'Argument2' : [ 0x8, ['pointer64', ['void']]],
'Argument3' : [ 0x10, ['pointer64', ['void']]],
'Argument4' : [ 0x18, ['pointer64', ['void']]],
'Argument5' : [ 0x20, ['pointer64', ['void']]],
} ],
'_FS_FILTER_PARAMETERS' : [ 0x28, {
'AcquireForModifiedPageWriter' : [ 0x0, ['__unnamed_292f']],
'ReleaseForModifiedPageWriter' : [ 0x0, ['__unnamed_2931']],
'AcquireForSectionSynchronization' : [ 0x0, ['__unnamed_2937']],
'NotifyStreamFileObject' : [ 0x0, ['__unnamed_293b']],
'Others' : [ 0x0, ['__unnamed_293d']],
} ],
'_MI_SESSION_DRIVER_UNLOAD' : [ 0x8, {
'Function' : [ 0x0, ['pointer64', ['void']]],
'FunctionValue' : [ 0x0, ['unsigned long long']],
} ],
'_PPM_SELECTION_STATISTICS' : [ 0x78, {
'SelectedCount' : [ 0x0, ['unsigned long long']],
'VetoCount' : [ 0x8, ['unsigned long long']],
'PreVetoCount' : [ 0x10, ['unsigned long long']],
'WrongProcessorCount' : [ 0x18, ['unsigned long long']],
'LatencyCount' : [ 0x20, ['unsigned long long']],
'IdleDurationCount' : [ 0x28, ['unsigned long long']],
'DeviceDependencyCount' : [ 0x30, ['unsigned long long']],
'ProcessorDependencyCount' : [ 0x38, ['unsigned long long']],
'PlatformOnlyCount' : [ 0x40, ['unsigned long long']],
'InterruptibleCount' : [ 0x48, ['unsigned long long']],
'LegacyOverrideCount' : [ 0x50, ['unsigned long long']],
'CstateCheckCount' : [ 0x58, ['unsigned long long']],
'NoCStateCount' : [ 0x60, ['unsigned long long']],
'CoordinatedDependencyCount' : [ 0x68, ['unsigned long long']],
'PreVetoAccounting' : [ 0x70, ['pointer64', ['_PPM_VETO_ACCOUNTING']]],
} ],
'_LDR_SERVICE_TAG_RECORD' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_LDR_SERVICE_TAG_RECORD']]],
'ServiceTag' : [ 0x8, ['unsigned long']],
} ],
'_COMPRESSED_DATA_INFO' : [ 0xc, {
'CompressionFormatAndEngine' : [ 0x0, ['unsigned short']],
'CompressionUnitShift' : [ 0x2, ['unsigned char']],
'ChunkShift' : [ 0x3, ['unsigned char']],
'ClusterShift' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'NumberOfChunks' : [ 0x6, ['unsigned short']],
'CompressedChunkSizes' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_MI_PAGE_COMBINE_STATISTICS' : [ 0x28, {
'PagesScannedActive' : [ 0x0, ['unsigned long long']],
'PagesScannedStandby' : [ 0x8, ['unsigned long long']],
'PagesCombined' : [ 0x10, ['unsigned long long']],
'CombineScanCount' : [ 0x18, ['unsigned long']],
'CombinedBlocksInUse' : [ 0x1c, ['long']],
'SumCombinedBlocksReferenceCount' : [ 0x20, ['long']],
} ],
'_THERMAL_COOLING_INTERFACE' : [ 0x38, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
'Flags' : [ 0x20, ['unsigned long']],
'ActiveCooling' : [ 0x28, ['pointer64', ['void']]],
'PassiveCooling' : [ 0x30, ['pointer64', ['void']]],
} ],
'_HIVE_WAIT_PACKET' : [ 0x28, {
'WakeEvent' : [ 0x0, ['_KEVENT']],
'Status' : [ 0x18, ['long']],
'Next' : [ 0x20, ['pointer64', ['_HIVE_WAIT_PACKET']]],
} ],
'_PROC_PERF_CHECK' : [ 0xc0, {
'LastActive' : [ 0x0, ['unsigned long long']],
'LastTime' : [ 0x8, ['unsigned long long']],
'LastStall' : [ 0x10, ['unsigned long long']],
'Snap' : [ 0x18, ['_PROC_PERF_CHECK_SNAP']],
'TempSnap' : [ 0x68, ['_PROC_PERF_CHECK_SNAP']],
'TaggedThreadPercent' : [ 0xb8, ['array', 2, ['unsigned char']]],
'Class0FloorPerfSelection' : [ 0xba, ['unsigned char']],
'Class1MinimumPerfSelection' : [ 0xbb, ['unsigned char']],
} ],
'__unnamed_2956' : [ 0x4, {
'PollInterval' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_2958' : [ 0x18, {
'PollInterval' : [ 0x0, ['unsigned long']],
'Vector' : [ 0x4, ['unsigned long']],
'SwitchToPollingThreshold' : [ 0x8, ['unsigned long']],
'SwitchToPollingWindow' : [ 0xc, ['unsigned long']],
'ErrorThreshold' : [ 0x10, ['unsigned long']],
'ErrorThresholdWindow' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_295a' : [ 0x18, {
'Polled' : [ 0x0, ['__unnamed_2956']],
'Interrupt' : [ 0x0, ['__unnamed_2958']],
'LocalInterrupt' : [ 0x0, ['__unnamed_2958']],
'Sci' : [ 0x0, ['__unnamed_2958']],
'Nmi' : [ 0x0, ['__unnamed_2958']],
} ],
'_WHEA_NOTIFICATION_DESCRIPTOR' : [ 0x1c, {
'Type' : [ 0x0, ['unsigned char']],
'Length' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['_WHEA_NOTIFICATION_FLAGS']],
'u' : [ 0x4, ['__unnamed_295a']],
} ],
'_POP_HIBER_CONTEXT' : [ 0x1d0, {
'Reset' : [ 0x0, ['unsigned char']],
'HiberFlags' : [ 0x1, ['unsigned char']],
'WroteHiberFile' : [ 0x2, ['unsigned char']],
'VerifyKernelPhaseOnResume' : [ 0x3, ['unsigned char']],
'KernelPhaseVerificationActive' : [ 0x4, ['unsigned char']],
'InitializationFinished' : [ 0x5, ['unsigned char']],
'NextTableLockHeld' : [ 0x8, ['long']],
'BootPhaseFinishedBarrier' : [ 0xc, ['long']],
'KernelResumeFinishedBarrier' : [ 0x10, ['long']],
'HvCaptureReadyBarrier' : [ 0x14, ['long']],
'HvCaptureCompletedBarrier' : [ 0x18, ['long']],
'MapFrozen' : [ 0x1c, ['unsigned char']],
'DiscardMap' : [ 0x20, ['_RTL_BITMAP']],
'KernelPhaseMap' : [ 0x20, ['_RTL_BITMAP']],
'BootPhaseMap' : [ 0x30, ['_RTL_BITMAP']],
'ClonedRanges' : [ 0x40, ['_LIST_ENTRY']],
'ClonedRangeCount' : [ 0x50, ['unsigned long']],
'ClonedPageCount' : [ 0x58, ['unsigned long long']],
'CurrentMap' : [ 0x60, ['pointer64', ['_RTL_BITMAP']]],
'NextCloneRange' : [ 0x68, ['pointer64', ['_LIST_ENTRY']]],
'NextPreserve' : [ 0x70, ['unsigned long long']],
'LoaderMdl' : [ 0x78, ['pointer64', ['_MDL']]],
'AllocatedMdl' : [ 0x80, ['pointer64', ['_MDL']]],
'PagesOut' : [ 0x88, ['unsigned long long']],
'IoPages' : [ 0x90, ['pointer64', ['void']]],
'IoPagesCount' : [ 0x98, ['unsigned long']],
'CurrentMcb' : [ 0xa0, ['pointer64', ['void']]],
'DumpStack' : [ 0xa8, ['pointer64', ['_DUMP_STACK_CONTEXT']]],
'WakeState' : [ 0xb0, ['pointer64', ['_KPROCESSOR_STATE']]],
'IoProgress' : [ 0xb8, ['unsigned long']],
'Status' : [ 0xbc, ['long']],
'GraphicsProc' : [ 0xc0, ['unsigned long']],
'MemoryImage' : [ 0xc8, ['pointer64', ['PO_MEMORY_IMAGE']]],
'PerformanceStats' : [ 0xd0, ['pointer64', ['unsigned long']]],
'BootLoaderLogMdl' : [ 0xd8, ['pointer64', ['_MDL']]],
'SiLogOffset' : [ 0xe0, ['unsigned long']],
'FirmwareRuntimeInformationMdl' : [ 0xe8, ['pointer64', ['_MDL']]],
'FirmwareRuntimeInformationVa' : [ 0xf0, ['pointer64', ['void']]],
'ResumeContext' : [ 0xf8, ['pointer64', ['void']]],
'ResumeContextPages' : [ 0x100, ['unsigned long']],
'SecurePages' : [ 0x104, ['unsigned long']],
'ProcessorCount' : [ 0x108, ['unsigned long']],
'ProcessorContext' : [ 0x110, ['pointer64', ['_POP_PER_PROCESSOR_CONTEXT']]],
'ProdConsBuffer' : [ 0x118, ['pointer64', ['unsigned char']]],
'ProdConsSize' : [ 0x120, ['unsigned long']],
'MaxDataPages' : [ 0x124, ['unsigned long']],
'ExtraBuffer' : [ 0x128, ['pointer64', ['void']]],
'ExtraBufferSize' : [ 0x130, ['unsigned long long']],
'ExtraMapVa' : [ 0x138, ['pointer64', ['void']]],
'BitlockerKeyPFN' : [ 0x140, ['unsigned long long']],
'IoInfo' : [ 0x148, ['_POP_IO_INFO']],
'IoChecksums' : [ 0x1b8, ['pointer64', ['unsigned short']]],
'IoChecksumsSize' : [ 0x1c0, ['unsigned long long']],
'HardwareConfigurationSignature' : [ 0x1c8, ['unsigned long']],
'IumEnabled' : [ 0x1cc, ['unsigned char']],
} ],
'_OBJECT_REF_TRACE' : [ 0x80, {
'StackTrace' : [ 0x0, ['array', 16, ['pointer64', ['void']]]],
} ],
'_CVDD' : [ 0x1c, {
'Signature' : [ 0x0, ['unsigned long']],
'NB10' : [ 0x0, ['_NB10']],
'RsDs' : [ 0x0, ['_RSDS']],
} ],
'_OBJECT_NAME_INFORMATION' : [ 0x10, {
'Name' : [ 0x0, ['_UNICODE_STRING']],
} ],
'_WHEA_AER_BRIDGE_DESCRIPTOR' : [ 0x2c, {
'Type' : [ 0x0, ['unsigned short']],
'Enabled' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
'BusNumber' : [ 0x4, ['unsigned long']],
'Slot' : [ 0x8, ['_WHEA_PCI_SLOT_NUMBER']],
'DeviceControl' : [ 0xc, ['unsigned short']],
'Flags' : [ 0xe, ['_AER_BRIDGE_DESCRIPTOR_FLAGS']],
'UncorrectableErrorMask' : [ 0x10, ['unsigned long']],
'UncorrectableErrorSeverity' : [ 0x14, ['unsigned long']],
'CorrectableErrorMask' : [ 0x18, ['unsigned long']],
'AdvancedCapsAndControl' : [ 0x1c, ['unsigned long']],
'SecondaryUncorrectableErrorMask' : [ 0x20, ['unsigned long']],
'SecondaryUncorrectableErrorSev' : [ 0x24, ['unsigned long']],
'SecondaryCapsAndControl' : [ 0x28, ['unsigned long']],
} ],
'_POP_FX_PERF_INFO' : [ 0xa0, {
'Component' : [ 0x0, ['pointer64', ['_POP_FX_COMPONENT']]],
'CompletedEvent' : [ 0x8, ['_KEVENT']],
'ComponentPerfState' : [ 0x20, ['pointer64', ['void']]],
'Flags' : [ 0x28, ['_POP_FX_PERF_FLAGS']],
'LastChange' : [ 0x30, ['pointer64', ['_PO_FX_PERF_STATE_CHANGE']]],
'LastChangeCount' : [ 0x38, ['unsigned long']],
'LastChangeStamp' : [ 0x40, ['unsigned long long']],
'LastChangeNominal' : [ 0x48, ['unsigned char']],
'PepRegistered' : [ 0x49, ['unsigned char']],
'QueryOnIdleStates' : [ 0x4a, ['unsigned char']],
'RequestDriverContext' : [ 0x50, ['pointer64', ['void']]],
'WorkOrder' : [ 0x58, ['_POP_FX_WORK_ORDER']],
'SetsCount' : [ 0x90, ['unsigned long']],
'Sets' : [ 0x98, ['pointer64', ['_POP_FX_PERF_SET']]],
} ],
'_KDESCRIPTOR' : [ 0x10, {
'Pad' : [ 0x0, ['array', 3, ['unsigned short']]],
'Limit' : [ 0x6, ['unsigned short']],
'Base' : [ 0x8, ['pointer64', ['void']]],
} ],
'_HAL_CHANNEL_MEMORY_RANGES' : [ 0x10, {
'PageFrameIndex' : [ 0x0, ['unsigned long long']],
'MpnId' : [ 0x8, ['unsigned short']],
'Node' : [ 0xa, ['unsigned short']],
'Channel' : [ 0xc, ['unsigned short']],
'IsPowerManageable' : [ 0xe, ['unsigned char']],
'DeepPowerState' : [ 0xf, ['unsigned char']],
} ],
'_PCW_COUNTER_INFORMATION' : [ 0x10, {
'CounterMask' : [ 0x0, ['unsigned long long']],
'InstanceMask' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
} ],
'_DUMP_STACK_CONTEXT' : [ 0x178, {
'Init' : [ 0x0, ['_DUMP_INITIALIZATION_CONTEXT']],
'PartitionOffset' : [ 0x108, ['_LARGE_INTEGER']],
'DumpPointers' : [ 0x110, ['pointer64', ['void']]],
'PointersLength' : [ 0x118, ['unsigned long']],
'ModulePrefix' : [ 0x120, ['pointer64', ['unsigned short']]],
'DriverList' : [ 0x128, ['_LIST_ENTRY']],
'InitMsg' : [ 0x138, ['_STRING']],
'ProgMsg' : [ 0x148, ['_STRING']],
'DoneMsg' : [ 0x158, ['_STRING']],
'FileObject' : [ 0x168, ['pointer64', ['void']]],
'UsageType' : [ 0x170, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile', 4: 'DeviceUsageTypeBoot', 5: 'DeviceUsageTypePostDisplay'})]],
} ],
'_FILE_STANDARD_INFORMATION' : [ 0x18, {
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x8, ['_LARGE_INTEGER']],
'NumberOfLinks' : [ 0x10, ['unsigned long']],
'DeletePending' : [ 0x14, ['unsigned char']],
'Directory' : [ 0x15, ['unsigned char']],
} ],
'_ETW_FILTER_STRING_TOKEN' : [ 0x18, {
'Count' : [ 0x0, ['unsigned short']],
'Tokens' : [ 0x8, ['array', 1, ['_ETW_FILTER_STRING_TOKEN_ELEMENT']]],
} ],
'_POP_SHUTDOWN_BUG_CHECK' : [ 0x48, {
'InitiatingThread' : [ 0x0, ['pointer64', ['_ETHREAD']]],
'InitiatingProcess' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'ThreadId' : [ 0x10, ['pointer64', ['void']]],
'ProcessId' : [ 0x18, ['pointer64', ['void']]],
'Code' : [ 0x20, ['unsigned long']],
'Parameter1' : [ 0x28, ['unsigned long long']],
'Parameter2' : [ 0x30, ['unsigned long long']],
'Parameter3' : [ 0x38, ['unsigned long long']],
'Parameter4' : [ 0x40, ['unsigned long long']],
} ],
'_NB10' : [ 0x14, {
'Signature' : [ 0x0, ['unsigned long']],
'Offset' : [ 0x4, ['unsigned long']],
'TimeStamp' : [ 0x8, ['unsigned long']],
'Age' : [ 0xc, ['unsigned long']],
'PdbName' : [ 0x10, ['array', 1, ['unsigned char']]],
} ],
'_PCW_MASK_INFORMATION' : [ 0x28, {
'CounterMask' : [ 0x0, ['unsigned long long']],
'InstanceMask' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
'InstanceId' : [ 0x10, ['unsigned long']],
'CollectMultiple' : [ 0x14, ['unsigned char']],
'Buffer' : [ 0x18, ['pointer64', ['_PCW_BUFFER']]],
'CancelEvent' : [ 0x20, ['pointer64', ['_KEVENT']]],
} ],
'_SECURITY_DESCRIPTOR_RELATIVE' : [ 0x14, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x4, ['unsigned long']],
'Group' : [ 0x8, ['unsigned long']],
'Sacl' : [ 0xc, ['unsigned long']],
'Dacl' : [ 0x10, ['unsigned long']],
} ],
'_FS_FILTER_SECTION_SYNC_OUTPUT' : [ 0x10, {
'StructureSize' : [ 0x0, ['unsigned long']],
'SizeReturned' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'DesiredReadAlignment' : [ 0xc, ['unsigned long']],
} ],
'_HVIEW_MAP_PIN_LOG_ENTRY' : [ 0x48, {
'ViewOffset' : [ 0x0, ['unsigned long']],
'Pinned' : [ 0x4, ['unsigned char']],
'PinMask' : [ 0x8, ['unsigned long long']],
'Thread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'Stack' : [ 0x18, ['array', 6, ['pointer64', ['void']]]],
} ],
'__unnamed_299a' : [ 0x20, {
'TestAllocation' : [ 0x0, ['_ARBITER_TEST_ALLOCATION_PARAMETERS']],
'RetestAllocation' : [ 0x0, ['_ARBITER_RETEST_ALLOCATION_PARAMETERS']],
'BootAllocation' : [ 0x0, ['_ARBITER_BOOT_ALLOCATION_PARAMETERS']],
'QueryAllocatedResources' : [ 0x0, ['_ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS']],
'QueryConflict' : [ 0x0, ['_ARBITER_QUERY_CONFLICT_PARAMETERS']],
'QueryArbitrate' : [ 0x0, ['_ARBITER_QUERY_ARBITRATE_PARAMETERS']],
'AddReserved' : [ 0x0, ['_ARBITER_ADD_RESERVED_PARAMETERS']],
} ],
'_ARBITER_PARAMETERS' : [ 0x20, {
'Parameters' : [ 0x0, ['__unnamed_299a']],
} ],
'__unnamed_299e' : [ 0x8, {
'idxRecord' : [ 0x0, ['unsigned long']],
'cidContainer' : [ 0x4, ['unsigned long']],
} ],
'_CLS_LSN' : [ 0x8, {
'offset' : [ 0x0, ['__unnamed_299e']],
'ullOffset' : [ 0x0, ['unsigned long long']],
} ],
'_MI_SUBSECTION_WAIT_BLOCK' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_MI_SUBSECTION_WAIT_BLOCK']]],
'Gate' : [ 0x8, ['_KGATE']],
} ],
'POWER_ACTION_POLICY' : [ 0xc, {
'Action' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject', 8: 'PowerActionDisplayOff'})]],
'Flags' : [ 0x4, ['unsigned long']],
'EventCode' : [ 0x8, ['unsigned long']],
} ],
'_KSCHEDULING_GROUP_POLICY' : [ 0x8, {
'Value' : [ 0x0, ['unsigned long']],
'Weight' : [ 0x0, ['unsigned short']],
'MinRate' : [ 0x0, ['unsigned short']],
'MaxRate' : [ 0x2, ['unsigned short']],
'AllFlags' : [ 0x4, ['unsigned long']],
'Type' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Disabled' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Spare1' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_RSDS' : [ 0x1c, {
'Signature' : [ 0x0, ['unsigned long']],
'Guid' : [ 0x4, ['_GUID']],
'Age' : [ 0x14, ['unsigned long']],
'PdbName' : [ 0x18, ['array', 1, ['unsigned char']]],
} ],
'PO_MEMORY_IMAGE' : [ 0x3b0, {
'Signature' : [ 0x0, ['unsigned long']],
'ImageType' : [ 0x4, ['unsigned long']],
'CheckSum' : [ 0x8, ['unsigned long']],
'LengthSelf' : [ 0xc, ['unsigned long']],
'PageSelf' : [ 0x10, ['unsigned long long']],
'PageSize' : [ 0x18, ['unsigned long']],
'SystemTime' : [ 0x20, ['_LARGE_INTEGER']],
'InterruptTime' : [ 0x28, ['unsigned long long']],
'FeatureFlags' : [ 0x30, ['unsigned long long']],
'HiberFlags' : [ 0x38, ['unsigned char']],
'spare' : [ 0x39, ['array', 3, ['unsigned char']]],
'NoHiberPtes' : [ 0x3c, ['unsigned long']],
'HiberVa' : [ 0x40, ['unsigned long long']],
'NoFreePages' : [ 0x48, ['unsigned long']],
'FreeMapCheck' : [ 0x4c, ['unsigned long']],
'WakeCheck' : [ 0x50, ['unsigned long']],
'NumPagesForLoader' : [ 0x58, ['unsigned long long']],
'FirstSecureRestorePage' : [ 0x60, ['unsigned long long']],
'FirstBootRestorePage' : [ 0x68, ['unsigned long long']],
'FirstKernelRestorePage' : [ 0x70, ['unsigned long long']],
'FirstChecksumRestorePage' : [ 0x78, ['unsigned long long']],
'NoChecksumEntries' : [ 0x80, ['unsigned long long']],
'PerfInfo' : [ 0x88, ['_PO_HIBER_PERF']],
'FirmwareRuntimeInformationPages' : [ 0x268, ['unsigned long']],
'FirmwareRuntimeInformation' : [ 0x270, ['array', 1, ['unsigned long long']]],
'SiLogOffset' : [ 0x278, ['unsigned long']],
'NoBootLoaderLogPages' : [ 0x27c, ['unsigned long']],
'BootLoaderLogPages' : [ 0x280, ['array', 24, ['unsigned long long']]],
'NotUsed' : [ 0x340, ['unsigned long']],
'ResumeContextCheck' : [ 0x344, ['unsigned long']],
'ResumeContextPages' : [ 0x348, ['unsigned long']],
'Hiberboot' : [ 0x34c, ['unsigned char']],
'HvCr3' : [ 0x350, ['unsigned long long']],
'HvEntryPoint' : [ 0x358, ['unsigned long long']],
'HvReservedTransitionAddress' : [ 0x360, ['unsigned long long']],
'HvReservedTransitionAddressSize' : [ 0x368, ['unsigned long long']],
'BootFlags' : [ 0x370, ['unsigned long long']],
'HalEntryPointPhysical' : [ 0x378, ['unsigned long long']],
'HighestPhysicalPage' : [ 0x380, ['unsigned long long']],
'BitlockerKeyPfns' : [ 0x388, ['array', 4, ['unsigned long long']]],
'HardwareSignature' : [ 0x3a8, ['unsigned long']],
} ],
'BATTERY_REPORTING_SCALE' : [ 0x8, {
'Granularity' : [ 0x0, ['unsigned long']],
'Capacity' : [ 0x4, ['unsigned long']],
} ],
'_RTL_ATOM_TABLE_REFERENCE' : [ 0x18, {
'LowBoxList' : [ 0x0, ['_LIST_ENTRY']],
'LowBoxID' : [ 0x10, ['unsigned long']],
'ReferenceCount' : [ 0x14, ['unsigned short']],
'Flags' : [ 0x16, ['unsigned short']],
} ],
'_CURDIR' : [ 0x18, {
'DosPath' : [ 0x0, ['_UNICODE_STRING']],
'Handle' : [ 0x10, ['pointer64', ['void']]],
} ],
'_PO_HIBER_PERF' : [ 0x1e0, {
'HiberIoTicks' : [ 0x0, ['unsigned long long']],
'HiberIoCpuTicks' : [ 0x8, ['unsigned long long']],
'HiberInitTicks' : [ 0x10, ['unsigned long long']],
'HiberHiberFileTicks' : [ 0x18, ['unsigned long long']],
'HiberCompressTicks' : [ 0x20, ['unsigned long long']],
'HiberSharedBufferTicks' : [ 0x28, ['unsigned long long']],
'HiberChecksumTicks' : [ 0x30, ['unsigned long long']],
'HiberChecksumIoTicks' : [ 0x38, ['unsigned long long']],
'TotalHibernateTime' : [ 0x40, ['_LARGE_INTEGER']],
'POSTTime' : [ 0x48, ['unsigned long']],
'ResumeBootMgrTime' : [ 0x4c, ['unsigned long']],
'BootmgrUserInputTime' : [ 0x50, ['unsigned long']],
'ResumeAppTicks' : [ 0x58, ['unsigned long long']],
'ResumeAppStartTimestamp' : [ 0x60, ['unsigned long long']],
'ResumeLibraryInitTicks' : [ 0x68, ['unsigned long long']],
'ResumeInitTicks' : [ 0x70, ['unsigned long long']],
'ResumeRestoreImageStartTimestamp' : [ 0x78, ['unsigned long long']],
'ResumeHiberFileTicks' : [ 0x80, ['unsigned long long']],
'ResumeIoTicks' : [ 0x88, ['unsigned long long']],
'ResumeDecompressTicks' : [ 0x90, ['unsigned long long']],
'ResumeAllocateTicks' : [ 0x98, ['unsigned long long']],
'ResumeUserInOutTicks' : [ 0xa0, ['unsigned long long']],
'ResumeMapTicks' : [ 0xa8, ['unsigned long long']],
'ResumeUnmapTicks' : [ 0xb0, ['unsigned long long']],
'ResumeChecksumTicks' : [ 0xb8, ['unsigned long long']],
'ResumeChecksumIoTicks' : [ 0xc0, ['unsigned long long']],
'ResumeKernelSwitchTimestamp' : [ 0xc8, ['unsigned long long']],
'WriteLogDataTimestamp' : [ 0xd0, ['unsigned long long']],
'KernelReturnFromHandler' : [ 0xd8, ['unsigned long long']],
'TimeStampCounterAtSwitchTime' : [ 0xe0, ['unsigned long long']],
'HalTscOffset' : [ 0xe8, ['unsigned long long']],
'HvlTscOffset' : [ 0xf0, ['unsigned long long']],
'SleeperThreadEnd' : [ 0xf8, ['unsigned long long']],
'KernelReturnSystemPowerStateTimestamp' : [ 0x100, ['unsigned long long']],
'IoBoundedness' : [ 0x108, ['unsigned long long']],
'KernelDecompressTicks' : [ 0x110, ['unsigned long long']],
'KernelIoTicks' : [ 0x118, ['unsigned long long']],
'KernelCopyTicks' : [ 0x120, ['unsigned long long']],
'ReadCheckCount' : [ 0x128, ['unsigned long long']],
'KernelInitTicks' : [ 0x130, ['unsigned long long']],
'KernelResumeHiberFileTicks' : [ 0x138, ['unsigned long long']],
'KernelIoCpuTicks' : [ 0x140, ['unsigned long long']],
'KernelSharedBufferTicks' : [ 0x148, ['unsigned long long']],
'KernelAnimationTicks' : [ 0x150, ['unsigned long long']],
'KernelChecksumTicks' : [ 0x158, ['unsigned long long']],
'KernelChecksumIoTicks' : [ 0x160, ['unsigned long long']],
'AnimationStart' : [ 0x168, ['_LARGE_INTEGER']],
'AnimationStop' : [ 0x170, ['_LARGE_INTEGER']],
'DeviceResumeTime' : [ 0x178, ['unsigned long']],
'SecurePagesProcessed' : [ 0x180, ['unsigned long long']],
'BootPagesProcessed' : [ 0x188, ['unsigned long long']],
'KernelPagesProcessed' : [ 0x190, ['unsigned long long']],
'BootBytesWritten' : [ 0x198, ['unsigned long long']],
'KernelBytesWritten' : [ 0x1a0, ['unsigned long long']],
'BootPagesWritten' : [ 0x1a8, ['unsigned long long']],
'KernelPagesWritten' : [ 0x1b0, ['unsigned long long']],
'BytesWritten' : [ 0x1b8, ['unsigned long long']],
'PagesWritten' : [ 0x1c0, ['unsigned long']],
'FileRuns' : [ 0x1c4, ['unsigned long']],
'NoMultiStageResumeReason' : [ 0x1c8, ['unsigned long']],
'MaxHuffRatio' : [ 0x1cc, ['unsigned long']],
'AdjustedTotalResumeTime' : [ 0x1d0, ['unsigned long long']],
'ResumeCompleteTimestamp' : [ 0x1d8, ['unsigned long long']],
} ],
'_MI_QUEUED_DEADSTACK_WORKITEM' : [ 0x28, {
'WorkItem' : [ 0x0, ['_WORK_QUEUE_ITEM']],
'Active' : [ 0x20, ['long']],
} ],
'_POP_FX_PROVIDER' : [ 0x8, {
'Index' : [ 0x0, ['unsigned long']],
'Activating' : [ 0x4, ['unsigned char']],
} ],
'_RTL_BALANCED_LINKS' : [ 0x20, {
'Parent' : [ 0x0, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'LeftChild' : [ 0x8, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'RightChild' : [ 0x10, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'Balance' : [ 0x18, ['unsigned char']],
'Reserved' : [ 0x19, ['array', 3, ['unsigned char']]],
} ],
'_FREE_DISPLAY' : [ 0x18, {
'RealVectorSize' : [ 0x0, ['unsigned long']],
'Hint' : [ 0x4, ['unsigned long']],
'Display' : [ 0x8, ['_RTL_BITMAP']],
} ],
'_MMINPAGE_SUPPORT_FLOW_THROUGH' : [ 0x38, {
'Page' : [ 0x0, ['array', 1, ['unsigned long long']]],
'InitialInPageSupport' : [ 0x8, ['pointer64', ['_MMINPAGE_SUPPORT']]],
'PagingFile' : [ 0x10, ['pointer64', ['_MMPAGING_FILE']]],
'PageFileOffset' : [ 0x18, ['unsigned long long']],
'Node' : [ 0x20, ['_RTL_BALANCED_NODE']],
} ],
'_POP_PER_PROCESSOR_CONTEXT' : [ 0x80, {
'UncompressedData' : [ 0x0, ['pointer64', ['unsigned char']]],
'MappingVa' : [ 0x8, ['pointer64', ['void']]],
'XpressEncodeWorkspace' : [ 0x10, ['pointer64', ['void']]],
'CompressedDataBuffer' : [ 0x18, ['pointer64', ['unsigned char']]],
'CopyTicks' : [ 0x20, ['unsigned long long']],
'CompressTicks' : [ 0x28, ['unsigned long long']],
'BytesCopied' : [ 0x30, ['unsigned long long']],
'PagesProcessed' : [ 0x38, ['unsigned long long']],
'DecompressTicks' : [ 0x40, ['unsigned long long']],
'ResumeCopyTicks' : [ 0x48, ['unsigned long long']],
'SharedBufferTicks' : [ 0x50, ['unsigned long long']],
'DecompressTicksByMethod' : [ 0x58, ['array', 2, ['unsigned long long']]],
'DecompressSizeByMethod' : [ 0x68, ['array', 2, ['unsigned long long']]],
'CompressCount' : [ 0x78, ['unsigned long']],
'HuffCompressCount' : [ 0x7c, ['unsigned long']],
} ],
'_IO_REMOVE_LOCK' : [ 0x20, {
'Common' : [ 0x0, ['_IO_REMOVE_LOCK_COMMON_BLOCK']],
} ],
'_MI_DYNAMIC_BITMAP' : [ 0x50, {
'Bitmap' : [ 0x0, ['_RTL_BITMAP_EX']],
'MaximumSize' : [ 0x10, ['unsigned long long']],
'Hint' : [ 0x18, ['unsigned long long']],
'BaseVa' : [ 0x20, ['pointer64', ['void']]],
'SizeTopDown' : [ 0x28, ['unsigned long long']],
'HintTopDown' : [ 0x30, ['unsigned long long']],
'BaseVaTopDown' : [ 0x38, ['pointer64', ['void']]],
'SpinLock' : [ 0x40, ['unsigned long long']],
'Vm' : [ 0x48, ['pointer64', ['_MMSUPPORT']]],
} ],
'_POP_IO_INFO' : [ 0x70, {
'DumpMdl' : [ 0x0, ['pointer64', ['_MDL']]],
'IoStatus' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'IoReady', 1: 'IoPending', 2: 'IoDone'})]],
'IoStartCount' : [ 0x10, ['unsigned long long']],
'IoBytesCompleted' : [ 0x18, ['unsigned long long']],
'IoBytesInProgress' : [ 0x20, ['unsigned long long']],
'RequestSize' : [ 0x28, ['unsigned long long']],
'IoLocation' : [ 0x30, ['_LARGE_INTEGER']],
'FileOffset' : [ 0x38, ['unsigned long long']],
'Buffer' : [ 0x40, ['pointer64', ['void']]],
'AsyncCapable' : [ 0x48, ['unsigned char']],
'BytesToRead' : [ 0x50, ['unsigned long long']],
'Pages' : [ 0x58, ['unsigned long']],
'HighestChecksumIndex' : [ 0x60, ['unsigned long long']],
'PreviousChecksum' : [ 0x68, ['unsigned short']],
} ],
'_LDRP_CSLIST' : [ 0x8, {
'Tail' : [ 0x0, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
} ],
'_NON_PAGED_DEBUG_INFO' : [ 0x20, {
'Signature' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'Size' : [ 0x4, ['unsigned long']],
'Machine' : [ 0x8, ['unsigned short']],
'Characteristics' : [ 0xa, ['unsigned short']],
'TimeDateStamp' : [ 0xc, ['unsigned long']],
'CheckSum' : [ 0x10, ['unsigned long']],
'SizeOfImage' : [ 0x14, ['unsigned long']],
'ImageBase' : [ 0x18, ['unsigned long long']],
} ],
'_POP_FX_PERF_SET' : [ 0x20, {
'PerfSet' : [ 0x0, ['pointer64', ['_PO_FX_COMPONENT_PERF_SET']]],
'CurrentPerf' : [ 0x8, ['unsigned long long']],
'CurrentPerfStamp' : [ 0x10, ['unsigned long long']],
'CurrentPerfNominal' : [ 0x18, ['unsigned char']],
} ],
'_AER_BRIDGE_DESCRIPTOR_FLAGS' : [ 0x2, {
'UncorrectableErrorMaskRW' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'UncorrectableErrorSeverityRW' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'CorrectableErrorMaskRW' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'AdvancedCapsAndControlRW' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'SecondaryUncorrectableErrorMaskRW' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'SecondaryUncorrectableErrorSevRW' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'SecondaryCapsAndControlRW' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 16, native_type='unsigned short')]],
'AsUSHORT' : [ 0x0, ['unsigned short']],
} ],
'_MM_SESSION_SPACE_FLAGS' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeletePending' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PoolInitialized' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DynamicVaInitialized' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'WsInitialized' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'PoolDestroyed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ObjectInitialized' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'LeakedPoolDeliberately' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Filler' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
} ],
'_RTL_CRITICAL_SECTION_DEBUG' : [ 0x30, {
'Type' : [ 0x0, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x2, ['unsigned short']],
'CriticalSection' : [ 0x8, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'ProcessLocksList' : [ 0x10, ['_LIST_ENTRY']],
'EntryCount' : [ 0x20, ['unsigned long']],
'ContentionCount' : [ 0x24, ['unsigned long']],
'Flags' : [ 0x28, ['unsigned long']],
'CreatorBackTraceIndexHigh' : [ 0x2c, ['unsigned short']],
'SpareUSHORT' : [ 0x2e, ['unsigned short']],
} ],
'__unnamed_29dd' : [ 0x8, {
'Gsiv' : [ 0x0, ['unsigned long']],
'WakeInterrupt' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReservedFlags' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_29df' : [ 0x10, {
'Address' : [ 0x0, ['_LARGE_INTEGER']],
'DataPayload' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_29e2' : [ 0x8, {
'IntrInfo' : [ 0x0, ['_INTERRUPT_HT_INTR_INFO']],
} ],
'__unnamed_29e6' : [ 0x4, {
'DestinationMode' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {1: 'ApicDestinationModePhysical', 2: 'ApicDestinationModeLogicalFlat', 3: 'ApicDestinationModeLogicalClustered', 4: 'ApicDestinationModeUnknown'})]],
} ],
'_INTERRUPT_VECTOR_DATA' : [ 0x58, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'InterruptTypeControllerInput', 1: 'InterruptTypeXapicMessage', 2: 'InterruptTypeHypertransport', 3: 'InterruptTypeMessageRequest'})]],
'Vector' : [ 0x4, ['unsigned long']],
'Irql' : [ 0x8, ['unsigned char']],
'Polarity' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'InterruptPolarityUnknown', 1: 'InterruptRisingEdge', 2: 'InterruptFallingEdge', 3: 'InterruptActiveBothTriggerLow', 4: 'InterruptActiveBothTriggerHigh'})]],
'Mode' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'LevelSensitive', 1: 'Latched'})]],
'TargetProcessors' : [ 0x18, ['_GROUP_AFFINITY']],
'IntRemapInfo' : [ 0x28, ['_INTERRUPT_REMAPPING_INFO']],
'ControllerInput' : [ 0x38, ['__unnamed_29dd']],
'HvDeviceId' : [ 0x40, ['unsigned long long']],
'XapicMessage' : [ 0x48, ['__unnamed_29df']],
'Hypertransport' : [ 0x48, ['__unnamed_29e2']],
'GenericMessage' : [ 0x48, ['__unnamed_29df']],
'MessageRequest' : [ 0x48, ['__unnamed_29e6']],
} ],
'_MMPAGE_FILE_EXPANSION_FLAGS' : [ 0x4, {
'PageFileNumber' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'Spare1' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'Spare2' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'IgnoreCurrentCommit' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IncreaseMinimumSize' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Spare3' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
} ],
'_POP_FX_DEPENDENT' : [ 0x8, {
'Index' : [ 0x0, ['unsigned long']],
'ProviderIndex' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_29f4' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'States' : [ 0x8, ['pointer64', ['_PO_FX_PERF_STATE']]],
} ],
'__unnamed_29f6' : [ 0x10, {
'Minimum' : [ 0x0, ['unsigned long long']],
'Maximum' : [ 0x8, ['unsigned long long']],
} ],
'_PO_FX_COMPONENT_PERF_SET' : [ 0x30, {
'Name' : [ 0x0, ['_UNICODE_STRING']],
'Flags' : [ 0x10, ['unsigned long long']],
'Unit' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'PoFxPerfStateUnitOther', 1: 'PoFxPerfStateUnitFrequency', 2: 'PoFxPerfStateUnitBandwidth', 3: 'PoFxPerfStateUnitMaximum'})]],
'Type' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'PoFxPerfStateTypeDiscrete', 1: 'PoFxPerfStateTypeRange', 2: 'PoFxPerfStateTypeMaximum'})]],
'Discrete' : [ 0x20, ['__unnamed_29f4']],
'Range' : [ 0x20, ['__unnamed_29f6']],
} ],
'_XPF_MCE_FLAGS' : [ 0x4, {
'MCG_CapabilityRW' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'MCG_GlobalControlRW' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_2a07' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned long']],
'CheckSum' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_2a09' : [ 0x10, {
'DiskId' : [ 0x0, ['_GUID']],
} ],
'__unnamed_2a0b' : [ 0x10, {
'Mbr' : [ 0x0, ['__unnamed_2a07']],
'Gpt' : [ 0x0, ['__unnamed_2a09']],
} ],
'_DUMP_INITIALIZATION_CONTEXT' : [ 0x108, {
'Length' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'MemoryBlock' : [ 0x8, ['pointer64', ['void']]],
'CommonBuffer' : [ 0x10, ['array', 2, ['pointer64', ['void']]]],
'PhysicalAddress' : [ 0x20, ['array', 2, ['_LARGE_INTEGER']]],
'StallRoutine' : [ 0x30, ['pointer64', ['void']]],
'OpenRoutine' : [ 0x38, ['pointer64', ['void']]],
'WriteRoutine' : [ 0x40, ['pointer64', ['void']]],
'FinishRoutine' : [ 0x48, ['pointer64', ['void']]],
'AdapterObject' : [ 0x50, ['pointer64', ['_ADAPTER_OBJECT']]],
'MappedRegisterBase' : [ 0x58, ['pointer64', ['void']]],
'PortConfiguration' : [ 0x60, ['pointer64', ['void']]],
'CrashDump' : [ 0x68, ['unsigned char']],
'MarkMemoryOnly' : [ 0x69, ['unsigned char']],
'HiberResume' : [ 0x6a, ['unsigned char']],
'Reserved1' : [ 0x6b, ['unsigned char']],
'MaximumTransferSize' : [ 0x6c, ['unsigned long']],
'CommonBufferSize' : [ 0x70, ['unsigned long']],
'TargetAddress' : [ 0x78, ['pointer64', ['void']]],
'WritePendingRoutine' : [ 0x80, ['pointer64', ['void']]],
'PartitionStyle' : [ 0x88, ['unsigned long']],
'DiskInfo' : [ 0x8c, ['__unnamed_2a0b']],
'ReadRoutine' : [ 0xa0, ['pointer64', ['void']]],
'GetDriveTelemetryRoutine' : [ 0xa8, ['pointer64', ['void']]],
'LogSectionTruncateSize' : [ 0xb0, ['unsigned long']],
'Parameters' : [ 0xb4, ['array', 16, ['unsigned long']]],
'GetTransferSizesRoutine' : [ 0xf8, ['pointer64', ['void']]],
'DumpNotifyRoutine' : [ 0x100, ['pointer64', ['void']]],
} ],
'_MI_IO_CACHE_STATS' : [ 0x20, {
'UnusedBlocks' : [ 0x0, ['unsigned long long']],
'ActiveCacheMatch' : [ 0x8, ['unsigned long']],
'ActiveCacheOverride' : [ 0xc, ['unsigned long']],
'UnmappedCacheFlush' : [ 0x10, ['unsigned long']],
'UnmappedCacheMatch' : [ 0x14, ['unsigned long']],
'UnmappedCacheConflict' : [ 0x18, ['unsigned long']],
} ],
'_PROCESSOR_PLATFORM_STATE_RESIDENCY' : [ 0x10, {
'Residency' : [ 0x0, ['unsigned long long']],
'TransitionCount' : [ 0x8, ['unsigned long long']],
} ],
'_ETW_QUEUE_ENTRY' : [ 0x38, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'DataBlock' : [ 0x10, ['pointer64', ['_ETWP_NOTIFICATION_HEADER']]],
'RegEntry' : [ 0x18, ['pointer64', ['_ETW_REG_ENTRY']]],
'ReplyObject' : [ 0x20, ['pointer64', ['_ETW_REG_ENTRY']]],
'WakeReference' : [ 0x28, ['pointer64', ['void']]],
'RegIndex' : [ 0x30, ['unsigned short']],
'ReplyIndex' : [ 0x32, ['unsigned short']],
'Flags' : [ 0x34, ['unsigned long']],
} ],
'_MI_RESERVATION_CLUSTER_INFO' : [ 0x4, {
'ClusterSize' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 10, native_type='unsigned long')]],
'SequenceNumber' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 32, native_type='unsigned long')]],
'EntireInfo' : [ 0x0, ['long']],
} ],
'_TRIAGE_POP_IRP_DATA' : [ 0x20, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'Irp' : [ 0x10, ['pointer64', ['_IRP']]],
'Pdo' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'_KDPC_LIST' : [ 0x10, {
'ListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'LastEntry' : [ 0x8, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
} ],
'_CM_KEY_SECURITY' : [ 0x28, {
'Signature' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'Flink' : [ 0x4, ['unsigned long']],
'Blink' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['unsigned long']],
'DescriptorLength' : [ 0x10, ['unsigned long']],
'Descriptor' : [ 0x14, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_PO_DEVICE_NOTIFY_ORDER' : [ 0x178, {
'Locked' : [ 0x0, ['unsigned char']],
'WarmEjectPdoPointer' : [ 0x8, ['pointer64', ['pointer64', ['_DEVICE_OBJECT']]]],
'OrderLevel' : [ 0x10, ['array', 5, ['_PO_NOTIFY_ORDER_LEVEL']]],
} ],
'_MI_POOL_FAILURE_REASONS' : [ 0x2c, {
'NonPagedNoPtes' : [ 0x0, ['unsigned long']],
'PriorityTooLow' : [ 0x4, ['unsigned long']],
'NonPagedNoPagesAvailable' : [ 0x8, ['unsigned long']],
'PagedNoPtes' : [ 0xc, ['unsigned long']],
'SessionPagedNoPtes' : [ 0x10, ['unsigned long']],
'PagedNoPagesAvailable' : [ 0x14, ['unsigned long']],
'SessionPagedNoPagesAvailable' : [ 0x18, ['unsigned long']],
'PagedNoCommit' : [ 0x1c, ['unsigned long']],
'SessionPagedNoCommit' : [ 0x20, ['unsigned long']],
'NonPagedNoResidentAvailable' : [ 0x24, ['unsigned long']],
'NonPagedNoCommit' : [ 0x28, ['unsigned long']],
} ],
'_IO_REMOVE_LOCK_COMMON_BLOCK' : [ 0x20, {
'Removed' : [ 0x0, ['unsigned char']],
'Reserved' : [ 0x1, ['array', 3, ['unsigned char']]],
'IoCount' : [ 0x4, ['long']],
'RemoveEvent' : [ 0x8, ['_KEVENT']],
} ],
'_POP_FX_IDLE_STATE' : [ 0x18, {
'TransitionLatency' : [ 0x0, ['unsigned long long']],
'ResidencyRequirement' : [ 0x8, ['unsigned long long']],
'NominalPower' : [ 0x10, ['unsigned long']],
} ],
'_WHEA_NOTIFICATION_FLAGS' : [ 0x2, {
'PollIntervalRW' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'SwitchToPollingThresholdRW' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'SwitchToPollingWindowRW' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'ErrorThresholdRW' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'ErrorThresholdWindowRW' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 16, native_type='unsigned short')]],
'AsUSHORT' : [ 0x0, ['unsigned short']],
} ],
'_ARBITER_CONFLICT_INFO' : [ 0x18, {
'OwningObject' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'Start' : [ 0x8, ['unsigned long long']],
'End' : [ 0x10, ['unsigned long long']],
} ],
'_PO_NOTIFY_ORDER_LEVEL' : [ 0x48, {
'DeviceCount' : [ 0x0, ['unsigned long']],
'ActiveCount' : [ 0x4, ['unsigned long']],
'WaitSleep' : [ 0x8, ['_LIST_ENTRY']],
'ReadySleep' : [ 0x18, ['_LIST_ENTRY']],
'ReadyS0' : [ 0x28, ['_LIST_ENTRY']],
'WaitS0' : [ 0x38, ['_LIST_ENTRY']],
} ],
'_ETWP_NOTIFICATION_HEADER' : [ 0x48, {
'NotificationType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {1: 'EtwNotificationTypeNoReply', 2: 'EtwNotificationTypeLegacyEnable', 3: 'EtwNotificationTypeEnable', 4: 'EtwNotificationTypePrivateLogger', 5: 'EtwNotificationTypePerflib', 6: 'EtwNotificationTypeAudio', 7: 'EtwNotificationTypeSession', 8: 'EtwNotificationTypeReserved', 9: 'EtwNotificationTypeCredentialUI', 10: 'EtwNotificationTypeInProcSession', 11: 'EtwNotificationTypeMax'})]],
'NotificationSize' : [ 0x4, ['unsigned long']],
'RefCount' : [ 0x8, ['long']],
'ReplyRequested' : [ 0xc, ['unsigned char']],
'ReplyIndex' : [ 0x10, ['unsigned long']],
'Timeout' : [ 0x10, ['unsigned long']],
'ReplyCount' : [ 0x14, ['unsigned long']],
'NotifyeeCount' : [ 0x14, ['unsigned long']],
'ReplyHandle' : [ 0x18, ['unsigned long long']],
'ReplyObject' : [ 0x18, ['pointer64', ['void']]],
'RegIndex' : [ 0x18, ['unsigned long']],
'TargetPID' : [ 0x20, ['unsigned long']],
'SourcePID' : [ 0x24, ['unsigned long']],
'DestinationGuid' : [ 0x28, ['_GUID']],
'SourceGuid' : [ 0x38, ['_GUID']],
} ],
'__unnamed_2a42' : [ 0x4, {
'Mask' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Polarity' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'MessageType' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 5, native_type='unsigned long')]],
'RequestEOI' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'DestinationMode' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'MessageType3' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Destination' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'Vector' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long')]],
'ExtendedAddress' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_2a44' : [ 0x4, {
'bits' : [ 0x0, ['__unnamed_2a42']],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_2a47' : [ 0x4, {
'ExtendedDestination' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 30, native_type='unsigned long')]],
'PassPW' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'WaitingForEOI' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_2a49' : [ 0x4, {
'bits' : [ 0x0, ['__unnamed_2a47']],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_INTERRUPT_HT_INTR_INFO' : [ 0x8, {
'LowPart' : [ 0x0, ['__unnamed_2a44']],
'HighPart' : [ 0x4, ['__unnamed_2a49']],
} ],
'_THREAD_PERFORMANCE_DATA' : [ 0x1c0, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'ProcessorNumber' : [ 0x4, ['_PROCESSOR_NUMBER']],
'ContextSwitches' : [ 0x8, ['unsigned long']],
'HwCountersCount' : [ 0xc, ['unsigned long']],
'UpdateCount' : [ 0x10, ['unsigned long long']],
'WaitReasonBitMap' : [ 0x18, ['unsigned long long']],
'HardwareCounters' : [ 0x20, ['unsigned long long']],
'CycleTime' : [ 0x28, ['_COUNTER_READING']],
'HwCounters' : [ 0x40, ['array', 16, ['_COUNTER_READING']]],
} ],
'_ETW_REPLY_QUEUE' : [ 0x48, {
'Queue' : [ 0x0, ['_KQUEUE']],
'EventsLost' : [ 0x40, ['long']],
} ],
'_PROC_PERF_CHECK_SNAP' : [ 0x50, {
'Time' : [ 0x0, ['unsigned long long']],
'Active' : [ 0x8, ['unsigned long long']],
'Stall' : [ 0x10, ['unsigned long long']],
'FrequencyScaledActive' : [ 0x18, ['unsigned long long']],
'PerformanceScaledActive' : [ 0x20, ['unsigned long long']],
'PerformanceScaledKernelActive' : [ 0x28, ['unsigned long long']],
'CyclesActive' : [ 0x30, ['unsigned long long']],
'CyclesAffinitized' : [ 0x38, ['unsigned long long']],
'TaggedThreadCycles' : [ 0x40, ['array', 2, ['unsigned long long']]],
} ],
'_ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS' : [ 0x8, {
'AllocatedResources' : [ 0x0, ['pointer64', ['pointer64', ['_CM_PARTIAL_RESOURCE_LIST']]]],
} ],
'__unnamed_2a57' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned long']],
'NumberOfPtesToFree' : [ 0x0, ['unsigned long']],
} ],
'_MI_PER_SESSION_PROTOS' : [ 0x30, {
'SessionProtoNode' : [ 0x0, ['_RTL_BALANCED_NODE']],
'FreeList' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'DriverAddress' : [ 0x0, ['pointer64', ['void']]],
'SessionId' : [ 0x18, ['unsigned long']],
'Subsection' : [ 0x18, ['pointer64', ['_SUBSECTION']]],
'SubsectionBase' : [ 0x20, ['pointer64', ['_MMPTE']]],
'u2' : [ 0x28, ['__unnamed_2a57']],
} ],
'_PO_FX_PERF_STATE_CHANGE' : [ 0x10, {
'Set' : [ 0x0, ['unsigned long']],
'StateIndex' : [ 0x8, ['unsigned long']],
'StateValue' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_2a5d' : [ 0x8, {
'MessageAddressLow' : [ 0x0, ['unsigned long']],
'MessageData' : [ 0x4, ['unsigned short']],
'Reserved' : [ 0x6, ['unsigned short']],
} ],
'__unnamed_2a5f' : [ 0x8, {
'RemappedFormat' : [ 0x0, ['_ULARGE_INTEGER']],
'Msi' : [ 0x0, ['__unnamed_2a5d']],
} ],
'_INTERRUPT_REMAPPING_INFO' : [ 0x10, {
'IrtIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 30, native_type='unsigned long')]],
'FlagHalInternal' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'FlagTranslated' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'u' : [ 0x8, ['__unnamed_2a5f']],
} ],
'_RTL_ACTIVATION_CONTEXT_STACK_FRAME' : [ 0x18, {
'Previous' : [ 0x0, ['pointer64', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'ActivationContext' : [ 0x8, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'Flags' : [ 0x10, ['unsigned long']],
} ],
'_ETW_FILTER_STRING_TOKEN_ELEMENT' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'String' : [ 0x8, ['pointer64', ['unsigned short']]],
} ],
'_ARBITER_ORDERING' : [ 0x10, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
} ],
'_RTL_AVL_TABLE' : [ 0x68, {
'BalancedRoot' : [ 0x0, ['_RTL_BALANCED_LINKS']],
'OrderedPointer' : [ 0x20, ['pointer64', ['void']]],
'WhichOrderedElement' : [ 0x28, ['unsigned long']],
'NumberGenericTableElements' : [ 0x2c, ['unsigned long']],
'DepthOfTree' : [ 0x30, ['unsigned long']],
'RestartKey' : [ 0x38, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'DeleteCount' : [ 0x40, ['unsigned long']],
'CompareRoutine' : [ 0x48, ['pointer64', ['void']]],
'AllocateRoutine' : [ 0x50, ['pointer64', ['void']]],
'FreeRoutine' : [ 0x58, ['pointer64', ['void']]],
'TableContext' : [ 0x60, ['pointer64', ['void']]],
} ],
'_KTRANSACTION_HISTORY' : [ 0x8, {
'RecordType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {1: 'KTMOH_CommitTransaction_Result', 2: 'KTMOH_RollbackTransaction_Result'})]],
'Payload' : [ 0x4, ['unsigned long']],
} ],
'_PO_FX_PERF_STATE' : [ 0x10, {
'Value' : [ 0x0, ['unsigned long long']],
'Context' : [ 0x8, ['pointer64', ['void']]],
} ],
}
|
reinaH/osf.io
|
refs/heads/develop2
|
scripts/dev_populate_conferences.py
|
19
|
#!/usr/bin/env python
# encoding: utf-8
import os
import sys
import argparse
from modularodm import Q
from modularodm.exceptions import ModularOdmException
from framework.auth.core import User
from website import settings
from website.app import init_app
from website.conferences.model import Conference
def parse_args():
    """Parse command-line options; ``--user`` (admin email) is mandatory."""
    arg_parser = argparse.ArgumentParser(
        description='Create conferences with a specified admin email.',
    )
    arg_parser.add_argument('-u', '--user', dest='user', required=True)
    return arg_parser.parse_args()
def main():
    """Entry point: bootstrap the OSF app, then seed/refresh conferences."""
    cli_args = parse_args()
    # Backends are needed (we touch the DB); HTTP routes are not.
    init_app(set_backends=True, routes=False)
    populate_conferences(cli_args.user)
# Registry of OSF meeting endpoints to seed.  Keys are the conference
# endpoint slugs; each value dict is passed verbatim as keyword arguments
# to the Conference constructor in populate_conferences().
MEETING_DATA = {
    'spsp2014': {
        'name': 'SPSP 2014',
        'info_url': None,
        'logo_url': None,
        'active': False,
        'public_projects': True,
    },
    'asb2014': {
        'name': 'ASB 2014',
        'info_url': 'http://www.sebiologists.org/meetings/talks_posters.html',
        'logo_url': None,
        'active': False,
        'public_projects': True,
    },
    'aps2014': {
        'name': 'APS 2014',
        'info_url': 'http://centerforopenscience.org/aps/',
        'logo_url': '/static/img/2014_Convention_banner-with-APS_700px.jpg',
        'active': False,
        'public_projects': True,
    },
    'annopeer2014': {
        'name': '#annopeer',
        'info_url': None,
        'logo_url': None,
        'active': False,
        'public_projects': True,
    },
    'cpa2014': {
        'name': 'CPA 2014',
        'info_url': None,
        'logo_url': None,
        'active': False,
        'public_projects': True,
    },
    'filaments2014': {
        'name': 'Filaments 2014',
        'info_url': None,
        # Implicit string concatenation: one long URL split over two lines.
        'logo_url': 'https://science.nrao.edu/science/meetings/2014/'
        'filamentary-structure/images/filaments2014_660x178.png',
        'active': False,
        'public_projects': True,
    },
    'bitss2014': {
        'name': 'BITSS Research Transparency Forum 2014',
        'info_url': None,
        'logo_url': os.path.join(
            settings.STATIC_URL_PATH,
            'img',
            'conferences',
            'bitss.jpg',
        ),
        'active': False,
        'public_projects': True,
    },
    'spsp2015': {
        'name': 'SPSP 2015',
        'info_url': None,
        'logo_url': 'http://spspmeeting.org/CMSPages/SPSPimages/spsp2015banner.jpg',
        # NOTE(review): unlike every other entry this one has no
        # 'public_projects' key — confirm the model default is intended.
        'active': True,
    },
    'aps2015': {
        'name': 'APS 2015',
        'info_url': None,
        'logo_url': 'http://www.psychologicalscience.org/images/APS_2015_Banner_990x157.jpg',
        'active': True,
        'public_projects': True,
    },
    'icps2015': {
        'name': 'ICPS 2015',
        'info_url': None,
        'logo_url': 'http://icps.psychologicalscience.org/wp-content/themes/deepblue/images/ICPS_Website-header_990px.jpg',
        'active': True,
        'public_projects': True,
    },
    'mpa2015': {
        'name': 'MPA 2015',
        'info_url': None,
        'logo_url': 'http://www.midwesternpsych.org/resources/Pictures/MPA%20logo.jpg',
        'active': True,
        'public_projects': True,
    },
    'NCCC2015': {
        'name': '2015 NC Cognition Conference',
        'info_url': None,
        'logo_url': None,
        'active': True,
        'public_projects': True,
    },
}
def populate_conferences(email):
    """Create (or update) a Conference record for every MEETING_DATA entry.

    :param email: username/email of the user to install as conference admin.
    :raises RuntimeError: if *email* does not belong to a registered user.
    """
    # The admin lookup does not depend on the meeting, so resolve it once
    # up front instead of repeating the query on every loop iteration.
    try:
        user = User.find_one(Q('username', 'iexact', email))
    except ModularOdmException:
        raise RuntimeError('Username {0!r} is not registered.'.format(email))
    for meeting, attrs in MEETING_DATA.iteritems():
        # Fresh list per conference so records never share a mutable admins list.
        admin_objs = [user]
        conf = Conference(
            endpoint=meeting, admins=admin_objs, **attrs
        )
        try:
            conf.save()
        except ModularOdmException:
            # Endpoint already exists: update the stored record in place.
            print('{0} Conference already exists. Updating existing record...'.format(meeting))
            conf = Conference.find_one(Q('endpoint', 'eq', meeting))
            for key, value in attrs.items():
                setattr(conf, key, value)
            conf.admins = admin_objs
            conf.save()
# Script entry point: run only when executed directly, not on import.
if __name__ == '__main__':
    main()
|
projectchrono/chrono
|
refs/heads/develop
|
src/demos/python/irrlicht/demo_IRR_earthquake.py
|
3
|
#------------------------------------------------------------------------------
# Name: pychrono example
# Purpose:
#
# Author: Alessandro Tasora
#
# Created: 1/01/2019
# Copyright: (c) ProjectChrono 2019
#
#
# This file shows how to
# - create a small stack of bricks,
# - create a support that shakes like an earthquake, with motion function
# - simulate the bricks that fall
#-------------------------------------------------------------------------------
import pychrono.core as chrono
import pychrono.irrlicht as chronoirr
# The path to the Chrono data directory containing various assets (meshes, textures, data files)
# is automatically set, relative to the default location of this demo.
# If running from a different directory, you must change the path to the data directory with:
#chrono.SetChronoDataPath('path/to/data')
# ---------------------------------------------------------------------
#
# Create the simulation system and add items
#
# Non-smooth contacts (NSC) system: complementarity-based contact model.
my_system = chrono.ChSystemNSC()
# Set the default outward/inward shape margins for collision detection,
# this is especially important for very large or very small objects.
chrono.ChCollisionModel.SetDefaultSuggestedEnvelope(0.001)
chrono.ChCollisionModel.SetDefaultSuggestedMargin(0.001)
# Maybe you want to change some settings for the solver. For example you
# might want to use SetSolverMaxIterations to set the number of iterations
# per timestep, etc.
#my_system.SetSolverType(chrono.ChSolver.Type_BARZILAIBORWEIN) # precise, more slow
my_system.SetSolverMaxIterations(70)
# Create a contact material (surface property) to share between all objects.
# The rolling and spinning parameters are optional - if enabled they double
# the computational time.
brick_material = chrono.ChMaterialSurfaceNSC()
brick_material.SetFriction(0.5)
brick_material.SetDampingF(0.2)
brick_material.SetCompliance (0.0000001)
brick_material.SetComplianceT(0.0000001)
# brick_material.SetRollingFriction(rollfrict_param)
# brick_material.SetSpinningFriction(0)
# brick_material.SetComplianceRolling(0.0000001)
# brick_material.SetComplianceSpinning(0.0000001)
# Create the set of bricks in a vertical stack, along Y axis
nbricks_on_x = 1
nbricks_on_y = 6
size_brick_x = 0.25  # brick dimensions, meters
size_brick_y = 0.12
size_brick_z = 0.12
density_brick = 1000;    # kg/m^3
mass_brick = density_brick * size_brick_x * size_brick_y * size_brick_z;
# NOTE(review): uses the solid-sphere formula (2/5)*m*r^2 as a rough
# isotropic approximation for all three axes.
inertia_brick = 2/5*(pow(size_brick_x,2))*mass_brick; # to do: compute separate xx,yy,zz inertias
# Build the stack: each brick gets mass/inertia, a collision box, and a
# visualization asset before being added to the system.
for ix in range(0,nbricks_on_x):
    for iy in range(0,nbricks_on_y):
        # create it
        body_brick = chrono.ChBody()
        # set initial position: bricks stacked upward along Y
        body_brick.SetPos(chrono.ChVectorD(ix*size_brick_x, (iy+0.5)*size_brick_y, 0 ))
        # set mass properties
        body_brick.SetMass(mass_brick)
        body_brick.SetInertiaXX(chrono.ChVectorD(inertia_brick,inertia_brick,inertia_brick))
        # Collision shape
        body_brick.GetCollisionModel().ClearModel()
        body_brick.GetCollisionModel().AddBox(brick_material, size_brick_x/2, size_brick_y/2, size_brick_z/2) # must set half sizes
        body_brick.GetCollisionModel().BuildModel()
        body_brick.SetCollide(True)
        # Visualization shape, for rendering animation
        body_brick_shape = chrono.ChBoxShape()
        body_brick_shape.GetBoxGeometry().Size = chrono.ChVectorD(size_brick_x/2, size_brick_y/2, size_brick_z/2)
        if iy%2==0 :
            body_brick_shape.SetColor(chrono.ChColor(0.65, 0.65, 0.6)) # gray color for even-index rows (iy % 2 == 0)
        body_brick.GetAssets().push_back(body_brick_shape)
        my_system.Add(body_brick)
# Create the room floor: a simple fixed rigid body with a collision shape
# and a visualization shape
body_floor = chrono.ChBody()
body_floor.SetBodyFixed(True)
body_floor.SetPos(chrono.ChVectorD(0, -2, 0 ))
# Collision shape
body_floor.GetCollisionModel().ClearModel()
body_floor.GetCollisionModel().AddBox(brick_material, 3, 1, 3) # hemi sizes
body_floor.GetCollisionModel().BuildModel()
body_floor.SetCollide(True)
# Visualization shape
body_floor_shape = chrono.ChBoxShape()
body_floor_shape.GetBoxGeometry().Size = chrono.ChVectorD(3, 1, 3)
body_floor.GetAssets().push_back(body_floor_shape)
body_floor_texture = chrono.ChTexture()
body_floor_texture.SetTextureFilename(chrono.GetChronoDataFile('textures/concrete.jpg'))
body_floor.GetAssets().push_back(body_floor_texture)
my_system.Add(body_floor)
# Create the shaking table, as a box
size_table_x = 1;
size_table_y = 0.2;
size_table_z = 1;
body_table = chrono.ChBody()
body_table.SetPos(chrono.ChVectorD(0, -size_table_y/2, 0 ))
# Collision shape
body_table.GetCollisionModel().ClearModel()
body_table.GetCollisionModel().AddBox(brick_material, size_table_x/2, size_table_y/2, size_table_z/2) # hemi sizes
body_table.GetCollisionModel().BuildModel()
body_table.SetCollide(True)
# Visualization shape
body_table_shape = chrono.ChBoxShape()
body_table_shape.GetBoxGeometry().Size = chrono.ChVectorD(size_table_x/2, size_table_y/2, size_table_z/2)
body_table_shape.SetColor(chrono.ChColor(0.4,0.4,0.5))
body_table.GetAssets().push_back(body_table_shape)
body_table_texture = chrono.ChTexture()
body_table_texture.SetTextureFilename(chrono.GetChronoDataFile('textures/concrete.jpg'))
body_table.GetAssets().push_back(body_table_texture)
my_system.Add(body_table)
# Create a constraint that blocks free 3 x y z translations and 3 rx ry rz rotations
# of the table respect to the floor, and impose that the relative imposed position
# depends on a specified motion law.
link_shaker = chrono.ChLinkLockLock()
link_shaker.Initialize(body_table, body_floor, chrono.CSYSNORM)
my_system.Add(link_shaker)
# ..create the function for the imposed motion along the Y component.
# NOTE(review): the original comment said "x horizontal motion", but the
# call below drives Motion_Y — comment fixed, call kept as-is.
mfunY = chrono.ChFunction_Sine(0,1.5,0.001) # phase, frequency, amplitude
link_shaker.SetMotion_Y(mfunY)
# ..create the function for the imposed motion along the Z component
# (original comment said "y vertical"; the call drives Motion_Z).
mfunZ = chrono.ChFunction_Sine(0,1.5,0.12) # phase, frequency, amplitude
link_shaker.SetMotion_Z(mfunZ)
# Note that you could use other types of ChFunction_ objects, or create
# your custom function by class inheritance (see demo_python.py), or also
# set a function for table rotation , etc.
# ---------------------------------------------------------------------
#
# Create an Irrlicht application to visualize the system
#
myapplication = chronoirr.ChIrrApp(my_system, 'PyChrono example', chronoirr.dimension2du(1024,768))
myapplication.AddTypicalSky()
myapplication.AddTypicalLogo(chrono.GetChronoDataFile('logo_pychrono_alpha.png'))
myapplication.AddTypicalCamera(chronoirr.vector3df(0.5,0.5,1.0))
myapplication.AddLightWithShadow(chronoirr.vector3df(2,4,2),    # point
                                 chronoirr.vector3df(0,0,0),    # aimpoint
                                 9,                 # radius (power)
                                 1,9,               # near, far
                                 30)                # angle of FOV
# ==IMPORTANT!== Use this function for adding a ChIrrNodeAsset to all items
# in the system. These ChIrrNodeAsset assets are 'proxies' to the Irrlicht meshes.
# If you need a finer control on which item really needs a visualization proxy in
# Irrlicht, just use application.AssetBind(myitem); on a per-item basis.
myapplication.AssetBindAll();
# ==IMPORTANT!== Use this function for 'converting' into Irrlicht meshes the assets
# that you added to the bodies into 3D shapes, they can be visualized by Irrlicht!
myapplication.AssetUpdateAll();
# If you want to show shadows because you used "AddLightWithShadow()'
# you must remember this:
myapplication.AddShadowAll();
# ---------------------------------------------------------------------
#
# Run the simulation
#
myapplication.SetTimestep(0.001)
myapplication.SetTryRealtime(True)
# Render loop: several physics substeps per rendered frame.
while(myapplication.GetDevice().run()):
    myapplication.BeginScene()
    myapplication.DrawAll()
    for substep in range(0,5):
        myapplication.DoStep()
    myapplication.EndScene()
|
ibinti/intellij-community
|
refs/heads/master
|
plugins/hg4idea/testData/bin/mercurial/hgweb/protocol.py
|
93
|
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import cgi, cStringIO, zlib, urllib
from mercurial import util, wireproto
from common import HTTP_OK
HGTYPE = 'application/mercurial-0.1'
HGERRTYPE = 'application/hg-error'
class webproto(object):
    """Adapter exposing a hgweb WSGI request to the wire protocol layer.

    Instances satisfy the duck-typed "proto" interface that
    ``wireproto.dispatch`` expects: argument access, payload streaming,
    and capture of ui output around command execution.
    """
    def __init__(self, req, ui):
        # req: the hgweb request object; ui: the repo ui whose output
        # streams may be temporarily captured (see redirect()/restore()).
        self.req = req
        self.response = ''
        self.ui = ui
    def getargs(self, args):
        """Return values for the space-separated argument names in *args*.

        A name of '*' collects every remaining known argument (except
        'cmd') into a dict stored under the '*' key.
        """
        knownargs = self._args()
        data = {}
        keys = args.split()
        for k in keys:
            if k == '*':
                star = {}
                for key in knownargs.keys():
                    if key != 'cmd' and key not in keys:
                        star[key] = knownargs[key][0]
                data['*'] = star
            else:
                data[k] = knownargs[k][0]
        return [data[k] for k in keys]
    def _args(self):
        # Merge regular form arguments with arguments spilled into
        # numbered X-HgArg-N request headers (used for oversized queries).
        args = self.req.form.copy()
        chunks = []
        i = 1
        while True:
            h = self.req.env.get('HTTP_X_HGARG_' + str(i))
            if h is None:
                break
            chunks += [h]
            i += 1
        args.update(cgi.parse_qs(''.join(chunks), keep_blank_values=True))
        return args
    def getfile(self, fp):
        # Copy exactly CONTENT_LENGTH bytes of the request body into fp.
        length = int(self.req.env['CONTENT_LENGTH'])
        for s in util.filechunkiter(self.req, limit=length):
            fp.write(s)
    def redirect(self):
        # Capture ui stdout/stderr into one buffer; restore() returns it.
        # Must be paired with restore() — the swap is order-dependent.
        self.oldio = self.ui.fout, self.ui.ferr
        self.ui.ferr = self.ui.fout = cStringIO.StringIO()
    def restore(self):
        # Undo redirect() and return everything the command wrote.
        val = self.ui.fout.getvalue()
        self.ui.ferr, self.ui.fout = self.oldio
        return val
    def groupchunks(self, cg):
        # Stream the changegroup zlib-compressed in 4 KiB reads.
        z = zlib.compressobj()
        while True:
            chunk = cg.read(4096)
            if not chunk:
                break
            yield z.compress(chunk)
        yield z.flush()
    def _client(self):
        # Remote client identifier string (scheme:host:user), URL-quoted.
        return 'remote:%s:%s:%s' % (
            self.req.env.get('wsgi.url_scheme') or 'http',
            urllib.quote(self.req.env.get('REMOTE_HOST', '')),
            urllib.quote(self.req.env.get('REMOTE_USER', '')))
def iscmd(cmd):
    """Tell whether *cmd* names a supported wire-protocol command."""
    known = wireproto.commands
    return cmd in known
def call(repo, req, cmd):
    """Dispatch wire-protocol command *cmd* and adapt its result to WSGI.

    Returns an iterable of response body chunks; empty when the body was
    already handed to ``req.respond``.
    """
    p = webproto(req, repo.ui)
    rsp = wireproto.dispatch(repo, p, cmd)
    if isinstance(rsp, str):
        # Plain string result: send as a single mercurial payload.
        req.respond(HTTP_OK, HGTYPE, body=rsp)
        return []
    elif isinstance(rsp, wireproto.streamres):
        # Streaming result: hand the generator straight to the WSGI layer.
        req.respond(HTTP_OK, HGTYPE)
        return rsp.gen
    elif isinstance(rsp, wireproto.pushres):
        # Push result: integer code followed by the captured ui output.
        val = p.restore()
        rsp = '%d\n%s' % (rsp.res, val)
        req.respond(HTTP_OK, HGTYPE, body=rsp)
        return []
    elif isinstance(rsp, wireproto.pusherr):
        # drain the incoming bundle
        req.drain()
        p.restore()
        rsp = '0\n%s\n' % rsp.res
        req.respond(HTTP_OK, HGTYPE, body=rsp)
        return []
    elif isinstance(rsp, wireproto.ooberror):
        # Out-of-band error: reported under the dedicated error MIME type.
        rsp = rsp.message
        req.respond(HTTP_OK, HGERRTYPE, body=rsp)
        return []
|
sinkuri256/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Lib/dummy_threading.py
|
210
|
"""Faux ``threading`` version using ``dummy_thread`` instead of ``thread``.
The module ``_dummy_threading`` is added to ``sys.modules`` in order
to not have ``threading`` considered imported. Had ``threading`` been
directly imported it would have made all subsequent imports succeed
regardless of whether ``_thread`` was available which is not desired.
"""
from sys import modules as sys_modules
import _dummy_thread
# Declaring now so as to not have to nest ``try``s to get proper clean-up.
holding_thread = False
holding_threading = False
holding__threading_local = False
try:
# Could have checked if ``_thread`` was not in sys.modules and gone
# a different route, but decided to mirror technique used with
# ``threading`` below.
if '_thread' in sys_modules:
held_thread = sys_modules['_thread']
holding_thread = True
# Must have some module named ``_thread`` that implements its API
# in order to initially import ``threading``.
sys_modules['_thread'] = sys_modules['_dummy_thread']
if 'threading' in sys_modules:
# If ``threading`` is already imported, might as well prevent
# trying to import it more than needed by saving it if it is
# already imported before deleting it.
held_threading = sys_modules['threading']
holding_threading = True
del sys_modules['threading']
if '_threading_local' in sys_modules:
# If ``_threading_local`` is already imported, might as well prevent
# trying to import it more than needed by saving it if it is
# already imported before deleting it.
held__threading_local = sys_modules['_threading_local']
holding__threading_local = True
del sys_modules['_threading_local']
import threading
# Need a copy of the code kept somewhere...
sys_modules['_dummy_threading'] = sys_modules['threading']
del sys_modules['threading']
sys_modules['_dummy__threading_local'] = sys_modules['_threading_local']
del sys_modules['_threading_local']
from _dummy_threading import *
from _dummy_threading import __all__
finally:
# Put back ``threading`` if we overwrote earlier
if holding_threading:
sys_modules['threading'] = held_threading
del held_threading
del holding_threading
# Put back ``_threading_local`` if we overwrote earlier
if holding__threading_local:
sys_modules['_threading_local'] = held__threading_local
del held__threading_local
del holding__threading_local
# Put back ``thread`` if we overwrote, else del the entry we made
if holding_thread:
sys_modules['_thread'] = held_thread
del held_thread
else:
del sys_modules['_thread']
del holding_thread
del _dummy_thread
del sys_modules
|
cts2/pyjxslt
|
refs/heads/master
|
pyjxslt-python/tests/testXMLtoJSON.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Mayo Clinic
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of the <ORGANIZATION> nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
import pyjxslt
from dict_compare import dict_compare
import json
xml1 = """<?xml version="1.0" encoding="UTF-8"?>
<doc>
<entry id='17'>FOO</entry>
<entry id='42'>BAR</entry>
</doc>"""
expected_json = """{
"doc": {
"entry": [
{
"_content": "FOO",
"id": "17"
},
{
"_content": "BAR",
"id": "42"
}
]
}
}"""
bad_xml = """<?xml version="1.0" encoding="UTF-8"?>
<doc>
<entry id='17'>FOO</entry>
<entry id='42'>BAR</entry>
</dod>"""
xml_with_processing_instruction = """<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="./datadict_v2.xsl"?>
<data_table id="pht003897.v1" study_id="phs000722.v1" participant_set="1">
</data_table>"""
expected_pi = '{ "data_table": { "id": "pht003897.v1", "study_id": "phs000722.v1", "participant_set": "1" } }'
expected_bad = 'ERROR: Transformer exception: org.xml.sax.SAXParseException; lineNumber: 5; columnNumber: 3; ' \
'The element type "doc" must be terminated by the matching end-tag "</doc>".'
class XMLToJsonTestCase(unittest.TestCase):
    """Round-trip XML through the gateway and compare the JSON results.

    The actual transform is tested elsewhere; these tests only verify
    that we get what we expect back through the gateway.
    """
    gw = pyjxslt.Gateway()
    if not gw.gateway_connected(reconnect=False):
        print("Gateway must be running on port 25333")

    def compare_jsons(self, json1, json2):
        """Return True iff both JSON strings decode to equivalent dicts."""
        reference = json.loads(json1)
        try:
            candidate = json.loads(json2)
        except json.JSONDecodeError as err:
            print(str(err))
            return False
        matched, report = dict_compare(reference, candidate)
        if not matched:
            print(report)
        return matched

    def test1(self):
        """Good XML, bad XML, and XML carrying a processing instruction."""
        self.assertTrue(self.compare_jsons(expected_json, self.gw.to_json(xml1)))
        self.assertEqual(expected_bad, self.gw.to_json(bad_xml))
        self.assertTrue(
            self.compare_jsons(expected_pi, self.gw.to_json(xml_with_processing_instruction))
        )
class NoGatewayTestCase(unittest.TestCase):
    """Behaviour when no gateway is listening."""

    def test_gw_down(self):
        """to_json must return None when the gateway port is unreachable."""
        dead_gw = pyjxslt.Gateway(port=23456)  # nothing listens on this port
        self.assertIsNone(dead_gw.to_json(xml1))
# Allow running the tests directly with ``python testXMLtoJSON.py``.
if __name__ == '__main__':
    unittest.main()
|
partofthething/home-assistant
|
refs/heads/dev
|
tests/components/template/test_alarm_control_panel.py
|
14
|
"""The tests for the Template alarm control panel platform."""
from homeassistant import setup
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_PENDING,
STATE_ALARM_TRIGGERED,
)
from tests.common import async_mock_service
from tests.components.alarm_control_panel import common
async def test_template_state_text(hass):
    """Test the state text of a template."""
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {
            "alarm_control_panel": {
                "platform": "template",
                "panels": {
                    "test_template_panel": {
                        "value_template": "{{ states('alarm_control_panel.test') }}",
                        "arm_away": {
                            "service": "alarm_control_panel.alarm_arm_away",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "arm_home": {
                            "service": "alarm_control_panel.alarm_arm_home",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "arm_night": {
                            "service": "alarm_control_panel.alarm_arm_night",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "disarm": {
                            "service": "alarm_control_panel.alarm_disarm",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                    }
                },
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # Drive the source entity through every supported alarm state and
    # verify the template panel mirrors it each time.
    for source_state in (
        STATE_ALARM_ARMED_HOME,
        STATE_ALARM_ARMED_AWAY,
        STATE_ALARM_ARMED_NIGHT,
        STATE_ALARM_ARMING,
        STATE_ALARM_DISARMED,
        STATE_ALARM_PENDING,
        STATE_ALARM_TRIGGERED,
    ):
        hass.states.async_set("alarm_control_panel.test", source_state)
        await hass.async_block_till_done()
        state = hass.states.get("alarm_control_panel.test_template_panel")
        assert state.state == source_state

    # An unrecognised source state must map to "unknown".
    hass.states.async_set("alarm_control_panel.test", "invalid_state")
    await hass.async_block_till_done()
    state = hass.states.get("alarm_control_panel.test_template_panel")
    assert state.state == "unknown"
async def test_optimistic_states(hass):
    """Test the optimistic state.

    No value_template is configured, so the panel has no entity to
    observe and must track state optimistically from the services it
    is asked to perform.
    """
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {
            "alarm_control_panel": {
                "platform": "template",
                "panels": {
                    "test_template_panel": {
                        "arm_away": {
                            "service": "alarm_control_panel.alarm_arm_away",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "arm_home": {
                            "service": "alarm_control_panel.alarm_arm_home",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "arm_night": {
                            "service": "alarm_control_panel.alarm_arm_night",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "disarm": {
                            "service": "alarm_control_panel.alarm_disarm",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                    }
                },
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
    # Before any service call the optimistic panel state is unknown.
    state = hass.states.get("alarm_control_panel.test_template_panel")
    await hass.async_block_till_done()
    assert state.state == "unknown"
    await common.async_alarm_arm_away(
        hass, entity_id="alarm_control_panel.test_template_panel"
    )
    await hass.async_block_till_done()
    state = hass.states.get("alarm_control_panel.test_template_panel")
    await hass.async_block_till_done()
    assert state.state == STATE_ALARM_ARMED_AWAY
    await common.async_alarm_arm_home(
        hass, entity_id="alarm_control_panel.test_template_panel"
    )
    # NOTE(review): here the state is fetched before the following
    # async_block_till_done(); presumably the awaited service helper has
    # already applied the state change — confirm the ordering is intended.
    state = hass.states.get("alarm_control_panel.test_template_panel")
    await hass.async_block_till_done()
    assert state.state == STATE_ALARM_ARMED_HOME
    await common.async_alarm_arm_night(
        hass, entity_id="alarm_control_panel.test_template_panel"
    )
    state = hass.states.get("alarm_control_panel.test_template_panel")
    await hass.async_block_till_done()
    assert state.state == STATE_ALARM_ARMED_NIGHT
    await common.async_alarm_disarm(
        hass, entity_id="alarm_control_panel.test_template_panel"
    )
    state = hass.states.get("alarm_control_panel.test_template_panel")
    await hass.async_block_till_done()
    assert state.state == STATE_ALARM_DISARMED
async def test_no_action_scripts(hass):
    """Test no action scripts per state.

    Only a value_template is configured; arm/disarm services have no
    action scripts, so every call must leave the panel tracking the
    template's source entity unchanged.
    """
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {
            "alarm_control_panel": {
                "platform": "template",
                "panels": {
                    "test_template_panel": {
                        "value_template": "{{ states('alarm_control_panel.test') }}",
                    }
                },
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
    # Pin the source entity; each service call below should be a no-op.
    hass.states.async_set("alarm_control_panel.test", STATE_ALARM_ARMED_AWAY)
    await hass.async_block_till_done()
    await common.async_alarm_arm_away(
        hass, entity_id="alarm_control_panel.test_template_panel"
    )
    await hass.async_block_till_done()
    state = hass.states.get("alarm_control_panel.test_template_panel")
    await hass.async_block_till_done()
    assert state.state == STATE_ALARM_ARMED_AWAY
    await common.async_alarm_arm_home(
        hass, entity_id="alarm_control_panel.test_template_panel"
    )
    await hass.async_block_till_done()
    state = hass.states.get("alarm_control_panel.test_template_panel")
    await hass.async_block_till_done()
    assert state.state == STATE_ALARM_ARMED_AWAY
    await common.async_alarm_arm_night(
        hass, entity_id="alarm_control_panel.test_template_panel"
    )
    await hass.async_block_till_done()
    state = hass.states.get("alarm_control_panel.test_template_panel")
    await hass.async_block_till_done()
    assert state.state == STATE_ALARM_ARMED_AWAY
    await common.async_alarm_disarm(
        hass, entity_id="alarm_control_panel.test_template_panel"
    )
    await hass.async_block_till_done()
    state = hass.states.get("alarm_control_panel.test_template_panel")
    await hass.async_block_till_done()
    assert state.state == STATE_ALARM_ARMED_AWAY
async def test_template_syntax_error(hass, caplog):
    """Test templating syntax error.

    An unterminated Jinja block in value_template must prevent entity
    creation and be reported in the log.
    """
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {
            "alarm_control_panel": {
                "platform": "template",
                "panels": {
                    "test_template_panel": {
                        # Deliberately broken template (missing endif).
                        "value_template": "{% if blah %}",
                        "arm_away": {
                            "service": "alarm_control_panel.alarm_arm_away",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "arm_home": {
                            "service": "alarm_control_panel.alarm_arm_home",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "arm_night": {
                            "service": "alarm_control_panel.alarm_arm_night",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "disarm": {
                            "service": "alarm_control_panel.alarm_disarm",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                    }
                },
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
    # No entity may be created, and the log must name the cause.
    assert len(hass.states.async_all()) == 0
    assert ("invalid template") in caplog.text
async def test_invalid_name_does_not_create(hass, caplog):
    """Test invalid name.

    A panel key that is not a valid slug (contains spaces) must be
    rejected during config validation.
    """
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {
            "alarm_control_panel": {
                "platform": "template",
                "panels": {
                    # Spaces make this an invalid object slug.
                    "bad name here": {
                        "value_template": "{{ disarmed }}",
                        "arm_away": {
                            "service": "alarm_control_panel.alarm_arm_away",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "arm_home": {
                            "service": "alarm_control_panel.alarm_arm_home",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "arm_night": {
                            "service": "alarm_control_panel.alarm_arm_night",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "disarm": {
                            "service": "alarm_control_panel.alarm_disarm",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                    }
                },
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 0
    assert ("invalid slug bad name") in caplog.text
async def test_invalid_panel_does_not_create(hass, caplog):
    """Test invalid alarm control panel.

    An unknown top-level option ("wibble") must fail schema validation
    and create no entities.
    """
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {
            "alarm_control_panel": {
                "platform": "template",
                "wibble": {"test_panel": "Invalid"},
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 0
    assert ("[wibble] is an invalid option") in caplog.text
async def test_no_panels_does_not_create(hass, caplog):
    """Test if there are no panels -> no creation.

    The "panels" key is required; omitting it must fail validation.
    """
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {"alarm_control_panel": {"platform": "template"}},
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
    assert len(hass.states.async_all()) == 0
    assert ("required key not provided @ data['panels']") in caplog.text
async def test_name(hass):
    """Test the accessibility of the name attribute.

    A configured "name" must surface as the entity's friendly_name.
    """
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {
            "alarm_control_panel": {
                "platform": "template",
                "panels": {
                    "test_template_panel": {
                        "name": "Template Alarm Panel",
                        "value_template": "{{ disarmed }}",
                        "arm_away": {
                            "service": "alarm_control_panel.alarm_arm_away",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "arm_home": {
                            "service": "alarm_control_panel.alarm_arm_home",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "arm_night": {
                            "service": "alarm_control_panel.alarm_arm_night",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "disarm": {
                            "service": "alarm_control_panel.alarm_disarm",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                    }
                },
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
    state = hass.states.get("alarm_control_panel.test_template_panel")
    assert state is not None
    assert state.attributes.get("friendly_name") == "Template Alarm Panel"
async def test_arm_home_action(hass):
    """Test arm home action.

    Only arm_home is wired to the mocked test.automation service; the
    other actions point elsewhere, so exactly one call must be recorded.
    """
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {
            "alarm_control_panel": {
                "platform": "template",
                "panels": {
                    "test_template_panel": {
                        "value_template": "{{ states('alarm_control_panel.test') }}",
                        "arm_away": {
                            "service": "alarm_control_panel.alarm_arm_home",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        # The action under test: routed to the mock service.
                        "arm_home": {"service": "test.automation"},
                        "arm_night": {
                            "service": "alarm_control_panel.alarm_arm_home",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                        "disarm": {
                            "service": "alarm_control_panel.alarm_disarm",
                            "entity_id": "alarm_control_panel.test",
                            "data": {"code": "1234"},
                        },
                    }
                },
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
    service_calls = async_mock_service(hass, "test", "automation")
    await common.async_alarm_arm_home(
        hass, entity_id="alarm_control_panel.test_template_panel"
    )
    await hass.async_block_till_done()
    assert len(service_calls) == 1
async def test_arm_away_action(hass):
    """Test arm away action."""
    # Only arm_away points at the countable stub service.
    panel = {
        "value_template": "{{ states('alarm_control_panel.test') }}",
        "arm_home": {
            "service": "alarm_control_panel.alarm_arm_home",
            "entity_id": "alarm_control_panel.test",
            "data": {"code": "1234"},
        },
        "arm_away": {"service": "test.automation"},
        "arm_night": {
            "service": "alarm_control_panel.alarm_arm_home",
            "entity_id": "alarm_control_panel.test",
            "data": {"code": "1234"},
        },
        "disarm": {
            "service": "alarm_control_panel.alarm_disarm",
            "entity_id": "alarm_control_panel.test",
            "data": {"code": "1234"},
        },
    }
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {
            "alarm_control_panel": {
                "platform": "template",
                "panels": {"test_template_panel": panel},
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # Arming away on the template panel must fire the stub exactly once.
    service_calls = async_mock_service(hass, "test", "automation")
    await common.async_alarm_arm_away(
        hass, entity_id="alarm_control_panel.test_template_panel"
    )
    await hass.async_block_till_done()
    assert len(service_calls) == 1
async def test_arm_night_action(hass):
    """Test arm night action."""
    # Only arm_night points at the countable stub service.
    panel = {
        "value_template": "{{ states('alarm_control_panel.test') }}",
        "arm_home": {
            "service": "alarm_control_panel.alarm_arm_home",
            "entity_id": "alarm_control_panel.test",
            "data": {"code": "1234"},
        },
        "arm_night": {"service": "test.automation"},
        "arm_away": {
            "service": "alarm_control_panel.alarm_arm_home",
            "entity_id": "alarm_control_panel.test",
            "data": {"code": "1234"},
        },
        "disarm": {
            "service": "alarm_control_panel.alarm_disarm",
            "entity_id": "alarm_control_panel.test",
            "data": {"code": "1234"},
        },
    }
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {
            "alarm_control_panel": {
                "platform": "template",
                "panels": {"test_template_panel": panel},
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # Arming night on the template panel must fire the stub exactly once.
    service_calls = async_mock_service(hass, "test", "automation")
    await common.async_alarm_arm_night(
        hass, entity_id="alarm_control_panel.test_template_panel"
    )
    await hass.async_block_till_done()
    assert len(service_calls) == 1
async def test_disarm_action(hass):
    """Test disarm action."""
    # Only disarm points at the countable stub service.
    panel = {
        "value_template": "{{ states('alarm_control_panel.test') }}",
        "arm_home": {
            "service": "alarm_control_panel.alarm_arm_home",
            "entity_id": "alarm_control_panel.test",
            "data": {"code": "1234"},
        },
        "disarm": {"service": "test.automation"},
        "arm_away": {
            "service": "alarm_control_panel.alarm_arm_home",
            "entity_id": "alarm_control_panel.test",
            "data": {"code": "1234"},
        },
        "arm_night": {
            "service": "alarm_control_panel.alarm_disarm",
            "entity_id": "alarm_control_panel.test",
            "data": {"code": "1234"},
        },
    }
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {
            "alarm_control_panel": {
                "platform": "template",
                "panels": {"test_template_panel": panel},
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # Disarming the template panel must fire the stub exactly once.
    service_calls = async_mock_service(hass, "test", "automation")
    await common.async_alarm_disarm(
        hass, entity_id="alarm_control_panel.test_template_panel"
    )
    await hass.async_block_till_done()
    assert len(service_calls) == 1
async def test_unique_id(hass):
    """Test unique_id option only creates one alarm control panel per id."""
    # Two panel configs deliberately share one unique_id.
    panels = {
        "test_template_alarm_control_panel_01": {
            "unique_id": "not-so-unique-anymore",
            "value_template": "{{ true }}",
        },
        "test_template_alarm_control_panel_02": {
            "unique_id": "not-so-unique-anymore",
            "value_template": "{{ false }}",
        },
    }
    await setup.async_setup_component(
        hass,
        "alarm_control_panel",
        {
            "alarm_control_panel": {
                "platform": "template",
                "panels": panels,
            }
        },
    )
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # The duplicated unique_id must collapse both configs into one entity.
    assert len(hass.states.async_all()) == 1
|
pwoodworth/intellij-community
|
refs/heads/master
|
python/lib/Lib/hashlib.py
|
82
|
# $Id: hashlib.py 66095 2008-08-31 16:36:21Z gregory.p.smith $
#
# Copyright (C) 2005 Gregory P. Smith (greg@electricrain.com)
# Licensed to PSF under a Contributor Agreement.
#
__doc__ = """hashlib module - A common interface to many hash functions.
new(name, string='') - returns a new hash object implementing the
given hash function; initializing the hash
using the given string data.
Named constructor functions are also available, these are much faster
than using new():
md5(), sha1(), sha224(), sha256(), sha384(), and sha512()
More algorithms may be available on your platform but the above are
guaranteed to exist.
Choose your hash function wisely. Some have known collision weaknesses.
sha384 and sha512 will be slow on 32 bit platforms.
Hash objects have these methods:
- update(arg): Update the hash object with the string arg. Repeated calls
are equivalent to a single call with the concatenation of all
the arguments.
- digest(): Return the digest of the strings passed to the update() method
so far. This may contain non-ASCII characters, including
NUL bytes.
- hexdigest(): Like digest() except the digest is returned as a string of
double length, containing only hexadecimal digits.
- copy(): Return a copy (clone) of the hash object. This can be used to
efficiently compute the digests of strings that share a common
initial substring.
For example, to obtain the digest of the string 'Nobody inspects the
spammish repetition':
>>> import hashlib
>>> m = hashlib.md5()
>>> m.update("Nobody inspects")
>>> m.update(" the spammish repetition")
>>> m.digest()
'\\xbbd\\x9c\\x83\\xdd\\x1e\\xa5\\xc9\\xd9\\xde\\xc9\\xa1\\x8d\\xf0\\xff\\xe9'
More condensed:
>>> hashlib.sha224("Nobody inspects the spammish repetition").hexdigest()
'a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2'
"""
def __get_builtin_constructor(name):
    # Map a hash algorithm name (upper- or lower-case variants accepted) to
    # the matching builtin constructor; raises ValueError for unknown names.
    if name in ('SHA1', 'sha1'):
        import _sha
        return _sha.new
    elif name in ('MD5', 'md5'):
        import _md5
        return _md5.new
    elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'):
        import _sha256
        # The digest-size suffix ('256' or '224') selects the constructor.
        bs = name[3:]
        if bs == '256':
            return _sha256.sha256
        elif bs == '224':
            return _sha256.sha224
    elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'):
        import _sha512
        bs = name[3:]
        if bs == '512':
            return _sha512.sha512
        elif bs == '384':
            return _sha512.sha384
    # Python 2 raise syntax -- this module targets Python 2.x (Jython lib).
    raise ValueError, "unsupported hash type"
def __py_new(name, string=''):
    """new(name, string='') - Return a new hashing object using the named algorithm;
    optionally initialized with a string.
    """
    # Pure fallback path: resolve the builtin constructor, then seed it.
    constructor = __get_builtin_constructor(name)
    return constructor(string)
def __hash_new(name, string=''):
    """new(name, string='') - Return a new hashing object using the named algorithm;
    optionally initialized with a string.
    """
    try:
        # Prefer the OpenSSL-backed implementation when it knows the name.
        return _hashlib.new(name, string)
    except ValueError:
        # The OpenSSL wrapper rejected the name (libraries before 0.9.8 lack
        # SHA-224/256/384/512); fall back to the builtin implementations.
        fallback = __get_builtin_constructor(name)
        return fallback(string)
try:
    import _hashlib
    # use the wrapper of the C implementation
    new = __hash_new
    # Export a module-level constructor (md5, sha1, ...) for every
    # 'openssl_*' factory the _hashlib C module advertises; Python 2
    # 'exec' statements create the module-level names dynamically.
    for opensslFuncName in filter(lambda n: n.startswith('openssl_'), dir(_hashlib)):
        funcName = opensslFuncName[len('openssl_'):]
        try:
            # try them all, some may not work due to the OpenSSL
            # version not supporting that algorithm.
            f = getattr(_hashlib, opensslFuncName)
            f()
            # Use the C function directly (very fast)
            exec funcName + ' = f'
        except ValueError:
            try:
                # Use the builtin implementation directly (fast)
                exec funcName + ' = __get_builtin_constructor(funcName)'
            except ValueError:
                # this one has no builtin implementation, don't define it
                pass
    # clean up our locals
    del f
    del opensslFuncName
    del funcName
except ImportError:
    # We don't have the _hashlib OpenSSL module?
    # use the built in legacy interfaces via a wrapper function
    new = __py_new
# lookup the C function to use directly for the named constructors
# NOTE(review): these unconditional bindings appear to override any
# OpenSSL-backed constructors exported by the loop above with the builtin
# ones -- confirm this is intentional for this (Jython) distribution.
md5 = __get_builtin_constructor('md5')
sha1 = __get_builtin_constructor('sha1')
sha224 = __get_builtin_constructor('sha224')
sha256 = __get_builtin_constructor('sha256')
sha384 = __get_builtin_constructor('sha384')
sha512 = __get_builtin_constructor('sha512')
|
yoer/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/django/core/cache/backends/memcached.py
|
104
|
"Memcached cache backend"
import time
import pickle
from threading import local
from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT
from django.utils import six
from django.utils.encoding import force_str
class BaseMemcachedCache(BaseCache):
    """Shared implementation for the memcached-backed cache backends.

    Subclasses supply the client *library* and the exception that library
    raises for a missing key; this class adapts Django's cache API onto it.
    """
    def __init__(self, server, params, library, value_not_found_exception):
        super(BaseMemcachedCache, self).__init__(params)
        # Accept either a 'host1;host2' string or an already-split sequence.
        if isinstance(server, six.string_types):
            self._servers = server.split(';')
        else:
            self._servers = server

        # The exception type to catch from the underlying library for a key
        # that was not found. This is a ValueError for python-memcache,
        # pylibmc.NotFound for pylibmc, and cmemcache will return None without
        # raising an exception.
        self.LibraryValueNotFoundException = value_not_found_exception

        self._lib = library
        self._options = params.get('OPTIONS', None)

    @property
    def _cache(self):
        """
        Implements transparent thread-safe access to a memcached client.
        """
        # Lazily create one client per backend instance and reuse it.
        if getattr(self, '_client', None) is None:
            self._client = self._lib.Client(self._servers)
        return self._client

    def _get_memcache_timeout(self, timeout=DEFAULT_TIMEOUT):
        """
        Memcached deals with long (> 30 days) timeouts in a special
        way. Call this function to obtain a safe value for your timeout.
        """
        if timeout == DEFAULT_TIMEOUT:
            return self.default_timeout

        if timeout is None:
            # Using 0 in memcache sets a non-expiring timeout.
            return 0
        elif int(timeout) == 0:
            # Other cache backends treat 0 as set-and-expire. To achieve this
            # in memcache backends, a negative timeout must be passed.
            timeout = -1

        if timeout > 2592000: # 60*60*24*30, 30 days
            # See http://code.google.com/p/memcached/wiki/FAQ
            # "You can set expire times up to 30 days in the future. After that
            # memcached interprets it as a date, and will expire the item after
            # said date. This is a simple (but obscure) mechanic."
            #
            # This means that we have to switch to absolute timestamps.
            timeout += int(time.time())
        return int(timeout)

    def make_key(self, key, version=None):
        # Python 2 memcache requires the key to be a byte string.
        return force_str(super(BaseMemcachedCache, self).make_key(key, version))

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        # add() stores only when the key is absent; returns the library's
        # success flag unchanged.
        key = self.make_key(key, version=version)
        return self._cache.add(key, value, self._get_memcache_timeout(timeout))

    def get(self, key, default=None, version=None):
        key = self.make_key(key, version=version)
        val = self._cache.get(key)
        # NOTE: a stored value of None is indistinguishable from a miss here.
        if val is None:
            return default
        return val

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        key = self.make_key(key, version=version)
        self._cache.set(key, value, self._get_memcache_timeout(timeout))

    def delete(self, key, version=None):
        key = self.make_key(key, version=version)
        self._cache.delete(key)

    def get_many(self, keys, version=None):
        # Translate caller keys to versioned keys, fetch them in one round
        # trip, then map the results back onto the caller's original keys.
        new_keys = [self.make_key(x, version=version) for x in keys]
        ret = self._cache.get_multi(new_keys)
        if ret:
            _ = {}
            m = dict(zip(new_keys, keys))
            for k, v in ret.items():
                _[m[k]] = v
            ret = _
        return ret

    def close(self, **kwargs):
        self._cache.disconnect_all()

    def incr(self, key, delta=1, version=None):
        key = self.make_key(key, version=version)
        # memcached doesn't support a negative delta
        if delta < 0:
            return self._cache.decr(key, -delta)
        try:
            val = self._cache.incr(key, delta)

        # python-memcache responds to incr on non-existent keys by
        # raising a ValueError, pylibmc by raising a pylibmc.NotFound
        # and Cmemcache returns None. In all cases,
        # we should raise a ValueError though.
        except self.LibraryValueNotFoundException:
            val = None

        if val is None:
            raise ValueError("Key '%s' not found" % key)

        return val

    def decr(self, key, delta=1, version=None):
        key = self.make_key(key, version=version)
        # memcached doesn't support a negative delta
        if delta < 0:
            return self._cache.incr(key, -delta)
        try:
            val = self._cache.decr(key, delta)

        # python-memcache responds to incr on non-existent keys by
        # raising a ValueError, pylibmc by raising a pylibmc.NotFound
        # and Cmemcache returns None. In all cases,
        # we should raise a ValueError though.
        except self.LibraryValueNotFoundException:
            val = None

        if val is None:
            raise ValueError("Key '%s' not found" % key)

        return val

    def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
        # Version every key, then write the whole mapping in one round trip.
        safe_data = {}
        for key, value in data.items():
            key = self.make_key(key, version=version)
            safe_data[key] = value
        self._cache.set_multi(safe_data, self._get_memcache_timeout(timeout))

    def delete_many(self, keys, version=None):
        l = lambda x: self.make_key(x, version=version)
        self._cache.delete_multi(map(l, keys))

    def clear(self):
        self._cache.flush_all()
class MemcachedCache(BaseMemcachedCache):
"An implementation of a cache binding using python-memcached"
def __init__(self, server, params):
import memcache
super(MemcachedCache, self).__init__(server, params,
library=memcache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
self._client = self._lib.Client(self._servers, pickleProtocol=pickle.HIGHEST_PROTOCOL)
return self._client
class PyLibMCCache(BaseMemcachedCache):
"An implementation of a cache binding using pylibmc"
def __init__(self, server, params):
import pylibmc
self._local = local()
super(PyLibMCCache, self).__init__(server, params,
library=pylibmc,
value_not_found_exception=pylibmc.NotFound)
@property
def _cache(self):
# PylibMC uses cache options as the 'behaviors' attribute.
# It also needs to use threadlocals, because some versions of
# PylibMC don't play well with the GIL.
client = getattr(self._local, 'client', None)
if client:
return client
client = self._lib.Client(self._servers)
if self._options:
client.behaviors = self._options
self._local.client = client
return client
|
evgeni/dak
|
refs/heads/master
|
dak/split_done.py
|
1
|
#!/usr/bin/env python
""" Split queue/done into date based subdirectories """
# Copyright (C) 2004, 2005, 2006 James Troup <james@nocrew.org>
# Copyright (C) 2008 Joerg Jaspert <joerg@debian.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import glob, os, stat, time
from daklib import utils
################################################################################
def main():
    """Move files from queue/done into YYYY/MM/DD subdirectories."""
    Cnf = utils.get_conf()
    count = 0
    # Only files whose mtime is at or before this moment are moved.
    move_date = int(time.time())
    os.chdir(Cnf["Dir::Done"])
    files = glob.glob("%s/*" % (Cnf["Dir::Done"]))
    for filename in files:
        if os.path.isfile(filename):
            filemtime = os.stat(filename)[stat.ST_MTIME]
            if filemtime > move_date:
                continue
            # Bucket by the file's own modification date (UTC).
            mtime = time.gmtime(filemtime)
            dirname = time.strftime("%Y/%m/%d", mtime)
            if not os.path.exists(dirname):
                print "Creating: %s" % (dirname)
                os.makedirs(dirname)
            dest = dirname + '/' + os.path.basename(filename)
            # Never clobber an existing destination; warn and skip instead.
            if os.path.exists(dest):
                utils.warn("%s already exists." % (dest))
                continue
            print "Move: %s -> %s" % (filename, dest)
            os.rename(filename, dest)
            count = count + 1
    print "Moved %d files." % (count)
############################################################

# Script entry point.
if __name__ == '__main__':
    main()
|
alexDeCastroAtGit/belkin_challenge
|
refs/heads/master
|
data/animate_hf_data.py
|
1
|
"""
simple example of an animated plot
"""
## always start the project as a virtual environment
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
data_dir = '/Users/alex.decastro/development/gitlab/belkin_challenge/data/CSV_OUT/Tagged_Training_02_15_1360915201'
## had resample hf data at every 15 seconds since data frame woudn't fit into memory was it was taking too long
fields = range(0, 80997, 5)
df = pd.read_csv(data_dir + '/HF.csv', delimiter=',', header=None, na_filter=False, dtype={'x': np.float}, usecols=fields)
#dg = df.pct_change
df_filtered = df.rolling(window=3).mean() # filter out noise and compute first order differences
dg = df.diff(1) / df.shift(1) # same as pct_change?
# Set up formatting for the movie files
Writer = animation.writers['ffmpeg']
writer = Writer(fps=15, metadata=dict(artist='Me'), bitrate=1800)
## include data transformation here
fig, ax = plt.subplots()
x = range(dg.shape[0])
line, = ax.plot(x, dg.iloc[:, 0])
def animate(i):
line.set_ydata(dg.iloc[:, i]) # update the data
return line,
# Init only required for blitting to give a clean slate.
def init():
line.set_ydata(np.ma.array(x, mask=True))
return line,
# plotting first two hundred samples (~ sampled every 1S each)
ani = animation.FuncAnimation(fig, animate, range(dg.shape[0]), init_func=init,
interval=50, blit=True) # interval between frames. Default is 200 ms
ani.save(data_dir + '/hf_animation.mp4', writer=writer)
plt.show()
|
Fanarim/test_case_monkey
|
refs/heads/master
|
test_case_monkey/tcm/migrations/0006_make_attributes_values_optional.py
|
1
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-09-20 17:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make the 'value' field of both attribute models nullable."""

    dependencies = [
        ('tcm', '0005_add_uniquetogether_to_project'),
    ]

    operations = [
        # null=True allows an attribute row to exist without a value.
        migrations.AlterField(
            model_name='testscenarioattribute',
            name='value',
            field=models.TextField(null=True),
        ),
        migrations.AlterField(
            model_name='testscenariotemplateattribute',
            name='value',
            field=models.TextField(null=True),
        ),
    ]
|
qianwenming/mapnik
|
refs/heads/master
|
tests/python_tests/render_grid_test.py
|
2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from nose.tools import *
from utilities import execution_path, run_all
import os, mapnik
try:
import json
except ImportError:
import simplejson as json
def setup():
    """Run tests from the test directory so relative data paths resolve."""
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))
if mapnik.has_grid_renderer():
def show_grids(name, g1, g2):
    """Dump actual/expected UTFGrid JSON to /tmp and return a diff summary.

    *g1* is the grid actually rendered and *g2* the expected one; the
    returned string reports which of the 'grid', 'data' and 'keys'
    sections match, followed by the paths of the two dumped files.
    """
    g1_file = '/tmp/mapnik-%s-actual.json' % name
    # FIX: use context managers so the dump files are closed deterministically
    # (the original wrote through anonymous open() calls and leaked handles).
    with open(g1_file, 'w') as actual_f:
        actual_f.write(json.dumps(g1, sort_keys=True))
    g2_file = '/tmp/mapnik-%s-expected.json' % name
    with open(g2_file, 'w') as expected_f:
        expected_f.write(json.dumps(g2, sort_keys=True))
    val = 'JSON does not match ->\n'
    if g1['grid'] != g2['grid']:
        val += ' X grid does not match\n'
    else:
        val += ' ✓ grid matches\n'
    if g1['data'].keys() != g2['data'].keys():
        val += ' X data does not match\n'
    else:
        val += ' ✓ data matches\n'
    if g1['keys'] != g2['keys']:
        val += ' X keys do not\n'
    else:
        val += ' ✓ keys match\n'
    val += '\n\t%s\n\t%s' % (g1_file, g2_file)
    return val
def show_grids2(name, g1, g2):
    """Compare grid *g1* against the on-disk fixture, creating it if absent.

    When the expected fixture file does not exist yet it is written from
    *g1* and None is returned; otherwise a diff summary string is returned.
    """
    g2_expected = '../data/grids/mapnik-%s-actual.json' % name
    if not os.path.exists(g2_expected):
        # create test fixture based on actual results
        # FIX: close the fixture file deterministically via a context manager.
        with open(g2_expected, 'a+') as fixture_f:
            fixture_f.write(json.dumps(g1, sort_keys=True))
        return
    g1_file = '/tmp/mapnik-%s-actual.json' % name
    with open(g1_file, 'w') as actual_f:
        actual_f.write(json.dumps(g1, sort_keys=True))
    val = 'JSON does not match ->\n'
    if g1['grid'] != g2['grid']:
        val += ' X grid does not match\n'
    else:
        val += ' ✓ grid matches\n'
    if g1['data'].keys() != g2['data'].keys():
        val += ' X data does not match\n'
    else:
        val += ' ✓ data matches\n'
    if g1['keys'] != g2['keys']:
        val += ' X keys do not\n'
    else:
        val += ' ✓ keys match\n'
    val += '\n\t%s\n\t%s' % (g1_file, g2_expected)
    return val
# previous rendering using agg ellipse directly
grid_correct_new = {"data": {"North East": {"Name": "North East"}, "North West": {"Name": "North West"}, "South East": {"Name": "South East"}, "South West": {"Name": "South West"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !! ## ", " !!! ### ", " !! ## ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$ %% ", " $$$ %% ", " $$ %% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "], "keys": ["", "North West", "North East", "South West", "South East"]}
# newer rendering using svg
grid_correct_new2 = {"data": {"North East": {"Name": "North East"}, "North West": {"Name": "North West"}, "South East": {"Name": "South East"}, "South West": {"Name": "South West"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !! ## ", " !!! ### ", " !! ## ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$ %% ", " $$$ %% ", " $$ %% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "], "keys": ["", "North West", "North East", "South West", "South East"]}
grid_correct_new3 = {"data": {"North East": {"Name": "North East"}, "North West": {"Name": "North West"}, "South East": {"Name": "South East"}, "South West": {"Name": "South West"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !! ## ", " !!! ### ", " !! ## ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$ %% ", " $$$ %% ", " $ %% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "], "keys": ["", "North West", "North East", "South West", "South East"]}
def resolve(grid, row, col):
    """Decode the feature attributes encoded at (row, col) of a UTFGrid.

    Returns the attribute dict for the feature at that pixel, or None when
    the pixel belongs to no feature (the empty key).
    """
    # Invert the UTFGrid codepoint shifts: the encoding skips the two JSON
    # string delimiters, so codepoints at or above them are shifted back.
    # http://docs.python.org/library/functions.html#ord
    code = ord(grid['grid'][row][col])
    if code >= 93:
        code -= 1
    if code >= 35:
        code -= 1
    key_index = code - 32
    return grid['data'].get(grid['keys'][key_index])
def create_grid_map(width, height, sym):
    """Build a Map holding four compass-corner point features styled by *sym*.

    The map contains a single memory-datasource layer named 'Places' whose
    'places_labels' style wraps the given symbolizer (overlap allowed).
    """
    ds = mapnik.MemoryDatasource()
    context = mapnik.Context()
    context.push('Name')
    # (feature id, Name attribute, WKT point) for each corner, in the same
    # insertion order as before.
    corners = [
        (1, 'South East', 'POINT (143.10 -38.60)'),
        (2, 'South West', 'POINT (142.48 -38.60)'),
        (3, 'North West', 'POINT (142.48 -38.38)'),
        (4, 'North East', 'POINT (143.10 -38.38)'),
    ]
    for fid, label, wkt in corners:
        feature = mapnik.Feature(context, fid)
        feature['Name'] = label
        feature.add_geometries_from_wkt(wkt)
        ds.add_feature(feature)
    style = mapnik.Style()
    rule = mapnik.Rule()
    sym.allow_overlap = True
    rule.symbols.append(sym)
    style.rules.append(rule)
    layer = mapnik.Layer('Places')
    layer.datasource = ds
    layer.styles.append('places_labels')
    grid_map = mapnik.Map(width, height)
    grid_map.append_style('places_labels', style)
    grid_map.layers.append(layer)
    return grid_map
def test_render_grid():
    """ test render_grid method"""
    width,height = 256,256
    sym = mapnik.MarkersSymbolizer()
    sym.width = mapnik.Expression('10')
    sym.height = mapnik.Expression('10')
    m = create_grid_map(width,height,sym)
    ul_lonlat = mapnik.Coord(142.30,-38.20)
    lr_lonlat = mapnik.Coord(143.40,-38.80)
    m.zoom_to_box(mapnik.Box2d(ul_lonlat,lr_lonlat))
    # new method
    grid = mapnik.Grid(m.width,m.height,key='Name')
    mapnik.render_layer(m,grid,layer=0,fields=['Name'])
    utf1 = grid.encode('utf',resolution=4)
    eq_(utf1,grid_correct_new3,show_grids('new-markers',utf1,grid_correct_new3))
    # check a full view is the same as a full image
    grid_view = grid.view(0,0,width,height)
    # for kicks check at full res too
    utf3 = grid.encode('utf',resolution=1)
    utf4 = grid_view.encode('utf',resolution=1)
    eq_(utf3['grid'],utf4['grid'])
    eq_(utf3['keys'],utf4['keys'])
    eq_(utf3['data'],utf4['data'])
    # A blank corner pixel must resolve to no feature.
    eq_(resolve(utf4,0,0),None)
    # resolve some center points in the
    # resampled view
    utf5 = grid_view.encode('utf',resolution=4)
    eq_(resolve(utf5,25,10),{"Name": "North West"})
    eq_(resolve(utf5,25,46),{"Name": "North East"})
    eq_(resolve(utf5,38,10),{"Name": "South West"})
    eq_(resolve(utf5,38,46),{"Name": "South East"})
grid_feat_id = {'keys': ['', '3', '4', '2', '1'], 'data': {'1': {'Name': 'South East'}, '3': {'Name': u'North West'}, '2': {'Name': 'South West'}, '4': {'Name': 'North East'}}, 'grid': [' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' !! ## ', ' !!! ### ', ' !! ## ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' $$$ %% ', ' $$$ %%% ', ' $$ %% ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ']}
grid_feat_id2 = {"data": {"1": {"Name": "South East"}, "2": {"Name": "South West"}, "3": {"Name": "North West"}, "4": {"Name": "North East"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !! ## ", " !!! ### ", " !! ## ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$ %% ", " $$$ %% ", " $$ %% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "], "keys": ["", "3", "4", "2", "1"]}
grid_feat_id3 = {"data": {"1": {"Name": "South East", "__id__": 1}, "2": {"Name": "South West", "__id__": 2}, "3": {"Name": "North West", "__id__": 3}, "4": {"Name": "North East", "__id__": 4}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !! ## ", " !!! ### ", " !! ## ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$ %% ", " $$$ %% ", " $ %% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "], "keys": ["", "3", "4", "2", "1"]}
def test_render_grid3():
    """ test using feature id"""
    # Same scene as test_render_grid, but the grid is keyed on the
    # synthetic '__id__' field instead of the 'Name' attribute.
    width,height = 256,256
    sym = mapnik.MarkersSymbolizer()
    sym.width = mapnik.Expression('10')
    sym.height = mapnik.Expression('10')
    m = create_grid_map(width,height,sym)
    ul_lonlat = mapnik.Coord(142.30,-38.20)
    lr_lonlat = mapnik.Coord(143.40,-38.80)
    m.zoom_to_box(mapnik.Box2d(ul_lonlat,lr_lonlat))
    grid = mapnik.Grid(m.width,m.height,key='__id__')
    mapnik.render_layer(m,grid,layer=0,fields=['__id__','Name'])
    utf1 = grid.encode('utf',resolution=4)
    eq_(utf1,grid_feat_id3,show_grids('id-markers',utf1,grid_feat_id3))
    # check a full view is the same as a full image
    grid_view = grid.view(0,0,width,height)
    # for kicks check at full res too
    utf3 = grid.encode('utf',resolution=1)
    utf4 = grid_view.encode('utf',resolution=1)
    eq_(utf3['grid'],utf4['grid'])
    eq_(utf3['keys'],utf4['keys'])
    eq_(utf3['data'],utf4['data'])
    eq_(resolve(utf4,0,0),None)
    # resolve some center points in the
    # resampled view
    utf5 = grid_view.encode('utf',resolution=4)
    eq_(resolve(utf5,25,10),{"Name": "North West","__id__": 3})
    eq_(resolve(utf5,25,46),{"Name": "North East","__id__": 4})
    eq_(resolve(utf5,38,10),{"Name": "South West","__id__": 2})
    eq_(resolve(utf5,38,46),{"Name": "South East","__id__": 1})
def gen_grid_for_id(pixel_key):
    """Render a canvas-filling polygon with feature id *pixel_key*; return the grid."""
    ds = mapnik.MemoryDatasource()
    context = mapnik.Context()
    context.push('Name')
    f = mapnik.Feature(context,pixel_key)
    f['Name'] = str(pixel_key)
    # Polygon covering the whole 256x256 canvas so every pixel carries the id.
    f.add_geometries_from_wkt('POLYGON ((0 0, 0 256, 256 256, 256 0, 0 0))')
    ds.add_feature(f)
    s = mapnik.Style()
    r = mapnik.Rule()
    symb = mapnik.PolygonSymbolizer()
    r.symbols.append(symb)
    s.rules.append(r)
    lyr = mapnik.Layer('Places')
    lyr.datasource = ds
    lyr.styles.append('places_labels')
    width,height = 256,256
    m = mapnik.Map(width,height)
    m.append_style('places_labels',s)
    m.layers.append(lyr)
    m.zoom_all()
    grid = mapnik.Grid(m.width,m.height,key='__id__')
    mapnik.render_layer(m,grid,layer=0,fields=['__id__','Name'])
    return grid
def test_negative_id():
    """A negative feature id survives rendering and UTF encoding intact."""
    grid = gen_grid_for_id(-1)
    eq_(grid.get_pixel(128,128),-1)
    utf1 = grid.encode('utf',resolution=4)
    eq_(utf1['keys'],['-1'])
def test_32bit_int_id():
    """Feature ids at the positive and negative 32-bit extremes round-trip."""
    int32 = 2147483647
    grid = gen_grid_for_id(int32)
    eq_(grid.get_pixel(128,128),int32)
    utf1 = grid.encode('utf',resolution=4)
    eq_(utf1['keys'],[str(int32)])
    max_neg = -(int32)
    grid = gen_grid_for_id(max_neg)
    eq_(grid.get_pixel(128,128),max_neg)
    utf1 = grid.encode('utf',resolution=4)
    eq_(utf1['keys'],[str(max_neg)])
def test_64bit_int_id():
    """Feature ids at the positive and negative 64-bit extremes round-trip."""
    int64 = 0x7FFFFFFFFFFFFFFF
    grid = gen_grid_for_id(int64)
    eq_(grid.get_pixel(128,128),int64)
    utf1 = grid.encode('utf',resolution=4)
    eq_(utf1['keys'],[str(int64)])
    max_neg = -(int64)
    grid = gen_grid_for_id(max_neg)
    eq_(grid.get_pixel(128,128),max_neg)
    utf1 = grid.encode('utf',resolution=4)
    eq_(utf1['keys'],[str(max_neg)])
def test_id_zero():
    """A feature id of zero is preserved and encoded as the key '0'."""
    grid = gen_grid_for_id(0)
    eq_(grid.get_pixel(128,128),0)
    utf1 = grid.encode('utf',resolution=4)
    eq_(utf1['keys'],['0'])
line_expected = {"keys": ["", "1"], "data": {"1": {"Name": "1"}}, "grid": [" !", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", " !! ", "!! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! ", " ! "]}
def test_line_rendering():
    """A LineSymbolizer feature encodes to the expected UTFGrid."""
    ds = mapnik.MemoryDatasource()
    context = mapnik.Context()
    context.push('Name')
    pixel_key = 1
    f = mapnik.Feature(context,pixel_key)
    f['Name'] = str(pixel_key)
    f.add_geometries_from_wkt('LINESTRING (30 10, 10 30, 40 40)')
    ds.add_feature(f)
    s = mapnik.Style()
    r = mapnik.Rule()
    symb = mapnik.LineSymbolizer()
    r.symbols.append(symb)
    s.rules.append(r)
    lyr = mapnik.Layer('Places')
    lyr.datasource = ds
    lyr.styles.append('places_labels')
    width,height = 256,256
    m = mapnik.Map(width,height)
    m.append_style('places_labels',s)
    m.layers.append(lyr)
    m.zoom_all()
    #mapnik.render_to_file(m,'test.png')
    grid = mapnik.Grid(m.width,m.height,key='__id__')
    mapnik.render_layer(m,grid,layer=0,fields=['Name'])
    utf1 = grid.encode()
    # Compare against the module-level expected fixture.
    eq_(utf1,line_expected,show_grids('line',utf1,line_expected))
point_expected = {"data": {"1": {"Name": "South East"}, "2": {"Name": "South West"}, "3": {"Name": "North West"}, "4": {"Name": "North East"}}, "grid": [" ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " !!!! #### ", " !!!! #### ", " !!!! #### ", " !!!! #### ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " $$$$ %%%% ", " $$$$ %%%% ", " $$$$ %%%% ", " $$$$ %%%% ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "], "keys": ["", "3", "4", "2", "1"]}
def test_point_symbolizer_grid():
    """A PointSymbolizer with an image file encodes to the expected UTFGrid."""
    width,height = 256,256
    sym = mapnik.PointSymbolizer()
    sym.file = '../data/images/dummy.png'
    m = create_grid_map(width,height,sym)
    ul_lonlat = mapnik.Coord(142.30,-38.20)
    lr_lonlat = mapnik.Coord(143.40,-38.80)
    m.zoom_to_box(mapnik.Box2d(ul_lonlat,lr_lonlat))
    grid = mapnik.Grid(m.width,m.height)
    mapnik.render_layer(m,grid,layer=0,fields=['Name'])
    utf1 = grid.encode()
    eq_(utf1,point_expected,show_grids('point-sym',utf1,point_expected))
# should throw because this is a mis-usage
# https://github.com/mapnik/mapnik/issues/1325
@raises(RuntimeError)
def test_render_to_grid_multiple_times():
    """Rendering a second layer with a mismatched field must raise RuntimeError."""
    # create map with two layers
    m = mapnik.Map(256,256)
    s = mapnik.Style()
    r = mapnik.Rule()
    sym = mapnik.MarkersSymbolizer()
    sym.allow_overlap = True
    r.symbols.append(sym)
    s.rules.append(r)
    m.append_style('points',s)

    # NOTE: we use a csv datasource here
    # because the memorydatasource fails silently for
    # queries requesting fields that do not exist in the datasource
    ds1 = mapnik.Datasource(**{"type":"csv","inline":'''
      wkt,Name
      "POINT (143.10 -38.60)",South East'''})
    lyr1 = mapnik.Layer('One')
    lyr1.datasource = ds1
    lyr1.styles.append('points')
    m.layers.append(lyr1)

    ds2 = mapnik.Datasource(**{"type":"csv","inline":'''
      wkt,Value
      "POINT (142.48 -38.60)",South West'''})
    lyr2 = mapnik.Layer('Two')
    lyr2.datasource = ds2
    lyr2.styles.append('points')
    m.layers.append(lyr2)

    ul_lonlat = mapnik.Coord(142.30,-38.20)
    lr_lonlat = mapnik.Coord(143.40,-38.80)
    m.zoom_to_box(mapnik.Box2d(ul_lonlat,lr_lonlat))
    grid = mapnik.Grid(m.width,m.height)
    mapnik.render_layer(m,grid,layer=0,fields=['Name'])
    # should throw right here since Name will be a property now on the `grid` object
    # and it is not found on the second layer
    mapnik.render_layer(m,grid,layer=1,fields=['Value'])
    utf1 = grid.encode()
if __name__ == "__main__":
setup()
exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
|
oomlout/oomlout-OOMP
|
refs/heads/master
|
OOMPpart_RESE_0402_X_O000_67.py
|
1
|
import OOMP

# Register this resistor part (catalog item 9216) with the shared OOMP
# parts list, tagging it with its type/size/color/description/index.
newPart = OOMP.oompItem(9216)
for tagName, tagValue in (
        ("oompType", "RESE"),
        ("oompSize", "0402"),
        ("oompColor", "X"),
        ("oompDesc", "O000"),
        ("oompIndex", "67")):
    newPart.addTag(tagName, tagValue)

OOMP.parts.append(newPart)
|
repotvsupertuga/tvsupertuga.repository
|
refs/heads/master
|
plugin.video.youtube/resources/lib/youtube_plugin/kodion/items/__init__.py
|
1
|
# Public API of the kodion.items package: the item model classes plus the
# JSON (de)serialization helpers.
__all__ = ['BaseItem', 'AudioItem', 'DirectoryItem', 'VideoItem', 'AudioVideoItem', 'ImageItem', 'WatchLaterItem', 'FavoritesItem',
           'SearchItem', 'NewSearchItem', 'SearchHistoryItem', 'NextPageItem', 'UriItem',
           'from_json', 'to_json', 'to_jsons']

# Fixed: use an explicit relative import, consistent with every other import
# in this package. The previous implicit relative form
# (`from utils import ...`) only resolves on Python 2 and breaks under
# Python 3 (PEP 328).
from .utils import to_json, from_json, to_jsons
from .uri_item import UriItem
from .base_item import BaseItem
from .audio_item import AudioItem
from .directory_item import DirectoryItem
from .watch_later_item import WatchLaterItem
from .favorites_item import FavoritesItem
from .search_item import SearchItem
from .new_search_item import NewSearchItem
from .search_history_item import SearchHistoryItem
from .next_page_item import NextPageItem
from .video_item import VideoItem
from .image_item import ImageItem
from .audiovideo_item import AudioVideoItem
|
idaholab/civet
|
refs/heads/master
|
ci/Stats.py
|
2
|
# Copyright 2016 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals, absolute_import
from django.shortcuts import render
from ci import models, TimeUtils
from graphos.sources.simple import SimpleDataSource
from graphos.renderers.gchart import LineChart
import datetime
def find_group(record, bins):
  """Return the bin edge from sorted *bins* that *record* falls into.

  A record belongs to the greatest edge it strictly exceeds; records at or
  below the second edge map to the first, and records past the final edge
  map to the last.
  """
  chosen = bins[0]
  for edge in bins[1:]:
    if record <= edge:
      return chosen
    chosen = edge
  return bins[-1]
def sort_stats_by_bin(q, key, bins):
  """Group the records of queryset *q* by the bin their *key* value falls in.

  Returns a dict mapping bin edge -> list of records (value dicts).
  """
  grouped = {}
  for record in q.all():
    edge = find_group(record.get(key), bins)
    grouped.setdefault(edge, []).append(record)
  return grouped
def get_stats_query(since):
  """Build a JobTestStatistics values-queryset ordered by job creation time.

  since: optional lower bound on job creation time; None means no bound.
  """
  query = models.JobTestStatistics.objects.order_by('job__created')
  if since:
    query = query.filter(job__created__gte=since)
  return query.values('passed', 'failed', 'skipped', 'job__created')
def get_stats(since=None, display_format=None, bins=None):
  """Tally passed/failed/skipped test counts per time bin.

  since: optional datetime lower bound on job creation time
  display_format: strftime format used to label each bin
  bins: sorted list of bin edges as produced by get_bins()
  Returns three parallel lists of [label, count] pairs: passed, failed,
  skipped — one entry per bin.
  """
  records_by_bin = sort_stats_by_bin(get_stats_query(since), "job__created", bins)
  passed_group = []
  failed_group = []
  skipped_group = []
  for edge in bins:
    label = edge.strftime(display_format)
    records = records_by_bin.get(edge, [])
    passed_group.append([label, sum(r.get("passed") for r in records)])
    failed_group.append([label, sum(r.get("failed") for r in records)])
    skipped_group.append([label, sum(r.get("skipped") for r in records)])
  return passed_group, failed_group, skipped_group
def set_passed(since, x_axis, title, context, context_key, graph_display, bins):
  """Store a LineChart of passed-test counts into context[context_key].

  Returns the passed-count rows (with the header row prepended when any
  data exists), mirroring get_stats()'s first list.
  """
  passed, failed, skipped = get_stats(since, graph_display, bins)
  if passed:
    passed.insert(0, [x_axis, "passed"])
  chart_options = {
      "title": title,
      "hAxis": {"title": x_axis},
      "vAxis": {"title": "Number of Tests"},
      }
  context[context_key] = LineChart(SimpleDataSource(data=passed), options=chart_options)
  return passed
def create_repo_pr_graph(repo, since, x_axis, title, graph_display, bins):
  """Build a LineChart of new pull-request counts per bin for one repository.

  repo: a dict with "id" and "name" keys
  Returns None when the repository has no PRs created since |since|.
  """
  prs = models.PullRequest.objects.filter(
      repository__pk=repo["id"], created__gte=since).values("created")
  if not prs.count():
    return
  binned = sort_stats_by_bin(prs, "created", bins)
  chart_rows = [[x_axis, repo["name"]]]
  for edge in bins:
    chart_rows.append([edge.strftime(graph_display), len(binned.get(edge, []))])
  chart_options = {
      "title": title,
      "hAxis": {"title": x_axis},
      "vAxis": {"title": "%s new PRs" % repo["name"]},
      }
  return LineChart(SimpleDataSource(data=chart_rows), options=chart_options)
def set_all_repo_prs(repos_q, since, x_axis, title, context, graph_display, bins):
  """Create a PR graph for every repository and collect them in the context.

  Graphs are appended to context["repo_graphs"][repo_id]; repositories with
  no PR data are skipped.
  """
  for repo in repos_q.all():
    graph = create_repo_pr_graph(repo, since, x_axis, title, graph_display, bins)
    if graph:
      by_repo = context.setdefault("repo_graphs", {})
      by_repo.setdefault(repo["id"], []).append(graph)
def get_bins(start_date, step):
  """Return a list of bin edges from start_date (inclusive) stepping by *step*.

  Edges are generated while they stay strictly before the end of today
  (local time, 23:59).
  """
  end = TimeUtils.get_local_time().replace(hour=23, minute=59)
  bins = [start_date]
  edge = start_date + step
  while edge < end:
    bins.append(edge)
    edge += step
  return bins
def num_tests(request):
  """View: render charts of passed-test counts.

  Produces two LineCharts: the last 6 months bucketed by week
  ("month_chart") and the last week bucketed by day ("week_chart").
  """
  context = {}
  start = (TimeUtils.get_local_time() - datetime.timedelta(days=180)).replace(hour=0, minute=0)
  bins = get_bins(start, datetime.timedelta(days=7))
  set_passed(start, "week", "Passed tests in last 6 months, by week", context, "month_chart", "%m/%d", bins)
  start = (TimeUtils.get_local_time() - datetime.timedelta(days=7)).replace(hour=0, minute=0)
  bins = get_bins(start, datetime.timedelta(days=1))
  set_passed(start, "day", "Passed tests in last week, by day", context, "week_chart", "%m/%d", bins)
  return render(request, 'ci/num_tests.html', context)
def num_prs_by_repo(request):
  """View: per-repository charts of new pull-request counts.

  For each active repository, builds a 6-months-by-week chart and a
  week-by-day chart, then renders them sorted by repository name.
  """
  context = {}
  repos_q = models.Repository.objects.filter(active=True).order_by("name").values("id", "name").all()
  repo_map = { v.get("id"): v.get("name") for v in repos_q }
  start = (TimeUtils.get_local_time() - datetime.timedelta(days=180)).replace(hour=0, minute=0)
  bins = get_bins(start, datetime.timedelta(days=7))
  set_all_repo_prs(repos_q, start, "week", "Number of new PRs in last 6 months, by week", context, "%m/%d", bins)
  start = (TimeUtils.get_local_time() - datetime.timedelta(days=7)).replace(hour=0, minute=0)
  bins = get_bins(start, datetime.timedelta(days=1))
  set_all_repo_prs(repos_q, start, "day", "Number of new PRs in last week, by day", context, "%m/%d", bins)
  # Sort repositories case-insensitively by name for a stable page ordering.
  sorted_repos_by_name = sorted(list(repo_map.keys()), key=lambda v: repo_map[v].lower())
  repo_data = []
  for key in sorted_repos_by_name:
    repo_graphs = context.get("repo_graphs", {}).get(key, [])
    if repo_graphs:
      repo_data.append({"id": key, "name": repo_map[key], "graphs": repo_graphs})
  context["repos"] = repo_data
  return render(request, 'ci/num_prs.html', context)
|
larusx/yunmark
|
refs/heads/master
|
site-packages/markdown/extensions/footnotes.py
|
13
|
"""
Footnotes Extension for Python-Markdown
=======================================
Adds footnote handling to Python-Markdown.
See <https://pythonhosted.org/Markdown/extensions/footnotes.html>
for documentation.
Copyright The Python Markdown Project
License: [BSD](http://www.opensource.org/licenses/bsd-license.php)
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from . import Extension
from ..preprocessors import Preprocessor
from ..inlinepatterns import Pattern
from ..treeprocessors import Treeprocessor
from ..postprocessors import Postprocessor
from ..util import etree, text_type
from ..odict import OrderedDict
import re
FN_BACKLINK_TEXT = "zz1337820767766393qq"
NBSP_PLACEHOLDER = "qq3936677670287331zz"
DEF_RE = re.compile(r'[ ]{0,3}\[\^([^\]]*)\]:\s*(.*)')
TABBED_RE = re.compile(r'((\t)|( ))(.*)')
class FootnoteExtension(Extension):
    """ Footnote Extension.

    Registers the preprocessor, inline pattern, tree processor and
    postprocessor that together implement `[^id]` footnote syntax.
    """

    def __init__(self, *args, **kwargs):
        """ Setup configs. """
        self.config = {
            'PLACE_MARKER':
                ["///Footnotes Go Here///",
                 "The text string that marks where the footnotes go"],
            'UNIQUE_IDS':
                [False,
                 "Avoid name collisions across "
                 "multiple calls to reset()."],
            "BACKLINK_TEXT":
                ["↩",
                 "The text string that links from the footnote to the reader's place."]
        }
        super(FootnoteExtension, self).__init__(*args, **kwargs)

        # In multiple invocations, emit links that don't get tangled.
        self.unique_prefix = 0

        self.reset()

    def extendMarkdown(self, md, md_globals):
        """ Add pieces to Markdown. """
        md.registerExtension(self)
        self.parser = md.parser
        self.md = md
        # Insert a preprocessor before ReferencePreprocessor
        md.preprocessors.add("footnote", FootnotePreprocessor(self),
                             "<reference")
        # Insert an inline pattern before ImageReferencePattern
        FOOTNOTE_RE = r'\[\^([^\]]*)\]'  # blah blah [^1] blah
        md.inlinePatterns.add("footnote", FootnotePattern(FOOTNOTE_RE, self),
                              "<reference")
        # Insert a tree-processor that would actually add the footnote div
        # This must be before all other treeprocessors (i.e., inline and
        # codehilite) so they can run on the contents of the div.
        md.treeprocessors.add("footnote", FootnoteTreeprocessor(self),
                              "_begin")
        # Insert a postprocessor after amp_substitute processor
        md.postprocessors.add("footnote", FootnotePostprocessor(self),
                              ">amp_substitute")

    def reset(self):
        """ Clear footnotes on reset, and prepare for distinct document. """
        self.footnotes = OrderedDict()
        self.unique_prefix += 1

    def findFootnotesPlaceholder(self, root):
        """ Return ElementTree Element that contains Footnote placeholder.

        Returns a (child, parent, isText) tuple, where isText is True when
        the marker was found in child.text and False when it was found in
        child.tail, or None when the marker is absent.
        """
        def finder(element):
            for child in element:
                if child.text:
                    if child.text.find(self.getConfig("PLACE_MARKER")) > -1:
                        return child, element, True
                if child.tail:
                    if child.tail.find(self.getConfig("PLACE_MARKER")) > -1:
                        return child, element, False
                # Bug fix: propagate a match found in a nested element.
                # Previously the recursive result was discarded, so a
                # placeholder more than one level deep was never found.
                child_res = finder(child)
                if child_res is not None:
                    return child_res
            return None

        res = finder(root)
        return res

    def setFootnote(self, id, text):
        """ Store a footnote for later retrieval. """
        self.footnotes[id] = text

    def get_separator(self):
        """ Return the id separator: '-' for (x)html5 output, ':' otherwise. """
        if self.md.output_format in ['html5', 'xhtml5']:
            return '-'
        return ':'

    def makeFootnoteId(self, id):
        """ Return footnote link id. """
        if self.getConfig("UNIQUE_IDS"):
            return 'fn%s%d-%s' % (self.get_separator(), self.unique_prefix, id)
        else:
            return 'fn%s%s' % (self.get_separator(), id)

    def makeFootnoteRefId(self, id):
        """ Return footnote back-link id. """
        if self.getConfig("UNIQUE_IDS"):
            return 'fnref%s%d-%s' % (self.get_separator(), self.unique_prefix, id)
        else:
            return 'fnref%s%s' % (self.get_separator(), id)

    def makeFootnotesDiv(self, root):
        """ Return div of footnotes as et Element, or None if none stored. """
        if not list(self.footnotes.keys()):
            return None

        div = etree.Element("div")
        div.set('class', 'footnote')
        etree.SubElement(div, "hr")
        ol = etree.SubElement(div, "ol")

        for id in self.footnotes.keys():
            li = etree.SubElement(ol, "li")
            li.set("id", self.makeFootnoteId(id))
            self.parser.parseChunk(li, self.footnotes[id])
            backlink = etree.Element("a")
            backlink.set("href", "#" + self.makeFootnoteRefId(id))
            if self.md.output_format not in ['html5', 'xhtml5']:
                backlink.set("rev", "footnote")  # Invalid in HTML5
            backlink.set("class", "footnote-backref")
            backlink.set("title", "Jump back to footnote %d in the text" %
                         (self.footnotes.index(id)+1))
            backlink.text = FN_BACKLINK_TEXT

            if li.getchildren():
                # Attach the backlink to the end of the footnote's last
                # paragraph (or in a new paragraph if the last child isn't one).
                node = li[-1]
                if node.tag == "p":
                    node.text = node.text + NBSP_PLACEHOLDER
                    node.append(backlink)
                else:
                    p = etree.SubElement(li, "p")
                    p.append(backlink)
        return div
class FootnotePreprocessor(Preprocessor):
    """ Find all footnote references and store for later use. """

    def __init__ (self, footnotes):
        # The owning FootnoteExtension; definitions are stored on it.
        self.footnotes = footnotes

    def run(self, lines):
        """
        Loop through lines and find, set, and remove footnote definitions.

        Keywords:

        * lines: A list of lines of text

        Return: A list of lines of text with footnote definitions removed.

        """
        newlines = []
        i = 0
        while True:
            m = DEF_RE.match(lines[i])
            if m:
                # Definition found: gather its (possibly multi-line, indented)
                # body, store it on the extension, and skip those lines.
                fn, _i = self.detectTabbed(lines[i+1:])
                fn.insert(0, m.group(2))
                i += _i-1 # skip past footnote
                self.footnotes.setFootnote(m.group(1), "\n".join(fn))
            else:
                newlines.append(lines[i])
            if len(lines) > i+1:
                i += 1
            else:
                break
        return newlines

    def detectTabbed(self, lines):
        """ Find indented text and remove indent before further proccesing.

        Keyword arguments:

        * lines: an array of strings

        Returns: a list of post processed items and the index of last line.

        """
        items = []
        blank_line = False # have we encountered a blank line yet?
        i = 0 # to keep track of where we are

        def detab(line):
            # Strip one level of indent (a tab or four spaces); returns None
            # for unindented lines (TABBED_RE won't match).
            match = TABBED_RE.match(line)
            if match:
                return match.group(4)

        for line in lines:
            if line.strip(): # Non-blank line
                detabbed_line = detab(line)
                if detabbed_line:
                    items.append(detabbed_line)
                    i += 1
                    continue
                elif not blank_line and not DEF_RE.match(line):
                    # not tabbed but still part of first par.
                    items.append(line)
                    i += 1
                    continue
                else:
                    # Unindented line after a blank line (or a new footnote
                    # definition): the footnote body ends here.
                    return items, i+1

            else: # Blank line: _maybe_ we are done.
                blank_line = True
                i += 1 # advance
                # Find the next non-blank line
                for j in range(i, len(lines)):
                    if lines[j].strip():
                        next_line = lines[j]; break
                else:
                    break # There is no more text; we are done.
                # Check if the next non-blank line is tabbed
                if detab(next_line): # Yes, more work to do.
                    items.append("")
                    continue
                else:
                    break # No, we are done.
        else:
            # for-else: ran off the end of the input without breaking.
            i += 1

        return items, i
class FootnotePattern(Pattern):
    """ InlinePattern for footnote markers in a document's body text. """

    def __init__(self, pattern, footnotes):
        super(FootnotePattern, self).__init__(pattern)
        # The owning FootnoteExtension, which holds the stored definitions.
        self.footnotes = footnotes

    def handleMatch(self, m):
        """Replace a `[^id]` marker with a <sup><a .../></sup> reference,
        or return None when no matching definition was collected."""
        id = m.group(2)
        if id in self.footnotes.footnotes.keys():
            sup = etree.Element("sup")
            a = etree.SubElement(sup, "a")
            sup.set('id', self.footnotes.makeFootnoteRefId(id))
            a.set('href', '#' + self.footnotes.makeFootnoteId(id))
            if self.footnotes.md.output_format not in ['html5', 'xhtml5']:
                a.set('rel', 'footnote') # invalid in HTML5
            a.set('class', 'footnote-ref')
            # The visible label is the footnote's 1-based position in
            # definition order.
            a.text = text_type(self.footnotes.footnotes.index(id) + 1)
            return sup
        else:
            return None
class FootnoteTreeprocessor(Treeprocessor):
    """ Build and append footnote div to end of document. """

    def __init__ (self, footnotes):
        # The owning FootnoteExtension.
        self.footnotes = footnotes

    def run(self, root):
        """Insert the generated footnotes <div> at the placeholder location
        if one exists, otherwise append it to the end of the document."""
        footnotesDiv = self.footnotes.makeFootnotesDiv(root)
        if footnotesDiv:
            result = self.footnotes.findFootnotesPlaceholder(root)
            if result:
                child, parent, isText = result
                ind = parent.getchildren().index(child)
                if isText:
                    # Marker was the element's text: replace the element.
                    parent.remove(child)
                    parent.insert(ind, footnotesDiv)
                else:
                    # Marker was in the element's tail: insert right after it.
                    parent.insert(ind + 1, footnotesDiv)
                    child.tail = None
            else:
                root.append(footnotesDiv)
class FootnotePostprocessor(Postprocessor):
    """ Replace placeholders with html entities. """
    def __init__(self, footnotes):
        # The owning FootnoteExtension (provides BACKLINK_TEXT config).
        self.footnotes = footnotes

    def run(self, text):
        # Swap the serialization-safe placeholder strings back to the real
        # backlink text and non-breaking space.
        text = text.replace(FN_BACKLINK_TEXT, self.footnotes.getConfig("BACKLINK_TEXT"))
        return text.replace(NBSP_PLACEHOLDER, " ")
# Entry point used by markdown.Markdown(extensions=["footnotes", ...]).
def makeExtension(*args, **kwargs):
    """ Return an instance of the FootnoteExtension """
    return FootnoteExtension(*args, **kwargs)
|
hnzProjects/myBlog
|
refs/heads/master
|
node_modules/gulp-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
|
1284
|
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import copy
import hashlib
import json
import multiprocessing
import os.path
import re
import signal
import subprocess
import sys
import gyp
import gyp.common
from gyp.common import OrderedSet
import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
from cStringIO import StringIO
from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_PREFIX': 'lib',
# Gyp expects the following variables to be expandable by the build
# system to the appropriate locations. Ninja prefers paths to be
# known at gyp time. To resolve this, introduce special
# variables starting with $! and $| (which begin with a $ so gyp knows it
# should be treated specially, but is otherwise an invalid
# ninja/shell variable) that are passed to gyp here but expanded
# before writing out into the target .ninja files; see
# ExpandSpecial.
# $! is used for variables that represent a path and that can only appear at
# the start of a string, while $| is used for variables that can appear
# anywhere in a string.
'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
'PRODUCT_DIR': '$!PRODUCT_DIR',
'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',
# Special variables that may be used by gyp 'rule' targets.
# We generate definitions for these variables on the fly when processing a
# rule.
'RULE_INPUT_ROOT': '${root}',
'RULE_INPUT_DIRNAME': '${dirname}',
'RULE_INPUT_PATH': '${source}',
'RULE_INPUT_EXT': '${ext}',
'RULE_INPUT_NAME': '${name}',
}
# Placates pylint.
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None
generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
def StripPrefix(arg, prefix):
  """Return |arg| with a leading |prefix| removed; |arg| unchanged otherwise."""
  return arg[len(prefix):] if arg.startswith(prefix) else arg
def QuoteShellArgument(arg, flavor):
  """Quote a string such that it will be interpreted as a single argument
  by the shell."""
  # Whitelist the common harmless characters rather than trying to enumerate
  # everything the shell treats specially; quote anything else.
  if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
    return arg  # Nothing the shell could misinterpret.
  if flavor == 'win':
    return gyp.msvs_emulation.QuoteForRspFile(arg)
  # POSIX: single-quote, escaping each embedded single quote as '"'"'.
  escaped = arg.replace("'", "'\"'\"'")
  return "'%s'" % escaped
def Define(d, flavor):
  """Takes a preprocessor define and returns a -D parameter that's ninja- and
  shell-escaped."""
  if flavor == 'win':
    # cl.exe replaces literal # characters with = in preprocesor definitions for
    # some reason. Octal-encode to work around that.
    d = d.replace('#', '\\%03o' % ord('#'))
  # Escape for ninja first, then for the shell.
  return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
def AddArch(output, arch):
  """Adds an arch string to an output path (before the extension)."""
  root, ext = os.path.splitext(output)
  return '{0}.{1}{2}'.format(root, arch, ext)
class Target(object):
  """Target represents the paths used within a single gyp target.

  Building a target conceptually runs through up to four stages —
  actions/rules/copies, compiles, link, mac-bundle merge — any of which may
  be absent. Dependents usually only need the final output, but some steps
  (e.g. linking) must reach inside for intermediate paths; this object keeps
  those concrete per-file paths, while methods derive values such as "the
  last output of the target".
  """

  def __init__(self, type):
    # The gyp target type ('static_library', 'executable', ...).
    self.type = type
    # Stamp marking completion of the input deps needed by dependent actions.
    self.preaction_stamp = None
    # Stamp marking completion of the input deps needed by dependent compiles.
    self.precompile_stamp = None
    # Stamp marking completion of this target's actions/rules/copies, if any.
    self.actions_stamp = None
    # Output of the link step, if any.
    self.binary = None
    # File marking completion of the mac bundle build, if any.
    self.bundle = None
    # Windows incremental linking links against the .objs composing a .lib
    # instead of the .lib itself; those .objs (and the compile deps needed to
    # rebuild them) are recorded here for direct dependents.
    self.component_objs = None
    self.compile_deps = None
    # Windows only: dependents link against the import .lib (never the .dll),
    # so track it separately.
    self.import_lib = None

  def Linkable(self):
    """True if other targets can link against this one."""
    return self.type in ('static_library', 'shared_library')

  def UsesToc(self, flavor):
    """True if the target should produce a restat rule based on a TOC file."""
    # Only naked shared libraries/loadable modules get a .TOC: on Windows
    # there is none, and for bundles the TOC belongs with the binary, not
    # FinalOutput() — the naive placement would put it inside the bundle.
    if flavor == 'win' or self.bundle:
      return False
    return self.type in ('shared_library', 'loadable_module')

  def PreActionInput(self, flavor):
    """Path a dependent action step should depend on, if any."""
    if self.UsesToc(flavor):
      return self.FinalOutput() + '.TOC'
    return self.FinalOutput() or self.preaction_stamp

  def PreCompileInput(self):
    """Path a dependent compile step should depend on, if any."""
    return self.actions_stamp or self.precompile_stamp

  def FinalOutput(self):
    """The last output of the target, which depends on all prior steps."""
    return self.bundle or self.binary or self.actions_stamp
# A small discourse on paths as used within the Ninja build:
# All files we produce (both at gyp and at build time) appear in the
# build directory (e.g. out/Debug).
#
# Paths within a given .gyp file are always relative to the directory
# containing the .gyp file. Call these "gyp paths". This includes
# sources as well as the starting directory a given gyp rule/action
# expects to be run from. We call the path from the source root to
# the gyp file the "base directory" within the per-.gyp-file
# NinjaWriter code.
#
# All paths as written into the .ninja files are relative to the build
# directory. Call these paths "ninja paths".
#
# We translate between these two notions of paths with two helper
# functions:
#
# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
# into the equivalent ninja path.
#
# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
# an output file; the result can be namespaced such that it is unique
# to the input file name as well as the output target name.
class NinjaWriter(object):
  def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir,
               output_file, toplevel_build, output_file_name, flavor,
               toplevel_dir=None):
    """
    base_dir: path from source root to directory containing this gyp file,
              by gyp semantics, all input paths are relative to this
    build_dir: path from source root to build output
    toplevel_dir: path to the toplevel directory
    """

    self.hash_for_rules = hash_for_rules
    self.target_outputs = target_outputs
    self.base_dir = base_dir
    self.build_dir = build_dir
    self.ninja = ninja_syntax.Writer(output_file)
    self.toplevel_build = toplevel_build
    self.output_file_name = output_file_name

    self.flavor = flavor
    # Absolute build dir is only computable when the toplevel dir is known.
    self.abs_build_dir = None
    if toplevel_dir is not None:
      self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
                                                        build_dir))
    self.obj_ext = '.obj' if flavor == 'win' else '.o'
    if flavor == 'win':
      # See docstring of msvs_emulation.GenerateEnvironmentFiles().
      # Maps arch name -> environment file used by MSVC tool invocations.
      self.win_env = {}
      for arch in ('x86', 'x64'):
        self.win_env[arch] = 'environment.' + arch

    # Relative path from build output dir to base dir.
    build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
    self.build_to_base = os.path.join(build_to_top, base_dir)
    # Relative path from base dir to build dir.
    base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
    self.base_to_build = os.path.join(base_to_top, build_dir)
def ExpandSpecial(self, path, product_dir=None):
"""Expand specials like $!PRODUCT_DIR in |path|.
If |product_dir| is None, assumes the cwd is already the product
dir. Otherwise, |product_dir| is the relative path to the product
dir.
"""
PRODUCT_DIR = '$!PRODUCT_DIR'
if PRODUCT_DIR in path:
if product_dir:
path = path.replace(PRODUCT_DIR, product_dir)
else:
path = path.replace(PRODUCT_DIR + '/', '')
path = path.replace(PRODUCT_DIR + '\\', '')
path = path.replace(PRODUCT_DIR, '.')
INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
if INTERMEDIATE_DIR in path:
int_dir = self.GypPathToUniqueOutput('gen')
# GypPathToUniqueOutput generates a path relative to the product dir,
# so insert product_dir in front if it is provided.
path = path.replace(INTERMEDIATE_DIR,
os.path.join(product_dir or '', int_dir))
CONFIGURATION_NAME = '$|CONFIGURATION_NAME'
path = path.replace(CONFIGURATION_NAME, self.config_name)
return path
  def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
    """Replace the RULE_INPUT_* placeholders in |path| with the concrete
    values for one rule input file."""
    if self.flavor == 'win':
      # Rule definitions on Windows may also contain VS macros (e.g.
      # $(OutDir)); expand them via the msvs emulation layer first.
      path = self.msvs_settings.ConvertVSMacros(
          path, config=self.config_name)
    path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root)
    path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'],
                        dirname)
    path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source)
    path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext)
    path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name)
    return path
  def GypPathToNinja(self, path, env=None):
    """Translate a gyp path to a ninja path, optionally expanding environment
    variable references in |path| with |env|.

    See the above discourse on path conversions."""
    if env:
      if self.flavor == 'mac':
        path = gyp.xcode_emulation.ExpandEnvVars(path, env)
      elif self.flavor == 'win':
        path = gyp.msvs_emulation.ExpandMacros(path, env)
    if path.startswith('$!'):
      # $! specials (e.g. $!PRODUCT_DIR) already expand to build-dir-relative
      # paths, so no base-dir rebasing is needed.
      expanded = self.ExpandSpecial(path)
      if self.flavor == 'win':
        expanded = os.path.normpath(expanded)
      return expanded
    if '$|' in path:
      path = self.ExpandSpecial(path)
    assert '$' not in path, path
    # Plain gyp paths are relative to the .gyp file; rebase onto the build dir.
    return os.path.normpath(os.path.join(self.build_to_base, path))
  def GypPathToUniqueOutput(self, path, qualified=True):
    """Translate a gyp path to a ninja path for writing output.

    If qualified is True, qualify the resulting filename with the name
    of the target.  This is necessary when e.g. compiling the same
    path twice for two separate output targets.

    See the above discourse on path conversions."""

    path = self.ExpandSpecial(path)
    assert not path.startswith('$'), path

    # Translate the path following this scheme:
    #   Input: foo/bar.gyp, target targ, references baz/out.o
    #   Output: obj/foo/baz/targ.out.o (if qualified)
    #           obj/foo/baz/out.o (otherwise)
    #     (and obj.host instead of obj for cross-compiles)
    #
    # Why this scheme and not some other one?
    # 1) for a given input, you can compute all derived outputs by matching
    #    its path, even if the input is brought via a gyp file with '..'.
    # 2) simple files like libraries and stamps have a simple filename.

    # 'obj.host' (etc.) namespaces cross-compile artifacts away from 'obj'.
    obj = 'obj'
    if self.toolset != 'target':
      obj += '.' + self.toolset

    path_dir, path_basename = os.path.split(path)
    assert not os.path.isabs(path_dir), (
        "'%s' can not be absolute path (see crbug.com/462153)." % path_dir)

    if qualified:
      path_basename = self.name + '.' + path_basename
    return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
                                         path_basename))
  def WriteCollapsedDependencies(self, name, targets, order_only=None):
    """Given a list of targets, return a path for a single file
    representing the result of building all the targets or None.

    Uses a stamp file if necessary."""

    # NOTE: relies on Python 2 semantics where filter() returns a list;
    # the assert verifies |targets| contains no falsy entries.
    assert targets == filter(None, targets), targets
    if len(targets) == 0:
      assert not order_only
      return None
    if len(targets) > 1 or order_only:
      # Collapse multiple deps (or an order-only dep) into one stamp file.
      stamp = self.GypPathToUniqueOutput(name + '.stamp')
      targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
      self.ninja.newline()
    return targets[0]
def _SubninjaNameForArch(self, arch):
output_file_base = os.path.splitext(self.output_file_name)[0]
return '%s.%s.ninja' % (output_file_base, arch)
  def WriteSpec(self, spec, config_name, generator_flags):
    """The main entry point for NinjaWriter: write the build rules for a spec.

    Returns a Target object, which represents the output paths for this spec.
    Returns None if there are no outputs (e.g. a settings-only 'none' type
    target)."""

    self.config_name = config_name
    self.name = spec['target_name']
    self.toolset = spec['toolset']
    config = spec['configurations'][config_name]
    self.target = Target(spec['type'])
    self.is_standalone_static_library = bool(
        spec.get('standalone_static_library', 0))
    # Track if this target contains any C++ files, to decide if gcc or g++
    # should be used for linking.
    self.uses_cpp = False

    self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
    self.xcode_settings = self.msvs_settings = None
    if self.flavor == 'mac':
      self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
    if self.flavor == 'win':
      self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
                                                           generator_flags)
      # Point the per-target tool variables at the arch-specific toolchain.
      arch = self.msvs_settings.GetArch(config_name)
      self.ninja.variable('arch', self.win_env[arch])
      self.ninja.variable('cc', '$cl_' + arch)
      self.ninja.variable('cxx', '$cl_' + arch)
      self.ninja.variable('cc_host', '$cl_' + arch)
      self.ninja.variable('cxx_host', '$cl_' + arch)
      self.ninja.variable('asm', '$ml_' + arch)

    if self.flavor == 'mac':
      self.archs = self.xcode_settings.GetActiveArchs(config_name)
      if len(self.archs) > 1:
        # Fat binaries get one subninja writer per arch.
        self.arch_subninjas = dict(
            (arch, ninja_syntax.Writer(
                OpenOutput(os.path.join(self.toplevel_build,
                                        self._SubninjaNameForArch(arch)),
                           'w')))
            for arch in self.archs)

    # Compute predepends for all rules.
    # actions_depends is the dependencies this target depends on before running
    # any of its action/rule/copy steps.
    # compile_depends is the dependencies this target depends on before running
    # any of its compile steps.
    actions_depends = []
    compile_depends = []
    # TODO(evan): it is rather confusing which things are lists and which
    # are strings.  Fix these.
    if 'dependencies' in spec:
      for dep in spec['dependencies']:
        if dep in self.target_outputs:
          target = self.target_outputs[dep]
          actions_depends.append(target.PreActionInput(self.flavor))
          compile_depends.append(target.PreCompileInput())
      actions_depends = filter(None, actions_depends)
      compile_depends = filter(None, compile_depends)
      actions_depends = self.WriteCollapsedDependencies('actions_depends',
                                                        actions_depends)
      compile_depends = self.WriteCollapsedDependencies('compile_depends',
                                                        compile_depends)
      self.target.preaction_stamp = actions_depends
      self.target.precompile_stamp = compile_depends

    # Write out actions, rules, and copies.  These must happen before we
    # compile any sources, so compute a list of predependencies for sources
    # while we do it.
    extra_sources = []
    mac_bundle_depends = []
    self.target.actions_stamp = self.WriteActionsRulesCopies(
        spec, extra_sources, actions_depends, mac_bundle_depends)

    # If we have actions/rules/copies, we depend directly on those, but
    # otherwise we depend on dependent target's actions/rules/copies etc.
    # We never need to explicitly depend on previous target's link steps,
    # because no compile ever depends on them.
    compile_depends_stamp = (self.target.actions_stamp or compile_depends)

    # Write out the compilation steps, if any.
    link_deps = []
    sources = extra_sources + spec.get('sources', [])
    if sources:
      if self.flavor == 'mac' and len(self.archs) > 1:
        # Write subninja file containing compile and link commands scoped to
        # a single arch if a fat binary is being built.
        for arch in self.archs:
          self.ninja.subninja(self._SubninjaNameForArch(arch))

      pch = None
      if self.flavor == 'win':
        gyp.msvs_emulation.VerifyMissingSources(
            sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
        pch = gyp.msvs_emulation.PrecompiledHeader(
            self.msvs_settings, config_name, self.GypPathToNinja,
            self.GypPathToUniqueOutput, self.obj_ext)
      else:
        pch = gyp.xcode_emulation.MacPrefixHeader(
            self.xcode_settings, self.GypPathToNinja,
            lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
      link_deps = self.WriteSources(
          self.ninja, config_name, config, sources, compile_depends_stamp, pch,
          spec)
      # Some actions/rules output 'sources' that are already object files.
      obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
      if obj_outputs:
        if self.flavor != 'mac' or len(self.archs) == 1:
          link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
        else:
          print "Warning: Actions/rules writing object files don't work with " \
              "multiarch targets, dropping. (target %s)" % spec['target_name']
    elif self.flavor == 'mac' and len(self.archs) > 1:
      # No sources at all: keep the per-arch link_deps shape expected below.
      link_deps = collections.defaultdict(list)

    compile_deps = self.target.actions_stamp or actions_depends
    if self.flavor == 'win' and self.target.type == 'static_library':
      self.target.component_objs = link_deps
      self.target.compile_deps = compile_deps

    # Write out a link step, if needed.
    output = None
    is_empty_bundle = not link_deps and not mac_bundle_depends
    if link_deps or self.target.actions_stamp or actions_depends:
      output = self.WriteTarget(spec, config_name, config, link_deps,
                                compile_deps)
      if self.is_mac_bundle:
        mac_bundle_depends.append(output)

    # Bundle all of the above together, if needed.
    if self.is_mac_bundle:
      output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)

    if not output:
      return None

    assert self.target.FinalOutput(), output
    return self.target
def _WinIdlRule(self, source, prebuild, outputs):
  """Handle the implicit VS .idl rule for one source file. Fills |outputs|
  with files that are generated.

  Args:
    source: gyp path of the .idl file.
    prebuild: stamp/dependency passed as order-only input to the edge.
    outputs: list mutated in place; the generated files are appended.
  """
  # outdir/output/vars come from the MSVS MIDL settings for this config.
  outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
      source, self.config_name)
  outdir = self.GypPathToNinja(outdir)
  def fix_path(path, rel=None):
    # Join onto outdir, expand MSVS rule variables (root/dirname/ext/...)
    # derived from |source|, and optionally re-relativize against |rel|.
    path = os.path.join(outdir, path)
    dirname, basename = os.path.split(source)
    root, ext = os.path.splitext(basename)
    path = self.ExpandRuleVariables(
        path, root, dirname, source, ext, basename)
    if rel:
      path = os.path.relpath(path, rel)
    return path
  # Variable values are made relative to outdir; outputs stay outdir-rooted.
  vars = [(name, fix_path(value, outdir)) for name, value in vars]
  output = [fix_path(p) for p in output]
  vars.append(('outdir', outdir))
  vars.append(('idlflags', flags))
  input = self.GypPathToNinja(source)
  self.ninja.build(output, 'idl', input,
                   variables=vars, order_only=prebuild)
  outputs.extend(output)
def WriteWinIdlFiles(self, spec, prebuild):
  """Writes rules to match MSVS's implicit idl handling.

  Returns the list of generated files, or [] when the target declares its
  own explicit idl rules/actions.
  """
  assert self.flavor == 'win'
  if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
    return []
  generated = []
  idl_sources = [s for s in spec['sources'] if s.endswith('.idl')]
  for idl_source in idl_sources:
    self._WinIdlRule(idl_source, prebuild, generated)
  return generated
def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
                            mac_bundle_depends):
  """Write out the Actions, Rules, and Copies steps. Return a path
  representing the outputs of these steps.

  |extra_sources| and |mac_bundle_depends| are mutated in place by the
  helpers called here.
  """
  outputs = []
  if self.is_mac_bundle:
    # Copy so that rules can consume entries without affecting the spec.
    mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
  else:
    mac_bundle_resources = []
  extra_mac_bundle_resources = []

  if 'actions' in spec:
    outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
                                 extra_mac_bundle_resources)
  if 'rules' in spec:
    outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
                               mac_bundle_resources,
                               extra_mac_bundle_resources)
  if 'copies' in spec:
    outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)

  if 'sources' in spec and self.flavor == 'win':
    # MSVS implicitly runs MIDL on .idl sources; emulate that.
    outputs += self.WriteWinIdlFiles(spec, prebuild)

  # Collapse the outputs into a single stamp so dependents have one input.
  stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)

  if self.is_mac_bundle:
    xcassets = self.WriteMacBundleResources(
        extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
    partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
    self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)

  return stamp
def GenerateDescription(self, verb, message, fallback):
  """Generate and return a description of a build step.

  |verb| is the short summary, e.g. ACTION or RULE.
  |message| is a hand-written description, or None if not available.
  |fallback| is the gyp-level name of the step, usable as a fallback.
  """
  # Tag non-target toolsets so host builds are distinguishable in output.
  label = verb if self.toolset == 'target' else '%s(%s)' % (verb, self.toolset)
  if message:
    return '%s %s' % (label, self.ExpandSpecial(message))
  return '%s %s: %s' % (label, self.name, fallback)
def WriteActions(self, actions, extra_sources, prebuild,
                 extra_mac_bundle_resources):
  """Write one ninja rule + build edge per gyp 'action'.

  Mutates |extra_sources| / |extra_mac_bundle_resources| in place when an
  action asks for its outputs to be treated as such. Returns the list of
  all action output paths (ninja paths).
  """
  # Actions cd into the base directory.
  env = self.GetToolchainEnv()
  all_outputs = []
  for action in actions:
    # First write out a rule for the action.
    name = '%s_%s' % (action['action_name'], self.hash_for_rules)
    description = self.GenerateDescription('ACTION',
                                           action.get('message', None),
                                           name)
    # Cygwin wrapping only applies on Windows.
    is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
                 if self.flavor == 'win' else False)
    args = action['action']
    depfile = action.get('depfile', None)
    if depfile:
      depfile = self.ExpandSpecial(depfile, self.base_to_build)
    pool = 'console' if int(action.get('ninja_use_console', 0)) else None
    rule_name, _ = self.WriteNewNinjaRule(name, args, description,
                                          is_cygwin, env, pool,
                                          depfile=depfile)

    inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
    if int(action.get('process_outputs_as_sources', False)):
      extra_sources += action['outputs']
    if int(action.get('process_outputs_as_mac_bundle_resources', False)):
      extra_mac_bundle_resources += action['outputs']
    outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]

    # Then write out an edge using the rule.
    self.ninja.build(outputs, rule_name, inputs,
                     order_only=prebuild)
    all_outputs += outputs

    self.ninja.newline()

  return all_outputs
def WriteRules(self, rules, extra_sources, prebuild,
               mac_bundle_resources, extra_mac_bundle_resources):
  """Write ninja rules/edges for gyp 'rules' (per-source-file commands).

  Mutates |extra_sources|, |mac_bundle_resources| and
  |extra_mac_bundle_resources| in place. Returns all generated outputs.
  """
  env = self.GetToolchainEnv()
  all_outputs = []
  for rule in rules:
    # Skip a rule with no action and no inputs.
    if 'action' not in rule and not rule.get('rule_sources', []):
      continue

    # First write out a rule for the rule action.
    name = '%s_%s' % (rule['rule_name'], self.hash_for_rules)

    args = rule['action']
    description = self.GenerateDescription(
        'RULE',
        rule.get('message', None),
        ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
    is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
                 if self.flavor == 'win' else False)
    pool = 'console' if int(rule.get('ninja_use_console', 0)) else None
    rule_name, args = self.WriteNewNinjaRule(
        name, args, description, is_cygwin, env, pool)

    # TODO: if the command references the outputs directly, we should
    # simplify it to just use $out.

    # Rules can potentially make use of some special variables which
    # must vary per source file.
    # Compute the list of variables we'll need to provide.
    special_locals = ('source', 'root', 'dirname', 'ext', 'name')
    needed_variables = set(['source'])
    for argument in args:
      for var in special_locals:
        if '${%s}' % var in argument:
          needed_variables.add(var)

    def cygwin_munge(path):
      # pylint: disable=cell-var-from-loop
      # Cygwin commands want forward slashes; closure reads the loop's
      # is_cygwin (only ever called within the same iteration).
      if is_cygwin:
        return path.replace('\\', '/')
      return path

    inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]

    # If there are n source files matching the rule, and m additional rule
    # inputs, then adding 'inputs' to each build edge written below will
    # write m * n inputs. Collapsing reduces this to m + n.
    sources = rule.get('rule_sources', [])
    num_inputs = len(inputs)
    if prebuild:
      num_inputs += 1
    if num_inputs > 2 and len(sources) > 2:
      inputs = [self.WriteCollapsedDependencies(
          rule['rule_name'], inputs, order_only=prebuild)]
      prebuild = []

    # For each source file, write an edge that generates all the outputs.
    for source in sources:
      source = os.path.normpath(source)
      dirname, basename = os.path.split(source)
      root, ext = os.path.splitext(basename)

      # Gather the list of inputs and outputs, expanding $vars if possible.
      outputs = [self.ExpandRuleVariables(o, root, dirname,
                                          source, ext, basename)
                 for o in rule['outputs']]

      if int(rule.get('process_outputs_as_sources', False)):
        extra_sources += outputs

      was_mac_bundle_resource = source in mac_bundle_resources
      if was_mac_bundle_resource or \
          int(rule.get('process_outputs_as_mac_bundle_resources', False)):
        extra_mac_bundle_resources += outputs
        # Note: This is n_resources * n_outputs_in_rule.  Put to-be-removed
        # items in a set and remove them all in a single pass if this becomes
        # a performance issue.
        if was_mac_bundle_resource:
          mac_bundle_resources.remove(source)

      extra_bindings = []
      for var in needed_variables:
        if var == 'root':
          extra_bindings.append(('root', cygwin_munge(root)))
        elif var == 'dirname':
          # '$dirname' is a parameter to the rule action, which means
          # it shouldn't be converted to a Ninja path.  But we don't
          # want $!PRODUCT_DIR in there either.
          dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build)
          extra_bindings.append(('dirname', cygwin_munge(dirname_expanded)))
        elif var == 'source':
          # '$source' is a parameter to the rule action, which means
          # it shouldn't be converted to a Ninja path.  But we don't
          # want $!PRODUCT_DIR in there either.
          source_expanded = self.ExpandSpecial(source, self.base_to_build)
          extra_bindings.append(('source', cygwin_munge(source_expanded)))
        elif var == 'ext':
          extra_bindings.append(('ext', ext))
        elif var == 'name':
          extra_bindings.append(('name', cygwin_munge(basename)))
        else:
          # Unreachable: needed_variables is a subset of special_locals.
          assert var == None, repr(var)

      outputs = [self.GypPathToNinja(o, env) for o in outputs]
      if self.flavor == 'win':
        # WriteNewNinjaRule uses unique_name for creating an rsp file on win.
        extra_bindings.append(('unique_name',
                               hashlib.md5(outputs[0]).hexdigest()))
      self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
                       implicit=inputs,
                       order_only=prebuild,
                       variables=extra_bindings)

      all_outputs.extend(outputs)

  return all_outputs
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
  """Write 'copy' edges for each file in each gyp 'copies' entry.

  Returns the list of destination paths; appends bundle-internal
  destinations to |mac_bundle_depends|.
  """
  outputs = []
  env = self.GetToolchainEnv()
  for copy in copies:
    for path in copy['files']:
      # Normalize the path so trailing slashes don't confuse us.
      path = os.path.normpath(path)
      basename = os.path.split(path)[1]
      src = self.GypPathToNinja(path, env)
      dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
                                env)
      outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
      if self.is_mac_bundle:
        # gyp has mac_bundle_resources to copy things into a bundle's
        # Resources folder, but there's no built-in way to copy files to other
        # places in the bundle. Hence, some targets use copies for this. Check
        # if this file is copied into the current bundle, and if so add it to
        # the bundle depends so that dependent targets get rebuilt if the copy
        # input changes.
        if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
          mac_bundle_depends.append(dst)

  return outputs
def WriteMacBundleResources(self, resources, bundle_depends):
  """Writes ninja edges for 'mac_bundle_resources'.

  Plain resources are copied via mac_tool; .xcassets catalogs are collected
  and returned for WriteMacXCassets to compile with actool.
  """
  xcassets = []
  for output, res in gyp.xcode_emulation.GetMacBundleResources(
      generator_default_variables['PRODUCT_DIR'],
      self.xcode_settings, map(self.GypPathToNinja, resources)):
    output = self.ExpandSpecial(output)
    if os.path.splitext(output)[-1] != '.xcassets':
      isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
      self.ninja.build(output, 'mac_tool', res,
                       variables=[('mactool_cmd', 'copy-bundle-resource'), \
                                  ('binary', isBinary)])
      bundle_depends.append(output)
    else:
      xcassets.append(res)
  return xcassets
def WriteMacXCassets(self, xcassets, bundle_depends):
  """Writes ninja edges for 'mac_bundle_resources' .xcassets files.

  This add an invocation of 'actool' via the 'mac_tool.py' helper script.

  It assumes that the assets catalogs define at least one imageset and
  thus an Assets.car file will be generated in the application resources
  directory. If this is not the case, then the build will probably be done
  at each invocation of ninja.

  Returns the path of the partial Info.plist actool generates, or None when
  no icon/launch-image settings require one.
  """
  if not xcassets:
    return

  extra_arguments = {}
  settings_to_arg = {
      'XCASSETS_APP_ICON': 'app-icon',
      'XCASSETS_LAUNCH_IMAGE': 'launch-image',
  }
  settings = self.xcode_settings.xcode_settings[self.config_name]
  for settings_key, arg_name in settings_to_arg.iteritems():
    value = settings.get(settings_key)
    if value:
      extra_arguments[arg_name] = value

  partial_info_plist = None
  if extra_arguments:
    partial_info_plist = self.GypPathToUniqueOutput(
        'assetcatalog_generated_info.plist')
    extra_arguments['output-partial-info-plist'] = partial_info_plist

  outputs = []
  outputs.append(
      os.path.join(
          self.xcode_settings.GetBundleResourceFolder(),
          'Assets.car'))
  if partial_info_plist:
    outputs.append(partial_info_plist)

  keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
  extra_env = self.xcode_settings.GetPerTargetSettings()
  env = self.GetSortedXcodeEnv(additional_settings=extra_env)
  env = self.ComputeExportEnvString(env)

  bundle_depends.extend(self.ninja.build(
      outputs, 'compile_xcassets', xcassets,
      variables=[('env', env), ('keys', keys)]))
  return partial_info_plist
def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
  """Write build rules for bundle Info.plist files.

  Optionally preprocesses the plist (when INFOPLIST_PREPROCESS defines
  exist) and merges in |partial_info_plist| produced by actool.
  """
  info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
      generator_default_variables['PRODUCT_DIR'],
      self.xcode_settings, self.GypPathToNinja)
  if not info_plist:
    return
  out = self.ExpandSpecial(out)
  if defines:
    # Create an intermediate file to store preprocessed results.
    intermediate_plist = self.GypPathToUniqueOutput(
        os.path.basename(info_plist))
    defines = ' '.join([Define(d, self.flavor) for d in defines])
    info_plist = self.ninja.build(
        intermediate_plist, 'preprocess_infoplist', info_plist,
        variables=[('defines',defines)])

  env = self.GetSortedXcodeEnv(additional_settings=extra_env)
  env = self.ComputeExportEnvString(env)

  if partial_info_plist:
    # Merge the actool-generated partial plist into the main one.
    intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist')
    info_plist = self.ninja.build(
        intermediate_plist, 'merge_infoplist',
        [partial_info_plist, info_plist])

  keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
  keys = QuoteShellArgument(json.dumps(keys), self.flavor)
  isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
  self.ninja.build(out, 'copy_infoplist', info_plist,
                   variables=[('env', env), ('keys', keys),
                              ('binary', isBinary)])
  bundle_depends.append(out)
def WriteSources(self, ninja_file, config_name, config, sources, predepends,
                 precompiled_header, spec):
  """Write build rules to compile all of |sources|.

  Returns the link dependencies: a list of object files for single-arch
  builds, or a dict mapping arch -> object list for fat mac builds.
  """
  if self.toolset == 'host':
    # Host-toolset compiles use the *_host variants of the tool variables.
    for tool in ('ar', 'cc', 'cxx', 'ld', 'ldxx', 'nm', 'readelf'):
      self.ninja.variable(tool, '$%s_host' % tool)

  if self.flavor == 'mac' and len(self.archs) > 1:
    # Fat binaries: compile each arch into its own subninja.
    return dict((arch, self.WriteSourcesForArch(
        self.arch_subninjas[arch], config_name, config, sources, predepends,
        precompiled_header, spec, arch=arch))
        for arch in self.archs)
  return self.WriteSourcesForArch(
      self.ninja, config_name, config, sources, predepends,
      precompiled_header, spec)
def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
                        predepends, precompiled_header, spec, arch=None):
  """Write build rules to compile all of |sources|.

  Emits per-target flag variables followed by one build edge per compilable
  source. Returns the list of object file outputs.
  """
  extra_defines = []
  # Flag sources differ per platform: xcode_settings on mac, msvs_settings
  # on win, raw config keys elsewhere.
  if self.flavor == 'mac':
    cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
    cflags_c = self.xcode_settings.GetCflagsC(config_name)
    cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
    cflags_objc = ['$cflags_c'] + \
                  self.xcode_settings.GetCflagsObjC(config_name)
    cflags_objcc = ['$cflags_cc'] + \
                   self.xcode_settings.GetCflagsObjCC(config_name)
  elif self.flavor == 'win':
    asmflags = self.msvs_settings.GetAsmflags(config_name)
    cflags = self.msvs_settings.GetCflags(config_name)
    cflags_c = self.msvs_settings.GetCflagsC(config_name)
    cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
    extra_defines = self.msvs_settings.GetComputedDefines(config_name)
    # See comment at cc_command for why there's two .pdb files.
    pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
        config_name, self.ExpandSpecial)
    if not pdbpath_c:
      obj = 'obj'
      if self.toolset != 'target':
        obj += '.' + self.toolset
      pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
      pdbpath_c = pdbpath + '.c.pdb'
      pdbpath_cc = pdbpath + '.cc.pdb'
    self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
    self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
    self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
  else:
    cflags = config.get('cflags', [])
    cflags_c = config.get('cflags_c', [])
    cflags_cc = config.get('cflags_cc', [])

  # Respect environment variables related to build, but target-specific
  # flags can still override them.
  if self.toolset == 'target':
    cflags_c = (os.environ.get('CPPFLAGS', '').split() +
                os.environ.get('CFLAGS', '').split() + cflags_c)
    cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
                 os.environ.get('CXXFLAGS', '').split() + cflags_cc)
  elif self.toolset == 'host':
    cflags_c = (os.environ.get('CPPFLAGS_host', '').split() +
                os.environ.get('CFLAGS_host', '').split() + cflags_c)
    cflags_cc = (os.environ.get('CPPFLAGS_host', '').split() +
                 os.environ.get('CXXFLAGS_host', '').split() + cflags_cc)

  defines = config.get('defines', []) + extra_defines
  self.WriteVariableList(ninja_file, 'defines',
                         [Define(d, self.flavor) for d in defines])
  if self.flavor == 'win':
    self.WriteVariableList(ninja_file, 'asmflags',
                           map(self.ExpandSpecial, asmflags))
    self.WriteVariableList(ninja_file, 'rcflags',
        [QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
         for f in self.msvs_settings.GetRcflags(config_name,
                                                self.GypPathToNinja)])

  include_dirs = config.get('include_dirs', [])

  env = self.GetToolchainEnv()
  if self.flavor == 'win':
    include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
                                                        config_name)
  self.WriteVariableList(ninja_file, 'includes',
      [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
       for i in include_dirs])

  if self.flavor == 'win':
    midl_include_dirs = config.get('midl_include_dirs', [])
    midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
        midl_include_dirs, config_name)
    self.WriteVariableList(ninja_file, 'midl_includes',
        [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
         for i in midl_include_dirs])

  pch_commands = precompiled_header.GetPchBuildCommands(arch)
  if self.flavor == 'mac':
    # Most targets use no prefix headers, so only write these if needed.
    for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'),
                     ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]:
      include = precompiled_header.GetInclude(ext, arch)
      if include: ninja_file.variable(var, include)

  arflags = config.get('arflags', [])

  self.WriteVariableList(ninja_file, 'cflags',
                         map(self.ExpandSpecial, cflags))
  self.WriteVariableList(ninja_file, 'cflags_c',
                         map(self.ExpandSpecial, cflags_c))
  self.WriteVariableList(ninja_file, 'cflags_cc',
                         map(self.ExpandSpecial, cflags_cc))
  if self.flavor == 'mac':
    self.WriteVariableList(ninja_file, 'cflags_objc',
                           map(self.ExpandSpecial, cflags_objc))
    self.WriteVariableList(ninja_file, 'cflags_objcc',
                           map(self.ExpandSpecial, cflags_objcc))
  self.WriteVariableList(ninja_file, 'arflags',
                         map(self.ExpandSpecial, arflags))
  ninja_file.newline()
  outputs = []
  has_rc_source = False
  for source in sources:
    # Select the compile rule from the source extension (and flavor).
    filename, ext = os.path.splitext(source)
    ext = ext[1:]
    obj_ext = self.obj_ext
    if ext in ('cc', 'cpp', 'cxx'):
      command = 'cxx'
      self.uses_cpp = True
    elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
      command = 'cc'
    elif ext == 's' and self.flavor != 'win':  # Doesn't generate .o.d files.
      command = 'cc_s'
    elif (self.flavor == 'win' and ext == 'asm' and
          not self.msvs_settings.HasExplicitAsmRules(spec)):
      command = 'asm'
      # Add the _asm suffix as msvs is capable of handling .cc and
      # .asm files of the same name without collision.
      obj_ext = '_asm.obj'
    elif self.flavor == 'mac' and ext == 'm':
      command = 'objc'
    elif self.flavor == 'mac' and ext == 'mm':
      command = 'objcxx'
      self.uses_cpp = True
    elif self.flavor == 'win' and ext == 'rc':
      command = 'rc'
      obj_ext = '.res'
      has_rc_source = True
    else:
      # Ignore unhandled extensions.
      continue
    input = self.GypPathToNinja(source)
    output = self.GypPathToUniqueOutput(filename + obj_ext)
    if arch is not None:
      output = AddArch(output, arch)
    implicit = precompiled_header.GetObjDependencies([input], [output], arch)
    variables = []
    if self.flavor == 'win':
      variables, output, implicit = precompiled_header.GetFlagsModifications(
          input, output, implicit, command, cflags_c, cflags_cc,
          self.ExpandSpecial)
    ninja_file.build(output, command, input,
                     implicit=[gch for _, _, gch in implicit],
                     order_only=predepends, variables=variables)
    outputs.append(output)

  if has_rc_source:
    resource_include_dirs = config.get('resource_include_dirs', include_dirs)
    self.WriteVariableList(ninja_file, 'resource_includes',
        [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
         for i in resource_include_dirs])

  self.WritePchTargets(ninja_file, pch_commands)

  ninja_file.newline()
  return outputs
def WritePchTargets(self, ninja_file, pch_commands):
  """Writes ninja rules to compile prefix headers.

  |pch_commands| is a list of (gch_output, lang_flag, lang, input) tuples.
  """
  if not pch_commands:
    return

  # Per-language pch flag variable and compile rule.
  lang_info = {
      'c': ('cflags_pch_c', 'cc'),
      'cc': ('cflags_pch_cc', 'cxx'),
      'm': ('cflags_pch_objc', 'objc'),
      'mm': ('cflags_pch_objcc', 'objcxx'),
  }
  for gch, lang_flag, lang, input in pch_commands:
    var_name, compile_rule = lang_info[lang]
    ninja_file.build(gch, compile_rule, input,
                     variables=[(var_name, lang_flag)])
def WriteLink(self, spec, config_name, config, link_deps):
  """Write out a link step. Fills out target.binary.

  For fat mac builds, links each arch in its subninja then combines the
  results with lipo (or solipo for shared libs, to also produce a .TOC).
  """
  if self.flavor != 'mac' or len(self.archs) == 1:
    return self.WriteLinkForArch(
        self.ninja, spec, config_name, config, link_deps)
  else:
    output = self.ComputeOutput(spec)
    inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
                                    config_name, config, link_deps[arch],
                                    arch=arch)
              for arch in self.archs]
    extra_bindings = []
    build_output = output
    if not self.is_mac_bundle:
      self.AppendPostbuildVariable(extra_bindings, spec, output, output)

    # TODO(yyanagisawa): more work needed to fix:
    # https://code.google.com/p/gyp/issues/detail?id=411
    if (spec['type'] in ('shared_library', 'loadable_module') and
        not self.is_mac_bundle):
      extra_bindings.append(('lib', output))
      self.ninja.build([output, output + '.TOC'], 'solipo', inputs,
          variables=extra_bindings)
    else:
      self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings)
    return output
def WriteLinkForArch(self, ninja_file, spec, config_name, config,
                     link_deps, arch=None):
  """Write out a link step. Fills out target.binary.

  Computes link inputs from dependencies, ldflags per platform, library
  search paths, and extra rule bindings, then emits the link edge.
  Returns the linked binary path.
  """
  command = {
      'executable':      'link',
      'loadable_module': 'solink_module',
      'shared_library':  'solink',
  }[spec['type']]
  command_suffix = ''

  implicit_deps = set()
  solibs = set()
  order_deps = set()

  if 'dependencies' in spec:
    # Two kinds of dependencies:
    # - Linkable dependencies (like a .a or a .so): add them to the link line.
    # - Non-linkable dependencies (like a rule that generates a file
    #   and writes a stamp file): add them to implicit_deps
    extra_link_deps = set()
    for dep in spec['dependencies']:
      target = self.target_outputs.get(dep)
      if not target:
        continue
      linkable = target.Linkable()
      if linkable:
        new_deps = []
        if (self.flavor == 'win' and
            target.component_objs and
            self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
          # Link against the dependency's objects directly instead of its
          # static library.
          new_deps = target.component_objs
          if target.compile_deps:
            order_deps.add(target.compile_deps)
        elif self.flavor == 'win' and target.import_lib:
          new_deps = [target.import_lib]
        elif target.UsesToc(self.flavor):
          # Depend on the .TOC so we only relink when the interface changes.
          solibs.add(target.binary)
          implicit_deps.add(target.binary + '.TOC')
        else:
          new_deps = [target.binary]
        for new_dep in new_deps:
          if new_dep not in extra_link_deps:
            extra_link_deps.add(new_dep)
            link_deps.append(new_dep)

      final_output = target.FinalOutput()
      if not linkable or final_output != target.binary:
        implicit_deps.add(final_output)

  extra_bindings = []
  if self.uses_cpp and self.flavor != 'win':
    extra_bindings.append(('ld', '$ldxx'))

  output = self.ComputeOutput(spec, arch)
  if arch is None and not self.is_mac_bundle:
    self.AppendPostbuildVariable(extra_bindings, spec, output, output)

  is_executable = spec['type'] == 'executable'
  # The ldflags config key is not used on mac or win. On those platforms
  # linker flags are set via xcode_settings and msvs_settings, respectively.
  env_ldflags = os.environ.get('LDFLAGS', '').split()
  if self.flavor == 'mac':
    ldflags = self.xcode_settings.GetLdflags(config_name,
        self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
        self.GypPathToNinja, arch)
    ldflags = env_ldflags + ldflags
  elif self.flavor == 'win':
    manifest_base_name = self.GypPathToUniqueOutput(
        self.ComputeOutputFileName(spec))
    ldflags, intermediate_manifest, manifest_files = \
        self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
                                      self.ExpandSpecial, manifest_base_name,
                                      output, is_executable,
                                      self.toplevel_build)
    ldflags = env_ldflags + ldflags
    self.WriteVariableList(ninja_file, 'manifests', manifest_files)
    implicit_deps = implicit_deps.union(manifest_files)
    if intermediate_manifest:
      self.WriteVariableList(
          ninja_file, 'intermediatemanifest', [intermediate_manifest])
    command_suffix = _GetWinLinkRuleNameSuffix(
        self.msvs_settings.IsEmbedManifest(config_name))
    def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
    if def_file:
      implicit_deps.add(def_file)
  else:
    # Respect environment variables related to build, but target-specific
    # flags can still override them.
    ldflags = env_ldflags + config.get('ldflags', [])
    if is_executable and len(solibs):
      # Executables linking against shared libs need an rpath so they load
      # the just-built libraries at runtime.
      rpath = 'lib/'
      if self.toolset != 'target':
        rpath += self.toolset
      ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
      ldflags.append('-Wl,-rpath-link=%s' % rpath)
  self.WriteVariableList(ninja_file, 'ldflags',
                         map(self.ExpandSpecial, ldflags))

  library_dirs = config.get('library_dirs', [])
  if self.flavor == 'win':
    library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
                    for l in library_dirs]
    library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
                                                     self.flavor)
                    for l in library_dirs]
  else:
    library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
                                       self.flavor)
                    for l in library_dirs]

  libraries = gyp.common.uniquer(map(self.ExpandSpecial,
                                     spec.get('libraries', [])))
  if self.flavor == 'mac':
    libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
  elif self.flavor == 'win':
    libraries = self.msvs_settings.AdjustLibraries(libraries)

  self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries)

  linked_binary = output

  if command in ('solink', 'solink_module'):
    extra_bindings.append(('soname', os.path.split(output)[1]))
    extra_bindings.append(('lib',
                          gyp.common.EncodePOSIXShellArgument(output)))
    if self.flavor != 'win':
      link_file_list = output
      if self.is_mac_bundle:
        # 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
        # 'Dependency Framework.framework.rsp'
        link_file_list = self.xcode_settings.GetWrapperName()
      if arch:
        link_file_list += '.' + arch
      link_file_list += '.rsp'
      # If an rspfile contains spaces, ninja surrounds the filename with
      # quotes around it and then passes it to open(), creating a file with
      # quotes in its name (and when looking for the rsp file, the name
      # makes it through bash which strips the quotes) :-/
      link_file_list = link_file_list.replace(' ', '_')
      extra_bindings.append(
        ('link_file_list',
          gyp.common.EncodePOSIXShellArgument(link_file_list)))
    if self.flavor == 'win':
      extra_bindings.append(('binary', output))
      if ('/NOENTRY' not in ldflags and
          not self.msvs_settings.GetNoImportLibrary(config_name)):
        self.target.import_lib = output + '.lib'
        extra_bindings.append(('implibflag',
                               '/IMPLIB:%s' % self.target.import_lib))
        pdbname = self.msvs_settings.GetPDBName(
            config_name, self.ExpandSpecial, output + '.pdb')
        output = [output, self.target.import_lib]
        if pdbname:
          output.append(pdbname)
    elif not self.is_mac_bundle:
      output = [output, output + '.TOC']
    else:
      command = command + '_notoc'
  elif self.flavor == 'win':
    extra_bindings.append(('binary', output))
    pdbname = self.msvs_settings.GetPDBName(
        config_name, self.ExpandSpecial, output + '.pdb')
    if pdbname:
      output = [output, pdbname]

  if len(solibs):
    extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))

  ninja_file.build(output, command + command_suffix, link_deps,
                   implicit=list(implicit_deps),
                   order_only=list(order_deps),
                   variables=extra_bindings)
  return linked_binary
def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
  """Write the final step for the target (archive, link, or stamp).

  Fills in and returns self.target.binary.
  """
  extra_link_deps = any(self.target_outputs.get(dep).Linkable()
                        for dep in spec.get('dependencies', [])
                        if dep in self.target_outputs)
  if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
    # TODO(evan): don't call this function for 'none' target types, as
    # it doesn't do anything, and we fake out a 'binary' with a stamp file.
    self.target.binary = compile_deps
    self.target.type = 'none'
  elif spec['type'] == 'static_library':
    self.target.binary = self.ComputeOutput(spec)
    if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
        self.is_standalone_static_library):
      # Thin archives are cheaper to produce; only usable where the archive
      # is consumed in-tree.
      self.ninja.build(self.target.binary, 'alink_thin', link_deps,
                       order_only=compile_deps)
    else:
      variables = []
      if self.xcode_settings:
        libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
        if libtool_flags:
          variables.append(('libtool_flags', libtool_flags))
      if self.msvs_settings:
        libflags = self.msvs_settings.GetLibFlags(config_name,
                                                  self.GypPathToNinja)
        variables.append(('libflags', libflags))

      if self.flavor != 'mac' or len(self.archs) == 1:
        self.AppendPostbuildVariable(variables, spec,
                                     self.target.binary, self.target.binary)
        self.ninja.build(self.target.binary, 'alink', link_deps,
                         order_only=compile_deps, variables=variables)
      else:
        # Fat mac archives: archive each arch, then combine into one .a.
        inputs = []
        for arch in self.archs:
          output = self.ComputeOutput(spec, arch)
          self.arch_subninjas[arch].build(output, 'alink', link_deps[arch],
                                          order_only=compile_deps,
                                          variables=variables)
          inputs.append(output)

        # TODO: It's not clear if libtool_flags should be passed to the alink
        # call that combines single-arch .a files into a fat .a file.
        self.AppendPostbuildVariable(variables, spec,
                                     self.target.binary, self.target.binary)
        self.ninja.build(self.target.binary, 'alink', inputs,
                         # FIXME: test proving order_only=compile_deps isn't
                         # needed.
                         variables=variables)
  else:
    self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
  return self.target.binary
def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
  """Write the final bundling step for a mac bundle target.

  Frameworks/loadable modules get a 'package_framework' edge; app bundles
  (and empty bundles) get a stamp. Fills in and returns self.target.bundle.
  """
  assert self.is_mac_bundle
  package_framework = spec['type'] in ('shared_library', 'loadable_module')
  output = self.ComputeMacBundleOutput()
  if is_empty:
    output += '.stamp'
  variables = []
  self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
                               is_command_start=not package_framework)
  if package_framework and not is_empty:
    variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
    self.ninja.build(output, 'package_framework', mac_bundle_depends,
                     variables=variables)
  else:
    self.ninja.build(output, 'stamp', mac_bundle_depends,
                     variables=variables)
  self.target.bundle = output
  return output
def GetToolchainEnv(self, additional_settings=None):
  """Returns the variables toolchain would set for build steps."""
  # The Xcode environment is computed unconditionally (it is harmless on
  # non-mac flavors); Windows replaces it with the MSVS macro environment.
  xcode_env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
  if self.flavor != 'win':
    return xcode_env
  return self.GetMsvsToolchainEnv(additional_settings=additional_settings)
def GetMsvsToolchainEnv(self, additional_settings=None):
  """Returns the variables Visual Studio would set for build steps."""
  # |additional_settings| is accepted for signature parity with
  # GetSortedXcodeEnv but is not consumed by the MSVS macro environment.
  product_dir = '$!PRODUCT_DIR'
  return self.msvs_settings.GetVSMacroEnv(product_dir,
                                          config=self.config_name)
def GetSortedXcodeEnv(self, additional_settings=None):
  """Returns the variables Xcode would set for build steps."""
  assert self.abs_build_dir
  abs_build_dir = self.abs_build_dir
  # Delegate to the shared xcode emulation helper; the second path is the
  # absolute location of the gyp file's directory.
  return gyp.xcode_emulation.GetSortedXcodeEnv(
      self.xcode_settings, abs_build_dir,
      os.path.join(abs_build_dir, self.build_to_base), self.config_name,
      additional_settings)
def GetSortedXcodePostbuildEnv(self):
  """Returns the variables Xcode would set for postbuild steps."""
  # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
  # TODO(thakis): It would be nice to have some general mechanism instead.
  extra = {}
  save_file = self.xcode_settings.GetPerTargetSetting(
      'CHROMIUM_STRIP_SAVE_FILE')
  if save_file:
    extra['CHROMIUM_STRIP_SAVE_FILE'] = save_file
  return self.GetSortedXcodeEnv(additional_settings=extra)
def AppendPostbuildVariable(self, variables, spec, output, binary,
                            is_command_start=False):
  """Adds a 'postbuild' variable if there is a postbuild for |output|."""
  command = self.GetPostbuildCommand(spec, output, binary, is_command_start)
  if not command:
    return
  variables.append(('postbuilds', command))
def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
  """Returns a shell command that runs all the postbuilds, and removes
  |output| if any of them fails. If |is_command_start| is False, then the
  returned string will start with ' && '."""
  if not self.xcode_settings or spec['type'] == 'none' or not output:
    return ''
  output = QuoteShellArgument(output, self.flavor)
  postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
  if output_binary is not None:
    # Implicit postbuilds (e.g. stripping) operate on the binary path,
    # which is relative to the gyp file's directory at run time.
    postbuilds = self.xcode_settings.AddImplicitPostbuilds(
        self.config_name,
        os.path.normpath(os.path.join(self.base_to_build, output)),
        QuoteShellArgument(
            os.path.normpath(os.path.join(self.base_to_build, output_binary)),
            self.flavor),
        postbuilds, quiet=True)

  if not postbuilds:
    return ''
  # Postbuilds expect to be run in the gyp file's directory, so insert an
  # implicit postbuild to cd to there.
  postbuilds.insert(0, gyp.common.EncodePOSIXShellList(
      ['cd', self.build_to_base]))
  env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
  # G will be non-null if any postbuild fails. Run all postbuilds in a
  # subshell.
  commands = env + ' (' + \
      ' && '.join([ninja_syntax.escape(command) for command in postbuilds])
  command_string = (commands + '); G=$$?; '
                    # Remove the final output if any postbuild failed.
                    '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
  if is_command_start:
    return '(' + command_string + ' && '
  else:
    # '$ ' escapes to a literal space in ninja; the result is appended to a
    # preceding command with ' && '.
    return '$ && (' + command_string
def ComputeExportEnvString(self, env):
"""Given an environment, returns a string looking like
'export FOO=foo; export BAR="${FOO} bar;'
that exports |env| to the shell."""
export_str = []
for k, v in env:
export_str.append('export %s=%s;' %
(k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))))
return ' '.join(export_str)
def ComputeMacBundleOutput(self):
"""Return the 'output' (full output path) to a bundle output directory."""
assert self.is_mac_bundle
path = generator_default_variables['PRODUCT_DIR']
return self.ExpandSpecial(
os.path.join(path, self.xcode_settings.GetWrapperName()))
  def ComputeOutputFileName(self, spec, type=None):
    """Compute the filename of the final output for the current target.

    Args:
      spec: the gyp target dict.
      type: optional override of spec['type'].
    """
    if not type:
      type = spec['type']
    # Work on a copy so the shared module-level defaults are not mutated.
    default_variables = copy.copy(generator_default_variables)
    CalculateVariables(default_variables, {'flavor': self.flavor})
    # Compute filename prefix: the product prefix, or a default for
    # the product type.
    DEFAULT_PREFIX = {
      'loadable_module': default_variables['SHARED_LIB_PREFIX'],
      'shared_library': default_variables['SHARED_LIB_PREFIX'],
      'static_library': default_variables['STATIC_LIB_PREFIX'],
      'executable': default_variables['EXECUTABLE_PREFIX'],
      }
    prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))
    # Compute filename extension: the product extension, or a default
    # for the product type.
    DEFAULT_EXTENSION = {
      'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
      'shared_library': default_variables['SHARED_LIB_SUFFIX'],
      'static_library': default_variables['STATIC_LIB_SUFFIX'],
      'executable': default_variables['EXECUTABLE_SUFFIX'],
      }
    extension = spec.get('product_extension')
    if extension:
      extension = '.' + extension
    else:
      extension = DEFAULT_EXTENSION.get(type, '')
    if 'product_name' in spec:
      # If we were given an explicit name, use that.
      target = spec['product_name']
    else:
      # Otherwise, derive a name from the target name.
      target = spec['target_name']
      if prefix == 'lib':
        # Snip out an extra 'lib' from libs if appropriate.
        target = StripPrefix(target, 'lib')
    if type in ('static_library', 'loadable_module', 'shared_library',
                'executable'):
      return '%s%s%s' % (prefix, target, extension)
    elif type == 'none':
      # 'none' targets get a stamp file so dependents have something to track.
      return '%s.stamp' % target
    else:
      raise Exception('Unhandled output type %s' % type)
  def ComputeOutput(self, spec, arch=None):
    """Compute the path for the final output of the spec.

    |arch|, when set, selects the per-architecture partial output (mac
    multi-arch builds); None means the final combined output.
    """
    type = spec['type']
    if self.flavor == 'win':
      # MSVS settings may override the output name entirely.
      override = self.msvs_settings.GetOutputName(self.config_name,
                                                  self.ExpandSpecial)
      if override:
        return override
    if arch is None and self.flavor == 'mac' and type in (
        'static_library', 'executable', 'shared_library', 'loadable_module'):
      filename = self.xcode_settings.GetExecutablePath()
    else:
      filename = self.ComputeOutputFileName(spec, type)
    if arch is None and 'product_dir' in spec:
      # Explicit product_dir takes precedence over the policy below.
      path = os.path.join(spec['product_dir'], filename)
      return self.ExpandSpecial(path)
    # Some products go into the output root, libraries go into shared library
    # dir, and everything else goes into the normal place.
    type_in_output_root = ['executable', 'loadable_module']
    if self.flavor == 'mac' and self.toolset == 'target':
      type_in_output_root += ['shared_library', 'static_library']
    elif self.flavor == 'win' and self.toolset == 'target':
      type_in_output_root += ['shared_library']
    if arch is not None:
      # Make sure partial executables don't end up in a bundle or the regular
      # output directory.
      archdir = 'arch'
      if self.toolset != 'target':
        archdir = os.path.join('arch', '%s' % self.toolset)
      return os.path.join(archdir, AddArch(filename, arch))
    elif type in type_in_output_root or self.is_standalone_static_library:
      return filename
    elif type == 'shared_library':
      libdir = 'lib'
      if self.toolset != 'target':
        libdir = os.path.join('lib', '%s' % self.toolset)
      return os.path.join(libdir, filename)
    else:
      # Everything else goes into a per-target unique output directory.
      return self.GypPathToUniqueOutput(filename, qualified=False)
def WriteVariableList(self, ninja_file, var, values):
assert not isinstance(values, str)
if values is None:
values = []
ninja_file.variable(var, ' '.join(values))
  def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool,
                        depfile=None):
    """Write out a new ninja "rule" statement for a given command.

    Returns the name of the new rule, and a copy of |args| with variables
    expanded."""
    if self.flavor == 'win':
      # Expand MSVS macros in both the command arguments and the description.
      args = [self.msvs_settings.ConvertVSMacros(
                  arg, self.base_to_build, config=self.config_name)
              for arg in args]
      description = self.msvs_settings.ConvertVSMacros(
          description, config=self.config_name)
    elif self.flavor == 'mac':
      # |env| is an empty list on non-mac.
      args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
      description = gyp.xcode_emulation.ExpandEnvVars(description, env)
    # TODO: we shouldn't need to qualify names; we do it because
    # currently the ninja rule namespace is global, but it really
    # should be scoped to the subninja.
    rule_name = self.name
    if self.toolset == 'target':
      rule_name += '.' + self.toolset
    rule_name += '.' + name
    # Sanitize to the character set accepted in ninja rule names.
    rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)
    # Remove variable references, but not if they refer to the magic rule
    # variables. This is not quite right, as it also protects these for
    # actions, not just for rules where they are valid. Good enough.
    protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
    protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
    description = re.sub(protect + r'\$', '_', description)
    # gyp dictates that commands are run from the base directory.
    # cd into the directory before running, and adjust paths in
    # the arguments to point to the proper locations.
    rspfile = None
    rspfile_content = None
    args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
    if self.flavor == 'win':
      # The full command goes into a response file, presumably to avoid
      # command-line length limits on Windows -- TODO confirm.
      rspfile = rule_name + '.$unique_name.rsp'
      # The cygwin case handles this inside the bash sub-shell.
      run_in = '' if is_cygwin else ' ' + self.build_to_base
      if is_cygwin:
        rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
            args, self.build_to_base)
      else:
        rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
      command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable +
                 rspfile + run_in)
    else:
      env = self.ComputeExportEnvString(env)
      command = gyp.common.EncodePOSIXShellList(args)
      command = 'cd %s; ' % self.build_to_base + env + command
    # GYP rules/actions express being no-ops by not touching their outputs.
    # Avoid executing downstream dependencies in this case by specifying
    # restat=1 to ninja.
    self.ninja.rule(rule_name, command, description, depfile=depfile,
                    restat=True, pool=pool,
                    rspfile=rspfile, rspfile_content=rspfile_content)
    self.ninja.newline()
    return rule_name, args
def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp).

  Fills flavor-specific defaults (prefixes/suffixes, OS, lib dirs) into
  |default_variables| and copies shared generator configuration from the
  Xcode/MSVS generators into this module's globals.
  """
  global generator_additional_non_configuration_keys
  global generator_additional_path_sections
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'mac':
    default_variables.setdefault('OS', 'mac')
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])
    default_variables.setdefault('LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])
    # Copy additional generator configuration data from Xcode, which is shared
    # by the Mac Ninja generator.
    import gyp.generator.xcode as xcode_generator
    generator_additional_non_configuration_keys = getattr(xcode_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(xcode_generator,
        'generator_additional_path_sections', [])
    global generator_extra_sources_for_rules
    generator_extra_sources_for_rules = getattr(xcode_generator,
        'generator_extra_sources_for_rules', [])
  elif flavor == 'win':
    exts = gyp.MSVSUtil.TARGET_TYPE_EXT
    default_variables.setdefault('OS', 'win')
    # Windows libraries carry no 'lib' prefix; extensions come from MSVSUtil.
    default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable']
    default_variables['STATIC_LIB_PREFIX'] = ''
    default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library']
    default_variables['SHARED_LIB_PREFIX'] = ''
    default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library']
    # Copy additional generator configuration data from VS, which is shared
    # by the Windows Ninja generator.
    import gyp.generator.msvs as msvs_generator
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])
    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
  else:
    operating_system = flavor
    if flavor == 'android':
      operating_system = 'linux'  # Keep this legacy behavior for now.
    default_variables.setdefault('OS', operating_system)
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 os.path.join('$!PRODUCT_DIR', 'lib'))
    default_variables.setdefault('LIB_DIR',
                                 os.path.join('$!PRODUCT_DIR', 'obj'))
def ComputeOutputDir(params):
  """Returns the path from the toplevel_dir to the build output directory."""
  # Where the generator was asked to put build files (kept for make
  # compatibility; ninja does not put anything there itself).
  generator_output = params['options'].generator_output or '.'
  generator_dir = os.path.relpath(generator_output)
  # The build directory relative to generator_dir, e.g. "out".
  flags = params.get('generator_flags', {})
  output_dir = flags.get('output_dir', 'out')
  # Relative path from source root to our output files, e.g. "out".
  return os.path.normpath(os.path.join(generator_dir, output_dir))
def CalculateGeneratorInputInfo(params):
  """Called by __init__ to initialize generator values based on params."""
  global generator_filelist_paths
  toplevel = params['options'].toplevel_dir
  # Normalized staging directory for generated gyp files, e.g. "out/gypfiles".
  out_dir = os.path.join(toplevel, ComputeOutputDir(params), 'gypfiles')
  generator_filelist_paths = {
      'toplevel': toplevel,
      'qualified_out_dir': os.path.normpath(out_dir),
  }
def OpenOutput(path, mode='w'):
  """Open |path| for writing, creating directories if necessary.

  Args:
    path: file path to open; missing parent directories are created first.
    mode: mode string passed straight through to open() (default 'w').
  """
  gyp.common.EnsureDirExists(path)
  return open(path, mode)
def CommandWithWrapper(cmd, wrappers, prog):
  """Returns |prog| prefixed by the wrapper registered under |cmd| in
  |wrappers|, or |prog| unchanged when no (non-empty) wrapper exists."""
  wrapper = wrappers.get(cmd)
  return '%s %s' % (wrapper, prog) if wrapper else prog
def GetDefaultConcurrentLinks():
  """Returns a best-guess for a number of concurrent links.

  Honors the GYP_LINK_CONCURRENCY environment variable when set; otherwise
  derives a per-platform limit from physical memory so parallel links do not
  exhaust RAM.  On Windows, GYP_LINK_CONCURRENCY_MAX caps the derived value.
  Always returns an int >= 1.
  """
  pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0))
  if pool_size:
    return pool_size
  if sys.platform in ('win32', 'cygwin'):
    import ctypes
    class MEMORYSTATUSEX(ctypes.Structure):
      _fields_ = [
        ("dwLength", ctypes.c_ulong),
        ("dwMemoryLoad", ctypes.c_ulong),
        ("ullTotalPhys", ctypes.c_ulonglong),
        ("ullAvailPhys", ctypes.c_ulonglong),
        ("ullTotalPageFile", ctypes.c_ulonglong),
        ("ullAvailPageFile", ctypes.c_ulonglong),
        ("ullTotalVirtual", ctypes.c_ulonglong),
        ("ullAvailVirtual", ctypes.c_ulonglong),
        ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
      ]
    stat = MEMORYSTATUSEX()
    stat.dwLength = ctypes.sizeof(stat)
    ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
    # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
    # on a 64 GB machine.  Use floor division so the result stays an int.
    mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30)))  # total / 5GB
    hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32)))
    return min(mem_limit, hard_cap)
  elif sys.platform.startswith('linux'):
    if os.path.exists("/proc/meminfo"):
      with open("/proc/meminfo") as meminfo:
        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
        for line in meminfo:
          match = memtotal_re.match(line)
          if not match:
            continue
          # Allow 8Gb per link on Linux because Gold is quite memory hungry
          return max(1, int(match.group(1)) // (8 * (2 ** 20)))
    return 1
  elif sys.platform == 'darwin':
    try:
      avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
      # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
      # 4GB per ld process allows for some more bloat.
      return max(1, avail_bytes // (4 * (2 ** 30)))  # total / 4GB
    except (subprocess.CalledProcessError, OSError, ValueError):
      # Best effort: fall back to a single link when sysctl is missing, fails,
      # or prints something unparsable.  (Was a bare 'except:', which also
      # swallowed KeyboardInterrupt/SystemExit.)
      return 1
  else:
    # TODO(scottmg): Implement this for other platforms.
    return 1
def _GetWinLinkRuleNameSuffix(embed_manifest):
  """Returns the suffix used to select an appropriate linking rule depending
  on whether the manifest embedding is enabled."""
  if embed_manifest:
    return '_embed'
  return ''
def _AddWinLinkRules(master_ninja, embed_manifest):
  """Adds link rules for Windows platform to |master_ninja|.

  Emits 'solink', 'solink_module' and 'link' rules whose names carry the
  suffix matching |embed_manifest| (see _GetWinLinkRuleNameSuffix).
  """
  def FullLinkCommand(ldcmd, out, binary_type):
    # Wraps |ldcmd| in the gyp-win-tool step that also processes manifests.
    resource_name = {
      'exe': '1',
      'dll': '2',
    }[binary_type]
    return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
           '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
           '$manifests' % {
               'python': sys.executable,
               'out': out,
               'ldcmd': ldcmd,
               'resname': resource_name,
               'embed': embed_manifest }
  rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
  use_separate_mspdbsrv = (
      int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
  dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
  dllcmd = ('%s gyp-win-tool link-wrapper $arch %s '
            '$ld /nologo $implibflag /DLL /OUT:$binary '
            '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
  dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
  master_ninja.rule('solink' + rule_name_suffix,
                    description=dlldesc, command=dllcmd,
                    rspfile='$binary.rsp',
                    rspfile_content='$libs $in_newline $ldflags',
                    restat=True,
                    pool='link_pool')
  master_ninja.rule('solink_module' + rule_name_suffix,
                    description=dlldesc, command=dllcmd,
                    rspfile='$binary.rsp',
                    rspfile_content='$libs $in_newline $ldflags',
                    restat=True,
                    pool='link_pool')
  # Note that ldflags goes at the end so that it has the option of
  # overriding default settings earlier in the command line.
  exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s '
             '$ld /nologo /OUT:$binary @$binary.rsp' %
             (sys.executable, use_separate_mspdbsrv))
  exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
  master_ninja.rule('link' + rule_name_suffix,
                    description='LINK%s $binary' % rule_name_suffix.upper(),
                    command=exe_cmd,
                    rspfile='$binary.rsp',
                    rspfile_content='$in_newline $libs $ldflags',
                    pool='link_pool')
def GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name):
  """Writes out/<config_name>/build.ninja plus one subninja per target.

  Emits toolchain variables, the shared compile/link rules for the current
  flavor, and then a per-target .ninja file for every target in
  |target_list|, finishing with phony short names and the 'all' target.
  """
  options = params['options']
  flavor = gyp.common.GetFlavor(params)
  generator_flags = params.get('generator_flags', {})
  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.normpath(
      os.path.join(ComputeOutputDir(params), config_name))
  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
  master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
  master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
  # Put build-time support tools in out/{config_name}.
  gyp.common.CopyTool(flavor, toplevel_build)
  # Grab make settings for CC/CXX.
  # The rules are
  # - The priority from low to high is gcc/g++, the 'make_global_settings' in
  #   gyp, the environment variable.
  # - If there is no 'make_global_settings' for CC.host/CXX.host or
  #   'CC_host'/'CXX_host' enviroment variable, cc_host/cxx_host should be set
  #   to cc/cxx.
  if flavor == 'win':
    ar = 'lib.exe'
    # cc and cxx must be set to the correct architecture by overriding with one
    # of cl_x86 or cl_x64 below.
    cc = 'UNSET'
    cxx = 'UNSET'
    ld = 'link.exe'
    ld_host = '$ld'
  else:
    ar = 'ar'
    cc = 'cc'
    cxx = 'c++'
    ld = '$cc'
    ldxx = '$cxx'
    ld_host = '$cc_host'
    ldxx_host = '$cxx_host'
  ar_host = 'ar'
  cc_host = None
  cxx_host = None
  cc_host_global_setting = None
  cxx_host_global_setting = None
  clang_cl = None
  nm = 'nm'
  nm_host = 'nm'
  readelf = 'readelf'
  readelf_host = 'readelf'
  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings = data[build_file].get('make_global_settings', [])
  build_to_root = gyp.common.InvertRelativePath(build_dir,
                                                options.toplevel_dir)
  wrappers = {}
  # Apply toolchain overrides declared via make_global_settings in gyp files.
  for key, value in make_global_settings:
    if key == 'AR':
      ar = os.path.join(build_to_root, value)
    if key == 'AR.host':
      ar_host = os.path.join(build_to_root, value)
    if key == 'CC':
      cc = os.path.join(build_to_root, value)
      if cc.endswith('clang-cl'):
        clang_cl = cc
    if key == 'CXX':
      cxx = os.path.join(build_to_root, value)
    if key == 'CC.host':
      cc_host = os.path.join(build_to_root, value)
      cc_host_global_setting = value
    if key == 'CXX.host':
      cxx_host = os.path.join(build_to_root, value)
      cxx_host_global_setting = value
    if key == 'LD':
      ld = os.path.join(build_to_root, value)
    if key == 'LD.host':
      ld_host = os.path.join(build_to_root, value)
    if key == 'NM':
      nm = os.path.join(build_to_root, value)
    if key == 'NM.host':
      nm_host = os.path.join(build_to_root, value)
    if key == 'READELF':
      readelf = os.path.join(build_to_root, value)
    if key == 'READELF.host':
      readelf_host = os.path.join(build_to_root, value)
    if key.endswith('_wrapper'):
      wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
  # Support wrappers from environment variables too.
  for key, value in os.environ.iteritems():
    if key.lower().endswith('_wrapper'):
      key_prefix = key[:-len('_wrapper')]
      key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
      wrappers[key_prefix] = os.path.join(build_to_root, value)
  if flavor == 'win':
    configs = [target_dicts[qualified_target]['configurations'][config_name]
               for qualified_target in target_list]
    shared_system_includes = None
    if not generator_flags.get('ninja_use_custom_environment_files', 0):
      shared_system_includes = \
          gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
              configs, generator_flags)
    cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
        toplevel_build, generator_flags, shared_system_includes, OpenOutput)
    for arch, path in cl_paths.iteritems():
      if clang_cl:
        # If we have selected clang-cl, use that instead.
        path = clang_cl
      command = CommandWithWrapper('CC', wrappers,
                                   QuoteShellArgument(path, 'win'))
      if clang_cl:
        # Use clang-cl to cross-compile for x86 or x86_64.
        command += (' -m32' if arch == 'x86' else ' -m64')
      master_ninja.variable('cl_' + arch, command)
  # Emit the toolchain variables; environment variables win over the
  # make_global_settings values applied above.
  cc = GetEnvironFallback(['CC_target', 'CC'], cc)
  master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
  cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
  master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))
  if flavor == 'win':
    master_ninja.variable('ld', ld)
    master_ninja.variable('idl', 'midl.exe')
    master_ninja.variable('ar', ar)
    master_ninja.variable('rc', 'rc.exe')
    master_ninja.variable('ml_x86', 'ml.exe')
    master_ninja.variable('ml_x64', 'ml64.exe')
    master_ninja.variable('mt', 'mt.exe')
  else:
    master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
    master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
    master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar))
    if flavor != 'mac':
      # Mac does not use readelf/nm for .TOC generation, so avoiding polluting
      # the master ninja with extra unused variables.
      master_ninja.variable(
          'nm', GetEnvironFallback(['NM_target', 'NM'], nm))
      master_ninja.variable(
          'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf))
  if generator_supports_multiple_toolsets:
    if not cc_host:
      cc_host = cc
    if not cxx_host:
      cxx_host = cxx
    master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host))
    master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host))
    master_ninja.variable('readelf_host',
                          GetEnvironFallback(['READELF_host'], readelf_host))
    cc_host = GetEnvironFallback(['CC_host'], cc_host)
    cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
    # The environment variable could be used in 'make_global_settings', like
    # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here.
    if '$(CC)' in cc_host and cc_host_global_setting:
      cc_host = cc_host_global_setting.replace('$(CC)', cc)
    if '$(CXX)' in cxx_host and cxx_host_global_setting:
      cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
    master_ninja.variable('cc_host',
                          CommandWithWrapper('CC.host', wrappers, cc_host))
    master_ninja.variable('cxx_host',
                          CommandWithWrapper('CXX.host', wrappers, cxx_host))
    if flavor == 'win':
      master_ninja.variable('ld_host', ld_host)
    else:
      master_ninja.variable('ld_host', CommandWithWrapper(
          'LINK', wrappers, ld_host))
      master_ninja.variable('ldxx_host', CommandWithWrapper(
          'LINK', wrappers, ldxx_host))
  master_ninja.newline()
  master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
  master_ninja.newline()
  deps = 'msvc' if flavor == 'win' else 'gcc'
  # --- Compile rules (posix vs. Windows) ---
  if flavor != 'win':
    master_ninja.rule(
      'cc',
      description='CC $out',
      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
               '$cflags_pch_c -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'cc_s',
      description='CC $out',
      command=('$cc $defines $includes $cflags $cflags_c '
               '$cflags_pch_c -c $in -o $out'))
    master_ninja.rule(
      'cxx',
      description='CXX $out',
      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
               '$cflags_pch_cc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
  else:
    # TODO(scottmg) Separate pdb names is a test to see if it works around
    # http://crbug.com/142362. It seems there's a race between the creation of
    # the .pdb by the precompiled header step for .cc and the compilation of
    # .c files. This should be handled by mspdbsrv, but rarely errors out with
    #   c1xx : fatal error C1033: cannot open program database
    # By making the rules target separate pdb files this might be avoided.
    cc_command = ('ninja -t msvc -e $arch ' +
                  '-- '
                  '$cc /nologo /showIncludes /FC '
                  '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
    cxx_command = ('ninja -t msvc -e $arch ' +
                   '-- '
                   '$cxx /nologo /showIncludes /FC '
                   '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
    master_ninja.rule(
      'cc',
      description='CC $out',
      command=cc_command,
      rspfile='$out.rsp',
      rspfile_content='$defines $includes $cflags $cflags_c',
      deps=deps)
    master_ninja.rule(
      'cxx',
      description='CXX $out',
      command=cxx_command,
      rspfile='$out.rsp',
      rspfile_content='$defines $includes $cflags $cflags_cc',
      deps=deps)
    master_ninja.rule(
      'idl',
      description='IDL $in',
      command=('%s gyp-win-tool midl-wrapper $arch $outdir '
               '$tlb $h $dlldata $iid $proxy $in '
               '$midl_includes $idlflags' % sys.executable))
    master_ninja.rule(
      'rc',
      description='RC $in',
      # Note: $in must be last otherwise rc.exe complains.
      command=('%s gyp-win-tool rc-wrapper '
               '$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
               sys.executable))
    master_ninja.rule(
      'asm',
      description='ASM $out',
      command=('%s gyp-win-tool asm-wrapper '
               '$arch $asm $defines $includes $asmflags /c /Fo $out $in' %
               sys.executable))
  # --- Archive/link rules: posix, then Windows, then mac ---
  if flavor != 'mac' and flavor != 'win':
    master_ninja.rule(
      'alink',
      description='AR $out',
      command='rm -f $out && $ar rcs $arflags $out $in')
    master_ninja.rule(
      'alink_thin',
      description='AR $out',
      command='rm -f $out && $ar rcsT $arflags $out $in')
    # This allows targets that only need to depend on $lib's API to declare an
    # order-only dependency on $lib.TOC and avoid relinking such downstream
    # dependencies when $lib changes only in non-public ways.
    # The resulting string leaves an uninterpolated %{suffix} which
    # is used in the final substitution below.
    mtime_preserving_solink_base = (
        'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
        '%(solink)s && %(extract_toc)s > $lib.TOC; else '
        '%(solink)s && %(extract_toc)s > $lib.tmp && '
        'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
        'fi; fi'
        % { 'solink':
              '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
            'extract_toc':
              ('{ $readelf -d $lib | grep SONAME ; '
               '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})
    master_ninja.rule(
      'solink',
      description='SOLINK $lib',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
      rspfile='$link_file_list',
      rspfile_content=
          '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $lib',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
      rspfile='$link_file_list',
      rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'link',
      description='LINK $out',
      command=('$ld $ldflags -o $out '
               '-Wl,--start-group $in -Wl,--end-group $solibs $libs'),
      pool='link_pool')
  elif flavor == 'win':
    master_ninja.rule(
        'alink',
        description='LIB $out',
        command=('%s gyp-win-tool link-wrapper $arch False '
                 '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
                 sys.executable),
        rspfile='$out.rsp',
        rspfile_content='$in_newline $libflags')
    _AddWinLinkRules(master_ninja, embed_manifest=True)
    _AddWinLinkRules(master_ninja, embed_manifest=False)
  else:
    master_ninja.rule(
      'objc',
      description='OBJC $out',
      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
               '$cflags_pch_objc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'objcxx',
      description='OBJCXX $out',
      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
               '$cflags_pch_objcc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'alink',
      description='LIBTOOL-STATIC $out, POSTBUILDS',
      command='rm -f $out && '
              './gyp-mac-tool filter-libtool libtool $libtool_flags '
              '-static -o $out $in'
              '$postbuilds')
    master_ninja.rule(
      'lipo',
      description='LIPO $out, POSTBUILDS',
      command='rm -f $out && lipo -create $in -output $out$postbuilds')
    master_ninja.rule(
      'solipo',
      description='SOLIPO $out, POSTBUILDS',
      command=(
          'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&'
          '%(extract_toc)s > $lib.TOC'
          % { 'extract_toc':
                '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
                'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}))
    # Record the public interface of $lib in $lib.TOC. See the corresponding
    # comment in the posix section above for details.
    solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
    mtime_preserving_solink_base = (
        'if [ ! -e $lib -o ! -e $lib.TOC ] || '
        # Always force dependent targets to relink if this library
        # reexports something. Handling this correctly would require
        # recursive TOC dumping but this is rare in practice, so punt.
        'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
        '%(solink)s && %(extract_toc)s > $lib.TOC; '
        'else '
        '%(solink)s && %(extract_toc)s > $lib.tmp && '
        'if ! cmp -s $lib.tmp $lib.TOC; then '
        'mv $lib.tmp $lib.TOC ; '
        'fi; '
        'fi'
        % { 'solink': solink_base,
            'extract_toc':
              '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
              'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
    solink_suffix = '@$link_file_list$postbuilds'
    master_ninja.rule(
      'solink',
      description='SOLINK $lib, POSTBUILDS',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': solink_suffix,
                                              'type': '-shared'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_notoc',
      description='SOLINK $lib, POSTBUILDS',
      restat=True,
      command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $lib, POSTBUILDS',
      restat=True,
      command=mtime_preserving_solink_base % {'suffix': solink_suffix,
                                              'type': '-bundle'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'solink_module_notoc',
      description='SOLINK(module) $lib, POSTBUILDS',
      restat=True,
      command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
      rspfile='$link_file_list',
      rspfile_content='$in $solibs $libs',
      pool='link_pool')
    master_ninja.rule(
      'link',
      description='LINK $out, POSTBUILDS',
      command=('$ld $ldflags -o $out '
               '$in $solibs $libs$postbuilds'),
      pool='link_pool')
    master_ninja.rule(
      'preprocess_infoplist',
      description='PREPROCESS INFOPLIST $out',
      command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
               'plutil -convert xml1 $out $out'))
    master_ninja.rule(
      'copy_infoplist',
      description='COPY INFOPLIST $in',
      command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys')
    master_ninja.rule(
      'merge_infoplist',
      description='MERGE INFOPLISTS $in',
      command='$env ./gyp-mac-tool merge-info-plist $out $in')
    master_ninja.rule(
      'compile_xcassets',
      description='COMPILE XCASSETS $in',
      command='$env ./gyp-mac-tool compile-xcassets $keys $in')
    master_ninja.rule(
      'mac_tool',
      description='MACTOOL $mactool_cmd $in',
      command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
    master_ninja.rule(
      'package_framework',
      description='PACKAGE FRAMEWORK $out, POSTBUILDS',
      command='./gyp-mac-tool package-framework $out $version$postbuilds '
              '&& touch $out')
  # --- Flavor-independent stamp/copy rules ---
  if flavor == 'win':
    master_ninja.rule(
      'stamp',
      description='STAMP $out',
      command='%s gyp-win-tool stamp $out' % sys.executable)
    master_ninja.rule(
      'copy',
      description='COPY $in $out',
      command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable)
  else:
    master_ninja.rule(
      'stamp',
      description='STAMP $out',
      command='${postbuilds}touch $out')
    master_ninja.rule(
      'copy',
      description='COPY $in $out',
      command='rm -rf $out && cp -af $in $out')
  master_ninja.newline()
  all_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list,
                                        target_dicts,
                                        os.path.normpath(build_file)):
      all_targets.add(target)
  all_outputs = set()
  # target_outputs is a map from qualified target name to a Target object.
  target_outputs = {}
  # target_short_names is a map from target short name to a list of Target
  # objects.
  target_short_names = {}
  # short name of targets that were skipped because they didn't contain anything
  # interesting.
  # NOTE: there may be overlap between this an non_empty_target_names.
  empty_target_names = set()
  # Set of non-empty short target names.
  # NOTE: there may be overlap between this an empty_target_names.
  non_empty_target_names = set()
  # --- Per-target subninja generation ---
  for qualified_target in target_list:
    # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
    build_file, name, toolset = \
        gyp.common.ParseQualifiedTarget(qualified_target)
    this_make_global_settings = data[build_file].get('make_global_settings', [])
    assert make_global_settings == this_make_global_settings, (
        "make_global_settings needs to be the same for all targets. %s vs. %s" %
        (this_make_global_settings, make_global_settings))
    spec = target_dicts[qualified_target]
    if flavor == 'mac':
      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
    # If build_file is a symlink, we must not follow it because there's a chance
    # it could point to a path above toplevel_dir, and we cannot correctly deal
    # with that case at the moment.
    build_file = gyp.common.RelativePath(build_file, options.toplevel_dir,
                                         False)
    qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name,
                                                           toolset)
    hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()
    base_path = os.path.dirname(build_file)
    obj = 'obj'
    if toolset != 'target':
      obj += '.' + toolset
    output_file = os.path.join(obj, base_path, name + '.ninja')
    ninja_output = StringIO()
    writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir,
                         ninja_output,
                         toplevel_build, output_file,
                         flavor, toplevel_dir=options.toplevel_dir)
    target = writer.WriteSpec(spec, config_name, generator_flags)
    if ninja_output.tell() > 0:
      # Only create files for ninja files that actually have contents.
      with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
        ninja_file.write(ninja_output.getvalue())
      ninja_output.close()
      master_ninja.subninja(output_file)
    if target:
      if name != target.FinalOutput() and spec['toolset'] == 'target':
        target_short_names.setdefault(name, []).append(target)
      target_outputs[qualified_target] = target
      if qualified_target in all_targets:
        all_outputs.add(target.FinalOutput())
      non_empty_target_names.add(name)
    else:
      empty_target_names.add(name)
  if target_short_names:
    # Write a short name to build this target. This benefits both the
    # "build chrome" case as well as the gyp tests, which expect to be
    # able to run actions and build libraries by their short name.
    master_ninja.newline()
    master_ninja.comment('Short names for targets.')
    for short_name in target_short_names:
      master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
                                               target_short_names[short_name]])
  # Write phony targets for any empty targets that weren't written yet. As
  # short names are not necessarily unique only do this for short names that
  # haven't already been output for another target.
  empty_target_names = empty_target_names - non_empty_target_names
  if empty_target_names:
    master_ninja.newline()
    master_ninja.comment('Empty targets (output for completeness).')
    for name in sorted(empty_target_names):
      master_ninja.build(name, 'phony')
  if all_outputs:
    master_ninja.newline()
    master_ninja.build('all', 'phony', list(all_outputs))
    master_ninja.default(generator_flags.get('default_target', 'all'))
  master_ninja_file.close()
def PerformBuild(data, configurations, params):
options = params['options']
for config in configurations:
builddir = os.path.join(options.toplevel_dir, 'out', config)
arguments = ['ninja', '-C', builddir]
print 'Building [%s]: %s' % (config, arguments)
subprocess.check_call(arguments)
def CallGenerateOutputForConfig(arglist):
  """Multiprocessing worker entry point: generate ninja files for one config.

  `arglist` is the 5-tuple packed by GenerateOutput.
  """
  # Ignore the interrupt signal here so that the parent process catches
  # Ctrl-C and terminates all multiprocessing children itself.
  signal.signal(signal.SIGINT, signal.SIG_IGN)
  target_list, target_dicts, data, params, config_name = arglist
  GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
def GenerateOutput(target_list, target_dicts, data, params):
  """Generator entry point: emit ninja build files for every configuration.

  Generates either the single configuration named by the 'config'
  generator flag or, when that flag is absent, all configurations
  (optionally in parallel worker processes when params['parallel']).
  """
  # Update target_dicts for iOS device builds.
  target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
      target_dicts)
  user_config = params.get('generator_flags', {}).get('config', None)
  if gyp.common.GetFlavor(params) == 'win':
    # Windows-only preprocessing: shard oversized targets and insert
    # large-PDB shim targets before generation.
    target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
    target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
        target_list, target_dicts, generator_default_variables)
  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            user_config)
  else:
    # Configuration names are read from the first target; assumes every
    # target defines the same set of configurations (the gyp norm) --
    # TODO confirm for exotic inputs.
    config_names = target_dicts[target_list[0]]['configurations'].keys()
    if params['parallel']:
      try:
        # One worker per configuration; each worker generates one config.
        pool = multiprocessing.Pool(len(config_names))
        arglists = []
        for config_name in config_names:
          arglists.append(
              (target_list, target_dicts, data, params, config_name))
        pool.map(CallGenerateOutputForConfig, arglists)
      except KeyboardInterrupt, e:
        # Tear the worker pool down before propagating Ctrl-C.
        pool.terminate()
        raise e
    else:
      for config_name in config_names:
        GenerateOutputForConfig(target_list, target_dicts, data, params,
                                config_name)
|
dbo/selenium
|
refs/heads/master
|
py/test/selenium/__init__.py
|
40
|
#!/usr/bin/python
#
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
|
jlmadurga/permabots-www
|
refs/heads/master
|
permabots_www/users/admin.py
|
183
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django import forms
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as AuthUserAdmin
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
from .models import User
class MyUserChangeForm(UserChangeForm):
    # Stock django.contrib.auth change form, re-pointed at the project's
    # custom User model.
    class Meta(UserChangeForm.Meta):
        model = User
class MyUserCreationForm(UserCreationForm):
    """User creation form for the custom User model that rejects
    usernames which already exist."""

    # Extend the inherited messages without mutating the base class.
    # The original wrote
    #   error_message = UserCreationForm.error_messages.update({...})
    # which bound None (dict.update returns None) and, worse, injected
    # the key into UserCreationForm's shared dict as an import-time side
    # effect for every form in the process.
    error_messages = dict(
        UserCreationForm.error_messages,
        duplicate_username='This username has already been taken.',
    )

    class Meta(UserCreationForm.Meta):
        model = User

    def clean_username(self):
        """Return the username, or raise ValidationError if it is taken."""
        username = self.cleaned_data["username"]
        try:
            User.objects.get(username=username)
        except User.DoesNotExist:
            # No clash: the username is available.
            return username
        raise forms.ValidationError(self.error_messages['duplicate_username'])
@admin.register(User)
class UserAdmin(AuthUserAdmin):
    # Reuse Django's auth admin for the custom User model, swapping in
    # the project's own change/creation forms.
    form = MyUserChangeForm
    add_form = MyUserCreationForm
|
mthxx/Comms
|
refs/heads/master
|
database.py
|
1
|
#!/usr/bin/python3.3
from gui import GUI
from config import Config
import mysql.connector
class Database:
    """Thin wrapper around the application's shared MySQL connection.

    All methods are invoked through the class (e.g. Database.login(...))
    and deliberately take no `self` parameter, mirroring the original
    static-style design.
    """

    # Shared connection handle; "" until establish_connection() runs.
    DB = ""

    def establish_connection():
        """Open the shared MySQL connection using settings from Config.c."""
        Database.DB = mysql.connector.connect(host=Config.c['DB IP'],
                                              user=Config.c['DB User'],
                                              password=Config.c['DB Password'],
                                              db=Config.c['DB Name'])

    def login(uname, passwd):
        """Authenticate and return (ok, user_perms, channel_perms).

        On failure the GUI status label is updated and ("", "") stand in
        for the permission values.
        """
        Database.establish_connection()
        flag = Database.check_user(uname, passwd)
        if flag:
            userPermissions = Database.get_user_permissions(uname)
            channelPermissions = Database.get_channel_permissions()
            return flag, userPermissions, channelPermissions
        else:
            GUI.LOGIN_STATUS_LABEL.setText("Incorrect Username or Password")
            return flag, "", ""

    def check_user(uname, passwd):
        """Return True if the vBulletin username/password pair is valid.

        The password check (md5(concat(md5(pw), salt))) runs server-side.
        User-supplied values are bound as query parameters -- the original
        concatenated them into the SQL text and was trivially injectable.
        """
        cur = Database.DB.cursor()
        query = ("SELECT username, password FROM gbgraphi_vbulletin.user "
                 "WHERE user.username = %s "
                 "AND password = md5(concat(md5(%s), salt))")
        cur.execute(query, (uname, passwd))
        for (username, password) in cur:
            GUI.LOGIN_STATUS_LABEL.setText("Success!")
            GUI.USER_BTN.setText(username)
            return True
        return False

    def get_user_permissions(uname):
        """Return [game_name, rank] for the user's clan membership.

        As in the original, only the LAST row of the result set is kept.
        Returns [] when there are no rows (the original raised NameError
        in that case).
        """
        cur = Database.DB.cursor()
        query = ("select game.gname, rank.rank "
                 "from gbgraphi_vbulletin.clan_games as game, "
                 "gbgraphi_vbulletin.clan_members_ranks as rank, "
                 "gbgraphi_vbulletin.user as user, "
                 "gbgraphi_vbulletin.clan_members_members as member "
                 "where user.userid = member.userid "
                 "and rank.rid = member.rank "
                 "and member.gid = game.gid "
                 "and user.username = %s")
        cur.execute(query, (uname,))
        permissions = []
        for (gname, rank) in cur:
            perm = "{},{}".format(gname, rank)
            permissions = perm.split(',')
        return permissions

    def get_channel_permissions():
        """Return [[usergroupid, usertitle], ...] for every usergroup.

        Preserves the original's trailing empty entry (the final "\\n"
        split leaves [''] at the end).
        """
        Database.establish_connection()
        cur = Database.DB.cursor()
        # Fixed query with no user input; no parameters needed.
        cur.execute("select usergroupid, usertitle from usergroup")
        queryString = ""
        for (usergroupid, usertitle) in cur:
            queryString += "{},{}".format(usergroupid, usertitle)
            queryString += "\n"
        arrTemp = queryString.split('\n')
        channelPermissions = []
        for i in range(0, len(arrTemp)):
            channelPermissions.append(arrTemp[i].split(","))
        return channelPermissions
|
gdementen/pycel
|
refs/heads/master
|
src/pycel/tokenizer.py
|
1
|
#========================================================================
# Description: Tokenise an Excel formula using an implementation of
# E. W. Bachtal's algorithm, found here:
#
# http://ewbi.blogs.com/develops/2004/12/excel_formula_p.html
#
# Tested with Python v2.5 (win32)
# Author: Robin Macharg
# Copyright: Algorithm (c) E. W. Bachtal, this implementation (c) R. Macharg
#
# CVS Info:
# $Header: T:\\cvsarchive/Excel\040export\040&\040import\040XML/ExcelXMLTransform/EWBI_Javascript_port/jsport.py,v 1.5 2006/12/07 13:41:08 rmacharg Exp $
#
# Modification History
#
# Date Author Comment
# =======================================================================
# 2006/11/29 - RMM - Made strictly class-based.
# Added parse, render and pretty print methods
# 2006/11 - RMM - RMM = Robin Macharg
# Created
# 2011/10 - Dirk Gorissen - Patch to support scientific notation
#========================================================================
import re
import collections
#========================================================================
# Class: ExcelParserTokens
# Description: Inheritable container for token definitions
#
# Attributes: Self explanatory
#
# Methods: None
#========================================================================
class ExcelParserTokens:
    """Namespace of token type/subtype string constants shared by the
    tokenizer classes (inherited by f_tokenStack and ExcelParser)."""

    # --- token types ---------------------------------------------------
    TOK_TYPE_NOOP = "noop"
    TOK_TYPE_OPERAND = "operand"
    TOK_TYPE_FUNCTION = "function"
    TOK_TYPE_SUBEXPR = "subexpression"
    TOK_TYPE_ARGUMENT = "argument"
    TOK_TYPE_OP_PRE = "operator-prefix"
    TOK_TYPE_OP_IN = "operator-infix"
    TOK_TYPE_OP_POST = "operator-postfix"
    TOK_TYPE_WSPACE = "white-space"
    TOK_TYPE_UNKNOWN = "unknown"

    # --- token subtypes ------------------------------------------------
    TOK_SUBTYPE_START = "start"
    TOK_SUBTYPE_STOP = "stop"
    TOK_SUBTYPE_TEXT = "text"
    TOK_SUBTYPE_NUMBER = "number"
    TOK_SUBTYPE_LOGICAL = "logical"
    TOK_SUBTYPE_ERROR = "error"
    TOK_SUBTYPE_RANGE = "range"
    TOK_SUBTYPE_MATH = "math"
    TOK_SUBTYPE_CONCAT = "concatenate"
    TOK_SUBTYPE_INTERSECT = "intersect"
    TOK_SUBTYPE_UNION = "union"
#========================================================================
# Class: f_token
# Description: Encapsulate a formula token
#
# Attributes: tvalue -
# ttype - See token definitions, above, for values
# tsubtype - See token definitions, above, for values
#
# Methods: f_token - __init__()
#========================================================================
class f_token:
    """A single formula token: its text, type and subtype (legal values
    are the constants on ExcelParserTokens).  Attribute names keep the
    t-prefix used throughout the tokenizer."""

    def __init__(self, value, type, subtype):
        self.tvalue, self.ttype, self.tsubtype = value, type, subtype

    def __str__(self):
        return self.tvalue
#========================================================================
# Class: f_tokens
# Description: An ordered list of tokens
# Attributes: items - Ordered list
# index - Current position in the list
#
# Methods: f_tokens - __init__()
# f_token - add() - Add a token to the end of the list
# None - addRef() - Add a token to the end of the list
# None - reset() - reset the index to -1
# Boolean - BOF() - End of list?
# Boolean - EOF() - Beginning of list?
# Boolean - moveNext() - Move the index along one
# f_token/None - current() - Return the current token
# f_token/None - next() - Return the next token (leave the index unchanged)
# f_token/None - previous() - Return the previous token (leave the index unchanged)
#========================================================================
class f_tokens:
    """An ordered token list with a cursor.

    self.index is -1 before the first moveNext(); current()/next()/
    previous() return None rather than raising at the boundaries.
    """

    def __init__(self):
        self.items = []
        self.index = -1

    def add(self, value, type, subtype=""):
        """Create an f_token, append it and return it."""
        if not subtype:
            subtype = ""
        tok = f_token(value, type, subtype)
        self.addRef(tok)
        return tok

    def addRef(self, token):
        """Append an existing token object."""
        self.items.append(token)

    def reset(self):
        """Rewind the cursor to before the first token."""
        self.index = -1

    def BOF(self):
        """True when the cursor is at (or before) the first token."""
        return self.index <= 0

    def EOF(self):
        """True when the cursor is at (or past) the last token."""
        return self.index >= len(self.items) - 1

    def moveNext(self):
        """Advance the cursor; returns False when already at the end."""
        if self.EOF():
            return False
        self.index += 1
        return True

    def current(self):
        """Token under the cursor, or None before the first move."""
        return None if self.index == -1 else self.items[self.index]

    def next(self):
        """Peek at the following token without moving; None at the end."""
        return None if self.EOF() else self.items[self.index + 1]

    def previous(self):
        """Peek at the preceding token without moving; None at the start."""
        return None if self.index < 1 else self.items[self.index - 1]
#========================================================================
# Class: f_tokenStack
# Inherits: ExcelParserTokens - a list of token values
# Description: A LIFO stack of tokens
#
# Attributes: items - Ordered list
#
# Methods: f_tokenStack - __init__()
# None - push(token) - Push a token onto the stack
# f_token/None - pop() - Pop a token off the stack
# f_token/None - token() - Non-destructively return the top item on the stack
# String - type() - Return the top token's type
# String - subtype() - Return the top token's subtype
# String - value() - Return the top token's value
#========================================================================
class f_tokenStack(ExcelParserTokens):
    """LIFO stack of f_token objects used to match nested functions,
    subexpressions and arrays during tokenization."""

    def __init__(self):
        self.items = []

    def push(self, token):
        """Push a token onto the stack."""
        self.items.append(token)

    def pop(self):
        """Pop the top token; return a new STOP token of the same type.

        Note the popped token itself is discarded -- the caller only
        needs a matching "stop" marker for the output stream.
        """
        token = self.items.pop()
        return f_token("", token.ttype, self.TOK_SUBTYPE_STOP)

    def token(self):
        """Non-destructively return the top token, or None when empty."""
        # Replaces the original's pre-2.5 and/or "ternary" hack with a
        # plain conditional expression.
        return self.items[-1] if self.items else None

    def value(self):
        """tvalue of the top token, or "" when the stack is empty."""
        tok = self.token()
        return tok.tvalue if tok else ""

    def type(self):
        """ttype of the top token, or "" when the stack is empty."""
        # The original bound an unused local (t = self.token()) here and
        # then recomputed the token twice; removed.
        tok = self.token()
        return tok.ttype if tok else ""

    def subtype(self):
        """tsubtype of the top token, or "" when the stack is empty."""
        tok = self.token()
        return tok.tsubtype if tok else ""
#========================================================================
# Class: ExcelParser
# Description: Parse an Excel formula into a stream of tokens
# Attributes:
#
# Methods: f_tokens - getTokens(formula) - return a token stream (list)
#========================================================================
class ExcelParser(ExcelParserTokens):
    """Tokenizer for Excel formulas (port of E. W. Bachtal's JavaScript
    algorithm).  getTokens() turns a formula string into an f_tokens
    stream; parse()/render()/prettyprint() are conveniences on top."""

    def getTokens(self, formula):
        """Tokenize `formula` (with or without a leading '=') and return
        the resulting f_tokens stream, cursor reset to the start."""

        def currentChar():
            return formula[offset]
        def doubleChar():
            return formula[offset:offset+2]
        def nextChar():
            # JavaScript returns an empty string if the index is out of bounds,
            # Python throws an IndexError. We mimic this behaviour here.
            try:
                formula[offset+1]
            except IndexError:
                return ""
            else:
                return formula[offset+1]
        def EOF():
            return offset >= len(formula)

        tokens = f_tokens()
        tokenStack = f_tokenStack()
        offset = 0
        token = ""
        inString = False
        inPath = False
        inRange = False
        inError = False

        # strip leading spaces and a single leading "="
        while (len(formula) > 0):
            if (formula[0] == " "):
                formula = formula[1:]
            else:
                if (formula[0] == "="):
                    formula = formula[1:]
                break

        # state-dependent character evaluation (order is important)
        while not EOF():

            # double-quoted strings
            # embeds are doubled
            # end marks token
            if inString:
                if currentChar() == "\"":
                    if nextChar() == "\"":
                        token += "\""
                        offset += 1
                    else:
                        inString = False
                        tokens.add(token, self.TOK_TYPE_OPERAND, self.TOK_SUBTYPE_TEXT)
                        token = ""
                else:
                    token += currentChar()
                offset += 1
                continue

            # single-quoted strings (links)
            # embeds are double
            # end does not mark a token
            if inPath:
                if currentChar() == "'":
                    if nextChar() == "'":
                        token += "'"
                        offset += 1
                    else:
                        inPath = False
                else:
                    token += currentChar()
                offset += 1
                continue

            # bracketed strings (range offset or linked workbook name)
            # no embeds (changed to "()" by Excel)
            # end does not mark a token
            if inRange:
                if currentChar() == "]":
                    inRange = False
                token += currentChar()
                offset += 1
                continue

            # error values
            # end marks a token, determined from absolute list of values
            if inError:
                token += currentChar()
                offset += 1
                if ",#NULL!,#DIV/0!,#VALUE!,#REF!,#NAME?,#NUM!,#N/A,".find("," + token + ",") != -1:
                    inError = False
                    tokens.add(token, self.TOK_TYPE_OPERAND, self.TOK_SUBTYPE_ERROR)
                    token = ""
                continue

            # scientific notation check ("1.5e" followed by +/- exponent)
            regexSN = r'^[1-9]{1}(\.[0-9]+)?[eE]{1}$'
            if (("+-").find(currentChar()) != -1):
                if len(token) > 1:
                    if re.match(regexSN, token):
                        token += currentChar()
                        offset += 1
                        continue

            # independent character evaulation (order not important)
            #
            # establish state-dependent character evaluations
            if currentChar() == "\"":
                if len(token) > 0:
                    # not expected
                    tokens.add(token, self.TOK_TYPE_UNKNOWN)
                    token = ""
                inString = True
                offset += 1
                continue

            if currentChar() == "'":
                if len(token) > 0:
                    # not expected
                    tokens.add(token, self.TOK_TYPE_UNKNOWN)
                    token = ""
                inPath = True
                offset += 1
                continue

            if (currentChar() == "["):
                inRange = True
                token += currentChar()
                offset += 1
                continue

            if (currentChar() == "#"):
                if (len(token) > 0):
                    # not expected
                    tokens.add(token, self.TOK_TYPE_UNKNOWN)
                    token = ""
                inError = True
                token += currentChar()
                offset += 1
                continue

            # mark start and end of arrays and array rows
            if (currentChar() == "{"):
                if (len(token) > 0):
                    # not expected
                    tokens.add(token, self.TOK_TYPE_UNKNOWN)
                    token = ""
                tokenStack.push(tokens.add("ARRAY", self.TOK_TYPE_FUNCTION, self.TOK_SUBTYPE_START))
                tokenStack.push(tokens.add("ARRAYROW", self.TOK_TYPE_FUNCTION, self.TOK_SUBTYPE_START))
                offset += 1
                continue

            if (currentChar() == ";"):
                if (len(token) > 0):
                    tokens.add(token, self.TOK_TYPE_OPERAND)
                    token = ""
                tokens.addRef(tokenStack.pop())
                tokens.add(",", self.TOK_TYPE_ARGUMENT)
                tokenStack.push(tokens.add("ARRAYROW", self.TOK_TYPE_FUNCTION, self.TOK_SUBTYPE_START))
                offset += 1
                continue

            if (currentChar() == "}"):
                if (len(token) > 0):
                    tokens.add(token, self.TOK_TYPE_OPERAND)
                    token = ""
                tokens.addRef(tokenStack.pop())
                tokens.addRef(tokenStack.pop())
                offset += 1
                continue

            # trim white-space
            if (currentChar() == " "):
                if (len(token) > 0):
                    tokens.add(token, self.TOK_TYPE_OPERAND)
                    token = ""
                tokens.add("", self.TOK_TYPE_WSPACE)
                offset += 1
                # BUGFIX: test EOF() *before* currentChar(); the original
                # order raised IndexError on a formula ending in a space
                # (e.g. "=1 ").
                while ((not EOF()) and (currentChar() == " ")):
                    offset += 1
                continue

            # multi-character comparators
            if (",>=,<=,<>,".find("," + doubleChar() + ",") != -1):
                if (len(token) > 0):
                    tokens.add(token, self.TOK_TYPE_OPERAND)
                    token = ""
                tokens.add(doubleChar(), self.TOK_TYPE_OP_IN, self.TOK_SUBTYPE_LOGICAL)
                offset += 2
                continue

            # standard infix operators
            if ("+-*/^&=><".find(currentChar()) != -1):
                if (len(token) > 0):
                    tokens.add(token, self.TOK_TYPE_OPERAND)
                    token = ""
                tokens.add(currentChar(), self.TOK_TYPE_OP_IN)
                offset += 1
                continue

            # standard postfix operators
            if ("%".find(currentChar()) != -1):
                if (len(token) > 0):
                    tokens.add(token, self.TOK_TYPE_OPERAND)
                    token = ""
                tokens.add(currentChar(), self.TOK_TYPE_OP_POST)
                offset += 1
                continue

            # start subexpression or function
            if (currentChar() == "("):
                if (len(token) > 0):
                    tokenStack.push(tokens.add(token, self.TOK_TYPE_FUNCTION, self.TOK_SUBTYPE_START))
                    token = ""
                else:
                    tokenStack.push(tokens.add("", self.TOK_TYPE_SUBEXPR, self.TOK_SUBTYPE_START))
                offset += 1
                continue

            # function, subexpression, array parameters
            if (currentChar() == ","):
                if (len(token) > 0):
                    tokens.add(token, self.TOK_TYPE_OPERAND)
                    token = ""
                if (not (tokenStack.type() == self.TOK_TYPE_FUNCTION)):
                    # comma outside a function call is Excel's union operator
                    tokens.add(currentChar(), self.TOK_TYPE_OP_IN, self.TOK_SUBTYPE_UNION)
                else:
                    tokens.add(currentChar(), self.TOK_TYPE_ARGUMENT)
                offset += 1
                continue

            # stop subexpression
            if (currentChar() == ")"):
                if (len(token) > 0):
                    tokens.add(token, self.TOK_TYPE_OPERAND)
                    token = ""
                tokens.addRef(tokenStack.pop())
                offset += 1
                continue

            # token accumulation
            token += currentChar()
            offset += 1

        # dump remaining accumulation
        if (len(token) > 0):
            tokens.add(token, self.TOK_TYPE_OPERAND)

        # move all tokens to a new collection, excluding all unnecessary
        # white-space tokens (whitespace between two operands becomes the
        # intersection operator)
        tokens2 = f_tokens()
        while (tokens.moveNext()):
            token = tokens.current()
            if (token.ttype == self.TOK_TYPE_WSPACE):
                if ((tokens.BOF()) or (tokens.EOF())):
                    pass
                elif (not(
                      ((tokens.previous().ttype == self.TOK_TYPE_FUNCTION) and (tokens.previous().tsubtype == self.TOK_SUBTYPE_STOP)) or
                      ((tokens.previous().ttype == self.TOK_TYPE_SUBEXPR) and (tokens.previous().tsubtype == self.TOK_SUBTYPE_STOP)) or
                      (tokens.previous().ttype == self.TOK_TYPE_OPERAND)
                      )
                     ):
                    pass
                elif (not(
                      ((tokens.next().ttype == self.TOK_TYPE_FUNCTION) and (tokens.next().tsubtype == self.TOK_SUBTYPE_START)) or
                      ((tokens.next().ttype == self.TOK_TYPE_SUBEXPR) and (tokens.next().tsubtype == self.TOK_SUBTYPE_START)) or
                      (tokens.next().ttype == self.TOK_TYPE_OPERAND)
                      )
                     ):
                    pass
                else:
                    tokens2.add(token.tvalue, self.TOK_TYPE_OP_IN, self.TOK_SUBTYPE_INTERSECT)
                continue
            tokens2.addRef(token)

        # switch infix "-" operator to prefix when appropriate, switch infix
        # "+" operator to noop when appropriate, identify operand and
        # infix-operator subtypes, pull "@" from in front of function names
        while (tokens2.moveNext()):
            token = tokens2.current()
            if ((token.ttype == self.TOK_TYPE_OP_IN) and (token.tvalue == "-")):
                if (tokens2.BOF()):
                    token.ttype = self.TOK_TYPE_OP_PRE
                elif (
                     ((tokens2.previous().ttype == self.TOK_TYPE_FUNCTION) and (tokens2.previous().tsubtype == self.TOK_SUBTYPE_STOP)) or
                     ((tokens2.previous().ttype == self.TOK_TYPE_SUBEXPR) and (tokens2.previous().tsubtype == self.TOK_SUBTYPE_STOP)) or
                     (tokens2.previous().ttype == self.TOK_TYPE_OP_POST) or
                     (tokens2.previous().ttype == self.TOK_TYPE_OPERAND)
                     ):
                    token.tsubtype = self.TOK_SUBTYPE_MATH
                else:
                    token.ttype = self.TOK_TYPE_OP_PRE
                continue

            if ((token.ttype == self.TOK_TYPE_OP_IN) and (token.tvalue == "+")):
                if (tokens2.BOF()):
                    token.ttype = self.TOK_TYPE_NOOP
                elif (
                     ((tokens2.previous().ttype == self.TOK_TYPE_FUNCTION) and (tokens2.previous().tsubtype == self.TOK_SUBTYPE_STOP)) or
                     ((tokens2.previous().ttype == self.TOK_TYPE_SUBEXPR) and (tokens2.previous().tsubtype == self.TOK_SUBTYPE_STOP)) or
                     (tokens2.previous().ttype == self.TOK_TYPE_OP_POST) or
                     (tokens2.previous().ttype == self.TOK_TYPE_OPERAND)
                     ):
                    token.tsubtype = self.TOK_SUBTYPE_MATH
                else:
                    token.ttype = self.TOK_TYPE_NOOP
                continue

            if ((token.ttype == self.TOK_TYPE_OP_IN) and (len(token.tsubtype) == 0)):
                if (("<>=").find(token.tvalue[0:1]) != -1):
                    token.tsubtype = self.TOK_SUBTYPE_LOGICAL
                elif (token.tvalue == "&"):
                    token.tsubtype = self.TOK_SUBTYPE_CONCAT
                else:
                    token.tsubtype = self.TOK_SUBTYPE_MATH
                continue

            if ((token.ttype == self.TOK_TYPE_OPERAND) and (len(token.tsubtype) == 0)):
                try:
                    float(token.tvalue)
                except ValueError:
                    # (was "except ValueError, e" -- the binding was unused
                    # and the old syntax is Python-2-only)
                    if ((token.tvalue == 'TRUE') or (token.tvalue == 'FALSE')):
                        token.tsubtype = self.TOK_SUBTYPE_LOGICAL
                    else:
                        token.tsubtype = self.TOK_SUBTYPE_RANGE
                else:
                    token.tsubtype = self.TOK_SUBTYPE_NUMBER
                continue

            if (token.ttype == self.TOK_TYPE_FUNCTION):
                if (token.tvalue[0:1] == "@"):
                    token.tvalue = token.tvalue[1:]
                continue

        tokens2.reset()

        # move all tokens to a new collection, excluding all noops
        tokens = f_tokens()
        while (tokens2.moveNext()):
            if (tokens2.current().ttype != self.TOK_TYPE_NOOP):
                tokens.addRef(tokens2.current())

        tokens.reset()
        return tokens

    def parse(self, formula):
        """Tokenize `formula` and store the stream on self.tokens."""
        self.tokens = self.getTokens(formula)

    def render(self):
        """Re-assemble the parsed tokens back into formula text."""
        output = ""
        if self.tokens:
            for t in self.tokens.items:
                if t.ttype == self.TOK_TYPE_FUNCTION and t.tsubtype == self.TOK_SUBTYPE_START: output += t.tvalue + "("
                elif t.ttype == self.TOK_TYPE_FUNCTION and t.tsubtype == self.TOK_SUBTYPE_STOP: output += ")"
                elif t.ttype == self.TOK_TYPE_SUBEXPR and t.tsubtype == self.TOK_SUBTYPE_START: output += "("
                elif t.ttype == self.TOK_TYPE_SUBEXPR and t.tsubtype == self.TOK_SUBTYPE_STOP: output += ")"
                # TODO: add in RE substitution of " with "" for strings
                elif t.ttype == self.TOK_TYPE_OPERAND and t.tsubtype == self.TOK_SUBTYPE_TEXT: output += "\"" + t.tvalue + "\""
                elif t.ttype == self.TOK_TYPE_OP_IN and t.tsubtype == self.TOK_SUBTYPE_INTERSECT: output += " "
                else: output += t.tvalue
        return output

    def prettyprint(self):
        """Return an indented, one-token-per-line dump of self.tokens."""
        indent = 0
        output = ""
        if self.tokens:
            for t in self.tokens.items:
                if (t.tsubtype == self.TOK_SUBTYPE_STOP):
                    indent -= 1
                output += " "*indent + t.tvalue + " <" + t.ttype + "> <" + t.tsubtype + ">" + "\n"
                if (t.tsubtype == self.TOK_SUBTYPE_START):
                    indent += 1
        return output
class Operator:
    # Precedence/associativity record used by shunting_yard's operator table.
    def __init__(self,value,precedence,associativity):
        self.value = value                    # operator text, e.g. '+' or 'u-'
        self.precedence = precedence          # higher binds tighter
        self.associativity = associativity    # 'left' or 'right'
class ASTNode(object):
    """Base class for nodes of the formula AST produced by shunting_yard;
    wraps a single f_token."""

    def __init__(self, token):
        super(ASTNode, self).__init__()
        self.token = token

    def emit(self):
        """Return this node's source text.

        The original evaluated self.token.tvalue without returning it (a
        statement with no effect, so emit() always yielded None); return
        the value so the method is usable.
        """
        return self.token.tvalue

    def __str__(self):
        return self.token.tvalue
class OperatorNode(ASTNode):
    # AST node for operator tokens (prefix/infix/postfix).
    def __init__(self,*args):
        super(OperatorNode,self).__init__(*args)
    def emit(self):
        # Code generation not implemented yet.
        pass
class RangeNode(ASTNode):
    # AST node for cell/range operand tokens (subtype "range").
    def __init__(self,*args):
        super(RangeNode,self).__init__(*args)
    def emit(self):
        # Code generation not implemented yet.
        pass
class FunctionNode(ASTNode):
    # AST node for function tokens; num_args is filled in by
    # shunting_yard once the closing parenthesis is seen.
    def __init__(self,*args):
        super(FunctionNode,self).__init__(*args)
        self.num_args = 0
    def emit(self):
        # Code generation not implemented yet.
        pass
def create_node(t):
    """Wrap token `t` in the matching AST node class: range operands ->
    RangeNode, functions -> FunctionNode, operators -> OperatorNode,
    anything else -> plain ASTNode."""
    if t.ttype == "function":
        return FunctionNode(t)
    if t.ttype == "operator":
        return OperatorNode(t)
    if t.ttype == "operand" and t.tsubtype == "range":
        return RangeNode(t)
    return ASTNode(t)
def shunting_yard(expression):
    """Convert an Excel formula string into RPN output (a deque of
    ASTNode subclasses) via Dijkstra's shunting-yard algorithm, using
    Excel's operator precedences and counting function arguments."""
    #remove leading =
    if expression.startswith('='):
        expression = expression[1:]
    p = ExcelParser();
    p.parse(expression)

    # insert tokens for '(' and ')', to make things cleaner below
    tokens = []
    for t in p.tokens.items:
        if t.ttype == "function" and t.tsubtype == "start":
            t.tsubtype = ""
            tokens.append(t)
            tokens.append(f_token('(','arglist','start'))
        elif t.ttype == "function" and t.tsubtype == "stop":
            #t.tsubtype = ""
            #tokens.append(t)
            tokens.append(f_token(')','arglist','stop'))
        elif t.ttype == "subexpression" and t.tsubtype == "start":
            t.tvalue = '('
            tokens.append(t)
        elif t.ttype == "subexpression" and t.tsubtype == "stop":
            t.tvalue = ')'
            tokens.append(t)
        else:
            tokens.append(t)

    print "tokens: ", "|".join([x.tvalue for x in tokens])

    # operator precedences, per:
    #http://office.microsoft.com/en-us/excel-help/calculation-operators-and-precedence-HP010078886.aspx
    operators = {}
    operators[':'] = Operator(':',8,'left')
    operators[''] = Operator(' ',8,'left')     # intersection (space)
    operators[','] = Operator(',',8,'left')    # union
    operators['u-'] = Operator('u-',7,'left') #unary negation
    operators['%'] = Operator('%',6,'left')
    operators['^'] = Operator('^',5,'left')
    operators['*'] = Operator('*',4,'left')
    operators['/'] = Operator('/',4,'left')
    operators['+'] = Operator('+',3,'left')
    operators['-'] = Operator('-',3,'left')
    operators['&'] = Operator('&',2,'left')
    operators['='] = Operator('=',1,'left')
    operators['<'] = Operator('<',1,'left')
    operators['>'] = Operator('>',1,'left')
    operators['<='] = Operator('<=',1,'left')
    operators['>='] = Operator('>=',1,'left')
    operators['<>'] = Operator('<>',1,'left')

    output = collections.deque()   # RPN result
    stack = []                     # pending operators / parens / functions
    were_values = []               # per nesting level: value seen since last arg?
    arg_count = []                 # per function: arguments counted so far

    # debug helpers
    def po():
        print "output: ", "|".join([x.tvalue for x in output])
    def so():
        print "stack:", "|".join([x.tvalue for x in stack])

    for t in tokens:
        if t.ttype == "operand":
            output.append(create_node(t))
            if were_values:
                were_values.pop()
                were_values.append(True)
        elif t.ttype == "function":
            stack.append(t)
            arg_count.append(0)
            if were_values:
                were_values.pop()
                were_values.append(True)
            were_values.append(False)
        elif t.ttype == "argument":
            # flush operators down to the function's '(' and count the arg
            while stack and (stack[-1].tsubtype != "start"):
                output.append(create_node(stack.pop()))
            if were_values.pop(): arg_count[-1] += 1
            were_values.append(False)
            if not len(stack):
                raise Exception("Mismatched or misplaced parentheses")
        elif t.ttype.startswith('operator'):
            # prefix '-' maps to the dedicated unary-negation operator
            if t.ttype.endswith('-prefix') and t.tvalue =="-":
                o1 = operators['u-']
            else:
                o1 = operators[t.tvalue]
            while stack and stack[-1].ttype.startswith('operator'):
                if stack[-1].ttype.endswith('-prefix') and stack[-1].tvalue =="-":
                    o2 = operators['u-']
                else:
                    o2 = operators[stack[-1].tvalue]
                # pop while the stacked operator binds at least as tightly
                if ( (o1.associativity == "left" and o1.precedence <= o2.precedence)
                        or
                     (o1.associativity == "right" and o1.precedence < o2.precedence) ):
                    output.append(create_node(stack.pop()))
                else:
                    break
            stack.append(t)
        elif t.tsubtype == "start":
            stack.append(t)
        elif t.tsubtype == "stop":
            # flush back to the matching '('
            while stack and stack[-1].tsubtype != "start":
                output.append(create_node(stack.pop()))
            if not stack:
                raise Exception("Mismatched or misplaced parentheses")
            stack.pop()
            # a function name below the '(' gets its final argument count
            if stack and stack[-1].ttype == "function":
                f = create_node(stack.pop())
                a = arg_count.pop()
                w = were_values.pop()
                if w: a += 1
                f.num_args = a
                print f, "has ",a," args"
                output.append(f)

    # drain any remaining operators
    while stack:
        if stack[-1].tsubtype == "start" or stack[-1].tsubtype == "stop":
            raise Exception("Mismatched or misplaced parentheses")
        output.append(create_node(stack.pop()))

    #print "Stack is: ", "|".join(stack)
    #print "Ouput is: ", "|".join([x.tvalue for x in output])

    return output
|
electronpass/electronpass-desktop
|
refs/heads/develop
|
.builder/prepare_cryptopp.py
|
1
|
import os
import zipfile
import urllib
def change_runtime_library(folder, filename):
with open(folder + os.sep + filename) as f:
data = f.readlines()
for i in range(len(data)):
if '<RuntimeLibrary>MultiThreaded</RuntimeLibrary>' in data[i]:
data[i] = data[i].replace('MultiThreaded', 'MultiThreadedDLL')
print 'Changed', filename, 'library to MultiThreadedDLL'
with open(folder + os.sep + filename, 'w') as f:
f.writelines(data)
def unzip(filename, destination):
    """Extract every member of zip archive `filename` into directory
    `destination` (created if missing)."""
    with zipfile.ZipFile(filename) as archive:
        archive.extractall(destination)
# Script body: download a pinned Crypto++ release, unpack it, and patch
# its Visual Studio projects to use the DLL runtime.
print 'Downloading CryptoPP.'
# Please update this URL after new Crypto++ release.
urllib.urlretrieve ("https://github.com/weidai11/cryptopp/archive/CRYPTOPP_6_0_0.zip", "Crypto.zip")
print 'Unziping.'
unzip('Crypto.zip', '.')
# The extracted directory is assumed to be the only entry with 'crypto'
# in its name and no dot (e.g. cryptopp-CRYPTOPP_6_0_0) -- TODO confirm
# this stays unique in the working directory.
folder = [i for i in os.listdir('.') if '.' not in i and 'crypto' in i][0]
# The release bundles its VS2005 project files in a nested zip.
unzip(folder + os.sep + 'vs2005.zip', folder + os.sep + 'vs2005')
print 'Changing build configuration.'
# Patch both the library and test projects.
files = ['cryptlib.vcxproj', 'cryptest.vcxproj']
for filename in files:
    change_runtime_library(folder, filename)
|
aperigault/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/network/junos/facts/lldp_interfaces/lldp_interfaces.py
|
2
|
#
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
The junos lldp_interfaces fact class
It is in this file the configuration is collected from the device
for a given resource, parsed, and the facts tree is populated
based on the configuration.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from copy import deepcopy
from ansible.module_utils._text import to_bytes
from ansible.module_utils.network.common import utils
from ansible.module_utils.network.junos.argspec.lldp_interfaces.lldp_interfaces import Lldp_interfacesArgs
from ansible.module_utils.six import string_types
try:
from lxml import etree
HAS_LXML = True
except ImportError:
HAS_LXML = False
class Lldp_interfacesFacts(object):
    """ The junos lldp_interfaces fact class
    """
    def __init__(self, module, subspec='config', options='options'):
        self._module = module
        self.argument_spec = Lldp_interfacesArgs.argument_spec
        spec = deepcopy(self.argument_spec)
        # Narrow the argspec to the sub-tree the facts code validates
        # against (by default spec['config']['options']).
        if subspec:
            if options:
                facts_argument_spec = spec[subspec][options]
            else:
                facts_argument_spec = spec[subspec]
        else:
            facts_argument_spec = spec
        # Skeleton dict derived from the argspec, used as the render template.
        self.generated_spec = utils.generate_dict(facts_argument_spec)

    def populate_facts(self, connection, ansible_facts, data=None):
        """ Populate the facts for interfaces
        :param connection: the device connection
        :param data: previously collected configuration as lxml ElementTree root instance
            or valid xml sting
        :rtype: dictionary
        :returns: facts
        """
        if not HAS_LXML:
            self._module.fail_json(msg='lxml is not installed.')

        if not data:
            # No cached configuration supplied: fetch just the
            # protocols/lldp/interface sub-tree from the device.
            config_filter = """
                <configuration>
                    <protocols>
                        <lldp>
                            <interface>
                            </interface>
                        </lldp>
                    </protocols>
                </configuration>
                """
            data = connection.get_configuration(filter=config_filter)

        if isinstance(data, string_types):
            # get_configuration may hand back raw XML text; parse it into
            # an ElementTree root for xpath below.
            data = etree.fromstring(to_bytes(data, errors='surrogate_then_replace'))

        self._resources = data.xpath('configuration/protocols/lldp/interface')

        objs = []
        for resource in self._resources:
            if resource is not None:
                obj = self.render_config(self.generated_spec, resource)
                if obj:
                    objs.append(obj)
        facts = {}
        if objs:
            facts['lldp_interfaces'] = []
            # Validate the rendered configs against the argspec, then
            # strip empty/None values before publishing.
            params = utils.validate_config(self.argument_spec, {'config': objs})
            for cfg in params['config']:
                facts['lldp_interfaces'].append(utils.remove_empties(cfg))
        ansible_facts['ansible_network_resources'].update(facts)
        return ansible_facts

    def render_config(self, spec, conf):
        """
        Render config as dictionary structure and delete keys
          from spec for null values

        :param spec: The facts tree, generated from the argspec
        :param conf: The ElementTree instance of configuration object
        :rtype: dictionary
        :returns: The generated config
        """
        config = deepcopy(spec)
        config['name'] = utils.get_xml_conf_arg(conf, 'name')
        # A <disable/> leaf (matched via data='tag') means LLDP is turned
        # off on this interface.
        if utils.get_xml_conf_arg(conf, 'disable', data='tag'):
            config['enable'] = False
        return utils.remove_empties(config)
|
odicraig/kodi2odi
|
refs/heads/master
|
addons/plugin.video.exabytetv/main.py
|
1
|
# -*- coding: utf-8 -*-
import urllib
import urllib2
import re
import os
import xbmcplugin
import xbmcgui
import xbmcaddon
import xbmcvfs
import traceback
import cookielib,base64
from BeautifulSoup import BeautifulStoneSoup, BeautifulSoup, BeautifulSOAP
viewmode=None
try:
from xml.sax.saxutils import escape
except: traceback.print_exc()
try:
import json
except:
import simplejson as json
import SimpleDownloader as downloader
import time
try:
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
except:
pass
import zipfile
def ExtractAll(_in, _out):
    """Extract every member of zip archive `_in` into directory `_out`.

    Returns True on success, False on any failure; errors are printed
    rather than raised (best-effort, matching the original contract).
    """
    try:
        zin = zipfile.ZipFile(_in, 'r')
        try:
            zin.extractall(_out)
        finally:
            # The original never closed the archive handle; close it even
            # when extraction fails.
            zin.close()
    except Exception as e:
        print(str(e))
        return False
    return True
def SportsDevil():
    # Download a pinned SportsDevil build into Kodi's addons directory,
    # unpack it, and refresh Kodi's add-on state.
    # NOTE(review): `xbmc` is used here but not imported in the visible
    # header -- confirm it is imported elsewhere in this file.
    print "SportsDevil"
    url = "http://www.exabytetv.info/repo/plugin.video.SportsDevil-2016-08-22.zip"
    addonsDir = xbmc.translatePath(os.path.join('special://home', 'addons')).decode("utf-8")
    packageFile = os.path.join(addonsDir, 'packages', 'spd.zip')
    urllib.urlretrieve(url, packageFile)
    ExtractAll(packageFile, addonsDir)
    try:
        os.remove(packageFile)
    except:
        # best-effort cleanup of the downloaded package
        pass
    xbmc.executebuiltin("UpdateLocalAddons")
    xbmc.executebuiltin("UpdateAddonRepos")
def Dutch():
    # Install the "shani" repository add-on unless the (differently named)
    # 'repository.dss' add-on is already present, then refresh Kodi.
    # NOTE(review): the existence check tests 'repository.dss' while the
    # download installs repository.shani -- looks inconsistent; confirm
    # intent.
    if os.path.exists(os.path.join(xbmc.translatePath("special://home/addons/").decode("utf-8"), 'repository.dss')):
        return
    url = "http://exabytetv.info/repo/repository.shani-2.8.zip"
    addonsDir = xbmc.translatePath(os.path.join('special://home', 'addons')).decode("utf-8")
    packageFile = os.path.join(addonsDir, 'packages', 'isr.zip')
    urllib.urlretrieve(url, packageFile)
    ExtractAll(packageFile, addonsDir)
    try:
        os.remove(packageFile)
    except:
        # best-effort cleanup of the downloaded package
        pass
    xbmc.executebuiltin("UpdateLocalAddons")
    xbmc.executebuiltin("UpdateAddonRepos")
def Repo():
    """Install the docshadrach repository zip and refresh addons; skipped
    when the 'repository.docshadrach' addon folder already exists."""
    addons_root = xbmc.translatePath("special://home/addons/").decode("utf-8")
    if os.path.exists(os.path.join(addons_root, 'repository.docshadrach')):
        return
    zip_url = "https://github.com/XBMCSpot/docshadrach.repository/raw/master/zips/repository.docshadrach-1.0.zip"
    addons_dir = xbmc.translatePath(os.path.join('special://home', 'addons')).decode("utf-8")
    package = os.path.join(addons_dir, 'packages', 'isr.zip')
    urllib.urlretrieve(zip_url, package)
    ExtractAll(package, addons_dir)
    # Best-effort removal of the downloaded package.
    try:
        os.remove(package)
    except:
        pass
    for builtin in ("UpdateLocalAddons", "UpdateAddonRepos"):
        xbmc.executebuiltin(builtin)
# Ensure the docshadrach repository is installed at import time, before any
# menu is built.
Repo()
# Module-level flag toggled by getSoup() when a playlist URL carries the
# $$TSDOWNLOADER$$ marker; parse_m3u() reads it to route .ts streams through
# the f4mTester TSDOWNLOADER backend.
tsdownloader=False
# File-hoster domains whose links get routed through the URL resolver (the
# '&mode=19' suffix) when building playlists in getItems().
resolve_url=['180upload.com', 'allmyvideos.net', 'bestreams.net', 'clicknupload.com', 'cloudzilla.to', 'movshare.net', 'novamov.com', 'nowvideo.sx', 'videoweed.es', 'daclips.in', 'datemule.com', 'fastvideo.in', 'faststream.in', 'filehoot.com', 'filenuke.com', 'sharesix.com', 'plus.google.com', 'picasaweb.google.com', 'gorillavid.com', 'gorillavid.in', 'grifthost.com', 'hugefiles.net', 'ipithos.to', 'ishared.eu', 'kingfiles.net', 'mail.ru', 'my.mail.ru', 'videoapi.my.mail.ru', 'mightyupload.com', 'mooshare.biz', 'movdivx.com', 'movpod.net', 'movpod.in', 'movreel.com', 'mrfile.me', 'nosvideo.com', 'openload.io', 'played.to', 'bitshare.com', 'filefactory.com', 'k2s.cc', 'oboom.com', 'rapidgator.net', 'uploaded.net', 'primeshare.tv', 'bitshare.com', 'filefactory.com', 'k2s.cc', 'oboom.com', 'rapidgator.net', 'uploaded.net', 'sharerepo.com', 'stagevu.com', 'streamcloud.eu', 'streamin.to', 'thefile.me', 'thevideo.me', 'tusfiles.net', 'uploadc.com', 'zalaa.com', 'uploadrocket.net', 'uptobox.com', 'v-vids.com', 'veehd.com', 'vidbull.com', 'videomega.tv', 'vidplay.net', 'vidspot.net', 'vidto.me', 'vidzi.tv', 'vimeo.com', 'vk.com', 'vodlocker.com', 'xfileload.com', 'xvidstage.com', 'zettahost.tv']
# Addon ids whose plugin:// URLs are launched directly rather than handed to
# setResolvedUrl (used by playback code outside this chunk -- TODO confirm).
g_ignoreSetResolved=['plugin.video.dramasonline','plugin.video.f4mTester','plugin.video.shahidmbcnet','plugin.video.SportsDevil','plugin.stream.vaughnlive.tv','plugin.video.ZemTV-shani']
class NoRedirection(urllib2.HTTPErrorProcessor):
    # urllib2 opener handler that passes every HTTP(S) response through
    # unchanged, so 3xx redirects are NOT followed.  Installed by
    # getRegexParsed() when a <regex> block carries a 'noredirect' tag.
    def http_response(self, request, response):
        # Return the raw response instead of letting the default error
        # processor raise / redirect.
        return response
    https_response = http_response
# Remote-debugging hook: when flipped to True, attaches to a pydev/Eclipse
# debug server on localhost (pydevd must be copied into
# XBMC\system\python\Lib\pysrc first).
REMOTE_DBG=False;
if REMOTE_DBG:
    try:
        import pysrc.pydevd as pydevd
        # stdoutToServer / stderrToServer redirect stdout and stderr to the
        # eclipse console.
        pydevd.settrace('localhost', stdoutToServer=True, stderrToServer=True)
    except ImportError:
        # NOTE(review): 'sys' is not imported in the visible import block --
        # this branch would raise NameError if ever reached; confirm.
        sys.stderr.write("Error: " +
                    "Falta org.python.pydev.debug.pysrc agregar en PYTHONPATH.")
        sys.exit(1)
# Addon handle plus the paths and settings used throughout this module.
addon = xbmcaddon.Addon('plugin.video.exabytetv')
addon_version = addon.getAddonInfo('version')
profile = xbmc.translatePath(addon.getAddonInfo('profile').decode('utf-8'))
home = xbmc.translatePath(addon.getAddonInfo('path').decode('utf-8'))
favorites = os.path.join(profile, 'favorites')
history = os.path.join(profile, 'history')
REV = os.path.join(profile, 'list_revision')
icon = os.path.join(home, 'icon.png')
FANART = os.path.join(home, 'fanart.jpg')
source_file = os.path.join(profile, 'source_file')
functions_dir = profile
communityfiles = os.path.join(profile, 'LivewebTV')
# Rebinds the imported SimpleDownloader *module* name to an instance of it.
downloader = downloader.SimpleDownloader()
debug = addon.getSetting('debug')
# FAV holds the raw favourites file text, or an empty list when absent.
if os.path.exists(favorites)==True:
    FAV = open(favorites).read()
else: FAV = []
# The single built-in playlist source shown by getSources().
SOURCES = [{"url": "http://www.exabytetv.info/addon/main.xml", "fanart": "http://exabytetv.info/addon/fanart.jpg"}]
def addon_log(string):
    """Write *string* to the Kodi log, but only when the addon's 'debug'
    setting is enabled."""
    if debug != 'true':
        return
    xbmc.log("[addon.live.streamspro-%s]: %s" %(addon_version, string))
def makeRequest(url, headers=None):
try:
if headers is None:
headers = {'User-agent' : 'THEKING'}
#headers = {'User-agent' : 'THEHOOD'}
req = urllib2.Request(url,None,headers)
response = urllib2.urlopen(req)
data = response.read()
response.close()
return data
except urllib2.URLError, e:
addon_log('URL: '+url)
if hasattr(e, 'code'):
addon_log('We failed with error code - %s.' % e.code)
xbmc.executebuiltin("XBMC.Notification(Exabyte TV,Error - "+str(e.code)+",10000,"+icon+")")
elif hasattr(e, 'reason'):
addon_log('We failed to reach a server.')
addon_log('Reason: %s' %e.reason)
xbmc.executebuiltin("XBMC.Notification(Exabyte TV,No conecta. - "+str(e.reason)+",10000,"+icon+")")
def getSources():
    """Build the addon's top-level directory listing.

    Shows a FAVORITOS entry when a non-empty favourites file exists, then
    lists every configured source; with exactly one source its content is
    rendered directly via getData().
    """
    try:
        if os.path.exists(favorites) == True:
            FAV = open(favorites).read()
            # An empty favourites file is deleted rather than shown.
            if FAV == "[]":
                os.remove(favorites)
            else:
                addDir('[COLOR yellow][B]FAVORITOS[/COLOR][/B]','url',4,os.path.join(home, 'resources', 'favorite.png'),FANART,'','','','')
                # Blank spacer row (mode 100).
                addDir('','',100,'',FANART,'','','','')
        sources = SOURCES
        if len(sources) > 1:
            for i in sources:
                try:
                    ## for pre 1.0.8 sources -- stored as [name, url] lists.
                    if isinstance(i, list):
                        addDir(i[0].encode('utf-8'),i[1].encode('utf-8'),1,icon,FANART,'','','','','source')
                    else:
                        # Dict-style source: fall back to addon art/metadata
                        # for any missing key.
                        thumb = icon
                        fanart = FANART
                        desc = ''
                        date = ''
                        credits = ''
                        genre = ''
                        if i.has_key('thumbnail'):
                            thumb = i['thumbnail']
                        if i.has_key('fanart'):
                            fanart = i['fanart']
                        if i.has_key('description'):
                            desc = i['description']
                        if i.has_key('date'):
                            date = i['date']
                        if i.has_key('genre'):
                            genre = i['genre']
                        if i.has_key('credits'):
                            credits = i['credits']
                        addDir(i['title'].encode('utf-8'),i['url'].encode('utf-8'),1,thumb,fanart,desc,genre,date,credits,'source')
                except: traceback.print_exc()
        else:
            # A single source is opened in place instead of listed.
            if len(sources) == 1:
                if isinstance(sources[0], list):
                    getData(sources[0][1].encode('utf-8'),FANART)
                else:
                    getData(sources[0]['url'], sources[0]['fanart'])
    except: traceback.print_exc()
def addSource(url=None):
    """Add a playlist source to the saved source_file.

    With url=None the source is taken from the addon's 'new_file_source'
    or 'new_url_source' settings; otherwise *url* is added directly.
    Metadata is read from the playlist's <channels_info>/<items_info>
    block when present, else the user is prompted for a display name.
    """
    # NOTE(review): when url is None and BOTH settings are empty,
    # source_url is never bound and the next line raises NameError --
    # confirm whether that path can occur in practice.
    if url is None:
        if not addon.getSetting("new_file_source") == "":
            source_url = addon.getSetting('new_file_source').decode('utf-8')
        elif not addon.getSetting("new_url_source") == "":
            source_url = addon.getSetting('new_url_source').decode('utf-8')
    else:
        source_url = url
    if source_url == '' or source_url is None:
        return
    addon_log('Adding New Source: '+source_url.encode('utf-8'))
    media_info = None
    data = getSoup(source_url)
    # m3u playlists come back as plain strings; only XML soups can carry
    # an info block.
    if isinstance(data,BeautifulSOAP):
        if data.find('channels_info'):
            media_info = data.channels_info
        elif data.find('items_info'):
            media_info = data.items_info
    if media_info:
        # Copy whatever metadata tags the playlist provides.
        source_media = {}
        source_media['url'] = source_url
        try: source_media['title'] = media_info.title.string
        except: pass
        try: source_media['thumbnail'] = media_info.thumbnail.string
        except: pass
        try: source_media['fanart'] = media_info.fanart.string
        except: pass
        try: source_media['genre'] = media_info.genre.string
        except: pass
        try: source_media['description'] = media_info.description.string
        except: pass
        try: source_media['date'] = media_info.date.string
        except: pass
        try: source_media['credits'] = media_info.credits.string
        except: pass
    else:
        # No info block: derive a default name from the file name and let
        # the user edit it.
        if '/' in source_url:
            nameStr = source_url.split('/')[-1].split('.')[0]
        if '\\' in source_url:
            nameStr = source_url.split('\\')[-1].split('.')[0]
        if '%' in nameStr:
            nameStr = urllib.unquote_plus(nameStr)
        keyboard = xbmc.Keyboard(nameStr,'Displayed Name, Rename?')
        keyboard.doModal()
        if (keyboard.isConfirmed() == False):
            return
        newStr = keyboard.getText()
        if len(newStr) == 0:
            return
        source_media = {}
        source_media['title'] = newStr
        source_media['url'] = source_url
        # NOTE(review): lowercase 'fanart' is not defined at module scope in
        # this chunk (FANART is) -- confirm it is defined elsewhere or this
        # raises NameError.
        source_media['fanart'] = fanart
    # Append to (or create) the persisted JSON source list.
    if os.path.exists(source_file)==False:
        source_list = []
        source_list.append(source_media)
        b = open(source_file,"w")
        b.write(json.dumps(source_list))
        b.close()
    else:
        sources = json.loads(open(source_file,"r").read())
        sources.append(source_media)
        b = open(source_file,"w")
        b.write(json.dumps(sources))
        b.close()
    addon.setSetting('new_url_source', "")
    addon.setSetting('new_file_source', "")
    xbmc.executebuiltin("XBMC.Notification(Exabyte TV,Nueva fuente ,5000,"+icon+")")
    # When called with an explicit url, jump back to the originating list.
    if not url is None:
        if 'xbmcplus.xb.funpic.de' in url:
            xbmc.executebuiltin("XBMC.Container.Update(%s?mode=14,replace)" %sys.argv[0])
        elif 'community-links' in url:
            xbmc.executebuiltin("XBMC.Container.Update(%s?mode=10,replace)" %sys.argv[0])
    else: addon.openSettings()
def rmSource(name):
    """Delete the source titled *name* from the persisted source_file,
    rewrite the file, and refresh the Kodi container.

    Handles both storage formats: pre-1.0.8 [name, url] lists and the
    newer metadata dicts keyed by 'title'.
    """
    sources = json.loads(open(source_file,"r").read())
    for idx, src in enumerate(sources):
        title = src[0] if isinstance(src, list) else src['title']
        if title != name:
            continue
        del sources[idx]
        handle = open(source_file,"w")
        handle.write(json.dumps(sources))
        handle.close()
        break
    xbmc.executebuiltin("XBMC.Container.Refresh")
def get_xml_database(url, browse=False):
    """List an Apache-style directory index of community XML playlists.

    Directories recurse (mode 14/15), .xml files are added either for
    browsing (mode 1) or for saving as a source (mode 11), depending on
    *browse*.
    """
    if url is None:
        url = 'http://xbmcplus.xb.funpic.de/www-data/filesystem/'
    soup = BeautifulSoup(makeRequest(url), convertEntities=BeautifulSoup.HTML_ENTITIES)
    for i in soup('a'):
        href = i['href']
        # Skip the sort-order links of the index page.
        if not href.startswith('?'):
            name = i.string
            if name not in ['Parent Directory', 'recycle_bin/']:
                if href.endswith('/'):
                    # NOTE(review): lowercase 'fanart' is not defined at
                    # module scope in this chunk -- confirm it exists at
                    # call time.
                    if browse:
                        addDir(name,url+href,15,icon,fanart,'','','')
                    else:
                        addDir(name,url+href,14,icon,fanart,'','','')
                elif href.endswith('.xml'):
                    if browse:
                        addDir(name,url+href,1,icon,fanart,'','','','','download')
                    else:
                        if os.path.exists(source_file)==True:
                            # NOTE(review): SOURCES holds dicts, so a bare
                            # filename can never match -- the '(in use)'
                            # branch looks unreachable; confirm.
                            if name in SOURCES:
                                addDir(name+' (in use)',url+href,11,icon,fanart,'','','','','download')
                            else:
                                addDir(name,url+href,11,icon,fanart,'','','','','download')
                        else:
                            addDir(name,url+href,11,icon,fanart,'','','','','download')
def getCommunitySources(browse=False):
    """List the community playlist files hosted on Google Code.

    browse=True opens each file directly (mode 1); otherwise entries are
    added as saveable sources (mode 11).
    """
    base = 'http://community-links.googlecode.com/svn/trunk/'
    soup = BeautifulSoup(makeRequest(base), convertEntities=BeautifulSoup.HTML_ENTITIES)
    mode = 1 if browse else 11
    # First <li> is the parent-directory link; skip it.
    for entry in soup('ul')[0]('li')[1:]:
        fname = entry('a')[0]['href']
        addDir(fname,base+fname,mode,icon,fanart,'','','','','download')
def getSoup(url,data=None):
    """Fetch/read a playlist and return it parsed.

    Returns a BeautifulSOAP tree for XML playlists, or the raw text for
    m3u playlists.  HTTP(S) urls are downloaded (handling the
    $$TSDOWNLOADER$$ flag and optional $$LSProEncKey=..$$ AES key);
    otherwise *url* is treated as a local/smb/nfs file path.  Sets the
    module globals tsdownloader and viewmode as side effects.
    """
    global viewmode,tsdownloader
    tsdownloader=False
    if url.startswith('http://') or url.startswith('https://'):
        enckey=False
        # Strip the marker that tells parse_m3u to use the TS downloader.
        if '$$TSDOWNLOADER$$' in url:
            tsdownloader=True
            url=url.replace("$$TSDOWNLOADER$$","")
        # Strip an embedded AES key before requesting the real url.
        if '$$LSProEncKey=' in url:
            enckey=url.split('$$LSProEncKey=')[1].split('$$')[0]
            rp='$$LSProEncKey=%s$$'%enckey
            url=url.replace(rp,"")
        data =makeRequest(url)
        if enckey:
            # Payload is base64-encoded AES-ECB; key is zero-padded to 16
            # bytes and the plaintext is NUL-terminated.
            import pyaes
            enckey=enckey.encode("ascii")
            print enckey
            missingbytes=16-len(enckey)
            enckey=enckey+(chr(0)*(missingbytes))
            print repr(enckey)
            data=base64.b64decode(data)
            decryptor = pyaes.new(enckey , pyaes.MODE_ECB, IV=None)
            data=decryptor.decrypt(data).split('\0')[0]
        # m3u playlists are returned as raw text, not parsed as XML.
        if re.search("#EXTM3U",data) or 'm3u' in url:
            return data
    elif data == None:
        # Bare filenames are resolved against the community cache dir.
        if not '/' in url or not '\\' in url:
            url = os.path.join(communityfiles,url)
        if xbmcvfs.exists(url):
            if url.startswith("smb://") or url.startswith("nfs://"):
                # Network paths are copied to a temp file first because
                # plain open() cannot read smb/nfs.
                copy = xbmcvfs.copy(url, os.path.join(profile, 'temp', 'sorce_temp.txt'))
                if copy:
                    data = open(os.path.join(profile, 'temp', 'sorce_temp.txt'), "r").read()
                    xbmcvfs.delete(os.path.join(profile, 'temp', 'sorce_temp.txt'))
                else:
                    addon_log("failed to copy from smb:")
            else:
                data = open(url, 'r').read()
            if re.match("#EXTM3U",data)or 'm3u' in url:
                return data
        else:
            addon_log("Soup Data not found!")
            return
    # Optional <SetViewMode> tag switches the Kodi list view.
    if '<SetViewMode>' in data:
        try:
            viewmode=re.findall('<SetViewMode>(.*?)<',data)[0]
            xbmc.executebuiltin("Container.SetViewMode(%s)"%viewmode)
            print 'done setview',viewmode
        except: pass
    return BeautifulSOAP(data, convertEntities=BeautifulStoneSoup.XML_ENTITIES)
def getData(url,fanart, data=None):
    """Render the top level of a playlist.

    XML playlists with <channels> become a channel directory listing
    (unless the 'donotshowbychannels' setting is on); XML playlists with
    only <item>s go straight to getItems(); raw m3u text goes to
    parse_m3u().
    """
    soup = getSoup(url,data)
    if isinstance(soup,BeautifulSOAP):
        if len(soup('channels')) > 0 and addon.getSetting('donotshowbychannels') == 'false':
            channels = soup('channel')
            for channel in channels:
                # A single <externallink> makes the channel open another
                # playlist; multiple externallinks are treated as items.
                linkedUrl=''
                lcount=0
                try:
                    linkedUrl = channel('externallink')[0].string
                    lcount=len(channel('externallink'))
                except: pass
                if lcount>1: linkedUrl=''
                name = channel('name')[0].string
                thumbnail = channel('thumbnail')[0].string
                if thumbnail == None:
                    thumbnail = ''
                # Fanart fallback chain: channel tag -> thumbnail (when the
                # 'use_thumb' setting is on) -> caller-supplied fanart.
                try:
                    if not channel('fanart'):
                        if addon.getSetting('use_thumb') == "true":
                            fanArt = thumbnail
                        else:
                            fanArt = fanart
                    else:
                        fanArt = channel('fanart')[0].string
                    if fanArt == None:
                        raise
                except:
                    fanArt = fanart
                try:
                    desc = channel('info')[0].string
                    if desc == None:
                        raise
                except:
                    desc = ''
                try:
                    genre = channel('genre')[0].string
                    if genre == None:
                        raise
                except:
                    genre = ''
                try:
                    date = channel('date')[0].string
                    if date == None:
                        raise
                except:
                    date = ''
                try:
                    credits = channel('credits')[0].string
                    if credits == None:
                        raise
                except:
                    credits = ''
                try:
                    # Mode 2 drills into this channel; linked channels open
                    # the external playlist as a new source (mode 1).
                    if linkedUrl=='':
                        addDir(name.encode('utf-8', 'ignore'),url.encode('utf-8'),2,thumbnail,fanArt,desc,genre,date,credits,True)
                    else:
                        addDir(name.encode('utf-8'),linkedUrl.encode('utf-8'),1,thumbnail,fanArt,desc,genre,date,None,'source')
                except:
                    addon_log('There was a problem adding directory from getData(): '+name.encode('utf-8', 'ignore'))
        else:
            addon_log('No Channels: getItems')
            getItems(soup('item'),fanart)
    else:
        parse_m3u(soup)
def parse_m3u(data):
    """Render an m3u/m3u8 playlist as Kodi links.

    Each #EXTINF attribute blob is scanned for tvg-logo (thumbnail) and a
    custom type= attribute that reroutes the stream through yt-dl, a
    regex catcher, F.T.V, or the f4mTester TS downloader (when the
    module-global tsdownloader flag was set by getSoup()).
    """
    content = data.rstrip()
    match = re.compile(r'#EXTINF:(.+?),(.*?)[\n\r]+([^\r\n]+)').findall(content)
    total = len(match)
    print 'tsdownloader',tsdownloader
    for other,channel_name,stream_url in match:
        if 'tvg-logo' in other:
            thumbnail = re_me(other,'tvg-logo=[\'"](.*?)[\'"]')
            if thumbnail:
                # Relative logo paths are resolved against the configured
                # logo folder when one is set.
                if thumbnail.startswith('http'):
                    thumbnail = thumbnail
                elif not addon.getSetting('logo-folderPath') == "":
                    logo_url = addon.getSetting('logo-folderPath')
                    thumbnail = logo_url + thumbnail
                else:
                    thumbnail = thumbnail
        else:
            thumbnail = ''
        if 'type' in other:
            mode_type = re_me(other,'type=[\'"](.*?)[\'"]')
            if mode_type == 'yt-dl':
                stream_url = stream_url +"&mode=18"
            elif mode_type == 'regex':
                # NOTE(review): the '®exs=' literal looks like mojibake of
                # '&regexs=' ('&reg' decoded as the (R) sign) -- it must
                # match whatever the playlists actually contain; confirm.
                url = stream_url.split('®exs=')
                regexs = parse_regex(getSoup('',data=url[1]))
                addLink(url[0], channel_name,thumbnail,'','','','','',None,regexs,total)
                continue
            elif mode_type == 'ftv':
                stream_url = 'plugin://plugin.video.F.T.V/?name='+urllib.quote(channel_name) +'&url=' +stream_url +'&mode=125&ch_fanart=na'
            elif tsdownloader and '.ts' in stream_url:
                stream_url = 'plugin://plugin.video.f4mTester/?url='+urllib.quote_plus(stream_url)+'&streamtype=TSDOWNLOADER&name='+urllib.quote(channel_name)
        addLink(stream_url, channel_name,thumbnail,'','','','','',None,'',total)
def getChannelItems(name,url,fanart):
    """Render one <channel> of the playlist at *url*: its <subchannel>
    directories first (mode 3), then its <item> links."""
    soup = getSoup(url)
    channel_list = soup.find('channel', attrs={'name' : name.decode('utf-8')})
    items = channel_list('item')
    # Channel-level fanart falls back to the caller's fanart.
    try:
        fanArt = channel_list('fanart')[0].string
        if fanArt == None:
            raise
    except:
        fanArt = fanart
    for channel in channel_list('subchannel'):
        name = channel('name')[0].string
        try:
            thumbnail = channel('thumbnail')[0].string
            if thumbnail == None:
                raise
        except:
            thumbnail = ''
        try:
            if not channel('fanart'):
                if addon.getSetting('use_thumb') == "true":
                    fanArt = thumbnail
                else:
                    # NOTE(review): channel('fanart') is empty in this
                    # branch, so indexing [0] raises and the except keeps
                    # the previous fanArt -- confirm this is intended.
                    fanArt = channel('fanart')[0].string
                if fanArt == None:
                    raise
        except:
            pass
        try:
            desc = channel('info')[0].string
            if desc == None:
                raise
        except:
            desc = ''
        try:
            genre = channel('genre')[0].string
            if genre == None:
                raise
        except:
            genre = ''
        try:
            date = channel('date')[0].string
            if date == None:
                raise
        except:
            date = ''
        try:
            credits = channel('credits')[0].string
            if credits == None:
                raise
        except:
            credits = ''
        try:
            # NOTE(review): credits and date are passed in the opposite
            # order from getData()'s addDir calls -- confirm addDir's
            # parameter order.
            addDir(name.encode('utf-8', 'ignore'),url.encode('utf-8'),3,thumbnail,fanArt,desc,genre,credits,date)
        except:
            addon_log('There was a problem adding directory - '+name.encode('utf-8', 'ignore'))
    getItems(items,fanArt)
def getSubChannelItems(name,url,fanart):
    """Render the <subitem> entries of the named <subchannel> found in
    the playlist at *url*."""
    doc = getSoup(url)
    sub_channel = doc.find('subchannel', attrs={'name' : name.decode('utf-8')})
    getItems(sub_channel('subitem'),fanart)
def getItems(items,fanart,dontLink=False):
    """Render a list of playlist <item> elements as Kodi links/dirs.

    Each item's stream urls are collected from one of many source tags
    (<link>, <sportsdevil>, <p2p>, <vaughn>, <ilive>, <yt-dl>, <dm>,
    <dmlive>, <utube>, <imdb>, <f4m>, <ftv>, <urlsolve>), optionally
    augmented by EPG text, parental blocking, <regex> blocks, and
    <externallink>/<jsonrpc> redirections.  With dontLink=True the first
    item's (name, url, regexs) is returned instead of being listed.
    """
    total = len(items)
    addon_log('Total Items: %s' %total)
    add_playlist = addon.getSetting('add_playlist')
    ask_playlist_items =addon.getSetting('ask_playlist_items')
    use_thumb = addon.getSetting('use_thumb')
    parentalblock =addon.getSetting('parentalblocked')
    parentalblock= parentalblock=="true"
    for item in items:
        isXMLSource=False
        isJsonrpc = False
        applyblock='false'
        try:
            applyblock = item('parentalblock')[0].string
        except:
            addon_log('parentalblock Error')
            applyblock = ''
        # Hide blocked items while the parental lock is active.
        if applyblock=='true' and parentalblock: continue
        try:
            name = item('title')[0].string
            if name is None:
                name = 'unknown?'
        except:
            addon_log('Name Error')
            name = ''
        try:
            # Append EPG text to the title: either scraped via a regex
            # from <epg_url>, or taken from the <epg> tag itself.
            if item('epg'):
                if item.epg_url:
                    addon_log('Get EPG Regex')
                    epg_url = item.epg_url.string
                    epg_regex = item.epg_regex.string
                    epg_name = get_epg(epg_url, epg_regex)
                    if epg_name:
                        name += ' - ' + epg_name
                # NOTE(review): str > 1 comparison -- always true under
                # Python 2 for a non-None string; confirm intent.
                elif item('epg')[0].string > 1:
                    name += getepg(item('epg')[0].string)
                else:
                    pass
        except:
            addon_log('EPG Error')
        try:
            # Build the list of candidate stream urls from whichever
            # source tag this item uses.
            url = []
            if len(item('link')) >0:
                for i in item('link'):
                    if not i.string == None:
                        url.append(i.string)
            elif len(item('sportsdevil')) >0:
                for i in item('sportsdevil'):
                    if not i.string == None:
                        sportsdevil = 'plugin://plugin.video.SportsDevil/?mode=1&item=catcher%3dstreams%26url=' +i.string
                        referer = item('referer')[0].string
                        if referer:
                            sportsdevil = sportsdevil + '%26referer=' +referer
                        url.append(sportsdevil)
            elif len(item('p2p')) >0:
                for i in item('p2p'):
                    if not i.string == None:
                        # sop:// addresses use p2p-streams mode 2, others mode 1.
                        if 'sop://' in i.string:
                            sop = 'plugin://plugin.video.p2p-streams/?mode=2url='+i.string +'&' + 'name='+name
                            url.append(sop)
                        else:
                            p2p='plugin://plugin.video.p2p-streams/?mode=1&url='+i.string +'&' + 'name='+name
                            url.append(p2p)
            elif len(item('vaughn')) >0:
                for i in item('vaughn'):
                    if not i.string == None:
                        vaughn = 'plugin://plugin.stream.vaughnlive.tv/?mode=PlayLiveStream&channel='+i.string
                        url.append(vaughn)
            elif len(item('ilive')) >0:
                for i in item('ilive'):
                    if not i.string == None:
                        # NOTE(review): ilive is built but never appended to
                        # url, so <ilive> items fall into the empty-url
                        # check below -- looks like a missing
                        # url.append(ilive); confirm.
                        if not 'http' in i.string:
                            ilive = 'plugin://plugin.video.tbh.ilive/?url=http://www.streamlive.to/view/'+i.string+'&link=99&mode=iLivePlay'
                        else:
                            ilive = 'plugin://plugin.video.tbh.ilive/?url='+i.string+'&link=99&mode=iLivePlay'
            elif len(item('yt-dl')) >0:
                for i in item('yt-dl'):
                    if not i.string == None:
                        ytdl = i.string + '&mode=18'
                        url.append(ytdl)
            elif len(item('dm')) >0:
                for i in item('dm'):
                    if not i.string == None:
                        dm = "plugin://plugin.video.dailymotion_com/?mode=playVideo&url=" + i.string
                        url.append(dm)
            elif len(item('dmlive')) >0:
                for i in item('dmlive'):
                    if not i.string == None:
                        dm = "plugin://plugin.video.dailymotion_com/?mode=playLiveVideo&url=" + i.string
                        url.append(dm)
            elif len(item('utube')) >0:
                # Dispatch on the utube value's shape: search text, video
                # id, playlist id, channel id, or user name.
                for i in item('utube'):
                    if not i.string == None:
                        if ' ' in i.string :
                            utube = 'plugin://plugin.video.youtube/search/?q='+ urllib.quote_plus(i.string)
                            isJsonrpc=utube
                        elif len(i.string) == 11:
                            utube = 'plugin://plugin.video.youtube/play/?video_id='+ i.string
                        elif (i.string.startswith('PL') and not '&order=' in i.string) or i.string.startswith('UU'):
                            utube = 'plugin://plugin.video.youtube/play/?&order=default&playlist_id=' + i.string
                        elif i.string.startswith('PL') or i.string.startswith('UU'):
                            utube = 'plugin://plugin.video.youtube/play/?playlist_id=' + i.string
                        elif i.string.startswith('UC') and len(i.string) > 12:
                            utube = 'plugin://plugin.video.youtube/channel/' + i.string + '/'
                            isJsonrpc=utube
                        elif not i.string.startswith('UC') and not (i.string.startswith('PL')) :
                            utube = 'plugin://plugin.video.youtube/user/' + i.string + '/'
                            isJsonrpc=utube
                        url.append(utube)
            elif len(item('imdb')) >0:
                for i in item('imdb'):
                    if not i.string == None:
                        if addon.getSetting('genesisorpulsar') == '0':
                            imdb = 'plugin://plugin.video.genesis/?action=play&imdb='+i.string
                        else:
                            imdb = 'plugin://plugin.video.pulsar/movie/tt'+i.string+'/play'
                        url.append(imdb)
            elif len(item('f4m')) >0:
                for i in item('f4m'):
                    if not i.string == None:
                        # Pick the f4mTester stream type from the extension.
                        if '.f4m' in i.string:
                            f4m = 'plugin://plugin.video.f4mTester/?url='+urllib.quote_plus(i.string)
                        elif '.m3u8' in i.string:
                            f4m = 'plugin://plugin.video.f4mTester/?url='+urllib.quote_plus(i.string)+'&streamtype=HLS'
                        else:
                            f4m = 'plugin://plugin.video.f4mTester/?url='+urllib.quote_plus(i.string)+'&streamtype=SIMPLE'
                        url.append(f4m)
            elif len(item('ftv')) >0:
                for i in item('ftv'):
                    if not i.string == None:
                        ftv = 'plugin://plugin.video.F.T.V/?name='+urllib.quote(name) +'&url=' +i.string +'&mode=125&ch_fanart=na'
                        url.append(ftv)
            elif len(item('urlsolve')) >0:
                for i in item('urlsolve'):
                    if not i.string == None:
                        resolver = i.string +'&mode=19'
                        url.append(resolver)
            if len(url) < 1:
                raise
        except:
            addon_log('Error <link> element, Passing:'+name.encode('utf-8', 'ignore'))
            continue
        # <externallink>/<jsonrpc> turn the item into a directory that
        # opens another playlist / JSON-RPC listing.
        try:
            isXMLSource = item('externallink')[0].string
        except: pass
        if isXMLSource:
            ext_url=[isXMLSource]
            isXMLSource=True
        else:
            isXMLSource=False
        try:
            isJsonrpc = item('jsonrpc')[0].string
        except: pass
        if isJsonrpc:
            ext_url=[isJsonrpc]
            isJsonrpc=True
        else:
            isJsonrpc=False
        try:
            thumbnail = item('thumbnail')[0].string
            if thumbnail == None:
                raise
        except:
            thumbnail = ''
        # Fanart fallback chain: item tag -> thumbnail (when 'use_thumb'
        # is on) -> caller-supplied fanart.
        try:
            if not item('fanart'):
                if addon.getSetting('use_thumb') == "true":
                    fanArt = thumbnail
                else:
                    fanArt = fanart
            else:
                fanArt = item('fanart')[0].string
                if fanArt == None:
                    raise
        except:
            fanArt = fanart
        try:
            desc = item('info')[0].string
            if desc == None:
                raise
        except:
            desc = ''
        try:
            genre = item('genre')[0].string
            if genre == None:
                raise
        except:
            genre = ''
        try:
            date = item('date')[0].string
            if date == None:
                raise
        except:
            date = ''
        regexs = None
        if item('regex'):
            try:
                reg_item = item('regex')
                regexs = parse_regex(reg_item)
            except:
                pass
        try:
            if len(url) > 1:
                # Multiple urls: list each alternative separately, or
                # collect them into one playlist entry, per settings.
                alt = 0
                playlist = []
                for i in url:
                    if add_playlist == "false":
                        alt += 1
                        addLink(i,'%s) %s' %(alt, name.encode('utf-8', 'ignore')),thumbnail,fanArt,desc,genre,date,True,playlist,regexs,total)
                    elif add_playlist == "true" and ask_playlist_items == 'true':
                        if regexs:
                            playlist.append(i+'®exs='+regexs)
                        elif any(x in i for x in resolve_url) and i.startswith('http'):
                            playlist.append(i+'&mode=19')
                        else:
                            playlist.append(i)
                    else:
                        playlist.append(i)
                if len(playlist) > 1:
                    addLink('', name,thumbnail,fanArt,desc,genre,date,True,playlist,regexs,total)
            else:
                if dontLink:
                    return name,url[0],regexs
                if isXMLSource:
                    if not regexs == None: #<externallink> and <regex>
                        addDir(name.encode('utf-8'),ext_url[0].encode('utf-8'),1,thumbnail,fanart,desc,genre,date,None,'!!update',regexs,url[0].encode('utf-8'))
                    else:
                        addDir(name.encode('utf-8'),ext_url[0].encode('utf-8'),1,thumbnail,fanart,desc,genre,date,None,'source',None,None)
                elif isJsonrpc:
                    addDir(name.encode('utf-8'),ext_url[0],53,thumbnail,fanart,desc,genre,date,None,'source')
                else:
                    addLink(url[0],name.encode('utf-8', 'ignore'),thumbnail,fanArt,desc,genre,date,True,None,regexs,total)
        except:
            addon_log('There was a problem adding item - '+name.encode('utf-8', 'ignore'))
def parse_regex(reg_item):
    """Convert a list of <regex> soup elements into the url-quoted
    repr(dict) string consumed by getRegexParsed().

    Each element must carry <name> and <page>; every other child tag is
    optional and copied verbatim when present.  <expres> and <cookiejar>
    are normalised to '' when the tag exists but is empty.  Returns None
    on any parsing failure.
    """
    # Optional child tags, copied as-is when present (order matches the
    # original implementation; dict semantics are order-independent).
    optional_tags = (
        'referer', 'connection', 'notplayable', 'noredirect', 'origin',
        'accept', 'includeheaders', 'listrepeat', 'proxy', 'x-req',
        'x-addr', 'x-forward', 'agent', 'post', 'rawpost', 'htmlunescape',
        'readcookieonly', 'setcookie', 'appendcookie', 'ignorecache',
    )
    try:
        regexs = {}
        for i in reg_item:
            key = i('name')[0].string
            entry = {'name': key}
            regexs[key] = entry
            # <expres> may be present but empty; normalise to ''.
            try:
                entry['expres'] = i('expres')[0].string
                if not entry['expres']:
                    entry['expres'] = ''
            except:
                addon_log("Regex: -- No expres --")
            # <page> is mandatory -- a missing tag aborts via the outer except.
            entry['page'] = i('page')[0].string
            for tag in optional_tags:
                try:
                    entry[tag] = i(tag)[0].string
                except:
                    addon_log("Regex: -- No %s --" % tag)
            # <cookiejar> may be present but empty; normalise to ''.
            try:
                entry['cookiejar'] = i('cookiejar')[0].string
                if not entry['cookiejar']:
                    entry['cookiejar'] = ''
            except:
                addon_log("Regex: -- No cookiejar --")
        return urllib.quote(repr(regexs))
    except:
        # Fixed: the original logged 'regex Error: '+name here, but 'name'
        # is undefined in this function and raised NameError instead of
        # returning None.
        addon_log('regex Error in parse_regex')
        return None
def get_ustream(url):
    """Poll *url* until its playlist advertises variant streams.

    Re-fetches up to 50 times, two seconds apart.  Returns *url* once
    "EXT-X-STREAM-INF" appears; returns None when the body stops looking
    like an m3u playlist, when all attempts are exhausted, or on error.
    """
    try:
        attempts = 0
        while attempts < 50:
            attempts += 1
            body = getUrl(url)
            if "EXT-X-STREAM-INF" in body:
                return url
            if "EXTM3U" not in body:
                return
            xbmc.sleep(2000)
        return
    except:
        return
def getRegexParsed(regexs, url,cookieJar=None,forCookieJarOnly=False,recursiveCall=False,cachedPages={}, rawPost=False, cookie_jar_file=None):#0,1,2 = URL, regexOnly, CookieJarOnly
if not recursiveCall:
regexs = eval(urllib.unquote(regexs))
#cachedPages = {}
#print 'url',url
doRegexs = re.compile('\$doregex\[([^\]]*)\]').findall(url)
# print 'doRegexs',doRegexs,regexs
setresolved=True
for k in doRegexs:
if k in regexs:
#print 'processing ' ,k
m = regexs[k]
#print m
cookieJarParam=False
if 'cookiejar' in m: # so either create or reuse existing jar
#print 'cookiejar exists',m['cookiejar']
cookieJarParam=m['cookiejar']
if '$doregex' in cookieJarParam:
cookieJar=getRegexParsed(regexs, m['cookiejar'],cookieJar,True, True,cachedPages)
cookieJarParam=True
else:
cookieJarParam=True
#print 'm[cookiejar]',m['cookiejar'],cookieJar
if cookieJarParam:
if cookieJar==None:
#print 'create cookie jar'
cookie_jar_file=None
if 'open[' in m['cookiejar']:
cookie_jar_file=m['cookiejar'].split('open[')[1].split(']')[0]
# print 'cookieJar from file name',cookie_jar_file
cookieJar=getCookieJar(cookie_jar_file)
# print 'cookieJar from file',cookieJar
if cookie_jar_file:
saveCookieJar(cookieJar,cookie_jar_file)
#import cookielib
#cookieJar = cookielib.LWPCookieJar()
#print 'cookieJar new',cookieJar
elif 'save[' in m['cookiejar']:
cookie_jar_file=m['cookiejar'].split('save[')[1].split(']')[0]
complete_path=os.path.join(profile,cookie_jar_file)
# print 'complete_path',complete_path
saveCookieJar(cookieJar,cookie_jar_file)
if m['page'] and '$doregex' in m['page']:
pg=getRegexParsed(regexs, m['page'],cookieJar,recursiveCall=True,cachedPages=cachedPages)
if len(pg)==0:
pg='http://regexfailed'
m['page']=pg
if 'setcookie' in m and m['setcookie'] and '$doregex' in m['setcookie']:
m['setcookie']=getRegexParsed(regexs, m['setcookie'],cookieJar,recursiveCall=True,cachedPages=cachedPages)
if 'appendcookie' in m and m['appendcookie'] and '$doregex' in m['appendcookie']:
m['appendcookie']=getRegexParsed(regexs, m['appendcookie'],cookieJar,recursiveCall=True,cachedPages=cachedPages)
if 'post' in m and '$doregex' in m['post']:
m['post']=getRegexParsed(regexs, m['post'],cookieJar,recursiveCall=True,cachedPages=cachedPages)
# print 'post is now',m['post']
if 'rawpost' in m and '$doregex' in m['rawpost']:
m['rawpost']=getRegexParsed(regexs, m['rawpost'],cookieJar,recursiveCall=True,cachedPages=cachedPages,rawPost=True)
#print 'rawpost is now',m['rawpost']
if 'rawpost' in m and '$epoctime$' in m['rawpost']:
m['rawpost']=m['rawpost'].replace('$epoctime$',getEpocTime())
if 'rawpost' in m and '$epoctime2$' in m['rawpost']:
m['rawpost']=m['rawpost'].replace('$epoctime2$',getEpocTime2())
link=''
if m['page'] and m['page'] in cachedPages and not 'ignorecache' in m and forCookieJarOnly==False :
#print 'using cache page',m['page']
link = cachedPages[m['page']]
else:
if m['page'] and not m['page']=='' and m['page'].startswith('http'):
if '$epoctime$' in m['page']:
m['page']=m['page'].replace('$epoctime$',getEpocTime())
if '$epoctime2$' in m['page']:
m['page']=m['page'].replace('$epoctime2$',getEpocTime2())
#print 'Ingoring Cache',m['page']
page_split=m['page'].split('|')
pageUrl=page_split[0]
header_in_page=None
if len(page_split)>1:
header_in_page=page_split[1]
# if
# proxy = urllib2.ProxyHandler({ ('https' ? proxytouse[:5]=="https":"http") : proxytouse})
# opener = urllib2.build_opener(proxy)
# urllib2.install_opener(opener)
# import urllib2
# print 'urllib2.getproxies',urllib2.getproxies()
current_proxies=urllib2.ProxyHandler(urllib2.getproxies())
#print 'getting pageUrl',pageUrl
req = urllib2.Request(pageUrl)
if 'proxy' in m:
proxytouse= m['proxy']
# print 'proxytouse',proxytouse
# urllib2.getproxies= lambda: {}
if pageUrl[:5]=="https":
proxy = urllib2.ProxyHandler({ 'https' : proxytouse})
#req.set_proxy(proxytouse, 'https')
else:
proxy = urllib2.ProxyHandler({ 'http' : proxytouse})
#req.set_proxy(proxytouse, 'http')
opener = urllib2.build_opener(proxy)
urllib2.install_opener(opener)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:14.0) Gecko/20100101 Firefox/14.0.1')
proxytouse=None
if 'referer' in m:
req.add_header('Referer', m['referer'])
if 'accept' in m:
req.add_header('Accept', m['accept'])
if 'agent' in m:
req.add_header('User-agent', m['agent'])
if 'x-req' in m:
req.add_header('X-Requested-With', m['x-req'])
if 'x-addr' in m:
req.add_header('x-addr', m['x-addr'])
if 'x-forward' in m:
req.add_header('X-Forwarded-For', m['x-forward'])
if 'setcookie' in m:
# print 'adding cookie',m['setcookie']
req.add_header('Cookie', m['setcookie'])
if 'appendcookie' in m:
# print 'appending cookie to cookiejar',m['appendcookie']
cookiestoApend=m['appendcookie']
cookiestoApend=cookiestoApend.split(';')
for h in cookiestoApend:
n,v=h.split('=')
w,n= n.split(':')
ck = cookielib.Cookie(version=0, name=n, value=v, port=None, port_specified=False, domain=w, domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
cookieJar.set_cookie(ck)
if 'origin' in m:
req.add_header('Origin', m['origin'])
if header_in_page:
header_in_page=header_in_page.split('&')
for h in header_in_page:
n,v=h.split('=')
req.add_header(n,v)
if not cookieJar==None:
# print 'cookieJarVal',cookieJar
cookie_handler = urllib2.HTTPCookieProcessor(cookieJar)
opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
opener = urllib2.install_opener(opener)
# print 'noredirect','noredirect' in m
if 'noredirect' in m:
opener = urllib2.build_opener(cookie_handler,NoRedirection, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
opener = urllib2.install_opener(opener)
elif 'noredirect' in m:
opener = urllib2.build_opener(NoRedirection, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
opener = urllib2.install_opener(opener)
if 'connection' in m:
# print '..........................connection//////.',m['connection']
from keepalive import HTTPHandler
keepalive_handler = HTTPHandler()
opener = urllib2.build_opener(keepalive_handler)
urllib2.install_opener(opener)
#print 'after cookie jar'
post=None
if 'post' in m:
postData=m['post']
#if '$LiveStreamRecaptcha' in postData:
# (captcha_challenge,catpcha_word,idfield)=processRecaptcha(m['page'],cookieJar)
# if captcha_challenge:
# postData=postData.replace('$LiveStreamRecaptcha','manual_recaptcha_challenge_field:'+captcha_challenge+',recaptcha_response_field:'+catpcha_word+',id:'+idfield)
splitpost=postData.split(',');
post={}
for p in splitpost:
n=p.split(':')[0];
v=p.split(':')[1];
post[n]=v
post = urllib.urlencode(post)
if 'rawpost' in m:
post=m['rawpost']
#if '$LiveStreamRecaptcha' in post:
# (captcha_challenge,catpcha_word,idfield)=processRecaptcha(m['page'],cookieJar)
# if captcha_challenge:
# post=post.replace('$LiveStreamRecaptcha','&manual_recaptcha_challenge_field='+captcha_challenge+'&recaptcha_response_field='+catpcha_word+'&id='+idfield)
link=''
try:
if post:
response = urllib2.urlopen(req,post)
else:
response = urllib2.urlopen(req)
if response.info().get('Content-Encoding') == 'gzip':
from StringIO import StringIO
import gzip
buf = StringIO( response.read())
f = gzip.GzipFile(fileobj=buf)
link = f.read()
else:
link=response.read()
if 'proxy' in m and not current_proxies is None:
urllib2.install_opener(urllib2.build_opener(current_proxies))
link=javascriptUnEscape(link)
#print repr(link)
#print link This just print whole webpage in LOG
if 'includeheaders' in m:
#link+=str(response.headers.get('Set-Cookie'))
link+='$$HEADERS_START$$:'
for b in response.headers:
link+= b+':'+response.headers.get(b)+'\n'
link+='$$HEADERS_END$$:'
# print link
addon_log(link)
addon_log(cookieJar )
response.close()
except:
pass
cachedPages[m['page']] = link
#print link
#print 'store link for',m['page'],forCookieJarOnly
if forCookieJarOnly:
return cookieJar# do nothing
elif m['page'] and not m['page'].startswith('http'):
if m['page'].startswith('$pyFunction:'):
val=doEval(m['page'].split('$pyFunction:')[1],'',cookieJar,m )
if forCookieJarOnly:
return cookieJar# do nothing
link=val
link=javascriptUnEscape(link)
else:
link=m['page']
if '$pyFunction:playmedia(' in m['expres'] or 'ActivateWindow' in m['expres'] or '$PLAYERPROXY$=' in url or any(x in url for x in g_ignoreSetResolved):
setresolved=False
if '$doregex' in m['expres']:
m['expres']=getRegexParsed(regexs, m['expres'],cookieJar,recursiveCall=True,cachedPages=cachedPages)
if not m['expres']=='':
#print 'doing it ',m['expres']
if '$LiveStreamCaptcha' in m['expres']:
val=askCaptcha(m,link,cookieJar)
#print 'url and val',url,val
url = url.replace("$doregex[" + k + "]", val)
elif m['expres'].startswith('$pyFunction:') or '#$pyFunction' in m['expres']:
#print 'expres',m['expres']
val=''
if m['expres'].startswith('$pyFunction:'):
val=doEval(m['expres'].split('$pyFunction:')[1],link,cookieJar,m)
else:
val=doEvalFunction(m['expres'],link,cookieJar,m)
if 'ActivateWindow' in m['expres']: return
# print 'url k val',url,k,val
#print 'repr',repr(val)
try:
url = url.replace(u"$doregex[" + k + "]", val)
except: url = url.replace("$doregex[" + k + "]", val.decode("utf-8"))
else:
if 'listrepeat' in m:
listrepeat=m['listrepeat']
ret=re.findall(m['expres'],link)
return listrepeat,ret, m,regexs
val=''
if not link=='':
#print 'link',link
reg = re.compile(m['expres']).search(link)
try:
val=reg.group(1).strip()
except: traceback.print_exc()
if m['page']=='':
val=m['expres']
if rawPost:
# print 'rawpost'
val=urllib.quote_plus(val)
if 'htmlunescape' in m:
#val=urllib.unquote_plus(val)
import HTMLParser
val=HTMLParser.HTMLParser().unescape(val)
try:
url = url.replace("$doregex[" + k + "]", val)
except: url = url.replace("$doregex[" + k + "]", val.decode("utf-8"))
#print 'ur',url
#return val
else:
url = url.replace("$doregex[" + k + "]",'')
if '$epoctime$' in url:
url=url.replace('$epoctime$',getEpocTime())
if '$epoctime2$' in url:
url=url.replace('$epoctime2$',getEpocTime2())
if '$GUID$' in url:
import uuid
url=url.replace('$GUID$',str(uuid.uuid1()).upper())
if '$get_cookies$' in url:
url=url.replace('$get_cookies$',getCookiesString(cookieJar))
if recursiveCall: return url
#print 'final url',repr(url)
if url=="":
return
else:
return url,setresolved
def getmd5(t):
    """Return the hex MD5 digest of *t*.

    Accepts text or bytes; text is UTF-8 encoded first, because hashlib
    requires bytes on Python 3 (on Python 2 a byte-str passes straight
    through, so existing callers are unaffected).
    """
    import hashlib
    h = hashlib.md5()
    if isinstance(t, bytes):
        h.update(t)
    else:
        h.update(t.encode('utf-8'))
    return h.hexdigest()
def decrypt_vaughnlive(encrypted):
    """Placeholder for the retired vaughnlive stream-name decoder.

    The original per-character decode (split on ':' and strip the "0m0"
    padding from each token) is disabled upstream; the function currently
    decodes nothing and returns None.
    """
    return None
def playmedia(media_url):
    # Play media_url with the add-on's CustomPlayer and block until playback
    # ends.  Uses the module-level global `name` for the list-item label.
    # Always returns '' (exceptions are only logged).
    try:
        import CustomPlayer
        player = CustomPlayer.MyXBMCPlayer()
        listitem = xbmcgui.ListItem( label = str(name), iconImage = "DefaultVideo.png", thumbnailImage = xbmc.getInfoImage( "ListItem.Thumb" ), path=media_url )
        player.play( media_url,listitem)
        xbmc.sleep(1000)
        # poll until the custom player reports playback finished
        while player.is_active:
            xbmc.sleep(200)
    except:
        traceback.print_exc()
    return ''
def kodiJsonRequest(params):
    """Execute a JSON-RPC call against Kodi and return its 'result' payload.

    params: dict holding the JSON-RPC request (method, params, id, ...).
    Returns the decoded 'result' member, or None when the response has no
    result; an 'error' response is logged before returning None.
    """
    data = json.dumps(params)
    request = xbmc.executeJSONRPC(data)
    try:
        response = json.loads(request)
    except UnicodeDecodeError:
        # Kodi occasionally returns non-UTF8 bytes; drop the bad characters.
        response = json.loads(request.decode('utf-8', 'ignore'))

    if 'result' in response:
        return response['result']
    if 'error' in response:
        # Bug fix: this logging used to live in an `except KeyError` handler
        # guarding a block that could never raise KeyError, so RPC errors
        # were silently swallowed.
        logger.warn("[%s] %s" % (params['method'], response['error']['message']))
    return None
def setKodiProxy(proxysettings=None):
    """Apply or clear Kodi's global HTTP proxy via JSON-RPC.

    proxysettings: None disables the proxy; otherwise a string of the form
    'server:port:type[:user@pass]' — the same format produced by
    getConfiguredProxy().
    """
    if proxysettings==None:
        # no settings supplied -> switch the proxy off entirely
        xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.usehttpproxy", "value":false}, "id":1}')
    else:
        ps=proxysettings.split(':')
        proxyURL=ps[0]
        proxyPort=ps[1]
        proxyType=ps[2]
        proxyUsername=None
        proxyPassword=None
        # optional 4th field carries credentials as user@pass
        if len(ps)>3 and '@' in ps[3]: #jairox ###proxysettings
            proxyUsername=ps[3].split('@')[0] #jairox ###ps[3]
            proxyPassword=ps[3].split('@')[1] #jairox ###proxysettings.split('@')[-1]
        # enable the proxy first, then push each individual setting
        xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.usehttpproxy", "value":true}, "id":1}')
        xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.httpproxytype", "value":' + str(proxyType) +'}, "id":1}')
        xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.httpproxyserver", "value":"' + str(proxyURL) +'"}, "id":1}')
        xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.httpproxyport", "value":' + str(proxyPort) +'}, "id":1}')
        if not proxyUsername==None:
            xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.httpproxyusername", "value":"' + str(proxyUsername) +'"}, "id":1}')
            xbmc.executeJSONRPC('{"jsonrpc":"2.0", "method":"Settings.SetSettingValue", "params":{"setting":"network.httpproxypassword", "value":"' + str(proxyPassword) +'"}, "id":1}')
def getConfiguredProxy():
    """Read Kodi's currently configured HTTP proxy via JSON-RPC.

    Returns 'server:port:type[:user@pass]' (the format setKodiProxy()
    parses) when a proxy is active, otherwise None.
    NOTE(review): when the proxy is active but server/port are empty this
    falls through and implicitly returns None as well.
    """
    proxyActive = kodiJsonRequest({'jsonrpc': '2.0', "method":"Settings.GetSettingValue", "params":{"setting":"network.usehttpproxy"}, 'id': 1})['value']
    proxyType = kodiJsonRequest({'jsonrpc': '2.0', "method":"Settings.GetSettingValue", "params":{"setting":"network.httpproxytype"}, 'id': 1})['value']
    if proxyActive: # PROXY_HTTP
        proxyURL = kodiJsonRequest({'jsonrpc': '2.0', "method":"Settings.GetSettingValue", "params":{"setting":"network.httpproxyserver"}, 'id': 1})['value']
        proxyPort = unicode(kodiJsonRequest({'jsonrpc': '2.0', "method":"Settings.GetSettingValue", "params":{"setting":"network.httpproxyport"}, 'id': 1})['value'])
        proxyUsername = kodiJsonRequest({'jsonrpc': '2.0', "method":"Settings.GetSettingValue", "params":{"setting":"network.httpproxyusername"}, 'id': 1})['value']
        proxyPassword = kodiJsonRequest({'jsonrpc': '2.0', "method":"Settings.GetSettingValue", "params":{"setting":"network.httpproxypassword"}, 'id': 1})['value']
        # credentials are only included when both are present
        if proxyUsername and proxyPassword and proxyURL and proxyPort:
            return proxyURL + ':' + str(proxyPort)+':'+str(proxyType) + ':' + proxyUsername + '@' + proxyPassword
        elif proxyURL and proxyPort:
            return proxyURL + ':' + str(proxyPort)+':'+str(proxyType)
    else:
        return None
def playmediawithproxy(media_url, name, iconImage,proxyip,port, proxyuser=None, proxypass=None): #jairox
    """Temporarily route Kodi through the given HTTP proxy, play media_url,
    then restore whatever proxy was configured before.

    proxyip/port        proxy to use for the duration of playback
    proxyuser/proxypass optional proxy credentials
    Always returns ''.
    """
    progress = xbmcgui.DialogProgress()
    progress.create('Progress', 'Playing with custom proxy')
    progress.update( 10, "", "setting proxy..", "" )
    proxyset=False
    existing_proxy=''
    try:
        # remember the current proxy so it can be restored in the cleanup below
        existing_proxy=getConfiguredProxy()
        print 'existing_proxy',existing_proxy
        #read and set here
        #jairox
        if not proxyuser == None:
            setKodiProxy( proxyip + ':' + port + ':0:' + proxyuser + '@' + proxypass)
        else:
            setKodiProxy( proxyip + ':' + port + ':0')
        proxyset=True
        progress.update( 80, "", "setting proxy complete, now playing", "" )
        progress.close()
        progress=None
        import CustomPlayer
        player = CustomPlayer.MyXBMCPlayer()
        listitem = xbmcgui.ListItem( label = str(name), iconImage = iconImage, thumbnailImage = xbmc.getInfoImage( "ListItem.Thumb" ), path=media_url )
        player.play( media_url,listitem)
        xbmc.sleep(1000)
        # block until the player reports playback finished
        while player.is_active:
            xbmc.sleep(200)
    except:
        traceback.print_exc()
    if progress:
        progress.close()
    if proxyset:
        # restore the proxy that was active before this call
        setKodiProxy(existing_proxy)
    return ''
def get_saw_rtmp(page_value, referer=None):
    """Extract the rtmp stream for a sawlive-style embed page.

    page_value: page HTML, or an http(s) URL that is fetched first.
    referer: optional Referer header for the fetches.
    Returns 'rtmp... playpath=... pageUrl=...'.
    NOTE(review): if page_value is raw HTML containing no packed JS,
    page_url is never assigned and the final line raises NameError.
    """
    if referer:
        referer=[('Referer',referer)]
    if page_value.startswith("http"):
        page_url=page_value
        page_value= getUrl(page_value,headers=referer)
    # find every p,a,c,k,e,d-packed script on the page
    str_pattern="(eval\(function\(p,a,c,k,e,(?:r|d).*)"
    reg_res=re.compile(str_pattern).findall(page_value)
    r=""
    if reg_res and len(reg_res)>0:
        for v in reg_res:
            r1=get_unpacked(v)
            r2=re_me(r1,'\'(.*?)\'')
            if 'unescape' in r1:
                r1=urllib.unquote(r2)
            r+=r1+'\n'
        # the unpacked JS reveals the real iframe; fetch it
        page_url=re_me(r,'src="(.*?)"')
        page_value= getUrl(page_url,headers=referer)
    # pull the streamer URL and playpath out of the final player page
    rtmp=re_me(page_value,'streamer\'.*?\'(.*?)\'\)')
    playpath=re_me(page_value,'file\',\s\'(.*?)\'')
    return rtmp+' playpath='+playpath +' pageUrl='+page_url
def get_leton_rtmp(page_value, referer=None):
    """Build the rtmp:// URL hidden in a leton.tv-style page.

    page_value: page HTML, or an http(s) URL that is fetched first.
    referer: optional Referer header used for the fetch.

    The page defines obfuscated ints a..d and a divisor f; the stream host
    is (a/f).(b/f).(c/f).(d/f) followed by the literal v_part path.
    """
    if referer:
        referer = [('Referer', referer)]
    if page_value.startswith("http"):
        page_value = getUrl(page_value, headers=referer)
    str_pattern = "var a = (.*?);\s*var b = (.*?);\s*var c = (.*?);\s*var d = (.*?);\s*var f = (.*?);\s*var v_part = '(.*?)';"
    a, b, c, d, f, v = re.compile(str_pattern).findall(page_value)[0]
    f = int(f)
    # Use floor division: identical to '/' under Python 2 int semantics,
    # but avoids float octets like '50.0' under Python 3.
    a = int(a) // f
    b = int(b) // f
    c = int(c) // f
    d = int(d) // f
    return 'rtmp://' + str(a) + '.' + str(b) + '.' + str(c) + '.' + str(d) + v
def createM3uForDash(url,useragent=None):
    """Write a minimal single-variant M3U playlist wrapping *url* and return
    the path of the written file (<profile>/testfile.m3u).

    The local buffer was previously named `str`, shadowing the builtin.
    """
    m3u_content='#EXTM3U'
    m3u_content+='\n#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=361816'
    m3u_content+='\n'+url+'&bytes=0-200000'#+'|User-Agent='+useragent
    source_file = os.path.join(profile, 'testfile.m3u')
    m3u_content+='\n'
    SaveToFile(source_file,m3u_content)
    return source_file
def SaveToFile(file_name, page_data, append=False):
    """Write page_data to file_name.

    append=True appends in text mode; otherwise the file is (re)created in
    binary mode — matching the original open-mode choices.  Returns ''.
    """
    mode = 'a' if append else 'wb'
    with open(file_name, mode) as out:
        out.write(page_data)
    return ''
def LoadFile(file_name):
    """Return the entire contents of file_name, read in binary mode."""
    with open(file_name, 'rb') as src:
        return src.read()
def get_packed_iphonetv_url(page_data):
    """Peel the nested geh("...")/lol("...") base64+urlquote wrapping used by
    the iphone-tv embeds and return the innermost decoded string."""
    import re,base64,urllib;
    s=page_data
    # keep unwrapping while another geh( layer remains
    while 'geh(' in s:
        if s.startswith('lol('): s=s[5:-1]
        # each layer: quoted payload -> base64 decode -> url-unquote
        s=re.compile('"(.*?)"').findall(s)[0];
        s= base64.b64decode(s);
        s=urllib.unquote(s);
    print s
    return s
def get_ferrari_url(page_data):
    """Warm up a ferrari stream: fetch the playlist at page_data, hit every
    http link in it with a fresh X-Playback-Session-Id, and return the
    original URL with that session id appended as a header hint."""
    page_data2=getUrl(page_data);
    patt='(http.*)'
    import uuid
    # one session id shared by the warm-up requests and the returned URL
    playback=str(uuid.uuid1()).upper()
    links=re.compile(patt).findall(page_data2)
    headers=[('X-Playback-Session-Id',playback)]
    for l in links:
        try:
            page_datatemp=getUrl(l,headers=headers);
        except: pass  # warm-up fetches are best-effort
    return page_data+'|&X-Playback-Session-Id='+playback
def get_dag_url(page_data):
    """Resolve a dag.total-stream.net redirector page/URL to its final
    stream URL (delegating loopback responses to revist_dag)."""
    if page_data.startswith('http://dag.total-stream.net'):
        # the redirector only answers to this specific player user-agent
        headers=[('User-Agent','Verismo-BlackUI_(2.4.7.5.8.0.34)')]
        page_data=getUrl(page_data,headers=headers);
    if '127.0.0.1' in page_data:
        return revist_dag(page_data)
    elif re_me(page_data, 'wmsAuthSign%3D([^%&]+)') != '':
        # wms-auth style response: rebuild server?wmsAuthSign==/mp4:stream
        final_url = re_me(page_data, '&ver_t=([^&]+)&') + '?wmsAuthSign=' + re_me(page_data, 'wmsAuthSign%3D([^%&]+)') + '==/mp4:' + re_me(page_data, '\\?y=([^&]+)&')
    else:
        final_url = re_me(page_data, 'href="([^"]+)"[^"]+$')
    if len(final_url)==0:
        # nothing matched: assume page_data already was the stream URL
        final_url=page_data
    final_url = final_url.replace(' ', '%20')
    return final_url
def re_me(data, re_patten):
    """Return capture group 1 of the first match of re_patten in data,
    or '' when the pattern does not match."""
    found = re.search(re_patten, data)
    return found.group(1) if found is not None else ''
def revist_dag(page_data):
    """Second-stage resolver for dag redirector responses that point at
    127.0.0.1; reassembles the real rtmp/wms URL from query fragments."""
    final_url = ''
    if '127.0.0.1' in page_data:
        # loopback answer: server is in ver_t, playpath in y, optional token
        final_url = re_me(page_data, '&ver_t=([^&]+)&') + ' live=true timeout=15 playpath=' + re_me(page_data, '\\?y=([a-zA-Z0-9-_\\.@]+)')
        if re_me(page_data, 'token=([^&]+)&') != '':
            final_url = final_url + '?token=' + re_me(page_data, 'token=([^&]+)&')
    elif re_me(page_data, 'wmsAuthSign%3D([^%&]+)') != '':
        final_url = re_me(page_data, '&ver_t=([^&]+)&') + '?wmsAuthSign=' + re_me(page_data, 'wmsAuthSign%3D([^%&]+)') + '==/mp4:' + re_me(page_data, '\\?y=([^&]+)&')
    else:
        final_url = re_me(page_data, 'HREF="([^"]+)"')
        if 'dag1.asx' in final_url:
            # nested redirector: resolve it from the top again
            return get_dag_url(final_url)
    # devinlive/permlive hosts are rewritten to the flive CDN unless the
    # fplive.net form is already present
    if 'devinlivefs.fplive.net' not in final_url:
        final_url = final_url.replace('devinlive', 'flive')
    if 'permlivefs.fplive.net' not in final_url:
        final_url = final_url.replace('permlive', 'flive')
    return final_url
def get_unwise( str_eval):
    """Decode one 'wise' obfuscation round.

    str_eval is the argument list of a JS eval(function(w,i,s,e){...})
    call, i.e. four comma-separated quoted strings; they are bound to
    w,i,s,e via exec and fed to unwise_func.  Returns '' on any failure.
    SECURITY NOTE: exec runs text taken from a scraped page.
    """
    page_value=""
    try:
        ss="w,i,s,e=("+str_eval+')'
        exec (ss)
        page_value=unwise_func(w,i,s,e)
    except: traceback.print_exc(file=sys.stdout)
    return page_value
def unwise_func( w, i, s, e):
    """Python port of the JS 'wise' deobfuscator.

    The first five chars of each of w, i, s feed a key stream (l1lI); the
    remainders are interleaved into a payload (ll1l).  Each payload pair is
    a base-36 number shifted by +/-1 depending on the parity of the next
    key char.  Recurses through get_unwise while another wrapped
    eval(function(w,i,s,e)...) layer remains.
    """
    lIll = 0;
    ll1I = 0;
    Il1l = 0;
    ll1l = [];
    l1lI = [];
    while True:
        if (lIll < 5):
            l1lI.append(w[lIll])
        elif (lIll < len(w)):
            ll1l.append(w[lIll]);
        lIll+=1;
        if (ll1I < 5):
            l1lI.append(i[ll1I])
        elif (ll1I < len(i)):
            ll1l.append(i[ll1I])
        ll1I+=1;
        if (Il1l < 5):
            l1lI.append(s[Il1l])
        elif (Il1l < len(s)):
            ll1l.append(s[Il1l]);
        Il1l+=1;
        # stop once every char of w, i and s has been distributed
        if (len(w) + len(i) + len(s) + len(e) == len(ll1l) + len(l1lI) + len(e)):
            break;
    lI1l = ''.join(ll1l)#.join('');
    I1lI = ''.join(l1lI)#.join('');
    ll1I = 0;
    l1ll = [];
    # decode payload two chars at a time, cycling through the key stream
    for lIll in range(0,len(ll1l),2):
        ll11 = -1;
        if ( ord(I1lI[ll1I]) % 2):
            ll11 = 1;
        l1ll.append(chr( int(lI1l[lIll: lIll+2], 36) - ll11));
        ll1I+=1;
        if (ll1I >= len(l1lI)):
            ll1I = 0;
    ret=''.join(l1ll)
    if 'eval(function(w,i,s,e)' in ret:
        # another obfuscation layer: extract its argument list and recurse
        ret=re.compile('eval\(function\(w,i,s,e\).*}\((.*?)\)').findall(ret)[0]
        return get_unwise(ret)
    else:
        return ret
def get_unpacked( page_value, regex_for_text='', iterations=1, total_iteration=1):
    """Fetch (if page_value is a URL), optionally narrow with
    regex_for_text, and run the p,a,c,k,e,d unpacker on the result.

    Returns the unpacked JS, 'NOTPACKED' when regex_for_text does not
    match, or 'UNPACKEDFAILED' when unpack() raises.
    """
    try:
        reg_data=None
        if page_value.startswith("http"):
            page_value= getUrl(page_value)
        if regex_for_text and len(regex_for_text)>0:
            try:
                page_value=re.compile(regex_for_text).findall(page_value)[0] #get the js variable
            except: return 'NOTPACKED'
        page_value=unpack(page_value,iterations,total_iteration)
    except:
        page_value='UNPACKEDFAILED'
        traceback.print_exc(file=sys.stdout)
    # quick fix for a typo'd hostname that appears in some unpacked pages
    if 'sav1live.tv' in page_value:
        page_value=page_value.replace('sav1live.tv','sawlive.tv') #quick fix some bug somewhere
    return page_value
def unpack(sJavascript,iteration=1, totaliterations=2 ):
    """Unpack Dean-Edwards-style eval(function(p,a,c,k,e,d)...) JS.

    Splits out the packed payload p, radix a, count c and keyword table k,
    delegates to __unpack, and recurses until totaliterations rounds have
    been unpacked.  SECURITY NOTE: exec runs strings sliced from scraped
    pages; it also writes a debug 'temp file<N>.js' in one branch.
    """
    if sJavascript.startswith('var _0xcb8a='):
        # variant where the table is a JS array literal before eval(
        aSplit=sJavascript.split('var _0xcb8a=')
        ss="myarray="+aSplit[1].split("eval(")[0]
        exec(ss)
        a1=62
        c1=int(aSplit[1].split(",62,")[1].split(',')[0])
        p1=myarray[0]
        k1=myarray[3]
        with open('temp file'+str(iteration)+'.js', "wb") as filewriter:
            filewriter.write(str(k1))
    else:
        # classic packer: payload starts right after "return p}('"
        if "rn p}('" in sJavascript:
            aSplit = sJavascript.split("rn p}('")
        else:
            aSplit = sJavascript.split("rn A}('")
        p1,a1,c1,k1=('','0','0','')
        # binds p1 (payload), a1 (radix), c1 (count), k1 (keyword string)
        ss="p1,a1,c1,k1=('"+aSplit[1].split(".spli")[0]+')'
        exec(ss)
        k1=k1.split('|')
        aSplit = aSplit[1].split("))'")
    e = ''
    d = ''#32823
    sUnpacked1 = str(__unpack(p1, a1, c1, k1, e, d,iteration))
    if iteration>=totaliterations:
        return sUnpacked1#.replace('\\\\', '\\')
    else:
        # the unpacked text may itself be packed again
        return unpack(sUnpacked1,iteration+1)#.replace('\\', ''),iteration)#.replace('\\', '');#unpack(sUnpacked.replace('\\', ''))
def __unpack(p, a, c, k, e, d, iteration,v=1):
    """Substitute each base-a token (via __itoaNew) in payload p with its
    keyword k[c], walking the table from the end.  v==1 uses a regex
    whole-word replace; any other v uses the faster findAndReplaceWord."""
    while (c >= 1):
        c = c -1
        if (k[c]):
            aa=str(__itoaNew(c, a))
            if v==1:
                p=re.sub('\\b' + aa +'\\b', k[c], p)# THIS IS Bloody slow!
            else:
                p=findAndReplaceWord(p,aa,k[c])
    return p
#
#function equalavent to re.sub('\\b' + aa +'\\b', k[c], p)
def findAndReplaceWord(source_str, word_to_find, replace_with):
    """Whole-word replacement, a faster stand-in for
    re.sub('\\b' + word + '\\b', replace_with, source_str).

    A split-point counts as a word boundary when the char before it (if
    any) is a non-word char AND the right side starts with a non-word char
    (or, when nothing follows, the word itself starts with one).
    """
    WORD_CHARS = 'abcdefghijklmnopqrstuvwxyz1234567890_'

    pieces = source_str.split(word_to_find)
    if len(pieces) <= 1:
        return source_str

    def _right_ok(following):
        # boundary on the right of the removed word
        if len(following) == 0:
            return word_to_find[0].lower() not in WORD_CHARS
        return following[0].lower() not in WORD_CHARS

    rebuilt = []
    last = len(pieces) - 1
    for idx, piece in enumerate(pieces):
        rebuilt.append(piece)
        if idx == last:
            continue  # nothing is re-inserted after the final fragment
        left_ok = (len(piece) == 0) or (piece[-1].lower() not in WORD_CHARS)
        if left_ok and _right_ok(pieces[idx + 1]):
            rebuilt.append(replace_with)
        else:
            rebuilt.append(word_to_find)  # not a whole word: keep original
    return ''.join(rebuilt)
def __itoa(num, radix):
    """Convert a non-negative int to its string form in the given base
    (digits 0-9a-z, so radix up to 36).

    Uses floor division: identical to '/' under Python 2's int division,
    but '/' would produce floats (and a TypeError on indexing) under
    Python 3.
    """
    if num == 0:
        return '0'
    digits = "0123456789abcdefghijklmnopqrstuvwxyz"
    result = ""
    while num > 0:
        result = digits[num % radix] + result
        num //= radix
    return result
def __itoaNew(cc, a):
    """Render index cc in base a for the JS packer's keyword encoding:
    digits above 35 become chr(code + 29), lower digits use __itoa base-36."""
    higher_digits = "" if cc < a else __itoaNew(int(cc / a), a)
    low = cc % a
    low_digit = chr(low + 29) if low > 35 else str(__itoa(low, 36))
    return higher_digits + low_digit
def getCookiesString(cookieJar):
    """Serialise a cookie jar as 'name=value;name=value;...'.

    Iteration errors are swallowed; whatever was collected so far is
    returned (possibly '').
    """
    pieces = []
    try:
        for ck in cookieJar:
            pieces.append(ck.name + "=" + ck.value + ";")
    except: pass
    return "".join(pieces)
def saveCookieJar(cookieJar,COOKIEFILE):
    # Persist cookieJar to <profile>/COOKIEFILE in LWP format, keeping
    # session cookies (ignore_discard).  Failures are deliberately silent:
    # cookie persistence is best-effort.
    try:
        complete_path=os.path.join(profile,COOKIEFILE)
        cookieJar.save(complete_path,ignore_discard=True)
    except: pass
def getCookieJar(COOKIEFILE):
    """Load <profile>/COOKIEFILE into an LWPCookieJar.

    Returns a fresh empty jar when COOKIEFILE is falsy, the file cannot be
    loaded, or the loaded jar contains no cookies (an empty CookieJar is
    falsy, matching the original `if not cookieJar` check).
    """
    loaded = None
    if COOKIEFILE:
        try:
            loaded = cookielib.LWPCookieJar()
            loaded.load(os.path.join(profile, COOKIEFILE), ignore_discard=True)
        except:
            loaded = None
    return loaded if loaded else cookielib.LWPCookieJar()
def doEval(fun_call,page_data,Cookie_Jar,m):
    """Evaluate a '$pyFunction:' expression from the channel config.

    fun_call is a 'module.func(...)' string: its module (from
    functions_dir) is imported, then the call itself is exec'd with
    page_data/Cookie_Jar/m in scope.  Returns str(result) when possible,
    otherwise the raw result.
    SECURITY NOTE: exec runs arbitrary code supplied by the regex config.
    """
    ret_val=''
    if functions_dir not in sys.path:
        sys.path.append(functions_dir)
    try:
        # import the module the expression refers to ('mod.func' -> 'import mod')
        py_file='import '+fun_call.split('.')[0]
        exec( py_file)
    except:
        # import errors are logged but the call below is still attempted
        traceback.print_exc(file=sys.stdout)
    exec ('ret_val='+fun_call)
    try:
        return str(ret_val)
    except: return ret_val
def doEvalFunction(fun_call,page_data,Cookie_Jar,m):
    """Run an inline '#$pyFunction' code block from the channel config.

    The code is written to functions_dir/LSProdynamicCode.py, imported,
    and its GetLSProData(page_data, Cookie_Jar, m) is invoked.  Returns
    str(result) when possible, otherwise the raw result.
    SECURITY NOTE: executes code supplied by the regex config.
    NOTE(review): the module is cached by Python after the first import,
    so later calls in the same process reuse the first file's code.
    """
    ret_val=''
    if functions_dir not in sys.path:
        sys.path.append(functions_dir)
    f=open(functions_dir+"/LSProdynamicCode.py","w")
    f.write(fun_call);
    f.close()
    import LSProdynamicCode
    ret_val=LSProdynamicCode.GetLSProData(page_data,Cookie_Jar,m)
    try:
        return str(ret_val)
    except: return ret_val
def getGoogleRecaptchaResponse(captchakey, cj,type=1): #1 for get, 2 for post, 3 for rawpost
    """Solve a (legacy) Google reCAPTCHA v1 challenge interactively.

    captchakey: site key or full challenge URL; cj: cookie jar for the
    Google requests; type selects the output encoding — 1/3 return a
    URL-encoded query fragment, 2 returns a 'key:value,...' pair string
    for the add-on's post syntax.  Returns '' when no challenge was
    obtained.  Shows the image via InputWindow and asks the user to type
    the answer.
    """
    recapChallenge=""
    solution=""
    captcha=False
    captcha_reload_response_chall=None
    solution=None
    if len(captchakey)>0: #new shiny captcha!
        captcha_url=captchakey
        if not captcha_url.startswith('http'):
            captcha_url='http://www.google.com/recaptcha/api/challenge?k='+captcha_url+'&ajax=1'
        captcha=True
        cap_chall_reg='challenge.*?\'(.*?)\''
        cap_image_reg='\'(.*?)\''
        # step 1: fetch the challenge script and pull out the challenge token
        captcha_script=getUrl(captcha_url,cookieJar=cj)
        recapChallenge=re.findall(cap_chall_reg, captcha_script)[0]
        # step 2: reload to obtain the image challenge id
        captcha_reload='http://www.google.com/recaptcha/api/reload?c=';
        captcha_k=captcha_url.split('k=')[1]
        captcha_reload+=recapChallenge+'&k='+captcha_k+'&reason=i&type=image&lang=en'
        captcha_reload_js=getUrl(captcha_reload,cookieJar=cj)
        captcha_reload_response_chall=re.findall(cap_image_reg, captcha_reload_js)[0]
        captcha_image_url='http://www.google.com/recaptcha/api/image?c='+captcha_reload_response_chall
        if not captcha_image_url.startswith("http"):
            captcha_image_url='http://www.google.com/recaptcha/api/'+captcha_image_url
        # step 3: download the image to a uniquely named temp file and prompt
        import random
        n=random.randrange(100,1000,5)
        local_captcha = os.path.join(profile,str(n) +"captcha.img" )
        localFile = open(local_captcha, "wb")
        localFile.write(getUrl(captcha_image_url,cookieJar=cj))
        localFile.close()
        solver = InputWindow(captcha=local_captcha)
        solution = solver.get()
        os.remove(local_captcha)
    if captcha_reload_response_chall:
        if type==1:
            return 'recaptcha_challenge_field='+urllib.quote_plus(captcha_reload_response_chall)+'&recaptcha_response_field='+urllib.quote_plus(solution)
        elif type==2:
            return 'recaptcha_challenge_field:'+captcha_reload_response_chall+',recaptcha_response_field:'+solution
        else:
            return 'recaptcha_challenge_field='+urllib.quote_plus(captcha_reload_response_chall)+'&recaptcha_response_field='+urllib.quote_plus(solution)
    else:
        return ''
def getUrl(url, cookieJar=None,post=None, timeout=20, headers=None, noredir=False):
    """Fetch url and return the response body.

    cookieJar: optional jar attached via HTTPCookieProcessor.
    post:      optional body; when given the request becomes a POST.
    headers:   optional list of (name, value) pairs added to the request.
    noredir:   when True, the NoRedirection handler (module-level) stops
               redirects from being followed.
    A desktop Chrome User-Agent is always set first; header pairs in
    `headers` may override it.
    """
    cookie_handler = urllib2.HTTPCookieProcessor(cookieJar)
    if noredir:
        opener = urllib2.build_opener(NoRedirection,cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
    else:
        opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
    req = urllib2.Request(url)
    req.add_header('User-Agent','Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36')
    if headers:
        for h,hv in headers:
            req.add_header(h,hv)
    response = opener.open(req,post,timeout=timeout)
    link=response.read()
    response.close()
    return link;
def get_decode(str,reg=None):
    """Decode an obfuscated, URL-quoted string.

    When reg is given, the payload is first extracted from *str* with that
    regex.  The payload (minus its final char) is unquoted, then every
    character is shifted down by the char code of the payload's last
    character, and the result is unquoted once more.
    """
    if reg:
        str=re.findall(reg, str)[0]
    s1 = urllib.unquote(str[0: len(str)-1]);
    t = '';
    for i in range( len(s1)):
        # Bug fix: the shift key must be ord() of the last character —
        # the original subtracted the character itself (int - str), which
        # raised TypeError on every call.
        t += chr(ord(s1[i]) - ord(s1[len(s1)-1]));
    t=urllib.unquote(t)
    return t
def javascriptUnEscape(str):
    """Replace every unescape('...') payload found in *str* with its
    URL-decoded form and return the result."""
    escaped_chunks = re.findall(r"unescape\('(.*?)'", str)
    # findall returns a list, so an emptiness check suffices
    if escaped_chunks:
        for chunk in escaped_chunks:
            str = str.replace(chunk, urllib.unquote(chunk))
    return str
# Monotonically increasing counter used to give each downloaded captcha
# image a unique temporary filename (see askCaptcha / askCaptchaNew).
iid=0
def askCaptcha(m,html_page, cookieJar):
    """Download the captcha image referenced by m['expres'] from html_page,
    show it with InputWindow, and return the user's typed answer.

    m: regex config dict ($LiveStreamCaptcha[...] holds the image regex;
    optional referer/agent/setcookie headers are honoured).
    """
    global iid
    iid+=1
    expre= m['expres']
    page_url = m['page']
    # the inner regex that locates the captcha image inside the page
    captcha_regex=re.compile('\$LiveStreamCaptcha\[([^\]]*)\]').findall(expre)[0]
    captcha_url=re.compile(captcha_regex).findall(html_page)[0]
    # resolve relative captcha paths against the page's host
    if not captcha_url.startswith("http"):
        page_='http://'+"".join(page_url.split('/')[2:3])
        if captcha_url.startswith("/"):
            captcha_url=page_+captcha_url
        else:
            captcha_url=page_+'/'+captcha_url
    local_captcha = os.path.join(profile, str(iid)+"captcha.jpg" )
    localFile = open(local_captcha, "wb")
    req = urllib2.Request(captcha_url)
    req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:14.0) Gecko/20100101 Firefox/14.0.1')
    if 'referer' in m:
        req.add_header('Referer', m['referer'])
    if 'agent' in m:
        req.add_header('User-agent', m['agent'])
    if 'setcookie' in m:
        req.add_header('Cookie', m['setcookie'])
    # Bug fix: the request used to be opened twice (a bare urlopen whose
    # response was discarded), fetching the image once and then saving a
    # *second*, potentially different captcha.  askCaptchaNew already has
    # that extra call commented out.
    response = urllib2.urlopen(req)
    localFile.write(response.read())
    response.close()
    localFile.close()
    solver = InputWindow(captcha=local_captcha)
    solution = solver.get()
    return solution
def askCaptchaNew(imageregex,html_page,cookieJar,m):
    """Locate a captcha image (via imageregex, or a site-specific helper),
    download it, prompt with InputWindow, and return the typed answer.

    html_page may be raw HTML or a URL (fetched first); m supplies optional
    referer/agent/accept/setcookie request headers.
    """
    global iid
    iid+=1
    if not imageregex=='':
        if html_page.startswith("http"):
            page_=getUrl(html_page,cookieJar=cookieJar)
        else:
            page_=html_page
        # Bug fix: search the fetched page content (page_), not the URL
        # string that was passed in — page_ was downloaded above but the
        # original applied the regex to html_page.
        captcha_url=re.compile(imageregex).findall(page_)[0]
    else:
        captcha_url=html_page
    if 'oneplay.tv/embed' in html_page:
        # oneplay pages need their dedicated captcha-url extractor
        import oneplay
        page_=getUrl(html_page,cookieJar=cookieJar)
        captcha_url=oneplay.getCaptchaUrl(page_)
    local_captcha = os.path.join(profile, str(iid)+"captcha.jpg" )
    localFile = open(local_captcha, "wb")
    req = urllib2.Request(captcha_url)
    req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:14.0) Gecko/20100101 Firefox/14.0.1')
    if 'referer' in m:
        req.add_header('Referer', m['referer'])
    if 'agent' in m:
        req.add_header('User-agent', m['agent'])
    if 'accept' in m:
        req.add_header('Accept', m['accept'])
    if 'setcookie' in m:
        req.add_header('Cookie', m['setcookie'])
    response = urllib2.urlopen(req)
    localFile.write(response.read())
    response.close()
    localFile.close()
    solver = InputWindow(captcha=local_captcha)
    solution = solver.get()
    return solution
#########################################################
# Function : TakeInput                                  #
#########################################################
# Parameter :                                           #
#                                                       #
#    name        suggested name for export              #
#    headname    heading shown on the keyboard dialog   #
#                                                       #
# Returns :                                             #
#                                                       #
#    name        name of export excluding any extension #
#                                                       #
#########################################################
def TakeInput(name, headname):
    """Open Kodi's on-screen keyboard pre-filled with *name* under the
    heading *headname* and return the entered text."""
    keyboard = xbmc.Keyboard('default', 'heading', True)
    keyboard.setDefault(name)
    keyboard.setHeading(headname)
    keyboard.setHiddenInput(False)
    return keyboard.getText()
#########################################################
class InputWindow(xbmcgui.WindowDialog):
    """Modal dialog that displays a captcha image above Kodi's on-screen
    keyboard and returns whatever the user types."""
    def __init__(self, *args, **kwargs):
        # path of the captcha image file to display
        self.cptloc = kwargs.get('captcha')
        self.img = xbmcgui.ControlImage(335,30,624,60,self.cptloc)
        self.addControl(self.img)
        self.kbd = xbmc.Keyboard()
    def get(self):
        # Show the image, give the user a moment to read it, then open the
        # keyboard.  Returns the typed text, or False when cancelled.
        self.show()
        time.sleep(2)
        self.kbd.doModal()
        if (self.kbd.isConfirmed()):
            text = self.kbd.getText()
            self.close()
            return text
        self.close()
        return False
def getEpocTime():
    """Current Unix epoch time in whole milliseconds, as a decimal string."""
    import time
    millis = time.time() * 1000
    return "%d" % millis
def getEpocTime2():
    """Current Unix epoch time in whole seconds, as a decimal string."""
    import time
    seconds = int(time.time())
    return str(seconds)
def get_params():
    """Parse Kodi's plugin query string (sys.argv[2]) into a dict.

    Returns {'key': 'value', ...}; pairs without exactly one '=' are
    skipped.  When the query string is shorter than 2 chars the initial
    empty *list* is returned, preserving the original behaviour.
    """
    param = []
    raw = sys.argv[2]
    if len(raw) >= 2:
        params = sys.argv[2]
        cleanedparams = params.replace('?', '')
        if params[len(params) - 1] == '/':
            # NOTE: this trimmed copy is never used afterwards; kept to
            # mirror the original control flow exactly.
            params = params[0:len(params) - 2]
        param = {}
        for pair in cleanedparams.split('&'):
            kv = pair.split('=')
            if len(kv) == 2:
                param[kv[0]] = kv[1]
    return param
def getFavorites():
    """List every saved favourite as a Kodi directory/link item.

    Each entry in the favourites JSON file is a tuple
    (name, url, iconimage, fanart, mode[, playlist[, regexs]]); mode 0
    becomes a playable link, anything else a directory.
    """
    items = json.loads(open(favorites).read())
    total = len(items)
    for i in items:
        name = i[0]
        url = i[1]
        iconimage = i[2]
        try:
            # older entries may lack fanart (index 3)
            fanArt = i[3]
            if fanArt == None:
                raise
        except:
            if addon.getSetting('use_thumb') == "true":
                fanArt = iconimage
            else:
                fanArt = fanart
        # playlist/regexs are optional trailing fields
        try: playlist = i[5]
        except: playlist = None
        try: regexs = i[6]
        except: regexs = None
        if i[4] == 0:
            addLink(url,name,iconimage,fanArt,'','','','fav',playlist,regexs,total)
        else:
            addDir(name,url,i[4],iconimage,fanart,'','','','','fav')
def addFavorite(name,url,iconimage,fanart,mode,playlist=None,regexs=None):
    """Append one favourite to the favourites JSON file, creating the file
    on first use.  Entries are stored as
    (name, url, iconimage, fanart, mode, playlist, regexs)."""
    favList = []
    try:
        # list-item labels may be unicode; store as utf-8 bytes
        name = name.encode('utf-8', 'ignore')
    except:
        pass
    if os.path.exists(favorites)==False:
        addon_log('Making Favorites File')
        favList.append((name,url,iconimage,fanart,mode,playlist,regexs))
        a = open(favorites, "w")
        a.write(json.dumps(favList))
        a.close()
    else:
        addon_log('Appending Favorites')
        a = open(favorites).read()
        data = json.loads(a)
        # Bug fix: this branch used to append only the first 5 fields,
        # silently dropping playlist/regexs for every favourite added
        # after the first — getFavorites() reads them as i[5] and i[6].
        data.append((name,url,iconimage,fanart,mode,playlist,regexs))
        b = open(favorites, "w")
        b.write(json.dumps(data))
        b.close()
def rmFavorite(name):
    """Remove the first favourite whose stored name matches *name* from the
    favourites JSON file, then refresh the Kodi listing."""
    data = json.loads(open(favorites).read())
    for index in range(len(data)):
        if data[index][0]==name:
            del data[index]
            b = open(favorites, "w")
            b.write(json.dumps(data))
            b.close()
            # only the first match is removed
            break
    xbmc.executebuiltin("XBMC.Container.Refresh")
def urlsolver(url):
    """Resolve a hoster page URL to a direct media URL via urlresolver.

    When the resolver returns a list of quality variants, HD is preferred
    (1080p only when the add-on setting allows it), falling back to SD.
    Unsupported domains notify the user and return the URL unchanged.
    """
    import urlresolver
    host = urlresolver.HostedMediaFile(url)
    if host:
        resolver = urlresolver.resolve(url)
        resolved = resolver
        if isinstance(resolved,list):
            for k in resolved:
                quality = addon.getSetting('quality')
                if k['quality'] == 'HD' :
                    resolver = k['url']
                    break
                elif k['quality'] == 'SD' :
                    # keep SD as the current best, keep scanning for HD
                    resolver = k['url']
                elif k['quality'] == '1080p' and addon.getSetting('1080pquality') == 'true' :
                    resolver = k['url']
                    break
        else:
            resolver = resolved
    else:
        xbmc.executebuiltin("XBMC.Notification(Exabyte TV,Urlresolver donot support this domain. - ,5000)")
        resolver=url
    return resolver
def play_playlist(name, mu_playlist,queueVideo=None):
    """Play or queue a multi-source playlist.

    Three modes: with the 'ask_playlist_items' setting the user picks one
    source from a dialog; otherwise all entries are queued into the video
    playlist and playback starts at offset 0; with queueVideo set, the
    single mu_playlist item is just appended to the current playlist.
    Entries may carry '&mode=19' (urlresolver) or '$doregex'/'&regexs='
    (regex-resolver) markers that are resolved before playing.
    """
    playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
    if addon.getSetting('ask_playlist_items') == 'true' and not queueVideo :
        import urlparse
        # label each source by its domain so the user can tell them apart
        names = []
        for i in mu_playlist:
            d_name=urlparse.urlparse(i).netloc
            if d_name == '':
                names.append(name)
            else:
                names.append(d_name)
        dialog = xbmcgui.Dialog()
        index = dialog.select('Choose a video source', names)
        if index >= 0:
            if "&mode=19" in mu_playlist[index]:
                xbmc.Player().play(urlsolver(mu_playlist[index].replace('&mode=19','').replace(';','')))
            elif "$doregex" in mu_playlist[index] :
                # split '<url>&regexs=<regexs>' and resolve via getRegexParsed
                sepate = mu_playlist[index].split('®exs=')
                url,setresolved = getRegexParsed(sepate[1], sepate[0])
                url2 = url.replace(';','')
                xbmc.Player().play(url2)
            else:
                url = mu_playlist[index]
                xbmc.Player().play(url)
    elif not queueVideo:
        playlist.clear()
        item = 0
        for i in mu_playlist:
            item += 1
            info = xbmcgui.ListItem('%s) %s' %(str(item),name))
            # Don't do this as regex parsed might take longer
            try:
                if "$doregex" in i:
                    sepate = i.split('®exs=')
                    url,setresolved = getRegexParsed(sepate[1], sepate[0])
                elif "&mode=19" in i:
                    url = urlsolver(i.replace('&mode=19','').replace(';',''))
                if url:
                    playlist.add(url, info)
                else:
                    raise
            except Exception:
                # resolution failed: queue the raw entry instead
                playlist.add(i, info)
                pass #xbmc.Player().play(url)
        xbmc.executebuiltin('playlist.playoffset(video,0)')
    else:
        listitem = xbmcgui.ListItem(name)
        playlist.add(mu_playlist, listitem)
def download_file(name, url):
    """Download *url* as *name* and optionally register the file as a source."""
    if addon.getSetting('save_location') == "":
        # No target folder configured yet -- nag the user and open settings.
        xbmc.executebuiltin("XBMC.Notification('Exabyte TV','Choose a location to save files.',15000,"+icon+")")
        addon.openSettings()
    # Re-read the setting: the user may have just picked a folder above.
    downloader.download(name, {'url': url, 'download_path': addon.getSetting('save_location')})
    if xbmcgui.Dialog().yesno('Exabyte TV', 'Do you want to add this file as a source?'):
        addSource(os.path.join(addon.getSetting('save_location'), name))
def _search(url,name):
    """Let the user forward a search to one of several other video add-ons."""
    # (label shown in the dialog, plugin search URL) pairs.
    plugins = [
        ('Gensis TV', 'plugin://plugin.video.genesis/?action=shows_search'),
        ('Genesis Movie', 'plugin://plugin.video.genesis/?action=movies_search'),
        ('Salt movie', 'plugin://plugin.video.salts/?mode=search&section=Movies'),
        ('salt TV', 'plugin://plugin.video.salts/?mode=search&section=TV'),
        ('Muchmovies', 'plugin://plugin.video.muchmovies.hd/?action=movies_search'),
        ('viooz', 'plugin://plugin.video.viooz.co/?action=root_search'),
        ('ORoroTV', 'plugin://plugin.video.ororotv/?action=shows_search'),
        ('Yifymovies', 'plugin://plugin.video.yifymovies.hd/?action=movies_search'),
        ('cartoonHD', 'plugin://plugin.video.cartoonhdtwo/?description&fanart&iconimage&mode=3&name=Search&url=url'),
        ('Youtube', 'plugin://plugin.video.youtube/kodion/search/list/'),
        ('DailyMotion', 'plugin://plugin.video.dailymotion_com/?mode=search&url'),
        ('Vimeo', 'plugin://plugin.video.vimeo/kodion/search/list/'),
    ]
    choice = xbmcgui.Dialog().select('Choose a video source', [label for label, _ in plugins])
    if choice >= 0:
        # Enumerate the chosen plugin's search directory via JSON-RPC.
        pluginquerybyJSON(plugins[choice][1])
def addDir(name,url,mode,iconimage,fanart,description,genre,date,credits,showcontext=False,regexs=None,reg_url=None,allinfo={}):
    # Add one folder entry to the plugin directory listing.
    #
    # mode        -- dispatch mode encoded into the callback URL
    # showcontext -- False, or one of 'source'/'download'/'fav'/'!!update' to
    #                pick which extra context-menu entries are attached
    # allinfo     -- when non-empty, used verbatim as the ListItem infoLabels
    u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&fanart="+urllib.quote_plus(fanart)
    ok=True
    if date == '':
        date = None
    else:
        description += '\n\nDate: %s' %date
    liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
    if len(allinfo) <1 :
        liz.setInfo(type="Video", infoLabels={ "Title": name, "Plot": description, "Genre": genre, "dateadded": date, "credits": credits })
    else:
        liz.setInfo(type="Video", infoLabels= allinfo)
    liz.setProperty("Fanart_Image", fanart)
    if showcontext:
        contextMenu = []
        # A configured PIN gets an enable/disable parental-block entry.
        parentalblock =addon.getSetting('parentalblocked')
        parentalblock= parentalblock=="true"
        parentalblockedpin =addon.getSetting('parentalblockedpin')
        if len(parentalblockedpin)>0:
            if parentalblock:
                contextMenu.append(('Disable Parental Block','XBMC.RunPlugin(%s?mode=55&name=%s)' %(sys.argv[0], urllib.quote_plus(name))))
            else:
                contextMenu.append(('Enable Parental Block','XBMC.RunPlugin(%s?mode=56&name=%s)' %(sys.argv[0], urllib.quote_plus(name))))
        if showcontext == 'source':
            if name in str(SOURCES):
                contextMenu.append(('Remove from Sources','XBMC.RunPlugin(%s?mode=8&name=%s)' %(sys.argv[0], urllib.quote_plus(name))))
        elif showcontext == 'download':
            contextMenu.append(('Download','XBMC.RunPlugin(%s?url=%s&mode=9&name=%s)'
                                %(sys.argv[0], urllib.quote_plus(url), urllib.quote_plus(name))))
        elif showcontext == 'fav':
            contextMenu.append(('Remove from Exabyte TV Favorites','XBMC.RunPlugin(%s?mode=6&name=%s)'
                                %(sys.argv[0], urllib.quote_plus(name))))
        if showcontext == '!!update':
            # NOTE(review): '®exs=' looks like an HTML-entity-corrupted
            # '&regexs=' marker; the matching split() calls elsewhere use the
            # same byte sequence, so it is left untouched here.
            fav_params2 = (
                '%s?url=%s&mode=17®exs=%s'
                %(sys.argv[0], urllib.quote_plus(reg_url), regexs)
            )
            contextMenu.append(('[COLOR yellow]!!update[/COLOR]','XBMC.RunPlugin(%s)' %fav_params2))
        if not name in FAV:
            contextMenu.append(('Add to Exabyte TV Favorites','XBMC.RunPlugin(%s?mode=5&name=%s&url=%s&iconimage=%s&fanart=%s&fav_mode=%s)'
                                %(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), urllib.quote_plus(fanart), mode)))
        liz.addContextMenuItems(contextMenu)
    ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
    return ok
def ytdl_download(url,title,media_type='video'):
    """Download a stream via the youtube-dl service add-on.

    With a non-empty *url*, download it directly (audio-only when
    *media_type* is 'audio').  With an empty *url*, download whatever is
    currently playing; if nothing is playing, tell the user to start
    playback first.
    """
    # play in xbmc while playing go back to contextMenu(c) to "!!Download!!"
    # Trial yasceen: seperate |User-Agent=
    import youtubedl
    if not url == '':
        if media_type== 'audio':
            youtubedl.single_YD(url,download=True,audio=True)
        else:
            youtubedl.single_YD(url,download=True)
    elif xbmc.Player().isPlaying() == True :
        import YDStreamExtractor
        if YDStreamExtractor.isDownloading() == True:
            # A download is already running -- show its manager instead.
            YDStreamExtractor.manageDownloads()
        else:
            xbmc_url = xbmc.Player().getPlayingFile()
            # Strip the Kodi '|User-Agent=...' suffix before handing the URL on.
            xbmc_url = xbmc_url.split('|User-Agent=')[0]
            info = {'url':xbmc_url,'title':title,'media_type':media_type}
            youtubedl.single_YD('',download=True,dl_info=info)
    else:
        xbmc.executebuiltin("XBMC.Notification(DOWNLOAD,First Play [COLOR yellow]WHILE playing download[/COLOR] ,10000)")
def ascii(string):
    """Best-effort ASCII fold: encode unicode text to an ASCII byte string,
    silently dropping unencodable characters; pass other values through."""
    # unicode is a subclass of basestring, so one isinstance check suffices.
    if isinstance(string, unicode):
        string = string.encode('ascii', 'ignore')
    return string
def uni(string, encoding = 'utf-8'):
    """Best-effort text decode: turn byte strings into unicode using
    *encoding* (bad bytes ignored); pass any other value through."""
    is_text = isinstance(string, basestring)
    if is_text and not isinstance(string, unicode):
        string = unicode(string, encoding, 'ignore')
    return string
def removeNonAscii(s):
    """Return *s* with every non-ASCII character stripped out."""
    return "".join(ch for ch in s if ord(ch) < 128)
def sendJSON( command):
    """Execute a JSON-RPC call against XBMC and return the reply as unicode.

    The command is first sent as unicode; if the RPC layer raises
    UnicodeEncodeError it is retried with an ASCII-folded payload.
    """
    try:
        reply = xbmc.executeJSONRPC(uni(command))
    except UnicodeEncodeError:
        reply = xbmc.executeJSONRPC(ascii(command))
    return uni(reply)
def pluginquerybyJSON(url,give_me_result=None,playlist=False):
    """Enumerate another plugin's directory through Files.GetDirectory.

    url            -- plugin:// directory URL to enumerate
    give_me_result -- when truthy, return the decoded JSON-RPC reply instead
                      of populating the current directory listing
    playlist       -- when True, queue playable files instead of listing them
    """
    if 'audio' in url:
        json_query = uni('{"jsonrpc":"2.0","method":"Files.GetDirectory","params": {"directory":"%s","media":"video", "properties": ["title", "album", "artist", "duration","thumbnail", "year"]}, "id": 1}') %url
    else:
        json_query = uni('{"jsonrpc":"2.0","method":"Files.GetDirectory","params":{"directory":"%s","media":"video","properties":[ "plot","playcount","director", "genre","votes","duration","trailer","premiered","thumbnail","title","year","dateadded","fanart","rating","season","episode","studio","mpaa"]},"id":1}') %url
    json_folder_detail = json.loads(sendJSON(json_query))
    if give_me_result:
        return json_folder_detail
    if json_folder_detail.has_key('error'):
        return
    for i in json_folder_detail['result']['files'] :
        url = i['file']
        name = removeNonAscii(i['label'])
        thumbnail = removeNonAscii(i['thumbnail'])
        fanart = removeNonAscii(i['fanart'])
        # Drop placeholder values ('0', -1, '') from the metadata.  The
        # previous condition (`not v == '0' or not v == -1 or v == ''`) was
        # always true, which silently turned this filter into a no-op.
        meta = dict((k,v) for k, v in i.iteritems() if v not in ('0', -1, ''))
        meta.pop("file", None)
        if i['filetype'] == 'file':
            if playlist:
                play_playlist(name,url,queueVideo='1')
                continue
            else:
                addLink(url,name,thumbnail,fanart,'','','','',None,'',total=len(json_folder_detail['result']['files']),allinfo=meta)
            # Mirror the remote plugin's content type in our own listing.
            if i['type'] and i['type'] == 'tvshow' :
                xbmcplugin.setContent(int(sys.argv[1]), 'tvshows')
            elif i['episode'] > 0 :
                xbmcplugin.setContent(int(sys.argv[1]), 'episodes')
        else:
            # Directories re-enter this function through mode 53.
            addDir(name,url,53,thumbnail,fanart,'','','','',allinfo=meta)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
def addLink(url,name,iconimage,fanart,description,genre,date,showcontext,playlist,regexs,total,setCookie="",allinfo={}):
    """Add one playable entry to the plugin directory listing.

    The dispatch mode baked into the callback URL is derived from the URL
    itself: regex-driven (17), urlresolver-hosted (19), youtube-dl (18),
    magnet via Pulsar / direct (12).  *playlist* carries alternate mirrors,
    *regexs* the serialized regex parser config, *allinfo* optional verbatim
    infoLabels.
    """
    contextMenu =[]
    # A configured PIN gets an enable/disable parental-block entry.
    parentalblock =addon.getSetting('parentalblocked')
    parentalblock= parentalblock=="true"
    parentalblockedpin =addon.getSetting('parentalblockedpin')
    if len(parentalblockedpin)>0:
        if parentalblock:
            contextMenu.append(('Disable Parental Block','XBMC.RunPlugin(%s?mode=55&name=%s)' %(sys.argv[0], urllib.quote_plus(name))))
        else:
            contextMenu.append(('Enable Parental Block','XBMC.RunPlugin(%s?mode=56&name=%s)' %(sys.argv[0], urllib.quote_plus(name))))
    try:
        name = name.encode('utf-8')
    except: pass
    ok = True
    isFolder=False
    if regexs:
        mode = '17'
        if 'listrepeat' in regexs:
            # listrepeat entries expand into a sub-listing, not a stream.
            isFolder=True
        contextMenu.append(('[COLOR white]!!Download Currently Playing!![/COLOR]','XBMC.RunPlugin(%s?url=%s&mode=21&name=%s)'
                            %(sys.argv[0], urllib.quote_plus(url), urllib.quote_plus(name))))
    elif (any(x in url for x in resolve_url) and url.startswith('http')) or url.endswith('&mode=19'):
        url=url.replace('&mode=19','')
        mode = '19'
        contextMenu.append(('[COLOR white]!!Download Currently Playing!![/COLOR]','XBMC.RunPlugin(%s?url=%s&mode=21&name=%s)'
                            %(sys.argv[0], urllib.quote_plus(url), urllib.quote_plus(name))))
    elif url.endswith('&mode=18'):
        url=url.replace('&mode=18','')
        mode = '18'
        contextMenu.append(('[COLOR white]!!Download!![/COLOR]','XBMC.RunPlugin(%s?url=%s&mode=23&name=%s)'
                            %(sys.argv[0], urllib.quote_plus(url), urllib.quote_plus(name))))
        if addon.getSetting('dlaudioonly') == 'true':
            contextMenu.append(('!!Download [COLOR seablue]Audio!![/COLOR]','XBMC.RunPlugin(%s?url=%s&mode=24&name=%s)'
                                %(sys.argv[0], urllib.quote_plus(url), urllib.quote_plus(name))))
    elif url.startswith('magnet:?xt='):
        # Unescape HTML-encoded ampersands before handing the magnet to
        # Pulsar.  (The previous test `'&' in url and not '&' in url` was
        # self-contradictory dead code.)
        if '&amp;' in url :
            url = url.replace('&amp;','&')
        url = 'plugin://plugin.video.pulsar/play?uri=' + url
        mode = '12'
    else:
        mode = '12'
        contextMenu.append(('[COLOR white]!!Download Currently Playing!![/COLOR]','XBMC.RunPlugin(%s?url=%s&mode=21&name=%s)'
                            %(sys.argv[0], urllib.quote_plus(url), urllib.quote_plus(name))))
        if 'plugin://plugin.video.youtube/play/?video_id=' in url:
            yt_audio_url = url.replace('plugin://plugin.video.youtube/play/?video_id=','https://www.youtube.com/watch?v=')
            contextMenu.append(('!!Download [COLOR blue]Audio!![/COLOR]','XBMC.RunPlugin(%s?url=%s&mode=24&name=%s)'
                                %(sys.argv[0], urllib.quote_plus(yt_audio_url), urllib.quote_plus(name))))
    u=sys.argv[0]+"?"
    play_list = False
    if playlist:
        if addon.getSetting('add_playlist') == "false":
            u += "url="+urllib.quote_plus(url)+"&mode="+mode
        else:
            # Serialize the mirror list into the callback URL (',' -> '||'
            # so the query string survives get_params).
            u += "mode=13&name=%s&playlist=%s" %(urllib.quote_plus(name), urllib.quote_plus(str(playlist).replace(',','||')))
            name = name + '[COLOR magenta] (' + str(len(playlist)) + ' items )[/COLOR]'
            play_list = True
    else:
        u += "url="+urllib.quote_plus(url)+"&mode="+mode
    if regexs:
        # NOTE(review): '®exs=' appears to be an HTML-entity-corrupted
        # '&regexs=' marker; the matching split() calls use the same bytes,
        # so it is intentionally left as-is.
        u += "®exs="+regexs
    if not setCookie == '':
        u += "&setCookie="+urllib.quote_plus(setCookie)
    if date == '':
        date = None
    else:
        description += '\n\nDate: %s' %date
    liz=xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=iconimage)
    if len(allinfo) <1:
        liz.setInfo(type="Video", infoLabels={ "Title": name, "Plot": description, "Genre": genre, "dateadded": date })
    else:
        liz.setInfo(type="Video", infoLabels=allinfo)
    liz.setProperty("Fanart_Image", fanart)
    # Only mark directly-streamable entries IsPlayable; playlists, proxied
    # URLs and non-playable regex entries are dispatched via RunPlugin.
    if (not play_list) and not any(x in url for x in g_ignoreSetResolved) and not '$PLAYERPROXY$=' in url:
        if regexs:
            if '$pyFunction:playmedia(' not in urllib.unquote_plus(regexs) and 'notplayable' not in urllib.unquote_plus(regexs) and 'listrepeat' not in urllib.unquote_plus(regexs) :
                liz.setProperty('IsPlayable', 'true')
        else:
            liz.setProperty('IsPlayable', 'true')
    else:
        addon_log( 'NOT setting isplayable'+url)
    if showcontext:
        if showcontext == 'fav':
            contextMenu.append(
                ('Remover favoritos de Exabyte TV','XBMC.RunPlugin(%s?mode=6&name=%s)'
                 %(sys.argv[0], urllib.quote_plus(name)))
            )
        elif not name in FAV:
            # quote_plus chokes on unicode art in some skins; retry with an
            # explicit utf-8 encode of the artwork URLs.
            try:
                fav_params = (
                    '%s?mode=5&name=%s&url=%s&iconimage=%s&fanart=%s&fav_mode=0'
                    %(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage), urllib.quote_plus(fanart))
                )
            except:
                fav_params = (
                    '%s?mode=5&name=%s&url=%s&iconimage=%s&fanart=%s&fav_mode=0'
                    %(sys.argv[0], urllib.quote_plus(name), urllib.quote_plus(url), urllib.quote_plus(iconimage.encode("utf-8")), urllib.quote_plus(fanart.encode("utf-8")))
                )
            if playlist:
                # Bug fix: the query separator was missing, which glued
                # 'playlist=' onto 'fav_mode=0'.
                fav_params += '&playlist='+urllib.quote_plus(str(playlist).replace(',','||'))
            if regexs:
                fav_params += "®exs="+regexs
            contextMenu.append(('Agregar a los favoritos de Exabyte','XBMC.RunPlugin(%s)' %fav_params))
        liz.addContextMenuItems(contextMenu)
    if not playlist is None:
        if addon.getSetting('add_playlist') == "false":
            # NOTE(review): assumes *name* looks like 'N) title'; a name
            # without ') ' would raise IndexError here -- confirm callers.
            playlist_name = name.split(') ')[1]
            contextMenu_ = [
                ('Play '+playlist_name+' PlayList','XBMC.RunPlugin(%s?mode=13&name=%s&playlist=%s)'
                 %(sys.argv[0], urllib.quote_plus(playlist_name), urllib.quote_plus(str(playlist).replace(',','||'))))
            ]
            liz.addContextMenuItems(contextMenu_)
    ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,totalItems=total,isFolder=isFolder)
    return ok
def playsetresolved(url,name,iconimage,setresolved=True):
    """Hand *url* to the player, via setResolvedUrl or direct playback.

    A '$$LSDirect$$' marker in the URL forces direct playback; when
    *setresolved* is false the URL is executed as a plugin instead.
    """
    if not setresolved:
        xbmc.executebuiltin('XBMC.RunPlugin('+url+')')
        return
    direct = '$$LSDirect$$' in url
    if direct:
        url = url.replace('$$LSDirect$$','')
    item = xbmcgui.ListItem(name, iconImage=iconimage)
    item.setInfo(type='Video', infoLabels={'Title':name})
    item.setProperty("IsPlayable","true")
    item.setPath(url)
    if direct:
        xbmc.Player().play(url)
    else:
        xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, item)
def getepg(link):
    """Scrape the currently running programme from a German EPG page.

    Returns ' - <title> - <time>' for the show marked "Jetzt" (now).
    NOTE(review): this is screen-scraping with hard-coded offsets (40 and
    17) tied to the site's exact markup -- verify against the live page.
    """
    url=urllib.urlopen(link)
    source=url.read()
    url.close()
    # Everything after the "Jetzt" (now) marker describes the current show.
    source2 = source.split("Jetzt")
    source3 = source2[1].split('programm/detail.php?const_id=')
    sourceuhrzeit = source3[1].split('<br /><a href="/')
    # Skip 40 chars of markup to reach the start-time text.
    nowtime = sourceuhrzeit[0][40:len(sourceuhrzeit[0])]
    sourcetitle = source3[2].split("</a></p></div>")
    # Skip 17 chars of markup to reach the title text.
    nowtitle = sourcetitle[0][17:len(sourcetitle[0])]
    nowtitle = nowtitle.encode('utf-8')
    return " - "+nowtitle+" - "+nowtime
def get_epg(url, regex):
    """Fetch *url* and return the first *regex* match, or None on failure."""
    page = makeRequest(url)
    try:
        return re.findall(regex, page)[0]
    except:
        # Best-effort: log the failing pattern and fall through to None.
        addon_log('regex failed')
        addon_log(regex)
        return
def d2x(d, root="root",nested=0):
    # Serialize a (possibly nested) dict of regex definitions to XML text.
    #
    # d      -- dict to serialize; list values emit one element per item
    # root   -- name of the wrapping element, or None/"" for a bare fragment
    # nested -- recursion depth; at depth 0 every top-level tag is renamed
    #           'regex' regardless of its key
    op = lambda tag: '<' + tag + '>'
    cl = lambda tag: '</' + tag + '>\n'
    # NOTE: `ml` closes over the loop variable `key` (late binding), so each
    # call tags the value with whichever key the loop is currently on.
    ml = lambda v,xml: xml + op(key) + str(v) + cl(key)
    xml = op(root) + '\n' if root else ""
    for key,vl in d.iteritems():
        vtype = type(vl)
        if nested==0: key='regex' #enforcing all top level tags to be named as regex
        if vtype is list:
            for v in vl:
                v=escape(v)
                xml = ml(v,xml)
        if vtype is dict:
            # Recurse without a root element; the current key wraps it.
            xml = ml('\n' + d2x(vl,None,nested+1),xml)
        if vtype is not list and vtype is not dict:
            # Scalar leaf: escape (unless None) and emit utf-8 bytes.
            if not vl is None: vl=escape(vl)
            if vl is None:
                xml = ml(vl,xml)
            else:
                xml = ml(vl.encode("utf-8"),xml)
    xml += cl(root) if root else ""
    return xml
# --- Plugin entry point: view setup -----------------------------------------
# Default the listing to movie content and register the sort methods.  Each
# addSortMethod is wrapped individually -- presumably so builds lacking one
# of the SORT_METHOD_* constants still get the rest (TODO confirm).
xbmcplugin.setContent(int(sys.argv[1]), 'movies')
try:
    xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_UNSORTED)
except:
    pass
try:
    xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_LABEL)
except:
    pass
try:
    xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_DATE)
except:
    pass
try:
    xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_GENRE)
except:
    pass
# --- Plugin entry point: query-string parameters ----------------------------
# Each parameter is optional; absent keys leave the default via try/except.
params=get_params()
url=None
name=None
mode=None
playlist=None
iconimage=None
fanart=FANART
playlist=None
fav_mode=None
regexs=None
try:
    url=urllib.unquote_plus(params["url"]).decode('utf-8')
except:
    pass
try:
    name=urllib.unquote_plus(params["name"])
except:
    pass
try:
    iconimage=urllib.unquote_plus(params["iconimage"])
except:
    pass
try:
    fanart=urllib.unquote_plus(params["fanart"])
except:
    pass
try:
    mode=int(params["mode"])
except:
    pass
try:
    # Playlists are serialized Python literals with ',' stored as '||'.
    # NOTE(review): eval() of a query-string value executes arbitrary code
    # if an attacker controls the URL -- consider ast.literal_eval.
    playlist=eval(urllib.unquote_plus(params["playlist"]).replace('||',','))
except:
    pass
try:
    fav_mode=int(params["fav_mode"])
except:
    pass
try:
    regexs=params["regexs"]
except:
    pass
playitem=''
try:
    playitem=urllib.unquote_plus(params["playitem"])
except:
    pass
# --- Plugin entry point: mode dispatch --------------------------------------
addon_log("Mode: "+str(mode))
if not url is None:
    addon_log("URL: "+str(url.encode('utf-8')))
addon_log("Name: "+str(name))
if not playitem =='':
    # An inline <item> XML payload overrides the parsed parameters and is
    # routed through the regex-parsed player (mode 117).
    s=getSoup('',data=playitem)
    name,url,regexs=getItems(s,None,dontLink=True)
    mode=117
if mode==None:
    # No mode: show the top-level source list.
    addon_log("getSources")
    getSources()
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==1:
    # Mode 1: list a source file/URL; regex entries are pre-expanded to XML.
    addon_log("getData")
    data=None
    if regexs:
        data=getRegexParsed(regexs, url)
        url=''
        #create xml here
    getData(url,fanart,data)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==2:
    # Mode 2: expand a channel group.
    addon_log("getChannelItems")
    getChannelItems(name,url,fanart)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==3:
    # Mode 3: expand a sub-channel group.
    addon_log("getSubChannelItems")
    getSubChannelItems(name,url,fanart)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==4:
    # Mode 4: list stored favorites.
    addon_log("getFavorites")
    getFavorites()
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==5:
    # Mode 5: add a favorite.  Strip the '\ ' prefix and ' - ' suffix the
    # listing decorates names with before storing.
    addon_log("addFavorite")
    try:
        name = name.split('\\ ')[1]
    except:
        pass
    try:
        name = name.split(' - ')[0]
    except:
        pass
    addFavorite(name,url,iconimage,fanart,fav_mode)
elif mode==6:
    # Mode 6: remove a favorite (same name normalization as mode 5).
    addon_log("rmFavorite")
    try:
        name = name.split('\\ ')[1]
    except:
        pass
    try:
        name = name.split(' - ')[0]
    except:
        pass
    rmFavorite(name)
elif mode==7:
    # Mode 7: bundled SportsDevil/Dutch listings.
    SportsDevil()
    Dutch()
elif mode==8:
    # Mode 8: remove a source.
    addon_log("rmSource")
    rmSource(name)
elif mode==9:
    # Mode 9: download a file to the configured save location.
    addon_log("download_file")
    download_file(name, url)
elif mode==10:
    # Mode 10: list community-shared sources.
    addon_log("getCommunitySources")
    getCommunitySources()
elif mode==11:
    # Mode 11: register a new source URL.
    addon_log("addSource")
    addSource(url)
elif mode==12:
    # Mode 12: resolve playback.  Plugin URLs on the ignore list are run
    # directly; everything else goes through setResolvedUrl, with a
    # '$$LSDirect$$' marker forcing direct playback.
    addon_log("setResolvedUrl")
    if not url.startswith("plugin://plugin") or not any(x in url for x in g_ignoreSetResolved):
        setres=True
        if '$$LSDirect$$' in url:
            url=url.replace('$$LSDirect$$','')
            setres=False
        item = xbmcgui.ListItem(path=url)
        if not setres:
            xbmc.Player().play(url)
        else:
            xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, item)
    else:
        xbmc.executebuiltin('XBMC.RunPlugin('+url+')')
elif mode==13:
    # Mode 13: play a serialized multi-source playlist.
    addon_log("play_playlist")
    play_playlist(name, playlist)
elif mode==14:
    # Mode 14: list the XML database.
    addon_log("get_xml_database")
    get_xml_database(url)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==15:
    # Mode 15: browse the XML database.
    addon_log("browse_xml_database")
    get_xml_database(url, True)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==16:
    # Mode 16: browse community sources.
    addon_log("browse_community")
    getCommunitySources(url,browse=True)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==17 or mode==117:
    # Modes 17/117: regex-parsed playback.  'listrepeat' configs expand one
    # regex match set into a generated <item> listing; anything else resolves
    # to a single URL (optionally played through a local proxy).
    addon_log("getRegexParsed")
    data=None
    if regexs and 'listrepeat' in urllib.unquote_plus(regexs):
        # listrepeat: `ret` holds the regex matches, `listrepeat` the item
        # template with '[name.paramN]' placeholders, `regexs` the remaining
        # regex definitions to re-embed into each generated item.
        listrepeat,ret,m,regexs =getRegexParsed(regexs, url)
        d=''
        regexname=m['name']
        existing_list=regexs.pop(regexname)
        url=''
        import copy
        ln=''
        rnumber=0
        for obj in ret:
            try:
                rnumber+=1
                # Work on a private copy so placeholder substitution in one
                # match cannot leak into the next.
                newcopy=copy.deepcopy(regexs)
                listrepeatT=listrepeat
                i=0
                for i in range(len(obj)):
                    # Substitute '[regexname.param<i+1>]' (and its
                    # HTML-unescaped '[...][DE]' variant) into every string
                    # of the copied regex definitions, one nesting level deep.
                    if len(newcopy)>0:
                        for the_keyO, the_valueO in newcopy.iteritems():
                            if the_valueO is not None:
                                for the_key, the_value in the_valueO.iteritems():
                                    if the_value is not None:
                                        if type(the_value) is dict:
                                            for the_keyl, the_valuel in the_value.iteritems():
                                                if the_valuel is not None:
                                                    val=None
                                                    if isinstance(obj,tuple):
                                                        try:
                                                            val= obj[i].decode('utf-8')
                                                        except:
                                                            val= obj[i]
                                                    else:
                                                        try:
                                                            val= obj.decode('utf-8')
                                                        except:
                                                            val= obj
                                                    if '[' + regexname+'.param'+str(i+1) + '][DE]' in the_valuel:
                                                        the_valuel=the_valuel.replace('[' + regexname+'.param'+str(i+1) + '][DE]', unescape(val))
                                                    the_value[the_keyl]=the_valuel.replace('[' + regexname+'.param'+str(i+1) + ']', val)
                                        else:
                                            val=None
                                            if isinstance(obj,tuple):
                                                try:
                                                    val=obj[i].decode('utf-8')
                                                except:
                                                    val=obj[i]
                                            else:
                                                try:
                                                    val= obj.decode('utf-8')
                                                except:
                                                    val= obj
                                            if '[' + regexname+'.param'+str(i+1) + '][DE]' in the_value:
                                                the_value=the_value.replace('[' + regexname+'.param'+str(i+1) + '][DE]', unescape(val))
                                            the_valueO[the_key]=the_value.replace('[' + regexname+'.param'+str(i+1) + ']', val)
                    # Substitute the same placeholder into the item template
                    # (XML-escaped for the plain form).
                    val=None
                    if isinstance(obj,tuple):
                        try:
                            val=obj[i].decode('utf-8')
                        except:
                            val=obj[i]
                    else:
                        try:
                            val=obj.decode('utf-8')
                        except:
                            val=obj
                    if '[' + regexname+'.param'+str(i+1) + '][DE]' in listrepeatT:
                        listrepeatT=listrepeatT.replace('[' + regexname+'.param'+str(i+1) + '][DE]',val)
                    listrepeatT=listrepeatT.replace('[' + regexname+'.param'+str(i+1) + ']',escape(val))
                # param0 is the 1-based match number.
                listrepeatT=listrepeatT.replace('[' + regexname+'.param'+str(0) + ']',str(rnumber))
                regex_xml=''
                if len(newcopy)>0:
                    # Re-serialize the substituted regex definitions and strip
                    # the temporary wrapper element.
                    regex_xml=d2x(newcopy,'lsproroot')
                    regex_xml=regex_xml.split('<lsproroot>')[1].split('</lsproroot')[0]
                try:
                    ln+='\n<item>%s\n%s</item>'%(listrepeatT,regex_xml)
                except: ln+='\n<item>%s\n%s</item>'%(listrepeatT.encode("utf-8"),regex_xml)
            except: traceback.print_exc(file=sys.stdout)
        addon_log(repr(ln))
        # Feed the generated <item> XML back through the normal lister.
        getData('','',ln)
        xbmcplugin.endOfDirectory(int(sys.argv[1]))
    else:
        url,setresolved = getRegexParsed(regexs, url)
        if url:
            if '$PLAYERPROXY$=' in url:
                # Proxied playback: proxy spec is ip:port or user:pass@ip:port.
                url,proxy=url.split('$PLAYERPROXY$=')
                print 'proxy',proxy
                #Jairox mod for proxy auth
                proxyuser = None
                proxypass = None
                if len(proxy) > 0 and '@' in proxy:
                    proxy = proxy.split(':')
                    proxyuser = proxy[0]
                    proxypass = proxy[1].split('@')[0]
                    proxyip = proxy[1].split('@')[1]
                    port = proxy[2]
                else:
                    proxyip,port=proxy.split(':')
                playmediawithproxy(url,name,iconimage,proxyip,port, proxyuser,proxypass) #jairox
            else:
                playsetresolved(url,name,iconimage,setresolved)
        else:
            xbmc.executebuiltin("XBMC.Notification(Exabyte TV,Falló extraer regex. - "+"this"+",4000,"+icon+")")
elif mode==18:
    # Mode 18: resolve and play through the youtube-dl service add-on.
    addon_log("youtubedl")
    try:
        import youtubedl
    except Exception:
        xbmc.executebuiltin("XBMC.Notification(Exabyte TV,Por favor [COLOR yellow]instala Youtube-dl[/COLOR] modulo ,10000,"")")
    stream_url=youtubedl.single_YD(url)
    playsetresolved(stream_url,name,iconimage)
elif mode==19:
    # Mode 19: resolve through urlresolver-style hosters.
    addon_log("Genesiscommonresolvers")
    playsetresolved (urlsolver(url),name,iconimage,True)
elif mode==21:
    # Mode 21: download whatever is currently playing.
    addon_log("download current file using youtube-dl service")
    ytdl_download('',name,'video')
elif mode==23:
    # Mode 23: download a specific URL.
    addon_log("get info then download")
    ytdl_download(url,name,'video')
elif mode==24:
    # Mode 24: audio-only download.
    addon_log("Audio only youtube download")
    ytdl_download(url,name,'audio')
elif mode==25:
    # Mode 25: forward a search to another plugin.
    addon_log("Searchin Other plugins")
    _search(url,name)
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==55:
    # Mode 55: disable the parental block after PIN verification.
    addon_log("enabled lock")
    parentalblockedpin =addon.getSetting('parentalblockedpin')
    keyboard = xbmc.Keyboard('','Enter Pin')
    keyboard.doModal()
    if not (keyboard.isConfirmed() == False):
        newStr = keyboard.getText()
        if newStr==parentalblockedpin:
            addon.setSetting('parentalblocked', "false")
            xbmc.executebuiltin("XBMC.Notification(Exabyte TV,Parental Block Disabled,5000,"+icon+")")
        else:
            xbmc.executebuiltin("XBMC.Notification(Exabyte TV,Wrong Pin??,5000,"+icon+")")
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==56:
    # Mode 56: enable the parental block (no PIN needed to lock).
    addon_log("disable lock")
    addon.setSetting('parentalblocked', "true")
    xbmc.executebuiltin("XBMC.Notification(Exabyte TV,Parental block enabled,5000,"+icon+")")
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode==53:
    # Mode 53: list another plugin's directory via JSON-RPC.
    addon_log("Requesting JSON-RPC Items")
    pluginquerybyJSON(url)
    #xbmcplugin.endOfDirectory(int(sys.argv[1]))
# Apply the view mode chosen earlier in the file, if any.
# NOTE(review): `viewmode` is defined above this chunk -- not visible here.
if not viewmode==None:
    print 'setting view mode'
    xbmc.executebuiltin("Container.SetViewMode(%s)"%viewmode)
|
billyevans/prefix_storage
|
refs/heads/master
|
check.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import httplib2
import sys
import md5
if __name__ == '__main__':
    # Verify the prefix-storage server: for each key read from stdin, GET
    # /<key> from localhost:<argv[1]> and check the body equals md5(key).
    # Raises on the first mismatch or non-200 status.
    count = 0
    for line in sys.stdin:
        # NOTE(review): one connection per key -- presumably deliberate to
        # avoid keep-alive assumptions, but wasteful for large inputs.
        conn = httplib2.HTTPConnectionWithTimeout("localhost", int(sys.argv[1]))
        key = line.rstrip()
        val = md5.md5(key)
        # NOTE(review): key is not URL-quoted; keys containing '/', '?' or
        # spaces would produce a malformed request path -- confirm key charset.
        conn.request("GET", "/" + key)
        res = conn.getresponse()
        if res.status != 200:
            raise Exception("Wrong status - {0}".format(res.status))
        serv_val = res.read()
        if val.hexdigest() != serv_val:
            raise Exception("wrong response for {0}: {1} != {2}".format(key, val.hexdigest(), serv_val))
        count += 1
    print("{0} keys checked.".format(count))
|
piensa/geonode
|
refs/heads/wm-develop
|
geonode/base/management/commands/lib/gn24_to_24.py
|
11
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import re
import datetime
try:
import json
except ImportError:
from django.utils import simplejson as json
class DefaultMangler(json.JSONDecoder):
    """JSON fixture decoder that shifts every record's primary key.

    Keyword arguments (basepk, owner, datastore, siteurl) configure the
    mangling and are stripped before the remaining positional arguments are
    passed to the stock JSONDecoder.
    """

    def __init__(self, *args, **kwargs):
        self.basepk = kwargs.get('basepk', -1)
        self.owner = kwargs.get('owner', 'admin')
        self.datastore = kwargs.get('datastore', '')
        self.siteurl = kwargs.get('siteurl', '')
        super(DefaultMangler, self).__init__(*args)

    def default(self, obj):
        # Let the base class default method raise the TypeError
        return json.JSONEncoder.default(self, obj)

    def decode(self, json_string):
        """Decode *json_string* (a fixture list) and offset each 'pk' by basepk."""
        records = super(DefaultMangler, self).decode(json_string)
        for record in records:
            record['pk'] += self.basepk
        return records
class ResourceBaseMangler(DefaultMangler):
    """Mangler for base.resourcebase fixtures.

    Reassigns ownership, rewrites layer distribution URLs into detail URLs
    on *siteurl*, and appends a generated upload-session record per layer.
    """

    def default(self, obj):
        # Let the base class default method raise the TypeError
        return json.JSONEncoder.default(self, obj)

    def decode(self, json_string):
        """Decode a resourcebase fixture list, mangle it, and append the
        generated upload sessions.

        NOTE(review): basepk is applied here on top of DefaultMangler.decode,
        which already shifted 'pk' -- the offset is applied twice.  Preserved
        as-is since every sibling mangler does the same.
        """
        records = super(ResourceBaseMangler, self).decode(json_string)
        sessions = []
        for record in records:
            record['pk'] = record['pk'] + self.basepk
            record['fields']['owner'] = [self.owner]
            if 'distribution_url' not in record['fields']:
                continue
            dist = record['fields']['distribution_url']
            if dist is not None and 'layers' in dist:
                try:
                    p = '(?P<protocol>http.*://)?(?P<host>[^:/ ]+).?(?P<port>[0-9]*)(?P<details_url>.*)'
                    matched = re.search(p, dist)
                    if 'http' in matched.group('protocol'):
                        record['fields']['detail_url'] = self.siteurl + matched.group('details_url')
                    else:
                        record['fields']['detail_url'] = self.siteurl + dist
                except:
                    # Unparseable URL (or protocol group is None): keep it verbatim.
                    record['fields']['detail_url'] = dist
            sessions.append(self.add_upload_session(record['pk'], record['fields']['owner']))
        records.extend(sessions)
        return records

    def add_upload_session(self, pk, owner):
        """Build a layers.uploadsession fixture mirroring resource *pk*."""
        return {
            'pk': pk,
            'model': 'layers.uploadsession',
            'fields': {
                'user': owner,
                'traceback': None,
                'context': None,
                'error': None,
                'processed': True,
                'date': datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"),
            },
        }
class LayerMangler(DefaultMangler):
    """Mangler for layers.layer fixtures: links each layer to its upload
    session, clears the service, and pins the datastore."""

    def default(self, obj):
        # Let the base class default method raise the TypeError
        return json.JSONEncoder.default(self, obj)

    def decode(self, json_string):
        """Decode a layer fixture list and rewrite pk/session/store fields.

        NOTE(review): basepk is applied on top of the shift already done by
        DefaultMangler.decode (same as the sibling manglers).
        """
        records = super(LayerMangler, self).decode(json_string)
        for record in records:
            record['pk'] += self.basepk
            # Upload sessions are generated with the same pk as the layer.
            record['fields']['upload_session'] = record['pk']
            record['fields']['service'] = None
            if self.datastore:
                record['fields']['store'] = self.datastore
            else:
                record['fields']['store'] = record['fields']['name']
        return records
class LayerAttributesMangler(DefaultMangler):
    """Mangler for layers.attribute fixtures: shifts both the attribute pk
    and its foreign key to the owning layer."""

    def default(self, obj):
        # Let the base class default method raise the TypeError
        return json.JSONEncoder.default(self, obj)

    def decode(self, json_string):
        """Decode an attribute fixture list and offset pk and layer FK.

        NOTE(review): basepk is applied on top of the shift already done by
        DefaultMangler.decode (same as the sibling manglers).
        """
        records = super(LayerAttributesMangler, self).decode(json_string)
        for record in records:
            record['pk'] += self.basepk
            record['fields']['layer'] += self.basepk
        return records
class MapLayersMangler(DefaultMangler):
    """Mangler for maps.maplayer fixtures: shifts both the map-layer pk and
    its foreign key to the owning map."""

    def default(self, obj):
        # Let the base class default method raise the TypeError
        return json.JSONEncoder.default(self, obj)

    def decode(self, json_string):
        """Decode a map-layer fixture list and offset pk and map FK.

        NOTE(review): basepk is applied on top of the shift already done by
        DefaultMangler.decode (same as the sibling manglers).
        """
        records = super(MapLayersMangler, self).decode(json_string)
        for record in records:
            record['pk'] += self.basepk
            record['fields']['map'] += self.basepk
        return records
|
sencha/chromium-spacewalk
|
refs/heads/master
|
tools/cr/cr/actions/action.py
|
56
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module for the Action plugin base class."""
import cr
class Action(cr.Plugin):
  """Base class for cr actions.

  Actions are the pluggable verbs that cr commands compose.  This base
  supplies the shared machinery: registering the command-line selector
  argument that chooses a concrete implementation, and probing whether the
  selected implementation is a no-op.
  """

  @classmethod
  def AddArguments(cls, command, parser):
    """Add this action's command-line arguments to *parser*."""
    cls.AddSelectorArg(command, parser)

  @classmethod
  def AddSelectorArg(cls, command, parser):
    """Register the argument that selects which implementation to use."""
    parser.add_argument(
        cls.SELECTOR_ARG,
        dest=cls.SELECTOR,
        choices=cls.Choices(),
        default=None,
        help=cls.SELECTOR_HELP + 'Overrides ' + cls.SELECTOR
    )

  @cr.Plugin.activemethod
  def Skipping(self):
    """Report whether the active implementation is the 'skip' no-op.

    Most actions ship a skip version selectable to suppress the action
    entirely.  Commands use this to prune their action sequences -- for
    instance, dropping steps that exist only to produce the inputs of an
    action that is being skipped.

    Returns:
      True if this implementation is a skip action.
    """
    return self.name == 'skip'
|
jammerful/buildbot
|
refs/heads/master
|
master/buildbot/test/unit/test_test_util_gpo.py
|
7
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import sys
import twisted
from twisted.internet import utils
from twisted.trial import reporter
from twisted.trial import unittest
from buildbot.test.util.gpo import Expect
from buildbot.test.util.gpo import GetProcessOutputMixin
class TestGPOMixin(unittest.TestCase):
# these tests use self.patch, but the SkipTest exception gets eaten, so
# explicitly skip things here.
if twisted.version.major <= 9 and sys.version_info[:2] == (2, 7):
skip = "unittest.TestCase.patch is not available"
    def runTestMethod(self, method):
        # Run *method* as the body of a fresh GetProcessOutputMixin-based
        # trial TestCase and return the TestResult it produced.
        class TestCase(GetProcessOutputMixin, unittest.TestCase):
            def setUp(self):
                self.setUpGetProcessOutput()
            def runTest(self):
                return method(self)
        self.testcase = TestCase()
        result = reporter.TestResult()
        self.testcase.run(result)  # This blocks
        return result
    def assertTestFailure(self, result, expectedFailure):
        # Assert *result* recorded exactly one FailTest failure (no errors),
        # and that its message contains *expectedFailure* when one is given.
        self.assertEqual(result.errors, [])
        self.assertEqual(len(result.failures), 1)
        self.assertTrue(result.failures[0][1].check(unittest.FailTest))
        if expectedFailure:
            self.assertSubstring(
                expectedFailure, result.failures[0][1].getErrorMessage())
def assertSuccessful(self, result):
if not result.wasSuccessful():
output = 'expected success'
if result.failures:
output += ('\ntest failed: %s' %
result.failures[0][1].getErrorMessage())
if result.errors:
output += ('\nerrors: %s' %
map(lambda x: x[1].value, result.errors))
raise self.failureException(output)
self.assertTrue(result.wasSuccessful())
def test_patch(self):
original_getProcessOutput = utils.getProcessOutput
original_getProcessOutputAndValue = utils.getProcessOutputAndValue
def method(testcase):
testcase.expectCommands()
self.assertEqual(utils.getProcessOutput,
testcase.patched_getProcessOutput)
self.assertEqual(utils.getProcessOutputAndValue,
testcase.patched_getProcessOutputAndValue)
result = self.runTestMethod(method)
self.assertSuccessful(result)
self.assertEqual(utils.getProcessOutput,
original_getProcessOutput)
self.assertEqual(utils.getProcessOutputAndValue,
original_getProcessOutputAndValue)
def test_methodChaining(self):
expect = Expect('command')
self.assertEqual(expect, expect.exit(0))
self.assertEqual(expect, expect.stdout("output"))
self.assertEqual(expect, expect.stderr("error"))
def test_gpo_oneCommand(self):
def method(testcase):
testcase.expectCommands(Expect("command"))
d = utils.getProcessOutput("command", ())
d.addCallback(self.assertEqual, '')
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertSuccessful(result)
def test_gpo_expectTwo_runOne(self):
def method(testcase):
testcase.expectCommands(Expect("command"))
testcase.expectCommands(Expect("command2"))
d = utils.getProcessOutput("command", ())
d.addCallback(self.assertEqual, '')
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "assert all expected commands were run")
def test_gpo_wrongCommand(self):
def method(testcase):
testcase.expectCommands(Expect("command2"))
d = utils.getProcessOutput("command", ())
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "unexpected command run")
# assert we have a meaningful message
self.assertTestFailure(result, "command2")
def test_gpo_wrongArgs(self):
def method(testcase):
testcase.expectCommands(Expect("command", "arg"))
d = utils.getProcessOutput("command", ("otherarg",))
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "unexpected command run")
def test_gpo_missingPath(self):
def method(testcase):
testcase.expectCommands(Expect("command", "arg").path("/home"))
d = utils.getProcessOutput("command", ("otherarg",))
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "unexpected command run")
def test_gpo_wrongPath(self):
def method(testcase):
testcase.expectCommands(Expect("command", "arg").path("/home"))
d = utils.getProcessOutput("command", ("otherarg",), path="/work")
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "unexpected command run")
def test_gpo_notCurrentPath(self):
def method(testcase):
testcase.expectCommands(Expect("command", "arg"))
d = utils.getProcessOutput("command", ("otherarg",), path="/work")
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "unexpected command run")
def test_gpo_errorOutput(self):
def method(testcase):
testcase.expectCommands(Expect("command").stderr("some test"))
d = testcase.assertFailure(
utils.getProcessOutput("command", ()), [IOError])
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "got stderr: 'some test'")
def test_gpo_errorOutput_errtoo(self):
def method(testcase):
testcase.expectCommands(Expect("command").stderr("some test"))
d = utils.getProcessOutput("command", (), errortoo=True)
d.addCallback(testcase.assertEqual, "some test")
return d
result = self.runTestMethod(method)
self.assertSuccessful(result)
def test_gpo_exitIgnored(self):
def method(testcase):
testcase.expectCommands(Expect("command").exit(1))
d = utils.getProcessOutput("command", ())
d.addCallback(self.assertEqual, '')
return d
result = self.runTestMethod(method)
self.assertSuccessful(result)
def test_gpo_output(self):
def method(testcase):
testcase.expectCommands(Expect("command").stdout("stdout"))
d = utils.getProcessOutput("command", ())
d.addCallback(testcase.assertEqual, "stdout")
return d
result = self.runTestMethod(method)
self.assertSuccessful(result)
def test_gpo_outputAndError(self):
def method(testcase):
testcase.expectCommands(
Expect("command").stdout("stdout").stderr("stderr"))
d = utils.getProcessOutput("command", (), errortoo=True)
@d.addCallback
def cb(res):
testcase.assertSubstring("stdout", res)
testcase.assertSubstring("stderr", res)
return d
result = self.runTestMethod(method)
self.assertSuccessful(result)
def test_gpo_environ_success(self):
def method(testcase):
testcase.expectCommands(Expect("command"))
testcase.addGetProcessOutputExpectEnv({'key': 'value'})
d = utils.getProcessOutput("command", (), env={'key': 'value'})
d.addCallback(self.assertEqual, '')
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertSuccessful(result)
def test_gpo_environ_wrongValue(self):
def method(testcase):
testcase.expectCommands(Expect("command"))
testcase.addGetProcessOutputExpectEnv({'key': 'value'})
d = utils.getProcessOutput(
"command", (), env={'key': 'wrongvalue'})
return d
result = self.runTestMethod(method)
self.assertTestFailure(
result, "Expected environment to have key = 'value'")
def test_gpo_environ_missing(self):
def method(testcase):
testcase.expectCommands(Expect("command"))
testcase.addGetProcessOutputExpectEnv({'key': 'value'})
d = utils.getProcessOutput("command", ())
return d
result = self.runTestMethod(method)
self.assertTestFailure(
result, "Expected environment to have key = 'value'")
def test_gpoav_oneCommand(self):
def method(testcase):
testcase.expectCommands(Expect("command"))
d = utils.getProcessOutputAndValue("command", ())
d.addCallback(self.assertEqual, ('', '', 0))
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertSuccessful(result)
def test_gpoav_expectTwo_runOne(self):
def method(testcase):
testcase.expectCommands(Expect("command"))
testcase.expectCommands(Expect("command2"))
d = utils.getProcessOutputAndValue("command", ())
d.addCallback(self.assertEqual, ('', '', 0))
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "assert all expected commands were run")
def test_gpoav_wrongCommand(self):
def method(testcase):
testcase.expectCommands(Expect("command2"))
d = utils.getProcessOutputAndValue("command", ())
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "unexpected command run")
def test_gpoav_wrongArgs(self):
def method(testcase):
testcase.expectCommands(Expect("command", "arg"))
d = utils.getProcessOutputAndValue("command", ("otherarg",))
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "unexpected command run")
def test_gpoav_missingPath(self):
def method(testcase):
testcase.expectCommands(Expect("command", "arg").path("/home"))
d = utils.getProcessOutputAndValue("command", ("otherarg",))
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "unexpected command run")
def test_gpoav_wrongPath(self):
def method(testcase):
testcase.expectCommands(Expect("command", "arg").path("/home"))
d = utils.getProcessOutputAndValue(
"command", ("otherarg",), path="/work")
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "unexpected command run")
def test_gpoav_notCurrentPath(self):
def method(testcase):
testcase.expectCommands(Expect("command", "arg"))
d = utils.getProcessOutputAndValue(
"command", ("otherarg",), path="/work")
d.addCallback(lambda _: testcase.assertAllCommandsRan())
return d
result = self.runTestMethod(method)
self.assertTestFailure(result, "unexpected command run")
def test_gpoav_errorOutput(self):
def method(testcase):
testcase.expectCommands(Expect("command").stderr("some test"))
d = utils.getProcessOutputAndValue("command", ())
d.addCallback(self.assertEqual, ('', 'some test', 0))
return d
result = self.runTestMethod(method)
self.assertSuccessful(result)
def test_gpoav_exit(self):
def method(testcase):
testcase.expectCommands(Expect("command").exit(1))
d = utils.getProcessOutputAndValue("command", ())
d.addCallback(self.assertEqual, ('', '', 1))
return d
result = self.runTestMethod(method)
self.assertSuccessful(result)
def test_gpoav_output(self):
def method(testcase):
testcase.expectCommands(Expect("command").stdout("stdout"))
d = utils.getProcessOutputAndValue("command", ())
d.addCallback(testcase.assertEqual, ("stdout", '', 0))
return d
result = self.runTestMethod(method)
self.assertSuccessful(result)
def test_gpoav_outputAndError(self):
def method(testcase):
testcase.expectCommands(
Expect("command").stdout("stdout").stderr("stderr"))
d = utils.getProcessOutputAndValue("command", ())
d.addCallback(testcase.assertEqual, ("stdout", 'stderr', 0))
return d
result = self.runTestMethod(method)
self.assertSuccessful(result)
|
vietpn/ghost_nodejs
|
refs/heads/master
|
node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/formatters/terminal.py
|
363
|
# -*- coding: utf-8 -*-
"""
pygments.formatters.terminal
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Formatter for terminal output with ANSI sequences.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
from pygments.formatter import Formatter
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Token, Whitespace
from pygments.console import ansiformat
from pygments.util import get_choice_opt
__all__ = ['TerminalFormatter']
#: Map token types to a tuple of color values for light and dark
#: backgrounds.
# Each entry maps a token type to a (light-background, dark-background) color
# name pair; '' means the terminal's default color.  Per ansiformat's naming,
# _name_ requests underline, *name* bold, and '**' bold in the default color.
TERMINAL_COLORS = {
    Token: ('', ''),
    Whitespace: ('lightgray', 'darkgray'),
    Comment: ('lightgray', 'darkgray'),
    Comment.Preproc: ('teal', 'turquoise'),
    Keyword: ('darkblue', 'blue'),
    Keyword.Type: ('teal', 'turquoise'),
    Operator.Word: ('purple', 'fuchsia'),
    Name.Builtin: ('teal', 'turquoise'),
    Name.Function: ('darkgreen', 'green'),
    Name.Namespace: ('_teal_', '_turquoise_'),
    Name.Class: ('_darkgreen_', '_green_'),
    Name.Exception: ('teal', 'turquoise'),
    Name.Decorator: ('darkgray', 'lightgray'),
    Name.Variable: ('darkred', 'red'),
    Name.Constant: ('darkred', 'red'),
    Name.Attribute: ('teal', 'turquoise'),
    Name.Tag: ('blue', 'blue'),
    String: ('brown', 'brown'),
    Number: ('darkblue', 'blue'),
    Generic.Deleted: ('red', 'red'),
    Generic.Inserted: ('darkgreen', 'green'),
    Generic.Heading: ('**', '**'),
    Generic.Subheading: ('*purple*', '*fuchsia*'),
    Generic.Error: ('red', 'red'),
    Error: ('_red_', '_red_'),
}
class TerminalFormatter(Formatter):
    r"""
    Format tokens with ANSI color sequences, for output in a text console.
    Color sequences are terminated at newlines, so that paging the output
    works correctly.
    The `get_style_defs()` method doesn't do anything special since there is
    no support for common styles.
    Options accepted:
    `bg`
        Set to ``"light"`` or ``"dark"`` depending on the terminal's background
        (default: ``"light"``).
    `colorscheme`
        A dictionary mapping token types to (lightbg, darkbg) color names or
        ``None`` (default: ``None`` = use builtin colorscheme).
    """
    name = 'Terminal'
    aliases = ['terminal', 'console']
    filenames = []

    def __init__(self, **options):
        Formatter.__init__(self, **options)
        # darkbg doubles as the index (0 = light, 1 = dark) into each
        # (lightbg, darkbg) color pair of the scheme.
        self.darkbg = get_choice_opt(options, 'bg',
                                     ['light', 'dark'], 'light') == 'dark'
        self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS

    def format(self, tokensource, outfile):
        # hack: if the output is a terminal and has an encoding set,
        # use that to avoid unicode encode problems
        if not self.encoding and hasattr(outfile, "encoding") and \
           hasattr(outfile, "isatty") and outfile.isatty() and \
           sys.version_info < (3,):
            self.encoding = outfile.encoding
        return Formatter.format(self, tokensource, outfile)

    def format_unencoded(self, tokensource, outfile):
        for ttype, value in tokensource:
            # Walk up the token hierarchy until the scheme has an entry.
            # The builtin scheme maps the root Token type, which terminates
            # the walk; custom schemes presumably must do the same — a
            # scheme without a root entry would loop here (TODO confirm).
            color = self.colorscheme.get(ttype)
            while color is None:
                ttype = ttype[:-1]
                color = self.colorscheme.get(ttype)
            if color:
                color = color[self.darkbg]
                # Emit the value line by line so the escape sequence is
                # re-issued after every newline (keeps pagers working).
                spl = value.split('\n')
                for line in spl[:-1]:
                    if line:
                        outfile.write(ansiformat(color, line))
                    outfile.write('\n')
                if spl[-1]:
                    outfile.write(ansiformat(color, spl[-1]))
            else:
                # '' entry: pass the text through uncolored.
                outfile.write(value)
|
ric2b/Vivaldi-browser
|
refs/heads/master
|
chromium/build/android/pylib/results/flakiness_dashboard/results_uploader.py
|
20
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Uploads the results to the flakiness dashboard server."""
# pylint: disable=E1002,R0201
import logging
import os
import shutil
import tempfile
import xml
from devil.utils import cmd_helper
from pylib.constants import host_paths
from pylib.results.flakiness_dashboard import json_results_generator
from pylib.utils import repo_utils
class JSONResultsGenerator(json_results_generator.JSONResultsGeneratorBase):
  """Writes test results to a JSON file and handles uploading that file to
  the test results server.
  """

  def __init__(self, builder_name, build_name, build_number, tmp_folder,
               test_results_map, test_results_server, test_type, master_name):
    super(JSONResultsGenerator, self).__init__(
        builder_name=builder_name,
        build_name=build_name,
        build_number=build_number,
        results_file_base_path=tmp_folder,
        builder_base_url=None,
        test_results_map=test_results_map,
        svn_repositories=(('webkit', 'third_party/WebKit'),
                          ('chrome', '.')),
        test_results_server=test_results_server,
        test_type=test_type,
        master_name=master_name)

  #override
  def _GetModifierChar(self, test_name):
    """Returns the result modifier for test_name, or NO_DATA_RESULT when the
    test produced no results."""
    if test_name not in self._test_results_map:
      return self.__class__.NO_DATA_RESULT
    return self._test_results_map[test_name].modifier

  #override
  def _GetSVNRevision(self, in_directory):
    """Returns the git/svn revision for the given directory.

    Args:
      in_directory: The directory relative to src.
    """
    # A bare 'import xml' does not import xml.dom.minidom or
    # xml.parsers.expat; import them explicitly so the svn branch below
    # cannot fail with AttributeError.
    import xml.dom.minidom
    import xml.parsers.expat

    def _is_git_directory(in_directory):
      """Returns true if the given directory is in a git repository.

      Args:
        in_directory: The directory path to be tested.
      """
      if os.path.exists(os.path.join(in_directory, '.git')):
        return True
      parent = os.path.dirname(in_directory)
      if parent == host_paths.DIR_SOURCE_ROOT or parent == in_directory:
        return False
      return _is_git_directory(parent)

    in_directory = os.path.join(host_paths.DIR_SOURCE_ROOT, in_directory)
    if not os.path.exists(os.path.join(in_directory, '.svn')):
      if _is_git_directory(in_directory):
        return repo_utils.GetGitHeadSHA1(in_directory)
      else:
        return ''
    output = cmd_helper.GetCmdOutput(['svn', 'info', '--xml'], cwd=in_directory)
    try:
      dom = xml.dom.minidom.parseString(output)
      return dom.getElementsByTagName('entry')[0].getAttribute('revision')
    except xml.parsers.expat.ExpatError:
      return ''
class ResultsUploader(object):
  """Handles uploading buildbot tests results to the flakiness dashboard."""

  def __init__(self, tests_type):
    # Buildbot exports these for every build; they identify where the
    # results came from on the dashboard.
    self._build_number = os.environ.get('BUILDBOT_BUILDNUMBER')
    self._master_name = os.environ.get('BUILDBOT_MASTERNAME')
    self._builder_name = os.environ.get('BUILDBOT_BUILDERNAME')
    self._tests_type = tests_type
    self._build_name = None

    if not self._build_number or not self._builder_name:
      # Fixed the implicit string concatenation: the original message was
      # missing the space and read '...the serverfrom your local machine.'
      raise Exception('You should not be uploading tests results to the '
                      'server from your local machine.')

    upstream = (tests_type != 'Chromium_Android_Instrumentation')
    if not upstream:
      self._build_name = 'chromium-android'
      buildbot_branch = os.environ.get('BUILDBOT_BRANCH')
      if not buildbot_branch:
        buildbot_branch = 'master'
      else:
        # Ensure there's no leading "origin/"
        buildbot_branch = buildbot_branch[buildbot_branch.find('/') + 1:]
      self._master_name = '%s-%s' % (self._build_name, buildbot_branch)

    self._test_results_map = {}

  def AddResults(self, test_results):
    """Converts test_results into TestResult objects keyed by test name."""
    # TODO(frankf): Differentiate between fail/crash/timeouts.
    conversion_map = [
        (test_results.GetPass(), False,
         json_results_generator.JSONResultsGeneratorBase.PASS_RESULT),
        (test_results.GetFail(), True,
         json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
        (test_results.GetCrash(), True,
         json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
        (test_results.GetTimeout(), True,
         json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
        (test_results.GetUnknown(), True,
         json_results_generator.JSONResultsGeneratorBase.NO_DATA_RESULT),
    ]
    for results_list, failed, modifier in conversion_map:
      for single_test_result in results_list:
        test_result = json_results_generator.TestResult(
            test=single_test_result.GetName(),
            failed=failed,
            elapsed_time=single_test_result.GetDuration() / 1000)
        # The WebKit TestResult object sets the modifier it based on test name.
        # Since we don't use the same test naming convention as WebKit the
        # modifier will be wrong, so we need to overwrite it.
        test_result.modifier = modifier
        self._test_results_map[single_test_result.GetName()] = test_result

  def Upload(self, test_results_server):
    """Generates the JSON files and uploads them to test_results_server.

    Upload failures are logged, not raised: the dashboard is best-effort.
    """
    if not self._test_results_map:
      return

    tmp_folder = tempfile.mkdtemp()
    try:
      results_generator = JSONResultsGenerator(
          builder_name=self._builder_name,
          build_name=self._build_name,
          build_number=self._build_number,
          tmp_folder=tmp_folder,
          test_results_map=self._test_results_map,
          test_results_server=test_results_server,
          test_type=self._tests_type,
          master_name=self._master_name)

      json_files = ["incremental_results.json", "times_ms.json"]
      results_generator.GenerateJSONOutput()
      results_generator.GenerateTimesMSFile()
      results_generator.UploadJSONFiles(json_files)
    except Exception as e:  # pylint: disable=broad-except
      logging.error("Uploading results to test server failed: %s.", e)
    finally:
      shutil.rmtree(tmp_folder)
def Upload(results, flakiness_dashboard_server, test_type):
  """Reports test results to the flakiness dashboard for Chrome for Android.

  Args:
    results: test results.
    flakiness_dashboard_server: the server to upload the results to.
    test_type: the type of the tests (as displayed by the flakiness dashboard).
  """
  dashboard_uploader = ResultsUploader(test_type)
  dashboard_uploader.AddResults(results)
  dashboard_uploader.Upload(flakiness_dashboard_server)
|
ceos-seo/Data_Cube_v2
|
refs/heads/master
|
agdc-v2/examples/ndexpr/ndvi_mask.py
|
1
|
# ------------------------------------------------------------------------------
# Name: ndvi_mask.py
# Purpose: ndvi mask example for ndexpr
# pre-integration into Analytics Engine & Execution Engine.
# post-integration with Data Access API.
#
# Author: Peter Wang
#
# Created: 22 December 2015
# Copyright: 2015 Commonwealth Scientific and Industrial Research Organisation
# (CSIRO)
# License: This software is open source under the Apache v2.0 License
# as provided in the accompanying LICENSE file or available from
# https://github.com/data-cube/agdc-v2/blob/master/LICENSE
# By continuing, you acknowledge that you have read and you accept
# and will abide by the terms of the License.
#
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
# When run directly (not as a package module), put the repository root three
# levels up on sys.path so the 'datacube' imports below resolve.
if __name__ == '__main__' and __package__ is None:
    from os import sys, path
    sys.path.append(path.dirname(path.dirname(path.dirname(path.abspath(__file__)))))
from datetime import datetime
import xarray as xr
from datacube.api import API
from datacube.ndexpr import NDexpr
def main():
    """Fetch LANDSAT_5 red/nir and pixel-quality data, compute NDVI with
    NDexpr, apply a pixel-quality mask, and print the masked values."""
    print('Instantiating API and NDexpr')
    g = API()
    nd = NDexpr()

    print('Retrieving data from API')
    # construct data request parameters for band_30 and band_40
    data_request_descriptor = {
        'platform': 'LANDSAT_5',
        'product': 'nbar',
        'variables': ('red', 'nir'),
        'dimensions': {
            'longitude': {
                'range': (149.07, 149.18)
            },
            'latitude': {
                'range': (-35.32, -35.28)
            },
            'time': {
                'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))
            }
        }
    }
    # get data
    d1 = g.get_data(data_request_descriptor)

    # construct data request parameters for PQ
    pq_request_descriptor = {
        'platform': 'LANDSAT_5',
        'product': 'pqa',
        # Use a 1-tuple: the original ('pixelquality') without a trailing
        # comma is just a parenthesized string, inconsistent with the
        # ('red', 'nir') tuple in the request above.
        'variables': ('pixelquality',),
        'dimensions': {
            'longitude': {
                'range': (149.07, 149.18)
            },
            'latitude': {
                'range': (-35.32, -35.28)
            },
            'time': {
                'range': (datetime(1990, 1, 1), datetime(1990, 12, 31))
            }
        }
    }
    # get data
    d2 = g.get_data(pq_request_descriptor)

    # The following 3 lines shouldn't be done like this
    # Currently done like this for the sake of the example.
    b30 = d1['arrays']['red']
    b40 = d1['arrays']['nir']
    pq = d2['arrays']['pixelquality']

    print('NDexpr demo begins here')
    # perform ndvi as expressed in this language.
    ndvi = nd.evaluate('((b40 - b30) / (b40 + b30))')
    # perform mask on ndvi as expressed in this language.
    masked_ndvi = nd.evaluate('ndvi{(pq == 32767) | (pq == 16383) | (pq == 2457)}')
    print(masked_ndvi.values)
# Script entry point.
if __name__ == '__main__':
    main()
|
alexm92/sentry
|
refs/heads/master
|
src/sentry/celery.py
|
5
|
from __future__ import absolute_import
from django.conf import settings
from celery import Celery
from celery.app.task import Task
from sentry.utils import metrics
DB_SHARED_THREAD = """\
DatabaseWrapper objects created in a thread can only \
be used in that same thread. The object with alias '%s' \
was created in thread id %s and this is thread id %s.\
"""
def patch_thread_ident():
    # monkey patch django.
    # This patch make sure that we use real threads to get the ident which
    # is going to happen if we are using gevent or eventlet.
    # -- patch taken from gunicorn
    # Idempotent: a 'called' attribute on the function guards re-entry.
    if getattr(patch_thread_ident, 'called', False):
        return
    try:
        from django.db.backends import BaseDatabaseWrapper, DatabaseError

        if 'validate_thread_sharing' in BaseDatabaseWrapper.__dict__:
            from six.moves import _thread as thread
            _get_ident = thread.get_ident

            # Wrap __init__ to record the real thread id at creation time.
            __old__init__ = BaseDatabaseWrapper.__init__

            def _init(self, *args, **kwargs):
                __old__init__(self, *args, **kwargs)
                self._thread_ident = _get_ident()

            # Replacement check compares against the recorded real ident
            # instead of whatever green-thread ident Django captured.
            def _validate_thread_sharing(self):
                if (not self.allow_thread_sharing
                        and self._thread_ident != _get_ident()):
                    raise DatabaseError(
                        DB_SHARED_THREAD % (
                            self.alias, self._thread_ident, _get_ident()),
                    )

            BaseDatabaseWrapper.__init__ = _init
            BaseDatabaseWrapper.validate_thread_sharing = \
                _validate_thread_sharing

        patch_thread_ident.called = True
    except ImportError:
        # Django (or six) not importable; nothing to patch.
        pass


# Apply the patch at import time, before any connections are created.
patch_thread_ident()
class SentryTask(Task):
    """Celery Task that reports job-scheduling timing to the metrics backend."""

    def apply_async(self, *args, **kwargs):
        # Time how long enqueueing takes, tagged with this task's name.
        delay_timer = metrics.timer('jobs.delay', instance=self.name)
        with delay_timer:
            return super(SentryTask, self).apply_async(*args, **kwargs)
class SentryCelery(Celery):
    # Use the timing-instrumented task class for all tasks of this app.
    task_cls = SentryTask

    def on_configure(self):
        # Wire Raven (Sentry's own error reporting) into Celery's logging
        # and error signals when the app is configured.
        from raven.contrib.django.models import client
        from raven.contrib.celery import register_signal, register_logger_signal

        # register a custom filter to filter out duplicate logs
        register_logger_signal(client)

        # hook into the Celery error handler
        register_signal(client)
# Module-level Celery app: configured from Django settings, with tasks
# auto-discovered from every installed Django app.
app = SentryCelery('sentry')
app.config_from_object(settings)
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
|
yourcelf/btb
|
refs/heads/master
|
scanblog/subscriptions/migrations/0002_auto_20150429_1721.py
|
2
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Sets NotificationBlacklist.email max_length to 254 (the modern Django
    # EmailField default, matching the RFC 5321 address-length limit).

    dependencies = [
        ('subscriptions', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notificationblacklist',
            name='email',
            field=models.EmailField(max_length=254),
        ),
    ]
|
2014c2g14/2014c2
|
refs/heads/master
|
w2/static/Brython2.0.0-20140209-164925/Lib/xml/etree/ElementInclude.py
|
784
|
#
# ElementTree
# $Id: ElementInclude.py 3375 2008-02-13 08:05:08Z fredrik $
#
# limited xinclude support for element trees
#
# history:
# 2003-08-15 fl created
# 2003-11-14 fl fixed default loader
#
# Copyright (c) 2003-2004 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2008 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/psf/license for licensing details.
##
# Limited XInclude support for the ElementTree package.
##
import copy
from . import ElementTree
# XInclude namespace in Clark notation, plus the two qualified tag names
# this module recognizes.
XINCLUDE = "{http://www.w3.org/2001/XInclude}"
XINCLUDE_INCLUDE = XINCLUDE + "include"
XINCLUDE_FALLBACK = XINCLUDE + "fallback"
##
# Fatal include error.
class FatalIncludeError(SyntaxError):
    """Raised when an include cannot be expanded or the tree contains
    malformed XInclude elements."""
    pass
##
# Default loader. This loader reads an included resource from disk.
#
# @param href Resource reference.
# @param parse Parse mode. Either "xml" or "text".
# @param encoding Optional text encoding (UTF-8 by default for "text").
# @return The expanded resource. If the parse mode is "xml", this
# is an ElementTree instance. If the parse mode is "text", this
# is a Unicode string. If the loader fails, it can return None
# or raise an IOError exception.
# @throws IOError If the loader fails to load the resource.
def default_loader(href, parse, encoding=None):
    """Read the included resource *href* from disk.

    *parse* is either "xml" or "text"; *encoding* is the text encoding
    (UTF-8 by default for "text").  Returns the parsed tree's root element
    for "xml", or a string for "text".  May raise IOError on load failure.
    """
    if parse == "xml":
        # 'with' guarantees the file is closed even if parsing raises
        # (the original leaked the handle on a parse error).
        with open(href, 'rb') as file:
            data = ElementTree.parse(file).getroot()
    else:
        if not encoding:
            encoding = 'UTF-8'
        with open(href, 'r', encoding=encoding) as file:
            data = file.read()
    return data
##
# Expand XInclude directives.
#
# @param elem Root element.
# @param loader Optional resource loader. If omitted, it defaults
# to {@link default_loader}. If given, it should be a callable
# that implements the same interface as <b>default_loader</b>.
# @throws FatalIncludeError If the function fails to include a given
# resource, or if the tree contains malformed XInclude elements.
# @throws IOError If the function fails to load a given resource.
def include(elem, loader=None):
    """Expand XInclude directives in-place under *elem*, using *loader*
    (default_loader if omitted) to fetch referenced resources."""
    if loader is None:
        loader = default_loader
    # look for xinclude elements
    i = 0
    while i < len(elem):
        e = elem[i]
        if e.tag == XINCLUDE_INCLUDE:
            # process xinclude directive
            href = e.get("href")
            parse = e.get("parse", "xml")
            if parse == "xml":
                node = loader(href, parse)
                if node is None:
                    raise FatalIncludeError(
                        "cannot load %r as %r" % (href, parse)
                        )
                node = copy.copy(node)
                # Keep any text that followed the include element.
                if e.tail:
                    node.tail = (node.tail or "") + e.tail
                # Replace the include element with the loaded subtree.
                elem[i] = node
            elif parse == "text":
                text = loader(href, parse, e.get("encoding"))
                if text is None:
                    raise FatalIncludeError(
                        "cannot load %r as %r" % (href, parse)
                        )
                # Splice the text into the previous sibling's tail (or the
                # parent's text when this is the first child), then delete
                # the include element and re-check index i without
                # advancing, since the children shifted left.
                if i:
                    node = elem[i-1]
                    node.tail = (node.tail or "") + text + (e.tail or "")
                else:
                    elem.text = (elem.text or "") + text + (e.tail or "")
                del elem[i]
                continue
            else:
                raise FatalIncludeError(
                    "unknown parse type in xi:include tag (%r)" % parse
                    )
        elif e.tag == XINCLUDE_FALLBACK:
            # Fallbacks are only legal inside xi:include, which is replaced
            # above, so encountering one here is a structural error.
            raise FatalIncludeError(
                "xi:fallback tag must be child of xi:include (%r)" % e.tag
                )
        else:
            # Ordinary element: expand includes in its subtree recursively.
            include(e, loader)
        i = i + 1
|
efortuna/AndroidSDKClone
|
refs/heads/master
|
ndk/prebuilt/linux-x86_64/lib/python2.7/test/test_pipes.py
|
134
|
import pipes
import os
import string
import unittest
from test.test_support import TESTFN, run_unittest, unlink, reap_children
# The pipes module drives /bin/sh pipelines, so these tests are posix-only.
if os.name != 'posix':
    raise unittest.SkipTest('pipes module only works on posix')

TESTFN2 = TESTFN + "2"

# tr a-z A-Z is not portable, so make the ranges explicit
s_command = 'tr %s %s' % (string.ascii_lowercase, string.ascii_uppercase)
class SimplePipeTests(unittest.TestCase):
def tearDown(self):
for f in (TESTFN, TESTFN2):
unlink(f)
def testSimplePipe1(self):
t = pipes.Template()
t.append(s_command, pipes.STDIN_STDOUT)
f = t.open(TESTFN, 'w')
f.write('hello world #1')
f.close()
with open(TESTFN) as f:
self.assertEqual(f.read(), 'HELLO WORLD #1')
def testSimplePipe2(self):
with open(TESTFN, 'w') as f:
f.write('hello world #2')
t = pipes.Template()
t.append(s_command + ' < $IN > $OUT', pipes.FILEIN_FILEOUT)
t.copy(TESTFN, TESTFN2)
with open(TESTFN2) as f:
self.assertEqual(f.read(), 'HELLO WORLD #2')
def testSimplePipe3(self):
with open(TESTFN, 'w') as f:
f.write('hello world #2')
t = pipes.Template()
t.append(s_command + ' < $IN', pipes.FILEIN_STDOUT)
with t.open(TESTFN, 'r') as f:
self.assertEqual(f.read(), 'HELLO WORLD #2')
def testEmptyPipeline1(self):
# copy through empty pipe
d = 'empty pipeline test COPY'
with open(TESTFN, 'w') as f:
f.write(d)
with open(TESTFN2, 'w') as f:
f.write('')
t=pipes.Template()
t.copy(TESTFN, TESTFN2)
with open(TESTFN2) as f:
self.assertEqual(f.read(), d)
def testEmptyPipeline2(self):
# read through empty pipe
d = 'empty pipeline test READ'
with open(TESTFN, 'w') as f:
f.write(d)
t=pipes.Template()
with t.open(TESTFN, 'r') as f:
self.assertEqual(f.read(), d)
def testEmptyPipeline3(self):
# write through empty pipe
d = 'empty pipeline test WRITE'
t = pipes.Template()
with t.open(TESTFN, 'w') as f:
f.write(d)
with open(TESTFN) as f:
self.assertEqual(f.read(), d)
def testQuoting(self):
safeunquoted = string.ascii_letters + string.digits + '@%_-+=:,./'
unsafe = '"`$\\!'
self.assertEqual(pipes.quote(''), "''")
self.assertEqual(pipes.quote(safeunquoted), safeunquoted)
self.assertEqual(pipes.quote('test file name'), "'test file name'")
for u in unsafe:
self.assertEqual(pipes.quote('test%sname' % u),
"'test%sname'" % u)
for u in unsafe:
self.assertEqual(pipes.quote("test%s'name'" % u),
"'test%s'\"'\"'name'\"'\"''" % u)
def testRepr(self):
t = pipes.Template()
self.assertEqual(repr(t), "<Template instance, steps=[]>")
t.append('tr a-z A-Z', pipes.STDIN_STDOUT)
self.assertEqual(repr(t),
"<Template instance, steps=[('tr a-z A-Z', '--')]>")
def testSetDebug(self):
t = pipes.Template()
t.debug(False)
self.assertEqual(t.debugging, False)
t.debug(True)
self.assertEqual(t.debugging, True)
def testReadOpenSink(self):
# check calling open('r') on a pipe ending with
# a sink raises ValueError
t = pipes.Template()
t.append('boguscmd', pipes.SINK)
self.assertRaises(ValueError, t.open, 'bogusfile', 'r')
def testWriteOpenSource(self):
# check calling open('w') on a pipe ending with
# a source raises ValueError
t = pipes.Template()
t.prepend('boguscmd', pipes.SOURCE)
self.assertRaises(ValueError, t.open, 'bogusfile', 'w')
def testBadAppendOptions(self):
    """Template.append rejects malformed commands and step kinds."""
    template = pipes.Template()
    # Commands must be strings.
    self.assertRaises(TypeError, template.append, 7, pipes.STDIN_STDOUT)
    # Unknown kind codes are rejected.
    self.assertRaises(ValueError, template.append, 'boguscmd', 'xx')
    # A source can only begin a pipeline, never be appended.
    self.assertRaises(ValueError, template.append, 'boguscmd', pipes.SOURCE)
    # At most one sink may terminate a pipeline.
    template = pipes.Template()
    template.append('boguscmd', pipes.SINK)
    self.assertRaises(ValueError, template.append, 'boguscmd', pipes.SINK)
    # File-input kinds require $IN in the command string...
    template = pipes.Template()
    self.assertRaises(ValueError, template.append, 'boguscmd $OUT',
                      pipes.FILEIN_FILEOUT)
    template = pipes.Template()
    self.assertRaises(ValueError, template.append, 'boguscmd',
                      pipes.FILEIN_STDOUT)
    # ...and file-output kinds require $OUT.
    template = pipes.Template()
    self.assertRaises(ValueError, template.append, 'boguscmd $IN',
                      pipes.FILEIN_FILEOUT)
    template = pipes.Template()
    self.assertRaises(ValueError, template.append, 'boguscmd',
                      pipes.STDIN_FILEOUT)
def testBadPrependOptions(self):
    """Template.prepend rejects malformed commands and step kinds."""
    template = pipes.Template()
    # Commands must be strings.
    self.assertRaises(TypeError, template.prepend, 7, pipes.STDIN_STDOUT)
    # Unknown kind codes are rejected.
    self.assertRaises(ValueError, template.prepend, 'tr a-z A-Z', 'xx')
    # A sink can only end a pipeline, never be prepended.
    self.assertRaises(ValueError, template.prepend, 'boguscmd', pipes.SINK)
    # At most one source may begin a pipeline.
    template = pipes.Template()
    template.prepend('boguscmd', pipes.SOURCE)
    self.assertRaises(ValueError, template.prepend, 'boguscmd', pipes.SOURCE)
    # File-input kinds require $IN in the command string...
    template = pipes.Template()
    self.assertRaises(ValueError, template.prepend, 'boguscmd $OUT',
                      pipes.FILEIN_FILEOUT)
    template = pipes.Template()
    self.assertRaises(ValueError, template.prepend, 'boguscmd',
                      pipes.FILEIN_STDOUT)
    # ...and file-output kinds require $OUT.
    template = pipes.Template()
    self.assertRaises(ValueError, template.prepend, 'boguscmd $IN',
                      pipes.FILEIN_FILEOUT)
    template = pipes.Template()
    self.assertRaises(ValueError, template.prepend, 'boguscmd',
                      pipes.STDIN_FILEOUT)
def testBadOpenMode(self):
    """Template.open accepts only the 'r' and 'w' modes."""
    template = pipes.Template()
    self.assertRaises(ValueError, template.open, 'bogusfile', 'x')
def testClone(self):
    """clone() returns an independent but equal copy of the template."""
    original = pipes.Template()
    original.append('tr a-z A-Z', pipes.STDIN_STDOUT)
    duplicate = original.clone()
    # Distinct objects...
    self.assertNotEqual(id(original), id(duplicate))
    # ...with equal but separately-owned step lists...
    self.assertEqual(original.steps, duplicate.steps)
    self.assertNotEqual(id(original.steps), id(duplicate.steps))
    # ...and the same debug setting.
    self.assertEqual(original.debugging, duplicate.debugging)
def test_main():
    """Run the pipes test suite, then reap any child processes the pipeline
    tests may have left behind."""
    run_unittest(SimplePipeTests)
    reap_children()
# Allow running this test file directly from the command line.
if __name__ == "__main__":
    test_main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.