repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
Pablo126/SSBW
|
refs/heads/master
|
Entrega1/lib/python3.5/site-packages/rest_framework/urlpatterns.py
|
18
|
from __future__ import unicode_literals
from django.conf.urls import url
from rest_framework.compat import RegexURLResolver, include
from rest_framework.settings import api_settings
def apply_suffix_patterns(urlpatterns, suffix_pattern, suffix_required):
    """Return a new pattern list with format-suffix variants appended.

    Recurses into included URL resolvers; for plain patterns, a suffixed
    copy is added after the original (the original is dropped when
    ``suffix_required`` is true).
    """
    suffixed = []
    for pattern in urlpatterns:
        if isinstance(pattern, RegexURLResolver):
            # Included URL patterns: recurse into the resolver's children
            # and re-wrap them under the same regex/namespace/app_name.
            nested = apply_suffix_patterns(pattern.url_patterns,
                                           suffix_pattern,
                                           suffix_required)
            suffixed.append(url(pattern.regex.pattern,
                                include(nested, pattern.namespace, pattern.app_name),
                                pattern.default_kwargs))
        else:
            # Plain URL pattern: trim the trailing '$' and '/' so the
            # suffix regex can be grafted onto the end.
            base = pattern.regex.pattern.rstrip('$').rstrip('/')
            if not suffix_required:
                # Keep the unsuffixed form as well.
                suffixed.append(pattern)
            suffixed.append(url(base + suffix_pattern,
                                pattern.callback,
                                pattern.default_args,
                                pattern.name))
    return suffixed
def format_suffix_patterns(urlpatterns, suffix_required=False, allowed=None):
    """
    Supplement existing urlpatterns with corresponding patterns that also
    include a '.format' suffix. Retains urlpattern ordering.
    urlpatterns:
    A list of URL patterns.
    suffix_required:
    If `True`, only suffixed URLs will be generated, and non-suffixed
    URLs will not be used. Defaults to `False`.
    allowed:
    An optional tuple/list of allowed suffixes. eg ['json', 'api']
    Defaults to `None`, which allows any suffix.
    """
    suffix_kwarg = api_settings.FORMAT_SUFFIX_KWARG
    if not allowed:
        # No restriction: accept any lowercase alphanumeric suffix.
        suffix_pattern = r'\.(?P<%s>[a-z0-9]+)/?$' % suffix_kwarg
    else:
        # Restrict to the allowed suffixes; a group is only needed when
        # there is more than one alternative.
        allowed_pattern = allowed[0] if len(allowed) == 1 else '(%s)' % '|'.join(allowed)
        suffix_pattern = r'\.(?P<%s>%s)/?$' % (suffix_kwarg, allowed_pattern)
    return apply_suffix_patterns(urlpatterns, suffix_pattern, suffix_required)
|
ephes/scikit-learn
|
refs/heads/master
|
examples/svm/plot_svm_anova.py
|
250
|
"""
=================================================
SVM-Anova: SVM with univariate feature selection
=================================================
This example shows how to perform univariate feature before running a SVC
(support vector classifier) to improve the classification scores.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm, datasets, feature_selection, cross_validation
from sklearn.pipeline import Pipeline
###############################################################################
# Import some data to play with
digits = datasets.load_digits()
y = digits.target
# Throw away data, to be in the curse of dimension settings
y = y[:200]
X = digits.data[:200]
n_samples = len(y)
X = X.reshape((n_samples, -1))
# add 200 non-informative features
X = np.hstack((X, 2 * np.random.random((n_samples, 200))))
###############################################################################
# Create a feature-selection transform and an instance of SVM that we
# combine together to have an full-blown estimator
transform = feature_selection.SelectPercentile(feature_selection.f_classif)
clf = Pipeline([('anova', transform), ('svc', svm.SVC(C=1.0))])
###############################################################################
# Plot the cross-validation score as a function of percentile of features
score_means = list()
score_stds = list()
percentiles = (1, 3, 6, 10, 15, 20, 30, 40, 60, 80, 100)
for percentile in percentiles:
clf.set_params(anova__percentile=percentile)
# Compute cross-validation score using all CPUs
this_scores = cross_validation.cross_val_score(clf, X, y, n_jobs=1)
score_means.append(this_scores.mean())
score_stds.append(this_scores.std())
plt.errorbar(percentiles, score_means, np.array(score_stds))
plt.title(
'Performance of the SVM-Anova varying the percentile of features selected')
plt.xlabel('Percentile')
plt.ylabel('Prediction rate')
plt.axis('tight')
plt.show()
|
tesidroni/mp
|
refs/heads/master
|
Lib/site-packages/numpy/distutils/command/build.py
|
99
|
import os
import sys
from distutils.command.build import build as old_build
from distutils.util import get_platform
from numpy.distutils.command.config_compiler import show_fortran_compilers
class build(old_build):
    """numpy's distutils ``build`` command: extends the standard distutils
    build with Fortran-compiler selection and numpy source-generation steps."""

    # Run numpy-specific config/source-generation sub-commands before the
    # standard distutils build steps.
    sub_commands = [('config_cc', lambda *args: True),
                    ('config_fc', lambda *args: True),
                    ('build_src', old_build.has_ext_modules),
                    ] + old_build.sub_commands

    # Adds a --fcompiler option on top of the base command's options.
    user_options = old_build.user_options + [
        ('fcompiler=', None,
         "specify the Fortran compiler type"),
        ]

    # --help-fcompiler prints the available Fortran compilers and exits.
    help_options = old_build.help_options + [
        ('help-fcompiler',None, "list available Fortran compilers",
         show_fortran_compilers),
        ]

    def initialize_options(self):
        """Set option defaults; fcompiler is unset until finalized/configured."""
        old_build.initialize_options(self)
        self.fcompiler = None

    def finalize_options(self):
        """Finalize options, placing scripts in a platform-specific dir.

        The base class's default for build_scripts is not platform-tagged,
        so we only override it when the user did not set it explicitly
        (captured before the base finalize fills in its own default).
        """
        build_scripts = self.build_scripts
        old_build.finalize_options(self)
        plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
        if build_scripts is None:
            self.build_scripts = os.path.join(self.build_base,
                                              'scripts' + plat_specifier)

    def run(self):
        """Delegate to the standard distutils build run."""
        old_build.run(self)
|
mpedrero/quill
|
refs/heads/master
|
src/generators/BlogPostGenerator.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import codecs
import shutil
import pystache
import containers.TagData as TagData
import PyRSS2Gen as RSS2
import datetime
class BlogPostGenerator:
    """Render a blog to static HTML using pystache (Mustache) templates.

    Produces per-post pages, a paginated index, per-tag listing pages, an
    optional about page, and an RSS feed (via PyRSS2Gen).

    NOTE(review): this is Python 2 code — it uses ``unicode()`` and relies
    on integer division in ``generateIndex``.
    """

    def __init__(self, blogMetadata):
        # Blog-wide configuration object; output/theme folders come from it.
        self.blogMetadata = blogMetadata
        self.outputFolder = blogMetadata.blogFolder
        self.templateFolder = blogMetadata.themesFolder
        self.tagsFolder = os.path.join(self.outputFolder,"tags")
        # Delete possible old files in blog folder
        filelist = [ f for f in os.listdir(self.outputFolder) ]
        for f in filelist:
            try:
                os.remove(os.path.join(self.outputFolder,f))
            except:
                # os.remove fails on directories, so fall back to a
                # recursive delete. NOTE(review): the bare except also
                # hides permission errors.
                shutil.rmtree(os.path.join(self.outputFolder,f))

    def loadTheme(self, themeName="default"):
        """Point templateFolder at the chosen theme and copy its static
        assets (CSS, logo, favicon, fonts) into the output folder."""
        self.templateFolder = os.path.join(self.templateFolder,themeName)
        shutil.copy2(os.path.join(self.templateFolder,"post.css"), self.outputFolder)
        shutil.copy2(os.path.join(self.templateFolder,"index.css"), self.outputFolder)
        shutil.copy2(os.path.join(self.templateFolder,"tag.css"), self.outputFolder)
        shutil.copy2(os.path.join(self.templateFolder,"logo.png"), self.outputFolder)
        shutil.copy2(os.path.join(self.templateFolder,"favicon.png"), self.outputFolder)
        try:
            # Background images are optional theme assets.
            shutil.copy2(os.path.join(self.templateFolder,"back.png"), self.outputFolder)
            shutil.copy2(os.path.join(self.templateFolder,"back2.png"), self.outputFolder)
        except:
            pass
        try:
            # Remove stale fonts from a previous run: copytree requires
            # that the destination directory does not exist.
            shutil.rmtree(os.path.join(self.outputFolder,"fonts"))
        except:
            pass
        shutil.copytree(os.path.join(self.templateFolder,"fonts"), os.path.join(self.outputFolder,"fonts"))

    def loadImages(self):
        """Refresh the output images folder from the blog's image source."""
        try:
            shutil.rmtree(os.path.join(self.outputFolder,"images"))
        except:
            pass
        shutil.copytree(self.blogMetadata.imagesFolder, os.path.join(self.outputFolder,"images"))

    def generatePost(self, post):
        """Render one post to <outputFolder>/<post.url> via postTemplate.html.

        The template context includes the Disqus comments block only when
        comments are enabled in the blog metadata.
        """
        # Instantiate Renderer
        renderer = pystache.Renderer()
        # Generate dict
        if self.blogMetadata.comments.lower() == "yes":
            content = {
                "title": post.title,
                "date": post.date,
                "post_text": post.mainText,
                "index": "index.html",
                "tags": post.tagsURL,
                "tagname": self.blogMetadata.tagName+" ",
                "etime": post.timeToRead,
                "comments": self.blogMetadata.disqusCode,
                "analytics": self.blogMetadata.analyticsCode,
                "permalink": post.permalink,
                "author": post.author,
                "shTwitter": post.twitterShare,
                "shFacebook":post.facebookShare,
                "shGplus": post.gplusShare}
        else:
            # Same context minus the "comments" key.
            content = {
                "title": post.title,
                "date": post.date,
                "post_text": post.mainText,
                "index": "index.html",
                "tags": post.tagsURL,
                "tagname": self.blogMetadata.tagName+" ",
                "etime": post.timeToRead,
                "analytics": self.blogMetadata.analyticsCode,
                "permalink": post.permalink,
                "author": post.author,
                "shTwitter": post.twitterShare,
                "shFacebook":post.facebookShare,
                "shGplus": post.gplusShare}
        f = codecs.open(os.path.join(self.outputFolder, post.url),'w','utf-8')
        f.write(renderer.render_path(os.path.join(self.templateFolder, "postTemplate.html"),content))
        f.close()

    def generateIndex(self, postList, blogSettings):
        """Paginate postList into index.html plus page2.html, page3.html, ..."""
        lim = len(postList)
        inc = int(self.blogMetadata.postsPerPage)
        lower = 0
        upper = lower+inc
        pageNum = 1
        # NOTE(review): integer division under Python 2; under Python 3
        # this would make pageMax a float.
        pageMax = lim/inc
        if lim%inc != 0:
            pageMax = pageMax + 1
        while upper < lim:
            self.generateIndexPage(postList[lower:upper],pageNum, pageMax, blogSettings)
            lower = upper
            upper = lower+inc
            pageNum = pageNum+1
        # Final (possibly short) page.
        self.generateIndexPage(postList[lower:],pageNum, pageMax, blogSettings)

    def generateIndexPage(self, postList, pageNum, pageMax, blogSettings):
        """Write one index page (index.html for page 1, pageN.html after),
        listing the given posts with newer/older pagination links."""
        listOfEntries = unicode()
        # Instantiate Renderer
        renderer = pystache.Renderer(file_encoding='utf-8', string_encoding='utf-8', decode_errors='utf-8')
        if pageNum == 1:
            f = codecs.open(os.path.join(self.outputFolder, "index.html"),'w','utf-8')
        else:
            f = codecs.open(os.path.join(self.outputFolder, str("page" + str(pageNum) + ".html")),'w','utf-8')
        for post in postList:
            # The last entry on the page gets a distinct CSS class.
            if post is postList[-1]:
                listOfEntries = listOfEntries + u'<div class="last-entry"><p class="entry-date">'+post.date+u'</p><a class="entry-link" href="./'+post.url+u'">'+unicode(post.title)+u'</a></div>'+u'\n'
            else:
                listOfEntries = listOfEntries + u'<div class="entry"><p class="entry-date">'+post.date+u'</p><a class="entry-link" href="./'+post.url+u'">'+unicode(post.title)+u'</a></div>'+u'\n'
        # Generate dict
        # Generate pagination
        pagination = unicode()
        # Newer pages
        if pageNum > 2:
            pagination = pagination + u'<a class="newer-entries" href=' + u'page' + str(pageNum-1) + u'.html>' + u'← ' + self.blogMetadata.newerPosts + u'</a>'
        elif pageNum == 2:
            # Page 1 is index.html, not page1.html.
            pagination = pagination + u'<a class="newer-entries" href=index.html>' + u'← ' + self.blogMetadata.newerPosts + u'</a>'
        # Page n of m
        pagination = pagination + u'<span class="page-number">'+ self.blogMetadata.page + u' ' + str(pageNum) + u' ' + self.blogMetadata.of + u' ' + str(pageMax) + u'</span>'
        # Older pages
        if pageNum < pageMax:
            pagination = pagination + u'<a class="older-entries" href=' + u'page' + str(pageNum+1) + u'.html>' + self.blogMetadata.olderPosts + u' →' + u'</a>'
        # Generate link to about page if present
        if blogSettings.displayAboutMe.lower() == "yes":
            about = unicode()
            about = u'<a class="about" href="./about.html" >'+unicode(self.blogMetadata.aboutHeader)+u'</a>'
            content = {
                "index": u"./index.html",
                "rss": u"<a href=feed.xml>rss</a>",
                "title": self.blogMetadata.blogName,
                "description": self.blogMetadata.blogDescription,
                "entries": listOfEntries,
                "about": about,
                "pagination": pagination}
            f.write(renderer.render_path(os.path.join(self.templateFolder, "indexTemplate.html"),content))
        else:
            # Same context without the "about" link.
            content = {
                "index": u"./index.html",
                "rss": u"<a href=feed.xml>rss</a>",
                "title": self.blogMetadata.blogName,
                "description": self.blogMetadata.blogDescription,
                "entries": listOfEntries,
                "pagination": pagination}
            f.write(renderer.render_path(os.path.join(self.templateFolder, "indexTemplate.html"),content))
        f.close()

    def generateTags(self, postList):
        """Build the tags/ folder: one HTML page per tag listing that tag's
        posts, newest first."""
        listOfTags = list()
        listOfTagNames = list()
        # Verify if there is a /tags folder, if not, create
        if not os.path.exists(self.tagsFolder):
            os.makedirs(self.tagsFolder)
        # Make a list of tags
        for post in postList:
            for tag in post.tags:
                if not tag in listOfTagNames:
                    listOfTagNames.append(tag)
                    newTag = TagData.TagData(tag)
                    listOfTags.append(newTag)
                # Dirty method, don't judge me :(
                listOfTags[listOfTagNames.index(tag)].postList.append(post)
        # Order posts by date, descending
        for tag in listOfTags:
            tag.postList.sort(key=lambda PostData: PostData.dateParsed, reverse=True)
        # For each tag, generate list of entries (currently only title)
        for tag in listOfTags:
            listOfEntries = unicode()
            # Instantiate Renderer
            renderer = pystache.Renderer()
            f = codecs.open(os.path.join(self.tagsFolder, tag.url),'w','utf-8')
            for post in tag.postList:
                if post is tag.postList[-1]:
                    listOfEntries = listOfEntries + u'<div class="last-entry"><p class="entry-date">'+post.date+u'</p><a class="entry-link" href="../'+post.url+u'">'+post.title+u'</a></div>'+u'\n'
                else:
                    listOfEntries = listOfEntries + u'<div class="entry"><p class="entry-date">'+post.date+u'</p><a class="entry-link" href="../'+post.url+u'">'+post.title+u'</a></div>'+u'\n'
            # Generate dict
            content = {"title": self.blogMetadata.blogName, "tag-name": tag.name, "tag-header": self.blogMetadata.tagHeader, "entries": listOfEntries}
            f.write(renderer.render_path(os.path.join(self.templateFolder, "tagTemplate.html"),content))
            f.close()

    def generateAbout(self,post):
        """Render the about page through the same post template (no tags,
        date or share metadata)."""
        # Instantiate Renderer
        renderer = pystache.Renderer()
        # Generate dict
        content = {"title": post.title, "post_text": post.mainText, "index": u"index.html", }
        f = codecs.open(os.path.join(self.outputFolder, post.url),'w','utf-8')
        f.write(renderer.render_path(os.path.join(self.templateFolder, "postTemplate.html"),content))
        f.close()

    def generateRSS(self,postList):
        """Write feed.xml. Items carry the full post body when completeFeed
        is enabled, otherwise just a link back to the post."""
        # create items
        rssItems = []
        for post in postList:
            if self.blogMetadata.completeFeed.lower() == "yes":
                rssItems.append(RSS2.RSSItem(
                    title = post.title,
                    link = self.blogMetadata.blogURL+'/'+post.url,
                    description = post.mainText,
                    guid = RSS2.Guid(self.blogMetadata.blogURL+'/'+post.url),
                    pubDate = post.dateParsed))
            else:
                rssItems.append(RSS2.RSSItem(
                    title = post.title,
                    link = self.blogMetadata.blogURL+'/'+post.url,
                    description = '<a href='+self.blogMetadata.blogURL+'/'+post.url+'>'+post.title+'</a>',
                    guid = RSS2.Guid(self.blogMetadata.blogURL+'/'+post.url),
                    pubDate = post.dateParsed))
        rss = RSS2.RSS2(
            title = self.blogMetadata.blogName,
            link = self.blogMetadata.blogURL+u"/index.html",
            description = self.blogMetadata.blogDescription,
            lastBuildDate = datetime.datetime.now(),
            items = rssItems,
            image = RSS2.Image(self.blogMetadata.blogURL+u'/logo.png', self.blogMetadata.blogName, self.blogMetadata.blogURL+u'/index.html')
            )
        rss.write_xml(open(os.path.join(self.outputFolder, "feed.xml"),'w'))
|
airspeed-velocity/asv
|
refs/heads/master
|
test/test_environment.py
|
2
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
import sys
import six
import pytest
import json
from collections import defaultdict
from asv import config
from asv import environment
from asv import util
from asv.repo import get_repo
from asv.util import shlex_quote as quote
from .tools import (PYTHON_VER1, PYTHON_VER2, DUMMY1_VERSION, DUMMY2_VERSIONS,
WIN, HAS_PYPY, HAS_CONDA, HAS_VIRTUALENV, HAS_PYTHON_VER2,
generate_test_repo, dummy_packages)
@pytest.mark.skipif(not (HAS_PYTHON_VER2 or HAS_CONDA),
                    reason="Requires two usable Python versions")
def test_matrix_environments(tmpdir, dummy_packages):
    """A 2-python x 2-version x 2-version matrix yields 8 environments,
    and created environments contain the requested package versions."""
    conf = config.Config()
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.pythons = [PYTHON_VER1, PYTHON_VER2]
    # None means "also include an environment without this package".
    conf.matrix = {
        "asv_dummy_test_package_1": [DUMMY1_VERSION, None],
        "asv_dummy_test_package_2": DUMMY2_VERSIONS
    }
    environments = list(environment.get_environments(conf, None))
    assert len(environments) == 2 * 2 * 2
    # Only test the first two environments, since this is so time
    # consuming
    for env in environments[:2]:
        env.create()
        # The import may fail in the env where the package is absent,
        # hence valid_return_codes=None.
        output = env.run(
            ['-c', 'import asv_dummy_test_package_1 as p, sys; sys.stdout.write(p.__version__)'],
            valid_return_codes=None)
        if 'asv_dummy_test_package_1' in env._requirements:
            assert output.startswith(six.text_type(env._requirements['asv_dummy_test_package_1']))
        output = env.run(
            ['-c', 'import asv_dummy_test_package_2 as p, sys; sys.stdout.write(p.__version__)'])
        assert output.startswith(six.text_type(env._requirements['asv_dummy_test_package_2']))
def test_large_environment_matrix(tmpdir):
    """Regression test for gh-169: environment setup must work with very
    long environment directory names (many matrix dependencies)."""
    # As seen in issue #169, conda can't handle using really long
    # directory names in its environment. This creates an environment
    # with many dependencies in order to ensure it still works.
    conf = config.Config()
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.pythons = [PYTHON_VER1]
    for i in range(25):
        conf.matrix['foo{0}'.format(i)] = []
    environments = list(environment.get_environments(conf, None))
    for env in environments:
        # Since *actually* installing all the dependencies would make
        # this test run a long time, we only set up the environment,
        # but don't actually install dependencies into it. This is
        # enough to trigger the bug in #169.
        env._get_requirements = lambda *a: ([], [])
        # pip / virtualenv setup still uses
        # _install_requirements
        env._install_requirements = lambda *a: None
        env.create()
def test_presence_checks(tmpdir, monkeypatch):
    """check_presence() passes on a fresh env, and the env is rebuilt when
    its info file is clobbered or its pip binaries are removed."""
    conf = config.Config()
    if WIN:
        # Tell conda to not use hardlinks: on Windows it's not possible
        # to delete hard links to files in use, which causes problem when
        # trying to cleanup environments during this test
        monkeypatch.setenv(str('CONDA_ALWAYS_COPY'), str('True'))
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    environments = list(environment.get_environments(conf, None))
    for env in environments:
        env.create()
        assert env.check_presence()
        # Check env is recreated when info file is clobbered
        info_fn = os.path.join(env._path, 'asv-env-info.json')
        data = util.load_json(info_fn)
        data['python'] = '0'
        data = util.write_json(info_fn, data)
        env._is_setup = False
        env.create()
        # Re-creation must have restored the correct python version.
        data = util.load_json(info_fn)
        assert data['python'] == PYTHON_VER1
        env.run(['-c', 'import os'])
        # Check env is recreated if crucial things are missing
        pip_fns = [
            os.path.join(env._path, 'bin', 'pip')
        ]
        if WIN:
            pip_fns += [
                os.path.join(env._path, 'bin', 'pip.exe'),
                os.path.join(env._path, 'Scripts', 'pip'),
                os.path.join(env._path, 'Scripts', 'pip.exe')
            ]
        some_removed = False
        for pip_fn in pip_fns:
            if os.path.isfile(pip_fn):
                some_removed = True
                os.remove(pip_fn)
        assert some_removed
        env._is_setup = False
        env.create()
        # NOTE(review): pip_fn here is the last value from the loop above.
        assert os.path.isfile(pip_fn)
        env.run(['-c', 'import os'])
def _sorted_dict_list(lst):
return list(sorted(lst, key=lambda x: list(sorted(x.items()))))
def test_matrix_expand_basic():
    """Matrix value forms expand as documented: a bare '' or [''] becomes
    an unpinned requirement, [] likewise, multi-element lists multiply the
    combinations, and a value of None omits the requirement entirely
    (pkg1 does not appear in any expected combination)."""
    conf = config.Config()
    conf.environment_type = 'something'
    conf.pythons = ["2.6", "2.7"]
    conf.matrix = {
        'pkg1': None,
        'pkg2': '',
        'pkg3': [''],
        'pkg4': ['1.2', '3.4'],
        'pkg5': []
    }
    combinations = _sorted_dict_list(environment.iter_matrix(
        conf.environment_type, conf.pythons, conf))
    expected = _sorted_dict_list([
        {('python', None): '2.6', ('req', 'pkg2'): '', ('req', 'pkg3'): '',
         ('req', 'pkg4'): '1.2', ('req', 'pkg5'): ''},
        {('python', None): '2.6', ('req', 'pkg2'): '', ('req', 'pkg3'): '',
         ('req', 'pkg4'): '3.4', ('req', 'pkg5'): ''},
        {('python', None): '2.7', ('req', 'pkg2'): '', ('req', 'pkg3'): '',
         ('req', 'pkg4'): '1.2', ('req', 'pkg5'): ''},
        {('python', None): '2.7', ('req', 'pkg2'): '', ('req', 'pkg3'): '',
         ('req', 'pkg4'): '3.4', ('req', 'pkg5'): ''},
    ])
    assert combinations == expected
def test_matrix_expand_include():
    """conf.include entries add extra combinations; entries whose
    sys_platform/environment_type do not match are skipped, and an include
    without a 'python' key raises UserError."""
    conf = config.Config()
    conf.environment_type = 'something'
    conf.pythons = ["2.6"]
    conf.matrix = {'a': '1'}
    conf.include = [
        {'python': '3.5', 'b': '2'},
        {'sys_platform': sys.platform, 'python': '2.7', 'b': '3'},
        # Non-matching platform: must be dropped.
        {'sys_platform': sys.platform + 'nope', 'python': '2.7', 'b': '3'},
        # Non-matching environment type: must be dropped.
        {'environment_type': 'nope', 'python': '2.7', 'b': '4'},
        {'environment_type': 'something', 'python': '2.7', 'b': '5'},
    ]
    combinations = _sorted_dict_list(environment.iter_matrix(
        conf.environment_type, conf.pythons, conf))
    expected = _sorted_dict_list([
        {('python', None): '2.6', ('req', 'a'): '1'},
        {('python', None): '3.5', ('req', 'b'): '2'},
        {('python', None): '2.7', ('req', 'b'): '3'},
        {('python', None): '2.7', ('req', 'b'): '5'}
    ])
    assert combinations == expected
    # An include entry must specify a python version.
    conf.include = [
        {'b': '2'}
    ]
    with pytest.raises(util.UserError):
        list(environment.iter_matrix(conf.environment_type, conf.pythons, conf))
def test_matrix_expand_include_detect_env_type():
    """Include entries still match when environment_type is None (to be
    auto-detected); the catch-all exclude removes the base matrix, leaving
    only the included combination."""
    conf = config.Config()
    conf.environment_type = None
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    # {} matches (and thus excludes) every base combination.
    conf.exclude = [{}]
    conf.include = [
        {'sys_platform': sys.platform, 'python': PYTHON_VER1},
    ]
    combinations = _sorted_dict_list(environment.iter_matrix(
        conf.environment_type, conf.pythons, conf))
    expected = _sorted_dict_list([
        {('python', None): PYTHON_VER1},
    ])
    assert combinations == expected
def test_matrix_expand_exclude():
    """conf.exclude filters matrix combinations by exact value, by regex,
    by None (requirement absent), and by environment_type / sys_platform
    keys; include entries are exempt from exclusion."""
    conf = config.Config()
    conf.environment_type = 'something'
    conf.pythons = ["2.6", "2.7"]
    conf.matrix = {
        'a': '1',
        'b': ['1', None]
    }
    conf.include = [
        {'python': '2.7', 'b': '2', 'c': None}
    ]
    # check basics
    conf.exclude = [
        {'python': '2.7', 'b': '2'},
        {'python': '2.7', 'b': None},
        {'python': '2.6', 'a': '1'},
    ]
    combinations = _sorted_dict_list(environment.iter_matrix(
        conf.environment_type, conf.pythons, conf))
    expected = _sorted_dict_list([
        {('python', None): '2.7', ('req', 'a'): '1', ('req', 'b'): '1'},
        # The include entry survives even though an exclude matches it.
        {('python', None): '2.7', ('req', 'b'): '2'}
    ])
    assert combinations == expected
    # check regexp
    conf.exclude = [
        {'python': '.*', 'b': None},
    ]
    combinations = _sorted_dict_list(environment.iter_matrix(
        conf.environment_type, conf.pythons, conf))
    expected = _sorted_dict_list([
        {('python', None): '2.6', ('req', 'a'): '1', ('req', 'b'): '1'},
        {('python', None): '2.7', ('req', 'a'): '1', ('req', 'b'): '1'},
        {('python', None): '2.7', ('req', 'b'): '2'}
    ])
    assert combinations == expected
    # check environment_type as key
    conf.exclude = [
        {'environment_type': 'some.*'},
    ]
    combinations = _sorted_dict_list(environment.iter_matrix(
        conf.environment_type, conf.pythons, conf))
    expected = [
        {('python', None): '2.7', ('req', 'b'): '2'}
    ]
    assert combinations == expected
    # check sys_platform as key
    conf.exclude = [
        {'sys_platform': sys.platform},
    ]
    combinations = _sorted_dict_list(environment.iter_matrix(
        conf.environment_type, conf.pythons, conf))
    expected = [
        {('python', None): '2.7', ('req', 'b'): '2'}
    ]
    assert combinations == expected
    # check inverted regex
    conf.exclude = [
        {'python': '(?!2.6).*'}
    ]
    combinations = _sorted_dict_list(environment.iter_matrix(
        conf.environment_type, conf.pythons, conf))
    expected = _sorted_dict_list([
        {('python', None): '2.6', ('req', 'a'): '1', ('req', 'b'): '1'},
        {('python', None): '2.6', ('req', 'a'): '1'},
        {('python', None): '2.7', ('req', 'b'): '2'}
    ])
    assert combinations == expected
def test_iter_env_matrix_combinations():
    """The 'env' sub-matrix expands to the cartesian product of its
    variables; a value of None drops the variable from that combination."""
    conf = config.Config()
    conf.environment_type = 'something'
    conf.pythons = ["2.6"]
    conf.matrix = {}
    conf.include = []
    # (matrix, expected)
    env_matrices = [
        ({'var0': ['val0', 'val1'], 'var1': ['val2', 'val3']},
         [{'var0': 'val0', 'var1': 'val2'},
          {'var0': 'val0', 'var1': 'val3'},
          {'var0': 'val1', 'var1': 'val2'},
          {'var0': 'val1', 'var1': 'val3'}]),
        ({'var0': ['val0', 'val1'], 'var1': ['val2', None]},
         [{'var0': 'val0', 'var1': 'val2'}, {'var0': 'val0'},
          {'var0': 'val1', 'var1': 'val2'}, {'var0': 'val1'}]),
        ({'var0': ['val0', 'val1']},
         [{'var0': 'val0'}, {'var0': 'val1'}]),
        ({}, [{}]),
    ]
    for matrix, expected in env_matrices:
        conf.matrix = {'env': matrix}
        # Expected combinations are keyed ('env', name); every combination
        # also carries the python version.
        expected = [{('env', key): value for key, value in item.items()}
                    for item in expected]
        for m in expected:
            m['python', None] = "2.6"
        result = _sorted_dict_list(environment.iter_matrix(conf.environment_type, conf.pythons, conf))
        assert result == _sorted_dict_list(expected)
@pytest.mark.skipif((not HAS_CONDA), reason="Requires conda and conda-build")
def test_conda_pip_install(tmpdir, dummy_packages):
    """A 'pip+<package>' matrix key installs via pip inside a conda env."""
    # test that we can install with pip into a conda environment.
    conf = config.Config()
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.environment_type = "conda"
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {
        "pip+asv_dummy_test_package_2": [DUMMY2_VERSIONS[0]]
    }
    environments = list(environment.get_environments(conf, None))
    assert len(environments) == 1 * 1 * 1
    for env in environments:
        env.create()
        output = env.run(
            ['-c', 'import asv_dummy_test_package_2 as p, sys; sys.stdout.write(p.__version__)'])
        assert output.startswith(six.text_type(env._requirements['pip+asv_dummy_test_package_2']))
@pytest.mark.skipif((not HAS_CONDA), reason="Requires conda and conda-build")
def test_conda_environment_file(tmpdir, dummy_packages):
    """conda_environment_file dependencies are installed in addition to the
    matrix requirements."""
    env_file_name = six.text_type(tmpdir.join("environment.yml"))
    with open(env_file_name, "w") as temp_environment_file:
        temp_environment_file.write(
            'name: test_conda_envs\ndependencies:\n - asv_dummy_test_package_2')
    conf = config.Config()
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.environment_type = "conda"
    conf.pythons = [PYTHON_VER1]
    conf.conda_environment_file = env_file_name
    conf.matrix = {
        "asv_dummy_test_package_1": [DUMMY1_VERSION]
    }
    environments = list(environment.get_environments(conf, None))
    assert len(environments) == 1 * 1 * 1
    for env in environments:
        env.create()
        # Package 1 comes from the matrix at the pinned version.
        output = env.run(
            ['-c', 'import asv_dummy_test_package_1 as p, sys; sys.stdout.write(p.__version__)'])
        assert output.startswith(six.text_type(DUMMY1_VERSION))
        # Package 2 comes from environment.yml unpinned; the assertion
        # expects DUMMY2_VERSIONS[1] — presumably the latest available
        # version of the dummy package.
        output = env.run(
            ['-c', 'import asv_dummy_test_package_2 as p, sys; sys.stdout.write(p.__version__)'])
        assert output.startswith(six.text_type(DUMMY2_VERSIONS[1]))
@pytest.mark.skipif((not HAS_CONDA), reason="Requires conda and conda-build")
def test_conda_run_executable(tmpdir):
    # test that we can run an executable (conda itself) inside a conda
    # environment via Environment.run_executable.
    conf = config.Config()
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.environment_type = "conda"
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    environments = list(environment.get_environments(conf, None))
    assert len(environments) == 1 * 1 * 1
    for env in environments:
        env.create()
        env.run_executable('conda', ['info'])
def test_environment_select():
    """get_environments honours explicit tool:python specifiers, 'existing'
    environments, selection by full environment name, and excludes."""
    conf = config.Config()
    conf.environment_type = "conda"
    conf.pythons = ["2.7", "3.5"]
    conf.matrix = {
        "six": ["1.10"],
    }
    conf.include = [
        {'environment_type': 'conda', 'python': '1.9'}
    ]
    # Check default environment config
    environments = list(environment.get_environments(conf, None))
    items = sorted([(env.tool_name, env.python) for env in environments])
    assert items == [('conda', '1.9'), ('conda', '2.7'), ('conda', '3.5')]
    if HAS_VIRTUALENV:
        # Virtualenv plugin fails on initialization if not available,
        # so these tests pass only if virtualenv is present
        conf.pythons = [PYTHON_VER1]
        # Check default python specifiers
        environments = list(environment.get_environments(conf, ["conda", "virtualenv"]))
        items = sorted((env.tool_name, env.python) for env in environments)
        assert items == [('conda', '1.9'), ('conda', PYTHON_VER1), ('virtualenv', PYTHON_VER1)]
        # Check specific python specifiers
        environments = list(environment.get_environments(conf, ["conda:3.5", "virtualenv:"+PYTHON_VER1]))
        items = sorted((env.tool_name, env.python) for env in environments)
        assert items == [('conda', '3.5'), ('virtualenv', PYTHON_VER1)]
    # Check same specifier
    environments = list(environment.get_environments(conf, ["existing:same", ":same", "existing"]))
    items = [env.tool_name for env in environments]
    assert items == ['existing', 'existing', 'existing']
    # Check autodetect existing
    executable = os.path.relpath(os.path.abspath(sys.executable))
    environments = list(environment.get_environments(conf, ["existing",
                                                            ":same",
                                                            ":" + executable]))
    assert len(environments) == 3
    for env in environments:
        assert env.tool_name == "existing"
        assert env.python == "{0[0]}.{0[1]}".format(sys.version_info)
        assert os.path.normcase(os.path.abspath(env._executable)) == os.path.normcase(os.path.abspath(sys.executable))
    # Select by environment name
    conf.pythons = ["2.7"]
    environments = list(environment.get_environments(conf, ["conda-py2.7-six1.10"]))
    assert len(environments) == 1
    assert environments[0].python == "2.7"
    assert environments[0].tool_name == "conda"
    assert environments[0].requirements == {'six': '1.10'}
    # Check interaction with exclude
    conf.exclude = [{'environment_type': "conda"}]
    environments = list(environment.get_environments(conf, ["conda-py2.7-six1.10"]))
    assert len(environments) == 0
    conf.exclude = [{'environment_type': 'matches nothing'}]
    environments = list(environment.get_environments(conf, ["conda-py2.7-six1.10"]))
    assert len(environments) == 1
def test_environment_select_autodetect():
    """A bare ':python' specifier autodetects the tool (virtualenv or
    conda); excludes do not apply to explicitly selected environments."""
    conf = config.Config()
    conf.environment_type = "conda"
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {
        "six": ["1.10"],
    }
    # Check autodetect
    environments = list(environment.get_environments(conf, [":" + PYTHON_VER1]))
    assert len(environments) == 1
    assert environments[0].python == PYTHON_VER1
    assert environments[0].tool_name in ("virtualenv", "conda")
    # Check interaction with exclude
    conf.exclude = [{'environment_type': 'matches nothing'}]
    environments = list(environment.get_environments(conf, [":" + PYTHON_VER1]))
    assert len(environments) == 1
    conf.exclude = [{'environment_type': 'virtualenv|conda'}]
    environments = list(environment.get_environments(conf, [":" + PYTHON_VER1]))
    assert len(environments) == 1
    conf.exclude = [{'environment_type': 'conda'}]
    environments = list(environment.get_environments(conf, ["conda:" + PYTHON_VER1]))
    assert len(environments) == 1
def test_matrix_empty():
    """An empty requirement matrix yields exactly one default environment
    for the single configured Python version."""
    conf = config.Config()
    conf.environment_type = ""
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    # The default config should produce one env per configured Python.
    found = [env.python for env in environment.get_environments(conf, None)]
    assert found == [PYTHON_VER1]
def test_matrix_existing():
    """'existing' environments ignore both the requirement matrix and any
    excludes — they always come back with empty requirements."""
    conf = config.Config()
    conf.environment_type = "existing"
    conf.pythons = ["same"]
    conf.matrix = {'foo': ['a', 'b'], 'bar': ['c', 'd']}
    # ExistingEnvironment should ignore the matrix
    environments = list(environment.get_environments(conf, None))
    items = [(env.tool_name, tuple(env.requirements.keys())) for env in environments]
    assert items == [('existing', ())]
    # ...and excludes as well.
    conf.exclude = {'environment_type': '.*'}
    environments = list(environment.get_environments(conf, None))
    items = [(env.tool_name, tuple(env.requirements.keys())) for env in environments]
    assert items == [('existing', ())]
# environment.yml should respect the specified order
# of channels when adding packages
@pytest.mark.skipif((not HAS_CONDA),
                    reason="Requires conda and conda-build")
@pytest.mark.parametrize("channel_list,expected_channel", [
    (["defaults", "conda-forge"], "pkgs/main"),
    (["conda-forge", "defaults"], "conda-forge"),
])
def test_conda_channel_addition(tmpdir,
                                channel_list,
                                expected_channel):
    """Packages must come from the highest-priority configured channel."""
    # test that we can add conda channels to environments
    # and that we respect the specified priority order
    # of channels
    conf = config.Config()
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.environment_type = "conda"
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    # these have to be valid channels
    # available for online access
    conf.conda_channels = channel_list
    environments = list(environment.get_environments(conf, None))
    # should have one environment per Python version
    assert len(environments) == 1
    # create the environments
    for env in environments:
        env.create()
        # generate JSON output from conda list
        # and parse to verify added channels
        # for current env
        # (conda info would be more direct, but
        # seems to reflect contents of condarc file,
        # which we are intentionally trying not to modify)
        conda = util.which('conda')
        print("\n**conda being used:", conda)
        out_str = six.text_type(util.check_output([conda,
                                                   'list',
                                                   '-p',
                                                   os.path.normpath(env._path),
                                                   '--json']))
        json_package_list = json.loads(out_str)
        print(json_package_list)
        for installed_package in json_package_list:
            # check only explicitly installed packages
            if installed_package['name'] not in ('python',):
                continue
            print(installed_package)
            assert installed_package['channel'] == expected_channel
@pytest.mark.skipif(not (HAS_PYPY and HAS_VIRTUALENV), reason="Requires pypy and virtualenv")
def test_pypy_virtualenv(tmpdir):
    # test that we can setup a pypy environment
    conf = config.Config()
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.environment_type = "virtualenv"
    conf.pythons = ["pypy"]
    conf.matrix = {}
    environments = list(environment.get_environments(conf, None))
    assert len(environments) == 1
    for env in environments:
        env.create()
        # sys.pypy_version_info only exists on PyPy, so this both runs the
        # env and proves the interpreter really is PyPy.
        output = env.run(['-c', 'import sys; print(sys.pypy_version_info)'])
        assert output.startswith(six.text_type("(major="))
def test_environment_name_sanitization():
    """Characters unsafe in directory names must be sanitized out of the
    generated environment name."""
    conf = config.Config()
    conf.environment_type = "conda"
    conf.pythons = ["3.5"]
    conf.matrix = {
        "pip+git+http://github.com/space-telescope/asv.git": [],
    }

    # ':' and '/' in the requirement name are replaced by '_'
    envs = list(environment.get_environments(conf, []))
    assert len(envs) == 1
    assert envs[0].name == "conda-py3.5-pip+git+http___github.com_space-telescope_asv.git"
@pytest.mark.parametrize("environment_type", [
    pytest.param("conda", marks=pytest.mark.skipif(not HAS_CONDA, reason="needs conda and conda-build")),
    pytest.param("virtualenv", marks=pytest.mark.skipif(not HAS_VIRTUALENV, reason="needs virtualenv"))
])
def test_environment_environ_path(environment_type, tmpdir, monkeypatch):
    """The environment's binary dir must lead PATH, user site-packages must
    be disabled, and PYTHONPATH must be hidden unless re-exposed via
    ASV_PYTHONPATH."""
    conf = config.Config()
    conf.env_dir = six.text_type(tmpdir.join("env"))
    conf.environment_type = environment_type
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}

    env, = environment.get_environments(conf, [])
    env.create()

    # First PATH entry must live inside the environment directory
    path_entries = env.run(['-c', 'import os; print(os.environ["PATH"])']).strip().split(os.pathsep)
    assert os.path.commonprefix([path_entries[0], conf.env_dir]) == conf.env_dir

    # User site-packages directory must not leak into sys.path
    user_site = env.run(['-c', 'import site; print(site.ENABLE_USER_SITE)']).strip()
    assert user_site == "False"

    # A PYTHONPATH set outside asv is stripped from the child environment...
    monkeypatch.setenv(str('PYTHONPATH'), str(tmpdir))
    assert env.run(['-c', 'import os; print(os.environ.get("PYTHONPATH", ""))']).strip() == ""

    # ...but ASV_PYTHONPATH is forwarded as PYTHONPATH
    monkeypatch.setenv(str('ASV_PYTHONPATH'), str("Hello python path"))
    forwarded = env.run(['-c', 'import os; print(os.environ["PYTHONPATH"])'])
    assert forwarded.strip() == "Hello python path"
def test_build_isolation(tmpdir):
    """Building a project that ships a pyproject.toml must not fail when the
    build cache is enabled."""
    tmpdir = six.text_type(tmpdir)

    # Installable repository with a minimal PEP 518 pyproject.toml
    dvcs = generate_test_repo(tmpdir, [0], dvcs_type='git')
    pyproject = os.path.join(dvcs.path, 'pyproject.toml')
    with open(pyproject, 'w') as fp:
        fp.write('[build-system]\n'
                 'requires = ["wheel", "setuptools"]')
    dvcs.add(pyproject)
    dvcs.commit("Add pyproject.toml")
    commit_hash = dvcs.get_hash("master")

    # Configuration with a nonzero build cache
    conf = config.Config()
    conf.env_dir = os.path.join(tmpdir, "env")
    conf.pythons = [PYTHON_VER1]
    conf.matrix = {}
    conf.repo = os.path.abspath(dvcs.path)
    conf.build_cache_size = 8

    repo = get_repo(conf)
    env = next(iter(environment.get_environments(conf, None)))
    env.create()

    # Must not raise
    env.install_project(conf, repo, commit_hash)
def test_custom_commands(tmpdir):
    """Check that custom build/install/uninstall commands are run with the
    documented placeholder substitutions ({env_dir}, {build_cache_dir}) and
    environment variables (ASV_ENV_DIR, ASV_BUILD_CACHE_DIR), and that the
    build cache interacts with them correctly."""
    # check custom install/uninstall/build commands work
    tmpdir = six.text_type(tmpdir)
    dvcs = generate_test_repo(tmpdir, [0], dvcs_type='git')

    # Helper scripts standing in for the real build/install/uninstall steps
    build_py = os.path.abspath(os.path.join(tmpdir, 'build.py'))
    install_py = os.path.abspath(os.path.join(tmpdir, 'install.py'))
    uninstall_py = os.path.abspath(os.path.join(tmpdir, 'uninstall.py'))

    conf = config.Config()
    conf.env_dir = os.path.join(tmpdir, "env")
    conf.pythons = [PYTHON_VER1]
    conf.repo = os.path.abspath(dvcs.path)
    conf.matrix = {}
    conf.build_cache_size = 0

    # {placeholder} values are substituted by asv before the command runs
    conf.build_command = ["python {0} {{build_cache_dir}}".format(quote(build_py))]
    conf.install_command = ["python {0} {{env_dir}} {{build_cache_dir}}".format(quote(install_py))]
    conf.uninstall_command = ["python {0} {{env_dir}}".format(quote(uninstall_py))]

    # build: verifies its argv matches ASV_BUILD_CACHE_DIR and drops a marker
    # file into the cache
    with open(build_py, 'wb') as f:
        f.write(b"import os, sys\n"
                b"assert sys.argv[1] == os.environ['ASV_BUILD_CACHE_DIR']\n"
                b"f = open(os.path.join(os.environ['ASV_BUILD_CACHE_DIR'], 'cached'), 'wb')\n"
                b"f.write(b'data')\n"
                b"f.close()\n")
    # install: copies the cached marker into the environment directory
    with open(install_py, 'wb') as f:
        f.write(b"import os, sys, shutil\n"
                b"assert sys.argv[1] == os.environ['ASV_ENV_DIR']\n"
                b"assert sys.argv[2] == os.environ['ASV_BUILD_CACHE_DIR']\n"
                b"shutil.copyfile(os.path.join(os.environ['ASV_BUILD_CACHE_DIR'], 'cached'),\n"
                b"                os.path.join(os.environ['ASV_ENV_DIR'], 'installed'))\n")
    # uninstall: removes the installed marker if present
    with open(uninstall_py, 'wb') as f:
        f.write(b"import os, sys\n"
                b"assert sys.argv[1] == os.environ['ASV_ENV_DIR']\n"
                b"fn = os.path.join(os.environ['ASV_ENV_DIR'], 'installed')\n"
                b"if os.path.isfile(fn): os.unlink(fn)\n")

    def get_env():
        # Fresh Environment object so changes to `conf` take effect
        env = list(environment.get_environments(conf, None))[0]
        env.create()
        return env

    env = get_env()
    repo = get_repo(conf)
    commit_hash = dvcs.get_branch_hashes()[0]

    # Paths produced by the helper scripts above
    cache_dir = os.path.join(env._path, 'asv-build-cache')
    cache_file = os.path.join(cache_dir, commit_hash, 'cached')
    install_file = os.path.join(env._path, 'installed')

    # Project installation should succeed with cache size 0,
    # and not leave cache files around
    env.install_project(conf, repo, commit_hash)
    assert os.path.isfile(install_file)
    assert not os.listdir(cache_dir)
    env._set_installed_commit_hash(None)

    # It should succeed with nonzero cache size
    conf.build_cache_size = 1
    env = get_env()
    env.install_project(conf, repo, commit_hash)
    assert os.path.isfile(cache_file)
    assert os.path.isfile(install_file)

    # Explicitly check uninstall works (cache entry must survive)
    env._uninstall_project()
    assert os.path.isfile(cache_file)
    assert not os.path.isfile(install_file)

    # Check reinstall uses cache and doesn't call build command
    # (the replacement build command would fail if invoked)
    conf.build_command = ['python -c "import sys; sys.exit(1)"']
    env = get_env()
    env.install_project(conf, repo, commit_hash)
    assert os.path.isfile(install_file)
    assert os.path.isfile(cache_file)

    # Bad install command should cause a failure
    conf.install_command = ['python -c "import sys; sys.exit(1)"']
    env = get_env()
    with pytest.raises(util.ProcessError):
        env.install_project(conf, repo, commit_hash)
def test_installed_commit_hash(tmpdir):
    """Check tracking of the commit hash installed into an environment.

    The hash must persist across Environment re-instantiation, be cleared
    by uninstall and by configuration changes, while ASV_COMMIT stays set
    until the cleared state is reloaded from disk.
    """
    tmpdir = six.text_type(tmpdir)
    dvcs = generate_test_repo(tmpdir, [0], dvcs_type='git')
    commit_hash = dvcs.get_branch_hashes()[0]

    conf = config.Config()
    conf.env_dir = os.path.join(tmpdir, "env")
    conf.pythons = [PYTHON_VER1]
    conf.repo = os.path.abspath(dvcs.path)
    conf.matrix = {}
    conf.build_cache_size = 0

    repo = get_repo(conf)

    def get_env():
        # Fresh Environment object backed by the same on-disk state
        return list(environment.get_environments(conf, None))[0]

    env = get_env()
    env.create()

    # Check updating installed_commit_hash
    # (PEP 8: compare to None with `is`/`is not`, not `==`/`!=`)
    assert env.installed_commit_hash is None
    assert env._global_env_vars.get('ASV_COMMIT') is None
    env.install_project(conf, repo, commit_hash)
    assert env.installed_commit_hash == commit_hash
    assert env._global_env_vars.get('ASV_COMMIT') == commit_hash

    # The installed hash is reloaded by a fresh Environment object
    env = get_env()
    assert env.installed_commit_hash == commit_hash
    assert env._global_env_vars.get('ASV_COMMIT') == commit_hash

    # Configuration change results to reinstall
    env._project = "something"
    assert env.installed_commit_hash is None

    # Uninstall resets hash (but not ASV_COMMIT)
    env = get_env()
    env._uninstall_project()
    assert env.installed_commit_hash is None
    assert env._global_env_vars.get('ASV_COMMIT') is not None

    env = get_env()
    assert env.installed_commit_hash is None
    assert env._global_env_vars.get('ASV_COMMIT') is None
def test_install_success(tmpdir):
    """Check that install_project really installs the package (gh-805).

    This may fail if pip in install_command e.g. gets confused by an
    .egg-info directory in its cwd and believes the package is already
    installed.
    """
    tmpdir = six.text_type(tmpdir)
    dvcs = generate_test_repo(tmpdir, [0], dvcs_type='git')
    commit_hash = dvcs.get_branch_hashes()[0]

    conf = config.Config()
    conf.env_dir = os.path.join(tmpdir, "env")
    conf.pythons = [PYTHON_VER1]
    conf.repo = os.path.abspath(dvcs.path)
    conf.matrix = {}
    conf.build_cache_size = 0

    repo = get_repo(conf)
    env = next(iter(environment.get_environments(conf, None)))
    env.create()
    env.install_project(conf, repo, commit_hash)

    # Exits 0 only if the freshly installed package is importable and sane
    env.run(['-c', 'import asv_test_repo as t, sys; sys.exit(0 if t.dummy_value == 0 else 1)'])
def test_install_env_matrix_values(tmpdir):
    """'env' matrix variables must be visible at build time, while
    'env_nobuild' variables must be withheld from the build."""
    tmpdir = six.text_type(tmpdir)
    dvcs = generate_test_repo(tmpdir, [0], dvcs_type='git')
    commit_hash = dvcs.get_branch_hashes()[0]

    conf = config.Config()
    conf.env_dir = os.path.join(tmpdir, "env")
    conf.pythons = [PYTHON_VER1]
    conf.repo = os.path.abspath(dvcs.path)
    conf.matrix = {'env': {'SOME_ASV_TEST_BUILD_VALUE': '1'},
                   'env_nobuild': {'SOME_ASV_TEST_NON_BUILD_VALUE': '1'}}

    repo = get_repo(conf)
    env = next(iter(environment.get_environments(conf, None)))
    env.create()
    env.install_project(conf, repo, commit_hash)

    # The build-time environment captured by the test package must contain
    # the 'env' variable...
    env.run(['-c',
             'import asv_test_repo.build_time_env as t, sys; '
             'sys.exit(0 if t.env["SOME_ASV_TEST_BUILD_VALUE"] == "1" else 1)'])
    # ...but not the 'env_nobuild' variable.
    env.run(['-c',
             'import asv_test_repo.build_time_env as t, sys; '
             'sys.exit(0 if "SOME_ASV_TEST_NON_BUILD_VALUE" not in t.env else 1)'])
def test_environment_env_matrix():
    """Every combination of 'env' values yields a distinct build, whereas
    'env_nobuild' values multiply environments without multiplying builds."""
    # (build_vars, non_build_vars, expected_env_count, expected_build_count)
    cases = [
        ({}, {}, 1, 1),
        ({"var1": ["val1"]}, {}, 1, 1),
        ({"var1": ["val1", "val2", "val3"]}, {}, 3, 3),
        ({"var1": ["val1", "val2"], "var2": ['val3', 'val4']}, {}, 4, 4),
        ({"var1": ["val1", "val2"], "var2": ['val3', None]}, {}, 4, 4),
        ({"var1": ["val1", "val2"]}, {"var2": ['val3', None]}, 4, 2),
        ({"var1": ["val1", "val2"], "var2": ['val3', 'val4']},
         {"var3": ['val5', None]}, 8, 4),
    ]

    for build_vars, non_build_vars, env_count, build_count in cases:
        conf = config.Config()
        conf.matrix = {
            "env": build_vars,
            "env_nobuild": non_build_vars,
        }
        envs = list(environment.get_environments(conf, None))
        assert len(envs) == env_count
        # dir_name identifies the build product; nobuild-only variations share it
        assert len({e.dir_name for e in envs}) == build_count
def test__parse_matrix():
    """_parse_matrix flattens a sectioned matrix into (section, key) tuples."""
    matrix = {"env": {"A": "B"}, "env_nobuild": {"C": None}, "req": {"foo": ["9"]}}
    expected = {("env", "A"): "B", ("env_nobuild", "C"): None, ("req", "foo"): ["9"]}
    assert environment._parse_matrix(matrix) == expected
def test__parse_matrix_invalid():
    """Unknown top-level keys in the matrix must raise UserError."""
    with pytest.raises(util.UserError):
        environment._parse_matrix({"env": "1", "req": "1", "foo": "1"})
def test__parse_matrix_legacy():
    """A matrix without section keys is interpreted as requirements ('req')."""
    legacy = {"foo": "1", "bar": ["2", "3"]}
    expected = {("req", "foo"): "1", ("req", "bar"): ["2", "3"]}
    assert environment._parse_matrix(legacy) == expected
def test__parse_exclude_include_rule():
    """Include/exclude rules flatten to (key, subkey) entries; scalar keys
    get a None subkey."""
    rule = {"python": "2.6", "environment_type": "conda", "sys_platform": "123",
            "env": {"A": "B"}, "env_nobuild": {"C": "D"}, "req": {"foo": "9"}}
    expected = {("python", None): "2.6",
                ("environment_type", None): "conda",
                ("sys_platform", None): "123",
                ("env", "A"): "B",
                ("env_nobuild", "C"): "D",
                ("req", "foo"): "9"}
    assert environment._parse_exclude_include_rule(rule) == expected
def test__parse_exclude_include_rule_invalid():
    """Unknown keys in an include/exclude rule must raise UserError."""
    bad_rule = {"python": "2.6", "environment_type": "conda",
                "sys_platform": "123", "env": {"A": "B"},
                "env_nobuild": {"C": "D"}, "req": {"foo": "9"},
                "foo": "9"}
    with pytest.raises(util.UserError):
        environment._parse_exclude_include_rule(bad_rule)
def test__parse_matrix_entries():
    """Flattened matrix entries split into the python version, requirement
    dict, and (tag, name) -> value environment variables."""
    entries = {("python", None): "2.6", ("env", "A"): "B",
               ("env_nobuild", "C"): "D", ("req", "foo"): "9"}
    python, requirements, tagged_env_vars = environment._parse_matrix_entries(entries)
    assert python == "2.6"
    assert requirements == {"foo": "9"}
    assert tagged_env_vars == {("build", "A"): "B", ("nobuild", "C"): "D"}
|
Fale/ansible
|
refs/heads/devel
|
test/integration/targets/module_utils/module_utils/spam8/ham/bacon.py
|
298
|
# Marker value read by the integration test to prove this nested
# module_utils package (spam8/ham/bacon.py) was importable.
data = 'spam8:bacon'
|
kk47/C-Cpp
|
refs/heads/master
|
deppends/python/google/protobuf/descriptor_database.py
|
230
|
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides a container for DescriptorProtos."""
__author__ = 'matthewtoia@google.com (Matt Toia)'
class DescriptorDatabase(object):
  """Stores FileDescriptorProtos and indexes the symbols they declare."""

  def __init__(self):
    # File name -> FileDescriptorProto
    self._file_desc_protos_by_file = {}
    # Fully qualified symbol name -> FileDescriptorProto declaring it
    self._file_desc_protos_by_symbol = {}

  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this database.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    """
    self._file_desc_protos_by_file[file_desc_proto.name] = file_desc_proto
    package = file_desc_proto.package
    # Index every message (and its nested declarations) by qualified name.
    for message in file_desc_proto.message_type:
      for symbol in _ExtractSymbols(message, package):
        self._file_desc_protos_by_symbol[symbol] = file_desc_proto
    # Top-level enums are qualified directly under the package.
    for enum in file_desc_proto.enum_type:
      enum_name = '.'.join((package, enum.name))
      self._file_desc_protos_by_symbol[enum_name] = file_desc_proto

  def FindFileByName(self, name):
    """Finds the file descriptor proto by file name.

    Typically the file name is a relative path ending in a .proto file. The
    proto with the given name must have been added to this database with
    the Add method, or a KeyError is raised.

    Args:
      name: The file name to find.

    Returns:
      The file descriptor proto matching the name.

    Raises:
      KeyError: if no file by the given name was added.
    """
    return self._file_desc_protos_by_file[name]

  def FindFileContainingSymbol(self, symbol):
    """Finds the file descriptor proto containing the specified symbol.

    The symbol should be a fully qualified name including the file
    descriptor's package and any containing messages, e.g.
    'some.package.name.Message' or 'some.package.name.Message.NestedEnum'.
    The file must have been added with the Add method, or a KeyError is
    raised.

    Args:
      symbol: The fully qualified symbol name.

    Returns:
      The file descriptor proto containing the symbol.

    Raises:
      KeyError: if no added file contains the specified symbol.
    """
    return self._file_desc_protos_by_symbol[symbol]
def _ExtractSymbols(desc_proto, package):
"""Pulls out all the symbols from a descriptor proto.
Args:
desc_proto: The proto to extract symbols from.
package: The package containing the descriptor type.
Yields:
The fully qualified name found in the descriptor.
"""
message_name = '.'.join((package, desc_proto.name))
yield message_name
for nested_type in desc_proto.nested_type:
for symbol in _ExtractSymbols(nested_type, message_name):
yield symbol
for enum_type in desc_proto.enum_type:
yield '.'.join((message_name, enum_type.name))
|
mozilla/mozilla-ignite
|
refs/heads/master
|
vendor-local/lib/python/south/creator/__init__.py
|
179
|
"""
The creator module is responsible for making new migration files, either
as blank templates or by autodetecting changes. It contains code that
used to all be in startmigration.py.
"""
|
henrytao-me/openerp.positionq
|
refs/heads/master
|
openerp/addons/account_check_writing/wizard/account_check_batch_printing.py
|
54
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.tools.translate import _
from openerp.osv import fields, osv
class account_check_write(osv.osv_memory):
    """Wizard that numbers and prints a batch of check payment vouchers."""
    _name = 'account.check.write'
    # Fixed typo: was 'Prin Check in Batch'
    _description = 'Print Check in Batch'

    _columns = {
        'check_number': fields.integer('Next Check Number', required=True, help="The number of the next check number to be printed."),
    }

    def _get_next_number(self, cr, uid, context=None):
        """Return the next value of the dedicated check-number sequence."""
        dummy, sequence_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account_check_writing', 'sequence_check_number')
        return self.pool.get('ir.sequence').read(cr, uid, sequence_id, ['number_next'])['number_next']

    _defaults = {
        'check_number': _get_next_number,
    }

    def print_check_write(self, cr, uid, ids, context=None):
        """Number the selected vouchers starting at the wizard's check
        number, validate them, and return the report action that prints
        them with the company's check layout.

        Raises:
            osv.except_osv: if any selected voucher already has a number.
        """
        if context is None:
            context = {}
        voucher_obj = self.pool.get('account.voucher')
        ir_sequence_obj = self.pool.get('ir.sequence')

        # update the sequence to number the checks from the value encoded in the wizard
        dummy, sequence_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account_check_writing', 'sequence_check_number')
        increment = ir_sequence_obj.read(cr, uid, sequence_id, ['number_increment'])['number_increment']
        new_value = self.browse(cr, uid, ids[0], context=context).check_number
        ir_sequence_obj.write(cr, uid, sequence_id, {'number_next': new_value})

        # validate the checks so that they get a number
        voucher_ids = context.get('active_ids', [])
        for check in voucher_obj.browse(cr, uid, voucher_ids, context=context):
            new_value += increment
            if check.number:
                raise osv.except_osv(_('Error!'), _("One of the printed check already got a number."))
        voucher_obj.proforma_voucher(cr, uid, voucher_ids, context=context)

        # update the sequence again (because the assignation using next_val was made during the same transaction of
        # the first update of sequence)
        ir_sequence_obj.write(cr, uid, sequence_id, {'number_next': new_value})

        # print the checks, using the layout configured on the company
        check_layout_report = {
            'top': 'account.print.check.top',
            'middle': 'account.print.check.middle',
            'bottom': 'account.print.check.bottom',
        }
        check_layout = voucher_obj.browse(cr, uid, voucher_ids[0], context=context).company_id.check_layout
        if not check_layout:
            check_layout = 'top'
        return {
            'type': 'ir.actions.report.xml',
            'report_name': check_layout_report[check_layout],
            'datas': {
                'model': 'account.voucher',
                'ids': voucher_ids,
                'report_type': 'pdf'
            },
            'nodestroy': True
        }

account_check_write()
|
wolverton-research-group/qmpy
|
refs/heads/master
|
qmpy/web/views/api/formationenergy_list_view.py
|
1
|
from rest_framework import generics
import django_filters.rest_framework
from qmpy.web.serializers.formationenergy import FormationEnergySerializer
from qmpy.materials.formation_energy import FormationEnergy
from qmpy.materials.composition import Composition
from qmpy.materials.element import Element
from qmpy.utils import query_to_Q, parse_formula_regex
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.renderers import JSONRenderer, BrowsableAPIRenderer
from django.db.models import F, Q
import operator
from qmpy.rester import qmpy_rester
from collections import OrderedDict
import time
import datetime
from functools import reduce
# Page size used when the client's ?limit= parameter is absent or malformed.
DEFAULT_LIMIT = 50
# Root URL of the OQMD REST API; stripped from request URLs when echoing the
# query representation back in paginated responses.
BASE_URL = qmpy_rester.REST_OQMDAPI
class QmpyPagination(LimitOffsetPagination):
    """Limit/offset paginator that wraps results in the OQMD response
    envelope (links / resource / data / meta / response_message)."""
    default_limit = 50

    def get_paginated_response(self, page_data):
        """Build the OQMD envelope around one serialized page.

        Args:
            page_data: dict with keys "data" (serialized rows), "request"
                (the DRF request), "comments" and "query_tree" (strings
                echoed into the meta section).
        """
        data = page_data["data"]
        request = page_data["request"]
        # Echo the query back relative to the API root
        full_url = request.build_absolute_uri()
        representation = full_url.replace(BASE_URL, "")
        time_now = time.time()
        time_stamp = datetime.datetime.fromtimestamp(time_now).strftime(
            "%Y-%m-%d %H:%M:%S"
        )
        return Response(
            OrderedDict(
                [
                    (
                        "links",
                        OrderedDict(
                            [
                                ("next", self.get_next_link()),
                                ("previous", self.get_previous_link()),
                                (
                                    "base_url",
                                    {
                                        "href": BASE_URL,
                                        "meta": {"_oqmd_version": "1.0"},
                                    },
                                ),
                            ]
                        ),
                    ),
                    ("resource", {}),
                    ("data", data),
                    (
                        "meta",
                        OrderedDict(
                            [
                                ("query", {"representation": representation}),
                                ("api_version", "1.0"),
                                ("time_stamp", time_stamp),
                                (
                                    # Rows in this page, capped by what remains
                                    "data_returned",
                                    min(
                                        self.get_limit(request),
                                        self.count - self.get_offset(request),
                                    ),
                                ),
                                ("data_available", self.count),
                                ("comments", page_data["comments"]),
                                ("query_tree", page_data["query_tree"]),
                                (
                                    # PEP 8: compare to None with `is not`
                                    "more_data_available",
                                    (self.get_next_link() is not None)
                                    or (self.get_previous_link() is not None),
                                ),
                            ]
                        ),
                    ),
                    ("response_message", "OK"),
                ]
            )
        )
class FormationEnergyDetail(generics.RetrieveAPIView):
    """Retrieve a single formation-energy record by primary key."""
    queryset = FormationEnergy.objects.all()
    serializer_class = FormationEnergySerializer
class FormationEnergyList(generics.ListAPIView):
    """List endpoint for formation energies with URL-parameter filtering.

    Supported query parameters: icsd, composition, noduplicate, filter,
    sort_by (stability | delta_e), limit, sort_offset, desc.
    """
    serializer_class = FormationEnergySerializer
    pagination_class = QmpyPagination  # LimitOffsetPagination

    def get_queryset(self):
        """Apply all URL-parameter filters (and optional sorting) to the
        standard-fit formation energies."""
        fes = FormationEnergy.objects.filter(fit="standard")
        fes = self.icsd_filter(fes)
        fes = self.composition_filter(fes)
        fes = self.duplicate_filter(fes)
        fes = self.filter(fes)
        sort_fes = self.request.GET.get("sort_by", False)
        if not sort_fes:
            return fes
        else:
            limit = self.request.GET.get("limit")
            sort_offset = self.request.GET.get("sort_offset")
            # Narrowed from bare `except`: a missing parameter is None
            # (TypeError) and a malformed one raises ValueError.
            try:
                limit = int(limit)
            except (TypeError, ValueError):
                limit = DEFAULT_LIMIT
            try:
                sort_offset = int(sort_offset)
            except (TypeError, ValueError):
                sort_offset = 0
            desc = self.request.GET.get("desc", False)
            if sort_fes == "stability":
                fes = self.sort_by_stability(fes, limit, sort_offset, desc)
            elif sort_fes == "delta_e":
                fes = self.sort_by_delta_e(fes, limit, sort_offset, desc)
            return fes

    def list(self, request, *args, **kwargs):
        """Serialize the queryset; paginated output is wrapped in the OQMD
        envelope produced by QmpyPagination."""
        query_set = self.get_queryset()
        page = self.paginate_queryset(query_set)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            page_data = {
                "data": serializer.data,
                "request": self.request,
                "comments": "",  # self.query_comments,
                "query_tree": "",
            }
            return self.get_paginated_response(page_data)
        serializer = self.get_serializer(query_set, many=True)
        return Response(serializer.data)

    def icsd_filter(self, fes):
        """Restrict by ICSD membership via ?icsd=true/false, detected from
        the entry path."""
        request = self.request
        ificsd = request.GET.get("icsd", None)
        # PEP 8: compare to None with `is`
        if ificsd is None:
            return fes
        elif ificsd in ["False", "false", "F", "f"]:
            return fes.exclude(entry__path__contains="/icsd/")
        elif ificsd in ["True", "true", "T", "t"]:
            return fes.filter(entry__path__contains="/icsd/")
        return fes

    def composition_filter(self, fes):
        """
        Valid url parameter inputs:
            1. ?composition=Fe2O3
            2. ?composition=Fe-O
            3. ?composition={Fe,Ni}O
            4. ?composition={3d}2O3
        """
        request = self.request
        comp = request.GET.get("composition", False)
        if not comp:
            return fes
        # Bug fix: the original `if "{" and "}" in comp` only tested for
        # "}" because the constant "{" is always truthy.
        if "{" in comp and "}" in comp:
            # Brace syntax: expand element groups into explicit formulas
            c_dict_lst = parse_formula_regex(comp)
            f_lst = []
            for cd in c_dict_lst:
                f = " ".join(["%s%g" % (k, cd[k]) for k in sorted(cd.keys())])
                f_lst.append(f)
            fes = fes.filter(composition__formula__in=f_lst)
        elif "-" in comp:
            # Chemical-space syntax: phases built only from the listed elements
            c_lst = comp.strip().split("-")
            dim = len(c_lst)
            q_lst = [Q(composition__element_list__contains=s + "_") for s in c_lst]
            combined_q = reduce(operator.or_, q_lst)
            combined_q = reduce(
                operator.and_, [combined_q, Q(composition__ntypes__lte=dim)]
            )
            # Exclude phases containing any element outside the space
            ex_q_lst = [
                Q(composition__element_list__contains=e.symbol + "_")
                for e in Element.objects.exclude(symbol__in=c_lst)
            ]
            combined_q_not = reduce(operator.or_, ex_q_lst)
            fes = fes.filter(combined_q).exclude(combined_q_not)
        else:
            # Exact composition, e.g. Fe2O3
            c = Composition.get(comp)
            fes = fes.filter(composition=c)
        return fes

    def duplicate_filter(self, fes):
        """
        Valid url parameter inputs:
            ?noduplicate=True
        """
        request = self.request
        dup = request.GET.get("noduplicate", False)
        if dup in ["T", "True", "true", "TRUE", "t"]:
            # Keep only canonical entries (entries that are their own duplicate_of)
            fes = fes.filter(entry__id=F("entry__duplicate_of__id"))
        return fes

    def filter(self, fes):
        """
        Valid attributes:
            element, generic, prototype, spacegroup,
            volume, natoms, ntypes, stability,
            delta_e, band_gap
        Requirements:
            1. Space padding is required between expressions.
            2. Within each expression, spaces are not allowed.
            3. Operators include: 'AND', 'OR'
            4. '(' and ')' can be used to change precedence
            5. For numerical attributes, we can have '>' or '<' conditions.
        Valid examples:
            'element=Mn AND band_gap>1'
            '( element=O OR element=S ) AND natoms<3'
        Invalid examples:
            'element = Fe'
            '( element=Fe And element=O)'
        Additionally, instead of multiple 'element' expressions, we can use
        'element_set' expression to combine elements in the filter.
        Format of element_set expression:
            ',': AND operator
            '-': OR operator
            '(', ')': to change precedence
        Examples:
            element_set=Al;O,H
            element_set=(Mn;Fe),O
        """
        request = self.request
        filters = request.GET.get("filter", False)
        if not filters:
            return fes
        # shortcut to get all stable phases
        filters = filters.replace("stability=0", "stability<=0")
        # replace '&', '|' and '~' with 'AND', 'OR' and 'NOT', respectively
        filters = filters.replace("&", " AND ")
        filters = filters.replace("|", " OR ")
        filters = filters.replace("~", " NOT ")
        q, meta_info = query_to_Q(filters)
        if not q:
            return []
        fes = fes.filter(q)
        return fes

    def sort_by_stability(self, fes, limit=DEFAULT_LIMIT, sort_offset=0, desc=False):
        """Order by hull distance and slice out one page."""
        if desc in ["T", "t", "True", "true"]:
            ordered_fes = fes.order_by("-stability")
        else:
            ordered_fes = fes.order_by("stability")
        return ordered_fes[sort_offset : sort_offset + limit]

    def sort_by_delta_e(self, fes, limit=DEFAULT_LIMIT, sort_offset=0, desc=False):
        """Order by formation energy and slice out one page."""
        if desc in ["T", "t", "True", "true"]:
            ordered_fes = fes.order_by("-delta_e")
        else:
            ordered_fes = fes.order_by("delta_e")
        return ordered_fes[sort_offset : sort_offset + limit]
|
jazztpt/edx-platform
|
refs/heads/master
|
lms/djangoapps/certificates/migrations/0023_auto__del_unique_badgeassertion_course_id_user__add_unique_badgeassert.py
|
52
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Removing unique constraint on 'BadgeAssertion', fields ['course_id', 'user']
db.delete_unique('certificates_badgeassertion', ['course_id', 'user_id'])
# Adding unique constraint on 'BadgeAssertion', fields ['course_id', 'user', 'mode']
db.create_unique('certificates_badgeassertion', ['course_id', 'user_id', 'mode'])
def backwards(self, orm):
# Removing unique constraint on 'BadgeAssertion', fields ['course_id', 'user', 'mode']
db.delete_unique('certificates_badgeassertion', ['course_id', 'user_id', 'mode'])
# Adding unique constraint on 'BadgeAssertion', fields ['course_id', 'user']
db.create_unique('certificates_badgeassertion', ['course_id', 'user_id'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'certificates.badgeassertion': {
'Meta': {'unique_together': "(('course_id', 'user', 'mode'),)", 'object_name': 'BadgeAssertion'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'default': 'None', 'max_length': '255', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'certificates.badgeimageconfiguration': {
'Meta': {'object_name': 'BadgeImageConfiguration'},
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'icon': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '125'})
},
'certificates.certificategenerationconfiguration': {
'Meta': {'ordering': "('-change_date',)", 'object_name': 'CertificateGenerationConfiguration'},
'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'certificates.certificategenerationcoursesetting': {
'Meta': {'object_name': 'CertificateGenerationCourseSetting'},
'course_key': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'})
},
'certificates.certificatehtmlviewconfiguration': {
'Meta': {'ordering': "('-change_date',)", 'object_name': 'CertificateHtmlViewConfiguration'},
'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'configuration': ('django.db.models.fields.TextField', [], {}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'certificates.certificatewhitelist': {
'Meta': {'object_name': 'CertificateWhitelist'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'default': 'None', 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'whitelist': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'certificates.examplecertificate': {
'Meta': {'object_name': 'ExampleCertificate'},
'access_key': ('django.db.models.fields.CharField', [], {'default': "'25c5af67da3d47039aa8b00b3a929fa9'", 'max_length': '255', 'db_index': 'True'}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'download_url': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
'error_reason': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
'example_cert_set': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['certificates.ExampleCertificateSet']"}),
'full_name': ('django.db.models.fields.CharField', [], {'default': "u'John Do\\xeb'", 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'started'", 'max_length': '255'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'88190407a2f14c429a7b5336e3fb0189'", 'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'certificates.examplecertificateset': {
'Meta': {'object_name': 'ExampleCertificateSet'},
'course_key': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'})
},
'certificates.generatedcertificate': {
'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'GeneratedCertificate'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'default': 'None', 'max_length': '255', 'blank': 'True'}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
'distinction': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'download_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128', 'blank': 'True'}),
'download_uuid': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'}),
'error_reason': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'grade': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '5', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '32'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'unavailable'", 'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'verify_uuid': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['certificates']  # South: restrict this migration's frozen ORM to the 'certificates' app
|
ivanbaldo/yowsup
|
refs/heads/master
|
yowsup/layers/protocol_groups/structs/group.py
|
59
|
class Group(object):
    """Read-only snapshot of a group's metadata and its member list.

    Timestamp arguments are normalized to int, since they may arrive
    as strings from the wire.
    """

    def __init__(self, groupId, creatorJid, subject, subjectOwnerJid, subjectTime, creationTime, participants=None):
        self._groupId = groupId
        self._creatorJid = creatorJid
        self._subject = subject
        self._subjectOwnerJid = subjectOwnerJid
        self._subjectTime = int(subjectTime)
        self._creationTime = int(creationTime)
        self._participants = participants if participants else {}

    def getId(self):
        return self._groupId

    def getCreator(self):
        return self._creatorJid

    def getOwner(self):
        # The owner of a group is, by definition, its creator.
        return self.getCreator()

    def getSubject(self):
        return self._subject

    def getSubjectOwner(self):
        return self._subjectOwnerJid

    def getSubjectTime(self):
        return self._subjectTime

    def getCreationTime(self):
        return self._creationTime

    def getParticipants(self):
        return self._participants

    def __str__(self):
        members = ", ".join(self._participants.keys())
        return "ID: %s, Subject: %s, Creation: %s, Creator: %s, Subject Owner: %s, Subject Time: %s\nParticipants: %s" % (
            self.getId(), self.getSubject(), self.getCreationTime(),
            self.getCreator(), self.getSubjectOwner(), self.getSubjectTime(),
            members)
|
sajao/CrisisLex
|
refs/heads/master
|
src-collect/tweepy1/utils.py
|
30
|
# Tweepy
# Copyright 2010 Joshua Roesslein
# See LICENSE for details.
from datetime import datetime
import time
import htmlentitydefs
import re
import locale
from urllib import quote
def parse_datetime(string):
    """Parse a Twitter REST API timestamp into a naive datetime.

    Expected format: 'Wed Aug 27 13:08:45 +0000 2008'. LC_TIME is
    forced to 'C' so English month/day names parse regardless of the
    process locale, and restored in a finally block so a parse error
    no longer leaks the 'C' locale to the rest of the process.
    """
    # Set locale for date parsing
    locale.setlocale(locale.LC_TIME, 'C')
    try:
        # We must parse datetime this way to work in python 2.4
        return datetime(*(time.strptime(string, '%a %b %d %H:%M:%S +0000 %Y')[0:6]))
    finally:
        # Reset locale back to the default setting, even on failure
        locale.setlocale(locale.LC_TIME, '')
def parse_html_value(html):
    """Return the text between the first '>' and the last '<' of *html*."""
    open_end = html.find('>') + 1
    close_start = html.rfind('<')
    return html[open_end:close_start]
def parse_a_href(atag):
    """Extract the first double-quoted value (the href) from an <a> tag."""
    opening_quote = atag.find('"')
    closing_quote = atag.find('"', opening_quote + 1)
    return atag[opening_quote + 1:closing_quote]
def parse_search_datetime(string):
    """Parse a Twitter Search API timestamp into a naive datetime.

    Expected format: 'Wed, 27 Aug 2008 13:08:45 +0000'. LC_TIME is
    forced to 'C' so English month/day names parse regardless of the
    process locale, and restored in a finally block so a parse error
    no longer leaks the 'C' locale to the rest of the process.
    """
    # Set locale for date parsing
    locale.setlocale(locale.LC_TIME, 'C')
    try:
        # We must parse datetime this way to work in python 2.4
        return datetime(*(time.strptime(string, '%a, %d %b %Y %H:%M:%S +0000')[0:6]))
    finally:
        # Reset locale back to the default setting, even on failure
        locale.setlocale(locale.LC_TIME, '')
def unescape_html(text):
    """Created by Fredrik Lundh (http://effbot.org/zone/re-sub.htm#unescape-html)

    Replace HTML entities in *text* (&#38;, &#x26;, &amp;) with the
    characters they denote; unrecognized entities are left as-is.

    NOTE(review): relies on the Python 2 builtin `unichr` and the
    `htmlentitydefs` module (renamed `html.entities` in Python 3),
    so this function is Python 2 only as written.
    """
    def fixup(m):
        # m matches one entity, e.g. '&#38;', '&#x26;' or '&amp;'
        text = m.group(0)
        if text[:2] == "&#":
            # character reference
            try:
                if text[:3] == "&#x":
                    # hexadecimal code point
                    return unichr(int(text[3:-1], 16))
                else:
                    # decimal code point
                    return unichr(int(text[2:-1]))
            except ValueError:
                # malformed number: fall through and keep the raw text
                pass
        else:
            # named entity
            try:
                text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])
            except KeyError:
                # unknown entity name: keep the raw text
                pass
        return text # leave as is
    return re.sub("&#?\w+;", fixup, text)
def convert_to_utf8_str(arg):
    """Coerce *arg* to a UTF-8 encoded byte string.

    Written by Michael Norton (http://docondev.blogspot.com/).
    NOTE(review): Python 2 only -- the `unicode` builtin does not
    exist in Python 3.
    """
    if isinstance(arg, unicode):
        # unicode text is encoded down to UTF-8 bytes
        arg = arg.encode('utf-8')
    elif not isinstance(arg, str):
        # non-string values (ints, objects, ...) go through str()
        arg = str(arg)
    return arg
def import_simplejson():
    """Return the best available JSON module.

    Preference order: the standalone ``simplejson`` package, the
    stdlib ``json`` module (Python 2.6+), then Google App Engine's
    bundled django copy.

    Raises:
        ImportError: if no JSON implementation can be imported.
    """
    try:
        import simplejson as json
    except ImportError:
        try:
            import json  # Python 2.6+
        except ImportError:
            try:
                from django.utils import simplejson as json  # Google App Engine
            except ImportError:
                # Parenthesized raise is valid on Python 2 AND 3; the old
                # 'raise ImportError, "..."' form is a SyntaxError on Python 3.
                raise ImportError("Can't load a json library")
    return json
def list_to_csv(item_list):
    """Join items into a comma-separated string.

    Returns None when *item_list* is empty or None (matching the
    original implicit-None behavior).
    """
    if not item_list:
        return None
    return ','.join(str(item) for item in item_list)
def urlencode_noplus(query):
    """Urlencode a query dict without turning spaces into '+'.

    Unlike urllib's urlencode, keys and values go through quote(),
    so spaces become %20 -- which some endpoints require.
    """
    # dict.items() works on both Python 2 and 3; iteritems() was removed in 3.
    return '&'.join('%s=%s' % (quote(str(k)), quote(str(v)))
                    for k, v in query.items())
|
yugang/crosswalk-test-suite
|
refs/heads/master
|
tools/atip/atip/common/steps.py
|
3
|
# Copyright (c) 2014 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this list
# of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Fan, Yugang <yugang.fan@intel.com>
import time
from behave import step
from atip.web import web
@step(u'I wait for {timeout:d} seconds')
def wait_for_timeout(context, timeout):
    """Behave step: block the scenario for *timeout* seconds."""
    time.sleep(timeout)
@step(u'launch "{app_name}"')
def launch_app_by_name(context, app_name):
    """Behave step: launch the named web application via the web helper."""
    web.launch_webapp_by_name(context, app_name)
@step(u'switch to "{app_name}"')
def switch_to_app_name(context, app_name):
    """Behave step: make a previously launched app the active one.

    Fails the step (AssertionError, as behave expects) with an
    explanatory message when the app has not been launched, instead
    of the original opaque 'assert False'.
    """
    assert app_name in context.apps, 'app "%s" was never launched' % app_name
    context.app = context.apps[app_name]
|
kisna72/django
|
refs/heads/master
|
tests/admin_changelist/urls.py
|
810
|
from django.conf.urls import url
from . import admin
# Route everything under /admin/ to the test admin site.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
]
|
jmcabandara/imagefactory
|
refs/heads/master
|
imgfac/rest/OAuthTools.py
|
4
|
#
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import logging

import oauth2 as oauth

from imgfac.rest.bottle import *
from imgfac.ApplicationConfiguration import ApplicationConfiguration
log = logging.getLogger(__name__)
# Two-legged OAuth verifier; only HMAC-SHA1 signatures are accepted.
oauth_server = oauth.Server(signature_methods={'HMAC-SHA1':oauth.SignatureMethod_HMAC_SHA1()})
class Consumer(object):
    """OAuth consumer whose secret is looked up by key in the
    application's 'clients' configuration section."""

    def __init__(self, key):
        self.key = key
        registry = ApplicationConfiguration().configuration['clients']
        # No registry configured means no secret for any key.
        self.secret = registry.get(key) if registry else None
def validate_two_leg_oauth():
    """Validate the current bottle request using two-legged OAuth.

    Returns True on success. Raises bottle HTTPResponse with:
      * 401 (plus a WWW-Authenticate challenge) when the Authorization
        header is missing or signature verification fails,
      * 500 on any other unexpected validation error.
    """
    try:
        auth_header_key = 'Authorization'
        auth_header = {}
        if auth_header_key in request.headers:
            auth_header.update({auth_header_key:request.headers[auth_header_key]})
        else:
            # Challenge the client to authenticate with OAuth
            response.set_header('WWW-Authenticate', 'OAuth')
            raise HTTPResponse(status=401, output='Unauthorized: missing authorization')
        # Rebuild the signed request from the incoming HTTP request
        req = oauth.Request.from_request(request.method,
                                         request.url,
                                         headers=auth_header,
                                         parameters=request.params)
        oauth_consumer = Consumer(request.params['oauth_consumer_key'])
        # Raises on signature mismatch; AttributeError is treated as an
        # authorization failure (presumably raised by oauth2 on malformed
        # requests -- TODO confirm).
        oauth_server.verify_request(req, oauth_consumer, None)
        return True
    except AttributeError as e:
        log.debug('Returning HTTP 401 (Unauthorized: authorization failed) on exception: %s' % e)
        response.set_header('WWW-Authenticate', 'OAuth')
        raise HTTPResponse(status=401, output='Unauthorized: authorization failed')
    except Exception as e:
        log.exception('Returning HTTP 500 (OAuth validation failed) on exception: %s' % e)
        raise HTTPResponse(status=500, output='OAuth validation failed: %s' % e)
def oauth_protect(f):
    """Decorator that enforces two-legged OAuth on a request handler.

    Validation is skipped entirely when the 'no_oauth' configuration
    flag is set. functools.wraps preserves the wrapped handler's full
    metadata (__name__, __doc__, __module__, ...) instead of the
    original manual __name__-only copy.
    """
    @functools.wraps(f)
    def decorated_function(*args, **kwargs):
        if not ApplicationConfiguration().configuration['no_oauth']:
            validate_two_leg_oauth()
        return f(*args, **kwargs)
    return decorated_function
|
idegtiarov/ceilometer
|
refs/heads/master
|
ceilometer/tests/functional/api/v2/test_statistics_scenarios.py
|
1
|
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test events statistics retrieval."""
import datetime
from ceilometer.publisher import utils
from ceilometer import sample
from ceilometer.tests import db as tests_db
from ceilometer.tests.functional.api import v2
class TestMaxProjectVolume(v2.FunctionalTest):
    """Statistics API: 'max' aggregate for volume.size filtered by project.

    setUp stores three gauge samples (volumes 5, 6, 7) for distinct
    resources under 'project1', timestamped at 10:30, 11:31 and 12:32
    on 2012-09-25.
    """

    PATH = '/meters/volume.size/statistics'

    def setUp(self):
        super(TestMaxProjectVolume, self).setUp()
        for i in range(3):
            s = sample.Sample(
                'volume.size',
                'gauge',
                'GiB',
                5 + i,
                'user-id',
                'project1',
                'resource-id-%s' % i,
                timestamp=datetime.datetime(2012, 9, 25, 10 + i, 30 + i),
                resource_metadata={'display_name': 'test-volume',
                                   'tag': 'self.sample',
                                   },
                source='source1',
            )
            # Sign the sample the way the publisher would before storing it.
            msg = utils.meter_message_from_counter(
                s, self.CONF.publisher.telemetry_secret,
            )
            self.conn.record_metering_data(msg)

    def test_no_time_bounds(self):
        data = self.get_json(self.PATH, q=[{'field': 'project_id',
                                            'value': 'project1',
                                            }])
        self.assertEqual(7, data[0]['max'])
        self.assertEqual(3, data[0]['count'])

    def test_start_timestamp(self):
        # >= 11:30 keeps the samples at 11:31 and 12:32 (values 6 and 7).
        data = self.get_json(self.PATH, q=[{'field': 'project_id',
                                            'value': 'project1',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'ge',
                                            'value': '2012-09-25T11:30:00',
                                            },
                                           ])
        self.assertEqual(7, data[0]['max'])
        self.assertEqual(2, data[0]['count'])

    def test_start_timestamp_after(self):
        # A start bound past the newest sample matches nothing.
        data = self.get_json(self.PATH, q=[{'field': 'project_id',
                                            'value': 'project1',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'ge',
                                            'value': '2012-09-25T12:34:00',
                                            },
                                           ])
        self.assertEqual([], data)

    def test_end_timestamp(self):
        # <= 11:30 keeps only the 10:30 sample (value 5).
        data = self.get_json(self.PATH, q=[{'field': 'project_id',
                                            'value': 'project1',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'le',
                                            'value': '2012-09-25T11:30:00',
                                            },
                                           ])
        self.assertEqual(5, data[0]['max'])
        self.assertEqual(1, data[0]['count'])

    def test_end_timestamp_before(self):
        # An end bound before the oldest sample matches nothing.
        data = self.get_json(self.PATH, q=[{'field': 'project_id',
                                            'value': 'project1',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'le',
                                            'value': '2012-09-25T09:54:00',
                                            },
                                           ])
        self.assertEqual([], data)

    def test_start_end_timestamp(self):
        # Window [11:30, 11:32] isolates the single 11:31 sample (value 6).
        data = self.get_json(self.PATH, q=[{'field': 'project_id',
                                            'value': 'project1',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'ge',
                                            'value': '2012-09-25T11:30:00',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'le',
                                            'value': '2012-09-25T11:32:00',
                                            },
                                           ])
        self.assertEqual(6, data[0]['max'])
        self.assertEqual(1, data[0]['count'])
class TestMaxResourceVolume(v2.FunctionalTest):
    """Statistics API: 'max' aggregate for volume.size filtered by resource.

    setUp stores three gauge samples (volumes 5, 6, 7) for one resource,
    timestamped at 10:30, 11:31 and 12:32 on 2012-09-25. Also covers
    period bucketing and period-value validation.
    """

    PATH = '/meters/volume.size/statistics'

    def setUp(self):
        super(TestMaxResourceVolume, self).setUp()
        for i in range(3):
            s = sample.Sample(
                'volume.size',
                'gauge',
                'GiB',
                5 + i,
                'user-id',
                'project1',
                'resource-id',
                timestamp=datetime.datetime(2012, 9, 25, 10 + i, 30 + i),
                resource_metadata={'display_name': 'test-volume',
                                   'tag': 'self.sample',
                                   },
                source='source1',
            )
            # Sign the sample the way the publisher would before storing it.
            msg = utils.meter_message_from_counter(
                s, self.CONF.publisher.telemetry_secret,
            )
            self.conn.record_metering_data(msg)

    def test_no_time_bounds(self):
        data = self.get_json(self.PATH, q=[{'field': 'resource_id',
                                            'value': 'resource-id',
                                            }])
        self.assertEqual(7, data[0]['max'])
        self.assertEqual(3, data[0]['count'])

    def test_no_time_bounds_with_period(self):
        # A 1h period puts each of the three samples in its own bucket.
        data = self.get_json(self.PATH,
                             q=[{'field': 'resource_id',
                                 'value': 'resource-id'}],
                             period=3600)
        self.assertEqual(3, len(data))
        self.assertEqual(set([u'2012-09-25T10:30:00',
                              u'2012-09-25T12:32:00',
                              u'2012-09-25T11:31:00']),
                         set(x['duration_start'] for x in data))
        self.assertEqual(3600, data[0]['period'])
        self.assertEqual(set([u'2012-09-25T10:30:00',
                              u'2012-09-25T11:30:00',
                              u'2012-09-25T12:30:00']),
                         set(x['period_start'] for x in data))

    def test_period_with_negative_value(self):
        # Negative periods are rejected with 400.
        resp = self.get_json(self.PATH, expect_errors=True,
                             q=[{'field': 'resource_id',
                                 'value': 'resource-id'}],
                             period=-1)
        self.assertEqual(400, resp.status_code)

    @tests_db.run_with('sqlite', 'mysql', 'pgsql', 'hbase', 'db2')
    def test_period_with_large_value(self):
        # Oversized periods must be rejected, not overflow the backend.
        resp = self.get_json(self.PATH, expect_errors=True,
                             q=[{'field': 'user_id',
                                 'value': 'user-id'}],
                             period=10000000000000)
        self.assertEqual(400, resp.status_code)
        self.assertIn(b"Invalid period", resp.body)

    def test_start_timestamp(self):
        # >= 11:30 keeps the samples at 11:31 and 12:32 (values 6 and 7).
        data = self.get_json(self.PATH, q=[{'field': 'resource_id',
                                            'value': 'resource-id',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'ge',
                                            'value': '2012-09-25T11:30:00',
                                            },
                                           ])
        self.assertEqual(7, data[0]['max'])
        self.assertEqual(2, data[0]['count'])

    def test_start_timestamp_after(self):
        # A start bound past the newest sample matches nothing.
        data = self.get_json(self.PATH, q=[{'field': 'resource_id',
                                            'value': 'resource-id',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'ge',
                                            'value': '2012-09-25T12:34:00',
                                            },
                                           ])
        self.assertEqual([], data)

    def test_end_timestamp(self):
        # <= 11:30 keeps only the 10:30 sample (value 5).
        data = self.get_json(self.PATH, q=[{'field': 'resource_id',
                                            'value': 'resource-id',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'le',
                                            'value': '2012-09-25T11:30:00',
                                            },
                                           ])
        self.assertEqual(5, data[0]['max'])
        self.assertEqual(1, data[0]['count'])

    def test_end_timestamp_before(self):
        # An end bound before the oldest sample matches nothing.
        data = self.get_json(self.PATH, q=[{'field': 'resource_id',
                                            'value': 'resource-id',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'le',
                                            'value': '2012-09-25T09:54:00',
                                            },
                                           ])
        self.assertEqual([], data)

    def test_start_end_timestamp(self):
        # Window [11:30, 11:32] isolates the single 11:31 sample (value 6).
        data = self.get_json(self.PATH, q=[{'field': 'resource_id',
                                            'value': 'resource-id',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'ge',
                                            'value': '2012-09-25T11:30:00',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'le',
                                            'value': '2012-09-25T11:32:00',
                                            },
                                           ])
        self.assertEqual(6, data[0]['max'])
        self.assertEqual(1, data[0]['count'])
class TestSumProjectVolume(v2.FunctionalTest):
    """Statistics API: 'sum' aggregate for volume.size filtered by project.

    setUp stores three gauge samples (volumes 5, 6, 7) for distinct
    resources under 'project1', timestamped at 10:30, 11:31 and 12:32
    on 2012-09-25.
    """

    PATH = '/meters/volume.size/statistics'

    def setUp(self):
        super(TestSumProjectVolume, self).setUp()
        for i in range(3):
            s = sample.Sample(
                'volume.size',
                'gauge',
                'GiB',
                5 + i,
                'user-id',
                'project1',
                'resource-id-%s' % i,
                timestamp=datetime.datetime(2012, 9, 25, 10 + i, 30 + i),
                resource_metadata={'display_name': 'test-volume',
                                   'tag': 'self.sample',
                                   },
                source='source1',
            )
            # Sign the sample the way the publisher would before storing it.
            msg = utils.meter_message_from_counter(
                s, self.CONF.publisher.telemetry_secret,
            )
            self.conn.record_metering_data(msg)

    def test_no_time_bounds(self):
        data = self.get_json(self.PATH, q=[{'field': 'project_id',
                                            'value': 'project1',
                                            }])
        expected = 5 + 6 + 7
        self.assertEqual(expected, data[0]['sum'])
        self.assertEqual(3, data[0]['count'])

    def test_start_timestamp(self):
        # >= 11:30 keeps the samples at 11:31 and 12:32 (values 6 and 7).
        data = self.get_json(self.PATH, q=[{'field': 'project_id',
                                            'value': 'project1',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'ge',
                                            'value': '2012-09-25T11:30:00',
                                            },
                                           ])
        expected = 6 + 7
        self.assertEqual(expected, data[0]['sum'])
        self.assertEqual(2, data[0]['count'])

    def test_start_timestamp_after(self):
        # A start bound past the newest sample matches nothing.
        data = self.get_json(self.PATH, q=[{'field': 'project_id',
                                            'value': 'project1',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'ge',
                                            'value': '2012-09-25T12:34:00',
                                            },
                                           ])
        self.assertEqual([], data)

    def test_end_timestamp(self):
        # <= 11:30 keeps only the 10:30 sample (value 5).
        data = self.get_json(self.PATH, q=[{'field': 'project_id',
                                            'value': 'project1',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'le',
                                            'value': '2012-09-25T11:30:00',
                                            },
                                           ])
        self.assertEqual(5, data[0]['sum'])
        self.assertEqual(1, data[0]['count'])

    def test_end_timestamp_before(self):
        # An end bound before the oldest sample matches nothing.
        data = self.get_json(self.PATH, q=[{'field': 'project_id',
                                            'value': 'project1',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'le',
                                            'value': '2012-09-25T09:54:00',
                                            },
                                           ])
        self.assertEqual([], data)

    def test_start_end_timestamp(self):
        # Window [11:30, 11:32] isolates the single 11:31 sample (value 6).
        data = self.get_json(self.PATH, q=[{'field': 'project_id',
                                            'value': 'project1',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'ge',
                                            'value': '2012-09-25T11:30:00',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'le',
                                            'value': '2012-09-25T11:32:00',
                                            },
                                           ])
        self.assertEqual(6, data[0]['sum'])
        self.assertEqual(1, data[0]['count'])
class TestSumResourceVolume(v2.FunctionalTest):
    """Statistics API: 'sum' aggregate for volume.size filtered by resource.

    setUp stores three gauge samples (volumes 5, 6, 7) for one resource,
    timestamped at 10:30, 11:31 and 12:32 on 2012-09-25. Also covers
    period bucketing with and without a start bound.
    """

    PATH = '/meters/volume.size/statistics'

    def setUp(self):
        super(TestSumResourceVolume, self).setUp()
        for i in range(3):
            s = sample.Sample(
                'volume.size',
                'gauge',
                'GiB',
                5 + i,
                'user-id',
                'project1',
                'resource-id',
                timestamp=datetime.datetime(2012, 9, 25, 10 + i, 30 + i),
                resource_metadata={'display_name': 'test-volume',
                                   'tag': 'self.sample',
                                   },
                source='source1',
            )
            # Sign the sample the way the publisher would before storing it.
            msg = utils.meter_message_from_counter(
                s, self.CONF.publisher.telemetry_secret,
            )
            self.conn.record_metering_data(msg)

    def test_no_time_bounds(self):
        data = self.get_json(self.PATH, q=[{'field': 'resource_id',
                                            'value': 'resource-id',
                                            }])
        self.assertEqual(5 + 6 + 7, data[0]['sum'])
        self.assertEqual(3, data[0]['count'])

    def test_no_time_bounds_with_period(self):
        # A 30min period still yields three buckets (one per sample).
        data = self.get_json(self.PATH,
                             q=[{'field': 'resource_id',
                                 'value': 'resource-id'}],
                             period=1800)
        self.assertEqual(3, len(data))
        self.assertEqual(set([u'2012-09-25T10:30:00',
                              u'2012-09-25T12:32:00',
                              u'2012-09-25T11:31:00']),
                         set(x['duration_start'] for x in data))
        self.assertEqual(1800, data[0]['period'])
        self.assertEqual(set([u'2012-09-25T10:30:00',
                              u'2012-09-25T11:30:00',
                              u'2012-09-25T12:30:00']),
                         set(x['period_start'] for x in data))

    def test_start_timestamp(self):
        # >= 11:30 keeps the samples at 11:31 and 12:32 (values 6 and 7).
        data = self.get_json(self.PATH, q=[{'field': 'resource_id',
                                            'value': 'resource-id',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'ge',
                                            'value': '2012-09-25T11:30:00',
                                            }])
        self.assertEqual(6 + 7, data[0]['sum'])
        self.assertEqual(2, data[0]['count'])

    def test_start_timestamp_with_period(self):
        # 2h buckets anchored at the 10:15 start bound: two buckets.
        data = self.get_json(self.PATH,
                             q=[{'field': 'resource_id',
                                 'value': 'resource-id'},
                                {'field': 'timestamp',
                                 'op': 'ge',
                                 'value': '2012-09-25T10:15:00'}],
                             period=7200)
        self.assertEqual(2, len(data))
        self.assertEqual(set([u'2012-09-25T10:30:00',
                              u'2012-09-25T12:32:00']),
                         set(x['duration_start'] for x in data))
        self.assertEqual(7200, data[0]['period'])
        self.assertEqual(set([u'2012-09-25T10:15:00',
                              u'2012-09-25T12:15:00']),
                         set(x['period_start'] for x in data))

    def test_start_timestamp_after(self):
        # A start bound past the newest sample matches nothing.
        data = self.get_json(self.PATH, q=[{'field': 'resource_id',
                                            'value': 'resource-id',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'ge',
                                            'value': '2012-09-25T12:34:00',
                                            }])
        self.assertEqual([], data)

    def test_end_timestamp(self):
        # <= 11:30 keeps only the 10:30 sample (value 5).
        data = self.get_json(self.PATH, q=[{'field': 'resource_id',
                                            'value': 'resource-id',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'le',
                                            'value': '2012-09-25T11:30:00',
                                            }])
        self.assertEqual(5, data[0]['sum'])
        self.assertEqual(1, data[0]['count'])

    def test_end_timestamp_before(self):
        # An end bound before the oldest sample matches nothing.
        data = self.get_json(self.PATH, q=[{'field': 'resource_id',
                                            'value': 'resource-id',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'le',
                                            'value': '2012-09-25T09:54:00',
                                            }])
        self.assertEqual([], data)

    def test_start_end_timestamp(self):
        # Half-open window [11:30, 11:32): only the 11:31 sample (value 6).
        data = self.get_json(self.PATH, q=[{'field': 'resource_id',
                                            'value': 'resource-id',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'ge',
                                            'value': '2012-09-25T11:30:00',
                                            },
                                           {'field': 'timestamp',
                                            'op': 'lt',
                                            'value': '2012-09-25T11:32:00',
                                            }])
        self.assertEqual(6, data[0]['sum'])
        self.assertEqual(1, data[0]['count'])
class TestGroupByInstance(v2.FunctionalTest):
PATH = '/meters/instance/statistics'
def setUp(self):
super(TestGroupByInstance, self).setUp()
test_sample_data = (
{'volume': 2, 'user': 'user-1', 'project': 'project-1',
'resource': 'resource-1', 'timestamp': (2013, 8, 1, 16, 10),
'metadata_flavor': 'm1.tiny', 'metadata_event': 'event-1',
'source': 'source-2'},
{'volume': 2, 'user': 'user-1', 'project': 'project-2',
'resource': 'resource-1', 'timestamp': (2013, 8, 1, 15, 37),
'metadata_flavor': 'm1.large', 'metadata_event': 'event-1',
'source': 'source-2'},
{'volume': 1, 'user': 'user-2', 'project': 'project-1',
'resource': 'resource-2', 'timestamp': (2013, 8, 1, 10, 11),
'metadata_flavor': 'm1.tiny', 'metadata_event': 'event-2',
'source': 'source-1'},
{'volume': 1, 'user': 'user-2', 'project': 'project-1',
'resource': 'resource-2', 'timestamp': (2013, 8, 1, 10, 40),
'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
'source': 'source-1'},
{'volume': 2, 'user': 'user-2', 'project': 'project-1',
'resource': 'resource-1', 'timestamp': (2013, 8, 1, 14, 59),
'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
'source': 'source-1'},
{'volume': 4, 'user': 'user-2', 'project': 'project-2',
'resource': 'resource-2', 'timestamp': (2013, 8, 1, 17, 28),
'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
'source': 'source-1'},
{'volume': 4, 'user': 'user-3', 'project': 'project-1',
'resource': 'resource-3', 'timestamp': (2013, 8, 1, 11, 22),
'metadata_flavor': 'm1.tiny', 'metadata_event': 'event-2',
'source': 'source-3'},
)
for test_sample in test_sample_data:
c = sample.Sample(
'instance',
sample.TYPE_CUMULATIVE,
unit='s',
volume=test_sample['volume'],
user_id=test_sample['user'],
project_id=test_sample['project'],
resource_id=test_sample['resource'],
timestamp=datetime.datetime(*test_sample['timestamp']),
resource_metadata={'flavor': test_sample['metadata_flavor'],
'event': test_sample['metadata_event'], },
source=test_sample['source'],
)
msg = utils.meter_message_from_counter(
c, self.CONF.publisher.telemetry_secret,
)
self.conn.record_metering_data(msg)
def test_group_by_user(self):
data = self.get_json(self.PATH, groupby=['user_id'])
groupby_keys_set = set(x for sub_dict in data
for x in sub_dict['groupby'].keys())
groupby_vals_set = set(x for sub_dict in data
for x in sub_dict['groupby'].values())
self.assertEqual(set(['user_id']), groupby_keys_set)
self.assertEqual(set(['user-1', 'user-2', 'user-3']), groupby_vals_set)
for r in data:
grp = r['groupby']
if grp == {'user_id': 'user-1'}:
self.assertEqual(2, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(2, r['min'])
self.assertEqual(2, r['max'])
self.assertEqual(4, r['sum'])
self.assertEqual(2, r['avg'])
elif grp == {'user_id': 'user-2'}:
self.assertEqual(4, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(1, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(8, r['sum'])
self.assertEqual(2, r['avg'])
elif grp == {'user_id': 'user-3'}:
self.assertEqual(1, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(4, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(4, r['sum'])
self.assertEqual(4, r['avg'])
def test_group_by_resource(self):
data = self.get_json(self.PATH, groupby=['resource_id'])
groupby_keys_set = set(x for sub_dict in data
for x in sub_dict['groupby'].keys())
groupby_vals_set = set(x for sub_dict in data
for x in sub_dict['groupby'].values())
self.assertEqual(set(['resource_id']), groupby_keys_set)
self.assertEqual(set(['resource-1', 'resource-2', 'resource-3']),
groupby_vals_set)
for r in data:
grp = r['groupby']
if grp == {'resource_id': 'resource-1'}:
self.assertEqual(3, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(2, r['min'])
self.assertEqual(2, r['max'])
self.assertEqual(6, r['sum'])
self.assertEqual(2, r['avg'])
elif grp == {'resource_id': 'resource-2'}:
self.assertEqual(3, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(1, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(6, r['sum'])
self.assertEqual(2, r['avg'])
elif grp == {'resource_id': 'resource-3'}:
self.assertEqual(1, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(4, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(4, r['sum'])
self.assertEqual(4, r['avg'])
def test_group_by_project(self):
data = self.get_json(self.PATH, groupby=['project_id'])
groupby_keys_set = set(x for sub_dict in data
for x in sub_dict['groupby'].keys())
groupby_vals_set = set(x for sub_dict in data
for x in sub_dict['groupby'].values())
self.assertEqual(set(['project_id']), groupby_keys_set)
self.assertEqual(set(['project-1', 'project-2']), groupby_vals_set)
for r in data:
grp = r['groupby']
if grp == {'project_id': 'project-1'}:
self.assertEqual(5, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(1, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(10, r['sum'])
self.assertEqual(2, r['avg'])
elif grp == {'project_id': 'project-2'}:
self.assertEqual(2, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(2, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(6, r['sum'])
self.assertEqual(3, r['avg'])
def test_group_by_unknown_field(self):
response = self.get_json(self.PATH,
expect_errors=True,
groupby=['wtf'])
self.assertEqual(400, response.status_code)
def test_group_by_multiple_regular(self):
data = self.get_json(self.PATH, groupby=['user_id', 'resource_id'])
groupby_keys_set = set(x for sub_dict in data
for x in sub_dict['groupby'].keys())
groupby_vals_set = set(x for sub_dict in data
for x in sub_dict['groupby'].values())
self.assertEqual(set(['user_id', 'resource_id']), groupby_keys_set)
self.assertEqual(set(['user-1', 'user-2', 'user-3', 'resource-1',
'resource-2', 'resource-3']),
groupby_vals_set)
for r in data:
grp = r['groupby']
if grp == {'user_id': 'user-1',
'resource_id': 'resource-1'}:
self.assertEqual(2, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(2, r['min'])
self.assertEqual(2, r['max'])
self.assertEqual(4, r['sum'])
self.assertEqual(2, r['avg'])
elif grp == {'user_id': 'user-2',
'resource_id': 'resource-1'}:
self.assertEqual(1, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(2, r['min'])
self.assertEqual(2, r['max'])
self.assertEqual(2, r['sum'])
self.assertEqual(2, r['avg'])
elif grp == {'user_id': 'user-2',
'resource_id': 'resource-2'}:
self.assertEqual(3, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(1, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(6, r['sum'])
self.assertEqual(2, r['avg'])
elif grp == {'user_id': 'user-3',
'resource_id': 'resource-3'}:
self.assertEqual(1, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(4, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(4, r['sum'])
self.assertEqual(4, r['avg'])
else:
self.assertNotEqual(grp, {'user_id': 'user-1',
'resource_id': 'resource-2'})
self.assertNotEqual(grp, {'user_id': 'user-1',
'resource_id': 'resource-3'})
self.assertNotEqual(grp, {'user_id': 'user-2',
'resource_id': 'resource-3'})
self.assertNotEqual(grp, {'user_id': 'user-3',
'resource_id': 'resource-1'})
self.assertNotEqual(grp, {'user_id': 'user-3',
'resource_id': 'resource-2'})
def test_group_by_with_query_filter(self):
data = self.get_json(self.PATH,
q=[{'field': 'project_id',
'op': 'eq',
'value': 'project-1'}],
groupby=['resource_id'])
groupby_keys_set = set(x for sub_dict in data
for x in sub_dict['groupby'].keys())
groupby_vals_set = set(x for sub_dict in data
for x in sub_dict['groupby'].values())
self.assertEqual(set(['resource_id']), groupby_keys_set)
self.assertEqual(set(['resource-1', 'resource-2', 'resource-3']),
groupby_vals_set)
for r in data:
grp = r['groupby']
if grp == {'resource_id': 'resource-1'}:
self.assertEqual(2, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(2, r['min'])
self.assertEqual(2, r['max'])
self.assertEqual(4, r['sum'])
self.assertEqual(2, r['avg'])
elif grp == {'resource_id': 'resource-2'}:
self.assertEqual(2, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(1, r['min'])
self.assertEqual(1, r['max'])
self.assertEqual(2, r['sum'])
self.assertEqual(1, r['avg'])
elif grp == {'resource_id': 'resource-3'}:
self.assertEqual(1, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(4, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(4, r['sum'])
self.assertEqual(4, r['avg'])
    def test_group_by_with_query_filter_multiple(self):
        """Group by (project_id, resource_id) with two ANDed query filters.

        Only samples with user-2 and source-1 contribute, so exactly
        three (project, resource) combinations should be present.
        """
        data = self.get_json(self.PATH,
                             q=[{'field': 'user_id',
                                 'op': 'eq',
                                 'value': 'user-2'},
                                {'field': 'source',
                                 'op': 'eq',
                                 'value': 'source-1'}],
                             groupby=['project_id', 'resource_id'])
        # Every result row must be grouped on exactly the requested keys.
        groupby_keys_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].keys())
        groupby_vals_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].values())
        self.assertEqual(set(['project_id', 'resource_id']), groupby_keys_set)
        self.assertEqual(set(['project-1', 'project-2',
                              'resource-1', 'resource-2']),
                         groupby_vals_set)
        for r in data:
            grp = r['groupby']
            if grp == {'project_id': 'project-1',
                       'resource_id': 'resource-1'}:
                self.assertEqual(1, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(2, r['min'])
                self.assertEqual(2, r['max'])
                self.assertEqual(2, r['sum'])
                self.assertEqual(2, r['avg'])
            elif grp == {'project_id': 'project-1',
                         'resource_id': 'resource-2'}:
                self.assertEqual(2, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(1, r['min'])
                self.assertEqual(1, r['max'])
                self.assertEqual(2, r['sum'])
                self.assertEqual(1, r['avg'])
            elif grp == {'project_id': 'project-2',
                         'resource_id': 'resource-2'}:
                self.assertEqual(1, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(4, r['min'])
                self.assertEqual(4, r['max'])
                self.assertEqual(4, r['sum'])
                self.assertEqual(4, r['avg'])
            else:
                # No filtered sample exists for this combination, so no
                # statistics row should have been produced for it.
                self.assertNotEqual(grp, {'project_id': 'project-2',
                                          'resource_id': 'resource-1'})
    def test_group_by_with_period(self):
        """Group by project_id with a 7200 s (2 h) period.

        Each result row carries both its group and its period window;
        duration_start/duration_end mark the first and last sample
        actually observed inside that window.
        """
        data = self.get_json(self.PATH,
                             groupby=['project_id'],
                             period=7200)
        groupby_keys_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].keys())
        groupby_vals_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].values())
        self.assertEqual(set(['project_id']), groupby_keys_set)
        self.assertEqual(set(['project-1', 'project-2']), groupby_vals_set)
        # Only these three 2 h windows contain any samples at all.
        period_start_set = set(sub_dict['period_start'] for sub_dict in data)
        period_start_valid = set([u'2013-08-01T10:11:00',
                                  u'2013-08-01T14:11:00',
                                  u'2013-08-01T16:11:00'])
        self.assertEqual(period_start_valid, period_start_set)
        for r in data:
            grp = r['groupby']
            period_start = r['period_start']
            if (grp == {'project_id': 'project-1'} and
                    period_start == u'2013-08-01T10:11:00'):
                self.assertEqual(3, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(1, r['min'])
                self.assertEqual(4, r['max'])
                self.assertEqual(6, r['sum'])
                self.assertEqual(2, r['avg'])
                self.assertEqual(4260, r['duration'])
                self.assertEqual(u'2013-08-01T10:11:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T11:22:00', r['duration_end'])
                self.assertEqual(7200, r['period'])
                self.assertEqual(u'2013-08-01T12:11:00', r['period_end'])
            elif (grp == {'project_id': 'project-1'} and
                  period_start == u'2013-08-01T14:11:00'):
                self.assertEqual(2, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(2, r['min'])
                self.assertEqual(2, r['max'])
                self.assertEqual(4, r['sum'])
                self.assertEqual(2, r['avg'])
                self.assertEqual(4260, r['duration'])
                self.assertEqual(u'2013-08-01T14:59:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T16:10:00', r['duration_end'])
                self.assertEqual(7200, r['period'])
                self.assertEqual(u'2013-08-01T16:11:00', r['period_end'])
            elif (grp == {'project_id': 'project-2'} and
                  period_start == u'2013-08-01T14:11:00'):
                self.assertEqual(1, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(2, r['min'])
                self.assertEqual(2, r['max'])
                self.assertEqual(2, r['sum'])
                self.assertEqual(2, r['avg'])
                # A single sample gives a zero-length duration.
                self.assertEqual(0, r['duration'])
                self.assertEqual(u'2013-08-01T15:37:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T15:37:00', r['duration_end'])
                self.assertEqual(7200, r['period'])
                self.assertEqual(u'2013-08-01T16:11:00', r['period_end'])
            elif (grp == {'project_id': 'project-2'} and
                  period_start == u'2013-08-01T16:11:00'):
                self.assertEqual(1, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(4, r['min'])
                self.assertEqual(4, r['max'])
                self.assertEqual(4, r['sum'])
                self.assertEqual(4, r['avg'])
                self.assertEqual(0, r['duration'])
                self.assertEqual(u'2013-08-01T17:28:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T17:28:00', r['duration_end'])
                self.assertEqual(7200, r['period'])
                self.assertEqual(u'2013-08-01T18:11:00', r['period_end'])
            else:
                # These (group, period) combinations contain no samples and
                # must not appear in the response at all.
                self.assertNotEqual([grp, period_start],
                                    [{'project_id': 'project-1'},
                                     u'2013-08-01T16:11:00'])
                self.assertNotEqual([grp, period_start],
                                    [{'project_id': 'project-2'},
                                     u'2013-08-01T10:11:00'])
    def test_group_by_with_query_filter_and_period(self):
        """Group by project_id with a source filter and a 2 h period.

        Only source-1 samples contribute; duration_start/duration_end
        mark the first and last matching sample within each window.
        """
        data = self.get_json(self.PATH,
                             q=[{'field': 'source',
                                 'op': 'eq',
                                 'value': 'source-1'}],
                             groupby=['project_id'],
                             period=7200)
        groupby_keys_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].keys())
        groupby_vals_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].values())
        self.assertEqual(set(['project_id']), groupby_keys_set)
        self.assertEqual(set(['project-1', 'project-2']), groupby_vals_set)
        # Only these three windows hold samples after filtering.
        period_start_set = set(sub_dict['period_start'] for sub_dict in data)
        period_start_valid = set([u'2013-08-01T10:11:00',
                                  u'2013-08-01T14:11:00',
                                  u'2013-08-01T16:11:00'])
        self.assertEqual(period_start_valid, period_start_set)
        for r in data:
            grp = r['groupby']
            period_start = r['period_start']
            if (grp == {'project_id': 'project-1'} and
                    period_start == u'2013-08-01T10:11:00'):
                self.assertEqual(2, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(1, r['min'])
                self.assertEqual(1, r['max'])
                self.assertEqual(2, r['sum'])
                self.assertEqual(1, r['avg'])
                self.assertEqual(1740, r['duration'])
                self.assertEqual(u'2013-08-01T10:11:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T10:40:00', r['duration_end'])
                self.assertEqual(7200, r['period'])
                self.assertEqual(u'2013-08-01T12:11:00', r['period_end'])
            elif (grp == {'project_id': 'project-1'} and
                  period_start == u'2013-08-01T14:11:00'):
                self.assertEqual(1, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(2, r['min'])
                self.assertEqual(2, r['max'])
                self.assertEqual(2, r['sum'])
                self.assertEqual(2, r['avg'])
                # A single sample gives a zero-length duration.
                self.assertEqual(0, r['duration'])
                self.assertEqual(u'2013-08-01T14:59:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T14:59:00', r['duration_end'])
                self.assertEqual(7200, r['period'])
                self.assertEqual(u'2013-08-01T16:11:00', r['period_end'])
            elif (grp == {'project_id': 'project-2'} and
                  period_start == u'2013-08-01T16:11:00'):
                self.assertEqual(1, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(4, r['min'])
                self.assertEqual(4, r['max'])
                self.assertEqual(4, r['sum'])
                self.assertEqual(4, r['avg'])
                self.assertEqual(0, r['duration'])
                self.assertEqual(u'2013-08-01T17:28:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T17:28:00', r['duration_end'])
                self.assertEqual(7200, r['period'])
                self.assertEqual(u'2013-08-01T18:11:00', r['period_end'])
            else:
                # These (group, period) combinations contain no filtered
                # samples and must not appear in the response.
                self.assertNotEqual([grp, period_start],
                                    [{'project_id': 'project-1'},
                                     u'2013-08-01T16:11:00'])
                self.assertNotEqual([grp, period_start],
                                    [{'project_id': 'project-2'},
                                     u'2013-08-01T10:11:00'])
                self.assertNotEqual([grp, period_start],
                                    [{'project_id': 'project-2'},
                                     u'2013-08-01T14:11:00'])
def test_group_by_start_timestamp_after(self):
data = self.get_json(self.PATH,
q=[{'field': 'timestamp',
'op': 'ge',
'value': '2013-08-01T17:28:01'}],
groupby=['project_id'])
self.assertEqual([], data)
def test_group_by_end_timestamp_before(self):
data = self.get_json(self.PATH,
q=[{'field': 'timestamp',
'op': 'le',
'value': '2013-08-01T10:10:59'}],
groupby=['project_id'])
self.assertEqual([], data)
def test_group_by_start_timestamp(self):
data = self.get_json(self.PATH,
q=[{'field': 'timestamp',
'op': 'ge',
'value': '2013-08-01T14:58:00'}],
groupby=['project_id'])
groupby_keys_set = set(x for sub_dict in data
for x in sub_dict['groupby'].keys())
groupby_vals_set = set(x for sub_dict in data
for x in sub_dict['groupby'].values())
self.assertEqual(set(['project_id']), groupby_keys_set)
self.assertEqual(set(['project-1', 'project-2']), groupby_vals_set)
for r in data:
grp = r['groupby']
if grp == {'project_id': 'project-1'}:
self.assertEqual(2, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(2, r['min'])
self.assertEqual(2, r['max'])
self.assertEqual(4, r['sum'])
self.assertEqual(2, r['avg'])
elif grp == {'project_id': 'project-2'}:
self.assertEqual(2, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(2, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(6, r['sum'])
self.assertEqual(3, r['avg'])
def test_group_by_end_timestamp(self):
data = self.get_json(self.PATH,
q=[{'field': 'timestamp',
'op': 'le',
'value': '2013-08-01T11:45:00'}],
groupby=['project_id'])
groupby_keys_set = set(x for sub_dict in data
for x in sub_dict['groupby'].keys())
groupby_vals_set = set(x for sub_dict in data
for x in sub_dict['groupby'].values())
self.assertEqual(set(['project_id']), groupby_keys_set)
self.assertEqual(set(['project-1']), groupby_vals_set)
for r in data:
grp = r['groupby']
if grp == {'project_id': 'project-1'}:
self.assertEqual(3, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(1, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(6, r['sum'])
self.assertEqual(2, r['avg'])
def test_group_by_start_end_timestamp(self):
data = self.get_json(self.PATH,
q=[{'field': 'timestamp',
'op': 'ge',
'value': '2013-08-01T08:17:03'},
{'field': 'timestamp',
'op': 'le',
'value': '2013-08-01T23:59:59'}],
groupby=['project_id'])
groupby_keys_set = set(x for sub_dict in data
for x in sub_dict['groupby'].keys())
groupby_vals_set = set(x for sub_dict in data
for x in sub_dict['groupby'].values())
self.assertEqual(set(['project_id']), groupby_keys_set)
self.assertEqual(set(['project-1', 'project-2']), groupby_vals_set)
for r in data:
grp = r['groupby']
if grp == {'project_id': 'project-1'}:
self.assertEqual(5, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(1, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(10, r['sum'])
self.assertEqual(2, r['avg'])
elif grp == {'project_id': 'project-2'}:
self.assertEqual(2, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(2, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(6, r['sum'])
self.assertEqual(3, r['avg'])
def test_group_by_start_end_timestamp_with_query_filter(self):
data = self.get_json(self.PATH,
q=[{'field': 'project_id',
'op': 'eq',
'value': 'project-1'},
{'field': 'timestamp',
'op': 'ge',
'value': '2013-08-01T11:01:00'},
{'field': 'timestamp',
'op': 'le',
'value': '2013-08-01T20:00:00'}],
groupby=['resource_id'])
groupby_keys_set = set(x for sub_dict in data
for x in sub_dict['groupby'].keys())
groupby_vals_set = set(x for sub_dict in data
for x in sub_dict['groupby'].values())
self.assertEqual(set(['resource_id']), groupby_keys_set)
self.assertEqual(set(['resource-1', 'resource-3']), groupby_vals_set)
for r in data:
grp = r['groupby']
if grp == {'resource_id': 'resource-1'}:
self.assertEqual(2, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(2, r['min'])
self.assertEqual(2, r['max'])
self.assertEqual(4, r['sum'])
self.assertEqual(2, r['avg'])
elif grp == {'resource_id': 'resource-3'}:
self.assertEqual(1, r['count'])
self.assertEqual('s', r['unit'])
self.assertEqual(4, r['min'])
self.assertEqual(4, r['max'])
self.assertEqual(4, r['sum'])
self.assertEqual(4, r['avg'])
    def test_group_by_start_end_timestamp_with_period(self):
        """Group by project_id over a bounded window with 1 h periods.

        The periods are anchored to the window start, so period_start
        values fall on whole hours rather than on the first sample time.
        """
        data = self.get_json(self.PATH,
                             q=[{'field': 'timestamp',
                                 'op': 'ge',
                                 'value': '2013-08-01T14:00:00'},
                                {'field': 'timestamp',
                                 'op': 'le',
                                 'value': '2013-08-01T17:00:00'}],
                             groupby=['project_id'],
                             period=3600)
        groupby_keys_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].keys())
        groupby_vals_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].values())
        self.assertEqual(set(['project_id']), groupby_keys_set)
        self.assertEqual(set(['project-1', 'project-2']), groupby_vals_set)
        period_start_set = set(sub_dict['period_start'] for sub_dict in data)
        period_start_valid = set([u'2013-08-01T14:00:00',
                                  u'2013-08-01T15:00:00',
                                  u'2013-08-01T16:00:00'])
        self.assertEqual(period_start_valid, period_start_set)
        for r in data:
            grp = r['groupby']
            period_start = r['period_start']
            if (grp == {'project_id': 'project-1'} and
                    period_start == u'2013-08-01T14:00:00'):
                self.assertEqual(1, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(2, r['min'])
                self.assertEqual(2, r['max'])
                self.assertEqual(2, r['sum'])
                self.assertEqual(2, r['avg'])
                # Single sample per window -> zero-length duration.
                self.assertEqual(0, r['duration'])
                self.assertEqual(u'2013-08-01T14:59:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T14:59:00', r['duration_end'])
                self.assertEqual(3600, r['period'])
                self.assertEqual(u'2013-08-01T15:00:00', r['period_end'])
            elif (grp == {'project_id': 'project-1'} and
                  period_start == u'2013-08-01T16:00:00'):
                self.assertEqual(1, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(2, r['min'])
                self.assertEqual(2, r['max'])
                self.assertEqual(2, r['sum'])
                self.assertEqual(2, r['avg'])
                self.assertEqual(0, r['duration'])
                self.assertEqual(u'2013-08-01T16:10:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T16:10:00', r['duration_end'])
                self.assertEqual(3600, r['period'])
                self.assertEqual(u'2013-08-01T17:00:00', r['period_end'])
            elif (grp == {'project_id': 'project-2'} and
                  period_start == u'2013-08-01T15:00:00'):
                self.assertEqual(1, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(2, r['min'])
                self.assertEqual(2, r['max'])
                self.assertEqual(2, r['sum'])
                self.assertEqual(2, r['avg'])
                self.assertEqual(0, r['duration'])
                self.assertEqual(u'2013-08-01T15:37:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T15:37:00', r['duration_end'])
                self.assertEqual(3600, r['period'])
                self.assertEqual(u'2013-08-01T16:00:00', r['period_end'])
            else:
                # These (group, period) combinations contain no samples and
                # must not appear in the response.
                self.assertNotEqual([grp, period_start],
                                    [{'project_id': 'project-1'},
                                     u'2013-08-01T15:00:00'])
                self.assertNotEqual([grp, period_start],
                                    [{'project_id': 'project-2'},
                                     u'2013-08-01T14:00:00'])
                self.assertNotEqual([grp, period_start],
                                    [{'project_id': 'project-2'},
                                     u'2013-08-01T16:00:00'])
    def test_group_by_start_end_timestamp_with_query_filter_and_period(self):
        """Group by project with a source filter, time window and 2 h period.

        Combines every statistics option: only source-1 samples inside
        the [10:00, 18:00] window contribute, bucketed into 2 h periods
        anchored at the window start.
        """
        data = self.get_json(self.PATH,
                             q=[{'field': 'source',
                                 'op': 'eq',
                                 'value': 'source-1'},
                                {'field': 'timestamp',
                                 'op': 'ge',
                                 'value': '2013-08-01T10:00:00'},
                                {'field': 'timestamp',
                                 'op': 'le',
                                 'value': '2013-08-01T18:00:00'}],
                             groupby=['project_id'],
                             period=7200)
        groupby_keys_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].keys())
        groupby_vals_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].values())
        self.assertEqual(set(['project_id']), groupby_keys_set)
        self.assertEqual(set(['project-1', 'project-2']), groupby_vals_set)
        period_start_set = set(sub_dict['period_start'] for sub_dict in data)
        period_start_valid = set([u'2013-08-01T10:00:00',
                                  u'2013-08-01T14:00:00',
                                  u'2013-08-01T16:00:00'])
        self.assertEqual(period_start_valid, period_start_set)
        for r in data:
            grp = r['groupby']
            period_start = r['period_start']
            if (grp == {'project_id': 'project-1'} and
                    period_start == u'2013-08-01T10:00:00'):
                self.assertEqual(2, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(1, r['min'])
                self.assertEqual(1, r['max'])
                self.assertEqual(2, r['sum'])
                self.assertEqual(1, r['avg'])
                self.assertEqual(1740, r['duration'])
                self.assertEqual(u'2013-08-01T10:11:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T10:40:00', r['duration_end'])
                self.assertEqual(7200, r['period'])
                self.assertEqual(u'2013-08-01T12:00:00', r['period_end'])
            elif (grp == {'project_id': 'project-1'} and
                  period_start == u'2013-08-01T14:00:00'):
                self.assertEqual(1, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(2, r['min'])
                self.assertEqual(2, r['max'])
                self.assertEqual(2, r['sum'])
                self.assertEqual(2, r['avg'])
                # Single sample per window -> zero-length duration.
                self.assertEqual(0, r['duration'])
                self.assertEqual(u'2013-08-01T14:59:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T14:59:00', r['duration_end'])
                self.assertEqual(7200, r['period'])
                self.assertEqual(u'2013-08-01T16:00:00', r['period_end'])
            elif (grp == {'project_id': 'project-2'} and
                  period_start == u'2013-08-01T16:00:00'):
                self.assertEqual(1, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(4, r['min'])
                self.assertEqual(4, r['max'])
                self.assertEqual(4, r['sum'])
                self.assertEqual(4, r['avg'])
                self.assertEqual(0, r['duration'])
                self.assertEqual(u'2013-08-01T17:28:00', r['duration_start'])
                self.assertEqual(u'2013-08-01T17:28:00', r['duration_end'])
                self.assertEqual(7200, r['period'])
                self.assertEqual(u'2013-08-01T18:00:00', r['period_end'])
            else:
                # These (group, period) combinations contain no filtered
                # samples and must not appear in the response.
                self.assertNotEqual([grp, period_start],
                                    [{'project_id': 'project-1'},
                                     u'2013-08-01T16:00:00'])
                self.assertNotEqual([grp, period_start],
                                    [{'project_id': 'project-2'},
                                     u'2013-08-01T10:00:00'])
                self.assertNotEqual([grp, period_start],
                                    [{'project_id': 'project-2'},
                                     u'2013-08-01T14:00:00'])
@tests_db.run_with('mongodb', 'hbase', 'db2')
class TestGroupBySource(v2.FunctionalTest):
    """Statistics grouped by sample source (non-SQL backends only)."""
    # FIXME(terriyu): We have to put test_group_by_source in its own class
    # because SQLAlchemy currently doesn't support group by source statistics.
    # When group by source is supported in SQLAlchemy, this test should be
    # moved to TestGroupByInstance with all the other group by statistics
    # tests.
    # Statistics endpoint exercised by every test in this class.
    PATH = '/meters/instance/statistics'
    def setUp(self):
        """Record a fixed set of cumulative samples spanning three sources."""
        super(TestGroupBySource, self).setUp()
        test_sample_data = (
            {'volume': 2, 'user': 'user-1', 'project': 'project-1',
             'resource': 'resource-1', 'timestamp': (2013, 8, 1, 16, 10),
             'metadata_flavor': 'm1.tiny', 'metadata_event': 'event-1',
             'source': 'source-2'},
            {'volume': 2, 'user': 'user-1', 'project': 'project-2',
             'resource': 'resource-1', 'timestamp': (2013, 8, 1, 15, 37),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-1',
             'source': 'source-2'},
            {'volume': 1, 'user': 'user-2', 'project': 'project-1',
             'resource': 'resource-2', 'timestamp': (2013, 8, 1, 10, 11),
             'metadata_flavor': 'm1.tiny', 'metadata_event': 'event-2',
             'source': 'source-1'},
            {'volume': 1, 'user': 'user-2', 'project': 'project-1',
             'resource': 'resource-2', 'timestamp': (2013, 8, 1, 10, 40),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
             'source': 'source-1'},
            {'volume': 2, 'user': 'user-2', 'project': 'project-1',
             'resource': 'resource-1', 'timestamp': (2013, 8, 1, 14, 59),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
             'source': 'source-1'},
            {'volume': 4, 'user': 'user-2', 'project': 'project-2',
             'resource': 'resource-2', 'timestamp': (2013, 8, 1, 17, 28),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
             'source': 'source-1'},
            {'volume': 4, 'user': 'user-3', 'project': 'project-1',
             'resource': 'resource-3', 'timestamp': (2013, 8, 1, 11, 22),
             'metadata_flavor': 'm1.tiny', 'metadata_event': 'event-2',
             'source': 'source-3'},
        )
        for test_sample in test_sample_data:
            c = sample.Sample(
                'instance',
                sample.TYPE_CUMULATIVE,
                unit='s',
                volume=test_sample['volume'],
                user_id=test_sample['user'],
                project_id=test_sample['project'],
                resource_id=test_sample['resource'],
                timestamp=datetime.datetime(*test_sample['timestamp']),
                resource_metadata={'flavor': test_sample['metadata_flavor'],
                                   'event': test_sample['metadata_event'], },
                source=test_sample['source'],
            )
            # Sign the sample exactly as the collector would before storage.
            msg = utils.meter_message_from_counter(
                c, self.CONF.publisher.telemetry_secret,
            )
            self.conn.record_metering_data(msg)
    def tearDown(self):
        """Clear recorded samples so other tests start from an empty store."""
        self.conn.clear()
        super(TestGroupBySource, self).tearDown()
    def test_group_by_source(self):
        """Grouping on source yields one statistics row per source."""
        data = self.get_json(self.PATH, groupby=['source'])
        groupby_keys_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].keys())
        groupby_vals_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].values())
        self.assertEqual(set(['source']), groupby_keys_set)
        self.assertEqual(set(['source-1', 'source-2', 'source-3']),
                         groupby_vals_set)
        for r in data:
            grp = r['groupby']
            if grp == {'source': 'source-1'}:
                self.assertEqual(4, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(1, r['min'])
                self.assertEqual(4, r['max'])
                self.assertEqual(8, r['sum'])
                self.assertEqual(2, r['avg'])
            elif grp == {'source': 'source-2'}:
                self.assertEqual(2, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(2, r['min'])
                self.assertEqual(2, r['max'])
                self.assertEqual(4, r['sum'])
                self.assertEqual(2, r['avg'])
            elif grp == {'source': 'source-3'}:
                self.assertEqual(1, r['count'])
                self.assertEqual('s', r['unit'])
                self.assertEqual(4, r['min'])
                self.assertEqual(4, r['max'])
                self.assertEqual(4, r['sum'])
                self.assertEqual(4, r['avg'])
class TestSelectableAggregates(v2.FunctionalTest):
    """Tests for selectable aggregates (aggregate.func / aggregate.param)."""
    # Statistics endpoint exercised by every test in this class.
    PATH = '/meters/instance/statistics'
    def setUp(self):
        """Record a fixed set of gauge samples across three projects."""
        super(TestSelectableAggregates, self).setUp()
        test_sample_data = (
            {'volume': 2, 'user': 'user-1', 'project': 'project-1',
             'resource': 'resource-1', 'timestamp': (2013, 8, 1, 16, 10),
             'metadata_flavor': 'm1.tiny', 'metadata_event': 'event-1',
             'source': 'source'},
            {'volume': 2, 'user': 'user-2', 'project': 'project-2',
             'resource': 'resource-3', 'timestamp': (2013, 8, 1, 15, 37),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-1',
             'source': 'source'},
            {'volume': 1, 'user': 'user-2', 'project': 'project-2',
             'resource': 'resource-5', 'timestamp': (2013, 8, 1, 10, 11),
             'metadata_flavor': 'm1.medium', 'metadata_event': 'event-2',
             'source': 'source'},
            {'volume': 2, 'user': 'user-1', 'project': 'project-1',
             'resource': 'resource-2', 'timestamp': (2013, 8, 1, 10, 40),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
             'source': 'source'},
            {'volume': 2, 'user': 'user-2', 'project': 'project-2',
             'resource': 'resource-4', 'timestamp': (2013, 8, 1, 14, 59),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
             'source': 'source'},
            {'volume': 5, 'user': 'user-1', 'project': 'project-1',
             'resource': 'resource-2', 'timestamp': (2013, 8, 1, 17, 28),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
             'source': 'source'},
            {'volume': 4, 'user': 'user-2', 'project': 'project-2',
             'resource': 'resource-3', 'timestamp': (2013, 8, 1, 11, 22),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
             'source': 'source'},
            {'volume': 9, 'user': 'user-3', 'project': 'project-3',
             'resource': 'resource-4', 'timestamp': (2013, 8, 1, 11, 59),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-3',
             'source': 'source'},
        )
        for test_sample in test_sample_data:
            c = sample.Sample(
                'instance',
                sample.TYPE_GAUGE,
                unit='instance',
                volume=test_sample['volume'],
                user_id=test_sample['user'],
                project_id=test_sample['project'],
                resource_id=test_sample['resource'],
                timestamp=datetime.datetime(*test_sample['timestamp']),
                resource_metadata={'flavor': test_sample['metadata_flavor'],
                                   'event': test_sample['metadata_event'], },
                source=test_sample['source'],
            )
            # Sign the sample exactly as the collector would before storage.
            msg = utils.meter_message_from_counter(
                c, self.CONF.publisher.telemetry_secret,
            )
            self.conn.record_metering_data(msg)
    def _do_test_per_tenant_selectable_standard_aggregate(self,
                                                          aggregate,
                                                          expected_values):
        """Check one standard aggregate selected alone, grouped by project.

        Asserts the selected aggregate appears both top-level and under
        the 'aggregate' key, and that the other standard aggregates are
        omitted from every result row.
        """
        agg_args = {'aggregate.func': aggregate}
        data = self.get_json(self.PATH, groupby=['project_id'], **agg_args)
        groupby_keys_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].keys())
        groupby_vals_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].values())
        self.assertEqual(set(['project_id']), groupby_keys_set)
        projects = ['project-1', 'project-2', 'project-3']
        self.assertEqual(set(projects), groupby_vals_set)
        standard_aggregates = set(['count', 'min', 'max', 'sum', 'avg'])
        for r in data:
            grp = r['groupby']
            for project in projects:
                if grp == {'project_id': project}:
                    # expected_values is ordered to match `projects`.
                    expected = expected_values[projects.index(project)]
                    self.assertEqual('instance', r['unit'])
                    self.assertAlmostEqual(r[aggregate], expected)
                    self.assertIn('aggregate', r)
                    self.assertIn(aggregate, r['aggregate'])
                    self.assertAlmostEqual(r['aggregate'][aggregate], expected)
                    # Aggregates that were not requested must not leak in.
                    for a in standard_aggregates - set([aggregate]):
                        self.assertNotIn(a, r)
    def test_per_tenant_selectable_max(self):
        """Selecting only max returns per-project maxima."""
        self._do_test_per_tenant_selectable_standard_aggregate('max',
                                                               [5, 4, 9])
    def test_per_tenant_selectable_min(self):
        """Selecting only min returns per-project minima."""
        self._do_test_per_tenant_selectable_standard_aggregate('min',
                                                               [2, 1, 9])
    def test_per_tenant_selectable_sum(self):
        """Selecting only sum returns per-project sums."""
        self._do_test_per_tenant_selectable_standard_aggregate('sum',
                                                               [9, 9, 9])
    def test_per_tenant_selectable_avg(self):
        """Selecting only avg returns per-project averages."""
        self._do_test_per_tenant_selectable_standard_aggregate('avg',
                                                               [3, 2.25, 9])
    def test_per_tenant_selectable_count(self):
        """Selecting only count returns per-project sample counts."""
        self._do_test_per_tenant_selectable_standard_aggregate('count',
                                                               [3, 4, 1])
    def test_per_tenant_selectable_parameterized_aggregate(self):
        """cardinality/resource_id appears only under the 'aggregate' key."""
        agg_args = {'aggregate.func': 'cardinality',
                    'aggregate.param': 'resource_id'}
        data = self.get_json(self.PATH, groupby=['project_id'], **agg_args)
        groupby_keys_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].keys())
        groupby_vals_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].values())
        self.assertEqual(set(['project_id']), groupby_keys_set)
        projects = ['project-1', 'project-2', 'project-3']
        self.assertEqual(set(projects), groupby_vals_set)
        aggregate = 'cardinality/resource_id'
        # Distinct resources per project, ordered to match `projects`.
        expected_values = [2.0, 3.0, 1.0]
        standard_aggregates = set(['count', 'min', 'max', 'sum', 'avg'])
        for r in data:
            grp = r['groupby']
            for project in projects:
                if grp == {'project_id': project}:
                    expected = expected_values[projects.index(project)]
                    self.assertEqual('instance', r['unit'])
                    # Parameterized aggregates are never exposed top-level.
                    self.assertNotIn(aggregate, r)
                    self.assertIn('aggregate', r)
                    self.assertIn(aggregate, r['aggregate'])
                    self.assertEqual(expected, r['aggregate'][aggregate])
                    for a in standard_aggregates:
                        self.assertNotIn(a, r)
    def test_large_quantum_selectable_parameterized_aggregate(self):
        """Cardinality stays correct with many samples per period."""
        # add a large number of datapoints that won't impact on cardinality
        # if the computation logic is tolerant of different DB behavior on
        # larger numbers of samples per-period
        for i in range(200):
            s = sample.Sample(
                'instance',
                sample.TYPE_GAUGE,
                unit='instance',
                volume=i * 1.0,
                user_id='user-1',
                project_id='project-1',
                resource_id='resource-1',
                timestamp=datetime.datetime(2013, 8, 1, 11, i % 60),
                resource_metadata={'flavor': 'm1.tiny',
                                   'event': 'event-1', },
                source='source',
            )
            msg = utils.meter_message_from_counter(
                s, self.CONF.publisher.telemetry_secret,
            )
            self.conn.record_metering_data(msg)
        agg_args = {'aggregate.func': 'cardinality',
                    'aggregate.param': 'resource_id'}
        data = self.get_json(self.PATH, **agg_args)
        aggregate = 'cardinality/resource_id'
        # All extra samples reuse resource-1, so cardinality is unchanged.
        expected_value = 5.0
        standard_aggregates = set(['count', 'min', 'max', 'sum', 'avg'])
        r = data[0]
        self.assertNotIn(aggregate, r)
        self.assertIn('aggregate', r)
        self.assertIn(aggregate, r['aggregate'])
        self.assertEqual(expected_value, r['aggregate'][aggregate])
        for a in standard_aggregates:
            self.assertNotIn(a, r)
    def test_repeated_unparameterized_aggregate(self):
        """Repeating the same aggregate.func is collapsed to one result."""
        agg_params = 'aggregate.func=count&aggregate.func=count'
        data = self.get_json(self.PATH, override_params=agg_params)
        aggregate = 'count'
        expected_value = 8.0
        standard_aggregates = set(['min', 'max', 'sum', 'avg'])
        r = data[0]
        self.assertIn(aggregate, r)
        self.assertEqual(expected_value, r[aggregate])
        self.assertIn('aggregate', r)
        self.assertIn(aggregate, r['aggregate'])
        self.assertEqual(expected_value, r['aggregate'][aggregate])
        for a in standard_aggregates:
            self.assertNotIn(a, r)
    def test_fully_repeated_parameterized_aggregate(self):
        """An identical (func, param) pair repeated is deduplicated."""
        agg_params = ('aggregate.func=cardinality&'
                      'aggregate.param=resource_id&'
                      'aggregate.func=cardinality&'
                      'aggregate.param=resource_id&')
        data = self.get_json(self.PATH, override_params=agg_params)
        aggregate = 'cardinality/resource_id'
        expected_value = 5.0
        standard_aggregates = set(['count', 'min', 'max', 'sum', 'avg'])
        r = data[0]
        self.assertIn('aggregate', r)
        self.assertNotIn(aggregate, r)
        self.assertIn(aggregate, r['aggregate'])
        self.assertEqual(expected_value, r['aggregate'][aggregate])
        for a in standard_aggregates:
            self.assertNotIn(a, r)
    def test_partially_repeated_parameterized_aggregate(self):
        """The same func with different params yields distinct results."""
        agg_params = ('aggregate.func=cardinality&'
                      'aggregate.param=resource_id&'
                      'aggregate.func=cardinality&'
                      'aggregate.param=project_id&')
        data = self.get_json(self.PATH, override_params=agg_params)
        expected_values = {'cardinality/resource_id': 5.0,
                           'cardinality/project_id': 3.0}
        standard_aggregates = set(['count', 'min', 'max', 'sum', 'avg'])
        r = data[0]
        self.assertIn('aggregate', r)
        for aggregate in expected_values.keys():
            self.assertNotIn(aggregate, r)
            self.assertIn(aggregate, r['aggregate'])
            self.assertEqual(expected_values[aggregate],
                             r['aggregate'][aggregate])
        for a in standard_aggregates:
            self.assertNotIn(a, r)
    def test_bad_selectable_parameterized_aggregate(self):
        """A non-whitelisted aggregate.param is rejected with HTTP 400."""
        agg_args = {'aggregate.func': 'cardinality',
                    'aggregate.param': 'injection_attack'}
        resp = self.get_json(self.PATH, status=[400],
                             groupby=['project_id'], **agg_args)
        self.assertIn('error_message', resp)
        self.assertEqual(resp['error_message'].get('faultcode'),
                         'Client')
        self.assertEqual(resp['error_message'].get('faultstring'),
                         'Bad aggregate: cardinality.injection_attack')
@tests_db.run_with('mongodb', 'hbase', 'db2')
class TestUnparameterizedAggregates(v2.FunctionalTest):
    """stddev aggregate tests, excluded from the sqlalchemy scenario."""
    # We put the stddev test case in a separate class so that we
    # can easily exclude the sqlalchemy scenario, as sqlite doesn't
    # support the stddev_pop function and fails ungracefully with
    # OperationalError when it is used. However we still want to
    # test the corresponding functionality in the mongo driver.
    # For hbase & db2, the skip on NotImplementedError logic works
    # in the usual way.
    # Statistics endpoint exercised by every test in this class.
    PATH = '/meters/instance/statistics'
    def setUp(self):
        """Record a fixed set of gauge samples across three projects."""
        super(TestUnparameterizedAggregates, self).setUp()
        test_sample_data = (
            {'volume': 2, 'user': 'user-1', 'project': 'project-1',
             'resource': 'resource-1', 'timestamp': (2013, 8, 1, 16, 10),
             'metadata_flavor': 'm1.tiny', 'metadata_event': 'event-1',
             'source': 'source'},
            {'volume': 2, 'user': 'user-2', 'project': 'project-2',
             'resource': 'resource-3', 'timestamp': (2013, 8, 1, 15, 37),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-1',
             'source': 'source'},
            {'volume': 1, 'user': 'user-2', 'project': 'project-2',
             'resource': 'resource-5', 'timestamp': (2013, 8, 1, 10, 11),
             'metadata_flavor': 'm1.medium', 'metadata_event': 'event-2',
             'source': 'source'},
            {'volume': 2, 'user': 'user-1', 'project': 'project-1',
             'resource': 'resource-2', 'timestamp': (2013, 8, 1, 10, 40),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
             'source': 'source'},
            {'volume': 2, 'user': 'user-2', 'project': 'project-2',
             'resource': 'resource-4', 'timestamp': (2013, 8, 1, 14, 59),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
             'source': 'source'},
            {'volume': 5, 'user': 'user-1', 'project': 'project-1',
             'resource': 'resource-2', 'timestamp': (2013, 8, 1, 17, 28),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
             'source': 'source'},
            {'volume': 4, 'user': 'user-2', 'project': 'project-2',
             'resource': 'resource-3', 'timestamp': (2013, 8, 1, 11, 22),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-2',
             'source': 'source'},
            {'volume': 9, 'user': 'user-3', 'project': 'project-3',
             'resource': 'resource-4', 'timestamp': (2013, 8, 1, 11, 59),
             'metadata_flavor': 'm1.large', 'metadata_event': 'event-3',
             'source': 'source'},
        )
        for test_sample in test_sample_data:
            c = sample.Sample(
                'instance',
                sample.TYPE_GAUGE,
                unit='instance',
                volume=test_sample['volume'],
                user_id=test_sample['user'],
                project_id=test_sample['project'],
                resource_id=test_sample['resource'],
                timestamp=datetime.datetime(*test_sample['timestamp']),
                resource_metadata={'flavor': test_sample['metadata_flavor'],
                                   'event': test_sample['metadata_event'], },
                source=test_sample['source'],
            )
            # Sign the sample exactly as the collector would before storage.
            msg = utils.meter_message_from_counter(
                c, self.CONF.publisher.telemetry_secret,
            )
            self.conn.record_metering_data(msg)
    def test_per_tenant_selectable_unparameterized_aggregate(self):
        """stddev is reported only under 'aggregate', per project."""
        agg_args = {'aggregate.func': 'stddev'}
        data = self.get_json(self.PATH, groupby=['project_id'], **agg_args)
        groupby_keys_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].keys())
        groupby_vals_set = set(x for sub_dict in data
                               for x in sub_dict['groupby'].values())
        self.assertEqual(set(['project_id']), groupby_keys_set)
        projects = ['project-1', 'project-2', 'project-3']
        self.assertEqual(set(projects), groupby_vals_set)
        aggregate = 'stddev'
        # Population standard deviations, ordered to match `projects`.
        expected_values = [1.4142, 1.0897, 0.0]
        standard_aggregates = set(['count', 'min', 'max', 'sum', 'avg'])
        for r in data:
            grp = r['groupby']
            for project in projects:
                if grp == {'project_id': project}:
                    expected = expected_values[projects.index(project)]
                    self.assertEqual('instance', r['unit'])
                    # Non-standard aggregates are never exposed top-level.
                    self.assertNotIn(aggregate, r)
                    self.assertIn('aggregate', r)
                    self.assertIn(aggregate, r['aggregate'])
                    self.assertAlmostEqual(r['aggregate'][aggregate],
                                           expected,
                                           places=4)
                    for a in standard_aggregates:
                        self.assertNotIn(a, r)
|
hip-odoo/odoo
|
refs/heads/10.0
|
odoo/tools/which.py
|
80
|
#!/usr/bin/env python
""" Which - locate a command
* adapted from Brian Curtin's http://bugs.python.org/file15381/shutil_which.patch
* see http://bugs.python.org/issue444582
* uses ``PATHEXT`` on Windows
* searches current directory before ``PATH`` on Windows,
but not before an explicitly passed path
* accepts both string or iterable for an explicitly passed path, or pathext
* accepts an explicitly passed empty path, or pathext (either '' or [])
* does not search ``PATH`` for files that have a path specified in their name already
* moved defpath and defpathext lists initialization to module level,
instead of initializing them on each function call
* changed interface: which_files() returns generator, which() returns first match,
or raises IOError(errno.ENOENT)
.. function:: which_files(file [, mode=os.F_OK | os.X_OK[, path=None[, pathext=None]]])
Return a generator which yields full paths in which the *file* name exists
in a directory that is part of the file name, or on *path*,
and has the given *mode*.
By default, *mode* matches an inclusive OR of os.F_OK and os.X_OK - an
existing executable file.
The *path* is, by default, the ``PATH`` variable on the platform,
or the string/iterable passed in as *path*.
In the event that a ``PATH`` variable is not found, :const:`os.defpath` is used.
On Windows, a current directory is searched before using the ``PATH`` variable,
but not before an explicitly passed *path*.
The *pathext* is only used on Windows to match files with given extensions appended as well.
It defaults to the ``PATHEXT`` variable, or the string/iterable passed in as *pathext*.
In the event that a ``PATHEXT`` variable is not found,
default value for Windows XP/Vista is used.
The command is always searched without extension first,
even when *pathext* is explicitly passed.
.. function:: which(file [, mode=os.F_OK | os.X_OK[, path=None[, pathext=None]]])
Return first match generated by which_files(file, mode, path, pathext),
or raise IOError(errno.ENOENT).
"""
__docformat__ = 'restructuredtext en'
__all__ = 'which which_files pathsep defpath defpathext F_OK R_OK W_OK X_OK'.split()
import sys
from os import access, defpath, pathsep, environ, F_OK, R_OK, W_OK, X_OK
from os.path import exists, dirname, split, join
# True when running on any Windows flavour ('win32', 'win64', ...).
windows = sys.platform.startswith('win')
# Module-level search path: $PATH split into directories, falling back to
# os.defpath when the variable is unset.
defpath = environ.get('PATH', defpath).split(pathsep)
if windows:
    defpath.insert(0, '.') # can insert without checking, when duplicates are removed
    # given the quite usual mess in PATH on Windows, let's rather remove duplicates
    # (case-insensitively, keeping the first occurrence; set.add returns None,
    # so `not seen.add(...)` is always True and only records the entry).
    seen = set()
    defpath = [dir for dir in defpath if dir.lower() not in seen and not seen.add(dir.lower())]
    del seen
    # Default extensions: '' first (bare command name), then $PATHEXT or the
    # stock Windows XP/Vista list, lower-cased for case-insensitive matching.
    defpathext = [''] + environ.get('PATHEXT',
        '.COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC').lower().split(pathsep)
else:
    # POSIX executables carry no extension.
    defpathext = ['']
def which_files(file, mode=F_OK | X_OK, path=None, pathext=None):
    """ Locate a file in a path supplied as a part of the file name,
    or the user's path, or a supplied path.
    The function yields full paths (not necessarily absolute paths),
    in which the given file name matches an existing file in a directory on the path.

    >>> def test_which(expected, *args, **argd):
    ...     result = list(which_files(*args, **argd))
    ...     assert result == expected, 'which_files: %s != %s' % (result, expected)
    ...
    ...     try:
    ...         result = [ which(*args, **argd) ]
    ...     except IOError:
    ...         result = []
    ...     assert result[:1] == expected[:1], 'which: %s != %s' % (result[:1], expected[:1])
    >>> if windows: cmd = environ['COMSPEC']
    >>> if windows: test_which([cmd], 'cmd')
    >>> if windows: test_which([cmd], 'cmd.exe')
    >>> if windows: test_which([cmd], 'cmd', path=dirname(cmd))
    >>> if windows: test_which([cmd], 'cmd', pathext='.exe')
    >>> if windows: test_which([cmd], cmd)
    >>> if windows: test_which([cmd], cmd, path='<nonexistent>')
    >>> if windows: test_which([cmd], cmd, pathext='<nonexistent>')
    >>> if windows: test_which([cmd], cmd[:-4])
    >>> if windows: test_which([cmd], cmd[:-4], path='<nonexistent>')
    >>> if windows: test_which([], 'cmd', path='<nonexistent>')
    >>> if windows: test_which([], 'cmd', pathext='<nonexistent>')
    >>> if windows: test_which([], '<nonexistent>/cmd')
    >>> if windows: test_which([], cmd[:-4], pathext='<nonexistent>')
    >>> if not windows: sh = '/bin/sh'
    >>> if not windows: test_which([sh], 'sh')
    >>> if not windows: test_which([sh], 'sh', path=dirname(sh))
    >>> if not windows: test_which([sh], 'sh', pathext='<nonexistent>')
    >>> if not windows: test_which([sh], sh)
    >>> if not windows: test_which([sh], sh, path='<nonexistent>')
    >>> if not windows: test_which([sh], sh, pathext='<nonexistent>')
    >>> if not windows: test_which([], 'sh', mode=W_OK)  # not running as root, are you?
    >>> if not windows: test_which([], 'sh', path='<nonexistent>')
    >>> if not windows: test_which([], '<nonexistent>/sh')
    """
    # A directory component in the file name wins over any search path.
    filepath, file = split(file)
    if filepath:
        path = (filepath,)
    elif path is None:
        path = defpath
    elif isinstance(path, str):
        path = path.split(pathsep)
    if pathext is None:
        pathext = defpathext
    elif isinstance(pathext, str):
        pathext = pathext.split(pathsep)
    if '' not in pathext:
        # Always try the bare command name first, but prepend '' on a *copy*:
        # the previous code insert()ed in place, mutating the caller's list
        # (and on repeated calls growing a shared argument).
        pathext = [''] + list(pathext)
    for directory in path:
        basepath = join(directory, file)
        for ext in pathext:
            fullpath = basepath + ext
            # Candidate must both exist and satisfy the requested access mode.
            if exists(fullpath) and access(fullpath, mode):
                yield fullpath
def which(file, mode=F_OK | X_OK, path=None, pathext=None):
    """ Locate a file in a path supplied as a part of the file name,
    or the user's path, or a supplied path.
    The function returns full path (not necessarily absolute path),
    in which the given file name matches an existing file in a directory on the path,
    or raises IOError(errno.ENOENT).

    >>> # for doctest see which_files()
    """
    try:
        # next() builtin (Python 2.6+) works on both Python 2 and Python 3
        # iterators, unlike the generator .next() method removed in Python 3.
        return next(iter(which_files(file, mode, path, pathext)))
    except StopIteration:
        # errno is always part of the standard library; the old
        # try/except ImportError fallback was dead code.
        from errno import ENOENT
        raise IOError(ENOENT, '%s not found' % (mode & X_OK and 'command' or 'file'), file)
if __name__ == '__main__':
    # Run the doctests embedded in which_files() when executed directly.
    import doctest
    doctest.testmod()
|
163gal/Time-Line
|
refs/heads/master
|
libs64/wxPython/lib/editor/__init__.py
|
6
|
from editor import Editor
|
skyfielders/python-skyfield
|
refs/heads/master
|
skyfield/named_stars.py
|
1
|
# This list was seeded from:
# https://en.wikipedia.org/wiki/List_of_stars_in_the_Hipparcos_Catalogue
# Recent discussion:
# https://github.com/skyfielders/python-skyfield/issues/304
# Maps a traditional star name to its Hipparcos catalogue number.
# Several names are synonyms for the same star (e.g. Arided/Aridif/Deneb).
named_star_dict = {
    'Achernar': 7588,
    'Acrux': 60718,
    'Adhara': 33579,
    'Agena': 68702,
    'Albireo': 95947,
    'Alcor': 65477,
    'Aldebaran': 21421,
    'Alderamin': 105199,
    'Algenib': 15863,
    'Algieba': 50583,
    'Algol': 14576,
    'Alhena': 31681,
    'Alioth': 62956,
    'Alkaid': 67301,
    'Almach': 9640,
    'Alnair': 109268,
    'Alnilam': 26311,
    'Alnitak': 26727,
    'Alphard': 46390,
    'Alphecca': 76267,
    'Alpheratz': 677,
    'Altair': 97649,
    'Aludra': 35904,
    'Ankaa': 2081,
    'Antares': 80763,
    'Arcturus': 69673,
    'Arided': 102098,
    'Aridif': 102098,
    'Aspidiske': 45556,
    'Atria': 82273,
    'Avior': 41037,
    'Becrux': 62434,
    'Bellatrix': 25336,
    'Benetnash': 67301,
    'Betelgeuse': 27989,
    'Birdun': 66657,
    'Canopus': 30438,
    'Capella': 24608,
    'Caph': 746,
    'Castor': 36850,
    'Deneb': 102098,
    'Deneb Kaitos': 3419,
    'Denebola': 57632,
    'Diphda': 3419,
    'Dschubba': 78401,
    'Dubhe': 54061,
    'Durre Menthor': 8102,
    'Elnath': 25428,
    'Enif': 107315,
    'Etamin': 87833,
    'Fomalhaut': 113368,
    'Foramen': 93308,
    'Gacrux': 61084,
    'Gemma': 76267,
    'Gienah': 102488,
    'Girtab': 86228,
    'Gruid': 112122,
    'Hadar': 68702,
    'Hamal': 9884,
    "Herschel's Garnet Star": 107259,
    'Izar': 72105,
    'Kaus Australis': 90185,
    'Kochab': 72607,
    'Koo She': 42913,
    'Marchab': 113963,
    'Marfikent': 71352,
    'Markab': 45941,
    'Megrez': 59774,
    'Men': 71860,
    'Menkalinan': 28360,
    'Menkent': 68933,
    'Merak': 53910,
    'Miaplacidus': 45238,
    'Mimosa': 62434,
    'Mintaka': 25930,
    'Mira': 10826,
    'Mirach': 5447,
    'Mirfak': 15863,
    'Mirzam': 30324,
    'Mizar': 65378,
    'Muhlifein': 61932,
    'Murzim': 30324,
    'Naos': 39429,
    'Nunki': 92855,
    'Peacock': 100751,
    'Phad': 58001,
    'Phecda': 58001,
    'Polaris': 11767,
    'Pollux': 37826,
    'Procyon': 37279,
    'Ras Alhague': 86032,
    'Rasalhague': 86032,
    'Regor': 39953,
    'Regulus': 49669,
    'Rigel': 24436,
    'Rigel Kent': 71683,
    'Rigil Kentaurus': 71683,
    'Sabik': 84012,
    'Sadira': 16537,
    'Sadr': 100453,
    'Saiph': 27366,
    'Sargas': 86228,
    'Scheat': 113881,
    'Schedar': 3179,
    'Scutulum': 45556,
    'Shaula': 85927,
    'Sirius': 32349,
    'Sirrah': 677,
    'South Star': 104382,
    'Spica': 65474,
    'Suhail': 44816,
    'Thuban': 68756,
    'Toliman': 71683,
    'Tseen She': 93308,
    'Tsih': 4427,
    'Turais': 45556,
    'Vega': 91262,
    'Wei': 82396,
    'Wezen': 34444
}
|
KAMI911/loec
|
refs/heads/master
|
examples/Sharpen/binaries-windows-python26/ImageTk.py
|
3
|
#
# The Python Imaging Library.
# $Id: ImageTk.py 2134 2004-10-06 08:55:20Z fredrik $
#
# a Tk display interface
#
# History:
# 96-04-08 fl Created
# 96-09-06 fl Added getimage method
# 96-11-01 fl Rewritten, removed image attribute and crop method
# 97-05-09 fl Use PyImagingPaste method instead of image type
# 97-05-12 fl Minor tweaks to match the IFUNC95 interface
# 97-05-17 fl Support the "pilbitmap" booster patch
# 97-06-05 fl Added file= and data= argument to image constructors
# 98-03-09 fl Added width and height methods to Image classes
# 98-07-02 fl Use default mode for "P" images without palette attribute
# 98-07-02 fl Explicitly destroy Tkinter image objects
# 99-07-24 fl Support multiple Tk interpreters (from Greg Couch)
# 99-07-26 fl Automatically hook into Tkinter (if possible)
# 99-08-15 fl Hook uses _imagingtk instead of _imaging
#
# Copyright (c) 1997-1999 by Secret Labs AB
# Copyright (c) 1996-1997 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
import Tkinter, Image
##
# The <b>ImageTk</b> module contains support to create and modify
# Tkinter <b>BitmapImage</b> and <b>PhotoImage</b> objects.
# <p>
# For examples, see the demo programs in the <i>Scripts</i>
# directory.
##
# --------------------------------------------------------------------
# Check for Tkinter interface hooks
# Cached result of the pilbitmap probe below:
# None = not probed yet, 1 = booster patch available, 0 = unavailable.
_pilbitmap_ok = None
def _pilbitmap_check():
    # Probe (once) whether Tk was built with the "pilbitmap" booster patch,
    # which lets BitmapImage reference PIL image memory directly through a
    # "PIL:<id>" data string instead of a full bitmap dump.
    global _pilbitmap_ok
    if _pilbitmap_ok is None:
        try:
            im = Image.new("1", (1,1))
            Tkinter.BitmapImage(data="PIL:%d" % im.im.id)
            _pilbitmap_ok = 1
        except Tkinter.TclError:
            # Tk rejected the "PIL:" data string: patch not present.
            _pilbitmap_ok = 0
    return _pilbitmap_ok
# --------------------------------------------------------------------
# PhotoImage
##
# Creates a Tkinter-compatible photo image. This can be used
# everywhere Tkinter expects an image object. If the image is an RGBA
# image, pixels having alpha 0 are treated as transparent.
class PhotoImage:
    """Tkinter-compatible photo image backed by a PIL image.

    Legacy Python 2 / old-PIL code: uses dict.has_key, apply() and
    'except E, v' syntax; keep in sync with the era of PIL it shipped with.
    """
    ##
    # Create a photo image object.  The constructor takes either
    # a PIL image, or a mode and a size.  Alternatively, you can
    # use the <b>file</b> or <b>data</b> options to initialize
    # the photo image object.
    # <p>
    # @def __init__(image=None, size=None, **options)
    # @param image Either a PIL image, or a mode string.  If a
    #    mode string is used, a size must also be given.
    # @param size If the first argument is a mode string, this
    #    defines the size of the image.
    # @keyparam file A filename to load the image from (using
    #    Image.open(file)).
    # @keyparam data An 8-bit string containing image data (as
    #    loaded from an image file).
    def __init__(self, image=None, size=None, **kw):
        # Tk compatibility: file or data
        if image is None:
            if kw.has_key("file"):
                image = Image.open(kw["file"])
                del kw["file"]
            elif kw.has_key("data"):
                from StringIO import StringIO
                image = Image.open(StringIO(kw["data"]))
                del kw["data"]
        if hasattr(image, "mode") and hasattr(image, "size"):
            # got an image instead of a mode
            mode = image.mode
            if mode == "P":
                # palette mapped data: use the palette's own mode, if known
                image.load()
                try:
                    mode = image.palette.mode
                except AttributeError:
                    mode = "RGB" # default
            size = image.size
            kw["width"], kw["height"] = size
        else:
            mode = image
            image = None
        # Tk only understands these four modes; anything else is reduced to
        # its base mode (e.g. "CMYK" -> "RGB").
        if mode not in ["1", "L", "RGB", "RGBA"]:
            mode = Image.getmodebase(mode)
        self.__mode = mode
        self.__size = size
        self.__photo = apply(Tkinter.PhotoImage, (), kw)
        self.tk = self.__photo.tk
        if image:
            self.paste(image)
    def __del__(self):
        # Explicitly delete the Tk image to release its memory; clearing the
        # name first prevents re-entry if Tk calls back during deletion.
        name = self.__photo.name
        self.__photo.name = None
        try:
            self.__photo.tk.call("image", "delete", name)
        except:
            pass # ignore internal errors
    ##
    # Get the Tkinter photo image identifier.  This method is
    # automatically called by Tkinter whenever a PhotoImage object is
    # passed to a Tkinter method.
    #
    # @return A Tkinter photo image identifier (a string).
    def __str__(self):
        return str(self.__photo)
    ##
    # Get the width of the image.
    #
    # @return The width, in pixels.
    def width(self):
        return self.__size[0]
    ##
    # Get the height of the image.
    #
    # @return The height, in pixels.
    def height(self):
        return self.__size[1]
    ##
    # Paste a PIL image into the photo image.  Note that this can
    # be very slow if the photo image is displayed.
    #
    # @param im A PIL image.  The size must match the target region.
    #    If the mode does not match, the image is converted to the
    #    mode of the bitmap image.
    # @param box A 4-tuple defining the left, upper, right, and
    #    lower pixel coordinate.  If None is given instead of a
    #    tuple, all of the image is assumed.
    def paste(self, im, box=None):
        # convert to blittable
        im.load()
        image = im.im
        if image.isblock() and im.mode == self.__mode:
            block = image
        else:
            block = image.new_block(self.__mode, im.size)
            image.convert2(block, image) # convert directly between buffers
        tk = self.__photo.tk
        try:
            tk.call("PyImagingPhoto", self.__photo, block.id)
        except Tkinter.TclError, v:
            # activate Tkinter hook: lazily attach the _imagingtk C module to
            # this interpreter, then retry the blit once.
            try:
                import _imagingtk
                try:
                    _imagingtk.tkinit(tk.interpaddr(), 1)
                except AttributeError:
                    _imagingtk.tkinit(id(tk), 0)
                tk.call("PyImagingPhoto", self.__photo, block.id)
            except (ImportError, AttributeError, Tkinter.TclError):
                raise # configuration problem; cannot attach to Tkinter
# --------------------------------------------------------------------
# BitmapImage
##
# Create a Tkinter-compatible bitmap image. This can be used
# everywhere Tkinter expects an image object.
class BitmapImage:
    """Tkinter-compatible bitmap (mode "1") image backed by a PIL image.

    Legacy Python 2 / old-PIL code (dict.has_key, apply()).
    """
    ##
    # Create a Tkinter-compatible bitmap image.
    # <p>
    # The given image must have mode "1".  Pixels having value 0 are
    # treated as transparent.  Options, if any, are passed on to
    # Tkinter.  The most commonly used option is <b>foreground</b>,
    # which is used to specify the colour for the non-transparent
    # parts.  See the Tkinter documentation for information on how to
    # specify colours.
    #
    # @def __init__(image=None, **options)
    # @param image A PIL image.
    def __init__(self, image=None, **kw):
        # Tk compatibility: file or data
        if image is None:
            if kw.has_key("file"):
                image = Image.open(kw["file"])
                del kw["file"]
            elif kw.has_key("data"):
                from StringIO import StringIO
                image = Image.open(StringIO(kw["data"]))
                del kw["data"]
        self.__mode = image.mode
        self.__size = image.size
        if _pilbitmap_check():
            # fast way (requires the pilbitmap booster patch): hand Tk a
            # "PIL:<id>" reference into the PIL image memory.
            image.load()
            kw["data"] = "PIL:%d" % image.im.id
            self.__im = image # must keep a reference
        else:
            # slow but safe way: serialize the image as an XBM bitmap string
            kw["data"] = image.tobitmap()
        self.__photo = apply(Tkinter.BitmapImage, (), kw)
    def __del__(self):
        # Explicitly delete the Tk image to release its memory; clearing the
        # name first prevents re-entry if Tk calls back during deletion.
        name = self.__photo.name
        self.__photo.name = None
        try:
            self.__photo.tk.call("image", "delete", name)
        except:
            pass # ignore internal errors
    ##
    # Get the width of the image.
    #
    # @return The width, in pixels.
    def width(self):
        return self.__size[0]
    ##
    # Get the height of the image.
    #
    # @return The height, in pixels.
    def height(self):
        return self.__size[1]
    ##
    # Get the Tkinter bitmap image identifier.  This method is
    # automatically called by Tkinter whenever a BitmapImage object
    # is passed to a Tkinter method.
    #
    # @return A Tkinter bitmap image identifier (a string).
    def __str__(self):
        return str(self.__photo)
##
# Copies the contents of a PhotoImage to a PIL image memory.
def getimage(photo):
    """Trigger the PyImagingPhotoGet Tk hook for *photo*.

    NOTE(review): despite the module comment ("copies the contents of a
    PhotoImage to a PIL image memory"), this function returns nothing; the
    destination buffer is presumably managed on the C side — confirm against
    the _imagingtk sources before relying on it.
    """
    photo.tk.call("PyImagingPhotoGet", photo)
# --------------------------------------------------------------------
# Helper for the Image.show method.
def _show(image, title):
    # Helper for Image.show(): display *image* in a new Toplevel window,
    # using BitmapImage for mode "1" and PhotoImage for everything else.
    class UI(Tkinter.Label):
        def __init__(self, master, im):
            if im.mode == "1":
                self.image = BitmapImage(im, foreground="white", master=master)
            else:
                self.image = PhotoImage(im, master=master)
            Tkinter.Label.__init__(self, master, image=self.image,
                                   bg="black", bd=0)
    # Requires an already-initialized Tk root (Tkinter.Tk() called elsewhere).
    if not Tkinter._default_root:
        raise IOError, "tkinter not initialized"
    top = Tkinter.Toplevel()
    if title:
        top.title(title)
    UI(top, image).pack()
|
cleverhans-lab/cleverhans
|
refs/heads/master
|
cleverhans_v3.1.0/cleverhans/experimental/certification/utils.py
|
2
|
"""File containing some simple helper functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
def diag(diag_elements):
    """Build a TensorFlow diagonal matrix from the given entries.

    Args:
      diag_elements: tensor with diagonal elements

    Returns:
      tf matrix with diagonal entries as diag_elements
    """
    flattened = tf.reshape(diag_elements, [-1])
    return tf.diag(flattened)
def initialize_dual(
    neural_net_params_object,
    init_dual_file=None,
    random_init_variance=0.01,
    init_nu=200.0,
):
    """Function to initialize the dual variables of the class.

    Args:
      neural_net_params_object: Object with the neural net weights, biases
        and types
      init_dual_file: Path to file containing dual variables, if the path
        is empty, perform random initialization
        Expects numpy dictionary with
        lambda_pos_0, lambda_pos_1, ..
        lambda_neg_0, lambda_neg_1, ..
        lambda_quad_0, lambda_quad_1, ..
        lambda_lu_0, lambda_lu_1, ..
      random_init_variance: variance for random initialization
      init_nu: Value to initialize nu variable with

    Returns:
      dual_var: dual variables initialized appropriately.
    """

    def _random_initializer(layer_size):
        # Uniform positive noise: the four original init stanzas were
        # byte-identical, so they are deduplicated through this helper.
        # Call order (4 draws per layer) matches the original exactly.
        return np.random.uniform(
            0, random_init_variance, size=(layer_size, 1)
        ).astype(np.float32)

    # Insertion order matters: variables are created (and RNG consumed) in
    # the same pos/neg/quad/lu sequence as the original code.
    lambda_lists = {
        "lambda_pos": [],
        "lambda_neg": [],
        "lambda_quad": [],
        "lambda_lu": [],
    }
    if init_dual_file is None:
        # Random initialization of one variable per dual family per layer.
        for i in range(0, neural_net_params_object.num_hidden_layers + 1):
            for name, target in lambda_lists.items():
                target.append(
                    tf.get_variable(
                        name + "_" + str(i),
                        initializer=_random_initializer(
                            neural_net_params_object.sizes[i]
                        ),
                        dtype=tf.float32,
                    )
                )
        nu = tf.get_variable("nu", initializer=init_nu)
    else:
        # Loading from file: the saved numpy dict uses the same family names.
        dual_var_init_val = np.load(init_dual_file).item()
        for i in range(0, neural_net_params_object.num_hidden_layers + 1):
            for name, target in lambda_lists.items():
                target.append(
                    tf.get_variable(
                        name + "_" + str(i),
                        initializer=dual_var_init_val[name][i],
                        dtype=tf.float32,
                    )
                )
        nu = tf.get_variable("nu", initializer=1.0 * dual_var_init_val["nu"])
    dual_var = dict(lambda_lists)
    dual_var["nu"] = nu
    return dual_var
def eig_one_step(current_vector, learning_rate, vector_prod_fn):
    """Function that performs one step of gd (variant) for min eigen value.

    Args:
      current_vector: current estimate of the eigen vector with minimum eigen
        value.
      learning_rate: learning rate.
      vector_prod_fn: function which returns product H*x, where H is a matrix for
        which we computing eigenvector.

    Returns:
      updated vector after one step

    NOTE(review): this function branches in Python on tensor values
    (`if directional_second_derivative ...`), which assumes eager execution
    or scalar numpy-like tensors — confirm the callers' execution mode.
    """
    grad = 2 * vector_prod_fn(current_vector)
    # Current objective = (1/2)*v^T (2*M*v); v = current_vector
    # grad = 2*M*v
    current_objective = tf.reshape(
        tf.matmul(tf.transpose(current_vector), grad) / 2.0, shape=()
    )
    # Project the gradient into the tangent space of the constraint region.
    # This way we do not waste time taking steps that try to change the
    # norm of current_vector
    grad = grad - current_vector * tf.matmul(tf.transpose(current_vector), grad)
    grad_norm = tf.norm(grad)
    grad_norm_sq = tf.square(grad_norm)
    # Computing normalized gradient of unit norm
    norm_grad = grad / grad_norm
    # Computing directional second derivative (dsd)
    # dsd = 2*g^T M g, where g is normalized gradient
    directional_second_derivative = tf.reshape(
        2 * tf.matmul(tf.transpose(norm_grad), vector_prod_fn(norm_grad)), shape=()
    )
    # Computing grad^\top M grad [useful to compute step size later]
    # Just a rescaling of the directional_second_derivative (which uses
    # normalized gradient
    grad_m_grad = directional_second_derivative * grad_norm_sq / 2
    # Directional_second_derivative/2 = objective when vector is norm_grad
    # If this is smaller than current objective, simply return that
    if directional_second_derivative / 2.0 < current_objective:
        return norm_grad
    # If curvature is positive, jump to the bottom of the bowl
    if directional_second_derivative > 0.0:
        step = -1.0 * grad_norm / directional_second_derivative
    else:
        # If the gradient is very small, do not move
        if grad_norm_sq <= 1e-16:
            step = 0.0
        else:
            # Make a heuristic guess of the step size
            step = -2.0 * tf.reduce_sum(current_vector * grad) / grad_norm_sq
            # Computing gain using the gradient and second derivative
            gain = -(
                2 * tf.reduce_sum(current_vector * grad) + (step * step) * grad_m_grad
            )
            # Fall back to pre-determined learning rate if no gain
            if gain < 0.0:
                step = -learning_rate * grad_norm
    # Move along the (unit-norm) projected gradient, then renormalize so the
    # iterate stays on the unit sphere.
    current_vector = current_vector + step * norm_grad
    return tf.nn.l2_normalize(current_vector)
def minimum_eigen_vector(x, num_steps, learning_rate, vector_prod_fn):
    """Approximates the eigenvector of smallest (absolute) eigenvalue.

    Runs `num_steps` iterations of the projected gradient step in
    `eig_one_step`, solving x^{T}*H*x -> min over unit-norm x, where H is the
    matrix implicitly defined by `vector_prod_fn`.

    Args:
      x: initial value of eigenvector.
      num_steps: number of optimization steps.
      learning_rate: learning rate.
      vector_prod_fn: function which takes x and returns product H*x.

    Returns:
      approximate value of eigenvector.
    """
    estimate = tf.nn.l2_normalize(x)
    for _step in range(num_steps):
        estimate = eig_one_step(estimate, learning_rate, vector_prod_fn)
    return estimate
def tf_lanczos_smallest_eigval(
    vector_prod_fn,
    matrix_dim,
    initial_vector,
    num_iter=1000,
    max_iter=1000,
    collapse_tol=1e-9,
    dtype=tf.float32,
):
    """Computes smallest eigenvector and eigenvalue using Lanczos in pure TF.

    This function computes smallest eigenvector and eigenvalue of the matrix
    which is implicitly specified by `vector_prod_fn`.
    `vector_prod_fn` is a function which takes `x` and returns a product of matrix
    in consideration and `x`.
    Computation is done using Lanczos algorithm, see
    https://en.wikipedia.org/wiki/Lanczos_algorithm#The_algorithm

    Args:
      vector_prod_fn: function which takes a vector as an input and returns
        matrix vector product.
      matrix_dim: dimensionality of the matrix.
      initial_vector: guess vector to start the algorithm with
      num_iter: user-defined number of iterations for the algorithm
      max_iter: maximum number of iterations.
      collapse_tol: tolerance to determine collapse of the Krylov subspace
      dtype: type of data

    Returns:
      tuple of (eigenvalue, eigenvector) of smallest eigenvalue and corresponding
      eigenvector.

    NOTE(review): uses Python `if`/`break` on tensor values and `tf.range` in a
    Python for-loop — assumes eager execution or tf.function/AutoGraph; confirm.
    """
    # alpha will store diagonal elements
    alpha = tf.TensorArray(dtype, size=1, dynamic_size=True, element_shape=())
    # beta will store off diagonal elements
    beta = tf.TensorArray(dtype, size=0, dynamic_size=True, element_shape=())
    # q will store Krylov space basis
    q_vectors = tf.TensorArray(
        dtype, size=1, dynamic_size=True, element_shape=(matrix_dim, 1)
    )
    # If start vector is all zeros, make it a random normal vector and run for max_iter
    if tf.norm(initial_vector) < collapse_tol:
        initial_vector = tf.random_normal(shape=(matrix_dim, 1), dtype=dtype)
        num_iter = max_iter
    w = initial_vector / tf.norm(initial_vector)
    # Iteration 0 of Lanczos
    q_vectors = q_vectors.write(0, w)
    w_ = vector_prod_fn(w)
    cur_alpha = tf.reduce_sum(w_ * w)
    alpha = alpha.write(0, cur_alpha)
    w_ = w_ - tf.scalar_mul(cur_alpha, w)
    w_prev = w
    w = w_
    # Subsequent iterations of Lanczos
    for i in tf.range(1, num_iter):
        cur_beta = tf.norm(w)
        if cur_beta < collapse_tol:
            # return early if Krylov subspace collapsed
            break
        # cur_beta is larger than collapse_tol,
        # so division will return finite result.
        w = w / cur_beta
        w_ = vector_prod_fn(w)
        cur_alpha = tf.reduce_sum(w_ * w)
        q_vectors = q_vectors.write(i, w)
        alpha = alpha.write(i, cur_alpha)
        beta = beta.write(i - 1, cur_beta)
        # Three-term recurrence: orthogonalize against the two previous
        # basis vectors.
        w_ = w_ - tf.scalar_mul(cur_alpha, w) - tf.scalar_mul(cur_beta, w_prev)
        w_prev = w
        w = w_
    alpha = alpha.stack()
    beta = beta.stack()
    q_vectors = tf.reshape(q_vectors.stack(), (-1, matrix_dim))
    # Assemble the symmetric tridiagonal matrix T = diag(alpha) +/- beta on
    # the off-diagonals, then solve its (small) dense eigenproblem.
    offdiag_submatrix = tf.linalg.diag(beta)
    tridiag_matrix = (
        tf.linalg.diag(alpha)
        + tf.pad(offdiag_submatrix, [[0, 1], [1, 0]])
        + tf.pad(offdiag_submatrix, [[1, 0], [0, 1]])
    )
    eigvals, eigvecs = tf.linalg.eigh(tridiag_matrix)
    # eigh returns ascending eigenvalues; index 0 is the smallest. Map the
    # Ritz vector back to the original space through the Krylov basis.
    smallest_eigval = eigvals[0]
    smallest_eigvec = tf.matmul(tf.reshape(eigvecs[:, 0], (1, -1)), q_vectors)
    smallest_eigvec = smallest_eigvec / tf.norm(smallest_eigvec)
    smallest_eigvec = tf.reshape(smallest_eigvec, (matrix_dim, 1))
    return smallest_eigval, smallest_eigvec
|
sonuyos/couchpotato
|
refs/heads/master
|
libs/tornado/web.py
|
63
|
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""``tornado.web`` provides a simple web framework with asynchronous
features that allow it to scale to large numbers of open connections,
making it ideal for `long polling
<http://en.wikipedia.org/wiki/Push_technology#Long_polling>`_.
Here is a simple "Hello, world" example app::
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
application.listen(8888)
tornado.ioloop.IOLoop.instance().start()
See the :doc:`guide` for additional information.
Thread-safety notes
-------------------
In general, methods on `RequestHandler` and elsewhere in Tornado are
not thread-safe. In particular, methods such as
`~RequestHandler.write()`, `~RequestHandler.finish()`, and
`~RequestHandler.flush()` must only be called from the main thread. If
you use multiple threads it is important to use `.IOLoop.add_callback`
to transfer control back to the main thread before finishing the
request.
"""
from __future__ import absolute_import, division, print_function, with_statement
import base64
import binascii
import datetime
import email.utils
import functools
import gzip
import hashlib
import hmac
import mimetypes
import numbers
import os.path
import re
import stat
import sys
import threading
import time
import tornado
import traceback
import types
from io import BytesIO
from tornado.concurrent import Future, is_future
from tornado import escape
from tornado import gen
from tornado import httputil
from tornado import iostream
from tornado import locale
from tornado.log import access_log, app_log, gen_log
from tornado import stack_context
from tornado import template
from tornado.escape import utf8, _unicode
from tornado.util import import_object, ObjectDict, raise_exc_info, unicode_type, _websocket_mask
from tornado.httputil import split_host_and_port
try:
import Cookie # py2
except ImportError:
import http.cookies as Cookie # py3
try:
import urlparse # py2
except ImportError:
import urllib.parse as urlparse # py3
try:
from urllib import urlencode # py2
except ImportError:
from urllib.parse import urlencode # py3
MIN_SUPPORTED_SIGNED_VALUE_VERSION = 1
"""The oldest signed value version supported by this version of Tornado.
Signed values older than this version cannot be decoded.
.. versionadded:: 3.2.1
"""
MAX_SUPPORTED_SIGNED_VALUE_VERSION = 2
"""The newest signed value version supported by this version of Tornado.
Signed values newer than this version cannot be decoded.
.. versionadded:: 3.2.1
"""
DEFAULT_SIGNED_VALUE_VERSION = 2
"""The signed value version produced by `.RequestHandler.create_signed_value`.
May be overridden by passing a ``version`` keyword argument.
.. versionadded:: 3.2.1
"""
DEFAULT_SIGNED_VALUE_MIN_VERSION = 1
"""The oldest signed value accepted by `.RequestHandler.get_secure_cookie`.
May be overrided by passing a ``min_version`` keyword argument.
.. versionadded:: 3.2.1
"""
class RequestHandler(object):
    """Subclass this class and define `get()` or `post()` to make a handler.
    If you want to support more methods than the standard GET/HEAD/POST, you
    should override the class variable ``SUPPORTED_METHODS`` in your
    `RequestHandler` subclass.
    """
    SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT",
                         "OPTIONS")
    # Class-wide cache of template loaders shared by all handlers, keyed by
    # template path and guarded by the lock below.
    _template_loaders = {}  # {path: template.BaseLoader}
    _template_loader_lock = threading.Lock()
    # Matches C0 control characters except \t, \n and \r-adjacent whitespace;
    # NOTE(review): the use site is not visible in this chunk — presumably
    # used to strip control chars from logged/echoed text; confirm.
    _remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
    def __init__(self, application, request, **kwargs):
        # Framework-managed constructor: subclasses should override
        # `initialize()` (called last, with the URLSpec kwargs) rather
        # than this method.
        super(RequestHandler, self).__init__()
        self.application = application
        self.request = request
        self._headers_written = False
        self._finished = False
        self._auto_finish = True
        self._transforms = None  # will be set in _execute
        self._prepared_future = None
        # Populated by _execute with the groups captured from the URL regex.
        self.path_args = None
        self.path_kwargs = None
        self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
                             application.ui_methods.items())
        # UIModules are available as both `modules` and `_tt_modules` in the
        # template namespace.  Historically only `modules` was available
        # but could be clobbered by user additions to the namespace.
        # The template {% module %} directive looks in `_tt_modules` to avoid
        # possible conflicts.
        self.ui["_tt_modules"] = _UIModuleNamespace(self,
                                                    application.ui_modules)
        self.ui["modules"] = self.ui["_tt_modules"]
        # Reset response state, then wire up connection-close notification
        # before handing control to user code in initialize().
        self.clear()
        self.request.connection.set_close_callback(self.on_connection_close)
        self.initialize(**kwargs)
    def initialize(self):
        """Hook for subclass initialization.

        A dictionary passed as the third argument of a url spec will be
        supplied as keyword arguments to initialize().

        Example::

            class ProfileHandler(RequestHandler):
                def initialize(self, database):
                    self.database = database

                def get(self, username):
                    ...

            app = Application([
                (r'/user/(.*)', ProfileHandler, dict(database=database)),
                ])
        """
        pass  # intentionally a no-op in the base class
@property
def settings(self):
"""An alias for `self.application.settings <Application.settings>`."""
return self.application.settings
    # Default implementations of the standard HTTP methods.  Each responds
    # 405 (Method Not Allowed); subclasses override the verbs they support
    # (and list any non-standard ones in SUPPORTED_METHODS).
    def head(self, *args, **kwargs):
        raise HTTPError(405)
    def get(self, *args, **kwargs):
        raise HTTPError(405)
    def post(self, *args, **kwargs):
        raise HTTPError(405)
    def delete(self, *args, **kwargs):
        raise HTTPError(405)
    def patch(self, *args, **kwargs):
        raise HTTPError(405)
    def put(self, *args, **kwargs):
        raise HTTPError(405)
    def options(self, *args, **kwargs):
        raise HTTPError(405)
    def prepare(self):
        """Called at the beginning of a request before `get`/`post`/etc.

        Override this method to perform common initialization regardless
        of the request method.

        Asynchronous support: Decorate this method with `.gen.coroutine`
        or `.return_future` to make it asynchronous (the
        `asynchronous` decorator cannot be used on `prepare`).
        If this method returns a `.Future` execution will not proceed
        until the `.Future` is done.

        .. versionadded:: 3.1
           Asynchronous support.
        """
        pass
    def on_finish(self):
        """Called after the end of a request.

        Override this method to perform cleanup, logging, etc.
        This method is a counterpart to `prepare`.  ``on_finish`` may
        not produce any output, as it is called after the response
        has been sent to the client.
        """
        pass
    def on_connection_close(self):
        """Called in async handlers if the client closed the connection.

        Override this to clean up resources associated with
        long-lived connections.  Note that this method is called only if
        the connection was closed during asynchronous processing; if you
        need to do cleanup after every request override `on_finish`
        instead.

        Proxies may keep a connection open for a time (perhaps
        indefinitely) after the client has gone away, so this method
        may not be called promptly after the end user closes their
        connection.
        """
        if _has_stream_request_body(self.__class__):
            # Unblock any coroutine waiting on the streamed request body by
            # failing its future.  The .exception() call presumably marks
            # the failure as observed -- TODO confirm against Future docs.
            if not self.request.body.done():
                self.request.body.set_exception(iostream.StreamClosedError())
                self.request.body.exception()
    def clear(self):
        """Resets all headers and content for this response."""
        self._headers = httputil.HTTPHeaders({
            "Server": "TornadoServer/%s" % tornado.version,
            "Content-Type": "text/html; charset=UTF-8",
            "Date": httputil.format_timestamp(time.time()),
        })
        self.set_default_headers()
        # Body chunks accumulated by write(); drained by flush()/finish().
        self._write_buffer = []
        self._status_code = 200
        self._reason = httputil.responses[200]
    def set_default_headers(self):
        """Override this to set HTTP headers at the beginning of the request.

        For example, this is the place to set a custom ``Server`` header.
        Note that setting such headers in the normal flow of request
        processing may not do what you want, since headers may be reset
        during error handling.
        """
        pass
def set_status(self, status_code, reason=None):
"""Sets the status code for our response.
:arg int status_code: Response status code. If ``reason`` is ``None``,
it must be present in `httplib.responses <http.client.responses>`.
:arg string reason: Human-readable reason phrase describing the status
code. If ``None``, it will be filled in from
`httplib.responses <http.client.responses>`.
"""
self._status_code = status_code
if reason is not None:
self._reason = escape.native_str(reason)
else:
try:
self._reason = httputil.responses[status_code]
except KeyError:
raise ValueError("unknown status code %d", status_code)
    def get_status(self):
        """Returns the status code for our response."""
        # Set via set_status(); clear() resets it to 200.
        return self._status_code
    def set_header(self, name, value):
        """Sets the given response header name and value.

        If a datetime is given, we automatically format it according to the
        HTTP specification. If the value is not a string, we convert it to
        a string. All header values are then encoded as UTF-8.
        """
        # Replaces any previously set value for ``name``.
        self._headers[name] = self._convert_header_value(value)
    def add_header(self, name, value):
        """Adds the given response header and value.

        Unlike `set_header`, `add_header` may be called multiple times
        to return multiple values for the same header.
        """
        self._headers.add(name, self._convert_header_value(value))
def clear_header(self, name):
"""Clears an outgoing header, undoing a previous `set_header` call.
Note that this method does not apply to multi-valued headers
set by `add_header`.
"""
if name in self._headers:
del self._headers[name]
_INVALID_HEADER_CHAR_RE = re.compile(br"[\x00-\x1f]")
def _convert_header_value(self, value):
if isinstance(value, bytes):
pass
elif isinstance(value, unicode_type):
value = value.encode('utf-8')
elif isinstance(value, numbers.Integral):
# return immediately since we know the converted value will be safe
return str(value)
elif isinstance(value, datetime.datetime):
return httputil.format_timestamp(value)
else:
raise TypeError("Unsupported header value %r" % value)
# If \n is allowed into the header, it is possible to inject
# additional headers or split the request. Also cap length to
# prevent obviously erroneous values.
if (len(value) > 4000 or
RequestHandler._INVALID_HEADER_CHAR_RE.search(value)):
raise ValueError("Unsafe header value %r", value)
return value
    # Sentinel default for the argument getters below.  Compared by identity
    # (``default is _ARG_DEFAULT``) so callers may pass any value -- even
    # None -- as an explicit default.
    _ARG_DEFAULT = []
    def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        The returned value is always unicode.
        """
        return self._get_argument(name, default, self.request.arguments, strip)
    def get_arguments(self, name, strip=True):
        """Returns a list of the arguments with the given name.

        If the argument is not present, returns an empty list.

        The returned values are always unicode.
        """
        return self._get_arguments(name, self.request.arguments, strip)
    def get_body_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name
        from the request body.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        The returned value is always unicode.

        .. versionadded:: 3.2
        """
        return self._get_argument(name, default, self.request.body_arguments, strip)
    def get_body_arguments(self, name, strip=True):
        """Returns a list of the body arguments with the given name.

        If the argument is not present, returns an empty list.

        The returned values are always unicode.

        .. versionadded:: 3.2
        """
        return self._get_arguments(name, self.request.body_arguments, strip)
    def get_query_argument(self, name, default=_ARG_DEFAULT, strip=True):
        """Returns the value of the argument with the given name
        from the request query string.

        If default is not provided, the argument is considered to be
        required, and we raise a `MissingArgumentError` if it is missing.

        If the argument appears in the url more than once, we return the
        last value.

        The returned value is always unicode.

        .. versionadded:: 3.2
        """
        return self._get_argument(name, default, self.request.query_arguments, strip)
    def get_query_arguments(self, name, strip=True):
        """Returns a list of the query arguments with the given name.

        If the argument is not present, returns an empty list.

        The returned values are always unicode.

        .. versionadded:: 3.2
        """
        return self._get_arguments(name, self.request.query_arguments, strip)
def _get_argument(self, name, default, source, strip=True):
args = self._get_arguments(name, source, strip=strip)
if not args:
if default is self._ARG_DEFAULT:
raise MissingArgumentError(name)
return default
return args[-1]
    def _get_arguments(self, name, source, strip=True):
        """Return all values for ``name`` in ``source``, decoded and cleaned."""
        values = []
        for v in source.get(name, []):
            v = self.decode_argument(v, name=name)
            if isinstance(v, unicode_type):
                # Get rid of any weird control chars (unless decoding gave
                # us bytes, in which case leave it alone)
                v = RequestHandler._remove_control_chars_regex.sub(" ", v)
            if strip:
                v = v.strip()
            values.append(v)
        return values
    def decode_argument(self, value, name=None):
        """Decodes an argument from the request.

        The argument has been percent-decoded and is now a byte string.
        By default, this method decodes the argument as utf-8 and returns
        a unicode string, but this may be overridden in subclasses.

        This method is used as a filter for both `get_argument()` and for
        values extracted from the url and passed to `get()`/`post()`/etc.

        The name of the argument is provided if known, but may be None
        (e.g. for unnamed groups in the url regex).
        """
        try:
            return _unicode(value)
        except UnicodeDecodeError:
            # Only the first 40 bytes are echoed back, to bound the
            # error-message size.
            raise HTTPError(400, "Invalid unicode in %s: %r" %
                            (name or "url", value[:40]))
    @property
    def cookies(self):
        """An alias for `self.request.cookies <.httputil.HTTPServerRequest.cookies>`."""
        # These are the cookies the client sent, not outgoing Set-Cookie
        # values (those live in self._new_cookie).
        return self.request.cookies
def get_cookie(self, name, default=None):
"""Gets the value of the cookie with the given name, else default."""
if self.request.cookies is not None and name in self.request.cookies:
return self.request.cookies[name].value
return default
    def set_cookie(self, name, value, domain=None, expires=None, path="/",
                   expires_days=None, **kwargs):
        """Sets the given cookie name/value with the given options.

        Additional keyword arguments are set on the Cookie.Morsel
        directly.
        See http://docs.python.org/library/cookie.html#morsel-objects
        for available attributes.
        """
        # The cookie library only accepts type str, in both python 2 and 3
        name = escape.native_str(name)
        value = escape.native_str(value)
        if re.search(r"[\x00-\x20]", name + value):
            # Don't let us accidentally inject bad stuff
            raise ValueError("Invalid cookie %r: %r" % (name, value))
        # Outgoing cookies are buffered in _new_cookie and emitted as
        # Set-Cookie headers when the response headers are flushed.
        if not hasattr(self, "_new_cookie"):
            self._new_cookie = Cookie.SimpleCookie()
        if name in self._new_cookie:
            del self._new_cookie[name]
        self._new_cookie[name] = value
        morsel = self._new_cookie[name]
        if domain:
            morsel["domain"] = domain
        if expires_days is not None and not expires:
            # An explicit ``expires`` takes precedence over ``expires_days``.
            expires = datetime.datetime.utcnow() + datetime.timedelta(
                days=expires_days)
        if expires:
            morsel["expires"] = httputil.format_timestamp(expires)
        if path:
            morsel["path"] = path
        for k, v in kwargs.items():
            if k == 'max_age':
                # Accept the pythonic spelling for the "max-age" attribute.
                k = 'max-age'
            morsel[k] = v
    def clear_cookie(self, name, path="/", domain=None):
        """Deletes the cookie with the given name.

        Due to limitations of the cookie protocol, you must pass the same
        path and domain to clear a cookie as were used when that cookie
        was set (but there is no way to find out on the server side
        which values were used for a given cookie).
        """
        # A cookie is "deleted" by re-sending it with an expiration date
        # in the past.
        expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
        self.set_cookie(name, value="", path=path, expires=expires,
                        domain=domain)
    def clear_all_cookies(self, path="/", domain=None):
        """Deletes all the cookies the user sent with this request.

        See `clear_cookie` for more information on the path and domain
        parameters.

        .. versionchanged:: 3.2
           Added the ``path`` and ``domain`` parameters.
        """
        for name in self.request.cookies:
            self.clear_cookie(name, path=path, domain=domain)
    def set_secure_cookie(self, name, value, expires_days=30, version=None,
                          **kwargs):
        """Signs and timestamps a cookie so it cannot be forged.

        You must specify the ``cookie_secret`` setting in your Application
        to use this method. It should be a long, random sequence of bytes
        to be used as the HMAC secret for the signature.

        To read a cookie set with this method, use `get_secure_cookie()`.

        Note that the ``expires_days`` parameter sets the lifetime of the
        cookie in the browser, but is independent of the ``max_age_days``
        parameter to `get_secure_cookie`.

        Secure cookies may contain arbitrary byte values, not just unicode
        strings (unlike regular cookies)

        .. versionchanged:: 3.2.1
           Added the ``version`` argument.  Introduced cookie version 2
           and made it the default.
        """
        self.set_cookie(name, self.create_signed_value(name, value,
                                                       version=version),
                        expires_days=expires_days, **kwargs)
    def create_signed_value(self, name, value, version=None):
        """Signs and timestamps a string so it cannot be forged.

        Normally used via set_secure_cookie, but provided as a separate
        method for non-cookie uses.  To decode a value not stored
        as a cookie use the optional value argument to get_secure_cookie.

        .. versionchanged:: 3.2.1
           Added the ``version`` argument.  Introduced cookie version 2
           and made it the default.
        """
        self.require_setting("cookie_secret", "secure cookies")
        # Delegates to the module-level create_signed_value().
        return create_signed_value(self.application.settings["cookie_secret"],
                                   name, value, version=version)
    def get_secure_cookie(self, name, value=None, max_age_days=31,
                          min_version=None):
        """Returns the given signed cookie if it validates, or None.

        The decoded cookie value is returned as a byte string (unlike
        `get_cookie`).

        .. versionchanged:: 3.2.1
           Added the ``min_version`` argument.  Introduced cookie version 2;
           both versions 1 and 2 are accepted by default.
        """
        self.require_setting("cookie_secret", "secure cookies")
        if value is None:
            value = self.get_cookie(name)
        # Returns None on signature mismatch or expiry.
        return decode_signed_value(self.application.settings["cookie_secret"],
                                   name, value, max_age_days=max_age_days,
                                   min_version=min_version)
    def redirect(self, url, permanent=False, status=None):
        """Sends a redirect to the given (optionally relative) URL.

        If the ``status`` argument is specified, that value is used as the
        HTTP status code; otherwise either 301 (permanent) or 302
        (temporary) is chosen based on the ``permanent`` argument.
        The default is 302 (temporary).
        """
        if self._headers_written:
            raise Exception("Cannot redirect after headers have been written")
        if status is None:
            status = 301 if permanent else 302
        else:
            # NOTE(review): ``assert`` is stripped under ``python -O``, so
            # this range check disappears in optimized mode.
            assert isinstance(status, int) and 300 <= status <= 399
        self.set_status(status)
        # Resolve a relative target against the current request uri.
        self.set_header("Location", urlparse.urljoin(utf8(self.request.uri),
                                                     utf8(url)))
        self.finish()
    def write(self, chunk):
        """Writes the given chunk to the output buffer.

        To write the output to the network, use the flush() method below.

        If the given chunk is a dictionary, we write it as JSON and set
        the Content-Type of the response to be ``application/json``.
        (if you want to send JSON as a different ``Content-Type``, call
        set_header *after* calling write()).

        Note that lists are not converted to JSON because of a potential
        cross-site security vulnerability. All JSON output should be
        wrapped in a dictionary. More details at
        http://haacked.com/archive/2009/06/25/json-hijacking.aspx/ and
        https://github.com/facebook/tornado/issues/1009
        """
        if self._finished:
            raise RuntimeError("Cannot write() after finish(). May be caused "
                               "by using async operations without the "
                               "@asynchronous decorator.")
        if not isinstance(chunk, (bytes, unicode_type, dict)):
            raise TypeError("write() only accepts bytes, unicode, and dict objects")
        if isinstance(chunk, dict):
            chunk = escape.json_encode(chunk)
            self.set_header("Content-Type", "application/json; charset=UTF-8")
        chunk = utf8(chunk)
        # Buffered until flush()/finish() sends it to the connection.
        self._write_buffer.append(chunk)
    def render(self, template_name, **kwargs):
        """Renders the template with the given arguments as the response."""
        html = self.render_string(template_name, **kwargs)
        # Insert the additional JS and CSS added by the modules on the page
        js_embed = []
        js_files = []
        css_embed = []
        css_files = []
        html_heads = []
        html_bodies = []
        # Collect the resources contributed by each UI module that ran
        # while rendering the template.
        for module in getattr(self, "_active_modules", {}).values():
            embed_part = module.embedded_javascript()
            if embed_part:
                js_embed.append(utf8(embed_part))
            file_part = module.javascript_files()
            if file_part:
                # A module may return a single path or a list of paths.
                if isinstance(file_part, (unicode_type, bytes)):
                    js_files.append(file_part)
                else:
                    js_files.extend(file_part)
            embed_part = module.embedded_css()
            if embed_part:
                css_embed.append(utf8(embed_part))
            file_part = module.css_files()
            if file_part:
                if isinstance(file_part, (unicode_type, bytes)):
                    css_files.append(file_part)
                else:
                    css_files.extend(file_part)
            head_part = module.html_head()
            if head_part:
                html_heads.append(utf8(head_part))
            body_part = module.html_body()
            if body_part:
                html_bodies.append(utf8(body_part))
        def is_absolute(path):
            # Paths starting with "/" or a scheme are used verbatim;
            # everything else is routed through static_url().
            return any(path.startswith(x) for x in ["/", "http:", "https:"])
        if js_files:
            # Maintain order of JavaScript files given by modules
            paths = []
            unique_paths = set()
            for path in js_files:
                if not is_absolute(path):
                    path = self.static_url(path)
                if path not in unique_paths:
                    paths.append(path)
                    unique_paths.add(path)
            js = ''.join('<script src="' + escape.xhtml_escape(p) +
                         '" type="text/javascript"></script>'
                         for p in paths)
            # Script tags are spliced in just before </body>.
            sloc = html.rindex(b'</body>')
            html = html[:sloc] + utf8(js) + b'\n' + html[sloc:]
        if js_embed:
            js = b'<script type="text/javascript">\n//<![CDATA[\n' + \
                b'\n'.join(js_embed) + b'\n//]]>\n</script>'
            sloc = html.rindex(b'</body>')
            html = html[:sloc] + js + b'\n' + html[sloc:]
        if css_files:
            # Same ordered de-duplication as for JS files above.
            paths = []
            unique_paths = set()
            for path in css_files:
                if not is_absolute(path):
                    path = self.static_url(path)
                if path not in unique_paths:
                    paths.append(path)
                    unique_paths.add(path)
            css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
                          'type="text/css" rel="stylesheet"/>'
                          for p in paths)
            # Stylesheets are spliced in just before </head>.
            hloc = html.index(b'</head>')
            html = html[:hloc] + utf8(css) + b'\n' + html[hloc:]
        if css_embed:
            css = b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
                b'\n</style>'
            hloc = html.index(b'</head>')
            html = html[:hloc] + css + b'\n' + html[hloc:]
        if html_heads:
            hloc = html.index(b'</head>')
            html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:]
        if html_bodies:
            hloc = html.index(b'</body>')
            html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:]
        self.finish(html)
    def render_string(self, template_name, **kwargs):
        """Generate the given template with the given arguments.

        We return the generated byte string (in utf8). To generate and
        write a template as a response, use render() above.
        """
        # If no template_path is specified, use the path of the calling file
        template_path = self.get_template_path()
        if not template_path:
            frame = sys._getframe(0)
            web_file = frame.f_code.co_filename
            # Walk up the stack past this module to find the caller's file.
            while frame.f_code.co_filename == web_file:
                frame = frame.f_back
            template_path = os.path.dirname(frame.f_code.co_filename)
        with RequestHandler._template_loader_lock:
            # Loaders are cached per template directory; creation is
            # serialized by the class-level lock.
            if template_path not in RequestHandler._template_loaders:
                loader = self.create_template_loader(template_path)
                RequestHandler._template_loaders[template_path] = loader
            else:
                loader = RequestHandler._template_loaders[template_path]
        t = loader.load(template_name)
        namespace = self.get_template_namespace()
        # Explicit keyword arguments override the default namespace.
        namespace.update(kwargs)
        return t.generate(**namespace)
    def get_template_namespace(self):
        """Returns a dictionary to be used as the default template namespace.

        May be overridden by subclasses to add or modify values.

        The results of this method will be combined with additional
        defaults in the `tornado.template` module and keyword arguments
        to `render` or `render_string`.
        """
        namespace = dict(
            handler=self,
            request=self.request,
            current_user=self.current_user,
            locale=self.locale,
            _=self.locale.translate,
            static_url=self.static_url,
            xsrf_form_html=self.xsrf_form_html,
            reverse_url=self.reverse_url
        )
        # UI methods/modules are merged in, overriding same-named defaults.
        namespace.update(self.ui)
        return namespace
def create_template_loader(self, template_path):
"""Returns a new template loader for the given path.
May be overridden by subclasses. By default returns a
directory-based loader on the given path, using the
``autoescape`` application setting. If a ``template_loader``
application setting is supplied, uses that instead.
"""
settings = self.application.settings
if "template_loader" in settings:
return settings["template_loader"]
kwargs = {}
if "autoescape" in settings:
# autoescape=None means "no escaping", so we have to be sure
# to only pass this kwarg if the user asked for it.
kwargs["autoescape"] = settings["autoescape"]
return template.Loader(template_path, **kwargs)
    def flush(self, include_footers=False, callback=None):
        """Flushes the current output buffer to the network.

        The ``callback`` argument, if given, can be used for flow control:
        it will be run when all flushed data has been written to the socket.
        Note that only one flush callback can be outstanding at a time;
        if another flush occurs before the previous flush's callback
        has been run, the previous callback will be discarded.

        .. versionchanged:: 4.0
           Now returns a `.Future` if no callback is given.
        """
        chunk = b"".join(self._write_buffer)
        self._write_buffer = []
        if not self._headers_written:
            # First flush: transforms may rewrite status/headers and the
            # first body chunk together (e.g. for compression/chunking).
            self._headers_written = True
            for transform in self._transforms:
                self._status_code, self._headers, chunk = \
                    transform.transform_first_chunk(
                        self._status_code, self._headers, chunk, include_footers)
            # Ignore the chunk and only write the headers for HEAD requests
            if self.request.method == "HEAD":
                chunk = None
            # Finalize the cookie headers (which have been stored in a side
            # object so an outgoing cookie could be overwritten before it
            # is sent).
            if hasattr(self, "_new_cookie"):
                for cookie in self._new_cookie.values():
                    self.add_header("Set-Cookie", cookie.OutputString(None))
            start_line = httputil.ResponseStartLine('',
                                                    self._status_code,
                                                    self._reason)
            return self.request.connection.write_headers(
                start_line, self._headers, chunk, callback=callback)
        else:
            # Subsequent flushes only transform and write body data.
            for transform in self._transforms:
                chunk = transform.transform_chunk(chunk, include_footers)
            # Ignore the chunk and only write the headers for HEAD requests
            if self.request.method != "HEAD":
                return self.request.connection.write(chunk, callback=callback)
            else:
                # Nothing to write; return an already-resolved Future so
                # callers can still yield on the result.
                future = Future()
                future.set_result(None)
                return future
    def finish(self, chunk=None):
        """Finishes this response, ending the HTTP request."""
        if self._finished:
            raise RuntimeError("finish() called twice. May be caused "
                               "by using async operations without the "
                               "@asynchronous decorator.")
        if chunk is not None:
            self.write(chunk)
        # Automatically support ETags and add the Content-Length header if
        # we have not flushed any content yet.
        if not self._headers_written:
            if (self._status_code == 200 and
                    self.request.method in ("GET", "HEAD") and
                    "Etag" not in self._headers):
                self.set_etag_header()
                # A matching If-None-Match turns the response into a 304
                # with an empty body.
                if self.check_etag_header():
                    self._write_buffer = []
                    self.set_status(304)
            if self._status_code == 304:
                assert not self._write_buffer, "Cannot send body with 304"
                self._clear_headers_for_304()
            elif "Content-Length" not in self._headers:
                content_length = sum(len(part) for part in self._write_buffer)
                self.set_header("Content-Length", content_length)
        if hasattr(self.request, "connection"):
            # Now that the request is finished, clear the callback we
            # set on the HTTPConnection (which would otherwise prevent the
            # garbage collection of the RequestHandler when there
            # are keepalive connections)
            self.request.connection.set_close_callback(None)
        self.flush(include_footers=True)
        self.request.finish()
        self._log()
        self._finished = True
        self.on_finish()
        # Break up a reference cycle between this handler and the
        # _ui_module closures to allow for faster GC on CPython.
        self.ui = None
    def send_error(self, status_code=500, **kwargs):
        """Sends the given HTTP error code to the browser.

        If `flush()` has already been called, it is not possible to send
        an error, so this method will simply terminate the response.
        If output has been written but not yet flushed, it will be discarded
        and replaced with the error page.

        Override `write_error()` to customize the error page that is returned.
        Additional keyword arguments are passed through to `write_error`.
        """
        if self._headers_written:
            # Too late to change the response; just end it.
            gen_log.error("Cannot send error response after headers written")
            if not self._finished:
                self.finish()
            return
        self.clear()
        reason = kwargs.get('reason')
        if 'exc_info' in kwargs:
            # Prefer the reason carried by an HTTPError, if any.
            exception = kwargs['exc_info'][1]
            if isinstance(exception, HTTPError) and exception.reason:
                reason = exception.reason
        self.set_status(status_code, reason=reason)
        try:
            self.write_error(status_code, **kwargs)
        except Exception:
            # write_error itself failed; log so the original error is not
            # silently lost.
            app_log.error("Uncaught exception in write_error", exc_info=True)
        if not self._finished:
            self.finish()
    def write_error(self, status_code, **kwargs):
        """Override to implement custom error pages.

        ``write_error`` may call `write`, `render`, `set_header`, etc
        to produce output as usual.

        If this error was caused by an uncaught exception (including
        HTTPError), an ``exc_info`` triple will be available as
        ``kwargs["exc_info"]``.  Note that this exception may not be
        the "current" exception for purposes of methods like
        ``sys.exc_info()`` or ``traceback.format_exc``.
        """
        if self.settings.get("serve_traceback") and "exc_info" in kwargs:
            # in debug mode, try to send a traceback
            self.set_header('Content-Type', 'text/plain')
            for line in traceback.format_exception(*kwargs["exc_info"]):
                self.write(line)
            self.finish()
        else:
            self.finish("<html><title>%(code)d: %(message)s</title>"
                        "<body>%(code)d: %(message)s</body></html>" % {
                            "code": status_code,
                            "message": self._reason,
                        })
    @property
    def locale(self):
        """The locale for the current session.

        Determined by either `get_user_locale`, which you can override to
        set the locale based on, e.g., a user preference stored in a
        database, or `get_browser_locale`, which uses the ``Accept-Language``
        header.

        .. versionchanged: 4.1
           Added a property setter.
        """
        if not hasattr(self, "_locale"):
            # Lazily resolved and cached: the user preference wins over
            # the Accept-Language header.
            self._locale = self.get_user_locale()
            if not self._locale:
                self._locale = self.get_browser_locale()
                assert self._locale
        return self._locale
    @locale.setter
    def locale(self, value):
        # Explicit assignment overrides the lazily-computed locale.
        self._locale = value
    def get_user_locale(self):
        """Override to determine the locale from the authenticated user.

        If None is returned, we fall back to `get_browser_locale()`.

        This method should return a `tornado.locale.Locale` object,
        most likely obtained via a call like ``tornado.locale.get("en")``
        """
        # Default: no user preference; defer to get_browser_locale().
        return None
    def get_browser_locale(self, default="en_US"):
        """Determines the user's locale from ``Accept-Language`` header.

        See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
        """
        if "Accept-Language" in self.request.headers:
            languages = self.request.headers["Accept-Language"].split(",")
            locales = []
            for language in languages:
                # An entry looks like "en-gb;q=0.8"; a missing q-value
                # means maximum preference (1.0).
                parts = language.strip().split(";")
                if len(parts) > 1 and parts[1].startswith("q="):
                    try:
                        score = float(parts[1][2:])
                    except (ValueError, TypeError):
                        score = 0.0
                else:
                    score = 1.0
                locales.append((parts[0], score))
            if locales:
                # Highest q-value first; ties keep header order (stable sort).
                locales.sort(key=lambda pair: pair[1], reverse=True)
                codes = [l[0] for l in locales]
                return locale.get(*codes)
        return locale.get(default)
@property
def current_user(self):
"""The authenticated user for this request.
This is a cached version of `get_current_user`, which you can
override to set the user based on, e.g., a cookie. If that
method is not overridden, this method always returns None.
We lazy-load the current user the first time this method is called
and cache the result after that.
"""
if not hasattr(self, "_current_user"):
self._current_user = self.get_current_user()
return self._current_user
    @current_user.setter
    def current_user(self, value):
        # Explicitly set the cached user, bypassing get_current_user().
        self._current_user = value
    def get_current_user(self):
        """Override to determine the current user from, e.g., a cookie."""
        # Default: anonymous request.
        return None
    def get_login_url(self):
        """Override to customize the login URL based on the request.

        By default, we use the ``login_url`` application setting.
        """
        # Raises if the setting is absent, with a hint about who needs it.
        self.require_setting("login_url", "@tornado.web.authenticated")
        return self.application.settings["login_url"]
def get_template_path(self):
"""Override to customize template path for each handler.
By default, we use the ``template_path`` application setting.
Return None to load templates relative to the calling file.
"""
return self.application.settings.get("template_path")
@property
def xsrf_token(self):
"""The XSRF-prevention token for the current user/session.
To prevent cross-site request forgery, we set an '_xsrf' cookie
and include the same '_xsrf' value as an argument with all POST
requests. If the two do not match, we reject the form submission
as a potential forgery.
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
.. versionchanged:: 3.2.2
The xsrf token will now be have a random mask applied in every
request, which makes it safe to include the token in pages
that are compressed. See http://breachattack.com for more
information on the issue fixed by this change. Old (version 1)
cookies will be converted to version 2 when this method is called
unless the ``xsrf_cookie_version`` `Application` setting is
set to 1.
"""
if not hasattr(self, "_xsrf_token"):
version, token, timestamp = self._get_raw_xsrf_token()
output_version = self.settings.get("xsrf_cookie_version", 2)
if output_version == 1:
self._xsrf_token = binascii.b2a_hex(token)
elif output_version == 2:
mask = os.urandom(4)
self._xsrf_token = b"|".join([
b"2",
binascii.b2a_hex(mask),
binascii.b2a_hex(_websocket_mask(mask, token)),
utf8(str(int(timestamp)))])
else:
raise ValueError("unknown xsrf cookie version %d",
output_version)
if version is None:
expires_days = 30 if self.current_user else None
self.set_cookie("_xsrf", self._xsrf_token,
expires_days=expires_days)
return self._xsrf_token
    def _get_raw_xsrf_token(self):
        """Read or generate the xsrf token in its raw form.

        The raw_xsrf_token is a tuple containing:

        * version: the version of the cookie from which this token was read,
          or None if we generated a new token in this request.
        * token: the raw token data; random (non-ascii) bytes.
        * timestamp: the time this token was generated (will not be accurate
          for version 1 cookies)
        """
        if not hasattr(self, '_raw_xsrf_token'):
            cookie = self.get_cookie("_xsrf")
            if cookie:
                version, token, timestamp = self._decode_xsrf_token(cookie)
            else:
                version, token, timestamp = None, None, None
            if token is None:
                # No (valid) cookie: mint a fresh 16-byte random token.
                version = None
                token = os.urandom(16)
                timestamp = time.time()
            self._raw_xsrf_token = (version, token, timestamp)
        return self._raw_xsrf_token
    def _decode_xsrf_token(self, cookie):
        """Convert a cookie string into the tuple form returned by
        _get_raw_xsrf_token.
        """
        try:
            m = _signed_value_version_re.match(utf8(cookie))
            if m:
                version = int(m.group(1))
                if version == 2:
                    _, mask, masked_token, timestamp = cookie.split("|")
                    mask = binascii.a2b_hex(utf8(mask))
                    # Undo the per-request mask applied by xsrf_token.
                    token = _websocket_mask(
                        mask, binascii.a2b_hex(utf8(masked_token)))
                    timestamp = int(timestamp)
                    return version, token, timestamp
                else:
                    # Treat unknown versions as not present instead of failing.
                    raise Exception("Unknown xsrf cookie version")
            else:
                # No version prefix: treat as a legacy version-1 token.
                version = 1
                try:
                    token = binascii.a2b_hex(utf8(cookie))
                except (binascii.Error, TypeError):
                    token = utf8(cookie)
                # We don't have a usable timestamp in older versions.
                timestamp = int(time.time())
                return (version, token, timestamp)
        except Exception:
            # Catch exceptions and return nothing instead of failing.
            gen_log.debug("Uncaught exception in _decode_xsrf_token", exc_info=True)
            return None, None, None
def check_xsrf_cookie(self):
"""Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.
To prevent cross-site request forgery, we set an ``_xsrf``
cookie and include the same value as a non-cookie
field with all ``POST`` requests. If the two do not match, we
reject the form submission as a potential forgery.
The ``_xsrf`` value may be set as either a form field named ``_xsrf``
or in a custom HTTP header named ``X-XSRFToken`` or ``X-CSRFToken``
(the latter is accepted for compatibility with Django).
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
Prior to release 1.1.1, this check was ignored if the HTTP header
``X-Requested-With: XMLHTTPRequest`` was present. This exception
has been shown to be insecure and has been removed. For more
information please see
http://www.djangoproject.com/weblog/2011/feb/08/security/
http://weblog.rubyonrails.org/2011/2/8/csrf-protection-bypass-in-ruby-on-rails
.. versionchanged:: 3.2.2
Added support for cookie version 2. Both versions 1 and 2 are
supported.
"""
token = (self.get_argument("_xsrf", None) or
self.request.headers.get("X-Xsrftoken") or
self.request.headers.get("X-Csrftoken"))
if not token:
raise HTTPError(403, "'_xsrf' argument missing from POST")
_, token, _ = self._decode_xsrf_token(token)
_, expected_token, _ = self._get_raw_xsrf_token()
if not _time_independent_equals(utf8(token), utf8(expected_token)):
raise HTTPError(403, "XSRF cookie does not match POST argument")
def xsrf_form_html(self):
    """Returns a hidden ``<input/>`` element carrying the XSRF token.

    Include this inside every HTML ``<form>`` that POSTs back to the
    application when the ``xsrf_cookies`` setting is enabled; the value
    is checked by `check_xsrf_cookie` on submission.

    In a template, this method should be called with ``{% module
    xsrf_form_html() %}``.
    """
    escaped_token = escape.xhtml_escape(self.xsrf_token)
    return '<input type="hidden" name="_xsrf" value="' + escaped_token + '"/>'
def static_url(self, path, include_host=None, **kwargs):
    """Returns a static URL for the given relative static file path.

    This method requires you set the ``static_path`` setting in your
    application (which specifies the root directory of your static
    files).

    This method returns a versioned url (by default appending
    ``?v=<signature>``), which allows the static files to be
    cached indefinitely.  This can be disabled by passing
    ``include_version=False`` (in the default implementation;
    other static file implementations are not required to support
    this, but they may support other options).

    By default this method returns URLs relative to the current
    host, but if ``include_host`` is true the URL returned will be
    absolute.  If this handler has an ``include_host`` attribute,
    that value will be used as the default for all `static_url`
    calls that do not pass ``include_host`` as a keyword argument.
    """
    self.require_setting("static_path", "static_url")
    handler_class = self.settings.get("static_handler_class",
                                      StaticFileHandler)
    if include_host is None:
        include_host = getattr(self, "include_host", False)
    if include_host:
        base = self.request.protocol + "://" + self.request.host
    else:
        base = ""
    return base + handler_class.make_static_url(self.settings, path, **kwargs)
def require_setting(self, name, feature="this feature"):
    """Raises an exception unless the named application setting is set."""
    settings = self.application.settings
    if not settings.get(name):
        raise Exception(
            "You must define the '%s' setting in your "
            "application to use %s" % (name, feature))
def reverse_url(self, name, *args):
    """Shorthand for `Application.reverse_url` on the owning application."""
    app = self.application
    return app.reverse_url(name, *args)
def compute_etag(self):
    """Computes the etag header to be used for this request.

    Returns a quoted hex SHA-1 digest of everything written so far.
    May be overridden to provide custom etag implementations,
    or may return None to disable tornado's default etag support.
    """
    digest = hashlib.sha1()
    for chunk in self._write_buffer:
        digest.update(chunk)
    return '"%s"' % digest.hexdigest()
def set_etag_header(self):
    """Sets the response's Etag header using ``self.compute_etag()``.

    Note: no header will be set if ``compute_etag()`` returns ``None``.

    This method is called automatically when the request is finished.
    """
    computed = self.compute_etag()
    if computed is None:
        return
    self.set_header("Etag", computed)
def check_etag_header(self):
    """Checks the ``Etag`` header against requests's ``If-None-Match``.

    Returns ``True`` if the request's Etag matches and a 304 should be
    returned. For example::

        self.set_etag_header()
        if self.check_etag_header():
            self.set_status(304)
            return

    This method is called automatically when the request is finished,
    but may be called earlier for applications that override
    `compute_etag` and want to do an early check for ``If-None-Match``
    before completing the request.  The ``Etag`` header should be set
    (perhaps with `set_etag_header`) before calling this method.
    """
    current_etag = self._headers.get("Etag")
    match_value = utf8(self.request.headers.get("If-None-Match", ""))
    if not current_etag or not match_value:
        return False
    return match_value.find(current_etag) >= 0
def _stack_context_handle_exception(self, type, value, traceback):
    # Installed as the stack_context exception handler so that errors
    # raised from asynchronous callbacks flow into the normal
    # request-error path.
    try:
        # For historical reasons _handle_request_exception only takes
        # the exception value instead of the full triple,
        # so re-raise the exception to ensure that it's in
        # sys.exc_info()
        raise_exc_info((type, value, traceback))
    except Exception:
        self._handle_request_exception(value)
    # Returning True tells the stack context machinery the exception
    # was handled and should not propagate further.
    return True
@gen.coroutine
def _execute(self, transforms, *args, **kwargs):
    """Executes this request with the given output transforms.

    Drives the full request lifecycle: method check, path-argument
    decoding, xsrf validation, ``prepare()``, the HTTP verb method,
    and (unless disabled) ``finish()``.  Any exception is routed
    through ``_handle_request_exception``.
    """
    self._transforms = transforms
    try:
        if self.request.method not in self.SUPPORTED_METHODS:
            raise HTTPError(405)
        # Decode the URL-regex captures into path_args/path_kwargs.
        self.path_args = [self.decode_argument(arg) for arg in args]
        self.path_kwargs = dict((k, self.decode_argument(v, name=k))
                                for (k, v) in kwargs.items())
        # If XSRF cookies are turned on, reject form submissions without
        # the proper cookie
        if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
                self.application.settings.get("xsrf_cookies"):
            self.check_xsrf_cookie()

        # prepare() may be synchronous or return a Future; it must not
        # return any other non-None value.
        result = self.prepare()
        if is_future(result):
            result = yield result
        if result is not None:
            raise TypeError("Expected None, got %r" % result)
        if self._prepared_future is not None:
            # Tell the Application we've finished with prepare()
            # and are ready for the body to arrive.
            self._prepared_future.set_result(None)
        if self._finished:
            return

        if _has_stream_request_body(self.__class__):
            # In streaming mode request.body is a Future that signals
            # the body has been completely received.  The Future has no
            # result; the data has been passed to self.data_received
            # instead.
            try:
                yield self.request.body
            except iostream.StreamClosedError:
                return

        # Dispatch to the verb method (get/post/...), which like
        # prepare() may be synchronous or coroutine-based.
        method = getattr(self, self.request.method.lower())
        result = method(*self.path_args, **self.path_kwargs)
        if is_future(result):
            result = yield result
        if result is not None:
            raise TypeError("Expected None, got %r" % result)
        if self._auto_finish and not self._finished:
            self.finish()
    except Exception as e:
        self._handle_request_exception(e)
        if (self._prepared_future is not None and
                not self._prepared_future.done()):
            # In case we failed before setting _prepared_future, do it
            # now (to unblock the HTTP server).  Note that this is not
            # in a finally block to avoid GC issues prior to Python 3.4.
            self._prepared_future.set_result(None)
def data_received(self, chunk):
    """Implement this method to handle streamed request data.

    Called with successive body chunks when the handler class is
    decorated with `.stream_request_body`; subclasses must override it.
    """
    raise NotImplementedError()
def _log(self):
"""Logs the current request.
Sort of deprecated since this functionality was moved to the
Application, but left in place for the benefit of existing apps
that have overridden this method.
"""
self.application.log_request(self)
def _request_summary(self):
return self.request.method + " " + self.request.uri + \
" (" + self.request.remote_ip + ")"
def _handle_request_exception(self, e):
    # Central dispatch for exceptions escaping handler code: Finish
    # ends the request cleanly, HTTPError produces its status code,
    # anything else becomes a 500.
    if isinstance(e, Finish):
        # Not an error; just finish the request without logging.
        if not self._finished:
            self.finish()
        return
    self.log_exception(*sys.exc_info())
    if self._finished:
        # Extra errors after the request has been finished should
        # be logged, but there is no reason to continue to try and
        # send a response.
        return
    if isinstance(e, HTTPError):
        if e.status_code not in httputil.responses and not e.reason:
            # An unknown status code with no reason phrase cannot be
            # rendered in the status line; degrade to a generic 500.
            gen_log.error("Bad HTTP status code: %d", e.status_code)
            self.send_error(500, exc_info=sys.exc_info())
        else:
            self.send_error(e.status_code, exc_info=sys.exc_info())
    else:
        self.send_error(500, exc_info=sys.exc_info())
def log_exception(self, typ, value, tb):
    """Override to customize logging of uncaught exceptions.

    By default logs instances of `HTTPError` as warnings without
    stack traces (on the ``tornado.general`` logger), and all
    other exceptions as errors with stack traces (on the
    ``tornado.application`` logger).

    .. versionadded:: 3.1
    """
    if isinstance(value, HTTPError):
        if value.log_message:
            # HTTPErrors carrying a log message are expected
            # conditions; log at warning level, no traceback.
            # (An HTTPError without a log message is not logged here.)
            format = "%d %s: " + value.log_message
            args = ([value.status_code, self._request_summary()] +
                    list(value.args))
            gen_log.warning(format, *args)
    else:
        # Unexpected exception: log with full traceback.
        app_log.error("Uncaught exception %s\n%r", self._request_summary(),
                      self.request, exc_info=(typ, value, tb))
def _ui_module(self, name, module):
def render(*args, **kwargs):
if not hasattr(self, "_active_modules"):
self._active_modules = {}
if name not in self._active_modules:
self._active_modules[name] = module(self)
rendered = self._active_modules[name].render(*args, **kwargs)
return rendered
return render
def _ui_method(self, method):
return lambda *args, **kwargs: method(self, *args, **kwargs)
def _clear_headers_for_304(self):
# 304 responses should not contain entity headers (defined in
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
# not explicitly allowed by
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
headers = ["Allow", "Content-Encoding", "Content-Language",
"Content-Length", "Content-MD5", "Content-Range",
"Content-Type", "Last-Modified"]
for h in headers:
self.clear_header(h)
def asynchronous(method):
    """Wrap request handler methods with this if they are asynchronous.

    This decorator is unnecessary if the method is also decorated with
    ``@gen.coroutine`` (it is legal but unnecessary to use the two
    decorators together, in which case ``@asynchronous`` must be
    first).

    This decorator should only be applied to the :ref:`HTTP verb
    methods <verbs>`; its behavior is undefined for any other method.
    This decorator does not *make* a method asynchronous; it tells
    the framework that the method *is* asynchronous.  For this decorator
    to be useful the method must (at least sometimes) do something
    asynchronous.

    If this decorator is given, the response is not finished when the
    method returns.  It is up to the request handler to call
    `self.finish() <RequestHandler.finish>` to finish the HTTP
    request.  Without this decorator, the request is automatically
    finished when the ``get()`` or ``post()`` method returns.  Example::

        class MyRequestHandler(web.RequestHandler):
            @web.asynchronous
            def get(self):
                http = httpclient.AsyncHTTPClient()
                http.fetch("http://friendfeed.com/", self._on_download)

            def _on_download(self, response):
                self.write("Downloaded!")
                self.finish()

    .. versionadded:: 3.1
       The ability to use ``@gen.coroutine`` without ``@asynchronous``.
    """
    # Delay the IOLoop import because it's not available on app engine.
    from tornado.ioloop import IOLoop

    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        # Disable auto-finish; the handler (or the future-completion
        # callback below) decides when the request ends.
        self._auto_finish = False
        with stack_context.ExceptionStackContext(
                self._stack_context_handle_exception):
            result = method(self, *args, **kwargs)
            if is_future(result):
                # If @asynchronous is used with @gen.coroutine, (but
                # not @gen.engine), we can automatically finish the
                # request when the future resolves.  Additionally,
                # the Future will swallow any exceptions so we need
                # to throw them back out to the stack context to finish
                # the request.
                def future_complete(f):
                    f.result()
                    if not self._finished:
                        self.finish()
                IOLoop.current().add_future(result, future_complete)
                # Once we have done this, hide the Future from our
                # caller (i.e. RequestHandler._when_complete), which
                # would otherwise set up its own callback and
                # exception handler (resulting in exceptions being
                # logged twice).
                return None
            return result
    return wrapper
def stream_request_body(cls):
    """Apply to `RequestHandler` subclasses to enable streaming body support.

    This decorator implies the following changes:

    * `.HTTPServerRequest.body` is undefined, and body arguments will not
      be included in `RequestHandler.get_argument`.
    * `RequestHandler.prepare` is called when the request headers have been
      read instead of after the entire body has been read.
    * The subclass must define a method ``data_received(self, data):``, which
      will be called zero or more times as data is available.  Note that
      if the request has an empty body, ``data_received`` may not be called.
    * ``prepare`` and ``data_received`` may return Futures (such as via
      ``@gen.coroutine``), in which case the next method will not be called
      until those futures have completed.
    * The regular HTTP method (``post``, ``put``, etc) will be called after
      the entire body has been read.

    There is a subtle interaction between ``data_received`` and asynchronous
    ``prepare``: The first call to ``data_received`` may occur at any point
    after the call to ``prepare`` has returned *or yielded*.
    """
    if not issubclass(cls, RequestHandler):
        # %-format the message; previously ``cls`` was passed as a second
        # TypeError argument, leaving the "%r" placeholder unfilled.
        raise TypeError("expected subclass of RequestHandler, got %r" % cls)
    cls._stream_request_body = True
    return cls
def _has_stream_request_body(cls):
    """Returns True if ``cls`` was decorated with `stream_request_body`.

    Raises ``TypeError`` for classes that are not `RequestHandler`
    subclasses.
    """
    if not issubclass(cls, RequestHandler):
        # %-format the message; previously ``cls`` was passed as a second
        # TypeError argument, leaving the "%r" placeholder unfilled.
        raise TypeError("expected subclass of RequestHandler, got %r" % cls)
    return getattr(cls, '_stream_request_body', False)
def removeslash(method):
    """Use this decorator to remove trailing slashes from the request path.

    A ``GET``/``HEAD`` request for ``/foo/`` is permanently redirected
    to ``/foo``; other methods receive a 404.  Your request handler
    mapping should use a regular expression like ``r'/foo/*'`` in
    conjunction with using the decorator.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        path = self.request.path
        if path.endswith("/"):
            if self.request.method not in ("GET", "HEAD"):
                raise HTTPError(404)
            uri = path.rstrip("/")
            if uri:  # never redirect '/' to the empty string
                query = self.request.query
                if query:
                    uri = "%s?%s" % (uri, query)
                self.redirect(uri, permanent=True)
                return
        return method(self, *args, **kwargs)
    return wrapper
def addslash(method):
    """Use this decorator to add a missing trailing slash to the request path.

    A ``GET``/``HEAD`` request for ``/foo`` is permanently redirected
    to ``/foo/``; other methods receive a 404.  Your request handler
    mapping should use a regular expression like ``r'/foo/?'`` in
    conjunction with using the decorator.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        path = self.request.path
        if not path.endswith("/"):
            if self.request.method not in ("GET", "HEAD"):
                raise HTTPError(404)
            uri = path + "/"
            query = self.request.query
            if query:
                uri = "%s?%s" % (uri, query)
            self.redirect(uri, permanent=True)
            return
        return method(self, *args, **kwargs)
    return wrapper
class Application(httputil.HTTPServerConnectionDelegate):
    """A collection of request handlers that make up a web application.

    Instances of this class are callable and can be passed directly to
    HTTPServer to serve the application::

        application = web.Application([
            (r"/", MainPageHandler),
        ])
        http_server = httpserver.HTTPServer(application)
        http_server.listen(8080)
        ioloop.IOLoop.instance().start()

    The constructor for this class takes in a list of `URLSpec` objects
    or (regexp, request_class) tuples. When we receive requests, we
    iterate over the list in order and instantiate an instance of the
    first request class whose regexp matches the request path.
    The request class can be specified as either a class object or a
    (fully-qualified) name.

    Each tuple can contain additional elements, which correspond to the
    arguments to the `URLSpec` constructor.  (Prior to Tornado 3.2, this
    only tuples of two or three elements were allowed).

    A dictionary may be passed as the third element of the tuple,
    which will be used as keyword arguments to the handler's
    constructor and `~RequestHandler.initialize` method.  This pattern
    is used for the `StaticFileHandler` in this example (note that a
    `StaticFileHandler` can be installed automatically with the
    static_path setting described below)::

        application = web.Application([
            (r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
        ])

    We support virtual hosts with the `add_handlers` method, which takes in
    a host regular expression as the first argument::

        application.add_handlers(r"www\.myhost\.com", [
            (r"/article/([0-9]+)", ArticleHandler),
        ])

    You can serve static files by sending the ``static_path`` setting
    as a keyword argument. We will serve those files from the
    ``/static/`` URI (this is configurable with the
    ``static_url_prefix`` setting), and we will serve ``/favicon.ico``
    and ``/robots.txt`` from the same directory.  A custom subclass of
    `StaticFileHandler` can be specified with the
    ``static_handler_class`` setting.
    """
    def __init__(self, handlers=None, default_host="", transforms=None,
                 **settings):
        if transforms is None:
            self.transforms = []
            if settings.get("compress_response") or settings.get("gzip"):
                self.transforms.append(GZipContentEncoding)
        else:
            self.transforms = transforms
        self.handlers = []
        self.named_handlers = {}
        self.default_host = default_host
        self.settings = settings
        # Built-in UI modules available to all templates.
        self.ui_modules = {'linkify': _linkify,
                           'xsrf_form_html': _xsrf_form_html,
                           'Template': TemplateModule,
                           }
        self.ui_methods = {}
        self._load_ui_modules(settings.get("ui_modules", {}))
        self._load_ui_methods(settings.get("ui_methods", {}))
        if self.settings.get("static_path"):
            # Install static-file routes (prefix, favicon, robots) ahead
            # of the user's handlers.
            path = self.settings["static_path"]
            handlers = list(handlers or [])
            static_url_prefix = settings.get("static_url_prefix",
                                             "/static/")
            static_handler_class = settings.get("static_handler_class",
                                                StaticFileHandler)
            static_handler_args = settings.get("static_handler_args", {})
            static_handler_args['path'] = path
            for pattern in [re.escape(static_url_prefix) + r"(.*)",
                            r"/(favicon\.ico)", r"/(robots\.txt)"]:
                handlers.insert(0, (pattern, static_handler_class,
                                    static_handler_args))
        if handlers:
            self.add_handlers(".*$", handlers)

        # Debug mode implies several development-friendly defaults
        # unless each was set explicitly.
        if self.settings.get('debug'):
            self.settings.setdefault('autoreload', True)
            self.settings.setdefault('compiled_template_cache', False)
            self.settings.setdefault('static_hash_cache', False)
            self.settings.setdefault('serve_traceback', True)

        # Automatically reload modified modules
        if self.settings.get('autoreload'):
            from tornado import autoreload
            autoreload.start()

    def listen(self, port, address="", **kwargs):
        """Starts an HTTP server for this application on the given port.

        This is a convenience alias for creating an `.HTTPServer`
        object and calling its listen method.  Keyword arguments not
        supported by `HTTPServer.listen <.TCPServer.listen>` are passed to the
        `.HTTPServer` constructor.  For advanced uses
        (e.g. multi-process mode), do not use this method; create an
        `.HTTPServer` and call its
        `.TCPServer.bind`/`.TCPServer.start` methods directly.

        Note that after calling this method you still need to call
        ``IOLoop.instance().start()`` to start the server.
        """
        # import is here rather than top level because HTTPServer
        # is not importable on appengine
        from tornado.httpserver import HTTPServer
        server = HTTPServer(self, **kwargs)
        server.listen(port, address)
        # NOTE(review): the server instance is not returned here, so
        # callers that need it later (e.g. to stop it) must construct
        # HTTPServer themselves.

    def add_handlers(self, host_pattern, host_handlers):
        """Appends the given handlers to our handler list.

        Host patterns are processed sequentially in the order they were
        added.  All matching patterns will be considered.
        """
        # Anchor the host pattern so e.g. "example.com" cannot also
        # match a longer hostname with that prefix.
        if not host_pattern.endswith("$"):
            host_pattern += "$"
        handlers = []
        # The handlers with the wildcard host_pattern are a special
        # case - they're added in the constructor but should have lower
        # precedence than the more-precise handlers added later.
        # If a wildcard handler group exists, it should always be last
        # in the list, so insert new groups just before it.
        if self.handlers and self.handlers[-1][0].pattern == '.*$':
            self.handlers.insert(-1, (re.compile(host_pattern), handlers))
        else:
            self.handlers.append((re.compile(host_pattern), handlers))

        for spec in host_handlers:
            if isinstance(spec, (tuple, list)):
                assert len(spec) in (2, 3, 4)
                spec = URLSpec(*spec)
            handlers.append(spec)
            if spec.name:
                if spec.name in self.named_handlers:
                    app_log.warning(
                        "Multiple handlers named %s; replacing previous value",
                        spec.name)
                self.named_handlers[spec.name] = spec

    def add_transform(self, transform_class):
        # Registers an additional output transform class (instantiated
        # per-request in _RequestDispatcher.execute).
        self.transforms.append(transform_class)

    def _get_host_handlers(self, request):
        # Returns the handler specs for all host patterns matching the
        # request's host, or None when nothing matches.
        host = split_host_and_port(request.host.lower())[0]
        matches = []
        for pattern, handlers in self.handlers:
            if pattern.match(host):
                matches.extend(handlers)
        # Look for default host if not behind load balancer (for debugging)
        if not matches and "X-Real-Ip" not in request.headers:
            for pattern, handlers in self.handlers:
                if pattern.match(self.default_host):
                    matches.extend(handlers)
        return matches or None

    def _load_ui_methods(self, methods):
        # Accepts a module, a list, or a dict of name -> callable;
        # public, lowercase-initial callables become template helpers.
        if isinstance(methods, types.ModuleType):
            self._load_ui_methods(dict((n, getattr(methods, n))
                                       for n in dir(methods)))
        elif isinstance(methods, list):
            for m in methods:
                self._load_ui_methods(m)
        else:
            for name, fn in methods.items():
                if not name.startswith("_") and hasattr(fn, "__call__") \
                        and name[0].lower() == name[0]:
                    self.ui_methods[name] = fn

    def _load_ui_modules(self, modules):
        # Same acceptance rules as _load_ui_methods, but entries must be
        # UIModule subclasses; other values are silently skipped.
        if isinstance(modules, types.ModuleType):
            self._load_ui_modules(dict((n, getattr(modules, n))
                                       for n in dir(modules)))
        elif isinstance(modules, list):
            for m in modules:
                self._load_ui_modules(m)
        else:
            assert isinstance(modules, dict)
            for name, cls in modules.items():
                try:
                    if issubclass(cls, UIModule):
                        self.ui_modules[name] = cls
                except TypeError:
                    # issubclass raises TypeError for non-class values.
                    pass

    def start_request(self, server_conn, request_conn):
        # Modern HTTPServer interface
        return _RequestDispatcher(self, request_conn)

    def __call__(self, request):
        # Legacy HTTPServer interface
        dispatcher = _RequestDispatcher(self, None)
        dispatcher.set_request(request)
        return dispatcher.execute()

    def reverse_url(self, name, *args):
        """Returns a URL path for handler named ``name``

        The handler must be added to the application as a named `URLSpec`.

        Args will be substituted for capturing groups in the `URLSpec` regex.
        They will be converted to strings if necessary, encoded as utf8,
        and url-escaped.
        """
        if name in self.named_handlers:
            return self.named_handlers[name].reverse(*args)
        raise KeyError("%s not found in named urls" % name)

    def log_request(self, handler):
        """Writes a completed HTTP request to the logs.

        By default writes to the python root logger.  To change
        this behavior either subclass Application and override this method,
        or pass a function in the application settings dictionary as
        ``log_function``.
        """
        if "log_function" in self.settings:
            self.settings["log_function"](handler)
            return
        # Log level reflects the response class: <400 info,
        # 4xx warning, 5xx error.
        if handler.get_status() < 400:
            log_method = access_log.info
        elif handler.get_status() < 500:
            log_method = access_log.warning
        else:
            log_method = access_log.error
        request_time = 1000.0 * handler.request.request_time()
        log_method("%d %s %.2fms", handler.get_status(),
                   handler._request_summary(), request_time)
class _RequestDispatcher(httputil.HTTPMessageDelegate):
    """Glue between an HTTP server connection and a `RequestHandler`.

    One dispatcher is created per request: it selects the handler class
    from the application's routing table, buffers (or streams) the
    request body, and finally drives `RequestHandler._execute`.
    """
    def __init__(self, application, connection):
        self.application = application
        self.connection = connection
        self.request = None
        self.chunks = []            # buffered body chunks (non-streaming mode)
        self.handler_class = None
        self.handler_kwargs = None
        self.path_args = []         # positional captures from the URL regex
        self.path_kwargs = {}       # named captures from the URL regex

    def headers_received(self, start_line, headers):
        # Called by the connection once the header block is parsed.
        self.set_request(httputil.HTTPServerRequest(
            connection=self.connection, start_line=start_line,
            headers=headers))
        if self.stream_request_body:
            # In streaming mode request.body is a Future resolved by
            # finish() when the whole body has been received; execution
            # starts now, before the body arrives.
            self.request.body = Future()
            return self.execute()

    def set_request(self, request):
        self.request = request
        self._find_handler()
        self.stream_request_body = _has_stream_request_body(self.handler_class)

    def _find_handler(self):
        # Identify the handler to use as soon as we have the request.
        # Save url path arguments for later.
        app = self.application
        handlers = app._get_host_handlers(self.request)
        if not handlers:
            # No handler group matched this Host header; redirect to
            # the application's default host.
            self.handler_class = RedirectHandler
            self.handler_kwargs = dict(url="%s://%s/" % (self.request.protocol,
                                                         app.default_host))
            return
        for spec in handlers:
            match = spec.regex.match(self.request.path)
            if match:
                self.handler_class = spec.handler_class
                self.handler_kwargs = spec.kwargs
                if spec.regex.groups:
                    # Pass matched groups to the handler.  Since
                    # match.groups() includes both named and
                    # unnamed groups, we want to use either groups
                    # or groupdict but not both.
                    if spec.regex.groupindex:
                        self.path_kwargs = dict(
                            (str(k), _unquote_or_none(v))
                            for (k, v) in match.groupdict().items())
                    else:
                        self.path_args = [_unquote_or_none(s)
                                          for s in match.groups()]
                return
        # No URL pattern matched: fall back to the configured default
        # handler, or a plain 404.
        if app.settings.get('default_handler_class'):
            self.handler_class = app.settings['default_handler_class']
            self.handler_kwargs = app.settings.get(
                'default_handler_args', {})
        else:
            self.handler_class = ErrorHandler
            self.handler_kwargs = dict(status_code=404)

    def data_received(self, data):
        # Body data from the connection: forward immediately when
        # streaming, otherwise accumulate until finish().
        if self.stream_request_body:
            return self.handler.data_received(data)
        else:
            self.chunks.append(data)

    def finish(self):
        # The entire body has now been received.
        if self.stream_request_body:
            self.request.body.set_result(None)
        else:
            self.request.body = b''.join(self.chunks)
            self.request._parse_body()
            self.execute()

    def on_connection_close(self):
        if self.stream_request_body:
            self.handler.on_connection_close()
        else:
            # Drop any partially-buffered body so it can be GC'd.
            self.chunks = None

    def execute(self):
        # If template cache is disabled (usually in the debug mode),
        # re-compile templates and reload static files on every
        # request so you don't need to restart to see changes
        if not self.application.settings.get("compiled_template_cache", True):
            with RequestHandler._template_loader_lock:
                for loader in RequestHandler._template_loaders.values():
                    loader.reset()
        if not self.application.settings.get('static_hash_cache', True):
            StaticFileHandler.reset()

        self.handler = self.handler_class(self.application, self.request,
                                          **self.handler_kwargs)
        transforms = [t(self.request) for t in self.application.transforms]

        if self.stream_request_body:
            self.handler._prepared_future = Future()
        # Note that if an exception escapes handler._execute it will be
        # trapped in the Future it returns (which we are ignoring here).
        # However, that shouldn't happen because _execute has a blanket
        # except handler, and we cannot easily access the IOLoop here to
        # call add_future (because of the requirement to remain compatible
        # with WSGI)
        f = self.handler._execute(transforms, *self.path_args,
                                  **self.path_kwargs)
        # Consume the exception (if any) so it is not logged as an
        # unobserved Future error.
        f.add_done_callback(lambda f: f.exception())
        # If we are streaming the request body, then execute() is finished
        # when the handler has prepared to receive the body.  If not,
        # it doesn't matter when execute() finishes (so we return None)
        return self.handler._prepared_future
class HTTPError(Exception):
    """An exception that will turn into an HTTP error response.

    Raising an `HTTPError` is a convenient alternative to calling
    `RequestHandler.send_error` since it automatically ends the
    current function.

    To customize the response sent with an `HTTPError`, override
    `RequestHandler.write_error`.

    :arg int status_code: HTTP status code.  Must be listed in
        `httplib.responses <http.client.responses>` unless the ``reason``
        keyword argument is given.
    :arg string log_message: Message to be written to the log for this error
        (will not be shown to the user unless the `Application` is in debug
        mode).  May contain ``%s``-style placeholders, which will be filled
        in with remaining positional parameters.
    :arg string reason: Keyword-only argument.  The HTTP "reason" phrase
        to pass in the status line along with ``status_code``.  Normally
        determined automatically from ``status_code``, but can be used
        to use a non-standard numeric code.
    """
    def __init__(self, status_code, log_message=None, *args, **kwargs):
        self.status_code = status_code
        self.log_message = log_message
        self.args = args
        self.reason = kwargs.get('reason', None)

    def __str__(self):
        phrase = self.reason or httputil.responses.get(self.status_code,
                                                       'Unknown')
        message = "HTTP %d: %s" % (self.status_code, phrase)
        if not self.log_message:
            return message
        return message + " (" + (self.log_message % self.args) + ")"
class Finish(Exception):
    """An exception that ends the request without producing an error response.

    When `Finish` is raised in a `RequestHandler`, the request will end
    (calling `RequestHandler.finish` if it hasn't already been called),
    but the outgoing response is left exactly as it stands and the
    error-handling machinery (including `RequestHandler.write_error`)
    is bypassed entirely.

    This makes it a convenient way to implement custom error pages
    without overriding ``write_error`` (especially in library code)::

        if self.current_user is None:
            self.set_status(401)
            self.set_header('WWW-Authenticate', 'Basic realm="something"')
            raise Finish()
    """
    pass
class MissingArgumentError(HTTPError):
    """Exception raised by `RequestHandler.get_argument` when a required
    argument was not supplied.

    Because this is an `HTTPError` subclass, an uncaught instance yields
    a 400 response (with no stack trace logged) instead of a 500.

    .. versionadded:: 3.1
    """
    def __init__(self, arg_name):
        super(MissingArgumentError, self).__init__(
            400, 'Missing argument %s' % arg_name)
        self.arg_name = arg_name
class ErrorHandler(RequestHandler):
    """Responds to every request with the configured ``status_code``."""

    def initialize(self, status_code):
        self.set_status(status_code)

    def prepare(self):
        raise HTTPError(self._status_code)

    def check_xsrf_cookie(self):
        # A POST handled by ErrorHandler has no side effects, so the
        # xsrf check can safely be skipped; this lets a POST to a bad
        # url return 404 rather than 403.
        pass
class RedirectHandler(RequestHandler):
    """Sends every GET request to a fixed URL.

    Configure with the keyword argument ``url``, e.g.::

        application = web.Application([
            (r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
        ])
    """
    def initialize(self, url, permanent=True):
        self._url = url
        self._permanent = permanent

    def get(self):
        self.redirect(self._url, permanent=self._permanent)
class StaticFileHandler(RequestHandler):
"""A simple handler that can serve static content from a directory.
A `StaticFileHandler` is configured automatically if you pass the
``static_path`` keyword argument to `Application`. This handler
can be customized with the ``static_url_prefix``, ``static_handler_class``,
and ``static_handler_args`` settings.
To map an additional path to this handler for a static data directory
you would add a line to your application like::
application = web.Application([
(r"/content/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
The handler constructor requires a ``path`` argument, which specifies the
local root directory of the content to be served.
Note that a capture group in the regex is required to parse the value for
the ``path`` argument to the get() method (different than the constructor
argument above); see `URLSpec` for details.
To maximize the effectiveness of browser caching, this class supports
versioned urls (by default using the argument ``?v=``). If a version
is given, we instruct the browser to cache this file indefinitely.
`make_static_url` (also available as `RequestHandler.static_url`) can
be used to construct a versioned url.
This handler is intended primarily for use in development and light-duty
file serving; for heavy traffic it will be more efficient to use
a dedicated static file server (such as nginx or Apache). We support
the HTTP ``Accept-Ranges`` mechanism to return partial content (because
some browsers require this functionality to be present to seek in
HTML5 audio or video), but this handler should not be used with
files that are too large to fit comfortably in memory.
**Subclassing notes**
This class is designed to be extensible by subclassing, but because
of the way static urls are generated with class methods rather than
instance methods, the inheritance patterns are somewhat unusual.
Be sure to use the ``@classmethod`` decorator when overriding a
class method. Instance methods may use the attributes ``self.path``
``self.absolute_path``, and ``self.modified``.
Subclasses should only override methods discussed in this section;
overriding other methods is error-prone. Overriding
``StaticFileHandler.get`` is particularly problematic due to the
tight coupling with ``compute_etag`` and other methods.
To change the way static urls are generated (e.g. to match the behavior
of another server or CDN), override `make_static_url`, `parse_url_path`,
`get_cache_time`, and/or `get_version`.
To replace all interaction with the filesystem (e.g. to serve
static content from a database), override `get_content`,
`get_content_size`, `get_modified_time`, `get_absolute_path`, and
`validate_absolute_path`.
.. versionchanged:: 3.1
Many of the methods for subclasses were added in Tornado 3.1.
"""
CACHE_MAX_AGE = 86400 * 365 * 10 # 10 years
_static_hashes = {}
_lock = threading.Lock() # protects _static_hashes
def initialize(self, path, default_filename=None):
    """Stores the static root directory and optional default filename."""
    self.default_filename = default_filename
    self.root = path
@classmethod
def reset(cls):
    """Discards every cached static-file hash, under the class lock."""
    with cls._lock:
        cls._static_hashes = {}
def head(self, path):
    """Handles HEAD requests by delegating to ``get`` with no body."""
    return self.get(path, include_body=False)
    @gen.coroutine
    def get(self, path, include_body=True):
        """Serve the file at ``path``, honoring conditional and Range requests."""
        # Set up our path instance variables.
        self.path = self.parse_url_path(path)
        del path  # make sure we don't refer to path instead of self.path again
        absolute_path = self.get_absolute_path(self.root, self.path)
        self.absolute_path = self.validate_absolute_path(
            self.root, absolute_path)
        if self.absolute_path is None:
            # validate_absolute_path already redirected or raised.
            return
        self.modified = self.get_modified_time()
        self.set_headers()
        if self.should_return_304():
            self.set_status(304)
            return
        request_range = None
        range_header = self.request.headers.get("Range")
        if range_header:
            # As per RFC 2616 14.16, if an invalid Range header is specified,
            # the request will be treated as if the header didn't exist.
            request_range = httputil._parse_request_range(range_header)
        size = self.get_content_size()
        if request_range:
            start, end = request_range
            if (start is not None and start >= size) or end == 0:
                # As per RFC 2616 14.35.1, a range is not satisfiable only: if
                # the first requested byte is equal to or greater than the
                # content, or when a suffix with length 0 is specified
                self.set_status(416)  # Range Not Satisfiable
                self.set_header("Content-Type", "text/plain")
                self.set_header("Content-Range", "bytes */%s" % (size, ))
                return
            if start is not None and start < 0:
                # Negative start means a suffix range (last N bytes).
                start += size
            if end is not None and end > size:
                # Clients sometimes blindly use a large range to limit their
                # download size; cap the endpoint at the actual file size.
                end = size
            # Note: only return HTTP 206 if less than the entire range has been
            # requested. Not only is this semantically correct, but Chrome
            # refuses to play audio if it gets an HTTP 206 in response to
            # ``Range: bytes=0-``.
            if size != (end or size) - (start or 0):
                self.set_status(206)  # Partial Content
                self.set_header("Content-Range",
                                httputil._get_content_range(start, end, size))
        else:
            start = end = None
        if start is not None and end is not None:
            content_length = end - start
        elif end is not None:
            content_length = end
        elif start is not None:
            content_length = size - start
        else:
            content_length = size
        self.set_header("Content-Length", content_length)
        if include_body:
            content = self.get_content(self.absolute_path, start, end)
            if isinstance(content, bytes):
                content = [content]
            for chunk in content:
                try:
                    self.write(chunk)
                    yield self.flush()
                except iostream.StreamClosedError:
                    # Client disconnected; stop streaming quietly.
                    return
        else:
            assert self.request.method == "HEAD"
def compute_etag(self):
"""Sets the ``Etag`` header based on static url version.
This allows efficient ``If-None-Match`` checks against cached
versions, and sends the correct ``Etag`` for a partial response
(i.e. the same ``Etag`` as the full file).
.. versionadded:: 3.1
"""
version_hash = self._get_cached_version(self.absolute_path)
if not version_hash:
return None
return '"%s"' % (version_hash, )
    def set_headers(self):
        """Sets the content and caching headers on the response.
        .. versionadded:: 3.1
        """
        self.set_header("Accept-Ranges", "bytes")
        self.set_etag_header()
        if self.modified is not None:
            self.set_header("Last-Modified", self.modified)
        content_type = self.get_content_type()
        if content_type:
            self.set_header("Content-Type", content_type)
        cache_time = self.get_cache_time(self.path, self.modified, content_type)
        if cache_time > 0:
            # Emit both Expires (HTTP/1.0 caches) and Cache-Control (HTTP/1.1).
            self.set_header("Expires", datetime.datetime.utcnow() +
                            datetime.timedelta(seconds=cache_time))
            self.set_header("Cache-Control", "max-age=" + str(cache_time))
        self.set_extra_headers(self.path)
def should_return_304(self):
"""Returns True if the headers indicate that we should return 304.
.. versionadded:: 3.1
"""
if self.check_etag_header():
return True
# Check the If-Modified-Since, and don't send the result if the
# content has not been modified
ims_value = self.request.headers.get("If-Modified-Since")
if ims_value is not None:
date_tuple = email.utils.parsedate(ims_value)
if date_tuple is not None:
if_since = datetime.datetime(*date_tuple[:6])
if if_since >= self.modified:
return True
return False
@classmethod
def get_absolute_path(cls, root, path):
"""Returns the absolute location of ``path`` relative to ``root``.
``root`` is the path configured for this `StaticFileHandler`
(in most cases the ``static_path`` `Application` setting).
This class method may be overridden in subclasses. By default
it returns a filesystem path, but other strings may be used
as long as they are unique and understood by the subclass's
overridden `get_content`.
.. versionadded:: 3.1
"""
abspath = os.path.abspath(os.path.join(root, path))
return abspath
    def validate_absolute_path(self, root, absolute_path):
        """Validate and return the absolute path.
        ``root`` is the configured path for the `StaticFileHandler`,
        and ``path`` is the result of `get_absolute_path`
        This is an instance method called during request processing,
        so it may raise `HTTPError` or use methods like
        `RequestHandler.redirect` (return None after redirecting to
        halt further processing).  This is where 404 errors for missing files
        are generated.
        This method may modify the path before returning it, but note that
        any such modifications will not be understood by `make_static_url`.
        In instance methods, this method's result is available as
        ``self.absolute_path``.
        .. versionadded:: 3.1
        """
        root = os.path.abspath(root)
        # os.path.abspath strips a trailing /
        # it needs to be temporarily added back for requests to root/
        if not (absolute_path + os.path.sep).startswith(root):
            # Path escapes the static root (e.g. via ".."): refuse to serve.
            raise HTTPError(403, "%s is not in root static directory",
                            self.path)
        if (os.path.isdir(absolute_path) and
                self.default_filename is not None):
            # need to look at the request.path here for when path is empty
            # but there is some prefix to the path that was already
            # trimmed by the routing
            if not self.request.path.endswith("/"):
                self.redirect(self.request.path + "/", permanent=True)
                return
            absolute_path = os.path.join(absolute_path, self.default_filename)
        if not os.path.exists(absolute_path):
            raise HTTPError(404)
        if not os.path.isfile(absolute_path):
            raise HTTPError(403, "%s is not a file", self.path)
        return absolute_path
    @classmethod
    def get_content(cls, abspath, start=None, end=None):
        """Retrieve the content of the requested resource which is located
        at the given absolute path.
        This class method may be overridden by subclasses.  Note that its
        signature is different from other overridable class methods
        (no ``settings`` argument); this is deliberate to ensure that
        ``abspath`` is able to stand on its own as a cache key.
        This method should either return a byte string or an iterator
        of byte strings.  The latter is preferred for large files
        as it helps reduce memory fragmentation.
        .. versionadded:: 3.1
        """
        with open(abspath, "rb") as file:
            if start is not None:
                file.seek(start)
            if end is not None:
                # Bytes still to emit; ``end`` is exclusive.
                remaining = end - (start or 0)
            else:
                remaining = None
            while True:
                chunk_size = 64 * 1024
                if remaining is not None and remaining < chunk_size:
                    chunk_size = remaining
                chunk = file.read(chunk_size)
                if chunk:
                    if remaining is not None:
                        remaining -= len(chunk)
                    yield chunk
                else:
                    if remaining is not None:
                        # EOF must coincide exactly with the requested range.
                        assert remaining == 0
                    return
@classmethod
def get_content_version(cls, abspath):
"""Returns a version string for the resource at the given path.
This class method may be overridden by subclasses. The
default implementation is a hash of the file's contents.
.. versionadded:: 3.1
"""
data = cls.get_content(abspath)
hasher = hashlib.md5()
if isinstance(data, bytes):
hasher.update(data)
else:
for chunk in data:
hasher.update(chunk)
return hasher.hexdigest()
def _stat(self):
if not hasattr(self, '_stat_result'):
self._stat_result = os.stat(self.absolute_path)
return self._stat_result
def get_content_size(self):
"""Retrieve the total size of the resource at the given path.
This method may be overridden by subclasses.
.. versionadded:: 3.1
.. versionchanged:: 4.0
This method is now always called, instead of only when
partial results are requested.
"""
stat_result = self._stat()
return stat_result[stat.ST_SIZE]
def get_modified_time(self):
"""Returns the time that ``self.absolute_path`` was last modified.
May be overridden in subclasses. Should return a `~datetime.datetime`
object or None.
.. versionadded:: 3.1
"""
stat_result = self._stat()
modified = datetime.datetime.utcfromtimestamp(stat_result[stat.ST_MTIME])
return modified
def get_content_type(self):
"""Returns the ``Content-Type`` header to be used for this request.
.. versionadded:: 3.1
"""
mime_type, encoding = mimetypes.guess_type(self.absolute_path)
return mime_type
    def set_extra_headers(self, path):
        """For subclass to add extra headers to the response"""
        # Default implementation intentionally does nothing.
        pass
def get_cache_time(self, path, modified, mime_type):
"""Override to customize cache control behavior.
Return a positive number of seconds to make the result
cacheable for that amount of time or 0 to mark resource as
cacheable for an unspecified amount of time (subject to
browser heuristics).
By default returns cache expiry of 10 years for resources requested
with ``v`` argument.
"""
return self.CACHE_MAX_AGE if "v" in self.request.arguments else 0
    @classmethod
    def make_static_url(cls, settings, path, include_version=True):
        """Constructs a versioned url for the given path.
        This method may be overridden in subclasses (but note that it
        is a class method rather than an instance method).  Subclasses
        are only required to implement the signature
        ``make_static_url(cls, settings, path)``; other keyword
        arguments may be passed through `~RequestHandler.static_url`
        but are not standard.
        ``settings`` is the `Application.settings` dictionary.  ``path``
        is the static path being requested.  The url returned should be
        relative to the current host.
        ``include_version`` determines whether the generated URL should
        include the query string containing the version hash of the
        file corresponding to the given ``path``.
        """
        url = settings.get('static_url_prefix', '/static/') + path
        if not include_version:
            return url
        version_hash = cls.get_version(settings, path)
        if not version_hash:
            # Hashing failed (e.g. missing file); fall back to unversioned url.
            return url
        return '%s?v=%s' % (url, version_hash)
def parse_url_path(self, url_path):
"""Converts a static URL path into a filesystem path.
``url_path`` is the path component of the URL with
``static_url_prefix`` removed. The return value should be
filesystem path relative to ``static_path``.
This is the inverse of `make_static_url`.
"""
if os.path.sep != "/":
url_path = url_path.replace("/", os.path.sep)
return url_path
    @classmethod
    def get_version(cls, settings, path):
        """Generate the version string to be used in static URLs.
        ``settings`` is the `Application.settings` dictionary and ``path``
        is the relative location of the requested asset on the filesystem.
        The returned value should be a string, or ``None`` if no version
        could be determined.
        .. versionchanged:: 3.1
           This method was previously recommended for subclasses to override;
           `get_content_version` is now preferred as it allows the base
           class to handle caching of the result.
        """
        abs_path = cls.get_absolute_path(settings['static_path'], path)
        return cls._get_cached_version(abs_path)
    @classmethod
    def _get_cached_version(cls, abs_path):
        """Return the cached content hash for ``abs_path``, computing it once.
        Failures are cached as ``None`` so an unreadable file is not
        re-hashed on every request; the lock guards the shared dict.
        """
        with cls._lock:
            hashes = cls._static_hashes
            if abs_path not in hashes:
                try:
                    hashes[abs_path] = cls.get_content_version(abs_path)
                except Exception:
                    gen_log.error("Could not open static file %r", abs_path)
                    hashes[abs_path] = None
            hsh = hashes.get(abs_path)
            if hsh:
                return hsh
        return None
class FallbackHandler(RequestHandler):
    """A `RequestHandler` that wraps another HTTP server callback.
    The fallback is a callable object that accepts an
    `~.httputil.HTTPServerRequest`, such as an `Application` or
    `tornado.wsgi.WSGIContainer`.  This is most useful to use both
    Tornado ``RequestHandlers`` and WSGI in the same server.  Typical
    usage::
        wsgi_app = tornado.wsgi.WSGIContainer(
            django.core.handlers.wsgi.WSGIHandler())
        application = tornado.web.Application([
            (r"/foo", FooHandler),
            (r".*", FallbackHandler, dict(fallback=wsgi_app)),
        ])
    """
    def initialize(self, fallback):
        # The wrapped callback (e.g. a WSGIContainer) invoked in prepare().
        self.fallback = fallback
    def prepare(self):
        # Delegate the entire request to the fallback and mark this handler
        # as finished so Tornado performs no further processing of its own.
        self.fallback(self.request)
        self._finished = True
class OutputTransform(object):
    """A transform modifies the result of an HTTP request (e.g., GZip encoding)
    Applications are not expected to create their own OutputTransforms
    or interact with them directly; the framework chooses which transforms
    (if any) to apply.
    """
    def __init__(self, request):
        # The base transform keeps no per-request state.
        pass
    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # Identity transform: status, headers and body pass through untouched.
        return status_code, headers, chunk
    def transform_chunk(self, chunk, finishing):
        # Identity transform for all subsequent body chunks.
        return chunk
class GZipContentEncoding(OutputTransform):
    """Applies the gzip content encoding to the response.
    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
    .. versionchanged:: 4.0
        Now compresses all mime types beginning with ``text/``, instead
        of just a whitelist. (the whitelist is still used for certain
        non-text mime types).
    """
    # Whitelist of compressible mime types (in addition to any types
    # beginning with "text/").
    CONTENT_TYPES = set(["application/javascript", "application/x-javascript",
                         "application/xml", "application/atom+xml",
                         "application/json", "application/xhtml+xml"])
    # Responses shorter than this are not worth compressing.
    MIN_LENGTH = 5
    def __init__(self, request):
        # Only gzip when the client advertised support for it.
        self._gzipping = "gzip" in request.headers.get("Accept-Encoding", "")
    def _compressible_type(self, ctype):
        return ctype.startswith('text/') or ctype in self.CONTENT_TYPES
    def transform_first_chunk(self, status_code, headers, chunk, finishing):
        # NOTE(review): the Vary value is appended as bytes; presumably the
        # header container normalizes bytes values -- confirm against httputil.
        if 'Vary' in headers:
            headers['Vary'] += b', Accept-Encoding'
        else:
            headers['Vary'] = b'Accept-Encoding'
        if self._gzipping:
            ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
            # Compress only suitable content types, non-trivial sizes, and
            # responses that are not already content-encoded.
            self._gzipping = self._compressible_type(ctype) and \
                (not finishing or len(chunk) >= self.MIN_LENGTH) and \
                ("Content-Encoding" not in headers)
        if self._gzipping:
            headers["Content-Encoding"] = "gzip"
            self._gzip_value = BytesIO()
            self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
            chunk = self.transform_chunk(chunk, finishing)
            if "Content-Length" in headers:
                # The original content length is no longer correct.
                # If this is the last (and only) chunk, we can set the new
                # content-length; otherwise we remove it and fall back to
                # chunked encoding.
                if finishing:
                    headers["Content-Length"] = str(len(chunk))
                else:
                    del headers["Content-Length"]
        return status_code, headers, chunk
    def transform_chunk(self, chunk, finishing):
        if self._gzipping:
            self._gzip_file.write(chunk)
            if finishing:
                self._gzip_file.close()
            else:
                self._gzip_file.flush()
            # Drain the buffered compressed bytes and reset for reuse.
            chunk = self._gzip_value.getvalue()
            self._gzip_value.truncate(0)
            self._gzip_value.seek(0)
        return chunk
def authenticated(method):
    """Decorate methods with this to require that the user be logged in.
    If the user is not logged in, they will be redirected to the configured
    `login url <RequestHandler.get_login_url>`.
    If you configure a login url with a query parameter, Tornado will
    assume you know what you're doing and use it as-is.  If not, it
    will add a `next` parameter so the login page knows where to send
    you once you're logged in.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if not self.current_user:
            # Safe/idempotent methods get a redirect to the login page;
            # everything else (POST, PUT, ...) is rejected outright.
            if self.request.method in ("GET", "HEAD"):
                url = self.get_login_url()
                if "?" not in url:
                    if urlparse.urlsplit(url).scheme:
                        # if login url is absolute, make next absolute too
                        next_url = self.request.full_url()
                    else:
                        next_url = self.request.uri
                    url += "?" + urlencode(dict(next=next_url))
                self.redirect(url)
                return
            raise HTTPError(403)
        return method(self, *args, **kwargs)
    return wrapper
class UIModule(object):
    """A re-usable, modular UI unit on a page.
    UI modules often execute additional queries, and they can include
    additional CSS and JavaScript that will be included in the output
    page, which is automatically inserted on page render.
    Subclasses of UIModule must override the `render` method.
    """
    def __init__(self, handler):
        # Cache frequently-used attributes of the owning handler so
        # subclasses can reach them directly.
        self.handler = handler
        self.request = handler.request
        self.ui = handler.ui
        self.locale = handler.locale
    @property
    def current_user(self):
        """The current user, as determined by the owning handler."""
        return self.handler.current_user
    def render(self, *args, **kwargs):
        """Override in subclasses to return this module's output."""
        raise NotImplementedError()
    def embedded_javascript(self):
        """Override to return a JavaScript string to be embedded in the page."""
        return None
    def javascript_files(self):
        """Override to return a list of JavaScript files needed by this module.
        If the return values are relative paths, they will be passed to
        `RequestHandler.static_url`; otherwise they will be used as-is.
        """
        return None
    def embedded_css(self):
        """Override to return a CSS string that will be embedded in the page."""
        return None
    def css_files(self):
        """Override to returns a list of CSS files required by this module.
        If the return values are relative paths, they will be passed to
        `RequestHandler.static_url`; otherwise they will be used as-is.
        """
        return None
    def html_head(self):
        """Override to return an HTML string that will be put in the <head/>
        element.
        """
        return None
    def html_body(self):
        """Override to return an HTML string that will be put at the end of
        the <body/> element.
        """
        return None
    def render_string(self, path, **kwargs):
        """Renders a template and returns it as a string."""
        return self.handler.render_string(path, **kwargs)
class _linkify(UIModule):
    """UI module that HTML-escapes ``text`` and converts plain URLs to links."""
    def render(self, text, **kwargs):
        return escape.linkify(text, **kwargs)
class _xsrf_form_html(UIModule):
    """UI module that emits the owning handler's hidden XSRF form field."""
    def render(self):
        return self.handler.xsrf_form_html()
class TemplateModule(UIModule):
    """UIModule that simply renders the given template.
    {% module Template("foo.html") %} is similar to {% include "foo.html" %},
    but the module version gets its own namespace (with kwargs passed to
    Template()) instead of inheriting the outer template's namespace.
    Templates rendered through this module also get access to UIModule's
    automatic javascript/css features.  Simply call set_resources
    inside the template and give it keyword arguments corresponding to
    the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
    Note that these resources are output once per template file, not once
    per instantiation of the template, so they must not depend on
    any arguments to the template.
    """
    def __init__(self, handler):
        super(TemplateModule, self).__init__(handler)
        # keep resources in both a list and a dict to preserve order
        self._resource_list = []
        self._resource_dict = {}
    def render(self, path, **kwargs):
        def set_resources(**kwargs):
            # Register this template's resources exactly once; re-registering
            # the same path with different values is a programming error.
            if path not in self._resource_dict:
                self._resource_list.append(kwargs)
                self._resource_dict[path] = kwargs
            else:
                if self._resource_dict[path] != kwargs:
                    raise ValueError("set_resources called with different "
                                     "resources for the same template")
            return ""
        return self.render_string(path, set_resources=set_resources,
                                  **kwargs)
    def _get_resources(self, key):
        # Yield the ``key`` entry of each registered resource dict, in order.
        return (r[key] for r in self._resource_list if key in r)
    def embedded_javascript(self):
        return "\n".join(self._get_resources("embedded_javascript"))
    def javascript_files(self):
        result = []
        for f in self._get_resources("javascript_files"):
            if isinstance(f, (unicode_type, bytes)):
                result.append(f)
            else:
                # A sequence of filenames; flatten it into the result.
                result.extend(f)
        return result
    def embedded_css(self):
        return "\n".join(self._get_resources("embedded_css"))
    def css_files(self):
        result = []
        for f in self._get_resources("css_files"):
            if isinstance(f, (unicode_type, bytes)):
                result.append(f)
            else:
                result.extend(f)
        return result
    def html_head(self):
        return "".join(self._get_resources("html_head"))
    def html_body(self):
        return "".join(self._get_resources("html_body"))
class _UIModuleNamespace(object):
"""Lazy namespace which creates UIModule proxies bound to a handler."""
def __init__(self, handler, ui_modules):
self.handler = handler
self.ui_modules = ui_modules
def __getitem__(self, key):
return self.handler._ui_module(key, self.ui_modules[key])
def __getattr__(self, key):
try:
return self[key]
except KeyError as e:
raise AttributeError(str(e))
class URLSpec(object):
    """Specifies mappings between URLs and handlers."""
    def __init__(self, pattern, handler, kwargs=None, name=None):
        """Parameters:
        * ``pattern``: Regular expression to be matched.  Any groups
          in the regex will be passed in to the handler's get/post/etc
          methods as arguments.
        * ``handler``: `RequestHandler` subclass to be invoked.
        * ``kwargs`` (optional): A dictionary of additional arguments
          to be passed to the handler's constructor.
        * ``name`` (optional): A name for this handler.  Used by
          `Application.reverse_url`.
        """
        if not pattern.endswith('$'):
            pattern += '$'
        self.regex = re.compile(pattern)
        assert len(self.regex.groupindex) in (0, self.regex.groups), \
            ("groups in url regexes must either be all named or all "
             "positional: %r" % self.regex.pattern)
        if isinstance(handler, str):
            # import the Module and instantiate the class
            # Must be a fully qualified name (module.ClassName)
            handler = import_object(handler)
        self.handler_class = handler
        self.kwargs = kwargs or {}
        self.name = name
        self._path, self._group_count = self._find_groups()
    def __repr__(self):
        return '%s(%r, %s, kwargs=%r, name=%r)' % \
            (self.__class__.__name__, self.regex.pattern,
             self.handler_class, self.kwargs, self.name)
    def _find_groups(self):
        """Returns a tuple (reverse string, group count) for a url.
        For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
        would return ('/%s/%s/', 2).
        """
        pattern = self.regex.pattern
        if pattern.startswith('^'):
            pattern = pattern[1:]
        if pattern.endswith('$'):
            pattern = pattern[:-1]
        if self.regex.groups != pattern.count('('):
            # The pattern is too complicated for our simplistic matching,
            # so we can't support reversing it.
            return (None, None)
        pieces = []
        for fragment in pattern.split('('):
            if ')' in fragment:
                # Replace each group with a %s placeholder.  (str.index cannot
                # return a negative value here, so the former ``paren_loc >= 0``
                # guard was dead code and has been removed.)
                paren_loc = fragment.index(')')
                pieces.append('%s' + fragment[paren_loc + 1:])
            else:
                pieces.append(fragment)
        return (''.join(pieces), self.regex.groups)
    def reverse(self, *args):
        """Return the URL path for this spec with ``args`` substituted
        into the regex groups."""
        assert self._path is not None, \
            "Cannot reverse url regex " + self.regex.pattern
        assert len(args) == self._group_count, "required number of arguments "\
            "not found"
        if not len(args):
            return self._path
        converted_args = []
        for a in args:
            if not isinstance(a, (unicode_type, bytes)):
                a = str(a)
            converted_args.append(escape.url_escape(utf8(a), plus=False))
        return self._path % tuple(converted_args)
url = URLSpec
if hasattr(hmac, 'compare_digest'): # python 3.3
_time_independent_equals = hmac.compare_digest
else:
def _time_independent_equals(a, b):
if len(a) != len(b):
return False
result = 0
if isinstance(a[0], int): # python3 byte strings
for x, y in zip(a, b):
result |= x ^ y
else: # python2
for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
def create_signed_value(secret, name, value, version=None, clock=None):
    """Sign ``value`` for use as a secure cookie named ``name``.
    ``version`` selects the signing format (1 or 2); ``clock`` may be
    overridden for testing.  Returns bytes.
    """
    if version is None:
        version = DEFAULT_SIGNED_VALUE_VERSION
    if clock is None:
        clock = time.time
    timestamp = utf8(str(int(clock())))
    value = base64.b64encode(utf8(value))
    if version == 1:
        signature = _create_signature_v1(secret, name, value, timestamp)
        value = b"|".join([value, timestamp, signature])
        return value
    elif version == 2:
        # The v2 format consists of a version number and a series of
        # length-prefixed fields "%d:%s", the last of which is a
        # signature, all separated by pipes.  All numbers are in
        # decimal format with no leading zeros.  The signature is an
        # HMAC-SHA256 of the whole string up to that point, including
        # the final pipe.
        #
        # The fields are:
        # - format version (i.e. 2; no length prefix)
        # - key version (currently 0; reserved for future key rotation features)
        # - timestamp (integer seconds since epoch)
        # - name (not encoded; assumed to be ~alphanumeric)
        # - value (base64-encoded)
        # - signature (hex-encoded; no length prefix)
        def format_field(s):
            # Length-prefix a field so embedded pipes cannot confuse parsing.
            return utf8("%d:" % len(s)) + utf8(s)
        to_sign = b"|".join([
            b"2|1:0",
            format_field(timestamp),
            format_field(name),
            format_field(value),
            b''])
        signature = _create_signature_v2(secret, to_sign)
        return to_sign + signature
    else:
        raise ValueError("Unsupported version %d" % version)
# A leading version number in decimal with no leading zeros, followed by a pipe.
_signed_value_version_re = re.compile(br"^([1-9][0-9]*)\|(.*)$")
def decode_signed_value(secret, name, value, max_age_days=31, clock=None, min_version=None):
    """Verify and decode a value produced by `create_signed_value`.
    Returns the original bytes, or None if the signature is invalid,
    expired, or older than ``min_version``.
    """
    if clock is None:
        clock = time.time
    if min_version is None:
        min_version = DEFAULT_SIGNED_VALUE_MIN_VERSION
    if min_version > 2:
        raise ValueError("Unsupported min_version %d" % min_version)
    if not value:
        return None
    # Figure out what version this is.  Version 1 did not include an
    # explicit version field and started with arbitrary base64 data,
    # which makes this tricky.
    value = utf8(value)
    m = _signed_value_version_re.match(value)
    if m is None:
        version = 1
    else:
        try:
            version = int(m.group(1))
            if version > 999:
                # Certain payloads from the version-less v1 format may
                # be parsed as valid integers.  Due to base64 padding
                # restrictions, this can only happen for numbers whose
                # length is a multiple of 4, so we can treat all
                # numbers up to 999 as versions, and for the rest we
                # fall back to v1 format.
                version = 1
        except ValueError:
            version = 1
    if version < min_version:
        return None
    if version == 1:
        return _decode_signed_value_v1(secret, name, value, max_age_days, clock)
    elif version == 2:
        return _decode_signed_value_v2(secret, name, value, max_age_days, clock)
    else:
        return None
def _decode_signed_value_v1(secret, name, value, max_age_days, clock):
    """Verify a v1 ``value|timestamp|signature`` cookie; return the payload
    bytes or None on any validation failure."""
    parts = utf8(value).split(b"|")
    if len(parts) != 3:
        return None
    signature = _create_signature_v1(secret, name, parts[0], parts[1])
    if not _time_independent_equals(parts[2], signature):
        gen_log.warning("Invalid cookie signature %r", value)
        return None
    timestamp = int(parts[1])
    if timestamp < clock() - max_age_days * 86400:
        gen_log.warning("Expired cookie %r", value)
        return None
    if timestamp > clock() + 31 * 86400:
        # _cookie_signature does not hash a delimiter between the
        # parts of the cookie, so an attacker could transfer trailing
        # digits from the payload to the timestamp without altering the
        # signature.  For backwards compatibility, sanity-check timestamp
        # here instead of modifying _cookie_signature.
        gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
        return None
    if parts[1].startswith(b"0"):
        # Leading zeros in the timestamp are another tampering vector.
        gen_log.warning("Tampered cookie %r", value)
        return None
    try:
        return base64.b64decode(parts[0])
    except Exception:
        return None
def _decode_signed_value_v2(secret, name, value, max_age_days, clock):
    """Verify a v2 length-prefixed signed value; return the payload bytes
    or None on any parse/signature/expiry failure."""
    def _consume_field(s):
        # Parse one "<len>:<data>|" field and return (data, remainder).
        length, _, rest = s.partition(b':')
        n = int(length)
        field_value = rest[:n]
        # In python 3, indexing bytes returns small integers; we must
        # use a slice to get a byte string as in python 2.
        if rest[n:n + 1] != b'|':
            raise ValueError("malformed v2 signed value field")
        rest = rest[n + 1:]
        return field_value, rest
    rest = value[2:]  # remove version number
    try:
        key_version, rest = _consume_field(rest)
        timestamp, rest = _consume_field(rest)
        name_field, rest = _consume_field(rest)
        value_field, rest = _consume_field(rest)
    except ValueError:
        return None
    passed_sig = rest
    # The signature covers everything up to (and including) the final pipe.
    signed_string = value[:-len(passed_sig)]
    expected_sig = _create_signature_v2(secret, signed_string)
    if not _time_independent_equals(passed_sig, expected_sig):
        return None
    if name_field != utf8(name):
        return None
    timestamp = int(timestamp)
    if timestamp < clock() - max_age_days * 86400:
        # The signature has expired.
        return None
    try:
        return base64.b64decode(value_field)
    except Exception:
        return None
def _create_signature_v1(secret, *parts):
    """Return the hex HMAC-SHA1 of ``parts`` keyed by ``secret`` (v1 format).
    SHA-1 is retained only for compatibility with existing v1 signed
    values; new code should prefer the v2 (SHA-256) format.
    """
    # Renamed local from ``hash`` to avoid shadowing the builtin hash().
    mac = hmac.new(utf8(secret), digestmod=hashlib.sha1)
    for part in parts:
        mac.update(utf8(part))
    return utf8(mac.hexdigest())
def _create_signature_v2(secret, s):
    """Return the hex HMAC-SHA256 of ``s`` keyed by ``secret`` (v2 format)."""
    # Renamed local from ``hash`` to avoid shadowing the builtin hash().
    mac = hmac.new(utf8(secret), digestmod=hashlib.sha256)
    mac.update(utf8(s))
    return utf8(mac.hexdigest())
def _unquote_or_none(s):
    """None-safe wrapper around url_unescape to handle unmatched optional
    groups correctly.
    Note that args are passed as bytes so the handler can decide what
    encoding to use.
    """
    if s is None:
        return s
    return escape.url_unescape(s, encoding=None, plus=False)
|
ovresko/erpnext
|
refs/heads/master
|
erpnext/restaurant/doctype/restaurant_order_entry/restaurant_order_entry.py
|
24
|
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.model.document import Document
from frappe import _
from erpnext.controllers.queries import item_query
class RestaurantOrderEntry(Document):
	"""Virtual single-page doctype; all behavior lives in the whitelisted
	module-level functions below."""
	pass
@frappe.whitelist()
def get_invoice(table):
	'''returns the active invoice linked to the given table'''
	# Reuse the open (draft, docstatus=0) POS invoice already attached to
	# this table, if any.
	invoice_name = frappe.get_value('Sales Invoice', dict(restaurant_table = table, docstatus=0))
	restaurant, menu_name = get_restaurant_and_menu_name(table)
	if invoice_name:
		invoice = frappe.get_doc('Sales Invoice', invoice_name)
	else:
		# No open invoice: create a new POS invoice with the restaurant's defaults.
		invoice = frappe.new_doc('Sales Invoice')
		invoice.naming_series = frappe.db.get_value('Restaurant', restaurant, 'invoice_series_prefix')
		invoice.is_pos = 1
		default_customer = frappe.db.get_value('Restaurant', restaurant, 'default_customer')
		if not default_customer:
			frappe.throw(_('Please set default customer in Restaurant Settings'))
		invoice.customer = default_customer
	# Always refresh taxes and price list from the restaurant's active menu.
	invoice.taxes_and_charges = frappe.db.get_value('Restaurant', restaurant, 'default_tax_template')
	invoice.selling_price_list = frappe.db.get_value('Price List', dict(restaurant_menu=menu_name, enabled=1))
	return invoice
@frappe.whitelist()
def sync(table, items):
	'''Sync the sales order related to the table'''
	invoice = get_invoice(table)
	# ``items`` arrives as a JSON string of [{"item": ..., "qty": ...}, ...].
	items = json.loads(items)
	# Replace the invoice's item rows wholesale with the posted cart contents.
	invoice.items = []
	invoice.restaurant_table = table
	for d in items:
		invoice.append('items', dict(
			item_code = d.get('item'),
			qty = d.get('qty')
		))
	invoice.save()
	return invoice.as_dict()
@frappe.whitelist()
def make_invoice(table, customer, mode_of_payment):
	'''Make table based on Sales Order'''
	restaurant, menu = get_restaurant_and_menu_name(table)
	invoice = get_invoice(table)
	invoice.customer = customer
	invoice.restaurant = restaurant
	invoice.calculate_taxes_and_totals()
	# POS flow: pay the full grand total with the selected mode of payment.
	invoice.append('payments', dict(mode_of_payment=mode_of_payment, amount=invoice.grand_total))
	invoice.save()
	invoice.submit()
	frappe.msgprint(_('Invoice Created'), indicator='green', alert=True)
	return invoice.name
def item_query_restaurant(doctype='Item', txt='', searchfield='name', start=0, page_len=20, filters=None, as_dict=False):
	'''Return items that are selected in active menu of the restaurant'''
	# NOTE(review): ``filters`` must be a dict containing a 'table' key; the
	# declared default of None would raise here.  Also note the caller's
	# ``filters`` dict is mutated in place (the 'table' key is deleted).
	restaurant, menu = get_restaurant_and_menu_name(filters['table'])
	items = frappe.db.get_all('Restaurant Menu Item', ['item'], dict(parent = menu))
	del filters['table']
	filters['name'] = ('in', [d.item for d in items])
	return item_query('Item', txt, searchfield, start, page_len, filters, as_dict)
def get_restaurant_and_menu_name(table):
	'''Return (restaurant, active_menu) for the given Restaurant Table;
	throws if no table is given or the restaurant has no active menu.'''
	if not table:
		frappe.throw(_('Please select a table'))
	restaurant = frappe.db.get_value('Restaurant Table', table, 'restaurant')
	menu = frappe.db.get_value('Restaurant', restaurant, 'active_menu')
	if not menu:
		frappe.throw(_('Please set an active menu for Restaurant {0}').format(restaurant))
	return restaurant, menu
|
michalliu/OpenWrt-Firefly-Libraries
|
refs/heads/master
|
staging_dir/host/lib/python2.7/test/test_textwrap.py
|
85
|
#
# Test suite for the textwrap module.
#
# Original tests written by Greg Ward <gward@python.net>.
# Converted to PyUnit by Peter Hansen <peter@engcorp.com>.
# Currently maintained by Greg Ward.
#
# $Id$
#
import unittest
from test import test_support
from textwrap import TextWrapper, wrap, fill, dedent
class BaseTestCase(unittest.TestCase):
    '''Parent class with utility methods for textwrap tests.'''
    def show(self, textin):
        # Render the input for failure messages: a list gets one indexed
        # line per item; a string is repr()'d.
        # NOTE(review): if ``textin`` is neither a list nor a string,
        # ``result`` is never bound and this raises UnboundLocalError.
        if isinstance(textin, list):
            result = []
            for i in range(len(textin)):
                result.append(" %d: %r" % (i, textin[i]))
            result = '\n'.join(result)
        elif isinstance(textin, basestring):
            result = " %s\n" % repr(textin)
        return result
    def check(self, result, expect):
        # Compare, showing both values via show() on failure.
        self.assertEqual(result, expect,
            'expected:\n%s\nbut got:\n%s' % (
                self.show(expect), self.show(result)))
    def check_wrap(self, text, width, expect, **kwargs):
        # Wrap ``text`` at ``width`` and compare against the expected lines.
        result = wrap(text, width, **kwargs)
        self.check(result, expect)
    def check_split(self, text, expect):
        # Exercise the wrapper's internal _split() chunking directly.
        result = self.wrapper._split(text)
        self.assertEqual(result, expect,
                         "\nexpected %r\n"
                         "but got %r" % (expect, result))
class WrapTestCase(BaseTestCase):
    """Tests for textwrap.wrap()/fill(): line breaking, hyphens, em-dashes,
    whitespace handling and indent options."""

    def setUp(self):
        self.wrapper = TextWrapper(width=45)

    def test_simple(self):
        # Simple case: just words, spaces, and a bit of punctuation
        text = "Hello there, how are you this fine day? I'm glad to hear it!"

        self.check_wrap(text, 12,
                        ["Hello there,",
                         "how are you",
                         "this fine",
                         "day? I'm",
                         "glad to hear",
                         "it!"])
        self.check_wrap(text, 42,
                        ["Hello there, how are you this fine day?",
                         "I'm glad to hear it!"])
        self.check_wrap(text, 80, [text])

    def test_empty_string(self):
        # Check that wrapping the empty string returns an empty list.
        self.check_wrap("", 6, [])
        self.check_wrap("", 6, [], drop_whitespace=False)

    def test_empty_string_with_initial_indent(self):
        # Check that the empty string is not indented.
        self.check_wrap("", 6, [], initial_indent="++")
        self.check_wrap("", 6, [], initial_indent="++", drop_whitespace=False)

    def test_whitespace(self):
        # Whitespace munging and end-of-sentence detection
        # NOTE(review): fix_sentence_endings normally produces two spaces
        # after a sentence-ending '.'; runs of spaces in these literals may
        # have been collapsed in this copy -- verify against upstream.
        text = """\
This is a paragraph that already has
line breaks. But some of its lines are much longer than the others,
so it needs to be wrapped.
Some lines are \ttabbed too.
What a mess!
"""

        expect = ["This is a paragraph that already has line",
                  "breaks. But some of its lines are much",
                  "longer than the others, so it needs to be",
                  "wrapped. Some lines are tabbed too. What a",
                  "mess!"]

        wrapper = TextWrapper(45, fix_sentence_endings=True)
        result = wrapper.wrap(text)
        self.check(result, expect)

        result = wrapper.fill(text)
        self.check(result, '\n'.join(expect))

    def test_fix_sentence_endings(self):
        wrapper = TextWrapper(60, fix_sentence_endings=True)

        # SF #847346: ensure that fix_sentence_endings=True does the
        # right thing even on input short enough that it doesn't need to
        # be wrapped.
        text = "A short line. Note the single space."
        expect = ["A short line. Note the single space."]
        self.check(wrapper.wrap(text), expect)

        # Test some of the hairy end cases that _fix_sentence_endings()
        # is supposed to handle (the easy stuff is tested in
        # test_whitespace() above).
        text = "Well, Doctor? What do you think?"
        expect = ["Well, Doctor? What do you think?"]
        self.check(wrapper.wrap(text), expect)

        text = "Well, Doctor?\nWhat do you think?"
        self.check(wrapper.wrap(text), expect)

        text = 'I say, chaps! Anyone for "tennis?"\nHmmph!'
        expect = ['I say, chaps! Anyone for "tennis?" Hmmph!']
        self.check(wrapper.wrap(text), expect)

        wrapper.width = 20
        expect = ['I say, chaps!', 'Anyone for "tennis?"', 'Hmmph!']
        self.check(wrapper.wrap(text), expect)

        text = 'And she said, "Go to hell!"\nCan you believe that?'
        expect = ['And she said, "Go to',
                  'hell!" Can you',
                  'believe that?']
        self.check(wrapper.wrap(text), expect)

        wrapper.width = 60
        expect = ['And she said, "Go to hell!" Can you believe that?']
        self.check(wrapper.wrap(text), expect)

        text = 'File stdio.h is nice.'
        expect = ['File stdio.h is nice.']
        self.check(wrapper.wrap(text), expect)

    def test_wrap_short(self):
        # Wrapping to make short lines longer
        text = "This is a\nshort paragraph."

        self.check_wrap(text, 20, ["This is a short",
                                   "paragraph."])
        self.check_wrap(text, 40, ["This is a short paragraph."])

    def test_wrap_short_1line(self):
        # Test endcases
        text = "This is a short line."

        self.check_wrap(text, 30, ["This is a short line."])
        self.check_wrap(text, 30, ["(1) This is a short line."],
                        initial_indent="(1) ")

    def test_hyphenated(self):
        # Test breaking hyphenated words
        text = ("this-is-a-useful-feature-for-"
                "reformatting-posts-from-tim-peters'ly")

        self.check_wrap(text, 40,
                        ["this-is-a-useful-feature-for-",
                         "reformatting-posts-from-tim-peters'ly"])
        self.check_wrap(text, 41,
                        ["this-is-a-useful-feature-for-",
                         "reformatting-posts-from-tim-peters'ly"])
        self.check_wrap(text, 42,
                        ["this-is-a-useful-feature-for-reformatting-",
                         "posts-from-tim-peters'ly"])

    def test_hyphenated_numbers(self):
        # Test that hyphenated numbers (eg. dates) are not broken like words.
        text = ("Python 1.0.0 was released on 1994-01-26. Python 1.0.1 was\n"
                "released on 1994-02-15.")

        self.check_wrap(text, 35, ['Python 1.0.0 was released on',
                                   '1994-01-26. Python 1.0.1 was',
                                   'released on 1994-02-15.'])
        self.check_wrap(text, 40, ['Python 1.0.0 was released on 1994-01-26.',
                                   'Python 1.0.1 was released on 1994-02-15.'])

        text = "I do all my shopping at 7-11."
        self.check_wrap(text, 25, ["I do all my shopping at",
                                   "7-11."])
        self.check_wrap(text, 27, ["I do all my shopping at",
                                   "7-11."])
        self.check_wrap(text, 29, ["I do all my shopping at 7-11."])

    def test_em_dash(self):
        # Test text with em-dashes
        text = "Em-dashes should be written -- thus."
        self.check_wrap(text, 25,
                        ["Em-dashes should be",
                         "written -- thus."])

        # Probe the boundaries of the properly written em-dash,
        # ie. " -- ".
        self.check_wrap(text, 29,
                        ["Em-dashes should be written",
                         "-- thus."])
        expect = ["Em-dashes should be written --",
                  "thus."]
        self.check_wrap(text, 30, expect)
        self.check_wrap(text, 35, expect)
        self.check_wrap(text, 36,
                        ["Em-dashes should be written -- thus."])

        # The improperly written em-dash is handled too, because
        # it's adjacent to non-whitespace on both sides.
        text = "You can also do--this or even---this."
        expect = ["You can also do",
                  "--this or even",
                  "---this."]
        self.check_wrap(text, 15, expect)
        self.check_wrap(text, 16, expect)
        expect = ["You can also do--",
                  "this or even---",
                  "this."]
        self.check_wrap(text, 17, expect)
        self.check_wrap(text, 19, expect)
        expect = ["You can also do--this or even",
                  "---this."]
        self.check_wrap(text, 29, expect)
        self.check_wrap(text, 31, expect)
        expect = ["You can also do--this or even---",
                  "this."]
        self.check_wrap(text, 32, expect)
        self.check_wrap(text, 35, expect)

        # All of the above behaviour could be deduced by probing the
        # _split() method.
        text = "Here's an -- em-dash and--here's another---and another!"
        expect = ["Here's", " ", "an", " ", "--", " ", "em-", "dash", " ",
                  "and", "--", "here's", " ", "another", "---",
                  "and", " ", "another!"]
        self.check_split(text, expect)

        text = "and then--bam!--he was gone"
        expect = ["and", " ", "then", "--", "bam!", "--",
                  "he", " ", "was", " ", "gone"]
        self.check_split(text, expect)

    def test_unix_options (self):
        # Test that Unix-style command-line options are wrapped correctly.
        # Both Optik (OptionParser) and Docutils rely on this behaviour!
        text = "You should use the -n option, or --dry-run in its long form."
        self.check_wrap(text, 20,
                        ["You should use the",
                         "-n option, or --dry-",
                         "run in its long",
                         "form."])
        self.check_wrap(text, 21,
                        ["You should use the -n",
                         "option, or --dry-run",
                         "in its long form."])
        expect = ["You should use the -n option, or",
                  "--dry-run in its long form."]
        self.check_wrap(text, 32, expect)
        self.check_wrap(text, 34, expect)
        self.check_wrap(text, 35, expect)
        self.check_wrap(text, 38, expect)
        expect = ["You should use the -n option, or --dry-",
                  "run in its long form."]
        self.check_wrap(text, 39, expect)
        self.check_wrap(text, 41, expect)
        expect = ["You should use the -n option, or --dry-run",
                  "in its long form."]
        self.check_wrap(text, 42, expect)

        # Again, all of the above can be deduced from _split().
        text = "the -n option, or --dry-run or --dryrun"
        expect = ["the", " ", "-n", " ", "option,", " ", "or", " ",
                  "--dry-", "run", " ", "or", " ", "--dryrun"]
        self.check_split(text, expect)

    def test_funky_hyphens (self):
        # Screwy edge cases cooked up by David Goodger. All reported
        # in SF bug #596434.
        self.check_split("what the--hey!", ["what", " ", "the", "--", "hey!"])
        self.check_split("what the--", ["what", " ", "the--"])
        self.check_split("what the--.", ["what", " ", "the--."])
        self.check_split("--text--.", ["--text--."])

        # When I first read bug #596434, this is what I thought David
        # was talking about. I was wrong; these have always worked
        # fine. The real problem is tested in test_funky_parens()
        # below...
        self.check_split("--option", ["--option"])
        self.check_split("--option-opt", ["--option-", "opt"])
        self.check_split("foo --option-opt bar",
                         ["foo", " ", "--option-", "opt", " ", "bar"])

    def test_punct_hyphens(self):
        # Oh bother, SF #965425 found another problem with hyphens --
        # hyphenated words in single quotes weren't handled correctly.
        # In fact, the bug is that *any* punctuation around a hyphenated
        # word was handled incorrectly, except for a leading "--", which
        # was special-cased for Optik and Docutils. So test a variety
        # of styles of punctuation around a hyphenated word.
        # (Actually this is based on an Optik bug report, #813077).
        self.check_split("the 'wibble-wobble' widget",
                         ['the', ' ', "'wibble-", "wobble'", ' ', 'widget'])
        self.check_split('the "wibble-wobble" widget',
                         ['the', ' ', '"wibble-', 'wobble"', ' ', 'widget'])
        self.check_split("the (wibble-wobble) widget",
                         ['the', ' ', "(wibble-", "wobble)", ' ', 'widget'])
        self.check_split("the ['wibble-wobble'] widget",
                         ['the', ' ', "['wibble-", "wobble']", ' ', 'widget'])

    def test_funky_parens (self):
        # Second part of SF bug #596434: long option strings inside
        # parentheses.
        self.check_split("foo (--option) bar",
                         ["foo", " ", "(--option)", " ", "bar"])

        # Related stuff -- make sure parens work in simpler contexts.
        self.check_split("foo (bar) baz",
                         ["foo", " ", "(bar)", " ", "baz"])
        self.check_split("blah (ding dong), wubba",
                         ["blah", " ", "(ding", " ", "dong),",
                          " ", "wubba"])

    def test_drop_whitespace_false(self):
        # Check that drop_whitespace=False preserves whitespace.
        # SF patch #1581073
        # NOTE(review): internal runs of spaces in these literals may have
        # been collapsed in this copy -- verify against upstream.
        text = " This is a sentence with much whitespace."
        self.check_wrap(text, 10,
                        [" This is a", " ", "sentence ",
                         "with ", "much white", "space."],
                        drop_whitespace=False)

    def test_drop_whitespace_false_whitespace_only(self):
        # Check that drop_whitespace=False preserves a whitespace-only string.
        self.check_wrap(" ", 6, [" "], drop_whitespace=False)

    def test_drop_whitespace_false_whitespace_only_with_indent(self):
        # Check that a whitespace-only string gets indented (when
        # drop_whitespace is False).
        self.check_wrap(" ", 6, [" "], drop_whitespace=False,
                        initial_indent=" ")

    def test_drop_whitespace_whitespace_only(self):
        # Check drop_whitespace on a whitespace-only string.
        self.check_wrap(" ", 6, [])

    def test_drop_whitespace_leading_whitespace(self):
        # Check that drop_whitespace does not drop leading whitespace (if
        # followed by non-whitespace).
        # SF bug #622849 reported inconsistent handling of leading
        # whitespace; let's test that a bit, shall we?
        text = " This is a sentence with leading whitespace."
        self.check_wrap(text, 50,
                        [" This is a sentence with leading whitespace."])
        self.check_wrap(text, 30,
                        [" This is a sentence with", "leading whitespace."])

    def test_drop_whitespace_whitespace_line(self):
        # Check that drop_whitespace skips the whole line if a non-leading
        # line consists only of whitespace.
        text = "abcd efgh"
        # Include the result for drop_whitespace=False for comparison.
        self.check_wrap(text, 6, ["abcd", " ", "efgh"],
                        drop_whitespace=False)
        self.check_wrap(text, 6, ["abcd", "efgh"])

    def test_drop_whitespace_whitespace_only_with_indent(self):
        # Check that initial_indent is not applied to a whitespace-only
        # string. This checks a special case of the fact that dropping
        # whitespace occurs before indenting.
        self.check_wrap(" ", 6, [], initial_indent="++")

    def test_drop_whitespace_whitespace_indent(self):
        # Check that drop_whitespace does not drop whitespace indents.
        # This checks a special case of the fact that dropping whitespace
        # occurs before indenting.
        self.check_wrap("abcd efgh", 6, [" abcd", " efgh"],
                        initial_indent=" ", subsequent_indent=" ")

    # Unicode tests only exist on Python 2 builds with unicode support.
    if test_support.have_unicode:
        def test_unicode(self):
            # *Very* simple test of wrapping Unicode strings. I'm sure
            # there's more to it than this, but let's at least make
            # sure textwrap doesn't crash on Unicode input!
            text = u"Hello there, how are you today?"
            self.check_wrap(text, 50, [u"Hello there, how are you today?"])
            self.check_wrap(text, 20, [u"Hello there, how are", "you today?"])
            olines = self.wrapper.wrap(text)
            self.assertIsInstance(olines, list)
            self.assertIsInstance(olines[0], unicode)
            otext = self.wrapper.fill(text)
            self.assertIsInstance(otext, unicode)

        def test_no_split_at_umlaut(self):
            text = u"Die Empf\xe4nger-Auswahl"
            self.check_wrap(text, 13, [u"Die", u"Empf\xe4nger-", u"Auswahl"])

        def test_umlaut_followed_by_dash(self):
            text = u"aa \xe4\xe4-\xe4\xe4"
            self.check_wrap(text, 7, [u"aa \xe4\xe4-", u"\xe4\xe4"])

    def test_split(self):
        # Ensure that the standard _split() method works as advertised
        # in the comments
        text = "Hello there -- you goof-ball, use the -b option!"

        result = self.wrapper._split(text)
        self.check(result,
                   ["Hello", " ", "there", " ", "--", " ", "you", " ", "goof-",
                    "ball,", " ", "use", " ", "the", " ", "-b", " ", "option!"])

    def test_break_on_hyphens(self):
        # Ensure that the break_on_hyphens attributes work
        text = "yaba daba-doo"
        self.check_wrap(text, 10, ["yaba daba-", "doo"],
                        break_on_hyphens=True)
        self.check_wrap(text, 10, ["yaba", "daba-doo"],
                        break_on_hyphens=False)

    def test_bad_width(self):
        # Ensure that width <= 0 is caught.
        text = "Whatever, it doesn't matter."
        self.assertRaises(ValueError, wrap, text, 0)
        self.assertRaises(ValueError, wrap, text, -1)
class LongWordTestCase (BaseTestCase):
    """Wrapping behaviour for words longer than the requested width."""

    def setUp(self):
        self.wrapper = TextWrapper()
        self.text = '''\
Did you say "supercalifragilisticexpialidocious?"
How *do* you spell that odd word, anyways?
'''

    def test_break_long(self):
        # Wrap text with long words and lots of punctuation
        self.check_wrap(self.text, 30,
                        ['Did you say "supercalifragilis',
                         'ticexpialidocious?" How *do*',
                         'you spell that odd word,',
                         'anyways?'])
        self.check_wrap(self.text, 50,
                        ['Did you say "supercalifragilisticexpialidocious?"',
                         'How *do* you spell that odd word, anyways?'])

        # SF bug 797650. Prevent an infinite loop by making sure that at
        # least one character gets split off on every pass.
        # NOTE(review): the continuation lines below should each carry the
        # 15-space subsequent_indent; runs of spaces may have been collapsed
        # in this copy -- verify against upstream.
        self.check_wrap('-'*10+'hello', 10,
                        ['----------',
                         ' h',
                         ' e',
                         ' l',
                         ' l',
                         ' o'],
                        subsequent_indent = ' '*15)

        # bug 1146. Prevent a long word to be wrongly wrapped when the
        # preceding word is exactly one character shorter than the width
        self.check_wrap(self.text, 12,
                        ['Did you say ',
                         '"supercalifr',
                         'agilisticexp',
                         'ialidocious?',
                         '" How *do*',
                         'you spell',
                         'that odd',
                         'word,',
                         'anyways?'])

    def test_nobreak_long(self):
        # Test with break_long_words disabled
        self.wrapper.break_long_words = 0
        self.wrapper.width = 30
        expect = ['Did you say',
                  '"supercalifragilisticexpialidocious?"',
                  'How *do* you spell that odd',
                  'word, anyways?'
                  ]
        result = self.wrapper.wrap(self.text)
        self.check(result, expect)

        # Same thing with kwargs passed to standalone wrap() function.
        result = wrap(self.text, width=30, break_long_words=0)
        self.check(result, expect)
class IndentTestCases(BaseTestCase):
    """Tests for fill()/wrap() initial_indent and subsequent_indent."""

    # called before each test method
    def setUp(self):
        self.text = '''\
This paragraph will be filled, first without any indentation,
and then with some (including a hanging indent).'''

    def test_fill(self):
        # Test the fill() method
        expect = '''\
This paragraph will be filled, first
without any indentation, and then with
some (including a hanging indent).'''
        result = fill(self.text, 40)
        self.check(result, expect)

    def test_initial_indent(self):
        # Test initial_indent parameter
        # NOTE(review): the indent literals below look collapsed in this
        # copy (upstream uses multi-space indents) -- verify.
        expect = [" This paragraph will be filled,",
                  "first without any indentation, and then",
                  "with some (including a hanging indent)."]
        result = wrap(self.text, 40, initial_indent=" ")
        self.check(result, expect)

        expect = "\n".join(expect)
        result = fill(self.text, 40, initial_indent=" ")
        self.check(result, expect)

    def test_subsequent_indent(self):
        # Test subsequent_indent parameter
        expect = '''\
* This paragraph will be filled, first
without any indentation, and then
with some (including a hanging
indent).'''
        result = fill(self.text, 40,
                      initial_indent=" * ", subsequent_indent=" ")
        self.check(result, expect)
# Despite the similar names, DedentTestCase is *not* the inverse
# of IndentTestCase!
class DedentTestCase(unittest.TestCase):
    """Tests for textwrap.dedent(): removal of the common leading margin."""

    def assertUnchanged(self, text):
        """assert that dedent() has no effect on 'text'"""
        self.assertEqual(text, dedent(text))

    def test_dedent_nomargin(self):
        # No lines indented.
        text = "Hello there.\nHow are you?\nOh good, I'm glad."
        self.assertUnchanged(text)

        # Similar, with a blank line.
        text = "Hello there.\n\nBoo!"
        self.assertUnchanged(text)

        # Some lines indented, but overall margin is still zero.
        text = "Hello there.\n This is indented."
        self.assertUnchanged(text)

        # Again, add a blank line.
        text = "Hello there.\n\n Boo!\n"
        self.assertUnchanged(text)

    def test_dedent_even(self):
        # All lines indented by two spaces.
        text = " Hello there.\n How are ya?\n Oh good."
        expect = "Hello there.\nHow are ya?\nOh good."
        self.assertEqual(expect, dedent(text))

        # Same, with blank lines.
        text = " Hello there.\n\n How are ya?\n Oh good.\n"
        expect = "Hello there.\n\nHow are ya?\nOh good.\n"
        self.assertEqual(expect, dedent(text))

        # Now indent one of the blank lines.
        text = " Hello there.\n \n How are ya?\n Oh good.\n"
        expect = "Hello there.\n\nHow are ya?\nOh good.\n"
        self.assertEqual(expect, dedent(text))

    def test_dedent_uneven(self):
        # Lines indented unevenly.
        # NOTE(review): indentation inside these triple-quoted literals was
        # mangled in this copy; reconstructed here -- verify against upstream.
        text = '''\
        def foo():
            while 1:
                return foo
        '''
        expect = '''\
def foo():
    while 1:
        return foo
'''
        self.assertEqual(expect, dedent(text))

        # Uneven indentation with a blank line.
        text = " Foo\n Bar\n\n Baz\n"
        expect = "Foo\n Bar\n\n Baz\n"
        self.assertEqual(expect, dedent(text))

        # Uneven indentation with a whitespace-only line.
        text = " Foo\n Bar\n \n Baz\n"
        expect = "Foo\n Bar\n\n Baz\n"
        self.assertEqual(expect, dedent(text))

    # dedent() should not mangle internal tabs
    def test_dedent_preserve_internal_tabs(self):
        text = " hello\tthere\n how are\tyou?"
        expect = "hello\tthere\nhow are\tyou?"
        self.assertEqual(expect, dedent(text))

        # make sure that it preserves tabs when it's not making any
        # changes at all
        self.assertEqual(expect, dedent(expect))

    # dedent() should not mangle tabs in the margin (i.e.
    # tabs and spaces both count as margin, but are *not*
    # considered equivalent)
    def test_dedent_preserve_margin_tabs(self):
        text = " hello there\n\thow are you?"
        self.assertUnchanged(text)

        # same effect even if we have 8 spaces
        text = " hello there\n\thow are you?"
        self.assertUnchanged(text)

        # dedent() only removes whitespace that can be uniformly removed!
        text = "\thello there\n\thow are you?"
        expect = "hello there\nhow are you?"
        self.assertEqual(expect, dedent(text))

        text = " \thello there\n \thow are you?"
        self.assertEqual(expect, dedent(text))

        text = " \t hello there\n \t how are you?"
        self.assertEqual(expect, dedent(text))

        text = " \thello there\n \t how are you?"
        expect = "hello there\n how are you?"
        self.assertEqual(expect, dedent(text))
def test_main():
    """Drive all textwrap test cases through the stdlib test harness."""
    all_cases = (WrapTestCase,
                 LongWordTestCase,
                 IndentTestCases,
                 DedentTestCase)
    test_support.run_unittest(*all_cases)

if __name__ == '__main__':
    test_main()
|
manasapte/pants
|
refs/heads/master
|
contrib/android/tests/python/pants_test/contrib/android/test_android_manifest_parser.py
|
14
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants_test.util.xml_test_base import XmlTestBase
from pants.contrib.android.android_manifest_parser import AndroidManifest, AndroidManifestParser
class TestAndroidManifestParser(XmlTestBase):
  """Test the AndroidManifestParser and AndroidManifest classes."""

  # --- AndroidManifestParser.parse_manifest() ---

  def test_parse_manifest(self):
    with self.xml_file() as xml:
      parsed = AndroidManifestParser.parse_manifest(xml)
      self.assertEqual(parsed.path, xml)

  def test_bad_parse_manifest(self):
    missing_path = '/no/file/here'
    with self.assertRaises(AndroidManifestParser.BadManifestError):
      AndroidManifestParser.parse_manifest(missing_path)

  # --- AndroidManifest.package_name ---

  def test_package_name(self):
    with self.xml_file() as xml:
      parsed = AndroidManifestParser.parse_manifest(xml)
      self.assertEqual(parsed.package_name, 'org.pantsbuild.example.hello')

  def test_missing_manifest_element(self):
    with self.xml_file(manifest_element='some_other_element') as xml:
      with self.assertRaises(AndroidManifestParser.BadManifestError):
        AndroidManifestParser.parse_manifest(xml)

  def test_missing_package_attribute(self):
    with self.xml_file(package_attribute='bad_value') as xml:
      with self.assertRaises(AndroidManifestParser.BadManifestError):
        AndroidManifestParser.parse_manifest(xml)

  def test_weird_package_name(self):
    # Unexpected package names are accepted here; the classes that consume
    # the parsed info are responsible for verifying them.
    with self.xml_file(package_value='cola') as xml:
      parsed = AndroidManifestParser.parse_manifest(xml)
      self.assertEqual(parsed.package_name, 'cola')

  # --- AndroidManifest.target_sdk ---

  def test_target_sdk(self):
    with self.xml_file() as xml:
      parsed = AndroidManifestParser.parse_manifest(xml)
      self.assertEqual(parsed.target_sdk, '19')

  # The next four tests show that AndroidManifest.target_sdk fails silently
  # and returns None when the SDK info cannot be located.

  def test_no_uses_sdk_element(self):
    with self.xml_file(uses_sdk_element='something-random') as xml:
      parsed = AndroidManifestParser.parse_manifest(xml)
      self.assertIsNone(parsed.target_sdk)

  def test_no_target_sdk_value(self):
    with self.xml_file(android_attribute='android:bad_value') as xml:
      parsed = AndroidManifestParser.parse_manifest(xml)
      self.assertIsNone(parsed.target_sdk)

  def test_no_android_part(self):
    with self.xml_file(android_attribute='unrelated:targetSdkVersion') as xml:
      parsed = AndroidManifestParser.parse_manifest(xml)
      self.assertEqual(parsed.package_name, 'org.pantsbuild.example.hello')

  def test_missing_whole_targetsdk(self):
    with self.xml_file(android_attribute='unrelated:cola') as xml:
      parsed = AndroidManifestParser.parse_manifest(xml)
      self.assertIsNone(parsed.target_sdk)

  # --- AndroidManifest() ---

  def test_android_manifest(self):
    with self.xml_file() as xml:
      built = AndroidManifest(xml, '19', 'com.foo.bar')
      self.assertEqual(built.path, xml)
|
Ziemin/telepathy-gabble
|
refs/heads/untested-otr
|
tests/twisted/tubes/accept-muc-stream-tube.py
|
2
|
"""Test IBB stream tube support in the context of a MUC."""
import sys
import dbus
from servicetest import call_async, EventPattern, assertEquals, assertSameSets
from gabbletest import acknowledge_iq, make_muc_presence, send_error_reply, disconnect_conn
import constants as cs
import ns
import tubetestutil as t
from bytestream import create_from_si_offer, announce_socks5_proxy, BytestreamS5BRelay, BytestreamS5BRelayBugged
from twisted.words.xish import xpath
# Tube parameters Bob offers below; the <parameter> elements built in test()
# must decode back to exactly this dictionary.
sample_parameters = dbus.Dictionary({
    's': 'hello',
    'ay': dbus.ByteArray('hello'),
    'u': dbus.UInt32(123),
    'i': dbus.Int32(-123),
    }, signature='sv')
def test(q, bus, conn, stream, bytestream_cls,
        address_type, access_control, access_control_param):
    """Join a MUC, receive a stream tube offered by member 'bob', accept it,
    then exercise three connection outcomes over the tube: data exchange and
    remote close, remote refusal, and local socket disconnect."""
    if bytestream_cls in [BytestreamS5BRelay, BytestreamS5BRelayBugged]:
        # disable SOCKS5 relay tests because proxy can't be used with muc
        # contacts atm
        return

    if access_control == cs.SOCKET_ACCESS_CONTROL_CREDENTIALS:
        print "Skip Socket_Access_Control_Credentials (fdo #45445)"
        return

    iq_event, disco_event = q.expect_many(
        EventPattern('stream-iq', to=None, query_ns='vcard-temp',
            query_name='vCard'),
        EventPattern('stream-iq', to='localhost', query_ns=ns.DISCO_ITEMS))

    acknowledge_iq(stream, iq_event.stanza)
    announce_socks5_proxy(q, stream, disco_event.stanza)

    # join the muc
    call_async(q, conn.Requests, 'CreateChannel', {
        cs.CHANNEL_TYPE: cs.CHANNEL_TYPE_TEXT,
        cs.TARGET_HANDLE_TYPE: cs.HT_ROOM,
        cs.TARGET_ID: 'chat@conf.localhost'})

    q.expect_many(
        EventPattern('dbus-signal', signal='MembersChangedDetailed',
            predicate=lambda e:
                e.args[0] == [] and # added
                e.args[1] == [] and # removed
                e.args[2] == [] and # local pending
                len(e.args[3]) == 1 and # remote pending
                e.args[4].get('actor', 0) == 0 and
                e.args[4].get('change-reason', 0) == 0 and
                e.args[4]['contact-ids'][e.args[3][0]] == 'chat@conf.localhost/test'),
        EventPattern('stream-presence', to='chat@conf.localhost/test'))

    # Send presence for other member of room.
    stream.send(make_muc_presence('owner', 'moderator', 'chat@conf.localhost', 'bob'))

    # Send presence for own membership of room.
    stream.send(make_muc_presence('none', 'participant', 'chat@conf.localhost', 'test'))

    event = q.expect('dbus-signal', signal='MembersChangedDetailed',
        predicate=lambda e:
            len(e.args[0]) == 2 and # added
            e.args[1] == [] and # removed
            e.args[2] == [] and # local pending
            e.args[3] == [] and # remote pending
            e.args[4].get('actor', 0) == 0 and
            e.args[4].get('change-reason', 0) == 0 and
            set([e.args[4]['contact-ids'][h] for h in e.args[0]]) ==
            set(['chat@conf.localhost/test', 'chat@conf.localhost/bob']))

    # Remember bob's handle; it must match the tube channel's initiator below.
    for h in event.args[0]:
        if event.args[4]['contact-ids'][h] == 'chat@conf.localhost/bob':
            bob_handle = h

    event = q.expect('dbus-return', method='CreateChannel')

    # Bob offers a stream tube
    stream_tube_id = 666
    presence = make_muc_presence('owner', 'moderator', 'chat@conf.localhost', 'bob')
    tubes = presence.addElement((ns.TUBES, 'tubes'))
    tube = tubes.addElement((None, 'tube'))
    tube['type'] = 'stream'
    tube['service'] = 'echo'
    tube['id'] = str(stream_tube_id)
    # Parameters mirror the module-level sample_parameters dictionary.
    parameters = tube.addElement((None, 'parameters'))
    parameter = parameters.addElement((None, 'parameter'))
    parameter['name'] = 's'
    parameter['type'] = 'str'
    parameter.addContent('hello')
    parameter = parameters.addElement((None, 'parameter'))
    parameter['name'] = 'ay'
    parameter['type'] = 'bytes'
    parameter.addContent('aGVsbG8=')
    parameter = parameters.addElement((None, 'parameter'))
    parameter['name'] = 'u'
    parameter['type'] = 'uint'
    parameter.addContent('123')
    parameter = parameters.addElement((None, 'parameter'))
    parameter['name'] = 'i'
    parameter['type'] = 'int'
    parameter.addContent('-123')
    stream.send(presence)

    # text channel
    new_event = q.expect('dbus-signal', signal='NewChannels')
    channels = new_event.args[0]
    assert len(channels) == 1
    path, props = channels[0]
    assert props[cs.CHANNEL_TYPE] == cs.CHANNEL_TYPE_TEXT

    def new_chan_predicate(e):
        # Match only the stream-tube channel announcement.
        path, props = e.args[0][0]
        return props[cs.CHANNEL_TYPE] == cs.CHANNEL_TYPE_STREAM_TUBE

    # tube channel is announced
    new_event = q.expect('dbus-signal', signal='NewChannels',
        predicate=new_chan_predicate)

    channels = new_event.args[0]
    assert len(channels) == 1
    path, props = channels[0]
    assert props[cs.CHANNEL_TYPE] == cs.CHANNEL_TYPE_STREAM_TUBE
    assert props[cs.INITIATOR_HANDLE] == bob_handle
    assert props[cs.INITIATOR_ID] == 'chat@conf.localhost/bob'
    assertSameSets([cs.CHANNEL_IFACE_GROUP, cs.CHANNEL_IFACE_TUBE],
        props[cs.INTERFACES])
    assert props[cs.REQUESTED] == False
    assert props[cs.TARGET_ID] == 'chat@conf.localhost'
    assert props[cs.STREAM_TUBE_SERVICE] == 'echo'
    assert props[cs.TUBE_PARAMETERS] == {'s': 'hello', 'ay': 'hello', 'u': 123, 'i': -123}
    assert access_control in \
        props[cs.STREAM_TUBE_SUPPORTED_SOCKET_TYPES][address_type]

    tube_chan = bus.get_object(conn.bus_name, path)
    tube_props = tube_chan.GetAll(cs.CHANNEL_IFACE_TUBE, dbus_interface=cs.PROPERTIES_IFACE,
        byte_arrays=True)
    tube_iface = dbus.Interface(tube_chan, cs.CHANNEL_TYPE_STREAM_TUBE)

    assert tube_props['Parameters'] == sample_parameters
    assert tube_props['State'] == cs.TUBE_CHANNEL_STATE_LOCAL_PENDING

    # Accept the tube
    call_async(q, tube_iface, 'Accept',
        address_type, access_control, access_control_param, byte_arrays=True)

    accept_return_event, _ = q.expect_many(
        EventPattern('dbus-return', method='Accept'),
        EventPattern('dbus-signal', signal='TubeChannelStateChanged',
            args=[2]))

    address = accept_return_event.value[0]

    socket_event, si_event, conn_id = t.connect_to_cm_socket(q, 'chat@conf.localhost/bob',
        address_type, address, access_control, access_control_param)

    protocol = socket_event.protocol
    protocol.sendData("hello initiator")

    def accept_tube_si_connection():
        # Answer Bob's SI offer and open the bytestream on our side.
        bytestream, profile = create_from_si_offer(stream, q, bytestream_cls, si_event.stanza,
            'chat@conf.localhost/test')

        assert profile == ns.TUBES

        muc_stream_node = xpath.queryForNodes('/iq/si/muc-stream[@xmlns="%s"]' %
            ns.TUBES, si_event.stanza)[0]
        assert muc_stream_node is not None
        assert muc_stream_node['tube'] == str(stream_tube_id)

        # set the real jid of the target as 'to' because the XMPP server changes
        # it when delivering the IQ
        result, si = bytestream.create_si_reply(si_event.stanza, 'test@localhost/Resource')
        si.addElement((ns.TUBES, 'tube'))
        stream.send(result)

        bytestream.wait_bytestream_open()
        return bytestream

    bytestream = accept_tube_si_connection()

    binary = bytestream.get_data()
    assert binary == 'hello initiator'

    # reply on the socket
    bytestream.send_data('hi joiner!')

    q.expect('socket-data', protocol=protocol, data="hi joiner!")

    # peer closes the bytestream
    bytestream.close()
    e = q.expect('dbus-signal', signal='ConnectionClosed')
    assertEquals(conn_id, e.args[0])
    assertEquals(cs.CONNECTION_LOST, e.args[1])

    # establish another tube connection
    socket_event, si_event, conn_id = t.connect_to_cm_socket(q, 'chat@conf.localhost/bob',
        address_type, address, access_control, access_control_param)

    # bytestream is refused
    send_error_reply(stream, si_event.stanza)
    e, _ = q.expect_many(
        EventPattern('dbus-signal', signal='ConnectionClosed'),
        EventPattern('socket-disconnected'))
    assertEquals(conn_id, e.args[0])
    assertEquals(cs.CONNECTION_REFUSED, e.args[1])

    # establish another tube connection
    socket_event, si_event, conn_id = t.connect_to_cm_socket(q, 'chat@conf.localhost/bob',
        address_type, address, access_control, access_control_param)

    protocol = socket_event.protocol
    bytestream = accept_tube_si_connection()

    # disconnect local socket
    protocol.transport.loseConnection()
    e, _ = q.expect_many(
        EventPattern('dbus-signal', signal='ConnectionClosed'),
        EventPattern('socket-disconnected'))
    assertEquals(conn_id, e.args[0])
    assertEquals(cs.CANCELLED, e.args[1])
# OK, we're done
disconnect_conn(q, conn, stream)
if __name__ == '__main__':
t.exec_stream_tube_test(test)
|
handroissuazo/tensorflow
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/langgreekmodel.py
|
2762
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9

# Character Mapping Table:
# One entry per ISO-8859-7 (Latin-7) byte value 0x00-0xFF, 16 per row; the
# trailing comment on each row is the hex value of the row's first byte.
# Values below 252 are frequency-rank "orders" for Greek letters (lower =
# more frequent); presumably they index into GreekLangModel — confirm
# against chardet's single-byte charset prober before relying on this.
Latin7_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40
 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50
253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60
 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90
253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0
253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0
110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0
 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0
124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0
 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
)
# Character mapping for the windows-1253 encoding. Identical to the Latin-7
# table above except for a few symbol slots in the 0xa0-0xbf rows (the two
# encodings differ only in that region).
win1253_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40
 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50
253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60
 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90
253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0
253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0
110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0
 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0
124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0
 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 98.2851%
# first 1024 sequences:1.7001%
# rest sequences: 0.0359%
# negative sequences: 0.0148%
# Flat table of 128 x 32 likelihood scores (3 = very frequent pair,
# 0 = rare/negative). Presumably indexed by consecutive character "orders"
# from the mapping tables above — TODO confirm against chardet's
# SingleByteCharSetProber before relying on the exact indexing scheme.
GreekLangModel = (
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0,
3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0,
2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0,
0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0,
2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0,
2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0,
0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0,
2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0,
0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0,
3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0,
3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0,
2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0,
2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0,
0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0,
0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0,
0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2,
0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0,
0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2,
0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0,
0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2,
0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2,
0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,
0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2,
0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0,
0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0,
0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,
0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0,
0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2,
0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0,
0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2,
0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2,
0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2,
0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,
0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1,
0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2,
0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2,
0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2,
0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,
0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0,
0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,
0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0,
0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0,
0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)
# Greek model for the ISO-8859-7 (Latin-7) encoding: pairs that encoding's
# byte->order map with the shared GreekLangModel precedence matrix.
Latin7GreekModel = {
  'charToOrderMap': Latin7_CharToOrderMap,
  'precedenceMatrix': GreekLangModel,
  'mTypicalPositiveRatio': 0.982851,
  'keepEnglishLetter': False,
  'charsetName': "ISO-8859-7"
}

# Same language model, keyed to the windows-1253 byte->order map instead.
Win1253GreekModel = {
  'charToOrderMap': win1253_CharToOrderMap,
  'precedenceMatrix': GreekLangModel,
  'mTypicalPositiveRatio': 0.982851,
  'keepEnglishLetter': False,
  'charsetName': "windows-1253"
}
# flake8: noqa
|
windskyer/nova
|
refs/heads/master
|
nova/api/openstack/compute/schemas/image_metadata.py
|
95
|
# Copyright 2014 IBM Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from nova.api.validation import parameter_types
# Request body schema for the image-metadata "create/replace all" action:
# a single required 'metadata' dict, validated by the shared metadata type.
create = {
    'type': 'object',
    'properties': {
        'metadata': parameter_types.metadata
    },
    'required': ['metadata'],
    'additionalProperties': False,
}

# Same key/value constraints as the generic metadata type, but restricted to
# exactly one entry (used when a single metadata item is addressed).
single_metadata = copy.deepcopy(parameter_types.metadata)
single_metadata.update({
    'minProperties': 1,
    'maxProperties': 1
})

# Request body schema for updating one metadata item: a required 'meta'
# object holding exactly one key/value pair.
update = {
    'type': 'object',
    'properties': {
        'meta': single_metadata
    },
    'required': ['meta'],
    'additionalProperties': False,
}

# Updating the whole metadata set takes the same body shape as create.
update_all = create
|
rowemoore/odoo
|
refs/heads/8.0
|
addons/hr_expense/tests/test_journal_entries.py
|
251
|
from openerp.tests.common import TransactionCase
from openerp import netsvc, workflow
class TestCheckJournalEntry(TransactionCase):
    """
    Check journal entries when the expense product is having tax which is tax included.

    A 700.00 expense line with a 10% price-included purchase tax should post a
    move with a 700.00 payable credit, a 636.36 base debit (the line carrying
    the base tax code) and a 63.64 tax debit.
    """

    def setUp(self):
        super(TestCheckJournalEntry, self).setUp()
        cr, uid = self.cr, self.uid
        self.expense_obj = self.registry('hr.expense.expense')
        self.exp_line_obj = self.registry('hr.expense.line')
        self.product_obj = self.registry('product.product')
        self.tax_obj = self.registry('account.tax')
        self.code_obj = self.registry('account.tax.code')
        _, self.product_id = self.registry("ir.model.data").get_object_reference(cr, uid, "hr_expense", "air_ticket")
        _, self.employee_id = self.registry("ir.model.data").get_object_reference(cr, uid, "hr", "employee_mit")
        self.base_code_id = self.code_obj.create(cr, uid, {'name': 'Expense Base Code'})
        # 10% purchase tax, price-included, reporting its base amount on the
        # tax code created above (base_sign -1).
        self.tax_id = self.tax_obj.create(cr, uid, {
            'name': 'Expense 10%',
            'amount': 0.10,
            'type': 'percent',
            'type_tax_use': 'purchase',
            'price_include': True,
            'base_code_id': self.base_code_id,
            'base_sign': -1,
        })
        self.product_obj.write(cr, uid, self.product_id, {'supplier_taxes_id': [(6, 0, [self.tax_id])]})
        self.expense_id = self.expense_obj.create(cr, uid, {
            'name': 'Expense for Minh Tran',
            'employee_id': self.employee_id,
        })
        # 700.00 tax-included line: expected split is 636.36 base + 63.64 tax.
        self.exp_line_obj.create(cr, uid, {
            'name': 'Car Travel Expenses',
            'product_id': self.product_id,
            'unit_amount': 700.00,
            'expense_id': self.expense_id
        })

    def test_journal_entry(self):
        cr, uid = self.cr, self.uid
        # Submit to Manager
        workflow.trg_validate(uid, 'hr.expense.expense', self.expense_id, 'confirm', cr)
        # Approve
        workflow.trg_validate(uid, 'hr.expense.expense', self.expense_id, 'validate', cr)
        # Create Expense Entries
        workflow.trg_validate(uid, 'hr.expense.expense', self.expense_id, 'done', cr)
        self.expense = self.expense_obj.browse(cr, uid, self.expense_id)
        self.assertEquals(self.expense.state, 'done', 'Expense is not in Waiting Payment state')
        self.assertTrue(self.expense.account_move_id.id, 'Expense Journal Entry is not created')
        for line in self.expense.account_move_id.line_id:
            if line.credit:
                # The single credit line is the full, tax-included payable.
                self.assertEquals(line.credit, 700.00, 'Expense Payable Amount is not matched for journal item')
            else:
                if line.tax_code_id:
                    # Only the base line carries a tax code here (only
                    # base_code_id was set on the tax in setUp), so 636.36 is
                    # the tax *base*. BUG FIX: the failure messages of these
                    # two branches were swapped in the original test.
                    self.assertEquals(line.debit, 636.36, 'Tax Base Amount is not matched for journal item')
                else:
                    self.assertEquals(line.debit, 63.64, 'Tax Amount is not matched for journal item')
|
qma/pants
|
refs/heads/master
|
src/python/pants/goal/run_tracker.py
|
2
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import json
import multiprocessing
import os
import sys
import threading
import time
import uuid
from contextlib import contextmanager
import requests
from pants.base.build_environment import get_pants_cachedir
from pants.base.run_info import RunInfo
from pants.base.worker_pool import SubprocPool, WorkerPool
from pants.base.workunit import WorkUnit
from pants.goal.aggregated_timings import AggregatedTimings
from pants.goal.artifact_cache_stats import ArtifactCacheStats
from pants.reporting.report import Report
from pants.stats.statsdb import StatsDBFactory
from pants.subsystem.subsystem import Subsystem
from pants.util.dirutil import relative_symlink, safe_file_dump
class RunTracker(Subsystem):
  """Tracks and times the execution of a pants run.

  Also manages background work.

  Use like this:

  run_tracker.start()
  with run_tracker.new_workunit('compile'):
    with run_tracker.new_workunit('java'):
      ...
    with run_tracker.new_workunit('scala'):
      ...
  run_tracker.close()

  Can track execution against multiple 'roots', e.g., one for the main thread and another for
  background threads.
  """
  options_scope = 'run-tracker'

  # The name of the tracking root for the main thread (and the foreground worker threads).
  DEFAULT_ROOT_NAME = 'main'

  # The name of the tracking root for the background worker threads.
  BACKGROUND_ROOT_NAME = 'background'

  @classmethod
  def subsystem_dependencies(cls):
    return (StatsDBFactory,)

  @classmethod
  def register_options(cls, register):
    register('--stats-upload-url', advanced=True, default=None,
             help='Upload stats to this URL on run completion.')
    register('--stats-upload-timeout', advanced=True, type=int, default=2,
             help='Wait at most this many seconds for the stats upload to complete.')
    register('--num-foreground-workers', advanced=True, type=int,
             default=multiprocessing.cpu_count(),
             help='Number of threads for foreground work.')
    register('--num-background-workers', advanced=True, type=int,
             default=multiprocessing.cpu_count(),
             help='Number of threads for background work.')

  def __init__(self, *args, **kwargs):
    super(RunTracker, self).__init__(*args, **kwargs)
    run_timestamp = time.time()
    cmd_line = ' '.join(['pants'] + sys.argv[1:])

    # run_id is safe for use in paths.
    millis = int((run_timestamp * 1000) % 1000)
    run_id = 'pants_run_{}_{}_{}'.format(
      time.strftime('%Y_%m_%d_%H_%M_%S', time.localtime(run_timestamp)), millis,
      uuid.uuid4().hex)

    info_dir = os.path.join(self.get_options().pants_workdir, self.options_scope)
    self.run_info_dir = os.path.join(info_dir, run_id)
    self.run_info = RunInfo(os.path.join(self.run_info_dir, 'info'))
    self.run_info.add_basic_info(run_id, run_timestamp)
    self.run_info.add_info('cmd_line', cmd_line)

    # Create a 'latest' symlink, after we add_infos, so we're guaranteed that the file exists.
    link_to_latest = os.path.join(os.path.dirname(self.run_info_dir), 'latest')
    relative_symlink(self.run_info_dir, link_to_latest)

    # Time spent in a workunit, including its children.
    self.cumulative_timings = AggregatedTimings(os.path.join(self.run_info_dir,
                                                             'cumulative_timings'))

    # Time spent in a workunit, not including its children.
    self.self_timings = AggregatedTimings(os.path.join(self.run_info_dir, 'self_timings'))

    # Hit/miss stats for the artifact cache.
    self.artifact_cache_stats = \
      ArtifactCacheStats(os.path.join(self.run_info_dir, 'artifact_cache_stats'))

    # Number of threads for foreground work.
    self._num_foreground_workers = self.get_options().num_foreground_workers

    # Number of threads for background work.
    self._num_background_workers = self.get_options().num_background_workers

    # We report to this Report.
    self.report = None

    # self._threadlocal.current_workunit contains the current workunit for the calling thread.
    # Note that multiple threads may share a name (e.g., all the threads in a pool).
    self._threadlocal = threading.local()

    # For main thread work. Created on start().
    self._main_root_workunit = None

    # For background work. Created lazily if needed.
    self._background_worker_pool = None
    self._background_root_workunit = None

    # Trigger subproc pool init while our memory image is still clean (see SubprocPool docstring).
    SubprocPool.set_num_processes(self._num_foreground_workers)
    SubprocPool.foreground()

    self._aborted = False

  def register_thread(self, parent_workunit):
    """Register the parent workunit for all work in the calling thread.

    Multiple threads may have the same parent (e.g., all the threads in a pool).
    """
    self._threadlocal.current_workunit = parent_workunit

  def is_under_main_root(self, workunit):
    """Is the workunit running under the main thread's root."""
    return workunit.root() == self._main_root_workunit

  def start(self, report):
    """Start tracking this pants run.

    report: an instance of pants.reporting.Report."""
    self.report = report
    self.report.open()

    self._main_root_workunit = WorkUnit(run_info_dir=self.run_info_dir, parent=None,
                                        name=RunTracker.DEFAULT_ROOT_NAME, cmd=None)
    self.register_thread(self._main_root_workunit)
    self._main_root_workunit.start()
    self.report.start_workunit(self._main_root_workunit)

  def set_root_outcome(self, outcome):
    """Useful for setup code that doesn't have a reference to a workunit."""
    self._main_root_workunit.set_outcome(outcome)

  @contextmanager
  def new_workunit(self, name, labels=None, cmd='', log_config=None):
    """Creates a (hierarchical) subunit of work for the purpose of timing and reporting.

    - name: A short name for this work. E.g., 'resolve', 'compile', 'scala', 'zinc'.
    - labels: An optional iterable of labels. The reporters can use this to decide how to
              display information about this work.
    - cmd: An optional longer string representing this work.
           E.g., the cmd line of a compiler invocation.
    - log_config: An optional tuple WorkUnit.LogConfig of task-level options affecting reporting.

    Use like this:

    with run_tracker.new_workunit(name='compile', labels=[WorkUnitLabel.TASK]) as workunit:
      <do scoped work here>
      <set the outcome on workunit if necessary>

    Note that the outcome will automatically be set to failure if an exception is raised
    in a workunit, and to success otherwise, so usually you only need to set the
    outcome explicitly if you want to set it to warning.
    """
    parent = self._threadlocal.current_workunit
    with self.new_workunit_under_parent(name, parent=parent, labels=labels, cmd=cmd,
                                        log_config=log_config) as workunit:
      self._threadlocal.current_workunit = workunit
      try:
        yield workunit
      finally:
        # Restore the caller's workunit even if the body raised.
        self._threadlocal.current_workunit = parent

  @contextmanager
  def new_workunit_under_parent(self, name, parent, labels=None, cmd='', log_config=None):
    """Creates a (hierarchical) subunit of work for the purpose of timing and reporting.

    - name: A short name for this work. E.g., 'resolve', 'compile', 'scala', 'zinc'.
    - parent: The new workunit is created under this parent.
    - labels: An optional iterable of labels. The reporters can use this to decide how to
              display information about this work.
    - cmd: An optional longer string representing this work.
           E.g., the cmd line of a compiler invocation.

    Task code should not typically call this directly.
    """
    workunit = WorkUnit(run_info_dir=self.run_info_dir, parent=parent, name=name, labels=labels,
                        cmd=cmd, log_config=log_config)
    workunit.start()

    outcome = WorkUnit.FAILURE  # Default to failure we will override if we get success/abort.
    try:
      self.report.start_workunit(workunit)
      yield workunit
    except KeyboardInterrupt:
      outcome = WorkUnit.ABORTED
      self._aborted = True
      raise
    else:
      outcome = WorkUnit.SUCCESS
    finally:
      workunit.set_outcome(outcome)
      self.end_workunit(workunit)

  def log(self, level, *msg_elements):
    """Log a message against the current workunit."""
    self.report.log(self._threadlocal.current_workunit, level, *msg_elements)

  @classmethod
  def post_stats(cls, url, stats, timeout=2):
    """POST stats to the given url.

    :return: True if upload was successful, False otherwise.
    """
    def error(msg):
      # Report already closed, so just print error.
      print('WARNING: Failed to upload stats to {} due to {}'.format(url, msg),
            file=sys.stderr)
      return False

    # TODO(benjy): The upload protocol currently requires separate top-level params, with JSON
    # values.  Probably better for there to be one top-level JSON value, namely json.dumps(stats).
    # But this will first require changing the upload receiver at every shop that uses this
    # (probably only Foursquare at present).
    params = {k: json.dumps(v) for (k, v) in stats.items()}
    try:
      r = requests.post(url, data=params, timeout=timeout)
      if r.status_code != requests.codes.ok:
        return error("HTTP error code: {}".format(r.status_code))
    except Exception as e:  # Broad catch - we don't want to fail the build over upload errors.
      return error("Error: {}".format(e))
    return True

  def store_stats(self):
    """Store stats about this run in local and optionally remote stats dbs."""
    stats = {
      'run_info': self.run_info.get_as_dict(),
      'cumulative_timings': self.cumulative_timings.get_all(),
      'self_timings': self.self_timings.get_all(),
      'artifact_cache_stats': self.artifact_cache_stats.get_all()
    }

    # Dump individual stat file.
    # TODO(benjy): Do we really need these, once the statsdb is mature?
    stats_file = os.path.join(get_pants_cachedir(), 'stats',
                              '{}.json'.format(self.run_info.get_info('id')))
    safe_file_dump(stats_file, json.dumps(stats))

    # Add to local stats db.
    StatsDBFactory.global_instance().get_db().insert_stats(stats)

    # Upload to remote stats db.
    stats_url = self.get_options().stats_upload_url
    if stats_url:
      self.post_stats(stats_url, stats, timeout=self.get_options().stats_upload_timeout)

  # Maps a WorkUnit outcome (used as an index) to the report level it is logged at.
  _log_levels = [Report.ERROR, Report.ERROR, Report.WARN, Report.INFO, Report.INFO]

  def end(self):
    """This pants run is over, so stop tracking it.

    Note: If end() has been called once, subsequent calls are no-ops.
    """
    # NOTE(review): nothing in this body actually guards against a second call;
    # the no-op claim above relies on callers invoking end() only once - confirm.
    if self._background_worker_pool:
      if self._aborted:
        self.log(Report.INFO, "Aborting background workers.")
        self._background_worker_pool.abort()
      else:
        self.log(Report.INFO, "Waiting for background workers to finish.")
        self._background_worker_pool.shutdown()
      self.end_workunit(self._background_root_workunit)

    SubprocPool.shutdown(self._aborted)

    # Run a dummy work unit to write out one last timestamp.
    with self.new_workunit("complete"):
      pass

    self.end_workunit(self._main_root_workunit)

    outcome = self._main_root_workunit.outcome()
    if self._background_root_workunit:
      outcome = min(outcome, self._background_root_workunit.outcome())
    outcome_str = WorkUnit.outcome_string(outcome)
    log_level = RunTracker._log_levels[outcome]
    self.log(log_level, outcome_str)

    if self.run_info.get_info('outcome') is None:
      # If the goal is clean-all then the run info dir no longer exists, so ignore that error.
      self.run_info.add_info('outcome', outcome_str, ignore_errors=True)

    self.report.close()
    self.store_stats()

  def end_workunit(self, workunit):
    """Finish a workunit and record its timings against this run."""
    self.report.end_workunit(workunit)
    path, duration, self_time, is_tool = workunit.end()
    self.cumulative_timings.add_timing(path, duration, is_tool)
    self.self_timings.add_timing(path, self_time, is_tool)

  def get_background_root_workunit(self):
    """Return the root workunit for background work, creating it on first use."""
    if self._background_root_workunit is None:
      # BUG FIX: previously hard-coded the 'background' literal, leaving the
      # BACKGROUND_ROOT_NAME constant unused (start() already uses
      # DEFAULT_ROOT_NAME for the main root).
      self._background_root_workunit = WorkUnit(run_info_dir=self.run_info_dir, parent=None,
                                                name=RunTracker.BACKGROUND_ROOT_NAME, cmd=None)
      self._background_root_workunit.start()
      self.report.start_workunit(self._background_root_workunit)
    return self._background_root_workunit

  def background_worker_pool(self):
    """Return the lazily-created worker pool to which background work is submitted."""
    if self._background_worker_pool is None:  # Initialize lazily.
      self._background_worker_pool = WorkerPool(parent_workunit=self.get_background_root_workunit(),
                                                run_tracker=self,
                                                num_workers=self._num_background_workers)
    return self._background_worker_pool
|
lepistone/odoo
|
refs/heads/master
|
addons/product_email_template/__openerp__.py
|
65
|
# -*- coding: utf-8 -*-
# Addon manifest for the Product Email Template module.
{
    'name': 'Product Email Template',
    'depends': ['account'],
    'author': 'OpenERP SA',
    'category': 'Accounting & Finance',
    # BUG FIX: the description had a stray trailing apostrophe after
    # "customers" and the typo "to be send".
    'description': """
Add email templates to products to be sent on invoice confirmation
==================================================================

With this module, link your products to a template to send complete information and tools to your customer.
For instance when invoicing a training, the training agenda and materials will automatically be sent to your customers.
""",
    'website': 'http://www.openerp.com',
    'demo': [
        'data/product_demo.xml',
    ],
    'data': [
        'views/product_view.xml',
        'views/email_template_view.xml',
    ],
    'installable': True,
    'auto_install': False,
}
|
nnethercote/servo
|
refs/heads/master
|
components/script/dom/bindings/codegen/parser/tests/test_putForwards.py
|
142
|
def WebIDLTest(parser, harness):
    """Every fragment below misuses [PutForwards] in a distinct way and must
    be rejected by the parser:

      1. target attribute's type is not an interface (long)
      2. target interface has no attribute with the forwarded name
      3. [PutForwards] on a writable (non-readonly) attribute
      4. [PutForwards] on a static attribute
      5. [PutForwards] inside a callback interface
      6. a [PutForwards] forwarding cycle (I.A -> J.C -> K.D -> I.A)
    """
    bad_fragments = [
        """
        interface I {
          [PutForwards=B] readonly attribute long A;
        };
        """,
        """
        interface I {
          [PutForwards=B] readonly attribute J A;
        };
        interface J {
        };
        """,
        """
        interface I {
          [PutForwards=B] attribute J A;
        };
        interface J {
          attribute long B;
        };
        """,
        """
        interface I {
          [PutForwards=B] static readonly attribute J A;
        };
        interface J {
          attribute long B;
        };
        """,
        """
        callback interface I {
          [PutForwards=B] readonly attribute J A;
        };
        interface J {
          attribute long B;
        };
        """,
        """
        interface I {
          [PutForwards=C] readonly attribute J A;
          [PutForwards=C] readonly attribute J B;
        };
        interface J {
          [PutForwards=D] readonly attribute K C;
        };
        interface K {
          [PutForwards=A] readonly attribute I D;
        };
        """,
    ]

    for index, fragment in enumerate(bad_fragments):
        if index > 0:
            # Each fragment redefines interface I, so start from a fresh parser.
            parser = parser.reset()
        threw = False
        try:
            parser.parse(fragment)
            parser.finish()
        # Was a bare `except:` (which would also swallow KeyboardInterrupt /
        # SystemExit); the parser's errors are ordinary Exception subclasses.
        except Exception:
            threw = True
        harness.ok(threw, "Should have thrown.")
|
evgchz/scikit-learn
|
refs/heads/master
|
sklearn/feature_extraction/tests/test_image.py
|
12
|
# Authors: Emmanuelle Gouillart <emmanuelle.gouillart@normalesup.org>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# License: BSD 3 clause
import numpy as np
import scipy as sp
from scipy import ndimage
from nose.tools import assert_equal, assert_true
from numpy.testing import assert_raises
from ..image import img_to_graph, grid_to_graph
from ..image import (extract_patches_2d, reconstruct_from_patches_2d,
PatchExtractor, extract_patches)
from ...utils.graph import connected_components
def test_img_to_graph():
    """x and y gradient images must produce structurally identical graphs."""
    x, y = np.mgrid[:4, :4] - 10
    grad_x = img_to_graph(x)
    grad_y = img_to_graph(y)
    assert_equal(grad_x.nnz, grad_y.nnz)
    # Negative elements are the diagonal: the elements of the original
    # image. Positive elements are the values of the gradient, they
    # should all be equal on grad_x and grad_y.
    np.testing.assert_array_equal(grad_x.data[grad_x.data > 0],
                                  grad_y.data[grad_y.data > 0])
def test_grid_to_graph():
    """grid_to_graph: connectivity, mask dtype handling and graph dtype.

    Fix: ``np.bool`` / ``np.int`` / ``np.float`` were deprecated aliases of
    the builtins and were removed in numpy >= 1.24; use the builtins.
    """
    # Checking that the function works with graphs containing no edges
    size = 2
    roi_size = 1
    # Generating two convex parts with one vertex
    # Thus, edges will be empty in _to_graph
    mask = np.zeros((size, size), dtype=bool)
    mask[0:roi_size, 0:roi_size] = True
    mask[-roi_size:, -roi_size:] = True
    mask = mask.reshape(size ** 2)
    A = grid_to_graph(n_x=size, n_y=size, mask=mask, return_as=np.ndarray)
    assert_true(connected_components(A)[0] == 2)
    # Checking that the function works whatever the type of mask is
    mask = np.ones((size, size), dtype=np.int16)
    A = grid_to_graph(n_x=size, n_y=size, n_z=size, mask=mask)
    assert_true(connected_components(A)[0] == 1)
    # Checking dtype of the graph
    mask = np.ones((size, size))
    A = grid_to_graph(n_x=size, n_y=size, n_z=size, mask=mask, dtype=bool)
    assert_true(A.dtype == bool)
    A = grid_to_graph(n_x=size, n_y=size, n_z=size, mask=mask, dtype=int)
    assert_true(A.dtype == int)
    A = grid_to_graph(n_x=size, n_y=size, n_z=size, mask=mask, dtype=float)
    assert_true(A.dtype == float)
def test_connect_regions():
    """Thresholded pixels form the same regions via img_to_graph or ndimage."""
    # NOTE(review): sp.misc.lena was removed in scipy >= 1.0; this fixture
    # needs replacing (e.g. with sp.datasets.ascent) on modern scipy -- confirm.
    lena = sp.misc.lena()
    for thr in (50, 150):
        mask = lena > thr
        graph = img_to_graph(lena, mask)
        # Region count from the graph must match scipy's connected labeling.
        assert_equal(ndimage.label(mask)[1], connected_components(graph)[0])


def test_connect_regions_with_grid():
    """Same region-count check, built with grid_to_graph instead."""
    # NOTE(review): sp.misc.lena removed in scipy >= 1.0 -- see remark above
    # in this same test pair.
    lena = sp.misc.lena()
    mask = lena > 50
    graph = grid_to_graph(*lena.shape, mask=mask)
    assert_equal(ndimage.label(mask)[1], connected_components(graph)[0])
    mask = lena > 150
    # dtype=None exercises the "keep the data dtype" code path.
    graph = grid_to_graph(*lena.shape, mask=mask, dtype=None)
    assert_equal(ndimage.label(mask)[1], connected_components(graph)[0])
def _downsampled_lena():
    """Box-downsample the grey-level test image twice (each pass halves
    both dimensions) and return it as a float array.

    Fix: ``np.float`` was a deprecated alias of builtin ``float`` and was
    removed in numpy >= 1.24.
    """
    # NOTE(review): sp.misc.lena was removed in scipy >= 1.0; a modern port
    # should use another fixture (e.g. sp.datasets.ascent) -- confirm.
    lena = sp.misc.lena().astype(np.float32)
    # Sum each 2x2 block; dividing by 16 at the end averages both passes.
    lena = (lena[::2, ::2] + lena[1::2, ::2] + lena[::2, 1::2]
            + lena[1::2, 1::2])
    lena = (lena[::2, ::2] + lena[1::2, ::2] + lena[::2, 1::2]
            + lena[1::2, 1::2])
    lena = lena.astype(float)
    lena /= 16.0
    return lena
def _orange_lena(lena=None):
lena = _downsampled_lena() if lena is None else lena
lena_color = np.zeros(lena.shape + (3,))
lena_color[:, :, 0] = 256 - lena
lena_color[:, :, 1] = 256 - lena / 2
lena_color[:, :, 2] = 256 - lena / 4
return lena_color
def _make_images(lena=None):
lena = _downsampled_lena() if lena is None else lena
# make a collection of lenas
images = np.zeros((3,) + lena.shape)
images[0] = lena
images[1] = lena + 1
images[2] = lena + 2
return images
# Shared image fixtures, built once at import time and reused by the tests.
downsampled_lena = _downsampled_lena()
orange_lena = _orange_lena(downsampled_lena)
lena_collection = _make_images(downsampled_lena)
def test_extract_patches_all():
    """Without max_patches, one patch per valid top-left corner is produced."""
    lena = downsampled_lena
    i_h, i_w = lena.shape
    p_h, p_w = 16, 16
    expected_n_patches = (i_h - p_h + 1) * (i_w - p_w + 1)
    patches = extract_patches_2d(lena, (p_h, p_w))
    assert_equal(patches.shape, (expected_n_patches, p_h, p_w))


def test_extract_patches_all_color():
    """Color images keep their trailing channel axis in extracted patches."""
    lena = orange_lena
    i_h, i_w = lena.shape[:2]
    p_h, p_w = 16, 16
    expected_n_patches = (i_h - p_h + 1) * (i_w - p_w + 1)
    patches = extract_patches_2d(lena, (p_h, p_w))
    assert_equal(patches.shape, (expected_n_patches, p_h, p_w, 3))


def test_extract_patches_all_rect():
    """Non-square patches on a non-square image slice are handled."""
    lena = downsampled_lena
    lena = lena[:, 32:97]
    i_h, i_w = lena.shape
    p_h, p_w = 16, 12
    expected_n_patches = (i_h - p_h + 1) * (i_w - p_w + 1)
    patches = extract_patches_2d(lena, (p_h, p_w))
    assert_equal(patches.shape, (expected_n_patches, p_h, p_w))


def test_extract_patches_max_patches():
    """max_patches caps the count (int) or sets a fraction (float);
    values outside (0, 1] for floats / negatives must raise ValueError."""
    lena = downsampled_lena
    i_h, i_w = lena.shape
    p_h, p_w = 16, 16
    patches = extract_patches_2d(lena, (p_h, p_w), max_patches=100)
    assert_equal(patches.shape, (100, p_h, p_w))
    expected_n_patches = int(0.5 * (i_h - p_h + 1) * (i_w - p_w + 1))
    patches = extract_patches_2d(lena, (p_h, p_w), max_patches=0.5)
    assert_equal(patches.shape, (expected_n_patches, p_h, p_w))
    assert_raises(ValueError, extract_patches_2d, lena, (p_h, p_w),
                  max_patches=2.0)
    assert_raises(ValueError, extract_patches_2d, lena, (p_h, p_w),
                  max_patches=-1.0)
def test_reconstruct_patches_perfect():
    """Reconstructing from the full set of patches recovers the image exactly."""
    lena = downsampled_lena
    p_h, p_w = 16, 16
    patches = extract_patches_2d(lena, (p_h, p_w))
    lena_reconstructed = reconstruct_from_patches_2d(patches, lena.shape)
    np.testing.assert_array_equal(lena, lena_reconstructed)


def test_reconstruct_patches_perfect_color():
    """Exact reconstruction also holds for 3-channel images."""
    lena = orange_lena
    p_h, p_w = 16, 16
    patches = extract_patches_2d(lena, (p_h, p_w))
    lena_reconstructed = reconstruct_from_patches_2d(patches, lena.shape)
    np.testing.assert_array_equal(lena, lena_reconstructed)


def test_patch_extractor_fit():
    """PatchExtractor.fit must return the estimator itself."""
    lenas = lena_collection
    extr = PatchExtractor(patch_size=(8, 8), max_patches=100, random_state=0)
    assert_true(extr == extr.fit(lenas))
def test_patch_extractor_max_patches():
    """transform honours max_patches as a per-image count and as a fraction."""
    lenas = lena_collection
    i_h, i_w = lenas.shape[1:3]
    p_h, p_w = 8, 8
    max_patches = 100
    expected_n_patches = len(lenas) * max_patches
    extr = PatchExtractor(patch_size=(p_h, p_w), max_patches=max_patches,
                          random_state=0)
    patches = extr.transform(lenas)
    assert_true(patches.shape == (expected_n_patches, p_h, p_w))
    # As a float, max_patches is interpreted as a fraction of all patches.
    max_patches = 0.5
    expected_n_patches = len(lenas) * int((i_h - p_h + 1) * (i_w - p_w + 1)
                                          * max_patches)
    extr = PatchExtractor(patch_size=(p_h, p_w), max_patches=max_patches,
                          random_state=0)
    patches = extr.transform(lenas)
    assert_true(patches.shape == (expected_n_patches, p_h, p_w))


def test_patch_extractor_max_patches_default():
    """With no patch_size given, the extractor picks a default (12x12 here)."""
    lenas = lena_collection
    extr = PatchExtractor(max_patches=100, random_state=0)
    patches = extr.transform(lenas)
    assert_equal(patches.shape, (len(lenas) * 100, 12, 12))


def test_patch_extractor_all_patches():
    """Without max_patches, every patch of every image is produced."""
    lenas = lena_collection
    i_h, i_w = lenas.shape[1:3]
    p_h, p_w = 8, 8
    expected_n_patches = len(lenas) * (i_h - p_h + 1) * (i_w - p_w + 1)
    extr = PatchExtractor(patch_size=(p_h, p_w), random_state=0)
    patches = extr.transform(lenas)
    assert_true(patches.shape == (expected_n_patches, p_h, p_w))


def test_patch_extractor_color():
    """Color collections keep their channel axis through transform."""
    lenas = _make_images(orange_lena)
    i_h, i_w = lenas.shape[1:3]
    p_h, p_w = 8, 8
    expected_n_patches = len(lenas) * (i_h - p_h + 1) * (i_w - p_w + 1)
    extr = PatchExtractor(patch_size=(p_h, p_w), random_state=0)
    patches = extr.transform(lenas)
    assert_true(patches.shape == (expected_n_patches, p_h, p_w, 3))
def test_extract_patches_strided():
    """extract_patches with an extraction step: verify the grid of patch
    positions and the content of the last patch for 1D/2D/3D inputs."""
    # Parallel lists: image shape, patch shape, step, expected position grid,
    # and the start index of the last patch along each axis.
    image_shapes_1D = [(10,), (10,), (11,), (10,)]
    patch_sizes_1D = [(1,), (2,), (3,), (8,)]
    patch_steps_1D = [(1,), (1,), (4,), (2,)]
    expected_views_1D = [(10,), (9,), (3,), (2,)]
    last_patch_1D = [(10,), (8,), (8,), (2,)]
    image_shapes_2D = [(10, 20), (10, 20), (10, 20), (11, 20)]
    patch_sizes_2D = [(2, 2), (10, 10), (10, 11), (6, 6)]
    patch_steps_2D = [(5, 5), (3, 10), (3, 4), (4, 2)]
    expected_views_2D = [(2, 4), (1, 2), (1, 3), (2, 8)]
    last_patch_2D = [(5, 15), (0, 10), (0, 8), (4, 14)]
    image_shapes_3D = [(5, 4, 3), (3, 3, 3), (7, 8, 9), (7, 8, 9)]
    patch_sizes_3D = [(2, 2, 3), (2, 2, 2), (1, 7, 3), (1, 3, 3)]
    patch_steps_3D = [(1, 2, 10), (1, 1, 1), (2, 1, 3), (3, 3, 4)]
    expected_views_3D = [(4, 2, 1), (2, 2, 2), (4, 2, 3), (3, 2, 2)]
    last_patch_3D = [(3, 2, 0), (1, 1, 1), (6, 1, 6), (6, 3, 4)]
    image_shapes = image_shapes_1D + image_shapes_2D + image_shapes_3D
    patch_sizes = patch_sizes_1D + patch_sizes_2D + patch_sizes_3D
    patch_steps = patch_steps_1D + patch_steps_2D + patch_steps_3D
    expected_views = expected_views_1D + expected_views_2D + expected_views_3D
    last_patches = last_patch_1D + last_patch_2D + last_patch_3D
    for (image_shape, patch_size, patch_step, expected_view,
         last_patch) in zip(image_shapes, patch_sizes, patch_steps,
                            expected_views, last_patches):
        image = np.arange(np.prod(image_shape)).reshape(image_shape)
        patches = extract_patches(image, patch_shape=patch_size,
                                  extraction_step=patch_step)
        ndim = len(image_shape)
        # The leading ndim axes index the patch positions.
        assert_true(patches.shape[:ndim] == expected_view)
        # The last patch must equal the corresponding slice of the image.
        last_patch_slices = [slice(i, i + j, None) for i, j in
                             zip(last_patch, patch_size)]
        assert_true((patches[[slice(-1, None, None)] * ndim] ==
                     image[last_patch_slices].squeeze()).all())
def test_extract_patches_square():
    """A scalar patch_shape is broadcast to all image dimensions."""
    lena = downsampled_lena
    i_h, i_w = lena.shape
    p = 8
    expected_n_patches = ((i_h - p + 1), (i_w - p + 1))
    patches = extract_patches(lena, patch_shape=p)
    assert_true(patches.shape == (expected_n_patches[0], expected_n_patches[1],
                                  p, p))


def test_width_patch():
    """Patches larger than the image in either dimension must raise."""
    x = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    assert_raises(ValueError, extract_patches_2d, x, (4, 1))
    assert_raises(ValueError, extract_patches_2d, x, (1, 4))


if __name__ == '__main__':
    import nose
    nose.runmodule()
|
vitan/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/tests/urlpatterns_reverse/urls_error_handlers_callables.py
|
53
|
# Used by the ErrorHandlerResolutionTests test case.
from __future__ import absolute_import
from django.conf.urls import patterns
from .views import empty_view
# No URL patterns: this module exists only to expose error handlers.
urlpatterns = patterns('')
# Wire every error handler to the same trivial view so the resolution tests
# can assert on callables (rather than dotted-path strings).
handler400 = empty_view
handler404 = empty_view
handler500 = empty_view
|
fuzzyhandle/pihangout
|
refs/heads/master
|
testweet/tweetexternalip.py
|
1
|
from __future__ import absolute_import, print_function
import json
import urllib
from pprint import pprint
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
from tweepy import API
def getTwitterAPIHandle():
    """Build an authenticated tweepy API handle from api_secret_token.json."""
    with open('api_secret_token.json') as data_file:
        authdata = json.load(data_file)
    # pprint(authdata)
    handler = OAuthHandler(authdata['consumer_key'],
                           authdata['consumer_secret'])
    handler.set_access_token(authdata['access_token'],
                             authdata['access_token_secret'])
    return API(handler)
if __name__ == '__main__':
    # Fix: urllib.urlopen only exists on Python 2. Resolve the correct
    # callable at runtime so the script also works on Python 3.
    try:
        from urllib.request import urlopen  # Python 3
    except ImportError:
        from urllib import urlopen  # Python 2
    api = getTwitterAPIHandle()
    # Ask an external service for this host's public-facing IP address.
    my_ip = json.load(urlopen('http://jsonip.com'))['ip']
    #print (my_ip)
    api.send_direct_message(user="hrishikesh_date", text="External IP is %s"%(my_ip))
    # print(api.me().name)
    # # If the application settings are set for "Read and Write" then
    # # this line should tweet out a direct message
    # # The "Read and Write" setting is on https://dev.twitter.com/apps
    # #api.send_direct_message(user="hrishikesh_date", text="Hello From Pi :)")
|
ceph/Diamond
|
refs/heads/master
|
src/collectors/openstackswift/test/testopenstackswift.py
|
37
|
#!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from openstackswift import OpenstackSwiftCollector
class TestOpenstackSwiftCollector(CollectorTestCase):
    """Unit tests for OpenstackSwiftCollector."""

    def setUp(self, allowed_names=None):
        """Build a collector instance using the given allowed_names filter."""
        names = allowed_names if allowed_names else []
        settings = {'allowed_names': names, 'interval': 1}
        conf = get_collector_config('OpenstackSwiftCollector', settings)
        self.collector = OpenstackSwiftCollector(conf, None)

    def test_import(self):
        """The collector class must be importable (truthy)."""
        self.assertTrue(OpenstackSwiftCollector)
|
xin3liang/platform_external_chromium_org_tools_gyp
|
refs/heads/master
|
test/win/gyptest-lib-ltcg.py
|
269
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure LTCG setting is extracted properly.
"""
import TestGyp
import sys
# The LTCG lib flag only applies to the Windows toolchains.
if sys.platform == 'win32':
    test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
    CHDIR = 'lib-flags'
    test.run_gyp('ltcg.gyp', chdir=CHDIR)
    test.build('ltcg.gyp', test.ALL, chdir=CHDIR)
    # If the linker restarts with /LTCG, the flag was not passed up front.
    test.must_not_contain_any_line(test.stdout(), ['restarting link with /LTCG'])
    test.pass_test()
|
olifre/rootStaticAnalyzer
|
refs/heads/master
|
.ycm_extra_conf.py
|
1
|
# This file is NOT licensed under the GPLv3, which is the license for the rest
# of YouCompleteMe.
#
# Here's the license text for this file:
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org/>
import os
import ycm_core
# These are the compilation flags that will be used in case there's no
# compilation database set (by default, one is not set).
# CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR.
# Fallback compile flags, used only when no compilation database is found
# (see FlagsForFile at the bottom of this file).
flags = [
    '-Wall',
    '-Wextra',
    '-Werror',
    '-Wc++98-compat',
    '-Wno-long-long',
    '-Wno-variadic-macros',
    '-fexceptions',
    '-DNDEBUG',
    # You 100% do NOT need -DUSE_CLANG_COMPLETER in your flags; only the YCM
    # source code needs it.
    '-DUSE_CLANG_COMPLETER',
    # THIS IS IMPORTANT! Without a "-std=<something>" flag, clang won't know
    # which language to use when compiling headers. So it will guess. Badly. So
    # C++ headers will be compiled as C headers. You don't want that so ALWAYS
    # specify a "-std=<something>".
    # For a C project, you would set this to something like 'c99' instead of
    # 'c++11'.
    '-std=c++11',
    # ...and the same thing goes for the magic -x option which specifies the
    # language that the files to be compiled are written in. This is mostly
    # relevant for c++ headers.
    # For a C project, you would set this to 'c' instead of 'c++'.
    '-x',
    'c++',
    '-isystem',
    '../BoostParts',
    '-isystem',
    # This path will only work on OS X, but extra paths that don't exist are
    # not harmful
    '/System/Library/Frameworks/Python.framework/Headers',
    '-isystem',
    '../llvm/include',
    '-isystem',
    '../llvm/tools/clang/include',
    '-I',
    '.',
    '-I',
    './ClangCompleter',
    '-isystem',
    './tests/gmock/gtest',
    '-isystem',
    './tests/gmock/gtest/include',
    '-isystem',
    './tests/gmock',
    '-isystem',
    './tests/gmock/include',
]
def DirectoryOfThisScript():
    """Absolute path of the directory containing this configuration file."""
    return os.path.dirname(os.path.abspath(__file__))
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# You can get CMake to generate this file for you by adding:
#   set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )
# to your CMakeLists.txt file.
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
compilation_database_folder = DirectoryOfThisScript()+'/build/'

# Load the database eagerly at import time; None means "fall back to 'flags'".
if os.path.exists( compilation_database_folder ):
  database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
  database = None

# Extensions probed when looking for the source file matching a header.
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def MakeRelativePathsInFlagsAbsolute(flags, working_directory):
    """Return a copy of `flags` with relative paths resolved against
    `working_directory`.

    Handles both the two-token form ('-I', 'rel/path') and the fused form
    ('-Irel/path', '--sysroot=rel/path'). Without a working directory the
    flags are returned unchanged (as a new list).
    """
    if not working_directory:
        return list(flags)
    path_flags = ['-isystem', '-I', '-iquote', '--sysroot=']
    absolute_flags = []
    expecting_path = False
    for token in flags:
        updated = token
        if expecting_path:
            # Previous token was a bare path flag: this token is its path.
            expecting_path = False
            if not token.startswith('/'):
                updated = os.path.join(working_directory, token)
        for prefix in path_flags:
            if token == prefix:
                expecting_path = True
                break
            if token.startswith(prefix):
                tail = token[len(prefix):]
                updated = prefix + os.path.join(working_directory, tail)
                break
        if updated:
            absolute_flags.append(updated)
    return absolute_flags
def IsHeaderFile(filename):
    """True when `filename` carries a C/C++ header extension."""
    return os.path.splitext(filename)[1] in ('.h', '.hxx', '.hpp', '.hh')
def GetCompilationInfoForFile(filename):
    """Look up compile flags for `filename` in the compilation database.

    CMake's compile_commands.json has no entries for header files, so for a
    header we probe sibling source files and borrow their flags. Returns
    None when nothing usable is found.
    """
    if not IsHeaderFile(filename):
        return database.GetCompilationInfoForFile(filename)
    stem = os.path.splitext(filename)[0]
    for extension in SOURCE_EXTENSIONS:
        candidate = stem + extension
        if not os.path.exists(candidate):
            continue
        info = database.GetCompilationInfoForFile(candidate)
        if info.compiler_flags_:
            return info
    return None
def FlagsForFile(filename, **kwargs):
    """YCM entry point: return the clang flags to use for `filename`."""
    if not database:
        # No compilation database: fall back to the static flag list.
        return {
            'flags': MakeRelativePathsInFlagsAbsolute(flags,
                                                      DirectoryOfThisScript()),
            'do_cache': True
        }
    # Bear in mind that compilation_info.compiler_flags_ does NOT return a
    # python list, but a "list-like" StringVec object.
    compilation_info = GetCompilationInfoForFile(filename)
    if not compilation_info:
        return None
    final_flags = MakeRelativePathsInFlagsAbsolute(
        compilation_info.compiler_flags_,
        compilation_info.compiler_working_dir_)
    # NOTE: This is just for YouCompleteMe; it's highly likely that your
    # project does NOT need to remove the stdlib flag. DO NOT USE THIS IN
    # YOUR ycm_extra_conf IF YOU'RE NOT 100% SURE YOU NEED IT.
    try:
        final_flags.remove('-stdlib=libc++')
    except ValueError:
        pass
    return {
        'flags': final_flags,
        'do_cache': True
    }
|
juanyaw/python
|
refs/heads/develop
|
cpython/Tools/msi/make_zip.py
|
3
|
import argparse
import re
import sys
import shutil
import os
import tempfile
from pathlib import Path
from zipfile import ZipFile, ZIP_DEFLATED
import subprocess
# Tcl/Tk binaries -- excluded from the layouts below.
TKTCL_RE = re.compile(r'^(_?tk|tcl).+\.(pyd|dll)', re.IGNORECASE)
# Debug builds of extensions/executables (e.g. foo_d.pyd, python_d.exe).
DEBUG_RE = re.compile(r'_d\.(pyd|dll|exe)$', re.IGNORECASE)
# The main interpreter DLL (e.g. python35.dll).
PYTHON_DLL_RE = re.compile(r'python\d\d?\.dll$', re.IGNORECASE)


def is_not_debug(p):
    """True when path `p` is neither a debug binary, a Tcl/Tk binary, nor
    one of the test-only extension modules."""
    name = p.name
    if DEBUG_RE.search(name) or TKTCL_RE.search(name):
        return False
    excluded = {
        '_ctypes_test.pyd',
        '_testbuffer.pyd',
        '_testcapi.pyd',
        '_testimportmultiple.pyd',
        '_testmultiphase.pyd',
        'xxlimited.pyd',
    }
    return name.lower() not in excluded


def is_not_debug_or_python(p):
    """Like is_not_debug, but additionally rejects the main python DLL."""
    if PYTHON_DLL_RE.search(p.name):
        return False
    return is_not_debug(p)
def include_in_lib(p):
    """Filter for the Lib/ tree: skip cache/tooling dirs and bytecode files."""
    name = p.name.lower()
    if p.is_dir():
        skipped_dirs = {'__pycache__', 'ensurepip', 'idlelib', 'pydoc_data',
                        'tkinter', 'turtledemo'}
        if name in skipped_dirs or name.startswith('plat-'):
            return False
        # Only the top-level Lib/test is excluded, not nested test packages.
        return not (name == 'test' and p.parts[-2].lower() == 'lib')
    return p.suffix.lower() not in {'.pyc', '.pyo'}
def include_in_tools(p):
    """Filter for the Tools/ tree: a few whole directories plus scripts/docs."""
    wanted_dirs = {'scripts', 'i18n', 'pynche', 'demo', 'parser'}
    if p.is_dir() and p.name.lower() in wanted_dirs:
        return True
    return p.suffix.lower() in {'.py', '.pyw', '.txt'}
# Layout tables: (target dir, source dir relative to the repo root,
# glob pattern, filter callable or None) tuples consumed by main().
FULL_LAYOUT = [
    ('/', 'PCBuild/$arch', 'python*.exe', is_not_debug),
    ('/', 'PCBuild/$arch', 'python*.dll', is_not_debug),
    ('DLLs/', 'PCBuild/$arch', '*.pyd', is_not_debug),
    ('DLLs/', 'PCBuild/$arch', '*.dll', is_not_debug),
    ('include/', 'include', '*.h', None),
    ('include/', 'PC', 'pyconfig.h', None),
    ('Lib/', 'Lib', '**/*', include_in_lib),
    ('Tools/', 'Tools', '**/*', include_in_tools),
]
# Ship the prebuilt docs too when the build provides one.
if os.getenv('DOC_FILENAME'):
    FULL_LAYOUT.append(('Doc/', 'Doc/build/htmlhelp', os.getenv('DOC_FILENAME'), None))

# Minimal embeddable distribution: binaries plus the stdlib packed in a zip.
EMBED_LAYOUT = [
    ('/', 'PCBuild/$arch', 'python*.exe', is_not_debug),
    ('/', 'PCBuild/$arch', '*.pyd', is_not_debug),
    ('/', 'PCBuild/$arch', '*.dll', is_not_debug),
    ('python35.zip', 'Lib', '**/*', include_in_lib),
]
def copy_to_layout(target, rel_sources):
    """Copy `rel_sources` -- an iterable of (source_path, relative_path)
    pairs -- into `target` and return the number of files copied.

    If `target` ends in '.zip', a fresh zip archive is written; otherwise
    files are copied into the directory tree rooted at `target`.
    """
    count = 0
    if target.suffix.lower() == '.zip':
        # Always rebuild the archive from scratch.
        if target.exists():
            target.unlink()
        with ZipFile(str(target), 'w', ZIP_DEFLATED) as f:
            for s, rel in rel_sources:
                f.write(str(s), str(rel))
                count += 1
    else:
        for s, rel in rel_sources:
            # exist_ok=True replaces the old try/except FileExistsError dance
            # (supported since Python 3.5, which this script targets).
            (target / rel).parent.mkdir(parents=True, exist_ok=True)
            shutil.copy(str(s), str(target / rel))
            count += 1
    return count
def rglob(root, pattern, condition):
    """Yield (path, path-relative-to-root) pairs for files under `root`
    matching `pattern`.

    A leading '**/' (or '**\\') makes the walk recurse into subdirectories
    accepted by `condition`; a falsy `condition` accepts everything.
    """
    recurse = pattern[:3] in {'**/', '**\\'}
    glob_pattern = pattern[3:] if recurse else pattern
    pending = [root]
    while pending:
        directory = pending.pop(0)
        for entry in directory.glob(glob_pattern):
            accepted = not condition or condition(entry)
            if not accepted:
                continue
            if recurse and entry.is_dir():
                pending.append(entry)
            elif entry.is_file():
                yield entry, entry.relative_to(root)
def main():
    """Assemble a Python distribution layout and pack it into `--out`.

    Copies files from the source tree into a (possibly temporary) layout
    directory -- full or embeddable, per --embed -- then archives that
    layout into the requested output.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--source', metavar='dir', help='The directory containing the repository root', type=Path)
    parser.add_argument('-o', '--out', metavar='file', help='The name of the output self-extracting archive', type=Path, required=True)
    parser.add_argument('-t', '--temp', metavar='dir', help='A directory to temporarily extract files into', type=Path, default=None)
    parser.add_argument('-e', '--embed', help='Create an embedding layout', action='store_true', default=False)
    parser.add_argument('-a', '--arch', help='Specify the architecture to use (win32/amd64)', type=str, default="win32")
    ns = parser.parse_args()
    # Default source: the repository root relative to this script's location.
    source = ns.source or (Path(__file__).parent.parent.parent)
    out = ns.out
    arch = ns.arch
    assert isinstance(source, Path)
    assert isinstance(out, Path)
    assert isinstance(arch, str)
    if ns.temp:
        # A caller-supplied temp dir is kept around for inspection.
        temp = ns.temp
        delete_temp = False
    else:
        temp = Path(tempfile.mkdtemp())
        delete_temp = True
    try:
        out.parent.mkdir(parents=True)
    except FileExistsError:
        pass
    try:
        temp.mkdir(parents=True)
    except FileExistsError:
        pass
    layout = EMBED_LAYOUT if ns.embed else FULL_LAYOUT
    try:
        for t, s, p, c in layout:
            # '$arch' in the layout's source dirs selects the build flavor.
            s = source / s.replace("$arch", arch)
            copied = copy_to_layout(temp / t.rstrip('/'), rglob(s, p, c))
            print('Copied {} files'.format(copied))
        # NOTE(review): 'applocal = true' appears to mark the layout as
        # self-contained -- confirm against the interpreter's path logic.
        with open(str(temp / 'pyvenv.cfg'), 'w') as f:
            print('applocal = true', file=f)
        # Pack the assembled layout into the final archive/directory.
        total = copy_to_layout(out, rglob(temp, '*', None))
        print('Wrote {} files to {}'.format(total, out))
    finally:
        if delete_temp:
            shutil.rmtree(temp, True)


if __name__ == "__main__":
    sys.exit(int(main() or 0))
|
saltstar/spark
|
refs/heads/master
|
examples/src/main/python/ml/train_validation_split.py
|
71
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# $example on$
from pyspark.ml.evaluation import RegressionEvaluator
from pyspark.ml.regression import LinearRegression
from pyspark.ml.tuning import ParamGridBuilder, TrainValidationSplit
# $example off$
from pyspark.sql import SparkSession
"""
This example demonstrates applying TrainValidationSplit to split data
and perform model selection.
Run with:
bin/spark-submit examples/src/main/python/ml/train_validation_split.py
"""
if __name__ == "__main__":
    spark = SparkSession\
        .builder\
        .appName("TrainValidationSplit")\
        .getOrCreate()
    # $example on$
    # Prepare training and test data.
    data = spark.read.format("libsvm")\
        .load("data/mllib/sample_linear_regression_data.txt")
    # Fixed seed keeps the 90/10 split reproducible across runs.
    train, test = data.randomSplit([0.9, 0.1], seed=12345)
    lr = LinearRegression(maxIter=10)
    # We use a ParamGridBuilder to construct a grid of parameters to search
    # over. TrainValidationSplit will try all combinations of values and
    # determine the best model using the evaluator.
    paramGrid = ParamGridBuilder()\
        .addGrid(lr.regParam, [0.1, 0.01]) \
        .addGrid(lr.fitIntercept, [False, True])\
        .addGrid(lr.elasticNetParam, [0.0, 0.5, 1.0])\
        .build()
    # In this case the estimator is simply the linear regression.
    # A TrainValidationSplit requires an Estimator, a set of Estimator
    # ParamMaps, and an Evaluator.
    tvs = TrainValidationSplit(estimator=lr,
                               estimatorParamMaps=paramGrid,
                               evaluator=RegressionEvaluator(),
                               # 80% of the data will be used for training, 20% for validation.
                               trainRatio=0.8)
    # Run TrainValidationSplit, and choose the best set of parameters.
    model = tvs.fit(train)
    # Make predictions on test data. model is the model with combination of
    # parameters that performed best.
    model.transform(test)\
        .select("features", "label", "prediction")\
        .show()
    # $example off$
    spark.stop()
|
bigmlcom/bigmler
|
refs/heads/master
|
bigmler/options/main.py
|
1
|
# -*- coding: utf-8 -*-
#
# Copyright 2014-2021 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Options for BigMLer main subcommand processing
"""
def get_main_options(defaults=None, constants=None):
    """Return the option definitions for the BigMLer main subcommand.

    Args:
        defaults: dict of user-supplied default values (e.g. read from a
            config file); each option falls back to its entry here.
        constants: dict of shared constant values (``MAX_MODELS``,
            ``PLURALITY``, ``LAST_PREDICTION``) used as built-in defaults.

    Returns:
        A dict mapping each command-line flag to its argparse-style
        definition (``action``, ``dest``, ``default``, ``type``,
        ``choices``, ``help``).
    """
    if defaults is None:
        defaults = {}
    if constants is None:
        constants = {}
    max_models = constants.get('MAX_MODELS')
    plurality = constants.get('PLURALITY')
    last = constants.get('LAST_PREDICTION')
    options = {
        # If a BigML model is provided, the script will use it to generate
        # predictions.
        '--model': {
            'action': 'store',
            'dest': 'model',
            'default': defaults.get('model', None),
            'help': "BigML model Id."},
        # Use it to compute predictions remotely.
        '--remote': {
            'action': 'store_true',
            'dest': 'remote',
            'default': defaults.get('remote', False),
            'help': "Compute predictions remotely."},
        # The path to a file containing model ids.
        '--models': {
            'action': 'store',
            'dest': 'models',
            'default': defaults.get('models', None),
            'help': ("Path to a file containing model/ids. One model"
                     " per line (e.g., model/50a206a8035d0706dc000376"
                     ").")},
        # If a BigML json file containing a model structure is provided,
        # the script will use it.
        '--model-file': {
            'action': 'store',
            'dest': 'model_file',
            'default': defaults.get('model_file', None),
            'help': "BigML model JSON structure file."},
        # Sets pruning.
        '--pruning': {
            'action': 'store',
            'default': defaults.get('pruning', "smart"),
            'choices': ["smart", "statistical", "no-pruning"],
            'help': ("Set pruning type: smart, statistical,"
                     " no-pruning.")},
        # Number of models to create when using ensembles.
        '--number-of-models': {
            'action': 'store',
            'dest': 'number_of_models',
            'default': defaults.get('number_of_models', 1),
            'type': int,
            'help': ("Number of models to create when using"
                     " ensembles.")},
        # Replacement to use when sampling.
        '--replacement': {
            'action': 'store_true',
            'default': defaults.get('replacement', False),
            'help': "Use replacement when sampling."},
        # Max number of models to predict from in parallel.
        '--max-batch-models': {
            'action': 'store',
            'dest': 'max_batch_models',
            'default': defaults.get('max_batch_models', max_models),
            'type': int,
            'help': ("Max number of models to predict from"
                     " in parallel.")},
        # Randomize feature selection at each split.
        '--randomize': {
            'action': 'store_true',
            'dest': 'randomize',
            'default': defaults.get('randomize', False),
            'help': "Randomize feature selection at each split."},
        # Make model a public black-box model.
        '--black-box': {
            'action': 'store_true',
            'dest': 'black_box',
            'default': defaults.get('black_box', False),
            'help': "Make generated model black-box."},
        # Make model a public white-box model.
        '--white-box': {
            'action': 'store_true',
            'dest': 'white_box',
            'default': defaults.get('white_box', False),
            'help': "Make generated model white-box."},
        # Set a price tag to your white-box model.
        '--model-price': {
            'action': 'store',
            'dest': 'model_price',
            'type': float,
            'default': defaults.get('model_price', 0.0),
            'help': ("The price other users must pay to clone your"
                     " model.")},
        # Set credits per prediction to your white box or black box models.
        '--cpp': {
            'action': 'store',
            'type': float,
            'default': defaults.get('cpp', 0.0),
            'help': ("The number of credits that other users will"
                     " consume to make a prediction with your"
                     " model.")},
        # Does not create a model just a dataset.
        '--no-model': {
            'action': 'store_true',
            'dest': 'no_model',
            'default': defaults.get('no_model', False),
            'help': "Do not create a model."},
        # Prediction directories to be combined.
        '--combine-votes': {
            'action': 'store',
            'dest': 'votes_dirs',
            'default': defaults.get('combine_votes', None),
            'help': ("Comma separated list of"
                     " directories that contain models' votes"
                     " for the same test set.")},
        # Method to combine votes in multiple models predictions
        '--method': {
            'action': 'store',
            'dest': 'method',
            'default': defaults.get('method', plurality),
            'choices': ["plurality", "confidence weighted",
                        "probability weighted", "threshold",
                        "combined"],
            'help': ("Method to combine votes from ensemble"
                     " predictions. Allowed methods: plurality"
                     ", \"confidence weighted\", "
                     " \"probability weighted\", threshold. Also"
                     " \"combined\" for datasets with subsets of"
                     " categories")},
        # Evaluate a model
        '--evaluate': {
            'action': 'store_true',
            'help': "Evaluate command."},
        # Max number of models to create in parallel.
        '--max-parallel-models': {
            'action': 'store',
            'dest': 'max_parallel_models',
            'default': defaults.get('max_parallel_models', 1),
            'type': int,
            'help': "Max number of models to create in parallel."},
        # Max number of evaluations to create in parallel.
        '--max-parallel-evaluations': {
            'action': 'store',
            'dest': 'max_parallel_evaluations',
            'default': defaults.get('max_parallel_evaluations', 1),
            'type': int,
            'help': ("Max number of evaluations to create in"
                     " parallel.")},
        # The name of the field that represents the objective field (i.e.,
        # class or label) or its column number.
        '--objective': {
            'action': 'store',
            'dest': 'objective_field',
            'default': defaults.get('objective', None),
            'help': ("The column number of the Objective Field"
                     " or its name, if headers are given.")},
        # The path to a file containing the mapping of fields' ids from
        # the test dataset fields to the model fields.
        '--fields-map': {
            'action': 'store',
            'dest': 'fields_map',
            'default': defaults.get('fields_map', None),
            'help': ("Path to a csv file describing fields mapping. "
                     "One definition per line (e.g., 00000,"
                     "00000a).")},
        # Set the part of training data to be held out for cross-validation
        '--cross-validation-rate': {
            'action': 'store',
            'dest': 'cross_validation_rate',
            'type': float,
            'default': defaults.get('cross_validation_rate', 0.0),
            'help': ("Part of training data to be held out for "
                     "cross-validation.")},
        # Number of evaluations used in cross-validation
        '--number-of-evaluations': {
            'action': 'store',
            'dest': 'number_of_evaluations',
            'type': int,
            'default': defaults.get('number_of_evaluations', 0),
            'help': ("Number of evaluations used for"
                     " cross-validation.")},
        # If a BigML ensemble is provided, the script will use it to generate
        # predictions.
        '--ensemble': {
            'action': 'store',
            'dest': 'ensemble',
            'default': defaults.get('ensemble', None),
            'help': "BigML ensemble Id."},
        # Prediction log format: `short` will only log predictions, `long` will
        # log also confidence information
        '--prediction-info': {
            'action': 'store',
            'dest': 'prediction_info',
            'default': defaults.get('prediction_info', 'normal'),
            'choices': ["brief", "normal", "full", "full data"],
            'help': ("Prediction log format: 'brief' will only "
                     "log predictions, 'normal' will write confidence"
                     " too, 'full' will write in a row the"
                     " input data that generates the prediction"
                     " followed by the latter.")},
        # Multi-label. The objective field has multiple labels.
        '--multi-label': {
            'action': 'store_true',
            'dest': 'multi_label',
            'default': defaults.get('multi_label', False),
            'help': ("The objective field has multiple labels that"
                     " should be treated independently.")},
        # Prediction header. If set, headers are added to the prediction file.
        '--prediction-header': {
            'action': 'store_true',
            'dest': 'prediction_header',
            'default': defaults.get('prediction_header', False),
            'help': "Headers are added to the prediction file."},
        # Prediction fields. A comma-separated list of the fields that should
        # be included in the prediction file.
        '--prediction-fields': {
            'action': 'store',
            'dest': 'prediction_fields',
            'default': defaults.get('prediction_fields', None),
            'help': "Fields added to the prediction file."},
        # Probability: Includes the probability associated to the prediction
        '--probability': {
            'action': 'store_true',
            'dest': 'probability',
            'default': defaults.get('probability', False),
            'help': ("Adding the probability to predictions.")},
        # No_probability: Does not include the probability of the prediction
        '--no-probability': {
            'action': 'store_false',
            'dest': 'probability',
            'default': defaults.get('probability', False),
            'help': ("Predictions don't include probability.")},
        # Max number of ensembles to create in parallel.
        '--max-parallel-ensembles': {
            'action': 'store',
            'dest': 'max_parallel_ensembles',
            'default': defaults.get('max_parallel_ensembles', 1),
            'type': int,
            'help': "Max number of ensembles to create in parallel."},
        # The path to a file containing ensemble ids.
        '--ensembles': {
            'action': 'store',
            'dest': 'ensembles',
            'default': defaults.get('ensembles', None),
            'help': ("Path to a file containing ensemble/ids. One "
                     "ensemble per line (e.g., "
                     "ensemble/50a206a8035d0706dc000376).")},
        # If a BigML json file containing a model structure is provided,
        # the script will use it.
        '--ensemble-file': {
            'action': 'store',
            'dest': 'ensemble_file',
            'default': defaults.get('ensemble_file', None),
            'help': "BigML ensemble JSON structure file."},
        # Threshold. Minimum necessary number of votes to issue a prediction.
        '--threshold': {
            'action': 'store',
            'dest': 'threshold',
            'default': defaults.get('threshold', 1),
            'type': int,
            'help': ("Minimum number of votes to issue a prediction"
                     " for the threshold combiner.")},
        # Class. Label for the category used in threshold voting predictions.
        '--class': {
            'action': 'store',
            'dest': 'threshold_class',
            'default': defaults.get('threshold_class', None),
            'help': "Category used in threshold combiner method."},
        # Max number of categories to be included in a model
        '--max-categories': {
            'action': 'store',
            'dest': 'max_categories',
            'default': defaults.get('max_categories', 0),
            'type': int,
            'help': ("Max number of categories to be included in"
                     " a model.")},
        # No batch predictions. Remote predictions are created individually.
        '--no-batch': {
            'action': 'store_true',
            'dest': 'no_batch',
            'default': defaults.get('no_batch', False),
            'help': "Create remote predictions individually."},
        # Evaluations flag: excluding one dataset from the datasets list to
        # test
        '--dataset-off': {
            'action': 'store_true',
            'dest': 'dataset_off',
            'default': defaults.get('dataset_off', False),
            'help': ("Excluding one dataset at a time from the"
                     " datasets list to test.")},
        # The path to a file containing model attributes.
        '--model-attributes': {
            'action': 'store',
            'dest': 'model_attributes',
            'default': defaults.get('model_attributes', None),
            'help': ("Path to a json file describing model"
                     " attributes.")},
        # Input fields to include in the model.
        '--model-fields': {
            'action': 'store',
            'dest': 'model_fields',
            'default': defaults.get('model_fields', None),
            'help': ("Comma-separated list of input fields"
                     " (predictors) to create the model.")},
        # Balance. Automatically balance all the classes evenly.
        '--balance': {
            'action': 'store_true',
            'dest': 'balance',
            'default': defaults.get('balance', False),
            'help': ("Automatically balance all objective classes"
                     " evenly.")},
        # Balance. Do not automatically balance all the classes evenly.
        # (opposed to balance)
        '--no-balance': {
            'action': 'store_false',
            'dest': 'balance',
            'default': defaults.get('balance', False),
            'help': ("Do not automatically balance all objective"
                     " classes evenly.")},
        # Node threshold. Maximum number of nodes in the tree.
        '--node-threshold': {
            'action': 'store',
            'dest': 'node_threshold',
            'default': defaults.get('node_threshold', 0),
            'type': int,
            'help': "Maximum number of nodes in the model."},
        # The path to a file containing ensemble attributes.
        '--ensemble-attributes': {
            'action': 'store',
            'dest': 'ensemble_attributes',
            'default': defaults.get('ensemble_attributes', None),
            'help': ("Path to a json file describing ensemble"
                     " attributes.")},
        # The path to a file containing evaluation attributes.
        '--evaluation-attributes': {
            'action': 'store',
            'dest': 'evaluation_attributes',
            'default': defaults.get('evaluation_attributes', None),
            'help': ("Path to a json file describing evaluation"
                     " attributes.")},
        # The path to a file containing batch prediction attributes.
        '--batch-prediction-attributes': {
            'action': 'store',
            'dest': 'batch_prediction_attributes',
            'default': defaults.get('batch_prediction_attributes', None),
            'help': ("Path to a json file describing batch prediction"
                     " attributes.")},
        # The path to a file containing prediction attributes.
        '--prediction-attributes': {
            'action': 'store',
            'dest': 'prediction_attributes',
            'default': defaults.get('prediction_attributes', None),
            'help': ("Path to a json file describing prediction"
                     " attributes.")},
        # Weight-field. Use the contents of the given field as weights.
        '--weight-field': {
            'action': 'store',
            'dest': 'weight_field',
            'default': defaults.get('weight_field', None),
            'help': ("Sets the name (or column) of the field"
                     " that contains the weights for the instances.")},
        # Objective-weights. Path a to a CSV file of class, weight pairs.
        '--objective-weights': {
            'action': 'store',
            'dest': 'objective_weights',
            'default': defaults.get('objective_weights', None),
            'help': "Path to a CSV file of class, weight pairs."},
        # Strategy used in predictions when a missing value is found for the
        # field used to split the node.
        '--missing-strategy': {
            'action': 'store',
            'dest': 'missing_strategy',
            'default': defaults.get('missing_strategy', last),
            'choices': ["last", "proportional"],
            'help': ("Strategy used when the field used in the split"
                     " to next nodes is missing in the input data."
                     " Allowed values: last or proportional")},
        # Default value to use for missings in numeric fields
        '--default-numeric-value': {
            'action': 'store',
            'dest': 'default_numeric_value',
            'default': defaults.get('default_numeric_value'),
            'choices': ["mean", "median", "minimum", "maximum", "zero"],
            'help': ("Value set by default when a numeric field is missing."
                     " Allowed values: mean, median, minimum, maximum or"
                     " zero.")},
        # Report. Additional output report formats
        '--reports': {
            'action': 'store',
            'dest': 'reports',
            'nargs': '*',
            'default': defaults.get('reports', []),
            'choices': ["gazibit"],
            'help': "Output report formats."},
        # Set it to use the missing splits operators: including missing values
        # in tree branches.
        '--missing-splits': {
            'action': 'store_true',
            'dest': 'missing_splits',
            'default': defaults.get('missing_splits', False),
            # Fixed missing space between the concatenated literals
            # ("...of thetree" -> "...of the tree").
            'help': ("Accept missing values as valid in some branches of the"
                     " tree.")},
        # Set it to use the fields and the first node will split in one
        # branch per category (only for categorical fields)
        '--split-field': {
            'action': 'store',
            'dest': 'split_field',
            'default': defaults.get('split_field', False),
            'help': ("Name of the field that should be used in the first"
                     " split of the model. One branch per category will"
                     " be created.")},
        # Set it to use the fields and the first node will split in one
        # branch per category using binary splits (only for categorical fields)
        '--focus-field': {
            'action': 'store',
            'dest': 'focus_field',
            'default': defaults.get('focus_field', False),
            'help': ("Name of the field that should be used in the first"
                     " split of the model. One branch per category will"
                     " be created.")},
        # Random candidates: Number of fields to be selected at random in
        # ensembles construction
        '--random-candidates': {
            'action': 'store',
            'dest': 'random_candidates',
            'default': defaults.get('random_candidates', 0),
            'type': int,
            'help': ("Number of fields selected at random in ensembles'"
                     " construction.")},
        # Ensemble seed. The value used in ensembles as seed
        '--ensemble-sample-seed': {
            'action': 'store',
            'dest': 'ensemble_sample_seed',
            'default': defaults.get('ensemble_sample_seed', None),
            'help': "Value used as seed in ensembles."},
        # Ensemble sampling to use when using bagging.
        '--ensemble-sample-rate': {
            'action': 'store',
            'dest': 'ensemble_sample_rate',
            'default': defaults.get('ensemble_sample_rate', 1.0),
            'type': float,
            'help': "Ensemble sampling rate for bagging."},
        # Ensemble replacement to use when using bagging.
        '--ensemble-sample-no-replacement': {
            'action': 'store_false',
            'dest': 'ensemble_sample_replacement',
            'default': defaults.get('ensemble_sample_replacement', True),
            'help': "Don't use replacement when bagging."},
        # Create a boosting ensemble
        '--boosting': {
            'action': 'store_true',
            'dest': 'boosting',
            'default': defaults.get('boosting', False),
            'help': "Create a boosted ensemble"},
        # Maximum number of iterations used in boosted ensembles.
        '--boosting-iterations': {
            'action': 'store',
            'dest': 'iterations',
            'default': defaults.get('iterations', None),
            'type': int,
            'help': ("Maximum number of iterations used in boosted"
                     " ensembles.")},
        # The portion of the dataset that will be held out for testing
        # at the end of every iteration.
        '--early-holdout': {
            'action': 'store',
            'dest': 'early_holdout',
            'default': defaults.get('early_holdout', None),
            'type': float,
            'help': ("The portion of the dataset that will be held out for"
                     " testing at the end of every iteration in boosted"
                     " ensembles (between 0 and 1).")},
        # Boosted ensemble: Causes the out of bag samples to be tested after
        # every iteration.
        '--no-early-out-of-bag': {
            'action': 'store_false',
            'dest': 'early_out_of_bag',
            'default': defaults.get('early_out_of_bag', True),
            'help': ("Causes the out of bag samples not to be tested after"
                     " every iteration in boosted ensembles.")},
        # It controls how aggressively the boosting algorithm will fit the data
        '--learning-rate': {
            'action': 'store',
            'dest': 'learning_rate',
            'default': defaults.get('learning_rate', None),
            'type': float,
            'help': ("It controls how aggressively the boosting algorithm"
                     " will fit the data in boosted"
                     " ensembles (between 0 and 1).")},
        # Boosted ensemble: the out_of_bag samples are tested after every
        # iteration to choose the gradient step size.
        '--no-step-out-of-bag': {
            'action': 'store_false',
            'dest': 'step_out_of_bag',
            'default': defaults.get('step_out_of_bag', True),
            'help': ("Causes the out of bag samples not to be tested after"
                     " every iteration to choose the gradient step size"
                     " in boosted ensembles.")},
        # Disables reports upload.
        '--no-upload': {
            'action': 'store_false',
            'dest': 'upload',
            'default': defaults.get('upload', True),
            'help': "Disables upload for reports"},
        # Use it to compute predictions locally.
        '--local': {
            'action': 'store_false',
            'dest': 'remote',
            'default': defaults.get('remote', False),
            'help': "Compute predictions locally"},
        # Deactivate replacement to use when using sampling.
        '--no-replacement': {
            'action': 'store_false',
            'dest': 'replacement',
            'default': defaults.get('replacement', False),
            'help': "Don't use replacement when sampling."},
        # Doesn't randomize feature selection at each split.
        '--no-randomize': {
            'action': 'store_false',
            'dest': 'randomize',
            'default': defaults.get('randomize', False),
            'help': ("Doesn't randomize feature selection at each"
                     " split.")},
        # Doesn't make model a public black-box model.
        '--no-black-box': {
            'action': 'store_false',
            'dest': 'black_box',
            'default': defaults.get('black_box', False),
            'help': "Doesn't make generated model black-box."},
        # Doesn't make model a public white-box model.
        '--no-white-box': {
            'action': 'store_false',
            'dest': 'white_box',
            'default': defaults.get('white_box', False),
            'help': "Doesn't make generated model white-box."},
        # Create a model just a dataset.
        '--no-no-model': {
            'action': 'store_false',
            'dest': 'no_model',
            'default': defaults.get('no_model', False),
            'help': "Create a model."},
        # Don't clear global bigmler log files
        '--no-clear-logs': {
            'action': 'store_false',
            'dest': 'clear_logs',
            'default': defaults.get('clear_logs', False),
            'help': "Don't clear global bigmler log files."},
        # Don't store the retrieved resources in the output directory
        '--no-store': {
            'action': 'store_false',
            'dest': 'store',
            'default': defaults.get('store', False),
            'help': ("Don't store the retrieved resources in the"
                     " output directory.")},
        # Multi-label. The objective field hasn't multiple labels.
        '--no-multi-label': {
            'action': 'store_false',
            'dest': 'multi_label',
            'default': defaults.get('multi_label', False),
            'help': "The objective field does not have multiple labels."},
        # Prediction-header.
        '--no-prediction-header': {
            'action': 'store_false',
            'dest': 'prediction_header',
            'default': defaults.get('prediction_header', False),
            'help': "Headers are not added to the prediction file."},
        # Batch predictions. Remote predictions are created in batch mode.
        '--batch': {
            'action': 'store_false',
            'dest': 'no_batch',
            'default': defaults.get('no_batch', False),
            'help': "Create remote predictions in batch."},
        # Multi-dataset. Generating a new dataset from a list of existing
        # datasets.
        '--no-multi-dataset': {
            'action': 'store_false',
            'dest': 'multi_dataset',
            'default': defaults.get('multi_dataset', False),
            'help': "Do not generate a new dataset."},
        # Shared. Shares all shareable resources and uses its shared links in
        # reports
        '--unshared': {
            'action': 'store_false',
            'dest': 'shared',
            'default': defaults.get('shared', False),
            'help': ("Share resources and use its shared urls "
                     " in reports.")},
        # Enables reports upload.
        '--upload': {
            'action': 'store_true',
            'dest': 'upload',
            'default': defaults.get('upload', True),
            'help': "Enables upload for reports"},
        # Dataset-off. Turning off the dataset-off flag.
        '--no-dataset-off': {
            'action': 'store_false',
            'dest': 'dataset_off',
            'default': defaults.get('dataset_off', False),
            'help': "Turning off the dataset-off flag."},
        # No missing_splits used: Don't include missing values in branches
        # of the tree.
        '--no-missing-splits': {
            'action': 'store_false',
            'dest': 'missing_splits',
            'default': defaults.get('missing_splits', False),
            'help': ("Turning off the --missing-splits flag: don't include"
                     " missing values in branches of the tree.")},
        # Used in models combinations, ensembles predictions. Keeps prediction
        # in memory to be combined and no partial results are stored in files.
        '--fast': {
            'action': 'store_true',
            'dest': 'fast',
            'default': defaults.get('fast', True),
            'help': ("Enables fast ensemble's predictions with no partial"
                     " results files.")},
        # Used in models combinations, ensembles predictions. Stores
        # predictions for each model in files that can be used and combined
        # later
        '--no-fast': {
            'action': 'store_false',
            'dest': 'fast',
            'default': defaults.get('fast', True),
            'help': ("Enables fast ensemble's predictions with partial"
                     " results files.")},
        # Does not create a csv as output of a batch prediction.
        '--no-csv': {
            'action': 'store_true',
            'dest': 'no_csv',
            'default': defaults.get('no_csv', False),
            'help': ("Do not create a csv file as output of a batch"
                     " prediction.")},
        # Create a csv as output (as opposed to --no-csv).
        '--no-no-csv': {
            'action': 'store_false',
            'dest': 'no_csv',
            'default': defaults.get('no_csv', False),
            'help': ("Create a csv file as output of a batch"
                     " prediction (as opposed to --no-csv)")},
        # Create a dataset as output of a batch prediction
        '--to-dataset': {
            'action': 'store_true',
            'dest': 'to_dataset',
            'default': defaults.get('to_dataset', False),
            # Fixed typo in user-facing help: "ouput" -> "output".
            'help': ("Create a dataset as output of a batch"
                     " prediction.")},
        # The path to a file containing the operating point description.
        '--operating-point': {
            'action': 'store',
            'dest': 'operating_point',
            'default': defaults.get('operating_point', None),
            'help': ("Path to a json file containing the operating "
                     "point description.")},
        # Use median as predicted value in local models predictions
        '--median': {
            'action': 'store_true',
            'dest': 'median',
            'default': defaults.get('median', False),
            # Fixed typos in user-facing help: "medtan instead on mean".
            'help': ("Use median instead of mean as node"
                     " prediction.")},
        # Use mean as predicted value in local models predictions
        '--no-median': {
            'action': 'store_false',
            'dest': 'median',
            'default': defaults.get('median', False),
            'help': ("Use mean instead of median as node"
                     " prediction.")}}
    return options
|
mdrumond/tensorflow
|
refs/heads/master
|
tensorflow/python/ops/distributions/kullback_leibler.py
|
35
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Registration and usage mechanisms for KL-divergences."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.util import tf_inspect
# Registry mapping a (type_a, type_b) pair of distribution classes to the
# function that computes KL(a || b). Populated via the RegisterKL decorator.
_DIVERGENCES = {}

# Public API of this module.
__all__ = [
    "RegisterKL",
    "kl_divergence",
]
def _registered_kl(type_a, type_b):
  """Get the KL function registered for classes a and b.

  Walks both class hierarchies (MROs) and returns the registered KL
  implementation whose pair of registered classes has the smallest summed
  MRO distance to `(type_a, type_b)`. Ties favor the earliest match,
  i.e. a shorter distance to `type_a`. Returns None if nothing matches.
  """
  hierarchy_a = tf_inspect.getmro(type_a)
  hierarchy_b = tf_inspect.getmro(type_b)
  best_fn = None
  best_dist = None
  for dist_a, parent_a in enumerate(hierarchy_a):
    for dist_b, parent_b in enumerate(hierarchy_b):
      candidate_fn = _DIVERGENCES.get((parent_a, parent_b), None)
      if candidate_fn is None:
        continue
      total_dist = dist_a + dist_b
      # Adopt the first registration found, then only replace it with one
      # whose registered classes are strictly closer to the input types.
      if best_fn is None or total_dist < best_dist:
        best_fn = candidate_fn
        best_dist = total_dist
  return best_fn
def kl_divergence(distribution_a, distribution_b,
                  allow_nan_stats=True, name=None):
  """Get the KL-divergence KL(distribution_a || distribution_b).
  If there is no KL method registered specifically for `type(distribution_a)`
  and `type(distribution_b)`, then the class hierarchies of these types are
  searched.
  If one KL method is registered between any pairs of classes in these two
  parent hierarchies, it is used.
  If more than one such registered method exists, the method whose registered
  classes have the shortest sum MRO paths to the input types is used.
  If more than one such shortest path exists, the first method
  identified in the search is used (favoring a shorter MRO distance to
  `type(distribution_a)`).
  Args:
    distribution_a: The first distribution.
    distribution_b: The second distribution.
    allow_nan_stats: Python `bool`, default `True`. When `True`,
      statistics (e.g., mean, mode, variance) use the value "`NaN`" to
      indicate the result is undefined. When `False`, an exception is raised
      if one or more of the statistic's batch members are undefined.
    name: Python `str` name prefixed to Ops created by this class.
  Returns:
    A Tensor with the batchwise KL-divergence between `distribution_a`
    and `distribution_b`.
  Raises:
    NotImplementedError: If no KL method is defined for distribution types
      of `distribution_a` and `distribution_b`.
  """
  # Resolve the most specific KL implementation registered (via RegisterKL)
  # for this pair of distribution types; see _registered_kl.
  kl_fn = _registered_kl(type(distribution_a), type(distribution_b))
  if kl_fn is None:
    raise NotImplementedError(
        "No KL(distribution_a || distribution_b) registered for distribution_a "
        "type %s and distribution_b type %s"
        % (type(distribution_a).__name__, type(distribution_b).__name__))
  with ops.name_scope("KullbackLeibler"):
    kl_t = kl_fn(distribution_a, distribution_b, name=name)
    if allow_nan_stats:
      # Caller accepts NaN results; return the raw KL tensor unchecked.
      return kl_t
    # Check KL for NaNs.
    # The identity op gives the KL tensor a stable name and a node on which
    # the NaN-check control dependency below can be hung.
    kl_t = array_ops.identity(kl_t, name="kl")
    with ops.control_dependencies([
        control_flow_ops.Assert(
            # Graph execution fails with the message below if any batch
            # member of the KL tensor is NaN.
            math_ops.logical_not(
                math_ops.reduce_any(math_ops.is_nan(kl_t))),
            ["KL calculation between %s and %s returned NaN values "
             "(and was called with allow_nan_stats=False). Values:"
             % (distribution_a.name, distribution_b.name), kl_t])]):
      return array_ops.identity(kl_t, name="checked_kl")
class RegisterKL(object):
  """Decorator to register a KL divergence implementation function.

  Usage:

  @distributions.RegisterKL(distributions.Normal, distributions.Normal)
  def _kl_normal_mvn(norm_a, norm_b):
    # Return KL(norm_a || norm_b)
  """

  def __init__(self, dist_cls_a, dist_cls_b):
    """Initialize the KL registrar.

    Args:
      dist_cls_a: the class of the first argument of the KL divergence.
      dist_cls_b: the class of the second argument of the KL divergence.
    """
    self._key = (dist_cls_a, dist_cls_b)

  def __call__(self, kl_fn):
    """Perform the KL registration.

    Args:
      kl_fn: The function to use for the KL divergence.

    Returns:
      kl_fn

    Raises:
      TypeError: if kl_fn is not a callable.
      ValueError: if a KL divergence function has already been registered for
        the given argument classes.
    """
    # Validate before touching the registry so bad arguments never mutate it.
    if not callable(kl_fn):
      raise TypeError("kl_fn must be callable, received: %s" % kl_fn)
    key = self._key
    if key in _DIVERGENCES:
      raise ValueError("KL(%s || %s) has already been registered to: %s"
                       % (key[0].__name__, key[1].__name__,
                          _DIVERGENCES[key]))
    _DIVERGENCES[key] = kl_fn
    return kl_fn
|
DepthDeluxe/ansible
|
refs/heads/devel
|
test/compile/compile.py
|
126
|
#!/usr/bin/env python
"""Python syntax checker with lint friendly output."""
import os
import parser
import re
import sys
def main():
    """Entry point: parse CLI options, expand and filter the target paths,
    then syntax-check every resulting file (exits non-zero on failure)."""
    paths, verbose, skip_patterns = parse_options()
    paths = filter_paths(paths, skip_patterns)
    check(paths, verbose)
def parse_options():
    """Parse sys.argv by hand (no argparse dependency).

    Recognized flags: ``-v`` (verbose) and ``-x PATTERN`` (regex of paths
    to skip, repeatable). Everything else is treated as a path.

    Returns:
        (paths, verbose, skip_patterns) where skip_patterns is a list of
        compiled regex objects.

    Raises:
        Exception: on an unknown flag or a ``-x`` with no pattern.
    """
    paths = []
    skip_patterns = []
    verbose = False
    pending = None
    valid_options = [
        '-x',
        '-v',
    ]
    for argument in sys.argv[1:]:
        if pending == '-x':
            # Previous flag was -x: this argument is its pattern.
            skip_patterns.append(re.compile(argument))
            pending = None
        elif argument.startswith('-'):
            if argument not in valid_options:
                raise Exception('Unknown Option: %s' % argument)
            if argument == '-v':
                verbose = True
            else:
                pending = argument
        else:
            paths.append(argument)
    if pending:
        # A flag that expects a value was the last argument.
        raise Exception('Incomplete Option: %s' % pending)
    return paths, verbose, skip_patterns
def filter_paths(paths, skip_patterns):
    """Expand directories into their ``.py`` files and drop skipped paths.

    Directories are walked recursively (hidden directories are pruned);
    non-directory arguments are kept as-is. The expanded list is sorted
    and any path matching one of the compiled ``skip_patterns`` is removed.
    An empty ``paths`` defaults to the current directory.
    """
    if not paths:
        paths = ['.']

    expanded = []
    for candidate in paths:
        if os.path.isdir(candidate):
            for root, directories, files in os.walk(candidate):
                # Prune hidden directories in place so os.walk skips them.
                directories[:] = [d for d in directories
                                  if not d.startswith('.')]
                for name in files:
                    if name.endswith('.py'):
                        expanded.append(os.path.join(root, name))
        else:
            expanded.append(candidate)

    return [path for path in sorted(expanded)
            if not any(pattern.search(path) for pattern in skip_patterns)]
def check(paths, verbose):
    """Syntax-check each file in ``paths``; exit non-zero if any fail.

    Writes one lint-friendly line per failure to stdout:
    ``path:line:col: SyntaxError: message``. With ``verbose``, each path
    is echoed to stderr before being checked.
    """
    status = 0

    for path in paths:
        if verbose:
            sys.stderr.write('%s\n' % path)
            sys.stderr.flush()

        # with-statement replaces the manual try/finally close of the
        # original and guarantees the handle is released on any error.
        with open(path, 'r') as source_fd:
            source = source_fd.read()

        try:
            # NOTE(review): the `parser` module is deprecated and removed in
            # Python 3.10+; a port would use compile(source, path, 'exec').
            parser.suite(source)
        except SyntaxError:
            ex_type, ex, ex_traceback = sys.exc_info()
            status = 1
            message = ex.text.splitlines()[0].strip()
            sys.stdout.write("%s:%d:%d: SyntaxError: %s\n" % (path, ex.lineno, ex.offset, message))
            sys.stdout.flush()

    sys.exit(status)
# Run only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
|
ice9js/servo
|
refs/heads/master
|
tests/unit/net/cookie_http_state_utils.py
|
111
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import subprocess
import tempfile
REPO = "https://github.com/abarth/http-state.git"
TEST_FILE = "cookie_http_state.rs"
DOMAIN = "http://home.example.org:8888"
RUST_FN = """
#[test]{should_panic}
fn test_{name}() {{
let r = run("{set_location}",
{set_cookies},
"{location}");
assert_eq!(&r, "{expect}");
}}
"""
SET_COOKIES_INDENT = 18
SHOULD_PANIC = "\n#[should_panic] // Look at cookie_http_state_utils.py if this test fails"
# Those tests should PASS. But until fixes land in servo, keep them failing
FAILING_TESTS = [
"0003", # Waiting for a way to clean expired cookies
"0006", # Waiting for a way to clean expired cookies
"mozilla0001", # Waiting for a way to clean expired cookies
"mozilla0002", # Waiting for a way to clean expired cookies
"mozilla0003", # Waiting for a way to clean expired cookies
"mozilla0005", # Waiting for a way to clean expired cookies
"mozilla0007", # Waiting for a way to clean expired cookies
"mozilla0009", # Waiting for a way to clean expired cookies
"mozilla0010", # Waiting for a way to clean expired cookies
"mozilla0013", # Waiting for a way to clean expired cookies
]
def list_tests(dir):
    """Return the sorted test names found in `dir`.

    A test is a directory entry ending in "-test" whose name does not start
    with "disabled"; the returned names have the suffix stripped.
    """
    suffix = "-test"
    names = []
    for entry in os.listdir(dir):
        if entry.endswith(suffix) and not entry.startswith("disabled"):
            names.append(entry[:-len(suffix)])
    names.sort()
    return names
def escape(s):
    """Escape the string `s` so that it can be parsed by rust as a valid
    UTF-8 string.

    We can't use only `encode("unicode_escape")` as it produces things that
    rust does not accept ("\\xbf", "\\u6265" for example). So we manually
    convert every character whose code point is greater than 128 to
    \\u{code_point}.

    All other characters are encoded with "unicode_escape" to get escape
    sequences ("\\r" for example) except for `"` that we specifically escape
    because our string will be quoted by double-quotes.

    Lines are also limited in size, so split the string every 70 characters
    (gives room for indentation).
    """
    res = ""
    last_split = 0
    for c in s:
        # Keep generated rust lines short: emit a line-continuation every
        # 70 characters of output.
        if len(res) - last_split > 70:
            res += "\\\n"
            last_split = len(res)
        o = ord(c)
        if o == 34:
            # '"' must be escaped because the result is wrapped in quotes.
            res += "\\\""
            continue
        if o >= 128:
            # Non-ASCII: use rust's \u{...} escape syntax.
            res += "\\u{" + hex(o)[2:] + "}"
        else:
            # BUG FIX: on Python 3, str.encode() returns bytes, so the
            # original `res += c.encode("unicode_escape")` raised TypeError.
            # Decoding back to str works on both Python 2 and 3.
            res += c.encode("unicode_escape").decode("ascii")
    return res
def format_slice_cookies(cookies):
    """Render `cookies` as a rust string-slice literal, e.g. &["a", "b"].

    Short lists stay on one line; longer ones get one cookie per line,
    indented by SET_COOKIES_INDENT to line up under the `run(` call site.
    """
    quoted = []
    for cookie in cookies:
        quoted.append('"%s"' % escape(cookie))
    total_len = sum(len(item) for item in quoted)
    sep = ", " if total_len < 80 else ",\n" + " " * SET_COOKIES_INDENT
    return "&[%s]" % sep.join(quoted)
def generate_code_for_test(test_dir, name):
    """Render one rust #[test] function (via RUST_FN) for the http-state
    test case `name`, reading its "-test" and "-expected" files."""
    # Tests known to fail in servo are marked #[should_panic] instead of
    # being skipped, so a fix is noticed when they start passing.
    if name in FAILING_TESTS:
        should_panic = SHOULD_PANIC
    else:
        should_panic = ""
    test_file = os.path.join(test_dir, name + "-test")
    expect_file = os.path.join(test_dir, name + "-expected")
    set_cookies = []
    set_location = DOMAIN + "/cookie-parser?" + name
    expect = ""
    location = DOMAIN + "/cookie-parser-result?" + name
    with open(test_file) as fo:
        for line in fo:
            # Python 2 idiom: file lines are byte strings, decoded here.
            line = line.decode("utf-8").rstrip()
            prefix = "Set-Cookie: "
            if line.startswith(prefix):
                set_cookies.append(line[len(prefix):])
            prefix = "Location: "
            if line.startswith(prefix):
                # An explicit Location header overrides the default result
                # URL; relative paths are made absolute against DOMAIN.
                location = line[len(prefix):]
                if location.startswith("/"):
                    location = DOMAIN + location
    with open(expect_file) as fo:
        for line in fo:
            line = line.decode("utf-8").rstrip()
            prefix = "Cookie: "
            if line.startswith(prefix):
                # Last "Cookie:" line wins; default is the empty string.
                expect = line[len(prefix):]
    return RUST_FN.format(name=name.replace('-', '_'),
                          set_location=escape(set_location),
                          set_cookies=format_slice_cookies(set_cookies),
                          should_panic=should_panic,
                          location=escape(location),
                          expect=escape(expect))
def update_test_file(cachedir):
    """Regenerate TEST_FILE from the abarth/http-state repository.

    Clones (or pulls) the repo into `cachedir`, truncates everything in the
    unit-test file after the "// Test listing" delimiter, then appends one
    generated rust test per upstream test case.  Returns 0 on success, 1 on
    failure (git error or missing delimiter).
    """
    workdir = os.path.dirname(os.path.realpath(__file__))
    test_file = os.path.join(workdir, TEST_FILE)
    # Create the cache dir
    if not os.path.isdir(cachedir):
        os.makedirs(cachedir)
    # Clone or update the repo
    repo_dir = os.path.join(cachedir, "http-state")
    if os.path.isdir(repo_dir):
        args = ["git", "pull", "-f"]
        process = subprocess.Popen(args, cwd=repo_dir)
        if process.wait() != 0:
            print("failed to update the http-state git repo")
            return 1
    else:
        args = ["git", "clone", REPO, repo_dir]
        process = subprocess.Popen(args)
        if process.wait() != 0:
            print("failed to clone the http-state git repo")
            return 1
    # Truncate the unit test file to remove all existing tests
    with open(test_file, "r+") as fo:
        while True:
            line = fo.readline()
            if line.strip() == "// Test listing":
                # truncate() with no argument cuts the file at the current
                # read position, i.e. just after the delimiter line.
                fo.truncate()
                fo.flush()
                break
            if line == "":
                # EOF reached without finding the delimiter.
                print("Failed to find listing delimiter on unit test file")
                return 1
    # Append all tests to unit test file
    tests_dir = os.path.join(repo_dir, "tests", "data", "parser")
    with open(test_file, "a") as fo:
        for test in list_tests(tests_dir):
            # Python 2 idiom: encode to a UTF-8 byte string before writing.
            fo.write(generate_code_for_test(tests_dir, test).encode("utf-8"))
    return 0
if __name__ == "__main__":
    # Default cache location: the system temp directory.
    update_test_file(tempfile.gettempdir())
|
windedge/odoomrp-wip
|
refs/heads/8.0
|
mrp_bom_by_percentage/models/mrp_bom.py
|
20
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, fields, api, exceptions, _
import openerp.addons.decimal_precision as dp
class MrpBom(models.Model):
    """Extend mrp.bom so a BoM can be defined by percentage: line
    quantities are then percentages that must add up to exactly 100."""
    _inherit = 'mrp.bom'
    @api.one
    @api.depends('bom_line_ids', 'bom_line_ids.product_qty')
    def _compute_qtytoconsume(self):
        # Total quantity over all BoM lines; in percentage mode this is the
        # sum of the percentages and must equal 100 (see constraint below).
        self.qty_to_consume = sum(x.product_qty for x in self.bom_line_ids)
    by_percentage = fields.Boolean(string='Produce by percentage')
    qty_to_consume = fields.Float(
        string='QTY to consume', compute='_compute_qtytoconsume',
        digits=dp.get_precision('Product Unit of Measure'))
    @api.one
    @api.onchange('by_percentage', 'bom_line_ids')
    def onchange_by_percentage(self):
        # Keep the computed total in sync in the UI and force the produced
        # quantity to 100 when switching to percentage mode.
        self.qty_to_consume = sum(x.product_qty for x in self.bom_line_ids)
        if self.by_percentage:
            self.product_qty = 100
    @api.one
    @api.constrains('by_percentage', 'qty_to_consume', 'bom_line_ids')
    def _check_by_percentage(self):
        # Reject saving a percentage-based BoM whose lines do not sum to 100.
        if self.by_percentage and self.qty_to_consume != 100:
            raise exceptions.Warning(_('Quantity to consume <> 100'))
|
hexxter/home-assistant
|
refs/heads/dev
|
homeassistant/components/notify/command_line.py
|
6
|
"""
Support for command line notification services.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.command_line/
"""
import logging
import subprocess
import voluptuous as vol
from homeassistant.const import (CONF_COMMAND, CONF_NAME)
from homeassistant.components.notify import (
BaseNotificationService, PLATFORM_SCHEMA)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
# 'command' is mandatory; 'name' stays optional (the notify component
# falls back to its own default service name).
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_COMMAND): cv.string,
    vol.Optional(CONF_NAME): cv.string,
})
def get_service(hass, config):
    """Return a notification service wrapping the configured shell command."""
    return CommandLineNotificationService(config[CONF_COMMAND])
# pylint: disable=too-few-public-methods
class CommandLineNotificationService(BaseNotificationService):
    """Implement the notification service for the Command Line service."""
    def __init__(self, command):
        """Initialize the service with the shell command to run."""
        self.command = command
    def send_message(self, message="", **kwargs):
        """Send a message to a command line.

        The message is written to the command's stdin; a non-zero exit
        status is logged as an error.
        """
        try:
            # shell=True so the configured command string may contain pipes
            # and arguments; universal_newlines=True makes stdin text-mode.
            proc = subprocess.Popen(self.command, universal_newlines=True,
                                    stdin=subprocess.PIPE, shell=True)
            proc.communicate(input=message)
            if proc.returncode != 0:
                _LOGGER.error('Command failed: %s', self.command)
        except subprocess.SubprocessError:
            # NOTE(review): OSError raised by Popen itself is not caught
            # here — presumably fine since shell=True rarely raises, but
            # confirm for misconfigured commands.
            _LOGGER.error('Error trying to exec Command: %s', self.command)
|
postlund/home-assistant
|
refs/heads/dev
|
tests/components/search/__init__.py
|
23
|
"""Tests for the Search integration."""
|
SujaySKumar/django
|
refs/heads/master
|
django/conf/locale/cs/formats.py
|
504
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# 'E' is the locale-specific alternative (inflected) month name needed for
# correct Czech declension; 'G' is the 24-hour clock without leading zero.
DATE_FORMAT = 'j. E Y'
TIME_FORMAT = 'G:i'
DATETIME_FORMAT = 'j. E Y G:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y G:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    '%d.%m.%Y', '%d.%m.%y', # '05.01.2006', '05.01.06'
    '%d. %m. %Y', '%d. %m. %y', # '5. 1. 2006', '5. 1. 06'
    # '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
]
# Kept ISO formats as one is in first position
TIME_INPUT_FORMATS = [
    '%H:%M:%S', # '04:30:59'
    '%H.%M', # '04.30'
    '%H:%M', # '04:30'
]
DATETIME_INPUT_FORMATS = [
    '%d.%m.%Y %H:%M:%S', # '05.01.2006 04:30:59'
    '%d.%m.%Y %H:%M:%S.%f', # '05.01.2006 04:30:59.000200'
    '%d.%m.%Y %H.%M', # '05.01.2006 04.30'
    '%d.%m.%Y %H:%M', # '05.01.2006 04:30'
    '%d.%m.%Y', # '05.01.2006'
    '%d. %m. %Y %H:%M:%S', # '05. 01. 2006 04:30:59'
    '%d. %m. %Y %H:%M:%S.%f', # '05. 01. 2006 04:30:59.000200'
    '%d. %m. %Y %H.%M', # '05. 01. 2006 04.30'
    '%d. %m. %Y %H:%M', # '05. 01. 2006 04:30'
    '%d. %m. %Y', # '05. 01. 2006'
    '%Y-%m-%d %H.%M', # '2006-01-05 04.30'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
|
coursemdetw/2014c2
|
refs/heads/master
|
exts/w2/static/Brython2.0.0-20140209-164925/Lib/xml/dom/minidom.py
|
727
|
"""Simple implementation of the Level 1 DOM.
Namespaces and other minor Level 2 features are also supported.
parse("foo.xml")
parseString("<foo><bar/></foo>")
Todo:
=====
* convenience methods for getting elements and text.
* more testing
* bring some of the writer and linearizer code into conformance with this
interface
* SAX 2 namespaces
"""
import io
import xml.dom
from xml.dom import EMPTY_NAMESPACE, EMPTY_PREFIX, XMLNS_NAMESPACE, domreg
from xml.dom.minicompat import *
from xml.dom.xmlbuilder import DOMImplementationLS, DocumentLS
# This is used by the ID-cache invalidation checks; the list isn't
# actually complete, since the nodes being checked will never be the
# DOCUMENT_NODE or DOCUMENT_FRAGMENT_NODE. (The node being checked is
# the node being added or removed, not the node being modified.)
#
_nodeTypes_with_children = (xml.dom.Node.ELEMENT_NODE,
xml.dom.Node.ENTITY_REFERENCE_NODE)
class Node(xml.dom.Node):
    """Base class for all minidom nodes.

    Subclasses set nodeType/nodeName and override what they need; the
    parent/sibling links below default to None for detached nodes.
    """
    namespaceURI = None # this is non-null only for elements and attributes
    parentNode = None
    ownerDocument = None
    nextSibling = None
    previousSibling = None
    prefix = EMPTY_PREFIX # non-null only for NS elements and attributes
    def __bool__(self):
        # A node is always truthy, even with no children.
        return True
    def toxml(self, encoding=None):
        # Plain serialization: pretty-printing with empty indent/newline.
        return self.toprettyxml("", "", encoding)
    def toprettyxml(self, indent="\t", newl="\n", encoding=None):
        """Serialize the subtree; returns str, or bytes when `encoding` is
        given."""
        if encoding is None:
            writer = io.StringIO()
        else:
            writer = io.TextIOWrapper(io.BytesIO(),
                                      encoding=encoding,
                                      errors="xmlcharrefreplace",
                                      newline='\n')
        if self.nodeType == Node.DOCUMENT_NODE:
            # Can pass encoding only to document, to put it into XML header
            self.writexml(writer, "", indent, newl, encoding)
        else:
            self.writexml(writer, "", indent, newl)
        if encoding is None:
            return writer.getvalue()
        else:
            # detach() hands back the BytesIO holding the encoded output.
            return writer.detach().getvalue()
    def hasChildNodes(self):
        return bool(self.childNodes)
    def _get_childNodes(self):
        return self.childNodes
    def _get_firstChild(self):
        # Returns None implicitly when there are no children.
        if self.childNodes:
            return self.childNodes[0]
    def _get_lastChild(self):
        if self.childNodes:
            return self.childNodes[-1]
    def insertBefore(self, newChild, refChild):
        """Insert newChild before refChild (append when refChild is None)."""
        if newChild.nodeType == self.DOCUMENT_FRAGMENT_NODE:
            # Fragments are spliced in child by child.
            for c in tuple(newChild.childNodes):
                self.insertBefore(c, refChild)
            ### The DOM does not clearly specify what to return in this case
            return newChild
        if newChild.nodeType not in self._child_node_types:
            raise xml.dom.HierarchyRequestErr(
                "%s cannot be child of %s" % (repr(newChild), repr(self)))
        if newChild.parentNode is not None:
            # Re-parenting: detach from the old parent first.
            newChild.parentNode.removeChild(newChild)
        if refChild is None:
            self.appendChild(newChild)
        else:
            try:
                index = self.childNodes.index(refChild)
            except ValueError:
                raise xml.dom.NotFoundErr()
            if newChild.nodeType in _nodeTypes_with_children:
                _clear_id_cache(self)
            self.childNodes.insert(index, newChild)
            # Patch sibling links on both sides of the insertion point.
            newChild.nextSibling = refChild
            refChild.previousSibling = newChild
            if index:
                node = self.childNodes[index-1]
                node.nextSibling = newChild
                newChild.previousSibling = node
            else:
                newChild.previousSibling = None
            newChild.parentNode = self
        return newChild
    def appendChild(self, node):
        """Append `node` as the last child and return it."""
        if node.nodeType == self.DOCUMENT_FRAGMENT_NODE:
            for c in tuple(node.childNodes):
                self.appendChild(c)
            ### The DOM does not clearly specify what to return in this case
            return node
        if node.nodeType not in self._child_node_types:
            raise xml.dom.HierarchyRequestErr(
                "%s cannot be child of %s" % (repr(node), repr(self)))
        elif node.nodeType in _nodeTypes_with_children:
            _clear_id_cache(self)
        if node.parentNode is not None:
            node.parentNode.removeChild(node)
        _append_child(self, node)
        node.nextSibling = None
        return node
    def replaceChild(self, newChild, oldChild):
        """Swap oldChild for newChild; returns the removed child."""
        if newChild.nodeType == self.DOCUMENT_FRAGMENT_NODE:
            refChild = oldChild.nextSibling
            self.removeChild(oldChild)
            return self.insertBefore(newChild, refChild)
        if newChild.nodeType not in self._child_node_types:
            raise xml.dom.HierarchyRequestErr(
                "%s cannot be child of %s" % (repr(newChild), repr(self)))
        if newChild is oldChild:
            # Replacing a node with itself is a no-op (returns None).
            return
        if newChild.parentNode is not None:
            newChild.parentNode.removeChild(newChild)
        try:
            index = self.childNodes.index(oldChild)
        except ValueError:
            raise xml.dom.NotFoundErr()
        self.childNodes[index] = newChild
        newChild.parentNode = self
        oldChild.parentNode = None
        if (newChild.nodeType in _nodeTypes_with_children
            or oldChild.nodeType in _nodeTypes_with_children):
            _clear_id_cache(self)
        # Transfer sibling links from the old child to the new one.
        newChild.nextSibling = oldChild.nextSibling
        newChild.previousSibling = oldChild.previousSibling
        oldChild.nextSibling = None
        oldChild.previousSibling = None
        if newChild.previousSibling:
            newChild.previousSibling.nextSibling = newChild
        if newChild.nextSibling:
            newChild.nextSibling.previousSibling = newChild
        return oldChild
    def removeChild(self, oldChild):
        """Detach `oldChild` from this node and return it."""
        try:
            self.childNodes.remove(oldChild)
        except ValueError:
            raise xml.dom.NotFoundErr()
        if oldChild.nextSibling is not None:
            oldChild.nextSibling.previousSibling = oldChild.previousSibling
        if oldChild.previousSibling is not None:
            oldChild.previousSibling.nextSibling = oldChild.nextSibling
        oldChild.nextSibling = oldChild.previousSibling = None
        if oldChild.nodeType in _nodeTypes_with_children:
            _clear_id_cache(self)
        oldChild.parentNode = None
        return oldChild
    def normalize(self):
        """Merge adjacent text nodes and drop empty ones, recursively."""
        L = []
        for child in self.childNodes:
            if child.nodeType == Node.TEXT_NODE:
                if not child.data:
                    # empty text node; discard
                    if L:
                        L[-1].nextSibling = child.nextSibling
                    if child.nextSibling:
                        child.nextSibling.previousSibling = child.previousSibling
                    child.unlink()
                elif L and L[-1].nodeType == child.nodeType:
                    # collapse text node
                    node = L[-1]
                    node.data = node.data + child.data
                    node.nextSibling = child.nextSibling
                    if child.nextSibling:
                        child.nextSibling.previousSibling = node
                    child.unlink()
                else:
                    L.append(child)
            else:
                L.append(child)
                if child.nodeType == Node.ELEMENT_NODE:
                    child.normalize()
        self.childNodes[:] = L
    def cloneNode(self, deep):
        return _clone_node(self, deep, self.ownerDocument or self)
    def isSupported(self, feature, version):
        return self.ownerDocument.implementation.hasFeature(feature, version)
    def _get_localName(self):
        # Overridden in Element and Attr where localName can be Non-Null
        return None
    # Node interfaces from Level 3 (WD 9 April 2002)
    def isSameNode(self, other):
        return self is other
    def getInterface(self, feature):
        if self.isSupported(feature, None):
            return self
        else:
            return None
    # The "user data" functions use a dictionary that is only present
    # if some user data has been set, so be careful not to assume it
    # exists.
    def getUserData(self, key):
        try:
            return self._user_data[key][0]
        except (AttributeError, KeyError):
            return None
    def setUserData(self, key, data, handler):
        """Set (or, with data=None, remove) user data; returns the previous
        value for `key`, if any."""
        old = None
        try:
            d = self._user_data
        except AttributeError:
            d = {}
            self._user_data = d
        if key in d:
            old = d[key][0]
        if data is None:
            # ignore handlers passed for None
            handler = None
            if old is not None:
                del d[key]
        else:
            d[key] = (data, handler)
        return old
    def _call_user_data_handler(self, operation, src, dst):
        # Notify registered user-data handlers (clone/import/etc. hooks).
        if hasattr(self, "_user_data"):
            for key, (data, handler) in list(self._user_data.items()):
                if handler is not None:
                    handler.handle(operation, key, data, src, dst)
    # minidom-specific API:
    def unlink(self):
        """Break parent/child/sibling reference cycles so the tree can be
        garbage-collected promptly."""
        self.parentNode = self.ownerDocument = None
        if self.childNodes:
            for child in self.childNodes:
                child.unlink()
            self.childNodes = NodeList()
        self.previousSibling = None
        self.nextSibling = None
    # A Node is its own context manager, to ensure that an unlink() call occurs.
    # This is similar to how a file object works.
    def __enter__(self):
        return self
    def __exit__(self, et, ev, tb):
        self.unlink()
# Expose the _get_* accessors above as read-only DOM properties.
defproperty(Node, "firstChild", doc="First child node, or None.")
defproperty(Node, "lastChild", doc="Last child node, or None.")
defproperty(Node, "localName", doc="Namespace-local name of this node.")
def _append_child(self, node):
# fast path with less checks; usable by DOM builders if careful
childNodes = self.childNodes
if childNodes:
last = childNodes[-1]
node.previousSibling = last
last.nextSibling = node
childNodes.append(node)
node.parentNode = self
def _in_document(node):
# return True iff node is part of a document tree
while node is not None:
if node.nodeType == Node.DOCUMENT_NODE:
return True
node = node.parentNode
return False
def _write_data(writer, data):
"Writes datachars to writer."
if data:
data = data.replace("&", "&").replace("<", "<"). \
replace("\"", """).replace(">", ">")
writer.write(data)
def _get_elements_by_tagName_helper(parent, name, rc):
for node in parent.childNodes:
if node.nodeType == Node.ELEMENT_NODE and \
(name == "*" or node.tagName == name):
rc.append(node)
_get_elements_by_tagName_helper(node, name, rc)
return rc
def _get_elements_by_tagName_ns_helper(parent, nsURI, localName, rc):
for node in parent.childNodes:
if node.nodeType == Node.ELEMENT_NODE:
if ((localName == "*" or node.localName == localName) and
(nsURI == "*" or node.namespaceURI == nsURI)):
rc.append(node)
_get_elements_by_tagName_ns_helper(node, nsURI, localName, rc)
return rc
class DocumentFragment(Node):
    """A lightweight container node.

    When inserted into a tree, its children are spliced in individually
    (see Node.insertBefore/appendChild) and the fragment itself is not.
    """
    nodeType = Node.DOCUMENT_FRAGMENT_NODE
    nodeName = "#document-fragment"
    nodeValue = None
    attributes = None
    parentNode = None
    _child_node_types = (Node.ELEMENT_NODE,
                         Node.TEXT_NODE,
                         Node.CDATA_SECTION_NODE,
                         Node.ENTITY_REFERENCE_NODE,
                         Node.PROCESSING_INSTRUCTION_NODE,
                         Node.COMMENT_NODE,
                         Node.NOTATION_NODE)
    def __init__(self):
        self.childNodes = NodeList()
class Attr(Node):
    """A single attribute node.

    The attribute's value is mirrored in a single Text child node so the
    child-based DOM APIs work; the `value`/`nodeValue` properties keep
    both representations in sync.
    """
    __slots__=('_name', '_value', 'namespaceURI',
               '_prefix', 'childNodes', '_localName', 'ownerDocument', 'ownerElement')
    nodeType = Node.ATTRIBUTE_NODE
    attributes = None
    specified = False
    _is_id = False
    _child_node_types = (Node.TEXT_NODE, Node.ENTITY_REFERENCE_NODE)
    def __init__(self, qName, namespaceURI=EMPTY_NAMESPACE, localName=None,
                 prefix=None):
        self.ownerElement = None
        self._name = qName
        self.namespaceURI = namespaceURI
        self._prefix = prefix
        self.childNodes = NodeList()
        # Add the single child node that represents the value of the attr
        self.childNodes.append(Text())
        # nodeValue and value are set elsewhere
    def _get_localName(self):
        try:
            return self._localName
        except AttributeError:
            # Fall back to the part of the qualified name after the colon.
            return self.nodeName.split(":", 1)[-1]
    def _get_specified(self):
        return self.specified
    # BUG FIX: a duplicate, shadowed `_get_name` returning `self.name` was
    # removed here; had it ever been bound as the property getter it would
    # have recursed infinitely.
    def _get_name(self):
        return self._name
    def _set_name(self, value):
        self._name = value
        if self.ownerElement is not None:
            _clear_id_cache(self.ownerElement)
    nodeName = name = property(_get_name, _set_name)
    def _get_value(self):
        return self._value
    def _set_value(self, value):
        # Keep the mirrored Text child in sync with the new value.
        # (A redundant duplicated assignment was removed here.)
        self._value = value
        self.childNodes[0].data = value
        if self.ownerElement is not None:
            _clear_id_cache(self.ownerElement)
    nodeValue = value = property(_get_value, _set_value)
    def _get_prefix(self):
        return self._prefix
    def _set_prefix(self, prefix):
        """Change the namespace prefix, rewriting the qualified name."""
        nsuri = self.namespaceURI
        if prefix == "xmlns":
            if nsuri and nsuri != XMLNS_NAMESPACE:
                raise xml.dom.NamespaceErr(
                    "illegal use of 'xmlns' prefix for the wrong namespace")
        self._prefix = prefix
        if prefix is None:
            newName = self.localName
        else:
            newName = "%s:%s" % (prefix, self.localName)
        if self.ownerElement:
            _clear_id_cache(self.ownerElement)
        self.name = newName
    prefix = property(_get_prefix, _set_prefix)
    def unlink(self):
        # This implementation does not call the base implementation
        # since most of that is not needed, and the expense of the
        # method call is not warranted.  We duplicate the removal of
        # children, but that's all we needed from the base class.
        elem = self.ownerElement
        if elem is not None:
            # Drop both indexes on the owning element, plus the magic-ID
            # bookkeeping when this attribute was marked as an ID.
            del elem._attrs[self.nodeName]
            del elem._attrsNS[(self.namespaceURI, self.localName)]
            if self._is_id:
                self._is_id = False
                elem._magic_id_nodes -= 1
                self.ownerDocument._magic_id_count -= 1
        for child in self.childNodes:
            child.unlink()
        del self.childNodes[:]
    def _get_isId(self):
        """True when this attribute is an ID: either explicitly marked or
        declared so by the document's schema info."""
        if self._is_id:
            return True
        doc = self.ownerDocument
        elem = self.ownerElement
        if doc is None or elem is None:
            return False
        info = doc._get_elem_info(elem)
        if info is None:
            return False
        if self.namespaceURI:
            return info.isIdNS(self.namespaceURI, self.localName)
        else:
            return info.isId(self.nodeName)
    def _get_schemaType(self):
        # Schema type from the document's element info, or the shared
        # placeholder when nothing is known.
        doc = self.ownerDocument
        elem = self.ownerElement
        if doc is None or elem is None:
            return _no_type
        info = doc._get_elem_info(elem)
        if info is None:
            return _no_type
        if self.namespaceURI:
            return info.getAttributeTypeNS(self.namespaceURI, self.localName)
        else:
            return info.getAttributeType(self.nodeName)
# Read-only DOM properties backed by the Attr._get_* accessors.
defproperty(Attr, "isId", doc="True if this attribute is an ID.")
defproperty(Attr, "localName", doc="Namespace-local name of this attribute.")
defproperty(Attr, "schemaType", doc="Schema type for this attribute.")
class NamedNodeMap(object):
    """The attribute list is a transient interface to the underlying
    dictionaries. Mutations here will change the underlying element's
    dictionary.
    Ordering is imposed artificially and does not reflect the order of
    attributes as found in an input document.
    """
    __slots__ = ('_attrs', '_attrsNS', '_ownerElement')
    def __init__(self, attrs, attrsNS, ownerElement):
        # The dicts are shared (not copied) with the owner Element, so
        # mutations through the map are visible on the element.
        self._attrs = attrs
        self._attrsNS = attrsNS
        self._ownerElement = ownerElement
    def _get_length(self):
        return len(self._attrs)
    def item(self, index):
        """Return the attribute at `index` (arbitrary order), or None."""
        try:
            return self[list(self._attrs.keys())[index]]
        except IndexError:
            return None
    def items(self):
        # List of (qualified name, value) pairs.
        L = []
        for node in self._attrs.values():
            L.append((node.nodeName, node.value))
        return L
    def itemsNS(self):
        # List of ((namespaceURI, localName), value) pairs.
        L = []
        for node in self._attrs.values():
            L.append(((node.namespaceURI, node.localName), node.value))
        return L
    def __contains__(self, key):
        # A plain string is a qualified name; anything else is treated as
        # a (namespaceURI, localName) tuple.
        if isinstance(key, str):
            return key in self._attrs
        else:
            return key in self._attrsNS
    def keys(self):
        return self._attrs.keys()
    def keysNS(self):
        return self._attrsNS.keys()
    def values(self):
        return self._attrs.values()
    def get(self, name, value=None):
        return self._attrs.get(name, value)
    __len__ = _get_length
    def _cmp(self, other):
        # Maps are equal iff they wrap the same attribute dict; otherwise
        # order deterministically (if arbitrarily) by object identity.
        if self._attrs is getattr(other, "_attrs", None):
            return 0
        else:
            return (id(self) > id(other)) - (id(self) < id(other))
    def __eq__(self, other):
        return self._cmp(other) == 0
    def __ge__(self, other):
        return self._cmp(other) >= 0
    def __gt__(self, other):
        return self._cmp(other) > 0
    def __le__(self, other):
        return self._cmp(other) <= 0
    def __lt__(self, other):
        return self._cmp(other) < 0
    def __ne__(self, other):
        return self._cmp(other) != 0
    def __getitem__(self, attname_or_tuple):
        if isinstance(attname_or_tuple, tuple):
            return self._attrsNS[attname_or_tuple]
        else:
            return self._attrs[attname_or_tuple]
    # same as set
    def __setitem__(self, attname, value):
        # Accepts either a plain string value (an Attr is created on
        # demand) or a ready-made Attr node.
        if isinstance(value, str):
            try:
                node = self._attrs[attname]
            except KeyError:
                node = Attr(attname)
                node.ownerDocument = self._ownerElement.ownerDocument
                self.setNamedItem(node)
            node.value = value
        else:
            if not isinstance(value, Attr):
                raise TypeError("value must be a string or Attr object")
            node = value
            self.setNamedItem(node)
    def getNamedItem(self, name):
        try:
            return self._attrs[name]
        except KeyError:
            return None
    def getNamedItemNS(self, namespaceURI, localName):
        try:
            return self._attrsNS[(namespaceURI, localName)]
        except KeyError:
            return None
    def removeNamedItem(self, name):
        """Remove and return the attribute named `name`; raises
        NotFoundErr when absent."""
        n = self.getNamedItem(name)
        if n is not None:
            _clear_id_cache(self._ownerElement)
            del self._attrs[n.nodeName]
            del self._attrsNS[(n.namespaceURI, n.localName)]
            if hasattr(n, 'ownerElement'):
                n.ownerElement = None
            return n
        else:
            raise xml.dom.NotFoundErr()
    def removeNamedItemNS(self, namespaceURI, localName):
        n = self.getNamedItemNS(namespaceURI, localName)
        if n is not None:
            _clear_id_cache(self._ownerElement)
            del self._attrsNS[(n.namespaceURI, n.localName)]
            del self._attrs[n.nodeName]
            if hasattr(n, 'ownerElement'):
                n.ownerElement = None
            return n
        else:
            raise xml.dom.NotFoundErr()
    def setNamedItem(self, node):
        """Add `node` (an Attr) to the map; returns any replaced attribute."""
        if not isinstance(node, Attr):
            raise xml.dom.HierarchyRequestErr(
                "%s cannot be child of %s" % (repr(node), repr(self)))
        old = self._attrs.get(node.name)
        if old:
            old.unlink()
        self._attrs[node.name] = node
        self._attrsNS[(node.namespaceURI, node.localName)] = node
        node.ownerElement = self._ownerElement
        _clear_id_cache(node.ownerElement)
        return old
    def setNamedItemNS(self, node):
        return self.setNamedItem(node)
    def __delitem__(self, attname_or_tuple):
        # Attr.unlink() removes the entry from both dictionaries.
        node = self[attname_or_tuple]
        _clear_id_cache(node.ownerElement)
        node.unlink()
    def __getstate__(self):
        # Pickle support: __slots__ classes have no instance __dict__.
        return self._attrs, self._attrsNS, self._ownerElement
    def __setstate__(self, state):
        self._attrs, self._attrsNS, self._ownerElement = state
defproperty(NamedNodeMap, "length",
            doc="Number of nodes in the NamedNodeMap.")
# Historical alias kept for backward compatibility.
AttributeList = NamedNodeMap
class TypeInfo(object):
    """Schema type of a node: a (namespace, name) pair.

    Used for Attr/Element schemaType values; the shared module-level
    `_no_type` instance stands in when no schema information is known.
    """
    __slots__ = 'namespace', 'name'
    def __init__(self, namespace, name):
        self.namespace = namespace
        self.name = name
    def __repr__(self):
        if not self.namespace:
            return "<TypeInfo %r>" % self.name
        return "<TypeInfo %r (from %r)>" % (self.name, self.namespace)
    def _get_name(self):
        # Accessor kept for DOM-style property plumbing.
        return self.name
    def _get_namespace(self):
        return self.namespace
_no_type = TypeInfo(None, None)
class Element(Node):
    """A DOM element: a tag name plus lazily-created attribute
    dictionaries, double-indexed by qualified name and by
    (namespaceURI, localName)."""
    __slots__=('ownerDocument', 'parentNode', 'tagName', 'nodeName', 'prefix',
               'namespaceURI', '_localName', 'childNodes', '_attrs', '_attrsNS',
               'nextSibling', 'previousSibling')
    nodeType = Node.ELEMENT_NODE
    nodeValue = None
    schemaType = _no_type
    _magic_id_nodes = 0
    _child_node_types = (Node.ELEMENT_NODE,
                         Node.PROCESSING_INSTRUCTION_NODE,
                         Node.COMMENT_NODE,
                         Node.TEXT_NODE,
                         Node.CDATA_SECTION_NODE,
                         Node.ENTITY_REFERENCE_NODE)
    def __init__(self, tagName, namespaceURI=EMPTY_NAMESPACE, prefix=None,
                 localName=None):
        self.parentNode = None
        self.tagName = self.nodeName = tagName
        self.prefix = prefix
        self.namespaceURI = namespaceURI
        self.childNodes = NodeList()
        self.nextSibling = self.previousSibling = None
        # Attribute dictionaries are lazily created
        # attributes are double-indexed:
        # tagName -> Attribute
        # URI,localName -> Attribute
        # in the future: consider lazy generation
        # of attribute objects this is too tricky
        # for now because of headaches with
        # namespaces.
        self._attrs = None
        self._attrsNS = None
    def _ensure_attributes(self):
        # Create the attribute dictionaries on first use.
        if self._attrs is None:
            self._attrs = {}
            self._attrsNS = {}
    def _get_localName(self):
        try:
            return self._localName
        except AttributeError:
            # Derive from the qualified name when not set explicitly.
            return self.tagName.split(":", 1)[-1]
    def _get_tagName(self):
        return self.tagName
    def unlink(self):
        # Unlink attributes first, then use the generic Node unlink for
        # children and sibling links.
        if self._attrs is not None:
            for attr in list(self._attrs.values()):
                attr.unlink()
        self._attrs = None
        self._attrsNS = None
        Node.unlink(self)
    def getAttribute(self, attname):
        """Return the attribute's string value, or "" if absent."""
        if self._attrs is None:
            return ""
        try:
            return self._attrs[attname].value
        except KeyError:
            return ""
    def getAttributeNS(self, namespaceURI, localName):
        if self._attrsNS is None:
            return ""
        try:
            return self._attrsNS[(namespaceURI, localName)].value
        except KeyError:
            return ""
    def setAttribute(self, attname, value):
        """Set attribute `attname` to string `value`, creating the Attr
        node on demand."""
        attr = self.getAttributeNode(attname)
        if attr is None:
            attr = Attr(attname)
            attr.value = value # also sets nodeValue
            attr.ownerDocument = self.ownerDocument
            self.setAttributeNode(attr)
        elif value != attr.value:
            attr.value = value
            if attr.isId:
                _clear_id_cache(self)
    def setAttributeNS(self, namespaceURI, qualifiedName, value):
        """Namespace-aware setAttribute; also updates the prefix/qualified
        name of an existing attribute when they differ."""
        prefix, localname = _nssplit(qualifiedName)
        attr = self.getAttributeNodeNS(namespaceURI, localname)
        if attr is None:
            attr = Attr(qualifiedName, namespaceURI, localname, prefix)
            attr.value = value
            attr.ownerDocument = self.ownerDocument
            self.setAttributeNode(attr)
        else:
            if value != attr.value:
                attr.value = value
                if attr.isId:
                    _clear_id_cache(self)
            if attr.prefix != prefix:
                attr.prefix = prefix
                attr.nodeName = qualifiedName
    def getAttributeNode(self, attrname):
        if self._attrs is None:
            return None
        return self._attrs.get(attrname)
    def getAttributeNodeNS(self, namespaceURI, localName):
        if self._attrsNS is None:
            return None
        return self._attrsNS.get((namespaceURI, localName))
    def setAttributeNode(self, attr):
        """Attach the Attr node `attr`; returns the attribute it replaced
        (matched by name or by NS pair), if any."""
        if attr.ownerElement not in (None, self):
            raise xml.dom.InuseAttributeErr("attribute node already owned")
        self._ensure_attributes()
        old1 = self._attrs.get(attr.name, None)
        if old1 is not None:
            self.removeAttributeNode(old1)
        old2 = self._attrsNS.get((attr.namespaceURI, attr.localName), None)
        if old2 is not None and old2 is not old1:
            self.removeAttributeNode(old2)
        _set_attribute_node(self, attr)
        if old1 is not attr:
            # It might have already been part of this node, in which case
            # it doesn't represent a change, and should not be returned.
            return old1
        if old2 is not attr:
            return old2
    setAttributeNodeNS = setAttributeNode
    def removeAttribute(self, name):
        """Remove attribute `name`; raises NotFoundErr when absent."""
        if self._attrsNS is None:
            raise xml.dom.NotFoundErr()
        try:
            attr = self._attrs[name]
        except KeyError:
            raise xml.dom.NotFoundErr()
        self.removeAttributeNode(attr)
    def removeAttributeNS(self, namespaceURI, localName):
        if self._attrsNS is None:
            raise xml.dom.NotFoundErr()
        try:
            attr = self._attrsNS[(namespaceURI, localName)]
        except KeyError:
            raise xml.dom.NotFoundErr()
        self.removeAttributeNode(attr)
    def removeAttributeNode(self, node):
        if node is None:
            raise xml.dom.NotFoundErr()
        try:
            self._attrs[node.name]
        except KeyError:
            raise xml.dom.NotFoundErr()
        _clear_id_cache(self)
        node.unlink()
        # Restore this since the node is still useful and otherwise
        # unlinked
        node.ownerDocument = self.ownerDocument
    removeAttributeNodeNS = removeAttributeNode
    def hasAttribute(self, name):
        if self._attrs is None:
            return False
        return name in self._attrs
    def hasAttributeNS(self, namespaceURI, localName):
        if self._attrsNS is None:
            return False
        return (namespaceURI, localName) in self._attrsNS
    def getElementsByTagName(self, name):
        return _get_elements_by_tagName_helper(self, name, NodeList())
    def getElementsByTagNameNS(self, namespaceURI, localName):
        return _get_elements_by_tagName_ns_helper(
            self, namespaceURI, localName, NodeList())
    def __repr__(self):
        return "<DOM Element: %s at %#x>" % (self.tagName, id(self))
    def writexml(self, writer, indent="", addindent="", newl=""):
        # indent = current indentation
        # addindent = indentation to add to higher levels
        # newl = newline string
        writer.write(indent+"<" + self.tagName)
        attrs = self._get_attributes()
        a_names = sorted(attrs.keys())
        for a_name in a_names:
            writer.write(" %s=\"" % a_name)
            _write_data(writer, attrs[a_name].value)
            writer.write("\"")
        if self.childNodes:
            writer.write(">")
            # A lone text child is written inline so pretty-printing does
            # not introduce extra whitespace into the text content.
            if (len(self.childNodes) == 1 and
                self.childNodes[0].nodeType == Node.TEXT_NODE):
                self.childNodes[0].writexml(writer, '', '', '')
            else:
                writer.write(newl)
                for node in self.childNodes:
                    node.writexml(writer, indent+addindent, addindent, newl)
                writer.write(indent)
            writer.write("</%s>%s" % (self.tagName, newl))
        else:
            writer.write("/>%s"%(newl))
    def _get_attributes(self):
        # The returned map shares the underlying dictionaries with this
        # element, so it is a live view.
        self._ensure_attributes()
        return NamedNodeMap(self._attrs, self._attrsNS, self)
    def hasAttributes(self):
        if self._attrs:
            return True
        else:
            return False
    # DOM Level 3 attributes, based on the 22 Oct 2002 draft
    def setIdAttribute(self, name):
        idAttr = self.getAttributeNode(name)
        self.setIdAttributeNode(idAttr)
    def setIdAttributeNS(self, namespaceURI, localName):
        idAttr = self.getAttributeNodeNS(namespaceURI, localName)
        self.setIdAttributeNode(idAttr)
    def setIdAttributeNode(self, idAttr):
        """Mark `idAttr` (which must belong to this element) as an ID."""
        if idAttr is None or not self.isSameNode(idAttr.ownerElement):
            raise xml.dom.NotFoundErr()
        if _get_containing_entref(self) is not None:
            raise xml.dom.NoModificationAllowedErr()
        if not idAttr._is_id:
            idAttr._is_id = True
            self._magic_id_nodes += 1
            self.ownerDocument._magic_id_count += 1
            _clear_id_cache(self)
# Read-only DOM properties backed by the Element._get_* accessors.
defproperty(Element, "attributes",
            doc="NamedNodeMap of attributes on the element.")
defproperty(Element, "localName",
            doc="Namespace-local name of this element.")
def _set_attribute_node(element, attr):
    # Install *attr* on *element*, keeping both lookup dictionaries
    # (by name and by (namespaceURI, localName)) in sync, and invalidate
    # any cached getElementById results.
    _clear_id_cache(element)
    element._ensure_attributes()
    element._attrs[attr.name] = attr
    element._attrsNS[(attr.namespaceURI, attr.localName)] = attr
    # This creates a circular reference, but Element.unlink()
    # breaks the cycle since the references to the attribute
    # dictionaries are tossed.
    attr.ownerElement = element
class Childless:
    """Mixin that makes childless-ness easy to implement and avoids
    the complexity of the Node methods that deal with children.
    """
    __slots__ = ()
    # A childless node has no attributes and a permanently empty child list.
    attributes = None
    childNodes = EmptyNodeList()
    firstChild = None
    lastChild = None
    def _get_firstChild(self):
        """There is never a first child."""
        return None
    def _get_lastChild(self):
        """There is never a last child."""
        return None
    def hasChildNodes(self):
        """Childless nodes never have children."""
        return False
    def normalize(self):
        """Nothing to do: no text-node children to merge."""
        pass
    def appendChild(self, node):
        """Always fails: this node type cannot carry children."""
        raise xml.dom.HierarchyRequestErr(
            "%s nodes cannot have children" % self.nodeName)
    def insertBefore(self, newChild, refChild):
        """Always fails: this node type cannot carry children."""
        raise xml.dom.HierarchyRequestErr(
            "%s nodes do not have children" % self.nodeName)
    def removeChild(self, oldChild):
        """Always fails: there are no children to remove."""
        raise xml.dom.NotFoundErr(
            "%s nodes do not have children" % self.nodeName)
    def replaceChild(self, newChild, oldChild):
        """Always fails: this node type cannot carry children."""
        raise xml.dom.HierarchyRequestErr(
            "%s nodes do not have children" % self.nodeName)
class ProcessingInstruction(Childless, Node):
    """A processing instruction: serialized as <?target data?>.

    nodeValue aliases *data* and nodeName aliases *target*, per the DOM
    specification.
    """
    nodeType = Node.PROCESSING_INSTRUCTION_NODE
    __slots__ = ('target', 'data')
    def __init__(self, target, data):
        self.target = target
        self.data = data
    # nodeValue is an alias for data
    def _get_nodeValue(self):
        return self.data
    def _set_nodeValue(self, value):
        # Bug fix: the old code assigned the undefined name `data`, which
        # raised NameError whenever nodeValue was set; store `value`.
        self.data = value
    nodeValue = property(_get_nodeValue, _set_nodeValue)
    # nodeName is an alias for target
    def _get_nodeName(self):
        return self.target
    def _set_nodeName(self, value):
        self.target = value
    nodeName = property(_get_nodeName, _set_nodeName)
    def writexml(self, writer, indent="", addindent="", newl=""):
        """Write the PI; its data is emitted verbatim (no escaping)."""
        writer.write("%s<?%s %s?>%s" % (indent,self.target, self.data, newl))
class CharacterData(Childless, Node):
    """Base class for Text, Comment and CDATASection: a childless node
    whose payload string is exposed as both .data and .nodeValue."""
    __slots__=('_data', 'ownerDocument','parentNode', 'previousSibling', 'nextSibling')
    def __init__(self):
        self.ownerDocument = self.parentNode = None
        self.previousSibling = self.nextSibling = None
        self._data = ''
        Node.__init__(self)
    def _get_length(self):
        return len(self.data)
    __len__ = _get_length
    def _get_data(self):
        return self._data
    def _set_data(self, data):
        self._data = data
    data = nodeValue = property(_get_data, _set_data)
    def __repr__(self):
        data = self.data
        # Truncate long payloads in the debug representation.
        if len(data) > 10:
            dotdotdot = "..."
        else:
            dotdotdot = ""
        return '<DOM %s node "%r%s">' % (
            self.__class__.__name__, data[0:10], dotdotdot)
    def substringData(self, offset, count):
        """Return up to *count* characters of data starting at *offset*."""
        if offset < 0:
            raise xml.dom.IndexSizeErr("offset cannot be negative")
        if offset >= len(self.data):
            raise xml.dom.IndexSizeErr("offset cannot be beyond end of data")
        if count < 0:
            raise xml.dom.IndexSizeErr("count cannot be negative")
        return self.data[offset:offset+count]
    def appendData(self, arg):
        """Append *arg* to the node's data."""
        self.data = self.data + arg
    def insertData(self, offset, arg):
        """Insert *arg* into the data at *offset*.

        NOTE(review): offset == len(data) is rejected here although DOM
        permits inserting at the very end -- confirm before changing.
        """
        if offset < 0:
            raise xml.dom.IndexSizeErr("offset cannot be negative")
        if offset >= len(self.data):
            raise xml.dom.IndexSizeErr("offset cannot be beyond end of data")
        if arg:
            self.data = "%s%s%s" % (
                self.data[:offset], arg, self.data[offset:])
    def deleteData(self, offset, count):
        """Delete *count* characters starting at *offset*."""
        if offset < 0:
            raise xml.dom.IndexSizeErr("offset cannot be negative")
        if offset >= len(self.data):
            raise xml.dom.IndexSizeErr("offset cannot be beyond end of data")
        if count < 0:
            raise xml.dom.IndexSizeErr("count cannot be negative")
        if count:
            self.data = self.data[:offset] + self.data[offset+count:]
    def replaceData(self, offset, count, arg):
        """Replace *count* characters starting at *offset* with *arg*."""
        if offset < 0:
            raise xml.dom.IndexSizeErr("offset cannot be negative")
        if offset >= len(self.data):
            raise xml.dom.IndexSizeErr("offset cannot be beyond end of data")
        if count < 0:
            raise xml.dom.IndexSizeErr("count cannot be negative")
        if count:
            self.data = "%s%s%s" % (
                self.data[:offset], arg, self.data[offset+count:])
defproperty(CharacterData, "length", doc="Length of the string data.")
class Text(CharacterData):
    """A text node; also the base class for CDATASection."""
    __slots__ = ()
    nodeType = Node.TEXT_NODE
    nodeName = "#text"
    attributes = None
    def splitText(self, offset):
        """Split this node at *offset*; the tail becomes a new node of the
        same class inserted directly after this one, and is returned."""
        if offset < 0 or offset > len(self.data):
            raise xml.dom.IndexSizeErr("illegal offset value")
        newText = self.__class__()
        newText.data = self.data[offset:]
        newText.ownerDocument = self.ownerDocument
        next = self.nextSibling
        # Only splice the new node into the tree if we are attached.
        if self.parentNode and self in self.parentNode.childNodes:
            if next is None:
                self.parentNode.appendChild(newText)
            else:
                self.parentNode.insertBefore(newText, next)
        self.data = self.data[:offset]
        return newText
    def writexml(self, writer, indent="", addindent="", newl=""):
        # Character data is escaped by _write_data on the way out.
        _write_data(writer, "%s%s%s" % (indent, self.data, newl))
    # DOM Level 3 (WD 9 April 2002)
    def _get_wholeText(self):
        # Concatenate this node's data with all logically adjacent
        # text/CDATA siblings on both sides.
        L = [self.data]
        n = self.previousSibling
        while n is not None:
            if n.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
                L.insert(0, n.data)
                n = n.previousSibling
            else:
                break
        n = self.nextSibling
        while n is not None:
            if n.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
                L.append(n.data)
                n = n.nextSibling
            else:
                break
        return ''.join(L)
    def replaceWholeText(self, content):
        """Replace this node and all adjacent text/CDATA siblings with a
        single node holding *content*; return the surviving node, or None
        when *content* is empty."""
        # XXX This needs to be seriously changed if minidom ever
        # supports EntityReference nodes.
        parent = self.parentNode
        n = self.previousSibling
        while n is not None:
            if n.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
                next = n.previousSibling
                parent.removeChild(n)
                n = next
            else:
                break
        n = self.nextSibling
        if not content:
            parent.removeChild(self)
        while n is not None:
            if n.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
                next = n.nextSibling
                parent.removeChild(n)
                n = next
            else:
                break
        if content:
            self.data = content
            return self
        else:
            return None
    def _get_isWhitespaceInElementContent(self):
        """True iff this node is whitespace-only and its parent element is
        declared (via its ElementInfo) to have element-only content."""
        if self.data.strip():
            return False
        elem = _get_containing_element(self)
        if elem is None:
            return False
        info = self.ownerDocument._get_elem_info(elem)
        if info is None:
            return False
        else:
            return info.isElementContent()
# Read-only computed properties for Text nodes (DOM Level 3).
defproperty(Text, "isWhitespaceInElementContent",
            doc="True iff this text node contains only whitespace"
            " and is in element content.")
defproperty(Text, "wholeText",
            doc="The text of all logically-adjacent text nodes.")
def _get_containing_element(node):
    """Walk up from *node* and return its nearest element ancestor,
    or None when no ancestor is an element."""
    ancestor = node.parentNode
    while ancestor is not None:
        if ancestor.nodeType == Node.ELEMENT_NODE:
            return ancestor
        ancestor = ancestor.parentNode
    return None
def _get_containing_entref(node):
    """Walk up from *node* and return its nearest entity-reference
    ancestor, or None when there is none."""
    ancestor = node.parentNode
    while ancestor is not None:
        if ancestor.nodeType == Node.ENTITY_REFERENCE_NODE:
            return ancestor
        ancestor = ancestor.parentNode
    return None
class Comment(CharacterData):
    """An XML comment node, serialized as <!--data-->."""
    nodeType = Node.COMMENT_NODE
    nodeName = "#comment"
    def __init__(self, data):
        CharacterData.__init__(self)
        self._data = data
    def writexml(self, writer, indent="", addindent="", newl=""):
        """Serialize the comment; '--' inside the body is illegal XML."""
        body = self.data
        if "--" in body:
            raise ValueError("'--' is not allowed in a comment node")
        writer.write("%s<!--%s-->%s" % (indent, body, newl))
class CDATASection(Text):
    """A CDATA section: character data emitted without escaping."""
    __slots__ = ()
    nodeType = Node.CDATA_SECTION_NODE
    nodeName = "#cdata-section"
    def writexml(self, writer, indent="", addindent="", newl=""):
        """Serialize verbatim; the terminator ']]>' cannot appear inside."""
        if "]]>" in self.data:
            raise ValueError("']]>' not allowed in a CDATA section")
        writer.write("<![CDATA[%s]]>" % self.data)
class ReadOnlySequentialNamedNodeMap(object):
    """Immutable NamedNodeMap backed by a flat node sequence.

    Used for DocumentType.entities and .notations; every mutating DOM
    operation raises NoModificationAllowedErr.
    """
    __slots__ = '_seq',
    def __init__(self, seq=()):
        # seq should be a list or tuple
        self._seq = seq
    def __len__(self):
        return len(self._seq)
    def _get_length(self):
        return len(self._seq)
    def getNamedItem(self, name):
        """Return the first node whose nodeName is *name*, else None."""
        return next((node for node in self._seq if node.nodeName == name),
                    None)
    def getNamedItemNS(self, namespaceURI, localName):
        """Return the first node matching (namespaceURI, localName)."""
        wanted = (namespaceURI, localName)
        for node in self._seq:
            if (node.namespaceURI, node.localName) == wanted:
                return node
        return None
    def __getitem__(self, name_or_tuple):
        """Dict-style lookup: a tuple key is namespace-aware, a plain key
        matches nodeName. Raises KeyError when nothing matches."""
        if isinstance(name_or_tuple, tuple):
            found = self.getNamedItemNS(*name_or_tuple)
        else:
            found = self.getNamedItem(name_or_tuple)
        if found is None:
            raise KeyError(name_or_tuple)
        return found
    def item(self, index):
        """Positional access; out-of-range or negative indexes give None."""
        if index < 0:
            return None
        try:
            return self._seq[index]
        except IndexError:
            return None
    # The map is read-only: every DOM mutator refuses to run.
    def removeNamedItem(self, name):
        raise xml.dom.NoModificationAllowedErr(
            "NamedNodeMap instance is read-only")
    def removeNamedItemNS(self, namespaceURI, localName):
        raise xml.dom.NoModificationAllowedErr(
            "NamedNodeMap instance is read-only")
    def setNamedItem(self, node):
        raise xml.dom.NoModificationAllowedErr(
            "NamedNodeMap instance is read-only")
    def setNamedItemNS(self, node):
        raise xml.dom.NoModificationAllowedErr(
            "NamedNodeMap instance is read-only")
    # Pickle support: the backing sequence is the only state.
    def __getstate__(self):
        return [self._seq]
    def __setstate__(self, state):
        self._seq = state[0]
# Read-only 'length' property per the DOM NamedNodeMap interface.
defproperty(ReadOnlySequentialNamedNodeMap, "length",
            doc="Number of entries in the NamedNodeMap.")
class Identified:
    """Mix-in class that supports the publicId and systemId attributes."""
    __slots__ = 'publicId', 'systemId'
    def _identified_mixin_init(self, publicId, systemId):
        """Initialize both identifier attributes in one call."""
        self.publicId = publicId
        self.systemId = systemId
    def _get_systemId(self):
        """Accessor used by defproperty for the read-only systemId."""
        return self.systemId
    def _get_publicId(self):
        """Accessor used by defproperty for the read-only publicId."""
        return self.publicId
class DocumentType(Identified, Childless, Node):
    """A doctype node (<!DOCTYPE ...>) with read-only entity and
    notation maps."""
    nodeType = Node.DOCUMENT_TYPE_NODE
    nodeValue = None
    name = None
    publicId = None
    systemId = None
    internalSubset = None
    def __init__(self, qualifiedName):
        self.entities = ReadOnlySequentialNamedNodeMap()
        self.notations = ReadOnlySequentialNamedNodeMap()
        if qualifiedName:
            prefix, localname = _nssplit(qualifiedName)
            self.name = localname
        self.nodeName = self.name
    def _get_internalSubset(self):
        return self.internalSubset
    def cloneNode(self, deep):
        """Clone this doctype; only doctypes not yet owned by a document
        may be cloned. Returns None otherwise."""
        if self.ownerDocument is None:
            # it's ok
            clone = DocumentType(None)
            clone.name = self.name
            clone.nodeName = self.name
            operation = xml.dom.UserDataHandler.NODE_CLONED
            if deep:
                clone.entities._seq = []
                clone.notations._seq = []
                for n in self.notations._seq:
                    notation = Notation(n.nodeName, n.publicId, n.systemId)
                    clone.notations._seq.append(notation)
                    n._call_user_data_handler(operation, n, notation)
                for e in self.entities._seq:
                    entity = Entity(e.nodeName, e.publicId, e.systemId,
                                    e.notationName)
                    entity.actualEncoding = e.actualEncoding
                    entity.encoding = e.encoding
                    entity.version = e.version
                    clone.entities._seq.append(entity)
                    # Bug fix: the handler's source node is the entity being
                    # cloned (e); the old code passed `n`, the last notation
                    # seen (or an unbound name when there were no notations).
                    e._call_user_data_handler(operation, e, entity)
            self._call_user_data_handler(operation, self, clone)
            return clone
        else:
            return None
    def writexml(self, writer, indent="", addindent="", newl=""):
        """Emit the <!DOCTYPE ...> declaration, including any public and
        system identifiers and the internal subset."""
        writer.write("<!DOCTYPE ")
        writer.write(self.name)
        if self.publicId:
            writer.write("%s PUBLIC '%s'%s '%s'"
                         % (newl, self.publicId, newl, self.systemId))
        elif self.systemId:
            writer.write("%s SYSTEM '%s'" % (newl, self.systemId))
        if self.internalSubset is not None:
            writer.write(" [")
            writer.write(self.internalSubset)
            writer.write("]")
        writer.write(">"+newl)
class Entity(Identified, Node):
    """An entity declared in the document's DTD.

    Entities are read-only in minidom: every child-mutation method
    raises HierarchyRequestErr.
    """
    attributes = None
    nodeType = Node.ENTITY_NODE
    nodeValue = None
    actualEncoding = None
    encoding = None
    version = None
    def __init__(self, name, publicId, systemId, notation):
        self.nodeName = name
        self.notationName = notation
        self.childNodes = NodeList()
        self._identified_mixin_init(publicId, systemId)
    # Accessors backing the read-only defproperty attributes.
    def _get_actualEncoding(self):
        return self.actualEncoding
    def _get_encoding(self):
        return self.encoding
    def _get_version(self):
        return self.version
    # The replacement-text children may not be modified through this node.
    def appendChild(self, newChild):
        raise xml.dom.HierarchyRequestErr(
            "cannot append children to an entity node")
    def insertBefore(self, newChild, refChild):
        raise xml.dom.HierarchyRequestErr(
            "cannot insert children below an entity node")
    def removeChild(self, oldChild):
        raise xml.dom.HierarchyRequestErr(
            "cannot remove children from an entity node")
    def replaceChild(self, newChild, oldChild):
        raise xml.dom.HierarchyRequestErr(
            "cannot replace children of an entity node")
class Notation(Identified, Childless, Node):
    """A notation declared in the DTD: a name plus public/system IDs,
    with no value and no children."""
    nodeType = Node.NOTATION_NODE
    nodeValue = None
    def __init__(self, name, publicId, systemId):
        self.nodeName = name
        self._identified_mixin_init(publicId, systemId)
class DOMImplementation(DOMImplementationLS):
    """Factory for Document/DocumentType objects; also answers
    hasFeature() queries for the features minidom supports."""
    _features = [("core", "1.0"),
                 ("core", "2.0"),
                 ("core", None),
                 ("xml", "1.0"),
                 ("xml", "2.0"),
                 ("xml", None),
                 ("ls-load", "3.0"),
                 ("ls-load", None),
                 ]
    def hasFeature(self, feature, version):
        """Case-insensitive feature test; an empty version matches any."""
        if version == "":
            version = None
        return (feature.lower(), version) in self._features
    def createDocument(self, namespaceURI, qualifiedName, doctype):
        """Create a Document, optionally with a root element (named by
        *qualifiedName* in *namespaceURI*) and an unowned *doctype*."""
        if doctype and doctype.parentNode is not None:
            raise xml.dom.WrongDocumentErr(
                "doctype object owned by another DOM tree")
        doc = self._create_document()
        # A root element is wanted unless all three arguments are None.
        add_root_element = not (namespaceURI is None
                                and qualifiedName is None
                                and doctype is None)
        if not qualifiedName and add_root_element:
            # The spec is unclear what to raise here; SyntaxErr
            # would be the other obvious candidate. Since Xerces raises
            # InvalidCharacterErr, and since SyntaxErr is not listed
            # for createDocument, that seems to be the better choice.
            # XXX: need to check for illegal characters here and in
            # createElement.
            # DOM Level III clears this up when talking about the return value
            # of this function. If namespaceURI, qName and DocType are
            # Null the document is returned without a document element
            # Otherwise if doctype or namespaceURI are not None
            # Then we go back to the above problem
            raise xml.dom.InvalidCharacterErr("Element with no name")
        if add_root_element:
            prefix, localname = _nssplit(qualifiedName)
            if prefix == "xml" \
               and namespaceURI != "http://www.w3.org/XML/1998/namespace":
                raise xml.dom.NamespaceErr("illegal use of 'xml' prefix")
            if prefix and not namespaceURI:
                raise xml.dom.NamespaceErr(
                    "illegal use of prefix without namespaces")
            element = doc.createElementNS(namespaceURI, qualifiedName)
            if doctype:
                doc.appendChild(doctype)
            doc.appendChild(element)
        if doctype:
            doctype.parentNode = doctype.ownerDocument = doc
            doc.doctype = doctype
        doc.implementation = self
        return doc
    def createDocumentType(self, qualifiedName, publicId, systemId):
        """Create an unowned DocumentType node."""
        doctype = DocumentType(qualifiedName)
        doctype.publicId = publicId
        doctype.systemId = systemId
        return doctype
    # DOM Level 3 (WD 9 April 2002)
    def getInterface(self, feature):
        """Return self when *feature* is supported, else None."""
        if self.hasFeature(feature, None):
            return self
        else:
            return None
    # internal
    def _create_document(self):
        return Document()
class ElementInfo(object):
    """Object that represents content-model information for an element.

    This implementation is not expected to be used in practice; DOM
    builders should provide implementations which do the right thing
    using information available to it.
    """
    __slots__ = 'tagName',
    def __init__(self, name):
        self.tagName = name
    def getAttributeType(self, aname):
        """Every attribute is untyped in this default implementation."""
        return _no_type
    def getAttributeTypeNS(self, namespaceURI, localName):
        """Namespace-aware variant; also always untyped here."""
        return _no_type
    def isElementContent(self):
        """No content model is known, so assume mixed content."""
        return False
    def isEmpty(self):
        """Returns true iff this element is declared to have an EMPTY
        content model."""
        return False
    def isId(self, aname):
        """Returns true iff the named attribute is a DTD-style ID."""
        return False
    def isIdNS(self, namespaceURI, localName):
        """Returns true iff the identified attribute is a DTD-style ID."""
        return False
    # Pickle support: the tag name is the only state carried.
    def __getstate__(self):
        return self.tagName
    def __setstate__(self, state):
        self.tagName = state
def _clear_id_cache(node):
    # Drop memoized getElementById results after any mutation that could
    # change ID attributes; also reset the incremental search stack so
    # the next lookup rescans from the document element.
    if node.nodeType == Node.DOCUMENT_NODE:
        node._id_cache.clear()
        node._id_search_stack = None
    elif _in_document(node):
        node.ownerDocument._id_cache.clear()
        node.ownerDocument._id_search_stack= None
class Document(Node, DocumentLS):
    """A DOM document: root of the tree and factory for all node types.

    Tracks the ID-attribute cache used by getElementById() and the
    per-element content-model info (_elem_info) supplied by tree builders.
    """
    __slots__ = ('_elem_info', 'doctype',
                 '_id_search_stack', 'childNodes', '_id_cache')
    _child_node_types = (Node.ELEMENT_NODE, Node.PROCESSING_INSTRUCTION_NODE,
                         Node.COMMENT_NODE, Node.DOCUMENT_TYPE_NODE)
    implementation = DOMImplementation()
    nodeType = Node.DOCUMENT_NODE
    nodeName = "#document"
    nodeValue = None
    attributes = None
    parentNode = None
    previousSibling = nextSibling = None
    # Document attributes from Level 3 (WD 9 April 2002)
    actualEncoding = None
    encoding = None
    standalone = None
    version = None
    strictErrorChecking = False
    errorHandler = None
    documentURI = None
    # Number of attributes marked as IDs via setIdAttribute*() anywhere
    # in this document.
    _magic_id_count = 0
    def __init__(self):
        self.doctype = None
        self.childNodes = NodeList()
        # mapping of (namespaceURI, localName) -> ElementInfo
        # and tagName -> ElementInfo
        self._elem_info = {}
        self._id_cache = {}
        self._id_search_stack = None
    def _get_elem_info(self, element):
        """Return the ElementInfo registered for *element*, or None."""
        if element.namespaceURI:
            key = element.namespaceURI, element.localName
        else:
            key = element.tagName
        return self._elem_info.get(key)
    # Trivial accessors backing the read-only defproperty attributes.
    def _get_actualEncoding(self):
        return self.actualEncoding
    def _get_doctype(self):
        return self.doctype
    def _get_documentURI(self):
        return self.documentURI
    def _get_encoding(self):
        return self.encoding
    def _get_errorHandler(self):
        return self.errorHandler
    def _get_standalone(self):
        return self.standalone
    def _get_strictErrorChecking(self):
        return self.strictErrorChecking
    def _get_version(self):
        return self.version
    def appendChild(self, node):
        """Append *node*, enforcing document-level structure constraints
        (allowed child types; at most one document element)."""
        if node.nodeType not in self._child_node_types:
            raise xml.dom.HierarchyRequestErr(
                "%s cannot be child of %s" % (repr(node), repr(self)))
        if node.parentNode is not None:
            # This needs to be done before the next test since this
            # may *be* the document element, in which case it should
            # end up re-ordered to the end.
            node.parentNode.removeChild(node)
        if node.nodeType == Node.ELEMENT_NODE \
           and self._get_documentElement():
            raise xml.dom.HierarchyRequestErr(
                "two document elements disallowed")
        return Node.appendChild(self, node)
    def removeChild(self, oldChild):
        """Detach *oldChild* from the document and return it."""
        try:
            self.childNodes.remove(oldChild)
        except ValueError:
            raise xml.dom.NotFoundErr()
        oldChild.nextSibling = oldChild.previousSibling = None
        oldChild.parentNode = None
        # NOTE(review): documentElement is computed from childNodes and
        # oldChild was removed just above, so this comparison appears to
        # never be true (the branch looks unreachable) -- confirm before
        # relying on it.
        if self.documentElement is oldChild:
            self.documentElement = None
        return oldChild
    def _get_documentElement(self):
        # The first (and only) element child, or None.
        for node in self.childNodes:
            if node.nodeType == Node.ELEMENT_NODE:
                return node
    def unlink(self):
        """Break reference cycles so the tree can be collected promptly."""
        if self.doctype is not None:
            self.doctype.unlink()
            self.doctype = None
        Node.unlink(self)
    def cloneNode(self, deep):
        """Deep-clone the whole document; shallow cloning returns None."""
        if not deep:
            return None
        clone = self.implementation.createDocument(None, None, None)
        clone.encoding = self.encoding
        clone.standalone = self.standalone
        clone.version = self.version
        for n in self.childNodes:
            childclone = _clone_node(n, deep, clone)
            assert childclone.ownerDocument.isSameNode(clone)
            clone.childNodes.append(childclone)
            if childclone.nodeType == Node.DOCUMENT_NODE:
                assert clone.documentElement is None
            elif childclone.nodeType == Node.DOCUMENT_TYPE_NODE:
                assert clone.doctype is None
                clone.doctype = childclone
            childclone.parentNode = clone
        self._call_user_data_handler(xml.dom.UserDataHandler.NODE_CLONED,
                                     self, clone)
        return clone
    # Node factories: each new node is owned by this document.
    def createDocumentFragment(self):
        d = DocumentFragment()
        d.ownerDocument = self
        return d
    def createElement(self, tagName):
        e = Element(tagName)
        e.ownerDocument = self
        return e
    def createTextNode(self, data):
        if not isinstance(data, str):
            raise TypeError("node contents must be a string")
        t = Text()
        t.data = data
        t.ownerDocument = self
        return t
    def createCDATASection(self, data):
        if not isinstance(data, str):
            raise TypeError("node contents must be a string")
        c = CDATASection()
        c.data = data
        c.ownerDocument = self
        return c
    def createComment(self, data):
        c = Comment(data)
        c.ownerDocument = self
        return c
    def createProcessingInstruction(self, target, data):
        p = ProcessingInstruction(target, data)
        p.ownerDocument = self
        return p
    def createAttribute(self, qName):
        a = Attr(qName)
        a.ownerDocument = self
        a.value = ""
        return a
    def createElementNS(self, namespaceURI, qualifiedName):
        prefix, localName = _nssplit(qualifiedName)
        e = Element(qualifiedName, namespaceURI, prefix)
        e.ownerDocument = self
        return e
    def createAttributeNS(self, namespaceURI, qualifiedName):
        prefix, localName = _nssplit(qualifiedName)
        a = Attr(qualifiedName, namespaceURI, localName, prefix)
        a.ownerDocument = self
        a.value = ""
        return a
    # A couple of implementation-specific helpers to create node types
    # not supported by the W3C DOM specs:
    def _create_entity(self, name, publicId, systemId, notationName):
        e = Entity(name, publicId, systemId, notationName)
        e.ownerDocument = self
        return e
    def _create_notation(self, name, publicId, systemId):
        n = Notation(name, publicId, systemId)
        n.ownerDocument = self
        return n
    def getElementById(self, id):
        """Return the element whose ID-typed attribute equals *id*, or None.

        Results are memoized in _id_cache; the incremental search state
        lives in _id_search_stack so later lookups resume where this one
        stopped instead of rescanning the whole tree.
        """
        if id in self._id_cache:
            return self._id_cache[id]
        if not (self._elem_info or self._magic_id_count):
            return None
        stack = self._id_search_stack
        if stack is None:
            # we never searched before, or the cache has been cleared
            stack = [self.documentElement]
            self._id_search_stack = stack
        elif not stack:
            # Previous search was completed and cache is still valid;
            # no matching node.
            return None
        result = None
        while stack:
            node = stack.pop()
            # add child elements to stack for continued searching
            stack.extend([child for child in node.childNodes
                          if child.nodeType in _nodeTypes_with_children])
            # check this node
            info = self._get_elem_info(node)
            if info:
                # We have to process all ID attributes before
                # returning in order to get all the attributes set to
                # be IDs using Element.setIdAttribute*().
                for attr in node.attributes.values():
                    if attr.namespaceURI:
                        if info.isIdNS(attr.namespaceURI, attr.localName):
                            self._id_cache[attr.value] = node
                            if attr.value == id:
                                result = node
                            elif not node._magic_id_nodes:
                                break
                    elif info.isId(attr.name):
                        self._id_cache[attr.value] = node
                        if attr.value == id:
                            result = node
                        elif not node._magic_id_nodes:
                            break
                    elif attr._is_id:
                        self._id_cache[attr.value] = node
                        if attr.value == id:
                            result = node
                        elif node._magic_id_nodes == 1:
                            break
            elif node._magic_id_nodes:
                # No ElementInfo, but some attributes were explicitly
                # marked as IDs via setIdAttribute*().
                for attr in node.attributes.values():
                    if attr._is_id:
                        self._id_cache[attr.value] = node
                        if attr.value == id:
                            result = node
            if result is not None:
                break
        return result
    def getElementsByTagName(self, name):
        """Return a NodeList of all descendant elements named *name*."""
        return _get_elements_by_tagName_helper(self, name, NodeList())
    def getElementsByTagNameNS(self, namespaceURI, localName):
        """Namespace-aware variant of getElementsByTagName."""
        return _get_elements_by_tagName_ns_helper(
            self, namespaceURI, localName, NodeList())
    def isSupported(self, feature, version):
        return self.implementation.hasFeature(feature, version)
    def importNode(self, node, deep):
        """Copy a node from another document into this one (DOM Level 2)."""
        if node.nodeType == Node.DOCUMENT_NODE:
            raise xml.dom.NotSupportedErr("cannot import document nodes")
        elif node.nodeType == Node.DOCUMENT_TYPE_NODE:
            raise xml.dom.NotSupportedErr("cannot import document type nodes")
        return _clone_node(node, deep, self)
    def writexml(self, writer, indent="", addindent="", newl="", encoding=None):
        """Write the XML declaration followed by every top-level child."""
        if encoding is None:
            writer.write('<?xml version="1.0" ?>'+newl)
        else:
            writer.write('<?xml version="1.0" encoding="%s"?>%s' % (
                encoding, newl))
        for node in self.childNodes:
            node.writexml(writer, indent, addindent, newl)
    # DOM Level 3 (WD 9 April 2002)
    def renameNode(self, n, namespaceURI, name):
        """Rename an element or attribute node in place (DOM Level 3).

        Attribute nodes are detached from their owner element and
        re-attached after renaming, preserving their ID-ness.
        """
        if n.ownerDocument is not self:
            raise xml.dom.WrongDocumentErr(
                "cannot rename nodes from other documents;\n"
                "expected %s,\nfound %s" % (self, n.ownerDocument))
        if n.nodeType not in (Node.ELEMENT_NODE, Node.ATTRIBUTE_NODE):
            raise xml.dom.NotSupportedErr(
                "renameNode() only applies to element and attribute nodes")
        if namespaceURI != EMPTY_NAMESPACE:
            if ':' in name:
                prefix, localName = name.split(':', 1)
                if ( prefix == "xmlns"
                     and namespaceURI != xml.dom.XMLNS_NAMESPACE):
                    raise xml.dom.NamespaceErr(
                        "illegal use of 'xmlns' prefix")
            else:
                if ( name == "xmlns"
                     and namespaceURI != xml.dom.XMLNS_NAMESPACE
                     and n.nodeType == Node.ATTRIBUTE_NODE):
                    raise xml.dom.NamespaceErr(
                        "illegal use of the 'xmlns' attribute")
                prefix = None
                localName = name
        else:
            prefix = None
            localName = None
        if n.nodeType == Node.ATTRIBUTE_NODE:
            element = n.ownerElement
            if element is not None:
                # Remember ID-ness across the detach/re-attach cycle.
                is_id = n._is_id
                element.removeAttributeNode(n)
        else:
            element = None
        n.prefix = prefix
        n._localName = localName
        n.namespaceURI = namespaceURI
        n.nodeName = name
        if n.nodeType == Node.ELEMENT_NODE:
            n.tagName = name
        else:
            # attribute node
            n.name = name
            if element is not None:
                element.setAttributeNode(n)
                if is_id:
                    element.setIdAttributeNode(n)
        # It's not clear from a semantic perspective whether we should
        # call the user data handlers for the NODE_RENAMED event since
        # we're re-using the existing node. The draft spec has been
        # interpreted as meaning "no, don't call the handler unless a
        # new node is created."
        return n
# Read-only computed property backed by Document._get_documentElement.
defproperty(Document, "documentElement",
            doc="Top-level element of this document.")
def _clone_node(node, deep, newOwnerDocument):
    """
    Clone a node and give it the new owner document.
    Called by Node.cloneNode and Document.importNode
    """
    # User data handlers are told whether the copy stays in the same
    # document (NODE_CLONED) or crosses documents (NODE_IMPORTED).
    if node.ownerDocument.isSameNode(newOwnerDocument):
        operation = xml.dom.UserDataHandler.NODE_CLONED
    else:
        operation = xml.dom.UserDataHandler.NODE_IMPORTED
    if node.nodeType == Node.ELEMENT_NODE:
        clone = newOwnerDocument.createElementNS(node.namespaceURI,
                                                 node.nodeName)
        for attr in node.attributes.values():
            clone.setAttributeNS(attr.namespaceURI, attr.nodeName, attr.value)
            a = clone.getAttributeNodeNS(attr.namespaceURI, attr.localName)
            a.specified = attr.specified
        if deep:
            for child in node.childNodes:
                c = _clone_node(child, deep, newOwnerDocument)
                clone.appendChild(c)
    elif node.nodeType == Node.DOCUMENT_FRAGMENT_NODE:
        clone = newOwnerDocument.createDocumentFragment()
        if deep:
            for child in node.childNodes:
                c = _clone_node(child, deep, newOwnerDocument)
                clone.appendChild(c)
    elif node.nodeType == Node.TEXT_NODE:
        clone = newOwnerDocument.createTextNode(node.data)
    elif node.nodeType == Node.CDATA_SECTION_NODE:
        clone = newOwnerDocument.createCDATASection(node.data)
    elif node.nodeType == Node.PROCESSING_INSTRUCTION_NODE:
        clone = newOwnerDocument.createProcessingInstruction(node.target,
                                                             node.data)
    elif node.nodeType == Node.COMMENT_NODE:
        clone = newOwnerDocument.createComment(node.data)
    elif node.nodeType == Node.ATTRIBUTE_NODE:
        clone = newOwnerDocument.createAttributeNS(node.namespaceURI,
                                                   node.nodeName)
        clone.specified = True
        clone.value = node.value
    elif node.nodeType == Node.DOCUMENT_TYPE_NODE:
        assert node.ownerDocument is not newOwnerDocument
        operation = xml.dom.UserDataHandler.NODE_IMPORTED
        clone = newOwnerDocument.implementation.createDocumentType(
            node.name, node.publicId, node.systemId)
        clone.ownerDocument = newOwnerDocument
        if deep:
            clone.entities._seq = []
            clone.notations._seq = []
            for n in node.notations._seq:
                notation = Notation(n.nodeName, n.publicId, n.systemId)
                notation.ownerDocument = newOwnerDocument
                clone.notations._seq.append(notation)
                if hasattr(n, '_call_user_data_handler'):
                    n._call_user_data_handler(operation, n, notation)
            for e in node.entities._seq:
                entity = Entity(e.nodeName, e.publicId, e.systemId,
                                e.notationName)
                entity.actualEncoding = e.actualEncoding
                entity.encoding = e.encoding
                entity.version = e.version
                entity.ownerDocument = newOwnerDocument
                clone.entities._seq.append(entity)
                if hasattr(e, '_call_user_data_handler'):
                    # Bug fix: the handler's source node is the entity being
                    # cloned (e); the old code passed `n`, the last notation
                    # seen (or an unbound name when there were no notations).
                    e._call_user_data_handler(operation, e, entity)
    else:
        # Note the cloning of Document and DocumentType nodes is
        # implementation specific. minidom handles those cases
        # directly in the cloneNode() methods.
        raise xml.dom.NotSupportedErr("Cannot clone node %s" % repr(node))
    # Check for _call_user_data_handler() since this could conceivably
    # used with other DOM implementations (one of the FourThought
    # DOMs, perhaps?).
    if hasattr(node, '_call_user_data_handler'):
        node._call_user_data_handler(operation, node, clone)
    return clone
def _nssplit(qualifiedName):
fields = qualifiedName.split(':', 1)
if len(fields) == 2:
return fields
else:
return (None, fields[0])
def _do_pulldom_parse(func, args, kwargs):
    """Run a pulldom parse function, fully expand the root node of the
    first event, and return it as the built document."""
    events = func(*args, **kwargs)
    toktype, rootNode = events.getEvent()
    events.expandNode(rootNode)
    events.clear()
    return rootNode
def parse(file, parser=None, bufsize=None):
    """Parse a file into a DOM by filename or file object."""
    # The fast expat path handles only the default settings; a custom
    # parser or buffer size must go through pulldom.
    if parser is None and not bufsize:
        from xml.dom import expatbuilder
        return expatbuilder.parse(file)
    else:
        from xml.dom import pulldom
        return _do_pulldom_parse(pulldom.parse, (file,),
            {'parser': parser, 'bufsize': bufsize})
def parseString(string, parser=None):
    """Parse a file into a DOM from a string."""
    # As in parse(): expat fast path for the default parser, else pulldom.
    if parser is None:
        from xml.dom import expatbuilder
        return expatbuilder.parseString(string)
    else:
        from xml.dom import pulldom
        return _do_pulldom_parse(pulldom.parseString, (string,),
                                 {'parser': parser})
def getDOMImplementation(features=None):
    """Return the module's DOMImplementation singleton, or None when a
    requested feature is unsupported.

    *features* may be a feature string (parsed by xml.dom.domreg) or a
    sequence of (feature, version) pairs.
    """
    if features:
        if isinstance(features, str):
            features = domreg._parse_feature_string(features)
        for f, v in features:
            if not Document.implementation.hasFeature(f, v):
                return None
    return Document.implementation
|
emencia/emencia.recipe.patch
|
refs/heads/master
|
emencia/recipe/__init__.py
|
916
|
# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
# Declare this directory as a namespace package so multiple distributions
# can contribute modules under the same package name.
try:
    __import__('pkg_resources').declare_namespace(__name__)
except ImportError:
    # setuptools is unavailable; fall back to the stdlib mechanism.
    from pkgutil import extend_path
    __path__ = extend_path(__path__, __name__)
|
amyvmiwei/kbengine
|
refs/heads/master
|
kbe/src/lib/python/Lib/ctypes/test/test_cfuncs.py
|
11
|
# A lot of failures in these tests on Mac OS X.
# Byte order related?
import unittest
from ctypes import *
import _ctypes_test
class CFunctions(unittest.TestCase):
_dll = CDLL(_ctypes_test.__file__)
def S(self):
return c_longlong.in_dll(self._dll, "last_tf_arg_s").value
def U(self):
return c_ulonglong.in_dll(self._dll, "last_tf_arg_u").value
def test_byte(self):
self._dll.tf_b.restype = c_byte
self._dll.tf_b.argtypes = (c_byte,)
self.assertEqual(self._dll.tf_b(-126), -42)
self.assertEqual(self.S(), -126)
def test_byte_plus(self):
self._dll.tf_bb.restype = c_byte
self._dll.tf_bb.argtypes = (c_byte, c_byte)
self.assertEqual(self._dll.tf_bb(0, -126), -42)
self.assertEqual(self.S(), -126)
def test_ubyte(self):
self._dll.tf_B.restype = c_ubyte
self._dll.tf_B.argtypes = (c_ubyte,)
self.assertEqual(self._dll.tf_B(255), 85)
self.assertEqual(self.U(), 255)
def test_ubyte_plus(self):
self._dll.tf_bB.restype = c_ubyte
self._dll.tf_bB.argtypes = (c_byte, c_ubyte)
self.assertEqual(self._dll.tf_bB(0, 255), 85)
self.assertEqual(self.U(), 255)
def test_short(self):
self._dll.tf_h.restype = c_short
self._dll.tf_h.argtypes = (c_short,)
self.assertEqual(self._dll.tf_h(-32766), -10922)
self.assertEqual(self.S(), -32766)
def test_short_plus(self):
self._dll.tf_bh.restype = c_short
self._dll.tf_bh.argtypes = (c_byte, c_short)
self.assertEqual(self._dll.tf_bh(0, -32766), -10922)
self.assertEqual(self.S(), -32766)
def test_ushort(self):
self._dll.tf_H.restype = c_ushort
self._dll.tf_H.argtypes = (c_ushort,)
self.assertEqual(self._dll.tf_H(65535), 21845)
self.assertEqual(self.U(), 65535)
def test_ushort_plus(self):
self._dll.tf_bH.restype = c_ushort
self._dll.tf_bH.argtypes = (c_byte, c_ushort)
self.assertEqual(self._dll.tf_bH(0, 65535), 21845)
self.assertEqual(self.U(), 65535)
def test_int(self):
self._dll.tf_i.restype = c_int
self._dll.tf_i.argtypes = (c_int,)
self.assertEqual(self._dll.tf_i(-2147483646), -715827882)
self.assertEqual(self.S(), -2147483646)
def test_int_plus(self):
self._dll.tf_bi.restype = c_int
self._dll.tf_bi.argtypes = (c_byte, c_int)
self.assertEqual(self._dll.tf_bi(0, -2147483646), -715827882)
self.assertEqual(self.S(), -2147483646)
def test_uint(self):
self._dll.tf_I.restype = c_uint
self._dll.tf_I.argtypes = (c_uint,)
self.assertEqual(self._dll.tf_I(4294967295), 1431655765)
self.assertEqual(self.U(), 4294967295)
def test_uint_plus(self):
self._dll.tf_bI.restype = c_uint
self._dll.tf_bI.argtypes = (c_byte, c_uint)
self.assertEqual(self._dll.tf_bI(0, 4294967295), 1431655765)
self.assertEqual(self.U(), 4294967295)
def test_long(self):
self._dll.tf_l.restype = c_long
self._dll.tf_l.argtypes = (c_long,)
self.assertEqual(self._dll.tf_l(-2147483646), -715827882)
self.assertEqual(self.S(), -2147483646)
def test_long_plus(self):
self._dll.tf_bl.restype = c_long
self._dll.tf_bl.argtypes = (c_byte, c_long)
self.assertEqual(self._dll.tf_bl(0, -2147483646), -715827882)
self.assertEqual(self.S(), -2147483646)
def test_ulong(self):
self._dll.tf_L.restype = c_ulong
self._dll.tf_L.argtypes = (c_ulong,)
self.assertEqual(self._dll.tf_L(4294967295), 1431655765)
self.assertEqual(self.U(), 4294967295)
def test_ulong_plus(self):
self._dll.tf_bL.restype = c_ulong
self._dll.tf_bL.argtypes = (c_char, c_ulong)
self.assertEqual(self._dll.tf_bL(b' ', 4294967295), 1431655765)
self.assertEqual(self.U(), 4294967295)
    # 64-bit and floating-point round-trips; same value/3 convention as
    # the integer tests above (e.g. 42.0 -> 14.0).
    def test_longlong(self):
        self._dll.tf_q.restype = c_longlong
        self._dll.tf_q.argtypes = (c_longlong, )
        self.assertEqual(self._dll.tf_q(-9223372036854775806), -3074457345618258602)
        self.assertEqual(self.S(), -9223372036854775806)
    def test_longlong_plus(self):
        self._dll.tf_bq.restype = c_longlong
        self._dll.tf_bq.argtypes = (c_byte, c_longlong)
        self.assertEqual(self._dll.tf_bq(0, -9223372036854775806), -3074457345618258602)
        self.assertEqual(self.S(), -9223372036854775806)
    def test_ulonglong(self):
        self._dll.tf_Q.restype = c_ulonglong
        self._dll.tf_Q.argtypes = (c_ulonglong, )
        self.assertEqual(self._dll.tf_Q(18446744073709551615), 6148914691236517205)
        self.assertEqual(self.U(), 18446744073709551615)
    def test_ulonglong_plus(self):
        self._dll.tf_bQ.restype = c_ulonglong
        self._dll.tf_bQ.argtypes = (c_byte, c_ulonglong)
        self.assertEqual(self._dll.tf_bQ(0, 18446744073709551615), 6148914691236517205)
        self.assertEqual(self.U(), 18446744073709551615)
    def test_float(self):
        self._dll.tf_f.restype = c_float
        self._dll.tf_f.argtypes = (c_float,)
        self.assertEqual(self._dll.tf_f(-42.), -14.)
        self.assertEqual(self.S(), -42)
    def test_float_plus(self):
        self._dll.tf_bf.restype = c_float
        self._dll.tf_bf.argtypes = (c_byte, c_float)
        self.assertEqual(self._dll.tf_bf(0, -42.), -14.)
        self.assertEqual(self.S(), -42)
    def test_double(self):
        self._dll.tf_d.restype = c_double
        self._dll.tf_d.argtypes = (c_double,)
        self.assertEqual(self._dll.tf_d(42.), 14.)
        self.assertEqual(self.S(), 42)
    def test_double_plus(self):
        self._dll.tf_bd.restype = c_double
        self._dll.tf_bd.argtypes = (c_byte, c_double)
        self.assertEqual(self._dll.tf_bd(0, 42.), 14.)
        self.assertEqual(self.S(), 42)
    def test_longdouble(self):
        self._dll.tf_D.restype = c_longdouble
        self._dll.tf_D.argtypes = (c_longdouble,)
        self.assertEqual(self._dll.tf_D(42.), 14.)
        self.assertEqual(self.S(), 42)
    def test_longdouble_plus(self):
        self._dll.tf_bD.restype = c_longdouble
        self._dll.tf_bD.argtypes = (c_byte, c_longdouble)
        self.assertEqual(self._dll.tf_bD(0, 42.), 14.)
        self.assertEqual(self.S(), 42)
    def test_callwithresult(self):
        # restype may be any callable: it post-processes the raw integer
        # result, so tf_i's value/3 is doubled here (42 -> 14 -> 28).
        def process_result(result):
            return result * 2
        self._dll.tf_i.restype = process_result
        self._dll.tf_i.argtypes = (c_int,)
        self.assertEqual(self._dll.tf_i(42), 28)
        self.assertEqual(self.S(), 42)
        self.assertEqual(self._dll.tf_i(-42), -28)
        self.assertEqual(self.S(), -42)
    def test_void(self):
        # restype None discards the function result entirely; the call
        # still records its argument, observable via S().
        self._dll.tv_i.restype = None
        self._dll.tv_i.argtypes = (c_int,)
        self.assertEqual(self._dll.tv_i(42), None)
        self.assertEqual(self.S(), 42)
        self.assertEqual(self._dll.tv_i(-42), None)
        self.assertEqual(self.S(), -42)
# The following repeats the above tests with stdcall functions (where
# they are available)
try:
    WinDLL
except NameError:
    # ctypes only defines WinDLL on Windows; elsewhere the stdcall
    # variants are skipped entirely.
    pass
else:
    class stdcall_dll(WinDLL):
        # Resolve plain attribute names to the DLL's "s_"-prefixed
        # stdcall exports, caching the bound function on the instance so
        # __getattr__ only fires once per name.
        def __getattr__(self, name):
            if name[:2] == '__' and name[-2:] == '__':
                raise AttributeError(name)
            func = self._FuncPtr(("s_" + name, self))
            setattr(self, name, func)
            return func
    class stdcallCFunctions(CFunctions):
        # Re-run every CFunctions test case against the stdcall entry
        # points by swapping in the prefix-mapping DLL wrapper.
        _dll = stdcall_dll(_ctypes_test.__file__)
        pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
will-moore/openmicroscopy
|
refs/heads/develop
|
components/tools/OmeroPy/test/unit/clitest/test_db.py
|
12
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test of the omero db control.
Copyright 2009-2013 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import pytest
import os
from path import path
from omero.plugins.db import DatabaseControl
from omero.util.temp_files import create_path
from omero.cli import NonZeroReturnCode
from omero.cli import CLI
from omero_ext.mox import Mox
import getpass
import __builtin__
# Expected base64-encoded password hashes keyed by (user id, salt flag).
# With the default flag ('') ids 0 and 1 produce different hashes,
# whereas with '--no-salt' both ids map to the same hash — i.e. the
# salted variant mixes the user id into the hash.
hash_map = {
    ('0', ''): 'PJueOtwuTPHB8Nq/1rFVxg==',
    ('0', '--no-salt'): 'vvFwuczAmpyoRC0Nsv8FCw==',
    ('1', ''): 'pvL5Tyr9tCD2esF938sHEQ==',
    ('1', '--no-salt'): 'vvFwuczAmpyoRC0Nsv8FCw==',
}
class TestDatabase(object):
    """Unit tests for the ``omero db`` CLI plugin.

    Interactive password prompts are stubbed out with mox, and the
    generated password hashes / SQL scripts are checked against the known
    values in ``hash_map``.
    """

    def setup_method(self, method):
        self.cli = CLI()
        self.cli.register("db", DatabaseControl, "TEST")
        self.args = ["db"]
        dir = path(__file__) / ".." / ".." / ".." / ".." / ".." / ".." /\
            ".." / "dist"  # FIXME: should not be hard-coded
        dir = dir.abspath()
        cfg = dir / "etc" / "omero.properties"
        cfg = cfg.abspath()
        self.cli.dir = dir
        self.data = {}
        # Pull omero.db.version / omero.db.patch out of the properties file.
        for line in cfg.text().split("\n"):
            line = line.strip()
            for x in ("version", "patch"):
                key = "omero.db." + x
                if line.startswith(key):
                    self.data[x] = line[len(key)+1:]
        self.file = create_path()
        self.script_file = "%(version)s__%(patch)s.sql" % self.data
        # Park any pre-existing SQL script; teardown_method restores it.
        if os.path.isfile(self.script_file):
            os.rename(self.script_file, self.script_file + '.bak')
        assert not os.path.isfile(self.script_file)
        self.mox = Mox()
        self.mox.StubOutWithMock(getpass, 'getpass')
        self.mox.StubOutWithMock(__builtin__, "raw_input")

    def teardown_method(self, method):
        self.file.remove()
        if os.path.isfile(self.script_file):
            os.remove(self.script_file)
        if os.path.isfile(self.script_file + '.bak'):
            os.rename(self.script_file + '.bak', self.script_file)
        self.mox.UnsetStubs()
        self.mox.VerifyAll()

    def password(self, string, strict=True):
        # Helper: run `omero db password <args>` with self.data interpolated.
        self.cli.invoke("db password " + string % self.data, strict=strict)

    def testHelp(self):
        self.args += ["-h"]
        self.cli.invoke(self.args, strict=True)

    @pytest.mark.parametrize(
        'subcommand', DatabaseControl().get_subcommands())
    def testSubcommandHelp(self, subcommand):
        self.args += [subcommand, "-h"]
        self.cli.invoke(self.args, strict=True)

    def testBadVersionDies(self):
        with pytest.raises(NonZeroReturnCode):
            self.cli.invoke("db script NONE NONE pw", strict=True)

    def testPasswordIsAskedForAgainIfDiffer(self):
        # A mismatched confirmation triggers a second prompt round.
        self.expectPassword("ome")
        self.expectConfirmation("bad")
        self.expectPassword("ome")
        self.expectConfirmation("ome")
        self.mox.ReplayAll()
        self.password("")

    def testPasswordIsAskedForAgainIfEmpty(self):
        # An empty password is rejected and prompted for again.
        self.expectPassword("")
        self.expectPassword("ome")
        self.expectConfirmation("ome")
        self.mox.ReplayAll()
        self.password("")

    @pytest.mark.parametrize('no_salt', ['', '--no-salt'])
    @pytest.mark.parametrize('user_id', ['', '0', '1'])
    @pytest.mark.parametrize('password', ['', 'ome'])
    def testPassword(self, user_id, password, no_salt, capsys):
        args = ""
        if user_id:
            args += "--user-id=%s " % user_id
        if no_salt:
            args += "%s " % no_salt
        if password:
            args += "%s" % password
        else:
            # No password on the command line: expect interactive prompts.
            self.expectPassword("ome", id=user_id)
            self.expectConfirmation("ome", id=user_id)
            self.mox.ReplayAll()
        self.password(args)
        out, err = capsys.readouterr()
        assert out.strip() == self.password_output(user_id, no_salt)

    @pytest.mark.parametrize('file_arg', ['', '-f', '--file'])
    @pytest.mark.parametrize('no_salt', ['', '--no-salt'])
    @pytest.mark.parametrize('password', ['', '--password ome'])
    def testScript(self, no_salt, file_arg, password, capsys):
        """
        Recommended usage of db script
        """
        args = "db script " + password
        if no_salt:
            args += " %s" % no_salt
        if file_arg:
            args += " %s %s" % (file_arg, str(self.file))
            output = self.file
        else:
            # Without -f/--file the script goes to the default
            # <version>__<patch>.sql file in the working directory.
            output = self.script_file
        if not password:
            self.expectPassword("ome")
            self.expectConfirmation("ome")
        self.mox.ReplayAll()
        self.cli.invoke(args, strict=True)
        out, err = capsys.readouterr()
        assert 'Using %s for version' % self.data['version'] in err
        assert 'Using %s for patch' % self.data['patch'] in err
        if password:
            assert 'Using password from commandline' in err
        with open(output) as f:
            lines = f.readlines()
            for line in lines:
                if line.startswith('insert into password values (0'):
                    assert line.strip() == self.script_output(no_salt)

    @pytest.mark.parametrize('file_arg', ['', '-f', '--file'])
    @pytest.mark.parametrize('no_salt', ['', '--no-salt'])
    @pytest.mark.parametrize('pos_args', [
        '%s %s %s', '--version %s --patch %s --password %s'])
    def testScriptDeveloperArgs(self, pos_args, no_salt, file_arg, capsys):
        """
        Deprecated and developer usage of db script
        """
        arg_values = ('VERSION', 'PATCH', 'PASSWORD')
        args = "db script " + pos_args % arg_values
        if no_salt:
            args += " %s" % no_salt
        if file_arg:
            args += " %s %s" % (file_arg, str(self.file))
        # (Removed two dead no-op expression statements, `self.file` and
        # `self.script_file`, left over from copying testScript: the
        # bogus VERSION/PATCH values always fail before any file is
        # written, so no output path is needed here.)
        self.mox.ReplayAll()
        with pytest.raises(NonZeroReturnCode):
            self.cli.invoke(args, strict=True)
        out, err = capsys.readouterr()
        assert 'Using %s for version' % (arg_values[0]) in err
        assert 'Using %s for patch' % (arg_values[1]) in err
        assert 'Using password from commandline' in err
        assert 'Invalid Database version/patch' in err

    def password_ending(self, user, id):
        # The prompt names the numeric id for non-root ids, otherwise the
        # user name.
        if id and id != '0':
            rv = "user %s: " % id
        else:
            rv = "%s user: " % user
        return "password for OMERO " + rv

    def expectPassword(self, pw, user="root", id=None):
        getpass.getpass("Please enter %s" %
                        self.password_ending(user, id)).AndReturn(pw)

    def expectConfirmation(self, pw, user="root", id=None):
        getpass.getpass("Please re-enter %s" %
                        self.password_ending(user, id)).AndReturn(pw)

    def password_output(self, user_id, no_salt):
        """Expected stdout of `db password` for the given parameters."""
        update_msg = "UPDATE password SET hash = \'%s\'" \
                     " WHERE experimenter_id = %s;"
        if not user_id:
            user_id = "0"
        return update_msg % (hash_map[(user_id, no_salt)], user_id)

    def script_output(self, no_salt):
        """Expected root-password INSERT line of a generated script."""
        root_password_msg = "insert into password values (0,\'%s\');"
        return root_password_msg % (hash_map[("0", no_salt)])
|
Dandandan/wikiprogramming
|
refs/heads/master
|
jsrepl/extern/python/closured/lib/python2.7/email/__init__.py
|
262
|
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org
"""A package for parsing, handling, and generating email messages."""
__version__ = '4.0.3'
__all__ = [
# Old names
'base64MIME',
'Charset',
'Encoders',
'Errors',
'Generator',
'Header',
'Iterators',
'Message',
'MIMEAudio',
'MIMEBase',
'MIMEImage',
'MIMEMessage',
'MIMEMultipart',
'MIMENonMultipart',
'MIMEText',
'Parser',
'quopriMIME',
'Utils',
'message_from_string',
'message_from_file',
# new names
'base64mime',
'charset',
'encoders',
'errors',
'generator',
'header',
'iterators',
'message',
'mime',
'parser',
'quoprimime',
'utils',
]
# Some convenience routines. Don't import Parser and Message as side-effects
# of importing email since those cascadingly import most of the rest of the
# email package.
def message_from_string(s, *args, **kws):
    """Parse a string into a Message object model.

    Optional _class and strict are passed to the Parser constructor.
    """
    # Imported here, not at module level, so that importing the email
    # package itself stays cheap.
    from email.parser import Parser
    parser = Parser(*args, **kws)
    return parser.parsestr(s)
def message_from_file(fp, *args, **kws):
    """Read a file and parse its contents into a Message object model.

    Optional _class and strict are passed to the Parser constructor.
    """
    # Imported here, not at module level, so that importing the email
    # package itself stays cheap.
    from email.parser import Parser
    parser = Parser(*args, **kws)
    return parser.parse(fp)
# Lazy loading to provide name mapping from new-style names (PEP 8 compatible
# email 4.0 module names), to old-style names (email 3.0 module names).
import sys
class LazyImporter(object):
    """Module proxy that defers the real import until first attribute access.

    Instances are installed in sys.modules under the old-style (email 3.0)
    names so legacy imports keep working without paying the import cost
    up front.
    """
    def __init__(self, module_name):
        # Fully qualified new-style module name, e.g. 'email.mime.text'.
        self.__name__ = 'email.' + module_name
    def __getattr__(self, name):
        __import__(self.__name__)
        mod = sys.modules[self.__name__]
        # Copy the real module's namespace onto this proxy so subsequent
        # lookups bypass __getattr__ entirely.
        self.__dict__.update(mod.__dict__)
        return getattr(mod, name)
_LOWERNAMES = [
    # email.<old name> -> email.<new name is lowercased old name>
    'Charset',
    'Encoders',
    'Errors',
    'FeedParser',
    'Generator',
    'Header',
    'Iterators',
    'Message',
    'Parser',
    'Utils',
    'base64MIME',
    'quopriMIME',
]
_MIMENAMES = [
    # email.MIME<old name> -> email.mime.<new name is lowercased old name>
    'Audio',
    'Base',
    'Image',
    'Message',
    'Multipart',
    'NonMultipart',
    'Text',
]
# Install LazyImporter proxies so the old capitalized module names
# (email.Utils, email.MIMEText, ...) resolve on demand to the new
# lowercase modules, both via `import email.X` and as attributes of the
# email package itself.
for _name in _LOWERNAMES:
    importer = LazyImporter(_name.lower())
    sys.modules['email.' + _name] = importer
    setattr(sys.modules['email'], _name, importer)
import email.mime
for _name in _MIMENAMES:
    importer = LazyImporter('mime.' + _name.lower())
    # Shim both spellings: email.MIMEFoo and email.mime.foo.
    sys.modules['email.MIME' + _name] = importer
    setattr(sys.modules['email'], 'MIME' + _name, importer)
    setattr(sys.modules['email.mime'], _name, importer)
|
Pulgama/supriya
|
refs/heads/master
|
supriya/ugens/Dbufrd.py
|
1
|
import collections
from supriya import CalculationRate
from supriya.ugens.DUGen import DUGen
class Dbufrd(DUGen):
    """
    A buffer-reading demand-rate UGen.
    ::
        >>> dbufrd = supriya.ugens.Dbufrd(
        ...     buffer_id=0,
        ...     loop=1,
        ...     phase=0,
        ...     )
        >>> dbufrd
        Dbufrd()
    """
    ### CLASS VARIABLES ###
    __documentation_section__ = "Demand UGens"
    # Ordered (input name -> default value) pairs; the ordering defines
    # the positional argument order of the underlying UGen inputs.
    _ordered_input_names = collections.OrderedDict(
        [("buffer_id", 0), ("phase", 0), ("loop", 1)]
    )
    # Demand-rate only.
    _valid_calculation_rates = (CalculationRate.DEMAND,)
|
fusion809/fusion809.github.io-old
|
refs/heads/master
|
vendor/bundle/ruby/2.1.0/gems/pygments.rb-0.6.3/vendor/pygments-main/pygments/formatters/terminal256.py
|
59
|
# -*- coding: utf-8 -*-
"""
pygments.formatters.terminal256
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Formatter for 256-color terminal output with ANSI sequences.
RGB-to-XTERM color conversion routines adapted from xterm256-conv
tool (http://frexx.de/xterm-256-notes/data/xterm256-conv2.tar.bz2)
by Wolfgang Frisch.
Formatter version 1.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# TODO:
# - Options to map style's bold/underline/italic/border attributes
# to some ANSI attrbutes (something like 'italic=underline')
# - An option to output "style RGB to xterm RGB/index" conversion table
# - An option to indicate that we are running in "reverse background"
# xterm. This means that default colors are white-on-black, not
# black-on-while, so colors like "white background" need to be converted
# to "white background, black foreground", etc...
import sys
from pygments.formatter import Formatter
__all__ = ['Terminal256Formatter']
class EscapeSequence:
    """Builds ANSI SGR escape strings for an xterm-256 foreground and
    background color pair plus bold/underline flags."""

    def __init__(self, fg=None, bg=None, bold=False, underline=False):
        self.fg = fg
        self.bg = bg
        self.bold = bold
        self.underline = underline

    def escape(self, attrs):
        # Join SGR attribute codes into one CSI sequence; an empty
        # attribute list produces no output at all.
        if not attrs:
            return ""
        return "\x1b[%sm" % ";".join(attrs)

    def color_string(self):
        """Escape sequence that turns this style on."""
        parts = []
        if self.fg is not None:
            parts += ["38", "5", "%i" % self.fg]
        if self.bg is not None:
            parts += ["48", "5", "%i" % self.bg]
        if self.bold:
            parts.append("01")
        if self.underline:
            parts.append("04")
        return self.escape(parts)

    def reset_string(self):
        """Escape sequence that undoes exactly what color_string() set."""
        parts = []
        if self.fg is not None:
            parts.append("39")
        if self.bg is not None:
            parts.append("49")
        if self.bold or self.underline:
            parts.append("00")
        return self.escape(parts)
class Terminal256Formatter(Formatter):
    r"""
    Format tokens with ANSI color sequences, for output in a 256-color
    terminal or console. Like in `TerminalFormatter` color sequences
    are terminated at newlines, so that paging the output works correctly.
    The formatter takes colors from a style defined by the `style` option
    and converts them to nearest ANSI 256-color escape sequences. Bold and
    underline attributes from the style are preserved (and displayed).
    .. versionadded:: 0.9
    Options accepted:
    `style`
        The style to use, can be a string or a Style subclass (default:
        ``'default'``).
    """
    name = 'Terminal256'
    aliases = ['terminal256', 'console256', '256']
    filenames = []
    def __init__(self, **options):
        Formatter.__init__(self, **options)
        # xterm_colors: RGB triple per terminal color index.
        # best_match: cache of style hex color -> nearest xterm index.
        # style_string: str(token type) -> (start escape, reset escape).
        self.xterm_colors = []
        self.best_match = {}
        self.style_string = {}
        self.usebold = 'nobold' not in options
        self.useunderline = 'nounderline' not in options
        self._build_color_table() # build an RGB-to-256 color conversion table
        self._setup_styles() # convert selected style's colors to term. colors
    def _build_color_table(self):
        # colors 0..15: 16 basic colors
        self.xterm_colors.append((0x00, 0x00, 0x00)) # 0
        self.xterm_colors.append((0xcd, 0x00, 0x00)) # 1
        self.xterm_colors.append((0x00, 0xcd, 0x00)) # 2
        self.xterm_colors.append((0xcd, 0xcd, 0x00)) # 3
        self.xterm_colors.append((0x00, 0x00, 0xee)) # 4
        self.xterm_colors.append((0xcd, 0x00, 0xcd)) # 5
        self.xterm_colors.append((0x00, 0xcd, 0xcd)) # 6
        self.xterm_colors.append((0xe5, 0xe5, 0xe5)) # 7
        self.xterm_colors.append((0x7f, 0x7f, 0x7f)) # 8
        self.xterm_colors.append((0xff, 0x00, 0x00)) # 9
        self.xterm_colors.append((0x00, 0xff, 0x00)) # 10
        self.xterm_colors.append((0xff, 0xff, 0x00)) # 11
        self.xterm_colors.append((0x5c, 0x5c, 0xff)) # 12
        self.xterm_colors.append((0xff, 0x00, 0xff)) # 13
        self.xterm_colors.append((0x00, 0xff, 0xff)) # 14
        self.xterm_colors.append((0xff, 0xff, 0xff)) # 15
        # colors 16..232: the 6x6x6 color cube
        valuerange = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
        # NOTE(review): a 6x6x6 cube has 216 entries but this loop adds
        # 217 — i == 216 wraps around to the cube's first color — which
        # also shifts the grayscale ramp below by one index.  Left as-is
        # here to preserve this formatter's existing output; confirm
        # against upstream before changing.
        for i in range(217):
            r = valuerange[(i // 36) % 6]
            g = valuerange[(i // 6) % 6]
            b = valuerange[i % 6]
            self.xterm_colors.append((r, g, b))
        # colors 233..253: grayscale
        for i in range(1, 22):
            v = 8 + i * 10
            self.xterm_colors.append((v, v, v))
    def _closest_color(self, r, g, b):
        # Linear scan for the table entry with the smallest squared RGB
        # distance to the requested color.  NOTE(review): only the first
        # 254 entries are considered.
        distance = 257*257*3 # "infinity" (>distance from #000000 to #ffffff)
        match = 0
        for i in range(0, 254):
            values = self.xterm_colors[i]
            rd = r - values[0]
            gd = g - values[1]
            bd = b - values[2]
            d = rd*rd + gd*gd + bd*bd
            if d < distance:
                match = i
                distance = d
        return match
    def _color_index(self, color):
        # Memoized per style color string ('rrggbb' hex, no '#').
        index = self.best_match.get(color, None)
        if index is None:
            try:
                rgb = int(str(color), 16)
            except ValueError:
                # Unparseable color specs fall back to black.
                rgb = 0
            r = (rgb >> 16) & 0xff
            g = (rgb >> 8) & 0xff
            b = rgb & 0xff
            index = self._closest_color(r, g, b)
            self.best_match[color] = index
        return index
    def _setup_styles(self):
        # Precompute the (start, reset) escape pair for every token type
        # the selected style defines.
        for ttype, ndef in self.style:
            escape = EscapeSequence()
            if ndef['color']:
                escape.fg = self._color_index(ndef['color'])
            if ndef['bgcolor']:
                escape.bg = self._color_index(ndef['bgcolor'])
            if self.usebold and ndef['bold']:
                escape.bold = True
            if self.useunderline and ndef['underline']:
                escape.underline = True
            self.style_string[str(ttype)] = (escape.color_string(),
                                             escape.reset_string())
    def format(self, tokensource, outfile):
        # hack: if the output is a terminal and has an encoding set,
        # use that to avoid unicode encode problems
        if not self.encoding and hasattr(outfile, "encoding") and \
           hasattr(outfile, "isatty") and outfile.isatty() and \
           sys.version_info < (3,):
            self.encoding = outfile.encoding
        return Formatter.format(self, tokensource, outfile)
    def format_unencoded(self, tokensource, outfile):
        for ttype, value in tokensource:
            # Walk up the token hierarchy (ttype[:-1]) until a style
            # entry is found; emit uncolored text if none matches.
            not_found = True
            while ttype and not_found:
                try:
                    #outfile.write( "<" + str(ttype) + ">" )
                    on, off = self.style_string[str(ttype)]
                    # Like TerminalFormatter, add "reset colors" escape sequence
                    # on newline.
                    spl = value.split('\n')
                    for line in spl[:-1]:
                        if line:
                            outfile.write(on + line + off)
                        outfile.write('\n')
                    if spl[-1]:
                        outfile.write(on + spl[-1] + off)
                    not_found = False
                    #outfile.write( '#' + str(ttype) + '#' )
                except KeyError:
                    #ottype = ttype
                    ttype = ttype[:-1]
                    #outfile.write( '!' + str(ottype) + '->' + str(ttype) + '!' )
            if not_found:
                outfile.write(value)
|
Yadnyawalkya/integration_tests
|
refs/heads/master
|
cfme/tests/services/test_myservice.py
|
1
|
# -*- coding: utf-8 -*-
from datetime import datetime
import pytest
from cfme import test_requirements
from cfme.infrastructure.provider.virtualcenter import VMwareProvider
from cfme.markers.env_markers.provider import ONE_PER_TYPE
from cfme.services.myservice.ui import MyServiceDetailView
from cfme.utils import browser
from cfme.utils.appliance import ViaUI
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.blockers import BZ
from cfme.utils.browser import ensure_browser_open
from cfme.utils.update import update
from cfme.utils.version import appliance_is_downstream
from cfme.utils.wait import wait_for
pytestmark = [
pytest.mark.usefixtures('setup_provider', 'catalog_item'),
pytest.mark.meta(server_roles="+automate"),
pytest.mark.long_running,
test_requirements.service,
pytest.mark.tier(2),
pytest.mark.provider([VMwareProvider], selector=ONE_PER_TYPE, scope="module"),
]
@pytest.fixture
def needs_firefox():
    """ Fixture which skips the test if not run under firefox.
    I recommend putting it in the first place.
    """
    # Skip (rather than fail) when another browser drives the session.
    ensure_browser_open()
    if browser.browser().name != "firefox":
        pytest.skip(msg="This test needs firefox to run")
@pytest.mark.parametrize('context', [ViaUI])
def test_retire_service_ui(appliance, context, service_vm):
    """Tests my service
    Metadata:
        test_flag: provision
    Polarion:
        assignee: nansari
        casecomponent: Services
        initialEstimate: 1/4h
        tags: service
    """
    # service_vm yields (service, vm); only the service is needed here.
    service, _ = service_vm
    with appliance.context.use(context):
        service.retire()
@pytest.mark.parametrize('context', [ViaUI])
def test_retire_service_on_date(appliance, context, service_vm):
    """Tests my service retirement
    Metadata:
        test_flag: provision
    Polarion:
        assignee: nansari
        casecomponent: Services
        initialEstimate: 1/4h
        tags: service
    """
    service, _ = service_vm
    with appliance.context.use(context):
        # Schedule retirement for "now" (UTC) rather than retiring directly.
        dt = datetime.utcnow()
        service.retire_on_date(dt)
@pytest.mark.parametrize('context', [ViaUI])
@pytest.mark.meta(blockers=[BZ(1729940)])
def test_crud_set_ownership_and_edit_tags(appliance, context, service_vm):
    """Tests my service crud , edit tags and ownership
    Metadata:
        test_flag: provision
    Polarion:
        assignee: nansari
        casecomponent: Services
        initialEstimate: 1/4h
        tags: service
    """
    service, _ = service_vm
    with appliance.context.use(context):
        # Exercise ownership, tagging, update and delete in one pass.
        service.set_ownership("Administrator", "EvmGroup-administrator")
        service.add_tag()
        with update(service):
            service.description = "my edited description"
        service.delete()
@pytest.mark.parametrize('context', [ViaUI])
@pytest.mark.parametrize("filetype", ["Text", "CSV", "PDF"])
# PDF not present on upstream
@pytest.mark.uncollectif(lambda filetype: filetype == 'PDF' and not appliance_is_downstream())
def test_download_file(appliance, context, needs_firefox, service_vm, filetype):
    """Tests my service download files
    Metadata:
        test_flag: provision
    Polarion:
        assignee: nansari
        casecomponent: Services
        initialEstimate: 1/16h
        tags: service
    """
    service, _ = service_vm
    with appliance.context.use(context):
        service.download_file(filetype)
@pytest.mark.parametrize('context', [ViaUI])
def test_service_link(appliance, context, service_vm):
    """Tests service link from VM details page(BZ1443772)
    Polarion:
        assignee: nansari
        casecomponent: Services
        initialEstimate: 1/4h
        tags: service
    """
    service, vm = service_vm
    with appliance.context.use(context):
        # TODO: Update to nav to MyService first to click entity link when widget exists
        view = navigate_to(vm, 'Details')
        # Following the Service relationship must land on the service's
        # own detail view.
        view.entities.summary('Relationships').click_at('Service')
        new_view = service.create_view(MyServiceDetailView)
        assert new_view.wait_displayed()
@pytest.mark.parametrize('context', [ViaUI])
@pytest.mark.meta(automates=[BZ(1720338)])
def test_retire_service_with_retired_vm(appliance, context, service_vm):
    """Tests retire service with an already retired vm.
    Metadata:
        test_flag: provision
    Polarion:
        assignee: nansari
        casecomponent: Services
        initialEstimate: 1/4h
        tags: service
    Bugzilla:
        1720338
    """
    service, vm = service_vm
    vm.retire()
    # using rest entity to check if the VM has retired since it is a lot faster
    retire_vm = appliance.rest_api.collections.vms.get(name=vm.name)
    wait_for(
        # 'retired' only appears on the entity once retirement finished.
        lambda: (hasattr(retire_vm, "retired") and retire_vm.retired),
        timeout=1000,
        delay=5,
        fail_func=retire_vm.reload,
    )
    with appliance.context.use(context):
        service.retire()
@pytest.mark.manual
@pytest.mark.tier(3)
def test_retire_on_date_for_multiple_service():
    """
    Polarion:
        assignee: nansari
        casecomponent: Services
        testtype: functional
        initialEstimate: 1/8h
        startsin: 5.5
        tags: service
    """
    # Manual-test placeholder: steps live in Polarion, no automation yet.
    pass
@pytest.mark.meta(coverage=[1678123])
@pytest.mark.manual
@pytest.mark.tier(2)
def test_service_state():
    """
    Bugzilla:
        1678123
    Polarion:
        assignee: nansari
        casecomponent: Services
        initialEstimate: 1/16h
        startsin: 5.11
        testSteps:
            1. Create catalog and catalog item
            2. Order the catalog item
            3. Provision the service catalog item or fail the service catalog item
            4. Go to My services and check service state
        expectedResults:
            1.
            2.
            3.
            4. Service State should be Provisioned or Failed
    """
    # Manual-test placeholder: steps live in the docstring, no automation yet.
    pass
|
moreati/django
|
refs/heads/master
|
django/views/debug.py
|
50
|
from __future__ import unicode_literals
import re
import sys
import types
from django.conf import settings
from django.core.urlresolvers import Resolver404, resolve
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Context, Engine, TemplateDoesNotExist
from django.template.defaultfilters import force_escape, pprint
from django.utils import lru_cache, six, timezone
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_bytes, smart_text
from django.utils.module_loading import import_string
from django.utils.translation import ugettext as _
# Minimal Django templates engine to render the error templates
# regardless of the project's TEMPLATES setting.
DEBUG_ENGINE = Engine(debug=True)
HIDDEN_SETTINGS = re.compile('API|TOKEN|KEY|SECRET|PASS|SIGNATURE')
CLEANSED_SUBSTITUTE = '********************'
def linebreak_iter(template_source):
    """Yield the offset at which each line of *template_source* starts,
    followed by one past the end of the string as a final sentinel."""
    yield 0
    for offset, char in enumerate(template_source):
        if char == '\n':
            # The next line starts right after the newline character.
            yield offset + 1
    yield len(template_source) + 1
class CallableSettingWrapper(object):
    """Wrapper for callables appearing in settings.

    Ensures the debug page neither calls the setting (#21345) nor breaks
    when the callable forbids setting attributes (#23070).
    """

    def __init__(self, callable_setting):
        self._wrapped = callable_setting

    def __repr__(self):
        # Delegate so the debug page shows the underlying callable.
        return repr(self._wrapped)
def cleanse_setting(key, value):
    """Cleanse an individual setting key/value of sensitive content.

    If the value is a dictionary, recursively cleanse the keys in
    that dictionary.
    """
    try:
        if HIDDEN_SETTINGS.search(key):
            cleansed = CLEANSED_SUBSTITUTE
        elif isinstance(value, dict):
            cleansed = {k: cleanse_setting(k, v) for k, v in value.items()}
        else:
            cleansed = value
    except TypeError:
        # If the key isn't regex-able, just return as-is.
        cleansed = value
    if callable(cleansed):
        # Never call (or break on) callables found in settings
        # (#21345, #23070).
        cleansed = CallableSettingWrapper(cleansed)
    return cleansed
def get_safe_settings():
    "Returns a dictionary of the settings module, with sensitive settings blurred out."
    # Only upper-case names are settings by Django convention.
    return {
        k: cleanse_setting(k, getattr(settings, k))
        for k in dir(settings)
        if k.isupper()
    }
def technical_500_response(request, exc_type, exc_value, tb, status_code=500):
    """
    Create a technical server error response. The last three arguments are
    the values returned from sys.exc_info() and friends.
    """
    reporter = ExceptionReporter(request, exc_type, exc_value, tb)
    # AJAX clients get a plain-text traceback; browsers get the full
    # HTML debug page.
    if request.is_ajax():
        text = reporter.get_traceback_text()
        return HttpResponse(text, status=status_code, content_type='text/plain')
    else:
        html = reporter.get_traceback_html()
        return HttpResponse(html, status=status_code, content_type='text/html')
@lru_cache.lru_cache()
def get_default_exception_reporter_filter():
    # Instantiate the default filter for the first time and cache it.
    # The lru_cache decorator makes this effectively a lazy singleton.
    return import_string(settings.DEFAULT_EXCEPTION_REPORTER_FILTER)()
def get_exception_reporter_filter(request):
    """Return the filter attached to the request, if any, otherwise the
    configured default exception reporter filter."""
    fallback = get_default_exception_reporter_filter()
    return getattr(request, 'exception_reporter_filter', fallback)
class ExceptionReporterFilter(object):
    """
    Base for all exception reporter filter classes. All overridable hooks
    contain lenient default behaviors.
    """

    def get_post_parameters(self, request):
        # Without a request there is nothing to report.
        if request is None:
            return {}
        return request.POST

    def get_traceback_frame_variables(self, request, tb_frame):
        # Default: expose every frame-local unfiltered.
        return [item for item in tb_frame.f_locals.items()]
class SafeExceptionReporterFilter(ExceptionReporterFilter):
    """
    Use annotations made by the sensitive_post_parameters and
    sensitive_variables decorators to filter out sensitive information.
    """
    def is_active(self, request):
        """
        This filter is to add safety in production environments (i.e. DEBUG
        is False). If DEBUG is True then your site is not safe anyway.
        This hook is provided as a convenience to easily activate or
        deactivate the filter on a per request basis.
        """
        return settings.DEBUG is False
    def get_cleansed_multivaluedict(self, request, multivaluedict):
        """
        Replaces the keys in a MultiValueDict marked as sensitive with stars.
        This mitigates leaking sensitive POST parameters if something like
        request.POST['nonexistent_key'] throws an exception (#21098).
        """
        sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', [])
        if self.is_active(request) and sensitive_post_parameters:
            # Copy before mutating so the request's own dict is untouched.
            multivaluedict = multivaluedict.copy()
            for param in sensitive_post_parameters:
                if param in multivaluedict:
                    multivaluedict[param] = CLEANSED_SUBSTITUTE
        return multivaluedict
    def get_post_parameters(self, request):
        """
        Replaces the values of POST parameters marked as sensitive with
        stars (*********).
        """
        if request is None:
            return {}
        else:
            # The sensitive_post_parameters decorator annotates the request
            # with the parameter names (or '__ALL__') to cleanse.
            sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', [])
            if self.is_active(request) and sensitive_post_parameters:
                cleansed = request.POST.copy()
                if sensitive_post_parameters == '__ALL__':
                    # Cleanse all parameters.
                    for k, v in cleansed.items():
                        cleansed[k] = CLEANSED_SUBSTITUTE
                    return cleansed
                else:
                    # Cleanse only the specified parameters.
                    for param in sensitive_post_parameters:
                        if param in cleansed:
                            cleansed[param] = CLEANSED_SUBSTITUTE
                    return cleansed
            else:
                return request.POST
    def cleanse_special_types(self, request, value):
        try:
            # If value is lazy or a complex object of another kind, this check
            # might raise an exception. isinstance checks that lazy
            # MultiValueDicts will have a return value.
            is_multivalue_dict = isinstance(value, MultiValueDict)
        except Exception as e:
            return '{!r} while evaluating {!r}'.format(e, value)
        if is_multivalue_dict:
            # Cleanse MultiValueDicts (request.POST is the one we usually care about)
            value = self.get_cleansed_multivaluedict(request, value)
        return value
    def get_traceback_frame_variables(self, request, tb_frame):
        """
        Replaces the values of variables marked as sensitive with
        stars (*********).
        """
        # Loop through the frame's callers to see if the sensitive_variables
        # decorator was used.
        current_frame = tb_frame.f_back
        sensitive_variables = None
        while current_frame is not None:
            if (current_frame.f_code.co_name == 'sensitive_variables_wrapper'
                    and 'sensitive_variables_wrapper' in current_frame.f_locals):
                # The sensitive_variables decorator was used, so we take note
                # of the sensitive variables' names.
                wrapper = current_frame.f_locals['sensitive_variables_wrapper']
                sensitive_variables = getattr(wrapper, 'sensitive_variables', None)
                break
            current_frame = current_frame.f_back
        cleansed = {}
        if self.is_active(request) and sensitive_variables:
            if sensitive_variables == '__ALL__':
                # Cleanse all variables
                for name, value in tb_frame.f_locals.items():
                    cleansed[name] = CLEANSED_SUBSTITUTE
            else:
                # Cleanse specified variables
                for name, value in tb_frame.f_locals.items():
                    if name in sensitive_variables:
                        value = CLEANSED_SUBSTITUTE
                    else:
                        value = self.cleanse_special_types(request, value)
                    cleansed[name] = value
        else:
            # Potentially cleanse the request and any MultiValueDicts if they
            # are one of the frame variables.
            for name, value in tb_frame.f_locals.items():
                cleansed[name] = self.cleanse_special_types(request, value)
        if (tb_frame.f_code.co_name == 'sensitive_variables_wrapper'
                and 'sensitive_variables_wrapper' in tb_frame.f_locals):
            # For good measure, obfuscate the decorated function's arguments in
            # the sensitive_variables decorator's frame, in case the variables
            # associated with those arguments were meant to be obfuscated from
            # the decorated function's frame.
            cleansed['func_args'] = CLEANSED_SUBSTITUTE
            cleansed['func_kwargs'] = CLEANSED_SUBSTITUTE
        return cleansed.items()
class ExceptionReporter(object):
"""
A class to organize and coordinate reporting on exceptions.
"""
    def __init__(self, request, exc_type, exc_value, tb, is_email=False):
        # request may be None when reporting errors outside a request cycle.
        self.request = request
        self.filter = get_exception_reporter_filter(self.request)
        self.exc_type = exc_type
        self.exc_value = exc_value
        self.tb = tb
        # is_email selects the email-oriented rendering of the report.
        self.is_email = is_email
        # Template engines attach template_debug to exceptions they raise.
        self.template_info = getattr(self.exc_value, 'template_debug', None)
        self.template_does_not_exist = False
        self.postmortem = None
        # Handle deprecated string exceptions
        if isinstance(self.exc_type, six.string_types):
            self.exc_value = Exception('Deprecated String Exception: %r' % self.exc_type)
            self.exc_type = type(self.exc_value)
def get_traceback_data(self):
"""Return a dictionary containing traceback information."""
if self.exc_type and issubclass(self.exc_type, TemplateDoesNotExist):
self.template_does_not_exist = True
self.postmortem = self.exc_value.chain or [self.exc_value]
frames = self.get_traceback_frames()
for i, frame in enumerate(frames):
if 'vars' in frame:
frame_vars = []
for k, v in frame['vars']:
v = pprint(v)
# The force_escape filter assume unicode, make sure that works
if isinstance(v, six.binary_type):
v = v.decode('utf-8', 'replace') # don't choke on non-utf-8 input
# Trim large blobs of data
if len(v) > 4096:
v = '%s... <trimmed %d bytes string>' % (v[0:4096], len(v))
frame_vars.append((k, force_escape(v)))
frame['vars'] = frame_vars
frames[i] = frame
unicode_hint = ''
if self.exc_type and issubclass(self.exc_type, UnicodeError):
start = getattr(self.exc_value, 'start', None)
end = getattr(self.exc_value, 'end', None)
if start is not None and end is not None:
unicode_str = self.exc_value.args[1]
unicode_hint = smart_text(
unicode_str[max(start - 5, 0):min(end + 5, len(unicode_str))],
'ascii', errors='replace'
)
from django import get_version
c = {
'is_email': self.is_email,
'unicode_hint': unicode_hint,
'frames': frames,
'request': self.request,
'filtered_POST': self.filter.get_post_parameters(self.request),
'settings': get_safe_settings(),
'sys_executable': sys.executable,
'sys_version_info': '%d.%d.%d' % sys.version_info[0:3],
'server_time': timezone.now(),
'django_version_info': get_version(),
'sys_path': sys.path,
'template_info': self.template_info,
'template_does_not_exist': self.template_does_not_exist,
'postmortem': self.postmortem,
}
# Check whether exception info is available
if self.exc_type:
c['exception_type'] = self.exc_type.__name__
if self.exc_value:
c['exception_value'] = smart_text(self.exc_value, errors='replace')
if frames:
c['lastframe'] = frames[-1]
return c
def get_traceback_html(self):
"Return HTML version of debug 500 HTTP error page."
t = DEBUG_ENGINE.from_string(TECHNICAL_500_TEMPLATE)
c = Context(self.get_traceback_data(), use_l10n=False)
return t.render(c)
def get_traceback_text(self):
"Return plain text version of debug 500 HTTP error page."
t = DEBUG_ENGINE.from_string(TECHNICAL_500_TEXT_TEMPLATE)
c = Context(self.get_traceback_data(), autoescape=False, use_l10n=False)
return t.render(c)
def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None):
"""
Returns context_lines before and after lineno from file.
Returns (pre_context_lineno, pre_context, context_line, post_context).
"""
source = None
if loader is not None and hasattr(loader, "get_source"):
try:
source = loader.get_source(module_name)
except ImportError:
pass
if source is not None:
source = source.splitlines()
if source is None:
try:
with open(filename, 'rb') as fp:
source = fp.read().splitlines()
except (OSError, IOError):
pass
if source is None:
return None, [], None, []
# If we just read the source from a file, or if the loader did not
# apply tokenize.detect_encoding to decode the source into a Unicode
# string, then we should do that ourselves.
if isinstance(source[0], six.binary_type):
encoding = 'ascii'
for line in source[:2]:
# File coding may be specified. Match pattern from PEP-263
# (http://www.python.org/dev/peps/pep-0263/)
match = re.search(br'coding[:=]\s*([-\w.]+)', line)
if match:
encoding = match.group(1).decode('ascii')
break
source = [six.text_type(sline, encoding, 'replace') for sline in source]
lower_bound = max(0, lineno - context_lines)
upper_bound = lineno + context_lines
pre_context = source[lower_bound:lineno]
context_line = source[lineno]
post_context = source[lineno + 1:upper_bound]
return lower_bound, pre_context, context_line, post_context
def get_traceback_frames(self):
def explicit_or_implicit_cause(exc_value):
explicit = getattr(exc_value, '__cause__', None)
implicit = getattr(exc_value, '__context__', None)
return explicit or implicit
# Get the exception and all its causes
exceptions = []
exc_value = self.exc_value
while exc_value:
exceptions.append(exc_value)
exc_value = explicit_or_implicit_cause(exc_value)
frames = []
# No exceptions were supplied to ExceptionReporter
if not exceptions:
return frames
# In case there's just one exception (always in Python 2,
# sometimes in Python 3), take the traceback from self.tb (Python 2
# doesn't have a __traceback__ attribute on Exception)
exc_value = exceptions.pop()
tb = self.tb if six.PY2 or not exceptions else exc_value.__traceback__
while tb is not None:
# Support for __traceback_hide__ which is used by a few libraries
# to hide internal frames.
if tb.tb_frame.f_locals.get('__traceback_hide__'):
tb = tb.tb_next
continue
filename = tb.tb_frame.f_code.co_filename
function = tb.tb_frame.f_code.co_name
lineno = tb.tb_lineno - 1
loader = tb.tb_frame.f_globals.get('__loader__')
module_name = tb.tb_frame.f_globals.get('__name__') or ''
pre_context_lineno, pre_context, context_line, post_context = self._get_lines_from_file(
filename, lineno, 7, loader, module_name,
)
if pre_context_lineno is not None:
frames.append({
'exc_cause': explicit_or_implicit_cause(exc_value),
'exc_cause_explicit': getattr(exc_value, '__cause__', True),
'tb': tb,
'type': 'django' if module_name.startswith('django.') else 'user',
'filename': filename,
'function': function,
'lineno': lineno + 1,
'vars': self.filter.get_traceback_frame_variables(self.request, tb.tb_frame),
'id': id(tb),
'pre_context': pre_context,
'context_line': context_line,
'post_context': post_context,
'pre_context_lineno': pre_context_lineno + 1,
})
# If the traceback for current exception is consumed, try the
# other exception.
if six.PY2:
tb = tb.tb_next
elif not tb.tb_next and exceptions:
exc_value = exceptions.pop()
tb = exc_value.__traceback__
else:
tb = tb.tb_next
return frames
def format_exception(self):
"""
Return the same data as from traceback.format_exception.
"""
import traceback
frames = self.get_traceback_frames()
tb = [(f['filename'], f['lineno'], f['function'], f['context_line']) for f in frames]
list = ['Traceback (most recent call last):\n']
list += traceback.format_list(tb)
list += traceback.format_exception_only(self.exc_type, self.exc_value)
return list
def technical_404_response(request, exception):
    "Create a technical 404 error response. The exception should be the Http404."
    # Pull the failing path from the Http404 payload, falling back to the
    # request path without its leading slash.
    try:
        error_url = exception.args[0]['path']
    except (IndexError, TypeError, KeyError):
        error_url = request.path_info[1:]
    try:
        tried = exception.args[0]['tried']
    except (IndexError, TypeError, KeyError):
        tried = []
    else:
        only_default_admin_pattern = (
            request.path == '/'
            and len(tried) == 1
            and len(tried[0]) == 1
            and getattr(tried[0][0], 'app_name', '') == getattr(tried[0][0], 'namespace', '') == 'admin'
        )
        # Empty URLconf, or only the default admin pattern at '/': show the
        # friendly welcome page instead of a 404 debug page.
        if not tried or only_default_admin_pattern:
            return default_urlconf(request)
    urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF)
    if isinstance(urlconf, types.ModuleType):
        urlconf = urlconf.__name__
    # Work out a dotted name for the view that raised the Http404, if the
    # current path resolves to one.
    caller = ''
    try:
        resolver_match = resolve(request.path)
    except Resolver404:
        pass
    else:
        obj = resolver_match.func
        if hasattr(obj, '__name__'):
            caller = obj.__name__
        elif hasattr(obj, '__class__') and hasattr(obj.__class__, '__name__'):
            caller = obj.__class__.__name__
        if hasattr(obj, '__module__'):
            caller = '%s.%s' % (obj.__module__, caller)
    template = DEBUG_ENGINE.from_string(TECHNICAL_404_TEMPLATE)
    context = Context({
        'urlconf': urlconf,
        'root_urlconf': settings.ROOT_URLCONF,
        'request_path': error_url,
        'urlpatterns': tried,
        'reason': force_bytes(exception, errors='replace'),
        'request': request,
        'settings': get_safe_settings(),
        'raising_view_name': caller,
    })
    return HttpResponseNotFound(template.render(context), content_type='text/html')
def default_urlconf(request):
    "Create an empty URLconf 404 error response."
    # Rendered when the project has no URL patterns configured yet.
    context = Context({
        "title": _("Welcome to Django"),
        "heading": _("It worked!"),
        "subheading": _("Congratulations on your first Django-powered page."),
        "instructions": _("Of course, you haven't actually done any work yet. "
                          "Next, start your first app by running <code>python manage.py startapp [app_label]</code>."),
        "explanation": _("You're seeing this message because you have <code>DEBUG = True</code> in your "
                         "Django settings file and you haven't configured any URLs. Get to work!"),
    })
    template = DEBUG_ENGINE.from_string(DEFAULT_URLCONF_TEMPLATE)
    return HttpResponse(template.render(context), content_type='text/html')
#
# Templates are embedded in the file so that we know the error handler will
# always work even if the template loader is broken.
#
TECHNICAL_500_TEMPLATE = ("""
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE">
<title>{% if exception_type %}{{ exception_type }}{% else %}Report{% endif %}"""
"""{% if request %} at {{ request.path_info|escape }}{% endif %}</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; }
h2 { margin-bottom:.8em; }
h2 span { font-size:80%; color:#666; font-weight:normal; }
h3 { margin:1em 0 .5em 0; }
h4 { margin:0 0 .5em 0; font-weight: normal; }
code, pre { font-size: 100%; white-space: pre-wrap; }
table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; }
tbody td, tbody th { vertical-align:top; padding:2px 3px; }
thead th {
padding:1px 6px 1px 3px; background:#fefefe; text-align:left;
font-weight:normal; font-size:11px; border:1px solid #ddd;
}
tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; }
table.vars { margin:5px 0 2px 40px; }
table.vars td, table.req td { font-family:monospace; }
table td.code { width:100%; }
table td.code pre { overflow:hidden; }
table.source th { color:#666; }
table.source td { font-family:monospace; white-space:pre; border-bottom:1px solid #eee; }
ul.traceback { list-style-type:none; color: #222; }
ul.traceback li.frame { padding-bottom:1em; color:#666; }
ul.traceback li.user { background-color:#e0e0e0; color:#000 }
div.context { padding:10px 0; overflow:hidden; }
div.context ol { padding-left:30px; margin:0 10px; list-style-position: inside; }
div.context ol li { font-family:monospace; white-space:pre; color:#777; cursor:pointer; padding-left: 2px; }
div.context ol li pre { display:inline; }
div.context ol.context-line li { color:#505050; background-color:#dfdfdf; padding: 3px 2px; }
div.context ol.context-line li span { position:absolute; right:32px; }
.user div.context ol.context-line li { background-color:#bbb; color:#000; }
.user div.context ol li { color:#666; }
div.commands { margin-left: 40px; }
div.commands a { color:#555; text-decoration:none; }
.user div.commands a { color: black; }
#summary { background: #ffc; }
#summary h2 { font-weight: normal; color: #666; }
#explanation { background:#eee; }
#template, #template-not-exist { background:#f6f6f6; }
#template-not-exist ul { margin: 0 0 10px 20px; }
#template-not-exist .postmortem-section { margin-bottom: 3px; }
#unicode-hint { background:#eee; }
#traceback { background:#eee; }
#requestinfo { background:#f6f6f6; padding-left:120px; }
#summary table { border:none; background:transparent; }
#requestinfo h2, #requestinfo h3 { position:relative; margin-left:-100px; }
#requestinfo h3 { margin-bottom:-1em; }
.error { background: #ffc; }
.specific { color:#cc3300; font-weight:bold; }
h2 span.commands { font-size:.7em;}
span.commands a:link {color:#5E5694;}
pre.exception_value { font-family: sans-serif; color: #666; font-size: 1.5em; margin: 10px 0 10px 0; }
.append-bottom { margin-bottom: 10px; }
</style>
{% if not is_email %}
<script type="text/javascript">
//<!--
function getElementsByClassName(oElm, strTagName, strClassName){
// Written by Jonathan Snook, http://www.snook.ca/jon; Add-ons by Robert Nyman, http://www.robertnyman.com
var arrElements = (strTagName == "*" && document.all)? document.all :
oElm.getElementsByTagName(strTagName);
var arrReturnElements = new Array();
strClassName = strClassName.replace(/\-/g, "\\-");
var oRegExp = new RegExp("(^|\\s)" + strClassName + "(\\s|$)");
var oElement;
for(var i=0; i<arrElements.length; i++){
oElement = arrElements[i];
if(oRegExp.test(oElement.className)){
arrReturnElements.push(oElement);
}
}
return (arrReturnElements)
}
function hideAll(elems) {
for (var e = 0; e < elems.length; e++) {
elems[e].style.display = 'none';
}
}
window.onload = function() {
hideAll(getElementsByClassName(document, 'table', 'vars'));
hideAll(getElementsByClassName(document, 'ol', 'pre-context'));
hideAll(getElementsByClassName(document, 'ol', 'post-context'));
hideAll(getElementsByClassName(document, 'div', 'pastebin'));
}
function toggle() {
for (var i = 0; i < arguments.length; i++) {
var e = document.getElementById(arguments[i]);
if (e) {
e.style.display = e.style.display == 'none' ? 'block': 'none';
}
}
return false;
}
function varToggle(link, id) {
toggle('v' + id);
var s = link.getElementsByTagName('span')[0];
var uarr = String.fromCharCode(0x25b6);
var darr = String.fromCharCode(0x25bc);
s.innerHTML = s.innerHTML == uarr ? darr : uarr;
return false;
}
function switchPastebinFriendly(link) {
s1 = "Switch to copy-and-paste view";
s2 = "Switch back to interactive view";
link.innerHTML = link.innerHTML.trim() == s1 ? s2: s1;
toggle('browserTraceback', 'pastebinTraceback');
return false;
}
//-->
</script>
{% endif %}
</head>
<body>
<div id="summary">
<h1>{% if exception_type %}{{ exception_type }}{% else %}Report{% endif %}"""
"""{% if request %} at {{ request.path_info|escape }}{% endif %}</h1>
<pre class="exception_value">"""
"""{% if exception_value %}{{ exception_value|force_escape }}{% else %}No exception message supplied{% endif %}"""
"""</pre>
<table class="meta">
{% if request %}
<tr>
<th>Request Method:</th>
<td>{{ request.META.REQUEST_METHOD }}</td>
</tr>
<tr>
<th>Request URL:</th>
<td>{{ request.get_raw_uri|escape }}</td>
</tr>
{% endif %}
<tr>
<th>Django Version:</th>
<td>{{ django_version_info }}</td>
</tr>
{% if exception_type %}
<tr>
<th>Exception Type:</th>
<td>{{ exception_type }}</td>
</tr>
{% endif %}
{% if exception_type and exception_value %}
<tr>
<th>Exception Value:</th>
<td><pre>{{ exception_value|force_escape }}</pre></td>
</tr>
{% endif %}
{% if lastframe %}
<tr>
<th>Exception Location:</th>
<td>{{ lastframe.filename|escape }} in {{ lastframe.function|escape }}, line {{ lastframe.lineno }}</td>
</tr>
{% endif %}
<tr>
<th>Python Executable:</th>
<td>{{ sys_executable|escape }}</td>
</tr>
<tr>
<th>Python Version:</th>
<td>{{ sys_version_info }}</td>
</tr>
<tr>
<th>Python Path:</th>
<td><pre>{{ sys_path|pprint }}</pre></td>
</tr>
<tr>
<th>Server time:</th>
<td>{{server_time|date:"r"}}</td>
</tr>
</table>
</div>
{% if unicode_hint %}
<div id="unicode-hint">
<h2>Unicode error hint</h2>
<p>The string that could not be encoded/decoded was: <strong>{{ unicode_hint|force_escape }}</strong></p>
</div>
{% endif %}
{% if template_does_not_exist %}
<div id="template-not-exist">
<h2>Template-loader postmortem</h2>
{% if postmortem %}
<p class="append-bottom">Django tried loading these templates, in this order:</p>
{% for entry in postmortem %}
<p class="postmortem-section">Using engine <code>{{ entry.backend.name }}</code>:</p>
<ul>
{% if entry.tried %}
{% for attempt in entry.tried %}
<li><code>{{ attempt.0.loader_name }}</code>: {{ attempt.0.name }} ({{ attempt.1 }})</li>
{% endfor %}
</ul>
{% else %}
<li>This engine did not provide a list of tried templates.</li>
{% endif %}
</ul>
{% endfor %}
{% else %}
<p>No templates were found because your 'TEMPLATES' setting is not configured.</p>
{% endif %}
</div>
{% endif %}
{% if template_info %}
<div id="template">
<h2>Error during template rendering</h2>
<p>In template <code>{{ template_info.name }}</code>, error at line <strong>{{ template_info.line }}</strong></p>
<h3>{{ template_info.message }}</h3>
<table class="source{% if template_info.top %} cut-top{% endif %}
{% if template_info.bottom != template_info.total %} cut-bottom{% endif %}">
{% for source_line in template_info.source_lines %}
{% if source_line.0 == template_info.line %}
<tr class="error"><th>{{ source_line.0 }}</th>
<td>{{ template_info.before }}"""
"""<span class="specific">{{ template_info.during }}</span>"""
"""{{ template_info.after }}</td>
</tr>
{% else %}
<tr><th>{{ source_line.0 }}</th>
<td>{{ source_line.1 }}</td></tr>
{% endif %}
{% endfor %}
</table>
</div>
{% endif %}
{% if frames %}
<div id="traceback">
<h2>Traceback <span class="commands">{% if not is_email %}<a href="#" onclick="return switchPastebinFriendly(this);">
Switch to copy-and-paste view</a></span>{% endif %}
</h2>
{% autoescape off %}
<div id="browserTraceback">
<ul class="traceback">
{% for frame in frames %}
{% ifchanged frame.exc_cause %}{% if frame.exc_cause %}
<li><h3>
{% if frame.exc_cause_explicit %}
The above exception ({{ frame.exc_cause }}) was the direct cause of the following exception:
{% else %}
During handling of the above exception ({{ frame.exc_cause }}), another exception occurred:
{% endif %}
</h3></li>
{% endif %}{% endifchanged %}
<li class="frame {{ frame.type }}">
<code>{{ frame.filename|escape }}</code> in <code>{{ frame.function|escape }}</code>
{% if frame.context_line %}
<div class="context" id="c{{ frame.id }}">
{% if frame.pre_context and not is_email %}
<ol start="{{ frame.pre_context_lineno }}" class="pre-context" id="pre{{ frame.id }}">
{% for line in frame.pre_context %}
<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ line|escape }}</pre></li>
{% endfor %}
</ol>
{% endif %}
<ol start="{{ frame.lineno }}" class="context-line">
<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>
""" """{{ frame.context_line|escape }}</pre>{% if not is_email %} <span>...</span>{% endif %}</li></ol>
{% if frame.post_context and not is_email %}
<ol start='{{ frame.lineno|add:"1" }}' class="post-context" id="post{{ frame.id }}">
{% for line in frame.post_context %}
<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ line|escape }}</pre></li>
{% endfor %}
</ol>
{% endif %}
</div>
{% endif %}
{% if frame.vars %}
<div class="commands">
{% if is_email %}
<h2>Local Vars</h2>
{% else %}
<a href="#" onclick="return varToggle(this, '{{ frame.id }}')"><span>▶</span> Local vars</a>
{% endif %}
</div>
<table class="vars" id="v{{ frame.id }}">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in frame.vars|dictsort:"0" %}
<tr>
<td>{{ var.0|force_escape }}</td>
<td class="code"><pre>{{ var.1 }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
</li>
{% endfor %}
</ul>
</div>
{% endautoescape %}
<form action="http://dpaste.com/" name="pasteform" id="pasteform" method="post">
{% if not is_email %}
<div id="pastebinTraceback" class="pastebin">
<input type="hidden" name="language" value="PythonConsole">
<input type="hidden" name="title"
value="{{ exception_type|escape }}{% if request %} at {{ request.path_info|escape }}{% endif %}">
<input type="hidden" name="source" value="Django Dpaste Agent">
<input type="hidden" name="poster" value="Django">
<textarea name="content" id="traceback_area" cols="140" rows="25">
Environment:
{% if request %}
Request Method: {{ request.META.REQUEST_METHOD }}
Request URL: {{ request.get_raw_uri|escape }}
{% endif %}
Django Version: {{ django_version_info }}
Python Version: {{ sys_version_info }}
Installed Applications:
{{ settings.INSTALLED_APPS|pprint }}
Installed Middleware:
{{ settings.MIDDLEWARE_CLASSES|pprint }}
{% if template_does_not_exist %}Template loader postmortem
{% if postmortem %}Django tried loading these templates, in this order:
{% for entry in postmortem %}
Using engine {{ entry.backend.name }}:
{% if entry.tried %}{% for attempt in entry.tried %}"""
""" * {{ attempt.0.loader_name }}: {{ attempt.0.name }} ({{ attempt.1 }})
{% endfor %}{% else %} This engine did not provide a list of tried templates.
{% endif %}{% endfor %}
{% else %}No templates were found because your 'TEMPLATES' setting is not configured.
{% endif %}{% endif %}{% if template_info %}
Template error:
In template {{ template_info.name }}, error at line {{ template_info.line }}
{{ template_info.message }}"""
"{% for source_line in template_info.source_lines %}"
"{% if source_line.0 == template_info.line %}"
" {{ source_line.0 }} : {{ template_info.before }} {{ template_info.during }} {{ template_info.after }}"
"{% else %}"
" {{ source_line.0 }} : {{ source_line.1 }}"
"""{% endif %}{% endfor %}{% endif %}
Traceback:{% for frame in frames %}
{% ifchanged frame.exc_cause %}{% if frame.exc_cause %}{% if frame.exc_cause_explicit %}
The above exception ({{ frame.exc_cause }}) was the direct cause of the following exception:
{% else %}
During handling of the above exception ({{ frame.exc_cause }}), another exception occurred:
{% endif %}{% endif %}{% endifchanged %}
File "{{ frame.filename|escape }}" in {{ frame.function|escape }}
{% if frame.context_line %} {{ frame.lineno }}. {{ frame.context_line|escape }}{% endif %}{% endfor %}
Exception Type: {{ exception_type|escape }}{% if request %} at {{ request.path_info|escape }}{% endif %}
Exception Value: {{ exception_value|force_escape }}
</textarea>
<br><br>
<input type="submit" value="Share this traceback on a public Web site">
</div>
</form>
</div>
{% endif %}
{% endif %}
<div id="requestinfo">
<h2>Request information</h2>
{% if request %}
<h3 id="get-info">GET</h3>
{% if request.GET %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.GET.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No GET data</p>
{% endif %}
<h3 id="post-info">POST</h3>
{% if filtered_POST %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in filtered_POST.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No POST data</p>
{% endif %}
<h3 id="files-info">FILES</h3>
{% if request.FILES %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.FILES.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No FILES data</p>
{% endif %}
<h3 id="cookie-info">COOKIES</h3>
{% if request.COOKIES %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.COOKIES.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No cookie data</p>
{% endif %}
<h3 id="meta-info">META</h3>
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.META.items|dictsort:"0" %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>Request data not supplied</p>
{% endif %}
<h3 id="settings-info">Settings</h3>
<h4>Using settings module <code>{{ settings.SETTINGS_MODULE }}</code></h4>
<table class="req">
<thead>
<tr>
<th>Setting</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in settings.items|dictsort:"0" %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
{% if not is_email %}
<div id="explanation">
<p>
You're seeing this error because you have <code>DEBUG = True</code> in your
Django settings file. Change that to <code>False</code>, and Django will
display a standard page generated by the handler for this status code.
</p>
</div>
{% endif %}
</body>
</html>
""")
TECHNICAL_500_TEXT_TEMPLATE = (""""""
"""{% firstof exception_type 'Report' %}{% if request %} at {{ request.path_info }}{% endif %}
{% firstof exception_value 'No exception message supplied' %}
{% if request %}
Request Method: {{ request.META.REQUEST_METHOD }}
Request URL: {{ request.get_raw_uri }}{% endif %}
Django Version: {{ django_version_info }}
Python Executable: {{ sys_executable }}
Python Version: {{ sys_version_info }}
Python Path: {{ sys_path }}
Server time: {{server_time|date:"r"}}
Installed Applications:
{{ settings.INSTALLED_APPS|pprint }}
Installed Middleware:
{{ settings.MIDDLEWARE_CLASSES|pprint }}
{% if template_does_not_exist %}Template loader postmortem
{% if postmortem %}Django tried loading these templates, in this order:
{% for entry in postmortem %}
Using engine {{ entry.backend.name }}:
{% if entry.tried %}{% for attempt in entry.tried %}"""
""" * {{ attempt.0.loader_name }}: {{ attempt.0.name }} ({{ attempt.1 }})
{% endfor %}{% else %} This engine did not provide a list of tried templates.
{% endif %}{% endfor %}
{% else %}No templates were found because your 'TEMPLATES' setting is not configured.
{% endif %}
{% endif %}{% if template_info %}
Template error:
In template {{ template_info.name }}, error at line {{ template_info.line }}
{{ template_info.message }}
{% for source_line in template_info.source_lines %}"""
"{% if source_line.0 == template_info.line %}"
" {{ source_line.0 }} : {{ template_info.before }} {{ template_info.during }} {{ template_info.after }}"
"{% else %}"
" {{ source_line.0 }} : {{ source_line.1 }}"
"""{% endif %}{% endfor %}{% endif %}{% if frames %}
Traceback:"""
"{% for frame in frames %}"
"{% ifchanged frame.exc_cause %}"
" {% if frame.exc_cause %}" """
{% if frame.exc_cause_explicit %}
The above exception ({{ frame.exc_cause }}) was the direct cause of the following exception:
{% else %}
During handling of the above exception ({{ frame.exc_cause }}), another exception occurred:
{% endif %}
{% endif %}
{% endifchanged %}
File "{{ frame.filename }}" in {{ frame.function }}
{% if frame.context_line %} {{ frame.lineno }}. {{ frame.context_line }}{% endif %}
{% endfor %}
{% if exception_type %}Exception Type: {{ exception_type }}{% if request %} at {{ request.path_info }}{% endif %}
{% if exception_value %}Exception Value: {{ exception_value }}{% endif %}{% endif %}{% endif %}
{% if request %}Request information:
GET:{% for k, v in request.GET.items %}
{{ k }} = {{ v|stringformat:"r" }}{% empty %} No GET data{% endfor %}
POST:{% for k, v in filtered_POST.items %}
{{ k }} = {{ v|stringformat:"r" }}{% empty %} No POST data{% endfor %}
FILES:{% for k, v in request.FILES.items %}
{{ k }} = {{ v|stringformat:"r" }}{% empty %} No FILES data{% endfor %}
COOKIES:{% for k, v in request.COOKIES.items %}
{{ k }} = {{ v|stringformat:"r" }}{% empty %} No cookie data{% endfor %}
META:{% for k, v in request.META.items|dictsort:"0" %}
{{ k }} = {{ v|stringformat:"r" }}{% endfor %}
{% else %}Request data not supplied
{% endif %}
Settings:
Using settings module {{ settings.SETTINGS_MODULE }}{% for k, v in settings.items|dictsort:"0" %}
{{ k }} = {{ v|stringformat:"r" }}{% endfor %}
{% if not is_email %}
You're seeing this error because you have DEBUG = True in your
Django settings file. Change that to False, and Django will
display a standard page generated by the handler for this status code.
{% endif %}
""")
TECHNICAL_404_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<title>Page not found at {{ request.path_info|escape }}</title>
<meta name="robots" content="NONE,NOARCHIVE">
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; background:#eee; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; margin-bottom:.4em; }
h1 span { font-size:60%; color:#666; font-weight:normal; }
table { border:none; border-collapse: collapse; width:100%; }
td, th { vertical-align:top; padding:2px 3px; }
th { width:12em; text-align:right; color:#666; padding-right:.5em; }
#info { background:#f6f6f6; }
#info ol { margin: 0.5em 4em; }
#info ol li { font-family: monospace; }
#summary { background: #ffc; }
#explanation { background:#eee; border-bottom: 0px none; }
</style>
</head>
<body>
<div id="summary">
<h1>Page not found <span>(404)</span></h1>
<table class="meta">
<tr>
<th>Request Method:</th>
<td>{{ request.META.REQUEST_METHOD }}</td>
</tr>
<tr>
<th>Request URL:</th>
<td>{{ request.build_absolute_uri|escape }}</td>
</tr>
{% if raising_view_name %}
<tr>
<th>Raised by:</th>
<td>{{ raising_view_name }}</td>
</tr>
{% endif %}
</table>
</div>
<div id="info">
{% if urlpatterns %}
<p>
Using the URLconf defined in <code>{{ urlconf }}</code>,
Django tried these URL patterns, in this order:
</p>
<ol>
{% for pattern in urlpatterns %}
<li>
{% for pat in pattern %}
{{ pat.regex.pattern }}
{% if forloop.last and pat.name %}[name='{{ pat.name }}']{% endif %}
{% endfor %}
</li>
{% endfor %}
</ol>
<p>The current URL, <code>{{ request_path|escape }}</code>, didn't match any of these.</p>
{% else %}
<p>{{ reason }}</p>
{% endif %}
</div>
<div id="explanation">
<p>
You're seeing this error because you have <code>DEBUG = True</code> in
your Django settings file. Change that to <code>False</code>, and Django
will display a standard 404 page.
</p>
</div>
</body>
</html>
"""
DEFAULT_URLCONF_TEMPLATE = """
<!DOCTYPE html>
<html lang="en"><head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE"><title>{{ title }}</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; }
h2 { margin-bottom:.8em; }
h2 span { font-size:80%; color:#666; font-weight:normal; }
h3 { margin:1em 0 .5em 0; }
h4 { margin:0 0 .5em 0; font-weight: normal; }
table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; }
tbody td, tbody th { vertical-align:top; padding:2px 3px; }
thead th {
padding:1px 6px 1px 3px; background:#fefefe; text-align:left;
font-weight:normal; font-size:11px; border:1px solid #ddd;
}
tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; }
#summary { background: #e0ebff; }
#summary h2 { font-weight: normal; color: #666; }
#explanation { background:#eee; }
#instructions { background:#f6f6f6; }
#summary table { border:none; background:transparent; }
</style>
</head>
<body>
<div id="summary">
<h1>{{ heading }}</h1>
<h2>{{ subheading }}</h2>
</div>
<div id="instructions">
<p>
{{ instructions|safe }}
</p>
</div>
<div id="explanation">
<p>
{{ explanation|safe }}
</p>
</div>
</body></html>
"""
|
songmonit/CTTMSONLINE_V8
|
refs/heads/master
|
addons/payment_ogone/controllers/__init__.py
|
4497
|
# -*- coding: utf-8 -*-
import main
|
AnthonyBroadCrawford/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/html5lib/html5lib/__init__.py
|
426
|
"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage:
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
__version__ = "0.9999-dev"
|
NoahFlowa/glowing-spoon
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/distlib/scripts.py
|
333
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2015 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from io import BytesIO
import logging
import os
import re
import struct
import sys
from .compat import sysconfig, detect_encoding, ZipFile
from .resources import finder
from .util import (FileOperator, get_export_entry, convert_path,
get_executable, in_venv)
logger = logging.getLogger(__name__)
_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity version="1.0.0.0"
processorArchitecture="X86"
name="%s"
type="win32"/>
<!-- Identify the application security requirements. -->
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
</requestedPrivileges>
</security>
</trustInfo>
</assembly>'''.strip()
# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
if __name__ == '__main__':
import sys, re
def _resolve(module, func):
__import__(module)
mod = sys.modules[module]
parts = func.split('.')
result = getattr(mod, parts.pop(0))
for p in parts:
result = getattr(result, p)
return result
try:
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
func = _resolve('%(module)s', '%(func)s')
rc = func() # None interpreted as 0
except Exception as e: # only supporting Python >= 2.6
sys.stderr.write('%%s\\n' %% e)
rc = 1
sys.exit(rc)
'''
def _enquote_executable(executable):
if ' ' in executable:
# make sure we quote only the executable in case of env
# for example /usr/bin/env "/dir with spaces/bin/jython"
# instead of "/usr/bin/env /dir with spaces/bin/jython"
# otherwise whole
if executable.startswith('/usr/bin/env '):
env, _executable = executable.split(' ', 1)
if ' ' in _executable and not _executable.startswith('"'):
executable = '%s "%s"' % (env, _executable)
else:
if not executable.startswith('"'):
executable = '"%s"' % executable
return executable
class ScriptMaker(object):
    """
    A class to copy or create scripts from source scripts or callable
    specifications.
    """
    script_template = SCRIPT_TEMPLATE

    executable = None  # for shebangs

    def __init__(self, source_dir, target_dir, add_launchers=True,
                 dry_run=False, fileop=None):
        # source_dir: directory existing scripts are copied from.
        # target_dir: directory generated/copied scripts are written to.
        # add_launchers: on Windows, wrap scripts in .exe launchers.
        # fileop: FileOperator to delegate file writes to; a fresh one is
        #         created when not supplied.
        self.source_dir = source_dir
        self.target_dir = target_dir
        self.add_launchers = add_launchers
        self.force = False
        self.clobber = False
        # It only makes sense to set mode bits on POSIX.
        self.set_mode = (os.name == 'posix') or (os.name == 'java' and
                                                 os._name == 'posix')
        # Script-name variants to generate: '' -> "name",
        # 'X' -> "name<major>", 'X.Y' -> "name-<major.minor>".
        self.variants = set(('', 'X.Y'))
        self._fileop = fileop or FileOperator(dry_run)
        # NOTE: os._name is a Jython-specific attribute naming the host OS.
        self._is_nt = os.name == 'nt' or (
            os.name == 'java' and os._name == 'nt')

    def _get_alternate_executable(self, executable, options):
        # For GUI scripts on Windows, switch to pythonw so no console opens.
        if options.get('gui', False) and self._is_nt:  # pragma: no cover
            dn, fn = os.path.split(executable)
            fn = fn.replace('python', 'pythonw')
            executable = os.path.join(dn, fn)
        return executable

    # These helpers are only defined when running under Jython.
    if sys.platform.startswith('java'):  # pragma: no cover
        def _is_shell(self, executable):
            """
            Determine if the specified executable is a script
            (contains a #! line)
            """
            try:
                with open(executable) as fp:
                    return fp.read(2) == '#!'
            except (OSError, IOError):
                logger.warning('Failed to open %s', executable)
                return False

        def _fix_jython_executable(self, executable):
            # Jython can't always be invoked directly from a shebang; fall
            # back to /usr/bin/env unless a known-safe case applies.
            if self._is_shell(executable):
                # Workaround for Jython is not needed on Linux systems.
                import java

                if java.lang.System.getProperty('os.name') == 'Linux':
                    return executable
            elif executable.lower().endswith('jython.exe'):
                # Use wrapper exe for Jython on Windows
                return executable
            return '/usr/bin/env %s' % executable

    def _get_shebang(self, encoding, post_interp=b'', options=None):
        """Build the ``b'#!...'`` shebang line for a generated script.

        Ensures the result is decodable from UTF-8 and from the script's
        own *encoding*; raises ValueError otherwise.
        """
        enquote = True
        if self.executable:
            executable = self.executable
            enquote = False  # assume this will be taken care of
        elif not sysconfig.is_python_build():
            executable = get_executable()
        elif in_venv():  # pragma: no cover
            executable = os.path.join(sysconfig.get_path('scripts'),
                                      'python%s' % sysconfig.get_config_var('EXE'))
        else:  # pragma: no cover
            executable = os.path.join(
                sysconfig.get_config_var('BINDIR'),
                'python%s%s' % (sysconfig.get_config_var('VERSION'),
                                sysconfig.get_config_var('EXE')))
        if options:
            executable = self._get_alternate_executable(executable, options)

        if sys.platform.startswith('java'):  # pragma: no cover
            executable = self._fix_jython_executable(executable)
        # Normalise case for Windows
        executable = os.path.normcase(executable)
        # If the user didn't specify an executable, it may be necessary to
        # cater for executable paths with spaces (not uncommon on Windows)
        if enquote:
            executable = _enquote_executable(executable)
        # Issue #51: don't use fsencode, since we later try to
        # check that the shebang is decodable using utf-8.
        executable = executable.encode('utf-8')
        # in case of IronPython, play safe and enable frames support
        if (sys.platform == 'cli' and '-X:Frames' not in post_interp
            and '-X:FullFrames' not in post_interp):  # pragma: no cover
            post_interp += b' -X:Frames'
        shebang = b'#!' + executable + post_interp + b'\n'
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be decodable from
        # UTF-8.
        try:
            shebang.decode('utf-8')
        except UnicodeDecodeError:  # pragma: no cover
            raise ValueError(
                'The shebang (%r) is not decodable from utf-8' % shebang)
        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be decodable from
        # the script encoding too.
        if encoding != 'utf-8':
            try:
                shebang.decode(encoding)
            except UnicodeDecodeError:  # pragma: no cover
                raise ValueError(
                    'The shebang (%r) is not decodable '
                    'from the script encoding (%r)' % (shebang, encoding))
        return shebang

    def _get_script_text(self, entry):
        # Fill the export entry's module/function into the script template.
        return self.script_template % dict(module=entry.prefix,
                                           func=entry.suffix)

    manifest = _DEFAULT_MANIFEST

    def get_manifest(self, exename):
        """Return the Windows manifest XML for the given executable name."""
        base = os.path.basename(exename)
        return self.manifest % base

    def _write_script(self, names, shebang, script_bytes, filenames, ext):
        """Write *script_bytes* (preceded by *shebang*) under each of *names*
        in the target directory, appending the written paths to *filenames*.

        On Windows (with add_launchers), the script is zipped and appended
        to a launcher .exe instead of being written as plain text.
        """
        use_launcher = self.add_launchers and self._is_nt
        linesep = os.linesep.encode('utf-8')
        if not use_launcher:
            script_bytes = shebang + linesep + script_bytes
        else:  # pragma: no cover
            if ext == 'py':
                launcher = self._get_launcher('t')
            else:
                launcher = self._get_launcher('w')
            # Launcher layout: launcher exe + shebang + zipped __main__.py.
            stream = BytesIO()
            with ZipFile(stream, 'w') as zf:
                zf.writestr('__main__.py', script_bytes)
            zip_data = stream.getvalue()
            script_bytes = launcher + shebang + linesep + zip_data
        for name in names:
            outname = os.path.join(self.target_dir, name)
            if use_launcher:  # pragma: no cover
                n, e = os.path.splitext(outname)
                if e.startswith('.py'):
                    outname = n
                outname = '%s.exe' % outname
                try:
                    self._fileop.write_binary_file(outname, script_bytes)
                except Exception:
                    # Failed writing an executable - it might be in use.
                    logger.warning('Failed to write executable - trying to '
                                   'use .deleteme logic')
                    dfname = '%s.deleteme' % outname
                    if os.path.exists(dfname):
                        os.remove(dfname)       # Not allowed to fail here
                    os.rename(outname, dfname)  # nor here
                    self._fileop.write_binary_file(outname, script_bytes)
                    logger.debug('Able to replace executable using '
                                 '.deleteme logic')
                    try:
                        os.remove(dfname)
                    except Exception:
                        pass    # still in use - ignore error
            else:
                if self._is_nt and not outname.endswith('.' + ext):  # pragma: no cover
                    outname = '%s.%s' % (outname, ext)
                if os.path.exists(outname) and not self.clobber:
                    logger.warning('Skipping existing file %s', outname)
                    continue
                self._fileop.write_binary_file(outname, script_bytes)
                if self.set_mode:
                    self._fileop.set_executable_mode([outname])
            filenames.append(outname)

    def _make_script(self, entry, filenames, options=None):
        """Generate a script from an export entry (module:function spec)."""
        post_interp = b''
        if options:
            args = options.get('interpreter_args', [])
            if args:
                args = ' %s' % ' '.join(args)
                post_interp = args.encode('utf-8')
        shebang = self._get_shebang('utf-8', post_interp, options=options)
        script = self._get_script_text(entry).encode('utf-8')
        name = entry.name
        scriptnames = set()
        if '' in self.variants:
            scriptnames.add(name)
        if 'X' in self.variants:
            scriptnames.add('%s%s' % (name, sys.version[0]))
        if 'X.Y' in self.variants:
            scriptnames.add('%s-%s' % (name, sys.version[:3]))
        if options and options.get('gui', False):
            ext = 'pyw'
        else:
            ext = 'py'
        self._write_script(scriptnames, shebang, script, filenames, ext)

    def _copy_script(self, script, filenames):
        """Copy an existing script, rewriting its shebang when it has one."""
        adjust = False
        script = os.path.join(self.source_dir, convert_path(script))
        outname = os.path.join(self.target_dir, os.path.basename(script))
        if not self.force and not self._fileop.newer(script, outname):
            logger.debug('not copying %s (up-to-date)', script)
            return

        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, 'rb')
        except IOError:  # pragma: no cover
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:  # pragma: no cover
                # NOTE(review): get_command_name is not defined on this
                # class — looks inherited from a distutils-era API; confirm.
                logger.warning('%s: %s is an empty file (skipping)',
                               self.get_command_name(), script)
                return
            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
            if match:
                adjust = True
                post_interp = match.group(1) or b''

        if not adjust:
            # No recognisable shebang: plain copy preserving content.
            if f:
                f.close()
            self._fileop.copy_file(script, outname)
            if self.set_mode:
                self._fileop.set_executable_mode([outname])
            filenames.append(outname)
        else:
            logger.info('copying and adjusting %s -> %s', script,
                        self.target_dir)
            if not self._fileop.dry_run:
                encoding, lines = detect_encoding(f.readline)
                f.seek(0)
                shebang = self._get_shebang(encoding, post_interp)
                if b'pythonw' in first_line:  # pragma: no cover
                    ext = 'pyw'
                else:
                    ext = 'py'
                n = os.path.basename(outname)
                self._write_script([n], shebang, f.read(), filenames, ext)
            if f:
                f.close()

    @property
    def dry_run(self):
        # Delegates to the underlying FileOperator so both stay in sync.
        return self._fileop.dry_run

    @dry_run.setter
    def dry_run(self, value):
        self._fileop.dry_run = value

    # Launcher retrieval only exists on Windows (or Jython-on-Windows).
    if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):  # pragma: no cover
        # Executable launcher support.
        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/

        def _get_launcher(self, kind):
            # kind: 't' for console ("terminal") or 'w' for GUI launchers.
            if struct.calcsize('P') == 8:   # 64-bit
                bits = '64'
            else:
                bits = '32'
            name = '%s%s.exe' % (kind, bits)
            # Issue 31: don't hardcode an absolute package name, but
            # determine it relative to the current package
            distlib_package = __name__.rsplit('.', 1)[0]
            result = finder(distlib_package).find(name).bytes
            return result

    # Public API follows

    def make(self, specification, options=None):
        """
        Make a script.

        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        entry = get_export_entry(specification)
        if entry is None:
            self._copy_script(specification, filenames)
        else:
            self._make_script(entry, filenames, options=options)
        return filenames

    def make_multiple(self, specifications, options=None):
        """
        Take a list of specifications and make scripts from them,

        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to,
        """
        filenames = []
        for specification in specifications:
            filenames.extend(self.make(specification, options))
        return filenames
|
ademmers/ansible
|
refs/heads/devel
|
test/lib/ansible_test/_internal/provider/source/unversioned.py
|
23
|
"""Fallback source provider when no other provider matches the content root."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ... import types as t
from ...constants import (
TIMEOUT_PATH,
)
from ...encoding import (
to_bytes,
)
from . import (
SourceProvider,
)
class UnversionedSource(SourceProvider):
    """Fallback source provider when no other provider matches the content root."""
    sequence = 0  # disable automatic detection

    @staticmethod
    def is_content_root(path):  # type: (str) -> bool
        """Return True if the given path is a content root for this provider."""
        return False

    def get_paths(self, path):  # type: (str) -> t.List[str]
        """Return the list of available content paths under the given path."""
        paths = []

        # Directory names pruned anywhere in the tree.
        kill_any_dir = (
            '.idea',
            '.pytest_cache',
            '__pycache__',
            'ansible.egg-info',
            'ansible_base.egg-info',
            'ansible_core.egg-info',
        )

        # Directory names pruned only directly under specific relative roots.
        kill_sub_dir = {
            'test': (
                'results',
                'cache',
                'output',
            ),
            'tests': (
                'output',
            ),
            'docs/docsite': (
                '_build',
            ),
        }

        # File names ignored directly under specific relative roots.
        kill_sub_file = {
            '': (
                TIMEOUT_PATH,
            ),
        }

        # Compiled/transient artifacts excluded everywhere.
        kill_extensions = (
            '.pyc',
            '.pyo',
            '.retry',
        )

        for root, dir_names, file_names in os.walk(path):
            rel_root = os.path.relpath(root, path)

            if rel_root == '.':
                rel_root = ''

            # Prune in place so os.walk does not descend into killed dirs.
            for kill in kill_any_dir + kill_sub_dir.get(rel_root, ()):
                if kill in dir_names:
                    dir_names.remove(kill)

            kill_files = kill_sub_file.get(rel_root, ())

            paths.extend([os.path.join(rel_root, file_name) for file_name in file_names
                          if not os.path.splitext(file_name)[1] in kill_extensions and file_name not in kill_files])

            # include directory symlinks since they will not be traversed and would otherwise go undetected
            # FIX: test the symlink at its on-disk location (root/dir_name);
            # previously the bare name was tested relative to the process CWD,
            # which silently missed symlinked directories.
            paths.extend([os.path.join(rel_root, dir_name) + os.path.sep for dir_name in dir_names
                          if os.path.islink(to_bytes(os.path.join(root, dir_name)))])

        return paths
|
klunwebale/odoo
|
refs/heads/8.0
|
addons/hr_timesheet/report/hr_timesheet_report.py
|
262
|
from openerp import tools
from openerp.osv import fields,osv
from openerp.addons.decimal_precision import decimal_precision as dp
class hr_timesheet_report(osv.osv):
    # Read-only reporting model backed by a database VIEW (_auto = False):
    # one row per aggregated group of analytic lines linked to timesheets.
    _name = "hr.timesheet.report"
    _description = "Timesheet"
    _auto = False
    _columns = {
        'date': fields.date('Date', readonly=True),
        'name': fields.char('Description', size=64,readonly=True),
        'product_id' : fields.many2one('product.product', 'Product',readonly=True),
        'journal_id' : fields.many2one('account.analytic.journal', 'Journal',readonly=True),
        'general_account_id' : fields.many2one('account.account', 'General Account', readonly=True),
        'user_id': fields.many2one('res.users', 'User',readonly=True),
        'account_id': fields.many2one('account.analytic.account', 'Analytic Account',readonly=True),
        'company_id': fields.many2one('res.company', 'Company',readonly=True),
        'cost': fields.float('Cost', readonly=True, digits_compute=dp.get_precision('Account')),
        'quantity': fields.float('Time', readonly=True),  # TDE FIXME master: rename into time
    }

    def _select(self):
        # SELECT clause: sums cost/time per grouping key; min(hat.id) serves
        # as a stable surrogate id for the view row.
        select_str = """
             SELECT min(hat.id) as id,
                    aal.date as date,
                    sum(aal.amount) as cost,
                    sum(aal.unit_amount) as quantity,
                    aal.account_id as account_id,
                    aal.journal_id as journal_id,
                    aal.product_id as product_id,
                    aal.general_account_id as general_account_id,
                    aal.user_id as user_id,
                    aal.company_id as company_id,
                    aal.currency_id as currency_id
        """
        return select_str

    def _from(self):
        # FROM clause: only analytic lines that have a timesheet entry.
        from_str = """
                account_analytic_line as aal
                inner join hr_analytic_timesheet as hat ON (hat.line_id=aal.id)
        """
        return from_str

    def _group_by(self):
        # GROUP BY clause: must list every non-aggregated column of _select().
        group_by_str = """
            GROUP BY aal.date,
                    aal.account_id,
                    aal.product_id,
                    aal.general_account_id,
                    aal.journal_id,
                    aal.user_id,
                    aal.company_id,
                    aal.currency_id
        """
        return group_by_str

    def init(self, cr):
        """(Re)create the SQL view backing this model.

        Only internal, trusted strings are interpolated into the statement.
        """
        # self._table = hr_timesheet_report
        tools.drop_view_if_exists(cr, self._table)
        cr.execute("""CREATE or REPLACE VIEW %s as (
            %s
            FROM ( %s )
            %s
            )""" % (self._table, self._select(), self._from(), self._group_by()))
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
vv1133/home_web
|
refs/heads/master
|
tests/model_forms/models.py
|
51
|
"""
XX. Generating HTML forms from models
This is mostly just a reworking of the ``form_for_model``/``form_for_instance``
tests to use ``ModelForm``. As such, the text may not make sense in all cases,
and the examples are probably a poor fit for the ``ModelForm`` syntax. In other
words, most of these tests should be rewritten.
"""
from __future__ import unicode_literals
import os
import tempfile
from django.core import validators
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import FileSystemStorage
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
temp_storage_dir = tempfile.mkdtemp(dir=os.environ['DJANGO_TEST_TEMP_DIR'])
temp_storage = FileSystemStorage(temp_storage_dir)
ARTICLE_STATUS = (
(1, 'Draft'),
(2, 'Pending'),
(3, 'Live'),
)
ARTICLE_STATUS_CHAR = (
('d', 'Draft'),
('p', 'Pending'),
('l', 'Live'),
)
@python_2_unicode_compatible
class Category(models.Model):
name = models.CharField(max_length=20)
slug = models.SlugField(max_length=20)
url = models.CharField('The URL', max_length=40)
def __str__(self):
return self.name
def __repr__(self):
return self.__str__()
@python_2_unicode_compatible
class Writer(models.Model):
name = models.CharField(max_length=50, help_text='Use both first and last names.')
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Article(models.Model):
    """Test model whose ``created`` date is denormalized on first save."""
    headline = models.CharField(max_length=50)
    slug = models.SlugField()
    pub_date = models.DateField()
    created = models.DateField(editable=False)
    writer = models.ForeignKey(Writer)
    article = models.TextField()
    categories = models.ManyToManyField(Category, blank=True)
    status = models.PositiveIntegerField(choices=ARTICLE_STATUS, blank=True, null=True)

    def save(self, *args, **kwargs):
        # FIX: accept and forward Model.save()'s arguments (force_insert,
        # using, update_fields, ...). The previous `def save(self)` raised
        # TypeError for any caller passing them.
        import datetime
        if not self.id:
            # Stamp the creation date only on first insert.
            self.created = datetime.date.today()
        return super(Article, self).save(*args, **kwargs)

    def __str__(self):
        return self.headline
class ImprovedArticle(models.Model):
article = models.OneToOneField(Article)
class ImprovedArticleWithParentLink(models.Model):
article = models.OneToOneField(Article, parent_link=True)
class BetterWriter(Writer):
score = models.IntegerField()
@python_2_unicode_compatible
class WriterProfile(models.Model):
writer = models.OneToOneField(Writer, primary_key=True)
age = models.PositiveIntegerField()
def __str__(self):
return "%s is %s" % (self.writer, self.age)
@python_2_unicode_compatible
class TextFile(models.Model):
description = models.CharField(max_length=20)
file = models.FileField(storage=temp_storage, upload_to='tests', max_length=15)
def __str__(self):
return self.description
try:
from django.utils.image import Image
test_images = True
@python_2_unicode_compatible
class ImageFile(models.Model):
def custom_upload_path(self, filename):
path = self.path or 'tests'
return '%s/%s' % (path, filename)
description = models.CharField(max_length=20)
# Deliberately put the image field *after* the width/height fields to
# trigger the bug in #10404 with width/height not getting assigned.
width = models.IntegerField(editable=False)
height = models.IntegerField(editable=False)
image = models.ImageField(storage=temp_storage, upload_to=custom_upload_path,
width_field='width', height_field='height')
path = models.CharField(max_length=16, blank=True, default='')
def __str__(self):
return self.description
@python_2_unicode_compatible
class OptionalImageFile(models.Model):
def custom_upload_path(self, filename):
path = self.path or 'tests'
return '%s/%s' % (path, filename)
description = models.CharField(max_length=20)
image = models.ImageField(storage=temp_storage, upload_to=custom_upload_path,
width_field='width', height_field='height',
blank=True, null=True)
width = models.IntegerField(editable=False, null=True)
height = models.IntegerField(editable=False, null=True)
path = models.CharField(max_length=16, blank=True, default='')
def __str__(self):
return self.description
except ImproperlyConfigured:
test_images = False
@python_2_unicode_compatible
class CommaSeparatedInteger(models.Model):
field = models.CommaSeparatedIntegerField(max_length=20)
def __str__(self):
return self.field
@python_2_unicode_compatible
class Product(models.Model):
slug = models.SlugField(unique=True)
def __str__(self):
return self.slug
@python_2_unicode_compatible
class Price(models.Model):
price = models.DecimalField(max_digits=10, decimal_places=2)
quantity = models.PositiveIntegerField()
def __str__(self):
return "%s for %s" % (self.quantity, self.price)
class Meta:
unique_together = (('price', 'quantity'),)
class ArticleStatus(models.Model):
status = models.CharField(max_length=2, choices=ARTICLE_STATUS_CHAR, blank=True, null=True)
@python_2_unicode_compatible
class Inventory(models.Model):
barcode = models.PositiveIntegerField(unique=True)
parent = models.ForeignKey('self', to_field='barcode', blank=True, null=True)
name = models.CharField(blank=False, max_length=20)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
def __repr__(self):
return self.__str__()
class Book(models.Model):
title = models.CharField(max_length=40)
author = models.ForeignKey(Writer, blank=True, null=True)
special_id = models.IntegerField(blank=True, null=True, unique=True)
class Meta:
unique_together = ('title', 'author')
class BookXtra(models.Model):
isbn = models.CharField(max_length=16, unique=True)
suffix1 = models.IntegerField(blank=True, default=0)
suffix2 = models.IntegerField(blank=True, default=0)
class Meta:
unique_together = (('suffix1', 'suffix2'))
abstract = True
class DerivedBook(Book, BookXtra):
pass
@python_2_unicode_compatible
class ExplicitPK(models.Model):
key = models.CharField(max_length=20, primary_key=True)
desc = models.CharField(max_length=20, blank=True, unique=True)
class Meta:
unique_together = ('key', 'desc')
def __str__(self):
return self.key
@python_2_unicode_compatible
class Post(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField()
def __str__(self):
return self.title
@python_2_unicode_compatible
class DateTimePost(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateTimeField(editable=False)
def __str__(self):
return self.title
class DerivedPost(Post):
pass
@python_2_unicode_compatible
class BigInt(models.Model):
biggie = models.BigIntegerField()
def __str__(self):
return six.text_type(self.biggie)
class MarkupField(models.CharField):
def __init__(self, *args, **kwargs):
kwargs["max_length"] = 20
super(MarkupField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
# don't allow this field to be used in form (real use-case might be
# that you know the markup will always be X, but it is among an app
# that allows the user to say it could be something else)
# regressed at r10062
return None
class CustomFieldForExclusionModel(models.Model):
name = models.CharField(max_length=10)
markup = MarkupField()
class FlexibleDatePost(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField(blank=True, null=True)
@python_2_unicode_compatible
class Colour(models.Model):
    """Test model that is itself iterable (yields 0..4)."""
    name = models.CharField(max_length=50)

    def __iter__(self):
        # FIX: `xrange` is Python-2-only and raises NameError on Python 3,
        # although this file targets both (six + __future__ imports).
        # `range` is behavior-identical here and works on both versions.
        for number in range(5):
            yield number

    def __str__(self):
        return self.name
class ColourfulItem(models.Model):
name = models.CharField(max_length=50)
colours = models.ManyToManyField(Colour)
class ArticleStatusNote(models.Model):
name = models.CharField(max_length=20)
status = models.ManyToManyField(ArticleStatus)
class CustomErrorMessage(models.Model):
name1 = models.CharField(max_length=50,
validators=[validators.validate_slug],
error_messages={'invalid': 'Model custom error message.'})
name2 = models.CharField(max_length=50,
validators=[validators.validate_slug],
error_messages={'invalid': 'Model custom error message.'})
|
Rogentos/argent-anaconda
|
refs/heads/master
|
storage/formats/dmraid.py
|
3
|
# dmraid.py
# dmraid device formats
#
# Copyright (C) 2009 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Dave Lehman <dlehman@redhat.com>
#
from ..storage_log import log_method_call
from flags import flags
from ..errors import *
from . import DeviceFormat, register_device_format
import gettext
_ = lambda x: gettext.ldgettext("anaconda", x)
import logging
log = logging.getLogger("storage")
class DMRaidMember(DeviceFormat):
    """ A dmraid member disk. """
    _type = "dmraidmember"
    _name = "dm-raid member device"

    # XXX This looks like trouble.
    #
    # Maybe a better approach is a RaidMember format with subclass
    # for MDRaidMember, letting all *_raid_member types fall through
    # to the generic RaidMember format, which is basically read-only.
    #
    # One problem that presents is the possibility of someone passing
    # a dmraid member to the MDRaidArrayDevice constructor.

    # udev ID_FS_TYPE values that map onto this format.
    _udevTypes = ["adaptec_raid_member", "ddf_raid_member",
                  "highpoint_raid_member", "isw_raid_member",
                  "jmicron_raid_member", "lsi_mega_raid_member",
                  "nvidia_raid_member", "promise_fasttrack_raid_member",
                  "silicon_medley_raid_member", "via_raid_member"]
    _formattable = False                # can be formatted
    _supported = True                   # is supported
    _linuxNative = False                # for clearpart
    _packages = ["dmraid"]              # required packages
    _resizable = False                  # can be resized
    _bootable = False                   # can be used as boot
    _maxSize = 0                        # maximum size in MB
    _minSize = 0                        # minimum size in MB
    _hidden = True                      # hide devices with this formatting?

    def __init__(self, *args, **kwargs):
        """ Create a DeviceFormat instance.

            Keyword Arguments:

                device -- path to the underlying device
                uuid -- this format's UUID
                exists -- indicates whether this is an existing format

            On initialization this format is like DeviceFormat
        """
        log_method_call(self, *args, **kwargs)
        DeviceFormat.__init__(self, *args, **kwargs)

        # Initialize the attribute that will hold the block object.
        self._raidmem = None

    def __str__(self):
        s = DeviceFormat.__str__(self)
        s += ("  raidmem = %(raidmem)r" % {"raidmem": self.raidmem})
        return s

    def _getRaidmem(self):
        return self._raidmem

    def _setRaidmem(self, raidmem):
        self._raidmem = raidmem

    # Accessor for the underlying block-device raid-member object.
    raidmem = property(lambda d: d._getRaidmem(),
                       lambda d,r: d._setRaidmem(r))

    def create(self, *args, **kwargs):
        # dmraid members describe firmware RAID membership; they are never
        # created by the installer, so this always raises.
        log_method_call(self, device=self.device,
                        type=self.type, status=self.status)
        raise DMRaidMemberError("creation of dmraid members is non-sense")

    def destroy(self, *args, **kwargs):
        # Likewise, destruction is unsupported and always raises.
        log_method_call(self, device=self.device,
                        type=self.type, status=self.status)
        raise DMRaidMemberError("destruction of dmraid members is non-sense")
# Honor anaconda boot options that disable specific dmraid handling.
# FIX: dict.has_key() is deprecated (and removed in Python 3); the `in`
# operator is the idiomatic, behavior-identical membership test.
if "noiswmd" not in flags.cmdline:
    DMRaidMember._udevTypes.remove("isw_raid_member")

# The anaconda cmdline has not been parsed yet when we're first imported,
# so we can not use flags.dmraid here
if "nodmraid" in flags.cmdline:
    DMRaidMember._udevTypes = []

register_device_format(DMRaidMember)
|
slongwang/selenium
|
refs/heads/master
|
py/test/selenium/webdriver/common/correct_event_firing_tests.py
|
65
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
def not_available_on_remote(func):
    """Decorator intended to make *func* a no-op when running against a
    remote driver.

    NOTE(review): ``type(self.driver) == 'remote'`` compares a class object
    against a string, which is always False — so the guard never fires and
    *func* always runs. It presumably should inspect the driver's type name
    or capabilities; confirm the intent before changing behavior. Also note
    the guarded branch returns a lambda rather than skipping the test.
    """
    def testMethod(self):
        print(self.driver)
        if type(self.driver) == 'remote':
            return lambda x: None
        else:
            return func(self)
    return testMethod
class CorrectEventFiringTests(unittest.TestCase):
def testShouldFireClickEventWhenClicking(self):
self._loadPage("javascriptPage")
self._clickOnElementWhichRecordsEvents()
self._assertEventFired("click")
def testShouldFireMouseDownEventWhenClicking(self):
self._loadPage("javascriptPage")
self._clickOnElementWhichRecordsEvents()
self._assertEventFired("mousedown")
def testShouldFireMouseUpEventWhenClicking(self):
self._loadPage("javascriptPage")
self._clickOnElementWhichRecordsEvents()
self._assertEventFired("mouseup")
def testShouldIssueMouseDownEvents(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("mousedown").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse down")
def testShouldIssueClickEvents(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("mouseclick").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse click")
def testShouldIssueMouseUpEvents(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("mouseup").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse up")
def testMouseEventsShouldBubbleUpToContainingElements(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("child").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse down")
def testShouldEmitOnChangeEventsWhenSelectingElements(self):
self._loadPage("javascriptPage")
# Intentionally not looking up the select tag. See selenium r7937 for details.
allOptions = self.driver.find_elements_by_xpath("//select[@id='selector']//option")
initialTextValue = self.driver.find_element_by_id("result").text
foo = allOptions[0]
bar = allOptions[1]
foo.click()
self.assertEqual(self.driver.find_element_by_id("result").text, initialTextValue)
bar.click()
self.assertEqual(self.driver.find_element_by_id("result").text, "bar")
def testShouldEmitOnChangeEventsWhenChangingTheStateOfACheckbox(self):
self._loadPage("javascriptPage")
checkbox = self.driver.find_element_by_id("checkbox")
checkbox.click()
self.assertEqual(self.driver.find_element_by_id("result").text, "checkbox thing")
def testShouldEmitClickEventWhenClickingOnATextInputElement(self):
self._loadPage("javascriptPage")
clicker = self.driver.find_element_by_id("clickField")
clicker.click()
self.assertEqual(clicker.get_attribute("value"), "Clicked")
def testClearingAnElementShouldCauseTheOnChangeHandlerToFire(self):
self._loadPage("javascriptPage")
element = self.driver.find_element_by_id("clearMe")
element.clear()
result = self.driver.find_element_by_id("result")
self.assertEqual(result.text, "Cleared");
# TODO Currently Failing and needs fixing
#def testSendingKeysToAnotherElementShouldCauseTheBlurEventToFire(self):
# self._loadPage("javascriptPage")
# element = self.driver.find_element_by_id("theworks")
# element.send_keys("foo")
# element2 = self.driver.find_element_by_id("changeable")
# element2.send_keys("bar")
# self._assertEventFired("blur")
# TODO Currently Failing and needs fixing
#def testSendingKeysToAnElementShouldCauseTheFocusEventToFire(self):
# self._loadPage("javascriptPage")
# element = self.driver.find_element_by_id("theworks")
# element.send_keys("foo")
# self._assertEventFired("focus")
def _clickOnElementWhichRecordsEvents(self):
self.driver.find_element_by_id("plainButton").click()
def _assertEventFired(self, eventName):
result = self.driver.find_element_by_id("result")
text = result.text
self.assertTrue(eventName in text, "No " + eventName + " fired: " + text)
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
    def _loadSimplePage(self):
        """Convenience wrapper: navigate to the "simpleTest" page."""
        self._loadPage("simpleTest")
    def _loadPage(self, name):
        """Navigate the driver to the named test page on the local server."""
        self.driver.get(self._pageURL(name))
|
nerginer/GrovePi
|
refs/heads/master
|
Software/Python/grove_thumb_joystick.py
|
5
|
#!/usr/bin/env python
#
# GrovePi Example for using the Grove Thumb Joystick (http://www.seeedstudio.com/wiki/Grove_-_Thumb_Joystick)
#
# The GrovePi connects the Raspberry Pi and Grove sensors. You can learn more about GrovePi here: http://www.dexterindustries.com/GrovePi
#
# Have a question about this example? Ask on the forums here: http://www.dexterindustries.com/forum/?forum=grovepi
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import time
import grovepi
# Connect the Grove Thumb Joystick to analog port A0
# GrovePi Port A0 uses Arduino pins 0 and 1
# GrovePi Port A1 uses Arduino pins 1 and 2
# Don't plug anything into port A1 that uses pin 1
# Most Grove sensors only use 3 of their 4 pins, which is why the GrovePi shares Arduino pins between adjacent ports
# If the sensor has a pin definition SIG,NC,VCC,GND, the second (white) pin is not connected to anything
# If you wish to connect two joysticks, use ports A0 and A2 (skip A1)
# Uses two pins - one for the X axis and one for the Y axis
# This configuration means you are using port A0
xPin = 0
yPin = 1
grovepi.pinMode(xPin,"INPUT")
grovepi.pinMode(yPin,"INPUT")
# The Grove Thumb Joystick is an analog device that outputs analog signal ranging from 0 to 1023
# The X and Y axes are two ~10k potentiometers and a momentary push button which shorts the x axis
# My joystick produces slightly different results to the specifications found on the url above
# I've listed both here:
# Specifications
# Min Typ Max Click
# X 206 516 798 1023
# Y 203 507 797
# My Joystick
# Min Typ Max Click
# X 253 513 766 1020-1023
# Y 250 505 769
while True:
    try:
        # Get X/Y coordinates
        x = grovepi.analogRead(xPin)
        y = grovepi.analogRead(yPin)

        # Calculate X/Y resistance.  A raw reading of 0 makes the divisor
        # zero; that is now handled below together with I2C read errors
        # instead of crashing the loop (the original only caught IOError).
        Rx = (float)(1023 - x) * 10 / x
        Ry = (float)(1023 - y) * 10 / y

        # Was a click detected on the X axis?
        click = 1 if x >= 1020 else 0

        print ("x =", x, " y =", y, " Rx =", Rx, " Ry =", Ry, " click =", click)
        time.sleep(.5)

    except (IOError, ZeroDivisionError):
        print ("Error")
|
Lyleo/nupic
|
refs/heads/master
|
tests/swarming/nupic/swarming/experiments/dummy_multi_v2/permutations.py
|
8
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by ExpGenerator to generate the actual
permutations.py file by replacing $XXXXXXXX tokens with desired values.
This permutations.py file was generated by:
'/Users/ronmarianetti/nupic/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
import os
from nupic.swarming.permutationhelpers import *
# The name of the field being predicted. Any allowed permutation MUST contain
# the prediction field.
# (generated from PREDICTION_FIELD)
predictedField = 'consumption'
numModels = int(os.environ.get('NTA_TEST_max_num_models',10))
permutations = {
'__model_num' : PermuteInt(0, numModels-1, 1),
'modelParams': {
'sensorParams': {
'encoders': {
'gym': PermuteEncoder(fieldName='gym', encoderClass='SDRCategoryEncoder', w=7, n=100),
'timestamp_dayOfWeek': PermuteEncoder(fieldName='timestamp', encoderClass='DateEncoder.dayOfWeek', radius=PermuteChoices([1, 3]), w=7),
'timestamp_timeOfDay': PermuteEncoder(fieldName='timestamp', encoderClass='DateEncoder.timeOfDay', radius=PermuteChoices([1, 8]), w=7),
'consumption': PermuteEncoder(fieldName='consumption', encoderClass='ScalarEncoder', maxval=PermuteInt(100, 300, 25), n=PermuteInt(13, 500, 20), w=7, minval=0),
'address': PermuteEncoder(fieldName='address', encoderClass='SDRCategoryEncoder', w=7, n=100),
},
},
'tpParams': {
'minThreshold': PermuteInt(9, 12),
'activationThreshold': PermuteInt(12, 16),
},
}
}
# Fields selected for final hypersearch report;
# NOTE: These values are used as regular expressions by RunPermutations.py's
# report generator
# (fieldname values generated from PERM_PREDICTED_FIELD_NAME)
report = [
'.*consumption.*',
]
# Permutation optimization setting: either minimize or maximize metric
# used by RunPermutations.
# NOTE: The value is used as a regular expressions by RunPermutations.py's
# report generator
# (generated from minimize = 'prediction:rmse:field=consumption')
minimize = 'prediction:rmse:field=consumption'
def permutationFilter(perm):
  """ This function can be used to selectively filter out specific permutation
  combinations. It is called by RunPermutations for every possible permutation
  of the variables in the permutations dict. It should return True for a valid
  combination of permutation values and False for an invalid one.

  Parameters:
  ---------------------------------------------------------
  perm: dict of one possible combination of name:value
        pairs chosen from permutations.
  """
  # Reject permutations whose consumption-encoder maxval exceeds the limit;
  # the limit defaults to 300 and can be overridden for tests through the
  # NTA_TEST_maxvalFilter environment variable.
  limit = int(os.environ.get('NTA_TEST_maxvalFilter', 300))
  if perm['modelParams']['sensorParams']['encoders']['consumption']['maxval'] > limit:
    return False  # fixed: removed stray trailing semicolon
  return True
|
ionelmc/python-mongoql-conv
|
refs/heads/master
|
tests/test_mongoql_conv.py
|
1
|
import sys
import doctest
# Test runner: initializes a Django database, then executes the doctests
# embedded in the project's README as the test suite.
if __name__ == '__main__':
    from django.core.management import execute_from_command_line
    # 'syncdb --noinput' creates the test DB schema without prompting.
    execute_from_command_line([sys.argv[0], 'syncdb', '--noinput'])
    # ELLIPSIS / IGNORE_EXCEPTION_DETAIL keep the README examples robust to
    # output that varies across Python versions.
    results = doctest.testfile('../README.rst', optionflags=doctest.ELLIPSIS|doctest.IGNORE_EXCEPTION_DETAIL)
    print(results)
    # Propagate a non-zero exit code so CI detects doctest failures.
    if results.failed:
        sys.exit(1)
|
gromez/Sick-Beard
|
refs/heads/development
|
sickbeard/clients/requests/packages/urllib3/__init__.py
|
309
|
# urllib3/__init__.py
# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
__version__ = 'dev'
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url
)
from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util import make_headers, get_host
# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+ ships logging.NullHandler
    from logging import NullHandler
except ImportError:
    # Fallback for Python < 2.7: a handler that discards every record.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass
logging.getLogger(__name__).addHandler(NullHandler())
def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(level)
    # Fix: pass the argument lazily instead of eager %-formatting, so the
    # message is only rendered if this DEBUG record is actually emitted.
    logger.debug('Added an stderr logging handler to logger: %s', __name__)
    return handler
# ... Clean up.
del NullHandler
|
huguesv/PTVS
|
refs/heads/master
|
Python/Templates/Web/ProjectTemplates/Python/Web/StarterBottleProject/app.py
|
8
|
"""
This script runs the application using a development server.
"""
import bottle
import os
import sys
# routes contains the HTTP handlers for our server and must be imported.
import routes
if '--debug' in sys.argv[1:] or 'SERVER_DEBUG' in os.environ:
# Debug mode will enable more verbose output in the console window.
# It must be set at the beginning of the script.
bottle.debug(True)
def wsgi_app():
    """Returns the application to make available through wfastcgi. This is used
    when the site is published to Microsoft Azure."""
    application = bottle.default_app()
    return application
if __name__ == '__main__':
    # Development-server entry point; not used when hosted via wfastcgi.
    PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
    # Forward slashes keep the static root usable on Windows too.
    STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static').replace('\\', '/')
    HOST = os.environ.get('SERVER_HOST', 'localhost')
    try:
        PORT = int(os.environ.get('SERVER_PORT', '5555'))
    except ValueError:
        # Fall back to the default port if SERVER_PORT is not an integer.
        PORT = 5555
    @bottle.route('/static/<filepath:path>')
    def server_static(filepath):
        """Handler for static files, used with the development server.
        When running under a production server such as IIS or Apache,
        the server should be configured to serve the static files."""
        return bottle.static_file(filepath, root=STATIC_ROOT)
    # Starts a local test server.
    bottle.run(server='wsgiref', host=HOST, port=PORT)
|
prarthitm/edxplatform
|
refs/heads/master
|
common/test/acceptance/pages/lms/course_page.py
|
16
|
"""
Base class for pages in courseware.
"""
from bok_choy.page_object import PageObject
from common.test.acceptance.pages.lms import BASE_URL
from common.test.acceptance.pages.lms.tab_nav import TabNavPage
class CoursePage(PageObject):
    """
    Abstract base class for page objects within a course.
    """

    # Subclasses override this with the page's path relative to the course
    # root.  Paths should not include the leading forward slash.
    url_path = ""

    def __init__(self, browser, course_id):
        """
        Course ID is currently of the form "edx/999/2013_Spring"
        but this format could change.
        """
        super(CoursePage, self).__init__(browser)
        self.course_id = course_id

    @property
    def url(self):
        """
        Construct a URL to the page within the course.
        """
        return "{base}/courses/{course}/{path}".format(
            base=BASE_URL, course=self.course_id, path=self.url_path)

    def has_tab(self, tab_name):
        """
        Returns true if the current page is showing a tab with the given name.
        """
        return tab_name in TabNavPage(self.browser).tab_names
|
xwolf12/django
|
refs/heads/master
|
django/apps/__init__.py
|
597
|
from .config import AppConfig
from .registry import apps
__all__ = ['AppConfig', 'apps']
|
vks/servo
|
refs/heads/master
|
tests/wpt/css-tests/tools/html5lib/html5lib/trie/py.py
|
817
|
from __future__ import absolute_import, division, unicode_literals
from six import text_type
from bisect import bisect_left
from ._base import Trie as ABCTrie
class Trie(ABCTrie):
    """Trie backed by a sorted list of keys searched with binary search.

    The bisection bounds found for the most recently queried prefix are
    cached so that queries for successively longer prefixes only search a
    narrower slice of the key list.
    """

    def __init__(self, data):
        """Build the trie from a mapping whose keys are all text strings."""
        if not all(isinstance(x, text_type) for x in data.keys()):
            raise TypeError("All keys must be strings")

        self._data = data
        self._keys = sorted(data.keys())
        # Cache: last prefix searched and the (lo, hi) bounds it produced.
        self._cachestr = ""
        self._cachepoints = (0, len(data))

    def __contains__(self, key):
        return key in self._data

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        return iter(self._data)

    def __getitem__(self, key):
        return self._data[key]

    def keys(self, prefix=None):
        """Return the set of keys starting with *prefix* (all keys if None)."""
        if prefix is None or prefix == "" or not self._keys:
            return set(self._keys)

        if prefix.startswith(self._cachestr):
            lo, hi = self._cachepoints
            start = i = bisect_left(self._keys, prefix, lo, hi)
        else:
            start = i = bisect_left(self._keys, prefix)

        keys = set()
        if start == len(self._keys):
            return keys

        # Bug fix: bound-check before indexing.  The original dereferenced
        # self._keys[i] unconditionally and raised IndexError whenever every
        # key from `start` onward matched the prefix (e.g. when the last key
        # in the trie matches).
        while i < len(self._keys) and self._keys[i].startswith(prefix):
            keys.add(self._keys[i])
            i += 1

        self._cachestr = prefix
        self._cachepoints = (start, i)

        return keys

    def has_keys_with_prefix(self, prefix):
        """Return True if at least one key starts with *prefix*."""
        if prefix in self._data:
            return True

        if prefix.startswith(self._cachestr):
            lo, hi = self._cachepoints
            i = bisect_left(self._keys, prefix, lo, hi)
        else:
            i = bisect_left(self._keys, prefix)

        if i == len(self._keys):
            return False

        return self._keys[i].startswith(prefix)
|
dims/cinder
|
refs/heads/master
|
cinder/tests/unit/scheduler/test_rpcapi.py
|
1
|
# Copyright 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for cinder.scheduler.rpcapi
"""
import copy
import mock
from cinder import context
from cinder.scheduler import rpcapi as scheduler_rpcapi
from cinder import test
class SchedulerRpcAPITestCase(test.TestCase):
    """Verifies that SchedulerAPI methods issue the expected RPC casts/calls.

    oslo.messaging is not exercised for real: the client's prepare() and
    cast/call methods are patched so the test can capture the RPC target
    (fanout/version) and the message payload that would have been sent.
    """
    # setUp/tearDown are no-op overrides kept for symmetry with other
    # cinder test cases.
    def setUp(self):
        super(SchedulerRpcAPITestCase, self).setUp()
    def tearDown(self):
        super(SchedulerRpcAPITestCase, self).tearDown()
    def _test_scheduler_api(self, method, rpc_method,
                            fanout=False, **kwargs):
        # Shared driver: calls rpcapi.<method>(ctxt, **kwargs) and asserts
        # the prepare() target and the message body both match ``kwargs``.
        ctxt = context.RequestContext('fake_user', 'fake_project')
        rpcapi = scheduler_rpcapi.SchedulerAPI()
        # Only 'call' returns a value; 'cast' is fire-and-forget.
        expected_retval = 'foo' if rpc_method == 'call' else None
        # 'version' is popped so it is checked against the prepare() target
        # instead of being treated as part of the message payload.
        target = {
            "fanout": fanout,
            "version": kwargs.pop('version', rpcapi.RPC_API_VERSION)
        }
        expected_msg = copy.deepcopy(kwargs)
        self.fake_args = None
        self.fake_kwargs = None
        def _fake_prepare_method(*args, **kwds):
            # Every kwarg handed to prepare() must match the expected target.
            for kwd in kwds:
                self.assertEqual(target[kwd], kwds[kwd])
            return rpcapi.client
        def _fake_rpc_method(*args, **kwargs):
            # Capture the cast/call arguments for the assertions below.
            self.fake_args = args
            self.fake_kwargs = kwargs
            if expected_retval:
                return expected_retval
        with mock.patch.object(rpcapi.client, "prepare") as mock_prepared:
            mock_prepared.side_effect = _fake_prepare_method
            with mock.patch.object(rpcapi.client, rpc_method) as mock_method:
                mock_method.side_effect = _fake_rpc_method
                retval = getattr(rpcapi, method)(ctxt, **kwargs)
                self.assertEqual(expected_retval, retval)
                expected_args = [ctxt, method, expected_msg]
                for arg, expected_arg in zip(self.fake_args, expected_args):
                    self.assertEqual(expected_arg, arg)
                for kwarg, value in self.fake_kwargs.items():
                    self.assertEqual(expected_msg[kwarg], value)
    def test_update_service_capabilities(self):
        self._test_scheduler_api('update_service_capabilities',
                                 rpc_method='cast',
                                 service_name='fake_name',
                                 host='fake_host',
                                 capabilities='fake_capabilities',
                                 fanout=True,
                                 version='1.0')
    @mock.patch('oslo_messaging.RPCClient.can_send_version',
                return_value=True)
    def test_create_volume(self, can_send_version):
        # Server new enough for 1.9: the volume object rides along.
        self._test_scheduler_api('create_volume',
                                 rpc_method='cast',
                                 topic='topic',
                                 volume_id='volume_id',
                                 snapshot_id='snapshot_id',
                                 image_id='image_id',
                                 request_spec='fake_request_spec',
                                 filter_properties='filter_properties',
                                 volume='volume',
                                 version='1.9')
        can_send_version.assert_called_once_with('1.9')
    @mock.patch('oslo_messaging.RPCClient.can_send_version',
                return_value=False)
    def test_create_volume_old(self, can_send_version):
        # Tests backwards compatibility with older clients
        self._test_scheduler_api('create_volume',
                                 rpc_method='cast',
                                 topic='topic',
                                 volume_id='volume_id',
                                 snapshot_id='snapshot_id',
                                 image_id='image_id',
                                 request_spec='fake_request_spec',
                                 filter_properties='filter_properties',
                                 version='1.2')
        can_send_version.assert_called_once_with('1.9')
    @mock.patch('oslo_messaging.RPCClient.can_send_version',
                return_value=True)
    def test_migrate_volume_to_host(self, can_send_version):
        self._test_scheduler_api('migrate_volume_to_host',
                                 rpc_method='cast',
                                 topic='topic',
                                 volume_id='volume_id',
                                 host='host',
                                 force_host_copy=True,
                                 request_spec='fake_request_spec',
                                 filter_properties='filter_properties',
                                 volume='volume',
                                 version='1.11')
        can_send_version.assert_called_once_with('1.11')
    @mock.patch('oslo_messaging.RPCClient.can_send_version',
                return_value=False)
    def test_migrate_volume_to_host_old(self, can_send_version):
        # Falls back to 1.3 (no volume object) for older servers.
        self._test_scheduler_api('migrate_volume_to_host',
                                 rpc_method='cast',
                                 topic='topic',
                                 volume_id='volume_id',
                                 host='host',
                                 force_host_copy=True,
                                 request_spec='fake_request_spec',
                                 filter_properties='filter_properties',
                                 volume='volume',
                                 version='1.3')
        can_send_version.assert_called_once_with('1.11')
    @mock.patch('oslo_messaging.RPCClient.can_send_version',
                return_value=True)
    def test_retype(self, can_send_version):
        self._test_scheduler_api('retype',
                                 rpc_method='cast',
                                 topic='topic',
                                 volume_id='volume_id',
                                 request_spec='fake_request_spec',
                                 filter_properties='filter_properties',
                                 volume='volume',
                                 version='1.10')
        can_send_version.assert_called_with('1.10')
    @mock.patch('oslo_messaging.RPCClient.can_send_version',
                return_value=False)
    def test_retype_old(self, can_send_version):
        # Falls back to 1.4 for older servers.
        self._test_scheduler_api('retype',
                                 rpc_method='cast',
                                 topic='topic',
                                 volume_id='volume_id',
                                 request_spec='fake_request_spec',
                                 filter_properties='filter_properties',
                                 volume='volume',
                                 version='1.4')
        can_send_version.assert_called_with('1.10')
    def test_manage_existing(self):
        self._test_scheduler_api('manage_existing',
                                 rpc_method='cast',
                                 topic='topic',
                                 volume_id='volume_id',
                                 request_spec='fake_request_spec',
                                 filter_properties='filter_properties',
                                 version='1.5')
    def test_get_pools(self):
        self._test_scheduler_api('get_pools',
                                 rpc_method='call',
                                 filters=None,
                                 version='1.7')
|
noselhq/skia
|
refs/heads/master
|
bench/tile_analyze.py
|
198
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be found
# in the LICENSE file.
""" Analyze per-tile and viewport bench data, and output visualized results.
"""
__author__ = 'bensong@google.com (Ben Chen)'
import bench_util
import boto
import math
import optparse
import os
import re
import shutil
from oauth2_plugin import oauth2_plugin
# The default platform to analyze. Used when OPTION_PLATFORM flag is not set.
DEFAULT_PLATFORM = 'Nexus10_4-1_Float_Bench_32'
# Template for gsutil uri.
GOOGLE_STORAGE_URI_SCHEME = 'gs'
URI_BUCKET = 'chromium-skia-gm'
# Maximum number of rows of tiles to track for viewport covering.
MAX_TILE_ROWS = 8
# Constants for optparse.
USAGE_STRING = 'USAGE: %s [options]'
HOWTO_STRING = """
Note: to read bench data stored in Google Storage, you will need to set up the
corresponding Python library.
See http://developers.google.com/storage/docs/gspythonlibrary for details.
"""
HELP_STRING = """
For the given platform and revision number, find corresponding viewport and
tile benchmarks for each available picture bench, and output visualization and
analysis in HTML. By default it reads from Skia's Google Storage location where
bot data are stored, but if --dir is given, will read from local directory
instead.
""" + HOWTO_STRING
OPTION_DIR = '--dir'
OPTION_DIR_SHORT = '-d'
OPTION_REVISION = '--rev'
OPTION_REVISION_SHORT = '-r'
OPTION_PLATFORM = '--platform'
OPTION_PLATFORM_SHORT = '-p'
# Bench representation algorithm flag.
OPTION_REPRESENTATION_ALG = '--algorithm'
OPTION_REPRESENTATION_ALG_SHORT = '-a'
# Bench representation algorithm. See trunk/bench/bench_util.py.
REPRESENTATION_ALG = bench_util.ALGORITHM_25TH_PERCENTILE
# Constants for bench file matching.
GOOGLE_STORAGE_OBJECT_NAME_PREFIX = 'perfdata/Skia_'
BENCH_FILE_PREFIX_TEMPLATE = 'bench_r%s_'
TILING_FILE_NAME_INDICATOR = '_tile_'
VIEWPORT_FILE_NAME_INDICATOR = '_viewport_'
# Regular expression for matching format '<integer>x<integer>'.
DIMENSIONS_RE = '(\d+)x(\d+)'
# HTML and JS output templates.
HTML_PREFIX = """
<html><head><script type="text/javascript" src="https://www.google.com/jsapi">
</script><script type="text/javascript">google.load("visualization", "1.1",
{packages:["table"]});google.load("prototype", "1.6");</script>
<script type="text/javascript" src="https://systemsbiology-visualizations.googlecode.com/svn/trunk/src/main/js/load.js"></script><script
type="text/javascript"> systemsbiology.load("visualization", "1.0",
{packages:["bioheatmap"]});</script><script type="text/javascript">
google.setOnLoadCallback(drawVisualization); function drawVisualization() {
"""
HTML_SUFFIX = '</body></html>'
BAR_CHART_TEMPLATE = ('<img src="https://chart.googleapis.com/chart?chxr=0,0,'
'300&chxt=x&chbh=15,0&chs=600x150&cht=bhg&chco=80C65A,224499,FF0000,0A8C8A,'
'EBB671,DE091A,000000,00ffff&chds=a&chdl=%s&chd=t:%s" /><br>\n')
DRAW_OPTIONS = ('{passThroughBlack:false,useRowLabels:false,cellWidth:30,'
'cellHeight:30}')
TABLE_OPTIONS = '{showRowNumber:true,firstRowNumber:" ",sort:"disable"}'
def GetFiles(rev, bench_dir, platform):
  """Reads in bench files of interest into a dictionary.

  If bench_dir is not empty, tries to read in local bench files; otherwise check
  Google Storage. Filters files by revision (rev) and platform, and ignores
  non-tile, non-viewport bench files.
  Outputs dictionary [filename] -> [file content].

  Raises Exception when no matching bench file is found in either source.
  """
  file_dic = {}
  if not bench_dir:
    uri = boto.storage_uri(URI_BUCKET, GOOGLE_STORAGE_URI_SCHEME)

    # The boto API does not allow prefix/wildcard matching of Google Storage
    # objects. And Google Storage has a flat structure instead of being
    # organized in directories. Therefore, we have to scan all objects in the
    # Google Storage bucket to find the files we need, which is slow.
    # The option of implementing prefix matching as in gsutil seems to be
    # overkill, but gsutil does not provide an API ready for use. If speed is a
    # big concern, we suggest copying bot bench data from Google Storage using
    # gsutil and use --log_dir for fast local data reading.
    for obj in uri.get_bucket():
      # Filters out files of no interest: keep only tile/viewport bench files
      # for the requested platform and revision.
      if (not obj.name.startswith(GOOGLE_STORAGE_OBJECT_NAME_PREFIX) or
          (obj.name.find(TILING_FILE_NAME_INDICATOR) < 0 and
           obj.name.find(VIEWPORT_FILE_NAME_INDICATOR) < 0) or
          obj.name.find(platform) < 0 or
          obj.name.find(BENCH_FILE_PREFIX_TEMPLATE % rev) < 0):
        continue
      # Key on the bare filename (strip the flat-namespace "directory" part).
      file_dic[
          obj.name[obj.name.rfind('/') + 1 : ]] = obj.get_contents_as_string()
  else:
    # Local mode: same tile/viewport/revision filters against bench_dir.
    for f in os.listdir(bench_dir):
      if (not os.path.isfile(os.path.join(bench_dir, f)) or
          (f.find(TILING_FILE_NAME_INDICATOR) < 0 and
           f.find(VIEWPORT_FILE_NAME_INDICATOR) < 0) or
          not f.startswith(BENCH_FILE_PREFIX_TEMPLATE % rev)):
        continue
      file_dic[f] = open(os.path.join(bench_dir, f)).read()
  if not file_dic:
    raise Exception('No bench file found in "%s" or Google Storage.' %
                    bench_dir)

  return file_dic
def GetTileMatrix(layout, tile_size, values, viewport):
  """For the given tile layout and per-tile bench values, returns a matrix of
  bench values with tiles outside the given viewport set to 0.

  layout, tile_size and viewport are given in string of format <w>x<h>, where
  <w> is viewport width or number of tile columns, and <h> is viewport height
  or number of tile rows. We truncate tile rows to MAX_TILE_ROWS to adjust for
  very long skp's.

  values: per-tile benches ordered row-by-row, starting from the top-left tile.

  Returns [sum, matrix] where sum is the total bench tile time that covers the
  viewport, and matrix is used for visualizing the tiles.
  """
  tile_cols, tile_rows = (int(i) for i in layout.split('x'))
  tile_x, tile_y = (int(i) for i in tile_size.split('x'))
  viewport_x, viewport_y = (int(i) for i in viewport.split('x'))
  # Number of tile columns/rows needed to cover the viewport (rounded up).
  viewport_cols = int(math.ceil(viewport_x * 1.0 / tile_x))
  viewport_rows = int(math.ceil(viewport_y * 1.0 / tile_y))
  truncated_tile_rows = min(tile_rows, MAX_TILE_ROWS)

  viewport_tile_sum = 0
  matrix = [[0] * tile_cols for _ in range(truncated_tile_rows)]
  for col in range(min(viewport_cols, tile_cols)):
    for row in range(min(truncated_tile_rows, viewport_rows)):
      matrix[row][col] = values[row * tile_cols + col]
      viewport_tile_sum += values[row * tile_cols + col]

  return [viewport_tile_sum, matrix]
def GetTileVisCodes(suffix, matrix):
  """Generates and returns strings of [js_codes, row1, row2] which are codes
  for visualizing the benches from the given tile config and matrix data.

  row1 is used for the first row of heatmaps; row2 is for corresponding tables.
  suffix is only used to avoid name conflicts in the whole html output.
  """
  js_parts = ['var data_%s=new google.visualization.DataTable();' % suffix]
  for col in range(len(matrix[0])):
    js_parts.append('data_%s.addColumn("number","%s");' % (suffix, col))
  js_parts.append('data_%s.addRows(%s);' % (suffix, str(matrix)))

  # Heatmap chart.
  js_parts.append(
      'var heat_%s=new org.systemsbiology.visualization' % suffix +
      '.BioHeatMap(document.getElementById("%s"));' % suffix +
      'heat_%s.draw(data_%s,%s);' % (suffix, suffix, DRAW_OPTIONS))

  # Data table chart.
  js_parts.append(
      'var table_%s=new google.visualization.Table(document.' % suffix +
      'getElementById("t%s"));table_%s.draw(data_%s,%s);\n' % (
          suffix, suffix, suffix, TABLE_OPTIONS))

  table_row1 = '<td>%s<div id="%s"></div></td>' % (suffix, suffix)
  table_row2 = '<td><div id="t%s"></div></td>' % suffix

  return [''.join(js_parts), table_row1, table_row2]
def OutputTileAnalysis(rev, representation_alg, bench_dir, platform):
  """Reads skp bench data and outputs tile vs. viewport analysis for the given
  platform.

  Ignores data with revisions other than rev. If bench_dir is not empty, read
  from the local directory instead of Google Storage.
  Uses the provided representation_alg for calculating bench representations.

  Returns (js_codes, body_codes): strings of js/html codes for stats and
  visualization.
  """
  js_codes = ''
  body_codes = ('}</script></head><body>'
                '<h3>PLATFORM: %s REVISION: %s</h3><br>' % (platform, rev))
  bench_dic = {}  # [bench][config] -> [layout, [values]]
  file_dic = GetFiles(rev, bench_dir, platform)
  for f in file_dic:
    for point in bench_util.parse('', file_dic[f].split('\n'),
                                  representation_alg):
      if point.time_type:  # Ignores non-walltime time_type.
        continue
      bench = point.bench.replace('.skp', '')
      config = point.config.replace('simple_', '')
      components = config.split('_')
      if components[0] == 'viewport':
        # components[1] is the viewport dimensions string, e.g. "640x480".
        bench_dic.setdefault(bench, {})[config] = [components[1], [point.time]]
      else:  # Stores per-tile benches.
        bench_dic.setdefault(bench, {})[config] = [
            point.tile_layout, point.per_tile_values]
  # NOTE: Python-2 idiom — dict.keys() returns a sortable list here.
  benches = bench_dic.keys()
  benches.sort()
  for bench in benches:
    body_codes += '<h4>%s</h4><br><table><tr>' % bench
    heat_plots = ''  # For table row of heatmap plots.
    table_plots = ''  # For table row of data table plots.
    # For bar plot legends and values in URL string.
    legends = ''
    values = ''
    keys = bench_dic[bench].keys()
    keys.sort()
    # Viewport configs sort after tile configs, so the last key is a
    # viewport entry when one exists.
    if not keys[-1].startswith('viewport'):  # No viewport to analyze; skip.
      continue
    else:
      # Extracts viewport size, which for all viewport configs is the same.
      viewport = bench_dic[bench][keys[-1]][0]
    for config in keys:
      [layout, value_li] = bench_dic[bench][config]
      if config.startswith('tile_'):  # For per-tile data, visualize tiles.
        tile_size = config.split('_')[1]
        if (not re.search(DIMENSIONS_RE, layout) or
            not re.search(DIMENSIONS_RE, tile_size) or
            not re.search(DIMENSIONS_RE, viewport)):
          continue  # Skip unrecognized formats.
        [viewport_tile_sum, matrix] = GetTileMatrix(
            layout, tile_size, value_li, viewport)
        values += '%s|' % viewport_tile_sum
        [this_js, row1, row2] = GetTileVisCodes(config + '_' + bench, matrix)
        heat_plots += row1
        table_plots += row2
        js_codes += this_js
      else:  # For viewport data, there is only one element in value_li.
        values += '%s|' % sum(value_li)
        legends += '%s:%s|' % (config, sum(value_li))
    # Trailing '|' separators are stripped before building the chart URL.
    body_codes += (heat_plots + '</tr><tr>' + table_plots + '</tr></table>' +
                   '<br>' + BAR_CHART_TEMPLATE % (legends[:-1], values[:-1]))

  return (js_codes, body_codes)
def main():
  """Parses flags and outputs expected Skia picture bench results."""
  parser = optparse.OptionParser(USAGE_STRING % '%prog' + HELP_STRING)
  parser.add_option(OPTION_PLATFORM_SHORT, OPTION_PLATFORM,
      dest='plat', default=DEFAULT_PLATFORM,
      help='Platform to analyze. Set to DEFAULT_PLATFORM if not given.')
  parser.add_option(OPTION_REVISION_SHORT, OPTION_REVISION,
      dest='rev',
      help='(Mandatory) revision number to analyze.')
  parser.add_option(OPTION_DIR_SHORT, OPTION_DIR,
      dest='log_dir', default='',
      help=('(Optional) local directory where bench log files reside. If left '
            'empty (by default), will try to read from Google Storage.'))
  parser.add_option(OPTION_REPRESENTATION_ALG_SHORT, OPTION_REPRESENTATION_ALG,
      dest='alg', default=REPRESENTATION_ALG,
      help=('Bench representation algorithm. '
            'Default to "%s".' % REPRESENTATION_ALG))
  (options, args) = parser.parse_args()
  # --rev is required and must be a plain decimal revision number.
  if not (options.rev and options.rev.isdigit()):
    parser.error('Please provide correct mandatory flag %s' % OPTION_REVISION)
    return
  rev = int(options.rev)
  (js_codes, body_codes) = OutputTileAnalysis(
      rev, options.alg, options.log_dir, options.plat)
  # NOTE: Python-2 print statement — this script targets Python 2.
  print HTML_PREFIX + js_codes + body_codes + HTML_SUFFIX
if '__main__' == __name__:
main()
|
diorcety/intellij-community
|
refs/heads/master
|
python/lib/Lib/site-packages/django/contrib/localflavor/jp/forms.py
|
333
|
"""
JP-specific Form helpers
"""
from django.forms import ValidationError
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import RegexField, Select
class JPPostalCodeField(RegexField):
    """
    A form field that validates its input is a Japanese postcode.

    Accepts 7 digits, with or without a hyphen.
    """
    default_error_messages = {
        'invalid': _('Enter a postal code in the format XXXXXXX or XXX-XXXX.'),
    }

    def __init__(self, *args, **kwargs):
        # Matches either "123-4567" or "1234567".
        super(JPPostalCodeField, self).__init__(r'^\d{3}-\d{4}$|^\d{7}$',
            max_length=None, min_length=None, *args, **kwargs)

    def clean(self, value):
        """
        Validates the input and returns a string that contains only numbers.
        Returns an empty string for empty values.
        """
        cleaned = super(JPPostalCodeField, self).clean(value)
        return cleaned.replace('-', '')
class JPPrefectureSelect(Select):
    """
    A Select widget that uses a list of Japanese prefectures as its choices.
    """
    def __init__(self, attrs=None):
        # Imported at call time, matching the original's deferred import.
        from jp_prefectures import JP_PREFECTURES
        prefecture_choices = JP_PREFECTURES
        super(JPPrefectureSelect, self).__init__(attrs,
                                                 choices=prefecture_choices)
|
ArthurGarnier/SickRage
|
refs/heads/master
|
lib/pgi/foreign/cairo.py
|
19
|
# Copyright 2016 Christoph Reiter
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
import cffi
import cairocffi
from ._base import ForeignStruct
ffi = cffi.FFI()
@ForeignStruct.register("cairo", "Context")
class Context(ForeignStruct):
    """Bridges GI "cairo.Context" struct pointers to cairocffi Context objects."""
    def from_pointer(self, pointer):
        # Wrap the raw address in a cairocffi Context.  NOTE(review): the
        # True flag is cairocffi's second _from_pointer argument (incref /
        # ownership handling) — confirm against the cairocffi API docs.
        pointer = ffi.cast("void*", pointer)
        return cairocffi.Context._from_pointer(pointer, True)
    def to_pointer(self, instance):
        # Return the wrapped cairo pointer as a plain integer address.
        return int(ffi.cast("intptr_t", instance._pointer))
    def get_type(self):
        return cairocffi.Context
@ForeignStruct.register("cairo", "Surface")
class Surface(ForeignStruct):
    """Marshals cairo.Surface values between GI pointers and cairocffi."""

    def from_pointer(self, pointer):
        # Wrap the raw address; True transfers reference ownership.
        raw = ffi.cast("void*", pointer)
        return cairocffi.Surface._from_pointer(raw, True)

    def to_pointer(self, instance):
        # Integer address of the wrapped cairo surface.
        return int(ffi.cast("intptr_t", instance._pointer))

    def get_type(self):
        return cairocffi.Surface
|
chimney37/ml-snippets
|
refs/heads/master
|
neural_network_test.py
|
1
|
#! /usr/bin/env python
# -*- coding:utf-8
# Testing a deep neural network (multi layered perceptron model) with tensorflow.
# Special thanks: Harisson@pythonprogramming.net
'''
File name: neural_network_test.py
Author: chimney37
Date created: 11/03/2017
Python Version: 3.62
'''
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
'''
We will use the MNIST data, using 60000 training samples and 10000 testing samples of
handwritten and labeled digits, 0 through 9, i.e. 10 total "classes". In actual deep
learning requires half a billion samples for accuracy. It's small enough to work on
any computers. MNIST dataset of images 28x28:784 pixels. Either pixel is "blank" i.e.
0 or there is something there : 0. We will predict the number we're looking at
(0,1,2,...8 or 9).
Steps: input data will send to hidden layer 1, that is weighted. It will undergo an
activation function, so neuron can decided to fire and output data to either output
layer, or another hidden layer. We will have 3 hidden layers. We will use a cost
function (loss function), to determine how wrong we are. Lastly, we will use an
optimizer function: Adam optimizer, to minimize the cost. Cost is minimized by
tinkering with weights. How quickly we want to lower the cost is determined by
learning rate. The lower the value for learning rate, the slower we will learn, and
more likely we'd get better results.
The act of sending the data straight through our network means we're operating a feed
forward neural network. The adjustments of weights backwards is our back propagation.
We do feeding forward and back propagation however many times we want. The cycle is
called an Epoch. We can pick any number for number of epochs. After each epoch, we've
hopefully further fine-tuned our weights lowering our cost and improving accuracy.
'''
# one_hot means one eleent out of others is literally "hot" or on. This is useful for a
# multi-class classification, from 0,1,...to 9. So we want the output to be like
#
# 0 = [1,0,0,0,0,0,0,0,0]
# 1 = [0,1,0,0,0,0,0,0,0]
# ...
# 9 = [0,0,0,0,0,0,0,0,1]
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)
'''
in building the model, we consider the number of nodes each hidden layer will have.
Nodes in each layer need not be identical, but it can be tweaked, depending on what
we are trying to model (TBD).
Batches are used to control how many features we are going to optimize at once, as computers
are limited by memory.
'''
n_nodes_hl1 = 500
n_nodes_hl2 = 500
n_nodes_hl3 = 500
n_classes = 10
batch_size = 100
# input: 784 is pixels. the matrix is 1 x 2 because we flatten the image: 28x28 to a
# 784 values. This is also known as the shape. If an input data is out of place that doen't
# fit the shape, this specification will ignore the data, without throwing an error.
x = tf.placeholder('float', [None, 784])
# output
y = tf.placeholder('float')
def neural_network_model(data):
    """Define the 3-hidden-layer MLP graph and return the output logits.

    Each layer computes (input * weights) + biases; hidden layers apply
    ReLU, the output layer is left linear (softmax is applied later by
    the loss).  Weights and biases are tf.Variables initialised from a
    normal distribution.  Nothing runs here: only the graph is built.
    """
    def _layer_params(n_in, n_out):
        # One weight matrix and one bias vector per layer.  The biases
        # let a neuron fire even when every input is 0.
        return {'weights': tf.Variable(tf.random_normal([n_in, n_out])),
                'biases': tf.Variable(tf.random_normal([n_out]))}

    h1 = _layer_params(784, n_nodes_hl1)
    h2 = _layer_params(n_nodes_hl1, n_nodes_hl2)
    h3 = _layer_params(n_nodes_hl2, n_nodes_hl3)
    out = _layer_params(n_nodes_hl3, n_classes)

    # Forward pass: affine transform followed by ReLU per hidden layer.
    a1 = tf.nn.relu(tf.add(tf.matmul(data, h1['weights']), h1['biases']))
    a2 = tf.nn.relu(tf.add(tf.matmul(a1, h2['weights']), h2['biases']))
    a3 = tf.nn.relu(tf.add(tf.matmul(a2, h3['weights']), h3['biases']))

    # No activation on the output layer.
    return tf.matmul(a3, out['weights']) + out['biases']
def train_neural_network(x):
    """Train the MLP on MNIST, printing per-epoch loss and test accuracy.

    ``x`` is the input placeholder; labels come from the module-level
    placeholder ``y`` and data from the module-level ``mnist`` dataset.
    """
    prediction = neural_network_model(x)
    # Softmax cross-entropy loss, averaged over the batch.
    cost = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(logits=prediction, labels=y))
    # Adam optimizer (default learning rate 0.001); alternatives include
    # plain SGD and AdaGrad.
    optimizer = tf.train.AdamOptimizer().minimize(cost)

    # One epoch = one full feed-forward + back-propagation pass.
    hm_epochs = 14
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())

        for epoch in range(hm_epochs):
            epoch_loss = 0
            n_batches = int(mnist.train.num_examples / batch_size)
            for _ in range(n_batches):
                epoch_x, epoch_y = mnist.train.next_batch(batch_size)
                _, batch_cost = sess.run([optimizer, cost],
                                         feed_dict={x: epoch_x, y: epoch_y})
                epoch_loss += batch_cost
            print('Epoch', epoch, 'completed out of', hm_epochs, 'loss:', epoch_loss)

        correct = tf.equal(tf.argmax(prediction, 1), tf.argmax(y, 1))
        # Fraction of predictions that exactly match their labels.
        accuracy = tf.reduce_mean(tf.cast(correct, 'float'))
        print('Accuracy:', accuracy.eval({x: mnist.test.images, y: mnist.test.labels}))
        # 10-20 epochs reach ~95% accuracy from raw pixels alone;
        # state of the art is ~99%.


train_neural_network(x)
|
andreparames/odoo
|
refs/heads/8.0
|
openerp/tools/safe_eval.py
|
250
|
# -*- coding: utf-8 -*-
##############################################################################
# Copyright (C) 2004-2014 OpenERP s.a. (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""
safe_eval module - methods intended to provide more restricted alternatives to
evaluate simple and/or untrusted code.
Methods in this module are typically used as alternatives to eval() to parse
OpenERP domain strings, conditions and expressions, mostly based on locals
condition/math builtins.
"""
# Module partially ripped from/inspired by several different sources:
# - http://code.activestate.com/recipes/286134/
# - safe_eval in lp:~xrg/openobject-server/optimize-5.0
# - safe_eval in tryton http://hg.tryton.org/hgwebdir.cgi/trytond/rev/bbb5f73319ad
from opcode import HAVE_ARGUMENT, opmap, opname
from psycopg2 import OperationalError
from types import CodeType
import logging
from .misc import ustr
import openerp
__all__ = ['test_expr', 'safe_eval', 'const_eval']

# The time module is usually already provided in the safe_eval environment
# but some code, e.g. datetime.datetime.now() (Windows/Python 2.5.2, bug
# lp:703841), does import time.
_ALLOWED_MODULES = ['_strptime', 'math', 'time']

# Frame/generator/code attributes that would let sandboxed code escape
# (reach real globals or raw bytecode); access to them is refused.
_UNSAFE_ATTRIBUTES = ['f_builtins', 'f_globals', 'f_locals', 'gi_frame',
                      'co_code', 'func_globals']

# Opcodes allowed for constant-only expressions (const_eval).  The
# trailing "if x in opmap" guards keep these sets valid across Python
# versions where some opcodes do not exist.
_CONST_OPCODES = set(opmap[x] for x in [
    'POP_TOP', 'ROT_TWO', 'ROT_THREE', 'ROT_FOUR', 'DUP_TOP', 'DUP_TOPX',
    'POP_BLOCK','SETUP_LOOP', 'BUILD_LIST', 'BUILD_MAP', 'BUILD_TUPLE',
    'LOAD_CONST', 'RETURN_VALUE', 'STORE_SUBSCR', 'STORE_MAP'] if x in opmap)

# _CONST_OPCODES plus arithmetic/logic operators (expr_eval).
_EXPR_OPCODES = _CONST_OPCODES.union(set(opmap[x] for x in [
    'UNARY_POSITIVE', 'UNARY_NEGATIVE', 'UNARY_NOT',
    'UNARY_INVERT', 'BINARY_POWER', 'BINARY_MULTIPLY',
    'BINARY_DIVIDE', 'BINARY_FLOOR_DIVIDE', 'BINARY_TRUE_DIVIDE',
    'BINARY_MODULO', 'BINARY_ADD', 'BINARY_SUBTRACT', 'BINARY_SUBSCR',
    'BINARY_LSHIFT', 'BINARY_RSHIFT', 'BINARY_AND', 'BINARY_XOR',
    'BINARY_OR', 'INPLACE_ADD', 'INPLACE_SUBTRACT', 'INPLACE_MULTIPLY',
    'INPLACE_DIVIDE', 'INPLACE_REMAINDER', 'INPLACE_POWER',
    'INPLACE_LEFTSHIFT', 'INPLACE_RIGHTSHIFT', 'INPLACE_AND',
    'INPLACE_XOR','INPLACE_OR'
] if x in opmap))

# _EXPR_OPCODES plus name access, calls and control flow (safe_eval).
_SAFE_OPCODES = _EXPR_OPCODES.union(set(opmap[x] for x in [
    'LOAD_NAME', 'CALL_FUNCTION', 'COMPARE_OP', 'LOAD_ATTR',
    'STORE_NAME', 'GET_ITER', 'FOR_ITER', 'LIST_APPEND', 'DELETE_NAME',
    'JUMP_FORWARD', 'JUMP_IF_TRUE', 'JUMP_IF_FALSE', 'JUMP_ABSOLUTE',
    'MAKE_FUNCTION', 'SLICE+0', 'SLICE+1', 'SLICE+2', 'SLICE+3', 'BREAK_LOOP',
    'CONTINUE_LOOP', 'RAISE_VARARGS', 'YIELD_VALUE',
    # New in Python 2.7 - http://bugs.python.org/issue4715 :
    'JUMP_IF_FALSE_OR_POP', 'JUMP_IF_TRUE_OR_POP', 'POP_JUMP_IF_FALSE',
    'POP_JUMP_IF_TRUE', 'SETUP_EXCEPT', 'END_FINALLY',
    'LOAD_FAST', 'STORE_FAST', 'DELETE_FAST', 'UNPACK_SEQUENCE',
    'LOAD_GLOBAL',  # Only allows access to restricted globals
] if x in opmap))

_logger = logging.getLogger(__name__)
def _get_opcodes(codeobj):
    """_get_opcodes(codeobj) -> [opcodes]

    Extract the actual opcodes as a list from a code object

    >>> c = compile("[1 + 2, (1,2)]", "", "eval")
    >>> _get_opcodes(c)
    [100, 100, 23, 100, 100, 102, 103, 83]
    """
    i = 0
    byte_codes = codeobj.co_code
    while i < len(byte_codes):
        # co_code is a byte string on Python 2, so each element must be
        # converted to its integer opcode with ord().
        code = ord(byte_codes[i])
        yield code

        # Opcodes >= HAVE_ARGUMENT are followed by a 2-byte argument;
        # skip over it to reach the next opcode.
        if code >= HAVE_ARGUMENT:
            i += 3
        else:
            i += 1
def assert_no_dunder_name(code_obj, expr):
    """Refuse code objects that refer to any "dunder name" (__$name__).

    safe_eval blocks access to internal-ish Python attributes and
    methods, which are reached via LOAD_ATTR/LOAD_NAME by *name* (never
    through a const or a var), so screening ``co_names`` is sufficient.

    Note that every name containing two consecutive underscores is
    rejected, not only __x__-style names, together with the explicitly
    blacklisted frame/code attributes in _UNSAFE_ATTRIBUTES.

    :param code_obj: code object to name-validate
    :type code_obj: CodeType
    :param str expr: expression corresponding to the code object, for
                     debugging purposes
    :raises NameError: if a forbidden name is referenced by ``code_obj``
    """
    for referenced in code_obj.co_names:
        forbidden = "__" in referenced or referenced in _UNSAFE_ATTRIBUTES
        if forbidden:
            raise NameError('Access to forbidden name %r (%r)' % (referenced, expr))
def assert_valid_codeobj(allowed_codes, code_obj, expr):
    """Validate ``code_obj`` against the opcode whitelist and name rules.

    Nested code objects stored in ``co_consts`` (created for lambdas,
    which get their own separate code object) are validated recursively.

    :param allowed_codes: permissible bytecode instructions
    :type allowed_codes: set(int)
    :param code_obj: code object to validate
    :type code_obj: CodeType
    :param str expr: expression corresponding to the code object, for
                     debugging purposes
    :raises ValueError: on forbidden bytecode in ``code_obj``
    :raises NameError: on a forbidden (double-underscore) name
    """
    assert_no_dunder_name(code_obj, expr)

    disallowed = [op for op in _get_opcodes(code_obj) if op not in allowed_codes]
    if disallowed:
        raise ValueError(
            "opcode %s not allowed (%r)" % (opname[disallowed[0]], expr))

    for nested in code_obj.co_consts:
        if isinstance(nested, CodeType):
            assert_valid_codeobj(allowed_codes, nested, 'lambda')
def test_expr(expr, allowed_codes, mode="eval"):
    """test_expr(expression, allowed_codes[, mode]) -> code_object

    Test that the expression contains only the allowed opcodes.
    If the expression is valid and contains only allowed codes,
    return the compiled code object.  Otherwise raise a ValueError,
    a SyntaxError or a TypeError accordingly.
    """
    try:
        if mode == 'eval':
            # eval() does not like leading/trailing whitespace
            expr = expr.strip()
        code_obj = compile(expr, "", mode)
    except (SyntaxError, TypeError, ValueError):
        # These already carry enough context; propagate unchanged.
        raise
    except Exception, e:
        import sys
        exc_info = sys.exc_info()
        # Python 2 three-argument raise: re-wrap as ValueError while
        # preserving the original traceback for debugging.
        raise ValueError, '"%s" while compiling\n%r' % (ustr(e), expr), exc_info[2]

    assert_valid_codeobj(allowed_codes, code_obj, expr)
    return code_obj
def const_eval(expr):
    """const_eval(expression) -> value

    Safe Python constant evaluation.

    Evaluates a string that contains an expression describing a Python
    constant.  Strings that are not valid Python expressions, or that
    contain anything beyond the constant itself, raise ValueError.

    >>> const_eval("10")
    10
    >>> const_eval("[1,2, (3,4), {'foo':'bar'}]")
    [1, 2, (3, 4), {'foo': 'bar'}]
    >>> const_eval("1+2")
    Traceback (most recent call last):
    ...
    ValueError: opcode BINARY_ADD not allowed
    """
    # test_expr() returns the validated code object, so the bytecode
    # whitelist has already been enforced before eval() runs.
    return eval(test_expr(expr, _CONST_OPCODES))
def expr_eval(expr):
    """expr_eval(expression) -> value

    Restricted Python expression evaluation.

    Evaluates a string containing an expression that only uses Python
    constants and operators — e.g. a numerical expression from an
    untrusted source.  Name access and other constructs are rejected
    during bytecode validation.

    >>> expr_eval("1+2")
    3
    >>> expr_eval("[1,2]*2")
    [1, 2, 1, 2]
    """
    # The returned code object has already passed the _EXPR_OPCODES
    # whitelist, so eval() only ever sees constant arithmetic.
    return eval(test_expr(expr, _EXPR_OPCODES))
def _import(name, globals=None, locals=None, fromlist=None, level=-1):
    """Restricted replacement for ``__import__`` used inside safe_eval.

    Only modules whitelisted in _ALLOWED_MODULES may be imported; any
    other name raises ImportError.

    :raises ImportError: if ``name`` is not a whitelisted module
    """
    if globals is None:
        globals = {}
    if locals is None:
        locals = {}
    if fromlist is None:
        fromlist = []
    if name in _ALLOWED_MODULES:
        # Bug fix: the previous code called __import__(name, globals,
        # locals, level), passing ``level`` in the *fromlist* positional
        # slot (the real signature is __import__(name, globals, locals,
        # fromlist, level)), so the caller's fromlist and level were
        # silently dropped.
        return __import__(name, globals, locals, fromlist, level)
    raise ImportError(name)
def safe_eval(expr, globals_dict=None, locals_dict=None, mode="eval", nocopy=False, locals_builtins=False):
    """safe_eval(expression[, globals[, locals[, mode[, nocopy]]]]) -> result

    System-restricted Python expression evaluation

    Evaluates a string that contains an expression that mostly
    uses Python constants, arithmetic expressions and the
    objects directly provided in context.

    This can be used to e.g. evaluate
    an OpenERP domain expression from an untrusted source.

    :param str expr: expression to evaluate
    :param dict globals_dict: evaluation globals; a restricted
        ``__builtins__`` mapping is installed into it
    :param dict locals_dict: evaluation locals
    :param str mode: compile() mode, "eval" or "exec"
    :param bool nocopy: evaluate against the caller's dicts directly
        instead of defensive copies (needed when the caller passes a
        dynamic environment and wants writes to be visible)
    :param bool locals_builtins: also expose the restricted builtins
        through ``locals_dict``
    :throws TypeError: If the expression provided is a code object
    :throws SyntaxError: If the expression provided is not valid Python
    :throws NameError: If the expression provided accesses forbidden names
    :throws ValueError: If the expression provided uses forbidden bytecode
    """
    if isinstance(expr, CodeType):
        # A pre-built code object would bypass the bytecode whitelist.
        raise TypeError("safe_eval does not allow direct evaluation of code objects.")

    if globals_dict is None:
        globals_dict = {}

    # prevent altering the globals/locals from within the sandbox
    # by taking a copy.
    if not nocopy:
        # isinstance() does not work below, we want *exactly* the dict class
        if (globals_dict is not None and type(globals_dict) is not dict) \
                or (locals_dict is not None and type(locals_dict) is not dict):
            _logger.warning(
                "Looks like you are trying to pass a dynamic environment, "
                "you should probably pass nocopy=True to safe_eval().")

        globals_dict = dict(globals_dict)
        if locals_dict is not None:
            locals_dict = dict(locals_dict)

    # Restricted builtins: only side-effect-free helpers are exposed and
    # __import__ is replaced by the whitelisting _import() above.
    # NOTE: several entries (unicode, long, xrange, cmp, reduce) are
    # Python 2 only.
    globals_dict.update(
        __builtins__={
            '__import__': _import,
            'True': True,
            'False': False,
            'None': None,
            'str': str,
            'unicode': unicode,
            'bool': bool,
            'int': int,
            'float': float,
            'long': long,
            'enumerate': enumerate,
            'dict': dict,
            'list': list,
            'tuple': tuple,
            'map': map,
            'abs': abs,
            'min': min,
            'max': max,
            'sum': sum,
            'reduce': reduce,
            'filter': filter,
            'round': round,
            'len': len,
            'repr': repr,
            'set': set,
            'all': all,
            'any': any,
            'ord': ord,
            'chr': chr,
            'cmp': cmp,
            'divmod': divmod,
            'isinstance': isinstance,
            'range': range,
            'xrange': xrange,
            'zip': zip,
            'Exception': Exception,
        }
    )
    if locals_builtins:
        if locals_dict is None:
            locals_dict = {}
        locals_dict.update(globals_dict.get('__builtins__'))
    # Validate the bytecode against the safe_eval whitelist before
    # evaluating anything.
    c = test_expr(expr, _SAFE_OPCODES, mode=mode)
    try:
        return eval(c, globals_dict, locals_dict)
    # OpenERP business exceptions are part of the expected control flow
    # and must reach the caller unchanged.
    except openerp.osv.orm.except_orm:
        raise
    except openerp.exceptions.Warning:
        raise
    except openerp.exceptions.RedirectWarning:
        raise
    except openerp.exceptions.AccessDenied:
        raise
    except openerp.exceptions.AccessError:
        raise
    except OperationalError:
        # Do not hide PostgreSQL low-level exceptions, to let the auto-replay
        # of serialized transactions work its magic
        raise
    except Exception, e:
        import sys
        exc_info = sys.exc_info()
        # Python 2 three-argument raise: wrap in ValueError while keeping
        # the original traceback for debugging.
        raise ValueError, '"%s" while evaluating\n%r' % (ustr(e), expr), exc_info[2]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Slysliver/AutoRPG
|
refs/heads/master
|
combatCommands.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# combatCommands.py
#
# Copyright 2016 Slysliver <slysliver@Bucket-Lite>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
def meleedmg(attkr, dfndr):
    """Apply one melee attack from ``attkr`` to ``dfndr``.

    Damage dealt is (attacker damage + attack bonus) - defender defense
    bonus, clamped at zero so a high defense bonus cannot heal the
    defender.  Randomising the damage is still a TODO.

    :param attkr: attacker; must expose getstat()
    :param dfndr: defender; must expose getstat() and damage()
    """
    damage = attkr.getstat('damage')
    bonus = attkr.getstat('atkbns')
    dfns = dfndr.getstat('dfsbns')
    # Bug fix: clamp at 0 — previously a defense bonus larger than the
    # attack produced negative damage, effectively healing the defender.
    dfndr.damage(max(0, (damage + bonus) - dfns))
def initiative(attkr, dfndr, stat):
    """Compare the given stat of two combatants.

    Returns a two-element list ``[ratio, attacker_is_faster]``: the
    stat ratio of the faster combatant over the slower one, and True
    when the attacker is at least as fast as the defender.
    """
    attacker_speed = attkr.getstat(stat)
    defender_speed = dfndr.getstat(stat)

    if attacker_speed / defender_speed < 1:
        # Defender is faster: express the ratio from the defender's side.
        return [defender_speed / attacker_speed, False]
    return [attacker_speed / defender_speed, True]
def attackingcombat(attkr, dfndr):
    """Run melee combat rounds until one combatant is out of hit points.

    The faster combatant lands multiple hits per round according to the
    speed ratio from initiative() (range(1, ratio) — i.e. ratio-1 hits,
    preserved from the original); the slower one lands a single hit.

    :param attkr: attacking combatant (getstat/damage interface)
    :param dfndr: defending combatant (getstat/damage interface)
    """
    ratio, attacker_faster = initiative(attkr, dfndr, 'speed')
    # int() keeps range() valid when the speed ratio is fractional
    # (true division under Python 3); identical for Python 2 ints.
    hits = int(ratio)
    if attacker_faster:
        fast, slow = attkr, dfndr
    else:
        fast, slow = dfndr, attkr
    # Bug fix: the loop previously ran while curhp "!= 0", which never
    # terminates once an overshooting hit drives hp below zero.
    while attkr.getstat('curhp') > 0 and dfndr.getstat('curhp') > 0:
        for _ in range(1, hits):
            meleedmg(fast, slow)
        meleedmg(slow, fast)
|
clarkperkins/readthedocs.org
|
refs/heads/master
|
readthedocs/core/management/commands/reindex_elasticsearch.py
|
26
|
import logging
from optparse import make_option
from django.core.management.base import BaseCommand
from django.conf import settings
from readthedocs.builds.constants import LATEST
from readthedocs.builds.models import Version
from readthedocs.search import parse_json
from readthedocs.restapi.utils import index_search_request
log = logging.getLogger(__name__)
class Command(BaseCommand):
    """Management command: rebuild the Elasticsearch index for project
    versions (one project via -p, or all public versions)."""

    option_list = BaseCommand.option_list + (
        make_option('-p',
                    dest='project',
                    default='',
                    help='Project to index'),
    )

    def handle(self, *args, **options):
        """Build/index all versions or a single project's version."""
        project = options['project']
        if project:
            queryset = Version.objects.public(project__slug=project)
            log.info("Building all versions for %s" % project)
        elif getattr(settings, 'INDEX_ONLY_LATEST', True):
            queryset = Version.objects.public().filter(slug=LATEST)
        else:
            queryset = Version.objects.public()
        for version in queryset:
            log.info("Reindexing %s" % version)
            try:
                commit = version.project.vcs_repo(version.slug).commit
            # Bug fix: a bare "except:" also swallowed KeyboardInterrupt
            # and SystemExit, making the command impossible to abort.
            except Exception:
                # A missing checkout will happen on prod; index without a
                # commit rather than aborting the whole run.
                commit = None
            try:
                page_list = parse_json.process_all_json_files(version, build_dir=False)
                index_search_request(
                    version=version, page_list=page_list, commit=commit,
                    project_scale=0, page_scale=0, section=False, delete=False)
            except Exception:
                log.error('Build failed for %s' % version, exc_info=True)
|
ilexius/odoo
|
refs/heads/master
|
addons/website_quote/models/order.py
|
3
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp import api
from openerp.osv import osv, fields
import uuid
import time
import datetime
import openerp.addons.decimal_precision as dp
from openerp import SUPERUSER_ID
from openerp.tools.translate import _
class sale_quote_template(osv.osv):
    """Reusable quotation template (lines, optional products, terms)
    used to pre-fill a sale order when the template is selected."""
    _name = "sale.quote.template"
    _description = "Sale Quotation Template"
    _columns = {
        'name': fields.char('Quotation Template', required=True),
        'website_description': fields.html('Description', translate=True),
        'quote_line': fields.one2many('sale.quote.line', 'quote_id', 'Quotation Template Lines', copy=True),
        'note': fields.text('Terms and conditions'),
        'options': fields.one2many('sale.quote.option', 'template_id', 'Optional Products Lines', copy=True),
        'number_of_days': fields.integer('Quotation Duration', help='Number of days for the validity date computation of the quotation'),
        'require_payment': fields.selection([
            (0, 'Not mandatory on website quote validation'),
            (1, 'Immediate after website order validation')
        ], 'Payment', help="Require immediate payment by the customer when validating the order from the website quote"),
    }

    def open_quotation_template(self, cr, uid, quote_id, context=None):
        pass

    def open_template(self, cr, uid, quote_id, context=None):
        # quote_id is the list of ids passed by the client; open the
        # website preview of the first template.
        return {
            'type': 'ir.actions.act_url',
            'target': 'self',
            'url': '/quote/template/%d' % quote_id[0]
        }
class sale_quote_line(osv.osv):
    """Single line of a quotation template: product, quantity, price
    and the website description shown on the online quote."""
    _name = "sale.quote.line"
    _description = "Quotation Template Lines"
    _columns = {
        'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of sale quote lines."),
        'quote_id': fields.many2one('sale.quote.template', 'Quotation Template Reference', required=True, ondelete='cascade', select=True),
        'name': fields.text('Description', required=True, translate=True),
        'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], required=True),
        'website_description': fields.related('product_id', 'product_tmpl_id', 'quote_description', string='Line Description', type='html', translate=True),
        'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price')),
        'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount')),
        'product_uom_qty': fields.float('Quantity', required=True, digits_compute= dp.get_precision('Product UoS')),
        'product_uom_id': fields.many2one('product.uom', 'Unit of Measure ', required=True),
    }
    _order = 'sequence, id'
    _defaults = {
        'product_uom_qty': 1,
        'discount': 0.0,
        'sequence': 10,
    }

    def on_change_product_id(self, cr, uid, ids, product, uom_id=None, context=None):
        # Fill price, UoM, name and website description from the chosen
        # product; restrict the UoM domain to the product's UoM category.
        vals, domain = {}, []
        product_obj = self.pool.get('product.product').browse(cr, uid, product, context=context)
        name = product_obj.name
        if product_obj.description_sale:
            name += '\n' + product_obj.description_sale
        vals.update({
            'price_unit': product_obj.lst_price,
            'product_uom_id': product_obj.uom_id.id,
            'website_description': product_obj and (product_obj.quote_description or product_obj.website_description) or '',
            'name': name,
            'product_uom_id': uom_id or product_obj.uom_id.id,
        })
        uom_obj = self.pool.get('product.uom')
        if vals['product_uom_id'] != product_obj.uom_id.id:
            selected_uom = uom_obj.browse(cr, uid, vals['product_uom_id'], context=context)
            # Recompute the unit price in the selected unit of measure.
            new_price = uom_obj._compute_price(cr, uid, product_obj.uom_id.id, vals['price_unit'], vals['product_uom_id'])
            vals['price_unit'] = new_price
        if not uom_id:
            domain = {'product_uom_id': [('category_id', '=', product_obj.uom_id.category_id.id)]}
        return {'value': vals, 'domain': domain}

    def product_uom_change(self, cr, uid, ids, product, uom_id, context=None):
        # Changing the UoM is delegated to on_change_product_id once a
        # UoM is actually set; otherwise reset the price.
        context = context or {}
        if not uom_id:
            return {'value': {'price_unit': 0.0, 'uom_id': False}}
        return self.on_change_product_id(cr, uid, ids, product, uom_id=uom_id, context=context)

    def _inject_quote_description(self, cr, uid, values, context=None):
        # Default the website description from the product when missing.
        values = dict(values or {})
        if not values.get('website_description') and values.get('product_id'):
            product = self.pool['product.product'].browse(cr, uid, values['product_id'], context=context)
            values['website_description'] = product.quote_description or product.website_description or ''
        return values

    def create(self, cr, uid, values, context=None):
        values = self._inject_quote_description(cr, uid, values, context)
        ret = super(sale_quote_line, self).create(cr, uid, values, context=context)
        # Workaround: create() does not store a value passed for a
        # related field, so write it explicitly after creation.
        if values.get('website_description'):
            self.write(cr, uid, ret, {'website_description': values['website_description']}, context=context)
        return ret

    def write(self, cr, uid, ids, values, context=None):
        values = self._inject_quote_description(cr, uid, values, context)
        return super(sale_quote_line, self).write(cr, uid, ids, values, context=context)
class sale_order_line(osv.osv):
    """Extend sale order lines with the website description and the
    link to the optional-product lines of the online quote."""
    _inherit = "sale.order.line"
    _description = "Sales Order Line"
    _columns = {
        'website_description': fields.html('Line Description'),
        'option_line_id': fields.one2many('sale.order.option', 'line_id', 'Optional Products Lines'),
    }

    def _inject_quote_description(self, cr, uid, values, context=None):
        # Default the website description from the product when missing.
        values = dict(values or {})
        if not values.get('website_description') and values.get('product_id'):
            product = self.pool['product.product'].browse(cr, uid, values['product_id'], context=context)
            values['website_description'] = product.quote_description or product.website_description
        return values

    def create(self, cr, uid, values, context=None):
        values = self._inject_quote_description(cr, uid, values, context)
        ret = super(sale_order_line, self).create(cr, uid, values, context=context)
        # Workaround: create() does not store a value passed for a
        # related field, so write it explicitly after creation.
        if values.get('website_description'):
            self.write(cr, uid, ret, {'website_description': values['website_description']}, context=context)
        return ret

    def write(self, cr, uid, ids, values, context=None):
        values = self._inject_quote_description(cr, uid, values, context)
        return super(sale_order_line, self).write(cr, uid, ids, values, context=context)
class sale_order(osv.osv):
    """Extend sale orders with online-quote support: access token,
    quotation template, optional products and payment requirement."""
    _inherit = 'sale.order'

    def _get_total(self, cr, uid, ids, name, arg, context=None):
        # Function field: order total *before* line discounts — the
        # discounted amount of each line is added back to its subtotal.
        res = {}
        for order in self.browse(cr, uid, ids, context=context):
            total = 0.0
            for line in order.order_line:
                total += line.price_subtotal + line.price_unit * ((line.discount or 0.0) / 100.0) * line.product_uom_qty
            res[order.id] = total
        return res

    _columns = {
        'access_token': fields.char('Security Token', required=True, copy=False),
        'template_id': fields.many2one('sale.quote.template', 'Quotation Template', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}),
        'website_description': fields.html('Description'),
        'options' : fields.one2many('sale.order.option', 'order_id', 'Optional Products Lines', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=True),
        'amount_undiscounted': fields.function(_get_total, string='Amount Before Discount', type="float", digits=0),
        'quote_viewed': fields.boolean('Quotation Viewed'),
        'require_payment': fields.selection([
            (0, 'Not mandatory on website quote validation'),
            (1, 'Immediate after website order validation')
        ], 'Payment', help="Require immediate payment by the customer when validating the order from the website quote"),
    }

    def _get_template_id(self, cr, uid, context=None):
        # Default template: the module's demo/default record, if the
        # XML id still resolves (ValueError when it has been deleted).
        try:
            template_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'website_quote', 'website_quote_template_default')[1]
        except ValueError:
            template_id = False
        return template_id

    _defaults = {
        'access_token': lambda self, cr, uid, ctx={}: str(uuid.uuid4()),
        'template_id' : _get_template_id,
    }

    def open_quotation(self, cr, uid, quote_id, context=None):
        # Mark the quote as viewed and redirect to its online page;
        # quote_id is the list of ids passed by the client.
        quote = self.browse(cr, uid, quote_id[0], context=context)
        self.write(cr, uid, quote_id[0], {'quote_viewed': True}, context=context)
        return {
            'type': 'ir.actions.act_url',
            'target': 'self',
            'url': '/quote/%s/%s' % (quote.id, quote.access_token)
        }

    def onchange_template_id(self, cr, uid, ids, template_id, partner=False, fiscal_position_id=False, pricelist_id=False, context=None):
        """Pre-fill the order (lines, options, description, validity
        date, note, payment requirement) from the selected template.

        Lines are replaced wholesale — the (5,) command clears the
        one2many before the new (0, 0, vals) commands create lines.
        """
        if not template_id:
            return {}
        if partner:
            # Translate template texts into the customer's language.
            context = dict(context or {})
            context['lang'] = self.pool['res.partner'].browse(cr, uid, partner, context).lang
        pricelist_obj = self.pool['product.pricelist']
        lines = [(5,)]
        quote_template = self.pool.get('sale.quote.template').browse(cr, uid, template_id, context=context)
        for line in quote_template.quote_line:
            # Reuse the standard sale.order.line onchange to compute
            # taxes and defaults for the product.
            res = self.pool.get('sale.order.line').product_id_change(cr, uid, False,
                False, line.product_id.id, line.product_uom_qty, line.product_uom_id.id, line.product_uom_qty,
                line.product_uom_id.id, line.name, partner, False, True, time.strftime('%Y-%m-%d'),
                False, fiscal_position_id, True, context)
            data = res.get('value', {})
            if pricelist_id:
                # Price from the order's pricelist, in the line's UoM.
                uom_context = context.copy()
                uom_context['uom'] = line.product_uom_id.id
                price = pricelist_obj.price_get(cr, uid, [pricelist_id], line.product_id.id, 1, context=uom_context)[pricelist_id]
            else:
                price = line.price_unit
            if 'tax_id' in data:
                # Convert the tax ids into a many2many "replace" command.
                data['tax_id'] = [(6, 0, data['tax_id'])]
            data.update({
                'name': line.name,
                'price_unit': price,
                'discount': line.discount,
                'product_uom_qty': line.product_uom_qty,
                'product_id': line.product_id.id,
                'product_uom': line.product_uom_id.id,
                'website_description': line.website_description,
                'state': 'draft',
            })
            lines.append((0, 0, data))
        options = []
        for option in quote_template.options:
            if pricelist_id:
                uom_context = context.copy()
                uom_context['uom'] = option.uom_id.id
                price = pricelist_obj.price_get(cr, uid, [pricelist_id], option.product_id.id, 1, context=uom_context)[pricelist_id]
            else:
                price = option.price_unit
            options.append((0, 0, {
                'product_id': option.product_id.id,
                'name': option.name,
                'quantity': option.quantity,
                'uom_id': option.uom_id.id,
                'price_unit': price,
                'discount': option.discount,
                'website_description': option.website_description,
            }))
        date = False
        if quote_template.number_of_days > 0:
            # Validity date computed from today plus the template duration.
            date = (datetime.datetime.now() + datetime.timedelta(quote_template.number_of_days)).strftime("%Y-%m-%d")
        data = {
            'order_line': lines,
            'website_description': quote_template.website_description,
            'options': options,
            'validity_date': date,
            'require_payment': quote_template.require_payment
        }
        if quote_template.note:
            data['note'] = quote_template.note
        return {'value': data}

    def recommended_products(self, cr, uid, ids, context=None):
        # Aggregate the recommended products of every ordered product's
        # template (used as suggestions on the online quote page).
        order_line = self.browse(cr, uid, ids[0], context=context).order_line
        product_pool = self.pool.get('product.product')
        products = []
        for line in order_line:
            products += line.product_id.product_tmpl_id.recommended_products(context=context)
        return products

    def get_access_action(self, cr, uid, id, context=None):
        """ Override method that generated the link to access the document. Instead
        of the classic form view, redirect to the online quote if exists. """
        quote = self.browse(cr, uid, id, context=context)
        if not quote.template_id:
            return super(sale_order, self).get_access_action(cr, uid, id, context=context)
        return {
            'type': 'ir.actions.act_url',
            'url': '/quote/%s' % id,
            'target': 'self',
            'res_id': id,
        }

    def _confirm_online_quote(self, cr, uid, order_id, tx, context=None):
        """ Payment callback: validate the order and write tx details in chatter """
        order = self.browse(cr, uid, order_id, context=context)
        # create draft invoice if transaction is ok
        if tx and tx.state == 'done':
            if order.state in ['draft', 'sent']:
                # Confirmation runs as SUPERUSER_ID: the public website
                # visitor has no right to trigger the workflow directly.
                self.signal_workflow(cr, SUPERUSER_ID, [order.id], 'manual_invoice', context=context)
            message = _('Order payed by %s. Transaction: %s. Amount: %s.') % (tx.partner_id.name, tx.acquirer_reference, tx.amount)
            self.message_post(cr, uid, order_id, body=message, type='comment', subtype='mt_comment', context=context)
            return True
        return False

    def create(self, cr, uid, values, context=None):
        # Orders created without an explicit template still get the
        # default template's contents applied (template values lose to
        # any value explicitly provided by the caller).
        if not values.get('template_id'):
            defaults = self.default_get(cr, uid, ['template_id'], context=context)
            template_values = self.onchange_template_id(cr, uid, [], defaults.get('template_id'), partner=values.get('partner_id'), fiscal_position_id=values.get('fiscal_position'), context=context).get('value', {})
            values = dict(template_values, **values)
        return super(sale_order, self).create(cr, uid, values, context=context)
class sale_quote_option(osv.osv):
    """An optional (suggested) product line attached to a quotation template."""
    _name = "sale.quote.option"
    _description = "Quotation Option"
    _columns = {
        'template_id': fields.many2one('sale.quote.template', 'Quotation Template Reference', ondelete='cascade', select=True, required=True),
        'name': fields.text('Description', required=True, translate=True),
        'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], required=True),
        'website_description': fields.html('Option Description', translate=True),
        'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price')),
        'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount')),
        'uom_id': fields.many2one('product.uom', 'Unit of Measure ', required=True),
        'quantity': fields.float('Quantity', required=True, digits_compute= dp.get_precision('Product UoS')),
    }
    _defaults = {
        'quantity': 1,
    }

    def on_change_product_id(self, cr, uid, ids, product, uom_id=None, context=None):
        """Onchange for the product: fill description, price, website text and UoM.

        :param product: id of the selected product.product (may be falsy)
        :param uom_id: optional UoM id; defaults to the product's own UoM
        :return: onchange dict with 'value' and a 'domain' restricting uom_id
        """
        vals, domain = {}, []
        if not product:
            # Guard against an empty selection; the original crashed on
            # browse(False). Consistent with sale.order.option behaviour.
            return {'value': vals, 'domain': domain}
        product_obj = self.pool.get('product.product').browse(cr, uid, product, context=context)
        name = product_obj.name
        if product_obj.description_sale:
            name += '\n' + product_obj.description_sale
        vals.update({
            'price_unit': product_obj.list_price,
            'website_description': product_obj.product_tmpl_id.quote_description,
            'name': name,
            'uom_id': uom_id or product_obj.uom_id.id,
        })
        uom_obj = self.pool.get('product.uom')
        if vals['uom_id'] != product_obj.uom_id.id:
            # Convert the list price into the selected unit of measure.
            # (Removed an unused `selected_uom` browse that only cost a query.)
            vals['price_unit'] = uom_obj._compute_price(cr, uid, product_obj.uom_id.id,
                                                        vals['price_unit'], vals['uom_id'])
        if not uom_id:
            # Only propose UoMs of the same category as the product's default.
            domain = {'uom_id': [('category_id', '=', product_obj.uom_id.category_id.id)]}
        return {'value': vals, 'domain': domain}

    def product_uom_change(self, cr, uid, ids, product, uom_id, context=None):
        """Onchange for the UoM: reset the price when the UoM is cleared."""
        if not uom_id:
            return {'value': {'price_unit': 0.0, 'uom_id': False}}
        return self.on_change_product_id(cr, uid, ids, product, uom_id=uom_id, context=context)
class sale_order_option(osv.osv):
    """An optional product proposed on a sale order built from a quote template."""
    _name = "sale.order.option"
    _description = "Sale Options"
    _columns = {
        'order_id': fields.many2one('sale.order', 'Sale Order Reference', ondelete='cascade', select=True),
        'line_id': fields.many2one('sale.order.line', on_delete="set null"),
        'name': fields.text('Description', required=True),
        'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)]),
        'website_description': fields.html('Line Description'),
        'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price')),
        'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount')),
        'uom_id': fields.many2one('product.uom', 'Unit of Measure ', required=True),
        'quantity': fields.float('Quantity', required=True,
            digits_compute= dp.get_precision('Product UoS')),
    }
    _defaults = {
        'quantity': 1,
    }

    # TODO master: to remove, replaced by onchange of the new api
    def on_change_product_id(self, cr, uid, ids, product, uom_id=None, context=None):
        """Old-API onchange: fill description, price and UoM from the product.

        Returns an empty dict when no product is selected.
        """
        vals, domain = {}, []
        if not product:
            return vals
        product_obj = self.pool.get('product.product').browse(cr, uid, product, context=context)
        name = product_obj.name
        if product_obj.description_sale:
            name += '\n' + product_obj.description_sale
        vals.update({
            'price_unit': product_obj.list_price,
            'website_description': product_obj and (product_obj.quote_description or product_obj.website_description),
            'name': name,
            'uom_id': uom_id or product_obj.uom_id.id,
        })
        uom_obj = self.pool.get('product.uom')
        if vals['uom_id'] != product_obj.uom_id.id:
            # Convert the list price into the selected unit of measure.
            # (Removed an unused `selected_uom` browse that only cost a query.)
            vals['price_unit'] = uom_obj._compute_price(cr, uid, product_obj.uom_id.id,
                                                        vals['price_unit'], vals['uom_id'])
        if not uom_id:
            domain = {'uom_id': [('category_id', '=', product_obj.uom_id.category_id.id)]}
        return {'value': vals, 'domain': domain}

    def product_uom_change(self, cr, uid, ids, product, uom_id, context=None):
        """Old-API onchange for the UoM: reset the price when the UoM is cleared."""
        context = context or {}
        if not uom_id:
            return {'value': {'price_unit': 0.0, 'uom_id': False}}
        return self.on_change_product_id(cr, uid, ids, product, uom_id=uom_id, context=context)

    @api.onchange('product_id')
    def _onchange_product_id(self):
        """New-API onchange: update description, price and UoM; apply the
        order's pricelist when one is set."""
        product = self.product_id.with_context(lang=self.order_id.partner_id.lang)
        self.price_unit = product.list_price
        self.website_description = product.quote_description or product.website_description
        self.name = product.name
        if product.description_sale:
            self.name += '\n' + product.description_sale
        self.uom_id = product.product_tmpl_id.uom_id
        if product and self.order_id.pricelist_id:
            partner_id = self.order_id.partner_id.id
            pricelist = self.order_id.pricelist_id.id
            self.price_unit = self.order_id.pricelist_id.price_get(product.id, self.quantity, partner_id)[pricelist]
class product_template(osv.Model):
    # Extend product.template with the rich-text descriptions shown on
    # online quotes (and the website, when website_sale is installed).
    _inherit = "product.template"
    _columns = {
        'website_description': fields.html('Description for the website'), # hack, if website_sale is not installed
        'quote_description': fields.html('Description for the quote'),
    }
|
LarsMichelsen/pmatic
|
refs/heads/master
|
ccu_pkg/python/lib/python2.7/encodings/mbcs.py
|
860
|
""" Python 'mbcs' Codec for Windows
Cloned by Mark Hammond (mhammond@skippinet.com.au) from ascii.py,
which was written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
# Import them explicitly to cause an ImportError
# on non-Windows systems
from codecs import mbcs_encode, mbcs_decode
# for IncrementalDecoder, IncrementalEncoder, ...
import codecs
### Codec APIs
encode = mbcs_encode
def decode(input, errors='strict'):
    # Decode using the Windows ANSI code page; the trailing True is the
    # `final` flag, telling mbcs_decode to consume the whole input at once.
    return mbcs_decode(input, errors, True)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # mbcs_encode returns (bytes, length_consumed); only the bytes are kept.
        return mbcs_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
    # Reuse mbcs_decode as the buffered decode primitive; the base class
    # handles partial multi-byte sequences across feed() calls.
    _buffer_decode = mbcs_decode
class StreamWriter(codecs.StreamWriter):
    # Stream writer using the raw mbcs_encode primitive.
    encode = mbcs_encode
class StreamReader(codecs.StreamReader):
    # Stream reader using the raw mbcs_decode primitive.
    decode = mbcs_decode
### encodings module API
def getregentry():
    # Registration hook called by the `encodings` package search function
    # when the 'mbcs' codec is looked up.
    return codecs.CodecInfo(
        name='mbcs',
        encode=encode,
        decode=decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
|
whtsky/Catsup
|
refs/heads/master
|
catsup/generator/renderer.py
|
1
|
import os
from jinja2 import Environment, FileSystemLoader, TemplateNotFound
from catsup.options import g
from catsup.utils import mkdir, static_url, url_for, urljoin
class Renderer(object):
    """Jinja2-backed renderer that writes generated pages into the output dir."""

    def __init__(self, templates_path, generator):
        self.env = Environment(
            loader=FileSystemLoader(templates_path), autoescape=False
        )
        config = generator.config
        # Expose site-wide objects and helpers to every template.
        self.env.globals.update(
            generator=generator,
            site=config.site,
            config=config.config,
            author=config.author,
            comment=config.comment,
            theme=config.theme.vars,
            g=g,
            pages=generator.pages,
            static_url=static_url,
            url_for=url_for,
        )
        catsup_filter_path = os.path.join(g.catsup_path, "templates", "filters.py")
        theme_filter_path = os.path.join(g.theme.path, "filters.py")
        self.load_filters_from_pyfile(catsup_filter_path)
        self.load_filters_from_pyfile(theme_filter_path)
        self.rendered_permalinks = []

    def load_filters_from_pyfile(self, path):
        """Execute *path* and register every name it defines as a Jinja filter.

        Missing files are silently ignored (theme filters are optional).
        """
        if not os.path.exists(path):
            return
        filters = {}
        # Fix: use a context manager so the file handle is closed
        # (the original `exec(open(path).read(), ...)` leaked it).
        with open(path) as pyfile:
            exec(pyfile.read(), {}, filters)
        self.env.filters.update(filters)

    def render(self, template, **kwargs):
        """Render *template* to a string, or return None if it does not exist."""
        try:
            return self.env.get_template(template).render(**kwargs)
        except TemplateNotFound:
            # Missing templates are not fatal: the page is simply skipped.
            # logger.warning("Template not found: %s" % template)
            pass

    def render_to(self, template, permalink, **kwargs):
        """Render *template* and write the result at the path for *permalink*."""
        html = self.render(template, **kwargs)
        if not html:
            return
        permalink, output_name = urljoin(g.base_url, permalink), permalink
        kwargs.setdefault("permalink", permalink)
        self.rendered_permalinks.append(permalink)
        if output_name.endswith("/") or "." not in output_name:
            # Directory-style URL: emit an index.html inside it.
            output_name = output_name.rstrip("/")
            output_name += "/index.html"
        output_path = os.path.join(g.output, output_name.lstrip("/"))
        output_path = output_path.encode("utf-8")
        mkdir(os.path.dirname(output_path))
        with open(output_path, "w") as f:
            f.write(html)

    def render_sitemap(self):
        """Write all rendered permalinks to sitemap.txt, one per line."""
        with open(os.path.join(g.output, "sitemap.txt"), "w") as f:
            f.write("\n".join(self.rendered_permalinks))
|
funkyfuture/cerberus
|
refs/heads/master
|
cerberus/tests/test_rule_items.py
|
2
|
from cerberus import errors
from cerberus.tests import assert_fail
def test_items(validator):
    """'items' rule: each list element is validated against the schema at its index."""
    field = 'a_list_of_values'
    value = ['a string', 'not an integer']
    # The second element violates the {'type': 'integer'} per-index schema.
    assert_fail(
        document={field: value},
        validator=validator,
        error=(
            field,
            (field, 'items'),
            errors.ITEMS,
            ({'type': ('string',)}, {'type': ('integer',)}),
        ),
        child_errors=[
            ((field, 1), (field, 'items', 1, 'type'), errors.TYPE, ('integer',))
        ],
    )
    # The rendered message for the failing element mentions the type constraint.
    assert (
        errors.BasicErrorHandler.messages[errors.TYPE.code].format(
            constraint=('integer',)
        )
        in validator.errors[field][-1][1]
    )
def test_items_with_extra_item():
    """'items' rule: a list longer than the schema fails with ITEMS_LENGTH."""
    field = 'a_list_of_values'
    assert_fail(
        document={field: ['a string', 10, 'an extra item']},
        error=(
            field,
            (field, 'items'),
            errors.ITEMS_LENGTH,
            ({'type': ('string',)}, {'type': ('integer',)}),
            (2, 3),  # schema expects 2 items, document has 3
        ),
    )
|
philanthropy-u/edx-platform
|
refs/heads/master
|
cms/djangoapps/maintenance/tests.py
|
4
|
"""
Tests for the maintenance app views.
"""
import json
import ddt
from django.conf import settings
from django.urls import reverse
from contentstore.management.commands.utils import get_course_versions
from student.tests.factories import AdminFactory, UserFactory
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from .views import COURSE_KEY_ERROR_MESSAGES, MAINTENANCE_VIEWS
# This list contains URLs of all maintenance app views.
MAINTENANCE_URLS = [reverse(view['url']) for view in MAINTENANCE_VIEWS.values()]
class TestMaintenanceIndex(ModuleStoreTestCase):
    """
    Tests for maintenance index view.
    """
    def setUp(self):
        # Log in as a global-staff (admin) user; the maintenance app is
        # restricted to global staff.
        super(TestMaintenanceIndex, self).setUp()
        self.user = AdminFactory()
        login_success = self.client.login(username=self.user.username, password='test')
        self.assertTrue(login_success)
        self.view_url = reverse('maintenance:maintenance_index')

    def test_maintenance_index(self):
        """
        Test that maintenance index view lists all the maintenance app views.
        """
        response = self.client.get(self.view_url)
        self.assertContains(response, 'Maintenance', status_code=200)
        # Check that all the expected links appear on the index page.
        for url in MAINTENANCE_URLS:
            self.assertContains(response, url, status_code=200)
@ddt.ddt
class MaintenanceViewTestCase(ModuleStoreTestCase):
    """
    Base class for maintenance view tests.

    Subclasses set ``view_url`` and get an admin login in setUp plus the
    ``verify_error_message`` helper.
    """
    # Subclasses must point this at the view under test.
    view_url = ''

    def setUp(self):
        super(MaintenanceViewTestCase, self).setUp()
        self.user = AdminFactory()
        login_success = self.client.login(username=self.user.username, password='test')
        self.assertTrue(login_success)

    def verify_error_message(self, data, error_message):
        """
        Verify the response contains error message.
        """
        # POST as an AJAX request; the views report errors inline with HTTP 200.
        response = self.client.post(self.view_url, data=data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertContains(response, error_message, status_code=200)

    def tearDown(self):
        """
        Reverse the setup.
        """
        self.client.logout()
        super(MaintenanceViewTestCase, self).tearDown()
@ddt.ddt
class MaintenanceViewAccessTests(MaintenanceViewTestCase):
    """
    Tests for access control of maintenance views.
    """
    @ddt.data(MAINTENANCE_URLS)
    @ddt.unpack
    def test_require_login(self, url):
        """
        Test that maintenance app requires user login.
        """
        # Log out then try to retrieve the page
        self.client.logout()
        response = self.client.get(url)
        # Expect a redirect to the login page
        redirect_url = '{login_url}?next={original_url}'.format(
            login_url=reverse('login'),
            original_url=url,
        )
        self.assertRedirects(response, redirect_url)

    @ddt.data(MAINTENANCE_URLS)
    @ddt.unpack
    def test_global_staff_access(self, url):
        """
        Test that all maintenance app views are accessible to global staff user.
        """
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    @ddt.data(MAINTENANCE_URLS)
    @ddt.unpack
    def test_non_global_staff_access(self, url):
        """
        Test that all maintenance app views are not accessible to non-global-staff user.
        """
        # A plain (non-staff) user should receive a 403 with an explanation.
        user = UserFactory(username='test', email='test@example.com', password='test')
        login_success = self.client.login(username=user.username, password='test')
        self.assertTrue(login_success)
        response = self.client.get(url)
        self.assertContains(
            response,
            u'Must be {platform_name} staff to perform this action.'.format(platform_name=settings.PLATFORM_NAME),
            status_code=403
        )
@ddt.ddt
class TestForcePublish(MaintenanceViewTestCase):
    """
    Tests for the force publish view.
    """
    def setUp(self):
        super(TestForcePublish, self).setUp()
        self.view_url = reverse('maintenance:force_publish_course')

    def setup_test_course(self):
        """
        Creates the course and add some changes to it.
        Returns:
            course: a course object
        """
        # Split-modulestore course: force publish only supports split courses.
        course = CourseFactory.create(default_store=ModuleStoreEnum.Type.split)
        # Add some changes to course
        chapter = ItemFactory.create(category='chapter', parent_location=course.location)
        self.store.create_child(
            self.user.id,
            chapter.location,
            'html',
            block_id='html_component'
        )
        # verify that course has changes.
        self.assertTrue(self.store.has_changes(self.store.get_item(course.location)))
        return course

    @ddt.data(
        ('', COURSE_KEY_ERROR_MESSAGES['empty_course_key']),
        ('edx', COURSE_KEY_ERROR_MESSAGES['invalid_course_key']),
        ('course-v1:e+d+X', COURSE_KEY_ERROR_MESSAGES['course_key_not_found']),
    )
    @ddt.unpack
    def test_invalid_course_key_messages(self, course_key, error_message):
        """
        Test all error messages for invalid course keys.
        """
        # validate that course key contains error message
        self.verify_error_message(
            data={'course-id': course_key},
            error_message=error_message
        )

    def test_mongo_course(self):
        """
        Test that we get a error message on old mongo courses.
        """
        # validate non split error message
        course = CourseFactory.create(default_store=ModuleStoreEnum.Type.mongo)
        self.verify_error_message(
            data={'course-id': unicode(course.id)},
            error_message='Force publishing course is not supported with old mongo courses.'
        )

    def test_mongo_course_with_split_course_key(self):
        """
        Test that we get an error message `course_key_not_found` for a provided split course key
        if we already have an old mongo course.
        """
        # validate non split error message
        course = CourseFactory.create(org='e', number='d', run='X', default_store=ModuleStoreEnum.Type.mongo)
        self.verify_error_message(
            data={'course-id': unicode(course.id)},
            error_message='Force publishing course is not supported with old mongo courses.'
        )
        # Now search for the course key in split version.
        self.verify_error_message(
            data={'course-id': 'course-v1:e+d+X'},
            error_message=COURSE_KEY_ERROR_MESSAGES['course_key_not_found']
        )

    def test_already_published(self):
        """
        Test that when a course is forcefully publish, we get a 'course is already published' message.
        """
        course = self.setup_test_course()
        # publish the course
        source_store = modulestore()._get_modulestore_for_courselike(course.id)  # pylint: disable=protected-access
        source_store.force_publish_course(course.id, self.user.id, commit=True)
        # now course is published, we should get `already published course` error.
        self.verify_error_message(
            data={'course-id': unicode(course.id)},
            error_message='Course is already in published state.'
        )

    def verify_versions_are_different(self, course):
        """
        Verify draft and published versions point to different locations.
        Arguments:
            course (object): a course object.
        """
        # get draft and publish branch versions
        versions = get_course_versions(unicode(course.id))
        # verify that draft and publish point to different versions
        self.assertNotEqual(versions['draft-branch'], versions['published-branch'])

    def get_force_publish_course_response(self, course):
        """
        Get force publish the course response.
        Arguments:
            course (object): a course object.
        Returns:
            response : response from force publish post view.
        """
        # Verify versions point to different locations initially
        self.verify_versions_are_different(course)
        # force publish course view
        data = {
            'course-id': unicode(course.id)
        }
        response = self.client.post(self.view_url, data=data, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        response_data = json.loads(response.content)
        return response_data

    def test_force_publish_dry_run(self):
        """
        Test that dry run does not publishes the course but shows possible outcome if force published is executed.
        """
        course = self.setup_test_course()
        response = self.get_force_publish_course_response(course)
        self.assertIn('current_versions', response)
        # verify that course still has changes as we just dry ran force publish course.
        self.assertTrue(self.store.has_changes(self.store.get_item(course.location)))
        # verify that both branch versions are still different
        self.verify_versions_are_different(course)
|
mancoast/CPythonPyc_test
|
refs/heads/master
|
cpython/273_test_pprint.py
|
72
|
import pprint
import test.test_support
import unittest
import test.test_set
try:
uni = unicode
except NameError:
def uni(x):
return x
# list, tuple and dict subclasses that do or don't overwrite __repr__
# list subclass that does NOT override __repr__.
class list2(list):
    pass
class list3(list):
    # Explicitly delegates __repr__ to list: exercises pprint's
    # "overridden repr" path while producing identical output.
    def __repr__(self):
        return list.__repr__(self)
# tuple subclass that does NOT override __repr__.
class tuple2(tuple):
    pass
class tuple3(tuple):
    # Explicitly delegates __repr__ to tuple (see list3).
    def __repr__(self):
        return tuple.__repr__(self)
# dict subclass that does NOT override __repr__.
class dict2(dict):
    pass
class dict3(dict):
    # Explicitly delegates __repr__ to dict (see list3).
    def __repr__(self):
        return dict.__repr__(self)
class QueryTestCase(unittest.TestCase):
def setUp(self):
self.a = range(100)
self.b = range(200)
self.a[-12] = self.b
def test_basic(self):
# Verify .isrecursive() and .isreadable() w/o recursion
pp = pprint.PrettyPrinter()
for safe in (2, 2.0, 2j, "abc", [3], (2,2), {3: 3}, uni("yaddayadda"),
self.a, self.b):
# module-level convenience functions
self.assertFalse(pprint.isrecursive(safe),
"expected not isrecursive for %r" % (safe,))
self.assertTrue(pprint.isreadable(safe),
"expected isreadable for %r" % (safe,))
# PrettyPrinter methods
self.assertFalse(pp.isrecursive(safe),
"expected not isrecursive for %r" % (safe,))
self.assertTrue(pp.isreadable(safe),
"expected isreadable for %r" % (safe,))
def test_knotted(self):
# Verify .isrecursive() and .isreadable() w/ recursion
# Tie a knot.
self.b[67] = self.a
# Messy dict.
self.d = {}
self.d[0] = self.d[1] = self.d[2] = self.d
pp = pprint.PrettyPrinter()
for icky in self.a, self.b, self.d, (self.d, self.d):
self.assertTrue(pprint.isrecursive(icky), "expected isrecursive")
self.assertFalse(pprint.isreadable(icky), "expected not isreadable")
self.assertTrue(pp.isrecursive(icky), "expected isrecursive")
self.assertFalse(pp.isreadable(icky), "expected not isreadable")
# Break the cycles.
self.d.clear()
del self.a[:]
del self.b[:]
for safe in self.a, self.b, self.d, (self.d, self.d):
# module-level convenience functions
self.assertFalse(pprint.isrecursive(safe),
"expected not isrecursive for %r" % (safe,))
self.assertTrue(pprint.isreadable(safe),
"expected isreadable for %r" % (safe,))
# PrettyPrinter methods
self.assertFalse(pp.isrecursive(safe),
"expected not isrecursive for %r" % (safe,))
self.assertTrue(pp.isreadable(safe),
"expected isreadable for %r" % (safe,))
def test_unreadable(self):
# Not recursive but not readable anyway
pp = pprint.PrettyPrinter()
for unreadable in type(3), pprint, pprint.isrecursive:
# module-level convenience functions
self.assertFalse(pprint.isrecursive(unreadable),
"expected not isrecursive for %r" % (unreadable,))
self.assertFalse(pprint.isreadable(unreadable),
"expected not isreadable for %r" % (unreadable,))
# PrettyPrinter methods
self.assertFalse(pp.isrecursive(unreadable),
"expected not isrecursive for %r" % (unreadable,))
self.assertFalse(pp.isreadable(unreadable),
"expected not isreadable for %r" % (unreadable,))
def test_same_as_repr(self):
# Simple objects, small containers and classes that overwrite __repr__
# For those the result should be the same as repr().
# Ahem. The docs don't say anything about that -- this appears to
# be testing an implementation quirk. Starting in Python 2.5, it's
# not true for dicts: pprint always sorts dicts by key now; before,
# it sorted a dict display if and only if the display required
# multiple lines. For that reason, dicts with more than one element
# aren't tested here.
for simple in (0, 0L, 0+0j, 0.0, "", uni(""),
(), tuple2(), tuple3(),
[], list2(), list3(),
{}, dict2(), dict3(),
self.assertTrue, pprint,
-6, -6L, -6-6j, -1.5, "x", uni("x"), (3,), [3], {3: 6},
(1,2), [3,4], {5: 6},
tuple2((1,2)), tuple3((1,2)), tuple3(range(100)),
[3,4], list2([3,4]), list3([3,4]), list3(range(100)),
dict2({5: 6}), dict3({5: 6}),
range(10, -11, -1)
):
native = repr(simple)
for function in "pformat", "saferepr":
f = getattr(pprint, function)
got = f(simple)
self.assertEqual(native, got,
"expected %s got %s from pprint.%s" %
(native, got, function))
def test_basic_line_wrap(self):
# verify basic line-wrapping operation
o = {'RPM_cal': 0,
'RPM_cal2': 48059,
'Speed_cal': 0,
'controldesk_runtime_us': 0,
'main_code_runtime_us': 0,
'read_io_runtime_us': 0,
'write_io_runtime_us': 43690}
exp = """\
{'RPM_cal': 0,
'RPM_cal2': 48059,
'Speed_cal': 0,
'controldesk_runtime_us': 0,
'main_code_runtime_us': 0,
'read_io_runtime_us': 0,
'write_io_runtime_us': 43690}"""
for type in [dict, dict2]:
self.assertEqual(pprint.pformat(type(o)), exp)
o = range(100)
exp = '[%s]' % ',\n '.join(map(str, o))
for type in [list, list2]:
self.assertEqual(pprint.pformat(type(o)), exp)
o = tuple(range(100))
exp = '(%s)' % ',\n '.join(map(str, o))
for type in [tuple, tuple2]:
self.assertEqual(pprint.pformat(type(o)), exp)
# indent parameter
o = range(100)
exp = '[ %s]' % ',\n '.join(map(str, o))
for type in [list, list2]:
self.assertEqual(pprint.pformat(type(o), indent=4), exp)
def test_nested_indentations(self):
o1 = list(range(10))
o2 = dict(first=1, second=2, third=3)
o = [o1, o2]
expected = """\
[ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
{ 'first': 1,
'second': 2,
'third': 3}]"""
self.assertEqual(pprint.pformat(o, indent=4, width=42), expected)
def test_sorted_dict(self):
# Starting in Python 2.5, pprint sorts dict displays by key regardless
# of how small the dictionary may be.
# Before the change, on 32-bit Windows pformat() gave order
# 'a', 'c', 'b' here, so this test failed.
d = {'a': 1, 'b': 1, 'c': 1}
self.assertEqual(pprint.pformat(d), "{'a': 1, 'b': 1, 'c': 1}")
self.assertEqual(pprint.pformat([d, d]),
"[{'a': 1, 'b': 1, 'c': 1}, {'a': 1, 'b': 1, 'c': 1}]")
# The next one is kind of goofy. The sorted order depends on the
# alphabetic order of type names: "int" < "str" < "tuple". Before
# Python 2.5, this was in the test_same_as_repr() test. It's worth
# keeping around for now because it's one of few tests of pprint
# against a crazy mix of types.
self.assertEqual(pprint.pformat({"xy\tab\n": (3,), 5: [[]], (): {}}),
r"{5: [[]], 'xy\tab\n': (3,), (): {}}")
def test_subclassing(self):
o = {'names with spaces': 'should be presented using repr()',
'others.should.not.be': 'like.this'}
exp = """\
{'names with spaces': 'should be presented using repr()',
others.should.not.be: like.this}"""
self.assertEqual(DottedPrettyPrinter().pformat(o), exp)
def test_set_reprs(self):
self.assertEqual(pprint.pformat(set()), 'set()')
self.assertEqual(pprint.pformat(set(range(3))), 'set([0, 1, 2])')
self.assertEqual(pprint.pformat(frozenset()), 'frozenset()')
self.assertEqual(pprint.pformat(frozenset(range(3))), 'frozenset([0, 1, 2])')
cube_repr_tgt = """\
{frozenset([]): frozenset([frozenset([2]), frozenset([0]), frozenset([1])]),
frozenset([0]): frozenset([frozenset(),
frozenset([0, 2]),
frozenset([0, 1])]),
frozenset([1]): frozenset([frozenset(),
frozenset([1, 2]),
frozenset([0, 1])]),
frozenset([2]): frozenset([frozenset(),
frozenset([1, 2]),
frozenset([0, 2])]),
frozenset([1, 2]): frozenset([frozenset([2]),
frozenset([1]),
frozenset([0, 1, 2])]),
frozenset([0, 2]): frozenset([frozenset([2]),
frozenset([0]),
frozenset([0, 1, 2])]),
frozenset([0, 1]): frozenset([frozenset([0]),
frozenset([1]),
frozenset([0, 1, 2])]),
frozenset([0, 1, 2]): frozenset([frozenset([1, 2]),
frozenset([0, 2]),
frozenset([0, 1])])}"""
cube = test.test_set.cube(3)
self.assertEqual(pprint.pformat(cube), cube_repr_tgt)
cubo_repr_tgt = """\
{frozenset([frozenset([0, 2]), frozenset([0])]): frozenset([frozenset([frozenset([0,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0]),
frozenset([0,
1])]),
frozenset([frozenset(),
frozenset([0])]),
frozenset([frozenset([2]),
frozenset([0,
2])])]),
frozenset([frozenset([0, 1]), frozenset([1])]): frozenset([frozenset([frozenset([0,
1]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0]),
frozenset([0,
1])]),
frozenset([frozenset([1]),
frozenset([1,
2])]),
frozenset([frozenset(),
frozenset([1])])]),
frozenset([frozenset([1, 2]), frozenset([1])]): frozenset([frozenset([frozenset([1,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([2]),
frozenset([1,
2])]),
frozenset([frozenset(),
frozenset([1])]),
frozenset([frozenset([1]),
frozenset([0,
1])])]),
frozenset([frozenset([1, 2]), frozenset([2])]): frozenset([frozenset([frozenset([1,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([1]),
frozenset([1,
2])]),
frozenset([frozenset([2]),
frozenset([0,
2])]),
frozenset([frozenset(),
frozenset([2])])]),
frozenset([frozenset([]), frozenset([0])]): frozenset([frozenset([frozenset([0]),
frozenset([0,
1])]),
frozenset([frozenset([0]),
frozenset([0,
2])]),
frozenset([frozenset(),
frozenset([1])]),
frozenset([frozenset(),
frozenset([2])])]),
frozenset([frozenset([]), frozenset([1])]): frozenset([frozenset([frozenset(),
frozenset([0])]),
frozenset([frozenset([1]),
frozenset([1,
2])]),
frozenset([frozenset(),
frozenset([2])]),
frozenset([frozenset([1]),
frozenset([0,
1])])]),
frozenset([frozenset([2]), frozenset([])]): frozenset([frozenset([frozenset([2]),
frozenset([1,
2])]),
frozenset([frozenset(),
frozenset([0])]),
frozenset([frozenset(),
frozenset([1])]),
frozenset([frozenset([2]),
frozenset([0,
2])])]),
frozenset([frozenset([0, 1, 2]), frozenset([0, 1])]): frozenset([frozenset([frozenset([1,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0]),
frozenset([0,
1])]),
frozenset([frozenset([1]),
frozenset([0,
1])])]),
frozenset([frozenset([0]), frozenset([0, 1])]): frozenset([frozenset([frozenset(),
frozenset([0])]),
frozenset([frozenset([0,
1]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0]),
frozenset([0,
2])]),
frozenset([frozenset([1]),
frozenset([0,
1])])]),
frozenset([frozenset([2]), frozenset([0, 2])]): frozenset([frozenset([frozenset([0,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([2]),
frozenset([1,
2])]),
frozenset([frozenset([0]),
frozenset([0,
2])]),
frozenset([frozenset(),
frozenset([2])])]),
frozenset([frozenset([0, 1, 2]), frozenset([0, 2])]): frozenset([frozenset([frozenset([1,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0,
1]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0]),
frozenset([0,
2])]),
frozenset([frozenset([2]),
frozenset([0,
2])])]),
frozenset([frozenset([1, 2]), frozenset([0, 1, 2])]): frozenset([frozenset([frozenset([0,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0,
1]),
frozenset([0,
1,
2])]),
frozenset([frozenset([2]),
frozenset([1,
2])]),
frozenset([frozenset([1]),
frozenset([1,
2])])])}"""
cubo = test.test_set.linegraph(cube)
self.assertEqual(pprint.pformat(cubo), cubo_repr_tgt)
def test_depth(self):
nested_tuple = (1, (2, (3, (4, (5, 6)))))
nested_dict = {1: {2: {3: {4: {5: {6: 6}}}}}}
nested_list = [1, [2, [3, [4, [5, [6, []]]]]]]
self.assertEqual(pprint.pformat(nested_tuple), repr(nested_tuple))
self.assertEqual(pprint.pformat(nested_dict), repr(nested_dict))
self.assertEqual(pprint.pformat(nested_list), repr(nested_list))
lv1_tuple = '(1, (...))'
lv1_dict = '{1: {...}}'
lv1_list = '[1, [...]]'
self.assertEqual(pprint.pformat(nested_tuple, depth=1), lv1_tuple)
self.assertEqual(pprint.pformat(nested_dict, depth=1), lv1_dict)
self.assertEqual(pprint.pformat(nested_list, depth=1), lv1_list)
class DottedPrettyPrinter(pprint.PrettyPrinter):
def format(self, object, context, maxlevels, level):
if isinstance(object, str):
if ' ' in object:
return repr(object), 1, 0
else:
return object, 0, 0
else:
return pprint.PrettyPrinter.format(
self, object, context, maxlevels, level)
def test_main():
    # Run the suite via the Python 2 test.test_support harness.
    test.test_support.run_unittest(QueryTestCase)
if __name__ == "__main__":
test_main()
|
sazlin/data-structures
|
refs/heads/master
|
quick_sort.py
|
1
|
def quick_sort(values):
    """Return a new sorted list using a three-way quicksort.

    Improvements over the original:
    - elements equal to the pivot are grouped in one pass, so duplicate-heavy
      input no longer relies on the "pop one item into more_list" hack and
      pivot duplicates are never re-sorted recursively;
    - the input list is never mutated.

    :param values: list of mutually comparable items (may be empty)
    :return: new sorted list
    """
    # Lists of length 0 or 1 are already sorted.
    if len(values) < 2:
        return list(values)
    pivot = values[0]
    less_list = [x for x in values if x < pivot]
    same_list = [x for x in values if x == pivot]  # keep track of dupes
    more_list = [x for x in values if x > pivot]
    # The pivot group is already "sorted"; only the strict partitions recurse,
    # and both are strictly smaller than `values`, guaranteeing termination.
    return quick_sort(less_list) + same_list + quick_sort(more_list)
if __name__ == '__main__':
    import timeit
    # Python 2 benchmark script (print statement, xrange).
    # Worst case: reverse-sorted input with a first-element pivot degrades
    # the partition to one side each level.
    print "Quick Sort | Worst Case |",\
        timeit.timeit(
            setup="""
from quick_sort import quick_sort
worst_case_values = [i for i in xrange(100,1,-1)]
""",
            stmt="quick_sort(worst_case_values)",
            number=100)
    # Best case: already-sorted ascending input.
    print "Quick Sort | Best Case |",\
        timeit.timeit(
            setup="""
from quick_sort import quick_sort
best_case_values = [i for i in xrange(1,100,1)]
""",
            stmt="quick_sort(best_case_values)",
            number=100)
|
fritsvanveen/QGIS
|
refs/heads/master
|
python/ext-libs/future/xmlrpc/__init__.py
|
109
|
from __future__ import absolute_import
import sys
# Guard: this shim package must only be importable on Python 2. On Python 3
# the stdlib provides `xmlrpc` directly, so reaching this module there means
# the interpreter is running from the source tree or the install is broken.
if sys.version_info[0] < 3:
    pass
else:
    raise ImportError('This package should not be accessible on Python 3. '
                      'Either you are trying to run from the python-future src folder '
                      'or your installation of python-future is corrupted.')
|
ojengwa/oh-mainline
|
refs/heads/master
|
vendor/packages/celery/examples/eventlet/bulk_task_producer.py
|
19
|
from __future__ import with_statement
from eventlet import spawn_n, monkey_patch, Timeout
from eventlet.queue import LightQueue
from eventlet.event import Event
from celery import current_app
monkey_patch()
class Receipt(object):
    """Holds the result of one queued task and signals waiters when ready."""
    # Set by finished(); None until the task result arrives.
    result = None

    def __init__(self, callback=None):
        # Bug fix: the original assigned ``None`` instead of the supplied
        # ``callback``, so completion callbacks were silently dropped.
        self.callback = callback
        self.ready = Event()

    def finished(self, result):
        """Record *result*, invoke the callback (if any) and wake waiters."""
        self.result = result
        if self.callback:
            self.callback(result)
        self.ready.send()

    def wait(self, timeout=None):
        """Block until finished() is called; raise eventlet.Timeout after *timeout* seconds."""
        with Timeout(timeout):
            return self.ready.wait()
class ProducerPool(object):
    """Pool of eventlet greenlets that publish tasks from a shared queue.

    apply_async() enqueues work and returns a Receipt; ``size`` producer
    greenlets drain the queue, each over its own broker connection.
    """
    Receipt = Receipt

    def __init__(self, size=20):
        # size: number of producer greenlets to spawn on first use.
        self.size = size
        self.inqueue = LightQueue()
        self._running = None
        self._producers = None

    def apply_async(self, task, args, kwargs, callback=None, **options):
        """Queue *task* for publishing and return a Receipt for its result."""
        # Lazily start the producer greenlets on the first call.
        if self._running is None:
            self._running = spawn_n(self._run)
        receipt = self.Receipt(callback)
        self.inqueue.put((task, args, kwargs, options, receipt))
        return receipt

    def _run(self):
        # Spawn the fixed-size set of producer greenlets.
        self._producers = [spawn_n(self._producer)
                            for _ in xrange(self.size)]

    def _producer(self):
        # Each producer owns one broker connection/publisher and loops forever,
        # publishing queued tasks and resolving their receipts.
        connection = current_app.broker_connection()
        publisher = current_app.amqp.TaskPublisher(connection)
        inqueue = self.inqueue
        while 1:
            task, args, kwargs, options, receipt = inqueue.get()
            result = task.apply_async(args, kwargs,
                                      publisher=publisher,
                                      **options)
            receipt.finished(result)
|
alexryndin/ambari
|
refs/heads/branch-adh-1.5
|
ambari-common/src/main/python/resource_management/libraries/functions/simulate_perf_cluster_alert_behaviour.py
|
5
|
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__all__ = ["simulate_perf_cluster_alert_behaviour"]
import logging
import random
import time
from datetime import datetime
from resource_management.core.exceptions import Fail
# Alert result codes recognised by the Ambari alert framework.
RESULT_CODE_OK = 'OK'
RESULT_CODE_CRITICAL = 'CRITICAL'
RESULT_CODE_UNKNOWN = 'UNKNOWN'
# Labels reported alongside the corresponding result codes.
OK_MESSAGE = 'Ok'
FAIL_MESSAGE = 'Expected Fail'
UNKNOWN_MESSAGE = 'Expected Unknown'
logger = logging.getLogger('ambari_alerts')
# Maps a configured outcome string ("true"/"false"/"none") to the
# [result code, label] pair the simulated alert should report.
return_values_map = {"true":[RESULT_CODE_OK, OK_MESSAGE], "false":[RESULT_CODE_CRITICAL, FAIL_MESSAGE],
                     "none":[RESULT_CODE_UNKNOWN, UNKNOWN_MESSAGE]}
def simulate_perf_cluster_alert_behaviour(alert_behaviour_properties, configurations):
  """
  Simulates one of three alert behaviours for PERF clusters and returns a
  tuple containing the result code and a list with one pre-formatted
  result label.

  Arguments:
  alert_behaviour_properties (dictionary): maps logical property names
    ("alert_behaviour_type", "alert_success_percentage", ...) to the
    configuration keys under which their values are stored
  configurations (dictionary): a mapping of configuration key to value

  Raises:
  Fail: when a behaviour type is configured but the supporting property
    it requires is missing from configurations.
  """
  alert_behaviour_type = None
  alert_behaviour_type_key = alert_behaviour_properties["alert_behaviour_type"]
  if alert_behaviour_type_key in configurations:
    alert_behaviour_type = configurations[alert_behaviour_type_key].lower()

  if alert_behaviour_type == "percentage":
    # Succeed randomly with the configured probability (0-100).
    alert_success_percentage = None
    alert_success_percentage_key = alert_behaviour_properties["alert_success_percentage"]
    if alert_success_percentage_key in configurations:
      alert_success_percentage = configurations[alert_success_percentage_key]
    if alert_success_percentage:
      random_number = random.uniform(0, 100)
      if random_number <= int(alert_success_percentage):
        return (RESULT_CODE_OK, [OK_MESSAGE])
      else:
        return (RESULT_CODE_CRITICAL, [FAIL_MESSAGE])
    else:
      raise Fail("Percentage behaviour was set but alert.success.percentage was not set!")
  elif alert_behaviour_type == "timeout":
    # Sleep for the configured number of seconds, then report the
    # configured outcome ("true"/"false"/"none").
    alert_timeout_return_value = None
    alert_timeout_secs = None
    alert_timeout_return_value_key = alert_behaviour_properties["alert_timeout_return_value"]
    alert_timeout_secs_key = alert_behaviour_properties["alert_timeout_secs"]
    if alert_timeout_return_value_key in configurations:
      alert_timeout_return_value = configurations[alert_timeout_return_value_key].lower()
    if alert_timeout_secs_key in configurations:
      alert_timeout_secs = configurations[alert_timeout_secs_key]
    if alert_timeout_return_value and alert_timeout_secs:
      logger.info("Sleeping for {0} seconds".format(alert_timeout_secs))
      # BUGFIX: use the function-call form of print so this module parses
      # on Python 3 as well as Python 2 (single argument, identical output).
      print("Sleeping for {0} seconds".format(alert_timeout_secs))
      time.sleep(int(alert_timeout_secs))
      return (return_values_map[alert_timeout_return_value][0],
              [return_values_map[alert_timeout_return_value][1]])
    else:
      raise Fail("Timeout behaviour was set but alert.timeout.return.value/alert.timeout.secs were not set!")
  elif alert_behaviour_type == "flip":
    # Alternate OK/CRITICAL every alert.flip.interval.mins minutes.
    alert_flip_interval_mins = None
    alert_flip_interval_mins_key = alert_behaviour_properties["alert_flip_interval_mins"]
    if alert_flip_interval_mins_key in configurations:
      alert_flip_interval_mins = configurations[alert_flip_interval_mins_key]
    if alert_flip_interval_mins:
      curr_time = datetime.utcnow()
      # BUGFIX: use floor division so the flip computation behaves the same
      # under Python 3; plain '/' there yields a float, changing the parity
      # test's operands (floor division is what Python 2's '/' did on ints).
      return_value = ((curr_time.minute // int(alert_flip_interval_mins)) % 2) == 0
      return (return_values_map[str(return_value).lower()][0],
              [return_values_map[str(return_value).lower()][1]])
    else:
      raise Fail("Flip behaviour was set but alert.flip.interval.mins was not set!")

  # No (or unrecognised) behaviour type configured: report OK.
  result_code = RESULT_CODE_OK
  label = OK_MESSAGE
  return (result_code, [label])
|
amenonsen/ansible
|
refs/heads/devel
|
lib/ansible/plugins/doc_fragments/nso.py
|
38
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Cisco and/or its affiliates.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
    """Shared documentation fragment for the Cisco NSO modules, listing
    the common connection options (url, username, password, timeout,
    validate_certs)."""

    # NOTE: Ansible's documentation tooling reads this string at runtime;
    # it must remain valid YAML and must not be reformatted.
    DOCUMENTATION = r'''
options:
  url:
    description: NSO JSON-RPC URL, http://localhost:8080/jsonrpc
    type: str
    required: true
  username:
    description: NSO username
    type: str
    required: true
  password:
    description: NSO password
    type: str
    required: true
  timeout:
    description: JSON-RPC request timeout in seconds
    type: int
    default: 300
    version_added: "2.6"
  validate_certs:
    description: When set to true, validates the SSL certificate of NSO when
        using SSL
    type: bool
    required: false
    default: false
'''
|
jshufelt/volatility
|
refs/heads/master
|
volatility/plugins/overlays/windows/win7_sp1_x64_vtypes.py
|
58
|
ntkrnlmp_types = {
'_ARBITER_INTERFACE' : [ 0x30, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
'ArbiterHandler' : [ 0x20, ['pointer64', ['void']]],
'Flags' : [ 0x28, ['unsigned long']],
} ],
'_KAPC_STATE' : [ 0x30, {
'ApcListHead' : [ 0x0, ['array', 2, ['_LIST_ENTRY']]],
'Process' : [ 0x20, ['pointer64', ['_KPROCESS']]],
'KernelApcInProgress' : [ 0x28, ['unsigned char']],
'KernelApcPending' : [ 0x29, ['unsigned char']],
'UserApcPending' : [ 0x2a, ['unsigned char']],
} ],
'_IA64_LOADER_BLOCK' : [ 0x4, {
'PlaceHolder' : [ 0x0, ['unsigned long']],
} ],
'_IA64_DBGKD_CONTROL_SET' : [ 0x14, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long long']],
} ],
'_DEVICE_RELATIONS' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'Objects' : [ 0x8, ['array', 1, ['pointer64', ['_DEVICE_OBJECT']]]],
} ],
'_IMAGE_ROM_OPTIONAL_HEADER' : [ 0x38, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'BaseOfData' : [ 0x18, ['unsigned long']],
'BaseOfBss' : [ 0x1c, ['unsigned long']],
'GprMask' : [ 0x20, ['unsigned long']],
'CprMask' : [ 0x24, ['array', 4, ['unsigned long']]],
'GpValue' : [ 0x34, ['unsigned long']],
} ],
'_ALPC_COMPLETION_LIST_HEADER' : [ 0x300, {
'StartMagic' : [ 0x0, ['unsigned long long']],
'TotalSize' : [ 0x8, ['unsigned long']],
'ListOffset' : [ 0xc, ['unsigned long']],
'ListSize' : [ 0x10, ['unsigned long']],
'BitmapOffset' : [ 0x14, ['unsigned long']],
'BitmapSize' : [ 0x18, ['unsigned long']],
'DataOffset' : [ 0x1c, ['unsigned long']],
'DataSize' : [ 0x20, ['unsigned long']],
'AttributeFlags' : [ 0x24, ['unsigned long']],
'AttributeSize' : [ 0x28, ['unsigned long']],
'State' : [ 0x80, ['_ALPC_COMPLETION_LIST_STATE']],
'LastMessageId' : [ 0x88, ['unsigned long']],
'LastCallbackId' : [ 0x8c, ['unsigned long']],
'PostCount' : [ 0x100, ['unsigned long']],
'ReturnCount' : [ 0x180, ['unsigned long']],
'LogSequenceNumber' : [ 0x200, ['unsigned long']],
'UserLock' : [ 0x280, ['_RTL_SRWLOCK']],
'EndMagic' : [ 0x288, ['unsigned long long']],
} ],
'_IMAGE_DEBUG_DIRECTORY' : [ 0x1c, {
'Characteristics' : [ 0x0, ['unsigned long']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'MajorVersion' : [ 0x8, ['unsigned short']],
'MinorVersion' : [ 0xa, ['unsigned short']],
'Type' : [ 0xc, ['unsigned long']],
'SizeOfData' : [ 0x10, ['unsigned long']],
'AddressOfRawData' : [ 0x14, ['unsigned long']],
'PointerToRawData' : [ 0x18, ['unsigned long']],
} ],
'_ETW_WMITRACE_WORK' : [ 0xf0, {
'LoggerId' : [ 0x0, ['unsigned long']],
'LoggerName' : [ 0x8, ['array', 65, ['unsigned char']]],
'FileName' : [ 0x49, ['array', 129, ['unsigned char']]],
'MaximumFileSize' : [ 0xcc, ['unsigned long']],
'MinBuffers' : [ 0xd0, ['unsigned long']],
'MaxBuffers' : [ 0xd4, ['unsigned long']],
'BufferSize' : [ 0xd8, ['unsigned long']],
'Mode' : [ 0xdc, ['unsigned long']],
'FlushTimer' : [ 0xe0, ['unsigned long']],
'MatchAny' : [ 0x8, ['unsigned long long']],
'MatchAll' : [ 0x10, ['unsigned long long']],
'EnableProperty' : [ 0x18, ['unsigned long']],
'Guid' : [ 0x1c, ['_GUID']],
'Level' : [ 0x2c, ['unsigned char']],
'Status' : [ 0xe8, ['long']],
} ],
'_DEVICE_MAP' : [ 0x40, {
'DosDevicesDirectory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'GlobalDosDevicesDirectory' : [ 0x8, ['pointer64', ['_OBJECT_DIRECTORY']]],
'DosDevicesDirectoryHandle' : [ 0x10, ['pointer64', ['void']]],
'ReferenceCount' : [ 0x18, ['unsigned long']],
'DriveMap' : [ 0x1c, ['unsigned long']],
'DriveType' : [ 0x20, ['array', 32, ['unsigned char']]],
} ],
'_HEAP_DEBUGGING_INFORMATION' : [ 0x30, {
'InterceptorFunction' : [ 0x0, ['pointer64', ['void']]],
'InterceptorValue' : [ 0x8, ['unsigned short']],
'ExtendedOptions' : [ 0xc, ['unsigned long']],
'StackTraceDepth' : [ 0x10, ['unsigned long']],
'MinTotalBlockSize' : [ 0x18, ['unsigned long long']],
'MaxTotalBlockSize' : [ 0x20, ['unsigned long long']],
'HeapLeakEnumerationRoutine' : [ 0x28, ['pointer64', ['void']]],
} ],
'_IO_RESOURCE_LIST' : [ 0x28, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'Descriptors' : [ 0x8, ['array', 1, ['_IO_RESOURCE_DESCRIPTOR']]],
} ],
'_MMBANKED_SECTION' : [ 0x38, {
'BasePhysicalPage' : [ 0x0, ['unsigned long long']],
'BasedPte' : [ 0x8, ['pointer64', ['_MMPTE']]],
'BankSize' : [ 0x10, ['unsigned long']],
'BankShift' : [ 0x14, ['unsigned long']],
'BankedRoutine' : [ 0x18, ['pointer64', ['void']]],
'Context' : [ 0x20, ['pointer64', ['void']]],
'CurrentMappedPte' : [ 0x28, ['pointer64', ['_MMPTE']]],
'BankTemplate' : [ 0x30, ['array', 1, ['_MMPTE']]],
} ],
'_WHEA_ERROR_RECORD_HEADER_FLAGS' : [ 0x4, {
'Recovered' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PreviousError' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Simulated' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_XSAVE_AREA_HEADER' : [ 0x40, {
'Mask' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['array', 7, ['unsigned long long']]],
} ],
'_HEAP_VIRTUAL_ALLOC_ENTRY' : [ 0x40, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtraStuff' : [ 0x10, ['_HEAP_ENTRY_EXTRA']],
'CommitSize' : [ 0x20, ['unsigned long long']],
'ReserveSize' : [ 0x28, ['unsigned long long']],
'BusyBlock' : [ 0x30, ['_HEAP_ENTRY']],
} ],
'_PNP_DEVICE_COMPLETION_REQUEST' : [ 0x68, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'DeviceNode' : [ 0x10, ['pointer64', ['_DEVICE_NODE']]],
'Context' : [ 0x18, ['pointer64', ['void']]],
'CompletionState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'IrpPended' : [ 0x24, ['unsigned long']],
'Status' : [ 0x28, ['long']],
'Information' : [ 0x30, ['pointer64', ['void']]],
'WorkItem' : [ 0x38, ['_WORK_QUEUE_ITEM']],
'FailingDriver' : [ 0x58, ['pointer64', ['_DRIVER_OBJECT']]],
'ReferenceCount' : [ 0x60, ['long']],
} ],
'_KTSS64' : [ 0x68, {
'Reserved0' : [ 0x0, ['unsigned long']],
'Rsp0' : [ 0x4, ['unsigned long long']],
'Rsp1' : [ 0xc, ['unsigned long long']],
'Rsp2' : [ 0x14, ['unsigned long long']],
'Ist' : [ 0x1c, ['array', 8, ['unsigned long long']]],
'Reserved1' : [ 0x5c, ['unsigned long long']],
'Reserved2' : [ 0x64, ['unsigned short']],
'IoMapBase' : [ 0x66, ['unsigned short']],
} ],
'_EVENT_FILTER_HEADER' : [ 0x18, {
'Id' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['array', 5, ['unsigned char']]],
'InstanceId' : [ 0x8, ['unsigned long long']],
'Size' : [ 0x10, ['unsigned long']],
'NextOffset' : [ 0x14, ['unsigned long']],
} ],
'_WAIT_CONTEXT_BLOCK' : [ 0x48, {
'WaitQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DeviceRoutine' : [ 0x18, ['pointer64', ['void']]],
'DeviceContext' : [ 0x20, ['pointer64', ['void']]],
'NumberOfMapRegisters' : [ 0x28, ['unsigned long']],
'DeviceObject' : [ 0x30, ['pointer64', ['void']]],
'CurrentIrp' : [ 0x38, ['pointer64', ['void']]],
'BufferChainingDpc' : [ 0x40, ['pointer64', ['_KDPC']]],
} ],
'_SECTION_OBJECT' : [ 0x30, {
'StartingVa' : [ 0x0, ['pointer64', ['void']]],
'EndingVa' : [ 0x8, ['pointer64', ['void']]],
'Parent' : [ 0x10, ['pointer64', ['void']]],
'LeftChild' : [ 0x18, ['pointer64', ['void']]],
'RightChild' : [ 0x20, ['pointer64', ['void']]],
'Segment' : [ 0x28, ['pointer64', ['_SEGMENT_OBJECT']]],
} ],
'_CM_NAME_CONTROL_BLOCK' : [ 0x20, {
'Compressed' : [ 0x0, ['unsigned char']],
'RefCount' : [ 0x2, ['unsigned short']],
'NameHash' : [ 0x8, ['_CM_NAME_HASH']],
'ConvKey' : [ 0x8, ['unsigned long']],
'NextHash' : [ 0x10, ['pointer64', ['_CM_KEY_HASH']]],
'NameLength' : [ 0x18, ['unsigned short']],
'Name' : [ 0x1a, ['array', 1, ['wchar']]],
} ],
'_u' : [ 0x50, {
'KeyNode' : [ 0x0, ['_CM_KEY_NODE']],
'KeyValue' : [ 0x0, ['_CM_KEY_VALUE']],
'KeySecurity' : [ 0x0, ['_CM_KEY_SECURITY']],
'KeyIndex' : [ 0x0, ['_CM_KEY_INDEX']],
'ValueData' : [ 0x0, ['_CM_BIG_DATA']],
'KeyList' : [ 0x0, ['array', 1, ['unsigned long']]],
'KeyString' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'_GENERAL_LOOKASIDE_POOL' : [ 0x60, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'SingleListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'AllocateHits' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'FreeHits' : [ 0x20, ['unsigned long']],
'Type' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x28, ['unsigned long']],
'Size' : [ 0x2c, ['unsigned long']],
'AllocateEx' : [ 0x30, ['pointer64', ['void']]],
'Allocate' : [ 0x30, ['pointer64', ['void']]],
'FreeEx' : [ 0x38, ['pointer64', ['void']]],
'Free' : [ 0x38, ['pointer64', ['void']]],
'ListEntry' : [ 0x40, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x50, ['unsigned long']],
'LastAllocateMisses' : [ 0x54, ['unsigned long']],
'LastAllocateHits' : [ 0x54, ['unsigned long']],
'Future' : [ 0x58, ['array', 2, ['unsigned long']]],
} ],
'_RTL_DYNAMIC_HASH_TABLE_ENTRY' : [ 0x18, {
'Linkage' : [ 0x0, ['_LIST_ENTRY']],
'Signature' : [ 0x10, ['unsigned long long']],
} ],
'__unnamed_205c' : [ 0x4, {
'AsULONG' : [ 0x0, ['unsigned long']],
'IncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'DecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
} ],
'_PPM_PERF_STATES' : [ 0xb0, {
'Count' : [ 0x0, ['unsigned long']],
'MaxFrequency' : [ 0x4, ['unsigned long']],
'PStateCap' : [ 0x8, ['unsigned long']],
'TStateCap' : [ 0xc, ['unsigned long']],
'MaxPerfState' : [ 0x10, ['unsigned long']],
'MinPerfState' : [ 0x14, ['unsigned long']],
'LowestPState' : [ 0x18, ['unsigned long']],
'IncreaseTime' : [ 0x1c, ['unsigned long']],
'DecreaseTime' : [ 0x20, ['unsigned long']],
'BusyAdjThreshold' : [ 0x24, ['unsigned char']],
'Reserved' : [ 0x25, ['unsigned char']],
'ThrottleStatesOnly' : [ 0x26, ['unsigned char']],
'PolicyType' : [ 0x27, ['unsigned char']],
'TimerInterval' : [ 0x28, ['unsigned long']],
'Flags' : [ 0x2c, ['__unnamed_205c']],
'TargetProcessors' : [ 0x30, ['_KAFFINITY_EX']],
'PStateHandler' : [ 0x58, ['pointer64', ['void']]],
'PStateContext' : [ 0x60, ['unsigned long long']],
'TStateHandler' : [ 0x68, ['pointer64', ['void']]],
'TStateContext' : [ 0x70, ['unsigned long long']],
'FeedbackHandler' : [ 0x78, ['pointer64', ['void']]],
'GetFFHThrottleState' : [ 0x80, ['pointer64', ['void']]],
'State' : [ 0x88, ['array', 1, ['_PPM_PERF_STATE']]],
} ],
'_M128A' : [ 0x10, {
'Low' : [ 0x0, ['unsigned long long']],
'High' : [ 0x8, ['long long']],
} ],
'_HEAP_LOOKASIDE' : [ 0x40, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'LastTotalAllocates' : [ 0x24, ['unsigned long']],
'LastAllocateMisses' : [ 0x28, ['unsigned long']],
'Counters' : [ 0x2c, ['array', 2, ['unsigned long']]],
} ],
'_WMI_TRACE_PACKET' : [ 0x4, {
'Size' : [ 0x0, ['unsigned short']],
'HookId' : [ 0x2, ['unsigned short']],
'Type' : [ 0x2, ['unsigned char']],
'Group' : [ 0x3, ['unsigned char']],
} ],
'_KTIMER' : [ 0x40, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'DueTime' : [ 0x18, ['_ULARGE_INTEGER']],
'TimerListEntry' : [ 0x20, ['_LIST_ENTRY']],
'Dpc' : [ 0x30, ['pointer64', ['_KDPC']]],
'Processor' : [ 0x38, ['unsigned long']],
'Period' : [ 0x3c, ['unsigned long']],
} ],
'_RTL_ATOM_TABLE' : [ 0x70, {
'Signature' : [ 0x0, ['unsigned long']],
'CriticalSection' : [ 0x8, ['_RTL_CRITICAL_SECTION']],
'RtlHandleTable' : [ 0x30, ['_RTL_HANDLE_TABLE']],
'NumberOfBuckets' : [ 0x60, ['unsigned long']],
'Buckets' : [ 0x68, ['array', 1, ['pointer64', ['_RTL_ATOM_TABLE_ENTRY']]]],
} ],
'_POP_POWER_ACTION' : [ 0xc0, {
'Updates' : [ 0x0, ['unsigned char']],
'State' : [ 0x1, ['unsigned char']],
'Shutdown' : [ 0x2, ['unsigned char']],
'Action' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'LightestState' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Flags' : [ 0xc, ['unsigned long']],
'Status' : [ 0x10, ['long']],
'DeviceType' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceMax'})]],
'DeviceTypeFlags' : [ 0x18, ['unsigned long']],
'IrpMinor' : [ 0x1c, ['unsigned char']],
'Waking' : [ 0x1d, ['unsigned char']],
'SystemState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'NextSystemState' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'EffectiveSystemState' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'CurrentSystemState' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ShutdownBugCode' : [ 0x30, ['pointer64', ['_POP_SHUTDOWN_BUG_CHECK']]],
'DevState' : [ 0x38, ['pointer64', ['_POP_DEVICE_SYS_STATE']]],
'HiberContext' : [ 0x40, ['pointer64', ['_POP_HIBER_CONTEXT']]],
'WakeTime' : [ 0x48, ['unsigned long long']],
'SleepTime' : [ 0x50, ['unsigned long long']],
'ProgrammedRTCTime' : [ 0x58, ['unsigned long long']],
'WakeOnRTC' : [ 0x60, ['unsigned char']],
'WakeTimerInfo' : [ 0x68, ['pointer64', ['_DIAGNOSTIC_BUFFER']]],
'FilteredCapabilities' : [ 0x70, ['SYSTEM_POWER_CAPABILITIES']],
} ],
'_CM_KEY_VALUE' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned short']],
'NameLength' : [ 0x2, ['unsigned short']],
'DataLength' : [ 0x4, ['unsigned long']],
'Data' : [ 0x8, ['unsigned long']],
'Type' : [ 0xc, ['unsigned long']],
'Flags' : [ 0x10, ['unsigned short']],
'Spare' : [ 0x12, ['unsigned short']],
'Name' : [ 0x14, ['array', 1, ['wchar']]],
} ],
'_AMD64_DBGKD_CONTROL_SET' : [ 0x1c, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long long']],
'CurrentSymbolStart' : [ 0xc, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0x14, ['unsigned long long']],
} ],
'_PO_DEVICE_NOTIFY' : [ 0x68, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'PowerChildren' : [ 0x10, ['_LIST_ENTRY']],
'PowerParents' : [ 0x20, ['_LIST_ENTRY']],
'TargetDevice' : [ 0x30, ['pointer64', ['_DEVICE_OBJECT']]],
'OrderLevel' : [ 0x38, ['unsigned char']],
'DeviceObject' : [ 0x40, ['pointer64', ['_DEVICE_OBJECT']]],
'DeviceName' : [ 0x48, ['pointer64', ['unsigned short']]],
'DriverName' : [ 0x50, ['pointer64', ['unsigned short']]],
'ChildCount' : [ 0x58, ['unsigned long']],
'ActiveChild' : [ 0x5c, ['unsigned long']],
'ParentCount' : [ 0x60, ['unsigned long']],
'ActiveParent' : [ 0x64, ['unsigned long']],
} ],
'_CM_KEY_SECURITY_CACHE_ENTRY' : [ 0x10, {
'Cell' : [ 0x0, ['unsigned long']],
'CachedSecurity' : [ 0x8, ['pointer64', ['_CM_KEY_SECURITY_CACHE']]],
} ],
'_FS_FILTER_CALLBACK_DATA' : [ 0x40, {
'SizeOfFsFilterCallbackData' : [ 0x0, ['unsigned long']],
'Operation' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'Parameters' : [ 0x18, ['_FS_FILTER_PARAMETERS']],
} ],
'_PROC_IDLE_STATE_ACCOUNTING' : [ 0x228, {
'TotalTime' : [ 0x0, ['unsigned long long']],
'IdleTransitions' : [ 0x8, ['unsigned long']],
'FailedTransitions' : [ 0xc, ['unsigned long']],
'InvalidBucketIndex' : [ 0x10, ['unsigned long']],
'MinTime' : [ 0x18, ['unsigned long long']],
'MaxTime' : [ 0x20, ['unsigned long long']],
'IdleTimeBuckets' : [ 0x28, ['array', 16, ['_PROC_IDLE_STATE_BUCKET']]],
} ],
'_IMAGE_SECURITY_CONTEXT' : [ 0x8, {
'PageHashes' : [ 0x0, ['pointer64', ['void']]],
'Value' : [ 0x0, ['unsigned long long']],
'SecurityBeingCreated' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'SecurityMandatory' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'PageHashPointer' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_209e' : [ 0x4, {
'Level' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_20a0' : [ 0x4, {
'Type' : [ 0x0, ['unsigned long']],
} ],
'_POP_ACTION_TRIGGER' : [ 0x18, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceMax'})]],
'Flags' : [ 0x4, ['unsigned long']],
'Wait' : [ 0x8, ['pointer64', ['_POP_TRIGGER_WAIT']]],
'Battery' : [ 0x10, ['__unnamed_209e']],
'Button' : [ 0x10, ['__unnamed_20a0']],
} ],
'_KENLISTMENT_HISTORY' : [ 0x8, {
'Notification' : [ 0x0, ['unsigned long']],
'NewState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'KEnlistmentUninitialized', 256: 'KEnlistmentActive', 258: 'KEnlistmentPrepared', 259: 'KEnlistmentInDoubt', 260: 'KEnlistmentCommitted', 261: 'KEnlistmentCommittedNotify', 262: 'KEnlistmentCommitRequested', 257: 'KEnlistmentPreparing', 264: 'KEnlistmentDelegated', 265: 'KEnlistmentDelegatedDisconnected', 266: 'KEnlistmentPrePreparing', 263: 'KEnlistmentAborted', 268: 'KEnlistmentRecovering', 269: 'KEnlistmentAborting', 270: 'KEnlistmentReadOnly', 271: 'KEnlistmentOutcomeUnavailable', 272: 'KEnlistmentOffline', 273: 'KEnlistmentPrePrepared', 274: 'KEnlistmentInitialized', 267: 'KEnlistmentForgotten'})]],
} ],
'_FAST_IO_DISPATCH' : [ 0xe0, {
'SizeOfFastIoDispatch' : [ 0x0, ['unsigned long']],
'FastIoCheckIfPossible' : [ 0x8, ['pointer64', ['void']]],
'FastIoRead' : [ 0x10, ['pointer64', ['void']]],
'FastIoWrite' : [ 0x18, ['pointer64', ['void']]],
'FastIoQueryBasicInfo' : [ 0x20, ['pointer64', ['void']]],
'FastIoQueryStandardInfo' : [ 0x28, ['pointer64', ['void']]],
'FastIoLock' : [ 0x30, ['pointer64', ['void']]],
'FastIoUnlockSingle' : [ 0x38, ['pointer64', ['void']]],
'FastIoUnlockAll' : [ 0x40, ['pointer64', ['void']]],
'FastIoUnlockAllByKey' : [ 0x48, ['pointer64', ['void']]],
'FastIoDeviceControl' : [ 0x50, ['pointer64', ['void']]],
'AcquireFileForNtCreateSection' : [ 0x58, ['pointer64', ['void']]],
'ReleaseFileForNtCreateSection' : [ 0x60, ['pointer64', ['void']]],
'FastIoDetachDevice' : [ 0x68, ['pointer64', ['void']]],
'FastIoQueryNetworkOpenInfo' : [ 0x70, ['pointer64', ['void']]],
'AcquireForModWrite' : [ 0x78, ['pointer64', ['void']]],
'MdlRead' : [ 0x80, ['pointer64', ['void']]],
'MdlReadComplete' : [ 0x88, ['pointer64', ['void']]],
'PrepareMdlWrite' : [ 0x90, ['pointer64', ['void']]],
'MdlWriteComplete' : [ 0x98, ['pointer64', ['void']]],
'FastIoReadCompressed' : [ 0xa0, ['pointer64', ['void']]],
'FastIoWriteCompressed' : [ 0xa8, ['pointer64', ['void']]],
'MdlReadCompleteCompressed' : [ 0xb0, ['pointer64', ['void']]],
'MdlWriteCompleteCompressed' : [ 0xb8, ['pointer64', ['void']]],
'FastIoQueryOpen' : [ 0xc0, ['pointer64', ['void']]],
'ReleaseForModWrite' : [ 0xc8, ['pointer64', ['void']]],
'AcquireForCcFlush' : [ 0xd0, ['pointer64', ['void']]],
'ReleaseForCcFlush' : [ 0xd8, ['pointer64', ['void']]],
} ],
'_KIDTENTRY64' : [ 0x10, {
'OffsetLow' : [ 0x0, ['unsigned short']],
'Selector' : [ 0x2, ['unsigned short']],
'IstIndex' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned short')]],
'Reserved0' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned short')]],
'Type' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned short')]],
'Dpl' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned short')]],
'Present' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned short')]],
'OffsetMiddle' : [ 0x6, ['unsigned short']],
'OffsetHigh' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['unsigned long']],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_CM_CELL_REMAP_BLOCK' : [ 0x8, {
'OldCell' : [ 0x0, ['unsigned long']],
'NewCell' : [ 0x4, ['unsigned long']],
} ],
'_OBJECT_DIRECTORY_ENTRY' : [ 0x18, {
'ChainLink' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]],
'Object' : [ 0x8, ['pointer64', ['void']]],
'HashValue' : [ 0x10, ['unsigned long']],
} ],
'_LOADER_PARAMETER_EXTENSION' : [ 0x148, {
'Size' : [ 0x0, ['unsigned long']],
'Profile' : [ 0x4, ['_PROFILE_PARAMETER_BLOCK']],
'EmInfFileImage' : [ 0x18, ['pointer64', ['void']]],
'EmInfFileSize' : [ 0x20, ['unsigned long']],
'TriageDumpBlock' : [ 0x28, ['pointer64', ['void']]],
'LoaderPagesSpanned' : [ 0x30, ['unsigned long long']],
'HeadlessLoaderBlock' : [ 0x38, ['pointer64', ['_HEADLESS_LOADER_BLOCK']]],
'SMBiosEPSHeader' : [ 0x40, ['pointer64', ['_SMBIOS_TABLE_HEADER']]],
'DrvDBImage' : [ 0x48, ['pointer64', ['void']]],
'DrvDBSize' : [ 0x50, ['unsigned long']],
'NetworkLoaderBlock' : [ 0x58, ['pointer64', ['_NETWORK_LOADER_BLOCK']]],
'FirmwareDescriptorListHead' : [ 0x60, ['_LIST_ENTRY']],
'AcpiTable' : [ 0x70, ['pointer64', ['void']]],
'AcpiTableSize' : [ 0x78, ['unsigned long']],
'LastBootSucceeded' : [ 0x7c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'LastBootShutdown' : [ 0x7c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'IoPortAccessSupported' : [ 0x7c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x7c, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'LoaderPerformanceData' : [ 0x80, ['pointer64', ['_LOADER_PERFORMANCE_DATA']]],
'BootApplicationPersistentData' : [ 0x88, ['_LIST_ENTRY']],
'WmdTestResult' : [ 0x98, ['pointer64', ['void']]],
'BootIdentifier' : [ 0xa0, ['_GUID']],
'ResumePages' : [ 0xb0, ['unsigned long']],
'DumpHeader' : [ 0xb8, ['pointer64', ['void']]],
'BgContext' : [ 0xc0, ['pointer64', ['void']]],
'NumaLocalityInfo' : [ 0xc8, ['pointer64', ['void']]],
'NumaGroupAssignment' : [ 0xd0, ['pointer64', ['void']]],
'AttachedHives' : [ 0xd8, ['_LIST_ENTRY']],
'MemoryCachingRequirementsCount' : [ 0xe8, ['unsigned long']],
'MemoryCachingRequirements' : [ 0xf0, ['pointer64', ['void']]],
'TpmBootEntropyResult' : [ 0xf8, ['_TPM_BOOT_ENTROPY_LDR_RESULT']],
'ProcessorCounterFrequency' : [ 0x140, ['unsigned long long']],
} ],
'_PI_RESOURCE_ARBITER_ENTRY' : [ 0x70, {
'DeviceArbiterList' : [ 0x0, ['_LIST_ENTRY']],
'ResourceType' : [ 0x10, ['unsigned char']],
'ArbiterInterface' : [ 0x18, ['pointer64', ['_ARBITER_INTERFACE']]],
'DeviceNode' : [ 0x20, ['pointer64', ['_DEVICE_NODE']]],
'ResourceList' : [ 0x28, ['_LIST_ENTRY']],
'BestResourceList' : [ 0x38, ['_LIST_ENTRY']],
'BestConfig' : [ 0x48, ['_LIST_ENTRY']],
'ActiveArbiterList' : [ 0x58, ['_LIST_ENTRY']],
'State' : [ 0x68, ['unsigned char']],
'ResourcesChanged' : [ 0x69, ['unsigned char']],
} ],
'_SECURITY_DESCRIPTOR' : [ 0x28, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x8, ['pointer64', ['void']]],
'Group' : [ 0x10, ['pointer64', ['void']]],
'Sacl' : [ 0x18, ['pointer64', ['_ACL']]],
'Dacl' : [ 0x20, ['pointer64', ['_ACL']]],
} ],
'_KUMS_CONTEXT_HEADER' : [ 0x70, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'StackTop' : [ 0x20, ['pointer64', ['void']]],
'StackSize' : [ 0x28, ['unsigned long long']],
'RspOffset' : [ 0x30, ['unsigned long long']],
'Rip' : [ 0x38, ['unsigned long long']],
'FltSave' : [ 0x40, ['pointer64', ['_XSAVE_FORMAT']]],
'Volatile' : [ 0x48, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Reserved' : [ 0x48, ['BitField', dict(start_bit = 1, end_bit = 64, native_type='unsigned long long')]],
'Flags' : [ 0x48, ['unsigned long long']],
'TrapFrame' : [ 0x50, ['pointer64', ['_KTRAP_FRAME']]],
'ExceptionFrame' : [ 0x58, ['pointer64', ['_KEXCEPTION_FRAME']]],
'SourceThread' : [ 0x60, ['pointer64', ['_KTHREAD']]],
'Return' : [ 0x68, ['unsigned long long']],
} ],
'_RTL_USER_PROCESS_PARAMETERS' : [ 0x400, {
'MaximumLength' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'DebugFlags' : [ 0xc, ['unsigned long']],
'ConsoleHandle' : [ 0x10, ['pointer64', ['void']]],
'ConsoleFlags' : [ 0x18, ['unsigned long']],
'StandardInput' : [ 0x20, ['pointer64', ['void']]],
'StandardOutput' : [ 0x28, ['pointer64', ['void']]],
'StandardError' : [ 0x30, ['pointer64', ['void']]],
'CurrentDirectory' : [ 0x38, ['_CURDIR']],
'DllPath' : [ 0x50, ['_UNICODE_STRING']],
'ImagePathName' : [ 0x60, ['_UNICODE_STRING']],
'CommandLine' : [ 0x70, ['_UNICODE_STRING']],
'Environment' : [ 0x80, ['pointer64', ['void']]],
'StartingX' : [ 0x88, ['unsigned long']],
'StartingY' : [ 0x8c, ['unsigned long']],
'CountX' : [ 0x90, ['unsigned long']],
'CountY' : [ 0x94, ['unsigned long']],
'CountCharsX' : [ 0x98, ['unsigned long']],
'CountCharsY' : [ 0x9c, ['unsigned long']],
'FillAttribute' : [ 0xa0, ['unsigned long']],
'WindowFlags' : [ 0xa4, ['unsigned long']],
'ShowWindowFlags' : [ 0xa8, ['unsigned long']],
'WindowTitle' : [ 0xb0, ['_UNICODE_STRING']],
'DesktopInfo' : [ 0xc0, ['_UNICODE_STRING']],
'ShellInfo' : [ 0xd0, ['_UNICODE_STRING']],
'RuntimeData' : [ 0xe0, ['_UNICODE_STRING']],
'CurrentDirectores' : [ 0xf0, ['array', 32, ['_RTL_DRIVE_LETTER_CURDIR']]],
'EnvironmentSize' : [ 0x3f0, ['unsigned long long']],
'EnvironmentVersion' : [ 0x3f8, ['unsigned long long']],
} ],
'_PHYSICAL_MEMORY_RUN' : [ 0x10, {
'BasePage' : [ 0x0, ['unsigned long long']],
'PageCount' : [ 0x8, ['unsigned long long']],
} ],
'_RTL_SRWLOCK' : [ 0x8, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_ALPC_MESSAGE_ZONE' : [ 0x30, {
'Mdl' : [ 0x0, ['pointer64', ['_MDL']]],
'UserVa' : [ 0x8, ['pointer64', ['void']]],
'UserLimit' : [ 0x10, ['pointer64', ['void']]],
'SystemVa' : [ 0x18, ['pointer64', ['void']]],
'SystemLimit' : [ 0x20, ['pointer64', ['void']]],
'Size' : [ 0x28, ['unsigned long long']],
} ],
'_KTMOBJECT_NAMESPACE_LINK' : [ 0x28, {
'Links' : [ 0x0, ['_RTL_BALANCED_LINKS']],
'Expired' : [ 0x20, ['unsigned char']],
} ],
'_CACHE_MANAGER_CALLBACKS' : [ 0x20, {
'AcquireForLazyWrite' : [ 0x0, ['pointer64', ['void']]],
'ReleaseFromLazyWrite' : [ 0x8, ['pointer64', ['void']]],
'AcquireForReadAhead' : [ 0x10, ['pointer64', ['void']]],
'ReleaseFromReadAhead' : [ 0x18, ['pointer64', ['void']]],
} ],
'_PROC_PERF_LOAD' : [ 0x2, {
'BusyPercentage' : [ 0x0, ['unsigned char']],
'FrequencyPercentage' : [ 0x1, ['unsigned char']],
} ],
'_PROC_HISTORY_ENTRY' : [ 0x4, {
'Utility' : [ 0x0, ['unsigned short']],
'Frequency' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
} ],
'_RTL_RANGE' : [ 0x28, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'UserData' : [ 0x10, ['pointer64', ['void']]],
'Owner' : [ 0x18, ['pointer64', ['void']]],
'Attributes' : [ 0x20, ['unsigned char']],
'Flags' : [ 0x21, ['unsigned char']],
} ],
'_KSPECIAL_REGISTERS' : [ 0xd8, {
'Cr0' : [ 0x0, ['unsigned long long']],
'Cr2' : [ 0x8, ['unsigned long long']],
'Cr3' : [ 0x10, ['unsigned long long']],
'Cr4' : [ 0x18, ['unsigned long long']],
'KernelDr0' : [ 0x20, ['unsigned long long']],
'KernelDr1' : [ 0x28, ['unsigned long long']],
'KernelDr2' : [ 0x30, ['unsigned long long']],
'KernelDr3' : [ 0x38, ['unsigned long long']],
'KernelDr6' : [ 0x40, ['unsigned long long']],
'KernelDr7' : [ 0x48, ['unsigned long long']],
'Gdtr' : [ 0x50, ['_KDESCRIPTOR']],
'Idtr' : [ 0x60, ['_KDESCRIPTOR']],
'Tr' : [ 0x70, ['unsigned short']],
'Ldtr' : [ 0x72, ['unsigned short']],
'MxCsr' : [ 0x74, ['unsigned long']],
'DebugControl' : [ 0x78, ['unsigned long long']],
'LastBranchToRip' : [ 0x80, ['unsigned long long']],
'LastBranchFromRip' : [ 0x88, ['unsigned long long']],
'LastExceptionToRip' : [ 0x90, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x98, ['unsigned long long']],
'Cr8' : [ 0xa0, ['unsigned long long']],
'MsrGsBase' : [ 0xa8, ['unsigned long long']],
'MsrGsSwap' : [ 0xb0, ['unsigned long long']],
'MsrStar' : [ 0xb8, ['unsigned long long']],
'MsrLStar' : [ 0xc0, ['unsigned long long']],
'MsrCStar' : [ 0xc8, ['unsigned long long']],
'MsrSyscallMask' : [ 0xd0, ['unsigned long long']],
} ],
'_SYSTEM_POWER_POLICY' : [ 0xe8, {
'Revision' : [ 0x0, ['unsigned long']],
'PowerButton' : [ 0x4, ['POWER_ACTION_POLICY']],
'SleepButton' : [ 0x10, ['POWER_ACTION_POLICY']],
'LidClose' : [ 0x1c, ['POWER_ACTION_POLICY']],
'LidOpenWake' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Reserved' : [ 0x2c, ['unsigned long']],
'Idle' : [ 0x30, ['POWER_ACTION_POLICY']],
'IdleTimeout' : [ 0x3c, ['unsigned long']],
'IdleSensitivity' : [ 0x40, ['unsigned char']],
'DynamicThrottle' : [ 0x41, ['unsigned char']],
'Spare2' : [ 0x42, ['array', 2, ['unsigned char']]],
'MinSleep' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MaxSleep' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ReducedLatencySleep' : [ 0x4c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'WinLogonFlags' : [ 0x50, ['unsigned long']],
'Spare3' : [ 0x54, ['unsigned long']],
'DozeS4Timeout' : [ 0x58, ['unsigned long']],
'BroadcastCapacityResolution' : [ 0x5c, ['unsigned long']],
'DischargePolicy' : [ 0x60, ['array', 4, ['SYSTEM_POWER_LEVEL']]],
'VideoTimeout' : [ 0xc0, ['unsigned long']],
'VideoDimDisplay' : [ 0xc4, ['unsigned char']],
'VideoReserved' : [ 0xc8, ['array', 3, ['unsigned long']]],
'SpindownTimeout' : [ 0xd4, ['unsigned long']],
'OptimizeForPower' : [ 0xd8, ['unsigned char']],
'FanThrottleTolerance' : [ 0xd9, ['unsigned char']],
'ForcedThrottle' : [ 0xda, ['unsigned char']],
'MinThrottle' : [ 0xdb, ['unsigned char']],
'OverThrottled' : [ 0xdc, ['POWER_ACTION_POLICY']],
} ],
'_POOL_HEADER' : [ 0x10, {
'PreviousSize' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'PoolIndex' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'BlockSize' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long')]],
'PoolType' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'Ulong1' : [ 0x0, ['unsigned long']],
'PoolTag' : [ 0x4, ['unsigned long']],
'ProcessBilled' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'AllocatorBackTraceIndex' : [ 0x8, ['unsigned short']],
'PoolTagHash' : [ 0xa, ['unsigned short']],
} ],
'_ETW_PROVIDER_TABLE_ENTRY' : [ 0x18, {
'RefCount' : [ 0x0, ['long']],
'State' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'EtwProviderStateFree', 1: 'EtwProviderStateTransition', 2: 'EtwProviderStateActive', 3: 'EtwProviderStateMax'})]],
'RegEntry' : [ 0x8, ['pointer64', ['_ETW_REG_ENTRY']]],
'Caller' : [ 0x10, ['pointer64', ['void']]],
} ],
'_PEB64' : [ 0x380, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsLegacyProcess' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x8, ['unsigned long long']],
'ImageBaseAddress' : [ 0x10, ['unsigned long long']],
'Ldr' : [ 0x18, ['unsigned long long']],
'ProcessParameters' : [ 0x20, ['unsigned long long']],
'SubSystemData' : [ 0x28, ['unsigned long long']],
'ProcessHeap' : [ 0x30, ['unsigned long long']],
'FastPebLock' : [ 0x38, ['unsigned long long']],
'AtlThunkSListPtr' : [ 0x40, ['unsigned long long']],
'IFEOKey' : [ 0x48, ['unsigned long long']],
'CrossProcessFlags' : [ 0x50, ['unsigned long']],
'ProcessInJob' : [ 0x50, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x50, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x50, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x50, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ProcessUsingFTH' : [ 0x50, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ReservedBits0' : [ 0x50, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'KernelCallbackTable' : [ 0x58, ['unsigned long long']],
'UserSharedInfoPtr' : [ 0x58, ['unsigned long long']],
'SystemReserved' : [ 0x60, ['array', 1, ['unsigned long']]],
'AtlThunkSListPtr32' : [ 0x64, ['unsigned long']],
'ApiSetMap' : [ 0x68, ['unsigned long long']],
'TlsExpansionCounter' : [ 0x70, ['unsigned long']],
'TlsBitmap' : [ 0x78, ['unsigned long long']],
'TlsBitmapBits' : [ 0x80, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x88, ['unsigned long long']],
'HotpatchInformation' : [ 0x90, ['unsigned long long']],
'ReadOnlyStaticServerData' : [ 0x98, ['unsigned long long']],
'AnsiCodePageData' : [ 0xa0, ['unsigned long long']],
'OemCodePageData' : [ 0xa8, ['unsigned long long']],
'UnicodeCaseTableData' : [ 0xb0, ['unsigned long long']],
'NumberOfProcessors' : [ 0xb8, ['unsigned long']],
'NtGlobalFlag' : [ 0xbc, ['unsigned long']],
'CriticalSectionTimeout' : [ 0xc0, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0xc8, ['unsigned long long']],
'HeapSegmentCommit' : [ 0xd0, ['unsigned long long']],
'HeapDeCommitTotalFreeThreshold' : [ 0xd8, ['unsigned long long']],
'HeapDeCommitFreeBlockThreshold' : [ 0xe0, ['unsigned long long']],
'NumberOfHeaps' : [ 0xe8, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0xec, ['unsigned long']],
'ProcessHeaps' : [ 0xf0, ['unsigned long long']],
'GdiSharedHandleTable' : [ 0xf8, ['unsigned long long']],
'ProcessStarterHelper' : [ 0x100, ['unsigned long long']],
'GdiDCAttributeList' : [ 0x108, ['unsigned long']],
'LoaderLock' : [ 0x110, ['unsigned long long']],
'OSMajorVersion' : [ 0x118, ['unsigned long']],
'OSMinorVersion' : [ 0x11c, ['unsigned long']],
'OSBuildNumber' : [ 0x120, ['unsigned short']],
'OSCSDVersion' : [ 0x122, ['unsigned short']],
'OSPlatformId' : [ 0x124, ['unsigned long']],
'ImageSubsystem' : [ 0x128, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0x12c, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0x130, ['unsigned long']],
'ActiveProcessAffinityMask' : [ 0x138, ['unsigned long long']],
'GdiHandleBuffer' : [ 0x140, ['array', 60, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x230, ['unsigned long long']],
'TlsExpansionBitmap' : [ 0x238, ['unsigned long long']],
'TlsExpansionBitmapBits' : [ 0x240, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x2c0, ['unsigned long']],
'AppCompatFlags' : [ 0x2c8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x2d0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x2d8, ['unsigned long long']],
'AppCompatInfo' : [ 0x2e0, ['unsigned long long']],
'CSDVersion' : [ 0x2e8, ['_STRING64']],
'ActivationContextData' : [ 0x2f8, ['unsigned long long']],
'ProcessAssemblyStorageMap' : [ 0x300, ['unsigned long long']],
'SystemDefaultActivationContextData' : [ 0x308, ['unsigned long long']],
'SystemAssemblyStorageMap' : [ 0x310, ['unsigned long long']],
'MinimumStackCommit' : [ 0x318, ['unsigned long long']],
'FlsCallback' : [ 0x320, ['unsigned long long']],
'FlsListHead' : [ 0x328, ['LIST_ENTRY64']],
'FlsBitmap' : [ 0x338, ['unsigned long long']],
'FlsBitmapBits' : [ 0x340, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x350, ['unsigned long']],
'WerRegistrationData' : [ 0x358, ['unsigned long long']],
'WerShipAssertPtr' : [ 0x360, ['unsigned long long']],
'pContextData' : [ 0x368, ['unsigned long long']],
'pImageHeaderHash' : [ 0x370, ['unsigned long long']],
'TracingFlags' : [ 0x378, ['unsigned long']],
'HeapTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CritSecTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SpareTracingBits' : [ 0x378, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_SE_AUDIT_PROCESS_CREATION_INFO' : [ 0x8, {
'ImageFileName' : [ 0x0, ['pointer64', ['_OBJECT_NAME_INFORMATION']]],
} ],
'_HEAP_ENTRY_EXTRA' : [ 0x10, {
'AllocatorBackTraceIndex' : [ 0x0, ['unsigned short']],
'TagIndex' : [ 0x2, ['unsigned short']],
'Settable' : [ 0x8, ['unsigned long long']],
'ZeroInit' : [ 0x0, ['unsigned long long']],
'ZeroInit1' : [ 0x8, ['unsigned long long']],
} ],
'_VF_POOL_TRACE' : [ 0x80, {
'Address' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned long long']],
'Thread' : [ 0x10, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x18, ['array', 13, ['pointer64', ['void']]]],
} ],
'__unnamed_2145' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MM_SESSION_SPACE_FLAGS']],
} ],
'_MM_SESSION_SPACE' : [ 0x1f80, {
'ReferenceCount' : [ 0x0, ['long']],
'u' : [ 0x4, ['__unnamed_2145']],
'SessionId' : [ 0x8, ['unsigned long']],
'ProcessReferenceToSession' : [ 0xc, ['long']],
'ProcessList' : [ 0x10, ['_LIST_ENTRY']],
'LastProcessSwappedOutTime' : [ 0x20, ['_LARGE_INTEGER']],
'SessionPageDirectoryIndex' : [ 0x28, ['unsigned long long']],
'NonPagablePages' : [ 0x30, ['unsigned long long']],
'CommittedPages' : [ 0x38, ['unsigned long long']],
'PagedPoolStart' : [ 0x40, ['pointer64', ['void']]],
'PagedPoolEnd' : [ 0x48, ['pointer64', ['void']]],
'SessionObject' : [ 0x50, ['pointer64', ['void']]],
'SessionObjectHandle' : [ 0x58, ['pointer64', ['void']]],
'ResidentProcessCount' : [ 0x60, ['long']],
'SessionPoolAllocationFailures' : [ 0x64, ['array', 4, ['unsigned long']]],
'ImageList' : [ 0x78, ['_LIST_ENTRY']],
'LocaleId' : [ 0x88, ['unsigned long']],
'AttachCount' : [ 0x8c, ['unsigned long']],
'AttachGate' : [ 0x90, ['_KGATE']],
'WsListEntry' : [ 0xa8, ['_LIST_ENTRY']],
'Lookaside' : [ 0xc0, ['array', 21, ['_GENERAL_LOOKASIDE']]],
'Session' : [ 0xb40, ['_MMSESSION']],
'PagedPoolInfo' : [ 0xb98, ['_MM_PAGED_POOL_INFO']],
'Vm' : [ 0xc00, ['_MMSUPPORT']],
'Wsle' : [ 0xc88, ['pointer64', ['_MMWSLE']]],
'DriverUnload' : [ 0xc90, ['pointer64', ['void']]],
'PagedPool' : [ 0xcc0, ['_POOL_DESCRIPTOR']],
'PageDirectory' : [ 0x1e00, ['_MMPTE']],
'SessionVaLock' : [ 0x1e08, ['_KGUARDED_MUTEX']],
'DynamicVaBitMap' : [ 0x1e40, ['_RTL_BITMAP']],
'DynamicVaHint' : [ 0x1e50, ['unsigned long']],
'SpecialPool' : [ 0x1e58, ['_MI_SPECIAL_POOL']],
'SessionPteLock' : [ 0x1ea0, ['_KGUARDED_MUTEX']],
'PoolBigEntriesInUse' : [ 0x1ed8, ['long']],
'PagedPoolPdeCount' : [ 0x1edc, ['unsigned long']],
'SpecialPoolPdeCount' : [ 0x1ee0, ['unsigned long']],
'DynamicSessionPdeCount' : [ 0x1ee4, ['unsigned long']],
'SystemPteInfo' : [ 0x1ee8, ['_MI_SYSTEM_PTE_TYPE']],
'PoolTrackTableExpansion' : [ 0x1f30, ['pointer64', ['void']]],
'PoolTrackTableExpansionSize' : [ 0x1f38, ['unsigned long long']],
'PoolTrackBigPages' : [ 0x1f40, ['pointer64', ['void']]],
'PoolTrackBigPagesSize' : [ 0x1f48, ['unsigned long long']],
'IoState' : [ 0x1f50, ['Enumeration', dict(target = 'long', choices = {1: 'IoSessionStateCreated', 2: 'IoSessionStateInitialized', 3: 'IoSessionStateConnected', 4: 'IoSessionStateDisconnected', 5: 'IoSessionStateDisconnectedLoggedOn', 6: 'IoSessionStateLoggedOn', 7: 'IoSessionStateLoggedOff', 8: 'IoSessionStateTerminated', 9: 'IoSessionStateMax'})]],
'IoStateSequence' : [ 0x1f54, ['unsigned long']],
'IoNotificationEvent' : [ 0x1f58, ['_KEVENT']],
'CpuQuotaBlock' : [ 0x1f70, ['pointer64', ['_PS_CPU_QUOTA_BLOCK']]],
} ],
'_OBJECT_HANDLE_COUNT_ENTRY' : [ 0x10, {
'Process' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'HandleCount' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'LockCount' : [ 0x8, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_CLIENT_ID' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['pointer64', ['void']]],
'UniqueThread' : [ 0x8, ['pointer64', ['void']]],
} ],
'_WHEA_MEMORY_ERROR_SECTION' : [ 0x49, {
'ValidBits' : [ 0x0, ['_WHEA_MEMORY_ERROR_SECTION_VALIDBITS']],
'ErrorStatus' : [ 0x8, ['_WHEA_ERROR_STATUS']],
'PhysicalAddress' : [ 0x10, ['unsigned long long']],
'PhysicalAddressMask' : [ 0x18, ['unsigned long long']],
'Node' : [ 0x20, ['unsigned short']],
'Card' : [ 0x22, ['unsigned short']],
'Module' : [ 0x24, ['unsigned short']],
'Bank' : [ 0x26, ['unsigned short']],
'Device' : [ 0x28, ['unsigned short']],
'Row' : [ 0x2a, ['unsigned short']],
'Column' : [ 0x2c, ['unsigned short']],
'BitPosition' : [ 0x2e, ['unsigned short']],
'RequesterId' : [ 0x30, ['unsigned long long']],
'ResponderId' : [ 0x38, ['unsigned long long']],
'TargetId' : [ 0x40, ['unsigned long long']],
'ErrorType' : [ 0x48, ['unsigned char']],
} ],
'_KWAIT_STATUS_REGISTER' : [ 0x1, {
'Flags' : [ 0x0, ['unsigned char']],
'State' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned char')]],
'Affinity' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Priority' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Apc' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'UserApc' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Alert' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'_VI_DEADLOCK_RESOURCE' : [ 0xf8, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'VfDeadlockUnknown', 1: 'VfDeadlockMutex', 2: 'VfDeadlockMutexAbandoned', 3: 'VfDeadlockFastMutex', 4: 'VfDeadlockFastMutexUnsafe', 5: 'VfDeadlockSpinLock', 6: 'VfDeadlockInStackQueuedSpinLock', 7: 'VfDeadlockUnusedSpinLock', 8: 'VfDeadlockEresource', 9: 'VfDeadlockTypeMaximum'})]],
'NodeCount' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'RecursionCount' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'ResourceAddress' : [ 0x8, ['pointer64', ['void']]],
'ThreadOwner' : [ 0x10, ['pointer64', ['_VI_DEADLOCK_THREAD']]],
'ResourceList' : [ 0x18, ['_LIST_ENTRY']],
'HashChainList' : [ 0x28, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x28, ['_LIST_ENTRY']],
'StackTrace' : [ 0x38, ['array', 8, ['pointer64', ['void']]]],
'LastAcquireTrace' : [ 0x78, ['array', 8, ['pointer64', ['void']]]],
'LastReleaseTrace' : [ 0xb8, ['array', 8, ['pointer64', ['void']]]],
} ],
'_DBGKD_GET_SET_BUS_DATA' : [ 0x14, {
'BusDataType' : [ 0x0, ['unsigned long']],
'BusNumber' : [ 0x4, ['unsigned long']],
'SlotNumber' : [ 0x8, ['unsigned long']],
'Offset' : [ 0xc, ['unsigned long']],
'Length' : [ 0x10, ['unsigned long']],
} ],
'_MMSECTION_FLAGS' : [ 0x4, {
'BeingDeleted' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'BeingCreated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'BeingPurged' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'NoModifiedWriting' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'FailAllIo' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Based' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'File' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Networked' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Rom' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'PhysicalMemory' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Reserve' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'Commit' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WasPurged' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'UserReference' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'GlobalMemory' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'DeleteOnClose' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'FilePointerNull' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'GlobalOnlyPerSession' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'SetMappedFileIoComplete' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'CollidedFlush' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'UserWritable' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'PreferredNode' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 32, native_type='unsigned long')]],
} ],
'_SECURITY_CLIENT_CONTEXT' : [ 0x48, {
'SecurityQos' : [ 0x0, ['_SECURITY_QUALITY_OF_SERVICE']],
'ClientToken' : [ 0x10, ['pointer64', ['void']]],
'DirectlyAccessClientToken' : [ 0x18, ['unsigned char']],
'DirectAccessEffectiveOnly' : [ 0x19, ['unsigned char']],
'ServerIsRemote' : [ 0x1a, ['unsigned char']],
'ClientTokenControl' : [ 0x1c, ['_TOKEN_CONTROL']],
} ],
'_MM_PAGED_POOL_INFO' : [ 0x68, {
'Mutex' : [ 0x0, ['_KGUARDED_MUTEX']],
'PagedPoolAllocationMap' : [ 0x38, ['_RTL_BITMAP']],
'FirstPteForPagedPool' : [ 0x48, ['pointer64', ['_MMPTE']]],
'PagedPoolHint' : [ 0x50, ['unsigned long']],
'PagedPoolCommit' : [ 0x58, ['unsigned long long']],
'AllocatedPagedPool' : [ 0x60, ['unsigned long long']],
} ],
'_BITMAP_RANGE' : [ 0x30, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'BasePage' : [ 0x10, ['long long']],
'FirstDirtyPage' : [ 0x18, ['unsigned long']],
'LastDirtyPage' : [ 0x1c, ['unsigned long']],
'DirtyPages' : [ 0x20, ['unsigned long']],
'Bitmap' : [ 0x28, ['pointer64', ['unsigned long']]],
} ],
'_NT_TIB64' : [ 0x38, {
'ExceptionList' : [ 0x0, ['unsigned long long']],
'StackBase' : [ 0x8, ['unsigned long long']],
'StackLimit' : [ 0x10, ['unsigned long long']],
'SubSystemTib' : [ 0x18, ['unsigned long long']],
'FiberData' : [ 0x20, ['unsigned long long']],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['unsigned long long']],
'Self' : [ 0x30, ['unsigned long long']],
} ],
'_IO_SECURITY_CONTEXT' : [ 0x18, {
'SecurityQos' : [ 0x0, ['pointer64', ['_SECURITY_QUALITY_OF_SERVICE']]],
'AccessState' : [ 0x8, ['pointer64', ['_ACCESS_STATE']]],
'DesiredAccess' : [ 0x10, ['unsigned long']],
'FullCreateOptions' : [ 0x14, ['unsigned long']],
} ],
'_PROC_PERF_DOMAIN' : [ 0xb8, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'Master' : [ 0x10, ['pointer64', ['_KPRCB']]],
'Members' : [ 0x18, ['_KAFFINITY_EX']],
'FeedbackHandler' : [ 0x40, ['pointer64', ['void']]],
'GetFFHThrottleState' : [ 0x48, ['pointer64', ['void']]],
'BoostPolicyHandler' : [ 0x50, ['pointer64', ['void']]],
'PerfSelectionHandler' : [ 0x58, ['pointer64', ['void']]],
'PerfHandler' : [ 0x60, ['pointer64', ['void']]],
'Processors' : [ 0x68, ['pointer64', ['_PROC_PERF_CONSTRAINT']]],
'PerfChangeTime' : [ 0x70, ['unsigned long long']],
'ProcessorCount' : [ 0x78, ['unsigned long']],
'PreviousFrequencyMhz' : [ 0x7c, ['unsigned long']],
'CurrentFrequencyMhz' : [ 0x80, ['unsigned long']],
'PreviousFrequency' : [ 0x84, ['unsigned long']],
'CurrentFrequency' : [ 0x88, ['unsigned long']],
'CurrentPerfContext' : [ 0x8c, ['unsigned long']],
'DesiredFrequency' : [ 0x90, ['unsigned long']],
'MaxFrequency' : [ 0x94, ['unsigned long']],
'MinPerfPercent' : [ 0x98, ['unsigned long']],
'MinThrottlePercent' : [ 0x9c, ['unsigned long']],
'MaxPercent' : [ 0xa0, ['unsigned long']],
'MinPercent' : [ 0xa4, ['unsigned long']],
'ConstrainedMaxPercent' : [ 0xa8, ['unsigned long']],
'ConstrainedMinPercent' : [ 0xac, ['unsigned long']],
'Coordination' : [ 0xb0, ['unsigned char']],
'PerfChangeIntervalCount' : [ 0xb4, ['long']],
} ],
'_X86_DBGKD_CONTROL_SET' : [ 0x10, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long']],
'CurrentSymbolStart' : [ 0x8, ['unsigned long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long']],
} ],
'_HANDLE_TRACE_DB_ENTRY' : [ 0xa0, {
'ClientId' : [ 0x0, ['_CLIENT_ID']],
'Handle' : [ 0x10, ['pointer64', ['void']]],
'Type' : [ 0x18, ['unsigned long']],
'StackTrace' : [ 0x20, ['array', 16, ['pointer64', ['void']]]],
} ],
'_TP_NBQ_GUARD' : [ 0x20, {
'GuardLinks' : [ 0x0, ['_LIST_ENTRY']],
'Guards' : [ 0x10, ['array', 2, ['pointer64', ['void']]]],
} ],
'_DUMMY_FILE_OBJECT' : [ 0x110, {
'ObjectHeader' : [ 0x0, ['_OBJECT_HEADER']],
'FileObjectBody' : [ 0x38, ['array', 216, ['unsigned char']]],
} ],
'_POP_TRIGGER_WAIT' : [ 0x38, {
'Event' : [ 0x0, ['_KEVENT']],
'Status' : [ 0x18, ['long']],
'Link' : [ 0x20, ['_LIST_ENTRY']],
'Trigger' : [ 0x30, ['pointer64', ['_POP_ACTION_TRIGGER']]],
} ],
'_RELATION_LIST' : [ 0x18, {
'Count' : [ 0x0, ['unsigned long']],
'TagCount' : [ 0x4, ['unsigned long']],
'FirstLevel' : [ 0x8, ['unsigned long']],
'MaxLevel' : [ 0xc, ['unsigned long']],
'Entries' : [ 0x10, ['array', 1, ['pointer64', ['_RELATION_LIST_ENTRY']]]],
} ],
'_IO_TIMER' : [ 0x30, {
'Type' : [ 0x0, ['short']],
'TimerFlag' : [ 0x2, ['short']],
'TimerList' : [ 0x8, ['_LIST_ENTRY']],
'TimerRoutine' : [ 0x18, ['pointer64', ['void']]],
'Context' : [ 0x20, ['pointer64', ['void']]],
'DeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'_ARBITER_TEST_ALLOCATION_PARAMETERS' : [ 0x18, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x8, ['unsigned long']],
'AllocateFrom' : [ 0x10, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_MI_SPECIAL_POOL' : [ 0x48, {
'PteBase' : [ 0x0, ['pointer64', ['_MMPTE']]],
'Lock' : [ 0x8, ['unsigned long long']],
'Paged' : [ 0x10, ['_MI_SPECIAL_POOL_PTE_LIST']],
'NonPaged' : [ 0x20, ['_MI_SPECIAL_POOL_PTE_LIST']],
'PagesInUse' : [ 0x30, ['long long']],
'SpecialPoolPdes' : [ 0x38, ['_RTL_BITMAP']],
} ],
'_ARBITER_QUERY_CONFLICT_PARAMETERS' : [ 0x20, {
'PhysicalDeviceObject' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'ConflictingResource' : [ 0x8, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'ConflictCount' : [ 0x10, ['pointer64', ['unsigned long']]],
'Conflicts' : [ 0x18, ['pointer64', ['pointer64', ['_ARBITER_CONFLICT_INFO']]]],
} ],
'_PHYSICAL_MEMORY_DESCRIPTOR' : [ 0x20, {
'NumberOfRuns' : [ 0x0, ['unsigned long']],
'NumberOfPages' : [ 0x8, ['unsigned long long']],
'Run' : [ 0x10, ['array', 1, ['_PHYSICAL_MEMORY_RUN']]],
} ],
'__unnamed_21be' : [ 0x4, {
'BaseMiddle' : [ 0x0, ['unsigned char']],
'Flags1' : [ 0x1, ['unsigned char']],
'Flags2' : [ 0x2, ['unsigned char']],
'BaseHigh' : [ 0x3, ['unsigned char']],
} ],
'__unnamed_21c2' : [ 0x4, {
'BaseMiddle' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned long')]],
'Dpl' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned long')]],
'Present' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'LimitHigh' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'System' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'LongMode' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'DefaultBig' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'Granularity' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'BaseHigh' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_KGDTENTRY64' : [ 0x10, {
'LimitLow' : [ 0x0, ['unsigned short']],
'BaseLow' : [ 0x2, ['unsigned short']],
'Bytes' : [ 0x4, ['__unnamed_21be']],
'Bits' : [ 0x4, ['__unnamed_21c2']],
'BaseUpper' : [ 0x8, ['unsigned long']],
'MustBeZero' : [ 0xc, ['unsigned long']],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_PNP_DEVICE_EVENT_LIST' : [ 0x88, {
'Status' : [ 0x0, ['long']],
'EventQueueMutex' : [ 0x8, ['_KMUTANT']],
'Lock' : [ 0x40, ['_KGUARDED_MUTEX']],
'List' : [ 0x78, ['_LIST_ENTRY']],
} ],
'_MAILSLOT_CREATE_PARAMETERS' : [ 0x18, {
'MailslotQuota' : [ 0x0, ['unsigned long']],
'MaximumMessageSize' : [ 0x4, ['unsigned long']],
'ReadTimeout' : [ 0x8, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x10, ['unsigned char']],
} ],
'_PO_IRP_MANAGER' : [ 0x20, {
'DeviceIrpQueue' : [ 0x0, ['_PO_IRP_QUEUE']],
'SystemIrpQueue' : [ 0x10, ['_PO_IRP_QUEUE']],
} ],
'_PPM_PERF_STATE' : [ 0x28, {
'Frequency' : [ 0x0, ['unsigned long']],
'Power' : [ 0x4, ['unsigned long']],
'PercentFrequency' : [ 0x8, ['unsigned char']],
'IncreaseLevel' : [ 0x9, ['unsigned char']],
'DecreaseLevel' : [ 0xa, ['unsigned char']],
'Type' : [ 0xb, ['unsigned char']],
'Control' : [ 0x10, ['unsigned long long']],
'Status' : [ 0x18, ['unsigned long long']],
'TotalHitCount' : [ 0x20, ['unsigned long']],
'DesiredCount' : [ 0x24, ['unsigned long']],
} ],
'_PPM_FFH_THROTTLE_STATE_INFO' : [ 0x20, {
'EnableLogging' : [ 0x0, ['unsigned char']],
'MismatchCount' : [ 0x4, ['unsigned long']],
'Initialized' : [ 0x8, ['unsigned char']],
'LastValue' : [ 0x10, ['unsigned long long']],
'LastLogTickCount' : [ 0x18, ['_LARGE_INTEGER']],
} ],
'_SECURITY_DESCRIPTOR_RELATIVE' : [ 0x14, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x4, ['unsigned long']],
'Group' : [ 0x8, ['unsigned long']],
'Sacl' : [ 0xc, ['unsigned long']],
'Dacl' : [ 0x10, ['unsigned long']],
} ],
'_CLIENT_ID64' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['unsigned long long']],
'UniqueThread' : [ 0x8, ['unsigned long long']],
} ],
'_KDPC_DATA' : [ 0x20, {
'DpcListHead' : [ 0x0, ['_LIST_ENTRY']],
'DpcLock' : [ 0x10, ['unsigned long long']],
'DpcQueueDepth' : [ 0x18, ['long']],
'DpcCount' : [ 0x1c, ['unsigned long']],
} ],
'_NAMED_PIPE_CREATE_PARAMETERS' : [ 0x28, {
'NamedPipeType' : [ 0x0, ['unsigned long']],
'ReadMode' : [ 0x4, ['unsigned long']],
'CompletionMode' : [ 0x8, ['unsigned long']],
'MaximumInstances' : [ 0xc, ['unsigned long']],
'InboundQuota' : [ 0x10, ['unsigned long']],
'OutboundQuota' : [ 0x14, ['unsigned long']],
'DefaultTimeout' : [ 0x18, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x20, ['unsigned char']],
} ],
'_CM_BIG_DATA' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_21de' : [ 0x10, {
'UserData' : [ 0x0, ['pointer64', ['void']]],
'Owner' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_21e0' : [ 0x10, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_RTLP_RANGE_LIST_ENTRY' : [ 0x38, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'Allocated' : [ 0x10, ['__unnamed_21de']],
'Merged' : [ 0x10, ['__unnamed_21e0']],
'Attributes' : [ 0x20, ['unsigned char']],
'PublicFlags' : [ 0x21, ['unsigned char']],
'PrivateFlags' : [ 0x22, ['unsigned short']],
'ListEntry' : [ 0x28, ['_LIST_ENTRY']],
} ],
'_ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY' : [ 0x18, {
'ListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Packet' : [ 0x8, ['pointer64', ['_IO_MINI_COMPLETION_PACKET_USER']]],
'Lookaside' : [ 0x10, ['pointer64', ['_ALPC_COMPLETION_PACKET_LOOKASIDE']]],
} ],
'__unnamed_21e8' : [ 0x2, {
'AsUSHORT' : [ 0x0, ['unsigned short']],
'AllowScaling' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Disabled' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 16, native_type='unsigned short')]],
} ],
'PROCESSOR_IDLESTATE_POLICY' : [ 0x20, {
'Revision' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['__unnamed_21e8']],
'PolicyCount' : [ 0x4, ['unsigned long']],
'Policy' : [ 0x8, ['array', 3, ['PROCESSOR_IDLESTATE_INFO']]],
} ],
'_ACTIVATION_CONTEXT_STACK' : [ 0x28, {
'ActiveFrame' : [ 0x0, ['pointer64', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'FrameListCache' : [ 0x8, ['_LIST_ENTRY']],
'Flags' : [ 0x18, ['unsigned long']],
'NextCookieSequenceNumber' : [ 0x1c, ['unsigned long']],
'StackId' : [ 0x20, ['unsigned long']],
} ],
'_MSUBSECTION' : [ 0x68, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'SubsectionBase' : [ 0x8, ['pointer64', ['_MMPTE']]],
'NextSubsection' : [ 0x10, ['pointer64', ['_SUBSECTION']]],
'NextMappedSubsection' : [ 0x10, ['pointer64', ['_MSUBSECTION']]],
'PtesInSubsection' : [ 0x18, ['unsigned long']],
'UnusedPtes' : [ 0x20, ['unsigned long']],
'GlobalPerSessionHead' : [ 0x20, ['pointer64', ['_MM_AVL_TABLE']]],
'u' : [ 0x28, ['__unnamed_1f31']],
'StartingSector' : [ 0x2c, ['unsigned long']],
'NumberOfFullSectors' : [ 0x30, ['unsigned long']],
'u1' : [ 0x38, ['__unnamed_1fd3']],
'LeftChild' : [ 0x40, ['pointer64', ['_MMSUBSECTION_NODE']]],
'RightChild' : [ 0x48, ['pointer64', ['_MMSUBSECTION_NODE']]],
'DereferenceList' : [ 0x50, ['_LIST_ENTRY']],
'NumberOfMappedViews' : [ 0x60, ['unsigned long long']],
} ],
'_RTL_DRIVE_LETTER_CURDIR' : [ 0x18, {
'Flags' : [ 0x0, ['unsigned short']],
'Length' : [ 0x2, ['unsigned short']],
'TimeStamp' : [ 0x4, ['unsigned long']],
'DosPath' : [ 0x8, ['_STRING']],
} ],
'_VIRTUAL_EFI_RUNTIME_SERVICES' : [ 0x70, {
'GetTime' : [ 0x0, ['unsigned long long']],
'SetTime' : [ 0x8, ['unsigned long long']],
'GetWakeupTime' : [ 0x10, ['unsigned long long']],
'SetWakeupTime' : [ 0x18, ['unsigned long long']],
'SetVirtualAddressMap' : [ 0x20, ['unsigned long long']],
'ConvertPointer' : [ 0x28, ['unsigned long long']],
'GetVariable' : [ 0x30, ['unsigned long long']],
'GetNextVariableName' : [ 0x38, ['unsigned long long']],
'SetVariable' : [ 0x40, ['unsigned long long']],
'GetNextHighMonotonicCount' : [ 0x48, ['unsigned long long']],
'ResetSystem' : [ 0x50, ['unsigned long long']],
'UpdateCapsule' : [ 0x58, ['unsigned long long']],
'QueryCapsuleCapabilities' : [ 0x60, ['unsigned long long']],
'QueryVariableInfo' : [ 0x68, ['unsigned long long']],
} ],
'_MI_SPECIAL_POOL_PTE_LIST' : [ 0x10, {
'FreePteHead' : [ 0x0, ['_MMPTE']],
'FreePteTail' : [ 0x8, ['_MMPTE']],
} ],
'SYSTEM_POWER_CAPABILITIES' : [ 0x4c, {
'PowerButtonPresent' : [ 0x0, ['unsigned char']],
'SleepButtonPresent' : [ 0x1, ['unsigned char']],
'LidPresent' : [ 0x2, ['unsigned char']],
'SystemS1' : [ 0x3, ['unsigned char']],
'SystemS2' : [ 0x4, ['unsigned char']],
'SystemS3' : [ 0x5, ['unsigned char']],
'SystemS4' : [ 0x6, ['unsigned char']],
'SystemS5' : [ 0x7, ['unsigned char']],
'HiberFilePresent' : [ 0x8, ['unsigned char']],
'FullWake' : [ 0x9, ['unsigned char']],
'VideoDimPresent' : [ 0xa, ['unsigned char']],
'ApmPresent' : [ 0xb, ['unsigned char']],
'UpsPresent' : [ 0xc, ['unsigned char']],
'ThermalControl' : [ 0xd, ['unsigned char']],
'ProcessorThrottle' : [ 0xe, ['unsigned char']],
'ProcessorMinThrottle' : [ 0xf, ['unsigned char']],
'ProcessorMaxThrottle' : [ 0x10, ['unsigned char']],
'FastSystemS4' : [ 0x11, ['unsigned char']],
'spare2' : [ 0x12, ['array', 3, ['unsigned char']]],
'DiskSpinDown' : [ 0x15, ['unsigned char']],
'spare3' : [ 0x16, ['array', 8, ['unsigned char']]],
'SystemBatteriesPresent' : [ 0x1e, ['unsigned char']],
'BatteriesAreShortTerm' : [ 0x1f, ['unsigned char']],
'BatteryScale' : [ 0x20, ['array', 3, ['BATTERY_REPORTING_SCALE']]],
'AcOnLineWake' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SoftLidWake' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'RtcWake' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MinDeviceWakeState' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DefaultLowLatencyWake' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_21fe' : [ 0x8, {
'ImageCommitment' : [ 0x0, ['unsigned long long']],
'CreatingProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
} ],
'__unnamed_2202' : [ 0x8, {
'ImageInformation' : [ 0x0, ['pointer64', ['_MI_SECTION_IMAGE_INFORMATION']]],
'FirstMappedVa' : [ 0x0, ['pointer64', ['void']]],
} ],
'_SEGMENT' : [ 0x50, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SegmentFlags' : [ 0xc, ['_SEGMENT_FLAGS']],
'NumberOfCommittedPages' : [ 0x10, ['unsigned long long']],
'SizeOfSegment' : [ 0x18, ['unsigned long long']],
'ExtendInfo' : [ 0x20, ['pointer64', ['_MMEXTEND_INFO']]],
'BasedAddress' : [ 0x20, ['pointer64', ['void']]],
'SegmentLock' : [ 0x28, ['_EX_PUSH_LOCK']],
'u1' : [ 0x30, ['__unnamed_21fe']],
'u2' : [ 0x38, ['__unnamed_2202']],
'PrototypePte' : [ 0x40, ['pointer64', ['_MMPTE']]],
'ThePtes' : [ 0x48, ['array', 1, ['_MMPTE']]],
} ],
'_DIAGNOSTIC_CONTEXT' : [ 0x20, {
'CallerType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'KernelRequester', 1: 'UserProcessRequester', 2: 'UserSharedServiceRequester'})]],
'Process' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'ServiceTag' : [ 0x10, ['unsigned long']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'ReasonSize' : [ 0x18, ['unsigned long long']],
} ],
'__unnamed_220b' : [ 0x4, {
'MissedEtwRegistration' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_220d' : [ 0x4, {
'Flags' : [ 0x0, ['__unnamed_220b']],
'Whole' : [ 0x0, ['unsigned long']],
} ],
'_VF_TARGET_VERIFIED_DRIVER_DATA' : [ 0x100, {
'SuspectDriverEntry' : [ 0x0, ['pointer64', ['_VF_SUSPECT_DRIVER_ENTRY']]],
'WMICallback' : [ 0x8, ['pointer64', ['void']]],
'EtwHandlesListHead' : [ 0x10, ['_LIST_ENTRY']],
'u1' : [ 0x20, ['__unnamed_220d']],
'Signature' : [ 0x28, ['unsigned long long']],
'PoolPageHeaders' : [ 0x30, ['_SLIST_HEADER']],
'PoolTrackers' : [ 0x40, ['_SLIST_HEADER']],
'CurrentPagedPoolAllocations' : [ 0x50, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x54, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x58, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x5c, ['unsigned long']],
'PagedBytes' : [ 0x60, ['unsigned long long']],
'NonPagedBytes' : [ 0x68, ['unsigned long long']],
'PeakPagedBytes' : [ 0x70, ['unsigned long long']],
'PeakNonPagedBytes' : [ 0x78, ['unsigned long long']],
'RaiseIrqls' : [ 0x80, ['unsigned long']],
'AcquireSpinLocks' : [ 0x84, ['unsigned long']],
'SynchronizeExecutions' : [ 0x88, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x8c, ['unsigned long']],
'AllocationsFailed' : [ 0x90, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x94, ['unsigned long']],
'LockedBytes' : [ 0x98, ['unsigned long long']],
'PeakLockedBytes' : [ 0xa0, ['unsigned long long']],
'MappedLockedBytes' : [ 0xa8, ['unsigned long long']],
'PeakMappedLockedBytes' : [ 0xb0, ['unsigned long long']],
'MappedIoSpaceBytes' : [ 0xb8, ['unsigned long long']],
'PeakMappedIoSpaceBytes' : [ 0xc0, ['unsigned long long']],
'PagesForMdlBytes' : [ 0xc8, ['unsigned long long']],
'PeakPagesForMdlBytes' : [ 0xd0, ['unsigned long long']],
'ContiguousMemoryBytes' : [ 0xd8, ['unsigned long long']],
'PeakContiguousMemoryBytes' : [ 0xe0, ['unsigned long long']],
'ContiguousMemoryListHead' : [ 0xe8, ['_LIST_ENTRY']],
} ],
'_PCAT_FIRMWARE_INFORMATION' : [ 0x4, {
'PlaceHolder' : [ 0x0, ['unsigned long']],
} ],
'_PRIVATE_CACHE_MAP' : [ 0x68, {
'NodeTypeCode' : [ 0x0, ['short']],
'Flags' : [ 0x0, ['_PRIVATE_CACHE_MAP_FLAGS']],
'UlongFlags' : [ 0x0, ['unsigned long']],
'ReadAheadMask' : [ 0x4, ['unsigned long']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'FileOffset1' : [ 0x10, ['_LARGE_INTEGER']],
'BeyondLastByte1' : [ 0x18, ['_LARGE_INTEGER']],
'FileOffset2' : [ 0x20, ['_LARGE_INTEGER']],
'BeyondLastByte2' : [ 0x28, ['_LARGE_INTEGER']],
'SequentialReadCount' : [ 0x30, ['unsigned long']],
'ReadAheadLength' : [ 0x34, ['unsigned long']],
'ReadAheadOffset' : [ 0x38, ['_LARGE_INTEGER']],
'ReadAheadBeyondLastByte' : [ 0x40, ['_LARGE_INTEGER']],
'ReadAheadSpinLock' : [ 0x48, ['unsigned long long']],
'PrivateLinks' : [ 0x50, ['_LIST_ENTRY']],
'ReadAheadWorkItem' : [ 0x60, ['pointer64', ['void']]],
} ],
'_CM_KEY_NODE' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'LastWriteTime' : [ 0x4, ['_LARGE_INTEGER']],
'Spare' : [ 0xc, ['unsigned long']],
'Parent' : [ 0x10, ['unsigned long']],
'SubKeyCounts' : [ 0x14, ['array', 2, ['unsigned long']]],
'SubKeyLists' : [ 0x1c, ['array', 2, ['unsigned long']]],
'ValueList' : [ 0x24, ['_CHILD_LIST']],
'ChildHiveReference' : [ 0x1c, ['_CM_KEY_REFERENCE']],
'Security' : [ 0x2c, ['unsigned long']],
'Class' : [ 0x30, ['unsigned long']],
'MaxNameLen' : [ 0x34, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'UserFlags' : [ 0x34, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'VirtControlFlags' : [ 0x34, ['BitField', dict(start_bit = 20, end_bit = 24, native_type='unsigned long')]],
'Debug' : [ 0x34, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'MaxClassLen' : [ 0x38, ['unsigned long']],
'MaxValueNameLen' : [ 0x3c, ['unsigned long']],
'MaxValueDataLen' : [ 0x40, ['unsigned long']],
'WorkVar' : [ 0x44, ['unsigned long']],
'NameLength' : [ 0x48, ['unsigned short']],
'ClassLength' : [ 0x4a, ['unsigned short']],
'Name' : [ 0x4c, ['array', 1, ['wchar']]],
} ],
'_TPM_BOOT_ENTROPY_LDR_RESULT' : [ 0x48, {
'Policy' : [ 0x0, ['unsigned long long']],
'ResultCode' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'TpmBootEntropyStructureUninitialized', 1: 'TpmBootEntropyDisabledByPolicy', 2: 'TpmBootEntropyNoTpmFound', 3: 'TpmBootEntropyTpmError', 4: 'TpmBootEntropySuccess'})]],
'ResultStatus' : [ 0xc, ['long']],
'Time' : [ 0x10, ['unsigned long long']],
'EntropyLength' : [ 0x18, ['unsigned long']],
'EntropyData' : [ 0x1c, ['array', 40, ['unsigned char']]],
} ],
'_RTL_HANDLE_TABLE' : [ 0x30, {
'MaximumNumberOfHandles' : [ 0x0, ['unsigned long']],
'SizeOfHandleTableEntry' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['array', 2, ['unsigned long']]],
'FreeHandles' : [ 0x10, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'CommittedHandles' : [ 0x18, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'UnCommittedHandles' : [ 0x20, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'MaxReservedHandles' : [ 0x28, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'_PTE_TRACKER' : [ 0x58, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Mdl' : [ 0x10, ['pointer64', ['_MDL']]],
'Count' : [ 0x18, ['unsigned long long']],
'SystemVa' : [ 0x20, ['pointer64', ['void']]],
'StartVa' : [ 0x28, ['pointer64', ['void']]],
'Offset' : [ 0x30, ['unsigned long']],
'Length' : [ 0x34, ['unsigned long']],
'Page' : [ 0x38, ['unsigned long long']],
'IoMapping' : [ 0x40, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Matched' : [ 0x40, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'CacheAttribute' : [ 0x40, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'Spare' : [ 0x40, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'CallingAddress' : [ 0x48, ['pointer64', ['void']]],
'CallersCaller' : [ 0x50, ['pointer64', ['void']]],
} ],
'_KTHREAD_COUNTERS' : [ 0x1a8, {
'WaitReasonBitMap' : [ 0x0, ['unsigned long long']],
'UserData' : [ 0x8, ['pointer64', ['_THREAD_PERFORMANCE_DATA']]],
'Flags' : [ 0x10, ['unsigned long']],
'ContextSwitches' : [ 0x14, ['unsigned long']],
'CycleTimeBias' : [ 0x18, ['unsigned long long']],
'HardwareCounters' : [ 0x20, ['unsigned long long']],
'HwCounter' : [ 0x28, ['array', 16, ['_COUNTER_READING']]],
} ],
'_SHARED_CACHE_MAP_LIST_CURSOR' : [ 0x18, {
'SharedCacheMapLinks' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x10, ['unsigned long']],
} ],
'_DBGKD_GET_VERSION64' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned char']],
'KdSecondaryVersion' : [ 0x5, ['unsigned char']],
'Flags' : [ 0x6, ['unsigned short']],
'MachineType' : [ 0x8, ['unsigned short']],
'MaxPacketType' : [ 0xa, ['unsigned char']],
'MaxStateChange' : [ 0xb, ['unsigned char']],
'MaxManipulate' : [ 0xc, ['unsigned char']],
'Simulation' : [ 0xd, ['unsigned char']],
'Unused' : [ 0xe, ['array', 1, ['unsigned short']]],
'KernBase' : [ 0x10, ['unsigned long long']],
'PsLoadedModuleList' : [ 0x18, ['unsigned long long']],
'DebuggerDataList' : [ 0x20, ['unsigned long long']],
} ],
'_STRING32' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['unsigned long']],
} ],
'_HMAP_ENTRY' : [ 0x20, {
'BlockAddress' : [ 0x0, ['unsigned long long']],
'BinAddress' : [ 0x8, ['unsigned long long']],
'CmView' : [ 0x10, ['pointer64', ['_CM_VIEW_OF_FILE']]],
'MemAlloc' : [ 0x18, ['unsigned long']],
} ],
'_RTL_ATOM_TABLE_ENTRY' : [ 0x18, {
'HashLink' : [ 0x0, ['pointer64', ['_RTL_ATOM_TABLE_ENTRY']]],
'HandleIndex' : [ 0x8, ['unsigned short']],
'Atom' : [ 0xa, ['unsigned short']],
'ReferenceCount' : [ 0xc, ['unsigned short']],
'Flags' : [ 0xe, ['unsigned char']],
'NameLength' : [ 0xf, ['unsigned char']],
'Name' : [ 0x10, ['array', 1, ['wchar']]],
} ],
'_TXN_PARAMETER_BLOCK' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'TxFsContext' : [ 0x2, ['unsigned short']],
'TransactionObject' : [ 0x8, ['pointer64', ['void']]],
} ],
'_LOADER_PERFORMANCE_DATA' : [ 0x10, {
'StartTime' : [ 0x0, ['unsigned long long']],
'EndTime' : [ 0x8, ['unsigned long long']],
} ],
'_PNP_DEVICE_ACTION_ENTRY' : [ 0x38, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'DeviceObject' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'RequestType' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'AssignResources', 1: 'ClearDeviceProblem', 2: 'ClearProblem', 3: 'ClearEjectProblem', 4: 'HaltDevice', 5: 'QueryPowerRelations', 6: 'Rebalance', 7: 'ReenumerateBootDevices', 8: 'ReenumerateDeviceOnly', 9: 'ReenumerateDeviceTree', 10: 'ReenumerateRootDevices', 11: 'RequeryDeviceState', 12: 'ResetDevice', 13: 'ResourceRequirementsChanged', 14: 'RestartEnumeration', 15: 'SetDeviceProblem', 16: 'StartDevice', 17: 'StartSystemDevicesPass0', 18: 'StartSystemDevicesPass1'})]],
'ReorderingBarrier' : [ 0x1c, ['unsigned char']],
'RequestArgument' : [ 0x20, ['unsigned long long']],
'CompletionEvent' : [ 0x28, ['pointer64', ['_KEVENT']]],
'CompletionStatus' : [ 0x30, ['pointer64', ['long']]],
} ],
'_COUNTER_READING' : [ 0x18, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PMCCounter', 1: 'MaxHardwareCounterType'})]],
'Index' : [ 0x4, ['unsigned long']],
'Start' : [ 0x8, ['unsigned long long']],
'Total' : [ 0x10, ['unsigned long long']],
} ],
'_MMSESSION' : [ 0x58, {
'SystemSpaceViewLock' : [ 0x0, ['_KGUARDED_MUTEX']],
'SystemSpaceViewLockPointer' : [ 0x38, ['pointer64', ['_KGUARDED_MUTEX']]],
'SystemSpaceViewTable' : [ 0x40, ['pointer64', ['_MMVIEW']]],
'SystemSpaceHashSize' : [ 0x48, ['unsigned long']],
'SystemSpaceHashEntries' : [ 0x4c, ['unsigned long']],
'SystemSpaceHashKey' : [ 0x50, ['unsigned long']],
'BitmapFailures' : [ 0x54, ['unsigned long']],
} ],
'_ETW_REG_ENTRY' : [ 0x50, {
'RegList' : [ 0x0, ['_LIST_ENTRY']],
'GuidEntry' : [ 0x10, ['pointer64', ['_ETW_GUID_ENTRY']]],
'Index' : [ 0x18, ['unsigned short']],
'Flags' : [ 0x1a, ['unsigned short']],
'EnableMask' : [ 0x1c, ['unsigned char']],
'SessionId' : [ 0x20, ['unsigned long']],
'ReplyQueue' : [ 0x20, ['pointer64', ['_ETW_REPLY_QUEUE']]],
'ReplySlot' : [ 0x20, ['array', 4, ['pointer64', ['_ETW_REG_ENTRY']]]],
'Process' : [ 0x40, ['pointer64', ['_EPROCESS']]],
'Callback' : [ 0x40, ['pointer64', ['void']]],
'CallbackContext' : [ 0x48, ['pointer64', ['void']]],
} ],
'_LPCP_PORT_OBJECT' : [ 0x100, {
'ConnectionPort' : [ 0x0, ['pointer64', ['_LPCP_PORT_OBJECT']]],
'ConnectedPort' : [ 0x8, ['pointer64', ['_LPCP_PORT_OBJECT']]],
'MsgQueue' : [ 0x10, ['_LPCP_PORT_QUEUE']],
'Creator' : [ 0x30, ['_CLIENT_ID']],
'ClientSectionBase' : [ 0x40, ['pointer64', ['void']]],
'ServerSectionBase' : [ 0x48, ['pointer64', ['void']]],
'PortContext' : [ 0x50, ['pointer64', ['void']]],
'ClientThread' : [ 0x58, ['pointer64', ['_ETHREAD']]],
'SecurityQos' : [ 0x60, ['_SECURITY_QUALITY_OF_SERVICE']],
'StaticSecurity' : [ 0x70, ['_SECURITY_CLIENT_CONTEXT']],
'LpcReplyChainHead' : [ 0xb8, ['_LIST_ENTRY']],
'LpcDataInfoChainHead' : [ 0xc8, ['_LIST_ENTRY']],
'ServerProcess' : [ 0xd8, ['pointer64', ['_EPROCESS']]],
'MappingProcess' : [ 0xd8, ['pointer64', ['_EPROCESS']]],
'MaxMessageLength' : [ 0xe0, ['unsigned short']],
'MaxConnectionInfoLength' : [ 0xe2, ['unsigned short']],
'Flags' : [ 0xe4, ['unsigned long']],
'WaitEvent' : [ 0xe8, ['_KEVENT']],
} ],
'_ARBITER_LIST_ENTRY' : [ 0x60, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'AlternativeCount' : [ 0x10, ['unsigned long']],
'Alternatives' : [ 0x18, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'PhysicalDeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'RequestSource' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Flags' : [ 0x2c, ['unsigned long']],
'WorkSpace' : [ 0x30, ['long long']],
'InterfaceType' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'SlotNumber' : [ 0x3c, ['unsigned long']],
'BusNumber' : [ 0x40, ['unsigned long']],
'Assignment' : [ 0x48, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
'SelectedAlternative' : [ 0x50, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'Result' : [ 0x58, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterResultSuccess', 1: 'ArbiterResultExternalConflict', 2: 'ArbiterResultNullRequest', -1: 'ArbiterResultUndefined'})]],
} ],
'_POP_DEVICE_SYS_STATE' : [ 0x2f8, {
'IrpMinor' : [ 0x0, ['unsigned char']],
'SystemState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SpinLock' : [ 0x8, ['unsigned long long']],
'Thread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'AbortEvent' : [ 0x18, ['pointer64', ['_KEVENT']]],
'ReadySemaphore' : [ 0x20, ['pointer64', ['_KSEMAPHORE']]],
'FinishedSemaphore' : [ 0x28, ['pointer64', ['_KSEMAPHORE']]],
'GetNewDeviceList' : [ 0x30, ['unsigned char']],
'Order' : [ 0x38, ['_PO_DEVICE_NOTIFY_ORDER']],
'Pending' : [ 0x2d0, ['_LIST_ENTRY']],
'Status' : [ 0x2e0, ['long']],
'FailedDevice' : [ 0x2e8, ['pointer64', ['_DEVICE_OBJECT']]],
'Waking' : [ 0x2f0, ['unsigned char']],
'Cancelled' : [ 0x2f1, ['unsigned char']],
'IgnoreErrors' : [ 0x2f2, ['unsigned char']],
'IgnoreNotImplemented' : [ 0x2f3, ['unsigned char']],
'TimeRefreshLockAcquired' : [ 0x2f4, ['unsigned char']],
} ],
'_SEGMENT_FLAGS' : [ 0x4, {
'TotalNumberOfPtes4132' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 10, native_type='unsigned long')]],
'ExtraSharedWowSubsections' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'LargePages' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WatchProto' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'DebugSymbolsLoaded' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'WriteCombined' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NoCache' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'FloppyMedia' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'DefaultProtectionMask' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 22, native_type='unsigned long')]],
'Binary32' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'ContainsDebug' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_VF_KE_CRITICAL_REGION_TRACE' : [ 0x40, {
'Thread' : [ 0x0, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x8, ['array', 7, ['pointer64', ['void']]]],
} ],
'_DIAGNOSTIC_BUFFER' : [ 0x28, {
'Size' : [ 0x0, ['unsigned long long']],
'CallerType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'KernelRequester', 1: 'UserProcessRequester', 2: 'UserSharedServiceRequester'})]],
'ProcessImageNameOffset' : [ 0x10, ['unsigned long long']],
'ProcessId' : [ 0x18, ['unsigned long']],
'ServiceTag' : [ 0x1c, ['unsigned long']],
'DeviceDescriptionOffset' : [ 0x10, ['unsigned long long']],
'DevicePathOffset' : [ 0x18, ['unsigned long long']],
'ReasonOffset' : [ 0x20, ['unsigned long long']],
} ],
'_EX_WORK_QUEUE' : [ 0x58, {
'WorkerQueue' : [ 0x0, ['_KQUEUE']],
'DynamicThreadCount' : [ 0x40, ['unsigned long']],
'WorkItemsProcessed' : [ 0x44, ['unsigned long']],
'WorkItemsProcessedLastPass' : [ 0x48, ['unsigned long']],
'QueueDepthLastPass' : [ 0x4c, ['unsigned long']],
'Info' : [ 0x50, ['EX_QUEUE_WORKER_INFO']],
} ],
'_CLIENT_ID32' : [ 0x8, {
'UniqueProcess' : [ 0x0, ['unsigned long']],
'UniqueThread' : [ 0x4, ['unsigned long']],
} ],
'_TEB32' : [ 0xfe4, {
'NtTib' : [ 0x0, ['_NT_TIB32']],
'EnvironmentPointer' : [ 0x1c, ['unsigned long']],
'ClientId' : [ 0x20, ['_CLIENT_ID32']],
'ActiveRpcHandle' : [ 0x28, ['unsigned long']],
'ThreadLocalStoragePointer' : [ 0x2c, ['unsigned long']],
'ProcessEnvironmentBlock' : [ 0x30, ['unsigned long']],
'LastErrorValue' : [ 0x34, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x38, ['unsigned long']],
'CsrClientThread' : [ 0x3c, ['unsigned long']],
'Win32ThreadInfo' : [ 0x40, ['unsigned long']],
'User32Reserved' : [ 0x44, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xac, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0xc0, ['unsigned long']],
'CurrentLocale' : [ 0xc4, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0xc8, ['unsigned long']],
'SystemReserved1' : [ 0xcc, ['array', 54, ['unsigned long']]],
'ExceptionCode' : [ 0x1a4, ['long']],
'ActivationContextStackPointer' : [ 0x1a8, ['unsigned long']],
'SpareBytes' : [ 0x1ac, ['array', 36, ['unsigned char']]],
'TxFsContext' : [ 0x1d0, ['unsigned long']],
'GdiTebBatch' : [ 0x1d4, ['_GDI_TEB_BATCH32']],
'RealClientId' : [ 0x6b4, ['_CLIENT_ID32']],
'GdiCachedProcessHandle' : [ 0x6bc, ['unsigned long']],
'GdiClientPID' : [ 0x6c0, ['unsigned long']],
'GdiClientTID' : [ 0x6c4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x6c8, ['unsigned long']],
'Win32ClientInfo' : [ 0x6cc, ['array', 62, ['unsigned long']]],
'glDispatchTable' : [ 0x7c4, ['array', 233, ['unsigned long']]],
'glReserved1' : [ 0xb68, ['array', 29, ['unsigned long']]],
'glReserved2' : [ 0xbdc, ['unsigned long']],
'glSectionInfo' : [ 0xbe0, ['unsigned long']],
'glSection' : [ 0xbe4, ['unsigned long']],
'glTable' : [ 0xbe8, ['unsigned long']],
'glCurrentRC' : [ 0xbec, ['unsigned long']],
'glContext' : [ 0xbf0, ['unsigned long']],
'LastStatusValue' : [ 0xbf4, ['unsigned long']],
'StaticUnicodeString' : [ 0xbf8, ['_STRING32']],
'StaticUnicodeBuffer' : [ 0xc00, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0xe0c, ['unsigned long']],
'TlsSlots' : [ 0xe10, ['array', 64, ['unsigned long']]],
'TlsLinks' : [ 0xf10, ['LIST_ENTRY32']],
'Vdm' : [ 0xf18, ['unsigned long']],
'ReservedForNtRpc' : [ 0xf1c, ['unsigned long']],
'DbgSsReserved' : [ 0xf20, ['array', 2, ['unsigned long']]],
'HardErrorMode' : [ 0xf28, ['unsigned long']],
'Instrumentation' : [ 0xf2c, ['array', 9, ['unsigned long']]],
'ActivityId' : [ 0xf50, ['_GUID']],
'SubProcessTag' : [ 0xf60, ['unsigned long']],
'EtwLocalData' : [ 0xf64, ['unsigned long']],
'EtwTraceData' : [ 0xf68, ['unsigned long']],
'WinSockData' : [ 0xf6c, ['unsigned long']],
'GdiBatchCount' : [ 0xf70, ['unsigned long']],
'CurrentIdealProcessor' : [ 0xf74, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0xf74, ['unsigned long']],
'ReservedPad0' : [ 0xf74, ['unsigned char']],
'ReservedPad1' : [ 0xf75, ['unsigned char']],
'ReservedPad2' : [ 0xf76, ['unsigned char']],
'IdealProcessor' : [ 0xf77, ['unsigned char']],
'GuaranteedStackBytes' : [ 0xf78, ['unsigned long']],
'ReservedForPerf' : [ 0xf7c, ['unsigned long']],
'ReservedForOle' : [ 0xf80, ['unsigned long']],
'WaitingOnLoaderLock' : [ 0xf84, ['unsigned long']],
'SavedPriorityState' : [ 0xf88, ['unsigned long']],
'SoftPatchPtr1' : [ 0xf8c, ['unsigned long']],
'ThreadPoolData' : [ 0xf90, ['unsigned long']],
'TlsExpansionSlots' : [ 0xf94, ['unsigned long']],
'MuiGeneration' : [ 0xf98, ['unsigned long']],
'IsImpersonating' : [ 0xf9c, ['unsigned long']],
'NlsCache' : [ 0xfa0, ['unsigned long']],
'pShimData' : [ 0xfa4, ['unsigned long']],
'HeapVirtualAffinity' : [ 0xfa8, ['unsigned long']],
'CurrentTransactionHandle' : [ 0xfac, ['unsigned long']],
'ActiveFrame' : [ 0xfb0, ['unsigned long']],
'FlsData' : [ 0xfb4, ['unsigned long']],
'PreferredLanguages' : [ 0xfb8, ['unsigned long']],
'UserPrefLanguages' : [ 0xfbc, ['unsigned long']],
'MergedPrefLanguages' : [ 0xfc0, ['unsigned long']],
'MuiImpersonation' : [ 0xfc4, ['unsigned long']],
'CrossTebFlags' : [ 0xfc8, ['unsigned short']],
'SpareCrossTebBits' : [ 0xfc8, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0xfca, ['unsigned short']],
'SafeThunkCall' : [ 0xfca, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0xfca, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0xfca, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0xfca, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0xfca, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0xfca, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0xfca, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0xfca, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0xfca, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0xfca, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0xfca, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0xfca, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0xfcc, ['unsigned long']],
'TxnScopeExitCallback' : [ 0xfd0, ['unsigned long']],
'TxnScopeContext' : [ 0xfd4, ['unsigned long']],
'LockCount' : [ 0xfd8, ['unsigned long']],
'SpareUlong0' : [ 0xfdc, ['unsigned long']],
'ResourceRetValue' : [ 0xfe0, ['unsigned long']],
} ],
'_CM_KEY_INDEX' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_VI_DEADLOCK_THREAD' : [ 0x38, {
'Thread' : [ 0x0, ['pointer64', ['_KTHREAD']]],
'CurrentSpinNode' : [ 0x8, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'CurrentOtherNode' : [ 0x10, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'ListEntry' : [ 0x18, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x18, ['_LIST_ENTRY']],
'NodeCount' : [ 0x28, ['unsigned long']],
'PagingCount' : [ 0x2c, ['unsigned long']],
'ThreadUsesEresources' : [ 0x30, ['unsigned char']],
} ],
'_PPM_IDLE_STATE' : [ 0x60, {
'DomainMembers' : [ 0x0, ['_KAFFINITY_EX']],
'IdleCheck' : [ 0x28, ['pointer64', ['void']]],
'IdleHandler' : [ 0x30, ['pointer64', ['void']]],
'HvConfig' : [ 0x38, ['unsigned long long']],
'Context' : [ 0x40, ['pointer64', ['void']]],
'Latency' : [ 0x48, ['unsigned long']],
'Power' : [ 0x4c, ['unsigned long']],
'TimeCheck' : [ 0x50, ['unsigned long']],
'StateFlags' : [ 0x54, ['unsigned long']],
'PromotePercent' : [ 0x58, ['unsigned char']],
'DemotePercent' : [ 0x59, ['unsigned char']],
'PromotePercentBase' : [ 0x5a, ['unsigned char']],
'DemotePercentBase' : [ 0x5b, ['unsigned char']],
'StateType' : [ 0x5c, ['unsigned char']],
} ],
'_KRESOURCEMANAGER' : [ 0x250, {
'NotificationAvailable' : [ 0x0, ['_KEVENT']],
'cookie' : [ 0x18, ['unsigned long']],
'State' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'KResourceManagerUninitialized', 1: 'KResourceManagerOffline', 2: 'KResourceManagerOnline'})]],
'Flags' : [ 0x20, ['unsigned long']],
'Mutex' : [ 0x28, ['_KMUTANT']],
'NamespaceLink' : [ 0x60, ['_KTMOBJECT_NAMESPACE_LINK']],
'RmId' : [ 0x88, ['_GUID']],
'NotificationQueue' : [ 0x98, ['_KQUEUE']],
'NotificationMutex' : [ 0xd8, ['_KMUTANT']],
'EnlistmentHead' : [ 0x110, ['_LIST_ENTRY']],
'EnlistmentCount' : [ 0x120, ['unsigned long']],
'NotificationRoutine' : [ 0x128, ['pointer64', ['void']]],
'Key' : [ 0x130, ['pointer64', ['void']]],
'ProtocolListHead' : [ 0x138, ['_LIST_ENTRY']],
'PendingPropReqListHead' : [ 0x148, ['_LIST_ENTRY']],
'CRMListEntry' : [ 0x158, ['_LIST_ENTRY']],
'Tm' : [ 0x168, ['pointer64', ['_KTM']]],
'Description' : [ 0x170, ['_UNICODE_STRING']],
'Enlistments' : [ 0x180, ['_KTMOBJECT_NAMESPACE']],
'CompletionBinding' : [ 0x228, ['_KRESOURCEMANAGER_COMPLETION_BINDING']],
} ],
'_GDI_TEB_BATCH64' : [ 0x4e8, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'__unnamed_2292' : [ 0x4, {
'NodeSize' : [ 0x0, ['unsigned long']],
'UseLookaside' : [ 0x0, ['unsigned long']],
} ],
'_VF_AVL_TREE' : [ 0x40, {
'Lock' : [ 0x0, ['long']],
'NodeToFree' : [ 0x8, ['pointer64', ['void']]],
'NodeRangeSize' : [ 0x10, ['unsigned long long']],
'NodeCount' : [ 0x18, ['unsigned long long']],
'Tables' : [ 0x20, ['pointer64', ['_VF_AVL_TABLE']]],
'TablesNo' : [ 0x28, ['unsigned long']],
'u1' : [ 0x2c, ['__unnamed_2292']],
} ],
'_FILE_NETWORK_OPEN_INFORMATION' : [ 0x38, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'AllocationSize' : [ 0x20, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x28, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x30, ['unsigned long']],
} ],
'_WHEA_MEMORY_ERROR_SECTION_VALIDBITS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PhysicalAddress' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'PhysicalAddressMask' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Node' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Card' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Module' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Bank' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'Device' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Row' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Column' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'BitPosition' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'RequesterId' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'ResponderId' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long long')]],
'TargetId' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 64, native_type='unsigned long long')]],
'ValidBits' : [ 0x0, ['unsigned long long']],
} ],
'_RELATION_LIST_ENTRY' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'MaxCount' : [ 0x4, ['unsigned long']],
'Devices' : [ 0x8, ['array', 1, ['pointer64', ['_DEVICE_OBJECT']]]],
} ],
'_HEAP_FREE_ENTRY_EXTRA' : [ 0x4, {
'TagIndex' : [ 0x0, ['unsigned short']],
'FreeBackTraceIndex' : [ 0x2, ['unsigned short']],
} ],
'_VI_DEADLOCK_GLOBALS' : [ 0x8168, {
'TimeAcquire' : [ 0x0, ['long long']],
'TimeRelease' : [ 0x8, ['long long']],
'ResourceDatabase' : [ 0x10, ['pointer64', ['_LIST_ENTRY']]],
'ResourceDatabaseCount' : [ 0x18, ['unsigned long long']],
'ResourceAddressRange' : [ 0x20, ['array', 1023, ['_VF_ADDRESS_RANGE']]],
'ThreadDatabase' : [ 0x4010, ['pointer64', ['_LIST_ENTRY']]],
'ThreadDatabaseCount' : [ 0x4018, ['unsigned long long']],
'ThreadAddressRange' : [ 0x4020, ['array', 1023, ['_VF_ADDRESS_RANGE']]],
'AllocationFailures' : [ 0x8010, ['unsigned long']],
'NodesTrimmedBasedOnAge' : [ 0x8014, ['unsigned long']],
'NodesTrimmedBasedOnCount' : [ 0x8018, ['unsigned long']],
'NodesSearched' : [ 0x801c, ['unsigned long']],
'MaxNodesSearched' : [ 0x8020, ['unsigned long']],
'SequenceNumber' : [ 0x8024, ['unsigned long']],
'RecursionDepthLimit' : [ 0x8028, ['unsigned long']],
'SearchedNodesLimit' : [ 0x802c, ['unsigned long']],
'DepthLimitHits' : [ 0x8030, ['unsigned long']],
'SearchLimitHits' : [ 0x8034, ['unsigned long']],
'ABC_ACB_Skipped' : [ 0x8038, ['unsigned long']],
'OutOfOrderReleases' : [ 0x803c, ['unsigned long']],
'NodesReleasedOutOfOrder' : [ 0x8040, ['unsigned long']],
'TotalReleases' : [ 0x8044, ['unsigned long']],
'RootNodesDeleted' : [ 0x8048, ['unsigned long']],
'ForgetHistoryCounter' : [ 0x804c, ['unsigned long']],
'Instigator' : [ 0x8050, ['pointer64', ['void']]],
'NumberOfParticipants' : [ 0x8058, ['unsigned long']],
'Participant' : [ 0x8060, ['array', 32, ['pointer64', ['_VI_DEADLOCK_NODE']]]],
'ChildrenCountWatermark' : [ 0x8160, ['long']],
} ],
'_KTM' : [ 0x3c0, {
'cookie' : [ 0x0, ['unsigned long']],
'Mutex' : [ 0x8, ['_KMUTANT']],
'State' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'KKtmUninitialized', 1: 'KKtmInitialized', 2: 'KKtmRecovering', 3: 'KKtmOnline', 4: 'KKtmRecoveryFailed', 5: 'KKtmOffline'})]],
'NamespaceLink' : [ 0x48, ['_KTMOBJECT_NAMESPACE_LINK']],
'TmIdentity' : [ 0x70, ['_GUID']],
'Flags' : [ 0x80, ['unsigned long']],
'VolatileFlags' : [ 0x84, ['unsigned long']],
'LogFileName' : [ 0x88, ['_UNICODE_STRING']],
'LogFileObject' : [ 0x98, ['pointer64', ['_FILE_OBJECT']]],
'MarshallingContext' : [ 0xa0, ['pointer64', ['void']]],
'LogManagementContext' : [ 0xa8, ['pointer64', ['void']]],
'Transactions' : [ 0xb0, ['_KTMOBJECT_NAMESPACE']],
'ResourceManagers' : [ 0x158, ['_KTMOBJECT_NAMESPACE']],
'LsnOrderedMutex' : [ 0x200, ['_KMUTANT']],
'LsnOrderedList' : [ 0x238, ['_LIST_ENTRY']],
'CommitVirtualClock' : [ 0x248, ['_LARGE_INTEGER']],
'CommitVirtualClockMutex' : [ 0x250, ['_FAST_MUTEX']],
'BaseLsn' : [ 0x288, ['_CLS_LSN']],
'CurrentReadLsn' : [ 0x290, ['_CLS_LSN']],
'LastRecoveredLsn' : [ 0x298, ['_CLS_LSN']],
'TmRmHandle' : [ 0x2a0, ['pointer64', ['void']]],
'TmRm' : [ 0x2a8, ['pointer64', ['_KRESOURCEMANAGER']]],
'LogFullNotifyEvent' : [ 0x2b0, ['_KEVENT']],
'CheckpointWorkItem' : [ 0x2c8, ['_WORK_QUEUE_ITEM']],
'CheckpointTargetLsn' : [ 0x2e8, ['_CLS_LSN']],
'LogFullCompletedWorkItem' : [ 0x2f0, ['_WORK_QUEUE_ITEM']],
'LogWriteResource' : [ 0x310, ['_ERESOURCE']],
'LogFlags' : [ 0x378, ['unsigned long']],
'LogFullStatus' : [ 0x37c, ['long']],
'RecoveryStatus' : [ 0x380, ['long']],
'LastCheckBaseLsn' : [ 0x388, ['_CLS_LSN']],
'RestartOrderedList' : [ 0x390, ['_LIST_ENTRY']],
'OfflineWorkItem' : [ 0x3a0, ['_WORK_QUEUE_ITEM']],
} ],
'_CONFIGURATION_COMPONENT' : [ 0x28, {
'Class' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SystemClass', 1: 'ProcessorClass', 2: 'CacheClass', 3: 'AdapterClass', 4: 'ControllerClass', 5: 'PeripheralClass', 6: 'MemoryClass', 7: 'MaximumClass'})]],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'ArcSystem', 1: 'CentralProcessor', 2: 'FloatingPointProcessor', 3: 'PrimaryIcache', 4: 'PrimaryDcache', 5: 'SecondaryIcache', 6: 'SecondaryDcache', 7: 'SecondaryCache', 8: 'EisaAdapter', 9: 'TcAdapter', 10: 'ScsiAdapter', 11: 'DtiAdapter', 12: 'MultiFunctionAdapter', 13: 'DiskController', 14: 'TapeController', 15: 'CdromController', 16: 'WormController', 17: 'SerialController', 18: 'NetworkController', 19: 'DisplayController', 20: 'ParallelController', 21: 'PointerController', 22: 'KeyboardController', 23: 'AudioController', 24: 'OtherController', 25: 'DiskPeripheral', 26: 'FloppyDiskPeripheral', 27: 'TapePeripheral', 28: 'ModemPeripheral', 29: 'MonitorPeripheral', 30: 'PrinterPeripheral', 31: 'PointerPeripheral', 32: 'KeyboardPeripheral', 33: 'TerminalPeripheral', 34: 'OtherPeripheral', 35: 'LinePeripheral', 36: 'NetworkPeripheral', 37: 'SystemMemory', 38: 'DockingInformation', 39: 'RealModeIrqRoutingTable', 40: 'RealModePCIEnumeration', 41: 'MaximumType'})]],
'Flags' : [ 0x8, ['_DEVICE_FLAGS']],
'Version' : [ 0xc, ['unsigned short']],
'Revision' : [ 0xe, ['unsigned short']],
'Key' : [ 0x10, ['unsigned long']],
'AffinityMask' : [ 0x14, ['unsigned long']],
'Group' : [ 0x14, ['unsigned short']],
'GroupIndex' : [ 0x16, ['unsigned short']],
'ConfigurationDataLength' : [ 0x18, ['unsigned long']],
'IdentifierLength' : [ 0x1c, ['unsigned long']],
'Identifier' : [ 0x20, ['pointer64', ['unsigned char']]],
} ],
'_KTRANSACTION' : [ 0x2d8, {
'OutcomeEvent' : [ 0x0, ['_KEVENT']],
'cookie' : [ 0x18, ['unsigned long']],
'Mutex' : [ 0x20, ['_KMUTANT']],
'TreeTx' : [ 0x58, ['pointer64', ['_KTRANSACTION']]],
'GlobalNamespaceLink' : [ 0x60, ['_KTMOBJECT_NAMESPACE_LINK']],
'TmNamespaceLink' : [ 0x88, ['_KTMOBJECT_NAMESPACE_LINK']],
'UOW' : [ 0xb0, ['_GUID']],
'State' : [ 0xc0, ['Enumeration', dict(target = 'long', choices = {0: 'KTransactionUninitialized', 1: 'KTransactionActive', 2: 'KTransactionPreparing', 3: 'KTransactionPrepared', 4: 'KTransactionInDoubt', 5: 'KTransactionCommitted', 6: 'KTransactionAborted', 7: 'KTransactionDelegated', 8: 'KTransactionPrePreparing', 9: 'KTransactionForgotten', 10: 'KTransactionRecovering', 11: 'KTransactionPrePrepared'})]],
'Flags' : [ 0xc4, ['unsigned long']],
'EnlistmentHead' : [ 0xc8, ['_LIST_ENTRY']],
'EnlistmentCount' : [ 0xd8, ['unsigned long']],
'RecoverableEnlistmentCount' : [ 0xdc, ['unsigned long']],
'PrePrepareRequiredEnlistmentCount' : [ 0xe0, ['unsigned long']],
'PrepareRequiredEnlistmentCount' : [ 0xe4, ['unsigned long']],
'OutcomeRequiredEnlistmentCount' : [ 0xe8, ['unsigned long']],
'PendingResponses' : [ 0xec, ['unsigned long']],
'SuperiorEnlistment' : [ 0xf0, ['pointer64', ['_KENLISTMENT']]],
'LastLsn' : [ 0xf8, ['_CLS_LSN']],
'PromotedEntry' : [ 0x100, ['_LIST_ENTRY']],
'PromoterTransaction' : [ 0x110, ['pointer64', ['_KTRANSACTION']]],
'PromotePropagation' : [ 0x118, ['pointer64', ['void']]],
'IsolationLevel' : [ 0x120, ['unsigned long']],
'IsolationFlags' : [ 0x124, ['unsigned long']],
'Timeout' : [ 0x128, ['_LARGE_INTEGER']],
'Description' : [ 0x130, ['_UNICODE_STRING']],
'RollbackThread' : [ 0x140, ['pointer64', ['_KTHREAD']]],
'RollbackWorkItem' : [ 0x148, ['_WORK_QUEUE_ITEM']],
'RollbackDpc' : [ 0x168, ['_KDPC']],
'RollbackTimer' : [ 0x1a8, ['_KTIMER']],
'LsnOrderedEntry' : [ 0x1e8, ['_LIST_ENTRY']],
'Outcome' : [ 0x1f8, ['Enumeration', dict(target = 'long', choices = {0: 'KTxOutcomeUninitialized', 1: 'KTxOutcomeUndetermined', 2: 'KTxOutcomeCommitted', 3: 'KTxOutcomeAborted', 4: 'KTxOutcomeUnavailable'})]],
'Tm' : [ 0x200, ['pointer64', ['_KTM']]],
'CommitReservation' : [ 0x208, ['long long']],
'TransactionHistory' : [ 0x210, ['array', 10, ['_KTRANSACTION_HISTORY']]],
'TransactionHistoryCount' : [ 0x260, ['unsigned long']],
'DTCPrivateInformation' : [ 0x268, ['pointer64', ['void']]],
'DTCPrivateInformationLength' : [ 0x270, ['unsigned long']],
'DTCPrivateInformationMutex' : [ 0x278, ['_KMUTANT']],
'PromotedTxSelfHandle' : [ 0x2b0, ['pointer64', ['void']]],
'PendingPromotionCount' : [ 0x2b8, ['unsigned long']],
'PromotionCompletedEvent' : [ 0x2c0, ['_KEVENT']],
} ],
'_PRIVATE_CACHE_MAP_FLAGS' : [ 0x4, {
'DontUse' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'ReadAheadActive' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'ReadAheadEnabled' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'PagePriority' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 21, native_type='unsigned long')]],
'Available' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 32, native_type='unsigned long')]],
} ],
'_CM_KCB_UOW' : [ 0x60, {
'TransactionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'KCBLock' : [ 0x10, ['pointer64', ['_CM_INTENT_LOCK']]],
'KeyLock' : [ 0x18, ['pointer64', ['_CM_INTENT_LOCK']]],
'KCBListEntry' : [ 0x20, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x30, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'Transaction' : [ 0x38, ['pointer64', ['_CM_TRANS']]],
'UoWState' : [ 0x40, ['unsigned long']],
'ActionType' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'UoWAddThisKey', 1: 'UoWAddChildKey', 2: 'UoWDeleteThisKey', 3: 'UoWDeleteChildKey', 4: 'UoWSetValueNew', 5: 'UoWSetValueExisting', 6: 'UoWDeleteValue', 7: 'UoWSetKeyUserFlags', 8: 'UoWSetLastWriteTime', 9: 'UoWSetSecurityDescriptor', 10: 'UoWRenameSubKey', 11: 'UoWRenameOldSubKey', 12: 'UoWRenameNewSubKey', 13: 'UoWIsolation', 14: 'UoWInvalid'})]],
'StorageType' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'Stable', 1: 'Volatile', 2: 'InvalidStorage'})]],
'ChildKCB' : [ 0x50, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'VolatileKeyCell' : [ 0x50, ['unsigned long']],
'OldValueCell' : [ 0x50, ['unsigned long']],
'NewValueCell' : [ 0x54, ['unsigned long']],
'UserFlags' : [ 0x50, ['unsigned long']],
'LastWriteTime' : [ 0x50, ['_LARGE_INTEGER']],
'TxSecurityCell' : [ 0x50, ['unsigned long']],
'OldChildKCB' : [ 0x50, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NewChildKCB' : [ 0x58, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'OtherChildKCB' : [ 0x50, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'ThisVolatileKeyCell' : [ 0x58, ['unsigned long']],
} ],
'_MMPTE_TRANSITION' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 64, native_type='unsigned long long')]],
} ],
'_KREQUEST_PACKET' : [ 0x20, {
'CurrentPacket' : [ 0x0, ['array', 3, ['pointer64', ['void']]]],
'WorkerRoutine' : [ 0x18, ['pointer64', ['void']]],
} ],
'_VF_WATCHDOG_IRP' : [ 0x20, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Irp' : [ 0x10, ['pointer64', ['_IRP']]],
'DueTickCount' : [ 0x18, ['unsigned long']],
'Inserted' : [ 0x1c, ['unsigned char']],
'TrackedStackLocation' : [ 0x1d, ['unsigned char']],
'CancelTimeoutTicks' : [ 0x1e, ['unsigned short']],
} ],
'_flags' : [ 0x1, {
'Removable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'GroupAssigned' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'GroupCommitted' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'GroupAssignmentFixed' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Fill' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
} ],
'__unnamed_22df' : [ 0x8, {
'Head' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long long')]],
'Tail' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 48, native_type='unsigned long long')]],
'ActiveThreadCount' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_22e1' : [ 0x8, {
's1' : [ 0x0, ['__unnamed_22df']],
'Value' : [ 0x0, ['unsigned long long']],
} ],
'_ALPC_COMPLETION_LIST_STATE' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_22e1']],
} ],
'_PSP_CPU_SHARE_CAPTURED_WEIGHT_DATA' : [ 0x8, {
'CapturedCpuShareWeight' : [ 0x0, ['unsigned long']],
'CapturedTotalWeight' : [ 0x4, ['unsigned long']],
'CombinedData' : [ 0x0, ['long long']],
} ],
'_CM_NAME_HASH' : [ 0x18, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer64', ['_CM_NAME_HASH']]],
'NameLength' : [ 0x10, ['unsigned short']],
'Name' : [ 0x12, ['array', 1, ['wchar']]],
} ],
'_PROC_IDLE_STATE_BUCKET' : [ 0x20, {
'TotalTime' : [ 0x0, ['unsigned long long']],
'MinTime' : [ 0x8, ['unsigned long long']],
'MaxTime' : [ 0x10, ['unsigned long long']],
'Count' : [ 0x18, ['unsigned long']],
} ],
'_MMSECURE_FLAGS' : [ 0x4, {
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoWrite' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 12, native_type='unsigned long')]],
} ],
'_PO_IRP_QUEUE' : [ 0x10, {
'CurrentIrp' : [ 0x0, ['pointer64', ['_IRP']]],
'PendingIrpList' : [ 0x8, ['pointer64', ['_IRP']]],
} ],
'__unnamed_22f4' : [ 0x4, {
'Active' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OnlyTryAcquireUsed' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ReleasedOutOfOrder' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SequenceNumber' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'Whole' : [ 0x0, ['unsigned long']],
} ],
'_VI_DEADLOCK_NODE' : [ 0xd0, {
'Parent' : [ 0x0, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'ChildrenList' : [ 0x8, ['_LIST_ENTRY']],
'SiblingsList' : [ 0x18, ['_LIST_ENTRY']],
'ResourceList' : [ 0x28, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x28, ['_LIST_ENTRY']],
'Root' : [ 0x38, ['pointer64', ['_VI_DEADLOCK_RESOURCE']]],
'ThreadEntry' : [ 0x40, ['pointer64', ['_VI_DEADLOCK_THREAD']]],
'u1' : [ 0x48, ['__unnamed_22f4']],
'ChildrenCount' : [ 0x4c, ['long']],
'StackTrace' : [ 0x50, ['array', 8, ['pointer64', ['void']]]],
'ParentStackTrace' : [ 0x90, ['array', 8, ['pointer64', ['void']]]],
} ],
'PROCESSOR_IDLESTATE_INFO' : [ 0x8, {
'TimeCheck' : [ 0x0, ['unsigned long']],
'DemotePercent' : [ 0x4, ['unsigned char']],
'PromotePercent' : [ 0x5, ['unsigned char']],
'Spare' : [ 0x6, ['array', 2, ['unsigned char']]],
} ],
'_KTMOBJECT_NAMESPACE' : [ 0xa8, {
'Table' : [ 0x0, ['_RTL_AVL_TABLE']],
'Mutex' : [ 0x68, ['_KMUTANT']],
'LinksOffset' : [ 0xa0, ['unsigned short']],
'GuidOffset' : [ 0xa2, ['unsigned short']],
'Expired' : [ 0xa4, ['unsigned char']],
} ],
'_LPCP_PORT_QUEUE' : [ 0x20, {
'NonPagedPortQueue' : [ 0x0, ['pointer64', ['_LPCP_NONPAGED_PORT_QUEUE']]],
'Semaphore' : [ 0x8, ['pointer64', ['_KSEMAPHORE']]],
'ReceiveHead' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_CM_KEY_REFERENCE' : [ 0x10, {
'KeyCell' : [ 0x0, ['unsigned long']],
'KeyHive' : [ 0x8, ['pointer64', ['_HHIVE']]],
} ],
'SYSTEM_POWER_LEVEL' : [ 0x18, {
'Enable' : [ 0x0, ['unsigned char']],
'Spare' : [ 0x1, ['array', 3, ['unsigned char']]],
'BatteryLevel' : [ 0x4, ['unsigned long']],
'PowerPolicy' : [ 0x8, ['POWER_ACTION_POLICY']],
'MinSystemState' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_OBJECT_DUMP_CONTROL' : [ 0x10, {
'Stream' : [ 0x0, ['pointer64', ['void']]],
'Detail' : [ 0x8, ['unsigned long']],
} ],
'_VF_ADDRESS_RANGE' : [ 0x10, {
'Start' : [ 0x0, ['pointer64', ['unsigned char']]],
'End' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_OBJECT_SYMBOLIC_LINK' : [ 0x20, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LinkTarget' : [ 0x8, ['_UNICODE_STRING']],
'DosDeviceDriveIndex' : [ 0x18, ['unsigned long']],
} ],
'_LPCP_NONPAGED_PORT_QUEUE' : [ 0x28, {
'Semaphore' : [ 0x0, ['_KSEMAPHORE']],
'BackPointer' : [ 0x20, ['pointer64', ['_LPCP_PORT_OBJECT']]],
} ],
'_KRESOURCEMANAGER_COMPLETION_BINDING' : [ 0x28, {
'NotificationListHead' : [ 0x0, ['_LIST_ENTRY']],
'Port' : [ 0x10, ['pointer64', ['void']]],
'Key' : [ 0x18, ['unsigned long long']],
'BindingProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
} ],
'_VF_TRACKER' : [ 0x10, {
'TrackerFlags' : [ 0x0, ['unsigned long']],
'TrackerSize' : [ 0x4, ['unsigned long']],
'TrackerIndex' : [ 0x8, ['unsigned long']],
'TraceDepth' : [ 0xc, ['unsigned long']],
} ],
'_CALL_PERFORMANCE_DATA' : [ 0x408, {
'SpinLock' : [ 0x0, ['unsigned long long']],
'HashTable' : [ 0x8, ['array', 64, ['_LIST_ENTRY']]],
} ],
'_ARBITER_ALTERNATIVE' : [ 0x40, {
'Minimum' : [ 0x0, ['unsigned long long']],
'Maximum' : [ 0x8, ['unsigned long long']],
'Length' : [ 0x10, ['unsigned long long']],
'Alignment' : [ 0x18, ['unsigned long long']],
'Priority' : [ 0x20, ['long']],
'Flags' : [ 0x24, ['unsigned long']],
'Descriptor' : [ 0x28, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'Reserved' : [ 0x30, ['array', 3, ['unsigned long']]],
} ],
'_WHEA_ERROR_STATUS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['unsigned long long']],
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long long')]],
'Address' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long long')]],
'Control' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long long')]],
'Data' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long long')]],
'Responder' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long long')]],
'Requester' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long long')]],
'FirstError' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long long')]],
'Overflow' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 64, native_type='unsigned long long')]],
} ],
'_WHEA_PERSISTENCE_INFO' : [ 0x8, {
'Signature' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Length' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 40, native_type='unsigned long long')]],
'Identifier' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 56, native_type='unsigned long long')]],
'Attributes' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 58, native_type='unsigned long long')]],
'DoNotLog' : [ 0x0, ['BitField', dict(start_bit = 58, end_bit = 59, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 59, end_bit = 64, native_type='unsigned long long')]],
'AsULONGLONG' : [ 0x0, ['unsigned long long']],
} ],
'_MI_SECTION_IMAGE_INFORMATION' : [ 0x48, {
'ExportedImageInformation' : [ 0x0, ['_SECTION_IMAGE_INFORMATION']],
'InternalImageInformation' : [ 0x40, ['_MI_EXTRA_IMAGE_INFORMATION']],
} ],
'_HEAP_USERDATA_HEADER' : [ 0x20, {
'SFreeListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'SubSegment' : [ 0x0, ['pointer64', ['_HEAP_SUBSEGMENT']]],
'Reserved' : [ 0x8, ['pointer64', ['void']]],
'SizeIndex' : [ 0x10, ['unsigned long long']],
'Signature' : [ 0x18, ['unsigned long long']],
} ],
'_STRING64' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['unsigned long long']],
} ],
'_STACK_TABLE' : [ 0x8088, {
'NumStackTraces' : [ 0x0, ['unsigned short']],
'TraceCapacity' : [ 0x2, ['unsigned short']],
'StackTrace' : [ 0x8, ['array', 16, ['pointer64', ['_OBJECT_REF_TRACE']]]],
'StackTableHash' : [ 0x88, ['array', 16381, ['unsigned short']]],
} ],
'_TOKEN_CONTROL' : [ 0x28, {
'TokenId' : [ 0x0, ['_LUID']],
'AuthenticationId' : [ 0x8, ['_LUID']],
'ModifiedId' : [ 0x10, ['_LUID']],
'TokenSource' : [ 0x18, ['_TOKEN_SOURCE']],
} ],
'_DEFERRED_WRITE' : [ 0x48, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'BytesToWrite' : [ 0x10, ['unsigned long']],
'DeferredWriteLinks' : [ 0x18, ['_LIST_ENTRY']],
'Event' : [ 0x28, ['pointer64', ['_KEVENT']]],
'PostRoutine' : [ 0x30, ['pointer64', ['void']]],
'Context1' : [ 0x38, ['pointer64', ['void']]],
'Context2' : [ 0x40, ['pointer64', ['void']]],
} ],
'_ARBITER_ORDERING_LIST' : [ 0x10, {
'Count' : [ 0x0, ['unsigned short']],
'Maximum' : [ 0x2, ['unsigned short']],
'Orderings' : [ 0x8, ['pointer64', ['_ARBITER_ORDERING']]],
} ],
'_SECTION_IMAGE_INFORMATION' : [ 0x40, {
'TransferAddress' : [ 0x0, ['pointer64', ['void']]],
'ZeroBits' : [ 0x8, ['unsigned long']],
'MaximumStackSize' : [ 0x10, ['unsigned long long']],
'CommittedStackSize' : [ 0x18, ['unsigned long long']],
'SubSystemType' : [ 0x20, ['unsigned long']],
'SubSystemMinorVersion' : [ 0x24, ['unsigned short']],
'SubSystemMajorVersion' : [ 0x26, ['unsigned short']],
'SubSystemVersion' : [ 0x24, ['unsigned long']],
'GpValue' : [ 0x28, ['unsigned long']],
'ImageCharacteristics' : [ 0x2c, ['unsigned short']],
'DllCharacteristics' : [ 0x2e, ['unsigned short']],
'Machine' : [ 0x30, ['unsigned short']],
'ImageContainsCode' : [ 0x32, ['unsigned char']],
'ImageFlags' : [ 0x33, ['unsigned char']],
'ComPlusNativeReady' : [ 0x33, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ComPlusILOnly' : [ 0x33, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ImageDynamicallyRelocated' : [ 0x33, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'ImageMappedFlat' : [ 0x33, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Reserved' : [ 0x33, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'LoaderFlags' : [ 0x34, ['unsigned long']],
'ImageFileSize' : [ 0x38, ['unsigned long']],
'CheckSum' : [ 0x3c, ['unsigned long']],
} ],
'_VF_AVL_TABLE' : [ 0x70, {
'RtlTable' : [ 0x0, ['_RTL_AVL_TABLE']],
'ReservedNode' : [ 0x68, ['pointer64', ['_VF_AVL_TREE_NODE']]],
} ],
'_TOKEN_AUDIT_POLICY' : [ 0x1b, {
'PerUserPolicy' : [ 0x0, ['array', 27, ['unsigned char']]],
} ],
'__unnamed_234a' : [ 0x10, {
'EndingOffset' : [ 0x0, ['pointer64', ['_LARGE_INTEGER']]],
'ResourceToRelease' : [ 0x8, ['pointer64', ['pointer64', ['_ERESOURCE']]]],
} ],
'__unnamed_234c' : [ 0x8, {
'ResourceToRelease' : [ 0x0, ['pointer64', ['_ERESOURCE']]],
} ],
'__unnamed_2350' : [ 0x8, {
'SyncType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SyncTypeOther', 1: 'SyncTypeCreateSection'})]],
'PageProtection' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_2354' : [ 0x10, {
'NotificationType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NotifyTypeCreate', 1: 'NotifyTypeRetired'})]],
'SafeToRecurse' : [ 0x8, ['unsigned char']],
} ],
'__unnamed_2356' : [ 0x28, {
'Argument1' : [ 0x0, ['pointer64', ['void']]],
'Argument2' : [ 0x8, ['pointer64', ['void']]],
'Argument3' : [ 0x10, ['pointer64', ['void']]],
'Argument4' : [ 0x18, ['pointer64', ['void']]],
'Argument5' : [ 0x20, ['pointer64', ['void']]],
} ],
'_FS_FILTER_PARAMETERS' : [ 0x28, {
'AcquireForModifiedPageWriter' : [ 0x0, ['__unnamed_234a']],
'ReleaseForModifiedPageWriter' : [ 0x0, ['__unnamed_234c']],
'AcquireForSectionSynchronization' : [ 0x0, ['__unnamed_2350']],
'NotifyStreamFileObject' : [ 0x0, ['__unnamed_2354']],
'Others' : [ 0x0, ['__unnamed_2356']],
} ],
'_PROFILE_PARAMETER_BLOCK' : [ 0x10, {
'Status' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'DockingState' : [ 0x4, ['unsigned short']],
'Capabilities' : [ 0x6, ['unsigned short']],
'DockID' : [ 0x8, ['unsigned long']],
'SerialNumber' : [ 0xc, ['unsigned long']],
} ],
'_COMPRESSED_DATA_INFO' : [ 0xc, {
'CompressionFormatAndEngine' : [ 0x0, ['unsigned short']],
'CompressionUnitShift' : [ 0x2, ['unsigned char']],
'ChunkShift' : [ 0x3, ['unsigned char']],
'ClusterShift' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'NumberOfChunks' : [ 0x6, ['unsigned short']],
'CompressedChunkSizes' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_POP_HIBER_CONTEXT' : [ 0x110, {
'WriteToFile' : [ 0x0, ['unsigned char']],
'ReserveLoaderMemory' : [ 0x1, ['unsigned char']],
'ReserveFreeMemory' : [ 0x2, ['unsigned char']],
'Reset' : [ 0x3, ['unsigned char']],
'HiberFlags' : [ 0x4, ['unsigned char']],
'WroteHiberFile' : [ 0x5, ['unsigned char']],
'MapFrozen' : [ 0x6, ['unsigned char']],
'MemoryMap' : [ 0x8, ['_RTL_BITMAP']],
'DiscardedMemoryPages' : [ 0x18, ['_RTL_BITMAP']],
'ClonedRanges' : [ 0x28, ['_LIST_ENTRY']],
'ClonedRangeCount' : [ 0x38, ['unsigned long']],
'NextCloneRange' : [ 0x40, ['pointer64', ['_LIST_ENTRY']]],
'NextPreserve' : [ 0x48, ['unsigned long long']],
'LoaderMdl' : [ 0x50, ['pointer64', ['_MDL']]],
'AllocatedMdl' : [ 0x58, ['pointer64', ['_MDL']]],
'PagesOut' : [ 0x60, ['unsigned long long']],
'IoPages' : [ 0x68, ['pointer64', ['void']]],
'IoPagesCount' : [ 0x70, ['unsigned long']],
'CurrentMcb' : [ 0x78, ['pointer64', ['void']]],
'DumpStack' : [ 0x80, ['pointer64', ['_DUMP_STACK_CONTEXT']]],
'WakeState' : [ 0x88, ['pointer64', ['_KPROCESSOR_STATE']]],
'PreferredIoWriteSize' : [ 0x90, ['unsigned long']],
'IoProgress' : [ 0x94, ['unsigned long']],
'HiberVa' : [ 0x98, ['unsigned long long']],
'HiberPte' : [ 0xa0, ['_LARGE_INTEGER']],
'Status' : [ 0xa8, ['long']],
'MemoryImage' : [ 0xb0, ['pointer64', ['PO_MEMORY_IMAGE']]],
'CompressionWorkspace' : [ 0xb8, ['pointer64', ['void']]],
'CompressedWriteBuffer' : [ 0xc0, ['pointer64', ['unsigned char']]],
'CompressedWriteBufferSize' : [ 0xc8, ['unsigned long']],
'MaxCompressedOutputSize' : [ 0xcc, ['unsigned long']],
'PerformanceStats' : [ 0xd0, ['pointer64', ['unsigned long']]],
'CompressionBlock' : [ 0xd8, ['pointer64', ['void']]],
'DmaIO' : [ 0xe0, ['pointer64', ['void']]],
'TemporaryHeap' : [ 0xe8, ['pointer64', ['void']]],
'BootLoaderLogMdl' : [ 0xf0, ['pointer64', ['_MDL']]],
'FirmwareRuntimeInformationMdl' : [ 0xf8, ['pointer64', ['_MDL']]],
'ResumeContext' : [ 0x100, ['pointer64', ['void']]],
'ResumeContextPages' : [ 0x108, ['unsigned long']],
} ],
'_OBJECT_REF_TRACE' : [ 0x80, {
'StackTrace' : [ 0x0, ['array', 16, ['pointer64', ['void']]]],
} ],
'_OBJECT_NAME_INFORMATION' : [ 0x10, {
'Name' : [ 0x0, ['_UNICODE_STRING']],
} ],
'_KDESCRIPTOR' : [ 0x10, {
'Pad' : [ 0x0, ['array', 3, ['unsigned short']]],
'Limit' : [ 0x6, ['unsigned short']],
'Base' : [ 0x8, ['pointer64', ['void']]],
} ],
'_PCW_COUNTER_INFORMATION' : [ 0x10, {
'CounterMask' : [ 0x0, ['unsigned long long']],
'InstanceMask' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
} ],
'_DUMP_STACK_CONTEXT' : [ 0x110, {
'Init' : [ 0x0, ['_DUMP_INITIALIZATION_CONTEXT']],
'PartitionOffset' : [ 0xa0, ['_LARGE_INTEGER']],
'DumpPointers' : [ 0xa8, ['pointer64', ['void']]],
'PointersLength' : [ 0xb0, ['unsigned long']],
'ModulePrefix' : [ 0xb8, ['pointer64', ['unsigned short']]],
'DriverList' : [ 0xc0, ['_LIST_ENTRY']],
'InitMsg' : [ 0xd0, ['_STRING']],
'ProgMsg' : [ 0xe0, ['_STRING']],
'DoneMsg' : [ 0xf0, ['_STRING']],
'FileObject' : [ 0x100, ['pointer64', ['void']]],
'UsageType' : [ 0x108, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'_FILE_STANDARD_INFORMATION' : [ 0x18, {
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x8, ['_LARGE_INTEGER']],
'NumberOfLinks' : [ 0x10, ['unsigned long']],
'DeletePending' : [ 0x14, ['unsigned char']],
'Directory' : [ 0x15, ['unsigned char']],
} ],
'_POP_SHUTDOWN_BUG_CHECK' : [ 0x40, {
'ThreadHandle' : [ 0x0, ['pointer64', ['void']]],
'ThreadId' : [ 0x8, ['pointer64', ['void']]],
'ProcessId' : [ 0x10, ['pointer64', ['void']]],
'Code' : [ 0x18, ['unsigned long']],
'Parameter1' : [ 0x20, ['unsigned long long']],
'Parameter2' : [ 0x28, ['unsigned long long']],
'Parameter3' : [ 0x30, ['unsigned long long']],
'Parameter4' : [ 0x38, ['unsigned long long']],
} ],
'_MI_EXTRA_IMAGE_INFORMATION' : [ 0x8, {
'SizeOfHeaders' : [ 0x0, ['unsigned long']],
'SizeOfImage' : [ 0x4, ['unsigned long']],
} ],
'_PCW_MASK_INFORMATION' : [ 0x28, {
'CounterMask' : [ 0x0, ['unsigned long long']],
'InstanceMask' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
'InstanceId' : [ 0x10, ['unsigned long']],
'CollectMultiple' : [ 0x14, ['unsigned char']],
'Buffer' : [ 0x18, ['pointer64', ['_PCW_BUFFER']]],
'CancelEvent' : [ 0x20, ['pointer64', ['_KEVENT']]],
} ],
'_RTL_HANDLE_TABLE_ENTRY' : [ 0x8, {
'Flags' : [ 0x0, ['unsigned long']],
'NextFree' : [ 0x0, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'__unnamed_237c' : [ 0x20, {
'TestAllocation' : [ 0x0, ['_ARBITER_TEST_ALLOCATION_PARAMETERS']],
'RetestAllocation' : [ 0x0, ['_ARBITER_RETEST_ALLOCATION_PARAMETERS']],
'BootAllocation' : [ 0x0, ['_ARBITER_BOOT_ALLOCATION_PARAMETERS']],
'QueryAllocatedResources' : [ 0x0, ['_ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS']],
'QueryConflict' : [ 0x0, ['_ARBITER_QUERY_CONFLICT_PARAMETERS']],
'QueryArbitrate' : [ 0x0, ['_ARBITER_QUERY_ARBITRATE_PARAMETERS']],
'AddReserved' : [ 0x0, ['_ARBITER_ADD_RESERVED_PARAMETERS']],
} ],
'_ARBITER_PARAMETERS' : [ 0x20, {
'Parameters' : [ 0x0, ['__unnamed_237c']],
} ],
'__unnamed_2380' : [ 0x8, {
'idxRecord' : [ 0x0, ['unsigned long']],
'cidContainer' : [ 0x4, ['unsigned long']],
} ],
'_CLS_LSN' : [ 0x8, {
'offset' : [ 0x0, ['__unnamed_2380']],
'ullOffset' : [ 0x0, ['unsigned long long']],
} ],
'_NT_TIB32' : [ 0x1c, {
'ExceptionList' : [ 0x0, ['unsigned long']],
'StackBase' : [ 0x4, ['unsigned long']],
'StackLimit' : [ 0x8, ['unsigned long']],
'SubSystemTib' : [ 0xc, ['unsigned long']],
'FiberData' : [ 0x10, ['unsigned long']],
'Version' : [ 0x10, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x14, ['unsigned long']],
'Self' : [ 0x18, ['unsigned long']],
} ],
'POWER_ACTION_POLICY' : [ 0xc, {
'Action' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'Flags' : [ 0x4, ['unsigned long']],
'EventCode' : [ 0x8, ['unsigned long']],
} ],
'PO_MEMORY_IMAGE' : [ 0x128, {
'Signature' : [ 0x0, ['unsigned long']],
'ImageType' : [ 0x4, ['unsigned long']],
'CheckSum' : [ 0x8, ['unsigned long']],
'LengthSelf' : [ 0xc, ['unsigned long']],
'PageSelf' : [ 0x10, ['unsigned long long']],
'PageSize' : [ 0x18, ['unsigned long']],
'SystemTime' : [ 0x20, ['_LARGE_INTEGER']],
'InterruptTime' : [ 0x28, ['unsigned long long']],
'FeatureFlags' : [ 0x30, ['unsigned long']],
'HiberFlags' : [ 0x34, ['unsigned char']],
'spare' : [ 0x35, ['array', 3, ['unsigned char']]],
'NoHiberPtes' : [ 0x38, ['unsigned long']],
'HiberVa' : [ 0x40, ['unsigned long long']],
'HiberPte' : [ 0x48, ['_LARGE_INTEGER']],
'NoFreePages' : [ 0x50, ['unsigned long']],
'FreeMapCheck' : [ 0x54, ['unsigned long']],
'WakeCheck' : [ 0x58, ['unsigned long']],
'FirstTablePage' : [ 0x60, ['unsigned long long']],
'PerfInfo' : [ 0x68, ['_PO_HIBER_PERF']],
'FirmwareRuntimeInformationPages' : [ 0xc0, ['unsigned long']],
'FirmwareRuntimeInformation' : [ 0xc8, ['array', 1, ['unsigned long long']]],
'NoBootLoaderLogPages' : [ 0xd0, ['unsigned long']],
'BootLoaderLogPages' : [ 0xd8, ['array', 8, ['unsigned long long']]],
'NotUsed' : [ 0x118, ['unsigned long']],
'ResumeContextCheck' : [ 0x11c, ['unsigned long']],
'ResumeContextPages' : [ 0x120, ['unsigned long']],
} ],
'EX_QUEUE_WORKER_INFO' : [ 0x4, {
'QueueDisabled' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'MakeThreadsAsNecessary' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'WaitMode' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WorkerCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'QueueWorkerInfo' : [ 0x0, ['long']],
} ],
'BATTERY_REPORTING_SCALE' : [ 0x8, {
'Granularity' : [ 0x0, ['unsigned long']],
'Capacity' : [ 0x4, ['unsigned long']],
} ],
'_CURDIR' : [ 0x18, {
'DosPath' : [ 0x0, ['_UNICODE_STRING']],
'Handle' : [ 0x10, ['pointer64', ['void']]],
} ],
'_PO_HIBER_PERF' : [ 0x58, {
'IoTicks' : [ 0x0, ['unsigned long long']],
'InitTicks' : [ 0x8, ['unsigned long long']],
'CopyTicks' : [ 0x10, ['unsigned long long']],
'ElapsedTicks' : [ 0x18, ['unsigned long long']],
'CompressTicks' : [ 0x20, ['unsigned long long']],
'ResumeAppTime' : [ 0x28, ['unsigned long long']],
'HiberFileResumeTime' : [ 0x30, ['unsigned long long']],
'BytesCopied' : [ 0x38, ['unsigned long long']],
'PagesProcessed' : [ 0x40, ['unsigned long long']],
'PagesWritten' : [ 0x48, ['unsigned long']],
'DumpCount' : [ 0x4c, ['unsigned long']],
'FileRuns' : [ 0x50, ['unsigned long']],
} ],
'_DEVICE_FLAGS' : [ 0x4, {
'Failed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Removable' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ConsoleIn' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ConsoleOut' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Input' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Output' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
} ],
'_RTL_BALANCED_LINKS' : [ 0x20, {
'Parent' : [ 0x0, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'LeftChild' : [ 0x8, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'RightChild' : [ 0x10, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'Balance' : [ 0x18, ['unsigned char']],
'Reserved' : [ 0x19, ['array', 3, ['unsigned char']]],
} ],
'_MMVIEW' : [ 0x30, {
'Entry' : [ 0x0, ['unsigned long long']],
'Writable' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'ControlArea' : [ 0x8, ['pointer64', ['_CONTROL_AREA']]],
'ViewLinks' : [ 0x10, ['_LIST_ENTRY']],
'SessionViewVa' : [ 0x20, ['pointer64', ['void']]],
'SessionId' : [ 0x28, ['unsigned long']],
} ],
'_MM_SESSION_SPACE_FLAGS' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeletePending' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PoolInitialized' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DynamicVaInitialized' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'WsInitialized' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'PoolDestroyed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ObjectInitialized' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Filler' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
} ],
'_HEADLESS_LOADER_BLOCK' : [ 0x40, {
'UsedBiosSettings' : [ 0x0, ['unsigned char']],
'DataBits' : [ 0x1, ['unsigned char']],
'StopBits' : [ 0x2, ['unsigned char']],
'Parity' : [ 0x3, ['unsigned char']],
'BaudRate' : [ 0x4, ['unsigned long']],
'PortNumber' : [ 0x8, ['unsigned long']],
'PortAddress' : [ 0x10, ['pointer64', ['unsigned char']]],
'PciDeviceId' : [ 0x18, ['unsigned short']],
'PciVendorId' : [ 0x1a, ['unsigned short']],
'PciBusNumber' : [ 0x1c, ['unsigned char']],
'PciBusSegment' : [ 0x1e, ['unsigned short']],
'PciSlotNumber' : [ 0x20, ['unsigned char']],
'PciFunctionNumber' : [ 0x21, ['unsigned char']],
'PciFlags' : [ 0x24, ['unsigned long']],
'SystemGUID' : [ 0x28, ['_GUID']],
'IsMMIODevice' : [ 0x38, ['unsigned char']],
'TerminalType' : [ 0x39, ['unsigned char']],
} ],
'__unnamed_23aa' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned long']],
'CheckSum' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_23ac' : [ 0x10, {
'DiskId' : [ 0x0, ['_GUID']],
} ],
'__unnamed_23ae' : [ 0x10, {
'Mbr' : [ 0x0, ['__unnamed_23aa']],
'Gpt' : [ 0x0, ['__unnamed_23ac']],
} ],
'_DUMP_INITIALIZATION_CONTEXT' : [ 0xa0, {
'Length' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'MemoryBlock' : [ 0x8, ['pointer64', ['void']]],
'CommonBuffer' : [ 0x10, ['array', 2, ['pointer64', ['void']]]],
'PhysicalAddress' : [ 0x20, ['array', 2, ['_LARGE_INTEGER']]],
'StallRoutine' : [ 0x30, ['pointer64', ['void']]],
'OpenRoutine' : [ 0x38, ['pointer64', ['void']]],
'WriteRoutine' : [ 0x40, ['pointer64', ['void']]],
'FinishRoutine' : [ 0x48, ['pointer64', ['void']]],
'AdapterObject' : [ 0x50, ['pointer64', ['_ADAPTER_OBJECT']]],
'MappedRegisterBase' : [ 0x58, ['pointer64', ['void']]],
'PortConfiguration' : [ 0x60, ['pointer64', ['void']]],
'CrashDump' : [ 0x68, ['unsigned char']],
'MaximumTransferSize' : [ 0x6c, ['unsigned long']],
'CommonBufferSize' : [ 0x70, ['unsigned long']],
'TargetAddress' : [ 0x78, ['pointer64', ['void']]],
'WritePendingRoutine' : [ 0x80, ['pointer64', ['void']]],
'PartitionStyle' : [ 0x88, ['unsigned long']],
'DiskInfo' : [ 0x8c, ['__unnamed_23ae']],
} ],
'_MI_SYSTEM_PTE_TYPE' : [ 0x48, {
'Bitmap' : [ 0x0, ['_RTL_BITMAP']],
'Flags' : [ 0x10, ['unsigned long']],
'Hint' : [ 0x14, ['unsigned long']],
'BasePte' : [ 0x18, ['pointer64', ['_MMPTE']]],
'FailureCount' : [ 0x20, ['pointer64', ['unsigned long']]],
'Vm' : [ 0x28, ['pointer64', ['_MMSUPPORT']]],
'TotalSystemPtes' : [ 0x30, ['long']],
'TotalFreeSystemPtes' : [ 0x34, ['long']],
'CachedPteCount' : [ 0x38, ['long']],
'PteFailures' : [ 0x3c, ['unsigned long']],
'SpinLock' : [ 0x40, ['unsigned long long']],
'GlobalMutex' : [ 0x40, ['pointer64', ['_KGUARDED_MUTEX']]],
} ],
'_NETWORK_LOADER_BLOCK' : [ 0x20, {
'DHCPServerACK' : [ 0x0, ['pointer64', ['unsigned char']]],
'DHCPServerACKLength' : [ 0x8, ['unsigned long']],
'BootServerReplyPacket' : [ 0x10, ['pointer64', ['unsigned char']]],
'BootServerReplyPacketLength' : [ 0x18, ['unsigned long']],
} ],
'_CM_KEY_SECURITY' : [ 0x28, {
'Signature' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'Flink' : [ 0x4, ['unsigned long']],
'Blink' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['unsigned long']],
'DescriptorLength' : [ 0x10, ['unsigned long']],
'Descriptor' : [ 0x14, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_PO_DEVICE_NOTIFY_ORDER' : [ 0x298, {
'Locked' : [ 0x0, ['unsigned char']],
'WarmEjectPdoPointer' : [ 0x8, ['pointer64', ['pointer64', ['_DEVICE_OBJECT']]]],
'OrderLevel' : [ 0x10, ['array', 9, ['_PO_NOTIFY_ORDER_LEVEL']]],
} ],
'_ARBITER_CONFLICT_INFO' : [ 0x18, {
'OwningObject' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'Start' : [ 0x8, ['unsigned long long']],
'End' : [ 0x10, ['unsigned long long']],
} ],
'_PO_NOTIFY_ORDER_LEVEL' : [ 0x48, {
'DeviceCount' : [ 0x0, ['unsigned long']],
'ActiveCount' : [ 0x4, ['unsigned long']],
'WaitSleep' : [ 0x8, ['_LIST_ENTRY']],
'ReadySleep' : [ 0x18, ['_LIST_ENTRY']],
'ReadyS0' : [ 0x28, ['_LIST_ENTRY']],
'WaitS0' : [ 0x38, ['_LIST_ENTRY']],
} ],
'_THREAD_PERFORMANCE_DATA' : [ 0x1c0, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'ProcessorNumber' : [ 0x4, ['_PROCESSOR_NUMBER']],
'ContextSwitches' : [ 0x8, ['unsigned long']],
'HwCountersCount' : [ 0xc, ['unsigned long']],
'UpdateCount' : [ 0x10, ['unsigned long long']],
'WaitReasonBitMap' : [ 0x18, ['unsigned long long']],
'HardwareCounters' : [ 0x20, ['unsigned long long']],
'CycleTime' : [ 0x28, ['_COUNTER_READING']],
'HwCounters' : [ 0x40, ['array', 16, ['_COUNTER_READING']]],
} ],
'_GDI_TEB_BATCH32' : [ 0x4e0, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x4, ['unsigned long']],
'Buffer' : [ 0x8, ['array', 310, ['unsigned long']]],
} ],
'_ETW_REPLY_QUEUE' : [ 0x48, {
'Queue' : [ 0x0, ['_KQUEUE']],
'EventsLost' : [ 0x40, ['long']],
} ],
'_ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS' : [ 0x8, {
'AllocatedResources' : [ 0x0, ['pointer64', ['pointer64', ['_CM_PARTIAL_RESOURCE_LIST']]]],
} ],
'_RTL_ACTIVATION_CONTEXT_STACK_FRAME' : [ 0x18, {
'Previous' : [ 0x0, ['pointer64', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'ActivationContext' : [ 0x8, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'Flags' : [ 0x10, ['unsigned long']],
} ],
'_ARBITER_ORDERING' : [ 0x10, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
} ],
'_RTL_AVL_TABLE' : [ 0x68, {
'BalancedRoot' : [ 0x0, ['_RTL_BALANCED_LINKS']],
'OrderedPointer' : [ 0x20, ['pointer64', ['void']]],
'WhichOrderedElement' : [ 0x28, ['unsigned long']],
'NumberGenericTableElements' : [ 0x2c, ['unsigned long']],
'DepthOfTree' : [ 0x30, ['unsigned long']],
'RestartKey' : [ 0x38, ['pointer64', ['_RTL_BALANCED_LINKS']]],
'DeleteCount' : [ 0x40, ['unsigned long']],
'CompareRoutine' : [ 0x48, ['pointer64', ['void']]],
'AllocateRoutine' : [ 0x50, ['pointer64', ['void']]],
'FreeRoutine' : [ 0x58, ['pointer64', ['void']]],
'TableContext' : [ 0x60, ['pointer64', ['void']]],
} ],
'_KTRANSACTION_HISTORY' : [ 0x8, {
'RecordType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {1: 'KTMOH_CommitTransaction_Result', 2: 'KTMOH_RollbackTransaction_Result'})]],
'Payload' : [ 0x4, ['unsigned long']],
} ],
'LIST_ENTRY64' : [ 0x10, {
'Flink' : [ 0x0, ['unsigned long long']],
'Blink' : [ 0x8, ['unsigned long long']],
} ],
'LIST_ENTRY32' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long']],
'Blink' : [ 0x4, ['unsigned long']],
} ],
'_KUSER_SHARED_DATA' : [ 0x5f0, {
'TickCountLowDeprecated' : [ 0x0, ['unsigned long']],
'TickCountMultiplier' : [ 0x4, ['unsigned long']],
'InterruptTime' : [ 0x8, ['_KSYSTEM_TIME']],
'SystemTime' : [ 0x14, ['_KSYSTEM_TIME']],
'TimeZoneBias' : [ 0x20, ['_KSYSTEM_TIME']],
'ImageNumberLow' : [ 0x2c, ['unsigned short']],
'ImageNumberHigh' : [ 0x2e, ['unsigned short']],
'NtSystemRoot' : [ 0x30, ['array', 260, ['wchar']]],
'MaxStackTraceDepth' : [ 0x238, ['unsigned long']],
'CryptoExponent' : [ 0x23c, ['unsigned long']],
'TimeZoneId' : [ 0x240, ['unsigned long']],
'LargePageMinimum' : [ 0x244, ['unsigned long']],
'Reserved2' : [ 0x248, ['array', 7, ['unsigned long']]],
'NtProductType' : [ 0x264, ['Enumeration', dict(target = 'long', choices = {1: 'NtProductWinNt', 2: 'NtProductLanManNt', 3: 'NtProductServer'})]],
'ProductTypeIsValid' : [ 0x268, ['unsigned char']],
'NtMajorVersion' : [ 0x26c, ['unsigned long']],
'NtMinorVersion' : [ 0x270, ['unsigned long']],
'ProcessorFeatures' : [ 0x274, ['array', 64, ['unsigned char']]],
'Reserved1' : [ 0x2b4, ['unsigned long']],
'Reserved3' : [ 0x2b8, ['unsigned long']],
'TimeSlip' : [ 0x2bc, ['unsigned long']],
'AlternativeArchitecture' : [ 0x2c0, ['Enumeration', dict(target = 'long', choices = {0: 'StandardDesign', 1: 'NEC98x86', 2: 'EndAlternatives'})]],
'AltArchitecturePad' : [ 0x2c4, ['array', 1, ['unsigned long']]],
'SystemExpirationDate' : [ 0x2c8, ['_LARGE_INTEGER']],
'SuiteMask' : [ 0x2d0, ['unsigned long']],
'KdDebuggerEnabled' : [ 0x2d4, ['unsigned char']],
'NXSupportPolicy' : [ 0x2d5, ['unsigned char']],
'ActiveConsoleId' : [ 0x2d8, ['unsigned long']],
'DismountCount' : [ 0x2dc, ['unsigned long']],
'ComPlusPackage' : [ 0x2e0, ['unsigned long']],
'LastSystemRITEventTickCount' : [ 0x2e4, ['unsigned long']],
'NumberOfPhysicalPages' : [ 0x2e8, ['unsigned long']],
'SafeBootMode' : [ 0x2ec, ['unsigned char']],
'TscQpcData' : [ 0x2ed, ['unsigned char']],
'TscQpcEnabled' : [ 0x2ed, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'TscQpcSpareFlag' : [ 0x2ed, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'TscQpcShift' : [ 0x2ed, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'TscQpcPad' : [ 0x2ee, ['array', 2, ['unsigned char']]],
'SharedDataFlags' : [ 0x2f0, ['unsigned long']],
'DbgErrorPortPresent' : [ 0x2f0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DbgElevationEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'DbgVirtEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DbgInstallerDetectEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'DbgSystemDllRelocated' : [ 0x2f0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DbgDynProcessorEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'DbgSEHValidationEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SpareBits' : [ 0x2f0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
'DataFlagsPad' : [ 0x2f4, ['array', 1, ['unsigned long']]],
'TestRetInstruction' : [ 0x2f8, ['unsigned long long']],
'SystemCall' : [ 0x300, ['unsigned long']],
'SystemCallReturn' : [ 0x304, ['unsigned long']],
'SystemCallPad' : [ 0x308, ['array', 3, ['unsigned long long']]],
'TickCount' : [ 0x320, ['_KSYSTEM_TIME']],
'TickCountQuad' : [ 0x320, ['unsigned long long']],
'ReservedTickCountOverlay' : [ 0x320, ['array', 3, ['unsigned long']]],
'TickCountPad' : [ 0x32c, ['array', 1, ['unsigned long']]],
'Cookie' : [ 0x330, ['unsigned long']],
'CookiePad' : [ 0x334, ['array', 1, ['unsigned long']]],
'ConsoleSessionForegroundProcessId' : [ 0x338, ['long long']],
'Wow64SharedInformation' : [ 0x340, ['array', 16, ['unsigned long']]],
'UserModeGlobalLogger' : [ 0x380, ['array', 16, ['unsigned short']]],
'ImageFileExecutionOptions' : [ 0x3a0, ['unsigned long']],
'LangGenerationCount' : [ 0x3a4, ['unsigned long']],
'Reserved5' : [ 0x3a8, ['unsigned long long']],
'InterruptTimeBias' : [ 0x3b0, ['unsigned long long']],
'TscQpcBias' : [ 0x3b8, ['unsigned long long']],
'ActiveProcessorCount' : [ 0x3c0, ['unsigned long']],
'ActiveGroupCount' : [ 0x3c4, ['unsigned short']],
'Reserved4' : [ 0x3c6, ['unsigned short']],
'AitSamplingValue' : [ 0x3c8, ['unsigned long']],
'AppCompatFlag' : [ 0x3cc, ['unsigned long']],
'SystemDllNativeRelocation' : [ 0x3d0, ['unsigned long long']],
'SystemDllWowRelocation' : [ 0x3d8, ['unsigned long']],
'XStatePad' : [ 0x3dc, ['array', 1, ['unsigned long']]],
'XState' : [ 0x3e0, ['_XSTATE_CONFIGURATION']],
} ],
'__unnamed_1043' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
} ],
'_ULARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
'u' : [ 0x0, ['__unnamed_1043']],
'QuadPart' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_1047' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_LARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
'u' : [ 0x0, ['__unnamed_1047']],
'QuadPart' : [ 0x0, ['long long']],
} ],
'__unnamed_105f' : [ 0x4, {
'LongFunction' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Persistent' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Private' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_1061' : [ 0x4, {
'Flags' : [ 0x0, ['unsigned long']],
's' : [ 0x0, ['__unnamed_105f']],
} ],
'_TP_CALLBACK_ENVIRON_V3' : [ 0x48, {
'Version' : [ 0x0, ['unsigned long']],
'Pool' : [ 0x8, ['pointer64', ['_TP_POOL']]],
'CleanupGroup' : [ 0x10, ['pointer64', ['_TP_CLEANUP_GROUP']]],
'CleanupGroupCancelCallback' : [ 0x18, ['pointer64', ['void']]],
'RaceDll' : [ 0x20, ['pointer64', ['void']]],
'ActivationContext' : [ 0x28, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'FinalizationCallback' : [ 0x30, ['pointer64', ['void']]],
'u' : [ 0x38, ['__unnamed_1061']],
'CallbackPriority' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'TP_CALLBACK_PRIORITY_HIGH', 1: 'TP_CALLBACK_PRIORITY_NORMAL', 2: 'TP_CALLBACK_PRIORITY_LOW', 3: 'TP_CALLBACK_PRIORITY_INVALID'})]],
'Size' : [ 0x40, ['unsigned long']],
} ],
'_TP_TASK' : [ 0x38, {
'Callbacks' : [ 0x0, ['pointer64', ['_TP_TASK_CALLBACKS']]],
'NumaNode' : [ 0x8, ['unsigned long']],
'IdealProcessor' : [ 0xc, ['unsigned char']],
'PostGuard' : [ 0x10, ['_TP_NBQ_GUARD']],
'NBQNode' : [ 0x30, ['pointer64', ['void']]],
} ],
'_TP_TASK_CALLBACKS' : [ 0x10, {
'ExecuteCallback' : [ 0x0, ['pointer64', ['void']]],
'Unposted' : [ 0x8, ['pointer64', ['void']]],
} ],
'_TP_DIRECT' : [ 0x10, {
'Callback' : [ 0x0, ['pointer64', ['void']]],
'NumaNode' : [ 0x8, ['unsigned long']],
'IdealProcessor' : [ 0xc, ['unsigned char']],
} ],
'_TEB' : [ 0x1818, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'EnvironmentPointer' : [ 0x38, ['pointer64', ['void']]],
'ClientId' : [ 0x40, ['_CLIENT_ID']],
'ActiveRpcHandle' : [ 0x50, ['pointer64', ['void']]],
'ThreadLocalStoragePointer' : [ 0x58, ['pointer64', ['void']]],
'ProcessEnvironmentBlock' : [ 0x60, ['pointer64', ['_PEB']]],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['pointer64', ['void']]],
'Win32ThreadInfo' : [ 0x78, ['pointer64', ['void']]],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['pointer64', ['void']]],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'SystemReserved1' : [ 0x110, ['array', 54, ['pointer64', ['void']]]],
'ExceptionCode' : [ 0x2c0, ['long']],
'ActivationContextStackPointer' : [ 0x2c8, ['pointer64', ['_ACTIVATION_CONTEXT_STACK']]],
'SpareBytes' : [ 0x2d0, ['array', 24, ['unsigned char']]],
'TxFsContext' : [ 0x2e8, ['unsigned long']],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID']],
'GdiCachedProcessHandle' : [ 0x7e8, ['pointer64', ['void']]],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['pointer64', ['void']]],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['pointer64', ['void']]]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['pointer64', ['void']]],
'glSectionInfo' : [ 0x1228, ['pointer64', ['void']]],
'glSection' : [ 0x1230, ['pointer64', ['void']]],
'glTable' : [ 0x1238, ['pointer64', ['void']]],
'glCurrentRC' : [ 0x1240, ['pointer64', ['void']]],
'glContext' : [ 0x1248, ['pointer64', ['void']]],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'StaticUnicodeString' : [ 0x1258, ['_UNICODE_STRING']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0x1478, ['pointer64', ['void']]],
'TlsSlots' : [ 0x1480, ['array', 64, ['pointer64', ['void']]]],
'TlsLinks' : [ 0x1680, ['_LIST_ENTRY']],
'Vdm' : [ 0x1690, ['pointer64', ['void']]],
'ReservedForNtRpc' : [ 0x1698, ['pointer64', ['void']]],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['pointer64', ['void']]]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Instrumentation' : [ 0x16b8, ['array', 11, ['pointer64', ['void']]]],
'ActivityId' : [ 0x1710, ['_GUID']],
'SubProcessTag' : [ 0x1720, ['pointer64', ['void']]],
'EtwLocalData' : [ 0x1728, ['pointer64', ['void']]],
'EtwTraceData' : [ 0x1730, ['pointer64', ['void']]],
'WinSockData' : [ 0x1738, ['pointer64', ['void']]],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'CurrentIdealProcessor' : [ 0x1744, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0x1744, ['unsigned long']],
'ReservedPad0' : [ 0x1744, ['unsigned char']],
'ReservedPad1' : [ 0x1745, ['unsigned char']],
'ReservedPad2' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'ReservedForPerf' : [ 0x1750, ['pointer64', ['void']]],
'ReservedForOle' : [ 0x1758, ['pointer64', ['void']]],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'SavedPriorityState' : [ 0x1768, ['pointer64', ['void']]],
'SoftPatchPtr1' : [ 0x1770, ['unsigned long long']],
'ThreadPoolData' : [ 0x1778, ['pointer64', ['void']]],
'TlsExpansionSlots' : [ 0x1780, ['pointer64', ['pointer64', ['void']]]],
'DeallocationBStore' : [ 0x1788, ['pointer64', ['void']]],
'BStoreLimit' : [ 0x1790, ['pointer64', ['void']]],
'MuiGeneration' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['pointer64', ['void']]],
'pShimData' : [ 0x17a8, ['pointer64', ['void']]],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned long']],
'CurrentTransactionHandle' : [ 0x17b8, ['pointer64', ['void']]],
'ActiveFrame' : [ 0x17c0, ['pointer64', ['_TEB_ACTIVE_FRAME']]],
'FlsData' : [ 0x17c8, ['pointer64', ['void']]],
'PreferredLanguages' : [ 0x17d0, ['pointer64', ['void']]],
'UserPrefLanguages' : [ 0x17d8, ['pointer64', ['void']]],
'MergedPrefLanguages' : [ 0x17e0, ['pointer64', ['void']]],
'MuiImpersonation' : [ 0x17e8, ['unsigned long']],
'CrossTebFlags' : [ 0x17ec, ['unsigned short']],
'SpareCrossTebBits' : [ 0x17ec, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0x17ee, ['unsigned short']],
'SafeThunkCall' : [ 0x17ee, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0x17ee, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0x17ee, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0x17ee, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0x17ee, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0x17ee, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0x17ee, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0x17ee, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0x17ee, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0x17ee, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0x17ee, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0x17ee, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0x17f0, ['pointer64', ['void']]],
'TxnScopeExitCallback' : [ 0x17f8, ['pointer64', ['void']]],
'TxnScopeContext' : [ 0x1800, ['pointer64', ['void']]],
'LockCount' : [ 0x1808, ['unsigned long']],
'SpareUlong0' : [ 0x180c, ['unsigned long']],
'ResourceRetValue' : [ 0x1810, ['pointer64', ['void']]],
} ],
'_LIST_ENTRY' : [ 0x10, {
'Flink' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'Blink' : [ 0x8, ['pointer64', ['_LIST_ENTRY']]],
} ],
'_SINGLE_LIST_ENTRY' : [ 0x8, {
'Next' : [ 0x0, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
} ],
'_RTL_DYNAMIC_HASH_TABLE_CONTEXT' : [ 0x18, {
'ChainHead' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'PrevLinkage' : [ 0x8, ['pointer64', ['_LIST_ENTRY']]],
'Signature' : [ 0x10, ['unsigned long long']],
} ],
'_RTL_DYNAMIC_HASH_TABLE_ENUMERATOR' : [ 0x28, {
'HashEntry' : [ 0x0, ['_RTL_DYNAMIC_HASH_TABLE_ENTRY']],
'ChainHead' : [ 0x18, ['pointer64', ['_LIST_ENTRY']]],
'BucketIndex' : [ 0x20, ['unsigned long']],
} ],
'_RTL_DYNAMIC_HASH_TABLE' : [ 0x28, {
'Flags' : [ 0x0, ['unsigned long']],
'Shift' : [ 0x4, ['unsigned long']],
'TableSize' : [ 0x8, ['unsigned long']],
'Pivot' : [ 0xc, ['unsigned long']],
'DivisorMask' : [ 0x10, ['unsigned long']],
'NumEntries' : [ 0x14, ['unsigned long']],
'NonEmptyBuckets' : [ 0x18, ['unsigned long']],
'NumEnumerators' : [ 0x1c, ['unsigned long']],
'Directory' : [ 0x20, ['pointer64', ['void']]],
} ],
'_UNICODE_STRING' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned short']]],
} ],
'_STRING' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_RTL_BITMAP' : [ 0x10, {
'SizeOfBitMap' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned long']]],
} ],
'_LUID' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_IMAGE_NT_HEADERS64' : [ 0x108, {
'Signature' : [ 0x0, ['unsigned long']],
'FileHeader' : [ 0x4, ['_IMAGE_FILE_HEADER']],
'OptionalHeader' : [ 0x18, ['_IMAGE_OPTIONAL_HEADER64']],
} ],
'_IMAGE_DOS_HEADER' : [ 0x40, {
'e_magic' : [ 0x0, ['unsigned short']],
'e_cblp' : [ 0x2, ['unsigned short']],
'e_cp' : [ 0x4, ['unsigned short']],
'e_crlc' : [ 0x6, ['unsigned short']],
'e_cparhdr' : [ 0x8, ['unsigned short']],
'e_minalloc' : [ 0xa, ['unsigned short']],
'e_maxalloc' : [ 0xc, ['unsigned short']],
'e_ss' : [ 0xe, ['unsigned short']],
'e_sp' : [ 0x10, ['unsigned short']],
'e_csum' : [ 0x12, ['unsigned short']],
'e_ip' : [ 0x14, ['unsigned short']],
'e_cs' : [ 0x16, ['unsigned short']],
'e_lfarlc' : [ 0x18, ['unsigned short']],
'e_ovno' : [ 0x1a, ['unsigned short']],
'e_res' : [ 0x1c, ['array', 4, ['unsigned short']]],
'e_oemid' : [ 0x24, ['unsigned short']],
'e_oeminfo' : [ 0x26, ['unsigned short']],
'e_res2' : [ 0x28, ['array', 10, ['unsigned short']]],
'e_lfanew' : [ 0x3c, ['long']],
} ],
'_KPCR' : [ 0x4e80, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'GdtBase' : [ 0x0, ['pointer64', ['_KGDTENTRY64']]],
'TssBase' : [ 0x8, ['pointer64', ['_KTSS64']]],
'UserRsp' : [ 0x10, ['unsigned long long']],
'Self' : [ 0x18, ['pointer64', ['_KPCR']]],
'CurrentPrcb' : [ 0x20, ['pointer64', ['_KPRCB']]],
'LockArray' : [ 0x28, ['pointer64', ['_KSPIN_LOCK_QUEUE']]],
'Used_Self' : [ 0x30, ['pointer64', ['void']]],
'IdtBase' : [ 0x38, ['pointer64', ['_KIDTENTRY64']]],
'Unused' : [ 0x40, ['array', 2, ['unsigned long long']]],
'Irql' : [ 0x50, ['unsigned char']],
'SecondLevelCacheAssociativity' : [ 0x51, ['unsigned char']],
'ObsoleteNumber' : [ 0x52, ['unsigned char']],
'Fill0' : [ 0x53, ['unsigned char']],
'Unused0' : [ 0x54, ['array', 3, ['unsigned long']]],
'MajorVersion' : [ 0x60, ['unsigned short']],
'MinorVersion' : [ 0x62, ['unsigned short']],
'StallScaleFactor' : [ 0x64, ['unsigned long']],
'Unused1' : [ 0x68, ['array', 3, ['pointer64', ['void']]]],
'KernelReserved' : [ 0x80, ['array', 15, ['unsigned long']]],
'SecondLevelCacheSize' : [ 0xbc, ['unsigned long']],
'HalReserved' : [ 0xc0, ['array', 16, ['unsigned long']]],
'Unused2' : [ 0x100, ['unsigned long']],
'KdVersionBlock' : [ 0x108, ['pointer64', ['void']]],
'Unused3' : [ 0x110, ['pointer64', ['void']]],
'PcrAlign1' : [ 0x118, ['array', 24, ['unsigned long']]],
'Prcb' : [ 0x180, ['_KPRCB']],
} ],
'_KPRCB' : [ 0x4d00, {
'MxCsr' : [ 0x0, ['unsigned long']],
'LegacyNumber' : [ 0x4, ['unsigned char']],
'ReservedMustBeZero' : [ 0x5, ['unsigned char']],
'InterruptRequest' : [ 0x6, ['unsigned char']],
'IdleHalt' : [ 0x7, ['unsigned char']],
'CurrentThread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'NextThread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'IdleThread' : [ 0x18, ['pointer64', ['_KTHREAD']]],
'NestingLevel' : [ 0x20, ['unsigned char']],
'PrcbPad00' : [ 0x21, ['array', 3, ['unsigned char']]],
'Number' : [ 0x24, ['unsigned long']],
'RspBase' : [ 0x28, ['unsigned long long']],
'PrcbLock' : [ 0x30, ['unsigned long long']],
'PrcbPad01' : [ 0x38, ['unsigned long long']],
'ProcessorState' : [ 0x40, ['_KPROCESSOR_STATE']],
'CpuType' : [ 0x5f0, ['unsigned char']],
'CpuID' : [ 0x5f1, ['unsigned char']],
'CpuStep' : [ 0x5f2, ['unsigned short']],
'CpuStepping' : [ 0x5f2, ['unsigned char']],
'CpuModel' : [ 0x5f3, ['unsigned char']],
'MHz' : [ 0x5f4, ['unsigned long']],
'HalReserved' : [ 0x5f8, ['array', 8, ['unsigned long long']]],
'MinorVersion' : [ 0x638, ['unsigned short']],
'MajorVersion' : [ 0x63a, ['unsigned short']],
'BuildType' : [ 0x63c, ['unsigned char']],
'CpuVendor' : [ 0x63d, ['unsigned char']],
'CoresPerPhysicalProcessor' : [ 0x63e, ['unsigned char']],
'LogicalProcessorsPerCore' : [ 0x63f, ['unsigned char']],
'ApicMask' : [ 0x640, ['unsigned long']],
'CFlushSize' : [ 0x644, ['unsigned long']],
'AcpiReserved' : [ 0x648, ['pointer64', ['void']]],
'InitialApicId' : [ 0x650, ['unsigned long']],
'Stride' : [ 0x654, ['unsigned long']],
'Group' : [ 0x658, ['unsigned short']],
'GroupSetMember' : [ 0x660, ['unsigned long long']],
'GroupIndex' : [ 0x668, ['unsigned char']],
'LockQueue' : [ 0x670, ['array', 17, ['_KSPIN_LOCK_QUEUE']]],
'PPLookasideList' : [ 0x780, ['array', 16, ['_PP_LOOKASIDE_LIST']]],
'PPNPagedLookasideList' : [ 0x880, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PPPagedLookasideList' : [ 0x1480, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PacketBarrier' : [ 0x2080, ['long']],
'DeferredReadyListHead' : [ 0x2088, ['_SINGLE_LIST_ENTRY']],
'MmPageFaultCount' : [ 0x2090, ['long']],
'MmCopyOnWriteCount' : [ 0x2094, ['long']],
'MmTransitionCount' : [ 0x2098, ['long']],
'MmDemandZeroCount' : [ 0x209c, ['long']],
'MmPageReadCount' : [ 0x20a0, ['long']],
'MmPageReadIoCount' : [ 0x20a4, ['long']],
'MmDirtyPagesWriteCount' : [ 0x20a8, ['long']],
'MmDirtyWriteIoCount' : [ 0x20ac, ['long']],
'MmMappedPagesWriteCount' : [ 0x20b0, ['long']],
'MmMappedWriteIoCount' : [ 0x20b4, ['long']],
'KeSystemCalls' : [ 0x20b8, ['unsigned long']],
'KeContextSwitches' : [ 0x20bc, ['unsigned long']],
'CcFastReadNoWait' : [ 0x20c0, ['unsigned long']],
'CcFastReadWait' : [ 0x20c4, ['unsigned long']],
'CcFastReadNotPossible' : [ 0x20c8, ['unsigned long']],
'CcCopyReadNoWait' : [ 0x20cc, ['unsigned long']],
'CcCopyReadWait' : [ 0x20d0, ['unsigned long']],
'CcCopyReadNoWaitMiss' : [ 0x20d4, ['unsigned long']],
'LookasideIrpFloat' : [ 0x20d8, ['long']],
'IoReadOperationCount' : [ 0x20dc, ['long']],
'IoWriteOperationCount' : [ 0x20e0, ['long']],
'IoOtherOperationCount' : [ 0x20e4, ['long']],
'IoReadTransferCount' : [ 0x20e8, ['_LARGE_INTEGER']],
'IoWriteTransferCount' : [ 0x20f0, ['_LARGE_INTEGER']],
'IoOtherTransferCount' : [ 0x20f8, ['_LARGE_INTEGER']],
'TargetCount' : [ 0x2100, ['long']],
'IpiFrozen' : [ 0x2104, ['unsigned long']],
'DpcData' : [ 0x2180, ['array', 2, ['_KDPC_DATA']]],
'DpcStack' : [ 0x21c0, ['pointer64', ['void']]],
'MaximumDpcQueueDepth' : [ 0x21c8, ['long']],
'DpcRequestRate' : [ 0x21cc, ['unsigned long']],
'MinimumDpcRate' : [ 0x21d0, ['unsigned long']],
'DpcLastCount' : [ 0x21d4, ['unsigned long']],
'ThreadDpcEnable' : [ 0x21d8, ['unsigned char']],
'QuantumEnd' : [ 0x21d9, ['unsigned char']],
'DpcRoutineActive' : [ 0x21da, ['unsigned char']],
'IdleSchedule' : [ 0x21db, ['unsigned char']],
'DpcRequestSummary' : [ 0x21dc, ['long']],
'DpcRequestSlot' : [ 0x21dc, ['array', 2, ['short']]],
'NormalDpcState' : [ 0x21dc, ['short']],
'DpcThreadActive' : [ 0x21de, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'ThreadDpcState' : [ 0x21de, ['short']],
'TimerHand' : [ 0x21e0, ['unsigned long']],
'MasterOffset' : [ 0x21e4, ['long']],
'LastTick' : [ 0x21e8, ['unsigned long']],
'UnusedPad' : [ 0x21ec, ['unsigned long']],
'PrcbPad50' : [ 0x21f0, ['array', 2, ['unsigned long long']]],
'TimerTable' : [ 0x2200, ['_KTIMER_TABLE']],
'DpcGate' : [ 0x4400, ['_KGATE']],
'PrcbPad52' : [ 0x4418, ['pointer64', ['void']]],
'CallDpc' : [ 0x4420, ['_KDPC']],
'ClockKeepAlive' : [ 0x4460, ['long']],
'ClockCheckSlot' : [ 0x4464, ['unsigned char']],
'ClockPollCycle' : [ 0x4465, ['unsigned char']],
'NmiActive' : [ 0x4466, ['unsigned short']],
'DpcWatchdogPeriod' : [ 0x4468, ['long']],
'DpcWatchdogCount' : [ 0x446c, ['long']],
'TickOffset' : [ 0x4470, ['unsigned long long']],
'KeSpinLockOrdering' : [ 0x4478, ['long']],
'PrcbPad70' : [ 0x447c, ['unsigned long']],
'WaitListHead' : [ 0x4480, ['_LIST_ENTRY']],
'WaitLock' : [ 0x4490, ['unsigned long long']],
'ReadySummary' : [ 0x4498, ['unsigned long']],
'QueueIndex' : [ 0x449c, ['unsigned long']],
'TimerExpirationDpc' : [ 0x44a0, ['_KDPC']],
'PrcbPad72' : [ 0x44e0, ['array', 4, ['unsigned long long']]],
'DispatcherReadyListHead' : [ 0x4500, ['array', 32, ['_LIST_ENTRY']]],
'InterruptCount' : [ 0x4700, ['unsigned long']],
'KernelTime' : [ 0x4704, ['unsigned long']],
'UserTime' : [ 0x4708, ['unsigned long']],
'DpcTime' : [ 0x470c, ['unsigned long']],
'InterruptTime' : [ 0x4710, ['unsigned long']],
'AdjustDpcThreshold' : [ 0x4714, ['unsigned long']],
'DebuggerSavedIRQL' : [ 0x4718, ['unsigned char']],
'PrcbPad80' : [ 0x4719, ['array', 7, ['unsigned char']]],
'DpcTimeCount' : [ 0x4720, ['unsigned long']],
'DpcTimeLimit' : [ 0x4724, ['unsigned long']],
'PeriodicCount' : [ 0x4728, ['unsigned long']],
'PeriodicBias' : [ 0x472c, ['unsigned long']],
'AvailableTime' : [ 0x4730, ['unsigned long']],
'KeExceptionDispatchCount' : [ 0x4734, ['unsigned long']],
'ParentNode' : [ 0x4738, ['pointer64', ['_KNODE']]],
'StartCycles' : [ 0x4740, ['unsigned long long']],
'PrcbPad82' : [ 0x4748, ['array', 3, ['unsigned long long']]],
'MmSpinLockOrdering' : [ 0x4760, ['long']],
'PageColor' : [ 0x4764, ['unsigned long']],
'NodeColor' : [ 0x4768, ['unsigned long']],
'NodeShiftedColor' : [ 0x476c, ['unsigned long']],
'SecondaryColorMask' : [ 0x4770, ['unsigned long']],
'PrcbPad83' : [ 0x4774, ['unsigned long']],
'CycleTime' : [ 0x4778, ['unsigned long long']],
'CcFastMdlReadNoWait' : [ 0x4780, ['unsigned long']],
'CcFastMdlReadWait' : [ 0x4784, ['unsigned long']],
'CcFastMdlReadNotPossible' : [ 0x4788, ['unsigned long']],
'CcMapDataNoWait' : [ 0x478c, ['unsigned long']],
'CcMapDataWait' : [ 0x4790, ['unsigned long']],
'CcPinMappedDataCount' : [ 0x4794, ['unsigned long']],
'CcPinReadNoWait' : [ 0x4798, ['unsigned long']],
'CcPinReadWait' : [ 0x479c, ['unsigned long']],
'CcMdlReadNoWait' : [ 0x47a0, ['unsigned long']],
'CcMdlReadWait' : [ 0x47a4, ['unsigned long']],
'CcLazyWriteHotSpots' : [ 0x47a8, ['unsigned long']],
'CcLazyWriteIos' : [ 0x47ac, ['unsigned long']],
'CcLazyWritePages' : [ 0x47b0, ['unsigned long']],
'CcDataFlushes' : [ 0x47b4, ['unsigned long']],
'CcDataPages' : [ 0x47b8, ['unsigned long']],
'CcLostDelayedWrites' : [ 0x47bc, ['unsigned long']],
'CcFastReadResourceMiss' : [ 0x47c0, ['unsigned long']],
'CcCopyReadWaitMiss' : [ 0x47c4, ['unsigned long']],
'CcFastMdlReadResourceMiss' : [ 0x47c8, ['unsigned long']],
'CcMapDataNoWaitMiss' : [ 0x47cc, ['unsigned long']],
'CcMapDataWaitMiss' : [ 0x47d0, ['unsigned long']],
'CcPinReadNoWaitMiss' : [ 0x47d4, ['unsigned long']],
'CcPinReadWaitMiss' : [ 0x47d8, ['unsigned long']],
'CcMdlReadNoWaitMiss' : [ 0x47dc, ['unsigned long']],
'CcMdlReadWaitMiss' : [ 0x47e0, ['unsigned long']],
'CcReadAheadIos' : [ 0x47e4, ['unsigned long']],
'MmCacheTransitionCount' : [ 0x47e8, ['long']],
'MmCacheReadCount' : [ 0x47ec, ['long']],
'MmCacheIoCount' : [ 0x47f0, ['long']],
'PrcbPad91' : [ 0x47f4, ['array', 1, ['unsigned long']]],
'RuntimeAccumulation' : [ 0x47f8, ['unsigned long long']],
'PowerState' : [ 0x4800, ['_PROCESSOR_POWER_STATE']],
'PrcbPad92' : [ 0x4900, ['array', 16, ['unsigned char']]],
'KeAlignmentFixupCount' : [ 0x4910, ['unsigned long']],
'DpcWatchdogDpc' : [ 0x4918, ['_KDPC']],
'DpcWatchdogTimer' : [ 0x4958, ['_KTIMER']],
'Cache' : [ 0x4998, ['array', 5, ['_CACHE_DESCRIPTOR']]],
'CacheCount' : [ 0x49d4, ['unsigned long']],
'CachedCommit' : [ 0x49d8, ['unsigned long']],
'CachedResidentAvailable' : [ 0x49dc, ['unsigned long']],
'HyperPte' : [ 0x49e0, ['pointer64', ['void']]],
'WheaInfo' : [ 0x49e8, ['pointer64', ['void']]],
'EtwSupport' : [ 0x49f0, ['pointer64', ['void']]],
'InterruptObjectPool' : [ 0x4a00, ['_SLIST_HEADER']],
'HypercallPageList' : [ 0x4a10, ['_SLIST_HEADER']],
'HypercallPageVirtual' : [ 0x4a20, ['pointer64', ['void']]],
'VirtualApicAssist' : [ 0x4a28, ['pointer64', ['void']]],
'StatisticsPage' : [ 0x4a30, ['pointer64', ['unsigned long long']]],
'RateControl' : [ 0x4a38, ['pointer64', ['void']]],
'CacheProcessorMask' : [ 0x4a40, ['array', 5, ['unsigned long long']]],
'PackageProcessorSet' : [ 0x4a68, ['_KAFFINITY_EX']],
'CoreProcessorSet' : [ 0x4a90, ['unsigned long long']],
'PebsIndexAddress' : [ 0x4a98, ['pointer64', ['void']]],
'PrcbPad93' : [ 0x4aa0, ['array', 12, ['unsigned long long']]],
'SpinLockAcquireCount' : [ 0x4b00, ['unsigned long']],
'SpinLockContentionCount' : [ 0x4b04, ['unsigned long']],
'SpinLockSpinCount' : [ 0x4b08, ['unsigned long']],
'IpiSendRequestBroadcastCount' : [ 0x4b0c, ['unsigned long']],
'IpiSendRequestRoutineCount' : [ 0x4b10, ['unsigned long']],
'IpiSendSoftwareInterruptCount' : [ 0x4b14, ['unsigned long']],
'ExInitializeResourceCount' : [ 0x4b18, ['unsigned long']],
'ExReInitializeResourceCount' : [ 0x4b1c, ['unsigned long']],
'ExDeleteResourceCount' : [ 0x4b20, ['unsigned long']],
'ExecutiveResourceAcquiresCount' : [ 0x4b24, ['unsigned long']],
'ExecutiveResourceContentionsCount' : [ 0x4b28, ['unsigned long']],
'ExecutiveResourceReleaseExclusiveCount' : [ 0x4b2c, ['unsigned long']],
'ExecutiveResourceReleaseSharedCount' : [ 0x4b30, ['unsigned long']],
'ExecutiveResourceConvertsCount' : [ 0x4b34, ['unsigned long']],
'ExAcqResExclusiveAttempts' : [ 0x4b38, ['unsigned long']],
'ExAcqResExclusiveAcquiresExclusive' : [ 0x4b3c, ['unsigned long']],
'ExAcqResExclusiveAcquiresExclusiveRecursive' : [ 0x4b40, ['unsigned long']],
'ExAcqResExclusiveWaits' : [ 0x4b44, ['unsigned long']],
'ExAcqResExclusiveNotAcquires' : [ 0x4b48, ['unsigned long']],
'ExAcqResSharedAttempts' : [ 0x4b4c, ['unsigned long']],
'ExAcqResSharedAcquiresExclusive' : [ 0x4b50, ['unsigned long']],
'ExAcqResSharedAcquiresShared' : [ 0x4b54, ['unsigned long']],
'ExAcqResSharedAcquiresSharedRecursive' : [ 0x4b58, ['unsigned long']],
'ExAcqResSharedWaits' : [ 0x4b5c, ['unsigned long']],
'ExAcqResSharedNotAcquires' : [ 0x4b60, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAttempts' : [ 0x4b64, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresExclusive' : [ 0x4b68, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresShared' : [ 0x4b6c, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresSharedRecursive' : [ 0x4b70, ['unsigned long']],
'ExAcqResSharedStarveExclusiveWaits' : [ 0x4b74, ['unsigned long']],
'ExAcqResSharedStarveExclusiveNotAcquires' : [ 0x4b78, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAttempts' : [ 0x4b7c, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresExclusive' : [ 0x4b80, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresShared' : [ 0x4b84, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresSharedRecursive' : [ 0x4b88, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveWaits' : [ 0x4b8c, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveNotAcquires' : [ 0x4b90, ['unsigned long']],
'ExSetResOwnerPointerExclusive' : [ 0x4b94, ['unsigned long']],
'ExSetResOwnerPointerSharedNew' : [ 0x4b98, ['unsigned long']],
'ExSetResOwnerPointerSharedOld' : [ 0x4b9c, ['unsigned long']],
'ExTryToAcqExclusiveAttempts' : [ 0x4ba0, ['unsigned long']],
'ExTryToAcqExclusiveAcquires' : [ 0x4ba4, ['unsigned long']],
'ExBoostExclusiveOwner' : [ 0x4ba8, ['unsigned long']],
'ExBoostSharedOwners' : [ 0x4bac, ['unsigned long']],
'ExEtwSynchTrackingNotificationsCount' : [ 0x4bb0, ['unsigned long']],
'ExEtwSynchTrackingNotificationsAccountedCount' : [ 0x4bb4, ['unsigned long']],
'VendorString' : [ 0x4bb8, ['array', 13, ['unsigned char']]],
'PrcbPad10' : [ 0x4bc5, ['array', 3, ['unsigned char']]],
'FeatureBits' : [ 0x4bc8, ['unsigned long']],
'UpdateSignature' : [ 0x4bd0, ['_LARGE_INTEGER']],
'Context' : [ 0x4bd8, ['pointer64', ['_CONTEXT']]],
'ContextFlags' : [ 0x4be0, ['unsigned long']],
'ExtendedState' : [ 0x4be8, ['pointer64', ['_XSAVE_AREA']]],
'Mailbox' : [ 0x4c00, ['pointer64', ['_REQUEST_MAILBOX']]],
'RequestMailbox' : [ 0x4c80, ['array', 1, ['_REQUEST_MAILBOX']]],
} ],
'_SINGLE_LIST_ENTRY32' : [ 0x4, {
'Next' : [ 0x0, ['unsigned long']],
} ],
'_KTHREAD' : [ 0x360, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'CycleTime' : [ 0x18, ['unsigned long long']],
'QuantumTarget' : [ 0x20, ['unsigned long long']],
'InitialStack' : [ 0x28, ['pointer64', ['void']]],
'StackLimit' : [ 0x30, ['pointer64', ['void']]],
'KernelStack' : [ 0x38, ['pointer64', ['void']]],
'ThreadLock' : [ 0x40, ['unsigned long long']],
'WaitRegister' : [ 0x48, ['_KWAIT_STATUS_REGISTER']],
'Running' : [ 0x49, ['unsigned char']],
'Alerted' : [ 0x4a, ['array', 2, ['unsigned char']]],
'KernelStackResident' : [ 0x4c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadyTransition' : [ 0x4c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessReadyQueue' : [ 0x4c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WaitNext' : [ 0x4c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SystemAffinityActive' : [ 0x4c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Alertable' : [ 0x4c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'GdiFlushActive' : [ 0x4c, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'UserStackWalkActive' : [ 0x4c, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ApcInterruptRequest' : [ 0x4c, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'ForceDeferSchedule' : [ 0x4c, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'QuantumEndMigrate' : [ 0x4c, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'UmsDirectedSwitchEnable' : [ 0x4c, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'TimerActive' : [ 0x4c, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'SystemThread' : [ 0x4c, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'Reserved' : [ 0x4c, ['BitField', dict(start_bit = 14, end_bit = 32, native_type='unsigned long')]],
'MiscFlags' : [ 0x4c, ['long']],
'ApcState' : [ 0x50, ['_KAPC_STATE']],
'ApcStateFill' : [ 0x50, ['array', 43, ['unsigned char']]],
'Priority' : [ 0x7b, ['unsigned char']],
'NextProcessor' : [ 0x7c, ['unsigned long']],
'DeferredProcessor' : [ 0x80, ['unsigned long']],
'ApcQueueLock' : [ 0x88, ['unsigned long long']],
'WaitStatus' : [ 0x90, ['long long']],
'WaitBlockList' : [ 0x98, ['pointer64', ['_KWAIT_BLOCK']]],
'WaitListEntry' : [ 0xa0, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0xa0, ['_SINGLE_LIST_ENTRY']],
'Queue' : [ 0xb0, ['pointer64', ['_KQUEUE']]],
'Teb' : [ 0xb8, ['pointer64', ['void']]],
'Timer' : [ 0xc0, ['_KTIMER']],
'AutoAlignment' : [ 0x100, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DisableBoost' : [ 0x100, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'EtwStackTraceApc1Inserted' : [ 0x100, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EtwStackTraceApc2Inserted' : [ 0x100, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CalloutActive' : [ 0x100, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ApcQueueable' : [ 0x100, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'EnableStackSwap' : [ 0x100, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'GuiThread' : [ 0x100, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'UmsPerformingSyscall' : [ 0x100, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'VdmSafe' : [ 0x100, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'UmsDispatched' : [ 0x100, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ReservedFlags' : [ 0x100, ['BitField', dict(start_bit = 11, end_bit = 32, native_type='unsigned long')]],
'ThreadFlags' : [ 0x100, ['long']],
'Spare0' : [ 0x104, ['unsigned long']],
'WaitBlock' : [ 0x108, ['array', 4, ['_KWAIT_BLOCK']]],
'WaitBlockFill4' : [ 0x108, ['array', 44, ['unsigned char']]],
'ContextSwitches' : [ 0x134, ['unsigned long']],
'WaitBlockFill5' : [ 0x108, ['array', 92, ['unsigned char']]],
'State' : [ 0x164, ['unsigned char']],
'NpxState' : [ 0x165, ['unsigned char']],
'WaitIrql' : [ 0x166, ['unsigned char']],
'WaitMode' : [ 0x167, ['unsigned char']],
'WaitBlockFill6' : [ 0x108, ['array', 140, ['unsigned char']]],
'WaitTime' : [ 0x194, ['unsigned long']],
'WaitBlockFill7' : [ 0x108, ['array', 168, ['unsigned char']]],
'TebMappedLowVa' : [ 0x1b0, ['pointer64', ['void']]],
'Ucb' : [ 0x1b8, ['pointer64', ['_UMS_CONTROL_BLOCK']]],
'WaitBlockFill8' : [ 0x108, ['array', 188, ['unsigned char']]],
'KernelApcDisable' : [ 0x1c4, ['short']],
'SpecialApcDisable' : [ 0x1c6, ['short']],
'CombinedApcDisable' : [ 0x1c4, ['unsigned long']],
'QueueListEntry' : [ 0x1c8, ['_LIST_ENTRY']],
'TrapFrame' : [ 0x1d8, ['pointer64', ['_KTRAP_FRAME']]],
'FirstArgument' : [ 0x1e0, ['pointer64', ['void']]],
'CallbackStack' : [ 0x1e8, ['pointer64', ['void']]],
'CallbackDepth' : [ 0x1e8, ['unsigned long long']],
'ApcStateIndex' : [ 0x1f0, ['unsigned char']],
'BasePriority' : [ 0x1f1, ['unsigned char']],
'PriorityDecrement' : [ 0x1f2, ['unsigned char']],
'ForegroundBoost' : [ 0x1f2, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned char')]],
'UnusualBoost' : [ 0x1f2, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'Preempted' : [ 0x1f3, ['unsigned char']],
'AdjustReason' : [ 0x1f4, ['unsigned char']],
'AdjustIncrement' : [ 0x1f5, ['unsigned char']],
'PreviousMode' : [ 0x1f6, ['unsigned char']],
'Saturation' : [ 0x1f7, ['unsigned char']],
'SystemCallNumber' : [ 0x1f8, ['unsigned long']],
'FreezeCount' : [ 0x1fc, ['unsigned long']],
'UserAffinity' : [ 0x200, ['_GROUP_AFFINITY']],
'Process' : [ 0x210, ['pointer64', ['_KPROCESS']]],
'Affinity' : [ 0x218, ['_GROUP_AFFINITY']],
'IdealProcessor' : [ 0x228, ['unsigned long']],
'UserIdealProcessor' : [ 0x22c, ['unsigned long']],
'ApcStatePointer' : [ 0x230, ['array', 2, ['pointer64', ['_KAPC_STATE']]]],
'SavedApcState' : [ 0x240, ['_KAPC_STATE']],
'SavedApcStateFill' : [ 0x240, ['array', 43, ['unsigned char']]],
'WaitReason' : [ 0x26b, ['unsigned char']],
'SuspendCount' : [ 0x26c, ['unsigned char']],
'Spare1' : [ 0x26d, ['unsigned char']],
'CodePatchInProgress' : [ 0x26e, ['unsigned char']],
'Win32Thread' : [ 0x270, ['pointer64', ['void']]],
'StackBase' : [ 0x278, ['pointer64', ['void']]],
'SuspendApc' : [ 0x280, ['_KAPC']],
'SuspendApcFill0' : [ 0x280, ['array', 1, ['unsigned char']]],
'ResourceIndex' : [ 0x281, ['unsigned char']],
'SuspendApcFill1' : [ 0x280, ['array', 3, ['unsigned char']]],
'QuantumReset' : [ 0x283, ['unsigned char']],
'SuspendApcFill2' : [ 0x280, ['array', 4, ['unsigned char']]],
'KernelTime' : [ 0x284, ['unsigned long']],
'SuspendApcFill3' : [ 0x280, ['array', 64, ['unsigned char']]],
'WaitPrcb' : [ 0x2c0, ['pointer64', ['_KPRCB']]],
'SuspendApcFill4' : [ 0x280, ['array', 72, ['unsigned char']]],
'LegoData' : [ 0x2c8, ['pointer64', ['void']]],
'SuspendApcFill5' : [ 0x280, ['array', 83, ['unsigned char']]],
'LargeStack' : [ 0x2d3, ['unsigned char']],
'UserTime' : [ 0x2d4, ['unsigned long']],
'SuspendSemaphore' : [ 0x2d8, ['_KSEMAPHORE']],
'SuspendSemaphorefill' : [ 0x2d8, ['array', 28, ['unsigned char']]],
'SListFaultCount' : [ 0x2f4, ['unsigned long']],
'ThreadListEntry' : [ 0x2f8, ['_LIST_ENTRY']],
'MutantListHead' : [ 0x308, ['_LIST_ENTRY']],
'SListFaultAddress' : [ 0x318, ['pointer64', ['void']]],
'ReadOperationCount' : [ 0x320, ['long long']],
'WriteOperationCount' : [ 0x328, ['long long']],
'OtherOperationCount' : [ 0x330, ['long long']],
'ReadTransferCount' : [ 0x338, ['long long']],
'WriteTransferCount' : [ 0x340, ['long long']],
'OtherTransferCount' : [ 0x348, ['long long']],
'ThreadCounters' : [ 0x350, ['pointer64', ['_KTHREAD_COUNTERS']]],
'XStateSave' : [ 0x358, ['pointer64', ['_XSTATE_SAVE']]],
} ],
'_KSTACK_AREA' : [ 0x250, {
'StackControl' : [ 0x0, ['_KERNEL_STACK_CONTROL']],
'NpxFrame' : [ 0x50, ['_XSAVE_FORMAT']],
} ],
'_KERNEL_STACK_CONTROL' : [ 0x50, {
'Current' : [ 0x0, ['_KERNEL_STACK_SEGMENT']],
'Previous' : [ 0x28, ['_KERNEL_STACK_SEGMENT']],
} ],
'_UMS_CONTROL_BLOCK' : [ 0x98, {
'UmsContext' : [ 0x0, ['pointer64', ['_RTL_UMS_CONTEXT']]],
'CompletionListEntry' : [ 0x8, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
'CompletionListEvent' : [ 0x10, ['pointer64', ['_KEVENT']]],
'ServiceSequenceNumber' : [ 0x18, ['unsigned long']],
'UmsQueue' : [ 0x20, ['_KQUEUE']],
'QueueEntry' : [ 0x60, ['_LIST_ENTRY']],
'YieldingUmsContext' : [ 0x70, ['pointer64', ['_RTL_UMS_CONTEXT']]],
'YieldingParam' : [ 0x78, ['pointer64', ['void']]],
'UmsTeb' : [ 0x80, ['pointer64', ['void']]],
'PrimaryFlags' : [ 0x88, ['unsigned long']],
'UmsContextHeaderReady' : [ 0x88, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'UmsAssociatedQueue' : [ 0x20, ['pointer64', ['_KQUEUE']]],
'UmsQueueListEntry' : [ 0x28, ['pointer64', ['_LIST_ENTRY']]],
'UmsContextHeader' : [ 0x30, ['pointer64', ['_KUMS_CONTEXT_HEADER']]],
'UmsWaitGate' : [ 0x38, ['_KGATE']],
'StagingArea' : [ 0x50, ['pointer64', ['void']]],
'Flags' : [ 0x58, ['long']],
'UmsForceQueueTermination' : [ 0x58, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'UmsAssociatedQueueUsed' : [ 0x58, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'UmsThreadParked' : [ 0x58, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'UmsPrimaryDeliveredContext' : [ 0x58, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'UmsPerformingSingleStep' : [ 0x58, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'TebSelector' : [ 0x90, ['unsigned short']],
} ],
'_KSPIN_LOCK_QUEUE' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_KSPIN_LOCK_QUEUE']]],
'Lock' : [ 0x8, ['pointer64', ['unsigned long long']]],
} ],
'_FAST_MUTEX' : [ 0x38, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Contention' : [ 0x10, ['unsigned long']],
'Event' : [ 0x18, ['_KEVENT']],
'OldIrql' : [ 0x30, ['unsigned long']],
} ],
'_KEVENT' : [ 0x18, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'__unnamed_11cd' : [ 0x10, {
'Depth' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Sequence' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 25, native_type='unsigned long long')]],
'NextEntry' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 64, native_type='unsigned long long')]],
'HeaderType' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Init' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 61, native_type='unsigned long long')]],
'Region' : [ 0x8, ['BitField', dict(start_bit = 61, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_11d2' : [ 0x10, {
'Depth' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Sequence' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='unsigned long long')]],
'HeaderType' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Init' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long long')]],
'NextEntry' : [ 0x8, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_11d5' : [ 0x10, {
'Depth' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Sequence' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='unsigned long long')]],
'HeaderType' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 4, native_type='unsigned long long')]],
'NextEntry' : [ 0x8, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
} ],
'_SLIST_HEADER' : [ 0x10, {
'Alignment' : [ 0x0, ['unsigned long long']],
'Region' : [ 0x8, ['unsigned long long']],
'Header8' : [ 0x0, ['__unnamed_11cd']],
'Header16' : [ 0x0, ['__unnamed_11d2']],
'HeaderX64' : [ 0x0, ['__unnamed_11d5']],
} ],
'_LOOKASIDE_LIST_EX' : [ 0x60, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE_POOL']],
} ],
'_SLIST_ENTRY' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_NPAGED_LOOKASIDE_LIST' : [ 0x80, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
} ],
'_PAGED_LOOKASIDE_LIST' : [ 0x80, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
} ],
'_QUAD' : [ 0x8, {
'UseThisFieldToCopy' : [ 0x0, ['long long']],
'DoNotUseThisField' : [ 0x0, ['double']],
} ],
'_IO_STATUS_BLOCK' : [ 0x10, {
'Status' : [ 0x0, ['long']],
'Pointer' : [ 0x0, ['pointer64', ['void']]],
'Information' : [ 0x8, ['unsigned long long']],
} ],
'_IO_STATUS_BLOCK32' : [ 0x8, {
'Status' : [ 0x0, ['long']],
'Information' : [ 0x4, ['unsigned long']],
} ],
'_EX_PUSH_LOCK' : [ 0x8, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_PROCESSOR_NUMBER' : [ 0x4, {
'Group' : [ 0x0, ['unsigned short']],
'Number' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
} ],
'_EX_PUSH_LOCK_CACHE_AWARE' : [ 0x100, {
'Locks' : [ 0x0, ['array', 32, ['pointer64', ['_EX_PUSH_LOCK']]]],
} ],
'_PP_LOOKASIDE_LIST' : [ 0x10, {
'P' : [ 0x0, ['pointer64', ['_GENERAL_LOOKASIDE']]],
'L' : [ 0x8, ['pointer64', ['_GENERAL_LOOKASIDE']]],
} ],
'_GENERAL_LOOKASIDE' : [ 0x80, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'SingleListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'AllocateHits' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'FreeHits' : [ 0x20, ['unsigned long']],
'Type' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x28, ['unsigned long']],
'Size' : [ 0x2c, ['unsigned long']],
'AllocateEx' : [ 0x30, ['pointer64', ['void']]],
'Allocate' : [ 0x30, ['pointer64', ['void']]],
'FreeEx' : [ 0x38, ['pointer64', ['void']]],
'Free' : [ 0x38, ['pointer64', ['void']]],
'ListEntry' : [ 0x40, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x50, ['unsigned long']],
'LastAllocateMisses' : [ 0x54, ['unsigned long']],
'LastAllocateHits' : [ 0x54, ['unsigned long']],
'Future' : [ 0x58, ['array', 2, ['unsigned long']]],
} ],
'_EX_FAST_REF' : [ 0x8, {
'Object' : [ 0x0, ['pointer64', ['void']]],
'RefCnt' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
} ],
'_EX_PUSH_LOCK_WAIT_BLOCK' : [ 0x40, {
'WakeEvent' : [ 0x0, ['_KEVENT']],
'Next' : [ 0x18, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Last' : [ 0x20, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Previous' : [ 0x28, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'ShareCount' : [ 0x30, ['long']],
'Flags' : [ 0x34, ['long']],
} ],
'_ETHREAD' : [ 0x498, {
'Tcb' : [ 0x0, ['_KTHREAD']],
'CreateTime' : [ 0x360, ['_LARGE_INTEGER']],
'ExitTime' : [ 0x368, ['_LARGE_INTEGER']],
'KeyedWaitChain' : [ 0x368, ['_LIST_ENTRY']],
'ExitStatus' : [ 0x378, ['long']],
'PostBlockList' : [ 0x380, ['_LIST_ENTRY']],
'ForwardLinkShadow' : [ 0x380, ['pointer64', ['void']]],
'StartAddress' : [ 0x388, ['pointer64', ['void']]],
'TerminationPort' : [ 0x390, ['pointer64', ['_TERMINATION_PORT']]],
'ReaperLink' : [ 0x390, ['pointer64', ['_ETHREAD']]],
'KeyedWaitValue' : [ 0x390, ['pointer64', ['void']]],
'ActiveTimerListLock' : [ 0x398, ['unsigned long long']],
'ActiveTimerListHead' : [ 0x3a0, ['_LIST_ENTRY']],
'Cid' : [ 0x3b0, ['_CLIENT_ID']],
'KeyedWaitSemaphore' : [ 0x3c0, ['_KSEMAPHORE']],
'AlpcWaitSemaphore' : [ 0x3c0, ['_KSEMAPHORE']],
'ClientSecurity' : [ 0x3e0, ['_PS_CLIENT_SECURITY_CONTEXT']],
'IrpList' : [ 0x3e8, ['_LIST_ENTRY']],
'TopLevelIrp' : [ 0x3f8, ['unsigned long long']],
'DeviceToVerify' : [ 0x400, ['pointer64', ['_DEVICE_OBJECT']]],
'CpuQuotaApc' : [ 0x408, ['pointer64', ['_PSP_CPU_QUOTA_APC']]],
'Win32StartAddress' : [ 0x410, ['pointer64', ['void']]],
'LegacyPowerObject' : [ 0x418, ['pointer64', ['void']]],
'ThreadListEntry' : [ 0x420, ['_LIST_ENTRY']],
'RundownProtect' : [ 0x430, ['_EX_RUNDOWN_REF']],
'ThreadLock' : [ 0x438, ['_EX_PUSH_LOCK']],
'ReadClusterSize' : [ 0x440, ['unsigned long']],
'MmLockOrdering' : [ 0x444, ['long']],
'CrossThreadFlags' : [ 0x448, ['unsigned long']],
'Terminated' : [ 0x448, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ThreadInserted' : [ 0x448, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HideFromDebugger' : [ 0x448, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ActiveImpersonationInfo' : [ 0x448, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Reserved' : [ 0x448, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'HardErrorsAreDisabled' : [ 0x448, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x448, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SkipCreationMsg' : [ 0x448, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SkipTerminationMsg' : [ 0x448, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'CopyTokenOnOpen' : [ 0x448, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ThreadIoPriority' : [ 0x448, ['BitField', dict(start_bit = 10, end_bit = 13, native_type='unsigned long')]],
'ThreadPagePriority' : [ 0x448, ['BitField', dict(start_bit = 13, end_bit = 16, native_type='unsigned long')]],
'RundownFail' : [ 0x448, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NeedsWorkingSetAging' : [ 0x448, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'SameThreadPassiveFlags' : [ 0x44c, ['unsigned long']],
'ActiveExWorker' : [ 0x44c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ExWorkerCanWaitUser' : [ 0x44c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'MemoryMaker' : [ 0x44c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ClonedThread' : [ 0x44c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'KeyedEventInUse' : [ 0x44c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'RateApcState' : [ 0x44c, ['BitField', dict(start_bit = 5, end_bit = 7, native_type='unsigned long')]],
'SelfTerminate' : [ 0x44c, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SameThreadApcFlags' : [ 0x450, ['unsigned long']],
'Spare' : [ 0x450, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'StartAddressInvalid' : [ 0x450, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'EtwPageFaultCalloutActive' : [ 0x450, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'OwnsProcessWorkingSetExclusive' : [ 0x450, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'OwnsProcessWorkingSetShared' : [ 0x450, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'OwnsSystemCacheWorkingSetExclusive' : [ 0x450, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'OwnsSystemCacheWorkingSetShared' : [ 0x450, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsSessionWorkingSetExclusive' : [ 0x450, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'OwnsSessionWorkingSetShared' : [ 0x451, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'OwnsProcessAddressSpaceExclusive' : [ 0x451, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'OwnsProcessAddressSpaceShared' : [ 0x451, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SuppressSymbolLoad' : [ 0x451, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Prefetching' : [ 0x451, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'OwnsDynamicMemoryShared' : [ 0x451, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'OwnsChangeControlAreaExclusive' : [ 0x451, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsChangeControlAreaShared' : [ 0x451, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'OwnsPagedPoolWorkingSetExclusive' : [ 0x452, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'OwnsPagedPoolWorkingSetShared' : [ 0x452, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'OwnsSystemPtesWorkingSetExclusive' : [ 0x452, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'OwnsSystemPtesWorkingSetShared' : [ 0x452, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'TrimTrigger' : [ 0x452, ['BitField', dict(start_bit = 4, end_bit = 6, native_type='unsigned char')]],
'Spare1' : [ 0x452, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'PriorityRegionActive' : [ 0x453, ['unsigned char']],
'CacheManagerActive' : [ 0x454, ['unsigned char']],
'DisablePageFaultClustering' : [ 0x455, ['unsigned char']],
'ActiveFaultCount' : [ 0x456, ['unsigned char']],
'LockOrderState' : [ 0x457, ['unsigned char']],
'AlpcMessageId' : [ 0x458, ['unsigned long long']],
'AlpcMessage' : [ 0x460, ['pointer64', ['void']]],
'AlpcReceiveAttributeSet' : [ 0x460, ['unsigned long']],
'AlpcWaitListEntry' : [ 0x468, ['_LIST_ENTRY']],
'CacheManagerCount' : [ 0x478, ['unsigned long']],
'IoBoostCount' : [ 0x47c, ['unsigned long']],
'IrpListLock' : [ 0x480, ['unsigned long long']],
'ReservedForSynchTracking' : [ 0x488, ['pointer64', ['void']]],
'CmCallbackListHead' : [ 0x490, ['_SINGLE_LIST_ENTRY']],
} ],
'_EPROCESS' : [ 0x4d0, {
'Pcb' : [ 0x0, ['_KPROCESS']],
'ProcessLock' : [ 0x160, ['_EX_PUSH_LOCK']],
'CreateTime' : [ 0x168, ['_LARGE_INTEGER']],
'ExitTime' : [ 0x170, ['_LARGE_INTEGER']],
'RundownProtect' : [ 0x178, ['_EX_RUNDOWN_REF']],
'UniqueProcessId' : [ 0x180, ['pointer64', ['void']]],
'ActiveProcessLinks' : [ 0x188, ['_LIST_ENTRY']],
'ProcessQuotaUsage' : [ 0x198, ['array', 2, ['unsigned long long']]],
'ProcessQuotaPeak' : [ 0x1a8, ['array', 2, ['unsigned long long']]],
'CommitCharge' : [ 0x1b8, ['unsigned long long']],
'QuotaBlock' : [ 0x1c0, ['pointer64', ['_EPROCESS_QUOTA_BLOCK']]],
'CpuQuotaBlock' : [ 0x1c8, ['pointer64', ['_PS_CPU_QUOTA_BLOCK']]],
'PeakVirtualSize' : [ 0x1d0, ['unsigned long long']],
'VirtualSize' : [ 0x1d8, ['unsigned long long']],
'SessionProcessLinks' : [ 0x1e0, ['_LIST_ENTRY']],
'DebugPort' : [ 0x1f0, ['pointer64', ['void']]],
'ExceptionPortData' : [ 0x1f8, ['pointer64', ['void']]],
'ExceptionPortValue' : [ 0x1f8, ['unsigned long long']],
'ExceptionPortState' : [ 0x1f8, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long long')]],
'ObjectTable' : [ 0x200, ['pointer64', ['_HANDLE_TABLE']]],
'Token' : [ 0x208, ['_EX_FAST_REF']],
'WorkingSetPage' : [ 0x210, ['unsigned long long']],
'AddressCreationLock' : [ 0x218, ['_EX_PUSH_LOCK']],
'RotateInProgress' : [ 0x220, ['pointer64', ['_ETHREAD']]],
'ForkInProgress' : [ 0x228, ['pointer64', ['_ETHREAD']]],
'HardwareTrigger' : [ 0x230, ['unsigned long long']],
'PhysicalVadRoot' : [ 0x238, ['pointer64', ['_MM_AVL_TABLE']]],
'CloneRoot' : [ 0x240, ['pointer64', ['void']]],
'NumberOfPrivatePages' : [ 0x248, ['unsigned long long']],
'NumberOfLockedPages' : [ 0x250, ['unsigned long long']],
'Win32Process' : [ 0x258, ['pointer64', ['void']]],
'Job' : [ 0x260, ['pointer64', ['_EJOB']]],
'SectionObject' : [ 0x268, ['pointer64', ['void']]],
'SectionBaseAddress' : [ 0x270, ['pointer64', ['void']]],
'Cookie' : [ 0x278, ['unsigned long']],
'UmsScheduledThreads' : [ 0x27c, ['unsigned long']],
'WorkingSetWatch' : [ 0x280, ['pointer64', ['_PAGEFAULT_HISTORY']]],
'Win32WindowStation' : [ 0x288, ['pointer64', ['void']]],
'InheritedFromUniqueProcessId' : [ 0x290, ['pointer64', ['void']]],
'LdtInformation' : [ 0x298, ['pointer64', ['void']]],
'Spare' : [ 0x2a0, ['pointer64', ['void']]],
'ConsoleHostProcess' : [ 0x2a8, ['unsigned long long']],
'DeviceMap' : [ 0x2b0, ['pointer64', ['void']]],
'EtwDataSource' : [ 0x2b8, ['pointer64', ['void']]],
'FreeTebHint' : [ 0x2c0, ['pointer64', ['void']]],
'FreeUmsTebHint' : [ 0x2c8, ['pointer64', ['void']]],
'PageDirectoryPte' : [ 0x2d0, ['_HARDWARE_PTE']],
'Filler' : [ 0x2d0, ['unsigned long long']],
'Session' : [ 0x2d8, ['pointer64', ['void']]],
'ImageFileName' : [ 0x2e0, ['array', 15, ['unsigned char']]],
'PriorityClass' : [ 0x2ef, ['unsigned char']],
'JobLinks' : [ 0x2f0, ['_LIST_ENTRY']],
'LockedPagesList' : [ 0x300, ['pointer64', ['void']]],
'ThreadListHead' : [ 0x308, ['_LIST_ENTRY']],
'SecurityPort' : [ 0x318, ['pointer64', ['void']]],
'Wow64Process' : [ 0x320, ['pointer64', ['void']]],
'ActiveThreads' : [ 0x328, ['unsigned long']],
'ImagePathHash' : [ 0x32c, ['unsigned long']],
'DefaultHardErrorProcessing' : [ 0x330, ['unsigned long']],
'LastThreadExitStatus' : [ 0x334, ['long']],
'Peb' : [ 0x338, ['pointer64', ['_PEB']]],
'PrefetchTrace' : [ 0x340, ['_EX_FAST_REF']],
'ReadOperationCount' : [ 0x348, ['_LARGE_INTEGER']],
'WriteOperationCount' : [ 0x350, ['_LARGE_INTEGER']],
'OtherOperationCount' : [ 0x358, ['_LARGE_INTEGER']],
'ReadTransferCount' : [ 0x360, ['_LARGE_INTEGER']],
'WriteTransferCount' : [ 0x368, ['_LARGE_INTEGER']],
'OtherTransferCount' : [ 0x370, ['_LARGE_INTEGER']],
'CommitChargeLimit' : [ 0x378, ['unsigned long long']],
'CommitChargePeak' : [ 0x380, ['unsigned long long']],
'AweInfo' : [ 0x388, ['pointer64', ['void']]],
'SeAuditProcessCreationInfo' : [ 0x390, ['_SE_AUDIT_PROCESS_CREATION_INFO']],
'Vm' : [ 0x398, ['_MMSUPPORT']],
'MmProcessLinks' : [ 0x420, ['_LIST_ENTRY']],
'HighestUserAddress' : [ 0x430, ['pointer64', ['void']]],
'ModifiedPageCount' : [ 0x438, ['unsigned long']],
'Flags2' : [ 0x43c, ['unsigned long']],
'JobNotReallyActive' : [ 0x43c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AccountingFolded' : [ 0x43c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'NewProcessReported' : [ 0x43c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ExitProcessReported' : [ 0x43c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ReportCommitChanges' : [ 0x43c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'LastReportMemory' : [ 0x43c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ReportPhysicalPageChanges' : [ 0x43c, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'HandleTableRundown' : [ 0x43c, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'NeedsHandleRundown' : [ 0x43c, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'RefTraceEnabled' : [ 0x43c, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'NumaAware' : [ 0x43c, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ProtectedProcess' : [ 0x43c, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'DefaultPagePriority' : [ 0x43c, ['BitField', dict(start_bit = 12, end_bit = 15, native_type='unsigned long')]],
'PrimaryTokenFrozen' : [ 0x43c, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessVerifierTarget' : [ 0x43c, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'StackRandomizationDisabled' : [ 0x43c, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'AffinityPermanent' : [ 0x43c, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'AffinityUpdateEnable' : [ 0x43c, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'PropagateNode' : [ 0x43c, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'ExplicitAffinity' : [ 0x43c, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Flags' : [ 0x440, ['unsigned long']],
'CreateReported' : [ 0x440, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoDebugInherit' : [ 0x440, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessExiting' : [ 0x440, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessDelete' : [ 0x440, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow64SplitPages' : [ 0x440, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'VmDeleted' : [ 0x440, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'OutswapEnabled' : [ 0x440, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Outswapped' : [ 0x440, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ForkFailed' : [ 0x440, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Wow64VaSpace4Gb' : [ 0x440, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'AddressSpaceInitialized' : [ 0x440, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
'SetTimerResolution' : [ 0x440, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x440, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'DeprioritizeViews' : [ 0x440, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WriteWatch' : [ 0x440, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessInSession' : [ 0x440, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'OverrideAddressSpace' : [ 0x440, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HasAddressSpace' : [ 0x440, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'LaunchPrefetched' : [ 0x440, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'InjectInpageErrors' : [ 0x440, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'VmTopDown' : [ 0x440, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'ImageNotifyDone' : [ 0x440, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'PdeUpdateNeeded' : [ 0x440, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'VdmAllowed' : [ 0x440, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'CrossSessionCreate' : [ 0x440, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'ProcessInserted' : [ 0x440, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'DefaultIoPriority' : [ 0x440, ['BitField', dict(start_bit = 27, end_bit = 30, native_type='unsigned long')]],
'ProcessSelfDelete' : [ 0x440, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'SetTimerResolutionLink' : [ 0x440, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'ExitStatus' : [ 0x444, ['long']],
'VadRoot' : [ 0x448, ['_MM_AVL_TABLE']],
'AlpcContext' : [ 0x488, ['_ALPC_PROCESS_CONTEXT']],
'TimerResolutionLink' : [ 0x4a8, ['_LIST_ENTRY']],
'RequestedTimerResolution' : [ 0x4b8, ['unsigned long']],
'ActiveThreadsHighWatermark' : [ 0x4bc, ['unsigned long']],
'SmallestTimerResolution' : [ 0x4c0, ['unsigned long']],
'TimerResolutionStackRecord' : [ 0x4c8, ['pointer64', ['_PO_DIAG_STACK_RECORD']]],
} ],
'_KPROCESS' : [ 0x160, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'ProfileListHead' : [ 0x18, ['_LIST_ENTRY']],
'DirectoryTableBase' : [ 0x28, ['unsigned long long']],
'ThreadListHead' : [ 0x30, ['_LIST_ENTRY']],
'ProcessLock' : [ 0x40, ['unsigned long long']],
'Affinity' : [ 0x48, ['_KAFFINITY_EX']],
'ReadyListHead' : [ 0x70, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x80, ['_SINGLE_LIST_ENTRY']],
'ActiveProcessors' : [ 0x88, ['_KAFFINITY_EX']],
'AutoAlignment' : [ 0xb0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='long')]],
'DisableBoost' : [ 0xb0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='long')]],
'DisableQuantum' : [ 0xb0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='long')]],
'ActiveGroupsMask' : [ 0xb0, ['BitField', dict(start_bit = 3, end_bit = 7, native_type='unsigned long')]],
'ReservedFlags' : [ 0xb0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='long')]],
'ProcessFlags' : [ 0xb0, ['long']],
'BasePriority' : [ 0xb4, ['unsigned char']],
'QuantumReset' : [ 0xb5, ['unsigned char']],
'Visited' : [ 0xb6, ['unsigned char']],
'Unused3' : [ 0xb7, ['unsigned char']],
'ThreadSeed' : [ 0xb8, ['array', 4, ['unsigned long']]],
'IdealNode' : [ 0xc8, ['array', 4, ['unsigned short']]],
'IdealGlobalNode' : [ 0xd0, ['unsigned short']],
'Flags' : [ 0xd2, ['_KEXECUTE_OPTIONS']],
'Unused1' : [ 0xd3, ['unsigned char']],
'Unused2' : [ 0xd4, ['unsigned long']],
'Unused4' : [ 0xd8, ['unsigned long']],
'StackCount' : [ 0xdc, ['_KSTACK_COUNT']],
'ProcessListEntry' : [ 0xe0, ['_LIST_ENTRY']],
'CycleTime' : [ 0xf0, ['unsigned long long']],
'KernelTime' : [ 0xf8, ['unsigned long']],
'UserTime' : [ 0xfc, ['unsigned long']],
'InstrumentationCallback' : [ 0x100, ['pointer64', ['void']]],
'LdtSystemDescriptor' : [ 0x108, ['_KGDTENTRY64']],
'LdtBaseAddress' : [ 0x118, ['pointer64', ['void']]],
'LdtProcessLock' : [ 0x120, ['_KGUARDED_MUTEX']],
'LdtFreeSelectorHint' : [ 0x158, ['unsigned short']],
'LdtTableLength' : [ 0x15a, ['unsigned short']],
} ],
'__unnamed_12d9' : [ 0x2c, {
'InitialPrivilegeSet' : [ 0x0, ['_INITIAL_PRIVILEGE_SET']],
'PrivilegeSet' : [ 0x0, ['_PRIVILEGE_SET']],
} ],
'_ACCESS_STATE' : [ 0xa0, {
'OperationID' : [ 0x0, ['_LUID']],
'SecurityEvaluated' : [ 0x8, ['unsigned char']],
'GenerateAudit' : [ 0x9, ['unsigned char']],
'GenerateOnClose' : [ 0xa, ['unsigned char']],
'PrivilegesAllocated' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['unsigned long']],
'RemainingDesiredAccess' : [ 0x10, ['unsigned long']],
'PreviouslyGrantedAccess' : [ 0x14, ['unsigned long']],
'OriginalDesiredAccess' : [ 0x18, ['unsigned long']],
'SubjectSecurityContext' : [ 0x20, ['_SECURITY_SUBJECT_CONTEXT']],
'SecurityDescriptor' : [ 0x40, ['pointer64', ['void']]],
'AuxData' : [ 0x48, ['pointer64', ['void']]],
'Privileges' : [ 0x50, ['__unnamed_12d9']],
'AuditPrivileges' : [ 0x7c, ['unsigned char']],
'ObjectName' : [ 0x80, ['_UNICODE_STRING']],
'ObjectTypeName' : [ 0x90, ['_UNICODE_STRING']],
} ],
'_AUX_ACCESS_DATA' : [ 0xd8, {
'PrivilegesUsed' : [ 0x0, ['pointer64', ['_PRIVILEGE_SET']]],
'GenericMapping' : [ 0x8, ['_GENERIC_MAPPING']],
'AccessesToAudit' : [ 0x18, ['unsigned long']],
'MaximumAuditMask' : [ 0x1c, ['unsigned long']],
'TransactionId' : [ 0x20, ['_GUID']],
'NewSecurityDescriptor' : [ 0x30, ['pointer64', ['void']]],
'ExistingSecurityDescriptor' : [ 0x38, ['pointer64', ['void']]],
'ParentSecurityDescriptor' : [ 0x40, ['pointer64', ['void']]],
'DeRefSecurityDescriptor' : [ 0x48, ['pointer64', ['void']]],
'SDLock' : [ 0x50, ['pointer64', ['void']]],
'AccessReasons' : [ 0x58, ['_ACCESS_REASONS']],
} ],
'__unnamed_12e8' : [ 0x8, {
'MasterIrp' : [ 0x0, ['pointer64', ['_IRP']]],
'IrpCount' : [ 0x0, ['long']],
'SystemBuffer' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_12ed' : [ 0x10, {
'UserApcRoutine' : [ 0x0, ['pointer64', ['void']]],
'IssuingProcess' : [ 0x0, ['pointer64', ['void']]],
'UserApcContext' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_12ef' : [ 0x10, {
'AsynchronousParameters' : [ 0x0, ['__unnamed_12ed']],
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'__unnamed_12fa' : [ 0x50, {
'DeviceQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DriverContext' : [ 0x0, ['array', 4, ['pointer64', ['void']]]],
'Thread' : [ 0x20, ['pointer64', ['_ETHREAD']]],
'AuxiliaryBuffer' : [ 0x28, ['pointer64', ['unsigned char']]],
'ListEntry' : [ 0x30, ['_LIST_ENTRY']],
'CurrentStackLocation' : [ 0x40, ['pointer64', ['_IO_STACK_LOCATION']]],
'PacketType' : [ 0x40, ['unsigned long']],
'OriginalFileObject' : [ 0x48, ['pointer64', ['_FILE_OBJECT']]],
} ],
'__unnamed_12fc' : [ 0x58, {
'Overlay' : [ 0x0, ['__unnamed_12fa']],
'Apc' : [ 0x0, ['_KAPC']],
'CompletionKey' : [ 0x0, ['pointer64', ['void']]],
} ],
'_IRP' : [ 0xd0, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'MdlAddress' : [ 0x8, ['pointer64', ['_MDL']]],
'Flags' : [ 0x10, ['unsigned long']],
'AssociatedIrp' : [ 0x18, ['__unnamed_12e8']],
'ThreadListEntry' : [ 0x20, ['_LIST_ENTRY']],
'IoStatus' : [ 0x30, ['_IO_STATUS_BLOCK']],
'RequestorMode' : [ 0x40, ['unsigned char']],
'PendingReturned' : [ 0x41, ['unsigned char']],
'StackCount' : [ 0x42, ['unsigned char']],
'CurrentLocation' : [ 0x43, ['unsigned char']],
'Cancel' : [ 0x44, ['unsigned char']],
'CancelIrql' : [ 0x45, ['unsigned char']],
'ApcEnvironment' : [ 0x46, ['unsigned char']],
'AllocationFlags' : [ 0x47, ['unsigned char']],
'UserIosb' : [ 0x48, ['pointer64', ['_IO_STATUS_BLOCK']]],
'UserEvent' : [ 0x50, ['pointer64', ['_KEVENT']]],
'Overlay' : [ 0x58, ['__unnamed_12ef']],
'CancelRoutine' : [ 0x68, ['pointer64', ['void']]],
'UserBuffer' : [ 0x70, ['pointer64', ['void']]],
'Tail' : [ 0x78, ['__unnamed_12fc']],
} ],
'__unnamed_1303' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'FileAttributes' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'EaLength' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1307' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'Parameters' : [ 0x18, ['pointer64', ['_NAMED_PIPE_CREATE_PARAMETERS']]],
} ],
'__unnamed_130b' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'Parameters' : [ 0x18, ['pointer64', ['_MAILSLOT_CREATE_PARAMETERS']]],
} ],
'__unnamed_130d' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Key' : [ 0x8, ['unsigned long']],
'ByteOffset' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1311' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'FileName' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
'FileInformationClass' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'})]],
'FileIndex' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1313' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'CompletionFilter' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1315' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'})]],
} ],
'__unnamed_1317' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'})]],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'ReplaceIfExists' : [ 0x18, ['unsigned char']],
'AdvanceOnly' : [ 0x19, ['unsigned char']],
'ClusterCount' : [ 0x18, ['unsigned long']],
'DeleteHandle' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1319' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'EaList' : [ 0x8, ['pointer64', ['void']]],
'EaListLength' : [ 0x10, ['unsigned long']],
'EaIndex' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_131b' : [ 0x4, {
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_131f' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FsInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileFsVolumeInformation', 2: 'FileFsLabelInformation', 3: 'FileFsSizeInformation', 4: 'FileFsDeviceInformation', 5: 'FileFsAttributeInformation', 6: 'FileFsControlInformation', 7: 'FileFsFullSizeInformation', 8: 'FileFsObjectIdInformation', 9: 'FileFsDriverPathInformation', 10: 'FileFsVolumeFlagsInformation', 11: 'FileFsMaximumInformation'})]],
} ],
'__unnamed_1321' : [ 0x20, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x8, ['unsigned long']],
'FsControlCode' : [ 0x10, ['unsigned long']],
'Type3InputBuffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1323' : [ 0x18, {
'Length' : [ 0x0, ['pointer64', ['_LARGE_INTEGER']]],
'Key' : [ 0x8, ['unsigned long']],
'ByteOffset' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1325' : [ 0x20, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x8, ['unsigned long']],
'IoControlCode' : [ 0x10, ['unsigned long']],
'Type3InputBuffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1327' : [ 0x10, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1329' : [ 0x10, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'SecurityDescriptor' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_132d' : [ 0x10, {
'Vpb' : [ 0x0, ['pointer64', ['_VPB']]],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_1331' : [ 0x8, {
'Srb' : [ 0x0, ['pointer64', ['_SCSI_REQUEST_BLOCK']]],
} ],
'__unnamed_1335' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'StartSid' : [ 0x8, ['pointer64', ['void']]],
'SidList' : [ 0x10, ['pointer64', ['_FILE_GET_QUOTA_INFORMATION']]],
'SidListLength' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1339' : [ 0x4, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusRelations', 1: 'EjectionRelations', 2: 'PowerRelations', 3: 'RemovalRelations', 4: 'TargetDeviceRelation', 5: 'SingleBusRelations', 6: 'TransportRelations'})]],
} ],
'__unnamed_133f' : [ 0x20, {
'InterfaceType' : [ 0x0, ['pointer64', ['_GUID']]],
'Size' : [ 0x8, ['unsigned short']],
'Version' : [ 0xa, ['unsigned short']],
'Interface' : [ 0x10, ['pointer64', ['_INTERFACE']]],
'InterfaceSpecificData' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1343' : [ 0x8, {
'Capabilities' : [ 0x0, ['pointer64', ['_DEVICE_CAPABILITIES']]],
} ],
'__unnamed_1347' : [ 0x8, {
'IoResourceRequirementList' : [ 0x0, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
} ],
'__unnamed_1349' : [ 0x20, {
'WhichSpace' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x8, ['pointer64', ['void']]],
'Offset' : [ 0x10, ['unsigned long']],
'Length' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_134b' : [ 0x1, {
'Lock' : [ 0x0, ['unsigned char']],
} ],
'__unnamed_134f' : [ 0x4, {
'IdType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusQueryDeviceID', 1: 'BusQueryHardwareIDs', 2: 'BusQueryCompatibleIDs', 3: 'BusQueryInstanceID', 4: 'BusQueryDeviceSerialNumber', 5: 'BusQueryContainerID'})]],
} ],
'__unnamed_1353' : [ 0x10, {
'DeviceTextType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceTextDescription', 1: 'DeviceTextLocationInformation'})]],
'LocaleId' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1357' : [ 0x10, {
'InPath' : [ 0x0, ['unsigned char']],
'Reserved' : [ 0x1, ['array', 3, ['unsigned char']]],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'__unnamed_135b' : [ 0x4, {
'PowerState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_135f' : [ 0x8, {
'PowerSequence' : [ 0x0, ['pointer64', ['_POWER_SEQUENCE']]],
} ],
'__unnamed_1367' : [ 0x20, {
'SystemContext' : [ 0x0, ['unsigned long']],
'SystemPowerStateContext' : [ 0x0, ['_SYSTEM_POWER_STATE_CONTEXT']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SystemPowerState', 1: 'DevicePowerState'})]],
'State' : [ 0x10, ['_POWER_STATE']],
'ShutdownType' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
} ],
'__unnamed_136b' : [ 0x10, {
'AllocatedResources' : [ 0x0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'AllocatedResourcesTranslated' : [ 0x8, ['pointer64', ['_CM_RESOURCE_LIST']]],
} ],
'__unnamed_136d' : [ 0x20, {
'ProviderId' : [ 0x0, ['unsigned long long']],
'DataPath' : [ 0x8, ['pointer64', ['void']]],
'BufferSize' : [ 0x10, ['unsigned long']],
'Buffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_136f' : [ 0x20, {
'Argument1' : [ 0x0, ['pointer64', ['void']]],
'Argument2' : [ 0x8, ['pointer64', ['void']]],
'Argument3' : [ 0x10, ['pointer64', ['void']]],
'Argument4' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1371' : [ 0x20, {
'Create' : [ 0x0, ['__unnamed_1303']],
'CreatePipe' : [ 0x0, ['__unnamed_1307']],
'CreateMailslot' : [ 0x0, ['__unnamed_130b']],
'Read' : [ 0x0, ['__unnamed_130d']],
'Write' : [ 0x0, ['__unnamed_130d']],
'QueryDirectory' : [ 0x0, ['__unnamed_1311']],
'NotifyDirectory' : [ 0x0, ['__unnamed_1313']],
'QueryFile' : [ 0x0, ['__unnamed_1315']],
'SetFile' : [ 0x0, ['__unnamed_1317']],
'QueryEa' : [ 0x0, ['__unnamed_1319']],
'SetEa' : [ 0x0, ['__unnamed_131b']],
'QueryVolume' : [ 0x0, ['__unnamed_131f']],
'SetVolume' : [ 0x0, ['__unnamed_131f']],
'FileSystemControl' : [ 0x0, ['__unnamed_1321']],
'LockControl' : [ 0x0, ['__unnamed_1323']],
'DeviceIoControl' : [ 0x0, ['__unnamed_1325']],
'QuerySecurity' : [ 0x0, ['__unnamed_1327']],
'SetSecurity' : [ 0x0, ['__unnamed_1329']],
'MountVolume' : [ 0x0, ['__unnamed_132d']],
'VerifyVolume' : [ 0x0, ['__unnamed_132d']],
'Scsi' : [ 0x0, ['__unnamed_1331']],
'QueryQuota' : [ 0x0, ['__unnamed_1335']],
'SetQuota' : [ 0x0, ['__unnamed_131b']],
'QueryDeviceRelations' : [ 0x0, ['__unnamed_1339']],
'QueryInterface' : [ 0x0, ['__unnamed_133f']],
'DeviceCapabilities' : [ 0x0, ['__unnamed_1343']],
'FilterResourceRequirements' : [ 0x0, ['__unnamed_1347']],
'ReadWriteConfig' : [ 0x0, ['__unnamed_1349']],
'SetLock' : [ 0x0, ['__unnamed_134b']],
'QueryId' : [ 0x0, ['__unnamed_134f']],
'QueryDeviceText' : [ 0x0, ['__unnamed_1353']],
'UsageNotification' : [ 0x0, ['__unnamed_1357']],
'WaitWake' : [ 0x0, ['__unnamed_135b']],
'PowerSequence' : [ 0x0, ['__unnamed_135f']],
'Power' : [ 0x0, ['__unnamed_1367']],
'StartDevice' : [ 0x0, ['__unnamed_136b']],
'WMI' : [ 0x0, ['__unnamed_136d']],
'Others' : [ 0x0, ['__unnamed_136f']],
} ],
'_IO_STACK_LOCATION' : [ 0x48, {
'MajorFunction' : [ 0x0, ['unsigned char']],
'MinorFunction' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned char']],
'Control' : [ 0x3, ['unsigned char']],
'Parameters' : [ 0x8, ['__unnamed_1371']],
'DeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'CompletionRoutine' : [ 0x38, ['pointer64', ['void']]],
'Context' : [ 0x40, ['pointer64', ['void']]],
} ],
'__unnamed_1387' : [ 0x48, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Wcb' : [ 0x0, ['_WAIT_CONTEXT_BLOCK']],
} ],
'_DEVICE_OBJECT' : [ 0x150, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'ReferenceCount' : [ 0x4, ['long']],
'DriverObject' : [ 0x8, ['pointer64', ['_DRIVER_OBJECT']]],
'NextDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT']]],
'CurrentIrp' : [ 0x20, ['pointer64', ['_IRP']]],
'Timer' : [ 0x28, ['pointer64', ['_IO_TIMER']]],
'Flags' : [ 0x30, ['unsigned long']],
'Characteristics' : [ 0x34, ['unsigned long']],
'Vpb' : [ 0x38, ['pointer64', ['_VPB']]],
'DeviceExtension' : [ 0x40, ['pointer64', ['void']]],
'DeviceType' : [ 0x48, ['unsigned long']],
'StackSize' : [ 0x4c, ['unsigned char']],
'Queue' : [ 0x50, ['__unnamed_1387']],
'AlignmentRequirement' : [ 0x98, ['unsigned long']],
'DeviceQueue' : [ 0xa0, ['_KDEVICE_QUEUE']],
'Dpc' : [ 0xc8, ['_KDPC']],
'ActiveThreadCount' : [ 0x108, ['unsigned long']],
'SecurityDescriptor' : [ 0x110, ['pointer64', ['void']]],
'DeviceLock' : [ 0x118, ['_KEVENT']],
'SectorSize' : [ 0x130, ['unsigned short']],
'Spare1' : [ 0x132, ['unsigned short']],
'DeviceObjectExtension' : [ 0x138, ['pointer64', ['_DEVOBJ_EXTENSION']]],
'Reserved' : [ 0x140, ['pointer64', ['void']]],
} ],
'_KDPC' : [ 0x40, {
'Type' : [ 0x0, ['unsigned char']],
'Importance' : [ 0x1, ['unsigned char']],
'Number' : [ 0x2, ['unsigned short']],
'DpcListEntry' : [ 0x8, ['_LIST_ENTRY']],
'DeferredRoutine' : [ 0x18, ['pointer64', ['void']]],
'DeferredContext' : [ 0x20, ['pointer64', ['void']]],
'SystemArgument1' : [ 0x28, ['pointer64', ['void']]],
'SystemArgument2' : [ 0x30, ['pointer64', ['void']]],
'DpcData' : [ 0x38, ['pointer64', ['void']]],
} ],
'_IO_DRIVER_CREATE_CONTEXT' : [ 0x20, {
'Size' : [ 0x0, ['short']],
'ExtraCreateParameter' : [ 0x8, ['pointer64', ['_ECP_LIST']]],
'DeviceObjectHint' : [ 0x10, ['pointer64', ['void']]],
'TxnParameters' : [ 0x18, ['pointer64', ['_TXN_PARAMETER_BLOCK']]],
} ],
'_IO_PRIORITY_INFO' : [ 0x10, {
'Size' : [ 0x0, ['unsigned long']],
'ThreadPriority' : [ 0x4, ['unsigned long']],
'PagePriority' : [ 0x8, ['unsigned long']],
'IoPriority' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'IoPriorityVeryLow', 1: 'IoPriorityLow', 2: 'IoPriorityNormal', 3: 'IoPriorityHigh', 4: 'IoPriorityCritical', 5: 'MaxIoPriorityTypes'})]],
} ],
'_OBJECT_ATTRIBUTES' : [ 0x30, {
'Length' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x8, ['pointer64', ['void']]],
'ObjectName' : [ 0x10, ['pointer64', ['_UNICODE_STRING']]],
'Attributes' : [ 0x18, ['unsigned long']],
'SecurityDescriptor' : [ 0x20, ['pointer64', ['void']]],
'SecurityQualityOfService' : [ 0x28, ['pointer64', ['void']]],
} ],
'_OBJECT_HANDLE_INFORMATION' : [ 0x8, {
'HandleAttributes' : [ 0x0, ['unsigned long']],
'GrantedAccess' : [ 0x4, ['unsigned long']],
} ],
'_EVENT_DATA_DESCRIPTOR' : [ 0x10, {
'Ptr' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_EVENT_DESCRIPTOR' : [ 0x10, {
'Id' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned char']],
'Channel' : [ 0x3, ['unsigned char']],
'Level' : [ 0x4, ['unsigned char']],
'Opcode' : [ 0x5, ['unsigned char']],
'Task' : [ 0x6, ['unsigned short']],
'Keyword' : [ 0x8, ['unsigned long long']],
} ],
'_PERFINFO_GROUPMASK' : [ 0x20, {
'Masks' : [ 0x0, ['array', 8, ['unsigned long']]],
} ],
'_FILE_OBJECT' : [ 0xd8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'Vpb' : [ 0x10, ['pointer64', ['_VPB']]],
'FsContext' : [ 0x18, ['pointer64', ['void']]],
'FsContext2' : [ 0x20, ['pointer64', ['void']]],
'SectionObjectPointer' : [ 0x28, ['pointer64', ['_SECTION_OBJECT_POINTERS']]],
'PrivateCacheMap' : [ 0x30, ['pointer64', ['void']]],
'FinalStatus' : [ 0x38, ['long']],
'RelatedFileObject' : [ 0x40, ['pointer64', ['_FILE_OBJECT']]],
'LockOperation' : [ 0x48, ['unsigned char']],
'DeletePending' : [ 0x49, ['unsigned char']],
'ReadAccess' : [ 0x4a, ['unsigned char']],
'WriteAccess' : [ 0x4b, ['unsigned char']],
'DeleteAccess' : [ 0x4c, ['unsigned char']],
'SharedRead' : [ 0x4d, ['unsigned char']],
'SharedWrite' : [ 0x4e, ['unsigned char']],
'SharedDelete' : [ 0x4f, ['unsigned char']],
'Flags' : [ 0x50, ['unsigned long']],
'FileName' : [ 0x58, ['_UNICODE_STRING']],
'CurrentByteOffset' : [ 0x68, ['_LARGE_INTEGER']],
'Waiters' : [ 0x70, ['unsigned long']],
'Busy' : [ 0x74, ['unsigned long']],
'LastLock' : [ 0x78, ['pointer64', ['void']]],
'Lock' : [ 0x80, ['_KEVENT']],
'Event' : [ 0x98, ['_KEVENT']],
'CompletionContext' : [ 0xb0, ['pointer64', ['_IO_COMPLETION_CONTEXT']]],
'IrpListLock' : [ 0xb8, ['unsigned long long']],
'IrpList' : [ 0xc0, ['_LIST_ENTRY']],
'FileObjectExtension' : [ 0xd0, ['pointer64', ['void']]],
} ],
'_EX_RUNDOWN_REF' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_MM_PAGE_ACCESS_INFO_HEADER' : [ 0x48, {
'Link' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'MmPteAccessType', 1: 'MmCcReadAheadType', 2: 'MmPfnRepurposeType', 3: 'MmMaximumPageAccessType'})]],
'EmptySequenceNumber' : [ 0xc, ['unsigned long']],
'CurrentFileIndex' : [ 0xc, ['unsigned long']],
'CreateTime' : [ 0x10, ['unsigned long long']],
'EmptyTime' : [ 0x18, ['unsigned long long']],
'TempEntry' : [ 0x18, ['pointer64', ['_MM_PAGE_ACCESS_INFO']]],
'PageEntry' : [ 0x20, ['pointer64', ['_MM_PAGE_ACCESS_INFO']]],
'FileEntry' : [ 0x28, ['pointer64', ['unsigned long long']]],
'FirstFileEntry' : [ 0x30, ['pointer64', ['unsigned long long']]],
'Process' : [ 0x38, ['pointer64', ['_EPROCESS']]],
'SessionId' : [ 0x40, ['unsigned long']],
'PageFrameEntry' : [ 0x20, ['pointer64', ['unsigned long long']]],
'LastPageFrameEntry' : [ 0x28, ['pointer64', ['unsigned long long']]],
} ],
'_WHEA_ERROR_PACKET_V2' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned long']],
'Version' : [ 0x4, ['unsigned long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['_WHEA_ERROR_PACKET_FLAGS']],
'ErrorType' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrTypeProcessor', 1: 'WheaErrTypeMemory', 2: 'WheaErrTypePCIExpress', 3: 'WheaErrTypeNMI', 4: 'WheaErrTypePCIXBus', 5: 'WheaErrTypePCIXDevice', 6: 'WheaErrTypeGeneric'})]],
'ErrorSeverity' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'ErrorSourceId' : [ 0x18, ['unsigned long']],
'ErrorSourceType' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSrcTypeMCE', 1: 'WheaErrSrcTypeCMC', 2: 'WheaErrSrcTypeCPE', 3: 'WheaErrSrcTypeNMI', 4: 'WheaErrSrcTypePCIe', 5: 'WheaErrSrcTypeGeneric', 6: 'WheaErrSrcTypeINIT', 7: 'WheaErrSrcTypeBOOT', 8: 'WheaErrSrcTypeSCIGeneric', 9: 'WheaErrSrcTypeIPFMCA', 10: 'WheaErrSrcTypeIPFCMC', 11: 'WheaErrSrcTypeIPFCPE', 12: 'WheaErrSrcTypeMax'})]],
'NotifyType' : [ 0x20, ['_GUID']],
'Context' : [ 0x30, ['unsigned long long']],
'DataFormat' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'WheaDataFormatIPFSalRecord', 1: 'WheaDataFormatXPFMCA', 2: 'WheaDataFormatMemory', 3: 'WheaDataFormatPCIExpress', 4: 'WheaDataFormatNMIPort', 5: 'WheaDataFormatPCIXBus', 6: 'WheaDataFormatPCIXDevice', 7: 'WheaDataFormatGeneric', 8: 'WheaDataFormatMax'})]],
'Reserved1' : [ 0x3c, ['unsigned long']],
'DataOffset' : [ 0x40, ['unsigned long']],
'DataLength' : [ 0x44, ['unsigned long']],
'PshedDataOffset' : [ 0x48, ['unsigned long']],
'PshedDataLength' : [ 0x4c, ['unsigned long']],
} ],
'_WHEA_ERROR_RECORD' : [ 0xc8, {
'Header' : [ 0x0, ['_WHEA_ERROR_RECORD_HEADER']],
'SectionDescriptor' : [ 0x80, ['array', 1, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR']]],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR' : [ 0x48, {
'SectionOffset' : [ 0x0, ['unsigned long']],
'SectionLength' : [ 0x4, ['unsigned long']],
'Revision' : [ 0x8, ['_WHEA_REVISION']],
'ValidBits' : [ 0xa, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS']],
'Reserved' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS']],
'SectionType' : [ 0x10, ['_GUID']],
'FRUId' : [ 0x20, ['_GUID']],
'SectionSeverity' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'FRUText' : [ 0x34, ['array', 20, ['unsigned char']]],
} ],
'_GUID' : [ 0x10, {
'Data1' : [ 0x0, ['unsigned long']],
'Data2' : [ 0x4, ['unsigned short']],
'Data3' : [ 0x6, ['unsigned short']],
'Data4' : [ 0x8, ['array', 8, ['unsigned char']]],
} ],
'_FSRTL_ADVANCED_FCB_HEADER' : [ 0x58, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned char']],
'IsFastIoPossible' : [ 0x5, ['unsigned char']],
'Flags2' : [ 0x6, ['unsigned char']],
'Reserved' : [ 0x7, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned char')]],
'Version' : [ 0x7, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'Resource' : [ 0x8, ['pointer64', ['_ERESOURCE']]],
'PagingIoResource' : [ 0x10, ['pointer64', ['_ERESOURCE']]],
'AllocationSize' : [ 0x18, ['_LARGE_INTEGER']],
'FileSize' : [ 0x20, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x28, ['_LARGE_INTEGER']],
'FastMutex' : [ 0x30, ['pointer64', ['_FAST_MUTEX']]],
'FilterContexts' : [ 0x38, ['_LIST_ENTRY']],
'PushLock' : [ 0x48, ['_EX_PUSH_LOCK']],
'FileContextSupportPointer' : [ 0x50, ['pointer64', ['pointer64', ['void']]]],
} ],
'_iobuf' : [ 0x30, {
'_ptr' : [ 0x0, ['pointer64', ['unsigned char']]],
'_cnt' : [ 0x8, ['long']],
'_base' : [ 0x10, ['pointer64', ['unsigned char']]],
'_flag' : [ 0x18, ['long']],
'_file' : [ 0x1c, ['long']],
'_charbuf' : [ 0x20, ['long']],
'_bufsiz' : [ 0x24, ['long']],
'_tmpfname' : [ 0x28, ['pointer64', ['unsigned char']]],
} ],
'__unnamed_14ef' : [ 0x8, {
'Long' : [ 0x0, ['unsigned long long']],
'VolatileLong' : [ 0x0, ['unsigned long long']],
'Hard' : [ 0x0, ['_MMPTE_HARDWARE']],
'Flush' : [ 0x0, ['_HARDWARE_PTE']],
'Proto' : [ 0x0, ['_MMPTE_PROTOTYPE']],
'Soft' : [ 0x0, ['_MMPTE_SOFTWARE']],
'TimeStamp' : [ 0x0, ['_MMPTE_TIMESTAMP']],
'Trans' : [ 0x0, ['_MMPTE_TRANSITION']],
'Subsect' : [ 0x0, ['_MMPTE_SUBSECTION']],
'List' : [ 0x0, ['_MMPTE_LIST']],
} ],
'_MMPTE' : [ 0x8, {
'u' : [ 0x0, ['__unnamed_14ef']],
} ],
'__unnamed_1500' : [ 0x10, {
'I386' : [ 0x0, ['_I386_LOADER_BLOCK']],
'Ia64' : [ 0x0, ['_IA64_LOADER_BLOCK']],
} ],
'_LOADER_PARAMETER_BLOCK' : [ 0xf0, {
'OsMajorVersion' : [ 0x0, ['unsigned long']],
'OsMinorVersion' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'LoadOrderListHead' : [ 0x10, ['_LIST_ENTRY']],
'MemoryDescriptorListHead' : [ 0x20, ['_LIST_ENTRY']],
'BootDriverListHead' : [ 0x30, ['_LIST_ENTRY']],
'KernelStack' : [ 0x40, ['unsigned long long']],
'Prcb' : [ 0x48, ['unsigned long long']],
'Process' : [ 0x50, ['unsigned long long']],
'Thread' : [ 0x58, ['unsigned long long']],
'RegistryLength' : [ 0x60, ['unsigned long']],
'RegistryBase' : [ 0x68, ['pointer64', ['void']]],
'ConfigurationRoot' : [ 0x70, ['pointer64', ['_CONFIGURATION_COMPONENT_DATA']]],
'ArcBootDeviceName' : [ 0x78, ['pointer64', ['unsigned char']]],
'ArcHalDeviceName' : [ 0x80, ['pointer64', ['unsigned char']]],
'NtBootPathName' : [ 0x88, ['pointer64', ['unsigned char']]],
'NtHalPathName' : [ 0x90, ['pointer64', ['unsigned char']]],
'LoadOptions' : [ 0x98, ['pointer64', ['unsigned char']]],
'NlsData' : [ 0xa0, ['pointer64', ['_NLS_DATA_BLOCK']]],
'ArcDiskInformation' : [ 0xa8, ['pointer64', ['_ARC_DISK_INFORMATION']]],
'OemFontFile' : [ 0xb0, ['pointer64', ['void']]],
'Extension' : [ 0xb8, ['pointer64', ['_LOADER_PARAMETER_EXTENSION']]],
'u' : [ 0xc0, ['__unnamed_1500']],
'FirmwareInformation' : [ 0xd0, ['_FIRMWARE_INFORMATION_LOADER_BLOCK']],
} ],
'_KLOCK_QUEUE_HANDLE' : [ 0x18, {
'LockQueue' : [ 0x0, ['_KSPIN_LOCK_QUEUE']],
'OldIrql' : [ 0x10, ['unsigned char']],
} ],
'_MMPFNLIST' : [ 0x28, {
'Total' : [ 0x0, ['unsigned long long']],
'ListName' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'ZeroedPageList', 1: 'FreePageList', 2: 'StandbyPageList', 3: 'ModifiedPageList', 4: 'ModifiedNoWritePageList', 5: 'BadPageList', 6: 'ActiveAndValid', 7: 'TransitionPage'})]],
'Flink' : [ 0x10, ['unsigned long long']],
'Blink' : [ 0x18, ['unsigned long long']],
'Lock' : [ 0x20, ['unsigned long long']],
} ],
'__unnamed_152f' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long long']],
'WsIndex' : [ 0x0, ['unsigned long']],
'Event' : [ 0x0, ['pointer64', ['_KEVENT']]],
'Next' : [ 0x0, ['pointer64', ['void']]],
'VolatileNext' : [ 0x0, ['pointer64', ['void']]],
'KernelStackOwner' : [ 0x0, ['pointer64', ['_KTHREAD']]],
'NextStackPfn' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
} ],
'__unnamed_1531' : [ 0x8, {
'Blink' : [ 0x0, ['unsigned long long']],
'ImageProtoPte' : [ 0x0, ['pointer64', ['_MMPTE']]],
'ShareCount' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_1534' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'VolatileReferenceCount' : [ 0x0, ['short']],
'ShortFlags' : [ 0x2, ['unsigned short']],
} ],
'__unnamed_1536' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'e1' : [ 0x2, ['_MMPFNENTRY']],
'e2' : [ 0x0, ['__unnamed_1534']],
} ],
'__unnamed_153e' : [ 0x8, {
'PteFrame' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 52, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 55, native_type='unsigned long long')]],
'PfnImageVerified' : [ 0x0, ['BitField', dict(start_bit = 55, end_bit = 56, native_type='unsigned long long')]],
'AweAllocation' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 57, native_type='unsigned long long')]],
'PrototypePte' : [ 0x0, ['BitField', dict(start_bit = 57, end_bit = 58, native_type='unsigned long long')]],
'PageColor' : [ 0x0, ['BitField', dict(start_bit = 58, end_bit = 64, native_type='unsigned long long')]],
} ],
'_MMPFN' : [ 0x30, {
'u1' : [ 0x0, ['__unnamed_152f']],
'u2' : [ 0x8, ['__unnamed_1531']],
'PteAddress' : [ 0x10, ['pointer64', ['_MMPTE']]],
'VolatilePteAddress' : [ 0x10, ['pointer64', ['void']]],
'Lock' : [ 0x10, ['long']],
'PteLong' : [ 0x10, ['unsigned long long']],
'u3' : [ 0x18, ['__unnamed_1536']],
'UsedPageTableEntries' : [ 0x1c, ['unsigned short']],
'VaType' : [ 0x1e, ['unsigned char']],
'ViewCount' : [ 0x1f, ['unsigned char']],
'OriginalPte' : [ 0x20, ['_MMPTE']],
'AweReferenceCount' : [ 0x20, ['long']],
'u4' : [ 0x28, ['__unnamed_153e']],
} ],
'_MI_COLOR_BASE' : [ 0x10, {
'ColorPointer' : [ 0x0, ['pointer64', ['unsigned short']]],
'ColorMask' : [ 0x8, ['unsigned short']],
'ColorNode' : [ 0xa, ['unsigned short']],
} ],
'_MMSUPPORT' : [ 0x88, {
'WorkingSetMutex' : [ 0x0, ['_EX_PUSH_LOCK']],
'ExitGate' : [ 0x8, ['pointer64', ['_KGATE']]],
'AccessLog' : [ 0x10, ['pointer64', ['void']]],
'WorkingSetExpansionLinks' : [ 0x18, ['_LIST_ENTRY']],
'AgeDistribution' : [ 0x28, ['array', 7, ['unsigned long']]],
'MinimumWorkingSetSize' : [ 0x44, ['unsigned long']],
'WorkingSetSize' : [ 0x48, ['unsigned long']],
'WorkingSetPrivateSize' : [ 0x4c, ['unsigned long']],
'MaximumWorkingSetSize' : [ 0x50, ['unsigned long']],
'ChargedWslePages' : [ 0x54, ['unsigned long']],
'ActualWslePages' : [ 0x58, ['unsigned long']],
'WorkingSetSizeOverhead' : [ 0x5c, ['unsigned long']],
'PeakWorkingSetSize' : [ 0x60, ['unsigned long']],
'HardFaultCount' : [ 0x64, ['unsigned long']],
'VmWorkingSetList' : [ 0x68, ['pointer64', ['_MMWSL']]],
'NextPageColor' : [ 0x70, ['unsigned short']],
'LastTrimStamp' : [ 0x72, ['unsigned short']],
'PageFaultCount' : [ 0x74, ['unsigned long']],
'RepurposeCount' : [ 0x78, ['unsigned long']],
'Spare' : [ 0x7c, ['array', 2, ['unsigned long']]],
'Flags' : [ 0x84, ['_MMSUPPORT_FLAGS']],
} ],
'_MMWSL' : [ 0x488, {
'FirstFree' : [ 0x0, ['unsigned long']],
'FirstDynamic' : [ 0x4, ['unsigned long']],
'LastEntry' : [ 0x8, ['unsigned long']],
'NextSlot' : [ 0xc, ['unsigned long']],
'Wsle' : [ 0x10, ['pointer64', ['_MMWSLE']]],
'LowestPagableAddress' : [ 0x18, ['pointer64', ['void']]],
'LastInitializedWsle' : [ 0x20, ['unsigned long']],
'NextAgingSlot' : [ 0x24, ['unsigned long']],
'NumberOfCommittedPageTables' : [ 0x28, ['unsigned long']],
'VadBitMapHint' : [ 0x2c, ['unsigned long']],
'NonDirectCount' : [ 0x30, ['unsigned long']],
'LastVadBit' : [ 0x34, ['unsigned long']],
'MaximumLastVadBit' : [ 0x38, ['unsigned long']],
'LastAllocationSizeHint' : [ 0x3c, ['unsigned long']],
'LastAllocationSize' : [ 0x40, ['unsigned long']],
'NonDirectHash' : [ 0x48, ['pointer64', ['_MMWSLE_NONDIRECT_HASH']]],
'HashTableStart' : [ 0x50, ['pointer64', ['_MMWSLE_HASH']]],
'HighestPermittedHashAddress' : [ 0x58, ['pointer64', ['_MMWSLE_HASH']]],
'MaximumUserPageTablePages' : [ 0x60, ['unsigned long']],
'MaximumUserPageDirectoryPages' : [ 0x64, ['unsigned long']],
'CommittedPageTables' : [ 0x68, ['pointer64', ['unsigned long']]],
'NumberOfCommittedPageDirectories' : [ 0x70, ['unsigned long']],
'CommittedPageDirectories' : [ 0x78, ['array', 128, ['unsigned long long']]],
'NumberOfCommittedPageDirectoryParents' : [ 0x478, ['unsigned long']],
'CommittedPageDirectoryParents' : [ 0x480, ['array', 1, ['unsigned long long']]],
} ],
'__unnamed_156c' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'Long' : [ 0x0, ['unsigned long long']],
'e1' : [ 0x0, ['_MMWSLENTRY']],
'e2' : [ 0x0, ['_MMWSLE_FREE_ENTRY']],
} ],
'_MMWSLE' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_156c']],
} ],
'__unnamed_157b' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MMSECTION_FLAGS']],
} ],
'__unnamed_1585' : [ 0x10, {
'NumberOfSystemCacheViews' : [ 0x0, ['unsigned long']],
'ImageRelocationStartBit' : [ 0x0, ['unsigned long']],
'WritableUserReferences' : [ 0x4, ['long']],
'ImageRelocationSizeIn64k' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'Unused' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 30, native_type='unsigned long')]],
'BitMap64' : [ 0x4, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'ImageActive' : [ 0x4, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubsectionRoot' : [ 0x8, ['pointer64', ['_MM_SUBSECTION_AVL_TABLE']]],
'SeImageStub' : [ 0x8, ['pointer64', ['_MI_IMAGE_SECURITY_REFERENCE']]],
} ],
'__unnamed_1587' : [ 0x10, {
'e2' : [ 0x0, ['__unnamed_1585']],
} ],
'_CONTROL_AREA' : [ 0x80, {
'Segment' : [ 0x0, ['pointer64', ['_SEGMENT']]],
'DereferenceList' : [ 0x8, ['_LIST_ENTRY']],
'NumberOfSectionReferences' : [ 0x18, ['unsigned long long']],
'NumberOfPfnReferences' : [ 0x20, ['unsigned long long']],
'NumberOfMappedViews' : [ 0x28, ['unsigned long long']],
'NumberOfUserReferences' : [ 0x30, ['unsigned long long']],
'u' : [ 0x38, ['__unnamed_157b']],
'FlushInProgressCount' : [ 0x3c, ['unsigned long']],
'FilePointer' : [ 0x40, ['_EX_FAST_REF']],
'ControlAreaLock' : [ 0x48, ['long']],
'ModifiedWriteCount' : [ 0x4c, ['unsigned long']],
'StartingFrame' : [ 0x4c, ['unsigned long']],
'WaitingForDeletion' : [ 0x50, ['pointer64', ['_MI_SECTION_CREATION_GATE']]],
'u2' : [ 0x58, ['__unnamed_1587']],
'LockedPages' : [ 0x68, ['long long']],
'ViewList' : [ 0x70, ['_LIST_ENTRY']],
} ],
'_MM_STORE_KEY' : [ 0x8, {
'KeyLow' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 60, native_type='unsigned long long')]],
'KeyHigh' : [ 0x0, ['BitField', dict(start_bit = 60, end_bit = 64, native_type='unsigned long long')]],
'EntireKey' : [ 0x0, ['unsigned long long']],
} ],
'_MMPAGING_FILE' : [ 0x90, {
'Size' : [ 0x0, ['unsigned long long']],
'MaximumSize' : [ 0x8, ['unsigned long long']],
'MinimumSize' : [ 0x10, ['unsigned long long']],
'FreeSpace' : [ 0x18, ['unsigned long long']],
'PeakUsage' : [ 0x20, ['unsigned long long']],
'HighestPage' : [ 0x28, ['unsigned long long']],
'File' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'Entry' : [ 0x38, ['array', 2, ['pointer64', ['_MMMOD_WRITER_MDL_ENTRY']]]],
'PageFileName' : [ 0x48, ['_UNICODE_STRING']],
'Bitmap' : [ 0x58, ['pointer64', ['_RTL_BITMAP']]],
'EvictStoreBitmap' : [ 0x60, ['pointer64', ['_RTL_BITMAP']]],
'BitmapHint' : [ 0x68, ['unsigned long']],
'LastAllocationSize' : [ 0x6c, ['unsigned long']],
'ToBeEvictedCount' : [ 0x70, ['unsigned long']],
'PageFileNumber' : [ 0x74, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned short')]],
'BootPartition' : [ 0x74, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'Spare0' : [ 0x74, ['BitField', dict(start_bit = 5, end_bit = 16, native_type='unsigned short')]],
'AdriftMdls' : [ 0x76, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Spare1' : [ 0x76, ['BitField', dict(start_bit = 1, end_bit = 16, native_type='unsigned short')]],
'FileHandle' : [ 0x78, ['pointer64', ['void']]],
'Lock' : [ 0x80, ['unsigned long long']],
'LockOwner' : [ 0x88, ['pointer64', ['_ETHREAD']]],
} ],
'_MM_AVL_TABLE' : [ 0x40, {
'BalancedRoot' : [ 0x0, ['_MMADDRESS_NODE']],
'DepthOfTree' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long long')]],
'Unused' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long long')]],
'NumberGenericTableElements' : [ 0x28, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='unsigned long long')]],
'NodeHint' : [ 0x30, ['pointer64', ['void']]],
'NodeFreeHint' : [ 0x38, ['pointer64', ['void']]],
} ],
'__unnamed_15bf' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMVAD']]],
} ],
'__unnamed_15c2' : [ 0x8, {
'LongFlags' : [ 0x0, ['unsigned long long']],
'VadFlags' : [ 0x0, ['_MMVAD_FLAGS']],
} ],
'__unnamed_15c5' : [ 0x8, {
'LongFlags3' : [ 0x0, ['unsigned long long']],
'VadFlags3' : [ 0x0, ['_MMVAD_FLAGS3']],
} ],
'_MMVAD_SHORT' : [ 0x40, {
'u1' : [ 0x0, ['__unnamed_15bf']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_15c2']],
'PushLock' : [ 0x30, ['_EX_PUSH_LOCK']],
'u5' : [ 0x38, ['__unnamed_15c5']],
} ],
'__unnamed_15cd' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMADDRESS_NODE']]],
} ],
'_MMADDRESS_NODE' : [ 0x28, {
'u1' : [ 0x0, ['__unnamed_15cd']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMADDRESS_NODE']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMADDRESS_NODE']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
} ],
'__unnamed_15d2' : [ 0x4, {
'LongFlags2' : [ 0x0, ['unsigned long']],
'VadFlags2' : [ 0x0, ['_MMVAD_FLAGS2']],
} ],
'_MMVAD' : [ 0x78, {
'u1' : [ 0x0, ['__unnamed_15bf']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_15c2']],
'PushLock' : [ 0x30, ['_EX_PUSH_LOCK']],
'u5' : [ 0x38, ['__unnamed_15c5']],
'u2' : [ 0x40, ['__unnamed_15d2']],
'Subsection' : [ 0x48, ['pointer64', ['_SUBSECTION']]],
'MappedSubsection' : [ 0x48, ['pointer64', ['_MSUBSECTION']]],
'FirstPrototypePte' : [ 0x50, ['pointer64', ['_MMPTE']]],
'LastContiguousPte' : [ 0x58, ['pointer64', ['_MMPTE']]],
'ViewLinks' : [ 0x60, ['_LIST_ENTRY']],
'VadsProcess' : [ 0x70, ['pointer64', ['_EPROCESS']]],
} ],
'__unnamed_15dd' : [ 0x38, {
'Mdl' : [ 0x0, ['_MDL']],
'Page' : [ 0x30, ['array', 1, ['unsigned long long']]],
} ],
'_MI_PAGEFILE_TRACES' : [ 0x68, {
'Status' : [ 0x0, ['long']],
'Priority' : [ 0x4, ['unsigned char']],
'IrpPriority' : [ 0x5, ['unsigned char']],
'CurrentTime' : [ 0x8, ['_LARGE_INTEGER']],
'AvailablePages' : [ 0x10, ['unsigned long long']],
'ModifiedPagesTotal' : [ 0x18, ['unsigned long long']],
'ModifiedPagefilePages' : [ 0x20, ['unsigned long long']],
'ModifiedNoWritePages' : [ 0x28, ['unsigned long long']],
'MdlHack' : [ 0x30, ['__unnamed_15dd']],
} ],
'__unnamed_15e3' : [ 0x10, {
'IoStatus' : [ 0x0, ['_IO_STATUS_BLOCK']],
} ],
'__unnamed_15e5' : [ 0x8, {
'KeepForever' : [ 0x0, ['unsigned long long']],
} ],
'_MMMOD_WRITER_MDL_ENTRY' : [ 0xa0, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'u' : [ 0x10, ['__unnamed_15e3']],
'Irp' : [ 0x20, ['pointer64', ['_IRP']]],
'u1' : [ 0x28, ['__unnamed_15e5']],
'PagingFile' : [ 0x30, ['pointer64', ['_MMPAGING_FILE']]],
'File' : [ 0x38, ['pointer64', ['_FILE_OBJECT']]],
'ControlArea' : [ 0x40, ['pointer64', ['_CONTROL_AREA']]],
'FileResource' : [ 0x48, ['pointer64', ['_ERESOURCE']]],
'WriteOffset' : [ 0x50, ['_LARGE_INTEGER']],
'IssueTime' : [ 0x58, ['_LARGE_INTEGER']],
'PointerMdl' : [ 0x60, ['pointer64', ['_MDL']]],
'Mdl' : [ 0x68, ['_MDL']],
'Page' : [ 0x98, ['array', 1, ['unsigned long long']]],
} ],
'_MDL' : [ 0x30, {
'Next' : [ 0x0, ['pointer64', ['_MDL']]],
'Size' : [ 0x8, ['short']],
'MdlFlags' : [ 0xa, ['short']],
'Process' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'MappedSystemVa' : [ 0x18, ['pointer64', ['void']]],
'StartVa' : [ 0x20, ['pointer64', ['void']]],
'ByteCount' : [ 0x28, ['unsigned long']],
'ByteOffset' : [ 0x2c, ['unsigned long']],
} ],
'_HHIVE' : [ 0x598, {
'Signature' : [ 0x0, ['unsigned long']],
'GetCellRoutine' : [ 0x8, ['pointer64', ['void']]],
'ReleaseCellRoutine' : [ 0x10, ['pointer64', ['void']]],
'Allocate' : [ 0x18, ['pointer64', ['void']]],
'Free' : [ 0x20, ['pointer64', ['void']]],
'FileSetSize' : [ 0x28, ['pointer64', ['void']]],
'FileWrite' : [ 0x30, ['pointer64', ['void']]],
'FileRead' : [ 0x38, ['pointer64', ['void']]],
'FileFlush' : [ 0x40, ['pointer64', ['void']]],
'HiveLoadFailure' : [ 0x48, ['pointer64', ['void']]],
'BaseBlock' : [ 0x50, ['pointer64', ['_HBASE_BLOCK']]],
'DirtyVector' : [ 0x58, ['_RTL_BITMAP']],
'DirtyCount' : [ 0x68, ['unsigned long']],
'DirtyAlloc' : [ 0x6c, ['unsigned long']],
'BaseBlockAlloc' : [ 0x70, ['unsigned long']],
'Cluster' : [ 0x74, ['unsigned long']],
'Flat' : [ 0x78, ['unsigned char']],
'ReadOnly' : [ 0x79, ['unsigned char']],
'DirtyFlag' : [ 0x7a, ['unsigned char']],
'HvBinHeadersUse' : [ 0x7c, ['unsigned long']],
'HvFreeCellsUse' : [ 0x80, ['unsigned long']],
'HvUsedCellsUse' : [ 0x84, ['unsigned long']],
'CmUsedCellsUse' : [ 0x88, ['unsigned long']],
'HiveFlags' : [ 0x8c, ['unsigned long']],
'CurrentLog' : [ 0x90, ['unsigned long']],
'LogSize' : [ 0x94, ['array', 2, ['unsigned long']]],
'RefreshCount' : [ 0x9c, ['unsigned long']],
'StorageTypeCount' : [ 0xa0, ['unsigned long']],
'Version' : [ 0xa4, ['unsigned long']],
'Storage' : [ 0xa8, ['array', 2, ['_DUAL']]],
} ],
'_CM_VIEW_OF_FILE' : [ 0x58, {
'MappedViewLinks' : [ 0x0, ['_LIST_ENTRY']],
'PinnedViewLinks' : [ 0x10, ['_LIST_ENTRY']],
'FlushedViewLinks' : [ 0x20, ['_LIST_ENTRY']],
'CmHive' : [ 0x30, ['pointer64', ['_CMHIVE']]],
'Bcb' : [ 0x38, ['pointer64', ['void']]],
'ViewAddress' : [ 0x40, ['pointer64', ['void']]],
'FileOffset' : [ 0x48, ['unsigned long']],
'Size' : [ 0x4c, ['unsigned long']],
'UseCount' : [ 0x50, ['unsigned long']],
} ],
'_CMHIVE' : [ 0xbe8, {
'Hive' : [ 0x0, ['_HHIVE']],
'FileHandles' : [ 0x598, ['array', 6, ['pointer64', ['void']]]],
'NotifyList' : [ 0x5c8, ['_LIST_ENTRY']],
'HiveList' : [ 0x5d8, ['_LIST_ENTRY']],
'PreloadedHiveList' : [ 0x5e8, ['_LIST_ENTRY']],
'HiveRundown' : [ 0x5f8, ['_EX_RUNDOWN_REF']],
'ParseCacheEntries' : [ 0x600, ['_LIST_ENTRY']],
'KcbCacheTable' : [ 0x610, ['pointer64', ['_CM_KEY_HASH_TABLE_ENTRY']]],
'KcbCacheTableSize' : [ 0x618, ['unsigned long']],
'Identity' : [ 0x61c, ['unsigned long']],
'HiveLock' : [ 0x620, ['pointer64', ['_FAST_MUTEX']]],
'ViewLock' : [ 0x628, ['_EX_PUSH_LOCK']],
'ViewLockOwner' : [ 0x630, ['pointer64', ['_KTHREAD']]],
'ViewLockLast' : [ 0x638, ['unsigned long']],
'ViewUnLockLast' : [ 0x63c, ['unsigned long']],
'WriterLock' : [ 0x640, ['pointer64', ['_FAST_MUTEX']]],
'FlusherLock' : [ 0x648, ['pointer64', ['_ERESOURCE']]],
'FlushDirtyVector' : [ 0x650, ['_RTL_BITMAP']],
'FlushOffsetArray' : [ 0x660, ['pointer64', ['CMP_OFFSET_ARRAY']]],
'FlushOffsetArrayCount' : [ 0x668, ['unsigned long']],
'FlushHiveTruncated' : [ 0x66c, ['unsigned long']],
'FlushLock2' : [ 0x670, ['pointer64', ['_FAST_MUTEX']]],
'SecurityLock' : [ 0x678, ['_EX_PUSH_LOCK']],
'MappedViewList' : [ 0x680, ['_LIST_ENTRY']],
'PinnedViewList' : [ 0x690, ['_LIST_ENTRY']],
'FlushedViewList' : [ 0x6a0, ['_LIST_ENTRY']],
'MappedViewCount' : [ 0x6b0, ['unsigned short']],
'PinnedViewCount' : [ 0x6b2, ['unsigned short']],
'UseCount' : [ 0x6b4, ['unsigned long']],
'ViewsPerHive' : [ 0x6b8, ['unsigned long']],
'FileObject' : [ 0x6c0, ['pointer64', ['_FILE_OBJECT']]],
'LastShrinkHiveSize' : [ 0x6c8, ['unsigned long']],
'ActualFileSize' : [ 0x6d0, ['_LARGE_INTEGER']],
'FileFullPath' : [ 0x6d8, ['_UNICODE_STRING']],
'FileUserName' : [ 0x6e8, ['_UNICODE_STRING']],
'HiveRootPath' : [ 0x6f8, ['_UNICODE_STRING']],
'SecurityCount' : [ 0x708, ['unsigned long']],
'SecurityCacheSize' : [ 0x70c, ['unsigned long']],
'SecurityHitHint' : [ 0x710, ['long']],
'SecurityCache' : [ 0x718, ['pointer64', ['_CM_KEY_SECURITY_CACHE_ENTRY']]],
'SecurityHash' : [ 0x720, ['array', 64, ['_LIST_ENTRY']]],
'UnloadEventCount' : [ 0xb20, ['unsigned long']],
'UnloadEventArray' : [ 0xb28, ['pointer64', ['pointer64', ['_KEVENT']]]],
'RootKcb' : [ 0xb30, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'Frozen' : [ 0xb38, ['unsigned char']],
'UnloadWorkItem' : [ 0xb40, ['pointer64', ['_CM_WORKITEM']]],
'UnloadWorkItemHolder' : [ 0xb48, ['_CM_WORKITEM']],
'GrowOnlyMode' : [ 0xb70, ['unsigned char']],
'GrowOffset' : [ 0xb74, ['unsigned long']],
'KcbConvertListHead' : [ 0xb78, ['_LIST_ENTRY']],
'KnodeConvertListHead' : [ 0xb88, ['_LIST_ENTRY']],
'CellRemapArray' : [ 0xb98, ['pointer64', ['_CM_CELL_REMAP_BLOCK']]],
'Flags' : [ 0xba0, ['unsigned long']],
'TrustClassEntry' : [ 0xba8, ['_LIST_ENTRY']],
'FlushCount' : [ 0xbb8, ['unsigned long']],
'CmRm' : [ 0xbc0, ['pointer64', ['_CM_RM']]],
'CmRmInitFailPoint' : [ 0xbc8, ['unsigned long']],
'CmRmInitFailStatus' : [ 0xbcc, ['long']],
'CreatorOwner' : [ 0xbd0, ['pointer64', ['_KTHREAD']]],
'RundownThread' : [ 0xbd8, ['pointer64', ['_KTHREAD']]],
'LastWriteTime' : [ 0xbe0, ['_LARGE_INTEGER']],
} ],
'_CM_KEY_CONTROL_BLOCK' : [ 0x128, {
'RefCount' : [ 0x0, ['unsigned long']],
'ExtFlags' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'PrivateAlloc' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'Delete' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HiveUnloaded' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'Decommissioned' : [ 0x4, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'LockTablePresent' : [ 0x4, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'TotalLevels' : [ 0x4, ['BitField', dict(start_bit = 21, end_bit = 31, native_type='unsigned long')]],
'DelayedDeref' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DelayedClose' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Parking' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'KeyHash' : [ 0x10, ['_CM_KEY_HASH']],
'ConvKey' : [ 0x10, ['unsigned long']],
'NextHash' : [ 0x18, ['pointer64', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x20, ['pointer64', ['_HHIVE']]],
'KeyCell' : [ 0x28, ['unsigned long']],
'KcbPushlock' : [ 0x30, ['_EX_PUSH_LOCK']],
'Owner' : [ 0x38, ['pointer64', ['_KTHREAD']]],
'SharedCount' : [ 0x38, ['long']],
'SlotHint' : [ 0x40, ['unsigned long']],
'ParentKcb' : [ 0x48, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NameBlock' : [ 0x50, ['pointer64', ['_CM_NAME_CONTROL_BLOCK']]],
'CachedSecurity' : [ 0x58, ['pointer64', ['_CM_KEY_SECURITY_CACHE']]],
'ValueCache' : [ 0x60, ['_CACHED_CHILD_LIST']],
'IndexHint' : [ 0x70, ['pointer64', ['_CM_INDEX_HINT_BLOCK']]],
'HashKey' : [ 0x70, ['unsigned long']],
'SubKeyCount' : [ 0x70, ['unsigned long']],
'KeyBodyListHead' : [ 0x78, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x78, ['_LIST_ENTRY']],
'KeyBodyArray' : [ 0x88, ['array', 4, ['pointer64', ['_CM_KEY_BODY']]]],
'KcbLastWriteTime' : [ 0xa8, ['_LARGE_INTEGER']],
'KcbMaxNameLen' : [ 0xb0, ['unsigned short']],
'KcbMaxValueNameLen' : [ 0xb2, ['unsigned short']],
'KcbMaxValueDataLen' : [ 0xb4, ['unsigned long']],
'KcbUserFlags' : [ 0xb8, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'KcbVirtControlFlags' : [ 0xb8, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'KcbDebug' : [ 0xb8, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'Flags' : [ 0xb8, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'RealKeyName' : [ 0xc0, ['pointer64', ['unsigned char']]],
'KCBUoWListHead' : [ 0xc8, ['_LIST_ENTRY']],
'DelayQueueEntry' : [ 0xd8, ['_LIST_ENTRY']],
'Stolen' : [ 0xd8, ['pointer64', ['unsigned char']]],
'TransKCBOwner' : [ 0xe8, ['pointer64', ['_CM_TRANS']]],
'KCBLock' : [ 0xf0, ['_CM_INTENT_LOCK']],
'KeyLock' : [ 0x100, ['_CM_INTENT_LOCK']],
'TransValueCache' : [ 0x110, ['_CHILD_LIST']],
'TransValueListOwner' : [ 0x118, ['pointer64', ['_CM_TRANS']]],
'FullKCBName' : [ 0x120, ['pointer64', ['_UNICODE_STRING']]],
} ],
'_CM_KEY_HASH_TABLE_ENTRY' : [ 0x18, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Entry' : [ 0x10, ['pointer64', ['_CM_KEY_HASH']]],
} ],
'__unnamed_1669' : [ 0xc, {
'Failure' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: '_None', 1: '_CmInitializeHive', 2: '_HvInitializeHive', 3: '_HvpBuildMap', 4: '_HvpBuildMapAndCopy', 5: '_HvpInitMap', 6: '_HvLoadHive', 7: '_HvpReadFileImageAndBuildMap', 8: '_HvpRecoverData', 9: '_HvpRecoverWholeHive', 10: '_HvpMapFileImageAndBuildMap', 11: '_CmpValidateHiveSecurityDescriptors', 12: '_HvpEnlistBinInMap', 13: '_CmCheckRegistry', 14: '_CmRegistryIO', 15: '_CmCheckRegistry2', 16: '_CmpCheckKey', 17: '_CmpCheckValueList', 18: '_HvCheckHive', 19: '_HvCheckBin'})]],
'Status' : [ 0x4, ['long']],
'Point' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_166c' : [ 0x18, {
'Action' : [ 0x0, ['unsigned long']],
'Handle' : [ 0x8, ['pointer64', ['void']]],
'Status' : [ 0x10, ['long']],
} ],
'__unnamed_166e' : [ 0x8, {
'CheckStack' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_1670' : [ 0x20, {
'Cell' : [ 0x0, ['unsigned long']],
'CellPoint' : [ 0x8, ['pointer64', ['_CELL_DATA']]],
'RootPoint' : [ 0x10, ['pointer64', ['void']]],
'Index' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1672' : [ 0x18, {
'List' : [ 0x0, ['pointer64', ['_CELL_DATA']]],
'Index' : [ 0x8, ['unsigned long']],
'Cell' : [ 0xc, ['unsigned long']],
'CellPoint' : [ 0x10, ['pointer64', ['_CELL_DATA']]],
} ],
'__unnamed_1676' : [ 0x10, {
'Space' : [ 0x0, ['unsigned long']],
'MapPoint' : [ 0x4, ['unsigned long']],
'BinPoint' : [ 0x8, ['pointer64', ['_HBIN']]],
} ],
'__unnamed_167a' : [ 0x10, {
'Bin' : [ 0x0, ['pointer64', ['_HBIN']]],
'CellPoint' : [ 0x8, ['pointer64', ['_HCELL']]],
} ],
'__unnamed_167c' : [ 0x4, {
'FileOffset' : [ 0x0, ['unsigned long']],
} ],
'_HIVE_LOAD_FAILURE' : [ 0x160, {
'Hive' : [ 0x0, ['pointer64', ['_HHIVE']]],
'Index' : [ 0x8, ['unsigned long']],
'RecoverableIndex' : [ 0xc, ['unsigned long']],
'Locations' : [ 0x10, ['array', 8, ['__unnamed_1669']]],
'RecoverableLocations' : [ 0x70, ['array', 8, ['__unnamed_1669']]],
'RegistryIO' : [ 0xd0, ['__unnamed_166c']],
'CheckRegistry2' : [ 0xe8, ['__unnamed_166e']],
'CheckKey' : [ 0xf0, ['__unnamed_1670']],
'CheckValueList' : [ 0x110, ['__unnamed_1672']],
'CheckHive' : [ 0x128, ['__unnamed_1676']],
'CheckHive1' : [ 0x138, ['__unnamed_1676']],
'CheckBin' : [ 0x148, ['__unnamed_167a']],
'RecoverData' : [ 0x158, ['__unnamed_167c']],
} ],
'_PCW_COUNTER_DESCRIPTOR' : [ 0x8, {
'Id' : [ 0x0, ['unsigned short']],
'StructIndex' : [ 0x2, ['unsigned short']],
'Offset' : [ 0x4, ['unsigned short']],
'Size' : [ 0x6, ['unsigned short']],
} ],
'_PCW_REGISTRATION_INFORMATION' : [ 0x30, {
'Version' : [ 0x0, ['unsigned long']],
'Name' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
'CounterCount' : [ 0x10, ['unsigned long']],
'Counters' : [ 0x18, ['pointer64', ['_PCW_COUNTER_DESCRIPTOR']]],
'Callback' : [ 0x20, ['pointer64', ['void']]],
'CallbackContext' : [ 0x28, ['pointer64', ['void']]],
} ],
'_PCW_PROCESSOR_INFO' : [ 0x80, {
'IdleTime' : [ 0x0, ['unsigned long long']],
'AvailableTime' : [ 0x8, ['unsigned long long']],
'UserTime' : [ 0x10, ['unsigned long long']],
'KernelTime' : [ 0x18, ['unsigned long long']],
'Interrupts' : [ 0x20, ['unsigned long']],
'DpcTime' : [ 0x28, ['unsigned long long']],
'InterruptTime' : [ 0x30, ['unsigned long long']],
'DpcCount' : [ 0x38, ['unsigned long']],
'DpcRate' : [ 0x3c, ['unsigned long']],
'C1Time' : [ 0x40, ['unsigned long long']],
'C2Time' : [ 0x48, ['unsigned long long']],
'C3Time' : [ 0x50, ['unsigned long long']],
'C1Transitions' : [ 0x58, ['unsigned long long']],
'C2Transitions' : [ 0x60, ['unsigned long long']],
'C3Transitions' : [ 0x68, ['unsigned long long']],
'ParkingStatus' : [ 0x70, ['unsigned long']],
'CurrentFrequency' : [ 0x74, ['unsigned long']],
'PercentMaxFrequency' : [ 0x78, ['unsigned long']],
'StateFlags' : [ 0x7c, ['unsigned long']],
} ],
'_PCW_DATA' : [ 0x10, {
'Data' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned long']],
} ],
'_ETW_PERF_COUNTERS' : [ 0x18, {
'TotalActiveSessions' : [ 0x0, ['long']],
'TotalBufferMemoryNonPagedPool' : [ 0x4, ['long']],
'TotalBufferMemoryPagedPool' : [ 0x8, ['long']],
'TotalGuidsEnabled' : [ 0xc, ['long']],
'TotalGuidsNotEnabled' : [ 0x10, ['long']],
'TotalGuidsPreEnabled' : [ 0x14, ['long']],
} ],
'_ETW_SESSION_PERF_COUNTERS' : [ 0x18, {
'BufferMemoryPagedPool' : [ 0x0, ['long']],
'BufferMemoryNonPagedPool' : [ 0x4, ['long']],
'EventsLoggedCount' : [ 0x8, ['unsigned long long']],
'EventsLost' : [ 0x10, ['long']],
'NumConsumers' : [ 0x14, ['long']],
} ],
'_CONTEXT32_UPDATE' : [ 0x4, {
'NumberEntries' : [ 0x0, ['unsigned long']],
} ],
'_KTIMER_TABLE' : [ 0x2200, {
'TimerExpiry' : [ 0x0, ['array', 64, ['pointer64', ['_KTIMER']]]],
'TimerEntries' : [ 0x200, ['array', 256, ['_KTIMER_TABLE_ENTRY']]],
} ],
'_KTIMER_TABLE_ENTRY' : [ 0x20, {
'Lock' : [ 0x0, ['unsigned long long']],
'Entry' : [ 0x8, ['_LIST_ENTRY']],
'Time' : [ 0x18, ['_ULARGE_INTEGER']],
} ],
'_KAFFINITY_EX' : [ 0x28, {
'Count' : [ 0x0, ['unsigned short']],
'Size' : [ 0x2, ['unsigned short']],
'Reserved' : [ 0x4, ['unsigned long']],
'Bitmap' : [ 0x8, ['array', 4, ['unsigned long long']]],
} ],
'_KAFFINITY_ENUMERATION_CONTEXT' : [ 0x18, {
'Affinity' : [ 0x0, ['pointer64', ['_KAFFINITY_EX']]],
'CurrentMask' : [ 0x8, ['unsigned long long']],
'CurrentIndex' : [ 0x10, ['unsigned short']],
} ],
'_GROUP_AFFINITY' : [ 0x10, {
'Mask' : [ 0x0, ['unsigned long long']],
'Group' : [ 0x8, ['unsigned short']],
'Reserved' : [ 0xa, ['array', 3, ['unsigned short']]],
} ],
'_KTRAP_FRAME' : [ 0x190, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5' : [ 0x20, ['unsigned long long']],
'PreviousMode' : [ 0x28, ['unsigned char']],
'PreviousIrql' : [ 0x29, ['unsigned char']],
'FaultIndicator' : [ 0x2a, ['unsigned char']],
'ExceptionActive' : [ 0x2b, ['unsigned char']],
'MxCsr' : [ 0x2c, ['unsigned long']],
'Rax' : [ 0x30, ['unsigned long long']],
'Rcx' : [ 0x38, ['unsigned long long']],
'Rdx' : [ 0x40, ['unsigned long long']],
'R8' : [ 0x48, ['unsigned long long']],
'R9' : [ 0x50, ['unsigned long long']],
'R10' : [ 0x58, ['unsigned long long']],
'R11' : [ 0x60, ['unsigned long long']],
'GsBase' : [ 0x68, ['unsigned long long']],
'GsSwap' : [ 0x68, ['unsigned long long']],
'Xmm0' : [ 0x70, ['_M128A']],
'Xmm1' : [ 0x80, ['_M128A']],
'Xmm2' : [ 0x90, ['_M128A']],
'Xmm3' : [ 0xa0, ['_M128A']],
'Xmm4' : [ 0xb0, ['_M128A']],
'Xmm5' : [ 0xc0, ['_M128A']],
'FaultAddress' : [ 0xd0, ['unsigned long long']],
'ContextRecord' : [ 0xd0, ['unsigned long long']],
'TimeStampCKCL' : [ 0xd0, ['unsigned long long']],
'Dr0' : [ 0xd8, ['unsigned long long']],
'Dr1' : [ 0xe0, ['unsigned long long']],
'Dr2' : [ 0xe8, ['unsigned long long']],
'Dr3' : [ 0xf0, ['unsigned long long']],
'Dr6' : [ 0xf8, ['unsigned long long']],
'Dr7' : [ 0x100, ['unsigned long long']],
'DebugControl' : [ 0x108, ['unsigned long long']],
'LastBranchToRip' : [ 0x110, ['unsigned long long']],
'LastBranchFromRip' : [ 0x118, ['unsigned long long']],
'LastExceptionToRip' : [ 0x120, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x128, ['unsigned long long']],
'LastBranchControl' : [ 0x108, ['unsigned long long']],
'LastBranchMSR' : [ 0x110, ['unsigned long']],
'SegDs' : [ 0x130, ['unsigned short']],
'SegEs' : [ 0x132, ['unsigned short']],
'SegFs' : [ 0x134, ['unsigned short']],
'SegGs' : [ 0x136, ['unsigned short']],
'TrapFrame' : [ 0x138, ['unsigned long long']],
'Rbx' : [ 0x140, ['unsigned long long']],
'Rdi' : [ 0x148, ['unsigned long long']],
'Rsi' : [ 0x150, ['unsigned long long']],
'Rbp' : [ 0x158, ['unsigned long long']],
'ErrorCode' : [ 0x160, ['unsigned long long']],
'ExceptionFrame' : [ 0x160, ['unsigned long long']],
'TimeStampKlog' : [ 0x160, ['unsigned long long']],
'Rip' : [ 0x168, ['unsigned long long']],
'SegCs' : [ 0x170, ['unsigned short']],
'Fill0' : [ 0x172, ['unsigned char']],
'Logging' : [ 0x173, ['unsigned char']],
'Fill1' : [ 0x174, ['array', 2, ['unsigned short']]],
'EFlags' : [ 0x178, ['unsigned long']],
'Fill2' : [ 0x17c, ['unsigned long']],
'Rsp' : [ 0x180, ['unsigned long long']],
'SegSs' : [ 0x188, ['unsigned short']],
'Fill3' : [ 0x18a, ['unsigned short']],
'CodePatchCycle' : [ 0x18c, ['long']],
} ],
'_XSTATE_SAVE' : [ 0x38, {
'Prev' : [ 0x0, ['pointer64', ['_XSTATE_SAVE']]],
'Thread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Level' : [ 0x10, ['unsigned char']],
'XStateContext' : [ 0x18, ['_XSTATE_CONTEXT']],
} ],
'_XSAVE_AREA' : [ 0x240, {
'LegacyState' : [ 0x0, ['_XSAVE_FORMAT']],
'Header' : [ 0x200, ['_XSAVE_AREA_HEADER']],
} ],
'_KEXCEPTION_FRAME' : [ 0x140, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5' : [ 0x20, ['unsigned long long']],
'InitialStack' : [ 0x28, ['unsigned long long']],
'Xmm6' : [ 0x30, ['_M128A']],
'Xmm7' : [ 0x40, ['_M128A']],
'Xmm8' : [ 0x50, ['_M128A']],
'Xmm9' : [ 0x60, ['_M128A']],
'Xmm10' : [ 0x70, ['_M128A']],
'Xmm11' : [ 0x80, ['_M128A']],
'Xmm12' : [ 0x90, ['_M128A']],
'Xmm13' : [ 0xa0, ['_M128A']],
'Xmm14' : [ 0xb0, ['_M128A']],
'Xmm15' : [ 0xc0, ['_M128A']],
'TrapFrame' : [ 0xd0, ['unsigned long long']],
'CallbackStack' : [ 0xd8, ['unsigned long long']],
'OutputBuffer' : [ 0xe0, ['unsigned long long']],
'OutputLength' : [ 0xe8, ['unsigned long long']],
'MxCsr' : [ 0xf0, ['unsigned long long']],
'Rbp' : [ 0xf8, ['unsigned long long']],
'Rbx' : [ 0x100, ['unsigned long long']],
'Rdi' : [ 0x108, ['unsigned long long']],
'Rsi' : [ 0x110, ['unsigned long long']],
'R12' : [ 0x118, ['unsigned long long']],
'R13' : [ 0x120, ['unsigned long long']],
'R14' : [ 0x128, ['unsigned long long']],
'R15' : [ 0x130, ['unsigned long long']],
'Return' : [ 0x138, ['unsigned long long']],
} ],
'_PNP_DEVICE_COMPLETION_QUEUE' : [ 0x50, {
'DispatchedList' : [ 0x0, ['_LIST_ENTRY']],
'DispatchedCount' : [ 0x10, ['unsigned long']],
'CompletedList' : [ 0x18, ['_LIST_ENTRY']],
'CompletedSemaphore' : [ 0x28, ['_KSEMAPHORE']],
'SpinLock' : [ 0x48, ['unsigned long long']],
} ],
'_KSEMAPHORE' : [ 0x20, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'Limit' : [ 0x18, ['long']],
} ],
'_DEVOBJ_EXTENSION' : [ 0x70, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'PowerFlags' : [ 0x10, ['unsigned long']],
'Dope' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT_POWER_EXTENSION']]],
'ExtensionFlags' : [ 0x20, ['unsigned long']],
'DeviceNode' : [ 0x28, ['pointer64', ['void']]],
'AttachedTo' : [ 0x30, ['pointer64', ['_DEVICE_OBJECT']]],
'StartIoCount' : [ 0x38, ['long']],
'StartIoKey' : [ 0x3c, ['long']],
'StartIoFlags' : [ 0x40, ['unsigned long']],
'Vpb' : [ 0x48, ['pointer64', ['_VPB']]],
'DependentList' : [ 0x50, ['_LIST_ENTRY']],
'ProviderList' : [ 0x60, ['_LIST_ENTRY']],
} ],
'__unnamed_1763' : [ 0x8, {
'LegacyDeviceNode' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
'PendingDeviceRelations' : [ 0x0, ['pointer64', ['_DEVICE_RELATIONS']]],
'Information' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_1765' : [ 0x8, {
'NextResourceDeviceNode' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
} ],
'__unnamed_1769' : [ 0x20, {
'DockStatus' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DOCK_NOTDOCKDEVICE', 1: 'DOCK_QUIESCENT', 2: 'DOCK_ARRIVING', 3: 'DOCK_DEPARTING', 4: 'DOCK_EJECTIRP_COMPLETED'})]],
'ListEntry' : [ 0x8, ['_LIST_ENTRY']],
'SerialNumber' : [ 0x18, ['pointer64', ['unsigned short']]],
} ],
'_DEVICE_NODE' : [ 0x268, {
'Sibling' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
'Child' : [ 0x8, ['pointer64', ['_DEVICE_NODE']]],
'Parent' : [ 0x10, ['pointer64', ['_DEVICE_NODE']]],
'LastChild' : [ 0x18, ['pointer64', ['_DEVICE_NODE']]],
'PhysicalDeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'InstancePath' : [ 0x28, ['_UNICODE_STRING']],
'ServiceName' : [ 0x38, ['_UNICODE_STRING']],
'PendingIrp' : [ 0x48, ['pointer64', ['_IRP']]],
'Level' : [ 0x50, ['unsigned long']],
'Notify' : [ 0x58, ['_PO_DEVICE_NOTIFY']],
'PoIrpManager' : [ 0xc0, ['_PO_IRP_MANAGER']],
'State' : [ 0xe0, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'PreviousState' : [ 0xe4, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'StateHistory' : [ 0xe8, ['array', -80, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]]],
'StateHistoryEntry' : [ 0x138, ['unsigned long']],
'CompletionStatus' : [ 0x13c, ['long']],
'Flags' : [ 0x140, ['unsigned long']],
'UserFlags' : [ 0x144, ['unsigned long']],
'Problem' : [ 0x148, ['unsigned long']],
'ResourceList' : [ 0x150, ['pointer64', ['_CM_RESOURCE_LIST']]],
'ResourceListTranslated' : [ 0x158, ['pointer64', ['_CM_RESOURCE_LIST']]],
'DuplicatePDO' : [ 0x160, ['pointer64', ['_DEVICE_OBJECT']]],
'ResourceRequirements' : [ 0x168, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'InterfaceType' : [ 0x170, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x174, ['unsigned long']],
'ChildInterfaceType' : [ 0x178, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'ChildBusNumber' : [ 0x17c, ['unsigned long']],
'ChildBusTypeIndex' : [ 0x180, ['unsigned short']],
'RemovalPolicy' : [ 0x182, ['unsigned char']],
'HardwareRemovalPolicy' : [ 0x183, ['unsigned char']],
'TargetDeviceNotify' : [ 0x188, ['_LIST_ENTRY']],
'DeviceArbiterList' : [ 0x198, ['_LIST_ENTRY']],
'DeviceTranslatorList' : [ 0x1a8, ['_LIST_ENTRY']],
'NoTranslatorMask' : [ 0x1b8, ['unsigned short']],
'QueryTranslatorMask' : [ 0x1ba, ['unsigned short']],
'NoArbiterMask' : [ 0x1bc, ['unsigned short']],
'QueryArbiterMask' : [ 0x1be, ['unsigned short']],
'OverUsed1' : [ 0x1c0, ['__unnamed_1763']],
'OverUsed2' : [ 0x1c8, ['__unnamed_1765']],
'BootResources' : [ 0x1d0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'BootResourcesTranslated' : [ 0x1d8, ['pointer64', ['_CM_RESOURCE_LIST']]],
'CapabilityFlags' : [ 0x1e0, ['unsigned long']],
'DockInfo' : [ 0x1e8, ['__unnamed_1769']],
'DisableableDepends' : [ 0x208, ['unsigned long']],
'PendedSetInterfaceState' : [ 0x210, ['_LIST_ENTRY']],
'LegacyBusListEntry' : [ 0x220, ['_LIST_ENTRY']],
'DriverUnloadRetryCount' : [ 0x230, ['unsigned long']],
'PreviousParent' : [ 0x238, ['pointer64', ['_DEVICE_NODE']]],
'DeletedChildren' : [ 0x240, ['unsigned long']],
'NumaNodeIndex' : [ 0x244, ['unsigned long']],
'ContainerID' : [ 0x248, ['_GUID']],
'OverrideFlags' : [ 0x258, ['unsigned char']],
'RequiresUnloadedDriver' : [ 0x259, ['unsigned char']],
'PendingEjectRelations' : [ 0x260, ['pointer64', ['_PENDING_RELATIONS_LIST_ENTRY']]],
} ],
'_KNODE' : [ 0xc0, {
'PagedPoolSListHead' : [ 0x0, ['_SLIST_HEADER']],
'NonPagedPoolSListHead' : [ 0x10, ['array', 3, ['_SLIST_HEADER']]],
'Affinity' : [ 0x40, ['_GROUP_AFFINITY']],
'ProximityId' : [ 0x50, ['unsigned long']],
'NodeNumber' : [ 0x54, ['unsigned short']],
'PrimaryNodeNumber' : [ 0x56, ['unsigned short']],
'MaximumProcessors' : [ 0x58, ['unsigned char']],
'Color' : [ 0x59, ['unsigned char']],
'Flags' : [ 0x5a, ['_flags']],
'NodePad0' : [ 0x5b, ['unsigned char']],
'Seed' : [ 0x5c, ['unsigned long']],
'MmShiftedColor' : [ 0x60, ['unsigned long']],
'FreeCount' : [ 0x68, ['array', 2, ['unsigned long long']]],
'Right' : [ 0x78, ['unsigned long']],
'Left' : [ 0x7c, ['unsigned long']],
'CachedKernelStacks' : [ 0x80, ['_CACHED_KSTACK_LIST']],
'ParkLock' : [ 0xa0, ['long']],
'NodePad1' : [ 0xa4, ['unsigned long']],
} ],
'_PNP_ASSIGN_RESOURCES_CONTEXT' : [ 0x10, {
'IncludeFailedDevices' : [ 0x0, ['unsigned long']],
'DeviceCount' : [ 0x4, ['unsigned long']],
'DeviceList' : [ 0x8, ['array', 1, ['pointer64', ['_DEVICE_OBJECT']]]],
} ],
'_PNP_RESOURCE_REQUEST' : [ 0x40, {
'PhysicalDevice' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x8, ['unsigned long']],
'AllocationType' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Priority' : [ 0x10, ['unsigned long']],
'Position' : [ 0x14, ['unsigned long']],
'ResourceRequirements' : [ 0x18, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'ReqList' : [ 0x20, ['pointer64', ['void']]],
'ResourceAssignment' : [ 0x28, ['pointer64', ['_CM_RESOURCE_LIST']]],
'TranslatedResourceAssignment' : [ 0x30, ['pointer64', ['_CM_RESOURCE_LIST']]],
'Status' : [ 0x38, ['long']],
} ],
'_IO_RESOURCE_REQUIREMENTS_LIST' : [ 0x48, {
'ListSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x8, ['unsigned long']],
'SlotNumber' : [ 0xc, ['unsigned long']],
'Reserved' : [ 0x10, ['array', 3, ['unsigned long']]],
'AlternativeLists' : [ 0x1c, ['unsigned long']],
'List' : [ 0x20, ['array', 1, ['_IO_RESOURCE_LIST']]],
} ],
'_EXCEPTION_RECORD64' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long long']],
'ExceptionAddress' : [ 0x10, ['unsigned long long']],
'NumberParameters' : [ 0x18, ['unsigned long']],
'__unusedAlignment' : [ 0x1c, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_EXCEPTION_RECORD32' : [ 0x50, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long']],
'ExceptionAddress' : [ 0xc, ['unsigned long']],
'NumberParameters' : [ 0x10, ['unsigned long']],
'ExceptionInformation' : [ 0x14, ['array', 15, ['unsigned long']]],
} ],
'_DBGKM_EXCEPTION64' : [ 0xa0, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD64']],
'FirstChance' : [ 0x98, ['unsigned long']],
} ],
'_DBGKM_EXCEPTION32' : [ 0x54, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD32']],
'FirstChance' : [ 0x50, ['unsigned long']],
} ],
'_DBGKD_LOAD_SYMBOLS64' : [ 0x28, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x8, ['unsigned long long']],
'ProcessId' : [ 0x10, ['unsigned long long']],
'CheckSum' : [ 0x18, ['unsigned long']],
'SizeOfImage' : [ 0x1c, ['unsigned long']],
'UnloadSymbols' : [ 0x20, ['unsigned char']],
} ],
'_DBGKD_LOAD_SYMBOLS32' : [ 0x18, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x4, ['unsigned long']],
'ProcessId' : [ 0x8, ['unsigned long']],
'CheckSum' : [ 0xc, ['unsigned long']],
'SizeOfImage' : [ 0x10, ['unsigned long']],
'UnloadSymbols' : [ 0x14, ['unsigned char']],
} ],
'_DBGKD_READ_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesRead' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesRead' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesWritten' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesWritten' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT64' : [ 0x10, {
'BreakPointAddress' : [ 0x0, ['unsigned long long']],
'BreakPointHandle' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT32' : [ 0x8, {
'BreakPointAddress' : [ 0x0, ['unsigned long']],
'BreakPointHandle' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO64' : [ 0x10, {
'IoAddress' : [ 0x0, ['unsigned long long']],
'DataSize' : [ 0x8, ['unsigned long']],
'DataValue' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO32' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'IoAddress' : [ 0x4, ['unsigned long']],
'DataValue' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED64' : [ 0x20, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long long']],
'DataValue' : [ 0x18, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED32' : [ 0x18, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long']],
'DataValue' : [ 0x14, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL32' : [ 0x4, {
'SpecialCall' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL64' : [ 0x8, {
'SpecialCall' : [ 0x0, ['unsigned long long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT32' : [ 0x8, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT64' : [ 0x10, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT64' : [ 0x20, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
'Calls' : [ 0xc, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0x10, ['unsigned long']],
'MinInstructions' : [ 0x14, ['unsigned long']],
'MaxInstructions' : [ 0x18, ['unsigned long']],
'TotalInstructions' : [ 0x1c, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT32' : [ 0x1c, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'Calls' : [ 0x8, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0xc, ['unsigned long']],
'MinInstructions' : [ 0x10, ['unsigned long']],
'MaxInstructions' : [ 0x14, ['unsigned long']],
'TotalInstructions' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1811' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT64']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO64']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED64']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL64']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT64']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT64']],
'GetVersion64' : [ 0x0, ['_DBGKD_GET_VERSION64']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
'GetSetBusData' : [ 0x0, ['_DBGKD_GET_SET_BUS_DATA']],
'FillMemory' : [ 0x0, ['_DBGKD_FILL_MEMORY']],
'QueryMemory' : [ 0x0, ['_DBGKD_QUERY_MEMORY']],
'SwitchPartition' : [ 0x0, ['_DBGKD_SWITCH_PARTITION']],
} ],
'_DBGKD_MANIPULATE_STATE64' : [ 0x38, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0x10, ['__unnamed_1811']],
} ],
'__unnamed_1818' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY32']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY32']],
'ReadMemory64' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory64' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT32']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO32']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED32']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL32']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT32']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT32']],
'GetVersion32' : [ 0x0, ['_DBGKD_GET_VERSION32']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
} ],
'_DBGKD_MANIPULATE_STATE32' : [ 0x34, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0xc, ['__unnamed_1818']],
} ],
'_DBGKD_READ_WRITE_MSR' : [ 0xc, {
'Msr' : [ 0x0, ['unsigned long']],
'DataValueLow' : [ 0x4, ['unsigned long']],
'DataValueHigh' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_BREAKPOINTEX' : [ 0x8, {
'BreakPointCount' : [ 0x0, ['unsigned long']],
'ContinueStatus' : [ 0x4, ['long']],
} ],
'_DBGKD_SEARCH_MEMORY' : [ 0x18, {
'SearchAddress' : [ 0x0, ['unsigned long long']],
'FoundAddress' : [ 0x0, ['unsigned long long']],
'SearchLength' : [ 0x8, ['unsigned long long']],
'PatternLength' : [ 0x10, ['unsigned long']],
} ],
'_DBGKD_RESTORE_BREAKPOINT' : [ 0x4, {
'BreakPointHandle' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_CONTINUE' : [ 0x4, {
'ContinueStatus' : [ 0x0, ['long']],
} ],
'_DBGKD_CONTINUE2' : [ 0x20, {
'ContinueStatus' : [ 0x0, ['long']],
'ControlSet' : [ 0x4, ['_AMD64_DBGKD_CONTROL_SET']],
'AnyControlSet' : [ 0x4, ['_DBGKD_ANY_CONTROL_SET']],
} ],
'_CPU_INFO' : [ 0x10, {
'Eax' : [ 0x0, ['unsigned long']],
'Ebx' : [ 0x4, ['unsigned long']],
'Ecx' : [ 0x8, ['unsigned long']],
'Edx' : [ 0xc, ['unsigned long']],
} ],
'_KSYSTEM_TIME' : [ 0xc, {
'LowPart' : [ 0x0, ['unsigned long']],
'High1Time' : [ 0x4, ['long']],
'High2Time' : [ 0x8, ['long']],
} ],
'_VOLUME_CACHE_MAP' : [ 0x38, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteCode' : [ 0x2, ['short']],
'UseCount' : [ 0x4, ['unsigned long']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'VolumeCacheMapLinks' : [ 0x10, ['_LIST_ENTRY']],
'Flags' : [ 0x20, ['unsigned long']],
'DirtyPages' : [ 0x28, ['unsigned long long']],
'PagesQueuedToDisk' : [ 0x30, ['unsigned long']],
} ],
'_SHARED_CACHE_MAP' : [ 0x1f8, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'OpenCount' : [ 0x4, ['unsigned long']],
'FileSize' : [ 0x8, ['_LARGE_INTEGER']],
'BcbList' : [ 0x10, ['_LIST_ENTRY']],
'SectionSize' : [ 0x20, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x28, ['_LARGE_INTEGER']],
'ValidDataGoal' : [ 0x30, ['_LARGE_INTEGER']],
'InitialVacbs' : [ 0x38, ['array', 4, ['pointer64', ['_VACB']]]],
'Vacbs' : [ 0x58, ['pointer64', ['pointer64', ['_VACB']]]],
'FileObjectFastRef' : [ 0x60, ['_EX_FAST_REF']],
'VacbLock' : [ 0x68, ['_EX_PUSH_LOCK']],
'DirtyPages' : [ 0x70, ['unsigned long']],
'LoggedStreamLinks' : [ 0x78, ['_LIST_ENTRY']],
'SharedCacheMapLinks' : [ 0x88, ['_LIST_ENTRY']],
'Flags' : [ 0x98, ['unsigned long']],
'Status' : [ 0x9c, ['long']],
'Mbcb' : [ 0xa0, ['pointer64', ['_MBCB']]],
'Section' : [ 0xa8, ['pointer64', ['void']]],
'CreateEvent' : [ 0xb0, ['pointer64', ['_KEVENT']]],
'WaitOnActiveCount' : [ 0xb8, ['pointer64', ['_KEVENT']]],
'PagesToWrite' : [ 0xc0, ['unsigned long']],
'BeyondLastFlush' : [ 0xc8, ['long long']],
'Callbacks' : [ 0xd0, ['pointer64', ['_CACHE_MANAGER_CALLBACKS']]],
'LazyWriteContext' : [ 0xd8, ['pointer64', ['void']]],
'PrivateList' : [ 0xe0, ['_LIST_ENTRY']],
'LogHandle' : [ 0xf0, ['pointer64', ['void']]],
'FlushToLsnRoutine' : [ 0xf8, ['pointer64', ['void']]],
'DirtyPageThreshold' : [ 0x100, ['unsigned long']],
'LazyWritePassCount' : [ 0x104, ['unsigned long']],
'UninitializeEvent' : [ 0x108, ['pointer64', ['_CACHE_UNINITIALIZE_EVENT']]],
'BcbLock' : [ 0x110, ['_KGUARDED_MUTEX']],
'LastUnmapBehindOffset' : [ 0x148, ['_LARGE_INTEGER']],
'Event' : [ 0x150, ['_KEVENT']],
'HighWaterMappingOffset' : [ 0x168, ['_LARGE_INTEGER']],
'PrivateCacheMap' : [ 0x170, ['_PRIVATE_CACHE_MAP']],
'WriteBehindWorkQueueEntry' : [ 0x1d8, ['pointer64', ['void']]],
'VolumeCacheMap' : [ 0x1e0, ['pointer64', ['_VOLUME_CACHE_MAP']]],
'ProcImagePathHash' : [ 0x1e8, ['unsigned long']],
'WritesInProgress' : [ 0x1ec, ['unsigned long']],
'PipelinedReadAheadSize' : [ 0x1f0, ['unsigned long']],
} ],
'__unnamed_188a' : [ 0x8, {
'FileOffset' : [ 0x0, ['_LARGE_INTEGER']],
'ActiveCount' : [ 0x0, ['unsigned short']],
} ],
'_VACB' : [ 0x30, {
'BaseAddress' : [ 0x0, ['pointer64', ['void']]],
'SharedCacheMap' : [ 0x8, ['pointer64', ['_SHARED_CACHE_MAP']]],
'Overlay' : [ 0x10, ['__unnamed_188a']],
'Links' : [ 0x18, ['_LIST_ENTRY']],
'ArrayHead' : [ 0x28, ['pointer64', ['_VACB_ARRAY_HEADER']]],
} ],
'_KGUARDED_MUTEX' : [ 0x38, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Contention' : [ 0x10, ['unsigned long']],
'Gate' : [ 0x18, ['_KGATE']],
'KernelApcDisable' : [ 0x30, ['short']],
'SpecialApcDisable' : [ 0x32, ['short']],
'CombinedApcDisable' : [ 0x30, ['unsigned long']],
} ],
'__unnamed_18a8' : [ 0x8, {
'FileObject' : [ 0x0, ['pointer64', ['_FILE_OBJECT']]],
} ],
'__unnamed_18aa' : [ 0x8, {
'SharedCacheMap' : [ 0x0, ['pointer64', ['_SHARED_CACHE_MAP']]],
} ],
'__unnamed_18ac' : [ 0x8, {
'Event' : [ 0x0, ['pointer64', ['_KEVENT']]],
} ],
'__unnamed_18ae' : [ 0x4, {
'Reason' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_18b0' : [ 0x8, {
'Read' : [ 0x0, ['__unnamed_18a8']],
'Write' : [ 0x0, ['__unnamed_18aa']],
'Event' : [ 0x0, ['__unnamed_18ac']],
'Notification' : [ 0x0, ['__unnamed_18ae']],
} ],
'_WORK_QUEUE_ENTRY' : [ 0x20, {
'WorkQueueLinks' : [ 0x0, ['_LIST_ENTRY']],
'Parameters' : [ 0x10, ['__unnamed_18b0']],
'Function' : [ 0x18, ['unsigned char']],
} ],
'VACB_LEVEL_ALLOCATION_LIST' : [ 0x20, {
'VacbLevelList' : [ 0x0, ['_LIST_ENTRY']],
'VacbLevelWithBcbListHeads' : [ 0x10, ['pointer64', ['void']]],
'VacbLevelsAllocated' : [ 0x18, ['unsigned long']],
} ],
'_VACB_LEVEL_REFERENCE' : [ 0x8, {
'Reference' : [ 0x0, ['long']],
'SpecialReference' : [ 0x4, ['long']],
} ],
'_CACHE_UNINITIALIZE_EVENT' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_CACHE_UNINITIALIZE_EVENT']]],
'Event' : [ 0x8, ['_KEVENT']],
} ],
'_HEAP_LIST_LOOKUP' : [ 0x38, {
'ExtendedLookup' : [ 0x0, ['pointer64', ['_HEAP_LIST_LOOKUP']]],
'ArraySize' : [ 0x8, ['unsigned long']],
'ExtraItem' : [ 0xc, ['unsigned long']],
'ItemCount' : [ 0x10, ['unsigned long']],
'OutOfRangeItems' : [ 0x14, ['unsigned long']],
'BaseIndex' : [ 0x18, ['unsigned long']],
'ListHead' : [ 0x20, ['pointer64', ['_LIST_ENTRY']]],
'ListsInUseUlong' : [ 0x28, ['pointer64', ['unsigned long']]],
'ListHints' : [ 0x30, ['pointer64', ['pointer64', ['_LIST_ENTRY']]]],
} ],
'_HEAP' : [ 0x208, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'SegmentSignature' : [ 0x10, ['unsigned long']],
'SegmentFlags' : [ 0x14, ['unsigned long']],
'SegmentListEntry' : [ 0x18, ['_LIST_ENTRY']],
'Heap' : [ 0x28, ['pointer64', ['_HEAP']]],
'BaseAddress' : [ 0x30, ['pointer64', ['void']]],
'NumberOfPages' : [ 0x38, ['unsigned long']],
'FirstEntry' : [ 0x40, ['pointer64', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x48, ['pointer64', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x50, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x54, ['unsigned long']],
'SegmentAllocatorBackTraceIndex' : [ 0x58, ['unsigned short']],
'Reserved' : [ 0x5a, ['unsigned short']],
'UCRSegmentList' : [ 0x60, ['_LIST_ENTRY']],
'Flags' : [ 0x70, ['unsigned long']],
'ForceFlags' : [ 0x74, ['unsigned long']],
'CompatibilityFlags' : [ 0x78, ['unsigned long']],
'EncodeFlagMask' : [ 0x7c, ['unsigned long']],
'Encoding' : [ 0x80, ['_HEAP_ENTRY']],
'PointerKey' : [ 0x90, ['unsigned long long']],
'Interceptor' : [ 0x98, ['unsigned long']],
'VirtualMemoryThreshold' : [ 0x9c, ['unsigned long']],
'Signature' : [ 0xa0, ['unsigned long']],
'SegmentReserve' : [ 0xa8, ['unsigned long long']],
'SegmentCommit' : [ 0xb0, ['unsigned long long']],
'DeCommitFreeBlockThreshold' : [ 0xb8, ['unsigned long long']],
'DeCommitTotalFreeThreshold' : [ 0xc0, ['unsigned long long']],
'TotalFreeSize' : [ 0xc8, ['unsigned long long']],
'MaximumAllocationSize' : [ 0xd0, ['unsigned long long']],
'ProcessHeapsListIndex' : [ 0xd8, ['unsigned short']],
'HeaderValidateLength' : [ 0xda, ['unsigned short']],
'HeaderValidateCopy' : [ 0xe0, ['pointer64', ['void']]],
'NextAvailableTagIndex' : [ 0xe8, ['unsigned short']],
'MaximumTagIndex' : [ 0xea, ['unsigned short']],
'TagEntries' : [ 0xf0, ['pointer64', ['_HEAP_TAG_ENTRY']]],
'UCRList' : [ 0xf8, ['_LIST_ENTRY']],
'AlignRound' : [ 0x108, ['unsigned long long']],
'AlignMask' : [ 0x110, ['unsigned long long']],
'VirtualAllocdBlocks' : [ 0x118, ['_LIST_ENTRY']],
'SegmentList' : [ 0x128, ['_LIST_ENTRY']],
'AllocatorBackTraceIndex' : [ 0x138, ['unsigned short']],
'NonDedicatedListLength' : [ 0x13c, ['unsigned long']],
'BlocksIndex' : [ 0x140, ['pointer64', ['void']]],
'UCRIndex' : [ 0x148, ['pointer64', ['void']]],
'PseudoTagEntries' : [ 0x150, ['pointer64', ['_HEAP_PSEUDO_TAG_ENTRY']]],
'FreeLists' : [ 0x158, ['_LIST_ENTRY']],
'LockVariable' : [ 0x168, ['pointer64', ['_HEAP_LOCK']]],
'CommitRoutine' : [ 0x170, ['pointer64', ['void']]],
'FrontEndHeap' : [ 0x178, ['pointer64', ['void']]],
'FrontHeapLockCount' : [ 0x180, ['unsigned short']],
'FrontEndHeapType' : [ 0x182, ['unsigned char']],
'Counters' : [ 0x188, ['_HEAP_COUNTERS']],
'TuningParameters' : [ 0x1f8, ['_HEAP_TUNING_PARAMETERS']],
} ],
'__unnamed_1901' : [ 0x28, {
'CriticalSection' : [ 0x0, ['_RTL_CRITICAL_SECTION']],
} ],
'_HEAP_LOCK' : [ 0x28, {
'Lock' : [ 0x0, ['__unnamed_1901']],
} ],
'_RTL_CRITICAL_SECTION' : [ 0x28, {
'DebugInfo' : [ 0x0, ['pointer64', ['_RTL_CRITICAL_SECTION_DEBUG']]],
'LockCount' : [ 0x8, ['long']],
'RecursionCount' : [ 0xc, ['long']],
'OwningThread' : [ 0x10, ['pointer64', ['void']]],
'LockSemaphore' : [ 0x18, ['pointer64', ['void']]],
'SpinCount' : [ 0x20, ['unsigned long long']],
} ],
'_HEAP_ENTRY' : [ 0x10, {
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned char']],
'SmallTagIndex' : [ 0xb, ['unsigned char']],
'PreviousSize' : [ 0xc, ['unsigned short']],
'SegmentOffset' : [ 0xe, ['unsigned char']],
'LFHFlags' : [ 0xe, ['unsigned char']],
'UnusedBytes' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
'Reserved' : [ 0x0, ['pointer64', ['void']]],
'FunctionIndex' : [ 0x8, ['unsigned short']],
'ContextValue' : [ 0xa, ['unsigned short']],
'InterceptorValue' : [ 0x8, ['unsigned long']],
'UnusedBytesLength' : [ 0xc, ['unsigned short']],
'EntryOffset' : [ 0xe, ['unsigned char']],
'ExtendedBlockSignature' : [ 0xf, ['unsigned char']],
'ReservedForAlignment' : [ 0x0, ['pointer64', ['void']]],
'Code1' : [ 0x8, ['unsigned long']],
'Code2' : [ 0xc, ['unsigned short']],
'Code3' : [ 0xe, ['unsigned char']],
'Code4' : [ 0xf, ['unsigned char']],
'AgregateCode' : [ 0x8, ['unsigned long long']],
} ],
'_HEAP_SEGMENT' : [ 0x70, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'SegmentSignature' : [ 0x10, ['unsigned long']],
'SegmentFlags' : [ 0x14, ['unsigned long']],
'SegmentListEntry' : [ 0x18, ['_LIST_ENTRY']],
'Heap' : [ 0x28, ['pointer64', ['_HEAP']]],
'BaseAddress' : [ 0x30, ['pointer64', ['void']]],
'NumberOfPages' : [ 0x38, ['unsigned long']],
'FirstEntry' : [ 0x40, ['pointer64', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x48, ['pointer64', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x50, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x54, ['unsigned long']],
'SegmentAllocatorBackTraceIndex' : [ 0x58, ['unsigned short']],
'Reserved' : [ 0x5a, ['unsigned short']],
'UCRSegmentList' : [ 0x60, ['_LIST_ENTRY']],
} ],
'_HEAP_FREE_ENTRY' : [ 0x20, {
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned char']],
'SmallTagIndex' : [ 0xb, ['unsigned char']],
'PreviousSize' : [ 0xc, ['unsigned short']],
'SegmentOffset' : [ 0xe, ['unsigned char']],
'LFHFlags' : [ 0xe, ['unsigned char']],
'UnusedBytes' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
'Reserved' : [ 0x0, ['pointer64', ['void']]],
'FunctionIndex' : [ 0x8, ['unsigned short']],
'ContextValue' : [ 0xa, ['unsigned short']],
'InterceptorValue' : [ 0x8, ['unsigned long']],
'UnusedBytesLength' : [ 0xc, ['unsigned short']],
'EntryOffset' : [ 0xe, ['unsigned char']],
'ExtendedBlockSignature' : [ 0xf, ['unsigned char']],
'ReservedForAlignment' : [ 0x0, ['pointer64', ['void']]],
'Code1' : [ 0x8, ['unsigned long']],
'Code2' : [ 0xc, ['unsigned short']],
'Code3' : [ 0xe, ['unsigned char']],
'Code4' : [ 0xf, ['unsigned char']],
'AgregateCode' : [ 0x8, ['unsigned long long']],
'FreeList' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_PEB' : [ 0x380, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsLegacyProcess' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x8, ['pointer64', ['void']]],
'ImageBaseAddress' : [ 0x10, ['pointer64', ['void']]],
'Ldr' : [ 0x18, ['pointer64', ['_PEB_LDR_DATA']]],
'ProcessParameters' : [ 0x20, ['pointer64', ['_RTL_USER_PROCESS_PARAMETERS']]],
'SubSystemData' : [ 0x28, ['pointer64', ['void']]],
'ProcessHeap' : [ 0x30, ['pointer64', ['void']]],
'FastPebLock' : [ 0x38, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'AtlThunkSListPtr' : [ 0x40, ['pointer64', ['void']]],
'IFEOKey' : [ 0x48, ['pointer64', ['void']]],
'CrossProcessFlags' : [ 0x50, ['unsigned long']],
'ProcessInJob' : [ 0x50, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x50, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x50, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x50, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ProcessUsingFTH' : [ 0x50, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ReservedBits0' : [ 0x50, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'KernelCallbackTable' : [ 0x58, ['pointer64', ['void']]],
'UserSharedInfoPtr' : [ 0x58, ['pointer64', ['void']]],
'SystemReserved' : [ 0x60, ['array', 1, ['unsigned long']]],
'AtlThunkSListPtr32' : [ 0x64, ['unsigned long']],
'ApiSetMap' : [ 0x68, ['pointer64', ['void']]],
'TlsExpansionCounter' : [ 0x70, ['unsigned long']],
'TlsBitmap' : [ 0x78, ['pointer64', ['void']]],
'TlsBitmapBits' : [ 0x80, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x88, ['pointer64', ['void']]],
'HotpatchInformation' : [ 0x90, ['pointer64', ['void']]],
'ReadOnlyStaticServerData' : [ 0x98, ['pointer64', ['pointer64', ['void']]]],
'AnsiCodePageData' : [ 0xa0, ['pointer64', ['void']]],
'OemCodePageData' : [ 0xa8, ['pointer64', ['void']]],
'UnicodeCaseTableData' : [ 0xb0, ['pointer64', ['void']]],
'NumberOfProcessors' : [ 0xb8, ['unsigned long']],
'NtGlobalFlag' : [ 0xbc, ['unsigned long']],
'CriticalSectionTimeout' : [ 0xc0, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0xc8, ['unsigned long long']],
'HeapSegmentCommit' : [ 0xd0, ['unsigned long long']],
'HeapDeCommitTotalFreeThreshold' : [ 0xd8, ['unsigned long long']],
'HeapDeCommitFreeBlockThreshold' : [ 0xe0, ['unsigned long long']],
'NumberOfHeaps' : [ 0xe8, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0xec, ['unsigned long']],
'ProcessHeaps' : [ 0xf0, ['pointer64', ['pointer64', ['void']]]],
'GdiSharedHandleTable' : [ 0xf8, ['pointer64', ['void']]],
'ProcessStarterHelper' : [ 0x100, ['pointer64', ['void']]],
'GdiDCAttributeList' : [ 0x108, ['unsigned long']],
'LoaderLock' : [ 0x110, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'OSMajorVersion' : [ 0x118, ['unsigned long']],
'OSMinorVersion' : [ 0x11c, ['unsigned long']],
'OSBuildNumber' : [ 0x120, ['unsigned short']],
'OSCSDVersion' : [ 0x122, ['unsigned short']],
'OSPlatformId' : [ 0x124, ['unsigned long']],
'ImageSubsystem' : [ 0x128, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0x12c, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0x130, ['unsigned long']],
'ActiveProcessAffinityMask' : [ 0x138, ['unsigned long long']],
'GdiHandleBuffer' : [ 0x140, ['array', 60, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x230, ['pointer64', ['void']]],
'TlsExpansionBitmap' : [ 0x238, ['pointer64', ['void']]],
'TlsExpansionBitmapBits' : [ 0x240, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x2c0, ['unsigned long']],
'AppCompatFlags' : [ 0x2c8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x2d0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x2d8, ['pointer64', ['void']]],
'AppCompatInfo' : [ 0x2e0, ['pointer64', ['void']]],
'CSDVersion' : [ 0x2e8, ['_UNICODE_STRING']],
'ActivationContextData' : [ 0x2f8, ['pointer64', ['_ACTIVATION_CONTEXT_DATA']]],
'ProcessAssemblyStorageMap' : [ 0x300, ['pointer64', ['_ASSEMBLY_STORAGE_MAP']]],
'SystemDefaultActivationContextData' : [ 0x308, ['pointer64', ['_ACTIVATION_CONTEXT_DATA']]],
'SystemAssemblyStorageMap' : [ 0x310, ['pointer64', ['_ASSEMBLY_STORAGE_MAP']]],
'MinimumStackCommit' : [ 0x318, ['unsigned long long']],
'FlsCallback' : [ 0x320, ['pointer64', ['_FLS_CALLBACK_INFO']]],
'FlsListHead' : [ 0x328, ['_LIST_ENTRY']],
'FlsBitmap' : [ 0x338, ['pointer64', ['void']]],
'FlsBitmapBits' : [ 0x340, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x350, ['unsigned long']],
'WerRegistrationData' : [ 0x358, ['pointer64', ['void']]],
'WerShipAssertPtr' : [ 0x360, ['pointer64', ['void']]],
'pContextData' : [ 0x368, ['pointer64', ['void']]],
'pImageHeaderHash' : [ 0x370, ['pointer64', ['void']]],
'TracingFlags' : [ 0x378, ['unsigned long']],
'HeapTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CritSecTracingEnabled' : [ 0x378, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SpareTracingBits' : [ 0x378, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_PEB_LDR_DATA' : [ 0x58, {
'Length' : [ 0x0, ['unsigned long']],
'Initialized' : [ 0x4, ['unsigned char']],
'SsHandle' : [ 0x8, ['pointer64', ['void']]],
'InLoadOrderModuleList' : [ 0x10, ['_LIST_ENTRY']],
'InMemoryOrderModuleList' : [ 0x20, ['_LIST_ENTRY']],
'InInitializationOrderModuleList' : [ 0x30, ['_LIST_ENTRY']],
'EntryInProgress' : [ 0x40, ['pointer64', ['void']]],
'ShutdownInProgress' : [ 0x48, ['unsigned char']],
'ShutdownThreadId' : [ 0x50, ['pointer64', ['void']]],
} ],
'_LDR_DATA_TABLE_ENTRY' : [ 0xe0, {
'InLoadOrderLinks' : [ 0x0, ['_LIST_ENTRY']],
'InMemoryOrderLinks' : [ 0x10, ['_LIST_ENTRY']],
'InInitializationOrderLinks' : [ 0x20, ['_LIST_ENTRY']],
'DllBase' : [ 0x30, ['pointer64', ['void']]],
'EntryPoint' : [ 0x38, ['pointer64', ['void']]],
'SizeOfImage' : [ 0x40, ['unsigned long']],
'FullDllName' : [ 0x48, ['_UNICODE_STRING']],
'BaseDllName' : [ 0x58, ['_UNICODE_STRING']],
'Flags' : [ 0x68, ['unsigned long']],
'LoadCount' : [ 0x6c, ['unsigned short']],
'TlsIndex' : [ 0x6e, ['unsigned short']],
'HashLinks' : [ 0x70, ['_LIST_ENTRY']],
'SectionPointer' : [ 0x70, ['pointer64', ['void']]],
'CheckSum' : [ 0x78, ['unsigned long']],
'TimeDateStamp' : [ 0x80, ['unsigned long']],
'LoadedImports' : [ 0x80, ['pointer64', ['void']]],
'EntryPointActivationContext' : [ 0x88, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'PatchInformation' : [ 0x90, ['pointer64', ['void']]],
'ForwarderLinks' : [ 0x98, ['_LIST_ENTRY']],
'ServiceTagLinks' : [ 0xa8, ['_LIST_ENTRY']],
'StaticLinks' : [ 0xb8, ['_LIST_ENTRY']],
'ContextInformation' : [ 0xc8, ['pointer64', ['void']]],
'OriginalBase' : [ 0xd0, ['unsigned long long']],
'LoadTime' : [ 0xd8, ['_LARGE_INTEGER']],
} ],
'_HEAP_SUBSEGMENT' : [ 0x30, {
'LocalInfo' : [ 0x0, ['pointer64', ['_HEAP_LOCAL_SEGMENT_INFO']]],
'UserBlocks' : [ 0x8, ['pointer64', ['_HEAP_USERDATA_HEADER']]],
'AggregateExchg' : [ 0x10, ['_INTERLOCK_SEQ']],
'BlockSize' : [ 0x18, ['unsigned short']],
'Flags' : [ 0x1a, ['unsigned short']],
'BlockCount' : [ 0x1c, ['unsigned short']],
'SizeIndex' : [ 0x1e, ['unsigned char']],
'AffinityIndex' : [ 0x1f, ['unsigned char']],
'Alignment' : [ 0x18, ['array', 2, ['unsigned long']]],
'SFreeListEntry' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'Lock' : [ 0x28, ['unsigned long']],
} ],
'__unnamed_197f' : [ 0x4, {
'DataLength' : [ 0x0, ['short']],
'TotalLength' : [ 0x2, ['short']],
} ],
'__unnamed_1981' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_197f']],
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1983' : [ 0x4, {
'Type' : [ 0x0, ['short']],
'DataInfoOffset' : [ 0x2, ['short']],
} ],
'__unnamed_1985' : [ 0x4, {
's2' : [ 0x0, ['__unnamed_1983']],
'ZeroInit' : [ 0x0, ['unsigned long']],
} ],
'_PORT_MESSAGE' : [ 0x28, {
'u1' : [ 0x0, ['__unnamed_1981']],
'u2' : [ 0x4, ['__unnamed_1985']],
'ClientId' : [ 0x8, ['_CLIENT_ID']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x18, ['unsigned long']],
'ClientViewSize' : [ 0x20, ['unsigned long long']],
'CallbackId' : [ 0x20, ['unsigned long']],
} ],
'_ALPC_MESSAGE_ATTRIBUTES' : [ 0x8, {
'AllocatedAttributes' : [ 0x0, ['unsigned long']],
'ValidAttributes' : [ 0x4, ['unsigned long']],
} ],
'_ALPC_HANDLE_ENTRY' : [ 0x8, {
'Object' : [ 0x0, ['pointer64', ['void']]],
} ],
'_BLOB_TYPE' : [ 0x38, {
'ResourceId' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BLOB_TYPE_UNKNOWN', 1: 'BLOB_TYPE_CONNECTION_INFO', 2: 'BLOB_TYPE_MESSAGE', 3: 'BLOB_TYPE_SECURITY_CONTEXT', 4: 'BLOB_TYPE_SECTION', 5: 'BLOB_TYPE_REGION', 6: 'BLOB_TYPE_VIEW', 7: 'BLOB_TYPE_RESERVE', 8: 'BLOB_TYPE_DIRECT_TRANSFER', 9: 'BLOB_TYPE_HANDLE_DATA', 10: 'BLOB_TYPE_MAX_ID'})]],
'PoolTag' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'CreatedObjects' : [ 0xc, ['unsigned long']],
'DeletedObjects' : [ 0x10, ['unsigned long']],
'DeleteProcedure' : [ 0x18, ['pointer64', ['void']]],
'DestroyProcedure' : [ 0x20, ['pointer64', ['void']]],
'UsualSize' : [ 0x28, ['unsigned long long']],
'LookasideIndex' : [ 0x30, ['unsigned long']],
} ],
'__unnamed_199e' : [ 0x1, {
'ReferenceCache' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Lookaside' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Initializing' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Deleted' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
} ],
'__unnamed_19a0' : [ 0x1, {
's1' : [ 0x0, ['__unnamed_199e']],
'Flags' : [ 0x0, ['unsigned char']],
} ],
'_BLOB' : [ 0x20, {
'ResourceList' : [ 0x0, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x0, ['_SLIST_ENTRY']],
'u1' : [ 0x10, ['__unnamed_19a0']],
'ResourceId' : [ 0x11, ['unsigned char']],
'CachedReferences' : [ 0x12, ['short']],
'ReferenceCount' : [ 0x14, ['long']],
'Lock' : [ 0x18, ['_EX_PUSH_LOCK']],
} ],
'__unnamed_19b3' : [ 0x4, {
'Internal' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Secure' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
} ],
'__unnamed_19b5' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19b3']],
} ],
'_KALPC_SECTION' : [ 0x48, {
'SectionObject' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned long long']],
'HandleTable' : [ 0x10, ['pointer64', ['_ALPC_HANDLE_TABLE']]],
'SectionHandle' : [ 0x18, ['pointer64', ['void']]],
'OwnerProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
'OwnerPort' : [ 0x28, ['pointer64', ['_ALPC_PORT']]],
'u1' : [ 0x30, ['__unnamed_19b5']],
'NumberOfRegions' : [ 0x34, ['unsigned long']],
'RegionListHead' : [ 0x38, ['_LIST_ENTRY']],
} ],
'__unnamed_19bb' : [ 0x4, {
'Secure' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
} ],
'__unnamed_19bd' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19bb']],
} ],
'_KALPC_REGION' : [ 0x58, {
'RegionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Section' : [ 0x10, ['pointer64', ['_KALPC_SECTION']]],
'Offset' : [ 0x18, ['unsigned long long']],
'Size' : [ 0x20, ['unsigned long long']],
'ViewSize' : [ 0x28, ['unsigned long long']],
'u1' : [ 0x30, ['__unnamed_19bd']],
'NumberOfViews' : [ 0x34, ['unsigned long']],
'ViewListHead' : [ 0x38, ['_LIST_ENTRY']],
'ReadOnlyView' : [ 0x48, ['pointer64', ['_KALPC_VIEW']]],
'ReadWriteView' : [ 0x50, ['pointer64', ['_KALPC_VIEW']]],
} ],
'__unnamed_19c3' : [ 0x4, {
'WriteAccess' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AutoRelease' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ForceUnlink' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
} ],
'__unnamed_19c5' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19c3']],
} ],
'_KALPC_VIEW' : [ 0x60, {
'ViewListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Region' : [ 0x10, ['pointer64', ['_KALPC_REGION']]],
'OwnerPort' : [ 0x18, ['pointer64', ['_ALPC_PORT']]],
'OwnerProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
'Address' : [ 0x28, ['pointer64', ['void']]],
'Size' : [ 0x30, ['unsigned long long']],
'SecureViewHandle' : [ 0x38, ['pointer64', ['void']]],
'WriteAccessHandle' : [ 0x40, ['pointer64', ['void']]],
'u1' : [ 0x48, ['__unnamed_19c5']],
'NumberOfOwnerMessages' : [ 0x4c, ['unsigned long']],
'ProcessViewListEntry' : [ 0x50, ['_LIST_ENTRY']],
} ],
'_ALPC_COMMUNICATION_INFO' : [ 0x40, {
'ConnectionPort' : [ 0x0, ['pointer64', ['_ALPC_PORT']]],
'ServerCommunicationPort' : [ 0x8, ['pointer64', ['_ALPC_PORT']]],
'ClientCommunicationPort' : [ 0x10, ['pointer64', ['_ALPC_PORT']]],
'CommunicationList' : [ 0x18, ['_LIST_ENTRY']],
'HandleTable' : [ 0x28, ['_ALPC_HANDLE_TABLE']],
} ],
'__unnamed_19e1' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned long')]],
'ConnectionPending' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ConnectionRefused' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Disconnected' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Closed' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'NoFlushOnClose' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ReturnExtendedInfo' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Waitable' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'DynamicSecurity' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Wow64CompletionList' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Lpc' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'LpcToLpc' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HasCompletionList' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'HadCompletionList' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'EnableCompletionList' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
} ],
'__unnamed_19e3' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19e1']],
'State' : [ 0x0, ['unsigned long']],
} ],
'_ALPC_PORT' : [ 0x1a0, {
'PortListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CommunicationInfo' : [ 0x10, ['pointer64', ['_ALPC_COMMUNICATION_INFO']]],
'OwnerProcess' : [ 0x18, ['pointer64', ['_EPROCESS']]],
'CompletionPort' : [ 0x20, ['pointer64', ['void']]],
'CompletionKey' : [ 0x28, ['pointer64', ['void']]],
'CompletionPacketLookaside' : [ 0x30, ['pointer64', ['_ALPC_COMPLETION_PACKET_LOOKASIDE']]],
'PortContext' : [ 0x38, ['pointer64', ['void']]],
'StaticSecurity' : [ 0x40, ['_SECURITY_CLIENT_CONTEXT']],
'MainQueue' : [ 0x88, ['_LIST_ENTRY']],
'PendingQueue' : [ 0x98, ['_LIST_ENTRY']],
'LargeMessageQueue' : [ 0xa8, ['_LIST_ENTRY']],
'WaitQueue' : [ 0xb8, ['_LIST_ENTRY']],
'Semaphore' : [ 0xc8, ['pointer64', ['_KSEMAPHORE']]],
'DummyEvent' : [ 0xc8, ['pointer64', ['_KEVENT']]],
'PortAttributes' : [ 0xd0, ['_ALPC_PORT_ATTRIBUTES']],
'Lock' : [ 0x118, ['_EX_PUSH_LOCK']],
'ResourceListLock' : [ 0x120, ['_EX_PUSH_LOCK']],
'ResourceListHead' : [ 0x128, ['_LIST_ENTRY']],
'CompletionList' : [ 0x138, ['pointer64', ['_ALPC_COMPLETION_LIST']]],
'MessageZone' : [ 0x140, ['pointer64', ['_ALPC_MESSAGE_ZONE']]],
'CallbackObject' : [ 0x148, ['pointer64', ['_CALLBACK_OBJECT']]],
'CallbackContext' : [ 0x150, ['pointer64', ['void']]],
'CanceledQueue' : [ 0x158, ['_LIST_ENTRY']],
'SequenceNo' : [ 0x168, ['long']],
'u1' : [ 0x16c, ['__unnamed_19e3']],
'TargetQueuePort' : [ 0x170, ['pointer64', ['_ALPC_PORT']]],
'TargetSequencePort' : [ 0x178, ['pointer64', ['_ALPC_PORT']]],
'CachedMessage' : [ 0x180, ['pointer64', ['_KALPC_MESSAGE']]],
'MainQueueLength' : [ 0x188, ['unsigned long']],
'PendingQueueLength' : [ 0x18c, ['unsigned long']],
'LargeMessageQueueLength' : [ 0x190, ['unsigned long']],
'CanceledQueueLength' : [ 0x194, ['unsigned long']],
'WaitQueueLength' : [ 0x198, ['unsigned long']],
} ],
'_OBJECT_TYPE' : [ 0xd0, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'Name' : [ 0x10, ['_UNICODE_STRING']],
'DefaultObject' : [ 0x20, ['pointer64', ['void']]],
'Index' : [ 0x28, ['unsigned char']],
'TotalNumberOfObjects' : [ 0x2c, ['unsigned long']],
'TotalNumberOfHandles' : [ 0x30, ['unsigned long']],
'HighWaterNumberOfObjects' : [ 0x34, ['unsigned long']],
'HighWaterNumberOfHandles' : [ 0x38, ['unsigned long']],
'TypeInfo' : [ 0x40, ['_OBJECT_TYPE_INITIALIZER']],
'TypeLock' : [ 0xb0, ['_EX_PUSH_LOCK']],
'Key' : [ 0xb8, ['unsigned long']],
'CallbackList' : [ 0xc0, ['_LIST_ENTRY']],
} ],
'_PORT_MESSAGE32' : [ 0x18, {
'u1' : [ 0x0, ['__unnamed_1981']],
'u2' : [ 0x4, ['__unnamed_1985']],
'ClientId' : [ 0x8, ['_CLIENT_ID32']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x10, ['unsigned long']],
'ClientViewSize' : [ 0x14, ['unsigned long']],
'CallbackId' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_1a00' : [ 0x4, {
'QueueType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'QueuePortType' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned long')]],
'Canceled' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Ready' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ReleaseMessage' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SharedQuota' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ReplyWaitReply' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'OwnerPortReference' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'ReserveReference' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'ReceiverReference' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'ViewAttributeRetrieved' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'InDispatch' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
} ],
'__unnamed_1a02' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1a00']],
'State' : [ 0x0, ['unsigned long']],
} ],
'_KALPC_MESSAGE' : [ 0x100, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtensionBuffer' : [ 0x10, ['pointer64', ['void']]],
'ExtensionBufferSize' : [ 0x18, ['unsigned long long']],
'QuotaProcess' : [ 0x20, ['pointer64', ['_EPROCESS']]],
'QuotaBlock' : [ 0x20, ['pointer64', ['void']]],
'SequenceNo' : [ 0x28, ['long']],
'u1' : [ 0x2c, ['__unnamed_1a02']],
'CancelSequencePort' : [ 0x30, ['pointer64', ['_ALPC_PORT']]],
'CancelQueuePort' : [ 0x38, ['pointer64', ['_ALPC_PORT']]],
'CancelSequenceNo' : [ 0x40, ['long']],
'CancelListEntry' : [ 0x48, ['_LIST_ENTRY']],
'WaitingThread' : [ 0x58, ['pointer64', ['_ETHREAD']]],
'Reserve' : [ 0x60, ['pointer64', ['_KALPC_RESERVE']]],
'PortQueue' : [ 0x68, ['pointer64', ['_ALPC_PORT']]],
'OwnerPort' : [ 0x70, ['pointer64', ['_ALPC_PORT']]],
'MessageAttributes' : [ 0x78, ['_KALPC_MESSAGE_ATTRIBUTES']],
'DataUserVa' : [ 0xb0, ['pointer64', ['void']]],
'DataSystemVa' : [ 0xb8, ['pointer64', ['void']]],
'CommunicationInfo' : [ 0xc0, ['pointer64', ['_ALPC_COMMUNICATION_INFO']]],
'ConnectionPort' : [ 0xc8, ['pointer64', ['_ALPC_PORT']]],
'ServerThread' : [ 0xd0, ['pointer64', ['_ETHREAD']]],
'PortMessage' : [ 0xd8, ['_PORT_MESSAGE']],
} ],
'_REMOTE_PORT_VIEW' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'ViewSize' : [ 0x8, ['unsigned long long']],
'ViewBase' : [ 0x10, ['pointer64', ['void']]],
} ],
'_KALPC_RESERVE' : [ 0x28, {
'OwnerPort' : [ 0x0, ['pointer64', ['_ALPC_PORT']]],
'HandleTable' : [ 0x8, ['pointer64', ['_ALPC_HANDLE_TABLE']]],
'Handle' : [ 0x10, ['pointer64', ['void']]],
'Message' : [ 0x18, ['pointer64', ['_KALPC_MESSAGE']]],
'Active' : [ 0x20, ['long']],
} ],
'_KALPC_HANDLE_DATA' : [ 0x10, {
'Flags' : [ 0x0, ['unsigned long']],
'ObjectType' : [ 0x4, ['unsigned long']],
'DuplicateContext' : [ 0x8, ['pointer64', ['_OB_DUPLICATE_OBJECT_STATE']]],
} ],
'_KALPC_MESSAGE_ATTRIBUTES' : [ 0x38, {
'ClientContext' : [ 0x0, ['pointer64', ['void']]],
'ServerContext' : [ 0x8, ['pointer64', ['void']]],
'PortContext' : [ 0x10, ['pointer64', ['void']]],
'CancelPortContext' : [ 0x18, ['pointer64', ['void']]],
'SecurityData' : [ 0x20, ['pointer64', ['_KALPC_SECURITY_DATA']]],
'View' : [ 0x28, ['pointer64', ['_KALPC_VIEW']]],
'HandleData' : [ 0x30, ['pointer64', ['_KALPC_HANDLE_DATA']]],
} ],
'__unnamed_1a41' : [ 0x4, {
'Revoked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Impersonated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
} ],
'__unnamed_1a43' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1a41']],
} ],
'_KALPC_SECURITY_DATA' : [ 0x70, {
'HandleTable' : [ 0x0, ['pointer64', ['_ALPC_HANDLE_TABLE']]],
'ContextHandle' : [ 0x8, ['pointer64', ['void']]],
'OwningProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'OwnerPort' : [ 0x18, ['pointer64', ['_ALPC_PORT']]],
'DynamicSecurity' : [ 0x20, ['_SECURITY_CLIENT_CONTEXT']],
'u1' : [ 0x68, ['__unnamed_1a43']],
} ],
'_IO_MINI_COMPLETION_PACKET_USER' : [ 0x50, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'PacketType' : [ 0x10, ['unsigned long']],
'KeyContext' : [ 0x18, ['pointer64', ['void']]],
'ApcContext' : [ 0x20, ['pointer64', ['void']]],
'IoStatus' : [ 0x28, ['long']],
'IoStatusInformation' : [ 0x30, ['unsigned long long']],
'MiniPacketCallback' : [ 0x38, ['pointer64', ['void']]],
'Context' : [ 0x40, ['pointer64', ['void']]],
'Allocated' : [ 0x48, ['unsigned char']],
} ],
'_ALPC_DISPATCH_CONTEXT' : [ 0x38, {
'PortObject' : [ 0x0, ['pointer64', ['_ALPC_PORT']]],
'Message' : [ 0x8, ['pointer64', ['_KALPC_MESSAGE']]],
'CommunicationInfo' : [ 0x10, ['pointer64', ['_ALPC_COMMUNICATION_INFO']]],
'TargetThread' : [ 0x18, ['pointer64', ['_ETHREAD']]],
'TargetPort' : [ 0x20, ['pointer64', ['_ALPC_PORT']]],
'Flags' : [ 0x28, ['unsigned long']],
'TotalLength' : [ 0x2c, ['unsigned short']],
'Type' : [ 0x2e, ['unsigned short']],
'DataInfoOffset' : [ 0x30, ['unsigned short']],
} ],
'_DRIVER_OBJECT' : [ 0x150, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x10, ['unsigned long']],
'DriverStart' : [ 0x18, ['pointer64', ['void']]],
'DriverSize' : [ 0x20, ['unsigned long']],
'DriverSection' : [ 0x28, ['pointer64', ['void']]],
'DriverExtension' : [ 0x30, ['pointer64', ['_DRIVER_EXTENSION']]],
'DriverName' : [ 0x38, ['_UNICODE_STRING']],
'HardwareDatabase' : [ 0x48, ['pointer64', ['_UNICODE_STRING']]],
'FastIoDispatch' : [ 0x50, ['pointer64', ['_FAST_IO_DISPATCH']]],
'DriverInit' : [ 0x58, ['pointer64', ['void']]],
'DriverStartIo' : [ 0x60, ['pointer64', ['void']]],
'DriverUnload' : [ 0x68, ['pointer64', ['void']]],
'MajorFunction' : [ 0x70, ['array', 28, ['pointer64', ['void']]]],
} ],
'_FILE_SEGMENT_ELEMENT' : [ 0x8, {
'Buffer' : [ 0x0, ['pointer64', ['void']]],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_RELATIVE_SYMLINK_INFO' : [ 0x20, {
'ExposedNamespaceLength' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'DeviceNameLength' : [ 0x4, ['unsigned short']],
'Reserved' : [ 0x6, ['unsigned short']],
'InteriorMountPoint' : [ 0x8, ['pointer64', ['_RELATIVE_SYMLINK_INFO']]],
'OpenedName' : [ 0x10, ['_UNICODE_STRING']],
} ],
'_ECP_LIST' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'EcpList' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_IOP_FILE_OBJECT_EXTENSION' : [ 0x48, {
'FoExtFlags' : [ 0x0, ['unsigned long']],
'FoExtPerTypeExtension' : [ 0x8, ['array', 7, ['pointer64', ['void']]]],
'FoIoPriorityHint' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'IopIoPriorityNotSet', 1: 'IopIoPriorityVeryLow', 2: 'IopIoPriorityLow', 3: 'IopIoPriorityNormal', 4: 'IopIoPriorityHigh', 5: 'IopIoPriorityCritical', 6: 'MaxIopIoPriorityTypes'})]],
} ],
'_OPEN_PACKET' : [ 0xb8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'FinalStatus' : [ 0x10, ['long']],
'Information' : [ 0x18, ['unsigned long long']],
'ParseCheck' : [ 0x20, ['unsigned long']],
'RelatedFileObject' : [ 0x28, ['pointer64', ['_FILE_OBJECT']]],
'OriginalAttributes' : [ 0x30, ['pointer64', ['_OBJECT_ATTRIBUTES']]],
'AllocationSize' : [ 0x38, ['_LARGE_INTEGER']],
'CreateOptions' : [ 0x40, ['unsigned long']],
'FileAttributes' : [ 0x44, ['unsigned short']],
'ShareAccess' : [ 0x46, ['unsigned short']],
'EaBuffer' : [ 0x48, ['pointer64', ['void']]],
'EaLength' : [ 0x50, ['unsigned long']],
'Options' : [ 0x54, ['unsigned long']],
'Disposition' : [ 0x58, ['unsigned long']],
'BasicInformation' : [ 0x60, ['pointer64', ['_FILE_BASIC_INFORMATION']]],
'NetworkInformation' : [ 0x68, ['pointer64', ['_FILE_NETWORK_OPEN_INFORMATION']]],
'CreateFileType' : [ 0x70, ['Enumeration', dict(target = 'long', choices = {0: 'CreateFileTypeNone', 1: 'CreateFileTypeNamedPipe', 2: 'CreateFileTypeMailslot'})]],
'MailslotOrPipeParameters' : [ 0x78, ['pointer64', ['void']]],
'Override' : [ 0x80, ['unsigned char']],
'QueryOnly' : [ 0x81, ['unsigned char']],
'DeleteOnly' : [ 0x82, ['unsigned char']],
'FullAttributes' : [ 0x83, ['unsigned char']],
'LocalFileObject' : [ 0x88, ['pointer64', ['_DUMMY_FILE_OBJECT']]],
'InternalFlags' : [ 0x90, ['unsigned long']],
'DriverCreateContext' : [ 0x98, ['_IO_DRIVER_CREATE_CONTEXT']],
} ],
'_ETW_SYSTEMTIME' : [ 0x10, {
'Year' : [ 0x0, ['unsigned short']],
'Month' : [ 0x2, ['unsigned short']],
'DayOfWeek' : [ 0x4, ['unsigned short']],
'Day' : [ 0x6, ['unsigned short']],
'Hour' : [ 0x8, ['unsigned short']],
'Minute' : [ 0xa, ['unsigned short']],
'Second' : [ 0xc, ['unsigned short']],
'Milliseconds' : [ 0xe, ['unsigned short']],
} ],
'_TIME_FIELDS' : [ 0x10, {
'Year' : [ 0x0, ['short']],
'Month' : [ 0x2, ['short']],
'Day' : [ 0x4, ['short']],
'Hour' : [ 0x6, ['short']],
'Minute' : [ 0x8, ['short']],
'Second' : [ 0xa, ['short']],
'Milliseconds' : [ 0xc, ['short']],
'Weekday' : [ 0xe, ['short']],
} ],
'_WMI_LOGGER_CONTEXT' : [ 0x330, {
'LoggerId' : [ 0x0, ['unsigned long']],
'BufferSize' : [ 0x4, ['unsigned long']],
'MaximumEventSize' : [ 0x8, ['unsigned long']],
'CollectionOn' : [ 0xc, ['long']],
'LoggerMode' : [ 0x10, ['unsigned long']],
'AcceptNewEvents' : [ 0x14, ['long']],
'GetCpuClock' : [ 0x18, ['pointer64', ['void']]],
'StartTime' : [ 0x20, ['_LARGE_INTEGER']],
'LogFileHandle' : [ 0x28, ['pointer64', ['void']]],
'LoggerThread' : [ 0x30, ['pointer64', ['_ETHREAD']]],
'LoggerStatus' : [ 0x38, ['long']],
'NBQHead' : [ 0x40, ['pointer64', ['void']]],
'OverflowNBQHead' : [ 0x48, ['pointer64', ['void']]],
'QueueBlockFreeList' : [ 0x50, ['_SLIST_HEADER']],
'GlobalList' : [ 0x60, ['_LIST_ENTRY']],
'BatchedBufferList' : [ 0x70, ['pointer64', ['_WMI_BUFFER_HEADER']]],
'CurrentBuffer' : [ 0x70, ['_EX_FAST_REF']],
'LoggerName' : [ 0x78, ['_UNICODE_STRING']],
'LogFileName' : [ 0x88, ['_UNICODE_STRING']],
'LogFilePattern' : [ 0x98, ['_UNICODE_STRING']],
'NewLogFileName' : [ 0xa8, ['_UNICODE_STRING']],
'ClockType' : [ 0xb8, ['unsigned long']],
'MaximumFileSize' : [ 0xbc, ['unsigned long']],
'LastFlushedBuffer' : [ 0xc0, ['unsigned long']],
'FlushTimer' : [ 0xc4, ['unsigned long']],
'FlushThreshold' : [ 0xc8, ['unsigned long']],
'ByteOffset' : [ 0xd0, ['_LARGE_INTEGER']],
'MinimumBuffers' : [ 0xd8, ['unsigned long']],
'BuffersAvailable' : [ 0xdc, ['long']],
'NumberOfBuffers' : [ 0xe0, ['long']],
'MaximumBuffers' : [ 0xe4, ['unsigned long']],
'EventsLost' : [ 0xe8, ['unsigned long']],
'BuffersWritten' : [ 0xec, ['unsigned long']],
'LogBuffersLost' : [ 0xf0, ['unsigned long']],
'RealTimeBuffersDelivered' : [ 0xf4, ['unsigned long']],
'RealTimeBuffersLost' : [ 0xf8, ['unsigned long']],
'SequencePtr' : [ 0x100, ['pointer64', ['long']]],
'LocalSequence' : [ 0x108, ['unsigned long']],
'InstanceGuid' : [ 0x10c, ['_GUID']],
'FileCounter' : [ 0x11c, ['long']],
'BufferCallback' : [ 0x120, ['pointer64', ['void']]],
'PoolType' : [ 0x128, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'ReferenceTime' : [ 0x130, ['_ETW_REF_CLOCK']],
'Consumers' : [ 0x140, ['_LIST_ENTRY']],
'NumConsumers' : [ 0x150, ['unsigned long']],
'TransitionConsumer' : [ 0x158, ['pointer64', ['_ETW_REALTIME_CONSUMER']]],
'RealtimeLogfileHandle' : [ 0x160, ['pointer64', ['void']]],
'RealtimeLogfileName' : [ 0x168, ['_UNICODE_STRING']],
'RealtimeWriteOffset' : [ 0x178, ['_LARGE_INTEGER']],
'RealtimeReadOffset' : [ 0x180, ['_LARGE_INTEGER']],
'RealtimeLogfileSize' : [ 0x188, ['_LARGE_INTEGER']],
'RealtimeLogfileUsage' : [ 0x190, ['unsigned long long']],
'RealtimeMaximumFileSize' : [ 0x198, ['unsigned long long']],
'RealtimeBuffersSaved' : [ 0x1a0, ['unsigned long']],
'RealtimeReferenceTime' : [ 0x1a8, ['_ETW_REF_CLOCK']],
'NewRTEventsLost' : [ 0x1b8, ['Enumeration', dict(target = 'long', choices = {0: 'EtwRtEventNoLoss', 1: 'EtwRtEventLost', 2: 'EtwRtBufferLost', 3: 'EtwRtBackupLost', 4: 'EtwRtEventLossMax'})]],
'LoggerEvent' : [ 0x1c0, ['_KEVENT']],
'FlushEvent' : [ 0x1d8, ['_KEVENT']],
'FlushTimeOutTimer' : [ 0x1f0, ['_KTIMER']],
'FlushDpc' : [ 0x230, ['_KDPC']],
'LoggerMutex' : [ 0x270, ['_KMUTANT']],
'LoggerLock' : [ 0x2a8, ['_EX_PUSH_LOCK']],
'BufferListSpinLock' : [ 0x2b0, ['unsigned long long']],
'BufferListPushLock' : [ 0x2b0, ['_EX_PUSH_LOCK']],
'ClientSecurityContext' : [ 0x2b8, ['_SECURITY_CLIENT_CONTEXT']],
'SecurityDescriptor' : [ 0x300, ['_EX_FAST_REF']],
'BufferSequenceNumber' : [ 0x308, ['long long']],
'Flags' : [ 0x310, ['unsigned long']],
'Persistent' : [ 0x310, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AutoLogger' : [ 0x310, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'FsReady' : [ 0x310, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'RealTime' : [ 0x310, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow' : [ 0x310, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'KernelTrace' : [ 0x310, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'NoMoreEnable' : [ 0x310, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'StackTracing' : [ 0x310, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ErrorLogged' : [ 0x310, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'RealtimeLoggerContextFreed' : [ 0x310, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'RequestFlag' : [ 0x314, ['unsigned long']],
'RequestNewFie' : [ 0x314, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'RequestUpdateFile' : [ 0x314, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'RequestFlush' : [ 0x314, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'RequestDisableRealtime' : [ 0x314, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'RequestDisconnectConsumer' : [ 0x314, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'RequestConnectConsumer' : [ 0x314, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'HookIdMap' : [ 0x318, ['_RTL_BITMAP']],
} ],
'_ETW_LOGGER_HANDLE' : [ 0x1, {
'DereferenceAndLeave' : [ 0x0, ['unsigned char']],
} ],
'_ETW_BUFFER_HANDLE' : [ 0x10, {
'TraceBuffer' : [ 0x0, ['pointer64', ['_WMI_BUFFER_HEADER']]],
'BufferFastRef' : [ 0x8, ['pointer64', ['_EX_FAST_REF']]],
} ],
'_SYSTEM_TRACE_HEADER' : [ 0x20, {
'Marker' : [ 0x0, ['unsigned long']],
'Version' : [ 0x0, ['unsigned short']],
'HeaderType' : [ 0x2, ['unsigned char']],
'Flags' : [ 0x3, ['unsigned char']],
'Header' : [ 0x4, ['unsigned long']],
'Packet' : [ 0x4, ['_WMI_TRACE_PACKET']],
'ThreadId' : [ 0x8, ['unsigned long']],
'ProcessId' : [ 0xc, ['unsigned long']],
'SystemTime' : [ 0x10, ['_LARGE_INTEGER']],
'KernelTime' : [ 0x18, ['unsigned long']],
'UserTime' : [ 0x1c, ['unsigned long']],
} ],
'_PERFINFO_TRACE_HEADER' : [ 0x18, {
'Marker' : [ 0x0, ['unsigned long']],
'Version' : [ 0x0, ['unsigned short']],
'HeaderType' : [ 0x2, ['unsigned char']],
'Flags' : [ 0x3, ['unsigned char']],
'Header' : [ 0x4, ['unsigned long']],
'Packet' : [ 0x4, ['_WMI_TRACE_PACKET']],
'TS' : [ 0x8, ['unsigned long long']],
'SystemTime' : [ 0x8, ['_LARGE_INTEGER']],
'Data' : [ 0x10, ['array', 1, ['unsigned char']]],
} ],
'_NBQUEUE_BLOCK' : [ 0x20, {
'SListEntry' : [ 0x0, ['_SLIST_ENTRY']],
'Next' : [ 0x10, ['unsigned long long']],
'Data' : [ 0x18, ['unsigned long long']],
} ],
'_KMUTANT' : [ 0x38, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'MutantListEntry' : [ 0x18, ['_LIST_ENTRY']],
'OwnerThread' : [ 0x28, ['pointer64', ['_KTHREAD']]],
'Abandoned' : [ 0x30, ['unsigned char']],
'ApcDisable' : [ 0x31, ['unsigned char']],
} ],
'_ETW_LAST_ENABLE_INFO' : [ 0x10, {
'EnableFlags' : [ 0x0, ['_LARGE_INTEGER']],
'LoggerId' : [ 0x8, ['unsigned short']],
'Level' : [ 0xa, ['unsigned char']],
'Enabled' : [ 0xb, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'InternalFlag' : [ 0xb, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
} ],
'_TRACE_ENABLE_CONTEXT' : [ 0x8, {
'LoggerId' : [ 0x0, ['unsigned short']],
'Level' : [ 0x2, ['unsigned char']],
'InternalFlag' : [ 0x3, ['unsigned char']],
'EnableFlags' : [ 0x4, ['unsigned long']],
} ],
'_TRACE_ENABLE_CONTEXT_EX' : [ 0x10, {
'LoggerId' : [ 0x0, ['unsigned short']],
'Level' : [ 0x2, ['unsigned char']],
'InternalFlag' : [ 0x3, ['unsigned char']],
'EnableFlags' : [ 0x4, ['unsigned long']],
'EnableFlagsHigh' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_ETW_GUID_ENTRY' : [ 0x1b0, {
'GuidList' : [ 0x0, ['_LIST_ENTRY']],
'RefCount' : [ 0x10, ['long']],
'Guid' : [ 0x14, ['_GUID']],
'RegListHead' : [ 0x28, ['_LIST_ENTRY']],
'SecurityDescriptor' : [ 0x38, ['pointer64', ['void']]],
'LastEnable' : [ 0x40, ['_ETW_LAST_ENABLE_INFO']],
'MatchId' : [ 0x40, ['unsigned long long']],
'ProviderEnableInfo' : [ 0x50, ['_TRACE_ENABLE_INFO']],
'EnableInfo' : [ 0x70, ['array', 8, ['_TRACE_ENABLE_INFO']]],
'FilterData' : [ 0x170, ['array', 8, ['pointer64', ['_EVENT_FILTER_HEADER']]]],
} ],
'_TRACE_ENABLE_INFO' : [ 0x20, {
'IsEnabled' : [ 0x0, ['unsigned long']],
'Level' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'LoggerId' : [ 0x6, ['unsigned short']],
'EnableProperty' : [ 0x8, ['unsigned long']],
'Reserved2' : [ 0xc, ['unsigned long']],
'MatchAnyKeyword' : [ 0x10, ['unsigned long long']],
'MatchAllKeyword' : [ 0x18, ['unsigned long long']],
} ],
'_LUID_AND_ATTRIBUTES' : [ 0xc, {
'Luid' : [ 0x0, ['_LUID']],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_TOKEN' : [ 0x310, {
'TokenSource' : [ 0x0, ['_TOKEN_SOURCE']],
'TokenId' : [ 0x10, ['_LUID']],
'AuthenticationId' : [ 0x18, ['_LUID']],
'ParentTokenId' : [ 0x20, ['_LUID']],
'ExpirationTime' : [ 0x28, ['_LARGE_INTEGER']],
'TokenLock' : [ 0x30, ['pointer64', ['_ERESOURCE']]],
'ModifiedId' : [ 0x38, ['_LUID']],
'Privileges' : [ 0x40, ['_SEP_TOKEN_PRIVILEGES']],
'AuditPolicy' : [ 0x58, ['_SEP_AUDIT_POLICY']],
'SessionId' : [ 0x74, ['unsigned long']],
'UserAndGroupCount' : [ 0x78, ['unsigned long']],
'RestrictedSidCount' : [ 0x7c, ['unsigned long']],
'VariableLength' : [ 0x80, ['unsigned long']],
'DynamicCharged' : [ 0x84, ['unsigned long']],
'DynamicAvailable' : [ 0x88, ['unsigned long']],
'DefaultOwnerIndex' : [ 0x8c, ['unsigned long']],
'UserAndGroups' : [ 0x90, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'RestrictedSids' : [ 0x98, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'PrimaryGroup' : [ 0xa0, ['pointer64', ['void']]],
'DynamicPart' : [ 0xa8, ['pointer64', ['unsigned long']]],
'DefaultDacl' : [ 0xb0, ['pointer64', ['_ACL']]],
'TokenType' : [ 0xb8, ['Enumeration', dict(target = 'long', choices = {1: 'TokenPrimary', 2: 'TokenImpersonation'})]],
'ImpersonationLevel' : [ 0xbc, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'TokenFlags' : [ 0xc0, ['unsigned long']],
'TokenInUse' : [ 0xc4, ['unsigned char']],
'IntegrityLevelIndex' : [ 0xc8, ['unsigned long']],
'MandatoryPolicy' : [ 0xcc, ['unsigned long']],
'LogonSession' : [ 0xd0, ['pointer64', ['_SEP_LOGON_SESSION_REFERENCES']]],
'OriginatingLogonSession' : [ 0xd8, ['_LUID']],
'SidHash' : [ 0xe0, ['_SID_AND_ATTRIBUTES_HASH']],
'RestrictedSidHash' : [ 0x1f0, ['_SID_AND_ATTRIBUTES_HASH']],
'pSecurityAttributes' : [ 0x300, ['pointer64', ['_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION']]],
'VariablePart' : [ 0x308, ['unsigned long long']],
} ],
'_SEP_LOGON_SESSION_REFERENCES' : [ 0x50, {
'Next' : [ 0x0, ['pointer64', ['_SEP_LOGON_SESSION_REFERENCES']]],
'LogonId' : [ 0x8, ['_LUID']],
'BuddyLogonId' : [ 0x10, ['_LUID']],
'ReferenceCount' : [ 0x18, ['unsigned long']],
'Flags' : [ 0x1c, ['unsigned long']],
'pDeviceMap' : [ 0x20, ['pointer64', ['_DEVICE_MAP']]],
'Token' : [ 0x28, ['pointer64', ['void']]],
'AccountName' : [ 0x30, ['_UNICODE_STRING']],
'AuthorityName' : [ 0x40, ['_UNICODE_STRING']],
} ],
'_OBJECT_HEADER' : [ 0x38, {
'PointerCount' : [ 0x0, ['long long']],
'HandleCount' : [ 0x8, ['long long']],
'NextToFree' : [ 0x8, ['pointer64', ['void']]],
'Lock' : [ 0x10, ['_EX_PUSH_LOCK']],
'TypeIndex' : [ 0x18, ['unsigned char']],
'TraceFlags' : [ 0x19, ['unsigned char']],
'InfoMask' : [ 0x1a, ['unsigned char']],
'Flags' : [ 0x1b, ['unsigned char']],
'ObjectCreateInfo' : [ 0x20, ['pointer64', ['_OBJECT_CREATE_INFORMATION']]],
'QuotaBlockCharged' : [ 0x20, ['pointer64', ['void']]],
'SecurityDescriptor' : [ 0x28, ['pointer64', ['void']]],
'Body' : [ 0x30, ['_QUAD']],
} ],
'_OBJECT_HEADER_QUOTA_INFO' : [ 0x20, {
'PagedPoolCharge' : [ 0x0, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x4, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x8, ['unsigned long']],
'SecurityDescriptorQuotaBlock' : [ 0x10, ['pointer64', ['void']]],
'Reserved' : [ 0x18, ['unsigned long long']],
} ],
'_OBJECT_HEADER_PROCESS_INFO' : [ 0x10, {
'ExclusiveProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'Reserved' : [ 0x8, ['unsigned long long']],
} ],
'_OBJECT_HEADER_HANDLE_INFO' : [ 0x10, {
'HandleCountDataBase' : [ 0x0, ['pointer64', ['_OBJECT_HANDLE_COUNT_DATABASE']]],
'SingleEntry' : [ 0x0, ['_OBJECT_HANDLE_COUNT_ENTRY']],
} ],
'_OBJECT_HEADER_NAME_INFO' : [ 0x20, {
'Directory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'Name' : [ 0x8, ['_UNICODE_STRING']],
'ReferenceCount' : [ 0x18, ['long']],
} ],
'_OBJECT_HEADER_CREATOR_INFO' : [ 0x20, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'CreatorUniqueProcess' : [ 0x10, ['pointer64', ['void']]],
'CreatorBackTraceIndex' : [ 0x18, ['unsigned short']],
'Reserved' : [ 0x1a, ['unsigned short']],
} ],
'_OBP_LOOKUP_CONTEXT' : [ 0x20, {
'Directory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'Object' : [ 0x8, ['pointer64', ['void']]],
'HashValue' : [ 0x10, ['unsigned long']],
'HashIndex' : [ 0x14, ['unsigned short']],
'DirectoryLocked' : [ 0x16, ['unsigned char']],
'LockedExclusive' : [ 0x17, ['unsigned char']],
'LockStateSignature' : [ 0x18, ['unsigned long']],
} ],
'_OBJECT_DIRECTORY' : [ 0x150, {
'HashBuckets' : [ 0x0, ['array', 37, ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]]],
'Lock' : [ 0x128, ['_EX_PUSH_LOCK']],
'DeviceMap' : [ 0x130, ['pointer64', ['_DEVICE_MAP']]],
'SessionId' : [ 0x138, ['unsigned long']],
'NamespaceEntry' : [ 0x140, ['pointer64', ['void']]],
'Flags' : [ 0x148, ['unsigned long']],
} ],
'_PS_CLIENT_SECURITY_CONTEXT' : [ 0x8, {
'ImpersonationData' : [ 0x0, ['unsigned long long']],
'ImpersonationToken' : [ 0x0, ['pointer64', ['void']]],
'ImpersonationLevel' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long long')]],
'EffectiveOnly' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
} ],
'_DBGKD_ANY_CONTROL_SET' : [ 0x1c, {
'X86ControlSet' : [ 0x0, ['_X86_DBGKD_CONTROL_SET']],
'AlphaControlSet' : [ 0x0, ['unsigned long']],
'IA64ControlSet' : [ 0x0, ['_IA64_DBGKD_CONTROL_SET']],
'Amd64ControlSet' : [ 0x0, ['_AMD64_DBGKD_CONTROL_SET']],
'ArmControlSet' : [ 0x0, ['_ARM_DBGKD_CONTROL_SET']],
'PpcControlSet' : [ 0x0, ['_PPC_DBGKD_CONTROL_SET']],
} ],
'_MMVAD_FLAGS3' : [ 0x8, {
'PreferredNode' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 6, native_type='unsigned long long')]],
'Teb' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'SequentialAccess' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'LastSequentialTrim' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 24, native_type='unsigned long long')]],
'Spare2' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long long')]],
'LargePageCreating' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 33, native_type='unsigned long long')]],
'Spare3' : [ 0x0, ['BitField', dict(start_bit = 33, end_bit = 64, native_type='unsigned long long')]],
} ],
'_MI_VERIFIER_POOL_HEADER' : [ 0x8, {
'VerifierPoolEntry' : [ 0x0, ['pointer64', ['_VI_POOL_ENTRY']]],
} ],
'_HBASE_BLOCK' : [ 0x1000, {
'Signature' : [ 0x0, ['unsigned long']],
'Sequence1' : [ 0x4, ['unsigned long']],
'Sequence2' : [ 0x8, ['unsigned long']],
'TimeStamp' : [ 0xc, ['_LARGE_INTEGER']],
'Major' : [ 0x14, ['unsigned long']],
'Minor' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['unsigned long']],
'Format' : [ 0x20, ['unsigned long']],
'RootCell' : [ 0x24, ['unsigned long']],
'Length' : [ 0x28, ['unsigned long']],
'Cluster' : [ 0x2c, ['unsigned long']],
'FileName' : [ 0x30, ['array', 64, ['unsigned char']]],
'RmId' : [ 0x70, ['_GUID']],
'LogId' : [ 0x80, ['_GUID']],
'Flags' : [ 0x90, ['unsigned long']],
'TmId' : [ 0x94, ['_GUID']],
'GuidSignature' : [ 0xa4, ['unsigned long']],
'Reserved1' : [ 0xa8, ['array', 85, ['unsigned long']]],
'CheckSum' : [ 0x1fc, ['unsigned long']],
'Reserved2' : [ 0x200, ['array', 882, ['unsigned long']]],
'ThawTmId' : [ 0xfc8, ['_GUID']],
'ThawRmId' : [ 0xfd8, ['_GUID']],
'ThawLogId' : [ 0xfe8, ['_GUID']],
'BootType' : [ 0xff8, ['unsigned long']],
'BootRecover' : [ 0xffc, ['unsigned long']],
} ],
'_ERESOURCE' : [ 0x68, {
'SystemResourcesList' : [ 0x0, ['_LIST_ENTRY']],
'OwnerTable' : [ 0x10, ['pointer64', ['_OWNER_ENTRY']]],
'ActiveCount' : [ 0x18, ['short']],
'Flag' : [ 0x1a, ['unsigned short']],
'SharedWaiters' : [ 0x20, ['pointer64', ['_KSEMAPHORE']]],
'ExclusiveWaiters' : [ 0x28, ['pointer64', ['_KEVENT']]],
'OwnerEntry' : [ 0x30, ['_OWNER_ENTRY']],
'ActiveEntries' : [ 0x40, ['unsigned long']],
'ContentionCount' : [ 0x44, ['unsigned long']],
'NumberOfSharedWaiters' : [ 0x48, ['unsigned long']],
'NumberOfExclusiveWaiters' : [ 0x4c, ['unsigned long']],
'Reserved2' : [ 0x50, ['pointer64', ['void']]],
'Address' : [ 0x58, ['pointer64', ['void']]],
'CreatorBackTraceIndex' : [ 0x58, ['unsigned long long']],
'SpinLock' : [ 0x60, ['unsigned long long']],
} ],
'_ARM_DBGKD_CONTROL_SET' : [ 0xc, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long']],
'CurrentSymbolEnd' : [ 0x8, ['unsigned long']],
} ],
'_LPCP_MESSAGE' : [ 0x50, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'FreeEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Reserved0' : [ 0x8, ['unsigned long']],
'SenderPort' : [ 0x10, ['pointer64', ['void']]],
'RepliedToThread' : [ 0x18, ['pointer64', ['_ETHREAD']]],
'PortContext' : [ 0x20, ['pointer64', ['void']]],
'Request' : [ 0x28, ['_PORT_MESSAGE']],
} ],
'_HARDWARE_PTE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'reserved0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 52, native_type='unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type='unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_DUAL' : [ 0x278, {
'Length' : [ 0x0, ['unsigned long']],
'Map' : [ 0x8, ['pointer64', ['_HMAP_DIRECTORY']]],
'SmallDir' : [ 0x10, ['pointer64', ['_HMAP_TABLE']]],
'Guard' : [ 0x18, ['unsigned long']],
'FreeDisplay' : [ 0x20, ['array', 24, ['_FREE_DISPLAY']]],
'FreeSummary' : [ 0x260, ['unsigned long']],
'FreeBins' : [ 0x268, ['_LIST_ENTRY']],
} ],
'_ALPC_PORT_ATTRIBUTES' : [ 0x48, {
'Flags' : [ 0x0, ['unsigned long']],
'SecurityQos' : [ 0x4, ['_SECURITY_QUALITY_OF_SERVICE']],
'MaxMessageLength' : [ 0x10, ['unsigned long long']],
'MemoryBandwidth' : [ 0x18, ['unsigned long long']],
'MaxPoolUsage' : [ 0x20, ['unsigned long long']],
'MaxSectionSize' : [ 0x28, ['unsigned long long']],
'MaxViewSize' : [ 0x30, ['unsigned long long']],
'MaxTotalSectionSize' : [ 0x38, ['unsigned long long']],
'DupObjectTypes' : [ 0x40, ['unsigned long']],
'Reserved' : [ 0x44, ['unsigned long']],
} ],
'_CM_INDEX_HINT_BLOCK' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'HashKey' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_KQUEUE' : [ 0x40, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'EntryListHead' : [ 0x18, ['_LIST_ENTRY']],
'CurrentCount' : [ 0x28, ['unsigned long']],
'MaximumCount' : [ 0x2c, ['unsigned long']],
'ThreadListHead' : [ 0x30, ['_LIST_ENTRY']],
} ],
'_KSTACK_COUNT' : [ 0x4, {
'Value' : [ 0x0, ['long']],
'State' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'StackCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
} ],
'_DISPATCHER_HEADER' : [ 0x18, {
'Type' : [ 0x0, ['unsigned char']],
'TimerControlFlags' : [ 0x1, ['unsigned char']],
'Absolute' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Coalescable' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'KeepShifting' : [ 0x1, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'EncodedTolerableDelay' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
'Abandoned' : [ 0x1, ['unsigned char']],
'Signalling' : [ 0x1, ['unsigned char']],
'ThreadControlFlags' : [ 0x2, ['unsigned char']],
'CpuThrottled' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'CycleProfiling' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'CounterProfiling' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Reserved' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
'Hand' : [ 0x2, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'TimerMiscFlags' : [ 0x3, ['unsigned char']],
'Index' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 6, native_type='unsigned char')]],
'Inserted' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Expired' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'DebugActive' : [ 0x3, ['unsigned char']],
'ActiveDR7' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Instrumented' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Reserved2' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned char')]],
'UmsScheduled' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'UmsPrimary' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'DpcActive' : [ 0x3, ['unsigned char']],
'Lock' : [ 0x0, ['long']],
'SignalState' : [ 0x4, ['long']],
'WaitListHead' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_VI_POOL_ENTRY' : [ 0x20, {
'PageHeader' : [ 0x0, ['_VI_POOL_PAGE_HEADER']],
'InUse' : [ 0x0, ['_VI_POOL_ENTRY_INUSE']],
'NextFree' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_MM_PAGE_ACCESS_INFO' : [ 0x8, {
'Flags' : [ 0x0, ['_MM_PAGE_ACCESS_INFO_FLAGS']],
'FileOffset' : [ 0x0, ['unsigned long long']],
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'PointerProtoPte' : [ 0x0, ['pointer64', ['void']]],
} ],
'_HEAP_COUNTERS' : [ 0x70, {
'TotalMemoryReserved' : [ 0x0, ['unsigned long long']],
'TotalMemoryCommitted' : [ 0x8, ['unsigned long long']],
'TotalMemoryLargeUCR' : [ 0x10, ['unsigned long long']],
'TotalSizeInVirtualBlocks' : [ 0x18, ['unsigned long long']],
'TotalSegments' : [ 0x20, ['unsigned long']],
'TotalUCRs' : [ 0x24, ['unsigned long']],
'CommittOps' : [ 0x28, ['unsigned long']],
'DeCommitOps' : [ 0x2c, ['unsigned long']],
'LockAcquires' : [ 0x30, ['unsigned long']],
'LockCollisions' : [ 0x34, ['unsigned long']],
'CommitRate' : [ 0x38, ['unsigned long']],
'DecommittRate' : [ 0x3c, ['unsigned long']],
'CommitFailures' : [ 0x40, ['unsigned long']],
'InBlockCommitFailures' : [ 0x44, ['unsigned long']],
'CompactHeapCalls' : [ 0x48, ['unsigned long']],
'CompactedUCRs' : [ 0x4c, ['unsigned long']],
'AllocAndFreeOps' : [ 0x50, ['unsigned long']],
'InBlockDeccommits' : [ 0x54, ['unsigned long']],
'InBlockDeccomitSize' : [ 0x58, ['unsigned long long']],
'HighWatermarkSize' : [ 0x60, ['unsigned long long']],
'LastPolledSize' : [ 0x68, ['unsigned long long']],
} ],
'_CM_KEY_HASH' : [ 0x20, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer64', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x10, ['pointer64', ['_HHIVE']]],
'KeyCell' : [ 0x18, ['unsigned long']],
} ],
'_SYSPTES_HEADER' : [ 0x28, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Count' : [ 0x10, ['unsigned long long']],
'NumberOfEntries' : [ 0x18, ['unsigned long long']],
'NumberOfEntriesPeak' : [ 0x20, ['unsigned long long']],
} ],
'_EXCEPTION_RECORD' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['pointer64', ['_EXCEPTION_RECORD']]],
'ExceptionAddress' : [ 0x10, ['pointer64', ['void']]],
'NumberParameters' : [ 0x18, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_PENDING_RELATIONS_LIST_ENTRY' : [ 0x68, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'WorkItem' : [ 0x10, ['_WORK_QUEUE_ITEM']],
'DeviceEvent' : [ 0x30, ['pointer64', ['_PNP_DEVICE_EVENT_ENTRY']]],
'DeviceObject' : [ 0x38, ['pointer64', ['_DEVICE_OBJECT']]],
'RelationsList' : [ 0x40, ['pointer64', ['_RELATION_LIST']]],
'EjectIrp' : [ 0x48, ['pointer64', ['_IRP']]],
'Lock' : [ 0x50, ['Enumeration', dict(target = 'long', choices = {0: 'IRPLOCK_CANCELABLE', 1: 'IRPLOCK_CANCEL_STARTED', 2: 'IRPLOCK_CANCEL_COMPLETE', 3: 'IRPLOCK_COMPLETED'})]],
'Problem' : [ 0x54, ['unsigned long']],
'ProfileChangingEject' : [ 0x58, ['unsigned char']],
'DisplaySafeRemovalDialog' : [ 0x59, ['unsigned char']],
'LightestSleepState' : [ 0x5c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DockInterface' : [ 0x60, ['pointer64', ['DOCK_INTERFACE']]],
} ],
'_I386_LOADER_BLOCK' : [ 0x10, {
'CommonDataArea' : [ 0x0, ['pointer64', ['void']]],
'MachineType' : [ 0x8, ['unsigned long']],
'VirtualBias' : [ 0xc, ['unsigned long']],
} ],
'_CELL_DATA' : [ 0x50, {
'u' : [ 0x0, ['_u']],
} ],
'_ARC_DISK_INFORMATION' : [ 0x10, {
'DiskSignatures' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_INITIAL_PRIVILEGE_SET' : [ 0x2c, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 3, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_HEAP_TUNING_PARAMETERS' : [ 0x10, {
'CommittThresholdShift' : [ 0x0, ['unsigned long']],
'MaxPreCommittThreshold' : [ 0x8, ['unsigned long long']],
} ],
'_MMWSLE_NONDIRECT_HASH' : [ 0x10, {
'Key' : [ 0x0, ['pointer64', ['void']]],
'Index' : [ 0x8, ['unsigned long']],
} ],
'_HMAP_DIRECTORY' : [ 0x2000, {
'Directory' : [ 0x0, ['array', 1024, ['pointer64', ['_HMAP_TABLE']]]],
} ],
'_KAPC' : [ 0x58, {
'Type' : [ 0x0, ['unsigned char']],
'SpareByte0' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'SpareByte1' : [ 0x3, ['unsigned char']],
'SpareLong0' : [ 0x4, ['unsigned long']],
'Thread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'ApcListEntry' : [ 0x10, ['_LIST_ENTRY']],
'KernelRoutine' : [ 0x20, ['pointer64', ['void']]],
'RundownRoutine' : [ 0x28, ['pointer64', ['void']]],
'NormalRoutine' : [ 0x30, ['pointer64', ['void']]],
'NormalContext' : [ 0x38, ['pointer64', ['void']]],
'SystemArgument1' : [ 0x40, ['pointer64', ['void']]],
'SystemArgument2' : [ 0x48, ['pointer64', ['void']]],
'ApcStateIndex' : [ 0x50, ['unsigned char']],
'ApcMode' : [ 0x51, ['unsigned char']],
'Inserted' : [ 0x52, ['unsigned char']],
} ],
'_HANDLE_TABLE' : [ 0x68, {
'TableCode' : [ 0x0, ['unsigned long long']],
'QuotaProcess' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'UniqueProcessId' : [ 0x10, ['pointer64', ['void']]],
'HandleLock' : [ 0x18, ['_EX_PUSH_LOCK']],
'HandleTableList' : [ 0x20, ['_LIST_ENTRY']],
'HandleContentionEvent' : [ 0x30, ['_EX_PUSH_LOCK']],
'DebugInfo' : [ 0x38, ['pointer64', ['_HANDLE_TRACE_DEBUG_INFO']]],
'ExtraInfoPages' : [ 0x40, ['long']],
'Flags' : [ 0x44, ['unsigned long']],
'StrictFIFO' : [ 0x44, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'FirstFreeHandle' : [ 0x48, ['unsigned long']],
'LastFreeHandleEntry' : [ 0x50, ['pointer64', ['_HANDLE_TABLE_ENTRY']]],
'HandleCount' : [ 0x58, ['unsigned long']],
'NextHandleNeedingPool' : [ 0x5c, ['unsigned long']],
'HandleCountHighWatermark' : [ 0x60, ['unsigned long']],
} ],
'_POOL_TRACKER_BIG_PAGES' : [ 0x18, {
'Va' : [ 0x0, ['pointer64', ['void']]],
'Key' : [ 0x8, ['unsigned long']],
'PoolType' : [ 0xc, ['unsigned long']],
'NumberOfBytes' : [ 0x10, ['unsigned long long']],
} ],
'_MMVAD_FLAGS2' : [ 0x4, {
'FileOffset' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'SecNoChange' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'OneSecured' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'MultipleSecured' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'LongVad' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'ExtendableFile' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'Inherit' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_TEB_ACTIVE_FRAME' : [ 0x18, {
'Flags' : [ 0x0, ['unsigned long']],
'Previous' : [ 0x8, ['pointer64', ['_TEB_ACTIVE_FRAME']]],
'Context' : [ 0x10, ['pointer64', ['_TEB_ACTIVE_FRAME_CONTEXT']]],
} ],
'_FILE_GET_QUOTA_INFORMATION' : [ 0x14, {
'NextEntryOffset' : [ 0x0, ['unsigned long']],
'SidLength' : [ 0x4, ['unsigned long']],
'Sid' : [ 0x8, ['_SID']],
} ],
'_ACCESS_REASONS' : [ 0x80, {
'Data' : [ 0x0, ['array', 32, ['unsigned long']]],
} ],
'_CM_KEY_BODY' : [ 0x58, {
'Type' : [ 0x0, ['unsigned long']],
'KeyControlBlock' : [ 0x8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NotifyBlock' : [ 0x10, ['pointer64', ['_CM_NOTIFY_BLOCK']]],
'ProcessID' : [ 0x18, ['pointer64', ['void']]],
'KeyBodyList' : [ 0x20, ['_LIST_ENTRY']],
'Flags' : [ 0x30, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'HandleTags' : [ 0x30, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'KtmTrans' : [ 0x38, ['pointer64', ['void']]],
'KtmUow' : [ 0x40, ['pointer64', ['_GUID']]],
'ContextListHead' : [ 0x48, ['_LIST_ENTRY']],
} ],
'_KWAIT_BLOCK' : [ 0x30, {
'WaitListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Thread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'Object' : [ 0x18, ['pointer64', ['void']]],
'NextWaitBlock' : [ 0x20, ['pointer64', ['_KWAIT_BLOCK']]],
'WaitKey' : [ 0x28, ['unsigned short']],
'WaitType' : [ 0x2a, ['unsigned char']],
'BlockState' : [ 0x2b, ['unsigned char']],
'SpareLong' : [ 0x2c, ['long']],
} ],
'_MMPTE_PROTOTYPE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Unused0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned long long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned long long')]],
'ProtoAddress' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='long long')]],
} ],
'_WHEA_ERROR_PACKET_FLAGS' : [ 0x4, {
'PreviousError' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HypervisorError' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Simulated' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'PlatformPfaControl' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'PlatformDirectedOffline' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_THERMAL_INFORMATION_EX' : [ 0x78, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0x10, ['_KAFFINITY_EX']],
'SamplingPeriod' : [ 0x38, ['unsigned long']],
'CurrentTemperature' : [ 0x3c, ['unsigned long']],
'PassiveTripPoint' : [ 0x40, ['unsigned long']],
'CriticalTripPoint' : [ 0x44, ['unsigned long']],
'ActiveTripPointCount' : [ 0x48, ['unsigned char']],
'ActiveTripPoint' : [ 0x4c, ['array', 10, ['unsigned long']]],
'S4TransitionTripPoint' : [ 0x74, ['unsigned long']],
} ],
'__unnamed_1c5c' : [ 0x4, {
'FilePointerIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'HardFault' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Spare0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
} ],
'__unnamed_1c5e' : [ 0x4, {
'FilePointerIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'HardFault' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Spare1' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
} ],
'_MM_PAGE_ACCESS_INFO_FLAGS' : [ 0x4, {
'File' : [ 0x0, ['__unnamed_1c5c']],
'Private' : [ 0x0, ['__unnamed_1c5e']],
} ],
'_VI_VERIFIER_ISSUE' : [ 0x20, {
'IssueType' : [ 0x0, ['unsigned long long']],
'Address' : [ 0x8, ['pointer64', ['void']]],
'Parameters' : [ 0x10, ['array', 2, ['unsigned long long']]],
} ],
'_MMSUBSECTION_FLAGS' : [ 0x4, {
'SubsectionAccessed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 6, native_type='unsigned short')]],
'StartingSector4132' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 16, native_type='unsigned short')]],
'SubsectionStatic' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'GlobalMemory' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'DirtyPages' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'Spare' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'SectorEndOffset' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 16, native_type='unsigned short')]],
} ],
'_EXCEPTION_POINTERS' : [ 0x10, {
'ExceptionRecord' : [ 0x0, ['pointer64', ['_EXCEPTION_RECORD']]],
'ContextRecord' : [ 0x8, ['pointer64', ['_CONTEXT']]],
} ],
'_OBJECT_REF_INFO' : [ 0x28, {
'ObjectHeader' : [ 0x0, ['pointer64', ['_OBJECT_HEADER']]],
'NextRef' : [ 0x8, ['pointer64', ['void']]],
'ImageFileName' : [ 0x10, ['array', 16, ['unsigned char']]],
'NextPos' : [ 0x20, ['unsigned short']],
'MaxStacks' : [ 0x22, ['unsigned short']],
'StackInfo' : [ 0x24, ['array', 0, ['_OBJECT_REF_STACK_INFO']]],
} ],
'_HBIN' : [ 0x20, {
'Signature' : [ 0x0, ['unsigned long']],
'FileOffset' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['array', 2, ['unsigned long']]],
'TimeStamp' : [ 0x14, ['_LARGE_INTEGER']],
'Spare' : [ 0x1c, ['unsigned long']],
} ],
'_MI_IMAGE_SECURITY_REFERENCE' : [ 0x18, {
'SecurityContext' : [ 0x0, ['_IMAGE_SECURITY_CONTEXT']],
'DynamicRelocations' : [ 0x8, ['pointer64', ['void']]],
'ReferenceCount' : [ 0x10, ['long']],
} ],
'_HEAP_TAG_ENTRY' : [ 0x48, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long long']],
'TagIndex' : [ 0x10, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x12, ['unsigned short']],
'TagName' : [ 0x14, ['array', 24, ['wchar']]],
} ],
'_SECURITY_QUALITY_OF_SERVICE' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'ImpersonationLevel' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'ContextTrackingMode' : [ 0x8, ['unsigned char']],
'EffectiveOnly' : [ 0x9, ['unsigned char']],
} ],
'__unnamed_1c7f' : [ 0x10, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'Secured' : [ 0x0, ['_MMADDRESS_LIST']],
} ],
'__unnamed_1c85' : [ 0x8, {
'Banked' : [ 0x0, ['pointer64', ['_MMBANKED_SECTION']]],
'ExtendedInfo' : [ 0x0, ['pointer64', ['_MMEXTEND_INFO']]],
} ],
'_MMVAD_LONG' : [ 0x90, {
'u1' : [ 0x0, ['__unnamed_15bf']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_15c2']],
'PushLock' : [ 0x30, ['_EX_PUSH_LOCK']],
'u5' : [ 0x38, ['__unnamed_15c5']],
'u2' : [ 0x40, ['__unnamed_15d2']],
'Subsection' : [ 0x48, ['pointer64', ['_SUBSECTION']]],
'FirstPrototypePte' : [ 0x50, ['pointer64', ['_MMPTE']]],
'LastContiguousPte' : [ 0x58, ['pointer64', ['_MMPTE']]],
'ViewLinks' : [ 0x60, ['_LIST_ENTRY']],
'VadsProcess' : [ 0x70, ['pointer64', ['_EPROCESS']]],
'u3' : [ 0x78, ['__unnamed_1c7f']],
'u4' : [ 0x88, ['__unnamed_1c85']],
} ],
'_MMWSLE_FREE_ENTRY' : [ 0x8, {
'MustBeZero' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PreviousFree' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long long')]],
'NextFree' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_NT_TIB' : [ 0x38, {
'ExceptionList' : [ 0x0, ['pointer64', ['_EXCEPTION_REGISTRATION_RECORD']]],
'StackBase' : [ 0x8, ['pointer64', ['void']]],
'StackLimit' : [ 0x10, ['pointer64', ['void']]],
'SubSystemTib' : [ 0x18, ['pointer64', ['void']]],
'FiberData' : [ 0x20, ['pointer64', ['void']]],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['pointer64', ['void']]],
'Self' : [ 0x30, ['pointer64', ['_NT_TIB']]],
} ],
'_WHEA_REVISION' : [ 0x2, {
'MinorRevision' : [ 0x0, ['unsigned char']],
'MajorRevision' : [ 0x1, ['unsigned char']],
'AsUSHORT' : [ 0x0, ['unsigned short']],
} ],
'_EJOB' : [ 0x1c8, {
'Event' : [ 0x0, ['_KEVENT']],
'JobLinks' : [ 0x18, ['_LIST_ENTRY']],
'ProcessListHead' : [ 0x28, ['_LIST_ENTRY']],
'JobLock' : [ 0x38, ['_ERESOURCE']],
'TotalUserTime' : [ 0xa0, ['_LARGE_INTEGER']],
'TotalKernelTime' : [ 0xa8, ['_LARGE_INTEGER']],
'ThisPeriodTotalUserTime' : [ 0xb0, ['_LARGE_INTEGER']],
'ThisPeriodTotalKernelTime' : [ 0xb8, ['_LARGE_INTEGER']],
'TotalPageFaultCount' : [ 0xc0, ['unsigned long']],
'TotalProcesses' : [ 0xc4, ['unsigned long']],
'ActiveProcesses' : [ 0xc8, ['unsigned long']],
'TotalTerminatedProcesses' : [ 0xcc, ['unsigned long']],
'PerProcessUserTimeLimit' : [ 0xd0, ['_LARGE_INTEGER']],
'PerJobUserTimeLimit' : [ 0xd8, ['_LARGE_INTEGER']],
'MinimumWorkingSetSize' : [ 0xe0, ['unsigned long long']],
'MaximumWorkingSetSize' : [ 0xe8, ['unsigned long long']],
'LimitFlags' : [ 0xf0, ['unsigned long']],
'ActiveProcessLimit' : [ 0xf4, ['unsigned long']],
'Affinity' : [ 0xf8, ['_KAFFINITY_EX']],
'PriorityClass' : [ 0x120, ['unsigned char']],
'AccessState' : [ 0x128, ['pointer64', ['_JOB_ACCESS_STATE']]],
'UIRestrictionsClass' : [ 0x130, ['unsigned long']],
'EndOfJobTimeAction' : [ 0x134, ['unsigned long']],
'CompletionPort' : [ 0x138, ['pointer64', ['void']]],
'CompletionKey' : [ 0x140, ['pointer64', ['void']]],
'SessionId' : [ 0x148, ['unsigned long']],
'SchedulingClass' : [ 0x14c, ['unsigned long']],
'ReadOperationCount' : [ 0x150, ['unsigned long long']],
'WriteOperationCount' : [ 0x158, ['unsigned long long']],
'OtherOperationCount' : [ 0x160, ['unsigned long long']],
'ReadTransferCount' : [ 0x168, ['unsigned long long']],
'WriteTransferCount' : [ 0x170, ['unsigned long long']],
'OtherTransferCount' : [ 0x178, ['unsigned long long']],
'ProcessMemoryLimit' : [ 0x180, ['unsigned long long']],
'JobMemoryLimit' : [ 0x188, ['unsigned long long']],
'PeakProcessMemoryUsed' : [ 0x190, ['unsigned long long']],
'PeakJobMemoryUsed' : [ 0x198, ['unsigned long long']],
'CurrentJobMemoryUsed' : [ 0x1a0, ['unsigned long long']],
'MemoryLimitsLock' : [ 0x1a8, ['_EX_PUSH_LOCK']],
'JobSetLinks' : [ 0x1b0, ['_LIST_ENTRY']],
'MemberLevel' : [ 0x1c0, ['unsigned long']],
'JobFlags' : [ 0x1c4, ['unsigned long']],
} ],
'__unnamed_1c99' : [ 0x4, {
'AsULONG' : [ 0x0, ['unsigned long']],
'AllowScaling' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Disabled' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HvMaxCState' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
} ],
'_PPM_IDLE_STATES' : [ 0xa0, {
'Count' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['__unnamed_1c99']],
'TargetState' : [ 0x8, ['unsigned long']],
'ActualState' : [ 0xc, ['unsigned long']],
'OldState' : [ 0x10, ['unsigned long']],
'TargetProcessors' : [ 0x18, ['_KAFFINITY_EX']],
'State' : [ 0x40, ['array', 1, ['_PPM_IDLE_STATE']]],
} ],
'__unnamed_1ca2' : [ 0x18, {
'EfiInformation' : [ 0x0, ['_EFI_FIRMWARE_INFORMATION']],
'PcatInformation' : [ 0x0, ['_PCAT_FIRMWARE_INFORMATION']],
} ],
'_FIRMWARE_INFORMATION_LOADER_BLOCK' : [ 0x20, {
'FirmwareTypeEfi' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'u' : [ 0x8, ['__unnamed_1ca2']],
} ],
'_HEAP_UCR_DESCRIPTOR' : [ 0x30, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SegmentEntry' : [ 0x10, ['_LIST_ENTRY']],
'Address' : [ 0x20, ['pointer64', ['void']]],
'Size' : [ 0x28, ['unsigned long long']],
} ],
'_ETW_REALTIME_CONSUMER' : [ 0x88, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'ProcessHandle' : [ 0x10, ['pointer64', ['void']]],
'ProcessObject' : [ 0x18, ['pointer64', ['_EPROCESS']]],
'NextNotDelivered' : [ 0x20, ['pointer64', ['void']]],
'RealtimeConnectContext' : [ 0x28, ['pointer64', ['void']]],
'DisconnectEvent' : [ 0x30, ['pointer64', ['_KEVENT']]],
'DataAvailableEvent' : [ 0x38, ['pointer64', ['_KEVENT']]],
'UserBufferCount' : [ 0x40, ['pointer64', ['unsigned long']]],
'UserBufferListHead' : [ 0x48, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
'BuffersLost' : [ 0x50, ['unsigned long']],
'EmptyBuffersCount' : [ 0x54, ['unsigned long']],
'LoggerId' : [ 0x58, ['unsigned long']],
'ShutDownRequested' : [ 0x5c, ['unsigned char']],
'NewBuffersLost' : [ 0x5d, ['unsigned char']],
'Disconnected' : [ 0x5e, ['unsigned char']],
'ReservedBufferSpaceBitMap' : [ 0x60, ['_RTL_BITMAP']],
'ReservedBufferSpace' : [ 0x70, ['pointer64', ['unsigned char']]],
'ReservedBufferSpaceSize' : [ 0x78, ['unsigned long']],
'UserPagesAllocated' : [ 0x7c, ['unsigned long']],
'UserPagesReused' : [ 0x80, ['unsigned long']],
'Wow' : [ 0x84, ['unsigned char']],
} ],
'_POOL_DESCRIPTOR' : [ 0x1140, {
'PoolType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'PagedLock' : [ 0x8, ['_KGUARDED_MUTEX']],
'NonPagedLock' : [ 0x8, ['unsigned long long']],
'RunningAllocs' : [ 0x40, ['long']],
'RunningDeAllocs' : [ 0x44, ['long']],
'TotalBigPages' : [ 0x48, ['long']],
'ThreadsProcessingDeferrals' : [ 0x4c, ['long']],
'TotalBytes' : [ 0x50, ['unsigned long long']],
'PoolIndex' : [ 0x80, ['unsigned long']],
'TotalPages' : [ 0xc0, ['long']],
'PendingFrees' : [ 0x100, ['pointer64', ['pointer64', ['void']]]],
'PendingFreeDepth' : [ 0x108, ['long']],
'ListHeads' : [ 0x140, ['array', 256, ['_LIST_ENTRY']]],
} ],
'_KGATE' : [ 0x18, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_WHEA_ERROR_RECORD_HEADER' : [ 0x80, {
'Signature' : [ 0x0, ['unsigned long']],
'Revision' : [ 0x4, ['_WHEA_REVISION']],
'SignatureEnd' : [ 0x6, ['unsigned long']],
'SectionCount' : [ 0xa, ['unsigned short']],
'Severity' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'ValidBits' : [ 0x10, ['_WHEA_ERROR_RECORD_HEADER_VALIDBITS']],
'Length' : [ 0x14, ['unsigned long']],
'Timestamp' : [ 0x18, ['_WHEA_TIMESTAMP']],
'PlatformId' : [ 0x20, ['_GUID']],
'PartitionId' : [ 0x30, ['_GUID']],
'CreatorId' : [ 0x40, ['_GUID']],
'NotifyType' : [ 0x50, ['_GUID']],
'RecordId' : [ 0x60, ['unsigned long long']],
'Flags' : [ 0x68, ['_WHEA_ERROR_RECORD_HEADER_FLAGS']],
'PersistenceInfo' : [ 0x6c, ['_WHEA_PERSISTENCE_INFO']],
'Reserved' : [ 0x74, ['array', 12, ['unsigned char']]],
} ],
'_ALPC_PROCESS_CONTEXT' : [ 0x20, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'ViewListHead' : [ 0x8, ['_LIST_ENTRY']],
'PagedPoolQuotaCache' : [ 0x18, ['unsigned long long']],
} ],
'_DRIVER_EXTENSION' : [ 0x38, {
'DriverObject' : [ 0x0, ['pointer64', ['_DRIVER_OBJECT']]],
'AddDevice' : [ 0x8, ['pointer64', ['void']]],
'Count' : [ 0x10, ['unsigned long']],
'ServiceKeyName' : [ 0x18, ['_UNICODE_STRING']],
'ClientDriverExtension' : [ 0x28, ['pointer64', ['_IO_CLIENT_EXTENSION']]],
'FsFilterCallbacks' : [ 0x30, ['pointer64', ['_FS_FILTER_CALLBACKS']]],
} ],
'_PRIVILEGE_SET' : [ 0x14, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 1, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_CM_NOTIFY_BLOCK' : [ 0x58, {
'HiveList' : [ 0x0, ['_LIST_ENTRY']],
'PostList' : [ 0x10, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x20, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'KeyBody' : [ 0x28, ['pointer64', ['_CM_KEY_BODY']]],
'Filter' : [ 0x30, ['BitField', dict(start_bit = 0, end_bit = 30, native_type='unsigned long')]],
'WatchTree' : [ 0x30, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'NotifyPending' : [ 0x30, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubjectContext' : [ 0x38, ['_SECURITY_SUBJECT_CONTEXT']],
} ],
'_KINTERRUPT' : [ 0xa0, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'InterruptListEntry' : [ 0x8, ['_LIST_ENTRY']],
'ServiceRoutine' : [ 0x18, ['pointer64', ['void']]],
'MessageServiceRoutine' : [ 0x20, ['pointer64', ['void']]],
'MessageIndex' : [ 0x28, ['unsigned long']],
'ServiceContext' : [ 0x30, ['pointer64', ['void']]],
'SpinLock' : [ 0x38, ['unsigned long long']],
'TickCount' : [ 0x40, ['unsigned long']],
'ActualLock' : [ 0x48, ['pointer64', ['unsigned long long']]],
'DispatchAddress' : [ 0x50, ['pointer64', ['void']]],
'Vector' : [ 0x58, ['unsigned long']],
'Irql' : [ 0x5c, ['unsigned char']],
'SynchronizeIrql' : [ 0x5d, ['unsigned char']],
'FloatingSave' : [ 0x5e, ['unsigned char']],
'Connected' : [ 0x5f, ['unsigned char']],
'Number' : [ 0x60, ['unsigned long']],
'ShareVector' : [ 0x64, ['unsigned char']],
'Pad' : [ 0x65, ['array', 3, ['unsigned char']]],
'Mode' : [ 0x68, ['Enumeration', dict(target = 'long', choices = {0: 'LevelSensitive', 1: 'Latched'})]],
'Polarity' : [ 0x6c, ['Enumeration', dict(target = 'long', choices = {0: 'InterruptPolarityUnknown', 1: 'InterruptActiveHigh', 2: 'InterruptActiveLow'})]],
'ServiceCount' : [ 0x70, ['unsigned long']],
'DispatchCount' : [ 0x74, ['unsigned long']],
'Rsvd1' : [ 0x78, ['unsigned long long']],
'TrapFrame' : [ 0x80, ['pointer64', ['_KTRAP_FRAME']]],
'Reserved' : [ 0x88, ['pointer64', ['void']]],
'DispatchCode' : [ 0x90, ['array', 4, ['unsigned long']]],
} ],
'_HANDLE_TABLE_ENTRY' : [ 0x10, {
'Object' : [ 0x0, ['pointer64', ['void']]],
'ObAttributes' : [ 0x0, ['unsigned long']],
'InfoTable' : [ 0x0, ['pointer64', ['_HANDLE_TABLE_ENTRY_INFO']]],
'Value' : [ 0x0, ['unsigned long long']],
'GrantedAccess' : [ 0x8, ['unsigned long']],
'GrantedAccessIndex' : [ 0x8, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0xa, ['unsigned short']],
'NextFreeTableEntry' : [ 0x8, ['unsigned long']],
} ],
'_SID' : [ 0xc, {
'Revision' : [ 0x0, ['unsigned char']],
'SubAuthorityCount' : [ 0x1, ['unsigned char']],
'IdentifierAuthority' : [ 0x2, ['_SID_IDENTIFIER_AUTHORITY']],
'SubAuthority' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION' : [ 0x30, {
'SecurityAttributeCount' : [ 0x0, ['unsigned long']],
'SecurityAttributesList' : [ 0x8, ['_LIST_ENTRY']],
'WorkingSecurityAttributeCount' : [ 0x18, ['unsigned long']],
'WorkingSecurityAttributesList' : [ 0x20, ['_LIST_ENTRY']],
} ],
'_IMAGE_FILE_HEADER' : [ 0x14, {
'Machine' : [ 0x0, ['unsigned short']],
'NumberOfSections' : [ 0x2, ['unsigned short']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'PointerToSymbolTable' : [ 0x8, ['unsigned long']],
'NumberOfSymbols' : [ 0xc, ['unsigned long']],
'SizeOfOptionalHeader' : [ 0x10, ['unsigned short']],
'Characteristics' : [ 0x12, ['unsigned short']],
} ],
'_MMEXTEND_INFO' : [ 0x10, {
'CommittedSize' : [ 0x0, ['unsigned long long']],
'ReferenceCount' : [ 0x8, ['unsigned long']],
} ],
'_HIVE_LIST_ENTRY' : [ 0x88, {
'FileName' : [ 0x0, ['pointer64', ['unsigned short']]],
'BaseName' : [ 0x8, ['pointer64', ['unsigned short']]],
'RegRootName' : [ 0x10, ['pointer64', ['unsigned short']]],
'CmHive' : [ 0x18, ['pointer64', ['_CMHIVE']]],
'HHiveFlags' : [ 0x20, ['unsigned long']],
'CmHiveFlags' : [ 0x24, ['unsigned long']],
'CmKcbCacheSize' : [ 0x28, ['unsigned long']],
'CmHive2' : [ 0x30, ['pointer64', ['_CMHIVE']]],
'HiveMounted' : [ 0x38, ['unsigned char']],
'ThreadFinished' : [ 0x39, ['unsigned char']],
'ThreadStarted' : [ 0x3a, ['unsigned char']],
'Allocate' : [ 0x3b, ['unsigned char']],
'WinPERequired' : [ 0x3c, ['unsigned char']],
'StartEvent' : [ 0x40, ['_KEVENT']],
'FinishedEvent' : [ 0x58, ['_KEVENT']],
'MountLock' : [ 0x70, ['_KEVENT']],
} ],
'_CONTEXT' : [ 0x4d0, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5Home' : [ 0x20, ['unsigned long long']],
'P6Home' : [ 0x28, ['unsigned long long']],
'ContextFlags' : [ 0x30, ['unsigned long']],
'MxCsr' : [ 0x34, ['unsigned long']],
'SegCs' : [ 0x38, ['unsigned short']],
'SegDs' : [ 0x3a, ['unsigned short']],
'SegEs' : [ 0x3c, ['unsigned short']],
'SegFs' : [ 0x3e, ['unsigned short']],
'SegGs' : [ 0x40, ['unsigned short']],
'SegSs' : [ 0x42, ['unsigned short']],
'EFlags' : [ 0x44, ['unsigned long']],
'Dr0' : [ 0x48, ['unsigned long long']],
'Dr1' : [ 0x50, ['unsigned long long']],
'Dr2' : [ 0x58, ['unsigned long long']],
'Dr3' : [ 0x60, ['unsigned long long']],
'Dr6' : [ 0x68, ['unsigned long long']],
'Dr7' : [ 0x70, ['unsigned long long']],
'Rax' : [ 0x78, ['unsigned long long']],
'Rcx' : [ 0x80, ['unsigned long long']],
'Rdx' : [ 0x88, ['unsigned long long']],
'Rbx' : [ 0x90, ['unsigned long long']],
'Rsp' : [ 0x98, ['unsigned long long']],
'Rbp' : [ 0xa0, ['unsigned long long']],
'Rsi' : [ 0xa8, ['unsigned long long']],
'Rdi' : [ 0xb0, ['unsigned long long']],
'R8' : [ 0xb8, ['unsigned long long']],
'R9' : [ 0xc0, ['unsigned long long']],
'R10' : [ 0xc8, ['unsigned long long']],
'R11' : [ 0xd0, ['unsigned long long']],
'R12' : [ 0xd8, ['unsigned long long']],
'R13' : [ 0xe0, ['unsigned long long']],
'R14' : [ 0xe8, ['unsigned long long']],
'R15' : [ 0xf0, ['unsigned long long']],
'Rip' : [ 0xf8, ['unsigned long long']],
'FltSave' : [ 0x100, ['_XSAVE_FORMAT']],
'Header' : [ 0x100, ['array', 2, ['_M128A']]],
'Legacy' : [ 0x120, ['array', 8, ['_M128A']]],
'Xmm0' : [ 0x1a0, ['_M128A']],
'Xmm1' : [ 0x1b0, ['_M128A']],
'Xmm2' : [ 0x1c0, ['_M128A']],
'Xmm3' : [ 0x1d0, ['_M128A']],
'Xmm4' : [ 0x1e0, ['_M128A']],
'Xmm5' : [ 0x1f0, ['_M128A']],
'Xmm6' : [ 0x200, ['_M128A']],
'Xmm7' : [ 0x210, ['_M128A']],
'Xmm8' : [ 0x220, ['_M128A']],
'Xmm9' : [ 0x230, ['_M128A']],
'Xmm10' : [ 0x240, ['_M128A']],
'Xmm11' : [ 0x250, ['_M128A']],
'Xmm12' : [ 0x260, ['_M128A']],
'Xmm13' : [ 0x270, ['_M128A']],
'Xmm14' : [ 0x280, ['_M128A']],
'Xmm15' : [ 0x290, ['_M128A']],
'VectorRegister' : [ 0x300, ['array', 26, ['_M128A']]],
'VectorControl' : [ 0x4a0, ['unsigned long long']],
'DebugControl' : [ 0x4a8, ['unsigned long long']],
'LastBranchToRip' : [ 0x4b0, ['unsigned long long']],
'LastBranchFromRip' : [ 0x4b8, ['unsigned long long']],
'LastExceptionToRip' : [ 0x4c0, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x4c8, ['unsigned long long']],
} ],
'_ALPC_HANDLE_TABLE' : [ 0x18, {
'Handles' : [ 0x0, ['pointer64', ['_ALPC_HANDLE_ENTRY']]],
'TotalHandles' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned long']],
'Lock' : [ 0x10, ['_EX_PUSH_LOCK']],
} ],
'_MMPTE_HARDWARE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Dirty1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 48, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 52, native_type='unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type='unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_IO_COMPLETION_CONTEXT' : [ 0x10, {
'Port' : [ 0x0, ['pointer64', ['void']]],
'Key' : [ 0x8, ['pointer64', ['void']]],
} ],
'_IOV_FORCED_PENDING_TRACE' : [ 0x200, {
'Irp' : [ 0x0, ['pointer64', ['_IRP']]],
'Thread' : [ 0x8, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x10, ['array', 62, ['pointer64', ['void']]]],
} ],
'_DBGKD_SET_CONTEXT' : [ 0x4, {
'ContextFlags' : [ 0x0, ['unsigned long']],
} ],
'_VI_POOL_ENTRY_INUSE' : [ 0x20, {
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'CallingAddress' : [ 0x8, ['pointer64', ['void']]],
'NumberOfBytes' : [ 0x10, ['unsigned long long']],
'Tag' : [ 0x18, ['unsigned long long']],
} ],
'_ALPC_COMPLETION_LIST' : [ 0x98, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'OwnerProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'Mdl' : [ 0x18, ['pointer64', ['_MDL']]],
'UserVa' : [ 0x20, ['pointer64', ['void']]],
'UserLimit' : [ 0x28, ['pointer64', ['void']]],
'DataUserVa' : [ 0x30, ['pointer64', ['void']]],
'SystemVa' : [ 0x38, ['pointer64', ['void']]],
'TotalSize' : [ 0x40, ['unsigned long long']],
'Header' : [ 0x48, ['pointer64', ['_ALPC_COMPLETION_LIST_HEADER']]],
'List' : [ 0x50, ['pointer64', ['void']]],
'ListSize' : [ 0x58, ['unsigned long long']],
'Bitmap' : [ 0x60, ['pointer64', ['void']]],
'BitmapSize' : [ 0x68, ['unsigned long long']],
'Data' : [ 0x70, ['pointer64', ['void']]],
'DataSize' : [ 0x78, ['unsigned long long']],
'BitmapLimit' : [ 0x80, ['unsigned long']],
'BitmapNextHint' : [ 0x84, ['unsigned long']],
'ConcurrencyCount' : [ 0x88, ['unsigned long']],
'AttributeFlags' : [ 0x8c, ['unsigned long']],
'AttributeSize' : [ 0x90, ['unsigned long']],
} ],
'_INTERFACE' : [ 0x20, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
} ],
'_ACL' : [ 0x8, {
'AclRevision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'AclSize' : [ 0x2, ['unsigned short']],
'AceCount' : [ 0x4, ['unsigned short']],
'Sbz2' : [ 0x6, ['unsigned short']],
} ],
'_LAZY_WRITER' : [ 0x88, {
'ScanDpc' : [ 0x0, ['_KDPC']],
'ScanTimer' : [ 0x40, ['_KTIMER']],
'ScanActive' : [ 0x80, ['unsigned char']],
'OtherWork' : [ 0x81, ['unsigned char']],
'PendingTeardownScan' : [ 0x82, ['unsigned char']],
'PendingPeriodicScan' : [ 0x83, ['unsigned char']],
'PendingLowMemoryScan' : [ 0x84, ['unsigned char']],
'PendingPowerScan' : [ 0x85, ['unsigned char']],
} ],
'_PI_BUS_EXTENSION' : [ 0x70, {
'Flags' : [ 0x0, ['unsigned long']],
'NumberCSNs' : [ 0x4, ['unsigned char']],
'ReadDataPort' : [ 0x8, ['pointer64', ['unsigned char']]],
'DataPortMapped' : [ 0x10, ['unsigned char']],
'AddressPort' : [ 0x18, ['pointer64', ['unsigned char']]],
'AddrPortMapped' : [ 0x20, ['unsigned char']],
'CommandPort' : [ 0x28, ['pointer64', ['unsigned char']]],
'CmdPortMapped' : [ 0x30, ['unsigned char']],
'NextSlotNumber' : [ 0x34, ['unsigned long']],
'DeviceList' : [ 0x38, ['_SINGLE_LIST_ENTRY']],
'CardList' : [ 0x40, ['_SINGLE_LIST_ENTRY']],
'PhysicalBusDevice' : [ 0x48, ['pointer64', ['_DEVICE_OBJECT']]],
'FunctionalBusDevice' : [ 0x50, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x58, ['pointer64', ['_DEVICE_OBJECT']]],
'BusNumber' : [ 0x60, ['unsigned long']],
'SystemPowerState' : [ 0x64, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DevicePowerState' : [ 0x68, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_SID_AND_ATTRIBUTES' : [ 0x10, {
'Sid' : [ 0x0, ['pointer64', ['void']]],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_SID_IDENTIFIER_AUTHORITY' : [ 0x6, {
'Value' : [ 0x0, ['array', 6, ['unsigned char']]],
} ],
'_IO_WORKITEM' : [ 0x40, {
'WorkItem' : [ 0x0, ['_WORK_QUEUE_ITEM']],
'Routine' : [ 0x20, ['pointer64', ['void']]],
'IoObject' : [ 0x28, ['pointer64', ['void']]],
'Context' : [ 0x30, ['pointer64', ['void']]],
'Type' : [ 0x38, ['unsigned long']],
} ],
'_CM_RM' : [ 0x88, {
'RmListEntry' : [ 0x0, ['_LIST_ENTRY']],
'TransactionListHead' : [ 0x10, ['_LIST_ENTRY']],
'TmHandle' : [ 0x20, ['pointer64', ['void']]],
'Tm' : [ 0x28, ['pointer64', ['void']]],
'RmHandle' : [ 0x30, ['pointer64', ['void']]],
'KtmRm' : [ 0x38, ['pointer64', ['void']]],
'RefCount' : [ 0x40, ['unsigned long']],
'ContainerNum' : [ 0x44, ['unsigned long']],
'ContainerSize' : [ 0x48, ['unsigned long long']],
'CmHive' : [ 0x50, ['pointer64', ['_CMHIVE']]],
'LogFileObject' : [ 0x58, ['pointer64', ['void']]],
'MarshallingContext' : [ 0x60, ['pointer64', ['void']]],
'RmFlags' : [ 0x68, ['unsigned long']],
'LogStartStatus1' : [ 0x6c, ['long']],
'LogStartStatus2' : [ 0x70, ['long']],
'BaseLsn' : [ 0x78, ['unsigned long long']],
'RmLock' : [ 0x80, ['pointer64', ['_ERESOURCE']]],
} ],
'_CHILD_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_MMVAD_FLAGS' : [ 0x8, {
'CommitCharge' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 51, native_type='unsigned long long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 51, end_bit = 52, native_type='unsigned long long')]],
'VadType' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 55, native_type='unsigned long long')]],
'MemCommit' : [ 0x0, ['BitField', dict(start_bit = 55, end_bit = 56, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 61, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 61, end_bit = 63, native_type='unsigned long long')]],
'PrivateMemory' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_MMWSLE_HASH' : [ 0x4, {
'Index' : [ 0x0, ['unsigned long']],
} ],
'_UNEXPECTED_INTERRUPT' : [ 0x10, {
'PushImmOp' : [ 0x0, ['unsigned char']],
'PushImm' : [ 0x1, ['unsigned long']],
'PushRbp' : [ 0x5, ['unsigned char']],
'JmpOp' : [ 0x6, ['unsigned char']],
'JmpOffset' : [ 0x7, ['long']],
} ],
'_DBGKD_FILL_MEMORY' : [ 0x10, {
'Address' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned short']],
'PatternLength' : [ 0xe, ['unsigned short']],
} ],
'_HEAP_STOP_ON_VALUES' : [ 0x30, {
'AllocAddress' : [ 0x0, ['unsigned long long']],
'AllocTag' : [ 0x8, ['_HEAP_STOP_ON_TAG']],
'ReAllocAddress' : [ 0x10, ['unsigned long long']],
'ReAllocTag' : [ 0x18, ['_HEAP_STOP_ON_TAG']],
'FreeAddress' : [ 0x20, ['unsigned long long']],
'FreeTag' : [ 0x28, ['_HEAP_STOP_ON_TAG']],
} ],
'_HEAP_PSEUDO_TAG_ENTRY' : [ 0x10, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long long']],
} ],
'_CALL_HASH_ENTRY' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CallersAddress' : [ 0x10, ['pointer64', ['void']]],
'CallersCaller' : [ 0x18, ['pointer64', ['void']]],
'CallCount' : [ 0x20, ['unsigned long']],
} ],
'_VF_TRACKER_STAMP' : [ 0x10, {
'Thread' : [ 0x0, ['pointer64', ['void']]],
'Flags' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'OldIrql' : [ 0x9, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'NewIrql' : [ 0xa, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'Processor' : [ 0xb, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
} ],
'_VI_TRACK_IRQL' : [ 0x38, {
'Thread' : [ 0x0, ['pointer64', ['void']]],
'OldIrql' : [ 0x8, ['unsigned char']],
'NewIrql' : [ 0x9, ['unsigned char']],
'Processor' : [ 0xa, ['unsigned short']],
'TickCount' : [ 0xc, ['unsigned long']],
'StackTrace' : [ 0x10, ['array', 5, ['pointer64', ['void']]]],
} ],
'_PNP_DEVICE_EVENT_ENTRY' : [ 0x90, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Argument' : [ 0x10, ['unsigned long']],
'CallerEvent' : [ 0x18, ['pointer64', ['_KEVENT']]],
'Callback' : [ 0x20, ['pointer64', ['void']]],
'Context' : [ 0x28, ['pointer64', ['void']]],
'VetoType' : [ 0x30, ['pointer64', ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]]],
'VetoName' : [ 0x38, ['pointer64', ['_UNICODE_STRING']]],
'Data' : [ 0x40, ['_PLUGPLAY_EVENT_BLOCK']],
} ],
'_HEAP_STOP_ON_TAG' : [ 0x4, {
'HeapAndTagIndex' : [ 0x0, ['unsigned long']],
'TagIndex' : [ 0x0, ['unsigned short']],
'HeapIndex' : [ 0x2, ['unsigned short']],
} ],
'_DBGKD_GET_CONTEXT' : [ 0x4, {
'Unused' : [ 0x0, ['unsigned long']],
} ],
'_TEB_ACTIVE_FRAME_CONTEXT' : [ 0x10, {
'Flags' : [ 0x0, ['unsigned long']],
'FrameName' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_NLS_DATA_BLOCK' : [ 0x18, {
'AnsiCodePageData' : [ 0x0, ['pointer64', ['void']]],
'OemCodePageData' : [ 0x8, ['pointer64', ['void']]],
'UnicodeCaseTableData' : [ 0x10, ['pointer64', ['void']]],
} ],
'_ALIGNED_AFFINITY_SUMMARY' : [ 0x80, {
'CpuSet' : [ 0x0, ['_KAFFINITY_EX']],
'SMTSet' : [ 0x28, ['_KAFFINITY_EX']],
} ],
'_XSTATE_CONFIGURATION' : [ 0x210, {
'EnabledFeatures' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'OptimizedSave' : [ 0xc, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Features' : [ 0x10, ['array', 64, ['_XSTATE_FEATURE']]],
} ],
'_CM_KEY_SECURITY_CACHE' : [ 0x38, {
'Cell' : [ 0x0, ['unsigned long']],
'ConvKey' : [ 0x4, ['unsigned long']],
'List' : [ 0x8, ['_LIST_ENTRY']],
'DescriptorLength' : [ 0x18, ['unsigned long']],
'RealRefCount' : [ 0x1c, ['unsigned long']],
'Descriptor' : [ 0x20, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_MMPTE_SOFTWARE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'UsedPageTableEntries' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 22, native_type='unsigned long long')]],
'InStore' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 32, native_type='unsigned long long')]],
'PageFileHigh' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_RTL_UMS_CONTEXT' : [ 0x540, {
'Link' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Context' : [ 0x10, ['_CONTEXT']],
'Teb' : [ 0x4e0, ['pointer64', ['void']]],
'UserContext' : [ 0x4e8, ['pointer64', ['void']]],
'ScheduledThread' : [ 0x4f0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'HasQuantumReq' : [ 0x4f0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HasAffinityReq' : [ 0x4f0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'HasPriorityReq' : [ 0x4f0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Suspended' : [ 0x4f0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'VolatileContext' : [ 0x4f0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Terminated' : [ 0x4f0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'DebugActive' : [ 0x4f0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'RunningOnSelfThread' : [ 0x4f0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'DenyRunningOnSelfThread' : [ 0x4f0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ReservedFlags' : [ 0x4f0, ['BitField', dict(start_bit = 10, end_bit = 32, native_type='unsigned long')]],
'Flags' : [ 0x4f0, ['long']],
'KernelUpdateLock' : [ 0x4f8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Reserved' : [ 0x4f8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'PrimaryClientID' : [ 0x4f8, ['BitField', dict(start_bit = 2, end_bit = 64, native_type='unsigned long long')]],
'ContextLock' : [ 0x4f8, ['unsigned long long']],
'QuantumValue' : [ 0x500, ['unsigned long long']],
'AffinityMask' : [ 0x508, ['_GROUP_AFFINITY']],
'Priority' : [ 0x518, ['long']],
'PrimaryUmsContext' : [ 0x520, ['pointer64', ['_RTL_UMS_CONTEXT']]],
'SwitchCount' : [ 0x528, ['unsigned long']],
'KernelYieldCount' : [ 0x52c, ['unsigned long']],
'MixedYieldCount' : [ 0x530, ['unsigned long']],
'YieldCount' : [ 0x534, ['unsigned long']],
} ],
'_CM_RESOURCE_LIST' : [ 0x28, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['array', 1, ['_CM_FULL_RESOURCE_DESCRIPTOR']]],
} ],
'_POOL_TRACKER_TABLE' : [ 0x28, {
'Key' : [ 0x0, ['long']],
'NonPagedAllocs' : [ 0x4, ['long']],
'NonPagedFrees' : [ 0x8, ['long']],
'NonPagedBytes' : [ 0x10, ['unsigned long long']],
'PagedAllocs' : [ 0x18, ['unsigned long']],
'PagedFrees' : [ 0x1c, ['unsigned long']],
'PagedBytes' : [ 0x20, ['unsigned long long']],
} ],
'_MM_SUBSECTION_AVL_TABLE' : [ 0x38, {
'BalancedRoot' : [ 0x0, ['_MMSUBSECTION_NODE']],
'DepthOfTree' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long long')]],
'Unused' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long long')]],
'NumberGenericTableElements' : [ 0x28, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='unsigned long long')]],
'NodeHint' : [ 0x30, ['pointer64', ['void']]],
} ],
'_HANDLE_TABLE_ENTRY_INFO' : [ 0x4, {
'AuditMask' : [ 0x0, ['unsigned long']],
} ],
'_CM_FULL_RESOURCE_DESCRIPTOR' : [ 0x24, {
'InterfaceType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x4, ['unsigned long']],
'PartialResourceList' : [ 0x8, ['_CM_PARTIAL_RESOURCE_LIST']],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS' : [ 0x4, {
'Primary' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ContainmentWarning' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Reset' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ThresholdExceeded' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ResourceNotAvailable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'LatentError' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_WMI_BUFFER_HEADER' : [ 0x48, {
'BufferSize' : [ 0x0, ['unsigned long']],
'SavedOffset' : [ 0x4, ['unsigned long']],
'CurrentOffset' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['long']],
'TimeStamp' : [ 0x10, ['_LARGE_INTEGER']],
'SequenceNumber' : [ 0x18, ['long long']],
'Padding0' : [ 0x20, ['array', 2, ['unsigned long']]],
'SlistEntry' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'NextBuffer' : [ 0x20, ['pointer64', ['_WMI_BUFFER_HEADER']]],
'ClientContext' : [ 0x28, ['_ETW_BUFFER_CONTEXT']],
'State' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'EtwBufferStateFree', 1: 'EtwBufferStateGeneralLogging', 2: 'EtwBufferStateCSwitch', 3: 'EtwBufferStateFlush', 4: 'EtwBufferStateMaximum'})]],
'Offset' : [ 0x30, ['unsigned long']],
'BufferFlag' : [ 0x34, ['unsigned short']],
'BufferType' : [ 0x36, ['unsigned short']],
'Padding1' : [ 0x38, ['array', 4, ['unsigned long']]],
'ReferenceTime' : [ 0x38, ['_ETW_REF_CLOCK']],
'GlobalEntry' : [ 0x38, ['_LIST_ENTRY']],
'Pointer0' : [ 0x38, ['pointer64', ['void']]],
'Pointer1' : [ 0x40, ['pointer64', ['void']]],
} ],
'_POWER_SEQUENCE' : [ 0xc, {
'SequenceD1' : [ 0x0, ['unsigned long']],
'SequenceD2' : [ 0x4, ['unsigned long']],
'SequenceD3' : [ 0x8, ['unsigned long']],
} ],
'_PROCESSOR_POWER_STATE' : [ 0x100, {
'IdleStates' : [ 0x0, ['pointer64', ['_PPM_IDLE_STATES']]],
'IdleTimeLast' : [ 0x8, ['unsigned long long']],
'IdleTimeTotal' : [ 0x10, ['unsigned long long']],
'IdleTimeEntry' : [ 0x18, ['unsigned long long']],
'IdleAccounting' : [ 0x20, ['pointer64', ['_PROC_IDLE_ACCOUNTING']]],
'Hypervisor' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'ProcHypervisorNone', 1: 'ProcHypervisorPresent', 2: 'ProcHypervisorPower'})]],
'PerfHistoryTotal' : [ 0x2c, ['unsigned long']],
'ThermalConstraint' : [ 0x30, ['unsigned char']],
'PerfHistoryCount' : [ 0x31, ['unsigned char']],
'PerfHistorySlot' : [ 0x32, ['unsigned char']],
'Reserved' : [ 0x33, ['unsigned char']],
'LastSysTime' : [ 0x34, ['unsigned long']],
'WmiDispatchPtr' : [ 0x38, ['unsigned long long']],
'WmiInterfaceEnabled' : [ 0x40, ['long']],
'FFHThrottleStateInfo' : [ 0x48, ['_PPM_FFH_THROTTLE_STATE_INFO']],
'PerfActionDpc' : [ 0x68, ['_KDPC']],
'PerfActionMask' : [ 0xa8, ['long']],
'IdleCheck' : [ 0xb0, ['_PROC_IDLE_SNAP']],
'PerfCheck' : [ 0xc0, ['_PROC_IDLE_SNAP']],
'Domain' : [ 0xd0, ['pointer64', ['_PROC_PERF_DOMAIN']]],
'PerfConstraint' : [ 0xd8, ['pointer64', ['_PROC_PERF_CONSTRAINT']]],
'Load' : [ 0xe0, ['pointer64', ['_PROC_PERF_LOAD']]],
'PerfHistory' : [ 0xe8, ['pointer64', ['_PROC_HISTORY_ENTRY']]],
'Utility' : [ 0xf0, ['unsigned long']],
'OverUtilizedHistory' : [ 0xf4, ['unsigned long']],
'AffinityCount' : [ 0xf8, ['unsigned long']],
'AffinityHistory' : [ 0xfc, ['unsigned long']],
} ],
'_OBJECT_REF_STACK_INFO' : [ 0xc, {
'Sequence' : [ 0x0, ['unsigned long']],
'Index' : [ 0x4, ['unsigned short']],
'NumTraces' : [ 0x6, ['unsigned short']],
'Tag' : [ 0x8, ['unsigned long']],
} ],
'_PPC_DBGKD_CONTROL_SET' : [ 0xc, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long']],
'CurrentSymbolEnd' : [ 0x8, ['unsigned long']],
} ],
'_MMPFNENTRY' : [ 0x2, {
'PageLocation' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'WriteInProgress' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Modified' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ReadInProgress' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'CacheAttribute' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'Priority' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'Rom' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'InPageError' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'KernelStack' : [ 0x1, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'RemovalRequested' : [ 0x1, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'ParityError' : [ 0x1, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'_SEGMENT_OBJECT' : [ 0x40, {
'BaseAddress' : [ 0x0, ['pointer64', ['void']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SizeOfSegment' : [ 0x10, ['_LARGE_INTEGER']],
'NonExtendedPtes' : [ 0x18, ['unsigned long']],
'ImageCommitment' : [ 0x1c, ['unsigned long']],
'ControlArea' : [ 0x20, ['pointer64', ['_CONTROL_AREA']]],
'Subsection' : [ 0x28, ['pointer64', ['_SUBSECTION']]],
'MmSectionFlags' : [ 0x30, ['pointer64', ['_MMSECTION_FLAGS']]],
'MmSubSectionFlags' : [ 0x38, ['pointer64', ['_MMSUBSECTION_FLAGS']]],
} ],
'_PCW_CALLBACK_INFORMATION' : [ 0x28, {
'AddCounter' : [ 0x0, ['_PCW_COUNTER_INFORMATION']],
'RemoveCounter' : [ 0x0, ['_PCW_COUNTER_INFORMATION']],
'EnumerateInstances' : [ 0x0, ['_PCW_MASK_INFORMATION']],
'CollectData' : [ 0x0, ['_PCW_MASK_INFORMATION']],
} ],
'_TOKEN_SOURCE' : [ 0x10, {
'SourceName' : [ 0x0, ['array', 8, ['unsigned char']]],
'SourceIdentifier' : [ 0x8, ['_LUID']],
} ],
'_DBGKD_QUERY_MEMORY' : [ 0x18, {
'Address' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['unsigned long long']],
'AddressSpace' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
} ],
'DOCK_INTERFACE' : [ 0x30, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
'ProfileDepartureSetMode' : [ 0x20, ['pointer64', ['void']]],
'ProfileDepartureUpdate' : [ 0x28, ['pointer64', ['void']]],
} ],
'CMP_OFFSET_ARRAY' : [ 0x18, {
'FileOffset' : [ 0x0, ['unsigned long']],
'DataBuffer' : [ 0x8, ['pointer64', ['void']]],
'DataLength' : [ 0x10, ['unsigned long']],
} ],
'_MMSUPPORT_FLAGS' : [ 0x4, {
'WorkingSetType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'ModwriterAttached' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'TrimHard' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'MaximumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'ForceTrim' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'MinimumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'SessionMaster' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'TrimmerState' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned char')]],
'Reserved' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'PageStealers' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'MemoryPriority' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'WsleDeleted' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'VmExiting' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ExpansionFailed' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Available' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
} ],
'_IMAGE_OPTIONAL_HEADER64' : [ 0xf0, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'ImageBase' : [ 0x18, ['unsigned long long']],
'SectionAlignment' : [ 0x20, ['unsigned long']],
'FileAlignment' : [ 0x24, ['unsigned long']],
'MajorOperatingSystemVersion' : [ 0x28, ['unsigned short']],
'MinorOperatingSystemVersion' : [ 0x2a, ['unsigned short']],
'MajorImageVersion' : [ 0x2c, ['unsigned short']],
'MinorImageVersion' : [ 0x2e, ['unsigned short']],
'MajorSubsystemVersion' : [ 0x30, ['unsigned short']],
'MinorSubsystemVersion' : [ 0x32, ['unsigned short']],
'Win32VersionValue' : [ 0x34, ['unsigned long']],
'SizeOfImage' : [ 0x38, ['unsigned long']],
'SizeOfHeaders' : [ 0x3c, ['unsigned long']],
'CheckSum' : [ 0x40, ['unsigned long']],
'Subsystem' : [ 0x44, ['unsigned short']],
'DllCharacteristics' : [ 0x46, ['unsigned short']],
'SizeOfStackReserve' : [ 0x48, ['unsigned long long']],
'SizeOfStackCommit' : [ 0x50, ['unsigned long long']],
'SizeOfHeapReserve' : [ 0x58, ['unsigned long long']],
'SizeOfHeapCommit' : [ 0x60, ['unsigned long long']],
'LoaderFlags' : [ 0x68, ['unsigned long']],
'NumberOfRvaAndSizes' : [ 0x6c, ['unsigned long']],
'DataDirectory' : [ 0x70, ['array', 16, ['_IMAGE_DATA_DIRECTORY']]],
} ],
'_ALPC_COMPLETION_PACKET_LOOKASIDE' : [ 0x50, {
'Lock' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'ActiveCount' : [ 0xc, ['unsigned long']],
'PendingNullCount' : [ 0x10, ['unsigned long']],
'PendingCheckCompletionListCount' : [ 0x14, ['unsigned long']],
'PendingDelete' : [ 0x18, ['unsigned long']],
'FreeListHead' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'CompletionPort' : [ 0x28, ['pointer64', ['void']]],
'CompletionKey' : [ 0x30, ['pointer64', ['void']]],
'Entry' : [ 0x38, ['array', 1, ['_ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY']]],
} ],
'_TERMINATION_PORT' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_TERMINATION_PORT']]],
'Port' : [ 0x8, ['pointer64', ['void']]],
} ],
'_MEMORY_ALLOCATION_DESCRIPTOR' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'MemoryType' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'LoaderExceptionBlock', 1: 'LoaderSystemBlock', 2: 'LoaderFree', 3: 'LoaderBad', 4: 'LoaderLoadedProgram', 5: 'LoaderFirmwareTemporary', 6: 'LoaderFirmwarePermanent', 7: 'LoaderOsloaderHeap', 8: 'LoaderOsloaderStack', 9: 'LoaderSystemCode', 10: 'LoaderHalCode', 11: 'LoaderBootDriver', 12: 'LoaderConsoleInDriver', 13: 'LoaderConsoleOutDriver', 14: 'LoaderStartupDpcStack', 15: 'LoaderStartupKernelStack', 16: 'LoaderStartupPanicStack', 17: 'LoaderStartupPcrPage', 18: 'LoaderStartupPdrPage', 19: 'LoaderRegistryData', 20: 'LoaderMemoryData', 21: 'LoaderNlsData', 22: 'LoaderSpecialMemory', 23: 'LoaderBBTMemory', 24: 'LoaderReserve', 25: 'LoaderXIPRom', 26: 'LoaderHALCachedMemory', 27: 'LoaderLargePageFiller', 28: 'LoaderErrorLogMemory', 29: 'LoaderMaximum'})]],
'BasePage' : [ 0x18, ['unsigned long long']],
'PageCount' : [ 0x20, ['unsigned long long']],
} ],
'_CM_INTENT_LOCK' : [ 0x10, {
'OwnerCount' : [ 0x0, ['unsigned long']],
'OwnerTable' : [ 0x8, ['pointer64', ['pointer64', ['_CM_KCB_UOW']]]],
} ],
'_PROC_IDLE_ACCOUNTING' : [ 0x2c0, {
'StateCount' : [ 0x0, ['unsigned long']],
'TotalTransitions' : [ 0x4, ['unsigned long']],
'ResetCount' : [ 0x8, ['unsigned long']],
'StartTime' : [ 0x10, ['unsigned long long']],
'BucketLimits' : [ 0x18, ['array', 16, ['unsigned long long']]],
'State' : [ 0x98, ['array', 1, ['_PROC_IDLE_STATE_ACCOUNTING']]],
} ],
'_THERMAL_INFORMATION' : [ 0x58, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0x10, ['unsigned long long']],
'SamplingPeriod' : [ 0x18, ['unsigned long']],
'CurrentTemperature' : [ 0x1c, ['unsigned long']],
'PassiveTripPoint' : [ 0x20, ['unsigned long']],
'CriticalTripPoint' : [ 0x24, ['unsigned long']],
'ActiveTripPointCount' : [ 0x28, ['unsigned char']],
'ActiveTripPoint' : [ 0x2c, ['array', 10, ['unsigned long']]],
} ],
'_MAPPED_FILE_SEGMENT' : [ 0x30, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SegmentFlags' : [ 0xc, ['_SEGMENT_FLAGS']],
'NumberOfCommittedPages' : [ 0x10, ['unsigned long long']],
'SizeOfSegment' : [ 0x18, ['unsigned long long']],
'ExtendInfo' : [ 0x20, ['pointer64', ['_MMEXTEND_INFO']]],
'BasedAddress' : [ 0x20, ['pointer64', ['void']]],
'SegmentLock' : [ 0x28, ['_EX_PUSH_LOCK']],
} ],
'_TEB64' : [ 0x1818, {
'NtTib' : [ 0x0, ['_NT_TIB64']],
'EnvironmentPointer' : [ 0x38, ['unsigned long long']],
'ClientId' : [ 0x40, ['_CLIENT_ID64']],
'ActiveRpcHandle' : [ 0x50, ['unsigned long long']],
'ThreadLocalStoragePointer' : [ 0x58, ['unsigned long long']],
'ProcessEnvironmentBlock' : [ 0x60, ['unsigned long long']],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['unsigned long long']],
'Win32ThreadInfo' : [ 0x78, ['unsigned long long']],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['unsigned long long']],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'SystemReserved1' : [ 0x110, ['array', 54, ['unsigned long long']]],
'ExceptionCode' : [ 0x2c0, ['long']],
'ActivationContextStackPointer' : [ 0x2c8, ['unsigned long long']],
'SpareBytes' : [ 0x2d0, ['array', 24, ['unsigned char']]],
'TxFsContext' : [ 0x2e8, ['unsigned long']],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH64']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID64']],
'GdiCachedProcessHandle' : [ 0x7e8, ['unsigned long long']],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['unsigned long long']],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['unsigned long long']]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['unsigned long long']],
'glSectionInfo' : [ 0x1228, ['unsigned long long']],
'glSection' : [ 0x1230, ['unsigned long long']],
'glTable' : [ 0x1238, ['unsigned long long']],
'glCurrentRC' : [ 0x1240, ['unsigned long long']],
'glContext' : [ 0x1248, ['unsigned long long']],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'StaticUnicodeString' : [ 0x1258, ['_STRING64']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0x1478, ['unsigned long long']],
'TlsSlots' : [ 0x1480, ['array', 64, ['unsigned long long']]],
'TlsLinks' : [ 0x1680, ['LIST_ENTRY64']],
'Vdm' : [ 0x1690, ['unsigned long long']],
'ReservedForNtRpc' : [ 0x1698, ['unsigned long long']],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['unsigned long long']]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Instrumentation' : [ 0x16b8, ['array', 11, ['unsigned long long']]],
'ActivityId' : [ 0x1710, ['_GUID']],
'SubProcessTag' : [ 0x1720, ['unsigned long long']],
'EtwLocalData' : [ 0x1728, ['unsigned long long']],
'EtwTraceData' : [ 0x1730, ['unsigned long long']],
'WinSockData' : [ 0x1738, ['unsigned long long']],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'CurrentIdealProcessor' : [ 0x1744, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0x1744, ['unsigned long']],
'ReservedPad0' : [ 0x1744, ['unsigned char']],
'ReservedPad1' : [ 0x1745, ['unsigned char']],
'ReservedPad2' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'ReservedForPerf' : [ 0x1750, ['unsigned long long']],
'ReservedForOle' : [ 0x1758, ['unsigned long long']],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'SavedPriorityState' : [ 0x1768, ['unsigned long long']],
'SoftPatchPtr1' : [ 0x1770, ['unsigned long long']],
'ThreadPoolData' : [ 0x1778, ['unsigned long long']],
'TlsExpansionSlots' : [ 0x1780, ['unsigned long long']],
'DeallocationBStore' : [ 0x1788, ['unsigned long long']],
'BStoreLimit' : [ 0x1790, ['unsigned long long']],
'MuiGeneration' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['unsigned long long']],
'pShimData' : [ 0x17a8, ['unsigned long long']],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned long']],
'CurrentTransactionHandle' : [ 0x17b8, ['unsigned long long']],
'ActiveFrame' : [ 0x17c0, ['unsigned long long']],
'FlsData' : [ 0x17c8, ['unsigned long long']],
'PreferredLanguages' : [ 0x17d0, ['unsigned long long']],
'UserPrefLanguages' : [ 0x17d8, ['unsigned long long']],
'MergedPrefLanguages' : [ 0x17e0, ['unsigned long long']],
'MuiImpersonation' : [ 0x17e8, ['unsigned long']],
'CrossTebFlags' : [ 0x17ec, ['unsigned short']],
'SpareCrossTebBits' : [ 0x17ec, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0x17ee, ['unsigned short']],
'SafeThunkCall' : [ 0x17ee, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0x17ee, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0x17ee, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0x17ee, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0x17ee, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0x17ee, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0x17ee, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0x17ee, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0x17ee, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0x17ee, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0x17ee, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0x17ee, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0x17f0, ['unsigned long long']],
'TxnScopeExitCallback' : [ 0x17f8, ['unsigned long long']],
'TxnScopeContext' : [ 0x1800, ['unsigned long long']],
'LockCount' : [ 0x1808, ['unsigned long']],
'SpareUlong0' : [ 0x180c, ['unsigned long']],
'ResourceRetValue' : [ 0x1810, ['unsigned long long']],
} ],
'_GDI_TEB_BATCH' : [ 0x4e8, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'_MM_DRIVER_VERIFIER_DATA' : [ 0xa0, {
'Level' : [ 0x0, ['unsigned long']],
'RaiseIrqls' : [ 0x4, ['unsigned long']],
'AcquireSpinLocks' : [ 0x8, ['unsigned long']],
'SynchronizeExecutions' : [ 0xc, ['unsigned long']],
'AllocationsAttempted' : [ 0x10, ['unsigned long']],
'AllocationsSucceeded' : [ 0x14, ['unsigned long']],
'AllocationsSucceededSpecialPool' : [ 0x18, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x1c, ['unsigned long']],
'TrimRequests' : [ 0x20, ['unsigned long']],
'Trims' : [ 0x24, ['unsigned long']],
'AllocationsFailed' : [ 0x28, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x2c, ['unsigned long']],
'Loads' : [ 0x30, ['unsigned long']],
'Unloads' : [ 0x34, ['unsigned long']],
'UnTrackedPool' : [ 0x38, ['unsigned long']],
'UserTrims' : [ 0x3c, ['unsigned long']],
'CurrentPagedPoolAllocations' : [ 0x40, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x44, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x48, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x4c, ['unsigned long']],
'PagedBytes' : [ 0x50, ['unsigned long long']],
'NonPagedBytes' : [ 0x58, ['unsigned long long']],
'PeakPagedBytes' : [ 0x60, ['unsigned long long']],
'PeakNonPagedBytes' : [ 0x68, ['unsigned long long']],
'BurstAllocationsFailedDeliberately' : [ 0x70, ['unsigned long']],
'SessionTrims' : [ 0x74, ['unsigned long']],
'OptionChanges' : [ 0x78, ['unsigned long']],
'VerifyMode' : [ 0x7c, ['unsigned long']],
'PreviousBucketName' : [ 0x80, ['_UNICODE_STRING']],
'ActivityCounter' : [ 0x90, ['unsigned long']],
'PreviousActivityCounter' : [ 0x94, ['unsigned long']],
'WorkerTrimRequests' : [ 0x98, ['unsigned long']],
} ],
'_VI_FAULT_TRACE' : [ 0x48, {
'Thread' : [ 0x0, ['pointer64', ['_ETHREAD']]],
'StackTrace' : [ 0x8, ['array', 8, ['pointer64', ['void']]]],
} ],
'_GENERIC_MAPPING' : [ 0x10, {
'GenericRead' : [ 0x0, ['unsigned long']],
'GenericWrite' : [ 0x4, ['unsigned long']],
'GenericExecute' : [ 0x8, ['unsigned long']],
'GenericAll' : [ 0xc, ['unsigned long']],
} ],
'_OBJECT_HANDLE_COUNT_DATABASE' : [ 0x18, {
'CountEntries' : [ 0x0, ['unsigned long']],
'HandleCountEntries' : [ 0x8, ['array', 1, ['_OBJECT_HANDLE_COUNT_ENTRY']]],
} ],
'_OWNER_ENTRY' : [ 0x10, {
'OwnerThread' : [ 0x0, ['unsigned long long']],
'IoPriorityBoosted' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OwnerReferenced' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'OwnerCount' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
'TableSize' : [ 0x8, ['unsigned long']],
} ],
'_MI_SECTION_CREATION_GATE' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_MI_SECTION_CREATION_GATE']]],
'Gate' : [ 0x8, ['_KGATE']],
} ],
'_ETIMER' : [ 0x110, {
'KeTimer' : [ 0x0, ['_KTIMER']],
'TimerApc' : [ 0x40, ['_KAPC']],
'TimerDpc' : [ 0x98, ['_KDPC']],
'ActiveTimerListEntry' : [ 0xd8, ['_LIST_ENTRY']],
'Lock' : [ 0xe8, ['unsigned long long']],
'Period' : [ 0xf0, ['long']],
'ApcAssociated' : [ 0xf4, ['unsigned char']],
'WakeReason' : [ 0xf8, ['pointer64', ['_DIAGNOSTIC_CONTEXT']]],
'WakeTimerListEntry' : [ 0x100, ['_LIST_ENTRY']],
} ],
'_FREE_DISPLAY' : [ 0x18, {
'RealVectorSize' : [ 0x0, ['unsigned long']],
'Display' : [ 0x8, ['_RTL_BITMAP']],
} ],
'_POOL_BLOCK_HEAD' : [ 0x20, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'List' : [ 0x10, ['_LIST_ENTRY']],
} ],
'__unnamed_1e01' : [ 0x8, {
'Flags' : [ 0x0, ['_MMSECURE_FLAGS']],
'StartVa' : [ 0x0, ['pointer64', ['void']]],
} ],
'_MMADDRESS_LIST' : [ 0x10, {
'u1' : [ 0x0, ['__unnamed_1e01']],
'EndVa' : [ 0x8, ['pointer64', ['void']]],
} ],
'_XSTATE_FEATURE' : [ 0x8, {
'Offset' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_ARBITER_INSTANCE' : [ 0x698, {
'Signature' : [ 0x0, ['unsigned long']],
'MutexEvent' : [ 0x8, ['pointer64', ['_KEVENT']]],
'Name' : [ 0x10, ['pointer64', ['unsigned short']]],
'OrderingName' : [ 0x18, ['pointer64', ['unsigned short']]],
'ResourceType' : [ 0x20, ['long']],
'Allocation' : [ 0x28, ['pointer64', ['_RTL_RANGE_LIST']]],
'PossibleAllocation' : [ 0x30, ['pointer64', ['_RTL_RANGE_LIST']]],
'OrderingList' : [ 0x38, ['_ARBITER_ORDERING_LIST']],
'ReservedList' : [ 0x48, ['_ARBITER_ORDERING_LIST']],
'ReferenceCount' : [ 0x58, ['long']],
'Interface' : [ 0x60, ['pointer64', ['_ARBITER_INTERFACE']]],
'AllocationStackMaxSize' : [ 0x68, ['unsigned long']],
'AllocationStack' : [ 0x70, ['pointer64', ['_ARBITER_ALLOCATION_STATE']]],
'UnpackRequirement' : [ 0x78, ['pointer64', ['void']]],
'PackResource' : [ 0x80, ['pointer64', ['void']]],
'UnpackResource' : [ 0x88, ['pointer64', ['void']]],
'ScoreRequirement' : [ 0x90, ['pointer64', ['void']]],
'TestAllocation' : [ 0x98, ['pointer64', ['void']]],
'RetestAllocation' : [ 0xa0, ['pointer64', ['void']]],
'CommitAllocation' : [ 0xa8, ['pointer64', ['void']]],
'RollbackAllocation' : [ 0xb0, ['pointer64', ['void']]],
'BootAllocation' : [ 0xb8, ['pointer64', ['void']]],
'QueryArbitrate' : [ 0xc0, ['pointer64', ['void']]],
'QueryConflict' : [ 0xc8, ['pointer64', ['void']]],
'AddReserved' : [ 0xd0, ['pointer64', ['void']]],
'StartArbiter' : [ 0xd8, ['pointer64', ['void']]],
'PreprocessEntry' : [ 0xe0, ['pointer64', ['void']]],
'AllocateEntry' : [ 0xe8, ['pointer64', ['void']]],
'GetNextAllocationRange' : [ 0xf0, ['pointer64', ['void']]],
'FindSuitableRange' : [ 0xf8, ['pointer64', ['void']]],
'AddAllocation' : [ 0x100, ['pointer64', ['void']]],
'BacktrackAllocation' : [ 0x108, ['pointer64', ['void']]],
'OverrideConflict' : [ 0x110, ['pointer64', ['void']]],
'InitializeRangeList' : [ 0x118, ['pointer64', ['void']]],
'TransactionInProgress' : [ 0x120, ['unsigned char']],
'TransactionEvent' : [ 0x128, ['pointer64', ['_KEVENT']]],
'Extension' : [ 0x130, ['pointer64', ['void']]],
'BusDeviceObject' : [ 0x138, ['pointer64', ['_DEVICE_OBJECT']]],
'ConflictCallbackContext' : [ 0x140, ['pointer64', ['void']]],
'ConflictCallback' : [ 0x148, ['pointer64', ['void']]],
'PdoDescriptionString' : [ 0x150, ['array', 336, ['wchar']]],
'PdoSymbolicNameString' : [ 0x3f0, ['array', 672, ['unsigned char']]],
'PdoAddressString' : [ 0x690, ['array', 1, ['wchar']]],
} ],
'_KDEVICE_QUEUE_ENTRY' : [ 0x18, {
'DeviceListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SortKey' : [ 0x10, ['unsigned long']],
'Inserted' : [ 0x14, ['unsigned char']],
} ],
'__unnamed_1e5a' : [ 0x4, {
'UserData' : [ 0x0, ['unsigned long']],
'Next' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1e5c' : [ 0x8, {
'Last' : [ 0x0, ['unsigned long']],
'u' : [ 0x4, ['__unnamed_1e5a']],
} ],
'__unnamed_1e5e' : [ 0x4, {
'u' : [ 0x0, ['__unnamed_1e5a']],
} ],
'__unnamed_1e60' : [ 0x8, {
'OldCell' : [ 0x0, ['__unnamed_1e5c']],
'NewCell' : [ 0x0, ['__unnamed_1e5e']],
} ],
'_HCELL' : [ 0xc, {
'Size' : [ 0x0, ['long']],
'u' : [ 0x4, ['__unnamed_1e60']],
} ],
'_HMAP_TABLE' : [ 0x4000, {
'Table' : [ 0x0, ['array', 512, ['_HMAP_ENTRY']]],
} ],
'_PROC_PERF_CONSTRAINT' : [ 0x30, {
'Prcb' : [ 0x0, ['pointer64', ['_KPRCB']]],
'PerfContext' : [ 0x8, ['unsigned long long']],
'PercentageCap' : [ 0x10, ['unsigned long']],
'ThermalCap' : [ 0x14, ['unsigned long']],
'TargetFrequency' : [ 0x18, ['unsigned long']],
'AcumulatedFullFrequency' : [ 0x1c, ['unsigned long']],
'AcumulatedZeroFrequency' : [ 0x20, ['unsigned long']],
'FrequencyHistoryTotal' : [ 0x24, ['unsigned long']],
'AverageFrequency' : [ 0x28, ['unsigned long']],
} ],
'_IMAGE_DATA_DIRECTORY' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_DEVICE_CAPABILITIES' : [ 0x40, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'DeviceD1' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeviceD2' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LockSupported' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EjectSupported' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Removable' : [ 0x4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DockDevice' : [ 0x4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'UniqueID' : [ 0x4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SilentInstall' : [ 0x4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'RawDeviceOK' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SurpriseRemovalOK' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'WakeFromD0' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'WakeFromD1' : [ 0x4, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WakeFromD2' : [ 0x4, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'WakeFromD3' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HardwareDisabled' : [ 0x4, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NonDynamic' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'WarmEjectSupported' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NoDisplayInUI' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Reserved1' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'Reserved' : [ 0x4, ['BitField', dict(start_bit = 19, end_bit = 32, native_type='unsigned long')]],
'Address' : [ 0x8, ['unsigned long']],
'UINumber' : [ 0xc, ['unsigned long']],
'DeviceState' : [ 0x10, ['array', -28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]]],
'SystemWake' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceWake' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'D1Latency' : [ 0x34, ['unsigned long']],
'D2Latency' : [ 0x38, ['unsigned long']],
'D3Latency' : [ 0x3c, ['unsigned long']],
} ],
'_CACHED_KSTACK_LIST' : [ 0x20, {
'SListHead' : [ 0x0, ['_SLIST_HEADER']],
'MinimumFree' : [ 0x10, ['long']],
'Misses' : [ 0x14, ['unsigned long']],
'MissesLast' : [ 0x18, ['unsigned long']],
'Pad0' : [ 0x1c, ['unsigned long']],
} ],
'__unnamed_1e73' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Alignment' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1e77' : [ 0x18, {
'MinimumVector' : [ 0x0, ['unsigned long']],
'MaximumVector' : [ 0x4, ['unsigned long']],
'AffinityPolicy' : [ 0x8, ['unsigned short']],
'Group' : [ 0xa, ['unsigned short']],
'PriorityPolicy' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'IrqPriorityUndefined', 1: 'IrqPriorityLow', 2: 'IrqPriorityNormal', 3: 'IrqPriorityHigh'})]],
'TargetedProcessors' : [ 0x10, ['unsigned long long']],
} ],
'__unnamed_1e79' : [ 0x8, {
'MinimumChannel' : [ 0x0, ['unsigned long']],
'MaximumChannel' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1e7b' : [ 0xc, {
'Data' : [ 0x0, ['array', 3, ['unsigned long']]],
} ],
'__unnamed_1e7d' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'MinBusNumber' : [ 0x4, ['unsigned long']],
'MaxBusNumber' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1e7f' : [ 0xc, {
'Priority' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1e81' : [ 0x18, {
'Length40' : [ 0x0, ['unsigned long']],
'Alignment40' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1e83' : [ 0x18, {
'Length48' : [ 0x0, ['unsigned long']],
'Alignment48' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1e85' : [ 0x18, {
'Length64' : [ 0x0, ['unsigned long']],
'Alignment64' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1e87' : [ 0x18, {
'Port' : [ 0x0, ['__unnamed_1e73']],
'Memory' : [ 0x0, ['__unnamed_1e73']],
'Interrupt' : [ 0x0, ['__unnamed_1e77']],
'Dma' : [ 0x0, ['__unnamed_1e79']],
'Generic' : [ 0x0, ['__unnamed_1e73']],
'DevicePrivate' : [ 0x0, ['__unnamed_1e7b']],
'BusNumber' : [ 0x0, ['__unnamed_1e7d']],
'ConfigData' : [ 0x0, ['__unnamed_1e7f']],
'Memory40' : [ 0x0, ['__unnamed_1e81']],
'Memory48' : [ 0x0, ['__unnamed_1e83']],
'Memory64' : [ 0x0, ['__unnamed_1e85']],
} ],
'_IO_RESOURCE_DESCRIPTOR' : [ 0x20, {
'Option' : [ 0x0, ['unsigned char']],
'Type' : [ 0x1, ['unsigned char']],
'ShareDisposition' : [ 0x2, ['unsigned char']],
'Spare1' : [ 0x3, ['unsigned char']],
'Flags' : [ 0x4, ['unsigned short']],
'Spare2' : [ 0x6, ['unsigned short']],
'u' : [ 0x8, ['__unnamed_1e87']],
} ],
'_POP_THERMAL_ZONE' : [ 0x1e8, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'State' : [ 0x10, ['unsigned char']],
'Flags' : [ 0x11, ['unsigned char']],
'Mode' : [ 0x12, ['unsigned char']],
'PendingMode' : [ 0x13, ['unsigned char']],
'ActivePoint' : [ 0x14, ['unsigned char']],
'PendingActivePoint' : [ 0x15, ['unsigned char']],
'Throttle' : [ 0x18, ['long']],
'LastTime' : [ 0x20, ['unsigned long long']],
'SampleRate' : [ 0x28, ['unsigned long']],
'LastTemp' : [ 0x2c, ['unsigned long']],
'PassiveTimer' : [ 0x30, ['_KTIMER']],
'PassiveDpc' : [ 0x70, ['_KDPC']],
'OverThrottled' : [ 0xb0, ['_POP_ACTION_TRIGGER']],
'Irp' : [ 0xc8, ['pointer64', ['_IRP']]],
'Info' : [ 0xd0, ['_THERMAL_INFORMATION_EX']],
'InfoLastUpdateTime' : [ 0x148, ['_LARGE_INTEGER']],
'Metrics' : [ 0x150, ['_POP_THERMAL_ZONE_METRICS']],
} ],
'_MMPTE_LIST' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'OneEntry' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'filler0' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'filler1' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long long')]],
'NextEntry' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_VI_POOL_PAGE_HEADER' : [ 0x18, {
'NextPage' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
'VerifierEntry' : [ 0x8, ['pointer64', ['void']]],
'Signature' : [ 0x10, ['unsigned long long']],
} ],
'_HANDLE_TRACE_DEBUG_INFO' : [ 0xf0, {
'RefCount' : [ 0x0, ['long']],
'TableSize' : [ 0x4, ['unsigned long']],
'BitMaskFlags' : [ 0x8, ['unsigned long']],
'CloseCompactionLock' : [ 0x10, ['_FAST_MUTEX']],
'CurrentStackIndex' : [ 0x48, ['unsigned long']],
'TraceDb' : [ 0x50, ['array', 1, ['_HANDLE_TRACE_DB_ENTRY']]],
} ],
'_CM_WORKITEM' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Private' : [ 0x10, ['unsigned long']],
'WorkerRoutine' : [ 0x18, ['pointer64', ['void']]],
'Parameter' : [ 0x20, ['pointer64', ['void']]],
} ],
'_POP_THERMAL_ZONE_METRICS' : [ 0x98, {
'MetricsResource' : [ 0x0, ['_ERESOURCE']],
'ActiveCount' : [ 0x68, ['unsigned long']],
'PassiveCount' : [ 0x6c, ['unsigned long']],
'LastActiveStartTick' : [ 0x70, ['_LARGE_INTEGER']],
'AverageActiveTime' : [ 0x78, ['_LARGE_INTEGER']],
'LastPassiveStartTick' : [ 0x80, ['_LARGE_INTEGER']],
'AveragePassiveTime' : [ 0x88, ['_LARGE_INTEGER']],
'StartTickSinceLastReset' : [ 0x90, ['_LARGE_INTEGER']],
} ],
'_CM_TRANS' : [ 0xa8, {
'TransactionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'KCBUoWListHead' : [ 0x10, ['_LIST_ENTRY']],
'LazyCommitListEntry' : [ 0x20, ['_LIST_ENTRY']],
'KtmTrans' : [ 0x30, ['pointer64', ['void']]],
'CmRm' : [ 0x38, ['pointer64', ['_CM_RM']]],
'KtmEnlistmentObject' : [ 0x40, ['pointer64', ['_KENLISTMENT']]],
'KtmEnlistmentHandle' : [ 0x48, ['pointer64', ['void']]],
'KtmUow' : [ 0x50, ['_GUID']],
'StartLsn' : [ 0x60, ['unsigned long long']],
'TransState' : [ 0x68, ['unsigned long']],
'HiveCount' : [ 0x6c, ['unsigned long']],
'HiveArray' : [ 0x70, ['array', 7, ['pointer64', ['_CMHIVE']]]],
} ],
'_WHEA_ERROR_RECORD_HEADER_VALIDBITS' : [ 0x4, {
'PlatformId' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Timestamp' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PartitionId' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_CM_PARTIAL_RESOURCE_LIST' : [ 0x1c, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'PartialDescriptors' : [ 0x8, ['array', 1, ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_RTL_RANGE_LIST' : [ 0x20, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x10, ['unsigned long']],
'Count' : [ 0x14, ['unsigned long']],
'Stamp' : [ 0x18, ['unsigned long']],
} ],
'_OBJECT_CREATE_INFORMATION' : [ 0x40, {
'Attributes' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x8, ['pointer64', ['void']]],
'ProbeMode' : [ 0x10, ['unsigned char']],
'PagedPoolCharge' : [ 0x14, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x18, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x1c, ['unsigned long']],
'SecurityDescriptor' : [ 0x20, ['pointer64', ['void']]],
'SecurityQos' : [ 0x28, ['pointer64', ['_SECURITY_QUALITY_OF_SERVICE']]],
'SecurityQualityOfService' : [ 0x30, ['_SECURITY_QUALITY_OF_SERVICE']],
} ],
'_RTL_CRITICAL_SECTION_DEBUG' : [ 0x30, {
'Type' : [ 0x0, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x2, ['unsigned short']],
'CriticalSection' : [ 0x8, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'ProcessLocksList' : [ 0x10, ['_LIST_ENTRY']],
'EntryCount' : [ 0x20, ['unsigned long']],
'ContentionCount' : [ 0x24, ['unsigned long']],
'Flags' : [ 0x28, ['unsigned long']],
'CreatorBackTraceIndexHigh' : [ 0x2c, ['unsigned short']],
'SpareUSHORT' : [ 0x2e, ['unsigned short']],
} ],
'_POOL_HACKER' : [ 0x30, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'Contents' : [ 0x10, ['array', 8, ['unsigned long']]],
} ],
'_PO_DIAG_STACK_RECORD' : [ 0x10, {
'StackDepth' : [ 0x0, ['unsigned long']],
'Stack' : [ 0x8, ['array', 1, ['pointer64', ['void']]]],
} ],
'_SECTION_OBJECT_POINTERS' : [ 0x18, {
'DataSectionObject' : [ 0x0, ['pointer64', ['void']]],
'SharedCacheMap' : [ 0x8, ['pointer64', ['void']]],
'ImageSectionObject' : [ 0x10, ['pointer64', ['void']]],
} ],
'_SEP_AUDIT_POLICY' : [ 0x1c, {
'AdtTokenPolicy' : [ 0x0, ['_TOKEN_AUDIT_POLICY']],
'PolicySetStatus' : [ 0x1b, ['unsigned char']],
} ],
'__unnamed_1ec2' : [ 0x4, {
'SnapSharedExportsFailed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_1ec4' : [ 0x18, {
'AllSharedExportThunks' : [ 0x0, ['_VF_TARGET_ALL_SHARED_EXPORT_THUNKS']],
'Flags' : [ 0x0, ['__unnamed_1ec2']],
} ],
'_VF_TARGET_DRIVER' : [ 0x30, {
'TreeNode' : [ 0x0, ['_VF_AVL_TREE_NODE']],
'u1' : [ 0x10, ['__unnamed_1ec4']],
'VerifiedData' : [ 0x28, ['pointer64', ['_VF_TARGET_VERIFIED_DRIVER_DATA']]],
} ],
'__unnamed_1ecc' : [ 0x14, {
'ClassGuid' : [ 0x0, ['_GUID']],
'SymbolicLinkName' : [ 0x10, ['array', 1, ['wchar']]],
} ],
'__unnamed_1ece' : [ 0x2, {
'DeviceIds' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_1ed0' : [ 0x2, {
'DeviceId' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_1ed2' : [ 0x10, {
'NotificationStructure' : [ 0x0, ['pointer64', ['void']]],
'DeviceIds' : [ 0x8, ['array', 1, ['wchar']]],
} ],
'__unnamed_1ed4' : [ 0x8, {
'Notification' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_1ed6' : [ 0x8, {
'NotificationCode' : [ 0x0, ['unsigned long']],
'NotificationData' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1ed8' : [ 0x8, {
'VetoType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]],
'DeviceIdVetoNameBuffer' : [ 0x4, ['array', 1, ['wchar']]],
} ],
'__unnamed_1eda' : [ 0x10, {
'BlockedDriverGuid' : [ 0x0, ['_GUID']],
} ],
'__unnamed_1edc' : [ 0x2, {
'ParentId' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_1ede' : [ 0x20, {
'PowerSettingGuid' : [ 0x0, ['_GUID']],
'Flags' : [ 0x10, ['unsigned long']],
'SessionId' : [ 0x14, ['unsigned long']],
'DataLength' : [ 0x18, ['unsigned long']],
'Data' : [ 0x1c, ['array', 1, ['unsigned char']]],
} ],
'__unnamed_1ee0' : [ 0x20, {
'DeviceClass' : [ 0x0, ['__unnamed_1ecc']],
'TargetDevice' : [ 0x0, ['__unnamed_1ece']],
'InstallDevice' : [ 0x0, ['__unnamed_1ed0']],
'CustomNotification' : [ 0x0, ['__unnamed_1ed2']],
'ProfileNotification' : [ 0x0, ['__unnamed_1ed4']],
'PowerNotification' : [ 0x0, ['__unnamed_1ed6']],
'VetoNotification' : [ 0x0, ['__unnamed_1ed8']],
'BlockedDriverNotification' : [ 0x0, ['__unnamed_1eda']],
'InvalidIDNotification' : [ 0x0, ['__unnamed_1edc']],
'PowerSettingNotification' : [ 0x0, ['__unnamed_1ede']],
'PropertyChangeNotification' : [ 0x0, ['__unnamed_1ed0']],
} ],
'_PLUGPLAY_EVENT_BLOCK' : [ 0x50, {
'EventGuid' : [ 0x0, ['_GUID']],
'EventCategory' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'HardwareProfileChangeEvent', 1: 'TargetDeviceChangeEvent', 2: 'DeviceClassChangeEvent', 3: 'CustomDeviceEvent', 4: 'DeviceInstallEvent', 5: 'DeviceArrivalEvent', 6: 'VetoEvent', 7: 'BlockedDriverEvent', 8: 'InvalidIDEvent', 9: 'DevicePropertyChangeEvent', 10: 'DeviceInstanceRemovalEvent', 11: 'MaxPlugEventCategory'})]],
'Result' : [ 0x18, ['pointer64', ['unsigned long']]],
'Flags' : [ 0x20, ['unsigned long']],
'TotalSize' : [ 0x24, ['unsigned long']],
'DeviceObject' : [ 0x28, ['pointer64', ['void']]],
'u' : [ 0x30, ['__unnamed_1ee0']],
} ],
'_VF_SUSPECT_DRIVER_ENTRY' : [ 0x28, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'Loads' : [ 0x10, ['unsigned long']],
'Unloads' : [ 0x14, ['unsigned long']],
'BaseName' : [ 0x18, ['_UNICODE_STRING']],
} ],
'_MMPTE_TIMESTAMP' : [ 0x8, {
'MustBeZero' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long long')]],
'GlobalTimeStamp' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_SID_AND_ATTRIBUTES_HASH' : [ 0x110, {
'SidCount' : [ 0x0, ['unsigned long']],
'SidAttr' : [ 0x8, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'Hash' : [ 0x10, ['array', 32, ['unsigned long long']]],
} ],
'_XSTATE_CONTEXT' : [ 0x20, {
'Mask' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['unsigned long']],
'Area' : [ 0x10, ['pointer64', ['_XSAVE_AREA']]],
'Buffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'_XSAVE_FORMAT' : [ 0x200, {
'ControlWord' : [ 0x0, ['unsigned short']],
'StatusWord' : [ 0x2, ['unsigned short']],
'TagWord' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'ErrorOpcode' : [ 0x6, ['unsigned short']],
'ErrorOffset' : [ 0x8, ['unsigned long']],
'ErrorSelector' : [ 0xc, ['unsigned short']],
'Reserved2' : [ 0xe, ['unsigned short']],
'DataOffset' : [ 0x10, ['unsigned long']],
'DataSelector' : [ 0x14, ['unsigned short']],
'Reserved3' : [ 0x16, ['unsigned short']],
'MxCsr' : [ 0x18, ['unsigned long']],
'MxCsr_Mask' : [ 0x1c, ['unsigned long']],
'FloatRegisters' : [ 0x20, ['array', 8, ['_M128A']]],
'XmmRegisters' : [ 0xa0, ['array', 16, ['_M128A']]],
'Reserved4' : [ 0x1a0, ['array', 96, ['unsigned char']]],
} ],
'_MBCB' : [ 0xc0, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeIsInZone' : [ 0x2, ['short']],
'PagesToWrite' : [ 0x4, ['unsigned long']],
'DirtyPages' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'BitmapRanges' : [ 0x10, ['_LIST_ENTRY']],
'ResumeWritePage' : [ 0x20, ['long long']],
'MostRecentlyDirtiedPage' : [ 0x28, ['long long']],
'BitmapRange1' : [ 0x30, ['_BITMAP_RANGE']],
'BitmapRange2' : [ 0x60, ['_BITMAP_RANGE']],
'BitmapRange3' : [ 0x90, ['_BITMAP_RANGE']],
} ],
'_PS_CPU_QUOTA_BLOCK' : [ 0x4080, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SessionId' : [ 0x10, ['unsigned long']],
'CpuShareWeight' : [ 0x14, ['unsigned long']],
'CapturedWeightData' : [ 0x18, ['_PSP_CPU_SHARE_CAPTURED_WEIGHT_DATA']],
'DuplicateInputMarker' : [ 0x20, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x20, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'MiscFlags' : [ 0x20, ['long']],
'BlockCurrentGenerationLock' : [ 0x0, ['unsigned long long']],
'CyclesAccumulated' : [ 0x8, ['unsigned long long']],
'CycleCredit' : [ 0x40, ['unsigned long long']],
'BlockCurrentGeneration' : [ 0x48, ['unsigned long']],
'CpuCyclePercent' : [ 0x4c, ['unsigned long']],
'CyclesFinishedForCurrentGeneration' : [ 0x50, ['unsigned char']],
'Cpu' : [ 0x80, ['array', 256, ['_PS_PER_CPU_QUOTA_CACHE_AWARE']]],
} ],
'__unnamed_1efc' : [ 0x1, {
'AsUCHAR' : [ 0x0, ['unsigned char']],
'NoDomainAccounting' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned char')]],
'DecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 5, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
} ],
'PROCESSOR_PERFSTATE_POLICY' : [ 0x1c, {
'Revision' : [ 0x0, ['unsigned long']],
'MaxThrottle' : [ 0x4, ['unsigned char']],
'MinThrottle' : [ 0x5, ['unsigned char']],
'BusyAdjThreshold' : [ 0x6, ['unsigned char']],
'Spare' : [ 0x7, ['unsigned char']],
'Flags' : [ 0x7, ['__unnamed_1efc']],
'TimeCheck' : [ 0x8, ['unsigned long']],
'IncreaseTime' : [ 0xc, ['unsigned long']],
'DecreaseTime' : [ 0x10, ['unsigned long']],
'IncreasePercent' : [ 0x14, ['unsigned long']],
'DecreasePercent' : [ 0x18, ['unsigned long']],
} ],
'_BUS_EXTENSION_LIST' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['void']]],
'BusExtension' : [ 0x8, ['pointer64', ['_PI_BUS_EXTENSION']]],
} ],
'_CACHED_CHILD_LIST' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'ValueList' : [ 0x8, ['unsigned long long']],
'RealKcb' : [ 0x8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
} ],
'_KDEVICE_QUEUE' : [ 0x28, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceListHead' : [ 0x8, ['_LIST_ENTRY']],
'Lock' : [ 0x18, ['unsigned long long']],
'Busy' : [ 0x20, ['unsigned char']],
'Reserved' : [ 0x20, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='long long')]],
'Hint' : [ 0x20, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='long long')]],
} ],
'_SYSTEM_POWER_STATE_CONTEXT' : [ 0x4, {
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'TargetSystemState' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 12, native_type='unsigned long')]],
'EffectiveSystemState' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 16, native_type='unsigned long')]],
'CurrentSystemState' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'IgnoreHibernationPath' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'PseudoTransition' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long')]],
'ContextAsUlong' : [ 0x0, ['unsigned long']],
} ],
'_OBJECT_TYPE_INITIALIZER' : [ 0x70, {
'Length' : [ 0x0, ['unsigned short']],
'ObjectTypeFlags' : [ 0x2, ['unsigned char']],
'CaseInsensitive' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'UnnamedObjectsOnly' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'UseDefaultObject' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SecurityRequired' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'MaintainHandleCount' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'MaintainTypeList' : [ 0x2, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'SupportsObjectCallbacks' : [ 0x2, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'ObjectTypeCode' : [ 0x4, ['unsigned long']],
'InvalidAttributes' : [ 0x8, ['unsigned long']],
'GenericMapping' : [ 0xc, ['_GENERIC_MAPPING']],
'ValidAccessMask' : [ 0x1c, ['unsigned long']],
'RetainAccess' : [ 0x20, ['unsigned long']],
'PoolType' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'DefaultPagedPoolCharge' : [ 0x28, ['unsigned long']],
'DefaultNonPagedPoolCharge' : [ 0x2c, ['unsigned long']],
'DumpProcedure' : [ 0x30, ['pointer64', ['void']]],
'OpenProcedure' : [ 0x38, ['pointer64', ['void']]],
'CloseProcedure' : [ 0x40, ['pointer64', ['void']]],
'DeleteProcedure' : [ 0x48, ['pointer64', ['void']]],
'ParseProcedure' : [ 0x50, ['pointer64', ['void']]],
'SecurityProcedure' : [ 0x58, ['pointer64', ['void']]],
'QueryNameProcedure' : [ 0x60, ['pointer64', ['void']]],
'OkayToCloseProcedure' : [ 0x68, ['pointer64', ['void']]],
} ],
'__unnamed_1f31' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'SubsectionFlags' : [ 0x0, ['_MMSUBSECTION_FLAGS']],
} ],
'_SUBSECTION' : [ 0x38, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'SubsectionBase' : [ 0x8, ['pointer64', ['_MMPTE']]],
'NextSubsection' : [ 0x10, ['pointer64', ['_SUBSECTION']]],
'PtesInSubsection' : [ 0x18, ['unsigned long']],
'UnusedPtes' : [ 0x20, ['unsigned long']],
'GlobalPerSessionHead' : [ 0x20, ['pointer64', ['_MM_AVL_TABLE']]],
'u' : [ 0x28, ['__unnamed_1f31']],
'StartingSector' : [ 0x2c, ['unsigned long']],
'NumberOfFullSectors' : [ 0x30, ['unsigned long']],
} ],
'_KPROCESSOR_STATE' : [ 0x5b0, {
'SpecialRegisters' : [ 0x0, ['_KSPECIAL_REGISTERS']],
'ContextFrame' : [ 0xe0, ['_CONTEXT']],
} ],
'_IO_CLIENT_EXTENSION' : [ 0x10, {
'NextExtension' : [ 0x0, ['pointer64', ['_IO_CLIENT_EXTENSION']]],
'ClientIdentificationAddress' : [ 0x8, ['pointer64', ['void']]],
} ],
'_PS_PER_CPU_QUOTA_CACHE_AWARE' : [ 0x40, {
'SortedListEntry' : [ 0x0, ['_LIST_ENTRY']],
'IdleOnlyListHead' : [ 0x10, ['_LIST_ENTRY']],
'CycleBaseAllowance' : [ 0x20, ['unsigned long long']],
'CyclesRemaining' : [ 0x28, ['long long']],
'CurrentGeneration' : [ 0x30, ['unsigned long']],
} ],
'_ETW_BUFFER_CONTEXT' : [ 0x4, {
'ProcessorNumber' : [ 0x0, ['unsigned char']],
'Alignment' : [ 0x1, ['unsigned char']],
'LoggerId' : [ 0x2, ['unsigned short']],
} ],
'_PROC_IDLE_SNAP' : [ 0x10, {
'Time' : [ 0x0, ['unsigned long long']],
'Idle' : [ 0x8, ['unsigned long long']],
} ],
'_KERNEL_STACK_SEGMENT' : [ 0x28, {
'StackBase' : [ 0x0, ['unsigned long long']],
'StackLimit' : [ 0x8, ['unsigned long long']],
'KernelStack' : [ 0x10, ['unsigned long long']],
'InitialStack' : [ 0x18, ['unsigned long long']],
'ActualLimit' : [ 0x20, ['unsigned long long']],
} ],
'_KEXECUTE_OPTIONS' : [ 0x1, {
'ExecuteDisable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ExecuteEnable' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'DisableThunkEmulation' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Permanent' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ExecuteDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ImageDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'DisableExceptionChainValidation' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'ExecuteOptions' : [ 0x0, ['unsigned char']],
} ],
'_SEP_TOKEN_PRIVILEGES' : [ 0x18, {
'Present' : [ 0x0, ['unsigned long long']],
'Enabled' : [ 0x8, ['unsigned long long']],
'EnabledByDefault' : [ 0x10, ['unsigned long long']],
} ],
'_WORK_QUEUE_ITEM' : [ 0x20, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'WorkerRoutine' : [ 0x10, ['pointer64', ['void']]],
'Parameter' : [ 0x18, ['pointer64', ['void']]],
} ],
'_ARBITER_ALLOCATION_STATE' : [ 0x50, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'CurrentMinimum' : [ 0x10, ['unsigned long long']],
'CurrentMaximum' : [ 0x18, ['unsigned long long']],
'Entry' : [ 0x20, ['pointer64', ['_ARBITER_LIST_ENTRY']]],
'CurrentAlternative' : [ 0x28, ['pointer64', ['_ARBITER_ALTERNATIVE']]],
'AlternativeCount' : [ 0x30, ['unsigned long']],
'Alternatives' : [ 0x38, ['pointer64', ['_ARBITER_ALTERNATIVE']]],
'Flags' : [ 0x40, ['unsigned short']],
'RangeAttributes' : [ 0x42, ['unsigned char']],
'RangeAvailableAttributes' : [ 0x43, ['unsigned char']],
'WorkSpace' : [ 0x48, ['unsigned long long']],
} ],
'_VACB_ARRAY_HEADER' : [ 0x10, {
'VacbArrayIndex' : [ 0x0, ['unsigned long']],
'MappingCount' : [ 0x4, ['unsigned long']],
'HighestMappedIndex' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_MMWSLENTRY' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Hashed' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Direct' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 9, native_type='unsigned long long')]],
'Age' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 12, native_type='unsigned long long')]],
'VirtualPageNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 64, native_type='unsigned long long')]],
} ],
'_DBGKD_SWITCH_PARTITION' : [ 0x4, {
'Partition' : [ 0x0, ['unsigned long']],
} ],
'_REQUEST_MAILBOX' : [ 0x40, {
'Next' : [ 0x0, ['pointer64', ['_REQUEST_MAILBOX']]],
'RequestSummary' : [ 0x8, ['long long']],
'RequestPacket' : [ 0x10, ['_KREQUEST_PACKET']],
} ],
'_DBGKD_GET_VERSION32' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned short']],
'Flags' : [ 0x6, ['unsigned short']],
'KernBase' : [ 0x8, ['unsigned long']],
'PsLoadedModuleList' : [ 0xc, ['unsigned long']],
'MachineType' : [ 0x10, ['unsigned short']],
'ThCallbackStack' : [ 0x12, ['unsigned short']],
'NextCallback' : [ 0x14, ['unsigned short']],
'FramePointer' : [ 0x16, ['unsigned short']],
'KiCallUserMode' : [ 0x18, ['unsigned long']],
'KeUserCallbackDispatcher' : [ 0x1c, ['unsigned long']],
'BreakpointWithStatus' : [ 0x20, ['unsigned long']],
'DebuggerDataList' : [ 0x24, ['unsigned long']],
} ],
'_INTERLOCK_SEQ' : [ 0x8, {
'Depth' : [ 0x0, ['unsigned short']],
'FreeEntryOffset' : [ 0x2, ['unsigned short']],
'OffsetAndDepth' : [ 0x0, ['unsigned long']],
'Sequence' : [ 0x4, ['unsigned long']],
'Exchg' : [ 0x0, ['long long']],
} ],
'_WHEA_TIMESTAMP' : [ 0x8, {
'Seconds' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long long')]],
'Minutes' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long long')]],
'Hours' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long long')]],
'Precise' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 32, native_type='unsigned long long')]],
'Day' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 40, native_type='unsigned long long')]],
'Month' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 48, native_type='unsigned long long')]],
'Year' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 56, native_type='unsigned long long')]],
'Century' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 64, native_type='unsigned long long')]],
'AsLARGE_INTEGER' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'_PEB32' : [ 0x248, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsLegacyProcess' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x4, ['unsigned long']],
'ImageBaseAddress' : [ 0x8, ['unsigned long']],
'Ldr' : [ 0xc, ['unsigned long']],
'ProcessParameters' : [ 0x10, ['unsigned long']],
'SubSystemData' : [ 0x14, ['unsigned long']],
'ProcessHeap' : [ 0x18, ['unsigned long']],
'FastPebLock' : [ 0x1c, ['unsigned long']],
'AtlThunkSListPtr' : [ 0x20, ['unsigned long']],
'IFEOKey' : [ 0x24, ['unsigned long']],
'CrossProcessFlags' : [ 0x28, ['unsigned long']],
'ProcessInJob' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x28, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x28, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x28, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ProcessUsingFTH' : [ 0x28, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ReservedBits0' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'KernelCallbackTable' : [ 0x2c, ['unsigned long']],
'UserSharedInfoPtr' : [ 0x2c, ['unsigned long']],
'SystemReserved' : [ 0x30, ['array', 1, ['unsigned long']]],
'AtlThunkSListPtr32' : [ 0x34, ['unsigned long']],
'ApiSetMap' : [ 0x38, ['unsigned long']],
'TlsExpansionCounter' : [ 0x3c, ['unsigned long']],
'TlsBitmap' : [ 0x40, ['unsigned long']],
'TlsBitmapBits' : [ 0x44, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x4c, ['unsigned long']],
'HotpatchInformation' : [ 0x50, ['unsigned long']],
'ReadOnlyStaticServerData' : [ 0x54, ['unsigned long']],
'AnsiCodePageData' : [ 0x58, ['unsigned long']],
'OemCodePageData' : [ 0x5c, ['unsigned long']],
'UnicodeCaseTableData' : [ 0x60, ['unsigned long']],
'NumberOfProcessors' : [ 0x64, ['unsigned long']],
'NtGlobalFlag' : [ 0x68, ['unsigned long']],
'CriticalSectionTimeout' : [ 0x70, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0x78, ['unsigned long']],
'HeapSegmentCommit' : [ 0x7c, ['unsigned long']],
'HeapDeCommitTotalFreeThreshold' : [ 0x80, ['unsigned long']],
'HeapDeCommitFreeBlockThreshold' : [ 0x84, ['unsigned long']],
'NumberOfHeaps' : [ 0x88, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0x8c, ['unsigned long']],
'ProcessHeaps' : [ 0x90, ['unsigned long']],
'GdiSharedHandleTable' : [ 0x94, ['unsigned long']],
'ProcessStarterHelper' : [ 0x98, ['unsigned long']],
'GdiDCAttributeList' : [ 0x9c, ['unsigned long']],
'LoaderLock' : [ 0xa0, ['unsigned long']],
'OSMajorVersion' : [ 0xa4, ['unsigned long']],
'OSMinorVersion' : [ 0xa8, ['unsigned long']],
'OSBuildNumber' : [ 0xac, ['unsigned short']],
'OSCSDVersion' : [ 0xae, ['unsigned short']],
'OSPlatformId' : [ 0xb0, ['unsigned long']],
'ImageSubsystem' : [ 0xb4, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0xb8, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0xbc, ['unsigned long']],
'ActiveProcessAffinityMask' : [ 0xc0, ['unsigned long']],
'GdiHandleBuffer' : [ 0xc4, ['array', 34, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x14c, ['unsigned long']],
'TlsExpansionBitmap' : [ 0x150, ['unsigned long']],
'TlsExpansionBitmapBits' : [ 0x154, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x1d4, ['unsigned long']],
'AppCompatFlags' : [ 0x1d8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x1e0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x1e8, ['unsigned long']],
'AppCompatInfo' : [ 0x1ec, ['unsigned long']],
'CSDVersion' : [ 0x1f0, ['_STRING32']],
'ActivationContextData' : [ 0x1f8, ['unsigned long']],
'ProcessAssemblyStorageMap' : [ 0x1fc, ['unsigned long']],
'SystemDefaultActivationContextData' : [ 0x200, ['unsigned long']],
'SystemAssemblyStorageMap' : [ 0x204, ['unsigned long']],
'MinimumStackCommit' : [ 0x208, ['unsigned long']],
'FlsCallback' : [ 0x20c, ['unsigned long']],
'FlsListHead' : [ 0x210, ['LIST_ENTRY32']],
'FlsBitmap' : [ 0x218, ['unsigned long']],
'FlsBitmapBits' : [ 0x21c, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x22c, ['unsigned long']],
'WerRegistrationData' : [ 0x230, ['unsigned long']],
'WerShipAssertPtr' : [ 0x234, ['unsigned long']],
'pContextData' : [ 0x238, ['unsigned long']],
'pImageHeaderHash' : [ 0x23c, ['unsigned long']],
'TracingFlags' : [ 0x240, ['unsigned long']],
'HeapTracingEnabled' : [ 0x240, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CritSecTracingEnabled' : [ 0x240, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SpareTracingBits' : [ 0x240, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_VPB' : [ 0x60, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned short']],
'VolumeLabelLength' : [ 0x6, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'RealDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'SerialNumber' : [ 0x18, ['unsigned long']],
'ReferenceCount' : [ 0x1c, ['unsigned long']],
'VolumeLabel' : [ 0x20, ['array', 32, ['wchar']]],
} ],
'_CACHE_DESCRIPTOR' : [ 0xc, {
'Level' : [ 0x0, ['unsigned char']],
'Associativity' : [ 0x1, ['unsigned char']],
'LineSize' : [ 0x2, ['unsigned short']],
'Size' : [ 0x4, ['unsigned long']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'CacheUnified', 1: 'CacheInstruction', 2: 'CacheData', 3: 'CacheTrace'})]],
} ],
'_FILE_BASIC_INFORMATION' : [ 0x28, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x20, ['unsigned long']],
} ],
'_SECURITY_SUBJECT_CONTEXT' : [ 0x20, {
'ClientToken' : [ 0x0, ['pointer64', ['void']]],
'ImpersonationLevel' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'PrimaryToken' : [ 0x10, ['pointer64', ['void']]],
'ProcessAuditId' : [ 0x18, ['pointer64', ['void']]],
} ],
'_KBUGCHECK_ACTIVE_STATE' : [ 0x4, {
'BugCheckState' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'RecursionCount' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'BugCheckOwner' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'Value' : [ 0x0, ['long']],
} ],
'_PF_KERNEL_GLOBALS' : [ 0x60, {
'AccessBufferAgeThreshold' : [ 0x0, ['unsigned long long']],
'AccessBufferRef' : [ 0x8, ['_EX_RUNDOWN_REF']],
'AccessBufferExistsEvent' : [ 0x10, ['_KEVENT']],
'AccessBufferMax' : [ 0x28, ['unsigned long']],
'AccessBufferList' : [ 0x40, ['_SLIST_HEADER']],
'StreamSequenceNumber' : [ 0x50, ['long']],
'Flags' : [ 0x54, ['unsigned long']],
'ScenarioPrefetchCount' : [ 0x58, ['long']],
} ],
'_ARBITER_QUERY_ARBITRATE_PARAMETERS' : [ 0x8, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
} ],
'_ARBITER_BOOT_ALLOCATION_PARAMETERS' : [ 0x8, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
} ],
'_EXCEPTION_REGISTRATION_RECORD' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_EXCEPTION_REGISTRATION_RECORD']]],
'Handler' : [ 0x8, ['pointer64', ['void']]],
} ],
'_POP_SYSTEM_IDLE' : [ 0x38, {
'AverageIdleness' : [ 0x0, ['long']],
'LowestIdleness' : [ 0x4, ['long']],
'Time' : [ 0x8, ['unsigned long']],
'Timeout' : [ 0xc, ['unsigned long']],
'LastUserInput' : [ 0x10, ['unsigned long']],
'Action' : [ 0x14, ['POWER_ACTION_POLICY']],
'MinState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SystemRequired' : [ 0x24, ['unsigned char']],
'IdleWorker' : [ 0x25, ['unsigned char']],
'Sampling' : [ 0x26, ['unsigned char']],
'LastTick' : [ 0x28, ['unsigned long long']],
'LastSystemRequiredTime' : [ 0x30, ['unsigned long']],
} ],
'_VF_TARGET_ALL_SHARED_EXPORT_THUNKS' : [ 0x18, {
'SharedExportThunks' : [ 0x0, ['pointer64', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
'PoolSharedExportThunks' : [ 0x8, ['pointer64', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
'OrderDependentSharedExportThunks' : [ 0x10, ['pointer64', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
} ],
'_ETW_REF_CLOCK' : [ 0x10, {
'StartTime' : [ 0x0, ['_LARGE_INTEGER']],
'StartPerfClock' : [ 0x8, ['_LARGE_INTEGER']],
} ],
'_OB_DUPLICATE_OBJECT_STATE' : [ 0x28, {
'SourceProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'SourceHandle' : [ 0x8, ['pointer64', ['void']]],
'Object' : [ 0x10, ['pointer64', ['void']]],
'TargetAccess' : [ 0x18, ['unsigned long']],
'ObjectInfo' : [ 0x1c, ['_HANDLE_TABLE_ENTRY_INFO']],
'HandleAttributes' : [ 0x20, ['unsigned long']],
} ],
'_MMPTE_SUBSECTION' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Unused0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned long long')]],
'SubsectionAddress' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='long long')]],
} ],
'_POWER_STATE' : [ 0x4, {
'SystemState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_EFI_FIRMWARE_INFORMATION' : [ 0x18, {
'FirmwareVersion' : [ 0x0, ['unsigned long']],
'VirtualEfiRuntimeServices' : [ 0x8, ['pointer64', ['_VIRTUAL_EFI_RUNTIME_SERVICES']]],
'SetVirtualAddressMapStatus' : [ 0x10, ['long']],
'MissedMappingsCount' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_1fa6' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1fa8' : [ 0x10, {
'Level' : [ 0x0, ['unsigned short']],
'Group' : [ 0x2, ['unsigned short']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_1faa' : [ 0x10, {
'Group' : [ 0x0, ['unsigned short']],
'MessageCount' : [ 0x2, ['unsigned short']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_1fac' : [ 0x10, {
'Raw' : [ 0x0, ['__unnamed_1faa']],
'Translated' : [ 0x0, ['__unnamed_1fa8']],
} ],
'__unnamed_1fae' : [ 0xc, {
'Channel' : [ 0x0, ['unsigned long']],
'Port' : [ 0x4, ['unsigned long']],
'Reserved1' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1fb0' : [ 0xc, {
'Start' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1fb2' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1fb4' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length40' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1fb6' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length48' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1fb8' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length64' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1fba' : [ 0x10, {
'Generic' : [ 0x0, ['__unnamed_1fa6']],
'Port' : [ 0x0, ['__unnamed_1fa6']],
'Interrupt' : [ 0x0, ['__unnamed_1fa8']],
'MessageInterrupt' : [ 0x0, ['__unnamed_1fac']],
'Memory' : [ 0x0, ['__unnamed_1fa6']],
'Dma' : [ 0x0, ['__unnamed_1fae']],
'DevicePrivate' : [ 0x0, ['__unnamed_1e7b']],
'BusNumber' : [ 0x0, ['__unnamed_1fb0']],
'DeviceSpecificData' : [ 0x0, ['__unnamed_1fb2']],
'Memory40' : [ 0x0, ['__unnamed_1fb4']],
'Memory48' : [ 0x0, ['__unnamed_1fb6']],
'Memory64' : [ 0x0, ['__unnamed_1fb8']],
} ],
'_CM_PARTIAL_RESOURCE_DESCRIPTOR' : [ 0x14, {
'Type' : [ 0x0, ['unsigned char']],
'ShareDisposition' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned short']],
'u' : [ 0x4, ['__unnamed_1fba']],
} ],
'__unnamed_1fbf' : [ 0x4, {
'PhysicalAddress' : [ 0x0, ['unsigned long']],
'VirtualSize' : [ 0x0, ['unsigned long']],
} ],
'_IMAGE_SECTION_HEADER' : [ 0x28, {
'Name' : [ 0x0, ['array', 8, ['unsigned char']]],
'Misc' : [ 0x8, ['__unnamed_1fbf']],
'VirtualAddress' : [ 0xc, ['unsigned long']],
'SizeOfRawData' : [ 0x10, ['unsigned long']],
'PointerToRawData' : [ 0x14, ['unsigned long']],
'PointerToRelocations' : [ 0x18, ['unsigned long']],
'PointerToLinenumbers' : [ 0x1c, ['unsigned long']],
'NumberOfRelocations' : [ 0x20, ['unsigned short']],
'NumberOfLinenumbers' : [ 0x22, ['unsigned short']],
'Characteristics' : [ 0x24, ['unsigned long']],
} ],
'_ARBITER_ADD_RESERVED_PARAMETERS' : [ 0x8, {
'ReserveDevice' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_1fc9' : [ 0x50, {
'CellData' : [ 0x0, ['_CELL_DATA']],
'List' : [ 0x0, ['array', 1, ['unsigned long long']]],
} ],
'_CM_CACHED_VALUE_INDEX' : [ 0x58, {
'CellIndex' : [ 0x0, ['unsigned long']],
'Data' : [ 0x8, ['__unnamed_1fc9']],
} ],
'_CONFIGURATION_COMPONENT_DATA' : [ 0x48, {
'Parent' : [ 0x0, ['pointer64', ['_CONFIGURATION_COMPONENT_DATA']]],
'Child' : [ 0x8, ['pointer64', ['_CONFIGURATION_COMPONENT_DATA']]],
'Sibling' : [ 0x10, ['pointer64', ['_CONFIGURATION_COMPONENT_DATA']]],
'ComponentEntry' : [ 0x18, ['_CONFIGURATION_COMPONENT']],
'ConfigurationData' : [ 0x40, ['pointer64', ['void']]],
} ],
'_DBGKD_QUERY_SPECIAL_CALLS' : [ 0x4, {
'NumberOfSpecialCalls' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1fd3' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMSUBSECTION_NODE']]],
} ],
'_MMSUBSECTION_NODE' : [ 0x28, {
'u' : [ 0x0, ['__unnamed_1f31']],
'StartingSector' : [ 0x4, ['unsigned long']],
'NumberOfFullSectors' : [ 0x8, ['unsigned long']],
'u1' : [ 0x10, ['__unnamed_1fd3']],
'LeftChild' : [ 0x18, ['pointer64', ['_MMSUBSECTION_NODE']]],
'RightChild' : [ 0x20, ['pointer64', ['_MMSUBSECTION_NODE']]],
} ],
'_VF_AVL_TREE_NODE' : [ 0x10, {
'p' : [ 0x0, ['pointer64', ['void']]],
'RangeSize' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_1fdb' : [ 0x8, {
'IdleTime' : [ 0x0, ['unsigned long']],
'NonIdleTime' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1fdd' : [ 0x8, {
'Disk' : [ 0x0, ['__unnamed_1fdb']],
} ],
'_DEVICE_OBJECT_POWER_EXTENSION' : [ 0x58, {
'IdleCount' : [ 0x0, ['unsigned long']],
'BusyCount' : [ 0x4, ['unsigned long']],
'BusyReference' : [ 0x8, ['unsigned long']],
'TotalBusyCount' : [ 0xc, ['unsigned long']],
'ConservationIdleTime' : [ 0x10, ['unsigned long']],
'PerformanceIdleTime' : [ 0x14, ['unsigned long']],
'DeviceObject' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT']]],
'IdleList' : [ 0x20, ['_LIST_ENTRY']],
'IdleType' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceIdleNormal', 1: 'DeviceIdleDisk'})]],
'IdleState' : [ 0x34, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'CurrentState' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'Volume' : [ 0x40, ['_LIST_ENTRY']],
'Specific' : [ 0x50, ['__unnamed_1fdd']],
} ],
'_ARBITER_RETEST_ALLOCATION_PARAMETERS' : [ 0x18, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x8, ['unsigned long']],
'AllocateFrom' : [ 0x10, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS' : [ 0x1, {
'FRUId' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'FRUText' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'AsUCHAR' : [ 0x0, ['unsigned char']],
} ],
'_FS_FILTER_CALLBACKS' : [ 0x68, {
'SizeOfFsFilterCallbacks' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'PreAcquireForSectionSynchronization' : [ 0x8, ['pointer64', ['void']]],
'PostAcquireForSectionSynchronization' : [ 0x10, ['pointer64', ['void']]],
'PreReleaseForSectionSynchronization' : [ 0x18, ['pointer64', ['void']]],
'PostReleaseForSectionSynchronization' : [ 0x20, ['pointer64', ['void']]],
'PreAcquireForCcFlush' : [ 0x28, ['pointer64', ['void']]],
'PostAcquireForCcFlush' : [ 0x30, ['pointer64', ['void']]],
'PreReleaseForCcFlush' : [ 0x38, ['pointer64', ['void']]],
'PostReleaseForCcFlush' : [ 0x40, ['pointer64', ['void']]],
'PreAcquireForModifiedPageWriter' : [ 0x48, ['pointer64', ['void']]],
'PostAcquireForModifiedPageWriter' : [ 0x50, ['pointer64', ['void']]],
'PreReleaseForModifiedPageWriter' : [ 0x58, ['pointer64', ['void']]],
'PostReleaseForModifiedPageWriter' : [ 0x60, ['pointer64', ['void']]],
} ],
'_KENLISTMENT' : [ 0x1e0, {
'cookie' : [ 0x0, ['unsigned long']],
'NamespaceLink' : [ 0x8, ['_KTMOBJECT_NAMESPACE_LINK']],
'EnlistmentId' : [ 0x30, ['_GUID']],
'Mutex' : [ 0x40, ['_KMUTANT']],
'NextSameTx' : [ 0x78, ['_LIST_ENTRY']],
'NextSameRm' : [ 0x88, ['_LIST_ENTRY']],
'ResourceManager' : [ 0x98, ['pointer64', ['_KRESOURCEMANAGER']]],
'Transaction' : [ 0xa0, ['pointer64', ['_KTRANSACTION']]],
'State' : [ 0xa8, ['Enumeration', dict(target = 'long', choices = {0: 'KEnlistmentUninitialized', 256: 'KEnlistmentActive', 258: 'KEnlistmentPrepared', 259: 'KEnlistmentInDoubt', 260: 'KEnlistmentCommitted', 261: 'KEnlistmentCommittedNotify', 262: 'KEnlistmentCommitRequested', 257: 'KEnlistmentPreparing', 264: 'KEnlistmentDelegated', 265: 'KEnlistmentDelegatedDisconnected', 266: 'KEnlistmentPrePreparing', 263: 'KEnlistmentAborted', 268: 'KEnlistmentRecovering', 269: 'KEnlistmentAborting', 270: 'KEnlistmentReadOnly', 271: 'KEnlistmentOutcomeUnavailable', 272: 'KEnlistmentOffline', 273: 'KEnlistmentPrePrepared', 274: 'KEnlistmentInitialized', 267: 'KEnlistmentForgotten'})]],
'Flags' : [ 0xac, ['unsigned long']],
'NotificationMask' : [ 0xb0, ['unsigned long']],
'Key' : [ 0xb8, ['pointer64', ['void']]],
'KeyRefCount' : [ 0xc0, ['unsigned long']],
'RecoveryInformation' : [ 0xc8, ['pointer64', ['void']]],
'RecoveryInformationLength' : [ 0xd0, ['unsigned long']],
'DynamicNameInformation' : [ 0xd8, ['pointer64', ['void']]],
'DynamicNameInformationLength' : [ 0xe0, ['unsigned long']],
'FinalNotification' : [ 0xe8, ['pointer64', ['_KTMNOTIFICATION_PACKET']]],
'SupSubEnlistment' : [ 0xf0, ['pointer64', ['_KENLISTMENT']]],
'SupSubEnlHandle' : [ 0xf8, ['pointer64', ['void']]],
'SubordinateTxHandle' : [ 0x100, ['pointer64', ['void']]],
'CrmEnlistmentEnId' : [ 0x108, ['_GUID']],
'CrmEnlistmentTmId' : [ 0x118, ['_GUID']],
'CrmEnlistmentRmId' : [ 0x128, ['_GUID']],
'NextHistory' : [ 0x138, ['unsigned long']],
'History' : [ 0x13c, ['array', 20, ['_KENLISTMENT_HISTORY']]],
} ],
}
|
pombredanne/django-narcissus
|
refs/heads/master
|
setup.py
|
1
|
from distutils.core import setup

# Pull the version straight from the package so it is defined in one place.
VERSION = __import__('narcissus').__version__

# Use the README as the long description when it is present; fall back to an
# empty string (e.g. when installing from a stripped-down sdist) instead of
# failing the install.
try:
    # Context manager ensures the file handle is closed (the previous
    # open(...).read() leaked it).
    with open('README.rst', 'rt') as readme:
        long_description = readme.read()
except IOError:
    long_description = ''

# Implicit string-literal concatenation builds the one-line summary without
# repeated `+=` statements.
description = (
    "A blogging app that helps you easily share status updates, "
    "links, photos, videos, and long-form articles on your personal"
    " website."
)

setup(
    name='django-narcissus',
    version=VERSION,
    description=description,
    long_description=long_description,
    author='Lincoln Loop',
    license='License :: OSI Approved :: BSD License',
    url='http://github.com/lincolnloop/django-narcissus',
    packages=['narcissus'],
    classifiers=[
        'Framework :: Django',
        'Intended Audience :: Developers',
        'Development Status :: 3 - Alpha',
    ],
)
|
tensorflow/models
|
refs/heads/master
|
official/nlp/xlnet/preprocess_classification_data.py
|
1
|
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Script to pre-process classification data into tfrecords."""
import collections
import csv
import os
# Import libraries
from absl import app
from absl import flags
from absl import logging
import numpy as np
import tensorflow as tf
import sentencepiece as spm
from official.nlp.xlnet import classifier_utils
from official.nlp.xlnet import preprocess_utils
# --- Input / output locations and caching behavior. ---
flags.DEFINE_bool(
    "overwrite_data",
    default=False,
    help="If False, will use cached data if available.")
flags.DEFINE_string("output_dir", default="", help="Output dir for TF records.")
flags.DEFINE_string(
    "spiece_model_file", default="", help="Sentence Piece model path.")
flags.DEFINE_string("data_dir", default="", help="Directory for input data.")
# --- Task-specific options (which split to process, task name, batching). ---
flags.DEFINE_string("eval_split", default="dev", help="could be dev or test")
flags.DEFINE_string("task_name", default=None, help="Task name")
flags.DEFINE_integer(
    "eval_batch_size", default=64, help="batch size for evaluation")
flags.DEFINE_integer("max_seq_length", default=128, help="Max sequence length")
flags.DEFINE_integer(
    "num_passes",
    default=1,
    help="Num passes for processing training data. "
    "This is use to batch data without loss for TPUs.")
# --- Tokenization / formatting switches. ---
flags.DEFINE_bool("uncased", default=False, help="Use uncased.")
flags.DEFINE_bool(
    "is_regression", default=False, help="Whether it's a regression task.")
flags.DEFINE_bool(
    "use_bert_format",
    default=False,
    help="Whether to use BERT format to arrange input data.")
# Module-level handle used throughout this script to read the parsed flags.
FLAGS = flags.FLAGS
class InputExample(object):
  """One labeled (or unlabeled) text example for sequence classification."""

  def __init__(self, guid, text_a, text_b=None, label=None):
    """Initializes an InputExample.

    Args:
      guid: Unique id for the example.
      text_a: string. Untokenized text of the first sequence; always required.
      text_b: (Optional) string. Untokenized text of the second sequence,
        supplied only for sequence-pair tasks.
      label: (Optional) string. Gold label; set for train/dev examples and
        left as None for test examples.
    """
    # Raw, untokenized fields are kept as-is; tokenization happens later in
    # the feature-conversion step.
    self.guid = guid
    self.label = label
    self.text_a = text_a
    self.text_b = text_b
class DataProcessor(object):
  """Abstract base class for sequence-classification data converters."""

  def get_train_examples(self, data_dir):
    """Gets a collection of `InputExample`s for the train set."""
    raise NotImplementedError()

  def get_dev_examples(self, data_dir):
    """Gets a collection of `InputExample`s for the dev set."""
    raise NotImplementedError()

  def get_test_examples(self, data_dir):
    """Gets a collection of `InputExample`s for prediction."""
    raise NotImplementedError()

  def get_labels(self):
    """Gets the list of labels for this data set."""
    raise NotImplementedError()

  @classmethod
  def _read_tsv(cls, input_file, quotechar=None):
    """Reads a tab separated value file, dropping completely empty rows."""
    with tf.io.gfile.GFile(input_file, "r") as file_obj:
      rows = csv.reader(file_obj, delimiter="\t", quotechar=quotechar)
      # Empty rows (zero fields) carry no data and are skipped.
      return [row for row in rows if row]
class GLUEProcessor(DataProcessor):
  """GLUEProcessor."""

  def __init__(self):
    # Default GLUE file names; subclasses override for task-specific splits.
    self.train_file = "train.tsv"
    self.dev_file = "dev.tsv"
    self.test_file = "test.tsv"
    # Column layout of the TSV files; subclasses fill these in.
    self.label_column = None
    self.text_a_column = None
    self.text_b_column = None
    self.contains_header = True
    self.test_text_a_column = None
    self.test_text_b_column = None
    self.test_contains_header = True

  def get_train_examples(self, data_dir):
    """See base class."""
    path = os.path.join(data_dir, self.train_file)
    return self._create_examples(self._read_tsv(path), "train")

  def get_dev_examples(self, data_dir):
    """See base class."""
    path = os.path.join(data_dir, self.dev_file)
    return self._create_examples(self._read_tsv(path), "dev")

  def get_test_examples(self, data_dir):
    """See base class."""
    # Test files reuse the train/dev column layout unless overridden.
    if self.test_text_a_column is None:
      self.test_text_a_column = self.text_a_column
    if self.test_text_b_column is None:
      self.test_text_b_column = self.text_b_column
    path = os.path.join(data_dir, self.test_file)
    return self._create_examples(self._read_tsv(path), "test")

  def get_labels(self):
    """See base class."""
    return ["0", "1"]

  def _create_examples(self, lines, set_type):
    """Creates examples for the training and dev sets."""
    is_test = set_type == "test"
    a_column = self.test_text_a_column if is_test else self.text_a_column
    b_column = self.test_text_b_column if is_test else self.text_b_column
    has_header = self.test_contains_header if is_test else self.contains_header

    examples = []
    for i, line in enumerate(lines):
      if i == 0 and has_header:
        continue
      # there are some incomplete lines in QNLI
      if len(line) <= a_column:
        logging.warning("Incomplete line, ignored.")
        continue
      text_a = line[a_column]

      text_b = None
      if b_column is not None:
        if len(line) <= b_column:
          logging.warning("Incomplete line, ignored.")
          continue
        text_b = line[b_column]

      if is_test:
        # Test rows have no gold label; use a placeholder.
        label = self.get_labels()[0]
      else:
        if len(line) <= self.label_column:
          logging.warning("Incomplete line, ignored.")
          continue
        label = line[self.label_column]

      examples.append(
          InputExample(guid="%s-%s" % (set_type, i), text_a=text_a,
                       text_b=text_b, label=label))
    return examples
class Yelp5Processor(DataProcessor):
  """Yelp5Processor."""

  def get_train_examples(self, data_dir):
    return self._create_examples(os.path.join(data_dir, "train.csv"))

  def get_dev_examples(self, data_dir):
    # Yelp-5 ships no separate dev split; test.csv doubles as dev.
    return self._create_examples(os.path.join(data_dir, "test.csv"))

  def get_labels(self):
    """See base class."""
    return ["1", "2", "3", "4", "5"]

  def _create_examples(self, input_file):
    """Creates examples for the training and dev sets."""
    examples = []
    with tf.io.gfile.GFile(input_file) as f:
      for i, row in enumerate(csv.reader(f)):
        # Undo the CSV-level quote escaping used by the Yelp dump.
        text = row[1].replace('""', '"').replace('\\"', '"')
        examples.append(
            InputExample(guid=str(i), text_a=text, text_b=None, label=row[0]))
    return examples
class ImdbProcessor(DataProcessor):
  """ImdbProcessor."""

  def get_labels(self):
    return ["neg", "pos"]

  def get_train_examples(self, data_dir):
    return self._create_examples(os.path.join(data_dir, "train"))

  def get_dev_examples(self, data_dir):
    # IMDB's test directory serves as the dev split.
    return self._create_examples(os.path.join(data_dir, "test"))

  def _create_examples(self, data_dir):
    """Creates examples."""
    examples = []
    # One subdirectory per sentiment label, one review per .txt file.
    for label in ["neg", "pos"]:
      label_dir = os.path.join(data_dir, label)
      for fname in tf.io.gfile.listdir(label_dir):
        if not fname.endswith("txt"):
          continue
        if len(examples) % 1000 == 0:
          logging.info("Loading dev example %d", len(examples))
        with tf.io.gfile.GFile(os.path.join(label_dir, fname)) as f:
          text = f.read().strip().replace("<br />", " ")
        examples.append(
            InputExample(
                guid="unused_id", text_a=text, text_b=None, label=label))
    return examples
class MnliMatchedProcessor(GLUEProcessor):
  """MnliMatchedProcessor."""

  def __init__(self):
    super(MnliMatchedProcessor, self).__init__()
    # MNLI-matched ships its own dev/test files; the sentence pair lives in
    # columns 8 and 9 and the gold label is the last column of each row.
    self.dev_file = "dev_matched.tsv"
    self.test_file = "test_matched.tsv"
    self.text_a_column = 8
    self.text_b_column = 9
    self.label_column = -1

  def get_labels(self):
    return ["contradiction", "entailment", "neutral"]
class MnliMismatchedProcessor(MnliMatchedProcessor):
  """MNLI-mismatched variant: same column layout, different split files."""

  def __init__(self):
    super(MnliMismatchedProcessor, self).__init__()
    self.dev_file = "dev_mismatched.tsv"
    self.test_file = "test_mismatched.tsv"
class StsbProcessor(GLUEProcessor):
  """StsbProcessor."""

  def __init__(self):
    super(StsbProcessor, self).__init__()
    self.label_column = 9
    self.text_a_column = 7
    self.text_b_column = 8

  def get_labels(self):
    # STS-B is a regression task; a single float placeholder stands in for
    # the label vocabulary.
    return [0.0]

  def _create_examples(self, lines, set_type):
    """Creates examples for the training and dev sets."""
    is_test = set_type == "test"
    a_column = self.test_text_a_column if is_test else self.text_a_column
    b_column = self.test_text_b_column if is_test else self.text_b_column
    has_header = self.test_contains_header if is_test else self.contains_header

    examples = []
    for i, line in enumerate(lines):
      if i == 0 and has_header:
        continue
      # there are some incomplete lines in QNLI
      if len(line) <= a_column:
        logging.warning("Incomplete line, ignored.")
        continue
      text_a = line[a_column]

      text_b = None
      if b_column is not None:
        if len(line) <= b_column:
          logging.warning("Incomplete line, ignored.")
          continue
        text_b = line[b_column]

      if is_test:
        label = self.get_labels()[0]
      else:
        if len(line) <= self.label_column:
          logging.warning("Incomplete line, ignored.")
          continue
        # Regression target: parse the similarity score as a float.
        label = float(line[self.label_column])

      examples.append(
          InputExample(guid="%s-%s" % (set_type, i), text_a=text_a,
                       text_b=text_b, label=label))
    return examples
def file_based_convert_examples_to_features(examples,
                                            label_list,
                                            max_seq_length,
                                            tokenize_fn,
                                            output_file,
                                            num_passes=1):
  """Convert a set of `InputExample`s to a TFRecord file.

  Args:
    examples: list of `InputExample`s. The caller's list is not modified.
    label_list: list of all labels, or None for regression tasks.
    max_seq_length: maximum sequence length examples are padded/truncated to.
    tokenize_fn: callable mapping raw text to a list of token ids.
    output_file: path of the TFRecord file to write.
    num_passes: number of times each example is written; used to batch data
      without loss for TPUs.
  """
  # do not create duplicated records
  if tf.io.gfile.exists(output_file) and not FLAGS.overwrite_data:
    logging.info("Do not overwrite tfrecord %s exists.", output_file)
    return

  logging.info("Create new tfrecord %s.", output_file)

  # Feature builders hoisted out of the loop; no need to re-create the
  # closures for every example.
  def create_int_feature(values):
    return tf.train.Feature(int64_list=tf.train.Int64List(value=list(values)))

  def create_float_feature(values):
    return tf.train.Feature(float_list=tf.train.FloatList(value=list(values)))

  writer = tf.io.TFRecordWriter(output_file)

  # BUG FIX: the original `examples *= num_passes` extended the caller's
  # list in place; build a fresh repeated list instead.
  examples = examples * num_passes

  for (ex_index, example) in enumerate(examples):
    if ex_index % 10000 == 0:
      logging.info("Writing example %d of %d", ex_index, len(examples))

    feature = classifier_utils.convert_single_example(ex_index, example,
                                                      label_list,
                                                      max_seq_length,
                                                      tokenize_fn,
                                                      FLAGS.use_bert_format)

    features = collections.OrderedDict()
    features["input_ids"] = create_int_feature(feature.input_ids)
    # input_mask is serialized as floats (XLNet convention), not ints.
    features["input_mask"] = create_float_feature(feature.input_mask)
    features["segment_ids"] = create_int_feature(feature.segment_ids)
    if label_list is not None:
      features["label_ids"] = create_int_feature([feature.label_id])
    else:
      # Regression task: the label is a float target.
      features["label_ids"] = create_float_feature([float(feature.label_id)])
    features["is_real_example"] = create_int_feature(
        [int(feature.is_real_example)])

    tf_example = tf.train.Example(features=tf.train.Features(feature=features))
    writer.write(tf_example.SerializeToString())
  writer.close()
def main(_):
  """Builds train and eval TFRecord files for the selected task."""
  logging.set_verbosity(logging.INFO)

  # Registry of supported tasks; each value is a DataProcessor subclass.
  processors = {
      "mnli_matched": MnliMatchedProcessor,
      "mnli_mismatched": MnliMismatchedProcessor,
      "sts-b": StsbProcessor,
      "imdb": ImdbProcessor,
      "yelp5": Yelp5Processor
  }

  task_name = FLAGS.task_name.lower()
  if task_name not in processors:
    raise ValueError("Task not found: %s" % (task_name))

  processor = processors[task_name]()
  # Regression tasks (e.g. STS-B) have no discrete label vocabulary.
  label_list = processor.get_labels() if not FLAGS.is_regression else None

  # SentencePiece tokenizer shared by train and eval conversion.
  sp = spm.SentencePieceProcessor()
  sp.Load(FLAGS.spiece_model_file)

  def tokenize_fn(text):
    text = preprocess_utils.preprocess_text(text, lower=FLAGS.uncased)
    return preprocess_utils.encode_ids(sp, text)

  spm_basename = os.path.basename(FLAGS.spiece_model_file)

  train_file_base = "{}.len-{}.train.tf_record".format(spm_basename,
                                                       FLAGS.max_seq_length)
  train_file = os.path.join(FLAGS.output_dir, train_file_base)
  logging.info("Use tfrecord file %s", train_file)

  train_examples = processor.get_train_examples(FLAGS.data_dir)
  np.random.shuffle(train_examples)
  logging.info("Num of train samples: %d", len(train_examples))

  file_based_convert_examples_to_features(train_examples, label_list,
                                          FLAGS.max_seq_length, tokenize_fn,
                                          train_file, FLAGS.num_passes)

  if FLAGS.eval_split == "dev":
    eval_examples = processor.get_dev_examples(FLAGS.data_dir)
  else:
    eval_examples = processor.get_test_examples(FLAGS.data_dir)

  logging.info("Num of eval samples: %d", len(eval_examples))

  # TPU requires a fixed batch size for all batches, therefore the number
  # of examples must be a multiple of the batch size, or else examples
  # will get dropped. So we pad with fake examples which are ignored
  # later on. These do NOT count towards the metric (all tf.metrics
  # support a per-instance weight, and these get a weight of 0.0).
  #
  # Modified in XL: We also adopt the same mechanism for GPUs.
  while len(eval_examples) % FLAGS.eval_batch_size != 0:
    eval_examples.append(classifier_utils.PaddingInputExample())

  eval_file_base = "{}.len-{}.{}.eval.tf_record".format(spm_basename,
                                                        FLAGS.max_seq_length,
                                                        FLAGS.eval_split)
  eval_file = os.path.join(FLAGS.output_dir, eval_file_base)

  file_based_convert_examples_to_features(eval_examples, label_list,
                                          FLAGS.max_seq_length, tokenize_fn,
                                          eval_file)


if __name__ == "__main__":
  app.run(main)
|
ojengwa/sympy
|
refs/heads/master
|
sympy/printing/jscode.py
|
7
|
"""
Javascript code printer
The JavascriptCodePrinter converts single sympy expressions into single
Javascript expressions, using the functions defined in the Javascript
Math object where possible.
"""
from __future__ import print_function, division
from sympy.core import S, C
from sympy.printing.codeprinter import CodePrinter
from sympy.printing.precedence import precedence
from sympy.core.compatibility import string_types
# dictionary mapping sympy function to (argument_conditions, Javascript_function).
# Used in JavascriptCodePrinter._print_Function(self)
known_functions = {
}

# Direct sympy-name -> Javascript Math.* renames, consulted by
# _print_Function when no known_functions entry matches.
function_translations = {
    'Abs': 'Math.abs',
    'acos': 'Math.acos',
    'asin': 'Math.asin',
    'atan': 'Math.atan',
    'ceiling': 'Math.ceil',
    'cos': 'Math.cos',
    'exp': 'Math.exp',
    'floor': 'Math.floor',
    'log': 'Math.log',
    'sin': 'Math.sin',
    'tan': 'Math.tan',
}
class JavascriptCodePrinter(CodePrinter):
    """A Printer to convert python expressions to strings of javascript code
    """
    printmethod = '_javascript'

    _default_settings = {
        'order': None,
        'full_prec': 'auto',
        'precision': 15,
        'user_functions': {},
        'human': True,
    }

    def __init__(self, settings=None):
        """Register function mappings supplied by user.

        BUG FIXES vs. the previous revision:
        - ``settings`` no longer defaults to a shared mutable dict.
        - the caller's ``user_functions`` dict is copied, not mutated.
        - a bare string value is wrapped in a one-element list of
          (condition, name) pairs, matching the format _print_Function
          iterates over; the bare tuple previously stored could not be
          unpacked there.
        """
        settings = {} if settings is None else settings
        CodePrinter.__init__(self, settings)
        self.known_functions = dict(known_functions)
        userfuncs = {}
        for k, v in settings.get('user_functions', {}).items():
            userfuncs[k] = v if isinstance(v, list) else [(lambda *x: True, v)]
        self.known_functions.update(userfuncs)

    def _rate_index_position(self, p):
        """function to calculate score based on position among indices

        This method is used to sort loops in an optimized order, see
        CodePrinter._sort_optimized()
        """
        return p*5

    def _get_statement(self, codestring):
        return "%s;" % codestring

    def doprint(self, expr, assign_to=None):
        """
        Actually format the expression as Javascript code.
        """
        if isinstance(assign_to, string_types):
            assign_to = C.Symbol(assign_to)
        elif not isinstance(assign_to, (C.Basic, type(None))):
            raise TypeError("JavascriptCodePrinter cannot assign to object of type %s" %
                            type(assign_to))

        # keep a set of expressions that are not strictly translatable to Javascript
        # and number constants that must be declared and initialized
        not_js = self._not_supported = set()
        self._number_symbols = set()

        # We treat top level Piecewise here to get if tests outside loops
        lines = []
        if isinstance(expr, C.Piecewise):
            for i, (e, c) in enumerate(expr.args):
                if i == 0:
                    lines.append("if (%s) {" % self._print(c))
                elif i == len(expr.args) - 1 and c == True:
                    lines.append("else {")
                else:
                    lines.append("else if (%s) {" % self._print(c))
                code0 = self._doprint_a_piece(e, assign_to)
                lines.extend(code0)
                lines.append("}")
        else:
            code0 = self._doprint_a_piece(expr, assign_to)
            lines.extend(code0)

        # format the output
        if self._settings["human"]:
            frontlines = []
            if len(not_js) > 0:
                frontlines.append("// Not Javascript:")
                for expr in sorted(not_js, key=str):
                    frontlines.append("// %s" % repr(expr))
            for name, value in sorted(self._number_symbols, key=str):
                frontlines.append("var %s = %s;" % (name, value))
            lines = frontlines + lines
            lines = "\n".join(lines)
            result = self.indent_code(lines)
        else:
            # Machine-friendly form: return the raw bookkeeping sets plus code.
            lines = self.indent_code("\n".join(lines))
            result = self._number_symbols, not_js, lines
        del self._not_supported
        del self._number_symbols
        return result

    def _get_loop_opening_ending(self, indices):
        """Returns a tuple (open_lines, close_lines) containing lists of codelines
        """
        open_lines = []
        close_lines = []
        loopstart = "for (var %(varble)s=%(start)s; %(varble)s<%(end)s; %(varble)s++){"
        for i in indices:
            # Javascript arrays start at 0 and end at dimension-1
            open_lines.append(loopstart % {
                'varble': self._print(i.label),
                'start': self._print(i.lower),
                'end': self._print(i.upper + 1)})
            close_lines.append("}")
        return open_lines, close_lines

    def _print_Pow(self, expr):
        PREC = precedence(expr)
        # Special-case reciprocal and square root for readable output.
        if expr.exp == -1:
            return '1/%s' % (self.parenthesize(expr.base, PREC))
        elif expr.exp == 0.5:
            return 'Math.sqrt(%s)' % self._print(expr.base)
        else:
            return 'Math.pow(%s, %s)' % (self._print(expr.base),
                                         self._print(expr.exp))

    def _print_Rational(self, expr):
        p, q = int(expr.p), int(expr.q)
        return '%d/%d' % (p, q)

    def _print_Indexed(self, expr):
        # calculate index for 1d array (row-major flattening of the indices)
        dims = expr.shape
        inds = [ i.label for i in expr.indices ]
        elem = S.Zero
        offset = S.One
        for i in reversed(range(expr.rank)):
            elem += offset*inds[i]
            offset *= dims[i]
        return "%s[%s]" % (self._print(expr.base.label), self._print(elem))

    def _print_Exp1(self, expr):
        return "Math.E"

    def _print_Pi(self, expr):
        return 'Math.PI'

    def _print_Infinity(self, expr):
        return 'Number.POSITIVE_INFINITY'

    def _print_NegativeInfinity(self, expr):
        return 'Number.NEGATIVE_INFINITY'

    def _print_Piecewise(self, expr):
        # This method is called only for inline if constructs
        # Top level piecewise is handled in doprint()
        ecpairs = ["(%s) {\n%s\n}\n" % (self._print(c), self._print(e))
                   for e, c in expr.args[:-1]]
        last_line = ""
        if expr.args[-1].cond == True:
            last_line = "else {\n%s\n}" % self._print(expr.args[-1].expr)
        else:
            ecpairs.append("(%s) {\n%s\n" %
                           (self._print(expr.args[-1].cond),
                            self._print(expr.args[-1].expr)))
        code = "if %s" + last_line
        return code % "else if ".join(ecpairs)

    def _print_Function(self, expr):
        # Priority: user/known functions, then the Math.* translation table,
        # then an inlined implemented function, then the generic fallback.
        if expr.func.__name__ in self.known_functions:
            cond_cfunc = self.known_functions[expr.func.__name__]
            for cond, cfunc in cond_cfunc:
                if cond(*expr.args):
                    return "%s(%s)" % (cfunc, self.stringify(expr.args, ", "))
        if expr.func.__name__ in function_translations:
            tr = function_translations[expr.func.__name__]
            return "%s(%s)" % (tr, self.stringify(expr.args, ", "))
        if hasattr(expr, '_imp_') and isinstance(expr._imp_, C.Lambda):
            # inlined function
            return self._print(expr._imp_(*expr.args))
        return CodePrinter._print_Function(self, expr)

    def indent_code(self, code):
        """Accepts a string of code or a list of code lines"""
        if isinstance(code, string_types):
            code_lines = self.indent_code(code.splitlines(True))
            return ''.join(code_lines)

        tab = "   "
        inc_token = ('{', '(', '{\n', '(\n')
        dec_token = ('}', ')')

        code = [ line.lstrip(' \t') for line in code ]
        increase = [ int(any(map(line.endswith, inc_token))) for line in code ]
        decrease = [ int(any(map(line.startswith, dec_token)))
                     for line in code ]

        pretty = []
        level = 0
        for n, line in enumerate(code):
            if line == '' or line == '\n':
                pretty.append(line)
                continue
            level -= decrease[n]
            pretty.append("%s%s" % (tab*level, line))
            level += increase[n]
        return pretty
def jscode(expr, assign_to=None, **settings):
    """Converts an expr to a string of javascript code

    Parameters
    ==========

    expr : sympy.core.Expr
        a sympy expression to be converted
    assign_to : optional
        symbol (or its name) the generated code assigns the result to
    precision : optional
        the precision for numbers such as pi [default=15]
    user_functions : optional
        A dictionary where keys are FunctionClass instances and values
        are their string representations. Alternatively the
        dictionary values can be a list of tuples i.e. [(argument_test,
        jsfunction_string)].
    human : optional
        If True, the result is a single string that may contain some
        constant declarations for the number symbols. If False, the
        same information is returned in a more programmer-friendly
        data structure.

    Examples
    ========

    >>> from sympy import jscode, symbols, Rational, sin
    >>> x, tau = symbols(["x", "tau"])
    >>> jscode((2*tau)**Rational(7,2))
    '8*Math.sqrt(2)*Math.pow(tau, 7/2)'
    >>> jscode(sin(x), assign_to="s")
    's = Math.sin(x);'
    """
    printer = JavascriptCodePrinter(settings)
    return printer.doprint(expr, assign_to)
def print_jscode(expr, **settings):
    """Print the Javascript source generated for the given expression.

    All keyword arguments are forwarded to :func:`jscode`; see its
    docstring for their meaning.
    """
    code = jscode(expr, **settings)
    print(code)
|
hjanime/VisTrails
|
refs/heads/master
|
vistrails/db/versions/v1_0_2/domain/log.py
|
4
|
###############################################################################
##
## Copyright (C) 2014-2015, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from __future__ import division
from auto_gen import DBLog as _DBLog
from auto_gen import DBAbstraction, DBModule, DBGroup, DBLoopExec, \
DBGroupExec, DBModuleExec
from id_scope import IdScope
import copy
class DBLog(_DBLog):
    """Wrapper over the auto-generated DBLog that adds an IdScope for
    allocating child-object ids, plus copy and version-upgrade support."""

    def __init__(self, *args, **kwargs):
        _DBLog.__init__(self, *args, **kwargs)
        # All executable-item types share the 'item_exec' id namespace;
        # abstractions and groups draw ids from the module namespace.
        self.id_scope = IdScope(1,
                                {DBLoopExec.vtType: 'item_exec',
                                 DBModuleExec.vtType: 'item_exec',
                                 DBGroupExec.vtType: 'item_exec',
                                 DBAbstraction.vtType: DBModule.vtType,
                                 DBGroup.vtType: DBModule.vtType})

    def __copy__(self):
        return DBLog.do_copy(self)

    def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
        """Copy via the auto-generated copier, then restore the DBLog
        subclass and duplicate the id scope."""
        cp = _DBLog.do_copy(self, new_ids, id_scope, id_remap)
        # do_copy returns a plain _DBLog; re-tag it as this subclass.
        cp.__class__ = DBLog
        cp.id_scope = copy.copy(self.id_scope)
        return cp

    @staticmethod
    def update_version(old_obj, trans_dict, new_obj=None):
        """Translate old_obj to this schema version, reusing new_obj if given."""
        if new_obj is None:
            new_obj = DBLog()
        new_obj = _DBLog.update_version(old_obj, trans_dict, new_obj)
        new_obj.update_id_scope()
        return new_obj

    def update_id_scope(self):
        # No id-scope reconstruction is needed for this schema version.
        pass
|
mathspace/libcloud
|
refs/heads/trunk
|
docs/examples/dns/auroradns/enable_disable_record.py
|
31
|
from libcloud.dns.types import Provider
from libcloud.dns.types import RecordType
from libcloud.dns.providers import get_driver

# Connect to AuroraDNS using API key + secret credentials.
cls = get_driver(Provider.AURORADNS)
driver = cls('myapikey', 'mysecret')

zone = driver.get_zone('auroradns.eu')

# Create an AAAA record in the disabled state; ex_disabled is an
# AuroraDNS-specific extension argument.
record = zone.create_record(name='www', type=RecordType.AAAA,
                            data='2a00:f10:452::1', ex_disabled=True)

# Enable the record afterwards.
record.update(ex_disabled=False)
ScreamingUdder/mantid
|
refs/heads/master
|
Testing/SystemTests/tests/analysis/SphinxWarnings.py
|
3
|
#pylint: disable=invalid-name
"""
Some of the sphinx warnings come from the C++ code, from the properties of the algorithms or from the summary string
This test tries to detect the most common such errors.
It also detects if a new category is created (i.e. someone uses Utilities instead of Utility)
"""
from __future__ import (absolute_import, division, print_function)
import stresstesting
import mantid
import re
from six import iteritems
class SphinxWarnings(stresstesting.MantidStressTest):
    """Checks every registered algorithm's categories, summary and property
    documentation for reST/Sphinx constructs that commonly render badly."""
    def __init__(self):
        stresstesting.MantidStressTest.__init__(self)
        # Whitelist of algorithm categories; anything outside it (e.g. a new
        # or misspelled category such as 'Utilities') is reported.
        self.allowedCategories=['Arithmetic',
                                'CorrectionFunctions',
                                'Crystal',
                                'DataHandling',
                                'Diagnostics',
                                'Diffraction',
                                'Events',
                                'Examples',
                                'ILL',
                                'ISIS',
                                'Inelastic',
                                'MDAlgorithms',
                                'MPI',
                                'Muon',
                                'Optimization',
                                'PythonAlgorithms',
                                'Quantification',
                                'Reflectometry',
                                'Remote',
                                'SANS',
                                'Simulation',
                                'SINQ',
                                'Sample',
                                'Transforms',
                                'Utility',
                                'Workflow']
        # Accumulates one human-readable line per problem found.
        self.errorMessage=""
    def checkString(self,s):
        """Return a description of suspicious reST markup found in s, or an
        empty string if the text looks safe to render."""
        tocheck=s
        outputString=''
        # First strip out all well-formed markup so only malformed leftovers
        # remain to be flagged below.
        #replace strong emphasis: Space**NotSpaceText**
        sub=re.compile(r' \*\*[^ ].+?\*\*')
        for i in sub.findall(tocheck):
            tocheck=tocheck.replace(i," ")
        #replace emphasis: Space*NotSpaceText*
        sub=re.compile(r' \*[^ ].+?\*')
        for i in sub.findall(tocheck):
            tocheck=tocheck.replace(i," ")
        #replace correctly named hyperlinks: Space`Name link>`__
        sub=re.compile(r' \`.+? <.+?.\`__')
        for i in sub.findall(tocheck):
            tocheck=tocheck.replace(i," ")
        #find strong emphasis errors
        sub=re.compile(r' \*\*[^ ]+')
        result=sub.findall(tocheck)
        if len(result)>0:
            outputString+="Strong emphasis error: "+str(result)+"\n"
        #find emphasis errors
        sub=re.compile(r' \*[^ ]+')
        result=sub.findall(tocheck)
        if len(result)>0:
            outputString+="Emphasis error: "+str(result)+"\n"
        #find potentially duplicate named hyperlinks
        sub=re.compile(r' \`.+? <.+?.\`_')
        result=sub.findall(tocheck)
        if len(result)>0:
            outputString+="Potentially unsafe named hyperlink: "+str(result)+"\n"
        #find potentially wrong substitutions
        sub=re.compile(r'\|.+?\|')
        result=sub.findall(tocheck)
        if len(result)>0:
            outputString+="Potentially unsafe substitution: "+str(result)+"\n"
        return outputString
    def runTest(self):
        """Inspect every non-deprecated algorithm version and collect any
        markup/category problems into self.errorMessage."""
        algs = mantid.AlgorithmFactory.getRegisteredAlgorithms(True)
        for (name, versions) in iteritems(algs):
            for version in versions:
                if mantid.api.DeprecatedAlgorithmChecker(name,version).isDeprecated()=='':
                    # get an instance
                    alg = mantid.AlgorithmManager.create(name, version)
                    #check categories
                    for cat in alg.categories():
                        # Only the top-level category (before the first '\')
                        # is validated against the whitelist.
                        if cat.split("\\")[0] not in self.allowedCategories:
                            self.errorMessage += name+" "+str(version)+" Category: "+cat.split("\\")[0]+" is not in the allowed list."
                            self.errorMessage += " If you need this category, please add it to the systemtest.\n"
                    #check summary
                    summary=alg.summary()
                    result=self.checkString(summary)
                    if len(result)>0:
                        self.errorMessage+=name+" "+str(version)+" Summary: "+result+"\n"
                    #check properties
                    properties=alg.getProperties()
                    for prop in properties:
                        propName=prop.name
                        propDoc=prop.documentation
                        result=self.checkString(propDoc)
                        if len(result)>0:
                            self.errorMessage+=name+" "+str(version)+" Property: "+propName+" Documentation: "+result +"\n"
    def validate(self):
        """Pass only if no problems were accumulated during runTest."""
        if self.errorMessage!="":
            print("Found the following errors:\n",self.errorMessage)
            return False
        return True
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.