# ===== cvat/settings/base.py (arthurtibame/cvat @ 0062ecde, MIT) =====
# Copyright (C) 2018-2019 Intel Corporation
#
# SPDX-License-Identifier: MIT
"""
Django settings for CVAT project.
Generated by 'django-admin startproject' using Django 2.0.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
import sys
import fcntl
import shutil
import subprocess
import mimetypes
mimetypes.add_type("application/wasm", ".wasm", True)
from pathlib import Path
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = str(Path(__file__).parents[2])
ALLOWED_HOSTS = os.environ.get('ALLOWED_HOSTS', 'localhost,127.0.0.1').split(',')
INTERNAL_IPS = ['127.0.0.1']
try:
sys.path.append(BASE_DIR)
from keys.secret_key import SECRET_KEY # pylint: disable=unused-import
except ImportError:
from django.utils.crypto import get_random_string
keys_dir = os.path.join(BASE_DIR, 'keys')
if not os.path.isdir(keys_dir):
os.mkdir(keys_dir)
with open(os.path.join(keys_dir, 'secret_key.py'), 'w') as f:
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
f.write("SECRET_KEY = '{}'\n".format(get_random_string(50, chars)))
from keys.secret_key import SECRET_KEY
def generate_ssh_keys():
keys_dir = '{}/keys'.format(os.getcwd())
ssh_dir = '{}/.ssh'.format(os.getenv('HOME'))
pidfile = os.path.join(ssh_dir, 'ssh.pid')
with open(pidfile, "w") as pid:
fcntl.flock(pid, fcntl.LOCK_EX)
try:
subprocess.run(['ssh-add {}/*'.format(ssh_dir)], shell = True, stderr = subprocess.PIPE)
keys = subprocess.run(['ssh-add -l'], shell = True,
stdout = subprocess.PIPE).stdout.decode('utf-8').split('\n')
if 'has no identities' in keys[0]:
print('SSH keys were not found')
volume_keys = os.listdir(keys_dir)
if not ('id_rsa' in volume_keys and 'id_rsa.pub' in volume_keys):
                print('Generating a new pair of SSH keys')
subprocess.run(['ssh-keygen -b 4096 -t rsa -f {}/id_rsa -q -N ""'.format(ssh_dir)], shell = True)
shutil.copyfile('{}/id_rsa'.format(ssh_dir), '{}/id_rsa'.format(keys_dir))
shutil.copymode('{}/id_rsa'.format(ssh_dir), '{}/id_rsa'.format(keys_dir))
shutil.copyfile('{}/id_rsa.pub'.format(ssh_dir), '{}/id_rsa.pub'.format(keys_dir))
shutil.copymode('{}/id_rsa.pub'.format(ssh_dir), '{}/id_rsa.pub'.format(keys_dir))
else:
print('Copying them from keys volume')
shutil.copyfile('{}/id_rsa'.format(keys_dir), '{}/id_rsa'.format(ssh_dir))
shutil.copymode('{}/id_rsa'.format(keys_dir), '{}/id_rsa'.format(ssh_dir))
shutil.copyfile('{}/id_rsa.pub'.format(keys_dir), '{}/id_rsa.pub'.format(ssh_dir))
shutil.copymode('{}/id_rsa.pub'.format(keys_dir), '{}/id_rsa.pub'.format(ssh_dir))
                subprocess.run(['ssh-add {}/id_rsa'.format(ssh_dir)], shell = True)
finally:
fcntl.flock(pid, fcntl.LOCK_UN)
try:
if os.getenv("SSH_AUTH_SOCK", None):
generate_ssh_keys()
except Exception:
pass
# Application definition
JS_3RDPARTY = {}
CSS_3RDPARTY = {}
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'cvat.apps.authentication',
'cvat.apps.documentation',
'cvat.apps.dataset_manager',
'cvat.apps.engine',
'cvat.apps.git',
'cvat.apps.restrictions',
'cvat.apps.lambda_manager',
'django_rq',
'compressor',
'cacheops',
'sendfile',
'dj_pagination',
'revproxy',
'rules',
'rest_framework',
'rest_framework.authtoken',
'django_filters',
'drf_yasg',
'rest_auth',
'django.contrib.sites',
'allauth',
'allauth.account',
'corsheaders',
'allauth.socialaccount',
'rest_auth.registration'
]
SITE_ID = 1
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.IsAuthenticated',
],
'DEFAULT_AUTHENTICATION_CLASSES': [
'cvat.apps.authentication.auth.TokenAuthentication',
'cvat.apps.authentication.auth.SignatureAuthentication',
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.BasicAuthentication'
],
'DEFAULT_VERSIONING_CLASS':
# Don't try to use URLPathVersioning. It will give you /api/{version}
# in path and '/api/docs' will not collapse similar items (flat list
# of all possible methods isn't readable).
'rest_framework.versioning.NamespaceVersioning',
# Need to add 'api-docs' here as a workaround for include_docs_urls.
'ALLOWED_VERSIONS': ('v1', 'api-docs'),
'DEFAULT_PAGINATION_CLASS':
'cvat.apps.engine.pagination.CustomPagination',
'PAGE_SIZE': 10,
'DEFAULT_FILTER_BACKENDS': (
'rest_framework.filters.SearchFilter',
'django_filters.rest_framework.DjangoFilterBackend',
'rest_framework.filters.OrderingFilter'),
# Disable default handling of the 'format' query parameter by REST framework
'URL_FORMAT_OVERRIDE': 'scheme',
'DEFAULT_THROTTLE_CLASSES': [
'rest_framework.throttling.AnonRateThrottle',
],
'DEFAULT_THROTTLE_RATES': {
'anon': '100/hour',
},
}
REST_AUTH_REGISTER_SERIALIZERS = {
'REGISTER_SERIALIZER': 'cvat.apps.restrictions.serializers.RestrictedRegisterSerializer'
}
if os.getenv('DJANGO_LOG_VIEWER_HOST'):
INSTALLED_APPS += ['cvat.apps.log_viewer']
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
# FIXME
# 'corsheaders.middleware.CorsPostCsrfMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'dj_pagination.middleware.PaginationMiddleware',
]
UI_URL = ''
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
]
ROOT_URLCONF = 'cvat.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'cvat.wsgi.application'
# Django Auth
DJANGO_AUTH_TYPE = 'BASIC'
DJANGO_AUTH_DEFAULT_GROUPS = []
LOGIN_URL = 'login'
LOGIN_REDIRECT_URL = '/'
AUTH_LOGIN_NOTE = '<p>Not registered yet? <a href="/auth/register">Register here</a>.</p>'
AUTHENTICATION_BACKENDS = [
'rules.permissions.ObjectPermissionBackend',
'django.contrib.auth.backends.ModelBackend'
]
# https://github.com/pennersr/django-allauth
ACCOUNT_EMAIL_VERIFICATION = 'none'
# Django-RQ
# https://github.com/rq/django-rq
RQ_QUEUES = {
'default': {
'HOST': 'localhost',
'PORT': 6379,
'DB': 0,
'DEFAULT_TIMEOUT': '4h'
},
'low': {
'HOST': 'localhost',
'PORT': 6379,
'DB': 0,
'DEFAULT_TIMEOUT': '24h'
}
}
NUCLIO = {
'SCHEME': 'http',
'HOST': 'localhost',
'PORT': 8070,
'DEFAULT_TIMEOUT': 120
}
RQ_SHOW_ADMIN_LINK = True
RQ_EXCEPTION_HANDLERS = ['cvat.apps.engine.views.rq_handler']
# JavaScript and CSS compression
# https://django-compressor.readthedocs.io
COMPRESS_CSS_FILTERS = [
'compressor.filters.css_default.CssAbsoluteFilter',
'compressor.filters.cssmin.rCSSMinFilter'
]
COMPRESS_JS_FILTERS = []  # No compression for JS files (compression mangled template literals)
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Cache DB access (e.g. for engine.task.get_frame)
# https://github.com/Suor/django-cacheops
CACHEOPS_REDIS = {
'host': 'localhost', # redis-server is on same machine
'port': 6379, # default redis port
'db': 1, # SELECT non-default redis database
}
CACHEOPS = {
# Automatically cache any Task.objects.get() calls for 15 minutes
# This also includes .first() and .last() calls.
'engine.task': {'ops': 'get', 'timeout': 60*15},
# Automatically cache any Job.objects.get() calls for 15 minutes
# This also includes .first() and .last() calls.
'engine.job': {'ops': 'get', 'timeout': 60*15},
}
CACHEOPS_DEGRADE_ON_FAILURE = True
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = os.getenv('TZ', 'Etc/UTC')
USE_I18N = True
USE_L10N = True
USE_TZ = True
CSRF_COOKIE_NAME = "csrftoken"
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
os.makedirs(STATIC_ROOT, exist_ok=True)
DATA_ROOT = os.path.join(BASE_DIR, 'data')
os.makedirs(DATA_ROOT, exist_ok=True)
MEDIA_DATA_ROOT = os.path.join(DATA_ROOT, 'data')
os.makedirs(MEDIA_DATA_ROOT, exist_ok=True)
TASKS_ROOT = os.path.join(DATA_ROOT, 'tasks')
os.makedirs(TASKS_ROOT, exist_ok=True)
SHARE_ROOT = os.path.join(BASE_DIR, 'share')
os.makedirs(SHARE_ROOT, exist_ok=True)
MODELS_ROOT = os.path.join(DATA_ROOT, 'models')
os.makedirs(MODELS_ROOT, exist_ok=True)
LOGS_ROOT = os.path.join(BASE_DIR, 'logs')
os.makedirs(LOGS_ROOT, exist_ok=True)
MIGRATIONS_LOGS_ROOT = os.path.join(LOGS_ROOT, 'migrations')
os.makedirs(MIGRATIONS_LOGS_ROOT, exist_ok=True)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': '[%(asctime)s] %(levelname)s %(name)s: %(message)s'
}
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'filters': [],
'formatter': 'standard',
},
'server_file': {
'class': 'logging.handlers.RotatingFileHandler',
'level': 'DEBUG',
'filename': os.path.join(BASE_DIR, 'logs', 'cvat_server.log'),
'formatter': 'standard',
'maxBytes': 1024*1024*50, # 50 MB
'backupCount': 5,
},
'logstash': {
'level': 'INFO',
'class': 'logstash.TCPLogstashHandler',
'host': os.getenv('DJANGO_LOG_SERVER_HOST', 'localhost'),
'port': os.getenv('DJANGO_LOG_SERVER_PORT', 5000),
'version': 1,
'message_type': 'django',
}
},
'loggers': {
'cvat.server': {
'handlers': ['console', 'server_file'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'DEBUG'),
},
'cvat.client': {
'handlers': [],
'level': os.getenv('DJANGO_LOG_LEVEL', 'DEBUG'),
},
'revproxy': {
'handlers': ['console', 'server_file'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'DEBUG')
},
'django': {
'handlers': ['console', 'server_file'],
'level': 'INFO',
'propagate': True
}
},
}
if os.getenv('DJANGO_LOG_SERVER_HOST'):
LOGGING['loggers']['cvat.server']['handlers'] += ['logstash']
LOGGING['loggers']['cvat.client']['handlers'] += ['logstash']
DATA_UPLOAD_MAX_MEMORY_SIZE = 100 * 1024 * 1024 # 100 MB
DATA_UPLOAD_MAX_NUMBER_FIELDS = None # this django check disabled
LOCAL_LOAD_MAX_FILES_COUNT = 500
LOCAL_LOAD_MAX_FILES_SIZE = 512 * 1024 * 1024 # 512 MB
DATUMARO_PATH = os.path.join(BASE_DIR, 'datumaro')
sys.path.append(DATUMARO_PATH)
RESTRICTIONS = {
'user_agreements': [],
# this setting limits the number of tasks for the user
'task_limit': None,
    # this setting reduces task visibility to the owner and assignee only
'reduce_task_visibility': False,
# allow access to analytics component to users with the following roles
'analytics_access': (
'engine.role.observer',
'engine.role.annotator',
'engine.role.user',
'engine.role.admin',
),
}
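
# A minimal sketch (assumption, not part of this file): deployments typically
# extend this base module from a local settings file instead of editing it,
# e.g. a hypothetical cvat/settings/local.py:
#
#     from cvat.settings.base import *
#
#     RESTRICTIONS['task_limit'] = 10          # cap tasks per user
#     RQ_QUEUES['default']['HOST'] = 'redis'   # point RQ at a redis host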
# ===== bin/user/weatherlink_live/mappers.py (nickbp/weewx-weatherlink-live @ 2c18b5da, MIT) =====
# Copyright © 2020-2021 Michael Schantl and contributors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Mappings of API to observations
"""
import logging
from typing import Dict, List, Optional
from user.weatherlink_live.packets import NotInPacket, DavisConditionsPacket
from user.weatherlink_live.static import PacketSource, targets
from user.weatherlink_live.static.packets import DataStructureType, KEY_TEMPERATURE, KEY_HUMIDITY, KEY_DEW_POINT, \
KEY_HEAT_INDEX, KEY_WET_BULB, KEY_WIND_DIR, KEY_RAIN_AMOUNT_DAILY, KEY_RAIN_SIZE, KEY_RAIN_RATE, \
KEY_SOLAR_RADIATION, KEY_UV_INDEX, KEY_WIND_CHILL, KEY_THW_INDEX, KEY_THSW_INDEX, KEY_SOIL_MOISTURE, \
KEY_TEMPERATURE_LEAF_SOIL, KEY_LEAF_WETNESS, KEY_TEMPERATURE_INDOOR, KEY_HUMIDITY_INDOOR, KEY_DEW_POINT_INDOOR, \
KEY_HEAT_INDEX_INDOOR, KEY_BARO_ABSOLUTE, KEY_BARO_SEA_LEVEL, KEY_WIND_SPEED, KEY_BATTERY_FLAG
log = logging.getLogger(__name__)
def _parse_option_boolean(opts: list, check_for: str) -> bool:
if len(opts) < 1:
return False
uppercase_opts = [opt.upper() for opt in opts]
uppercase_check_for = check_for.upper()
return uppercase_check_for in uppercase_opts
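
# Example behaviour of the case-insensitive option check (illustrative):
#   _parse_option_boolean(['appTemp'], 'APPTEMP')  -> True
#   _parse_option_boolean([], 'appTemp')           -> False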
class AbstractMapping(object):
def __init__(self, mapping_opts: list, used_map_targets: list,
log_success: bool = False, log_error: bool = True):
self.mapping_opts = mapping_opts
self.log_success = log_success
self.log_error = log_error
self.targets = self.__search_multi_targets(self._map_target_dict, used_map_targets)
self._log("Mapping targets: %s" % repr(self.targets))
def __str__(self):
return "%s[%s]" % (type(self).__name__, self.mapping_opts)
def _log(self, message: str, level: int = logging.DEBUG):
log.log(level, "%s: %s" % (str(self), message))
def _log_mapping_success(self, target: str, value: float = None):
if self.log_success:
self._log("Mapped: %s=%s" % (target, repr(value)))
def _log_mapping_notResponsible(self, message: str):
"""Logged when the mapper doesn't feel responsible for a packet"""
if self.log_success: # because this is part of normal operation
self._log("Mapping not responsible: %s" % message)
def _log_mapping_notInPacket(self):
if self.log_success: # because this is part of normal operation
self._log("Observation not found in packet")
def _parse_option_int(self, opts: list, index: int) -> int:
try:
return int(opts[index])
except IndexError as e:
raise IndexError("Mapping options for mapping %s incomplete: Expected at least %d parameters; got %d" % (
str(self), index + 1, len(opts)
)) from e
except ValueError as e:
raise ValueError("Could not parse mapping option %d for mapping %s: Expected an integer; got %s" % (
index + 1, str(self), repr(opts[index])
)) from e
    def __search_multi_targets(self, available_map_targets_dict: Optional[dict] = None,
                               used_map_targets: Optional[list] = None) -> dict:
        available_map_targets_dict = available_map_targets_dict or {}
        used_map_targets = used_map_targets or []
        if len(available_map_targets_dict) < 1:
            return {}
target_length = min([len(target_list) for target_list in available_map_targets_dict.values()])
for i in range(0, target_length):
map_targets = dict([
(k, v[i]) for k, v in available_map_targets_dict.items()
])
if any([map_target in used_map_targets for map_target in map_targets.values()]):
continue
else:
return map_targets
raise RuntimeError("Mapping %s has all map targets used: %s" % (
str(self), available_map_targets_dict
))
def map(self, packet: DavisConditionsPacket, record: dict):
try:
self._do_mapping(packet, record)
except NotInPacket:
self._log_mapping_notInPacket()
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
pass
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
raise NotImplementedError()
def _set_record_entry(self, record: dict, key: str, value: float = None):
record.update({key: value})
self._log_mapping_success(key, value)
class TMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
self.tx_id = self._parse_option_int(mapping_opts, 0)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
't': targets.TEMP
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
target = self.targets['t']
self._set_record_entry(record, target,
packet.get_observation(KEY_TEMPERATURE, DataStructureType.ISS, self.tx_id))
class THMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
self.tx_id = self._parse_option_int(mapping_opts, 0)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
't': targets.TEMP,
'h': targets.HUM,
'dp': targets.DEW_POINT,
'hi': targets.HEAT_INDEX,
'wb': targets.WET_BULB
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
target_t = self.targets['t']
target_h = self.targets['h']
target_dp = self.targets['dp']
target_hi = self.targets['hi']
target_wb = self.targets['wb']
self._set_record_entry(record, target_t,
packet.get_observation(KEY_TEMPERATURE, DataStructureType.ISS, self.tx_id))
self._set_record_entry(record, target_h,
packet.get_observation(KEY_HUMIDITY, DataStructureType.ISS, self.tx_id))
self._set_record_entry(record, target_dp,
packet.get_observation(KEY_DEW_POINT, DataStructureType.ISS, self.tx_id))
self._set_record_entry(record, target_hi,
packet.get_observation(KEY_HEAT_INDEX, DataStructureType.ISS, self.tx_id))
self._set_record_entry(record, target_wb,
packet.get_observation(KEY_WET_BULB, DataStructureType.ISS, self.tx_id))
class WindMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
self.tx_id = self._parse_option_int(mapping_opts, 0)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
'wind_dir': targets.WIND_DIR,
'wind_speed': targets.WIND_SPEED,
'gust_dir': targets.WIND_GUST_DIR,
'gust_speed': targets.WIND_GUST_SPEED
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
if packet.data_source != PacketSource.WEATHER_PUSH:
self._log_mapping_notResponsible("Not a broadcast packet")
return
target_dir = self.targets['wind_dir']
target_speed = self.targets['wind_speed']
self._set_record_entry(record, target_dir,
packet.get_observation(KEY_WIND_DIR, DataStructureType.ISS, self.tx_id))
self._set_record_entry(record, target_speed,
packet.get_observation(KEY_WIND_SPEED, DataStructureType.ISS, self.tx_id))
class RainMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
# 0: Reserved, 1: 0.01", 2: 0.2 mm, 3: 0.1 mm, 4: 0.001"
self.rain_bucket_sizes = {
1: 0.01,
4: 0.001,
2: (1 / 25.4) * 0.2,
3: (1 / 25.4) * 0.1
}
self.tx_id = self._parse_option_int(mapping_opts, 0)
self.last_daily_rain_count = None
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
'amount': targets.RAIN_AMOUNT,
'rate': targets.RAIN_RATE,
'count': targets.RAIN_COUNT,
'count_rate': targets.RAIN_COUNT_RATE,
'size': targets.RAIN_SIZE,
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
if packet.data_source != PacketSource.WEATHER_PUSH:
self._log_mapping_notResponsible("Not a broadcast packet")
return
target_amount = self.targets['amount']
target_rate = self.targets['rate']
target_count = self.targets['count']
target_rate_count = self.targets['count_rate']
target_size = self.targets['size']
rain_bucket_factor = self.rain_bucket_factor(packet)
self._set_record_entry(record, target_size, rain_bucket_factor)
rain_rate_count = packet.get_observation(KEY_RAIN_RATE, DataStructureType.ISS, self.tx_id)
self._set_record_entry(record, target_rate_count, rain_rate_count)
self._set_record_entry(record, target_rate, self._multiply(rain_rate_count, rain_bucket_factor))
current_daily_rain_count = packet.get_observation(KEY_RAIN_AMOUNT_DAILY, DataStructureType.ISS, self.tx_id)
if current_daily_rain_count is None:
self._log("Daily rain count not in packet. Skipping diff calculation")
return
if self.last_daily_rain_count is None:
self._log("First daily rain value", logging.INFO)
elif self.last_daily_rain_count > current_daily_rain_count:
self._log("Last daily rain (%d) larger than current (%d). Probably reset" % (
self.last_daily_rain_count, current_daily_rain_count), logging.INFO)
self._set_record_entry(record, target_count, current_daily_rain_count)
self._set_record_entry(record, target_amount, self._multiply(current_daily_rain_count, rain_bucket_factor))
else:
count_diff = current_daily_rain_count - self.last_daily_rain_count
self._set_record_entry(record, target_count, count_diff)
self._set_record_entry(record, target_amount, self._multiply(count_diff, rain_bucket_factor))
self.last_daily_rain_count = current_daily_rain_count
@staticmethod
def _multiply(a: Optional[float], b: Optional[float]) -> Optional[float]:
if a is None or b is None:
return None
return a * b
def rain_bucket_factor(self, packet) -> Optional[float]:
rain_bucket_size = packet.get_observation(KEY_RAIN_SIZE, DataStructureType.ISS, self.tx_id)
if rain_bucket_size is None:
return None
try:
return self.rain_bucket_sizes[rain_bucket_size]
except KeyError as e:
            raise KeyError("Unexpected rain bucket size %s" % repr(rain_bucket_size)) from e
class SolarMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
self.tx_id = self._parse_option_int(mapping_opts, 0)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
'solar': targets.SOLAR_RADIATION
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
target = self.targets['solar']
self._set_record_entry(record, target,
packet.get_observation(KEY_SOLAR_RADIATION, DataStructureType.ISS, self.tx_id))
class UvMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
self.tx_id = self._parse_option_int(mapping_opts, 0)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
'uv': targets.UV
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
target = self.targets['uv']
self._set_record_entry(record, target,
packet.get_observation(KEY_UV_INDEX, DataStructureType.ISS, self.tx_id))
class WindChillMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
self.tx_id = self._parse_option_int(mapping_opts, 0)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
'windchill': targets.WINDCHILL
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
target = self.targets['windchill']
self._set_record_entry(record, target,
packet.get_observation(KEY_WIND_CHILL, DataStructureType.ISS, self.tx_id))
class ThwMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
self.is_app_temp = _parse_option_boolean(mapping_opts, 'appTemp')
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
self.tx_id = self._parse_option_int(mapping_opts, 0)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
target_dict = {
'thw': targets.THW,
'app_temp': targets.APPARENT_TEMPERATURE
} if self.is_app_temp else {
'thw': targets.THW
}
return target_dict
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
target = self.targets['thw']
self._set_record_entry(record, target,
packet.get_observation(KEY_THW_INDEX, DataStructureType.ISS, self.tx_id))
if self.is_app_temp:
target_app_temp = self.targets['app_temp']
self._set_record_entry(record, target_app_temp,
packet.get_observation(KEY_THW_INDEX, DataStructureType.ISS, self.tx_id))
class ThswMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
self.is_app_temp = _parse_option_boolean(mapping_opts, 'appTemp')
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
self.tx_id = self._parse_option_int(mapping_opts, 0)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
target_dict = {
'thsw': targets.THSW,
'app_temp': targets.APPARENT_TEMPERATURE
} if self.is_app_temp else {
'thsw': targets.THSW
}
return target_dict
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
target = self.targets['thsw']
self._set_record_entry(record, target,
packet.get_observation(KEY_THSW_INDEX, DataStructureType.ISS, self.tx_id))
if self.is_app_temp:
target_app_temp = self.targets['app_temp']
self._set_record_entry(record, target_app_temp,
packet.get_observation(KEY_THSW_INDEX, DataStructureType.ISS, self.tx_id))
class SoilTempMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
self.tx_id = self._parse_option_int(mapping_opts, 0)
self.sensor = self._parse_option_int(mapping_opts, 1)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
'soil_temp': targets.SOIL_TEMP
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
target = self.targets['soil_temp']
self._set_record_entry(record, target,
packet.get_observation(KEY_TEMPERATURE_LEAF_SOIL % self.sensor,
DataStructureType.LEAF_SOIL, self.tx_id))
class SoilMoistureMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
self.tx_id = self._parse_option_int(mapping_opts, 0)
self.sensor = self._parse_option_int(mapping_opts, 1)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
'soil_moisture': targets.SOIL_MOISTURE
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
target = self.targets['soil_moisture']
self._set_record_entry(record, target,
packet.get_observation(KEY_SOIL_MOISTURE % self.sensor,
DataStructureType.LEAF_SOIL, self.tx_id))
class LeafWetnessMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
self.tx_id = self._parse_option_int(mapping_opts, 0)
self.sensor = self._parse_option_int(mapping_opts, 1)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
'leaf_wetness': targets.LEAF_WETNESS
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
target = self.targets['leaf_wetness']
self._set_record_entry(record, target,
packet.get_observation(KEY_LEAF_WETNESS % self.sensor,
DataStructureType.LEAF_SOIL, self.tx_id))
class THIndoorMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
't': targets.INDOOR_TEMP,
'h': targets.INDOOR_HUM,
'dp': targets.INDOOR_DEW_POINT,
'hi': targets.INDOOR_HEAT_INDEX
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
target_t = self.targets['t']
target_h = self.targets['h']
target_dp = self.targets['dp']
target_hi = self.targets['hi']
self._set_record_entry(record, target_t,
packet.get_observation(KEY_TEMPERATURE_INDOOR, DataStructureType.WLL_TH))
self._set_record_entry(record, target_h,
packet.get_observation(KEY_HUMIDITY_INDOOR, DataStructureType.WLL_TH))
self._set_record_entry(record, target_dp,
packet.get_observation(KEY_DEW_POINT_INDOOR, DataStructureType.WLL_TH))
self._set_record_entry(record, target_hi,
packet.get_observation(KEY_HEAT_INDEX_INDOOR, DataStructureType.WLL_TH))
class BaroMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
'baro_abs': targets.BARO_ABSOLUTE,
'baro_sl': targets.BARO_SEA_LEVEL
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
target_abs = self.targets['baro_abs']
target_sl = self.targets['baro_sl']
self._set_record_entry(record, target_abs,
packet.get_observation(KEY_BARO_ABSOLUTE, DataStructureType.WLL_BARO))
self._set_record_entry(record, target_sl,
packet.get_observation(KEY_BARO_SEA_LEVEL, DataStructureType.WLL_BARO))
class BatteryStatusMapping(AbstractMapping):
def __init__(self, mapping_opts: list, used_map_targets: list, log_success: bool = False, log_error: bool = True):
super().__init__(mapping_opts, used_map_targets, log_success, log_error)
self.tx_id = self._parse_option_int(mapping_opts, 0)
further_opts = mapping_opts[1:]
try:
self.further_targets = [targets.BATTERY_STATUS_NAMED[key] for key in further_opts]
except KeyError as e:
raise KeyError("Invalid battery remap target") from e
@property
def _map_target_dict(self) -> Dict[str, List[str]]:
return {
'battery': targets.BATTERY_STATUS
}
def _do_mapping(self, packet: DavisConditionsPacket, record: dict):
battery_num = self.targets['battery']
self._set_record_entry(record, battery_num,
packet.get_observation(KEY_BATTERY_FLAG, tx=self.tx_id))
for target in self.further_targets:
self._set_record_entry(record, target,
packet.get_observation(KEY_BATTERY_FLAG, tx=self.tx_id))
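
# A minimal usage sketch (assumption: `packet` is a decoded
# DavisConditionsPacket from the WLL driver; the resulting record keys depend
# on the configured observation map):
#
#     mapping = THMapping(['1'], used_map_targets=[], log_success=True)
#     record = {}
#     mapping.map(packet, record)
#     # record now holds the mapped temperature/humidity observations,
#     # e.g. {'outTemp': ..., 'outHumidity': ..., ...}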
# ===== maiusculas.py (carlos-moreno/algorithms @ 1b202dc8, MIT) =====
def maiusculas(frase):
"""
    Return the uppercase letters of the phrase.
"""
result = []
for letra in frase:
if letra.isupper():
result.append(letra)
return ''.join(result)
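
if __name__ == "__main__":
    # Example usage (illustrative; not part of the original file):
    print(maiusculas("Ola Mundo"))  # -> "OM"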
# ===== experiments/budget/MCTS_bot_budget_50.py (ernestvmo/botbowl @ 8b70faf6, Apache-2.0) =====
import botbowl
from botbowl.core import Action, Agent
import numpy as np
from copy import deepcopy
import random
import time
from botbowl.core.model import Team
PRINT = False
IGNORE_IN_GAME = [botbowl.ActionType.PLACE_PLAYER, botbowl.ActionType.END_SETUP, botbowl.ActionType.SETUP_FORMATION_SPREAD,
botbowl.ActionType.SETUP_FORMATION_LINE, botbowl.ActionType.SETUP_FORMATION_WEDGE, botbowl.ActionType.SETUP_FORMATION_ZONE]
class Node:
def __init__(self, action=None, parent=None, C=4):
self.parent = parent
self.children = []
self.action = action
self.evaluations = []
self.C = C
self.n_wins = 0
self.n_sims = 0
def UTC(self, root):
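        # UCB1 value: average score (exploitation) plus an exploration bonus
        # weighted by C; unvisited nodes get +inf so each child is tried once.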
if self.n_sims != 0:
return self.n_wins / self.n_sims + self.C * (np.sqrt(np.log(root.n_sims) / self.n_sims))
else:
return float('inf')
def extract_children(self, game: botbowl.Game):
for action_choice in game.get_available_actions():
for player in action_choice.players:
self.children.append(
Node(Action(action_choice.action_type, player=player), parent=self))
for position in action_choice.positions:
self.children.append(
Node(Action(action_choice.action_type, position=position), parent=self))
if len(action_choice.players) == len(action_choice.positions) == 0:
self.children.append(
Node(Action(action_choice.action_type), parent=self))
return self
class SearchBot(botbowl.Agent):
def __init__(self, name, budget=50, time_budget=5, seed=None):
super().__init__(name)
self.my_team = None
self.budget = budget
self.time_budget = time_budget
self.path = []
self.last_action = None
def new_game(self, game, team):
print("NEW GAME woop woop")
self.my_team = team
def end_game(self, game: botbowl.Game):
# game._end_game()
print("END GAME")
def selection(self, node: Node) -> Node:
return node.children[np.argmax([n.UTC(node) for n in node.children])]
def rollout(self, game: botbowl.Game, node: Node):
step_before_rollout = game.get_step()
if PRINT:
print(
f'condition 1: {not game.state.game_over and len(node.children) == 0}')
while not game.state.game_over and len(node.children) == 0:
action = np.random.choice(
node.extract_children(game).children).action
# if True:
# print('---------------->', action)
if action.action_type != botbowl.ActionType.PLACE_PLAYER:
game.step(action)
win = game.get_winner()
if PRINT:
print(f'winner: {win}')
        if win is None:
            # draw: small negative score so the search prefers outright wins
            score = -1
elif win == self:
score = 10
else:
score = -5
game.revert(step_before_rollout) # not sure if necessary
return score
def expand(self, game: botbowl.Game, node: Node):
game.step(node.action)
self.path.append(node)
node.extract_children(game=game)
def backpropagate(self, score, node: Node):
for n in range(len(self.path)):
self.path[n].n_sims += 1
self.path[n].n_wins += score
node.n_sims += 1
node.n_wins += score
def act(self, game: botbowl.Game):
game_copy = deepcopy(game)
game_copy.enable_forward_model()
game_copy.home_agent.human = True
game_copy.away_agent.human = True
root_step = game_copy.get_step()
root_node = Node()
available_actions = [
elem.action_type for elem in game_copy.get_available_actions()]
if PRINT:
print(available_actions)
# if we only have one action, return it, no need to choose what the best action can be
# if len(available_actions) == 1:
# return Action(available_actions[0])
# handle placing ball randomly on board
if len(available_actions) == 1:
if available_actions[0] == botbowl.ActionType.PLACE_BALL:
if PRINT:
print(
f'positions: {game_copy.get_available_actions()[0].positions}')
return Action(botbowl.ActionType.PLACE_BALL, position=np.random.choice(game.get_available_actions()[0].positions))
# else:
# print(f'single action is: {available_actions[0]}')
# input()
# handle heads or tail
if botbowl.ActionType.HEADS in available_actions or botbowl.ActionType.TAILS in available_actions:
return np.random.choice([Action(botbowl.ActionType.HEADS), Action(botbowl.ActionType.TAILS)])
# handle kick or receive
if botbowl.ActionType.KICK in available_actions or botbowl.ActionType.RECEIVE in available_actions:
# return np.random.choice([Action(botbowl.ActionType.KICK), Action(botbowl.ActionType.RECEIVE)])
return Action(botbowl.ActionType.KICK) # TODO remove
# handle the action to setup the bot team
if botbowl.ActionType.PLACE_PLAYER in available_actions or botbowl.ActionType.END_SETUP in available_actions or botbowl.ActionType.SETUP_FORMATION_SPREAD in available_actions or botbowl.ActionType.SETUP_FORMATION_WEDGE in available_actions:
available_actions.remove(botbowl.ActionType.PLACE_PLAYER)
for elem in game_copy.get_players_on_pitch(team=self.my_team):
return Action(botbowl.ActionType.END_SETUP)
available_actions.remove(botbowl.ActionType.END_SETUP)
return Action(np.random.choice(available_actions))
        ball = game.get_ball()
        if ball is not None and ball.on_ground and botbowl.ActionType.MOVE in available_actions \
                and self.last_action is not None and self.last_action.action_type == botbowl.ActionType.START_MOVE:
return Action(botbowl.ActionType.MOVE, game.get_ball().position,
player=np.random.choice(game.get_players_on_pitch(team=self.my_team)))
root_node.extract_children(game=game_copy)
start = time.time()
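        # Standard MCTS iteration: select a promising child via UCB1, expand
        # it, roll out a random playout, then backpropagate the score along
        # the visited path.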
for i in range(self.budget):
# while time.time() - start < self.time_budget:
# selection of node
node = self.selection(root_node)
self.path = [root_node]
while True:
if node.n_sims == 0:
score = self.rollout(game=game_copy, node=node)
self.backpropagate(score=score, node=node)
break
else:
self.expand(game=game_copy, node=node)
node = self.selection(node)
# if time.time() - start >= self.time_budget:
# break
game_copy.revert(root_step)
self.last_action = root_node.children[np.argmax(
[n.n_wins for n in root_node.children])].action
return self.last_action
# Register the bot to the framework
botbowl.register_bot('MCTS-bot-budget-50', SearchBot)
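
# A minimal sketch of running this bot against the built-in random bot
# (assumption: standard botbowl setup; config and ruleset names may differ
# between botbowl versions):
#
#     config = botbowl.load_config("bot-bowl")
#     ruleset = botbowl.load_rule_set(config.ruleset)
#     arena = botbowl.load_arena(config.arena)
#     home = botbowl.load_team_by_filename("human", ruleset)
#     away = botbowl.load_team_by_filename("human", ruleset)
#     game = botbowl.Game("1", home, away,
#                         botbowl.make_bot("MCTS-bot-budget-50"),
#                         botbowl.make_bot("random"),
#                         config, arena=arena, ruleset=ruleset)
#     game.init()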
# ===== asyncio_gevent/async_to_sync.py (steverice/asyncio-gevent @ 6978ecaa, MIT) =====
from typing import Callable, Optional
from .future_to_greenlet import future_to_greenlet
def async_to_sync(
coroutine: Optional[Callable] = None,
autostart_future: bool = True,
autocancel_future: bool = True,
autokill_greenlet: bool = True,
):
"""
Wrap a coroutine function in a blocking function that spawns a greenlet and blocks until the future is done.
"""
if coroutine is None:
def decorator(coroutine: Callable):
return async_to_sync(
coroutine=coroutine,
autostart_future=autostart_future,
autocancel_future=autocancel_future,
autokill_greenlet=autokill_greenlet,
)
return decorator
def fn(*args, **kwargs):
greenlet = future_to_greenlet(
coroutine(*args, **kwargs),
autostart_future=autostart_future,
autocancel_future=autocancel_future,
autokill_greenlet=autokill_greenlet,
)
greenlet.start()
greenlet.join()
return greenlet.get()
return fn
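
# A minimal usage sketch (assumption: called from gevent-friendly code; the
# coroutine below is illustrative, not part of this module):
#
#     import asyncio
#
#     @async_to_sync
#     async def double(x):
#         await asyncio.sleep(0.1)
#         return x * 2
#
#     assert double(21) == 42  # blocks the calling greenlet until done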
# ===== var/spack/repos/builtin/packages/r-hmisc/package.py (xiki-tempula/spack @ 9d66c05e, ECL-2.0/Apache-2.0/MIT) =====
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RHmisc(RPackage):
"""Contains many functions useful for data analysis, high-level
graphics, utility operations, functions for computing sample size
and power, importing and annotating datasets, imputing missing
values, advanced table making, variable clustering, character
string manipulation, conversion of R objects to LaTeX and html
code, and recoding variables."""
homepage = "http://biostat.mc.vanderbilt.edu/Hmisc"
url = "https://cloud.r-project.org/src/contrib/Hmisc_4.1-1.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/Hmisc"
version('4.2-0', sha256='9e9614673288dd00295f250fa0bf96fc9e9fed692c69bf97691081c1a01411d9')
version('4.1-1', sha256='991db21cdf73ffbf5b0239a4876b2e76fd243ea33528afd88dc968792f281498')
depends_on('r-lattice', type=('build', 'run'))
depends_on('r-survival@2.40-1:', type=('build', 'run'))
depends_on('r-formula', type=('build', 'run'))
depends_on('r-ggplot2@2.2:', type=('build', 'run'))
depends_on('r-latticeextra', type=('build', 'run'))
depends_on('r-acepack', type=('build', 'run'))
depends_on('r-gridextra', type=('build', 'run'))
depends_on('r-data-table', type=('build', 'run'))
depends_on('r-htmltools', type=('build', 'run'))
depends_on('r-base64enc', type=('build', 'run'))
depends_on('r-htmltable@1.11.0:', type=('build', 'run'))
depends_on('r-viridis', type=('build', 'run'))
depends_on('r-cluster', type=('build', 'run'))
depends_on('r-rpart', type=('build', 'run'))
depends_on('r-nnet', type=('build', 'run'))
depends_on('r-foreign', type=('build', 'run'))
depends_on('r-gtable', type=('build', 'run'))
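
# Typical usage with the standard Spack CLI (illustrative):
#
#     spack install r-hmisc@4.2-0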
# ===== sdk/formrecognizer/azure-ai-formrecognizer/tests/test_copy_model.py (ankitarorabit/azure-sdk-for-python @ dd90281c, MIT) =====
# coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import pytest
import functools
from azure.core.exceptions import HttpResponseError
from azure.ai.formrecognizer._generated.models import CopyOperationResult
from azure.ai.formrecognizer import CustomFormModelInfo
from azure.ai.formrecognizer import FormTrainingClient
from testcase import FormRecognizerTest
from preparers import GlobalClientPreparer as _GlobalClientPreparer
from preparers import FormRecognizerPreparer
GlobalClientPreparer = functools.partial(_GlobalClientPreparer, FormTrainingClient)
class TestCopyModel(FormRecognizerTest):
@FormRecognizerPreparer()
@GlobalClientPreparer()
def test_copy_model_none_model_id(self, client, formrecognizer_storage_container_sas_url):
with self.assertRaises(ValueError):
client.begin_copy_model(model_id=None, target={})
@FormRecognizerPreparer()
@GlobalClientPreparer()
def test_copy_model_empty_model_id(self, client, formrecognizer_storage_container_sas_url):
with self.assertRaises(ValueError):
client.begin_copy_model(model_id="", target={})
@FormRecognizerPreparer()
@GlobalClientPreparer()
def test_copy_model_successful(self, client, formrecognizer_storage_container_sas_url, formrecognizer_region, formrecognizer_resource_id):
poller = client.begin_training(formrecognizer_storage_container_sas_url, use_training_labels=False)
model = poller.result()
target = client.get_copy_authorization(resource_region=formrecognizer_region, resource_id=formrecognizer_resource_id)
poller = client.begin_copy_model(model.model_id, target=target)
copy = poller.result()
copied_model = client.get_custom_model(copy.model_id)
self.assertEqual(copy.status, "ready")
self.assertIsNotNone(copy.training_started_on)
self.assertIsNotNone(copy.training_completed_on)
self.assertEqual(target["modelId"], copy.model_id)
self.assertNotEqual(target["modelId"], model.model_id)
self.assertIsNotNone(copied_model)
@FormRecognizerPreparer()
@GlobalClientPreparer()
def test_copy_model_fail(self, client, formrecognizer_storage_container_sas_url, formrecognizer_region, formrecognizer_resource_id):
poller = client.begin_training(formrecognizer_storage_container_sas_url, use_training_labels=False)
model = poller.result()
# give an incorrect region
target = client.get_copy_authorization(resource_region="eastus", resource_id=formrecognizer_resource_id)
with pytest.raises(HttpResponseError) as e:
poller = client.begin_copy_model(model.model_id, target=target)
copy = poller.result()
self.assertIsNotNone(e.value.error.code)
self.assertIsNotNone(e.value.error.message)
@FormRecognizerPreparer()
@GlobalClientPreparer()
def test_copy_model_case_insensitive_region(self, client, formrecognizer_storage_container_sas_url, formrecognizer_region, formrecognizer_resource_id):
poller = client.begin_training(formrecognizer_storage_container_sas_url, use_training_labels=False)
model = poller.result()
# give region all uppercase
target = client.get_copy_authorization(resource_region=formrecognizer_region.upper(), resource_id=formrecognizer_resource_id)
poller = client.begin_copy_model(model.model_id, target=target)
copy = poller.result()
self.assertEqual(copy.status, "ready")
self.assertIsNotNone(copy.training_started_on)
self.assertIsNotNone(copy.training_completed_on)
self.assertEqual(target["modelId"], copy.model_id)
self.assertNotEqual(target["modelId"], model.model_id)
@FormRecognizerPreparer()
@GlobalClientPreparer()
def test_copy_model_fail_bad_model_id(self, client, formrecognizer_storage_container_sas_url, formrecognizer_region, formrecognizer_resource_id):
poller = client.begin_training(formrecognizer_storage_container_sas_url, use_training_labels=False)
model = poller.result()
target = client.get_copy_authorization(resource_region=formrecognizer_region, resource_id=formrecognizer_resource_id)
with self.assertRaises(HttpResponseError):
# give bad model_id
poller = client.begin_copy_model("00000000-0000-0000-0000-000000000000", target=target)
copy = poller.result()
@FormRecognizerPreparer()
@GlobalClientPreparer()
def test_copy_model_transform(self, client, formrecognizer_storage_container_sas_url, formrecognizer_region, formrecognizer_resource_id):
poller = client.begin_training(formrecognizer_storage_container_sas_url, use_training_labels=False)
model = poller.result()
target = client.get_copy_authorization(resource_region=formrecognizer_region, resource_id=formrecognizer_resource_id)
raw_response = []
def callback(response, _, headers):
copy_result = client._deserialize(CopyOperationResult, response)
model_info = CustomFormModelInfo._from_generated(copy_result, target["modelId"])
raw_response.append(copy_result)
raw_response.append(model_info)
poller = client.begin_copy_model(model.model_id, target=target, cls=callback)
copy = poller.result()
actual = raw_response[0]
copy = raw_response[1]
self.assertEqual(copy.training_started_on, actual.created_date_time)
self.assertEqual(copy.status, actual.status)
self.assertEqual(copy.training_completed_on, actual.last_updated_date_time)
self.assertEqual(copy.model_id, target["modelId"])
@FormRecognizerPreparer()
@GlobalClientPreparer()
def test_copy_authorization(self, client, formrecognizer_storage_container_sas_url, formrecognizer_region, formrecognizer_resource_id):
target = client.get_copy_authorization(resource_region="eastus", resource_id=formrecognizer_resource_id)
self.assertIsNotNone(target["modelId"])
self.assertIsNotNone(target["accessToken"])
self.assertIsNotNone(target["expirationDateTimeTicks"])
self.assertEqual(target["resourceRegion"], "eastus")
self.assertEqual(target["resourceId"], formrecognizer_resource_id)
@FormRecognizerPreparer()
@GlobalClientPreparer()
@pytest.mark.live_test_only
def test_copy_continuation_token(self, client, formrecognizer_storage_container_sas_url, formrecognizer_region, formrecognizer_resource_id):
poller = client.begin_training(formrecognizer_storage_container_sas_url, use_training_labels=False)
model = poller.result()
target = client.get_copy_authorization(resource_region=formrecognizer_region, resource_id=formrecognizer_resource_id)
initial_poller = client.begin_copy_model(model.model_id, target=target)
cont_token = initial_poller.continuation_token()
poller = client.begin_copy_model(model.model_id, None, continuation_token=cont_token)
result = poller.result()
self.assertIsNotNone(result)
copied_model = client.get_custom_model(result.model_id)
self.assertIsNotNone(copied_model)
initial_poller.wait() # necessary so azure-devtools doesn't throw assertion error
@FormRecognizerPreparer()
@GlobalClientPreparer()
def test_copy_model_with_labeled_model_name(self, client, formrecognizer_storage_container_sas_url, formrecognizer_region, formrecognizer_resource_id):
poller = client.begin_training(formrecognizer_storage_container_sas_url, use_training_labels=True, model_name="mymodel")
model = poller.result()
target = client.get_copy_authorization(resource_region=formrecognizer_region, resource_id=formrecognizer_resource_id)
poller = client.begin_copy_model(model.model_id, target=target)
copy = poller.result()
copied_model = client.get_custom_model(copy.model_id)
self.assertEqual(copy.status, "ready")
self.assertIsNotNone(copy.training_started_on)
self.assertIsNotNone(copy.training_completed_on)
self.assertEqual(target["modelId"], copy.model_id)
self.assertNotEqual(target["modelId"], model.model_id)
self.assertIsNotNone(copied_model)
self.assertEqual(copied_model.model_name, "mymodel")
@FormRecognizerPreparer()
@GlobalClientPreparer()
def test_copy_model_with_unlabeled_model_name(self, client, formrecognizer_storage_container_sas_url, formrecognizer_region, formrecognizer_resource_id):
poller = client.begin_training(formrecognizer_storage_container_sas_url, use_training_labels=False, model_name="mymodel")
model = poller.result()
target = client.get_copy_authorization(resource_region=formrecognizer_region, resource_id=formrecognizer_resource_id)
poller = client.begin_copy_model(model.model_id, target=target)
copy = poller.result()
copied_model = client.get_custom_model(copy.model_id)
self.assertEqual(copy.status, "ready")
self.assertIsNotNone(copy.training_started_on)
self.assertIsNotNone(copy.training_completed_on)
self.assertEqual(target["modelId"], copy.model_id)
self.assertNotEqual(target["modelId"], model.model_id)
self.assertIsNotNone(copied_model)
self.assertEqual(copied_model.model_name, "mymodel")
@FormRecognizerPreparer()
@GlobalClientPreparer()
def test_copy_model_with_composed_model(self, client, formrecognizer_storage_container_sas_url, formrecognizer_region, formrecognizer_resource_id):
poller_1 = client.begin_training(formrecognizer_storage_container_sas_url, use_training_labels=True, model_name="model1")
model_1 = poller_1.result()
poller_2 = client.begin_training(formrecognizer_storage_container_sas_url, use_training_labels=True, model_name="model2")
model_2 = poller_2.result()
composed_poller = client.begin_create_composed_model([model_1.model_id, model_2.model_id], model_name="composedmodel")
composed_model = composed_poller.result()
target = client.get_copy_authorization(resource_region=formrecognizer_region, resource_id=formrecognizer_resource_id)
poller = client.begin_copy_model(composed_model.model_id, target=target)
copy = poller.result()
copied_model = client.get_custom_model(copy.model_id)
self.assertEqual(copy.status, "ready")
self.assertIsNotNone(copy.training_started_on)
self.assertIsNotNone(copy.training_completed_on)
self.assertEqual(target["modelId"], copy.model_id)
self.assertNotEqual(target["modelId"], composed_model.model_id)
self.assertIsNotNone(copied_model)
self.assertEqual(copied_model.model_name, "composedmodel")
for submodel in copied_model.submodels:
assert submodel.model_id in [model_1.model_id, model_2.model_id]
# ===== source/result/file_3_7.py (lintongtong123/JackokiePapers @ 65a4be9c, MIT) =====
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/3/11 15:23
# @Author : Jackokie Zhao
# @Site : www.jackokie.com
# @File : file_3_7.py
# @Software: PyCharm
# @contact: jackokie@gmail.com
import os
import pickle
import numpy as np
import tensorflow as tf
import matplotlib
from sklearn.manifold import TSNE
matplotlib.use('agg')
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from tensorflow.contrib import layers
num_epoch = 200
batch_size = 1024
learning_rate = 0.01
train_ratio = 0.9
log_dir = './log/'
orig_file_path = '/home/scl1/data/jackokie/RML2016.10a_dict.dat'
[height, width] = [2, 128]
num_channels = 1
num_kernel_1 = 64
num_kernel_2 = 32
hidden_units_1 = 32
hidden_units_2 = 16
dropout = 0.5
num_classes = 7
train_show_step = 100
test_show_step = 1000
seed = 'jackokie'
reg_val_l1 = 0.001
reg_val_l2 = 0.001
def load_data(data_path, input_shape):
""" Load the original data for training...
Parameters:
data_path: The original data path.
        input_shape: The expected sample shape (currently unused; the shape is
            built from the module-level height and width).
Returns:
train_data: Training data structured.
"""
# load the original data.
orig_data = pickle.load(open(data_path, 'rb'), encoding='iso-8859-1')
# Get the set of snr & modulations
mode_snr = list(orig_data.keys())
mods, snrs = [sorted(list(set(x[i] for x in mode_snr))) for i in [0, 1]]
mods.remove('AM-DSB')
mods.remove('WBFM')
mods.remove('8PSK')
mods.remove('QAM16')
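    # RML2016.10a ships 11 modulation classes; removing these four leaves the
    # 7 classes assumed by num_classes = 7.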
# Build the train set.
samples = []
labels = []
samples_snr = []
mod2cate = dict()
cate2mod = dict()
for cate in range(len(mods)):
cate2mod[cate] = mods[cate]
mod2cate[mods[cate]] = cate
for snr in snrs:
for mod in mods:
samples.extend(orig_data[(mod, snr)])
labels.extend(1000 * [mod2cate[mod]])
samples_snr.extend(1000 * [snr])
shape = [len(labels), height, width, 1]
samples = np.array(samples).reshape(shape)
samples_snr = np.array(samples_snr)
labels = np.array(labels)
return samples, labels, mod2cate, cate2mod, snrs, mods, samples_snr
def accuracy_compute(predictions, labels):
"""Return the error rate based on dense predictions and sparse labels.
Parameters:
predictions: The prediction logits matrix.
labels: The real labels of prediction data.
Returns:
accuracy: The predictions' accuracy.
"""
with tf.name_scope('test_accuracy'):
accu = 100 * np.sum(np.argmax(predictions, 1) == labels) / predictions.shape[0]
tf.summary.scalar('test_accuracy', accu)
return accu
def conv(data, kernel_shape, activation, name, dropout=1, regularizer=None, reg_val=0):
""" Convolution layer.
Parameters:
data: The input data.
kernel_shape: The kernel_shape of current convolutional layer.
activation: The activation function.
name: The name of current layer.
dropout: Whether do the dropout work.
regularizer: Whether use the L2 or L1 regularizer.
reg_val: regularizer value.
Return:
conv_out: The output of current layer.
"""
if regularizer == 'L1':
regularizer = layers.l1_regularizer(reg_val)
elif regularizer == 'L2':
regularizer = layers.l2_regularizer(reg_val)
with tf.name_scope(name):
# Convolution layer 1.
with tf.variable_scope('conv_weights', regularizer=regularizer):
conv_weights = tf.Variable(
tf.truncated_normal(kernel_shape, stddev=0.1, dtype=tf.float32))
with tf.variable_scope('conv_bias'):
conv_biases = tf.Variable(
tf.constant(0.0, dtype=tf.float32, shape=[kernel_shape[3]]))
with tf.name_scope('conv'):
conv = tf.nn.conv2d(data, conv_weights, strides=[1, 1, 1, 1], padding='SAME')
with tf.name_scope('activation'):
conv_out = activation(tf.nn.bias_add(conv, conv_biases))
with tf.name_scope('dropout'):
conv_out = tf.nn.dropout(conv_out, dropout)
return conv_out
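# Note (hedged; not part of the original script): a variable_scope's
# `regularizer=` argument only attaches penalties to variables created with
# tf.get_variable, not with tf.Variable as above, and the penalties land in
# the tf.GraphKeys.REGULARIZATION_LOSSES collection rather than in the loss
# itself. Applying them would look roughly like:
#
#     reg_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
#     total_loss = loss + tf.add_n(reg_losses) if reg_losses else loss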
def hidden(data, activation, name, hidden_units, dropout=1, regularizer=None, reg_val=None):
""" Hidden layer.
Parameters:
data: The input data.
activation: The activation function.
name: The layer's name.
hidden_units: Number of hidden_out units.
dropout: Whether do the dropout job.
regularizer: Whether use the L2 or L1 regularizer.
reg_val: regularizer value.
Return:
hidden_out: Output of current layer.
"""
if regularizer == 'L1':
regularizer = layers.l1_regularizer(reg_val)
elif regularizer == 'L2':
regularizer = layers.l2_regularizer(reg_val)
with tf.name_scope(name):
        # Fully connected layer. Note that the '+' operation automatically broadcasts the biases.
with tf.variable_scope('fc_weights', regularizer=regularizer):
input_units = int(data.shape[1])
            fc_weights = tf.Variable(  # fully connected, depth hidden_units.
tf.truncated_normal([input_units, hidden_units],
stddev=0.1, dtype=tf.float32))
with tf.name_scope('fc_bias'):
fc_biases = tf.Variable(
tf.constant(0.0, dtype=tf.float32, shape=[hidden_units]))
with tf.name_scope('activation'):
hidden_out = activation(tf.nn.xw_plus_b(data, fc_weights, fc_biases))
if dropout is not None:
hidden_out = tf.nn.dropout(hidden_out, dropout)
return hidden_out
def cnn_2_model(input_pl, activation=tf.nn.relu, dropout=1):
""" CNN 2 Model in the paper.
Parameters:
input_pl: The input data placeholder.
activation: The activation function.
        dropout: The dropout keep-probability placeholder.
Returns:
logits: The model output value for each category.
"""
kernel1 = [1, 5, num_channels, num_kernel_1]
kernel2 = [2, 7, num_kernel_1, num_kernel_2]
conv1 = conv(input_pl, kernel1, activation, 'conv_1', dropout)
# pool = tf.nn.avg_pool(conv1, ksize=[1, 1, 3, 1], strides=[1, 1, 1, 1], padding='SAME')
conv2 = conv(conv1, kernel2, activation, 'conv_2', dropout)
# Reshape the feature map cuboid into a 2D matrix to feed it to the
# fully connected layers.
flatten = tf.reshape(conv2, [batch_size, width * height * num_kernel_2])
hidden_1 = hidden(flatten, activation, 'hidden_1', hidden_units_1, dropout)
logits = hidden(hidden_1, activation, 'hidden_2', num_classes)
return logits, hidden_1
def eval_in_batches(data, sess, eval_prediction, eval_placeholder, keep_prob):
"""Get all predictions for a dataset by running it in small batches.
Parameters:
data: The evaluation data set.
sess: The session with the graph.
eval_prediction: The evaluation operator, which output the logits.
        eval_placeholder: The placeholder of evaluation data in the graph.
        keep_prob: The dropout keep-probability placeholder (fed 1 for eval).
Returns:
predictions: The eval result of the input data, which has the format
of [size, num_classes]
"""
size = data.shape[0]
if size < batch_size:
raise ValueError("batch size for evals larger than dataset: %d" % size)
predictions = np.ndarray(shape=(size, num_classes), dtype=np.float32)
for begin in range(0, size, batch_size):
end = begin + batch_size
if end <= size:
predictions[begin:end, :] = sess.run(
eval_prediction,
feed_dict={eval_placeholder: data[begin:end, ...],
keep_prob: 1})
else:
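            # Tail batch: the leftover rows do not fill a full batch, so
            # re-run the last batch_size rows and keep only the overlap
            # (batch_predictions[begin - size:] is exactly the new tail).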
batch_predictions = sess.run(
eval_prediction,
feed_dict={eval_placeholder: data[-batch_size:, ...],
keep_prob: 1})
predictions[begin:, :] = batch_predictions[begin - size:, :]
return predictions
def build_data(samples, labels):
""" Build the train and test set.
Parameters:
samples: The whole samples we have.
labels: The samples' labels correspondently.
Returns:
train_data: The train set data.
train_labels: The train data's category labels.
test_data: The test set data.
test_labels: The test data's category labels.
"""
num_samples = len(samples)
indexes = list(range(num_samples))
np.random.shuffle(indexes)
num_train = int(train_ratio * num_samples)
# Get the indexes of train data and test data.
train_indexes = indexes[0:num_train]
test_indexes = indexes[num_train:num_samples]
# Build the train data and test data.
train_data = samples[train_indexes]
train_labels = labels[train_indexes]
test_data = samples[test_indexes]
test_labels = labels[test_indexes]
return train_data, test_data, \
train_labels, test_labels, \
train_indexes, test_indexes
def accuracy_snr(predictions, labels, indexes, snrs, samples_snr):
""" Compute the error rate of difference snr.
Parameters:
predictions:
labels:
indexes:
snrs:
samples_snr:
Returns:
acc_snr
"""
labels = labels.reshape([len(labels), ])
predict_snr = samples_snr[indexes]
acc_snr = dict()
for snr in snrs:
idx = (predict_snr == snr).reshape([len(labels)])
samples_temp = predictions[idx]
labels_temp = labels[idx]
acc_snr[snr] = accuracy_compute(samples_temp, labels_temp)
return acc_snr
def acc_snr_show(snrs, acc_snr, path):
""" Show the train procedure.
Parameters:
sd
Returns:
Hello
"""
# Plot accuracy curve
plt.figure(figsize=[7, 6], dpi=160)
plt.plot(snrs, list(map(lambda x: acc_snr[x], snrs)))
plt.xlabel("信噪比/dB")
plt.ylabel("准确率")
plt.title("不同信噪比下CAE-CNN分类性能")
plt.tight_layout()
plt.savefig(path)
def get_snr_sample(samples, labels, indexes, samples_snr, snr=0):
labels = labels.reshape([len(labels), ])
predict_snr = samples_snr[indexes]
idx = (predict_snr == snr).reshape([len(labels)])
return samples[idx], labels[idx]
def confusion_matrix(predict, labels, num_classes):
""" Show the confusion of predict.
Parameters:
num_classes: The count of different classes.
predict: The predict result of samples.
labels: The real class of the samples.
Returns:
conf_norm: The normalized confusion matrix.
"""
# Compute the count of correct and error samples in each snr.
conf = np.zeros([num_classes, num_classes])
for i in range(0, len(labels)):
j = labels[i]
k = np.argmax(predict[i])
conf[j, k] = conf[j, k] + 1
# Compute the count of correct and error ratio in each snr.
# =====confusion matrix=====.
conf_norm = np.zeros([num_classes, num_classes])
for i in range(0, num_classes):
conf_norm[i, :] = conf[i, :] / np.sum(conf[i, :])
return conf_norm
def plot_confusion_matrix(conf_matrix, labels=[],
title='调制识别混淆矩阵',
cmap=cm.Blues, name=None):
""" Plot the confusion matrix.
    Parameters:
        conf_matrix: The normalized confusion matrix to plot.
        labels: Class labels for the axis ticks.
        title: Figure title.
        cmap: Matplotlib colormap for the cells.
        name: If given, save the figure to this path; otherwise show it.
Returns:
None.
"""
plt.figure(figsize=[7, 6], dpi=160)
plt.imshow(conf_matrix, interpolation='nearest', cmap=cmap, origin='upper')
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(labels))
plt.xticks(tick_marks, labels, rotation=45)
plt.yticks(tick_marks, labels)
plt.ylabel('True label')
plt.xlabel('Predicted label')
plt.tight_layout()
if name is None:
plt.show()
else:
plt.savefig(name)
def main():
# Define the input data.
input_shape = [batch_size, height, width, num_channels]
# Load the train data and test data.
samples, labels, mod2cate, cate2mod, snrs, mods, samples_snr = \
load_data(orig_file_path, input_shape)
train_data, test_data, \
train_labels, test_labels, \
train_indexes, test_indexes = build_data(samples, labels)
# Define the input placeholder.
train_data_node = tf.placeholder(tf.float32, shape=[None, height, width, num_channels])
train_labels_node = tf.placeholder(tf.int64, shape=[None])
keep_prob = tf.placeholder("float")
# eval_data = tf.placeholder(tf.float32, shape=(batch_size, height, width, num_channels))
# Model.
logits, hid = cnn_2_model(train_data_node, tf.nn.relu, keep_prob)
loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=train_labels_node, logits=logits))
# Use simple adam for the optimization.
global_step = tf.Variable(0, name='global_step', trainable=False)
optimizer = tf.train.AdamOptimizer().minimize(loss, global_step=global_step)
# Predictions for the current training minibatch.
train_prediction = tf.nn.softmax(logits)
correct_prediction = tf.equal(tf.argmax(train_prediction, 1), train_labels_node)
with tf.name_scope('accuracy'):
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
tf.summary.scalar('value', accuracy)
saver = tf.train.Saver()
config = tf.ConfigProto(gpu_options=tf.GPUOptions(allow_growth=True))
# Create a local session to run the training.
with tf.Session(config=config) as sess:
# Run all the initializers to prepare the trainable parameters.
tf.global_variables_initializer().run()
print('Initialized!')
# Loop through training steps.
num_train = len(train_labels)
max_step_train = int(num_epoch * num_train / batch_size)
for step in range(max_step_train):
# Compute the offset of the current minibatch in the data.
# Note that we could use better randomization across epochs.
offset = (step * batch_size) % (num_train - batch_size)
batch_data = train_data[offset:(offset + batch_size), ...]
batch_labels = train_labels[offset:(offset + batch_size)]
# This dictionary maps the batch data (as a numpy array) to the
# node in the graph it should be fed to.
feed_dict = {train_data_node: batch_data,
train_labels_node: batch_labels,
keep_prob: 0.5}
# Run the optimizer to update weights.
sess.run(optimizer, feed_dict=feed_dict)
# print some extra information once reach the evaluation frequency
if step % train_show_step == 0:
# fetch some extra nodes' data
loss_step, train_accu = \
sess.run([loss, accuracy],
feed_dict=feed_dict)
# eval_acc = accuracy(predictions, batch_labels, 'train_accuracy')
print('Step: %d(epoch %.2f) loss: %.3f, train_accuracy: %.3f%%' %
(step, float(step) * batch_size / num_train, loss_step, train_accu * 100))
if step % test_show_step == 0: # Test the test set.
test_predictions = eval_in_batches(test_data, sess, train_prediction, train_data_node, keep_prob)
print('Test accuracy: %.3f%% ' % accuracy_compute(test_predictions, test_labels))
test_predicts = eval_in_batches(test_data, sess, train_prediction, train_data_node, keep_prob)
# Plot zero dB confusion matrix.
zero_predict, zero_label = get_snr_sample(test_predicts, test_labels, test_indexes, samples_snr, snr=0)
conf_matrix = confusion_matrix(zero_predict, zero_label, num_classes)
final_mods = ['AM-SSB', 'BPSK', 'CPFSK', 'GFSK', 'PAM4', 'QAM64', 'QPSK']
plot_confusion_matrix(conf_matrix, labels=final_mods, cmap=cm.Greys, title='RF分类混淆矩阵', name='./fig_1.png')
if __name__ == '__main__':
if tf.gfile.Exists(log_dir):
tf.gfile.DeleteRecursively(log_dir)
tf.gfile.MakeDirs(log_dir)
main()
| 35.065646 | 117 | 0.641685 |
134a38233fbed1864a2130e993b80235bad8fce8 | 932 | py | Python | py/py_0679_freefarea.py | lcsm29/project-euler | fab794ece5aa7a11fc7c2177f26250f40a5b1447 | [
"MIT"
] | null | null | null | py/py_0679_freefarea.py | lcsm29/project-euler | fab794ece5aa7a11fc7c2177f26250f40a5b1447 | [
"MIT"
] | null | null | null | py/py_0679_freefarea.py | lcsm29/project-euler | fab794ece5aa7a11fc7c2177f26250f40a5b1447 | [
"MIT"
] | null | null | null | # Solution of;
# Project Euler Problem 679: Freefarea
# https://projecteuler.net/problem=679
#
# Let $S$ be the set consisting of the four letters
# $\{\texttt{`A'},\texttt{`E'},\texttt{`F'},\texttt{`R'}\}$. For $n\ge 0$, let
# $S^*(n)$ denote the set of words of length $n$ consisting of letters
# belonging to $S$. We designate the words $\texttt{FREE}, \texttt{FARE},
# \texttt{AREA}, \texttt{REEF}$ as keywords. Let $f(n)$ be the number of words
# in $S^*(n)$ that contains all four keywords exactly once. This first happens
# for $n=9$, and indeed there is a unique 9 lettered word that contain each of
# the keywords once: $\texttt{FREEFAREA}$So, $f(9)=1$. You are also given that
# $f(15)=72863$. Find $f(30)$.
#
# by lcsm29 http://github.com/lcsm29/project-euler
import timed
def dummy(n):
pass
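def brute_force_f(n):
    # Hedged verification sketch (not part of the original stub): directly
    # count the length-n words over {A, E, F, R} that contain each keyword
    # exactly once, counting overlapping occurrences. Only feasible for
    # small n (4**9 = 262144 words); it should reproduce f(9) = 1.
    from itertools import product

    def occurrences(word, key):
        # Count overlapping occurrences of key inside word.
        return sum(word[i:i + len(key)] == key
                   for i in range(len(word) - len(key) + 1))

    keywords = ('FREE', 'FARE', 'AREA', 'REEF')
    count = 0
    for letters in product('AEFR', repeat=n):
        word = ''.join(letters)
        if all(occurrences(word, k) == 1 for k in keywords):
            count += 1
    return count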
if __name__ == '__main__':
n = 1000
i = 10000
prob_id = 679
timed.caller(dummy, n, i, prob_id)
| 33.285714 | 79 | 0.656652 |
4f7a0cab6d4f0169abeb46234e6f0864d6c7ef54 | 2,169 | py | Python | tests/test_pgbmut.py | intendo/pygbe | 6ac43ebfb3a4d342a380d7938af119cfba587746 | [
"BSD-3-Clause"
] | 36 | 2015-02-17T15:45:23.000Z | 2019-10-28T15:14:23.000Z | tests/test_pgbmut.py | intendo/pygbe | 6ac43ebfb3a4d342a380d7938af119cfba587746 | [
"BSD-3-Clause"
] | 48 | 2016-02-04T22:50:36.000Z | 2019-06-25T17:01:06.000Z | tests/test_pgbmut.py | intendo/pygbe | 6ac43ebfb3a4d342a380d7938af119cfba587746 | [
"BSD-3-Clause"
] | 26 | 2015-05-15T22:14:50.000Z | 2019-02-07T19:00:47.000Z | import os
import pickle
import pytest
import functools
import sys
import atexit
def report_e():
    print('PGBmut sensor test could not run on the GPU because pycuda was not found. The CPU was used instead.')
def report_g():
    print('PGBmut sensor test ran on the GPU.')
def report_c():
    print('PGBmut sensor test ran on the CPU.')
from pygbe.main import main
@pytest.mark.parametrize('key', ['total_elements',
'E_solv_kJ',
'E_coul_kcal',
'E_coul_kJ',
'E_solv_kcal'])
def test_PGB_mut_sensor(key, arc):
results = get_results(arc)
with open('pgbmut.pickle', 'rb') as f:
base_results = pickle.load(f)
if arc == 'gpu':
assert abs(base_results[key] - results[key]) / abs(base_results[key]) < 1e-12
elif arc == 'cpu':
assert abs(base_results[key] - results[key]) / abs(base_results[key]) < 1e-6
def test_pgbmut_iterations(arc):
results = get_results(arc)
with open('pgbmut.pickle', 'rb') as f:
base_results = pickle.load(f)
assert base_results['iterations'] == results['iterations']
@functools.lru_cache(6)
def get_results(arc):
if arc == 'gpu':
try:
import pycuda
if sys.stdout != sys.__stdout__:
atexit.register(report_g)
except ImportError:
if sys.stdout != sys.__stdout__:
atexit.register(report_e)
elif arc == 'cpu':
if sys.stdout != sys.__stdout__:
atexit.register(report_c)
print('Generating results for 1PGBmut example...')
if os.getcwd().rsplit('/', 1)[1] == 'tests':
results = main(['','../examples/1PGBmut_sensor'],
log_output=False,
return_results_dict=True)
elif os.getcwd().rsplit('/', 1)[1] == 'pygbe':
results = main(['','./examples/1PGBmut_sensor'],
log_output=False,
return_results_dict=True)
else:
print("Run tests from either the main repo directory or the tests directory")
return results
| 31.897059 | 109 | 0.578608 |
99bce0b5adda970c23bc14d9cae4bedc06d7fd13 | 19,266 | py | Python | tests/unit_tests/test_ef_version.py | momoneko/ef-open | 6b679bc48922988a4a4ddc1872205d1f1800c192 | [
"Apache-2.0"
] | null | null | null | tests/unit_tests/test_ef_version.py | momoneko/ef-open | 6b679bc48922988a4a4ddc1872205d1f1800c192 | [
"Apache-2.0"
] | null | null | null | tests/unit_tests/test_ef_version.py | momoneko/ef-open | 6b679bc48922988a4a4ddc1872205d1f1800c192 | [
"Apache-2.0"
] | 1 | 2020-11-13T13:26:41.000Z | 2020-11-13T13:26:41.000Z | """
Copyright 2016-2017 Ellation, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import datetime
import os
import StringIO
import unittest
from dateutil.tz import tzutc
from mock import Mock, patch
import context_paths
from botocore.exceptions import ClientError
import ef_version
class TestEFVersion(unittest.TestCase):
"""
TestEFVersion class for ef_version testing.
Setup initializes self in the same manner we initialize ef-context to ensure the appropriate
members are available when testing. This is necessary given the pattern of passing the context
object as a parameter to methods.
"""
def setUp(self):
self.build_number = "000001"
self.commit_hash = "sfasdf10984jhoksfgls89734hd8i4w98sf"
self.env = "test"
self.env_full = "global.testaccount"
self.history = "text"
self.key = "ami-id"
self.location = "https://s3-us-west-2.amazonaws.com/ellation-cx-proto3-static/foo/dist-hash"
self.noprecheck = None
self.parsed_env_full = "global"
self.service = "test-instance"
self.service_name = "test-instance"
self.value = "11111111"
self.mock_version = Mock(name="mocked Version object")
self.service_registry_file = os.path.abspath(os.path.join(os.path.dirname(__file__),
'../test_data/test_service_registry_1.json'))
# Shared or context derived mocks
self.aws_client = Mock(name="mocked aws client")
self.mock_version.value = self.value
self.mock_version.location = self.location
self.service_registry = Mock(name="mocked service registry")
self.service_registry.filespec = self.service_registry_file
self.service_registry.service_record.return_value = {"type": "aws_ec2"}
self.version = Mock(name="mocked version object")
def test_validate_context(self):
"""Verify that a valid instance type returns True"""
self.assertTrue(ef_version.validate_context(self))
def test_validate_context_invalid_key(self):
"""Verify that a invalid key raises an exception"""
self.key = 'ami-i'
with self.assertRaises(SystemExit):
ef_version.validate_context(self)
def test_validate_context_invalid_service(self):
"""Verify that an invalid instance service raises an exception"""
self.service_registry.service_record.return_value = None
with self.assertRaises(SystemExit):
ef_version.validate_context(self)
def test_validate_context_invalid_type(self):
"""Verify that an invalid type raises an exception"""
self.service_registry.service_record.return_value = {"type": "aws_ec"}
with self.assertRaises(SystemExit):
ef_version.validate_context(self)
def test_args_get(self):
"""Test parsing args with all valid values for get"""
args = [self.service, self.key, self.env, "--get", "--sr", "{}".format(self.service_registry_file)]
context = ef_version.handle_args_and_set_context(args)
self.assertEqual(context.env, self.env)
self.assertEqual(context.service_name, self.service_name)
self.assertEqual(context.service_registry.filespec, self.service_registry_file)
def test_args_invalid_env(self):
"""Verify that an invalid environment arg raises an exception"""
args = [self.service, self.key, "invalid_env"]
with self.assertRaises(SystemExit):
ef_version.handle_args_and_set_context(args)
def test_args_get_parse_env_full(self):
"""Test parsing args with all valid values for get using account scoped env"""
args = [self.service, self.key, self.env_full, "--get", "--sr", "{}".format(self.service_registry_file)]
context = ef_version.handle_args_and_set_context(args)
self.assertEqual(context.env, self.parsed_env_full)
self.assertEqual(context.service_name, self.service_name)
self.assertEqual(context.service_registry.filespec, self.service_registry_file)
def test_args_get_force_env_full(self):
"""Test parsing args with all valid values for get and add --env_full flag"""
args = [self.service, self.key, self.env_full, "--get", "--sr", "{}".format(self.service_registry_file), "--force_env_full"]
context = ef_version.handle_args_and_set_context(args)
self.assertEqual(context.env, self.env_full)
self.assertEqual(context.service_name, self.service_name)
self.assertEqual(context.service_registry.filespec, self.service_registry_file)
def test_args_get_force_env_full_env_not_account_scoped(self):
"""Test parsing args with all valid values for get and add --env_full flag"""
args = [self.service, self.key, self.env, "--get", "--sr", "{}".format(self.service_registry_file), "--force_env_full"]
context = ef_version.handle_args_and_set_context(args)
self.assertEqual(context.env, self.env)
self.assertEqual(context.service_name, self.service_name)
self.assertEqual(context.service_registry.filespec, self.service_registry_file)
def test_args_rollback(self):
"""Test parsing args with all valid values for --rollback"""
args = [self.service, self.key, self.env, "--rollback", "--sr", "{}".format(self.service_registry_file)]
context = ef_version.handle_args_and_set_context(args)
self.assertEqual(context.rollback, True)
self.assertEqual(context.env, self.env)
self.assertEqual(context.service_name, self.service_name)
def test_args_rollback_to(self):
"""Test parsing args with all valid values for --rollback-to"""
args = [self.service, self.key, self.env, "--rollback-to", self.value, "--sr", "{}".format(self.service_registry_file)]
context = ef_version.handle_args_and_set_context(args)
self.assertEqual(context.rollback_to, self.value)
self.assertEqual(context.env, self.env)
self.assertEqual(context.service_name, self.service_name)
def test_args_set(self):
"""Test parsing args with all valid values for set"""
args = [self.service, self.key, self.env, "--set", self.value, "--location", self.location, "--build",
self.build_number, "--commit_hash", self.commit_hash, "--sr", "{}".format(self.service_registry_file)]
context = ef_version.handle_args_and_set_context(args)
self.assertEqual(context.build_number, self.build_number)
self.assertEqual(context.commit_hash, self.commit_hash)
self.assertEqual(context.env, self.env)
self.assertEqual(context.location, self.location)
self.assertEqual(context.service_name, self.service_name)
self.assertEqual(context.service_registry.filespec, self.service_registry_file)
self.assertEqual(context.value, self.value)
def test_args_history(self):
"""Test parsing args with all valid values for history"""
args = [self.service, self.key, self.env, "--history", self.history, "--sr", "{}".format(self.service_registry_file)]
context = ef_version.handle_args_and_set_context(args)
self.assertEqual(context.env, self.env)
self.assertEqual(context.history, self.history)
self.assertEqual(context.service_name, self.service_name)
self.assertEqual(context.service_registry.filespec, self.service_registry_file)
@patch('ef_version.isfunction')
def test_noprecheck(self, mock_isfunction):
"""Test precheck resolves the correct precheck method"""
mock_isfunction.return_value = True
self.noprecheck = True
self.assertTrue(ef_version.precheck(self))
mock_isfunction.assert_not_called()
@patch('ef_version.isfunction')
@patch('ef_version.globals')
def test_precheck(self, mock_globals, mock_isfunction):
"""Test precheck returns correct method"""
mock_isfunction.return_value = True
mock_precheck_method = Mock(name='mock precheck method')
mock_precheck_method.return_value = True
mock_globals.return_value = {"precheck_ami_id": mock_precheck_method}
self.assertTrue(ef_version.precheck(self))
mock_precheck_method.assert_called_once()
@patch('ef_version.Version')
@patch('urllib2.urlopen')
def test_precheck_dist_hash(self, mock_urlopen, mock_version_object):
"""Test precheck of dist hash version"""
mock_version_object.return_value = self.mock_version
mock_s3_response = Mock(name='mock s3 response')
mock_s3_response.getcode.return_value = 200
mock_s3_response.read.return_value = self.value
mock_urlopen.return_value = mock_s3_response
self.assertTrue(ef_version.precheck_dist_hash(self))
@patch('ef_version.Version')
@patch('urllib2.urlopen')
def test_precheck_dist_hash_s3_404(self, mock_urlopen, mock_version_object):
"""Test precheck to validate error thrown on a Non-200 response from s3"""
mock_version_object.return_value = self.mock_version
mock_s3_response = Mock(name='mock s3 response')
mock_s3_response.getcode.return_value = 404
mock_urlopen.return_value = mock_s3_response
with self.assertRaises(IOError):
ef_version.precheck_dist_hash(self)
@patch('ef_version.Version')
@patch('urllib2.urlopen')
def test_precheck_dist_hash_urllib_error(self, mock_urlopen, mock_version_object):
"""Test preckek to validate error thrown on url error"""
mock_version_object.return_value = self.mock_version
mock_s3_response = Mock(name='mock s3 response')
mock_urlopen.return_value = mock_s3_response
mock_urlopen.side_effect = IOError
with self.assertRaises(IOError):
ef_version.precheck_dist_hash(self)
@patch('ef_version.Version')
def test_precheck_dist_hash_version_none(self, mock_version_object):
"""Test precheck_dist_hash when current version is none"""
response = {"Error": {"Code": "NoSuchKey"}}
mock_version_object.side_effect = ClientError(response, "Get Object")
self.assertTrue(ef_version.precheck_dist_hash(self))
class TestVersion(unittest.TestCase):
def setUp(self):
version_body = StringIO.StringIO("ami-0f24a2bf2a5fb4090")
version_id = "a8Hwa86edlxkc24HLI_FvCp5J4eJZerK"
last_modified = datetime.datetime(2018, 11, 6, 6, 10, 4, tzinfo=tzutc())
self.metadata_fields = {
'build_number': "43",
'commit_hash': "0f24a2bf2a5fb4090",
'location': "in space",
'modified_by': "random_dude",
'status': "stable",
}
self.version_attrs = {
'last_modified': last_modified.strftime("%Y-%m-%dT%H:%M:%S%Z"),
'value': version_body.getvalue()
}
self.version_attrs.update(self.metadata_fields)
self.object_version = {
"AcceptRanges": "bytes",
"ContentType": "binary/octet-stream",
"LastModified": last_modified,
"ContentLength": 21,
"ContentEncoding": "utf-8",
"VersionId": version_id,
"ETag": "\"8c6a54dc6a1c907f7664f11a7b369d7b\"",
"Metadata": {
"ef-location": self.version_attrs["location"],
"ef-version-status": self.version_attrs["status"],
"ef-buildnumber": self.version_attrs["build_number"],
"ef-commithash": self.version_attrs["commit_hash"],
"ef-modifiedby": self.version_attrs["modified_by"]
},
"Body": version_body
}
def test_version_init(self):
"""
    Test that a Version object is built correctly
"""
version = ef_version.Version(self.object_version)
for attr, expected in self.version_attrs.items():
actual = getattr(version, attr)
self.assertEqual(
expected, actual,
msg="{attr}: expecting {expected!r}, got {actual!r}".format(**locals()))
def test_version_init_no_metadata(self):
"""
    Test that a Version object is built correctly with missing metadata
"""
self.object_version["Metadata"] = {}
# clear out the metadata fields in version_attrs
for field in self.metadata_fields:
self.version_attrs[field] = ""
version = ef_version.Version(self.object_version)
for attr, expected in self.version_attrs.items():
actual = getattr(version, attr)
self.assertEqual(
expected, actual,
msg="{attr}: expecting {expected!r}, got {actual!r}".format(**locals()))
class TestEFVersionModule(unittest.TestCase):
def setUp(self):
self.versions = map(
ef_version.Version, [
{
u'Body': StringIO.StringIO("ami-0f85b8e7ca0788951"),
u'LastModified': datetime.datetime(2019, 2, 4, 5, 44, 26),
u'VersionId': 'CZmfHynYjwlH92LlOa1Oc7EurAfT_ZaM',
u'Metadata': {
'ef-buildnumber': '258',
'ef-commithash': '338432d7e23e93dcf957e62598800468a17ff6d1',
'ef-location': '',
'ef-modifiedby': 'arn:aws:iam::097710525421:user/ci',
'ef-version-status': 'stable'}},
{
u'Body': StringIO.StringIO("ami-0f85b8e7ca0788951"),
u'LastModified': datetime.datetime(2019, 2, 4, 5, 35, 6),
u'VersionId': '2WndwRGdS.nolumBcURZFZsMLhSKvfYi',
u'Metadata': {
'ef-buildnumber': '258',
'ef-commithash': '338432d7e23e93dcf957e62598800468a17ff6d1',
'ef-location': '',
'ef-modifiedby': 'arn:aws:iam::097710525421:user/ci',
'ef-version-status': 'undefined'}},
{
u'Body': StringIO.StringIO("ami-07106419da94f1568"),
u'LastModified': datetime.datetime(2019, 2, 1, 6, 4, 53),
u'VersionId': 'bYdch7nPOWINnzdYPm8lZ9_r_9LTgyFt',
u'Metadata': {
'ef-buildnumber': '257',
'ef-commithash': '338432d7e23e93dcf957e62598800468a17ff6d1',
'ef-location': '',
'ef-modifiedby': 'arn:aws:iam::097710525421:user/ci',
'ef-version-status': 'stable'}},
{
u'Body': StringIO.StringIO("ami-07106419da94f1568"),
u'LastModified': datetime.datetime(2019, 2, 1, 5, 54, 37),
u'VersionId': 'zgT2aQliuYKqBqbFNme3dl3l_sAzTni8',
u'Metadata': {
'ef-buildnumber': '257',
'ef-commithash': '338432d7e23e93dcf957e62598800468a17ff6d1',
'ef-location': '',
'ef-modifiedby': 'arn:aws:iam::097710525421:user/ci',
'ef-version-status': 'undefined'}},
{
u'Body': StringIO.StringIO("ami-053bd53d8210575aa"),
u'LastModified': datetime.datetime(2019, 1, 31, 5, 44, 16),
u'VersionId': '1ao3Qo4.jj_CZbidXp9oaP4yOpmpq_Se',
u'Metadata': {
'ef-buildnumber': '256',
'ef-commithash': '338432d7e23e93dcf957e62598800468a17ff6d1',
'ef-location': '',
'ef-modifiedby': 'arn:aws:iam::097710525421:user/ci',
'ef-version-status': 'stable'}},
{
u'Body': StringIO.StringIO("ami-053bd53d8210575aa"),
u'LastModified': datetime.datetime(2019, 1, 31, 5, 33, 24),
u'VersionId': 'b0tRbmuz7HsSMzrPaDxwUORqdQMisi9h',
u'Metadata': {
'ef-buildnumber': '256',
'ef-commithash': '338432d7e23e93dcf957e62598800468a17ff6d1',
'ef-location': '',
'ef-modifiedby': 'arn:aws:iam::097710525421:user/ci',
'ef-version-status': 'undefined'}},
])
@patch('ef_version.cmd_set')
@patch('ef_version.get_versions')
def test_cmd_rollback_latest_stable(self, mock_get_versions, mock_cmd_set):
'''Test cmd_rollback to the latest stable version'''
context = Mock(ef_version.EFVersionContext)
context.env = "alpha0"
context.key = "ami-id"
context.limit = 10
context.service_name = "playheads"
context.rollback = True
rollback_stable_target = self.versions[3]
mock_get_versions.return_value = self.versions
ef_version.cmd_rollback(context)
self.assertEqual(context.stable, True)
self.assertEqual(context.value, rollback_stable_target.value)
self.assertEqual(context.build_number, rollback_stable_target.build_number)
self.assertEqual(context.commit_hash, rollback_stable_target.commit_hash)
self.assertEqual(context.location, rollback_stable_target.location)
mock_get_versions.assert_called_with(context)
mock_cmd_set.assert_called_once_with(context)
@patch('ef_version.cmd_set')
@patch('ef_version.get_versions')
def test_cmd_rollback_to_ami(self, get_versions, cmd_set):
'''Test cmd_rollback to a specific ami version'''
ami_id = "ami-abcdefgh12345678"
desired_version = ef_version.Version({
u'Body': StringIO.StringIO(ami_id),
u'LastModified': datetime.datetime(2019, 1, 30, 5, 33, 24),
u'VersionId': 'b0tRbmuz7HsSMzrPaDxwUORqdQMisi9h',
u'Metadata': {
'ef-buildnumber': '256',
'ef-commithash': '338432d7e23e93dcf957e62598800468a17ff6d1',
'ef-location': '',
'ef-modifiedby': 'arn:aws:iam::097710525421:user/ci',
'ef-version-status': 'stable'}
})
context = Mock(ef_version.EFVersionContext)
context.env = "alpha0"
context.key = "ami-id"
context.limit = 10
context.service_name = "playheads"
context.rollback_to = ami_id
# inserting at the end so the code doesn't take the first one
get_versions.return_value = self.versions + [desired_version]
ef_version.cmd_rollback_to(context)
self.assertEqual(context.stable, True)
self.assertEqual(context.value, desired_version.value)
self.assertEqual(context.build_number, desired_version.build_number)
self.assertEqual(context.commit_hash, desired_version.commit_hash)
self.assertEqual(context.location, desired_version.location)
get_versions.assert_called_once_with(context)
cmd_set.assert_called_once_with(context)
@patch('ef_version.cmd_set')
@patch('ef_version.get_versions')
def test_cmd_rollback_to_unknown_ami(self, get_versions, cmd_set):
'''Test cmd_rollback_to fails on missing ami_id in history'''
ami_id = "ami-abcdefgh12345678"
context = Mock(ef_version.EFVersionContext)
context.env = "alpha0"
context.key = "ami-id"
context.limit = 10
context.service_name = "playheads"
context.rollback_to = ami_id
get_versions.return_value = self.versions
with self.assertRaises(SystemExit) as e:
ef_version.cmd_rollback_to(context)
get_versions.assert_called_once_with(context)
cmd_set.assert_not_called()
| 43.294382 | 129 | 0.677307 |
eaec47195410207093b2bee34ffdc05cc3214bb8 | 8,447 | py | Python | softlearning/environments/adapters/softlearning_env.py | abhishekunique/RND-ashwin | f8bcf3c593df2dacc0efba0875533be71ccb5011 | [
"MIT"
] | null | null | null | softlearning/environments/adapters/softlearning_env.py | abhishekunique/RND-ashwin | f8bcf3c593df2dacc0efba0875533be71ccb5011 | [
"MIT"
] | 7 | 2020-09-25T22:41:46.000Z | 2022-03-12T00:37:25.000Z | softlearning/environments/adapters/softlearning_env.py | abhishekunique/RND-ashwin | f8bcf3c593df2dacc0efba0875533be71ccb5011 | [
"MIT"
] | null | null | null | """Implements the SoftlearningEnv that is usable in softlearning algorithms."""
from abc import ABCMeta, abstractmethod
from collections import defaultdict, OrderedDict
import copy
import numpy as np
import tensorflow as tf
from gym import spaces
class SoftlearningEnv(metaclass=ABCMeta):
"""The abstract Softlearning environment class.
It's an abstract class defining the interface an adapter needs to implement
in order to function with softlearning algorithms. It closely follows the
gym.Env, yet that may not be the case in the future.
The main API methods that users of this class need to know are:
step
reset
render
close
seed
And set the following attributes:
action_space: The Space object corresponding to valid actions
observation_space: The Space object corresponding to valid observations
reward_range: A tuple corresponding to the min and max possible rewards
The methods are accessed publicly as "step", "reset", etc.. The
non-underscored versions are wrapper methods to which we may add
functionality over time.
"""
# Set this in SOME subclasses
metadata = {'render.modes': []}
reward_range = (-float('inf'), float('inf'))
spec = None
# Set these in ALL subclasses
action_space = None
observation_space = None
def __init__(self, domain, task, goal_keys=(), *args, **kwargs):
"""Initialize an environment based on domain and task.
Keyword Arguments:
            domain -- Name of the environment domain.
            task -- Name of the task within the domain.
            *args -- Extra positional arguments for the underlying environment.
            **kwargs -- Extra keyword arguments for the underlying environment.
"""
self._domain = domain
self._task = task
self.goal_keys = goal_keys
@property
def observation_space(self):
return self._observation_space
@property
def observation_shape(self):
if not isinstance(self.observation_space, spaces.Dict):
raise NotImplementedError(type(self.observation_space))
return OrderedDict((
(key, tf.TensorShape(space.shape))
for key, space in self.observation_space.spaces.items()
))
@property
def action_space(self, *args, **kwargs):
return self._action_space
@property
def action_shape(self, *args, discrete=False):
action_shape = tf.TensorShape(self.action_space.shape)
if len(action_shape) > 1:
raise NotImplementedError(
"Shape of the action space ({}) is not flat, make sure to"
" check the implemenation.".format(self.action_space))
return action_shape
@abstractmethod
def step(self, action):
"""Run one timestep of the environment's dynamics. When end of
episode is reached, you are responsible for calling `reset()`
to reset this environment's state.
Accepts an action and returns a tuple (observation, reward, done, info).
Args:
action (object): an action provided by the environment
Returns:
observation (object): agent's observation of the current environment
reward (float) : amount of reward returned after previous action
done (boolean): whether the episode has ended, in which case further step() calls will return undefined results
info (dict): contains auxiliary diagnostic information (helpful for debugging, and sometimes learning)
"""
raise NotImplementedError
@abstractmethod
def reset(self):
"""Resets the state of the environment and returns an initial observation.
Returns: observation (object): the initial observation of the
space.
"""
raise NotImplementedError
def _filter_observation(self, observation):
observation = type(observation)([
(name, value)
for name, value in observation.items()
if name in (*self.observation_keys, *self.goal_keys)
])
return observation
@abstractmethod
def render(self, mode='human'):
"""Renders the environment.
The set of supported modes varies per environment. (And some
environments do not support rendering at all.) By convention,
if mode is:
- human: render to the current display or terminal and
return nothing. Usually for human consumption.
- rgb_array: Return an numpy.ndarray with shape (x, y, 3),
representing RGB values for an x-by-y pixel image, suitable
for turning into a video.
- ansi: Return a string (str) or StringIO.StringIO containing a
terminal-style text representation. The text can include newlines
and ANSI escape sequences (e.g. for colors).
Note:
Make sure that your class's metadata 'render.modes' key includes
the list of supported modes. It's recommended to call super()
in implementations to use the functionality of this method.
Args:
mode (str): the mode to render with
close (bool): close all open renderings
Example:
class MyEnv(Env):
metadata = {'render.modes': ['human', 'rgb_array']}
def render(self, mode='human'):
if mode == 'rgb_array':
return np.array(...) # return RGB frame suitable for video
                elif mode == 'human':
... # pop up a window and render
else:
super(MyEnv, self).render(mode=mode) # just raise an exception
"""
raise NotImplementedError
def render_rollouts(self, paths):
"""Renders past rollouts of the environment."""
if hasattr(self._env, 'render_rollouts'):
return self._env.render_rollouts(paths)
unwrapped_env = self.unwrapped
if hasattr(unwrapped_env, 'render_rollouts'):
return unwrapped_env.render_rollouts(paths)
@abstractmethod
def seed(self, seed=None):
"""Sets the seed for this env's random number generator(s).
Note:
Some environments use multiple pseudorandom number generators.
We want to capture all such seeds used in order to ensure that
there aren't accidental correlations between multiple generators.
Returns:
list<bigint>: Returns the list of seeds used in this env's random
number generators. The first value in the list should be the
"main" seed, or the value which a reproducer should pass to
'seed'. Often, the main seed equals the provided 'seed', but
this won't be true if seed=None, for example.
"""
pass
def copy(self):
"""Create a deep copy the environment."""
return copy.deepcopy(self)
@property
@abstractmethod
def unwrapped(self):
"""Unwrap this env.
Returns:
gym.Env: The base non-wrapped gym.Env instance
"""
return self._env
def __str__(self):
return '<{type_name}(domain={domain}, task={task}) <{env}>>'.format(
type_name=type(self).__name__,
domain=self._domain,
task=self._task,
env=self._env)
def get_path_infos(self, paths, *args, **kwargs):
"""Aggregate diagnostics from the environment infos.
TODO(hartikainen): Figure out better format for logging general
environment infos.
"""
results = defaultdict(list)
for path in paths:
for info_key, info_values in path.get('infos', {}).items():
info_values = np.array(info_values)
results[info_key + '-first'].append(info_values[0])
results[info_key + '-last'].append(info_values[-1])
results[info_key + '-mean'].append(np.mean(info_values))
results[info_key + '-median'].append(np.median(info_values))
if np.array(info_values).dtype != np.dtype('bool'):
results[info_key + '-range'].append(np.ptp(info_values))
aggregated_results = {}
for key, value in results.items():
aggregated_results[key + '-mean'] = np.mean(value)
return aggregated_results
def __getattr__(self, name):
if name == '_env':
return self.__getattribute__('_env')
return getattr(self._env, name)
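# Hedged sketch (not part of the original module) of the minimal surface a
# concrete adapter would provide; `underlying_env` is a placeholder for
# whatever simulator the adapter wraps:
#
#     class MinimalAdapter(SoftlearningEnv):
#         def __init__(self, domain, task, underlying_env, **kwargs):
#             super(MinimalAdapter, self).__init__(domain, task, **kwargs)
#             self._env = underlying_env
#             self._observation_space = underlying_env.observation_space
#             self._action_space = underlying_env.action_space
#
#         def step(self, action):
#             return self._env.step(action)
#
#         def reset(self):
#             return self._env.reset()
#
#         def render(self, mode='human'):
#             return self._env.render(mode=mode)
#
#         def seed(self, seed=None):
#             return self._env.seed(seed)
#
#         @property
#         def unwrapped(self):
#             return self._env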
| 34.904959 | 123 | 0.623061 |
3dbbed9e49b0f5d602e74d7cc65cc155ec40cff3 | 1,727 | py | Python | theano_ctc/__init__.py | deeplearningtoolkit/theano-ctc | 31660d462ed569093801b69dc6700db0e12b34f7 | [
"BSD-3-Clause"
] | null | null | null | theano_ctc/__init__.py | deeplearningtoolkit/theano-ctc | 31660d462ed569093801b69dc6700db0e12b34f7 | [
"BSD-3-Clause"
] | null | null | null | theano_ctc/__init__.py | deeplearningtoolkit/theano-ctc | 31660d462ed569093801b69dc6700db0e12b34f7 | [
"BSD-3-Clause"
] | null | null | null | import theano
from theano_ctc.cpu_ctc import CpuCtc
import os
from ctypes import cdll
cdll.LoadLibrary(os.path.join(os.environ["CTC_LIB"], "build", "libwarpctc.so"))
def ctc_cost(acts, labels, input_lengths = None):
"""
Given sequences of output layer activations and labels, compute the softmax output at each timestep,
and then compute the CTC cost of each sequence with respect to its corresponding label sequence.
:param acts: Tensor of pre-softmax activations, with shape=[maxInputSeqLen, batchSize, targetN],
where
maxInputSeqLen >= the length of the longest input sequence.
batchSize is the number of sequences being simultaneously computed / trained.
targetN is the number of network outputs (<blank> is always target 0).
:param labels: Matrix of training labels, with shape=[batchSize, maxOutputSeqLen].
Since <blank> is always output 0, labels should be > 0 (targets) or negative (ignored).
maxOutputSeqLen >= the length of the longest target sequence (excluding <blank>s,
which CTC alignment adds). Label values < 0 at any location are ignored,
so [1], [-1, 1, -1], and [-1, -1, 1] are treated the same.
:param input_lengths: Vector of input sequence lengths, with shape=[batchSize].
For sequence s (0 <= s < batchSize), CTC is calculated on acts[0:input_lengths[s], s, :].
If input_lengths is None, then all sequences in the batch are assumed to have length maxInputSeqLen.
:return: Vector of CTC costs, with shape=[batchSize]
"""
# This should be properly integrated into the theano optimization catalog.
# Until then, this forces the choice based on device configuration.
return CpuCtc()(acts, labels, input_lengths)
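def _ctc_cost_usage_sketch():
    # Hedged usage sketch (not part of the original module): symbolic wiring
    # only, matching the shapes documented in ctc_cost above; variable names
    # are illustrative.
    import theano
    import theano.tensor as T
    acts = T.tensor3("acts")        # [maxInputSeqLen, batchSize, targetN]
    labels = T.imatrix("labels")    # [batchSize, maxOutputSeqLen]
    lengths = T.ivector("lengths")  # [batchSize]
    cost = ctc_cost(acts, labels, lengths).mean()
    # CTC cost is differentiable w.r.t. the pre-softmax activations:
    return theano.grad(cost, wrt=acts)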
| 47.972222 | 106 | 0.729589 |
f7d17b69180bd9f32c7a77e9e4c7314b69b02a02 | 16,368 | py | Python | botorch/acquisition/monte_carlo.py | shalijiang/bo | af13f0a38b579ab504f49a01f1ced13532a3ad49 | [
"MIT"
] | 3 | 2020-08-07T17:15:57.000Z | 2021-07-12T11:16:44.000Z | botorch/acquisition/monte_carlo.py | shalijiang/bo | af13f0a38b579ab504f49a01f1ced13532a3ad49 | [
"MIT"
] | 1 | 2021-06-07T08:22:11.000Z | 2021-07-07T23:02:14.000Z | botorch/acquisition/monte_carlo.py | shalijiang/bo | af13f0a38b579ab504f49a01f1ced13532a3ad49 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
r"""
Batch acquisition functions using the reparameterization trick in combination
with (quasi) Monte-Carlo sampling. See [Rezende2014reparam]_ and
[Wilson2017reparam]_
.. [Rezende2014reparam]
D. J. Rezende, S. Mohamed, and D. Wierstra. Stochastic backpropagation and
approximate inference in deep generative models. ICML 2014.
.. [Wilson2017reparam]
J. T. Wilson, R. Moriconi, F. Hutter, and M. P. Deisenroth.
The reparameterization trick for acquisition functions. ArXiv 2017.
"""
import math
from abc import ABC, abstractmethod
from typing import Optional, Union
import torch
from torch import Tensor
from ..exceptions.errors import UnsupportedError
from ..models.model import Model
from ..sampling.samplers import MCSampler, SobolQMCNormalSampler
from ..utils.transforms import (
concatenate_pending_points,
match_batch_shape,
t_batch_mode_transform,
)
from .acquisition import AcquisitionFunction
from .objective import IdentityMCObjective, MCAcquisitionObjective
from .utils import prune_inferior_points
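# Hedged illustration (not part of the original module) of the
# reparameterization trick described in the module docstring: a draw from
# N(mu, Sigma) is rewritten as mu + eps @ L^T with eps ~ N(0, I) and
# L = cholesky(Sigma), so Monte-Carlo estimates become differentiable in mu.
# The function name below is hypothetical and unused elsewhere.
def _reparameterization_sketch(num_samples: int = 128) -> Tensor:
    mu = torch.tensor([0.5, -0.2], requires_grad=True)
    scale_tril = torch.eye(2)  # Cholesky factor of an (assumed) Sigma = I
    eps = torch.randn(num_samples, 2)  # fixed base samples
    samples = mu + eps @ scale_tril.t()  # differentiable w.r.t. mu
    # qSR-style MC objective: mean of the per-sample max over q=2 points.
    samples.max(dim=-1)[0].mean().backward()
    return mu.grad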
class MCAcquisitionFunction(AcquisitionFunction, ABC):
r"""Abstract base class for Monte-Carlo based batch acquisition functions."""
def __init__(
self,
model: Model,
sampler: Optional[MCSampler] = None,
objective: Optional[MCAcquisitionObjective] = None,
X_pending: Optional[Tensor] = None,
) -> None:
r"""Constructor for the MCAcquisitionFunction base class.
Args:
model: A fitted model.
sampler: The sampler used to draw base samples. Defaults to
`SobolQMCNormalSampler(num_samples=512, collapse_batch_dims=True)`.
objective: The MCAcquisitionObjective under which the samples are
evaluated. Defaults to `IdentityMCObjective()`.
X_pending: A `m x d`-dim Tensor of `m` design points that have
points that have been submitted for function evaluation
but have not yet been evaluated.
"""
super().__init__(model=model)
if sampler is None:
sampler = SobolQMCNormalSampler(num_samples=512, collapse_batch_dims=True)
self.add_module("sampler", sampler)
if objective is None:
if model.num_outputs != 1:
raise UnsupportedError(
"Must specify an objective when using a multi-output model."
)
objective = IdentityMCObjective()
elif not isinstance(objective, MCAcquisitionObjective):
raise UnsupportedError(
"Only objectives of type MCAcquisitionObjective are supported for "
"MC acquisition functions."
)
self.add_module("objective", objective)
self.set_X_pending(X_pending)
@abstractmethod
def forward(self, X: Tensor) -> Tensor:
r"""Takes in a `(b) x q x d` X Tensor of `(b)` t-batches with `q` `d`-dim
design points each, and returns a one-dimensional Tensor with
`(b)` elements. Should utilize the result of set_X_pending as needed
to account for pending function evaluations.
"""
pass # pragma: no cover
class qExpectedImprovement(MCAcquisitionFunction):
r"""MC-based batch Expected Improvement.
This computes qEI by
(1) sampling the joint posterior over q points
(2) evaluating the improvement over the current best for each sample
(3) maximizing over q
(4) averaging over the samples
`qEI(X) = E(max(max Y - best_f, 0)), Y ~ f(X), where X = (x_1,...,x_q)`
Example:
>>> model = SingleTaskGP(train_X, train_Y)
>>> best_f = train_Y.max()[0]
>>> sampler = SobolQMCNormalSampler(1000)
>>> qEI = qExpectedImprovement(model, best_f, sampler)
>>> qei = qEI(test_X)
"""
def __init__(
self,
model: Model,
best_f: Union[float, Tensor],
sampler: Optional[MCSampler] = None,
objective: Optional[MCAcquisitionObjective] = None,
X_pending: Optional[Tensor] = None,
) -> None:
r"""q-Expected Improvement.
Args:
model: A fitted model.
best_f: The best objective value observed so far (assumed noiseless).
sampler: The sampler used to draw base samples. Defaults to
`SobolQMCNormalSampler(num_samples=500, collapse_batch_dims=True)`
objective: The MCAcquisitionObjective under which the samples are
evaluated. Defaults to `IdentityMCObjective()`.
X_pending: A `m x d`-dim Tensor of `m` design points that have
points that have been submitted for function evaluation
but have not yet been evaluated. Concatenated into X upon
forward call. Copied and set to have no gradient.
"""
super().__init__(
model=model, sampler=sampler, objective=objective, X_pending=X_pending
)
if not torch.is_tensor(best_f):
best_f = torch.tensor(float(best_f))
self.register_buffer("best_f", best_f)
@concatenate_pending_points
@t_batch_mode_transform()
def forward(self, X: Tensor) -> Tensor:
r"""Evaluate qExpectedImprovement on the candidate set `X`.
Args:
X: A `(b) x q x d`-dim Tensor of `(b)` t-batches with `q` `d`-dim
design points each.
Returns:
A `(b)`-dim Tensor of Expected Improvement values at the given
design points `X`.
"""
posterior = self.model.posterior(X)
samples = self.sampler(posterior)
obj = self.objective(samples)
obj = (obj - self.best_f.unsqueeze(-1)).clamp_min(0)
q_ei = obj.max(dim=-1)[0].mean(dim=0)
return q_ei
class qNoisyExpectedImprovement(MCAcquisitionFunction):
r"""MC-based batch Noisy Expected Improvement.
This function does not assume a `best_f` is known (which would require
noiseless observations). Instead, it uses samples from the joint posterior
over the `q` test points and previously observed points. The improvement
over previously observed points is computed for each sample and averaged.
`qNEI(X) = E(max(max Y - max Y_baseline, 0))`, where
`(Y, Y_baseline) ~ f((X, X_baseline)), X = (x_1,...,x_q)`
Example:
>>> model = SingleTaskGP(train_X, train_Y)
>>> sampler = SobolQMCNormalSampler(1000)
>>> qNEI = qNoisyExpectedImprovement(model, train_X, sampler)
>>> qnei = qNEI(test_X)
"""
def __init__(
self,
model: Model,
X_baseline: Tensor,
sampler: Optional[MCSampler] = None,
objective: Optional[MCAcquisitionObjective] = None,
X_pending: Optional[Tensor] = None,
prune_baseline: bool = False,
) -> None:
r"""q-Noisy Expected Improvement.
Args:
model: A fitted model.
X_baseline: A `r x d`-dim Tensor of `r` design points that have
already been observed. These points are considered as the
potential best design point.
sampler: The sampler used to draw base samples. Defaults to
`SobolQMCNormalSampler(num_samples=500, collapse_batch_dims=True)`.
objective: The MCAcquisitionObjective under which the samples are
evaluated. Defaults to `IdentityMCObjective()`.
X_pending: A `m x d`-dim Tensor of `m` design points that have
points that have been submitted for function evaluation
but have not yet been evaluated. Concatenated into X upon
forward call. Copied and set to have no gradient.
prune_baseline: If True, remove points in `X_baseline` that are
highly unlikely to be the best point. This can significantly
improve performance and is generally recommended. In order to
customize pruning parameters, instead manually call
`botorch.acquisition.utils.prune_inferior_points` on `X_baseline`
before instantiating the acquisition function.
"""
super().__init__(
model=model, sampler=sampler, objective=objective, X_pending=X_pending
)
if prune_baseline:
X_baseline = prune_inferior_points(
model=model, X=X_baseline, objective=objective
)
self.register_buffer("X_baseline", X_baseline)
@concatenate_pending_points
@t_batch_mode_transform()
def forward(self, X: Tensor) -> Tensor:
r"""Evaluate qNoisyExpectedImprovement on the candidate set `X`.
Args:
X: A `(b) x q x d`-dim Tensor of `(b)` t-batches with `q` `d`-dim
design points each.
Returns:
A `(b)`-dim Tensor of Noisy Expected Improvement values at the given
design points `X`.
"""
q = X.shape[-2]
X_full = torch.cat([X, match_batch_shape(self.X_baseline, X)], dim=-2)
# TODO (T41248036): Implement more efficient way to compute posterior
# over both training and test points in GPyTorch
posterior = self.model.posterior(X_full)
samples = self.sampler(posterior)
obj = self.objective(samples)
diffs = obj[:, :, :q].max(dim=-1)[0] - obj[:, :, q:].max(dim=-1)[0]
return diffs.clamp_min(0).mean(dim=0)
class qProbabilityOfImprovement(MCAcquisitionFunction):
r"""MC-based batch Probability of Improvement.
Estimates the probability of improvement over the current best observed
value by sampling from the joint posterior distribution of the q-batch.
MC-based estimates of a probability involves taking expectation of an
indicator function; to support auto-differntiation, the indicator is
replaced with a sigmoid function with temperature parameter `tau`.
`qPI(X) = P(max Y >= best_f), Y ~ f(X), X = (x_1,...,x_q)`
Example:
>>> model = SingleTaskGP(train_X, train_Y)
>>> best_f = train_Y.max()[0]
>>> sampler = SobolQMCNormalSampler(1000)
>>> qPI = qProbabilityOfImprovement(model, best_f, sampler)
>>> qpi = qPI(test_X)
"""
def __init__(
self,
model: Model,
best_f: Union[float, Tensor],
sampler: Optional[MCSampler] = None,
objective: Optional[MCAcquisitionObjective] = None,
X_pending: Optional[Tensor] = None,
tau: float = 1e-3,
) -> None:
r"""q-Probability of Improvement.
Args:
model: A fitted model.
best_f: The best objective value observed so far (assumed noiseless).
sampler: The sampler used to draw base samples. Defaults to
`SobolQMCNormalSampler(num_samples=500, collapse_batch_dims=True)`
objective: The MCAcquisitionObjective under which the samples are
evaluated. Defaults to `IdentityMCObjective()`.
X_pending: A `m x d`-dim Tensor of `m` design points that have
points that have been submitted for function evaluation
but have not yet been evaluated. Concatenated into X upon
forward call. Copied and set to have no gradient.
tau: The temperature parameter used in the sigmoid approximation
of the step function. Smaller values yield more accurate
approximations of the function, but result in gradients
estimates with higher variance.
"""
super().__init__(
model=model, sampler=sampler, objective=objective, X_pending=X_pending
)
if not torch.is_tensor(best_f):
best_f = torch.tensor(float(best_f))
self.register_buffer("best_f", best_f)
if not torch.is_tensor(tau):
tau = torch.tensor(float(tau))
self.register_buffer("tau", tau)
@concatenate_pending_points
@t_batch_mode_transform()
def forward(self, X: Tensor) -> Tensor:
r"""Evaluate qProbabilityOfImprovement on the candidate set `X`.
Args:
X: A `(b) x q x d`-dim Tensor of `(b)` t-batches with `q` `d`-dim
design points each.
Returns:
A `(b)`-dim Tensor of Probability of Improvement values at the given
design points `X`.
"""
posterior = self.model.posterior(X)
samples = self.sampler(posterior)
obj = self.objective(samples)
max_obj = obj.max(dim=-1)[0]
val = torch.sigmoid((max_obj - self.best_f) / self.tau).mean(dim=0)
return val
class qSimpleRegret(MCAcquisitionFunction):
r"""MC-based batch Simple Regret.
Samples from the joint posterior over the q-batch and computes the simple
regret.
`qSR(X) = E(max Y), Y ~ f(X), X = (x_1,...,x_q)`
Example:
>>> model = SingleTaskGP(train_X, train_Y)
>>> sampler = SobolQMCNormalSampler(1000)
>>> qSR = qSimpleRegret(model, sampler)
>>> qsr = qSR(test_X)
"""
@concatenate_pending_points
@t_batch_mode_transform()
def forward(self, X: Tensor) -> Tensor:
r"""Evaluate qSimpleRegret on the candidate set `X`.
Args:
X: A `(b) x q x d`-dim Tensor of `(b)` t-batches with `q` `d`-dim
design points each.
Returns:
A `(b)`-dim Tensor of Simple Regret values at the given design
points `X`.
"""
posterior = self.model.posterior(X)
samples = self.sampler(posterior)
obj = self.objective(samples)
val = obj.max(dim=-1)[0].mean(dim=0)
return val
class qUpperConfidenceBound(MCAcquisitionFunction):
r"""MC-based batch Upper Confidence Bound.
Uses a reparameterization to extend UCB to qUCB for q > 1 (See Appendix A
    of [Wilson2017reparam]_.)
`qUCB = E(max(mu + |Y_tilde - mu|))`, where `Y_tilde ~ N(mu, beta pi/2 Sigma)`
and `f(X)` has distribution `N(mu, Sigma)`.
Example:
>>> model = SingleTaskGP(train_X, train_Y)
>>> sampler = SobolQMCNormalSampler(1000)
>>> qUCB = qUpperConfidenceBound(model, 0.1, sampler)
>>> qucb = qUCB(test_X)
"""
def __init__(
self,
model: Model,
beta: float,
sampler: Optional[MCSampler] = None,
objective: Optional[MCAcquisitionObjective] = None,
X_pending: Optional[Tensor] = None,
) -> None:
r"""q-Upper Confidence Bound.
Args:
model: A fitted model.
beta: Controls tradeoff between mean and standard deviation in UCB.
sampler: The sampler used to draw base samples. Defaults to
`SobolQMCNormalSampler(num_samples=500, collapse_batch_dims=True)`
objective: The MCAcquisitionObjective under which the samples are
evaluated. Defaults to `IdentityMCObjective()`.
X_pending: A `m x d`-dim Tensor of `m` design points that have
points that have been submitted for function evaluation
but have not yet been evaluated. Concatenated into X upon
forward call. Copied and set to have no gradient.
"""
super().__init__(
model=model, sampler=sampler, objective=objective, X_pending=X_pending
)
self.beta_prime = math.sqrt(beta * math.pi / 2)
@concatenate_pending_points
@t_batch_mode_transform()
def forward(self, X: Tensor) -> Tensor:
r"""Evaluate qUpperConfidenceBound on the candidate set `X`.
Args:
X: A `(b) x q x d`-dim Tensor of `(b)` t-batches with `q` `d`-dim
design points each.
Returns:
A `(b)`-dim Tensor of Upper Confidence Bound values at the given
design points `X`.
"""
posterior = self.model.posterior(X)
samples = self.sampler(posterior)
obj = self.objective(samples)
mean = obj.mean(dim=0)
ucb_samples = mean + self.beta_prime * (obj - mean).abs()
return ucb_samples.max(dim=-1)[0].mean(dim=0)
| 39.346154 | 86 | 0.627077 |
005d75f97fe749116d2c741874220f8976062e66 | 321 | py | Python | 2_Python Advanced/7_Gui/17_labelframe.py | Arunken/PythonScripts | 702d0a3af7a9be3311f9da0afc5285d453f15484 | [
"Apache-2.0"
] | null | null | null | 2_Python Advanced/7_Gui/17_labelframe.py | Arunken/PythonScripts | 702d0a3af7a9be3311f9da0afc5285d453f15484 | [
"Apache-2.0"
] | 1 | 2021-06-02T00:58:47.000Z | 2021-06-02T00:58:47.000Z | 2_Python Advanced/7_Gui/17_labelframe.py | Arunken/PythonScripts | 702d0a3af7a9be3311f9da0afc5285d453f15484 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Fri May 25 09:56:31 2018
@author: SilverDoe
"""
from tkinter import *
root = Tk()
labelframe = LabelFrame(root, text = "This is a LabelFrame")
labelframe.pack(fill = "both", expand = "yes")
left = Label(labelframe, text = "Inside the LabelFrame")
left.pack()
root.mainloop() | 17.833333 | 60 | 0.663551 |
f56417bdf6023afc15ca0a484e8a89a479dd9f08 | 35,478 | py | Python | test/functional/swift_test_client.py | larsbutler/swift | 216d68eaa861b0607f1a05828f757f19cb8e6b64 | [
"Apache-2.0"
] | null | null | null | test/functional/swift_test_client.py | larsbutler/swift | 216d68eaa861b0607f1a05828f757f19cb8e6b64 | [
"Apache-2.0"
] | null | null | null | test/functional/swift_test_client.py | larsbutler/swift | 216d68eaa861b0607f1a05828f757f19cb8e6b64 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import json
import os
import random
import socket
import time
from unittest2 import SkipTest
from xml.dom import minidom
import six
from six.moves import http_client
from six.moves import urllib
from swiftclient import get_auth
from swift.common import constraints
from swift.common.utils import config_true_value
from test import safe_repr
http_client._MAXHEADERS = constraints.MAX_HEADER_COUNT
class AuthenticationFailed(Exception):
pass
class RequestError(Exception):
pass
class ResponseError(Exception):
def __init__(self, response, method=None, path=None):
self.status = response.status
self.reason = response.reason
self.method = method
self.path = path
self.headers = response.getheaders()
for name, value in self.headers:
if name.lower() == 'x-trans-id':
self.txid = value
break
else:
self.txid = None
super(ResponseError, self).__init__()
def __str__(self):
return repr(self)
def __repr__(self):
return '%d: %r (%r %r) txid=%s' % (
self.status, self.reason, self.method, self.path, self.txid)
def listing_empty(method):
for i in range(6):
if len(method()) == 0:
return True
time.sleep(2 ** i)
return False
def listing_items(method):
marker = None
once = True
items = []
while once or items:
for i in items:
yield i
if once or marker:
if marker:
items = method(parms={'marker': marker})
else:
items = method()
if len(items) == 10000:
marker = items[-1]
else:
marker = None
once = False
else:
items = []
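# --- Illustrative note (not part of the original client) ---
# listing_items() pages through a listing method: whenever a full page of
# 10000 names comes back, the last name becomes the `marker` for the next
# request. Typical use, assuming an authenticated Account instance:
#
#   for name in listing_items(account.containers):
#       print(name)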
class Connection(object):
def __init__(self, config):
for key in 'auth_host auth_port auth_ssl username password'.split():
if key not in config:
raise SkipTest(
"Missing required configuration parameter: %s" % key)
self.auth_host = config['auth_host']
self.auth_port = int(config['auth_port'])
self.auth_ssl = config['auth_ssl'] in ('on', 'true', 'yes', '1')
self.insecure = config_true_value(config.get('insecure', 'false'))
self.auth_prefix = config.get('auth_prefix', '/')
self.auth_version = str(config.get('auth_version', '1'))
self.account = config.get('account')
self.username = config['username']
self.password = config['password']
self.storage_host = None
self.storage_port = None
self.storage_url = None
self.conn_class = None
def get_account(self):
return Account(self, self.account)
def authenticate(self, clone_conn=None):
if clone_conn:
self.conn_class = clone_conn.conn_class
self.storage_host = clone_conn.storage_host
self.storage_url = clone_conn.storage_url
self.storage_port = clone_conn.storage_port
self.storage_token = clone_conn.storage_token
return
if self.auth_version == "1":
auth_path = '%sv1.0' % (self.auth_prefix)
if self.account:
auth_user = '%s:%s' % (self.account, self.username)
else:
auth_user = self.username
else:
auth_user = self.username
auth_path = self.auth_prefix
auth_scheme = 'https://' if self.auth_ssl else 'http://'
auth_netloc = "%s:%d" % (self.auth_host, self.auth_port)
auth_url = auth_scheme + auth_netloc + auth_path
authargs = dict(snet=False, tenant_name=self.account,
auth_version=self.auth_version, os_options={},
insecure=self.insecure)
(storage_url, storage_token) = get_auth(
auth_url, auth_user, self.password, **authargs)
if not (storage_url and storage_token):
raise AuthenticationFailed()
x = storage_url.split('/')
if x[0] == 'http:':
self.conn_class = http_client.HTTPConnection
self.storage_port = 80
elif x[0] == 'https:':
self.conn_class = http_client.HTTPSConnection
self.storage_port = 443
else:
raise ValueError('unexpected protocol %s' % (x[0]))
self.storage_host = x[2].split(':')[0]
if ':' in x[2]:
self.storage_port = int(x[2].split(':')[1])
# Make sure storage_url is a string and not unicode, since
# keystoneclient (called by swiftclient) returns them in
# unicode and this would cause troubles when doing
# no_safe_quote query.
self.storage_url = str('/%s/%s' % (x[3], x[4]))
self.account_name = str(x[4])
self.auth_user = auth_user
# With v2 keystone, storage_token is unicode.
# We want it to be string otherwise this would cause
# troubles when doing query with already encoded
# non ascii characters in its headers.
self.storage_token = str(storage_token)
self.user_acl = '%s:%s' % (self.account, self.username)
self.http_connect()
return self.storage_url, self.storage_token
def cluster_info(self):
"""
Retrieve the data in /info, or {} on 404
"""
status = self.make_request('GET', '/info',
cfg={'absolute_path': True})
if status // 100 == 4:
return {}
if not 200 <= status <= 299:
raise ResponseError(self.response, 'GET', '/info')
return json.loads(self.response.read())
def http_connect(self):
self.connection = self.conn_class(self.storage_host,
port=self.storage_port)
# self.connection.set_debuglevel(3)
def make_path(self, path=None, cfg=None):
if path is None:
path = []
if cfg is None:
cfg = {}
if cfg.get('version_only_path'):
return '/' + self.storage_url.split('/')[1]
if path:
quote = urllib.parse.quote
if cfg.get('no_quote') or cfg.get('no_path_quote'):
quote = lambda x: x
return '%s/%s' % (self.storage_url,
'/'.join([quote(i) for i in path]))
else:
return self.storage_url
def make_headers(self, hdrs, cfg=None):
if cfg is None:
cfg = {}
headers = {}
if not cfg.get('no_auth_token'):
headers['X-Auth-Token'] = self.storage_token
if cfg.get('use_token'):
headers['X-Auth-Token'] = cfg.get('use_token')
if isinstance(hdrs, dict):
headers.update(hdrs)
return headers
def make_request(self, method, path=None, data='', hdrs=None, parms=None,
cfg=None):
if path is None:
path = []
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
if not cfg.get('absolute_path'):
# Set absolute_path=True to make a request to exactly the given
# path, not storage path + given path. Useful for
# non-account/container/object requests.
path = self.make_path(path, cfg=cfg)
headers = self.make_headers(hdrs, cfg=cfg)
if isinstance(parms, dict) and parms:
quote = urllib.parse.quote
if cfg.get('no_quote') or cfg.get('no_parms_quote'):
quote = lambda x: x
query_args = ['%s=%s' % (quote(x), quote(str(y)))
for (x, y) in parms.items()]
path = '%s?%s' % (path, '&'.join(query_args))
if not cfg.get('no_content_length'):
if cfg.get('set_content_length'):
headers['Content-Length'] = cfg.get('set_content_length')
else:
headers['Content-Length'] = len(data)
def try_request():
self.http_connect()
self.connection.request(method, path, data, headers)
return self.connection.getresponse()
self.response = None
try_count = 0
fail_messages = []
while try_count < 5:
try_count += 1
try:
self.response = try_request()
except http_client.HTTPException as e:
fail_messages.append(safe_repr(e))
continue
if self.response.status == 401:
fail_messages.append("Response 401")
self.authenticate()
continue
elif self.response.status == 503:
fail_messages.append("Response 503")
if try_count != 5:
time.sleep(5)
continue
break
if self.response:
return self.response.status
request = "{method} {path} headers: {headers} data: {data}".format(
method=method, path=path, headers=headers, data=data)
raise RequestError('Unable to complete http request: %s. '
'Attempts: %s, Failures: %s' %
(request, len(fail_messages), fail_messages))
def put_start(self, path, hdrs=None, parms=None, cfg=None, chunked=False):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
self.http_connect()
path = self.make_path(path, cfg)
headers = self.make_headers(hdrs, cfg=cfg)
if chunked:
headers['Transfer-Encoding'] = 'chunked'
headers.pop('Content-Length', None)
if isinstance(parms, dict) and parms:
quote = urllib.parse.quote
if cfg.get('no_quote') or cfg.get('no_parms_quote'):
quote = lambda x: x
query_args = ['%s=%s' % (quote(x), quote(str(y)))
for (x, y) in parms.items()]
path = '%s?%s' % (path, '&'.join(query_args))
self.connection = self.conn_class(self.storage_host,
port=self.storage_port)
# self.connection.set_debuglevel(3)
self.connection.putrequest('PUT', path)
for key, value in headers.items():
self.connection.putheader(key, value)
self.connection.endheaders()
def put_data(self, data, chunked=False):
if chunked:
self.connection.send('%x\r\n%s\r\n' % (len(data), data))
else:
self.connection.send(data)
def put_end(self, chunked=False):
if chunked:
self.connection.send('0\r\n\r\n')
self.response = self.connection.getresponse()
self.connection.close()
return self.response.status
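# --- Illustrative sketch (not part of the original client) ---
# Minimal use of Connection against a local Swift-All-In-One; the config
# values below are assumptions for the example:
#
#   config = {'auth_host': '127.0.0.1', 'auth_port': '8080',
#             'auth_ssl': 'no', 'username': 'tester', 'password': 'testing',
#             'account': 'test'}
#   conn = Connection(config)
#   conn.authenticate()
#   status = conn.make_request('GET', parms={'format': 'json'})
#   listing = conn.response.read()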
class Base(object):
def __str__(self):
return self.name
def header_fields(self, required_fields, optional_fields=None):
if optional_fields is None:
optional_fields = ()
headers = dict(self.conn.response.getheaders())
ret = {}
for field in required_fields:
if field[1] not in headers:
raise ValueError("%s was not found in response header" %
(field[1]))
try:
ret[field[0]] = int(headers[field[1]])
except ValueError:
ret[field[0]] = headers[field[1]]
for field in optional_fields:
if field[1] not in headers:
continue
try:
ret[field[0]] = int(headers[field[1]])
except ValueError:
ret[field[0]] = headers[field[1]]
return ret
class Account(Base):
def __init__(self, conn, name):
self.conn = conn
self.name = str(name)
def update_metadata(self, metadata=None, cfg=None):
if metadata is None:
metadata = {}
if cfg is None:
cfg = {}
headers = dict(("X-Account-Meta-%s" % k, v)
for k, v in metadata.items())
self.conn.make_request('POST', self.path, hdrs=headers, cfg=cfg)
if not 200 <= self.conn.response.status <= 299:
raise ResponseError(self.conn.response, 'POST',
self.conn.make_path(self.path))
return True
def container(self, container_name):
return Container(self.conn, self.name, container_name)
def containers(self, hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
format_type = parms.get('format', None)
if format_type not in [None, 'json', 'xml']:
raise RequestError('Invalid format: %s' % format_type)
if format_type is None and 'format' in parms:
del parms['format']
status = self.conn.make_request('GET', self.path, hdrs=hdrs,
parms=parms, cfg=cfg)
if status == 200:
if format_type == 'json':
conts = json.loads(self.conn.response.read())
for cont in conts:
cont['name'] = cont['name'].encode('utf-8')
return conts
elif format_type == 'xml':
conts = []
tree = minidom.parseString(self.conn.response.read())
for x in tree.getElementsByTagName('container'):
cont = {}
for key in ['name', 'count', 'bytes']:
cont[key] = x.getElementsByTagName(key)[0].\
childNodes[0].nodeValue
conts.append(cont)
for cont in conts:
cont['name'] = cont['name'].encode('utf-8')
return conts
else:
lines = self.conn.response.read().split('\n')
if lines and not lines[-1]:
lines = lines[:-1]
return lines
elif status == 204:
return []
raise ResponseError(self.conn.response, 'GET',
self.conn.make_path(self.path))
def delete_containers(self):
for c in listing_items(self.containers):
cont = self.container(c)
cont.update_metadata(hdrs={'x-versions-location': ''})
if not cont.delete_recursive():
return False
return listing_empty(self.containers)
def info(self, hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
if self.conn.make_request('HEAD', self.path, hdrs=hdrs,
parms=parms, cfg=cfg) != 204:
raise ResponseError(self.conn.response, 'HEAD',
self.conn.make_path(self.path))
fields = [['object_count', 'x-account-object-count'],
['container_count', 'x-account-container-count'],
['bytes_used', 'x-account-bytes-used']]
return self.header_fields(fields)
@property
def path(self):
return []
class Container(Base):
# policy_specified is set in __init__.py when tests are being set up.
policy_specified = None
def __init__(self, conn, account, name):
self.conn = conn
self.account = str(account)
self.name = str(name)
def create(self, hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
if self.policy_specified and 'X-Storage-Policy' not in hdrs:
hdrs['X-Storage-Policy'] = self.policy_specified
return self.conn.make_request('PUT', self.path, hdrs=hdrs,
parms=parms, cfg=cfg) in (201, 202)
def update_metadata(self, hdrs=None, cfg=None):
if hdrs is None:
hdrs = {}
if cfg is None:
cfg = {}
self.conn.make_request('POST', self.path, hdrs=hdrs, cfg=cfg)
if not 200 <= self.conn.response.status <= 299:
raise ResponseError(self.conn.response, 'POST',
self.conn.make_path(self.path))
return True
def delete(self, hdrs=None, parms=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
return self.conn.make_request('DELETE', self.path, hdrs=hdrs,
parms=parms) == 204
def delete_files(self):
for f in listing_items(self.files):
file_item = self.file(f)
if not file_item.delete():
return False
return listing_empty(self.files)
def delete_recursive(self):
return self.delete_files() and self.delete()
def file(self, file_name):
return File(self.conn, self.account, self.name, file_name)
def files(self, hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
format_type = parms.get('format', None)
if format_type not in [None, 'json', 'xml']:
raise RequestError('Invalid format: %s' % format_type)
if format_type is None and 'format' in parms:
del parms['format']
status = self.conn.make_request('GET', self.path, hdrs=hdrs,
parms=parms, cfg=cfg)
if status == 200:
if format_type == 'json':
files = json.loads(self.conn.response.read())
for file_item in files:
for key in ('name', 'subdir', 'content_type'):
if key in file_item:
file_item[key] = file_item[key].encode('utf-8')
return files
elif format_type == 'xml':
files = []
tree = minidom.parseString(self.conn.response.read())
container = tree.getElementsByTagName('container')[0]
for x in container.childNodes:
file_item = {}
if x.tagName == 'object':
for key in ['name', 'hash', 'bytes', 'content_type',
'last_modified']:
file_item[key] = x.getElementsByTagName(key)[0].\
childNodes[0].nodeValue
elif x.tagName == 'subdir':
file_item['subdir'] = x.getElementsByTagName(
'name')[0].childNodes[0].nodeValue
else:
raise ValueError('Found unexpected element %s'
% x.tagName)
files.append(file_item)
for file_item in files:
if 'subdir' in file_item:
file_item['subdir'] = file_item['subdir'].\
encode('utf-8')
else:
file_item['name'] = file_item['name'].encode('utf-8')
file_item['content_type'] = file_item['content_type'].\
encode('utf-8')
file_item['bytes'] = int(file_item['bytes'])
return files
else:
content = self.conn.response.read()
if content:
lines = content.split('\n')
if lines and not lines[-1]:
lines = lines[:-1]
return lines
else:
return []
elif status == 204:
return []
raise ResponseError(self.conn.response, 'GET',
self.conn.make_path(self.path))
def info(self, hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
self.conn.make_request('HEAD', self.path, hdrs=hdrs,
parms=parms, cfg=cfg)
if self.conn.response.status == 204:
required_fields = [['bytes_used', 'x-container-bytes-used'],
['object_count', 'x-container-object-count'],
['last_modified', 'last-modified']]
optional_fields = [
['versions', 'x-versions-location'],
['tempurl_key', 'x-container-meta-temp-url-key'],
['tempurl_key2', 'x-container-meta-temp-url-key-2']]
return self.header_fields(required_fields, optional_fields)
raise ResponseError(self.conn.response, 'HEAD',
self.conn.make_path(self.path))
@property
def path(self):
return [self.name]
class File(Base):
def __init__(self, conn, account, container, name):
self.conn = conn
self.account = str(account)
self.container = str(container)
self.name = str(name)
self.chunked_write_in_progress = False
self.content_type = None
self.content_range = None
self.size = None
self.metadata = {}
def make_headers(self, cfg=None):
if cfg is None:
cfg = {}
headers = {}
if not cfg.get('no_content_length'):
if cfg.get('set_content_length'):
headers['Content-Length'] = cfg.get('set_content_length')
elif self.size:
headers['Content-Length'] = self.size
else:
headers['Content-Length'] = 0
if cfg.get('use_token'):
headers['X-Auth-Token'] = cfg.get('use_token')
if cfg.get('no_content_type'):
pass
elif self.content_type:
headers['Content-Type'] = self.content_type
else:
headers['Content-Type'] = 'application/octet-stream'
for key in self.metadata:
headers['X-Object-Meta-' + key] = self.metadata[key]
return headers
@classmethod
def compute_md5sum(cls, data):
block_size = 4096
if isinstance(data, str):
data = six.StringIO(data)
checksum = hashlib.md5()
buff = data.read(block_size)
while buff:
checksum.update(buff)
buff = data.read(block_size)
data.seek(0)
return checksum.hexdigest()
def copy(self, dest_cont, dest_file, hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
if 'destination' in cfg:
headers = {'Destination': cfg['destination']}
elif cfg.get('no_destination'):
headers = {}
else:
headers = {'Destination': '%s/%s' % (dest_cont, dest_file)}
headers.update(hdrs)
if 'Destination' in headers:
headers['Destination'] = urllib.parse.quote(headers['Destination'])
return self.conn.make_request('COPY', self.path, hdrs=headers,
parms=parms) == 201
def copy_account(self, dest_account, dest_cont, dest_file,
hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
if 'destination' in cfg:
headers = {'Destination': cfg['destination']}
elif cfg.get('no_destination'):
headers = {}
else:
headers = {'Destination-Account': dest_account,
'Destination': '%s/%s' % (dest_cont, dest_file)}
headers.update(hdrs)
if 'Destination-Account' in headers:
headers['Destination-Account'] = \
urllib.parse.quote(headers['Destination-Account'])
if 'Destination' in headers:
headers['Destination'] = urllib.parse.quote(headers['Destination'])
return self.conn.make_request('COPY', self.path, hdrs=headers,
parms=parms) == 201
def delete(self, hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if self.conn.make_request('DELETE', self.path, hdrs=hdrs,
cfg=cfg, parms=parms) != 204:
raise ResponseError(self.conn.response, 'DELETE',
self.conn.make_path(self.path))
return True
def info(self, hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
if self.conn.make_request('HEAD', self.path, hdrs=hdrs,
parms=parms, cfg=cfg) != 200:
raise ResponseError(self.conn.response, 'HEAD',
self.conn.make_path(self.path))
fields = [['content_length', 'content-length'],
['content_type', 'content-type'],
['last_modified', 'last-modified'],
['etag', 'etag']]
optional_fields = [['x_object_manifest', 'x-object-manifest']]
header_fields = self.header_fields(fields,
optional_fields=optional_fields)
header_fields['etag'] = header_fields['etag'].strip('"')
return header_fields
def initialize(self, hdrs=None, parms=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if not self.name:
return False
status = self.conn.make_request('HEAD', self.path, hdrs=hdrs,
parms=parms)
if status == 404:
return False
elif (status < 200) or (status > 299):
raise ResponseError(self.conn.response, 'HEAD',
self.conn.make_path(self.path))
for hdr in self.conn.response.getheaders():
if hdr[0].lower() == 'content-type':
self.content_type = hdr[1]
if hdr[0].lower().startswith('x-object-meta-'):
self.metadata[hdr[0][14:]] = hdr[1]
if hdr[0].lower() == 'etag':
self.etag = hdr[1].strip('"')
if hdr[0].lower() == 'content-length':
self.size = int(hdr[1])
if hdr[0].lower() == 'last-modified':
self.last_modified = hdr[1]
return True
def load_from_filename(self, filename, callback=None):
fobj = open(filename, 'rb')
self.write(fobj, callback=callback)
fobj.close()
@property
def path(self):
return [self.container, self.name]
@classmethod
def random_data(cls, size=None):
if size is None:
size = random.randint(1, 32768)
fd = open('/dev/urandom', 'r')
data = fd.read(size)
fd.close()
return data
def read(self, size=-1, offset=0, hdrs=None, buffer=None,
callback=None, cfg=None, parms=None):
if cfg is None:
cfg = {}
if parms is None:
parms = {}
if size > 0:
range_string = 'bytes=%d-%d' % (offset, (offset + size) - 1)
if hdrs:
hdrs['Range'] = range_string
else:
hdrs = {'Range': range_string}
status = self.conn.make_request('GET', self.path, hdrs=hdrs,
cfg=cfg, parms=parms)
if (status < 200) or (status > 299):
raise ResponseError(self.conn.response, 'GET',
self.conn.make_path(self.path))
for hdr in self.conn.response.getheaders():
if hdr[0].lower() == 'content-type':
self.content_type = hdr[1]
if hdr[0].lower() == 'content-range':
self.content_range = hdr[1]
if hasattr(buffer, 'write'):
scratch = self.conn.response.read(8192)
transferred = 0
while len(scratch) > 0:
buffer.write(scratch)
transferred += len(scratch)
if callable(callback):
callback(transferred, self.size)
scratch = self.conn.response.read(8192)
return None
else:
return self.conn.response.read()
def read_md5(self):
status = self.conn.make_request('GET', self.path)
if (status < 200) or (status > 299):
raise ResponseError(self.conn.response, 'GET',
self.conn.make_path(self.path))
checksum = hashlib.md5()
scratch = self.conn.response.read(8192)
while len(scratch) > 0:
checksum.update(scratch)
scratch = self.conn.response.read(8192)
return checksum.hexdigest()
    def save_to_filename(self, filename, callback=None):
        fobj = open(filename, 'wb')
        try:
            self.read(buffer=fobj, callback=callback)
        finally:
            fobj.close()
def sync_metadata(self, metadata=None, cfg=None, parms=None):
if metadata is None:
metadata = {}
if cfg is None:
cfg = {}
self.metadata.update(metadata)
if self.metadata:
headers = self.make_headers(cfg=cfg)
if not cfg.get('no_content_length'):
if cfg.get('set_content_length'):
headers['Content-Length'] = \
cfg.get('set_content_length')
else:
headers['Content-Length'] = 0
self.conn.make_request('POST', self.path, hdrs=headers,
parms=parms, cfg=cfg)
if self.conn.response.status not in (201, 202):
raise ResponseError(self.conn.response, 'POST',
self.conn.make_path(self.path))
return True
def chunked_write(self, data=None, hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
if data is not None and self.chunked_write_in_progress:
self.conn.put_data(data, True)
elif data is not None:
self.chunked_write_in_progress = True
headers = self.make_headers(cfg=cfg)
headers.update(hdrs)
self.conn.put_start(self.path, hdrs=headers, parms=parms,
cfg=cfg, chunked=True)
self.conn.put_data(data, True)
elif self.chunked_write_in_progress:
self.chunked_write_in_progress = False
return self.conn.put_end(True) == 201
else:
raise RuntimeError
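    # --- Illustrative note (not part of the original client) ---
    # chunked_write() is a three-phase helper: calls with data open the
    # connection (first call) and stream further chunks; a final call with
    # no data sends the terminating zero-length chunk and returns whether
    # the PUT got a 201, e.g.:
    #
    #   f = container.file('big-object')
    #   f.chunked_write('part one, ')
    #   f.chunked_write('part two')
    #   assert f.chunked_write()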
def write(self, data='', hdrs=None, parms=None, callback=None, cfg=None,
return_resp=False):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
block_size = 2 ** 20
if isinstance(data, file):
try:
data.flush()
data.seek(0)
except IOError:
pass
self.size = int(os.fstat(data.fileno())[6])
else:
data = six.StringIO(data)
self.size = data.len
headers = self.make_headers(cfg=cfg)
headers.update(hdrs)
self.conn.put_start(self.path, hdrs=headers, parms=parms, cfg=cfg)
transferred = 0
buff = data.read(block_size)
buff_len = len(buff)
try:
while buff_len > 0:
self.conn.put_data(buff)
transferred += buff_len
if callable(callback):
callback(transferred, self.size)
buff = data.read(block_size)
buff_len = len(buff)
self.conn.put_end()
except socket.timeout as err:
raise err
if (self.conn.response.status < 200) or \
(self.conn.response.status > 299):
raise ResponseError(self.conn.response, 'PUT',
self.conn.make_path(self.path))
try:
data.seek(0)
except IOError:
pass
self.md5 = self.compute_md5sum(data)
if return_resp:
return self.conn.response
return True
def write_random(self, size=None, hdrs=None, parms=None, cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
data = self.random_data(size)
if not self.write(data, hdrs=hdrs, parms=parms, cfg=cfg):
raise ResponseError(self.conn.response, 'PUT',
self.conn.make_path(self.path))
self.md5 = self.compute_md5sum(six.StringIO(data))
return data
def write_random_return_resp(self, size=None, hdrs=None, parms=None,
cfg=None):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
data = self.random_data(size)
resp = self.write(data, hdrs=hdrs, parms=parms, cfg=cfg,
return_resp=True)
if not resp:
raise ResponseError(self.conn.response)
self.md5 = self.compute_md5sum(six.StringIO(data))
return resp
def post(self, hdrs=None, parms=None, cfg=None, return_resp=False):
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
headers = self.make_headers(cfg=cfg)
headers.update(hdrs)
self.conn.make_request('POST', self.path, hdrs=headers,
parms=parms, cfg=cfg)
if self.conn.response.status not in (201, 202):
raise ResponseError(self.conn.response, 'POST',
self.conn.make_path(self.path))
if return_resp:
return self.conn.response
return True
| 33.438266 | 79 | 0.522493 |
d05d3df2cd3f63a844ce605a658bf818c64a69b9 | 2,846 | py | Python | MNIST/TF_handwriting_convolutional.py | vais-ral/CCPi-ML | ca9baeb0dd5db3a97ac8ab9e33e03aeae42ebfa4 | [
"Apache-2.0"
] | null | null | null | MNIST/TF_handwriting_convolutional.py | vais-ral/CCPi-ML | ca9baeb0dd5db3a97ac8ab9e33e03aeae42ebfa4 | [
"Apache-2.0"
] | null | null | null | MNIST/TF_handwriting_convolutional.py | vais-ral/CCPi-ML | ca9baeb0dd5db3a97ac8ab9e33e03aeae42ebfa4 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Fri Jul 13 10:57:52 2018
@author: zyv57124
"""
import scipy.io as sio
import tensorflow as tf
from tensorflow import keras
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers.convolutional import Conv2D
from keras.layers.convolutional import MaxPooling2D
from keras.layers.core import Activation
from keras.layers.core import Flatten
from keras.layers.core import Dense
from keras import backend as K
from keras.layers.normalization import BatchNormalization
from keras.layers.core import Dropout
from keras.preprocessing.image import ImageDataGenerator
from keras.optimizers import Adam
from keras.preprocessing.image import img_to_array
from sklearn.preprocessing import LabelBinarizer
from sklearn.model_selection import train_test_split
import argparse
import random
import pickle
#Load data ------------------------------------------------------
def loadMATData(file1):
return sio.loadmat(file1)
#Load Data-------------------------------------------------------
data = loadMATData('ex3data1.mat')
features = data['X']
labels = data['y']
mask = labels == 10
labels[mask] = 0
#shuffle data---------------------------------------------------
ran = np.arange(features.shape[0])
np.random.shuffle(ran)
features = features[ran]
labels = labels[ran]
training_features = features[:3500]
training_labels = labels[:3500]
test_features = features[3501:]
test_labels = labels[3501:]
#TF Neaural Network Builder--------------------------------------
height, width, depth = 20, 20, 1
training_features = training_features.astype('float32')
test_features = test_features.astype('float32')
test_labels = test_labels.astype('float32')
batch_size = 32
num_epochs = 200
kernel_size = 3
pool_size = 2
conv_depth_1 = 32
conv_depth_2 = 64
drop_prob_1 = 0.25
drop_prob_2 = 0.5
hidden_size = 512
n_images = 5000
#
#input_shapes = np.array([20, 20, 1])
#input_shapes = input_shapes.reshape(n_images, 20, 20, 1)
print(training_features.shape)
training_features = training_features.reshape(training_features.shape[0],20, 20, 1)
print(training_features.shape)
model = Sequential()
model.add(Conv2D(32, (7, 7), padding="SAME", input_shape=(20, 20, 1)))
model.add(Flatten())
model.add(Dense(10, activation='softmax'))
model.compile(optimizer=tf.train.AdamOptimizer(), loss='sparse_categorical_crossentropy', metrics=['accuracy'])
model.fit(training_features, training_labels, epochs=15)
test_features = test_features.reshape(test_features.shape[0],20, 20, 1)
predictions = model.predict(test_features)
count = 0
for i in range(0, len(test_labels)):
pred = (np.argmax(predictions[i]))
if test_labels[i][0] == pred:
count +=1
print("Correct predictions: ", count) | 28.747475 | 111 | 0.721363 |
5d9b07f23d659b00864ced60f8279c3befaf99ca | 3,400 | py | Python | examples/expectations/column_aggregate_expectation_template.py | afeld/great_expectations | ca2dc1f8951c727040d680b543aee91753c2c862 | [
"Apache-2.0"
] | null | null | null | examples/expectations/column_aggregate_expectation_template.py | afeld/great_expectations | ca2dc1f8951c727040d680b543aee91753c2c862 | [
"Apache-2.0"
] | null | null | null | examples/expectations/column_aggregate_expectation_template.py | afeld/great_expectations | ca2dc1f8951c727040d680b543aee91753c2c862 | [
"Apache-2.0"
] | null | null | null | """
This is a template for creating custom ColumnExpectations.
For detailed instructions on how to use it, please see:
https://docs.greatexpectations.io/docs/guides/expectations/creating_custom_expectations/how_to_create_custom_column_aggregate_expectations
"""
from typing import Dict
from great_expectations.core import ExpectationConfiguration
from great_expectations.execution_engine import (
ExecutionEngine,
PandasExecutionEngine,
SparkDFExecutionEngine,
SqlAlchemyExecutionEngine,
)
from great_expectations.expectations.expectation import ColumnExpectation
from great_expectations.expectations.metrics import (
ColumnAggregateMetricProvider,
column_aggregate_partial,
column_aggregate_value,
)
# This class defines a Metric to support your Expectation.
# For most ColumnExpectations, the main business logic for calculation will live in this class.
class ColumnAggregateMatchesSomeCriteria(ColumnAggregateMetricProvider):
# This is the id string that will be used to reference your Metric.
metric_name = "METRIC NAME GOES HERE"
# This method implements the core logic for the PandasExecutionEngine
@column_aggregate_value(engine=PandasExecutionEngine)
def _pandas(cls, column, **kwargs):
raise NotImplementedError
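    # A minimal illustrative body (hypothetical, not part of the template):
    # e.g. a metric returning the column's coefficient of variation:
    #
    #   @column_aggregate_value(engine=PandasExecutionEngine)
    #   def _pandas(cls, column, **kwargs):
    #       return column.std() / column.mean()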
# This method defines the business logic for evaluating your Metric when using a SqlAlchemyExecutionEngine
# @column_aggregate_partial(engine=SqlAlchemyExecutionEngine)
# def _sqlalchemy(cls, column, _dialect, **kwargs):
# raise NotImplementedError
#
# This method defines the business logic for evaluating your Metric when using a SparkDFExecutionEngine
# @column_aggregate_partial(engine=SparkDFExecutionEngine)
# def _spark(cls, column, **kwargs):
# raise NotImplementedError
# This class defines the Expectation itself
class ExpectColumnAggregateToMatchSomeCriteria(ColumnExpectation):
"""TODO: add a docstring here"""
# These examples will be shown in the public gallery.
# They will also be executed as unit tests for your Expectation.
examples = []
# This is a tuple consisting of all Metrics necessary to evaluate the Expectation.
metric_dependencies = ("METRIC NAME GOES HERE",)
    # This is a tuple of parameter names that can affect whether the Expectation evaluates to True or False.
success_keys = ("min_value", "strict_min", "max_value", "strict_max")
# This dictionary contains default values for any parameters that should have default values.
default_kwarg_values = {}
# This method performs a validation of your metrics against your success keys, returning a dict indicating the success or failure of the Expectation.
def _validate(
self,
configuration: ExpectationConfiguration,
metrics: Dict,
runtime_configuration: dict = None,
execution_engine: ExecutionEngine = None,
):
raise NotImplementedError
# This object contains metadata for display in the public Gallery
library_metadata = {
"tags": [], # Tags for this Expectation in the Gallery
"contributors": [ # Github handles for all contributors to this Expectation.
"@your_name_here", # Don't forget to add your github handle here!
],
}
if __name__ == "__main__":
ExpectColumnAggregateToMatchSomeCriteria().print_diagnostic_checklist()
| 40 | 153 | 0.756176 |
b7a5b4e7e027ddc784ef714a76cd98a9f6074bbd | 3,330 | py | Python | code/tools/external/python/pygccxml/declarations/type_visitor.py | jgresula/jagpdf | 6c36958b109e6522e6b57d04144dd83c024778eb | [
"MIT"
] | 54 | 2015-02-16T14:25:16.000Z | 2022-03-16T07:54:25.000Z | code/tools/external/python/pygccxml/declarations/type_visitor.py | jgresula/jagpdf | 6c36958b109e6522e6b57d04144dd83c024778eb | [
"MIT"
] | null | null | null | code/tools/external/python/pygccxml/declarations/type_visitor.py | jgresula/jagpdf | 6c36958b109e6522e6b57d04144dd83c024778eb | [
"MIT"
] | 30 | 2015-03-05T08:52:25.000Z | 2022-02-17T13:49:15.000Z | # Copyright 2004 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
"""
defines types visitor class interface
"""
class type_visitor_t(object):
"""
types visitor interface
All functions within this class should be redefined in derived classes.
"""
def __init__(self):
object.__init__(self)
def visit_void( self ):
raise NotImplementedError()
def visit_char( self ):
raise NotImplementedError()
def visit_unsigned_char( self ):
raise NotImplementedError()
def visit_signed_char( self ):
raise NotImplementedError()
def visit_wchar( self ):
raise NotImplementedError()
def visit_short_int( self ):
raise NotImplementedError()
def visit_short_unsigned_int( self ):
raise NotImplementedError()
def visit_bool( self ):
raise NotImplementedError()
def visit_int( self ):
raise NotImplementedError()
def visit_unsigned_int( self ):
raise NotImplementedError()
def visit_long_int( self ):
raise NotImplementedError()
def visit_long_unsigned_int( self ):
raise NotImplementedError()
def visit_long_long_int( self ):
raise NotImplementedError()
def visit_long_long_unsigned_int( self ):
raise NotImplementedError()
def visit_float( self ):
raise NotImplementedError()
def visit_double( self ):
raise NotImplementedError()
def visit_long_double( self ):
raise NotImplementedError()
def visit_complex_long_double(self):
raise NotImplementedError()
def visit_complex_double(self):
raise NotImplementedError()
def visit_complex_float(self):
raise NotImplementedError()
def visit_jbyte(self):
raise NotImplementedError()
def visit_jshort(self):
raise NotImplementedError()
def visit_jint(self):
raise NotImplementedError()
def visit_jlong(self):
raise NotImplementedError()
def visit_jfloat(self):
raise NotImplementedError()
def visit_jdouble(self):
raise NotImplementedError()
def visit_jchar(self):
raise NotImplementedError()
def visit_jboolean(self):
raise NotImplementedError()
def visit_volatile( self ):
raise NotImplementedError()
def visit_const( self ):
raise NotImplementedError()
def visit_pointer( self ):
raise NotImplementedError()
def visit_reference( self ):
raise NotImplementedError()
def visit_array( self ):
raise NotImplementedError()
def visit_free_function_type( self ):
raise NotImplementedError()
def visit_member_function_type( self ):
raise NotImplementedError()
def visit_member_variable_type( self ):
raise NotImplementedError()
def visit_declarated( self ):
raise NotImplementedError()
def visit_restrict( self ):
raise NotImplementedError()
| 25.227273 | 75 | 0.627628 |
f7930f37fe1811c075b7ec3976fe12faf6721d9e | 404 | py | Python | datastructures/trees/binary/lca_recursion.py | hariharanragothaman/pymaster | b3d033b4d5c75c69f587c94d9d12cd4a349a6a69 | [
"Apache-2.0"
] | 10 | 2020-09-21T22:23:09.000Z | 2022-01-25T16:58:44.000Z | datastructures/trees/binary/lca_recursion.py | hariharanragothaman/pymaster | b3d033b4d5c75c69f587c94d9d12cd4a349a6a69 | [
"Apache-2.0"
] | null | null | null | datastructures/trees/binary/lca_recursion.py | hariharanragothaman/pymaster | b3d033b4d5c75c69f587c94d9d12cd4a349a6a69 | [
"Apache-2.0"
] | null | null | null | """
Standard recipe to find the lowest common ancestor
"""
def lca(self, root, a, b):
# Standard Recipe to find the lowest-common ancestor
if root is None or root is a or root is b:
return root
left = self.lca(root.left, a, b)
right = self.lca(root.right, a, b)
if left is not None and right is not None:
return root
return left if left else right
| 26.933333 | 57 | 0.626238 |
48e7a64c643d6c73d67723f7d381d422e42d4a4a | 463 | py | Python | nomadgram/users/migrations/0004_user_profile_image.py | kcrossfitter/nomadgram | a75d83c4421249ca25527ef7817ccb3e3f4213e5 | [
"MIT"
] | null | null | null | nomadgram/users/migrations/0004_user_profile_image.py | kcrossfitter/nomadgram | a75d83c4421249ca25527ef7817ccb3e3f4213e5 | [
"MIT"
] | 18 | 2020-06-05T17:05:48.000Z | 2022-03-11T23:26:59.000Z | nomadgram/users/migrations/0004_user_profile_image.py | kcrossfitter/nomadgram | a75d83c4421249ca25527ef7817ccb3e3f4213e5 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-01-28 15:56
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0003_auto_20180127_1042'),
]
operations = [
migrations.AddField(
model_name='user',
name='profile_image',
field=models.ImageField(null=True, upload_to=''),
),
]
| 22.047619 | 61 | 0.615551 |
67d26e64b89cc40dc53f5014b3f41d3c371fa0e2 | 56,659 | py | Python | openprocurement/tender/openuadefense/tests/auction.py | Leits/openprocurement.tender.openuadefense | e7c512ed21166ae1928950bce80a11106fa2e545 | [
"Apache-2.0"
] | null | null | null | openprocurement/tender/openuadefense/tests/auction.py | Leits/openprocurement.tender.openuadefense | e7c512ed21166ae1928950bce80a11106fa2e545 | [
"Apache-2.0"
] | 2 | 2021-03-26T00:34:56.000Z | 2022-03-21T22:20:41.000Z | openprocurement/tender/openuadefense/tests/auction.py | leits/openprocurement.tender.openuadefense | e7c512ed21166ae1928950bce80a11106fa2e545 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import unittest
from datetime import timedelta
from openprocurement.api.models import get_now
from openprocurement.tender.openuadefense.tests.base import (
BaseTenderUAContentWebTest, test_tender_data, test_features_tender_ua_data)
from openprocurement.api.tests.base import test_features_tender_data, test_lots, test_organization
from openprocurement.tender.openua.tests.base import test_bids
# from openprocurement.api.tests.base import BaseTenderWebTest, test_tender_data, test_features_tender_data, test_bids, test_lots
class TenderAuctionResourceTest(BaseTenderUAContentWebTest):
#initial_data = tender_data
initial_status = 'active.tendering'
initial_bids = test_bids
def test_get_tender_auction_not_found(self):
response = self.app.get('/tenders/some_id/auction', status=404)
self.assertEqual(response.status, '404 Not Found')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': u'Not Found', u'location':
u'url', u'name': u'tender_id'}
])
response = self.app.patch_json('/tenders/some_id/auction', {'data': {}}, status=404)
self.assertEqual(response.status, '404 Not Found')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': u'Not Found', u'location':
u'url', u'name': u'tender_id'}
])
response = self.app.post_json('/tenders/some_id/auction', {'data': {}}, status=404)
self.assertEqual(response.status, '404 Not Found')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['status'], 'error')
self.assertEqual(response.json['errors'], [
{u'description': u'Not Found', u'location':
u'url', u'name': u'tender_id'}
])
def test_get_tender_auction(self):
response = self.app.get('/tenders/{}/auction'.format(self.tender_id), status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't get auction info in current (active.tendering) tender status")
self.set_status('active.auction')
response = self.app.get('/tenders/{}/auction'.format(self.tender_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
auction = response.json['data']
self.assertNotEqual(auction, self.initial_data)
self.assertIn('dateModified', auction)
self.assertIn('minimalStep', auction)
self.assertNotIn("procuringEntity", auction)
self.assertNotIn("tenderers", auction["bids"][0])
self.assertEqual(auction["bids"][0]['value']['amount'], self.initial_bids[0]['value']['amount'])
self.assertEqual(auction["bids"][1]['value']['amount'], self.initial_bids[1]['value']['amount'])
#self.assertEqual(self.initial_data["auctionPeriod"]['startDate'], auction["auctionPeriod"]['startDate'])
response = self.app.get('/tenders/{}/auction?opt_jsonp=callback'.format(self.tender_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/javascript')
self.assertIn('callback({"data": {"', response.body)
response = self.app.get('/tenders/{}/auction?opt_pretty=1'.format(self.tender_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertIn('{\n "data": {\n "', response.body)
self.set_status('active.qualification')
response = self.app.get('/tenders/{}/auction'.format(self.tender_id), status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't get auction info in current (active.qualification) tender status")
def test_post_tender_auction(self):
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': {}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't report auction results in current (active.tendering) tender status")
self.set_status('active.auction')
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': {'bids': [{'invalid_field': 'invalid_value'}]}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'], [
{u'description': {u'invalid_field': u'Rogue field'}, u'location': u'body', u'name': u'bids'}
])
patch_data = {
'bids': [
{
"id": self.initial_bids[1]['id'],
"value": {
"amount": 409,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
}
]
}
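        # Illustrative note (not in the original test): the auction results
        # payload must contain one entry per tender bid, matched by `id`;
        # the requests below first exercise the count check, then the id
        # validation.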
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Number of auction results did not match the number of tender bids")
patch_data['bids'].append({
"value": {
"amount": 419,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
})
patch_data['bids'][1]['id'] = "some_id"
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], {u'id': [u'Hash value is wrong length.']})
patch_data['bids'][1]['id'] = "00000000000000000000000000000000"
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Auction bids should be identical to the tender bids")
patch_data['bids'][1]['id'] = self.initial_bids[0]['id']
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
tender = response.json['data']
self.assertNotEqual(tender["bids"][0]['value']['amount'], self.initial_bids[0]['value']['amount'])
self.assertNotEqual(tender["bids"][1]['value']['amount'], self.initial_bids[1]['value']['amount'])
self.assertEqual(tender["bids"][0]['value']['amount'], patch_data["bids"][1]['value']['amount'])
self.assertEqual(tender["bids"][1]['value']['amount'], patch_data["bids"][0]['value']['amount'])
self.assertEqual('active.qualification', tender["status"])
self.assertIn("tenderers", tender["bids"][0])
self.assertIn("name", tender["bids"][0]["tenderers"][0])
# self.assertIn(tender["awards"][0]["id"], response.headers['Location'])
self.assertEqual(tender["awards"][0]['bid_id'], patch_data["bids"][0]['id'])
self.assertEqual(tender["awards"][0]['value']['amount'], patch_data["bids"][0]['value']['amount'])
self.assertEqual(tender["awards"][0]['suppliers'], self.initial_bids[0]['tenderers'])
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't report auction results in current (active.qualification) tender status")
def test_patch_tender_auction(self):
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': {}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't update auction urls in current (active.tendering) tender status")
self.set_status('active.auction')
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': {'bids': [{'invalid_field': 'invalid_value'}]}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'], [
{u'description': {u'invalid_field': u'Rogue field'}, u'location': u'body', u'name': u'bids'}
])
patch_data = {
'auctionUrl': u'http://auction-sandbox.openprocurement.org/tenders/{}'.format(self.tender_id),
'bids': [
{
"id": self.initial_bids[1]['id'],
"participationUrl": u'http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}'.format(self.tender_id, self.initial_bids[1]['id'])
}
]
}
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Number of auction results did not match the number of tender bids")
patch_data['bids'].append({
"participationUrl": u'http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}'.format(self.tender_id, self.initial_bids[0]['id'])
})
patch_data['bids'][1]['id'] = "some_id"
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], {u'id': [u'Hash value is wrong length.']})
patch_data['bids'][1]['id'] = "00000000000000000000000000000000"
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Auction bids should be identical to the tender bids")
patch_data['bids'][1]['id'] = self.initial_bids[0]['id']
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
tender = response.json['data']
self.assertEqual(tender["bids"][0]['participationUrl'], patch_data["bids"][1]['participationUrl'])
self.assertEqual(tender["bids"][1]['participationUrl'], patch_data["bids"][0]['participationUrl'])
self.set_status('complete')
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't update auction urls in current (complete) tender status")
def test_post_tender_auction_document(self):
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.post('/tenders/{}/documents'.format(self.tender_id), upload_files=[('file', 'name.doc', 'content')], status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't add document in current (active.tendering) tender status")
self.set_status('active.auction')
response = self.app.post('/tenders/{}/documents'.format(self.tender_id), upload_files=[('file', 'name.doc', 'content')])
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
doc_id = response.json["data"]['id']
key = response.json["data"]["url"].split('?')[-1].split('=')[-1]
patch_data = {
'bids': [
{
"id": self.initial_bids[1]['id'],
"value": {
"amount": 409,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
},
{
'id': self.initial_bids[0]['id'],
"value": {
"amount": 419,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
}
]
}
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
response = self.app.put('/tenders/{}/documents/{}'.format(self.tender_id, doc_id), upload_files=[('file', 'name.doc', 'content_with_names')])
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(doc_id, response.json["data"]["id"])
key2 = response.json["data"]["url"].split('?')[-1].split('=')[-1]
self.assertNotEqual(key, key2)
self.set_status('complete')
response = self.app.post('/tenders/{}/documents'.format(self.tender_id), upload_files=[('file', 'name.doc', 'content')], status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't add document in current (complete) tender status")
class TenderSameValueAuctionResourceTest(BaseTenderUAContentWebTest):
initial_status = 'active.auction'
initial_bids = [
{
"tenderers": [
test_organization
],
"value": {
"amount": 469,
"currency": "UAH",
"valueAddedTaxIncluded": True
},
'selfEligible': True,
'selfQualified': True,
}
for i in range(3)
]
def test_post_tender_auction_not_changed(self):
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': {'bids': self.initial_bids}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
tender = response.json['data']
self.assertEqual('active.qualification', tender["status"])
self.assertEqual(tender["awards"][0]['bid_id'], self.initial_bids[0]['id'])
self.assertEqual(tender["awards"][0]['value']['amount'], self.initial_bids[0]['value']['amount'])
self.assertEqual(tender["awards"][0]['suppliers'], self.initial_bids[0]['tenderers'])
def test_post_tender_auction_reversed(self):
self.app.authorization = ('Basic', ('auction', ''))
now = get_now()
patch_data = {
'bids': [
{
"id": b['id'],
"date": (now - timedelta(seconds=i)).isoformat(),
"value": b['value']
}
for i, b in enumerate(self.initial_bids)
]
}
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
tender = response.json['data']
self.assertEqual('active.qualification', tender["status"])
self.assertEqual(tender["awards"][0]['bid_id'], self.initial_bids[2]['id'])
self.assertEqual(tender["awards"][0]['value']['amount'], self.initial_bids[2]['value']['amount'])
self.assertEqual(tender["awards"][0]['suppliers'], self.initial_bids[2]['tenderers'])
class TenderLotAuctionResourceTest(TenderAuctionResourceTest):
initial_lots = test_lots
initial_data = test_tender_data
def test_get_tender_auction(self):
response = self.app.get('/tenders/{}/auction'.format(self.tender_id), status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't get auction info in current (active.tendering) tender status")
self.set_status('active.auction')
response = self.app.get('/tenders/{}/auction'.format(self.tender_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
auction = response.json['data']
self.assertNotEqual(auction, self.initial_data)
self.assertIn('dateModified', auction)
self.assertIn('minimalStep', auction)
self.assertIn('lots', auction)
self.assertNotIn("procuringEntity", auction)
self.assertNotIn("tenderers", auction["bids"][0])
self.assertEqual(auction["bids"][0]['lotValues'][0]['value']['amount'], self.initial_bids[0]['lotValues'][0]['value']['amount'])
self.assertEqual(auction["bids"][1]['lotValues'][0]['value']['amount'], self.initial_bids[1]['lotValues'][0]['value']['amount'])
self.set_status('active.qualification')
response = self.app.get('/tenders/{}/auction'.format(self.tender_id), status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't get auction info in current (active.qualification) tender status")
def test_post_tender_auction(self):
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': {}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't report auction results in current (active.tendering) tender status")
self.set_status('active.auction')
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': {'bids': [{'invalid_field': 'invalid_value'}]}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'], [
{u'description': {u'invalid_field': u'Rogue field'}, u'location': u'body', u'name': u'bids'}
])
patch_data = {
'bids': [
{
"id": self.initial_bids[1]['id'],
'lotValues': [
{
"value": {
"amount": 409,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
}
]
}
]
}
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Number of auction results did not match the number of tender bids")
patch_data['bids'].append({
'lotValues': [
{
"value": {
"amount": 419,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
}
]
})
patch_data['bids'][1]['id'] = "some_id"
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], {u'id': [u'Hash value is wrong length.']})
patch_data['bids'][1]['id'] = "00000000000000000000000000000000"
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Auction bids should be identical to the tender bids")
patch_data['bids'][1]['id'] = self.initial_bids[0]['id']
for lot in self.initial_lots:
response = self.app.post_json('/tenders/{}/auction/{}'.format(self.tender_id, lot['id']), {'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
tender = response.json['data']
self.assertNotEqual(tender["bids"][0]['lotValues'][0]['value']['amount'], self.initial_bids[0]['lotValues'][0]['value']['amount'])
self.assertNotEqual(tender["bids"][1]['lotValues'][0]['value']['amount'], self.initial_bids[1]['lotValues'][0]['value']['amount'])
self.assertEqual(tender["bids"][0]['lotValues'][0]['value']['amount'], patch_data["bids"][1]['lotValues'][0]['value']['amount'])
self.assertEqual(tender["bids"][1]['lotValues'][0]['value']['amount'], patch_data["bids"][0]['lotValues'][0]['value']['amount'])
self.assertEqual('active.qualification', tender["status"])
self.assertIn("tenderers", tender["bids"][0])
self.assertIn("name", tender["bids"][0]["tenderers"][0])
# self.assertIn(tender["awards"][0]["id"], response.headers['Location'])
self.assertEqual(tender["awards"][0]['bid_id'], patch_data["bids"][0]['id'])
self.assertEqual(tender["awards"][0]['value']['amount'], patch_data["bids"][0]['lotValues'][0]['value']['amount'])
self.assertEqual(tender["awards"][0]['suppliers'], self.initial_bids[0]['tenderers'])
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't report auction results in current (active.qualification) tender status")
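    # PATCH /auction only updates auction/participation URLs. Besides the same
    # bid checks as POST, URLs must be supplied per lot: in each bid's
    # lotValues, and via a 'lots' entry for the tender-level auctionUrl.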
def test_patch_tender_auction(self):
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': {}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't update auction urls in current (active.tendering) tender status")
self.set_status('active.auction')
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {'data': {'id': self.tender_id}})
self.assertEqual(response.status, '200 OK')
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': {'bids': [{'invalid_field': 'invalid_value'}]}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'], [
{u'description': {u'invalid_field': u'Rogue field'}, u'location': u'body', u'name': u'bids'}
])
patch_data = {
'auctionUrl': u'http://auction-sandbox.openprocurement.org/tenders/{}'.format(self.tender_id),
'bids': [
{
"id": self.initial_bids[1]['id'],
"participationUrl": u'http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}'.format(self.tender_id, self.initial_bids[1]['id'])
}
]
}
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'], [
{u'description': [{u'participationUrl': [u'url should be posted for each lot of bid']}], u'location': u'body', u'name': u'bids'}
])
del patch_data['bids'][0]["participationUrl"]
patch_data['bids'][0]['lotValues'] = [
{
"participationUrl": u'http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}'.format(self.tender_id, self.initial_bids[0]['id'])
}
]
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'], [
{u'description': ["url should be posted for each lot"], u'location': u'body', u'name': u'auctionUrl'}
])
patch_data['lots'] = [
{
"auctionUrl": patch_data.pop('auctionUrl')
}
]
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Number of auction results did not match the number of tender bids")
patch_data['bids'].append({
'lotValues': [
{
"participationUrl": u'http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}'.format(self.tender_id, self.initial_bids[0]['id'])
}
]
})
patch_data['bids'][1]['id'] = "some_id"
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], {u'id': [u'Hash value is wrong length.']})
patch_data['bids'][1]['id'] = "00000000000000000000000000000000"
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Auction bids should be identical to the tender bids")
patch_data['bids'][1]['id'] = self.initial_bids[0]['id']
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertIsNone(response.json)
for lot in self.initial_lots:
response = self.app.patch_json('/tenders/{}/auction/{}'.format(self.tender_id, lot['id']), {'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
tender = response.json['data']
self.assertEqual(tender["bids"][0]['lotValues'][0]['participationUrl'], patch_data["bids"][1]['lotValues'][0]['participationUrl'])
self.assertEqual(tender["bids"][1]['lotValues'][0]['participationUrl'], patch_data["bids"][0]['lotValues'][0]['participationUrl'])
self.assertEqual(tender["lots"][0]['auctionUrl'], patch_data["lots"][0]['auctionUrl'])
self.set_status('complete')
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't update auction urls in current (complete) tender status")
def test_post_tender_auction_document(self):
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.post('/tenders/{}/documents'.format(self.tender_id), upload_files=[('file', 'name.doc', 'content')], status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't add document in current (active.tendering) tender status")
self.set_status('active.auction')
response = self.app.post('/tenders/{}/documents'.format(self.tender_id), upload_files=[('file', 'name.doc', 'content')])
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
doc_id = response.json["data"]['id']
key = response.json["data"]["url"].split('?')[-1].split('=')[-1]
response = self.app.patch_json('/tenders/{}/documents/{}'.format(self.tender_id, doc_id), {'data': {"documentOf": "lot", 'relatedItem': self.initial_lots[0]['id']}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json["data"]["documentOf"], "lot")
self.assertEqual(response.json["data"]["relatedItem"], self.initial_lots[0]['id'])
patch_data = {
'bids': [
{
"id": self.initial_bids[1]['id'],
'lotValues': [
{
"value": {
"amount": 409,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
}
]
},
{
'id': self.initial_bids[0]['id'],
'lotValues': [
{
"value": {
"amount": 419,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
}
]
}
]
}
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
response = self.app.put('/tenders/{}/documents/{}'.format(self.tender_id, doc_id), upload_files=[('file', 'name.doc', 'content_with_names')])
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(doc_id, response.json["data"]["id"])
key2 = response.json["data"]["url"].split('?')[-1].split('=')[-1]
self.assertNotEqual(key, key2)
self.set_status('complete')
response = self.app.post('/tenders/{}/documents'.format(self.tender_id), upload_files=[('file', 'name.doc', 'content')], status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't add document in current (complete) tender status")
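# Same scenarios with two lots: the overridden tests add the multi-lot
# validations (per-lot lotValues counts, relatedLot consistency, lot-level
# auction URLs, and the cancelled-lot case).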
class TenderMultipleLotAuctionResourceTest(TenderAuctionResourceTest):
initial_lots = 2 * test_lots
def test_get_tender_auction(self):
response = self.app.get('/tenders/{}/auction'.format(self.tender_id), status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't get auction info in current (active.tendering) tender status")
self.set_status('active.auction')
response = self.app.get('/tenders/{}/auction'.format(self.tender_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
auction = response.json['data']
self.assertNotEqual(auction, self.initial_data)
self.assertIn('dateModified', auction)
self.assertIn('minimalStep', auction)
self.assertIn('lots', auction)
self.assertNotIn("procuringEntity", auction)
self.assertNotIn("tenderers", auction["bids"][0])
self.assertEqual(auction["bids"][0]['lotValues'][0]['value']['amount'], self.initial_bids[0]['lotValues'][0]['value']['amount'])
self.assertEqual(auction["bids"][1]['lotValues'][0]['value']['amount'], self.initial_bids[1]['lotValues'][0]['value']['amount'])
self.assertEqual(auction["bids"][0]['lotValues'][1]['value']['amount'], self.initial_bids[0]['lotValues'][1]['value']['amount'])
self.assertEqual(auction["bids"][1]['lotValues'][1]['value']['amount'], self.initial_bids[1]['lotValues'][1]['value']['amount'])
self.set_status('active.qualification')
response = self.app.get('/tenders/{}/auction'.format(self.tender_id), status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't get auction info in current (active.qualification) tender status")
def test_post_tender_auction(self):
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': {}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't report auction results in current (active.tendering) tender status")
self.set_status('active.auction')
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': {'bids': [{'invalid_field': 'invalid_value'}]}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'], [
{u'description': {u'invalid_field': u'Rogue field'}, u'location': u'body', u'name': u'bids'}
])
patch_data = {
'bids': [
{
"id": self.initial_bids[1]['id'],
'lotValues': [
{
"value": {
"amount": 409,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
}
]
}
]
}
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Number of auction results did not match the number of tender bids")
patch_data['bids'].append({
'lotValues': [
{
"value": {
"amount": 419,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
}
]
})
patch_data['bids'][1]['id'] = "some_id"
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], {u'id': [u'Hash value is wrong length.']})
patch_data['bids'][1]['id'] = "00000000000000000000000000000000"
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Auction bids should be identical to the tender bids")
patch_data['bids'][1]['id'] = self.initial_bids[0]['id']
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], [{"lotValues": ["Number of lots of auction results did not match the number of tender lots"]}])
for bid in patch_data['bids']:
bid['lotValues'] = [bid['lotValues'][0].copy() for i in self.initial_lots]
patch_data['bids'][0]['lotValues'][1]['relatedLot'] = self.initial_bids[0]['lotValues'][0]['relatedLot']
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], [{u'lotValues': [{u'relatedLot': [u'relatedLot should be one of lots of bid']}]}])
patch_data['bids'][0]['lotValues'][1]['relatedLot'] = self.initial_bids[0]['lotValues'][1]['relatedLot']
for lot in self.initial_lots:
response = self.app.post_json('/tenders/{}/auction/{}'.format(self.tender_id, lot['id']), {'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
tender = response.json['data']
self.assertNotEqual(tender["bids"][0]['lotValues'][0]['value']['amount'], self.initial_bids[0]['lotValues'][0]['value']['amount'])
self.assertNotEqual(tender["bids"][1]['lotValues'][0]['value']['amount'], self.initial_bids[1]['lotValues'][0]['value']['amount'])
self.assertEqual(tender["bids"][0]['lotValues'][0]['value']['amount'], patch_data["bids"][1]['lotValues'][0]['value']['amount'])
self.assertEqual(tender["bids"][1]['lotValues'][0]['value']['amount'], patch_data["bids"][0]['lotValues'][0]['value']['amount'])
self.assertEqual('active.qualification', tender["status"])
self.assertIn("tenderers", tender["bids"][0])
self.assertIn("name", tender["bids"][0]["tenderers"][0])
# self.assertIn(tender["awards"][0]["id"], response.headers['Location'])
self.assertEqual(tender["awards"][0]['bid_id'], patch_data["bids"][0]['id'])
self.assertEqual(tender["awards"][0]['value']['amount'], patch_data["bids"][0]['lotValues'][0]['value']['amount'])
self.assertEqual(tender["awards"][0]['suppliers'], self.initial_bids[0]['tenderers'])
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't report auction results in current (active.qualification) tender status")
def test_patch_tender_auction(self):
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': {}}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't update auction urls in current (active.tendering) tender status")
self.set_status('active.auction')
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {'data': {'id': self.tender_id}})
self.assertEqual(response.status, '200 OK')
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': {'bids': [{'invalid_field': 'invalid_value'}]}}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'], [
{u'description': {u'invalid_field': u'Rogue field'}, u'location': u'body', u'name': u'bids'}
])
patch_data = {
'auctionUrl': u'http://auction-sandbox.openprocurement.org/tenders/{}'.format(self.tender_id),
'bids': [
{
"id": self.initial_bids[1]['id'],
"participationUrl": u'http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}'.format(self.tender_id, self.initial_bids[1]['id'])
}
]
}
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'], [
{u'description': [{u'participationUrl': [u'url should be posted for each lot of bid']}], u'location': u'body', u'name': u'bids'}
])
del patch_data['bids'][0]["participationUrl"]
patch_data['bids'][0]['lotValues'] = [
{
"participationUrl": u'http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}'.format(self.tender_id, self.initial_bids[0]['id'])
}
]
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'], [
{u'description': ["url should be posted for each lot"], u'location': u'body', u'name': u'auctionUrl'}
])
patch_data['lots'] = [
{
"auctionUrl": patch_data.pop('auctionUrl')
}
]
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Number of auction results did not match the number of tender bids")
patch_data['bids'].append({
'lotValues': [
{
"participationUrl": u'http://auction-sandbox.openprocurement.org/tenders/{}?key_for_bid={}'.format(self.tender_id, self.initial_bids[0]['id'])
}
]
})
patch_data['bids'][1]['id'] = "some_id"
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], {u'id': [u'Hash value is wrong length.']})
patch_data['bids'][1]['id'] = "00000000000000000000000000000000"
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Auction bids should be identical to the tender bids")
patch_data['bids'][1]['id'] = self.initial_bids[0]['id']
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], u'Number of lots did not match the number of tender lots')
patch_data['lots'] = [patch_data['lots'][0].copy() for i in self.initial_lots]
patch_data['lots'][1]['id'] = "00000000000000000000000000000000"
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], u'Auction lots should be identical to the tender lots')
patch_data['lots'][1]['id'] = self.initial_lots[1]['id']
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], [{"lotValues": ["Number of lots of auction results did not match the number of tender lots"]}])
for bid in patch_data['bids']:
bid['lotValues'] = [bid['lotValues'][0].copy() for i in self.initial_lots]
patch_data['bids'][0]['lotValues'][1]['relatedLot'] = self.initial_bids[0]['lotValues'][0]['relatedLot']
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data}, status=422)
self.assertEqual(response.status, '422 Unprocessable Entity')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], [{u'lotValues': [{u'relatedLot': [u'relatedLot should be one of lots of bid']}]}])
patch_data['bids'][0]['lotValues'][1]['relatedLot'] = self.initial_bids[0]['lotValues'][1]['relatedLot']
response = self.app.patch_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertIsNone(response.json)
for lot in self.initial_lots:
response = self.app.patch_json('/tenders/{}/auction/{}'.format(self.tender_id, lot['id']), {'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
tender = response.json['data']
self.assertEqual(tender["bids"][0]['lotValues'][0]['participationUrl'], patch_data["bids"][1]['lotValues'][0]['participationUrl'])
self.assertEqual(tender["bids"][1]['lotValues'][0]['participationUrl'], patch_data["bids"][0]['lotValues'][0]['participationUrl'])
self.assertEqual(tender["lots"][0]['auctionUrl'], patch_data["lots"][0]['auctionUrl'])
self.app.authorization = ('Basic', ('token', ''))
response = self.app.post_json('/tenders/{}/cancellations'.format(self.tender_id), {'data': {
'reason': 'cancellation reason',
'status': 'active',
"cancellationOf": "lot",
"relatedLot": self.initial_lots[0]['id']
}})
self.assertEqual(response.status, '201 Created')
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.patch_json('/tenders/{}/auction/{}'.format(self.tender_id, self.initial_lots[0]['id']), {'data': patch_data}, status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can update auction urls only in active lot status")
def test_post_tender_auction_document(self):
self.app.authorization = ('Basic', ('auction', ''))
response = self.app.post('/tenders/{}/documents'.format(self.tender_id), upload_files=[('file', 'name.doc', 'content')], status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't add document in current (active.tendering) tender status")
self.set_status('active.auction')
response = self.app.post('/tenders/{}/documents'.format(self.tender_id), upload_files=[('file', 'name.doc', 'content')])
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
doc_id = response.json["data"]['id']
key = response.json["data"]["url"].split('?')[-1].split('=')[-1]
response = self.app.patch_json('/tenders/{}/documents/{}'.format(self.tender_id, doc_id), {'data': {"documentOf": "lot", 'relatedItem': self.initial_lots[0]['id']}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json["data"]["documentOf"], "lot")
self.assertEqual(response.json["data"]["relatedItem"], self.initial_lots[0]['id'])
patch_data = {
'bids': [
{
"id": self.initial_bids[1]['id'],
'lotValues': [
{
"value": {
"amount": 409,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
}
for i in self.initial_lots
]
},
{
'id': self.initial_bids[0]['id'],
'lotValues': [
{
"value": {
"amount": 419,
"currency": "UAH",
"valueAddedTaxIncluded": True
}
}
for i in self.initial_lots
]
}
]
}
response = self.app.post_json('/tenders/{}/auction'.format(self.tender_id), {'data': patch_data})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
response = self.app.put('/tenders/{}/documents/{}'.format(self.tender_id, doc_id), upload_files=[('file', 'name.doc', 'content_with_names')])
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(doc_id, response.json["data"]["id"])
key2 = response.json["data"]["url"].split('?')[-1].split('=')[-1]
self.assertNotEqual(key, key2)
self.set_status('complete')
response = self.app.post('/tenders/{}/documents'.format(self.tender_id), upload_files=[('file', 'name.doc', 'content')], status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can't add document in current (complete) tender status")
class TenderFeaturesAuctionResourceTest(BaseTenderUAContentWebTest):
initial_data = test_features_tender_ua_data
initial_status = 'active.auction'
initial_bids = [
{
"parameters": [
{
"code": i["code"],
"value": 0.1,
}
for i in test_features_tender_data['features']
],
"tenderers": [
test_organization
],
"value": {
"amount": 469,
"currency": "UAH",
"valueAddedTaxIncluded": True
},
'selfEligible': True,
'selfQualified': True,
},
{
"parameters": [
{
"code": i["code"],
"value": 0.15,
}
for i in test_features_tender_data['features']
],
"tenderers": [
test_organization
],
"value": {
"amount": 479,
"currency": "UAH",
"valueAddedTaxIncluded": True
},
'selfEligible': True,
'selfQualified': True,
}
]
def test_get_tender_auction(self):
response = self.app.get('/tenders/{}/auction'.format(self.tender_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
auction = response.json['data']
self.assertNotEqual(auction, self.initial_data)
self.assertIn('dateModified', auction)
self.assertIn('minimalStep', auction)
self.assertNotIn("procuringEntity", auction)
self.assertNotIn("tenderers", auction["bids"][0])
self.assertEqual(auction["bids"][0]['value']['amount'], self.initial_bids[0]['value']['amount'])
self.assertEqual(auction["bids"][1]['value']['amount'], self.initial_bids[1]['value']['amount'])
self.assertIn('features', auction)
self.assertIn('parameters', auction["bids"][0])
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TenderAuctionResourceTest))
suite.addTest(unittest.makeSuite(TenderSameValueAuctionResourceTest))
suite.addTest(unittest.makeSuite(TenderFeaturesAuctionResourceTest))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| 53.451887 | 173 | 0.611147 |
645afd4eaac9343b36b5c47de6fc4ec110676f19 | 1,265 | py | Python | nginxpla/module/search_engine.py | evirma/nginxpla | 24ff15809cb3c6f81821c09d07e52c629fca491a | [
"MIT"
] | null | null | null | nginxpla/module/search_engine.py | evirma/nginxpla | 24ff15809cb3c6f81821c09d07e52c629fca491a | [
"MIT"
] | null | null | null | nginxpla/module/search_engine.py | evirma/nginxpla | 24ff15809cb3c6f81821c09d07e52c629fca491a | [
"MIT"
] | null | null | null | from nginxpla.module_config import ModuleConfig
from nginxpla.module.simple import SimpleModule
from functools import lru_cache
class SearchEngineModule(SimpleModule):
    def handle_record(self, record):
        # self.is_needed caches whether the 'se' column is requested by the
        # configured report: None = not checked yet, False = skip, True = compute.
        if self.is_needed is False:
            return record
        required = ['se']
        if self.is_needed is None and not self.config.is_required(set(required)):
            self.is_needed = False
            return record
        # Remember the positive answer as well, so is_required() runs only once.
        self.is_needed = True
        ua = record.get('http_user_agent', None)
        if not ua:
            record['se'] = '-'
        else:
            record['se'] = self.get_search_engine_by_ua(ua)
        return record
    # Note: lru_cache on a bound method keys the cache on (self, ua), so the
    # cache also keeps the instance alive; fine here as long as only a few
    # module instances exist per run.
    @lru_cache(maxsize=102400)
    def get_search_engine_by_ua(self, ua):
        options = self.config.options
        se = '-'
        # First engine whose patterns match the user agent wins.
        for p in options['engines'].values():
            if self.search(ua, p['searches']):
                se = p['title']
                break
        return se
    @staticmethod
    def search(what, where):
        # Plain substring scan; returns 1 on the first hit, 0 otherwise.
        for s in where:
            if what.find(s) != -1:
                return 1
        return 0
def __init__(self, module_config: ModuleConfig):
super(SearchEngineModule, self).__init__(module_config)
self.is_needed = None
self.config = module_config
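# Illustrative sketch, not part of the original module: detection above is
# plain substring matching, so a standalone equivalent (with a made-up engine
# table; real patterns come from the module's 'engines' options) looks like:
def _demo_search_engine(user_agent):
    engines = {
        'google': {'title': 'Google', 'searches': ['Googlebot']},
        'bing': {'title': 'Bing', 'searches': ['bingbot', 'msnbot']},
    }
    for engine in engines.values():
        if any(pattern in user_agent for pattern in engine['searches']):
            return engine['title']
    return '-'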
| 27.5 | 81 | 0.592885 |
e58713521b5f756e6da04e7786cfec76d9e823c8 | 3,192 | py | Python | recipes/docopt.cpp/all/conanfile.py | nadzkie0/conan-center-index | fde12bf20f2c4cb6a7554d09a5c9433a0f5cb72c | [
"MIT"
] | null | null | null | recipes/docopt.cpp/all/conanfile.py | nadzkie0/conan-center-index | fde12bf20f2c4cb6a7554d09a5c9433a0f5cb72c | [
"MIT"
] | 5 | 2021-03-25T01:49:56.000Z | 2021-03-28T16:42:12.000Z | recipes/docopt.cpp/all/conanfile.py | nadzkie0/conan-center-index | fde12bf20f2c4cb6a7554d09a5c9433a0f5cb72c | [
"MIT"
] | 4 | 2021-06-03T23:24:03.000Z | 2022-03-03T17:16:09.000Z | from conans import ConanFile, CMake, tools
import os
class DocoptCppConan(ConanFile):
name = "docopt.cpp"
license = "MIT"
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/docopt/docopt.cpp"
settings = "os", "compiler", "build_type", "arch"
description = "C++11 port of docopt"
options = {"shared": [True, False], "fPIC": [True, False], "boost_regex": [True, False]}
default_options = {"shared": False, "fPIC": True, "boost_regex": False}
topics = ("CLI", "getopt", "options", "argparser")
generators = "cmake", "cmake_find_package"
exports_sources = ["patches/**", "CMakeLists.txt"]
_cmake = None
@property
def _source_subfolder(self):
return "source_subfolder"
@property
def _build_subfolder(self):
return "build_subfolder"
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
def configure(self):
if self.options.shared:
del self.options.fPIC
if self.settings.compiler.cppstd:
tools.check_min_cppstd(self, "11")
def requirements(self):
if self.options.boost_regex:
self.requires("boost/1.74.0")
def source(self):
tools.get(**self.conan_data["sources"][self.version])
extracted_dir = self.name + "-" + self.version
os.rename(extracted_dir, self._source_subfolder)
def _configure_cmake(self):
if self._cmake:
return self._cmake
self._cmake = CMake(self)
self._cmake.definitions["USE_BOOST_REGEX"] = self.options.boost_regex
self._cmake.configure(build_folder=self._build_subfolder)
return self._cmake
def build(self):
for patch in self.conan_data.get("patches", {}).get(self.version, []):
tools.patch(**patch)
cmake = self._configure_cmake()
cmake.build()
def package(self):
self.copy("LICENSE*", dst="licenses", src=self._source_subfolder)
cmake = self._configure_cmake()
cmake.install()
tools.rmdir(os.path.join(self.package_folder, "lib", "cmake"))
tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
def package_info(self):
# TODO: imported CMake target shouldn't be namespaced
self.cpp_info.names["cmake_find_package"] = "docopt"
self.cpp_info.names["cmake_find_package_multi"] = "docopt"
self.cpp_info.names["pkg_config"] = "docopt"
cmake_target = "docopt" if self.options.shared else "docopt_s"
self.cpp_info.components["docopt"].names["cmake_find_package"] = cmake_target
self.cpp_info.components["docopt"].names["cmake_find_package_multi"] = cmake_target
self.cpp_info.components["docopt"].libs = ["docopt"]
if self.settings.os == "Linux":
self.cpp_info.components["docopt"].system_libs = ["m"]
if self.settings.compiler == "Visual Studio" and self.options.shared:
self.cpp_info.components["docopt"].defines = ["DOCOPT_DLL"]
if self.options.boost_regex:
self.cpp_info.components["docopt"].requires.append("boost::boost")
| 38.457831 | 92 | 0.642231 |
0dafdbf49cbdaf084e9d0c0ea9b21c160ec98361 | 15,407 | py | Python | gym_collision_avoidance/envs/config.py | cmubig/Social-Navigation-Simulator | 5b91b4b1a3d88748ffa3d08689fec60e78ea0634 | [
"MIT"
] | 3 | 2021-12-16T05:39:14.000Z | 2022-02-25T06:07:51.000Z | gym_collision_avoidance/envs/config.py | cmubig/Social-Navigation-Simulator | 5b91b4b1a3d88748ffa3d08689fec60e78ea0634 | [
"MIT"
] | null | null | null | gym_collision_avoidance/envs/config.py | cmubig/Social-Navigation-Simulator | 5b91b4b1a3d88748ffa3d08689fec60e78ea0634 | [
"MIT"
] | 1 | 2021-11-09T18:04:01.000Z | 2021-11-09T18:04:01.000Z | import numpy as np
#from gym_collision_avoidance.experiments.src.master_config import Master_Config
from gym_collision_avoidance.experiments.src.master_config_deploy import Master_Config
master_config = Master_Config()
class Config(object):
def __init__(self):
#########################################################################
# GENERAL PARAMETERS
self.COLLISION_AVOIDANCE = True
self.continuous, self.discrete = range(2) # Initialize game types as enum
self.ACTION_SPACE_TYPE = self.continuous
### DISPLAY
self.ANIMATE_EPISODES = True
self.SHOW_EPISODE_PLOTS = False
self.SAVE_EPISODE_PLOTS = True
if not hasattr(self, "PLOT_CIRCLES_ALONG_TRAJ"):
self.PLOT_CIRCLES_ALONG_TRAJ = True
self.ANIMATION_PERIOD_STEPS = 2 # plot every n-th DT step (if animate mode on)
self.PLT_LIMITS = None
self.PLT_FIG_SIZE = (10, 8)
if not hasattr(self, "USE_STATIC_MAP"):
self.USE_STATIC_MAP = False
### TRAIN / PLAY / EVALUATE
        self.TRAIN_MODE = True # Enable when training an agent
        self.PLAY_MODE = False # Enable to see the trained agent in action (for testing)
        self.EVALUATE_MODE = False # Enable to evaluate a trained agent (no learning updates)
### REWARDS
self.REWARD_AT_GOAL = 1.0 # reward given when agent reaches goal position
self.REWARD_COLLISION_WITH_AGENT = -0.25 # reward given when agent collides with another agent
self.REWARD_COLLISION_WITH_WALL = -0.25 # reward given when agent collides with wall
self.REWARD_GETTING_CLOSE = -0.1 # reward when agent gets close to another agent (unused?)
self.REWARD_ENTERED_NORM_ZONE = -0.05 # reward when agent enters another agent's social zone
self.REWARD_TIME_STEP = 0.0 # default reward given if none of the others apply (encourage speed)
self.REWARD_WIGGLY_BEHAVIOR = 0.0
self.WIGGLY_BEHAVIOR_THRESHOLD = np.inf
self.COLLISION_DIST = 0.0 # meters between agents' boundaries for collision
self.GETTING_CLOSE_RANGE = 0.2 # meters between agents' boundaries for collision
# self.SOCIAL_NORMS = "right"
# self.SOCIAL_NORMS = "left"
self.SOCIAL_NORMS = "none"
### SIMULATION
self.DT = 0.2 # seconds between simulation time steps
self.NEAR_GOAL_THRESHOLD = 0.2
self.MAX_TIME_RATIO = 2. # agent has this number times the straight-line-time to reach its goal before "timing out"
### TEST CASE SETTINGS
self.TEST_CASE_FN = "get_testcase_random"
self.TEST_CASE_ARGS = {
'policy_to_ensure': 'learning_ga3c',
'policies': ['noncoop', 'learning_ga3c', 'static'],
'policy_distr': [0.05, 0.9, 0.05],
'speed_bnds': [0.5, 2.0],
'radius_bnds': [0.2, 0.8],
'side_length': [
{'num_agents': [0,5], 'side_length': [4,5]},
{'num_agents': [5,np.inf], 'side_length': [6,8]},
],
# 'agents_sensors': ['other_agents_states_encoded'],
}
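        # 'policies' and 'policy_distr' pair up: each spawned agent draws its
        # policy with the given probabilities (here 5% / 90% / 5%), while
        # 'policy_to_ensure' presumably guarantees at least one learning agent
        # and 'side_length' scales the arena with the number of agents.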
if not hasattr(self, "MAX_NUM_AGENTS_IN_ENVIRONMENT"):
self.MAX_NUM_AGENTS_IN_ENVIRONMENT = 4
if not hasattr(self, "MAX_NUM_AGENTS_TO_SIM"):
self.MAX_NUM_AGENTS_TO_SIM = 4
self.MAX_NUM_OTHER_AGENTS_IN_ENVIRONMENT = self.MAX_NUM_AGENTS_IN_ENVIRONMENT - 1
if not hasattr(self, "MAX_NUM_OTHER_AGENTS_OBSERVED"):
self.MAX_NUM_OTHER_AGENTS_OBSERVED = self.MAX_NUM_AGENTS_IN_ENVIRONMENT - 1
### EXPERIMENTS
self.PLOT_EVERY_N_EPISODES = 100 # for tensorboard visualization
### SENSORS
self.SENSING_HORIZON = np.inf
# self.SENSING_HORIZON = 3.0
        self.LASERSCAN_LENGTH = 512 # num range readings in one scan
        self.LASERSCAN_NUM_PAST = 3 # num past scans kept in the observation
self.NUM_STEPS_IN_OBS_HISTORY = 1 # number of time steps to store in observation vector
self.NUM_PAST_ACTIONS_IN_STATE = 0
### RVO AGENTS
self.RVO_TIME_HORIZON = 20.0 #5.0
self.RVO_COLLAB_COEFF = 0.5
self.RVO_ANTI_COLLAB_T = 1.0
### OBSERVATION VECTOR
self.TRAIN_SINGLE_AGENT = False
self.STATE_INFO_DICT = {
'dist_to_goal': {
'dtype': np.float32,
'size': 1,
'bounds': [-np.inf, np.inf],
'attr': 'get_agent_data("dist_to_goal")',
'std': np.array([5.], dtype=np.float32),
'mean': np.array([0.], dtype=np.float32)
},
'radius': {
'dtype': np.float32,
'size': 1,
'bounds': [0, np.inf],
'attr': 'get_agent_data("radius")',
'std': np.array([1.0], dtype=np.float32),
'mean': np.array([0.5], dtype=np.float32)
},
'heading_ego_frame': {
'dtype': np.float32,
'size': 1,
'bounds': [-np.pi, np.pi],
'attr': 'get_agent_data("heading_ego_frame")',
'std': np.array([3.14], dtype=np.float32),
'mean': np.array([0.], dtype=np.float32)
},
'pref_speed': {
'dtype': np.float32,
'size': 1,
'bounds': [0, np.inf],
'attr': 'get_agent_data("pref_speed")',
'std': np.array([1.0], dtype=np.float32),
'mean': np.array([1.0], dtype=np.float32)
},
'num_other_agents': {
'dtype': np.float32,
'size': 1,
'bounds': [0, np.inf],
'attr': 'get_agent_data("num_other_agents_observed")',
'std': np.array([1.0], dtype=np.float32),
'mean': np.array([1.0], dtype=np.float32)
},
'other_agent_states': {
'dtype': np.float32,
'size': 7,
'bounds': [-np.inf, np.inf],
'attr': 'get_agent_data("other_agent_states")',
'std': np.array([5.0, 5.0, 1.0, 1.0, 1.0, 5.0, 1.0], dtype=np.float32),
'mean': np.array([0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 1.0], dtype=np.float32)
},
'other_agents_states': {
'dtype': np.float32,
'size': (self.MAX_NUM_OTHER_AGENTS_OBSERVED,7),
'bounds': [-np.inf, np.inf],
'attr': 'get_sensor_data("other_agents_states")',
'std': np.tile(np.array([5.0, 5.0, 1.0, 1.0, 1.0, 5.0, 1.0], dtype=np.float32), (self.MAX_NUM_OTHER_AGENTS_OBSERVED, 1)),
'mean': np.tile(np.array([0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 1.0], dtype=np.float32), (self.MAX_NUM_OTHER_AGENTS_OBSERVED, 1)),
},
'laserscan': {
'dtype': np.float32,
'size': (self.LASERSCAN_NUM_PAST, self.LASERSCAN_LENGTH),
'bounds': [0., 6.],
'attr': 'get_sensor_data("laserscan")',
'std': 5.*np.ones((self.LASERSCAN_NUM_PAST, self.LASERSCAN_LENGTH), dtype=np.float32),
'mean': 5.*np.ones((self.LASERSCAN_NUM_PAST, self.LASERSCAN_LENGTH), dtype=np.float32)
},
'is_learning': {
'dtype': np.float32,
'size': 1,
'bounds': [0., 1.],
'attr': 'get_agent_data_equiv("policy.str", "learning")'
},
'other_agents_states_encoded': {
'dtype': np.float32,
'size': 100.,
'bounds': [0., 1.],
'attr': 'get_sensor_data("other_agents_states_encoded")'
}
}
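        # Each STATE_INFO_DICT entry describes one observation component:
        # 'attr' names the accessor used to pull the value, 'size'/'dtype'/
        # 'bounds' define its space, and the optional 'mean'/'std' arrays are
        # collected by setup_obs() below for normalization.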
self.setup_obs()
# self.AGENT_SORTING_METHOD = "closest_last"
self.AGENT_SORTING_METHOD = "closest_first"
# self.AGENT_SORTING_METHOD = "time_to_impact"
def setup_obs(self):
if not hasattr(self, "STATES_IN_OBS"):
self.STATES_IN_OBS = ['is_learning', 'num_other_agents', 'dist_to_goal', 'heading_ego_frame', 'pref_speed', 'radius', 'other_agents_states']
# STATES_IN_OBS = ['dist_to_goal', 'radius', 'heading_ego_frame', 'pref_speed', 'other_agent_states', 'use_ppo', 'laserscan']
# STATES_IN_OBS = ['dist_to_goal', 'radius', 'heading_ego_frame', 'pref_speed', 'other_agent_states', 'use_ppo'] # 2-agent net
# STATES_IN_OBS = ['dist_to_goal', 'radius', 'heading_ego_frame', 'pref_speed', 'other_agents_states', 'use_ppo', 'num_other_agents', 'laserscan'] # LSTM
if not hasattr(self, "STATES_NOT_USED_IN_POLICY"):
self.STATES_NOT_USED_IN_POLICY = ['is_learning']
        self.MEAN_OBS = {}
        self.STD_OBS = {}
for state in self.STATES_IN_OBS:
if 'mean' in self.STATE_INFO_DICT[state]:
self.MEAN_OBS[state] = self.STATE_INFO_DICT[state]['mean']
if 'std' in self.STATE_INFO_DICT[state]:
self.STD_OBS[state] = self.STATE_INFO_DICT[state]['std']
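# Sketch for illustration only (not used by the simulator): the MEAN_OBS and
# STD_OBS tables built in setup_obs() support per-component normalization,
# roughly (obs - mean) / std for every state that provides statistics.
def _normalize_obs_example(obs, mean_obs, std_obs):
    # obs maps a STATES_IN_OBS key to a numpy array of the matching shape.
    return {
        state: (value - mean_obs[state]) / std_obs[state]
        if state in mean_obs and state in std_obs
        else value
        for state, value in obs.items()
    }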
class EvaluateConfig(Config):
def __init__(self):
self.MAX_NUM_AGENTS_IN_ENVIRONMENT = master_config.MAX_NUM_AGENTS_IN_ENVIRONMENT
Config.__init__(self)
self.EVALUATE_MODE = master_config.EVALUATE_MODE
self.TRAIN_MODE = master_config.TRAIN_MODE
self.DT = master_config.DT
self.MAX_TIME_RATIO = master_config.MAX_TIME_RATIO
class Example(EvaluateConfig):
def __init__(self):
EvaluateConfig.__init__(self)
self.SAVE_EPISODE_PLOTS = master_config.SAVE_EPISODE_PLOTS
self.SHOW_EPISODE_PLOTS = master_config.SHOW_EPISODE_PLOTS
self.ANIMATE_EPISODES = master_config.ANIMATE_EPISODES
self.NEAR_GOAL_THRESHOLD = master_config.NEAR_GOAL_THRESHOLD
self.PLT_LIMITS = [[-10, 10], [-10, 10]]
self.PLT_FIG_SIZE = master_config.PLT_FIG_SIZE
self.PLOT_CIRCLES_ALONG_TRAJ = master_config.PLOT_CIRCLES_ALONG_TRAJ
self.NUM_AGENTS_TO_TEST = master_config.NUM_AGENTS_TO_TEST
self.POLICIES_TO_TEST = master_config.POLICIES_TO_TEST
self.NUM_TEST_CASES = master_config.NUM_TEST_CASES
self.MAX_NUM_AGENTS_IN_ENVIRONMENT = master_config.MAX_NUM_AGENTS_IN_ENVIRONMENT
self.MAX_NUM_OTHER_AGENTS_OBSERVED = master_config.MAX_NUM_OTHER_AGENTS_OBSERVED
self.agent_time_out = master_config.agent_time_out
class Formations(EvaluateConfig): #CADRL
def __init__(self):
EvaluateConfig.__init__(self)
self.SAVE_EPISODE_PLOTS = True
self.SHOW_EPISODE_PLOTS = False
self.ANIMATE_EPISODES = True
self.NEAR_GOAL_THRESHOLD = 0.2
self.PLT_LIMITS = [[-5, 6], [-2, 7]]
self.PLT_FIG_SIZE = (10,10)
self.PLOT_CIRCLES_ALONG_TRAJ = False
self.NUM_AGENTS_TO_TEST = [6]
self.POLICIES_TO_TEST = ['GA3C-CADRL-10']
self.NUM_TEST_CASES = 2
self.LETTERS = ['C', 'A', 'D', 'R', 'L']
############################
class Custom(EvaluateConfig):
def __init__(self):
EvaluateConfig.__init__(self)
self.SAVE_EPISODE_PLOTS = master_config.SAVE_EPISODE_PLOTS
self.SHOW_EPISODE_PLOTS = master_config.SHOW_EPISODE_PLOTS
self.ANIMATE_EPISODES = master_config.ANIMATE_EPISODES
self.NEAR_GOAL_THRESHOLD = master_config.NEAR_GOAL_THRESHOLD
self.PLT_LIMITS = master_config.PLT_LIMITS
self.PLT_FIG_SIZE = master_config.PLT_FIG_SIZE
self.PLOT_CIRCLES_ALONG_TRAJ = master_config.PLOT_CIRCLES_ALONG_TRAJ
self.NUM_AGENTS_TO_TEST = master_config.NUM_AGENTS_TO_TEST
self.POLICIES_TO_TEST = master_config.POLICIES_TO_TEST
self.NUM_TEST_CASES = master_config.NUM_TEST_CASES
self.MAX_NUM_AGENTS_IN_ENVIRONMENT = master_config.MAX_NUM_AGENTS_IN_ENVIRONMENT
self.MAX_NUM_OTHER_AGENTS_OBSERVED = master_config.MAX_NUM_OTHER_AGENTS_OBSERVED
self.agent_time_out = master_config.agent_time_out
#######################################
class SmallTestSuite(EvaluateConfig):
def __init__(self):
EvaluateConfig.__init__(self)
self.SAVE_EPISODE_PLOTS = True
self.SHOW_EPISODE_PLOTS = False
self.ANIMATE_EPISODES = False
self.PLOT_CIRCLES_ALONG_TRAJ = True
self.NUM_TEST_CASES = 4
class FullTestSuite(EvaluateConfig): #FULL_TEST
def __init__(self):
self.MAX_NUM_OTHER_AGENTS_OBSERVED = 19
EvaluateConfig.__init__(self)
self.SAVE_EPISODE_PLOTS = True
self.SHOW_EPISODE_PLOTS = False
self.ANIMATE_EPISODES = False
self.PLOT_CIRCLES_ALONG_TRAJ = True
self.NUM_TEST_CASES = 4
self.NUM_AGENTS_TO_TEST = [2,3,4]
self.RECORD_PICKLE_FILES = False
# # DRLMACA
# self.FIXED_RADIUS_AND_VPREF = True
# self.NEAR_GOAL_THRESHOLD = 0.8
# Normal
self.POLICIES_TO_TEST = [
'CADRL', 'RVO', 'GA3C-CADRL-10'
# 'GA3C-CADRL-4-WS-4-1', 'GA3C-CADRL-4-WS-4-2', 'GA3C-CADRL-4-WS-4-3', 'GA3C-CADRL-4-WS-4-4', 'GA3C-CADRL-4-WS-4-5',
# 'GA3C-CADRL-4-WS-6-1', 'GA3C-CADRL-4-WS-6-2', 'GA3C-CADRL-4-WS-6-3', 'GA3C-CADRL-4-WS-6-4',
# 'GA3C-CADRL-4-WS-8-1', 'GA3C-CADRL-4-WS-8-2', 'GA3C-CADRL-4-WS-8-3', 'GA3C-CADRL-4-WS-8-4',
# 'GA3C-CADRL-4-LSTM-1', 'GA3C-CADRL-4-LSTM-2', 'GA3C-CADRL-4-LSTM-3', 'GA3C-CADRL-4-LSTM-4', 'GA3C-CADRL-4-LSTM-5',
# 'GA3C-CADRL-10-WS-4-1', 'GA3C-CADRL-10-WS-4-2', 'GA3C-CADRL-10-WS-4-3', 'GA3C-CADRL-10-WS-4-4', 'GA3C-CADRL-10-WS-4-5',
# 'GA3C-CADRL-10-WS-6-1', 'GA3C-CADRL-10-WS-6-2', 'GA3C-CADRL-10-WS-6-3', 'GA3C-CADRL-10-WS-6-4',
# 'GA3C-CADRL-10-WS-8-1', 'GA3C-CADRL-10-WS-8-2', 'GA3C-CADRL-10-WS-8-3', 'GA3C-CADRL-10-WS-8-4',
# 'GA3C-CADRL-10-LSTM-1', 'GA3C-CADRL-10-LSTM-2', 'GA3C-CADRL-10-LSTM-3', 'GA3C-CADRL-10-LSTM-4', 'GA3C-CADRL-10-LSTM-5',
# 'CADRL', 'RVO'
]
self.FIXED_RADIUS_AND_VPREF = False
self.NEAR_GOAL_THRESHOLD = 0.2
class CollectRegressionDataset(EvaluateConfig):
def __init__(self):
self.MAX_NUM_AGENTS_IN_ENVIRONMENT = 4
self.MAX_NUM_AGENTS_TO_SIM = 4
self.DATASET_NAME = ""
# # Laserscan mode
# self.USE_STATIC_MAP = True
# self.STATES_IN_OBS = ['is_learning', 'num_other_agents', 'dist_to_goal', 'heading_ego_frame', 'pref_speed', 'radius', 'laserscan']
# self.DATASET_NAME = "laserscan_"
EvaluateConfig.__init__(self)
self.TEST_CASE_ARGS['policies'] = 'CADRL'
self.AGENT_SORTING_METHOD = "closest_first"
# # Laserscan mode
# self.TEST_CASE_ARGS['agents_sensors'] = ['laserscan', 'other_agents_states']
| 48.602524 | 165 | 0.58058 |
88d9d0bbffdaafb81a1d7b42bc0d930b8010810f | 173,519 | py | Python | windows/generated_def/meta.py | 1orenz0/PythonForWindows | f3de7b528b020b45ac6a871c975006fc1db1c3b0 | [
"BSD-3-Clause"
] | 1 | 2021-06-22T16:50:31.000Z | 2021-06-22T16:50:31.000Z | windows/generated_def/meta.py | 1orenz0/PythonForWindows | f3de7b528b020b45ac6a871c975006fc1db1c3b0 | [
"BSD-3-Clause"
] | null | null | null | windows/generated_def/meta.py | 1orenz0/PythonForWindows | f3de7b528b020b45ac6a871c975006fc1db1c3b0 | [
"BSD-3-Clause"
] | 1 | 2021-05-12T12:58:27.000Z | 2021-05-12T12:58:27.000Z | windef = set(['STATUS_SUSPEND_COUNT_EXCEEDED', 'ERROR_BAD_NET_NAME', 'CMSG_ENVELOPED', 'STANDARD_RIGHTS_WRITE', 'RRF_RT_REG_QWORD', 'ERROR_SYSTEM_SHUTDOWN', 'PROV_FORTEZZA', 'STATUS_TRANSPORT_FULL', 'STATUS_ACPI_INCORRECT_ARGUMENT_COUNT', 'DBG_REPLY_LATER', 'AF_HYLINK', 'ERROR_INVALID_VARIANT', 'CMSG_HASH_DATA_PARAM', 'CMSG_CTRL_ADD_CRL', 'IMAGE_SCN_LNK_REMOVE', 'STGM_TRANSACTED', 'STATUS_BAD_REMOTE_ADAPTER', 'STATUS_DS_SAM_INIT_FAILURE', 'STATUS_VDM_DISALLOWED', 'CONTEXT_FULL', 'RPC_NT_BAD_STUB_DATA', 'szOID_RSA_certExtensions', 'STATUS_SXS_KEY_NOT_FOUND', 'CRYPT_ASN_ENCODING', 'EXCEPTION_FLT_UNDERFLOW', 'CMSG_CMS_RECIPIENT_INDEX_PARAM', 'ERROR_BAD_NETPATH', 'OS_NT4ORGREATER', 'SYMBOLIC_LINK_ALL_ACCESS', 'STATUS_PIPE_BUSY', 'CERT_X500_NAME_STR', 'KEY_QUERY_VALUE', 'STATUS_INSUFFICIENT_RESOURCE_FOR_SPECIFIED_SHARED_SECTION_SIZE', 'CERT_HASH_PROP_ID', 'STATUS_EVENT_PENDING', 'STATUS_PENDING', 'STATUS_TRANSACTION_FREEZE_IN_PROGRESS', 'ERROR_NO_SPOOL_SPACE', 'STATUS_LOG_CORRUPTION_DETECTED', 'ERROR_CARDBUS_NOT_SUPPORTED', 'NTE_PROVIDER_DLL_FAIL', 'STATUS_NOINTERFACE', 'SYSTEM_MANDATORY_LABEL_NO_WRITE_UP', 'CERT_QUERY_CONTENT_FLAG_PKCS10', 'ANYSIZE_ARRAY', 'PRODUCT_STANDARD_EVALUATION_SERVER', 'RPC_C_QOS_CAPABILITIES_MUTUAL_AUTH', 'CLUSAPI_CHANGE_ACCESS', 'szOID_OIWSEC_desEDE', 'IMAGE_REL_BASED_DIR64', 'STATUS_MAGAZINE_NOT_PRESENT', 'STATUS_TRANSACTIONS_NOT_FROZEN', 'STATUS_SXS_COMPONENT_STORE_CORRUPT', 'STATUS_FLT_INSTANCE_NAME_COLLISION', 'OS_WIN95_GOLD', 'STATUS_NOT_SNAPSHOT_VOLUME', 'STATUS_GRAPHICS_CANT_EVICT_PINNED_ALLOCATION', 'CLAIM_SECURITY_ATTRIBUTE_TYPE_INT64', 'STATUS_TOO_MANY_SECRETS', 'IMAGE_FILE_LARGE_ADDRESS_AWARE', 'STATUS_GRAPHICS_CANT_LOCK_MEMORY', 'AF_NS', 'SDDL_REVISION_1', 'STATUS_FILES_OPEN', 'STATUS_NO_BROWSER_SERVERS_FOUND', 'STATUS_THREADPOOL_RELEASED_DURING_OPERATION', 'CONTAINER_INHERIT_ACE', 'DIGSIG_E_CRYPTO', 'ERROR_DBG_TERMINATE_THREAD', 'ERROR_EVT_FILTER_UNSUPPORTEDOP', 'PAGE_EXECUTE_READ', 'SEC_COMMIT', 'STATUS_TRANSACTED_MAPPING_UNSUPPORTED_REMOTE', 'SID_HASH_SIZE', 'STATUS_AGENTS_EXHAUSTED', 'TOKEN_SECURITY_ATTRIBUTE_TYPE_INT64', 'SE_GROUP_INTEGRITY_ENABLED', 'ERROR_BAD_UNIT', 'STATUS_GRAPHICS_INVALID_DISPLAY_ADAPTER', 'szOID_RSA_SHA384RSA', 'ERROR_PAGE_FAULT_TRANSITION', 'ERROR_HANDLE_DISK_FULL', 'CERT_PUBKEY_ALG_PARA_PROP_ID', 'ERROR_PWD_HISTORY_CONFLICT', 'CMSG_CTRL_ADD_CMS_SIGNER_INFO', 'STATUS_LOG_NOT_ENOUGH_CONTAINERS', 'RPC_NT_ZERO_DIVIDE', 'SERVICE_TRIGGER_TYPE_CUSTOM', 'STATUS_GRAPHICS_ADAPTER_ACCESS_NOT_EXCLUDED', 'STATUS_DISK_RESET_FAILED', 'CERT_TRUST_HAS_KEY_MATCH_ISSUER', 'STATUS_INTERNAL_ERROR', 'STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER', 'szOID_OIWSEC_mdc2RSA', 'STATUS_FWP_SUBLAYER_NOT_FOUND', 'STATUS_INVALID_DOMAIN_ROLE', 'STATUS_LOG_POLICY_ALREADY_INSTALLED', 'ERROR_REGISTRY_CORRUPT', 'ERROR_PROCESS_NOT_IN_JOB', 'STATUS_HOPLIMIT_EXCEEDED', 'RPC_NT_INVALID_NET_ADDR', 'CMSG_SIGNED_AND_ENVELOPED', 'STATUS_TRANSACTION_NOT_FOUND', 'SC_MANAGER_ENUMERATE_SERVICE', 'STATUS_NO_CALLBACK_ACTIVE', 'SERVICE_STOP_PENDING', 'SERVICE_TRIGGER_TYPE_FIREWALL_PORT_EVENT', 'ERROR_CANTOPEN', 'ERROR_EVT_CONFIGURATION_ERROR', 'EXCEPTION_ARRAY_BOUNDS_EXCEEDED', 'STATUS_CANT_WAIT', 'KEY_WOW64_32KEY', 'STATUS_WMI_GUID_NOT_FOUND', 'TOKEN_SECURITY_ATTRIBUTE_USE_FOR_DENY_ONLY', 'SEMAPHORE_MODIFY_STATE', 'ERROR_RING2_STACK_IN_USE', 'STATUS_CLUSTER_NETWORK_NOT_INTERNAL', 'CERT_E_VALIDITYPERIODNESTING', 'SERVICE_CONTROL_PRESHUTDOWN', 'ERROR_DISK_FULL', 'STATUS_NO_SUCH_DOMAIN', 'STATUS_FILE_CLOSED', 
'ERROR_NOTIFY_CLEANUP', 'FILE_OPEN_REQUIRING_OPLOCK', 'ERROR_RESOURCE_REQUIREMENTS_CHANGED', 'IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA', 'STATUS_DATA_LATE_ERROR', 'PAGE_WRITECOPY', 'SE_ASSIGNPRIMARYTOKEN_NAME', 'STATUS_GRAPHICS_ALLOCATION_BUSY', 'PIPE_SERVER_END', 'CERT_SYSTEM_STORE_USERS_ID', 'STATUS_VIRUS_DELETED', 'RPC_NT_INVALID_ASYNC_CALL', 'STATUS_FLT_NOT_SAFE_TO_POST_OPERATION', 'CERT_TRUST_CTL_IS_NOT_VALID_FOR_USAGE', 'EXCEPTION_BREAKPOINT', 'RPC_NT_ENTRY_NOT_FOUND', 'STATUS_PASSWORD_EXPIRED', 'STATUS_DATA_OVERRUN', 'STATUS_INVALID_EA_FLAG', 'PIPE_CLIENT_END', 'EXCEPTION_POSSIBLE_DEADLOCK', 'STATUS_INVALID_TASK_INDEX', 'STATUS_CONNECTION_ABORTED', 'STATUS_WMI_READ_ONLY', 'CERT_MD5_HASH_PROP_ID', 'CMSG_DATA', 'STATUS_LOG_PINNED_RESERVATION', 'STATUS_LICENSE_VIOLATION', 'szOID_X957_SHA1DSA', 'ERROR_INVALID_AT_INTERRUPT_TIME', 'STATUS_PKINIT_NAME_MISMATCH', 'TRUST_E_EXPLICIT_DISTRUST', 'STATUS_GRAPHICS_CHILD_DESCRIPTOR_NOT_SUPPORTED', 'STATUS_NO_TRUST_LSA_SECRET', 'FILE_FLAG_POSIX_SEMANTICS', 'STATUS_PORT_DISCONNECTED', 'SERVICE_CONTROL_DEVICEEVENT', 'REPORT_NO_PRIVATE_KEY', 'STATUS_GRAPHICS_DISPLAY_DEVICE_NOT_ATTACHED_TO_DESKTOP', 'PROCESS_SET_LIMITED_INFORMATION', 'STATUS_INVALID_VOLUME_LABEL', 'RPC_RESPONSE_TYPE_BIND_OK', 'STATUS_REQUEST_OUT_OF_SEQUENCE', 'STATUS_CLUSTER_NODE_ALREADY_MEMBER', 'FAX_PORT_QUERY', 'szOID_PKCS_8', 'szOID_PKCS_9', 'CERT_SYSTEM_STORE_LOCAL_MACHINE', 'szOID_PKCS_4', 'szOID_PKCS_5', 'szOID_PKCS_6', 'szOID_PKCS_7', 'NTE_INTERNAL_ERROR', 'szOID_PKCS_1', 'szOID_PKCS_2', 'szOID_PKCS_3', 'FILE_ATTRIBUTE_REPARSE_POINT', 'STATUS_PNP_IRQ_TRANSLATION_FAILED', 'ACCESS_MAX_MS_V4_ACE_TYPE', 'CERT_SYSTEM_STORE_LOCATION_SHIFT', 'SM_CXDLGFRAME', 'DEBUG_PROCESS', 'STATUS_FVE_BAD_METADATA_POINTER', 'ACCESS_DENIED_CALLBACK_OBJECT_ACE_TYPE', 'STATUS_FATAL_APP_EXIT', 'szOID_RSA_envelopedData', 'STATUS_WX86_EXCEPTION_CHAIN', 'DEBUG_EVENT_CHANGE_DEBUGGEE_STATE', 'STATUS_DUPLICATE_OBJECTID', 'SE_LOCK_MEMORY_NAME', 'STATUS_GRAPHICS_SOURCE_NOT_IN_TOPOLOGY', 'STATUS_VERIFIER_STOP', 'ERROR_NETNAME_DELETED', 'PORT_ALL_ACCESS', 'szOID_OIWDIR_HASH', 'STATUS_CONNECTION_REFUSED', 'STATUS_FILE_IDENTITY_NOT_PERSISTENT', 'STATUS_BAD_DESCRIPTOR_FORMAT', 'STATUS_FVE_TPM_NO_VMK', 'STATUS_APP_INIT_FAILURE', 'STATUS_NOT_A_REPARSE_POINT', 'STATUS_MP_PROCESSOR_MISMATCH', 'STATUS_NDIS_DOT11_MEDIA_IN_USE', 'CERT_NAME_EMAIL_TYPE', 'SYSTEM_ALARM_CALLBACK_OBJECT_ACE_TYPE', 'STATUS_PROCESS_IS_PROTECTED', 'szOID_RSA_SMIMECapabilities', 'STATUS_GRAPHICS_INVALID_TOTAL_REGION', 'CERT_QUERY_CONTENT_FLAG_CERT_PAIR', 'LPC_DEBUG_EVENT', 'STATUS_EVENTLOG_FILE_CORRUPT', 'ERROR_TIMER_RESUME_IGNORED', 'TXFS_MINIVERSION_DEFAULT_VIEW', 'STATUS_NDIS_ERROR_READING_FILE', 'STATUS_DS_GLOBAL_CANT_HAVE_UNIVERSAL_MEMBER', 'IMAGE_SCN_MEM_WRITE', 'CERT_QUERY_CONTENT_PKCS7_SIGNED_EMBED', 'STATUS_FILE_LOCKED_WITH_WRITERS', 'SE_CREATE_PAGEFILE_NAME', 'STATUS_CONTENT_BLOCKED', 'STATUS_NOT_MAPPED_VIEW', 'STATUS_INVALID_HW_PROFILE', 'ERROR_INVALID_PASSWORD', 'ERROR_INSUFFICIENT_LOGON_INFO', 'STATUS_FAILED_DRIVER_ENTRY', 'MS_DEF_PROV_A', 'STATUS_LOG_HARD_ERROR', 'ERROR_TRANSLATION_COMPLETE', 'TH32CS_SNAPHEAPLIST', 'STATUS_FVE_OLD_METADATA_COPY', 'FILE_CREATE_PIPE_INSTANCE', 'szOID_OIWSEC_desCFB', 'IMAGE_SCN_MEM_DISCARDABLE', 'MS_DEF_PROV_W', 'STATUS_DLL_MIGHT_BE_INCOMPATIBLE', 'FILE_SUPPORTS_TRANSACTIONS', 'STATUS_UNABLE_TO_UNLOAD_MEDIA', 'STATUS_CTX_NOT_CONSOLE', 'MEM_4MB_PAGES', 'STATUS_DS_OID_MAPPED_GROUP_CANT_HAVE_MEMBERS', 'STATUS_GRAPHICS_OPM_INVALID_ENCRYPTED_PARAMETERS', 'STATUS_HANDLES_CLOSED', 
'STATUS_INCOMPATIBLE_FILE_MAP', 'ERROR_RELOC_CHAIN_XEEDS_SEGLIM', 'NTE_PROV_TYPE_NO_MATCH', 'PKCS_7_NDR_ENCODING', 'STATUS_SMARTCARD_IO_ERROR', 'ERROR_IO_PENDING', 'STATUS_FWP_INCOMPATIBLE_SA_STATE', 'CERT_CHAIN_REVOCATION_CHECK_CACHE_ONLY', 'API_SET_SCHEMA_ENTRY_FLAGS_SEALED', 'STATUS_NDIS_OFFLOAD_POLICY', 'STATUS_GRAPHICS_PVP_HFS_FAILED', 'ERROR_DBG_EXCEPTION_HANDLED', 'ERROR_SIGNAL_PENDING', 'RPC_NT_INVALID_PIPE_OPERATION', 'STATUS_FWP_DUPLICATE_KEYMOD', 'STATUS_REMOTE_STORAGE_MEDIA_ERROR', 'STATUS_GRAPHICS_VIDEO_PRESENT_TARGETS_LESS_THAN_SOURCES', 'STATUS_LAST_ADMIN', 'X509_CERT_CRL_TO_BE_SIGNED', 'STATUS_SYSTEM_DEVICE_NOT_FOUND', 'EXCEPTION_INVALID_DISPOSITION', 'NTE_BUFFER_TOO_SMALL', 'STATUS_CALLBACK_RETURNED_THREAD_AFFINITY', 'IO_COMPLETION_MODIFY_STATE', 'CMSG_ENCRYPTED_DIGEST', 'CERT_FIND_SUBJECT_INFO_ACCESS', 'PRODUCT_MEDIUMBUSINESS_SERVER_SECURITY', 'STATUS_FLT_DO_NOT_DETACH', 'STATUS_CLUSTER_NO_SECURITY_CONTEXT', 'NTE_BAD_DATA', 'LPC_CLIENT_DIED', 'AT_KEYEXCHANGE', 'STATUS_NO_MEDIA_IN_DEVICE', 'OS_WIN2000DATACENTER', 'PRODUCT_ESSENTIALBUSINESS_SERVER_MGMTSVC', 'CERT_IE30_RESERVED_PROP_ID', 'STATUS_TRANSACTION_NO_RELEASE', 'STATUS_ACPI_INVALID_ARGUMENT', 'CMSG_CTRL_ENABLE_STRONG_SIGNATURE', 'SERVICE_ALL_ACCESS', 'STATUS_DEVICE_ALREADY_ATTACHED', 'STATUS_OFFLOAD_READ_FLT_NOT_SUPPORTED', 'STATUS_DATATYPE_MISALIGNMENT_ERROR', 'STATUS_FLT_CONTEXT_ALLOCATION_NOT_FOUND', 'THREAD_SUSPEND_RESUME', 'LPC_ERROR_EVENT', 'TH32CS_INHERIT', 'STATUS_SXS_PROCESS_DEFAULT_ALREADY_SET', 'IMAGE_SCN_LNK_NRELOC_OVFL', 'ERROR_WOW_ASSERTION', 'TOKEN_SECURITY_ATTRIBUTE_MANDATORY', 'SC_MANAGER_CREATE_SERVICE', 'NTE_KEYSET_ENTRY_BAD', 'szOID_RSA_DH', 'ERROR_ACPI_ERROR', 'STATUS_INVALID_MESSAGE', 'STATUS_CTX_SHADOW_INVALID', 'szOID_RSA_HASH', 'STATUS_OBJECT_NAME_EXISTS', 'STATUS_NO_LOGON_SERVERS', 'IMAGE_SCN_MEM_SHARED', 'EXCEPTION_FLT_INEXACT_RESULT', 'ERROR_TRANSACTION_NOT_REQUESTED', 'STATUS_LOG_BLOCK_INCOMPLETE', 'FILE_SHARE_READ', 'PRODUCT_CORE_COUNTRYSPECIFIC', 'ACCESS_DENIED_OBJECT_ACE_TYPE', 'RPC_NT_SS_CANNOT_GET_CALL_HANDLE', 'INHERIT_PARENT_AFFINITY', 'STATUS_LOG_POLICY_INVALID', 'PRODUCT_ENTERPRISE_SERVER', 'OS_DOMAINMEMBER', 'OBJ_KERNEL_HANDLE', 'CMSG_HASHED', 'ERROR_VOLSNAP_HIBERNATE_READY', 'ALPC_MSGFLG_SYNC_REQUEST', 'CERT_OID_NAME_STR', 'STATUS_CALLBACK_RETURNED_LDR_LOCK', 'FILE_ACTION_REMOVED', 'STATUS_EXPIRED_HANDLE', 'STATUS_GRAPHICS_SOURCE_ALREADY_IN_SET', 'EVT_VARIANT_TYPE_ARRAY', 'STATUS_GRAPHICS_OPM_THEATER_MODE_ENABLED', 'BIND_IF_SYNTAX_NDR64', 'CERT_FORTEZZA_DATA_PROP_ID', 'szOID_RSA_counterSign', 'STATUS_CTX_INVALID_MODEMNAME', 'CREATE_NEW_CONSOLE', 'STATUS_DS_CROSS_DOM_MOVE_FAILED', 'STATUS_FWP_INVALID_ACTION_TYPE', 'X509_CERT', 'STATUS_STREAM_MINIVERSION_NOT_FOUND', 'ERROR_CACHE_PAGE_LOCKED', 'STATUS_THREAD_NOT_IN_PROCESS', 'CERT_KEY_CONTEXT_PROP_ID', 'STATUS_TOO_LATE', 'STATUS_NDIS_MULTICAST_NOT_FOUND', 'ANY_SIZE', 'WTD_STATEACTION_CLOSE', 'ERROR_WRITE_PROTECT', 'CERT_FIND_PUBKEY_MD5_HASH', 'szOID_INFOSEC_SuiteAConfidentiality', 'STATUS_INVALID_LOGON_HOURS', 'ERROR_LOST_WRITEBEHIND_DATA', 'STATUS_REQUEST_ABORTED', 'CERT_SUBJECT_PUBLIC_KEY_MD5_HASH_PROP_ID', 'SM_CYKANJIWINDOW', 'STATUS_UNWIND_CONSOLIDATE', 'TOKEN_SECURITY_ATTRIBUTE_TYPE_FQBN', 'ERROR_SYSTEM_POWERSTATE_TRANSITION', 'OS_WOW6432', 'CERT_KEY_SPEC_PROP_ID', 'SEC_WRITECOMBINE', 'STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_HDCP', 'DIRECTORY_TRAVERSE', 'STATUS_USER_DELETE_TRUST_QUOTA_EXCEEDED', 'SM_CYVIRTUALSCREEN', 'CERT_TRUST_HAS_PREFERRED_ISSUER', 'SM_CYMINTRACK', 'FILE_EXISTS', 'SE_MACHINE_ACCOUNT_NAME', 
'X509_KEY_USAGE_RESTRICTION', 'SE_SACL_PROTECTED', 'SM_CYMAXTRACK', 'STATUS_GRAPHICS_INVALID_MONITOR_SOURCEMODESET', 'STATUS_INVALID_LOCK_SEQUENCE', 'WTD_STATEACTION_IGNORE', 'STATUS_GRAPHICS_TRY_AGAIN_NOW', 'CERT_TRUST_IS_EXPLICIT_DISTRUST', 'STATUS_IO_REPARSE_TAG_MISMATCH', 'CERT_INFO_NOT_BEFORE_FLAG', 'API_SET_SCHEMA_FLAGS_SEALED', 'STATUS_WX86_SINGLE_STEP', 'DIGSIG_E_DECODE', 'ERROR_TOO_MANY_CMDS', 'STATUS_BAD_TOKEN_TYPE', 'OBJ_CASE_INSENSITIVE', 'STATUS_DS_INIT_FAILURE_CONSOLE', 'STATUS_REDIRECTOR_STARTED', 'STG_TOEND', 'THREAD_GET_CONTEXT', 'CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY_ID', 'STATUS_FVE_VIRTUALIZED_SPACE_TOO_BIG', 'STATUS_DEBUG_ATTACH_FAILED', 'REG_RESOURCE_REQUIREMENTS_LIST', 'STATUS_VALIDATE_CONTINUE', 'AF_INET', 'STATUS_NO_GUID_TRANSLATION', 'STATUS_FVE_REBOOT_REQUIRED', 'szOID_RSAES_OAEP', 'ERROR_FAIL_RESTART', 'STATUS_ISSUING_CA_UNTRUSTED', 'ERROR_EA_TABLE_FULL', 'STATUS_GRAPHICS_TARGET_NOT_IN_TOPOLOGY', 'CERT_SYSTEM_STORE_SERVICES', 'STATUS_FVE_FS_NOT_EXTENDED', 'STATUS_IPSEC_CLEAR_TEXT_DROP', 'STATUS_PRIVILEGED_INSTRUCTION', 'STATUS_INVALID_BLOCK_LENGTH', 'STATUS_INVALID_PAGE_PROTECTION', 'STATUS_FVE_TPM_DISABLED', 'STATUS_NDIS_DOT11_AUTO_CONFIG_ENABLED', 'MEM_RESET', 'DIGSIG_E_EXTENSIBILITY', 'EXCEPTION_FLT_INVALID_OPERATION', 'API_SET_LOOKUP_ORDINAL', 'SERVICE_ACTIVE', 'STATUS_LOGON_SESSION_EXISTS', 'STATUS_BAD_WORKING_SET_LIMIT', 'ERROR_IMAGE_NOT_AT_BASE', 'RPC_NT_PIPE_CLOSED', 'ERROR_CURRENT_DIRECTORY', 'STATUS_INVALID_PORT_HANDLE', 'RPC_S_UNSUPPORTED_TRANS_SYN', 'STATUS_END_OF_FILE', 'ERROR_DUP_NAME', 'ERROR_TIMER_NOT_CANCELED', 'RPC_NT_NO_BINDINGS', 'STATUS_LOG_NO_RESTART', 'FILE_ATTRIBUTE_ARCHIVE', 'ERROR_INVALID_UNWIND_TARGET', 'IMAGE_SCN_ALIGN_512BYTES', 'FAX_CONFIG_SET', 'NMPWAIT_WAIT_FOREVER', 'CERT_ISSUER_PUBLIC_KEY_MD5_HASH_PROP_ID', 'STATUS_IMPLEMENTATION_LIMIT', 'ERROR_NOT_TINY_STREAM', 'ERROR_SECTOR_NOT_FOUND', 'STATUS_PIPE_CONNECTED', 'ERROR_INVALID_ACCESS', 'CERT_STORE_PROV_PKCS7', 'STATUS_UNHANDLED_EXCEPTION', 'STATUS_LOCAL_USER_SESSION_KEY', 'HKEY_USERS', 'STATUS_SECTION_NOT_EXTENDED', 'NTE_UI_REQUIRED', 'szOID_RSA_MD2', 'STATUS_IPSEC_BAD_SPI', 'szOID_RSA_MD5', 'szOID_RSA_MD4', 'OS_WEBSERVER', 'DEBUG_EVENT_EXIT_THREAD', 'SM_CYDOUBLECLK', 'STGFMT_FILE', 'CERT_STORE_UPDATE_KEYID_FLAG', 'STATUS_NOT_SUPPORTED', 'STATUS_RECEIVE_PARTIAL', 'STATUS_ENLISTMENT_NOT_SUPERIOR', 'STATUS_MONITOR_NO_DESCRIPTOR', 'CMSG_ENCRYPTED', 'CERT_COMPARE_SHA1_HASH', 'ERROR_NOT_CAPABLE', 'WTD_UI_NOBAD', 'CERT_FIND_ISSUER_STR_A', 'DEBUG_EVENT_CHANGE_SYMBOL_STATE', 'CRYPT_SF', 'szOID_INFOSEC_SuiteASignature', 'SM_CYVSCROLL', 'ERROR_USER_PROFILE_LOAD', 'STATUS_TOO_MANY_SESSIONS', 'CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED', 'STATUS_IPSEC_DOSP_MAX_PER_IP_RATELIMIT_QUEUES', 'STATUS_LOG_FILE_FULL', 'STD_INPUT_HANDLE', 'CRYPT_KEK', 'TH32CS_SNAPALL', 'ERROR_DOMAIN_CTRLR_CONFIG_ERROR', 'CMSG_ENCODED_MESSAGE', 'PORT_CONNECT', 'CLAIM_SECURITY_ATTRIBUTE_TYPE_SID', 'STATUS_BAD_SERVICE_ENTRYPOINT', 'STATUS_PARAMETER_QUOTA_EXCEEDED', 'IMAGE_FILE_MACHINE_ARM', 'CREATE_DEFAULT_ERROR_MODE', 'STATUS_TRANSACTION_NO_MATCH', 'CERT_SOURCE_LOCATION_PROP_ID', 'ERROR_SYNCHRONIZATION_REQUIRED', 'STATUS_PRIVILEGE_NOT_HELD', 'STATUS_FWP_INJECT_HANDLE_STALE', 'PRODUCT_ULTIMATE', 'ERROR_DISK_CHANGE', 'SERVICE_START_PENDING', 'STATUS_RESOURCE_DATA_NOT_FOUND', 'SERVICE_START', 'IMAGE_SCN_ALIGN_128BYTES', 'STATUS_CONVERT_TO_LARGE', 'STATUS_EVALUATION_EXPIRATION', 'SW_SHOWMAXIMIZED', 'STATUS_PAGEFILE_QUOTA', 'STATUS_WX86_CREATEWX86TIB', 'SE_PRIVILEGE_ENABLED_BY_DEFAULT', 
'STATUS_GRAPHICS_OPM_INVALID_POINTER', 'ERROR_BADSTARTPOSITION', 'CERT_INFO_SERIAL_NUMBER_FLAG', 'STATUS_FWP_IN_USE', 'CERT_FIND_PUBLIC_KEY', 'RPC_NT_INCOMPLETE_NAME', 'szOID_PKCS_12', 'CERT_COMPARE_SHIFT', 'STATUS_FVE_PARTIAL_METADATA', 'PRODUCT_ENTERPRISE_SERVER_IA64', 'STATUS_WRONG_VOLUME', 'PROV_RSA_AES', 'STATUS_GRAPHICS_OPM_RESOLUTION_TOO_HIGH', 'STATUS_DEVICE_CONFIGURATION_ERROR', 'IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP', 'STATUS_GRAPHICS_PVP_INVALID_CERTIFICATE_LENGTH', 'CERT_COMPARE_MASK', 'szOID_RSA_unstructAddr', 'ERROR_WAIT_3', 'ERROR_WAIT_2', 'ERROR_WAIT_1', 'ALPC_MESSAGE_TOKEN_ATTRIBUTE', 'szOID_NIST_sha256', 'COMPRESSION_ENGINE_MAXIMUM', 'STATUS_CTX_MODEM_RESPONSE_TIMEOUT', 'CERT_QUERY_OBJECT_BLOB', 'STATUS_NOT_A_DIRECTORY', 'STATUS_FLOAT_INVALID_OPERATION', 'IMAGE_DLLCHARACTERISTICS_NX_COMPAT', 'ERROR_DRIVER_CANCEL_TIMEOUT', 'PRODUCT_HOME_PREMIUM_N', 'EXCEPTION_PRIV_INSTRUCTION', 'STATUS_MFT_TOO_FRAGMENTED', 'PRODUCT_SB_SOLUTION_SERVER', 'STATUS_INVALID_PARAMETER_8', 'FILE_ACTION_RENAMED_NEW_NAME', 'STATUS_NDIS_ADAPTER_NOT_READY', 'STATUS_DISK_QUOTA_EXCEEDED', 'STATUS_PAGE_FAULT_COPY_ON_WRITE', 'DIRECTORY_QUERY', 'STATUS_FWP_CALLOUT_NOTIFICATION_FAILED', 'ERROR_DBG_PRINTEXCEPTION_C', 'STATUS_TOO_MANY_CONTEXT_IDS', 'STATUS_BUFFER_ALL_ZEROS', 'AF_IRDA', 'CERT_ARCHIVED_PROP_ID', 'DIRECTORY_ALL_ACCESS', 'GROUP_SECURITY_INFORMATION', 'RPC_C_AUTHN_LEVEL_PKT', 'PIPE_WAIT', 'STATUS_INVALID_READ_MODE', 'PRODUCT_ENTERPRISE_SERVER_V', 'MAXIMUM_ALLOWED', 'SERVICE_TRIGGER_TYPE_CUSTOM_SYSTEM_STATE_CHANGE', 'ERROR_REDIR_PAUSED', 'COMPRESSION_FORMAT_NONE', 'HIGH_PRIORITY_CLASS', 'STATUS_DOMAIN_LIMIT_EXCEEDED', 'PRODUCT_HOME_PREMIUM', 'STATUS_FLOAT_MULTIPLE_FAULTS', 'STATUS_MONITOR_INVALID_DESCRIPTOR_CHECKSUM', 'STATUS_PARTITION_FAILURE', 'STATUS_FLT_FILTER_NOT_READY', 'NORMAL_PRIORITY_CLASS', 'ERROR_FAIL_SHUTDOWN', 'STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGESET', 'STATUS_DRIVER_UNABLE_TO_LOAD', 'ASYNC_MODE_COMPATIBILITY', 'X509_CERT_TO_BE_SIGNED', 'ERROR_WAS_LOCKED', 'STATUS_FVE_TPM_SRK_AUTH_NOT_ZERO', 'szOID_DSALG_SIGN', 'STATUS_FILE_IS_A_DIRECTORY', 'STATUS_CRASH_DUMP', 'STATUS_ACPI_INVALID_ARGTYPE', 'NTE_EXISTS', 'STATUS_FLT_IO_COMPLETE', 'ERROR_REPARSE', 'SERVICE_PAUSED', 'IMAGE_SIZEOF_SHORT_NAME', 'CMSG_RECIPIENT_COUNT_PARAM', 'szOID_RSA_challengePwd', 'SERVICE_ACCEPT_SESSIONCHANGE', 'STATUS_HIBERNATION_FAILURE', 'CMSG_ENCODING_TYPE_MASK', 'IMAGE_SCN_ALIGN_64BYTES', 'ERROR_EA_LIST_INCONSISTENT', 'STATUS_RXACT_COMMIT_FAILURE', 'STATUS_RANGE_LIST_CONFLICT', 'ERROR_CANT_ENABLE_DENY_ONLY', 'SE_DACL_DEFAULTED', 'STATUS_JOURNAL_ENTRY_DELETED', 'SW_RESTORE', 'ERROR_EVALUATION_EXPIRATION', 'STATUS_THREADPOOL_SET_EVENT_ON_COMPLETION_FAILED', 'CERT_TRUST_HAS_ISSUANCE_CHAIN_POLICY', 'CERT_COMPARE_PUBKEY_MD5_HASH', 'STATUS_CLUSTER_INVALID_REQUEST', 'PRODUCT_ESSENTIALBUSINESS_SERVER_ADDL', 'PRODUCT_DATACENTER_SERVER', 'STATUS_TM_IDENTITY_MISMATCH', 'STARTF_USESHOWWINDOW', 'ERROR_GUID_SUBSTITUTION_MADE', 'DEBUG_EVENT_CREATE_PROCESS', 'STATUS_LOG_METADATA_CORRUPT', 'STATUS_GRAPHICS_PATH_CONTENT_GEOMETRY_TRANSFORMATION_NOT_PINNED', 'IMAGE_SCN_ALIGN_1BYTES', 'STATUS_GRAPHICS_PVP_NO_DISPLAY_DEVICE_CORRESPONDS_TO_NAME', 'OS_TERMINALSERVER', 'ERROR_EVT_INVALID_CHANNEL_PROPERTY_VALUE', 'STATUS_GRAPHICS_VIDPN_MODALITY_NOT_SUPPORTED', 'STATUS_CLUSTER_INVALID_NETWORK_PROVIDER', 'NTE_PROV_TYPE_ENTRY_BAD', 'EXCEPTION_CONTINUE_SEARCH', 'EVENTLOG_SEEK_READ', 'REG_FULL_RESOURCE_DESCRIPTOR', 'MEM_MAPPED', 'STATUS_FWP_DUPLICATE_CONDITION', 'STATUS_FVE_KEYFILE_NOT_FOUND', 'STATUS_DRIVER_BLOCKED_CRITICAL', 
'JOB_OBJECT_TERMINATE', 'STATUS_CANNOT_ACCEPT_TRANSACTED_WORK', 'STATUS_IO_TIMEOUT', 'CERT_FIND_EXISTING', 'STATUS_NO_LOG_SPACE', 'KEY_ALL_ACCESS', 'ERROR_INVALID_LEVEL', 'ERROR_THREAD_WAS_SUSPENDED', 'CERT_COMPARE_HASH', 'RRF_RT_REG_DWORD', 'STATUS_FWP_TIMEOUT', 'TRUST_E_SUBJECT_FORM_UNKNOWN', 'ERROR_EVT_PUBLISHER_METADATA_NOT_FOUND', 'STATUS_NDIS_MULTICAST_EXISTS', 'STATUS_LOG_POLICY_NOT_INSTALLED', 'RPC_C_IMP_LEVEL_IMPERSONATE', 'ERROR_INVALID_OPLOCK_PROTOCOL', 'FILE_GENERIC_READ', 'NTE_BAD_PUBLIC_KEY', 'STATUS_LINK_TIMEOUT', 'IMAGE_REL_BASED_LOW', 'STATUS_MESSAGE_LOST', 'SERVICE_CONTROL_POWEREVENT', 'SERVICE_STATE_ALL', 'STATUS_INVALID_ID_AUTHORITY', 'SERVICE_ACCEPT_PAUSE_CONTINUE', 'STATUS_GRAPHICS_START_DEFERRED', 'STATUS_INVALID_IMAGE_NOT_MZ', 'ERROR_ATOMIC_LOCKS_NOT_SUPPORTED', 'STATUS_FLOPPY_ID_MARK_NOT_FOUND', 'STATUS_DS_LOCAL_CANT_HAVE_CROSSDOMAIN_LOCAL_MEMBER', 'STATUS_HASH_NOT_SUPPORTED', 'CLAIM_SECURITY_ATTRIBUTE_TYPE_INVALID', 'IMAGE_SCN_ALIGN_256BYTES', 'STATUS_LICENSE_QUOTA_EXCEEDED', 'STATUS_UNABLE_TO_DECOMMIT_VM', 'STATUS_MEDIA_CHANGED', 'STATUS_COULD_NOT_RESIZE_LOG', 'SERVICE_TRIGGER_DATA_TYPE_LEVEL', 'STATUS_CLUSTER_NETWORK_ALREADY_OFFLINE', 'FILE_NAMED_STREAMS', 'STATUS_VOLSNAP_PREPARE_HIBERNATE', 'STATUS_FVE_OVERLAPPED_UPDATE', 'SYSTEM_AUDIT_ACE_TYPE', 'STATUS_VHD_PARENT_VHD_ACCESS_DENIED', 'RPC_C_QOS_CAPABILITIES_DEFAULT', 'SE_BACKUP_NAME', 'IMAGE_FILE_BYTES_REVERSED_HI', 'STATUS_CTX_INVALID_PD', 'STATUS_RESUME_HIBERNATION', 'STATUS_GRAPHICS_FREQUENCYRANGE_NOT_IN_SET', 'FILE_SUPPORTS_SPARSE_FILES', 'ERROR_MEDIA_CHECK', 'ERROR_ELEVATION_REQUIRED', 'STATUS_DS_LOCAL_MEMBER_OF_LOCAL_ONLY', 'ERROR_EVT_FILTER_UNEXPECTEDTOKEN', 'STATUS_LPC_REQUESTS_NOT_ALLOWED', 'IMAGE_DIRECTORY_ENTRY_BASERELOC', 'STATUS_CS_ENCRYPTION_NEW_ENCRYPTED_FILE', 'ERROR_PNP_BAD_MPS_TABLE', 'STATUS_NO_DATA_DETECTED', 'CRYPT_ACQUIRE_SILENT_FLAG', 'CERT_COMPARE_ISSUER_OF', 'szOID_OIWSEC_desMAC', 'STATUS_REMOTE_SESSION_LIMIT', 'STATUS_WAS_LOCKED', 'CRYPT_ACQUIRE_NO_HEALING', 'SE_AUDIT_NAME', 'STATUS_SERVER_UNAVAILABLE', 'ERROR_EVT_INVALID_QUERY', 'STATUS_NTLM_BLOCKED', 'STATUS_FLT_INVALID_CONTEXT_REGISTRATION', 'STATUS_THREAD_WAS_SUSPENDED', 'GENERIC_ALL', 'THREAD_DIRECT_IMPERSONATION', 'CERT_OCSP_RESPONSE_PROP_ID', 'RPC_NT_ENUM_VALUE_OUT_OF_RANGE', 'CERT_E_MALFORMED', 'WTD_UI_ALL', 'STATUS_IO_REISSUE_AS_CACHED', 'RPC_NT_INVALID_OBJECT', 'STATUS_DUPLICATE_NAME', 'DEBUG_EVENT_SYSTEM_ERROR', 'STATUS_DLL_INIT_FAILED', 'CERT_SYSTEM_STORE_CURRENT_SERVICE', 'ERROR_STOPPED_ON_SYMLINK', 'RPC_NT_ENTRY_ALREADY_EXISTS', 'PRODUCT_SERVER_FOR_SB_SOLUTIONS_EM', 'CERT_STORE_PROV_SYSTEM_REGISTRY_A', 'STATUS_PROCESS_NOT_IN_JOB', 'ERROR_DLL_INIT_FAILED_LOGOFF', 'STATUS_VIDEO_HUNG_DISPLAY_DRIVER_THREAD_RECOVERED', 'IMAGE_FILE_MACHINE_ARMNT', 'MEM_PRIVATE', 'STATUS_NO_MATCH', 'szOID_RSA_extCertAttrs', 'PRODUCT_STANDARD_SERVER_CORE', 'szOID_ECC_CURVE_P521', 'SE_PRIVILEGE_REMOVED', 'STATUS_PROFILING_NOT_STOPPED', 'STATUS_NOLOGON_SERVER_TRUST_ACCOUNT', 'STATUS_IPSEC_WRONG_SA', 'PRODUCT_MULTIPOINT_PREMIUM_SERVER', 'STATUS_DS_ATTRIBUTE_OR_VALUE_EXISTS', 'STATUS_LOG_ARCHIVE_IN_PROGRESS', 'STATUS_NOT_MAPPED_DATA', 'WTD_CHOICE_FILE', 'IMAGE_DIRECTORY_ENTRY_TLS', 'STATUS_PATH_NOT_COVERED', 'CMSG_SIGNER_COUNT_PARAM', 'ERROR_TOO_MANY_SESS', 'OS_XPORGREATER', 'CERT_INFO_EXTENSION_FLAG', 'SM_CXDOUBLECLK', 'STATUS_WORKING_SET_QUOTA', 'SEC_RESERVE', 'ALPC_PORFLG_ALLOW_LPC_REQUESTS', 'STATUS_NOTHING_TO_TERMINATE', 'STG_LAYOUT_SEQUENTIAL', 'STATUS_FWP_INCOMPATIBLE_CIPHER_CONFIG', 'STATUS_INVALID_LOCK_RANGE', 
'SERVICE_ACCEPT_HARDWAREPROFILECHANGE', 'NTE_BAD_KEYSET', 'ERROR_EVT_FILTER_INVTYPE', 'CERT_STORE_ADD_REPLACE_EXISTING', 'CMSG_COMPUTED_HASH_PARAM', 'ERROR_TRANSACTION_REQUEST_NOT_VALID', 'AF_UNSPEC', 'CMSG_SIGNER_UNAUTH_ATTR_PARAM', 'ATTRIBUTE_SECURITY_INFORMATION', 'ALPC_HANDLEFLG_DUPLICATE_SAME_ACCESS', 'ERROR_NOT_SAME_DEVICE', 'ERROR_EVT_PUBLISHER_DISABLED', 'ERROR_REQ_NOT_ACCEP', 'STATUS_ALREADY_COMMITTED', 'USAGE_MATCH_TYPE_AND', 'SE_ENABLE_DELEGATION_NAME', 'STATUS_NOT_REGISTRY_FILE', 'DRIVE_FIXED', 'SECTION_QUERY', 'SERVICE_TRIGGER_DATA_TYPE_BINARY', 'ERROR_BUFFER_ALL_ZEROS', 'STATUS_NDIS_INVALID_PACKET', 'ERROR_INSUFFICIENT_RESOURCE_FOR_SPECIFIED_SHARED_SECTION_SIZE', 'STARTF_USESTDHANDLES', 'PROV_SPYRUS_LYNKS', 'STATUS_CLUSTER_NO_NET_ADAPTERS', 'STATUS_GRAPHICS_FREQUENCYRANGE_ALREADY_IN_SET', 'AF_DECnet', 'STATUS_IO_PRIVILEGE_FAILED', 'PROCESS_CREATE_PROCESS', 'RPC_NT_OBJECT_NOT_FOUND', 'CERT_DATE_STAMP_PROP_ID', 'ERROR_EVT_FILTER_PARSEERR', 'szOID_RSA_unstructName', 'ACCESS_MIN_MS_ACE_TYPE', 'STATUS_DS_ATTRIBUTE_TYPE_UNDEFINED', 'STATUS_UNSUCCESSFUL', 'STATUS_ILLEGAL_FUNCTION', 'STATUS_FVE_CONV_WRITE_ERROR', 'SM_MENUDROPALIGNMENT', 'CRYPT_E_INVALID_MSG_TYPE', 'SE_IMPERSONATE_NAME', 'RRF_RT_REG_SZ', 'STATUS_FLT_INTERNAL_ERROR', 'STATUS_OBJECT_NAME_COLLISION', 'STATUS_DS_RIDMGR_INIT_ERROR', 'ERROR_BAD_FUNCTION_TABLE', 'SEC_IMAGE', 'STATUS_FT_READ_RECOVERY_FROM_BACKUP', 'STATUS_SERIAL_MORE_WRITES', 'SEC_PROTECTED_IMAGE', 'SERVICE_RUNS_IN_SYSTEM_PROCESS', 'STATUS_SYSTEM_SHUTDOWN', 'STATUS_LOGON_SESSION_COLLISION', 'CMSG_LENGTH_ONLY_FLAG', 'ACCESS_MAX_MS_V2_ACE_TYPE', 'ERROR_INVALID_HW_PROFILE', 'IMAGE_FILE_MACHINE_M32R', 'PRODUCT_WEB_SERVER', 'STATUS_SYSTEM_HIVE_TOO_LARGE', 'STATUS_GRAPHICS_GAMMA_RAMP_NOT_SUPPORTED', 'IMAGE_SCN_MEM_NOT_PAGED', 'STATUS_INVALID_CONNECTION', 'ERROR_PREDEFINED_HANDLE', 'CALLBACK_MODIFY_STATE', 'SE_GROUP_INTEGRITY', 'ERROR_BAD_SERVICE_ENTRYPOINT', 'STATUS_KEY_HAS_CHILDREN', 'ERROR_OPLOCK_SWITCHED_TO_NEW_HANDLE', 'ERROR_DBG_UNABLE_TO_PROVIDE_HANDLE', 'CERT_COMPARE_SUBJECT_INFO_ACCESS', 'IMAGE_SCN_ALIGN_16BYTES', 'CERT_COMPARE_NAME_STR_A', 'STATUS_INVALID_LEVEL', 'ERROR_INVALID_EXE_SIGNATURE', 'STATUS_IP_ADDRESS_CONFLICT1', 'ERROR_TOO_MANY_OPEN_FILES', 'CERT_E_INVALID_POLICY', 'STATUS_SXS_MANIFEST_PARSE_ERROR', 'ERROR_BROKEN_PIPE', 'CREATE_IGNORE_SYSTEM_DEFAULT', 'CERT_TRUST_INVALID_POLICY_CONSTRAINTS', 'STATUS_GRACEFUL_DISCONNECT', 'EVENTLOG_AUDIT_SUCCESS', 'CERT_COMPARE_NAME_STR_W', 'FILE_ACTION_MODIFIED', 'ERROR_NETWORK_BUSY', 'STATUS_BAD_MASTER_BOOT_RECORD', 'IMAGE_DLLCHARACTERISTICS_APPCONTAINER', 'STATUS_NO_SUCH_PRIVILEGE', 'ERROR_SEM_USER_LIMIT', 'STATUS_BAD_INHERITANCE_ACL', 'szOID_INFOSEC_mosaicKMandSig', 'STATUS_SMARTCARD_NO_CARD', 'ERROR_SYSTEM_POWERSTATE_COMPLEX_TRANSITION', 'STATUS_INVALID_PARAMETER_10', 'FILE_FLAG_OPEN_NO_RECALL', 'SM_MOUSEHORIZONTALWHEELPRESENT', 'FILE_NO_INTERMEDIATE_BUFFERING', 'ERROR_PIPE_LOCAL', 'X509_NAME_VALUE', 'ERROR_FILE_SYSTEM_LIMITATION', 'STATUS_OFFLOAD_WRITE_FILE_NOT_SUPPORTED', 'CERT_STORE_PROV_MEMORY', 'STATUS_INVALID_USER_BUFFER', 'TOKEN_MANDATORY_POLICY_OFF', 'STATUS_NONCONTINUABLE_EXCEPTION', 'STATUS_ACCESS_DISABLED_NO_SAFER_UI_BY_POLICY', 'STATUS_MAPPED_ALIGNMENT', 'STATUS_DEVICE_OFF_LINE', 'STATUS_IMAGE_MACHINE_TYPE_MISMATCH_EXE', 'STATUS_RECOVERY_FAILURE', 'IMAGE_SCN_TYPE_NOLOAD', 'ALPC_PORFLG_SYSTEM_PROCESS', 'szOID_OIWSEC_keyHashSeal', 'STATUS_FWP_INCOMPATIBLE_TXN', 'ERROR_PAGE_FAULT_COPY_ON_WRITE', 'STATUS_GRAPHICS_NO_RECOMMENDED_VIDPN_TOPOLOGY', 'STATUS_SOURCE_ELEMENT_EMPTY', 
'ERROR_EXE_MACHINE_TYPE_MISMATCH', 'LABEL_SECURITY_INFORMATION', 'STATUS_OPERATION_NOT_SUPPORTED_IN_TRANSACTION', 'STATUS_REPARSE_OBJECT', 'RPC_C_IMP_LEVEL_ANONYMOUS', 'STATUS_CTX_SHADOW_DENIED', 'ERROR_INVALID_DRIVE', 'STATUS_ILL_FORMED_SERVICE_ENTRY', 'STATUS_ADDRESS_ALREADY_EXISTS', 'STATUS_FWP_INVALID_ENUMERATOR', 'SE_GROUP_MANDATORY', 'RPC_NT_NO_MORE_BINDINGS', 'CLAIM_SECURITY_ATTRIBUTE_TYPE_BOOLEAN', 'CERT_KEY_PROV_HANDLE_PROP_ID', 'STATUS_GRAPHICS_CANT_ACCESS_ACTIVE_VIDPN', 'IMAGE_SCN_MEM_FARDATA', 'TOKEN_ALL_ACCESS_P', 'STATUS_INVALID_GROUP_ATTRIBUTES', 'STATUS_GRAPHICS_I2C_DEVICE_DOES_NOT_EXIST', 'STATUS_SECTION_PROTECTION', 'GENERIC_READ', 'STATUS_WMI_TRY_AGAIN', 'szOID_RSA_RC5_CBCPad', 'IO_COMPLETION_ALL_ACCESS', 'STATUS_CTX_CLIENT_LICENSE_IN_USE', 'STATUS_WAIT_63', 'DRIVE_UNKNOWN', 'STATUS_MUI_FILE_NOT_FOUND', 'CMSG_CMS_SIGNER_INFO_PARAM', 'ERROR_MAX_SESSIONS_REACHED', 'SM_DIGITIZER', 'RPC_NT_INTERFACE_NOT_FOUND', 'CMSG_TYPE_PARAM', 'SE_INC_BASE_PRIORITY_NAME', 'MS_STRONG_PROV', 'JOB_OBJECT_ALL_ACCESS', 'SEC_NOCACHE', 'STATUS_DS_NO_NEST_LOCALGROUP_IN_MIXEDDOMAIN', 'STATUS_FWP_CONTEXT_INCOMPATIBLE_WITH_LAYER', 'ERROR_IP_ADDRESS_CONFLICT2', 'ERROR_IP_ADDRESS_CONFLICT1', 'STATUS_ASSERTION_FAILURE', 'REG_QWORD', 'RPC_NT_INVALID_NETWORK_OPTIONS', 'ERROR_USER_APC', 'SM_CXMIN', 'CMSG_VERSION_PARAM', 'ERROR_CANNOT_MAKE', 'EVENTLOG_INFORMATION_TYPE', 'STATUS_SXS_ASSEMBLY_MISSING', 'STATUS_NO_MORE_ENTRIES', 'STATUS_FWP_PROVIDER_NOT_FOUND', 'STATUS_SXS_IDENTITIES_DIFFERENT', 'RPC_NT_INVALID_ASYNC_HANDLE', 'FILE_OPEN_IF', 'STATUS_ORDINAL_NOT_FOUND', 'STATUS_INVALID_FILE_FOR_SECTION', 'CREATE_BREAKAWAY_FROM_JOB', 'STATUS_PORT_UNREACHABLE', 'IMAGE_SCN_CNT_INITIALIZED_DATA', 'STATUS_VOLUME_MOUNTED', 'STATUS_JOURNAL_DELETE_IN_PROGRESS', 'PRODUCT_ENTERPRISE', 'STATUS_GRAPHICS_INVALID_CLIENT_TYPE', 'STATUS_GRAPHICS_DDCCI_MONITOR_RETURNED_INVALID_TIMING_STATUS_BYTE', 'STATUS_INVALID_DOMAIN_STATE', 'STATUS_INSUFFICIENT_RESOURCES', 'IMAGE_DLLCHARACTERISTICS_NO_ISOLATION', 'STATUS_PORT_ALREADY_SET', 'STATUS_SXS_CORRUPTION', 'STATUS_DATA_NOT_ACCEPTED', 'X509_EXTENSIONS', 'RPC_C_AUTHN_LEVEL_NONE', 'STATUS_SERIAL_COUNTER_TIMEOUT', 'STATUS_ABANDONED', 'STATUS_UNDEFINED_CHARACTER', 'ERROR_ASSERTION_FAILURE', 'STATUS_GRAPHICS_INVALID_STRIDE', 'STATUS_INVALID_CID', 'STATUS_NDIS_ALREADY_MAPPED', 'SM_CXMINSPACING', 'STATUS_CANNOT_EXECUTE_FILE_IN_TRANSACTION', 'STATUS_LOST_WRITEBEHIND_DATA_NETWORK_SERVER_ERROR', 'szOID_INFOSEC_sdnsIntegrity', 'CERT_NAME_FRIENDLY_DISPLAY_TYPE', 'SM_CXFOCUSBORDER', 'STATUS_CLUSTER_NODE_UNREACHABLE', 'STATUS_GRAPHICS_OPM_INVALID_CONFIGURATION_REQUEST', 'ERROR_REM_NOT_LIST', 'STATUS_FILE_INVALID', 'STATUS_TRANSACTION_SUPERIOR_EXISTS', 'STATUS_WAIT_0', 'SM_CXSMICON', 'STATUS_ONLY_IF_CONNECTED', 'STATUS_FLT_BUFFER_TOO_SMALL', 'STATUS_SXS_SYSTEM_DEFAULT_ACTIVATION_CONTEXT_EMPTY', 'STATUS_ISSUING_CA_UNTRUSTED_KDC', 'ERROR_NO_ACE_CONDITION', 'ERROR_LOST_WRITEBEHIND_DATA_NETWORK_DISCONNECTED', 'STATUS_TRANSACTIONMANAGER_NOT_FOUND', 'STATUS_ALL_USER_TRUST_QUOTA_EXCEEDED', 'STATUS_CANT_OPEN_MINIVERSION_WITH_MODIFY_INTENT', 'STGM_CREATE', 'CERT_SHA1_HASH_PROP_ID', 'STATUS_REPLY_MESSAGE_MISMATCH', 'STATUS_GRAPHICS_INVALID_DRIVER_MODEL', 'STATUS_GRAPHICS_NO_DISPLAY_DEVICE_CORRESPONDS_TO_NAME', 'STATUS_GRAPHICS_OPM_INVALID_HANDLE', 'CERT_STORE_PROV_SYSTEM_REGISTRY', 'FILE_NO_EA_KNOWLEDGE', 'CRYPT_USER_KEYSET', 'STATUS_FWP_NO_TXN_IN_PROGRESS', 'STATUS_EFS_ALG_BLOB_TOO_BIG', 'CMSG_BARE_CONTENT_FLAG', 'ERROR_SEGMENT_NOTIFICATION', 'szOID_RSA_preferSignedData', 'ERROR_SAME_DRIVE', 
'ERROR_NOACCESS', 'FILE_SUPPORTS_ENCRYPTION', 'ERROR_EVT_CHANNEL_NOT_FOUND', 'SE_INC_WORKING_SET_NAME', 'KEY_EXECUTE', 'EXCEPTION_INVALID_HANDLE', 'RPC_NT_UNKNOWN_AUTHN_LEVEL', 'MS_DEF_RSA_SIG_PROV', 'NTE_BAD_ALGID', 'RPC_S_UNKNOWN_IF', 'STATUS_FWP_INCOMPATIBLE_AUTH_CONFIG', 'ERROR_OBJECT_NAME_EXISTS', 'THREAD_TERMINATE', 'STATUS_NOT_TINY_STREAM', 'SW_SHOWNA', 'DRIVE_REMOTE', 'STATUS_FS_DRIVER_REQUIRED', 'szOID_OIWSEC_md4RSA', 'CERT_CHAIN_REVOCATION_CHECK_END_CERT', 'S_OK', 'ERROR_NO_MORE_MATCHES', 'IMAGE_FILE_MACHINE_MIPS16', 'SYSTEM_MANDATORY_LABEL_NO_EXECUTE_UP', 'OS_WIN2000PRO', 'STATUS_GRAPHICS_STALE_VIDPN_TOPOLOGY', 'FILE_ADD_FILE', 'STATUS_FLT_NO_HANDLER_DEFINED', 'EXCEPTION_FLT_STACK_CHECK', 'CMSG_BARE_CONTENT_PARAM', 'FILE_NOTIFY_CHANGE_SECURITY', 'STATUS_SERVER_DISABLED', 'ERROR_PROFILING_NOT_STOPPED', 'SERVICE_DEMAND_START', 'STATUS_GRAPHICS_PATH_CONTENT_GEOMETRY_TRANSFORMATION_NOT_SUPPORTED', 'RPC_NT_SS_HANDLES_MISMATCH', 'STATUS_GRAPHICS_CANT_RENDER_LOCKED_ALLOCATION', 'CONTROL_C_EXIT', 'STATUS_REGISTRY_CORRUPT', 'MS_DEF_RSA_SIG_PROV_A', 'STATUS_NDIS_UNSUPPORTED_REVISION', 'CERT_ENCODING_TYPE_MASK', 'STATUS_PNP_TRANSLATION_FAILED', 'STATUS_RESOURCEMANAGER_READ_ONLY', 'SE_RELABEL_NAME', 'ERROR_QUOTA_LIST_INCONSISTENT', 'STATUS_GRAPHICS_NO_MONITORS_CORRESPOND_TO_DISPLAY_DEVICE', 'ERROR_STACK_OVERFLOW', 'szOID_ECC_PUBLIC_KEY', 'ERROR_SEM_TIMEOUT', 'CMSG_UNPROTECTED_ATTR_PARAM', 'FILE_SUPPORTS_OBJECT_IDS', 'STATUS_INVALID_QUOTA_LOWER', 'STATUS_NDIS_RESET_IN_PROGRESS', 'EVENTLOG_SEQUENTIAL_READ', 'IMAGE_SCN_CNT_CODE', 'CERT_NAME_DISABLE_IE4_UTF8_FLAG', 'STATUS_GRAPHICS_INVALID_MONITORDESCRIPTOR', 'ABOVE_NORMAL_PRIORITY_CLASS', 'PROV_DSS', 'STATUS_CLUSTER_NODE_ALREADY_DOWN', 'ERROR_INVALID_FLAG_NUMBER', 'CERT_AUTO_ENROLL_RETRY_PROP_ID', 'IMAGE_SCN_GPREL', 'szOID_RFC3161_counterSign', 'ERROR_FILE_INVALID', 'STATUS_RM_METADATA_CORRUPT', 'ERROR_OPLOCK_NOT_GRANTED', 'ERROR_INVALID_SEGDPL', 'PRODUCT_DATACENTER_SERVER_CORE_V', 'ERROR_ABIOS_ERROR', 'RPC_NT_BYTE_COUNT_TOO_SMALL', 'RPC_RESPONSE_TYPE_SUCCESS', 'STATUS_RXACT_COMMIT_NECESSARY', 'X509_KEY_ATTRIBUTES', 'OS_ANYSERVER', 'STATUS_TRANSACTION_NOT_ENLISTED', 'WTD_UI_NONE', 'STATUS_GRAPHICS_ALLOCATION_CLOSED', 'FILE_ATTRIBUTE_SYSTEM', 'X509_OCTET_STRING', 'STATUS_SPARSE_NOT_ALLOWED_IN_TRANSACTION', 'FILE_WRITE_ATTRIBUTES', 'ERROR_NOINTERFACE', 'ERROR_INVALID_ORDINAL', 'SM_CYSMICON', 'PRODUCT_STORAGE_EXPRESS_SERVER', 'IMAGE_DIRECTORY_ENTRY_SECURITY', 'STATUS_INVALID_SID', 'STATUS_FLOAT_STACK_CHECK', 'STATUS_FLT_INVALID_ASYNCHRONOUS_REQUEST', 'IMAGE_SCN_MEM_READ', 'MS_ENH_RSA_AES_PROV_A', 'STATUS_FLT_VOLUME_NOT_FOUND', 'ERROR_MAX_THRDS_REACHED', 'SERVICE_CONTROL_NETBINDADD', 'X509_ANY_STRING', 'ERROR_EAS_NOT_SUPPORTED', 'STATUS_GRAPHICS_INVALID_FREQUENCY', 'CONTEXT_CONTROL', 'szOID_RSA_SHA256RSA', 'PAGE_EXECUTE_WRITECOPY', 'SM_CXSCREEN', 'CRYPT_E_NOT_DECRYPTED', 'STATUS_LOST_WRITEBEHIND_DATA_NETWORK_DISCONNECTED', 'ERROR_BACKUP_CONTROLLER', 'STATUS_GRAPHICS_OPM_NOT_SUPPORTED', 'STATUS_DRIVER_BLOCKED', 'ERROR_REGISTRY_RECOVERED', 'SECURITY_MANDATORY_SYSTEM_RID', 'CREATE_SEPARATE_WOW_VDM', 'STATUS_NDIS_DEVICE_FAILED', 'ERROR_ENVVAR_NOT_FOUND', 'ERROR_INVALID_FLAGS', 'DBG_PRINTEXCEPTION_C', 'STATUS_FVE_AUTH_INVALID_CONFIG', 'STARTF_FORCEOFFFEEDBACK', 'MS_ENH_RSA_AES_PROV_XP_A', 'SERVICE_INTERACTIVE_PROCESS', 'STATUS_GRAPHICS_MULTISAMPLING_NOT_SUPPORTED', 'OS_NT', 'CREATE_THREAD_DEBUG_EVENT', 'ERROR_EVENT_PENDING', 'STATUS_INVALID_EA_NAME', 'ERROR_TRANSACTION_ALREADY_COMMITTED', 'CERT_QUERY_CONTENT_FLAG_PKCS7_UNSIGNED', 
'ERROR_TOO_MANY_MODULES', 'ERROR_FORMS_AUTH_REQUIRED', 'PRODUCT_STANDARD_SERVER_V', 'MS_ENH_RSA_AES_PROV_XP_W', 'STATUS_FLOPPY_UNKNOWN_ERROR', 'STATUS_PROCESS_IS_TERMINATING', 'ERROR_CALL_NOT_IMPLEMENTED', 'IMAGE_FILE_MACHINE_MIPSFPU', 'STATUS_TIMER_RESUME_IGNORED', 'STATUS_TRANSACTION_ABORTED', 'STATUS_INVALID_DISPOSITION', 'MS_ENHANCED_PROV_A', 'SM_ARRANGE', 'SW_SHOWNORMAL', 'CRYPT_ENCODE_DECODE_NONE', 'MS_ENHANCED_PROV_W', 'PROV_DSS_DH', 'STATUS_INTEGER_OVERFLOW', 'CMSG_CERT_PARAM', 'ACCESS_ALLOWED_CALLBACK_OBJECT_ACE_TYPE', 'STATUS_ILLEGAL_CHARACTER', 'UNPROTECTED_SACL_SECURITY_INFORMATION', 'CRYPT_DEFAULT_CONTAINER_OPTIONAL', 'ERROR_DBG_CONTROL_BREAK', 'STATUS_IPSEC_INTEGRITY_CHECK_FAILED', 'szOID_DSALG_RSA', 'RPC_NT_INVALID_BOUND', 'STATUS_SXS_IDENTITY_DUPLICATE_ATTRIBUTE', 'ERROR_ABANDON_HIBERFILE', 'STATUS_LOCK_NOT_GRANTED', 'ERROR_FLOPPY_VOLUME', 'AF_UNKNOWN1', 'ERROR_WAIT_FOR_OPLOCK', 'SERVICE_CONTROL_NETBINDENABLE', 'RPC_NT_NO_INTERFACES', 'STATUS_GRAPHICS_UNSWIZZLING_APERTURE_UNAVAILABLE', 'szOID_RSA_signEnvData', 'STATUS_NO_SUCH_USER', 'SECTION_MAP_READ', 'STATUS_UNEXPECTED_IO_ERROR', 'STATUS_WRONG_PASSWORD_CORE', 'SECTION_MAP_EXECUTE', 'STATUS_CTX_CONSOLE_CONNECT', 'EVENT_ALL_ACCESS', 'STATUS_WRONG_EFS', 'STATUS_WMI_INSTANCE_NOT_FOUND', 'CERT_FIND_SUBJECT_ATTR', 'TOKEN_SECURITY_ATTRIBUTE_TYPE_UINT64', 'CERT_QUERY_OBJECT_FILE', 'STATUS_RESOURCE_LANG_NOT_FOUND', 'SCOPE_SECURITY_INFORMATION', 'STATUS_FLT_DUPLICATE_ENTRY', 'IMAGE_FILE_AGGRESIVE_WS_TRIM', 'STATUS_TRANSACTION_INVALID_MARSHALL_BUFFER', 'STATUS_TOO_MANY_GUIDS_REQUESTED', 'STATUS_GRAPHICS_VIDPN_TOPOLOGY_NOT_SUPPORTED', 'ERROR_DIR_NOT_EMPTY', 'STATUS_MUST_BE_KDC', 'szOID_INFOSEC_mosaicConfidentiality', 'STATUS_LONGJUMP', 'STATUS_NDIS_BAD_VERSION', 'STATUS_TRANSACTION_RECORD_TOO_LONG', 'ERROR_NO_CALLBACK_ACTIVE', 'ERROR_GEN_FAILURE', 'CERT_QUERY_FORMAT_FLAG_BASE64_ENCODED', 'SE_SACL_PRESENT', 'STATUS_ACPI_FATAL', 'CRYPT_E_UNEXPECTED_ENCODING', 'CERT_STORE_PROV_SYSTEM_A', 'STATUS_CTX_TD_ERROR', 'STATUS_DS_VERSION_CHECK_FAILURE', 'STATUS_IN_PAGE_ERROR', 'NTE_BAD_KEY', 'ERROR_CALLBACK_POP_STACK', 'ERROR_INTERRUPT_VECTOR_ALREADY_CONNECTED', 'CERT_TRUST_IS_NOT_TIME_VALID', 'SERVICE_ACCEPT_STOP', 'CMSG_VERIFY_SIGNER_CERT', 'CERT_STORE_PROV_SYSTEM_W', 'STANDARD_RIGHTS_REQUIRED', 'RPC_NT_INVALID_BINDING', 'ERROR_EVT_QUERY_RESULT_STALE', 'ERROR_UNEXPECTED_MM_MAP_ERROR', 'STATUS_NOT_SERVER_SESSION', 'STATUS_INVALID_SUB_AUTHORITY', 'STATUS_ACPI_ADDRESS_NOT_MAPPED', 'SERVICE_RECOGNIZER_DRIVER', 'ERROR_EVT_EVENT_TEMPLATE_NOT_FOUND', 'STATUS_TRANSACTION_ALREADY_ABORTED', 'RRF_RT_ANY', 'CERT_INFO_VERSION_FLAG', 'CERT_TRUST_IS_COMPLEX_CHAIN', 'CRYPT_USER_PROTECTED', 'STATUS_ENCRYPTION_FAILED', 'ERROR_FS_DRIVER_REQUIRED', 'FILE_CASE_SENSITIVE_SEARCH', 'STATUS_DS_DOMAIN_RENAME_IN_PROGRESS', 'STATUS_INVALID_TASK_NAME', 'STATUS_ACPI_MUTEX_NOT_OWNER', 'TOKEN_SECURITY_ATTRIBUTE_CUSTOM_FLAGS', 'STATUS_NOT_SAME_DEVICE', 'NTE_SYS_ERR', 'STATUS_LPC_RECEIVE_BUFFER_EXPECTED', 'STGM_PRIORITY', 'STATUS_NDIS_PM_WOL_PATTERN_LIST_FULL', 'SE_CREATE_GLOBAL_NAME', 'STATUS_CRM_PROTOCOL_ALREADY_EXISTS', 'STATUS_CONFLICTING_ADDRESSES', 'SW_SHOWMINIMIZED', 'STATUS_RECOVERY_NOT_NEEDED', 'STATUS_DS_HAVE_PRIMARY_MEMBERS', 'STATUS_POSSIBLE_DEADLOCK', 'ERROR_DRIVER_FAILED_PRIOR_UNLOAD', 'PROCESS_QUERY_LIMITED_INFORMATION', 'STGM_DIRECT_SWMR', 'STATUS_BAD_CLUSTERS', 'RPC_NT_INVALID_PIPE_OBJECT', 'STATUS_WX86_FLOAT_STACK_CHECK', 'STATUS_INSUFFICIENT_POWER', 'SM_CYCAPTION', 'CMSG_CMS_RECIPIENT_INFO_PARAM', 'CERT_INFO_ISSUER_UNIQUE_ID_FLAG', 'szOID_ECDSA_SHA384', 
'szOID_RSA_SMIMEalgCMSRC2wrap', 'SE_CREATE_TOKEN_NAME', 'DRIVE_NO_ROOT_DIR', 'MS_ENHANCED_PROV', 'CERT_SUBJECT_NAME_MD5_HASH_PROP_ID', 'STATUS_FVE_KEYFILE_INVALID', 'TOKEN_MANDATORY_POLICY_NEW_PROCESS_MIN', 'STATUS_GRAPHICS_ADAPTER_MUST_HAVE_AT_LEAST_ONE_SOURCE', 'szOID_NIST_AES192_CBC', 'STATUS_NDIS_OFFLOAD_CONNECTION_REJECTED', 'CMSG_CTRL_VERIFY_SIGNATURE_EX', 'ERROR_OUT_OF_STRUCTURES', 'CMSG_RECIPIENT_INDEX_PARAM', 'IMAGE_FILE_MACHINE_R4000', 'STATUS_CANT_OPEN_ANONYMOUS', 'RPC_NT_INVALID_STRING_BINDING', 'STATUS_TXF_METADATA_ALREADY_PRESENT', 'COMPRESSION_ENGINE_STANDARD', 'STATUS_FWP_NOT_FOUND', 'MUTANT_QUERY_STATE', 'STATUS_WRONG_COMPARTMENT', 'STATUS_CANT_RECOVER_WITH_HANDLE_OPEN', 'STATUS_PKU2U_CERT_FAILURE', 'SM_CLEANBOOT', 'ERROR_WAIT_63', 'STATUS_GRAPHICS_NO_PREFERRED_MODE', 'IMAGE_SCN_ALIGN_1024BYTES', 'SE_SELF_RELATIVE', 'CRYPT_E_MSG_ERROR', 'ERROR_CANCEL_VIOLATION', 'ERROR_READ_FAULT', 'ERROR_EXTRANEOUS_INFORMATION', 'CERT_QUERY_FORMAT_ASN_ASCII_HEX_ENCODED', 'CERT_TRUST_HAS_VALID_NAME_CONSTRAINTS', 'SEMAPHORE_QUERY_STATE', 'STATUS_NOT_EXPORT_FORMAT', 'STATUS_CHECKOUT_REQUIRED', 'STATUS_OBJECT_PATH_INVALID', 'STATUS_GRAPHICS_PVP_NO_MONITORS_CORRESPOND_TO_DISPLAY_DEVICE', 'STATUS_CTX_LICENSE_NOT_AVAILABLE', 'OUTPUT_DEBUG_STRING_EVENT', 'CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT', 'STATUS_DRIVER_FAILED_SLEEP', 'STATUS_GRAPHICS_MODE_NOT_PINNED', 'STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_DOES_NOT_HAVE_COPP_SEMANTICS', 'CMSG_VERIFY_SIGNER_NULL', 'ERROR_INVALID_STARTING_CODESEG', 'szOID_INFOSEC_mosaicSignature', 'ERROR_INVALID_ADDRESS', 'PROV_DH_SCHANNEL', 'FILE_NOTIFY_CHANGE_FILE_NAME', 'STATUS_CROSSREALM_DELEGATION_FAILURE', 'ERROR_INVALID_LOCK_RANGE', 'RPC_C_QOS_IDENTITY_STATIC', 'CERT_STORE_CRL_CONTEXT', 'SECTION_EXTEND_SIZE', 'STATUS_GRAPHICS_DATASET_IS_EMPTY', 'STATUS_INVALID_IMAGE_WIN_32', 'ERROR_NO_VOLUME_LABEL', 'CALLBACK_ALL_ACCESS', 'ERROR_REGISTRY_HIVE_RECOVERED', 'CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY', 'CERT_NAME_DNS_TYPE', 'STATUS_CLUSTER_INVALID_NETWORK', 'FILE_TRAVERSE', 'SERVICE_CONTROL_NETBINDREMOVE', 'STATUS_INCOMPATIBLE_DRIVER_BLOCKED', 'szOID_INFOSEC', 'STATUS_GRAPHICS_OPM_NO_PROTECTED_OUTPUTS_EXIST', 'STATUS_LINK_FAILED', 'PROCESS_VM_READ', 'STATUS_STACK_OVERFLOW', 'TOKEN_ALL_ACCESS', 'STATUS_POLICY_ONLY_IN_DS', 'STATUS_FLT_VOLUME_ALREADY_MOUNTED', 'STATUS_CURRENT_TRANSACTION_NOT_VALID', 'STATUS_AUTHIP_FAILURE', 'NTE_INVALID_PARAMETER', 'NTE_SIGNATURE_FILE_BAD', 'ERROR_NO_PAGEFILE', 'PROCESS_TRUST_LABEL_SECURITY_INFORMATION', 'ERROR_UNRECOGNIZED_VOLUME', 'CMSG_CTRL_DEL_CRL', 'RPC_NT_NO_PROTSEQS_REGISTERED', 'STATUS_ABANDON_HIBERFILE', 'STATUS_IMAGE_ALREADY_LOADED', 'CRYPT_NDR_ENCODING', 'STATUS_ENTRYPOINT_NOT_FOUND', 'ERROR_NOTIFY_ENUM_DIR', 'STATUS_ALPC_CHECK_COMPLETION_LIST', 'STATUS_RESOURCE_IN_USE', 'RPC_NT_ALREADY_LISTENING', 'FILE_NOTIFY_CHANGE_LAST_ACCESS', 'ERROR_PORT_NOT_SET', 'OS_APPLIANCE', 'SM_CXICONSPACING', 'CERT_SYSTEM_STORE_CURRENT_USER_ID', 'SEMAPHORE_ALL_ACCESS', 'STATUS_GRAPHICS_MONITOR_NO_LONGER_EXISTS', 'ERROR_CRASH_DUMP', 'ERROR_LOCK_VIOLATION', 'DEBUG_EVENT_EXIT_PROCESS', 'MEM_RELEASE', 'STATUS_PTE_CHANGED', 'ALPC_HANDLEFLG_DUPLICATE_INHERIT', 'STATUS_PARITY_ERROR', 'ALPC_MESSAGE_CONTEXT_ATTRIBUTE', 'STATUS_IPSEC_QUEUE_OVERFLOW', 'INHERIT_CALLER_PRIORITY', 'IMAGE_SCN_NO_DEFER_SPEC_EXC', 'STATUS_NDIS_OPEN_FAILED', 'STATUS_PNP_REBOOT_REQUIRED', 'STATUS_WMI_ALREADY_DISABLED', 'CERT_ISSUER_SERIAL_NUMBER_MD5_HASH_PROP_ID', 'FAX_JOB_MANAGE', 'STATUS_GRAPHICS_PRESENT_REDIRECTION_DISABLED', 'STATUS_INVALID_PIPE_STATE', 'ERROR_BAD_DRIVER_LEVEL', 
'AF_INET6', 'STATUS_FWP_CONDITION_NOT_FOUND', 'PIPE_ACCESS_INBOUND', 'ERROR_BUFFER_OVERFLOW', 'STATUS_INVALID_VIEW_SIZE', 'NTE_BAD_KEY_STATE', 'STATUS_IPSEC_DOSP_INVALID_PACKET', 'SERVICE_ACCEPT_TIMECHANGE', 'STATUS_GRAPHICS_TOPOLOGY_CHANGES_NOT_ALLOWED', 'STATUS_FSFILTER_OP_COMPLETED_SUCCESSFULLY', 'CERT_TRUST_IS_PEER_TRUSTED', 'STATUS_IPSEC_DOSP_RECEIVED_MULTICAST', 'ERROR_ACCESS_DENIED', 'KEY_CREATE_SUB_KEY', 'SM_CXCURSOR', 'STATUS_GRAPHICS_MODE_ALREADY_IN_MODESET', 'E_INVALIDARG', 'PKCS12_PREFER_CNG_KSP', 'STATUS_DEVICE_NOT_READY', 'CMSG_CTRL_ADD_SIGNER_UNAUTH_ATTR', 'SERVICE_AUTO_START', 'OS_HOME', 'WTD_CHOICE_CATALOG', 'CERT_STORE_PROV_PKCS12', 'RPC_NT_UNKNOWN_MGR_TYPE', 'STATUS_CALLBACK_RETURNED_THREAD_PRIORITY', 'SM_CYMINSPACING', 'STATUS_NOT_SUPPORTED_ON_SBS', 'ALPC_MESSAGE_WORK_ON_BEHALF_ATTRIBUTE', 'RPC_NT_PROTSEQ_NOT_FOUND', 'STATUS_GRAPHICS_PRESENT_OCCLUDED', 'SM_XVIRTUALSCREEN', 'TOKEN_ADJUST_GROUPS', 'CMSG_SIGNER_INFO_PARAM', 'DBG_EXCEPTION_NOT_HANDLED', 'FILE_SUPPORTS_EXTENDED_ATTRIBUTES', 'ACCESS_ALLOWED_ACE_TYPE', 'STATUS_NO_SUCH_ALIAS', 'NTE_BUFFERS_OVERLAP', 'X509_MULTI_BYTE_INTEGER', 'STATUS_SXS_VERSION_CONFLICT', 'STATUS_IO_REPARSE_DATA_INVALID', 'IMAGE_DLLCHARACTERISTICS_NO_SEH', 'ACCESS_DENIED_ACE_TYPE', 'STATUS_CS_ENCRYPTION_INVALID_SERVER_RESPONSE', 'SE_REMOTE_SHUTDOWN_NAME', 'szOID_INFOSEC_sdnsTokenProtection', 'STATUS_AUDIT_FAILED', 'STATUS_QUOTA_LIST_INCONSISTENT', 'STATUS_VHD_DIFFERENCING_CHAIN_CYCLE_DETECTED', 'PRODUCT_PROFESSIONAL', 'HKEY_CLASSES_ROOT', 'IMAGE_FILE_MACHINE_TRICORE', 'STATUS_FWP_DYNAMIC_SESSION_IN_PROGRESS', 'EXCEPTION_ACCESS_VIOLATION', 'ACCESS_ALLOWED_COMPOUND_ACE_TYPE', 'X509_BASIC_CONSTRAINTS', 'PERSIST_E_SIZEDEFINITE', 'STATUS_MAX_REFERRALS_EXCEEDED', 'STATUS_BAD_BINDINGS', 'ERROR_BAD_ENVIRONMENT', 'STATUS_WMI_NOT_SUPPORTED', 'STATUS_WOW_ASSERTION', 'REG_SZ', 'EXIT_THREAD_DEBUG_EVENT', 'CRYPT_SILENT', 'STATUS_CLIENT_SERVER_PARAMETERS_INVALID', 'ERROR_DEVICE_ALREADY_ATTACHED', 'STATUS_LOG_RESERVATION_INVALID', 'STATUS_TM_INITIALIZATION_FAILED', 'CERT_STORE_DELETE_FLAG', 'ERROR_NO_YIELD_PERFORMED', 'szOID_DSALG', 'STATUS_GRAPHICS_I2C_NOT_SUPPORTED', 'STATUS_CLUSTER_NODE_NOT_MEMBER', 'SERVICE_KERNEL_DRIVER', 'SYSTEM_AUDIT_CALLBACK_OBJECT_ACE_TYPE', 'szOID_OIWSEC_desCBC', 'ERROR_PAGE_FAULT_DEMAND_ZERO', 'SERVICE_CONTROL_TRIGGEREVENT', 'CLAIM_SECURITY_ATTRIBUTE_TYPE_STRING', 'SERVICE_CONTROL_SESSIONCHANGE', 'CERT_ENROLLMENT_PROP_ID', 'ERROR_TRANSACTION_NOT_ACTIVE', 'STATUS_NAME_TOO_LONG', 'szOID_NIST_AES256_WRAP', 'PAGE_READWRITE', 'STATUS_INCOMPATIBLE_WITH_GLOBAL_SHORT_NAME_REGISTRY_SETTING', 'STATUS_GRAPHICS_CHAINLINKS_NOT_STARTED', 'STATUS_TIMEOUT', 'SE_SYSTEMTIME_NAME', 'SERVICE_TRIGGER_DATA_TYPE_STRING', 'STATUS_CTX_SECURITY_LAYER_ERROR', 'STATUS_TOO_MANY_THREADS', 'MEM_TOP_DOWN', 'CERT_NAME_URL_TYPE', 'SM_YVIRTUALSCREEN', 'SM_MAXIMUMTOUCHES', 'ERROR_FT_READ_RECOVERY_FROM_BACKUP', 'IMAGE_DLLCHARACTERISTICS_NO_BIND', 'ERROR_DBG_CONTROL_C', 'ERROR_PIPE_LISTENING', 'STATUS_KDC_CERT_REVOKED', 'ALPC_SECFLG_CREATE_HANDLE', 'szOID_INFOSEC_sdnsSignature', 'ERROR_BAD_STACK', 'STATUS_COMPRESSION_NOT_ALLOWED_IN_TRANSACTION', 'CRYPT_NEWKEYSET', 'STATUS_DIRECTORY_NOT_RM', 'CERT_FIND_PROPERTY', 'RPC_NT_NOT_RPC_ERROR', 'STATUS_INVALID_LDT_DESCRIPTOR', 'FILE_ACTION_ADDED', 'szOID_NIST_sha512', 'SM_CXMENUCHECK', 'STATUS_ILL_FORMED_PASSWORD', 'CERT_REQUEST_ORIGINATOR_PROP_ID', 'FILE_ATTRIBUTE_READONLY', 'STATUS_MARSHALL_OVERFLOW', 'szOID_RSA_SHA1RSA', 'ERROR_BAD_ACCESSOR_FLAGS', 'SPECIFIC_RIGHTS_ALL', 'STATUS_LOG_CONTAINER_READ_FAILED', 'MEM_COMMIT', 
'FILE_NOTIFY_CHANGE_CREATION', 'STATUS_SXS_INVALID_DEACTIVATION', 'STATUS_MONITOR_INVALID_STANDARD_TIMING_BLOCK', 'szOID_DH_SINGLE_PASS_STDDH_SHA256_KDF', 'STATUS_CORRUPT_SYSTEM_FILE', 'STATUS_GRAPHICS_INVALID_GAMMA_RAMP', 'STATUS_GRAPHICS_INVALID_MONITOR_CAPABILITY_ORIGIN', 'STATUS_CANNOT_MAKE', 'PRODUCT_STORAGE_WORKGROUP_EVALUATION_SERVER', 'STATUS_NO_RANGES_PROCESSED', 'CERT_QUERY_CONTENT_FLAG_SERIALIZED_CERT', 'SYSTEM_RESOURCE_ATTRIBUTE_ACE_TYPE', 'CERT_QUERY_CONTENT_SERIALIZED_CRL', 'STATUS_GRAPHICS_MONITORDESCRIPTOR_ALREADY_IN_SET', 'szOID_RSA_SMIMEalgCMS3DESwrap', 'RRF_RT_DWORD', 'RPC_S_PROTOCOL_ERROR', 'X509_INTEGER', 'STATUS_INVALID_VARIANT', 'szOID_DSALG_HASH', 'STATUS_SAM_NEED_BOOTKEY_FLOPPY', 'STATUS_HANDLE_NO_LONGER_VALID', 'STATUS_WX86_BREAKPOINT', 'DRIVE_RAMDISK', 'STATUS_PAGE_FAULT_PAGING_FILE', 'STATUS_GRAPHICS_OPM_SPANNING_MODE_ENABLED', 'szOID_RSA_RC2CBC', 'SM_CXMENUSIZE', 'CERT_FIND_ISSUER_STR_W', 'STATUS_NDIS_UNSUPPORTED_MEDIA', 'STATUS_RM_NOT_ACTIVE', 'ERROR_DIR_NOT_ROOT', 'STATUS_VOLUME_DISMOUNTED', 'PAGE_EXECUTE_READWRITE', 'STATUS_BAD_INITIAL_PC', 'STATUS_PRENT4_MACHINE_ACCOUNT', 'STATUS_TRANSACTION_NOT_JOINED', 'CMSG_CMS_RECIPIENT_COUNT_PARAM', 'STATUS_GRAPHICS_MODE_NOT_IN_MODESET', 'RPC_NT_UNSUPPORTED_TYPE', 'szOID_ECDSA_SHA1', 'STATUS_ALLOTTED_SPACE_EXCEEDED', 'CERT_QUERY_CONTENT_FLAG_SERIALIZED_STORE', 'szOID_DSALG_CRPT', 'STATUS_CSS_RESETS_EXHAUSTED', 'FILE_SYNCHRONOUS_IO_NONALERT', 'DUPLICATE_CLOSE_SOURCE', 'IMAGE_SCN_ALIGN_4096BYTES', 'DACL_SECURITY_INFORMATION', 'STATUS_FLT_INSTANCE_ALTITUDE_COLLISION', 'OBJ_OPENLINK', 'STATUS_BAD_COMPRESSION_BUFFER', 'STATUS_MEMBER_IN_ALIAS', 'STATUS_AUTHENTICATION_FIREWALL_FAILED', 'STATUS_EA_CORRUPT_ERROR', 'THREAD_IMPERSONATE', 'CMSG_VERIFY_SIGNER_CHAIN', 'PRODUCT_STORAGE_WORKGROUP_SERVER_CORE', 'CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE_ID', 'CERT_TRUST_IS_UNTRUSTED_ROOT', 'STATUS_FLOPPY_BAD_REGISTERS', 'SE_TRUSTED_CREDMAN_ACCESS_NAME', 'SE_OWNER_DEFAULTED', 'ERROR_CORRUPT_SYSTEM_FILE', 'MEM_IMAGE', 'STATUS_IPSEC_INVALID_PACKET', 'STATUS_NOT_ALL_ASSIGNED', 'CRYPT_ACQUIRE_COMPARE_KEY_FLAG', 'CRYPT_E_UNKNOWN_ALGO', 'STATUS_ACCESS_AUDIT_BY_POLICY', 'STATUS_FWP_INVALID_NET_MASK', 'STATUS_FLT_MUST_BE_NONPAGED_POOL', 'ERROR_NETWORK_ACCESS_DENIED', 'STATUS_NETWORK_SESSION_EXPIRED', 'ERROR_INTERRUPT_STILL_CONNECTED', 'PKCS_ATTRIBUTE', 'SM_CXEDGE', 'ERROR_FILENAME_EXCED_RANGE', 'STATUS_FVE_PIN_INVALID', 'EPT_NT_CANT_CREATE', 'AF_PUP', 'ERROR_DIRECT_ACCESS_HANDLE', 'STATUS_FT_WRITE_RECOVERY', 'STATUS_PORT_NOT_SET', 'STATUS_FLOAT_DIVIDE_BY_ZERO', 'ERROR_SYSTEM_HIVE_TOO_LARGE', 'INHERIT_ONLY_ACE', 'FILE_DOES_NOT_EXIST', 'STATUS_IO_REPARSE_TAG_INVALID', 'szOID_RSA_MGF1', 'EVENT_QUERY_STATE', 'STATUS_GRAPHICS_MCA_INTERNAL_ERROR', 'SE_CREATE_SYMBOLIC_LINK_NAME', 'LPC_REPLY', 'ERROR_SEM_IS_SET', 'RPC_NT_SS_IN_NULL_CONTEXT', 'PROCESS_TERMINATE', 'SM_CYMINIMIZED', 'STATUS_FVE_TRANSIENT_STATE', 'RPC_NT_INVALID_NAF_ID', 'STATUS_INVALID_PARAMETER_11', 'STATUS_HMAC_NOT_SUPPORTED', 'ALPC_MSGFLG_RELEASE_MESSAGE', 'ERROR_EVT_INVALID_PUBLISHER_PROPERTY_VALUE', 'WTD_REVOKE_NONE', 'szOID_NIST_AES192_WRAP', 'STATUS_HASH_NOT_PRESENT', 'STATUS_SXS_CANT_GEN_ACTCTX', 'ERROR_DBG_EXCEPTION_NOT_HANDLED', 'STATUS_GRAPHICS_ALLOCATION_CONTENT_LOST', 'STATUS_DS_CANT_MOD_OBJ_CLASS', 'X509_PUBLIC_KEY_INFO', 'DBG_COMMAND_EXCEPTION', 'STATUS_GRAPHICS_MONITORDESCRIPTOR_NOT_IN_SET', 'STATUS_LOG_CLIENT_NOT_REGISTERED', 'STATUS_LOG_BLOCKS_EXHAUSTED', 'NTE_HMAC_NOT_SUPPORTED', 'STATUS_INVALID_OPLOCK_PROTOCOL', 'PRODUCT_HOME_BASIC', 'ALPC_CANCELFLGP_FLUSH', 
'STATUS_DLL_MIGHT_BE_INSECURE', 'STATUS_KDC_CERT_EXPIRED', 'SM_CYSCREEN', 'CERT_STORE_PROV_MSG', 'STATUS_LOGON_TYPE_NOT_GRANTED', 'STATUS_DS_DUPLICATE_ID_FOUND', 'STATUS_NO_S4U_PROT_SUPPORT', 'STATUS_LOG_EPHEMERAL', 'ERROR_INSUFFICIENT_POWER', 'STATUS_LOG_TAIL_INVALID', 'szOID_OIWSEC_dhCommMod', 'STATUS_LOG_PINNED', 'ERROR_EA_ACCESS_DENIED', 'STATUS_CSS_KEY_NOT_PRESENT', 'STATUS_ARBITRATION_UNHANDLED', 'STATUS_ACPI_STACK_OVERFLOW', 'CERT_SYSTEM_STORE_CURRENT_SERVICE_ID', 'STATUS_ACPI_ALREADY_INITIALIZED', 'STATUS_INVALID_CRUNTIME_PARAMETER', 'SACL_SECURITY_INFORMATION', 'STATUS_CLUSTER_NETINTERFACE_EXISTS', 'STATUS_SHARED_IRQ_BUSY', 'PRODUCT_ENTERPRISE_SERVER_CORE_V', 'ERROR_NESTING_NOT_ALLOWED', 'STATUS_FAILED_STACK_SWITCH', 'STATUS_RM_ALREADY_STARTED', 'CREATE_UNICODE_ENVIRONMENT', 'STATUS_ELEVATION_REQUIRED', 'ERROR_NOT_SUBSTED', 'STATUS_IO_REPARSE_TAG_NOT_HANDLED', 'ERROR_BAD_LENGTH', 'RPC_C_AUTHN_LEVEL_CALL', 'STATUS_GRAPHICS_EMPTY_ADAPTER_MONITOR_MODE_SUPPORT_INTERSECTION', 'FILE_SUPPORTS_USN_JOURNAL', 'RPC_NT_NOTHING_TO_EXPORT', 'ERROR_BAD_FORMAT', 'NTE_BAD_PROVIDER', 'szOID_OIWSEC_sha1RSASign', 'ERROR_NOT_REGISTRY_FILE', 'STATUS_TRANSACTION_INVALID_TYPE', 'OS_FASTUSERSWITCHING', 'STATUS_PIPE_LISTENING', 'STATUS_FVE_BAD_INFORMATION', 'ERROR_UNSUPPORTED_COMPRESSION', 'VER_NT_DOMAIN_CONTROLLER', 'RPC_NT_SS_CONTEXT_DAMAGED', 'CMSG_CTRL_DEL_SIGNER', 'SE_SYNC_AGENT_NAME', 'STATUS_OPEN_FAILED', 'FILE_OVERWRITE_IF', 'STATUS_XMLDSIG_ERROR', 'STATUS_GRAPHICS_PRESENT_DENIED', 'SERVICE_INTERROGATE', 'EXPORT_PRIVATE_KEYS', 'STATUS_NDIS_INVALID_PORT', 'STATUS_DS_SAM_INIT_FAILURE_CONSOLE', 'X509_UNICODE_ANY_STRING', 'STATUS_NDIS_INDICATION_REQUIRED', 'STATUS_UNRECOGNIZED_MEDIA', 'STATUS_LOGON_FAILURE', 'IMAGE_FILE_MACHINE_R3000', 'PRODUCT_DATACENTER_EVALUATION_SERVER', 'STATUS_DEVICE_PAPER_EMPTY', 'STATUS_FVE_KEYFILE_NO_VMK', 'STATUS_RM_DISCONNECTED', 'FILE_OPEN_BY_FILE_ID', 'ERROR_TRANSACTION_ALREADY_ABORTED', 'STATUS_INTERNAL_DB_CORRUPTION', 'TOKEN_QUERY', 'STATUS_NO_EVENT_PAIR', 'CERT_FIND_ISSUER_NAME', 'CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT', 'STATUS_NOT_IMPLEMENTED', 'ERROR_ARENA_TRASHED', 'STATUS_INVALID_LABEL', 'RPC_NT_WRONG_ES_VERSION', 'OPEN_EXISTING', 'STATUS_GRAPHICS_INVALID_VIDPN_TOPOLOGY', 'IMAGE_REL_BASED_HIGH', 'FILE_MAP_READ', 'STATUS_FWP_NOTIFICATION_DROPPED', 'STATUS_GRAPHICS_CHAINLINKS_NOT_ENUMERATED', 'ACCESS_MAX_MS_V3_ACE_TYPE', 'PROV_REPLACE_OWF', 'TOKEN_SECURITY_ATTRIBUTE_NON_INHERITABLE', 'STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_SOURCE', 'STATUS_DS_SRC_SID_EXISTS_IN_FOREST', 'STATUS_INVALID_ACE_CONDITION', 'EVENT_MODIFY_STATE', 'STATUS_GRAPHICS_NOT_POST_DEVICE_DRIVER', 'STARTF_RUNFULLSCREEN', 'STATUS_SECTION_TOO_BIG', 'RPC_NT_TYPE_ALREADY_REGISTERED', 'MAX_PATH', 'CERT_QUERY_CONTENT_FLAG_CRL', 'THREAD_QUERY_INFORMATION', 'STATUS_ALREADY_WIN32', 'ERROR_DBG_CONTINUE', 'IMAGE_SCN_LNK_COMDAT', 'SC_MANAGER_QUERY_LOCK_STATUS', 'EPT_NT_NOT_REGISTERED', 'CERT_STORE_PROV_SYSTEM', 'ERROR_META_EXPANSION_TOO_LONG', 'STATUS_VIRTDISK_NOT_VIRTUAL_DISK', 'szOID_INFOSEC_mosaicKeyManagement', 'szOID_INFOSEC_SuiteAKeyManagement', 'STATUS_GRAPHICS_CHAINLINKS_NOT_POWERED_ON', 'STATUS_OFFLOAD_WRITE_FLT_NOT_SUPPORTED', 'CERT_COMPARE_NAME', 'IMAGE_FILE_MACHINE_MIPSFPU16', 'CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT', 'STATUS_ACPI_ACQUIRE_GLOBAL_LOCK', 'FILE_OPEN_REMOTE_INSTANCE', 'ERROR_EVT_QUERY_RESULT_INVALID_POSITION', 'IMAGE_FILE_MACHINE_WCEMIPSV2', 'STATUS_SMARTCARD_NO_KEYSET', 'STATUS_NDIS_ADAPTER_REMOVED', 'ERROR_DELETE_PENDING', 'STATUS_NO_KERB_KEY', 'SM_CXBORDER', 
'STATUS_NETLOGON_NOT_STARTED', 'OS_WIN2000SERVER', 'ERROR_DRIVER_DATABASE_ERROR', 'STATUS_WRONG_PASSWORD', 'STATUS_NDIS_INVALID_OID', 'STATUS_SXS_SECTION_NOT_FOUND', 'STATUS_NO_EFS', 'CERT_TRUST_REVOCATION_STATUS_UNKNOWN', 'SM_REMOTESESSION', 'STATUS_SXS_ASSEMBLY_NOT_FOUND', 'ERROR_TOO_MANY_THREADS', 'szOID_TIMESTAMP_TOKEN', 'CERT_STORE_OPEN_EXISTING_FLAG', 'RPC_NT_CALL_CANCELLED', 'STATUS_CS_ENCRYPTION_UNSUPPORTED_SERVER', 'STATUS_INVALID_DEVICE_STATE', 'STATUS_NO_PA_DATA', 'ERROR_CANTFETCHBACKWARDS', 'STATUS_INSUFF_SERVER_RESOURCES', 'STATUS_ACPI_INVALID_OPCODE', 'CERT_SMART_CARD_DATA_PROP_ID', 'CERT_COMPARE_SUBJECT_CERT', 'STATUS_SMARTCARD_NO_KEY_CONTAINER', 'STATUS_NOT_LOCKED', 'ERROR_SHARING_BUFFER_EXCEEDED', 'ERROR_INVALID_IMAGE_HASH', 'STATUS_FWP_FILTER_NOT_FOUND', 'SM_MOUSEWHEELPRESENT', 'EXTENDED_STARTUPINFO_PRESENT', 'STATUS_PROCESS_IN_JOB', 'STATUS_REQUEST_NOT_ACCEPTED', 'FILE_OVERWRITE', 'SM_CXMAXTRACK', 'VALID_INHERIT_FLAGS', 'ERROR_NO_PROC_SLOTS', 'STGM_WRITE', 'ERROR_EVT_FILTER_INVTEST', 'STATUS_LOG_READ_MODE_INVALID', 'ERROR_ALREADY_ASSIGNED', 'szOID_X957', 'ERROR_BAD_ARGUMENTS', 'STATUS_FLOAT_INEXACT_RESULT', 'STATUS_ADAPTER_HARDWARE_ERROR', 'STATUS_SXS_CORRUPT_ACTIVATION_STACK', 'PRODUCT_MULTIPOINT_STANDARD_SERVER', 'FILE_DIRECTORY_FILE', 'NTE_BAD_SIGNATURE', 'PIPE_NOWAIT', 'ERROR_CHILD_MUST_BE_VOLATILE', 'PAGE_EXECUTE', 'STATUS_FWP_TYPE_MISMATCH', 'TOKEN_ADJUST_SESSIONID', 'CERT_TRUST_IS_SELF_SIGNED', 'STATUS_INVALID_PARAMETER', 'CMSG_CTRL_MAIL_LIST_DECRYPT', 'ERROR_EAS_DIDNT_FIT', 'PROTECTED_DACL_SECURITY_INFORMATION', 'STATUS_GRAPHICS_INVALID_ALLOCATION_USAGE', 'STATUS_DS_OBJ_CLASS_VIOLATION', 'PRODUCT_BUSINESS', 'ERROR_INVALID_LDT_DESCRIPTOR', 'ERROR_MULTIPLE_FAULT_VIOLATION', 'PRODUCT_ENTERPRISE_EVALUATION', 'STATUS_SXS_IDENTITY_PARSE_ERROR', 'STATUS_CRYPTO_SYSTEM_INVALID', 'TRUST_E_FAIL', 'CERT_COMPARE_CROSS_CERT_DIST_POINTS', 'STGM_DELETEONRELEASE', 'STATUS_INVALID_LDT_SIZE', 'SM_SERVERR2', 'STATUS_GRAPHICS_OPM_INVALID_INFORMATION_REQUEST', 'STATUS_INSUFFICIENT_LOGON_INFO', 'STATUS_REPARSE', 'RPC_NT_UNSUPPORTED_AUTHN_LEVEL', 'ERROR_BAD_EXE_FORMAT', 'CERT_STORE_PROV_PHYSICAL_W', 'ERROR_HIBERNATED', 'DEBUG_EVENT_SESSION_STATUS', 'STATUS_GRAPHICS_ONLY_CONSOLE_SESSION_SUPPORTED', 'STATUS_FVE_NO_FEATURE_LICENSE', 'FILE_ADD_SUBDIRECTORY', 'ERROR_LABEL_TOO_LONG', 'CERT_SYSTEM_STORE_LOCAL_MACHINE_ID', 'MS_DEF_RSA_SIG_PROV_W', 'TOKEN_IMPERSONATE', 'ACCESS_ALLOWED_CALLBACK_ACE_TYPE', 'SM_CYMENUSIZE', 'RPC_NT_INVALID_TAG', 'RPC_NT_ADDRESS_ERROR', 'AF_12844', 'STATUS_GRAPHICS_OPM_DRIVER_INTERNAL_ERROR', 'SERVICE_ENUMERATE_DEPENDENTS', 'GENERIC_WRITE', 'STATUS_CTX_WINSTATION_NAME_COLLISION', 'STATUS_PIPE_EMPTY', 'SE_GROUP_ENABLED', 'ERROR_EVENT_DONE', 'TOKEN_SECURITY_ATTRIBUTE_TYPE_OCTET_STRING', 'STATUS_DEVICE_DATA_ERROR', 'szOID_INFOSEC_mosaicUpdatedSig', 'STATUS_NONEXISTENT_SECTOR', 'STATUS_TM_VOLATILE', 'ERROR_RESUME_HIBERNATION', 'STATUS_CLUSTER_NODE_ALREADY_UP', 'STATUS_GRAPHICS_PVP_DISPLAY_DEVICE_NOT_ATTACHED_TO_DESKTOP', 'STATUS_DISK_OPERATION_FAILED', 'ERROR_CLIENT_SERVER_PARAMETERS_INVALID', 'ERROR_EVT_VERSION_TOO_NEW', 'STGFMT_DOCUMENT', 'CMSG_VERIFY_SIGNER_PUBKEY', 'STATUS_SECURITY_STREAM_IS_INCONSISTENT', 'EXCEPTION_NONCONTINUABLE_EXCEPTION', 'STATUS_NDIS_OFFLOAD_PATH_REJECTED', 'STATUS_CRC_ERROR', 'STATUS_FLOATED_SECTION', 'SM_DBCSENABLED', 'OS_WIN98_GOLD', 'STATUS_MONITOR_NO_MORE_DESCRIPTOR_DATA', 'IMAGE_FILE_BYTES_REVERSED_LO', 'ERROR_RANGE_LIST_CONFLICT', 'STATUS_DEVICE_NOT_CONNECTED', 'X509_BASIC_CONSTRAINTS2', 'STATUS_CLUSTER_NETWORK_EXISTS', 
'STATUS_KDC_UNABLE_TO_REFER', 'STATUS_BAD_VALIDATION_CLASS', 'DBG_TERMINATE_PROCESS', 'REG_MULTI_SZ', 'STATUS_FWP_NET_EVENTS_DISABLED', 'STATUS_DS_NO_RIDS_ALLOCATED', 'STATUS_SHORT_NAMES_NOT_ENABLED_ON_VOLUME', 'KEY_WRITE', 'ERROR_OPERATION_ABORTED', 'STATUS_MORE_ENTRIES', 'AF_VOICEVIEW', 'STATUS_REPARSE_POINT_NOT_RESOLVED', 'STATUS_SERVER_SHUTDOWN_IN_PROGRESS', 'szOID_NIST_sha384', 'SM_CXPADDEDBORDER', 'STATUS_INVALID_PARAMETER_MIX', 'SERVICE_WIN32', 'SERVICE_ERROR_IGNORE', 'PRODUCT_SMALLBUSINESS_SERVER_PREMIUM_CORE', 'FILE_FLAG_WRITE_THROUGH', 'STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_CHECKSUM', 'PRODUCT_STORAGE_STANDARD_EVALUATION_SERVER', 'FILE_RESERVE_OPFILTER', 'STATUS_PURGE_FAILED', 'ERROR_WAKE_SYSTEM', 'STATUS_FVE_NO_LICENSE', 'CMSG_DETACHED_FLAG', 'STATUS_LOG_METADATA_INCONSISTENT', 'ERROR_NO_SIGNAL_SENT', 'STATUS_SPECIAL_USER', 'STATUS_INVALID_TRANSACTION', 'szOID_OIWSEC', 'ERROR_INCOMPATIBLE_WITH_GLOBAL_SHORT_NAME_REGISTRY_SETTING', 'CREATE_SHARED_WOW_VDM', 'STATUS_DESTINATION_ELEMENT_FULL', 'SM_CYSMSIZE', 'STATUS_GRAPHICS_NO_MORE_ELEMENTS_IN_DATASET', 'SERVICE_ACCEPT_TRIGGEREVENT', 'ERROR_FIRMWARE_UPDATED', 'CRYPT_ACQUIRE_USE_PROV_INFO_FLAG', 'API_SET_SCHEMA_FLAGS_HOST_EXTENSION', 'STATUS_INVALID_PARAMETER_9', 'RPC_NT_INVALID_STRING_UUID', 'STATUS_GRAPHICS_OPM_ALL_HDCP_HARDWARE_ALREADY_IN_USE', 'FILE_MAP_COPY', 'STATUS_DS_NAME_NOT_UNIQUE', 'STATUS_GRAPHICS_I2C_ERROR_TRANSMITTING_DATA', 'ERROR_ILLEGAL_CHARACTER', 'NTE_BAD_UID', 'FILE_READ_ONLY_VOLUME', 'STATUS_LOG_POLICY_CONFLICT', 'STATUS_INVALID_IMAGE_PROTECT', 'szOID_OIW', 'ERROR_IO_INCOMPLETE', 'STATUS_GRAPHICS_OPM_INTERNAL_ERROR', 'ERROR_INVALID_EA_HANDLE', 'IMAGE_DIRECTORY_ENTRY_EXCEPTION', 'CERT_QUERY_FORMAT_BASE64_ENCODED', 'STATUS_FVE_NOT_DATA_VOLUME', 'STATUS_CTX_CLIENT_LICENSE_NOT_SET', 'STATUS_PAGEFILE_CREATE_FAILED', 'ALPC_MESSAGE_SECURITY_ATTRIBUTE', 'PROV_MS_EXCHANGE', 'CREATE_FORCEDOS', 'szOID_OIWSEC_md4RSA2', 'STATUS_MONITOR_INVALID_USER_FRIENDLY_MONDSC_BLOCK', 'SYSTEM_MANDATORY_LABEL_ACE_TYPE', 'ERROR_CAN_NOT_COMPLETE', 'PRODUCT_CORE_ARM', 'STATUS_SINGLE_STEP', 'STATUS_OBJECT_NAME_INVALID', 'STATUS_ACCOUNT_RESTRICTION', 'TH32CS_SNAPPROCESS', 'STATUS_CLUSTER_NETWORK_NOT_FOUND', 'STATUS_SXS_INVALID_IDENTITY_ATTRIBUTE_VALUE', 'SM_CXMINTRACK', 'STATUS_CTX_MODEM_RESPONSE_BUSY', 'CERT_E_EXPIRED', 'ERROR_PRINT_CANCELLED', 'STATUS_DS_GC_REQUIRED', 'RPC_NT_NOT_ALL_OBJS_UNEXPORTED', 'ERROR_BAD_THREADID_ADDR', 'CERT_STORE_PROV_PHYSICAL', 'szOID_OIWSEC_rsaXchg', 'STATUS_SERVER_NOT_DISABLED', 'STATUS_LOG_CONTAINER_OPEN_FAILED', 'szOID_ECC_CURVE_P384', 'CMSG_INDEFINITE_LENGTH', 'ERROR_CONVERT_TO_LARGE', 'ERROR_INVALID_EVENT_COUNT', 'STATUS_GRAPHICS_INVALID_POINTER', 'STATUS_THREADPOOL_FREE_LIBRARY_ON_COMPLETION_FAILED', 'DBG_APP_NOT_IDLE', 'RPC_NT_SS_CHAR_TRANS_SHORT_FILE', 'PRODUCT_DATACENTER_SERVER_V', 'STATUS_VIRTDISK_PROVIDER_NOT_FOUND', 'STATUS_ADDRESS_ALREADY_ASSOCIATED', 'STATUS_NETWORK_OPEN_RESTRICTION', 'CMSG_CRYPT_RELEASE_CONTEXT_FLAG', 'FILE_EXECUTE', 'S_FALSE', 'AF_OSI', 'ERROR_SEM_OWNER_DIED', 'ERROR_BAD_MCFG_TABLE', 'RPC_NT_PROXY_ACCESS_DENIED', 'EPT_NT_INVALID_ENTRY', 'NTE_NO_MORE_ITEMS', 'STATUS_CLUSTER_NODE_EXISTS', 'ERROR_EVT_INVALID_CHANNEL_PATH', 'ERROR_KEY_DELETED', 'STATUS_ACPI_NOT_INITIALIZED', 'STATUS_GRAPHICS_PVP_MIRRORING_DEVICES_NOT_SUPPORTED', 'PRODUCT_SERVER_FOR_SB_SOLUTIONS', 'STATUS_MUTANT_NOT_OWNED', 'SM_CXDRAG', 'STATUS_INVALID_LDT_OFFSET', 'STATUS_VARIABLE_NOT_FOUND', 'WTD_CHOICE_SIGNER', 'OS_DATACENTER', 'ALPC_MESSAGE_DIRECT_ATTRIBUTE', 'ERROR_FAIL_NOACTION_REBOOT', 
'TOKEN_MANDATORY_POLICY_NO_WRITE_UP', 'NTE_NO_KEY', 'SM_PENWINDOWS', 'STATUS_MUI_INVALID_LOCALE_NAME', 'CERT_QUERY_CONTENT_PKCS7_SIGNED', 'STATUS_NDIS_MULTICAST_FULL', 'ACCESS_MAX_MS_V5_ACE_TYPE', 'STATUS_CONNECTION_IN_USE', 'STATUS_CTX_SHADOW_NOT_RUNNING', 'OS_WIN2000TERMINAL', 'STATUS_DS_CANT_ON_NON_LEAF', 'FILE_APPEND_DATA', 'STATUS_SXS_THREAD_QUERIES_DISABLED', 'SERVICE_TYPE_ALL', 'STATUS_NO_SUCH_PACKAGE', 'RPC_NT_PROTSEQ_NOT_SUPPORTED', 'ERROR_FAIL_I24', 'STATUS_LOGIN_TIME_RESTRICTION', 'STATUS_CALLBACK_RETURNED_TRANSACTION', 'STATUS_TOO_MANY_LUIDS_REQUESTED', 'SERVICE_CONTROL_CONTINUE', 'CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT', 'STATUS_FWP_CANNOT_PEND', 'SM_CYMAXIMIZED', 'X509_KEY_USAGE', 'FILE_FLAG_DELETE_ON_CLOSE', 'GENERIC_EXECUTE', 'STATUS_GRAPHICS_INVALID_VIDPN', 'STATUS_ILLEGAL_DLL_RELOCATION', 'CERT_TRUST_IS_NOT_TIME_NESTED', 'RPC_NT_SERVER_UNAVAILABLE', 'ERROR_RXACT_COMMITTED', 'ERROR_IS_SUBST_PATH', 'STATUS_LOG_RESIZE_INVALID_SIZE', 'STATUS_INVALID_IMAGE_LE_FORMAT', 'FILE_ACTION_RENAMED_OLD_NAME', 'CERT_QUERY_CONTENT_FLAG_PFX', 'STATUS_FLOAT_DENORMAL_OPERAND', 'PRODUCT_SOLUTION_EMBEDDEDSERVER', 'FILE_NOTIFY_CHANGE_DIR_NAME', 'SM_CYVTHUMB', 'STATUS_MEMBER_NOT_IN_GROUP', 'STATUS_DATATYPE_MISALIGNMENT', 'STATUS_SXS_EARLY_DEACTIVATION', 'IMAGE_REL_BASED_MACHINE_SPECIFIC_8', 'STATUS_INSUFFICIENT_NVRAM_RESOURCES', 'STATUS_FAIL_CHECK', 'CERT_TRUST_INVALID_NAME_CONSTRAINTS', 'TOKEN_ADJUST_PRIVILEGES', 'ERROR_THREAD_MODE_NOT_BACKGROUND', 'MEM_FREE', 'STATUS_CANNOT_ABORT_TRANSACTIONS', 'STATUS_MEMBERS_PRIMARY_GROUP', 'CERT_STORE_ENUM_ARCHIVED_FLAG', 'FILE_MAP_ALL_ACCESS', 'LPC_LOST_REPLY', 'CERT_STORE_MAXIMUM_ALLOWED_FLAG', 'STATUS_LOG_PINNED_ARCHIVE_TAIL', 'PRODUCT_SMALLBUSINESS_SERVER', 'CREATE_NEW', 'CERT_STORE_PROV_FILE', 'szOID_RSA_SHA512RSA', 'IMAGE_REL_BASED_MACHINE_SPECIFIC_7', 'PRODUCT_HYPERV', 'SW_SHOWDEFAULT', 'szOID_OIWDIR', 'STGM_SIMPLE', 'NTE_DOUBLE_ENCRYPT', 'TOKEN_SECURITY_ATTRIBUTE_TYPE_STRING', 'CRYPT_ONLINE', 'STATUS_GRAPHICS_UAB_NOT_SUPPORTED', 'STATUS_RESOURCE_NAME_NOT_FOUND', 'szOID_INFOSEC_mosaicIntegrity', 'FILE_OPEN_REPARSE_POINT', 'DIRECTORY_CREATE_SUBDIRECTORY', 'SE_DACL_AUTO_INHERITED', 'IMAGE_FILE_MACHINE_ALPHA64', 'IO_COMPLETION_QUERY_STATE', 'STATUS_NDIS_MEDIA_DISCONNECTED', 'STATUS_RECEIVE_EXPEDITED', 'STATUS_FWP_CONTEXT_INCOMPATIBLE_WITH_CALLOUT', 'FILE_ATTRIBUTE_NOT_CONTENT_INDEXED', 'STATUS_GRAPHICS_INVALID_COLORBASIS', 'SE_MANAGE_VOLUME_NAME', 'ERROR_MISSING_SYSTEMFILE', 'STATUS_BAD_LOGON_SESSION_STATE', 'X509_NAME', 'STATUS_PORT_CONNECTION_REFUSED', 'FILE_NO_COMPRESSION', 'STATUS_INVALID_WORKSTATION', 'IMAGE_SCN_TYPE_DSECT', 'AF_CHAOS', 'RPC_NT_DUPLICATE_ENDPOINT', 'STATUS_NOT_SAFE_MODE_DRIVER', 'MS_ENH_RSA_AES_PROV_W', 'ERROR_RECEIVE_EXPEDITED', 'PAGE_NOCACHE', 'STATUS_GRAPHICS_DDCCI_INVALID_CAPABILITIES_STRING', 'CRYPT_VOLATILE', 'CERT_STORE_PROV_LDAP', 'ERROR_EVT_NON_VALIDATING_MSXML', 'PROV_EC_ECDSA_FULL', 'ERROR_IS_JOIN_TARGET', 'STATUS_RESTART_BOOT_APPLICATION', 'STATUS_EAS_NOT_SUPPORTED', 'STATUS_ACPI_INVALID_SUPERNAME', 'STATUS_EOM_OVERFLOW', 'ERROR_MEMORY_HARDWARE', 'CERT_QUERY_CONTENT_FLAG_SERIALIZED_CTL', 'STATUS_FWP_INCOMPATIBLE_AUTH_METHOD', 'STATUS_GRAPHICS_GPU_EXCEPTION_ON_DEVICE', 'ERROR_WX86_ERROR', 'STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_COMMAND', 'STATUS_RXACT_COMMITTED', 'ERROR_INVALID_VERIFY_SWITCH', 'ERROR_BADDB', 'RPC_NT_CALL_IN_PROGRESS', 'STATUS_INSTANCE_NOT_AVAILABLE', 'CREATE_NEW_PROCESS_GROUP', 'ERROR_VC_DISCONNECTED', 'CMSG_ENVELOPE_ALGORITHM_PARAM', 'DEBUG_EVENT_CHANGE_ENGINE_STATE', 'IMAGE_SCN_ALIGN_32BYTES', 
'NTE_SILENT_CONTEXT', 'TXFS_MINIVERSION_DIRTY_VIEW', 'IMAGE_FILE_DEBUG_STRIPPED', 'PRODUCT_STORAGE_WORKGROUP_SERVER', 'CMSG_RECIPIENT_INFO_PARAM', 'TH32CS_SNAPMODULE', 'STATUS_THREAD_IS_TERMINATING', 'STATUS_NO_IMPERSONATION_TOKEN', 'RPC_NT_INVALID_NAME_SYNTAX', 'szOID_OIWSEC_mdc2', 'PRODUCT_PROFESSIONAL_WMC', 'CERT_NAME_RDN_TYPE', 'SM_CYDLGFRAME', 'FILE_OPEN_FOR_FREE_SPACE_QUERY', 'STATUS_FWP_TCPIP_NOT_READY', 'STATUS_CLUSTER_POISONED', 'NTE_KEYSET_NOT_DEF', 'ERROR_VOLUME_MOUNTED', 'STATUS_LOG_STATE_INVALID', 'CMSG_ENCRYPT_PARAM', 'ERROR_WRITE_FAULT', 'RPC_NT_UNKNOWN_AUTHN_SERVICE', 'STATUS_PIPE_CLOSING', 'FILE_FILE_COMPRESSION', 'STATUS_DISK_REPAIR_DISABLED', 'STATUS_GRAPHICS_STALE_MODESET', 'STATUS_LOGON_NOT_GRANTED', 'STATUS_CTX_MODEM_RESPONSE_NO_DIALTONE', 'STATUS_INVALID_COMPUTER_NAME', 'CERT_RENEWAL_PROP_ID', 'STATUS_ILLEGAL_INSTRUCTION', 'STATUS_NOT_FOUND', 'STATUS_CHECKING_FILE_SYSTEM', 'CERT_QUERY_CONTENT_SERIALIZED_CTL', 'SW_HIDE', 'STATUS_FVE_DRY_RUN_FAILED', 'szOID_OIWSEC_desECB', 'MS_STRONG_PROV_W', 'ERROR_UNEXPECTED_MM_EXTEND_ERR', 'STATUS_INVALID_PARAMETER_4', 'PRODUCT_SB_SOLUTION_SERVER_EM', 'CLUSAPI_ALL_ACCESS', 'STATUS_FVE_DEBUGGER_ENABLED', 'STATUS_DS_SHUTTING_DOWN', 'STATUS_GRAPHICS_POLLING_TOO_FREQUENTLY', 'szOID_RSA_signingTime', 'STATUS_GRAPHICS_CANNOTCOLORCONVERT', 'ERROR_LOST_WRITEBEHIND_DATA_NETWORK_SERVER_ERROR', 'STATUS_NO_SUCH_FILE', 'STATUS_IPSEC_REPLAY_CHECK_FAILED', 'CERT_E_PURPOSE', 'RPC_C_QOS_IDENTITY_DYNAMIC', 'ERROR_NO_TOKEN', 'STATUS_NO_TGT_REPLY', 'CERT_E_REVOKED', 'szOID_OIWSEC_dsaCommSHA1', 'STATUS_FWP_LIFETIME_MISMATCH', 'FILE_SUPPORTS_REPARSE_POINTS', 'STATUS_DRIVER_ENTRYPOINT_NOT_FOUND', 'IMAGE_SCN_TYPE_COPY', 'ERROR_VALIDATE_CONTINUE', 'STATUS_INVALID_PARAMETER_2', 'ERROR_SUBST_TO_JOIN', 'LPC_CONNECTION_REQUEST', 'STATUS_GRAPHICS_INVALID_COPYPROTECTION_TYPE', 'RPC_RESPONSE_TYPE_FAIL', 'SE_GROUP_OWNER', 'EXCEPTION_DEBUG_EVENT', 'REPORT_NOT_ABLE_TO_EXPORT_PRIVATE_KEY', 'SM_CXSMSIZE', 'STATUS_VOLSNAP_HIBERNATE_READY', 'IMAGE_DIRECTORY_ENTRY_IAT', 'TOKEN_SECURITY_ATTRIBUTE_DISABLED_BY_DEFAULT', 'CMSG_CRL_COUNT_PARAM', 'SERVICE_CONTROL_PAUSE', 'FILE_SUPERSEDED', 'STATUS_HOST_UNREACHABLE', 'CERT_STORE_PROV_SMART_CARD_W', 'CRYPT_ARCHIVABLE', 'ERROR_CHILD_NOT_COMPLETE', 'STATUS_RANGE_NOT_FOUND', 'SYSTEM_MANDATORY_LABEL_NO_READ_UP', 'STATUS_TRUSTED_RELATIONSHIP_FAILURE', 'CERT_TRUST_NO_ERROR', 'STATUS_TOO_MANY_PAGING_FILES', 'STATUS_ACPI_INVALID_MUTEX_LEVEL', 'STATUS_DS_INVALID_GROUP_TYPE', 'STATUS_CTX_LICENSE_EXPIRED', 'STATUS_IPSEC_AUTH_FIREWALL_DROP', 'FILE_VALID_MAILSLOT_OPTION_FLAGS', 'CERT_FIND_CROSS_CERT_DIST_POINTS', 'STATUS_GRAPHICS_NO_RECOMMENDED_FUNCTIONAL_VIDPN', 'STATUS_USER_MAPPED_FILE', 'ERROR_NOT_ALL_ASSIGNED', 'ERROR_CANTREAD', 'ERROR_VIRUS_DELETED', 'FAILED_ACCESS_ACE_FLAG', 'RPC_NT_SEC_PKG_ERROR', 'IMAGE_SCN_ALIGN_MASK', 'NTE_BAD_TYPE', 'X509_KEYGEN_REQUEST_TO_BE_SIGNED', 'ERROR_FULLSCREEN_MODE', 'STATUS_COMMITMENT_LIMIT', 'NTE_BAD_FLAGS', 'CERT_STORE_SHARE_CONTEXT_FLAG', 'CERT_AIA_URL_RETRIEVED_PROP_ID', 'ERROR_LOST_WRITEBEHIND_DATA_LOCAL_DISK_ERROR', 'AF_IPX', 'SERVICE_ACCEPT_PARAMCHANGE', 'OS_TERMINALCLIENT', 'CERT_STORE_BACKUP_RESTORE_FLAG', 'STATUS_IMAGE_CHECKSUM_MISMATCH', 'SM_IMMENABLED', 'STILL_ACTIVE', 'STATUS_REQUEST_CANCELED', 'PRODUCT_CORE_LANGUAGESPECIFIC', 'CERT_STORE_CTL_CONTEXT', 'STATUS_EVENTLOG_CANT_START', 'NTE_BAD_HASH', 'TOKEN_SECURITY_ATTRIBUTE_TYPE_BOOLEAN', 'STATUS_WAS_UNLOCKED', 'OS_WELCOMELOGONUI', 'CREATE_PROCESS_DEBUG_EVENT', 'STATUS_NOLOGON_WORKSTATION_TRUST_ACCOUNT', 'CERT_NAME_UPN_TYPE', 
'SERVICE_ERROR_SEVERE', 'STATUS_RESOURCEMANAGER_NOT_FOUND', 'STATUS_INTERRUPT_VECTOR_ALREADY_CONNECTED', 'STATUS_INVALID_IMAGE_WIN_64', 'STATUS_IMAGE_ALREADY_LOADED_AS_DLL', 'STATUS_GRAPHICS_MODE_ID_MUST_BE_UNIQUE', 'szOID_ECDSA_SHA256', 'ERROR_INVALID_PARAMETER', 'STATUS_REGISTRY_HIVE_RECOVERED', 'OBJECT_INHERIT_ACE', 'DBG_NO_STATE_CHANGE', 'STATUS_FILE_RENAMED', 'szOID_RSA_MD4RSA', 'CMSG_CTRL_ADD_ATTR_CERT', 'STATUS_NOT_CAPABLE', 'ERROR_CANT_TERMINATE_SELF', 'STATUS_NO_SAVEPOINT_WITH_OPEN_FILES', 'SW_SHOWNOACTIVATE', 'PKCS12_INCLUDE_EXTENDED_PROPERTIES', 'RPC_REQUEST_TYPE_BIND', 'NTE_INVALID_HANDLE', 'STATUS_BUS_RESET', 'STATUS_FLT_INVALID_NAME_REQUEST', 'STATUS_CLUSTER_NODE_PAUSED', 'PROCESS_DUP_HANDLE', 'STATUS_PROTOCOL_UNREACHABLE', 'ERROR_COMPRESSION_DISABLED', 'FILE_FLAG_BACKUP_SEMANTICS', 'STATUS_OBJECTID_NOT_FOUND', 'ERROR_OPEN_FAILED', 'CRYPT_CREATE_IV', 'PRODUCT_SMALLBUSINESS_SERVER_PREMIUM', 'STATUS_FVE_NOT_OS_VOLUME', 'FILE_VOLUME_QUOTAS', 'NTE_PROV_TYPE_NOT_DEF', 'RPC_C_AUTHN_LEVEL_DEFAULT', 'STATUS_FWP_MATCH_TYPE_MISMATCH', 'REG_EXPAND_SZ', 'CRYPT_E_ATTRIBUTES_MISSING', 'FILE_VALID_PIPE_OPTION_FLAGS', 'STATUS_ACCESS_DISABLED_BY_POLICY_PUBLISHER', 'CERT_FIND_SUBJECT_NAME', 'RPC_NT_NOT_CANCELLED', 'STATUS_FWP_OUT_OF_BOUNDS', 'FILE_READ_DATA', 'EXCEPTION_FLT_DENORMAL_OPERAND', 'SE_SHUTDOWN_NAME', 'szOID_OIWSEC_sha1', 'STATUS_CTX_NO_OUTBUF', 'RPC_NT_BINDING_INCOMPLETE', 'SM_CYFRAME', 'STATUS_CTX_LICENSE_CLIENT_INVALID', 'STATUS_NO_MEDIA', 'STATUS_SOME_NOT_MAPPED', 'STATUS_FLT_NO_WAITER_FOR_REPLY', 'STATUS_BAD_NETWORK_PATH', 'szOID_RSA_RC4', 'FILE_GENERIC_EXECUTE', 'EXCEPTION_STACK_OVERFLOW', 'szOID_RSA_ENCRYPT', 'STATUS_SXS_MANIFEST_IDENTITY_SAME_BUT_CONTENTS_DIFFERENT', 'PRODUCT_ENTERPRISE_SERVER_CORE', 'STATUS_AMBIGUOUS_SYSTEM_DEVICE', 'szOID_OIWSEC_dsa', 'SERVICE_DISABLED', 'STATUS_TRANSACTION_SCOPE_CALLBACKS_NOT_SET', 'FILE_NOTIFY_CHANGE_LAST_WRITE', 'STATUS_CTX_WINSTATION_ACCESS_DENIED', 'STATUS_ARRAY_BOUNDS_EXCEEDED', 'CERT_COMPARE_ATTR', 'STATUS_OBJECT_TYPE_MISMATCH', 'STATUS_FVE_POLICY_USER_DISABLE_RDV_NOT_ALLOWED', 'STATUS_GRAPHICS_RESOURCES_NOT_RELATED', 'PRODUCT_ESSENTIALBUSINESS_SERVER_ADDLSVC', 'CWCSTORAGENAME', 'IMAGE_FILE_LOCAL_SYMS_STRIPPED', 'CERT_STORE_CERTIFICATE_CONTEXT', 'CERT_ENHKEY_USAGE_PROP_ID', 'SE_PRIVILEGE_ENABLED', 'IDLE_PRIORITY_CLASS', 'STATUS_NO_TRACKING_SERVICE', 'STATUS_SECTION_NOT_IMAGE', 'STATUS_IPSEC_DOSP_BLOCK', 'STATUS_FIRMWARE_UPDATED', 'STATUS_PROFILING_AT_LIMIT', 'STATUS_PAGE_FAULT_GUARD_PAGE', 'STATUS_FVE_FAILED_BAD_FS', 'MAX_INTERFACE_NAME_LEN', 'STATUS_HEAP_CORRUPTION', 'ERROR_TOO_MANY_NAMES', 'STATUS_CONTROL_C_EXIT', 'INHERITED_ACE', 'STATUS_SETMARK_DETECTED', 'PRODUCT_STORAGE_ENTERPRISE_SERVER', 'STATUS_MOUNT_POINT_NOT_RESOLVED', 'STATUS_GRAPHICS_INVALID_VIDPN_SOURCEMODESET', 'STATUS_RESOURCE_ENUM_USER_STOP', 'ERROR_PATH_BUSY', 'ERROR_THREAD_MODE_ALREADY_BACKGROUND', 'szOID_OIWSEC_md2RSASign', 'MAX_ADAPTER_NAME', 'OS_TABLETPC', 'STATUS_GRAPHICS_TARGET_ID_MUST_BE_UNIQUE', 'ERROR_TOO_MANY_POSTS', 'STATUS_GRAPHICS_INVALID_PATH_IMPORTANCE_ORDINAL', 'CONTEXT_SEGMENTS', 'STATUS_LOG_START_OF_LOG', 'ERROR_NO_MORE_FILES', 'STATUS_IPSEC_DOSP_STATE_LOOKUP_FAILED', 'STGM_SHARE_DENY_READ', 'SECURITY_MANDATORY_HIGH_RID', 'CERT_QUERY_CONTENT_PKCS7_UNSIGNED', 'DBG_EXCEPTION_HANDLED', 'STATUS_ADVANCED_INSTALLER_FAILED', 'IMAGE_FILE_MACHINE_AMD64', 'ALPC_MSGFLG_LPC_MODE', 'ERROR_NET_OPEN_FAILED', 'CRYPT_CREATE_SALT', 'FILE_OPEN_NO_RECALL', 'RPC_NT_NO_ENTRY_NAME', 'szOID_ANSI_X942', 'JOB_OBJECT_SET_ATTRIBUTES', 'IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR', 
'STATUS_WAIT_3', 'STATUS_VIRUS_INFECTED', 'STATUS_WAIT_1', 'SECURITY_MANDATORY_UNTRUSTED_RID', 'STATUS_INVALID_IMAGE_HASH', 'CRYPT_E_OID_FORMAT', 'CERT_CHAIN_REVOCATION_CHECK_CHAIN_EXCLUDE_ROOT', 'STATUS_TRUST_FAILURE', 'STATUS_CTX_WINSTATION_NAME_INVALID', 'SW_MAXIMIZE', 'EXCEPTION_INT_DIVIDE_BY_ZERO', 'STG_LAYOUT_INTERLEAVED', 'IMAGE_FILE_SYSTEM', 'EVT_VARIANT_TYPE_MASK', 'AF_DLI', 'RPC_NT_UUID_NO_ADDRESS', 'STATUS_EA_TOO_LARGE', 'STATUS_USER2USER_REQUIRED', 'STATUS_MUI_INVALID_ULTIMATEFALLBACK_NAME', 'CERT_QUERY_CONTENT_SERIALIZED_STORE', 'CERT_FIND_KEY_IDENTIFIER', 'X509_AUTHORITY_KEY_ID', 'API_SET_SCHEMA_ENTRY_FLAGS_EXTENSION', 'STATUS_TIME_DIFFERENCE_AT_DC', 'CMSG_CTRL_KEY_AGREE_DECRYPT', 'IMAGE_SCN_ALIGN_2048BYTES', 'STATUS_GRAPHICS_INVALID_VISIBLEREGION_SIZE', 'ERROR_THREAD_NOT_IN_PROCESS', 'PKCS12_ALWAYS_CNG_KSP', 'CERT_SYSTEM_STORE_USERS', 'OBJ_FORCE_ACCESS_CHECK', 'STATUS_VOLMGR_MIRROR_NOT_SUPPORTED', 'ERROR_EXE_CANNOT_MODIFY_STRONG_SIGNED_BINARY', 'RPC_NT_INVALID_ES_ACTION', 'STATUS_PIPE_BROKEN', 'ERROR_EVT_EVENT_DEFINITION_NOT_FOUND', 'CERT_COMPARE_SIGNATURE_HASH', 'szOID_INFOSEC_sdnsConfidentiality', 'STATUS_WMI_ITEMID_NOT_FOUND', 'STATUS_FVE_TOO_SMALL', 'RPC_NT_COMM_FAILURE', 'ERROR_BAD_FILE_TYPE', 'ERROR_PNP_RESTART_ENUMERATION', 'NTE_DECRYPTION_FAILURE', 'ERROR_HANDLE_EOF', 'STATUS_CLUSTER_JOIN_NOT_IN_PROGRESS', 'STATUS_QUOTA_EXCEEDED', 'DBG_CONTROL_C', 'ERROR_RXACT_STATE_CREATED', 'STATUS_FLT_NO_DEVICE_OBJECT', 'RSA_CSP_PUBLICKEYBLOB', 'RPC_X_BAD_STUB_DATA', 'KEY_WOW64_RES', 'ERROR_TOO_MANY_MUXWAITERS', 'STATUS_PAGEFILE_QUOTA_EXCEEDED', 'CONTEXT_EXTENDED_REGISTERS', 'SE_TCB_NAME', 'ALPC_PORFLG_WAITABLE_PORT', 'STATUS_TRANSACTION_TIMED_OUT', 'ERROR_DBG_REPLY_LATER', 'ERROR_PNP_REBOOT_REQUIRED', 'STATUS_COPY_PROTECTION_FAILURE', 'X509_NDR_ENCODING', 'STATUS_NDIS_PM_PROTOCOL_OFFLOAD_LIST_FULL', 'THREAD_SET_THREAD_TOKEN', 'ERROR_DRIVE_LOCKED', 'STATUS_FLT_INSTANCE_NOT_FOUND', 'CERT_FIND_HASH', 'ERROR_ACCESS_AUDIT_BY_POLICY', 'STATUS_GRAPHICS_SPECIFIED_CHILD_ALREADY_CONNECTED', 'AF_NETDES', 'ERROR_BAD_COMMAND', 'AF_SNA', 'STATUS_NDIS_CLOSING', 'CERT_QUERY_CONTENT_FLAG_PFX_AND_LOAD', 'STATUS_MONITOR_INVALID_MANUFACTURE_DATE', 'STATUS_FWP_TXN_ABORTED', 'szOID_RSA_SSA_PSS', 'PROV_RSA_FULL', 'SM_RESERVED4', 'SM_RESERVED1', 'SM_RESERVED3', 'SM_RESERVED2', 'STATUS_TRANSACTION_NOT_REQUESTED', 'szOID_X957_DSA', 'STATUS_CANNOT_DELETE', 'RPC_C_QOS_CAPABILITIES_MAKE_FULLSIC', 'STATUS_MAPPED_FILE_SIZE_ZERO', 'STATUS_PLUGPLAY_QUERY_VETOED', 'szOID_RSA_MD5RSA', 'CERT_TRUST_IS_OFFLINE_REVOCATION', 'SE_UNDOCK_NAME', 'szOID_INFOSEC_mosaicUpdatedInteg', 'CERT_TRUST_INVALID_EXTENSION', 'STATUS_CTX_CLOSE_PENDING', 'CERT_STORE_NO_CRYPT_RELEASE_FLAG', 'STATUS_FVE_VOLUME_NOT_BOUND', 'PROTECTED_SACL_SECURITY_INFORMATION', 'SYSTEM_ALARM_CALLBACK_ACE_TYPE', 'STATUS_SMARTCARD_CARD_BLOCKED', 'STATUS_INVALID_IMAGE_FORMAT', 'STATUS_FILE_IS_OFFLINE', 'ERROR_LONGJUMP', 'CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY', 'CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY', 'STATUS_UNEXPECTED_NETWORK_ERROR', 'szOID_OIWSEC_dsaCommSHA', 'MAILSLOT_NO_MESSAGE', 'STATUS_LOG_METADATA_INVALID', 'STATUS_GUID_SUBSTITUTION_MADE', 'STATUS_PRINT_QUEUE_FULL', 'STATUS_NDIS_INVALID_DATA', 'szOID_RSA_data', 'PRODUCT_STORAGE_ENTERPRISE_SERVER_CORE', 'ERROR_WAKE_SYSTEM_DEBUGGER', 'STATUS_CRED_REQUIRES_CONFIRMATION', 'IMAGE_SCN_MEM_PRELOAD', 'STATUS_EXTRANEOUS_INFORMATION', 'STATUS_TRANSACTION_OBJECT_EXPIRED', 'CERT_E_UNTRUSTEDROOT', 'STATUS_WX86_CONTINUE', 'CERT_E_UNTRUSTEDCA', 'STATUS_DOMAIN_EXISTS', 'FILE_ATTRIBUTE_DEVICE', 
'STATUS_DIRECTORY_NOT_EMPTY', 'NMPWAIT_NOWAIT', 'STATUS_FLT_CBDQ_DISABLED', 'OS_PERSONALTERMINALSERVER', 'CERT_ARCHIVED_KEY_HASH_PROP_ID', 'RPC_NT_INVALID_RPC_PROTSEQ', 'STATUS_NETWORK_ACCESS_DENIED', 'STATUS_CANNOT_IMPERSONATE', 'ERROR_APP_INIT_FAILURE', 'STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGE_CONSTRAINT', 'STATUS_PWD_TOO_SHORT', 'szOID_OIWSEC_md5RSA', 'REG_LINK', 'STATUS_BUFFER_OVERFLOW', 'STATUS_LOG_SECTOR_INVALID', 'STATUS_CALLBACK_POP_STACK', 'SERVICE_TRIGGER_TYPE_NETWORK_ENDPOINT', 'FILE_ATTRIBUTE_ENCRYPTED', 'STATUS_FVE_FAILED_AUTHENTICATION', 'STATUS_DATA_ERROR', 'CERT_FRIENDLY_NAME_PROP_ID', 'STATUS_PIPE_NOT_AVAILABLE', 'IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT', 'STATUS_NO_LDT', 'CLUSAPI_NO_ACCESS', 'STATUS_TRANSACTION_RESPONDED', 'CMSG_HASH_ALGORITHM_PARAM', 'STATUS_GRAPHICS_DDCCI_INVALID_DATA', 'RPC_NT_ALREADY_REGISTERED', 'STATUS_NDIS_DOT11_POWER_STATE_INVALID', 'STARTF_USESIZE', 'OBJECT_TYPE_ALL_ACCESS', 'FILE_FLAG_OPEN_REPARSE_POINT', 'STATUS_PRIMARY_TRANSPORT_CONNECT_FAILED', 'STATUS_REMOTE_RESOURCES', 'IMAGE_REL_BASED_RESERVED', 'OBJ_OPENIF', 'STATUS_CHILD_MUST_BE_VOLATILE', 'STATUS_LOG_ARCHIVE_NOT_IN_PROGRESS', 'FILE_ATTRIBUTE_HIDDEN', 'CERT_E_CN_NO_MATCH', 'STATUS_END_OF_MEDIA', 'OS_PROFESSIONAL', 'ERROR_FILE_NOT_FOUND', 'STATUS_VDM_HARD_ERROR', 'SERVICE_WIN32_OWN_PROCESS', 'ERROR_UNWIND_CONSOLIDATE', 'RPC_C_AUTHN_LEVEL_PKT_INTEGRITY', 'STATUS_MEMORY_NOT_ALLOCATED', 'ERROR_ILLEGAL_FLOAT_CONTEXT', 'BIND_IF_SYNTAX_NDR32', 'ERROR_CRC', 'SM_MOUSEPRESENT', 'STATUS_PROCESS_CLONED', 'ACCESS_ALLOWED_OBJECT_ACE_TYPE', 'ERROR_THREAD_1_INACTIVE', 'CERT_STORE_PROV_REG', 'ERROR_SERVER_SID_MISMATCH', 'STATUS_NO_IP_ADDRESSES', 'ERROR_DIRECTORY', 'EXCEPTION_SINGLE_STEP', 'CERT_AUTO_ENROLL_PROP_ID', 'STATUS_GRAPHICS_ADAPTER_MUST_HAVE_AT_LEAST_ONE_TARGET', 'ERROR_ABANDONED_WAIT_63', 'STATUS_FILE_NOT_ENCRYPTED', 'ERROR_VOLSNAP_PREPARE_HIBERNATE', 'STATUS_PORT_ALREADY_HAS_COMPLETION_LIST', 'SERVICE_CONTROL_STOP', 'SEC_WINNT_AUTH_IDENTITY_ANSI', 'ERROR_SYSTEM_PROCESS_TERMINATED', 'STATUS_DS_NO_MORE_RIDS', 'DELETE', 'MS_STRONG_PROV_A', 'STATUS_TRANSACTION_PROPAGATION_FAILED', 'SE_DEBUG_NAME', 'ERROR_FILE_TOO_LARGE', 'STATUS_SYNCHRONIZATION_REQUIRED', 'IMAGE_FILE_32BIT_MACHINE', 'STATUS_INVALID_HANDLE', 'STATUS_ALREADY_REGISTERED', 'STATUS_NETWORK_UNREACHABLE', 'STATUS_DS_INIT_FAILURE', 'CRYPT_ACQUIRE_ONLY_NCRYPT_KEY_FLAG', 'STATUS_GRAPHICS_INVALID_PHYSICAL_MONITOR_HANDLE', 'STATUS_ROLLBACK_TIMER_EXPIRED', 'SM_CXVSCROLL', 'SE_SACL_AUTO_INHERIT_REQ', 'ERROR_NOT_OWNER', 'STATUS_REMOTE_NOT_LISTENING', 'PROFILE_KERNEL', 'STATUS_CANT_CREATE_MORE_STREAM_MINIVERSIONS', 'STATUS_BEGINNING_OF_MEDIA', 'STATUS_BEYOND_VDL', 'IMAGE_FILE_UP_SYSTEM_ONLY', 'EPT_NT_CANT_PERFORM_OP', 'SM_SLOWMACHINE', 'ERROR_DRIVERS_LEAKING_LOCKED_PAGES', 'FILE_VOLUME_IS_COMPRESSED', 'STATUS_CLUSTER_NETINTERFACE_NOT_FOUND', 'STATUS_NO_RECOVERY_POLICY', 'SECTION_MAP_WRITE', 'STATUS_LOGON_SERVER_CONFLICT', 'STATUS_WAIT_2', 'STATUS_ENLISTMENT_NOT_FOUND', 'STATUS_DRIVER_PROCESS_TERMINATED', 'STATUS_INTEGER_DIVIDE_BY_ZERO', 'PKCS12_EXPORT_RESERVED_MASK', 'PRODUCT_PROFESSIONAL_E', 'SE_DACL_PRESENT', 'PRODUCT_PROFESSIONAL_N', 'STATUS_USER_APC', 'OS_MEORGREATER', 'STATUS_NET_WRITE_FAULT', 'STATUS_REINITIALIZATION_NEEDED', 'ERROR_INVALID_ACE_CONDITION', 'ERROR_PWD_TOO_RECENT', 'ERROR_PORT_MESSAGE_TOO_LONG', 'STATUS_GRAPHICS_NO_VIDEO_MEMORY', 'ERROR_UNWIND', 'ERROR_EXCL_SEM_ALREADY_OWNED', 'TOKEN_SECURITY_ATTRIBUTE_TYPE_INVALID', 'CERT_E_WRONG_USAGE', 'STATUS_CALLBACK_RETURNED_PRI_BACK', 'DUPLICATE_SAME_ACCESS', 'TRUST_E_BAD_DIGEST', 
'STATUS_NOT_LOGON_PROCESS', 'STATUS_INVALID_SYSTEM_SERVICE', 'STATUS_UNSUPPORTED_PREAUTH', 'CERT_FIND_ANY', 'szOID_RSA', 'STATUS_DS_GLOBAL_CANT_HAVE_CROSSDOMAIN_MEMBER', 'CERT_TRUST_CTL_IS_NOT_SIGNATURE_VALID', 'IMAGE_DIRECTORY_ENTRY_EXPORT', 'STATUS_GRAPHICS_TRY_AGAIN_LATER', 'STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_TARGET', 'STATUS_IO_DEVICE_ERROR', 'STATUS_ACPI_INVALID_OBJTYPE', 'STATUS_GRAPHICS_DDCCI_INVALID_MESSAGE_LENGTH', 'ALPC_MSGFLG_WAIT_USER_MODE', 'STATUS_DEBUGGER_INACTIVE', 'szOID_DH_SINGLE_PASS_STDDH_SHA1_KDF', 'STATUS_MORE_PROCESSING_REQUIRED', 'STATUS_CTL_FILE_NOT_SUPPORTED', 'PIPE_TYPE_BYTE', 'PKCS_7_ASN_ENCODING', 'STATUS_NO_INHERITANCE', 'STATUS_OBJECT_NAME_NOT_FOUND', 'PROV_RSA_SIG', 'SE_SYSTEM_PROFILE_NAME', 'STATUS_FWP_TXN_IN_PROGRESS', 'IMAGE_SCN_TYPE_OVER', 'STATUS_ACCESS_DISABLED_BY_POLICY_OTHER', 'ERROR_VERSION_PARSE_ERROR', 'STATUS_SERVER_HAS_OPEN_HANDLES', 'ERROR_DEBUG_ATTACH_FAILED', 'STATUS_ABANDONED_WAIT_0', 'STATUS_CTX_WINSTATION_BUSY', 'IMAGE_SCN_MEM_16BIT', 'ERROR_BIOS_FAILED_TO_CONNECT_INTERRUPT', 'STATUS_TOO_MANY_SIDS', 'ERROR_FT_WRITE_RECOVERY', 'CERT_FIND_ISSUER_ATTR', 'AF_UNIX', 'STATUS_SHUTDOWN_IN_PROGRESS', 'RPC_NT_GROUP_MEMBER_NOT_FOUND', 'ERROR_DEVICE_ENUMERATION_ERROR', 'CERT_INFO_SIGNATURE_ALGORITHM_FLAG', 'STATUS_FVE_FS_MOUNTED', 'SERVICE_FILE_SYSTEM_DRIVER', 'TOKEN_DUPLICATE', 'CERT_COMPARE_CTL_USAGE', 'LPC_PORT_CLOSED', 'STATUS_EVENT_DONE', 'STATUS_NDIS_INVALID_LENGTH', 'STATUS_REMOTE_DISCONNECT', 'STATUS_BAD_INITIAL_STACK', 'ERROR_INVALID_MINALLOCSIZE', 'STATUS_CTX_WD_NOT_FOUND', 'STATUS_ALERTED', 'CERT_INFO_SUBJECT_UNIQUE_ID_FLAG', 'STATUS_CSS_KEY_NOT_ESTABLISHED', 'STATUS_FWP_EM_NOT_SUPPORTED', 'STANDARD_RIGHTS_EXECUTE', 'MEM_ROTATE', 'ERROR_FILE_CHECKED_OUT', 'STATUS_LOG_INVALID_RANGE', 'SM_CYSIZEFRAME', 'FILE_WRITE_THROUGH', 'ERROR_SIGNAL_REFUSED', 'STATUS_LOG_MULTIPLEXED', 'CERT_NAME_ATTR_TYPE', 'STATUS_CLUSTER_INVALID_NODE', 'ERROR_PNP_IRQ_TRANSLATION_FAILED', 'ERROR_ALREADY_WIN32', 'ERROR_NO_MORE_SEARCH_HANDLES', 'STATUS_LOST_WRITEBEHIND_DATA_LOCAL_DISK_ERROR', 'STATUS_GRAPHICS_PRESENT_UNOCCLUDED', 'ALPC_MESSAGE_HANDLE_ATTRIBUTE', 'VER_NT_WORKSTATION', 'ERROR_SYSTEM_IMAGE_BAD_SIGNATURE', 'STATUS_MUTANT_LIMIT_EXCEEDED', 'szOID_ECDSA_SPECIFIED', 'STATUS_INVALID_IMPORT_OF_NON_DLL', 'PKCS12_ALLOW_OVERWRITE_KEY', 'STATUS_ACCOUNT_LOCKED_OUT', 'STATUS_GRAPHICS_INVALID_PATH_CONTENT_TYPE', 'ACE_INHERITED_OBJECT_TYPE_PRESENT', 'STATUS_TRANSACTIONS_UNSUPPORTED_REMOTE', 'PIPE_TYPE_MESSAGE', 'STATUS_FLOAT_MULTIPLE_TRAPS', 'CRYPT_E_INVALID_INDEX', 'PROCESS_MODE_BACKGROUND_END', 'STATUS_NDIS_GROUP_ADDRESS_IN_USE', 'AF_BAN', 'STATUS_NO_SUCH_LOGON_SESSION', 'STATUS_VIRTUAL_CIRCUIT_CLOSED', 'STATUS_CTX_CONSOLE_DISCONNECT', 'OS_SERVERADMINUI', 'szOID_RSA_signedData', 'KEY_NOTIFY', 'STATUS_VHD_DIFFERENCING_CHAIN_ERROR_IN_PARENT', 'STATUS_FVE_AUTH_INVALID_APPLICATION', 'ERROR_HIBERNATION_FAILURE', 'CMSG_SIGNER_CERT_INFO_PARAM', 'ERROR_PROCESS_MODE_ALREADY_BACKGROUND', 'AF_APPLETALK', 'OWNER_SECURITY_INFORMATION', 'SC_MANAGER_MODIFY_BOOT_CONFIG', 'ERROR_RECEIVE_PARTIAL', 'CERT_E_UNTRUSTEDTESTROOT', 'STATUS_ALLOCATE_BUCKET', 'SM_REMOTECONTROL', 'PRODUCT_ENTERPRISE_N', 'PRODUCT_STARTER_N', 'ERROR_AUDIT_FAILED', 'FILE_FLAG_FIRST_PIPE_INSTANCE', 'ERROR_EVT_FILTER_NOTELTSET', 'STATUS_GUARD_PAGE_VIOLATION', 'PRODUCT_ENTERPRISE_E', 'STATUS_FWP_WRONG_SESSION', 'CREATE_ALWAYS', 'CMSG_CTRL_VERIFY_HASH', 'ERROR_SCOPE_NOT_FOUND', 'PROCESS_SET_QUOTA', 'RPC_NT_UUID_LOCAL_ONLY', 'RPC_NT_INVALID_ENDPOINT_FORMAT', 'SE_GROUP_RESOURCE', 'ERROR_MOUNT_POINT_NOT_RESOLVED', 
'STATUS_KDC_INVALID_REQUEST', 'PROCESS_MODE_BACKGROUND_BEGIN', 'READ_CONTROL', 'EXCEPTION_FLT_OVERFLOW', 'ERROR_INVALID_SEGMENT_NUMBER', 'STATUS_PIPE_DISCONNECTED', 'TXFS_MINIVERSION_COMMITTED_VIEW', 'FILE_CREATE_TREE_CONNECTION', 'TRUST_E_ACTION_UNKNOWN', 'STATUS_GRAPHICS_MIRRORING_DEVICES_NOT_SUPPORTED', 'STATUS_DS_DOMAIN_NAME_EXISTS_IN_FOREST', 'STATUS_MCA_EXCEPTION', 'CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT', 'CERT_TRUST_HAS_CRL_VALIDITY_EXTENDED', 'ERROR_INVALID_STACKSEG', 'ERROR_JOIN_TO_SUBST', 'STATUS_DS_CANT_START', 'OS_WIN2000ADVSERVER', 'PRODUCT_HOME_SERVER', 'STATUS_LOST_WRITEBEHIND_DATA', 'STATUS_DECRYPTION_FAILED', 'SUCCESSFUL_ACCESS_ACE_FLAG', 'STATUS_SEGMENT_NOTIFICATION', 'STATUS_CLUSTER_JOIN_IN_PROGRESS', 'STATUS_ADDRESS_NOT_ASSOCIATED', 'SM_CXSIZE', 'CERT_QUERY_FORMAT_BINARY', 'ERROR_BAD_CURRENT_DIRECTORY', 'STATUS_PROFILING_NOT_STARTED', 'MUTANT_ALL_ACCESS', 'STATUS_SXS_FILE_NOT_PART_OF_ASSEMBLY', 'STATUS_CS_ENCRYPTION_FILE_NOT_CSE', 'SW_NORMAL', 'CERT_STORE_ADD_REPLACE_EXISTING_INHERIT_PROPERTIES', 'SERVICE_ERROR_CRITICAL', 'ERROR_VERIFIER_STOP', 'STATUS_KEY_DELETED', 'NMPWAIT_USE_DEFAULT_WAIT', 'ERROR_UNHANDLED_EXCEPTION', 'STATUS_ACPI_RS_ACCESS', 'ERROR_REPLY_MESSAGE_MISMATCH', 'ERROR_INVALID_PLUGPLAY_DEVICE_PATH', 'SE_SACL_AUTO_INHERITED', 'STATUS_VHD_CHILD_PARENT_SIZE_MISMATCH', 'STATUS_NONE_MAPPED', 'DIGSIG_E_ENCODE', 'STATUS_GRAPHICS_ALLOCATION_INVALID', 'MEM_RESERVE', 'STATUS_SXS_FILE_HASH_MISMATCH', 'STATUS_DS_NO_NEST_GLOBALGROUP_IN_MIXEDDOMAIN', 'ERROR_LOGON_SERVER_CONFLICT', 'CERT_TRUST_CTL_IS_NOT_TIME_VALID', 'SERVICE_CONTINUE_PENDING', 'IMAGE_FILE_MACHINE_CEE', 'IMAGE_FILE_MACHINE_CEF', 'STATUS_DS_UNAVAILABLE', 'ERROR_BAD_PIPE', 'ERROR_NOT_SUPPORTED', 'PROCESS_SET_SESSIONID', 'RPC_NT_FP_OVERFLOW', 'STATUS_UNSUPPORTED_COMPRESSION', 'ERROR_TOO_MANY_SEM_REQUESTS', 'STATUS_HIBERNATED', 'SE_GROUP_DEFAULTED', 'CMSG_CONTENT_PARAM', 'FILE_DELETE_CHILD', 'STATUS_DOMAIN_TRUST_INCONSISTENT', 'CMSG_CRL_PARAM', 'CMSG_SIGNER_HASH_ALGORITHM_PARAM', 'STATUS_EA_LIST_INCONSISTENT', 'STATUS_FWP_ACTION_INCOMPATIBLE_WITH_LAYER', 'STANDARD_RIGHTS_ALL', 'CERT_STORE_ADD_ALWAYS', 'ERROR_SERVICE_NOTIFICATION', 'REG_DWORD_LITTLE_ENDIAN', 'STATUS_GRAPHICS_UNSWIZZLING_APERTURE_UNSUPPORTED', 'STATUS_SAM_NEED_BOOTKEY_PASSWORD', 'TRUST_E_NOSIGNATURE', 'IMAGE_REL_BASED_MACHINE_SPECIFIC_9', 'OPEN_ALWAYS', 'CERT_QUERY_CONTENT_PFX_AND_LOAD', 'ERROR_ERRORS_ENCOUNTERED', 'IMAGE_REL_BASED_MACHINE_SPECIFIC_5', 'AF_CLUSTER', 'SERVICE_USER_DEFINED_CONTROL', 'FILE_SHARE_DELETE', 'FILE_LIST_DIRECTORY', 'FILE_FLAG_OVERLAPPED', 'STATUS_INVALID_BUFFER_SIZE', 'SYSTEM_PROCESS_TRUST_LABEL_VALID_MASK', 'CRYPT_E_NOT_FOUND', 'STATUS_MESSAGE_RETRIEVED', 'STATUS_INTERRUPT_STILL_CONNECTED', 'RRF_RT_REG_NONE', 'RPC_NT_SEND_INCOMPLETE', 'CERT_NAME_SIMPLE_DISPLAY_TYPE', 'STATUS_POLICY_OBJECT_NOT_FOUND', 'CMSG_CTRL_DEL_SIGNER_UNAUTH_ATTR', 'ERROR_CANNOT_BREAK_OPLOCK', 'STATUS_NO_MORE_FILES', 'FILE_OPENED', 'STATUS_THREAD_ALREADY_IN_TASK', 'SM_CXFULLSCREEN', 'STATUS_WAKE_SYSTEM_DEBUGGER', 'STATUS_FILE_FORCED_CLOSED', 'SM_MIDEASTENABLED', 'STATUS_IMAGE_SUBSYSTEM_NOT_PRESENT', 'SE_DACL_PROTECTED', 'FILE_SHARE_WRITE', 'STATUS_GRAPHICS_UNASSIGNED_MODESET_ALREADY_EXISTS', 'ERROR_LOCK_FAILED', 'CMSG_CTRL_DEL_CERT', 'CMSG_SIGNER_AUTH_ATTR_PARAM', 'ERROR_IMAGE_MACHINE_TYPE_MISMATCH', 'E_FAIL', 'STATUS_CRM_PROTOCOL_NOT_FOUND', 'CMSG_CTRL_KEY_TRANS_DECRYPT', 'STATUS_GRAPHICS_COPP_NOT_SUPPORTED', 'RPC_NT_NO_CALL_ACTIVE', 'RPC_NT_NO_PRINC_NAME', 'EXCEPTION_NONCONTINUABLE', 'ERROR_ACCESS_DISABLED_NO_SAFER_UI_BY_POLICY', 
'STATUS_MUI_INVALID_FILE', 'STATUS_FLT_DISALLOW_FAST_IO', 'SE_SECURITY_NAME', 'CERT_FIND_SIGNATURE_HASH', 'STATUS_SMARTCARD_SUBSYSTEM_FAILURE', 'STATUS_INVALID_OWNER', 'STATUS_FLT_ALREADY_ENLISTED', 'CRYPT_E_HASH_VALUE', 'STATUS_CTX_CLIENT_QUERY_TIMEOUT', 'FAX_JOB_QUERY', 'IMAGE_FILE_MACHINE_AM33', 'MAXLEN_PHYSADDR', 'WRITE_DAC', 'szOID_PKCS', 'STATUS_OBJECT_PATH_NOT_FOUND', 'ERROR_PROCESS_IN_JOB', 'IMAGE_SCN_MEM_EXECUTE', 'PROV_EC_ECNRA_FULL', 'szOID_INFOSEC_SuiteAIntegrity', 'STATUS_SXS_PROCESS_TERMINATION_REQUESTED', 'STATUS_LOCAL_DISCONNECT', 'STATUS_DEVICE_FEATURE_NOT_SUPPORTED', 'FILE_ATTRIBUTE_COMPRESSED', 'CERT_KEY_PROV_INFO_PROP_ID', 'AF_NETBIOS', 'FILE_SUPERSEDE', 'IMAGE_NUMBEROF_DIRECTORY_ENTRIES', 'STATUS_TOO_MANY_NODES', 'szOID_RSA_messageDigest', 'szOID_RSA_RSA', 'SE_PROF_SINGLE_PROCESS_NAME', 'STATUS_GRAPHICS_CANCEL_VIDPN_TOPOLOGY_AUGMENTATION', 'STATUS_CLUSTER_NODE_DOWN', 'STATUS_POWER_STATE_INVALID', 'SERVICE_ADAPTER', 'TIMER_MODIFY_STATE', 'STATUS_NDIS_INTERFACE_NOT_FOUND', 'STATUS_CSS_SCRAMBLED_SECTOR', 'ERROR_DISCARDED', 'ERROR_OUTOFMEMORY', 'PRODUCT_STANDARD_SERVER_CORE_V', 'szOID_RSA_MD2RSA', 'STATUS_NO_TOKEN', 'szOID_RSA_encryptedData', 'STATUS_FWP_NULL_POINTER', 'STATUS_NO_LINK_TRACKING_IN_TRANSACTION', 'CMSG_CTRL_ADD_SIGNER', 'OS_WIN2000ORGREATER', 'WRITE_OWNER', 'FILE_MAXIMUM_DISPOSITION', 'ERROR_INFLOOP_IN_RELOC_CHAIN', 'SW_MAX', 'STATUS_INVALID_PORT_ATTRIBUTES', 'DBG_CONTINUE', 'CMSG_CONTENTS_OCTETS_FLAG', 'RPC_C_IMP_LEVEL_DELEGATE', 'ERROR_BAD_PATHNAME', 'SM_CYDRAG', 'PRODUCT_STANDARD_SERVER_SOLUTIONS', 'STATUS_NO_USER_SESSION_KEY', 'ERROR_IO_PRIVILEGE_FAILED', 'STATUS_WMI_ALREADY_ENABLED', 'STATUS_PREDEFINED_HANDLE', 'FILE_CREATE', 'ALPC_MESSAGE_VIEW_ATTRIBUTE', 'CERT_E_REVOCATION_FAILURE', 'STATUS_GRAPHICS_NO_ACTIVE_VIDPN', 'ERROR_EXE_MARKED_INVALID', 'ERROR_NET_WRITE_FAULT', 'STATUS_STREAM_MINIVERSION_NOT_VALID', 'EVENTLOG_BACKWARDS_READ', 'STATUS_SMARTCARD_CERT_REVOKED', 'FILE_READ_EA', 'STATUS_FWP_TOO_MANY_CALLOUTS', 'COMPRESSION_FORMAT_XPRESS_HUFF', 'STATUS_PWD_HISTORY_CONFLICT', 'STATUS_COMPRESSION_DISABLED', 'ERROR_PAGEFILE_QUOTA_EXCEEDED', 'ERROR_MCA_OCCURED', 'IMAGE_DLLCHARACTERISTICS_GUARD_CF', 'IMAGE_FILE_MACHINE_R10000', 'SERVICE_CHANGE_CONFIG', 'OBJECT_TYPE_CREATE', 'ERROR_NOT_SAFE_MODE_DRIVER', 'CRYPT_ACQUIRE_ALLOW_NCRYPT_KEY_FLAG', 'REG_QWORD_LITTLE_ENDIAN', 'SERVICE_DRIVER', 'IMAGE_SCN_ALIGN_8BYTES', 'IMAGE_DIRECTORY_ENTRY_RESOURCE', 'RRF_ZEROONFAILURE', 'ERROR_INVALID_QUOTA_LOWER', 'USAGE_MATCH_TYPE_OR', 'ERROR_SEM_NOT_FOUND', 'STATUS_ACPI_INVALID_TARGETTYPE', 'THREAD_ALL_ACCESS', 'szOID_OIWDIR_CRPT', 'STATUS_SXS_ACTIVATION_CONTEXT_DISABLED', 'JOB_OBJECT_QUERY', 'ERROR_RXACT_COMMIT_NECESSARY', 'PRODUCT_ULTIMATE_N', 'STATUS_ACPI_POWER_REQUEST_FAILED', 'STATUS_GRAPHICS_PATH_NOT_IN_TOPOLOGY', 'PRODUCT_ULTIMATE_E', 'STATUS_INVALID_PRIMARY_GROUP', 'STATUS_TIMER_RESOLUTION_NOT_SET', 'STATUS_DISK_CORRUPT_ERROR', 'E_NOINTERFACE', 'STATUS_FILE_TOO_LARGE', 'STATUS_CONNECTION_COUNT_LIMIT', 'STATUS_CALLBACK_BYPASS', 'STATUS_BAD_STACK', 'TH32CS_SNAPTHREAD', 'STATUS_CANNOT_BREAK_OPLOCK', 'STATUS_UNEXPECTED_MM_EXTEND_ERR', 'STATUS_DELAY_LOAD_FAILED', 'IMAGE_SCN_LNK_OTHER', 'SM_TABLETPC', 'PROV_INTEL_SEC', 'PROCESS_SET_INFORMATION', 'STATUS_DS_NO_FPO_IN_UNIVERSAL_GROUPS', 'IMAGE_FILE_MACHINE_EBC', 'PRODUCT_STANDARD_SERVER_SOLUTIONS_CORE', 'SERVICE_TRIGGER_DATA_TYPE_KEYWORD_ANY', 'CONTEXT_ALL', 'szOID_OIWSEC_shaRSA', 'STATUS_FWP_INVALID_PARAMETER', 'SM_CXICON', 'SM_CMONITORS', 'NTE_PERM', 'STATUS_NO_UNICODE_TRANSLATION', 'LPC_REQUEST', 'CERT_XML_NAME_STR', 
'DBG_RIPEXCEPTION', 'IMAGE_SCN_ALIGN_8192BYTES', 'PROCESS_ALL_ACCESS', 'STATUS_CARDBUS_NOT_SUPPORTED', 'DETACHED_PROCESS', 'STGM_FAILIFTHERE', 'STATUS_DRIVER_FAILED_PRIOR_UNLOAD', 'SM_CYMIN', 'RPC_NT_PROTOCOL_ERROR', 'STATUS_PROTOCOL_NOT_SUPPORTED', 'STATUS_LUIDS_EXHAUSTED', 'RPC_NT_INVALID_VERS_OPTION', 'ERROR_TOO_MANY_SEMAPHORES', 'SW_MINIMIZE', 'STGM_READ', 'CRYPT_E_FILE_ERROR', 'CERT_BACKED_UP_PROP_ID', 'STATUS_FWP_ALREADY_EXISTS', 'PRODUCT_CORE', 'CERT_TRUST_HAS_NAME_MATCH_ISSUER', 'FILE_SUPPORTS_OPEN_BY_FILE_ID', 'CERT_SIMPLE_NAME_STR', 'ERROR_EVT_INVALID_PUBLISHER_NAME', 'STATUS_LOG_BLOCK_VERSION', 'STATUS_FT_ORPHANING', 'CMSG_CERT_COUNT_PARAM', 'ALPC_MSGFLG_WOW64_CALL', 'szOID_NIST_AES256_CBC', 'ERROR_BUSY', 'STATUS_GRAPHICS_INVALID_PIXELFORMAT', 'ERROR_DBG_RIPEXCEPTION', 'ERROR_IMAGE_SUBSYSTEM_NOT_PRESENT', 'IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT', 'PIPE_READMODE_MESSAGE', 'STATUS_GRAPHICS_INVALID_MODE_PRUNING_ALGORITHM', 'SE_GROUP_LOGON_ID', 'ERROR_BAD_COMPRESSION_BUFFER', 'STATUS_REGISTRY_IO_FAILED', 'STATUS_FOUND_OUT_OF_SCOPE', 'STATUS_IPSEC_DOSP_MAX_ENTRIES', 'REG_RESOURCE_LIST', 'STATUS_INVALID_ACL', 'STATUS_ACPI_INVALID_DATA', 'ERROR_INVALID_LIST_FORMAT', 'ERROR_PAGE_FAULT_PAGING_FILE', 'STATUS_NDIS_LOW_POWER_STATE', 'ALPC_CANCELFLG_NO_CONTEXT_CHECK', 'STATUS_IP_ADDRESS_CONFLICT2', 'IMAGE_SCN_TYPE_REG', 'STATUS_GRAPHICS_INVALID_SCANLINE_ORDERING', 'CERT_EFS_PROP_ID', 'STATUS_SMI_PRIMITIVE_INSTALLER_FAILED', 'CMSG_SIGNED', 'CERT_QUERY_CONTENT_FLAG_SERIALIZED_CRL', 'SEC_FILE', 'IMAGE_FILE_MACHINE_SH3DSP', 'SECURITY_MANDATORY_PROTECTED_PROCESS_RID', 'ERROR_SHARING_PAUSED', 'STATUS_CURRENT_DOMAIN_NOT_ALLOWED', 'RRF_RT_QWORD', 'TOKEN_MANDATORY_POLICY_VALID_MASK', 'FILE_NON_DIRECTORY_FILE', 'COMPRESSION_FORMAT_DEFAULT', 'UNLOAD_DLL_DEBUG_EVENT', 'SEC_LARGE_PAGES', 'PRODUCT_STARTER', 'STATUS_KDC_UNKNOWN_ETYPE', 'NTE_TOKEN_KEYSET_STORAGE_FULL', 'STATUS_APC_RETURNED_WHILE_IMPERSONATING', 'EXCEPTION_FLT_DIVIDE_BY_ZERO', 'FILE_SEQUENTIAL_WRITE_ONCE', 'EXCEPTION_INT_OVERFLOW', 'JOB_OBJECT_SET_SECURITY_ATTRIBUTES', 'STATUS_SMARTCARD_WRONG_PIN', 'szOID_ECDSA_SHA512', 'ERROR_JOIN_TO_JOIN', 'szOID_PKCS_10', 'STATUS_SMARTCARD_SILENT_CONTEXT', 'PROV_SSL', 'SERVICE_TRIGGER_TYPE_DEVICE_INTERFACE_ARRIVAL', 'CERT_QUERY_CONTENT_FLAG_ALL', 'EXCEPTION_GUARD_PAGE', 'STATUS_CTX_RESPONSE_ERROR', 'IMAGE_FILE_LINE_NUMS_STRIPPED', 'STATUS_INVALID_DEVICE_OBJECT_PARAMETER', 'STATUS_UNEXPECTED_MM_MAP_ERROR', 'ERROR_ROWSNOTRELEASED', 'ERROR_FILE_EXISTS', 'STATUS_CANCELLED', 'STATUS_LOG_SECTOR_PARITY_INVALID', 'IMAGE_FILE_MACHINE_SH3E', 'ERROR_ADAP_HDW_ERR', 'STATUS_COMMITMENT_MINIMUM', 'STATUS_TOO_MANY_NAMES', 'CERT_QUERY_CONTENT_CRL', 'IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE', 'IMAGE_SCN_ALIGN_2BYTES', 'DRIVE_CDROM', 'EVENTLOG_FORWARDS_READ', 'ERROR_FSFILTER_OP_COMPLETED_SUCCESSFULLY', 'STATUS_MINIVERSION_INACCESSIBLE_FROM_SPECIFIED_TRANSACTION', 'STATUS_CLEANER_CARTRIDGE_INSTALLED', 'FILE_PERSISTENT_ACLS', 'STATUS_CANT_DISABLE_MANDATORY', 'CERT_FIND_MD5_HASH', 'CMSG_SIGNER_CERT_ID_PARAM', 'STATUS_ACCESS_VIOLATION', 'STATUS_GRAPHICS_INVALID_MONITOR_SOURCE_MODE', 'STATUS_SERVICE_NOTIFICATION', 'STATUS_FWP_TOO_MANY_BOOTTIME_FILTERS', 'CERT_TRUST_IS_NOT_SIGNATURE_VALID', 'CERT_QUERY_CONTENT_PKCS10', 'STATUS_FWP_INVALID_WEIGHT', 'STATUS_NOTIFY_CLEANUP', 'STATUS_FWP_KM_CLIENTS_ONLY', 'PAGE_GUARD', 'STATUS_WX86_INTERNAL_ERROR', 'STATUS_UNABLE_TO_LOCK_MEDIA', 'RPC_NT_NO_MORE_ENTRIES', 'LPC_DATAGRAM', 'DEBUG_ONLY_THIS_PROCESS', 'AF_FIREFOX', 'STATUS_MCA_OCCURED', 'ERROR_PLUGPLAY_QUERY_VETOED', 'ERROR_DATA_NOT_ACCEPTED', 
'ERROR_INVALID_BLOCK', 'STATUS_INVALID_PLUGPLAY_DEVICE_PATH', 'RPC_NT_NO_PROTSEQS', 'STATUS_NO_SUCH_MEMBER', 'SM_SECURE', 'RPC_NT_WRONG_PIPE_VERSION', 'STATUS_OBJECT_PATH_SYNTAX_BAD', 'ERROR_CONTROL_C_EXIT', 'STATUS_REDIRECTOR_NOT_STARTED', 'RPC_NT_INTERNAL_ERROR', 'EXIT_PROCESS_DEBUG_EVENT', 'TRUST_E_PROVIDER_UNKNOWN', 'STATUS_CTX_MODEM_INF_NOT_FOUND', 'CREATE_PRESERVE_CODE_AUTHZ_LEVEL', 'PIPE_ACCESS_OUTBOUND', 'KEY_READ', 'CERT_FIND_ISSUER_OF', 'IMAGE_FILE_DLL', 'STATUS_DEVICE_NOT_PARTITIONED', 'CERT_STORE_PROV_FILENAME_A', 'MS_DEF_PROV', 'STATUS_SYMLINK_CLASS_DISABLED', 'SYSTEM_AUDIT_CALLBACK_ACE_TYPE', 'IMAGE_DIRECTORY_ENTRY_ARCHITECTURE', 'SYSTEM_SCOPED_POLICY_ID_ACE_TYPE', 'FILE_ATTRIBUTE_SPARSE_FILE', 'STATUS_CANT_ACCESS_DOMAIN_INFO', 'CERT_STORE_PROV_FILENAME_W', 'STATUS_DS_GLOBAL_CANT_HAVE_LOCAL_MEMBER', 'ERROR_SUBST_TO_SUBST', 'szOID_RSA_contentType', 'FILE_ATTRIBUTE_OFFLINE', 'STATUS_DS_NO_ATTRIBUTE_OR_VALUE', 'STATUS_INVALID_SIGNATURE', 'STGM_SHARE_DENY_WRITE', 'ERROR_CHECKING_FILE_SYSTEM', 'STATUS_REVOCATION_OFFLINE_C', 'ERROR_FLOAT_MULTIPLE_TRAPS', 'RPC_NT_NAME_SERVICE_UNAVAILABLE', 'STATUS_DS_GROUP_CONVERSION_ERROR', 'ERROR_PATH_NOT_FOUND', 'PRODUCT_HOME_PREMIUM_SERVER', 'SE_PRIVILEGE_USED_FOR_ACCESS', 'ERROR_EVT_UNRESOLVED_PARAMETER_INSERT', 'STD_OUTPUT_HANDLE', 'CERT_EXTENDED_ERROR_INFO_PROP_ID', 'COMPRESSION_ENGINE_HIBER', 'STATUS_FLT_DO_NOT_ATTACH', 'TIMER_ALL_ACCESS', 'STATUS_WRONG_CREDENTIAL_HANDLE', 'ERROR_RANGE_NOT_FOUND', 'STATUS_TRANSACTION_REQUEST_NOT_VALID', 'STATUS_GRAPHICS_INCOMPATIBLE_PRIVATE_FORMAT', 'CRYPT_NO_SALT', 'ERROR_WX86_WARNING', 'STATUS_LOG_FULL', 'STATUS_THREADPOOL_HANDLE_EXCEPTION', 'STATUS_TXF_DIR_NOT_EMPTY', 'STATUS_LOG_INCONSISTENT_SECURITY', 'LPC_EXCEPTION', 'STATUS_SXS_INVALID_IDENTITY_ATTRIBUTE_NAME', 'PAGE_READONLY', 'AF_ATM', 'EXCEPTION_IN_PAGE_ERROR', 'STARTF_USEFILLATTRIBUTE', 'STATUS_CONNECTION_RESET', 'STATUS_DRIVER_ORDINAL_NOT_FOUND', 'STARTF_USECOUNTCHARS', 'IMAGE_SCN_LNK_INFO', 'STATUS_GRAPHICS_VIDPN_SOURCE_IN_USE', 'STATUS_DOMAIN_CTRLR_CONFIG_ERROR', 'FILE_OVERWRITTEN', 'FILE_SUPPORTS_HARD_LINKS', 'STATUS_DELETE_PENDING', 'STATUS_INVALID_SECURITY_DESCR', 'FILE_SUPPORTS_REMOTE_STORAGE', 'SERVICE_TRIGGER_TYPE_IP_ADDRESS_AVAILABILITY', 'SE_SYSTEM_ENVIRONMENT_NAME', 'STATUS_LPC_REPLY_LOST', 'NTE_FAIL', 'STATUS_FLT_CONTEXT_ALREADY_LINKED', 'FAX_JOB_SUBMIT', 'CERT_COMPARE_KEY_IDENTIFIER', 'STATUS_REDIRECTOR_HAS_OPEN_HANDLES', 'STATUS_FWP_PROVIDER_CONTEXT_NOT_FOUND', 'SERVICE_ERROR_NORMAL', 'IMAGE_REL_BASED_HIGHADJ', 'SM_CYFULLSCREEN', 'SERVICE_CONTROL_SHUTDOWN', 'PRODUCT_STORAGE_STANDARD_SERVER', 'STATUS_DRIVER_INTERNAL_ERROR', 'STATUS_UNEXPECTED_MM_CREATE_ERR', 'HKEY_LOCAL_MACHINE', 'STATUS_VOLUME_DIRTY', 'STATUS_NO_PAGEFILE', 'RPC_NT_SS_CHAR_TRANS_OPEN_FAIL', 'STATUS_FLT_CONTEXT_ALREADY_DEFINED', 'STATUS_INSTRUCTION_MISALIGNMENT', 'STATUS_INVALID_ACCOUNT_NAME', 'szOID_RSA_digestedData', 'STATUS_CANT_ENABLE_DENY_ONLY', 'ERROR_ILLEGAL_DLL_RELOCATION', 'MEM_PHYSICAL', 'szOID_DS', 'IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY', 'ERROR_NOT_JOINED', 'SM_CYSIZE', 'WTD_CHOICE_BLOB', 'STATUS_PRINT_CANCELLED', 'CRYPT_FORCE_KEY_PROTECTION_HIGH', 'CERT_COMPARE_MD5_HASH', 'SERVICE_CONTROL_PARAMCHANGE', 'STATUS_SXS_SETTING_NOT_REGISTERED', 'STATUS_ACPI_ASSERT_FAILED', 'PRODUCT_DATACENTER_SERVER_CORE', 'STATUS_ALIAS_EXISTS', 'szOID_ECC_CURVE_P256', 'STATUS_FWP_INVALID_RANGE', 'STATUS_FORMS_AUTH_REQUIRED', 'STATUS_LOG_CONTAINER_WRITE_FAILED', 'MAXLEN_IFDESCR', 'CERT_COMPARE_EXISTING', 'RRF_RT_REG_BINARY', 'VER_NT_SERVER', 'AF_DATAKIT', 
'STATUS_SMARTCARD_LOGON_REQUIRED', 'ERROR_DBG_COMMAND_EXCEPTION', 'PRODUCT_ESSENTIALBUSINESS_SERVER_MGMT', 'NTE_TEMPORARY_PROFILE', 'ALPC_VIEWFLG_NOT_SECURE', 'STATUS_FWP_CALLOUT_NOT_FOUND', 'RPC_NT_WRONG_STUB_VERSION', 'TOKEN_SECURITY_ATTRIBUTE_TYPE_SID', 'X509_CERT_REQUEST_TO_BE_SIGNED', 'ERROR_MARSHALL_OVERFLOW', 'SERVICE_STOPPED', 'SERVICE_QUERY_STATUS', 'CERT_TRUST_IS_FROM_EXCLUSIVE_TRUST_STORE', 'STATUS_GRAPHICS_OPM_SIGNALING_NOT_SUPPORTED', 'ERROR_PROFILING_AT_LIMIT', 'STATUS_DRIVER_CANCEL_TIMEOUT', 'OBJ_PERMANENT', 'CERT_INFO_SUBJECT_FLAG', 'STATUS_MEMBER_IN_GROUP', 'STATUS_REPARSE_ATTRIBUTE_CONFLICT', 'STATUS_FWP_NULL_DISPLAY_NAME', 'STATUS_GRAPHICS_PINNED_MODE_MUST_REMAIN_IN_SET', 'ERROR_PROFILING_NOT_STARTED', 'STATUS_MONITOR_UNKNOWN_DESCRIPTOR_FORMAT', 'HKEY_PERFORMANCE_TEXT', 'TRUST_E_SUBJECT_NOT_TRUSTED', 'SM_CXFRAME', 'WTD_UI_NOGOOD', 'STATUS_FLT_FILTER_NOT_FOUND', 'STATUS_NO_MORE_EAS', 'STGM_NOSCRATCH', 'ERROR_ALLOCATE_BUCKET', 'STATUS_GRAPHICS_INVALID_ALLOCATION_HANDLE', 'STATUS_DRIVER_DATABASE_ERROR', 'STATUS_MUI_INVALID_RC_CONFIG', 'STATUS_TRANSACTIONAL_OPEN_NOT_ALLOWED', 'STATUS_GRAPHICS_NO_AVAILABLE_VIDPN_TARGET', 'szOID_RSA_emailAddr', 'CERT_STORE_UNSAFE_PHYSICAL_FLAG', 'ERROR_SHARING_VIOLATION', 'RPC_NT_FP_UNDERFLOW', 'ERROR_EVT_FILTER_OUT_OF_RANGE', 'CONTEXT_DEBUG_REGISTERS', 'STATUS_GRAPHICS_UNKNOWN_CHILD_STATUS', 'STATUS_GRAPHICS_INVALID_PIXELVALUEACCESSMODE', 'CRYPT_ACQUIRE_NCRYPT_KEY_FLAGS_MASK', 'ERROR_REQUEST_OUT_OF_SEQUENCE', 'ERROR_INVALID_PORT_ATTRIBUTES', 'STATUS_DS_BUSY', 'STATUS_FWP_BUILTIN_OBJECT', 'SE_GROUP_USE_FOR_DENY_ONLY', 'CMSG_ATTR_CERT_COUNT_PARAM', 'STATUS_SXS_ASSEMBLY_IS_NOT_A_DEPLOYMENT', 'STATUS_GRAPHICS_INVALID_VIDPN_PRESENT_PATH', 'RPC_C_IMP_LEVEL_IDENTIFY', 'STATUS_CS_ENCRYPTION_EXISTING_ENCRYPTED_FILE', 'ERROR_PRINTQ_FULL', 'EVENTLOG_WARNING_TYPE', 'FILE_SEQUENTIAL_ONLY', 'STATUS_IMAGE_MACHINE_TYPE_MISMATCH', 'ERROR_PNP_INVALID_ID', 'ERROR_PROC_NOT_FOUND', 'ERROR_OUT_OF_PAPER', 'SM_CXVIRTUALSCREEN', 'ALPC_MSGFLG_REPLY_MESSAGE', 'STATUS_RANGE_NOT_LOCKED', 'ACCESS_DENIED_CALLBACK_ACE_TYPE', 'STATUS_DEVICE_PROTOCOL_ERROR', 'STATUS_MEMBER_NOT_IN_ALIAS', 'STATUS_IPSEC_DOSP_KEYMOD_NOT_ALLOWED', 'ERROR_INVALID_MESSAGE', 'RPC_NT_UNKNOWN_IF', 'STATUS_ACPI_INVALID_INDEX', 'SM_STARTER', 'KEY_SET_VALUE', 'RPC_NT_SS_CONTEXT_MISMATCH', 'STGFMT_ANY', 'PRODUCT_SERVER_FOR_SMALLBUSINESS_V', 'STATUS_DEVICE_DOES_NOT_EXIST', 'STATUS_CTX_LOGON_DISABLED', 'STATUS_GRAPHICS_INVALID_VIDPN_TARGET_SUBSET_TYPE', 'SERVICE_RUNNING', 'IMAGE_SCN_TYPE_GROUP', 'STATUS_DATA_LOST_REPAIR', 'STATUS_ACCESS_DISABLED_BY_POLICY_PATH', 'ERROR_RING2SEG_MUST_BE_MOVABLE', 'CERT_STORE_SHARE_STORE_FLAG', 'STATUS_SXS_INVALID_ACTCTXDATA_FORMAT', 'STATUS_INVALID_IMAGE_WIN_16', 'STATUS_MUI_FILE_NOT_LOADED', 'STATUS_FILE_NOT_AVAILABLE', 'RRF_RT_REG_EXPAND_SZ', 'STATUS_PNP_INVALID_ID', 'ERROR_PIPE_NOT_CONNECTED', 'NTE_NOT_FOUND', 'SM_CXHSCROLL', 'STATUS_IMAGE_NOT_AT_BASE', 'STATUS_LOGIN_WKSTA_RESTRICTION', 'ACCESS_MIN_MS_OBJECT_ACE_TYPE', 'STATUS_CERTIFICATE_MAPPING_NOT_UNIQUE', 'szOID_OIWDIR_md2RSA', 'ERROR_PNP_TRANSLATION_FAILED', 'FILE_FLAG_SEQUENTIAL_SCAN', 'WARMING_NOT_SAME_FLAG_FOR_WINXP', 'TIMER_QUERY_STATE', 'FILE_VALID_SET_FLAGS', 'STATUS_HOST_DOWN', 'ERROR_PIPE_CONNECTED', 'ERROR_EVT_INVALID_OPERATION_OVER_ENABLED_DIRECT_CHANNEL', 'STATUS_DISK_RECALIBRATE_FAILED', 'szOID_INFOSEC_sdnsKMandSig', 'ERROR_PAGEFILE_CREATE_FAILED', 'STATUS_WORKING_SET_LIMIT_RANGE', 'IMAGE_SCN_TYPE_NO_PAD', 'FILE_RANDOM_ACCESS', 'STATUS_INDOUBT_TRANSACTIONS_EXIST', 'STATUS_OBJECTID_EXISTS', 
'STATUS_CTX_BAD_VIDEO_MODE', 'ERROR_NOT_ENOUGH_MEMORY', 'STATUS_DEVICE_DOOR_OPEN', 'STATUS_FWP_RESERVED', 'SYSTEM_ALARM_OBJECT_ACE_TYPE', 'STATUS_FILE_CHECKED_OUT', 'STATUS_ACPI_INVALID_TABLE', 'IMAGE_SCN_MEM_LOCKED', 'NTE_NOT_SUPPORTED', 'CONTEXT_FLOATING_POINT', 'ERROR_KEY_HAS_CHILDREN', 'STATUS_REVOCATION_OFFLINE_KDC', 'CERT_CTL_USAGE_PROP_ID', 'STATUS_FLT_DELETING_OBJECT', 'ERROR_INSUFFICIENT_BUFFER', 'STATUS_COULD_NOT_INTERPRET', 'STATUS_DEVICE_ENUMERATION_ERROR', 'PRODUCT_CLUSTER_SERVER_V', 'ERROR_INVALID_TARGET_HANDLE', 'STATUS_SYSTEM_POWERSTATE_TRANSITION', 'EXCEPTION_DATATYPE_MISALIGNMENT', 'STATUS_NO_SUCH_DEVICE', 'STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_SOURCE_MODE', 'STATUS_BAD_NETWORK_NAME', 'PRODUCT_EMBEDDED', 'PIPE_UNLIMITED_INSTANCES', 'STATUS_BUFFER_TOO_SMALL', 'API_SET_SCHEMA_VERSION_V4', 'API_SET_SCHEMA_VERSION_V2', 'API_SET_SCHEMA_VERSION_V3', 'SERVICE_PAUSE_CONTINUE', 'STATUS_SYSTEM_POWERSTATE_COMPLEX_TRANSITION', 'PRODUCT_MEDIUMBUSINESS_SERVER_MESSAGING', 'FILE_FLAG_NO_BUFFERING', 'DEBUG_EVENT_EXCEPTION', 'SM_CYBORDER', 'STATUS_PARTIAL_COPY', 'STATUS_GRAPHICS_PARTIAL_DATA_POPULATED', 'STATUS_TRUSTED_DOMAIN_FAILURE', 'PRODUCT_ENTERPRISE_N_EVALUATION', 'STATUS_GRAPHICS_DDCCI_VCP_NOT_SUPPORTED', 'STATUS_SERIAL_NO_DEVICE_INITED', 'ERROR_COMMITMENT_MINIMUM', 'DBG_TERMINATE_THREAD', 'OS_MEDIACENTER', 'STATUS_XML_PARSE_ERROR', 'FILE_VALID_OPTION_FLAGS', 'KEY_CREATE_LINK', 'STATUS_CLUSTER_NODE_NOT_FOUND', 'STATUS_GRAPHICS_MONITORDESCRIPTOR_ID_MUST_BE_UNIQUE', 'STATUS_TRANSACTION_INTEGRITY_VIOLATED', 'STATUS_DLL_INIT_FAILED_LOGOFF', 'STATUS_NO_MORE_MATCHES', 'WAIT_IO_COMPLETION', 'STATUS_FLT_NOT_INITIALIZED', 'STATUS_FLT_NAME_CACHE_MISS', 'ERROR_INSTRUCTION_MISALIGNMENT', 'STATUS_VERIFY_REQUIRED', 'ERROR_INVALID_MODULETYPE', 'CRYPT_UPDATE_KEY', 'CERT_STORE_SET_LOCALIZED_NAME_FLAG', 'STATUS_GRAPHICS_WRONG_ALLOCATION_DEVICE', 'STGFMT_DOCFILE', 'CERT_COMPARE_ANY', 'CRYPT_DATA_KEY', 'STATUS_MESSAGE_NOT_FOUND', 'SERVICE_STOP', 'RPC_NT_MAX_CALLS_TOO_SMALL', 'RPC_REQUEST_TYPE_CALL', 'STATUS_DOWNGRADE_DETECTED', 'ERROR_DLL_MIGHT_BE_INSECURE', 'STATUS_FLOPPY_WRONG_CYLINDER', 'NTE_FIXEDPARAMETER', 'STATUS_EFS_NOT_ALLOWED_IN_TRANSACTION', 'szOID_OIWSEC_dsaSHA1', 'DEBUG_EVENT_UNLOAD_MODULE', 'STATUS_CSS_REGION_MISMATCH', 'SERVICE_CONTROL_TIMECHANGE', 'STATUS_FWP_ACTION_INCOMPATIBLE_WITH_SUBLAYER', 'CERT_TRUST_INVALID_BASIC_CONSTRAINTS', 'PERSIST_E_NOTSELFSIZING', 'CERT_QUERY_FORMAT_FLAG_ASN_ASCII_HEX_ENCODED', 'SERVICE_BOOT_START', 'STATUS_PROPSET_NOT_FOUND', 'STATUS_NDIS_INVALID_ADDRESS', 'STATUS_FAIL_FAST_EXCEPTION', 'PRODUCT_SOLUTION_EMBEDDEDSERVER_CORE', 'STATUS_FLOAT_UNDERFLOW', 'ERROR_NOT_LOCKED', 'WTD_STATEACTION_AUTO_CACHE', 'KEY_WOW64_64KEY', 'FILE_FLAG_RANDOM_ACCESS', 'STATUS_BIZRULES_NOT_ENABLED', 'STATUS_LOG_METADATA_FLUSH_FAILED', 'STATUS_CONNECTION_DISCONNECTED', 'IMAGE_SCN_CNT_UNINITIALIZED_DATA', 'STATUS_ACCOUNT_EXPIRED', 'STATUS_DS_INCORRECT_ROLE_OWNER', 'STATUS_CTX_SHADOW_ENDED_BY_MODE_CHANGE', 'WTD_STATEACTION_VERIFY', 'STATUS_IPSEC_THROTTLE_DROP', 'STATUS_HANDLE_NOT_CLOSABLE', 'STATUS_FWP_INCOMPATIBLE_LAYER', 'ERROR_PARTIAL_COPY', 'STATUS_GRAPHICS_LEADLINK_NOT_ENUMERATED', 'ERROR_PIPE_BUSY', 'PIPE_READMODE_BYTE', 'SM_CMOUSEBUTTONS', 'ALPC_HANDLEFLG_DUPLICATE_SAME_ATTRIBUTES', 'CERT_FIND_SUBJECT_CERT', 'STATUS_BAD_FUNCTION_TABLE', 'PAGE_NOACCESS', 'STATUS_NOT_COMMITTED', 'CERT_COMPARE_ENHKEY_USAGE', 'STATUS_SXS_RELEASE_ACTIVATION_CONTEXT', 'EVENTLOG_ERROR_TYPE', 'STATUS_NO_TRUST_SAM_ACCOUNT', 'STATUS_PER_USER_TRUST_QUOTA_EXCEEDED', 'STATUS_FT_MISSING_MEMBER', 
'STATUS_INVALID_LOGON_TYPE', 'STATUS_DEVICE_POWERED_OFF', 'STATUS_GRAPHICS_NO_AVAILABLE_IMPORTANCE_ORDINALS', 'RPC_NT_OUT_OF_RESOURCES', 'szOID_OIWDIR_SIGN', 'TOKEN_SECURITY_ATTRIBUTE_VALID_FLAGS', 'FILE_READ_ATTRIBUTES', 'STATUS_GRAPHICS_INVALID_VIDEOPRESENTTARGETSET', 'PKCS_UTC_TIME', 'STATUS_IMAGE_MP_UP_MISMATCH', 'CREATE_SUSPENDED', 'STATUS_FLOAT_OVERFLOW', 'IMAGE_FILE_NET_RUN_FROM_SWAP', 'STATUS_PORT_MESSAGE_TOO_LONG', 'CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY_ID', 'X509_CHOICE_OF_TIME', 'STATUS_MONITOR_INVALID_SERIAL_NUMBER_MONDSC_BLOCK', 'API_SET_LOAD_SCHEMA_ORDINAL', 'IMAGE_FILE_MACHINE_I386', 'STATUS_INVALID_MEMBER', 'RPC_NT_UNKNOWN_AUTHN_TYPE', 'CERT_E_INVALID_NAME', 'STATUS_GRAPHICS_TARGET_ALREADY_IN_SET', 'ERROR_FATAL_APP_EXIT', 'STATUS_WAKE_SYSTEM', 'SC_MANAGER_ALL_ACCESS', 'SECURITY_MANDATORY_LOW_RID', 'MEM_LARGE_PAGES', 'RPC_NT_PIPE_EMPTY', 'STATUS_TRANSACTIONMANAGER_RECOVERY_NAME_COLLISION', 'ERROR_DEV_NOT_EXIST', 'MEM_WRITE_WATCH', 'STATUS_FILE_SYSTEM_LIMITATION', 'RPC_NT_PROCNUM_OUT_OF_RANGE', 'szOID_RSA_DES_EDE3_CBC', 'KEY_ENUMERATE_SUB_KEYS', 'ERROR_NO_EVENT_PAIR', 'LOAD_DLL_DEBUG_EVENT', 'PROFILE_SERVER', 'STATUS_VIDEO_DRIVER_DEBUG_REPORT_REQUEST', 'ERROR_INVALID_SIGNAL_NUMBER', 'CERT_INFO_ISSUER_FLAG', 'ERROR_DRIVER_FAILED_SLEEP', 'DRIVE_REMOVABLE', 'CERT_SYSTEM_STORE_LOCATION_MASK', 'STATUS_NO_SPOOL_SPACE', 'STATUS_PNP_RESTART_ENUMERATION', 'OS_SERVER', 'TOKEN_SECURITY_ATTRIBUTE_DISABLED', 'CERT_QUERY_CONTENT_PFX', 'CERT_TRUST_HAS_EXACT_MATCH_ISSUER', 'STATUS_GRAPHICS_CLIENTVIDPN_NOT_SET', 'ERROR_SWAPERROR', 'EXCEPTION_MAXIMUM_PARAMETERS', 'STATUS_GRAPHICS_SOURCE_ID_MUST_BE_UNIQUE', 'STATUS_DOMAIN_CONTROLLER_NOT_FOUND', 'WTD_REVOKE_WHOLECHAIN', 'STATUS_LOG_GROWTH_FAILED', 'INFINITE', 'STATUS_LOG_RECORD_NONEXISTENT', 'STATUS_RXACT_INVALID_STATE', 'DBG_CONTROL_BREAK', 'STATUS_WAIT_FOR_OPLOCK', 'STATUS_SYSTEM_PROCESS_TERMINATED', 'CRYPT_INITIATOR', 'STATUS_SYSTEM_IMAGE_BAD_SIGNATURE', 'STATUS_GRAPHICS_NO_VIDPNMGR', 'X509_ALTERNATE_NAME', 'STATUS_XML_ENCODING_MISMATCH', 'CERT_QUERY_CONTENT_CERT_PAIR', 'EVENTLOG_AUDIT_FAILURE', 'STATUS_NDIS_BUFFER_TOO_SHORT', 'STATUS_LOG_SECTOR_REMAPPED', 'REG_DWORD', 'IMAGE_FILE_RELOCS_STRIPPED', 'STATUS_VIDEO_HUNG_DISPLAY_DRIVER_THREAD', 'STATUS_PASSWORD_RESTRICTION', 'JOB_OBJECT_ASSIGN_PROCESS', 'ERROR_OPLOCK_BREAK_IN_PROGRESS', 'STATUS_TRANSACTION_ALREADY_COMMITTED', 'STATUS_BREAKPOINT', 'SM_CYSMCAPTION', 'STATUS_SERVER_SID_MISMATCH', 'RPC_NT_SERVER_TOO_BUSY', 'CERT_TRUST_IS_NOT_VALID_FOR_USAGE', 'STATUS_NDIS_PAUSED', 'STATUS_INVALID_IMAGE_NE_FORMAT', 'ERROR_ITERATED_DATA_EXCEEDS_64k', 'STATUS_GRAPHICS_INVALID_MONITOR_FREQUENCYRANGE', 'CERT_STORE_PROV_SMART_CARD', 'RPC_NT_UNKNOWN_AUTHZ_SERVICE', 'CMSG_INNER_CONTENT_TYPE_PARAM', 'ERROR_SEEK', 'EXCEPTION_ILLEGAL_INSTRUCTION', 'IMAGE_FILE_EXECUTABLE_IMAGE', 'FILE_ATTRIBUTE_VIRTUAL', 'szOID_RSA_SMIMEalg', 'FILE_WRITE_DATA', 'STATUS_STOPPED_ON_SYMLINK', 'szOID_INFOSEC_SuiteAKMandSig', 'SE_GROUP_VALID_ATTRIBUTES', 'CERT_FIND_CTL_USAGE', 'CLAIM_SECURITY_ATTRIBUTE_TYPE_FQBN', 'STATUS_INVALID_SERVER_STATE', 'STATUS_FWP_NEVER_MATCH', 'ERROR_IS_JOINED', 'STATUS_GRAPHICS_MONITOR_NOT_CONNECTED', 'CONTEXT_I386', 'SERVICE_TRIGGER_DATA_TYPE_KEYWORD_ALL', 'STATUS_TRANSACTIONAL_CONFLICT', 'SC_MANAGER_CONNECT', 'PROCESS_VM_WRITE', 'ERROR_INVALID_HANDLE', 'SM_CXMAXIMIZED', 'ERROR_INVALID_FUNCTION', 'szOID_INFOSEC_mosaicKMandUpdSig', 'STGM_CONVERT', 'STATUS_IPSEC_SA_LIFETIME_EXPIRED', 'STATUS_TRANSACTION_REQUIRED_PROMOTION', 'STATUS_USER_EXISTS', 'FILE_NOTIFY_CHANGE_ATTRIBUTES', 
'STATUS_LOG_CONTAINER_STATE_INVALID', 'STATUS_FVE_BAD_DATA', 'STATUS_GRAPHICS_INVALID_ACTIVE_REGION', 'CERT_STORE_ADD_NEWER_INHERIT_PROPERTIES', 'IMAGE_DIRECTORY_ENTRY_GLOBALPTR', 'STATUS_OPLOCK_NOT_GRANTED', 'STATUS_INVALID_PARAMETER_12', 'ERROR_LOG_HARD_ERROR', 'STATUS_RECEIVE_PARTIAL_EXPEDITED', 'ERROR_CANNOT_COPY', 'PROCESS_CREATE_THREAD', 'BACKUP_SECURITY_INFORMATION', 'STARTF_USEPOSITION', 'CERT_TRUST_IS_REVOKED', 'ERROR_MCA_EXCEPTION', 'COMPRESSION_FORMAT_LZNT1', 'CMSG_MAX_LENGTH_FLAG', 'ERROR_EVT_FILTER_ALREADYSCOPED', 'REG_NONE', 'STATUS_RESOURCE_REQUIREMENTS_CHANGED', 'STATUS_GRAPHICS_NOT_EXCLUSIVE_MODE_OWNER', 'SM_CARETBLINKINGENABLED', 'SE_INCREASE_QUOTA_NAME', 'ERROR_EVT_CHANNEL_CANNOT_ACTIVATE', 'SECTION_MAP_EXECUTE_EXPLICIT', 'SM_CYCURSOR', 'STATUS_DFS_UNAVAILABLE', 'SERVICE_ACCEPT_SHUTDOWN', 'SYSTEM_ALARM_ACE_TYPE', 'STATUS_SXS_MANIFEST_FORMAT_ERROR', 'SE_CHANGE_NOTIFY_NAME', 'IMAGE_REL_BASED_HIGHLOW', 'STATUS_TRANSACTIONMANAGER_NOT_ONLINE', 'STATUS_FILE_ENCRYPTED', 'STGFMT_NATIVE', 'STATUS_INVALID_DEVICE_REQUEST', 'ERROR_BAD_REM_ADAP', 'STATUS_ACPI_INVALID_REGION', 'SE_RESTORE_NAME', 'STATUS_DS_INVALID_ATTRIBUTE_SYNTAX', 'szOID_INFOSEC_SuiteATokenProtection', 'STATUS_INVALID_TOKEN', 'ERROR_LOCKED', 'STATUS_CALLBACK_RETURNED_LANG', 'RPC_NT_FP_DIV_ZERO', 'NTE_NO_MEMORY', 'STATUS_REG_NAT_CONSUMPTION', 'ERROR_EVT_MAX_INSERTS_REACHED', 'SM_CXFIXEDFRAME', 'RPC_NT_NO_CONTEXT_AVAILABLE', 'ERROR_SECURITY_STREAM_IS_INCONSISTENT', 'STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_NO_LONGER_EXISTS', 'FILE_WRITE_EA', 'SM_CYEDGE', 'STATUS_AUDITING_DISABLED', 'STATUS_UNABLE_TO_DELETE_SECTION', 'ERROR_EVT_INVALID_EVENT_DATA', 'ERROR_UNEXP_NET_ERR', 'EVENTLOG_SUCCESS', 'CERT_STORE_ADD_USE_EXISTING', 'STGM_DIRECT', 'CRYPT_DELETEKEYSET', 'ERROR_EVT_MESSAGE_NOT_FOUND', 'STATUS_GRAPHICS_PATH_ALREADY_IN_TOPOLOGY', 'BELOW_NORMAL_PRIORITY_CLASS', 'STATUS_GRAPHICS_NO_DISPLAY_MODE_MANAGEMENT_SUPPORT', 'OS_SMALLBUSINESSSERVER', 'SERVICE_ACCEPT_NETBINDCHANGE', 'STATUS_GRAPHICS_OPM_SESSION_TYPE_CHANGE_IN_PROGRESS', 'STATUS_RDP_PROTOCOL_ERROR', 'OBJ_INHERIT', 'IMAGE_FILE_MACHINE_UNKNOWN', 'PKCS_CONTENT_INFO_SEQUENCE_OF_ANY', 'RPC_C_QOS_CAPABILITIES_ANY_AUTHORITY', 'CRYPT_SGCKEY', 'TOKEN_SOURCE_LENGTH', 'PKCS12_NO_PERSIST_KEY', 'CERT_QUERY_FORMAT_FLAG_ALL', 'PRODUCT_CORE_N', 'CRYPT_E_AUTH_ATTR_MISSING', 'STATUS_FVE_TPM_INVALID_PCR', 'STATUS_LOG_APPENDED_FLUSH_FAILED', 'CERT_QUERY_CONTENT_CTL', 'ERROR_EVT_SUBSCRIPTION_TO_DIRECT_CHANNEL', 'X509_ENUMERATED', 'STATUS_TOO_MANY_OPENED_FILES', 'STATUS_NDIS_INVALID_DEVICE_REQUEST', 'PROCESS_VM_OPERATION', 'STATUS_UNWIND', 'STATUS_SXS_MANIFEST_TOO_BIG', 'SM_CYFOCUSBORDER', 'STATUS_SXS_MULTIPLE_DEACTIVATION', 'STATUS_GRAPHICS_INVALID_MONITORDESCRIPTORSET', 'STATUS_APPHELP_BLOCK', 'STATUS_CANNOT_LOAD_REGISTRY_FILE', 'ERROR_AUTODATASEG_EXCEEDS_64k', 'ERROR_STACK_OVERFLOW_READ', 'CRYPT_RECIPIENT', 'PRODUCT_STANDARD_SERVER', 'STATUS_NO_SUCH_GROUP', 'szOID_NIST_AES128_CBC', 'ACCESS_SYSTEM_SECURITY', 'MAILSLOT_WAIT_FOREVER', 'STATUS_HIVE_UNLOADED', 'OS_TERMINALREMOTEADMIN', 'STATUS_OFFLOAD_READ_FILE_NOT_SUPPORTED', 'STATUS_NO_TXF_METADATA', 'STATUS_NOTIFY_ENUM_DIR', 'CERT_STORE_PROV_COLLECTION', 'CERT_SYSTEM_STORE_CURRENT_USER', 'STATUS_SMARTCARD_CERT_EXPIRED', 'STATUS_NO_SECURITY_ON_OBJECT', 'STATUS_SXS_WRONG_SECTION_TYPE', 'STATUS_ALL_SIDS_FILTERED', 'ACCESS_MAX_MS_OBJECT_ACE_TYPE', 'szOID_OIWDIR_md2', 'STATUS_REMOTE_FILE_VERSION_MISMATCH', 'SYSTEM_PROCESS_TRUST_NOCONSTRAINT_MASK', 'STATUS_GENERIC_NOT_MAPPED', 'RPC_C_AUTHN_LEVEL_CONNECT', 'STGFMT_STORAGE', 
'STATUS_FLOPPY_VOLUME', 'STATUS_FVE_CONV_READ_ERROR', 'ERROR_REGISTRY_IO_FAILED', 'CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE', 'PROFILE_USER', 'AF_LAT', 'ERROR_INVALID_DATA', 'AT_SIGNATURE', 'RPC_NT_CANT_CREATE_ENDPOINT', 'ERROR_INVALID_TRANSACTION', 'PAGE_WRITECOMBINE', 'CERT_AUTHORITY_INFO_ACCESS_PROP_ID', 'PRODUCT_HOME_PREMIUM_E', 'STATUS_DS_OID_NOT_FOUND', 'STATUS_FVE_NOT_ENCRYPTED', 'szOID_OIWSEC_rsaSign', 'OS_WIN95ORGREATER', 'STATUS_FVE_FAILED_SECTOR_SIZE', 'STATUS_SXS_TRANSACTION_CLOSURE_INCOMPLETE', 'ERROR_IS_SUBSTED', 'SYSTEM_AUDIT_OBJECT_ACE_TYPE', 'UNPROTECTED_DACL_SECURITY_INFORMATION', 'FILE_SYNCHRONOUS_IO_ALERT', 'CMSG_CMS_ENCAPSULATED_CONTENT_FLAG', 'STATUS_GRAPHICS_OPM_HDCP_SRM_NEVER_SET', 'STATUS_GRAPHICS_ADAPTER_WAS_RESET', 'STATUS_DIRECTORY_SERVICE_REQUIRED', 'STATUS_RETRY', 'CMSG_ATTR_CERT_PARAM', 'API_SET_RELEASE_SCHEMA_ORDINAL', 'CONTEXT_INTEGER', 'STATUS_FWP_TOO_MANY_SUBLAYERS', 'REG_DWORD_BIG_ENDIAN', 'STATUS_GRAPHICS_INVALID_VIDEOPRESENTSOURCESET', 'ERROR_CANT_WAIT', 'SERVICE_ACCEPT_POWEREVENT', 'SE_DACL_AUTO_INHERIT_REQ', 'STATUS_DS_ADMIN_LIMIT_EXCEEDED', 'SW_SHOW', 'STATUS_GRAPHICS_PARAMETER_ARRAY_TOO_SMALL', 'STATUS_MEDIA_CHECK', 'STATUS_FWP_LAYER_NOT_FOUND', 'FILE_OPEN_FOR_BACKUP_INTENT', 'NO_PROPAGATE_INHERIT_ACE', 'STATUS_INVALID_ADDRESS_WILDCARD', 'ERROR_MOD_NOT_FOUND', 'STATUS_GRAPHICS_TOO_MANY_REFERENCES', 'STATUS_TOKEN_ALREADY_IN_USE', 'ERROR_NOT_READY', 'STATUS_ACPI_HANDLER_COLLISION', 'STATUS_MISSING_SYSTEMFILE', 'STATUS_TRANSACTION_NOT_ROOT', 'STATUS_CANT_CROSS_RM_BOUNDARY', 'SECURITY_MANDATORY_MEDIUM_PLUS_RID', 'STATUS_GENERIC_COMMAND_FAILED', 'STATUS_GRAPHICS_INVALID_VIDPN_TARGETMODESET', 'ERROR_DBG_TERMINATE_PROCESS', 'STATUS_BAD_MCFG_TABLE', 'STATUS_DS_AG_CANT_HAVE_UNIVERSAL_MEMBER', 'STATUS_FILE_LOCK_CONFLICT', 'SM_CYFIXEDFRAME', 'STATUS_PNP_BAD_MPS_TABLE', 'SE_UNSOLICITED_INPUT_NAME', 'CERT_COMPARE_KEY_SPEC', 'STATUS_NDIS_FILE_NOT_FOUND', 'STATUS_ACPI_INVALID_ACCESS_SIZE', 'SM_NETWORK', 'EXCEPTION_CONTINUE_EXECUTION', 'ERROR_FLOAT_MULTIPLE_FAULTS', 'STATUS_ACPI_REG_HANDLER_FAILED', 'ERROR_PWD_TOO_SHORT', 'PRODUCT_SERVER_FOR_SMALLBUSINESS', 'CLUSAPI_READ_ACCESS', 'STARTF_FORCEONFEEDBACK', 'ERROR_TOO_MANY_TCBS', 'STATUS_BAD_DEVICE_TYPE', 'HKEY_PERFORMANCE_NLSTEXT', 'STATUS_CANT_TERMINATE_SELF', 'STATUS_GRAPHICS_OPM_PARAMETER_ARRAY_TOO_SMALL', 'STATUS_CTX_GRAPHICS_INVALID', 'STATUS_LM_CROSS_ENCRYPTION_REQUIRED', 'ERROR_IOPL_NOT_ENABLED', 'STATUS_RXACT_STATE_CREATED', 'STATUS_FVE_ACTION_NOT_ALLOWED', 'STATUS_INFO_LENGTH_MISMATCH', 'STATUS_FILEMARK_DETECTED', 'IMAGE_SCN_MEM_PURGEABLE', 'STATUS_GRAPHICS_ADAPTER_CHAIN_NOT_READY', 'RPC_NT_NO_MORE_MEMBERS', 'STATUS_SUCCESS', 'STATUS_MONITOR_INVALID_DETAILED_TIMING_BLOCK', 'SE_TAKE_OWNERSHIP_NAME', 'CERT_STORE_ADD_NEWER', 'STATUS_CTX_CDM_DISCONNECT', 'PIPE_REJECT_REMOTE_CLIENTS', 'ERROR_BADKEY', 'ALPC_CANCELFLG_TRY_CANCEL', 'STATUS_LOG_RECORDS_RESERVED_INVALID', 'STATUS_GRAPHICS_INVALID_PRIMARYSURFACE_SIZE', 'STATUS_STACK_OVERFLOW_READ', 'CRYPT_EXPORTABLE', 'STATUS_CACHE_PAGE_LOCKED', 'SERVICE_PAUSE_PENDING', 'PRODUCT_UNLICENSED', 'STATUS_REGISTRY_QUOTA_LIMIT', 'STATUS_FVE_VOLUME_TOO_SMALL', 'RIP_EVENT', 'ERROR_CHECKOUT_REQUIRED', 'AF_IMPLINK', 'PRODUCT_STORAGE_STANDARD_SERVER_CORE', 'ERROR_NO_DATA', 'SYSTEM_PROCESS_TRUST_LABEL_ACE_TYPE', 'IMAGE_DIRECTORY_ENTRY_DEBUG', 'CERT_QUERY_FORMAT_FLAG_BINARY', 'ERROR_NO_GUID_TRANSLATION', 'RPC_NT_CALL_FAILED_DNE', 'ERROR_DYNLINK_FROM_INVALID_RING', 'PROV_RNG', 'CREATE_NO_WINDOW', 'STATUS_TOO_MANY_COMMANDS', 'CERT_TRUST_IS_PARTIAL_CHAIN', 'PROV_RSA_SCHANNEL', 
'STATUS_TOO_MANY_ADDRESSES', 'CERT_INFO_SUBJECT_PUBLIC_KEY_INFO_FLAG', 'SM_SAMEDISPLAYFORMAT', 'CMSG_CTRL_VERIFY_SIGNATURE', 'STATUS_TIMER_NOT_CANCELED', 'AF_CCITT', 'SERVICE_CONTROL_HARDWAREPROFILECHANGE', 'STATUS_INTERNAL_DB_ERROR', 'TOKEN_QUERY_SOURCE', 'FILE_MAP_WRITE', 'STATUS_NONEXISTENT_EA_ENTRY', 'STATUS_DS_FLAT_NAME_EXISTS_IN_FOREST', 'X509_CERT_POLICIES', 'IMAGE_REL_BASED_ABSOLUTE', 'STATUS_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED', 'STATUS_DRIVERS_LEAKING_LOCKED_PAGES', 'STATUS_LOG_READ_CONTEXT_INVALID', 'SM_SHUTTINGDOWN', 'szOID_OIWSEC_sha', 'STATUS_GRAPHICS_SESSION_TYPE_CHANGE_IN_PROGRESS', 'CERT_SYSTEM_STORE_UNPROTECTED_FLAG', 'SYMBOLIC_LINK_QUERY', 'STATUS_OPLOCK_BREAK_IN_PROGRESS', 'STATUS_ACPI_MUTEX_NOT_OWNED', 'STATUS_TRANSACTION_NOT_ACTIVE', 'NTE_BAD_HASH_STATE', 'ERROR_MORE_DATA', 'STATUS_SMARTCARD_NO_CERTIFICATE', 'STATUS_CTX_CDM_CONNECT', 'STATUS_DS_SENSITIVE_GROUP_VIOLATION', 'ERROR_FAILED_DRIVER_ENTRY', 'PKCS_TIME_REQUEST', 'ERROR_ABANDONED_WAIT_0', 'STATUS_REGISTRY_RECOVERED', 'ERROR_WAS_UNLOCKED', 'STATUS_NDIS_BAD_CHARACTERISTICS', 'CRYPT_VERIFYCONTEXT', 'AF_ISO', 'STATUS_CSS_AUTHENTICATION_FAILURE', 'FILE_OPEN', 'TOKEN_SECURITY_ATTRIBUTE_VALUE_CASE_SENSITIVE', 'STANDARD_RIGHTS_READ', 'STATUS_FWP_DUPLICATE_AUTH_METHOD', 'STATUS_FWP_SESSION_ABORTED', 'STATUS_PAGE_FAULT_TRANSITION', 'ERROR_EVT_CANNOT_OPEN_CHANNEL_OF_QUERY', 'STATUS_BAD_DLL_ENTRYPOINT', 'STATUS_NDIS_REQUEST_ABORTED', 'CERT_SIGNATURE_HASH_PROP_ID', 'ACE_OBJECT_TYPE_PRESENT', 'CONTEXT_XSTATE', 'SECTION_ALL_ACCESS', 'OS_ADVSERVER', 'ERROR_INVALID_EA_NAME', 'CERT_PUBKEY_HASH_RESERVED_PROP_ID', 'SERVICE_TRIGGER_TYPE_DOMAIN_JOIN', 'CERT_INFO_NOT_AFTER_FLAG', 'REG_BINARY', 'FILE_DELETE_ON_CLOSE', 'CERT_STORE_PROV_FILENAME', 'MEM_DECOMMIT', 'szOID_OIWSEC_shaDSA', 'STATUS_PASSWORD_MUST_CHANGE', 'STATUS_USER_SESSION_DELETED', 'DEBUG_EVENT_BREAKPOINT', 'STATUS_CONNECTION_ACTIVE', 'IMAGE_SCN_MEM_NOT_CACHED', 'ERROR_TIMER_RESOLUTION_NOT_SET', 'STATUS_SEMAPHORE_LIMIT_EXCEEDED', 'STATUS_SMARTCARD_CARD_NOT_AUTHENTICATED', 'szOID_INFOSEC_mosaicTokenProtection', 'STATUS_SHARED_POLICY', 'NTE_BAD_KEYSET_PARAM', 'STATUS_CLUSTER_LOCAL_NODE_NOT_FOUND', 'RPC_NT_CALL_FAILED', 'SM_SWAPBUTTON', 'SM_DEBUG', 'IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG', 'STATUS_GRAPHICS_INVALID_PATH_CONTENT_GEOMETRY_TRANSFORMATION', 'STATUS_PKINIT_FAILURE', 'STATUS_NDIS_INVALID_PORT_STATE', 'CERT_TRUST_IS_CYCLIC', 'CMSG_AUTHENTICATED_ATTRIBUTES_FLAG', 'STATUS_CLUSTER_NODE_UP', 'CERT_KEY_IDENTIFIER_PROP_ID', 'STATUS_INVALID_ADDRESS', 'OBJ_VALID_ATTRIBUTES', 'STATUS_ACCESS_DISABLED_BY_POLICY_DEFAULT', 'ERROR_DS_VERSION_CHECK_FAILURE', 'STATUS_NO_QUOTAS_FOR_ACCOUNT', 'SM_CYICONSPACING', 'RPC_NT_CANNOT_SUPPORT', 'STATUS_SPECIAL_GROUP', 'ERROR_INVALID_DEVICE_OBJECT_PARAMETER', 'CERT_FIND_CERT_ID', 'STATUS_LOG_CONTAINER_LIMIT_EXCEEDED', 'SM_CYICON', 'STATUS_ACCESS_DENIED', 'STATUS_ACCOUNT_DISABLED', 'STATUS_NO_SECRETS', 'STATUS_CONNECTION_INVALID', 'STATUS_PAGE_FAULT_DEMAND_ZERO', 'RPC_C_IMP_LEVEL_DEFAULT', 'ERROR_ALREADY_EXISTS', 'TOKEN_ASSIGN_PRIMARY', 'RPC_NT_STRING_TOO_LONG', 'ERROR_EVT_MESSAGE_ID_NOT_FOUND', 'FILE_UNICODE_ON_DISK', 'CMSG_CTRL_ADD_CERT', 'CERT_STORE_READONLY_FLAG', 'RPC_C_AUTHN_LEVEL_PKT_PRIVACY', 'CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED', 'szOID_OIWSEC_dsaComm', 'STATUS_DFS_EXIT_PATH_FOUND', 'STGM_READWRITE', 'STATUS_CTX_MODEM_RESPONSE_VOICE', 'REALTIME_PRIORITY_CLASS', 'ACCESS_MAX_MS_ACE_TYPE', 'SM_CXSIZEFRAME', 'ERROR_MUTANT_LIMIT_EXCEEDED', 'STATUS_FWP_INVALID_FLAGS', 'STATUS_FLT_POST_OPERATION_CLEANUP', 'STATUS_IMAGE_CERT_REVOKED', 
'STATUS_DEVICE_UNREACHABLE', 'DEBUG_EVENT_CREATE_THREAD', 'ERROR_BAD_NET_RESP', 'CERT_PVK_FILE_PROP_ID', 'STATUS_REDIRECTOR_PAUSED', 'ERROR_EVT_UNRESOLVED_VALUE_INSERT', 'ASYNC_MODE_DEFAULT', 'SERVICE_TRIGGER_TYPE_GROUP_POLICY', 'STATUS_LOG_DEDICATED', 'ERROR_EXE_CANNOT_MODIFY_SIGNED_BINARY', 'STATUS_SXS_FILE_HASH_MISSING', 'ERROR_REGISTRY_QUOTA_LIMIT', 'STATUS_DIRECTORY_IS_A_REPARSE_POINT', 'RPC_NT_NULL_REF_POINTER', 'DBG_UNABLE_TO_PROVIDE_HANDLE', 'NTE_BAD_PROV_TYPE', 'OS_WIN98ORGREATER', 'RPC_NT_PIPE_DISCIPLINE_ERROR', 'ERROR_ARBITRATION_UNHANDLED', 'CMSG_CTRL_DECRYPT', 'STATUS_GRAPHICS_DRIVER_MISMATCH', 'PRODUCT_BUSINESS_N', 'CERT_FIND_ISSUER_STR', 'CERT_COMPARE_CERT_ID', 'SE_TIME_ZONE_NAME', 'ERROR_SHORT_NAMES_NOT_ENABLED_ON_VOLUME', 'CERT_QUERY_CONTENT_FLAG_CERT', 'SE_CREATE_PERMANENT_NAME', 'TRUNCATE_EXISTING', 'STATUS_INVALID_PARAMETER_5', 'SM_CXHTHUMB', 'STATUS_INVALID_PARAMETER_7', 'STATUS_INVALID_PARAMETER_6', 'STATUS_INVALID_PARAMETER_1', 'RPC_NT_UNSUPPORTED_TRANS_SYN', 'STATUS_INVALID_PARAMETER_3', 'CERT_COMPARE_PUBLIC_KEY', 'STATUS_FWP_ZERO_LENGTH_ARRAY', 'BIND_IF_SYNTAX_UNKNOWN', 'AF_ECMA', 'ERROR_SYSTEM_TRACE', 'STATUS_CONTEXT_MISMATCH', 'DIRECTORY_CREATE_OBJECT', 'STATUS_TXF_ATTRIBUTE_CORRUPT', 'PIPE_ACCEPT_REMOTE_CLIENTS', 'CERT_STORE_CREATE_NEW_FLAG', 'IMAGE_FILE_MACHINE_IA64', 'SW_FORCEMINIMIZE', 'ERROR_VDM_HARD_ERROR', 'STATUS_UNFINISHED_CONTEXT_DELETED', 'ERROR_PRIMARY_TRANSPORT_CONNECT_FAILED', 'CRYPT_ACQUIRE_PREFER_NCRYPT_KEY_FLAG', 'ERROR_NOTHING_TO_TERMINATE', 'STATUS_WMI_GUID_DISCONNECTED', 'szOID_OIWSEC_md5RSASign', 'STATUS_LPC_INVALID_CONNECTION_USAGE', 'INVALID_HANDLE_VALUE', 'STATUS_UNRECOGNIZED_VOLUME', 'CERT_FIND_SHA1_HASH', 'STATUS_CTX_SHADOW_DISABLED', 'WTD_STATEACTION_AUTO_CACHE_FLUSH', 'STATUS_GRAPHICS_INCONSISTENT_DEVICE_LINK_STATE', 'PRODUCT_WEB_SERVER_CORE', 'STATUS_FILE_CORRUPT_ERROR', 'STATUS_TRANSLATION_COMPLETE', 'STATUS_GROUP_EXISTS', 'IMAGE_SCN_ALIGN_4BYTES', 'STATUS_GUIDS_EXHAUSTED', 'szOID_OIWSEC_desOFB', 'STATUS_FVE_LOCKED_VOLUME', 'CERT_NEXT_UPDATE_LOCATION_PROP_ID', 'X509_BITS', 'CERT_QUERY_CONTENT_CERT', 'STATUS_RESOURCE_NOT_OWNED', 'CERT_STORE_PROV_SERIALIZED', 'ERROR_IS_SUBST_TARGET', 'SE_SACL_DEFAULTED', 'NULL', 'STATUS_SHARING_PAUSED', 'STATUS_CTX_INVALID_WD', 'STATUS_INVALID_USER_PRINCIPAL_NAME', 'SE_GROUP_ENABLED_BY_DEFAULT', 'STATUS_ABANDONED_WAIT_63', 'ERROR_INVALID_LDT_OFFSET', 'FILE_ATTRIBUTE_TEMPORARY', 'ERROR_MP_PROCESSOR_MISMATCH', 'ERROR_BAD_DEV_TYPE', 'FILE_GENERIC_WRITE', 'STATUS_CTX_PD_NOT_FOUND', 'FAX_PORT_SET', 'CLAIM_SECURITY_ATTRIBUTE_TYPE_OCTET_STRING', 'STATUS_WX86_UNSIMULATE', 'CERT_STORE_PROV_LDAP_W', 'STATUS_NO_SECURITY_CONTEXT', 'EXCEPTION_EXECUTE_HANDLER', 'STATUS_INVALID_THREAD', 'OS_EMBEDDED', 'CMSG_ENCODED_SIGNER', 'STATUS_MEDIA_WRITE_PROTECTED', 'STGM_SHARE_DENY_NONE', 'CMSG_CMS_RECIPIENT_ENCRYPTED_KEY_INDEX_PARAM', 'ERROR_EA_FILE_CORRUPT', 'CMSG_CTRL_DEL_ATTR_CERT', 'STATUS_GRAPHICS_VIDPN_TOPOLOGY_CURRENTLY_NOT_SUPPORTED', 'STATUS_SECRET_TOO_LONG', 'STATUS_CALLBACK_RETURNED_WHILE_IMPERSONATING', 'HKEY_CURRENT_USER', 'CERT_FIND_SUBJECT_STR_W', 'THREAD_SET_CONTEXT', 'ERROR_UNEXPECTED_MM_CREATE_ERR', 'ERROR_BUSY_DRIVE', 'SM_CXMINIMIZED', 'PRODUCT_UNDEFINED', 'SERVICE_QUERY_CONFIG', 'CERT_FIND_SUBJECT_STR_A', 'ERROR_REPARSE_OBJECT', 'OBJ_EXCLUSIVE', 'PRODUCT_STARTER_E', 'ERROR_DISK_TOO_FRAGMENTED', 'WOW64_MAXIMUM_SUPPORTED_EXTENSION', 'SERVICE_ACCEPT_PRESHUTDOWN', 'STATUS_RECURSIVE_DISPATCH', 'STATUS_INVALID_NETWORK_RESPONSE', 'FILE_ATTRIBUTE_NORMAL', 'IMAGE_DLLCHARACTERISTICS_WDM_DRIVER', 'STATUS_DISK_FULL', 
'FILE_NOTIFY_CHANGE_SIZE', 'RPC_NT_UNSUPPORTED_NAME_SYNTAX', 'SM_CYHSCROLL', 'SYNCHRONIZE', 'CERT_COMPARE_PROPERTY', 'ERROR_EVT_FILTER_INVARG', 'TOKEN_ADJUST_DEFAULT', 'STATUS_GRAPHICS_MAX_NUM_PATHS_REACHED', 'STATUS_MUTUAL_AUTHENTICATION_FAILED', 'STATUS_SPECIAL_ACCOUNT', 'SC_MANAGER_LOCK', 'SM_CYMENUCHECK', 'STGM_SHARE_EXCLUSIVE', 'CERT_CHAIN_REVOCATION_CHECK_CHAIN', 'ERROR_CANTSCROLLBACKWARDS', 'STATUS_CTX_WINSTATION_NOT_FOUND', 'WRITE_WATCH_FLAG_RESET', 'STATUS_REMOTE_STORAGE_NOT_ACTIVE', 'STATUS_PLUGPLAY_NO_DEVICE', 'NTE_BAD_VER', 'STATUS_ILLEGAL_ELEMENT_ADDRESS', 'DEBUG_EVENT_LOAD_MODULE', 'SERVICE_CONTROL_INTERROGATE', 'STATUS_INVALID_INFO_CLASS', 'NTE_PROV_DLL_NOT_FOUND', 'STATUS_UNMAPPABLE_CHARACTER', 'RPC_NT_BINDING_HAS_NO_AUTH', 'STATUS_ALREADY_DISCONNECTED', 'ERROR_NO_LOG_SPACE', 'STATUS_DS_CANT_ON_RDN', 'STATUS_NETWORK_CREDENTIAL_CONFLICT', 'STATUS_LOG_RESTART_INVALID', 'ALPC_MSGFLG_WAIT_ALERTABLE', 'STATUS_FWP_INVALID_INTERVAL', 'STATUS_CTX_MODEM_RESPONSE_NO_CARRIER', 'ERROR_EVT_FILTER_TOO_COMPLEX', 'ERROR_PAGE_FAULT_GUARD_PAGE', 'ERROR_EVT_MALFORMED_XML_TEXT', 'SERVICE_INACTIVE', 'STATUS_ND_QUEUE_OVERFLOW', 'STATUS_TOO_MANY_LINKS', 'STATUS_LOG_CLIENT_ALREADY_REGISTERED', 'STATUS_ILLEGAL_FLOAT_CONTEXT', 'IMAGE_FILE_MACHINE_POWERPCFP', 'STATUS_NO_USER_KEYS', 'COMPRESSION_FORMAT_XPRESS', 'STATUS_LOG_FULL_HANDLER_IN_PROGRESS', 'STATUS_TRANSACTION_INVALID_ID', 'SERVICE_WIN32_SHARE_PROCESS', 'STATUS_VOLUME_NOT_UPGRADED', 'STATUS_DS_UNIVERSAL_CANT_HAVE_LOCAL_MEMBER', 'STATUS_DEVICE_BUSY', 'PIPE_ACCESS_DUPLEX', 'ERROR_OPLOCK_HANDLE_CLOSED', 'CERT_E_ISSUERCHAINING', 'STATUS_RESOURCE_TYPE_NOT_FOUND', 'STATUS_WX86_EXCEPTION_CONTINUE', 'STATUS_STRONG_CRYPTO_NOT_SUPPORTED', 'STATUS_BAD_CURRENT_DIRECTORY', 'STATUS_GRAPHICS_OPM_INVALID_SRM', 'STATUS_GRAPHICS_INVALID_VIDPN_TOPOLOGY_RECOMMENDATION_REASON', 'STATUS_GRAPHICS_MONITOR_COULD_NOT_BE_ASSOCIATED_WITH_ADAPTER', 'STATUS_BACKUP_CONTROLLER', 'STATUS_FREE_VM_NOT_AT_BASE', 'SM_SHOWSOUNDS', 'PRODUCT_HOME_BASIC_E', 'OS_WINDOWS', 'szOID_RSA_PSPECIFIED', 'ERROR_IMAGE_MACHINE_TYPE_MISMATCH_EXE', 'PRODUCT_HOME_BASIC_N', 'STATUS_TRANSACTION_RESPONSE_NOT_ENLISTED', 'STATUS_STACK_BUFFER_OVERRUN', 'STATUS_JOURNAL_NOT_ACTIVE', 'ERROR_MR_MID_NOT_FOUND', 'ERROR_SUCCESS', 'ERROR_CANNOT_GRANT_REQUESTED_OPLOCK', 'SM_CYMENU', 'FILE_CASE_PRESERVED_NAMES', 'STATUS_BAD_IMPERSONATION_LEVEL', 'STATUS_MONITOR_WMI_DATABLOCK_REGISTRATION_FAILED', 'STATUS_LOG_SPACE_RESERVED_INVALID', 'STATUS_PORT_CLOSED', 'STATUS_UNKNOWN_REVISION', 'ERROR_EVT_VERSION_TOO_OLD', 'PRODUCT_CLUSTER_SERVER', 'STATUS_SAM_INIT_FAILURE', 'PRODUCT_STORAGE_EXPRESS_SERVER_CORE', 'szOID_RSA_hashedData', 'STATUS_INVALID_IDN_NORMALIZATION', 'ERROR_ALERTED', 'RPC_NT_NO_ENDPOINT_FOUND', 'STATUS_HARDWARE_MEMORY_ERROR', 'STATUS_TS_INCOMPATIBLE_SESSIONS', 'STATUS_NO_YIELD_PERFORMED', 'STATUS_DEVICE_REMOVED', 'ERROR_INVALID_NAME', 'STATUS_THREADPOOL_RELEASE_SEMAPHORE_ON_COMPLETION_FAILED', 'STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_CGMSA', 'CLAIM_SECURITY_ATTRIBUTE_TYPE_UINT64', 'RPC_NT_INVALID_AUTH_IDENTITY', 'szOID_INFOSEC_sdnsKeyManagement', 'STATUS_FWP_INJECT_HANDLE_CLOSING', 'STATUS_GRAPHICS_NOT_A_LINKED_ADAPTER', 'STATUS_FWP_INCOMPATIBLE_DH_GROUP', 'X509_UNICODE_NAME_VALUE', 'PERSIST_E_SIZEINDEFINITE', 'STATUS_NO_MEMORY', 'X509_UNICODE_NAME', 'ERROR_WRONG_DISK', 'STATUS_FVE_RAW_BLOCKED', 'STATUS_ENCOUNTERED_WRITE_IN_PROGRESS', 'STATUS_DS_GC_NOT_AVAILABLE', 'RPC_NT_NOT_LISTENING', 'SM_MEDIACENTER', 'CERT_ACCESS_STATE_PROP_ID', 'STATUS_LOG_BLOCK_INVALID', 'ERROR_WAIT_NO_CHILDREN', 
'CERT_NAME_ISSUER_FLAG', 'PROCESS_QUERY_INFORMATION', 'STATUS_ADDRESS_CLOSED', 'FILE_ALL_ACCESS', 'STATUS_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT', 'IMAGE_FILE_MACHINE_THUMB', 'STATUS_FVE_FAILED_WRONG_FS', 'ERROR_COULD_NOT_INTERPRET', 'ERROR_ARITHMETIC_OVERFLOW', 'STATUS_PROCEDURE_NOT_FOUND', 'PRODUCT_MEDIUMBUSINESS_SERVER_MANAGEMENT', 'STATUS_FILE_LOCKED_WITH_ONLY_READERS', 'STATUS_FULLSCREEN_MODE', 'X509_ASN_ENCODING', 'ERROR_UNDEFINED_CHARACTER', 'NTE_BAD_LEN', 'CERT_FIND_ENHKEY_USAGE', 'STATUS_FILE_DELETED', 'ERROR_INVALID_CATEGORY', 'szOID_DH_SINGLE_PASS_STDDH_SHA384_KDF', 'CERT_QUERY_CONTENT_SERIALIZED_CERT', 'szOID_RSA_SETOAEP_RSA', 'ERROR_NOT_DOS_DISK', 'szOID_NIST_AES128_WRAP', 'CERT_STORE_PROV_SYSTEM_REGISTRY_W', 'ERROR_NEGATIVE_SEEK', 'CRYPT_MACHINE_KEYSET', 'STATUS_GRAPHICS_I2C_ERROR_RECEIVING_DATA', 'CRYPT_ACQUIRE_CACHE_FLAG', 'CERT_FIND_SUBJECT_STR', 'STATUS_CANT_BREAK_TRANSACTIONAL_DEPENDENCY', 'WOW64_SIZE_OF_80387_REGISTERS', 'RRF_RT_REG_MULTI_SZ', 'FILE_ATTRIBUTE_DIRECTORY', 'HKEY_PERFORMANCE_DATA', 'IMAGE_FILE_MACHINE_POWERPC', 'STATUS_WX86_EXCEPTION_LASTCHANCE', 'SE_RM_CONTROL_VALID', 'CERT_DESCRIPTION_PROP_ID', 'ERROR_NOTIFICATION_GUID_ALREADY_DEFINED', 'szOID_ANSI_X942_DH', 'SERVICE_CONTROL_NETBINDDISABLE', 'FAX_CONFIG_QUERY', 'STATUS_NO_EAS_ON_FILE', 'SEC_WINNT_AUTH_IDENTITY_UNICODE', 'THREAD_SET_INFORMATION', 'STATUS_BIOS_FAILED_TO_CONNECT_INTERRUPT', 'STATUS_FWP_TRAFFIC_MISMATCH', 'STATUS_TOO_MANY_PRINCIPALS', 'CERT_CROSS_CERT_DIST_POINTS_PROP_ID', 'PROV_EC_ECDSA_SIG', 'STATUS_THREADPOOL_RELEASE_MUTEX_ON_COMPLETION_FAILED', 'STATUS_NOTIFICATION_GUID_ALREADY_DEFINED', 'FILE_COMPLETE_IF_OPLOCKED', 'CERT_SYSTEM_STORE_SERVICES_ID', 'ERROR_DISK_REPAIR_DISABLED', 'CERT_STORE_MANIFOLD_FLAG', 'STATUS_BAD_FILE_TYPE', 'STATUS_GRAPHICS_OPM_PROTECTED_OUTPUT_DOES_NOT_HAVE_OPM_SEMANTICS', 'STATUS_LOG_CANT_DELETE', 'STATUS_SHARING_VIOLATION', 'ERROR_RECEIVE_PARTIAL_EXPEDITED', 'IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE', 'CERT_QUERY_CONTENT_FLAG_CTL', 'STATUS_GRAPHICS_INVALID_VIDEO_PRESENT_TARGET_MODE', 'SE_LOAD_DRIVER_NAME', 'STD_ERROR_HANDLE', 'PROV_EC_ECNRA_SIG', 'STATUS_PWD_TOO_RECENT', 'PROCESS_SUSPEND_RESUME', 'szOID_RSA_SMIMEalgESDH', 'STATUS_NDIS_ADAPTER_NOT_FOUND', 'STATUS_OBJECT_NO_LONGER_EXISTS', 'STATUS_NETWORK_NAME_DELETED', 'STATUS_GRAPHICS_LEADLINK_START_DEFERRED', 'STATUS_NETWORK_BUSY', 'IMAGE_FILE_MACHINE_ALPHA', 'STATUS_DLL_NOT_FOUND', 'STATUS_INVALID_UNWIND_TARGET', 'STATUS_GRAPHICS_INTERNAL_ERROR', 'STATUS_WMI_SET_FAILURE', 'STATUS_REVISION_MISMATCH', 'RPC_NT_INVALID_TIMEOUT', 'STATUS_INVALID_ADDRESS_COMPONENT', 'PRODUCT_SERVER_FOUNDATION', 'STATUS_GRAPHICS_PRESENT_MODE_CHANGED', 'STATUS_GRAPHICS_INVALID_ALLOCATION_INSTANCE', 'STGTY_REPEAT', 'ERROR_CANNOT_LOAD_REGISTRY_FILE', 'STATUS_NDIS_NOT_SUPPORTED', 'STATUS_ACPI_INVALID_EVENTTYPE', 'FILE_CREATED', 'ERROR_KERNEL_APC', 'SERVICE_SYSTEM_START', 'STATUS_DEVICE_REQUIRES_CLEANING', 'ERROR_VIRUS_INFECTED', 'IMAGE_FILE_MACHINE_AXP64', 'CERT_FIND_KEY_SPEC', 'STATUS_DS_MEMBERSHIP_EVALUATED_LOCALLY', 'STATUS_PKINIT_CLIENT_FAILURE', 'ERROR_IS_JOIN_PATH', 'STATUS_CLUSTER_NODE_NOT_PAUSED', 'ERROR_DLL_MIGHT_BE_INCOMPATIBLE', 'STATUS_VOLMGR_RAID5_NOT_SUPPORTED', 'STATUS_GRAPHICS_OPM_OUTPUT_DOES_NOT_SUPPORT_ACP', 'ERROR_SEEK_ON_DEVICE', 'CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG', 'STATUS_NOT_CLIENT_SESSION', 'STATUS_HUNG_DISPLAY_DRIVER_THREAD', 'STATUS_CLUSTER_NETWORK_ALREADY_ONLINE', 'ERROR_INVALID_LDT_SIZE', 'RRF_NOEXPAND', 'STATUS_NDIS_RESOURCE_CONFLICT', 'ERROR_BAD_DLL_ENTRYPOINT', 'IMAGE_FILE_MACHINE_SH4', 
'IMAGE_FILE_MACHINE_SH5', 'IMAGE_FILE_MACHINE_SH3', 'ERROR_PROCESS_MODE_NOT_BACKGROUND', 'ERROR_CANTWRITE', 'SECURITY_MANDATORY_MEDIUM_RID', 'WTD_CHOICE_CERT', 'CRYPT_PREGEN', 'STATUS_NT_CROSS_ENCRYPTION_REQUIRED', 'ERROR_HANDLES_CLOSED', 'STATUS_FWP_PROVIDER_CONTEXT_MISMATCH', 'STATUS_DS_CANT_MOD_PRIMARYGROUPID', 'CERT_STORE_ADD_NEW', 'FILE_MAP_EXECUTE', 'SW_SHOWMINNOACTIVE', 'TH32CS_SNAPMODULE32', 'STGM_NOSNAPSHOT', 'STATUS_NULL_LM_PASSWORD', 'STATUS_EVENTLOG_FILE_CHANGED', 'STATUS_FVE_RAW_ACCESS', 'RPC_S_PROCNUM_OUT_OF_RANGE', 'IMAGE_DIRECTORY_ENTRY_IMPORT', 'CERT_E_CHAINING', 'ERROR_EVT_MESSAGE_LOCALE_NOT_FOUND', 'CREATE_PROTECTED_PROCESS', 'ERROR_NO_MORE_ITEMS', 'RPC_NT_WRONG_KIND_OF_BINDING', 'STATUS_UNABLE_TO_FREE_VM', 'STATUS_FVE_CONV_RECOVERY_FAILED', 'ERROR_FOUND_OUT_OF_SCOPE'])
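# ---------------------------------------------------------------------------
# Illustrative sketch (added for clarity; not part of the original module):
# the large set literals in this file exist so callers can do O(1) membership
# tests on Win32 identifier names. The helper below is hypothetical -- it only
# demonstrates the intended lookup pattern. It takes both sets as parameters
# because the assignment target of the set closed above lies outside this
# excerpt, so its name is not known here.
def _classify_win32_name(name, constant_names, struct_names):
    """Return 'constant', 'struct', or None for a Win32 identifier name."""
    if name in constant_names:
        return "constant"
    if name in struct_names:
        return "struct"
    return None
# ---------------------------------------------------------------------------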
structs = set(['PFILE_ALIGNMENT_INFORMATION', 'PACCESS_DENIED_CALLBACK_ACE', '_CLAIM_SECURITY_ATTRIBUTE_V1_UNION', 'CERT_INFO', 'MIB_UDPTABLE_OWNER_PID', '_FILE_FS_LABEL_INFORMATION', 'CRYPT_KEY_PROV_PARAM', '_DEBUG_STACK_FRAME', 'LPEXCEPTION_DEBUG_INFO', '__tagVARIANT', 'FILE_ATTRIBUTE_TAG_INFORMATION', '_TOKEN_SOURCE', 'CURDIR', '_CMSG_RECIPIENT_ENCRYPTED_KEY_ENCODE_INFO', 'SPC_LINK_', 'IP_ADAPTER_INDEX_MAP', 'PRTL_UNLOAD_EVENT_TRACE64', '_ALPC_PORT_ATTRIBUTES64', 'CRYPT_UINT_BLOB', 'PRTL_RELATIVE_NAME_U', 'LOAD_DLL_DEBUG_INFO', 'PCERT_RDN_VALUE_BLOB', 'PPSAPI_WORKING_SET_EX_INFORMATION', 'CRYPT_ATTRIBUTES', 'API_SET_NAMESPACE_ARRAY_V4', 'TOKEN_ORIGIN', 'CMSG_ENVELOPED_ENCODE_INFO', 'PALPC_DIRECT_ATTR64', '_OBJECT_DIRECTORY_INFORMATION', 'PUNICODE_STRING', '_ANON_PROCESS_MITIGATION_EXTENSION_POINT_DISABLE_POLICY_UNION', 'ALPC_SECURITY_ATTR64', 'PFILE_FULL_EA_INFORMATION', 'SERVICE_STATUS_PROCESS', 'CMSG_RECIPIENT_ENCRYPTED_KEY_ENCODE_INFO', 'PIMAGE_OPTIONAL_HEADER32', '_TOKEN_SECURITY_ATTRIBUTES_INFORMATION', '_CMSG_SIGNER_INFO', 'MIB_TCP6ROW_OWNER_PID', '_DEBUG_SYMBOL_ENTRY', 'LPCONTEXT64', 'ALPC_SERVER_INFORMATION_TMP_OUT', 'PFILE_ALL_INFORMATION', 'PDEBUG_PROCESSOR_IDENTIFICATION_ALL', 'PFILE_FS_DEVICE_INFORMATION', '_MIB_TCP6ROW_OWNER_PID', 'PPROCESS_MEMORY_COUNTERS', 'SID_AND_ATTRIBUTES_HASH', '_FILE_NAME_INFORMATION', 'PACE_HEADER', '_DEBUG_OFFSET_REGION', 'PROCESS_BASIC_INFORMATION', 'OSVERSIONINFOW', 'LPSECURITY_ATTRIBUTES', 'OSVERSIONINFOA', 'EXCEPTION_POINTERS64', '_PSAPI_WORKING_SET_BLOCK64', '_KEY_VALUE_PARTIAL_INFORMATION', 'DEBUG_LAST_EVENT_INFO_EXIT_THREAD', 'PMIB_IFROW', 'LPLOAD_DLL_DEBUG_INFO', '_ALPC_WORK_ON_BEHALF_ATTR', 'PSIP_INDIRECT_DATA', 'PFILE_FS_SIZE_INFORMATION', 'PDEBUG_STACK_FRAME', 'PROCESS_MITIGATION_DYNAMIC_CODE_POLICY', 'CLAIM_SECURITY_ATTRIBUTE_OCTET_STRING_VALUE', '_JIT_DEBUG_INFO', '_DEBUG_EVENT', 'PTOKEN_STATISTICS', 'EVT_VARIANT', 'PMIB_UDPROW_OWNER_PID', 'PTOKEN_PRIMARY_GROUP', 'MULTI_QI', 'LPEXIT_THREAD_DEBUG_INFO', 'WINTRUST_BLOB_INFO', 'PUBLIC_OBJECT_BASIC_INFORMATION', 'RTL_RELATIVE_NAME_U', '_PROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY', '_DEBUG_SPECIFIC_FILTER_PARAMETERS', '_CRL_ENTRY', 'BITMAPCOREINFO', 'FILE_FULL_EA_INFORMATION', 'PPSAPI_WORKING_SET_INFORMATION64', '_ACCESS_ALLOWED_CALLBACK_ACE', 'FILE_STANDARD_INFORMATION', 'PSYSTEM_RESOURCE_ATTRIBUTE_ACE', 'THREAD_BASIC_INFORMATION', 'PCLAIM_SECURITY_ATTRIBUTES_INFORMATION', 'REFIID', '_LOAD_DLL_DEBUG_INFO', 'PFILE_IS_REMOTE_DEVICE_INFORMATION', 'PFILE_FS_ATTRIBUTE_INFORMATION', '_ANON_PROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY_UNION', '_CRYPT_HASH_MESSAGE_PARA', 'PALPC_SECURITY_ATTR64', 'PALPC_HANDLE_ATTR32', 'PIMAGE_EXPORT_DIRECTORY', '_TMPUNION_CMSG_KEY_AGREE_RECIPIENT_ENCODE_INFO', 'DISPPARAMS', 'PCRYPTCATMEMBER', 'FILE_IO_PRIORITY_HINT_INFORMATION', 'ACE_HEADER', 'tagBITMAP', 'tagBITMAPINFOHEADER', 'PDEBUG_PROCESSOR_IDENTIFICATION_ARM', 'TOKEN_GROUPS_AND_PRIVILEGES', '_IMAGE_BASE_RELOCATION', 'SECURITY_DESCRIPTOR', 'LPBITMAPCOREINFO', 'SYSTEM_MANDATORY_LABEL_ACE', '_STARTUPINFOEXW', '_MIB_TCPROW', 'STARTUPINFOEXW', '_CRYPT_KEY_PROV_PARAM', 'TOKEN_STATISTICS', '_IMAGE_NT_HEADERS', 'PFILE_STANDARD_INFORMATION', 'PFILE_ACCESS_INFORMATION', 'PROCESS_MITIGATION_BINARY_SIGNATURE_POLICY', 'TOKEN_PRIVILEGES', '_RPC_IF_ID', 'COAUTHINFO', 'SOLE_AUTHENTICATION_SERVICE', 'PACCESS_DENIED_OBJECT_ACE', 'PIMAGE_BASE_RELOCATION', '_TMP_lowmid', '_ALPC_TOKEN_ATTR', 'SIP_INDIRECT_DATA', 'PPSAPI_WORKING_SET_INFORMATION32', 'PCTL_CONTEXT', 'KEY_VALUE_FULL_INFORMATION', 'PCERT_KEY_CONTEXT', 'REFCLSID', 
'PCCTL_CONTEXT', 'LSA_UNICODE_STRING', 'PCRYPT_UINT_BLOB', 'PPORT_MESSAGE64', '_SYMBOL_INFO', 'LPSYSTEMTIME', 'IID', 'PCRYPT_BIT_BLOB', 'PSPC_LINK', '_RIP_INFO', 'PTOKEN_DEFAULT_DACL', 'OBJECT_DIRECTORY_INFORMATION', 'PPSAPI_WORKING_SET_BLOCK64', '_ALPC_SECURITY_ATTR32', '_CREATE_PROCESS_DEBUG_INFO', 'PDEBUG_MODULE_PARAMETERS', 'IO_STATUS_BLOCK', 'PCURDIR', 'EXIT_THREAD_DEBUG_INFO', 'PSID_AND_ATTRIBUTES', 'ALPC_SECURITY_ATTR32', 'PIMAGEHLP_MODULE64', '_CLAIM_SECURITY_ATTRIBUTE_V1', 'ALPC_DATA_VIEW_ATTR32', 'API_SET_NAMESPACE_ENTRY_V6', 'DEBUG_PROCESSOR_IDENTIFICATION_X86', 'API_SET_NAMESPACE_ENTRY_V4', '_EVT_VARIANT', 'PFILE_POSITION_INFORMATION', 'PFILE_FS_LABEL_INFORMATION', 'PCRYPT_KEY_VERIFY_MESSAGE_PARA', '_TMP_UNION_DEBUG_INFO', 'PFILE_IO_PRIORITY_HINT_INFORMATION', 'PCERT_CHAIN_ELEMENT', 'PSYSTEM_MODULE32', 'PMEMORY_BASIC_INFORMATION32', 'PAPI_SET_NAMESPACE_ARRAY_V4', 'PAPI_SET_NAMESPACE_ARRAY_V2', 'PPROCESS_INSTRUMENTATION_CALLBACK_INFORMATION', '_PSAPI_WORKING_SET_INFORMATION64', 'PROCESS_MEMORY_COUNTERS', 'PBITMAPINFOHEADER', 'LPCREATE_THREAD_DEBUG_INFO', 'PCRYPT_DER_BLOB', 'MEMORY_BASIC_INFORMATION', 'PDEBUG_SYMBOL_SOURCE_ENTRY', 'PCTL_USAGE', 'PFILE_FS_SECTOR_SIZE_INFORMATION', '_CMSG_KEY_TRANS_RECIPIENT_ENCODE_INFO', 'PSAPI_WORKING_SET_BLOCK64', 'CERT_RDN_VALUE_BLOB', '_ALPC_DIRECT_ATTR32', 'PALPC_CONTEXT_ATTR64', '_SYSTEM_MODULE_INFORMATION32', 'CERT_ENHKEY_USAGE', 'SYSTEM_HANDLE_INFORMATION', 'CERT_ISSUER_SERIAL_NUMBER', 'PSAPI_WORKING_SET_BLOCK', 'PRGBTRIPLE', '_SYSTEM_ALARM_OBJECT_ACE', 'IMAGE_NT_HEADERS64', 'PFILE_FS_FULL_SIZE_INFORMATION', 'CREATE_THREAD_DEBUG_INFO', 'SYSTEM_AUDIT_OBJECT_ACE', 'PBITMAPCOREINFO', '_TMP_UNION_IO_STATUS_BLOCK', 'PRTL_OSVERSIONINFOEXW', 'BG_JOB_TIMES', '_SYSTEM_VERIFIER_INFORMATION', 'RPC_IF_ID', '_CONTEXT32', '_PORT_MESSAGE64', '_EXCEPTION_POINTERS32', '_TOKEN_SECURITY_ATTRIBUTE_OCTET_STRING_VALUE', 'PALPC_DATA_VIEW_ATTR64', 'PMIB_UDPTABLE_OWNER_PID', '_TOKEN_SECURITY_ATTRIBUTE_V1_UNION', 'SYSTEM_MODULE32', '_ACCESS_DENIED_ACE', '_DEBUG_EXCEPTION_FILTER_PARAMETERS', 'CERT_STRONG_SIGN_PARA', '_TOKEN_ELEVATION', '_GUID', 'PCERT_REVOCATION_INFO', 'PACL', 'PKEY_VALUE_BASIC_INFORMATION', 'SIP_INDIRECT_DATA_', '_IP_INTERFACE_INFO', 'WINTRUST_CERT_INFO_', '_PORT_MESSAGE_TMP_SUBSTRUCT_S1', '_PORT_MESSAGE_TMP_SUBSTRUCT_S2', 'RTL_OSVERSIONINFOW', 'IMAGE_IMPORT_BY_NAME', '_ALPC_DATA_VIEW_ATTR64', 'PFILE_FS_VOLUME_INFORMATION', '_STARTUPINFOEXA', 'PEB', 'API_SET_VALUE_ARRAY_V2', '_TOKEN_MANDATORY_LABEL', 'CLIENT_ID64', '_MIB_UDPTABLE_OWNER_PID', 'CERT_USAGE_MATCH', 'PMODLOAD_DATA', 'SYSTEM_MODULE_INFORMATION32', '_API_SET_NAMESPACE_V6', 'PIMAGE_SECTION_HEADER', 'TMPUNION_CMSG_CTRL_DECRYPT_PARA', 'PSYSTEM_ALARM_CALLBACK_OBJECT_ACE', 'DEBUG_PROCESSOR_IDENTIFICATION_ALL', 'PPSAPI_WORKING_SET_EX_BLOCK32', '_CLAIM_SECURITY_ATTRIBUTE_RELATIVE_V1_UNION', '_TOKEN_ORIGIN', '_ALPC_HANDLE_ATTR64', '_MIB_TCP6TABLE_OWNER_PID', '_CLIENT_ID', '_API_SET_NAMESPACE_ENTRY', 'POBJECT_DIRECTORY_INFORMATION', '_CERT_KEY_CONTEXT', '_ENUM_SERVICE_STATUS_PROCESSA', 'LPVARIANT', 'DECIMAL', 'SID_AND_ATTRIBUTES', '_ACL', 'EXCEPTION_RECORD64', '_ENUM_SERVICE_STATUS_PROCESSW', 'SYMBOL_INFO', '_CRYPT_KEY_PROV_INFO', 'PSYSTEM_MODULE_INFORMATION64', '_LUID_AND_ATTRIBUTES', 'PALPC_SECURITY_ATTR', 'WINTRUST_CATALOG_INFO', 'PPROCESS_MITIGATION_ASLR_POLICY', '_FILE_GET_EA_INFORMATION', 'ALPC_CONTEXT_ATTR', '_EXCEPTION_RECORD64', 'PRLIST_ENTRY', 'POSVERSIONINFOA', '_CERT_SIMPLE_CHAIN', 'LUID', 'CLIENT_ID', 'IMAGE_OPTIONAL_HEADER64', 'tagRGBTRIPLE', 'CTL_INFO', '_CERT_CHAIN_ENGINE_CONFIG', 
'POSVERSIONINFOW', 'TMP_SPC_LINK_UNION', '_IMAGE_FILE_HEADER', 'FILE_MODE_INFORMATION', 'CRYPT_ATTRIBUTE_TYPE_VALUE', 'PALPC_DATA_VIEW_ATTR', '_CMSG_RECIPIENT_ENCODE_INFO', '_FILE_STREAM_INFORMATION', 'PLSA_UNICODE_STRING', '_CTL_INFO', 'PLDR_DATA_TABLE_ENTRY', '_FILE_FS_SIZE_INFORMATION', '_CALLFRAME_MARSHALCONTEXT', 'STRING', 'LPSAFEARRAYBOUND', 'ALPC_WORK_ON_BEHALF_ATTR', 'PCONTEXT32', 'PDEBUG_SYMBOL_PARAMETERS', 'PMIB_UDP6ROW_OWNER_PID', 'BITMAPCOREHEADER', 'PALPC_MESSAGE_ATTRIBUTES', '_TOKEN_SECURITY_ATTRIBUTE_V1', 'CERT_EXTENSIONS', 'TOKEN_SECURITY_ATTRIBUTE_V1_UNION', 'LPBITMAPINFOHEADER', '_SID_AND_ATTRIBUTES', 'MIB_IFROW', 'SYSTEM_PROCESS_TRUST_LABEL_ACE', 'LPSTARTUPINFOW', 'PTOKEN_ORIGIN', 'PEVENTLOGRECORD', '_FILE_MODE_INFORMATION', 'PM128A', 'TOKEN_SECURITY_ATTRIBUTES_INFORMATION_UNION', 'LPSTARTUPINFOA', '_CERT_STRONG_SIGN_SERIALIZED_INFO', 'ALPC_PORT_ATTRIBUTES32', '_FILE_FS_ATTRIBUTE_INFORMATION', 'DEBUG_SYMBOL_ENTRY', 'CLAIM_SECURITY_ATTRIBUTE_RELATIVE_V1', 'PFILE_NAME_INFORMATION', '_CRYPT_ATTRIBUTES', '_EXIT_THREAD_DEBUG_INFO', 'PFILE_ATTRIBUTE_TAG_INFORMATION', 'PCERT_PUBLIC_KEY_INFO', '_PROCESS_MITIGATION_IMAGE_LOAD_POLICY', 'PMIB_TCP6TABLE_OWNER_PID', 'PCMSG_RECIPIENT_ENCODE_INFO', 'TOKEN_SECURITY_ATTRIBUTE_OCTET_STRING_VALUE', 'tagMULTI_QI', 'PSECURITY_ATTRIBUTES', 'PROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY', 'MEMORY_BASIC_INFORMATION64', '_TMP_signscale', '_API_SET_NAMESPACE_ENTRY_V6', 'BITMAPINFO', 'CRYPTCATMEMBER', 'CERT_CONTEXT', 'PDEBUG_PROCESSOR_IDENTIFICATION_AMD64', 'TOKEN_APPCONTAINER_INFORMATION', 'PDEBUG_LAST_EVENT_INFO_EXIT_THREAD', 'PSTRING', 'tagSAFEARRAY', 'LPCLSID', '_DEBUG_LAST_EVENT_INFO_LOAD_MODULE', '_CMSG_STREAM_INFO', 'PWOW64_CONTEXT', 'PPROCESS_MITIGATION_DYNAMIC_CODE_POLICY', 'CRYPT_BIT_BLOB', 'ACCESS_ALLOWED_ACE', 'DEBUG_EXCEPTION_FILTER_PARAMETERS', 'PACCESS_ALLOWED_ACE', '_CRYPT_ENCRYPT_MESSAGE_PARA', 'FILE_INTERNAL_INFORMATION', 'FILE_ALL_INFORMATION', 'MIB_TCPROW_OWNER_PID', 'FILE_NETWORK_OPEN_INFORMATION', 'PMIB_TCP6ROW_OWNER_PID', 'PWINTRUST_SGNR_INFO', 'PDEBUG_SYMBOL_ENTRY', '_FILE_EA_INFORMATION', 'IMAGE_IMPORT_DESCRIPTOR', 'LPRGBTRIPLE', 'PEXCEPTION_RECORD64', 'CRYPTCATMEMBER_', 'PLUID', 'FILE_STREAM_INFORMATION', '_CERT_EXTENSIONS', 'PIMAGE_IMPORT_BY_NAME', 'SPC_LINK', 'PAPI_SET_VALUE_ENTRY_V2', 'LPSERVICE_STATUS', 'PSAPI_WORKING_SET_EX_BLOCK', 'PCRYPT_ALGORITHM_IDENTIFIER', 'CLAIM_SECURITY_ATTRIBUTES_INFORMATION', 'LPOUTPUT_DEBUG_STRING_INFO', 'CRYPT_ATTRIBUTE', 'PCRYPT_VERIFY_MESSAGE_PARA', 'CERT_BLOB', 'PEXCEPTION_POINTERS64', 'CERT_SIMPLE_CHAIN', '_ANON_PROCESS_MITIGATION_DEP_POLICY_BITFIELD', 'PSYSTEMTIME', 'PDEBUG_LAST_EVENT_INFO_EXCEPTION', 'PTHREAD_BASIC_INFORMATION', '_SHITEMID', 'SYSTEM_SCOPED_POLICY_ID_ACE', '_MEMORY_BASIC_INFORMATION64', '_ANON_PROCESS_MITIGATION_BINARY_SIGNATURE_POLICY_UNION', 'PCERT_TRUST_STATUS', 'LPVARIANTARG', 'OUTPUT_DEBUG_STRING_INFO', 'CLSID', 'PUBLIC_OBJECT_TYPE_INFORMATION', 'IMAGE_BASE_RELOCATION', 'PCLAIM_SECURITY_ATTRIBUTE_V1', '_SERVICE_STATUS', 'FILE_GET_EA_INFORMATION', '_TOKEN_GROUPS_AND_PRIVILEGES', '_FILE_IS_REMOTE_DEVICE_INFORMATION', 'CMSG_SIGNER_INFO', 'PTOKEN_PRIVILEGES', 'PRTL_UNLOAD_EVENT_TRACE32', 'ENUM_SERVICE_STATUS_PROCESSW', 'TOKEN_LINKED_TOKEN', 'PIMAGE_NT_HEADERS64', 'PBITMAPCOREHEADER', 'PCRYPT_ATTRIBUTE', 'IMAGE_EXPORT_DIRECTORY', 'PIP_ADAPTER_INDEX_MAP', 'PIDLIST_ABSOLUTE', '_FILE_INTERNAL_INFORMATION', '_RTL_UNLOAD_EVENT_TRACE64', '_tagVARIANT', 'BG_JOB_PROGRESS', '_SYSTEM_ALARM_ACE', '_PSAPI_WORKING_SET_BLOCK32', 'FILE_BASIC_INFORMATION', 'PCRYPT_OBJID_BLOB', 
'CRYPT_ATTR_BLOB', 'M128A', 'CTL_USAGE', '_SYSTEM_AUDIT_CALLBACK_ACE', '_XSAVE_FORMAT_64', 'SYSTEM_ALARM_CALLBACK_OBJECT_ACE', 'PCERT_STRONG_SIGN_PARA', '_RTL_UNLOAD_EVENT_TRACE', '_TMPUNION_CERT_ID', 'GUID', '_PROCESS_INSTRUMENTATION_CALLBACK_INFORMATION', '_PEB', '_CMSG_MAIL_LIST_RECIPIENT_ENCODE_INFO', '_OSVERSIONINFOEXW', '_DEBUG_MODULE_AND_ID', '_PROCESS_MITIGATION_DYNAMIC_CODE_POLICY', '_FLOATING_SAVE_AREA', 'DEBUG_SYMBOL_PARAMETERS', 'VARIANT', 'PCRYPT_DIGEST_BLOB', '_OSVERSIONINFOEXA', '_COSERVERINFO', 'PCRYPT_INTEGER_BLOB', 'PCERT_BLOB', '_ALPC_CONTEXT_ATTR32', '_FILE_STANDARD_INFORMATION', '_PORT_MESSAGE_TMP_UNION_U2', '_PORT_MESSAGE_TMP_UNION_U1', '_PSAPI_WORKING_SET_INFORMATION', 'PEVT_VARIANT', 'CERT_CHAIN_CONTEXT', '_DEBUG_BREAKPOINT_PARAMETERS', 'PTOKEN_MANDATORY_POLICY', 'PSAPI_WORKING_SET_EX_INFORMATION32', '_DEBUG_VALUE', 'PROCESS_MITIGATION_DEP_POLICY', 'PDEBUG_PROCESSOR_IDENTIFICATION_IA64', '_SYSTEM_AUDIT_OBJECT_ACE', 'PORT_MESSAGE32', 'CRYPT_OBJID_BLOB', '_STARTUPINFOA', '_ALPC_HANDLE_ATTR', 'PALPC_SECURITY_ATTR32', '_STARTUPINFOW', 'CLAIM_SECURITY_ATTRIBUTE_V1', 'PPEB_LDR_DATA', 'PFILE_GET_EA_INFORMATION', 'FILE_FS_SECTOR_SIZE_INFORMATION', 'DEBUG_PROCESSOR_IDENTIFICATION_ARM', 'PALPC_PORT_ATTRIBUTES32', 'IP_INTERFACE_INFO', '_PSAPI_WORKING_SET_EX_INFORMATION32', 'ALPC_CONTEXT_ATTR32', 'RGBQUAD', 'BITMAPINFOHEADER', 'CRYPT_VERIFY_MESSAGE_PARA', 'EXCEPTION_RECORD', '_DEBUG_PROCESSOR_IDENTIFICATION_AMD64', 'TOKEN_GROUPS', 'COAUTHIDENTITY', 'PMIB_IFTABLE', 'PSECURITY_QUALITY_OF_SERVICE', 'PSYSTEM_AUDIT_CALLBACK_ACE', '_CURDIR', 'CERT_REVOCATION_INFO', '_ANON_PROCESS_MITIGATION_IMAGE_LOAD_POLICY_UNION', 'PEB_LDR_DATA', 'RTL_UNLOAD_EVENT_TRACE', 'PSAPI_WORKING_SET_INFORMATION32', '_CLIENT_ID64', 'ACCESS_DENIED_OBJECT_ACE', 'PWINTRUST_BLOB_INFO', '_TOKEN_USER', '_ANON_evt_variant_sub_union', 'tagSAFEARRAYBOUND', 'PPSAPI_WORKING_SET_EX_INFORMATION64', 'PMIB_TCPTABLE_OWNER_PID', '_FILE_DISPOSITION_INFORMATION', 'CALLFRAMEINFO', 'PPORT_MESSAGE32', 'LPOSVERSIONINFOA', 'PCTL_ENTRY', 'ALPC_HANDLE_ATTR64', 'MIB_IPADDRTABLE', '_CRYPT_ATTRIBUTE_TYPE_VALUE', 'PMIB_IPADDRROW_XP', 'PACCESS_ALLOWED_CALLBACK_OBJECT_ACE', 'PPROCESS_MITIGATION_STRICT_HANDLE_CHECK_POLICY', 'PCMSG_STREAM_INFO', 'EXIT_PROCESS_DEBUG_INFO', 'WNDCLASSEXA', 'PMIB_TCPROW_OWNER_PID', 'SYSTEM_ALARM_OBJECT_ACE', 'PORT_MESSAGE64', 'WNDCLASSEXW', 'PSYSTEM_MODULE64', 'MIB_UDPROW_OWNER_PID', '_CLAIM_SECURITY_ATTRIBUTE_FQBN_VALUE', 'PCRYPT_HASH_BLOB', '_CRYPTOAPI_BLOB', 'LPBITMAP', '_TOKEN_GROUPS', '_DEBUG_LAST_EVENT_INFO_EXCEPTION', 'SAFEARRAYBOUND', '__tagBRECORD', '_PSAPI_WORKING_SET_BLOCK', 'PSID_IDENTIFIER_AUTHORITY', 'tagRGBQUAD', 'EVENTLOG_FULL_INFORMATION', 'POSVERSIONINFOEXA', '_TOKEN_MANDATORY_POLICY', 'SYSTEMTIME', 'tagBITMAPFILEHEADER', '_RTL_DRIVE_LETTER_CURDIR', 'PIMAGE_OPTIONAL_HEADER64', 'IMAGE_SECTION_HEADER', 'ACL', 'WOW64_CONTEXT', '_PROCESS_MITIGATION_STRICT_HANDLE_CHECK_POLICY', '_CERT_CHAIN_PARA', 'PACCESS_ALLOWED_CALLBACK_ACE', 'PPROCESS_INFORMATION', 'LPBITMAPINFO', '_CERT_STRONG_SIGN_PARA', '_BG_JOB_TIMES', '_SYSTEM_ALARM_CALLBACK_OBJECT_ACE', 'TOKEN_SECURITY_ATTRIBUTE_FQBN_VALUE', 'PDEBUG_EXCEPTION_FILTER_PARAMETERS', 'PCCRL_CONTEXT', 'TMP_lowmid_union', 'TMPUNION_CERT_ID', 'CERT_NAME_BLOB', '_ALPC_SECURITY_ATTR64', 'PFILE_STREAM_INFORMATION', '_IMAGE_IMPORT_BY_NAME', 'LPBITMAPCOREHEADER', 'DATA_BLOB', 'PCLAIM_SECURITY_ATTRIBUTE_OCTET_STRING_VALUE', '_CRL_INFO', '_BG_FILE_PROGRESS', '_PROCESS_MEMORY_COUNTERS_EX', '_TOKEN_PRIVILEGES', 'PTOKEN_OWNER', 'WOW64_FLOATING_SAVE_AREA', 
'_IO_STATUS_BLOCK', 'PCCERT_STRONG_SIGN_PARA', 'PLUID_AND_ATTRIBUTES', '_TOKEN_OWNER', 'RIP_INFO', 'FILE_FS_DRIVER_PATH_INFORMATION', '_CRYPT_ATTRIBUTE', '_CRYPT_VERIFY_MESSAGE_PARA', 'PTOKEN_LINKED_TOKEN', 'FILE_FS_LABEL_INFORMATION', 'PSAPI_WORKING_SET_EX_BLOCK32', 'SYSTEM_MODULE64', '_ACE_HEADER', 'tagBITMAPCOREHEADER', 'PCCERT_CONTEXT', 'PROCESS_MITIGATION_IMAGE_LOAD_POLICY', 'PCRL_ENTRY', 'SPC_SP_OPUS_INFO', 'PPROCESS_MITIGATION_BINARY_SIGNATURE_POLICY', '_DEBUG_SYMBOL_SOURCE_ENTRY', 'PRTL_OSVERSIONINFOW', 'tagBITMAPINFO', 'LPENUM_SERVICE_STATUS_PROCESSA', '_KEY_VALUE_BASIC_INFORMATION', 'PDEBUG_BREAKPOINT_PARAMETERS', '_API_SET_VALUE_ENTRY_V2', 'DEBUG_STACK_FRAME', '_WOW64_FLOATING_SAVE_AREA', 'TMP_UNION_IO_STATUS_BLOCK', 'LPCONTEXT32', 'FILE_POSITION_INFORMATION', '_IMAGE_SECTION_HEADER', '_COAUTHINFO', 'PALPC_WORK_ON_BEHALF_ATTR', 'CRYPT_ENCODE_PARA', 'WINTRUST_SGNR_INFO_', 'PIO_STATUS_BLOCK', 'PPROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY', 'PIMAGE_FILE_HEADER', 'RTL_USER_PROCESS_PARAMETERS', 'FILE_FS_SIZE_INFORMATION', 'FILE_FS_FULL_SIZE_INFORMATION', '_PROCESS_MITIGATION_DEP_POLICY', 'API_SET_VALUE_ENTRY_V2', 'PDATA_BLOB', '_TMP_SPC_LINK_UNION', 'CERT_ID', 'PCRYPT_ATTRIBUTE_TYPE_VALUE', 'PCRYPT_ATTRIBUTES', 'FILE_DISPOSITION_INFORMATION', 'DEBUG_PROCESSOR_IDENTIFICATION_IA64', 'CERT_STRONG_SIGN_SERIALIZED_INFO', '_FILE_FS_VOLUME_INFORMATION', 'PCERT_EXTENSION', 'PSYSTEM_AUDIT_OBJECT_ACE', '_PROCESS_MITIGATION_SYSTEM_CALL_DISABLE_POLICY', 'PFILE_FS_OBJECTID_INFORMATION', 'SHITEMID', '_ALPC_MESSAGE_ATTRIBUTES', 'CMSG_STREAM_INFO', '_MIB_IFTABLE', '_ANON_PROCESS_MITIGATION_IMAGE_LOAD_POLICY_BITFIELD', 'NPRGBTRIPLE', 'LPRIP_INFO', 'CLAIM_SECURITY_ATTRIBUTES_INFORMATION_UNION', 'FILE_FS_VOLUME_INFORMATION', 'PANSI_STRING', '_PSAPI_WORKING_SET_EX_BLOCK32', 'PSYSTEM_PROCESS_TRUST_LABEL_ACE', '_SYSTEMTIME', '_FILE_IO_PRIORITY_HINT_INFORMATION', 'SERVICE_STATUS', 'PDEBUG_MODULE_AND_ID', '_CERT_REVOCATION_CRL_INFO', 'VARIANTARG', 'CLAIM_SECURITY_ATTRIBUTE_RELATIVE_V1_UNION', 'PDEBUG_SPECIFIC_FILTER_PARAMETERS', 'WINTRUST_FILE_INFO_', 'API_SET_VALUE_ARRAY_V4', '_CMSG_KEY_AGREE_RECIPIENT_ENCODE_INFO', 'PSPC_SP_OPUS_INFO', '_IMAGE_OPTIONAL_HEADER', 'XSAVE_FORMAT_32', 'ACCESS_DENIED_ACE', '_SID_AND_ATTRIBUTES_HASH', '_ANON_PROCESS_MITIGATION_SYSTEM_CALL_DISABLE_POLICY_UNION', 'LPJIT_DEBUG_INFO', 'PXSAVE_FORMAT_32', 'PDEBUG_VALUE', '_ALPC_CONTEXT_ATTR', 'WINTRUST_CATALOG_INFO_', '_CERT_CHAIN_ELEMENT', 'BG_FILE_INFO', '_TOKEN_ACCESS_INFORMATION', 'FILE_FS_DEVICE_INFORMATION', 'PFILE_MODE_INFORMATION', 'EXCEPINFO', 'PBITMAPFILEHEADER', '_TMP_CONTEXT64_SUBUNION', 'PDEBUG_OFFSET_REGION', 'PFILE_BASIC_INFORMATION', 'CERT_CHAIN_ENGINE_CONFIG', 'PISECURITY_DESCRIPTOR', 'TOKEN_ACCESS_INFORMATION', 'CRYPT_INTEGER_BLOB', 'PPROCESS_BASIC_INFORMATION', '_ACCESS_ALLOWED_CALLBACK_OBJECT_ACE', 'PWINTRUST_CATALOG_INFO', 'STARTUPINFOEXA', 'LPBITMAPFILEHEADER', '_EXCEPTION_POINTERS64', 'FILE_FS_CONTROL_INFORMATION', 'ACCESS_DENIED_CALLBACK_ACE', 'TOKEN_SECURITY_ATTRIBUTES_INFORMATION', 'PTOKEN_APPCONTAINER_INFORMATION', '_SYSTEM_MODULE64', '_TMPUNION_CMSG_MAIL_LIST_RECIPIENT_ENCODE_INFO', 'DEBUG_VALUE_TMP_UNION', '_TMP_CMSG_RECIPIENT_ENCODE_INFO_UNION', 'WINTRUST_FILE_INFO', 'REFGUID', '_SERVICE_STATUS_PROCESS', 'MIB_IPADDRROW', 'CMSG_CTRL_DECRYPT_PARA', 'PFILE_FS_DRIVER_PATH_INFORMATION', 'ALPC_DIRECT_ATTR32', '_CREATE_THREAD_DEBUG_INFO', 'MIB_IPADDRROW_XP', 'MEMORY_BASIC_INFORMATION32', 'SECURITY_ATTRIBUTES', 'IMAGE_NT_HEADERS32', 'POSVERSIONINFOEXW', 'PCMSG_RECIPIENT_ENCRYPTED_KEY_ENCODE_INFO', 
'MIB_TCPTABLE_OWNER_PID', 'LPDEBUG_EVENT', 'PSPC_SERIALIZED_OBJECT', 'LPCREATE_PROCESS_DEBUG_INFO', 'TMP_CMSG_RECIPIENT_ENCODE_INFO_UNION', 'ALPC_HANDLE_ATTR', 'TOKEN_ELEVATION', '_FILE_FULL_EA_INFORMATION', '_ACCESS_DENIED_OBJECT_ACE', '_ACL_SIZE_INFORMATION', '_ACCESS_ALLOWED_ACE', 'COSERVERINFO', 'CTL_ENTRY', 'PCRL_INFO', '_CLIENT_ID32', 'CRYPT_ENCRYPT_MESSAGE_PARA', 'CERT_CHAIN_PARA', 'PALPC_CONTEXT_ATTR', '_DEBUG_PROCESSOR_IDENTIFICATION_X86', 'PALPC_DATA_VIEW_ATTR32', '_DEBUG_PROCESSOR_IDENTIFICATION_ALL', '_PROCESS_MITIGATION_BINARY_SIGNATURE_POLICY', 'PCMSG_SIGNER_INFO', 'PMIB_UDP6TABLE_OWNER_PID', 'SECURITY_QUALITY_OF_SERVICE', 'CRYPT_DER_BLOB', '_API_SET_NAMESPACE_ARRAY', '_CLAIM_SECURITY_ATTRIBUTE_OCTET_STRING_VALUE', 'ACCESS_ALLOWED_CALLBACK_OBJECT_ACE', 'PSYMBOL_INFO', 'BG_FILE_PROGRESS', 'PALPC_DIRECT_ATTR32', 'PCRYPT_ATTR_BLOB', '_TOKEN_STATISTICS', 'CRYPT_DIGEST_BLOB', 'PCMSG_KEY_AGREE_RECIPIENT_ENCODE_INFO', '_SYSTEM_PROCESS_TRUST_LABEL_ACE', 'PPUBLIC_OBJECT_BASIC_INFORMATION', 'CRYPT_KEY_VERIFY_MESSAGE_PARA', 'PCRL_CONTEXT', 'CLIENT_ID32', 'PALPC_TOKEN_ATTR', 'CRYPTCATATTRIBUTE_', 'ALPC_DIRECT_ATTR', '_API_SET_NAMESPACE_ENTRY_V4', 'LPTHREADENTRY32', 'CATALOG_INFO_', 'SAFEARRAY', '_ALPC_SECURITY_ATTR', 'PAPI_SET_NAMESPACE_ENTRY_V2', 'PCANSI_STRING', '_OVERLAPPED', 'PSYSTEM_AUDIT_CALLBACK_OBJECT_ACE', '_SECURITY_ATTRIBUTES', 'PTOKEN_MANDATORY_LABEL', '_API_SET_NAMESPACE_ENTRY_V2', '_API_SET_NAMESPACE_ARRAY_V4', '_IMAGE_NT_HEADERS64', 'PCRYPT_KEY_PROV_INFO', 'TOKEN_DEFAULT_DACL', 'DEBUG_LAST_EVENT_INFO_EXCEPTION', 'CRYPT_SIGN_MESSAGE_PARA', 'PTOKEN_SECURITY_ATTRIBUTE_V1', 'PROCESS_MITIGATION_EXTENSION_POINT_DISABLE_POLICY', 'SYSTEM_RESOURCE_ATTRIBUTE_ACE', 'PPSAPI_WORKING_SET_EX_BLOCK64', 'PMEMORY_BASIC_INFORMATION', '_IMAGE_DATA_DIRECTORY', 'tagWNDCLASSEXW', '_ANON_PEB_SYSTEM_DEPENDENT_03', '_CRL_CONTEXT', 'OBJECT_ATTRIBUTES', 'PCRYPT_DATA_BLOB', '_ANON_PEB_SYSTEM_DEPENDENT_07', 'PROCESS_MITIGATION_STRICT_HANDLE_CHECK_POLICY', '_THREAD_BASIC_INFORMATION', '_ANON_PROCESS_MITIGATION_ASLR_POLICY_UNION', '_CALLFRAMEINFO', 'PTOKEN_SECURITY_ATTRIBUTE_FQBN_VALUE', 'tagWNDCLASSEXA', '_PORT_MESSAGE32_TMP_UNION', '_LUID', 'PFILE_INTERNAL_INFORMATION', 'PCRYPT_HASH_MESSAGE_PARA', 'PLIST_ENTRY', 'DEBUG_OFFSET_REGION', '_EVENTLOGRECORD', 'EXCEPTION_DEBUG_INFO', '_CMSG_ENVELOPED_ENCODE_INFO', 'CRYPT_KEY_PROV_INFO', 'PCERT_ENHKEY_USAGE', '_OSVERSIONINFOW', 'PCERT_ID', 'PRTL_USER_PROCESS_PARAMETERS', 'PCRYPTCATATTRIBUTE', 'PAPI_SET_NAMESPACE_ENTRY', 'PSYSTEM_SCOPED_POLICY_ID_ACE', '_ANON_PROCESS_MITIGATION_CONTROL_FLOW_GUARD_POLICY_BITFIELD', '_OSVERSIONINFOA', 'CERT_REVOCATION_CRL_INFO', 'PBITMAPINFO', '_PROCESS_BASIC_INFORMATION', 'LPSTARTUPINFOEXA', 'PROCESSENTRY32', '_MIB_UDP6TABLE_OWNER_PID', '__PUBLIC_OBJECT_TYPE_INFORMATION', 'LUID_AND_ATTRIBUTES', '_CERT_USAGE_MATCH', 'ACL_REVISION_INFORMATION', 'tagEXCEPINFO', 'PWINTRUST_FILE_INFO', '_ANON_PROCESS_MITIGATION_STRICT_HANDLE_CHECK_POLICY_BITFIELD', '_ANON_PROCESS_MITIGATION_STRICT_HANDLE_CHECK_POLICY_UNION', '_CRYPT_DECRYPT_MESSAGE_PARA', '_MIB_IFROW', 'PCERT_SIMPLE_CHAIN', '_MIB_TCPTABLE_OWNER_PID', '_ACCESS_ALLOWED_OBJECT_ACE', 'PACL_SIZE_INFORMATION', 'PSAPI_WORKING_SET_EX_INFORMATION64', '_SECURITY_DESCRIPTOR', 'PPROCESS_MITIGATION_SYSTEM_CALL_DISABLE_POLICY', '_TMPUNION_CMSG_CTRL_DECRYPT_PARA', 'PCLAIM_SECURITY_ATTRIBUTE_OCTET_STRING_RELATIVE', 'PTOKEN_SOURCE', 'CERT_EXTENSION', '_BG_FILE_INFO', 'RTL_DRIVE_LETTER_CURDIR', '_TOKEN_SECURITY_ATTRIBUTES_INFORMATION_UNION', '_PEB_LDR_DATA', '_TMP_DUMMYSTRUCTNAME', 
'PCRYPT_ENCRYPT_MESSAGE_PARA', 'CRYPTCATATTRIBUTE', '_RTL_RELATIVE_NAME_U', 'ACCESS_ALLOWED_OBJECT_ACE', '_ANON_TMP_variant_sub_union', 'PCRYPT_DECRYPT_MESSAGE_PARA', '_MODLOAD_DATA', '_TOKEN_PRIMARY_GROUP', 'PPROCESS_MITIGATION_IMAGE_LOAD_POLICY', 'ALPC_DATA_VIEW_ATTR64', '_FILE_FS_FULL_SIZE_INFORMATION', 'ALPC_PORT_ATTRIBUTES64', 'LDR_DATA_TABLE_ENTRY', '_FILE_NETWORK_OPEN_INFORMATION', 'OVERLAPPED', 'PDEBUG_LAST_EVENT_INFO_EXIT_PROCESS', 'RGBTRIPLE', '_SYSTEM_AUDIT_CALLBACK_OBJECT_ACE', 'FILE_ALIGNMENT_INFORMATION', '_IMAGE_IMPORT_DESCRIPTOR', 'ENUM_SERVICE_STATUS_PROCESSA', 'PSOLE_AUTHENTICATION_SERVICE', '_DEBUG_VALUE_TMP_SUBSTRUCT3', '_DEBUG_VALUE_TMP_SUBSTRUCT2', '_DEBUG_VALUE_TMP_SUBSTRUCT1', '_SYSTEM_HANDLE', 'LPENUM_SERVICE_STATUS_PROCESSW', 'tagPROCESSENTRY32', '_ANON_PROCESS_MITIGATION_BINARY_SIGNATURE_POLICY_BITFIELD', 'LPEVENTLOG_FULL_INFORMATION', 'PWINTRUST_DATA', 'API_SET_NAMESPACE_V6', '_EXCEPTION_DEBUG_INFO', '_PROCESS_MEMORY_COUNTERS', 'PCERT_CHAIN_CONTEXT', 'SID_IDENTIFIER_AUTHORITY', '_PSAPI_WORKING_SET_EX_BLOCK64', 'THREADENTRY32', '_PUBLIC_OBJECT_BASIC_INFORMATION', 'PPSAPI_WORKING_SET_BLOCK32', 'MIB_IFTABLE', '_CERT_ISSUER_SERIAL_NUMBER', 'tagSOLE_AUTHENTICATION_SERVICE', '_CERT_INFO', 'MODLOAD_DATA', '_API_SET_VALUE_ARRAY_V2', '_DEBUG_LAST_EVENT_INFO_SYSTEM_ERROR', 'SYSTEM_VERIFIER_INFORMATION', 'PEXCEPTION_RECORD32', 'PAPI_SET_VALUE_ENTRY', 'API_SET_NAMESPACE_ENTRY_V2', 'ALPC_HANDLE_ATTR32', 'PPROCESSENTRY32W', 'PCRYPT_SIGN_MESSAGE_PARA', 'SPC_SERIALIZED_OBJECT', '_MIB_UDPROW_OWNER_PID', '_CRYPT_ENCODE_PARA', '_ANON_PROCESS_MITIGATION_DYNAMIC_CODE_POLICY_UNION', 'PSAPI_WORKING_SET_BLOCK32', '_API_SET_VALUE_ARRAY_V4', 'PALPC_DIRECT_ATTR', 'PDEBUG_LAST_EVENT_INFO_SYSTEM_ERROR', 'DEBUG_MODULE_AND_ID', 'DEBUG_LAST_EVENT_INFO_LOAD_MODULE', '_SYSTEM_MODULE_INFORMATION64', 'PPSAPI_WORKING_SET_INFORMATION', 'LPPROCESSENTRY32W', 'PFILE_FS_CONTROL_INFORMATION', '_ANON_PROCESS_MITIGATION_SYSTEM_CALL_DISABLE_POLICY_BITFIELD', 'tagPROCESSENTRY32W', 'PKEY_VALUE_FULL_INFORMATION', 'TOKEN_USER', 'CRL_INFO', '_API_SET_NAMESPACE_ARRAY_V2', 'CMSG_MAIL_LIST_RECIPIENT_ENCODE_INFO', 'PMIB_IPADDRTABLE', 'PCRL_BLOB', 'LPWNDCLASSEXA', 'PCERT_EXTENSIONS', 'MIB_TCPROW', 'PROCESS_MEMORY_COUNTERS_EX', 'LPOVERLAPPED', 'PCLAIM_SECURITY_ATTRIBUTE_FQBN_VALUE', '_FILE_BASIC_INFORMATION', 'PACCESS_ALLOWED_OBJECT_ACE', 'PDEBUG_LAST_EVENT_INFO_LOAD_MODULE', 'PALPC_CONTEXT_ATTR32', 'MIB_UDP6TABLE_OWNER_PID', 'PIMAGE_NT_HEADERS32', 'PCERT_CHAIN_PARA', 'TMP_CERT_STRONG_SIGN_PARA_UNION_TYPE', '_TOKEN_LINKED_TOKEN', 'EXCEPTION_POINTERS32', 'ACCESS_ALLOWED_CALLBACK_ACE', 'WINTRUST_CERT_INFO', 'CRL_BLOB', 'PCLAIM_SECURITY_ATTRIBUTE_RELATIVE_V1', 'POBJECT_ATTRIBUTES', 'PCERT_CHAIN_ENGINE_CONFIG', '_DEBUG_LAST_EVENT_INFO_BREAKPOINT', 'PTOKEN_ELEVATION', 'PFILE_EA_INFORMATION', 'KEY_VALUE_PARTIAL_INFORMATION', '_CERT_EXTENSION', '_LIST_ENTRY', '_PORT_MESSAGE32', 'PCERT_REVOCATION_CRL_INFO', 'RTL_OSVERSIONINFOEXW', 'PFILE_DISPOSITION_INFORMATION', '_CTL_ENTRY', '_IMAGE_OPTIONAL_HEADER64', '_PROCESS_INFORMATION', '_CERT_CHAIN_CONTEXT', '_SYSTEM_HANDLE_INFORMATION', '_MIB_UDP6ROW_OWNER_PID', 'LPUNLOAD_DLL_DEBUG_INFO', 'EVENTLOGRECORD', '_LSA_UNICODE_STRING', '_MIB_IPADDRTABLE', 'PDEBUG_REGISTER_DESCRIPTION', 'CRL_CONTEXT', 'PPSAPI_WORKING_SET_EX_BLOCK', 'PAPI_SET_VALUE_ARRAY_V2', '_WOW64_CONTEXT', 'ALPC_TOKEN_ATTR', '_UNLOAD_DLL_DEBUG_INFO', '_ALPC_DATA_VIEW_ATTR32', 'CREATE_PROCESS_DEBUG_INFO', '_COAUTHIDENTITY', 'tagDISPPARAMS', 'PIP_INTERFACE_INFO', '_ANON_PEB_SYSTEM_DEPENDENT_02', '_ANON_PEB_UNION_1', 
'_ANON_PEB_UNION_2', '_SYSTEM_RESOURCE_ATTRIBUTE_ACE', 'WINTRUST_SGNR_INFO', 'PWINTRUST_CERT_INFO', 'PALPC_HANDLE_ATTR64', '_TOKEN_APPCONTAINER_INFORMATION', 'PRTL_UNLOAD_EVENT_TRACE', '_SYSTEM_ALARM_CALLBACK_ACE', '_ANON_PEB_SYSTEM_DEPENDENT_06', '_XSAVE_FORMAT_32', '_ITEMIDLIST', '_ANON_PROCESS_MITIGATION_DYNAMIC_CODE_POLICY_BITFIELD', '_KEY_VALUE_FULL_INFORMATION', 'PRTLP_CURDIR_REF', '_OUTPUT_DEBUG_STRING_INFO', 'TOKEN_MANDATORY_POLICY', 'PPEB', 'PRTL_DRIVE_LETTER_CURDIR', '_PORT_MESSAGE64_TMP_UNION', 'PSYSTEM_ALARM_OBJECT_ACE', '_SYSTEM_AUDIT_ACE', '_CONTEXT64', 'SYSTEM_MODULE_INFORMATION64', '_CLAIM_SECURITY_ATTRIBUTES_INFORMATION_UNION', 'WINTRUST_BLOB_INFO_', '_ANON_PROCESS_MITIGATION_EXTENSION_POINT_DISABLE_POLICY_BITFIELD', 'PCMSG_CTRL_DECRYPT_PARA', '_ALPC_SERVER_INFORMATION_TMP_IN', 'PIMAGE_IMPORT_DESCRIPTOR', '_WINTRUST_DATA', 'PCRYPT_ENCODE_PARA', '_FILE_FS_SECTOR_SIZE_INFORMATION', '_ALPC_SERVER_INFORMATION_TMP_OUT', '_IP_ADAPTER_INDEX_MAP', 'FILE_ACCESS_INFORMATION', 'PROCESS_INFORMATION', 'BITMAPFILEHEADER', 'CONTEXT64', '_CERT_ID', '_FILE_ATTRIBUTE_TAG_INFORMATION', 'JIT_DEBUG_INFO', 'PCERT_NAME_BLOB', 'CRYPT_DECRYPT_MESSAGE_PARA', 'PCCERT_SIMPLE_CHAIN', '_SECURITY_QUALITY_OF_SERVICE', 'IMAGE_OPTIONAL_HEADER32', 'PALPC_PORT_ATTRIBUTES64', 'CRYPT_HASH_BLOB', 'PCIDLIST_ABSOLUTE', 'ALPC_CONTEXT_ATTR64', 'PAPI_SET_NAMESPACE_ENTRY_V4', '_CERT_REVOCATION_INFO', '_DEBUG_LAST_EVENT_INFO_UNLOAD_MODULE', 'PCRYPT_KEY_PROV_PARAM', 'PPSAPI_WORKING_SET_BLOCK', '_DEBUG_PROCESSOR_IDENTIFICATION_IA64', 'CLAIM_SECURITY_ATTRIBUTE_OCTET_STRING_RELATIVE', 'tagTHREADENTRY32', 'CMSG_RECIPIENT_ENCODE_INFO', 'PCTL_INFO', 'PCCERT_CHAIN_CONTEXT', '_PSAPI_WORKING_SET_EX_INFORMATION64', '_CLAIM_SECURITY_ATTRIBUTE_RELATIVE_V1', '_MIB_IPADDRROW_XP', 'UNICODE_STRING', 'PROCESS_MITIGATION_ASLR_POLICY', 'PCMSG_ENVELOPED_ENCODE_INFO', '_CTL_CONTEXT', '_DEBUG_REGISTER_DESCRIPTION', 'TOKEN_MANDATORY_LABEL', 'PSYSTEM_ALARM_CALLBACK_ACE', 'PSAPI_WORKING_SET_EX_BLOCK64', 'SYSTEM_ALARM_ACE', '_IMAGE_EXPORT_DIRECTORY', '_DEBUG_LAST_EVENT_INFO_EXIT_PROCESS', 'DEBUG_LAST_EVENT_INFO_BREAKPOINT', 'DEBUG_LAST_EVENT_INFO_EXIT_PROCESS', 'RTL_UNLOAD_EVENT_TRACE32', 'PSYSTEM_MANDATORY_LABEL_ACE', 'PBITMAP', 'TOKEN_PRIMARY_GROUP', 'TOKEN_OWNER', 'ALPC_SECURITY_ATTR', 'PSYSTEM_HANDLE_INFORMATION', 'TMPUNION_CMSG_MAIL_LIST_RECIPIENT_ENCODE_INFO', 'CERT_CHAIN_ELEMENT', 'DEBUG_SYMBOL_SOURCE_ENTRY', '_ACL_REVISION_INFORMATION', 'CERT_KEY_CONTEXT', '_SYSTEM_MANDATORY_LABEL_ACE', 'PSYSTEM_VERIFIER_INFORMATION', '_MIB_TCPROW_OWNER_PID', '_CERT_PUBLIC_KEY_INFO', '_CERT_TRUST_STATUS', '_DEBUG_SYMBOL_PARAMETERS', 'PXSAVE_FORMAT_64', '_PSAPI_WORKING_SET_EX_INFORMATION', '_RTLP_CURDIR_REF', 'XSAVE_FORMAT_64', 'PCERT_TRUST_LIST_INFO', 'PMEMORY_BASIC_INFORMATION64', 'CALLFRAME_MARSHALCONTEXT', 'PCERT_INFO', '_OBJECT_ATTRIBUTES', 'PSYSTEM_MODULE_INFORMATION32', '_SPC_SERIALIZED_OBJECT', 'CERT_TRUST_LIST_INFO', 'FILE_FS_OBJECTID_INFORMATION', 'PTOKEN_GROUPS_AND_PRIVILEGES', '_PSAPI_WORKING_SET_INFORMATION32', 'DEBUG_LAST_EVENT_INFO_SYSTEM_ERROR', '_FILE_FS_OBJECTID_INFORMATION', 'LIST_ENTRY', 'TMP_UNION_DEBUG_INFO', 'PROCESS_INSTRUMENTATION_CALLBACK_INFORMATION', '_FILE_ALIGNMENT_INFORMATION', '_DEBUG_PROCESSOR_IDENTIFICATION_ALPHA', '_SYSTEM_SCOPED_POLICY_ID_ACE', 'PCERT_STRONG_SIGN_SERIALIZED_INFO', '_CLAIM_SECURITY_ATTRIBUTES_INFORMATION', '_DEBUG_LAST_EVENT_INFO_EXIT_THREAD', '_ANON_PROCESS_MITIGATION_ASLR_POLICY_BITFIELD', 'ALPC_SERVER_INFORMATION_TMP_IN', 'PTOKEN_GROUPS', 'PTOKEN_ACCESS_INFORMATION', 'CALLFRAMEPARAMINFO', 
'CATALOG_INFO', '_SYSTEM_MODULE32', '_RTL_UNLOAD_EVENT_TRACE32', 'PALPC_HANDLE_ATTR', 'PPUBLIC_OBJECT_TYPE_INFORMATION', '_tagBRECORD', '_LDR_DATA_TABLE_ENTRY', 'PSID_AND_ATTRIBUTES_HASH', 'DEBUG_PROCESSOR_IDENTIFICATION_ALPHA', '_CALLFRAMEPARAMINFO', 'PACCESS_DENIED_CALLBACK_OBJECT_ACE', 'PTHREADENTRY32', 'DEBUG_VALUE', 'ALPC_SERVER_INFORMATION', 'PIMAGE_DATA_DIRECTORY', 'PROCESSENTRY32W', 'TOKEN_SOURCE', '_CRYPT_BIT_BLOB', 'CERT_TRUST_STATUS', '_ALPC_DIRECT_ATTR', 'FILE_IS_REMOTE_DEVICE_INFORMATION', 'SYSTEM_ALARM_CALLBACK_ACE', 'SYSTEM_AUDIT_CALLBACK_ACE', 'CRL_ENTRY', 'PSAPI_WORKING_SET_EX_INFORMATION', 'PTOKEN_USER', 'PDEBUG_LAST_EVENT_INFO_UNLOAD_MODULE', 'LPPROCESSENTRY32', 'DEBUG_BREAKPOINT_PARAMETERS', 'TMP_DUMMYSTRUCTNAME', 'FILE_FS_ATTRIBUTE_INFORMATION', 'PCERT_USAGE_MATCH', 'PCMSG_KEY_TRANS_RECIPIENT_ENCODE_INFO', 'ACL_SIZE_INFORMATION', 'TMP_signscale_union', '_MEMORY_BASIC_INFORMATION32', 'KEY_VALUE_BASIC_INFORMATION', 'PEXCEPTION_POINTERS32', 'IMAGE_FILE_HEADER', 'DEBUG_PROCESSOR_IDENTIFICATION_AMD64', 'SYSTEM_AUDIT_ACE', 'API_SET_NAMESPACE_ENTRY', 'FLOATING_SAVE_AREA', '_PSAPI_WORKING_SET_EX_BLOCK', 'CRYPT_DATA_BLOB', 'PTOKEN_SECURITY_ATTRIBUTES_INFORMATION', 'PTOKEN_SECURITY_ATTRIBUTE_OCTET_STRING_VALUE', 'RTL_UNLOAD_EVENT_TRACE64', '_FILE_FS_DEVICE_INFORMATION', '_PROCESS_MITIGATION_EXTENSION_POINT_DISABLE_POLICY', '_CRYPT_ALGORITHM_IDENTIFIER', 'PACCESS_DENIED_ACE', '_FILE_ALL_INFORMATION', 'LPEXIT_PROCESS_DEBUG_INFO', '_CRYPT_KEY_VERIFY_MESSAGE_PARA', 'API_SET_NAMESPACE_ARRAY_V2', 'ALPC_MESSAGE_ATTRIBUTES', 'PCERT_ISSUER_SERIAL_NUMBER', 'PPROCESS_MITIGATION_DEP_POLICY', '_ALPC_HANDLE_ATTR32', '_EXIT_PROCESS_DEBUG_INFO', 'LPWNDCLASSEXW', 'LPOSVERSIONINFOW', '_ACCESS_DENIED_CALLBACK_ACE', 'PEXCEPTION_RECORD', 'PCMSG_MAIL_LIST_RECIPIENT_ENCODE_INFO', 'IMAGE_DATA_DIRECTORY', 'BITMAP', 'PKEY_VALUE_PARTIAL_INFORMATION', '_FILE_FS_DRIVER_PATH_INFORMATION', 'PFILE_NETWORK_OPEN_INFORMATION', 'PORT_MESSAGE_TMP_UNION', 'LPPROCESS_INFORMATION', '_IMAGEHLP_MODULE64', '_EVENTLOG_FULL_INFORMATION', 'tagBITMAPCOREINFO', '_PROCESS_MITIGATION_ASLR_POLICY', '_ANON_PROCESS_MITIGATION_DEP_POLICY_UNION', '_API_SET_VALUE_ENTRY', 'PWNDCLASSEXW', 'LPSTARTUPINFOEXW', 'LPGUID', '_FILE_POSITION_INFORMATION', 'PWNDCLASSEXA', 'PPROCESSENTRY32', 'API_SET_NAMESPACE_ARRAY', 'PMIB_TCPROW', '_FILE_FS_CONTROL_INFORMATION', 'FILE_NAME_INFORMATION', '_CLAIM_SECURITY_ATTRIBUTE_OCTET_STRING_RELATIVE', '_ALPC_DATA_VIEW_ATTR', 'TOKEN_SECURITY_ATTRIBUTE_V1', 'UNLOAD_DLL_DEBUG_INFO', '_STRING', '_ALPC_DIRECT_ATTR64', '_TOKEN_DEFAULT_DACL', '_ALPC_CONTEXT_ATTR64', 'TMP_WINTRUST_UNION_TYPE', 'PCONTEXT64', 'PACL_REVISION_INFORMATION', 'PPSAPI_WORKING_SET_EX_INFORMATION32', 'CERT_PUBLIC_KEY_INFO', 'DEBUG_SPECIFIC_FILTER_PARAMETERS', 'API_SET_VALUE_ENTRY', 'PCCERT_CHAIN_ELEMENT', '_DEBUG_VALUE_TMP_UNION', 'DEBUG_LAST_EVENT_INFO_UNLOAD_MODULE', '_MEMORY_BASIC_INFORMATION', '_RTL_USER_PROCESS_PARAMETERS', 'TMP_CONTEXT64_SUBUNION', '_EXCEPTION_RECORD32', 'MIB_TCP6TABLE_OWNER_PID', 'CLAIM_SECURITY_ATTRIBUTE_FQBN_VALUE', 'ALPC_DATA_VIEW_ATTR', '_CTL_USAGE', 'CMSG_KEY_TRANS_RECIPIENT_ENCODE_INFO', 'LPOSVERSIONINFOEXW', 'DEBUG_MODULE_PARAMETERS', '_FILE_ACCESS_INFORMATION', '_CERT_CONTEXT', 'LPOSVERSIONINFOEXA', 'DEBUG_REGISTER_DESCRIPTION', '_TOKEN_SECURITY_ATTRIBUTE_FQBN_VALUE', 'STARTUPINFOA', 'PAPI_SET_NAMESPACE_ARRAY', 'tagDEC', 'STARTUPINFOW', 'EXCEPTION_RECORD32', 'MIB_UDP6ROW_OWNER_PID', 'PDEBUG_PROCESSOR_IDENTIFICATION_ALPHA', 'PCERT_CONTEXT', '_SID_IDENTIFIER_AUTHORITY', 'PDEBUG_LAST_EVENT_INFO_BREAKPOINT', 
'_ALPC_PORT_ATTRIBUTES32', 'CRYPT_ALGORITHM_IDENTIFIER', 'IMAGEHLP_MODULE64', '_CERT_TRUST_LIST_INFO', 'WINTRUST_DATA', 'SYSTEM_HANDLE', '_DEBUG_MODULE_PARAMETERS', 'OSVERSIONINFOEXW', 'ACCESS_DENIED_CALLBACK_OBJECT_ACE', '_M128A', 'CTL_CONTEXT', '_TMP_WINTRUST_UNION_TYPE', 'OSVERSIONINFOEXA', '_CMSG_CTRL_DECRYPT_PARA', 'PSAPI_WORKING_SET_INFORMATION64', 'CRYPT_HASH_MESSAGE_PARA', 'ITEMIDLIST', '_SPC_SP_OPUS_INFO', 'PPROCESS_MITIGATION_EXTENSION_POINT_DISABLE_POLICY', '_ACCESS_DENIED_CALLBACK_OBJECT_ACE', 'PDEBUG_PROCESSOR_IDENTIFICATION_X86', 'PSAPI_WORKING_SET_INFORMATION', '_EXCEPTION_RECORD', 'CLAIM_SECURITY_ATTRIBUTE_V1_UNION', '_BG_JOB_PROGRESS', 'DEBUG_EVENT', 'LPSERVICE_STATUS_PROCESS', 'CONTEXT32', '_DEBUG_PROCESSOR_IDENTIFICATION_ARM', 'FILE_EA_INFORMATION', 'RTLP_CURDIR_REF', 'SYSTEM_AUDIT_CALLBACK_OBJECT_ACE', 'PROCESS_MITIGATION_SYSTEM_CALL_DISABLE_POLICY', 'ALPC_DIRECT_ATTR64', 'CMSG_KEY_AGREE_RECIPIENT_ENCODE_INFO', 'NPBITMAP', 'TMPUNION_CMSG_KEY_AGREE_RECIPIENT_ENCODE_INFO', 'DEBUG_VALUE_TMP_SUBSTRUCT1', 'DEBUG_VALUE_TMP_SUBSTRUCT3', 'DEBUG_VALUE_TMP_SUBSTRUCT2', '_CRYPT_SIGN_MESSAGE_PARA'])
enums = set(['PROCESS_INFORMATION_CLASS', 'BG_JOB_PROXY_USAGE', '_BG_JOB_PROXY_USAGE', '_IMAGEHLP_SYMBOL_TYPE_INFO', 'CLSCTX', '_THREAD_INFORMATION_CLASS', '_TASK_RUNLEVEL_TYPE', 'tagCLSCTX', '_TASK_ACTION_TYPE', 'FS_INFORMATION_CLASS', 'WBEM_GENUS_TYPE', 'BG_JOB_TYPE', 'tag_WBEM_GENERIC_FLAG_TYPE', 'IO_PRIORITY_HINT', 'EVT_SYSTEM_PROPERTY_ID', 'ALPC_PORT_INFORMATION_CLASS', 'EVT_EVENT_METADATA_PROPERTY_ID', 'tag_WBEM_CHANGE_FLAG_TYPE', '_SC_ENUM_TYPE', '_CALLFRAME_COPY', '_IO_PRIORITY_HINT', 'SE_OBJECT_TYPE', '_EVT_SYSTEM_PROPERTY_ID', 'PSID_NAME_USE', 'TASK_CREATION', 'PTOKEN_ELEVATION_TYPE', '_MIB_TCP_STATE', '_TASK_TRIGGER_TYPE2', 'TASK_TRIGGER_TYPE2', 'tag_WBEM_CONNECT_OPTIONS', 'TASK_COMPATIBILITY', 'SC_STATUS_TYPE', 'EVT_LOG_PROPERTY_ID', '_EVT_RENDER_FLAGS', '_ACL_INFORMATION_CLASS', '_ALPC_PORT_INFORMATION_CLASS', '_INTERNAL_IF_OPER_STATUS', 'NET_FW_PROFILE_TYPE2', 'PTHREAD_INFORMATION_CLASS', '_TASK_INSTANCES_POLICY', 'NET_FW_IP_PROTOCOL_', '_SE_OBJECT_TYPE', 'BG_ERROR_CONTEXT', 'TASK_STATE', 'tagMSHLFLAGS', 'EVT_RENDER_CONTEXT_FLAGS', '_VARENUM', '_TASK_ENUM_FLAGS', '_BG_JOB_TYPE', 'PPROCESS_MITIGATION_POLICY', 'VARENUM', 'INTERNAL_IF_OPER_STATUS', 'TYPEKIND', 'NET_FW_ACTION', 'SC_ENUM_TYPE', '_WELL_KNOWN_SID_TYPE', 'TASK_ENUM_FLAGS', '_MEMORY_INFORMATION_CLASS', 'tag_WBEM_QUERY_FLAG_TYPE', 'UDP_TABLE_CLASS', '_KEY_VALUE_INFORMATION_CLASS', 'TASK_INSTANCES_POLICY', 'tag_WBEM_GENUS_TYPE', 'PROCESS_MITIGATION_POLICY', '_SECURITY_IMPERSONATION_LEVEL', '_EVT_QUERY_FLAGS', '_PROCESS_MITIGATION_POLICY', '_SC_STATUS_TYPE', 'PFILE_INFORMATION_CLASS', '_UDP_TABLE_CLASS', '_BG_JOB_PRIORITY', 'EVT_CHANNEL_TYPE', '_PROCESSINFOCLASS', '_EVT_CHANNEL_CONFIG_PROPERTY_ID', '_EVT_LOG_PROPERTY_ID', '_BG_ERROR_CONTEXT', 'PTOKEN_INFORMATION_CLASS', 'WBEM_CONNECT_OPTIONS', 'ACL_INFORMATION_CLASS', 'PALPC_MESSAGE_INFORMATION_CLASS', 'PTOKEN_TYPE', 'TASK_RUNLEVEL_TYPE', 'SECURITY_IMPERSONATION_LEVEL', '_EVT_OPEN_LOG_FLAGS', 'BG_JOB_PRIORITY', 'CALLFRAME_COPY', 'SYM_TYPE', 'TOKEN_INFORMATION_CLASS', 'tagTOKEN_TYPE', 'TASK_LOGON_TYPE', 'COINIT', 'tag_WBEM_TIMEOUT_TYPE', 'THREAD_INFORMATION_CLASS', '_TASK_LOGON_TYPE', 'NET_FW_RULE_DIRECTION', 'EVT_OPEN_LOG_FLAGS', '_EVT_CHANNEL_TYPE', 'CALLFRAME_WALK', '_TOKEN_INFORMATION_CLASS', 'tagTYPEKIND', 'tagCOINIT', 'TASK_ACTION_TYPE', 'TASK_RUN_FLAGS', 'NET_FW_PROFILE_TYPE2_', '_FS_INFORMATION_CLASS', 'EVT_QUERY_FLAGS', '_EVT_VARIANT_TYPE', 'WBEM_QUERY_FLAG_TYPE', 'OBJECT_INFORMATION_CLASS', 'ALPC_MESSAGE_INFORMATION_CLASS', '_ALPC_MESSAGE_INFORMATION_CLASS', 'MIB_TCP_STATE', '_TCP_TABLE_CLASS', 'PROCESSINFOCLASS', 'SECTION_INHERIT', '_SYSTEM_INFORMATION_CLASS', 'SYSTEM_INFORMATION_CLASS', 'EVT_CHANNEL_CONFIG_PROPERTY_ID', 'tag_WBEMSTATUS', 'RTL_PATH_TYPE', 'EVT_FORMAT_MESSAGE_FLAGS', '_TOKEN_ELEVATION_TYPE', 'PSECURITY_IMPERSONATION_LEVEL', 'SID_NAME_USE', '_TASK_CREATION', 'EVT_CHANNEL_ISOLATION_TYPE', '_EVT_PUBLISHER_METADATA_PROPERTY_ID', 'KEY_VALUE_INFORMATION_CLASS', 'MEMORY_INFORMATION_CLASS', '_TASK_COMPATIBILITY', '_SID_NAME_USE', 'WBEM_GENERIC_FLAG_TYPE', 'NET_FW_MODIFY_STATE', 'NET_FW_RULE_DIRECTION_', '_EVT_RENDER_CONTEXT_FLAGS', 'BG_JOB_STATE', 'MSHLFLAGS', 'EVT_VARIANT_TYPE', 'WBEM_TIMEOUT_TYPE', 'FILE_INFORMATION_CLASS', '_FILE_INFORMATION_CLASS', 'NET_FW_ACTION_', 'tagCALLFRAME_WALK', '_EVT_FORMAT_MESSAGE_FLAGS', '_BG_JOB_STATE', '_RTL_PATH_TYPE', 'WBEM_CHANGE_FLAG_TYPE', 'TCP_TABLE_CLASS', 'IMAGEHLP_SYMBOL_TYPE_INFO', 'TOKEN_ELEVATION_TYPE', '_EVT_CHANNEL_ISOLATION_TYPE', 'NET_FW_MODIFY_STATE_', '_TASK_STATE', 'WBEMSTATUS', 'EVT_RENDER_FLAGS', 
'_OBJECT_INFORMATION_CLASS', 'WELL_KNOWN_SID_TYPE', '_EVT_EVENT_METADATA_PROPERTY_ID', 'TOKEN_TYPE', 'NET_FW_IP_PROTOCOL', '_SECTION_INHERIT', 'EVT_PUBLISHER_METADATA_PROPERTY_ID'])
functions = set(['EvtGetChannelConfigProperty', 'NtQuerySymbolicLinkObject', 'GetStdHandle', 'GetDriveTypeW', 'GetFileVersionInfoSizeA', 'CertFreeCertificateContext', 'GetModuleBaseNameA', 'ConvertSidToStringSidW', 'GetComputerNameA', 'CryptAcquireCertificatePrivateKey', 'AddVectoredContinueHandler', 'GetDriveTypeA', 'CryptEncrypt', 'DebugBreak', 'EvtOpenLog', 'GetProcessDEPPolicy', 'GetProcessMitigationPolicy', 'ConvertSidToStringSidA', 'GetExtendedTcpTable', 'CreatePipe', 'CreateWellKnownSid', 'GetSecurityDescriptorGroup', 'NtWow64ReadVirtualMemory64', 'DebugActiveProcessStop', 'SymInitialize', 'RegOpenKeyExW', 'SymLoadModuleEx', 'LoadLibraryExW', 'CryptMsgGetParam', 'NtOpenSymbolicLinkObject', 'RegOpenKeyExA', 'QueryWorkingSetEx', 'CertEnumCTLsInStore', 'GetFirmwareEnvironmentVariableExW', 'CryptCATAdminCalcHashFromFileHandle2', 'CertCreateCertificateContext', 'EvtOpenEventMetadataEnum', 'CloseEventLog', 'CryptMsgUpdate', 'GetSecurityDescriptorDacl', 'NtAlpcDisconnectPort', 'WriteFile', 'GetCursorPos', 'WaitForSingleObject', 'EvtOpenPublisherMetadata', 'CoGetInterceptor', 'EvtRender', 'NtAlpcConnectPortEx', 'ConnectNamedPipe', 'GetExitCodeProcess', 'GetProcessImageFileNameW', 'CoInitializeSecurity', 'EnumClipboardFormats', 'AllocConsole', 'GetProcessImageFileNameA', 'NtQueryInformationFile', 'OpenClipboard', 'CryptDestroyKey', 'PeekNamedPipe', 'LookupPrivilegeValueA', 'EvtFormatMessage', 'NtAlpcDeleteSecurityContext', 'OpenServiceA', 'WriteProcessMemory', 'NtSetContextThread', 'FindNextVolumeA', 'RegQueryValueExA', 'CryptCATCatalogInfoFromContext', 'OpenServiceW', 'NtSetValueKey', 'LookupPrivilegeValueW', 'CreateTransaction', 'GetSecurityInfo', 'CryptEncodeObjectEx', 'FindNextVolumeW', 'RegQueryValueExW', 'NtQueryVirtualMemory', 'VerQueryValueA', 'UpdateProcThreadAttribute', 'CryptCATGetAttrInfo', 'GetSidSubAuthority', 'ConvertStringSecurityDescriptorToSecurityDescriptorW', 'LoadLibraryExA', 'SetConsoleCtrlHandler', 'NtAlpcCreateSectionView', 'ConvertStringSecurityDescriptorToSecurityDescriptorA', 'InitializeProcThreadAttributeList', 'GetModuleBaseNameW', 'NtQuerySystemInformation', 'NtEnumerateValueKey', 'GetSidSubAuthorityCount', 'CreateThread', 'CreateProcessAsUserA', 'GetExitCodeThread', 'SetNamedPipeHandleState', 'SetTcpEntry', 'CloseWindow', 'CoCreateInstanceEx', 'CreateProcessAsUserW', 'NtProtectVirtualMemory', 'SetPriorityClass', 'CoInitializeEx', 'EvtOpenPublisherEnum', 'SHGetPathFromIDListA', 'CryptCATGetMemberInfo', 'CryptExportKey', 'VirtualQuery', 'DebugActiveProcess', 'EvtGetPublisherMetadataProperty', 'CryptCATAdminAcquireContext', 'EqualSid', 'FindFirstVolumeW', 'LdrLoadDll', 'SetThreadToken', 'GetCurrentThreadId', 'GetExtendedUdpTable', 'SleepEx', 'CryptUIDlgViewContext', 'GetSecurityDescriptorRMControl', 'CreateToolhelp32Snapshot', 'RtlInitUnicodeString', 'GetFirmwareEnvironmentVariableExA', 'RegCloseKey', 'NtOpenEvent', 'NtSetInformationFile', 'GetVersionExW', 'OpenEventLogA', 'GetNumberOfEventLogRecords', 'CommitTransaction', 'GetVersionExA', 'GetSecurityDescriptorLength', 'CreateRemoteThread', 'NtAlpcSendWaitReceivePort', 'OpenProcessToken', 'SymLoadModuleExW', 'OpenProcess', 'EnumChildWindows', 'EvtGetLogInfo', 'GetProcAddress', 'CryptSignMessage', 'GlobalUnlock', 'GetComputerNameW', 'RtlAnsiStringToUnicodeString', 'SetWindowTextA', 'SetThreadContext', 'CertCompareCertificate', 'Wow64GetThreadContext', 'DebugBreakProcess', 'SymSetOptions', 'OpenEventLogW', 'SetWindowTextW', 'GetClipboardFormatNameA', 'FreeConsole', 'CertEnumCertificateContextProperties', 
'BringWindowToTop', 'Process32First', 'lstrcmpW', 'GetAce', 'NtGetContextThread', 'NtQueryLicenseValue', 'StartServiceW', 'GetDesktopWindow', 'CryptCATAdminEnumCatalogFromHash', 'QueryDosDeviceA', 'RemoveVectoredExceptionHandler', 'EmptyClipboard', 'EvtOpenChannelEnum', 'EvtNextChannelPath', 'VirtualAllocEx', 'NtCreateProcessEx', 'NtAlpcCreateSecurityContext', 'FindWindowW', 'EvtClose', 'GetInterfaceInfo', 'FindWindowA', 'GlobalAlloc', 'NtOpenDirectoryObject', 'GetWindowThreadProcessId', 'CryptCATAdminReleaseCatalogContext', 'SizeofResource', 'Wow64DisableWow64FsRedirection', 'VirtualFreeEx', 'MoveWindow', 'ContinueDebugEvent', 'GetSecurityDescriptorOwner', 'CloseClipboard', 'CertDuplicateCTLContext', 'SetThreadAffinityMask', 'NtQueryObject', 'NtFreeVirtualMemory', 'ApiSetResolveToHost', 'CryptVerifyMessageSignatureWithKey', 'CryptCATEnumerateCatAttr', 'DuplicateTokenEx', 'GetWindowModuleFileNameA', 'CryptVerifyMessageHash', 'EnumServicesStatusExW', 'NtSetInformationProcess', 'GetModuleHandleA', 'NtUnmapViewOfSection', 'GetWindowModuleFileNameW', 'Process32NextW', 'Wow64RevertWow64FsRedirection', 'ConvertStringSidToSidW', 'GlobalLock', 'GetModuleHandleW', 'EnumServicesStatusExA', 'TpCallbackSendAlpcMessageOnCompletion', 'DeleteProcThreadAttributeList', 'GetFileVersionInfoA', 'CreateProcessA', 'NtOpenKey', 'NtAlpcCreatePort', 'OpenSCManagerW', 'ObjectFromLresult', 'VirtualFree', 'GetFileVersionInfoW', 'OpenSCManagerA', 'NtQueryInformationProcess', 'EvtGetEventMetadataProperty', 'NtOpenSection', 'GetForegroundWindow', 'CopySid', 'CertAddCertificateContextToStore', 'GetFileVersionInfoSizeW', 'SetWindowPos', 'GetProcessId', 'EvtQuery', 'GetCurrentProcess', 'NtReadVirtualMemory', 'SymLoadModuleExA', 'MessageBoxA', 'WindowFromPoint', 'GetThreadContext', 'GetTokenInformation', 'SetStdHandle', 'PFXImportCertStore', 'NtCreateKey', 'WaitForDebugEvent', 'Wow64EnableWow64FsRedirection', 'CryptQueryObject', 'OpenThreadToken', 'GetSecurityDescriptorSacl', 'GetProcessMemoryInfo', 'CryptCATEnumerateMember', 'NtCreateFile', 'LoadLibraryW', 'Thread32First', 'CryptMsgOpenToEncode', 'OpenTransaction', 'CertDuplicateCertificateContext', 'LocalFree', 'CryptMsgVerifyCountersignatureEncodedEx', 'ResumeThread', 'GetMappedFileNameW', 'GetLogicalDriveStringsA', 'LoadResource', 'GetWindowTextW', 'GetLogicalDriveStringsW', 'QueryDosDeviceW', 'NtAlpcQueryInformationMessage', 'CryptCATAdminAcquireContext2', 'GetWindowTextA', 'GetCurrentThread', 'GetEventLogInformation', 'CryptVerifyMessageSignature', 'CertEnumCertificatesInStore', 'CryptMsgVerifyCountersignatureEncoded', 'OpenThread', 'GetClassInfoExW', 'CertStrToNameW', 'LookupPrivilegeNameW', 'GetModuleFileNameW', 'EnumWindows', 'RtlGetCompressionWorkSpaceSize', 'AddVectoredExceptionHandler', 'CertStrToNameA', 'GetClassInfoExA', 'ExitProcess', 'CryptCATEnumerateAttr', 'CertOpenStore', 'GetModuleFileNameA', 'GetAclInformation', 'LoadLibraryA', 'SetProcessMitigationPolicy', 'CloseServiceHandle', 'GetVolumeInformationA', 'NtQueryEaFile', 'GetMappedFileNameA', 'NtAlpcConnectPort', 'GetVolumeInformationW', 'Process32Next', 'MapViewOfFile', 'CryptGenKey', 'RtlDecompressBuffer', 'NtWriteVirtualMemory', 'GetVolumeNameForVolumeMountPointA', 'GetFirmwareEnvironmentVariableW', 'SetClipboardData', 'DebugSetProcessKillOnExit', 'NtSetEaFile', 'GetFirmwareEnvironmentVariableA', 'SymGetModuleInfo64', 'GetIpAddrTable', 'GetVolumeNameForVolumeMountPointW', 'ExitThread', 'CryptDecrypt', 'CryptDecodeObject', 'EvtGetObjectArraySize', 'ReadEventLogA', 'RtlGetUnloadEventTraceEx', 
'TerminateProcess', 'EvtOpenChannelConfig', 'NtAlpcCreatePortSection', 'ConvertSecurityDescriptorToStringSecurityDescriptorW', 'GetWindowRect', 'NtAlpcCreateResourceReserve', 'WinVerifyTrust', 'VirtualQueryEx', 'ReadEventLogW', 'SymGetTypeInfo', 'CryptCATAdminReleaseContext', 'GetClipboardFormatNameW', 'GetVersion', 'NtEnumerateSystemEnvironmentValuesEx', 'CryptHashCertificate', 'ConvertSecurityDescriptorToStringSecurityDescriptorA', 'StartServiceA', 'LookupAccountSidW', 'CertCreateSelfSignCertificate', 'TerminateThread', 'OpenBackupEventLogW', 'DeviceIoControl', 'AlpcInitializeMessageAttribute', 'FreeLibrary', 'GetLengthSid', 'LookupAccountSidA', 'Wow64SetThreadContext', 'VirtualProtect', 'DuplicateToken', 'OpenBackupEventLogA', 'NtAlpcAcceptConnectPort', 'GetParent', 'GetSystemMetrics', 'NtCreateThreadEx', 'GetWindowsDirectoryW', 'QueryWorkingSet', 'EvtCreateRenderContext', 'PfnCryptGetSignerCertificate', 'SHGetPathFromIDListW', 'CryptDecryptMessage', 'GetWindowsDirectoryA', 'NtAllocateVirtualMemory', 'VerQueryValueW', 'NtAlpcDeleteResourceReserve', 'StrStrIW', 'VirtualProtectEx', 'CreateFileMappingW', 'StrStrIA', 'CryptAcquireContextA', 'GetSidIdentifierAuthority', 'ReadProcessMemory', 'CreateFileTransactedA', 'NtQueryInformationThread', 'CertFindCertificateInStore', 'CryptMsgClose', 'CreateFileMappingA', 'Thread32Next', 'DuplicateHandle', 'CreateFileTransactedW', 'RtlDosPathNameToNtPathName_U', 'GetLongPathNameW', 'CertGetCertificateContextProperty', 'NtQueryDirectoryObject', 'GetProductInfo', 'NtCreateSection', 'NtAlpcQueryInformation', 'CryptMsgOpenToDecode', 'FindFirstVolumeA', 'FreeSid', 'GetSidLengthRequired', 'CryptMsgControl', 'CreateFileW', 'FindResourceA', 'CryptSignAndEncryptMessage', 'ShellExecuteW', 'NtWow64WriteVirtualMemory64', 'EvtNextPublisherId', 'CreateFileA', 'EvtNextEventMetadata', 'ShellExecuteA', 'IsValidSecurityDescriptor', 'NtQueryValueKey', 'GetLastError', 'RtlDecompressBufferEx', 'GetShortPathNameW', 'CreateNamedPipeW', 'GetSecurityDescriptorControl', 'GlobalFree', 'FindResourceW', 'AlpcGetMessageAttribute', 'AdjustTokenPrivileges', 'CryptReleaseContext', 'GetProcessTimes', 'GetIfTable', 'CryptEncryptMessage', 'SymFromName', 'CreateNamedPipeA', 'GetShortPathNameA', 'NtMapViewOfSection', 'GetNamedSecurityInfoW', 'MessageBoxW', 'NtAlpcDeletePortSection', 'GetCurrentProcessorNumber', 'RegGetValueW', 'LockResource', 'ConvertStringSidToSidA', 'GetLongPathNameA', 'SetTokenInformation', 'GetNamedSecurityInfoA', 'NtAlpcDeleteSectionView', 'CertGetCertificateChain', 'RegGetValueA', 'EvtGetObjectArrayProperty', 'Process32FirstW', 'SuspendThread', 'CoCreateInstance', 'RtlEqualUnicodeString', 'RealGetWindowClassW', 'lstrcmpA', 'RollbackTransaction', 'NtQueryVolumeInformationFile', 'NtQueryDirectoryFile', 'ReadFile', 'CryptCATAdminCalcHashFromFileHandle', 'GetClipboardData', 'CloseHandle', 'LookupPrivilegeNameA', 'CreateProcessW', 'GetPriorityClass', 'PFXExportCertStoreEx', 'RealGetWindowClassA', 'CryptAcquireContextW', 'CertFreeCTLContext', 'CertGetNameStringA', 'GetClassNameW', 'SymFromAddr', 'GetThreadId', 'CLSIDFromProgID', 'OpenEventW', 'NtAlpcRevokeSecurityContext', 'IsOS', 'GetClassNameA', 'Sleep', 'CertGetNameStringW', 'EvtNext', 'ZwDuplicateObject', 'RtlInitString', 'OpenEventA', 'VirtualAlloc'])
interfaces = set(['IUnknown', 'ITaskService', 'INetFwPolicy2', 'IRunningTask', 'IRunningTaskCollection', 'IActionCollection', 'IShellLinkW', 'IBackgroundCopyJob', 'INetFwRules', 'IAction', 'IWbemContext', 'INetFwServiceRestriction', 'IShellLinkA', 'ICallFrame', 'IWbemQualifierSet', 'ICallInterceptor', 'IWbemObjectSink', 'IBackgroundCopyManager', 'IDispatch', 'ICallFrameEvents', 'IPrincipal', 'ITaskSettings', 'IWbemLocator', 'IComHandlerAction', 'IExecAction', 'IBackgroundCopyFile', 'IEnumWbemClassObject', 'ITaskFolder', 'ICallFrameWalker', 'ITaskNamedValueCollection', 'IRegistrationInfo', 'IWbemServices', 'IIdleSettings', 'IEnumBackgroundCopyFiles', 'IEnumBackgroundCopyJobs', 'ITaskFolderCollection', 'IBackgroundCopyError', 'IRepetitionPattern', 'INetworkSettings', 'ITrigger', 'IWbemClassObject', 'IShowMessageAction', 'INetFwRule', 'ITaskNamedValuePair', 'ITaskDefinition', 'IPersistFile', 'IRegisteredTask', 'IEnumVARIANT', 'IRegisteredTaskCollection', 'IWbemCallResult', 'IEmailAction', 'IBackgroundCopyCallback', 'ITriggerCollection'])
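# Build zero-argument generators that yield (name, object) pairs for every
# name in a set, resolved against the given module via getattr.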
def generate_walker(namelist, target_module):
def my_walker():
for name in namelist:
yield name, getattr(target_module, name)
return my_walker
import windef as windef_module
windef_walker = generate_walker(windef, windef_module)
import winstructs as winstructs_module
structs_walker = generate_walker(structs, winstructs_module)
enums_walker = generate_walker(enums, winstructs_module)
import winfuncs as winfuncs_module
functions_walker = generate_walker(functions, winfuncs_module)
import interfaces as interfaces_module
interfaces_walker = generate_walker(interfaces, interfaces_module)
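# Usage sketch (assumes the modules imported above actually expose every
# listed name): each *_walker is a zero-argument generator of (name, object)
# pairs, e.g.
#   for name, struct in structs_walker():
#       print(name, struct)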
| 7,544.304348 | 124,672 | 0.855399 |
1df8a39d1f8c408d3f53c69ae3ff79ade2548192 | 1,057 | py | Python | duckql/functions/convert_timezone.py | Sibyx/duckql-python | 7780f145d643e6884178f146218551731984eea3 | [
"MIT"
] | 4 | 2020-04-15T09:35:15.000Z | 2021-11-11T13:03:46.000Z | duckql/functions/convert_timezone.py | Sibyx/duckql-python | 7780f145d643e6884178f146218551731984eea3 | [
"MIT"
] | 2 | 2020-04-08T12:10:56.000Z | 2020-04-15T09:14:44.000Z | duckql/functions/convert_timezone.py | Sibyx/duckql-python | 7780f145d643e6884178f146218551731984eea3 | [
"MIT"
] | 1 | 2020-04-15T09:11:39.000Z | 2020-04-15T09:11:39.000Z | from typing import Optional, Union
try:
from typing import Literal
except ImportError:
from typing_extensions import Literal
from ..functions.base import BaseFunction
from ..properties.constant import Constant
from ..properties.property import Property
from ..structures.cast_operator import CastOperator
from ..structures.case import Case
class ConvertTimezone(BaseFunction):
obj: Literal['functions.ConvertTimezone'] = 'functions.ConvertTimezone'
property: Union[Property, BaseFunction, Constant, CastOperator, Case]
date_from: Constant
date_to: Constant
    alias: Optional[str] = None
def to_sql(self) -> str:
"""
        TODO: CONVERT_TZ is MySQL/MariaDB-specific; PostgreSQL does not support it.
        https://dev.mysql.com/doc/refman/8.0/en/date-and-time-functions.html#function_convert-tz (MySQL)
        https://mariadb.com/kb/en/library/convert_tz/ (MariaDB)
        :return: str
"""
sql = f"CONVERT_TZ({self.property}, {self.date_from}, {self.date_to})"
if self.alias is not None:
sql = f"{sql} AS {self.alias}"
return sql
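# Illustrative result (operand construction is hypothetical; the rendered
# values come from duckql's Property/Constant models):
#   CONVERT_TZ(created_at, '+00:00', '+02:00') AS local_created_at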
| 30.2 | 102 | 0.698202 |
259a8b898575aecbeca4f2c355d5bbd6a72d6d56 | 1,298 | py | Python | test/test_ec2_configuration.py | httpsgithu/python-client | f85a530367cdabe458a11919ad35609b9bc0606b | [
"Apache-2.0"
] | 11 | 2016-05-30T17:16:45.000Z | 2021-06-11T19:32:59.000Z | test/test_ec2_configuration.py | httpsgithu/python-client | f85a530367cdabe458a11919ad35609b9bc0606b | [
"Apache-2.0"
] | 25 | 2016-05-02T23:05:19.000Z | 2020-11-18T22:43:20.000Z | test/test_ec2_configuration.py | httpsgithu/python-client | f85a530367cdabe458a11919ad35609b9bc0606b | [
"Apache-2.0"
] | 30 | 2016-04-29T17:17:11.000Z | 2022-02-11T04:58:37.000Z | # coding: utf-8
"""
Wavefront REST API
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import wavefront_api_client
from wavefront_api_client.models.ec2_configuration import EC2Configuration # noqa: E501
from wavefront_api_client.rest import ApiException
class TestEC2Configuration(unittest.TestCase):
"""EC2Configuration unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testEC2Configuration(self):
"""Test EC2Configuration"""
# FIXME: construct object with mandatory attributes with example values
# model = wavefront_api_client.models.ec2_configuration.EC2Configuration() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 31.658537 | 409 | 0.738829 |
604cce555e29403e3e88e3d278df919e0167ba99 | 5,438 | py | Python | vurf/cli.py | ViliamV/vurf | 2d56471366c6ed3e69f951cd5415e304d9865c7d | [
"MIT"
] | 1 | 2021-12-28T17:50:51.000Z | 2021-12-28T17:50:51.000Z | vurf/cli.py | ViliamV/vurf | 2d56471366c6ed3e69f951cd5415e304d9865c7d | [
"MIT"
] | null | null | null | vurf/cli.py | ViliamV/vurf | 2d56471366c6ed3e69f951cd5415e304d9865c7d | [
"MIT"
] | null | null | null | import sys
from functools import wraps
from types import SimpleNamespace
from typing import Callable, Iterable, Optional
import click
from vurf.constants import APP_NAME, CONFIG_NAME
from vurf.lib import ensure_config, expand_path
from vurf.nodes import Root
from vurf.parser import parse
from vurf.types import Config
SECTION_ENV = f"{APP_NAME}_SECTION"
class HintedObject(SimpleNamespace):
config: Config
root: Root
quit: bool
class HintedContext(SimpleNamespace):
obj: HintedObject
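# These SimpleNamespace subclasses exist only for type hints: click stores an
# arbitrary object on ctx.obj, and annotating commands with HintedContext
# documents the attributes populated in `main` below.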
default_section_option = click.option(
"-s",
"--section",
required=False,
envvar=SECTION_ENV,
help=f"Defaults to `default_section` from config. Reads {SECTION_ENV} env variable.",
)
all_sections_option = click.option(
"-s",
"--section",
required=False,
envvar=SECTION_ENV,
help=f"Defaults to all sections. Reads {SECTION_ENV} env variable.",
)
separator_option = click.option(
"--separator",
required=False,
type=str,
default="\n",
help='Section separator. Defaults to "\\n".',
)
def write_packages(ctx: HintedContext):
with expand_path(ctx.obj.config.packages_location).open("w") as f:
f.write(ctx.obj.root.to_string())
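# Turn any uncaught exception into a one-line `Error: ...` message on stderr
# and exit code 1, instead of a full Python traceback.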
def no_traceback(f: Callable) -> Callable:
@wraps(f)
def wrapper(*args, **kwds):
try:
return f(*args, **kwds)
        except Exception as e:
            message = " ".join(map(str, e.args))
            sys.stderr.write(f"Error: {message}\n")
            sys.exit(1)
return wrapper
@click.group()
@click.pass_context
@click.option("-q", "--quiet", is_flag=True, help="Don't produce unnecessary output.")
@click.version_option(package_name=APP_NAME.lower())
@no_traceback
def main(ctx, quiet):
config = ensure_config(quiet)
with expand_path(config.packages_location).open() as f:
root = parse(f)
ctx.obj = ctx.ensure_object(SimpleNamespace)
ctx.obj.config = config
ctx.obj.root = root
ctx.obj.quiet = quiet
# Root -> get_sections
@main.command(help="Print list of sections.")
@separator_option
@click.pass_context
@no_traceback
def sections(ctx: HintedContext, separator: str):
click.echo(separator.join(ctx.obj.root.get_sections()))
# Root -> has_section
@main.command(help="Exit with indication if section is in sections.")
@click.argument("section")
@click.pass_context
@no_traceback
def has_section(ctx: HintedContext, section: str):
sys.exit(int(not ctx.obj.root.has_section(section)))
# Root -> get_packages
@main.command(help="Print list of packages.")
@all_sections_option
@separator_option
@click.pass_context
@no_traceback
def packages(ctx: HintedContext, section: Optional[str], separator: str):
click.echo(separator.join(ctx.obj.root.get_packages(section, ctx.obj.config.parameters)))
# Root -> has_package
@main.command(help="Exit with indication if package is in packages.")
@all_sections_option
@click.argument("package")
@click.pass_context
@no_traceback
def has(ctx: HintedContext, section: Optional[str], package: str):
sys.exit(int(not ctx.obj.root.has_package(section, package)))
# Root -> get_package_section
@main.command(help="Print the first section that contains the package.")
@click.pass_context
@click.argument("package")
@no_traceback
def package_section(ctx: HintedContext, package: str):
section = ctx.obj.root.get_package_section(package)
if section is None:
sys.exit(1)
click.echo(section)
# Root -> add_package
@main.command(help="Add package(s).")
@default_section_option
@click.argument("packages", nargs=-1)
@click.pass_context
@no_traceback
def add(ctx: HintedContext, section: Optional[str], packages: Iterable[str]):
if section is None:
section = ctx.obj.config.default_section
for package in packages:
ctx.obj.root.add_package(section, package)
write_packages(ctx)
# Root -> remove_package
@main.command(help="Remove package(s).")
@default_section_option
@click.argument("packages", nargs=-1)
@click.pass_context
@no_traceback
def remove(ctx: HintedContext, section: Optional[str], packages: Iterable[str]):
if section is None:
section = ctx.obj.config.default_section
for package in packages:
ctx.obj.root.remove_package(section, package)
write_packages(ctx)
# Root -> install
@main.command(help="Install packages.")
@all_sections_option
@click.pass_context
@no_traceback
def install(ctx: HintedContext, section: Optional[str]):
click.echo(ctx.obj.root.install(section, ctx.obj.config.sections, ctx.obj.config.parameters))
# Root -> uninstall
@main.command(help="Uninstall packages.")
@all_sections_option
@click.pass_context
@no_traceback
def uninstall(ctx: HintedContext, section: Optional[str]):
click.echo(ctx.obj.root.uninstall(section, ctx.obj.config.sections, ctx.obj.config.parameters))
@main.command(help="Print default section.")
@click.pass_context
@no_traceback
def default(ctx):
click.echo(ctx.obj.config.default_section)
@main.command(help="Format packages file.")
@click.pass_context
@no_traceback
def format(ctx):
write_packages(ctx)
@main.command(help="Edit packages file.")
@click.pass_context
@no_traceback
def edit(ctx):
click.edit(filename=str(expand_path(ctx.obj.config.packages_location)))
@main.command(help="Edit config file.")
@no_traceback
def config():
click.edit(filename=f"{click.get_app_dir(APP_NAME)}/{CONFIG_NAME}")
if __name__ == "__main__":
main()
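# Example invocations (illustrative; assumes the console entry point is named
# `vurf` and APP_NAME is "VURF", making the env var VURF_SECTION):
#   vurf sections
#   vurf add --section brew ripgrep fd
#   VURF_SECTION=pip vurf has requests && echo installed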
| 25.895238 | 99 | 0.726186 |
12c6e9f11fc88f9ba9590eb1b332fba8e9332cc0 | 1,322 | py | Python | test/plugins/test_rope_rename.py | borisbarath/python-language-server | b03f2d791e0ed66d5f1cc544869c0383cfdc3d9d | [
"MIT"
] | null | null | null | test/plugins/test_rope_rename.py | borisbarath/python-language-server | b03f2d791e0ed66d5f1cc544869c0383cfdc3d9d | [
"MIT"
] | null | null | null | test/plugins/test_rope_rename.py | borisbarath/python-language-server | b03f2d791e0ed66d5f1cc544869c0383cfdc3d9d | [
"MIT"
] | null | null | null | import os
import pytest
from pyls import uris
from pyls.plugins.rope_rename import pyls_rename
from pyls.workspace import Document
DOC_NAME = "test1.py"
DOC = """class Test1():
    pass

class Test2(Test1):
    pass
"""
@pytest.fixture
def tmp_workspace(workspace):
def create_file(name, content):
fn = os.path.join(workspace.root_path, name)
with open(fn, "w") as f:
f.write(content)
workspace.put_document(uris.from_fs_path(fn), content)
create_file(DOC_NAME, DOC)
return workspace
def test_rope_rename(tmp_workspace, config): # pylint: disable=redefined-outer-name
position = {"line": 0, "character": 6}
DOC_URI = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC_NAME))
doc = Document(DOC_URI, tmp_workspace)
result = pyls_rename(config, tmp_workspace, doc, position, "ShouldBeRenamed")
assert len(result.keys()) == 1
changes = result.get("documentChanges")
assert len(changes) == 1
changes = changes[0]
assert changes.get("edits") == [
{
"range": {
"start": {"line": 0, "character": 0},
"end": {"line": 5, "character": 0},
},
"newText": "class ShouldBeRenamed():\n pass\n\nclass Test2(ShouldBeRenamed):\n pass\n",
}
]
| 26.44 | 105 | 0.627837 |
74c5c5fa64de2ffbc884d445ed770b4f2a61985c | 20,942 | py | Python | ppcls/arch/backbone/legendary_models/resnet.py | zhusonghe/PaddleClas-1 | e2e492f9c78ed5084cc50d7c45eef4cc41e1eeaf | [
"Apache-2.0"
] | 150 | 2021-11-02T05:31:51.000Z | 2022-03-24T06:22:22.000Z | ppcls/arch/backbone/legendary_models/resnet.py | zhusonghe/PaddleClas-1 | e2e492f9c78ed5084cc50d7c45eef4cc41e1eeaf | [
"Apache-2.0"
] | 4 | 2021-12-01T11:55:58.000Z | 2022-02-24T16:14:37.000Z | ppcls/arch/backbone/legendary_models/resnet.py | zhusonghe/PaddleClas-1 | e2e492f9c78ed5084cc50d7c45eef4cc41e1eeaf | [
"Apache-2.0"
] | 33 | 2021-11-03T00:29:41.000Z | 2022-03-15T13:15:56.000Z | # copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import numpy as np
import paddle
from paddle import ParamAttr
import paddle.nn as nn
from paddle.nn import Conv2D, BatchNorm, Linear
from paddle.nn import AdaptiveAvgPool2D, MaxPool2D, AvgPool2D
from paddle.nn.initializer import Uniform
import math
from ppcls.arch.backbone.base.theseus_layer import TheseusLayer
from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url
MODEL_URLS = {
"ResNet18":
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet18_pretrained.pdparams",
"ResNet18_vd":
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet18_vd_pretrained.pdparams",
"ResNet34":
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet34_pretrained.pdparams",
"ResNet34_vd":
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet34_vd_pretrained.pdparams",
"ResNet50":
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet50_pretrained.pdparams",
"ResNet50_vd":
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet50_vd_pretrained.pdparams",
"ResNet101":
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet101_pretrained.pdparams",
"ResNet101_vd":
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet101_vd_pretrained.pdparams",
"ResNet152":
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet152_pretrained.pdparams",
"ResNet152_vd":
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet152_vd_pretrained.pdparams",
"ResNet200_vd":
"https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet200_vd_pretrained.pdparams",
}
MODEL_STAGES_PATTERN = {
"ResNet18": ["blocks[1]", "blocks[3]", "blocks[5]", "blocks[7]"],
"ResNet34": ["blocks[2]", "blocks[6]", "blocks[12]", "blocks[15]"],
"ResNet50": ["blocks[2]", "blocks[6]", "blocks[12]", "blocks[15]"],
"ResNet101": ["blocks[2]", "blocks[6]", "blocks[29]", "blocks[32]"],
"ResNet152": ["blocks[2]", "blocks[10]", "blocks[46]", "blocks[49]"],
"ResNet200": ["blocks[2]", "blocks[14]", "blocks[62]", "blocks[65]"]
}
__all__ = MODEL_URLS.keys()
'''
ResNet config: dict.
    key: depth of ResNet.
    value: config dict of the specific model.
        keys:
            block_type: which residual block to use, "BasicBlock" or "BottleneckBlock".
            block_depth: the number of blocks in each of the four stages.
            num_channels: the number of channels entering each stage.
'''
NET_CONFIG = {
"18": {
"block_type": "BasicBlock",
"block_depth": [2, 2, 2, 2],
"num_channels": [64, 64, 128, 256]
},
"34": {
"block_type": "BasicBlock",
"block_depth": [3, 4, 6, 3],
"num_channels": [64, 64, 128, 256]
},
"50": {
"block_type": "BottleneckBlock",
"block_depth": [3, 4, 6, 3],
"num_channels": [64, 256, 512, 1024]
},
"101": {
"block_type": "BottleneckBlock",
"block_depth": [3, 4, 23, 3],
"num_channels": [64, 256, 512, 1024]
},
"152": {
"block_type": "BottleneckBlock",
"block_depth": [3, 8, 36, 3],
"num_channels": [64, 256, 512, 1024]
},
"200": {
"block_type": "BottleneckBlock",
"block_depth": [3, 12, 48, 3],
"num_channels": [64, 256, 512, 1024]
},
}
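# For example, NET_CONFIG["50"] uses block_depth [3, 4, 6, 3]; the cumulative
# final block indices (2, 6, 12, 15) are exactly the ones referenced by
# MODEL_STAGES_PATTERN["ResNet50"] above.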
class ConvBNLayer(TheseusLayer):
def __init__(self,
num_channels,
num_filters,
filter_size,
stride=1,
groups=1,
is_vd_mode=False,
act=None,
lr_mult=1.0,
data_format="NCHW"):
super().__init__()
self.is_vd_mode = is_vd_mode
self.act = act
self.avg_pool = AvgPool2D(
kernel_size=2, stride=2, padding=0, ceil_mode=True)
self.conv = Conv2D(
in_channels=num_channels,
out_channels=num_filters,
kernel_size=filter_size,
stride=stride,
padding=(filter_size - 1) // 2,
groups=groups,
weight_attr=ParamAttr(learning_rate=lr_mult),
bias_attr=False,
data_format=data_format)
self.bn = BatchNorm(
num_filters,
param_attr=ParamAttr(learning_rate=lr_mult),
bias_attr=ParamAttr(learning_rate=lr_mult),
data_layout=data_format)
self.relu = nn.ReLU()
def forward(self, x):
if self.is_vd_mode:
x = self.avg_pool(x)
x = self.conv(x)
x = self.bn(x)
if self.act:
x = self.relu(x)
return x
class BottleneckBlock(TheseusLayer):
def __init__(self,
num_channels,
num_filters,
stride,
shortcut=True,
if_first=False,
lr_mult=1.0,
data_format="NCHW"):
super().__init__()
self.conv0 = ConvBNLayer(
num_channels=num_channels,
num_filters=num_filters,
filter_size=1,
act="relu",
lr_mult=lr_mult,
data_format=data_format)
self.conv1 = ConvBNLayer(
num_channels=num_filters,
num_filters=num_filters,
filter_size=3,
stride=stride,
act="relu",
lr_mult=lr_mult,
data_format=data_format)
self.conv2 = ConvBNLayer(
num_channels=num_filters,
num_filters=num_filters * 4,
filter_size=1,
act=None,
lr_mult=lr_mult,
data_format=data_format)
if not shortcut:
self.short = ConvBNLayer(
num_channels=num_channels,
num_filters=num_filters * 4,
filter_size=1,
stride=stride if if_first else 1,
is_vd_mode=False if if_first else True,
lr_mult=lr_mult,
data_format=data_format)
self.relu = nn.ReLU()
self.shortcut = shortcut
def forward(self, x):
identity = x
x = self.conv0(x)
x = self.conv1(x)
x = self.conv2(x)
if self.shortcut:
short = identity
else:
short = self.short(identity)
x = paddle.add(x=x, y=short)
x = self.relu(x)
return x
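# Shape sketch for one BottleneckBlock (illustrative): with num_channels=256 and
# num_filters=128, conv0 reduces to 128 channels, conv1 applies the (possibly
# strided) 3x3, and conv2 expands to 128 * 4 = 512 channels before the residual add.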
class BasicBlock(TheseusLayer):
def __init__(self,
num_channels,
num_filters,
stride,
shortcut=True,
if_first=False,
lr_mult=1.0,
data_format="NCHW"):
super().__init__()
self.stride = stride
self.conv0 = ConvBNLayer(
num_channels=num_channels,
num_filters=num_filters,
filter_size=3,
stride=stride,
act="relu",
lr_mult=lr_mult,
data_format=data_format)
self.conv1 = ConvBNLayer(
num_channels=num_filters,
num_filters=num_filters,
filter_size=3,
act=None,
lr_mult=lr_mult,
data_format=data_format)
if not shortcut:
self.short = ConvBNLayer(
num_channels=num_channels,
num_filters=num_filters,
filter_size=1,
stride=stride if if_first else 1,
is_vd_mode=False if if_first else True,
lr_mult=lr_mult,
data_format=data_format)
self.shortcut = shortcut
self.relu = nn.ReLU()
def forward(self, x):
identity = x
x = self.conv0(x)
x = self.conv1(x)
if self.shortcut:
short = identity
else:
short = self.short(identity)
x = paddle.add(x=x, y=short)
x = self.relu(x)
return x
class ResNet(TheseusLayer):
"""
ResNet
Args:
        config: dict. Config of ResNet.
        version: str="vb". Version of ResNet; the "vd" variant usually performs better.
        class_num: int=1000. The number of classes.
        lr_mult_list: list. Controls the learning rate of the stem and each of the four stages.
    Returns:
        model: nn.Layer. The specific ResNet model determined by the args.
"""
def __init__(self,
config,
stages_pattern,
version="vb",
class_num=1000,
lr_mult_list=[1.0, 1.0, 1.0, 1.0, 1.0],
data_format="NCHW",
input_image_channel=3,
return_patterns=None,
return_stages=None):
super().__init__()
self.cfg = config
self.lr_mult_list = lr_mult_list
self.is_vd_mode = version == "vd"
self.class_num = class_num
self.num_filters = [64, 128, 256, 512]
self.block_depth = self.cfg["block_depth"]
self.block_type = self.cfg["block_type"]
self.num_channels = self.cfg["num_channels"]
self.channels_mult = 1 if self.num_channels[-1] == 256 else 4
assert isinstance(self.lr_mult_list, (
list, tuple
)), "lr_mult_list should be in (list, tuple) but got {}".format(
type(self.lr_mult_list))
assert len(self.lr_mult_list
) == 5, "lr_mult_list length should be 5 but got {}".format(
len(self.lr_mult_list))
self.stem_cfg = {
#num_channels, num_filters, filter_size, stride
"vb": [[input_image_channel, 64, 7, 2]],
"vd":
[[input_image_channel, 32, 3, 2], [32, 32, 3, 1], [32, 64, 3, 1]]
}
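        # Illustrative stem difference: "vb" uses a single 7x7/stride-2 conv, while
        # "vd" replaces it with three 3x3 convs (32 -> 32 -> 64) at the same overall stride.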
self.stem = nn.Sequential(* [
ConvBNLayer(
num_channels=in_c,
num_filters=out_c,
filter_size=k,
stride=s,
act="relu",
lr_mult=self.lr_mult_list[0],
data_format=data_format)
for in_c, out_c, k, s in self.stem_cfg[version]
])
self.max_pool = MaxPool2D(
kernel_size=3, stride=2, padding=1, data_format=data_format)
block_list = []
for block_idx in range(len(self.block_depth)):
shortcut = False
for i in range(self.block_depth[block_idx]):
block_list.append(globals()[self.block_type](
num_channels=self.num_channels[block_idx] if i == 0 else
self.num_filters[block_idx] * self.channels_mult,
num_filters=self.num_filters[block_idx],
stride=2 if i == 0 and block_idx != 0 else 1,
shortcut=shortcut,
                    # "vd": only the very first block keeps a plain strided shortcut;
                    # later downsampling shortcuts use the avg-pool path. "vb": always plain.
                    if_first=block_idx == i == 0 if version == "vd" else True,
lr_mult=self.lr_mult_list[block_idx + 1],
data_format=data_format))
shortcut = True
self.blocks = nn.Sequential(*block_list)
self.avg_pool = AdaptiveAvgPool2D(1, data_format=data_format)
self.flatten = nn.Flatten()
self.avg_pool_channels = self.num_channels[-1] * 2
stdv = 1.0 / math.sqrt(self.avg_pool_channels * 1.0)
self.fc = Linear(
self.avg_pool_channels,
self.class_num,
weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv)))
self.data_format = data_format
super().init_res(
stages_pattern,
return_patterns=return_patterns,
return_stages=return_stages)
def forward(self, x):
with paddle.static.amp.fp16_guard():
if self.data_format == "NHWC":
x = paddle.transpose(x, [0, 2, 3, 1])
x.stop_gradient = True
x = self.stem(x)
x = self.max_pool(x)
x = self.blocks(x)
x = self.avg_pool(x)
x = self.flatten(x)
x = self.fc(x)
return x
def _load_pretrained(pretrained, model, model_url, use_ssld):
if pretrained is False:
pass
elif pretrained is True:
load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld)
elif isinstance(pretrained, str):
load_dygraph_pretrain(model, pretrained)
else:
raise RuntimeError(
"pretrained type is not available. Please use `string` or `boolean` type."
)
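# Usage sketch for the factory functions below (the local path is hypothetical):
#   model = ResNet50(pretrained=True)                     # fetch weights from MODEL_URLS
#   model = ResNet50(pretrained="./ResNet50_pretrained")  # load from a local path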
def ResNet18(pretrained=False, use_ssld=False, **kwargs):
"""
ResNet18
Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; `False` otherwise.
                    If str, it is the path of the pretrained model.
        use_ssld: bool=False. Whether to use the distillation-pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. The specific `ResNet18` model determined by the args.
"""
model = ResNet(
config=NET_CONFIG["18"],
stages_pattern=MODEL_STAGES_PATTERN["ResNet18"],
version="vb",
**kwargs)
_load_pretrained(pretrained, model, MODEL_URLS["ResNet18"], use_ssld)
return model
def ResNet18_vd(pretrained=False, use_ssld=False, **kwargs):
"""
ResNet18_vd
Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; `False` otherwise.
                    If str, it is the path of the pretrained model.
        use_ssld: bool=False. Whether to use the distillation-pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. The specific `ResNet18_vd` model determined by the args.
"""
model = ResNet(
config=NET_CONFIG["18"],
stages_pattern=MODEL_STAGES_PATTERN["ResNet18"],
version="vd",
**kwargs)
_load_pretrained(pretrained, model, MODEL_URLS["ResNet18_vd"], use_ssld)
return model
def ResNet34(pretrained=False, use_ssld=False, **kwargs):
"""
ResNet34
Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; `False` otherwise.
                    If str, it is the path of the pretrained model.
        use_ssld: bool=False. Whether to use the distillation-pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. The specific `ResNet34` model determined by the args.
"""
model = ResNet(
config=NET_CONFIG["34"],
stages_pattern=MODEL_STAGES_PATTERN["ResNet34"],
version="vb",
**kwargs)
_load_pretrained(pretrained, model, MODEL_URLS["ResNet34"], use_ssld)
return model
def ResNet34_vd(pretrained=False, use_ssld=False, **kwargs):
"""
ResNet34_vd
Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; `False` otherwise.
                    If str, it is the path of the pretrained model.
        use_ssld: bool=False. Whether to use the distillation-pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. The specific `ResNet34_vd` model determined by the args.
"""
model = ResNet(
config=NET_CONFIG["34"],
stages_pattern=MODEL_STAGES_PATTERN["ResNet34"],
version="vd",
**kwargs)
_load_pretrained(pretrained, model, MODEL_URLS["ResNet34_vd"], use_ssld)
return model
def ResNet50(pretrained=False, use_ssld=False, **kwargs):
"""
ResNet50
Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; `False` otherwise.
                    If str, it is the path of the pretrained model.
        use_ssld: bool=False. Whether to use the distillation-pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. The specific `ResNet50` model determined by the args.
"""
model = ResNet(
config=NET_CONFIG["50"],
stages_pattern=MODEL_STAGES_PATTERN["ResNet50"],
version="vb",
**kwargs)
_load_pretrained(pretrained, model, MODEL_URLS["ResNet50"], use_ssld)
return model
def ResNet50_vd(pretrained=False, use_ssld=False, **kwargs):
"""
ResNet50_vd
Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; `False` otherwise.
                    If str, it is the path of the pretrained model.
        use_ssld: bool=False. Whether to use the distillation-pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. The specific `ResNet50_vd` model determined by the args.
"""
model = ResNet(
config=NET_CONFIG["50"],
stages_pattern=MODEL_STAGES_PATTERN["ResNet50"],
version="vd",
**kwargs)
_load_pretrained(pretrained, model, MODEL_URLS["ResNet50_vd"], use_ssld)
return model
def ResNet101(pretrained=False, use_ssld=False, **kwargs):
"""
ResNet101
Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; `False` otherwise.
                    If str, it is the path of the pretrained model.
        use_ssld: bool=False. Whether to use the distillation-pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. The specific `ResNet101` model determined by the args.
"""
model = ResNet(
config=NET_CONFIG["101"],
stages_pattern=MODEL_STAGES_PATTERN["ResNet101"],
version="vb",
**kwargs)
_load_pretrained(pretrained, model, MODEL_URLS["ResNet101"], use_ssld)
return model
def ResNet101_vd(pretrained=False, use_ssld=False, **kwargs):
"""
ResNet101_vd
Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; `False` otherwise.
                    If str, it is the path of the pretrained model.
        use_ssld: bool=False. Whether to use the distillation-pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. The specific `ResNet101_vd` model determined by the args.
"""
model = ResNet(
config=NET_CONFIG["101"],
stages_pattern=MODEL_STAGES_PATTERN["ResNet101"],
version="vd",
**kwargs)
_load_pretrained(pretrained, model, MODEL_URLS["ResNet101_vd"], use_ssld)
return model
def ResNet152(pretrained=False, use_ssld=False, **kwargs):
"""
ResNet152
Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; `False` otherwise.
                    If str, it is the path of the pretrained model.
        use_ssld: bool=False. Whether to use the distillation-pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. The specific `ResNet152` model determined by the args.
"""
model = ResNet(
config=NET_CONFIG["152"],
stages_pattern=MODEL_STAGES_PATTERN["ResNet152"],
version="vb",
**kwargs)
_load_pretrained(pretrained, model, MODEL_URLS["ResNet152"], use_ssld)
return model
def ResNet152_vd(pretrained=False, use_ssld=False, **kwargs):
"""
ResNet152_vd
Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; `False` otherwise.
                    If str, it is the path of the pretrained model.
        use_ssld: bool=False. Whether to use the distillation-pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. The specific `ResNet152_vd` model determined by the args.
"""
model = ResNet(
config=NET_CONFIG["152"],
stages_pattern=MODEL_STAGES_PATTERN["ResNet152"],
version="vd",
**kwargs)
_load_pretrained(pretrained, model, MODEL_URLS["ResNet152_vd"], use_ssld)
return model
def ResNet200_vd(pretrained=False, use_ssld=False, **kwargs):
"""
ResNet200_vd
Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; `False` otherwise.
                    If str, it is the path of the pretrained model.
        use_ssld: bool=False. Whether to use the distillation-pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. The specific `ResNet200_vd` model determined by the args.
"""
model = ResNet(
config=NET_CONFIG["200"],
stages_pattern=MODEL_STAGES_PATTERN["ResNet200"],
version="vd",
**kwargs)
_load_pretrained(pretrained, model, MODEL_URLS["ResNet200_vd"], use_ssld)
return model
| 35.375 | 114 | 0.605577 |
e8957fa2d8ce44c186f4ebc7f82f40c4f39634e6 | 2,245 | py | Python | hedwig/callback.py | eytanhanig/pr-hedwig-python | 972cf918101db7ed2cd980450f76216a77b448c6 | [
"Apache-2.0"
] | null | null | null | hedwig/callback.py | eytanhanig/pr-hedwig-python | 972cf918101db7ed2cd980450f76216a77b448c6 | [
"Apache-2.0"
] | null | null | null | hedwig/callback.py | eytanhanig/pr-hedwig-python | 972cf918101db7ed2cd980450f76216a77b448c6 | [
"Apache-2.0"
] | null | null | null | import inspect
import typing
from hedwig import Message, ConfigurationError, MessageType, CallbackNotFound
from hedwig.conf import settings
class Callback:
def __init__(self, fn: typing.Callable) -> None:
self._fn = fn
signature = inspect.signature(fn)
message_found = False
for p in signature.parameters.values():
# if **kwargs is specified, just pass all things by default since function can always inspect arg names
if p.kind == inspect.Parameter.VAR_KEYWORD:
# disallow use of *kwargs
raise ConfigurationError("Use of **kwargs is not allowed")
elif p.kind == inspect.Parameter.VAR_POSITIONAL:
# disallow use of *args
raise ConfigurationError("Use of *args is not allowed")
elif p.name == 'message':
if p.annotation is not inspect.Signature.empty and p.annotation is not Message:
raise ConfigurationError("Signature for 'message' param must be `hedwig.Message`")
message_found = True
else:
raise ConfigurationError(f"Unknown param '{p.name}' not allowed")
if not message_found:
raise ConfigurationError("Callback must accept a parameter called 'message'")
@property
def fn(self) -> typing.Callable:
""""
return: Task function
"""
return self._fn
def call(self, message: 'hedwig.Message') -> None:
"""
Calls the task with this message
:param message: The message
"""
self.fn(message)
def __str__(self) -> str:
return f'Hedwig task: {self.fn}'
@classmethod
def find_by_message(cls, msg_type: MessageType, major_version: int) -> 'hedwig.Callback':
"""
Finds a callback by message type
:return: Callback
:raises CallbackNotFound: if task isn't registered
"""
if (msg_type, major_version) in _ALL_CALLBACKS:
return _ALL_CALLBACKS[(msg_type, major_version)]
raise CallbackNotFound(msg_type, major_version)
_ALL_CALLBACKS = {
(MessageType(k[0]), k[1]): Callback(v) for k, v in settings.HEDWIG_CALLBACKS.items()
}
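# Sketch of a valid callback (illustrative): the signature check above accepts
# exactly one parameter named `message`, optionally annotated with Message.
#   def user_created_handler(message: Message) -> None:
#       ...  # process the message payload
# Any other parameter name, *args, or **kwargs raises ConfigurationError.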
| 34.538462 | 115 | 0.621381 |
fdbd75daa981f2748e6535821616b487e34ab153 | 162 | py | Python | mysite/urls.py | kunlun-bird/pythonlearning-django | fcdfe1b420a744dee47817f6f15d5044673dd248 | [
"Apache-2.0"
] | null | null | null | mysite/urls.py | kunlun-bird/pythonlearning-django | fcdfe1b420a744dee47817f6f15d5044673dd248 | [
"Apache-2.0"
] | null | null | null | mysite/urls.py | kunlun-bird/pythonlearning-django | fcdfe1b420a744dee47817f6f15d5044673dd248 | [
"Apache-2.0"
] | null | null | null | from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('blog.urls'))
]
| 20.25 | 37 | 0.685185 |
b7ac370cdbfd7afef0b615e5fe8292b6851c6b91 | 20,674 | py | Python | src/skspatial/objects/line.py | CristianoPizzamiglio/scikit-spatial | 95ca2d4f2948cf6a69ec4bc7236b70fd66db1de5 | [
"BSD-3-Clause"
] | 35 | 2019-08-22T04:25:29.000Z | 2022-03-10T16:23:45.000Z | src/skspatial/objects/line.py | CristianoPizzamiglio/scikit-spatial | 95ca2d4f2948cf6a69ec4bc7236b70fd66db1de5 | [
"BSD-3-Clause"
] | 241 | 2019-03-04T23:08:26.000Z | 2022-03-27T13:25:30.000Z | src/skspatial/objects/line.py | CristianoPizzamiglio/scikit-spatial | 95ca2d4f2948cf6a69ec4bc7236b70fd66db1de5 | [
"BSD-3-Clause"
] | 7 | 2020-11-13T21:40:05.000Z | 2022-02-07T00:38:30.000Z | """Module for the Line class."""
from __future__ import annotations
from typing import Optional
import numpy as np
from matplotlib.axes import Axes
from mpl_toolkits.mplot3d import Axes3D
from skspatial.objects._base_line_plane import _BaseLinePlane
from skspatial.objects.point import Point
from skspatial.objects.points import Points
from skspatial.objects.vector import Vector
from skspatial.plotting import _connect_points_2d
from skspatial.plotting import _connect_points_3d
from skspatial.transformation import transform_coordinates
from skspatial.typing import array_like
class Line(_BaseLinePlane):
"""
A line in space.
The line is defined by a point and a direction vector.
Parameters
----------
point : array_like
Point on the line.
direction : array_like
Direction vector of the line.
kwargs : dict, optional
Additional keywords passed to :meth:`Vector.is_zero`.
This method is used to ensure that the direction vector is not the zero vector.
Attributes
----------
point : Point
Point on the line.
direction : Vector
Unit direction vector.
vector : Vector
Same as the direction.
dimension : int
Dimension of the line.
Raises
------
ValueError
If the point and vector have different dimensions.
If the vector is all zeros.
Examples
--------
>>> from skspatial.objects import Line
>>> line = Line(point=[0, 0], direction=[3, 0])
>>> line
Line(point=Point([0, 0]), direction=Vector([3, 0]))
>>> line.direction
Vector([3, 0])
The direction can also be accessed with the ``vector`` attribute.
>>> line.vector
Vector([3, 0])
The line dimension is the dimension of the point and vector.
>>> line.dimension
2
>>> Line([0, 0], [1, 0, 0])
Traceback (most recent call last):
...
ValueError: The point and vector must have the same dimension.
>>> Line([1, 1], [0, 0])
Traceback (most recent call last):
...
ValueError: The vector must not be the zero vector.
"""
def __init__(self, point: array_like, direction: array_like):
super().__init__(point, direction)
self.direction = self.vector
@classmethod
def from_points(cls, point_a: array_like, point_b: array_like) -> Line:
"""
Instantiate a line from two points.
Parameters
----------
point_a, point_b : array_like
Two points defining the line.
Returns
-------
Line
Line containing the two input points.
Examples
--------
>>> from skspatial.objects import Line
>>> Line.from_points([0, 0], [1, 0])
Line(point=Point([0, 0]), direction=Vector([1, 0]))
The order of the points affects the line point and direction vector.
>>> Line.from_points([1, 0], [0, 0])
Line(point=Point([1, 0]), direction=Vector([-1, 0]))
"""
vector_ab = Vector.from_points(point_a, point_b)
return cls(point_a, vector_ab)
@classmethod
def from_slope(cls, slope: float, y_intercept: float) -> Line:
r"""
Instantiate a 2D line from a slope and Y-intercept.
A 2D line can be represented by the equation
.. math:: y = mx + b
        where :math:`m` is the slope and :math:`b` is the Y-intercept.
Parameters
----------
slope : {int, float}
Slope of the 2D line.
y_intercept : {int, float}
Y coordinate of the point where the line intersects the Y axis.
Returns
-------
Line
A 2D Line object.
Examples
--------
>>> from skspatial.objects import Line
>>> Line.from_slope(2, 0)
Line(point=Point([0, 0]), direction=Vector([1, 2]))
>>> Line.from_slope(-3, 5)
Line(point=Point([0, 5]), direction=Vector([ 1, -3]))
>>> line_a = Line.from_slope(1, 0)
>>> line_b = Line.from_slope(0, 5)
>>> line_a.intersect_line(line_b)
Point([5., 5.])
"""
point = [0, y_intercept]
direction = [1, slope]
return cls(point, direction)
def is_coplanar(self, other: Line, **kwargs: float) -> bool:
"""
Check if the line is coplanar with another.
Parameters
----------
other : Line
Other line.
kwargs : dict, optional
Additional keywords passed to :func:`numpy.linalg.matrix_rank`
Returns
-------
bool
True if the line is coplanar; false otherwise.
Raises
------
TypeError
If the input is not a line.
References
----------
http://mathworld.wolfram.com/Coplanar.html
Examples
--------
>>> from skspatial.objects import Line
>>> line_a = Line(point=[0, 0, 0], direction=[1, 0, 0])
>>> line_b = Line([-5, 3, 0], [7, 1, 0])
>>> line_c = Line([0, 0, 0], [0, 0, 1])
>>> line_a.is_coplanar(line_b)
True
>>> line_a.is_coplanar(line_c)
True
>>> line_b.is_coplanar(line_c)
False
The input must be another line.
>>> from skspatial.objects import Plane
>>> line_a.is_coplanar(Plane(line_a.point, line_a.vector))
Traceback (most recent call last):
...
TypeError: The input must also be a line.
"""
if not isinstance(other, type(self)):
raise TypeError("The input must also be a line.")
point_1 = self.point
point_2 = self.to_point()
point_3 = other.point
point_4 = other.to_point()
points = Points([point_1, point_2, point_3, point_4])
return points.are_coplanar(**kwargs)
def to_point(self, t: float = 1) -> Point:
r"""
Return a point along the line using a parameter `t`.
Parameters
----------
t : {int, float}
Parameter that defines the new point along the line.
Returns
-------
Point
New point along the line.
Notes
-----
The new point :math:`p` is computed as:
.. math:: p = \mathtt{line.point} + t \cdot \mathtt{line.direction}
Examples
--------
>>> from skspatial.objects import Line
>>> line = Line(point=[0, 0], direction=[2, 0])
>>> line.to_point()
Point([2, 0])
>>> line.to_point(t=2)
Point([4, 0])
"""
vector_along_line = t * self.direction
return self.point + vector_along_line
def project_point(self, point: array_like) -> Point:
"""
Project a point onto the line.
Parameters
----------
point : array_like
Input point.
Returns
-------
Point
Projection of the point onto the line.
Examples
--------
>>> from skspatial.objects import Line
>>> Line(point=[0, 0], direction=[8, 0]).project_point([5, 5])
Point([5., 0.])
>>> Line(point=[0, 0, 0], direction=[1, 1, 0]).project_point([5, 5, 3])
Point([5., 5., 0.])
"""
# Vector from the point on the line to the point in space.
vector_to_point = Vector.from_points(self.point, point)
# Project the vector onto the line.
vector_projected = self.direction.project_vector(vector_to_point)
# Add the projected vector to the point on the line.
return self.point + vector_projected
def project_vector(self, vector: array_like) -> Vector:
"""
Project a vector onto the line.
Parameters
----------
vector : array_like
Input vector.
Returns
-------
Vector
Projection of the vector onto the line.
Examples
--------
>>> from skspatial.objects import Line
>>> line = Line([-1, 5, 3], [3, 4, 5])
>>> line.project_vector([1, 1, 1])
Vector([0.72, 0.96, 1.2 ])
"""
return self.direction.project_vector(vector)
def side_point(self, point: array_like) -> int:
"""
Find the side of the line where a point lies.
The line and point must be 2D.
Parameters
----------
point : array_like
Input point.
Returns
-------
int
-1 if the point is left of the line.
0 if the point is on the line.
1 if the point is right of the line.
Examples
--------
>>> from skspatial.objects import Line
>>> line = Line([0, 0], [1, 1])
The point is on the line.
>>> line.side_point([2, 2])
0
The point is to the right of the line.
>>> line.side_point([5, 3])
1
The point is to the left of the line.
>>> line.side_point([5, 10])
-1
"""
vector_to_point = Vector.from_points(self.point, point)
return self.direction.side_vector(vector_to_point)
def distance_point(self, point: array_like) -> np.float64:
"""
Return the distance from a point to the line.
This is the distance from the point to its projection on the line.
Parameters
----------
point : array_like
Input point.
Returns
-------
np.float64
Distance from the point to the line.
Examples
--------
>>> from skspatial.objects import Line
>>> line = Line([0, 0], [1, 0])
>>> line.distance_point([0, 0])
0.0
>>> line.distance_point([5, 0])
0.0
>>> line.distance_point([5, -5])
5.0
>>> line = Line([5, 2, -3], [3, 8, 2])
>>> line.distance_point([5, -5, 3]).round(3)
7.737
"""
point_projected = self.project_point(point)
return point_projected.distance_point(point)
def distance_line(self, other: Line) -> np.float64:
"""
Return the shortest distance from the line to another.
Parameters
----------
other : Line
Other line.
Returns
-------
np.float64
Distance between the lines.
References
----------
http://mathworld.wolfram.com/Line-LineDistance.html
Examples
--------
There are three cases:
1. The lines intersect (i.e., they are coplanar and not parallel).
>>> from skspatial.objects import Line
>>> line_a = Line([1, 2], [4, 3])
>>> line_b = Line([-4, 1], [7, 23])
>>> line_a.distance_line(line_b)
0.0
2. The lines are parallel.
>>> line_a = Line([0, 0], [1, 0])
>>> line_b = Line([0, 5], [-1, 0])
>>> line_a.distance_line(line_b)
5.0
3. The lines are skew.
>>> line_a = Line([0, 0, 0], [1, 0, 1])
>>> line_b = Line([1, 0, 0], [1, 1, 1])
>>> line_a.distance_line(line_b).round(3)
0.707
"""
if self.direction.is_parallel(other.direction):
# The lines are parallel.
# The distance between the lines is the distance from line point B to line A.
distance = self.distance_point(other.point)
elif self.is_coplanar(other):
# The lines must intersect, since they are coplanar and not parallel.
distance = np.float64(0)
else:
# The lines are skew.
vector_ab = Vector.from_points(self.point, other.point)
vector_perpendicular = self.direction.cross(other.direction)
distance = abs(vector_ab.dot(vector_perpendicular)) / vector_perpendicular.norm()
return distance
def intersect_line(self, other: Line, **kwargs) -> Point:
"""
Intersect the line with another.
The lines must be coplanar and not parallel.
Parameters
----------
other : Line
Other line.
kwargs : dict, optional
Additional keywords passed to :meth:`Vector.is_parallel`.
Returns
-------
Point
The point at the intersection.
Raises
------
ValueError
If the lines don't have the same dimension.
If the line dimension is greater than three.
If the lines are parallel.
If the lines are not coplanar.
References
----------
http://mathworld.wolfram.com/Line-LineIntersection.html
Examples
--------
>>> from skspatial.objects import Line
>>> line_a = Line([0, 0], [1, 0])
>>> line_b = Line([5, 5], [0, 1])
>>> line_a.intersect_line(line_b)
Point([5., 0.])
>>> line_a = Line([0, 0, 0], [1, 1, 1])
>>> line_b = Line([5, 5, 0], [0, 0, -8])
>>> line_a.intersect_line(line_b)
Point([5., 5., 5.])
>>> line_a = Line([0, 0, 0], [1, 0, 0])
>>> line_b = Line([0, 0], [1, 1])
>>> line_a.intersect_line(line_b)
Traceback (most recent call last):
...
ValueError: The lines must have the same dimension.
>>> line_a = Line(4 * [0], [1, 0, 0, 0])
>>> line_b = Line(4 * [0], [0, 0, 0, 1])
>>> line_a.intersect_line(line_b)
Traceback (most recent call last):
...
ValueError: The line dimension cannot be greater than 3.
        >>> line_a = Line([0, 0], [0, 1])
        >>> line_b = Line([0, 1], [0, 1])
        >>> line_a.intersect_line(line_b)
        Traceback (most recent call last):
        ...
        ValueError: The lines must not be parallel.
        >>> line_a = Line([0, 0], [1, 0])
        >>> line_b = Line([0, 1], [2, 0])
>>> line_a.intersect_line(line_b)
Traceback (most recent call last):
...
ValueError: The lines must not be parallel.
>>> line_a = Line([1, 2, 3], [-4, 1, 1])
>>> line_b = Line([4, 5, 6], [3, 1, 5])
>>> line_a.intersect_line(line_b)
Traceback (most recent call last):
...
ValueError: The lines must be coplanar.
"""
if self.dimension != other.dimension:
raise ValueError("The lines must have the same dimension.")
if self.dimension > 3 or other.dimension > 3:
raise ValueError("The line dimension cannot be greater than 3.")
if self.direction.is_parallel(other.direction, **kwargs):
raise ValueError("The lines must not be parallel.")
if not self.is_coplanar(other):
raise ValueError("The lines must be coplanar.")
# Vector from line A to line B.
vector_ab = Vector.from_points(self.point, other.point)
# Vector perpendicular to both lines.
vector_perpendicular = self.direction.cross(other.direction)
num = vector_ab.cross(other.direction).dot(vector_perpendicular)
denom = vector_perpendicular.norm() ** 2
# Vector along line A to the intersection point.
vector_a_scaled = num / denom * self.direction
return self.point + vector_a_scaled
@classmethod
def best_fit(cls, points: array_like, tol: Optional[float] = None, **kwargs) -> Line:
"""
Return the line of best fit for a set of points.
Parameters
----------
points : array_like
Input points.
tol : float | None, optional
Keyword passed to :meth:`Points.are_collinear` (default None).
kwargs : dict, optional
Additional keywords passed to :func:`numpy.linalg.svd`
Returns
-------
Line
The line of best fit.
Raises
------
ValueError
If the points are concurrent.
Examples
--------
>>> from skspatial.objects import Line
>>> points = [[0, 0], [1, 2], [2, 1], [2, 3], [3, 2]]
>>> line = Line.best_fit(points)
The point on the line is the centroid of the points.
>>> line.point
Point([1.6, 1.6])
The line direction is a unit vector.
>>> line.direction.round(3)
Vector([0.707, 0.707])
"""
points_spatial = Points(points)
if points_spatial.are_concurrent(tol=tol):
raise ValueError("The points must not be concurrent.")
points_centered, centroid = points_spatial.mean_center(return_centroid=True)
_, _, vh = np.linalg.svd(points_centered, **kwargs)
direction = vh[0, :]
return cls(centroid, direction)
def transform_points(self, points: array_like) -> np.ndarray:
"""
Transform points to a one-dimensional coordinate system defined by the line.
The point on the line acts as the origin of the coordinate system.
        This is analogous to projecting the points onto the line,
then computing the signed distance from the line point to the projections.
Parameters
----------
points : (N, D) array_like
Array of N points with dimension D.
Returns
-------
ndarray
(N,) array of N coordinates.
Examples
--------
>>> from skspatial.objects import Line
>>> points = [[-1, 1], [0, 1], [1, 1], [2, 1]]
>>> Line([0, 0], [1, 0]).transform_points(points)
array([-1., 0., 1., 2.])
The point on the line acts as the origin of the coordinates.
>>> Line([1, 0], [1, 0]).transform_points(points)
array([-2., -1., 0., 1.])
The sign of the coordinates depends on the direction of the line.
>>> Line([0, 0], [-1, 0]).transform_points(points)
array([ 1., 0., -1., -2.])
The magnitude of the direction vector is irrelevant.
>>> Line([0, 0], [5, 0]).transform_points(points)
array([-1., 0., 1., 2.])
"""
# Basis vector of the subspace (the line).
vectors_basis = [self.direction.unit()]
column = transform_coordinates(points, self.point.to_array(), vectors_basis)
return column.flatten()
def plot_2d(self, ax_2d: Axes, t_1: float = 0, t_2: float = 1, **kwargs) -> None:
"""
Plot a 2D line.
The line is plotted by connecting two 2D points.
Parameters
----------
ax_2d : Axes
Instance of :class:`~matplotlib.axes.Axes`.
t_1, t_2 : {int, float}
Parameters to determine points 1 and 2 along the line.
These are passed to :meth:`Line.to_point`.
Defaults are 0 and 1.
kwargs : dict, optional
Additional keywords passed to :meth:`~matplotlib.axes.Axes.plot`.
Examples
--------
.. plot::
:include-source:
>>> import matplotlib.pyplot as plt
>>> from skspatial.objects import Line
>>> _, ax = plt.subplots()
>>> line = Line([1, 2], [3, 4])
>>> line.plot_2d(ax, t_1=-2, t_2=3, c='k')
>>> line.point.plot_2d(ax, c='r', s=100, zorder=3)
>>> grid = ax.grid()
"""
point_1 = self.to_point(t_1)
point_2 = self.to_point(t_2)
_connect_points_2d(ax_2d, point_1, point_2, **kwargs)
def plot_3d(self, ax_3d: Axes3D, t_1: float = 0, t_2: float = 1, **kwargs) -> None:
"""
Plot a 3D line.
The line is plotted by connecting two 3D points.
Parameters
----------
ax_3d : Axes3D
Instance of :class:`~mpl_toolkits.mplot3d.axes3d.Axes3D`.
t_1, t_2 : {int, float}
Parameters to determine points 1 and 2 along the line.
These are passed to :meth:`Line.to_point`.
Defaults are 0 and 1.
kwargs : dict, optional
Additional keywords passed to :meth:`~mpl_toolkits.mplot3d.axes3d.Axes3D.plot`.
Examples
--------
.. plot::
:include-source:
>>> import matplotlib.pyplot as plt
>>> from mpl_toolkits.mplot3d import Axes3D
>>> from skspatial.objects import Line
>>> fig = plt.figure()
>>> ax = fig.add_subplot(111, projection='3d')
>>> line = Line([1, 2, 3], [0, 1, 1])
>>> line.plot_3d(ax, c='k')
>>> line.point.plot_3d(ax, s=100)
"""
point_1 = self.to_point(t_1)
point_2 = self.to_point(t_2)
_connect_points_3d(ax_3d, point_1, point_2, **kwargs)
| 27.060209 | 93 | 0.539373 |
1633a57866b9ac6b9e401581ce89b3a72aa8b732 | 10,949 | py | Python | src/bin/plots/plot_inception_cka_vs_crossentropy.py | asd249180/similarity_and_matching | 225cbc4850a790a37ea18d4c519a4306e9db3590 | [
"Apache-2.0"
] | null | null | null | src/bin/plots/plot_inception_cka_vs_crossentropy.py | asd249180/similarity_and_matching | 225cbc4850a790a37ea18d4c519a4306e9db3590 | [
"Apache-2.0"
] | null | null | null | src/bin/plots/plot_inception_cka_vs_crossentropy.py | asd249180/similarity_and_matching | 225cbc4850a790a37ea18d4c519a4306e9db3590 | [
"Apache-2.0"
] | null | null | null | import pandas as pd
from matplotlib import pyplot as plt
import os
import sys
import seaborn as sns
import json
import argparse
from dotmap import DotMap
import numpy as np
# ================================================================
# SETTINGS
# ================================================================
settings = DotMap()
# PLOT
# ----------------------------------------
settings.plot.colors = {
'frank_crossentropy' : '#F00',
'frank_cka' : '#F60',
'ps_inv_crossentropy' : '#00F',
'ps_inv_cka' : '#06F'
}
settings.plot.rename_dict = {
'frank_crossentropy' : 'With Task Loss - Cross-entropy',
'ps_inv_crossentropy' : 'Linear Least Squares - Cross-entropy',
'frank_cka' : 'With Task Loss - CKA',
'ps_inv_cka' : 'Linear Least Squares - CKA'
}
settings.plot.linewidth = 5
settings.plot.ce_linestyle = '-'
settings.plot.cka_linestyle = '--'
settings.plot.x_label = 'Layer'
settings.plot.y_label_left = 'CKA between stiched layers'
settings.plot.y_label_right = 'Cross-entropy'
# MEAN & STD TABLE
# ----------------------------------------
settings.mean_std_table.caption = 'Mean metrics with standard deviations in parentheses.'
settings.mean_std_table.column_names = ['Cross-entropy', 'CKA'] # Order cannot be changed this way
settings.mean_std_table.row_names = ['Ps. Inv', 'Frank'] # Order cannot be changed this way
# LAYERWISE TABLE
settings.layerwise_table.caption = 'Effect on logits, layerwise split on Frank stitches.'
settings.layerwise_table.column_names = {'crossentropy' : 'Cross-entropy', 'cka' : 'CKA'}
# Layers to process:
layers_to_process = [
# 'conv2d0_pre_relu',
# 'conv2d1_pre_relu',
# 'conv2d2_pre_relu',
'mixed3a_pre_relu',
'mixed3b_pre_relu',
'mixed4a_pre_relu',
'mixed4b_pre_relu',
'mixed4c_pre_relu',
'mixed4d_pre_relu',
'mixed4e_pre_relu',
'mixed5a_pre_relu',
'mixed5b_pre_relu',
]
# ================================================================
# PROCESSING
# ================================================================
def parse_args(args):
parser = argparse.ArgumentParser(description='Simple settings.')
parser.add_argument('--csv', default="results/find_trans_inception/summary.csv")
parser.add_argument('--out-dir', default='results/find_trans_inception/plots/cka_ce/')
return parser.parse_args(args)
def filter_df(df):
filters = [
df.l1 == 0.,
df.target_type=='soft_2',
df.init =='ps_inv',
df.front_layer.isin(layers_to_process),
df.end_layer.isin(layers_to_process),
df.front_model != df.end_model,
df.temperature == 1.0,
]
return df[np.logical_and.reduce(filters)]
def get_df(csv, out_dir):
# Filter csv to relevant parts
filtered_df = filter_df(pd.read_csv(csv))
filtered_df['front_layer'] = filtered_df['front_layer'].str[:-9] #.capitalize()
filtered_df = filtered_df.sort_values(['front_layer']).copy()
# Calculate accuracies
df = filtered_df.copy()
df['frank_cka'] = df['cka_frank']
df['ps_inv_cka'] = df['cka_ps_inv']
df['frank_crossentropy'] = df['after_crossentropy']
df['ps_inv_crossentropy'] = df['ps_inv_crossentropy']
# Rename columns in local dataframe
# df = df.rename(columns={
# 'after_cka' : 'frank_cka'
# })
# Group values into one column with a category column
def make_one_col(column):
new_df = df[['front_layer', column]].copy()
new_df['matrix'] = column
new_df['style'] = 'crossentropy' if 'crossentropy' in column else 'cka'
new_df = new_df.rename(columns={column : 'value'})
return new_df
dfs = [
make_one_col('frank_crossentropy'),
make_one_col('frank_cka'),
make_one_col('ps_inv_crossentropy'),
make_one_col('ps_inv_cka'),
]
sum_df = pd.concat(dfs, ignore_index=True).reset_index(drop=True)
# Save
filtered_df.to_csv(os.path.join(out_dir, 'filtered_df.csv'), index=False)
sum_df.to_csv(os.path.join(out_dir, 'sum_df.csv'), index=False)
return filtered_df, sum_df
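# Resulting sum_df layout (illustrative values): one row per (layer, matrix) pair, e.g.
#   front_layer   value   matrix               style
#   mixed3a       0.31    frank_crossentropy   crossentropy
#   mixed3a       0.93    frank_cka            cka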
def save_table_mean_std(df, out_dir):
global settings
conf = settings.mean_std_table
out_file = os.path.join(out_dir, 'overall_mean_std.tex')
# Generatre mean and std
df = df.copy()
df = df.groupby(['matrix'])['value'].describe()[['mean', 'std']].copy()
# Create table in desired format
_mean = lambda x: f"{df.loc[x, 'mean']:0.3f}"
    _std = lambda x: rf"(\pm{df.loc[x, 'std']:0.3f})"
_cell = lambda x: f"{_mean(x)} {_std(x)}"
_row = lambda x: [_cell(x+'_crossentropy'), _cell(x+'_cka')]
new_df = pd.DataFrame({
conf.row_names[0] : _row('ps_inv'),
conf.row_names[1] : _row('frank')
}, index=conf.column_names)
# Convert table to latex
table_latex = new_df.to_latex(escape=False, column_format='l c c')
# Place the latex table in a figure and add captopn
latex = "\\begin{table}\n\\centering\n" + table_latex + \
" \\caption{" + conf.caption + "}\n" + \
" \\label{fig:my_label}\n" + \
"\\end{table}"
# Save
with open(out_file, "w") as text_file:
print(latex, file=text_file)
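# Each cell produced above renders in LaTeX as e.g. "0.123 (\pm0.045)",
# i.e. the mean followed by the standard deviation in parentheses.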
def save_table_layerwise(df, out_dir):
global settings
conf = settings.layerwise_table
out_file = os.path.join(out_dir, 'layerwise_mean_std.tex')
df = df.copy()
# Create CKA/Acc and PsInv/Frank categories
df['mode'] = df.matrix.apply(lambda x: x.split('_')[-1])
df['method'] = df.matrix.apply(lambda x: '_'.join(x.split('_')[:-1]))
# Available layers in order
layers = df['front_layer'].drop_duplicates().sort_values()
# Create dataframe
new_df = pd.DataFrame(index=layers)
for layer in layers:
for mode in df['mode'].drop_duplicates():
# Filter ps_inv and frank
subdf = df[(df.front_layer==layer)&(df['mode']==mode)]
ps_inv = subdf[subdf.method == 'ps_inv']['value'].reset_index(drop=True)
frank = subdf[subdf.method == 'frank']['value'].reset_index(drop=True)
# Get mode spcific changes (e.g. % mark)
            mark = r'\%' if mode == 'cka' else ''
            multiplier = 100 if mode == 'cka' else 1
# Caulculate mean and std
mean = (frank-ps_inv).mean() * multiplier
std = (frank-ps_inv).std() * multiplier
# Insert variable in table
val = f"{mean:1.3f}{mark} (\pm{std:1.3f}{mark})"
new_df.loc[layer, mode] = val
# Final decoration on table
new_df.index.name = None
new_df = new_df.rename(columns=conf.column_names)
# Convert table to latex
table_latex = new_df.to_latex(escape=False, column_format='l c c')
# Place the latex table in a figure and add captopn
latex = "\\begin{table}\n\\centering\n" + table_latex + \
" \\caption{" + conf.caption + "}\n" + \
" \\label{fig:my_label}\n" + \
"\\end{table}"
# Save
with open(out_file, "w") as text_file:
print(latex, file=text_file)
def save_diagram(df, out_dir):
global settings
conf = settings.plot
out_file = os.path.join(out_dir,'crossentropy_vs_cka.pdf')
    fig = plt.figure(figsize=(16, 9))
subdf = df[df['style']=='cka']
cka_ax = sns.lineplot(
data=subdf, #kind="line",
x="front_layer", y="value",
hue="matrix", style='style',
palette=conf.colors,
        linewidth=conf.linewidth)
ce_ax = plt.twinx()
subdf = df[df['style']!='cka']
ce_ax = sns.lineplot(
data=subdf, #kind="line",
x="front_layer", y="value",
hue="matrix", style='style',
palette=conf.colors,
        linewidth=conf.linewidth, ax=ce_ax)
xlabels = df['front_layer'].drop_duplicates().str.replace('.add', '').tolist()
xlabels.sort()
    def set_linestyle(ax, linestyle):
# Remove columns from legend
h,l = ax.get_legend_handles_labels()
cols_to_remove = ['matrix', 'style', 'crossentropy', 'cka']
h = [x for (x,y) in zip(h,l) if y not in cols_to_remove]
l = [x for x in l if x not in cols_to_remove]
# Set linewidth in legend
for x in h:
x.set_linewidth(conf.linewidth)
# Set linestyles of CKA
        h[0].set_linestyle(linestyle)
        h[1].set_linestyle(linestyle)
        ax.lines[0].set_linestyle(linestyle)
        ax.lines[1].set_linestyle(linestyle)
return h, l
h1, l1 = set_linestyle(cka_ax, conf.cka_linestyle)
h2, l2 = set_linestyle(ce_ax, conf.ce_linestyle)
h, l = h1+h2, l1+l2
cka_ax.get_legend().remove()
ce_ax.get_legend().remove()
# Remove sns default legend and set custom
# g._legend.remove()
legends = plt.legend(h,l,loc='center', fontsize=20)
for i in range(4):
legend = legends.get_texts()[i]
title = legend.get_text()
new_title = conf.rename_dict[title]
legend.set_text(new_title)
cka_ax.set_ylabel(conf.y_label_left, size = 24)
cka_ax.set_xlabel(conf.x_label, size = 24)
cka_ax.set_xticklabels(xlabels, size=24, rotation=-45)
cka_ax.tick_params(axis='y', labelsize=24)
cka_ax.set_ylim([0.6 ,1])
cka_ax.xaxis.grid()
# Set grids
# plt.grid()
ce_ax.set_ylabel(conf.y_label_right, size = 24)
ce_ax.set_xlabel(conf.x_label, size = 24)
ce_ax.set_ylim([0,0.5])
ce_ax.xaxis.grid()
plt.tick_params(axis='x', labelsize=24, rotation=-45)
plt.tick_params(axis='y', labelsize=24, labelright=True)
# Save
plt.savefig(out_file, bbox_inches='tight')
#plt.show()
def save_report(filtered_df, out_dir):
data = DotMap()
data.no_experiments = len(filtered_df)
data.unique_networks = list(set(filtered_df.front_model).union(set(filtered_df.end_model)))
data.same_networks_compared = str((filtered_df.front_model == filtered_df.end_model).any())
extra_cols = ['l1', 'target_type', 'weight_decay', 'init', 'temperature']
for col in extra_cols:
data[col] = filtered_df[col].drop_duplicates().tolist()
data.layers = filtered_df.front_layer.drop_duplicates().tolist()
data.layers.sort()
with open(os.path.join(out_dir, 'run_settings.json'), 'w') as fp:
json.dump(data, fp)
return data
def main(args=None):
if args is None:
args = sys.argv[1:]
conf = parse_args(args)
# Create directory if not exist
os.makedirs(conf.out_dir, exist_ok=True)
# Read in original csv
filtered_df, df = get_df(conf.csv, conf.out_dir)
# Run and save measurements
save_table_layerwise(df, conf.out_dir)
save_report(filtered_df, conf.out_dir)
save_table_mean_std(df, conf.out_dir)
save_diagram(df, conf.out_dir)
print(f'Results saved at {conf.out_dir}')
if __name__ == '__main__':
main() | 31.553314 | 98 | 0.615125 |
4761b35b44c48ce9a261e0f6f3a4d0904c089b06 | 1,658 | py | Python | src/tests/test_click_config_merge.py | ndejong/pyvboxmanage | 6cb49546782ae97f177e7035982b1dc86b8f61db | [
"BSD-2-Clause"
] | 1 | 2020-12-28T02:19:35.000Z | 2020-12-28T02:19:35.000Z | src/tests/test_click_config_merge.py | ndejong/pyvboxmanage | 6cb49546782ae97f177e7035982b1dc86b8f61db | [
"BSD-2-Clause"
] | null | null | null | src/tests/test_click_config_merge.py | ndejong/pyvboxmanage | 6cb49546782ae97f177e7035982b1dc86b8f61db | [
"BSD-2-Clause"
] | null | null | null |
import os
import pytest
from click.testing import CliRunner
from pyvboxmanage.cli import click
def test_pyvboxmanage_dryrun_test01_merge_test02():
runner = CliRunner()
config01_filename = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test01config.yml')
config02_filename = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test02config.yml')
result = runner.invoke(click.pyvboxmanage, args='--dry-run "{}" "{}"'.
format(config01_filename, config02_filename))
assert result.exit_code == 0
assert 'Successfully executed command line "vboxmanage showvminfo test02targetvname"' in result.stdout
assert 'Successfully executed command line "vboxmanage unregistervm test02targetvname --delete"' in result.stdout
assert 'Successfully executed command line "vboxmanage clonevm test02sourcevname --basefolder "/test02targetbasefolder" --groups "/test02targetgroups" --mode "machine" --name "test02targetvname" --register"' in result.stdout
assert 'Successfully executed command line "vboxmanage modifyvm test02targetvname --bridgeadapter1 "test02targetbridgeadapter" --bridgeadapter2 "test02targetbridgeadapter" --bridgeadapter3 "test02targetbridgeadapter" --bridgeadapter4 "test02targetbridgeadapter" --cableconnected1 "on" --cableconnected2 "on" --cableconnected3 "on" --cableconnected4 "on" --macaddress1 "08002722E901" --macaddress2 "08002722E902" --macaddress3 "08002722E903" --macaddress4 "08002722E904" --nic1 "bridged" --nic2 "bridged" --nic3 "bridged" --nic4 "bridged" --nictype1 "82543GC" --nictype2 "82543GC" --nictype3 "82543GC" --nictype4 "82543GC""' in result.stdout
| 82.9 | 644 | 0.767793 |
65a5b5c275bdf931708c131fcec5a61d8b7df8dc | 4,462 | py | Python | gitee/models/body.py | pygitee/pygitee | 7622314a4dbb08cf2f729b6cdd0a2887b96e394e | [
"MIT"
] | null | null | null | gitee/models/body.py | pygitee/pygitee | 7622314a4dbb08cf2f729b6cdd0a2887b96e394e | [
"MIT"
] | null | null | null | gitee/models/body.py | pygitee/pygitee | 7622314a4dbb08cf2f729b6cdd0a2887b96e394e | [
"MIT"
] | null | null | null | # coding: utf-8
import pprint
import re # noqa: F401
import six
class Body(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'access_token': 'str',
'refs': 'str',
'branch_name': 'str'
}
attribute_map = {
'access_token': 'access_token',
'refs': 'refs',
'branch_name': 'branch_name'
}
def __init__(self, access_token=None, refs='master', branch_name=None): # noqa: E501
"""Body - a model defined in Swagger""" # noqa: E501
self._access_token = None
self._refs = None
self._branch_name = None
self.discriminator = None
if access_token is not None:
self.access_token = access_token
self.refs = refs
self.branch_name = branch_name
@property
def access_token(self):
"""Gets the access_token of this Body. # noqa: E501
用户授权码 # noqa: E501
:return: The access_token of this Body. # noqa: E501
:rtype: str
"""
return self._access_token
@access_token.setter
def access_token(self, access_token):
"""Sets the access_token of this Body.
用户授权码 # noqa: E501
:param access_token: The access_token of this Body. # noqa: E501
:type: str
"""
self._access_token = access_token
@property
def refs(self):
"""Gets the refs of this Body. # noqa: E501
起点名称, 默认:master # noqa: E501
:return: The refs of this Body. # noqa: E501
:rtype: str
"""
return self._refs
@refs.setter
def refs(self, refs):
"""Sets the refs of this Body.
起点名称, 默认:master # noqa: E501
:param refs: The refs of this Body. # noqa: E501
:type: str
"""
if refs is None:
raise ValueError("Invalid value for `refs`, must not be `None`") # noqa: E501
self._refs = refs
@property
def branch_name(self):
"""Gets the branch_name of this Body. # noqa: E501
新创建的分支名称 # noqa: E501
:return: The branch_name of this Body. # noqa: E501
:rtype: str
"""
return self._branch_name
@branch_name.setter
def branch_name(self, branch_name):
"""Sets the branch_name of this Body.
新创建的分支名称 # noqa: E501
:param branch_name: The branch_name of this Body. # noqa: E501
:type: str
"""
if branch_name is None:
raise ValueError("Invalid value for `branch_name`, must not be `None`") # noqa: E501
self._branch_name = branch_name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Body, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Body):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 27.374233 | 97 | 0.548633 |
720fb70c201343c2a3f7a4786e75f6db8b12cba3 | 4,852 | py | Python | source/CreateRevisionFunction/app.py | kwwaikar/aws-data-exchange-publisher-coordinator | 6626f198f2fc433418debf25a6fab5a9b26d2b57 | [
"MIT-0"
] | 3 | 2020-12-29T18:37:09.000Z | 2021-12-10T02:08:15.000Z | source/CreateRevisionFunction/app.py | kwwaikar/aws-data-exchange-publisher-coordinator | 6626f198f2fc433418debf25a6fab5a9b26d2b57 | [
"MIT-0"
] | null | null | null | source/CreateRevisionFunction/app.py | kwwaikar/aws-data-exchange-publisher-coordinator | 6626f198f2fc433418debf25a6fab5a9b26d2b57 | [
"MIT-0"
] | 4 | 2021-03-01T20:37:25.000Z | 2021-12-09T17:26:01.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
##############################################################################
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
##############################################################################
import json
import boto3
import os
import logging
from datetime import datetime
import urllib3
#This function creates a new revision for the dataset and starts the job to add it to AWS Data Exchange
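# Expected manifest layout (illustrative; field names taken from the accesses below):
# {
#   "Manifest": {
#     "DatasetId": "<data set id>",
#     "ProductId": "<product id>",
#     "Assets": [{"Bucket": "<s3 bucket>", "Key": "<s3 key>"}, ...]
#   }
# }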
def lambda_handler(event, context):
try:
global log_level
log_level = str(os.environ.get('LOG_LEVEL')).upper()
if log_level not in [
'DEBUG', 'INFO',
'WARNING', 'ERROR',
'CRITICAL'
]:
log_level = 'ERROR'
logging.getLogger().setLevel(log_level)
logging.debug('event={}'.format(event))
dataexchange = boto3.client(service_name='dataexchange')
s3 = boto3.client(
service_name='s3'
)
bucket = event['Bucket']
key = event['Key']
response = s3.get_object(Bucket=bucket,Key=key)
manifest = json.loads(response['Body'].read(amt=response['ContentLength']))
datasetId = manifest['Manifest']['DatasetId']
productId = manifest['Manifest']['ProductId']
logging.debug('datasetId={}'.format(datasetId))
revision = dataexchange.create_revision(DataSetId=datasetId,Comment="from aws-data-exchange-publisher-coordinator")
revisionId = revision['Id']
        logging.info('revisionId={}'.format(revisionId))
revisiondetails = {
"ImportAssetsFromS3": {
"AssetSources":
manifest['Manifest']['Assets']
,
"DataSetId": datasetId,
"RevisionId": revisionId
}
}
logging.debug('revision details = {}'.format(revisiondetails))
jobresponse = dataexchange.create_job(Type='IMPORT_ASSETS_FROM_S3',Details=revisiondetails)
jobArnparts = jobresponse['Arn'].split('/')
jobId = jobArnparts[1]
logging.info('jobId={}'.format(jobId))
startjobresponse = dataexchange.start_job(JobId=jobId)
httpResponse = startjobresponse['ResponseMetadata']['HTTPStatusCode']
logging.debug('HTTPResponse={}'.format(httpResponse))
jobresponse = dataexchange.get_job(JobId=jobId)
logging.debug('get job = {}'.format(jobresponse))
jobStatus = jobresponse['State']
sendMetrics=os.environ.get('AnonymousUsage')
if sendMetrics=="Yes":
metricdata = {
"Version" : os.environ.get('Version'),
"AssetCount" : len(manifest['Manifest']['Assets'])
}
solutionData={
"Solution": os.environ.get('SolutionId'),
"UUID": os.environ.get('UUID'),
"TimeStamp": datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),
"Data": metricdata
}
http = urllib3.PoolManager()
metricURL = "https://metrics.awssolutionsbuilder.com/generic"
encoded_data = json.dumps(solutionData).encode('utf-8')
headers={'Content-Type': 'application/json'}
http.request('POST',metricURL,
body=encoded_data,
headers=headers)
except Exception as e:
logging.error(e)
raise e
return {
"statusCode": httpResponse,
"Message": "Job Started",
"ProductId" : productId,
"DataSetId": datasetId,
"RevisionId": revisionId,
"JobId": jobId,
"JobStatus" : jobStatus
}
| 43.711712 | 124 | 0.570692 |
1e4c384f0500582fa7d925aad7319c5a5ffc3453 | 3,363 | py | Python | hpe3d/utils/renderer.py | dorianhenning/hpe3d | 14ea2b0f76145793e2b15ce6fd31736c4be26b5b | [
"MIT"
] | 2 | 2020-05-27T08:21:20.000Z | 2021-06-28T15:42:58.000Z | hpe3d/utils/renderer.py | dorianhenning/hpe3d | 14ea2b0f76145793e2b15ce6fd31736c4be26b5b | [
"MIT"
] | 1 | 2022-03-22T14:07:05.000Z | 2022-03-22T14:07:08.000Z | hpe3d/utils/renderer.py | dorianhenning/hpe3d | 14ea2b0f76145793e2b15ce6fd31736c4be26b5b | [
"MIT"
] | 4 | 2020-05-27T08:21:21.000Z | 2021-08-08T19:17:13.000Z | import torch
from torchvision.utils import make_grid
import numpy as np
import pyrender
import trimesh
import hpe3d.utils.constants as C
class Renderer:
'''
Renderer used for visualizing the SMPL model
Code adapted from https://github.com/vchoutas/smplify-x
'''
def __init__(
self,
focal_length=C.FOCAL_LENGTH,
img_res=C.IMG_SIZE,
camera_center=C.CAMERA_CENTER,
faces=None
):
self.renderer = pyrender.OffscreenRenderer(viewport_width=img_res[0],
viewport_height=img_res[1],
point_size=1.0)
self.focal_length = focal_length
self.camera_center = camera_center
self.faces = faces
def visualize_tb(self, vertices, camera_translation, images):
vertices = vertices.cpu().numpy()
camera_translation = camera_translation.cpu().numpy()
images = images.cpu()
images_np = np.transpose(images.numpy(), (0, 2, 3, 1))
rend_imgs = []
for i in range(vertices.shape[0]):
rend_img = torch.from_numpy(
np.transpose(
self.__call__(vertices[i],
camera_translation[i],
images_np[i]),
(2, 0, 1))).float()
rend_imgs.append(images[i])
rend_imgs.append(rend_img)
rend_imgs = make_grid(rend_imgs, nrow=2)
return rend_imgs
def __call__(self, vertices, camera_pose, image, color=(0.8, 0.3, 0.3, 1.0)):
material = pyrender.MetallicRoughnessMaterial(
metallicFactor=0.2,
alphaMode='OPAQUE',
baseColorFactor=color)
mesh = trimesh.Trimesh(vertices, self.faces)
# Rotate mesh 180 deg around x (pyrender coordinate frame)
rot = trimesh.transformations.rotation_matrix(
np.radians(180), [1, 0, 0])
mesh.apply_transform(rot)
mesh = pyrender.Mesh.from_trimesh(mesh, material=material)
# Rotate trafo 180 deg around x (pyrender coordinate frame)
Rx = np.array([[1, 0, 0, 0],
[0, -1, 0, 0],
[0, 0, -1, 0],
[0, 0, 0, 1]], dtype=float)
camera_pose = np.dot(camera_pose, Rx)
scene = pyrender.Scene(ambient_light=(0.5, 0.5, 0.5))
scene.add(mesh, 'mesh')
camera = pyrender.IntrinsicsCamera(fx=self.focal_length[0], fy=self.focal_length[1],
cx=self.camera_center[0], cy=self.camera_center[1])
scene.add(camera, pose=camera_pose)
light = pyrender.DirectionalLight(color=[1.0, 1.0, 1.0], intensity=1)
light_pose = np.eye(4)
light_pose[:3, 3] = np.array([0, -1, 1])
scene.add(light, pose=light_pose)
light_pose[:3, 3] = np.array([0, 1, 1])
scene.add(light, pose=light_pose)
light_pose[:3, 3] = np.array([1, 1, 2])
scene.add(light, pose=light_pose)
color, rend_depth = self.renderer.render(scene, flags=pyrender.RenderFlags.RGBA)
valid_mask = (rend_depth > 0)[:, :, None]
output_img = (color[:, :, :3] * valid_mask +
(1 - valid_mask) * image).astype(np.uint8)
return output_img
| 36.554348 | 94 | 0.559917 |
1e6fc48510a4b6dc67c1fb82dceb56acc72b4060 | 6,482 | py | Python | tests/loader/test_olsson_loader.py | swershrimpy/gtsfm | 8d301eb3ef9172345a1ac1369fd4e19764d28946 | [
"Apache-2.0"
] | 122 | 2021-02-07T23:01:58.000Z | 2022-03-30T13:10:35.000Z | tests/loader/test_olsson_loader.py | swershrimpy/gtsfm | 8d301eb3ef9172345a1ac1369fd4e19764d28946 | [
"Apache-2.0"
] | 273 | 2021-01-30T16:45:26.000Z | 2022-03-16T15:02:33.000Z | tests/loader/test_olsson_loader.py | swershrimpy/gtsfm | 8d301eb3ef9172345a1ac1369fd4e19764d28946 | [
"Apache-2.0"
] | 13 | 2021-03-12T03:01:27.000Z | 2022-03-11T03:16:54.000Z | """Unit tests for the Olsson Loader class.
Authors: John Lambert
"""
import unittest
from pathlib import Path
from unittest.mock import MagicMock, patch
import dask
import numpy as np
import pytest
from gtsam import Cal3Bundler, Rot3, Pose3
import gtsfm.utils.io as io_utils
from gtsfm.loader.olsson_loader import OlssonLoader
DATA_ROOT_PATH = Path(__file__).resolve().parent.parent / "data"
DEFAULT_FOLDER = DATA_ROOT_PATH / "set1_lund_door"
EXIF_FOLDER = DATA_ROOT_PATH / "set2_lund_door_nointrinsics"
NO_EXTRINSICS_FOLDER = DATA_ROOT_PATH / "set3_lund_door_nointrinsics_noextrinsics"
NO_EXIF_FOLDER = DATA_ROOT_PATH / "set4_lund_door_nointrinsics_noextrinsics_noexif"
class TestFolderLoader(unittest.TestCase):
"""Unit tests for folder loader, which loads image from a folder on disk."""
def setUp(self) -> None:
"""Set up the loader for the test."""
super().setUp()
self.loader = OlssonLoader(str(DEFAULT_FOLDER), image_extension="JPG", max_frame_lookahead=4)
def test_len(self) -> None:
"""Test the number of entries in the loader."""
self.assertEqual(12, len(self.loader))
def test_get_image_valid_index(self) -> None:
"""Tests that get_image works for all valid indices."""
for idx in range(len(self.loader)):
self.assertIsNotNone(self.loader.get_image(idx))
def test_get_image_invalid_index(self) -> None:
"""Test that get_image raises an exception on an invalid index."""
# negative index
with self.assertRaises(IndexError):
self.loader.get_image(-1)
# len() as index
with self.assertRaises(IndexError):
self.loader.get_image(12)
# index > len()
with self.assertRaises(IndexError):
self.loader.get_image(15)
def test_image_contents(self) -> None:
"""Test the actual image which is being fetched by the loader at an index.
        This test's primary purpose is to check that the ordering of filenames is respected by the loader.
"""
index_to_test = 5
file_path = DEFAULT_FOLDER / "images" / "DSC_0006.JPG"
loader_image = self.loader.get_image_full_res(index_to_test)
expected_image = io_utils.load_image(file_path)
np.testing.assert_allclose(expected_image.value_array, loader_image.value_array)
def test_get_camera_pose_exists(self) -> None:
"""Tests that the correct pose is fetched (present on disk)."""
fetched_pose = self.loader.get_camera_pose(1)
wRi_expected = np.array(
[
[0.998079, 0.015881, 0.0598844],
[-0.0161175, 0.999864, 0.00346851],
[-0.0598212, -0.00442703, 0.998199],
]
)
wti_expected = np.array([-0.826311, -0.00409053, 0.111315])
expected_pose = Pose3(Rot3(wRi_expected), wti_expected)
self.assertTrue(expected_pose.equals(fetched_pose, 1e-2))
def test_get_camera_pose_missing(self):
"""Tests that the camera pose is None, because it is missing on disk."""
loader = OlssonLoader(str(NO_EXTRINSICS_FOLDER), image_extension="JPG")
fetched_pose = loader.get_camera_pose(5)
self.assertIsNone(fetched_pose)
def test_get_camera_intrinsics_explicit(self) -> None:
"""Tests getter for intrinsics when explicit data.mat file with intrinsics are present on disk."""
expected_fx = 2398.119
expected_fy = 2393.952
expected_fx = min(expected_fx, expected_fy)
expected_px = 628.265
expected_py = 932.382
computed = self.loader.get_camera_intrinsics_full_res(5)
expected = Cal3Bundler(fx=expected_fx, k1=0, k2=0, u0=expected_px, v0=expected_py)
self.assertTrue(expected.equals(computed, 1e-3))
def test_get_camera_intrinsics_exif(self) -> None:
"""Tests getter for intrinsics when explicit numpy arrays are absent and we fall back on exif."""
loader = OlssonLoader(EXIF_FOLDER, image_extension="JPG", use_gt_intrinsics=False)
computed = loader.get_camera_intrinsics_full_res(5)
expected = Cal3Bundler(fx=2378.983, k1=0, k2=0, u0=648.0, v0=968.0)
self.assertTrue(expected.equals(computed, 1e-3))
def test_get_camera_intrinsics_missing(self) -> None:
"""Tests getter for intrinsics when explicit numpy arrays are absent, exif is missing, and we raise an error."""
loader = OlssonLoader(NO_EXIF_FOLDER, image_extension="JPG")
with pytest.raises(ValueError):
computed = loader.get_camera_intrinsics(5)
def test_create_computation_graph_for_images(self) -> None:
"""Tests the graph for loading all the images."""
image_graph = self.loader.create_computation_graph_for_images()
# check the length of the graph
self.assertEqual(12, len(image_graph))
results = dask.compute(image_graph)[0]
# randomly check image loads from a few indices
np.testing.assert_allclose(results[5].value_array, self.loader.get_image(5).value_array)
np.testing.assert_allclose(results[7].value_array, self.loader.get_image(7).value_array)
def test_create_computation_graph_for_intrinsics(self) -> None:
"""Tests the graph for all intrinsics."""
intrinsics_graph = self.loader.create_computation_graph_for_intrinsics()
# check the length of the graph
self.assertEqual(12, len(intrinsics_graph))
results = dask.compute(intrinsics_graph)[0]
# randomly check intrinsics from a few indices
self.assertTrue(self.loader.get_camera_intrinsics(5).equals(results[5], 1e-5))
self.assertTrue(self.loader.get_camera_intrinsics(7).equals(results[7], 1e-5))
@patch("gtsfm.loader.loader_base.LoaderBase.is_valid_pair", return_value=True)
def test_is_valid_pair_within_lookahead(self, base_is_valid_pair_mock: MagicMock) -> None:
i1 = 1
i2 = 3
self.assertTrue(self.loader.is_valid_pair(i1, i2))
base_is_valid_pair_mock.assert_called_once_with(i1, i2)
@patch("gtsfm.loader.loader_base.LoaderBase.is_valid_pair", return_value=True)
def test_is_valid_pair_outside_lookahead(self, base_is_valid_pair_mock: MagicMock) -> None:
i1 = 1
i2 = 10
self.assertFalse(self.loader.is_valid_pair(i1, i2))
base_is_valid_pair_mock.assert_called_once_with(i1, i2)
if __name__ == "__main__":
unittest.main()
| 40.26087 | 120 | 0.693613 |
2be4f8301abfbbfe85af6245730d54ba3ffe9d45 | 5,367 | py | Python | pytorchvideo/models/hub/x3d.py | kevinmtian/pytorchvideo | 168e16859a6029ef8ebeb476f9163bebb6c6b87d | [
"Apache-2.0"
] | 2,391 | 2021-04-13T18:10:18.000Z | 2022-03-31T15:07:09.000Z | pytorchvideo/models/hub/x3d.py | kevinmtian/pytorchvideo | 168e16859a6029ef8ebeb476f9163bebb6c6b87d | [
"Apache-2.0"
] | 156 | 2021-04-13T18:51:49.000Z | 2022-03-31T08:05:50.000Z | pytorchvideo/models/hub/x3d.py | kevinmtian/pytorchvideo | 168e16859a6029ef8ebeb476f9163bebb6c6b87d | [
"Apache-2.0"
] | 231 | 2021-04-14T05:04:55.000Z | 2022-03-22T09:35:46.000Z | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Any, Optional
import torch.nn as nn
from pytorchvideo.models.x3d import create_x3d
from torch.hub import load_state_dict_from_url
root_dir = "https://dl.fbaipublicfiles.com/pytorchvideo/model_zoo/kinetics"
checkpoint_paths = {
"x3d_xs": f"{root_dir}/X3D_XS.pyth",
"x3d_s": f"{root_dir}/X3D_S.pyth",
"x3d_m": f"{root_dir}/X3D_M.pyth",
"x3d_l": f"{root_dir}/X3D_L.pyth",
}
def _x3d(
pretrained: bool = False,
progress: bool = True,
checkpoint_path: Optional[str] = None,
**kwargs: Any,
) -> nn.Module:
model = create_x3d(**kwargs)
if pretrained and checkpoint_path is not None:
# All models are loaded onto CPU by default
checkpoint = load_state_dict_from_url(
checkpoint_path, progress=progress, map_location="cpu"
)
state_dict = checkpoint["model_state"]
model.load_state_dict(state_dict)
return model
def x3d_xs(
pretrained: bool = False,
progress: bool = True,
**kwargs,
):
r"""
X3D-XS model architecture [1] trained on the Kinetics dataset.
Model with pretrained weights has top1 accuracy of 69.12.
[1] Christoph Feichtenhofer, "X3D: Expanding Architectures for
Efficient Video Recognition." https://arxiv.org/abs/2004.04730
Args:
pretrained (bool): If True, returns a model pre-trained on the Kinetics dataset
progress (bool): If True, displays a progress bar of the download to stderr
kwargs: use these to modify any of the other model settings. All the
options are defined in pytorchvideo/models/x3d.py
NOTE: to use the pretrained model, do not modify the model configuration
via the kwargs. Only modify settings via kwargs to initialize a new model
without pretrained weights.
"""
return _x3d(
pretrained=pretrained,
progress=progress,
checkpoint_path=checkpoint_paths["x3d_xs"],
input_clip_length=4,
input_crop_size=160,
**kwargs,
)
def x3d_s(
pretrained: bool = False,
progress: bool = True,
**kwargs,
):
"""
    X3D-S model architecture [1] trained on the Kinetics dataset.
Model with pretrained weights has top1 accuracy of 73.33.
[1] Christoph Feichtenhofer, "X3D: Expanding Architectures for
Efficient Video Recognition." https://arxiv.org/abs/2004.04730
Args:
pretrained (bool): If True, returns a model pre-trained on the Kinetics dataset
progress (bool): If True, displays a progress bar of the download to stderr
kwargs: use these to modify any of the other model settings. All the
options are defined in pytorchvideo/models/x3d.py
NOTE: to use the pretrained model, do not modify the model configuration
via the kwargs. Only modify settings via kwargs to initialize a new model
without pretrained weights.
"""
return _x3d(
pretrained=pretrained,
progress=progress,
checkpoint_path=checkpoint_paths["x3d_s"],
input_clip_length=13,
input_crop_size=160,
**kwargs,
)
def x3d_m(
pretrained: bool = False,
progress: bool = True,
**kwargs,
):
"""
    X3D-M model architecture [1] trained on the Kinetics dataset.
Model with pretrained weights has top1 accuracy of 75.94.
[1] Christoph Feichtenhofer, "X3D: Expanding Architectures for
Efficient Video Recognition." https://arxiv.org/abs/2004.04730
Args:
pretrained (bool): If True, returns a model pre-trained on the Kinetics dataset
progress (bool): If True, displays a progress bar of the download to stderr
kwargs: use these to modify any of the other model settings. All the
options are defined in pytorchvideo/models/x3d.py
NOTE: to use the pretrained model, do not modify the model configuration
via the kwargs. Only modify settings via kwargs to initialize a new model
without pretrained weights.
"""
return _x3d(
pretrained=pretrained,
progress=progress,
checkpoint_path=checkpoint_paths["x3d_m"],
input_clip_length=16,
input_crop_size=224,
**kwargs,
)
def x3d_l(
pretrained: bool = False,
progress: bool = True,
**kwargs,
):
"""
    X3D-L model architecture [1] trained on the Kinetics dataset.
Model with pretrained weights has top1 accuracy of 77.44.
[1] Christoph Feichtenhofer, "X3D: Expanding Architectures for
Efficient Video Recognition." https://arxiv.org/abs/2004.04730
Args:
pretrained (bool): If True, returns a model pre-trained on the Kinetics dataset
progress (bool): If True, displays a progress bar of the download to stderr
kwargs: use these to modify any of the other model settings. All the
options are defined in pytorchvideo/models/x3d.py
NOTE: to use the pretrained model, do not modify the model configuration
via the kwargs. Only modify settings via kwargs to initialize a new model
without pretrained weights.
"""
return _x3d(
pretrained=pretrained,
progress=progress,
checkpoint_path=checkpoint_paths["x3d_l"],
input_clip_length=16,
input_crop_size=312,
depth_factor=5.0,
**kwargs,
)
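# A minimal usage sketch (hedged): downloading pretrained weights requires
# network access, and the input tensor follows the
# (batch, channels, frames, height, width) convention used by these models.
#
#     import torch
#     model = x3d_m(pretrained=True).eval()
#     clip = torch.randn(1, 3, 16, 224, 224)
#     with torch.no_grad():
#         logits = model(clip)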
| 32.92638 | 87 | 0.680455 |
2b121abb100f78a39359b341bc923a75ce77edaa | 2,978 | py | Python | data-science-not/weeks/m01_introduction/phone_book.py | nurseiit/comm-unist | e7a122c910bf12eddf5c0ffc2c666995b4989408 | [
"MIT"
] | 4 | 2019-07-03T00:57:01.000Z | 2020-12-11T23:06:11.000Z | data-science-not/weeks/m01_introduction/phone_book.py | nurseiit/comm-unist | e7a122c910bf12eddf5c0ffc2c666995b4989408 | [
"MIT"
] | 1 | 2019-10-19T17:42:42.000Z | 2019-10-19T17:42:42.000Z | data-science-not/weeks/m01_introduction/phone_book.py | nurseiit/comm-unist | e7a122c910bf12eddf5c0ffc2c666995b4989408 | [
"MIT"
] | 1 | 2019-11-05T04:14:08.000Z | 2019-11-05T04:14:08.000Z | """
Your objective is to implement a simple Phone Book.
Phone numbers are stored in a dictionary, e.g. { "Adam" : "010-0000-1111", "Alice" : "010-0011-2233"}
"""
def add_contact(phone_book, name, number):
"""
    This function allows you to store a new contact in the phone book
:param phone_book: the phone book (dictionary)
:param name: the name of the contact (a string)
:param number: the cell number of the contact (a string)
"""
phone_book[name] = number
def search_contact(phone_book, name):
"""
    This function allows you to search for a contact. It should print a meaningful message, e.g.:
"Contact "Alice" found: 010-1111-2222" OR
"Contact Alice not found!"
This function should also return the boolean value True if the contact is found, False otherwise
:param phone_book: the phone book (dictionary)
:param name: the name of the contact to search
"""
    if name in phone_book:
        print('Contact "%s" found: %s' % (name, phone_book[name]))
        return True
    else:
        print('Contact "%s" not found!' % name)
        return False
def delete(phone_book, name):
"""
This function deletes a contact from the phone book (note: you should manage also the case in which the
contact to delete is not in the phone book!)
:param phone_book: the phone book (dictionary)
    :param name: the name of the contact to delete
"""
    if name in phone_book:
        del phone_book[name]
    else:
        print('Contact "%s" not found!' % name)
def count_contacts(phone_book):
"""
This function counts the number of contacts in the phone book and prints a message, e.g.:
"The number of contacts is: 25"
:param phone_book: the phone book (dictionary)
"""
print('The number of contacts is: %d' % len(phone_book))
def print_phone_book(phone_book):
"""
This function prints on the console the content of the entire phone book
:param phone_book: the phone book (dictionary)
"""
for key, val in phone_book.items():
print('%s: %s' % (key, val))
if __name__ == '__main__':
    # phone book initialised:
phone_book = {"John" : "010-6787-990011", "Jin" : "010-4455-7788", "Bob" : "010-8872-0011"}
print_phone_book(phone_book) # print the phone book content
add_contact(phone_book, "Alice", "010-7865-8899") # add one entry
search_contact(phone_book, "Jiyoung") # search for Jyoung's number
search_contact(phone_book, "Jin") # search for Jin's number
count_contacts(phone_book) # should output 4
delete(phone_book, "Bob") # delete Bob from the phone book
delete(phone_book, "Alice")
add_contact(phone_book, "Marco", "010-9988-6677")
count_contacts(phone_book) # should output 3
print_phone_book(phone_book)
| 39.184211 | 107 | 0.629617 |
c3a129ba5941f083eb6e36a06f255b2dab07c6af | 1,172 | py | Python | deploy-agent/deployd/common/caller.py | jsoref/pinterest-teletraan | e1093a8fc232c66b01595f46ed5f2f2ff42d6dc6 | [
"Apache-2.0"
] | 1 | 2020-07-31T10:51:29.000Z | 2020-07-31T10:51:29.000Z | deploy-agent/deployd/common/caller.py | jsoref/pinterest-teletraan | e1093a8fc232c66b01595f46ed5f2f2ff42d6dc6 | [
"Apache-2.0"
] | null | null | null | deploy-agent/deployd/common/caller.py | jsoref/pinterest-teletraan | e1093a8fc232c66b01595f46ed5f2f2ff42d6dc6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2016 Pinterest, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import traceback
import logging
log = logging.getLogger(__name__)
class Caller(object):
def __init__(self):
pass
@staticmethod
def call_and_log(cmd, **kwargs):
try:
process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, **kwargs)
output, error = process.communicate()
return output.strip(), error.strip(), process.poll()
except Exception as e:
log.error(traceback.format_exc())
            return None, str(e), 1  # Exception.message no longer exists in Python 3
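# A minimal usage sketch (hedged; the command below is illustrative):
#
#     out, err, rc = Caller.call_and_log(['echo', 'hello'])
#     if rc != 0:
#         log.error('command failed: %s', err)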
| 32.555556 | 74 | 0.673208 |
47c2e7bc9728f450efef02b4fcf7a9a98b1e801a | 3,234 | py | Python | pymatgen/vis/structure_chemview.py | cajfisher/pymatgen | 286c304e38102d567723a71f733e0c304b72035d | [
"MIT"
] | 6 | 2015-02-06T08:27:09.000Z | 2021-02-28T14:42:52.000Z | pymatgen/vis/structure_chemview.py | cajfisher/pymatgen | 286c304e38102d567723a71f733e0c304b72035d | [
"MIT"
] | null | null | null | pymatgen/vis/structure_chemview.py | cajfisher/pymatgen | 286c304e38102d567723a71f733e0c304b72035d | [
"MIT"
] | 3 | 2015-10-21T08:04:40.000Z | 2019-03-19T23:11:15.000Z | """
Visualization for structures using chemview.
"""
import numpy as np
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
from pymatgen.analysis.molecule_structure_comparator import CovalentRadius
from monty.dev import requires
try:
from chemview import MolecularViewer
from chemview.utils import get_atom_color
chemview_loaded = True
except ImportError:
chemview_loaded = False
@requires(chemview_loaded, "To use quick_view, you need to have chemview installed.")
def quick_view(structure, bonds=True, conventional=False, transform=None, show_box=True, bond_tol=0.2,
stick_radius=0.1):
"""
A function to visualize pymatgen Structure objects in jupyter notebook using chemview package.
Args:
structure: pymatgen Structure
bonds: (bool) visualize bonds. Bonds are found by comparing distances
to added covalent radii of pairs. Defaults to True.
conventional: (bool) use conventional cell. Defaults to False.
transform: (list) can be used to make supercells with pymatgen.Structure.make_supercell method
show_box: (bool) unit cell is shown. Defaults to True.
bond_tol: (float) used if bonds=True. Sets the extra distance tolerance when finding bonds.
stick_radius: (float) radius of bonds.
Returns:
A chemview.MolecularViewer object
"""
s = structure.copy()
if conventional:
s = SpacegroupAnalyzer(s).get_conventional_standard_structure()
if transform:
s.make_supercell(transform)
atom_types = [i.symbol for i in s.species]
if bonds:
bonds = []
for i in range(s.num_sites - 1):
sym_i = s[i].specie.symbol
for j in range(i + 1, s.num_sites):
sym_j = s[j].specie.symbol
max_d = CovalentRadius.radius[sym_i] + CovalentRadius.radius[sym_j] + bond_tol
if s.get_distance(i, j, np.array([0, 0, 0])) < max_d:
bonds.append((i, j))
bonds = bonds if bonds else None
mv = MolecularViewer(s.cart_coords, topology={'atom_types': atom_types, 'bonds': bonds})
if bonds:
mv.ball_and_sticks(stick_radius=stick_radius)
for i in s.sites:
el = i.specie.symbol
coord = i.coords
r = CovalentRadius.radius[el]
mv.add_representation('spheres', {'coordinates': coord.astype('float32'),
'colors': [get_atom_color(el)],
'radii': [r * 0.5],
'opacity': 1.0})
if show_box:
o = np.array([0, 0, 0])
a, b, c = s.lattice.matrix[0], s.lattice.matrix[1], s.lattice.matrix[2]
starts = [o, o, o, a, a, b, b, c, c, a + b, a + c, b + c]
ends = [a, b, c, a + b, a + c, b + a, b + c, c + a, c + b, a + b + c, a + b + c, a + b + c]
colors = [0xffffff for i in range(12)]
mv.add_representation('lines', {'startCoords': np.array(starts),
'endCoords': np.array(ends),
'startColors': colors,
'endColors': colors})
return mv
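# A minimal usage sketch for a Jupyter notebook (hedged: assumes a pymatgen
# Structure named `structure` already exists, e.g. Structure.from_file("POSCAR")):
#
#     mv = quick_view(structure, conventional=True, transform=[2, 2, 2])
#     mv  # displaying the MolecularViewer object renders the scene inline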
| 40.425 | 102 | 0.592455 |
57812817af07b5843a52812826630ce432c76344 | 3,257 | py | Python | jumboSmash/users/views.py | wolfep15/arith | 14e628210e54d8f171dff796e7c71d3ea1574067 | [
"MIT"
] | 1 | 2020-08-04T01:10:56.000Z | 2020-08-04T01:10:56.000Z | jumboSmash/users/views.py | wolfep15/arith | 14e628210e54d8f171dff796e7c71d3ea1574067 | [
"MIT"
] | null | null | null | jumboSmash/users/views.py | wolfep15/arith | 14e628210e54d8f171dff796e7c71d3ea1574067 | [
"MIT"
] | null | null | null | from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import AllowAny
from users.models import User, Profile
from users.serializers import (
UserIdSerializer,
SimpleUserSerializer,
ProfileSerializer,
SetUpSerializer,
)
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from django.shortcuts import render
import logging
class ListUsers(APIView):
"""
View to get list of all users
"""
def get(self, request):
queryset = User.objects.all()
serializer = SimpleUserSerializer(queryset, many=True)
return Response(serializer.data)
class GetProfile(APIView):
"""
Get a profile for given user
"""
# TODO don't show if banned <- or should this be blocked elsewhere
def get(self, request):
user_serializer = UserIdSerializer(data=request.query_params)
user_serializer.is_valid(raise_exception=True)
requested_user = user_serializer.validated_data["user"]
approved, pending = Profile.objects.get_profiles(requested_user)
        profile = (pending or approved) if requested_user == request.user else approved
if profile:
serializer = ProfileSerializer(profile)
return Response(serializer.data)
else:
return Response(
"User does not have an approved profile",
status=status.HTTP_404_NOT_FOUND,
)
class EditProfile(APIView):
"""
Edit profile for currently logged in user
"""
def get(self, request):
return Response({"d": Profile.objects.get_upload_urls(request.user.id)})
def post(self, request):
user_id = request.user.id
serializer = ProfileSerializer(
data=request.data, context={"user": request.user}
)
serializer.is_valid(raise_exception=True)
profile = serializer.save()
return Response(status=status.HTTP_204_NO_CONTENT)
class SetupUser(APIView):
"""
Setup the profile for a user
"""
def get(self, request):
return Response(request.user.id_upload_url())
def post(self, request):
user_id = request.user.id
serializer = SetUpSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
if request.user.status == User.INACTIVE:
request.user.preferred_name = serializer["name"].value
request.user.save()
return Response(status=status.HTTP_204_NO_CONTENT)
else:
return Response("User is already setup", status=status.HTTP_409_CONFLICT)
class CheckUserExists(APIView):
"""
Endpoint to confirm email is in our list of seniors.
"""
permissions = [AllowAny]
def get(self, request):
email = request.query_params.get("email")
if not email:
return render(request, "user_check.html", {})
try:
user = User.objects.get(email=email)
return render(
request, "successful_check.html", {"email": email, "user": user},
)
        except (ObjectDoesNotExist, MultipleObjectsReturned):
return render(request, "failed_check.html", {"email": email})
| 30.157407 | 85 | 0.657353 |
254f9a7ca77fc0661d244ac0b85f2dd7b0dc260e | 2,281 | py | Python | app/process/exp_center.py | HansBug/dgdvapp | f3142d2c265afda427bbeee46c8073e1126eeef5 | [
"Apache-2.0"
] | null | null | null | app/process/exp_center.py | HansBug/dgdvapp | f3142d2c265afda427bbeee46c8073e1126eeef5 | [
"Apache-2.0"
] | null | null | null | app/process/exp_center.py | HansBug/dgdvapp | f3142d2c265afda427bbeee46c8073e1126eeef5 | [
"Apache-2.0"
] | null | null | null | import glob
import os
import pandas as pd
from app.proto import MpsProtoData
def find_expdata_in_directory(directory: str):
matchings = glob.glob1(directory, 'expdata_*')
if matchings:
return os.path.join(directory, matchings[0])
else:
raise FileNotFoundError(f'No expdata file found in {repr(directory)}.')
def exp_center_file_in_directory(directory: str) -> str:
return os.path.join(directory, 'exp_center.csv')
def exp_center_process_in_directory(directory: str, force: bool = False):
return exp_center_process(
find_expdata_in_directory(directory),
exp_center_file_in_directory(directory),
force=force,
)
def exp_center_trans(src_file: str) -> pd.DataFrame:
mpd = MpsProtoData()
with open(src_file, 'rb') as f:
con = f.read()
index = 0
data = {
'id': [],
'type': [],
'time': [],
'lng': [],
'lat': [],
'height': []
}
    while index < len(con):
        # Each record is a 1-byte type tag followed by a varint-encoded payload
        # length (little-endian 7-bit groups; a set high bit marks continuation).
        type_ = con[index]
        cur = index + 1
        lengths = []
        while con[cur] >= 128:
            lengths.append(con[cur] - 128)  # strip the continuation bit
            cur += 1
        if len(lengths) < 5:  # cap the varint at 5 bytes (32-bit lengths)
            lengths.append(con[cur])
        # The first byte is least significant, so reverse and accumulate
        # most-significant-first in base 128.
        lengths.reverse()
        final_length = 0
        for length in lengths:
            final_length = final_length * 128 + length
content = con[cur + 1:cur + 1 + final_length]
if type_ == 1:
index = cur + 1 + final_length
continue
else:
mpd.ParseFromString(content)
if mpd.id == 20000:
data['id'].append(mpd.id)
data['type'].append(mpd.type)
data['time'].append(mpd.time)
data['lng'].append(mpd.lng)
data['lat'].append(mpd.lat)
data['height'].append(mpd.h)
index = cur + 1 + final_length
return pd.DataFrame(data)
def exp_center_process(src_file: str, dst_file: str, force: bool = False):
if not force and os.path.exists(dst_file):
return
df = exp_center_trans(src_file)
df.to_csv(dst_file)
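# A minimal usage sketch (hedged; the directory path is illustrative and must
# contain an 'expdata_*' dump file):
#
#     exp_center_process_in_directory('/path/to/run_001', force=True)
#     df = pd.read_csv(exp_center_file_in_directory('/path/to/run_001'))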
| 28.873418 | 79 | 0.530907 |
5edbf1abbd9b01f17e7def7211c00eac72f2b6f9 | 435 | py | Python | 0311_intentional_error.py | arunkumarang/python | 1960e285dfe2ef54d2e3ab37584bfef8b24ecca9 | [
"Apache-2.0"
] | null | null | null | 0311_intentional_error.py | arunkumarang/python | 1960e285dfe2ef54d2e3ab37584bfef8b24ecca9 | [
"Apache-2.0"
] | null | null | null | 0311_intentional_error.py | arunkumarang/python | 1960e285dfe2ef54d2e3ab37584bfef8b24ecca9 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
import sys
def main():
guest_lists = ['senthil', 'raj', 'ameen']
guest_lists.insert(0, 'naveen')
guest_lists.insert(2, 'prabhu')
guest_lists.append('ragu')
print('list item:', guest_lists[-1])
new_guests = []
new_guests.append(guest_lists[-2])
print('new list item:', new_guests[-1])
if __name__ == '__main__':
main()
sys.exit(0)
| 18.913043 | 46 | 0.558621 |
7ab33c44e643fb860b4f84327659af2debbe5ca3 | 302 | py | Python | awwardsApp/admin.py | derrickmacharia/Awwards | 3e07d6e78ba1628fc087e0f865f7930e39891d11 | [
"MIT"
] | null | null | null | awwardsApp/admin.py | derrickmacharia/Awwards | 3e07d6e78ba1628fc087e0f865f7930e39891d11 | [
"MIT"
] | null | null | null | awwardsApp/admin.py | derrickmacharia/Awwards | 3e07d6e78ba1628fc087e0f865f7930e39891d11 | [
"MIT"
] | null | null | null | from typing import Callable
from django.contrib import admin
from .models import Category, Location, Profile, Project, Rating
# Register your models here.
admin.site.register(Project)
admin.site.register(Profile)
admin.site.register(Category)
admin.site.register(Location)
admin.site.register(Rating)
| 27.454545 | 64 | 0.817881 |
0192dfd5eca3ad2dd22df3d187267dfb450c5ba5 | 1,545 | py | Python | lastVer/urls.py | aymanexpo/mini-GED | de23fb57c902bfcc39329fbe4eda647c8d7bf016 | [
"MIT"
] | 1 | 2020-09-14T00:54:52.000Z | 2020-09-14T00:54:52.000Z | lastVer/urls.py | aymanexpo/mini-GED | de23fb57c902bfcc39329fbe4eda647c8d7bf016 | [
"MIT"
] | null | null | null | lastVer/urls.py | aymanexpo/mini-GED | de23fb57c902bfcc39329fbe4eda647c8d7bf016 | [
"MIT"
] | null | null | null | """lastVer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from lastVer.core import views
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('accounts/', include('django.contrib.auth.urls')),
path('', views.home, name='home'),
path('upload/', views.upload, name='upload'),
path('singup/', views.singup, name='singup'),
path('admin/', admin.site.urls),
path('swf/',views.showfile ,name='showfile'),
#path('upload/', views.upload.as_view(), name='upload2')
path('Docs/', views.Docs_list, name='Docs_list'),
path('Docs/upload/', views.upload_Docs, name='upload_Docs'),
path('Docs/<int:pk>', views.delete_Docs, name='delete_Docs'),
path('todo', views.todo, name='todo')
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 39.615385 | 80 | 0.70356 |
59ed4c65b2aaa08599c3a95e0798942319457d67 | 1,283 | py | Python | stockscraper.py | abdu997/StockScraper | 24f98a93e13e93cceb743ba7509e769021977c9e | [
"MIT"
] | null | null | null | stockscraper.py | abdu997/StockScraper | 24f98a93e13e93cceb743ba7509e769021977c9e | [
"MIT"
] | null | null | null | stockscraper.py | abdu997/StockScraper | 24f98a93e13e93cceb743ba7509e769021977c9e | [
"MIT"
] | null | null | null | import sys
import csv
import requests
import json
from datetime import datetime
from pandas.tseries.offsets import BDay
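# Expected invocation, inferred from the sys.argv indexing below (the ticker
# and dates are illustrative): argument order is symbol, start date (YYYYMMDD),
# number of business days, output format ('csv' or 'json'), output basename.
#
#     python stockscraper.py AAPL 20200102 5 csv aapl_data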
def to_integer(dt_time):
    # Despite its name, this returns the date encoded as a 'YYYYMMDD' string.
    return str(10000 * dt_time.year + 100 * dt_time.month + dt_time.day)
def GetData(symbol, startdate, days):
# Get your token from https://iexcloud.io/cloud-login#/register/
token = ""
data = []
# Get testing days
dates = []
for x in range(int(days) + 1):
dates.append(to_integer(datetime.strptime(startdate, '%Y%m%d') + BDay(x)))
# Fetch backtest period data
for date in dates:
url = "https://cloud.iexapis.com/stable/stock/" + symbol + "/chart/date/" + date + "?token=" + token
for x in requests.get(url = url).json():
data.append(x)
return data
format = sys.argv[4]
data = GetData(sys.argv[1], sys.argv[2], sys.argv[3])
with open(sys.argv[5] + "." + format, "w") as file:
if format == "csv":
csv_file = csv.writer(file)
keys = []
for x in data[0]:
keys.append(x)
csv_file.writerow(keys)
for item in data:
y = []
for x in keys:
y.append(item[x])
csv_file.writerow(y)
elif format == "json":
json.dump(data, file)
print ("Done")
| 26.183673 | 108 | 0.582229 |
146673a4233746bd7870a270bd1bfc7607b96d2a | 2,300 | py | Python | src/default/saver.py | Nkzono99/emses_inp_generator | ca9ea33619f1425840a4ba14500705e7199f2356 | [
"MIT"
] | null | null | null | src/default/saver.py | Nkzono99/emses_inp_generator | ca9ea33619f1425840a4ba14500705e7199f2356 | [
"MIT"
] | null | null | null | src/default/saver.py | Nkzono99/emses_inp_generator | ca9ea33619f1425840a4ba14500705e7199f2356 | [
"MIT"
] | null | null | null | import math
from utils import UnitConversionKey, Units
class Saver:
def __init__(self):
self.savers = []
self.exceptors = []
def add_saver(self, saver, exceptor=None):
if exceptor is None:
def exceptor(inp, values, unit): return True
self.savers.append(saver)
self.exceptors.append(exceptor)
def save(self, filename, inp, values):
dx = float(values['dx'])
to_c = float(values['em_c'])
unit = Units(dx=dx, to_c=to_c)
convkey = UnitConversionKey(dx=dx, to_c=to_c)
for saver, exceptor in zip(self.savers, self.exceptors):
if exceptor(inp, values, unit):
saver(inp, values, unit)
inp.save(filename, convkey=convkey)
def create_default_saver(use_physical_dt=False):
saver = Saver()
saver.add_saver(save_esorem)
saver.add_saver(save_job_con)
saver.add_saver(save_plasma)
if use_physical_dt:
saver.add_saver(save_tmgrid_physical_dt)
else:
saver.add_saver(save_tmgrid_emses_dt)
saver.add_saver(save_system)
saver.add_saver(save_mpi)
return saver
def save_esorem(inp, values, unit):
inp['esorem']['emflag'] = 1 if values['use_em'] else 0
def save_job_con(inp, values, unit):
jobnum = list(map(int, values['jobnum'].split(' ')))
inp.setlist('jobcon', 'jobnum', jobnum)
inp['jobcon']['nstep'] = int(values['nstep'])
def save_plasma(inp, values, unit):
inp['plasma']['cv'] = float(values['em_c'])
def save_tmgrid_emses_dt(inp, values, unit):
inp['tmgrid']['dt'] = float(values['dt'])
inp['tmgrid']['nx'] = int(values['nx'])
inp['tmgrid']['ny'] = int(values['ny'])
inp['tmgrid']['nz'] = int(values['nz'])
def save_tmgrid_physical_dt(inp, values, unit):
inp['tmgrid']['dt'] = unit.t.trans(float(values['dt']))
inp['tmgrid']['nx'] = int(values['nx'])
inp['tmgrid']['ny'] = int(values['ny'])
inp['tmgrid']['nz'] = int(values['nz'])
def save_system(inp, values, unit):
nspec = 3 if values['use_pe'] else 2
inp['system']['nspec'] = nspec
def save_mpi(inp, values, unit):
inp.setlist('mpi', 'nodes', [int(values['nodesx']),
int(values['nodesy']),
int(values['nodesz'])])
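# A minimal usage sketch (hedged: `inp` is the parameter-file object this module
# mutates and `values` a dict-like holding at least 'dx' and 'em_c' entries;
# both names are illustrative):
#
#     saver = create_default_saver(use_physical_dt=False)
#     saver.save('plasma.inp', inp, values)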
| 27.380952 | 64 | 0.605652 |
7a4469e08074f68f81fa00d50603bea0e1b3d9a6 | 12,495 | py | Python | docs/conf.py | esert-g/python-bigquery-storage | fd454e6c60f410b1bd4fbc37bda3bcbcb708538b | [
"Apache-2.0"
] | 44 | 2020-02-12T21:28:37.000Z | 2022-03-31T06:16:30.000Z | docs/conf.py | esert-g/python-bigquery-storage | fd454e6c60f410b1bd4fbc37bda3bcbcb708538b | [
"Apache-2.0"
] | 178 | 2020-02-05T10:49:45.000Z | 2022-03-31T01:48:44.000Z | docs/conf.py | esert-g/python-bigquery-storage | fd454e6c60f410b1bd4fbc37bda3bcbcb708538b | [
"Apache-2.0"
] | 23 | 2020-02-05T23:12:15.000Z | 2022-02-24T08:33:14.000Z | # -*- coding: utf-8 -*-
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# google-cloud-bigquery-storage documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
# For plugins that can not read conf.py.
# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
sys.path.insert(0, os.path.abspath("."))
__version__ = ""
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "1.5.5"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
"sphinx.ext.doctest",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
"recommonmark",
]
# autodoc/autosummary flags
autoclass_content = "both"
autodoc_default_options = {"members": True}
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = [".rst", ".md"]
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The root toctree document.
root_doc = "index"
# General information about the project.
project = "google-cloud-bigquery-storage"
copyright = "2019, Google"
author = "Google APIs"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version.
version = ".".join(release.split(".")[0:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
"_build",
"**/.nox/**/*",
"samples/AUTHORING_GUIDE.md",
"samples/CONTRIBUTING.md",
"samples/snippets/README.rst",
]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"description": "Google Cloud Client Libraries for google-cloud-bigquery-storage",
"github_user": "googleapis",
"github_repo": "python-bigquery-storage",
"github_banner": True,
"font_family": "'Roboto', Georgia, sans",
"head_font_family": "'Roboto', Georgia, serif",
"code_font_family": "'Roboto Mono', 'Consolas', monospace",
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "google-cloud-bigquery-storage-doc"
# -- Options for warnings ------------------------------------------------------
suppress_warnings = [
# Temporarily suppress this to avoid "more than one target found for
# cross-reference" warning, which are intractable for us to avoid while in
# a mono-repo.
# See https://github.com/sphinx-doc/sphinx/blob
# /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
"ref.python"
]
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
root_doc,
"google-cloud-bigquery-storage.tex",
"google-cloud-bigquery-storage Documentation",
author,
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
root_doc,
"google-cloud-bigquery-storage",
"google-cloud-bigquery-storage Documentation",
[author],
1,
)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
root_doc,
"google-cloud-bigquery-storage",
"google-cloud-bigquery-storage Documentation",
author,
"google-cloud-bigquery-storage",
"google-cloud-bigquery-storage Library",
"APIs",
)
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
"python": ("https://python.readthedocs.org/en/latest/", None),
"google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.github.io/grpc/python/", None),
"proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
"protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
}
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
| 32.709424 | 88 | 0.706202 |
def8904d2c7dd58dedbc787032a9a79323d3e018 | 3,449 | py | Python | MetamorphicTests/all_mutants/load_model_forecast_file/19.py | anuragbms/Sales-forecasting-with-RNNs | 22b4639ecbb48381af53326ace94a3538201b586 | [
"Apache-2.0"
] | null | null | null | MetamorphicTests/all_mutants/load_model_forecast_file/19.py | anuragbms/Sales-forecasting-with-RNNs | 22b4639ecbb48381af53326ace94a3538201b586 | [
"Apache-2.0"
] | null | null | null | MetamorphicTests/all_mutants/load_model_forecast_file/19.py | anuragbms/Sales-forecasting-with-RNNs | 22b4639ecbb48381af53326ace94a3538201b586 | [
"Apache-2.0"
] | 1 | 2022-02-06T14:59:43.000Z | 2022-02-06T14:59:43.000Z | def gen_mutant():
import tensorflow as tf
from tensorflow.python.saved_model import tag_constants
import pandas as pd
import numpy as np
DATAFILE_VALIDATE = '/home/ubuntu/anurag/rnn/data_for_MRs/mock_kaggle_edit_validate_normalise.csv'
TRAINED_MODEL_PATH = '/home/ubuntu/anurag/rnn/savedModel'
TIME_STEPS = 10
NUMBER_OF_DAYS_TO_FORECAST = 1
BATCH_SIZE = 100
MIN = 0
RANGE = 542
data_validate = pd.read_csv(DATAFILE_VALIDATE)
numValidationData = len(data_validate)
validationData_sales = data_validate['sales_add_309'][0:numValidationData]
print(len(validationData_sales))
validationData_sales_normalised = [(i - MIN) / RANGE for i in validationData_sales]
validationDataSequence_sales = np.zeros(shape=(((len(validationData_sales) - TIME_STEPS) - NUMBER_OF_DAYS_TO_FORECAST) + 1, TIME_STEPS, 1))
validationDataSequence_sales_target = np.zeros(shape=(((len(validationData_sales) - TIME_STEPS) - NUMBER_OF_DAYS_TO_FORECAST) + 1, NUMBER_OF_DAYS_TO_FORECAST))
start = 0
for i in range(TIME_STEPS, (len(validationData_sales) - NUMBER_OF_DAYS_TO_FORECAST) + 1):
validationDataSequence_sales[start,:,0] = validationData_sales_normalised[start:i]
validationDataSequence_sales_target[start] = validationData_sales_normalised[i:i + NUMBER_OF_DAYS_TO_FORECAST]
start += 1
validationDataSequence_sales_target.shape
with tf.Session() as sess:
print('Loading the model from:', TRAINED_MODEL_PATH)
tf.saved_model.loader.load(sess=sess, export_dir=TRAINED_MODEL_PATH, tags=[tag_constants.SERVING])
inputSequence = tf.get_default_graph().get_tensor_by_name('inputSequencePlaceholder:0')
targetForecast = tf.get_default_graph().get_tensor_by_name('targetPlaceholder:0')
loss = tf.get_default_graph().get_tensor_by_name('loss_comp:0')
forecast_originalScale = tf.get_default_graph().get_tensor_by_name('forecast_original_scale:0')
startLoc = 0
totalLoss = 0
for i in range(0, len(validationDataSequence_sales) // BATCH_SIZE):
sequence = validationDataSequence_sales[startLoc:startLoc + BATCH_SIZE,:,:]
target = validationDataSequence_sales_target[startLoc:startLoc + BATCH_SIZE]
(fcast, ls) = sess.run([forecast_originalScale, loss], feed_dict={inputSequence: sequence, targetForecast: target})
print('first five predictions (original scale):', fcast[0:5])
print('first five actuals (original scale) :', (target[0:5] * RANGE) - MIN)
totalLoss += ls
startLoc += BATCH_SIZE
if startLoc < len(validationDataSequence_sales):
sequence = validationDataSequence_sales[startLoc:]
target = validationDataSequence_sales_target[startLoc:]
(fcast, ls) = sess.run([forecast_originalScale, loss], feed_dict={inputSequence: sequence, targetForecast: target})
totalLoss += ls
print('Validation complete. Total loss:', totalLoss) | 27.592 | 163 | 0.635547 |
b40fe89dc0f0d599e3f7558af830ce29754fa920 | 6,498 | py | Python | whoville/cloudbreak/models/stack_descriptor.py | balazsgaspar/whoville | 0d26853bf5cfd3485067b0c23f886e2b4ab742f8 | [
"Apache-2.0"
] | 30 | 2017-06-12T13:05:24.000Z | 2021-08-03T09:00:48.000Z | whoville/cloudbreak/models/stack_descriptor.py | balazsgaspar/whoville | 0d26853bf5cfd3485067b0c23f886e2b4ab742f8 | [
"Apache-2.0"
] | 6 | 2017-12-27T23:12:45.000Z | 2019-03-07T22:14:24.000Z | whoville/cloudbreak/models/stack_descriptor.py | balazsgaspar/whoville | 0d26853bf5cfd3485067b0c23f886e2b4ab742f8 | [
"Apache-2.0"
] | 31 | 2017-06-12T13:05:28.000Z | 2019-09-20T01:50:29.000Z | # coding: utf-8
"""
Cloudbreak API
Cloudbreak is a powerful left surf that breaks over a coral reef, a mile off southwest the island of Tavarua, Fiji. Cloudbreak is a cloud agnostic Hadoop as a Service API. Abstracts the provisioning and ease management and monitoring of on-demand clusters. SequenceIQ's Cloudbreak is a RESTful application development platform with the goal of helping developers to build solutions for deploying Hadoop YARN clusters in different environments. Once it is deployed in your favourite servlet container it exposes a REST API allowing to span up Hadoop clusters of arbitary sizes and cloud providers. Provisioning Hadoop has never been easier. Cloudbreak is built on the foundation of cloud providers API (Amazon AWS, Microsoft Azure, Google Cloud Platform, Openstack), Apache Ambari, Docker lightweight containers, Swarm and Consul. For further product documentation follow the link: <a href=\"http://hortonworks.com/apache/cloudbreak/\">http://hortonworks.com/apache/cloudbreak/</a>
OpenAPI spec version: 2.9.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class StackDescriptor(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'version': 'str',
'min_ambari': 'str',
'repo': 'StackRepoDetailsJson',
'mpacks': 'dict(str, list[ManagementPackEntry])',
'ambari': 'AmbariInfoJson'
}
attribute_map = {
'version': 'version',
'min_ambari': 'minAmbari',
'repo': 'repo',
'mpacks': 'mpacks',
'ambari': 'ambari'
}
def __init__(self, version=None, min_ambari=None, repo=None, mpacks=None, ambari=None):
"""
StackDescriptor - a model defined in Swagger
"""
self._version = None
self._min_ambari = None
self._repo = None
self._mpacks = None
self._ambari = None
if version is not None:
self.version = version
if min_ambari is not None:
self.min_ambari = min_ambari
if repo is not None:
self.repo = repo
if mpacks is not None:
self.mpacks = mpacks
if ambari is not None:
self.ambari = ambari
@property
def version(self):
"""
Gets the version of this StackDescriptor.
:return: The version of this StackDescriptor.
:rtype: str
"""
return self._version
@version.setter
def version(self, version):
"""
Sets the version of this StackDescriptor.
:param version: The version of this StackDescriptor.
:type: str
"""
self._version = version
@property
def min_ambari(self):
"""
Gets the min_ambari of this StackDescriptor.
:return: The min_ambari of this StackDescriptor.
:rtype: str
"""
return self._min_ambari
@min_ambari.setter
def min_ambari(self, min_ambari):
"""
Sets the min_ambari of this StackDescriptor.
:param min_ambari: The min_ambari of this StackDescriptor.
:type: str
"""
self._min_ambari = min_ambari
@property
def repo(self):
"""
Gets the repo of this StackDescriptor.
:return: The repo of this StackDescriptor.
:rtype: StackRepoDetailsJson
"""
return self._repo
@repo.setter
def repo(self, repo):
"""
Sets the repo of this StackDescriptor.
:param repo: The repo of this StackDescriptor.
:type: StackRepoDetailsJson
"""
self._repo = repo
@property
def mpacks(self):
"""
Gets the mpacks of this StackDescriptor.
:return: The mpacks of this StackDescriptor.
:rtype: dict(str, list[ManagementPackEntry])
"""
return self._mpacks
@mpacks.setter
def mpacks(self, mpacks):
"""
Sets the mpacks of this StackDescriptor.
:param mpacks: The mpacks of this StackDescriptor.
:type: dict(str, list[ManagementPackEntry])
"""
self._mpacks = mpacks
@property
def ambari(self):
"""
Gets the ambari of this StackDescriptor.
:return: The ambari of this StackDescriptor.
:rtype: AmbariInfoJson
"""
return self._ambari
@ambari.setter
def ambari(self, ambari):
"""
Sets the ambari of this StackDescriptor.
:param ambari: The ambari of this StackDescriptor.
:type: AmbariInfoJson
"""
self._ambari = ambari
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, StackDescriptor):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
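# A minimal usage sketch (hedged; the field values are illustrative):
#
#     descriptor = StackDescriptor(version='3.1', min_ambari='2.7')
#     print(descriptor.to_dict())  # unset attributes serialize as None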
| 28.5 | 984 | 0.58895 |
5739d3d841171247f1e2d0e65a66600d1bc25d60 | 28,966 | py | Python | bingads/v13/bulk/entities/bulk_ads.py | wbrp/BingAds-Python-SDK | 8262f24f3ef3111ccf3b35b8fd0ef31030fd6621 | [
"MIT"
] | null | null | null | bingads/v13/bulk/entities/bulk_ads.py | wbrp/BingAds-Python-SDK | 8262f24f3ef3111ccf3b35b8fd0ef31030fd6621 | [
"MIT"
] | null | null | null | bingads/v13/bulk/entities/bulk_ads.py | wbrp/BingAds-Python-SDK | 8262f24f3ef3111ccf3b35b8fd0ef31030fd6621 | [
"MIT"
] | null | null | null | from bingads.service_client import _CAMPAIGN_OBJECT_FACTORY_V13
from bingads.v13.internal.bulk.string_table import _StringTable
from bingads.v13.internal.bulk.entities.single_record_bulk_entity import _SingleRecordBulkEntity
from bingads.v13.internal.bulk.mappings import _SimpleBulkMapping
from bingads.v13.internal.extensions import *
# Define type used
ProductAd = type(_CAMPAIGN_OBJECT_FACTORY_V13.create('ProductAd'))
TextAd = type(_CAMPAIGN_OBJECT_FACTORY_V13.create('TextAd'))
AppInstallAd = type(_CAMPAIGN_OBJECT_FACTORY_V13.create('AppInstallAd'))
ExpandedTextAd = type(_CAMPAIGN_OBJECT_FACTORY_V13.create('ExpandedTextAd'))
DynamicSearchAd = type(_CAMPAIGN_OBJECT_FACTORY_V13.create('DynamicSearchAd'))
ResponsiveSearchAd = type(_CAMPAIGN_OBJECT_FACTORY_V13.create('ResponsiveSearchAd'))
ResponsiveAd = type(_CAMPAIGN_OBJECT_FACTORY_V13.create('ResponsiveAd'))
class _BulkAd(_SingleRecordBulkEntity):
""" This abstract base class provides properties that are shared by all bulk ad classes.
*See also:*
* :class:`.BulkProductAd`
* :class:`.BulkTextAd`
* :class:`.BulkAppInstallAd`
* :class:`.BulkExpandedTextAd`
* :class:`.BulkDynamicSearchAd`
* :class:`.BulkResponsiveAd`
* :class:`.BulkResponsiveSearchAd`
"""
def __init__(self,
ad_group_id=None,
campaign_name=None,
ad_group_name=None,
ad=None):
super(_BulkAd, self).__init__()
self._ad_group_id = ad_group_id
self._campaign_name = campaign_name
self._ad_group_name = ad_group_name
self._ad = ad
self._performance_data = None
@property
def ad_group_id(self):
""" The identifier of the ad group that contains the ad.
Corresponds to the 'Parent Id' field in the bulk file.
:rtype: int
"""
return self._ad_group_id
@ad_group_id.setter
def ad_group_id(self, ad_group_id):
self._ad_group_id = ad_group_id
@property
def campaign_name(self):
""" The name of the campaign that contains the ad.
Corresponds to the 'Campaign' field in the bulk file.
:rtype: str
"""
return self._campaign_name
@campaign_name.setter
def campaign_name(self, campaign_name):
self._campaign_name = campaign_name
@property
def ad_group_name(self):
""" The name of the ad group that contains the ad.
Corresponds to the 'Ad Group' field in the bulk file.
:rtype: str
"""
return self._ad_group_name
@ad_group_name.setter
def ad_group_name(self, ad_group_name):
self._ad_group_name = ad_group_name
@property
def ad(self):
""" The type of ad.
"""
return self._ad
@ad.setter
def ad(self, ad):
self._ad = ad
_MAPPINGS = [
_SimpleBulkMapping(
header=_StringTable.Status,
field_to_csv=lambda c: bulk_str(c.ad.Status),
csv_to_field=lambda c, v: setattr(c.ad, 'Status', v if v else None)
),
_SimpleBulkMapping(
header=_StringTable.Id,
field_to_csv=lambda c: bulk_str(c.ad.Id),
csv_to_field=lambda c, v: setattr(c.ad, 'Id', int(v) if v else None)
),
_SimpleBulkMapping(
header=_StringTable.ParentId,
field_to_csv=lambda c: bulk_str(c.ad_group_id),
csv_to_field=lambda c, v: setattr(c, '_ad_group_id', int(v) if v else None)
),
_SimpleBulkMapping(
header=_StringTable.Campaign,
field_to_csv=lambda c: c.campaign_name,
csv_to_field=lambda c, v: setattr(c, '_campaign_name', v)
),
_SimpleBulkMapping(
header=_StringTable.AdGroup,
field_to_csv=lambda c: c.ad_group_name,
csv_to_field=lambda c, v: setattr(c, '_ad_group_name', v)
),
_SimpleBulkMapping(
header=_StringTable.EditorialStatus,
field_to_csv=lambda c: c.ad.EditorialStatus,
csv_to_field=lambda c, v: setattr(c.ad, 'EditorialStatus', v if v else None)
),
_SimpleBulkMapping(
header=_StringTable.DevicePreference,
field_to_csv=lambda c: bulk_device_preference_str(c.ad.DevicePreference),
csv_to_field=lambda c, v: setattr(c.ad, 'DevicePreference', parse_device_preference(v))
),
_SimpleBulkMapping(
header=_StringTable.AdFormatPreference,
field_to_csv=lambda c: bulk_str(c.ad.AdFormatPreference),
csv_to_field=lambda c, v: setattr(c.ad, 'AdFormatPreference', v if v else None)
),
_SimpleBulkMapping(
header=_StringTable.FinalUrl,
field_to_csv=lambda c: field_to_csv_Urls(c.ad.FinalUrls, c.ad.Id),
csv_to_field=lambda c, v: csv_to_field_Urls(c.ad.FinalUrls, v)
),
_SimpleBulkMapping(
header=_StringTable.FinalMobileUrl,
field_to_csv=lambda c: field_to_csv_Urls(c.ad.FinalMobileUrls, c.ad.Id),
csv_to_field=lambda c, v: csv_to_field_Urls(c.ad.FinalMobileUrls, v)
),
_SimpleBulkMapping(
header=_StringTable.TrackingTemplate,
field_to_csv=lambda c: bulk_str(c.ad.TrackingUrlTemplate),
csv_to_field=lambda c, v: setattr(c.ad, 'TrackingUrlTemplate', v if v else None)
),
_SimpleBulkMapping(
header=_StringTable.CustomParameter,
field_to_csv=lambda c: field_to_csv_UrlCustomParameters(c.ad),
csv_to_field=lambda c, v: csv_to_field_UrlCustomParameters(c.ad, v)
),
_SimpleBulkMapping(
header=_StringTable.FinalUrlSuffix,
field_to_csv=lambda c: bulk_optional_str(c.ad.FinalUrlSuffix, c.ad.Id),
csv_to_field=lambda c, v: setattr(c.ad, 'FinalUrlSuffix', v if v else None)
)
]
def process_mappings_to_row_values(self, row_values, exclude_readonly_data):
self.convert_to_values(row_values, _BulkAd._MAPPINGS)
def process_mappings_from_row_values(self, row_values):
row_values.convert_to_entity(self, _BulkAd._MAPPINGS)
def read_additional_data(self, stream_reader):
super(_BulkAd, self).read_additional_data(stream_reader)
class BulkProductAd(_BulkAd):
""" Represents a product ad.
This class exposes the :attr:`product_ad` property that can be read and written as fields of the Product Ad record in a bulk file.
For more information, see Product Ad at https://go.microsoft.com/fwlink/?linkid=846127.
*See also:*
* :class:`.BulkServiceManager`
* :class:`.BulkOperation`
* :class:`.BulkFileReader`
* :class:`.BulkFileWriter`
"""
def __init__(self,
ad_group_id=None,
campaign_name=None,
ad_group_name=None,
ad=None):
super(BulkProductAd, self).__init__(
ad_group_id,
campaign_name,
ad_group_name,
ad
)
self.product_ad = ad
@property
def product_ad(self):
""" The product ad.
See Product Ad at: https://go.microsoft.com/fwlink/?linkid=846127.
"""
return self._ad
@product_ad.setter
def product_ad(self, product_ad):
if product_ad is not None and not isinstance(product_ad, ProductAd):
raise ValueError('Not an instance of ProductAd')
self._ad = product_ad
_MAPPINGS = [
_SimpleBulkMapping(
header=_StringTable.PromotionalText,
field_to_csv=lambda c: bulk_optional_str(c.product_ad.PromotionalText, c.product_ad.Id),
csv_to_field=lambda c, v: setattr(c.product_ad, 'PromotionalText', v if v else '')
),
]
def process_mappings_from_row_values(self, row_values):
self.product_ad = _CAMPAIGN_OBJECT_FACTORY_V13.create('ProductAd')
self.product_ad.Type = 'Product'
super(BulkProductAd, self).process_mappings_from_row_values(row_values)
row_values.convert_to_entity(self, BulkProductAd._MAPPINGS)
def process_mappings_to_row_values(self, row_values, exclude_readonly_data):
self._validate_property_not_null(self.product_ad, 'product_ad')
super(BulkProductAd, self).process_mappings_to_row_values(row_values, exclude_readonly_data)
self.convert_to_values(row_values, BulkProductAd._MAPPINGS)
class BulkTextAd(_BulkAd):
""" Represents a Text Ad.
This class exposes the :attr:`text_ad` property that can be read and written as fields of the Text Ad record in a bulk file.
For more information, see Text Ad at https://go.microsoft.com/fwlink/?linkid=846127.
*See also:*
* :class:`.BulkServiceManager`
* :class:`.BulkOperation`
* :class:`.BulkFileReader`
* :class:`.BulkFileWriter`
"""
def __init__(self,
ad_group_id=None,
campaign_name=None,
ad_group_name=None,
ad=None):
super(BulkTextAd, self).__init__(
ad_group_id,
campaign_name,
ad_group_name,
ad,
)
self.text_ad = ad
@property
def text_ad(self):
""" The text ad.
        See Text Ad at https://go.microsoft.com/fwlink/?linkid=846127.
"""
return self._ad
@text_ad.setter
def text_ad(self, text_ad):
if text_ad is not None and not isinstance(text_ad, TextAd):
raise ValueError('Not an instance of TextAd')
self._ad = text_ad
_MAPPINGS = [
_SimpleBulkMapping(
header=_StringTable.Title,
field_to_csv=lambda c: c.text_ad.Title,
csv_to_field=lambda c, v: setattr(c.text_ad, 'Title', v)
),
_SimpleBulkMapping(
header=_StringTable.Text,
field_to_csv=lambda c: c.text_ad.Text,
csv_to_field=lambda c, v: setattr(c.text_ad, 'Text', v)
),
_SimpleBulkMapping(
header=_StringTable.DisplayUrl,
field_to_csv=lambda c: bulk_optional_str(c.text_ad.DisplayUrl, c.text_ad.Id),
csv_to_field=lambda c, v: setattr(c.text_ad, 'DisplayUrl', v if v else '')
),
_SimpleBulkMapping(
header=_StringTable.DestinationUrl,
field_to_csv=lambda c: bulk_optional_str(c.text_ad.DestinationUrl, c.text_ad.Id),
csv_to_field=lambda c, v: setattr(c.text_ad, 'DestinationUrl', v if v else '')
),
]
def process_mappings_from_row_values(self, row_values):
self.text_ad = _CAMPAIGN_OBJECT_FACTORY_V13.create('TextAd')
self.text_ad.Type = 'Text'
super(BulkTextAd, self).process_mappings_from_row_values(row_values)
row_values.convert_to_entity(self, BulkTextAd._MAPPINGS)
def process_mappings_to_row_values(self, row_values, exclude_readonly_data):
self._validate_property_not_null(self.text_ad, 'text_ad')
super(BulkTextAd, self).process_mappings_to_row_values(row_values, exclude_readonly_data)
self.convert_to_values(row_values, BulkTextAd._MAPPINGS)
class BulkAppInstallAd(_BulkAd):
""" Represents an App Install Ad.
This class exposes the :attr:`app_install_ad` property that can be read and written as fields of the App Install Ad record in a bulk file.
For more information, see App Install Ad at https://go.microsoft.com/fwlink/?linkid=846127.
*See also:*
* :class:`.BulkServiceManager`
* :class:`.BulkOperation`
* :class:`.BulkFileReader`
* :class:`.BulkFileWriter`
"""
def __init__(self,
ad_group_id=None,
campaign_name=None,
ad_group_name=None,
ad=None):
super(BulkAppInstallAd, self).__init__(
ad_group_id,
campaign_name,
ad_group_name,
ad,
)
self.app_install_ad = ad
@property
def app_install_ad(self):
""" The App Install Ad.
        See App Install Ad at https://go.microsoft.com/fwlink/?linkid=846127.
"""
return self._ad
@app_install_ad.setter
def app_install_ad(self, app_install_ad):
if app_install_ad is not None and not isinstance(app_install_ad, AppInstallAd):
raise ValueError('Not an instance of AppInstallAd')
self._ad = app_install_ad
_MAPPINGS = [
_SimpleBulkMapping(
header=_StringTable.AppPlatform,
field_to_csv=lambda c: c.app_install_ad.AppPlatform,
csv_to_field=lambda c, v: setattr(c.app_install_ad, 'AppPlatform', v)
),
_SimpleBulkMapping(
header=_StringTable.AppStoreId,
field_to_csv=lambda c: c.app_install_ad.AppStoreId,
csv_to_field=lambda c, v: setattr(c.app_install_ad, 'AppStoreId', v)
),
_SimpleBulkMapping(
header=_StringTable.Title,
field_to_csv=lambda c: c.app_install_ad.Title,
csv_to_field=lambda c, v: setattr(c.app_install_ad, 'Title', v)
),
_SimpleBulkMapping(
header=_StringTable.Text,
field_to_csv=lambda c: c.app_install_ad.Text,
csv_to_field=lambda c, v: setattr(c.app_install_ad, 'Text', v)
),
]
def process_mappings_from_row_values(self, row_values):
self.app_install_ad = _CAMPAIGN_OBJECT_FACTORY_V13.create('AppInstallAd')
self.app_install_ad.Type = 'AppInstall'
super(BulkAppInstallAd, self).process_mappings_from_row_values(row_values)
row_values.convert_to_entity(self, BulkAppInstallAd._MAPPINGS)
def process_mappings_to_row_values(self, row_values, exclude_readonly_data):
self._validate_property_not_null(self.app_install_ad, 'app_install_ad')
super(BulkAppInstallAd, self).process_mappings_to_row_values(row_values, exclude_readonly_data)
self.convert_to_values(row_values, BulkAppInstallAd._MAPPINGS)
class BulkExpandedTextAd(_BulkAd):
""" Represents an Expanded Text Ad.
This class exposes the :attr:`expanded_text_ad` property that can be read and written as fields of the Expanded Text Ad record in a bulk file.
For more information, see Expanded Text Ad at https://go.microsoft.com/fwlink/?linkid=846127.
*See also:*
* :class:`.BulkServiceManager`
* :class:`.BulkOperation`
* :class:`.BulkFileReader`
* :class:`.BulkFileWriter`
"""
def __init__(self,
ad_group_id=None,
campaign_name=None,
ad_group_name=None,
ad=None):
super(BulkExpandedTextAd, self).__init__(
ad_group_id,
campaign_name,
ad_group_name,
ad,
)
self.expanded_text_ad = ad
@property
def expanded_text_ad(self):
""" The Expanded Text Ad.
        See Expanded Text Ad at https://go.microsoft.com/fwlink/?linkid=846127.
"""
return self._ad
@expanded_text_ad.setter
def expanded_text_ad(self, expanded_text_ad):
if expanded_text_ad is not None and not isinstance(expanded_text_ad, ExpandedTextAd):
raise ValueError('Not an instance of ExpandedTextAd')
self._ad = expanded_text_ad
_MAPPINGS = [
_SimpleBulkMapping(
header=_StringTable.Text,
field_to_csv=lambda c: c.expanded_text_ad.Text,
csv_to_field=lambda c, v: setattr(c.expanded_text_ad, 'Text', v)
),
_SimpleBulkMapping(
header=_StringTable.TextPart2,
field_to_csv=lambda c: bulk_optional_str(c.expanded_text_ad.TextPart2, c.expanded_text_ad.Id),
csv_to_field=lambda c, v: setattr(c.expanded_text_ad, 'TextPart2', v if v else '')
),
_SimpleBulkMapping(
header=_StringTable.TitlePart1,
field_to_csv=lambda c: c.expanded_text_ad.TitlePart1,
csv_to_field=lambda c, v: setattr(c.expanded_text_ad, 'TitlePart1', v)
),
_SimpleBulkMapping(
header=_StringTable.TitlePart2,
field_to_csv=lambda c: c.expanded_text_ad.TitlePart2,
csv_to_field=lambda c, v: setattr(c.expanded_text_ad, 'TitlePart2', v)
),
_SimpleBulkMapping(
header=_StringTable.TitlePart3,
field_to_csv=lambda c: bulk_optional_str(c.expanded_text_ad.TitlePart3, c.expanded_text_ad.Id),
csv_to_field=lambda c, v: setattr(c.expanded_text_ad, 'TitlePart3', v if v else '')
),
_SimpleBulkMapping(
header=_StringTable.Path1,
field_to_csv=lambda c: bulk_optional_str(c.expanded_text_ad.Path1, c.expanded_text_ad.Id),
csv_to_field=lambda c, v: setattr(c.expanded_text_ad, 'Path1', v)
),
_SimpleBulkMapping(
header=_StringTable.Path2,
field_to_csv=lambda c: bulk_optional_str(c.expanded_text_ad.Path2, c.expanded_text_ad.Id),
csv_to_field=lambda c, v: setattr(c.expanded_text_ad, 'Path2', v)
),
_SimpleBulkMapping(
header=_StringTable.Domain,
field_to_csv=lambda c: bulk_optional_str(c.expanded_text_ad.Domain, c.expanded_text_ad.Id),
csv_to_field=lambda c, v: setattr(c.expanded_text_ad, 'Domain', v)
),
]
def process_mappings_from_row_values(self, row_values):
self.expanded_text_ad = _CAMPAIGN_OBJECT_FACTORY_V13.create('ExpandedTextAd')
self.expanded_text_ad.Type = 'ExpandedText'
super(BulkExpandedTextAd, self).process_mappings_from_row_values(row_values)
row_values.convert_to_entity(self, BulkExpandedTextAd._MAPPINGS)
def process_mappings_to_row_values(self, row_values, exclude_readonly_data):
self._validate_property_not_null(self.expanded_text_ad, 'expanded_text_ad')
super(BulkExpandedTextAd, self).process_mappings_to_row_values(row_values, exclude_readonly_data)
self.convert_to_values(row_values, BulkExpandedTextAd._MAPPINGS)
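# Illustrative sketch (not part of the SDK source): composing an Expanded Text Ad
# entity for a bulk upload. The ad group id, URL and ad copy below are
# hypothetical placeholders; the returned object is what a BulkFileWriter's
# write_entity() call would consume.
def _example_build_expanded_text_ad():
    ad = _CAMPAIGN_OBJECT_FACTORY_V13.create('ExpandedTextAd')
    ad.TitlePart1 = 'Example Title Part 1'
    ad.TitlePart2 = 'Example Title Part 2'
    ad.Text = 'Example ad text.'
    ad.Path1 = 'deals'
    # FinalUrls is a suds ArrayOfstring, so the URL list lives on .string.
    ad.FinalUrls.string.append('https://example.com/deals')
    # ad_group_id maps to the 'Parent Id' field of the bulk file row.
    return BulkExpandedTextAd(ad_group_id=123456789, ad=ad)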
class BulkDynamicSearchAd(_BulkAd):
""" Represents a Dynamic Search Ad.
This class exposes the :attr:`dynamic_search_ad` property that can be read and written as fields of the Dynamic Search Ad record in a bulk file.
For more information, see Dynamic Search Ad at https://go.microsoft.com/fwlink/?linkid=836840.
*See also:*
* :class:`.BulkServiceManager`
* :class:`.BulkOperation`
* :class:`.BulkFileReader`
* :class:`.BulkFileWriter`
"""
def __init__(self,
ad_group_id=None,
campaign_name=None,
ad_group_name=None,
ad=None):
super(BulkDynamicSearchAd, self).__init__(
ad_group_id,
campaign_name,
ad_group_name,
ad,
)
self.dynamic_search_ad = ad
@property
def dynamic_search_ad(self):
""" The dynamic search ad.
        See Dynamic Search Ad at https://go.microsoft.com/fwlink/?linkid=836840.
"""
return self._ad
@dynamic_search_ad.setter
def dynamic_search_ad(self, dynamic_search_ad):
if dynamic_search_ad is not None and not isinstance(dynamic_search_ad, DynamicSearchAd):
raise ValueError('Not an instance of DynamicSearchAd')
self._ad = dynamic_search_ad
_MAPPINGS = [
_SimpleBulkMapping(
header=_StringTable.Text,
field_to_csv=lambda c: c.dynamic_search_ad.Text,
csv_to_field=lambda c, v: setattr(c.dynamic_search_ad, 'Text', v)
),
_SimpleBulkMapping(
header=_StringTable.Path1,
field_to_csv=lambda c: c.dynamic_search_ad.Path1,
csv_to_field=lambda c, v: setattr(c.dynamic_search_ad, 'Path1', v)
),
_SimpleBulkMapping(
header=_StringTable.Path2,
field_to_csv=lambda c: c.dynamic_search_ad.Path2,
csv_to_field=lambda c, v: setattr(c.dynamic_search_ad, 'Path2', v)
),
_SimpleBulkMapping(
header=_StringTable.TextPart2,
field_to_csv=lambda c: c.dynamic_search_ad.TextPart2,
csv_to_field=lambda c, v: setattr(c.dynamic_search_ad, 'TextPart2', v)
),
]
def process_mappings_from_row_values(self, row_values):
self.dynamic_search_ad = _CAMPAIGN_OBJECT_FACTORY_V13.create('DynamicSearchAd')
self.dynamic_search_ad.Type = 'DynamicSearch'
super(BulkDynamicSearchAd, self).process_mappings_from_row_values(row_values)
row_values.convert_to_entity(self, BulkDynamicSearchAd._MAPPINGS)
def process_mappings_to_row_values(self, row_values, exclude_readonly_data):
self._validate_property_not_null(self.dynamic_search_ad, 'dynamic_search_ad')
super(BulkDynamicSearchAd, self).process_mappings_to_row_values(row_values, exclude_readonly_data)
self.convert_to_values(row_values, BulkDynamicSearchAd._MAPPINGS)
class BulkResponsiveAd(_BulkAd):
""" Represents a Responsive Ad.
This class exposes the :attr:`responsive_ad` property that can be read and written as fields of the Responsive Ad record in a bulk file.
For more information, see Responsive Ad at https://go.microsoft.com/fwlink/?linkid=836840.
*See also:*
* :class:`.BulkServiceManager`
* :class:`.BulkOperation`
* :class:`.BulkFileReader`
* :class:`.BulkFileWriter`
"""
def __init__(self,
ad_group_id=None,
campaign_name=None,
ad_group_name=None,
ad=None):
super(BulkResponsiveAd, self).__init__(
ad_group_id,
campaign_name,
ad_group_name,
ad,
)
        self.responsive_ad = ad  # use the validating setter, consistent with the other subclasses
@property
def responsive_ad(self):
""" The responsive search ad.
see Responsive Ad at https://go.microsoft.com/fwlink/?linkid=836840.
"""
return self._ad
@responsive_ad.setter
def responsive_ad(self, responsive_ad):
if responsive_ad is not None and not isinstance(responsive_ad, ResponsiveAd):
raise ValueError('Not an instance of ResponsiveAd')
self._ad = responsive_ad
_MAPPINGS = [
_SimpleBulkMapping(
header=_StringTable.BusinessName,
field_to_csv=lambda c: c.responsive_ad.BusinessName,
csv_to_field=lambda c, v: setattr(c.responsive_ad, 'BusinessName', v)
),
_SimpleBulkMapping(
header=_StringTable.CallToAction,
field_to_csv=lambda c: c.responsive_ad.CallToAction,
csv_to_field=lambda c, v: setattr(c.responsive_ad, 'CallToAction', v if v else None)
),
_SimpleBulkMapping(
header=_StringTable.Headline,
field_to_csv=lambda c: c.responsive_ad.Headline,
csv_to_field=lambda c, v: setattr(c.responsive_ad, 'Headline', v)
),
_SimpleBulkMapping(
header=_StringTable.LongHeadline,
field_to_csv=lambda c: c.responsive_ad.LongHeadlineString,
csv_to_field=lambda c, v: setattr(c.responsive_ad, 'LongHeadlineString', v)
),
_SimpleBulkMapping(
header=_StringTable.LandscapeImageMediaId,
field_to_csv=lambda c: c.responsive_ad.LandscapeImageMediaId,
csv_to_field=lambda c, v: setattr(c.responsive_ad, 'LandscapeImageMediaId', int(v) if v else None)
),
_SimpleBulkMapping(
header=_StringTable.LandscapeLogoMediaId,
field_to_csv=lambda c: c.responsive_ad.LandscapeLogoMediaId,
csv_to_field=lambda c, v: setattr(c.responsive_ad, 'LandscapeLogoMediaId', int(v) if v else None)
),
_SimpleBulkMapping(
header=_StringTable.SquareImageMediaId,
field_to_csv=lambda c: c.responsive_ad.SquareImageMediaId,
csv_to_field=lambda c, v: setattr(c.responsive_ad, 'SquareImageMediaId', int(v) if v else None)
),
_SimpleBulkMapping(
header=_StringTable.SquareLogoMediaId,
field_to_csv=lambda c: c.responsive_ad.SquareLogoMediaId,
csv_to_field=lambda c, v: setattr(c.responsive_ad, 'SquareLogoMediaId', int(v) if v else None)
),
_SimpleBulkMapping(
header=_StringTable.Text,
field_to_csv=lambda c: c.responsive_ad.Text,
csv_to_field=lambda c, v: setattr(c.responsive_ad, 'Text', v)
),
_SimpleBulkMapping(
header=_StringTable.Images,
field_to_csv=lambda c: field_to_csv_ImageAssetLinks(c.responsive_ad.Images),
csv_to_field=lambda c, v: csv_to_field_ImageAssetLinks(c.responsive_ad.Images, v)
),
]
def process_mappings_from_row_values(self, row_values):
self.responsive_ad = _CAMPAIGN_OBJECT_FACTORY_V13.create('ResponsiveAd')
self.responsive_ad.Type = 'Responsive'
super(BulkResponsiveAd, self).process_mappings_from_row_values(row_values)
row_values.convert_to_entity(self, BulkResponsiveAd._MAPPINGS)
def process_mappings_to_row_values(self, row_values, exclude_readonly_data):
self._validate_property_not_null(self.responsive_ad, 'responsive_ad')
super(BulkResponsiveAd, self).process_mappings_to_row_values(row_values, exclude_readonly_data)
self.convert_to_values(row_values, BulkResponsiveAd._MAPPINGS)
class BulkResponsiveSearchAd(_BulkAd):
""" Represents a Responsive Search Ad.
This class exposes the :attr:`responsive_search_ad` property that can be read and written as fields of the Responsive Search Ad record in a bulk file.
For more information, see Responsive Search Ad at https://go.microsoft.com/fwlink/?linkid=836840.
*See also:*
* :class:`.BulkServiceManager`
* :class:`.BulkOperation`
* :class:`.BulkFileReader`
* :class:`.BulkFileWriter`
"""
def __init__(self,
ad_group_id=None,
campaign_name=None,
ad_group_name=None,
ad=None):
super(BulkResponsiveSearchAd, self).__init__(
ad_group_id,
campaign_name,
ad_group_name,
ad,
)
        self.responsive_search_ad = ad  # use the validating setter, consistent with the other subclasses
@property
def responsive_search_ad(self):
""" The responsive search ad.
        See Responsive Search Ad at https://go.microsoft.com/fwlink/?linkid=836840.
"""
return self._ad
@responsive_search_ad.setter
def responsive_search_ad(self, rsa):
if rsa is not None and not isinstance(rsa, ResponsiveSearchAd):
raise ValueError('Not an instance of ResponsiveSearchAd')
self._ad = rsa
_MAPPINGS = [
_SimpleBulkMapping(
header=_StringTable.Path1,
field_to_csv=lambda c: bulk_optional_str(c.responsive_search_ad.Path1, c.responsive_search_ad.Id),
csv_to_field=lambda c, v: setattr(c.responsive_search_ad, 'Path1', v)
),
_SimpleBulkMapping(
header=_StringTable.Path2,
field_to_csv=lambda c: bulk_optional_str(c.responsive_search_ad.Path2, c.responsive_search_ad.Id),
csv_to_field=lambda c, v: setattr(c.responsive_search_ad, 'Path2', v)
),
_SimpleBulkMapping(
header=_StringTable.Domain,
field_to_csv=lambda c: bulk_optional_str(c.responsive_search_ad.Domain, c.responsive_search_ad.Id),
csv_to_field=lambda c, v: setattr(c.responsive_search_ad, 'Domain', v)
),
_SimpleBulkMapping(
header=_StringTable.Headline,
field_to_csv=lambda c: field_to_csv_Rsa_TextAssetLinks(c.responsive_search_ad.Headlines),
csv_to_field=lambda c, v: csv_to_field_Rsa_TextAssetLinks(c.responsive_search_ad.Headlines, v)
),
_SimpleBulkMapping(
header=_StringTable.Description,
field_to_csv=lambda c: field_to_csv_Rsa_TextAssetLinks(c.responsive_search_ad.Descriptions),
            csv_to_field=lambda c, v: csv_to_field_Rsa_TextAssetLinks(c.responsive_search_ad.Descriptions, v)
)
]
def process_mappings_from_row_values(self, row_values):
self.responsive_search_ad = _CAMPAIGN_OBJECT_FACTORY_V13.create('ResponsiveSearchAd')
self.responsive_search_ad.Type = 'ResponsiveSearch'
super(BulkResponsiveSearchAd, self).process_mappings_from_row_values(row_values)
row_values.convert_to_entity(self, BulkResponsiveSearchAd._MAPPINGS)
def process_mappings_to_row_values(self, row_values, exclude_readonly_data):
self._validate_property_not_null(self.responsive_search_ad, 'responsive_search_ad')
super(BulkResponsiveSearchAd, self).process_mappings_to_row_values(row_values, exclude_readonly_data)
self.convert_to_values(row_values, BulkResponsiveSearchAd._MAPPINGS) | 38.013123 | 154 | 0.660395 |
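# Illustrative sketch (not part of the SDK source): a Responsive Search Ad
# carries its headlines and descriptions as text asset links, which the
# Headline/Description mappings above serialize to and from CSV. The asset
# texts and ad group id are hypothetical.
def _example_build_responsive_search_ad():
    rsa = _CAMPAIGN_OBJECT_FACTORY_V13.create('ResponsiveSearchAd')
    for text in ('Example Headline 1', 'Example Headline 2', 'Example Headline 3'):
        link = _CAMPAIGN_OBJECT_FACTORY_V13.create('AssetLink')
        link.Asset = _CAMPAIGN_OBJECT_FACTORY_V13.create('TextAsset')
        link.Asset.Text = text
        # Headlines is a suds ArrayOfAssetLink, so items live on .AssetLink.
        rsa.Headlines.AssetLink.append(link)
    for text in ('Example description one.', 'Example description two.'):
        link = _CAMPAIGN_OBJECT_FACTORY_V13.create('AssetLink')
        link.Asset = _CAMPAIGN_OBJECT_FACTORY_V13.create('TextAsset')
        link.Asset.Text = text
        rsa.Descriptions.AssetLink.append(link)
    return BulkResponsiveSearchAd(ad_group_id=123456789, ad=rsa)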
c90d7e77663332d19369fe943c4825caf3ffad82 | 370 | py | Python | src/app/utils.py | vendari12/Quantified-self | 533ffe8e1706c4cde6087c3d3953debc27b41186 | [
"Apache-2.0"
] | 1 | 2022-03-26T18:55:20.000Z | 2022-03-26T18:55:20.000Z | src/app/utils.py | vendari12/Quantified-self | 533ffe8e1706c4cde6087c3d3953debc27b41186 | [
"Apache-2.0"
] | null | null | null | src/app/utils.py | vendari12/Quantified-self | 533ffe8e1706c4cde6087c3d3953debc27b41186 | [
"Apache-2.0"
] | 1 | 2022-03-14T08:59:09.000Z | 2022-03-14T08:59:09.000Z | from flask_login import LoginManager
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
login_manager = LoginManager()
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
#app.add_template_global(index_for_role)
#app.jinja_env.globals.update(json_load=json_load, image_size=image_size)
pass
| 16.818182 | 77 | 0.754054 |
683ca113c8dd3c94f78ce464193b25e04ba98a19 | 27,383 | py | Python | conftest.py | meghana-sesetti/adversarial-robustness-toolbox | 6a5ce9e4142734ad9004e5c093ef8fa754ea6b39 | [
"MIT"
] | 1 | 2020-12-26T10:02:05.000Z | 2020-12-26T10:02:05.000Z | conftest.py | Tikquuss/adversarial-robustness-toolbox | 62ffe7c951d8a60d49a9ea6ac7b04aa4432a3fb7 | [
"MIT"
] | 33 | 2021-01-18T08:30:34.000Z | 2022-03-11T07:05:13.000Z | conftest.py | Tikquuss/adversarial-robustness-toolbox | 62ffe7c951d8a60d49a9ea6ac7b04aa4432a3fb7 | [
"MIT"
] | 1 | 2020-09-28T12:58:01.000Z | 2020-09-28T12:58:01.000Z | # MIT License
#
# Copyright (C) The Adversarial Robustness Toolbox (ART) Authors 2020
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import json
import logging
import os
import shutil
import tempfile
import numpy as np
import pytest
import requests
from art.data_generators import PyTorchDataGenerator, TensorFlowDataGenerator, KerasDataGenerator, MXDataGenerator
from art.defences.preprocessor import FeatureSqueezing, JpegCompression, SpatialSmoothing
from art.estimators.classification import KerasClassifier
from tests.utils import master_seed, get_image_classifier_kr, get_image_classifier_tf, get_image_classifier_pt
from tests.utils import get_tabular_classifier_kr, get_tabular_classifier_tf, get_tabular_classifier_pt
from tests.utils import get_tabular_classifier_scikit_list, load_dataset, get_image_classifier_kr_tf
from tests.utils import get_image_classifier_mxnet_custom_ini, get_image_classifier_kr_tf_with_wildcard
from tests.utils import get_image_classifier_kr_tf_functional, get_image_classifier_kr_functional
from tests.utils import get_attack_classifier_pt
logger = logging.getLogger(__name__)
art_supported_frameworks = ["keras", "tensorflow", "pytorch", "scikitlearn", "kerastf", "mxnet"]
master_seed(1234)
default_framework = "tensorflow"
def pytest_addoption(parser):
parser.addoption(
"--mlFramework",
action="store",
default=default_framework,
help="ART tests allow you to specify which mlFramework to use. The default mlFramework used is `tensorflow`. "
"Other options available are {0}".format(art_supported_frameworks),
)
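# Example invocation (the test path is hypothetical): run the suite against the
# PyTorch backend instead of the default TensorFlow one:
#   pytest --mlFramework=pytorch tests/attacks/test_fast_gradient.py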
@pytest.fixture
def image_dl_estimator_defended(framework):
def _image_dl_estimator_defended(one_classifier=False, **kwargs):
sess = None
classifier_list = None
clip_values = (0, 1)
fs = FeatureSqueezing(bit_depth=2, clip_values=clip_values)
defenses = []
if kwargs.get("defenses") is None:
defenses.append(fs)
else:
if "FeatureSqueezing" in kwargs.get("defenses"):
defenses.append(fs)
if "JpegCompression" in kwargs.get("defenses"):
defenses.append(JpegCompression(clip_values=clip_values, apply_predict=True))
if "SpatialSmoothing" in kwargs.get("defenses"):
defenses.append(SpatialSmoothing())
del kwargs["defenses"]
if framework == "keras":
classifier = get_image_classifier_kr(**kwargs)
# Get the ready-trained Keras model
classifier_list = [
KerasClassifier(model=classifier._model, clip_values=(0, 1), preprocessing_defences=defenses)
]
if framework == "tensorflow":
logging.warning("{0} doesn't have a defended image classifier defined yet".format(framework))
if framework == "pytorch":
logging.warning("{0} doesn't have a defended image classifier defined yet".format(framework))
if framework == "scikitlearn":
logging.warning("{0} doesn't have a defended image classifier defined yet".format(framework))
if framework == "kerastf":
classifier = get_image_classifier_kr_tf(**kwargs)
classifier_list = [
KerasClassifier(model=classifier._model, clip_values=(0, 1), preprocessing_defences=defenses)
]
if classifier_list is None:
return None, None
if one_classifier:
return classifier_list[0], sess
return classifier_list, sess
return _image_dl_estimator_defended
@pytest.fixture
def image_dl_estimator_for_attack(image_dl_estimator, image_dl_estimator_defended):
def _image_dl_estimator_for_attack(attack, defended=False, **kwargs):
if defended:
classifier_list, _ = image_dl_estimator_defended(kwargs)
else:
classifier_list, _ = image_dl_estimator()
if classifier_list is None:
return None
return [
potential_classifier
for potential_classifier in classifier_list
if all(t in type(potential_classifier).__mro__ for t in attack._estimator_requirements)
]
return _image_dl_estimator_for_attack
@pytest.fixture(autouse=True)
def setup_tear_down_framework(framework):
# Ran before each test
if framework == "keras":
pass
if framework == "tensorflow":
import tensorflow as tf
if tf.__version__[0] != "2":
tf.reset_default_graph()
if framework == "pytorch":
pass
if framework == "scikitlearn":
pass
yield True
# Ran after each test
if framework == "keras":
import keras
keras.backend.clear_session()
if framework == "tensorflow":
pass
if framework == "pytorch":
pass
if framework == "scikitlearn":
pass
@pytest.fixture
def image_iterator(framework, is_tf_version_2, get_default_mnist_subset, default_batch_size):
(x_train_mnist, y_train_mnist), (_, _) = get_default_mnist_subset
if framework == "keras" or framework == "kerastf":
from keras.preprocessing.image import ImageDataGenerator
keras_gen = ImageDataGenerator(
width_shift_range=0.075,
height_shift_range=0.075,
rotation_range=12,
shear_range=0.075,
zoom_range=0.05,
fill_mode="constant",
cval=0,
)
return keras_gen.flow(x_train_mnist, y_train_mnist, batch_size=default_batch_size)
if framework == "tensorflow":
import tensorflow as tf
if not is_tf_version_2:
x_tensor = tf.convert_to_tensor(x_train_mnist.reshape(10, 100, 28, 28, 1))
y_tensor = tf.convert_to_tensor(y_train_mnist.reshape(10, 100, 10))
# tmp = x_train_mnist.shape[0] / default_batch_size
# x_tensor = tf.convert_to_tensor(x_train_mnist.reshape(tmp, default_batch_size, 28, 28, 1))
# y_tensor = tf.convert_to_tensor(y_train_mnist.reshape(tmp, default_batch_size, 10))
dataset = tf.data.Dataset.from_tensor_slices((x_tensor, y_tensor))
return dataset.make_initializable_iterator()
if framework == "pytorch":
import torch
# Create tensors from data
x_train_tens = torch.from_numpy(x_train_mnist)
x_train_tens = x_train_tens.float()
y_train_tens = torch.from_numpy(y_train_mnist)
dataset = torch.utils.data.TensorDataset(x_train_tens, y_train_tens)
return torch.utils.data.DataLoader(dataset=dataset, batch_size=default_batch_size, shuffle=True)
if framework == "mxnet":
import mxnet
dataset = mxnet.gluon.data.dataset.ArrayDataset(x_train_mnist, y_train_mnist)
return mxnet.gluon.data.DataLoader(dataset, batch_size=5, shuffle=True)
return None
@pytest.fixture
def image_data_generator(framework, is_tf_version_2, get_default_mnist_subset, image_iterator, default_batch_size):
def _image_data_generator(**kwargs):
(x_train_mnist, y_train_mnist), (_, _) = get_default_mnist_subset
if framework == "keras" or framework == "kerastf":
return KerasDataGenerator(
iterator=image_iterator, size=x_train_mnist.shape[0], batch_size=default_batch_size,
)
if framework == "tensorflow":
if not is_tf_version_2:
return TensorFlowDataGenerator(
sess=kwargs["sess"],
iterator=image_iterator,
iterator_type="initializable",
iterator_arg={},
size=x_train_mnist.shape[0],
batch_size=default_batch_size,
)
if framework == "pytorch":
return PyTorchDataGenerator(
iterator=image_iterator, size=x_train_mnist.shape[0], batch_size=default_batch_size
)
if framework == "mxnet":
return MXDataGenerator(iterator=image_iterator, size=x_train_mnist.shape[0], batch_size=default_batch_size)
return _image_data_generator
@pytest.fixture
def store_expected_values(request, is_tf_version_2):
"""
Stores expected values to be retrieved by the expected_values fixture
Note1: Numpy arrays MUST be converted to list before being stored as json
Note2: It's possible to store both a framework independent and framework specific value. If both are stored the
framework specific value will be used
:param request:
:return:
"""
def _store_expected_values(values_to_store, framework=""):
framework_name = framework
if framework == "tensorflow":
if is_tf_version_2:
framework_name = "tensorflow2"
else:
framework_name = "tensorflow1"
        if framework_name != "":
framework_name = "_" + framework_name
file_name = request.node.location[0].split("/")[-1][:-3] + ".json"
try:
with open(
os.path.join(os.path.dirname(__file__), os.path.dirname(request.node.location[0]), file_name), "r"
) as f:
expected_values = json.load(f)
except FileNotFoundError:
expected_values = {}
test_name = request.node.name + framework_name
expected_values[test_name] = values_to_store
with open(
os.path.join(os.path.dirname(__file__), os.path.dirname(request.node.location[0]), file_name), "w"
) as f:
json.dump(expected_values, f, indent=4)
return _store_expected_values
@pytest.fixture
def expected_values(framework, request, is_tf_version_2):
"""
Retrieves the expected values that were stored using the store_expected_values fixture
:param request:
:return:
"""
file_name = request.node.location[0].split("/")[-1][:-3] + ".json"
framework_name = framework
if framework == "tensorflow":
if is_tf_version_2:
framework_name = "tensorflow2"
else:
framework_name = "tensorflow1"
    if framework_name != "":
framework_name = "_" + framework_name
def _expected_values():
with open(
os.path.join(os.path.dirname(__file__), os.path.dirname(request.node.location[0]), file_name), "r"
) as f:
expected_values = json.load(f)
# searching first for any framework specific expected value
framework_specific_values = request.node.name + framework_name
if framework_specific_values in expected_values:
return expected_values[framework_specific_values]
elif request.node.name in expected_values:
return expected_values[request.node.name]
else:
raise NotImplementedError(
"Couldn't find any expected values for test {0} and framework {1}".format(
request.node.name, framework_name
)
)
return _expected_values
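# The JSON files consumed and produced by the two fixtures above are keyed by
# test name, optionally suffixed with a framework name; the framework-specific
# key wins when both are present. A minimal sketch (values are hypothetical):
#   {
#       "test_generate": [0.25, 0.1],
#       "test_generate_tensorflow2": [0.3, 0.12]
#   }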
@pytest.fixture(scope="session")
def get_image_classifier_mx_model():
import mxnet
# TODO needs to be made parameterizable once Mxnet allows multiple identical models to be created in one session
from_logits = True
class Model(mxnet.gluon.nn.Block):
def __init__(self, **kwargs):
super(Model, self).__init__(**kwargs)
self.model = mxnet.gluon.nn.Sequential()
self.model.add(
mxnet.gluon.nn.Conv2D(channels=1, kernel_size=7, activation="relu",),
mxnet.gluon.nn.MaxPool2D(pool_size=4, strides=4),
mxnet.gluon.nn.Flatten(),
mxnet.gluon.nn.Dense(10, activation=None,),
)
def forward(self, x):
y = self.model(x)
if from_logits:
return y
return y.softmax()
model = Model()
custom_init = get_image_classifier_mxnet_custom_ini()
model.initialize(init=custom_init)
return model
@pytest.fixture
def get_image_classifier_mx_instance(get_image_classifier_mx_model, mnist_shape):
import mxnet
from art.estimators.classification import MXClassifier
model = get_image_classifier_mx_model
def _get_image_classifier_mx_instance(from_logits=True):
if from_logits is False:
# due to the fact that only 1 instance of get_image_classifier_mx_model can be created in one session
# this will be resolved once Mxnet allows for 2 models with identical weights to be created in 1 session
raise NotImplementedError("Currently only supporting Mxnet classifier with from_logit set to True")
loss = mxnet.gluon.loss.SoftmaxCrossEntropyLoss(from_logits=from_logits)
trainer = mxnet.gluon.Trainer(model.collect_params(), "sgd", {"learning_rate": 0.1})
# Get classifier
mxc = MXClassifier(
model=model,
loss=loss,
input_shape=mnist_shape,
# input_shape=(28, 28, 1),
nb_classes=10,
optimizer=trainer,
ctx=None,
channels_first=True,
clip_values=(0, 1),
preprocessing_defences=None,
postprocessing_defences=None,
preprocessing=(0, 1),
)
return mxc
return _get_image_classifier_mx_instance
@pytest.fixture
def supported_losses_types(framework):
    def _supported_losses_types():
if framework == "keras":
return ["label", "function_losses", "function_backend"]
if framework == "kerastf":
# if loss_type is not "label" and loss_name not in ["categorical_hinge", "kullback_leibler_divergence"]:
return ["label", "function", "class"]
raise NotImplementedError("Could not find supported_losses_types for framework {0}".format(framework))
    return _supported_losses_types
@pytest.fixture
def supported_losses_logit(framework):
def _supported_losses_logit():
if framework == "keras":
return ["categorical_crossentropy_function_backend", "sparse_categorical_crossentropy_function_backend"]
if framework == "kerastf":
# if loss_type is not "label" and loss_name not in ["categorical_hinge", "kullback_leibler_divergence"]:
return [
"categorical_crossentropy_function",
"categorical_crossentropy_class",
"sparse_categorical_crossentropy_function",
"sparse_categorical_crossentropy_class",
]
raise NotImplementedError("Could not find supported_losses_logit for framework {0}".format(framework))
return _supported_losses_logit
@pytest.fixture
def supported_losses_proba(framework):
def _supported_losses_proba():
if framework == "keras":
return [
"categorical_hinge_function_losses",
"categorical_crossentropy_label",
"categorical_crossentropy_function_losses",
"categorical_crossentropy_function_backend",
"sparse_categorical_crossentropy_label",
"sparse_categorical_crossentropy_function_losses",
"sparse_categorical_crossentropy_function_backend",
"kullback_leibler_divergence_function_losses",
]
if framework == "kerastf":
return [
"categorical_hinge_function",
"categorical_hinge_class",
"categorical_crossentropy_label",
"categorical_crossentropy_function",
"categorical_crossentropy_class",
"sparse_categorical_crossentropy_label",
"sparse_categorical_crossentropy_function",
"sparse_categorical_crossentropy_class",
"kullback_leibler_divergence_function",
"kullback_leibler_divergence_class",
]
raise NotImplementedError("Could not find supported_losses_proba for framework {0}".format(framework))
return _supported_losses_proba
@pytest.fixture
def image_dl_estimator(framework, get_image_classifier_mx_instance):
def _image_dl_estimator(one_classifier=False, functional=False, **kwargs):
sess = None
wildcard = False
classifier_list = None
if kwargs.get("wildcard") is not None:
if kwargs.get("wildcard") is True:
wildcard = True
del kwargs["wildcard"]
if framework == "keras":
if wildcard is False and functional is False:
if functional:
classifier_list = [get_image_classifier_kr_functional(**kwargs)]
else:
classifier_list = [get_image_classifier_kr(**kwargs)]
if framework == "tensorflow":
if wildcard is False and functional is False:
classifier, sess = get_image_classifier_tf(**kwargs)
classifier_list = [classifier]
if framework == "pytorch":
if wildcard is False and functional is False:
classifier_list = [get_image_classifier_pt(**kwargs)]
if framework == "scikitlearn":
logging.warning("{0} doesn't have an image classifier defined yet".format(framework))
classifier_list = None
if framework == "kerastf":
if wildcard:
classifier_list = [get_image_classifier_kr_tf_with_wildcard(**kwargs)]
else:
if functional:
classifier_list = [get_image_classifier_kr_tf_functional(**kwargs)]
else:
classifier_list = [get_image_classifier_kr_tf(**kwargs)]
if framework == "mxnet":
if wildcard is False and functional is False:
classifier_list = [get_image_classifier_mx_instance(**kwargs)]
if classifier_list is None:
return None, None
if one_classifier:
return classifier_list[0], sess
return classifier_list, sess
return _image_dl_estimator
@pytest.fixture
def get_tabular_classifier_list(framework):
def _get_tabular_classifier_list(clipped=True):
if framework == "keras":
if clipped:
classifier_list = [get_tabular_classifier_kr()]
else:
classifier = get_tabular_classifier_kr()
classifier_list = [KerasClassifier(model=classifier.model, use_logits=False, channels_first=True)]
if framework == "tensorflow":
if clipped:
classifier, _ = get_tabular_classifier_tf()
classifier_list = [classifier]
else:
logging.warning("{0} doesn't have an uncliped classifier defined yet".format(framework))
classifier_list = None
if framework == "pytorch":
if clipped:
classifier_list = [get_tabular_classifier_pt()]
else:
logging.warning("{0} doesn't have an uncliped classifier defined yet".format(framework))
classifier_list = None
if framework == "scikitlearn":
return get_tabular_classifier_scikit_list(clipped=False)
return classifier_list
return _get_tabular_classifier_list
@pytest.fixture
def get_attack_classifier_list(framework):
def _get_attack_classifier_list(one_classifier=False, **kwargs):
if framework == "keras":
logging.warning("{0} doesn't have an image attack defined yet".format(framework))
classifier_list = None
if framework == "tensorflow":
logging.warning("{0} doesn't have an image attack defined yet".format(framework))
classifier_list = None
if framework == "pytorch":
classifier_list = [get_attack_classifier_pt(**kwargs)]
if framework == "scikitlearn":
logging.warning("{0} doesn't have an image attack defined yet".format(framework))
classifier_list = None
if classifier_list is None:
return None
if one_classifier:
return classifier_list[0]
return classifier_list
return _get_attack_classifier_list
@pytest.fixture(scope="function")
def create_test_image(create_test_dir):
test_dir = create_test_dir
# Download one ImageNet pic for tests
url = "http://farm1.static.flickr.com/163/381342603_81db58bea4.jpg"
result = requests.get(url, stream=True)
if result.status_code == 200:
image = result.raw.read()
f = open(os.path.join(test_dir, "test.jpg"), "wb")
f.write(image)
f.close()
yield os.path.join(test_dir, "test.jpg")
@pytest.fixture(scope="session")
def framework(request):
mlFramework = request.config.getoption("--mlFramework")
if mlFramework not in art_supported_frameworks:
raise Exception(
"mlFramework value {0} is unsupported. Please use one of these valid values: {1}".format(
mlFramework, " ".join(art_supported_frameworks)
)
)
# if utils_test.is_valid_framework(mlFramework):
# raise Exception("The mlFramework specified was incorrect. Valid options available
# are {0}".format(art_supported_frameworks))
return mlFramework
@pytest.fixture(scope="session")
def default_batch_size():
yield 16
@pytest.fixture(scope="session")
def is_tf_version_2():
import tensorflow as tf
if tf.__version__[0] == "2":
yield True
else:
yield False
@pytest.fixture(scope="session")
def load_iris_dataset():
logging.info("Loading Iris dataset")
(x_train_iris, y_train_iris), (x_test_iris, y_test_iris), _, _ = load_dataset("iris")
yield (x_train_iris, y_train_iris), (x_test_iris, y_test_iris)
@pytest.fixture(scope="function")
def get_iris_dataset(load_iris_dataset, framework):
(x_train_iris, y_train_iris), (x_test_iris, y_test_iris) = load_iris_dataset
x_train_iris_original = x_train_iris.copy()
y_train_iris_original = y_train_iris.copy()
x_test_iris_original = x_test_iris.copy()
y_test_iris_original = y_test_iris.copy()
yield (x_train_iris, y_train_iris), (x_test_iris, y_test_iris)
np.testing.assert_array_almost_equal(x_train_iris_original, x_train_iris, decimal=3)
np.testing.assert_array_almost_equal(y_train_iris_original, y_train_iris, decimal=3)
np.testing.assert_array_almost_equal(x_test_iris_original, x_test_iris, decimal=3)
np.testing.assert_array_almost_equal(y_test_iris_original, y_test_iris, decimal=3)
@pytest.fixture(scope="session")
def default_dataset_subset_sizes():
n_train = 1000
n_test = 100
yield n_train, n_test
@pytest.fixture()
def mnist_shape(framework):
if framework == "pytorch" or framework == "mxnet":
return (1, 28, 28)
else:
return (28, 28, 1)
@pytest.fixture()
def get_default_mnist_subset(get_mnist_dataset, default_dataset_subset_sizes, mnist_shape):
(x_train_mnist, y_train_mnist), (x_test_mnist, y_test_mnist) = get_mnist_dataset
n_train, n_test = default_dataset_subset_sizes
x_train_mnist = np.reshape(x_train_mnist, (x_train_mnist.shape[0],) + mnist_shape).astype(np.float32)
x_test_mnist = np.reshape(x_test_mnist, (x_test_mnist.shape[0],) + mnist_shape).astype(np.float32)
yield (x_train_mnist[:n_train], y_train_mnist[:n_train]), (x_test_mnist[:n_test], y_test_mnist[:n_test])
@pytest.fixture(scope="session")
def load_mnist_dataset():
logging.info("Loading mnist")
(x_train_mnist, y_train_mnist), (x_test_mnist, y_test_mnist), _, _ = load_dataset("mnist")
yield (x_train_mnist, y_train_mnist), (x_test_mnist, y_test_mnist)
@pytest.fixture(scope="function")
def create_test_dir():
test_dir = tempfile.mkdtemp()
yield test_dir
shutil.rmtree(test_dir)
@pytest.fixture(scope="function")
def get_mnist_dataset(load_mnist_dataset, mnist_shape):
(x_train_mnist, y_train_mnist), (x_test_mnist, y_test_mnist) = load_mnist_dataset
x_train_mnist = np.reshape(x_train_mnist, (x_train_mnist.shape[0],) + mnist_shape).astype(np.float32)
x_test_mnist = np.reshape(x_test_mnist, (x_test_mnist.shape[0],) + mnist_shape).astype(np.float32)
x_train_mnist_original = x_train_mnist.copy()
y_train_mnist_original = y_train_mnist.copy()
x_test_mnist_original = x_test_mnist.copy()
y_test_mnist_original = y_test_mnist.copy()
yield (x_train_mnist, y_train_mnist), (x_test_mnist, y_test_mnist)
# Check that the test data has not been modified, only catches changes in attack.generate if self has been used
np.testing.assert_array_almost_equal(x_train_mnist_original, x_train_mnist, decimal=3)
np.testing.assert_array_almost_equal(y_train_mnist_original, y_train_mnist, decimal=3)
np.testing.assert_array_almost_equal(x_test_mnist_original, x_test_mnist, decimal=3)
np.testing.assert_array_almost_equal(y_test_mnist_original, y_test_mnist, decimal=3)
# ART test fixture to skip test for specific mlFramework values
# eg: @pytest.mark.only_with_platform("tensorflow")
@pytest.fixture(autouse=True)
def only_with_platform(request, framework):
if request.node.get_closest_marker("only_with_platform"):
if framework not in request.node.get_closest_marker("only_with_platform").args:
pytest.skip("skipped on this platform: {}".format(framework))
# ART test fixture to skip test for specific mlFramework values
# eg: @pytest.mark.skipMlFramework("tensorflow","scikitlearn")
@pytest.fixture(autouse=True)
def skip_by_platform(request, framework):
if request.node.get_closest_marker("skipMlFramework"):
if framework in request.node.get_closest_marker("skipMlFramework").args:
pytest.skip("skipped on this platform: {}".format(framework))
@pytest.fixture
def make_customer_record():
def _make_customer_record(name):
return {"name": name, "orders": []}
return _make_customer_record
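# ART test fixture to skip framework-agnostic tests on every framework except the default one
# eg: @pytest.mark.framework_agnostic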
@pytest.fixture(autouse=True)
def framework_agnostic(request, framework):
if request.node.get_closest_marker("framework_agnostic"):
        if framework != default_framework:
pytest.skip("framework agnostic test skipped for framework : {}".format(framework))
| 37.510959 | 120 | 0.674397 |
ee9c6c16b4900505b6a09c0bf7a2dae5293e7576 | 17,726 | py | Python | third_party/tools/trendx_tool/str_unigram_cc_on/tokenizer.py | ITh4cker/auto_tools | 8e0e00cdf8bf60ee3f26fa5ae8f18c376298d0aa | [
"Apache-2.0"
] | null | null | null | third_party/tools/trendx_tool/str_unigram_cc_on/tokenizer.py | ITh4cker/auto_tools | 8e0e00cdf8bf60ee3f26fa5ae8f18c376298d0aa | [
"Apache-2.0"
] | null | null | null | third_party/tools/trendx_tool/str_unigram_cc_on/tokenizer.py | ITh4cker/auto_tools | 8e0e00cdf8bf60ee3f26fa5ae8f18c376298d0aa | [
"Apache-2.0"
] | 1 | 2021-06-16T07:32:17.000Z | 2021-06-16T07:32:17.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
#from __future__ import unicode_literals
import logging
import string
import sys
import time
# Fall back to default JSON lib if third-party libs is not found.
try:
import ujson as json
except ImportError:
try:
import simplejson as json
except ImportError:
import json
logging.basicConfig()
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)
"""Default logger"""
# For trie tree
__END__ = '__end__'
# Form schema
DEFAULT_TOKEN = '_'
ERROR_TOKEN = '\x7F'
# Character sets
#
# A word character is a character from a-z, A-Z, 0-9, including the _ (underscore) character.
# !!!: add '$' char into WORD_CHARS for jQuery.
# Refer to: http://www.w3schools.com/js/js_syntax.asp
# Subsequent characters may be letters, digits, underscores, or dollar signs.
WORD_CHARS = set(list(string.ascii_letters) + list(string.digits) + ['_', '$'])
DIGIT_CHARS = set(list(string.digits))
DIGIT_AND_E_CHARS = set(list(string.digits) + ['e', 'E'])
DIGIT_AND_SIGN_CHARS = set(list(string.digits) + ['+', '-'])
DIGIT_AND_SCIENCE_CHARS = set(list(string.digits) + ['e', 'E', '+', '-'])
E_CHARS = {'e', 'E'}
SIGN_CHARS = {'+', '-'}
# https://developer.mozilla.org/zh-TW/docs/Web/JavaScript/Guide/Grammar_and_types
WHITESPACE_CHARS = set(list(string.whitespace)) # {'\t', '\n', '\x0b', '\x0c', '\r', ' '}
NEWLINE_CHARS = {'\n', '\r'}
# Follow JSLint's rule to distinguish between division operator and regex syntax.
# http://stackoverflow.com/questions/4726295/division-regexp-conflict-while-tokenizing-javascript
LIKELY_REGEX_CHARS = set(list('(,=:[!&|?{};'))
LIMIT_NUMBERS = {'Infinity', 'NaN'}
# Types
#
COMMENT_TYPE = 'CommentType'
SKIP_TYPE = 'SkipType'
STRING_TYPE = 'StringType'
REGEX_TYPE = 'RegExType'
NUMBER_TYPE = 'NumberType'
UNKNOWN_TYPE = 'UnknownType'
JS_TYPES = {STRING_TYPE, REGEX_TYPE, NUMBER_TYPE}
STRUCT_TYPES = {REGEX_TYPE, NUMBER_TYPE, UNKNOWN_TYPE}
# For input/output
#
JS_FILE_EXTENSION = '.js'
# @cc_on
IS_CC_ON = True  # this variant runs with conditional compilation (@cc_on) handling enabled
def load_mapper(schema_path):
with open(schema_path, 'r') as f:
try:
tmp_mapper = json.load(f)
except:
LOGGER.error('Read schema file failed. Please make sure the file format is JSON.')
sys.exit(1)
return {key: val for key, val in tmp_mapper.iteritems()}
def make_trie(words):
root = dict()
for word in words:
current_dict = root
for letter in word:
current_dict = current_dict.setdefault(letter, {})
current_dict[__END__] = __END__
return root
def in_trie(trie, word):
current_dict = trie
for letter in word:
if letter in current_dict:
current_dict = current_dict[letter]
else:
return False
else:
if __END__ in current_dict:
return True
else:
return False
def search_trie(trie, text, i):
current_dict = trie
length = len(text)
for j in xrange(i, length):
letter = text[j]
if letter in current_dict:
current_dict = current_dict[letter]
# For partial matching.
elif __END__ in current_dict:
return j
else:
return -1
else:
if __END__ in current_dict:
return j + 1
else:
return -1
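# A minimal sketch of how the three trie helpers above fit together; the
# operator strings are illustrative.
def _trie_example():
    trie = make_trie(['=', '==', '===', '+'])
    assert in_trie(trie, '===')       # exact membership
    assert not in_trie(trie, '=!')
    # Greedy longest match starting at index 2 of 'a+===b' consumes '===' and
    # returns the index just past the match.
    assert search_trie(trie, 'a+===b', 2) == 5
    assert search_trie(trie, 'abc', 0) == -1  # no match at all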
def is_escaping(text, i):
return backward_count(text, i, '\\') % 2
def backward_count(text, i, char):
count = 0
i -= 1
while i > 0:
if text[i] != char:
break
count += 1
i -= 1
return count
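# For instance, in the 4-char text  a\\'  the quote at index 3 is preceded by
# two backslashes, so backward_count(text, 3, '\\') == 2 and is_escaping()
# evaluates falsy: the quote really terminates the string.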
def get_prev_valid_char(text, i, tokens, chunks):
tmp_chunks = []
length = len(tokens)
end = -1
begin = -1 * (length + 1)
for k in xrange(end, begin, -1):
tmp_chunks.append(chunks[k])
if tokens[k] not in {SKIP_TYPE, COMMENT_TYPE}:
break
i -= len(''.join(tmp_chunks))
# LOGGER.debug('get_prev_valid_char: k: {}, tmp_chunks: {}, num_tmp_chunks: {}, num_tokens: {} num_chunks: {}'.format(
# k, tmp_chunks, len(tmp_chunks), length, len(chunks)))
char = None
while i > 0:
if text[i] not in WHITESPACE_CHARS:
char = text[i]
break
i -= 1
return char
def obfuscate(file_path, mapper, trie):
    '''Obfuscate a JavaScript file, following ATSE's rules.
    Step 0: Build a trie tree for searching and matching.
    Step 1: Find word boundaries and tokenize.
    Step 2: Apply custom type matchers: Number, String, RegEx and Comment.
    Step 3: While tokenizing, use the trie tree to resolve ambiguous words.
    Step 4: Also use the trie tree to classify the remaining words as Number or Unknown type,
        like regular expression's \w.
    Step 5: Convert the found words to one-byte chars via a custom mapper/schema (from ATSE).
    '''
# Process JS file only.
# if file_path.suffix != JS_FILE_EXTENSION:
# return
LOGGER.info('Obfuscating file: {}'.format(file_path))
tokens = []
chunks = [] # Recording splitted words
strings = []
struct_tokens = []
# Read file in binary mode is much faster.
with open(file_path, 'rb') as f:
text = f.read()
# # !!!: Convert to byte array is WAY faster.
# try:
# text = bytearray(text)
# except TypeError:
# pass
length = len(text)
last = 0
i = 0
data_type = None
while i < length:
# LOGGER.debug('i letter: {}'.format(text[i]))
# Primary tokenization
i, data_type = tokenize(text, i, trie, tokens, chunks)
token = None
if data_type:
add_token_by_type(tokens, data_type)
# LOGGER.debug('Found data type token: {}'.format(text[last:i + 1]))
if data_type == STRING_TYPE:
str_ = text[last + 1:i]
strings.append(str_)
else:
# Secondary token matching
token, data_type = match(text, last, i, trie)
if token:
tokens.append(token)
else:
add_token_by_type(tokens, data_type)
chunk = text[last:i + 1]
chunks.append(chunk)
# Handle structure tokens
if token or data_type in STRUCT_TYPES:
# XXX: Follow ATSE's rule, stop encoding while an error occurs.
if mapper.get(token, DEFAULT_TOKEN) == ERROR_TOKEN:
break
struct_tokens.append(chunk)
i += 1
last = i
# LOGGER.debug('Tokens: {}'.format(tokens))
obfuscated = []
for token in tokens:
# Follow ATSE's rule, stop encoding while an error occurs.
# TODO: create obfuscated list simultaneously.
if mapper.get(token, DEFAULT_TOKEN) == ERROR_TOKEN:
break
obfuscated_token = mapper.get(token, DEFAULT_TOKEN)
obfuscated.append(obfuscated_token)
# LOGGER.debug('{} => {}'.format(token, obfuscated_token))
# obfuscated = [mapper.get(token, DEFAULT_TOKEN) for token in tokens
# if mapper.get(token, DEFAULT_TOKEN) != ERROR_TOKEN]
# if LOGGER.isEnabledFor(logging.DEBUG):
# length = len(obfuscated)
# for i in xrange(length):
# LOGGER.debug('{} => {}'.format(tokens[i], obfuscated[i]))
obfuscated_text = ''.join(obfuscated)
# LOGGER.debug('Obfuscated list: {}'.format(obfuscated))
# LOGGER.debug('Obfuscated text: {}'.format(obfuscated_text))
# return obfuscated_text
return ''.join(strings)
# return struct_tokens
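# A minimal driver sketch for the pipeline above; the schema path is
# hypothetical (the real token schema comes from ATSE).
def _example_main(js_path='sample.js', schema_path='schema.json'):
    mapper = load_mapper(schema_path)
    trie = make_trie(mapper.keys())  # the schema keys double as the token vocabulary
    return obfuscate(js_path, mapper, trie)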
def tokenize(text, i, trie, tokens, chunks):
length = len(text)
# Always starts from boundary,
# because we're looking for another boundary to split text.
is_prev_boundary = True
ori_i = i
j = i
num_slashs = 0
is_single_comment = False
is_multi_comments = False
is_string = False
while j < length:
letter = text[j]
LOGGER.debug('j: {} letter: {}'.format(j, letter))
# Comments
# Will eat up everything until the terminator to be matched, including nested comments.
#
        # Mixed examples with different comment types: 'ooxx"gg"damn' or "ooxx'gg'damn"
first_two = text[j:j + 2]
if first_two == '//':
is_single_comment = True
j += 2
while j < length:
if text[j] in NEWLINE_CHARS:
break
j += 1
i = j
break
elif first_two == '/*':
# XXX: Do tokenization for /*@ @*/ comment if @cc_on is been turned on.
if text[j:j + 8] == '/*@cc_on':
j += 7
i = j
global IS_CC_ON
IS_CC_ON = True
                LOGGER.info('@cc_on is on.')
break
is_multi_comments = True
j += 2
while j < length:
if text[j - 1:j + 1] == '*/':
break
j += 1
i = j
break
# Floating point numbers, including scientific notation form.
# Example: Math.random()||.1,
#
# Scientific notation Examples:
# var a = 4.23E-10;
# var a = 4.23e-10;
# var a = 4.23E+10;
# var a = 4.23e+10;
# var a = 4.23E10;
# var a = 4.23e10;
#
# var a = 4.E-10;
# var a = 4.e-10;
# var a = 4.E+10;
# var a = 4.e+10;
# var a = 4.E10;
# var a = 4.e10;
elif text[j] == '.' and (j + 1) < length and text[j + 1] in DIGIT_AND_E_CHARS:
# Example: return.5-Math.cos(a*Math.PI)/2
# Example with 'e' char: var exponent = parsedNumber.e;
if is_prev_boundary or ((j - 1) >= 0 and text[j - 1] in DIGIT_CHARS):
# !!!: Scientific notation must be followed by digits or sign symbols.
if text[j + 1] in E_CHARS and (j + 2) < length and text[j + 2] not in DIGIT_AND_SIGN_CHARS:
pass
else:
j += 1
# XXX: Reset to False, so it'll go to Boundary type II.
is_prev_boundary = False
while j < length:
if text[j] in E_CHARS:
j += 1
if j < length and text[j] in SIGN_CHARS:
j += 1
elif text[j] not in DIGIT_CHARS:
break
j += 1
        # Integer numbers in scientific notation
#
# Scientific notation Examples:
# var a = 4E-10;
# var a = 4e-10;
# var a = 4E+10;
# var a = 4e+10;
# var a = 4E10;
# var a = 4e10;
elif text[j] in E_CHARS and (j - 1) >= 0 and text[j - 1] in DIGIT_CHARS:
j += 1
if j < length and text[j] in SIGN_CHARS:
j += 2
# String
#
# With single quote
        # XXX: Hard-code the is_prev_boundary check, in order to fix the minified case: return"function".
elif text[j] == '\'' and is_prev_boundary and not is_escaping(text, j):
is_string = True
j += 1
while j < length:
if text[j] == '\'' and not is_escaping(text, j):
break
j += 1
i = j
break
# With double quotes
elif text[j] == '"' and is_prev_boundary and not is_escaping(text, j):
is_string = True
j += 1
while j < length:
if text[j] == '"' and not is_escaping(text, j):
break
j += 1
i = j
break
# Division operator or regex syntax
#
elif text[j] == '/' and is_prev_boundary and not is_escaping(text, j):
prev_char = get_prev_valid_char(text, j, tokens, chunks)
# LOGGER.debug('{}CHECK REGEX{}: text[j-1]: {}, prev char: {}, In RegEx Chars: {}, is_prev_boundary: {}'.format(
# '-'*10, '-'*10, text[j - 1], prev_char, prev_char not in LIKELY_REGEX_CHARS, is_prev_boundary))
# !!!: Special case: return/[$_a-zA-Z][$_a-zA-Z0-9]*/.test(b)
if ((chunks and chunks[-1] == 'return') or
((j - 1) >= 0 and text[j - 1] not in WORD_CHARS and prev_char in LIKELY_REGEX_CHARS)):
num_slashs += 1
# LOGGER.debug('num_slashs++')
j += 1
# XXX: Reset to False, so it'll go to Boundary type II.
is_prev_boundary = False
while j < length:
if text[j] == '/' and not is_escaping(text, j):
num_slashs += 1
break
j += 1
elif IS_CC_ON and text[j:j + 3] == '@*/':
j += 2
i = j
            LOGGER.info('Skip conditional comment end tag, @*/')
break
# RegEx may come with modifiers, like /\w+/ig.
# So that, we should read more chars.
if 0 < num_slashs and num_slashs <= 2:
j += 1
if num_slashs == 2:
num_slashs += 1
continue
# !!!: Check is j bigger than length, since j may have been modified.
# Boundary Type I: boundary is between current char and its next char.
# Example: "ooxox"; <== boundary is bet. "\"" and ";"
if is_prev_boundary and (j >= length or text[j] not in WORD_CHARS):
# Greedy searching trie tree for ambiguous examples, e.g., "=", "==" and "===".
# For example:
# when we meet "=", we should peek the following two chars,
# because the to-be-parsed word may be "==" or "===".
pos = search_trie(trie, text, j)
LOGGER.debug('SEARCH FOUND WITH BOUNDARY: {}, pos: {}, j: {}'.format(text[j], pos, j))
if pos >= 0:
i = pos - 1
else:
i = j
break
# Boundary Type II: boundary is between previous char and current char.
# Example: /[A-Z]/ig; <== boundary is bet. "g" and ";"
elif not is_prev_boundary and (j >= length or text[j] not in WORD_CHARS):
pos = search_trie(trie, text, j - 1)
LOGGER.debug('SEARCH FOUND WITHOUT BOUNDARY: {}, pos: {}, j: {}'.format(text[j - 1], pos, j))
if pos >= 0:
i = pos - 1
else:
i = j - 1
break
# Set up boundary
if j < length and text[j] not in WORD_CHARS:
is_prev_boundary = True
else:
is_prev_boundary = False
j += 1
data_type = None
if is_single_comment or is_multi_comments:
data_type = COMMENT_TYPE
elif is_string:
data_type = STRING_TYPE
elif num_slashs == 3:
data_type = REGEX_TYPE
# Fixed non-js file case, e.g., jsdb2/cc/files/63498de04bd2099e438d1a5dd1b709d84970e07b
if i == ori_i and j == length:
i = length - 1
return (i, data_type)
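# NOTE: the scanner above leans on an is_escaping(text, j) helper defined
# elsewhere in this module. A minimal sketch of what such a helper usually
# looks like is given below under a hypothetical name; it is an assumption,
# not the module's actual implementation. A character is escaped when it is
# preceded by an odd number of backslashes.
def _example_is_escaping(text, j):
    """Return True if text[j] is preceded by an odd number of backslashes."""
    num_backslashes = 0
    k = j - 1
    while k >= 0 and text[k] == '\\':
        num_backslashes += 1
        k -= 1
    return num_backslashes % 2 == 1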
def add_token_by_type(tokens=None, data_type=None):
if tokens is None:
return
if data_type == COMMENT_TYPE:
tokens.append(data_type)
elif data_type in JS_TYPES:
tokens.append(data_type)
elif data_type == UNKNOWN_TYPE:
tokens.append(DEFAULT_TOKEN)
else:
# Try to record everything.
tokens.append(SKIP_TYPE)
def match(text, last, i, trie):
token = text[last:i + 1]
is_matched = in_trie(trie, token)
# LOGGER.debug('Is Matched: {}: token: {}, last: {}, i: {}'.format(is_matched, token, last, i))
matched_token = None
data_type = None
if is_matched:
# XXX: Follow ATSE's logic to deal with @ sign.
if not IS_CC_ON and token == '@':
matched_token = ERROR_TOKEN
else:
matched_token = token
    # FIXME: check that all chars in text[last:i + 1] belong to WORD_CHARS.
# Floating example: Math.random()||.1,z=x.length;
elif not is_matched and (text[last] == '.' or text[last] in WORD_CHARS):
# !!!: Don't let Python cast float('Infinity') to inf, or cast float('NaN') to nan.
if token in LIMIT_NUMBERS:
data_type = UNKNOWN_TYPE
elif text[last:last + 2] == '0x':
try:
int(token, 16)
data_type = NUMBER_TYPE
except ValueError:
data_type = UNKNOWN_TYPE
elif text[last] == '0':
try:
int(token, 8)
data_type = NUMBER_TYPE
except ValueError:
try:
float(token)
data_type = NUMBER_TYPE
except ValueError:
data_type = UNKNOWN_TYPE
else:
try:
float(token)
data_type = NUMBER_TYPE
except ValueError:
data_type = UNKNOWN_TYPE
# Debugging
# LOGGER.debug('Found data_type: {}'.format(data_type))
# if matched_token:
# LOGGER.debug('Found token: {}'.format(token))
# elif data_type == NUMBER_TYPE:
# LOGGER.debug('Found number token: {}'.format(token))
# elif data_type == UNKNOWN_TYPE:
# LOGGER.debug('Found unknown token: {}'.format(token))
return (matched_token, data_type)
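# NOTE: match() depends on in_trie(trie, token), and the scanner on
# search_trie(trie, text, j); neither is shown in this excerpt. As an
# assumption-labelled sketch only: with a trie stored as nested dicts where a
# special marker flags complete tokens, membership lookup walks one dict level
# per character.
_EXAMPLE_END_MARK = '$$'  # hypothetical end-of-token marker
def _example_in_trie(trie, token):
    """Return True if token is a complete entry in a nested-dict trie."""
    node = trie
    for ch in token:
        if ch not in node:
            return False
        node = node[ch]
    return _EXAMPLE_END_MARK in node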
# vim: set hls is ai et sw=4 sts=4 ts=8 nu ft=python:
| 32.704797 | 123 | 0.542649 |
9a92133b54b41ef6ce7342543bc88b938e290514 | 9,130 | py | Python | 3rdParty/V8/V8-5.0.71.39/tools/swarming_client/tools/swarming_load_test_client.py | mikestaub/arangodb | 1bdf414de29b31bcaf80769a095933f66f8256ce | ["ICU", "BSL-1.0", "Zlib", "Apache-2.0"] | 27 | 2016-04-27T01:02:03.000Z | 2021-12-13T08:53:19.000Z | 3rdParty/V8/V8-5.0.71.39/tools/swarming_client/tools/swarming_load_test_client.py | mikestaub/arangodb | 1bdf414de29b31bcaf80769a095933f66f8256ce | ["ICU", "BSL-1.0", "Zlib", "Apache-2.0"] | 2 | 2017-03-09T09:00:50.000Z | 2017-09-21T15:48:20.000Z | 3rdParty/V8/V8-5.0.71.39/tools/swarming_client/tools/swarming_load_test_client.py | mikestaub/arangodb | 1bdf414de29b31bcaf80769a095933f66f8256ce | ["ICU", "BSL-1.0", "Zlib", "Apache-2.0"] | 17 | 2016-04-27T02:06:39.000Z | 2019-12-18T08:07:00.000Z |
#!/usr/bin/env python
# Copyright 2013 The Swarming Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0 that
# can be found in the LICENSE file.
"""Triggers a ton of fake jobs to test the server's handling under high load.
Generates a histogram with the latencies to process the tasks and the number
of retries.
"""
import json
import logging
import optparse
import os
import random
import re
import string
import sys
import time
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, ROOT_DIR)
from third_party import colorama
import swarming
from utils import graph
from utils import net
from utils import threading_utils
import swarming_load_test_bot
# Amount of time the timer should be reduced on the Swarming side.
TIMEOUT_OVERHEAD = 10
def print_results(results, columns, buckets):
delays = [i for i in results if isinstance(i, float)]
failures = [i for i in results if not isinstance(i, float)]
graph.print_histogram(
graph.generate_histogram(delays, buckets), columns, '%5.3f')
print('')
print('Total items : %d' % len(results))
average = 0
if delays:
    average = sum(delays) / len(delays)
print('Average delay: %.2fs' % average)
#print('Average overhead: %s' % graph.to_units(total_size / len(sizes)))
print('')
if failures:
print('')
print('%sFAILURES%s:' % (colorama.Fore.RED, colorama.Fore.RESET))
print('\n'.join(' %s' % i for i in failures))
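# NOTE: print_results() defers the actual bucketing to utils.graph, which is
# outside this excerpt. Purely as a hedged illustration (the real
# graph.generate_histogram may differ), a helper of that shape typically maps
# each delay into one of N equal-width buckets:
def _example_generate_histogram(values, buckets):
  """Buckets float values into `buckets` equal-width bins; returns a dict."""
  out = {}
  if not values:
    return out
  lo, hi = min(values), max(values)
  width = ((hi - lo) / buckets) or 1.
  for v in values:
    key = lo + width * min(int((v - lo) / width), buckets - 1)
    out[key] = out.get(key, 0) + 1
  return out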
def trigger_task(
swarming_url, dimensions, sleep_time, output_size, progress,
unique, timeout, index):
"""Triggers a Swarming job and collects results.
Returns the total amount of time to run a task remotely, including all the
overhead.
"""
name = 'load-test-%d-%s' % (index, unique)
start = time.time()
logging.info('trigger')
manifest = swarming.Manifest(
isolate_server='http://localhost:1',
namespace='dummy-isolate',
isolated_hash=1,
task_name=name,
extra_args=[],
env={},
dimensions=dimensions,
deadline=int(timeout-TIMEOUT_OVERHEAD),
verbose=False,
profile=False,
priority=100)
cmd = [
'python',
'-c',
'import time; print(\'1\'*%s); time.sleep(%d); print(\'Back\')' %
(output_size, sleep_time)
]
manifest.add_task('echo stuff', cmd)
data = {'request': manifest.to_json()}
response = net.url_open(swarming_url + '/test', data=data)
if not response:
# Failed to trigger. Return a failure.
return 'failed_trigger'
result = json.load(response)
  # Old API uses a hardcoded config name. New API doesn't have the concept of a
  # config name, so it uses the task name. Ignore this detail.
test_keys = []
for key in result['test_keys']:
key.pop('config_name')
test_keys.append(key.pop('test_key'))
assert re.match('[0-9a-f]+', test_keys[-1]), test_keys
expected = {
u'priority': 100,
u'test_case_name': unicode(name),
u'test_keys': [
{
u'num_instances': 1,
u'instance_index': 0,
}
],
}
assert result == expected, '\n%s\n%s' % (result, expected)
progress.update_item('%5d' % index, processing=1)
try:
logging.info('collect')
new_test_keys = swarming.get_task_keys(swarming_url, name)
if not new_test_keys:
return 'no_test_keys'
assert test_keys == new_test_keys, (test_keys, new_test_keys)
out = [
output
for _index, output in swarming.yield_results(
swarming_url, test_keys, timeout, None, False, None)
]
if not out:
return 'no_result'
for item in out:
item.pop('machine_tag')
item.pop('machine_id')
# TODO(maruel): Assert output even when run on a real bot.
_out_actual = item.pop('output')
# assert out_actual == swarming_load_test_bot.TASK_OUTPUT, out_actual
expected = [
{
u'config_instance_index': 0,
u'exit_codes': u'0',
u'num_config_instances': 1,
}
]
assert out == expected, '\n%s\n%s' % (out, expected)
return time.time() - start
finally:
progress.update_item('%5d - done' % index, processing=-1, processed=1)
def main():
colorama.init()
parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
parser.add_option(
'-S', '--swarming',
metavar='URL', default='',
help='Swarming server to use')
swarming.add_filter_options(parser)
parser.set_defaults(dimensions=[('os', swarming_load_test_bot.OS_NAME)])
  group = optparse.OptionGroup(parser, 'Load generation')
group.add_option(
'-s', '--send-rate', type='float', default=16., metavar='RATE',
help='Rate (item/s) of sending requests as a float, default: %default')
group.add_option(
'-D', '--duration', type='float', default=60., metavar='N',
help='Duration (s) of the sending phase of the load test, '
'default: %default')
group.add_option(
'-m', '--concurrent', type='int', default=200, metavar='N',
help='Maximum concurrent on-going requests, default: %default')
group.add_option(
'-t', '--timeout', type='float', default=15*60., metavar='N',
      help='Task expiration and timeout to get results; the task itself will '
           'be given %ds less than the value provided. Default: %%default' %
TIMEOUT_OVERHEAD)
group.add_option(
'-o', '--output-size', type='int', default=100, metavar='N',
help='Bytes sent to stdout, default: %default')
group.add_option(
'--sleep', type='int', default=60, metavar='N',
help='Amount of time the bot should sleep, e.g. faking work, '
'default: %default')
parser.add_option_group(group)
group = optparse.OptionGroup(parser, 'Display options')
group.add_option(
'--columns', type='int', default=graph.get_console_width(), metavar='N',
help='For histogram display, default:%default')
group.add_option(
'--buckets', type='int', default=20, metavar='N',
help='Number of buckets for histogram display, default:%default')
parser.add_option_group(group)
parser.add_option(
'--dump', metavar='FOO.JSON', help='Dumps to json file')
parser.add_option(
'-v', '--verbose', action='store_true', help='Enables logging')
options, args = parser.parse_args()
logging.basicConfig(level=logging.INFO if options.verbose else logging.FATAL)
if args:
parser.error('Unsupported args: %s' % args)
options.swarming = options.swarming.rstrip('/')
if not options.swarming:
parser.error('--swarming is required.')
if options.duration <= 0:
parser.error('Needs --duration > 0. 0.01 is a valid value.')
swarming.process_filter_options(parser, options)
total = int(round(options.send_rate * options.duration))
print(
'Sending %.1f i/s for %ds with max %d parallel requests; timeout %.1fs; '
'total %d' %
(options.send_rate, options.duration, options.concurrent,
options.timeout, total))
print('[processing/processed/todo]')
# This is used so there's no clash between runs and actual real usage.
unique = ''.join(random.choice(string.ascii_letters) for _ in range(8))
columns = [('processing', 0), ('processed', 0), ('todo', 0)]
progress = threading_utils.Progress(columns)
index = 0
results = []
with threading_utils.ThreadPoolWithProgress(
progress, 1, options.concurrent, 0) as pool:
try:
start = time.time()
while True:
duration = time.time() - start
if duration > options.duration:
break
should_have_triggered_so_far = int(round(duration * options.send_rate))
while index < should_have_triggered_so_far:
pool.add_task(
0,
trigger_task,
options.swarming,
options.dimensions,
options.sleep,
options.output_size,
progress,
unique,
options.timeout,
index)
progress.update_item('', todo=1)
index += 1
progress.print_update()
time.sleep(0.01)
progress.update_item('Getting results for on-going tasks.', raw=True)
for i in pool.iter_results():
results.append(i)
# This is a bit excessive but it's useful in the case where some tasks
      # hang, so at least partial data is available.
if options.dump:
results.sort()
if os.path.exists(options.dump):
os.rename(options.dump, options.dump + '.old')
with open(options.dump, 'wb') as f:
json.dump(results, f, separators=(',',':'))
if not options.dump:
results.sort()
except KeyboardInterrupt:
aborted = pool.abort()
progress.update_item(
'Got Ctrl-C. Aborted %d unsent tasks.' % aborted,
raw=True,
todo=-aborted)
progress.print_update()
progress.print_update()
# At this point, progress is not used anymore.
print('')
print(' - Took %.1fs.' % (time.time() - start))
print('')
print_results(results, options.columns, options.buckets)
return 0
if __name__ == '__main__':
sys.exit(main())
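# Example invocation (host and values are illustrative only):
#   python swarming_load_test_client.py -S https://swarming.example.com \
#       --send-rate 16 --duration 60 --concurrent 200 --dump results.json
# This sends roughly 16 tasks/s for 60s (960 tasks in total) and dumps the
# per-task latencies to results.json for later analysis.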
| 32.491103 | 80 | 0.647645 |
eb23d7a17ff5950ca374f6cb8a76b6940898330d | 5,140 | py | Python | kubernetes/client/models/v1_env_from_source.py | lp67/python | 33c5ea9835356410ce4a9fa54a02c6a2a22143c6 | ["Apache-2.0"] | null | null | null | kubernetes/client/models/v1_env_from_source.py | lp67/python | 33c5ea9835356410ce4a9fa54a02c6a2a22143c6 | ["Apache-2.0"] | 4 | 2019-11-19T10:33:47.000Z | 2022-03-01T03:33:52.000Z | kubernetes/client/models/v1_env_from_source.py | mohramadan911/PythonClientAPI | 5d111812c81b7a573ac8661d1aec60bb97072412 | ["Apache-2.0"] | 2 | 2021-08-10T16:35:31.000Z | 2021-09-14T04:53:06.000Z |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.20
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1EnvFromSource(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'config_map_ref': 'V1ConfigMapEnvSource',
'prefix': 'str',
'secret_ref': 'V1SecretEnvSource'
}
attribute_map = {
'config_map_ref': 'configMapRef',
'prefix': 'prefix',
'secret_ref': 'secretRef'
}
def __init__(self, config_map_ref=None, prefix=None, secret_ref=None, local_vars_configuration=None): # noqa: E501
"""V1EnvFromSource - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._config_map_ref = None
self._prefix = None
self._secret_ref = None
self.discriminator = None
if config_map_ref is not None:
self.config_map_ref = config_map_ref
if prefix is not None:
self.prefix = prefix
if secret_ref is not None:
self.secret_ref = secret_ref
@property
def config_map_ref(self):
"""Gets the config_map_ref of this V1EnvFromSource. # noqa: E501
:return: The config_map_ref of this V1EnvFromSource. # noqa: E501
:rtype: V1ConfigMapEnvSource
"""
return self._config_map_ref
@config_map_ref.setter
def config_map_ref(self, config_map_ref):
"""Sets the config_map_ref of this V1EnvFromSource.
:param config_map_ref: The config_map_ref of this V1EnvFromSource. # noqa: E501
:type: V1ConfigMapEnvSource
"""
self._config_map_ref = config_map_ref
@property
def prefix(self):
"""Gets the prefix of this V1EnvFromSource. # noqa: E501
An optional identifier to prepend to each key in the ConfigMap. Must be a C_IDENTIFIER. # noqa: E501
:return: The prefix of this V1EnvFromSource. # noqa: E501
:rtype: str
"""
return self._prefix
@prefix.setter
def prefix(self, prefix):
"""Sets the prefix of this V1EnvFromSource.
An optional identifier to prepend to each key in the ConfigMap. Must be a C_IDENTIFIER. # noqa: E501
:param prefix: The prefix of this V1EnvFromSource. # noqa: E501
:type: str
"""
self._prefix = prefix
@property
def secret_ref(self):
"""Gets the secret_ref of this V1EnvFromSource. # noqa: E501
:return: The secret_ref of this V1EnvFromSource. # noqa: E501
:rtype: V1SecretEnvSource
"""
return self._secret_ref
@secret_ref.setter
def secret_ref(self, secret_ref):
"""Sets the secret_ref of this V1EnvFromSource.
:param secret_ref: The secret_ref of this V1EnvFromSource. # noqa: E501
:type: V1SecretEnvSource
"""
self._secret_ref = secret_ref
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1EnvFromSource):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1EnvFromSource):
return True
return self.to_dict() != other.to_dict()
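# Usage sketch (hedged): populate a container's environment from an entire
# ConfigMap, prefixing every injected key. 'game-config' is a placeholder
# name; V1ConfigMapEnvSource is the sibling generated model.
#
#   from kubernetes.client import V1ConfigMapEnvSource, V1EnvFromSource
#
#   env_from = V1EnvFromSource(
#       config_map_ref=V1ConfigMapEnvSource(name='game-config'),
#       prefix='CFG_')
#   print(env_from.to_dict())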
| 29.371429 | 124 | 0.604475 |
8e3c61d098dcb79591172d5d798be6c7ba001049 | 28,650 | py | Python | tb_rest_client/api/api_pe/role_controller_api.py | samson0v/python_tb_rest_client | 08ff7898740f7cec2170e85d5c3c89e222e967f7 | ["Apache-2.0"] | 30 | 2020-06-19T06:42:50.000Z | 2021-08-23T21:16:36.000Z | tb_rest_client/api/api_pe/role_controller_api.py | samson0v/python_tb_rest_client | 08ff7898740f7cec2170e85d5c3c89e222e967f7 | ["Apache-2.0"] | 25 | 2021-08-30T01:17:27.000Z | 2022-03-16T14:10:14.000Z | tb_rest_client/api/api_pe/role_controller_api.py | samson0v/python_tb_rest_client | 08ff7898740f7cec2170e85d5c3c89e222e967f7 | ["Apache-2.0"] | 23 | 2020-07-06T13:41:54.000Z | 2021-08-23T21:04:50.000Z |
# coding: utf-8
"""
ThingsBoard REST API
ThingsBoard Professional Edition IoT platform REST API documentation. # noqa: E501
OpenAPI spec version: 3.3.3PAAS-RC1
Contact: info@thingsboard.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class RoleControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_role_using_delete(self, role_id, **kwargs): # noqa: E501
"""Delete role (deleteRole) # noqa: E501
Deletes the role. Referencing non-existing role Id will cause an error. Security check is performed to verify that the user has 'DELETE' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_role_using_delete(role_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str role_id: A string value representing the role id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_role_using_delete_with_http_info(role_id, **kwargs) # noqa: E501
else:
(data) = self.delete_role_using_delete_with_http_info(role_id, **kwargs) # noqa: E501
return data
def delete_role_using_delete_with_http_info(self, role_id, **kwargs): # noqa: E501
"""Delete role (deleteRole) # noqa: E501
Deletes the role. Referencing non-existing role Id will cause an error. Security check is performed to verify that the user has 'DELETE' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_role_using_delete_with_http_info(role_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str role_id: A string value representing the role id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['role_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_role_using_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'role_id' is set
if ('role_id' not in params or
params['role_id'] is None):
raise ValueError("Missing the required parameter `role_id` when calling `delete_role_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'role_id' in params:
path_params['roleId'] = params['role_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/role/{roleId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_role_by_id_using_get(self, role_id, **kwargs): # noqa: E501
"""Get Role by Id (getRoleById) # noqa: E501
        Fetch the Role object based on the provided Role Id. Role contains a set of permissions. Role has two types. Generic Role may be assigned to the user group and will provide permissions for all entities of a certain type. Group Role may be assigned to both user and entity group and will provide permissions only for the entities that belong to the specified entity group. The assignment of the Role to the User Group is done using [Group Permission Controller](/swagger-ui.html#/group-permission-controller). Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_role_by_id_using_get(role_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str role_id: A string value representing the role id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_role_by_id_using_get_with_http_info(role_id, **kwargs) # noqa: E501
else:
(data) = self.get_role_by_id_using_get_with_http_info(role_id, **kwargs) # noqa: E501
return data
def get_role_by_id_using_get_with_http_info(self, role_id, **kwargs): # noqa: E501
"""Get Role by Id (getRoleById) # noqa: E501
        Fetch the Role object based on the provided Role Id. Role contains a set of permissions. Role has two types. Generic Role may be assigned to the user group and will provide permissions for all entities of a certain type. Group Role may be assigned to both user and entity group and will provide permissions only for the entities that belong to the specified entity group. The assignment of the Role to the User Group is done using [Group Permission Controller](/swagger-ui.html#/group-permission-controller). Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_role_by_id_using_get_with_http_info(role_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str role_id: A string value representing the role id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['role_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_role_by_id_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'role_id' is set
if ('role_id' not in params or
params['role_id'] is None):
raise ValueError("Missing the required parameter `role_id` when calling `get_role_by_id_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'role_id' in params:
path_params['roleId'] = params['role_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/role/{roleId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Role', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_roles_by_ids_using_get(self, role_ids, **kwargs): # noqa: E501
"""Get Roles By Ids (getRolesByIds) # noqa: E501
Returns the list of rows based on their ids. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_roles_by_ids_using_get(role_ids, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str role_ids: A list of role ids, separated by comma ',' (required)
:return: list[Role]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_roles_by_ids_using_get_with_http_info(role_ids, **kwargs) # noqa: E501
else:
(data) = self.get_roles_by_ids_using_get_with_http_info(role_ids, **kwargs) # noqa: E501
return data
def get_roles_by_ids_using_get_with_http_info(self, role_ids, **kwargs): # noqa: E501
"""Get Roles By Ids (getRolesByIds) # noqa: E501
Returns the list of rows based on their ids. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_roles_by_ids_using_get_with_http_info(role_ids, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str role_ids: A list of role ids, separated by comma ',' (required)
:return: list[Role]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['role_ids'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_roles_by_ids_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'role_ids' is set
if ('role_ids' not in params or
params['role_ids'] is None):
raise ValueError("Missing the required parameter `role_ids` when calling `get_roles_by_ids_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'role_ids' in params:
query_params.append(('roleIds', params['role_ids'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/roles{?roleIds}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Role]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_roles_using_get(self, page_size, page, **kwargs): # noqa: E501
"""Get Roles (getRoles) # noqa: E501
        Returns a page of roles that are available for the current user. Role contains a set of permissions. Role has two types. Generic Role may be assigned to the user group and will provide permissions for all entities of a certain type. Group Role may be assigned to both user and entity group and will provide permissions only for the entities that belong to the specified entity group. The assignment of the Role to the User Group is done using [Group Permission Controller](/swagger-ui.html#/group-permission-controller). You can specify parameters to filter the results. The result is wrapped with a PageData object that allows you to iterate over the result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_roles_using_get(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_size: Maximum amount of entities in a one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str type: Type of the role
:param str text_search: The case insensitive 'startsWith' filter based on the role name.
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataRole
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_roles_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501
else:
(data) = self.get_roles_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501
return data
def get_roles_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501
"""Get Roles (getRoles) # noqa: E501
        Returns a page of roles that are available for the current user. Role contains a set of permissions. Role has two types. Generic Role may be assigned to the user group and will provide permissions for all entities of a certain type. Group Role may be assigned to both user and entity group and will provide permissions only for the entities that belong to the specified entity group. The assignment of the Role to the User Group is done using [Group Permission Controller](/swagger-ui.html#/group-permission-controller). You can specify parameters to filter the results. The result is wrapped with a PageData object that allows you to iterate over the result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_roles_using_get_with_http_info(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_size: Maximum amount of entities in a one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str type: Type of the role
:param str text_search: The case insensitive 'startsWith' filter based on the role name.
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataRole
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_roles_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'page_size' is set
if ('page_size' not in params or
params['page_size'] is None):
raise ValueError("Missing the required parameter `page_size` when calling `get_roles_using_get`") # noqa: E501
# verify the required parameter 'page' is set
if ('page' not in params or
params['page'] is None):
raise ValueError("Missing the required parameter `page` when calling `get_roles_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'page_size' in params:
query_params.append(('pageSize', params['page_size'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'type' in params:
query_params.append(('type', params['type'])) # noqa: E501
if 'text_search' in params:
query_params.append(('textSearch', params['text_search'])) # noqa: E501
if 'sort_property' in params:
query_params.append(('sortProperty', params['sort_property'])) # noqa: E501
if 'sort_order' in params:
query_params.append(('sortOrder', params['sort_order'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/roles{?page,pageSize,sortOrder,sortProperty,textSearch,type}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageDataRole', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def save_role_using_post(self, **kwargs): # noqa: E501
"""Create Or Update Role (saveRole) # noqa: E501
        Creates or Updates the Role. When creating Role, platform generates Role Id as [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created Role id will be present in the response. Specify existing Role id to update the permission. Referencing non-existing Group Permission Id will cause 'Not Found' error. Role contains a set of permissions. Role has two types. Generic Role may be assigned to the user group and will provide permissions for all entities of a certain type. Group Role may be assigned to both user and entity group and will provide permissions only for the entities that belong to the specified entity group. The assignment of the Role to the User Group is done using [Group Permission Controller](/swagger-ui.html#/group-permission-controller). Example of Generic Role with read-only permissions for any resource and all permissions for the 'DEVICE' and 'PROFILE' resources is listed below: ```json { \"name\": \"Read-Only User\", \"type\": \"GENERIC\", \"permissions\": { \"ALL\": [ \"READ\", \"RPC_CALL\", \"READ_CREDENTIALS\", \"READ_ATTRIBUTES\", \"READ_TELEMETRY\" ], \"DEVICE\": [ \"ALL\" ], \"PROFILE\": [ \"ALL\" ] }, \"additionalInfo\": { \"description\": \"Read-only permissions for everything, Write permissions for devices and own profile.\" } } ``` Example of Group Role with read-only permissions. Note that the group role has no association with the resources. The type of the resource is taken from the entity group that this role is assigned to: ```json { \"name\": \"Entity Group Read-only User\", \"type\": \"GROUP\", \"permissions\": [ \"READ\", \"RPC_CALL\", \"READ_CREDENTIALS\", \"READ_ATTRIBUTES\", \"READ_TELEMETRY\" ], \"additionalInfo\": { \"description\": \"Read-only permissions.\" } } ``` Security check is performed to verify that the user has 'WRITE' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.save_role_using_post(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Role body:
:return: Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.save_role_using_post_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.save_role_using_post_with_http_info(**kwargs) # noqa: E501
return data
def save_role_using_post_with_http_info(self, **kwargs): # noqa: E501
"""Create Or Update Role (saveRole) # noqa: E501
        Creates or Updates the Role. When creating Role, platform generates Role Id as [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created Role id will be present in the response. Specify existing Role id to update the permission. Referencing non-existing Group Permission Id will cause 'Not Found' error. Role contains a set of permissions. Role has two types. Generic Role may be assigned to the user group and will provide permissions for all entities of a certain type. Group Role may be assigned to both user and entity group and will provide permissions only for the entities that belong to the specified entity group. The assignment of the Role to the User Group is done using [Group Permission Controller](/swagger-ui.html#/group-permission-controller). Example of Generic Role with read-only permissions for any resource and all permissions for the 'DEVICE' and 'PROFILE' resources is listed below: ```json { \"name\": \"Read-Only User\", \"type\": \"GENERIC\", \"permissions\": { \"ALL\": [ \"READ\", \"RPC_CALL\", \"READ_CREDENTIALS\", \"READ_ATTRIBUTES\", \"READ_TELEMETRY\" ], \"DEVICE\": [ \"ALL\" ], \"PROFILE\": [ \"ALL\" ] }, \"additionalInfo\": { \"description\": \"Read-only permissions for everything, Write permissions for devices and own profile.\" } } ``` Example of Group Role with read-only permissions. Note that the group role has no association with the resources. The type of the resource is taken from the entity group that this role is assigned to: ```json { \"name\": \"Entity Group Read-only User\", \"type\": \"GROUP\", \"permissions\": [ \"READ\", \"RPC_CALL\", \"READ_CREDENTIALS\", \"READ_ATTRIBUTES\", \"READ_TELEMETRY\" ], \"additionalInfo\": { \"description\": \"Read-only permissions.\" } } ``` Security check is performed to verify that the user has 'WRITE' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.save_role_using_post_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Role body:
:return: Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method save_role_using_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/role', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Role', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
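# Usage sketch (hedged; host/auth setup is omitted and would normally be
# configured on the ApiClient — see tb_rest_client's own docs). The example
# UUID below is the placeholder used throughout the docstrings above.
#
#   api = RoleControllerApi()          # uses a default ApiClient
#   first_page = api.get_roles_using_get(page_size=10, page=0)
#   role = api.get_role_by_id_using_get('784f394c-42b6-435a-983c-b7beff2784f9')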
| 53.752345 | 2,058 | 0.646806 |
0e2d7ff42d8230ea0b7194a7ae7e69884eeeca2c | 1,402 | py | Python | example_random_art.py | maxispeicher/docker_simulation_pipeline_example | 4537b036297a8fb591b7ae5656b100da541baace | ["MIT"] | 6 | 2020-12-08T10:42:29.000Z | 2022-03-25T19:34:37.000Z | example_random_art.py | maxispeicher/docker_simulation_pipeline_example | 4537b036297a8fb591b7ae5656b100da541baace | ["MIT"] | 1 | 2020-11-26T08:52:40.000Z | 2020-11-26T08:52:40.000Z | example_random_art.py | maxispeicher/docker_simulation_pipeline_example | 4537b036297a8fb591b7ae5656b100da541baace | ["MIT"] | 2 | 2020-11-25T14:30:24.000Z | 2020-11-26T09:39:32.000Z |
#!/usr/bin/env python
"""Example code to demonstrate DockerSimManager
This example generates random art in different scales and sizes
"""
from docker_sim_manager import DockerSimManager
from pathlib import Path
from docker_sim_manager import SimJob
__author__ = "Michael Wittmann"
__copyright__ = "Copyright 2020, Michael Wittmann"
__license__ = "MIT"
__version__ = "1.0.0"
__maintainer__ = "Michael Wittmann"
__email__ = "michael.wittmann@tum.de"
__status__ = "Example"
if __name__ == '__main__':
# Choose output directory on your host's file system
output_folder = Path.home().joinpath('example_docker_simulation').joinpath('random_art')
# Generate DockerSimManager object. Specify simulation container, number of parallel containers and output_path
docker_manager = DockerSimManager('ghcr.io/michaelwittmann/docker_simulation_pipeline_example/random-art-image',
10,
output_folder)
    # Add 18 simulation jobs to the job queue
for i in range(1,10):
docker_manager.add_sim_job(SimJob(f'randomArt_size{i}x{i}',None, command=f'-g {i} -i 30 -s 5000 -o /mnt/data -n 50'))
for i in range(1,10):
docker_manager.add_sim_job(SimJob(f'random_Art_invaders_{i}',None, command=f'-g 10 -i {i} -s 5000 -o /mnt/data -n 50'))
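    # (Hypothetical) an extra one-off job could be queued the same way before
    # computation starts, e.g.:
    #   docker_manager.add_sim_job(SimJob('custom_run', None,
    #       command='-g 5 -i 10 -s 1000 -o /mnt/data -n 10'))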
# Start computation
    docker_manager.start_computation()
| 35.948718 | 127 | 0.708987 |
b2a17853f79264fdea48f2c004ee05c5bebc8abb | 48 | py | Python | cfdata/tabular/processors/min_max/__init__.py | carefree0910/carefree-data | ae0f4ea5724b4efd5d76f2a9d420acf3322c1d19 | ["MIT"] | 9 | 2020-10-25T11:52:34.000Z | 2022-01-23T02:45:41.000Z | cfdata/tabular/processors/min_max/__init__.py | carefree0910/carefree-data | ae0f4ea5724b4efd5d76f2a9d420acf3322c1d19 | ["MIT"] | 2 | 2020-08-02T01:58:48.000Z | 2021-02-26T11:24:19.000Z | cfdata/tabular/processors/min_max/__init__.py | carefree0910/carefree-data | ae0f4ea5724b4efd5d76f2a9d420acf3322c1d19 | ["MIT"] | 1 | 2021-11-04T14:34:13.000Z | 2021-11-04T14:34:13.000Z |
from .core import MinMax
__all__ = ["MinMax"]
| 9.6 | 24 | 0.6875 |
0a21ed321e23f78e7323df868d974e6f85f082e8 | 13,714 | py | Python | sdk/python/pulumi_azure_native/web/v20160301/get_certificate.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null | sdk/python/pulumi_azure_native/web/v20160301/get_certificate.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null | sdk/python/pulumi_azure_native/web/v20160301/get_certificate.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetCertificateResult',
'AwaitableGetCertificateResult',
'get_certificate',
]
@pulumi.output_type
class GetCertificateResult:
"""
SSL certificate for an app.
"""
def __init__(__self__, cer_blob=None, expiration_date=None, friendly_name=None, geo_region=None, host_names=None, hosting_environment_profile=None, id=None, issue_date=None, issuer=None, key_vault_id=None, key_vault_secret_name=None, key_vault_secret_status=None, kind=None, location=None, name=None, pfx_blob=None, public_key_hash=None, self_link=None, server_farm_id=None, site_name=None, subject_name=None, tags=None, thumbprint=None, type=None, valid=None):
if cer_blob and not isinstance(cer_blob, str):
raise TypeError("Expected argument 'cer_blob' to be a str")
pulumi.set(__self__, "cer_blob", cer_blob)
if expiration_date and not isinstance(expiration_date, str):
raise TypeError("Expected argument 'expiration_date' to be a str")
pulumi.set(__self__, "expiration_date", expiration_date)
if friendly_name and not isinstance(friendly_name, str):
raise TypeError("Expected argument 'friendly_name' to be a str")
pulumi.set(__self__, "friendly_name", friendly_name)
if geo_region and not isinstance(geo_region, str):
raise TypeError("Expected argument 'geo_region' to be a str")
pulumi.set(__self__, "geo_region", geo_region)
if host_names and not isinstance(host_names, list):
raise TypeError("Expected argument 'host_names' to be a list")
pulumi.set(__self__, "host_names", host_names)
if hosting_environment_profile and not isinstance(hosting_environment_profile, dict):
raise TypeError("Expected argument 'hosting_environment_profile' to be a dict")
pulumi.set(__self__, "hosting_environment_profile", hosting_environment_profile)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if issue_date and not isinstance(issue_date, str):
raise TypeError("Expected argument 'issue_date' to be a str")
pulumi.set(__self__, "issue_date", issue_date)
if issuer and not isinstance(issuer, str):
raise TypeError("Expected argument 'issuer' to be a str")
pulumi.set(__self__, "issuer", issuer)
if key_vault_id and not isinstance(key_vault_id, str):
raise TypeError("Expected argument 'key_vault_id' to be a str")
pulumi.set(__self__, "key_vault_id", key_vault_id)
if key_vault_secret_name and not isinstance(key_vault_secret_name, str):
raise TypeError("Expected argument 'key_vault_secret_name' to be a str")
pulumi.set(__self__, "key_vault_secret_name", key_vault_secret_name)
if key_vault_secret_status and not isinstance(key_vault_secret_status, str):
raise TypeError("Expected argument 'key_vault_secret_status' to be a str")
pulumi.set(__self__, "key_vault_secret_status", key_vault_secret_status)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if pfx_blob and not isinstance(pfx_blob, str):
raise TypeError("Expected argument 'pfx_blob' to be a str")
pulumi.set(__self__, "pfx_blob", pfx_blob)
if public_key_hash and not isinstance(public_key_hash, str):
raise TypeError("Expected argument 'public_key_hash' to be a str")
pulumi.set(__self__, "public_key_hash", public_key_hash)
if self_link and not isinstance(self_link, str):
raise TypeError("Expected argument 'self_link' to be a str")
pulumi.set(__self__, "self_link", self_link)
if server_farm_id and not isinstance(server_farm_id, str):
raise TypeError("Expected argument 'server_farm_id' to be a str")
pulumi.set(__self__, "server_farm_id", server_farm_id)
if site_name and not isinstance(site_name, str):
raise TypeError("Expected argument 'site_name' to be a str")
pulumi.set(__self__, "site_name", site_name)
if subject_name and not isinstance(subject_name, str):
raise TypeError("Expected argument 'subject_name' to be a str")
pulumi.set(__self__, "subject_name", subject_name)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if thumbprint and not isinstance(thumbprint, str):
raise TypeError("Expected argument 'thumbprint' to be a str")
pulumi.set(__self__, "thumbprint", thumbprint)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if valid and not isinstance(valid, bool):
raise TypeError("Expected argument 'valid' to be a bool")
pulumi.set(__self__, "valid", valid)
@property
@pulumi.getter(name="cerBlob")
def cer_blob(self) -> str:
"""
Raw bytes of .cer file
"""
return pulumi.get(self, "cer_blob")
@property
@pulumi.getter(name="expirationDate")
def expiration_date(self) -> str:
"""
Certificate expiration date.
"""
return pulumi.get(self, "expiration_date")
@property
@pulumi.getter(name="friendlyName")
def friendly_name(self) -> str:
"""
Friendly name of the certificate.
"""
return pulumi.get(self, "friendly_name")
@property
@pulumi.getter(name="geoRegion")
def geo_region(self) -> str:
"""
Region of the certificate.
"""
return pulumi.get(self, "geo_region")
@property
@pulumi.getter(name="hostNames")
def host_names(self) -> Optional[Sequence[str]]:
"""
Host names the certificate applies to.
"""
return pulumi.get(self, "host_names")
@property
@pulumi.getter(name="hostingEnvironmentProfile")
def hosting_environment_profile(self) -> 'outputs.HostingEnvironmentProfileResponse':
"""
Specification for the App Service Environment to use for the certificate.
"""
return pulumi.get(self, "hosting_environment_profile")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="issueDate")
def issue_date(self) -> str:
"""
Certificate issue Date.
"""
return pulumi.get(self, "issue_date")
@property
@pulumi.getter
def issuer(self) -> str:
"""
Certificate issuer.
"""
return pulumi.get(self, "issuer")
@property
@pulumi.getter(name="keyVaultId")
def key_vault_id(self) -> Optional[str]:
"""
Key Vault Csm resource Id.
"""
return pulumi.get(self, "key_vault_id")
@property
@pulumi.getter(name="keyVaultSecretName")
def key_vault_secret_name(self) -> Optional[str]:
"""
Key Vault secret name.
"""
return pulumi.get(self, "key_vault_secret_name")
@property
@pulumi.getter(name="keyVaultSecretStatus")
def key_vault_secret_status(self) -> str:
"""
Status of the Key Vault secret.
"""
return pulumi.get(self, "key_vault_secret_status")
@property
@pulumi.getter
def kind(self) -> Optional[str]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def location(self) -> str:
"""
Resource Location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="pfxBlob")
def pfx_blob(self) -> Optional[str]:
"""
Pfx blob.
"""
return pulumi.get(self, "pfx_blob")
@property
@pulumi.getter(name="publicKeyHash")
def public_key_hash(self) -> str:
"""
Public key hash.
"""
return pulumi.get(self, "public_key_hash")
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> str:
"""
Self link.
"""
return pulumi.get(self, "self_link")
@property
@pulumi.getter(name="serverFarmId")
def server_farm_id(self) -> Optional[str]:
"""
Resource ID of the associated App Service plan, formatted as: "/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms/{appServicePlanName}".
"""
return pulumi.get(self, "server_farm_id")
@property
@pulumi.getter(name="siteName")
def site_name(self) -> str:
"""
App name.
"""
return pulumi.get(self, "site_name")
@property
@pulumi.getter(name="subjectName")
def subject_name(self) -> str:
"""
Subject name of the certificate.
"""
return pulumi.get(self, "subject_name")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def thumbprint(self) -> str:
"""
Certificate thumbprint.
"""
return pulumi.get(self, "thumbprint")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def valid(self) -> bool:
"""
Is the certificate valid?.
"""
return pulumi.get(self, "valid")
class AwaitableGetCertificateResult(GetCertificateResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetCertificateResult(
cer_blob=self.cer_blob,
expiration_date=self.expiration_date,
friendly_name=self.friendly_name,
geo_region=self.geo_region,
host_names=self.host_names,
hosting_environment_profile=self.hosting_environment_profile,
id=self.id,
issue_date=self.issue_date,
issuer=self.issuer,
key_vault_id=self.key_vault_id,
key_vault_secret_name=self.key_vault_secret_name,
key_vault_secret_status=self.key_vault_secret_status,
kind=self.kind,
location=self.location,
name=self.name,
pfx_blob=self.pfx_blob,
public_key_hash=self.public_key_hash,
self_link=self.self_link,
server_farm_id=self.server_farm_id,
site_name=self.site_name,
subject_name=self.subject_name,
tags=self.tags,
thumbprint=self.thumbprint,
type=self.type,
valid=self.valid)
def get_certificate(name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetCertificateResult:
"""
SSL certificate for an app.
:param str name: Name of the certificate.
:param str resource_group_name: Name of the resource group to which the resource belongs.
"""
__args__ = dict()
__args__['name'] = name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:web/v20160301:getCertificate', __args__, opts=opts, typ=GetCertificateResult).value
return AwaitableGetCertificateResult(
cer_blob=__ret__.cer_blob,
expiration_date=__ret__.expiration_date,
friendly_name=__ret__.friendly_name,
geo_region=__ret__.geo_region,
host_names=__ret__.host_names,
hosting_environment_profile=__ret__.hosting_environment_profile,
id=__ret__.id,
issue_date=__ret__.issue_date,
issuer=__ret__.issuer,
key_vault_id=__ret__.key_vault_id,
key_vault_secret_name=__ret__.key_vault_secret_name,
key_vault_secret_status=__ret__.key_vault_secret_status,
kind=__ret__.kind,
location=__ret__.location,
name=__ret__.name,
pfx_blob=__ret__.pfx_blob,
public_key_hash=__ret__.public_key_hash,
self_link=__ret__.self_link,
server_farm_id=__ret__.server_farm_id,
site_name=__ret__.site_name,
subject_name=__ret__.subject_name,
tags=__ret__.tags,
thumbprint=__ret__.thumbprint,
type=__ret__.type,
valid=__ret__.valid)
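# Usage sketch inside a Pulumi program ('my-cert' and 'my-rg' are
# placeholders for an existing certificate and resource group):
#
#   import pulumi
#   import pulumi_azure_native.web.v20160301 as web
#
#   cert = web.get_certificate(name='my-cert', resource_group_name='my-rg')
#   pulumi.export('thumbprint', cert.thumbprint)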
| 36.184697 | 465 | 0.638763 |
32c4f2ff6e6b726ad8286672a093b1873ea8acc6 | 8,489 | py | Python | autoPredictSimple.py | ksu-hmi/mouth-check | f659a8c76707eb7f974dc3fbdd93b1324315f91b | ["MIT"] | null | null | null | autoPredictSimple.py | ksu-hmi/mouth-check | f659a8c76707eb7f974dc3fbdd93b1324315f91b | ["MIT"] | null | null | null | autoPredictSimple.py | ksu-hmi/mouth-check | f659a8c76707eb7f974dc3fbdd93b1324315f91b | ["MIT"] | null | null | null |
#IMPORT
from keras.models import load_model
from PIL import Image
import numpy as np
import time
import matplotlib.pyplot as plt
import cv2
import sys
from visualizer import visualize
from sklearn.metrics import confusion_matrix, plot_confusion_matrix
import itertools
"""
Module used to make a prediction on new data.
"""
def main():
    """
    Define the access paths to the different hyperparameters.
    """
modelPath = 'C:\\model.h5'
imagePath = '.\\predict\\test1.jpg'
maskPath = '.\\predict\\mask1.png'
#predict(modelPath, imagePath)
predictNconfusion(modelPath, imagePath, maskPath)
def predictNconfusion(modelPath, imagePath, maskPath):
image = Image.open(imagePath).convert('RGB')
img = image.resize(size=(256, 256))
img = np.asarray(img, dtype=np.float32) / 255.
print("START LOAD")
model = load_model(modelPath, compile=False)
print("END LOAD")
dimension = img.shape
img = img.reshape(1, dimension[0], dimension[1], dimension[2])
prediction = model.predict(img)
    # Binarize the predicted probability map at 0.95.
    res = np.asarray(prediction[0])
    res[res >= 0.95] = 1
    res[res < 0.95] = 0
np.set_printoptions(threshold=sys.maxsize)
mask = Image.open(maskPath)
mask = mask.resize(size=(256, 256))
maskNp = np.asarray(mask)
#print(maskNp)
#visualize(image, mask)
res = res[:, :, 0]
print(maskNp.shape)
print(res.shape)
print(maskNp.dtype)
print(res.dtype)
res = res.astype(np.uint8)
test = confusion_matrix(maskNp.flatten(), res.flatten())
test = test.astype('float') / test.sum(axis=1)[:, np.newaxis]
#plot_confusion_matrix(clf, maskNp.flatten(), maskNp.flatten())
plt.figure()
cmap = plt.cm.Blues
    classes = ['background', 'caries']
title = 'Confusion matrix'
plt.imshow(test, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(2)
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
fmt = '.2f'
thresh = test.max() / 2.
for i, j in itertools.product(range(test.shape[0]), range(test.shape[1])):
plt.text(j, i, format(test[i, j], fmt), horizontalalignment="center",
color="white" if test[i, j] > thresh else "black")
plt.ylabel('True label')
plt.xlabel('Predicted label')
plt.tight_layout()
plt.show()
plt.imshow(res)
plt.show()
def predict(modelPath,imagePath):
"""
# Fonction qui permet de convertir une image en array, de charger le modele et de lui injecter notre image pour une prediction
:param modelPath: chemin du modèle au format hdf5
:param imagePath: chemin de l'image pour realiser une prediction
:param imageSize: défini la taille de l'image. IMPORTANT : doit être de la même taille que celle des images
du dataset d'entrainements
:param label: nom de nos 5 classes de sortie
"""
"""
# Translated to English: Function which allows to convert an image into an array, to load the model and to inject our image to it for a prediction
: param modelPath: model path in hdf5 format
: param imagePath: image path to make a prediction
: param imageSize: set the size of the image. IMPORTANT: Must be the same size as the pictures
of the training dataset
: param label: name of our 5 output classes
"""
start = time.time()
    # Loading the model
    print("Loading the model:\n")
    model = load_model(modelPath)
    print("\nModel loaded.")
    # Loading our image and preprocessing it
data = []
img = Image.open(imagePath).convert('RGB')
img = img.resize(size=(256, 256))
#img.load()
#img = img.resize(size=imageSize)
img = np.asarray(img, dtype=np.float32) / 255.
#img = np.asarray(img)
#data.append(img)
#data = np.asarray(data)
plt.imshow(img)
plt.show()
    # Reshape to match the input dimensions expected by the model:
    # Arg1: number of images injected
    # Arg2: image width
    # Arg3: image height
    # Arg4: number of image channels (1 for grayscale, 3 for color)
    #dimension = data[0].shape
dimension = img.shape
print(dimension)
    # Reshape from 3 to 4 dimensions for the network
    #data = data.astype(np.float32).reshape(data.shape[0], dimension[0], dimension[1], dimension[2])
img = img.reshape(1, dimension[0], dimension[1], dimension[2])
np.set_printoptions(threshold=sys.maxsize)
    # Run the prediction
prediction = model.predict(img)
res = np.asarray(prediction[0]*100)
print("PREDICTION\n")
print(res)
print(res.shape)
    # MULTICLASS
    #res = np.argmax(res, axis = 2)
res[res >= 0.95] = 255
res[res <= 0.1] = 0
#print(res)
#print(res.shape)
plt.imshow(res)
plt.show()
    '''
    pr_mask = model.predict(np.expand_dims(img, axis=0)).squeeze()
    # pr_mask.shape == (H, W, C)
    pr_mask = np.argmax(pr_mask, axis=2)
    # pr_mask.shape == (H, W)
    # to count the occurrences, say car is equal to 3 in the pr_mask
    num_car_pixels = numpy.count_nonzero(pr_mask == 3)
    percent_car_pixels = (num_car_pixels / (H * W)) * 100
    '''
'''
test11 = np.asarray(prediction[0], dtype=np.float32)
test22 = np.asarray(prediction[0], dtype=np.uint8)
test1 = np.asarray(prediction[0]*100, dtype=np.float32)
test2 = np.asarray(prediction[0]*100, dtype=np.uint8)
test3 = np.asarray(prediction[0]*255, dtype=np.uint8)
test4 = np.asarray(prediction[0]*255, dtype=np.float32)
cv2.imwrite("predict/predicted.jpg", cv2.cvtColor(prediction[0], cv2.COLOR_RGB2BGR))
cv2.imwrite("predict/test2.jpg", cv2.cvtColor(test2, cv2.COLOR_RGB2BGR))
cv2.imwrite("predict/test3.jpg", cv2.cvtColor(test3, cv2.COLOR_RGB2BGR))
cv2.imwrite("predict/test4.jpg", cv2.cvtColor(test4, cv2.COLOR_RGB2BGR))
plt.imshow(cv2.cvtColor(prediction[0], cv2.COLOR_RGB2BGR))
plt.show()
plt.imshow(cv2.cvtColor(test1, cv2.COLOR_RGB2BGR))
plt.show()
plt.imshow(test2)
plt.show()
plt.imshow(test3)
plt.show()
plt.imshow(test4)
plt.show()
plt.imshow(test11)
plt.show()
plt.imshow(test22)
plt.show()
class_index = np.argmax(prediction, axis=2)
colors = {0: [255, 255, 255]}
colored_image = np.array([colors[x] for x in np.nditer(class_index)],
dtype=np.uint8)
output_image = np.reshape(colored_image, (256, 256, 3))
plt.imshow(output_image)
plt.show()
'''
    # Retrieve the word corresponding to the predicted index
    #word = label[maxPredict]
    #pred = prediction[0][maxPredict] * 100.
end = time.time()
    # Display the predictions
print()
print('----------')
print(" Prediction :")
    print('prediction time: ' + "{0:.2f}secs".format(end-start))
print('----------')
if __name__ == "__main__":
"""
# MAIN
"""
main()
| 31.095238 | 150 | 0.659795 |
b754f826c4708a2d49c27e2125b2552eeb71f33b | 1,709 | py | Python | setoptconf/setting.py | carlio/setoptconf-tmp | 959733fa2babe3ae0afe3f8826977fdcf7b8c09a | [
"MIT"
] | null | null | null | setoptconf/setting.py | carlio/setoptconf-tmp | 959733fa2babe3ae0afe3f8826977fdcf7b8c09a | [
"MIT"
] | null | null | null | setoptconf/setting.py | carlio/setoptconf-tmp | 959733fa2babe3ae0afe3f8826977fdcf7b8c09a | [
"MIT"
] | null | null | null | # pylint: disable=W0401,W0223
import re
from .datatype import *
from .exception import NamingError
__all__ = (
"Setting",
"StringSetting",
"IntegerSetting",
"FloatSetting",
"BooleanSetting",
"ListSetting",
"ChoiceSetting",
)
class Setting(DataType):
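    # Setting names must start with a lowercase letter, end with a letter or a
    # digit, and may contain single (but not consecutive) underscores.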
RE_NAME = re.compile(r"^[a-z](?:[a-z0-9]|[_](?![_]))*[a-z0-9]$")
def __init__(self, name, default=None, required=False):
if Setting.RE_NAME.match(name):
self.name = name
else:
raise NamingError(name)
self._value = None
self.default = self.sanitize(default)
self.required = required
self.established = False
@property
def value(self):
return self._value
@value.setter
def value(self, value):
self._value = self.sanitize(value)
self.established = True
def __str__(self): # pragma: no cover
        return str(self.name)
def __repr__(self): # pragma: no cover
return "<%s(%s=%s)>" % (
self.__class__.__name__,
self.name,
self.value if self.established else "",
)
class StringSetting(Setting, String):
pass
class IntegerSetting(Setting, Integer):
pass
class FloatSetting(Setting, Float):
pass
class BooleanSetting(Setting, Boolean):
pass
class ListSetting(Setting, List):
def __init__(self, name, subtype, **kwargs):
List.__init__(self, subtype)
Setting.__init__(self, name, **kwargs)
class ChoiceSetting(Setting, Choice):
def __init__(self, name, choices, subtype=None, **kwargs):
Choice.__init__(self, choices, subtype=subtype)
Setting.__init__(self, name, **kwargs)
| 21.3625 | 68 | 0.617905 |
71d097087d8843b67adb12f4cbc261a11f212b20 | 718 | py | Python | setup.py | megacoder/fns | 7f3aae85bfff6c322ad895bae94131eda01f1afb | [
"MIT"
] | 1 | 2018-11-27T14:10:38.000Z | 2018-11-27T14:10:38.000Z | setup.py | megacoder/fns | 7f3aae85bfff6c322ad895bae94131eda01f1afb | [
"MIT"
] | null | null | null | setup.py | megacoder/fns | 7f3aae85bfff6c322ad895bae94131eda01f1afb | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# vim: noet sw=4 ts=4
from setuptools import setup
import glob
import os
NAME = 'fns'
VERSION = '1.0.2'
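# Write the resolved version into the package so it can be imported at runtime.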
with open( '{0}/version.py'.format( NAME ), 'w') as f:
    f.write('Version="{0}"\n'.format(VERSION))
setup(
name = NAME,
version = VERSION,
description = 'Traverse lots of directories, make unique hardlinks in one directory.',
author = 'Tommy Reynolds',
author_email = 'Tommy.Reynolds@MegaCoder.com',
license = 'MIT',
url = 'http://www.MegaCoder.com',
long_description = open('README.md').read(),
packages = [ NAME ],
entry_points = {
'console_scripts' : [
'{0}={0}.cli:main'.format( NAME )
],
},
)
| 23.16129 | 92 | 0.586351 |
d9aa76907831e086c366637615c425b63881eb5b | 1,875 | py | Python | rir-delegated-latest-requests.py | bashrootshell/ip-country-codes | ed286092dcd2f55c52f329abf76a9a2f509ee8d6 | [
"BSD-3-Clause"
] | null | null | null | rir-delegated-latest-requests.py | bashrootshell/ip-country-codes | ed286092dcd2f55c52f329abf76a9a2f509ee8d6 | [
"BSD-3-Clause"
] | null | null | null | rir-delegated-latest-requests.py | bashrootshell/ip-country-codes | ed286092dcd2f55c52f329abf76a9a2f509ee8d6 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
from requests import get
from sys import argv
from re import search
from math import log2
"""
Programmed with requests.
Global RIR IPv4 CIDR prefix extractor, by country.
It now searches for a particular CC in all RIRs:
RIPE NCC, APNIC, ARIN, LACNIC and AFRINIC
Usage: ./program.py countrycode (optional: file)
If a file isn't an argument, it prints prefixes to stdout.
PEP8 compliant
"Explicit is better than implicit."
— The Zen of Python
"""
RIRs = ("https://ftp.lacnic.net/pub/stats/ripencc/delegated-ripencc-latest",
"https://ftp.lacnic.net/pub/stats/apnic/delegated-apnic-latest",
"https://ftp.lacnic.net/pub/stats/arin/delegated-arin-extended-latest",
"https://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-latest",
"https://ftp.lacnic.net/pub/stats/afrinic/delegated-afrinic-latest")
if len(argv) > 1:
for url in RIRs:
# reads content from URLs one by one
for prefix in get(url).text.split():
regex = search(str(argv[1]) + '.*ipv4', prefix)
if regex: # searches for cc and ipv4 strings
netaddr = prefix.split("|")[3] # net addr
                bitmask = int(prefix.split("|")[4])  # number of addresses in the block
                cidrmask = int(32 - log2(bitmask))  # convert address count to CIDR prefix length
if len(argv) == 2:
print(f'{netaddr}/{cidrmask}') # prints to stdout
elif len(argv) == 3:
with open(f'{argv[2]}.txt', 'a') as file:
print(f'{netaddr}/{cidrmask}', file=file)
else:
    print('Please provide at least a universal country code. (Optional: a \
filename descriptor to save the results.)\n\
Ex: ./program.py GB (print to stdout) OR ./program.py GB ipaddr-gb.txt \
(write to file "ipaddr-gb.txt" as an example)')
| 39.0625 | 79 | 0.621333 |
a00f82f9ea722f36bc22e99b42a9b97612b4f4a4 | 18,467 | py | Python | ects/consensus/difficulty_adjustment.py | ects-io/ects-blockchain | a798034a8c8bce34d4b87fb2c98351d06f9eaf8e | [
"Apache-2.0"
] | null | null | null | ects/consensus/difficulty_adjustment.py | ects-io/ects-blockchain | a798034a8c8bce34d4b87fb2c98351d06f9eaf8e | [
"Apache-2.0"
] | null | null | null | ects/consensus/difficulty_adjustment.py | ects-io/ects-blockchain | a798034a8c8bce34d4b87fb2c98351d06f9eaf8e | [
"Apache-2.0"
] | null | null | null | from typing import List, Optional, Tuple
from ects.consensus.block_record import BlockRecord
from ects.consensus.blockchain_interface import BlockchainInterface
from ects.consensus.constants import ConsensusConstants
from ects.types.blockchain_format.sized_bytes import bytes32
from ects.util.ints import uint8, uint32, uint64, uint128
from ects.util.significant_bits import count_significant_bits, truncate_to_significant_bits
def _get_blocks_at_height(
blocks: BlockchainInterface,
prev_b: BlockRecord,
target_height: uint32,
max_num_blocks: uint32 = uint32(1),
) -> List[BlockRecord]:
"""
Return a consecutive list of BlockRecords starting at target_height, returning a maximum of
max_num_blocks. Assumes all block records are present. Does a slot linear search, if the blocks are not
in the path of the peak. Can only fetch ancestors of prev_b.
Args:
blocks: dict from header hash to BlockRecord.
prev_b: prev_b (to start backwards search).
target_height: target block to start
max_num_blocks: max number of blocks to fetch (although less might be fetched)
"""
if blocks.contains_height(prev_b.height):
header_hash = blocks.height_to_hash(prev_b.height)
if header_hash == prev_b.header_hash:
# Efficient fetching, since we are fetching ancestor blocks within the heaviest chain. We can directly
# use the height_to_block_record method
block_list: List[BlockRecord] = []
for h in range(target_height, target_height + max_num_blocks):
assert blocks.contains_height(uint32(h))
block_list.append(blocks.height_to_block_record(uint32(h)))
return block_list
# Slow fetching, goes back one by one, since we are in a fork
curr_b: BlockRecord = prev_b
target_blocks = []
while curr_b.height >= target_height:
if curr_b.height < target_height + max_num_blocks:
target_blocks.append(curr_b)
if curr_b.height == 0:
break
curr_b = blocks.block_record(curr_b.prev_hash)
return list(reversed(target_blocks))
def _get_second_to_last_transaction_block_in_previous_epoch(
constants: ConsensusConstants,
blocks: BlockchainInterface,
last_b: BlockRecord,
) -> BlockRecord:
"""
Retrieves the second to last transaction block in the previous epoch.
Args:
constants: consensus constants being used for this chain
blocks: dict from header hash to block of all relevant blocks
last_b: last-block in the current epoch, or last block we have seen, if potentially finishing epoch soon
prev epoch surpassed prev epoch started epoch sur. epoch started
v v v v
|.B...B....B. B....B...|......B....B.....B...B.|.B.B.B..|..B...B.B.B...|.B.B.B. B.|........
PREV EPOCH CURR EPOCH NEW EPOCH
The blocks selected for the timestamps are the second to last transaction blocks in each epoch.
Block at height 0 is an exception. Note that H mod EPOCH_BLOCKS where H is the height of the first block in the
epoch, must be >= 0, and < 128.
"""
# This height is guaranteed to be in the next epoch (even when last_b is not actually the last block)
height_in_next_epoch = (
last_b.height + 2 * constants.MAX_SUB_SLOT_BLOCKS + constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK + 5
)
height_epoch_surpass: uint32 = uint32(height_in_next_epoch - (height_in_next_epoch % constants.EPOCH_BLOCKS))
height_prev_epoch_surpass: uint32 = uint32(height_epoch_surpass - constants.EPOCH_BLOCKS)
    assert height_epoch_surpass % constants.EPOCH_BLOCKS == height_prev_epoch_surpass % constants.EPOCH_BLOCKS == 0
# Sanity check, don't go too far past epoch barrier
assert (height_in_next_epoch - height_epoch_surpass) < (5 * constants.MAX_SUB_SLOT_BLOCKS)
if height_prev_epoch_surpass == 0:
# The genesis block is an edge case, where we measure from the first block in epoch (height 0), as opposed to
# a block in the previous epoch, which would be height < 0
return _get_blocks_at_height(blocks, last_b, uint32(0))[0]
# If the prev slot is the first slot, the iterations start at 0
# We will compute the timestamps of the 2nd to last block in epoch, as well as the total iterations at infusion
prev_slot_start_iters: uint128
prev_slot_time_start: uint64
# The target block must be in this range. Either the surpass block must be a transaction block, or something
    # in its sub slot must be a transaction block. If that is the only transaction block in the sub-slot, the last
# block in the previous sub-slot from that must also be a transaction block (therefore -1 is used).
# The max height for the new epoch to start is surpass + 2*MAX_SUB_SLOT_BLOCKS + MIN_BLOCKS_PER_CHALLENGE_BLOCK - 3,
# since we might have a deficit > 0 when surpass is hit. The +3 is added just in case
fetched_blocks = _get_blocks_at_height(
blocks,
last_b,
uint32(height_prev_epoch_surpass - constants.MAX_SUB_SLOT_BLOCKS - 1),
uint32(3 * constants.MAX_SUB_SLOT_BLOCKS + constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK + 3),
)
# We want to find the last block in the slot at which we surpass the height.
# The last block in epoch will be before this.
fetched_index: int = constants.MAX_SUB_SLOT_BLOCKS
curr_b: BlockRecord = fetched_blocks[fetched_index]
fetched_index += 1
assert curr_b.height == height_prev_epoch_surpass - 1
next_b: BlockRecord = fetched_blocks[fetched_index]
assert next_b.height == height_prev_epoch_surpass
# Wait until the slot finishes with a challenge chain infusion at start of slot
# Note that there are no overflow blocks at the start of new epochs
while next_b.sub_epoch_summary_included is None:
curr_b = next_b
next_b = fetched_blocks[fetched_index]
fetched_index += 1
# Backtrack to find the second to last tx block
found_tx_block = 1 if curr_b.is_transaction_block else 0
while found_tx_block < 2:
curr_b = blocks.block_record(curr_b.prev_hash)
if curr_b.is_transaction_block:
found_tx_block += 1
return curr_b
def height_can_be_first_in_epoch(constants: ConsensusConstants, height: uint32) -> bool:
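    # True when the sub-epoch containing `height` begins on an epoch boundary.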
return (height - (height % constants.SUB_EPOCH_BLOCKS)) % constants.EPOCH_BLOCKS == 0
def can_finish_sub_and_full_epoch(
constants: ConsensusConstants,
blocks: BlockchainInterface,
height: uint32,
prev_header_hash: Optional[bytes32],
deficit: uint8,
block_at_height_included_ses: bool,
) -> Tuple[bool, bool]:
"""
Returns a bool tuple
first bool is true if the next sub-slot after height will form part of a new sub-epoch. Therefore
block height is the last block, and height + 1 is in a new sub-epoch.
second bool is true if the next sub-slot after height will form part of a new sub-epoch and epoch.
Therefore, block height is the last block, and height + 1 is in a new epoch.
Args:
constants: consensus constants being used for this chain
blocks: dictionary from header hash to SBR of all included SBR
height: block height of the (potentially) last block in the sub-epoch
prev_header_hash: prev_header hash of the block at height, assuming not genesis
deficit: deficit of block at height height
block_at_height_included_ses: whether or not the block at height height already included a SES
"""
if height < constants.SUB_EPOCH_BLOCKS - 1:
return False, False
assert prev_header_hash is not None
if deficit > 0:
return False, False
if block_at_height_included_ses:
# If we just included a sub_epoch_summary, we cannot include one again
return False, False
# This does not check the two edge cases where (height + 1) % constants.SUB_EPOCH_BLOCKS is 0 or 1
# If it's 0, height+1 is the first place that a sub-epoch can be included
# If it's 1, we just checked whether 0 included it in the previous check
if (height + 1) % constants.SUB_EPOCH_BLOCKS > 1:
curr: BlockRecord = blocks.block_record(prev_header_hash)
while curr.height % constants.SUB_EPOCH_BLOCKS > 0:
if curr.sub_epoch_summary_included is not None:
return False, False
curr = blocks.block_record(curr.prev_hash)
if curr.sub_epoch_summary_included is not None:
return False, False
# For checking new epoch, make sure the epoch blocks are aligned
return True, height_can_be_first_in_epoch(constants, uint32(height + 1))
def _get_next_sub_slot_iters(
constants: ConsensusConstants,
blocks: BlockchainInterface,
prev_header_hash: bytes32,
height: uint32,
curr_sub_slot_iters: uint64,
deficit: uint8,
block_at_height_included_ses: bool,
new_slot: bool,
signage_point_total_iters: uint128,
skip_epoch_check=False,
) -> uint64:
"""
Returns the slot iterations required for the next block after the one at height, where new_slot is true
iff the next block will be in the next slot. WARNING: assumes that the block at height is not the first block
in a sub-epoch.
Args:
constants: consensus constants being used for this chain
blocks: dictionary from header hash to SBR of all included SBR
prev_header_hash: header hash of the previous block
height: the block height of the block to look at
curr_sub_slot_iters: sub-slot iters at the infusion point of the block at height
deficit: deficit of block at height height
new_slot: whether or not there is a new slot after height
signage_point_total_iters: signage point iters of the block at height
skip_epoch_check: don't check correct epoch
"""
next_height: uint32 = uint32(height + 1)
if next_height < constants.EPOCH_BLOCKS:
return uint64(constants.SUB_SLOT_ITERS_STARTING)
if not blocks.contains_block(prev_header_hash):
raise ValueError(f"Header hash {prev_header_hash} not in blocks")
prev_b: BlockRecord = blocks.block_record(prev_header_hash)
# If we are in the same epoch, return same ssi
if not skip_epoch_check:
_, can_finish_epoch = can_finish_sub_and_full_epoch(
constants, blocks, height, prev_header_hash, deficit, block_at_height_included_ses
)
if not new_slot or not can_finish_epoch:
return curr_sub_slot_iters
last_block_prev: BlockRecord = _get_second_to_last_transaction_block_in_previous_epoch(constants, blocks, prev_b)
# This gets the last transaction block before this block's signage point. Assuming the block at height height
# is the last block infused in the epoch: If this block ends up being a
# transaction block, then last_block_curr will be the second to last tx block in the epoch. If this block
# is not a transaction block, that means there was exactly one other tx block included in between our signage
# point and infusion point, and therefore last_block_curr is the second to last as well.
last_block_curr = prev_b
while last_block_curr.total_iters > signage_point_total_iters or not last_block_curr.is_transaction_block:
last_block_curr = blocks.block_record(last_block_curr.prev_hash)
assert last_block_curr.timestamp is not None and last_block_prev.timestamp is not None
# This is computed as the iterations per second in last epoch, times the target number of seconds per slot
new_ssi_precise: uint64 = uint64(
constants.SUB_SLOT_TIME_TARGET
* (last_block_curr.total_iters - last_block_prev.total_iters)
// (last_block_curr.timestamp - last_block_prev.timestamp)
)
# Only change by a max factor as a sanity check
max_ssi = uint64(constants.DIFFICULTY_CHANGE_MAX_FACTOR * last_block_curr.sub_slot_iters)
min_ssi = uint64(last_block_curr.sub_slot_iters // constants.DIFFICULTY_CHANGE_MAX_FACTOR)
if new_ssi_precise >= last_block_curr.sub_slot_iters:
new_ssi_precise = uint64(min(new_ssi_precise, max_ssi))
else:
new_ssi_precise = uint64(max([constants.NUM_SPS_SUB_SLOT, new_ssi_precise, min_ssi]))
new_ssi = truncate_to_significant_bits(new_ssi_precise, constants.SIGNIFICANT_BITS)
new_ssi = uint64(new_ssi - new_ssi % constants.NUM_SPS_SUB_SLOT) # Must divide the sub slot
assert count_significant_bits(new_ssi) <= constants.SIGNIFICANT_BITS
return new_ssi
def _get_next_difficulty(
constants: ConsensusConstants,
blocks: BlockchainInterface,
prev_header_hash: bytes32,
height: uint32,
current_difficulty: uint64,
deficit: uint8,
block_at_height_included_ses: bool,
new_slot: bool,
signage_point_total_iters: uint128,
skip_epoch_check=False,
) -> uint64:
"""
Returns the difficulty of the next block that extends onto block.
Used to calculate the number of iterations. WARNING: assumes that the block at height is not the first block
in a sub-epoch.
Args:
constants: consensus constants being used for this chain
blocks: dictionary from header hash to SBR of all included SBR
prev_header_hash: header hash of the previous block
height: the block height of the block to look at
deficit: deficit of block at height height
current_difficulty: difficulty at the infusion point of the block at height
new_slot: whether or not there is a new slot after height
signage_point_total_iters: signage point iters of the block at height
skip_epoch_check: don't check correct epoch
"""
next_height: uint32 = uint32(height + 1)
if next_height < (constants.EPOCH_BLOCKS - 3 * constants.MAX_SUB_SLOT_BLOCKS):
# We are in the first epoch
return uint64(constants.DIFFICULTY_STARTING)
if not blocks.contains_block(prev_header_hash):
raise ValueError(f"Header hash {prev_header_hash} not in blocks")
prev_b: BlockRecord = blocks.block_record(prev_header_hash)
# If we are in the same slot as previous block, return same difficulty
if not skip_epoch_check:
_, can_finish_epoch = can_finish_sub_and_full_epoch(
constants, blocks, height, prev_header_hash, deficit, block_at_height_included_ses
)
if not new_slot or not can_finish_epoch:
return current_difficulty
last_block_prev: BlockRecord = _get_second_to_last_transaction_block_in_previous_epoch(constants, blocks, prev_b)
# This gets the last transaction block before this block's signage point. Assuming the block at height height
# is the last block infused in the epoch: If this block ends up being a
# transaction block, then last_block_curr will be the second to last tx block in the epoch. If this block
# is not a transaction block, that means there was exactly one other tx block included in between our signage
# point and infusion point, and therefore last_block_curr is the second to last as well.
last_block_curr = prev_b
while last_block_curr.total_iters > signage_point_total_iters or not last_block_curr.is_transaction_block:
last_block_curr = blocks.block_record(last_block_curr.prev_hash)
assert last_block_curr.timestamp is not None
assert last_block_prev.timestamp is not None
actual_epoch_time: uint64 = uint64(last_block_curr.timestamp - last_block_prev.timestamp)
old_difficulty = uint64(prev_b.weight - blocks.block_record(prev_b.prev_hash).weight)
# Terms are rearranged so there is only one division.
new_difficulty_precise = uint64(
(last_block_curr.weight - last_block_prev.weight)
* constants.SUB_SLOT_TIME_TARGET
// (constants.SLOT_BLOCKS_TARGET * actual_epoch_time)
)
# Only change by a max factor, to prevent attacks, as in greenpaper, and must be at least 1
max_diff = uint64(constants.DIFFICULTY_CHANGE_MAX_FACTOR * old_difficulty)
min_diff = uint64(old_difficulty // constants.DIFFICULTY_CHANGE_MAX_FACTOR)
if new_difficulty_precise >= old_difficulty:
new_difficulty_precise = uint64(min(new_difficulty_precise, max_diff))
else:
new_difficulty_precise = uint64(max([uint64(1), new_difficulty_precise, min_diff]))
new_difficulty = truncate_to_significant_bits(new_difficulty_precise, constants.SIGNIFICANT_BITS)
assert count_significant_bits(new_difficulty) <= constants.SIGNIFICANT_BITS
return uint64(new_difficulty)
def get_next_sub_slot_iters_and_difficulty(
constants: ConsensusConstants,
is_first_in_sub_slot: bool,
prev_b: Optional[BlockRecord],
blocks: BlockchainInterface,
) -> Tuple[uint64, uint64]:
"""
Retrieves the current sub_slot iters and difficulty of the next block after prev_b.
Args:
constants: consensus constants being used for this chain
is_first_in_sub_slot: Whether the next block is the first in the sub slot
prev_b: the previous block (last block in the epoch)
blocks: dictionary from header hash to SBR of all included SBR
"""
# genesis
if prev_b is None:
return constants.SUB_SLOT_ITERS_STARTING, constants.DIFFICULTY_STARTING
if prev_b.height != 0:
prev_difficulty: uint64 = uint64(prev_b.weight - blocks.block_record(prev_b.prev_hash).weight)
else:
# prev block is genesis
prev_difficulty = uint64(prev_b.weight)
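    # If the previous block included a sub-epoch summary, its record already
    # reflects the updated sub-slot iters and difficulty.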
if prev_b.sub_epoch_summary_included is not None:
return prev_b.sub_slot_iters, prev_difficulty
sp_total_iters = prev_b.sp_total_iters(constants)
difficulty: uint64 = _get_next_difficulty(
constants,
blocks,
prev_b.prev_hash,
prev_b.height,
prev_difficulty,
prev_b.deficit,
False, # Already checked above
is_first_in_sub_slot,
sp_total_iters,
)
sub_slot_iters: uint64 = _get_next_sub_slot_iters(
constants,
blocks,
prev_b.prev_hash,
prev_b.height,
prev_b.sub_slot_iters,
prev_b.deficit,
False, # Already checked above
is_first_in_sub_slot,
sp_total_iters,
)
return sub_slot_iters, difficulty
| 44.931873 | 120 | 0.720583 |
f7683bfe7c98c1157843468c70095dc610eced0e | 9,067 | py | Python | custom_scripts/skipgram_modified.py | aday651/embed-asym-exeriments | 986f147425c3e4f42c04a25f69577bbeef6b3c23 | [
"MIT"
] | 2 | 2021-07-13T05:14:47.000Z | 2021-07-14T20:11:18.000Z | custom_scripts/skipgram_modified.py | aday651/embed-asym-exeriments | 986f147425c3e4f42c04a25f69577bbeef6b3c23 | [
"MIT"
] | null | null | null | custom_scripts/skipgram_modified.py | aday651/embed-asym-exeriments | 986f147425c3e4f42c04a25f69577bbeef6b3c23 | [
"MIT"
] | null | null | null | import tensorflow as tf
from custom_scripts.node_classifier_modified import make_node_classifier
def make_skipgram(**kwargs):
""" Uses the skipgram objective for relational data
Returns
-------
A model function for skipgram edge prediction (with a nonsense vertex classifier attached for testing convenience)
"""
def make_label_logits(embeddings, features, mode, params):
        # TODO: the batch dimension of `embeddings` is dynamic, so the static
        # shape used below may be None at graph-construction time; the
        # commented-out tf.shape variant would size the zeros tensor dynamically.
#return tf.zeros([tf.shape(embeddings)[0], params['n_labels']],
# dtype=tf.float32)
return tf.zeros([embeddings.get_shape().as_list()[0],
params['n_labels']],
dtype=tf.float32)
def make_no_label_loss(logits, present_labels, split):
return tf.constant(0, dtype=tf.float32)
return make_node_classifier(make_label_logits=make_label_logits,
make_edge_logits=_make_edge_list_logits,
make_label_pred_loss=make_no_label_loss,
make_edge_pred_loss=make_simple_skipgram_loss(None),
**kwargs)
def make_multilabel_logistic_regression(label_task_weight=0.5, regularization=0., clip=None, **kwargs):
""" Uses the skipgram objective for relational data, and predicts labels with logistic regression
using the skipgram embeddings as the features.
Parameters
----------
label_task_weight: the weight for the label task (between 0 and 1). By default, the label and edge
task are weighted equally.
clip: if not None, the value to clip the edge loss at.
kwargs: additional arguments are forwarded to the `make_node_classifier` template.
Returns
-------
A model function for simple multilabel logistic regression.
"""
def make_label_logits(embeddings, features, mode, params):
# actually computes 0.5 * \sum w^2, so it should just reproduce sklearn
regularizer = tf.keras.regularizers.l2(l=0.5 * (label_task_weight * regularization))
layer = tf.compat.v1.layers.dense(
embeddings, params['n_labels'], activation=None, use_bias=True,
kernel_regularizer=regularizer,
bias_regularizer=regularizer,
name='logits_labels')
return layer
edge_task_weight = 1 - label_task_weight
return make_node_classifier(
make_label_logits=make_label_logits,
make_edge_logits=_make_edge_list_logits,
make_label_pred_loss=make_weighted_loss(_make_label_sigmoid_cross_entropy_loss, label_task_weight),
make_edge_pred_loss=make_weighted_loss(make_simple_skipgram_loss(clip), edge_task_weight),
**kwargs)
def make_multilabel_deep_logistic_regression():
""" Uses the skipgram objective for relational data, and predicts labels with deep logistic regression
using the skipgram embeddings as the features
Returns
-------
a function be passed to model_fn
"""
def make_label_logits(embeddings, features, mode, params):
        net = embeddings
        for units in params['hidden_units']:
            # Stack hidden layers: each layer consumes the previous layer's output.
            net = tf.compat.v1.layers.dense(net, units=units, activation=tf.nn.relu)
        return tf.compat.v1.layers.dense(net, params['n_labels'], activation=None)
return make_node_classifier(make_label_logits=make_label_logits,
make_edge_logits=_make_edge_list_logits,
make_label_pred_loss=_make_label_sigmoid_cross_entropy_loss,
make_edge_pred_loss=make_simple_skipgram_loss(12))
#
# helper functions follow
#
def _make_label_sigmoid_cross_entropy_loss(logits, present_labels, split):
""" Helper function to create label loss
Parameters
----------
logits: tensor of shape [batch_size, num_verts, num_labels]
present_labels: tensor of shape [batch_size, num_verts, num_labels]; labels of labelled verts
split: tensor of shape [batch_size, num_verts], 0 if censored, 1 if not censored
Returns
-------
The cross-entropy loss corresponding to the label.
"""
if len(logits.shape) == 3:
batch_size = tf.cast(tf.shape(input=logits)[0], dtype=tf.float32)
else:
batch_size = 1
label_pred_losses = tf.compat.v1.losses.sigmoid_cross_entropy(
present_labels, logits=logits, weights=tf.expand_dims(split, -1), reduction=tf.compat.v1.losses.Reduction.NONE)
# sum rather than (tf default of) mean because ¯\_(ツ)_/¯
label_pred_loss = tf.reduce_sum(input_tensor=label_pred_losses)
return label_pred_loss / batch_size
def make_weighted_loss(loss_fn, weight=1.0):
""" Adapts the given loss function by multiplying by a given constant.
Parameters
----------
loss_fn: a function to create the loss
weight: the value by which to weigh the loss.
Returns
-------
fn: The adapted loss
"""
def fn(*args, **kwargs):
loss = loss_fn(*args, **kwargs)
if weight != 0:
return weight * loss
else:
return tf.constant(0.0, dtype=loss.dtype)
return fn
def _make_edge_list_logits(embeddings, features, edge_list, weights, params):
""" Helper function to create the skipgram loss for edge structure
Parameters
----------
embeddings: the embeddings features for the current subgraph.
features: features from tensorflow dataset (not used)
edge_list: edge list of the subgraph
weights: weights of the edges in the subgraph
params: other parameters
Returns
-------
a tensor representing the edge prediction loss.
"""
with tf.compat.v1.name_scope('edge_list_logits'):
        # Choose the bilinear form for the edge logits based on params["indef_ip"]:
        # an indefinite inner product when set, a standard dot product otherwise.
if params["indef_ip"]:
diag = tf.ones(int(float(params["embedding_dim"])/2),
dtype=tf.float32)
dm = tf.linalg.diag(tf.concat([diag, -1*diag], 0))
pairwise_inner_prods = tf.matmul(embeddings,
tf.matmul(embeddings, dm), transpose_b=True, name='all_edges_logit')
else:
pairwise_inner_prods = tf.matmul(embeddings, embeddings, transpose_b=True, name='all_edges_logit')
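        # Support both unbatched ([E, 2]) and batched ([B, E, 2]) edge lists by
        # normalizing to the batched form below.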
if len(edge_list.shape) == 2:
edge_list = tf.expand_dims(edge_list, axis=0)
pairwise_inner_prods = tf.expand_dims(pairwise_inner_prods, axis=0)
no_batch = True
else:
no_batch = False
edge_list_shape = tf.shape(input=edge_list)
batch_size = edge_list.shape[0] if edge_list.shape[0] is not None else edge_list_shape[0]
num_edges = edge_list.shape[1] if edge_list.shape[1] is not None else edge_list_shape[1]
batch_index = tf.tile(
tf.expand_dims(tf.expand_dims(tf.range(batch_size), -1), -1),
tf.stack([1, num_edges, 1]))
edge_index = tf.concat([batch_index, edge_list], axis=-1)
edge_logit = tf.gather_nd(pairwise_inner_prods, edge_index)
if no_batch:
edge_logit = tf.squeeze(edge_logit, axis=0)
return edge_logit
def make_simple_skipgram_loss(clip=None):
""" Makes a simple skipgram loss for edge prediction from a given edge list.
This function takes a simple edge list and does not further modify it. In particular,
it does not apply any transformation such as windowing or pruning.
Parameters
----------
clip: If not None, a value to clip the individual losses at.
Returns
-------
loss: a function which computes the loss.
"""
def loss(edge_logits, num_vertex, edge_list, edge_weights, params):
with tf.compat.v1.name_scope('skipgram_loss', values=[edge_logits, edge_list, edge_weights]):
if len(edge_list.shape) == 3:
batch_size = tf.cast(tf.shape(input=edge_list)[0], dtype=tf.float32)
else:
batch_size = 1.
edge_present = tf.cast(tf.equal(edge_weights, 1), dtype=tf.float32)
# values of -1 in the weights indicate padded edges which should be ignored
# in loss computation.
edge_censored = tf.cast(tf.not_equal(edge_weights, -1), dtype=tf.float32)
edge_pred_loss = tf.nn.sigmoid_cross_entropy_with_logits(
labels=edge_present, logits=edge_logits)
edge_pred_loss = edge_pred_loss * edge_censored
if clip:
edge_pred_loss = tf.clip_by_value(edge_pred_loss, 0, clip)
# sum instead of (tf default of) mean because mean screws up learning rates for embeddings
loss_value = tf.divide(tf.reduce_sum(input_tensor=edge_pred_loss), batch_size,
name='skipgram_edge_loss')
return loss_value
return loss | 38.096639 | 119 | 0.656667 |
3fea20dbdcbb61cd724817eecc436dde24e0214c | 1,631 | py | Python | api/app/resources/bookings/exam/exam_post.py | neilz0r/queue-management | 45226bdc4bd547c48c6df2e11a751133d48e0855 | [
"Apache-2.0"
] | 1 | 2019-10-04T23:30:14.000Z | 2019-10-04T23:30:14.000Z | api/app/resources/bookings/exam/exam_post.py | Alireja2/queue-management | b133b9ca279d03b6ebe8900fd9c73a97f6a0863b | [
"Apache-2.0"
] | null | null | null | api/app/resources/bookings/exam/exam_post.py | Alireja2/queue-management | b133b9ca279d03b6ebe8900fd9c73a97f6a0863b | [
"Apache-2.0"
] | null | null | null | '''Copyright 2018 Province of British Columbia
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.'''
import logging
from flask import request, g
from flask_restplus import Resource
from app.models.theq import CSR
from app.schemas.bookings import ExamSchema
from qsystem import api, api_call_with_retry, db, oidc
@api.route("/exams/", methods=["POST"])
class ExamPost(Resource):
exam_schema = ExamSchema()
@oidc.accept_token(require_token=True)
@api_call_with_retry
def post(self):
csr = CSR.find_by_username(g.oidc_token_info['username'])
json_data = request.get_json()
exam, warning = self.exam_schema.load(json_data)
if warning:
logging.warning("WARNING: %s", warning)
return {"message": warning}, 422
if exam.office_id == csr.office_id or csr.liaison_designate == 1:
db.session.add(exam)
db.session.commit()
result = self.exam_schema.dump(exam)
return {"exam": result.data,
"errors": result.errors}, 201
else:
return {"The Exam Office ID and CSR Office ID do not match!"}, 403
| 30.773585 | 78 | 0.694666 |
ad276f63404272925e9e00ac3454cd6f10e0aa74 | 1,499 | py | Python | joongram/users/tests/test_views.py | joonoaspdosa/joongram | 6901e2fb829eff5eae01d38b6cd969032d6e8101 | [
"MIT"
] | 1 | 2019-05-04T06:20:24.000Z | 2019-05-04T06:20:24.000Z | joongram/users/tests/test_views.py | joonoaspdosa/joongram | 6901e2fb829eff5eae01d38b6cd969032d6e8101 | [
"MIT"
] | null | null | null | joongram/users/tests/test_views.py | joonoaspdosa/joongram | 6901e2fb829eff5eae01d38b6cd969032d6e8101 | [
"MIT"
] | null | null | null | import pytest
from django.conf import settings
from django.test import RequestFactory
from joongram.users.views import UserRedirectView, UserUpdateView
pytestmark = pytest.mark.django_db
class TestUserUpdateView:
"""
TODO:
extracting view initialization code as class-scoped fixture
would be great if only pytest-django supported non-function-scoped
fixture db access -- this is a work-in-progress for now:
https://github.com/pytest-dev/pytest-django/pull/258
"""
def test_get_success_url(
self, user: settings.AUTH_USER_MODEL, request_factory: RequestFactory
):
view = UserUpdateView()
request = request_factory.get("/fake-url/")
request.user = user
view.request = request
assert view.get_success_url() == f"/users/{user.username}/"
def test_get_object(
self, user: settings.AUTH_USER_MODEL, request_factory: RequestFactory
):
view = UserUpdateView()
request = request_factory.get("/fake-url/")
request.user = user
view.request = request
assert view.get_object() == user
class TestUserRedirectView:
def test_get_redirect_url(
self, user: settings.AUTH_USER_MODEL, request_factory: RequestFactory
):
view = UserRedirectView()
request = request_factory.get("/fake-url")
request.user = user
view.request = request
assert view.get_redirect_url() == f"/users/{user.username}/"
| 28.283019 | 77 | 0.675117 |
dece6092697de5098460dfeb9b6e12027b479c97 | 477 | py | Python | NST_Hotbox/W_hotbox/Single/BlacksMatch/001.py | CreativeLyons/NST_Hotbox | 48d23a651d9578a70b16bcc2c034de4b3586883f | [
"MIT"
] | null | null | null | NST_Hotbox/W_hotbox/Single/BlacksMatch/001.py | CreativeLyons/NST_Hotbox | 48d23a651d9578a70b16bcc2c034de4b3586883f | [
"MIT"
] | null | null | null | NST_Hotbox/W_hotbox/Single/BlacksMatch/001.py | CreativeLyons/NST_Hotbox | 48d23a651d9578a70b16bcc2c034de4b3586883f | [
"MIT"
] | null | null | null | #----------------------------------------------------------------------------------------------------------
#
# AUTOMATICALLY GENERATED FILE TO BE USED BY W_HOTBOX
#
# NAME: Toggle Premult Input
#
#----------------------------------------------------------------------------------------------------------
ns = nuke.selectedNodes()
for n in ns:
knob = n.knob('premultInput')
currentState = knob.getValue()
newState = abs(currentState-1)
knob.setValue(newState)
| 31.8 | 107 | 0.381551 |
81334859f46800e94afb5d19afb41fd05332cbd9 | 5,140 | py | Python | parlai/tasks/light_dialog/worlds.py | kifish/ParlAI | 93a0f31f3d6b03a97c1a081927427dbe1eb1242e | [
"MIT"
] | 7 | 2020-04-23T06:43:45.000Z | 2021-01-22T12:24:07.000Z | parlai/tasks/light_dialog/worlds.py | kifish/ParlAI | 93a0f31f3d6b03a97c1a081927427dbe1eb1242e | [
"MIT"
] | 14 | 2020-03-13T19:08:56.000Z | 2020-05-12T07:38:41.000Z | parlai/tasks/light_dialog/worlds.py | kifish/ParlAI | 93a0f31f3d6b03a97c1a081927427dbe1eb1242e | [
"MIT"
] | 4 | 2020-08-06T19:38:51.000Z | 2021-07-08T11:36:48.000Z | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from parlai.core.worlds import create_task
from parlai.core.worlds import DialogPartnerWorld, validate
from parlai.tasks.self_chat.worlds import InteractiveWorld as SelfChatBaseWorld
from parlai.agents.repeat_label.repeat_label import RepeatLabelAgent
from copy import deepcopy
import random
import pickle
import os
class InteractiveSimpleWorld(DialogPartnerWorld):
def __init__(self, opt, agents, shared=None):
super().__init__(opt, agents, shared)
print("[ loading personas.. ]")
self.load_personas()
def load_personas(self):
# Create Light data so we can assign personas.
light_opt = self.opt.copy()
light_opt['task'] = 'light_dialog'
light_opt['interactive_task'] = False
light_agent = RepeatLabelAgent(light_opt)
self.light_world = create_task(light_opt, light_agent)
self.cnt = 0
def get_new_personas(self):
# Find a new episode
while True:
self.light_world.parley()
msg = self.light_world.get_acts()[0]
if msg.get('episode_done', False):
self.light_world.parley()
msg = self.light_world.get_acts()[0]
break
txt = msg.get('text', '').split('\n')
a1_persona = "" # (typically human in interactive)
a2_persona = ""
p = {}
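        # Map each context tag (the first token, e.g. '_setting_name') to its full line.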
for t in txt:
p[t.split(' ')[0]] = t
a1_persona = (
' _task_speech\n'
+ p['_setting_name']
+ '\n'
+ p['_setting_desc']
+ '\n'
+ p['_self_name'].replace("_self_name", '_partner_name')
+ '\n'
+ p['_partner_name'].replace("_partner_name", '_self_name')
+ '\n'
+ '_self_persona I am a '
+ ' '.join(p['_partner_name'].split(' ')[1:])
+ '.\n'
)
a2_persona = (
' _task_speech\n'
+ p['_setting_name']
+ '\n'
+ p['_setting_desc']
+ '\n'
+ p['_partner_name']
+ '\n'
+ p['_self_name']
+ '\n'
+ p['_self_persona']
+ '\n'
)
return a1_persona, a2_persona
def parley(self):
"""
Agent 0 goes first.
Alternate between the two agents.
"""
if self.cnt == 0:
self.p1, self.p2 = self.get_new_personas()
acts = self.acts
agents = self.agents
if self.cnt == 0:
# add the persona on to the first message to agent 0
act = {}
act['text'] = self.p1
act['episode_done'] = False
act['id'] = 'persona'
agents[0].observe(validate(act))
act = deepcopy(agents[0].act())
acts[0] = act
if self.cnt == 0:
# add the persona on to the first message to agent 1
act.force_set('text', self.p2 + act.get('text', 'hi'))
agents[1].observe(validate(act))
else:
agents[1].observe(validate(act))
acts[1] = agents[1].act()
agents[0].observe(validate(acts[1]))
self.update_counters()
self.cnt += 1
if act['episode_done']:
print("CHAT DONE ")
print("\n... preparing new chat... \n")
self.cnt = 0
class InteractiveSelfchatWorld(SelfChatBaseWorld):
def init_contexts(self):
print('[ loading contexts.. ]')
data_path = os.path.join(
self.opt['datapath'], 'light_dialogue', 'light_environment.pkl'
)
file = open(data_path, 'rb')
self.db = pickle.load(file)
# compact list of rooms
rs = []
for _k, r in self.db['rooms'].items():
rs.append(r)
self.db['rooms'] = rs
# compact list of characters
cs = []
for _k, c in self.db['characters'].items():
cs.append(c)
self.db['all_characters'] = cs
def make_context(self, room, c1, c2):
s = '_task_speech\n'
s += (
'_setting_name '
+ room.get('setting', '')
+ ', '
+ room.get('category', '')
+ '\n'
)
s += '_setting_desc ' + room.get('description', '') + '\n'
s += '_partner_name ' + c2.get('name', '') + '\n'
s += '_self_name ' + c1.get('name', '') + '\n'
s += '_self_persona ' + random.choice(c1.get('personas', ['']))
return s
def get_contexts(self):
room = random.choice(self.db['rooms'])
if len(room.get('in_characters', [])) > 0:
c1 = self.db['characters'][random.choice(room['in_characters'])]
else:
c1 = random.choice(self.db['all_characters'])
c2 = random.choice(self.db['all_characters'])
p1 = self.make_context(room, c1, c2)
p2 = self.make_context(room, c2, c1)
return [p1, p2]
| 32.125 | 79 | 0.52607 |
85a3b52ea18e6bf61f28f7ddbe412707a262e2e6 | 45,410 | py | Python | sdk/python/pulumi_azure_native/servicefabricmesh/v20180701preview/_inputs.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | 31 | 2020-09-21T09:41:01.000Z | 2021-02-26T13:21:59.000Z | sdk/python/pulumi_azure_native/servicefabricmesh/v20180701preview/_inputs.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | 231 | 2020-09-21T09:38:45.000Z | 2021-03-01T11:16:03.000Z | sdk/python/pulumi_azure_native/servicefabricmesh/v20180701preview/_inputs.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | 4 | 2020-09-29T14:14:59.000Z | 2021-02-10T20:38:16.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from ._enums import *
__all__ = [
'AzureInternalMonitoringPipelineSinkDescriptionArgs',
'ContainerCodePackagePropertiesArgs',
'ContainerLabelArgs',
'ContainerVolumeArgs',
'DiagnosticsDescriptionArgs',
'DiagnosticsRefArgs',
'EndpointPropertiesArgs',
'EnvironmentVariableArgs',
'ImageRegistryCredentialArgs',
'IngressConfigArgs',
'Layer4IngressConfigArgs',
'NetworkRefArgs',
'ResourceLimitsArgs',
'ResourceRequestsArgs',
'ResourceRequirementsArgs',
'ServiceResourceDescriptionArgs',
'SettingArgs',
'VolumeProviderParametersAzureFileArgs',
]
@pulumi.input_type
class AzureInternalMonitoringPipelineSinkDescriptionArgs:
def __init__(__self__, *,
kind: pulumi.Input[str],
account_name: Optional[pulumi.Input[str]] = None,
auto_key_config_url: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
fluentd_config_url: Optional[Any] = None,
ma_config_url: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
namespace: Optional[pulumi.Input[str]] = None):
"""
Diagnostics settings for Geneva.
:param pulumi.Input[str] kind: The kind of DiagnosticsSink.
Expected value is 'AzureInternalMonitoringPipeline'.
:param pulumi.Input[str] account_name: Azure Internal monitoring pipeline account.
:param pulumi.Input[str] auto_key_config_url: Azure Internal monitoring pipeline autokey associated with the certificate.
:param pulumi.Input[str] description: A description of the sink.
:param Any fluentd_config_url: Azure Internal monitoring agent fluentd configuration.
:param pulumi.Input[str] ma_config_url: Azure Internal monitoring agent configuration.
:param pulumi.Input[str] name: Name of the sink. This value is referenced by DiagnosticsReferenceDescription
:param pulumi.Input[str] namespace: Azure Internal monitoring pipeline account namespace.
"""
pulumi.set(__self__, "kind", 'AzureInternalMonitoringPipeline')
if account_name is not None:
pulumi.set(__self__, "account_name", account_name)
if auto_key_config_url is not None:
pulumi.set(__self__, "auto_key_config_url", auto_key_config_url)
if description is not None:
pulumi.set(__self__, "description", description)
if fluentd_config_url is not None:
pulumi.set(__self__, "fluentd_config_url", fluentd_config_url)
if ma_config_url is not None:
pulumi.set(__self__, "ma_config_url", ma_config_url)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
@property
@pulumi.getter
def kind(self) -> pulumi.Input[str]:
"""
The kind of DiagnosticsSink.
Expected value is 'AzureInternalMonitoringPipeline'.
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: pulumi.Input[str]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter(name="accountName")
def account_name(self) -> Optional[pulumi.Input[str]]:
"""
Azure Internal monitoring pipeline account.
"""
return pulumi.get(self, "account_name")
@account_name.setter
def account_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "account_name", value)
@property
@pulumi.getter(name="autoKeyConfigUrl")
def auto_key_config_url(self) -> Optional[pulumi.Input[str]]:
"""
Azure Internal monitoring pipeline autokey associated with the certificate.
"""
return pulumi.get(self, "auto_key_config_url")
@auto_key_config_url.setter
def auto_key_config_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "auto_key_config_url", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
A description of the sink.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="fluentdConfigUrl")
def fluentd_config_url(self) -> Optional[Any]:
"""
Azure Internal monitoring agent fluentd configuration.
"""
return pulumi.get(self, "fluentd_config_url")
@fluentd_config_url.setter
def fluentd_config_url(self, value: Optional[Any]):
pulumi.set(self, "fluentd_config_url", value)
@property
@pulumi.getter(name="maConfigUrl")
def ma_config_url(self) -> Optional[pulumi.Input[str]]:
"""
Azure Internal monitoring agent configuration.
"""
return pulumi.get(self, "ma_config_url")
@ma_config_url.setter
def ma_config_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ma_config_url", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the sink. This value is referenced by DiagnosticsReferenceDescription
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
"""
Azure Internal monitoring pipeline account namespace.
"""
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@pulumi.input_type
class ContainerCodePackagePropertiesArgs:
def __init__(__self__, *,
image: pulumi.Input[str],
name: pulumi.Input[str],
resources: pulumi.Input['ResourceRequirementsArgs'],
commands: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
diagnostics: Optional[pulumi.Input['DiagnosticsRefArgs']] = None,
endpoints: Optional[pulumi.Input[Sequence[pulumi.Input['EndpointPropertiesArgs']]]] = None,
entrypoint: Optional[pulumi.Input[str]] = None,
environment_variables: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentVariableArgs']]]] = None,
image_registry_credential: Optional[pulumi.Input['ImageRegistryCredentialArgs']] = None,
labels: Optional[pulumi.Input[Sequence[pulumi.Input['ContainerLabelArgs']]]] = None,
settings: Optional[pulumi.Input[Sequence[pulumi.Input['SettingArgs']]]] = None,
volume_refs: Optional[pulumi.Input[Sequence[pulumi.Input['ContainerVolumeArgs']]]] = None):
"""
Describes a container and its runtime properties.
:param pulumi.Input[str] image: The Container image to use.
:param pulumi.Input[str] name: The name of the code package.
:param pulumi.Input['ResourceRequirementsArgs'] resources: This type describes the resource requirements for a container or a service.
:param pulumi.Input[Sequence[pulumi.Input[str]]] commands: Command array to execute within the container in exec form.
:param pulumi.Input['DiagnosticsRefArgs'] diagnostics: Reference to sinks in DiagnosticsDescription.
:param pulumi.Input[Sequence[pulumi.Input['EndpointPropertiesArgs']]] endpoints: The endpoints exposed by this container.
:param pulumi.Input[str] entrypoint: Override for the default entry point in the container.
:param pulumi.Input[Sequence[pulumi.Input['EnvironmentVariableArgs']]] environment_variables: The environment variables to set in this container
:param pulumi.Input['ImageRegistryCredentialArgs'] image_registry_credential: Image registry credential.
:param pulumi.Input[Sequence[pulumi.Input['ContainerLabelArgs']]] labels: The labels to set in this container.
:param pulumi.Input[Sequence[pulumi.Input['SettingArgs']]] settings: The settings to set in this container. The setting file path can be fetched from environment variable "Fabric_SettingPath". The path for Windows container is "C:\\secrets". The path for Linux container is "/var/secrets".
:param pulumi.Input[Sequence[pulumi.Input['ContainerVolumeArgs']]] volume_refs: The volumes to be attached to the container.
"""
pulumi.set(__self__, "image", image)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "resources", resources)
if commands is not None:
pulumi.set(__self__, "commands", commands)
if diagnostics is not None:
pulumi.set(__self__, "diagnostics", diagnostics)
if endpoints is not None:
pulumi.set(__self__, "endpoints", endpoints)
if entrypoint is not None:
pulumi.set(__self__, "entrypoint", entrypoint)
if environment_variables is not None:
pulumi.set(__self__, "environment_variables", environment_variables)
if image_registry_credential is not None:
pulumi.set(__self__, "image_registry_credential", image_registry_credential)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if settings is not None:
pulumi.set(__self__, "settings", settings)
if volume_refs is not None:
pulumi.set(__self__, "volume_refs", volume_refs)
@property
@pulumi.getter
def image(self) -> pulumi.Input[str]:
"""
The Container image to use.
"""
return pulumi.get(self, "image")
@image.setter
def image(self, value: pulumi.Input[str]):
pulumi.set(self, "image", value)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
The name of the code package.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def resources(self) -> pulumi.Input['ResourceRequirementsArgs']:
"""
This type describes the resource requirements for a container or a service.
"""
return pulumi.get(self, "resources")
@resources.setter
def resources(self, value: pulumi.Input['ResourceRequirementsArgs']):
pulumi.set(self, "resources", value)
@property
@pulumi.getter
def commands(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Command array to execute within the container in exec form.
"""
return pulumi.get(self, "commands")
@commands.setter
def commands(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "commands", value)
@property
@pulumi.getter
def diagnostics(self) -> Optional[pulumi.Input['DiagnosticsRefArgs']]:
"""
Reference to sinks in DiagnosticsDescription.
"""
return pulumi.get(self, "diagnostics")
@diagnostics.setter
def diagnostics(self, value: Optional[pulumi.Input['DiagnosticsRefArgs']]):
pulumi.set(self, "diagnostics", value)
@property
@pulumi.getter
def endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EndpointPropertiesArgs']]]]:
"""
The endpoints exposed by this container.
"""
return pulumi.get(self, "endpoints")
@endpoints.setter
def endpoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EndpointPropertiesArgs']]]]):
pulumi.set(self, "endpoints", value)
@property
@pulumi.getter
def entrypoint(self) -> Optional[pulumi.Input[str]]:
"""
Override for the default entry point in the container.
"""
return pulumi.get(self, "entrypoint")
@entrypoint.setter
def entrypoint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "entrypoint", value)
@property
@pulumi.getter(name="environmentVariables")
def environment_variables(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentVariableArgs']]]]:
"""
The environment variables to set in this container.
"""
return pulumi.get(self, "environment_variables")
@environment_variables.setter
def environment_variables(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentVariableArgs']]]]):
pulumi.set(self, "environment_variables", value)
@property
@pulumi.getter(name="imageRegistryCredential")
def image_registry_credential(self) -> Optional[pulumi.Input['ImageRegistryCredentialArgs']]:
"""
Image registry credential.
"""
return pulumi.get(self, "image_registry_credential")
@image_registry_credential.setter
def image_registry_credential(self, value: Optional[pulumi.Input['ImageRegistryCredentialArgs']]):
pulumi.set(self, "image_registry_credential", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ContainerLabelArgs']]]]:
"""
The labels to set in this container.
"""
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ContainerLabelArgs']]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter
def settings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SettingArgs']]]]:
"""
The settings to set in this container. The setting file path can be fetched from the environment variable "Fabric_SettingPath". The path for a Windows container is "C:\\secrets"; for a Linux container it is "/var/secrets".
"""
return pulumi.get(self, "settings")
@settings.setter
def settings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SettingArgs']]]]):
pulumi.set(self, "settings", value)
@property
@pulumi.getter(name="volumeRefs")
def volume_refs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ContainerVolumeArgs']]]]:
"""
The volumes to be attached to the container.
"""
return pulumi.get(self, "volume_refs")
@volume_refs.setter
def volume_refs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ContainerVolumeArgs']]]]):
pulumi.set(self, "volume_refs", value)
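# --- Illustrative usage (editor's sketch; not part of the generated SDK) ---
# A minimal code package, assuming the ResourceRequestsArgs/ResourceRequirementsArgs
# and EndpointPropertiesArgs types defined later in this module; the image tag
# and names are invented for illustration.
#
#   code_package = ContainerCodePackagePropertiesArgs(
#       image="nginx:latest",
#       name="web",
#       resources=ResourceRequirementsArgs(
#           requests=ResourceRequestsArgs(cpu=1.0, memory_in_gb=1.0),
#       ),
#       endpoints=[EndpointPropertiesArgs(name="http", port=80)],
#   )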
@pulumi.input_type
class ContainerLabelArgs:
def __init__(__self__, *,
name: pulumi.Input[str],
value: pulumi.Input[str]):
"""
Describes a container label.
:param pulumi.Input[str] name: The name of the container label.
:param pulumi.Input[str] value: The value of the container label.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
The name of the container label.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def value(self) -> pulumi.Input[str]:
"""
The value of the container label.
"""
return pulumi.get(self, "value")
@value.setter
def value(self, value: pulumi.Input[str]):
pulumi.set(self, "value", value)
@pulumi.input_type
class ContainerVolumeArgs:
def __init__(__self__, *,
destination_path: pulumi.Input[str],
name: pulumi.Input[str],
read_only: Optional[pulumi.Input[bool]] = None):
"""
Describes how a volume is attached to a container.
:param pulumi.Input[str] destination_path: The path within the container at which the volume should be mounted. Only valid path characters are allowed.
:param pulumi.Input[str] name: Name of the volume.
:param pulumi.Input[bool] read_only: The flag indicating whether the volume is read-only. Default is 'false'.
"""
pulumi.set(__self__, "destination_path", destination_path)
pulumi.set(__self__, "name", name)
if read_only is not None:
pulumi.set(__self__, "read_only", read_only)
@property
@pulumi.getter(name="destinationPath")
def destination_path(self) -> pulumi.Input[str]:
"""
The path within the container at which the volume should be mounted. Only valid path characters are allowed.
"""
return pulumi.get(self, "destination_path")
@destination_path.setter
def destination_path(self, value: pulumi.Input[str]):
pulumi.set(self, "destination_path", value)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
Name of the volume.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="readOnly")
def read_only(self) -> Optional[pulumi.Input[bool]]:
"""
The flag indicating whether the volume is read-only. Default is 'false'.
"""
return pulumi.get(self, "read_only")
@read_only.setter
def read_only(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "read_only", value)
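# --- Illustrative usage (editor's sketch; not part of the generated SDK) ---
# Attaching a named volume read-only; "app-data" is assumed to match a volume
# resource declared elsewhere in the deployment.
#
#   data_volume = ContainerVolumeArgs(
#       destination_path="/mnt/data",  # mount point inside the container
#       name="app-data",
#       read_only=True,                # omitted -> defaults to 'false'
#   )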
@pulumi.input_type
class DiagnosticsDescriptionArgs:
def __init__(__self__, *,
default_sink_refs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
sinks: Optional[pulumi.Input[Sequence[pulumi.Input['AzureInternalMonitoringPipelineSinkDescriptionArgs']]]] = None):
"""
Describes the diagnostics options available.
:param pulumi.Input[Sequence[pulumi.Input[str]]] default_sink_refs: The sinks to be used if diagnostics is enabled. Sink choices can be overridden at the service and code package level.
:param pulumi.Input[bool] enabled: Indicates whether or not sinks are enabled.
:param pulumi.Input[Sequence[pulumi.Input['AzureInternalMonitoringPipelineSinkDescriptionArgs']]] sinks: List of supported sinks that can be referenced.
"""
if default_sink_refs is not None:
pulumi.set(__self__, "default_sink_refs", default_sink_refs)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if sinks is not None:
pulumi.set(__self__, "sinks", sinks)
@property
@pulumi.getter(name="defaultSinkRefs")
def default_sink_refs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The sinks to be used if diagnostics is enabled. Sink choices can be overridden at the service and code package level.
"""
return pulumi.get(self, "default_sink_refs")
@default_sink_refs.setter
def default_sink_refs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "default_sink_refs", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Indicates whether or not sinks are enabled.
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter
def sinks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AzureInternalMonitoringPipelineSinkDescriptionArgs']]]]:
"""
List of supported sinks that can be referenced.
"""
return pulumi.get(self, "sinks")
@sinks.setter
def sinks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AzureInternalMonitoringPipelineSinkDescriptionArgs']]]]):
pulumi.set(self, "sinks", value)
@pulumi.input_type
class DiagnosticsRefArgs:
def __init__(__self__, *,
enabled: Optional[pulumi.Input[bool]] = None,
sink_refs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
Reference to sinks in DiagnosticsDescription.
:param pulumi.Input[bool] enabled: Indicates whether or not sinks are enabled.
:param pulumi.Input[Sequence[pulumi.Input[str]]] sink_refs: List of sinks to be used if enabled. References the list of sinks in DiagnosticsDescription.
"""
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if sink_refs is not None:
pulumi.set(__self__, "sink_refs", sink_refs)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Indicates whether or not sinks are enabled.
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="sinkRefs")
def sink_refs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of sinks to be used if enabled. References the list of sinks in DiagnosticsDescription.
"""
return pulumi.get(self, "sink_refs")
@sink_refs.setter
def sink_refs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "sink_refs", value)
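# --- Illustrative usage (editor's sketch; not part of the generated SDK) ---
# A DiagnosticsDescriptionArgs declares the sinks available to an application;
# a DiagnosticsRefArgs on a service or code package then opts in by sink name.
# The sink name "appInsights" is invented for illustration.
#
#   diag_ref = DiagnosticsRefArgs(enabled=True, sink_refs=["appInsights"])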
@pulumi.input_type
class EndpointPropertiesArgs:
def __init__(__self__, *,
name: pulumi.Input[str],
port: Optional[pulumi.Input[int]] = None):
"""
Describes a container endpoint.
:param pulumi.Input[str] name: The name of the endpoint.
:param pulumi.Input[int] port: Port used by the container.
"""
pulumi.set(__self__, "name", name)
if port is not None:
pulumi.set(__self__, "port", port)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
The name of the endpoint.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
"""
Port used by the container.
"""
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "port", value)
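# --- Illustrative usage (editor's sketch; not part of the generated SDK) ---
# Exposing a container port under a name that ingress rules can refer to:
#
#   http_endpoint = EndpointPropertiesArgs(name="http", port=8080)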
@pulumi.input_type
class EnvironmentVariableArgs:
def __init__(__self__, *,
name: Optional[pulumi.Input[str]] = None,
value: Optional[pulumi.Input[str]] = None):
"""
Describes an environment variable for the container.
:param pulumi.Input[str] name: The name of the environment variable.
:param pulumi.Input[str] value: The value of the environment variable.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if value is not None:
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the environment variable.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def value(self) -> Optional[pulumi.Input[str]]:
"""
The value of the environment variable.
"""
return pulumi.get(self, "value")
@value.setter
def value(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "value", value)
@pulumi.input_type
class ImageRegistryCredentialArgs:
def __init__(__self__, *,
server: pulumi.Input[str],
username: pulumi.Input[str],
password: Optional[pulumi.Input[str]] = None):
"""
Image registry credential.
:param pulumi.Input[str] server: Docker image registry server, without a protocol prefix such as `http` or `https`.
:param pulumi.Input[str] username: The username for the private registry.
:param pulumi.Input[str] password: The password for the private registry.
"""
pulumi.set(__self__, "server", server)
pulumi.set(__self__, "username", username)
if password is not None:
pulumi.set(__self__, "password", password)
@property
@pulumi.getter
def server(self) -> pulumi.Input[str]:
"""
Docker image registry server, without a protocol prefix such as `http` or `https`.
"""
return pulumi.get(self, "server")
@server.setter
def server(self, value: pulumi.Input[str]):
pulumi.set(self, "server", value)
@property
@pulumi.getter
def username(self) -> pulumi.Input[str]:
"""
The username for the private registry.
"""
return pulumi.get(self, "username")
@username.setter
def username(self, value: pulumi.Input[str]):
pulumi.set(self, "username", value)
@property
@pulumi.getter
def password(self) -> Optional[pulumi.Input[str]]:
"""
The password for the private registry.
"""
return pulumi.get(self, "password")
@password.setter
def password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "password", value)
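# --- Illustrative usage (editor's sketch; not part of the generated SDK) ---
# Pulling from a private registry; the server is given without a protocol
# prefix, and the password is read from Pulumi config ("registryPassword" is
# an assumed config key) so it stays out of source control.
#
#   registry = ImageRegistryCredentialArgs(
#       server="myregistry.azurecr.io",
#       username="myregistry",
#       password=pulumi.Config().require_secret("registryPassword"),
#   )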
@pulumi.input_type
class IngressConfigArgs:
def __init__(__self__, *,
layer4: Optional[pulumi.Input[Sequence[pulumi.Input['Layer4IngressConfigArgs']]]] = None,
qos_level: Optional[pulumi.Input[Union[str, 'IngressQoSLevel']]] = None):
"""
Describes public connectivity configuration for the network.
:param pulumi.Input[Sequence[pulumi.Input['Layer4IngressConfigArgs']]] layer4: Configuration for layer4 public connectivity for this network.
:param pulumi.Input[Union[str, 'IngressQoSLevel']] qos_level: The QoS tier for ingress.
"""
if layer4 is not None:
pulumi.set(__self__, "layer4", layer4)
if qos_level is not None:
pulumi.set(__self__, "qos_level", qos_level)
@property
@pulumi.getter
def layer4(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['Layer4IngressConfigArgs']]]]:
"""
Configuration for layer4 public connectivity for this network.
"""
return pulumi.get(self, "layer4")
@layer4.setter
def layer4(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['Layer4IngressConfigArgs']]]]):
pulumi.set(self, "layer4", value)
@property
@pulumi.getter(name="qosLevel")
def qos_level(self) -> Optional[pulumi.Input[Union[str, 'IngressQoSLevel']]]:
"""
The QoS tier for ingress.
"""
return pulumi.get(self, "qos_level")
@qos_level.setter
def qos_level(self, value: Optional[pulumi.Input[Union[str, 'IngressQoSLevel']]]):
pulumi.set(self, "qos_level", value)
@pulumi.input_type
class Layer4IngressConfigArgs:
def __init__(__self__, *,
application_name: Optional[pulumi.Input[str]] = None,
endpoint_name: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
public_port: Optional[pulumi.Input[int]] = None,
service_name: Optional[pulumi.Input[str]] = None):
"""
Describes the layer4 configuration for public connectivity for this network.
:param pulumi.Input[str] application_name: The name of the application that contains the service to be exposed.
:param pulumi.Input[str] endpoint_name: The service endpoint that needs to be exposed.
:param pulumi.Input[str] name: Layer4 ingress config name.
:param pulumi.Input[int] public_port: Specifies the public port at which the given service endpoint needs to be exposed.
:param pulumi.Input[str] service_name: The service whose endpoint needs to be exposed at the public port.
"""
if application_name is not None:
pulumi.set(__self__, "application_name", application_name)
if endpoint_name is not None:
pulumi.set(__self__, "endpoint_name", endpoint_name)
if name is not None:
pulumi.set(__self__, "name", name)
if public_port is not None:
pulumi.set(__self__, "public_port", public_port)
if service_name is not None:
pulumi.set(__self__, "service_name", service_name)
@property
@pulumi.getter(name="applicationName")
def application_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the application that contains the service to be exposed.
"""
return pulumi.get(self, "application_name")
@application_name.setter
def application_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "application_name", value)
@property
@pulumi.getter(name="endpointName")
def endpoint_name(self) -> Optional[pulumi.Input[str]]:
"""
The service endpoint that needs to be exposed.
"""
return pulumi.get(self, "endpoint_name")
@endpoint_name.setter
def endpoint_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "endpoint_name", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Layer4 ingress config name.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="publicPort")
def public_port(self) -> Optional[pulumi.Input[int]]:
"""
Specifies the public port at which the given service endpoint needs to be exposed.
"""
return pulumi.get(self, "public_port")
@public_port.setter
def public_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "public_port", value)
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> Optional[pulumi.Input[str]]:
"""
The service whose endpoint needs to be exposed at the public port.
"""
return pulumi.get(self, "service_name")
@service_name.setter
def service_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_name", value)
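# --- Illustrative usage (editor's sketch; not part of the generated SDK) ---
# Exposing the endpoint named "http" of service "web" in application "myApp"
# on public port 80 (all names invented; "Bronze" is one documented
# IngressQoSLevel value):
#
#   ingress = IngressConfigArgs(
#       qos_level="Bronze",
#       layer4=[Layer4IngressConfigArgs(
#           name="webIngress",
#           public_port=80,
#           application_name="myApp",
#           service_name="web",
#           endpoint_name="http",
#       )],
#   )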
@pulumi.input_type
class NetworkRefArgs:
def __init__(__self__, *,
name: Optional[pulumi.Input[str]] = None):
"""
Describes a network reference in a service.
:param pulumi.Input[str] name: Name of the network.
"""
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the network.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class ResourceLimitsArgs:
def __init__(__self__, *,
cpu: Optional[pulumi.Input[float]] = None,
memory_in_gb: Optional[pulumi.Input[float]] = None):
"""
This type describes the resource limits for a given container. It describes the maximum amount of resources a container is allowed to use before being restarted.
:param pulumi.Input[float] cpu: CPU limits in cores. At present, only full cores are supported.
:param pulumi.Input[float] memory_in_gb: The memory limit in GB.
"""
if cpu is not None:
pulumi.set(__self__, "cpu", cpu)
if memory_in_gb is not None:
pulumi.set(__self__, "memory_in_gb", memory_in_gb)
@property
@pulumi.getter
def cpu(self) -> Optional[pulumi.Input[float]]:
"""
CPU limits in cores. At present, only full cores are supported.
"""
return pulumi.get(self, "cpu")
@cpu.setter
def cpu(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "cpu", value)
@property
@pulumi.getter(name="memoryInGB")
def memory_in_gb(self) -> Optional[pulumi.Input[float]]:
"""
The memory limit in GB.
"""
return pulumi.get(self, "memory_in_gb")
@memory_in_gb.setter
def memory_in_gb(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "memory_in_gb", value)
@pulumi.input_type
class ResourceRequestsArgs:
def __init__(__self__, *,
cpu: pulumi.Input[float],
memory_in_gb: pulumi.Input[float]):
"""
This type describes the requested resources for a given container. It describes the minimum amount of resources required for the container. A container can consume more than the requested resources, up to the specified limits, before being restarted. Currently, the requested resources are treated as limits.
:param pulumi.Input[float] cpu: Requested number of CPU cores. At present, only full cores are supported.
:param pulumi.Input[float] memory_in_gb: The memory request in GB for this container.
"""
pulumi.set(__self__, "cpu", cpu)
pulumi.set(__self__, "memory_in_gb", memory_in_gb)
@property
@pulumi.getter
def cpu(self) -> pulumi.Input[float]:
"""
Requested number of CPU cores. At present, only full cores are supported.
"""
return pulumi.get(self, "cpu")
@cpu.setter
def cpu(self, value: pulumi.Input[float]):
pulumi.set(self, "cpu", value)
@property
@pulumi.getter(name="memoryInGB")
def memory_in_gb(self) -> pulumi.Input[float]:
"""
The memory request in GB for this container.
"""
return pulumi.get(self, "memory_in_gb")
@memory_in_gb.setter
def memory_in_gb(self, value: pulumi.Input[float]):
pulumi.set(self, "memory_in_gb", value)
@pulumi.input_type
class ResourceRequirementsArgs:
def __init__(__self__, *,
requests: pulumi.Input['ResourceRequestsArgs'],
limits: Optional[pulumi.Input['ResourceLimitsArgs']] = None):
"""
This type describes the resource requirements for a container or a service.
:param pulumi.Input['ResourceRequestsArgs'] requests: Describes the requested resources for a given container.
:param pulumi.Input['ResourceLimitsArgs'] limits: Describes the maximum limits on the resources for a given container.
"""
pulumi.set(__self__, "requests", requests)
if limits is not None:
pulumi.set(__self__, "limits", limits)
@property
@pulumi.getter
def requests(self) -> pulumi.Input['ResourceRequestsArgs']:
"""
Describes the requested resources for a given container.
"""
return pulumi.get(self, "requests")
@requests.setter
def requests(self, value: pulumi.Input['ResourceRequestsArgs']):
pulumi.set(self, "requests", value)
@property
@pulumi.getter
def limits(self) -> Optional[pulumi.Input['ResourceLimitsArgs']]:
"""
Describes the maximum limits on the resources for a given container.
"""
return pulumi.get(self, "limits")
@limits.setter
def limits(self, value: Optional[pulumi.Input['ResourceLimitsArgs']]):
pulumi.set(self, "limits", value)
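# --- Illustrative usage (editor's sketch; not part of the generated SDK) ---
# Requests are mandatory, limits optional; per the docstrings above, only full
# CPU cores are supported and requests are currently treated as limits.
#
#   resources = ResourceRequirementsArgs(
#       requests=ResourceRequestsArgs(cpu=1.0, memory_in_gb=1.0),
#       limits=ResourceLimitsArgs(cpu=2.0, memory_in_gb=2.0),
#   )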
@pulumi.input_type
class ServiceResourceDescriptionArgs:
def __init__(__self__, *,
code_packages: pulumi.Input[Sequence[pulumi.Input['ContainerCodePackagePropertiesArgs']]],
os_type: pulumi.Input[Union[str, 'OperatingSystemTypes']],
description: Optional[pulumi.Input[str]] = None,
diagnostics: Optional[pulumi.Input['DiagnosticsRefArgs']] = None,
health_state: Optional[pulumi.Input[Union[str, 'HealthState']]] = None,
name: Optional[pulumi.Input[str]] = None,
network_refs: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkRefArgs']]]] = None,
replica_count: Optional[pulumi.Input[int]] = None):
"""
This type describes a service resource.
:param pulumi.Input[Sequence[pulumi.Input['ContainerCodePackagePropertiesArgs']]] code_packages: Describes the set of code packages that form the service. A code package describes the container and the properties for running it. All the code packages are started together on the same host and share the same context (network, process, etc.).
:param pulumi.Input[Union[str, 'OperatingSystemTypes']] os_type: The operating system type required by the code in the service.
:param pulumi.Input[str] description: User readable description of the service.
:param pulumi.Input['DiagnosticsRefArgs'] diagnostics: Reference to sinks in DiagnosticsDescription.
:param pulumi.Input[Union[str, 'HealthState']] health_state: The health state of a resource such as Application, Service, or Network.
:param pulumi.Input[str] name: The name of the resource.
:param pulumi.Input[Sequence[pulumi.Input['NetworkRefArgs']]] network_refs: The names of the private networks that this service needs to be part of.
:param pulumi.Input[int] replica_count: The number of replicas of the service to create. Defaults to 1 if not specified.
"""
pulumi.set(__self__, "code_packages", code_packages)
pulumi.set(__self__, "os_type", os_type)
if description is not None:
pulumi.set(__self__, "description", description)
if diagnostics is not None:
pulumi.set(__self__, "diagnostics", diagnostics)
if health_state is not None:
pulumi.set(__self__, "health_state", health_state)
if name is not None:
pulumi.set(__self__, "name", name)
if network_refs is not None:
pulumi.set(__self__, "network_refs", network_refs)
if replica_count is not None:
pulumi.set(__self__, "replica_count", replica_count)
@property
@pulumi.getter(name="codePackages")
def code_packages(self) -> pulumi.Input[Sequence[pulumi.Input['ContainerCodePackagePropertiesArgs']]]:
"""
Describes the set of code packages that form the service. A code package describes the container and the properties for running it. All the code packages are started together on the same host and share the same context (network, process, etc.).
"""
return pulumi.get(self, "code_packages")
@code_packages.setter
def code_packages(self, value: pulumi.Input[Sequence[pulumi.Input['ContainerCodePackagePropertiesArgs']]]):
pulumi.set(self, "code_packages", value)
@property
@pulumi.getter(name="osType")
def os_type(self) -> pulumi.Input[Union[str, 'OperatingSystemTypes']]:
"""
The operating system type required by the code in the service.
"""
return pulumi.get(self, "os_type")
@os_type.setter
def os_type(self, value: pulumi.Input[Union[str, 'OperatingSystemTypes']]):
pulumi.set(self, "os_type", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
User readable description of the service.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def diagnostics(self) -> Optional[pulumi.Input['DiagnosticsRefArgs']]:
"""
Reference to sinks in DiagnosticsDescription.
"""
return pulumi.get(self, "diagnostics")
@diagnostics.setter
def diagnostics(self, value: Optional[pulumi.Input['DiagnosticsRefArgs']]):
pulumi.set(self, "diagnostics", value)
@property
@pulumi.getter(name="healthState")
def health_state(self) -> Optional[pulumi.Input[Union[str, 'HealthState']]]:
"""
The health state of a resource such as Application, Service, or Network.
"""
return pulumi.get(self, "health_state")
@health_state.setter
def health_state(self, value: Optional[pulumi.Input[Union[str, 'HealthState']]]):
pulumi.set(self, "health_state", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="networkRefs")
def network_refs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NetworkRefArgs']]]]:
"""
The names of the private networks that this service needs to be part of.
"""
return pulumi.get(self, "network_refs")
@network_refs.setter
def network_refs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkRefArgs']]]]):
pulumi.set(self, "network_refs", value)
@property
@pulumi.getter(name="replicaCount")
def replica_count(self) -> Optional[pulumi.Input[int]]:
"""
The number of replicas of the service to create. Defaults to 1 if not specified.
"""
return pulumi.get(self, "replica_count")
@replica_count.setter
def replica_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "replica_count", value)
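# --- Illustrative usage (editor's sketch; not part of the generated SDK) ---
# A two-replica Linux service built from the code package sketched earlier;
# "myNetwork" is an assumed private network name.
#
#   service = ServiceResourceDescriptionArgs(
#       os_type="Linux",
#       code_packages=[code_package],
#       replica_count=2,
#       network_refs=[NetworkRefArgs(name="myNetwork")],
#   )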
@pulumi.input_type
class SettingArgs:
def __init__(__self__, *,
name: Optional[pulumi.Input[str]] = None,
value: Optional[pulumi.Input[str]] = None):
"""
Describes a setting for the container.
:param pulumi.Input[str] name: The name of the setting.
:param pulumi.Input[str] value: The value of the setting.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if value is not None:
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the setting.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def value(self) -> Optional[pulumi.Input[str]]:
"""
The value of the setting.
"""
return pulumi.get(self, "value")
@value.setter
def value(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "value", value)
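# --- Illustrative usage (editor's sketch; not part of the generated SDK) ---
# Settings surface inside the container as files named after the setting, in
# the directory given by the "Fabric_SettingPath" environment variable (see
# the ContainerCodePackagePropertiesArgs docstring above).
#
#   log_level = SettingArgs(name="LogLevel", value="verbose")
#
# Read back inside the running container, roughly (assumes `import os`):
#   path = os.path.join(os.environ["Fabric_SettingPath"], "LogLevel")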
@pulumi.input_type
class VolumeProviderParametersAzureFileArgs:
def __init__(__self__, *,
account_name: pulumi.Input[str],
share_name: pulumi.Input[str],
account_key: Optional[pulumi.Input[str]] = None):
"""
This type describes a volume provided by an Azure Files file share.
:param pulumi.Input[str] account_name: Name of the Azure storage account for the File Share.
:param pulumi.Input[str] share_name: Name of the Azure Files file share that provides storage for the volume.
:param pulumi.Input[str] account_key: Access key of the Azure storage account for the File Share.
"""
pulumi.set(__self__, "account_name", account_name)
pulumi.set(__self__, "share_name", share_name)
if account_key is not None:
pulumi.set(__self__, "account_key", account_key)
@property
@pulumi.getter(name="accountName")
def account_name(self) -> pulumi.Input[str]:
"""
Name of the Azure storage account for the File Share.
"""
return pulumi.get(self, "account_name")
@account_name.setter
def account_name(self, value: pulumi.Input[str]):
pulumi.set(self, "account_name", value)
@property
@pulumi.getter(name="shareName")
def share_name(self) -> pulumi.Input[str]:
"""
Name of the Azure Files file share that provides storage for the volume.
"""
return pulumi.get(self, "share_name")
@share_name.setter
def share_name(self, value: pulumi.Input[str]):
pulumi.set(self, "share_name", value)
@property
@pulumi.getter(name="accountKey")
def account_key(self) -> Optional[pulumi.Input[str]]:
"""
Access key of the Azure storage account for the File Share.
"""
return pulumi.get(self, "account_key")
@account_key.setter
def account_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "account_key", value)
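# --- Illustrative usage (editor's sketch; not part of the generated SDK) ---
# Backing a volume with an Azure Files share; the account key is read from
# Pulumi config ("storageKey" is an assumed config key) rather than hard-coded.
#
#   azure_file = VolumeProviderParametersAzureFileArgs(
#       account_name="mystorageacct",
#       share_name="appshare",
#       account_key=pulumi.Config().require_secret("storageKey"),
#   )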
| 38.288364 | 349 | 0.646906 |
ddb7b6648d38dbcacd8aef05e97ebb22e7cbfcca | 781 | py | Python | src/reversion/migrations/0003_auto_20150805_2140.py | matllubos/django-reversion | d80a24b6a195c8a68bfc3100ba533419226fa18d | [
"BSD-3-Clause"
] | null | null | null | src/reversion/migrations/0003_auto_20150805_2140.py | matllubos/django-reversion | d80a24b6a195c8a68bfc3100ba533419226fa18d | [
"BSD-3-Clause"
] | 2 | 2016-04-29T10:19:24.000Z | 2017-08-17T15:16:14.000Z | src/reversion/migrations/0003_auto_20150805_2140.py | druids/django-reversion | d80a24b6a195c8a68bfc3100ba533419226fa18d | [
"BSD-3-Clause"
] | 3 | 2017-05-30T14:27:46.000Z | 2021-06-23T15:32:41.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('reversion', '0002_auto_20141216_1509'),
]
operations = [
migrations.AlterModelOptions(
name='revision',
options={'ordering': ('created_at',)},
),
migrations.RenameField(
model_name='revision',
old_name='date_created',
new_name='created_at',
),
migrations.AddField(
model_name='version',
name='type',
field=models.PositiveIntegerField(default=2, choices=[(1, 'Created'), (2, 'Changed'), (3, 'Deleted')]),
preserve_default=False,
),
]
| 26.033333 | 115 | 0.565941 |
a000b1994624933dfe28e280a162d4c967ad9b12 | 99 | py | Python | SciFiReaders/readers/spectroscopy/__init__.py | itsalexis962/SciFiReaders | 483b228d1f79aafac85cbadd4d0aea488b563902 | [
"MIT"
] | 8 | 2021-05-07T00:59:39.000Z | 2021-12-10T21:03:59.000Z | SciFiReaders/readers/spectroscopy/__init__.py | itsalexis962/SciFiReaders | 483b228d1f79aafac85cbadd4d0aea488b563902 | [
"MIT"
] | 31 | 2021-02-19T21:16:25.000Z | 2022-03-04T22:28:09.000Z | SciFiReaders/readers/spectroscopy/__init__.py | itsalexis962/SciFiReaders | 483b228d1f79aafac85cbadd4d0aea488b563902 | [
"MIT"
] | 6 | 2021-05-07T01:48:09.000Z | 2022-01-21T21:14:36.000Z | from .SpeReader import RamanSpeReader
__all__ = ['RamanSpeReader']
all_readers = [RamanSpeReader] | 24.75 | 37 | 0.79798 |
88e33ae66b1993bcc6a5c271ffbd8da19de20093 | 38,407 | py | Python | pysnmp/INFORMANT-PERF-CITRIX.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/INFORMANT-PERF-CITRIX.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/INFORMANT-PERF-CITRIX.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module INFORMANT-PERF-CITRIX (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/INFORMANT-PERF-CITRIX
# Produced by pysmi-0.3.4 at Mon Apr 29 19:42:19 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Gauge32, iso, Bits, Unsigned32, Integer32, IpAddress, ObjectIdentity, ModuleIdentity, Counter32, NotificationType, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "iso", "Bits", "Unsigned32", "Integer32", "IpAddress", "ObjectIdentity", "ModuleIdentity", "Counter32", "NotificationType", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "MibIdentifier")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
InstanceName, informant = mibBuilder.importSymbols("WTCS", "InstanceName", "informant")
citrixPerformance = ModuleIdentity((1, 3, 6, 1, 4, 1, 9600, 1, 41))
citrixPerformance.setRevisions(('2008-06-13 22:54',))
if mibBuilder.loadTexts: citrixPerformance.setLastUpdated('200806132254Z')
if mibBuilder.loadTexts: citrixPerformance.setOrganization('Informant Systems, Inc.')
ctxCPUUtilizationMgmtUserTable = MibTable((1, 3, 6, 1, 4, 1, 9600, 1, 41, 1), )
if mibBuilder.loadTexts: ctxCPUUtilizationMgmtUserTable.setStatus('current')
ctxCPUUtilizationMgmtUserEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9600, 1, 41, 1, 1), ).setIndexNames((0, "INFORMANT-PERF-CITRIX", "ctxcumuInstance"))
if mibBuilder.loadTexts: ctxCPUUtilizationMgmtUserEntry.setStatus('current')
ctxcumuInstance = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 1, 1, 1), InstanceName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxcumuInstance.setStatus('current')
ctxcumuCPUEntitlement = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 1, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxcumuCPUEntitlement.setStatus('current')
ctxcumuCPUReservation = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 1, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxcumuCPUReservation.setStatus('current')
ctxcumuCPUShares = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 1, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxcumuCPUShares.setStatus('current')
ctxcumuCPUUsage = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 1, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxcumuCPUUsage.setStatus('current')
ctxcumuLongTermCPUUsage = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 1, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxcumuLongTermCPUUsage.setStatus('current')
ctxDataLayerTable = MibTable((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2), )
if mibBuilder.loadTexts: ctxDataLayerTable.setStatus('current')
ctxDataLayerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1), ).setIndexNames((0, "INFORMANT-PERF-CITRIX", "ctxdlInstance"))
if mibBuilder.loadTexts: ctxDataLayerEntry.setStatus('current')
ctxdlInstance = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 1), InstanceName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlInstance.setStatus('current')
ctxdlCommitsPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlCommitsPerSec.setStatus('current')
ctxdlContextsPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlContextsPerSec.setStatus('current')
ctxdlDeletesPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlDeletesPerSec.setStatus('current')
ctxdlInsertsPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlInsertsPerSec.setStatus('current')
ctxdlNumberOfContextsInThePool = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlNumberOfContextsInThePool.setStatus('current')
ctxdlNumOfCntxtRequestsWaiting = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlNumOfCntxtRequestsWaiting.setStatus('current')
ctxdlReadStreamsCreatedPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlReadStreamsCreatedPerSec.setStatus('current')
ctxdlStreamBytesReadPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlStreamBytesReadPerSec.setStatus('current')
ctxdlStreamBytesWrittenPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlStreamBytesWrittenPerSec.setStatus('current')
ctxdlStreamsCreatedPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlStreamsCreatedPerSec.setStatus('current')
ctxdlUpdatesPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlUpdatesPerSec.setStatus('current')
ctxdlWriteStreamsCreatedPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 2, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdlWriteStreamsCreatedPerSec.setStatus('current')
ctxIMANetworkingTable = MibTable((1, 3, 6, 1, 4, 1, 9600, 1, 41, 3), )
if mibBuilder.loadTexts: ctxIMANetworkingTable.setStatus('current')
ctxIMANetworkingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9600, 1, 41, 3, 1), ).setIndexNames((0, "INFORMANT-PERF-CITRIX", "ctximanInstance"))
if mibBuilder.loadTexts: ctxIMANetworkingEntry.setStatus('current')
ctximanInstance = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 3, 1, 1), InstanceName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctximanInstance.setStatus('current')
ctximanBytesReceivedPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 3, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctximanBytesReceivedPerSec.setStatus('current')
ctximanBytesSentPerSec = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 3, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctximanBytesSentPerSec.setStatus('current')
ctximanNetworkConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 3, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctximanNetworkConnections.setStatus('current')
citrixLicensing = MibIdentifier((1, 3, 6, 1, 4, 1, 9600, 1, 41, 4))
ctxlAverageCheckInRspTimeMs = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 4, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxlAverageCheckInRspTimeMs.setStatus('current')
ctxlAverageCheckOutRspTimeMs = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 4, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxlAverageCheckOutRspTimeMs.setStatus('current')
ctxlLastRecordCheckInRspTimeMs = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 4, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxlLastRecordCheckInRspTimeMs.setStatus('current')
ctxlLastRecordCheckOutRspTimeMs = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 4, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxlLastRecordCheckOutRspTimeMs.setStatus('current')
ctxlServerConnectionFailure = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 4, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxlServerConnectionFailure.setStatus('current')
ctxlMaximumCheckInRspTimeMs = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 4, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxlMaximumCheckInRspTimeMs.setStatus('current')
ctxlMaximumCheckOutRspTimeMs = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 4, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxlMaximumCheckOutRspTimeMs.setStatus('current')
citrixMetaFramePresentationSvr = ObjectIdentity((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5))
if mibBuilder.loadTexts: citrixMetaFramePresentationSvr.setStatus('current')
ctxmpsApplEnumerationsPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsApplEnumerationsPerSec.setStatus('current')
ctxmpsApplResolutionTimeMs = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsApplResolutionTimeMs.setStatus('current')
ctxmpsApplResolutionFailedPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsApplResolutionFailedPerSec.setStatus('current')
ctxmpsApplResolutionsPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsApplResolutionsPerSec.setStatus('current')
ctxmpsDataStoreConnectionFailure = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDataStoreConnectionFailure.setStatus('current')
ctxmpsDataStoreBytesRead = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDataStoreBytesRead.setStatus('current')
ctxmpsDataStoreBytesReadPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDataStoreBytesReadPerSec.setStatus('current')
ctxmpsDataStoreBytesWritePerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDataStoreBytesWritePerSec.setStatus('current')
ctxmpsDataStoreReads = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDataStoreReads.setStatus('current')
ctxmpsDataStoreReadsPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDataStoreReadsPerSec.setStatus('current')
ctxmpsDataStoreWritesPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDataStoreWritesPerSec.setStatus('current')
ctxmpsDSGatewayUpdateCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDSGatewayUpdateCount.setStatus('current')
ctxmpsDSGatewayUpdateBytesSent = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDSGatewayUpdateBytesSent.setStatus('current')
ctxmpsDSQueryCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDSQueryCount.setStatus('current')
ctxmpsDSQueryRequestBytesReceive = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 15), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDSQueryRequestBytesReceive.setStatus('current')
ctxmpsDSQueryResponseBytesSent = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 16), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDSQueryResponseBytesSent.setStatus('current')
ctxmpsDSUpdateBytesReceived = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 17), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDSUpdateBytesReceived.setStatus('current')
ctxmpsDSUpdatePacketsReceived = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 18), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDSUpdatePacketsReceived.setStatus('current')
ctxmpsDSUpdateResponseBytesSent = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 19), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDSUpdateResponseBytesSent.setStatus('current')
ctxmpsDSBytesReadPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 20), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDSBytesReadPerSec.setStatus('current')
ctxmpsDSBytesWrittenPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 21), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDSBytesWrittenPerSec.setStatus('current')
ctxmpsDSReadsPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 22), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDSReadsPerSec.setStatus('current')
ctxmpsDSWritesPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 23), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsDSWritesPerSec.setStatus('current')
ctxmpsFilteredApplEnumsPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 24), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsFilteredApplEnumsPerSec.setStatus('current')
ctxmpsLCCacheBytesReadPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 25), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsLCCacheBytesReadPerSec.setStatus('current')
ctxmpsLCCacheBytesWrittenPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 26), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsLCCacheBytesWrittenPerSec.setStatus('current')
ctxmpsLCCacheReadsPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 27), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsLCCacheReadsPerSec.setStatus('current')
ctxmpsLCCacheWritesPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 28), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsLCCacheWritesPerSec.setStatus('current')
ctxmpsMaximumNumberOfXMLThreads = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 29), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsMaximumNumberOfXMLThreads.setStatus('current')
ctxmpsNumberOfXMLThreads = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 30), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsNumberOfXMLThreads.setStatus('current')
ctxmpsNumberOfBusyXMLThreads = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 31), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsNumberOfBusyXMLThreads.setStatus('current')
ctxmpsResWorkItemQueueExecuteCnt = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 32), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsResWorkItemQueueExecuteCnt.setStatus('current')
ctxmpsResWorkItemQueueReadyCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 33), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsResWorkItemQueueReadyCount.setStatus('current')
ctxmpsWorkItemQueueExecuteCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 34), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsWorkItemQueueExecuteCount.setStatus('current')
ctxmpsWorkItemQueuePendingCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 35), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsWorkItemQueuePendingCount.setStatus('current')
ctxmpsWorkItemQueueReadyCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 36), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsWorkItemQueueReadyCount.setStatus('current')
ctxmpsZoneElections = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 37), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsZoneElections.setStatus('current')
ctxmpsZoneElectionsWon = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 5, 38), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxmpsZoneElectionsWon.setStatus('current')
ctxSmartAuditorAgent = MibIdentifier((1, 3, 6, 1, 4, 1, 9600, 1, 41, 6))
ctxsaaActiveRecordingCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 6, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxsaaActiveRecordingCount.setStatus('current')
ctxsaaReadSmartAuditorDriverSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 6, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxsaaReadSmartAuditorDriverSec.setStatus('current')
ctxSmartAuditorStorageManager = MibIdentifier((1, 3, 6, 1, 4, 1, 9600, 1, 41, 7))
ctxsasmActiveRecordingCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 7, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxsasmActiveRecordingCount.setStatus('current')
ctxsasmMessageBytesPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 7, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxsasmMessageBytesPerSec.setStatus('current')
ctxsasmMessagesPerSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 7, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxsasmMessagesPerSec.setStatus('current')
ctxDesktopBrokerDatabaseService = MibIdentifier((1, 3, 6, 1, 4, 1, 9600, 1, 41, 8))
ctxdbdsHostedDesktopReleasesSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 8, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdbdsHostedDesktopReleasesSec.setStatus('current')
ctxdbdsHostedDesktopRequestsSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 8, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdbdsHostedDesktopRequestsSec.setStatus('current')
ctxdbdsHostedDesktopStateUpdSec = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 8, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxdbdsHostedDesktopStateUpdSec.setStatus('current')
ctxICASessionTable = MibTable((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9), )
if mibBuilder.loadTexts: ctxICASessionTable.setStatus('current')
ctxICASessionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1), ).setIndexNames((0, "INFORMANT-PERF-CITRIX", "ctxisInstance"))
if mibBuilder.loadTexts: ctxICASessionEntry.setStatus('current')
ctxisInstance = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 1), InstanceName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInstance.setStatus('current')
ctxisInputAudioBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputAudioBandwidth.setStatus('current')
ctxisInputCOM1Bandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputCOM1Bandwidth.setStatus('current')
ctxisInputCOM2Bandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputCOM2Bandwidth.setStatus('current')
ctxisInputCOMBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputCOMBandwidth.setStatus('current')
ctxisInputClipboardBandwidt = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputClipboardBandwidt.setStatus('current')
ctxisInputControlChanBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputControlChanBandwidth.setStatus('current')
ctxisInputDriveBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputDriveBandwidth.setStatus('current')
ctxisInputFontDataBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputFontDataBandwidth.setStatus('current')
ctxisInputLPT1Bandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputLPT1Bandwidth.setStatus('current')
ctxisInputLPT2Bandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputLPT2Bandwidth.setStatus('current')
ctxisInputLicensingBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputLicensingBandwidth.setStatus('current')
ctxisInputManagementBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputManagementBandwidth.setStatus('current')
ctxisInputPNBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputPNBandwidth.setStatus('current')
ctxisInputPrinterBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 15), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputPrinterBandwidth.setStatus('current')
ctxisInputSeamlessBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 16), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputSeamlessBandwidth.setStatus('current')
ctxisInputSessionBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 17), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputSessionBandwidth.setStatus('current')
ctxisInputSessionCompression = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 18), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputSessionCompression.setStatus('current')
ctxisInputSessionLineSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 19), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputSessionLineSpeed.setStatus('current')
ctxisInputSpeedScreenDataChanBW = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 20), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputSpeedScreenDataChanBW.setStatus('current')
ctxisInputTextEchoBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 21), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputTextEchoBandwidth.setStatus('current')
ctxisInputThinWireBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 22), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputThinWireBandwidth.setStatus('current')
ctxisInputVideoFrameBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 23), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisInputVideoFrameBandwidth.setStatus('current')
ctxisLatencyLastRecorded = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 24), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisLatencyLastRecorded.setStatus('current')
ctxisLatencySessionAverage = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 25), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisLatencySessionAverage.setStatus('current')
ctxisLatencySessionDeviation = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 26), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisLatencySessionDeviation.setStatus('current')
ctxisOutputAudioBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 27), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputAudioBandwidth.setStatus('current')
ctxisOutputCOM1Bandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 28), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputCOM1Bandwidth.setStatus('current')
ctxisOutputCOM2Bandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 29), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputCOM2Bandwidth.setStatus('current')
ctxisOutputCOMBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 30), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputCOMBandwidth.setStatus('current')
ctxisOutputClipboardBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 31), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputClipboardBandwidth.setStatus('current')
ctxisOutputControlChannBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 32), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputControlChannBandwidth.setStatus('current')
ctxisOutputDriveBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 33), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputDriveBandwidth.setStatus('current')
ctxisOutputFontDataBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 34), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputFontDataBandwidth.setStatus('current')
ctxisOutputLPT1Bandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 35), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputLPT1Bandwidth.setStatus('current')
ctxisOutputLPT2Bandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 36), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputLPT2Bandwidth.setStatus('current')
ctxisOutputLicensingBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 37), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputLicensingBandwidth.setStatus('current')
ctxisOutputManagementBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 38), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputManagementBandwidth.setStatus('current')
ctxisOutputPNBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 39), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputPNBandwidth.setStatus('current')
ctxisOutputPrinterBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 40), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputPrinterBandwidth.setStatus('current')
ctxisOutputSeamlessBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 41), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputSeamlessBandwidth.setStatus('current')
ctxisOutputSessionBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 42), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputSessionBandwidth.setStatus('current')
ctxisOutputSessionCompression = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 43), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputSessionCompression.setStatus('current')
ctxisOutputSessionLineSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 44), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputSessionLineSpeed.setStatus('current')
ctxisOutputSpeedScreenDataChanBW = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 45), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputSpeedScreenDataChanBW.setStatus('current')
ctxisOutputTextEchoBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 46), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputTextEchoBandwidth.setStatus('current')
ctxisOutputThinWireBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 47), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputThinWireBandwidth.setStatus('current')
ctxisOutputVideoFrameBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 9600, 1, 41, 9, 1, 48), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxisOutputVideoFrameBandwidth.setStatus('current')
ctxSecureTicketAuthority = ObjectIdentity((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10))
if mibBuilder.loadTexts: ctxSecureTicketAuthority.setStatus('current')
ctxstaSTABadDataRequestCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxstaSTABadDataRequestCount.setStatus('current')
ctxstaSTABadRefreshRequestCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxstaSTABadRefreshRequestCount.setStatus('current')
ctxstaSTABadTicketRequestCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxstaSTABadTicketRequestCount.setStatus('current')
ctxstaSTACountOfActiveTickets = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxstaSTACountOfActiveTickets.setStatus('current')
ctxstaSTAGoodDataRequestCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxstaSTAGoodDataRequestCount.setStatus('current')
ctxstaSTAGoodRefreshRequestCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxstaSTAGoodRefreshRequestCount.setStatus('current')
ctxstaSTAGoodTicketRequestCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxstaSTAGoodTicketRequestCount.setStatus('current')
ctxstaSTAPeakAllRequestRate = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxstaSTAPeakAllRequestRate.setStatus('current')
ctxstaSTAPeakDataRequestRate = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxstaSTAPeakDataRequestRate.setStatus('current')
ctxstaSTAPeakTicketRefreshRate = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxstaSTAPeakTicketRefreshRate.setStatus('current')
ctxstaSTAPeakTicketRequestRate = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxstaSTAPeakTicketRequestRate.setStatus('current')
ctxstaSTATicketTimeoutCount = MibScalar((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ctxstaSTATicketTimeoutCount.setStatus('current')
mibBuilder.exportSymbols("INFORMANT-PERF-CITRIX", ctxmpsDSQueryResponseBytesSent=ctxmpsDSQueryResponseBytesSent, ctxisLatencySessionAverage=ctxisLatencySessionAverage, ctxlLastRecordCheckInRspTimeMs=ctxlLastRecordCheckInRspTimeMs, ctxisInputPNBandwidth=ctxisInputPNBandwidth, citrixMetaFramePresentationSvr=citrixMetaFramePresentationSvr, ctxcumuCPUUsage=ctxcumuCPUUsage, ctxstaSTAGoodDataRequestCount=ctxstaSTAGoodDataRequestCount, ctxstaSTAPeakDataRequestRate=ctxstaSTAPeakDataRequestRate, ctxmpsDataStoreBytesReadPerSec=ctxmpsDataStoreBytesReadPerSec, ctxisOutputThinWireBandwidth=ctxisOutputThinWireBandwidth, ctxdlNumberOfContextsInThePool=ctxdlNumberOfContextsInThePool, ctxcumuCPUShares=ctxcumuCPUShares, ctxmpsZoneElectionsWon=ctxmpsZoneElectionsWon, ctxisInputDriveBandwidth=ctxisInputDriveBandwidth, ctxisInputSessionLineSpeed=ctxisInputSessionLineSpeed, ctxICASessionTable=ctxICASessionTable, ctxdlReadStreamsCreatedPerSec=ctxdlReadStreamsCreatedPerSec, ctxstaSTABadRefreshRequestCount=ctxstaSTABadRefreshRequestCount, ctxisInputAudioBandwidth=ctxisInputAudioBandwidth, ctxCPUUtilizationMgmtUserEntry=ctxCPUUtilizationMgmtUserEntry, ctxmpsDSGatewayUpdateCount=ctxmpsDSGatewayUpdateCount, ctxstaSTAGoodTicketRequestCount=ctxstaSTAGoodTicketRequestCount, ctxdlDeletesPerSec=ctxdlDeletesPerSec, ctxmpsWorkItemQueueReadyCount=ctxmpsWorkItemQueueReadyCount, ctxDesktopBrokerDatabaseService=ctxDesktopBrokerDatabaseService, citrixLicensing=citrixLicensing, ctxisInputManagementBandwidth=ctxisInputManagementBandwidth, ctximanNetworkConnections=ctximanNetworkConnections, ctxSecureTicketAuthority=ctxSecureTicketAuthority, ctxstaSTABadDataRequestCount=ctxstaSTABadDataRequestCount, ctxmpsApplResolutionsPerSec=ctxmpsApplResolutionsPerSec, ctxmpsDSGatewayUpdateBytesSent=ctxmpsDSGatewayUpdateBytesSent, ctxSmartAuditorAgent=ctxSmartAuditorAgent, ctxisInputSeamlessBandwidth=ctxisInputSeamlessBandwidth, ctxisLatencySessionDeviation=ctxisLatencySessionDeviation, ctxisInputTextEchoBandwidth=ctxisInputTextEchoBandwidth, ctxisInstance=ctxisInstance, ctxisOutputSpeedScreenDataChanBW=ctxisOutputSpeedScreenDataChanBW, ctxisInputCOM1Bandwidth=ctxisInputCOM1Bandwidth, ctxdlInsertsPerSec=ctxdlInsertsPerSec, ctxlServerConnectionFailure=ctxlServerConnectionFailure, ctxSmartAuditorStorageManager=ctxSmartAuditorStorageManager, ctxdlCommitsPerSec=ctxdlCommitsPerSec, ctxstaSTAPeakTicketRefreshRate=ctxstaSTAPeakTicketRefreshRate, ctxdlNumOfCntxtRequestsWaiting=ctxdlNumOfCntxtRequestsWaiting, ctxmpsDSReadsPerSec=ctxmpsDSReadsPerSec, ctxmpsNumberOfXMLThreads=ctxmpsNumberOfXMLThreads, ctxsaaActiveRecordingCount=ctxsaaActiveRecordingCount, ctxisInputControlChanBandwidth=ctxisInputControlChanBandwidth, ctxmpsDataStoreBytesRead=ctxmpsDataStoreBytesRead, ctxisInputSessionBandwidth=ctxisInputSessionBandwidth, ctxmpsMaximumNumberOfXMLThreads=ctxmpsMaximumNumberOfXMLThreads, ctxDataLayerEntry=ctxDataLayerEntry, ctxisOutputCOM1Bandwidth=ctxisOutputCOM1Bandwidth, ctxdbdsHostedDesktopReleasesSec=ctxdbdsHostedDesktopReleasesSec, ctxisInputSpeedScreenDataChanBW=ctxisInputSpeedScreenDataChanBW, ctxisOutputCOMBandwidth=ctxisOutputCOMBandwidth, ctxisInputLicensingBandwidth=ctxisInputLicensingBandwidth, ctximanBytesReceivedPerSec=ctximanBytesReceivedPerSec, ctxmpsDSBytesReadPerSec=ctxmpsDSBytesReadPerSec, ctxdlStreamsCreatedPerSec=ctxdlStreamsCreatedPerSec, ctxstaSTAPeakTicketRequestRate=ctxstaSTAPeakTicketRequestRate, ctxsasmMessageBytesPerSec=ctxsasmMessageBytesPerSec, ctxmpsLCCacheReadsPerSec=ctxmpsLCCacheReadsPerSec, 
ctxmpsDataStoreConnectionFailure=ctxmpsDataStoreConnectionFailure, ctxcumuInstance=ctxcumuInstance, ctxmpsZoneElections=ctxmpsZoneElections, ctxmpsDSUpdatePacketsReceived=ctxmpsDSUpdatePacketsReceived, ctxisInputLPT1Bandwidth=ctxisInputLPT1Bandwidth, ctxisOutputControlChannBandwidth=ctxisOutputControlChannBandwidth, ctxmpsFilteredApplEnumsPerSec=ctxmpsFilteredApplEnumsPerSec, citrixPerformance=citrixPerformance, ctximanBytesSentPerSec=ctximanBytesSentPerSec, ctxDataLayerTable=ctxDataLayerTable, ctxlMaximumCheckOutRspTimeMs=ctxlMaximumCheckOutRspTimeMs, ctxmpsDataStoreReadsPerSec=ctxmpsDataStoreReadsPerSec, ctxisOutputSeamlessBandwidth=ctxisOutputSeamlessBandwidth, ctxcumuCPUReservation=ctxcumuCPUReservation, ctxisInputFontDataBandwidth=ctxisInputFontDataBandwidth, ctxisInputVideoFrameBandwidth=ctxisInputVideoFrameBandwidth, ctxmpsDataStoreReads=ctxmpsDataStoreReads, ctxisInputThinWireBandwidth=ctxisInputThinWireBandwidth, ctxICASessionEntry=ctxICASessionEntry, ctxisOutputLPT2Bandwidth=ctxisOutputLPT2Bandwidth, PYSNMP_MODULE_ID=citrixPerformance, ctxcumuCPUEntitlement=ctxcumuCPUEntitlement, ctxlAverageCheckOutRspTimeMs=ctxlAverageCheckOutRspTimeMs, ctxmpsDSUpdateResponseBytesSent=ctxmpsDSUpdateResponseBytesSent, ctxmpsDataStoreWritesPerSec=ctxmpsDataStoreWritesPerSec, ctxstaSTATicketTimeoutCount=ctxstaSTATicketTimeoutCount, ctxdbdsHostedDesktopRequestsSec=ctxdbdsHostedDesktopRequestsSec, ctxisOutputFontDataBandwidth=ctxisOutputFontDataBandwidth, ctxisOutputLPT1Bandwidth=ctxisOutputLPT1Bandwidth, ctxmpsDataStoreBytesWritePerSec=ctxmpsDataStoreBytesWritePerSec, ctxmpsDSQueryCount=ctxmpsDSQueryCount, ctxisOutputPNBandwidth=ctxisOutputPNBandwidth, ctxdlStreamBytesReadPerSec=ctxdlStreamBytesReadPerSec, ctxdlStreamBytesWrittenPerSec=ctxdlStreamBytesWrittenPerSec, ctxlLastRecordCheckOutRspTimeMs=ctxlLastRecordCheckOutRspTimeMs, ctxdbdsHostedDesktopStateUpdSec=ctxdbdsHostedDesktopStateUpdSec, ctxlMaximumCheckInRspTimeMs=ctxlMaximumCheckInRspTimeMs, ctxisOutputAudioBandwidth=ctxisOutputAudioBandwidth, ctxmpsApplEnumerationsPerSec=ctxmpsApplEnumerationsPerSec, ctxisOutputPrinterBandwidth=ctxisOutputPrinterBandwidth, ctxisInputClipboardBandwidt=ctxisInputClipboardBandwidt, ctxisInputPrinterBandwidth=ctxisInputPrinterBandwidth, ctxmpsLCCacheBytesWrittenPerSec=ctxmpsLCCacheBytesWrittenPerSec, ctxdlWriteStreamsCreatedPerSec=ctxdlWriteStreamsCreatedPerSec, ctxlAverageCheckInRspTimeMs=ctxlAverageCheckInRspTimeMs, ctxmpsDSUpdateBytesReceived=ctxmpsDSUpdateBytesReceived, ctxisLatencyLastRecorded=ctxisLatencyLastRecorded, ctxcumuLongTermCPUUsage=ctxcumuLongTermCPUUsage, ctxisOutputTextEchoBandwidth=ctxisOutputTextEchoBandwidth, ctxisOutputVideoFrameBandwidth=ctxisOutputVideoFrameBandwidth, ctxIMANetworkingEntry=ctxIMANetworkingEntry, ctxstaSTAPeakAllRequestRate=ctxstaSTAPeakAllRequestRate, ctxisInputLPT2Bandwidth=ctxisInputLPT2Bandwidth, ctxmpsDSQueryRequestBytesReceive=ctxmpsDSQueryRequestBytesReceive, ctxisOutputClipboardBandwidth=ctxisOutputClipboardBandwidth, ctxdlInstance=ctxdlInstance, ctxisInputCOMBandwidth=ctxisInputCOMBandwidth, ctxIMANetworkingTable=ctxIMANetworkingTable, ctxisOutputSessionCompression=ctxisOutputSessionCompression, ctxisOutputCOM2Bandwidth=ctxisOutputCOM2Bandwidth, ctxisOutputManagementBandwidth=ctxisOutputManagementBandwidth, ctxmpsApplResolutionTimeMs=ctxmpsApplResolutionTimeMs, ctxmpsNumberOfBusyXMLThreads=ctxmpsNumberOfBusyXMLThreads, ctxisInputCOM2Bandwidth=ctxisInputCOM2Bandwidth, ctxmpsDSWritesPerSec=ctxmpsDSWritesPerSec, 
ctxstaSTACountOfActiveTickets=ctxstaSTACountOfActiveTickets, ctxsasmActiveRecordingCount=ctxsasmActiveRecordingCount, ctxisOutputDriveBandwidth=ctxisOutputDriveBandwidth, ctxmpsApplResolutionFailedPerSec=ctxmpsApplResolutionFailedPerSec, ctxstaSTAGoodRefreshRequestCount=ctxstaSTAGoodRefreshRequestCount, ctxdlContextsPerSec=ctxdlContextsPerSec, ctxsaaReadSmartAuditorDriverSec=ctxsaaReadSmartAuditorDriverSec, ctxisInputSessionCompression=ctxisInputSessionCompression, ctximanInstance=ctximanInstance, ctxmpsResWorkItemQueueReadyCount=ctxmpsResWorkItemQueueReadyCount, ctxsasmMessagesPerSec=ctxsasmMessagesPerSec, ctxmpsLCCacheWritesPerSec=ctxmpsLCCacheWritesPerSec, ctxisOutputLicensingBandwidth=ctxisOutputLicensingBandwidth, ctxmpsLCCacheBytesReadPerSec=ctxmpsLCCacheBytesReadPerSec, ctxisOutputSessionLineSpeed=ctxisOutputSessionLineSpeed, ctxmpsWorkItemQueueExecuteCount=ctxmpsWorkItemQueueExecuteCount, ctxmpsDSBytesWrittenPerSec=ctxmpsDSBytesWrittenPerSec, ctxmpsWorkItemQueuePendingCount=ctxmpsWorkItemQueuePendingCount, ctxstaSTABadTicketRequestCount=ctxstaSTABadTicketRequestCount, ctxCPUUtilizationMgmtUserTable=ctxCPUUtilizationMgmtUserTable, ctxdlUpdatesPerSec=ctxdlUpdatesPerSec, ctxmpsResWorkItemQueueExecuteCnt=ctxmpsResWorkItemQueueExecuteCnt, ctxisOutputSessionBandwidth=ctxisOutputSessionBandwidth)
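# --- Illustrative only (not part of the compiled MIB) ---------------------
# The STA counters above are read-only scalars under 1.3.6.1.4.1.9600.1.41.10.
# A manager could poll one of them with pysnmp's high-level API roughly as
# follows; the agent address and community string below are placeholders.
#
# from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                           ContextData, ObjectType, ObjectIdentity, getCmd)
# error_indication, error_status, error_index, var_binds = next(getCmd(
#     SnmpEngine(),
#     CommunityData('public', mpModel=1),       # assumed SNMPv2c community
#     UdpTransportTarget(('192.0.2.10', 161)),  # placeholder agent address
#     ContextData(),
#     # ctxstaSTACountOfActiveTickets is a scalar, hence the trailing .0
#     ObjectType(ObjectIdentity((1, 3, 6, 1, 4, 1, 9600, 1, 41, 10, 4, 0)))))
# if error_indication or error_status:
#     print('query failed:', error_indication or error_status)
# else:
#     for name, value in var_binds:
#         print(name.prettyPrint(), '=', value.prettyPrint())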
| 121.541139 | 8,338 | 0.793137 |
3cd7b51a403639c5b90863b67cc95efd4dd1f094 | 5,269 | py | Python | activitysim/examples/example_multiple_zone/scripts/two_zone_example_data.py | mxndrwgrdnr/activitysim | 722d6e36b2210d5d24dfa2ac4a3504c1e8f75336 | [
"BSD-3-Clause"
] | 85 | 2018-02-16T15:08:13.000Z | 2022-03-23T15:08:08.000Z | activitysim/examples/example_multiple_zone/scripts/two_zone_example_data.py | mxndrwgrdnr/activitysim | 722d6e36b2210d5d24dfa2ac4a3504c1e8f75336 | [
"BSD-3-Clause"
] | 311 | 2018-01-16T01:59:47.000Z | 2022-03-29T00:46:40.000Z | activitysim/examples/example_multiple_zone/scripts/two_zone_example_data.py | mxndrwgrdnr/activitysim | 722d6e36b2210d5d24dfa2ac4a3504c1e8f75336 | [
"BSD-3-Clause"
] | 63 | 2018-02-05T15:27:51.000Z | 2022-03-04T20:36:33.000Z | # Creating the Two Zone Example Data
#
# Transform the TM1 TAZ-based model 25 zone inputs to a two-zone (MAZ and TAZ) set of inputs for software development.
#
# The 25 zones are downtown San Francisco and they are converted to 25 MAZs.
# MAZs 1,2,3,4 are small and adjacent and assigned TAZ 2 and TAP 10002.
# MAZs 13,14,15 are small and adjacent and assigned TAZ 14 and TAP 10014.
# TAZs 1,3,4,13,15 are removed from the final data set.
#
# This script should work for the full TM1 example as well.
import os
import sys
import pandas as pd
import numpy as np
import openmatrix as omx
# Create example directory
input_data = os.path.join(os.path.dirname(__file__), '../data_1')
output_data = os.path.join(os.path.dirname(__file__), '../data_2')
MAZ_MULTIPLIER = 1000
# ### initialize output data directory
# new empty output_dir
if os.path.exists(output_data):
# shutil.rmtree(output_data)
# os.makedirs(output_data)
file_type = ('csv', 'omx')
for file_name in os.listdir(output_data):
if file_name.endswith(file_type):
os.unlink(os.path.join(output_data, file_name))
else:
os.makedirs(output_data)
# ### Convert tazs to mazs and add transit access distance by mode
land_use = pd.read_csv(os.path.join(input_data, 'land_use.csv'))
if 'ZONE' in land_use.columns:
land_use.insert(loc=0, column='MAZ', value=land_use.ZONE)
land_use.insert(loc=1, column='TAZ', value=land_use.ZONE)
land_use.drop(columns=['ZONE'], inplace=True)
else:
land_use.insert(loc=0, column='MAZ', value=land_use.TAZ)
land_use.TAZ = land_use.TAZ.replace([1, 2, 3, 4], 2)
land_use.TAZ = land_use.TAZ.replace([13, 14, 15], 14)
# make MAZ indexes different from TAZ to surface MAZ/TAZ confusion errors and omissions
land_use.MAZ *= MAZ_MULTIPLIER
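# Illustrative sanity check (assumes the 25-zone regrouping described in the
# header comment): TAZs 1, 3, 4, 13 and 15 must be gone, and every MAZ label
# must be a multiple of MAZ_MULTIPLIER.
assert not ({1, 3, 4, 13, 15} & set(land_use.TAZ.unique()))
assert (land_use.MAZ % MAZ_MULTIPLIER == 0).all()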
shortWalk = 0.333 # the tm1 example assumes this distance for transit access
longWalk = 0.667
land_use['access_dist_transit'] = shortWalk
# FIXME - could assign longWalk where maz != taz, but then results would differ from one-zone
# land_use['access_dist_transit'] =\
# np.where(land_use.TAZ*MAZ_MULTIPLIER==land_use.MAZ, shortWalk, longWalk)
land_use.to_csv(os.path.join(output_data, 'land_use.csv'), index=False)
# ### Put households in mazs instead of tazs
households = pd.read_csv(os.path.join(input_data, 'households.csv'))
households.rename(columns={'TAZ': 'MAZ'}, inplace=True)
households.MAZ *= MAZ_MULTIPLIER
households.to_csv(os.path.join(output_data, 'households.csv'), index=False)
persons = pd.read_csv(os.path.join(input_data, 'persons.csv'))
persons.to_csv(os.path.join(output_data, 'persons.csv'), index=False)
# ### Create maz correspondence file
# FIXME - not clear we need this
maz_df = land_use[['MAZ', 'TAZ']]
maz_df.to_csv(os.path.join(output_data, 'maz.csv'), index=False)
print("maz.csv\n%s" % (maz_df.head(6), ))
# ### Create taz file
# TAZ
# 2
# 5
# 6
# 7
new_zone_labels = np.unique(land_use.TAZ)
new_zone_indexes = new_zone_labels - 1  # zone labels are 1-based; convert to 0-based skim indexes
taz_df = pd.DataFrame({'TAZ': new_zone_labels}, index=new_zone_indexes)
taz_df.to_csv(os.path.join(output_data, 'taz.csv'), index=False)
print("taz.csv\n%s" % (taz_df.head(6), ))
# currently this has only the one TAZ column, but the legacy table had:
# index TAZ
# offset int64
# terminal_time float64 # occasional small integer (1-5), but mostly blank (only present if the zone has a TAP?)
# ptype float64 # parking type at TAP? (rarer than terminal_time, never alone)
# ### Create taz skims
with omx.open_file(os.path.join(input_data, 'skims.omx'), 'r') as skims_file, \
omx.open_file(os.path.join(output_data, 'taz_skims.omx'), "w") as output_skims_file:
skims = skims_file.list_matrices()
num_zones = skims_file.shape()[0]
# assume zones labels were 1-based in skims file
assert not skims_file.listMappings()
assert num_zones == len(land_use)
for skim_name in skims_file.list_matrices():
old_skim = skims_file[skim_name][:]
new_skim = old_skim[new_zone_indexes, :][:, new_zone_indexes]
output_skims_file[skim_name] = new_skim
# print("skim:", skim_name, ": shape", str(new_skim.shape))
output_skims_file.create_mapping("taz", new_zone_labels)
print("taz skims created: " + os.path.join(output_data, 'taz_skims.omx'))
# ### Create maz to maz time/distance
max_distance_for_walk = 1.0
max_distance_for_bike = 5.0
with omx.open_file(os.path.join(input_data, 'skims.omx')) as skims_file:
# create df with DIST column
maz_to_maz = pd.DataFrame(np.transpose(skims_file['DIST'])).unstack().reset_index()
maz_to_maz.columns = ['OMAZ', 'DMAZ', 'DIST']
maz_to_maz['OMAZ'] = (maz_to_maz['OMAZ'] + 1) * MAZ_MULTIPLIER
maz_to_maz['DMAZ'] = (maz_to_maz['DMAZ'] + 1) * MAZ_MULTIPLIER
# additional columns
for c in ['DISTBIKE', 'DISTWALK']:
maz_to_maz[c] = pd.DataFrame(np.transpose(skims_file[c])).unstack().values
maz_to_maz.loc[maz_to_maz['DIST'] <= max_distance_for_walk, ['OMAZ', 'DMAZ', 'DISTWALK']].\
to_csv(os.path.join(output_data, 'maz_to_maz_walk.csv'), index=False)
maz_to_maz.loc[maz_to_maz['DIST'] <= max_distance_for_bike, ['OMAZ', 'DMAZ', 'DIST', 'DISTBIKE']].\
to_csv(os.path.join(output_data, 'maz_to_maz_bike.csv'), index=False)
sys.exit(0)
| 34.664474 | 118 | 0.709812 |
856fe8dd9cdeb97a549429bb7b7f3c2635e9c426 | 41,882 | py | Python | lib/galaxy/tools/data/__init__.py | davelsan/galaxy | fc8bef2810f89ea0dbc5f24aaa31fed22230cb74 | [
"CC-BY-3.0"
] | null | null | null | lib/galaxy/tools/data/__init__.py | davelsan/galaxy | fc8bef2810f89ea0dbc5f24aaa31fed22230cb74 | [
"CC-BY-3.0"
] | null | null | null | lib/galaxy/tools/data/__init__.py | davelsan/galaxy | fc8bef2810f89ea0dbc5f24aaa31fed22230cb74 | [
"CC-BY-3.0"
] | null | null | null | """
Manage tool data tables, which store (at the application level) data that is
used by tools, for example in the generation of dynamic options. Tables are
loaded and stored by names which tools use to refer to them. This allows
users to configure data tables for a local Galaxy instance without needing
to modify the tool configurations.
"""
import errno
import hashlib
import logging
import os
import os.path
import re
import string
import time
from collections import OrderedDict
from glob import glob
from tempfile import NamedTemporaryFile
from xml.etree import ElementTree
import refgenconf
import requests
from galaxy import util
from galaxy.util import RW_R__R__
from galaxy.util.dictifiable import Dictifiable
from galaxy.util.renamed_temporary_file import RenamedTemporaryFile
from galaxy.util.template import fill_template
log = logging.getLogger(__name__)
DEFAULT_TABLE_TYPE = 'tabular'
TOOL_DATA_TABLE_CONF_XML = """<?xml version="1.0"?>
<tables>
</tables>
"""
class ToolDataPathFiles(object):
def __init__(self, tool_data_path):
self.tool_data_path = os.path.abspath(tool_data_path)
self.update_time = 0
@property
def tool_data_path_files(self):
if time.time() - self.update_time > 1:
self.update_files()
return self._tool_data_path_files
def update_files(self):
try:
content = os.walk(self.tool_data_path)
            self._tool_data_path_files = set(filter(os.path.exists, [os.path.join(dirpath, fn) for dirpath, _, fn_list in content for fn in fn_list if fn and (fn.endswith('.loc') or fn.endswith('.loc.sample'))]))
self.update_time = time.time()
except Exception:
            log.exception("Failed to update list of tool data path files")
self._tool_data_path_files = set()
def exists(self, path):
path = os.path.abspath(path)
if path in self.tool_data_path_files:
return True
else:
return os.path.exists(path)
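# Behaviour note (illustrative, path is a placeholder): the cached file set
# above is re-walked at most once per second, so e.g.
#   tdpf = ToolDataPathFiles('tool-data')
#   tdpf.exists('tool-data/all_fasta.loc')  # hits the cached set first,
#                                           # then falls back to the disk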
class ToolDataTableManager(object):
"""Manages a collection of tool data tables"""
def __init__(self, tool_data_path, config_filename=None, tool_data_table_config_path_set=None, other_config_dict=None):
self.tool_data_path = tool_data_path
# This stores all defined data table entries from both the tool_data_table_conf.xml file and the shed_tool_data_table_conf.xml file
# at server startup. If tool shed repositories are installed that contain a valid file named tool_data_table_conf.xml.sample, entries
# from that file are inserted into this dict at the time of installation.
self.data_tables = {}
self.tool_data_path_files = ToolDataPathFiles(self.tool_data_path)
self.other_config_dict = other_config_dict or {}
for single_config_filename in util.listify(config_filename):
if not single_config_filename:
continue
self.load_from_config_file(single_config_filename, self.tool_data_path, from_shed_config=False)
def __getitem__(self, key):
return self.data_tables.__getitem__(key)
def __setitem__(self, key, value):
return self.data_tables.__setitem__(key, value)
def __contains__(self, key):
return self.data_tables.__contains__(key)
def get(self, name, default=None):
try:
return self[name]
except KeyError:
return default
def set(self, name, value):
self[name] = value
def get_tables(self):
return self.data_tables
def load_from_config_file(self, config_filename, tool_data_path, from_shed_config=False):
"""
This method is called under 3 conditions:
1. When the ToolDataTableManager is initialized (see __init__ above).
2. Just after the ToolDataTableManager is initialized and the additional entries defined by shed_tool_data_table_conf.xml
are being loaded into the ToolDataTableManager.data_tables.
3. When a tool shed repository that includes a tool_data_table_conf.xml.sample file is being installed into a local
        Galaxy instance. In this case, there are 2 entry types to handle: files whose root tag is <tables>,
        and files whose root tag is <table> (see add_new_entries_from_config_file below for examples of both).
"""
table_elems = []
if not isinstance(config_filename, list):
config_filename = [config_filename]
for filename in config_filename:
tree = util.parse_xml(filename)
root = tree.getroot()
for table_elem in root.findall('table'):
table = ToolDataTable.from_elem(table_elem, tool_data_path, from_shed_config, filename=filename, tool_data_path_files=self.tool_data_path_files, other_config_dict=self.other_config_dict)
table_elems.append(table_elem)
if table.name not in self.data_tables:
self.data_tables[table.name] = table
log.debug("Loaded tool data table '%s' from file '%s'", table.name, filename)
else:
log.debug("Loading another instance of data table '%s' from file '%s', attempting to merge content.", table.name, filename)
self.data_tables[table.name].merge_tool_data_table(table, allow_duplicates=False) # only merge content, do not persist to disk, do not allow duplicate rows when merging
# FIXME: This does not account for an entry with the same unique build ID, but a different path.
return table_elems
def add_new_entries_from_config_file(self, config_filename, tool_data_path, shed_tool_data_table_config, persist=False):
"""
This method is called when a tool shed repository that includes a tool_data_table_conf.xml.sample file is being
installed into a local galaxy instance. We have 2 cases to handle, files whose root tag is <tables>, for example::
<tables>
<!-- Location of Tmap files -->
<table name="tmap_indexes" comment_char="#">
<columns>value, dbkey, name, path</columns>
<file path="tool-data/tmap_index.loc" />
</table>
</tables>
and files whose root tag is <table>, for example::
<!-- Location of Tmap files -->
<table name="tmap_indexes" comment_char="#">
<columns>value, dbkey, name, path</columns>
<file path="tool-data/tmap_index.loc" />
</table>
"""
error_message = ''
try:
table_elems = self.load_from_config_file(config_filename=config_filename,
tool_data_path=tool_data_path,
from_shed_config=True)
except Exception as e:
error_message = 'Error attempting to parse file %s: %s' % (str(os.path.split(config_filename)[1]), util.unicodify(e))
log.debug(error_message, exc_info=True)
table_elems = []
if persist:
# Persist Galaxy's version of the changed tool_data_table_conf.xml file.
self.to_xml_file(shed_tool_data_table_config, table_elems)
return table_elems, error_message
def to_xml_file(self, shed_tool_data_table_config, new_elems=None, remove_elems=None):
"""
Write the current in-memory version of the shed_tool_data_table_conf.xml file to disk.
remove_elems are removed before new_elems are added.
"""
if not (new_elems or remove_elems):
log.debug('ToolDataTableManager.to_xml_file called without any elements to add or remove.')
return # no changes provided, no need to persist any changes
if not new_elems:
new_elems = []
if not remove_elems:
remove_elems = []
full_path = os.path.abspath(shed_tool_data_table_config)
# FIXME: we should lock changing this file by other threads / head nodes
try:
try:
tree = util.parse_xml(full_path)
except (OSError, IOError) as e:
if e.errno == errno.ENOENT:
with open(full_path, 'w') as fh:
fh.write(TOOL_DATA_TABLE_CONF_XML)
tree = util.parse_xml(full_path)
else:
raise
root = tree.getroot()
out_elems = [elem for elem in root]
except Exception as e:
out_elems = []
log.debug('Could not parse existing tool data table config, assume no existing elements: %s', e)
for elem in remove_elems:
# handle multiple occurrences of remove elem in existing elems
while elem in out_elems:
                out_elems.remove(elem)
# add new elems
out_elems.extend(new_elems)
out_path_is_new = not os.path.exists(full_path)
root = ElementTree.fromstring('<?xml version="1.0"?>\n<tables></tables>')
for elem in out_elems:
root.append(elem)
with RenamedTemporaryFile(full_path, mode='w') as out:
out.write(util.xml_to_string(root, pretty=True))
os.chmod(full_path, RW_R__R__)
if out_path_is_new:
self.tool_data_path_files.update_files()
def reload_tables(self, table_names=None, path=None):
"""
Reload tool data tables. If neither table_names nor path is given, reloads all tool data tables.
"""
tables = self.get_tables()
if not table_names:
if path:
table_names = self.get_table_names_by_path(path)
else:
table_names = list(tables.keys())
elif not isinstance(table_names, list):
table_names = [table_names]
for table_name in table_names:
tables[table_name].reload_from_files()
log.debug("Reloaded tool data table '%s' from files.", table_name)
return table_names
def get_table_names_by_path(self, path):
"""Returns a list of table names given a path"""
table_names = set()
for name, data_table in self.data_tables.items():
if path in data_table.filenames:
table_names.add(name)
return list(table_names)
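# Hedged usage sketch (illustrative wiring, not Galaxy's actual bootstrap;
# paths are placeholders): build a manager from a data table config, then
# read one table's rows by name.
#
#   tdtm = ToolDataTableManager(
#       tool_data_path='tool-data',
#       config_filename='config/tool_data_table_conf.xml')
#   table = tdtm.get('all_fasta')
#   if table is not None:
#       for entry in table.get_named_fields_list():
#           print(entry['value'], entry.get('path'))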
class ToolDataTable(object):
@classmethod
def from_elem(cls, table_elem, tool_data_path, from_shed_config, filename, tool_data_path_files, other_config_dict=None):
table_type = table_elem.get('type', 'tabular')
assert table_type in tool_data_table_types, "Unknown data table type '%s'" % table_type
return tool_data_table_types[table_type](table_elem, tool_data_path, from_shed_config=from_shed_config, filename=filename, tool_data_path_files=tool_data_path_files, other_config_dict=other_config_dict)
def __init__(self, config_element, tool_data_path, from_shed_config=False, filename=None, tool_data_path_files=None, other_config_dict=None):
self.name = config_element.get('name')
self.comment_char = config_element.get('comment_char')
self.empty_field_value = config_element.get('empty_field_value', '')
self.empty_field_values = {}
self.allow_duplicate_entries = util.asbool(config_element.get('allow_duplicate_entries', True))
self.here = filename and os.path.dirname(filename)
self.filenames = OrderedDict()
self.tool_data_path = tool_data_path
self.tool_data_path_files = tool_data_path_files
self.other_config_dict = other_config_dict or {}
self.missing_index_file = None
# increment this variable any time a new entry is added, or when the table is totally reloaded
# This value has no external meaning, and does not represent an abstract version of the underlying data
self._loaded_content_version = 1
self._load_info = ([config_element, tool_data_path], {'from_shed_config': from_shed_config, 'tool_data_path_files': self.tool_data_path_files, 'other_config_dict': other_config_dict})
self._merged_load_info = []
def _update_version(self, version=None):
if version is not None:
self._loaded_content_version = version
else:
self._loaded_content_version += 1
return self._loaded_content_version
def get_empty_field_by_name(self, name):
return self.empty_field_values.get(name, self.empty_field_value)
def _add_entry(self, entry, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd):
raise NotImplementedError("Abstract method")
def add_entry(self, entry, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd):
self._add_entry(entry, allow_duplicates=allow_duplicates, persist=persist, persist_on_error=persist_on_error, entry_source=entry_source, **kwd)
return self._update_version()
def add_entries(self, entries, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd):
for entry in entries:
self.add_entry(entry, allow_duplicates=allow_duplicates, persist=persist, persist_on_error=persist_on_error, entry_source=entry_source, **kwd)
return self._loaded_content_version
def _remove_entry(self, values):
raise NotImplementedError("Abstract method")
def remove_entry(self, values):
self._remove_entry(values)
return self._update_version()
def is_current_version(self, other_version):
return self._loaded_content_version == other_version
def merge_tool_data_table(self, other_table, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd):
raise NotImplementedError("Abstract method")
def reload_from_files(self):
new_version = self._update_version()
merged_info = self._merged_load_info
self.__init__(*self._load_info[0], **self._load_info[1])
self._update_version(version=new_version)
for (tool_data_table_class, load_info) in merged_info:
self.merge_tool_data_table(tool_data_table_class(*load_info[0], **load_info[1]), allow_duplicates=False)
return self._update_version()
class TabularToolDataTable(ToolDataTable, Dictifiable):
"""
Data stored in a tabular / separated value format on disk, allows multiple
files to be merged but all must have the same column definitions::
<table type="tabular" name="test">
<column name='...' index = '...' />
<file path="..." />
<file path="..." />
</table>
"""
dict_collection_visible_keys = ['name']
type_key = 'tabular'
def __init__(self, config_element, tool_data_path, from_shed_config=False, filename=None, tool_data_path_files=None, other_config_dict=None):
super(TabularToolDataTable, self).__init__(config_element, tool_data_path, from_shed_config, filename, tool_data_path_files, other_config_dict=other_config_dict)
self.config_element = config_element
self.data = []
self.configure_and_load(config_element, tool_data_path, from_shed_config)
def configure_and_load(self, config_element, tool_data_path, from_shed_config=False, url_timeout=10):
"""
Configure and load table from an XML element.
"""
self.separator = config_element.get('separator', '\t')
self.comment_char = config_element.get('comment_char', '#')
# Configure columns
self.parse_column_spec(config_element)
# store repo info if available:
repo_elem = config_element.find('tool_shed_repository')
if repo_elem is not None:
repo_info = dict(tool_shed=repo_elem.find('tool_shed').text, name=repo_elem.find('repository_name').text,
owner=repo_elem.find('repository_owner').text, installed_changeset_revision=repo_elem.find('installed_changeset_revision').text)
else:
repo_info = None
# Read every file
for file_element in config_element.findall('file'):
tmp_file = None
filename = file_element.get('path', None)
if filename is None:
# Handle URLs as files
filename = file_element.get('url', None)
if filename:
tmp_file = NamedTemporaryFile(prefix='TTDT_URL_%s-' % self.name, mode='w')
try:
tmp_file.write(requests.get(filename, timeout=url_timeout).text)
except Exception as e:
log.error('Error loading Data Table URL "%s": %s', filename, e)
continue
log.debug('Loading Data Table URL "%s" as filename "%s".', filename, tmp_file.name)
filename = tmp_file.name
tmp_file.flush()
else:
# Pull the filename from a global config
filename = file_element.get('from_config', None) or None
if filename:
filename = self.other_config_dict.get(filename, None)
filename = file_path = expand_here_template(filename, here=self.here)
found = False
if file_path is None:
log.debug("Encountered a file element (%s) that does not contain a path value when loading tool data table '%s'.", util.xml_to_string(file_element), self.name)
continue
# FIXME: splitting on and merging paths from a configuration file when loading is wonky
# Data should exist on disk in the state needed, i.e. the xml configuration should
# point directly to the desired file to load. Munging of the tool_data_tables_conf.xml.sample
# can be done during installing / testing / metadata resetting with the creation of a proper
# tool_data_tables_conf.xml file, containing correct <file path=> attributes. Allowing a
# path.join with a different root should be allowed, but splitting should not be necessary.
if tool_data_path and from_shed_config:
# Must identify with from_shed_config as well, because the
# regular galaxy app has and uses tool_data_path.
# We're loading a tool in the tool shed, so we cannot use the Galaxy tool-data
# directory which is hard-coded into the tool_data_table_conf.xml entries.
filename = os.path.split(file_path)[1]
filename = os.path.join(tool_data_path, filename)
if self.tool_data_path_files.exists(filename):
found = True
else:
# Since the path attribute can include a hard-coded path to a specific directory
# (e.g., <file path="tool-data/cg_crr_files.loc" />) which may not be the same value
# as self.tool_data_path, we'll parse the path to get the filename and see if it is
# in self.tool_data_path.
file_path, file_name = os.path.split(filename)
if file_path != self.tool_data_path:
corrected_filename = os.path.join(self.tool_data_path, file_name)
if self.tool_data_path_files.exists(corrected_filename):
filename = corrected_filename
found = True
elif not from_shed_config and self.tool_data_path_files.exists("%s.sample" % corrected_filename):
log.info("Could not find tool data %s, reading sample" % corrected_filename)
filename = "%s.sample" % corrected_filename
found = True
errors = []
if found:
self.extend_data_with(filename, errors=errors)
self._update_version()
else:
self.missing_index_file = filename
# TODO: some data tables need to exist (even if they are empty)
# for tools to load. In an installed Galaxy environment and the
# default tool_data_table_conf.xml, this will emit spurious
# warnings about missing location files that would otherwise be
# empty and we don't care about unless the admin chooses to
# populate them.
log.warning("Cannot find index file '%s' for tool data table '%s'" % (filename, self.name))
if filename not in self.filenames or not self.filenames[filename]['found']:
self.filenames[filename] = dict(found=found, filename=filename, from_shed_config=from_shed_config, tool_data_path=tool_data_path,
config_element=config_element, tool_shed_repository=repo_info, errors=errors)
else:
log.debug("Filename '%s' already exists in filenames (%s), not adding", filename, list(self.filenames.keys()))
# Remove URL tmp file
if tmp_file is not None:
tmp_file.close()
def merge_tool_data_table(self, other_table, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd):
assert self.columns == other_table.columns, "Merging tabular data tables with non matching columns is not allowed: %s:%s != %s:%s" % (self.name, self.columns, other_table.name, other_table.columns)
# merge filename info
for filename, info in other_table.filenames.items():
if filename not in self.filenames:
self.filenames[filename] = info
# save info about table
self._merged_load_info.append((other_table.__class__, other_table._load_info))
# If we are merging in a data table that does not allow duplicates, enforce that upon the data table
if self.allow_duplicate_entries and not other_table.allow_duplicate_entries:
log.debug('While attempting to merge tool data table "%s", the other instance of the table specified that duplicate entries are not allowed, now deduplicating all previous entries.', self.name)
self.allow_duplicate_entries = False
self._deduplicate_data()
# add data entries and return current data table version
return self.add_entries(other_table.data, allow_duplicates=allow_duplicates, persist=persist, persist_on_error=persist_on_error, entry_source=entry_source, **kwd)
def handle_found_index_file(self, filename):
self.missing_index_file = None
self.extend_data_with(filename)
def get_fields(self):
return self.data
def get_field(self, value):
rval = None
for i in self.get_named_fields_list():
if i['value'] == value:
rval = TabularToolDataField(i)
return rval
def get_named_fields_list(self):
rval = []
named_columns = self.get_column_name_list()
for fields in self.get_fields():
field_dict = {}
for i, field in enumerate(fields):
if i == len(named_columns):
break
field_name = named_columns[i]
if field_name is None:
                    field_name = i  # unnamed column: fall back to its 0-based index
field_dict[field_name] = field
rval.append(field_dict)
return rval
def get_version_fields(self):
return (self._loaded_content_version, self.get_fields())
def parse_column_spec(self, config_element):
"""
Parse column definitions, which can either be a set of 'column' elements
with a name and index (as in dynamic options config), or a shorthand
comma separated list of names in order as the text of a 'column_names'
element.
A column named 'value' is required.
"""
self.columns = {}
if config_element.find('columns') is not None:
column_names = util.xml_text(config_element.find('columns'))
column_names = [n.strip() for n in column_names.split(',')]
for index, name in enumerate(column_names):
self.columns[name] = index
self.largest_index = index
else:
self.largest_index = 0
for column_elem in config_element.findall('column'):
name = column_elem.get('name', None)
assert name is not None, "Required 'name' attribute missing from column def"
index = column_elem.get('index', None)
assert index is not None, "Required 'index' attribute missing from column def"
index = int(index)
self.columns[name] = index
if index > self.largest_index:
self.largest_index = index
empty_field_value = column_elem.get('empty_field_value', None)
if empty_field_value is not None:
self.empty_field_values[name] = empty_field_value
assert 'value' in self.columns, "Required 'value' column missing from column def"
if 'name' not in self.columns:
self.columns['name'] = self.columns['value']
def extend_data_with(self, filename, errors=None):
here = os.path.dirname(os.path.abspath(filename))
self.data.extend(self.parse_file_fields(filename, errors=errors, here=here))
if not self.allow_duplicate_entries:
self._deduplicate_data()
def parse_file_fields(self, filename, errors=None, here="__HERE__"):
"""
Parse separated lines from file and return a list of tuples.
TODO: Allow named access to fields using the column names.
"""
separator_char = "<TAB>" if self.separator == "\t" else self.separator
rval = []
with open(filename) as fh:
for i, line in enumerate(fh):
if line.lstrip().startswith(self.comment_char):
continue
line = line.rstrip("\n\r")
if line:
line = expand_here_template(line, here=here)
fields = line.split(self.separator)
if self.largest_index < len(fields):
rval.append(fields)
else:
line_error = "Line %i in tool data table '%s' is invalid (HINT: '%s' characters must be used to separate fields):\n%s" % ((i + 1), self.name, separator_char, line)
if errors is not None:
errors.append(line_error)
log.warning(line_error)
log.debug("Loaded %i lines from '%s' for '%s'", len(rval), filename, self.name)
return rval
def get_column_name_list(self):
rval = []
for i in range(self.largest_index + 1):
found_column = False
for name, index in self.columns.items():
if index == i:
if not found_column:
rval.append(name)
elif name == 'value':
# the column named 'value' always has priority over other named columns
rval[-1] = name
found_column = True
if not found_column:
rval.append(None)
return rval
def get_entry(self, query_attr, query_val, return_attr, default=None):
"""
Returns table entry associated with a col/val pair.
"""
rval = self.get_entries(query_attr, query_val, return_attr, default=default, limit=1)
if rval:
return rval[0]
return default
def get_entries(self, query_attr, query_val, return_attr, default=None, limit=None):
"""
Returns table entry associated with a col/val pair.
"""
query_col = self.columns.get(query_attr, None)
if query_col is None:
return default
if return_attr is not None:
return_col = self.columns.get(return_attr, None)
if return_col is None:
return default
rval = []
# Look for table entry.
for fields in self.get_fields():
if fields[query_col] == query_val:
if return_attr is None:
field_dict = {}
for i, col_name in enumerate(self.get_column_name_list()):
field_dict[col_name or i] = fields[i]
rval.append(field_dict)
else:
rval.append(fields[return_col])
if limit is not None and len(rval) == limit:
break
return rval or default
def get_filename_for_source(self, source, default=None):
if source:
# if dict, assume is compatible info dict, otherwise call method
if isinstance(source, dict):
source_repo_info = source
else:
source_repo_info = source.get_tool_shed_repository_info_dict()
else:
source_repo_info = None
filename = default
for name, value in self.filenames.items():
repo_info = value.get('tool_shed_repository', None)
if (not source_repo_info and not repo_info) or (source_repo_info and repo_info and source_repo_info == repo_info):
filename = name
break
return filename
def _add_entry(self, entry, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd):
# accepts dict or list of columns
if isinstance(entry, dict):
fields = []
for column_name in self.get_column_name_list():
if column_name not in entry:
log.debug("Using default column value for column '%s' when adding data table entry (%s) to table '%s'.", column_name, entry, self.name)
field_value = self.get_empty_field_by_name(column_name)
else:
field_value = entry[column_name]
fields.append(field_value)
else:
fields = entry
is_error = False
if self.largest_index < len(fields):
fields = self._replace_field_separators(fields)
if (allow_duplicates and self.allow_duplicate_entries) or fields not in self.get_fields():
self.data.append(fields)
else:
log.debug("Attempted to add fields (%s) to data table '%s', but this entry already exists and allow_duplicates is False.", fields, self.name)
is_error = True
else:
log.error("Attempted to add fields (%s) to data table '%s', but there were not enough fields specified ( %i < %i ).", fields, self.name, len(fields), self.largest_index + 1)
is_error = True
filename = None
if persist and (not is_error or persist_on_error):
filename = self.get_filename_for_source(entry_source)
if filename is None:
# should we default to using any filename here instead?
log.error("Unable to determine filename for persisting data table '%s' values: '%s'.", self.name, fields)
is_error = True
else:
# FIXME: Need to lock these files for editing
log.debug("Persisting changes to file: %s", filename)
try:
data_table_fh = open(filename, 'r+b')
except IOError as e:
log.warning('Error opening data table file (%s) with r+b, assuming file does not exist and will open as wb: %s', filename, e)
data_table_fh = open(filename, 'wb')
if os.stat(filename).st_size != 0:
# ensure last existing line ends with new line
data_table_fh.seek(-1, 2) # last char in file
last_char = data_table_fh.read(1)
if last_char not in [b'\n', b'\r']:
data_table_fh.write(b'\n')
fields = "%s\n" % self.separator.join(fields)
data_table_fh.write(fields.encode('utf-8'))
return not is_error
def _remove_entry(self, values):
# update every file
for filename in self.filenames:
if os.path.exists(filename):
values = self._replace_field_separators(values)
self.filter_file_fields(filename, values)
else:
log.warning("Cannot find index file '%s' for tool data table '%s'" % (filename, self.name))
self.reload_from_files()
def filter_file_fields(self, loc_file, values):
"""
Reads separated lines from file and print back only the lines that pass a filter.
"""
with open(loc_file) as reader:
rval = ""
for line in reader:
if line.lstrip().startswith(self.comment_char):
rval += line
else:
line_s = line.rstrip("\n\r")
if line_s:
fields = line_s.split(self.separator)
if fields != values:
rval += line
with open(loc_file, 'w') as writer:
writer.write(rval)
return rval
def _replace_field_separators(self, fields, separator=None, replace=None, comment_char=None):
# make sure none of the fields contain separator
# make sure separator replace is different from comment_char,
# due to possible leading replace
if separator is None:
separator = self.separator
if replace is None:
if separator == " ":
if comment_char == "\t":
replace = "_"
else:
replace = "\t"
else:
if comment_char == " ":
replace = "_"
else:
replace = " "
return [x.replace(separator, replace) for x in fields]
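    # Worked example (illustrative): with the default tab separator and no
    # comment_char override, _replace_field_separators(['a\tb', 'c']) returns
    # ['a b', 'c'], so a stray separator inside a field cannot split the
    # persisted row into extra columns.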
def _deduplicate_data(self):
# Remove duplicate entries, without recreating self.data object
dup_lines = []
hash_set = set()
for i, fields in enumerate(self.data):
fields_hash = hash(self.separator.join(fields))
if fields_hash in hash_set:
dup_lines.append(i)
log.debug('Found duplicate entry in tool data table "%s", but duplicates are not allowed, removing additional entry for: "%s"', self.name, fields)
else:
hash_set.add(fields_hash)
for i in reversed(dup_lines):
self.data.pop(i)
@property
def xml_string(self):
return util.xml_to_string(self.config_element)
def to_dict(self, view='collection'):
rval = super(TabularToolDataTable, self).to_dict()
if view == 'element':
rval['columns'] = sorted(self.columns.keys(), key=lambda x: self.columns[x])
rval['fields'] = self.get_fields()
return rval
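# Illustrative only: a table declaring <columns>value, dbkey, name, path</columns>
# would be backed by a tab-separated .loc file shaped like the following
# (comment lines starting with '#' are skipped by parse_file_fields above):
#   #value   dbkey   name           path
#   hg19     hg19    Human (hg19)   /depot/data/hg19/hg19.fa
#   mm10     mm10    Mouse (mm10)   /depot/data/mm10/mm10.fa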
class TabularToolDataField(Dictifiable):
dict_collection_visible_keys = []
def __init__(self, data):
self.data = data
def __getitem__(self, key):
return self.data[key]
def get_base_path(self):
return os.path.normpath(os.path.abspath(self.data['path']))
def get_base_dir(self):
path = self.get_base_path()
if not os.path.isdir(path):
path = os.path.dirname(path)
return path
def clean_base_dir(self, path):
return re.sub("^" + self.get_base_dir() + r"/*", "", path)
def get_files(self):
return glob(self.get_base_path() + "*")
def get_filesize_map(self, rm_base_dir=False):
out = {}
for path in self.get_files():
if rm_base_dir:
out[self.clean_base_dir(path)] = os.path.getsize(path)
else:
out[path] = os.path.getsize(path)
return out
def get_fingerprint(self):
sha1 = hashlib.sha1()
fmap = self.get_filesize_map(True)
for k in sorted(fmap.keys()):
sha1.update(util.smart_str(k))
sha1.update(util.smart_str(fmap[k]))
return sha1.hexdigest()
def to_dict(self):
rval = super(TabularToolDataField, self).to_dict()
rval['name'] = self.data['value']
rval['fields'] = self.data
        rval['base_dir'] = self.get_base_dir()
rval['files'] = self.get_filesize_map(True)
rval['fingerprint'] = self.get_fingerprint()
return rval
class RefgenieToolDataTable(TabularToolDataTable):
"""
Data stored in refgenie
<table name="all_fasta" type="refgenie" asset="fasta" >
<file path="refgenie.yml" />
<field name="value" template="true">${__REFGENIE_UUID__}</field>
<field name="dbkey" template="true">${__REFGENIE_GENOME__}</field>
<field name="name" template="true">${__REFGENIE_DISPLAY_NAME__}</field>
<field name="path" template="true">${__REFGENIE_ASSET__}</field>
</table>
"""
dict_collection_visible_keys = ['name']
type_key = 'refgenie'
def __init__(self, config_element, tool_data_path, from_shed_config=False, filename=None, tool_data_path_files=None, other_config_dict=None):
super(RefgenieToolDataTable, self).__init__(config_element, tool_data_path, from_shed_config, filename, tool_data_path_files, other_config_dict=other_config_dict)
self.config_element = config_element
self.data = []
self.configure_and_load(config_element, tool_data_path, from_shed_config)
def configure_and_load(self, config_element, tool_data_path, from_shed_config=False, url_timeout=10):
self.rg_asset = config_element.get('asset', None)
assert self.rg_asset, ValueError('You must specify an asset attribute.')
super(RefgenieToolDataTable, self).configure_and_load(config_element, tool_data_path, from_shed_config=from_shed_config, url_timeout=url_timeout)
def parse_column_spec(self, config_element):
self.columns = {}
self.key_map = {}
self.template_for_column = {}
self.strip_for_column = {}
self.largest_index = 0
for i, elem in enumerate(config_element.findall('field')):
name = elem.get('name', None)
assert name, ValueError('You must provide a name refgenie field element.')
value = elem.text
self.key_map[name] = value
column_index = int(elem.get('column_index', i))
empty_field_value = elem.get('empty_field_value', None)
if empty_field_value is not None:
self.empty_field_values[name] = empty_field_value
self.template_for_column[name] = util.asbool(elem.get('template', False))
self.strip_for_column[name] = util.asbool(elem.get('strip', False))
self.columns[name] = column_index
self.largest_index = max(self.largest_index, column_index)
if 'name' not in self.columns:
self.columns['name'] = self.columns['value']
def parse_file_fields(self, filename, errors=None, here="__HERE__"):
rgc = refgenconf.RefGenConf(filename)
rval = []
for genome in rgc.list_genomes_by_asset(self.rg_asset):
genome_attributes = rgc.get_genome_attributes(genome)
description = genome_attributes.get('description', None)
asset_list = rgc.list(genome, include_tags=True)[genome]
for tagged_asset in asset_list:
asset, tag = tagged_asset.rsplit(':', 1)
if asset != self.rg_asset:
continue
digest = rgc.id(genome, asset, tag=tag)
uuid = 'refgenie:%s/%s:%s@%s' % (genome, self.rg_asset, tag, digest)
display_name = description or '%s/%s' % (genome, tagged_asset)
def _seek_key(key):
return rgc.seek(genome, asset, tag_name=tag, seek_key=key)
template_dict = {
'__REFGENIE_UUID__': uuid,
'__REFGENIE_GENOME__': genome,
'__REFGENIE_TAG__': tag,
'__REFGENIE_DISPLAY_NAME__': display_name,
'__REFGENIE_ASSET__': rgc.seek(genome, asset, tag_name=tag),
'__REFGENIE_ASSET_NAME__': asset,
'__REFGENIE_DIGEST__': digest,
'__REFGENIE_GENOME_ATTRIBUTES__': genome_attributes,
'__REFGENIE__': rgc,
'__REFGENIE_SEEK_KEY__': _seek_key,
}
fields = [''] * (self.largest_index + 1)
for name, index in self.columns.items():
rg_value = self.key_map[name]
# Default is hard-coded value
if self.template_for_column.get(name, False):
rg_value = fill_template(rg_value, template_dict)
if self.strip_for_column.get(name, False):
rg_value = rg_value.strip()
fields[index] = rg_value
rval.append(fields)
log.debug("Loaded %i entries from refgenie '%s' asset '%s' for '%s'", len(rval), filename, self.rg_asset, self.name)
return rval
def _remove_entry(self, values):
raise NotImplementedError("Not supported")
def expand_here_template(content, here=None):
if here and content:
content = string.Template(content).safe_substitute({"__HERE__": here})
return content
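# Example: expand_here_template('${__HERE__}/cats.loc', here='/depot/tools')
# returns '/depot/tools/cats.loc'; safe_substitute leaves any other template
# variables in the string untouched.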
# Registry of tool data types by type_key
tool_data_table_types = dict([(cls.type_key, cls) for cls in [TabularToolDataTable, RefgenieToolDataTable]])
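# Quick sanity check of the registry (illustrative): from_elem dispatches on
# the XML 'type' attribute through this mapping, defaulting to 'tabular'.
assert tool_data_table_types['tabular'] is TabularToolDataTable
assert tool_data_table_types['refgenie'] is RefgenieToolDataTable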
| 46.432373 | 210 | 0.620362 |
d7b57b55788fc6e46a1ebd33f96951602b635edd | 4,387 | py | Python | src/bobbit/protocol/slack.py | ginglis13/bobbit | 0d59b1ec4cae0560da4bd7dc71e1fd825f839852 | [
"MIT"
] | null | null | null | src/bobbit/protocol/slack.py | ginglis13/bobbit | 0d59b1ec4cae0560da4bd7dc71e1fd825f839852 | [
"MIT"
] | null | null | null | src/bobbit/protocol/slack.py | ginglis13/bobbit | 0d59b1ec4cae0560da4bd7dc71e1fd825f839852 | [
"MIT"
] | null | null | null | ''' bobbit.protocol.slack '''
import itertools
import json
import logging
import re
import aiohttp
from bobbit.message import Message
from bobbit.protocol.base import BaseClient
# Slack Constants
SLACK_API_DOMAIN = 'https://api.slack.com'
SLACK_CHANNEL_RX = r'<#[0-9A-Z]+\|([^>]+)>'
# Slack Client
class SlackClient(BaseClient):
def __init__(self, *args, **kwargs):
self.token = kwargs['password']
self.url = None
self.counter = itertools.count()
self.channels = {}
self.http_client = aiohttp.ClientSession()
self.ws = None
# Slack methods
async def get_channel(self, channel):
if channel not in self.channels:
url = f'{SLACK_API_DOMAIN}/api/conversations.list'
params = {
'limit' : 1000,
'types' : 'public_channel,private_channel',
'exclude_archived': 'true',
'exclude_members' : 'true',
'token' : self.token,
}
async with self.http_client.get(url, params=params) as response:
data = await response.json()
if data['ok']:
for c in data['channels']:
self.channels['#' + c['name']] = c['id']
try:
return self.channels[channel]
except KeyError:
return channel
# Client methods
async def connect(self):
''' Connect to Slack via Websocket '''
url = f'{SLACK_API_DOMAIN}/api/rtm.connect'
params = {'token': self.token}
logging.info('Retrieving websocket URL from: %s', url)
async with self.http_client.get(url, params=params) as response:
data = await response.json()
self.url = data['url']
logging.info('Connecting to websocket: %s', self.url)
self.ws = await self.http_client.ws_connect(self.url)
async def send_message(self, message):
if message.channel.startswith('#'):
message.channel = await self.get_channel(message.channel)
message.body = self.format_message(message)
await self.ws.send_str(json.dumps({
'id' : next(self.counter),
'type' : 'message',
'channel' : message.channel,
'text' : message.body,
}))
logging.debug('Sent message: %s', message)
async def recv_message(self):
message = None
while not message:
ws_message = await self.ws.receive()
json_message = json.loads(ws_message.data)
logging.debug('Received JSON: %s', json_message)
if json_message.get('type') != 'message':
continue
try:
message = Message(
body = re.sub(SLACK_CHANNEL_RX, r'#\1', json_message['text']),
nick = json_message['user'],
channel = json_message['channel'],
)
except KeyError:
pass
logging.debug('Received message: %s', message)
return message
# Formatting
@staticmethod
def format_message(message):
if message.highlighted:
if message.nick.startswith('@') or message.nick.startswith('<'):
return f'{message.nick}: {message.body}'
else:
return f'<@{message.nick}>: {message.body}'
else:
return message.body
@staticmethod
def format_text(text, *args, **kwargs):
FORMAT_CODES = {
'bold' : '*',
'B' : '*',
'color' : '',
'C' : '',
'black' : '',
'blue' : '',
'green' : '',
'red' : '',
'brown' : '',
'magenta' : '',
'orange' : '',
'yellow' : '',
'lightgreen' : '',
'cyan' : '',
'lightcyan' : '',
'lightblue' : '',
'pink' : '',
'gray' : '',
'lightgray' : '',
'default' : '',
}
kwargs.update(FORMAT_CODES)
return text.format(*args, **kwargs)
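# Hedged usage sketch (event-loop wiring assumed, not bobbit's actual
# bootstrap; the token is a placeholder): connect, then echo every message
# back to its channel.
#   import asyncio
#   async def main():
#       client = SlackClient(password='xoxb-...')
#       await client.connect()
#       while True:
#           message = await client.recv_message()
#           await client.send_message(message)
#   asyncio.run(main())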
# vim: set sts=4 sw=4 ts=8 expandtab ft=python:
| 29.843537 | 85 | 0.49168 |
69bb3a683d7de07bd563a299b5fe24eb1fbd4a05 | 12,869 | py | Python | experiments/1dt_shallowwater/pred.py | flabowski/POD-UQNN | 1c81be432e69d24ae894828f42918fbc1fe54bc1 | [
"MIT"
] | 15 | 2020-05-29T11:42:14.000Z | 2022-03-20T03:53:44.000Z | experiments/1dt_shallowwater/pred.py | flabowski/POD-UQNN | 1c81be432e69d24ae894828f42918fbc1fe54bc1 | [
"MIT"
] | null | null | null | experiments/1dt_shallowwater/pred.py | flabowski/POD-UQNN | 1c81be432e69d24ae894828f42918fbc1fe54bc1 | [
"MIT"
] | 11 | 2020-06-09T01:16:22.000Z | 2021-04-27T08:53:02.000Z | """POD-NN modeling for 1D Shekel Equation."""
#%% Imports
import sys
import os
import numpy as np
import meshio
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
from scipy.interpolate import griddata
sys.path.append(os.path.join("..", ".."))
from poduqnn.podnnmodel import PodnnModel
from poduqnn.metrics import re_s
from poduqnn.plotting import figsize, savefig
from poduqnn.handling import sample_mu
from hyperparams import HP as hp
from hyperparams import u
#%% Load models
model = PodnnModel.load("cache")
X_v_train, v_train, U_train, X_v_val, v_val, U_val = model.load_train_data()
#%% Predict and restruct
U_pred, U_pred_sig = model.predict(X_v_val)
#%% Validation metrics
U_pred, _ = model.predict(X_v_val)
err_val = re_s(U_val, U_pred)
print(f"RE_v: {err_val:4f}")
#%% Sample the new model to generate a test prediction
mu_lhs = sample_mu(hp["n_s_tst"], np.array(hp["mu_min"]), np.array(hp["mu_max"]))
X_v_tst, U_tst, _, _ = \
model.create_snapshots(model.n_d, model.n_h, u, mu_lhs,
t_min=hp["t_min"], t_max=hp["t_max"])
U_pred, U_pred_sig = model.predict(X_v_tst)
print(f"RE_tst: {re_s(U_tst, U_pred):4f}")
U_tst = model.restruct(U_tst)[0]
U_pred = model.restruct(U_pred, n_t=hp["n_t"] - 1)[0]
#%% Samples graph
# hp["mu_min_out"] = [0.0005]
# hp["mu_max_out"] = [0.0105]
# n_samples = 3
# mu_lhs_in = sample_mu(n_samples, np.array(hp["mu_min"]), np.array(hp["mu_max"]))
# mu_lhs_out_min = sample_mu(n_samples, np.array(hp["mu_min_out"]), np.array(hp["mu_min"]))
# mu_lhs_out_max = sample_mu(n_samples, np.array(hp["mu_max"]), np.array(hp["mu_max_out"]))
# mu_lhs_out = np.vstack((mu_lhs_out_min, mu_lhs_out_max))
mu_lhs_in = np.array([12]).reshape(-1, 1)
mu_lhs_out = np.array([25]).reshape(-1, 1)
X_v_samples, _, _, _ = \
model.create_snapshots(model.n_d, model.n_h, u, mu_lhs_in,
t_min=hp["t_min"], t_max=hp["t_max"])
X_v_samples_out, _, _, _ = \
model.create_snapshots(model.n_d, model.n_h, u, mu_lhs_out,
t_min=hp["t_min"], t_max=hp["t_max"])
U_pred, U_pred_sig = model.predict(X_v_samples)
U_pred_out, U_pred_sig_out = model.predict(X_v_samples_out)
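# MPIW = mean prediction interval width. For a +/- 2*sigma interval the width
# is (mu + 2*sigma) - (mu - 2*sigma) = 4*sigma, hence the factor of 4 below.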
mpiw_tst = 4 * U_pred_sig.mean()
print(f"MPIW_tst: {mpiw_tst:.4e}")
mpiw_tst_out = 4 * U_pred_sig_out.mean()
print(f"MPIW_tst_out: {mpiw_tst_out:.4e}")
#%% Contours for demo
x = np.linspace(hp["x_min"], hp["x_max"], hp["n_x"])
t = np.linspace(hp["t_min"], hp["t_max"], hp["n_t"])
xxT, ttT = np.meshgrid(x, t)
xx, tt = xxT.T, ttT.T
XT = np.hstack((xx.flatten()[:, None], tt.flatten()[:, None]))
# Slices
n_samples = 1
times = [0, 25]
ylim = [[(-3, 28), (-3, 26)], [(-3, 28), (-3, 28)]]
has_sim_data = False
if os.path.exists(os.path.join("data", "sel.csv")):
has_sim_data = True
sel = np.loadtxt(os.path.join("data", "sel.csv"), skiprows=1, delimiter=",")[:, 6].astype("int")
for j, time in enumerate(times):
n_plot_x = 1
n_plot_y = 3
fig = plt.figure(figsize=figsize(n_plot_x, n_plot_y, scale=2.))
gs = fig.add_gridspec(n_plot_x, n_plot_y)
actual_row = 0
for row, mu_lhs in enumerate([mu_lhs_in, mu_lhs_out]):
X_v_samples, U_samples, _, _ = \
model.create_snapshots(model.n_d, model.n_h, u, mu_lhs,
t_min=hp["t_min"], t_max=hp["t_max"])
U_samples = np.reshape(U_samples, (hp["n_v"], hp["n_x"], hp["n_t"], -1))
U_samples = U_samples[0]
x = np.linspace(hp["x_min"], hp["x_max"], hp["n_x"])
idx = np.random.choice(X_v_samples.shape[0], n_samples, replace=False)
for col, idx_i in enumerate(idx):
lbl = r"{\scriptscriptstyle\textrm{tst}}" if row == 0 else r"{\scriptscriptstyle\textrm{out}}"
st = hp["n_t"] * col
en = hp["n_t"] * (col + 1)
X_i = X_v_samples[st:en, :]
if j > 0:
U_pred_i, U_pred_i_sig = model.predict(X_i)
U_pred_i = np.reshape(U_pred_i, (hp["n_v"], hp["n_x"], hp["n_t"], -1))
U_pred_i_sig = np.reshape(U_pred_i_sig, (hp["n_v"], hp["n_x"], hp["n_t"], -1))
U_pred_i = U_pred_i[0]
U_pred_i_sig = U_pred_i_sig[0]
else:
U_pred_i = U_samples
U_pred_i_sig = np.zeros_like(U_samples)
if row == 0 and j == 1:
ax = fig.add_subplot(gs[0, actual_row])
U_grid = griddata(XT, U_pred_i.flatten(), (xx, tt), method='cubic')
h = ax.imshow(U_grid.T, interpolation='nearest', cmap='rainbow',
extent=[x.min(), x.max(), t.min(), t.max()],
origin='lower', aspect='auto')
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.05)
ax.axhline(X_i[25, 0], color="w", ls="-.")
fig.colorbar(h, cax=cax)
ax.set_xlabel(r"$x\ [\textrm{m}]$")
ax.set_ylabel(r"$t\ [\textrm{s}]$")
# ax.set_title(r"$u_D(\bar{s_{\textrm{tst}}})$")
ax.set_title(r"$\hat{u}^\mu_D(s=" + f"{X_i[0, 1]:.1f}" + r"\textrm{ m}\in \Omega)$")
if row == 0 and j == 0:
ax = fig.add_subplot(gs[0, actual_row])
U_grid = griddata(XT, U_samples[:, :, col].flatten(), (xx, tt), method='cubic')
h = ax.imshow(U_grid.T, interpolation='nearest', cmap='rainbow',
extent=[x.min(), x.max(), t.min(), t.max()],
origin='lower', aspect='auto')
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.05)
ax.axhline(0.02, color="w", ls="-.")
fig.colorbar(h, cax=cax)
ax.set_xlabel(r"$x\ [\textrm{m}]$")
ax.set_ylabel(r"$t\ [\textrm{s}]$")
# ax.set_title(r"$u_D(\bar{s_{\textrm{tst}}})$")
ax.set_title(r"$u_D(s=" + f"{X_i[0, 1]:.1f}" + r"\textrm{ m}\in \Omega)$")
ax = fig.add_subplot(gs[0, actual_row + 1])
if has_sim_data:
vtkfilename = os.path.join("data", f"cas1_{int(X_i[0, 1])}m", f"0_FV-Paraview_{time}.vtk")
if os.path.exists(vtkfilename):
vtk = meshio.read(vtkfilename)
x_sim = vtk.points[sel, 1]
h_sim = vtk.point_data["h"][sel]
ax.plot(x_sim, h_sim, "k:", label=r"$u_\textrm{sim}(s_{" + lbl + r"})$")
ax.plot(x, U_pred_i[:, time, 0], "b-", label=r"$\hat{u}^\mu_D(s_{" + lbl + r"})$")
ax.plot(x, U_samples[:, time, col], "r--", label=r"$u_D(s_{" + lbl + r"})$")
lower = U_pred_i[:, time, 0] - 2*U_pred_i_sig[:, time, 0]
upper = U_pred_i[:, time, 0] + 2*U_pred_i_sig[:, time, 0]
ax.fill_between(x, lower, upper, alpha=0.2, label=r"$\pm2\hat{u}^\sigma_D(s_{" + lbl + r"})$")
if row == 0 and j == 1:
ax.text(0, 20, "POD-EnsNN")
ax.set_xlabel(r"$x\ [\textrm{m}]$")
ax.set_ylabel(r"$h\ [\textrm{m}]$")
ax.set_ylim(ylim[j][row])
if row == 0:
ax.set_title(r"$\Delta\eta=" + f"{X_i[0, 1]:.1f}" + r"\textrm{ m}\in \Omega"
+ f",\ t={X_i[time, 0]:.2f}" + r"\ \textrm{s}$")
else:
ax.set_title(r"$\Delta\eta=" + f"{X_i[0, 1]:.1f}" + r"\textrm{ m}\in \Omega{\footnotesize\textrm{out}}"
+ f",\ t={X_i[time, 0]:.2f}" + r"\ \textrm{s}$")
actual_row += 1
if j == 0:
ax.legend()
plt.tight_layout()
savefig(os.path.join("results", f"podensnn-1dswt-graph-meansamples-h-{j}"), True)
#%% Velocity plots
times = [0, 25]
# ylim = [(-1, 1), (-1, 10)]
ylim = [[(-3, 17), (-3, 17)], [(-3, 17), (-3, 17)]]
for j, time in enumerate(times):
n_plot_x = 1
n_plot_y = 3
fig = plt.figure(figsize=figsize(n_plot_x, n_plot_y, scale=2.))
gs = fig.add_gridspec(n_plot_x, n_plot_y)
actual_row = 0
for row, mu_lhs in enumerate([mu_lhs_in, mu_lhs_out]):
X_v_samples, U_samples, _, _ = \
model.create_snapshots(model.n_d, model.n_h, u, mu_lhs,
t_min=hp["t_min"], t_max=hp["t_max"])
U_samples = np.reshape(U_samples, (hp["n_v"], hp["n_x"], hp["n_t"], -1))
U_samples = U_samples[1]
x = np.linspace(hp["x_min"], hp["x_max"], hp["n_x"])
idx = np.random.choice(X_v_samples.shape[0], n_samples, replace=False)
for col, idx_i in enumerate(idx):
lbl = r"{\scriptscriptstyle\textrm{tst}}" if row == 0 else r"{\scriptscriptstyle\textrm{out}}"
st = hp["n_t"] * col
en = hp["n_t"] * (col + 1)
X_i = X_v_samples[st:en, :]
if j > 0:
U_pred_i, U_pred_i_sig = model.predict(X_i)
U_pred_i = np.reshape(U_pred_i, (hp["n_v"], hp["n_x"], hp["n_t"], -1))
U_pred_i_sig = np.reshape(U_pred_i_sig, (hp["n_v"], hp["n_x"], hp["n_t"], -1))
U_pred_i = U_pred_i[1]
U_pred_i_sig = U_pred_i_sig[1]
else:
U_pred_i = U_samples
U_pred_i_sig = np.zeros_like(U_samples)
if row == 0 and j == 1:
ax = fig.add_subplot(gs[0, actual_row])
U_grid = griddata(XT, U_pred_i.flatten(), (xx, tt), method='cubic')
h = ax.imshow(U_grid.T, interpolation='nearest', cmap='rainbow',
extent=[x.min(), x.max(), t.min(), t.max()],
origin='lower', aspect='auto')
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.05)
ax.axhline(X_i[25, 0], color="w", ls="-.")
fig.colorbar(h, cax=cax)
ax.set_xlabel(r"$x\ [\textrm{m}]$")
ax.set_ylabel(r"$t\ [\textrm{s}]$")
# ax.set_title(r"$u_D(\bar{s_{\textrm{tst}}})$")
ax.set_title(r"$\hat{u}_D^\mu(s=" + f"{X_i[0, 1]:.1f}" + r"\textrm{ m}\in \Omega)$")
if row == 0 and j == 0:
ax = fig.add_subplot(gs[0, actual_row])
U_grid = griddata(XT, U_samples[:, :, col].flatten(), (xx, tt), method='cubic')
h = ax.imshow(U_grid.T, interpolation='nearest', cmap='rainbow',
extent=[x.min(), x.max(), t.min(), t.max()],
origin='lower', aspect='auto')
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.05)
ax.axhline(0.02, color="w", ls="-.")
fig.colorbar(h, cax=cax)
ax.set_xlabel(r"$x\ [\textrm{m}]$")
ax.set_ylabel(r"$t\ [\textrm{s}]$")
# ax.set_title(r"$u_D(\bar{s_{\textrm{tst}}})$")
ax.set_title(r"$u_D(s=" + f"{X_i[0, 1]:.1f}" + r"\textrm{ m}\in \Omega)$")
ax = fig.add_subplot(gs[0, actual_row + 1])
if has_sim_data:
vtkfilename = os.path.join("data", f"cas1_{int(X_i[0, 1])}m", f"0_FV-Paraview_{time}.vtk")
if os.path.exists(vtkfilename):
vtk = meshio.read(vtkfilename)
x_sim = vtk.points[sel, 1]
u_sim = vtk.point_data["velocity"][sel, 1]
ax.plot(x_sim, u_sim, "k:", label=r"$u_\textrm{sim}(s_{" + lbl + r"})$")
ax.plot(x, U_pred_i[:, time, 0], "b-", label=r"$\hat{u}_D(s_{" + lbl + r"})$")
ax.plot(x, U_samples[:, time, col], "r--", label=r"$u_D(s_{" + lbl + r"})$")
lower = U_pred_i[:, time, 0] - 2*U_pred_i_sig[:, time, 0]
upper = U_pred_i[:, time, 0] + 2*U_pred_i_sig[:, time, 0]
ax.fill_between(x, lower, upper, alpha=0.2, label=r"$2\sigma_D(s_{" + lbl + r"})$")
if row == 0 and j == 1:
ax.text(0, 10, "POD-EnsNN")
ax.set_xlabel(r"$x\ [\textrm{m}]$")
ax.set_ylabel(r"$u\ [\textrm{m/s}]$")
ax.set_ylim(ylim[j][row])
if row == 0:
ax.set_title(r"$\Delta\eta=" + f"{X_i[0, 1]:.1f}" + r"\textrm{ m}\in \Omega"
+ f",\ t={X_i[time, 0]:.2f}" + r"\ \textrm{s}$")
else:
ax.set_title(r"$\Delta\eta=" + f"{X_i[0, 1]:.1f}" + r"\textrm{ m}\in \Omega{\footnotesize\textrm{out}}"
+ f",\ t={X_i[time, 0]:.2f}" + r"\ \textrm{s}$")
actual_row += 1
if j == 0:
ax.legend()
plt.tight_layout()
savefig(os.path.join("results", f"podensnn-1dswt-graph-meansamples-u-{j}"), True)
| 47.662963 | 119 | 0.52102 |
fc9ed4d15ec37eaa816a6defefa8090b8393dc6c | 5,930 | py | Python | lib/platform.py | haribommi/vaapi-fits | cbf2a463bd3b2c9af5c45a1376b0bde2b703ed23 | [
"BSD-3-Clause"
] | null | null | null | lib/platform.py | haribommi/vaapi-fits | cbf2a463bd3b2c9af5c45a1376b0bde2b703ed23 | [
"BSD-3-Clause"
] | null | null | null | lib/platform.py | haribommi/vaapi-fits | cbf2a463bd3b2c9af5c45a1376b0bde2b703ed23 | [
"BSD-3-Clause"
] | null | null | null | ###
### Copyright (C) 2018-2019 Intel Corporation
###
### SPDX-License-Identifier: BSD-3-Clause
###
from __future__ import absolute_import
import os
JPEG_DECODE_PLATFORMS = []
JPEG_ENCODE_PLATFORMS = []
MPEG2_DECODE_PLATFORMS = []
MPEG2_ENCODE_PLATFORMS = []
VC1_DECODE_PLATFORMS = []
AVC_DECODE_PLATFORMS = []
AVC_ENCODE_PLATFORMS = []
AVC_ENCODE_LP_PLATFORMS = []
HEVC_DECODE_8BIT_PLATFORMS = []
HEVC_ENCODE_8BIT_PLATFORMS = []
HEVC_ENCODE_8BIT_LP_PLATFORMS = []
HEVC_DECODE_10BIT_PLATFORMS = []
HEVC_ENCODE_10BIT_PLATFORMS = []
HEVC_ENCODE_10BIT_LP_PLATFORMS= []
VP8_DECODE_PLATFORMS = []
VP8_ENCODE_PLATFORMS = []
VP9_DECODE_8BIT_PLATFORMS = []
VP9_ENCODE_8BIT_PLATFORMS = []
VP9_DECODE_10BIT_PLATFORMS = []
VP9_ENCODE_10BIT_PLATFORMS = []
VPP_PLATFORMS = []
driver = os.environ.get("LIBVA_DRIVER_NAME", None) or "i965"
if "i965" == driver:
JPEG_DECODE_PLATFORMS = [ "BYT", "HSW", "BDW", "BSW", "SKL", "APL", "KBL", "GLK", "CFL", "WHL"]
JPEG_ENCODE_PLATFORMS = [ "SKL", "APL", "KBL", "GLK", "CFL", "WHL"]
MPEG2_DECODE_PLATFORMS = [ "BYT", "HSW", "BDW", "BSW", "SKL", "APL", "KBL", "GLK", "CFL", "WHL"]
MPEG2_ENCODE_PLATFORMS = ["IVB", "BYT", "HSW", "BDW", "BSW", "SKL", "KBL", "CFL", "WHL"]
VC1_DECODE_PLATFORMS = [ "BYT", "HSW", "BDW", "BSW", "SKL", "APL", "KBL", "GLK", "CFL", "WHL"]
AVC_DECODE_PLATFORMS = [ "BYT", "HSW", "BDW", "BSW", "SKL", "APL", "KBL", "GLK", "CFL", "WHL"]
AVC_ENCODE_PLATFORMS = ["IVB", "BYT", "HSW", "BDW", "BSW", "SKL", "APL", "KBL", "GLK", "CFL", "WHL"]
AVC_ENCODE_LP_PLATFORMS = [ "SKL", "APL", "KBL", "CFL", "WHL"]
HEVC_DECODE_8BIT_PLATFORMS = [ "BSW", "SKL", "APL", "KBL", "GLK", "CFL", "WHL"]
HEVC_ENCODE_8BIT_PLATFORMS = [ "SKL", "APL", "KBL", "GLK", "CFL", "WHL"]
HEVC_DECODE_10BIT_PLATFORMS = [ "APL", "KBL", "GLK", "CFL", "WHL"]
HEVC_ENCODE_10BIT_PLATFORMS = [ "KBL", "GLK", "CFL", "WHL"]
VP8_DECODE_PLATFORMS = [ "BDW", "BSW", "SKL", "APL", "KBL", "GLK", "CFL", "WHL"]
VP8_ENCODE_PLATFORMS = [ "SKL", "APL", "KBL", "GLK", "CFL", "WHL"]
VP9_DECODE_8BIT_PLATFORMS = [ "APL", "KBL", "GLK", "CFL", "WHL"]
VP9_ENCODE_8BIT_PLATFORMS = [ "KBL", "GLK", "CFL", "WHL"]
VP9_DECODE_10BIT_PLATFORMS = [ "KBL", "GLK", "CFL", "WHL"]
VPP_PLATFORMS = ["IVB", "BYT", "HSW", "BDW", "BSW", "SKL", "APL", "KBL", "GLK", "CFL", "WHL"]
elif "iHD" == driver:
JPEG_DECODE_PLATFORMS = [ "BDW", "SKL", "APL", "KBL", "CFL", "WHL", "ICL"]
JPEG_ENCODE_PLATFORMS = [ "SKL", "APL", "KBL", "CFL", "WHL", "ICL"]
MPEG2_DECODE_PLATFORMS = [ "BDW", "SKL", "APL", "KBL", "CFL", "WHL", "ICL"]
MPEG2_ENCODE_PLATFORMS = [ "BDW", "SKL", "KBL", "CFL", "WHL", "ICL"]
VC1_DECODE_PLATFORMS = [ "BDW", "SKL", "APL", "KBL", "CFL", "WHL", "ICL"]
AVC_DECODE_PLATFORMS = [ "BDW", "SKL", "APL", "KBL", "CFL", "WHL", "ICL"]
AVC_ENCODE_PLATFORMS = [ "BDW", "SKL", "APL", "KBL", "CFL", "WHL", "ICL"]
AVC_ENCODE_LP_PLATFORMS = [ "SKL", "APL", "KBL", "CFL", "WHL", "ICL"]
HEVC_DECODE_8BIT_PLATFORMS = [ "SKL", "APL", "KBL", "CFL", "WHL", "ICL"]
HEVC_ENCODE_8BIT_PLATFORMS = [ "SKL", "APL", "KBL", "CFL", "WHL", "ICL"]
HEVC_ENCODE_8BIT_LP_PLATFORMS = [ "ICL"]
HEVC_DECODE_10BIT_PLATFORMS = [ "APL", "KBL", "CFL", "WHL", "ICL"]
HEVC_ENCODE_10BIT_PLATFORMS = [ "KBL", "CFL", "WHL", "ICL"]
HEVC_ENCODE_10BIT_LP_PLATFORMS= [ "ICL"]
VP8_DECODE_PLATFORMS = [ "BDW", "SKL", "APL", "KBL", "CFL", "WHL", "ICL"]
VP8_ENCODE_PLATFORMS = [ "ICL"]
VP9_DECODE_8BIT_PLATFORMS = [ "APL", "KBL", "CFL", "WHL", "ICL"]
VP9_ENCODE_8BIT_PLATFORMS = [ "ICL"]
VP9_DECODE_10BIT_PLATFORMS = [ "KBL", "CFL", "WHL", "ICL"]
VP9_ENCODE_10BIT_PLATFORMS = [ "ICL"]
VPP_PLATFORMS = [ "BDW", "SKL", "APL", "KBL", "CFL", "WHL", "ICL"]
def platform_tags(platforms):
def wrap(f):
import slash
for platform in platforms:
f = slash.tag(platform)(f)
return f
return wrap
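# Illustrative usage of platform_tags (hypothetical test, not part of this
# module):
#
#   @platform_tags(AVC_DECODE_PLATFORMS)
#   def test_avc_decode():
#     ...
#
# Each platform name in the list is attached to the test as a slash tag, so
# runs can be filtered per platform.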
def info():
import platform
  try:
    import cpuinfo
    cpu = cpuinfo.get_cpu_info()["brand"]
  except Exception:
    cpu = "unknown"
  try:
    # platform.dist() was deprecated in Python 3.5 and removed in 3.8
    dist = str(platform.dist())
  except AttributeError:
    dist = "unknown"
  return dict(
    node = str(platform.node()),
    kernel = str(platform.release()),
    dist = dist,
    cpu = cpu,
  )
| 59.89899 | 118 | 0.424621 |
08052b639b76fe0dfc3a7811d3db52cba9256c07 | 632 | py | Python | manage.py | Ashik19aug/Open-Library | f0faf14561c73fc6b7afdf28a126dd767f5b9a46 | [
"MIT"
] | null | null | null | manage.py | Ashik19aug/Open-Library | f0faf14561c73fc6b7afdf28a126dd767f5b9a46 | [
"MIT"
] | 4 | 2021-03-19T00:58:30.000Z | 2021-09-22T18:44:56.000Z | manage.py | Ashik19aug/Open-Library | f0faf14561c73fc6b7afdf28a126dd767f5b9a46 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'open_library.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.727273 | 76 | 0.685127 |
4dd00ec06bb8c60029dd811b5f3555e9346205b1 | 2,126 | py | Python | posts/renderers.py | fr33mang/vas3k.club | c37dae5a8ec7ac1a5fb328e023b1474cc956f13c | [
"MIT"
] | null | null | null | posts/renderers.py | fr33mang/vas3k.club | c37dae5a8ec7ac1a5fb328e023b1474cc956f13c | [
"MIT"
] | null | null | null | posts/renderers.py | fr33mang/vas3k.club | c37dae5a8ec7ac1a5fb328e023b1474cc956f13c | [
"MIT"
] | null | null | null | from django.http import HttpResponse
from django.shortcuts import render
from django.template import TemplateDoesNotExist
from comments.forms import CommentForm, ReplyForm, BattleCommentForm
from comments.models import Comment
from posts.models import PostVote, Post, PostSubscription
POSSIBLE_COMMENT_ORDERS = {"created_at", "-created_at", "-upvotes"}
def render_post(request, post, context=None):
# render "raw" newsletters
if post.type == Post.TYPE_WEEKLY_DIGEST:
return HttpResponse(post.html)
# select votes and comments
if request.me:
comments = Comment.objects_for_user(request.me).filter(post=post).all()
        vote = PostVote.objects.filter(post=post, user=request.me).first()
        is_voted = vote is not None
        upvoted_at = int(vote.created_at.timestamp() * 1000) if vote else None
subscription = PostSubscription.get(request.me, post)
else:
comments = Comment.visible_objects().filter(post=post).all()
is_voted = False
upvoted_at = None
subscription = None
# order comments
comment_order = request.GET.get("comment_order") or "-upvotes"
if comment_order in POSSIBLE_COMMENT_ORDERS:
comments = comments.order_by(comment_order, "created_at") # additionally sort by time to preserve an order
# hide deleted comments for battle (visual junk)
if post.type == Post.TYPE_BATTLE:
comments = comments.filter(is_deleted=False)
context = {
**(context or {}),
"post": post,
"comments": comments,
"comment_form": CommentForm(),
"comment_order": comment_order,
"reply_form": ReplyForm(),
"is_voted": is_voted,
"upvoted_at": upvoted_at,
"subscription": subscription,
}
# TODO: make a proper mapping here in future
if post.type == Post.TYPE_BATTLE:
context["comment_form"] = BattleCommentForm()
try:
return render(request, f"posts/show/{post.type}.html", context)
except TemplateDoesNotExist:
return render(request, "posts/show/post.html", context)
| 36.655172 | 137 | 0.689558 |
63d1786539a1a23c38088be23d0fc6e8c818489d | 666 | py | Python | bot/exts/dev.py | rmenai/project-e.m | 7428b1c2bde5c33918553b4a779435e0c9beebbc | [
"MIT"
] | null | null | null | bot/exts/dev.py | rmenai/project-e.m | 7428b1c2bde5c33918553b4a779435e0c9beebbc | [
"MIT"
] | 7 | 2022-02-28T19:11:25.000Z | 2022-03-25T13:37:59.000Z | bot/exts/dev.py | rmenai/project-e.m | 7428b1c2bde5c33918553b4a779435e0c9beebbc | [
"MIT"
] | null | null | null | import logging
from discord.commands import ApplicationContext, permissions, slash_command
from discord.ext import commands
from bot import settings
from bot.bot import Bot
log = logging.getLogger(__name__)
class Dev(commands.Cog):
"""Test different commands and pycord features."""
def __init__(self, bot: Bot):
self.bot = bot
@slash_command(guild_ids=settings.dev_guild_ids)
@permissions.has_role(settings.roles.admin)
async def test(self, ctx: ApplicationContext) -> None:
"""Usage for testing purposes."""
await ctx.defer()
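        # NOTE: a deferred interaction is expected to be followed by an actual
        # response, e.g. (illustrative): await ctx.respond("test ok")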
def setup(bot: Bot) -> None:
"""Load the `Dev` cog."""
bot.add_cog(Dev(bot))
| 23.785714 | 75 | 0.701201 |
67c9619672e3e9df37d657207859b3f02d978f59 | 692 | py | Python | Assignments/Assignment 8/2d_arrays.py | kaceyvolmar/CMPT-120L-910-20F | 40bc0901614f4a4e0b0e420ffade5164376a7b05 | [
"MIT"
] | null | null | null | Assignments/Assignment 8/2d_arrays.py | kaceyvolmar/CMPT-120L-910-20F | 40bc0901614f4a4e0b0e420ffade5164376a7b05 | [
"MIT"
] | 1 | 2020-09-18T01:20:58.000Z | 2020-09-18T01:20:58.000Z | Assignments/Assignment 8/2d_arrays.py | kaceyvolmar/CMPT-120L-910-20F | 40bc0901614f4a4e0b0e420ffade5164376a7b05 | [
"MIT"
] | null | null | null | def sum_2d_array(two_d_array):
"""
- Add code in the defined function to sum up the internal arrays. Returning an array of the sums.
- Your input will be a 2d array
- Output should be a 1d array
- If a sub array is empty the sum is 0
"""
arr = []
for array in two_d_array:
arr.append(sum(array))
return arr
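# Equivalent one-liner (illustrative): [sum(sub) for sub in two_d_array].
# sum([]) == 0, so empty sub-arrays naturally contribute 0.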
if __name__ == "__main__":
two_d_array = [
[2, 6, 7, 98, 3, 434, 2, 4, 2],
[-12, 3, 454, 6778, 234, -999, 2543, -2323],
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
[],
[1000000000000000],
[1],
[0]
]
answers = sum_2d_array(two_d_array)
print(answers) | 23.066667 | 101 | 0.520231 |
52faf466bc0de28cc04e07e79dc61586496ef4fe | 1,540 | py | Python | third-party/gtd/gtd/ml/torch/recurrent.py | timpowellgit/phrasenode | a4dc105a69785f289a4e7998d078d6727686b94d | [
"Apache-2.0"
] | 81 | 2018-02-21T15:53:38.000Z | 2022-03-20T21:25:49.000Z | third-party/gtd/gtd/ml/torch/recurrent.py | timpowellgit/phrasenode | a4dc105a69785f289a4e7998d078d6727686b94d | [
"Apache-2.0"
] | 14 | 2018-03-09T19:04:43.000Z | 2020-12-06T13:54:40.000Z | third-party/gtd/gtd/ml/torch/recurrent.py | timpowellgit/phrasenode | a4dc105a69785f289a4e7998d078d6727686b94d | [
"Apache-2.0"
] | 38 | 2018-03-09T19:42:32.000Z | 2022-03-15T15:39:51.000Z | import torch
from torch.nn import Module
from gtd.ml.torch.utils import GPUVariable, conditional
def tile_state(h, batch_size):
"""Tile a given hidden state batch_size times.
Args:
h (Variable): a single hidden state of shape (hidden_dim,)
batch_size (int)
Returns:
a Variable of shape (batch_size, hidden_dim)
"""
tiler = GPUVariable(torch.ones(batch_size, 1))
    return torch.mm(tiler, h.unsqueeze(0))  # (batch_size, hidden_dim)
def gated_update(h, h_new, update):
"""If update == 1.0, return h_new; if update == 0.0, return h.
Applies this logic to each element in a batch.
Args:
h (Variable): of shape (batch_size, hidden_dim)
h_new (Variable): of shape (batch_size, hidden_dim)
update (Variable): of shape (batch_size, 1).
Returns:
Variable: of shape (batch_size, hidden_dim)
"""
batch_size, hidden_dim = h.size()
gate = update.expand(batch_size, hidden_dim)
return conditional(gate, h_new, h)
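def _gated_update_demo():
    """Minimal sketch (illustrative helper, not part of the library API):
    row 0 has update == 1.0 and takes h_new; row 1 has update == 0.0 and keeps h.
    """
    h = GPUVariable(torch.zeros(2, 3))
    h_new = GPUVariable(torch.ones(2, 3))
    update = GPUVariable(torch.FloatTensor([[1.0], [0.0]]))
    return gated_update(h, h_new, update)  # rows: [1., 1., 1.] and [0., 0., 0.]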
class AdditionCell(Module):
"""Just add the input vector to the hidden state vector."""
def __init__(self, input_dim, hidden_dim):
super(AdditionCell, self).__init__()
self.W = GPUVariable(torch.eye(input_dim, hidden_dim))
# truncates input if input_dim > hidden_dim
# pads with zeros if input_dim < hidden_dim
self.hidden_size = hidden_dim
def forward(self, x, hc):
h, c = hc
h = x.mm(self.W) + h
return h, c | 28.518519 | 71 | 0.652597 |
eefd605e85294332e4476eee352cd24725b97c60 | 50 | py | Python | preprocessing/__init__.py | sayanpr8175/KNN-implementation-for-Cats-dogs-panda-classification | c6fd11d6d9670c5deeffa9b137c10db01b322522 | [
"MIT"
] | null | null | null | preprocessing/__init__.py | sayanpr8175/KNN-implementation-for-Cats-dogs-panda-classification | c6fd11d6d9670c5deeffa9b137c10db01b322522 | [
"MIT"
] | null | null | null | preprocessing/__init__.py | sayanpr8175/KNN-implementation-for-Cats-dogs-panda-classification | c6fd11d6d9670c5deeffa9b137c10db01b322522 | [
"MIT"
] | null | null | null | from .simplepreprocessor import SimplePreprocessor | 50 | 50 | 0.92 |
9e228b890f7e58726cffb2b7b323d8d7470558e4 | 15,627 | py | Python | tests/transformations/state_fusion_test.py | jnice-81/dace | 5211794a2d17b7189037ac485ab0b292fb02aa0d | [
"BSD-3-Clause"
] | 227 | 2019-03-15T23:39:06.000Z | 2022-03-30T07:49:08.000Z | tests/transformations/state_fusion_test.py | jnice-81/dace | 5211794a2d17b7189037ac485ab0b292fb02aa0d | [
"BSD-3-Clause"
] | 834 | 2019-07-31T22:49:31.000Z | 2022-03-28T14:01:32.000Z | tests/transformations/state_fusion_test.py | jnice-81/dace | 5211794a2d17b7189037ac485ab0b292fb02aa0d | [
"BSD-3-Clause"
] | 64 | 2019-03-19T05:40:37.000Z | 2022-03-11T15:02:42.000Z | # Copyright 2019-2021 ETH Zurich and the DaCe authors. All rights reserved.
import dace
from dace.transformation import transformation
from dace.transformation.interstate import StateFusion
import networkx as nx
import numpy as np
# Inter-state condition tests
def test_fuse_assignments():
"""
Two states in which the interstate assignment depends on an interstate
value going into the first state. Should fail.
"""
sdfg = dace.SDFG('state_fusion_test')
state1 = sdfg.add_state()
state2 = sdfg.add_state()
state3 = sdfg.add_state()
sdfg.add_edge(state1, state2, dace.InterstateEdge(assignments=dict(k=1)))
sdfg.add_edge(state2, state3,
dace.InterstateEdge(assignments=dict(k='k + 1')))
sdfg.apply_transformations_repeated(StateFusion, strict=True)
assert sdfg.number_of_nodes() == 3
def test_fuse_assignment_in_use():
"""
Two states with an interstate assignment in between, where the assigned
value is used in the first state. Should fail.
"""
sdfg = dace.SDFG('state_fusion_test')
sdfg.add_array('A', [2], dace.int32)
state1, state2, state3, state4 = tuple(sdfg.add_state() for _ in range(4))
sdfg.add_edge(state1, state2, dace.InterstateEdge(assignments=dict(k=1)))
sdfg.add_edge(state2, state3, dace.InterstateEdge())
sdfg.add_edge(state3, state4, dace.InterstateEdge(assignments=dict(k=2)))
state3.add_edge(state3.add_tasklet('one', {}, {'a'}, 'a = k'), 'a',
state3.add_write('A'), None, dace.Memlet('A[0]'))
    state4.add_edge(state4.add_tasklet('two', {}, {'a'}, 'a = k'), 'a',
                    state4.add_write('A'), None, dace.Memlet('A[1]'))
try:
StateFusion.apply_to(sdfg,
strict=True,
first_state=state3,
second_state=state4)
raise AssertionError('States fused, test failed')
except ValueError:
print('Exception successfully caught')
# Connected components tests
def test_two_to_one_cc_fusion():
""" Two states, first with two connected components, second with one. """
sdfg = dace.SDFG('state_fusion_test')
sdfg.add_array('A', [1], dace.int32)
sdfg.add_array('B', [1], dace.int32)
sdfg.add_array('C', [1], dace.int32)
state1, state2 = tuple(sdfg.add_state() for _ in range(2))
sdfg.add_edge(state1, state2, dace.InterstateEdge())
# First state
state1.add_edge(state1.add_tasklet('one', {}, {'a'}, 'a = 1'), 'a',
state1.add_write('A'), None, dace.Memlet('A'))
t2 = state1.add_tasklet('two', {}, {'b', 'c'}, 'b = 2; c = 3')
state1.add_edge(t2, 'b', state1.add_write('B'), None, dace.Memlet('B'))
state1.add_edge(t2, 'c', state1.add_write('C'), None, dace.Memlet('C'))
# Second state
t2 = state2.add_tasklet('three', {'a', 'b', 'c'}, {'out'}, 'out = a+b+c')
state2.add_edge(state2.add_read('A'), None, t2, 'a', dace.Memlet('A'))
state2.add_edge(state2.add_read('B'), None, t2, 'b', dace.Memlet('B'))
state2.add_edge(state2.add_read('C'), None, t2, 'c', dace.Memlet('C'))
state2.add_edge(t2, 'out', state2.add_write('C'), None, dace.Memlet('C'))
assert sdfg.apply_transformations_repeated(StateFusion, strict=True) == 1
def test_one_to_two_cc_fusion():
""" Two states, first with one connected component, second with two. """
sdfg = dace.SDFG('state_fusion_test')
sdfg.add_array('A', [1], dace.int32)
sdfg.add_array('B', [1], dace.int32)
state1, state2 = tuple(sdfg.add_state() for _ in range(2))
sdfg.add_edge(state1, state2, dace.InterstateEdge())
# First state
t1 = state1.add_tasklet('one', {}, {'a', 'b'}, 'a = 1; b = 2')
state1.add_edge(t1, 'a', state1.add_write('A'), None, dace.Memlet('A'))
state1.add_edge(t1, 'b', state1.add_write('B'), None, dace.Memlet('B'))
# Second state
state2.add_edge(state2.add_read('A'), None,
state2.add_tasklet('one', {'a'}, {}, ''), 'a',
dace.Memlet('A'))
state2.add_edge(state2.add_read('B'), None,
state2.add_tasklet('two', {'b'}, {}, ''), 'b',
dace.Memlet('B'))
assert sdfg.apply_transformations_repeated(StateFusion, strict=True) == 1
def test_two_cc_fusion_separate():
""" Two states, both with two connected components, fused separately. """
sdfg = dace.SDFG('state_fusion_test')
sdfg.add_array('A', [1], dace.int32)
sdfg.add_array('B', [1], dace.int32)
sdfg.add_array('C', [1], dace.int32)
state1, state2 = tuple(sdfg.add_state() for _ in range(2))
sdfg.add_edge(state1, state2, dace.InterstateEdge())
# First state
state1.add_edge(state1.add_tasklet('one', {}, {'a'}, 'a = 1'), 'a',
state1.add_write('A'), None, dace.Memlet('A'))
t2 = state1.add_tasklet('two', {}, {'b', 'c'}, 'b = 2; c = 3')
state1.add_edge(t2, 'b', state1.add_write('B'), None, dace.Memlet('B'))
state1.add_edge(t2, 'c', state1.add_write('C'), None, dace.Memlet('C'))
# Second state
state2.add_edge(state2.add_read('A'), None,
state2.add_tasklet('one', {'a'}, {}, ''), 'a',
dace.Memlet('A'))
t2 = state2.add_tasklet('two', {'b', 'c'}, {}, '')
state2.add_edge(state2.add_read('B'), None, t2, 'b', dace.Memlet('B'))
state2.add_edge(state2.add_read('C'), None, t2, 'c', dace.Memlet('C'))
assert sdfg.apply_transformations_repeated(StateFusion, strict=True) == 1
def test_two_cc_fusion_together():
""" Two states, both with two connected components, fused to one CC. """
sdfg = dace.SDFG('state_fusion_test')
sdfg.add_array('A', [1], dace.int32)
sdfg.add_array('B', [1], dace.int32)
sdfg.add_array('C', [1], dace.int32)
state1, state2 = tuple(sdfg.add_state() for _ in range(2))
sdfg.add_edge(state1, state2, dace.InterstateEdge())
# First state
state1.add_edge(state1.add_tasklet('one', {}, {'a'}, 'a = 1'), 'a',
state1.add_write('A'), None, dace.Memlet('A'))
t2 = state1.add_tasklet('two', {}, {'b', 'c'}, 'b = 2; c = 3')
state1.add_edge(t2, 'b', state1.add_write('B'), None, dace.Memlet('B'))
state1.add_edge(t2, 'c', state1.add_write('C'), None, dace.Memlet('C'))
# Second state
state2.add_edge(state2.add_read('B'), None,
state2.add_tasklet('one', {'a'}, {}, ''), 'a',
dace.Memlet('B'))
t2 = state2.add_tasklet('two', {'b', 'c'}, {'d', 'e'}, 'd = b + c; e = b')
state2.add_edge(state2.add_read('A'), None, t2, 'b', dace.Memlet('A'))
state2.add_edge(state2.add_read('C'), None, t2, 'c', dace.Memlet('C'))
state2.add_edge(t2, 'd', state2.add_write('A'), None, dace.Memlet('A'))
state2.add_edge(t2, 'e', state2.add_write('C'), None, dace.Memlet('C'))
assert sdfg.apply_transformations_repeated(StateFusion, strict=True) == 1
# Data race avoidance tests
def test_write_write_path():
"""
Two states where both write to the same range of an array, but there is
a path between the write and the second write.
"""
@dace.program
def state_fusion_test(A: dace.int32[20, 20]):
A += 1
tmp = A + 2
A[:] = tmp + 3
sdfg = state_fusion_test.to_sdfg(strict=False)
sdfg.apply_transformations_repeated(StateFusion, strict=True)
assert len(sdfg.nodes()) == 1
def test_write_write_no_overlap():
"""
Two states where both write to different ranges of an array.
"""
N = dace.symbol('N', positive=True)
@dace.program
def state_fusion_test(A: dace.int32[N, N]):
A[0:N - 1, :] = 1
A[N - 1, :] = 2
sdfg = state_fusion_test.to_sdfg(strict=False)
sdfg.apply_transformations_repeated(StateFusion, strict=True)
assert len(sdfg.nodes()) == 1
def test_read_write_no_overlap():
"""
Two states where two separate CCs write and read to/from an array, but
in different ranges.
"""
N = dace.symbol('N')
@dace.program
def state_fusion_test(A: dace.int32[N, N], B: dace.int32[N, N]):
A[:, 5:N] = 1
B[:, 3:6] = A[:, 0:3]
sdfg = state_fusion_test.to_sdfg(strict=False)
sdfg.apply_transformations_repeated(StateFusion, strict=True)
assert len(sdfg.nodes()) == 1
def test_array_in_middle_no_overlap():
"""
Two states that write and read from an array without overlap. Should be
fused to two separate components.
"""
sdfg = dace.SDFG('state_fusion_test')
sdfg.add_array('A', [10, 10], dace.int32)
sdfg.add_array('B', [5, 5], dace.int32)
sdfg.add_array('C', [5, 5], dace.int32)
state = sdfg.add_state()
t1 = state.add_tasklet('init_a1', {}, {'a'}, '')
rw1 = state.add_access('A')
t2 = state.add_tasklet('a2b', {'a'}, {'b'}, '')
wb = state.add_write('B')
state.add_edge(t1, 'a', rw1, None, dace.Memlet('A[0:5, 0:5]'))
state.add_edge(rw1, None, t2, 'a', dace.Memlet('A[0:5, 0:5]'))
state.add_edge(t2, 'b', wb, None, dace.Memlet('B'))
state2 = sdfg.add_state_after(state)
t1 = state2.add_tasklet('init_a2', {}, {'a'}, '')
rw2 = state2.add_access('A')
t2 = state2.add_tasklet('a2c', {'a'}, {'c'}, '')
wc = state2.add_write('C')
state2.add_edge(t1, 'a', rw2, None, dace.Memlet('A[5:10, 5:10]'))
state2.add_edge(rw2, None, t2, 'a', dace.Memlet('A[5:10, 5:10]'))
state2.add_edge(t2, 'c', wc, None, dace.Memlet('C'))
assert sdfg.apply_transformations_repeated(StateFusion, strict=True) == 1
assert len(list(nx.weakly_connected_components(sdfg.node(0).nx))) == 2
def test_array_in_middle_overlap():
"""
Two states that write and read from an array with overlap. Should not be
fused.
"""
sdfg = dace.SDFG('state_fusion_test')
sdfg.add_array('A', [10, 10], dace.int32)
sdfg.add_array('B', [5, 5], dace.int32)
sdfg.add_array('C', [5, 5], dace.int32)
state = sdfg.add_state()
t1 = state.add_tasklet('init_a1', {}, {'a'}, '')
rw1 = state.add_access('A')
t2 = state.add_tasklet('a2b', {'a'}, {'b'}, '')
wb = state.add_write('B')
state.add_edge(t1, 'a', rw1, None, dace.Memlet('A[0:5, 0:5]'))
state.add_edge(rw1, None, t2, 'a', dace.Memlet('A[0:5, 0:5]'))
state.add_edge(t2, 'b', wb, None, dace.Memlet('B'))
state2 = sdfg.add_state_after(state)
t1 = state2.add_tasklet('init_a2', {}, {'a'}, '')
rw2 = state2.add_access('A')
t2 = state2.add_tasklet('a2c', {'a'}, {'c'}, '')
wc = state2.add_write('C')
state2.add_edge(t1, 'a', rw2, None, dace.Memlet('A[0:5, 0:5]'))
state2.add_edge(rw2, None, t2, 'a', dace.Memlet('A[0:5, 0:5]'))
state2.add_edge(t2, 'c', wc, None, dace.Memlet('C'))
assert sdfg.apply_transformations_repeated(StateFusion, strict=True) == 0
def test_two_outputs_same_name():
"""
First state writes to the same array twice, second state updates one value.
Should be fused to the right node in the second state or a data race will
occur.
"""
sdfg = dace.SDFG('state_fusion_test')
sdfg.add_array('A', [2], dace.int32)
sdfg.add_scalar('scal', dace.int32)
state = sdfg.add_state()
r = state.add_read('scal')
t1 = state.add_tasklet('init_a1', {'s'}, {'a'}, 'a = 1 + s')
w1 = state.add_write('A')
t2 = state.add_tasklet('init_a2', {'s'}, {'a'}, 'a = 2 + s')
w2 = state.add_write('A')
state.add_edge(r, None, t1, 's', dace.Memlet('scal'))
state.add_edge(t1, 'a', w1, None, dace.Memlet('A[0]'))
state.add_edge(r, None, t2, 's', dace.Memlet('scal'))
state.add_edge(t2, 'a', w2, None, dace.Memlet('A[1]'))
state2 = sdfg.add_state_after(state)
r1 = state2.add_read('A')
t1 = state2.add_tasklet('update_a2', {'a'}, {'b'}, 'b = a + 2')
w1 = state2.add_write('A')
state2.add_edge(r1, None, t1, 'a', dace.Memlet('A[1]'))
state2.add_edge(t1, 'b', w1, None, dace.Memlet('A[1]'))
assert sdfg.apply_transformations_repeated(StateFusion, strict=True) == 1
A = np.zeros([2], dtype=np.int32)
sdfg(A=A, scal=np.int32(0))
assert A[0] == 1 and A[1] == 4
def test_inout_read_after_write():
"""
First state ends with a computation that reads an array, while the second
state both reads and writes to that same array. Fusion will then cause
a RAW conflict.
"""
sdfg = dace.SDFG('state_fusion_test')
sdfg.add_array('A', [1], dace.int32)
sdfg.add_array('B', [1], dace.int32)
sdfg.add_array('C', [1], dace.int32)
state = sdfg.add_state()
r = state.add_read('A')
t1 = state.add_tasklet('init_b', {'a'}, {'b'}, 'b = a + 1')
rw = state.add_access('B')
t2 = state.add_tasklet('init_c', {'b'}, {'c'}, 'c = 2 + b')
w = state.add_access('C')
state.add_edge(r, None, t1, 'a', dace.Memlet('A[0]'))
state.add_edge(t1, 'b', rw, None, dace.Memlet('B[0]'))
state.add_edge(rw, None, t2, 'b', dace.Memlet('B[0]'))
state.add_edge(t2, 'c', w, None, dace.Memlet('C[0]'))
state2 = sdfg.add_state_after(state)
r1 = state2.add_read('B')
t1 = state2.add_tasklet('update_b', {'bin'}, {'bout'}, 'bout = bin + bin')
w1 = state2.add_write('B')
state2.add_edge(r1, None, t1, 'bin', dace.Memlet('B[0]'))
state2.add_edge(t1, 'bout', w1, None, dace.Memlet('B[0]'))
assert sdfg.apply_transformations_repeated(StateFusion, strict=True) == 0
A = np.zeros([1], dtype=np.int32)
B = np.zeros([1], dtype=np.int32)
C = np.zeros([1], dtype=np.int32)
sdfg(A=A, B=B, C=C)
assert C[0] == 3
assert B[0] == 2
def test_inout_second_state():
"""
Second state has a computation that reads and writes to the same array,
while the first state also reads from that same array. Fusion will then
cause a potential data race.
"""
sdfg = dace.SDFG('state_fusion_test')
sdfg.add_array('A', [1], dace.int32)
sdfg.add_array('B', [1], dace.int32)
state = sdfg.add_state()
r = state.add_read('A')
t1 = state.add_tasklet('init_b', {'a'}, {'b'}, 'b = a + 1')
w = state.add_write('B')
state.add_edge(r, None, t1, 'a', dace.Memlet('A[0]'))
state.add_edge(t1, 'b', w, None, dace.Memlet('B[0]'))
state2 = sdfg.add_state_after(state)
r1 = state2.add_read('A')
t1 = state2.add_tasklet('update_a', {'a'}, {'aout'}, 'aout = a + 5')
w1 = state2.add_write('A')
state2.add_edge(r1, None, t1, 'a', dace.Memlet('A[0]'))
state2.add_edge(t1, 'aout', w1, None, dace.Memlet('A[0]'))
assert sdfg.apply_transformations_repeated(StateFusion, strict=True) == 0
A = np.zeros([1], dtype=np.int32)
B = np.zeros([1], dtype=np.int32)
sdfg(A=A, B=B)
assert A[0] == 5
assert B[0] == 1
def test_inout_second_state_2():
@dace.program
def func(A: dace.float64[128, 128], B: dace.float64[128, 128]):
B << A
for i, j in dace.map[0:128, 0:128]:
with dace.tasklet:
ai << A[i, j]
ao >> A[i, j]
ao = 2 * ai
sdfg = func.to_sdfg(strict=False)
sdfg.apply_strict_transformations()
assert sdfg.number_of_nodes() == 2
if __name__ == '__main__':
test_fuse_assignments()
test_fuse_assignment_in_use()
test_two_to_one_cc_fusion()
test_one_to_two_cc_fusion()
test_two_cc_fusion_separate()
test_two_cc_fusion_together()
test_write_write_path()
test_write_write_no_overlap()
test_read_write_no_overlap()
test_array_in_middle_no_overlap()
test_array_in_middle_overlap()
test_two_outputs_same_name()
test_inout_read_after_write()
test_inout_second_state()
test_inout_second_state_2()
| 37.929612 | 80 | 0.612338 |
c53bc48eeae019c5442cf1e93a2009cabf77c1c5 | 32,892 | py | Python | Packs/RTIR/Integrations/RTIR/RTIR.py | mchasepan/content | 177c7fe86c4872141107f48075c6578daffc4bd4 | [
"MIT"
] | 1 | 2020-07-22T05:55:11.000Z | 2020-07-22T05:55:11.000Z | Packs/RTIR/Integrations/RTIR/RTIR.py | mchasepan/content | 177c7fe86c4872141107f48075c6578daffc4bd4 | [
"MIT"
] | null | null | null | Packs/RTIR/Integrations/RTIR/RTIR.py | mchasepan/content | 177c7fe86c4872141107f48075c6578daffc4bd4 | [
"MIT"
] | 2 | 2020-07-15T06:41:52.000Z | 2020-07-19T18:45:23.000Z | from CommonServerPython import *
''' IMPORTS '''
import requests
import json
import re
import urllib
''' GLOBAL VARS '''
SERVER = None
BASE_URL = None
USERNAME = None
PASSWORD = None
USE_SSL = None
FETCH_PRIORITY = 0
FETCH_STATUS = None
FETCH_QUEUE = None
CURLY_BRACKETS_REGEX = r'\{(.*?)\}' # Extracts string in curly brackets, e.g. '{string}' -> 'string'
apostrophe = "'"
SESSION = requests.session()
SESSION.verify = USE_SSL
REFERER = None
HEADERS = {'Referer': REFERER} if REFERER else {} # type: dict
''' HELPER FUNCTIONS '''
def ticket_to_incident(ticket):
incident = {
'name': 'RTIR Ticket ' + str(ticket['ID']),
'rawJSON': json.dumps(ticket),
}
attachments, attachments_content = get_ticket_attachments(ticket['ID'])
if attachments:
incident_attachments = []
for i in range(len(attachments)):
incident_attachments.append({
'path': attachments_content[i]['FileID'],
'name': attachments[i]['Name']
})
incident['attachment'] = incident_attachments # type: ignore
return incident
def ticket_string_to_id(ticket_string):
'''
Translates 'ticket/1' to the integer 1
'''
slash_index = ticket_string.index('/')
ticket_id = int(ticket_string[slash_index + 1:])
return ticket_id
def http_request(method, suffix_url, data=None, files=None, query=None):
# Returns the http request
url = BASE_URL + suffix_url
params = {'user': USERNAME, 'pass': PASSWORD}
if query:
params.update(query)
response = SESSION.request(method, url, data=data, params=params, files=files, headers=HEADERS) # type: ignore
# handle request failure
if response.status_code not in {200}:
message = parse_error_response(response)
return_error('Error in API call with status code {}\n{}'.format(response.status_code, message))
return response
def parse_error_response(response):
try:
res = response.json()
msg = res.get('message')
if res.get('details') and res.get('details')[0].get('message'):
msg = msg + "\n" + json.dumps(res.get('details')[0])
except Exception:
return response.text
return msg
def login():
data = {
'user': USERNAME,
'pass': PASSWORD
}
res = SESSION.post(SERVER, data=data) # type: ignore
response_text = str(res.text)
are_credentials_wrong = 'Your username or password is incorrect' in response_text
if are_credentials_wrong:
return_error("Error: login failed. please check your credentials.")
def logout():
suffix_url = 'logout'
http_request('POST', suffix_url)
def parse_ticket_data(raw_query):
raw_tickets = search_ticket_request(raw_query)
headers = ['ID', 'Subject', 'Status', 'Priority', 'Created', 'Queue', 'Creator', 'Owner', 'InitialPriority',
'FinalPriority']
search_context = []
data = raw_tickets.content.split('\n')
data = data[2:]
for line in data:
split_line = line.split(': ')
search_ticket = get_ticket_request(split_line[0]).content
search_ticket = search_ticket.split('\n')
search_ticket = search_ticket[2:]
id_ticket = search_ticket[0].upper()
search_ticket[0] = id_ticket
current_ticket_search = build_ticket(search_ticket)
for key in search_ticket: # Adding ticket custom fields to outputs
if key.startswith('CF.'):
split_key = key.split(':')
if split_key[0]:
custom_field_regex = re.findall(CURLY_BRACKETS_REGEX, key)[0].replace(' ',
'') # Regex and removing white spaces
current_ticket_search[custom_field_regex] = split_key[1]
headers.append(custom_field_regex)
if current_ticket_search:
search_context.append(current_ticket_search)
return search_context
''' FUNCTIONS '''
def create_ticket_request(encoded):
suffix_url = 'ticket/new'
ticket_id = http_request('POST', suffix_url, data=encoded)
return ticket_id
def create_ticket_attachments_request(encoded, files_data):
suffix_url = 'ticket/new'
ticket_id = http_request('POST', suffix_url, files=files_data)
return ticket_id
def create_ticket():
queue = demisto.args().get('queue')
data = 'id: ticket/new\nQueue: {}\n'.format(queue)
subject = demisto.args().get('subject')
if subject:
data += "Subject: {}\n".format(subject)
requestor = demisto.args().get('requestor')
if requestor:
data += "Requestor: {}\n".format(requestor)
cc = demisto.args().get('cc', '')
if cc:
data += "Cc: {}\n".format(cc)
admin_cc = demisto.args().get('admin-cc', '')
if admin_cc:
data += "AdminCc: {}\n".format(admin_cc)
owner = demisto.args().get('owner')
if owner:
data += "Owner: {}\n".format(owner)
status = demisto.args().get('status')
if status:
data += "Status: {}\n".format(status)
priority = demisto.args().get('priority')
if priority:
data += "Priority: {}\n".format(priority)
initial_priority = demisto.args().get('initial-priority')
if initial_priority:
data += "Initial-priority: {}\n".format(initial_priority)
final_priority = demisto.args().get('final-priority')
if final_priority:
data += "FinalPriority: {}\n".format(final_priority)
text = demisto.args().get('text')
if text:
data += "Text: {}\n".format(unicode(text).encode('utf-8'))
customfields = demisto.args().get('customfields')
if customfields:
cf_list = customfields.split(',')
for cf in cf_list:
equal_index = cf.index('=')
key = 'CF-{}: '.format(cf[:equal_index])
value = cf[equal_index + 1:]
data = data + key + value + '\n'
attachments = demisto.args().get('attachment')
if attachments:
files_data = {}
if isinstance(attachments, list): # Given as list
attachments_list = attachments
else: # Given as string
attachments_list = attachments.split(',')
for i, file_pair in enumerate(attachments_list):
file = demisto.getFilePath(file_pair)
file_name = file['name']
files_data['attachment_{:d}'.format(i + 1)] = (file_name, open(file['path'], 'rb'))
data += 'Attachment: {}'.format(file_name)
encoded = "content=" + urllib.quote_plus(data)
if attachments:
files_data.update({'content': (None, data)}) # type: ignore
raw_ticket_res = create_ticket_attachments_request(encoded, files_data)
else:
raw_ticket_res = create_ticket_request(encoded)
    ticket_id_matches = re.findall(r'\d+', raw_ticket_res.content)
    if not ticket_id_matches:
        return_error('Ticket creation failed')
    ticket_id = ticket_id_matches[-1]
ticket_context = ({
'ID': ticket_id,
'Subject': subject,
'Creator': requestor,
'InitialPriority': initial_priority,
'Priority': priority,
'FinalPriority': final_priority,
'Owner': owner
})
ec = {
'RTIR.Ticket(val.ID && val.ID === obj.ID)': ticket_context
}
hr = 'Ticket {} was created successfully.'.format(ticket_id)
demisto.results({
'Type': entryTypes['note'],
'Contents': raw_ticket_res.content,
'ContentsFormat': formats['text'],
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': hr,
'EntryContext': ec
})
def get_ticket_request(ticket_id):
suffix_url = 'ticket/{}/show'.format(ticket_id)
raw_ticket = http_request('GET', suffix_url)
return raw_ticket
def fix_query_suffix(query):
new_query = query
if new_query.endswith('+AND+'):
new_query = new_query[:-5]
elif new_query.endswith('+OR+'):
new_query = new_query[:-4]
return new_query
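# Example: fix_query_suffix("Status='new'+AND+") returns "Status='new'".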
def build_search_query():
raw_query = ''
args = demisto.args()
ticket_id = args.get('ticket-id')
if ticket_id:
raw_query += 'id={}{}{}+AND+'.format(apostrophe, ticket_id, apostrophe)
subject = args.get('subject')
if subject:
raw_query += 'Subject={}{}{}+AND+'.format(apostrophe, subject, apostrophe)
status = args.get('status')
if status:
raw_query += 'Status={}{}{}+AND+'.format(apostrophe, status, apostrophe)
creator = args.get('creator')
if creator:
raw_query += 'Creator={}{}{}+AND+'.format(apostrophe, creator, apostrophe)
priority_equal_to = args.get('priority-equal-to')
if priority_equal_to:
raw_query += 'Priority={}{}{}+AND+'.format(apostrophe, priority_equal_to, apostrophe)
priority_greater_than = args.get('priority-greater-than')
if priority_greater_than:
raw_query += 'Priority>{}{}{}+AND+'.format(apostrophe, priority_greater_than, apostrophe)
created_after = args.get('created-after')
if created_after:
raw_query += 'Created>{}{}{}+AND+'.format(apostrophe, created_after, apostrophe)
created_on = args.get('created-on')
if created_on:
raw_query += 'Created={}{}{}+AND+'.format(apostrophe, created_on, apostrophe)
created_before = args.get('created-before')
if created_before:
raw_query += 'Created<{}{}{}+AND+'.format(apostrophe, created_before, apostrophe)
owner = args.get('owner')
if owner:
        raw_query += 'Owner={}{}{}+AND+'.format(apostrophe, owner, apostrophe)
due = args.get('due')
if due:
raw_query += 'Due={}{}{}+AND+'.format(apostrophe, due, apostrophe)
queue = args.get('queue')
if queue:
raw_query += 'Queue={}{}{}+AND+'.format(apostrophe, queue, apostrophe)
raw_query = fix_query_suffix(raw_query)
return raw_query
def build_ticket(rtir_search_ticket):
current_ticket_search = {}
for entity in rtir_search_ticket:
if ': ' in entity:
header, content = entity.split(': ', 1)
if 'ID' == header:
content = ticket_string_to_id(content)
if header in {'ID', 'Subject', 'Status', 'Priority', 'Created', 'Queue', 'Creator', 'Owner',
'InitialPriority', 'FinalPriority'}:
current_ticket_search[header] = content
return current_ticket_search
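# Example: build_ticket(['ID: ticket/5', 'Subject: phishing', 'Status: new'])
# returns {'ID': 5, 'Subject': 'phishing', 'Status': 'new'}.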
def search_ticket():
raw_query = build_search_query()
raw_tickets = search_ticket_request(raw_query)
headers = ['ID', 'Subject', 'Status', 'Priority', 'Created', 'Queue', 'Creator', 'Owner', 'InitialPriority',
'FinalPriority']
search_context = []
data = raw_tickets.content.split('\n')
data = data[2:]
results_limit = int(demisto.args().get('results_limit', 0))
data = data if (results_limit == 0) else data[:results_limit]
for line in data:
split_line = line.split(': ')
empty_line_response = ['NO OBJECTS SPECIFIED.', '']
is_line_non_empty = split_line[0] != ''
if is_line_non_empty:
search_ticket = get_ticket_request(split_line[0]).content
search_ticket = search_ticket.split('\n')
search_ticket = search_ticket[2:]
id_ticket = search_ticket[0].upper()
search_ticket[0] = id_ticket
else:
search_ticket = empty_line_response
current_ticket_search = build_ticket(search_ticket)
for key in search_ticket: # Adding ticket custom fields to outputs
if key.startswith('CF.'):
split_key = key.split(':')
if split_key[0]:
custom_field_regex = re.findall(CURLY_BRACKETS_REGEX, key)[0].replace(' ',
'') # Regex and removing white spaces
current_ticket_search[custom_field_regex] = split_key[1]
headers.append(custom_field_regex)
if current_ticket_search:
search_context.append(current_ticket_search)
if search_context:
ec = {
'RTIR.Ticket(val.ID && val.ID === obj.ID)': search_context
}
title = 'RTIR ticket search results'
demisto.results({
'Type': entryTypes['note'],
'Contents': search_context,
'ContentsFormat': formats['json'],
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': tableToMarkdown(title, search_context, headers, removeNull=True),
'EntryContext': ec
})
else:
demisto.results('No results found.')
def search_ticket_request(raw_query):
suffix_url = 'search/ticket'
raw_tickets = http_request('GET', suffix_url, query={'query': raw_query})
return raw_tickets
def close_ticket_request(ticket_id, encoded):
suffix_url = 'ticket/{}/edit'.format(ticket_id)
closed_ticket = http_request('POST', suffix_url, data=encoded)
return closed_ticket
def close_ticket():
ticket_id = demisto.args().get('ticket-id')
content = '\nStatus: resolved'
encoded = "content=" + urllib.quote_plus(content)
closed_ticket = close_ticket_request(ticket_id, encoded)
if '200 Ok' in closed_ticket.content:
ec = {
'RTIR.Ticket(val.ID && val.ID === obj.ID)': {
'ID': int(ticket_id),
'State': 'resolved'
}
}
hr = 'Ticket {} was resolved successfully.'.format(ticket_id)
demisto.results({
'Type': entryTypes['note'],
'Contents': hr,
'ContentsFormat': formats['json'],
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': hr,
'EntryContext': ec
})
else:
return_error('Failed to resolve ticket')
def edit_ticket_request(ticket_id, encoded):
suffix_url = 'ticket/{}/edit'.format(ticket_id)
edited_ticket = http_request('POST', suffix_url, data=encoded)
return edited_ticket
def edit_ticket():
arguments_given = False
ticket_id = demisto.args().get('ticket-id')
content = 'ID: ' + ticket_id
kwargs = {}
subject = demisto.args().get('subject')
if subject:
content += '\nSubject: ' + subject
arguments_given = True
kwargs['Subject'] = subject
owner = demisto.args().get('owner')
if owner:
content += '\nOwner: ' + owner
arguments_given = True
kwargs['Owner'] = owner
status = demisto.args().get('status')
if status:
content += '\nStatus: ' + status
arguments_given = True
kwargs['Status'] = status
priority = demisto.args().get('priority')
if priority:
content += '\nPriority: ' + priority
arguments_given = True
kwargs['Priority'] = int(priority)
final_priority = demisto.args().get('final-priority')
if final_priority:
content += '\nFinalPriority: ' + final_priority
arguments_given = True
kwargs['FinalPriority'] = int(final_priority)
due = demisto.args().get('due')
if due:
content += '\nDue: ' + due
arguments_given = True
kwargs['Due'] = due
customfields = demisto.args().get('customfields')
if customfields:
cf_list = customfields.split(',')
for cf in cf_list:
equal_index = cf.index('=')
key = 'CF-{}: '.format(cf[:equal_index])
value = cf[equal_index + 1:]
content = content + key + value + '\n'
if arguments_given:
encoded = "content=" + urllib.quote_plus(content)
edited_ticket = edit_ticket_request(ticket_id, encoded)
if "200 Ok" in edited_ticket.content:
ticket_context = ({
'ID': ticket_id,
'Subject': subject,
'State': status,
'Priority': priority,
'FinalPriority': final_priority,
'Owner': owner
})
ec = {
'RTIR.Ticket(val.ID && val.ID === obj.ID)': ticket_context
}
hr = 'Ticket {} was edited successfully.'.format(ticket_id)
demisto.results({
'Type': entryTypes['note'],
'Contents': hr,
'ContentsFormat': formats['json'],
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': hr,
'EntryContext': ec
})
else:
return_error('Failed to edit ticket')
else:
return_error('No arguments were given to edit the ticket.')
def get_ticket_attachments(ticket_id):
suffix_url = 'ticket/{}/attachments'.format(ticket_id)
raw_attachments = http_request('GET', suffix_url).text
attachments = []
attachments_content = []
attachments_list = parse_attachments_list(raw_attachments)
for attachment_id, attachment_name, attachment_type, attachment_size in attachments_list:
attachments.append({
'ID': attachment_id,
'Name': attachment_name,
'Type': attachment_type,
'Size': attachment_size
})
suffix_url = 'ticket/{}/attachments/{}'.format(ticket_id, attachment_id)
raw_attachment_content = http_request('GET', suffix_url).content
attachment_content = parse_attachment_content(attachment_id, raw_attachment_content)
attachments_content.append(fileResult(attachment_name, attachment_content))
return attachments, attachments_content
def parse_attachments_list(raw_attachments):
"""
Parses attachments details from raw attachments response.
Example input:
RT/4.4.2 200 Ok
id: ticket/6325/attachments
Attachments: 504: mimecast-get-remediation-incident.log (text/plain / 3.5k)
505: mimecast-get-remediation-incident2.log (text/plain / 3.6k)
Example output:
[('504', 'mimecast-get-remediation-incident.log', 'text/plain', '3.5k'),
('505', 'mimecast-get-remediation-incident2.log', 'text/plain', '3.6k')]
Args:
raw_attachments: The raw attachments response
Returns:
A list of tuples containing the id, name, format and size of each attachment
"""
attachments_regex = re.compile(r'(\d+): (.+) \((.+) \/ (.+)\)')
attachments_list = attachments_regex.findall(raw_attachments)
return attachments_list
def parse_attachment_content(attachment_id, raw_attachment_content):
# type: (str, str) -> str
"""
Parses raw attachment response into the attachment content
Example input:
From: root@localhost
Subject: <ticket subject>
X-RT-Interface: REST
Content-Type: text/plain
Content-Disposition: form-data;
name="attachment_1";
filename="mimecast-get-remediation-incident.log";
filename="mimecast-get-remediation-incident.log"
Content-Transfer-Encoding: binary
Content-Length: <length of the content>
Content: <the actual attachment content...>
Example output:
<the actual attachment content...>
Args:
attachment_id: The ID of the attachment
raw_attachment_content: The raw attachment content, should be like the example input
Returns:
The actual content
"""
attachment_content_pattern = re.compile(r'Content: (.*)', flags=re.DOTALL)
attachment_content = attachment_content_pattern.findall(raw_attachment_content)
if not attachment_content:
return_error('Could not parse attachment content for attachment id {}'.format(attachment_id))
return attachment_content[0]
def get_ticket_attachments_command():
ticket_id = demisto.args().get('ticket-id')
attachments, attachments_content = get_ticket_attachments(ticket_id)
if attachments:
ec = {
'RTIR.Ticket(val.ID && val.ID === obj.ID)': {
'ID': int(ticket_id),
'Attachment': attachments
}
}
title = 'RTIR ticket {} attachments'.format(ticket_id)
demisto.results({
'Type': entryTypes['note'],
'Contents': attachments,
'ContentsFormat': formats['json'],
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': tableToMarkdown(title, attachments, removeNull=True),
'EntryContext': ec
})
demisto.results(attachments_content)
else:
demisto.results('No attachments found.')
def get_ticket_history_by_id(ticket_id, history_id):
"""Accepts ticket ID and history ID as input and returns a dictionary of ticket history entry properties"""
suffix_url = 'ticket/{}/history/id/{}'.format(ticket_id, history_id)
raw_history = http_request('GET', suffix_url)
return parse_history_response(raw_history.content)
def parse_history_response(raw_history):
# type: (str) -> dict
"""
Parses raw history string into dict
Example input:
RT/4.4.2 200 Ok
# 24/24 (id/80/total)
id: 80
Ticket: 5
TimeTaken: 0
Type: Create
Field:
OldValue:
NewValue: some new value
Data:
Description: Ticket created by root
Content: Some
Multi line
Content
Creator: root
Created: 2018-07-09 11:25:59
Attachments:
Example output:
{'ID': '80',
'Ticket': '5',
'TimeTaken': '0',
'Type': 'Create',
'Field': '',
'OldValue': '',
'NewValue': 'some new value',
'Data': '',
'Description': 'Ticket created by root',
'Content': 'Some\nMulti line\nContent',
'Creator': 'root',
'Created': '2018-07-09 11:25:59',
'Attachments': ''}
Args:
raw_history: The raw ticket history string response
Returns:
        A parsed dict of keys and values
"""
keys = re.findall(r'^([a-z|A-Z]+):', raw_history, flags=re.MULTILINE)
values = re.split(r'\n[a-z|A-Z]+:', raw_history)[1:]
if len(keys) != len(values):
return {}
current_history_context = {key.upper() if key == 'id' else key: value.strip() for key, value in zip(keys, values)}
return current_history_context
def get_ticket_history(ticket_id):
suffix_url = 'ticket/{}/history'.format(ticket_id)
raw_history = http_request('GET', suffix_url)
history_context = []
headers = ['ID', 'Created', 'Creator', 'Description', 'Content']
data = raw_history.text.split('\n')
data = data[4:]
for line in data:
history_id = line.split(': ')[0]
if not history_id:
continue
history_response = get_ticket_history_by_id(ticket_id, history_id)
history_context.append(history_response)
return history_context, headers
def get_ticket_history_command():
ticket_id = demisto.args().get('ticket-id')
history_context, headers = get_ticket_history(ticket_id)
if history_context:
ec = {
'RTIR.Ticket(val.ID && val.ID === obj.ID)': {
'ID': int(ticket_id),
'History': history_context
}
}
title = 'RTIR ticket {} history'.format(ticket_id)
demisto.results({
'Type': entryTypes['note'],
'Contents': history_context,
'ContentsFormat': formats['json'],
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': tableToMarkdown(title, history_context, headers, removeNull=True),
'EntryContext': ec
})
else:
demisto.results('No results found.')
def get_ticket():
ticket_id = demisto.args().get('ticket-id')
raw_ticket = get_ticket_request(ticket_id)
if not raw_ticket or 'Ticket {} does not exist'.format(ticket_id) in raw_ticket.text:
return_error('Failed to get ticket, possibly does not exist.')
ticket_context = []
data = raw_ticket.content.split('\n')
data = data[2:]
current_ticket = {}
for line in data:
split_line = line.split(': ')
if len(split_line) == 2:
current_ticket[split_line[0]] = split_line[1]
ticket = {
'ID': ticket_string_to_id(current_ticket['id']),
'Subject': current_ticket.get('Subject'),
'State': current_ticket.get('Status'),
'Creator': current_ticket.get('Creator'),
'Created': current_ticket.get('Created'),
'Priority': current_ticket.get('Priority'),
'InitialPriority': current_ticket.get('InitialPriority'),
'FinalPriority': current_ticket.get('FinalPriority'),
'Queue': current_ticket.get('Queue'),
'Owner': current_ticket.get('Owner')
}
for key in data: # Adding ticket custom fields to outputs
if key.startswith('CF.'):
split_key = key.split(':')
if split_key[0]:
custom_field_regex = re.findall(CURLY_BRACKETS_REGEX, key)[0].replace(' ',
'') # Regex and removing white spaces
ticket[custom_field_regex] = split_key[1]
suffix_url = 'ticket/{}/links/show'.format(ticket_id)
raw_links = http_request('GET', suffix_url)
links = parse_ticket_links(raw_links.text)
ticket['LinkedTo'] = links
ticket_context.append(ticket)
ec = {
'RTIR.Ticket(val.ID && val.ID === obj.ID)': ticket
}
title = 'RTIR ticket {}'.format(ticket_id)
headers = ['ID', 'Subject', 'Status', 'Priority', 'Created', 'Queue', 'Creator', 'Owner', 'InitialPriority',
'FinalPriority', 'LinkedTo']
demisto.results({
'Type': entryTypes['note'],
'Contents': ticket_context,
'ContentsFormat': formats['json'],
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': tableToMarkdown(title, ticket, headers, removeNull=True),
'EntryContext': ec
})
def parse_ticket_links(raw_links):
# type: (str) -> list
"""
Parses the link IDs from the ticket link response
An example to an expected 'raw_links' is:
"RT/4.4.4 200 Ok
id: ticket/68315/links
Members: some-url.com/ticket/65461,
some-url.com/ticket/65462,
some-url.com/ticket/65463"
    For 'raw_links' as described above, the output will be [{'ID': '65461'}, {'ID': '65462'}, {'ID': '65463'}]
Args:
raw_links: The raw links string response
Returns:
A list of parsed IDs
"""
links = [{'ID': link} for link in re.findall(r'/ticket/(\d+)', raw_links)] if raw_links else []
return links
def add_comment_request(ticket_id, encoded):
suffix_url = 'ticket/{}/comment'.format(ticket_id)
added_comment = http_request('POST', suffix_url, data=encoded)
return added_comment
def add_comment_attachment(ticket_id, encoded, files_data):
suffix_url = 'ticket/{}/comment'.format(ticket_id)
comment = http_request('POST', suffix_url, files=files_data)
return comment.content
def add_comment():
ticket_id = demisto.args().get('ticket-id')
text = demisto.args().get('text')
content = 'Action: comment\n'
if text:
content += '\nText: ' + text.encode('utf-8')
attachments = demisto.args().get('attachment')
if attachments:
files_data = {}
if isinstance(attachments, list):
attachments_list = attachments
else: # Given as string
attachments_list = attachments.split(',')
for i, file_pair in enumerate(attachments_list):
file = demisto.getFilePath(file_pair)
file_name = file['name']
files_data['attachment_{:d}'.format(i + 1)] = (file_name, open(file['path'], 'rb'))
content += 'Attachment: {}\n'.format(file_name)
encoded = "content=" + urllib.quote_plus(content)
if attachments:
files_data.update({'content': (None, content)}) # type: ignore
comment = add_comment_attachment(ticket_id, encoded, files_data)
return_outputs('Added comment to ticket {} successfully.'.format(ticket_id), {}, comment)
else:
added_comment = add_comment_request(ticket_id, encoded)
if '200' in added_comment.content:
demisto.results('Added comment to ticket {} successfully.'.format(ticket_id))
else:
return_error('Failed to add comment')
def add_reply_request(ticket_id, encoded):
suffix_url = 'ticket/{}/comment'.format(ticket_id)
added_reply = http_request('POST', suffix_url, data=encoded)
return added_reply
def add_reply():
ticket_id = demisto.args().get('ticket-id')
content = 'Action: comment\n'
text = demisto.args().get('text')
if text:
content += '\nText: ' + text.encode('utf-8')
cc = demisto.args().get('cc')
if cc:
content += '\nCc: ' + cc
try:
encoded = "content=" + urllib.quote_plus(content)
added_reply = add_reply_request(ticket_id, encoded)
if '200' in added_reply.content:
demisto.results('Replied successfully to ticket {}.'.format(ticket_id))
else:
return_error('Failed to reply')
except Exception as e:
demisto.error(str(e))
return_error('Failed to reply')
def get_ticket_id(ticket):
return int(ticket['ID'])
def fetch_incidents():
last_run = demisto.getLastRun()
    last_ticket_id = (last_run or {}).get('ticket_id') or 0
raw_query = 'id>{}+AND+Priority>{}+AND+Queue={}{}{}'.format(last_ticket_id, FETCH_PRIORITY, apostrophe, FETCH_QUEUE,
apostrophe)
if FETCH_STATUS:
status_list = FETCH_STATUS.split(',')
status_query = '+AND+('
for status in status_list:
status_query += 'Status={}{}{}+OR+'.format(apostrophe, status, apostrophe)
status_query = fix_query_suffix(status_query)
raw_query += status_query + ')'
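    # For reference, an assembled query resembles the following (values are
    # illustrative, assuming apostrophe renders as '):
    #   id>5+AND+Priority>1+AND+Queue='Incidents'+AND+(Status='new'+OR+Status='open')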
tickets = parse_ticket_data(raw_query)
tickets.sort(key=get_ticket_id)
fetch_batch_limit = int(demisto.params().get('fetch_limit', 0))
tickets = tickets if (fetch_batch_limit == 0) else tickets[:fetch_batch_limit]
incidents = []
max_ticket_id = last_ticket_id
for ticket in tickets:
ticket_id = ticket['ID']
history_context, _ = get_ticket_history(ticket_id)
ticket['History'] = history_context
incidents.append(ticket_to_incident(ticket))
max_ticket_id = max(max_ticket_id, ticket_id)
if tickets:
demisto.setLastRun({'ticket_id': max_ticket_id})
demisto.incidents(incidents)
''' EXECUTION CODE '''
def main():
handle_proxy()
# disable insecure warnings
requests.packages.urllib3.disable_warnings()
''' GLOBAL VARS '''
global SERVER, USERNAME, PASSWORD, BASE_URL, USE_SSL, FETCH_PRIORITY, FETCH_STATUS, FETCH_QUEUE, HEADERS, REFERER
SERVER = demisto.params().get('server', '')[:-1] if demisto.params().get('server', '').endswith(
'/') else demisto.params().get('server', '')
USERNAME = demisto.params()['credentials']['identifier']
PASSWORD = demisto.params()['credentials']['password']
BASE_URL = urljoin(SERVER, '/REST/1.0/')
USE_SSL = not demisto.params().get('unsecure', False)
FETCH_PRIORITY = int(demisto.params()['fetch_priority']) - 1
FETCH_STATUS = demisto.params()['fetch_status']
FETCH_QUEUE = demisto.params()['fetch_queue']
REFERER = demisto.params().get('referer')
HEADERS = {'Referer': REFERER} if REFERER else {}
LOG('command is %s' % (demisto.command(),))
try:
if demisto.command() == 'test-module':
login()
logout()
demisto.results('ok')
        elif demisto.command() == 'fetch-incidents':
fetch_incidents()
elif demisto.command() == 'rtir-create-ticket':
create_ticket()
elif demisto.command() == 'rtir-search-ticket':
search_ticket()
elif demisto.command() == 'rtir-resolve-ticket':
close_ticket()
elif demisto.command() == 'rtir-edit-ticket':
edit_ticket()
elif demisto.command() == 'rtir-ticket-history':
get_ticket_history_command()
elif demisto.command() == 'rtir-ticket-attachments':
get_ticket_attachments_command()
elif demisto.command() == 'rtir-get-ticket':
get_ticket()
elif demisto.command() == 'rtir-add-comment':
add_comment()
elif demisto.command() == 'rtir-add-reply':
add_reply()
    except Exception as e:
        LOG(str(e))
LOG.print_log()
raise
if __name__ in ('__builtin__', 'builtins'):
main()
| 33.874356 | 128 | 0.611364 |
b94d5d6f8b79aa97fa4e6cc0500bf00e8e2e8e74 | 26,948 | py | Python | pyapprox_dev/pyapprox_dev/fenics_models/advection_diffusion_wrappers.py | ConnectedSystems/pyapprox | 4f405654c707cba83d211f327c0f0fdbc95efa29 | [
"MIT"
] | 26 | 2019-12-16T02:21:15.000Z | 2022-03-17T09:59:18.000Z | pyapprox_dev/pyapprox_dev/fenics_models/advection_diffusion_wrappers.py | ConnectedSystems/pyapprox | 4f405654c707cba83d211f327c0f0fdbc95efa29 | [
"MIT"
] | 9 | 2020-03-03T03:04:55.000Z | 2021-08-19T22:50:42.000Z | pyapprox_dev/pyapprox_dev/fenics_models/advection_diffusion_wrappers.py | ConnectedSystems/pyapprox | 4f405654c707cba83d211f327c0f0fdbc95efa29 | [
"MIT"
] | 7 | 2020-03-02T03:49:17.000Z | 2021-02-17T02:07:53.000Z | import sys
if sys.platform == 'win32':
raise ImportError('Not available on Windows')
from pyapprox_dev.fenics_models.advection_diffusion import *
def dl_qoi_functional_misc(u):
r"""
Use the QoI from [JEGGIJNME2020]
To reproduce adaptive multi index results use following
expr = dl.Expression(
'1./(sigma*sigma*2*pi)*std::exp(-(std::pow(x[0]-xk,2)+std::pow(x[1]-yk,2))/sigma*sigma)',
xk=0.3,yk=0.5,sigma=0.16,degree=2)
The /sigma*sigma is an error it should be 1/(2*sigma*sigma)
"""
expr = dla.Expression(
'1./(sigma*sigma*2*pi)*std::exp(-(std::pow(x[0]-xk,2)+std::pow(x[1]-yk,2))/(2*sigma*sigma))',
xk=0.3, yk=0.5, sigma=0.16, degree=2)
qoi = dla.assemble(u*expr*dl.dx(u.function_space().mesh()))
return qoi
def qoi_functional_misc(u):
return np.asarray([dl_qoi_functional_misc(u)])
def qoi_functional_grad_misc(u, model):
J = dl_qoi_functional_misc(u)
control = dla.Control(model.kappa)
dJd_kappa = dla.compute_gradient(J, [control])[0]
return dJd_kappa.vector()[:].copy()
def get_misc_forcing(degree):
r"""
Use the forcing from [JEGGIJNME2020]
"""
forcing = dla.Expression(
'(1.5+cos(2*pi*t))*cos(x[0])', degree=degree, t=0)
return forcing
def get_gaussian_source_forcing(degree, random_sample):
forcing = dla.Expression(
'A/(sig2*2*pi)*std::exp(-(std::pow(x[0]-xk,2)+std::pow(x[1]-yk,2))/(2*sig2))', xk=random_sample[0], yk=random_sample[1], sig2=0.05**2, A=2., degree=degree)
return forcing
def get_nobile_diffusivity(corr_len, degree, random_sample):
nvars = random_sample.shape[0]
path = os.path.abspath(os.path.dirname(__file__))
if '2017' in dl.__version__:
filename = os.path.join(
path, "src,""nobile_diffusivity_fenics_class_2017.cpp")
else:
filename = os.path.join(
path, "src", "nobile_diffusivity_fenics_class.cpp")
with open(filename, 'r') as kappa_file:
kappa_code = kappa_file.read()
if '2017' in dl.__version__:
kappa = dla.UserExpression(kappa_code, degree=degree)
else:
kappa = dla.CompiledExpression(
dl.compile_cpp_code(kappa_code).NobileDiffusivityExpression(),
degree=degree)
kappa.initialize_kle(nvars, corr_len)
if '2017' in dl.__version__:
for ii in range(random_sample.shape[0]):
kappa.set_random_sample(random_sample[ii], ii)
else:
kappa.set_random_sample(random_sample)
return kappa
def get_default_velocity(degree, vel_vec):
if vel_vec.shape[0] == 2:
beta = dla.Expression(
(str(vel_vec[0]), str(vel_vec[1])), degree=degree)
else:
        beta = dla.Constant(vel_vec[0])
return beta
def setup_dirichlet_and_periodic_boundary_conditions_and_function_space(
        mesh, degree, random_sample):
    assert random_sample is None
    pbc = RectangularMeshPeriodicBoundary(1)
    function_space = dl.FunctionSpace(
        mesh, "CG", degree, constrained_domain=pbc)
    bndry_obj = dl.CompiledSubDomain(
        "on_boundary&&(near(x[0],0)||near(x[0],1))")
    boundary_conditions = [['dirichlet', bndry_obj, dla.Constant(0)]]
    return boundary_conditions, function_space
def setup_zero_flux_neumann_boundary_conditions(mesh, degree, random_sample):
assert random_sample is None
function_space = dl.FunctionSpace(mesh, "CG", degree)
bndry_objs = get_2d_rectangular_mesh_boundaries(0, 1, 0, 1)
boundary_conditions = [
['neumann', bndry_objs[0], dla.Constant(0)],
['neumann', bndry_objs[1], dla.Constant(0)],
['neumann', bndry_objs[2], dla.Constant(0)],
['neumann', bndry_objs[3], dla.Constant(0)]]
return boundary_conditions
class AdvectionDiffusionModel(object):
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%#
# Change the following functions to modify governing equations
# initialize_random_expressions()
# get_initial_condition()
# get_boundary_conditions_and_function_space()
# get_velocity()
# get_forcing()
# get_diffusivity()
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%#
def initialize_random_expressions(self, random_sample):
r"""
Overide this class to split random_samples into the parts that effect
the 5 random quantities
"""
init_condition = self.get_initial_condition(None)
boundary_conditions, function_space = \
self.get_boundary_conditions_and_function_space(None)
beta = self.get_velocity(None)
forcing = self.get_forcing(None)
kappa = self.get_diffusivity(random_sample)
return init_condition, boundary_conditions, function_space, beta, \
forcing, kappa
def get_initial_condition(self, random_sample):
r"""By Default the initial condition is deterministic and set to zero"""
assert random_sample is None
initial_condition = dla.Constant(0.0)
return initial_condition
def get_boundary_conditions_and_function_space(self, random_sample):
r"""By Default the boundary conditions are deterministic, Dirichlet and
and set to zero"""
assert random_sample is None
function_space = dl.FunctionSpace(self.mesh, "CG", self.degree)
boundary_conditions = None
return boundary_conditions, function_space
def get_velocity(self, random_sample):
r"""By Default the advection is deterministic and set to zero"""
assert random_sample is None
beta = dla.Expression((str(0), str(0)), degree=self.degree)
return beta
def get_forcing(self, random_sample):
r"""By Default the forcing is deterministic and set to
.. math:: (1.5+\cos(2\pi t))*cos(x_1)
where :math:`t` is time and :math:`x_1` is the first spatial dimension.
"""
forcing = get_misc_forcing(self.degree)
return forcing
def get_diffusivity(self, random_sample):
r"""
Use the random diffusivity specified in [JEGGIJNME2020].
"""
kappa = get_nobile_diffusivity(
self.options['corr_len'], self.degree, random_sample)
return kappa
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%#
# Change the following functions to modify mapping of discretization
# parameters to mesh and timestep resolution
# get_timestep()
# get_mesh_resolution()
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%#
def get_timestep(self, dt_level):
dt = self.final_time/2**(dt_level+2)
return dt
def get_mesh_resolution(self, mesh_levels):
nx_level, ny_level = mesh_levels
nx = 2**(nx_level+2)
ny = 2**(ny_level+2)
return nx, ny
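    # Worked example of the level mapping (illustrative): dt_level=1 gives
    # dt = final_time/8, and mesh_levels=(2, 3) gives an nx=16 by ny=32 mesh.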
def get_mesh(self, resolution_levels):
r"""The arguments to this function are the outputs of
get_degrees_of_freedom_and_timestep()"""
nx, ny = np.asarray(resolution_levels, dtype=int)
mesh = dla.RectangleMesh(dl.Point(0, 0), dl.Point(1, 1), nx, ny)
return mesh
def set_num_config_vars(self):
r"""
Should be equal to the number of physical dimensions + 1
(for the temporal resolution)
"""
self.num_config_vars = 3
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%#
# Do not change the following functions
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%#
def __init__(self, final_time, degree, qoi_functional,
second_order_timestepping=True, options={},
qoi_functional_grad=None):
self.final_time = final_time
self.qoi_functional = qoi_functional
self.degree = degree
self.second_order_timestepping = second_order_timestepping
self.set_num_config_vars()
self.options = options
self.qoi_functional_grad = qoi_functional_grad
def solve(self, samples):
r"""
Run the simulation
Notes
-----
Dolfin objects must be initialized inside this function otherwise
this object cannot be pickled and used with multiprocessing.Pool
"""
assert samples.ndim == 2
assert samples.shape[1] == 1
resolution_levels = samples[-self.num_config_vars:, 0]
dt = self.get_timestep(resolution_levels[-1])
self.mesh = self.get_mesh(
self.get_mesh_resolution(resolution_levels[:-1]))
random_sample = samples[:-self.num_config_vars, 0]
init_condition, boundary_conditions, function_space, beta, \
forcing, kappa = self.initialize_random_expressions(
random_sample)
# when dla is dolfin_adjoint
# Must project dla.CompiledExpression to avoid error
# site-packages/pyadjoint/overloaded_type.py", line 136,
# in _ad_convert_type raise NotImplementedError
self.kappa = dla.interpolate(kappa, function_space)
# this is not necessary when just using dolfin
sol = run_model(
function_space, self.kappa, forcing,
init_condition, dt, self.final_time,
boundary_conditions, velocity=beta,
second_order_timestepping=self.second_order_timestepping,
intermediate_times=self.options.get('intermediate_times', None))
return sol
def __call__(self, samples, jac=False):
sol = self.solve(samples)
vals = np.atleast_1d(self.qoi_functional(sol))
if vals.ndim == 1:
vals = vals[:, np.newaxis]
if jac is False:
return vals
assert self.qoi_functional_grad is not None
grad = self.qoi_functional_grad(sol, self)
return vals, grad
class AdvectionDiffusionSourceInversionModel(AdvectionDiffusionModel):
def initialize_random_expressions(self, random_sample):
r"""
Overide this class to split random_samples into the parts that effect
the 5 random quantities
"""
init_condition = self.get_initial_condition(None)
boundary_conditions, function_space = \
self.get_boundary_conditions_and_function_space(None)
beta = self.get_velocity(None)
forcing = self.get_forcing(random_sample[:2])
kappa = self.get_diffusivity(random_sample[2:])
kappa = dla.project(kappa, function_space)
return init_condition, boundary_conditions, function_space, beta, \
forcing, kappa
def get_forcing(self, random_sample):
source_stop_time = self.final_time
s = self.options['source_strength']
h = self.options['source_width']
forcing = dla.Expression(
'((t>ft)?0.:1.)*s/(2.*pi*h*h)*std::exp(-(pow(x[0]-x0,2)+pow(x[1]-x1,2))/(2.*h*h))', x0=random_sample[0], x1=random_sample[1], t=0, ft=source_stop_time, s=s, h=h, degree=self.degree)
return forcing
def get_diffusivity(self, random_sample):
r"""
Use the random diffusivity specified in [JEGGIJNME2020].
"""
kappa = dla.Constant(1.0)
return kappa
def get_boundary_conditions_and_function_space(
self, random_sample):
r"""By Default the boundary conditions are deterministic, Dirichlet and
and set to zero"""
assert random_sample is None
function_space = dl.FunctionSpace(self.mesh, "CG", self.degree)
bndry_objs = get_2d_rectangular_mesh_boundaries(0, 1, 0, 1)
boundary_conditions = [
['neumann', bndry_objs[0], dla.Constant(0)],
['neumann', bndry_objs[1], dla.Constant(0)],
['neumann', bndry_objs[2], dla.Constant(0)],
['neumann', bndry_objs[3], dla.Constant(0)]]
return boundary_conditions, function_space
def qoi_functional_source_inversion(sols):
r"""
JINGLAI LI AND YOUSSEF M. MARZOUK. ADAPTIVE CONSTRUCTION OF SURROGATES FOR
THE BAYESIAN SOLUTION OF INVERSE PROBLEMS
sensor_times t=0.1, t=0.2
noise std = 0.1
true source location = 0.25,0.25
source strength and width
s=2, sigma=0.05
difusivity = 1
Youssef M. Marzouk, Habib N. Najm, Larry A. Rahn,
Stochastic spectral methods for efficient Bayesian solution of inverse problems,
Journal of Computational Physics,
Volume 224, Issue 2,
2007,
Pages 560-586,https://doi.org/10.1016/j.jcp.2006.10.010
noise_std = 0.4
"""
sensor_locations = np.array(
[[0, 0], [0, 0.5], [0., 1.], [0.5, 0], [0.5, 0.5], [0.5, 1.],
[1, 0], [1, 0.5], [1., 1.]]).T
vals = np.empty(sensor_locations.shape[1]*len(sols))
kk = 0
for jj, sol in enumerate(sols):
for ii, loc in enumerate(sensor_locations.T):
vals[kk] = sol(loc)
kk += 1
return vals
def setup_advection_diffusion_benchmark(nvars, corr_len,
max_eval_concurrency=1):
r"""
Compute functionals of the following model of transient advection-diffusion (with 3 configure variables which control the two spatial mesh resolutions and the timestep)
.. math::
\frac{\partial u}{\partial t}(x,t,\rv) + \nabla u(x,t,\rv)-\nabla\cdot\left[k(x,\rv) \nabla u(x,t,\rv)\right] &=g(x,t) \qquad (x,t,\rv)\in D\times [0,1]\times\rvdom\\
\mathcal{B}(x,t,\rv)&=0 \qquad\qquad (x,t,\rv)\in \partial D\times[0,1]\times\rvdom\\
u(x,t,\rv)&=u_0(x,\rv) \qquad (x,t,\rv)\in D\times\{t=0\}\times\rvdom
Following [NTWSIAMNA2008]_, [JEGGIJNME2020]_ we set
.. math:: g(x,t)=(1.5+\cos(2\pi t))\cos(x_1),
the initial condition as :math:`u(x,z)=0`, :math:`B(x,t,z)` to be zero dirichlet boundary conditions.
and we model the diffusivity :math:`k` as a random field represented by the
Karhunen-Loeve (like) expansion (KLE)
.. math::
\log(k(x,\rv)-0.5)=1+\rv_1\left(\frac{\sqrt{\pi L}}{2}\right)^{1/2}+\sum_{k=2}^d \lambda_k\phi(x)\rv_k,
with
.. math::
\lambda_k=\left(\sqrt{\pi L}\right)^{1/2}\exp\left(-\frac{(\lfloor\frac{k}{2}\rfloor\pi L)^2}{4}\right) k>1, \qquad\qquad \phi(x)=
\begin{cases}
\sin\left(\frac{(\lfloor\frac{k}{2}\rfloor\pi x_1)}{L_p}\right) & k \text{ even}\,,\\
\cos\left(\frac{(\lfloor\frac{k}{2}\rfloor\pi x_1)}{L_p}\right) & k \text{ odd}\,.
\end{cases}
where :math:`L_p=\max(1,2L_c)`, :math:`L=\frac{L_c}{L_p}`.
The quantity of interest :math:`f(z)` is the measurement of the solution at a location :math:`x_k` at the final time :math:`T=1` obtained via the linear functional
.. math:: f(z)=\int_D u(x,T,z)\frac{1}{2\pi\sigma^2}\exp\left(-\frac{\lVert x-x_k \rVert^2_2}{\sigma^2}\right) dx
Parameters
----------
nvars : integer
The number of variables of the KLE
corr_len : float
The correlation length :math:`L_c` of the covariance kernel
max_eval_concurrency : integer
The maximum number of simulations that can be run in parallel. Should be no more than the maximum number of cores on the computer being used
Returns
-------
benchmark : pya.Benchmark
Object containing the benchmark attributes documented below
fun : callable
The quantity of interest :math:`f(w)` with signature
``fun(w) -> np.ndarray``
where ``w`` is a 2D np.ndarray with shape (nvars+3,nsamples) and the
output is a 2D np.ndarray with shape (nsamples,1). The first ``nvars``
rows of ``w`` are realizations of the random variables. The last 3 rows
are configuration variables specifying the numerical discretization of
the PDE model. Specifically the first and second configuration variables
specify the levels :math:`l_{x_1}` and :math:`l_{x_2}` which dictate
the resolution of the FEM mesh in the directions :math:`{x_1}` and
:math:`{x_2}` respectively. The number of cells in the :math:`{x_i}`
direction is given by :math:`2^{l_{x_i}+2}`. The third configuration
variable specifies the level :math:`l_t` of the temporal discretization.
The number of timesteps satisfies :math:`2^{l_{t}+2}` so the timestep
        size is :math:`T/2^{l_{t}+2}`.
variable : pya.IndependentMultivariateRandomVariable
Object containing information of the joint density of the inputs z
which is the tensor product of independent and identically distributed
uniform variables on :math:`[-\sqrt{3},\sqrt{3}]`.
Examples
--------
>>> from pyapprox_dev.benchmarks.benchmarks import setup_benchmark
>>> benchmark=setup_benchmark('advection-diffusion',nvars=2)
>>> print(benchmark.keys())
dict_keys(['fun', 'variable'])
"""
from scipy import stats
from pyapprox.models.wrappers import TimerModelWrapper, PoolModel, \
WorkTrackingModel
from pyapprox.models.wrappers import PoolModel
from pyapprox.variables import IndependentMultivariateRandomVariable
from pyapprox.benchmarks.benchmarks import Benchmark
univariate_variables = [stats.uniform(-np.sqrt(3), 2*np.sqrt(3))]*nvars
variable = IndependentMultivariateRandomVariable(univariate_variables)
final_time, degree = 1.0, 1
options = {'corr_len': corr_len}
base_model = AdvectionDiffusionModel(
final_time, degree, qoi_functional_misc, second_order_timestepping=False,
options=options, qoi_functional_grad=qoi_functional_grad_misc)
# add wrapper to allow execution times to be captured
timer_model = TimerModelWrapper(base_model, base_model)
pool_model = PoolModel(
timer_model, max_eval_concurrency, base_model=base_model)
# add wrapper that tracks execution times.
model = WorkTrackingModel(pool_model, base_model,
base_model.num_config_vars)
attributes = {'fun': model, 'variable': variable}
return Benchmark(attributes)
def setup_multi_level_advection_diffusion_benchmark(
nvars, corr_len, max_eval_concurrency=1):
r"""
    Compute functionals of the transient advection-diffusion model (with 1 configure variable which controls the two spatial mesh resolutions and the timestep). An integer increase in the configure variable value will raise the 3 numerical discretization parameters by the same integer.
See :func:`pyapprox_dev.advection_diffusion_wrappers.setup_advection_diffusion_benchmark` for details on function arguments and output.
"""
from scipy import stats
from pyapprox.models.wrappers import TimerModelWrapper, PoolModel, \
WorkTrackingModel
from pyapprox.models.wrappers import PoolModel
from pyapprox.variables import IndependentMultivariateRandomVariable
from pyapprox.benchmarks.benchmarks import Benchmark
from pyapprox.models.wrappers import MultiLevelWrapper
univariate_variables = [stats.uniform(-np.sqrt(3), 2*np.sqrt(3))]*nvars
variable = IndependentMultivariateRandomVariable(univariate_variables)
final_time, degree = 1.0, 1
options = {'corr_len': corr_len}
base_model = AdvectionDiffusionModel(
final_time, degree, qoi_functional_misc, second_order_timestepping=False,
options=options)
multilevel_model = MultiLevelWrapper(
base_model, base_model.num_config_vars)
# add wrapper to allow execution times to be captured
timer_model = TimerModelWrapper(multilevel_model, base_model)
pool_model = PoolModel(
timer_model, max_eval_concurrency, base_model=base_model)
model = WorkTrackingModel(
pool_model, base_model, multilevel_model.num_config_vars)
attributes = {'fun': model, 'variable': variable,
'multi_level_model': multilevel_model}
return Benchmark(attributes)
def setup_advection_diffusion_source_inversion_benchmark(measurement_times=np.array([0.05, 0.15]), source_strength=0.5, source_width=0.1, true_sample=np.array([[0.25, 0.75, 4, 4, 4]]).T, noise_stdev=0.4, max_eval_concurrency=1):
r"""
Compute functionals of the following model of transient diffusion of
a contaminant
.. math::
\frac{\partial u}{\partial t}(x,t,\rv) + \nabla u(x,t,\rv)-\nabla\cdot\left[k(x,\rv) \nabla u(x,t,\rv)\right] &=g(x,t) \qquad (x,t,\rv)\in D\times [0,1]\times\rvdom\\
\mathcal{B}(x,t,\rv)&=0 \qquad\qquad (x,t,\rv)\in \partial D\times[0,1]\times\rvdom\\
u(x,t,\rv)&=u_0(x,\rv) \qquad (x,t,\rv)\in D\times\{t=0\}\times\rvdom
Following [MNRJCP2006]_, [LMSISC2014]_ we set
.. math:: g(x,t)=\frac{s}{2\pi h^2}\exp\left(-\frac{\lvert x-x_\mathrm{src}\rvert^2}{2h^2}\right)
the initial condition as :math:`u(x,z)=0`, :math:`B(x,t,z)` to be zero Neumann boundary conditions, i.e.
.. math:: \nabla u\cdot n = 0 \quad\mathrm{on} \quad\partial D
and we model the diffusivity :math:`k=1` as a constant.
The quantities of interest are point observations :math:`u(x_l)`
taken at :math:`P` points in time :math:`\{t_p\}_{p=1}^P` at :math:`L`
locations :math:`\{x_l\}_{l=1}^L`. The final time :math:`T` is the last
observation time.
These functionals can be used to define the posterior distribution
.. math:: \pi_{\text{post}}(\rv)=\frac{\pi(\V{y}|\rv)\pi(\rv)}{\int_{\rvdom} \pi(\V{y}|\rv)\pi(\rv)d\rv}
where the prior is the tensor product of independent and identically
distributed uniform variables on :math:`[0,1]` i.e.
:math:`\pi(\rv)=1`, and the likelihood is given by
.. math:: \pi(\V{y}|\rv)=\frac{1}{(2\pi)^{d/2}\sigma}\exp\left(-\frac{1}{2}\frac{(y-f(\rv))^T(y-f(\rv))}{\sigma^2}\right)
and :math:`y` are noisy observations of the solution `u` at the 9
points of a uniform :math:`3\times 3` grid covering the physical domain
:math:`D` at successive times :math:`\{t_p\}_{p=1}^P`. Here the noise is indepenent and Normally distrbuted with mean
zero and variance :math:`\sigma^2`.
Parameters
----------
measurement_times : np.ndarray (P)
The times :math:`\{t_p\}_{p=1}^P` at which measurements of the
contaminant concentration are taken
source_strength : float
The source strength :math:`s`
source_width : float
The source width :math:`h`
    true_sample : np.ndarray (5, 1)
        The true location of the source concatenated with the configure sample,
        used to generate the observations used in the likelihood function
noise_stdev : float
        The standard deviation :math:`\sigma` of the observational noise
max_eval_concurrency : integer
The maximum number of simulations that can be run in parallel. Should
be no more than the maximum number of cores on the computer being used
Returns
-------
benchmark : pya.Benchmark
Object containing the benchmark attributes documented below
fun : callable
The quantity of interest :math:`f(w)` with signature
``fun(w) -> np.ndarray``
where ``w`` is a 2D np.ndarray with shape (nvars+3,nsamples) and the
output is a 2D np.ndarray with shape (nsamples,1). The first ``nvars``
rows of ``w`` are realizations of the random variables. The last 3 rows
are configuration variables specifying the numerical discretization of
the PDE model. Specifically the first and second configuration variables
specify the levels :math:`l_{x_1}` and :math:`l_{x_2}` which dictate
the resolution of the FEM mesh in the directions :math:`{x_1}` and
:math:`{x_2}` respectively. The number of cells in the :math:`{x_i}`
direction is given by :math:`2^{l_{x_i}+2}`. The third configuration
variable specifies the level :math:`l_t` of the temporal discretization.
The number of timesteps satisfies :math:`2^{l_{t}+2}` so the timestep
        size is :math:`T/2^{l_{t}+2}`.
variable : pya.IndependentMultivariateRandomVariable
Object containing information of the joint density of the inputs z
which is the tensor product of independent and identically distributed
uniform variables on :math:`[0,1]`.
Examples
--------
>>> from pyapprox_dev.benchmarks.benchmarks import setup_benchmark
>>> benchmark=setup_benchmark('advection-diffusion',nvars=2)
>>> print(benchmark.keys())
dict_keys(['fun', 'variable'])
References
----------
.. [MNRJCP2006] `Youssef M. Marzouk, Habib N. Najm, Larry A. Rahn, Stochastic spectral methods for efficient Bayesian solution of inverse problems, Journal of Computational Physics, Volume 224, Issue 2, 2007, Pages 560-586, <https://doi.org/10.1016/j.jcp.2006.10.010>`_
.. [LMSISC2014] `Jinglai Li and Youssef M. Marzouk. Adaptive Construction of Surrogates for the Bayesian Solution of Inverse Problems, SIAM Journal on Scientific Computing 2014 36:3, A1163-A1186 <https://doi.org/10.1137/130938189>`_
Notes
-----
The example from [MNRJCP2006]_ can be obtained by setting `s=0.5`, `h=0.1`,
`measurement_times=np.array([0.05,0.15])` and `noise_stdev=0.1`
The example from [LMSISC2014]_ can be obtained by setting `s=2`, `h=0.05`,
`measurement_times=np.array([0.1,0.2])` and `noise_stdev=0.1`
"""
from scipy import stats
from pyapprox.models.wrappers import TimerModelWrapper, PoolModel, \
WorkTrackingModel
from pyapprox.models.wrappers import PoolModel
from pyapprox.variables import IndependentMultivariateRandomVariable
from pyapprox.benchmarks.benchmarks import Benchmark
univariate_variables = [stats.uniform(0, 1)]*2
variable = IndependentMultivariateRandomVariable(univariate_variables)
final_time, degree = measurement_times.max(), 2
options = {'intermediate_times': measurement_times[:-1],
'source_strength': source_strength, 'source_width': source_width}
base_model = AdvectionDiffusionSourceInversionModel(
final_time, degree, qoi_functional_source_inversion,
second_order_timestepping=False, options=options)
# add wrapper to allow execution times to be captured
timer_model = TimerModelWrapper(base_model, base_model)
pool_model = PoolModel(
timer_model, max_eval_concurrency, base_model=base_model)
# add wrapper that tracks execution times.
model = WorkTrackingModel(pool_model, base_model)
from pyapprox.bayesian_inference.markov_chain_monte_carlo import \
GaussianLogLike
if true_sample.shape != (5, 1):
msg = 'true_sample must be the concatenation of random sample and the '
msg += 'configure sample'
raise Exception(msg)
noiseless_data = model(true_sample)[0, :]
noise = np.random.normal(0, noise_stdev, (noiseless_data.shape[0]))
data = noiseless_data + noise
loglike = GaussianLogLike(model, data, noise_stdev)
attributes = {'fun': model, 'variable': variable, 'loglike': loglike}
return Benchmark(attributes)
| 41.522342 | 280 | 0.656004 |
63be28f3531518f4b55f87e2dfb865e1a38b1537 | 1,097 | py | Python | imagehost/imagehost/main.py | bijualbert/NodeVid.com | b5fae2d8a0a47052f012b2d5ae7c67431f5314e8 | [
"Apache-2.0"
] | 1 | 2018-04-30T17:07:14.000Z | 2018-04-30T17:07:14.000Z | imagehost/imagehost/main.py | NavinF/NodeVid.com | b5fae2d8a0a47052f012b2d5ae7c67431f5314e8 | [
"Apache-2.0"
] | null | null | null | imagehost/imagehost/main.py | NavinF/NodeVid.com | b5fae2d8a0a47052f012b2d5ae7c67431f5314e8 | [
"Apache-2.0"
] | null | null | null | from flask import Flask, url_for, request, send_file
import ffmpy
import requests
import os.path
import posixpath
# Workflow
# Delete video and keyframes (from previous try)
# Accept input url via json
# Download video
# Feed video into FFMpeg, create keyframes
# Serve keyframes out
# Repeat
# internal.atube.com/keyframes?url={}&frame={}
app = Flask(__name__, static_url_path='/static')
@app.route("/keyframes")
def keyframes():
url = request.args.get('url')
frame = request.args.get('frame')
path = url.rsplit('/', 1)[1]
filename = posixpath.basename(path)
if not os.path.isfile(filename):
r = requests.get(url, allow_redirects=True)
open(filename, 'wb').write(r.content)
os.mkdir('static/' + filename)
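        # -skip_frame nokey decodes only keyframes; -vsync 0 passes them through
        # without duplication and -f image2 writes them out as numbered JPEGs.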
ff = ffmpy.FFmpeg(
inputs={filename: '-skip_frame nokey'},
outputs={'static/' + filename + '/thumbnails-%09d.jpeg': '-vsync 0 -r 30 -f image2'}
)
ff.run()
return send_file('static/' + filename + '/thumbnails-%09d.jpeg' % int(frame))
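# Illustrative request (hypothetical host and video URL; 'url' must be fetchable):
#   GET http://localhost:5000/keyframes?url=http://example.com/video.mp4&frame=1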
if __name__ == '__main__':
app.run(host='::')
| 26.119048 | 92 | 0.646308 |
157044cbf54a6160f6109e4420f85916e19c8b0e | 5,771 | py | Python | fig/project.py | KyleAMathews/fig | a8bc51b22994928482b4d5dd4824abd9133736ce | [
"BSD-3-Clause"
] | 1 | 2020-12-26T07:55:13.000Z | 2020-12-26T07:55:13.000Z | fig/project.py | KyleAMathews/fig | a8bc51b22994928482b4d5dd4824abd9133736ce | [
"BSD-3-Clause"
] | null | null | null | fig/project.py | KyleAMathews/fig | a8bc51b22994928482b4d5dd4824abd9133736ce | [
"BSD-3-Clause"
] | null | null | null | from __future__ import unicode_literals
from __future__ import absolute_import
import logging
from .service import Service
log = logging.getLogger(__name__)
def sort_service_dicts(services):
# Get all services that are dependant on another.
dependent_services = [s for s in services if s.get('links')]
flatten_links = sum([s['links'] for s in dependent_services], [])
# Get all services that are not linked to and don't link to others.
non_dependent_sevices = [s for s in services if s['name'] not in flatten_links and not s.get('links')]
sorted_services = []
# Topological sort.
while dependent_services:
n = dependent_services.pop()
# Check if a service is dependent on itself, if so raise an error.
if n['name'] in n.get('links', []):
raise DependencyError('A service can not link to itself: %s' % n['name'])
sorted_services.append(n)
for l in n['links']:
# Get the linked service.
linked_service = next(s for s in services if l == s['name'])
# Check that there isn't a circular import between services.
if n['name'] in linked_service.get('links', []):
raise DependencyError('Circular import between %s and %s' % (n['name'], linked_service['name']))
# Check the linked service has no links and is not already in the
# sorted service list.
if not linked_service.get('links') and linked_service not in sorted_services:
sorted_services.insert(0, linked_service)
    return non_dependent_services + sorted_services
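# Illustrative only, with hypothetical service dicts: services that are linked
# to are moved ahead of the services that link to them, e.g.
#   sort_service_dicts([{'name': 'web', 'links': ['db']}, {'name': 'db'}])
# returns [{'name': 'db'}, {'name': 'web'}].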
class Project(object):
"""
A collection of services.
"""
def __init__(self, name, services, client):
self.name = name
self.services = services
self.client = client
@classmethod
def from_dicts(cls, name, service_dicts, client):
"""
Construct a ServiceCollection from a list of dicts representing services.
"""
project = cls(name, [], client)
for service_dict in sort_service_dicts(service_dicts):
# Reference links by object
links = []
if 'links' in service_dict:
for service_name in service_dict.get('links', []):
links.append(project.get_service(service_name))
del service_dict['links']
project.services.append(Service(client=client, project=name, links=links, **service_dict))
return project
@classmethod
def from_config(cls, name, config, client):
dicts = []
for service_name, service in list(config.items()):
service['name'] = service_name
dicts.append(service)
return cls.from_dicts(name, dicts, client)
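    # Sketch of typical construction (hypothetical config mirroring a fig.yml):
    #   Project.from_config('myproj', {'db': {'image': 'postgres'},
    #       'web': {'image': 'busybox', 'links': ['db']}}, client)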
def get_service(self, name):
"""
Retrieve a service by name. Raises NoSuchService
if the named service does not exist.
"""
for service in self.services:
if service.name == name:
return service
raise NoSuchService(name)
def get_services(self, service_names=None):
"""
Returns a list of this project's services filtered
by the provided list of names, or all services if
service_names is None or [].
Preserves the original order of self.services.
Raises NoSuchService if any of the named services
do not exist.
"""
if service_names is None or len(service_names) == 0:
return self.services
else:
unsorted = [self.get_service(name) for name in service_names]
return [s for s in self.services if s in unsorted]
def recreate_containers(self, service_names=None):
"""
For each service, create or recreate their containers.
Returns a tuple with two lists. The first is a list of
(service, old_container) tuples; the second is a list
of (service, new_container) tuples.
"""
old = []
new = []
for service in self.get_services(service_names):
(s_old, s_new) = service.recreate_containers()
old += [(service, container) for container in s_old]
new += [(service, container) for container in s_new]
return (old, new)
def start(self, service_names=None, **options):
for service in self.get_services(service_names):
service.start(**options)
def stop(self, service_names=None, **options):
for service in self.get_services(service_names):
service.stop(**options)
def kill(self, service_names=None, **options):
for service in self.get_services(service_names):
service.kill(**options)
def build(self, service_names=None, **options):
for service in self.get_services(service_names):
if service.can_be_built():
service.build(**options)
else:
log.info('%s uses an image, skipping' % service.name)
def remove_stopped(self, service_names=None, **options):
for service in self.get_services(service_names):
service.remove_stopped(**options)
def containers(self, service_names=None, *args, **kwargs):
l = []
for service in self.get_services(service_names):
for container in service.containers(*args, **kwargs):
l.append(container)
return l
class NoSuchService(Exception):
def __init__(self, name):
self.name = name
self.msg = "No such service: %s" % self.name
def __str__(self):
return self.msg
class DependencyError(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg | 36.295597 | 112 | 0.621209 |
c26a53c0b3649d06f428ccf4507ac2f9636a7cc7 | 628 | py | Python | stringss/string compression.py | PayalSasmal10/datastructures-algorithms | 8b73ad0e0a6c9bcb860183615ebbda510549fb1f | [
"MIT"
] | 2 | 2022-02-04T04:18:13.000Z | 2022-02-21T10:05:15.000Z | stringss/string compression.py | PayalSasmal10/Datastructures-Algorithms | 8b73ad0e0a6c9bcb860183615ebbda510549fb1f | [
"MIT"
] | 1 | 2022-01-07T18:52:22.000Z | 2022-01-07T18:52:22.000Z | stringss/string compression.py | PayalSasmal10/datastructures-algorithoms | 8b73ad0e0a6c9bcb860183615ebbda510549fb1f | [
"MIT"
] | null | null | null | # input = ["a", "a", "b", "b", "c", "c", "c"]
# o/p = a2b2c3
# O(n) time complexity
string_chr = ["a", "a", "b", "b", "c", "c", "c"]
def stringCompression(string_chr):
    newString = ''
    count = 1
    for i in range(len(string_chr)-1):
        if string_chr[i] == string_chr[i+1]:
            count += 1
        else:
            newString += string_chr[i] + str(count)
            count = 1
        if i == len(string_chr)-2:
            # The last character lives at i+1; append its run here.
            newString += string_chr[i+1] + str(count)
    return newString
print(stringCompression(string_chr))
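# Additional illustrative check: a trailing run of length one.
print(stringCompression(["a", "a", "b"]))  # -> a2b1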
| 21.655172 | 49 | 0.490446 |
96fca01cd42b473cbe505aaca2f805d677694126 | 1,797 | py | Python | PythonLearn/basic/list.py | OKKyu/PythonLearn | 48dc4cc2a1a34d99b09f8d37a5566d448dcf987c | [
"MIT"
] | null | null | null | PythonLearn/basic/list.py | OKKyu/PythonLearn | 48dc4cc2a1a34d99b09f8d37a5566d448dcf987c | [
"MIT"
] | null | null | null | PythonLearn/basic/list.py | OKKyu/PythonLearn | 48dc4cc2a1a34d99b09f8d37a5566d448dcf987c | [
"MIT"
] | null | null | null | #! python3
# -*- coding: UTF-8 -*-
'''
list.py
Basics of list object.
'''
# List declaration, similar to JavaScript
squares = [1, 4, 9, 16, 25]
print(squares)
# Index access works the same as for strings and other sequence objects.
print(squares[0])
print(squares[-1])
print(squares[-3:])
# Slicing the full list returns a new copy of the list.
newSqu = squares[:]
print("Original List:" + str(squares))
print("Copied List:" + str(newSqu))
# Lists are mutable, so element contents can be changed, and any type is allowed.
newSqu[1] = 'hello'
print("Original List:" + str(squares))
print("Copied List:" + str(newSqu))
# Lists can also be concatenated.
newSqu = newSqu + ['HeyHeyHeeey!', 'GYaaaaaAAA!!']
print("Copied List:" + str(newSqu))
# append() adds an element to the end of the list.
newSqu.append(32111)
print("Copied List:" + str(newSqu))
# Assignment through a slice is also possible.
newSqu[2:4] = [0, 0, 1]
print("Copied List:" + str(newSqu))
# Deletion through a slice works too.
newSqu[2:5] = []
print("Copied List:" + str(newSqu))
# Delete everything.
newSqu[:] = []
print("Copied List:" + str(newSqu))
# The len() function returns the number of elements.
print("Original List len:" + str(len(squares)))
# Nesting is possible: multidimensional arrays.
listlist = [[1, 3, 5], ['a', 'b', 'c']]
print("[0][1] :" + str(listlist[0][1]))
print("[1][2] :" + str(listlist[1][2]))
# This is interesting: a list can be built with a for statement.
# It is called a list comprehension: a for written inside [ ] turns the values
# it yields into the list elements.
# Here i * 2 becomes each element, repeated 10 times by range(10).
numbers = [i * 2 for i in range(10)]
print(numbers)
print(len(numbers))
# Harder to read, but it works as follows:
# [1 for i in range(3)] builds a list of three elements, each with value 1.
# range(4) repeats that four times.
# The result is a 2D array holding four [1, 1, 1] lists.
numbers2 = [[1 for i in range(3)] for j in range(4)]
numbers2[0][1] = 2
print(numbers2)
# A list can be unpacked into separate variables. This looks handy.
number3 = [i for i in range(3)]
number3_0, number3_1, number3_2 = number3
print(number3)
print(str(number3_0) + " " + str(number3_1) + " " + str(number3_2))
# List search: index() returns the position of a value, count() its occurrences.
number3.index(1)
number3.count(1)
| 23.038462 | 68 | 0.636617 |
ad8ca90e0939c88232da35c9dd0d816207c3d22b | 4,531 | py | Python | unit_tests.py | robertsmd/secret-sharing | 5acd3d49e096a51ee3392532e6dd886d38400139 | [
"MIT"
] | null | null | null | unit_tests.py | robertsmd/secret-sharing | 5acd3d49e096a51ee3392532e6dd886d38400139 | [
"MIT"
] | null | null | null | unit_tests.py | robertsmd/secret-sharing | 5acd3d49e096a51ee3392532e6dd886d38400139 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Secret Sharing
~~~~~
:copyright: (c) 2014 by Halfmoon Labs
:license: MIT, see LICENSE for more details.
"""
import random
import unittest
from test import test_support
from secretsharing import secret_int_to_points, points_to_secret_int, \
point_to_share_string, share_string_to_point, \
HexToHexSecretSharer, PlaintextToHexSecretSharer, \
BitcoinToB58SecretSharer, BitcoinToB32SecretSharer, \
BitcoinToZB32SecretSharer, base64_chars, is_hex
from secretsharing import SecretSharer
class ShamirSharingTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def check_shared_secret(self, sharer_class, format_check_function, m, n, secret):
shares = sharer_class.split_secret(secret, m, n)
random.shuffle(shares)
if format_check_function:
for share in shares:
has_the_right_base = format_check_function(share.split('-')[1])
self.assertTrue(has_the_right_base)
return shares
def check_recovered_secret(self, sharer_class, shares, m, original_secret):
recovered_secret = sharer_class.recover_secret(shares[0:m])
self.assertEqual(recovered_secret, original_secret)
return recovered_secret
def check_splitting_and_recovery(self, sharer_class, check_function, m, n, secret):
shares = self.check_shared_secret(
sharer_class, check_function, m, n, secret)
recovered_secret = self.check_recovered_secret(
sharer_class, shares, m, secret)
return recovered_secret
def test_hex_to_hex_sharing(self):
recovered_secret = self.check_splitting_and_recovery(
SecretSharer, is_hex, 3, 5,
"c4bbcb1fbec99d65bf59d85c8cb62ee2db963f0fe106f483d9afa73bd4e39a8a")
def test_hex_with_zero_to_hex_sharing(self):
recovered_secret = self.check_splitting_and_recovery(
SecretSharer, is_hex, 3, 5,
"04bbcb1fbec99d65bf59d85c8cb62ee2db963f0fe106f483d9afa73bd4e39a8a")
def test_printable_ascii_to_hex_sharing(self):
recovered_secret = self.check_splitting_and_recovery(
PlaintextToHexSecretSharer, is_hex, 3, 5,
"correct horse battery staple")
def test_printable_with_zero_ascii_to_hex_sharing(self):
recovered_secret = self.check_splitting_and_recovery(
PlaintextToHexSecretSharer, is_hex, 3, 5,
"000correct horse battery staple")
def test_b58_to_b32_sharing(self):
recovered_secret = self.check_splitting_and_recovery(
BitcoinToB32SecretSharer, None, 3, 5,
"5KJvsngHeMpm884wtkJNzQGaCErckhHJBGFsvd3VyK5qMZXj3hS")
def test_b58_to_zb32_sharing(self):
recovered_secret = self.check_splitting_and_recovery(
BitcoinToZB32SecretSharer, None, 3, 5,
"5KJvsngHeMpm884wtkJNzQGaCErckhHJBGFsvd3VyK5qMZXj3hS")
def test_b58_to_b58_sharing(self):
recovered_secret = self.check_splitting_and_recovery(
BitcoinToB58SecretSharer, None, 3, 5,
"5KJvsngHeMpm884wtkJNzQGaCErckhHJBGFsvd3VyK5qMZXj3hS")
def test_hex_to_base64_sharing(self):
class HexToBase64SecretSharer(SecretSharer):
share_charset = base64_chars
sharer_class = HexToBase64SecretSharer
recovered_secret = self.check_splitting_and_recovery(
sharer_class, None, 3, 5,
"c4bbcb1fbec99d65bf59d85c8cb62ee2db963f0fe106f483d9afa73bd4e39a8a")
def test_2_of_3_sharing(self):
recovered_secret = self.check_splitting_and_recovery(
SecretSharer, is_hex, 2, 3,
"c4bbcb1fbec99d65bf59d85c8cb62ee2db963f0fe106f483d9afa73bd4e39a8a")
def test_4_of_7_sharing(self):
recovered_secret = self.check_splitting_and_recovery(
SecretSharer, is_hex, 4, 7,
"c4bbcb1fbec99d65bf59d85c8cb62ee2db963f0fe106f483d9afa73bd4e39a8a")
def test_5_of_9_sharing(self):
recovered_secret = self.check_splitting_and_recovery(
SecretSharer, is_hex, 5, 9,
"c4bbcb1fbec99d65bf59d85c8cb62ee2db963f0fe106f483d9afa73bd4e39a8a")
def test_2_of_2_sharing(self):
recovered_secret = self.check_splitting_and_recovery(
SecretSharer, is_hex, 2, 2,
"c4bbcb1fbec99d65bf59d85c8cb62ee2db963f0fe106f483d9afa73bd4e39a8a")
def test_main():
test_support.run_unittest(
ShamirSharingTest
)
if __name__ == '__main__':
test_main() | 37.758333 | 87 | 0.716177 |
ce32033c857c78bf32504c9865a9e2a54577a695 | 4,901 | py | Python | scripts/plugin_base.py | JamieSharpe/ALEAPP | acb06736d772d75c9dc0fd58b9f2a1726e795fb4 | [
"MIT"
] | null | null | null | scripts/plugin_base.py | JamieSharpe/ALEAPP | acb06736d772d75c9dc0fd58b9f2a1726e795fb4 | [
"MIT"
] | null | null | null | scripts/plugin_base.py | JamieSharpe/ALEAPP | acb06736d772d75c9dc0fd58b9f2a1726e795fb4 | [
"MIT"
] | null | null | null | """
Base class for an artefact plugin system.
"""
from scripts.ilapfuncs import *
from scripts import search_files
import os
import traceback
class ArtefactPlugin:
"""
Base class that each artefact plugin must inherit from.
Each plugin must:
* Provide a category.
* Provide a name.
* Provide a description.
* Provide a path filter list.
* Provide a feathericon for HTML report - https://feathericons.com/
    * Implement the _processor method.
    Additional classes/methods/modules may be added by the plugin author to further support their parsing.
"""
def __init__(self):
"""
Setup of artefact.
"""
# Author Details
self.author: str = 'Unknown Author'
self.author_email: str = ''
self.author_url: str = ''
self.contributors: list = [] # List of names who helped in the artefact identification/parsing.
# Artefact Details
self.name: str = 'Unknown Plugin' # Friendly plugin name.
self.category: str = 'Generic' # Artefact category.
self.description: str = '' # What does the plugin do.
self.artefact_reference: str = '' # Description on what the artefact is.
self.path_filters: list = [] # Collection of regex search filters to locate an artefact.
        self.icon: str = '' # feathericon for report.
# Artefact Processing Details
# Do not alter these in your plugin implementation. They are purely for referencing.
self.files_found: list = [] # Collection of all the file paths that match self.path_filters.
self.report_folder: str = '' # Artefact report folder.
self.seeker: search_files.FileSeekerBase = None # Seeker object to search for additional files in the evidence.
self.wrap_text: bool = True # Determine if text should be wrapped on a new line.
self.debug_mode: bool = False # Determine if only this plugin should run. See plugin_manager.py[.plugins_in_debug_only]
def full_name(self):
return f'{self.category} - {self.name}'
def process_artefact(self, report_folder_base: str) -> bool:
"""
Processor wrapper.
This prepares any temp folders, and wraps the plugins processor in a try/catch for safety.
:param report_folder_base:
:return:
"""
logfunc()
logfunc(f'Processing {self.full_name()} artifact parser.')
if not self.files_found:
logfunc('No artefacts to parse.')
return True
# Setup report folder for HTML files.
self.report_folder = os.path.join(report_folder_base, self.full_name())
os.makedirs(self.report_folder, exist_ok = True)
processor_success_status = False
try:
processor_success_status = self._processor()
except Exception as ex:
logfunc(f'Processing {self.name} artifacts had errors!')
logfunc(f'Error was {ex}')
logfunc(f'Exception Traceback: {traceback.format_exc()}')
processor_success_status = False
logfunc(f'{self.full_name()} artifact completed {("successfully" if processor_success_status else "unsuccessfully")}.')
return processor_success_status
def _processor(self) -> bool:
"""
Core processing method. This is where you implement your artefact parsing/processing.
The plugin system will only call this method.
This method is not to be called externally.
:return: Bool - True on successful parse, False if errors occurred.
"""
raise NotImplementedError(f'The plugin "{self.full_name()}" ({__name__}) has not been implemented.')
def search_for_artefacts(self):
"""
Searches for artefacts among the seeker using the provided plugin's path_filters.
:return:
"""
logfunc()
logfunc(f'Plugin "{self.full_name()}" - Searching for artefacts.')
for path_filter in self.path_filters:
path_files_found = self.seeker.search(path_filter)
if not path_files_found:
logfunc(f'\tPlugin "{self.full_name()}" with regex "{path_filter}" located no files.')
continue
self.files_found.extend(path_files_found)
# Log all the files found with the path filter.
for path_located in path_files_found:
if path_located.startswith('\\\\?\\'):
path_located = path_located[4:]
logfunc(f'\tPlugin "{self.full_name()}" with regex "{path_filter}" located file "{path_located}"')
logfunc(f'Plugin "{self.full_name()}" found {len(self.files_found)} artefact(s).')
logfunc(f'Plugin "{self.full_name()}" - Search complete.')
def __str__(self):
return f'{self.full_name()} - {__name__}'
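# Illustrative only: a minimal subclass sketch (all names and paths below are
# hypothetical) showing the fields a plugin fills in and the _processor
# override that process_artefact() calls.
class _ExamplePlugin(ArtefactPlugin):
    def __init__(self):
        super().__init__()
        self.name = 'Example Artefact'
        self.category = 'Examples'
        self.description = 'Demonstrates the plugin contract.'
        self.path_filters = ['**/example.db']
        self.icon = 'file-text'
    def _processor(self) -> bool:
        # Parse self.files_found here and write any report into self.report_folder.
        return True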
| 37.128788 | 128 | 0.637421 |
e3ea36c99c948f7334d1e44e00421858ce68b74d | 2,558 | py | Python | Experimental/Test_ast_python_file_classes.py | rchateauneu/survol | ba66d3ec453b2d9dd3a8dabc6d53f71aa9ba8c78 | [
"BSD-3-Clause"
] | 9 | 2017-10-05T23:36:23.000Z | 2021-08-09T15:40:03.000Z | Experimental/Test_ast_python_file_classes.py | rchateauneu/survol | ba66d3ec453b2d9dd3a8dabc6d53f71aa9ba8c78 | [
"BSD-3-Clause"
] | 21 | 2018-01-02T09:33:03.000Z | 2018-08-27T11:09:52.000Z | Experimental/Test_ast_python_file_classes.py | rchateauneu/survol | ba66d3ec453b2d9dd3a8dabc6d53f71aa9ba8c78 | [
"BSD-3-Clause"
] | 4 | 2018-06-23T09:05:45.000Z | 2021-01-22T15:36:50.000Z | from __future__ import print_function
import ast
def show_info(functionNode):
print("Function name:", functionNode.name)
print("f=", functionNode)
print("f=", dir(functionNode))
print("Args:")
for arg in functionNode.args.args:
#import pdb; pdb.set_trace()
print("\tParameter name:", arg)
class MyClass:
def f1(self):
return "jj"
@property
def the_field(self):
return self.an_the_field
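# Note: parsing this file should find one class, MyClass, and report "the_field"
# as its only @property-decorated method.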
filename = "Test_ast_python_file_classes.py"
with open(filename) as file:
node = ast.parse(file.read())
functions = [n for n in node.body if isinstance(n, ast.FunctionDef)]
classes = [n for n in node.body if isinstance(n, ast.ClassDef)]
if False:
print("")
print("Functions")
for function in functions:
show_info(function)
print("")
print("Classes")
for class_ in classes:
print("Class name:", class_.name)
print(" ", dir(class_))
for n in class_.body:
decorators_names = [oned.id for oned in n.decorator_list]
if 'property' in decorators_names:
print(" Property n=", n.name)
if False:
# [<class '_ast.Name'>]
print(" d=", [type(oned) for oned in n.decorator_list])
print(" d=", [dir(oned) for oned in n.decorator_list])
print(" d=", [oned.id for oned in n.decorator_list])
# r= ['__class__', '__delattr__', '__dict__', '__doc__', '__format__', '__getattribute__', '__hash__', '__init__', '__
# module__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__weak
# ref__', '_attributes', '_fields', 'args', 'body', 'col_offset', 'decorator_list', 'lineno', 'name']
print(" a=", n.args)
# a= ['__class__', '__delattr__', '__dict__', '__doc__', '__format__', '__getattribute__', '__hash__', '__init__', '__
# module__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__weak
# ref__', '_attributes', '_fields', 'args', 'defaults', 'kwarg', 'vararg']
print(" a=", n.args.kwarg)
print(" t=", type(n))
if False:
methods = [n for n in class_.body if isinstance(n, ast.FunctionDef)]
print("Methods only")
for method in methods:
print(" ")
show_info(method) | 38.757576 | 146 | 0.57154 |
de34b7c3e9aef3142e76d9a470ea025755de9989 | 1,313 | py | Python | acme_test.py | WillHK/DS-Unit-3-Sprint-1-Software-Engineering | a0cb1bcb8ff2ce04bee68424da771ea7ffaa662c | [
"MIT"
] | null | null | null | acme_test.py | WillHK/DS-Unit-3-Sprint-1-Software-Engineering | a0cb1bcb8ff2ce04bee68424da771ea7ffaa662c | [
"MIT"
] | null | null | null | acme_test.py | WillHK/DS-Unit-3-Sprint-1-Software-Engineering | a0cb1bcb8ff2ce04bee68424da771ea7ffaa662c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import unittest
from acme import Product
from acme_report import generate_products, ADJECTIVES, NOUNS
class AcmeProductTests(unittest.TestCase):
"""Acme Products Testing Suite"""
def test_default_product_price(self):
"""Test default product price"""
prod = Product('Test Product')
self.assertEqual(prod.price, 10)
def test_default_product_weight(self):
"""Test default product weight"""
prod = Product('Test Product 2')
self.assertEqual(prod.weight, 20)
def test_explodability_method(self):
"""Test explodability method output"""
prod = Product('Test Product 3', 15, 25, .9)
self.assertEqual(prod.explode(), "...boom!")
class AcmeReportTests(unittest.TestCase):
"""Acme Reporting Sytem Testing Suite"""
def test_default_num_products(self):
"""Test """
products = generate_products()
self.assertEqual(len(products), 30)
def test_legal_names(self):
products = generate_products()
for product in products:
split_name = product.name.split()
self.assertEqual(len(split_name), 2)
self.assertIn(split_name[0], ADJECTIVES)
self.assertIn(split_name[1], NOUNS)
if __name__ == '__main__':
unittest.main() | 33.666667 | 60 | 0.658035 |