"""
flask_security.cli
~~~~~~~~~~~~~~~~~~
Command Line Interface for managing accounts and roles.
:copyright: (c) 2016 by CERN.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
from functools import wraps
import click
from flask import current_app
from werkzeug.datastructures import MultiDict
from werkzeug.local import LocalProxy
from .utils import hash_password
try:
from flask.cli import with_appcontext
except ImportError:
from flask_cli import with_appcontext
_security = LocalProxy(lambda: current_app.extensions['security'])
_datastore = LocalProxy(lambda: current_app.extensions['security'].datastore)
def commit(fn):
"""Decorator to commit changes in datastore."""
@wraps(fn)
def wrapper(*args, **kwargs):
fn(*args, **kwargs)
_datastore.commit()
return wrapper
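# Illustrative effect of @commit (names as defined above): the wrapped
# command body runs first, then the shared datastore session is committed,
# e.g. for a hypothetical command:
#     @commit
#     def make_admin_role():
#         _datastore.create_role(name='admin')  # persisted when commit() runs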
@click.group()
def users():
"""User commands."""
@click.group()
def roles():
"""Role commands."""
@users.command('create')
@click.argument('identity')
@click.password_option()
@click.option('-a', '--active', default=False, is_flag=True)
@with_appcontext
@commit
def users_create(identity, password, active):
"""Create a user."""
kwargs = {attr: identity for attr in _security.user_identity_attributes}
kwargs.update(**{'password': password, 'active': 'y' if active else ''})
form = _security.confirm_register_form(
MultiDict(kwargs), csrf_enabled=False
)
if form.validate():
kwargs['password'] = hash_password(kwargs['password'])
kwargs['active'] = active
_datastore.create_user(**kwargs)
click.secho('User created successfully.', fg='green')
kwargs['password'] = '****'
click.echo(kwargs)
else:
raise click.UsageError('Error creating user. %s' % form.errors)
@roles.command('create')
@click.argument('name')
@click.option('-d', '--description', default=None)
@with_appcontext
@commit
def roles_create(**kwargs):
"""Create a role."""
_datastore.create_role(**kwargs)
click.secho('Role "%(name)s" created successfully.' % kwargs, fg='green')
@roles.command('add')
@click.argument('user')
@click.argument('role')
@with_appcontext
@commit
def roles_add(user, role):
"""Add user to role."""
user, role = _datastore._prepare_role_modify_args(user, role)
if user is None:
raise click.UsageError('Cannot find user.')
if role is None:
raise click.UsageError('Cannot find role.')
if _datastore.add_role_to_user(user, role):
click.secho('Role "{0}" added to user "{1}" '
'successfully.'.format(role, user), fg='green')
else:
raise click.UsageError('Cannot add role to user.')
@roles.command('remove')
@click.argument('user')
@click.argument('role')
@with_appcontext
@commit
def roles_remove(user, role):
"""Remove user from role."""
user, role = _datastore._prepare_role_modify_args(user, role)
if user is None:
raise click.UsageError('Cannot find user.')
if role is None:
raise click.UsageError('Cannot find role.')
if _datastore.remove_role_from_user(user, role):
click.secho('Role "{0}" removed from user "{1}" '
'successfully.'.format(role, user), fg='green')
else:
raise click.UsageError('Cannot remove role from user.')
@users.command('activate')
@click.argument('user')
@with_appcontext
@commit
def users_activate(user):
"""Activate a user."""
user_obj = _datastore.get_user(user)
if user_obj is None:
raise click.UsageError('ERROR: User not found.')
if _datastore.activate_user(user_obj):
click.secho('User "{0}" has been activated.'.format(user), fg='green')
else:
click.secho('User "{0}" was already activated.'.format(user),
fg='yellow')
@users.command('deactivate')
@click.argument('user')
@with_appcontext
@commit
def users_deactivate(user):
"""Deactivate a user."""
user_obj = _datastore.get_user(user)
if user_obj is None:
raise click.UsageError('ERROR: User not found.')
if _datastore.deactivate_user(user_obj):
click.secho('User "{0}" has been deactivated.'.format(user),
fg='green')
else:
click.secho('User "{0}" was already deactivated.'.format(user),
fg='yellow')
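# Illustrative usage (not part of the original module): once registered on a
# Flask app, these groups surface through the Flask CLI, so hypothetical
# invocations look like:
#     flask users create admin@example.com --active
#     flask roles create admins -d "Site administrators"
#     flask roles add admin@example.com admins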
|
{
"content_hash": "a3341cb7f2bcd11e72d41f74d4f9e2f5",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 78,
"avg_line_length": 28.387096774193548,
"alnum_prop": 0.6427272727272727,
"repo_name": "morreene/tradenews",
"id": "f085265b9e401fba87bedfdd6b972600da5f8d2e",
"size": "4424",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "venv/Lib/site-packages/flask_security/cli.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1572055"
},
{
"name": "HTML",
"bytes": "464815"
},
{
"name": "JavaScript",
"bytes": "5197624"
},
{
"name": "PHP",
"bytes": "8415"
},
{
"name": "Python",
"bytes": "50512"
},
{
"name": "Shell",
"bytes": "110"
}
],
"symlink_target": ""
}
|
from django.contrib.auth.models import User, Group
from rest_framework import viewsets
from quickstart.serializers import UserSerializer, GroupSerializer
class UserViewSet(viewsets.ModelViewSet):
"""
允许查看和编辑user 的 API endpoint
"""
queryset = User.objects.all()
serializer_class = UserSerializer
class GroupViewSet(viewsets.ModelViewSet):
"""
允许查看和编辑group的 API endpoint
"""
queryset = Group.objects.all()
serializer_class = GroupSerializer
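# Illustrative wiring (assumed, not part of this file): ModelViewSets are
# typically registered with a DRF router in urls.py, e.g.:
#     from rest_framework import routers
#     router = routers.DefaultRouter()
#     router.register(r'users', UserViewSet)
#     router.register(r'groups', GroupViewSet)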
|
{
"content_hash": "3024eb0e493ca06831d20b4751712997",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 66,
"avg_line_length": 25.42105263157895,
"alnum_prop": 0.7391304347826086,
"repo_name": "fqc/django_rest_test",
"id": "8edb62ba317ba14cef739a1cc65a5bcd95c0008f",
"size": "551",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "quickstart/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8977"
}
],
"symlink_target": ""
}
|
'''Some helper functions for PyTorch, including:
- progress_bar: progress bar mimicking xlua.progress.
- format_time: human-readable formatting of elapsed seconds.
- AverageMeter: running average/min/max tracker.
- accuracy / accuracy_: top-k accuracy computation.
'''
import os
import sys
import time
import math
import torch
# `stty size` fails when stdout is not attached to a terminal (piped
# output, notebooks); fall back to a default width in that case.
try:
    _, term_width = os.popen('stty size', 'r').read().split()
    term_width = int(term_width)
except ValueError:
    term_width = 80
TOTAL_BAR_LENGTH = 65.
last_time = time.time()
begin_time = last_time
def progress_bar(current, total, msg=None):
global last_time, begin_time
if current == 0:
begin_time = time.time() # Reset for new bar.
cur_len = int(TOTAL_BAR_LENGTH*current/total)
rest_len = int(TOTAL_BAR_LENGTH - cur_len) - 1
sys.stdout.write(' [')
for i in range(cur_len):
sys.stdout.write('=')
sys.stdout.write('>')
for i in range(rest_len):
sys.stdout.write('.')
sys.stdout.write(']')
cur_time = time.time()
step_time = cur_time - last_time
last_time = cur_time
tot_time = cur_time - begin_time
L = []
L.append(' Step: %s' % format_time(step_time))
L.append(' | Tot: %s' % format_time(tot_time))
if msg:
L.append(' | ' + msg)
msg = ''.join(L)
sys.stdout.write(msg)
for i in range(term_width-int(TOTAL_BAR_LENGTH)-len(msg)-3):
sys.stdout.write(' ')
# Go back to the center of the bar.
for i in range(term_width-int(TOTAL_BAR_LENGTH/2)+2):
sys.stdout.write('\b')
sys.stdout.write(' %d/%d ' % (current+1, total))
if current < total-1:
sys.stdout.write('\r')
else:
sys.stdout.write('\n')
sys.stdout.flush()
def format_time(seconds):
days = int(seconds / 3600/24)
seconds = seconds - days*3600*24
hours = int(seconds / 3600)
seconds = seconds - hours*3600
minutes = int(seconds / 60)
seconds = seconds - minutes*60
secondsf = int(seconds)
seconds = seconds - secondsf
millis = int(seconds*1000)
f = ''
i = 1
if days > 0:
f += str(days) + 'D'
i += 1
if hours > 0 and i <= 2:
f += str(hours) + 'h'
i += 1
if minutes > 0 and i <= 2:
f += str(minutes) + 'm'
i += 1
if secondsf > 0 and i <= 2:
f += str(secondsf) + 's'
i += 1
if millis > 0 and i <= 2:
f += str(millis) + 'ms'
i += 1
if f == '':
f = '0ms'
return f
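# Quick sanity examples for format_time (at most two units are emitted,
# largest first):
#     format_time(3725)   # -> '1h2m'
#     format_time(90061)  # -> '1D1h'
#     format_time(0.5)    # -> '500ms'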
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
self.min = float('inf')   # infinities so the first update() always sets
self.max = float('-inf')  # both extremes, whatever the value range
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
if val < self.min:
self.min = val
if val > self.max:
self.max = val
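# Illustrative use of AverageMeter: tracking a batch-weighted running loss.
#     meter = AverageMeter()
#     meter.update(0.9, n=32)  # mean loss 0.9 over a 32-sample batch
#     meter.update(0.7, n=32)
#     meter.avg                # -> 0.8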
def accuracy(output, target, topk=(1,)):
"""Computes the accuracy over the k top predictions for the specified values of k"""
with torch.no_grad():
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
# reshape(-1) copes with the non-contiguous slice; .view(-1) raises
# here on recent PyTorch versions.
correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True)
res.append(correct_k.mul_(100.0 / batch_size))
return res
def accuracy_(output, target, topk=(1,)):
"""Computes the precision@k for the specified values of k"""
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].reshape(-1).float().sum(0)
res.append(correct_k.mul_(1. / batch_size))
return res
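# Illustrative call for either accuracy helper, assuming `outputs` is a
# [batch, num_classes] logit tensor and `targets` holds integer labels:
#     top1, top5 = accuracy(outputs, targets, topk=(1, 5))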
|
{
"content_hash": "74af8927837dc635bf28efa4c2ba08a4",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 88,
"avg_line_length": 26.931506849315067,
"alnum_prop": 0.5521363173957273,
"repo_name": "roatienza/Deep-Learning-Experiments",
"id": "13c4a8f3f0ffd920a2b8e112c94e8e645ecc82dd",
"size": "3932",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "versions/2020/transformer/code/ui.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "4110255"
},
{
"name": "Python",
"bytes": "154986"
}
],
"symlink_target": ""
}
|
from data import Database
from stackoverflow import SalaryCalculator
salcalc = SalaryCalculator()
(pos, exp, skl, cnt) = salcalc.get_mainstats()
(cur, cnt) = salcalc.get_currencies_and_countries([int(c[0]) for c in cnt])
with Database('debug.db') as db:
db.reset_all()
db.create_positions([(p[1], int(p[0])) for p in pos])
pos = db.read_positions()
print(len(pos))
db.create_experiences([(int(e[0]),) for e in exp])
exp = db.read_experiences()
print(len(exp))
db.create_skilllevels([(float(s[0]),) for s in skl])
skl = db.read_skilllevels()
print(len(skl))
db.create_countries([(c[0], c[1], c[2], None) for c in cnt])
cnt = db.read_countries()
print(len(cnt))
db.populate_salaries()
sal = db.read_salaries()
print(len(sal))
|
{
"content_hash": "c498a94da1395c166e7420c69851a3c2",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 75,
"avg_line_length": 26.12121212121212,
"alnum_prop": 0.6473317865429234,
"repo_name": "treischl/so-sal-calc",
"id": "996c8fc869809b1ac52f1397f1e17037f600125f",
"size": "862",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9129"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import copy
import sys
from functools import update_wrapper
import warnings
from django.apps import apps
from django.apps.config import MODELS_MODULE_NAME
import django.db.models.manager # NOQA: Imported to register signal handler.
from django.conf import settings
from django.core import checks
from django.core.exceptions import (ObjectDoesNotExist,
MultipleObjectsReturned, FieldError, ValidationError, NON_FIELD_ERRORS)
from django.db.models.fields import AutoField, FieldDoesNotExist
from django.db.models.fields.related import (ForeignObjectRel, ManyToOneRel,
OneToOneField, add_lazy_relation)
from django.db import (router, transaction, DatabaseError,
DEFAULT_DB_ALIAS)
from django.db.models.query import Q
from django.db.models.query_utils import DeferredAttribute, deferred_class_factory
from django.db.models.deletion import Collector
from django.db.models.options import Options
from django.db.models import signals
from django.utils.translation import ugettext_lazy as _
from django.utils.functional import curry
from django.utils.encoding import force_str, force_text
from django.utils import six
from django.utils.six.moves import zip
from django.utils.text import get_text_list, capfirst
def subclass_exception(name, parents, module, attached_to=None):
"""
Create exception subclass. Used by ModelBase below.
If 'attached_to' is supplied, the exception will be created in a way that
allows it to be pickled, assuming the returned exception class will be added
as an attribute to the 'attached_to' class.
"""
class_dict = {'__module__': module}
if attached_to is not None:
def __reduce__(self):
# Exceptions are special - they've got state that isn't
# in self.__dict__. We assume it is all in self.args.
return (unpickle_inner_exception, (attached_to, name), self.args)
def __setstate__(self, args):
self.args = args
class_dict['__reduce__'] = __reduce__
class_dict['__setstate__'] = __setstate__
return type(name, parents, class_dict)
class ModelBase(type):
"""
Metaclass for all models.
"""
def __new__(cls, name, bases, attrs):
super_new = super(ModelBase, cls).__new__
# six.with_metaclass() inserts an extra class called 'NewBase' in the
# inheritance tree: Model -> NewBase -> object. But the initialization
# should be executed only once for a given model class.
# attrs will never be empty for classes declared in the standard way
# (ie. with the `class` keyword). This is quite robust.
if name == 'NewBase' and attrs == {}:
return super_new(cls, name, bases, attrs)
# Also ensure initialization is only performed for subclasses of Model
# (excluding Model class itself).
parents = [b for b in bases if isinstance(b, ModelBase) and
not (b.__name__ == 'NewBase' and b.__mro__ == (b, object))]
if not parents:
return super_new(cls, name, bases, attrs)
# Create the class.
module = attrs.pop('__module__')
new_class = super_new(cls, name, bases, {'__module__': module})
attr_meta = attrs.pop('Meta', None)
abstract = getattr(attr_meta, 'abstract', False)
if not attr_meta:
meta = getattr(new_class, 'Meta', None)
else:
meta = attr_meta
base_meta = getattr(new_class, '_meta', None)
# Look for an application configuration to attach the model to.
app_config = apps.get_containing_app_config(module)
if getattr(meta, 'app_label', None) is None:
if app_config is None:
# If the model is imported before the configuration for its
# application is created (#21719), or isn't in an installed
# application (#21680), use the legacy logic to figure out the
# app_label by looking one level up from the package or module
# named 'models'. If no such package or module exists, fall
# back to looking one level up from the module this model is
# defined in.
# For 'django.contrib.sites.models', this would be 'sites'.
# For 'geo.models.places' this would be 'geo'.
msg = (
"Model class %s.%s doesn't declare an explicit app_label "
"and either isn't in an application in INSTALLED_APPS or "
"else was imported before its application was loaded. " %
(module, name))
if abstract:
msg += "Its app_label will be set to None in Django 1.9."
else:
msg += "This will no longer be supported in Django 1.9."
warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
model_module = sys.modules[new_class.__module__]
package_components = model_module.__name__.split('.')
package_components.reverse() # find the last occurrence of 'models'
try:
app_label_index = package_components.index(MODELS_MODULE_NAME) + 1
except ValueError:
app_label_index = 1
kwargs = {"app_label": package_components[app_label_index]}
else:
kwargs = {"app_label": app_config.label}
else:
kwargs = {}
new_class.add_to_class('_meta', Options(meta, **kwargs))
if not abstract:
new_class.add_to_class(
'DoesNotExist',
subclass_exception(
str('DoesNotExist'),
tuple(x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract) or (ObjectDoesNotExist,),
module,
attached_to=new_class))
new_class.add_to_class(
'MultipleObjectsReturned',
subclass_exception(
str('MultipleObjectsReturned'),
tuple(x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract) or (MultipleObjectsReturned,),
module,
attached_to=new_class))
if base_meta and not base_meta.abstract:
# Non-abstract child classes inherit some attributes from their
# non-abstract parent (unless an ABC comes before it in the
# method resolution order).
if not hasattr(meta, 'ordering'):
new_class._meta.ordering = base_meta.ordering
if not hasattr(meta, 'get_latest_by'):
new_class._meta.get_latest_by = base_meta.get_latest_by
is_proxy = new_class._meta.proxy
# If the model is a proxy, ensure that the base class
# hasn't been swapped out.
if is_proxy and base_meta and base_meta.swapped:
raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped))
if getattr(new_class, '_default_manager', None):
if not is_proxy:
# Multi-table inheritance doesn't inherit default manager from
# parents.
new_class._default_manager = None
new_class._base_manager = None
else:
# Proxy classes do inherit parent's default manager, if none is
# set explicitly.
new_class._default_manager = new_class._default_manager._copy_to_model(new_class)
new_class._base_manager = new_class._base_manager._copy_to_model(new_class)
# Add all attributes to the class.
for obj_name, obj in attrs.items():
new_class.add_to_class(obj_name, obj)
# All the fields of any type declared on this model
new_fields = (
new_class._meta.local_fields +
new_class._meta.local_many_to_many +
new_class._meta.virtual_fields
)
field_names = set(f.name for f in new_fields)
# Basic setup for proxy models.
if is_proxy:
base = None
for parent in [kls for kls in parents if hasattr(kls, '_meta')]:
if parent._meta.abstract:
if parent._meta.fields:
raise TypeError("Abstract base class containing model fields not permitted for proxy model '%s'." % name)
else:
continue
if base is not None:
raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name)
else:
base = parent
if base is None:
raise TypeError("Proxy model '%s' has no non-abstract model base class." % name)
if (new_class._meta.local_fields or
new_class._meta.local_many_to_many):
raise FieldError("Proxy model '%s' contains model fields." % name)
new_class._meta.setup_proxy(base)
new_class._meta.concrete_model = base._meta.concrete_model
else:
new_class._meta.concrete_model = new_class
# Collect the parent links for multi-table inheritance.
parent_links = {}
for base in reversed([new_class] + parents):
# Conceptually equivalent to `if base is Model`.
if not hasattr(base, '_meta'):
continue
# Skip concrete parent classes.
if base != new_class and not base._meta.abstract:
continue
# Locate OneToOneField instances.
for field in base._meta.local_fields:
if isinstance(field, OneToOneField):
parent_links[field.rel.to] = field
# Do the appropriate setup for any model parents.
for base in parents:
original_base = base
if not hasattr(base, '_meta'):
# Things without _meta aren't functional models, so they're
# uninteresting parents.
continue
parent_fields = base._meta.local_fields + base._meta.local_many_to_many
# Check for clashes between locally declared fields and those
# on the base classes (we cannot handle shadowed fields at the
# moment).
for field in parent_fields:
if field.name in field_names:
raise FieldError(
'Local field %r in class %r clashes '
'with field of similar name from '
'base class %r' % (field.name, name, base.__name__)
)
if not base._meta.abstract:
# Concrete classes...
base = base._meta.concrete_model
if base in parent_links:
field = parent_links[base]
elif not is_proxy:
attr_name = '%s_ptr' % base._meta.model_name
field = OneToOneField(base, name=attr_name,
auto_created=True, parent_link=True)
# Only add the ptr field if it's not already present;
# e.g. migrations will already have it specified
if not hasattr(new_class, attr_name):
new_class.add_to_class(attr_name, field)
else:
field = None
new_class._meta.parents[base] = field
else:
# .. and abstract ones.
for field in parent_fields:
new_class.add_to_class(field.name, copy.deepcopy(field))
# Pass any non-abstract parent classes onto child.
new_class._meta.parents.update(base._meta.parents)
# Inherit managers from the abstract base classes.
new_class.copy_managers(base._meta.abstract_managers)
# Proxy models inherit the non-abstract managers from their base,
# unless they have redefined any of them.
if is_proxy:
new_class.copy_managers(original_base._meta.concrete_managers)
# Inherit virtual fields (like GenericForeignKey) from the parent
# class
for field in base._meta.virtual_fields:
if base._meta.abstract and field.name in field_names:
raise FieldError(
'Local field %r in class %r clashes '
'with field of similar name from '
'abstract base class %r' % (field.name, name, base.__name__)
)
new_class.add_to_class(field.name, copy.deepcopy(field))
if abstract:
# Abstract base models can't be instantiated and don't appear in
# the list of models for an app. We do the final setup for them a
# little differently from normal models.
attr_meta.abstract = False
new_class.Meta = attr_meta
return new_class
new_class._prepare()
new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
return new_class
def copy_managers(cls, base_managers):
# This is in-place sorting of an Options attribute, but that's fine.
base_managers.sort()
for _, mgr_name, manager in base_managers: # NOQA (redefinition of _)
val = getattr(cls, mgr_name, None)
if not val or val is manager:
new_manager = manager._copy_to_model(cls)
cls.add_to_class(mgr_name, new_manager)
def add_to_class(cls, name, value):
if hasattr(value, 'contribute_to_class'):
value.contribute_to_class(cls, name)
else:
setattr(cls, name, value)
def _prepare(cls):
"""
Creates some methods once self._meta has been populated.
"""
opts = cls._meta
opts._prepare(cls)
if opts.order_with_respect_to:
cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True)
cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False)
# defer creating accessors on the foreign class until we are
# certain it has been created
def make_foreign_order_accessors(field, model, cls):
setattr(
field.rel.to,
'get_%s_order' % cls.__name__.lower(),
curry(method_get_order, cls)
)
setattr(
field.rel.to,
'set_%s_order' % cls.__name__.lower(),
curry(method_set_order, cls)
)
add_lazy_relation(
cls,
opts.order_with_respect_to,
opts.order_with_respect_to.rel.to,
make_foreign_order_accessors
)
# Give the class a docstring -- its definition.
if cls.__doc__ is None:
cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.attname for f in opts.fields))
if hasattr(cls, 'get_absolute_url'):
cls.get_absolute_url = update_wrapper(curry(get_absolute_url, opts, cls.get_absolute_url),
cls.get_absolute_url)
signals.class_prepared.send(sender=cls)
class ModelState(object):
"""
A class for storing instance state
"""
def __init__(self, db=None):
self.db = db
# If true, uniqueness validation checks will consider this a new, as-yet-unsaved object.
# Necessary for correct validation of new instances of objects with explicit (non-auto) PKs.
# This impacts validation only; it has no effect on the actual save.
self.adding = True
class Model(six.with_metaclass(ModelBase)):
_deferred = False
def __init__(self, *args, **kwargs):
signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)
# Set up the storage for instance state
self._state = ModelState()
# There is a rather weird disparity here; if kwargs, it's set, then args
# overrides it. It should be one or the other; don't duplicate the work.
# The reason for the kwargs check is that standard iterator passes in by
# args, and instantiation for iteration is 33% faster.
args_len = len(args)
if args_len > len(self._meta.concrete_fields):
# Daft, but matches old exception sans the err msg.
raise IndexError("Number of args exceeds number of fields")
if not kwargs:
fields_iter = iter(self._meta.concrete_fields)
# The ordering of the zip calls matter - zip throws StopIteration
# when an iter throws it. So if the first iter throws it, the second
# is *not* consumed. We rely on this, so don't change the order
# without changing the logic.
for val, field in zip(args, fields_iter):
setattr(self, field.attname, val)
else:
# Slower, kwargs-ready version.
fields_iter = iter(self._meta.fields)
for val, field in zip(args, fields_iter):
setattr(self, field.attname, val)
kwargs.pop(field.name, None)
# Maintain compatibility with existing calls.
if isinstance(field.rel, ManyToOneRel):
kwargs.pop(field.attname, None)
# Now we're left with the unprocessed fields that *must* come from
# keywords, or default.
for field in fields_iter:
is_related_object = False
# This slightly odd construct is so that we can access any
# data-descriptor object (DeferredAttribute) without triggering its
# __get__ method.
if (field.attname not in kwargs and
(isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute)
or field.column is None)):
# This field will be populated on request.
continue
if kwargs:
if isinstance(field.rel, ForeignObjectRel):
try:
# Assume object instance was passed in.
rel_obj = kwargs.pop(field.name)
is_related_object = True
except KeyError:
try:
# Object instance wasn't passed in -- must be an ID.
val = kwargs.pop(field.attname)
except KeyError:
val = field.get_default()
else:
# Object instance was passed in. Special case: You can
# pass in "None" for related objects if it's allowed.
if rel_obj is None and field.null:
val = None
else:
try:
val = kwargs.pop(field.attname)
except KeyError:
# This is done with an exception rather than the
# default argument on pop because we don't want
# get_default() to be evaluated, and then not used.
# Refs #12057.
val = field.get_default()
else:
val = field.get_default()
if is_related_object:
# If we are passed a related instance, set it using the
# field.name instead of field.attname (e.g. "user" instead of
# "user_id") so that the object gets properly cached (and type
# checked) by the RelatedObjectDescriptor.
setattr(self, field.name, rel_obj)
else:
setattr(self, field.attname, val)
if kwargs:
for prop in list(kwargs):
try:
if isinstance(getattr(self.__class__, prop), property):
setattr(self, prop, kwargs.pop(prop))
except AttributeError:
pass
if kwargs:
raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
super(Model, self).__init__()
signals.post_init.send(sender=self.__class__, instance=self)
def __repr__(self):
try:
u = six.text_type(self)
except (UnicodeEncodeError, UnicodeDecodeError):
u = '[Bad Unicode data]'
return force_str('<%s: %s>' % (self.__class__.__name__, u))
def __str__(self):
if six.PY2 and hasattr(self, '__unicode__'):
return force_text(self).encode('utf-8')
return '%s object' % self.__class__.__name__
def __eq__(self, other):
if not isinstance(other, Model):
return False
if self._meta.concrete_model != other._meta.concrete_model:
return False
my_pk = self._get_pk_val()
if my_pk is None:
return self is other
return my_pk == other._get_pk_val()
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
if self._get_pk_val() is None:
raise TypeError("Model instances without primary key value are unhashable")
return hash(self._get_pk_val())
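# Illustrative consequence of the comparison rules above, for a hypothetical
# `Book` model: saved instances compare equal iff they share a concrete model
# and primary key, while unsaved instances (pk is None) equal only themselves.
#     Book.objects.get(pk=1) == Book.objects.get(pk=1)  # True
#     Book() == Book()                                  # False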
def __reduce__(self):
"""
Provides pickling support. Normally, this just dispatches to Python's
standard handling. However, for models with deferred field loading, we
need to do things manually, as they're dynamically created classes and
only module-level classes can be pickled by the default path.
"""
data = self.__dict__
if not self._deferred:
class_id = self._meta.app_label, self._meta.object_name
return model_unpickle, (class_id, [], simple_class_factory), data
defers = []
for field in self._meta.fields:
if isinstance(self.__class__.__dict__.get(field.attname),
DeferredAttribute):
defers.append(field.attname)
model = self._meta.proxy_for_model
class_id = model._meta.app_label, model._meta.object_name
return (model_unpickle, (class_id, defers, deferred_class_factory), data)
def _get_pk_val(self, meta=None):
if not meta:
meta = self._meta
return getattr(self, meta.pk.attname)
def _set_pk_val(self, value):
return setattr(self, self._meta.pk.attname, value)
pk = property(_get_pk_val, _set_pk_val)
def serializable_value(self, field_name):
"""
Returns the value of the field name for this instance. If the field is
a foreign key, returns the id value, instead of the object. If there's
no Field object with this name on the model, the model attribute's
value is returned directly.
Used to serialize a field's value (in the serializer, or form output,
for example). Normally, you would just access the attribute directly
and not use this method.
"""
try:
field = self._meta.get_field_by_name(field_name)[0]
except FieldDoesNotExist:
return getattr(self, field_name)
return getattr(self, field.attname)
def save(self, force_insert=False, force_update=False, using=None,
update_fields=None):
"""
Saves the current instance. Override this in a subclass if you want to
control the saving process.
The 'force_insert' and 'force_update' parameters can be used to insist
that the "save" must be an SQL insert or update (or equivalent for
non-SQL backends), respectively. Normally, they should not be set.
"""
using = using or router.db_for_write(self.__class__, instance=self)
if force_insert and (force_update or update_fields):
raise ValueError("Cannot force both insert and updating in model saving.")
if update_fields is not None:
# If update_fields is empty, skip the save. We do also check for
# no-op saves later on for inheritance cases. This bailout is
# still needed for skipping signal sending.
if len(update_fields) == 0:
return
update_fields = frozenset(update_fields)
field_names = set()
for field in self._meta.fields:
if not field.primary_key:
field_names.add(field.name)
if field.name != field.attname:
field_names.add(field.attname)
non_model_fields = update_fields.difference(field_names)
if non_model_fields:
raise ValueError("The following fields do not exist in this "
"model or are m2m fields: %s"
% ', '.join(non_model_fields))
# If saving to the same database, and this model is deferred, then
# automatically do an "update_fields" save on the loaded fields.
elif not force_insert and self._deferred and using == self._state.db:
field_names = set()
for field in self._meta.concrete_fields:
if not field.primary_key and not hasattr(field, 'through'):
field_names.add(field.attname)
deferred_fields = [
f.attname for f in self._meta.fields
if (f.attname not in self.__dict__ and
isinstance(self.__class__.__dict__[f.attname], DeferredAttribute))
]
loaded_fields = field_names.difference(deferred_fields)
if loaded_fields:
update_fields = frozenset(loaded_fields)
self.save_base(using=using, force_insert=force_insert,
force_update=force_update, update_fields=update_fields)
save.alters_data = True
def save_base(self, raw=False, force_insert=False,
force_update=False, using=None, update_fields=None):
"""
Handles the parts of saving which should be done only once per save,
yet need to be done in raw saves, too. This includes some sanity
checks and signal sending.
The 'raw' argument is telling save_base not to save any parent
models and not to do any changes to the values before save. This
is used by fixture loading.
"""
using = using or router.db_for_write(self.__class__, instance=self)
assert not (force_insert and (force_update or update_fields))
assert update_fields is None or len(update_fields) > 0
cls = origin = self.__class__
# Skip proxies, but keep the origin as the proxy model.
if cls._meta.proxy:
cls = cls._meta.concrete_model
meta = cls._meta
if not meta.auto_created:
signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using,
update_fields=update_fields)
with transaction.commit_on_success_unless_managed(using=using, savepoint=False):
if not raw:
self._save_parents(cls, using, update_fields)
updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields)
# Store the database on which the object was saved
self._state.db = using
# Once saved, this is no longer a to-be-added instance.
self._state.adding = False
# Signal that the save is complete
if not meta.auto_created:
signals.post_save.send(sender=origin, instance=self, created=(not updated),
update_fields=update_fields, raw=raw, using=using)
save_base.alters_data = True
def _save_parents(self, cls, using, update_fields):
"""
Saves all the parents of cls using values from self.
"""
meta = cls._meta
for parent, field in meta.parents.items():
# Make sure the link fields are synced between parent and self.
if (field and getattr(self, parent._meta.pk.attname) is None
and getattr(self, field.attname) is not None):
setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
self._save_parents(cls=parent, using=using, update_fields=update_fields)
self._save_table(cls=parent, using=using, update_fields=update_fields)
# Set the parent's PK value to self.
if field:
setattr(self, field.attname, self._get_pk_val(parent._meta))
# Since we didn't have an instance of the parent handy, set
# attname directly, bypassing the descriptor. Invalidate
# the related object cache, in case it's been accidentally
# populated. A fresh instance will be re-built from the
# database if necessary.
cache_name = field.get_cache_name()
if hasattr(self, cache_name):
delattr(self, cache_name)
def _save_table(self, raw=False, cls=None, force_insert=False,
force_update=False, using=None, update_fields=None):
"""
Does the heavy-lifting involved in saving. Updates or inserts the data
for a single table.
"""
meta = cls._meta
non_pks = [f for f in meta.local_concrete_fields if not f.primary_key]
if update_fields:
non_pks = [f for f in non_pks
if f.name in update_fields or f.attname in update_fields]
pk_val = self._get_pk_val(meta)
pk_set = pk_val is not None
if not pk_set and (force_update or update_fields):
raise ValueError("Cannot force an update in save() with no primary key.")
updated = False
# If possible, try an UPDATE. If that doesn't update anything, do an INSERT.
if pk_set and not force_insert:
base_qs = cls._base_manager.using(using)
values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False)))
for f in non_pks]
forced_update = update_fields or force_update
updated = self._do_update(base_qs, using, pk_val, values, update_fields,
forced_update)
if force_update and not updated:
raise DatabaseError("Forced update did not affect any rows.")
if update_fields and not updated:
raise DatabaseError("Save with update_fields did not affect any rows.")
if not updated:
if meta.order_with_respect_to:
# If this is a model with an order_with_respect_to,
# autopopulate the _order field.
field = meta.order_with_respect_to
order_value = cls._base_manager.using(using).filter(
**{field.name: getattr(self, field.attname)}).count()
self._order = order_value
fields = meta.local_concrete_fields
if not pk_set:
fields = [f for f in fields if not isinstance(f, AutoField)]
update_pk = bool(meta.has_auto_field and not pk_set)
result = self._do_insert(cls._base_manager, using, fields, update_pk, raw)
if update_pk:
setattr(self, meta.pk.attname, result)
return updated
def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update):
"""
This method will try to update the model. If the model was updated (in
the sense that an update query was done and a matching row was found
from the DB) the method will return True.
"""
filtered = base_qs.filter(pk=pk_val)
if not values:
# We can end up here when saving a model in an inheritance chain where
# update_fields doesn't target any field in the current model. In that
# case we just say the update succeeded. Another case ending up here
# is a model with just PK - in that case check that the PK still
# exists.
return update_fields is not None or filtered.exists()
if self._meta.select_on_save and not forced_update:
if filtered.exists():
filtered._update(values)
return True
else:
return False
return filtered._update(values) > 0
def _do_insert(self, manager, using, fields, update_pk, raw):
"""
Do an INSERT. If update_pk is defined then this method should return
the new pk for the model.
"""
return manager._insert([self], fields=fields, return_id=update_pk,
using=using, raw=raw)
def delete(self, using=None):
using = using or router.db_for_write(self.__class__, instance=self)
assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
collector = Collector(using=using)
collector.collect([self])
collector.delete()
delete.alters_data = True
def _get_FIELD_display(self, field):
value = getattr(self, field.attname)
return force_text(dict(field.flatchoices).get(value, value), strings_only=True)
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
if not self.pk:
raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
op = 'gt' if is_next else 'lt'
order = '' if is_next else '-'
param = force_text(getattr(self, field.attname))
q = Q(**{'%s__%s' % (field.name, op): param})
q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk})
qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by('%s%s' % (order, field.name), '%spk' % order)
try:
return qs[0]
except IndexError:
raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name)
def _get_next_or_previous_in_order(self, is_next):
cachename = "__%s_order_cache" % is_next
if not hasattr(self, cachename):
op = 'gt' if is_next else 'lt'
order = '_order' if is_next else '-_order'
order_field = self._meta.order_with_respect_to
obj = self._default_manager.filter(**{
order_field.name: getattr(self, order_field.attname)
}).filter(**{
'_order__%s' % op: self._default_manager.values('_order').filter(**{
self._meta.pk.name: self.pk
})
}).order_by(order)[:1].get()
setattr(self, cachename, obj)
return getattr(self, cachename)
def prepare_database_save(self, unused):
if self.pk is None:
raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self)
return self.pk
def clean(self):
"""
Hook for doing any extra model-wide validation after clean() has been
called on every field by self.clean_fields. Any ValidationError raised
by this method will not be associated with a particular field; it will
have a special-case association with the field defined by NON_FIELD_ERRORS.
"""
pass
def validate_unique(self, exclude=None):
"""
Checks unique constraints on the model and raises ``ValidationError``
if any failed.
"""
unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
errors = self._perform_unique_checks(unique_checks)
date_errors = self._perform_date_checks(date_checks)
for k, v in date_errors.items():
errors.setdefault(k, []).extend(v)
if errors:
raise ValidationError(errors)
def _get_unique_checks(self, exclude=None):
"""
Gather a list of checks to perform. Since validate_unique could be
called from a ModelForm, some fields may have been excluded; we can't
perform a unique check on a model that is missing fields involved
in that check.
Fields that did not validate should also be excluded, but they need
to be passed in via the exclude argument.
"""
if exclude is None:
exclude = []
unique_checks = []
unique_togethers = [(self.__class__, self._meta.unique_together)]
for parent_class in self._meta.parents.keys():
if parent_class._meta.unique_together:
unique_togethers.append((parent_class, parent_class._meta.unique_together))
for model_class, unique_together in unique_togethers:
for check in unique_together:
for name in check:
# If this is an excluded field, don't add this check.
if name in exclude:
break
else:
unique_checks.append((model_class, tuple(check)))
# These are checks for the unique_for_<date/year/month>.
date_checks = []
# Gather a list of checks for fields declared as unique and add them to
# the list of checks.
fields_with_class = [(self.__class__, self._meta.local_fields)]
for parent_class in self._meta.parents.keys():
fields_with_class.append((parent_class, parent_class._meta.local_fields))
for model_class, fields in fields_with_class:
for f in fields:
name = f.name
if name in exclude:
continue
if f.unique:
unique_checks.append((model_class, (name,)))
if f.unique_for_date and f.unique_for_date not in exclude:
date_checks.append((model_class, 'date', name, f.unique_for_date))
if f.unique_for_year and f.unique_for_year not in exclude:
date_checks.append((model_class, 'year', name, f.unique_for_year))
if f.unique_for_month and f.unique_for_month not in exclude:
date_checks.append((model_class, 'month', name, f.unique_for_month))
return unique_checks, date_checks
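# Illustrative return shape (hypothetical model MyModel with unique_together =
# [('a', 'b')] and a unique 'slug' field, no unique_for_* options):
#     unique_checks == [(MyModel, ('a', 'b')), (MyModel, ('slug',))]
#     date_checks == []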
def _perform_unique_checks(self, unique_checks):
errors = {}
for model_class, unique_check in unique_checks:
# Try to look up an existing object with the same values as this
# object's values for all the unique field.
lookup_kwargs = {}
for field_name in unique_check:
f = self._meta.get_field(field_name)
lookup_value = getattr(self, f.attname)
if lookup_value is None:
# no value, skip the lookup
continue
if f.primary_key and not self._state.adding:
# no need to check for unique primary key when editing
continue
lookup_kwargs[str(field_name)] = lookup_value
# some fields were skipped, no reason to do the check
if len(unique_check) != len(lookup_kwargs):
continue
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
# Note that we need to use the pk as defined by model_class, not
# self.pk. These can be different fields because model inheritance
# allows single model to have effectively multiple primary keys.
# Refs #17615.
model_class_pk = self._get_pk_val(model_class._meta)
if not self._state.adding and model_class_pk is not None:
qs = qs.exclude(pk=model_class_pk)
if qs.exists():
if len(unique_check) == 1:
key = unique_check[0]
else:
key = NON_FIELD_ERRORS
errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check))
return errors
def _perform_date_checks(self, date_checks):
errors = {}
for model_class, lookup_type, field, unique_for in date_checks:
lookup_kwargs = {}
# There's a ticket to add a date lookup; we can remove this special
# case if that makes its way in.
date = getattr(self, unique_for)
if date is None:
continue
if lookup_type == 'date':
lookup_kwargs['%s__day' % unique_for] = date.day
lookup_kwargs['%s__month' % unique_for] = date.month
lookup_kwargs['%s__year' % unique_for] = date.year
else:
lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type)
lookup_kwargs[field] = getattr(self, field)
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
if not self._state.adding and self.pk is not None:
qs = qs.exclude(pk=self.pk)
if qs.exists():
errors.setdefault(field, []).append(
self.date_error_message(lookup_type, field, unique_for)
)
return errors
def date_error_message(self, lookup_type, field_name, unique_for):
opts = self._meta
field = opts.get_field(field_name)
return ValidationError(
message=field.error_messages['unique_for_date'],
code='unique_for_date',
params={
'model': self,
'model_name': six.text_type(capfirst(opts.verbose_name)),
'lookup_type': lookup_type,
'field': field_name,
'field_label': six.text_type(capfirst(field.verbose_name)),
'date_field': unique_for,
'date_field_label': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)),
}
)
def unique_error_message(self, model_class, unique_check):
opts = model_class._meta
params = {
'model': self,
'model_class': model_class,
'model_name': six.text_type(capfirst(opts.verbose_name)),
'unique_check': unique_check,
}
# A unique field
if len(unique_check) == 1:
field = opts.get_field(unique_check[0])
params['field_label'] = six.text_type(capfirst(field.verbose_name))
return ValidationError(
message=field.error_messages['unique'],
code='unique',
params=params,
)
# unique_together
else:
field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check]
params['field_labels'] = six.text_type(get_text_list(field_labels, _('and')))
return ValidationError(
message=_("%(model_name)s with this %(field_labels)s already exists."),
code='unique_together',
params=params,
)
def full_clean(self, exclude=None, validate_unique=True):
"""
Calls clean_fields, clean, and validate_unique, on the model,
and raises a ``ValidationError`` for any errors that occurred.
"""
errors = {}
if exclude is None:
exclude = []
try:
self.clean_fields(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
# Form.clean() is run even if other validation fails, so do the
# same with Model.clean() for consistency.
try:
self.clean()
except ValidationError as e:
errors = e.update_error_dict(errors)
# Run unique checks, but only for fields that passed validation.
if validate_unique:
for name in errors.keys():
if name != NON_FIELD_ERRORS and name not in exclude:
exclude.append(name)
try:
self.validate_unique(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
def clean_fields(self, exclude=None):
"""
Cleans all fields and raises a ValidationError containing a dict
of all validation errors if any occur.
"""
if exclude is None:
exclude = []
errors = {}
for f in self._meta.fields:
if f.name in exclude:
continue
# Skip validation for empty fields with blank=True. The developer
# is responsible for making sure they have a valid value.
raw_value = getattr(self, f.attname)
if f.blank and raw_value in f.empty_values:
continue
try:
setattr(self, f.attname, f.clean(raw_value, self))
except ValidationError as e:
errors[f.name] = e.error_list
if errors:
raise ValidationError(errors)
@classmethod
def check(cls, **kwargs):
errors = []
errors.extend(cls._check_swappable())
errors.extend(cls._check_managers(**kwargs))
if not cls._meta.swapped:
errors.extend(cls._check_fields(**kwargs))
errors.extend(cls._check_m2m_through_same_relationship())
clash_errors = cls._check_id_field() + cls._check_field_name_clashes()
errors.extend(clash_errors)
# If there are field name clashes, hide consequent column name
# clashes.
if not clash_errors:
errors.extend(cls._check_column_name_clashes())
errors.extend(cls._check_index_together())
errors.extend(cls._check_unique_together())
errors.extend(cls._check_ordering())
return errors
@classmethod
def _check_swappable(cls):
""" Check if the swapped model exists. """
errors = []
if cls._meta.swapped:
try:
apps.get_model(cls._meta.swapped)
except ValueError:
errors.append(
checks.Error(
'"%s" is not of the form "app_label.app_name".' % cls._meta.swappable,
hint=None,
obj=cls,
id='E002',
)
)
except LookupError:
app_label, model_name = cls._meta.swapped.split('.')
errors.append(
checks.Error(
('The model has been swapped out for %s.%s '
'which has not been installed or is abstract.') % (
app_label, model_name
),
hint=('Ensure that you did not misspell the model '
'name and the app name as well as the model '
'is not abstract. Does your INSTALLED_APPS '
'setting contain the "%s" app?') % app_label,
obj=cls,
id='E003',
)
)
return errors
@classmethod
def _check_managers(cls, **kwargs):
""" Perform all manager checks. """
errors = []
managers = cls._meta.concrete_managers + cls._meta.abstract_managers
for (_, _, manager) in managers:
errors.extend(manager.check(**kwargs))
return errors
@classmethod
def _check_fields(cls, **kwargs):
""" Perform all field checks. """
errors = []
for field in cls._meta.local_fields:
errors.extend(field.check(**kwargs))
for field in cls._meta.local_many_to_many:
errors.extend(field.check(from_model=cls, **kwargs))
return errors
@classmethod
def _check_m2m_through_same_relationship(cls):
""" Check if no relationship model is used by more than one m2m field.
"""
errors = []
seen_intermediary_signatures = []
fields = cls._meta.local_many_to_many
# Skip when the target model wasn't found.
fields = (f for f in fields if isinstance(f.rel.to, ModelBase))
# Skip when the relationship model wasn't found.
fields = (f for f in fields if isinstance(f.rel.through, ModelBase))
for f in fields:
signature = (f.rel.to, cls, f.rel.through)
if signature in seen_intermediary_signatures:
errors.append(
checks.Error(
('The model has two many-to-many relations through '
'the intermediary %s model, which is not permitted.') % (
f.rel.through._meta.object_name
),
hint=None,
obj=cls,
id='E004',
)
)
else:
seen_intermediary_signatures.append(signature)
return errors
@classmethod
def _check_id_field(cls):
""" Check if `id` field is a primary key. """
fields = list(f for f in cls._meta.local_fields
if f.name == 'id' and f != cls._meta.pk)
# fields is empty or consists of the invalid "id" field
if fields and not fields[0].primary_key and cls._meta.pk.name == 'id':
return [
checks.Error(
('You cannot use "id" as a field name, because each model '
'automatically gets an "id" field if none '
'of the fields have primary_key=True.'),
hint=('Remove or rename "id" field '
'or add primary_key=True to a field.'),
obj=cls,
id='E005',
)
]
else:
return []
@classmethod
def _check_field_name_clashes(cls):
""" Ref #17673. """
errors = []
used_fields = {} # name or attname -> field
# Check that multi-inheritance doesn't cause field name shadowing.
for parent in cls._meta.parents:
for f in parent._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
if clash:
errors.append(
checks.Error(
('The field "%s" from parent model '
'%s clashes with the field "%s" '
'from parent model %s.') % (
clash.name, clash.model._meta,
f.name, f.model._meta
),
hint=None,
obj=cls,
id='E053',
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
# Check that fields defined in the model don't clash with fields from
# parents.
for f in cls._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
# Note that we may detect clash between user-defined non-unique
# field "id" and automatically added unique field "id", both
# defined at the same model. This special case is considered in
# _check_id_field and here we ignore it.
id_conflict = (f.name == "id" and
clash and clash.name == "id" and clash.model == cls)
if clash and not id_conflict:
errors.append(
checks.Error(
('The field clashes with the field "%s" '
'from model %s.') % (
clash.name, clash.model._meta
),
hint=None,
obj=f,
id='E054',
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
return errors
@classmethod
def _check_column_name_clashes(cls):
# Store a list of column names which have already been used by other fields.
used_column_names = []
errors = []
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Ensure the column name is not already in use.
if column_name and column_name in used_column_names:
errors.append(
checks.Error(
'Field "%s" has column name "%s" that is already used.' % (f.name, column_name),
hint=None,
obj=cls,
)
)
else:
used_column_names.append(column_name)
return errors
@classmethod
def _check_index_together(cls):
""" Check the value of "index_together" option. """
if not isinstance(cls._meta.index_together, (tuple, list)):
return [
checks.Error(
'"index_together" must be a list or tuple.',
hint=None,
obj=cls,
id='E006',
)
]
elif any(not isinstance(fields, (tuple, list))
for fields in cls._meta.index_together):
return [
checks.Error(
'All "index_together" elements must be lists or tuples.',
hint=None,
obj=cls,
id='E007',
)
]
else:
errors = []
for fields in cls._meta.index_together:
errors.extend(cls._check_local_fields(fields, "index_together"))
return errors
@classmethod
def _check_unique_together(cls):
""" Check the value of "unique_together" option. """
if not isinstance(cls._meta.unique_together, (tuple, list)):
return [
checks.Error(
'"unique_together" must be a list or tuple.',
hint=None,
obj=cls,
id='E008',
)
]
elif any(not isinstance(fields, (tuple, list))
for fields in cls._meta.unique_together):
return [
checks.Error(
'All "unique_together" elements must be lists or tuples.',
hint=None,
obj=cls,
id='E009',
)
]
else:
errors = []
for fields in cls._meta.unique_together:
errors.extend(cls._check_local_fields(fields, "unique_together"))
return errors
@classmethod
def _check_local_fields(cls, fields, option):
from django.db import models
errors = []
for field_name in fields:
try:
field = cls._meta.get_field(field_name,
many_to_many=True)
except models.FieldDoesNotExist:
errors.append(
checks.Error(
'"%s" points to a missing field named "%s".' % (option, field_name),
hint='Ensure that you did not misspell the field name.',
obj=cls,
id='E010',
)
)
else:
if isinstance(field.rel, models.ManyToManyRel):
errors.append(
checks.Error(
('"%s" refers to a m2m "%s" field, but '
'ManyToManyFields are not supported in "%s".') % (
option, field_name, option
),
hint=None,
obj=cls,
id='E011',
)
)
return errors
@classmethod
def _check_ordering(cls):
""" Check "ordering" option -- is it a list of lists and do all fields
exist? """
from django.db.models import FieldDoesNotExist
if not cls._meta.ordering:
return []
if not isinstance(cls._meta.ordering, (list, tuple)):
return [
checks.Error(
('"ordering" must be a tuple or list '
'(even if you want to order by only one field).'),
hint=None,
obj=cls,
id='E012',
)
]
errors = []
fields = cls._meta.ordering
# Skip '?' fields.
fields = (f for f in fields if f != '?')
# Convert "-field" to "field".
fields = ((f[1:] if f.startswith('-') else f) for f in fields)
fields = (f for f in fields if
f != '_order' or not cls._meta.order_with_respect_to)
# Skip ordering in the format field1__field2 (FIXME: checking
# this format would be nice, but it's a little fiddly).
fields = (f for f in fields if '__' not in f)
# Skip ordering on pk. This is always a valid order_by field
# but is an alias and therefore won't be found by opts.get_field.
fields = (f for f in fields if f != 'pk')
for field_name in fields:
try:
cls._meta.get_field(field_name, many_to_many=False)
except FieldDoesNotExist:
errors.append(
checks.Error(
'"ordering" pointing to a missing "%s" field.' % field_name,
hint='Ensure that you did not misspell the field name.',
obj=cls,
id='E013',
)
)
return errors
############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################
# ORDERING METHODS #########################
def method_set_order(ordered_obj, self, id_list, using=None):
if using is None:
using = DEFAULT_DB_ALIAS
rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
order_name = ordered_obj._meta.order_with_respect_to.name
# FIXME: It would be nice if there was an "update many" version of update
# for situations like this.
with transaction.commit_on_success_unless_managed(using=using):
for i, j in enumerate(id_list):
ordered_obj.objects.filter(**{'pk': j, order_name: rel_val}).update(_order=i)
def method_get_order(ordered_obj, self):
rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
order_name = ordered_obj._meta.order_with_respect_to.name
pk_name = ordered_obj._meta.pk.name
return [r[pk_name] for r in
ordered_obj.objects.filter(**{order_name: rel_val}).values(pk_name)]
##############################################
# HELPER FUNCTIONS (CURRIED MODEL FUNCTIONS) #
##############################################
def get_absolute_url(opts, func, self, *args, **kwargs):
return settings.ABSOLUTE_URL_OVERRIDES.get('%s.%s' % (opts.app_label, opts.model_name), func)(self, *args, **kwargs)
########
# MISC #
########
def simple_class_factory(model, attrs):
"""
Needed for dynamic classes.
"""
return model
def model_unpickle(model_id, attrs, factory):
"""
Used to unpickle Model subclasses with deferred fields.
"""
if isinstance(model_id, tuple):
model = apps.get_model(*model_id)
else:
# Backwards compat - the model was cached directly in earlier versions.
model = model_id
cls = factory(model, attrs)
return cls.__new__(cls)
model_unpickle.__safe_for_unpickle__ = True
def unpickle_inner_exception(klass, exception_name):
# Get the exception class from the class it is attached to:
exception = getattr(klass, exception_name)
return exception.__new__(exception)
|
{
"content_hash": "68863969b4867f371b3772a24f4cf23b",
"timestamp": "",
"source": "github",
"line_count": 1473,
"max_line_length": 166,
"avg_line_length": 41.54446707399864,
"alnum_prop": 0.5443908816079746,
"repo_name": "deployed/django",
"id": "6270e35b37ffd85eb89438e33f12c837140078ac",
"size": "61195",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "django/db/models/base.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "52958"
},
{
"name": "JavaScript",
"bytes": "102315"
},
{
"name": "Python",
"bytes": "9508205"
},
{
"name": "Shell",
"bytes": "12137"
}
],
"symlink_target": ""
}
|
from .proxy_only_resource import ProxyOnlyResource
class VnetValidationTestFailure(ProxyOnlyResource):
"""A class that describes a test that failed during NSG and UDR validation.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param test_name: The name of the test that failed.
:type test_name: str
:param details: The details of what caused the failure, e.g. the blocking
rule name, etc.
:type details: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'test_name': {'key': 'properties.testName', 'type': 'str'},
'details': {'key': 'properties.details', 'type': 'str'},
}
def __init__(self, kind=None, test_name=None, details=None):
super(VnetValidationTestFailure, self).__init__(kind=kind)
self.test_name = test_name
self.details = details
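
# Illustrative construction (not part of the generated SDK module); argument
# values here are hypothetical, and the read-only attributes (id, name, type)
# are only ever populated by the server:
#
#     failure = VnetValidationTestFailure(
#         kind='failure',
#         test_name='OutboundConnectivity',
#         details='Traffic blocked by NSG rule DenyAllOutbound',
#     )
#     print(failure.test_name, '-', failure.details)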
|
{
"content_hash": "fe29e2e7806fe974c0f9c1ba51b61cae",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 79,
"avg_line_length": 32.27906976744186,
"alnum_prop": 0.590057636887608,
"repo_name": "AutorestCI/azure-sdk-for-python",
"id": "c0d453c1b53a3a221e205c4c94b148663b89e64d",
"size": "1862",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "azure-mgmt-web/azure/mgmt/web/models/vnet_validation_test_failure.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "34619070"
}
],
"symlink_target": ""
}
|
import os, sys
from distutils.core import setup, Command
from distutils.extension import Extension
from unittest import TextTestRunner, TestLoader
from glob import glob
from os.path import splitext, basename, join as pjoin
#-----------------------------------------------------------------------------
# Extra commands
#-----------------------------------------------------------------------------
class TestCommand(Command):
user_options = [ ]
def initialize_options(self):
self._dir = os.getcwd()
def finalize_options(self):
pass
def run(self):
        '''
        Finds all the test modules in tests/, and runs them.
        '''
testfiles = [ ]
for t in glob(pjoin(self._dir, 'tests', '*test*.py')):
if not t.endswith('__init__.py'):
testfiles.append('.'.join(
['tests', splitext(basename(t))[0]])
)
tests = TestLoader().loadTestsFromNames(testfiles)
t = TextTestRunner(verbosity = 2)
t.run(tests)
class CleanCommand(Command):
user_options = [ ]
def initialize_options(self):
self._clean_me = [pjoin('pyhead', 'pyhead.so') ]
for root, dirs, files in os.walk('.'):
for f in files:
if f.endswith('.pyc'):
self._clean_me.append(pjoin(root, f))
def finalize_options(self):
pass
def run(self):
for clean_me in self._clean_me:
try:
os.unlink(clean_me)
            except OSError:
                # Ignore paths that are already gone or cannot be removed.
                pass
#-----------------------------------------------------------------------------
# Extensions
#-----------------------------------------------------------------------------
cmdclass = {'test':TestCommand, 'clean':CleanCommand }
ryan_parser_source = os.path.join('pyhead', 'ryan', 'http_parser.c')
zed_parser_source = os.path.join('pyhead', 'zed', 'http11_parser.c')
try:
from Cython.Distutils import build_ext
except ImportError:
ryan_parser = os.path.join('pyhead', 'ryan', 'ryan.c')
zed_parser = os.path.join('pyhead', 'zed', 'zed.c')
else:
ryan_parser = os.path.join('pyhead', 'ryan', 'ryan.pyx')
zed_parser = os.path.join('pyhead', 'zed', 'zed.pyx')
cmdclass['build_ext'] = build_ext
ryan = Extension(
'pyhead._ryan',
sources = [ryan_parser, ryan_parser_source],
include_dirs=[os.path.join('pyhead','ryan')]
)
zed = Extension(
'pyhead._zed',
sources = [zed_parser, zed_parser_source],
include_dirs=[os.path.join('pyhead','zed')]
)
#-----------------------------------------------------------------------------
# Main setup
#-----------------------------------------------------------------------------
setup(
name = "pyhead",
version = "0.1",
packages = ['pyhead'],
ext_modules = [ryan, zed],
author = "Nicholas Piël",
author_email = "nicholas@nichol.as",
description = "Python bindings for different HTTP parsers",
license = "MIT",
cmdclass = cmdclass
)
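
# Typical invocations of this setup script (illustrative; build_ext is the
# standard distutils command, while test and clean are defined above):
#
#     python setup.py build_ext --inplace   # compile the Cython/C parser extensions
#     python setup.py test                  # run TestCommand over tests/*test*.py
#     python setup.py clean                 # delete compiled .pyc files and pyhead.so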
|
{
"content_hash": "0e87ebf23c5716be79f099d979e72315",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 78,
"avg_line_length": 29.65686274509804,
"alnum_prop": 0.49785123966942146,
"repo_name": "Nichol4s/PyHead",
"id": "dc1f23204385bf1dbe2bc87a92d602a01dc7bdcb",
"size": "4341",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "486974"
},
{
"name": "Python",
"bytes": "81797"
}
],
"symlink_target": ""
}
|
"""Forms related to accounts management."""
from collections import OrderedDict
from django import forms
from django.core.urlresolvers import reverse
from django.http import QueryDict
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.core.models import User
from modoboa.lib import events, parameters
from modoboa.lib.email_utils import split_mailbox
from modoboa.lib.exceptions import PermDeniedException, Conflict, NotFound
from modoboa.lib.form_utils import (
DomainNameField, DynamicForm, TabForms, WizardForm
)
from modoboa.lib.permissions import get_account_roles
from modoboa.lib.web_utils import render_to_json_response
from ..models import Domain, Mailbox, Alias
class AccountFormGeneral(forms.ModelForm):
username = forms.CharField(
label=ugettext_lazy("Username"),
help_text=ugettext_lazy(
"The user's name. Must be a valid e-mail address for simple users "
"or administrators with a mailbox."
)
)
role = forms.ChoiceField(
label=ugettext_lazy("Role"),
choices=[('', ugettext_lazy("Choose"))],
help_text=ugettext_lazy("What level of permission this user will have")
)
password1 = forms.CharField(
label=ugettext_lazy("Password"), widget=forms.widgets.PasswordInput
)
password2 = forms.CharField(
label=ugettext_lazy("Confirmation"),
widget=forms.widgets.PasswordInput,
help_text=ugettext_lazy(
"Enter the same password as above, for verification."
)
)
class Meta:
model = User
fields = ("username", "first_name", "last_name", "role", "is_active")
def __init__(self, user, *args, **kwargs):
super(AccountFormGeneral, self).__init__(*args, **kwargs)
self.fields = OrderedDict(
(key, self.fields[key]) for key in
['role', 'username', 'first_name', 'last_name', 'password1',
'password2', 'is_active']
)
self.fields["is_active"].label = _("Enabled")
self.user = user
if user.group == "DomainAdmins":
self.fields["role"] = forms.CharField(
label="",
widget=forms.HiddenInput(attrs={"class": "form-control"}),
required=False
)
else:
self.fields["role"].choices = [('', ugettext_lazy("Choose"))]
self.fields["role"].choices += \
get_account_roles(user, kwargs['instance']) \
if 'instance' in kwargs else get_account_roles(user)
if "instance" in kwargs:
account = kwargs["instance"]
domain_disabled = account.mailbox_set.count() and \
not account.mailbox_set.all()[0].domain.enabled
if domain_disabled:
self.fields["is_active"].widget.attrs['disabled'] = "disabled"
if args:
if args[0].get("password1", "") == "" \
and args[0].get("password2", "") == "":
self.fields["password1"].required = False
self.fields["password2"].required = False
if domain_disabled:
del self.fields["is_active"]
self.fields["role"].initial = account.group
if not account.is_local \
and parameters.get_admin(
"LDAP_AUTH_METHOD", app="core") == "directbind":
del self.fields["password1"]
del self.fields["password2"]
def domain_is_disabled(self):
"""Little shortcut to get the domain's state.
We need this information inside a template and the form is the
only object available...
"""
if not self.instance.mailbox_set.count():
return False
return not self.instance.mailbox_set.all()[0].domain.enabled
def clean_role(self):
if self.user.group == "DomainAdmins":
if self.instance == self.user:
return "DomainAdmins"
return "SimpleUsers"
return self.cleaned_data["role"]
def clean_username(self):
from django.core.validators import validate_email
if "role" not in self.cleaned_data:
return self.cleaned_data["username"]
if self.cleaned_data["role"] != "SimpleUsers":
return self.cleaned_data["username"]
uname = self.cleaned_data["username"].lower()
validate_email(uname)
return uname
def clean_password2(self):
password1 = self.cleaned_data.get("password1", "")
password2 = self.cleaned_data["password2"]
if password1 != password2:
raise forms.ValidationError(
_("The two password fields didn't match."))
return password2
def save(self, commit=True):
account = super(AccountFormGeneral, self).save(commit=False)
if self.user == account and not self.cleaned_data["is_active"]:
raise PermDeniedException(_("You can't disable your own account"))
if commit:
if "password1" in self.cleaned_data \
and self.cleaned_data["password1"] != "":
account.set_password(self.cleaned_data["password1"])
account.save()
account.set_role(self.cleaned_data["role"])
return account
class AccountFormMail(forms.Form, DynamicForm):
"""Form to handle mail part."""
email = forms.EmailField(label=ugettext_lazy("E-mail"), required=False)
quota = forms.IntegerField(
label=ugettext_lazy("Quota"),
required=False,
help_text=_("Quota in MB for this mailbox. Define a custom value or "
"use domain's default one. Leave empty to define an "
"unlimited value (not allowed for domain "
"administrators)."),
widget=forms.widgets.TextInput(attrs={"class": "form-control"})
)
quota_act = forms.BooleanField(required=False)
aliases = forms.EmailField(
label=ugettext_lazy("Alias(es)"),
required=False,
help_text=ugettext_lazy(
"Alias(es) of this mailbox. Indicate only one address per input, "
"press ENTER to add a new input. Use the '*' character to create "
"a 'catchall' alias (ex: *@domain.tld)."
)
)
def __init__(self, *args, **kwargs):
if "instance" in kwargs:
self.mb = kwargs["instance"]
del kwargs["instance"]
else:
self.mb = None
super(AccountFormMail, self).__init__(*args, **kwargs)
self.field_widths = {
"quota": 3
}
self.extra_fields = []
result = events.raiseQueryEvent('ExtraFormFields', 'mailform', self.mb)
for fname, field in result:
self.fields[fname] = field
self.extra_fields.append(fname)
if self.mb is not None:
self.fields["email"].required = True
cpt = 1
for alias in self.mb.alias_set.all():
if len(alias.get_recipients()) >= 2:
continue
name = "aliases_%d" % cpt
self._create_field(forms.EmailField, name, alias.full_address)
cpt += 1
self.fields["email"].initial = self.mb.full_address
self.fields["quota_act"].initial = self.mb.use_domain_quota
if not self.mb.use_domain_quota and self.mb.quota:
self.fields["quota"].initial = self.mb.quota
else:
self.fields["quota_act"].initial = True
if len(args) and isinstance(args[0], QueryDict):
self._load_from_qdict(args[0], "aliases", forms.EmailField)
def clean_email(self):
"""Ensure lower case emails"""
return self.cleaned_data["email"].lower()
def clean(self):
"""Custom fields validation.
Check if quota is >= 0 only when the domain value is not used.
"""
super(AccountFormMail, self).clean()
if not self.cleaned_data["quota_act"] \
and self.cleaned_data['quota'] is not None:
if self.cleaned_data["quota"] < 0:
self.add_error("quota", _("Must be a positive integer"))
return self.cleaned_data
def create_mailbox(self, user, account):
"""Create a mailbox associated to :kw:`account`."""
locpart, domname = split_mailbox(self.cleaned_data["email"])
try:
domain = Domain.objects.get(name=domname)
except Domain.DoesNotExist:
raise NotFound(_("Domain does not exist"))
if not user.can_access(domain):
raise PermDeniedException
try:
Mailbox.objects.get(address=locpart, domain=domain)
except Mailbox.DoesNotExist:
pass
else:
            raise Conflict(
                _("Mailbox %s already exists") % self.cleaned_data["email"]
            )
events.raiseEvent("CanCreate", user, "mailboxes")
self.mb = Mailbox(address=locpart, domain=domain, user=account,
use_domain_quota=self.cleaned_data["quota_act"])
self.mb.set_quota(self.cleaned_data["quota"],
user.has_perm("modoboa_admin.add_domain"))
self.mb.save(creator=user)
def update_mailbox(self, user, account):
newaddress = None
if self.cleaned_data["email"] != self.mb.full_address:
newaddress = self.cleaned_data["email"]
elif (account.group == "SimpleUsers" and
account.username != self.mb.full_address):
newaddress = account.username
if newaddress is not None:
self.mb.old_full_address = self.mb.full_address
local_part, domname = split_mailbox(newaddress)
try:
domain = Domain.objects.get(name=domname)
except Domain.DoesNotExist:
raise NotFound(_("Domain does not exist"))
if not user.can_access(domain):
raise PermDeniedException
self.mb.rename(local_part, domain)
self.mb.use_domain_quota = self.cleaned_data["quota_act"]
        override_rules = (
            not self.mb.quota or user.has_perm("modoboa_admin.add_domain")
        )
self.mb.set_quota(self.cleaned_data["quota"], override_rules)
self.mb.save()
events.raiseEvent('MailboxModified', self.mb)
def _update_aliases(self, user, account):
"""Update mailbox aliases."""
aliases = []
for name, value in self.cleaned_data.iteritems():
if not name.startswith("aliases"):
continue
if value == "":
continue
aliases.append(value.lower())
for alias in self.mb.alias_set.all():
if alias.full_address not in aliases:
if len(alias.get_recipients()) >= 2:
continue
alias.delete()
else:
aliases.remove(alias.full_address)
if not aliases:
return
events.raiseEvent(
"CanCreate", user, "mailbox_aliases", len(aliases)
)
for alias in aliases:
local_part, domname = split_mailbox(alias)
try:
self.mb.alias_set.get(address=local_part, domain__name=domname)
except Alias.DoesNotExist:
pass
else:
continue
al = Alias(address=local_part, enabled=account.is_active)
al.domain = Domain.objects.get(name=domname)
al.save(int_rcpts=[self.mb])
al.post_create(user)
def save(self, user, account):
"""Save or update account mailbox."""
if self.cleaned_data["email"] == "":
return None
if self.cleaned_data["quota_act"]:
self.cleaned_data["quota"] = None
if not hasattr(self, "mb") or self.mb is None:
self.create_mailbox(user, account)
else:
self.update_mailbox(user, account)
events.raiseEvent(
'SaveExtraFormFields', 'mailform', self.mb, self.cleaned_data
)
account.email = self.cleaned_data["email"]
account.save()
self._update_aliases(user, account)
return self.mb
class AccountPermissionsForm(forms.Form, DynamicForm):
domains = DomainNameField(
label=ugettext_lazy("Domain(s)"),
required=False,
help_text=ugettext_lazy("Domain(s) that user administrates")
)
def __init__(self, *args, **kwargs):
if "instance" in kwargs:
self.account = kwargs["instance"]
del kwargs["instance"]
super(AccountPermissionsForm, self).__init__(*args, **kwargs)
if not hasattr(self, "account") or self.account is None:
return
for pos, dom in enumerate(Domain.objects.get_for_admin(self.account)):
name = "domains_%d" % (pos + 1)
self._create_field(DomainNameField, name, dom.name)
if len(args) and isinstance(args[0], QueryDict):
self._load_from_qdict(args[0], "domains", DomainNameField)
def save(self):
current_domains = [
dom.name for dom in Domain.objects.get_for_admin(self.account)
]
for name, value in self.cleaned_data.items():
if not name.startswith("domains"):
continue
if value in ["", None]:
continue
if value not in current_domains:
domain = Domain.objects.get(name=value)
domain.add_admin(self.account)
for domain in Domain.objects.get_for_admin(self.account):
            if not any(self.cleaned_data[name] == domain.name
                       for name in self.cleaned_data.keys()):
                domain.remove_admin(self.account)
class AccountForm(TabForms):
"""Account edition form."""
def __init__(self, request, *args, **kwargs):
self.user = request.user
self.forms = [
dict(id="general", title=_("General"),
formtpl="modoboa_admin/account_general_form.html",
cls=AccountFormGeneral,
new_args=[self.user], mandatory=True),
dict(id="mail",
title=_("Mail"), formtpl="modoboa_admin/mailform.html",
cls=AccountFormMail),
dict(
id="perms", title=_("Permissions"),
formtpl="modoboa_admin/permsform.html",
cls=AccountPermissionsForm
)
]
cbargs = [self.user]
if "instances" in kwargs:
cbargs += [kwargs["instances"]["general"]]
self.forms += events.raiseQueryEvent("ExtraAccountForm", *cbargs)
super(AccountForm, self).__init__(request, *args, **kwargs)
def extra_context(self, context):
account = self.instances["general"]
context.update({
'title': account.username,
'formid': 'accountform',
'action': reverse("modoboa_admin:account_change",
args=[account.id]),
})
def check_perms(self, account):
if account.is_superuser:
return False
return self.user.has_perm("modoboa_admin.add_domain") \
and account.has_perm("core.add_user")
def _before_is_valid(self, form):
if form["id"] == "general":
return True
if hasattr(self, "check_%s" % form["id"]):
if not getattr(self, "check_%s" % form["id"])(self.account):
return False
return True
extra_forms = events.raiseQueryEvent(
"CheckExtraAccountForm", self.account, form)
if False in extra_forms:
return False
return True
def is_valid(self):
"""Two steps validation.
"""
self.instances["general"].oldgroup = self.instances["general"].group
if super(AccountForm, self).is_valid(mandatory_only=True):
self.account = self.forms[0]["instance"].save()
return super(AccountForm, self).is_valid(optional_only=True)
return False
def save(self):
"""Custom save method
As forms interact with each other, it is simpler to make
custom code to save them.
"""
events.raiseEvent(
"AccountModified", self.instances["general"], self.account
)
self.forms[1]["instance"].save(self.user, self.account)
if len(self.forms) <= 2:
return
for f in self.forms[2:]:
f["instance"].save()
def done(self):
return render_to_json_response(_("Account updated"))
class AccountWizard(WizardForm):
"""Account creation wizard."""
def __init__(self, request):
super(AccountWizard, self).__init__(request)
self.add_step(
AccountFormGeneral, _("General"), new_args=[request.user]
)
self.add_step(
AccountFormMail, _("Mail"), formtpl="modoboa_admin/mailform.html"
)
def extra_context(self, context):
context.update({
'title': _("New account"),
'action': reverse("modoboa_admin:account_add"),
'formid': 'newaccount_form'
})
def done(self):
from modoboa.lib.web_utils import render_to_json_response
account = self.first_step.form.save()
account.post_create(self.request.user)
mailform = self.steps[1].form
mailform.save(self.request.user, account)
return render_to_json_response(_("Account created"))
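
# Hedged usage sketch (assumptions: 'request' comes from a Django view and
# 'account' is an existing User instance; TabForms is expected to accept the
# 'instances' keyword consumed in AccountForm.__init__):
#
#     wizard = AccountWizard(request)                               # creation
#     form = AccountForm(request, instances={"general": account})   # edition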
|
{
"content_hash": "940f00ae6c4a5994f6a212d1fc498d0e",
"timestamp": "",
"source": "github",
"line_count": 478,
"max_line_length": 79,
"avg_line_length": 37.10041841004184,
"alnum_prop": 0.5707116273824292,
"repo_name": "disko/modoboa-admin",
"id": "d73337dcd00bffaa858d3b6ce165ae85f0406e16",
"size": "17734",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modoboa_admin/forms/account.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "676"
},
{
"name": "HTML",
"bytes": "13386"
},
{
"name": "JavaScript",
"bytes": "16827"
},
{
"name": "Python",
"bytes": "183282"
}
],
"symlink_target": ""
}
|
import os
from setuptools import setup, find_packages, Command
import sys
from gunicorn import __version__
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content']
# read long description
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
long_description = f.read()
# read dev requirements
fname = os.path.join(os.path.dirname(__file__), 'requirements_dev.txt')
with open(fname) as f:
tests_require = list(map(lambda l: l.strip(), f.readlines()))
class PyTest(Command):
user_options = [
("cov", None, "measure coverage")
]
def initialize_options(self):
self.cov = None
def finalize_options(self):
pass
def run(self):
import subprocess
basecmd = [sys.executable, '-m', 'pytest']
if self.cov:
basecmd += ['--cov', 'gunicorn']
errno = subprocess.call(basecmd + ['tests'])
raise SystemExit(errno)
REQUIREMENTS = []
setup(
name = 'gunicorn',
version = __version__,
description = 'WSGI HTTP Server for UNIX',
long_description = long_description,
author = 'Benoit Chesneau',
author_email = 'benoitc@e-engura.com',
license = 'MIT',
url = 'http://gunicorn.org',
classifiers = CLASSIFIERS,
zip_safe = False,
packages = find_packages(exclude=['examples', 'tests']),
include_package_data = True,
tests_require = tests_require,
cmdclass = {'test': PyTest},
install_requires = REQUIREMENTS,
entry_points="""
[console_scripts]
gunicorn=gunicorn.app.wsgiapp:run
gunicorn_django=gunicorn.app.djangoapp:run
gunicorn_paster=gunicorn.app.pasterapp:run
[paste.server_runner]
main=gunicorn.app.pasterapp:paste_server
"""
)
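
# Illustrative commands enabled by this script:
#
#     pip install .                # installs gunicorn plus the console scripts
#     python setup.py test         # runs the suite through the PyTest command
#     python setup.py test --cov   # same, with coverage measured for 'gunicorn'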
|
{
"content_hash": "ce0c5e8158654d3c33e75ba7b6cac2c4",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 71,
"avg_line_length": 27.9247311827957,
"alnum_prop": 0.6241817481709665,
"repo_name": "DataDog/gunicorn",
"id": "be54106f6582dfeb69e90cffd4995efb93ee76e9",
"size": "2726",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
import os
try:
__IPYTHON__
import sys
del sys.argv[1:]
except NameError:
    # __IPYTHON__ is undefined outside IPython; keep sys.argv as-is.
    pass
import srwl_bl
import srwlib
import srwlpy
import srwl_uti_smp
def set_optics(v=None):
el = []
pp = []
names = ['M1', 'M1_Grating', 'Grating', 'GA', 'GA_M3A', 'M3A', 'M3', 'M3_SSA', 'SSA', 'SSA_KBAperture', 'KBAperture', 'KBh', 'KBh_KBv', 'KBv', 'KBv_Sample', 'Sample']
for el_name in names:
if el_name == 'M1':
# M1: mirror 34.366m
mirror_file = v.op_M1_hfn
assert os.path.isfile(mirror_file), \
'Missing input file {}, required by M1 beamline element'.format(mirror_file)
el.append(srwlib.srwl_opt_setup_surf_height_1d(
srwlib.srwl_uti_read_data_cols(mirror_file, "\t", 0, 1),
_dim=v.op_M1_dim,
_ang=abs(v.op_M1_ang),
_amp_coef=v.op_M1_amp_coef,
_size_x=v.op_M1_size_x,
_size_y=v.op_M1_size_y,
))
pp.append(v.op_M1_pp)
elif el_name == 'M1_Grating':
# M1_Grating: drift 34.366m
el.append(srwlib.SRWLOptD(
_L=v.op_M1_Grating_L,
))
pp.append(v.op_M1_Grating_pp)
elif el_name == 'Grating':
# Grating: grating 55.0m
mirror = srwlib.SRWLOptMirPl(
_size_tang=v.op_Grating_size_tang,
_size_sag=v.op_Grating_size_sag,
_nvx=v.op_Grating_nvx,
_nvy=v.op_Grating_nvy,
_nvz=v.op_Grating_nvz,
_tvx=v.op_Grating_tvx,
_tvy=v.op_Grating_tvy,
_x=v.op_Grating_x,
_y=v.op_Grating_y,
)
el.append(srwlib.SRWLOptG(
_mirSub=mirror,
_m=v.op_Grating_m,
_grDen=v.op_Grating_grDen,
_grDen1=v.op_Grating_grDen1,
_grDen2=v.op_Grating_grDen2,
_grDen3=v.op_Grating_grDen3,
_grDen4=v.op_Grating_grDen4,
))
pp.append(v.op_Grating_pp)
elif el_name == 'GA':
# GA: aperture 55.0m
el.append(srwlib.SRWLOptA(
_shape=v.op_GA_shape,
_ap_or_ob='a',
_Dx=v.op_GA_Dx,
_Dy=v.op_GA_Dy,
_x=v.op_GA_x,
_y=v.op_GA_y,
))
pp.append(v.op_GA_pp)
elif el_name == 'GA_M3A':
# GA_M3A: drift 55.0m
el.append(srwlib.SRWLOptD(
_L=v.op_GA_M3A_L,
))
pp.append(v.op_GA_M3A_pp)
elif el_name == 'M3A':
# M3A: aperture 89.63m
el.append(srwlib.SRWLOptA(
_shape=v.op_M3A_shape,
_ap_or_ob='a',
_Dx=v.op_M3A_Dx,
_Dy=v.op_M3A_Dy,
_x=v.op_M3A_x,
_y=v.op_M3A_y,
))
pp.append(v.op_M3A_pp)
elif el_name == 'M3':
# M3: ellipsoidMirror 89.63m
el.append(srwlib.SRWLOptMirEl(
_p=v.op_M3_p,
_q=v.op_M3_q,
_ang_graz=v.op_M3_ang,
_size_tang=v.op_M3_size_tang,
_size_sag=v.op_M3_size_sag,
_nvx=v.op_M3_nvx,
_nvy=v.op_M3_nvy,
_nvz=v.op_M3_nvz,
_tvx=v.op_M3_tvx,
_tvy=v.op_M3_tvy,
_x=v.op_M3_x,
_y=v.op_M3_y,
))
pp.append(v.op_M3_pp)
elif el_name == 'M3_SSA':
# M3_SSA: drift 89.63m
el.append(srwlib.SRWLOptD(
_L=v.op_M3_SSA_L,
))
pp.append(v.op_M3_SSA_pp)
elif el_name == 'SSA':
# SSA: aperture 97.636m
el.append(srwlib.SRWLOptA(
_shape=v.op_SSA_shape,
_ap_or_ob='a',
_Dx=v.op_SSA_Dx,
_Dy=v.op_SSA_Dy,
_x=v.op_SSA_x,
_y=v.op_SSA_y,
))
pp.append(v.op_SSA_pp)
elif el_name == 'SSA_KBAperture':
# SSA_KBAperture: drift 97.636m
el.append(srwlib.SRWLOptD(
_L=v.op_SSA_KBAperture_L,
))
pp.append(v.op_SSA_KBAperture_pp)
elif el_name == 'KBAperture':
# KBAperture: aperture 103.646m
el.append(srwlib.SRWLOptA(
_shape=v.op_KBAperture_shape,
_ap_or_ob='a',
_Dx=v.op_KBAperture_Dx,
_Dy=v.op_KBAperture_Dy,
_x=v.op_KBAperture_x,
_y=v.op_KBAperture_y,
))
pp.append(v.op_KBAperture_pp)
elif el_name == 'KBh':
# KBh: ellipsoidMirror 103.646m
el.append(srwlib.SRWLOptMirEl(
_p=v.op_KBh_p,
_q=v.op_KBh_q,
_ang_graz=v.op_KBh_ang,
_size_tang=v.op_KBh_size_tang,
_size_sag=v.op_KBh_size_sag,
_nvx=v.op_KBh_nvx,
_nvy=v.op_KBh_nvy,
_nvz=v.op_KBh_nvz,
_tvx=v.op_KBh_tvx,
_tvy=v.op_KBh_tvy,
_x=v.op_KBh_x,
_y=v.op_KBh_y,
))
pp.append(v.op_KBh_pp)
elif el_name == 'KBh_KBv':
# KBh_KBv: drift 103.646m
el.append(srwlib.SRWLOptD(
_L=v.op_KBh_KBv_L,
))
pp.append(v.op_KBh_KBv_pp)
elif el_name == 'KBv':
# KBv: ellipsoidMirror 104.146m
el.append(srwlib.SRWLOptMirEl(
_p=v.op_KBv_p,
_q=v.op_KBv_q,
_ang_graz=v.op_KBv_ang,
_size_tang=v.op_KBv_size_tang,
_size_sag=v.op_KBv_size_sag,
_nvx=v.op_KBv_nvx,
_nvy=v.op_KBv_nvy,
_nvz=v.op_KBv_nvz,
_tvx=v.op_KBv_tvx,
_tvy=v.op_KBv_tvy,
_x=v.op_KBv_x,
_y=v.op_KBv_y,
))
pp.append(v.op_KBv_pp)
elif el_name == 'KBv_Sample':
# KBv_Sample: drift 104.146m
el.append(srwlib.SRWLOptD(
_L=v.op_KBv_Sample_L,
))
pp.append(v.op_KBv_Sample_pp)
elif el_name == 'Sample':
# Sample: watch 104.557m
pass
pp.append(v.op_fin_pp)
return srwlib.SRWLOptC(el, pp)
varParam = srwl_bl.srwl_uti_ext_options([
['name', 's', 'NSLS-II ESM beamline', 'simulation name'],
#---Data Folder
['fdir', 's', '', 'folder (directory) name for reading-in input and saving output data files'],
#---Electron Beam
['ebm_nm', 's', 'NSLS-II Low Beta Final', 'standard electron beam name'],
['ebm_nms', 's', '', 'standard electron beam name suffix: e.g. can be Day1, Final'],
['ebm_i', 'f', 0.5, 'electron beam current [A]'],
    ['ebm_e', 'f', 3.0, 'electron beam average energy [GeV]'],
['ebm_de', 'f', 0.0, 'electron beam average energy deviation [GeV]'],
['ebm_x', 'f', 0.0, 'electron beam initial average horizontal position [m]'],
['ebm_y', 'f', 0.0, 'electron beam initial average vertical position [m]'],
['ebm_xp', 'f', 0.0, 'electron beam initial average horizontal angle [rad]'],
['ebm_yp', 'f', 0.0, 'electron beam initial average vertical angle [rad]'],
['ebm_z', 'f', 0., 'electron beam initial average longitudinal position [m]'],
['ebm_dr', 'f', -1.86675, 'electron beam longitudinal drift [m] to be performed before a required calculation'],
['ebm_ens', 'f', 0.00089, 'electron beam relative energy spread'],
['ebm_emx', 'f', 5.5e-10, 'electron beam horizontal emittance [m]'],
['ebm_emy', 'f', 8e-12, 'electron beam vertical emittance [m]'],
# Definition of the beam through Twiss:
['ebm_betax', 'f', 2.02, 'horizontal beta-function [m]'],
['ebm_betay', 'f', 1.06, 'vertical beta-function [m]'],
['ebm_alphax', 'f', 0.0, 'horizontal alpha-function [rad]'],
['ebm_alphay', 'f', 0.0, 'vertical alpha-function [rad]'],
['ebm_etax', 'f', 0.0, 'horizontal dispersion function [m]'],
['ebm_etay', 'f', 0.0, 'vertical dispersion function [m]'],
['ebm_etaxp', 'f', 0.0, 'horizontal dispersion function derivative [rad]'],
['ebm_etayp', 'f', 0.0, 'vertical dispersion function derivative [rad]'],
# Definition of the beam through Moments:
['ebm_sigx', 'f', 3.3331666625e-05, 'horizontal RMS size of electron beam [m]'],
['ebm_sigy', 'f', 2.91204395571e-06, 'vertical RMS size of electron beam [m]'],
['ebm_sigxp', 'f', 1.65008250619e-05, 'horizontal RMS angular divergence of electron beam [rad]'],
['ebm_sigyp', 'f', 2.74721127897e-06, 'vertical RMS angular divergence of electron beam [rad]'],
['ebm_mxxp', 'f', 0.0, 'horizontal position-angle mixed 2nd order moment of electron beam [m]'],
['ebm_myyp', 'f', 0.0, 'vertical position-angle mixed 2nd order moment of electron beam [m]'],
#---Undulator
['und_bx', 'f', 0.0, 'undulator horizontal peak magnetic field [T]'],
['und_by', 'f', 0.187782, 'undulator vertical peak magnetic field [T]'],
['und_phx', 'f', 0.0, 'initial phase of the horizontal magnetic field [rad]'],
['und_phy', 'f', 0.0, 'initial phase of the vertical magnetic field [rad]'],
['und_b2e', '', '', 'estimate undulator fundamental photon energy (in [eV]) for the amplitude of sinusoidal magnetic field defined by und_b or und_bx, und_by', 'store_true'],
['und_e2b', '', '', 'estimate undulator field amplitude (in [T]) for the photon energy defined by w_e', 'store_true'],
['und_per', 'f', 0.057, 'undulator period [m]'],
['und_len', 'f', 3.5055, 'undulator length [m]'],
['und_zc', 'f', 0.0, 'undulator center longitudinal position [m]'],
['und_sx', 'i', 1, 'undulator horizontal magnetic field symmetry vs longitudinal position'],
['und_sy', 'i', 1, 'undulator vertical magnetic field symmetry vs longitudinal position'],
['und_g', 'f', 6.72, 'undulator gap [mm] (assumes availability of magnetic measurement or simulation data)'],
['und_ph', 'f', 0.0, 'shift of magnet arrays [mm] for which the field should be set up'],
['und_mdir', 's', '', 'name of magnetic measurements sub-folder'],
['und_mfs', 's', '', 'name of magnetic measurements for different gaps summary file'],
#---Calculation Types
# Electron Trajectory
['tr', '', '', 'calculate electron trajectory', 'store_true'],
['tr_cti', 'f', 0.0, 'initial time moment (c*t) for electron trajectory calculation [m]'],
['tr_ctf', 'f', 0.0, 'final time moment (c*t) for electron trajectory calculation [m]'],
['tr_np', 'f', 10000, 'number of points for trajectory calculation'],
['tr_mag', 'i', 1, 'magnetic field to be used for trajectory calculation: 1- approximate, 2- accurate'],
['tr_fn', 's', 'res_trj.dat', 'file name for saving calculated trajectory data'],
    ['tr_pl', 's', '', 'plot the resulting trajectory in graph(s): ""- dont plot, otherwise the string should list the trajectory components to plot'],
#Single-Electron Spectrum vs Photon Energy
['ss', '', '', 'calculate single-e spectrum vs photon energy', 'store_true'],
['ss_ei', 'f', 100.0, 'initial photon energy [eV] for single-e spectrum vs photon energy calculation'],
['ss_ef', 'f', 20000.0, 'final photon energy [eV] for single-e spectrum vs photon energy calculation'],
['ss_ne', 'i', 10000, 'number of points vs photon energy for single-e spectrum vs photon energy calculation'],
['ss_x', 'f', 0.0, 'horizontal position [m] for single-e spectrum vs photon energy calculation'],
['ss_y', 'f', 0.0, 'vertical position [m] for single-e spectrum vs photon energy calculation'],
['ss_meth', 'i', 1, 'method to use for single-e spectrum vs photon energy calculation: 0- "manual", 1- "auto-undulator", 2- "auto-wiggler"'],
['ss_prec', 'f', 0.01, 'relative precision for single-e spectrum vs photon energy calculation (nominal value is 0.01)'],
['ss_pol', 'i', 6, 'polarization component to extract after spectrum vs photon energy calculation: 0- Linear Horizontal, 1- Linear Vertical, 2- Linear 45 degrees, 3- Linear 135 degrees, 4- Circular Right, 5- Circular Left, 6- Total'],
['ss_mag', 'i', 1, 'magnetic field to be used for single-e spectrum vs photon energy calculation: 1- approximate, 2- accurate'],
['ss_ft', 's', 'f', 'presentation/domain: "f"- frequency (photon energy), "t"- time'],
['ss_u', 'i', 1, 'electric field units: 0- arbitrary, 1- sqrt(Phot/s/0.1%bw/mm^2), 2- sqrt(J/eV/mm^2) or sqrt(W/mm^2), depending on representation (freq. or time)'],
['ss_fn', 's', 'res_spec_se.dat', 'file name for saving calculated single-e spectrum vs photon energy'],
['ss_pl', 's', '', 'plot the resulting single-e spectrum in a graph: ""- dont plot, "e"- show plot vs photon energy'],
#Multi-Electron Spectrum vs Photon Energy (taking into account e-beam emittance, energy spread and collection aperture size)
['sm', '', '', 'calculate multi-e spectrum vs photon energy', 'store_true'],
['sm_ei', 'f', 100.0, 'initial photon energy [eV] for multi-e spectrum vs photon energy calculation'],
['sm_ef', 'f', 20000.0, 'final photon energy [eV] for multi-e spectrum vs photon energy calculation'],
['sm_ne', 'i', 10000, 'number of points vs photon energy for multi-e spectrum vs photon energy calculation'],
['sm_x', 'f', 0.0, 'horizontal center position [m] for multi-e spectrum vs photon energy calculation'],
['sm_rx', 'f', 0.001, 'range of horizontal position / horizontal aperture size [m] for multi-e spectrum vs photon energy calculation'],
['sm_nx', 'i', 1, 'number of points vs horizontal position for multi-e spectrum vs photon energy calculation'],
['sm_y', 'f', 0.0, 'vertical center position [m] for multi-e spectrum vs photon energy calculation'],
['sm_ry', 'f', 0.001, 'range of vertical position / vertical aperture size [m] for multi-e spectrum vs photon energy calculation'],
['sm_ny', 'i', 1, 'number of points vs vertical position for multi-e spectrum vs photon energy calculation'],
    ['sm_mag', 'i', 1, 'magnetic field to be used for calculation of multi-e spectrum or intensity distribution: 1- approximate, 2- accurate'],
['sm_hi', 'i', 1, 'initial UR spectral harmonic to be taken into account for multi-e spectrum vs photon energy calculation'],
['sm_hf', 'i', 15, 'final UR spectral harmonic to be taken into account for multi-e spectrum vs photon energy calculation'],
['sm_prl', 'f', 1.0, 'longitudinal integration precision parameter for multi-e spectrum vs photon energy calculation'],
['sm_pra', 'f', 1.0, 'azimuthal integration precision parameter for multi-e spectrum vs photon energy calculation'],
['sm_meth', 'i', -1, 'method to use for spectrum vs photon energy calculation in case of arbitrary input magnetic field: 0- "manual", 1- "auto-undulator", 2- "auto-wiggler", -1- dont use this accurate integration method (rather use approximate if possible)'],
['sm_prec', 'f', 0.01, 'relative precision for spectrum vs photon energy calculation in case of arbitrary input magnetic field (nominal value is 0.01)'],
['sm_nm', 'i', 1, 'number of macro-electrons for calculation of spectrum in case of arbitrary input magnetic field'],
['sm_na', 'i', 5, 'number of macro-electrons to average on each node at parallel (MPI-based) calculation of spectrum in case of arbitrary input magnetic field'],
['sm_ns', 'i', 5, 'saving periodicity (in terms of macro-electrons) for intermediate intensity at calculation of multi-electron spectrum in case of arbitrary input magnetic field'],
['sm_type', 'i', 1, 'calculate flux (=1) or flux per unit surface (=2)'],
['sm_pol', 'i', 6, 'polarization component to extract after calculation of multi-e flux or intensity: 0- Linear Horizontal, 1- Linear Vertical, 2- Linear 45 degrees, 3- Linear 135 degrees, 4- Circular Right, 5- Circular Left, 6- Total'],
['sm_rm', 'i', 1, 'method for generation of pseudo-random numbers for e-beam phase-space integration: 1- standard pseudo-random number generator, 2- Halton sequences, 3- LPtau sequences (to be implemented)'],
    ['sm_fn', 's', 'res_spec_me.dat', 'file name for saving calculated multi-e spectrum vs photon energy'],
    ['sm_pl', 's', '', 'plot the resulting multi-e spectrum in a graph: ""- dont plot, "e"- show plot vs photon energy'],
#to add options for the multi-e calculation from "accurate" magnetic field
#Power Density Distribution vs horizontal and vertical position
['pw', '', '', 'calculate SR power density distribution', 'store_true'],
['pw_x', 'f', 0.0, 'central horizontal position [m] for calculation of power density distribution vs horizontal and vertical position'],
['pw_rx', 'f', 0.015, 'range of horizontal position [m] for calculation of power density distribution vs horizontal and vertical position'],
['pw_nx', 'i', 100, 'number of points vs horizontal position for calculation of power density distribution'],
['pw_y', 'f', 0.0, 'central vertical position [m] for calculation of power density distribution vs horizontal and vertical position'],
['pw_ry', 'f', 0.015, 'range of vertical position [m] for calculation of power density distribution vs horizontal and vertical position'],
['pw_ny', 'i', 100, 'number of points vs vertical position for calculation of power density distribution'],
['pw_pr', 'f', 1.0, 'precision factor for calculation of power density distribution'],
['pw_meth', 'i', 1, 'power density computation method (1- "near field", 2- "far field")'],
    ['pw_zst', 'f', 0., 'initial longitudinal position along electron trajectory of power density distribution (effective if pw_zst < pw_zfi)'],
    ['pw_zfi', 'f', 0., 'final longitudinal position along electron trajectory of power density distribution (effective if pw_zst < pw_zfi)'],
['pw_mag', 'i', 1, 'magnetic field to be used for power density calculation: 1- approximate, 2- accurate'],
['pw_fn', 's', 'res_pow.dat', 'file name for saving calculated power density distribution'],
['pw_pl', 's', '', 'plot the resulting power density distribution in a graph: ""- dont plot, "x"- vs horizontal position, "y"- vs vertical position, "xy"- vs horizontal and vertical position'],
#Single-Electron Intensity distribution vs horizontal and vertical position
['si', '', '', 'calculate single-e intensity distribution (without wavefront propagation through a beamline) vs horizontal and vertical position', 'store_true'],
#Single-Electron Wavefront Propagation
['ws', '', '', 'calculate single-electron (/ fully coherent) wavefront propagation', 'store_true'],
#Multi-Electron (partially-coherent) Wavefront Propagation
['wm', '', '', 'calculate multi-electron (/ partially coherent) wavefront propagation', 'store_true'],
['w_e', 'f', 1000.0, 'photon energy [eV] for calculation of intensity distribution vs horizontal and vertical position'],
['w_ef', 'f', -1.0, 'final photon energy [eV] for calculation of intensity distribution vs horizontal and vertical position'],
['w_ne', 'i', 1, 'number of points vs photon energy for calculation of intensity distribution'],
['w_x', 'f', 0.0, 'central horizontal position [m] for calculation of intensity distribution'],
['w_rx', 'f', 0.002, 'range of horizontal position [m] for calculation of intensity distribution'],
['w_nx', 'i', 100, 'number of points vs horizontal position for calculation of intensity distribution'],
['w_y', 'f', 0.0, 'central vertical position [m] for calculation of intensity distribution vs horizontal and vertical position'],
['w_ry', 'f', 0.002, 'range of vertical position [m] for calculation of intensity distribution vs horizontal and vertical position'],
['w_ny', 'i', 100, 'number of points vs vertical position for calculation of intensity distribution'],
['w_smpf', 'f', 1.0, 'sampling factor for calculation of intensity distribution vs horizontal and vertical position'],
['w_meth', 'i', 1, 'method to use for calculation of intensity distribution vs horizontal and vertical position: 0- "manual", 1- "auto-undulator", 2- "auto-wiggler"'],
['w_prec', 'f', 0.01, 'relative precision for calculation of intensity distribution vs horizontal and vertical position'],
['w_u', 'i', 1, 'electric field units: 0- arbitrary, 1- sqrt(Phot/s/0.1%bw/mm^2), 2- sqrt(J/eV/mm^2) or sqrt(W/mm^2), depending on representation (freq. or time)'],
['si_pol', 'i', 6, 'polarization component to extract after calculation of intensity distribution: 0- Linear Horizontal, 1- Linear Vertical, 2- Linear 45 degrees, 3- Linear 135 degrees, 4- Circular Right, 5- Circular Left, 6- Total'],
['si_type', 'i', 0, 'type of a characteristic to be extracted after calculation of intensity distribution: 0- Single-Electron Intensity, 1- Multi-Electron Intensity, 2- Single-Electron Flux, 3- Multi-Electron Flux, 4- Single-Electron Radiation Phase, 5- Re(E): Real part of Single-Electron Electric Field, 6- Im(E): Imaginary part of Single-Electron Electric Field, 7- Single-Electron Intensity, integrated over Time or Photon Energy'],
['w_mag', 'i', 1, 'magnetic field to be used for calculation of intensity distribution vs horizontal and vertical position: 1- approximate, 2- accurate'],
['si_fn', 's', 'res_int_se.dat', 'file name for saving calculated single-e intensity distribution (without wavefront propagation through a beamline) vs horizontal and vertical position'],
['si_pl', 's', '', 'plot the input intensity distributions in graph(s): ""- dont plot, "x"- vs horizontal position, "y"- vs vertical position, "xy"- vs horizontal and vertical position'],
['ws_fni', 's', 'res_int_pr_se.dat', 'file name for saving propagated single-e intensity distribution vs horizontal and vertical position'],
['ws_pl', 's', '', 'plot the resulting intensity distributions in graph(s): ""- dont plot, "x"- vs horizontal position, "y"- vs vertical position, "xy"- vs horizontal and vertical position'],
['wm_nm', 'i', 100000, 'number of macro-electrons (coherent wavefronts) for calculation of multi-electron wavefront propagation'],
['wm_na', 'i', 5, 'number of macro-electrons (coherent wavefronts) to average on each node for parallel (MPI-based) calculation of multi-electron wavefront propagation'],
['wm_ns', 'i', 5, 'saving periodicity (in terms of macro-electrons / coherent wavefronts) for intermediate intensity at multi-electron wavefront propagation calculation'],
['wm_ch', 'i', 0, 'type of a characteristic to be extracted after calculation of multi-electron wavefront propagation: #0- intensity (s0); 1- four Stokes components; 2- mutual intensity cut vs x; 3- mutual intensity cut vs y'],
['wm_ap', 'i', 0, 'switch specifying representation of the resulting Stokes parameters: coordinate (0) or angular (1)'],
['wm_x0', 'f', 0, 'horizontal center position for mutual intensity cut calculation'],
['wm_y0', 'f', 0, 'vertical center position for mutual intensity cut calculation'],
['wm_ei', 'i', 0, 'integration over photon energy is required (1) or not (0); if the integration is required, the limits are taken from w_e, w_ef'],
['wm_rm', 'i', 1, 'method for generation of pseudo-random numbers for e-beam phase-space integration: 1- standard pseudo-random number generator, 2- Halton sequences, 3- LPtau sequences (to be implemented)'],
['wm_am', 'i', 0, 'multi-electron integration approximation method: 0- no approximation (use the standard 5D integration method), 1- integrate numerically only over e-beam energy spread and use convolution to treat transverse emittance'],
['wm_fni', 's', 'res_int_pr_me.dat', 'file name for saving propagated multi-e intensity distribution vs horizontal and vertical position'],
#to add options
['op_r', 'f', 34.366, 'longitudinal position of the first optical element [m]'],
# Former appParam:
    ['rs_type', 's', 'u', 'source type: (u) idealized undulator, (t) tabulated undulator, (m) multipole, (g) gaussian beam'],
#---Beamline optics:
# M1: mirror
['op_M1_hfn', 's', 'mirror_1d.dat', 'heightProfileFile'],
['op_M1_dim', 's', 'x', 'orientation'],
['op_M1_ang', 'f', 0.0436332, 'grazingAngle'],
['op_M1_amp_coef', 'f', 1.0, 'heightAmplification'],
['op_M1_size_x', 'f', 0.001, 'horizontalTransverseSize'],
['op_M1_size_y', 'f', 0.001, 'verticalTransverseSize'],
# M1_Grating: drift
['op_M1_Grating_L', 'f', 20.634, 'length'],
# Grating: grating
['op_Grating_size_tang', 'f', 0.2, 'tangentialSize'],
['op_Grating_size_sag', 'f', 0.015, 'sagittalSize'],
['op_Grating_nvx', 'f', 0.0, 'normalVectorX'],
['op_Grating_nvy', 'f', 0.99991607766, 'normalVectorY'],
['op_Grating_nvz', 'f', -0.0129552165771, 'normalVectorZ'],
['op_Grating_tvx', 'f', 0.0, 'tangentialVectorX'],
['op_Grating_tvy', 'f', 0.0129552165771, 'tangentialVectorY'],
['op_Grating_x', 'f', 0.0, 'horizontalOffset'],
['op_Grating_y', 'f', 0.0, 'verticalOffset'],
['op_Grating_m', 'f', 1.0, 'diffractionOrder'],
['op_Grating_grDen', 'f', 1800.0, 'grooveDensity0'],
['op_Grating_grDen1', 'f', 0.08997, 'grooveDensity1'],
['op_Grating_grDen2', 'f', 3.004e-06, 'grooveDensity2'],
['op_Grating_grDen3', 'f', 9.73e-11, 'grooveDensity3'],
['op_Grating_grDen4', 'f', 0.0, 'grooveDensity4'],
# GA: aperture
['op_GA_shape', 's', 'r', 'shape'],
['op_GA_Dx', 'f', 0.015, 'horizontalSize'],
['op_GA_Dy', 'f', 0.00259104331543, 'verticalSize'],
['op_GA_x', 'f', 0.0, 'horizontalOffset'],
['op_GA_y', 'f', 0.0, 'verticalOffset'],
# GA_M3A: drift
['op_GA_M3A_L', 'f', 34.63, 'length'],
# M3A: aperture
['op_M3A_shape', 's', 'r', 'shape'],
['op_M3A_Dx', 'f', 0.01832012956, 'horizontalSize'],
['op_M3A_Dy', 'f', 0.02, 'verticalSize'],
['op_M3A_x', 'f', 0.0, 'horizontalOffset'],
['op_M3A_y', 'f', 0.0, 'verticalOffset'],
# M3: ellipsoidMirror
['op_M3_hfn', 's', 'None', 'heightProfileFile'],
['op_M3_dim', 's', 'x', 'orientation'],
['op_M3_p', 'f', 89.63, 'firstFocusLength'],
['op_M3_q', 'f', 8.006, 'focalLength'],
['op_M3_ang', 'f', 0.0436332, 'grazingAngle'],
['op_M3_amp_coef', 'f', 1.0, 'heightAmplification'],
['op_M3_size_tang', 'f', 0.42, 'tangentialSize'],
['op_M3_size_sag', 'f', 0.02, 'sagittalSize'],
['op_M3_nvx', 'f', 0.999048222947, 'normalVectorX'],
['op_M3_nvy', 'f', 0.0, 'normalVectorY'],
['op_M3_nvz', 'f', -0.0436193560953, 'normalVectorZ'],
['op_M3_tvx', 'f', -0.0436193560953, 'tangentialVectorX'],
['op_M3_tvy', 'f', 0.0, 'tangentialVectorY'],
['op_M3_x', 'f', 0.0, 'horizontalOffset'],
['op_M3_y', 'f', 0.0, 'verticalOffset'],
# M3_SSA: drift
['op_M3_SSA_L', 'f', 8.006, 'length'],
# SSA: aperture
['op_SSA_shape', 's', 'r', 'shape'],
['op_SSA_Dx', 'f', 0.0015, 'horizontalSize'],
['op_SSA_Dy', 'f', 0.0015, 'verticalSize'],
['op_SSA_x', 'f', 0.0, 'horizontalOffset'],
['op_SSA_y', 'f', 0.0, 'verticalOffset'],
# SSA_KBAperture: drift
['op_SSA_KBAperture_L', 'f', 6.01, 'length'],
# KBAperture: aperture
['op_KBAperture_shape', 's', 'r', 'shape'],
['op_KBAperture_Dx', 'f', 0.0130858068286, 'horizontalSize'],
['op_KBAperture_Dy', 'f', 0.003, 'verticalSize'],
['op_KBAperture_x', 'f', 0.0, 'horizontalOffset'],
['op_KBAperture_y', 'f', 0.0, 'verticalOffset'],
# KBh: ellipsoidMirror
['op_KBh_hfn', 's', 'None', 'heightProfileFile'],
['op_KBh_dim', 's', 'x', 'orientation'],
['op_KBh_p', 'f', 6.01, 'firstFocusLength'],
['op_KBh_q', 'f', 0.911, 'focalLength'],
['op_KBh_ang', 'f', 0.0872665, 'grazingAngle'],
['op_KBh_amp_coef', 'f', 1.0, 'heightAmplification'],
['op_KBh_size_tang', 'f', 0.3, 'tangentialSize'],
['op_KBh_size_sag', 'f', 0.05, 'sagittalSize'],
['op_KBh_nvx', 'f', 0.996194694832, 'normalVectorX'],
['op_KBh_nvy', 'f', 0.0, 'normalVectorY'],
['op_KBh_nvz', 'f', -0.0871557800056, 'normalVectorZ'],
['op_KBh_tvx', 'f', -0.0871557800056, 'tangentialVectorX'],
['op_KBh_tvy', 'f', 0.0, 'tangentialVectorY'],
['op_KBh_x', 'f', 0.0, 'horizontalOffset'],
['op_KBh_y', 'f', 0.0, 'verticalOffset'],
# KBh_KBv: drift
['op_KBh_KBv_L', 'f', 0.5, 'length'],
# KBv: ellipsoidMirror
['op_KBv_hfn', 's', 'None', 'heightProfileFile'],
['op_KBv_dim', 's', 'x', 'orientation'],
['op_KBv_p', 'f', 6.51, 'firstFocusLength'],
['op_KBv_q', 'f', 0.411, 'focalLength'],
['op_KBv_ang', 'f', 0.0872665, 'grazingAngle'],
['op_KBv_amp_coef', 'f', 1.0, 'heightAmplification'],
['op_KBv_size_tang', 'f', 0.3, 'tangentialSize'],
['op_KBv_size_sag', 'f', 0.05, 'sagittalSize'],
['op_KBv_nvx', 'f', 0.0, 'normalVectorX'],
['op_KBv_nvy', 'f', 0.996194694832, 'normalVectorY'],
['op_KBv_nvz', 'f', -0.0871557800056, 'normalVectorZ'],
['op_KBv_tvx', 'f', 0.0, 'tangentialVectorX'],
['op_KBv_tvy', 'f', -0.0871557800056, 'tangentialVectorY'],
['op_KBv_x', 'f', 0.0, 'horizontalOffset'],
['op_KBv_y', 'f', 0.0, 'verticalOffset'],
# KBv_Sample: drift
['op_KBv_Sample_L', 'f', 0.411, 'length'],
#---Propagation parameters
['op_M1_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'M1'],
['op_M1_Grating_pp', 'f', [0, 0, 1.0, 1, 0, 1.2, 3.5, 1.2, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'M1_Grating'],
['op_Grating_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'Grating'],
['op_GA_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'GA'],
['op_GA_M3A_pp', 'f', [0, 0, 1.0, 1, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'GA_M3A'],
['op_M3A_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'M3A'],
['op_M3_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'M3'],
['op_M3_SSA_pp', 'f', [0, 0, 1.0, 1, 0, 3.0, 1.0, 3.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'M3_SSA'],
['op_SSA_pp', 'f', [0, 0, 1.0, 0, 0, 0.4, 1.0, 0.4, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'SSA'],
['op_SSA_KBAperture_pp', 'f', [0, 0, 1.0, 1, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'SSA_KBAperture'],
['op_KBAperture_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'KBAperture'],
['op_KBh_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'KBh'],
['op_KBh_KBv_pp', 'f', [0, 0, 1.0, 1, 0, 2.0, 1.0, 2.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'KBh_KBv'],
['op_KBv_pp', 'f', [0, 0, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'KBv'],
['op_KBv_Sample_pp', 'f', [0, 0, 1.0, 1, 0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'KBv_Sample'],
['op_fin_pp', 'f', [0, 0, 1.0, 0, 1, 0.07, 1.5, 0.07, 6.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'final post-propagation (resize) parameters'],
#[ 0]: Auto-Resize (1) or not (0) Before propagation
#[ 1]: Auto-Resize (1) or not (0) After propagation
#[ 2]: Relative Precision for propagation with Auto-Resizing (1. is nominal)
#[ 3]: Allow (1) or not (0) for semi-analytical treatment of the quadratic (leading) phase terms at the propagation
#[ 4]: Do any Resizing on Fourier side, using FFT, (1) or not (0)
#[ 5]: Horizontal Range modification factor at Resizing (1. means no modification)
#[ 6]: Horizontal Resolution modification factor at Resizing
#[ 7]: Vertical Range modification factor at Resizing
#[ 8]: Vertical Resolution modification factor at Resizing
#[ 9]: Type of wavefront Shift before Resizing (not yet implemented)
#[10]: New Horizontal wavefront Center position after Shift (not yet implemented)
#[11]: New Vertical wavefront Center position after Shift (not yet implemented)
#[12]: Optional: Orientation of the Output Optical Axis vector in the Incident Beam Frame: Horizontal Coordinate
#[13]: Optional: Orientation of the Output Optical Axis vector in the Incident Beam Frame: Vertical Coordinate
#[14]: Optional: Orientation of the Output Optical Axis vector in the Incident Beam Frame: Longitudinal Coordinate
#[15]: Optional: Orientation of the Horizontal Base vector of the Output Frame in the Incident Beam Frame: Horizontal Coordinate
#[16]: Optional: Orientation of the Horizontal Base vector of the Output Frame in the Incident Beam Frame: Vertical Coordinate
])
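# Worked reading of one propagation-parameter list against the legend above,
# using 'op_M1_Grating_pp' = [0, 0, 1.0, 1, 0, 1.2, 3.5, 1.2, 3.0, ...]:
#   [3] = 1   -> allow semi-analytical treatment of the quadratic phase terms
#   [5] = 1.2 -> horizontal range grows by 1.2x at resizing
#   [6] = 3.5 -> horizontal resolution grows by 3.5x
#   [7] = 1.2 and [8] = 3.0 -> the same two factors for the vertical plane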
def main():
v = srwl_bl.srwl_uti_parse_options(varParam, use_sys_argv=True)
op = set_optics(v)
v.ss = True
v.ss_pl = 'e'
v.sm = True
v.sm_pl = 'e'
v.pw = True
v.pw_pl = 'xy'
v.si = True
v.si_pl = 'xy'
v.tr = True
v.tr_pl = 'xz'
v.ws = True
v.ws_pl = 'xy'
mag = None
if v.rs_type == 'm':
mag = srwlib.SRWLMagFldC()
mag.arXc.append(0)
mag.arYc.append(0)
mag.arMagFld.append(srwlib.SRWLMagFldM(v.mp_field, v.mp_order, v.mp_distribution, v.mp_len))
mag.arZc.append(v.mp_zc)
srwl_bl.SRWLBeamline(_name=v.name, _mag_approx=mag).calc_all(v, op)
if __name__ == '__main__':
main()
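
# Running the script directly executes every report enabled in main(); the
# command-line flags are derived from the varParam names (assumption:
# srwl_uti_parse_options exposes each name as a '--name' option), e.g.:
#
#     python nsls-ii-esm-beamline.py --ss_ef 2000.0 --ws_pl xy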
|
{
"content_hash": "df183b2cc66372d155ada5d1c3ebbfc4",
"timestamp": "",
"source": "github",
"line_count": 560,
"max_line_length": 440,
"avg_line_length": 60.198214285714286,
"alnum_prop": 0.5943163952419092,
"repo_name": "mrakitin/sirepo",
"id": "0a9930b68c1383fd9e0be6272a7b67f150e5de59",
"size": "33733",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/template/srw_import_data/nsls-ii-esm-beamline.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "152"
},
{
"name": "CSS",
"bytes": "261041"
},
{
"name": "HTML",
"bytes": "335926"
},
{
"name": "JavaScript",
"bytes": "2681704"
},
{
"name": "Opal",
"bytes": "38855"
},
{
"name": "Python",
"bytes": "1949902"
},
{
"name": "Shell",
"bytes": "17536"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.conf import settings
import django.contrib.auth.models
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
('username', models.CharField(max_length=256, unique=True, verbose_name='Username')),
('name', models.TextField(blank=True, max_length=100, null=True, verbose_name='Name')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
],
options={
'verbose_name': 'User',
'verbose_name_plural': 'Users',
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.CreateModel(
name='Mailbox',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=256, verbose_name='Name')),
('uri', models.CharField(blank=True, default=None, help_text='Example: imap+ssl://myusername:mypassword@someserver', max_length=256, null=True, verbose_name='URI')),
('from_email', models.CharField(blank=True, default=None, max_length=255, null=True, verbose_name='From email')),
('active', models.BooleanField(default=True, verbose_name='Active')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Message',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('from_address', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=[], verbose_name='Sent from')),
('to_address', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=[], verbose_name='Sent to')),
('cc_address', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=[], verbose_name='CC')),
('bcc_address', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=[], verbose_name='BCC')),
('reply_to', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True, verbose_name='Reply-To')),
('in_reply_to', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True, verbose_name='In reply to')),
('headers', django.contrib.postgres.fields.jsonb.JSONField(blank=True, default={}, verbose_name='Headers')),
('subject', models.CharField(max_length=255, verbose_name='Subject')),
('original', models.TextField(verbose_name='Original (raw) text')),
('plain_body', models.TextField(blank=True, verbose_name='Text')),
('html_body', models.TextField(blank=True, verbose_name='HTML')),
('date', models.DateTimeField(blank=True, verbose_name='Date')),
('message_id', models.CharField(max_length=998)),
('references', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True, verbose_name='References')),
],
),
migrations.CreateModel(
name='Thread',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('subject', models.CharField(max_length=256)),
('mailbox', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='threads', to='mailme.Mailbox')),
],
),
migrations.AddField(
model_name='message',
name='thread',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='messages', to='mailme.Thread'),
),
]
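
# Applying this initial migration follows the standard Django workflow (the
# 'mailme' app label comes from the ForeignKey targets above):
#
#     python manage.py migrate mailme 0001_initial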
|
{
"content_hash": "9986e55799416440cff13224b7704542",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 203,
"avg_line_length": 59.96470588235294,
"alnum_prop": 0.6156562683931724,
"repo_name": "mailme/mailme",
"id": "e88809b19a66936b2fa7cae1bb7150d06da51047",
"size": "5169",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/mailme/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "2075"
},
{
"name": "Python",
"bytes": "82469"
},
{
"name": "Shell",
"bytes": "1066"
}
],
"symlink_target": ""
}
|
import numpy as np
import unittest
import sys
sys.path.append('..')
from op_test import OpTest
import paddle.fluid as fluid
import paddle
paddle.enable_static()
class TestStackOpBase(OpTest):
def initDefaultParameters(self):
self.num_inputs = 4
self.input_dim = (5, 6, 7)
self.axis = 0
def initParameters(self):
pass
def get_x_names(self):
x_names = []
for i in range(self.num_inputs):
x_names.append('x{}'.format(i))
return x_names
def setUp(self):
self.initDefaultParameters()
self.initParameters()
self.op_type = 'stack'
self.set_mlu()
self.init_dtype()
self.x = []
for i in range(self.num_inputs):
self.x.append(
np.random.random(size=self.input_dim).astype(self.dtype)
)
tmp = []
x_names = self.get_x_names()
for i in range(self.num_inputs):
tmp.append((x_names[i], self.x[i]))
self.inputs = {'X': tmp}
self.outputs = {'Y': np.stack(self.x, axis=self.axis)}
self.attrs = {'axis': self.axis}
def set_mlu(self):
self.__class__.use_mlu = True
self.place = paddle.MLUPlace(0)
self.__class__.no_need_check_grad = True
def init_dtype(self):
self.dtype = np.float32
def test_check_output(self):
self.check_output_with_place(self.place)
class TestStackOp1(TestStackOpBase):
def initParameters(self):
self.num_inputs = 16
class TestStackOp2(TestStackOpBase):
def initParameters(self):
self.num_inputs = 20
class TestStackOp3(TestStackOpBase):
def initParameters(self):
self.axis = -1
class TestStackOp4(TestStackOpBase):
def initParameters(self):
self.axis = -4
class TestStackOp5(TestStackOpBase):
def initParameters(self):
self.axis = 1
class TestStackOp6(TestStackOpBase):
def initParameters(self):
self.axis = 3
class TestStackOpINT32(TestStackOpBase):
def init_dtype(self):
self.dtype = np.int32
class TestStackOpINT64(TestStackOpBase):
def init_dtype(self):
self.dtype = np.int64
class TestStackOpHalf(TestStackOpBase):
def init_dtype(self):
self.dtype = np.float16
class API_test(unittest.TestCase):
def test_out(self):
with fluid.program_guard(fluid.Program(), fluid.Program()):
data1 = fluid.layers.data('data1', shape=[1, 2], dtype='float32')
data2 = fluid.layers.data('data2', shape=[1, 2], dtype='float32')
data3 = fluid.layers.data('data3', shape=[1, 2], dtype='float32')
result_stack = paddle.stack([data1, data2, data3], axis=0)
place = paddle.MLUPlace(0)
exe = fluid.Executor(place)
input1 = np.random.random([1, 2]).astype('float32')
input2 = np.random.random([1, 2]).astype('float32')
input3 = np.random.random([1, 2]).astype('float32')
(result,) = exe.run(
feed={"data1": input1, "data2": input2, "data3": input3},
fetch_list=[result_stack],
)
expected_result = np.stack([input1, input2, input3], axis=0)
np.testing.assert_allclose(expected_result, result)
def test_single_tensor_error(self):
with fluid.program_guard(fluid.Program(), fluid.Program()):
x = paddle.rand([2, 3])
self.assertRaises(TypeError, paddle.stack, x)
class API_DygraphTest(unittest.TestCase):
def test_out(self):
data1 = np.array([[1.0, 2.0]]).astype("float32")
data2 = np.array([[3.0, 4.0]]).astype("float32")
data3 = np.array([[5.0, 6.0]]).astype("float32")
with fluid.dygraph.guard(place=paddle.MLUPlace(0)):
x1 = fluid.dygraph.to_variable(data1)
x2 = fluid.dygraph.to_variable(data2)
x3 = fluid.dygraph.to_variable(data3)
result = paddle.stack([x1, x2, x3])
result_np = result.numpy()
expected_result = np.stack([data1, data2, data3])
np.testing.assert_allclose(expected_result, result_np)
with fluid.dygraph.guard(place=paddle.MLUPlace(0)):
y1 = fluid.dygraph.to_variable(data1)
result = paddle.stack([y1], axis=0)
result_np_2 = result.numpy()
expected_result_2 = np.stack([data1], axis=0)
np.testing.assert_allclose(expected_result_2, result_np_2)
def test_single_tensor_error(self):
with fluid.dygraph.guard(place=paddle.MLUPlace(0)):
x = paddle.to_tensor([1, 2, 3])
self.assertRaises(Exception, paddle.stack, x)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "8790d8ecdf49fac353e676434630ad21",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 77,
"avg_line_length": 29.79245283018868,
"alnum_prop": 0.5938357610301879,
"repo_name": "PaddlePaddle/Paddle",
"id": "eefe1d7d691fcdc2325637d38349a50dc58f6c08",
"size": "5348",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/mlu/test_stack_op_mlu.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36848680"
},
{
"name": "CMake",
"bytes": "902619"
},
{
"name": "Cuda",
"bytes": "5227207"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36203874"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553177"
}
],
"symlink_target": ""
}
|
import gevent
from gevent import monkey
monkey.patch_all()
import ConfigParser
import bottle
import time
import base64
try:
from keystoneclient.middleware import auth_token
except Exception:
pass
from pysandesh.gen_py.sandesh.ttypes import SandeshLevel
class UnknownAuthMethod(Exception):
    # Minimal definition assumed here: the original module raises this
    # exception without importing or defining it, which would fail with a
    # NameError at runtime.
    pass


# Open port for access to API server for trouble shooting
class LocalAuth(object):
def __init__(self, app, conf_info):
self._http_host = 'localhost'
self._http_port = conf_info['admin_port']
self._http_app = bottle.Bottle()
self._http_app.merge(app.routes)
self._http_app.config.auth_open = True
self._conf_info = conf_info
        # Two decorators below due to an API change between bottle 0.11.6
        # (which insists on the global app) and later versions (which hook
        # on a specific app).
@self._http_app.hook('before_request')
@bottle.hook('before_request')
def local_auth_check(*args, **kwargs):
if bottle.request.app != self._http_app:
return
# expect header to have something like 'Basic YWJjOmRlZg=='
auth_hdr_val = bottle.request.environ.get('HTTP_AUTHORIZATION')
if not auth_hdr_val:
bottle.abort(401, 'HTTP_AUTHORIZATION header missing')
            try:
                auth_type, enc_user_passwd = auth_hdr_val.split()
            except Exception as e:
                bottle.abort(401, 'Auth Exception: %s' % (str(e)))
            user_passwd = base64.b64decode(enc_user_passwd)
user, passwd = user_passwd.split(':')
if (not self._conf_info.get('admin_user') == user or
not self._conf_info.get('admin_password') == passwd):
bottle.abort(401, 'Authentication check failed')
# Add admin role to the request
bottle.request.environ['HTTP_X_ROLE'] = 'admin'
# end __init__
def start_http_server(self):
self._http_app.run(
host=self._http_host, port=self._http_port, server='gevent')
# end start_http_server
# end class LocalAuth
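# Illustrative client sketch (not from the original source): LocalAuth above
# expects an HTTP basic auth header on the troubleshooting port. The
# credentials and port below are placeholders; real values come from the
# conf_info dict passed to LocalAuth.
#
#   import base64
#   import requests
#   creds = base64.b64encode('admin_user:admin_password')
#   requests.get('http://localhost:8095/',
#                headers={'Authorization': 'Basic ' + creds})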
# Pre-auth filter
class AuthPreKeystone(object):
def __init__(self, app, conf, multi_tenancy):
self.app = app
self.conf = conf
self.mt = multi_tenancy
def get_mt(self):
return self.mt
def set_mt(self, value):
self.mt = value
def __call__(self, env, start_response):
app = self.app if self.mt else bottle.app()
return app(env, start_response)
# Post-auth filter. Normalize user/role supplied by quantum plugin for
# consumption by Perms
class AuthPostKeystone(object):
def __init__(self, app, conf):
self.app = app
self.conf = conf
def __call__(self, env, start_response):
"""
# Following will be brought back after RBAC refactoring
# todo validate request is from quantum plugin
# X-Api-User-id and X-Api-Role supplied by Quantum.
# Note that Quantum sends admin token
if 'HTTP_X_API_USER_ID' in env:
env['HTTP_X_USER'] = self.conf[
'auth_svc'].user_id_to_name(env['HTTP_X_API_USER_ID'])
elif 'HTTP_X_API_USER' in env:
env['HTTP_X_USER'] = env['HTTP_X_API_USER']
if 'HTTP_X_API_ROLE' in env:
env['HTTP_X_ROLE'] = env['HTTP_X_API_ROLE']
"""
# only allow admin access when MT is on
roles = []
if 'HTTP_X_ROLE' in env:
roles = env['HTTP_X_ROLE'].split(',')
        if 'admin' not in [x.lower() for x in roles]:
resp = auth_token.MiniResp('Permission Denied', env)
start_response('403 Permission Denied', resp.headers)
return resp.body
return self.app(env, start_response)
class AuthServiceKeystone(object):
def __init__(self, server_mgr, args):
self._conf_info = {
'auth_host': args.auth_host,
'auth_port': args.auth_port,
'auth_protocol': args.auth_protocol,
'admin_user': args.admin_user,
'admin_password': args.admin_password,
'admin_tenant_name': args.admin_tenant_name,
'admin_port': args.admin_port,
}
self._server_mgr = server_mgr
self._auth_method = args.auth
self._multi_tenancy = args.multi_tenancy
self._auth_token = None
self._auth_middleware = None
if not self._auth_method:
return
if self._auth_method != 'keystone':
raise UnknownAuthMethod()
# map keystone id to users. Needed for quantum plugin because contrail
# plugin doesn't have access to user token and ends up sending admin
# admin token along with user-id and role
self._ks_users = {}
# configure memcache if enabled
if self._multi_tenancy and 'memcache_servers' in args:
self._conf_info[
'memcache_servers'] = args.memcache_servers.split(',')
if 'token_cache_time' in args:
self._conf_info['token_cache_time'] = args.token_cache_time
# end __init__
def json_request(self, method, path, retry_after_authn=False):
if self._auth_token is None or self._auth_middleware is None:
return {}
headers = {'X-Auth-Token': self._auth_token}
response, data = self._auth_middleware._json_request(
method, path, additional_headers=headers)
try:
status_code = response.status_code
except AttributeError:
status_code = response.status
# avoid multiple reauth
if ((status_code == 401) and (not retry_after_authn)):
try:
self._auth_token = self._auth_middleware.get_admin_token()
return self.json_request(method, path, retry_after_authn=True)
except Exception as e:
self._server_mgr.config_log(
"Error in getting admin token from keystone: " + str(e),
level=SandeshLevel.SYS_WARN)
return {}
return data if status_code == 200 else {}
# end json_request
def get_projects(self):
return self.json_request('GET', '/v2.0/tenants')
# end get_projects
def get_middleware_app(self):
if not self._auth_method:
return None
if not self._multi_tenancy:
return None
# keystone middleware is needed for fetching objects
# app = bottle.app()
app = AuthPostKeystone(bottle.app(), {'auth_svc': self})
auth_middleware = auth_token.AuthProtocol(app, self._conf_info)
self._auth_middleware = auth_middleware
while True:
try:
self._auth_token = auth_middleware.get_admin_token()
break
            except auth_token.ServiceError as e:
                msg = "Error in getting admin token: " + str(e)
                self._server_mgr.config_log(msg, level=SandeshLevel.SYS_WARN)
                time.sleep(2)
self._server_mgr.config_log("Auth token fetched from keystone.",
level=SandeshLevel.SYS_NOTICE)
# open access for troubleshooting
admin_port = self._conf_info['admin_port']
self._local_auth_app = LocalAuth(bottle.app(), self._conf_info)
gevent.spawn(self._local_auth_app.start_http_server)
        # allow multi tenancy to be updated dynamically
app = AuthPreKeystone(
auth_middleware,
{'admin_token': self._auth_token},
self._multi_tenancy)
return app
# end get_middleware_app
    def verify_signed_token(self, user_token):
        try:
            return self._auth_middleware.verify_signed_token(user_token)
        except Exception:
            return None
# end
# convert keystone user id to name
def user_id_to_name(self, id):
if id in self._ks_users:
return self._ks_users[id]
# fetch from keystone
content = self.json_request('GET', '/v2.0/users')
if 'users' in content:
self._ks_users = dict((user['id'], user['name'])
for user in content['users'])
# check it again
if id in self._ks_users:
return self._ks_users[id]
else:
return ''
# end user_id_to_name
# end class AuthService
|
{
"content_hash": "90125e8945ee83a77ee6d64377e9ac97",
"timestamp": "",
"source": "github",
"line_count": 249,
"max_line_length": 78,
"avg_line_length": 33.6425702811245,
"alnum_prop": 0.5776530977676972,
"repo_name": "cloudwatt/contrail-controller",
"id": "b4e15f9c0a5624001f2afa3104fa11fa76db71c3",
"size": "8642",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/config/api-server/vnc_auth_keystone.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "80579"
},
{
"name": "C",
"bytes": "44989"
},
{
"name": "C++",
"bytes": "14908777"
},
{
"name": "CSS",
"bytes": "531"
},
{
"name": "Java",
"bytes": "171966"
},
{
"name": "Lua",
"bytes": "8164"
},
{
"name": "Makefile",
"bytes": "12449"
},
{
"name": "Objective-C",
"bytes": "720"
},
{
"name": "Protocol Buffer",
"bytes": "1120"
},
{
"name": "Python",
"bytes": "3057429"
},
{
"name": "Shell",
"bytes": "54611"
},
{
"name": "Thrift",
"bytes": "40763"
}
],
"symlink_target": ""
}
|
"""\
===========================
Simple Pygame drawing board
===========================
A simple drawing board for the pygame display service.
Use your left mouse button to draw to the board and the
right to erase your artwork.
"""
import pygame
import Axon
from Axon.Ipc import producerFinished, shutdownMicroprocess
from Kamaelia.UI.PygameDisplay import PygameDisplay
class MagnaDoodle(Axon.Component.component):
"""\
MagnaDoodle(...) -> A new MagnaDoodle component.
A simple drawing board for the pygame display service.
(this component and its documentation are heavily based on Kamaelia.UI.Pygame.Button)
Keyword arguments:
- position -- (x,y) position of top left corner in pixels
- margin -- pixels margin between caption and button edge (default=8)
- bgcolour -- (r,g,b) fill colour (default=(124,124,124))
- fgcolour -- (r,g,b) text colour (default=(0,0,0))
- transparent -- draw background transparent if True (default=False)
- size -- (w,h) in pixels (default=(200,200))
"""
Inboxes = { "inbox" : "Receive events from PygameDisplay",
"control" : "For shutdown messages",
"callback" : "Receive callbacks from PygameDisplay"
}
Outboxes = { "outbox" : "not used",
"signal" : "For shutdown messages",
"display_signal" : "Outbox used for communicating to the display surface" }
def __init__(self, caption=None, position=None, margin=8, bgcolour = (124,124,124), fgcolour = (0,0,0), msg=None,
transparent = False, size=(200,200)):
"""x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
super(MagnaDoodle,self).__init__()
self.backgroundColour = bgcolour
self.foregroundColour = fgcolour
self.margin = margin
self.oldpos = None
self.drawing = False
### print "KEY",key
self.size = size
self.innerRect = pygame.Rect(10, 10, self.size[0]-20, self.size[1]-20)
if msg is None:
msg = ("CLICK", self.id)
self.eventMsg = msg
if transparent:
transparency = bgcolour
else:
transparency = None
self.disprequest = { "DISPLAYREQUEST" : True,
"callback" : (self,"callback"),
"events" : (self, "inbox"),
"size": self.size,
"transparency" : transparency }
        if position is not None:
self.disprequest["position"] = position
def waitBox(self,boxname):
"""Generator. yields 1 until data ready on the named inbox."""
waiting = True
while waiting:
if self.dataReady(boxname): return
else: yield 1
def drawBG(self):
self.display.fill( (255,0,0) )
self.display.fill( self.backgroundColour, self.innerRect )
def main(self):
"""Main loop."""
displayservice = PygameDisplay.getDisplayService()
self.link((self,"display_signal"), displayservice)
self.send( self.disprequest,
"display_signal")
for _ in self.waitBox("callback"): yield 1
self.display = self.recv("callback")
self.drawBG()
self.blitToSurface()
self.send({ "ADDLISTENEVENT" : pygame.MOUSEBUTTONDOWN,
"surface" : self.display},
"display_signal")
self.send({ "ADDLISTENEVENT" : pygame.MOUSEBUTTONUP,
"surface" : self.display},
"display_signal")
self.send({ "ADDLISTENEVENT" : pygame.MOUSEMOTION,
"surface" : self.display},
"display_signal")
done = False
while not done:
while self.dataReady("control"):
cmsg = self.recv("control")
if isinstance(cmsg, producerFinished) or isinstance(cmsg, shutdownMicroprocess):
self.send(cmsg, "signal")
done = True
while self.dataReady("inbox"):
for event in self.recv("inbox"):
if event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1:
self.drawing = True
elif event.button == 3:
self.oldpos = None
self.drawBG()
self.blitToSurface()
elif event.type == pygame.MOUSEBUTTONUP and event.button == 1:
self.drawing = False
self.oldpos = None
elif event.type == pygame.MOUSEMOTION:
# print "BUTTON", event.button
if self.drawing and self.innerRect.collidepoint(*event.pos):
                            if self.oldpos is None:
self.oldpos = event.pos
else:
pygame.draw.line(self.display, (0,0,0), self.oldpos, event.pos, 3)
self.oldpos = event.pos
self.blitToSurface()
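            # Axon idiom (added note): pause() puts this component to sleep
            # until the scheduler wakes it when a new message arrives on an
            # inbox, so the loop below does not busy-wait.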
self.pause()
yield 1
def blitToSurface(self):
self.send({"REDRAW":True, "surface":self.display}, "display_signal")
__kamaelia_components__ = ( MagnaDoodle, )
if __name__ == "__main__":
from Kamaelia.Util.ConsoleEcho import consoleEchoer
from pygame.locals import *
Magna = MagnaDoodle().activate()
Axon.Scheduler.scheduler.run.runThreads()
# Licensed to the BBC under a Contributor Agreement: THF
|
{
"content_hash": "4f4d727096eb2a89c54c7545a85bffc6",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 116,
"avg_line_length": 34.701863354037265,
"alnum_prop": 0.5462681224270628,
"repo_name": "bbc/kamaelia",
"id": "ee62730ca0a7b0770304ae9dfeb6e99546076bc1",
"size": "6493",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "Sketches/CL/Topology3D/THF/Kamaelia/UI/Pygame/MagnaDoodle.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "62985"
},
{
"name": "C",
"bytes": "212854"
},
{
"name": "C++",
"bytes": "327546"
},
{
"name": "CSS",
"bytes": "114434"
},
{
"name": "ChucK",
"bytes": "422"
},
{
"name": "Diff",
"bytes": "483"
},
{
"name": "Gettext Catalog",
"bytes": "3919909"
},
{
"name": "HTML",
"bytes": "1288960"
},
{
"name": "Java",
"bytes": "31832"
},
{
"name": "JavaScript",
"bytes": "829491"
},
{
"name": "Makefile",
"bytes": "5768"
},
{
"name": "NSIS",
"bytes": "18867"
},
{
"name": "PHP",
"bytes": "49059"
},
{
"name": "Perl",
"bytes": "31234"
},
{
"name": "Processing",
"bytes": "2885"
},
{
"name": "Pure Data",
"bytes": "7485482"
},
{
"name": "Python",
"bytes": "18896320"
},
{
"name": "Ruby",
"bytes": "4165"
},
{
"name": "Shell",
"bytes": "711244"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from Tkinter import TclError
class WidgetRedirector:
"""Support for redirecting arbitrary widget subcommands.
Some Tk operations don't normally pass through tkinter. For example, if a
character is inserted into a Text widget by pressing a key, a default Tk
binding to the widget's 'insert' operation is activated, and the Tk library
processes the insert without calling back into tkinter.
Although a binding to <Key> could be made via tkinter, what we really want
to do is to hook the Tk 'insert' operation itself. For one thing, we want
a text.insert call in idle code to have the same effect as a key press.
When a widget is instantiated, a Tcl command is created whose name is the
same as the pathname widget._w. This command is used to invoke the various
widget operations, e.g. insert (for a Text widget). We are going to hook
this command and provide a facility ('register') to intercept the widget
operation. We will also intercept method calls on the Tkinter class
instance that represents the tk widget.
In IDLE, WidgetRedirector is used in Percolator to intercept Text
commands. The function being registered provides access to the top
of a Percolator chain. At the bottom of the chain is a call to the
original Tk widget operation.
"""
def __init__(self, widget):
'''Initialize attributes and setup redirection.
_operations: dict mapping operation name to new function.
widget: the widget whose tcl command is to be intercepted.
tk: widget.tk, a convenience attribute, probably not needed.
orig: new name of the original tcl command.
Since renaming to orig fails with TclError when orig already
        exists, only one WidgetRedirector can exist for a given widget.
'''
self._operations = {}
self.widget = widget # widget instance
self.tk = tk = widget.tk # widget's root
w = widget._w # widget's (full) Tk pathname
self.orig = w + "_orig"
# Rename the Tcl command within Tcl:
tk.call("rename", w, self.orig)
# Create a new Tcl command whose name is the widget's pathname, and
# whose action is to dispatch on the operation passed to the widget:
tk.createcommand(w, self.dispatch)
def __repr__(self):
return "WidgetRedirector(%s<%s>)" % (self.widget.__class__.__name__,
self.widget._w)
def close(self):
"Unregister operations and revert redirection created by .__init__."
for operation in list(self._operations):
self.unregister(operation)
widget = self.widget
tk = widget.tk
w = widget._w
# Restore the original widget Tcl command.
tk.deletecommand(w)
tk.call("rename", self.orig, w)
del self.widget, self.tk # Should not be needed
# if instance is deleted after close, as in Percolator.
def register(self, operation, function):
'''Return OriginalCommand(operation) after registering function.
Registration adds an operation: function pair to ._operations.
It also adds a widget function attribute that masks the Tkinter
class instance method. Method masking operates independently
from command dispatch.
If a second function is registered for the same operation, the
first function is replaced in both places.
'''
self._operations[operation] = function
setattr(self.widget, operation, function)
return OriginalCommand(self, operation)
def unregister(self, operation):
'''Return the function for the operation, or None.
Deleting the instance attribute unmasks the class attribute.
'''
if operation in self._operations:
function = self._operations[operation]
del self._operations[operation]
try:
delattr(self.widget, operation)
except AttributeError:
pass
return function
else:
return None
def dispatch(self, operation, *args):
'''Callback from Tcl which runs when the widget is referenced.
If an operation has been registered in self._operations, apply the
associated function to the args passed into Tcl. Otherwise, pass the
operation through to Tk via the original Tcl function.
Note that if a registered function is called, the operation is not
passed through to Tk. Apply the function returned by self.register()
to *args to accomplish that. For an example, see ColorDelegator.py.
'''
m = self._operations.get(operation)
try:
if m:
return m(*args)
else:
return self.tk.call((self.orig, operation) + args)
except TclError:
return ""
class OriginalCommand:
'''Callable for original tk command that has been redirected.
Returned by .register; can be used in the function registered.
redir = WidgetRedirector(text)
def my_insert(*args):
print("insert", args)
original_insert(*args)
original_insert = redir.register("insert", my_insert)
'''
def __init__(self, redir, operation):
'''Create .tk_call and .orig_and_operation for .__call__ method.
.redir and .operation store the input args for __repr__.
.tk and .orig copy attributes of .redir (probably not needed).
'''
self.redir = redir
self.operation = operation
self.tk = redir.tk # redundant with self.redir
self.orig = redir.orig # redundant with self.redir
# These two could be deleted after checking recipient code.
self.tk_call = redir.tk.call
self.orig_and_operation = (redir.orig, operation)
def __repr__(self):
return "OriginalCommand(%r, %r)" % (self.redir, self.operation)
def __call__(self, *args):
return self.tk_call(self.orig_and_operation + args)
def _widget_redirector(parent): # htest #
from Tkinter import Tk, Text
import re
root = Tk()
root.title("Test WidgetRedirector")
width, height, x, y = list(map(int, re.split('[x+]', parent.geometry())))
root.geometry("+%d+%d"%(x, y + 150))
text = Text(root)
text.pack()
text.focus_set()
redir = WidgetRedirector(text)
def my_insert(*args):
print("insert", args)
original_insert(*args)
original_insert = redir.register("insert", my_insert)
root.mainloop()
if __name__ == "__main__":
import unittest
unittest.main('idlelib.idle_test.test_widgetredir',
verbosity=2, exit=False)
from idlelib.idle_test.htest import run
run(_widget_redirector)
|
{
"content_hash": "189c3969c457dcd6cbac88ada5c58d12",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 79,
"avg_line_length": 39.462857142857146,
"alnum_prop": 0.6397335650159282,
"repo_name": "tequa/ammisoft",
"id": "54431f7382536bb91fdc6022d30e3e7027590de5",
"size": "6906",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Lib/idlelib/WidgetRedirector.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "9595"
},
{
"name": "C",
"bytes": "715524"
},
{
"name": "C#",
"bytes": "8440"
},
{
"name": "C++",
"bytes": "59535"
},
{
"name": "CSS",
"bytes": "5382"
},
{
"name": "F#",
"bytes": "2310"
},
{
"name": "Forth",
"bytes": "506"
},
{
"name": "Fortran",
"bytes": "67146"
},
{
"name": "GLSL",
"bytes": "1040"
},
{
"name": "HTML",
"bytes": "46416"
},
{
"name": "Inno Setup",
"bytes": "1227"
},
{
"name": "JavaScript",
"bytes": "24663"
},
{
"name": "Jupyter Notebook",
"bytes": "629939"
},
{
"name": "Makefile",
"bytes": "895"
},
{
"name": "Mask",
"bytes": "969"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "PowerShell",
"bytes": "3673"
},
{
"name": "Python",
"bytes": "29200377"
},
{
"name": "Shell",
"bytes": "6905"
},
{
"name": "Tcl",
"bytes": "2124176"
},
{
"name": "Visual Basic",
"bytes": "2144"
}
],
"symlink_target": ""
}
|
import sys
import Queue
class Link():
    ''' Dict-backed singly linked list that silently ignores duplicate items. '''
def __init__(self):
self.map = {}
self.tail = "head"
self.map["head"] = {"stat": 0, "next": "null"}
def __contains__(self, key):
return key in self.map
def __len__(self):
return len(self.map) - 1
    def isEmpty(self):
        return self.getHead() == "null"
def clearLink(self):
self.map.clear()
def getTail(self):
return self.tail
def getHead(self):
return self.map["head"]["next"]
def add(self, string):
args = string.split('\t')
item = args[0]
stat = args[1]
if item not in self.map:
self.map[item] = {"stat": stat, "next": "null"}
self.map[self.tail]["next"] = item
self.tail = item
def pop(self):
if not self.isEmpty():
head_task = self.map["head"]["next"]
rt_value = "%s\t%s" % (head_task, self.map[head_task]["stat"])
self.map["head"]["next"] = self.map[head_task]["next"]
del self.map[head_task]
if head_task == self.tail:
self.tail = "head"
return rt_value
return None
def test_output(self, name=""):
print >> sys.stderr, name
print >> sys.stderr, "-" * 10 + "TEST_OUTPUT" + "-" * 10
print >> sys.stderr, "Tail: %s\nHead: %s\nLength: %s" % (self.getTail(), self.getHead(), self.__len__())
head = "head"
while head != "null":
print >> sys.stderr, "%s\t%s\t%s" % (head, self.map[head]["stat"], self.map[head]["next"])
head = self.map[head]["next"]
print >> sys.stderr, "-" * 31
class OrderedMapQueue(Queue.Queue):
''' ordered-map queue '''
def _init(self, maxsize=0):
self.queue = Link()
def _put(self, item):
self.queue.add(item)
def _get(self):
return self.queue.pop()
def _qsize(self):
return self.queue.__len__()
if __name__ == "__main__":
myqueue = OrderedMapQueue()
myqueue.put("task2\t-2")
myqueue.put("task3\t-1")
myqueue.put("task1\t-2")
myqueue.put("task3\t-1")
myqueue.put("task3\t-2")
myqueue.queue.test_output()
print myqueue.get()
myqueue.queue.test_output()
print myqueue.get()
myqueue.queue.test_output()
print myqueue.get()
myqueue.queue.test_output()
|
{
"content_hash": "271636d431d9b8953c4030f1639ee35c",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 112,
"avg_line_length": 26.26595744680851,
"alnum_prop": 0.5180234912920211,
"repo_name": "turnyouon/Inforobot",
"id": "2e3c572f8dd66e6476d44946d4d1e40e38ce5297",
"size": "2528",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "basic/seeder/queue.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2528"
}
],
"symlink_target": ""
}
|
from calvin.actor.actor import Actor, manage, condition
import time # NEVER DO THIS OUTSIDE OF TEST
class Burn(Actor):
"""
forward a token unchanged and Burns cycles
Inputs:
token : a token
Outputs:
token : the same token
"""
@manage(['dump', 'last', 'duration'])
def init(self, dump=False, duration=0.1):
self.dump = dump
self.last = None
self.duration = duration
def log(self, data):
print "%s<%s,%s>: %s" % (self.__class__.__name__, self.name, self.id, data)
@condition(['token'], ['token'])
def donothing(self, input):
if self.dump:
self.log(input)
self.last = input
# Burn cycles until duration passed
t = time.time()
while time.time() - t < self.duration:
pass
return (input, )
def report(self, **kwargs):
self.duration = kwargs.get('duration', self.duration)
return self.last
action_priority = (donothing, )
test_set = [
{
'setup': [lambda self: self.init(duration=0.0001)],
'inports': {'token': [1, 2, 3]},
'outports': {'token': [1, 2, 3]}
}
]
|
{
"content_hash": "edb1e978a603961c8fbde1c93c1281d6",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 83,
"avg_line_length": 27.25,
"alnum_prop": 0.5387823185988324,
"repo_name": "EricssonResearch/calvin-base",
"id": "cc5055a84f790df0c9215746353f96875f41f129",
"size": "1804",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "calvin/actorstore/systemactors/std/Burn.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "769"
},
{
"name": "Dockerfile",
"bytes": "612"
},
{
"name": "HTML",
"bytes": "24571"
},
{
"name": "JavaScript",
"bytes": "78325"
},
{
"name": "Makefile",
"bytes": "816"
},
{
"name": "Python",
"bytes": "3291484"
},
{
"name": "Shell",
"bytes": "37140"
}
],
"symlink_target": ""
}
|
from ._azure_dev_ops_connector_operations import AzureDevOpsConnectorOperations
from ._azure_dev_ops_repo_operations import AzureDevOpsRepoOperations
from ._azure_dev_ops_connector_stats_operations import AzureDevOpsConnectorStatsOperations
from ._azure_dev_ops_org_operations import AzureDevOpsOrgOperations
from ._azure_dev_ops_project_operations import AzureDevOpsProjectOperations
from ._git_hub_connector_operations import GitHubConnectorOperations
from ._git_hub_repo_operations import GitHubRepoOperations
from ._git_hub_connector_stats_operations import GitHubConnectorStatsOperations
from ._git_hub_owner_operations import GitHubOwnerOperations
from ._operations import Operations
from ._patch import __all__ as _patch_all
from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"AzureDevOpsConnectorOperations",
"AzureDevOpsRepoOperations",
"AzureDevOpsConnectorStatsOperations",
"AzureDevOpsOrgOperations",
"AzureDevOpsProjectOperations",
"GitHubConnectorOperations",
"GitHubRepoOperations",
"GitHubConnectorStatsOperations",
"GitHubOwnerOperations",
"Operations",
]
__all__.extend([p for p in _patch_all if p not in __all__])
_patch_sdk()
|
{
"content_hash": "d28cf3565a3b339c8302a134baf69328",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 90,
"avg_line_length": 43.86206896551724,
"alnum_prop": 0.7932389937106918,
"repo_name": "Azure/azure-sdk-for-python",
"id": "4d9878888d0b0585c4a662fc18f7a93cd8c46b62",
"size": "1740",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "sdk/securitydevops/azure-mgmt-securitydevops/azure/mgmt/securitydevops/aio/operations/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
import json
import re
from multiprocessing.pool import Pool
import requests
__author__ = 'Jeff.West@yahoo.com'
# This script iterates an Elasticsearch index and issues a no-op PUT to each entity to force it to be reindexed
index_url_template = 'http://elasticsearch013wo:9200/{index_name}/_search?size={size}&from={from_var}'
index_names = [
'es-index-name'
]
baas_url = 'http://localhost:8080/org/{app_id}/{collection}/{entity_id}'
counter = 0
size = 1000
total_docs = 167501577
from_var = 0
page = 0
work_items = []
def work(item):
    url = baas_url.format(
        app_id=item[0],
        collection=item[1],
        entity_id=item[2]
    )
r_put = requests.put(url, data=json.dumps({'russo': ''}))
if r_put.status_code == 200:
print '[%s]: %s' % (r_put.status_code, url)
    else:
        print '[%s]: %s | %s' % (r_put.status_code, url, r_put.text)
while from_var < total_docs:
from_var = page * size
page += 1
for index_name in index_names:
index_url = index_url_template.format(index_name=index_name, size=size, from_var=from_var)
print 'Getting URL: ' + index_url
r = requests.get(index_url)
if r.status_code != 200:
print r.text
exit()
response = r.json()
hits = response.get('hits', {}).get('hits')
        re_app_id = re.compile(r'appId\((.+),')
        re_ent_id = re.compile(r'entityId\((.+),')
        re_type = re.compile(r'entityId\(.+,(.+)\)')
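        # (added note) these patterns parse source fields shaped like
        # "appId(<uuid>,<name>)" and "entityId(<uuid>,<type>)", as implied by
        # the expressions above.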
print 'Index: %s | hits: %s' % (index_name, len(hits))
for hit_data in hits:
source = hit_data.get('_source')
application_id = source.get('applicationId')
app_id_find = re_app_id.findall(application_id)
if len(app_id_find) > 0:
app_id = app_id_find[0]
if app_id != '5f20f423-f2a8-11e4-a478-12a5923b55dc':
continue
entity_id_tmp = source.get('entityId')
entity_id_find = re_ent_id.findall(entity_id_tmp)
entity_type_find = re_type.findall(entity_id_tmp)
if len(entity_id_find) > 0 and len(entity_type_find) > 0:
entity_id = entity_id_find[0]
collection = entity_type_find[0]
if collection in ['logs', 'log']:
print 'skipping logs...'
continue
work_items.append((app_id, collection, entity_id))
counter += 1
pool = Pool(16)
print 'Work Items: %s' % len(work_items)
print 'Starting Work'
pool.map(work, work_items)
print 'done: %s' % counter
|
{
"content_hash": "75fc51a482b7f003872cb8fb933e6a8a",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 109,
"avg_line_length": 25.10091743119266,
"alnum_prop": 0.5515350877192983,
"repo_name": "mdunker/usergrid",
"id": "101fa980ee923bf8c8c36847b0301d243f6cf634",
"size": "3567",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "utils/usergrid-util-python/es_tools/es_index_iterator_reindexer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2211"
},
{
"name": "CSS",
"bytes": "252921"
},
{
"name": "GAP",
"bytes": "7673"
},
{
"name": "Gherkin",
"bytes": "260"
},
{
"name": "Groovy",
"bytes": "38652"
},
{
"name": "HTML",
"bytes": "2949624"
},
{
"name": "Java",
"bytes": "10860414"
},
{
"name": "JavaScript",
"bytes": "581093"
},
{
"name": "Nu",
"bytes": "8658"
},
{
"name": "Objective-C",
"bytes": "396026"
},
{
"name": "PHP",
"bytes": "441368"
},
{
"name": "Perl",
"bytes": "60137"
},
{
"name": "Python",
"bytes": "398058"
},
{
"name": "Ruby",
"bytes": "200736"
},
{
"name": "Scala",
"bytes": "185325"
},
{
"name": "Shell",
"bytes": "127130"
}
],
"symlink_target": ""
}
|
"""Python module for generating .ninja files.
Note that this is emphatically not a required piece of Ninja; it's
just a helpful utility for build-file-generation systems that already
use Python.
"""
import textwrap
def escape_path(word):
return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")
class Writer(object):
def __init__(self, output, width=78):
self.output = output
self.width = width
def newline(self):
self.output.write("\n")
def comment(self, text):
for line in textwrap.wrap(text, self.width - 2):
self.output.write("# " + line + "\n")
def variable(self, key, value, indent=0):
if value is None:
return
if isinstance(value, list):
value = " ".join(filter(None, value)) # Filter out empty strings.
self._line("%s = %s" % (key, value), indent)
def pool(self, name, depth):
self._line("pool %s" % name)
self.variable("depth", depth, indent=1)
def rule(
self,
name,
command,
description=None,
depfile=None,
generator=False,
pool=None,
restat=False,
rspfile=None,
rspfile_content=None,
deps=None,
):
self._line("rule %s" % name)
self.variable("command", command, indent=1)
if description:
self.variable("description", description, indent=1)
if depfile:
self.variable("depfile", depfile, indent=1)
if generator:
self.variable("generator", "1", indent=1)
if pool:
self.variable("pool", pool, indent=1)
if restat:
self.variable("restat", "1", indent=1)
if rspfile:
self.variable("rspfile", rspfile, indent=1)
if rspfile_content:
self.variable("rspfile_content", rspfile_content, indent=1)
if deps:
self.variable("deps", deps, indent=1)
def build(
self, outputs, rule, inputs=None, implicit=None, order_only=None, variables=None
):
outputs = self._as_list(outputs)
all_inputs = self._as_list(inputs)[:]
out_outputs = list(map(escape_path, outputs))
all_inputs = list(map(escape_path, all_inputs))
if implicit:
implicit = map(escape_path, self._as_list(implicit))
all_inputs.append("|")
all_inputs.extend(implicit)
if order_only:
order_only = map(escape_path, self._as_list(order_only))
all_inputs.append("||")
all_inputs.extend(order_only)
self._line(
"build %s: %s" % (" ".join(out_outputs), " ".join([rule] + all_inputs))
)
if variables:
if isinstance(variables, dict):
iterator = iter(variables.items())
else:
iterator = iter(variables)
for key, val in iterator:
self.variable(key, val, indent=1)
return outputs
def include(self, path):
self._line("include %s" % path)
def subninja(self, path):
self._line("subninja %s" % path)
def default(self, paths):
self._line("default %s" % " ".join(self._as_list(paths)))
def _count_dollars_before_index(self, s, i):
"""Returns the number of '$' characters right in front of s[i]."""
dollar_count = 0
dollar_index = i - 1
while dollar_index > 0 and s[dollar_index] == "$":
dollar_count += 1
dollar_index -= 1
return dollar_count
def _line(self, text, indent=0):
"""Write 'text' word-wrapped at self.width characters."""
leading_space = " " * indent
while len(leading_space) + len(text) > self.width:
# The text is too wide; wrap if possible.
# Find the rightmost space that would obey our width constraint and
# that's not an escaped space.
available_space = self.width - len(leading_space) - len(" $")
space = available_space
while True:
space = text.rfind(" ", 0, space)
if space < 0 or self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# No such space; just use the first unescaped space we can find.
space = available_space - 1
while True:
space = text.find(" ", space + 1)
if (
space < 0
or self._count_dollars_before_index(text, space) % 2 == 0
):
break
if space < 0:
# Give up on breaking.
break
self.output.write(leading_space + text[0:space] + " $\n")
text = text[space + 1 :]
# Subsequent lines are continuations, so indent them.
leading_space = " " * (indent + 2)
self.output.write(leading_space + text + "\n")
def _as_list(self, input):
if input is None:
return []
if isinstance(input, list):
return input
return [input]
def escape(string):
"""Escape a string such that it can be embedded into a Ninja file without
further interpretation."""
assert "\n" not in string, "Ninja syntax does not allow newlines"
# We only have one special metacharacter: '$'.
return string.replace("$", "$$")
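

# Minimal usage sketch (appended for illustration; not part of the original
# module, Python 3 assumed). Rule and file names below are placeholders.
if __name__ == "__main__":
    import io

    buf = io.StringIO()
    n = Writer(buf)
    n.comment("toy build file emitted with ninja_syntax.Writer")
    n.variable("cflags", "-O2 -Wall")
    n.rule("cc", command="gcc $cflags -c $in -o $out", description="CC $out")
    n.build("foo.o", "cc", inputs="foo.c")
    n.default("foo.o")
    print(buf.getvalue())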
|
{
"content_hash": "60cda58c1eaa9e4af189f52ac01781f7",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 88,
"avg_line_length": 32.40588235294118,
"alnum_prop": 0.5340352151025595,
"repo_name": "nodegit/node-gyp",
"id": "14212358082a622e7a49f187c338a502f952ce6d",
"size": "5650",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "gyp/pylib/gyp/ninja_syntax.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "397"
},
{
"name": "C",
"bytes": "820"
},
{
"name": "C++",
"bytes": "653"
},
{
"name": "Emacs Lisp",
"bytes": "14357"
},
{
"name": "JavaScript",
"bytes": "45123"
},
{
"name": "Python",
"bytes": "1188589"
},
{
"name": "Shell",
"bytes": "546"
}
],
"symlink_target": ""
}
|
'''
The MIT License (MIT)
Copyright (c) 2015-present Badoo Trading Limited.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import os
import platform
import shutil
git_hooks_dir = os.path.abspath(os.path.join(os.getcwd(), '.git/hooks'))
if not os.path.exists(git_hooks_dir):
    os.makedirs(git_hooks_dir)
    print 'Created', git_hooks_dir
pre_commit_hook_dst = os.path.join(git_hooks_dir, 'pre-commit')
if os.path.islink(pre_commit_hook_dst) or os.path.isfile(pre_commit_hook_dst):
print 'Removed previously installed hook'
os.remove(pre_commit_hook_dst)
scripts_dir = os.path.split(os.path.abspath(__file__))[0]
pre_commit_hook_src = os.path.join(scripts_dir, 'pre_commit.py')
if platform.system() == 'Windows':
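    # (added note) os.symlink is unavailable on Windows under Python 2, so
    # the hook is copied there instead of symlinked.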
shutil.copy(pre_commit_hook_src, pre_commit_hook_dst)
print 'Installed git hook into', pre_commit_hook_dst
else:
os.symlink(pre_commit_hook_src, pre_commit_hook_dst)
print 'Installed git hook into', pre_commit_hook_dst, '~>', pre_commit_hook_src
|
{
"content_hash": "bca58c8040c5c71fb21ae8d282d10ddd",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 80,
"avg_line_length": 45.627906976744185,
"alnum_prop": 0.7629969418960245,
"repo_name": "badoo/objective-c-style-guide",
"id": "ef70ea66581f0d28fdff3918f53c4f1ff50ef3f6",
"size": "1981",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/install_pre_commit_hook.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11733"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from .job import ImageInferenceJob
|
{
"content_hash": "dece3638f024c744b37f5d60d6e95477",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 38,
"avg_line_length": 25,
"alnum_prop": 0.8,
"repo_name": "TimZaman/DIGITS",
"id": "72cba967087695f2afdf197ee55443204d4fcc76",
"size": "139",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "digits/inference/images/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "4032"
},
{
"name": "HTML",
"bytes": "285736"
},
{
"name": "JavaScript",
"bytes": "45826"
},
{
"name": "Lua",
"bytes": "110640"
},
{
"name": "Makefile",
"bytes": "87"
},
{
"name": "Protocol Buffer",
"bytes": "384"
},
{
"name": "Python",
"bytes": "933415"
},
{
"name": "Shell",
"bytes": "12431"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("fluent_contents", "0001_initial")]
operations = [
migrations.CreateModel(
name="DisqusCommentsAreaItem",
fields=[
(
"contentitem_ptr",
models.OneToOneField(
parent_link=True,
on_delete=models.CASCADE,
auto_created=True,
primary_key=True,
serialize=False,
to="fluent_contents.ContentItem",
),
),
(
"allow_new",
models.BooleanField(
default=True, verbose_name="Allow posting new comments"
),
),
],
options={
"db_table": "contentitem_disquswidgets_disquscommentsareaitem",
"verbose_name": "Disqus comments area",
"verbose_name_plural": "Disqus comments areas",
},
bases=("fluent_contents.contentitem",),
)
]
|
{
"content_hash": "2c20fc25403fd09de84953f1cf8f2e6c",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 79,
"avg_line_length": 32,
"alnum_prop": 0.45032051282051283,
"repo_name": "edoburu/django-fluent-contents",
"id": "649bcfbd34e8fbd41c72ae51d8ef18a42677cb4e",
"size": "1272",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fluent_contents/plugins/disquswidgets/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13178"
},
{
"name": "HTML",
"bytes": "35807"
},
{
"name": "JavaScript",
"bytes": "80446"
},
{
"name": "Python",
"bytes": "494720"
}
],
"symlink_target": ""
}
|
from typing import Optional
# third party
from nacl.signing import VerifyKey
# relative
from ... import UID
from ......logger import info
from ....abstract.node import AbstractNode
from .simple_messages import NodeRunnableMessageWithReply
class DoesObjectExistMessage(NodeRunnableMessageWithReply):
__attr_allowlist__ = ["obj_id"]
def __init__(self, obj_id: UID) -> None:
self.obj_id = obj_id
def run(self, node: AbstractNode, verify_key: Optional[VerifyKey] = None) -> bool:
try:
return bool(node.store.get_or_none(self.obj_id, proxy_only=True)) # type: ignore
except Exception as e:
info("Exception in DoesObjectExistMessage:" + str(e))
return False
|
{
"content_hash": "b8bddfdeb8e79813857647b1e14ad4ef",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 93,
"avg_line_length": 29.28,
"alnum_prop": 0.674863387978142,
"repo_name": "OpenMined/PySyft",
"id": "74625d6305b3ad1055db48cb0559b595dd0505af",
"size": "741",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "packages/syft/src/syft/core/node/common/node_service/simple/obj_exists.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2084"
},
{
"name": "Cap'n Proto",
"bytes": "1377"
},
{
"name": "Dockerfile",
"bytes": "9740"
},
{
"name": "HCL",
"bytes": "4438"
},
{
"name": "JavaScript",
"bytes": "85898"
},
{
"name": "Jupyter Notebook",
"bytes": "33167760"
},
{
"name": "Makefile",
"bytes": "7605"
},
{
"name": "Mako",
"bytes": "510"
},
{
"name": "PowerShell",
"bytes": "161"
},
{
"name": "Python",
"bytes": "3710174"
},
{
"name": "Shell",
"bytes": "52371"
},
{
"name": "TypeScript",
"bytes": "346493"
}
],
"symlink_target": ""
}
|
"""empty message
Revision ID: 4fa66a99c1dd
Revises: 39939c989fb1
Create Date: 2013-11-08 00:02:57.894562
"""
# revision identifiers, used by Alembic.
revision = '4fa66a99c1dd'
down_revision = '39939c989fb1'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('is_listed', sa.Boolean(), nullable=True, server_default="True"))
op.alter_column('user', 'is_listed', server_default=None)
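    # (added note) server_default="True" backfills the new column for
    # existing rows; the alter_column then drops the default so new rows no
    # longer get a database-side value implicitly.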
op.drop_column('user', u'premium_until')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column(u'premium_until', postgresql.TIMESTAMP(), nullable=True))
op.drop_column('user', 'is_listed')
### end Alembic commands ###
|
{
"content_hash": "c1ba7cc32a46463b71396acd60966180",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 101,
"avg_line_length": 30.103448275862068,
"alnum_prop": 0.697594501718213,
"repo_name": "SevereOverfl0w/MCDirectory",
"id": "0ccfbab558c411266f14473dad542fed4f777cb8",
"size": "873",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "migrations/versions/4fa66a99c1dd_.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "316618"
},
{
"name": "JavaScript",
"bytes": "661360"
},
{
"name": "Python",
"bytes": "29376"
}
],
"symlink_target": ""
}
|
import unittest
import torch
import numpy as np
import os
import math
import uuid
import tempfile
import pyprob
from pyprob import util
from pyprob.distributions import Empirical, Normal, Categorical, Uniform, Poisson, Beta, Mixture, TruncatedNormal
empirical_samples = 25000
class DistributionsTestCase(unittest.TestCase):
def test_distributions_empirical(self):
values = util.to_tensor([1, 2, 3])
log_weights = util.to_tensor([1, 2, 3])
dist_mean_correct = 2.5752103328704834
dist_stddev_correct = 0.6514633893966675
dist_expectation_sin_correct = 0.3921678960323334
dist_map_sin_mean_correct = 0.3921678960323334
dist_min_correct = 1
dist_max_correct = 3
dist_mode_correct = 3
dist_unweighted_mean_correct = 2
dist_unweighted_stddev_correct = 0.816497
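        # Worked check (added note): with normalized weights
        # w = softmax(log_weights) ~ [0.0900, 0.2447, 0.6652],
        #   mean   = sum(w * values)                   ~ 2.5752
        #   stddev = sqrt(sum(w * (values - mean)**2)) ~ 0.6515
        #   E[sin] = sum(w * sin(values))              ~ 0.3922
        # The "unweighted" figures are the plain mean/stddev of [1, 2, 3].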
dist = Empirical(values, log_weights)
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_mean = float(dist.mean)
dist_mean_empirical = float(dist_empirical.mean)
dist_stddev = float(dist.stddev)
dist_stddev_empirical = float(dist_empirical.stddev)
dist_expectation_sin = float(dist.expectation(torch.sin))
dist_map_sin_mean = float(dist.map(torch.sin).mean)
dist_min = float(dist.min)
dist_max = float(dist.max)
dist_mode = float(dist.mode)
dist_unweighted = dist.unweighted()
dist_unweighted_mean = float(dist_unweighted.mean)
dist_unweighted_stddev = float(dist_unweighted.stddev)
util.eval_print('dist_mean', 'dist_mean_empirical', 'dist_mean_correct', 'dist_stddev', 'dist_stddev_empirical', 'dist_stddev_correct', 'dist_expectation_sin', 'dist_expectation_sin_correct', 'dist_map_sin_mean', 'dist_map_sin_mean_correct', 'dist_min', 'dist_min_correct', 'dist_max', 'dist_max_correct', 'dist_mode', 'dist_mode_correct', 'dist_unweighted_mean', 'dist_unweighted_mean_correct', 'dist_unweighted_stddev', 'dist_unweighted_stddev_correct')
# self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertAlmostEqual(dist_mean, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_mean_empirical, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_stddev, dist_stddev_correct, places=1)
self.assertAlmostEqual(dist_stddev_empirical, dist_stddev_correct, places=1)
self.assertAlmostEqual(dist_expectation_sin, dist_expectation_sin_correct, places=1)
self.assertAlmostEqual(dist_map_sin_mean, dist_map_sin_mean_correct, places=1)
self.assertAlmostEqual(dist_min, dist_min_correct, places=1)
self.assertAlmostEqual(dist_max, dist_max_correct, places=1)
self.assertAlmostEqual(dist_mode, dist_mode_correct, places=1)
self.assertAlmostEqual(dist_unweighted_mean, dist_unweighted_mean_correct, places=1)
self.assertAlmostEqual(dist_unweighted_stddev, dist_unweighted_stddev_correct, places=1)
def test_distributions_empirical_copy(self):
file_name_1 = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
file_name_2 = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
values = util.to_tensor([1, 2, 3])
log_weights = util.to_tensor([1, 2, 3])
dist_mean_correct = 2.5752103328704834
dist_stddev_correct = 0.6514633893966675
dist_1 = Empirical(values, log_weights) # In memory
dist_1_mean = float(dist_1.mean)
dist_1_stddev = float(dist_1.stddev)
dist_2 = dist_1.copy() # In memory
dist_2_mean = float(dist_2.mean)
dist_2_stddev = float(dist_2.stddev)
dist_3 = dist_2.copy(file_name=file_name_1) # On disk
dist_3_mean = float(dist_3.mean)
dist_3_stddev = float(dist_3.stddev)
dist_4 = dist_3.copy(file_name=file_name_2) # On disk
dist_4_mean = float(dist_4.mean)
dist_4_stddev = float(dist_4.stddev)
dist_5 = dist_4.copy() # In memory
dist_5_mean = float(dist_5.mean)
dist_5_stddev = float(dist_5.stddev)
util.eval_print('dist_1_mean', 'dist_2_mean', 'dist_3_mean', 'dist_4_mean', 'dist_5_mean', 'dist_mean_correct', 'dist_1_stddev', 'dist_2_stddev', 'dist_3_stddev', 'dist_4_stddev', 'dist_5_stddev', 'dist_stddev_correct')
self.assertAlmostEqual(dist_1_mean, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_1_stddev, dist_stddev_correct, places=1)
self.assertAlmostEqual(dist_2_mean, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_2_stddev, dist_stddev_correct, places=1)
self.assertAlmostEqual(dist_3_mean, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_3_stddev, dist_stddev_correct, places=1)
self.assertAlmostEqual(dist_4_mean, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_4_stddev, dist_stddev_correct, places=1)
self.assertAlmostEqual(dist_5_mean, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_5_stddev, dist_stddev_correct, places=1)
def test_distributions_empirical_disk(self):
file_name = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
values = util.to_tensor([1, 2, 3])
log_weights = util.to_tensor([1, 2, 3])
dist_mean_correct = 2.5752103328704834
dist_stddev_correct = 0.6514633893966675
dist_expectation_sin_correct = 0.3921678960323334
dist_map_sin_mean_correct = 0.3921678960323334
dist_min_correct = 1
dist_max_correct = 3
dist_mode_correct = 3
dist_unweighted_mean_correct = 2
dist_unweighted_stddev_correct = 0.816497
dist = Empirical(values=values, log_weights=log_weights, file_name=file_name)
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_mean = float(dist.mean)
dist_mean_empirical = float(dist_empirical.mean)
dist_stddev = float(dist.stddev)
dist_stddev_empirical = float(dist_empirical.stddev)
dist_expectation_sin = float(dist.expectation(torch.sin))
dist_map_sin_mean = float(dist.map(torch.sin).mean)
dist_min = float(dist.min)
dist_max = float(dist.max)
dist_mode = float(dist.mode)
dist_unweighted = dist.copy().unweighted()
dist_unweighted_mean = float(dist_unweighted.mean)
dist_unweighted_stddev = float(dist_unweighted.stddev)
util.eval_print('dist_mean', 'dist_mean_empirical', 'dist_mean_correct', 'dist_stddev', 'dist_stddev_empirical', 'dist_stddev_correct', 'dist_expectation_sin', 'dist_expectation_sin_correct', 'dist_map_sin_mean', 'dist_map_sin_mean_correct', 'dist_min', 'dist_min_correct', 'dist_max', 'dist_max_correct', 'dist_mode', 'dist_mode_correct', 'dist_unweighted_mean', 'dist_unweighted_mean_correct', 'dist_unweighted_stddev', 'dist_unweighted_stddev_correct')
# self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertAlmostEqual(dist_mean, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_mean_empirical, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_stddev, dist_stddev_correct, places=1)
self.assertAlmostEqual(dist_stddev_empirical, dist_stddev_correct, places=1)
self.assertAlmostEqual(dist_expectation_sin, dist_expectation_sin_correct, places=1)
self.assertAlmostEqual(dist_map_sin_mean, dist_map_sin_mean_correct, places=1)
self.assertAlmostEqual(dist_min, dist_min_correct, places=1)
self.assertAlmostEqual(dist_max, dist_max_correct, places=1)
self.assertAlmostEqual(dist_mode, dist_mode_correct, places=1)
self.assertAlmostEqual(dist_unweighted_mean, dist_unweighted_mean_correct, places=1)
self.assertAlmostEqual(dist_unweighted_stddev, dist_unweighted_stddev_correct, places=1)
def test_distributions_empirical_disk_append(self):
file_name = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
dist_means_correct = -1.2
dist_stddevs_correct = 1.4
dist_empirical_length_correct = 2000
dist = Normal(dist_means_correct, dist_stddevs_correct)
dist_empirical = Empirical(file_name=file_name)
dist_empirical.add_sequence([dist.sample() for i in range(1000)])
dist_empirical.finalize()
dist_empirical.close()
dist_empirical_2 = Empirical(file_name=file_name)
dist_empirical_2.add_sequence([dist.sample() for i in range(1000)])
dist_empirical_2.finalize()
dist_empirical_length = dist_empirical_2.length
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical_2.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical_2.stddev)
util.eval_print('dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_empirical_length', 'dist_empirical_length_correct')
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertEqual(dist_empirical_length, dist_empirical_length_correct)
def test_distributions_empirical_combine_duplicates(self):
values = [1, 2, 2, 3, 3, 3]
values_combined_correct = [1, 2, 3]
dist_mean_correct = 2.333333
dist_stddev_correct = 0.745356
dist = Empirical(values)
dist_combined = dist.combine_duplicates()
values_combined = dist_combined.get_values()
dist_mean = float(dist.mean)
dist_stddev = float(dist.stddev)
dist_mean_combined = float(dist_combined.mean)
dist_stddev_combined = float(dist_combined.stddev)
util.eval_print('values', 'values_combined', 'values_combined_correct', 'dist_mean', 'dist_mean_combined', 'dist_mean_correct', 'dist_stddev', 'dist_stddev_combined', 'dist_stddev_correct')
self.assertEqual(set(values_combined), set(values_combined_correct))
self.assertAlmostEqual(dist_mean, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_mean_combined, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_stddev, dist_stddev_correct, places=1)
self.assertAlmostEqual(dist_stddev_combined, dist_stddev_correct, places=1)
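
# values_numpy and weights_numpy export: array lengths equal the sample
# count and the moments match the generating Normal.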
def test_distributions_empirical_numpy(self):
samples = 25
dist_means_correct = 10
dist_stddevs_correct = 0.01
dist = Normal(dist_means_correct, dist_stddevs_correct)
dist_empirical = Empirical([dist.sample() for i in range(samples)])
dist_empirical_values_numpy = dist_empirical.values_numpy()
dist_empirical_values_numpy_len = len(dist_empirical_values_numpy)
dist_empirical_values_numpy_mean = np.mean(dist_empirical_values_numpy)
dist_empirical_values_numpy_stddev = np.std(dist_empirical_values_numpy)
dist_empirical_weights_numpy = dist_empirical.weights_numpy()
dist_empirical_weights_numpy_len = len(dist_empirical_weights_numpy)
util.eval_print('samples', 'dist_empirical_values_numpy_len', 'dist_empirical_weights_numpy_len', 'dist_empirical_values_numpy_mean', 'dist_means_correct', 'dist_empirical_values_numpy_stddev', 'dist_stddevs_correct')
self.assertEqual(dist_empirical_values_numpy_len, samples)
self.assertEqual(dist_empirical_weights_numpy_len, samples)
self.assertAlmostEqual(dist_empirical_values_numpy_mean, dist_means_correct, places=1)
self.assertAlmostEqual(dist_empirical_values_numpy_stddev, dist_stddevs_correct, places=0)
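
# resample produces a new unweighted Empirical whose moments are preserved
# within Monte Carlo tolerance.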
def test_distributions_empirical_resample(self):
dist_means_correct = [2]
dist_stddevs_correct = [5]
dist = Normal(dist_means_correct, dist_stddevs_correct)
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_empirical = dist_empirical.resample(int(empirical_samples/2))
dist_metadata = dist_empirical.metadata
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
util.eval_print('dist_means_empirical', 'dist_means_correct', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_metadata')
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.25))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.25))
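
# As above, but the resampled distribution is copied to disk and reopened
# before the moments are checked.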
def test_distributions_empirical_resample_disk(self):
dist_means_correct = [2]
dist_stddevs_correct = [5]
file_name = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
dist = Normal(dist_means_correct, dist_stddevs_correct)
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_empirical = dist_empirical.resample(int(empirical_samples/8))
dist_empirical.copy(file_name=file_name)
dist_empirical_disk = Empirical(file_name=file_name)
dist_metadata = dist_empirical_disk.metadata
dist_means_empirical = util.to_numpy(dist_empirical_disk.mean)
dist_stddevs_empirical = util.to_numpy(dist_empirical_disk.stddev)
util.eval_print('file_name', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_metadata')
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.25))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.25))
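
# thin(4) keeps 4 evenly spaced values (indices 0, 3, 6, 9 of the 12 values).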
def test_distributions_empirical_thin(self):
values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
dist_thinned_values_correct = [1, 4, 7, 10]
dist = Empirical(values)
dist_thinned = dist.thin(4)
dist_thinned_values = list(dist_thinned.values_numpy())
util.eval_print('dist_thinned_values', 'dist_thinned_values_correct')
self.assertEqual(dist_thinned_values, dist_thinned_values_correct)
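
# Disk variant: the thinned distribution is written to a file and read back.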
def test_distributions_empirical_thin_disk(self):
values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
dist_thinned_values_correct = [1, 4, 7, 10]
file_name = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
dist = Empirical(values)
dist_thinned = dist.thin(4)
dist_thinned.copy(file_name=file_name)
dist_thinned_disk = Empirical(file_name=file_name)
dist_thinned_values = list(dist_thinned_disk.values_numpy())
util.eval_print('file_name', 'dist_thinned_values', 'dist_thinned_values_correct')
self.assertEqual(dist_thinned_values, dist_thinned_values_correct)
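
# Empirical supports slicing and (negative) integer indexing over its values.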
def test_distributions_empirical_slice_and_index(self):
dist_slice_elements_correct = [0, 1, 2]
dist_first_correct = 0
dist_last_correct = 5
dist = Empirical([0, 1, 2, 3, 4, 5])
dist_slice_elements = dist[0:3].get_values()
dist_first = dist[0]
dist_last = dist[-1]
util.eval_print('dist_slice_elements', 'dist_slice_elements_correct', 'dist_first', 'dist_first_correct', 'dist_last', 'dist_last_correct')
self.assertEqual(dist_slice_elements, dist_slice_elements_correct)
self.assertEqual(dist_first, dist_first_correct)
self.assertEqual(dist_last, dist_last_correct)
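
# sample(min_index=..., max_index=...) restricts sampling to an index range;
# each range below contains a single repeated value, so the mean is exact.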
def test_distributions_empirical_sample_min_max_index(self):
dist_mean_1_correct = 2
dist_mean_2_correct = 3
dist_mean_3_correct = 4
# indices:  0  1  2  3  4  5  6  7  8
dist = Empirical([2, 2, 2, 2, 3, 3, 3, 4, 4])
dist_mean_1 = float(Empirical([dist.sample(min_index=0, max_index=3)]).mean)
dist_mean_2 = float(Empirical([dist.sample(min_index=4, max_index=6)]).mean)
dist_mean_3 = float(Empirical([dist.sample(min_index=7, max_index=8)]).mean)
util.eval_print('dist_mean_1', 'dist_mean_1_correct', 'dist_mean_2', 'dist_mean_2_correct', 'dist_mean_3', 'dist_mean_3_correct')
self.assertAlmostEqual(dist_mean_1, dist_mean_1_correct, places=1)
self.assertAlmostEqual(dist_mean_2, dist_mean_2_correct, places=1)
self.assertAlmostEqual(dist_mean_3, dist_mean_3_correct, places=1)
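
# Concatenating three unweighted Empiricals pools their samples; the local
# empirical_samples shadows the module-level value to tighten tolerances.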
def test_distributions_empirical_combine_unweighted(self):
dist1_mean_correct = 1
dist1_stddev_correct = 3
dist2_mean_correct = 5
dist2_stddev_correct = 2
dist3_mean_correct = -2.5
dist3_stddev_correct = 1.2
dist_combined_mean_correct = 1.16667
dist_combined_stddev_correct = 3.76858
empirical_samples = 100000
dist1 = Normal(dist1_mean_correct, dist1_stddev_correct)
dist1_empirical = Empirical([dist1.sample() for i in range(empirical_samples)])
dist1_mean_empirical = float(dist1_empirical.mean)
dist1_stddev_empirical = float(dist1_empirical.stddev)
dist2 = Normal(dist2_mean_correct, dist2_stddev_correct)
dist2_empirical = Empirical([dist2.sample() for i in range(empirical_samples)])
dist2_mean_empirical = float(dist2_empirical.mean)
dist2_stddev_empirical = float(dist2_empirical.stddev)
dist3 = Normal(dist3_mean_correct, dist3_stddev_correct)
dist3_empirical = Empirical([dist3.sample() for i in range(empirical_samples)])
dist3_mean_empirical = float(dist3_empirical.mean)
dist3_stddev_empirical = float(dist3_empirical.stddev)
dist_combined_empirical = Empirical(concat_empiricals=[dist1_empirical, dist2_empirical, dist3_empirical])
dist_combined_mean_empirical = float(dist_combined_empirical.mean)
dist_combined_stddev_empirical = float(dist_combined_empirical.stddev)
util.eval_print('dist1_mean_empirical', 'dist1_stddev_empirical', 'dist1_mean_correct', 'dist1_stddev_correct', 'dist2_mean_empirical', 'dist2_stddev_empirical', 'dist2_mean_correct', 'dist2_stddev_correct', 'dist3_mean_empirical', 'dist3_stddev_empirical', 'dist3_mean_correct', 'dist3_stddev_correct', 'dist_combined_mean_empirical', 'dist_combined_stddev_empirical', 'dist_combined_mean_correct', 'dist_combined_stddev_correct')
self.assertAlmostEqual(dist1_mean_empirical, dist1_mean_correct, places=1)
self.assertAlmostEqual(dist1_stddev_empirical, dist1_stddev_correct, places=1)
self.assertAlmostEqual(dist2_mean_empirical, dist2_mean_correct, places=1)
self.assertAlmostEqual(dist2_stddev_empirical, dist2_stddev_correct, places=1)
self.assertAlmostEqual(dist3_mean_empirical, dist3_mean_correct, places=1)
self.assertAlmostEqual(dist3_stddev_empirical, dist3_stddev_correct, places=1)
self.assertAlmostEqual(dist_combined_mean_empirical, dist_combined_mean_correct, places=1)
self.assertAlmostEqual(dist_combined_stddev_empirical, dist_combined_stddev_correct, places=1)
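
# Same combination with each component in its own file and the result
# written to a fourth file; fewer samples per component, hence places=0.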
def test_distributions_empirical_disk_combine_unweighted(self):
file_name_1 = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
file_name_2 = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
file_name_3 = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
file_name_combined = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
dist1_mean_correct = 1
dist1_stddev_correct = 3
dist2_mean_correct = 5
dist2_stddev_correct = 2
dist3_mean_correct = -2.5
dist3_stddev_correct = 1.2
dist_combined_mean_correct = 1.16667
dist_combined_stddev_correct = 3.76858
dist1 = Normal(dist1_mean_correct, dist1_stddev_correct)
dist1_empirical = Empirical([dist1.sample() for i in range(int(empirical_samples / 10))], file_name=file_name_1)
dist1_mean_empirical = float(dist1_empirical.mean)
dist1_stddev_empirical = float(dist1_empirical.stddev)
dist1_empirical.close()
dist2 = Normal(dist2_mean_correct, dist2_stddev_correct)
dist2_empirical = Empirical([dist2.sample() for i in range(int(empirical_samples / 10))], file_name=file_name_2)
dist2_mean_empirical = float(dist2_empirical.mean)
dist2_stddev_empirical = float(dist2_empirical.stddev)
dist2_empirical.close()
dist3 = Normal(dist3_mean_correct, dist3_stddev_correct)
dist3_empirical = Empirical([dist3.sample() for i in range(int(empirical_samples / 10))], file_name=file_name_3)
dist3_mean_empirical = float(dist3_empirical.mean)
dist3_stddev_empirical = float(dist3_empirical.stddev)
dist3_empirical.close()
dist_combined_empirical = Empirical(concat_empirical_file_names=[file_name_1, file_name_2, file_name_3], file_name=file_name_combined)
dist_combined_mean_empirical = float(dist_combined_empirical.mean)
dist_combined_stddev_empirical = float(dist_combined_empirical.stddev)
util.eval_print('dist1_mean_empirical', 'dist1_mean_correct', 'dist1_stddev_empirical', 'dist1_stddev_correct', 'dist2_mean_empirical', 'dist2_mean_correct', 'dist2_stddev_empirical', 'dist2_stddev_correct', 'dist3_mean_empirical', 'dist3_mean_correct', 'dist3_stddev_empirical', 'dist3_stddev_correct', 'dist_combined_mean_empirical', 'dist_combined_mean_correct', 'dist_combined_stddev_empirical', 'dist_combined_stddev_correct')
self.assertAlmostEqual(dist1_mean_empirical, dist1_mean_correct, places=0)
self.assertAlmostEqual(dist1_stddev_empirical, dist1_stddev_correct, places=0)
self.assertAlmostEqual(dist2_mean_empirical, dist2_mean_correct, places=0)
self.assertAlmostEqual(dist2_stddev_empirical, dist2_stddev_correct, places=0)
self.assertAlmostEqual(dist3_mean_empirical, dist3_mean_correct, places=0)
self.assertAlmostEqual(dist3_stddev_empirical, dist3_stddev_correct, places=0)
self.assertAlmostEqual(dist_combined_mean_empirical, dist_combined_mean_correct, places=0)
self.assertAlmostEqual(dist_combined_stddev_empirical, dist_combined_stddev_correct, places=0)
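
# Concatenating weighted Empiricals carries the log-weights through, so the
# combined weighted moments match the reference values.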
def test_distributions_empirical_combine_weighted(self):
dist1_values = [1, 2, 3]
dist1_log_weights = [1, 2, 3]
dist1_mean_correct = 2.5752103328704834
dist1_stddev_correct = 0.6514633893966675
dist2_values = [1.4, -9, 5]
dist2_log_weights = [-10, -2, -3]
dist2_mean_correct = -5.233193397521973
dist2_stddev_correct = 6.207840442657471
dist3_values = [10, 4, -1]
dist3_log_weights = [1, -2, -2.5]
dist3_mean_correct = 9.415830612182617
dist3_stddev_correct = 2.168320417404175
dist_combined_mean_correct = 3.1346240043640137
dist_combined_stddev_correct = 2.2721681594848633
dist1_empirical = Empirical(values=dist1_values, log_weights=dist1_log_weights)
dist1_mean_empirical = float(dist1_empirical.mean)
dist1_stddev_empirical = float(dist1_empirical.stddev)
dist2_empirical = Empirical(values=dist2_values, log_weights=dist2_log_weights)
dist2_mean_empirical = float(dist2_empirical.mean)
dist2_stddev_empirical = float(dist2_empirical.stddev)
dist3_empirical = Empirical(values=dist3_values, log_weights=dist3_log_weights)
dist3_mean_empirical = float(dist3_empirical.mean)
dist3_stddev_empirical = float(dist3_empirical.stddev)
dist_combined_empirical = Empirical(concat_empiricals=[dist1_empirical, dist2_empirical, dist3_empirical])
dist_combined_mean_empirical = float(dist_combined_empirical.mean)
dist_combined_stddev_empirical = float(dist_combined_empirical.stddev)
util.eval_print('dist1_mean_empirical', 'dist1_mean_correct', 'dist1_stddev_empirical', 'dist1_stddev_correct', 'dist2_mean_empirical', 'dist2_mean_correct', 'dist2_stddev_empirical', 'dist2_stddev_correct', 'dist3_mean_empirical', 'dist3_mean_correct', 'dist3_stddev_empirical', 'dist3_stddev_correct', 'dist_combined_mean_empirical', 'dist_combined_mean_correct', 'dist_combined_stddev_empirical', 'dist_combined_stddev_correct')
self.assertAlmostEqual(dist1_mean_empirical, dist1_mean_correct, places=1)
self.assertAlmostEqual(dist1_stddev_empirical, dist1_stddev_correct, places=1)
self.assertAlmostEqual(dist2_mean_empirical, dist2_mean_correct, places=1)
self.assertAlmostEqual(dist2_stddev_empirical, dist2_stddev_correct, places=1)
self.assertAlmostEqual(dist3_mean_empirical, dist3_mean_correct, places=1)
self.assertAlmostEqual(dist3_stddev_empirical, dist3_stddev_correct, places=1)
self.assertAlmostEqual(dist_combined_mean_empirical, dist_combined_mean_correct, places=1)
self.assertAlmostEqual(dist_combined_stddev_empirical, dist_combined_stddev_correct, places=1)
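
# Disk variant of the weighted combination above.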
def test_distributions_empirical_disk_combine_weighted(self):
file_name_1 = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
file_name_2 = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
file_name_3 = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
file_name_combined = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
dist1_values = [1, 2, 3]
dist1_log_weights = [1, 2, 3]
dist1_mean_correct = 2.5752103328704834
dist1_stddev_correct = 0.6514633893966675
dist2_values = [1.4, -9, 5]
dist2_log_weights = [-10, -2, -3]
dist2_mean_correct = -5.233193397521973
dist2_stddev_correct = 6.207840442657471
dist3_values = [10, 4, -1]
dist3_log_weights = [1, -2, -2.5]
dist3_mean_correct = 9.415830612182617
dist3_stddev_correct = 2.168320417404175
dist_combined_mean_correct = 3.1346240043640137
dist_combined_stddev_correct = 2.2721681594848633
dist1_empirical = Empirical(values=dist1_values, log_weights=dist1_log_weights, file_name=file_name_1)
dist1_mean_empirical = float(dist1_empirical.mean)
dist1_stddev_empirical = float(dist1_empirical.stddev)
dist1_empirical.close()
dist2_empirical = Empirical(values=dist2_values, log_weights=dist2_log_weights, file_name=file_name_2)
dist2_mean_empirical = float(dist2_empirical.mean)
dist2_stddev_empirical = float(dist2_empirical.stddev)
dist2_empirical.close()
dist3_empirical = Empirical(values=dist3_values, log_weights=dist3_log_weights, file_name=file_name_3)
dist3_mean_empirical = float(dist3_empirical.mean)
dist3_stddev_empirical = float(dist3_empirical.stddev)
dist3_empirical.close()
dist_combined_empirical = Empirical(concat_empirical_file_names=[file_name_1, file_name_2, file_name_3], file_name=file_name_combined)
dist_combined_mean_empirical = float(dist_combined_empirical.mean)
dist_combined_stddev_empirical = float(dist_combined_empirical.stddev)
util.eval_print('dist1_mean_empirical', 'dist1_mean_correct', 'dist1_stddev_empirical', 'dist1_stddev_correct', 'dist2_mean_empirical', 'dist2_mean_correct', 'dist2_stddev_empirical', 'dist2_stddev_correct', 'dist3_mean_empirical', 'dist3_mean_correct', 'dist3_stddev_empirical', 'dist3_stddev_correct', 'dist_combined_mean_empirical', 'dist_combined_mean_correct', 'dist_combined_stddev_empirical', 'dist_combined_stddev_correct')
self.assertAlmostEqual(dist1_mean_empirical, dist1_mean_correct, places=0)
self.assertAlmostEqual(dist1_stddev_empirical, dist1_stddev_correct, places=0)
self.assertAlmostEqual(dist2_mean_empirical, dist2_mean_correct, places=0)
self.assertAlmostEqual(dist2_stddev_empirical, dist2_stddev_correct, places=0)
self.assertAlmostEqual(dist3_mean_empirical, dist3_mean_correct, places=0)
self.assertAlmostEqual(dist3_stddev_empirical, dist3_stddev_correct, places=0)
self.assertAlmostEqual(dist_combined_mean_empirical, dist_combined_mean_correct, places=0)
self.assertAlmostEqual(dist_combined_stddev_empirical, dist_combined_stddev_correct, places=0)
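
# File round-trip: mean, stddev, expectation() and map() must match after
# the distribution is reloaded from disk.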
def test_distributions_empirical_save_load(self):
file_name = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
values = util.to_tensor([1, 2, 3])
log_weights = util.to_tensor([1, 2, 3])
dist_mean_correct = 2.5752103328704834
dist_stddev_correct = 0.6514633893966675
dist_expectation_sin_correct = 0.3921678960323334
dist_map_sin_mean_correct = 0.3921678960323334
dist_on_file = Empirical(values, log_weights=log_weights, file_name=file_name)
dist_on_file.close()
dist = Empirical(file_name=file_name)
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_mean = float(dist.mean)
dist_mean_empirical = float(dist_empirical.mean)
dist_stddev = float(dist.stddev)
dist_stddev_empirical = float(dist_empirical.stddev)
dist_expectation_sin = float(dist.expectation(torch.sin))
dist_map_sin_mean = float(dist.map(torch.sin).mean)
os.remove(file_name)
util.eval_print('file_name', 'dist_mean', 'dist_mean_empirical', 'dist_mean_correct', 'dist_stddev', 'dist_stddev_empirical', 'dist_stddev_correct', 'dist_expectation_sin', 'dist_expectation_sin_correct', 'dist_map_sin_mean', 'dist_map_sin_mean_correct')
self.assertAlmostEqual(dist_mean, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_mean_empirical, dist_mean_correct, places=1)
self.assertAlmostEqual(dist_stddev, dist_stddev_correct, places=1)
self.assertAlmostEqual(dist_stddev_empirical, dist_stddev_correct, places=1)
self.assertAlmostEqual(dist_expectation_sin, dist_expectation_sin_correct, places=1)
self.assertAlmostEqual(dist_map_sin_mean, dist_map_sin_mean_correct, places=1)
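
# Concatenation of in-memory pieces must reproduce the mean, stddev and
# effective sample size of the Empirical built in one piece.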
def test_distributions_empirical_concat_mem_to_mem(self):
values_correct = [0., 1, 2, 3, 4, 5, 6, 7, 8, 9]
log_weights_correct = [-10, -15, -200, -2, -3, -22, -100, 1, 2, -0.3]
mean_correct = 7.741360664367676
stddev_correct = 0.7910336256027222
ess_correct = 1.9459790014029552
dist_correct = Empirical(values=values_correct, log_weights=log_weights_correct)
dist_correct_mean = float(dist_correct.mean)
dist_correct_stddev = float(dist_correct.stddev)
dist_correct_ess = float(dist_correct.effective_sample_size)
empiricals = []
empiricals.append(Empirical(values=values_correct[0:3], log_weights=log_weights_correct[0:3]))
empiricals.append(Empirical(values=values_correct[3:5], log_weights=log_weights_correct[3:5]))
empiricals.append(Empirical(values=values_correct[5:9], log_weights=log_weights_correct[5:9]))
empiricals.append(Empirical(values=values_correct[9:10], log_weights=log_weights_correct[9:10]))
concat_emp = Empirical(concat_empiricals=empiricals)
concat_emp_mean = float(concat_emp.mean)
concat_emp_stddev = float(concat_emp.stddev)
concat_emp_ess = float(concat_emp.effective_sample_size)
util.eval_print('values_correct', 'log_weights_correct', 'dist_correct_mean', 'concat_emp_mean', 'mean_correct', 'dist_correct_stddev', 'concat_emp_stddev', 'stddev_correct', 'dist_correct_ess', 'concat_emp_ess', 'ess_correct')
self.assertAlmostEqual(dist_correct_mean, mean_correct, places=1)
self.assertAlmostEqual(dist_correct_stddev, stddev_correct, places=1)
self.assertAlmostEqual(dist_correct_ess, ess_correct, places=1)
self.assertAlmostEqual(concat_emp_mean, mean_correct, places=1)
self.assertAlmostEqual(concat_emp_stddev, stddev_correct, places=1)
self.assertAlmostEqual(concat_emp_ess, ess_correct, places=1)
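
# As above, with the four components stored on disk and concatenated into
# an in-memory Empirical.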
def test_distributions_empirical_concat_file_to_mem(self):
values_correct = [0., 1, 2, 3, 4, 5, 6, 7, 8, 9]
log_weights_correct = [-10, -15, -200, -2, -3, -22, -100, 1, 2, -0.3]
mean_correct = 7.741360664367676
stddev_correct = 0.7910336256027222
ess_correct = 1.9459790014029552
dist_correct = Empirical(values=values_correct, log_weights=log_weights_correct)
dist_correct_mean = float(dist_correct.mean)
dist_correct_stddev = float(dist_correct.stddev)
dist_correct_ess = float(dist_correct.effective_sample_size)
file_names = [os.path.join(tempfile.mkdtemp(), str(uuid.uuid4())) for i in range(4)]
empiricals = []
empiricals.append(Empirical(values=values_correct[0:3], log_weights=log_weights_correct[0:3], file_name=file_names[0]))
empiricals.append(Empirical(values=values_correct[3:5], log_weights=log_weights_correct[3:5], file_name=file_names[1]))
empiricals.append(Empirical(values=values_correct[5:9], log_weights=log_weights_correct[5:9], file_name=file_names[2]))
empiricals.append(Empirical(values=values_correct[9:10], log_weights=log_weights_correct[9:10], file_name=file_names[3]))
for emp in empiricals:
    emp.close()
concat_emp = Empirical(concat_empirical_file_names=file_names)
concat_emp_mean = float(concat_emp.mean)
concat_emp_stddev = float(concat_emp.stddev)
concat_emp_ess = float(concat_emp.effective_sample_size)
for file_name in file_names:
    os.remove(file_name)
util.eval_print('file_names', 'values_correct', 'log_weights_correct', 'dist_correct_mean', 'concat_emp_mean', 'mean_correct', 'dist_correct_stddev', 'concat_emp_stddev', 'stddev_correct', 'dist_correct_ess', 'concat_emp_ess', 'ess_correct')
self.assertAlmostEqual(dist_correct_mean, mean_correct, places=1)
self.assertAlmostEqual(dist_correct_stddev, stddev_correct, places=1)
self.assertAlmostEqual(dist_correct_ess, ess_correct, places=1)
self.assertAlmostEqual(concat_emp_mean, mean_correct, places=1)
self.assertAlmostEqual(concat_emp_stddev, stddev_correct, places=1)
self.assertAlmostEqual(concat_emp_ess, ess_correct, places=1)
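
# As above, concatenating file-backed components directly into a new file,
# which is then reopened for the checks.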
def test_distributions_empirical_concat_file_to_file(self):
values_correct = [0., 1, 2, 3, 4, 5, 6, 7, 8, 9]
log_weights_correct = [-10, -15, -200, -2, -3, -22, -100, 1, 2, -0.3]
mean_correct = 7.741360664367676
stddev_correct = 0.7910336256027222
ess_correct = 1.9459790014029552
dist_correct = Empirical(values=values_correct, log_weights=log_weights_correct)
dist_correct_mean = float(dist_correct.mean)
dist_correct_stddev = float(dist_correct.stddev)
dist_correct_ess = float(dist_correct.effective_sample_size)
file_names = [os.path.join(tempfile.mkdtemp(), str(uuid.uuid4())) for i in range(4)]
empiricals = []
empiricals.append(Empirical(values=values_correct[0:3], log_weights=log_weights_correct[0:3], file_name=file_names[0]))
empiricals.append(Empirical(values=values_correct[3:5], log_weights=log_weights_correct[3:5], file_name=file_names[1]))
empiricals.append(Empirical(values=values_correct[5:9], log_weights=log_weights_correct[5:9], file_name=file_names[2]))
empiricals.append(Empirical(values=values_correct[9:10], log_weights=log_weights_correct[9:10], file_name=file_names[3]))
for emp in empiricals:
    emp.close()
concat_file_name = os.path.join(tempfile.mkdtemp(), str(uuid.uuid4()))
concat_emp = Empirical(concat_empirical_file_names=file_names, file_name=concat_file_name)
concat_emp.close()
concat_emp2 = Empirical(file_name=concat_file_name)
concat_emp_mean = float(concat_emp2.mean)
concat_emp_stddev = float(concat_emp2.stddev)
concat_emp_ess = float(concat_emp2.effective_sample_size)
for file_name in file_names:
    os.remove(file_name)
os.remove(concat_file_name)
util.eval_print('file_names', 'concat_file_name', 'values_correct', 'log_weights_correct', 'dist_correct_mean', 'concat_emp_mean', 'mean_correct', 'dist_correct_stddev', 'concat_emp_stddev', 'stddev_correct', 'dist_correct_ess', 'concat_emp_ess', 'ess_correct')
self.assertAlmostEqual(dist_correct_mean, mean_correct, places=1)
self.assertAlmostEqual(dist_correct_stddev, stddev_correct, places=1)
self.assertAlmostEqual(dist_correct_ess, ess_correct, places=1)
self.assertAlmostEqual(concat_emp_mean, mean_correct, places=1)
self.assertAlmostEqual(concat_emp_stddev, stddev_correct, places=1)
self.assertAlmostEqual(concat_emp_ess, ess_correct, places=1)
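
# Normal with scalar parameters: shapes, analytic and empirical moments,
# and log_prob evaluated at the mean.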
def test_distributions_normal(self):
dist_batch_shape_correct = torch.Size()
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size()
dist_log_prob_shape_correct = torch.Size()
dist_means_correct = 0
dist_stddevs_correct = 1
dist_log_probs_correct = -0.918939
dist = Normal(dist_means_correct, dist_stddevs_correct)
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
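
# Batched Normal, batch shape [2].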
def test_distributions_normal_batched_2(self):
dist_batch_shape_correct = torch.Size([2])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([2])
dist_log_prob_shape_correct = torch.Size([2])
dist_means_correct = [0, 2]
dist_stddevs_correct = [1, 3]
dist_log_probs_correct = [-0.918939, -2.01755]
dist = Normal(dist_means_correct, dist_stddevs_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
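
# Batched Normal, batch shape [2, 1].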
def test_distributions_normal_batched_2_1(self):
dist_batch_shape_correct = torch.Size([2, 1])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([2, 1])
dist_log_prob_shape_correct = torch.Size([2, 1])
dist_means_correct = [[0], [2]]
dist_stddevs_correct = [[1], [3]]
dist_log_probs_correct = [[-0.918939], [-2.01755]]
dist = Normal(dist_means_correct, dist_stddevs_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
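
# Batched Normal, batch shape [2, 3].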
def test_distributions_normal_batched_2_3(self):
dist_batch_shape_correct = torch.Size([2, 3])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([2, 3])
dist_log_prob_shape_correct = torch.Size([2, 3])
dist_means_correct = [[0, 2, 0], [2, 0, 2]]
dist_stddevs_correct = [[1, 3, 1], [3, 1, 3]]
dist_log_probs_correct = [[-0.918939, -2.01755, -0.918939], [-2.01755, -0.918939, -2.01755]]
dist = Normal(dist_means_correct, dist_stddevs_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
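
# TruncatedNormal: moments of Normal(2, 3) truncated to [-4, 4].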
def test_distributions_truncated_normal(self):
dist_batch_shape_correct = torch.Size()
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size()
dist_log_prob_shape_correct = torch.Size()
dist_means_non_truncated_correct = 2
dist_stddevs_non_truncated_correct = 3
dist_means_correct = 0.901189
dist_stddevs_correct = 1.95118
dist_lows_correct = -4
dist_highs_correct = 4
dist_log_probs_correct = -1.69563
dist = TruncatedNormal(dist_means_non_truncated_correct, dist_stddevs_non_truncated_correct, dist_lows_correct, dist_highs_correct)
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_non_truncated_correct))
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_log_prob_shape = dist.log_prob(dist_means_non_truncated_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
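
# Batched TruncatedNormal, batch shape [2].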
def test_distributions_truncated_normal_batched_2(self):
dist_batch_shape_correct = torch.Size([2])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([2])
dist_log_prob_shape_correct = torch.Size([2])
dist_means_non_truncated_correct = [0, 2]
dist_stddevs_non_truncated_correct = [1, 3]
dist_means_correct = [0, 0.901189]
dist_stddevs_correct = [0.53956, 1.95118]
dist_lows_correct = [-1, -4]
dist_highs_correct = [1, 4]
dist_log_probs_correct = [-0.537223, -1.69563]
dist = TruncatedNormal(dist_means_non_truncated_correct, dist_stddevs_non_truncated_correct, dist_lows_correct, dist_highs_correct)
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_non_truncated_correct))
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_log_prob_shape = dist.log_prob(dist_means_non_truncated_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
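
# Batched TruncatedNormal, batch shape [2, 1].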
def test_distributions_truncated_normal_batched_2_1(self):
dist_batch_shape_correct = torch.Size([2, 1])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([2, 1])
dist_log_prob_shape_correct = torch.Size([2, 1])
dist_means_non_truncated_correct = [[0], [2]]
dist_stddevs_non_truncated_correct = [[1], [3]]
dist_means_correct = [[0], [0.901189]]
dist_stddevs_correct = [[0.53956], [1.95118]]
dist_lows_correct = [[-1], [-4]]
dist_highs_correct = [[1], [4]]
dist_log_probs_correct = [[-0.537223], [-1.69563]]
dist = TruncatedNormal(dist_means_non_truncated_correct, dist_stddevs_non_truncated_correct, dist_lows_correct, dist_highs_correct)
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_non_truncated_correct))
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_log_prob_shape = dist.log_prob(dist_means_non_truncated_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
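
# clamp_mean_between_low_high=True clamps the non-truncated mean into
# [low, high] before truncation (here 0 -> 0.5 and 2 -> 1).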
def test_distributions_truncated_normal_clamped_batched_2_1(self):
dist_batch_shape_correct = torch.Size([2, 1])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([2, 1])
dist_log_prob_shape_correct = torch.Size([2, 1])
dist_means_non_truncated = [[0], [2]]
dist_means_non_truncated_correct = [[0.5], [1]]
dist_stddevs_non_truncated = [[1], [3]]
dist_means_correct = [[0.744836], [-0.986679]]
dist_stddevs_correct = [[0.143681], [1.32416]]
dist_lows_correct = [[0.5], [-4]]
dist_highs_correct = [[1], [1]]
dist_log_prob_arguments = [[0.75], [-3]]
dist_log_probs_correct = [[0.702875], [-2.11283]]
dist = TruncatedNormal(dist_means_non_truncated, dist_stddevs_non_truncated, dist_lows_correct, dist_highs_correct, clamp_mean_between_low_high=True)
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means_non_truncated = util.to_numpy(dist._mean_non_truncated)
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_log_probs = util.to_numpy(dist.log_prob(dist_log_prob_arguments))
dist_log_prob_shape = dist.log_prob(dist_log_prob_arguments).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_means_non_truncated', 'dist_means_non_truncated_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means_non_truncated, dist_means_non_truncated_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
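
# Categorical from probabilities [0.1, 0.2, 0.7]; moments are checked
# empirically and log_prob(0) against log(0.1).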
def test_distributions_categorical(self):
dist_batch_shape_correct = torch.Size()
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size()
dist_log_prob_shape_correct = torch.Size()
dist_means_correct = 1.6
dist_stddevs_correct = 0.666
dist_log_probs_correct = -2.30259
dist = Categorical([0.1, 0.2, 0.7])
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample().float() for i in range(empirical_samples)])
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(0))
dist_log_prob_shape = dist.log_prob(0).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
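
# Batched Categorical, batch shape [2]; log_prob at classes 0 and 1.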
def test_distributions_categorical_batched_2(self):
dist_batch_shape_correct = torch.Size([2])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([2])
dist_log_prob_shape_correct = torch.Size([2])
dist_means_correct = [1.6, 1.1]
dist_stddevs_correct = [0.666, 0.7]
dist_log_probs_correct = [-2.30259, -0.693147]
dist = Categorical([[0.1, 0.2, 0.7], [0.2, 0.5, 0.3]])
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample().float() for i in range(empirical_samples)])
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob([0, 1]))
dist_log_prob_shape = dist.log_prob([0, 1]).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
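
# Categorical parameterized by logits encoding the same [0.1, 0.2, 0.7]
# probabilities as above.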
def test_distributions_categorical_logits(self):
dist_batch_shape_correct = torch.Size()
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size()
dist_log_prob_shape_correct = torch.Size()
dist_means_correct = 1.6
dist_stddevs_correct = 0.666
dist_log_probs_correct = -2.30259
dist = Categorical(logits=[-2.30259, -1.60944, -0.356675])
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample().float() for i in range(empirical_samples)])
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(0))
dist_log_prob_shape = dist.log_prob(0).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
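
# Uniform(0, 1): log_prob at the mean is 0 (density 1 on the unit interval).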
def test_distributions_uniform(self):
dist_batch_shape_correct = torch.Size()
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size()
dist_log_prob_shape_correct = torch.Size()
dist_means_correct = 0.5
dist_stddevs_correct = 0.288675
dist_lows_correct = 0
dist_highs_correct = 1
dist_log_probs_correct = 0
dist = Uniform(dist_lows_correct, dist_highs_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_lows = util.to_numpy(dist.low)
dist_highs = util.to_numpy(dist.high)
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_lows', 'dist_lows_correct', 'dist_highs', 'dist_highs_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_lows, dist_lows_correct, atol=0.1))
self.assertTrue(np.allclose(dist_highs, dist_highs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
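
# Batched Uniform, batch shape [4, 1]; log_prob at the upper bound is
# expected to be -inf (half-open support).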
def test_distributions_uniform_batched_4_1(self):
dist_batch_shape_correct = torch.Size([4, 1])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([4, 1])
dist_log_prob_shape_correct = torch.Size([4, 1])
dist_means_correct = [[0.5], [7.5], [0.5], [0.5]]
dist_stddevs_correct = [[0.288675], [1.44338], [0.288675], [0.288675]]
dist_lows_correct = [[0], [5], [0], [0]]
dist_highs_correct = [[1], [10], [1], [1]]
dist_values = [[0.5], [7.5], [0], [1]]
dist_log_probs_correct = [[0], [-1.60944], [0.], [float('-inf')]]
dist = Uniform(dist_lows_correct, dist_highs_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_lows = util.to_numpy(dist.low)
dist_highs = util.to_numpy(dist.high)
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_values))
dist_log_prob_shape = dist.log_prob(dist_values).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_lows', 'dist_lows_correct', 'dist_highs', 'dist_highs_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_lows, dist_lows_correct, atol=0.1))
self.assertTrue(np.allclose(dist_highs, dist_highs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
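
# Poisson(4): the mean equals the rate and the stddev is sqrt(rate).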
def test_distributions_poisson(self):
dist_batch_shape_correct = torch.Size()
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size()
dist_log_prob_shape_correct = torch.Size()
dist_means_correct = 4
dist_stddevs_correct = math.sqrt(4)
dist_rates_correct = 4
dist_log_probs_correct = -1.63288
dist = Poisson(dist_rates_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_rates = util.to_numpy(dist.rate)
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_rates', 'dist_rates_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.25))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_rates, dist_rates_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
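
# Batched Poisson, batch shape [2, 1].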
def test_distributions_poisson_batched_2_1(self):
dist_batch_shape_correct = torch.Size([2, 1])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([2, 1])
dist_log_prob_shape_correct = torch.Size([2, 1])
dist_means_correct = [[4], [100]]
dist_stddevs_correct = [[math.sqrt(4)], [math.sqrt(100)]]
dist_rates_correct = [[4], [100]]
dist_log_probs_correct = [[-1.63288], [-3.22236]]
dist = Poisson(dist_rates_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_rates = util.to_numpy(dist.rate)
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_rates', 'dist_rates_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.25))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.25))
self.assertTrue(np.allclose(dist_rates, dist_rates_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
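
# Batched Poisson, batch shape [1, 3].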
def test_distributions_poisson_batched_1_3(self):
dist_batch_shape_correct = torch.Size([1, 3])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([1, 3])
dist_log_prob_shape_correct = torch.Size([1, 3])
dist_means_correct = [[1, 2, 15]]
dist_stddevs_correct = [[math.sqrt(1), math.sqrt(2), math.sqrt(15)]]
dist_rates_correct = [[1, 2, 15]]
dist_log_probs_correct = [[-1, -1.30685, -2.27852]]
dist = Poisson(dist_rates_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_rates = util.to_numpy(dist.rate)
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_rates', 'dist_rates_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.25))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_rates, dist_rates_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
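
# Beta(2, 5): analytic and empirical moments, and log_prob at the mean.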
def test_distributions_beta(self):
dist_batch_shape_correct = torch.Size()
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size()
dist_log_prob_shape_correct = torch.Size()
dist_concentration1s_correct = 2
dist_concentration0s_correct = 5
dist_means_correct = 0.285714
dist_stddevs_correct = 0.159719
dist_log_probs_correct = 0.802545
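        # Sanity check (illustrative): for Beta(a=2, b=5),
        # mean = a/(a+b) = 2/7 = 0.285714 and
        # stddev = sqrt(a*b/((a+b)**2*(a+b+1))) = sqrt(10/392) = 0.159719.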
dist = Beta(dist_concentration1s_correct, dist_concentration0s_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
def test_distributions_beta_batched_4_1(self):
dist_batch_shape_correct = torch.Size([4, 1])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([4, 1])
dist_log_prob_shape_correct = torch.Size([4, 1])
dist_concentration1s_correct = [[0.5], [7.5], [7.5], [7.5]]
dist_concentration0s_correct = [[0.75], [2.5], [2.5], [2.5]]
dist_means_correct = [[0.4], [0.75], [0.75], [0.75]]
dist_stddevs_correct = [[0.326599], [0.130558], [0.130558], [0.130558]]
dist_values = [[0.415584], [0.807999], [0.], [1.]]
dist_log_probs_correct = [[-0.300597], [1.12163], [float('-inf')], [float('-inf')]]
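        # The values 0. and 1. sit on the support boundary, where the Beta
        # density vanishes (both concentrations exceed 1), hence log_prob = -inf.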
dist = Beta(dist_concentration1s_correct, dist_concentration0s_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_values))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
def test_distributions_beta_low_high(self):
dist_batch_shape_correct = torch.Size()
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size()
dist_log_prob_shape_correct = torch.Size()
dist_concentration1s_correct = 2
dist_concentration0s_correct = 3
dist_lows_correct = -2
dist_highs_correct = 5
dist_means_correct = 0.8
dist_stddevs_correct = 1.4
dist_log_probs_correct = 0.546965
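        # Sanity check (illustrative, assuming an affine transform to [low, high]):
        # mean = low + (high-low)*a/(a+b) = -2 + 7*0.4 = 0.8 and
        # stddev = (high-low)*sqrt(a*b/((a+b)**2*(a+b+1))) = 7*0.2 = 1.4.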
dist = Beta(dist_concentration1s_correct, dist_concentration0s_correct, low=dist_lows_correct, high=dist_highs_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_lows = util.to_numpy(dist.low)
dist_lows_empirical = util.to_numpy(dist_empirical.min)
dist_highs = util.to_numpy(dist.high)
dist_highs_empirical = util.to_numpy(dist_empirical.max)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_lows', 'dist_lows_empirical', 'dist_lows_correct', 'dist_highs', 'dist_highs_empirical', 'dist_highs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_lows, dist_lows_correct, atol=0.1))
self.assertTrue(np.allclose(dist_lows_empirical, dist_lows_correct, atol=0.1))
self.assertTrue(np.allclose(dist_highs, dist_highs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_highs_empirical, dist_highs_correct, atol=0.33))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
def test_distributions_beta_low_high_batched_2(self):
dist_batch_shape_correct = torch.Size([2])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([2])
dist_log_prob_shape_correct = torch.Size([2])
dist_concentration1s_correct = [2, 2]
dist_concentration0s_correct = [3, 3]
dist_lows_correct = [-2, 3]
dist_highs_correct = [5, 4]
dist_means_correct = [0.8, 3.4]
dist_stddevs_correct = [1.4, 0.2]
dist_log_probs_correct = [0.546965, 0.546965]
dist = Beta(dist_concentration1s_correct, dist_concentration0s_correct, low=dist_lows_correct, high=dist_highs_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_lows = util.to_numpy(dist.low)
dist_lows_empirical = util.to_numpy([dist_empirical.map(lambda x: x[0]).min, dist_empirical.map(lambda x: x[1]).min])
dist_highs = util.to_numpy(dist.high)
dist_highs_empirical = util.to_numpy([dist_empirical.map(lambda x: x[0]).max, dist_empirical.map(lambda x: x[1]).max])
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_lows', 'dist_lows_empirical', 'dist_lows_correct', 'dist_highs', 'dist_highs_empirical', 'dist_highs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_lows, dist_lows_correct, atol=0.1))
self.assertTrue(np.allclose(dist_lows_empirical, dist_lows_correct, atol=0.1))
self.assertTrue(np.allclose(dist_highs, dist_highs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_highs_empirical, dist_highs_correct, atol=0.33))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
def test_distributions_beta_low_high_batched_2_1(self):
dist_batch_shape_correct = torch.Size([2, 1])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([2, 1])
dist_log_prob_shape_correct = torch.Size([2, 1])
dist_concentration1s_correct = [[2], [2]]
dist_concentration0s_correct = [[3], [3]]
dist_lows_correct = [[-2], [3]]
dist_highs_correct = [[5], [4]]
dist_means_correct = [[0.8], [3.4]]
dist_stddevs_correct = [[1.4], [0.2]]
dist_log_probs_correct = [[0.546965], [0.546965]]
dist = Beta(dist_concentration1s_correct, dist_concentration0s_correct, low=dist_lows_correct, high=dist_highs_correct)
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_lows = util.to_numpy(dist.low)
dist_lows_empirical = util.to_numpy([[dist_empirical.map(lambda x: x[0]).min], [dist_empirical.map(lambda x: x[1]).min]])
dist_highs = util.to_numpy(dist.high)
dist_highs_empirical = util.to_numpy([[dist_empirical.map(lambda x: x[0]).max], [dist_empirical.map(lambda x: x[1]).max]])
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_lows', 'dist_lows_empirical', 'dist_lows_correct', 'dist_highs', 'dist_highs_empirical', 'dist_highs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_lows, dist_lows_correct, atol=0.1))
self.assertTrue(np.allclose(dist_lows_empirical, dist_lows_correct, atol=0.1))
self.assertTrue(np.allclose(dist_highs, dist_highs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_highs_empirical, dist_highs_correct, atol=0.25))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
def test_distributions_mixture(self):
dist_batch_shape_correct = torch.Size()
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size()
dist_log_prob_shape_correct = torch.Size()
dist_1 = Normal(0, 0.1)
dist_2 = Normal(2, 0.1)
dist_3 = Normal(3, 0.1)
dist_means_correct = 0.7
dist_stddevs_correct = 1.10454
dist_log_probs_correct = -23.473
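        # Sanity check (illustrative): mixture mean = 0.7*0 + 0.2*2 + 0.1*3 = 0.7;
        # E[X^2] = 0.7*0.01 + 0.2*(0.01+4) + 0.1*(0.01+9) = 1.71, so
        # stddev = sqrt(1.71 - 0.7**2) = 1.10454.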
dist = Mixture([dist_1, dist_2, dist_3], probs=[0.7, 0.2, 0.1])
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
def test_distributions_mixture_batched_2(self):
dist_batch_shape_correct = torch.Size([2])
dist_event_shape_correct = torch.Size()
dist_sample_shape_correct = torch.Size([2])
dist_log_prob_shape_correct = torch.Size([2])
dist_1 = Normal([0, 1], [0.1, 1])
dist_2 = Normal([2, 5], [0.1, 1])
dist_3 = Normal([3, 10], [0.1, 1])
dist_means_correct = [0.7, 8.1]
dist_stddevs_correct = [1.10454, 3.23883]
dist_log_probs_correct = [-23.473, -3.06649]
dist = Mixture([dist_1, dist_2, dist_3], probs=[[0.7, 0.2, 0.1], [0.1, 0.2, 0.7]])
dist_batch_shape = dist.batch_shape
dist_event_shape = dist.event_shape
dist_sample_shape = dist.sample().size()
dist_empirical = Empirical([dist.sample() for i in range(empirical_samples)])
dist_means = util.to_numpy(dist.mean)
dist_means_empirical = util.to_numpy(dist_empirical.mean)
dist_stddevs = util.to_numpy(dist.stddev)
dist_stddevs_empirical = util.to_numpy(dist_empirical.stddev)
dist_log_probs = util.to_numpy(dist.log_prob(dist_means_correct))
dist_log_prob_shape = dist.log_prob(dist_means_correct).size()
util.eval_print('dist_batch_shape', 'dist_batch_shape_correct', 'dist_event_shape', 'dist_event_shape_correct', 'dist_sample_shape', 'dist_sample_shape_correct', 'dist_log_prob_shape', 'dist_log_prob_shape_correct', 'dist_means', 'dist_means_empirical', 'dist_means_correct', 'dist_stddevs', 'dist_stddevs_empirical', 'dist_stddevs_correct', 'dist_log_probs', 'dist_log_probs_correct')
self.assertEqual(dist_batch_shape, dist_batch_shape_correct)
self.assertEqual(dist_event_shape, dist_event_shape_correct)
self.assertEqual(dist_sample_shape, dist_sample_shape_correct)
self.assertEqual(dist_log_prob_shape, dist_log_prob_shape_correct)
self.assertTrue(np.allclose(dist_means, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_means_empirical, dist_means_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_stddevs_empirical, dist_stddevs_correct, atol=0.1))
self.assertTrue(np.allclose(dist_log_probs, dist_log_probs_correct, atol=0.1))
if __name__ == '__main__':
pyprob.set_random_seed(123)
pyprob.set_verbosity(1)
unittest.main(verbosity=2)
|
{
"content_hash": "ee983fb9b3ec16e39a3eb1b8e6345f57",
"timestamp": "",
"source": "github",
"line_count": 1465,
"max_line_length": 510,
"avg_line_length": 63.03686006825939,
"alnum_prop": 0.6784913751096384,
"repo_name": "probprog/pyprob",
"id": "9ededafd789be98b0fec5175fe6438ef5a6f5356",
"size": "92349",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_distributions.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "667"
},
{
"name": "Jupyter Notebook",
"bytes": "123161"
},
{
"name": "Python",
"bytes": "580095"
},
{
"name": "Shell",
"bytes": "215"
}
],
"symlink_target": ""
}
|
import glob
from distutils.core import setup, run_setup
import sys
import os
keyw = """\
Pyjamas, GUI, Compiler, AJAX, Widget Set
"""
datadir = os.path.join("share", "pyjamas")
bp_data_files = glob.glob(os.path.join("builder", "boilerplate", "*"))
test_files = glob.glob(os.path.join("pyjs", "tests", "*"))
stub_files = glob.glob(os.path.join("stubs", "*"))
addons_data_files = glob.glob(os.path.join("addons", "*.py"))
#pygtkweb_data_files = glob.glob(os.path.join("pygtkweb", "*.py"))
data_files = [
(os.path.join(datadir, "builder", "boilerplate"), bp_data_files),
(os.path.join(datadir, "pyjs", "tests"), test_files),
(os.path.join(datadir, "stubs"), stub_files),
(os.path.join(datadir, "stubs"), stub_files),
#(os.path.join(datadir, "pygtkweb"), pygtkweb_data_files)
]
# main purpose of this function is to exclude "output" which
# could have been built by a developer.
def get_files(d):
res = []
for p in glob.glob(os.path.join(d, "*")):
if not p:
continue
(pth, fname) = os.path.split(p)
if fname == "output":
continue
if fname == "PureMVC_Python_1_0":
continue
if fname[-4:] == ".pyc": # ehmm.. no.
continue
if os.path.isdir(p):
get_dir(p)
else:
res.append(p)
return res
def get_dir(dirname):
for d in glob.glob("%s/*" % dirname):
if os.path.isdir(d):
(pth, fname) = os.path.split(d)
expath = get_files(d)
pth = os.path.join(os.path.join(datadir, dirname), fname)
data_files.append((pth, expath))
else:
data_files.append((os.path.join(datadir, dirname), [d]))
# recursively grab the library and the examples subdirectories - all contents
get_dir("library")
get_dir("examples")
# likewise pyjs/src/pyjs
get_dir(os.path.join("pyjs", "src", "pyjs", "builtin"))
get_dir(os.path.join("pyjs", "src", "pyjs", "lib"))
get_dir(os.path.join("pyjs", "src", "pyjs", "boilerplate"))
#from pprint import pprint
#pprint(data_files)
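# Each data_files entry pairs an install directory with the files placed
# there; e.g. the stubs entry above expands to
# ("share/pyjamas/stubs", <contents of stubs/*>).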
import distutils.core
if __name__ == '__main__':
print >> sys.stderr, """
Have you run bootstrap.py to create bin/pyjsbuild
and bin/pyjscompile?
e.g. on Unix systems:
python bootstrap.py /usr/share/pyjamas /usr
"""
setup(name = "Pyjamas",
version = "0.7~+pre2",
description = "Pyjamas Widget API for Web applications, in Python",
long_description = open('README', 'rt').read(),
url = "http://pyjs.org",
author = "The Pyjamas Project",
author_email = "lkcl@lkcl.net",
keywords = keyw,
packages=["pyjs", "pyjd"],
package_dir = {'pyjs': os.path.join('pyjs', 'src', 'pyjs'),
'pyjd': 'pyjd'},
data_files = data_files,
license = "Apache Software License",
platforms = ["any"],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Natural Language :: English",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python"
])
|
{
"content_hash": "6e998c77429751fc957748712bb55ba7",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 77,
"avg_line_length": 31.259615384615383,
"alnum_prop": 0.5770532143955706,
"repo_name": "certik/pyjamas",
"id": "026e0fb7b63bb96093c91776442f2d5969b2ad7f",
"size": "3390",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "run_bootstrap_first_then_setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "401884"
},
{
"name": "PHP",
"bytes": "121841"
},
{
"name": "Python",
"bytes": "4074658"
},
{
"name": "Shell",
"bytes": "14552"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import unittest
import os
from pymatgen import Element, Composition
from pymatgen.phasediagram.entries import PDEntryIO, PDEntry
from pymatgen.phasediagram.pdmaker import PhaseDiagram, \
GrandPotentialPhaseDiagram, CompoundPhaseDiagram, PhaseDiagramError
from pymatgen.phasediagram.pdanalyzer import PDAnalyzer
from pymatgen.phasediagram.plotter import PDPlotter
class PhaseDiagramTest(unittest.TestCase):
def setUp(self):
module_dir = os.path.dirname(os.path.abspath(__file__))
(self.elements, self.entries) = \
PDEntryIO.from_csv(os.path.join(module_dir, "pdentries_test.csv"))
self.pd = PhaseDiagram(self.entries)
def test_init(self):
#Ensure that a bad set of entries raises a PD error. Remove all Li
#from self.entries.
entries = filter(lambda e: (not e.composition.is_element) or
e.composition.elements[0] != Element("Li"),
self.entries)
self.assertRaises(PhaseDiagramError, PhaseDiagram, entries,
self.elements)
def test_dim1(self):
        #Ensure that dim 1 PDs can be generated.
for el in ["Li", "Fe", "O2"]:
entries = [e for e in self.entries
if e.composition.reduced_formula == el]
pd = PhaseDiagram(entries)
self.assertEqual(len(pd.stable_entries), 1)
a = PDAnalyzer(pd)
for e in entries:
decomp, ehull = a.get_decomp_and_e_above_hull(e)
self.assertGreaterEqual(ehull, 0)
plotter = PDPlotter(pd)
lines, stable_entries, unstable_entries = plotter.pd_plot_data
self.assertEqual(lines[0][1], [0, 0])
def test_stable_entries(self):
stable_formulas = [ent.composition.reduced_formula
for ent in self.pd.stable_entries]
expected_stable = ["Fe2O3", "Li5FeO4", "LiFeO2", "Fe3O4", "Li", "Fe",
"Li2O", "O2", "FeO"]
for formula in expected_stable:
self.assertTrue(formula in stable_formulas,
formula + " not in stable entries!")
def test_get_formation_energy(self):
stable_formation_energies = {ent.composition.reduced_formula:
self.pd.get_form_energy(ent)
for ent in self.pd.stable_entries}
expected_formation_energies = {'Li5FeO4': -164.8117344866667,
'Li2O2': -14.119232793333332,
'Fe2O3': -16.574164339999996,
'FeO': -5.7141519966666685, 'Li': 0.0,
'LiFeO2': -7.732752316666666,
'Li2O': -6.229303868333332,
'Fe': 0.0, 'Fe3O4': -22.565714456666683,
'Li2FeO3': -45.67166036000002,
'O2': 0.0}
for formula, energy in expected_formation_energies.items():
self.assertAlmostEqual(energy, stable_formation_energies[formula],
7)
def test_all_entries_hulldata(self):
self.assertEqual(len(self.pd.all_entries_hulldata), 492)
def test_planar_inputs(self):
e1 = PDEntry('H', 0)
e2 = PDEntry('He', 0)
e3 = PDEntry('Li', 0)
e4 = PDEntry('Be', 0)
e5 = PDEntry('B', 0)
e6 = PDEntry('Rb', 0)
pd = PhaseDiagram([e1, e2, e3, e4, e5, e6],
map(Element, ['Rb', 'He', 'B', 'Be', 'Li', 'H']))
self.assertEqual(len(pd.facets), 1)
def test_str(self):
self.assertIsNotNone(str(self.pd))
class GrandPotentialPhaseDiagramTest(unittest.TestCase):
def setUp(self):
module_dir = os.path.dirname(os.path.abspath(__file__))
(self.elements, self.entries) = PDEntryIO.from_csv(
os.path.join(module_dir, "pdentries_test.csv"))
self.pd = GrandPotentialPhaseDiagram(self.entries, {Element("O"): -5},
self.elements)
self.pd6 = GrandPotentialPhaseDiagram(self.entries, {Element("O"): -6})
def test_stable_entries(self):
stable_formulas = [ent.original_entry.composition.reduced_formula
for ent in self.pd.stable_entries]
expected_stable = ['Li5FeO4', 'Li2FeO3', 'LiFeO2', 'Fe2O3', 'Li2O2']
for formula in expected_stable:
self.assertTrue(formula in stable_formulas, formula +
" not in stable entries!")
self.assertEqual(len(self.pd6.stable_entries), 4)
def test_get_formation_energy(self):
stable_formation_energies = {
ent.original_entry.composition.reduced_formula:
self.pd.get_form_energy(ent)
for ent in self.pd.stable_entries}
expected_formation_energies = {'Fe2O3': 0.0,
'Li5FeO4': -5.305515040000046,
'Li2FeO3': -2.3424741500000152,
'LiFeO2': -0.43026396250000154,
'Li2O2': 0.0}
for formula, energy in expected_formation_energies.items():
self.assertAlmostEqual(energy, stable_formation_energies[formula],
7, "Calculated formation for " +
formula + " is not correct!")
def test_str(self):
self.assertIsNotNone(str(self.pd))
class CompoundPhaseDiagramTest(unittest.TestCase):
def setUp(self):
module_dir = os.path.dirname(os.path.abspath(__file__))
(self.elements, self.entries) = PDEntryIO.from_csv(
os.path.join(module_dir, "pdentries_test.csv"))
self.pd = CompoundPhaseDiagram(self.entries, [Composition("Li2O"),
Composition("Fe2O3")])
def test_stable_entries(self):
stable_formulas = [ent.name for ent in self.pd.stable_entries]
expected_stable = ["Fe2O3", "Li5FeO4", "LiFeO2", "Li2O"]
for formula in expected_stable:
self.assertTrue(formula in stable_formulas)
def test_get_formation_energy(self):
stable_formation_energies = {ent.name:
self.pd.get_form_energy(ent)
for ent in self.pd.stable_entries}
expected_formation_energies = {'Li5FeO4': -7.0773284399999739,
'Fe2O3': 0,
'LiFeO2': -0.47455929750000081,
'Li2O': 0}
for formula, energy in expected_formation_energies.items():
self.assertAlmostEqual(energy, stable_formation_energies[formula],
7)
def test_str(self):
self.assertIsNotNone(str(self.pd))
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "cd60393373b1a132e3aeb9a2159886fe",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 79,
"avg_line_length": 43.87116564417178,
"alnum_prop": 0.5403440078310726,
"repo_name": "sonium0/pymatgen",
"id": "22dba3e0c8acf2a41c29f205ecca3d8bade20373",
"size": "7261",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pymatgen/phasediagram/tests/test_pdmaker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Common Lisp",
"bytes": "3029065"
},
{
"name": "Groff",
"bytes": "868"
},
{
"name": "Perl",
"bytes": "229104"
},
{
"name": "Propeller Spin",
"bytes": "4026362"
},
{
"name": "Python",
"bytes": "3590333"
}
],
"symlink_target": ""
}
|
"""Generate Macro-ized C++ code for wrapping detach functions.
This works by iterating over all functions, and for each function, visiting the
types of its arguments and return value. Type visitors are transitive.
Eventually, all reachable types are visited. Visitors of certain kinds of types
will emit type wrappers if and only if a wrapper is necessary to maintain
attach/detach requirements.
Author: Peter Goodman (peter.goodman@gmail.com)
Copyright: Copyright 2012-2013 Peter Goodman, all rights reserved.
"""
from cparser import *
from cprinter import pretty_print_type
from ignore import should_ignore
from wrap import *
import re
def OUT(*args):
print "".join(map(str, args))
def NULL(*args):
pass
VA_LIST_FUNCS = set()
def ifdef_name(scoped_name):
return scoped_name.replace(" ", "_") \
.replace("::", "_") \
.replace(".", "_")
def pre_wrap_var(ctype, var_name, O, indent=" "):
intern_ctype = ctype.base_type()
if not must_wrap([intern_ctype]):
return
if not var_name:
if isinstance(intern_ctype, CTypeStruct):
pre_wrap_fields(intern_ctype, O)
elif is_function_pointer(intern_ctype):
O(indent, "WRAP_FUNCTION(", var_name, ");")
else:
O(indent, "PRE_OUT_WRAP(", var_name, ");")
def avoid_wrap_fields(ctype, O, pred):
num_wrappable_fields = 0
for field_ctype, field_name in ctype.fields():
if pred(field_ctype):
num_wrappable_fields += 1
if num_wrappable_fields < 2:
return
for field_ctype, field_name in ctype.fields():
if is_function_pointer(field_ctype):
O(" ABORT_IF_FUNCTION_IS_WRAPPED(arg.", field_name, ");")
break
def pre_wrap_fields(ctype, O):
avoid_wrap_fields(ctype, O, will_pre_wrap_type)
for field_ctype, field_name in ctype.fields():
if will_pre_wrap_type(field_ctype):
pre_wrap_var(
field_ctype,
field_name and ("arg.%s" % field_name) or None,
O)
def post_wrap_fields(ctype, O):
avoid_wrap_fields(ctype, O, will_post_wrap_type)
for field_ctype, field_name in ctype.fields():
if will_post_wrap_type(field_ctype):
if is_function_pointer(field_ctype):
O(" WRAP_FUNCTION(arg.", field_name, ");")
else:
O(" PRE_OUT_WRAP(arg.", field_name, ");")
def scoped_name(ctype):
parts = []
while True:
# if the type is scoped, then we use the internal name
# to omit 'struct', 'union', and 'enum' so that we don't
# end up with things like "foo::struct bar".
if ctype.parent_ctype:
parts.append(ctype.internal_name)
ctype = ctype.parent_ctype
# if the type is not scoped, then we use the full name
# to disambiguate it from, e.g. functions with the same
# name
else:
parts.append(ctype.name)
break
return "::".join(reversed(parts))
def wrap_struct(ctype, *args):
will_pre = will_pre_wrap_fields(ctype)
will_post = will_post_wrap_fields(ctype)
if not will_pre and not will_post:
return
# make sure we're not trying to wrap a struct that is
# embedded in an anonymous union
parent_ctype = ctype.parent_ctype
while parent_ctype:
if isinstance(parent_ctype, CTypeUnion) \
and not parent_ctype.had_name:
return
parent_ctype = parent_ctype.parent_ctype
name = scoped_name(ctype)
O = ctype.has_name and OUT or NULL
O("#ifndef APP_WRAPPER_FOR_", ifdef_name(name))
O("TYPE_WRAPPER(", name, ", ", "{")
if will_pre:
O(" NO_PRE_IN")
O(" PRE_OUT {")
#O(" if(!&arg) { granary_break_on_fault(); }")
pre_wrap_fields(ctype, O)
O(" }")
else:
O(" NO_PRE")
if will_post:
O(" NO_POST_IN")
O(" POST_OUT {")
#O(" if(!&arg) { granary_break_on_fault(); }")
post_wrap_fields(ctype, O)
O(" }")
else:
O(" NO_POST")
O(" NO_RETURN")
O("})")
O("#endif")
O("")
O("")
def wrap_typedef(ctype, name):
O = OUT
# e.g. "typedef struct foo foo;" is somewhat ambiguous (from the perspective
# of C++ template partial specialization), so we omit such typedefs.
#if name != ctype.internal_name:
# O("TYPEDEF_WRAPPER(", name, ", ", ctype.name, ")")
#O("")
# Output Granary code that will wrap a C function.
def wrap_function(ctype, orig_ctype, func):
# only care about non-variadic functions if they return wrappable types.
# otherwise, we always care about manually wrapping variadic functions
# and functions that don't return.
if not ctype.is_variadic \
and not has_extension_attribute(orig_ctype, "noreturn"):
#if not will_wrap_function(ctype.ret_type, []):
return
# don't wrap deprecated functions; the compiler will complain about them.
if has_extension_attribute(orig_ctype, "deprecated"):
return
if not must_wrap([ctype.ret_type] + ctype.param_types):
return
# internal function
#elif func.startswith("__"):
# return
O = OUT
internal_ret_type = ctype.ret_type.base_type()
suffix, is_void = "", False
if isinstance(internal_ret_type, CTypeBuiltIn) \
and "void" == internal_ret_type.name:
suffix, is_void = "_VOID", True
variadic = ""
if ctype.is_variadic:
if ctype.param_types:
variadic = ", "
variadic += "..."
arg_list = []
num_params = [0]
def next_param(p):
if p:
return p
else:
num_params[0] += 1
return "_arg%d" % num_params[0]
param_names = map(next_param, ctype.param_names)
last_arg_name = ""
for (arg_ctype, arg_name) in zip(ctype.param_types, param_names):
if not arg_name:
arg_name = ""
last_arg_name = arg_name
arg_list.append(pretty_print_type(arg_ctype, arg_name, lang="C++").strip(" "))
args = ", ".join(arg_list)
# get an output string for the return type.
ret_type = ""
if not is_void:
ret_type = pretty_print_type(ctype.ret_type, "", lang="C++").strip(" ")
ret_type = " (%s), " % ret_type
addr_check = ""
if func.startswith("__"):
addr_check = " && defined(DETACH_ADDR_%s)" % func
# TODO: re-enable auto-wrapping of variadic functions?
if ctype.is_variadic:
return
O("#if defined(CAN_WRAP_", func, ") && CAN_WRAP_", func, addr_check)
O("#ifndef APP_WRAPPER_FOR_", func)
O("#define APP_WRAPPER_FOR_", func)
O("FUNCTION_WRAPPER", suffix, "(APP, ", func, ",", ret_type,"(", args, variadic, "), {")
if ctype.is_variadic:
O(" va_list args__;")
O(" va_start(args__, %s);" % last_arg_name)
# assignment of return value; unattributed_type is used in place of base type
# so that we don't end up with anonymous structs/unions/enums.
a, r_v = "", ""
if not is_void:
r_v = "ret"
a = pretty_print_type(ctype.ret_type.unattributed_type(), r_v, "C++") + " = "
for (arg_ctype, arg_name) in zip(ctype.param_types, param_names):
pre_wrap_var(arg_ctype, arg_name, O, indent=" ")
global VA_LIST_FUNCS
va_func = "v%s" % func
special = False
if ctype.is_variadic and va_func in VA_LIST_FUNCS:
O(" IF_KERNEL( auto ", va_func, "((decltype(::", va_func, ") *) DETACH_ADDR_", va_func,"); ) ")
O(" ", a, va_func, "(", ", ".join(param_names + ["args__"]), ");")
else:
if ctype.is_variadic:
special = True
O(" // TODO: variadic arguments")
O(" D( granary_fault(); )")
O(" ", a, func, "(", ", ".join(param_names), ");")
if ctype.is_variadic:
O(" va_end(args__);")
#O(" D( granary::printf(\"FUNCTION_WRAPPER(APP, %s) %s\\n\"); )" % (func, special and "*" or ""))
if not is_void and not isinstance(ctype.ret_type.base_type(), CTypeBuiltIn):
O(" RETURN_IN_WRAP(", r_v, ");")
if not is_void:
O(" return ", r_v, ";")
O("})")
O("#endif")
O("#endif")
O()
O()
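# Illustrative shape of the emitted wrapper for a non-void, non-variadic
# function `foo` (names hypothetical):
#
#   #if defined(CAN_WRAP_foo) && CAN_WRAP_foo
#   #ifndef APP_WRAPPER_FOR_foo
#   #define APP_WRAPPER_FOR_foo
#   FUNCTION_WRAPPER(APP, foo, (T), (T0 _arg1), {
#       PRE_OUT_WRAP(_arg1);
#       T ret = foo(_arg1);
#       RETURN_IN_WRAP(ret);  // only for non-builtin return types
#       return ret;
#   })
#   #endif
#   #endif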
def visit_enum(ctype):
pass
def visit_function(ctype):
visit_type(ctype.ret_type)
for param_ctype in ctype.param_types:
if param_ctype:
visit_type(param_ctype)
def visit_attributed(ctype):
visit_type(ctype.ctype)
def visit_expression(ctype):
pass
def visit_bitfield(ctype):
visit_type(ctype.ctype)
def visit_array(ctype):
visit_type(ctype.ctype)
def visit_pointer(ctype):
visit_type(ctype.ctype)
def visit_typedef(ctype):
visit_type(ctype.ctype)
inner = ctype.ctype.base_type()
if isinstance(inner, CTypeStruct) and will_pre_wrap_fields(inner):
if not inner.has_name:
# todo: make sure some structures are not double wrapped
wrap_struct(inner, ctype.name)
else:
wrap_typedef(inner, ctype.name)
def visit_builtin(ctype):
pass
def visit_union(ctype):
for field_ctype, field_name in ctype.fields():
visit_type(field_ctype)
def visit_struct(ctype):
for field_ctype, field_name in ctype.fields():
visit_type(field_ctype)
if ctype.has_name:
wrap_struct(ctype)
def visit_use(ctype):
visit_type(ctype.ctype)
TYPES = set()
VISITORS = {
CTypeUse: visit_use,
CTypeEnum: visit_enum,
CTypeFunction: visit_function,
CTypeAttributed: visit_attributed,
CTypeExpression: visit_expression,
CTypeBitfield: visit_bitfield,
CTypeArray: visit_array,
CTypePointer: visit_pointer,
CTypeDefinition: visit_typedef,
CTypeBuiltIn: visit_builtin,
CTypeUnion: visit_union,
CTypeStruct: visit_struct,
}
def visit_type(ctype):
if ctype in TYPES:
return
TYPES.add(ctype)
VISITORS[ctype.__class__](ctype)
def visit_var_def(var, ctype):
visit_type(ctype)
orig_ctype = ctype
ctype = orig_ctype.base_type()
# don't declare enumeration constants
if isinstance(ctype, CTypeFunction):
wrap_function(ctype, orig_ctype, var)
def visit_possible_variadic_def(name, ctype, va_list_ctype):
global VA_LIST_FUNCS
if not isinstance(ctype, CTypeFunction):
return
if not ctype.param_types:
return
last_param_ctype = ctype.param_types[-1].base_type()
if last_param_ctype is va_list_ctype:
VA_LIST_FUNCS.add(name)
if "__main__" == __name__:
import sys
with open(sys.argv[1]) as lines_:
tokens = CTokenizer(lines_)
parser = CParser()
parser.parse_units(tokens)
va_list = None
try:
va_list = parser.get_type("va_list", CTypeDefinition)
va_list = va_list.base_type()
except:
pass
OUT("/* Auto-generated wrappers. */")
OUT("#define D(...) __VA_ARGS__ ")
OUT("")
for var, ctype in parser.vars():
if not should_ignore(var) and var.startswith("v"):
visit_possible_variadic_def(var, ctype.base_type(), va_list)
for var, ctype in parser.vars():
if not should_ignore(var):
visit_var_def(var, ctype)
|
{
"content_hash": "abb6b3eea781ff2bab03e0339a18b3cc",
"timestamp": "",
"source": "github",
"line_count": 417,
"max_line_length": 104,
"avg_line_length": 25.49400479616307,
"alnum_prop": 0.6240240805192362,
"repo_name": "Granary/granary",
"id": "7c18c397449345c5437712a581c31b3de70af2a4",
"size": "10631",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/generate_wrappers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "277993"
},
{
"name": "C",
"bytes": "77839"
},
{
"name": "C++",
"bytes": "1140672"
},
{
"name": "GDB",
"bytes": "19659"
},
{
"name": "Makefile",
"bytes": "29819"
},
{
"name": "Python",
"bytes": "235588"
},
{
"name": "Shell",
"bytes": "423"
}
],
"symlink_target": ""
}
|
from licant.modules import submodule
from licant.cxx_modules import application
import licant
from licant.scripter import scriptq
scriptq.execute("../../gxx.g.py")
application("target",
sources = ["main.cpp"],
include_paths = ["../.."],
modules = [
submodule("gxx", "posix"),
submodule("gxx.dprint", "cout"),
submodule("gxx.print", "cout"),
],
)
licant.ex("target")
|
{
"content_hash": "cb0c4ce743e24312db4093f8a16b6549",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 42,
"avg_line_length": 21,
"alnum_prop": 0.6772486772486772,
"repo_name": "Mirmik/gxx",
"id": "c3f1b07ae5743706f18e263349c1b217d2e84f66",
"size": "417",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/HIDE/tree/make.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "64"
},
{
"name": "C",
"bytes": "690591"
},
{
"name": "C++",
"bytes": "1010156"
},
{
"name": "Lua",
"bytes": "2409"
},
{
"name": "Objective-C",
"bytes": "4072"
},
{
"name": "Python",
"bytes": "49652"
},
{
"name": "QMake",
"bytes": "955"
}
],
"symlink_target": ""
}
|
import json, re, sys, urllib
## osu!apy Methods
## build_request - Returns the full API request URL using the provided base URL and parameters.
## 	list_of_params - The list of parameters to add to the end of the request URL.
## 	url            - The base API request URL to append the list of parameters to.
def build_request(list_of_params, url):
## Build the request URL.
for param in list_of_params:
url += str(param)
if (param != ""):
url += "&"
## Remove the trailing '&' because I'm OCD.
return url[:-1]
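## Example (illustrative):
## 	build_request(["k=abc", "u=123", ""], "https://osu.ppy.sh/api/get_user?")
## returns "https://osu.ppy.sh/api/get_user?k=abc&u=123".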
## get_beatmaps - Returns a JSON payload containing information about a beatmap set or beatmap.
## key - Your API key. (Required)
## since - A MYSQL-formatted date which is the cut off for the returned data.
## set_id - A beatmap set ID.
## beatmap_id - A beatmap ID.
## user_id - A user ID.
def get_beatmaps(key, since, set_id, beatmap_id, user_id):
## Create a list to store the attributes which are present.
list_of_params = []
## Populate the list of PHP variables.
## Only prepend the PHP variable names if they are there.
list_of_params.append(parameterize_key(key))
list_of_params.append(parameterize_since(since))
list_of_params.append(parameterize_id("s", set_id))
list_of_params.append(parameterize_id("b", beatmap_id))
list_of_params.append(parameterize_id("u", user_id))
	## Build the request URL and return the response.
return urllib.urlopen(build_request(list_of_params, "https://osu.ppy.sh/api/get_beatmaps?")).read()
## get_match - Returns information about a multiplayer match.
## key - Your API key. (Required)
## multi_id - A multiplayer match ID.
def get_match(key, multi_id):
## Create a list to store the attributes which are present.
list_of_params = []
## Populate the list of PHP variables.
## Only prepend the PHP variable names if they are there.
list_of_params.append(parameterize_key(key))
list_of_params.append(parameterize_id("mp", multi_id))
	## Build the request URL and return the response.
	return urllib.urlopen(build_request(list_of_params, "https://osu.ppy.sh/api/get_match?")).read()
## get_scores - Returns information about the top 50 scores of a specified beatmap.
## key - Your API key.
## beatmap_id - A beatmap ID.
## user_id - A user ID.
## mode - The game mode for which to get info.
## (0 = osu!, 1 = Taiko, 2 = CtB, 3 = osu!mania, Default = 0)
def get_scores(key, beatmap_id, user_id, mode):
## Create a list to store the attributes which are present.
list_of_params = []
## Populate the list of PHP variables.
## Only prepend the PHP variable names if they are there.
list_of_params.append(parameterize_key(key))
list_of_params.append(parameterize_id("b", beatmap_id))
list_of_params.append(parameterize_id("u", user_id))
list_of_params.append(parameterize_mode(mode))
## Build the full request URL and return the response.
return urllib.urlopen(build_request(list_of_params, "https://osu.ppy.sh/api/get_scores?")).read()
## get_user - Returns a JSON payload containing information about a user.
## key - Your API key. (Required)
## user_id - A user ID. (Required)
## mode - The game mode for which to get info.
## (0 = osu!, 1 = Taiko, 2 = CtB, 3 = osu!mania, Default = 0)
## 	type - Specifies whether the user_id specified is an ID or a username.
## (id = id, string = username, default = Autodetect)
## event_days - Maximum number of days between now and last event date.
## (1 - 31, default = 1)
def get_user(key, user_id, mode, type, event_days):
## Create a list to store the attributes which are present.
list_of_params = []
## Populate the list of PHP variables.
## Only prepend the PHP variable names if they are there.
list_of_params.append(parameterize_key(key))
list_of_params.append(parameterize_id("u", user_id))
list_of_params.append(parameterize_mode(mode))
list_of_params.append(parameterize_type(type))
list_of_params.append(parameterize_event_days(event_days))
## Build the request URL and return the response.
return urllib.urlopen(build_request(list_of_params, "https://osu.ppy.sh/api/get_user?")).read()
## get_user_best - Returns the top scores for the specified user.
## key - Your API key. (Required)
## user_id - A user ID. (Required)
## mode - The game mode for which to get info.
## (0 = osu!, 1 = Taiko, 2 = CtB, 3 = osu!mania, Default = 0)
## limit - # of results to return.
## (1 - 50, Default = 10).
## 	type - Specifies whether the user_id specified is an ID or a username.
## (id = id, string = username, default = Autodetect)
def get_user_best(key, user_id, mode, limit, type):
## Create a list to store the attributes which are present.
list_of_params = []
## Populate the list of PHP variables.
## Only prepend the PHP variable names if they are there.
list_of_params.append(parameterize_key(key))
list_of_params.append(parameterize_id("u", user_id))
list_of_params.append(parameterize_mode(mode))
list_of_params.append(parameterize_limit(limit))
list_of_params.append(parameterize_type(type))
## Build the full request URL and return the response.
return urllib.urlopen(build_request(list_of_params, "https://osu.ppy.sh/api/get_user_best?")).read()
## get_user_recent - Returns the user's ten most recent plays.
## key - Your API key. (Required)
## user_id - A user ID. (Required)
## mode - The game mode for which to get info.
## (0 = osu!, 1 = Taiko, 2 = CtB, 3 = osu!mania, Default = 0)
## 	type - Specifies whether the user_id specified is an ID or a username.
## (id = id, string = username, default = Autodetect)
def get_user_recent(key, user_id, mode, type):
## Create a list to store the attributes which are present.
list_of_params = []
## Populate the list of PHP variables.
## Only prepend the PHP variable names if they are there.
list_of_params.append(parameterize_key(key))
list_of_params.append(parameterize_id("u", user_id))
list_of_params.append(parameterize_mode(mode))
list_of_params.append(parameterize_type(type))
## Build the full request URL and return the response.
return urllib.urlopen(build_request(list_of_params, "https://osu.ppy.sh/api/get_user_recent?")).read()
## parameterize_event_days - Formats event days as a PHP parameter.
def parameterize_event_days(event_days):
if (event_days == ""):
event_days = "event_days=1"
elif (int(event_days) >= 1 and int(event_days) <= 31):
event_days = "event_days=" + str(event_days)
else:
print " Invalid event_days \"" + str(event_days) + ".\""
sys.exit()
return event_days
## parameterize_id - Formats an ID as a PHP parameter.
## t - The type of ID.
## (b = beatmap, s = beatmap set, u = user)
## id - A beatmap, beatmap set, or user ID.
def parameterize_id(t, id):
if (t != "b" and t != "s" and t != "u" and t != "mp"):
print " Invalid type \"" + str(t) + ".\""
sys.exit()
if (len(str(id)) != 0):
return t + "=" + str(id)
else:
return ""
## parameterize_key - Formats an API key as a PHP parameter.
## key - An API key.
def parameterize_key(key):
if (len(key) == 40):
return "k=" + key
else:
print " Invalid key \"" + str(key) + ".\""
sys.exit()
## parameterize_limit - Formats the limit as a PHP parameter.
## limit - The maximum # of scores to show.
def parameterize_limit(limit):
## Default case: 10 scores
if (limit == ""):
limit = "limit=10"
elif (int(limit) >= 1 and int(limit) <= 50):
limit = "limit=" + str(limit)
else:
print " Invalid limit \"" + str(limit) + ".\""
sys.exit()
return limit
## parameterize_mode - Formats a mode as a PHP parameter.
## mode - The game mode for which to get info.
def parameterize_mode(mode):
## Default case: 0 (osu!)
if (mode == ""):
mode = "m=0"
elif (int(mode) >= 0 and int(mode) <= 3):
mode = "m=" + str(mode)
else:
print " Invalid mode \"" + str(mode) + ".\""
sys.exit()
return mode
## parameterize_since - Formats a since as a PHP parameter.
## since - A MYSQL-formatted date which is the cut off for the time period in which to return data.
def parameterize_since(since):
if (since == ""):
return since
if (re.match("[0-9]{4}\-[0-1]?[1-9]\-[0-3]?[1-9] [0-2]?[0-9]\:[0-5][0-9]\:[0-5][0-9]", since)):
return "since=" + str(since)
else:
print " Invalid since \"" + str(since) + ".\""
sys.exit()
## parameterize_type - Formats a type as a PHP parameter.
## 	type - Specifies whether the user_id specified is an ID or a username.
def parameterize_type(type):
if (type == ""):
return type
elif (type == "id" or type == "string"):
return "type=" + str(type)
else:
print " Invalid type \"" + str(type) + ".\""
sys.exit()
## End of classless methods.
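## Example usage (hypothetical key and username):
## 	response = get_user("0123456789abcdef0123456789abcdef01234567", "peppy", "", "string", "")
## 	user_info = json.loads(response)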
|
{
"content_hash": "19493cdeb69a234c492a34e7e25ba70b",
"timestamp": "",
"source": "github",
"line_count": 228,
"max_line_length": 112,
"avg_line_length": 39.6359649122807,
"alnum_prop": 0.6411419718933274,
"repo_name": "albinohat/osu-apy",
"id": "bccca801c8d99d67c3ecd8c640c567d1fb1d4028",
"size": "10298",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "2.7/osu_apy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "29162"
}
],
"symlink_target": ""
}
|
import unittest
from telemetry.core import wpr_modes
from telemetry.core.backends.chrome import chrome_browser_backend
class FakePlatformBackend(object):
def __init__(self, wpr_http_device_port, wpr_https_device_port,
is_host_platform):
self.wpr_http_device_port = wpr_http_device_port
self.wpr_https_device_port = wpr_https_device_port
self.is_host_platform = is_host_platform
class FakeBrowserOptions(object):
def __init__(self, netsim=False, wpr_mode=wpr_modes.WPR_OFF):
self.netsim = netsim
self.wpr_mode = wpr_mode
self.browser_type = 'chrome'
self.dont_override_profile = False
class FakeForwarderFactory(object):
host_ip = '127.0.0.1'
def __init__(self, does_forwarder_override_dns):
self.does_forwarder_override_dns = does_forwarder_override_dns
class TestChromeBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
# The test does not need to define the abstract methods. pylint: disable=W0223
def __init__(self, browser_options, does_forwarder_override_dns=False,
wpr_http_device_port=None, wpr_https_device_port=None,
is_running_locally=False):
super(TestChromeBrowserBackend, self).__init__(
platform_backend=FakePlatformBackend(
wpr_http_device_port, wpr_https_device_port, is_running_locally),
supports_tab_control=False,
supports_extensions=False,
browser_options=browser_options,
output_profile_path=None,
extensions_to_load=None)
self._forwarder_factory = FakeForwarderFactory(does_forwarder_override_dns)
class ReplayStartupArgsTest(unittest.TestCase):
"""Test expected inputs for GetReplayBrowserStartupArgs."""
def testReplayOffGivesEmptyArgs(self):
browser_options = FakeBrowserOptions()
browser_backend = TestChromeBrowserBackend(browser_options)
self.assertEqual([], browser_backend.GetReplayBrowserStartupArgs())
def BasicArgsHelper(self, is_running_locally):
# Covers Android without RNDIS and CrOS.
browser_options = FakeBrowserOptions(
wpr_mode=wpr_modes.WPR_REPLAY,
netsim=False)
browser_backend = TestChromeBrowserBackend(
browser_options,
does_forwarder_override_dns=False,
wpr_http_device_port=456,
wpr_https_device_port=567,
is_running_locally=is_running_locally)
expected_args = [
'--host-resolver-rules=MAP * 127.0.0.1,EXCLUDE localhost',
'--ignore-certificate-errors',
'--testing-fixed-http-port=456',
'--testing-fixed-https-port=567'
]
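    # The fake WPR device ports (456/567) surface as Chrome's fixed testing
    # ports, and host resolution is mapped to the forwarder's 127.0.0.1 so
    # traffic reaches the replay server.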
self.assertEqual(
expected_args,
sorted(browser_backend.GetReplayBrowserStartupArgs()))
def testBasicArgs(self):
# The result is the same regardless of whether running locally.
self.BasicArgsHelper(is_running_locally=True)
self.BasicArgsHelper(is_running_locally=False)
def testDesktopNetsimGivesNoFixedPortsNorHostResolverRules(self):
browser_options = FakeBrowserOptions(
wpr_mode=wpr_modes.WPR_REPLAY,
netsim=True)
browser_backend = TestChromeBrowserBackend(
browser_options,
does_forwarder_override_dns=False,
wpr_http_device_port=80,
wpr_https_device_port=443,
is_running_locally=True)
expected_args = ['--ignore-certificate-errors']
self.assertEqual(
expected_args,
sorted(browser_backend.GetReplayBrowserStartupArgs()))
def ForwarderOverridesDnsHelper(self, is_netsim):
# Android with --use-rndis uses standard remote ports and
# relies on the forwarder to override DNS resolution.
browser_options = FakeBrowserOptions(
wpr_mode=wpr_modes.WPR_REPLAY,
netsim=is_netsim)
browser_backend = TestChromeBrowserBackend(
browser_options,
does_forwarder_override_dns=True,
wpr_http_device_port=80,
wpr_https_device_port=443,
is_running_locally=False)
expected_args = ['--ignore-certificate-errors']
self.assertEqual(
expected_args,
sorted(browser_backend.GetReplayBrowserStartupArgs()))
def testAndroidRndisGivesNoFixedPortsNorHostResolverRules(self):
# The result is the same regardless of netsim setting.
self.ForwarderOverridesDnsHelper(is_netsim=True)
self.ForwarderOverridesDnsHelper(is_netsim=False)
def testRemoteCrOsNetsimStillUsesHostResolver(self):
# CrOS has not implemented the forwarder override for DNS.
browser_options = FakeBrowserOptions(
wpr_mode=wpr_modes.WPR_REPLAY,
netsim=True)
browser_backend = TestChromeBrowserBackend(
browser_options,
does_forwarder_override_dns=False,
wpr_http_device_port=80,
wpr_https_device_port=443,
is_running_locally=False)
expected_args = [
'--host-resolver-rules=MAP * 127.0.0.1,EXCLUDE localhost',
'--ignore-certificate-errors',
]
self.assertEqual(
expected_args,
sorted(browser_backend.GetReplayBrowserStartupArgs()))
|
{
"content_hash": "e08268bb36a7655356f58200f1a4b35b",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 80,
"avg_line_length": 37.111111111111114,
"alnum_prop": 0.7007984031936128,
"repo_name": "dednal/chromium.src",
"id": "baed6512530a51c37d3cbca37886bf8b958b6fb4",
"size": "5173",
"binary": false,
"copies": "9",
"ref": "refs/heads/nw12",
"path": "tools/telemetry/telemetry/core/backends/chrome/chrome_browser_backend_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "34522"
},
{
"name": "Batchfile",
"bytes": "8451"
},
{
"name": "C",
"bytes": "9240962"
},
{
"name": "C++",
"bytes": "222772775"
},
{
"name": "CSS",
"bytes": "875874"
},
{
"name": "Dart",
"bytes": "74976"
},
{
"name": "Go",
"bytes": "18155"
},
{
"name": "HTML",
"bytes": "27190037"
},
{
"name": "Java",
"bytes": "7645280"
},
{
"name": "JavaScript",
"bytes": "18828195"
},
{
"name": "Makefile",
"bytes": "96270"
},
{
"name": "Objective-C",
"bytes": "1397246"
},
{
"name": "Objective-C++",
"bytes": "7575073"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "248854"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "418340"
},
{
"name": "Python",
"bytes": "8032766"
},
{
"name": "Shell",
"bytes": "464218"
},
{
"name": "Standard ML",
"bytes": "4965"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "18335"
}
],
"symlink_target": ""
}
|
from django.views.generic import TemplateView
class AgendaView(TemplateView):
template_name = "opconsole_agenda.html"
|
{
"content_hash": "079e24a441451914e7e57b891b9637ea",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 45,
"avg_line_length": 30.5,
"alnum_prop": 0.8032786885245902,
"repo_name": "baalkor/timetracking",
"id": "aec0fdbc63293346307b5ede35f23d4dff4a9946",
"size": "122",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opconsole/views/agendaView.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2854"
},
{
"name": "HTML",
"bytes": "46860"
},
{
"name": "JavaScript",
"bytes": "17219"
},
{
"name": "Python",
"bytes": "98024"
}
],
"symlink_target": ""
}
|
import datetime
import json
import logging
from multiprocessing import Pool
import os
import re
import requests
from db import RssDB, Thread
ROOT_PATH = os.path.dirname(os.path.realpath(__file__)) + os.sep + '..' + os.sep
l = logging.getLogger(__name__)
def download_mp(p, save_path, http_session):
# Create necessary directory
d = p.get('dir', '')
if not os.path.exists(save_path + os.sep + d):
try:
os.makedirs(save_path + os.sep + d)
except OSError:
pass
url = "https://i.4cdn.org/%s/%s%s" % (p['board'], p['tim'], p['ext'])
r = http_session.get(url, stream=True, timeout=30)
if r.status_code == 200:
with open(save_path + os.sep + p.get('dir', '') + os.sep\
+ p['filename'] + '_' + p['tim'] + p['ext'], 'wb') as f:
for chunk in r:
f.write(chunk)
class DownloaderBase():
def __init__(self, conf):
self.conf = conf
# DB related stuff
self.rssdb = RssDB(self.conf['db']['uri'], self.conf['down']['save_path'])
self.db_engine = self.rssdb.db_engine
self.db_session = self.rssdb.db_session
# HTTP session
self.http_session = self._login()
def _login(self):
http_session = requests.Session()
return http_session
################################################################################################
################################################################################################
# Main actions available #
################################################################################################
################################################################################################
def monitor(self):
posts_to_down = 0
pool = Pool(self.conf['down']['max_dl'])
for thread in self.db_session.query(Thread).filter_by(active=True):
try:
data = self.http_session.get("https://a.4cdn.org/%s/thread/%s.json" %\
(thread.board, thread.no), timeout=30)
except requests.exceptions.Timeout:
continue
if data.status_code == 404:
thread.active = False
continue
if data.status_code != 200:
l.warning("Got error code '%s'. Skipping thread...", data.status_code)
continue
data_json = {}
try:
data_json = json.loads(data.content.decode('UTF-8'))
except ValueError:
l.warning("Could not parse content of thread '%s' of board '%s'!",\
thread.no, thread.board)
continue
thread.date_updated = datetime.datetime.utcnow()
for post in data_json.get('posts', []):
# Get the title of the thread if not available
if not thread.com:
thread.com = re.sub(r'\W+', '-',\
post.get('sub') or post.get('com') or str(post.get('no')))
thread.com = thread.com[:50]
# Download post if necessary
last_no = post.get('no', 9999999999999)
if last_no > thread.last_no and post.get('filename'):
posts_to_down += 1
post_to_down = {
'dir': thread.com,
'board': thread.board,
'filename': str(post.get('filename', '')),
'tim': str(post.get('tim', '')),
'ext': str(post.get('ext', '')),
}
pool.apply_async(
download_mp,
args=(post_to_down, self.conf['down']['save_path'], self.http_session)
)
l.debug("Downloading %s", post_to_down['filename'])
thread.last_no = last_no
self.db_session.commit()
pool.close()
pool.join()
if posts_to_down:
l.info("%s file(s) downloaded!", posts_to_down)
else:
l.info("No file to download")
def add_thread(self, board, no, com=''):
if self.db_session.query(Thread).filter_by(board=board, no=no).first():
return
thread = Thread()
thread.board = str(board)
thread.no = int(no)
thread.com = com
thread.last_no = 0
thread.active = True
self.db_session.add(thread)
self.db_session.commit()
l.info("Thread '%s' of board '%s' added for monitoring", no, board)
def clean(self):
date_limit =\
datetime.datetime.utcnow() - datetime.timedelta(days=self.conf['db']['max_age_days'])
threads_removed = self.db_session.query(Thread)\
.filter(Thread.date_updated < date_limit, Thread.active == False).delete()
self.db_session.commit()
l.info("%s threads(s) removed", threads_removed)
def show(self):
for thread in self.db_session.query(Thread).order_by(Thread.id):
print(thread)
|
{
"content_hash": "7efe6238ae502c0cdd538f52d4d79de2",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 100,
"avg_line_length": 35.91724137931035,
"alnum_prop": 0.4650537634408602,
"repo_name": "nicolasmartinelli/4chan-download",
"id": "d588caf5866456f97f9b83c33d68234a002e988f",
"size": "5247",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pychanmonitor/downloader.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4412"
},
{
"name": "JavaScript",
"bytes": "4176"
},
{
"name": "Python",
"bytes": "9179"
}
],
"symlink_target": ""
}
|
import os
import datetime as dt
import time
from astropy.io import fits as pyfits
import matplotlib
matplotlib.use('Agg')
import pylab as pl
import numpy as np
from scipy import stats
from AllSky340 import AllSky340
cam = AllSky340(port="/dev/ttyUSB0",
baudrate=460800,
timeout=0.1)
cam.log_info("Image acquisition script starting up.")
ndark = 0
exp = 60.0
os.chdir("/var/www/skycam/")
def get_obsdir():
"""return the obsdate directory"""
now = dt.datetime.now() - dt.timedelta(days=0.5)
year = now.strftime('%Y/')
obsdir=now.strftime('%Y/%m%d/')
if not os.path.isdir(year): os.mkdir(year)
if not os.path.isdir(obsdir): os.mkdir(obsdir)
return obsdir
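# Main acquisition loop: a file named STOP in the working directory acts as a
# shutdown sentinel, a fresh dark frame is taken roughly every 10 exposures,
# and the exposure time is halved or doubled when the median counts leave the
# ~4000-15000 ADU band (a simple auto-exposure scheme).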
while True:
if os.path.isfile("STOP"):
cam.log_info("Image acquisition script shutting down.")
os.system("rm STOP")
break
try:
if ndark == 0:
dark = cam.getImage(exp, light=False)
ndark += 1
imag = cam.getImage(exp, light=True)
# get the time and set up labels and filenames
obsdir = get_obsdir()
now = time.localtime()
imag -= dark
        vmin = stats.scoreatpercentile(imag.flat, 1)
        vmax = stats.scoreatpercentile(imag.flat, 99.5)
filename = obsdir+time.strftime("AllSky_%Y%m%d_%H%M%S.fits")
jpg = obsdir+time.strftime("AllSky_%Y%m%d_%H%M%S.jpg")
date = time.strftime("%Y/%m/%d")
sast = time.strftime("%H:%M:%S")
elabel = "Exposure: %f sec" % exp
# set up and create the FITS file
cards = []
cards.append(pyfits.createCard("DATEOBS", date, "Date of observation"))
cards.append(pyfits.createCard("TIMEOBS",
sast,
"Time of observation (SAST)"))
cards.append(pyfits.createCard("EXPTIME", exp, "Exposure time (s)"))
header = pyfits.Header(cards=cards)
pyfits.writeto(filename, imag, header=header, clobber=True)
# set up and create the JPG file
fig = pl.figure()
ax = fig.add_subplot(111)
ax.set_axis_off()
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.set_xticks([])
ax.set_yticks([])
        pl.imshow(np.flipud(imag), cmap=matplotlib.cm.gray, aspect='normal',
                  vmin=vmin, vmax=vmax)
pl.text(10, 5, date, color='w',
verticalalignment='top', fontweight='bold')
pl.text(630, 5, sast, color='w',
verticalalignment='top', horizontalalignment='right',
fontweight='bold')
pl.text(10, 475, "%.2g sec" % exp, color='w', fontweight='bold')
pl.savefig(jpg, bbox_inches="tight", pad_inches=0.0, quality=95)
pl.close()
os.system("ln -sf /var/www/skycam/%s Data/AllSkyCurrentImage.JPG" % jpg)
os.system("ln -sf /var/www/skycam/%s Data/AllSkyCurrentImage.fits" % filename)
except Exception, err:
cam.log_err("Oops! Something went wrong...%s" % err)
ndark += 1
if ndark > 10:
ndark = 0
if np.median(imag) > 15000.0:
exp /= 2.0
ndark = 0
if np.median(imag) < 4000.0 and exp < 60.0:
exp *= 2.0
ndark = 0
|
{
"content_hash": "a8add78c64b7423089699051e08edff2",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 86,
"avg_line_length": 31.115384615384617,
"alnum_prop": 0.5754017305315204,
"repo_name": "saltastro/skycam",
"id": "f5267613a0e6a122247f359b4ed52db63c15a5ee",
"size": "3259",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "skycam.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "114976"
},
{
"name": "Shell",
"bytes": "2210"
},
{
"name": "TeX",
"bytes": "15859"
}
],
"symlink_target": ""
}
|
'''
Test correctness of matvec for various cases.
'''
import dynamite_test_runner as dtr
import numpy as np
import hamiltonians
from dynamite import config
from dynamite.msc_tools import msc_dtype
from dynamite.operators import identity, sigmax, sigmay, index_sum, index_product
from dynamite.subspaces import Full, Parity, Auto, SpinConserve
from dynamite.states import State
from dynamite.tools import complex_enabled
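# Class decorator that attaches one test method per named Hamiltonian. The
# `n=H_name` default argument is deliberate: it binds the current loop value
# at definition time, avoiding the classic late-binding closure pitfall.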
def generate_hamiltonian_tests(cls):
for H_name in hamiltonians.get_names(complex_enabled()):
setattr(cls, 'test_'+H_name, lambda self, n=H_name: self.check_hamiltonian(n))
return cls
class FullSpace(dtr.DynamiteTestCase):
def check_nonzeros(self, state, nonzeros):
'''
Check that a vector has the correct nonzeros.
Parameters
----------
state : dynamite.states.State
The state containing the vector
nonzeros : dict
A dictionary, where the keys are the indices of the nonzero elements
and the values are the nonzero values
'''
# first check that the state's norm is right
correct_norm = sum(np.abs(v)**2 for v in nonzeros.values())
self.assertEqual(state.vec.norm(), correct_norm)
istart, iend = state.vec.getOwnershipRange()
for idx, val in nonzeros.items():
if istart <= idx < iend:
self.assertEqual(state.vec[idx], val, msg = 'idx: %d' % idx)
else:
# we have to do this for MPI
self.assertEqual(0, 0)
def test_identity(self):
s = State(state = 3)
r = identity() * s
correct = {3 : 1}
self.check_nonzeros(r, correct)
def test_spinflip(self):
H = index_product(sigmax())
s = State(state='D'*H.get_length())
r = H * s
correct = {0 : 1}
self.check_nonzeros(r, correct)
@generate_hamiltonian_tests
class FullHamiltonians(dtr.DynamiteTestCase):
def check_hamiltonian(self, H_name):
H = getattr(hamiltonians, H_name)()
bra, ket = H.create_states()
#ket.set_product(0)
ket.set_random(seed = 0)
#ket.vec.set(1)
H.dot(ket, bra)
self.assertLess(1E-3, bra.vec.norm(), msg = 'petsc vec norm incorrect')
ket_np = ket.to_numpy()
bra_check = bra.to_numpy()
if ket_np is not None:
self.assertNotEqual(np.linalg.norm(bra_check), 0, msg = 'numpy vec zero')
H_np = H.to_numpy()
bra_np = H_np.dot(ket_np)
inner_prod = bra_check.dot(bra_np.conj())
if inner_prod != 0:
inner_prod /= np.linalg.norm(bra_check) * np.linalg.norm(bra_np)
bad_idxs = np.where(np.abs(bra_check - bra_np) > 1E-12)[0]
msg = '\n'
for idx in bad_idxs:
msg += 'at {}: correct: {} check: {}\n'.format(idx, bra_np[idx], bra_check[idx])
else:
inner_prod = 1
msg = ''
self.assertLess(np.abs(1 - inner_prod), 1E-9, msg=msg)
@generate_hamiltonian_tests
class Subspaces(dtr.DynamiteTestCase):
def compare_to_full(self, H, x_sub, x_full, check_subspace):
'''
Compare multiplication under the full Hamiltonian to multiplication
in the subspace.
Parameters
----------
H : dynamite.operators.Operator
The operator to multiply.
        x_sub : dynamite.states.State
            The state to multiply, on the ``check_subspace`` subspace
        x_full : dynamite.states.State
            The same state, on the Full subspace
check_subspace : dynamite.subspace.Subspace
The subspace to multiply under.
'''
extra_conversion = isinstance(check_subspace, SpinConserve)
extra_conversion = extra_conversion and check_subspace.spinflip
# compare all possible combinations of going to and from the full space
self.assertTrue(isinstance(x_full.subspace, Full))
self.assertIs(x_sub.subspace, check_subspace)
to_space = identity()
to_space.allow_projection = True
if extra_conversion:
to_space.add_subspace(SpinConserve(check_subspace.L, check_subspace.k), Full())
else:
to_space.add_subspace(check_subspace, Full())
correct_full = State(subspace=Full())
H.dot(x_full, correct_full)
if extra_conversion:
tmp = State(subspace=to_space.left_subspace)
to_space.dot(correct_full, tmp)
correct_sub = SpinConserve.convert_spinflip(tmp, sign=check_subspace.spinflip)
else:
correct_sub = State(subspace=check_subspace)
to_space.dot(correct_full, correct_sub)
with self.subTest(which='s2s'):
self.check_s2s(H, x_sub, check_subspace, correct_sub)
if not extra_conversion:
H.allow_projection = True
with self.subTest(which='f2s'):
self.check_f2s(H, x_full, check_subspace, correct_sub)
with self.subTest(which='s2f'):
self.check_s2f(H, x_sub, check_subspace, correct_sub)
@classmethod
def generate_random_in_subspace(cls, space):
x_sub = State(subspace=space, state='random', seed=0)
if isinstance(space, SpinConserve) and space.spinflip:
tmp = SpinConserve.convert_spinflip(x_sub)
else:
tmp = x_sub
from_space = identity()
from_space.add_subspace(Full(), tmp.subspace)
x_full = State(subspace=Full())
from_space.dot(tmp, x_full)
return x_sub, x_full
def check_f2s(self, H, x_full, check_subspace, correct):
'''
check multiplication from full to subspace
'''
H.add_subspace(check_subspace, Full())
result = State(subspace=check_subspace)
H.dot(x_full, result)
eps = H.nnz*np.finfo(msc_dtype[2]).eps
self.check_vec_equal(correct, result, eps=eps)
def check_s2f(self, H, x_sub, check_subspace, correct):
'''
check multiplication from subspace to full
'''
H.add_subspace(Full(), check_subspace)
to_space = identity()
to_space.add_subspace(check_subspace, Full())
to_space.allow_projection = True
sub_state = State(subspace=check_subspace)
full_state = State(subspace=Full())
H.dot(x_sub, full_state)
to_space.dot(full_state, sub_state)
eps = H.nnz*np.finfo(msc_dtype[2]).eps
self.check_vec_equal(correct, sub_state, eps=eps)
def check_s2s(self, H, x_sub, check_subspace, correct):
'''
check multiplication from subspace to subspace
'''
H.add_subspace(check_subspace)
result = H.dot(x_sub)
eps = H.nnz*np.finfo(msc_dtype[2]).eps
self.check_vec_equal(correct, result, eps=eps)
def test_parity_XX_even(self):
H = index_sum(sigmax(0)*sigmax(1))
sp = Parity('even')
xs = self.generate_random_in_subspace(sp)
self.compare_to_full(H, *xs, sp)
def test_parity_XX_odd(self):
H = index_sum(sigmax(0)*sigmax(1))
sp = Parity('odd')
xs = self.generate_random_in_subspace(sp)
self.compare_to_full(H, *xs, sp)
def test_parity_YY_even(self):
H = index_sum(sigmay(0)*sigmay(1))
sp = Parity('even')
xs = self.generate_random_in_subspace(sp)
self.compare_to_full(H, *xs, sp)
def test_parity_YY_odd(self):
H = index_sum(sigmay(0)*sigmay(1))
sp = Parity('odd')
xs = self.generate_random_in_subspace(sp)
self.compare_to_full(H, *xs, sp)
def test_spin_conserve_half_filling(self):
H = index_sum(sigmax(0)*sigmax(1) + sigmay(0)*sigmay(1))
for spinflip in ['+', '-', None]:
if spinflip is not None and config.L%2 != 0:
continue
with self.subTest(spinflip=spinflip):
sp = SpinConserve(config.L, config.L//2, spinflip=spinflip)
xs = self.generate_random_in_subspace(sp)
self.compare_to_full(H, *xs, sp)
def test_spin_conserve_third_filling(self):
H = index_sum(sigmax(0)*sigmax(1) + sigmay(0)*sigmay(1))
sp = SpinConserve(config.L, config.L//3)
xs = self.generate_random_in_subspace(sp)
self.compare_to_full(H, *xs, sp)
def check_hamiltonian(self, H_name):
for space in [1, 2]:
for sort in [True, False]:
with self.subTest(space=space):
with self.subTest(sort=sort):
H = getattr(hamiltonians, H_name)()
sp = Auto(H, (1 << (H.L//2))-space, sort=sort)
xs = self.generate_random_in_subspace(sp)
self.compare_to_full(H, *xs, sp)
# TODO: write tests where this is not just the identity
class Projection(dtr.DynamiteTestCase):
def check_projection(self, from_subspace, to_subspace):
s = State(subspace=from_subspace)
s.set_random(seed=0)
r = State(subspace=to_subspace)
project = identity()
project.add_subspace(to_subspace, from_subspace)
project.allow_projection = True
project.dot(s, result=r)
s_np = s.to_numpy()
r_np = r.to_numpy()
from_states = set(from_subspace.idx_to_state(np.arange(from_subspace.get_dimension())))
if s_np is not None:
states = to_subspace.idx_to_state(np.arange(to_subspace.get_dimension()))
for i,state in enumerate(states):
if state not in from_states:
self.assertEqual(r_np[i], 0, msg=i)
else:
self.assertEqual(s_np[from_subspace.state_to_idx(state)], r_np[i])
def test_projections(self):
half_chain = config.L // 2
state = 'U'*half_chain + 'D'*(config.L-half_chain)
full = Full()
even_parity = Parity('even')
odd_parity = Parity('odd')
auto = Auto(hamiltonians.localized(), state)
subspace_list = [full, even_parity, odd_parity, auto]
for from_subspace in subspace_list:
for to_subspace in subspace_list:
with self.subTest(from_s=from_subspace, to_s=to_subspace):
self.check_projection(from_subspace, to_subspace)
if __name__ == '__main__':
dtr.main()
|
{
"content_hash": "1f355d86716e590df93142f90085b542",
"timestamp": "",
"source": "github",
"line_count": 308,
"max_line_length": 97,
"avg_line_length": 33.66558441558441,
"alnum_prop": 0.5835663998456939,
"repo_name": "GregDMeyer/dynamite",
"id": "adafd5ac7276431cdf0382567d7a05068a80afe7",
"size": "10369",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/integration/test_multiply.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "60780"
},
{
"name": "Cuda",
"bytes": "19934"
},
{
"name": "Cython",
"bytes": "17181"
},
{
"name": "Dockerfile",
"bytes": "5179"
},
{
"name": "Makefile",
"bytes": "136"
},
{
"name": "Python",
"bytes": "329639"
}
],
"symlink_target": ""
}
|
import glob
import os.path
import pkgutil
import re
import sys
import tempfile
import zipfile
__version__ = '${python.version}'
PACKAGE_EXTENSIONS = {'.zip', '.egg', '.jar'}
PACKAGE_DEV = re.compile("[.]dev[0-9]*$")
def configure(jars=[], packages=[], files=[], spark_home=None, spark_master='yarn', tmp_path=None):
os.environ['PYSPARK_PYTHON'] = sys.executable
spark_home = process_spark_home(spark_home)
pyspark_dir = os.path.join(spark_home, 'python')
pyspark_lib_dir = os.path.join(pyspark_dir, 'lib')
pyspark_lib_zips = glob.glob(os.path.join(pyspark_lib_dir, '*.zip'))
    sys_path_set = set(sys.path)
for pyspark_lib_zip in pyspark_lib_zips:
if pyspark_lib_zip not in sys_path_set and os.path.basename(pyspark_lib_zip) != 'pyspark.zip':
sys.path.insert(1, pyspark_lib_zip)
if pyspark_dir not in sys_path_set:
sys.path.insert(1, pyspark_dir)
py_files = pyspark_lib_zips + process_executor_packages(packages, tmp_path)
    assert spark_master == 'yarn', 'only yarn master is supported with this release'
import pyspark
import geomesa_pyspark.types
    # Need differential behavior based on the Spark version: Spark 2.1.0
    # is the first release to provide the module __version__ attribute
pyspark_pre21 = getattr(pyspark, '__version__', None) is None
if pyspark_pre21 and len(jars) > 0:
os.environ['PYSPARK_SUBMIT_ARGS'] = ' '.join(['--driver-class-path', ','.join(jars), 'pyspark-shell'])
conf = (
pyspark.SparkConf()
.setMaster(spark_master)
.set('spark.yarn.dist.jars', ','.join(jars))
.set('spark.yarn.dist.files', ','.join(py_files + files))
.setExecutorEnv('PYTHONPATH', ":".join(map(os.path.basename, py_files)))
.setExecutorEnv('PYSPARK_PYTHON', sys.executable)
)
if not pyspark_pre21 and len(jars):
conf.set('spark.driver.extraClassPath', ','.join(jars))
return conf
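# A minimal usage sketch (the paths below are hypothetical, not part of this
# module): build the conf and hand it to pyspark, e.g.
#   conf = configure(jars=['/path/to/geomesa-spark-runtime.jar'],
#                    packages=['geomesa_pyspark'],
#                    spark_home='/usr/lib/spark')
#   sc = pyspark.SparkContext(conf=conf)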
def process_spark_home(spark_home):
if spark_home is None:
spark_home = os.environ.get('SPARK_HOME', None)
assert spark_home is not None, 'unable to resolve SPARK_HOME'
assert os.path.isdir(spark_home), '%s is not a directory' % spark_home
os.environ['SPARK_HOME'] = spark_home
return spark_home
def process_executor_packages(executor_packages, tmp_path=None):
if tmp_path is None:
version_info = sys.version_info
tmp_path = os.path.join(tempfile.gettempdir(), 'spark-python-%s.%s' % (version_info.major, version_info.minor))
if not os.path.isdir(tmp_path):
os.makedirs(tmp_path)
driver_packages = {module for _, module, package in pkgutil.iter_modules() if package is True}
executor_files = []
for executor_package in executor_packages:
if executor_package not in driver_packages:
raise ImportError('unable to locate ' + executor_package + ' installed in driver')
package = sys.modules.get(executor_package, None)
if package is None:
package = pkgutil.get_loader(executor_package).load_module(executor_package)
package_path = os.path.dirname(package.__file__)
package_root = os.path.dirname(package_path)
if package_root[-4:].lower() in PACKAGE_EXTENSIONS:
executor_files.append(package_root)
elif os.path.isdir(package_root):
package_version = getattr(package, '__version__', getattr(package, 'VERSION', None))
zip_name = "%s.zip" % executor_package if package_version is None\
else "%s-%s.zip" % (executor_package, package_version)
zip_path = os.path.join(tmp_path, zip_name)
            if not os.path.isfile(zip_path) or (package_version and PACKAGE_DEV.search(package_version) is not None):
                # rebuild when the zip is missing or when it is a .dev version (which may change between runs)
zip_package(package_path, zip_path)
executor_files.append(zip_path)
return executor_files
def zip_package(package_path, zip_path):
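    """Zip package_path so archive members are rooted at the package name
    itself (e.g. 'mypkg/mod.py'), which is the layout executors expect when
    the zip is placed on their PYTHONPATH."""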
path_offset = len(os.path.dirname(package_path)) + 1
with zipfile.PyZipFile(zip_path, 'w') as writer:
for root, _, files in os.walk(package_path):
for file in files:
full_path = os.path.join(root, file)
archive_path = full_path[path_offset:]
writer.write(full_path, archive_path)
def init_sql(spark):
spark._jvm.org.apache.spark.sql.SQLTypes.init(spark._jwrapped)
|
{
"content_hash": "7444687feab975144d37ece1168a99b0",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 121,
"avg_line_length": 38.66086956521739,
"alnum_prop": 0.6396761133603239,
"repo_name": "elahrvivaz/geomesa",
"id": "2ba865adbd3a7dbdb9bf74326da7caa864de4059",
"size": "4446",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "geomesa-spark/geomesa_pyspark/src/main/python/geomesa_pyspark/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2900"
},
{
"name": "Java",
"bytes": "301988"
},
{
"name": "JavaScript",
"bytes": "140"
},
{
"name": "Python",
"bytes": "12067"
},
{
"name": "R",
"bytes": "2716"
},
{
"name": "Scala",
"bytes": "8440279"
},
{
"name": "Scheme",
"bytes": "3143"
},
{
"name": "Shell",
"bytes": "154842"
}
],
"symlink_target": ""
}
|
from .resource import Resource
class LocalNetworkGateway(Resource):
"""A common class for general resource information.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: Resource tags.
:type tags: dict
:param local_network_address_space: Local network site address space.
:type local_network_address_space: :class:`AddressSpace
<azure.mgmt.network.v2017_06_01.models.AddressSpace>`
:param gateway_ip_address: IP address of local network gateway.
:type gateway_ip_address: str
:param bgp_settings: Local network gateway's BGP speaker settings.
:type bgp_settings: :class:`BgpSettings
<azure.mgmt.network.v2017_06_01.models.BgpSettings>`
:param resource_guid: The resource GUID property of the
LocalNetworkGateway resource.
:type resource_guid: str
:ivar provisioning_state: The provisioning state of the
LocalNetworkGateway resource. Possible values are: 'Updating', 'Deleting',
and 'Failed'.
:vartype provisioning_state: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'local_network_address_space': {'key': 'properties.localNetworkAddressSpace', 'type': 'AddressSpace'},
'gateway_ip_address': {'key': 'properties.gatewayIpAddress', 'type': 'str'},
'bgp_settings': {'key': 'properties.bgpSettings', 'type': 'BgpSettings'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, id=None, location=None, tags=None, local_network_address_space=None, gateway_ip_address=None, bgp_settings=None, resource_guid=None, etag=None):
super(LocalNetworkGateway, self).__init__(id=id, location=location, tags=tags)
self.local_network_address_space = local_network_address_space
self.gateway_ip_address = gateway_ip_address
self.bgp_settings = bgp_settings
self.resource_guid = resource_guid
self.provisioning_state = None
self.etag = etag
|
{
"content_hash": "03d2f828546bca28900b2ee87bbb835b",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 167,
"avg_line_length": 42.492537313432834,
"alnum_prop": 0.644538110291535,
"repo_name": "v-iam/azure-sdk-for-python",
"id": "be374ae921394ea23683b749f0b604495824b806",
"size": "3321",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "azure-mgmt-network/azure/mgmt/network/v2017_06_01/models/local_network_gateway.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19856874"
}
],
"symlink_target": ""
}
|
from ccxt.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
class _1broker (Exchange):
def describe(self):
return self.deep_extend(super(_1broker, self).describe(), {
'id': '_1broker',
'name': '1Broker',
'countries': 'US',
'rateLimit': 1500,
'version': 'v2',
'hasPublicAPI': False,
'hasCORS': True,
'hasFetchTrades': False,
'hasFetchOHLCV': True,
'timeframes': {
'1m': '60',
'15m': '900',
'1h': '3600',
'1d': '86400',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766021-420bd9fc-5ecb-11e7-8ed6-56d0081efed2.jpg',
'api': 'https://1broker.com/api',
'www': 'https://1broker.com',
'doc': 'https://1broker.com/?c=en/content/api-documentation',
},
'requiredCredentials': {
'apiKey': True,
'secret': False,
},
'api': {
'private': {
'get': [
'market/bars',
'market/categories',
'market/details',
'market/list',
'market/quotes',
'market/ticks',
'order/cancel',
'order/create',
'order/open',
'position/close',
'position/close_cancel',
'position/edit',
'position/history',
'position/open',
'position/shared/get',
'social/profile_statistics',
'social/profile_trades',
'user/bitcoin_deposit_address',
'user/details',
'user/overview',
'user/quota_status',
'user/transaction_log',
],
},
},
})
def fetch_categories(self):
response = self.privateGetMarketCategories()
# they return an empty string among their categories, wtf?
categories = response['response']
result = []
for i in range(0, len(categories)):
if categories[i]:
result.append(categories[i])
return result
def fetch_markets(self):
self_ = self # workaround for Babel bug(not passing `self` to _recursive() call)
categories = self.fetch_categories()
result = []
for c in range(0, len(categories)):
category = categories[c]
markets = self_.privateGetMarketList({
'category': category.lower(),
})
for p in range(0, len(markets['response'])):
market = markets['response'][p]
id = market['symbol']
symbol = None
base = None
quote = None
if (category == 'FOREX') or (category == 'CRYPTO'):
symbol = market['name']
parts = symbol.split('/')
base = parts[0]
quote = parts[1]
else:
base = id
quote = 'USD'
symbol = base + '/' + quote
base = self_.common_currency_code(base)
quote = self_.common_currency_code(quote)
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'info': market,
})
return result
def fetch_balance(self, params={}):
self.load_markets()
balance = self.privateGetUserOverview()
response = balance['response']
result = {
'info': response,
}
currencies = list(self.currencies.keys())
for c in range(0, len(currencies)):
currency = currencies[c]
result[currency] = self.account()
total = float(response['balance'])
result['BTC']['free'] = total
result['BTC']['total'] = total
return self.parse_balance(result)
def fetch_order_book(self, symbol, params={}):
self.load_markets()
response = self.privateGetMarketQuotes(self.extend({
'symbols': self.market_id(symbol),
}, params))
orderbook = response['response'][0]
timestamp = self.parse8601(orderbook['updated'])
bidPrice = float(orderbook['bid'])
askPrice = float(orderbook['ask'])
bid = [bidPrice, None]
ask = [askPrice, None]
return {
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'bids': [bid],
'asks': [ask],
}
def fetch_trades(self, symbol):
raise ExchangeError(self.id + ' fetchTrades() method not implemented yet')
def fetch_ticker(self, symbol, params={}):
self.load_markets()
result = self.privateGetMarketBars(self.extend({
'symbol': self.market_id(symbol),
'resolution': 60,
'limit': 1,
}, params))
orderbook = self.fetch_order_book(symbol)
ticker = result['response'][0]
timestamp = self.parse8601(ticker['date'])
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': float(ticker['h']),
'low': float(ticker['l']),
'bid': orderbook['bids'][0][0],
'ask': orderbook['asks'][0][0],
'vwap': None,
'open': float(ticker['o']),
'close': float(ticker['c']),
'first': None,
'last': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': None,
'quoteVolume': None,
'info': ticker,
}
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
return [
self.parse8601(ohlcv['date']),
float(ohlcv['o']),
float(ohlcv['h']),
float(ohlcv['l']),
float(ohlcv['c']),
None,
]
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'resolution': self.timeframes[timeframe],
}
if since:
request['date_start'] = self.iso8601(since) # they also support date_end
if limit:
request['limit'] = limit
result = self.privateGetMarketBars(self.extend(request, params))
return self.parse_ohlcvs(result['response'], market, timeframe, since, limit)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
order = {
'symbol': self.market_id(symbol),
'margin': amount,
'direction': 'short' if (side == 'sell') else 'long',
'leverage': 1,
'type': side,
}
if type == 'limit':
order['price'] = price
else:
order['type'] += '_market'
result = self.privateGetOrderCreate(self.extend(order, params))
return {
'info': result,
'id': result['response']['order_id'],
}
def cancel_order(self, id, symbol=None, params={}):
self.load_markets()
        # the api map above only declares GET endpoints, so use the generated GET method
        return self.privateGetOrderCancel({'order_id': id})
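    # All endpoints are private: sign() authenticates by appending the API
    # token as a query-string parameter rather than by signing the payload.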
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
self.check_required_credentials()
url = self.urls['api'] + '/' + self.version + '/' + path + '.php'
query = self.extend({'token': self.apiKey}, params)
url += '?' + self.urlencode(query)
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = self.fetch2(path, api, method, params, headers, body)
if 'warning' in response:
if response['warning']:
raise ExchangeError(self.id + ' ' + self.json(response))
if 'error' in response:
if response['error']:
raise ExchangeError(self.id + ' ' + self.json(response))
return response
|
{
"content_hash": "39323b903d6d5c2d4ae77de0d24b1ccc",
"timestamp": "",
"source": "github",
"line_count": 239,
"max_line_length": 126,
"avg_line_length": 36.69874476987448,
"alnum_prop": 0.4749743472808118,
"repo_name": "tritoanst/ccxt",
"id": "f1168727a8cd2150b628d9f142b77067a9bbd727",
"size": "8796",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/ccxt/_1broker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "3955653"
},
{
"name": "PHP",
"bytes": "783191"
},
{
"name": "Python",
"bytes": "680573"
},
{
"name": "Shell",
"bytes": "833"
}
],
"symlink_target": ""
}
|
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect, HttpResponse
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from instance.models import Host
from dashboard.views import sort_host
from webvirtmgr.server import ConnServer
from libvirt import libvirtError
from webvirtmgr.settings import TIME_JS_REFRESH
def cpuusage(request, host_id):
"""
Return CPU Usage in %
"""
if not request.user.is_authenticated():
return HttpResponseRedirect('/login')
host = Host.objects.get(id=host_id)
try:
conn = ConnServer(host)
except:
conn = None
    if conn:
        cpu_usage = conn.cpu_get_usage()
        return HttpResponse(cpu_usage)
    # return an empty response rather than None when the connection failed
    return HttpResponse('')
def memusage(request, host_id):
"""
Return Memory Usage in %
"""
if not request.user.is_authenticated():
return HttpResponseRedirect('/login')
host = Host.objects.get(id=host_id)
try:
conn = ConnServer(host)
except:
conn = None
    if conn:
        mem_usage = conn.memory_get_usage()
        return HttpResponse(mem_usage[2])
    # return an empty response rather than None when the connection failed
    return HttpResponse('')
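# overview() renders host statistics and also dispatches power actions
# (start/shutdown/destroy/suspend/resume) posted from the page against the
# named domain, collecting any libvirt errors for display.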
def overview(request, host_id):
"""
Overview page.
"""
if not request.user.is_authenticated():
return HttpResponseRedirect('/login')
errors = []
time_refresh = TIME_JS_REFRESH
host = Host.objects.get(id=host_id)
all_vm = hostname = arch = cpus = cpu_model = \
type_conn = libvirt_ver = all_mem = \
mem_usage = mem_percent = cpu_usage = None
try:
conn = ConnServer(host)
except libvirtError as e:
conn = None
if not conn:
errors.append(e.message)
else:
have_kvm = conn.hard_accel_node()
if not have_kvm:
msg = _('Your CPU doesn\'t support hardware virtualization')
errors.append(msg)
all_vm = sort_host(conn.vds_get_node())
hostname, arch, cpus, cpu_model, type_conn, libvirt_ver = conn.node_get_info()
all_mem, mem_usage, mem_percent = conn.memory_get_usage()
cpu_usage = conn.cpu_get_usage()
if request.method == 'POST':
vname = request.POST.get('vname', '')
dom = conn.lookupVM(vname)
if 'start' in request.POST:
try:
dom.create()
return HttpResponseRedirect(request.get_full_path())
except libvirtError as msg_error:
errors.append(msg_error.message)
if 'shutdown' in request.POST:
try:
dom.shutdown()
return HttpResponseRedirect(request.get_full_path())
except libvirtError as msg_error:
errors.append(msg_error.message)
if 'destroy' in request.POST:
try:
dom.destroy()
return HttpResponseRedirect(request.get_full_path())
except libvirtError as msg_error:
errors.append(msg_error.message)
if 'suspend' in request.POST:
try:
dom.suspend()
return HttpResponseRedirect(request.get_full_path())
except libvirtError as msg_error:
errors.append(msg_error.message)
if 'resume' in request.POST:
try:
dom.resume()
return HttpResponseRedirect(request.get_full_path())
except libvirtError as msg_error:
errors.append(msg_error.message)
conn.close()
return render_to_response('overview.html', {'host_id': host_id,
'errors': errors,
'time_refresh': time_refresh,
'all_vm': all_vm,
'hostname': hostname,
'arch': arch, 'cpus': cpus, 'cpu_model': cpu_model, 'cpu_usage': cpu_usage,
'type_conn': type_conn, 'libvirt_ver': libvirt_ver,
'all_mem': all_mem, 'mem_usage': mem_usage, 'mem_percent': mem_percent
},
context_instance=RequestContext(request))
|
{
"content_hash": "9b98bba7e441e08e3cef99d9eef4703e",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 123,
"avg_line_length": 34.98425196850393,
"alnum_prop": 0.5320729237002025,
"repo_name": "selboo/starl-mangle",
"id": "8a10011a0dcc5a3fd2bb29f4f151237440c3417e",
"size": "4443",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webvirtmgr/overview/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "1463"
},
{
"name": "CSS",
"bytes": "197524"
},
{
"name": "HTML",
"bytes": "792119"
},
{
"name": "JavaScript",
"bytes": "517786"
},
{
"name": "PHP",
"bytes": "613053"
},
{
"name": "Python",
"bytes": "312293"
},
{
"name": "Shell",
"bytes": "4409"
}
],
"symlink_target": ""
}
|
from flask import Flask, Markup
from flask import redirect, url_for, request
from flask import session, g, render_template
from pymongo import Connection
# from pymongo import ASCENDING, DESCENDING
from bson.objectid import ObjectId
import markdown
from mcsession import McSessionInterface
app = Flask(__name__)
app.secret_key = '6st3grahy2du'
app.session_interface = McSessionInterface()
app.debug = True
db = Connection().pawiki
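# Note: this module-level connection is not referenced by the views below;
# they use the per-request connection stored on flask.g in before_request().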
@app.before_request
def before_request():
g.mongo = Connection().pawiki
@app.route('/')
def index():
return redirect(url_for('view_page', pagename='FrontPage'))
@app.route('/all/')
def all_pages():
pages = []
cursor = g.mongo.pawiki.find({ }, {'name': 1}).sort('_id', 1)
    for doc in cursor:
        pages.append(doc['name'])
return render_template('list.html', pages=pages)
@app.route('/<pagename>')
def view_page(pagename):
g.mongo.pawiki.ensure_index('name', unique=True)
page = g.mongo.pawiki.find_one({'name': pagename})
if not page:
return redirect(url_for('edit_page', pagename=pagename))
session['last'] = pagename
return render_template('view.html', page=page)
@app.route('/<pagename>/edit', methods=['GET', 'POST'])
def edit_page(pagename):
g.mongo.pawiki.ensure_index('name', unique=True)
page = {}
sahtml = []
if request.method == 'POST':
pageid = g.mongo.pawiki.find_one({'name': pagename}, {'_id': 1})
page['name'] = pagename
for k in request.form.keys():
            if k == 'salvesta':  # Estonian: "save" (the submit button field)
continue
if k == 'data':
page['htmldata'] = Markup(markdown.markdown(request.form[k], safe_mode='escape'))
page[k] = request.form[k]
continue
page[k] = request.form[k]
if page['seealso']:
for a in page['seealso'].split(' '):
url = url_for('view_page', pagename=a)
sahtml.append('<a href="%s">%s</a>' % (url, a))
page['sahtml'] = sahtml
if pageid:
page['_id'] = ObjectId(pageid['_id'])
g.mongo.pawiki.save(page)
else:
g.mongo.pawiki.insert(page)
return redirect(url_for('view_page', pagename=page['name']))
page = g.mongo.pawiki.find_one({'name': pagename})
if not page:
page = {}
        page['title'] = 'Lehe pealkiri'  # Estonian: "Page title"
        page['subtitle'] = 'Lehe alapeakiri'  # Estonian: "Page subtitle"
        page['sitesub'] = ''
        page['data'] = 'Muuda seda'  # Estonian: "Edit this"
        page['name'] = pagename
        page['seealso'] = 'Seotud lehtede nimed'  # Estonian: "Names of related pages"
session['last'] = pagename
return render_template('edit.html', page=page)
@app.route('/search', methods=['GET', 'POST'])
def search_page():
#return render_template('search.html', data=request.form)
return redirect(url_for('view_page', pagename='FrontPage'))
if __name__ == '__main__':
app.run('0.0.0.0')
|
{
"content_hash": "046cf5eb9458e208f1db2fde07b79cf9",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 97,
"avg_line_length": 30.15463917525773,
"alnum_prop": 0.5955555555555555,
"repo_name": "arand/pawiki",
"id": "e225f6daedb93a3ae9122d6de5367d39461da66a",
"size": "3016",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "78341"
},
{
"name": "JavaScript",
"bytes": "17420"
},
{
"name": "Perl",
"bytes": "225"
},
{
"name": "Python",
"bytes": "5445"
}
],
"symlink_target": ""
}
|
from LRR import Hook as HookBase
class MyAmazingDataClass:
def __init__(self):
self.my_truth = False
self.courts_in_english = []
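# Hook subclasses LRR's parser hook: each method below is a callback invoked
# while a legal-resource file is parsed; this example only collects the
# English court names and prints them sorted on export().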
class Hook(HookBase):
def __init__(self):
HookBase.__init__(self, Data=MyAmazingDataClass)
self.opt.jurisdiction = "jp"
def court(self, options, arg):
if options.has_key('en'):
self.data.courts_in_english.append(options['en'])
else:
self.data.courts_in_english.append(arg)
def reporter_start(self, options, dates, arg):
pass
def reporter_end(self, options, arg):
pass
def variation(self, arg):
pass
def export(self):
self.data.courts_in_english.sort()
for court in self.data.courts_in_english:
print court
|
{
"content_hash": "23d21a877b090e94bf97cca79e19c029",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 61,
"avg_line_length": 25.21875,
"alnum_prop": 0.5861214374225526,
"repo_name": "fbennett/legal-resource-registry",
"id": "a346ab07a0352690ec34d621db040700f45f6811",
"size": "807",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/plugins/testx.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "12286"
},
{
"name": "HTML",
"bytes": "33943"
},
{
"name": "JavaScript",
"bytes": "231590"
},
{
"name": "Python",
"bytes": "156110"
},
{
"name": "Shell",
"bytes": "578"
},
{
"name": "TeX",
"bytes": "586"
}
],
"symlink_target": ""
}
|
"""nrvr.diskimage.isoimage - Clone and modify an .iso disk image
The main class provided by this module is IsoImage.
Implemented subclasses of IsoImageModification are
* IsoImageModificationFromString
* IsoImageModificationFromPath
* IsoImageModificationByReplacement
* IsoImageModificationFromByteRange
As implemented works in Linux.
As implemented requires mount, umount, iso-info, iso-read, genisoimage commands.
Nevertheless essential. To be improved as needed.
Idea and first implementation - Leo Baschy <srguiwiz12 AT nrvr DOT com>
Contributor - Nora Baschy
Public repository - https://github.com/srguiwiz/nrvr-commander
Copyright (c) Nirvana Research 2006-2015.
Simplified BSD License"""
import codecs
import os
import os.path
import re
import shutil
from nrvr.process.commandcapture import CommandCapture
from nrvr.util.requirements import SystemRequirements
from nrvr.util.times import Timestamp
class IsoImage(object):
"""An .iso ISO 9660 (or UDF) CD-ROM or DVD-ROM disk image."""
@classmethod
def commandsUsedInImplementation(cls):
"""Return a list to be passed to SystemRequirements.commandsRequired().
This class can be passed to SystemRequirements.commandsRequiredByImplementations()."""
return ["mount", "umount",
"iso-info", "iso-read",
(["genisoimage"], ["mkisofs"])]
def __init__(self, isoImagePath):
"""Create new IsoImage descriptor.
A descriptor can describe an .iso image that does or doesn't yet exist on the host disk."""
# really want abspath and expanduser
self._isoImagePath = os.path.abspath(os.path.expanduser(isoImagePath))
self.mountDir = None
@property
def isoImagePath(self):
"""Path of the .iso image."""
return self._isoImagePath
def exists(self):
"""Return True if .iso image exists on the host disk."""
return os.path.exists(self._isoImagePath)
def remove(self):
"""Remove (delete) .iso image from the host disk."""
os.remove(self._isoImagePath)
def mount(self, mountDir, udf=False):
"""Mount .iso image."""
if not os.path.exists(mountDir):
# was os.makedirs, but that could allow unintended wild creations to go undetected
os.mkdir(mountDir, 0755)
# mount man page says something since Linux 2.6.25 there is auto-destruction of loop devices,
# which makes mounting and unmounting easier for us than it used to be,
# also see https://lkml.org/lkml/2007/10/30/413,
# also see http://marc.info/?l=util-linux-ng&m=119362955431694
if not udf: # iso9660
filesystemType = "iso9660"
else: # udf
filesystemType = "udf"
CommandCapture(["mount", "-o", "loop", "-t", filesystemType, "-r",
self._isoImagePath, mountDir])
# record only in case of success
self.mountDir = mountDir
def unmount(self):
"""Unmount .iso image."""
if self.mountDir:
CommandCapture(["umount", "-d", self.mountDir],
exceptionIfNotZero=False, exceptionIfAnyStderr=False)
def copyToDirectory(self, copyDirectory, udf=False, ignoreJoliet=True, tolerance=0.0):
"""Copy all files into a directory.
Not using mount command, no need to run as root."""
# as of 2013-09-29 given known uses of this package and known bugs of iso-info
# it appears better to default to ignoreJoliet=True
# see https://savannah.gnu.org/bugs/?40130
# see https://savannah.gnu.org/bugs/?40138
#
# really want abspath and expanduser
copyDirectory = os.path.abspath(os.path.expanduser(copyDirectory))
# make sure not merging with pre-existing directory or files
if os.path.exists(copyDirectory):
shutil.rmtree(copyDirectory)
# make directory
os.mkdir(copyDirectory, 0755)
if not udf: # iso9660
# get directories info in a reasonably parsable list
isoInfoLArgs = ["iso-info", "-i", self._isoImagePath, "-l"]
if ignoreJoliet:
isoInfoLArgs.insert(1, "--no-joliet")
isoInfoL = CommandCapture(isoInfoLArgs, copyToStdio=False)
# directories without leading slash and without trailing slash
directories = re.findall(r"(?m)^[ \t]*/(.+?)/?[ \t]*:[ \t]*$", isoInfoL.stdout)
# get files info in a reasonably parsable list
isoInfoFArgs = ["iso-info", "-i", self._isoImagePath, "-f"]
if ignoreJoliet:
isoInfoFArgs.insert(1, "--no-joliet")
isoInfoF = CommandCapture(isoInfoFArgs, copyToStdio=False)
# files without leading slash and without trailing slash
files = re.findall(r"(?m)^[ \t]*[0-9]*[ \t]+/(.+?)/?[ \t]*$", isoInfoF.stdout)
else: # udf
# apparently since libcdio 0.93 if using iso-info option -i then -U doesn't work right
# apparently since libcdio 0.92 not using iso-info option -i works right for -U and -l
# even though man iso-info says to use -i
#
# get directories and files info in a reasonably parsable list
isoInfoUArgs = ["iso-info", self._isoImagePath, "-U"]
if ignoreJoliet:
isoInfoUArgs.insert(1, "--no-joliet")
isoInfoU = CommandCapture(isoInfoUArgs, copyToStdio=False)
# list below excluded line 123456 /.
isoInfoUList = re.search(r"(?s)[ \t]*[0-9]+[ \t]+/\.\s*\n(.*)", isoInfoU.stdout).group(1)
# directories without leading slash and without trailing slash
directories = re.findall(r"(?m)^[ \t]*[0-9]+[ \t]+/(.+?)/\.[ \t]*$", isoInfoUList)
# files without leading slash and without trailing slash dot
files = re.findall(r"(?m)^[ \t]*[0-9]+[ \t]+/(.*?)(?:/\.)?[ \t]*$", isoInfoUList)
# sorting matters to allow building a tree of directories
directories = sorted(directories)
# make directories
for relativePathOnIso in directories:
pathOnHost = os.path.join(copyDirectory, relativePathOnIso)
os.mkdir(pathOnHost, 0755)
# tolerate some defects in iso-read
readAttemptCount = 0
readSuccessCount = 0
# copy files
for relativePathOnIso in files:
if relativePathOnIso in directories:
# directories exist already, nothing to do
continue
pathOnHost = os.path.join(copyDirectory, relativePathOnIso)
# copy file
try:
readAttemptCount += 1
isoReadArgs = ["iso-read",
"-i", self._isoImagePath,
"-e", relativePathOnIso,
"-o", pathOnHost];
if udf: # udf
isoReadArgs.append("-U")
CommandCapture(isoReadArgs, copyToStdio=False)
readSuccessCount += 1
except Exception as ex:
print ex
# check tolerance
readFailureCount = readAttemptCount - readSuccessCount
if readFailureCount > readAttemptCount * tolerance:
raise Exception("too many ({0} of {1}) failures reading {2}".format(readFailureCount, readAttemptCount, self._isoImagePath))
elif readFailureCount:
print "continuing despite some ({0} of {1}) failures reading {2}".format(readFailureCount, readAttemptCount, self._isoImagePath)
return copyDirectory
def cloneWithModifications(self, modifications=[], cloneIsoImagePath=None, udf=False, ignoreJoliet=True,
pause=False):
"""Clone with any number of instances of IsoImageModification applied.
A temporary assembly directory in the same directory as cloneIsoImagePath needs disk space,
but it is removed automatically upon completion of cloning.
modifications
a list of IsoImageModification instances.
cloneIsoImagePath
if not given then in same directory with a timestamp in the filename.
return
IsoImage(cloneIsoImagePath)."""
# as of 2013-09-29 given known uses of this package and known bugs of iso-info
# it appears better to default to ignoreJoliet=True
# see https://savannah.gnu.org/bugs/?40130
# see https://savannah.gnu.org/bugs/?40138
#
# timestamp to the microsecond should be good enough
timestamp = Timestamp.microsecondTimestamp()
# ensure there is a cloneIsoImagePath
if not cloneIsoImagePath:
# insert timestamp before extension
isoImagePathSplitext = os.path.splitext(self._isoImagePath)
cloneIsoImagePath = isoImagePathSplitext[0] + "." + timestamp + isoImagePathSplitext[1]
if os.path.exists(cloneIsoImagePath):
raise Exception("won't overwrite already existing {0}".format(cloneIsoImagePath))
temporaryAssemblyDirectory = cloneIsoImagePath + ".tmpdir"
#os.mkdir(temporaryAssemblyDirectory, 0755)
try:
# copy files from original .iso image
print "copying files from {0}, this may take a few minutes".format(self._isoImagePath)
self.copyToDirectory(temporaryAssemblyDirectory, udf=udf, ignoreJoliet=ignoreJoliet)
# give a chance to look
if pause:
raw_input("you requested to pause before applying modifications, press Enter to continue:")
# apply modifications
print "applying modifications into {0}".format(temporaryAssemblyDirectory)
for modification in modifications:
modification.writeIntoAssembly(temporaryAssemblyDirectory)
# give a chance to look
if pause:
raw_input("you requested to pause after applying modifications, press Enter to continue:")
# make new .iso image file
print "making new {0}, this may take a few minutes".format(cloneIsoImagePath)
if SystemRequirements.which("genisoimage"):
# preferred choice
makeIsoImageCommandName = "genisoimage"
elif SystemRequirements.which("mkisofs"):
# acceptable choice
makeIsoImageCommandName = "mkisofs"
else:
# preferred choice for error message
makeIsoImageCommandName = "genisoimage"
genisoimageOptions = self.genisoimageOptions(label=timestamp, udf=udf, ignoreJoliet=ignoreJoliet)
CommandCapture([makeIsoImageCommandName] +
genisoimageOptions +
["-o", cloneIsoImagePath,
temporaryAssemblyDirectory],
copyToStdio=False,
exceptionIfAnyStderr=False)
finally:
# remove in a specific, hopefully most resilient order
shutil.rmtree(temporaryAssemblyDirectory, ignore_errors=True)
return IsoImage(cloneIsoImagePath)
def cloneWithModificationsUsingMount(self, modifications=[], cloneIsoImagePath=None, udf=False, ignoreJoliet=True,
pause=False):
"""Clone with any number of instances of IsoImageModification applied.
This is an older implementation which regrettably because of the mount command requires
having superuser privileges.
It is still here in case a newer implementation doesn't work right, which could be for any
of a number of reasons, for example for symbolic links.
A temporary assembly directory in the same directory as cloneIsoImagePath needs disk space,
but it is removed automatically upon completion of cloning.
modifications
a list of IsoImageModification instances.
cloneIsoImagePath
if not given then in same directory with a timestamp in the filename.
return
IsoImage(cloneIsoImagePath)."""
# timestamp to the microsecond should be good enough
timestamp = Timestamp.microsecondTimestamp()
# ensure there is a cloneIsoImagePath
if not cloneIsoImagePath:
# insert timestamp before extension
isoImagePathSplitext = os.path.splitext(self._isoImagePath)
cloneIsoImagePath = isoImagePathSplitext[0] + "." + timestamp + isoImagePathSplitext[1]
if os.path.exists(cloneIsoImagePath):
raise Exception("won't overwrite already existing {0}".format(cloneIsoImagePath))
temporaryMountDirectory = cloneIsoImagePath + ".mnt"
temporaryAssemblyDirectory = cloneIsoImagePath + ".tmpdir"
os.mkdir(temporaryMountDirectory, 0755)
#os.mkdir(temporaryAssemblyDirectory, 0755)
try:
# mount
self.mount(temporaryMountDirectory, udf=udf)
# copy files from original .iso image
print "copying files from {0}, this may take a few minutes".format(self._isoImagePath)
shutil.copytree(temporaryMountDirectory, temporaryAssemblyDirectory, symlinks=True)
# give a chance to look
if pause:
raw_input("you requested to pause before applying modifications, press Enter to continue:")
# apply modifications
print "applying modifications into {0}".format(temporaryAssemblyDirectory)
for modification in modifications:
modification.writeIntoAssembly(temporaryAssemblyDirectory)
# give a chance to look
if pause:
raw_input("you requested to pause after applying modifications, press Enter to continue:")
# make new .iso image file
print "making new {0}, this may take a few minutes".format(cloneIsoImagePath)
genisoimageOptions = self.genisoimageOptions(label=timestamp, udf=udf, ignoreJoliet=ignoreJoliet)
CommandCapture(["genisoimage"] +
genisoimageOptions +
["-o", cloneIsoImagePath,
temporaryAssemblyDirectory],
copyToStdio=False,
exceptionIfAnyStderr=False)
finally:
# remove in a specific, hopefully most resilient order
self.unmount()
shutil.rmtree(temporaryAssemblyDirectory, ignore_errors=True)
os.rmdir(temporaryMountDirectory)
return IsoImage(cloneIsoImagePath)
def genisoimageOptions(self, label=None, udf=False, ignoreJoliet=True):
"""Auxiliary method, called by cloneWithModifications.
Can be overridden by subclass methods genisoimageOptions,
which may want to extend the returned list.
Could be improved in the future.
Could recognize content of .iso image.
Could select different options depending on content of .iso image.
Maybe could use iso-info -d 9 -i self.isoImagePath.
Could be overridden for a subclass."""
# this implementation has been made to be a workable basis for most uses
if not label:
label = Timestamp.microsecondTimestamp()
genisoimageOptions = []
if udf: # udf
genisoimageOptions.append("-udf")
if not ignoreJoliet:
# broader compatibility of filenames and metadata
genisoimageOptions.append("-J")
genisoimageOptions.extend([
# broader compatibility of filenames and metadata
"-r", "-T",
"-f",
#
# possibly needed labeling,
# volume id, volume name or label, max 32 characters
"-V", label[-32:]
])
return genisoimageOptions
class IsoImageModification(object):
"""A modification to an .iso image."""
def __init__(self, pathOnIso):
self.pathOnIso = pathOnIso
def pathInTemporaryAssemblyDirectory(self, temporaryAssemblyDirectory):
"""Auxiliary method, called by subclass method writeIntoAssembly."""
# remove any leading slash in order to make it relative
relativePathOnIso = re.sub(r"^/*(.*?)$", r"\g<1>", self.pathOnIso)
return os.path.abspath(os.path.join(temporaryAssemblyDirectory, relativePathOnIso))
def writeIntoAssembly(self, temporaryAssemblyDirectory):
"""To be implemented in subclasses."""
raise NotImplementedError("Method writeIntoAssembly to be implemented in subclasses of IsoImageModification.")
class IsoImageModificationFromString(IsoImageModification):
"""A modification to an .iso image, copy from string into file."""
def __init__(self, pathOnIso, string, encoding="utf-8"):
super(IsoImageModificationFromString, self).__init__(pathOnIso)
self.string = string
self.encoding = encoding
def writeIntoAssembly(self, temporaryAssemblyDirectory):
pathInTemporaryAssemblyDirectory = self.pathInTemporaryAssemblyDirectory(temporaryAssemblyDirectory)
# remove pre-existing file, if any
if os.path.exists(pathInTemporaryAssemblyDirectory):
os.remove(pathInTemporaryAssemblyDirectory)
# if necessary make directory
directoryInTemporaryAssemblyDirectory = os.path.dirname(pathInTemporaryAssemblyDirectory)
if not os.path.exists(directoryInTemporaryAssemblyDirectory):
os.makedirs(directoryInTemporaryAssemblyDirectory)
# write
with codecs.open(pathInTemporaryAssemblyDirectory, "w", encoding=self.encoding) as temporaryFile:
temporaryFile.write(self.string)
class IsoImageModificationFromPath(IsoImageModification):
"""A modification to an .iso image, copy from path into file or into directory."""
def __init__(self, pathOnIso, pathOnHost):
super(IsoImageModificationFromPath, self).__init__(pathOnIso)
self.pathOnHost = pathOnHost
def writeIntoAssembly(self, temporaryAssemblyDirectory):
pathInTemporaryAssemblyDirectory = self.pathInTemporaryAssemblyDirectory(temporaryAssemblyDirectory)
if not os.path.isdir(self.pathOnHost):
# if not a directory then
# remove pre-existing file, if any
if os.path.exists(pathInTemporaryAssemblyDirectory):
os.remove(pathInTemporaryAssemblyDirectory)
# copy
shutil.copy2(self.pathOnHost, pathInTemporaryAssemblyDirectory)
else:
# if a directory then
# remove pre-existing directory, if any
if os.path.exists(pathInTemporaryAssemblyDirectory):
shutil.rmtree(pathInTemporaryAssemblyDirectory)
# copy
shutil.copytree(self.pathOnHost, pathInTemporaryAssemblyDirectory, symlinks=True)
class IsoImageModificationByReplacement(IsoImageModification):
# raw string in addition to triple-quoted string because of backslashes \
r"""A modification to an .iso image, replace within file.
Treats whole file as one string to match.
To match a newline a regular expression may use "(\r?\n)",
which nicely allows in the replacement to place appropriate newlines
by backreference, e.g. by "\g<1>"."""
def __init__(self, pathOnIso, regularExpression, replacement, encoding="utf-8"):
super(IsoImageModificationByReplacement, self).__init__(pathOnIso)
self.regularExpression = regularExpression
self.replacement = replacement
self.encoding = encoding
def writeIntoAssembly(self, temporaryAssemblyDirectory):
pathInTemporaryAssemblyDirectory = self.pathInTemporaryAssemblyDirectory(temporaryAssemblyDirectory)
# read pre-existing file
with codecs.open(pathInTemporaryAssemblyDirectory, "r", encoding=self.encoding) as inputFile:
fileContent = inputFile.read()
fileContent = self.regularExpression.sub(self.replacement, fileContent)
# overwrite
with codecs.open(pathInTemporaryAssemblyDirectory, "w", encoding=self.encoding) as outputFile:
outputFile.write(fileContent)
class IsoImageModificationFromByteRange(IsoImageModification):
"""A modification to an .iso image, copy from byte range from file into a file by itself."""
def __init__(self, pathOnIso, pathOnHost, start, stop):
super(IsoImageModificationFromByteRange, self).__init__(pathOnIso)
self.pathOnHost = pathOnHost
self.start = start
self.stop = stop
def writeIntoAssembly(self, temporaryAssemblyDirectory):
pathInTemporaryAssemblyDirectory = self.pathInTemporaryAssemblyDirectory(temporaryAssemblyDirectory)
if self.start:
start = self.start
else:
start = 0
if self.stop:
stop = self.stop
else:
stop = os.path.getsize(self.pathOnHost)
# remove pre-existing file, if any
if os.path.exists(pathInTemporaryAssemblyDirectory):
os.remove(pathInTemporaryAssemblyDirectory)
# copy
with open(self.pathOnHost, "rb") as inputFile:
inputFile.seek(start)
with open(pathInTemporaryAssemblyDirectory, "wb") as outputFile:
current = start
while current < stop:
remainder = stop - current
                    chunk = min(remainder, 10240)
                    data = inputFile.read(chunk)
                    if not data:
                        # source ended early; avoid counting bytes never read
                        break
                    outputFile.write(data)
                    current += len(data)
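# A minimal usage sketch (file names are hypothetical): clone an image while
# replacing one file on it, e.g.
#   image = IsoImage("/tmp/original.iso")
#   clone = image.cloneWithModifications(modifications=[
#       IsoImageModificationFromString("/isolinux/isolinux.cfg", "default linux\n")])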
if __name__ == "__main__":
from nrvr.util.requirements import SystemRequirements
SystemRequirements.commandsRequiredByImplementations([IsoImage], verbose=True)
#
import tempfile
_testDir = os.path.join(tempfile.gettempdir(), Timestamp.microsecondTimestamp())
os.mkdir(_testDir, 0755)
try:
_originalDir = os.path.join(_testDir, "cd")
os.mkdir(_originalDir, 0755)
with open(os.path.join(_originalDir, "cheese.txt"), "w") as outputFile:
outputFile.write("please")
os.mkdir(os.path.join(_originalDir, "empty"))
os.mkdir(os.path.join(_originalDir, "something"))
with open(os.path.join(_originalDir, u"something/\xf6sterreichischer K\xe4se.txt"), "w") as outputFile:
outputFile.write("stinkt, aber gesund")
os.mkdir(os.path.join(_originalDir, "tree"))
with open(os.path.join(_originalDir, "tree/leaf.txt"), "w") as outputFile:
outputFile.write("green")
os.mkdir(os.path.join(_originalDir, "tree/branch"))
with open(os.path.join(_originalDir, "tree/branch/fruit.txt"), "w") as outputFile:
outputFile.write("yummy")
os.mkdir(os.path.join(_originalDir, "tree/branch/another one"))
_isoImageFile = os.path.join(_testDir, "cd.iso")
CommandCapture(["genisoimage",
"-r", "-J", "-T",
"-o", _isoImageFile,
_originalDir],
copyToStdio=False,
exceptionIfAnyStderr=False)
_isoImage = IsoImage(_isoImageFile)
_copyDir = os.path.join(_testDir, "cd2")
_isoImage.copyToDirectory(_copyDir)
finally:
shutil.rmtree(_testDir)
|
{
"content_hash": "6397fa7d3363b6536a245c4aa547107f",
"timestamp": "",
"source": "github",
"line_count": 477,
"max_line_length": 140,
"avg_line_length": 49.339622641509436,
"alnum_prop": 0.6383683875079669,
"repo_name": "srguiwiz/nrvr-commander",
"id": "be4f7b19f38fa02af27e30f103eb37c088f42863",
"size": "23554",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/nrvr/diskimage/isoimage.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "532906"
}
],
"symlink_target": ""
}
|
"""
Created on Mon Apr 1 18:19:38 2013
@author: matz
"""
import sys
import cvtype
import datatype
import document
import generator
import package
import test
# abbreviations
DT = test.Default()
# mean1DWrapper
dcl = document.Document()
dclIncludes = ["<opencv2/core/core.hpp>"]
dcl.text(
"""
double mean(const cv::Mat & input);
""")
dtnIncludes = ["<opencv2/core/core.hpp>"]
dtn = document.Document()
dtn.text(
"""
double mean(const cv::Mat & input)
{
cv::Scalar value = cv::mean(input);
return value[0];
}
""")
mean1DWrapper = package.Function(dcl, dclIncludes, dtn, dtnIncludes)
# merge2Wrapper
dcl = document.Document()
dclIncludes = ["<opencv2/core/core.hpp>"]
dcl.text(
"""
void merge(const cv::Mat & input1, const cv::Mat & input2, cv::Mat & dst);
""")
dtnIncludes = ["<opencv2/core/core.hpp>"]
dtn = document.Document()
dtn.text(
"""
void merge(const cv::Mat & input1, const cv::Mat & input2, cv::Mat & dst)
{
std::vector<cv::Mat> mv(2);
mv[0] = input1;
mv[1] = input2;
dst = dst.reshape(2);
cv::merge(mv, dst);
}
""")
merge2Wrapper = package.Function(dcl, dclIncludes, dtn, dtnIncludes)
# sum1DWrapper
dcl = document.Document()
dclIncludes = ["<opencv2/core/core.hpp>"]
dcl.text(
"""
double sum(const cv::Mat & input);
""")
dtnIncludes = ["<opencv2/core/core.hpp>"]
dtn = document.Document()
dtn.text(
"""
double sum(const cv::Mat & input)
{
cv::Scalar value = cv::sum(input);
return value[0];
}
""")
sum1DWrapper = package.Function(dcl, dclIncludes, dtn, dtnIncludes)
# initializations
initInCopy = document.Document((
"{1}->initializeImage({0}->width(), {0}->height(), {0}->stride(), "
"{1}->data(), {0}->pixelType());").format("src1CastedData", "dstCastedData"
))
initOutCopy = document.Document((
"{1}->initializeImage({1}->width(), {1}->height(), {1}->stride(), "
"{1}->data(), {0}->pixelType());").format("src1CastedData", "dstCastedData"
))
initInDdepth = document.Document((
"runtime::Image::PixelType pixelType = cvsupport::computeOutPixelType("
"convertDdepth(m_ddepth), src1CastedData->pixelType());\n"
"unsigned int stride = runtime::Image::pixelSize(pixelType) * "
"src1CastedData->width();\n"
"{1}->initializeImage({0}->width(), {0}->height(), stride, "
"{1}->data(), pixelType);").format("src1CastedData", "dstCastedData"
))
initOutDdepth = document.Document((
"runtime::Image::PixelType pixelType = cvsupport::computeOutPixelType("
"convertDdepth(m_ddepth), src1CastedData->pixelType());\n"
"unsigned int stride = runtime::Image::pixelSize(pixelType) * "
"src1CastedData->width();\n"
"{1}->initializeImage({1}->width(), {1}->height(), stride, "
"{1}->data(), pixelType);").format("src1CastedData", "dstCastedData"
))
pixelTypeCheck = document.Document(
"""
if((src1CastedData->rows() != src2CastedData->rows()) || (src1CastedData->cols() != src2CastedData->cols()))
throw runtime::InputError(INPUT_SRC_1, *this, "Input images must have the same size.");
if(src1CastedData->numChannels() != src2CastedData->numChannels())
throw runtime::InputError(INPUT_SRC_1, *this, "Input images must have the same number of channels.");
if(src1CastedData->depth() != src2CastedData->depth())
throw runtime::InputError(INPUT_SRC_1, *this, "Input images must have the same depth if the destination depth is not explicitly given.");
""")
pixelTypeDdepthCheck = document.Document(
"""
if((src1CastedData->rows() != src2CastedData->rows()) || (src1CastedData->cols() != src2CastedData->cols()))
throw runtime::InputError(INPUT_SRC_1, *this, "Input images must have the same size.");
if(src1CastedData->numChannels() != src2CastedData->numChannels())
throw runtime::InputError(INPUT_SRC_1, *this, "Input images must have the same number of channels.");
if(m_ddepth == SAME && (src1CastedData->depth() != src2CastedData->depth()))
throw runtime::InputError(INPUT_SRC_1, *this, "Input images must have the same depth if the destination depth is not explicitly given.");
""")
# arguments
srcMatrix = package.Argument(
"src", "Source", cvtype.Mat(),
datatype.Matrix("runtime::Variant::MATRIX")
)
srcImg1 = package.Argument(
"src1", "Source 1", cvtype.Mat(), datatype.Image()
)
srcImg2 = package.Argument(
"src2", "Source 2", cvtype.Mat(), datatype.Image()
)
dstImgDdepth = package.Argument(
"dst", "Destination", cvtype.Mat(), datatype.Image(), initIn = initInDdepth,
initOut = initOutDdepth
)
dstImg = package.Argument(
"dst", "Destination", cvtype.Mat(), datatype.Image(), initIn = initInCopy,
initOut = initOutCopy
)
descriptions = [
package.EnumDescription("SAME", "Same as inputs", -1),
package.EnumDescription("DEPTH_8_BIT", "8-bit", "CV_8U"),
package.EnumDescription("DEPTH_16_BIT", "16-bit", "CV_16U")
]
ddepth = package.EnumParameter(
"ddepth", "Destination depth", descriptions = descriptions,
default = 0
)
noArray = package.Constant("cv::noArray()")
alpha = package.NumericParameter(
"alpha", "Alpha", cvtype.Float64(), datatype.Float64(), default = 1.0
)
beta = package.NumericParameter(
"beta", "Beta", cvtype.Float64(), datatype.Float64(), default = 1.0
)
gamma = package.NumericParameter(
"gamma", "Gamma", cvtype.Float64(), datatype.Float64(), default = 1.0
)
scalar1D = package.Argument(
"value", "Value", cvtype.Float64(), datatype.Float64()
)
# test data
lenna = test.ImageFile("lenna.jpg")
barbara = test.ImageFile("barbara.jpg")
lenna_16bit = test.ImageFile("lenna.jpg", deepColor = True)
barbara_16bit = test.ImageFile("barbara.jpg", deepColor = True)
lenna_bw = test.ImageFile("lenna.jpg", grayscale = True)
barbara_bw = test.ImageFile("barbara.jpg", grayscale = True)
memory = test.ImageBuffer(5000000)
lenna_32f = test.MatrixFile("lenna_32f.npy")
row_32f = test.MatrixFile("row_32f.npy")
row_64f = test.MatrixFile("row_64f.npy")
column_32f = test.MatrixFile("column_32f.npy")
column_64f = test.MatrixFile("column_64f.npy")
# absdiff
manual = package.Option(
"manual", "Manual",
[package.Input(srcImg1), package.Input(srcImg2), package.Output(dstImg)],
inputCheck = pixelTypeCheck,
tests = [
[lenna, barbara, memory],
[lenna_bw, barbara_bw, memory]
]
)
allocate = package.Option(
"allocate", "Allocate",
[package.Input(srcImg1), package.Input(srcImg2), package.Allocation(dstImg)],
inputCheck = pixelTypeCheck,
tests = [
[lenna_16bit, barbara_16bit, DT]
]
)
absdiff = package.Method(
"absdiff", options = [manual, allocate]
)
# add
manual = package.Option(
"manual", "Manual",
[package.Input(srcImg1), package.Input(srcImg2), package.Output(dstImgDdepth),
noArray, ddepth],
inputCheck = pixelTypeDdepthCheck,
tests = [
[lenna, barbara, memory, DT, DT],
[lenna_bw, barbara_bw, memory, DT, DT],
[lenna_16bit, barbara, memory, DT, 1]
]
)
allocate = package.Option(
"allocate", "Allocate",
[package.Input(srcImg1), package.Input(srcImg2),
package.Allocation(dstImgDdepth), noArray, ddepth],
inputCheck = pixelTypeDdepthCheck,
tests = [
[lenna_16bit, barbara_16bit, DT, DT, DT],
[lenna_16bit, barbara, DT, DT, 2]
]
)
add = package.Method(
"add", options = [manual, allocate]
)
# addWeighted
manual = package.Option(
"manual", "Manual",
[package.Input(srcImg1), alpha, package.Input(srcImg2),
beta, gamma, package.Output(dstImgDdepth), ddepth],
inputCheck = pixelTypeDdepthCheck,
tests = [
[lenna, DT, barbara, DT, DT, memory, DT],
[lenna_bw, 2.0, barbara_bw, 0.5, 3.0, memory, DT],
[lenna_16bit, 1.0, barbara, 0.5, -10, memory, 1]
]
)
allocate = package.Option(
"allocate", "Allocate",
[package.Input(srcImg1), alpha, package.Input(srcImg2),
beta, gamma, package.Allocation(dstImgDdepth), ddepth],
inputCheck = pixelTypeDdepthCheck,
tests = [
[lenna_16bit, -1.0, barbara_16bit, 10.0, 2.0, DT, DT],
[lenna_16bit, -10.0, barbara, 2.0, 0.0, DT, 2]
]
)
addWeighted = package.Method(
"addWeighted", options = [manual, allocate]
)
# bitwise_and
manual = package.Option(
"manual", "Manual",
[package.Input(srcImg1), package.Input(srcImg2), package.Output(dstImg)],
inputCheck = pixelTypeCheck,
tests = [
[lenna, barbara, memory],
[lenna_bw, barbara_bw, memory]
]
)
allocate = package.Option(
"allocate", "Allocate",
[package.Input(srcImg1), package.Input(srcImg2), package.Allocation(dstImg)],
inputCheck = pixelTypeCheck,
tests = [
[lenna_16bit, barbara_16bit, DT]
]
)
bitwise_and = package.Method(
"bitwise_and", options = [manual, allocate]
)
# bitwise_not
manual = package.Option(
"manual", "Manual",
[package.Input(srcImg1), package.Output(dstImg)],
tests = [
[lenna, memory],
[lenna_bw, memory]
]
)
allocate = package.Option(
"allocate", "Allocate",
[package.Input(srcImg1), package.Allocation(dstImg)],
tests = [
[lenna_16bit, DT]
]
)
inPlace = package.Option(
"inPlace", "In place",
[package.InputOutput(srcImg1), package.RefInput(dstImg, srcImg1)],
tests = [
[lenna_16bit, DT]
]
)
bitwise_not = package.Method(
"bitwise_not", options = [manual, allocate, inPlace]
)
# bitwise_or
manual = package.Option(
"manual", "Manual",
[package.Input(srcImg1), package.Input(srcImg2), package.Output(dstImg)],
inputCheck = pixelTypeCheck,
tests = [
[lenna, barbara, memory],
[lenna_bw, barbara_bw, memory]
]
)
allocate = package.Option(
"allocate", "Allocate",
[package.Input(srcImg1), package.Input(srcImg2), package.Allocation(dstImg)],
inputCheck = pixelTypeCheck,
tests = [
[lenna_16bit, barbara_16bit, DT]
]
)
bitwise_or = package.Method(
"bitwise_or", options = [manual, allocate]
)
# bitwise_xor
manual = package.Option(
"manual", "Manual",
[package.Input(srcImg1), package.Input(srcImg2), package.Output(dstImg)],
inputCheck = pixelTypeCheck,
tests = [
[lenna, barbara, memory],
[lenna_bw, barbara_bw, memory]
]
)
allocate = package.Option(
"allocate", "Allocate",
[package.Input(srcImg1), package.Input(srcImg2), package.Allocation(dstImg)],
inputCheck = pixelTypeCheck,
tests = [
[lenna_16bit, barbara_16bit, DT]
]
)
bitwise_xor = package.Method(
"bitwise_xor", options = [manual, allocate]
)
# mean
allocate = package.Option(
"allocate", "Allocate",
[package.Input(srcMatrix), package.ReturnValue(scalar1D)],
tests = [
[lenna_bw, DT],
[lenna_32f, DT]
]
)
mean = package.Method(
"mean", namespace = "", options = [allocate]
)
# merge
initMatrixInCopy = document.Document((
"{1}->initializeMatrix({0}->rows(), 2*{0}->cols(), 2*{0}->cols()*{0}->valueSize(), "
"{1}->data(), {0}->valueType());").format("src1CastedData", "dstCastedData"
))
initMatrixOutCopy = document.Document((
"{1}->initializeMatrix({1}->rows(), {1}->cols(), {1}->stride(), "
"{1}->data(), {0}->valueType());").format("src1CastedData", "dstCastedData"
))
valueTypeCheck = document.Document(
"""
if((src1CastedData->rows() != src2CastedData->rows()) || (src1CastedData->cols() != src2CastedData->cols()))
throw runtime::InputError(INPUT_SRC_1, *this, "Input matrices must have the same size.");
if(src1CastedData->type() != src2CastedData->type())
throw runtime::InputError(INPUT_SRC_1, *this, "Input matrices must have the same types.");
""")
srcMatrix1 = package.Argument(
"src1", "Source 1", cvtype.Mat(),
datatype.Matrix("runtime::Variant::MATRIX")
)
srcMatrix2 = package.Argument(
"src2", "Source 2", cvtype.Mat(),
datatype.Matrix("runtime::Variant::MATRIX")
)
dstMatrix = package.Argument(
"dst", "Destination", cvtype.Mat(), datatype.Matrix(),
initIn = initMatrixInCopy, initOut = initMatrixOutCopy
)
manual = package.Option(
"manual", "Manual",
[package.Input(srcMatrix1), package.Input(srcMatrix2),
package.Output(dstMatrix)],
inputCheck = valueTypeCheck,
tests = [
[column_32f, column_32f, memory],
[row_64f, row_64f, memory]
]
)
allocate = package.Option(
"allocate", "Allocate",
[package.Input(srcMatrix1), package.Input(srcMatrix2),
package.Allocation(dstMatrix)],
inputCheck = valueTypeCheck,
tests = [
[column_64f, column_64f, DT],
[row_32f, row_32f, DT]
]
)
merge = package.Method(
"merge", namespace = "", options = [manual, allocate]
)
# sum
allocate = package.Option(
"allocate", "Allocate",
[package.Input(srcMatrix), package.ReturnValue(scalar1D)],
tests = [
[lenna_bw, DT],
[lenna_32f, DT]
]
)
sumFunction = package.Method(
"sum", namespace = "", options = [allocate]
)
core = package.Package(
"cvcore", 0, 1, 0,
methods = [
absdiff,
add,
addWeighted,
bitwise_and,
bitwise_not,
bitwise_or,
bitwise_xor,
mean,
merge,
sumFunction
],
functions = [
mean1DWrapper,
merge2Wrapper,
sum1DWrapper
],
testFiles = [
"barbara.jpg",
"lenna.jpg",
"lenna_32f.npy",
"row_32f.npy",
"row_64f.npy",
"column_32f.npy",
"column_64f.npy"
]
)
package = core
if __name__ == '__main__':
if len(sys.argv) > 1:
for arg in sys.argv[1:]:
generator.generateMethodFiles(package, globals()[arg])
else:
generator.generatePackageFiles(package)
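# Usage sketch (assuming the script is run directly): passing method names
# regenerates only those methods, looked up via globals() above, e.g.
#   python cvcore.py absdiff add
# while running it with no arguments regenerates the whole package.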
|
{
"content_hash": "3f97a1356eb9ead93316ef37276d6220",
"timestamp": "",
"source": "github",
"line_count": 470,
"max_line_length": 142,
"avg_line_length": 29.22340425531915,
"alnum_prop": 0.6398980706224973,
"repo_name": "uboot/stromx-opencv",
"id": "57923618628db4899a26411d9b5bc600ff6e8a03",
"size": "13759",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opencv/cvcore.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "1306994"
},
{
"name": "CMake",
"bytes": "25149"
},
{
"name": "Python",
"bytes": "211367"
}
],
"symlink_target": ""
}
|
import pandas as pd
import numpy as np
## The path here is the .csv file path in HDFS
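## sqlContext is assumed to be injected by the Spark/Databricks notebook runtime;
## note index_col is a pandas-style option that Spark's CSV reader ignores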
pdata = sqlContext.read.format('csv').load("/FileStore/tables/[file name in HDFS]",
index_col="ID", header=True).toPandas()
# cell 2 - Boolean Indexing
pdata.loc[(pdata["Gender"]=="Female") & (pdata["Salary_Account"]=="ICICI Bank") & (pdata["Mobile_Verified"]=="Y"),
["Gender", "Salary_Account", "Mobile_Verified"]]
# cell 3 - apply function, similar to R apply()
def get_missing_data(x):
return sum(x.isnull())
print "find missing data for each column:"
print pdata.apply(get_missing_data, axis = 0)
print "find missing data for each row:"
print pdata.apply(get_missing_data, axis = 1)
# cell 4 - fillna(), updating missing values with the overall mean/mode/median of the column
from scipy.stats import mode
# check the mode
mode(pdata['Gender'])[0][0]
pdata['Gender'].fillna(mode(pdata['Gender'])[0][0], inplace=True)
pdata.apply(get_missing_data, axis=0)
# cell 5 - create Excel style pivot table
# check data type first
pdata.dtypes
# convert Monthly_Income into numerical data
pdata['Monthly_Income'] = pdata['Monthly_Income'].astype(float)
pdata.dtypes
pivot_t = pdata.pivot_table(values=['Monthly_Income'], index=['Gender', 'Mobile_Verified', 'Device_Type'], aggfunc = np.mean)
print pivot_t
# cell 6 - Multiple Indexing
## I like this: iterate only over rows where Monthly_Income is null
for i, r in pdata.loc[pdata['Monthly_Income'].isnull(),:].iterrows():
index_list = tuple([r['Gender'], r['Mobile_Verified'], r['Device_Type']])
pdata.loc[i, 'Monthly_Income'] = pivot_t.loc[index_list].values[0] # using multiple index to locate data
print pdata.apply(get_missing_data, axis=0)
# cell 7 - cross tab
print pd.crosstab(pdata['Gender'], pdata['Mobile_Verified'], margins=True)
print
def get_percentage(ser):
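# ser[-1] is the row's 'All' margin that crosstab(margins=True) appends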
return ser/float(ser[-1])
print pd.crosstab(pdata['Gender'], pdata['Mobile_Verified'], margins=True).apply(get_percentage, axis=1)
# cell 8 - data merging
people_rate = pd.DataFrame([200, 400], index=['Mobile', 'Web-browser'], columns=['people_rate'])
people_rate
data_merge = pdata.merge(right=people_rate, how='inner', left_on='Device_Type', right_index=True, sort=False)
data_merge.pivot_table(values=['Monthly_Income'], index=['Device_Type', 'people_rate'], aggfunc = len)
# sorting on multiple columns
sorted_data = pdata.sort_values(['Loan-Amount_submitted', 'Interest_Rate'], ascending=False)
sorted_data[['Loan-Amount_submitted', 'Interest_Rate']].head(10)
|
{
"content_hash": "c6b0a92bcd449d0e00df707c13dc5402",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 125,
"avg_line_length": 34.56578947368421,
"alnum_prop": 0.6764370003806623,
"repo_name": "hanhanwu/Hanhan_Data_Science_Practice",
"id": "4976b8ab038b370854bf554bf136ee686c436091",
"size": "2900",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "try_pandas.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4999670"
},
{
"name": "Java",
"bytes": "3992"
},
{
"name": "Jupyter Notebook",
"bytes": "34043858"
},
{
"name": "Python",
"bytes": "19866"
},
{
"name": "R",
"bytes": "252946"
}
],
"symlink_target": ""
}
|
import pyxb_114.binding.generate
import pyxb_114.utils.domutils
import os.path
from pyxb_114.exceptions_ import *
import unittest
class TestIncludeDD (unittest.TestCase):
def testDefault (self):
schema_path = '%s/../schemas/test-include-ad.xsd' % (os.path.dirname(__file__),)
self.assertRaises(pyxb_114.SchemaValidationError, pyxb_114.binding.generate.GeneratePython, schema_location=schema_path)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "4c2a999b114bfff185c30dc569f4eb98",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 128,
"avg_line_length": 27.88235294117647,
"alnum_prop": 0.7088607594936709,
"repo_name": "msherry/PyXB-1.1.4",
"id": "c3b12582a94af6356a7fc3131c4e7242eeef1f49",
"size": "474",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/drivers/test-include-ad.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "6307"
},
{
"name": "Python",
"bytes": "1521054"
},
{
"name": "Shell",
"bytes": "23730"
}
],
"symlink_target": ""
}
|
"""
Copyright 2020 Google LLC.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import tensorflow as tf
from smart_news_query_embeddings.models.bert_keras_model import BertKerasModel
from tensorflow.keras.layers import Dense, Input, Flatten, Dropout, concatenate, BatchNormalization, LeakyReLU
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
class TwoTowerModel(BertKerasModel):
def build_model(self):
self.flatten = Flatten(name="flatten")
self.dense1_1 = Dense(self.dense_size, name="dense1_1")
if self.use_batch_norm:
self.bn1 = BatchNormalization(name="bn1")
self.relu1_1 = LeakyReLU(name="relu1_1")
self.dropout1 = Dropout(self.dropout_rate)
self.dense1_2 = Dense(self.dense_size, name="dense1_2")
if self.use_batch_norm:
self.bn2 = BatchNormalization(name="bn2")
self.relu1_2 = LeakyReLU(name="relu1_2")
self.dropout2 = Dropout(self.dropout_rate)
self.dense2_1 = Dense(self.dense_size, name="dense2_1")
self.relu2_1 = LeakyReLU(name="relu2_1")
self.dense2_2 = Dense(self.dense_size, name="dense2_2")
self.relu2_2 = LeakyReLU(name="relu2_2")
self.final_dense = Dense(128, name="final_dense")
self.final_relu = LeakyReLU(name="final_relu")
self.output_layer = Dense(2, activation="sigmoid", name="output_dense")
self.embedding_layers = [
self.bert_layer,
self.flatten,
self.dense1_1,
self.bn1,
self.relu1_1,
self.dropout1,
self.dense1_2,
self.bn2
] if self.use_batch_norm else [
self.bert_layer,
self.flatten,
self.dense1_1,
self.relu1_1,
self.dropout1,
self.dense1_2,
]
def call(self, inputs):
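# tower 1 embeds the BERT token ids, tower 2 embeds the label features;
# the two embeddings are concatenated and passed through a final classifier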
input_ids, input_labels = inputs
out1 = self.bert_layer(input_ids)
out1 = self.flatten(out1)
out1 = self.dense1_1(out1)
if self.use_batch_norm:
out1 = self.bn1(out1)
out1 = self.relu1_1(out1)
out1 = self.dropout1(out1)
out1 = self.dense1_2(out1)
if self.use_batch_norm:
out1 = self.bn2(out1)
out1 = self.relu1_2(out1)
out1 = self.dropout2(out1)
out2 = self.dense2_1(input_labels)
out2 = self.relu2_1(out2)
out2 = self.dense2_2(out2)
out2 = self.relu2_2(out2)
out = concatenate([out1, out2])
out = self.final_dense(out)
out = self.final_relu(out)
out = self.output_layer(out)
return out
|
{
"content_hash": "c91b1c7b8ccb8b63124f8fb459c21c64",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 110,
"avg_line_length": 36.56976744186046,
"alnum_prop": 0.6260731319554849,
"repo_name": "googleinterns/smart-news-query-embeddings",
"id": "59d246e0099fd6b25c0917223721c8f82227c361",
"size": "3145",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "smart_news_query_embeddings/models/two_tower_model.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "66800"
},
{
"name": "Shell",
"bytes": "101"
}
],
"symlink_target": ""
}
|
import os, re, requests, random, json
import config
"""
YTS Bot class, the brain of the bot
"""
class YTSBot:
"""
Initialize the bot
"""
def __init__(self):
self.user = None
self.awaiting_choice = {}
"""
Respond to a channel message
params: userid of user who is talking, message that was sent
return: the bot's response, can be None
"""
def respond(self, userid, message):
if self.user in message:
message = message.split(self.user, 1)[1].lower()
# Check if someone wants the bot to find a movie
if 'find ' in message:
title = message.split('find ', 1)[1]
return self.find_movie(userid, title)
# Check if someone is selecting a movie from the given list
if userid in self.awaiting_choice.keys():
if 'no' in message:
del(self.awaiting_choice[userid])
return random.choice(config.negative_responses) + '<@' + userid + '>. I won\'t download any of them.'
matches = re.findall('\d+', message)
if len(matches) > 0:
if 'about' in message:
return 'Note: This is a debug feature...\n' + str(self.awaiting_choice[userid][int(matches[-1]) -1])
else:
return self.select_movie(userid, int(matches[-1]) - 1)
# If the message is nothing of interest, just ignore it
return None
"""
Let the user select a movie
params: userid of user who is talking, selection number of movie
return: a friendly response from the bot
"""
def select_movie(self, userid, selection):
if selection >= len(self.awaiting_choice[userid]):
return 'That movie is not on the list.'
# Grab movie info out of stored dictionary
movie = self.awaiting_choice[userid][selection]
del(self.awaiting_choice[userid])
# Go download the file
url = [torrent['url'] for torrent in movie['torrents'] if torrent['quality'] == config.search_values['quality']][0]
self.__download(url)
return random.choice(config.positive_responses) + '<@' + userid + '>. I\'ll download ' + movie['title'] + ' (' + str(movie['year']) + ')!'
"""
Find a movie for a user
params: userid of user who is talking, movie title (the search term)
return: a response letting the user know which movies were found
"""
def find_movie(self, userid, movie_title):
data = self.__query(movie_title)['data']
if data['movie_count'] == 0:
return 'I couldn\'t find that movie, <@' + userid + '>.'
# Ask which movie to grab
self.awaiting_choice[userid] = data['movies']
outstring = 'Which movie did you mean, <@' + userid + '>?\n'
outstring += '\n'.join(['[' + str(data['movies'].index(movie) + 1) + '] ' + movie['title'] + ' (' + str(movie['year']) + ')\n' + movie['medium_cover_image'] for movie in data['movies']])
return outstring
"""
Grab file from url
params: url of file
return: None
"""
def __download(self, url):
print('Downloading ' + url)
filename = url.split('/')[-1]
result = requests.get(url)
with open(os.path.expanduser(config.download_folder + '/' + filename + '.torrent'), 'wb') as outfile:
outfile.write(result.content)
return
"""
Query YTS API for movies
params: search term
return: JSON dictionary of search results
"""
def __query(self, search_term):
search_values = config.search_values
search_values['query_term'] = search_term
result = requests.get(config.yts_url, params = search_values)
print('Querying ' + result.url)
text = result.text
return json.loads(text[text.find('{'):])
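# Minimal driving-loop sketch (hypothetical Slack plumbing; the bot mention
# string and user id come from whatever chat integration instantiates this):
#   bot = YTSBot()
#   bot.user = '<@UBOT>'
#   reply = bot.respond('U123', '<@UBOT> find blade runner')
#   if reply:
#       post_to_channel(reply)  # post_to_channel is assumed, not defined here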
|
{
"content_hash": "acb35cd5498f7fd9c0a584633b67fc7a",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 188,
"avg_line_length": 31.48148148148148,
"alnum_prop": 0.66,
"repo_name": "opcecco/yts-slack",
"id": "51b27b945fcb54f14dc16af2768cd61c3de3968f",
"size": "3532",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ytsbot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8513"
}
],
"symlink_target": ""
}
|
"""Handles NSWorkspace notifications from crankd."""
__author__ = 'crc@google.com (Clay Caviness)'
from ApplicationUsage import ApplicationUsage
# from FirefoxPreferenceManager import FirefoxPreferenceManager
class NSWorkspaceHandler(object):
"""Handles NSWorkspace events from crankd. Unusable outside of crankd."""
def __init__(self):
self.au = ApplicationUsage()
# self.fpm = FirefoxPreferenceManager()
def OnApplicationLaunch(self, *args, **kwargs):
"""The main entry point for launches."""
self.au.OnApplicationLaunch(*args, **kwargs)
# self.fpm.OnWillLaunchApplication(*args, **kwargs)
def OnApplicationQuit(self, *args, **kwargs):
"""The main entry point for quits."""
self.au.OnApplicationQuit(*args, **kwargs)
|
{
"content_hash": "5b9bcbd329b8db193063dd72c1358e43",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 75,
"avg_line_length": 31.56,
"alnum_prop": 0.6996197718631179,
"repo_name": "google/macops",
"id": "ad2a26e6f3f4c10e29b279d03da18fa813b03c69",
"size": "1426",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "crankd/NSWorkspaceHandler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1775"
},
{
"name": "Makefile",
"bytes": "19923"
},
{
"name": "Objective-C",
"bytes": "20064"
},
{
"name": "Python",
"bytes": "404292"
},
{
"name": "Ruby",
"bytes": "8289"
},
{
"name": "Shell",
"bytes": "3514"
}
],
"symlink_target": ""
}
|
"""Tests for PrecisionOp."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import numpy as np
import tensorflow as tf
class InTopKTest(tf.test.TestCase):
def _validateInTopK(self, predictions, target, k, expected):
np_ans = np.array(expected)
with self.test_session():
precision = tf.nn.in_top_k(predictions, target, k)
out = precision.eval()
self.assertAllClose(np_ans, out)
self.assertShapeEqual(np_ans, precision)
def testInTop1(self):
predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
target = [3, 1]
self._validateInTopK(predictions, target, 1, [True, False])
def testInTop2(self):
predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
target = [0, 2]
self._validateInTopK(predictions, target, 2, [False, True])
def testInTop2Tie(self):
# Class 2 and 3 tie for 2nd, so both are considered in top 2.
predictions = [[0.1, 0.3, 0.2, 0.2], [0.1, 0.3, 0.2, 0.2]]
target = [2, 3]
self._validateInTopK(predictions, target, 2, [True, True])
if __name__ == "__main__":
tf.test.main()
|
{
"content_hash": "283b8efc9a41c5c4b45dbb7d407830cc",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 65,
"avg_line_length": 29.575,
"alnum_prop": 0.6356720202874049,
"repo_name": "arunhotra/tensorflow",
"id": "c27cfd34f83253aa97e105c0f6477cd47c7ed9cb",
"size": "1183",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tensorflow/python/kernel_tests/in_topk_op_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "127104"
},
{
"name": "C++",
"bytes": "4899429"
},
{
"name": "CSS",
"bytes": "107"
},
{
"name": "HTML",
"bytes": "637241"
},
{
"name": "Java",
"bytes": "44388"
},
{
"name": "JavaScript",
"bytes": "5067"
},
{
"name": "Objective-C",
"bytes": "630"
},
{
"name": "Protocol Buffer",
"bytes": "45213"
},
{
"name": "Python",
"bytes": "2477787"
},
{
"name": "Shell",
"bytes": "1714"
},
{
"name": "TypeScript",
"bytes": "237446"
}
],
"symlink_target": ""
}
|
from rest_framework import permissions
from rest_framework.throttling import UserRateThrottle, AnonRateThrottle, SimpleRateThrottle
import logging
from api.base import settings
logger = logging.getLogger(__name__)
class BaseThrottle(SimpleRateThrottle):
def get_ident(self, request):
if request.META.get('HTTP_X_THROTTLE_TOKEN'):
return request.META['HTTP_X_THROTTLE_TOKEN']
return super(BaseThrottle, self).get_ident(request)
def allow_request(self, request, view):
"""
Implement the check to see if the request should be throttled.
"""
if self.get_ident(request) == settings.BYPASS_THROTTLE_TOKEN:
logger.info('Bypass header (X-Throttle-Token) passed')
return True
if self.rate is None:
return True
self.key = self.get_cache_key(request, view)
if self.key is None:
return True
self.history = self.cache.get(self.key, [])
self.now = self.timer()
# Drop any requests from the history which have now passed the throttle duration
while self.history and self.history[-1] <= self.now - self.duration:
self.history.pop()
if len(self.history) >= self.num_requests:
return self.throttle_failure()
return self.throttle_success()
class NonCookieAuthThrottle(BaseThrottle, AnonRateThrottle):
scope = 'non-cookie-auth'
def allow_request(self, request, view):
"""
Allow all unauthenticated requests that are made with a cookie.
"""
if bool(request.COOKIES):
return True
return super(NonCookieAuthThrottle, self).allow_request(request, view)
class AddContributorThrottle(BaseThrottle, UserRateThrottle):
scope = 'add-contributor'
def allow_request(self, request, view):
"""
Allow all add contributor requests that do not send contributor emails.
"""
if request.method == 'POST' and request.query_params.get('send_email') == 'false':
return True
return super(AddContributorThrottle, self).allow_request(request, view)
class CreateGuidThrottle(BaseThrottle, UserRateThrottle):
scope = 'create-guid'
def allow_request(self, request, view):
"""
Allow all create file requests that do not create new guids.
"""
if not request.query_params.get('create_guid'):
return True
return super(CreateGuidThrottle, self).allow_request(request, view)
class RootAnonThrottle(AnonRateThrottle):
scope = 'root-anon-throttle'
class TestUserRateThrottle(BaseThrottle, UserRateThrottle):
scope = 'test-user'
class TestAnonRateThrottle(BaseThrottle, AnonRateThrottle):
scope = 'test-anon'
class SendEmailThrottle(BaseThrottle, UserRateThrottle):
scope = 'send-email'
def allow_request(self, request, view):
if request.method in permissions.SAFE_METHODS:
return True
return super(SendEmailThrottle, self).allow_request(request, view)
class SendEmailDeactivationThrottle(SendEmailThrottle):
def allow_request(self, request, view):
"""
Throttle deactivation requests on the UserSettings endpoint
"""
if not request.data.get('deactivation_requested'):
return True
return super(SendEmailDeactivationThrottle, self).allow_request(request, view)
class BurstRateThrottle(UserRateThrottle):
scope = 'burst'
class FilesRateThrottle(NonCookieAuthThrottle, UserRateThrottle):
scope = 'files'
class FilesBurstRateThrottle(NonCookieAuthThrottle, UserRateThrottle):
scope = 'files-burst'
|
{
"content_hash": "58a6ed28c3f070f3d49b643712ac939a",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 92,
"avg_line_length": 28.18320610687023,
"alnum_prop": 0.6738894907908992,
"repo_name": "brianjgeiger/osf.io",
"id": "86aa01b1377f38429b42a97922e0407f52ac9b50",
"size": "3692",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "api/base/throttling.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "93287"
},
{
"name": "Dockerfile",
"bytes": "5876"
},
{
"name": "HTML",
"bytes": "364479"
},
{
"name": "JavaScript",
"bytes": "1789498"
},
{
"name": "Jupyter Notebook",
"bytes": "41326"
},
{
"name": "Mako",
"bytes": "685055"
},
{
"name": "Python",
"bytes": "11891113"
},
{
"name": "Shell",
"bytes": "2841"
},
{
"name": "VCL",
"bytes": "13885"
}
],
"symlink_target": ""
}
|
import __builtin__
__builtin__.process = 'client'
# Temporary hack patch:
__builtin__.__dict__.update(__import__('pandac.PandaModules', fromlist=['*']).__dict__)
from direct.extensions_native import HTTPChannel_extensions
from direct.extensions_native import Mat3_extensions
from direct.extensions_native import VBase3_extensions
from direct.extensions_native import VBase4_extensions
from direct.extensions_native import NodePath_extensions
from panda3d.core import loadPrcFile
if __debug__:
loadPrcFile('config/general.prc')
loadPrcFile('config/release/dev.prc')
from direct.directnotify.DirectNotifyGlobal import directNotify
notify = directNotify.newCategory('ClientStart')
notify.setInfo(True)
from otp.settings.Settings import Settings
preferencesFilename = ConfigVariableString(
'preferences-filename', 'preferences.json').getValue()
notify.info('Reading %s...' % preferencesFilename)
__builtin__.settings = Settings(preferencesFilename)
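# seed any preferences missing from the JSON file with defaults before
# they are baked into Panda's PRC variables below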
if 'fullscreen' not in settings:
settings['fullscreen'] = False
if 'music' not in settings:
settings['music'] = True
if 'sfx' not in settings:
settings['sfx'] = True
if 'musicVol' not in settings:
settings['musicVol'] = 1.0
if 'sfxVol' not in settings:
settings['sfxVol'] = 1.0
if 'loadDisplay' not in settings:
settings['loadDisplay'] = 'pandagl'
if 'toonChatSounds' not in settings:
settings['toonChatSounds'] = True
loadPrcFileData('Settings: res', 'win-size %d %d' % tuple(settings.get('res', (800, 600))))
loadPrcFileData('Settings: fullscreen', 'fullscreen %s' % settings['fullscreen'])
loadPrcFileData('Settings: music', 'audio-music-active %s' % settings['music'])
loadPrcFileData('Settings: sfx', 'audio-sfx-active %s' % settings['sfx'])
loadPrcFileData('Settings: musicVol', 'audio-master-music-volume %s' % settings['musicVol'])
loadPrcFileData('Settings: sfxVol', 'audio-master-sfx-volume %s' % settings['sfxVol'])
loadPrcFileData('Settings: loadDisplay', 'load-display %s' % settings['loadDisplay'])
loadPrcFileData('Settings: toonChatSounds', 'toon-chat-sounds %s' % settings['toonChatSounds'])
import os
from toontown.toonbase.ContentPacksManager import ContentPackError
from toontown.toonbase.ContentPacksManager import ContentPacksManager
contentPacksFilepath = ConfigVariableString(
'content-packs-filepath', 'contentpacks/').getValue()
contentPacksSortFilename = ConfigVariableString(
'content-packs-sort-filename', 'sort.yaml').getValue()
if not os.path.exists(contentPacksFilepath):
os.makedirs(contentPacksFilepath)
__builtin__.ContentPackError = ContentPackError
__builtin__.contentPacksMgr = ContentPacksManager(
filepath=contentPacksFilepath, sortFilename=contentPacksSortFilename)
contentPacksMgr.applyAll()
import time
import sys
import random
import __builtin__
try:
launcher
except:
from toontown.launcher.TTILauncher import TTILauncher
launcher = TTILauncher()
__builtin__.launcher = launcher
notify.info('Starting the game...')
if launcher.isDummy():
http = HTTPClient()
else:
http = launcher.http
tempLoader = Loader()
backgroundNode = tempLoader.loadSync(Filename('phase_3/models/gui/loading-background'))
from direct.gui import DirectGuiGlobals
from direct.gui.DirectGui import *
notify.info('Setting the default font...')
import ToontownGlobals
DirectGuiGlobals.setDefaultFontFunc(ToontownGlobals.getInterfaceFont)
launcher.setPandaErrorCode(7)
import ToonBase
ToonBase.ToonBase()
from pandac.PandaModules import *
if base.win is None:
notify.error('Unable to open window; aborting.')
launcher.setPandaErrorCode(0)
launcher.setPandaWindowOpen()
ConfigVariableDouble('decompressor-step-time').setValue(0.01)
ConfigVariableDouble('extractor-step-time').setValue(0.01)
backgroundNodePath = aspect2d.attachNewNode(backgroundNode, 0)
backgroundNodePath.setPos(0.0, 0.0, 0.0)
backgroundNodePath.setScale(render2d, VBase3(1))
backgroundNodePath.find('**/fg').hide()
logo = OnscreenImage(
image='phase_3/maps/toontown-logo.png',
scale=(1 / (4.0/3.0), 1, 1 / (4.0/3.0)),
pos=backgroundNodePath.find('**/fg').getPos())
logo.setTransparency(TransparencyAttrib.MAlpha)
logo.setBin('fixed', 20)
logo.reparentTo(backgroundNodePath)
backgroundNodePath.find('**/bg').setBin('fixed', 10)
base.graphicsEngine.renderFrame()
DirectGuiGlobals.setDefaultRolloverSound(base.loadSfx('phase_3/audio/sfx/GUI_rollover.ogg'))
DirectGuiGlobals.setDefaultClickSound(base.loadSfx('phase_3/audio/sfx/GUI_create_toon_fwd.ogg'))
DirectGuiGlobals.setDefaultDialogGeom(loader.loadModel('phase_3/models/gui/dialog_box_gui'))
import TTLocalizer
from otp.otpbase import OTPGlobals
OTPGlobals.setDefaultProductPrefix(TTLocalizer.ProductPrefix)
if base.musicManagerIsValid:
music = base.loadMusic('phase_3/audio/bgm/ttf_theme.ogg')
if music:
music.setLoop(1)
music.setVolume(0.9)
music.play()
notify.info('Loading the default GUI sounds...')
DirectGuiGlobals.setDefaultRolloverSound(base.loadSfx('phase_3/audio/sfx/GUI_rollover.ogg'))
DirectGuiGlobals.setDefaultClickSound(base.loadSfx('phase_3/audio/sfx/GUI_create_toon_fwd.ogg'))
else:
music = None
import ToontownLoader
from direct.gui.DirectGui import *
serverVersion = base.config.GetString('server-version', 'no_version_set')
version = OnscreenText(serverVersion, pos=(-1.3, -0.975), scale=0.06, fg=Vec4(0, 0, 0, 1), align=TextNode.ALeft)
version.setPos(0.03,0.03)
version.reparentTo(base.a2dBottomLeft)
from toontown.suit import Suit
Suit.loadModels()
loader.beginBulkLoad('init', TTLocalizer.LoaderLabel, 138, 0, TTLocalizer.TIP_NONE, 0)
from ToonBaseGlobal import *
from direct.showbase.MessengerGlobal import *
from toontown.distributed import ToontownClientRepository
cr = ToontownClientRepository.ToontownClientRepository(serverVersion, launcher)
cr.music = music
del music
base.initNametagGlobals()
base.cr = cr
loader.endBulkLoad('init')
from otp.friends import FriendManager
from otp.distributed.OtpDoGlobals import *
cr.generateGlobalObject(OTP_DO_ID_FRIEND_MANAGER, 'FriendManager')
if not launcher.isDummy():
base.startShow(cr, launcher.getGameServer())
else:
base.startShow(cr)
backgroundNodePath.reparentTo(hidden)
backgroundNodePath.removeNode()
del backgroundNodePath
del backgroundNode
del tempLoader
version.cleanup()
del version
base.loader = base.loader
__builtin__.loader = base.loader
autoRun = ConfigVariableBool('toontown-auto-run', 1)
if autoRun:
try:
run()
except SystemExit:
raise
except:
from direct.showbase import PythonUtil
print PythonUtil.describeException()
raise
|
{
"content_hash": "b84dd56b62f5cd369c766b3774605968",
"timestamp": "",
"source": "github",
"line_count": 187,
"max_line_length": 112,
"avg_line_length": 35.411764705882355,
"alnum_prop": 0.76698882512836,
"repo_name": "Spiderlover/Toontown",
"id": "5fda86ea96d147ebc5136abcd46127a2e2de3028",
"size": "6645",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "toontown/toonbase/ClientStart.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "7774"
},
{
"name": "Python",
"bytes": "17241353"
},
{
"name": "Shell",
"bytes": "7699"
}
],
"symlink_target": ""
}
|
__all__ = ['assert_assigned_type_and_shape_match']
import jax
from objax.typing import JaxArray
TRACER_TYPES = (jax.interpreters.partial_eval.JaxprTracer,
jax.interpreters.partial_eval.DynamicJaxprTracer)
def split_shape_and_device(array):
if isinstance(array, jax.interpreters.pxla.ShardedDeviceArray):
return array.shape[0], array.shape[1:]
else:
return None, array.shape
def assert_assigned_type_and_shape_match(existing_tensor, new_tensor):
assert isinstance(new_tensor, JaxArray.__args__), \
f'Assignments to a variable must be instances of JaxArray, but received {type(new_tensor)}.'
new_tensor_device, new_tensor_shape = split_shape_and_device(new_tensor)
self_device, self_shape = split_shape_and_device(existing_tensor)
device_mismatch_error = f'Can not replicate a variable that is currently on ' \
f'{self_device} devices to {new_tensor_device} devices.'
assert (new_tensor_device is None) or (self_device is None) or (self_device == new_tensor_device), \
device_mismatch_error
shorter_length = min(len(new_tensor.shape), len(existing_tensor.shape))
is_special_ok = (isinstance(new_tensor, TRACER_TYPES) or isinstance(existing_tensor, TRACER_TYPES))
is_special_ok = is_special_ok and existing_tensor.shape[-shorter_length:] == new_tensor.shape[-shorter_length:]
shape_mismatch_error = f'Assign can not change shape of variable. The current variable shape is {self_shape},' \
f' but the requested new shape is {new_tensor_shape}.'
assert is_special_ok or new_tensor_shape == self_shape or new_tensor.shape == existing_tensor.shape, \
shape_mismatch_error
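# Usage sketch (hypothetical values):
#   import jax.numpy as jn
#   old = jn.zeros((3, 4))
#   assert_assigned_type_and_shape_match(old, jn.ones((3, 4)))   # passes
#   assert_assigned_type_and_shape_match(old, jn.ones((2, 2)))   # AssertionError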
|
{
"content_hash": "a4990d6b32a189872193464fbf485d8c",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 116,
"avg_line_length": 45.71052631578947,
"alnum_prop": 0.6937248128957973,
"repo_name": "google/objax",
"id": "5cb43e4ff2ed4865c8806ed7dddd128bd673db13",
"size": "2313",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "objax/util/check.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "451588"
},
{
"name": "Shell",
"bytes": "2528"
}
],
"symlink_target": ""
}
|
from Queue import Empty
from contextlib import contextmanager
from multiprocessing import Process, Queue
import os
import signal
import time
import traceback
from . import command
def setup_testing():
"""For testing only: Use threading under the hood instead of multiprocessing
to make coverage work.
"""
global Queue
global Process
del Queue
del Process
from Queue import Queue
from threading import Thread as Process
# Monkeypatch threading Queue to look like multiprocessing Queue.
Queue.cancel_join_thread = lambda self: None
class NormalResult():
def __init__(self, result):
self.result = result
self.exception = None
class ExceptionResult():
def __init__(self, exception):
self.exception = exception
class MaybeResult():
def __init__(self, heartbeat, value):
self.heartbeat = heartbeat
self.value = value
@staticmethod
def create_heartbeat():
return MaybeResult(True, None)
@staticmethod
def create_result(value):
return MaybeResult(False, value)
def Worker(fn, work_queue, done_queue,
process_context_fn=None, process_context_args=None):
"""Worker to be run in a child process.
The worker stops when the poison pill "STOP" is reached.
"""
try:
kwargs = {}
if process_context_fn and process_context_args is not None:
kwargs.update(process_context=process_context_fn(*process_context_args))
for args in iter(work_queue.get, "STOP"):
try:
done_queue.put(NormalResult(fn(*args, **kwargs)))
except command.AbortException:
# SIGINT, SIGTERM or internal hard timeout.
break
except Exception, e:
traceback.print_exc()
print(">>> EXCEPTION: %s" % e)
done_queue.put(ExceptionResult(e))
# When we reach here on normal tear down, all items have been pulled from
# the done_queue before and this should have no effect. On fast abort, it's
# possible that a fast worker left items on the done_queue in memory, which
# will never be pulled. This call purges those to avoid a deadlock.
done_queue.cancel_join_thread()
except KeyboardInterrupt:
assert False, 'Unreachable'
@contextmanager
def without_sig():
int_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
term_handler = signal.signal(signal.SIGTERM, signal.SIG_IGN)
try:
yield
finally:
signal.signal(signal.SIGINT, int_handler)
signal.signal(signal.SIGTERM, term_handler)
class Pool():
"""Distributes tasks to a number of worker processes.
New tasks can be added dynamically even after the workers have been started.
Requirement: Tasks can only be added from the parent process, e.g. while
consuming the results generator."""
# Factor to calculate the maximum number of items in the work/done queue.
# Necessary to not overflow the queue's pipe if a keyboard interrupt happens.
BUFFER_FACTOR = 4
def __init__(self, num_workers, heartbeat_timeout=1):
self.num_workers = num_workers
self.processes = []
self.terminated = False
self.abort_now = False
# Invariant: processing_count >= #work_queue + #done_queue. It is greater
# when a worker takes an item from the work_queue and before the result is
# submitted to the done_queue. It is equal when no worker is working,
# e.g. when all workers have finished, and when no results are processed.
# Count is only accessed by the parent process. Only the parent process is
# allowed to remove items from the done_queue and to add items to the
# work_queue.
self.processing_count = 0
self.heartbeat_timeout = heartbeat_timeout
# Disable sigint and sigterm to prevent subprocesses from capturing the
# signals.
with without_sig():
self.work_queue = Queue()
self.done_queue = Queue()
def imap_unordered(self, fn, gen,
process_context_fn=None, process_context_args=None):
"""Maps function "fn" to items in generator "gen" on the worker processes
in an arbitrary order. The items are expected to be lists of arguments to
the function. Returns a results iterator. A result value of type
MaybeResult either indicates a heartbeat of the runner, i.e. indicating
that the runner is still waiting for the result to be computed, or it wraps
the real result.
Args:
process_context_fn: Function executed once by each worker. Expected to
return a process-context object. If present, this object is passed
as additional argument to each call to fn.
process_context_args: List of arguments for the invocation of
process_context_fn. All arguments will be pickled and sent beyond the
process boundary.
"""
if self.terminated:
return
try:
internal_error = False
gen = iter(gen)
self.advance = self._advance_more
# Disable sigint and sigterm to prevent subprocesses from capturing the
# signals.
with without_sig():
for w in xrange(self.num_workers):
p = Process(target=Worker, args=(fn,
self.work_queue,
self.done_queue,
process_context_fn,
process_context_args))
p.start()
self.processes.append(p)
self.advance(gen)
while self.processing_count > 0:
while True:
try:
# Read from result queue in a responsive fashion. If available,
# this will return a normal result immediately or a heartbeat on
# heartbeat timeout (default 1 second).
result = self._get_result_from_queue()
except:
# TODO(machenbach): Handle a few known types of internal errors
# gracefully, e.g. missing test files.
internal_error = True
continue
if self.abort_now:
# SIGINT, SIGTERM or internal hard timeout.
return
yield result
break
self.advance(gen)
except KeyboardInterrupt:
assert False, 'Unreachable'
except Exception as e:
traceback.print_exc()
print(">>> EXCEPTION: %s" % e)
finally:
self._terminate()
if internal_error:
raise Exception("Internal error in a worker process.")
def _advance_more(self, gen):
while self.processing_count < self.num_workers * self.BUFFER_FACTOR:
try:
self.work_queue.put(gen.next())
self.processing_count += 1
except StopIteration:
self.advance = self._advance_empty
break
def _advance_empty(self, gen):
pass
def add(self, args):
"""Adds an item to the work queue. Can be called dynamically while
processing the results from imap_unordered."""
assert not self.terminated
self.work_queue.put(args)
self.processing_count += 1
def abort(self):
"""Schedules abort on next queue read.
This is safe to call when handling SIGINT, SIGTERM or when an internal
hard timeout is reached.
"""
self.abort_now = True
def _terminate(self):
"""Terminates execution and cleans up the queues.
If abort() was called before termination, this also terminates the
subprocesses and doesn't wait for ongoing tests.
"""
if self.terminated:
return
self.terminated = True
# Drain out work queue from tests
try:
while True:
self.work_queue.get(True, 0.1)
except Empty:
pass
# Make sure all processes stop
for _ in self.processes:
# During normal tear down the workers block on get(). Feed a poison pill
# per worker to make them stop.
self.work_queue.put("STOP")
if self.abort_now:
for p in self.processes:
os.kill(p.pid, signal.SIGTERM)
for p in self.processes:
p.join()
# Drain the queues to prevent stderr chatter when queues are garbage
# collected.
try:
while True: self.work_queue.get(False)
except:
pass
try:
while True: self.done_queue.get(False)
except:
pass
def _get_result_from_queue(self):
"""Attempts to get the next result from the queue.
Returns: A wrapped result if one was available within heartbeat timeout,
a heartbeat result otherwise.
Raises:
Exception: If an exception occurred when processing the task on the
worker side, it is reraised here.
"""
while True:
try:
result = self.done_queue.get(timeout=self.heartbeat_timeout)
self.processing_count -= 1
if result.exception:
raise result.exception
return MaybeResult.create_result(result.result)
except Empty:
return MaybeResult.create_heartbeat()
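# Usage sketch (the mapped function must be a picklable top-level function;
# "square" below is illustrative). Each work item is a list of arguments:
#   def square(n):
#       return n * n
#
#   pool = Pool(num_workers=4)
#   for maybe in pool.imap_unordered(square, ([i] for i in range(10))):
#       if not maybe.heartbeat:
#           print(maybe.value)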
|
{
"content_hash": "7bde0f317c28404107790bb3a1dbfa4d",
"timestamp": "",
"source": "github",
"line_count": 276,
"max_line_length": 79,
"avg_line_length": 31.902173913043477,
"alnum_prop": 0.6549687677455991,
"repo_name": "MTASZTAKI/ApertusVR",
"id": "7c9a250bc383919b2bbde26ef16b28aa18c0d1d3",
"size": "8992",
"binary": false,
"copies": "1",
"ref": "refs/heads/0.9",
"path": "plugins/languageAPI/jsAPI/3rdParty/nodejs/10.1.0/source/deps/v8/tools/testrunner/local/pool.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "7599"
},
{
"name": "C++",
"bytes": "1207412"
},
{
"name": "CMake",
"bytes": "165066"
},
{
"name": "CSS",
"bytes": "1816"
},
{
"name": "GLSL",
"bytes": "223507"
},
{
"name": "HLSL",
"bytes": "141879"
},
{
"name": "HTML",
"bytes": "34827"
},
{
"name": "JavaScript",
"bytes": "140550"
},
{
"name": "Python",
"bytes": "1370"
}
],
"symlink_target": ""
}
|
from typing import Optional
from parlai.core.params import ParlaiParser
from parlai.core.opt import Opt
from parlai.core.message import Message
from parlai.core.teachers import FixedDialogTeacher
from parlai.utils.io import PathManager
import parlai.tasks.md_gender.utils as gend_utils
import parlai.utils.logging as logging
from copy import deepcopy
import os
import random
import sys as _sys
class YelpTeacher(FixedDialogTeacher):
"""
Yelp MD Gender Teacher.
"""
@classmethod
def add_cmdline_args(
cls, parser: ParlaiParser, partial_opt: Optional[Opt] = None
) -> ParlaiParser:
super().add_cmdline_args(parser, partial_opt)
parser = gend_utils.add_common_args(parser)
return parser
def __init__(self, opt, shared=None):
self.opt = opt
self.is_train = 'train' in opt['datatype'] and 'evalmode' not in opt['datatype']
self.add_unknown_classes = opt['add_unknown_classes'] and self.is_train
self.label_candidates = gend_utils.ALL_CANDS
if shared is None:
# set map
self.data = self._setup_data(opt)
else:
self.data = shared['data']
super().__init__(opt, shared)
self.reset()
def _check_data_downloaded(self, opt):
# Checks whether the data is downloaded properly
# Also checks whether data is built, and builds it if so
RESET = '\033[0m'
RED = '\033[1;91m'
YELLOW = '\033[1;93m'
GREEN = '\033[1;92m'
BLUE = '\033[1;96m'
CYAN = '\033[1;94m'
MAGENTA = '\033[1;95m'
# only use colors if we're outputting to a terminal
USE_COLORS = _sys.stdout.isatty()
if not USE_COLORS:
RESET = RED = YELLOW = GREEN = BLUE = CYAN = MAGENTA = ''
# generate the rainbow stars
rainbow = [RED, YELLOW, GREEN, CYAN, BLUE, MAGENTA]
size = 78 // len(rainbow)
stars = ''.join([color + '*' * size for color in rainbow])
stars += RESET
self.data_path = os.path.join(opt['datapath'], 'md_gender', 'yelp')
if not os.path.exists(self.data_path):
PathManager.mkdirs(self.data_path)
if not PathManager.exists(
os.path.join(self.data_path, 'valid.fader.with_cat.40000')
):
raise RuntimeError(
f'\n\n{stars}\nThis data must be downloaded following instructions in '
'the README here:'
'<https://github.com/facebookresearch/MultipleAttributeTextRewriting/blob/main/data/README.md>. '
'\nIt cannot be automatically downloaded, as one must agree to '
'the terms outlined on the website before gaining access to the data.\n\n'
'Once downloaded, please put the data in the following '
f'directory: \n{self.data_path}\n{stars}'
)
elif not PathManager.exists(os.path.join(self.data_path, 'classtrain.txt')):
logging.info('[ Building data ... ]')
# build train
with open(os.path.join(self.data_path, 'classtrain.txt'), 'w') as f:
for fle_num in [4000, 6000, 8000]:
train_fle = f'train.fader.with_cat.{fle_num}'
with open(os.path.join(self.data_path, train_fle)) as g:
lines = g.readlines()
for line in lines:
tabs = line.split('\t')
text = tabs[0]
gend = tabs[1]
if gend == '0':
f.write(f'male\t{text}\n')
elif gend == '1':
f.write(f'female\t{text}\n')
# build valid and test
for pair in [('dev', 'valid'), ('test', 'test')]:
with open(
os.path.join(self.data_path, f'female_only.{pair[0]}.en'), 'w'
) as fem_val:
with open(
os.path.join(self.data_path, f'male_only.{pair[0]}.en'), 'w'
) as masc_val:
for fle_num in [4000, 6000, 8000]:
valid_fle = f'{pair[1]}.fader.with_cat.{fle_num}'
with open(
os.path.join(self.data_path, valid_fle), 'r'
) as g:
lines = g.readlines()
for line in lines:
tabs = line.split('\t')
text = tabs[0]
gend = tabs[1]
if gend == '0':
masc_val.write(f'{text}\n')
elif gend == '1':
fem_val.write(f'{text}\n')
def _load_gender_data(self, datatype):
"""
Load data from the checkpoint.
"""
dt = datatype.split(':')[0]
data = []
folder = self.data_path
if dt == 'train':
gender_cnt = {gend_utils.MASC: 0, gend_utils.FEM: 0}
fle = os.path.join(folder, 'classtrain.txt')
with open(fle, 'r') as f:
lines = f.read().splitlines()
for line in lines:
gender, text = line.split('\t')
data.append(
{
'text': text,
'labels': [f'SELF:{gender}'],
'class_type': 'self',
}
)
gender_cnt[gender] += 1
print('Trainset gender cnts:\n' + '=' * 50)
tot = sum(gender_cnt.values())
for k, v in gender_cnt.items():
print(f'{k}: {v} ({v / tot})')
print(f'TOTAL: {tot}')
else:
if dt == 'valid':
dt = 'dev'
# female data
female_fle = os.path.join(folder, f'female_only.{dt}.en')
# male data
male_fle = os.path.join(folder, f'male_only.{dt}.en')
with open(female_fle, 'r') as f:
with open(male_fle, 'r') as m:
f_lines = f.read().splitlines()
m_lines = m.read().splitlines()
for f_line, m_line in zip(f_lines, m_lines):
# alternate this
data.append(
{
'text': f_line,
'labels': [f'SELF:{gend_utils.FEM}'],
'class_type': 'self',
}
)
data.append(
{
'text': m_line,
'labels': [f'SELF:{gend_utils.MASC}'],
'class_type': 'self',
}
)
return data
def _setup_data(self, opt):
# check that the data was downloaded and set up properly
self._check_data_downloaded(opt)
# Load map from image ID to gender
data = self._load_gender_data(opt['datatype'])
extra_data = []
if self.add_unknown_classes:
# load about data (unknown but inferred)
extra_data = gend_utils.get_inferred_about_data(self.opt['task'], self.opt)
# now create partner/TO data: true neutral
for ex in data:
partner_ex = deepcopy(ex)
partner_ex['labels'] = [f'PARTNER:{gend_utils.NEUTRAL}']
partner_ex['class_type'] = 'neutral'
extra_data.append(partner_ex)
sample_rate = self.opt['unknown_temp']
if sample_rate < 1.0:
to_samp = int(sample_rate * len(extra_data))
sampled = random.sample(extra_data, to_samp)
data += sampled
else:
data += extra_data
if self.is_train:
random.shuffle(data)
return data
def get(self, episode_idx, entry_idx=0):
ep = self.data[episode_idx]
ep['label_candidates'] = self.label_candidates[ep['class_type']]
ep['id'] = 'Yelp Gender'
ep['episode_done'] = True
return Message(ep)
def num_examples(self):
return len(self.data)
def num_episodes(self):
return len(self.data)
def share(self):
shared = super().share()
shared['data'] = self.data
return shared
|
{
"content_hash": "a95655693ff2c79ece915608c42de119",
"timestamp": "",
"source": "github",
"line_count": 225,
"max_line_length": 113,
"avg_line_length": 38.76,
"alnum_prop": 0.4709322325421397,
"repo_name": "facebookresearch/ParlAI",
"id": "fd9a6114fa32e7490d79e1ccd35ce1756abf1b19",
"size": "8921",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "parlai/tasks/md_gender/yelp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "2000"
},
{
"name": "CSS",
"bytes": "38474"
},
{
"name": "Cuda",
"bytes": "4118"
},
{
"name": "Dockerfile",
"bytes": "1218"
},
{
"name": "HTML",
"bytes": "645771"
},
{
"name": "JavaScript",
"bytes": "405110"
},
{
"name": "Makefile",
"bytes": "289"
},
{
"name": "Python",
"bytes": "6802410"
},
{
"name": "Shell",
"bytes": "26147"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function, unicode_literals
def prepare_dependencies(self):
self.add_to_build_file('a', 'java_library(name="a")')
self.add_to_build_file('b', 'java_library(name="b", dependencies=["a:a"])')
self.add_to_build_file('c', 'java_library(name="c", dependencies=["a:a"])')
self.add_to_build_file('d', 'java_library(name="d", dependencies=["a:a", "b"])')
targets = { 'a': self.make_target('a') }
targets['b'] = self.make_target('b', dependencies=[targets['a']])
targets['c'] = self.make_target('c', dependencies=[targets['a'], targets['b']])
targets['d'] = self.make_target('d', dependencies=[targets['a'], targets['b']])
return targets
|
{
"content_hash": "882e1a4a8c08278f0ccfde4111c2d94f",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 82,
"avg_line_length": 47.06666666666667,
"alnum_prop": 0.6458923512747875,
"repo_name": "twitter/pants",
"id": "5654e02de0a46b3829d9ba38e8fe72c8cf720539",
"size": "853",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/buildrefactor/tests/python/pants_test/contrib/buildrefactor/buildozer_util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "655"
},
{
"name": "C++",
"bytes": "2010"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "Dockerfile",
"bytes": "5639"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "2765"
},
{
"name": "HTML",
"bytes": "85294"
},
{
"name": "Java",
"bytes": "498956"
},
{
"name": "JavaScript",
"bytes": "22906"
},
{
"name": "Python",
"bytes": "6700799"
},
{
"name": "Rust",
"bytes": "765598"
},
{
"name": "Scala",
"bytes": "89346"
},
{
"name": "Shell",
"bytes": "94395"
},
{
"name": "Thrift",
"bytes": "2953"
}
],
"symlink_target": ""
}
|
import os
from unittest import mock
import flask
import markupsafe
import pytest
from airflow.dag_processing.processor import DagFileProcessor
from airflow.security import permissions
from airflow.utils.session import create_session
from airflow.utils.state import State
from airflow.www.utils import UIAlert
from airflow.www.views import FILTER_STATUS_COOKIE, FILTER_TAGS_COOKIE
from tests.test_utils.api_connexion_utils import create_user
from tests.test_utils.db import clear_db_dags, clear_db_import_errors, clear_db_serialized_dags
from tests.test_utils.www import check_content_in_response, check_content_not_in_response, client_with_login
def clean_db():
clear_db_dags()
clear_db_import_errors()
clear_db_serialized_dags()
@pytest.fixture(autouse=True)
def setup():
clean_db()
yield
clean_db()
def test_home(capture_templates, admin_client):
with capture_templates() as templates:
resp = admin_client.get('home', follow_redirects=True)
check_content_in_response('DAGs', resp)
val_state_color_mapping = (
'const STATE_COLOR = {'
'"deferred": "mediumpurple", "failed": "red", '
'"null": "lightblue", "queued": "gray", '
'"removed": "lightgrey", "restarting": "violet", "running": "lime", '
'"scheduled": "tan", "sensing": "mediumpurple", '
'"shutdown": "blue", "skipped": "pink", '
'"success": "green", "up_for_reschedule": "turquoise", '
'"up_for_retry": "gold", "upstream_failed": "orange"};'
)
check_content_in_response(val_state_color_mapping, resp)
assert len(templates) == 1
assert templates[0].name == 'airflow/dags.html'
state_color_mapping = State.state_color.copy()
state_color_mapping["null"] = state_color_mapping.pop(None)
assert templates[0].local_context['state_color'] == state_color_mapping
def test_home_filter_tags(admin_client):
with admin_client:
admin_client.get('home?tags=example&tags=data', follow_redirects=True)
assert 'example,data' == flask.session[FILTER_TAGS_COOKIE]
admin_client.get('home?reset_tags', follow_redirects=True)
assert flask.session[FILTER_TAGS_COOKIE] is None
def test_home_status_filter_cookie(admin_client):
with admin_client:
admin_client.get('home', follow_redirects=True)
assert 'all' == flask.session[FILTER_STATUS_COOKIE]
admin_client.get('home?status=active', follow_redirects=True)
assert 'active' == flask.session[FILTER_STATUS_COOKIE]
admin_client.get('home?status=paused', follow_redirects=True)
assert 'paused' == flask.session[FILTER_STATUS_COOKIE]
admin_client.get('home?status=all', follow_redirects=True)
assert 'all' == flask.session[FILTER_STATUS_COOKIE]
@pytest.fixture(scope="module")
def user_single_dag(app):
"""Create User that can only access the first DAG from TEST_FILTER_DAG_IDS"""
return create_user(
app,
username="user_single_dag",
role_name="role_single_dag",
permissions=[
(permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE),
(permissions.ACTION_CAN_READ, permissions.resource_name_for_dag(TEST_FILTER_DAG_IDS[0])),
],
)
@pytest.fixture()
def client_single_dag(app, user_single_dag):
"""Client for User that can only access the first DAG from TEST_FILTER_DAG_IDS"""
return client_with_login(
app,
username="user_single_dag",
password="user_single_dag",
)
TEST_FILTER_DAG_IDS = ['filter_test_1', 'filter_test_2']
def _process_file(file_path, session):
dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
dag_file_processor.process_file(file_path, [], False, session)
@pytest.fixture()
def working_dags(tmpdir):
dag_contents_template = "from airflow import DAG\ndag = DAG('{}')"
with create_session() as session:
for dag_id in TEST_FILTER_DAG_IDS:
filename = os.path.join(tmpdir, f"{dag_id}.py")
with open(filename, "w") as f:
f.writelines(dag_contents_template.format(dag_id))
_process_file(filename, session)
@pytest.fixture()
def broken_dags(tmpdir, working_dags):
with create_session() as session:
for dag_id in TEST_FILTER_DAG_IDS:
filename = os.path.join(tmpdir, f"{dag_id}.py")
with open(filename, "w") as f:
f.writelines('airflow DAG')
_process_file(filename, session)
def test_home_importerrors(broken_dags, user_client):
# Users with "can read on DAGs" gets all DAG import errors
resp = user_client.get('home', follow_redirects=True)
check_content_in_response("Import Errors", resp)
for dag_id in TEST_FILTER_DAG_IDS:
check_content_in_response(f"/{dag_id}.py", resp)
@pytest.mark.parametrize('page', ['home', 'home?status=active', 'home?status=paused', 'home?status=all'])
def test_home_importerrors_filtered_singledag_user(broken_dags, client_single_dag, page):
# Users that can only see certain DAGs get a filtered list of import errors
resp = client_single_dag.get(page, follow_redirects=True)
check_content_in_response("Import Errors", resp)
    # They can see the first DAG's import error
check_content_in_response(f"/{TEST_FILTER_DAG_IDS[0]}.py", resp)
# But not the rest
for dag_id in TEST_FILTER_DAG_IDS[1:]:
check_content_not_in_response(f"/{dag_id}.py", resp)
def test_home_dag_list(working_dags, user_client):
# Users with "can read on DAGs" gets all DAGs
resp = user_client.get('home', follow_redirects=True)
for dag_id in TEST_FILTER_DAG_IDS:
check_content_in_response(f"dag_id={dag_id}", resp)
def test_home_dag_list_filtered_singledag_user(working_dags, client_single_dag):
# Users that can only see certain DAGs get a filtered list
resp = client_single_dag.get('home', follow_redirects=True)
# They can see the first DAG
check_content_in_response(f"dag_id={TEST_FILTER_DAG_IDS[0]}", resp)
# But not the rest
for dag_id in TEST_FILTER_DAG_IDS[1:]:
check_content_not_in_response(f"dag_id={dag_id}", resp)
def test_home_robots_header_in_response(user_client):
# Responses should include X-Robots-Tag header
resp = user_client.get('home', follow_redirects=True)
assert resp.headers['X-Robots-Tag'] == 'noindex, nofollow'
@pytest.mark.parametrize(
"client, flash_message, expected",
[
("user_client", UIAlert("hello world"), True),
("user_client", UIAlert("hello world", roles=["User"]), True),
("user_client", UIAlert("hello world", roles=["User", "Admin"]), True),
("user_client", UIAlert("hello world", roles=["Admin"]), False),
("admin_client", UIAlert("hello world"), True),
("admin_client", UIAlert("hello world", roles=["Admin"]), True),
("admin_client", UIAlert("hello world", roles=["User", "Admin"]), True),
],
)
def test_dashboard_flash_messages_role_filtering(request, client, flash_message, expected):
with mock.patch("airflow.settings.DASHBOARD_UIALERTS", [flash_message]):
resp = request.getfixturevalue(client).get("home", follow_redirects=True)
if expected:
check_content_in_response(flash_message.message, resp)
else:
check_content_not_in_response(flash_message.message, resp)
def test_dashboard_flash_messages_many(user_client):
messages = [
UIAlert("hello world"),
UIAlert("im_not_here", roles=["Admin"]),
UIAlert("_hello_world_"),
]
with mock.patch("airflow.settings.DASHBOARD_UIALERTS", messages):
resp = user_client.get("home", follow_redirects=True)
check_content_in_response("hello world", resp)
check_content_not_in_response("im_not_here", resp)
check_content_in_response("_hello_world_", resp)
def test_dashboard_flash_messages_markup(user_client):
link = '<a href="http://example.com">hello world</a>'
user_input = markupsafe.Markup("Hello <em>%s</em>") % ("foo&bar",)
messages = [
UIAlert(link, html=True),
UIAlert(user_input),
]
with mock.patch("airflow.settings.DASHBOARD_UIALERTS", messages):
resp = user_client.get("home", follow_redirects=True)
check_content_in_response(link, resp)
check_content_in_response(user_input, resp)
def test_dashboard_flash_messages_type(user_client):
messages = [
UIAlert("hello world", category="foo"),
]
with mock.patch("airflow.settings.DASHBOARD_UIALERTS", messages):
resp = user_client.get("home", follow_redirects=True)
check_content_in_response("hello world", resp)
check_content_in_response("alert-foo", resp)
|
{
"content_hash": "720f0fe0b13641211d2d4ac08717deb2",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 108,
"avg_line_length": 38.11739130434783,
"alnum_prop": 0.6625983802897228,
"repo_name": "mistercrunch/airflow",
"id": "9b16acea84fd2ec0d72a8f2592742786dfc6d82a",
"size": "9553",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/www/views/test_views_home.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "36341"
},
{
"name": "HTML",
"bytes": "99243"
},
{
"name": "JavaScript",
"bytes": "891460"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "773270"
},
{
"name": "Shell",
"bytes": "5659"
}
],
"symlink_target": ""
}
|
import random, os
from flask import *
import names
# set up flask constants
DEBUG = False
SECRET_KEY = "donotcare"
USERNAME = "donotcare"
PASSWORD = "donotcare"
INDEX_NAME = "nameindex.txt"
INDEX_SIZE = 1000
# create app
app = Flask(__name__)
app.config.from_object(__name__)
# highest valid index for random.randint (its upper bound is inclusive)
minAdj = len(names.adjectives)-1
minAni = len(names.animals)-1
index = []
pos = 0
def generateNameList():
def generateName():
return "%s%s" % (
names.adjectives[random.randint(0, minAdj)],
names.animals[random.randint(0, minAni)]
)
index = []
while len(index) < INDEX_SIZE:
name = generateName()
if name not in index:
index.append(name)
if os.path.exists(INDEX_NAME):
os.remove(INDEX_NAME)
f = open(INDEX_NAME, "w")
f.write("\n".join(index))
f.close()
def loadIndex():
global index
f = open(INDEX_NAME, "r")
data = f.read()
f.close()
index = data.split("\n")
@app.route('/')
def index():
global pos
global INDEX_SIZE
pos += 1
if pos-1 < len(index):
return index[pos-1]
else:
# names exhausted, generate new index, keep pos counter the same
app.logger.warning("Names exhaused. From this point, duplicates are possible.")
INDEX_SIZE = INDEX_SIZE*2
pos = 0
generateNameList()
loadIndex()
return index[pos-1]
if __name__ == '__main__':
if os.path.exists(INDEX_NAME):
loadIndex()
else:
generateNameList()
loadIndex()
app.run(host='0.0.0.0')
|
{
"content_hash": "01d028573a63333f67f27eeb60ae12f8",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 87,
"avg_line_length": 19.341463414634145,
"alnum_prop": 0.5813366960907944,
"repo_name": "MatthewJWalls/nomenclature",
"id": "2f92770c70373227bff26550e86dbe112fd8fc94",
"size": "1609",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20947"
},
{
"name": "Shell",
"bytes": "208"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from django.utils import timezone
from sentry.testutils import AcceptanceTestCase
class ProjectOverviewTest(AcceptanceTestCase):
def setUp(self):
super(ProjectOverviewTest, self).setUp()
self.user = self.create_user('foo@example.com')
self.org = self.create_organization(
owner=self.user, name='Rowdy Tiger')
self.team = self.create_team(
organization=self.org, name='Mariachi Band')
self.project = self.create_project(
organization=self.org,
teams=[self.team],
name='Bengal',
)
self.login_as(self.user)
self.path = '/{}/{}/dashboard/'.format(
self.org.slug, self.project.slug)
def test_with_issues(self):
self.project.update(first_event=timezone.now())
self.create_group(
project=self.project,
message='Foo bar',
)
self.browser.get(self.path)
self.browser.wait_until('.chart-wrapper')
self.browser.wait_until_not('.loading')
self.browser.snapshot('project dashboard with issues')
def test_with_no_issues(self):
self.project.update(first_event=timezone.now())
self.browser.get(self.path)
self.browser.wait_until_not('.loading')
self.browser.wait_until('.group-list-empty')
self.browser.wait_until_not('.loading')
self.browser.snapshot('project dashboard without issues')
|
{
"content_hash": "7bbd04e1822a75ba0b6416d98f5b1752",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 65,
"avg_line_length": 35.523809523809526,
"alnum_prop": 0.6260053619302949,
"repo_name": "ifduyue/sentry",
"id": "77101f5b15dc8006ab53f90abd8241d788a6d69d",
"size": "1492",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/acceptance/test_project_overview.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "301292"
},
{
"name": "HTML",
"bytes": "241298"
},
{
"name": "JavaScript",
"bytes": "3295572"
},
{
"name": "Lua",
"bytes": "65795"
},
{
"name": "Makefile",
"bytes": "6892"
},
{
"name": "Python",
"bytes": "36910084"
},
{
"name": "Ruby",
"bytes": "217"
},
{
"name": "Shell",
"bytes": "5701"
}
],
"symlink_target": ""
}
|
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['None'] , ['PolyTrend'] , ['BestCycle'] , ['NoAR'] );
|
{
"content_hash": "8efa9ae9a7ab02c1a371c2753f8137f5",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 75,
"avg_line_length": 37,
"alnum_prop": 0.6959459459459459,
"repo_name": "antoinecarme/pyaf",
"id": "69503bcdd4ac1b8a1e483e217abecf1c3a2f001e",
"size": "148",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/model_control/detailed/transf_None/model_control_one_enabled_None_PolyTrend_BestCycle_NoAR.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
class easyconfig(object):
    def __init__(self, filepath, seperator=":", encode="utf-8"):
        self.encode = encode
        self.filepath = filepath
        self.seperator = seperator
    def newconf(self, key, value):
        # Append a new "key<seperator>value" entry to the config file.
        with open(self.filepath, "a", encoding=self.encode) as file:
            file.write("{}{}{}\n".format(str(key), str(self.seperator),
                                         str(value)))
    def readconf(self, key):
        # Return the value stored for `key`, or None if it is missing.
        with open(self.filepath, "r", encoding=self.encode) as file:
            for line in file:
                parts = line.rstrip("\n").split(self.seperator, 1)
                if len(parts) == 2 and parts[0] == key:
                    return parts[1]
        return None
    def writeconf(self, key, value):
        # Rewrite the file, replacing the entry for `key` (append if absent).
        newline = "{}{}{}\n".format(str(key), str(self.seperator), str(value))
        with open(self.filepath, "r", encoding=self.encode) as file:
            lines = file.readlines()
        for i, line in enumerate(lines):
            if line.split(self.seperator, 1)[0] == key:
                lines[i] = newline
                break
        else:
            lines.append(newline)
        with open(self.filepath, "w", encoding=self.encode) as file:
            file.writelines(lines)
        return True
    def delconf(self, key):
        # Rewrite the file without the entry for `key`.
        with open(self.filepath, "r", encoding=self.encode) as file:
            lines = file.readlines()
        lines = [line for line in lines
                 if line.split(self.seperator, 1)[0] != key]
        with open(self.filepath, "w", encoding=self.encode) as file:
            file.writelines(lines)
        return True
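# Usage sketch (the file name and keys are illustrative):
#     conf = easyconfig("settings.conf")
#     conf.newconf("host", "localhost")
#     conf.writeconf("host", "127.0.0.1")
#     print(conf.readconf("host"))   # -> "127.0.0.1"
#     conf.delconf("host")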
|
{
"content_hash": "d20e7f61822b2dc63a886e48df854e71",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 80,
"avg_line_length": 36.68181818181818,
"alnum_prop": 0.49008674101610905,
"repo_name": "nam3less/Snippets",
"id": "189b4c42ac0279a5360f204f247b0edf6add63d2",
"size": "1614",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "EasyConfig/EasyConfig.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "4614"
},
{
"name": "Python",
"bytes": "1614"
}
],
"symlink_target": ""
}
|
__author__ = 'jeffrey.starr@ztoztechnologies.com'
from infer import infer
|
{
"content_hash": "dd884e2312239b91b0a58013a9acc224",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 49,
"avg_line_length": 25,
"alnum_prop": 0.76,
"repo_name": "jeffreystarr/dateinfer",
"id": "50b674e594df923c3344edb1e36d2f53cd031ad4",
"size": "75",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "dateinfer/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "27713"
}
],
"symlink_target": ""
}
|
from actions import *
|
{
"content_hash": "84ab28d4fdf16aeb96e49e67f819f4e8",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 21,
"avg_line_length": 21,
"alnum_prop": 0.8095238095238095,
"repo_name": "python-acoustics/Sea",
"id": "06b967f676d7d412ac146f1e9958c5546d02d673",
"size": "23",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "gui/addItem/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "220852"
},
{
"name": "Shell",
"bytes": "5106"
}
],
"symlink_target": ""
}
|
"""HTTP proxy for opening RPC connection to dashd.
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
"""
import base64
import decimal
from http import HTTPStatus
import http.client
import json
import logging
import os
import socket
import time
import urllib.parse
HTTP_TIMEOUT = 30
USER_AGENT = "AuthServiceProxy/0.1"
log = logging.getLogger("BitcoinRPC")
class JSONRPCException(Exception):
def __init__(self, rpc_error, http_status=None):
try:
errmsg = '%(message)s (%(code)i)' % rpc_error
except (KeyError, TypeError):
errmsg = ''
super().__init__(errmsg)
self.error = rpc_error
self.http_status = http_status
def EncodeDecimal(o):
if isinstance(o, decimal.Decimal):
return str(o)
raise TypeError(repr(o) + " is not JSON serializable")
class AuthServiceProxy():
__id_count = 0
# ensure_ascii: escape unicode as \uXXXX, passed to json.dumps
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None, ensure_ascii=True):
self.__service_url = service_url
self._service_name = service_name
self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests
self.__url = urllib.parse.urlparse(service_url)
user = None if self.__url.username is None else self.__url.username.encode('utf8')
passwd = None if self.__url.password is None else self.__url.password.encode('utf8')
authpair = user + b':' + passwd
self.__auth_header = b'Basic ' + base64.b64encode(authpair)
self.timeout = timeout
self._set_conn(connection)
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
# Python internal stuff
raise AttributeError
if self._service_name is not None:
name = "%s.%s" % (self._service_name, name)
return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
def _request(self, method, path, postdata):
'''
        Do an HTTP request, with retry if we get disconnected (e.g. due to a timeout).
This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
'''
headers = {'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'}
if os.name == 'nt':
# Windows somehow does not like to re-use connections
# TODO: Find out why the connection would disconnect occasionally and make it reusable on Windows
# Avoid "ConnectionAbortedError: [WinError 10053] An established connection was aborted by the software in your host machine"
self._set_conn()
try:
self.__conn.request(method, path, postdata, headers)
return self._get_response()
except (BrokenPipeError, ConnectionResetError):
# Python 3.5+ raises BrokenPipeError when the connection was reset
# ConnectionResetError happens on FreeBSD
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
except OSError as e:
retry = (
'[WinError 10053] An established connection was aborted by the software in your host machine' in str(e))
if retry:
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
else:
raise
def get_request(self, *args, **argsn):
AuthServiceProxy.__id_count += 1
log.debug("-{}-> {} {}".format(
AuthServiceProxy.__id_count,
self._service_name,
json.dumps(args or argsn, default=EncodeDecimal, ensure_ascii=self.ensure_ascii),
))
if args and argsn:
raise ValueError('Cannot handle both named and positional arguments')
return {'version': '1.1',
'method': self._service_name,
'params': args or argsn,
'id': AuthServiceProxy.__id_count}
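    # Example of the request dict built above (method name and id illustrative):
    #     {"version": "1.1", "method": "getblockcount", "params": [], "id": 7}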
def __call__(self, *args, **argsn):
postdata = json.dumps(self.get_request(*args, **argsn), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
response, status = self._request('POST', self.__url.path, postdata.encode('utf-8'))
if response['error'] is not None:
raise JSONRPCException(response['error'], status)
elif 'result' not in response:
raise JSONRPCException({
'code': -343, 'message': 'missing JSON-RPC result'}, status)
elif status != HTTPStatus.OK:
raise JSONRPCException({
'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status)
else:
return response['result']
def batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
log.debug("--> " + postdata)
response, status = self._request('POST', self.__url.path, postdata.encode('utf-8'))
if status != HTTPStatus.OK:
raise JSONRPCException({
'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status)
return response
def _get_response(self):
req_start_time = time.time()
try:
http_response = self.__conn.getresponse()
except socket.timeout:
raise JSONRPCException({
'code': -344,
'message': '%r RPC took longer than %f seconds. Consider '
'using larger timeout for calls that take '
'longer to return.' % (self._service_name,
self.__conn.timeout)})
if http_response is None:
raise JSONRPCException({
'code': -342, 'message': 'missing HTTP response from server'})
content_type = http_response.getheader('Content-Type')
if content_type != 'application/json':
raise JSONRPCException(
{'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (http_response.status, http_response.reason)},
http_response.status)
responsedata = http_response.read().decode('utf8')
response = json.loads(responsedata, parse_float=decimal.Decimal)
elapsed = time.time() - req_start_time
if "error" in response and response["error"] is None:
log.debug("<-%s- [%.6f] %s" % (response["id"], elapsed, json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
else:
log.debug("<-- [%.6f] %s" % (elapsed, responsedata))
return response, http_response.status
def __truediv__(self, relative_uri):
return AuthServiceProxy("{}/{}".format(self.__service_url, relative_uri), self._service_name, connection=self.__conn)
def _set_conn(self, connection=None):
port = 80 if self.__url.port is None else self.__url.port
if connection:
self.__conn = connection
self.timeout = connection.timeout
elif self.__url.scheme == 'https':
self.__conn = http.client.HTTPSConnection(self.__url.hostname, port, timeout=self.timeout)
else:
self.__conn = http.client.HTTPConnection(self.__url.hostname, port, timeout=self.timeout)
|
{
"content_hash": "27b6e000d48815b0d2636d1106bcc3df",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 155,
"avg_line_length": 43.4207650273224,
"alnum_prop": 0.6042033727661716,
"repo_name": "dashpay/dash",
"id": "3252780e1484f2171be3d196ca5bc136656dbb6e",
"size": "8838",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test/functional/test_framework/authproxy.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28178"
},
{
"name": "C",
"bytes": "1866352"
},
{
"name": "C++",
"bytes": "9729795"
},
{
"name": "CMake",
"bytes": "32255"
},
{
"name": "CSS",
"bytes": "113028"
},
{
"name": "Dockerfile",
"bytes": "6344"
},
{
"name": "GDB",
"bytes": "444"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "M4",
"bytes": "235904"
},
{
"name": "Makefile",
"bytes": "128711"
},
{
"name": "Objective-C++",
"bytes": "5478"
},
{
"name": "Python",
"bytes": "1899906"
},
{
"name": "QMake",
"bytes": "1389"
},
{
"name": "Sage",
"bytes": "39795"
},
{
"name": "Shell",
"bytes": "134642"
}
],
"symlink_target": ""
}
|
__version__=''' $Id$ '''
"""Tests performed on all Python source files of the ReportLab distribution.
"""
from reportlab.lib.testutils import setOutDir,makeSuiteForClasses, SecureTestCase, GlobDirectoryWalker, outputfile, printLocation
setOutDir(__name__)
import os, sys, string, fnmatch, re
import unittest
from reportlab.lib.utils import open_and_read, open_and_readlines
# Helper function and class.
def unique(seq):
"Remove elements from a list that occur more than once."
# Return input if it has less than 2 elements.
if len(seq) < 2:
return seq
# Make a sorted copy of the input sequence.
cnvt = isinstance(seq,basestring)
seq2 = seq[:]
if cnvt: seq2 = list(seq2)
seq2.sort()
# Remove adjacent elements if they are identical.
i = 0
while i < len(seq2)-1:
elem = seq2[i]
try:
while elem == seq2[i+1]:
del seq2[i+1]
except IndexError:
pass
i += 1
# Try to return something of the same type as the input.
if cnvt:
return seq[0:0].join(seq2)
else:
return seq2
class SelfTestCase(unittest.TestCase):
"Test unique() function."
def testUnique(self):
"Test unique() function."
cases = [([], []),
([0], [0]),
([0, 1, 2], [0, 1, 2]),
([2, 1, 0], [0, 1, 2]),
([0, 0, 1, 1, 2, 2, 3, 3], [0, 1, 2, 3]),
('abcabcabc', 'abc')
]
msg = "Failed: unique(%s) returns %s instead of %s."
for sequence, expectedOutput in cases:
output = unique(sequence)
args = (sequence, output, expectedOutput)
assert output == expectedOutput, msg % args
class AsciiFileTestCase(unittest.TestCase):
"Test if Python files are pure ASCII ones."
def testAscii(self):
"Test if Python files are pure ASCII ones."
from reportlab.lib.testutils import RL_HOME
allPyFiles = GlobDirectoryWalker(RL_HOME, '*.py')
for path in allPyFiles:
fileContent = open_and_read(path,'r')
nonAscii = filter(lambda c: ord(c)>127, fileContent)
nonAscii = unique(nonAscii)
truncPath = path[string.find(path, 'reportlab'):]
args = (truncPath, repr(map(ord, nonAscii)))
msg = "File %s contains characters: %s." % args
## if nonAscii:
## print msg
assert nonAscii == '', msg
class FilenameTestCase(unittest.TestCase):
"Test if Python files contain trailing digits."
def testTrailingDigits(self):
"Test if Python files contain trailing digits."
from reportlab.lib.testutils import RL_HOME
allPyFiles = GlobDirectoryWalker(RL_HOME, '*.py')
for path in allPyFiles:
#hack - exclude barcode extensions from this test
            if string.find(path, 'barcode') >= 0:
pass
else:
basename = os.path.splitext(path)[0]
truncPath = path[string.find(path, 'reportlab'):]
msg = "Filename %s contains trailing digits." % truncPath
assert basename[-1] not in string.digits, msg
## if basename[-1] in string.digits:
## print truncPath
class FirstLineTestCase(SecureTestCase):
"Testing if objects in the ReportLab package have docstrings."
def findSuspiciousModules(self, folder, rootName):
"Get all modul paths with non-Unix-like first line."
firstLinePat = re.compile('^#!.*python.*')
paths = []
for file in GlobDirectoryWalker(folder, '*.py'):
if os.path.basename(file) == '__init__.py':
continue
firstLine = open_and_readlines(file)[0]
if not firstLinePat.match(firstLine):
paths.append(file)
return paths
def test1(self):
"Test if all Python files have a Unix-like first line."
path = outputfile("test_firstline.log")
file = open(path, 'w')
file.write('No Unix-like first line found in the files below.\n\n')
from reportlab.lib.testutils import RL_HOME
paths = self.findSuspiciousModules(RL_HOME, 'reportlab')
paths.sort()
for p in paths:
file.write("%s\n" % p)
file.close()
def makeSuite():
suite = makeSuiteForClasses(SelfTestCase, AsciiFileTestCase, FilenameTestCase)
if sys.platform[:4] != 'java':
loader = unittest.TestLoader()
suite.addTest(loader.loadTestsFromTestCase(FirstLineTestCase))
return suite
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
printLocation()
|
{
"content_hash": "1a686d183117bee32a93819c52024945",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 129,
"avg_line_length": 31.733333333333334,
"alnum_prop": 0.5869747899159664,
"repo_name": "MatthewWilkes/reportlab",
"id": "d3fc904642e99eb0c5e0a1a1c8739606c62175e0",
"size": "4840",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_pyfiles.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "740949"
},
{
"name": "C++",
"bytes": "14870"
},
{
"name": "CSS",
"bytes": "16419"
},
{
"name": "Java",
"bytes": "6333"
},
{
"name": "Python",
"bytes": "3186241"
},
{
"name": "Shell",
"bytes": "4864"
}
],
"symlink_target": ""
}
|
import unittest
from knowru_markdown import markdown_to_html
class KnowruMarkdownUnitTestTestCase(unittest.TestCase):
def test_paragraph(self):
result = markdown_to_html('this is a paragraph.')
self.assertEqual(result, '<p>this is a paragraph.</p>')
def test_header(self):
result = markdown_to_html('# this is a header')
self.assertEqual(result, '<h1>this is a header</h1>')
def test_figure_without_title_and_caption(self):
        result = markdown_to_html('![Alt text](/path/to/img.jpg)')
self.assertEqual(result, """<figure>
<img alt="Alt text" src="/path/to/img.jpg" title="" />
<figcaption></figcaption>
</figure>""")
def test_figure_with_title_but_without_caption(self):
        result = markdown_to_html('![Alt text](/path/to/img.jpg "Optional title")')
self.assertEqual(result, u"""<figure>
<img alt="Alt text" src="/path/to/img.jpg" title="Optional title" />
<figcaption></figcaption>
</figure>""")
def test_figure_with_title_and_caption(self):
result = markdown_to_html(')')
        print(result)
self.assertEqual(result, u"""<figure>
<img alt="Alt text" src="/path/to/img.jpg" title="Optional title" />
<figcaption>Optional figcaption</figcaption>
</figure>""")
def test_blockquote_with_footer_and_cite_content_and_title(self):
result = markdown_to_html('> some content("some footer" ["cite content" "cite title"])')
self.assertEqual(result, u"""<blockquote>
<p>some content</p>
<footer>some footer in <cite title="cite title">cite content</cite></footer>
</blockquote>""")
def test_code_block(self):
result = markdown_to_html("""``` shell
$ docker build -t knowru/plumber_example https://github.com/Knowru/plumber_example.git
```
``` shell
$ docker run -p 8000:8000 -d knowru/plumber_example
```""")
self.assertEqual(result, u"""<pre class="brush: shell">
$ docker build -t knowru/plumber_example https://github.com/Knowru/plumber_example.git
</pre>
<pre class="brush: shell">
$ docker run -p 8000:8000 -d knowru/plumber_example
</pre>""")
def test_anchor_has_target_blank_and_is_alone(self):
result = markdown_to_html('[content](http://url "title")')
self.assertEqual(result, '<p><a href="http://url" title="title" target="_blank">content</a></p>')
def test_anchor_has_target_blank_and_is_not_alone(self):
result = markdown_to_html('In [our last post (How to create a RESTful API for a machine learning credit model in R)](https://www.knowru.com/blog/how-create-restful-api-for-machine-learning-credit-model-in-r/ "How to create a RESTful API for a machine learning credit model in R")')
self.assertEqual(result, '<p>In <a href="https://www.knowru.com/blog/how-create-restful-api-for-machine-learning-credit-model-in-r/" title="How to create a RESTful API for a machine learning credit model in R" target="_blank">our last post (How to create a RESTful API for a machine learning credit model in R)</a></p>')
def test_knowru_markdown_recursion(self):
        pass
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "3561930480a0cd01152716498d674437",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 328,
"avg_line_length": 46.52173913043478,
"alnum_prop": 0.6669781931464175,
"repo_name": "jeffjaehoyang/knowru_markdown",
"id": "2c14def96b885d6399eba9d96b14429874c788d9",
"size": "3210",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "unit_tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6655"
}
],
"symlink_target": ""
}
|
from keras.models import Sequential, load_model
from keras.layers.core import Dense, Reshape, Flatten, Dropout
from keras.layers.convolutional import Conv1D, Conv2D
from keras.layers.normalization import BatchNormalization
from keras.activations import softmax
from keras.losses import binary_crossentropy
from keras.optimizers import RMSprop, Adam, SGD
from keras.initializers import Orthogonal
import keras.backend as K
import numpy as np
import pandas as pd
def load_relations(relations_path):
relations = pd.read_hdf(relations_path).values
return relations
def load_labels(labels_path):
labels = pd.read_hdf(labels_path).values
return labels
# Metrics for each individual class
def make_metrics(class_id, one_hot):
    # NOTE: despite its name, `precision` measures per-class binary accuracy
    # (mean agreement between rounded prediction and label), not true precision.
    def precision(y_true, y_pred):
if not one_hot:
return K.mean(K.equal(y_true[:, class_id], K.round(y_pred)[:, class_id]))
else:
return K.mean(K.equal(K.round(y_true)[:, class_id], K.cast(K.equal(K.argmax(y_pred, 1), class_id), "float32")))
def tp(y_true, y_pred):
if not one_hot:
true_positives = K.sum(y_true[:, class_id] * y_pred[:, class_id])
else:
true_positives = K.sum(y_true[:, class_id] * K.cast(K.equal(K.argmax(y_pred, 1), class_id), "float32"))
return true_positives
def pp(y_true, y_pred):
possible_positives = K.sum(y_true[:, class_id])
return possible_positives
def recall(y_true, y_pred):
return tp(y_true, y_pred) / (pp(y_true, y_pred) + K.epsilon())
def fscore(y_true, y_pred, beta=1):
prec = precision(y_true, y_pred)
rec = recall(y_true, y_pred)
beta_sq = beta ** 2
fbeta_score = (1 + beta_sq) * (prec * rec) / \
(beta_sq * prec + rec + K.epsilon())
return fbeta_score
def binary_entropy(y_true, y_pred):
return binary_crossentropy(y_true[:, class_id], y_pred[:, class_id])
return precision, recall, fscore, binary_entropy
class RelationClassifier:
def __init__(self):
self.model = None
def new(self, input_dim, relation_count, one_hot=False, filters=32, max_filters=128,
subtract_embeddings=False, dropout=False, learn_rate=0.001, optimizer="rmsprop",
kernel_size=3, lr_decay=0):
"""Creates a new model expecting an input vector of shape `(-, input_dim)`
making predictions for `relation_count` classes"""
self.model = RelationClassifier._get_model(input_dim, relation_count, one_hot,
filters, max_filters, subtract_embeddings,
dropout, learn_rate, optimizer, kernel_size,
lr_decay)
def save(self, path):
"""Saves the model to `path`"""
self.model.compile(optimizer=RMSprop(), loss="binary_crossentropy",
metrics=["binary_accuracy"])
self.model.save(path)
def load(self, path):
"""Loads the model from `path`"""
self.model = load_model(path)
# Check if the output is one-hot by looking if the activation of
# the last layer is softmax
one_hot = self.model.layers[-1].activation == softmax
all_metrics = ["categorical_accuracy" if one_hot else "binary_accuracy"]
for class_id in range(self.model.layers[-1].output_shape[1]):
all_metrics += make_metrics(class_id, one_hot)
self.model.compile(optimizer=RMSprop(), loss="binary_crossentropy",
metrics=all_metrics)
def train(self, relations, labels, batch_size=256, validation_split=0.1, epochs=10,
val_data=None, verbose=1):
"""Trains the model given `relations` of shape `(-, input_dim)` and
`labels` of shape `(-, relation_count)`"""
if self.model is None:
raise ValueError(
"A model must be created (.new) or loaded (.load) first.")
self.model.fit(relations, labels, batch_size=batch_size,
validation_split=validation_split if val_data is None else 0,
epochs=epochs, shuffle=True, validation_data=val_data,
verbose=verbose)
def replace_last_layer(self, output_dim, one_hot):
"""Replaces the last layer of the model and adds a new output layer with size `output_dim`"""
if self.model is None:
raise ValueError(
"A model must be created (.new) or loaded (.load) first.")
# Remove last layer
self.model.layers.pop()
self.model.outputs = [self.model.layers[-1].output]
self.model.layers[-1].outbound_nodes = []
# Make layers untrainable
for layer in self.model.layers:
layer.trainable = False
# Add new output layer
self.model.add(Dense(output_dim, activation="softmax" if one_hot else "sigmoid",
kernel_initializer="orthogonal"))
# Restore metrics
all_metrics = ["categorical_accuracy" if one_hot else "binary_accuracy"]
for class_id in range(self.model.layers[-1].output_shape[1]):
all_metrics += make_metrics(class_id, one_hot)
# Compile model
self.model.compile(optimizer=RMSprop(), loss="binary_crossentropy",
metrics=all_metrics)
def predict(self, relations):
"""Returns the class predictions for `relations`"""
if self.model is None:
raise ValueError(
"A model must be created (.new) or loaded (.load) first.")
return self.model.predict(relations)
@staticmethod
def _get_model(input_dim, output_dim, one_hot, filters=32, max_filters=128, subtract_embeddings=False, dropout=False,
learn_rate=0.001, optimizer="rmsprop", kernel_size=3, lr_decay=0):
if optimizer == "rmsprop":
optimizer = RMSprop(lr=learn_rate, decay=lr_decay)
elif optimizer == "adam":
optimizer = Adam(lr=learn_rate, decay=lr_decay)
elif optimizer == "sgd":
optimizer = SGD(lr=learn_rate, momentum=0.9, decay=lr_decay)
else:
raise ValueError("Invalid argument optimizer")
initializer = Orthogonal(np.sqrt(2))
model = Sequential()
dropout = dropout if isinstance(dropout, float) else (0.5 if dropout else 0)
if not subtract_embeddings:
# Reshape to 2D, do 2D conv and reshape to 1D
model.add(Reshape(input_shape=(input_dim,),
target_shape=(2, input_dim // 2, 1)))
model.add(Conv2D(filters=filters, kernel_size=[2, 7], strides=[1, 2],
padding="VALID", activation="relu", kernel_initializer=initializer))
model.add(BatchNormalization())
else:
# Reshape to add single channel
model.add(Reshape(input_shape=(input_dim,),
target_shape=(input_dim, 1)))
model.add(Conv1D(filters=filters, kernel_size=7, strides=2,
padding="VALID", activation="relu", kernel_initializer=initializer))
model.add(BatchNormalization())
model.add(Reshape((-1, filters)))
if dropout:
model.add(Dropout(dropout))
# Half conv with kernel size 3 until not greater than 3
while model.layers[-1].output_shape[1] > 3:
filters = min(filters * 2, max_filters)
model.add(Conv1D(filters=filters, kernel_size=kernel_size, strides=2,
padding="SAME", activation="relu", kernel_initializer=initializer))
model.add(BatchNormalization())
if dropout:
model.add(Dropout(dropout))
# Conv valid so output is 1 if necessary
if model.layers[-1].output_shape[1] != 1:
filters = min(filters * 2, max_filters)
model.add(Conv1D(filters=filters, kernel_size=(model.layers[-1].output_shape[1],),
padding="VALID", activation="relu", kernel_initializer=initializer))
model.add(BatchNormalization())
if dropout:
model.add(Dropout(dropout))
# Dense sigmoid output
model.add(Flatten())
#model.add(Dropout(0.5))
model.add(Dense(output_dim, activation="softmax" if one_hot else "sigmoid",
kernel_initializer="orthogonal"))
print(model.summary())
all_metrics = ["categorical_accuracy" if one_hot else "binary_accuracy"]
for class_id in range(output_dim):
all_metrics += make_metrics(class_id, one_hot)
model.compile(optimizer=optimizer, loss="categorical_crossentropy" if one_hot else "binary_crossentropy",
metrics=all_metrics)
return model
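# Usage sketch (paths and dimensions are illustrative, not part of the API):
#     clf = RelationClassifier()
#     clf.new(input_dim=600, relation_count=5)
#     clf.train(load_relations("relations.h5"), load_labels("labels.h5"),
#               epochs=5)
#     predictions = clf.predict(load_relations("relations.h5"))
#     clf.save("relation_classifier.h5")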
|
{
"content_hash": "195ffffcaa5095de3e362e496e2ff9d8",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 123,
"avg_line_length": 41.03211009174312,
"alnum_prop": 0.5936277249860257,
"repo_name": "waelkht/Onto.KOM",
"id": "926499dac1a1dfd2f114c87958a12f00a7a20531",
"size": "8945",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ontokom/classification.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "51613"
}
],
"symlink_target": ""
}
|
import syslog
from commands import getoutput
from pUtil import tolog
def sysLog(message):
""" Write message to syslog """
status = False
try:
if message != "":
syslog.syslog(syslog.LOG_ERR, message)
else:
tolog("!!WARNING!!4444!! Will not write empty message to syslog")
except Exception, e:
tolog("!!WARNING!!4444!! Failed to write to syslog: %s" % (e))
else:
status = True
return status
def getSysLogTail():
""" Return the tail of the syslog """
out = ""
path = "/var/log/messages"
cmd = "tail %s" % (path)
tolog("Executing command: %s" % (cmd))
try:
out = getoutput(cmd)
except Exception, e:
tolog("!!WARNING!!4444!! Could not read path %s, %s" % (path, e))
return out
def dumpSysLogTail():
""" Dump the syslog tail to the pilot log """
tolog("syslog tail:\n%s" % (getSysLogTail()))
if __name__ == "__main__":
message = "hi"
if sysLog(message):
tolog("ok")
out = getSysLogTail()
if out != "":
print out
else:
print "NOT ok"
else:
tolog("not ok")
dumpSysLogTail()
|
{
"content_hash": "52d27cbea2a307037004281481cadb89",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 77,
"avg_line_length": 22.58490566037736,
"alnum_prop": 0.543859649122807,
"repo_name": "PanDAWMS/pilot",
"id": "17daca92553d4d30e87671f84cc9329892e1e7fc",
"size": "1197",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "SysLog.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4990965"
},
{
"name": "Shell",
"bytes": "23530"
}
],
"symlink_target": ""
}
|
"""
The :class:`LocalResponseNormalization2DLayer
<lasagne.layers.LocalResponseNormalization2DLayer>` implementation contains
code from `pylearn2 <http://github.com/lisa-lab/pylearn2>`_, which is covered
by the following license:
Copyright (c) 2011--2014, Université de Montréal
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import theano
import theano.tensor as T
from lasagne import init
from lasagne import nonlinearities
from lasagne.layers import Layer
__all__ = [
"LocalResponseNormalization2DLayer",
"BatchNormLayer",
"batch_norm",
]
class LocalResponseNormalization2DLayer(Layer):
"""
Cross-channel Local Response Normalization for 2D feature maps.
Aggregation is purely across channels, not within channels,
and performed "pixelwise".
Input order is assumed to be `BC01`.
If the value of the ith channel is :math:`x_i`, the output is
.. math::
x_i = \frac{x_i}{ (k + ( \alpha \sum_j x_j^2 ))^\beta }
where the summation is performed over this position on :math:`n`
neighboring channels.
This code is adapted from pylearn2. See the module docstring for license
information.
"""
def __init__(self, incoming, alpha=1e-4, k=2, beta=0.75, n=5, **kwargs):
"""
:parameters:
- incoming: input layer or shape
- alpha: see equation above
- k: see equation above
- beta: see equation above
- n: number of adjacent channels to normalize over.
"""
super(LocalResponseNormalization2DLayer, self).__init__(incoming,
**kwargs)
self.alpha = alpha
self.k = k
self.beta = beta
self.n = n
if n % 2 == 0:
raise NotImplementedError("Only works with odd n")
def get_output_shape_for(self, input_shape):
return input_shape
def get_output_for(self, input, **kwargs):
input_shape = self.input_shape
if any(s is None for s in input_shape):
input_shape = input.shape
half_n = self.n // 2
input_sqr = T.sqr(input)
b, ch, r, c = input_shape
extra_channels = T.alloc(0., b, ch + 2*half_n, r, c)
input_sqr = T.set_subtensor(extra_channels[:, half_n:half_n+ch, :, :],
input_sqr)
scale = self.k
for i in range(self.n):
scale += self.alpha * input_sqr[:, i:i+ch, :, :]
scale = scale ** self.beta
return input / scale
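# Usage sketch (the input shape is illustrative; input must be in BC01 order):
#     from lasagne.layers import InputLayer
#     l_in = InputLayer((None, 64, 32, 32))
#     l_lrn = LocalResponseNormalization2DLayer(l_in, alpha=1e-4, k=2,
#                                               beta=0.75, n=5)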
class BatchNormLayer(Layer):
"""
lasagne.layers.BatchNormLayer(incoming, axes='auto', epsilon=1e-4,
alpha=0.1, mode='low_mem',
beta=lasagne.init.Constant(0), gamma=lasagne.init.Constant(1),
mean=lasagne.init.Constant(0), var=lasagne.init.Constant(1), **kwargs)
Batch Normalization
This layer implements batch normalization of its inputs, following [1]_:
.. math::
y = \\frac{x - \\mu}{\\sqrt{\\sigma^2 + \\epsilon}} \\gamma + \\beta
That is, the input is normalized to zero mean and unit variance, and then
linearly transformed. The crucial part is that the mean and variance are
computed across the batch dimension, i.e., over examples, not per example.
During training, :math:`\\mu` and :math:`\\sigma^2` are defined to be the
mean and variance of the current input mini-batch :math:`x`, and during
testing, they are replaced with average statistics over the training
data. Consequently, this layer has four stored parameters: :math:`\\beta`,
:math:`\\gamma`, and the averages :math:`\\mu` and :math:`\\sigma^2`.
By default, this layer learns the average statistics as exponential moving
averages computed during training, so it can be plugged into an existing
network without any changes of the training procedure (see Notes).
Parameters
----------
incoming : a :class:`Layer` instance or a tuple
The layer feeding into this layer, or the expected input shape
axes : 'auto', int or tuple of int
The axis or axes to normalize over. If ``'auto'`` (the default),
normalize over all axes except for the second: this will normalize over
the minibatch dimension for dense layers, and additionally over all
spatial dimensions for convolutional layers.
epsilon : scalar
Small constant :math:`\\epsilon` added to the variance before taking
the square root and dividing by it, to avoid numerical problems
alpha : scalar
Coefficient for the exponential moving average of batch-wise means and
standard deviations computed during training; the closer to one, the
more it will depend on the last batches seen
mode : {'low_mem', 'high_mem'}
Specify which batch normalization implementation to use: ``'low_mem'``
avoids storing intermediate representations and thus requires less
memory, while ``'high_mem'`` can reuse representations for the backward
pass and is thus 5-10% faster.
beta : Theano shared variable, expression, numpy array, callable or None
Initial value, expression or initializer for :math:`\\beta`. Must match
the incoming shape, skipping all axes in `axes`. Set to ``None`` to fix
it to 0.0 instead of learning it.
See :func:`lasagne.utils.create_param` for more information.
gamma : Theano shared variable, expression, numpy array, callable or None
Initial value, expression or initializer for :math:`\\gamma`. Must
match the incoming shape, skipping all axes in `axes`. Set to ``None``
to fix it to 1.0 instead of learning it.
See :func:`lasagne.utils.create_param` for more information.
mean : Theano shared variable, expression, numpy array, or callable
Initial value, expression or initializer for :math:`\\mu`. Must match
the incoming shape, skipping all axes in `axes`.
See :func:`lasagne.utils.create_param` for more information.
var : Theano shared variable, expression, numpy array, or callable
Initial value, expression or initializer for :math:`\\sigma^2`. Must
match the incoming shape, skipping all axes in `axes`.
See :func:`lasagne.utils.create_param` for more information.
**kwargs
Any additional keyword arguments are passed to the :class:`Layer`
superclass.
Notes
-----
This layer should be inserted between a linear transformation (such as a
:class:`DenseLayer`, or :class:`Conv2DLayer`) and its nonlinearity. The
convenience function :func:`batch_norm` modifies an existing layer to
insert batch normalization in front of its nonlinearity.
The behavior can be controlled by passing keyword arguments to
:func:`lasagne.layers.get_output()` when building the output expression
of any network containing this layer.
During training, [1]_ normalize each input mini-batch by its statistics
and update an exponential moving average of the statistics to be used for
validation. This can be achieved by passing ``deterministic=False``.
For validation, [1]_ normalize each input mini-batch by the stored
statistics. This can be achieved by passing ``deterministic=True``.
For more fine-grained control, ``batch_norm_update_averages`` can be passed
to update the exponential moving averages (``True``) or not (``False``),
and ``batch_norm_use_averages`` can be passed to use the exponential moving
averages for normalization (``True``) or normalize each mini-batch by its
own statistics (``False``). These settings override ``deterministic``.
Note that for testing a model after training, [1]_ replace the stored
exponential moving average statistics by fixing all network weights and
re-computing average statistics over the training data in a layerwise
fashion. This is not part of the layer implementation.
In case you set `axes` to not include the batch dimension (the first axis,
usually), normalization is done per example, not across examples. This does
not require any averages, so you can pass ``batch_norm_update_averages``
and ``batch_norm_use_averages`` as ``False`` in this case.
See also
--------
batch_norm : Convenience function to apply batch normalization to a layer
References
----------
.. [1] Ioffe, Sergey and Szegedy, Christian (2015):
Batch Normalization: Accelerating Deep Network Training by Reducing
Internal Covariate Shift. http://arxiv.org/abs/1502.03167.
"""
def __init__(self, incoming, axes='auto', epsilon=1e-4, alpha=0.1,
mode='low_mem', beta=init.Constant(0), gamma=init.Constant(1),
mean=init.Constant(0), var=init.Constant(1), **kwargs):
super(BatchNormLayer, self).__init__(incoming, **kwargs)
if axes == 'auto':
# default: normalize over all but the second axis
axes = (0,) + tuple(range(2, len(self.input_shape)))
elif isinstance(axes, int):
axes = (axes,)
self.axes = axes
self.epsilon = epsilon
self.alpha = alpha
self.mode = mode
# create parameters, ignoring all dimensions in axes
shape = [size for axis, size in enumerate(self.input_shape)
if axis not in self.axes]
if any(size is None for size in shape):
raise ValueError("BatchNormLayer needs specified input sizes for "
"all axes not normalized over.")
if beta is None:
self.beta = None
else:
self.beta = self.add_param(beta, shape, 'beta',
trainable=True, regularizable=False)
if gamma is None:
self.gamma = None
else:
self.gamma = self.add_param(gamma, shape, 'gamma',
trainable=True, regularizable=True)
self.mean = self.add_param(mean, shape, 'mean',
trainable=False, regularizable=False)
self.var = self.add_param(var, shape, 'var',
trainable=False, regularizable=False)
def get_output_for(self, input, deterministic=False, **kwargs):
input_mean = input.mean(self.axes)
input_var = input.var(self.axes)
# Decide whether to use the stored averages or mini-batch statistics
use_averages = kwargs.get('batch_norm_use_averages',
deterministic)
if use_averages:
mean = self.mean
var = self.var
else:
mean = input_mean
var = input_var
# Decide whether to update the stored averages
update_averages = kwargs.get('batch_norm_update_averages',
not deterministic)
if update_averages:
# Trick: To update the stored statistics, we create memory-aliased
# clones of the stored statistics:
running_mean = theano.clone(self.mean, share_inputs=False)
running_var = theano.clone(self.var, share_inputs=False)
# set a default update for them:
running_mean.default_update = ((1 - self.alpha) * running_mean +
self.alpha * input_mean)
running_var.default_update = ((1 - self.alpha) * running_var +
self.alpha * input_var)
# and make sure they end up in the graph without participating in
# the computation (this way their default_update will be collected
# and applied, but the computation will be optimized away):
mean += 0 * running_mean
var += 0 * running_var
# prepare dimshuffle pattern inserting broadcastable axes as needed
param_axes = iter(range(input.ndim - len(self.axes)))
pattern = ['x' if input_axis in self.axes
else next(param_axes)
for input_axis in range(input.ndim)]
# apply dimshuffle pattern to all parameters
beta = 0 if self.beta is None else self.beta.dimshuffle(pattern)
gamma = 1 if self.gamma is None else self.gamma.dimshuffle(pattern)
mean = mean.dimshuffle(pattern)
std = T.sqrt(var + self.epsilon).dimshuffle(pattern)
# normalize
# normalized = (input - mean) * (gamma / std) + beta
normalized = T.nnet.batch_normalization(input, gamma=gamma, beta=beta,
mean=mean, std=std,
mode=self.mode)
return normalized
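# Direct-usage sketch (shapes illustrative; the batch_norm helper below is the
# usual entry point):
#     from lasagne.layers import InputLayer, DenseLayer
#     l = DenseLayer(InputLayer((None, 100)), num_units=50, nonlinearity=None,
#                    b=None)
#     l = BatchNormLayer(l)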
def batch_norm(layer, **kwargs):
"""
Apply batch normalization to an existing layer. This is a convenience
function modifying an existing layer to include batch normalization: It
will steal the layer's nonlinearity if there is one (effectively
introducing the normalization right before the nonlinearity), remove
the layer's bias if there is one (because it would be redundant), and add
a :class:`BatchNormLayer` and :class:`NonlinearityLayer` on top.
Parameters
----------
layer : A :class:`Layer` instance
The layer to apply the normalization to; note that it will be
irreversibly modified as specified above
**kwargs
Any additional keyword arguments are passed on to the
:class:`BatchNormLayer` constructor. Especially note the `mode`
argument, which controls a memory usage to performance tradeoff.
Returns
-------
BatchNormLayer or NonlinearityLayer instance
A batch normalization layer stacked on the given modified `layer`, or
a nonlinearity layer stacked on top of both if `layer` was nonlinear.
Examples
--------
Just wrap any layer into a :func:`batch_norm` call on creating it:
>>> from lasagne.layers import InputLayer, DenseLayer, batch_norm
>>> from lasagne.nonlinearities import tanh
>>> l1 = InputLayer((64, 768))
>>> l2 = batch_norm(DenseLayer(l1, num_units=500, nonlinearity=tanh))
This introduces batch normalization right before its nonlinearity:
>>> from lasagne.layers import get_all_layers
>>> [l.__class__.__name__ for l in get_all_layers(l2)]
['InputLayer', 'DenseLayer', 'BatchNormLayer', 'NonlinearityLayer']
"""
nonlinearity = getattr(layer, 'nonlinearity', None)
if nonlinearity is not None:
layer.nonlinearity = nonlinearities.identity
if hasattr(layer, 'b'):
del layer.params[layer.b]
layer.b = None
layer = BatchNormLayer(layer, **kwargs)
if nonlinearity is not None:
from lasagne.layers import NonlinearityLayer
layer = NonlinearityLayer(layer, nonlinearity)
return layer
|
{
"content_hash": "456e52b8d6bc1bcccd75e718dcfb816a",
"timestamp": "",
"source": "github",
"line_count": 366,
"max_line_length": 79,
"avg_line_length": 44.59016393442623,
"alnum_prop": 0.6525735294117647,
"repo_name": "VCG/gp",
"id": "4477269ac3184131d1e54af87dcf5f0cd50facc3",
"size": "16347",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "gp/nets/BatchNormLayer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1231"
},
{
"name": "HTML",
"bytes": "59805"
},
{
"name": "JavaScript",
"bytes": "4075"
},
{
"name": "Jupyter Notebook",
"bytes": "19996593"
},
{
"name": "Matlab",
"bytes": "19322"
},
{
"name": "Python",
"bytes": "867475"
},
{
"name": "Shell",
"bytes": "117697"
},
{
"name": "TeX",
"bytes": "489980"
}
],
"symlink_target": ""
}
|
import os
from oslo_log import log as logging
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
from tacker.sol_refactored.infra_drivers.kubernetes import kubernetes_common
from tacker.sol_refactored.infra_drivers.kubernetes import kubernetes_resource
from tacker.sol_refactored.infra_drivers.kubernetes import kubernetes_utils
LOG = logging.getLogger(__name__)
class Helm(kubernetes_common.KubernetesCommon):
def __init__(self):
pass
def instantiate(self, req, inst, grant_req, grant, vnfd):
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
with kubernetes_utils.AuthContextManager(vim_info) as acm:
k8s_api_client = acm.init_k8s_api_client()
helm_client = acm.init_helm_client()
self._instantiate(req, inst, grant_req, grant, vnfd,
k8s_api_client, helm_client)
def _instantiate(self, req, inst, grant_req, grant, vnfd,
k8s_api_client, helm_client):
namespace = req.additionalParams.get('namespace', 'default')
helm_chart_path = req.additionalParams['helm_chart_path']
chart_name = os.path.join(vnfd.csar_dir, helm_chart_path)
release_name = self._get_release_name(inst)
helm_value_names = req.additionalParams['helm_value_names']
vdus_num = self._get_vdus_num_from_grant_req_res_defs(
grant_req.addResources)
# Create parameters
parameters = req.additionalParams.get('helm_parameters', {})
for vdu_name, vdu_num in vdus_num.items():
replicaParam = helm_value_names.get(vdu_name, {}).get('replica')
if replicaParam:
parameters[replicaParam] = vdu_num
if helm_client.is_release_exist(release_name, namespace):
# helm upgrade. It is retry case.
revision = helm_client.upgrade(release_name, chart_name,
namespace, parameters)
else:
# helm install
revision = helm_client.install(release_name, chart_name,
namespace, parameters)
# get manifest from helm chart
k8s_resources = helm_client.get_manifest(release_name, namespace)
k8s_reses = self._create_reses_from_manifest(k8s_api_client, namespace,
k8s_resources)
vdu_reses = self._select_vdu_reses(vnfd, req.flavourId, k8s_reses)
        # wait until the k8s resources are ready
self._wait_k8s_reses_ready(k8s_reses)
# make instantiated info
self._init_instantiated_vnf_info(inst, req.flavourId, vdu_reses,
namespace, helm_chart_path, helm_value_names, release_name,
revision)
self._update_vnfc_info(inst, k8s_api_client)
def instantiate_rollback(self, req, inst, grant_req, grant, vnfd):
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
with kubernetes_utils.AuthContextManager(vim_info) as acm:
k8s_api_client = acm.init_k8s_api_client()
helm_client = acm.init_helm_client()
namespace = req.additionalParams.get('namespace', 'default')
release_name = self._get_release_name(inst)
self._delete_resource(release_name, namespace,
k8s_api_client, helm_client)
def terminate(self, req, inst, grant_req, grant, vnfd):
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
with kubernetes_utils.AuthContextManager(vim_info) as acm:
k8s_api_client = acm.init_k8s_api_client()
helm_client = acm.init_helm_client()
namespace = inst.instantiatedVnfInfo.metadata['namespace']
release_name = inst.instantiatedVnfInfo.metadata['release_name']
self._delete_resource(release_name, namespace,
k8s_api_client, helm_client)
def _delete_resource(self, release_name, namespace, k8s_api_client,
helm_client):
if not helm_client.is_release_exist(release_name, namespace):
            LOG.info(f'Helm release {release_name} does not exist.')
return
# get k8s manifest from helm chart
k8s_resources = helm_client.get_manifest(release_name, namespace)
k8s_reses = self._create_reses_from_manifest(k8s_api_client,
namespace, k8s_resources)
# uninstall release
helm_client.uninstall(release_name, namespace)
        # wait until the k8s resources are deleted
self._wait_k8s_reses_deleted(k8s_reses)
def scale(self, req, inst, grant_req, grant, vnfd):
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
with kubernetes_utils.AuthContextManager(vim_info) as acm:
k8s_api_client = acm.init_k8s_api_client()
helm_client = acm.init_helm_client()
self._scale(req, inst, grant_req, grant, vnfd,
k8s_api_client, helm_client)
def _scale(self, req, inst, grant_req, grant, vnfd,
k8s_api_client, helm_client):
if req.type == 'SCALE_OUT':
vdus_num = self._get_vdus_num_from_grant_req_res_defs(
grant_req.addResources)
for vdu_name, vdu_num in vdus_num.items():
vdus_num[vdu_name] = (self._get_current_vdu_num(inst, vdu_name)
+ vdu_num)
elif req.type == 'SCALE_IN':
vdus_num = self._get_vdus_num_from_grant_req_res_defs(
grant_req.removeResources)
for vdu_name, vdu_num in vdus_num.items():
vdus_num[vdu_name] = (self._get_current_vdu_num(inst, vdu_name)
- vdu_num)
metadata = inst.instantiatedVnfInfo.metadata
namespace = metadata['namespace']
release_name = metadata['release_name']
chart_name = os.path.join(vnfd.csar_dir, metadata['helm_chart_path'])
helm_value_names = metadata['helm_value_names']
# Create scale parameters
parameters = {}
for vdu_name, vdu_num in vdus_num.items():
replicaParam = helm_value_names.get(vdu_name, {}).get('replica')
if not replicaParam:
raise sol_ex.HelmParameterNotFound(vdu_name=vdu_name)
parameters[replicaParam] = vdu_num
# update
revision = helm_client.upgrade(release_name, chart_name,
namespace, parameters)
vdu_reses = []
for vdu_name, vdu_num in vdus_num.items():
vdu_res = self._get_vdu_res(inst, k8s_api_client, vdu_name)
vdu_res.body['spec']['replicas'] = vdu_num
vdu_reses.append(vdu_res)
        # wait until the k8s resources are updated
self._wait_k8s_reses_updated(vdu_reses, k8s_api_client,
namespace, old_pods_names=set())
# make instantiated info
self._update_vnfc_info(inst, k8s_api_client)
metadata['revision'] = revision
def scale_rollback(self, req, inst, grant_req, grant, vnfd):
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
with kubernetes_utils.AuthContextManager(vim_info) as acm:
k8s_api_client = acm.init_k8s_api_client()
helm_client = acm.init_helm_client()
self._scale_rollback(req, inst, grant_req, grant, vnfd,
k8s_api_client, helm_client)
def _scale_rollback(self, req, inst, grant_req, grant, vnfd,
k8s_api_client, helm_client):
vdus_num = self._get_vdus_num_from_grant_req_res_defs(
grant_req.addResources)
metadata = inst.instantiatedVnfInfo.metadata
namespace = metadata['namespace']
release_name = metadata['release_name']
revision = metadata['revision']
# rollback
helm_client.rollback(release_name, revision, namespace)
vdu_reses = [self._get_vdu_res(inst, k8s_api_client, vdu_name)
for vdu_name in vdus_num]
        # wait until the k8s resources are updated
self._wait_k8s_reses_updated(vdu_reses, k8s_api_client,
namespace, old_pods_names=set())
def change_vnfpkg(self, req, inst, grant_req, grant, vnfd):
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
with kubernetes_utils.AuthContextManager(vim_info) as acm:
k8s_api_client = acm.init_k8s_api_client()
helm_client = acm.init_helm_client()
if req.additionalParams['upgrade_type'] == 'RollingUpdate':
self._change_vnfpkg_rolling_update(req, inst, grant_req,
grant, vnfd, k8s_api_client, helm_client)
else:
                # should not be reached
pass
def _change_vnfpkg_rolling_update(self, req, inst, grant_req, grant, vnfd,
k8s_api_client, helm_client):
metadata = inst.instantiatedVnfInfo.metadata
namespace = metadata['namespace']
release_name = metadata['release_name']
helm_chart_path = req.additionalParams.get('helm_chart_path',
metadata['helm_chart_path'])
chart_name = os.path.join(vnfd.csar_dir, helm_chart_path)
vdus_num = self._get_vdus_num_from_grant_req_res_defs(
grant_req.addResources)
# update
revision = helm_client.upgrade(release_name, chart_name,
namespace, {})
# get manifest from helm chart
k8s_resources = helm_client.get_manifest(release_name, namespace)
k8s_reses = self._create_reses_from_manifest(
k8s_api_client, namespace, k8s_resources)
vdu_reses = self._select_vdu_reses(
vnfd, inst.instantiatedVnfInfo.flavourId, k8s_reses)
        # wait until the k8s resources are updated
target_reses = {vdu: res for vdu, res in vdu_reses.items()
if vdu in vdus_num.keys()}
old_pods_names = {vnfc.computeResource.resourceId
for vnfc in inst.instantiatedVnfInfo.vnfcResourceInfo
if vnfc.vduId in vdus_num.keys()}
self._wait_k8s_reses_updated(
list(target_reses.values()), k8s_api_client, namespace,
old_pods_names)
# make instantiated info
self._update_vnfc_info(inst, k8s_api_client)
metadata['vdu_reses'].update(
{vdu: res.body for vdu, res in target_reses.items()})
metadata['helm_chart_path'] = helm_chart_path
metadata['revision'] = revision
inst.vnfdId = req.vnfdId
def change_vnfpkg_rollback(self, req, inst, grant_req, grant, vnfd):
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
with kubernetes_utils.AuthContextManager(vim_info) as acm:
k8s_api_client = acm.init_k8s_api_client()
helm_client = acm.init_helm_client()
if req.additionalParams['upgrade_type'] == 'RollingUpdate':
self._change_vnfpkg_rolling_update_rollback(
req, inst, grant_req, grant, vnfd, k8s_api_client,
helm_client)
else:
                # should not be reached
pass
def _change_vnfpkg_rolling_update_rollback(self, req, inst, grant_req,
grant, vnfd, k8s_api_client, helm_client):
metadata = inst.instantiatedVnfInfo.metadata
namespace = metadata['namespace']
release_name = metadata['release_name']
revision = metadata['revision']
original_pods = {vnfc.computeResource.resourceId for vnfc in
inst.instantiatedVnfInfo.vnfcResourceInfo}
all_pods = kubernetes_utils.list_namespaced_pods(
k8s_api_client, namespace)
current_pods = {pod.metadata.name for pod in all_pods}
old_pods_names = current_pods - original_pods
# rollback
helm_client.rollback(release_name, revision, namespace)
target_vdus = {res_def.resourceTemplateId
for res_def in grant_req.addResources
if res_def.type == 'COMPUTE'}
target_reses = [self._get_vdu_res(inst, k8s_api_client, vdu_name)
for vdu_name in target_vdus]
        # wait until the k8s resources are updated
self._wait_k8s_reses_updated(
target_reses, k8s_api_client, namespace, old_pods_names)
# make instantiated info
self._update_vnfc_info(inst, k8s_api_client)
def _create_reses_from_manifest(self, k8s_api_client, namespace,
k8s_resources):
for k8s_res in k8s_resources:
if k8s_res['kind'] in kubernetes_utils.SUPPORTED_NAMESPACE_KINDS:
k8s_res.setdefault('metadata', {})
k8s_res['metadata'].setdefault('namespace', namespace)
k8s_reses = []
for k8s_res in k8s_resources:
try:
cls = getattr(kubernetes_resource, k8s_res['kind'])
k8s_reses.append(cls(k8s_api_client, k8s_res))
except AttributeError:
LOG.info("Not support kind %s. ignored.", k8s_res['kind'])
return k8s_reses
def _get_release_name(self, inst):
release_name = 'vnf' + inst.id.replace('-', '')
return release_name
def _select_vdu_reses(self, vnfd, flavour_id, k8s_reses):
vdu_nodes = vnfd.get_vdu_nodes(flavour_id)
vdu_ids = {value.get('properties').get('name'): key
for key, value in vdu_nodes.items()}
        # In the helm case, res.name is "{properties.name}-{suffix}".
        # NOTE: the suffix must not include '-'.
return {vdu_ids[res.name[:res.name.rfind("-")]]: res
for res in k8s_reses
if (res.kind in kubernetes_common.TARGET_KIND
and res.name[:res.name.rfind("-")] in vdu_ids)}
def _init_instantiated_vnf_info(self, inst, flavour_id, vdu_reses,
namespace, helm_chart_path, helm_value_names, release_name,
revision):
super()._init_instantiated_vnf_info(inst, flavour_id, vdu_reses,
namespace)
inst.instantiatedVnfInfo.metadata.update(
{
'helm_chart_path': helm_chart_path,
'helm_value_names': helm_value_names,
'release_name': release_name,
'revision': revision
}
)
def _is_match_pod_naming_rule(self, rsc_kind, rsc_name, pod_name):
return rsc_name in pod_name
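# --- Illustrative sketch (hypothetical data, not part of the driver) ----
# How the 'helm_value_names' mapping from additionalParams and the VDU
# counts derived from the grant request combine into Helm install/upgrade
# parameters, mirroring the loops in _instantiate() and _scale() above.
def _example_replica_parameters():
    helm_value_names = {'VDU1': {'replica': 'workers.replicaCount'}}
    vdus_num = {'VDU1': 3}
    parameters = {}
    for vdu_name, vdu_num in vdus_num.items():
        replica_param = helm_value_names.get(vdu_name, {}).get('replica')
        if replica_param:
            parameters[replica_param] = vdu_num
    return parameters  # -> {'workers.replicaCount': 3}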
|
{
"content_hash": "9a2c1da84b959876cd27f5d56fb5be7c",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 79,
"avg_line_length": 43.931750741839764,
"alnum_prop": 0.5925700776764606,
"repo_name": "openstack/tacker",
"id": "d81bfc17ee6a0c02bab11692dd48056674e66e79",
"size": "15467",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tacker/sol_refactored/infra_drivers/kubernetes/helm.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jinja",
"bytes": "10809"
},
{
"name": "Mako",
"bytes": "1046"
},
{
"name": "Python",
"bytes": "7648075"
},
{
"name": "Ruby",
"bytes": "2841"
},
{
"name": "Shell",
"bytes": "61750"
},
{
"name": "Smarty",
"bytes": "3624"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="color", parent_name="barpolar.selected.marker", **kwargs
):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "style"),
**kwargs,
)
|
{
"content_hash": "5ee8854b66ad4ef6ed8d341ecfd9c1ff",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 83,
"avg_line_length": 32.46153846153846,
"alnum_prop": 0.6066350710900474,
"repo_name": "plotly/plotly.py",
"id": "5e0113d377f692dcb2f74496b790e25d003eec03",
"size": "422",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/barpolar/selected/marker/_color.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
}
|
"""
WSGI config for smt project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "smt.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
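# Usage sketch (assuming a conventional deployment): a WSGI server imports
# this module-level callable, e.g. with gunicorn:
#   gunicorn smt.wsgi:application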
|
{
"content_hash": "88567f7d0996f335d31436a65db7a2fb",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 78,
"avg_line_length": 27.214285714285715,
"alnum_prop": 0.7690288713910761,
"repo_name": "ilya-yurtaev/smt",
"id": "2888077951dfb1bc44aaeb0c0abb2c61658daa31",
"size": "381",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "smt/wsgi.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "2353"
},
{
"name": "JavaScript",
"bytes": "677142"
},
{
"name": "Python",
"bytes": "15189"
}
],
"symlink_target": ""
}
|
"""Task design containers."""
import numpy
import random
import pprint
__all__ = ['Design', 'Block', 'Trial', 'Array']
class Design(list):
"""Top-level task design container.
The :class:`Design` is a list of :class:`Block` objects, which themselves
are lists of :class:`Trial` objects.
"""
def add_block(self):
"""Add a block to the design.
Returns
-------
block : design.Block
The created block.
"""
block = Block(len(self))
self.append(block)
return block
class Block(list):
"""List of trials.
Experiments often consist of a set of blocks, each containing the same set
of trials in randomized order. You usually shouldn't need to create a block
directly -- use :meth:`Design.add_block` instead.
Parameters
----------
index : int
Index of the block in the design. This is required to pass along to
each trial in the block, so that the trial knows which block it belongs
to.
"""
def __init__(self, index, *args, **kwargs):
super(Block, self).__init__(*args, **kwargs)
self.index = index
def add_trial(self, attrs=None):
"""Add a trial to the block.
A :class:`Trial` object is created and added to the block. You can
optionally provide a dictionary of attribute name/value pairs to
initialize the trial.
Parameters
----------
attrs : dict, optional
Dictionary of attribute name/value pairs.
Returns
-------
trial : Trial
The trial object created. This can be used to add new attributes or
arrays. See :class:`Trial`.
"""
if attrs is None:
attrs = {}
attrs.update({'block': self.index, 'trial': len(self)})
trial = Trial(attrs=attrs)
self.append(trial)
return trial
def shuffle(self, reset_index=True, seed=None):
"""Shuffle the block's trials in random order.
Parameters
----------
reset_index : bool, optional
Whether or not to set the ``trial`` attribute of each trial such
that they remain in sequential order after shuffling. This is the
default.
seed : int, optional
If provided, the random seed will be set to the specified value to
ensure reproducible shuffling. Note that if you have multiple
identical blocks and want to shuffle them differently, use a
different seed value for each block.
"""
if seed is not None:
random.seed(seed)
random.shuffle(self)
if reset_index:
for i, trial in enumerate(self):
trial.attrs['trial'] = i
class Trial(object):
"""Container of trial data.
There are two kinds of data typically needed during a trial: attributes and
arrays. Attributes are scalar quantities or primitives like integers,
floating point numbers, booleans, strings, etc. Arrays are NumPy arrays,
useful for holding things like cursor trajectories.
There are two primary purposes for each of these two kinds of data. First,
it's useful to design a task with pre-determined values, such as the target
location or the cursor trajectory to follow. The other purpose is to
temporarily hold runtime data using the same interface, such as the final
cursor position or the time-to-target.
You shouldn't normally need to create a trial directly -- instead, use
:meth:`Block.add_trial`.
Attributes
----------
attrs : dict
Dictionary mapping attribute names to their values.
arrays : dict
Dictionary mapping array names to :class:`Array` objects, which contain
the array data.
"""
def __init__(self, attrs):
self.attrs = attrs
self.arrays = {}
def add_array(self, name, **kwargs):
"""Add an array to the trial.
Parameters
----------
name : str
Name of the array.
kwargs : dict
Keyword arguments passed along to :class:`Array`.
"""
self.arrays[name] = Array(**kwargs)
def __str__(self):
return pprint.pformat(self.attrs)
class Array(object):
"""Trial array.
The array is not much more than a NumPy array with a :meth:`stack` method
for conveniently adding new data to the array. This is useful in cases
where you iteratively collect new segments of data and want to concatenate
them. For example, you could use an :class:`Array` to collect the samples
from a data acquisition device as they come in.
You usually don't need to create an array manually -- instead, use
:meth:`Trial.add_array`.
Parameters
----------
data : ndarray, optional
Data to initialize the array with. If ``None``, the first array passed
to :meth:`stack` is used for initialization.
stack_axis : int, optional
Axis to stack the data along.
Attributes
----------
data : ndarray, optional
The NumPy array holding the data.
"""
_stack_funcs = {0: numpy.vstack, 1: numpy.hstack, 2: numpy.dstack}
def __init__(self, data=None, stack_axis=1):
self.data = data
self.stack_axis = stack_axis
def stack(self, data):
"""Stack new data onto the array.
Parameters
----------
data : ndarray
New data to add. The direction to stack along is specified in the
array's constructor (stack_axis).
"""
if self.data is None:
self.data = data
else:
self.data = self._stack_funcs[self.stack_axis]([self.data, data])
def clear(self):
"""Clears the buffer.
Anything that was in the buffer is not retrievable.
"""
self.data = None
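# A minimal usage sketch (hypothetical attribute values, not part of the
# module): a two-block design whose trials each carry a 'cursor' Array for
# runtime data, shuffled reproducibly per block.
def _example_design():
    design = Design()
    for seed in (0, 1):
        block = design.add_block()
        for target in ('left', 'right'):
            trial = block.add_trial({'target': target})
            trial.add_array('cursor', stack_axis=1)
        block.shuffle(seed=seed)
    return design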
|
{
"content_hash": "a45eb10a96b5fcfac0b072bce6d2d767",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 79,
"avg_line_length": 30.14720812182741,
"alnum_prop": 0.6039737329516753,
"repo_name": "ucdrascal/hcibench",
"id": "5edf3af665fcc0876226bbf3fc230fbe40b61ced",
"size": "5939",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "axopy/design.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "79202"
}
],
"symlink_target": ""
}
|
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function, unicode_literals)
try:
from fabric.api import task, local
except ImportError:
from invoke import task
from invoke import run as local
import os
import os.path as op
import shutil
from glob import glob
from setuptools import find_packages
from pip.req import parse_requirements
# Get version without importing, which avoids dependency issues
module_name = find_packages(exclude=['tests'])[0]
version_pyfile = op.join(module_name, 'version.py')
exec(compile(open(version_pyfile).read(), version_pyfile, 'exec'))
# get current dir
CWD = op.realpath(op.curdir)
# directories to ignore
IGNORE = ['.git', '.idea']
def get_requirements(*args):
"""Parse all requirements files given and return a list of the dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req or d.url) for d in parse_requirements(fpath)])
except:
        print('Error reading requirements file {}.'.format(fpath))
return [dep for dep in install_deps if dep != 'None']
def recursive_glob(base_directory, regex=None):
"""Uses glob to find all files that match the regex in base_directory.
@param base_directory: string
@param regex: string
@return: set
"""
if regex is None:
regex = ''
files = glob(os.path.join(base_directory, regex))
for path, dirlist, filelist in os.walk(base_directory):
for ignored in IGNORE:
try:
dirlist.remove(ignored)
except:
pass
for dir_name in dirlist:
files.extend(glob(os.path.join(path, dir_name, regex)))
return files
def recursive_remove(work_dir=CWD, regex='*'):
[os.remove(fn) for fn in recursive_glob(work_dir, regex)]
def recursive_rmtrees(work_dir=CWD, regex='*'):
[shutil.rmtree(fn, ignore_errors=True) for fn in recursive_glob(work_dir, regex)]
@task
def install_deps(req_filepaths=['requirements.txt']):
# for line in fileinput.input():
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
cmd = "pip install '{0}'".format(dep_name)
print('#', cmd)
local(cmd)
except:
print('Error installing {}'.format(dep_name))
@task
def version():
print(__version__)
@task
def install():
clean()
install_deps()
local('python setup.py install')
@task
def develop():
clean()
install_deps()
local('python setup.py develop')
@task
def clean(work_dir=CWD):
clean_build(work_dir)
clean_pyc(work_dir)
@task
def clean_build(work_dir=CWD):
shutil.rmtree('build', ignore_errors=True)
shutil.rmtree('dist', ignore_errors=True)
shutil.rmtree('.eggs', ignore_errors=True)
recursive_rmtrees(work_dir, '__pycache__')
recursive_rmtrees(work_dir, '*.egg-info')
recursive_rmtrees(work_dir, '*.egg')
recursive_rmtrees(work_dir, '.ipynb_checkpoints')
@task
def clean_pyc(work_dir=CWD):
recursive_remove(work_dir, '*.pyc')
recursive_remove(work_dir, '*.pyo')
recursive_remove(work_dir, '*~')
@task
def lint():
local('flake8 ' + module_name + ' test')
@task
def test(filepath=''):
if filepath:
if not op.exists(filepath):
print('Error: could not find file {}'.format(filepath))
exit(-1)
cmd = 'python setup.py test -a ' + filepath
else:
cmd = 'python setup.py test'
local(cmd)
@task
def test_all():
local('tox')
@task
def coverage():
    local('coverage run --source ' + module_name + ' setup.py test')
local('coverage report -m')
local('coverage html')
local('open htmlcov/index.html')
@task
def docs(doc_type='html'):
os.remove(op.join('docs', module_name + '.rst'))
os.remove(op.join('docs', 'modules.rst'))
local('sphinx-apidoc -o docs/ ' + module_name)
os.chdir('docs')
local('make clean')
local('make ' + doc_type)
os.chdir(CWD)
local('open docs/_build/html/index.html')
@task
def release():
clean()
local('pip install -U pip setuptools twine wheel')
local('python setup.py sdist bdist_wheel')
#local('python setup.py bdist_wheel upload')
local('twine upload dist/*')
@task
def sdist():
clean()
local('python setup.py sdist')
local('python setup.py bdist_wheel upload')
print(os.listdir('dist'))
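# Usage sketch (task names taken from this file; exact CLI flags depend on
# the fabric/invoke version installed): the tasks are run by name, e.g.
#   invoke clean
#   invoke test
#   invoke release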
|
{
"content_hash": "590897f5e53ab6ca864741a3b87b97df",
"timestamp": "",
"source": "github",
"line_count": 193,
"max_line_length": 89,
"avg_line_length": 23.53886010362694,
"alnum_prop": 0.6332819722650231,
"repo_name": "Neurita/boyle",
"id": "ca396d41e41f0a08881f65ae03c34b963f3545fe",
"size": "4566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fabfile.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1687"
},
{
"name": "Python",
"bytes": "391188"
}
],
"symlink_target": ""
}
|
from __future__ import (absolute_import, division, print_function)
from .wcsBase import WCSBase, WCSCapabilitiesReader, ServiceException
from bcube_owslib.util import openURL, testXMLValue, xmltag_split
from urllib import urlencode
from bcube_owslib.crs import Crs
import logging
from owslib.util import log
def ns(tags):
"""go by the tag name, no namespaces for future/version-proofing
"""
return '/'.join(['*[local-name()="%s"]' % t if t not in ['*', '..', '.'] else t
for t in tags.split('/') if t])
def find(elem, xpath):
"""assumes one element to return
"""
if elem is None:
return None
return next(iter(elem.xpath(xpath)), None)
def findall(elem, xpath):
if elem is None:
return None
return elem.xpath(xpath)
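# A hedged example (not part of the original module) of what ns() builds:
# a namespace-agnostic XPath using local-name() tests for each path step.
def _example_ns():
    assert ns('Keywords/Keyword') == (
        '*[local-name()="Keywords"]/*[local-name()="Keyword"]')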
class WebCoverageService_1_1_x(WCSBase):
"""Abstraction for OGC Web Coverage Service (WCS), version 1.1.0
Implements IWebCoverageService.
"""
def __getitem__(self, name):
''' check contents dictionary to allow dict like access to service layers'''
if name in self.__getattribute__('contents').keys():
return self.__getattribute__('contents')[name]
else:
raise KeyError("No content named %s" % name)
def __init__(self, url, xml=None, cookies=None, version='1.1.0'):
self.version = version
self.url = url
self.cookies = cookies
# initialize from saved capability document or access the server
reader = WCSCapabilitiesReader(self.version)
if xml:
self._capabilities = reader.readString(xml)
else:
self._capabilities = reader.read(self.url)
# check for exceptions
se = find(self._capabilities, ns("Exception"))
if se is not None:
err_message = str(se.text).strip()
raise ServiceException(err_message, xml)
# build metadata objects:
# serviceIdentification metadata
elem = find(self._capabilities, ns('ServiceIdentification'))
self.identification = ServiceIdentification(elem)
# serviceProvider
elem = find(self._capabilities, ns('ServiceProvider'))
self.provider = ServiceProvider(elem)
# serviceOperations
self.operations = []
for elem in findall(self._capabilities, ns('OperationsMetadata/Operation')):
self.operations.append(Operation(elem))
# exceptions - ***********TO DO *************
self.exceptions = [f.text for f
in findall(self._capabilities, ns('Capability/Exception/Format'))]
# serviceContents: our assumption is that services use a top-level layer
# as a metadata organizer, nothing more.
self.contents = {}
for elem in findall(
self._capabilities, '/' + ns('*/Contents') + '//' + ns('CoverageSummary')):
cm = ContentMetadata(elem, None, self)
self.contents[cm.id] = cm
# top = find(self._capabilities, ns('Contents/CoverageSummary'))
# for elem in findall(self._capabilities, ns('Contents/CoverageSummary/CoverageSummary')):
# cm = ContentMetadata(elem, top, self)
# self.contents[cm.id] = cm
# if self.contents == {}:
# # non-hierarchical.
# top = None
# for elem in findall(self._capabilities, ns('Contents/CoverageSummary')):
# cm = ContentMetadata(elem, top, self)
# # make the describeCoverage requests to
# # populate the supported formats/crs attributes
# self.contents[cm.id] = cm
def items(self):
'''supports dict-like items() access'''
items = []
for item in self.contents:
items.append((item, self.contents[item]))
return items
# TODO: Handle rest of the WCS 1.1.0 keyword parameters e.g. GridCRS etc.
def getCoverage(self,
identifier=None,
bbox=None,
time=None,
format=None,
store=False,
rangesubset=None,
gridbaseCRS=None,
gridtype=None,
gridCS=None,
gridorigin=None,
gridoffsets=None,
method='Get',
**kwargs):
"""Request and return a coverage from the WCS as a file-like object
note: additional **kwargs helps with multi-version implementation
core keyword arguments should be supported cross version
example:
cvg=wcs.getCoverageRequest(identifier=['TuMYrRQ4'], time=['2792-06-01T00:00:00.0'],
bbox=(-112,36,-106,41),format='application/netcdf', store='true')
is equivalent to:
http://myhost/mywcs?SERVICE=WCS&REQUEST=GetCoverage&IDENTIFIER=TuMYrRQ4&VERSION=1.1.0&BOUNDINGBOX=
-180,-90,180,90&TIMESEQUENCE=2792-06-01T00:00:00.0&FORMAT=application/netcdf
if store = true, returns a coverages XML file
if store = false, returns a multipart mime
"""
if log.isEnabledFor(logging.DEBUG):
log.debug('''WCS 1.1.0 DEBUG: Parameters passed to GetCoverage: identifier=%s, bbox=%s,
time=%s, format=%s, rangesubset=%s, gridbaseCRS=%s, gridtype=%s, gridCS=%s,
gridorigin=%s, gridoffsets=%s, method=%s, other_arguments=%s''' % (
identifier,
bbox,
time,
format,
rangesubset,
gridbaseCRS,
gridtype,
gridCS,
gridorigin,
gridoffsets,
method,
str(kwargs)
))
if method == 'Get':
method = '{http://www.opengis.net/wcs/1.1/ows}Get'
try:
base_url = next(
(m.get('url') for m in self.getOperationByName('GetCoverage').methods
if m.get('type').lower() == method.lower())
)
except StopIteration:
base_url = self.url
# process kwargs
request = {'version': self.version, 'request': 'GetCoverage', 'service': 'WCS'}
assert len(identifier) > 0
request['identifier'] = identifier
# request['identifier'] = ','.join(identifier)
if bbox:
request['boundingbox'] = ','.join([repr(x) for x in bbox])
if time:
request['timesequence'] = ','.join(time)
request['format'] = format
request['store'] = store
# rangesubset: untested - require a server implementation
if rangesubset:
request['RangeSubset'] = rangesubset
# GridCRS structure: untested - require a server implementation
if gridbaseCRS:
request['gridbaseCRS'] = gridbaseCRS
if gridtype:
request['gridtype'] = gridtype
if gridCS:
request['gridCS'] = gridCS
if gridorigin:
request['gridorigin'] = gridorigin
if gridoffsets:
request['gridoffsets'] = gridoffsets
# anything else e.g. vendor specific parameters must go through kwargs
if kwargs:
for kw in kwargs:
request[kw] = kwargs[kw]
# encode and request
data = urlencode(request)
u = openURL(base_url, data, method, self.cookies)
return u
def getOperationByName(self, name):
"""Return a named operation item."""
for item in self.operations:
if item.name == name:
return item
raise KeyError("No operation named %s" % name)
class Operation(object):
"""Abstraction for operation metadata
Implements IOperationMetadata.
"""
def __init__(self, elem):
self.name = elem.get('name')
# this is not valid
# self.formatOptions = [f.text for f in findall(elem, ns('Parameter/AllowedValues/Value'))]
self.formatOptions = []
# for the constraints, to match the parameter values
# and these are at the parent OperationMetadata level
constraints = []
for constraint in findall(elem, ns('../Constraint')):
# let's just make that an or?
cxp = ns('AllowedValues/Value') + ' | ' + ns('Value')
constraints.append({
constraint.attrib.get('name'): {
"values": [i.text for i in findall(constraint, cxp)]
}
})
self.constraints = constraints
methods = []
for verb in findall(elem, ns('DCP/HTTP/*')):
methods.append(
{
"type": xmltag_split(verb.tag),
"url": verb.attrib['{http://www.w3.org/1999/xlink}href']
}
)
self.methods = methods
# for the parameters
parameters = {}
for parameter in findall(elem, ns('Parameter')):
parameters[parameter.attrib['name']] = {'values': [i.text for i in findall(parameter,
ns('AllowedValues/Value'))]}
self.parameters = parameters
class ServiceIdentification(object):
""" Abstraction for ServiceIdentification Metadata
implements IServiceIdentificationMetadata"""
def __init__(self, elem):
self.service = "WCS"
# TODO: fix this
self.version = "1.1.0"
self.title = testXMLValue(find(elem, ns('Title')))
self.abstract = testXMLValue(find(elem, ns('Abstract')))
self.keywords = [f.text for f in findall(elem, ns('Keywords/Keyword'))]
self.fees = testXMLValue(find(elem, ns('Fees')))
self.accessconstraints = testXMLValue(find(elem, ns('AccessConstraints')))
class ServiceProvider(object):
""" Abstraction for ServiceProvider metadata
implements IServiceProviderMetadata """
def __init__(self, elem):
name = elem.find('{http://www.opengis.net/ows}ProviderName')
if name is not None:
self.name = name.text
else:
self.name = None
# self.contact=ServiceContact(elem.find('{http://www.opengis.net/ows}ServiceContact'))
self.contact = ContactMetadata(elem)
self.url = self.name # no obvious definitive place for url in wcs, repeat provider name?
class ContactMetadata(object):
''' implements IContactMetadata'''
def __init__(self, elem):
self.name = testXMLValue(find(elem, ns('ServiceContact/IndividualName')))
self.organization = testXMLValue(find(elem, ns('ProviderName')))
self.address = testXMLValue(find(elem,
ns('ServiceContact/ContactInfo/Address/DeliveryPoint')))
self.city = testXMLValue(find(elem, ns('ServiceContact/ContactInfo/Address/City')))
self.region = testXMLValue(find(elem,
ns('ServiceContact/ContactInfo/Address/AdministrativeArea')))
self.postcode = testXMLValue(find(elem,
ns('ServiceContact/ContactInfo/Address/PostalCode')))
self.country = testXMLValue(find(elem, ns('ServiceContact/ContactInfo/Address/Country')))
self.email = testXMLValue(find(elem,
ns('ServiceContact/ContactInfo/Address/ElectronicMailAddress')))
class ContentMetadata(object):
"""Abstraction for WCS ContentMetadata
Implements IContentMetadata
"""
def __init__(self, elem, parent, service):
"""Initialize.
CoverageSummary elements can be nested more than once and still be valid
as XML. If the leaf does not have an identifier, the parent identifier
should be used. SupportedCRS is the union of parent/child CRS values.
There is nothing in the spec about inherited keyword sets. And no Description
element?
"""
# TODO - examine the parent for bounding box info.
self._service = service
self._elem = elem
# find returns the first item so we should be good here
self.id = testXMLValue(
find(elem, '*[local-name()="Identifier"] | ../*[local-name()="Identifier"]')
)
self.abstract = testXMLValue(
find(elem, '*[local-name()="Abstract"] | ../*[local-name()="Abstract"]')
)
self.title = testXMLValue(
find(elem, '*[local-name()="Title"] | ../*[local-name()="Title"]')
)
# get the ancestors related to nested CoverageSummary elems and the local keywords
tags = [elem.tag] + [e.tag for e in elem.iterancestors() if e.tag in ['CoverageSummary']]
xpaths = ['/'.join(['..'] * i + ['*[local-name()="Keywords"]', '*[local-name()="Keyword"]'])
                  for i in range(len(tags))]
self.keywords = [k.text for k in findall(elem, ' | '.join(xpaths))] if xpaths else []
self.boundingBoxWGS84 = None
b = find(elem, ns('WGS84BoundingBox'))
if b is not None:
lc = find(b, ns('LowerCorner')).text
uc = find(b, ns('UpperCorner')).text
self.boundingBoxWGS84 = (
float(lc.split()[0]), float(lc.split()[1]),
float(uc.split()[0]), float(uc.split()[1]),
)
# bboxes - other CRS
self.boundingBoxes = []
        for bbox in findall(elem, ns('BoundingBox')):
            if bbox is not None:
                try:
                    lc = find(bbox, ns('LowerCorner')).text
                    uc = find(bbox, ns('UpperCorner')).text
                    boundingBox = (
                        float(lc.split()[0]), float(lc.split()[1]),
                        float(uc.split()[0]), float(uc.split()[1]),
                        bbox.attrib['crs']
                    )
                    self.boundingBoxes.append(boundingBox)
                except Exception:
                    pass
# others not used but needed for iContentMetadata harmonisation
self.styles = None
self.crsOptions = None
self.attribution = None
self.metadataUrls = None
# SupportedCRS
self.supportedCRS = []
for crs in elem.findall('{http://www.opengis.net/wcs/1.1}SupportedCRS'):
self.supportedCRS.append(Crs(crs.text))
# SupportedFormats
self.supportedFormats = []
for format in findall(elem, ns('SupportedFormat')):
self.supportedFormats.append(format.text)
# grid is either a gml:Grid or a gml:RectifiedGrid if supplied
# as part of the DescribeCoverage response.
def _getGrid(self):
grid = None
# TODO- convert this to 1.1 from 1.0
# if not hasattr(self, 'descCov'):
# self.descCov=self._service.getDescribeCoverage(self.id)
# gridelem= self.descCov.find(
# ns('CoverageOffering/')+ns('domainSet/')+ns('spatialDomain/')+'{http://www.opengis.net/gml}RectifiedGrid')
# if gridelem is not None:
# grid=RectifiedGrid(gridelem)
# else:
# gridelem=self.descCov.find(
# ns('CoverageOffering/')+ns('domainSet/')+ns('spatialDomain/')+'{http://www.opengis.net/gml}Grid')
# grid=Grid(gridelem)
return grid
grid = property(_getGrid, None)
# time limits/postions require a describeCoverage request therefore only resolve when requested
def _getTimeLimits(self):
timelimits = []
for elem in findall(
self._service.getDescribeCoverage(self.id),
ns('CoverageDescription/Domain/TemporalDomain/TimePeriod')):
subelems = elem.getchildren()
timelimits = [subelems[0].text, subelems[1].text]
return timelimits
timelimits = property(_getTimeLimits, None)
# TODO timepositions property
def _getTimePositions(self):
return []
timepositions = property(_getTimePositions, None)
|
{
"content_hash": "2911555c82b867fddc40532c9c93c7ba",
"timestamp": "",
"source": "github",
"line_count": 422,
"max_line_length": 119,
"avg_line_length": 38.19668246445497,
"alnum_prop": 0.5729263601960419,
"repo_name": "b-cube/pipeline-demo",
"id": "0c92838ab1b31cba9a22d4f8787c5e129e3cb00a",
"size": "16494",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demo/bcube_owslib/coverage/wcs110.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "618440"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/clothing/shared_clothing_armor_zam_boots.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "074e271a5cdcad4d1138b75ed37f8268",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 88,
"avg_line_length": 24.384615384615383,
"alnum_prop": 0.7003154574132492,
"repo_name": "obi-two/Rebelion",
"id": "612d14b8cc7311d88869b51d5abff62dbdf02c88",
"size": "462",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/draft_schematic/clothing/shared_clothing_armor_zam_boots.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
}
|
import unittest
from unittest.mock import MagicMock, patch
from alarmlistener.alarm_notification_handler import AlarmNotificationHandler
from alarmlistener.event_controller import EventController
class TestAlarmNotificationHandler(unittest.TestCase):
def test_handle_method_triggers_alarm_event_on_controller(self):
server_mock = MagicMock()
server_mock.event_controller.mock_add_spec(EventController)
# Instantiation of object already calls 'handle()' method
AlarmNotificationHandler(MagicMock(), MagicMock(), server_mock)
self.assertTrue(server_mock.event_controller.trigger_alarm_event.called)
self.assertEqual(server_mock.event_controller.trigger_alarm_event.call_count, 1)
@patch('alarmlistener.alarm_notification_handler.log')
def test_exception_raised_on_socket_close_should_log_message(self, log_mock):
server_mock = MagicMock()
request_mock = MagicMock()
request_mock.close.side_effect = OSError('some exception')
# Instantiation of object already calls 'handle()' method
AlarmNotificationHandler(request_mock, MagicMock(), server_mock)
self.assertTrue(log_mock.debug.called)
self.assertTrue(log_mock.warning.called)
self.assertEqual(log_mock.warning.call_count, 1)
self.assertFalse(server_mock.event_controller.trigger_alarm_event.called, msg="Trigger Alarm Event should not have been called")
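# Note: AlarmNotificationHandler presumably follows the
# socketserver.BaseRequestHandler protocol, whose __init__ runs setup(),
# handle() and finish() immediately -- which is why merely constructing
# the handler above is enough to exercise handle().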
|
{
"content_hash": "e46a382d33a06908c60e831cf8ddce0d",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 136,
"avg_line_length": 45.15625,
"alnum_prop": 0.7432525951557093,
"repo_name": "mdonkers/AlarmListener",
"id": "9db34ecb098e7884f430eb3986a8dc91605e0bb8",
"size": "1445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_alarm_notification_handler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2277"
},
{
"name": "Python",
"bytes": "32104"
},
{
"name": "Shell",
"bytes": "171"
}
],
"symlink_target": ""
}
|
import vstruct
from vstruct.primitives import *
KPROCESS_STATE = v_enum()
KPROCESS_STATE.ProcessInMemory = 0
KPROCESS_STATE.ProcessOutOfMemory = 1
KPROCESS_STATE.ProcessInTransition = 2
KPROCESS_STATE.ProcessOutTransition = 3
KPROCESS_STATE.ProcessInSwap = 4
KPROCESS_STATE.ProcessOutSwap = 5
KPROCESS_STATE.ProcessAllSwapStates = 6
WHEA_ERROR_SEVERITY = v_enum()
WHEA_ERROR_SEVERITY.WheaErrSevRecoverable = 0
WHEA_ERROR_SEVERITY.WheaErrSevFatal = 1
WHEA_ERROR_SEVERITY.WheaErrSevCorrected = 2
WHEA_ERROR_SEVERITY.WheaErrSevInformational = 3
REG_NOTIFY_CLASS = v_enum()
REG_NOTIFY_CLASS.RegNtDeleteKey = 0
REG_NOTIFY_CLASS.RegNtPreDeleteKey = 1
REG_NOTIFY_CLASS.RegNtSetValueKey = 2
REG_NOTIFY_CLASS.RegNtPreSetValueKey = 3
REG_NOTIFY_CLASS.RegNtDeleteValueKey = 4
REG_NOTIFY_CLASS.RegNtPreDeleteValueKey = 5
REG_NOTIFY_CLASS.RegNtSetInformationKey = 6
REG_NOTIFY_CLASS.RegNtPreSetInformationKey = 7
REG_NOTIFY_CLASS.RegNtRenameKey = 8
REG_NOTIFY_CLASS.RegNtPreRenameKey = 9
REG_NOTIFY_CLASS.RegNtEnumerateKey = 10
REG_NOTIFY_CLASS.RegNtPreEnumerateKey = 11
REG_NOTIFY_CLASS.RegNtEnumerateValueKey = 12
REG_NOTIFY_CLASS.RegNtPreEnumerateValueKey = 13
REG_NOTIFY_CLASS.RegNtQueryKey = 14
REG_NOTIFY_CLASS.RegNtPreQueryKey = 15
REG_NOTIFY_CLASS.RegNtQueryValueKey = 16
REG_NOTIFY_CLASS.RegNtPreQueryValueKey = 17
REG_NOTIFY_CLASS.RegNtQueryMultipleValueKey = 18
REG_NOTIFY_CLASS.RegNtPreQueryMultipleValueKey = 19
REG_NOTIFY_CLASS.RegNtPreCreateKey = 20
REG_NOTIFY_CLASS.RegNtPostCreateKey = 21
REG_NOTIFY_CLASS.RegNtPreOpenKey = 22
REG_NOTIFY_CLASS.RegNtPostOpenKey = 23
REG_NOTIFY_CLASS.RegNtKeyHandleClose = 24
REG_NOTIFY_CLASS.RegNtPreKeyHandleClose = 25
REG_NOTIFY_CLASS.RegNtPostDeleteKey = 26
REG_NOTIFY_CLASS.RegNtPostSetValueKey = 27
REG_NOTIFY_CLASS.RegNtPostDeleteValueKey = 28
REG_NOTIFY_CLASS.RegNtPostSetInformationKey = 29
REG_NOTIFY_CLASS.RegNtPostRenameKey = 30
REG_NOTIFY_CLASS.RegNtPostEnumerateKey = 31
REG_NOTIFY_CLASS.RegNtPostEnumerateValueKey = 32
REG_NOTIFY_CLASS.RegNtPostQueryKey = 33
REG_NOTIFY_CLASS.RegNtPostQueryValueKey = 34
REG_NOTIFY_CLASS.RegNtPostQueryMultipleValueKey = 35
REG_NOTIFY_CLASS.RegNtPostKeyHandleClose = 36
REG_NOTIFY_CLASS.RegNtPreCreateKeyEx = 37
REG_NOTIFY_CLASS.RegNtPostCreateKeyEx = 38
REG_NOTIFY_CLASS.RegNtPreOpenKeyEx = 39
REG_NOTIFY_CLASS.RegNtPostOpenKeyEx = 40
REG_NOTIFY_CLASS.RegNtPreFlushKey = 41
REG_NOTIFY_CLASS.RegNtPostFlushKey = 42
REG_NOTIFY_CLASS.RegNtPreLoadKey = 43
REG_NOTIFY_CLASS.RegNtPostLoadKey = 44
REG_NOTIFY_CLASS.RegNtPreUnLoadKey = 45
REG_NOTIFY_CLASS.RegNtPostUnLoadKey = 46
REG_NOTIFY_CLASS.RegNtPreQueryKeySecurity = 47
REG_NOTIFY_CLASS.RegNtPostQueryKeySecurity = 48
REG_NOTIFY_CLASS.RegNtPreSetKeySecurity = 49
REG_NOTIFY_CLASS.RegNtPostSetKeySecurity = 50
REG_NOTIFY_CLASS.RegNtCallbackObjectContextCleanup = 51
REG_NOTIFY_CLASS.RegNtPreRestoreKey = 52
REG_NOTIFY_CLASS.RegNtPostRestoreKey = 53
REG_NOTIFY_CLASS.RegNtPreSaveKey = 54
REG_NOTIFY_CLASS.RegNtPostSaveKey = 55
REG_NOTIFY_CLASS.RegNtPreReplaceKey = 56
REG_NOTIFY_CLASS.RegNtPostReplaceKey = 57
REG_NOTIFY_CLASS.MaxRegNtNotifyClass = 58
DEVICE_RELATION_TYPE = v_enum()
DEVICE_RELATION_TYPE.BusRelations = 0
DEVICE_RELATION_TYPE.EjectionRelations = 1
DEVICE_RELATION_TYPE.PowerRelations = 2
DEVICE_RELATION_TYPE.RemovalRelations = 3
DEVICE_RELATION_TYPE.TargetDeviceRelation = 4
DEVICE_RELATION_TYPE.SingleBusRelations = 5
DEVICE_RELATION_TYPE.TransportRelations = 6
FILE_INFORMATION_CLASS = v_enum()
FILE_INFORMATION_CLASS.FileDirectoryInformation = 0
FILE_INFORMATION_CLASS.FileFullDirectoryInformation = 1
FILE_INFORMATION_CLASS.FileBothDirectoryInformation = 2
FILE_INFORMATION_CLASS.FileBasicInformation = 3
FILE_INFORMATION_CLASS.FileStandardInformation = 4
FILE_INFORMATION_CLASS.FileInternalInformation = 5
FILE_INFORMATION_CLASS.FileEaInformation = 6
FILE_INFORMATION_CLASS.FileAccessInformation = 7
FILE_INFORMATION_CLASS.FileNameInformation = 8
FILE_INFORMATION_CLASS.FileRenameInformation = 9
FILE_INFORMATION_CLASS.FileLinkInformation = 10
FILE_INFORMATION_CLASS.FileNamesInformation = 11
FILE_INFORMATION_CLASS.FileDispositionInformation = 12
FILE_INFORMATION_CLASS.FilePositionInformation = 13
FILE_INFORMATION_CLASS.FileFullEaInformation = 14
FILE_INFORMATION_CLASS.FileModeInformation = 15
FILE_INFORMATION_CLASS.FileAlignmentInformation = 16
FILE_INFORMATION_CLASS.FileAllInformation = 17
FILE_INFORMATION_CLASS.FileAllocationInformation = 18
FILE_INFORMATION_CLASS.FileEndOfFileInformation = 19
FILE_INFORMATION_CLASS.FileAlternateNameInformation = 20
FILE_INFORMATION_CLASS.FileStreamInformation = 21
FILE_INFORMATION_CLASS.FilePipeInformation = 22
FILE_INFORMATION_CLASS.FilePipeLocalInformation = 23
FILE_INFORMATION_CLASS.FilePipeRemoteInformation = 24
FILE_INFORMATION_CLASS.FileMailslotQueryInformation = 25
FILE_INFORMATION_CLASS.FileMailslotSetInformation = 26
FILE_INFORMATION_CLASS.FileCompressionInformation = 27
FILE_INFORMATION_CLASS.FileObjectIdInformation = 28
FILE_INFORMATION_CLASS.FileCompletionInformation = 29
FILE_INFORMATION_CLASS.FileMoveClusterInformation = 30
FILE_INFORMATION_CLASS.FileQuotaInformation = 31
FILE_INFORMATION_CLASS.FileReparsePointInformation = 32
FILE_INFORMATION_CLASS.FileNetworkOpenInformation = 33
FILE_INFORMATION_CLASS.FileAttributeTagInformation = 34
FILE_INFORMATION_CLASS.FileTrackingInformation = 35
FILE_INFORMATION_CLASS.FileIdBothDirectoryInformation = 36
FILE_INFORMATION_CLASS.FileIdFullDirectoryInformation = 37
FILE_INFORMATION_CLASS.FileValidDataLengthInformation = 38
FILE_INFORMATION_CLASS.FileShortNameInformation = 39
FILE_INFORMATION_CLASS.FileIoCompletionNotificationInformation = 40
FILE_INFORMATION_CLASS.FileIoStatusBlockRangeInformation = 41
FILE_INFORMATION_CLASS.FileIoPriorityHintInformation = 42
FILE_INFORMATION_CLASS.FileSfioReserveInformation = 43
FILE_INFORMATION_CLASS.FileSfioVolumeInformation = 44
FILE_INFORMATION_CLASS.FileHardLinkInformation = 45
FILE_INFORMATION_CLASS.FileProcessIdsUsingFileInformation = 46
FILE_INFORMATION_CLASS.FileNormalizedNameInformation = 47
FILE_INFORMATION_CLASS.FileNetworkPhysicalNameInformation = 48
FILE_INFORMATION_CLASS.FileIdGlobalTxDirectoryInformation = 49
FILE_INFORMATION_CLASS.FileIsRemoteDeviceInformation = 50
FILE_INFORMATION_CLASS.FileAttributeCacheInformation = 51
FILE_INFORMATION_CLASS.FileNumaNodeInformation = 52
FILE_INFORMATION_CLASS.FileStandardLinkInformation = 53
FILE_INFORMATION_CLASS.FileRemoteProtocolInformation = 54
FILE_INFORMATION_CLASS.FileMaximumInformation = 55
ALTERNATIVE_ARCHITECTURE_TYPE = v_enum()
ALTERNATIVE_ARCHITECTURE_TYPE.StandardDesign = 0
ALTERNATIVE_ARCHITECTURE_TYPE.NEC98x86 = 1
ALTERNATIVE_ARCHITECTURE_TYPE.EndAlternatives = 2
BUS_QUERY_ID_TYPE = v_enum()
BUS_QUERY_ID_TYPE.BusQueryDeviceID = 0
BUS_QUERY_ID_TYPE.BusQueryHardwareIDs = 1
BUS_QUERY_ID_TYPE.BusQueryCompatibleIDs = 2
BUS_QUERY_ID_TYPE.BusQueryInstanceID = 3
BUS_QUERY_ID_TYPE.BusQueryDeviceSerialNumber = 4
BUS_QUERY_ID_TYPE.BusQueryContainerID = 5
KOBJECTS = v_enum()
KOBJECTS.EventNotificationObject = 0
KOBJECTS.EventSynchronizationObject = 1
KOBJECTS.MutantObject = 2
KOBJECTS.ProcessObject = 3
KOBJECTS.QueueObject = 4
KOBJECTS.SemaphoreObject = 5
KOBJECTS.ThreadObject = 6
KOBJECTS.GateObject = 7
KOBJECTS.TimerNotificationObject = 8
KOBJECTS.TimerSynchronizationObject = 9
KOBJECTS.Spare2Object = 10
KOBJECTS.Spare3Object = 11
KOBJECTS.Spare4Object = 12
KOBJECTS.Spare5Object = 13
KOBJECTS.Spare6Object = 14
KOBJECTS.Spare7Object = 15
KOBJECTS.Spare8Object = 16
KOBJECTS.Spare9Object = 17
KOBJECTS.ApcObject = 18
KOBJECTS.DpcObject = 19
KOBJECTS.DeviceQueueObject = 20
KOBJECTS.EventPairObject = 21
KOBJECTS.InterruptObject = 22
KOBJECTS.ProfileObject = 23
KOBJECTS.ThreadedDpcObject = 24
KOBJECTS.MaximumKernelObject = 25
NT_PRODUCT_TYPE = v_enum()
NT_PRODUCT_TYPE.NtProductWinNt = 0
NT_PRODUCT_TYPE.NtProductLanManNt = 1
NT_PRODUCT_TYPE.NtProductServer = 2
DEVICE_POWER_STATE = v_enum()
DEVICE_POWER_STATE.PowerDeviceUnspecified = 0
DEVICE_POWER_STATE.PowerDeviceD0 = 1
DEVICE_POWER_STATE.PowerDeviceD1 = 2
DEVICE_POWER_STATE.PowerDeviceD2 = 3
DEVICE_POWER_STATE.PowerDeviceD3 = 4
DEVICE_POWER_STATE.PowerDeviceMaximum = 5
WHEA_ERROR_SOURCE_TYPE = v_enum()
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeMCE = 0
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeCMC = 1
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeCPE = 2
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeNMI = 3
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypePCIe = 4
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeGeneric = 5
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeINIT = 6
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeBOOT = 7
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeSCIGeneric = 8
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeIPFMCA = 9
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeIPFCMC = 10
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeIPFCPE = 11
WHEA_ERROR_SOURCE_TYPE.WheaErrSrcTypeMax = 12
PROC_HYPERVISOR_STATE = v_enum()
PROC_HYPERVISOR_STATE.ProcHypervisorNone = 0
PROC_HYPERVISOR_STATE.ProcHypervisorPresent = 1
PROC_HYPERVISOR_STATE.ProcHypervisorPower = 2
RTL_GENERIC_COMPARE_RESULTS = v_enum()
RTL_GENERIC_COMPARE_RESULTS.GenericLessThan = 0
RTL_GENERIC_COMPARE_RESULTS.GenericGreaterThan = 1
RTL_GENERIC_COMPARE_RESULTS.GenericEqual = 2
KWAIT_BLOCK_STATE = v_enum()
KWAIT_BLOCK_STATE.WaitBlockBypassStart = 0
KWAIT_BLOCK_STATE.WaitBlockBypassComplete = 1
KWAIT_BLOCK_STATE.WaitBlockActive = 2
KWAIT_BLOCK_STATE.WaitBlockInactive = 3
KWAIT_BLOCK_STATE.WaitBlockAllStates = 4
WHEA_ERROR_TYPE = v_enum()
WHEA_ERROR_TYPE.WheaErrTypeProcessor = 0
WHEA_ERROR_TYPE.WheaErrTypeMemory = 1
WHEA_ERROR_TYPE.WheaErrTypePCIExpress = 2
WHEA_ERROR_TYPE.WheaErrTypeNMI = 3
WHEA_ERROR_TYPE.WheaErrTypePCIXBus = 4
WHEA_ERROR_TYPE.WheaErrTypePCIXDevice = 5
WHEA_ERROR_TYPE.WheaErrTypeGeneric = 6
PROCESSOR_CACHE_TYPE = v_enum()
PROCESSOR_CACHE_TYPE.CacheUnified = 0
PROCESSOR_CACHE_TYPE.CacheInstruction = 1
PROCESSOR_CACHE_TYPE.CacheData = 2
PROCESSOR_CACHE_TYPE.CacheTrace = 3
MCA_EXCEPTION_TYPE = v_enum()
MCA_EXCEPTION_TYPE.HAL_MCE_RECORD = 0
MCA_EXCEPTION_TYPE.HAL_MCA_RECORD = 1
EVENT_TYPE = v_enum()
EVENT_TYPE.NotificationEvent = 0
EVENT_TYPE.SynchronizationEvent = 1
KSPIN_LOCK_QUEUE_NUMBER = v_enum()
KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare0 = 0
KSPIN_LOCK_QUEUE_NUMBER.LockQueueExpansionLock = 1
KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare2 = 2
KSPIN_LOCK_QUEUE_NUMBER.LockQueueSystemSpaceLock = 3
KSPIN_LOCK_QUEUE_NUMBER.LockQueueVacbLock = 4
KSPIN_LOCK_QUEUE_NUMBER.LockQueueMasterLock = 5
KSPIN_LOCK_QUEUE_NUMBER.LockQueueNonPagedPoolLock = 6
KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoCancelLock = 7
KSPIN_LOCK_QUEUE_NUMBER.LockQueueWorkQueueLock = 8
KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoVpbLock = 9
KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoDatabaseLock = 10
KSPIN_LOCK_QUEUE_NUMBER.LockQueueIoCompletionLock = 11
KSPIN_LOCK_QUEUE_NUMBER.LockQueueNtfsStructLock = 12
KSPIN_LOCK_QUEUE_NUMBER.LockQueueAfdWorkQueueLock = 13
KSPIN_LOCK_QUEUE_NUMBER.LockQueueBcbLock = 14
KSPIN_LOCK_QUEUE_NUMBER.LockQueueMmNonPagedPoolLock = 15
KSPIN_LOCK_QUEUE_NUMBER.LockQueueUnusedSpare16 = 16
KSPIN_LOCK_QUEUE_NUMBER.LockQueueMaximumLock = 17
TP_CALLBACK_PRIORITY = v_enum()
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_HIGH = 0
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_NORMAL = 1
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_LOW = 2
TP_CALLBACK_PRIORITY.TP_CALLBACK_PRIORITY_INVALID = 3
FSINFOCLASS = v_enum()
FSINFOCLASS.FileFsVolumeInformation = 0
FSINFOCLASS.FileFsLabelInformation = 1
FSINFOCLASS.FileFsSizeInformation = 2
FSINFOCLASS.FileFsDeviceInformation = 3
FSINFOCLASS.FileFsAttributeInformation = 4
FSINFOCLASS.FileFsControlInformation = 5
FSINFOCLASS.FileFsFullSizeInformation = 6
FSINFOCLASS.FileFsObjectIdInformation = 7
FSINFOCLASS.FileFsDriverPathInformation = 8
FSINFOCLASS.FileFsVolumeFlagsInformation = 9
FSINFOCLASS.FileFsMaximumInformation = 10
WORKING_SET_TYPE = v_enum()
WORKING_SET_TYPE.WorkingSetTypeUser = 0
WORKING_SET_TYPE.WorkingSetTypeSession = 1
WORKING_SET_TYPE.WorkingSetTypeSystemTypes = 2
WORKING_SET_TYPE.WorkingSetTypeSystemCache = 3
WORKING_SET_TYPE.WorkingSetTypePagedPool = 4
WORKING_SET_TYPE.WorkingSetTypeSystemPtes = 5
WORKING_SET_TYPE.WorkingSetTypeMaximum = 6
POOL_TYPE = v_enum()
POOL_TYPE.NonPagedPool = 0
POOL_TYPE.PagedPool = 1
POOL_TYPE.NonPagedPoolMustSucceed = 2
POOL_TYPE.DontUseThisType = 3
POOL_TYPE.NonPagedPoolCacheAligned = 4
POOL_TYPE.PagedPoolCacheAligned = 5
POOL_TYPE.NonPagedPoolCacheAlignedMustS = 6
POOL_TYPE.MaxPoolType = 7
POOL_TYPE.NonPagedPoolSession = 8
POOL_TYPE.PagedPoolSession = 9
POOL_TYPE.NonPagedPoolMustSucceedSession = 10
POOL_TYPE.DontUseThisTypeSession = 11
POOL_TYPE.NonPagedPoolCacheAlignedSession = 12
POOL_TYPE.PagedPoolCacheAlignedSession = 13
POOL_TYPE.NonPagedPoolCacheAlignedMustSSession = 14
IO_PRIORITY_HINT = v_enum()
IO_PRIORITY_HINT.IoPriorityVeryLow = 0
IO_PRIORITY_HINT.IoPriorityLow = 1
IO_PRIORITY_HINT.IoPriorityNormal = 2
IO_PRIORITY_HINT.IoPriorityHigh = 3
IO_PRIORITY_HINT.IoPriorityCritical = 4
IO_PRIORITY_HINT.MaxIoPriorityTypes = 5
MODE = v_enum()
MODE.KernelMode = 0
MODE.UserMode = 1
MODE.MaximumMode = 2
FS_FILTER_SECTION_SYNC_TYPE = v_enum()
FS_FILTER_SECTION_SYNC_TYPE.SyncTypeOther = 0
FS_FILTER_SECTION_SYNC_TYPE.SyncTypeCreateSection = 1
OB_OPEN_REASON = v_enum()
OB_OPEN_REASON.ObCreateHandle = 0
OB_OPEN_REASON.ObOpenHandle = 1
OB_OPEN_REASON.ObDuplicateHandle = 2
OB_OPEN_REASON.ObInheritHandle = 3
OB_OPEN_REASON.ObMaxOpenReason = 4
HEAP_FAILURE_TYPE = v_enum()
HEAP_FAILURE_TYPE.heap_failure_internal = 0
HEAP_FAILURE_TYPE.heap_failure_unknown = 1
HEAP_FAILURE_TYPE.heap_failure_generic = 2
HEAP_FAILURE_TYPE.heap_failure_entry_corruption = 3
HEAP_FAILURE_TYPE.heap_failure_multiple_entries_corruption = 4
HEAP_FAILURE_TYPE.heap_failure_virtual_block_corruption = 5
HEAP_FAILURE_TYPE.heap_failure_buffer_overrun = 6
HEAP_FAILURE_TYPE.heap_failure_buffer_underrun = 7
HEAP_FAILURE_TYPE.heap_failure_block_not_busy = 8
HEAP_FAILURE_TYPE.heap_failure_invalid_argument = 9
HEAP_FAILURE_TYPE.heap_failure_usage_after_free = 10
HEAP_FAILURE_TYPE.heap_failure_cross_heap_operation = 11
HEAP_FAILURE_TYPE.heap_failure_freelists_corruption = 12
HEAP_FAILURE_TYPE.heap_failure_listentry_corruption = 13
DEVICE_TEXT_TYPE = v_enum()
DEVICE_TEXT_TYPE.DeviceTextDescription = 0
DEVICE_TEXT_TYPE.DeviceTextLocationInformation = 1
POWER_STATE_TYPE = v_enum()
POWER_STATE_TYPE.SystemPowerState = 0
POWER_STATE_TYPE.DevicePowerState = 1
IRQ_PRIORITY = v_enum()
IRQ_PRIORITY.IrqPriorityUndefined = 0
IRQ_PRIORITY.IrqPriorityLow = 1
IRQ_PRIORITY.IrqPriorityNormal = 2
IRQ_PRIORITY.IrqPriorityHigh = 3
KWAIT_STATE = v_enum()
KWAIT_STATE.WaitInProgress = 0
KWAIT_STATE.WaitCommitted = 1
KWAIT_STATE.WaitAborted = 2
KWAIT_STATE.MaximumWaitState = 3
LSA_FOREST_TRUST_RECORD_TYPE = v_enum()
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustTopLevelName = 0
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustTopLevelNameEx = 1
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustDomainInfo = 2
LSA_FOREST_TRUST_RECORD_TYPE.ForestTrustRecordTypeLast = 3
IO_ALLOCATION_ACTION = v_enum()
IO_ALLOCATION_ACTION.KeepObject = 0
IO_ALLOCATION_ACTION.DeallocateObject = 1
IO_ALLOCATION_ACTION.DeallocateObjectKeepRegisters = 2
EXCEPTION_DISPOSITION = v_enum()
EXCEPTION_DISPOSITION.ExceptionContinueExecution = 0
EXCEPTION_DISPOSITION.ExceptionContinueSearch = 1
EXCEPTION_DISPOSITION.ExceptionNestedException = 2
EXCEPTION_DISPOSITION.ExceptionCollidedUnwind = 3
SECURITY_OPERATION_CODE = v_enum()
SECURITY_OPERATION_CODE.SetSecurityDescriptor = 0
SECURITY_OPERATION_CODE.QuerySecurityDescriptor = 1
SECURITY_OPERATION_CODE.DeleteSecurityDescriptor = 2
SECURITY_OPERATION_CODE.AssignSecurityDescriptor = 3
PP_NPAGED_LOOKASIDE_NUMBER = v_enum()
PP_NPAGED_LOOKASIDE_NUMBER.LookasideSmallIrpList = 0
PP_NPAGED_LOOKASIDE_NUMBER.LookasideMediumIrpList = 1
PP_NPAGED_LOOKASIDE_NUMBER.LookasideLargeIrpList = 2
PP_NPAGED_LOOKASIDE_NUMBER.LookasideMdlList = 3
PP_NPAGED_LOOKASIDE_NUMBER.LookasideCreateInfoList = 4
PP_NPAGED_LOOKASIDE_NUMBER.LookasideNameBufferList = 5
PP_NPAGED_LOOKASIDE_NUMBER.LookasideTwilightList = 6
PP_NPAGED_LOOKASIDE_NUMBER.LookasideCompletionList = 7
PP_NPAGED_LOOKASIDE_NUMBER.LookasideScratchBufferList = 8
PP_NPAGED_LOOKASIDE_NUMBER.LookasideMaximumList = 9
WHEA_ERROR_PACKET_DATA_FORMAT = v_enum()
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatIPFSalRecord = 0
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatXPFMCA = 1
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatMemory = 2
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatPCIExpress = 3
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatNMIPort = 4
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatPCIXBus = 5
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatPCIXDevice = 6
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatGeneric = 7
WHEA_ERROR_PACKET_DATA_FORMAT.WheaDataFormatMax = 8
FS_FILTER_STREAM_FO_NOTIFICATION_TYPE = v_enum()
FS_FILTER_STREAM_FO_NOTIFICATION_TYPE.NotifyTypeCreate = 0
FS_FILTER_STREAM_FO_NOTIFICATION_TYPE.NotifyTypeRetired = 1
DISPLAYCONFIG_SCANLINE_ORDERING = v_enum()
DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_UNSPECIFIED = 0
DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_PROGRESSIVE = 1
DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_INTERLACED = 2
DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_INTERLACED_UPPERFIELDFIRST = 3
DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_INTERLACED_LOWERFIELDFIRST = 4
DISPLAYCONFIG_SCANLINE_ORDERING.DISPLAYCONFIG_SCANLINE_ORDERING_FORCE_UINT32 = 5
SECURITY_IMPERSONATION_LEVEL = v_enum()
SECURITY_IMPERSONATION_LEVEL.SecurityAnonymous = 0
SECURITY_IMPERSONATION_LEVEL.SecurityIdentification = 1
SECURITY_IMPERSONATION_LEVEL.SecurityImpersonation = 2
SECURITY_IMPERSONATION_LEVEL.SecurityDelegation = 3
DEVICE_USAGE_NOTIFICATION_TYPE = v_enum()
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeUndefined = 0
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypePaging = 1
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeHibernation = 2
DEVICE_USAGE_NOTIFICATION_TYPE.DeviceUsageTypeDumpFile = 3
INTERFACE_TYPE = v_enum()
INTERFACE_TYPE.InterfaceTypeUndefined = 0
INTERFACE_TYPE.Internal = 1
INTERFACE_TYPE.Isa = 2
INTERFACE_TYPE.Eisa = 3
INTERFACE_TYPE.MicroChannel = 4
INTERFACE_TYPE.TurboChannel = 5
INTERFACE_TYPE.PCIBus = 6
INTERFACE_TYPE.VMEBus = 7
INTERFACE_TYPE.NuBus = 8
INTERFACE_TYPE.PCMCIABus = 9
INTERFACE_TYPE.CBus = 10
INTERFACE_TYPE.MPIBus = 11
INTERFACE_TYPE.MPSABus = 12
INTERFACE_TYPE.ProcessorInternal = 13
INTERFACE_TYPE.InternalPowerBus = 14
INTERFACE_TYPE.PNPISABus = 15
INTERFACE_TYPE.PNPBus = 16
INTERFACE_TYPE.Vmcs = 17
INTERFACE_TYPE.MaximumInterfaceType = 18
PS_RESOURCE_TYPE = v_enum()
PS_RESOURCE_TYPE.PsResourceNonPagedPool = 0
PS_RESOURCE_TYPE.PsResourcePagedPool = 1
PS_RESOURCE_TYPE.PsResourcePageFile = 2
PS_RESOURCE_TYPE.PsResourceWorkingSet = 3
PS_RESOURCE_TYPE.PsResourceCpuRate = 4
PS_RESOURCE_TYPE.PsResourceMax = 5
MM_PAGE_ACCESS_TYPE = v_enum()
MM_PAGE_ACCESS_TYPE.MmPteAccessType = 0
MM_PAGE_ACCESS_TYPE.MmCcReadAheadType = 1
MM_PAGE_ACCESS_TYPE.MmPfnRepurposeType = 2
MM_PAGE_ACCESS_TYPE.MmMaximumPageAccessType = 3
PF_FILE_ACCESS_TYPE = v_enum()
PF_FILE_ACCESS_TYPE.PfFileAccessTypeRead = 0
PF_FILE_ACCESS_TYPE.PfFileAccessTypeWrite = 1
PF_FILE_ACCESS_TYPE.PfFileAccessTypeMax = 2
HARDWARE_COUNTER_TYPE = v_enum()
HARDWARE_COUNTER_TYPE.PMCCounter = 0
HARDWARE_COUNTER_TYPE.MaxHardwareCounterType = 1
ReplacesCorHdrNumericDefines = v_enum()
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_ILONLY = 0
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_32BITREQUIRED = 1
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_IL_LIBRARY = 2
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_STRONGNAMESIGNED = 3
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_NATIVE_ENTRYPOINT = 4
ReplacesCorHdrNumericDefines.COMIMAGE_FLAGS_TRACKDEBUGDATA = 5
ReplacesCorHdrNumericDefines.COR_VERSION_MAJOR_V2 = 6
ReplacesCorHdrNumericDefines.COR_VERSION_MAJOR = 7
ReplacesCorHdrNumericDefines.COR_VERSION_MINOR = 8
ReplacesCorHdrNumericDefines.COR_DELETED_NAME_LENGTH = 9
ReplacesCorHdrNumericDefines.COR_VTABLEGAP_NAME_LENGTH = 10
ReplacesCorHdrNumericDefines.NATIVE_TYPE_MAX_CB = 11
ReplacesCorHdrNumericDefines.COR_ILMETHOD_SECT_SMALL_MAX_DATASIZE = 12
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_METHODRVA = 13
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_EHRVA = 14
ReplacesCorHdrNumericDefines.IMAGE_COR_MIH_BASICBLOCK = 15
ReplacesCorHdrNumericDefines.COR_VTABLE_32BIT = 16
ReplacesCorHdrNumericDefines.COR_VTABLE_64BIT = 17
ReplacesCorHdrNumericDefines.COR_VTABLE_FROM_UNMANAGED = 18
ReplacesCorHdrNumericDefines.COR_VTABLE_FROM_UNMANAGED_RETAIN_APPDOMAIN = 19
ReplacesCorHdrNumericDefines.COR_VTABLE_CALL_MOST_DERIVED = 20
ReplacesCorHdrNumericDefines.IMAGE_COR_EATJ_THUNK_SIZE = 21
ReplacesCorHdrNumericDefines.MAX_CLASS_NAME = 22
ReplacesCorHdrNumericDefines.MAX_PACKAGE_NAME = 23
SYSTEM_POWER_STATE = v_enum()
SYSTEM_POWER_STATE.PowerSystemUnspecified = 0
SYSTEM_POWER_STATE.PowerSystemWorking = 1
SYSTEM_POWER_STATE.PowerSystemSleeping1 = 2
SYSTEM_POWER_STATE.PowerSystemSleeping2 = 3
SYSTEM_POWER_STATE.PowerSystemSleeping3 = 4
SYSTEM_POWER_STATE.PowerSystemHibernate = 5
SYSTEM_POWER_STATE.PowerSystemShutdown = 6
SYSTEM_POWER_STATE.PowerSystemMaximum = 7
MEMORY_CACHING_TYPE_ORIG = v_enum()
MEMORY_CACHING_TYPE_ORIG.MmFrameBufferCached = 0
POWER_ACTION = v_enum()
POWER_ACTION.PowerActionNone = 0
POWER_ACTION.PowerActionReserved = 1
POWER_ACTION.PowerActionSleep = 2
POWER_ACTION.PowerActionHibernate = 3
POWER_ACTION.PowerActionShutdown = 4
POWER_ACTION.PowerActionShutdownReset = 5
POWER_ACTION.PowerActionShutdownOff = 6
POWER_ACTION.PowerActionWarmEject = 7
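# --- illustrative example (not generated code) ---------------------------
# The v_enum() objects in this module are plain attribute bags mapping
# symbolic names to integers.  A minimal sketch of reversing that mapping
# when pretty-printing a parsed field; reverse_enum is a hypothetical
# helper and assumes the names are stored as ordinary instance attributes.
def reverse_enum(enum_obj, value):
    for name, val in vars(enum_obj).items():
        if val == value:
            return name
    return None
# e.g. reverse_enum(POWER_ACTION, 3) == 'PowerActionHibernate'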
class _unnamed_9074(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.McaCod = v_uint16()
self.MsCod = v_uint16()
self.OtherInfo = v_uint32()
class _unnamed_9775(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Group = v_uint16()
self.MessageCount = v_uint16()
self.Vector = v_uint32()
self.Affinity = v_uint32()
class _unnamed_9770(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Level = v_uint16()
self.Group = v_uint16()
self.Vector = v_uint32()
self.Affinity = v_uint32()
class _unnamed_9079(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BankNumber = v_uint8()
self.Reserved2 = v_bytes(size=7) # FIXME Unknown Array Type
self.Status = MCI_STATS()
self.Address = MCI_ADDR()
self.Misc = v_uint64()
class IO_PRIORITY_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint32()
self.ThreadPriority = v_uint32()
self.PagePriority = v_uint32()
self.IoPriority = v_uint32()
class KEXECUTE_OPTIONS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExecuteDisable = v_uint8()
class SID(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Revision = v_uint8()
self.SubAuthorityCount = v_uint8()
self.IdentifierAuthority = SID_IDENTIFIER_AUTHORITY()
self.SubAuthority = v_bytes(size=4) # FIXME Unknown Array Type
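# --- illustrative example (not generated code) ---------------------------
# Sketch of rendering a parsed SID in the conventional
# "S-<revision>-<authority>-<subauthority...>" form.  The generated
# definition above declares SubAuthority as 4 raw bytes (see FIXME), i.e.
# room for a single 32-bit subauthority, so format_sid unpacks at most
# what is present; the helper is hypothetical, not part of the defs.
import struct

def format_sid(sid):
    # IdentifierAuthority.Value is 6 big-endian bytes; pad to 8 for '>Q'.
    auth = struct.unpack('>Q', b'\x00\x00' + sid.IdentifierAuthority.Value)[0]
    nsub = min(sid.SubAuthorityCount, len(sid.SubAuthority) // 4)
    subs = struct.unpack('<%dI' % nsub, sid.SubAuthority[:4 * nsub])
    return 'S-%d-%d' % (sid.Revision, auth) + ''.join('-%d' % s for s in subs)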
class WHEA_ERROR_PACKET_V2(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.Version = v_uint32()
self.Length = v_uint32()
self.Flags = WHEA_ERROR_PACKET_FLAGS()
self.ErrorType = v_uint32()
self.ErrorSeverity = v_uint32()
self.ErrorSourceId = v_uint32()
self.ErrorSourceType = v_uint32()
self.NotifyType = GUID()
self.Context = v_uint64()
self.DataFormat = v_uint32()
self.Reserved1 = v_uint32()
self.DataOffset = v_uint32()
self.DataLength = v_uint32()
self.PshedDataOffset = v_uint32()
self.PshedDataLength = v_uint32()
class _unnamed_8009(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.FsInformationClass = v_uint32()
class GROUP_AFFINITY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Mask = v_uint32()
self.Group = v_uint16()
self.Reserved = v_bytes(size=6) # FIXME Unknown Array Type
class KTSS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Backlink = v_uint16()
self.Reserved0 = v_uint16()
self.Esp0 = v_uint32()
self.Ss0 = v_uint16()
self.Reserved1 = v_uint16()
self.NotUsed1 = v_bytes(size=16) # FIXME Unknown Array Type
self.CR3 = v_uint32()
self.Eip = v_uint32()
self.EFlags = v_uint32()
self.Eax = v_uint32()
self.Ecx = v_uint32()
self.Edx = v_uint32()
self.Ebx = v_uint32()
self.Esp = v_uint32()
self.Ebp = v_uint32()
self.Esi = v_uint32()
self.Edi = v_uint32()
self.Es = v_uint16()
self.Reserved2 = v_uint16()
self.Cs = v_uint16()
self.Reserved3 = v_uint16()
self.Ss = v_uint16()
self.Reserved4 = v_uint16()
self.Ds = v_uint16()
self.Reserved5 = v_uint16()
self.Fs = v_uint16()
self.Reserved6 = v_uint16()
self.Gs = v_uint16()
self.Reserved7 = v_uint16()
self.LDT = v_uint16()
self.Reserved8 = v_uint16()
self.Flags = v_uint16()
self.IoMapBase = v_uint16()
self.IoMaps = v_uint16()
self.IntDirectionMap = v_bytes(size=32) # FIXME Unknown Array Type
class CURDIR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DosPath = UNICODE_STRING()
self.Handle = v_ptr32()
class PERFINFO_GROUPMASK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Masks = v_bytes(size=32) # FIXME Unknown Array Type
class HANDLE_TABLE_ENTRY_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AuditMask = v_uint32()
class _unnamed_9803(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = LARGE_INTEGER()
self.Length64 = v_uint32()
class _unnamed_9800(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = LARGE_INTEGER()
self.Length48 = v_uint32()
class WHEA_ERROR_RECORD_SECTION_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SectionOffset = v_uint32()
self.SectionLength = v_uint32()
self.Revision = WHEA_REVISION()
self.ValidBits = WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS()
self.Reserved = v_uint8()
self.Flags = WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS()
self.SectionType = GUID()
self.FRUId = GUID()
self.SectionSeverity = v_uint32()
self.FRUText = v_bytes(size=20) # FIXME Unknown Array Type
class PS_CPU_QUOTA_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.SessionId = v_uint32()
self.CpuShareWeight = v_uint32()
self.CapturedWeightData = PSP_CPU_SHARE_CAPTURED_WEIGHT_DATA()
self.DuplicateInputMarker = v_uint32()
self._pad0040 = v_bytes(size=36)
self.CycleCredit = v_uint64()
self.BlockCurrentGeneration = v_uint32()
self.CpuCyclePercent = v_uint32()
self.CyclesFinishedForCurrentGeneration = v_uint8()
self._pad0080 = v_bytes(size=47)
self.Cpu = v_uint8()
class _unnamed_9783(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Channel = v_uint32()
self.Port = v_uint32()
self.Reserved1 = v_uint32()
class RTL_TRACE_SEGMENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Magic = v_uint32()
self.Database = v_ptr32()
self.NextSegment = v_ptr32()
self.TotalSize = v_uint32()
self.SegmentStart = v_ptr32()
self.SegmentEnd = v_ptr32()
self.SegmentFree = v_ptr32()
class _unnamed_9787(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Data = v_bytes(size=12) # FIXME Unknown Array Type
class _unnamed_9789(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = v_uint32()
self.Length = v_uint32()
self.Reserved = v_uint32()
class DEVICE_CAPABILITIES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Version = v_uint16()
self.DeviceD1 = v_uint32()
self.Address = v_uint32()
self.UINumber = v_uint32()
self.DeviceState = v_uint32()
self.SystemWake = v_uint32()
self.DeviceWake = v_uint32()
self.D1Latency = v_uint32()
self.D2Latency = v_uint32()
self.D3Latency = v_uint32()
class _unnamed_7990(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.EaList = v_ptr32()
self.EaListLength = v_uint32()
self.EaIndex = v_uint32()
class _unnamed_7995(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
class KPROCESS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
self.ProfileListHead = LIST_ENTRY()
self.DirectoryTableBase = v_uint32()
self.LdtDescriptor = KGDTENTRY()
self.Int21Descriptor = KIDTENTRY()
self.ThreadListHead = LIST_ENTRY()
self.ProcessLock = v_uint32()
self.Affinity = KAFFINITY_EX()
self.ReadyListHead = LIST_ENTRY()
self.SwapListEntry = SINGLE_LIST_ENTRY()
self.ActiveProcessors = KAFFINITY_EX()
self.AutoAlignment = v_uint32()
self.BasePriority = v_uint8()
self.QuantumReset = v_uint8()
self.Visited = v_uint8()
self.Unused3 = v_uint8()
self.ThreadSeed = v_bytes(size=16) # FIXME Unknown Array Type
self.IdealNode = v_bytes(size=8) # FIXME Unknown Array Type
self.IdealGlobalNode = v_uint16()
self.Flags = KEXECUTE_OPTIONS()
self.Unused1 = v_uint8()
self.IopmOffset = v_uint16()
self._pad009c = v_bytes(size=2)
self.Unused4 = v_uint32()
self.StackCount = KSTACK_COUNT()
self.ProcessListEntry = LIST_ENTRY()
self._pad00b0 = v_bytes(size=4)
self.CycleTime = v_uint64()
self.KernelTime = v_uint32()
self.UserTime = v_uint32()
self.VdmTrapcHandler = v_ptr32()
class DEVICE_OBJECT_POWER_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class _unnamed_7909(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.Key = v_uint32()
self.ByteOffset = LARGE_INTEGER()
class _unnamed_10332(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length48 = v_uint32()
self.Alignment48 = v_uint32()
self.MinimumAddress = LARGE_INTEGER()
self.MaximumAddress = LARGE_INTEGER()
class WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Primary = v_uint32()
class TP_CALLBACK_ENVIRON_V3(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Version = v_uint32()
self.Pool = v_ptr32()
self.CleanupGroup = v_ptr32()
self.CleanupGroupCancelCallback = v_ptr32()
self.RaceDll = v_ptr32()
self.ActivationContext = v_ptr32()
self.FinalizationCallback = v_ptr32()
self.u = _unnamed_5798()
self.CallbackPriority = v_uint32()
self.Size = v_uint32()
class _unnamed_7902(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityContext = v_ptr32()
self.Options = v_uint32()
self.Reserved = v_uint16()
self.ShareAccess = v_uint16()
self.Parameters = v_ptr32()
class RTL_ACTIVATION_CONTEXT_STACK_FRAME(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Previous = v_ptr32()
self.ActivationContext = v_ptr32()
self.Flags = v_uint32()
class ALPC_PROCESS_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = EX_PUSH_LOCK()
self.ViewListHead = LIST_ENTRY()
self.PagedPoolQuotaCache = v_uint32()
class OBJECT_HANDLE_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HandleAttributes = v_uint32()
self.GrantedAccess = v_uint32()
class PROC_PERF_DOMAIN(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Link = LIST_ENTRY()
self.Master = v_ptr32()
self.Members = KAFFINITY_EX()
self.FeedbackHandler = v_ptr32()
self.GetFFHThrottleState = v_ptr32()
self.BoostPolicyHandler = v_ptr32()
self.PerfSelectionHandler = v_ptr32()
self.PerfHandler = v_ptr32()
self.Processors = v_ptr32()
self._pad0040 = v_bytes(size=4)
self.PerfChangeTime = v_uint64()
self.ProcessorCount = v_uint32()
self.PreviousFrequencyMhz = v_uint32()
self.CurrentFrequencyMhz = v_uint32()
self.PreviousFrequency = v_uint32()
self.CurrentFrequency = v_uint32()
self.CurrentPerfContext = v_uint32()
self.DesiredFrequency = v_uint32()
self.MaxFrequency = v_uint32()
self.MinPerfPercent = v_uint32()
self.MinThrottlePercent = v_uint32()
self.MaxPercent = v_uint32()
self.MinPercent = v_uint32()
self.ConstrainedMaxPercent = v_uint32()
self.ConstrainedMinPercent = v_uint32()
self.Coordination = v_uint8()
self._pad0084 = v_bytes(size=3)
self.PerfChangeIntervalCount = v_uint32()
class KTIMER_TABLE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = v_uint32()
self.Entry = LIST_ENTRY()
self._pad0010 = v_bytes(size=4)
self.Time = ULARGE_INTEGER()
class PS_CLIENT_SECURITY_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ImpersonationData = v_uint32()
class RTL_AVL_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BalancedRoot = RTL_BALANCED_LINKS()
self.OrderedPointer = v_ptr32()
self.WhichOrderedElement = v_uint32()
self.NumberGenericTableElements = v_uint32()
self.DepthOfTree = v_uint32()
self.RestartKey = v_ptr32()
self.DeleteCount = v_uint32()
self.CompareRoutine = v_ptr32()
self.AllocateRoutine = v_ptr32()
self.FreeRoutine = v_ptr32()
self.TableContext = v_ptr32()
class RTL_TRACE_DATABASE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Magic = v_uint32()
self.Flags = v_uint32()
self.Tag = v_uint32()
self.SegmentList = v_ptr32()
self.MaximumSize = v_uint32()
self.CurrentSize = v_uint32()
self.Owner = v_ptr32()
self.Lock = RTL_CRITICAL_SECTION()
self.NoOfBuckets = v_uint32()
self.Buckets = v_ptr32()
self.HashFunction = v_ptr32()
self.NoOfTraces = v_uint32()
self.NoOfHits = v_uint32()
self.HashCounter = v_bytes(size=64) # FIXME Unknown Array Type
class OWNER_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OwnerThread = v_uint32()
self.IoPriorityBoosted = v_uint32()
class DEVOBJ_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.DeviceObject = v_ptr32()
self.PowerFlags = v_uint32()
self.Dope = v_ptr32()
self.ExtensionFlags = v_uint32()
self.DeviceNode = v_ptr32()
self.AttachedTo = v_ptr32()
self.StartIoCount = v_uint32()
self.StartIoKey = v_uint32()
self.StartIoFlags = v_uint32()
self.Vpb = v_ptr32()
self.DependentList = LIST_ENTRY()
self.ProviderList = LIST_ENTRY()
class HEAP_LOCAL_SEGMENT_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Hint = v_ptr32()
self.ActiveSubsegment = v_ptr32()
self.CachedItems = v_bytes(size=64) # FIXME Unknown Array Type
self.SListHeader = SLIST_HEADER()
self.Counters = HEAP_BUCKET_COUNTERS()
self.LocalData = v_ptr32()
self.LastOpSequence = v_uint32()
self.BucketIndex = v_uint16()
self.LastUsed = v_uint16()
class HANDLE_TABLE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Object = v_ptr32()
self.GrantedAccess = v_uint32()
class HEAP_COUNTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TotalMemoryReserved = v_uint32()
self.TotalMemoryCommitted = v_uint32()
self.TotalMemoryLargeUCR = v_uint32()
self.TotalSizeInVirtualBlocks = v_uint32()
self.TotalSegments = v_uint32()
self.TotalUCRs = v_uint32()
self.CommittOps = v_uint32()
self.DeCommitOps = v_uint32()
self.LockAcquires = v_uint32()
self.LockCollisions = v_uint32()
self.CommitRate = v_uint32()
self.DecommittRate = v_uint32()
self.CommitFailures = v_uint32()
self.InBlockCommitFailures = v_uint32()
self.CompactHeapCalls = v_uint32()
self.CompactedUCRs = v_uint32()
self.AllocAndFreeOps = v_uint32()
self.InBlockDeccommits = v_uint32()
self.InBlockDeccomitSize = v_uint32()
self.HighWatermarkSize = v_uint32()
self.LastPolledSize = v_uint32()
class MAILSLOT_CREATE_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MailslotQuota = v_uint32()
self.MaximumMessageSize = v_uint32()
self.ReadTimeout = LARGE_INTEGER()
self.TimeoutSpecified = v_uint8()
class FS_FILTER_CALLBACK_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SizeOfFsFilterCallbackData = v_uint32()
self.Operation = v_uint8()
self.Reserved = v_uint8()
self._pad0008 = v_bytes(size=2)
self.DeviceObject = v_ptr32()
self.FileObject = v_ptr32()
self.Parameters = FS_FILTER_PARAMETERS()
class PPM_IDLE_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DomainMembers = KAFFINITY_EX()
self.IdleCheck = v_ptr32()
self.IdleHandler = v_ptr32()
self.Context = v_ptr32()
self.Latency = v_uint32()
self.Power = v_uint32()
self.TimeCheck = v_uint32()
self.StateFlags = v_uint32()
self.PromotePercent = v_uint8()
self.DemotePercent = v_uint8()
self.PromotePercentBase = v_uint8()
self.DemotePercentBase = v_uint8()
self.StateType = v_uint8()
class ACCESS_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OperationID = LUID()
self.SecurityEvaluated = v_uint8()
self.GenerateAudit = v_uint8()
self.GenerateOnClose = v_uint8()
self.PrivilegesAllocated = v_uint8()
self.Flags = v_uint32()
self.RemainingDesiredAccess = v_uint32()
self.PreviouslyGrantedAccess = v_uint32()
self.OriginalDesiredAccess = v_uint32()
self.SubjectSecurityContext = SECURITY_SUBJECT_CONTEXT()
self.SecurityDescriptor = v_ptr32()
self.AuxData = v_ptr32()
self.Privileges = _unnamed_7708()
self.AuditPrivileges = v_uint8()
self._pad0064 = v_bytes(size=3)
self.ObjectName = UNICODE_STRING()
self.ObjectTypeName = UNICODE_STRING()
class TP_CALLBACK_INSTANCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class PROC_IDLE_ACCOUNTING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.StateCount = v_uint32()
self.TotalTransitions = v_uint32()
self.ResetCount = v_uint32()
self._pad0010 = v_bytes(size=4)
self.StartTime = v_uint64()
self.BucketLimits = v_bytes(size=128) # FIXME Unknown Array Type
self.State = v_bytes(size=128) # FIXME Unknown Array Type
class GDI_TEB_BATCH(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Offset = v_uint32()
self.HDC = v_uint32()
self.Buffer = v_bytes(size=1240) # FIXME Unknown Array Type
class THREAD_PERFORMANCE_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Version = v_uint16()
self.ProcessorNumber = PROCESSOR_NUMBER()
self.ContextSwitches = v_uint32()
self.HwCountersCount = v_uint32()
self.UpdateCount = v_uint64()
self.WaitReasonBitMap = v_uint64()
self.HardwareCounters = v_uint64()
self.CycleTime = COUNTER_READING()
self.HwCounters = COUNTER_READING()
class PAGEFAULT_HISTORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class ECP_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class _unnamed_8043(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Vpb = v_ptr32()
self.DeviceObject = v_ptr32()
class SECTION_OBJECT_POINTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DataSectionObject = v_ptr32()
self.SharedCacheMap = v_ptr32()
self.ImageSectionObject = v_ptr32()
class _unnamed_8047(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Srb = v_ptr32()
class KTRAP_FRAME(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DbgEbp = v_uint32()
self.DbgEip = v_uint32()
self.DbgArgMark = v_uint32()
self.DbgArgPointer = v_uint32()
self.TempSegCs = v_uint16()
self.Logging = v_uint8()
self.Reserved = v_uint8()
self.TempEsp = v_uint32()
self.Dr0 = v_uint32()
self.Dr1 = v_uint32()
self.Dr2 = v_uint32()
self.Dr3 = v_uint32()
self.Dr6 = v_uint32()
self.Dr7 = v_uint32()
self.SegGs = v_uint32()
self.SegEs = v_uint32()
self.SegDs = v_uint32()
self.Edx = v_uint32()
self.Ecx = v_uint32()
self.Eax = v_uint32()
self.PreviousPreviousMode = v_uint32()
self.ExceptionList = v_ptr32()
self.SegFs = v_uint32()
self.Edi = v_uint32()
self.Esi = v_uint32()
self.Ebx = v_uint32()
self.Ebp = v_uint32()
self.ErrCode = v_uint32()
self.Eip = v_uint32()
self.SegCs = v_uint32()
self.EFlags = v_uint32()
self.HardwareEsp = v_uint32()
self.HardwareSegSs = v_uint32()
self.V86Es = v_uint32()
self.V86Ds = v_uint32()
self.V86Fs = v_uint32()
self.V86Gs = v_uint32()
class _unnamed_8120(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Capabilities = v_ptr32()
class MCI_ADDR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Address = v_uint32()
self.Reserved = v_uint32()
class IO_TIMER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.TimerFlag = v_uint16()
self.TimerList = LIST_ENTRY()
self.TimerRoutine = v_ptr32()
self.Context = v_ptr32()
self.DeviceObject = v_ptr32()
class WHEA_REVISION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MinorRevision = v_uint8()
self.MajorRevision = v_uint8()
class TP_CLEANUP_GROUP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class PROC_IDLE_SNAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Time = v_uint64()
self.Idle = v_uint64()
class SECURITY_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Revision = v_uint8()
self.Sbz1 = v_uint8()
self.Control = v_uint16()
self.Owner = v_ptr32()
self.Group = v_ptr32()
self.Sacl = v_ptr32()
self.Dacl = v_ptr32()
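# --- illustrative example (not generated code) ---------------------------
# When Control carries the SE_SELF_RELATIVE bit (0x8000), Owner/Group/
# Sacl/Dacl hold byte offsets from the start of the descriptor instead of
# pointers.  Sketch of locating the owner SID under that assumption, given
# the address the descriptor was read from:
SE_SELF_RELATIVE = 0x8000

def owner_sid_address(sd, sd_addr):
    if not sd.Owner:
        return None                      # no owner present
    if sd.Control & SE_SELF_RELATIVE:
        return sd_addr + sd.Owner        # offset form
    return sd.Owner                      # absolute pointer form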
class _unnamed_7708(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InitialPrivilegeSet = INITIAL_PRIVILEGE_SET()
class OBJECT_TYPE_INITIALIZER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint16()
self.ObjectTypeFlags = v_uint8()
self._pad0004 = v_bytes(size=1)
self.ObjectTypeCode = v_uint32()
self.InvalidAttributes = v_uint32()
self.GenericMapping = GENERIC_MAPPING()
self.ValidAccessMask = v_uint32()
self.RetainAccess = v_uint32()
self.PoolType = v_uint32()
self.DefaultPagedPoolCharge = v_uint32()
self.DefaultNonPagedPoolCharge = v_uint32()
self.DumpProcedure = v_ptr32()
self.OpenProcedure = v_ptr32()
self.CloseProcedure = v_ptr32()
self.DeleteProcedure = v_ptr32()
self.ParseProcedure = v_ptr32()
self.SecurityProcedure = v_ptr32()
self.QueryNameProcedure = v_ptr32()
self.OkayToCloseProcedure = v_ptr32()
class TP_DIRECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Callback = v_ptr32()
self.NumaNode = v_uint32()
self.IdealProcessor = v_uint8()
class XSTATE_SAVE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Reserved1 = v_uint64()
self.Reserved2 = v_uint32()
self.Prev = v_ptr32()
self.Reserved3 = v_ptr32()
self.Thread = v_ptr32()
self.Reserved4 = v_ptr32()
self.Level = v_uint8()
class HEAP_ENTRY_EXTRA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AllocatorBackTraceIndex = v_uint16()
self.TagIndex = v_uint16()
self.Settable = v_uint32()
class HEAP_PSEUDO_TAG_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Allocs = v_uint32()
self.Frees = v_uint32()
self.Size = v_uint32()
class PAGED_LOOKASIDE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.L = GENERAL_LOOKASIDE()
self.Lock__ObsoleteButDoNotDelete = FAST_MUTEX()
class _unnamed_9563(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Balance = v_uint32()
class LARGE_INTEGER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
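# --- illustrative example (not generated code) ---------------------------
# LARGE_INTEGER carries one 64-bit quantity as two 32-bit halves.  Minimal
# sketch of recombining them as a signed 64-bit integer, assuming the
# fields read back as plain ints via vstruct attribute access:
def large_integer_value(li):
    val = (li.HighPart << 32) | li.LowPart
    return val - (1 << 64) if val & (1 << 63) else val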
class NPAGED_LOOKASIDE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.L = GENERAL_LOOKASIDE()
self.Lock__ObsoleteButDoNotDelete = v_uint32()
class _unnamed_7790(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UserApcRoutine = v_ptr32()
self.UserApcContext = v_ptr32()
class VPB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.Flags = v_uint16()
self.VolumeLabelLength = v_uint16()
self.DeviceObject = v_ptr32()
self.RealDevice = v_ptr32()
self.SerialNumber = v_uint32()
self.ReferenceCount = v_uint32()
self.VolumeLabel = v_bytes(size=64) # FIXME Unknown Array Type
class PP_LOOKASIDE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.P = v_ptr32()
self.L = v_ptr32()
class OBJECT_NAME_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Name = UNICODE_STRING()
class IO_RESOURCE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Version = v_uint16()
self.Revision = v_uint16()
self.Count = v_uint32()
self.Descriptors = v_uint32()
class KUSER_SHARED_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TickCountLowDeprecated = v_uint32()
self.TickCountMultiplier = v_uint32()
self.InterruptTime = KSYSTEM_TIME()
self.SystemTime = KSYSTEM_TIME()
self.TimeZoneBias = KSYSTEM_TIME()
self.ImageNumberLow = v_uint16()
self.ImageNumberHigh = v_uint16()
self.NtSystemRoot = v_bytes(size=520) # FIXME Unknown Array Type
self.MaxStackTraceDepth = v_uint32()
self.CryptoExponent = v_uint32()
self.TimeZoneId = v_uint32()
self.LargePageMinimum = v_uint32()
self.Reserved2 = v_bytes(size=28) # FIXME Unknown Array Type
self.NtProductType = v_uint32()
self.ProductTypeIsValid = v_uint8()
self._pad026c = v_bytes(size=3)
self.NtMajorVersion = v_uint32()
self.NtMinorVersion = v_uint32()
self.ProcessorFeatures = v_bytes(size=64) # FIXME Unknown Array Type
self.Reserved1 = v_uint32()
self.Reserved3 = v_uint32()
self.TimeSlip = v_uint32()
self.AlternativeArchitecture = v_uint32()
self.AltArchitecturePad = v_bytes(size=4) # FIXME Unknown Array Type
self.SystemExpirationDate = LARGE_INTEGER()
self.SuiteMask = v_uint32()
self.KdDebuggerEnabled = v_uint8()
self.NXSupportPolicy = v_uint8()
self._pad02d8 = v_bytes(size=2)
self.ActiveConsoleId = v_uint32()
self.DismountCount = v_uint32()
self.ComPlusPackage = v_uint32()
self.LastSystemRITEventTickCount = v_uint32()
self.NumberOfPhysicalPages = v_uint32()
self.SafeBootMode = v_uint8()
self.TscQpcData = v_uint8()
self.TscQpcPad = v_bytes(size=2) # FIXME Unknown Array Type
self.SharedDataFlags = v_uint32()
self.DataFlagsPad = v_bytes(size=4) # FIXME Unknown Array Type
self.TestRetInstruction = v_uint64()
self.SystemCall = v_uint32()
self.SystemCallReturn = v_uint32()
self.SystemCallPad = v_bytes(size=24) # FIXME Unknown Array Type
self.TickCount = KSYSTEM_TIME()
self.TickCountPad = v_bytes(size=4) # FIXME Unknown Array Type
self.Cookie = v_uint32()
self.CookiePad = v_bytes(size=4) # FIXME Unknown Array Type
self.ConsoleSessionForegroundProcessId = v_uint64()
self.Wow64SharedInformation = v_bytes(size=64) # FIXME Unknown Array Type
self.UserModeGlobalLogger = v_bytes(size=32) # FIXME Unknown Array Type
self.ImageFileExecutionOptions = v_uint32()
self.LangGenerationCount = v_uint32()
self.Reserved5 = v_uint64()
self.InterruptTimeBias = v_uint64()
self.TscQpcBias = v_uint64()
self.ActiveProcessorCount = v_uint32()
self.ActiveGroupCount = v_uint16()
self.Reserved4 = v_uint16()
self.AitSamplingValue = v_uint32()
self.AppCompatFlag = v_uint32()
self.SystemDllNativeRelocation = v_uint64()
self.SystemDllWowRelocation = v_uint32()
self.XStatePad = v_bytes(size=4) # FIXME Unknown Array Type
self.XState = XSTATE_CONFIGURATION()
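# --- illustrative example (not generated code) ---------------------------
# GetTickCount() derives milliseconds from this shared page without a
# system call: the 64-bit TickCount (read tear-free, as in the
# KSYSTEM_TIME sketch further down) is scaled by TickCountMultiplier with
# a 24-bit shift.  Sketch, assuming kusd is a parsed, consistent
# KUSER_SHARED_DATA:
def tick_count_ms(kusd):
    ticks = (kusd.TickCount.High1Time << 32) | kusd.TickCount.LowPart
    return (ticks * kusd.TickCountMultiplier) >> 24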
class SYSTEM_POWER_STATE_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Reserved1 = v_uint32()
class FS_FILTER_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AcquireForModifiedPageWriter = _unnamed_9854()
class HEAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = HEAP_ENTRY()
self.SegmentSignature = v_uint32()
self.SegmentFlags = v_uint32()
self.SegmentListEntry = LIST_ENTRY()
self.Heap = v_ptr32()
self.BaseAddress = v_ptr32()
self.NumberOfPages = v_uint32()
self.FirstEntry = v_ptr32()
self.LastValidEntry = v_ptr32()
self.NumberOfUnCommittedPages = v_uint32()
self.NumberOfUnCommittedRanges = v_uint32()
self.SegmentAllocatorBackTraceIndex = v_uint16()
self.Reserved = v_uint16()
self.UCRSegmentList = LIST_ENTRY()
self.Flags = v_uint32()
self.ForceFlags = v_uint32()
self.CompatibilityFlags = v_uint32()
self.EncodeFlagMask = v_uint32()
self.Encoding = HEAP_ENTRY()
self.PointerKey = v_uint32()
self.Interceptor = v_uint32()
self.VirtualMemoryThreshold = v_uint32()
self.Signature = v_uint32()
self.SegmentReserve = v_uint32()
self.SegmentCommit = v_uint32()
self.DeCommitFreeBlockThreshold = v_uint32()
self.DeCommitTotalFreeThreshold = v_uint32()
self.TotalFreeSize = v_uint32()
self.MaximumAllocationSize = v_uint32()
self.ProcessHeapsListIndex = v_uint16()
self.HeaderValidateLength = v_uint16()
self.HeaderValidateCopy = v_ptr32()
self.NextAvailableTagIndex = v_uint16()
self.MaximumTagIndex = v_uint16()
self.TagEntries = v_ptr32()
self.UCRList = LIST_ENTRY()
self.AlignRound = v_uint32()
self.AlignMask = v_uint32()
self.VirtualAllocdBlocks = LIST_ENTRY()
self.SegmentList = LIST_ENTRY()
self.AllocatorBackTraceIndex = v_uint16()
self._pad00b4 = v_bytes(size=2)
self.NonDedicatedListLength = v_uint32()
self.BlocksIndex = v_ptr32()
self.UCRIndex = v_ptr32()
self.PseudoTagEntries = v_ptr32()
self.FreeLists = LIST_ENTRY()
self.LockVariable = v_ptr32()
self.CommitRoutine = v_ptr32()
self.FrontEndHeap = v_ptr32()
self.FrontHeapLockCount = v_uint16()
self.FrontEndHeapType = v_uint8()
self._pad00dc = v_bytes(size=1)
self.Counters = HEAP_COUNTERS()
self.TuningParameters = HEAP_TUNING_PARAMETERS()
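# --- illustrative example (not generated code) ---------------------------
# On Windows 7, when HEAP.EncodeFlagMask is nonzero each HEAP_ENTRY header
# is XOR-encoded with the 8-byte HEAP.Encoding header.  Sketch of decoding
# one raw entry header, assuming both arguments are raw bytes read from
# target memory:
def decode_heap_entry(entry_bytes, encoding_bytes):
    return bytes(a ^ b for a, b in zip(entry_bytes[:8], encoding_bytes[:8]))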
class IO_STATUS_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Status = v_uint32()
self.Information = v_uint32()
class PRIVILEGE_SET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PrivilegeCount = v_uint32()
self.Control = v_uint32()
self.Privilege = v_uint32()
class CM_RESOURCE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
self.List = v_uint32()
class EPROCESS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Pcb = KPROCESS()
self.ProcessLock = EX_PUSH_LOCK()
self._pad00d0 = v_bytes(size=4)
self.CreateTime = LARGE_INTEGER()
self.ExitTime = LARGE_INTEGER()
self.RundownProtect = EX_RUNDOWN_REF()
self.UniqueProcessId = v_ptr32()
self.ActiveProcessLinks = LIST_ENTRY()
self.ProcessQuotaUsage = v_bytes(size=8) # FIXME Unknown Array Type
self.ProcessQuotaPeak = v_bytes(size=8) # FIXME Unknown Array Type
self.CommitCharge = v_uint32()
self.QuotaBlock = v_ptr32()
self.CpuQuotaBlock = v_ptr32()
self.PeakVirtualSize = v_uint32()
self.VirtualSize = v_uint32()
self.SessionProcessLinks = LIST_ENTRY()
self.DebugPort = v_ptr32()
self.ExceptionPortData = v_ptr32()
self.ObjectTable = v_ptr32()
self.Token = EX_FAST_REF()
self.WorkingSetPage = v_uint32()
self.AddressCreationLock = EX_PUSH_LOCK()
self.RotateInProgress = v_ptr32()
self.ForkInProgress = v_ptr32()
self.HardwareTrigger = v_uint32()
self.PhysicalVadRoot = v_ptr32()
self.CloneRoot = v_ptr32()
self.NumberOfPrivatePages = v_uint32()
self.NumberOfLockedPages = v_uint32()
self.Win32Process = v_ptr32()
self.Job = v_ptr32()
self.SectionObject = v_ptr32()
self.SectionBaseAddress = v_ptr32()
self.Cookie = v_uint32()
self.Spare8 = v_uint32()
self.WorkingSetWatch = v_ptr32()
self.Win32WindowStation = v_ptr32()
self.InheritedFromUniqueProcessId = v_ptr32()
self.LdtInformation = v_ptr32()
self.VdmObjects = v_ptr32()
self.ConsoleHostProcess = v_uint32()
self.DeviceMap = v_ptr32()
self.EtwDataSource = v_ptr32()
self.FreeTebHint = v_ptr32()
self._pad0190 = v_bytes(size=4)
self.PageDirectoryPte = HARDWARE_PTE_X86()
self._pad0198 = v_bytes(size=4)
self.Session = v_ptr32()
self.ImageFileName = v_bytes(size=15) # FIXME Unknown Array Type
self.PriorityClass = v_uint8()
self.JobLinks = LIST_ENTRY()
self.LockedPagesList = v_ptr32()
self.ThreadListHead = LIST_ENTRY()
self.SecurityPort = v_ptr32()
self.PaeTop = v_ptr32()
self.ActiveThreads = v_uint32()
self.ImagePathHash = v_uint32()
self.DefaultHardErrorProcessing = v_uint32()
self.LastThreadExitStatus = v_uint32()
self.Peb = v_ptr32()
self.PrefetchTrace = EX_FAST_REF()
self.ReadOperationCount = LARGE_INTEGER()
self.WriteOperationCount = LARGE_INTEGER()
self.OtherOperationCount = LARGE_INTEGER()
self.ReadTransferCount = LARGE_INTEGER()
self.WriteTransferCount = LARGE_INTEGER()
self.OtherTransferCount = LARGE_INTEGER()
self.CommitChargeLimit = v_uint32()
self.CommitChargePeak = v_uint32()
self.AweInfo = v_ptr32()
self.SeAuditProcessCreationInfo = SE_AUDIT_PROCESS_CREATION_INFO()
self.Vm = MMSUPPORT()
self.MmProcessLinks = LIST_ENTRY()
self.HighestUserAddress = v_ptr32()
self.ModifiedPageCount = v_uint32()
self.Flags2 = v_uint32()
self.Flags = v_uint32()
self.ExitStatus = v_uint32()
self.VadRoot = MM_AVL_TABLE()
self.AlpcContext = ALPC_PROCESS_CONTEXT()
self.TimerResolutionLink = LIST_ENTRY()
self.RequestedTimerResolution = v_uint32()
self.ActiveThreadsHighWatermark = v_uint32()
self.SmallestTimerResolution = v_uint32()
self.TimerResolutionStackRecord = v_ptr32()
class TP_TASK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Callbacks = v_ptr32()
self.NumaNode = v_uint32()
self.IdealProcessor = v_uint8()
self._pad000c = v_bytes(size=3)
self.PostGuard = TP_NBQ_GUARD()
self.NBQNode = v_ptr32()
class TEB_ACTIVE_FRAME_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
self.FrameName = v_ptr32()
class KTIMER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
self.DueTime = ULARGE_INTEGER()
self.TimerListEntry = LIST_ENTRY()
self.Dpc = v_ptr32()
self.Period = v_uint32()
class CM_PARTIAL_RESOURCE_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint8()
self.ShareDisposition = v_uint8()
self.Flags = v_uint16()
self.u = _unnamed_9547()
class _unnamed_7890(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityContext = v_ptr32()
self.Options = v_uint32()
self.Reserved = v_uint16()
self.ShareAccess = v_uint16()
self.Parameters = v_ptr32()
class OBJECT_ATTRIBUTES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.RootDirectory = v_ptr32()
self.ObjectName = v_ptr32()
self.Attributes = v_uint32()
self.SecurityDescriptor = v_ptr32()
self.SecurityQualityOfService = v_ptr32()
class CM_FULL_RESOURCE_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InterfaceType = v_uint32()
self.BusNumber = v_uint32()
self.PartialResourceList = CM_PARTIAL_RESOURCE_LIST()
class KTIMER_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TimerExpiry = v_bytes(size=64) # FIXME Unknown Array Type
self.TimerEntries = v_bytes(size=64) # FIXME Unknown Array Type
class FAST_IO_DISPATCH(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SizeOfFastIoDispatch = v_uint32()
self.FastIoCheckIfPossible = v_ptr32()
self.FastIoRead = v_ptr32()
self.FastIoWrite = v_ptr32()
self.FastIoQueryBasicInfo = v_ptr32()
self.FastIoQueryStandardInfo = v_ptr32()
self.FastIoLock = v_ptr32()
self.FastIoUnlockSingle = v_ptr32()
self.FastIoUnlockAll = v_ptr32()
self.FastIoUnlockAllByKey = v_ptr32()
self.FastIoDeviceControl = v_ptr32()
self.AcquireFileForNtCreateSection = v_ptr32()
self.ReleaseFileForNtCreateSection = v_ptr32()
self.FastIoDetachDevice = v_ptr32()
self.FastIoQueryNetworkOpenInfo = v_ptr32()
self.AcquireForModWrite = v_ptr32()
self.MdlRead = v_ptr32()
self.MdlReadComplete = v_ptr32()
self.PrepareMdlWrite = v_ptr32()
self.MdlWriteComplete = v_ptr32()
self.FastIoReadCompressed = v_ptr32()
self.FastIoWriteCompressed = v_ptr32()
self.MdlReadCompleteCompressed = v_ptr32()
self.MdlWriteCompleteCompressed = v_ptr32()
self.FastIoQueryOpen = v_ptr32()
self.ReleaseForModWrite = v_ptr32()
self.AcquireForCcFlush = v_ptr32()
self.ReleaseForCcFlush = v_ptr32()
class _unnamed_8164(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InPath = v_uint8()
self.Reserved = v_bytes(size=3) # FIXME Unknown Array Type
self.Type = v_uint32()
class _unnamed_9856(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SyncType = v_uint32()
self.PageProtection = v_uint32()
class RTL_DYNAMIC_HASH_TABLE_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ChainHead = v_ptr32()
self.PrevLinkage = v_ptr32()
self.Signature = v_uint32()
class MMWSL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class PROC_IDLE_STATE_ACCOUNTING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TotalTime = v_uint64()
self.IdleTransitions = v_uint32()
self.FailedTransitions = v_uint32()
self.InvalidBucketIndex = v_uint32()
self._pad0018 = v_bytes(size=4)
self.MinTime = v_uint64()
self.MaxTime = v_uint64()
self.IdleTimeBuckets = v_uint64()
class _unnamed_9067(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Mca = _unnamed_9079()
class KSPECIAL_REGISTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Cr0 = v_uint32()
self.Cr2 = v_uint32()
self.Cr3 = v_uint32()
self.Cr4 = v_uint32()
self.KernelDr0 = v_uint32()
self.KernelDr1 = v_uint32()
self.KernelDr2 = v_uint32()
self.KernelDr3 = v_uint32()
self.KernelDr6 = v_uint32()
self.KernelDr7 = v_uint32()
self.Gdtr = DESCRIPTOR()
self.Idtr = DESCRIPTOR()
self.Tr = v_uint16()
self.Ldtr = v_uint16()
self.Reserved = v_bytes(size=24) # FIXME Unknown Array Type
class RTL_CRITICAL_SECTION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DebugInfo = v_ptr32()
self.LockCount = v_uint32()
self.RecursionCount = v_uint32()
self.OwningThread = v_ptr32()
self.LockSemaphore = v_ptr32()
self.SpinCount = v_uint32()
class KSYSTEM_TIME(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.High1Time = v_uint32()
self.High2Time = v_uint32()
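# --- illustrative example (not generated code) ---------------------------
# KSYSTEM_TIME is the kernel's tear-detecting time format: the writer
# updates the two high parts on either side of LowPart, so a snapshot with
# High1Time == High2Time is internally consistent.  Sketch, assuming a
# hypothetical read_struct(addr) helper that re-reads a KSYSTEM_TIME from
# target memory on each call:
def read_ksystem_time(addr, read_struct, retries=8):
    for _ in range(retries):
        t = read_struct(addr)
        if t.High1Time == t.High2Time:       # consistent snapshot
            return (t.High1Time << 32) | t.LowPart
    raise RuntimeError('torn KSYSTEM_TIME read')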
class PROC_IDLE_STATE_BUCKET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TotalTime = v_uint64()
self.MinTime = v_uint64()
self.MaxTime = v_uint64()
self.Count = v_uint32()
class RTL_STD_LIST_HEAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.Lock = RTL_STACK_DATABASE_LOCK()
class FLOATING_SAVE_AREA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ControlWord = v_uint32()
self.StatusWord = v_uint32()
self.TagWord = v_uint32()
self.ErrorOffset = v_uint32()
self.ErrorSelector = v_uint32()
self.DataOffset = v_uint32()
self.DataSelector = v_uint32()
self.RegisterArea = v_bytes(size=80) # FIXME Unknown Array Type
self.Cr0NpxState = v_uint32()
class DPH_HEAP_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.pNextAlloc = v_ptr32()
self._pad0010 = v_bytes(size=12)
self.pUserAllocation = v_ptr32()
self.pVirtualBlock = v_ptr32()
self.nVirtualBlockSize = v_uint32()
self.nVirtualAccessSize = v_uint32()
self.nUserRequestedSize = v_uint32()
self.nUserActualSize = v_uint32()
self.UserValue = v_ptr32()
self.UserFlags = v_uint32()
self.StackTrace = v_ptr32()
self.AdjacencyEntry = LIST_ENTRY()
self.pVirtualRegion = v_ptr32()
class KQUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
self.EntryListHead = LIST_ENTRY()
self.CurrentCount = v_uint32()
self.MaximumCount = v_uint32()
self.ThreadListHead = LIST_ENTRY()
class _unnamed_8017(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
        self.Length = v_uint32()
self.Key = v_uint32()
self.ByteOffset = LARGE_INTEGER()
class LUID_AND_ATTRIBUTES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Luid = LUID()
self.Attributes = v_uint32()
class _unnamed_8012(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OutputBufferLength = v_uint32()
self.InputBufferLength = v_uint32()
self.FsControlCode = v_uint32()
self.Type3InputBuffer = v_ptr32()
class HEAP_BUCKET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BlockUnits = v_uint16()
self.SizeIndex = v_uint8()
self.UseAffinity = v_uint8()
class CM_PARTIAL_RESOURCE_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Version = v_uint16()
self.Revision = v_uint16()
self.Count = v_uint32()
self.PartialDescriptors = v_uint32()
class KTHREAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
self.CycleTime = v_uint64()
self.HighCycleTime = v_uint32()
self._pad0020 = v_bytes(size=4)
self.QuantumTarget = v_uint64()
self.InitialStack = v_ptr32()
self.StackLimit = v_ptr32()
self.KernelStack = v_ptr32()
self.ThreadLock = v_uint32()
self.WaitRegister = KWAIT_STATUS_REGISTER()
self.Running = v_uint8()
self.Alerted = v_bytes(size=2) # FIXME Unknown Array Type
self.KernelStackResident = v_uint32()
self.ApcState = KAPC_STATE()
self.NextProcessor = v_uint32()
self.DeferredProcessor = v_uint32()
self.ApcQueueLock = v_uint32()
self.ContextSwitches = v_uint32()
self.State = v_uint8()
self.NpxState = v_uint8()
self.WaitIrql = v_uint8()
self.WaitMode = v_uint8()
self.WaitStatus = v_uint32()
self.WaitBlockList = v_ptr32()
self.WaitListEntry = LIST_ENTRY()
self.Queue = v_ptr32()
self.WaitTime = v_uint32()
self.KernelApcDisable = v_uint16()
self.SpecialApcDisable = v_uint16()
self.Teb = v_ptr32()
self._pad0090 = v_bytes(size=4)
self.Timer = KTIMER()
self.AutoAlignment = v_uint32()
self.ServiceTable = v_ptr32()
self.WaitBlock = v_ptr32()
self.QueueListEntry = LIST_ENTRY()
self.TrapFrame = v_ptr32()
self.FirstArgument = v_ptr32()
self.CallbackStack = v_ptr32()
self.ApcStateIndex = v_uint8()
self.BasePriority = v_uint8()
self.PriorityDecrement = v_uint8()
self.Preempted = v_uint8()
self.AdjustReason = v_uint8()
self.AdjustIncrement = v_uint8()
self.PreviousMode = v_uint8()
self.Saturation = v_uint8()
self.SystemCallNumber = v_uint32()
self.FreezeCount = v_uint32()
self.UserAffinity = GROUP_AFFINITY()
self.Process = v_ptr32()
self.Affinity = GROUP_AFFINITY()
self.IdealProcessor = v_uint32()
self.UserIdealProcessor = v_uint32()
self.ApcStatePointer = v_bytes(size=8) # FIXME Unknown Array Type
self.SavedApcState = KAPC_STATE()
self.SuspendCount = v_uint8()
self.Spare1 = v_uint8()
self.OtherPlatformFill = v_uint8()
self._pad018c = v_bytes(size=1)
self.Win32Thread = v_ptr32()
self.StackBase = v_ptr32()
self.SuspendApc = KAPC()
self.UserTime = v_uint32()
self.SuspendSemaphore = KSEMAPHORE()
self.SListFaultCount = v_uint32()
self.ThreadListEntry = LIST_ENTRY()
self.MutantListHead = LIST_ENTRY()
self.SListFaultAddress = v_ptr32()
self.ThreadCounters = v_ptr32()
self.XStateSave = v_ptr32()
class CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ContextFlags = v_uint32()
self.Dr0 = v_uint32()
self.Dr1 = v_uint32()
self.Dr2 = v_uint32()
self.Dr3 = v_uint32()
self.Dr6 = v_uint32()
self.Dr7 = v_uint32()
self.FloatSave = FLOATING_SAVE_AREA()
self.SegGs = v_uint32()
self.SegFs = v_uint32()
self.SegEs = v_uint32()
self.SegDs = v_uint32()
self.Edi = v_uint32()
self.Esi = v_uint32()
self.Ebx = v_uint32()
self.Edx = v_uint32()
self.Ecx = v_uint32()
self.Eax = v_uint32()
self.Ebp = v_uint32()
self.Eip = v_uint32()
self.SegCs = v_uint32()
self.EFlags = v_uint32()
self.Esp = v_uint32()
self.SegSs = v_uint32()
self.ExtendedRegisters = v_bytes(size=512) # FIXME Unknown Array Type
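# --- illustrative example (not generated code) ---------------------------
# ContextFlags selects which register groups in CONTEXT are valid.  These
# are the documented x86 winnt.h values, reproduced here for reference;
# they are not part of the generated definitions.
CONTEXT_i386               = 0x00010000
CONTEXT_CONTROL            = CONTEXT_i386 | 0x01  # Ebp, Eip, SegCs, EFlags, Esp, SegSs
CONTEXT_INTEGER            = CONTEXT_i386 | 0x02  # Edi, Esi, Ebx, Edx, Ecx, Eax
CONTEXT_SEGMENTS           = CONTEXT_i386 | 0x04  # SegDs, SegEs, SegFs, SegGs
CONTEXT_FLOATING_POINT     = CONTEXT_i386 | 0x08  # FloatSave
CONTEXT_DEBUG_REGISTERS    = CONTEXT_i386 | 0x10  # Dr0-Dr3, Dr6, Dr7
CONTEXT_EXTENDED_REGISTERS = CONTEXT_i386 | 0x20  # ExtendedRegisters
CONTEXT_FULL               = CONTEXT_CONTROL | CONTEXT_INTEGER | CONTEXT_SEGMENTS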
class MCI_STATS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MciStats = _unnamed_9074()
class _unnamed_9793(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DataSize = v_uint32()
self.Reserved1 = v_uint32()
self.Reserved2 = v_uint32()
class _unnamed_9797(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = LARGE_INTEGER()
self.Length40 = v_uint32()
class PROC_PERF_LOAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BusyPercentage = v_uint8()
self.FrequencyPercentage = v_uint8()
class AUX_ACCESS_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PrivilegesUsed = v_ptr32()
self.GenericMapping = GENERIC_MAPPING()
self.AccessesToAudit = v_uint32()
self.MaximumAuditMask = v_uint32()
self.TransactionId = GUID()
self.NewSecurityDescriptor = v_ptr32()
self.ExistingSecurityDescriptor = v_ptr32()
self.ParentSecurityDescriptor = v_ptr32()
self.DeRefSecurityDescriptor = v_ptr32()
self.SDLock = v_ptr32()
self.AccessReasons = ACCESS_REASONS()
class _unnamed_10337(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length64 = v_uint32()
self.Alignment64 = v_uint32()
self.MinimumAddress = LARGE_INTEGER()
self.MaximumAddress = LARGE_INTEGER()
class HEAP_LOCAL_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeletedSubSegments = SLIST_HEADER()
self.CrtZone = v_ptr32()
self.LowFragHeap = v_ptr32()
self.Sequence = v_uint32()
self._pad0018 = v_bytes(size=4)
self.SegmentInfo = v_uint32()
class _unnamed_8680(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CriticalSection = RTL_CRITICAL_SECTION()
class DPH_BLOCK_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.StartStamp = v_uint32()
self.Heap = v_ptr32()
self.RequestedSize = v_uint32()
self.ActualSize = v_uint32()
self.FreeQueue = LIST_ENTRY()
self.StackTrace = v_ptr32()
self.EndStamp = v_uint32()
class _unnamed_10308(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MinimumVector = v_uint32()
self.MaximumVector = v_uint32()
self.AffinityPolicy = v_uint16()
self.Group = v_uint16()
self.PriorityPolicy = v_uint32()
self.TargetedProcessors = v_uint32()
class PF_KERNEL_GLOBALS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AccessBufferAgeThreshold = v_uint64()
self.AccessBufferRef = EX_RUNDOWN_REF()
self.AccessBufferExistsEvent = KEVENT()
self.AccessBufferMax = v_uint32()
self.AccessBufferList = SLIST_HEADER()
self.StreamSequenceNumber = v_uint32()
self.Flags = v_uint32()
self.ScenarioPrefetchCount = v_uint32()
class _unnamed_9086(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Address = v_uint64()
self.Type = v_uint64()
class _unnamed_7851(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceQueueEntry = KDEVICE_QUEUE_ENTRY()
self.Thread = v_ptr32()
self.AuxiliaryBuffer = v_ptr32()
self.ListEntry = LIST_ENTRY()
self.CurrentStackLocation = v_ptr32()
self.OriginalFileObject = v_ptr32()
class EVENT_DATA_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Ptr = v_uint64()
self.Size = v_uint32()
self.Reserved = v_uint32()
class IO_DRIVER_CREATE_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self._pad0004 = v_bytes(size=2)
self.ExtraCreateParameter = v_ptr32()
self.DeviceObjectHint = v_ptr32()
self.TxnParameters = v_ptr32()
class EJOB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Event = KEVENT()
self.JobLinks = LIST_ENTRY()
self.ProcessListHead = LIST_ENTRY()
self.JobLock = ERESOURCE()
self.TotalUserTime = LARGE_INTEGER()
self.TotalKernelTime = LARGE_INTEGER()
self.ThisPeriodTotalUserTime = LARGE_INTEGER()
self.ThisPeriodTotalKernelTime = LARGE_INTEGER()
self.TotalPageFaultCount = v_uint32()
self.TotalProcesses = v_uint32()
self.ActiveProcesses = v_uint32()
self.TotalTerminatedProcesses = v_uint32()
self.PerProcessUserTimeLimit = LARGE_INTEGER()
self.PerJobUserTimeLimit = LARGE_INTEGER()
self.MinimumWorkingSetSize = v_uint32()
self.MaximumWorkingSetSize = v_uint32()
self.LimitFlags = v_uint32()
self.ActiveProcessLimit = v_uint32()
self.Affinity = KAFFINITY_EX()
self.PriorityClass = v_uint8()
self._pad00c4 = v_bytes(size=3)
self.AccessState = v_ptr32()
self.UIRestrictionsClass = v_uint32()
self.EndOfJobTimeAction = v_uint32()
self.CompletionPort = v_ptr32()
self.CompletionKey = v_ptr32()
self.SessionId = v_uint32()
self.SchedulingClass = v_uint32()
self.ReadOperationCount = v_uint64()
self.WriteOperationCount = v_uint64()
self.OtherOperationCount = v_uint64()
self.ReadTransferCount = v_uint64()
self.WriteTransferCount = v_uint64()
self.OtherTransferCount = v_uint64()
self.ProcessMemoryLimit = v_uint32()
self.JobMemoryLimit = v_uint32()
self.PeakProcessMemoryUsed = v_uint32()
self.PeakJobMemoryUsed = v_uint32()
self.CurrentJobMemoryUsed = v_uint64()
self.MemoryLimitsLock = EX_PUSH_LOCK()
self.JobSetLinks = LIST_ENTRY()
self.MemberLevel = v_uint32()
self.JobFlags = v_uint32()
class HANDLE_TRACE_DEBUG_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.RefCount = v_uint32()
self.TableSize = v_uint32()
self.BitMaskFlags = v_uint32()
self.CloseCompactionLock = FAST_MUTEX()
self.CurrentStackIndex = v_uint32()
self.TraceDb = v_uint32()
class KPROCESSOR_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ContextFrame = CONTEXT()
self.SpecialRegisters = KSPECIAL_REGISTERS()
class KiIoAccessMap(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DirectionMap = v_bytes(size=32) # FIXME Unknown Array Type
self.IoMap = v_bytes(size=8196) # FIXME Unknown Array Type
class _unnamed_8209(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SystemContext = v_uint32()
self.Type = v_uint32()
self.State = POWER_STATE()
self.ShutdownType = v_uint32()
class KAPC(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint8()
self.SpareByte0 = v_uint8()
self.Size = v_uint8()
self.SpareByte1 = v_uint8()
self.SpareLong0 = v_uint32()
self.Thread = v_ptr32()
self.ApcListEntry = LIST_ENTRY()
self.KernelRoutine = v_ptr32()
self.RundownRoutine = v_ptr32()
self.NormalRoutine = v_ptr32()
self.NormalContext = v_ptr32()
self.SystemArgument1 = v_ptr32()
self.SystemArgument2 = v_ptr32()
self.ApcStateIndex = v_uint8()
self.ApcMode = v_uint8()
self.Inserted = v_uint8()
class _unnamed_6579(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Bytes = _unnamed_9134()
class RTL_STACK_DATABASE_LOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = RTL_SRWLOCK()
class SID_IDENTIFIER_AUTHORITY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Value = v_bytes(size=6) # FIXME Unknown Array Type
class XSTATE_FEATURE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Offset = v_uint32()
self.Size = v_uint32()
class WHEA_TIMESTAMP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Seconds = v_uint64()
class ACTIVATION_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class RTL_CRITICAL_SECTION_DEBUG(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.CreatorBackTraceIndex = v_uint16()
self.CriticalSection = v_ptr32()
self.ProcessLocksList = LIST_ENTRY()
self.EntryCount = v_uint32()
self.ContentionCount = v_uint32()
self.Flags = v_uint32()
self.CreatorBackTraceIndexHigh = v_uint16()
self.SpareUSHORT = v_uint16()
class DISPATCHER_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint8()
self.TimerControlFlags = v_uint8()
self.ThreadControlFlags = v_uint8()
self.TimerMiscFlags = v_uint8()
self.SignalState = v_uint32()
self.WaitListHead = LIST_ENTRY()
class ASSEMBLY_STORAGE_MAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class _unnamed_9134(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BaseMid = v_uint8()
self.Flags1 = v_uint8()
self.Flags2 = v_uint8()
self.BaseHi = v_uint8()
class PROCESSOR_POWER_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IdleStates = v_ptr32()
self._pad0008 = v_bytes(size=4)
self.IdleTimeLast = v_uint64()
self.IdleTimeTotal = v_uint64()
self.IdleTimeEntry = v_uint64()
self.IdleAccounting = v_ptr32()
self.Hypervisor = v_uint32()
self.PerfHistoryTotal = v_uint32()
self.ThermalConstraint = v_uint8()
self.PerfHistoryCount = v_uint8()
self.PerfHistorySlot = v_uint8()
self.Reserved = v_uint8()
self.LastSysTime = v_uint32()
self.WmiDispatchPtr = v_uint32()
self.WmiInterfaceEnabled = v_uint32()
self._pad0040 = v_bytes(size=4)
self.FFHThrottleStateInfo = PPM_FFH_THROTTLE_STATE_INFO()
self.PerfActionDpc = KDPC()
self.PerfActionMask = v_uint32()
self._pad0088 = v_bytes(size=4)
self.IdleCheck = PROC_IDLE_SNAP()
self.PerfCheck = PROC_IDLE_SNAP()
self.Domain = v_ptr32()
self.PerfConstraint = v_ptr32()
self.Load = v_ptr32()
self.PerfHistory = v_ptr32()
self.Utility = v_uint32()
self.OverUtilizedHistory = v_uint32()
self.AffinityCount = v_uint32()
self.AffinityHistory = v_uint32()
class _unnamed_8055(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.StartSid = v_ptr32()
self.SidList = v_ptr32()
self.SidListLength = v_uint32()
class POWER_SEQUENCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SequenceD1 = v_uint32()
self.SequenceD2 = v_uint32()
self.SequenceD3 = v_uint32()
class DPH_HEAP_ROOT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.HeapFlags = v_uint32()
self.HeapCritSect = v_ptr32()
self.nRemoteLockAcquired = v_uint32()
self.pVirtualStorageListHead = v_ptr32()
self.pVirtualStorageListTail = v_ptr32()
self.nVirtualStorageRanges = v_uint32()
self.nVirtualStorageBytes = v_uint32()
self.BusyNodesTable = RTL_AVL_TABLE()
self.NodeToAllocate = v_ptr32()
self.nBusyAllocations = v_uint32()
self.nBusyAllocationBytesCommitted = v_uint32()
self.pFreeAllocationListHead = v_ptr32()
self.pFreeAllocationListTail = v_ptr32()
self.nFreeAllocations = v_uint32()
self.nFreeAllocationBytesCommitted = v_uint32()
self.AvailableAllocationHead = LIST_ENTRY()
self.nAvailableAllocations = v_uint32()
self.nAvailableAllocationBytesCommitted = v_uint32()
self.pUnusedNodeListHead = v_ptr32()
self.pUnusedNodeListTail = v_ptr32()
self.nUnusedNodes = v_uint32()
self.nBusyAllocationBytesAccessible = v_uint32()
self.pNodePoolListHead = v_ptr32()
self.pNodePoolListTail = v_ptr32()
self.nNodePools = v_uint32()
self.nNodePoolBytes = v_uint32()
self.NextHeap = LIST_ENTRY()
self.ExtraFlags = v_uint32()
self.Seed = v_uint32()
self.NormalHeap = v_ptr32()
self.CreateStackTrace = v_ptr32()
self.FirstThread = v_ptr32()
class JOB_ACCESS_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class SECURITY_QUALITY_OF_SERVICE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.ImpersonationLevel = v_uint32()
self.ContextTrackingMode = v_uint8()
self.EffectiveOnly = v_uint8()
class COMPRESSED_DATA_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CompressionFormatAndEngine = v_uint16()
self.CompressionUnitShift = v_uint8()
self.ChunkShift = v_uint8()
self.ClusterShift = v_uint8()
self.Reserved = v_uint8()
self.NumberOfChunks = v_uint16()
self.CompressedChunkSizes = v_bytes(size=4) # FIXME Unknown Array Type
class WHEA_ERROR_RECORD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = WHEA_ERROR_RECORD_HEADER()
        self.SectionDescriptor = WHEA_ERROR_RECORD_SECTION_DESCRIPTOR()
class PS_PER_CPU_QUOTA_CACHE_AWARE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SortedListEntry = LIST_ENTRY()
self.IdleOnlyListHead = LIST_ENTRY()
self.CycleBaseAllowance = v_uint64()
self.CyclesRemaining = v_uint64()
self.CurrentGeneration = v_uint32()
class PROC_PERF_CONSTRAINT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Prcb = v_ptr32()
self.PerfContext = v_uint32()
self.PercentageCap = v_uint32()
self.ThermalCap = v_uint32()
self.TargetFrequency = v_uint32()
self.AcumulatedFullFrequency = v_uint32()
self.AcumulatedZeroFrequency = v_uint32()
self.FrequencyHistoryTotal = v_uint32()
self.AverageFrequency = v_uint32()
class LUID(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
class CLIENT_ID(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UniqueProcess = v_ptr32()
self.UniqueThread = v_ptr32()
class RTL_STACK_TRACE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HashChain = RTL_STD_LIST_ENTRY()
self.TraceCount = v_uint16()
self.IndexHigh = v_uint16()
self.Index = v_uint16()
self.Depth = v_uint16()
self.BackTrace = v_bytes(size=128) # FIXME Unknown Array Type
class OBJECT_DUMP_CONTROL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Stream = v_ptr32()
self.Detail = v_uint32()
class HANDLE_TRACE_DB_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ClientId = CLIENT_ID()
self.Handle = v_ptr32()
self.Type = v_uint32()
self.StackTrace = v_bytes(size=64) # FIXME Unknown Array Type
class GENERAL_LOOKASIDE_POOL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListHead = SLIST_HEADER()
self.Depth = v_uint16()
self.MaximumDepth = v_uint16()
self.TotalAllocates = v_uint32()
self.AllocateMisses = v_uint32()
self.TotalFrees = v_uint32()
self.FreeMisses = v_uint32()
self.Type = v_uint32()
self.Tag = v_uint32()
self.Size = v_uint32()
self.AllocateEx = v_ptr32()
self.FreeEx = v_ptr32()
self.ListEntry = LIST_ENTRY()
self.LastTotalAllocates = v_uint32()
self.LastAllocateMisses = v_uint32()
self.Future = v_bytes(size=8) # FIXME Unknown Array Type
class HARDWARE_PTE_X86(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Valid = v_uint32()
class RTL_SRWLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Locked = v_uint32()
class HEAP_TAG_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Allocs = v_uint32()
self.Frees = v_uint32()
self.Size = v_uint32()
self.TagIndex = v_uint16()
self.CreatorBackTraceIndex = v_uint16()
self.TagName = v_bytes(size=48) # FIXME Unknown Array Type
class STRING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint16()
self.MaximumLength = v_uint16()
self.Buffer = v_ptr32()
class TP_POOL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class LIST_ENTRY32(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flink = v_uint32()
self.Blink = v_uint32()
class SINGLE_LIST_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
class _unnamed_7812(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Create = _unnamed_7874()
class PPM_FFH_THROTTLE_STATE_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.EnableLogging = v_uint8()
self._pad0004 = v_bytes(size=3)
self.MismatchCount = v_uint32()
self.Initialized = v_uint8()
self._pad0010 = v_bytes(size=7)
self.LastValue = v_uint64()
self.LastLogTickCount = LARGE_INTEGER()
class KDEVICE_QUEUE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceListEntry = LIST_ENTRY()
self.SortKey = v_uint32()
self.Inserted = v_uint8()
class _unnamed_8027(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityInformation = v_uint32()
self.Length = v_uint32()
class CACHED_KSTACK_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SListHead = SLIST_HEADER()
self.MinimumFree = v_uint32()
self.Misses = v_uint32()
self.MissesLast = v_uint32()
self.Pad0 = v_uint32()
class HEAP_FAILURE_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Version = v_uint32()
self.StructureSize = v_uint32()
self.FailureType = v_uint32()
self.HeapAddress = v_ptr32()
self.Address = v_ptr32()
self.Param1 = v_ptr32()
self.Param2 = v_ptr32()
self.Param3 = v_ptr32()
self.PreviousBlock = v_ptr32()
self.NextBlock = v_ptr32()
self.ExpectedEncodedEntry = HEAP_ENTRY()
self.ExpectedDecodedEntry = HEAP_ENTRY()
self.StackTrace = v_bytes(size=128) # FIXME Unknown Array Type
class _unnamed_9370(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AsULONG = v_uint32()
class EX_FAST_REF(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Object = v_ptr32()
class INTERLOCK_SEQ(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Depth = v_uint16()
self.FreeEntryOffset = v_uint16()
self.Sequence = v_uint32()
class KSPIN_LOCK_QUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.Lock = v_ptr32()
class WHEA_ERROR_PACKET_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PreviousError = v_uint32()
class FS_FILTER_CALLBACKS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SizeOfFsFilterCallbacks = v_uint32()
self.Reserved = v_uint32()
self.PreAcquireForSectionSynchronization = v_ptr32()
self.PostAcquireForSectionSynchronization = v_ptr32()
self.PreReleaseForSectionSynchronization = v_ptr32()
self.PostReleaseForSectionSynchronization = v_ptr32()
self.PreAcquireForCcFlush = v_ptr32()
self.PostAcquireForCcFlush = v_ptr32()
self.PreReleaseForCcFlush = v_ptr32()
self.PostReleaseForCcFlush = v_ptr32()
self.PreAcquireForModifiedPageWriter = v_ptr32()
self.PostAcquireForModifiedPageWriter = v_ptr32()
self.PreReleaseForModifiedPageWriter = v_ptr32()
self.PostReleaseForModifiedPageWriter = v_ptr32()
class _unnamed_10298(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.Alignment = v_uint32()
self.MinimumAddress = LARGE_INTEGER()
self.MaximumAddress = LARGE_INTEGER()
class MM_DRIVER_VERIFIER_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Level = v_uint32()
self.RaiseIrqls = v_uint32()
self.AcquireSpinLocks = v_uint32()
self.SynchronizeExecutions = v_uint32()
self.AllocationsAttempted = v_uint32()
self.AllocationsSucceeded = v_uint32()
self.AllocationsSucceededSpecialPool = v_uint32()
self.AllocationsWithNoTag = v_uint32()
self.TrimRequests = v_uint32()
self.Trims = v_uint32()
self.AllocationsFailed = v_uint32()
self.AllocationsFailedDeliberately = v_uint32()
self.Loads = v_uint32()
self.Unloads = v_uint32()
self.UnTrackedPool = v_uint32()
self.UserTrims = v_uint32()
self.CurrentPagedPoolAllocations = v_uint32()
self.CurrentNonPagedPoolAllocations = v_uint32()
self.PeakPagedPoolAllocations = v_uint32()
self.PeakNonPagedPoolAllocations = v_uint32()
self.PagedBytes = v_uint32()
self.NonPagedBytes = v_uint32()
self.PeakPagedBytes = v_uint32()
self.PeakNonPagedBytes = v_uint32()
self.BurstAllocationsFailedDeliberately = v_uint32()
self.SessionTrims = v_uint32()
self.OptionChanges = v_uint32()
self.VerifyMode = v_uint32()
self.PreviousBucketName = UNICODE_STRING()
self.ActivityCounter = v_uint32()
self.PreviousActivityCounter = v_uint32()
self.WorkerTrimRequests = v_uint32()
class IO_RESOURCE_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Option = v_uint8()
self.Type = v_uint8()
self.ShareDisposition = v_uint8()
self.Spare1 = v_uint8()
self.Flags = v_uint16()
self.Spare2 = v_uint16()
self.u = _unnamed_9722()
class EX_PUSH_LOCK_CACHE_AWARE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Locks = v_bytes(size=128) # FIXME Unknown Array Type
class RTL_TRACE_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Magic = v_uint32()
self.Count = v_uint32()
self.Size = v_uint32()
self.UserCount = v_uint32()
self.UserSize = v_uint32()
self.UserContext = v_ptr32()
self.Next = v_ptr32()
self.Trace = v_ptr32()
class IMAGE_OPTIONAL_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Magic = v_uint16()
self.MajorLinkerVersion = v_uint8()
self.MinorLinkerVersion = v_uint8()
self.SizeOfCode = v_uint32()
self.SizeOfInitializedData = v_uint32()
self.SizeOfUninitializedData = v_uint32()
self.AddressOfEntryPoint = v_uint32()
self.BaseOfCode = v_uint32()
self.BaseOfData = v_uint32()
self.ImageBase = v_uint32()
self.SectionAlignment = v_uint32()
self.FileAlignment = v_uint32()
self.MajorOperatingSystemVersion = v_uint16()
self.MinorOperatingSystemVersion = v_uint16()
self.MajorImageVersion = v_uint16()
self.MinorImageVersion = v_uint16()
self.MajorSubsystemVersion = v_uint16()
self.MinorSubsystemVersion = v_uint16()
self.Win32VersionValue = v_uint32()
self.SizeOfImage = v_uint32()
self.SizeOfHeaders = v_uint32()
self.CheckSum = v_uint32()
self.Subsystem = v_uint16()
self.DllCharacteristics = v_uint16()
self.SizeOfStackReserve = v_uint32()
self.SizeOfStackCommit = v_uint32()
self.SizeOfHeapReserve = v_uint32()
self.SizeOfHeapCommit = v_uint32()
self.LoaderFlags = v_uint32()
self.NumberOfRvaAndSizes = v_uint32()
        self.DataDirectory = vstruct.VArray([IMAGE_DATA_DIRECTORY() for i in range(16)])  # IMAGE_DATA_DIRECTORY[16]; the generator had collapsed this array to a single uint32
class SCSI_REQUEST_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class ETHREAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Tcb = KTHREAD()
self.CreateTime = LARGE_INTEGER()
self.ExitTime = LARGE_INTEGER()
self.ExitStatus = v_uint32()
self.PostBlockList = LIST_ENTRY()
self.TerminationPort = v_ptr32()
self.ActiveTimerListLock = v_uint32()
self.ActiveTimerListHead = LIST_ENTRY()
self.Cid = CLIENT_ID()
self.KeyedWaitSemaphore = KSEMAPHORE()
self.ClientSecurity = PS_CLIENT_SECURITY_CONTEXT()
self.IrpList = LIST_ENTRY()
self.TopLevelIrp = v_uint32()
self.DeviceToVerify = v_ptr32()
self.CpuQuotaApc = v_ptr32()
self.Win32StartAddress = v_ptr32()
self.LegacyPowerObject = v_ptr32()
self.ThreadListEntry = LIST_ENTRY()
self.RundownProtect = EX_RUNDOWN_REF()
self.ThreadLock = EX_PUSH_LOCK()
self.ReadClusterSize = v_uint32()
self.MmLockOrdering = v_uint32()
self.CrossThreadFlags = v_uint32()
self.SameThreadPassiveFlags = v_uint32()
self.SameThreadApcFlags = v_uint32()
self.CacheManagerActive = v_uint8()
self.DisablePageFaultClustering = v_uint8()
self.ActiveFaultCount = v_uint8()
self.LockOrderState = v_uint8()
self.AlpcMessageId = v_uint32()
self.AlpcMessage = v_ptr32()
self.AlpcWaitListEntry = LIST_ENTRY()
self.CacheManagerCount = v_uint32()
self.IoBoostCount = v_uint32()
self.IrpListLock = v_uint32()
self.ReservedForSynchTracking = v_ptr32()
self.CmCallbackListHead = SINGLE_LIST_ENTRY()
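
# Illustrative sketch (not part of the generated definitions): pulling the
# owning process/thread IDs out of a raw ETHREAD captured from a Win7 x86
# memory image. The embedded KTHREAD (Tcb) is defined elsewhere in this
# module; ethread_bytes is assumed to start at the ETHREAD base.
def ethread_cid(ethread_bytes):
    et = ETHREAD()
    et.vsParse(ethread_bytes)
    return et.Cid.UniqueProcess, et.Cid.UniqueThread
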
class FAST_MUTEX(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
self.Owner = v_ptr32()
self.Contention = v_uint32()
self.Event = KEVENT()
self.OldIrql = v_uint32()
class WHEA_ERROR_RECORD_HEADER_VALIDBITS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PlatformId = v_uint32()
class KDEVICE_QUEUE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.DeviceListHead = LIST_ENTRY()
self.Lock = v_uint32()
self.Busy = v_uint8()
class _unnamed_8156(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DeviceTextType = v_uint32()
self.LocaleId = v_uint32()
class _unnamed_8151(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IdType = v_uint32()
class IO_SECURITY_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityQos = v_ptr32()
self.AccessState = v_ptr32()
self.DesiredAccess = v_uint32()
self.FullCreateOptions = v_uint32()
class TERMINATION_PORT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.Port = v_ptr32()
class PROC_HISTORY_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Utility = v_uint16()
self.Frequency = v_uint8()
self.Reserved = v_uint8()
class IO_CLIENT_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NextExtension = v_ptr32()
self.ClientIdentificationAddress = v_ptr32()
class INITIAL_PRIVILEGE_SET(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PrivilegeCount = v_uint32()
self.Control = v_uint32()
self.Privilege = v_uint32()
class WHEA_ERROR_RECORD_HEADER_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Recovered = v_uint32()
class XSTATE_CONFIGURATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.EnabledFeatures = v_uint64()
self.Size = v_uint32()
self.OptimizedSave = v_uint32()
        self.Features = v_uint32()  # FIXME generator collapsed the XSTATE_FEATURE[64] array to a single uint32
class KWAIT_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WaitListEntry = LIST_ENTRY()
self.Thread = v_ptr32()
self.Object = v_ptr32()
self.NextWaitBlock = v_ptr32()
self.WaitKey = v_uint16()
self.WaitType = v_uint8()
self.BlockState = v_uint8()
class ACTIVATION_CONTEXT_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class FILE_NETWORK_OPEN_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CreationTime = LARGE_INTEGER()
self.LastAccessTime = LARGE_INTEGER()
self.LastWriteTime = LARGE_INTEGER()
self.ChangeTime = LARGE_INTEGER()
self.AllocationSize = LARGE_INTEGER()
self.EndOfFile = LARGE_INTEGER()
self.FileAttributes = v_uint32()
class DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Pad = v_uint16()
self.Limit = v_uint16()
self.Base = v_uint32()
class _unnamed_8022(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.OutputBufferLength = v_uint32()
self.InputBufferLength = v_uint32()
self.IoControlCode = v_uint32()
self.Type3InputBuffer = v_ptr32()
class HEAP_USERDATA_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SFreeListEntry = SINGLE_LIST_ENTRY()
self.Reserved = v_ptr32()
self.SizeIndex = v_uint32()
self.Signature = v_uint32()
class RTL_DRIVE_LETTER_CURDIR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint16()
self.Length = v_uint16()
self.TimeStamp = v_uint32()
self.DosPath = STRING()
class CACHE_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Level = v_uint8()
self.Associativity = v_uint8()
self.LineSize = v_uint16()
self.Size = v_uint32()
self.Type = v_uint32()
class ULARGE_INTEGER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
class TEB_ACTIVE_FRAME(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
self.Previous = v_ptr32()
self.Context = v_ptr32()
class GENERAL_LOOKASIDE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListHead = SLIST_HEADER()
self.Depth = v_uint16()
self.MaximumDepth = v_uint16()
self.TotalAllocates = v_uint32()
self.AllocateMisses = v_uint32()
self.TotalFrees = v_uint32()
self.FreeMisses = v_uint32()
self.Type = v_uint32()
self.Tag = v_uint32()
self.Size = v_uint32()
self.AllocateEx = v_ptr32()
self.FreeEx = v_ptr32()
self.ListEntry = LIST_ENTRY()
self.LastTotalAllocates = v_uint32()
self.LastAllocateMisses = v_uint32()
self.Future = v_bytes(size=8) # FIXME Unknown Array Type
class _unnamed_7775(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AsynchronousParameters = _unnamed_7790()
class KWAIT_STATUS_REGISTER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint8()
class KGDTENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LimitLow = v_uint16()
self.BaseLow = v_uint16()
self.HighWord = _unnamed_6579()
class NAMED_PIPE_CREATE_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NamedPipeType = v_uint32()
self.ReadMode = v_uint32()
self.CompletionMode = v_uint32()
self.MaximumInstances = v_uint32()
self.InboundQuota = v_uint32()
self.OutboundQuota = v_uint32()
self.DefaultTimeout = LARGE_INTEGER()
self.TimeoutSpecified = v_uint8()
class NT_TIB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionList = v_ptr32()
self.StackBase = v_ptr32()
self.StackLimit = v_ptr32()
self.SubSystemTib = v_ptr32()
self.FiberData = v_ptr32()
self.ArbitraryUserPointer = v_ptr32()
self.Self = v_ptr32()
class _unnamed_10315(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MinimumChannel = v_uint32()
self.MaximumChannel = v_uint32()
class RTL_STD_LIST_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
class POWER_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SystemState = v_uint32()
class UNICODE_STRING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint16()
self.MaximumLength = v_uint16()
self.Buffer = v_ptr32()
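
# Illustrative sketch (not part of the generated definitions): parsing a
# UNICODE_STRING header with vsParse() and then fetching the string body.
# read_mem is a hypothetical callable that reads target memory; Length is a
# byte count, and the body is UTF-16LE.
def read_unicode_string(header_bytes, read_mem):
    us = UNICODE_STRING()
    us.vsParse(header_bytes)
    raw = read_mem(us.Buffer, us.Length)
    return raw.decode('utf-16-le')
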
class HEAP_LIST_LOOKUP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExtendedLookup = v_ptr32()
self.ArraySize = v_uint32()
self.ExtraItem = v_uint32()
self.ItemCount = v_uint32()
self.OutOfRangeItems = v_uint32()
self.BaseIndex = v_uint32()
self.ListHead = v_ptr32()
self.ListsInUseUlong = v_ptr32()
self.ListHints = v_ptr32()
class _unnamed_5755(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
class EPROCESS_QUOTA_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class _unnamed_10318(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.MinBusNumber = v_uint32()
self.MaxBusNumber = v_uint32()
self.Reserved = v_uint32()
class HEAP_DEBUGGING_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InterceptorFunction = v_ptr32()
self.InterceptorValue = v_uint16()
self._pad0008 = v_bytes(size=2)
self.ExtendedOptions = v_uint32()
self.StackTraceDepth = v_uint32()
self.MinTotalBlockSize = v_uint32()
self.MaxTotalBlockSize = v_uint32()
self.HeapLeakEnumerationRoutine = v_ptr32()
class ACCESS_REASONS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Data = v_bytes(size=128) # FIXME Unknown Array Type
class STACK_TRACE_DATABASE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Reserved = v_bytes(size=56) # FIXME Unknown Array Type
self.Reserved2 = v_ptr32()
self.PeakHashCollisionListLength = v_uint32()
self.LowerMemoryStart = v_ptr32()
self.PreCommitted = v_uint8()
self.DumpInProgress = v_uint8()
self._pad0048 = v_bytes(size=2)
self.CommitBase = v_ptr32()
self.CurrentLowerCommitLimit = v_ptr32()
self.CurrentUpperCommitLimit = v_ptr32()
self.NextFreeLowerMemory = v_ptr32()
self.NextFreeUpperMemory = v_ptr32()
self.NumberOfEntriesLookedUp = v_uint32()
self.NumberOfEntriesAdded = v_uint32()
self.EntryIndexArray = v_ptr32()
self.NumberOfEntriesAllocated = v_uint32()
self.NumberOfEntriesAvailable = v_uint32()
self.NumberOfAllocationFailures = v_uint32()
self._pad0078 = v_bytes(size=4)
self.FreeLists = v_uint32()
self.NumberOfBuckets = v_uint32()
self.Buckets = v_uint32()
class _unnamed_9139(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BaseMid = v_uint32()
class KDPC(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint8()
self.Importance = v_uint8()
self.Number = v_uint16()
self.DpcListEntry = LIST_ENTRY()
self.DeferredRoutine = v_ptr32()
self.DeferredContext = v_ptr32()
self.SystemArgument1 = v_ptr32()
self.SystemArgument2 = v_ptr32()
self.DpcData = v_ptr32()
class KEVENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
class KSEMAPHORE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
self.Limit = v_uint32()
class MM_PAGE_ACCESS_INFO_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Link = SINGLE_LIST_ENTRY()
self.Type = v_uint32()
self.EmptySequenceNumber = v_uint32()
self._pad0010 = v_bytes(size=4)
self.CreateTime = v_uint64()
self.EmptyTime = v_uint64()
self.PageEntry = v_ptr32()
self.FileEntry = v_ptr32()
self.FirstFileEntry = v_ptr32()
self.Process = v_ptr32()
self.SessionId = v_uint32()
class OBJECT_TYPE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TypeList = LIST_ENTRY()
self.Name = UNICODE_STRING()
self.DefaultObject = v_ptr32()
self.Index = v_uint8()
self._pad0018 = v_bytes(size=3)
self.TotalNumberOfObjects = v_uint32()
self.TotalNumberOfHandles = v_uint32()
self.HighWaterNumberOfObjects = v_uint32()
self.HighWaterNumberOfHandles = v_uint32()
self.TypeInfo = OBJECT_TYPE_INITIALIZER()
self.TypeLock = EX_PUSH_LOCK()
self.Key = v_uint32()
self.CallbackList = LIST_ENTRY()
class HANDLE_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TableCode = v_uint32()
self.QuotaProcess = v_ptr32()
self.UniqueProcessId = v_ptr32()
self.HandleLock = EX_PUSH_LOCK()
self.HandleTableList = LIST_ENTRY()
self.HandleContentionEvent = EX_PUSH_LOCK()
self.DebugInfo = v_ptr32()
self.ExtraInfoPages = v_uint32()
self.Flags = v_uint32()
self.FirstFreeHandle = v_uint32()
self.LastFreeHandleEntry = v_ptr32()
self.HandleCount = v_uint32()
self.NextHandleNeedingPool = v_uint32()
self.HandleCountHighWatermark = v_uint32()
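
# Illustrative sketch (not part of the generated definitions): on Windows the
# low two bits of HANDLE_TABLE.TableCode encode the table depth (0-2) and the
# remaining bits point at the top-level table page, assuming the Win7 layout
# above.
def handle_table_root(ht):
    level = ht.TableCode & 3
    base = ht.TableCode & 0xfffffffc
    return level, base
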
class MMSUPPORT_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WorkingSetType = v_uint8()
self.SessionMaster = v_uint8()
self.MemoryPriority = v_uint8()
self.WsleDeleted = v_uint8()
class HEAP_LOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = _unnamed_8680()
class EXCEPTION_REGISTRATION_RECORD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.Handler = v_ptr32()
class FILE_BASIC_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CreationTime = LARGE_INTEGER()
self.LastAccessTime = LARGE_INTEGER()
self.LastWriteTime = LARGE_INTEGER()
self.ChangeTime = LARGE_INTEGER()
self.FileAttributes = v_uint32()
class LIST_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flink = v_ptr32()
self.Blink = v_ptr32()
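
# Illustrative sketch (not part of the generated definitions): walking a
# LIST_ENTRY chain in a 32-bit memory image. read_mem is a hypothetical
# callable; Flink points at the LIST_ENTRY embedded in the next record, so
# entry_offset (the field's offset within the containing record) is
# subtracted to recover each record base.
def walk_list(head_va, read_mem, entry_offset=0, max_nodes=4096):
    entry = LIST_ENTRY()
    entry.vsParse(read_mem(head_va, len(entry)))
    cur, seen = entry.Flink, 0
    while cur and cur != head_va and seen < max_nodes:
        yield cur - entry_offset
        entry.vsParse(read_mem(cur, len(entry)))
        cur, seen = entry.Flink, seen + 1
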
class M128A(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Low = v_uint64()
self.High = v_uint64()
class _unnamed_5801(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LongFunction = v_uint32()
class RTL_DYNAMIC_HASH_TABLE_ENUMERATOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.HashEntry = RTL_DYNAMIC_HASH_TABLE_ENTRY()
self.ChainHead = v_ptr32()
self.BucketIndex = v_uint32()
class _unnamed_8069(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint32()
class GUID(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Data1 = v_uint32()
self.Data2 = v_uint16()
self.Data3 = v_uint16()
self.Data4 = v_bytes(size=8) # FIXME Unknown Array Type
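
# Illustrative sketch (not part of the generated definitions): rendering a
# parsed GUID in its canonical string form. Data4 is left as an opaque
# 8-byte field above, so it is formatted byte by byte.
def guid_str(g):
    d4 = bytearray(g.Data4)
    return '{%08x-%04x-%04x-%02x%02x-%s}' % (
        g.Data1, g.Data2, g.Data3, d4[0], d4[1],
        ''.join('%02x' % b for b in d4[2:]))
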
class HEAP_UCR_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.SegmentEntry = LIST_ENTRY()
self.Address = v_ptr32()
self.Size = v_uint32()
class MCA_EXCEPTION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VersionNumber = v_uint32()
self.ExceptionType = v_uint32()
self.TimeStamp = LARGE_INTEGER()
self.ProcessorNumber = v_uint32()
self.Reserved1 = v_uint32()
self.u = _unnamed_9067()
self.ExtCnt = v_uint32()
self.Reserved3 = v_uint32()
self.ExtReg = v_bytes(size=192) # FIXME Unknown Array Type
class PSP_CPU_QUOTA_APC(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class KAPC_STATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
        self.ApcListHead = vstruct.VArray([LIST_ENTRY() for i in range(2)])  # LIST_ENTRY[2] (16 bytes on x86); the generated opaque size of 192 was wrong
self.Process = v_ptr32()
self.KernelApcInProgress = v_uint8()
self.KernelApcPending = v_uint8()
self.UserApcPending = v_uint8()
class COUNTER_READING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint32()
self.Index = v_uint32()
self.Start = v_uint64()
self.Total = v_uint64()
class KDPC_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DpcListHead = LIST_ENTRY()
self.DpcLock = v_uint32()
self.DpcQueueDepth = v_uint32()
self.DpcCount = v_uint32()
class KIDTENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Offset = v_uint16()
self.Selector = v_uint16()
self.Access = v_uint16()
self.ExtendedOffset = v_uint16()
class XSAVE_AREA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LegacyState = XSAVE_FORMAT()
self.Header = XSAVE_AREA_HEADER()
class GENERIC_MAPPING(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.GenericRead = v_uint32()
self.GenericWrite = v_uint32()
self.GenericExecute = v_uint32()
self.GenericAll = v_uint32()
class IRP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.MdlAddress = v_ptr32()
self.Flags = v_uint32()
self.AssociatedIrp = _unnamed_7772()
self.ThreadListEntry = LIST_ENTRY()
self.IoStatus = IO_STATUS_BLOCK()
self.RequestorMode = v_uint8()
self.PendingReturned = v_uint8()
self.StackCount = v_uint8()
self.CurrentLocation = v_uint8()
self.Cancel = v_uint8()
self.CancelIrql = v_uint8()
self.ApcEnvironment = v_uint8()
self.AllocationFlags = v_uint8()
self.UserIosb = v_ptr32()
self.UserEvent = v_ptr32()
self.Overlay = _unnamed_7775()
self.CancelRoutine = v_ptr32()
self.UserBuffer = v_ptr32()
self.Tail = _unnamed_7778()
class KTHREAD_COUNTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WaitReasonBitMap = v_uint64()
self.UserData = v_ptr32()
self.Flags = v_uint32()
self.ContextSwitches = v_uint32()
self._pad0018 = v_bytes(size=4)
self.CycleTimeBias = v_uint64()
self.HardwareCounters = v_uint64()
self.HwCounter = v_uint64()
class _unnamed_9767(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Start = LARGE_INTEGER()
self.Length = v_uint32()
class DRIVER_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.DeviceObject = v_ptr32()
self.Flags = v_uint32()
self.DriverStart = v_ptr32()
self.DriverSize = v_uint32()
self.DriverSection = v_ptr32()
self.DriverExtension = v_ptr32()
self.DriverName = UNICODE_STRING()
self.HardwareDatabase = v_ptr32()
self.FastIoDispatch = v_ptr32()
self.DriverInit = v_ptr32()
self.DriverStartIo = v_ptr32()
self.DriverUnload = v_ptr32()
self.MajorFunction = v_bytes(size=112) # FIXME Unknown Array Type
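
# Illustrative sketch (not part of the generated definitions): the generator
# left MajorFunction as an opaque 112-byte field, but on x86 it is an array
# of 28 IRP_MJ_* dispatch routine pointers, so it can be unpacked manually.
import struct

def driver_major_functions(drv):
    return struct.unpack('<28I', drv.MajorFunction)
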
class FILE_GET_QUOTA_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NextEntryOffset = v_uint32()
self.SidLength = v_uint32()
self.Sid = SID()
class KGATE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Header = DISPATCHER_HEADER()
class IO_COMPLETION_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Port = v_ptr32()
self.Key = v_ptr32()
class DRIVER_EXTENSION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.DriverObject = v_ptr32()
self.AddDevice = v_ptr32()
self.Count = v_uint32()
self.ServiceKeyName = UNICODE_STRING()
self.ClientDriverExtension = v_ptr32()
self.FsFilterCallbacks = v_ptr32()
class TP_NBQ_GUARD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.GuardLinks = LIST_ENTRY()
self.Guards = v_bytes(size=8) # FIXME Unknown Array Type
class flags(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Removable = v_uint8()
class MM_AVL_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.BalancedRoot = MMADDRESS_NODE()
self.DepthOfTree = v_uint32()
self.NodeHint = v_ptr32()
self.NodeFreeHint = v_ptr32()
class WHEA_PERSISTENCE_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint64()
class _unnamed_9547(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Generic = _unnamed_9767()
class WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FRUId = v_uint8()
class EXCEPTION_RECORD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExceptionCode = v_uint32()
self.ExceptionFlags = v_uint32()
self.ExceptionRecord = v_ptr32()
self.ExceptionAddress = v_ptr32()
self.NumberParameters = v_uint32()
self.ExceptionInformation = v_bytes(size=60) # FIXME Unknown Array Type
class PROCESSOR_NUMBER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Group = v_uint16()
self.Number = v_uint8()
self.Reserved = v_uint8()
class MM_PAGE_ACCESS_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = MM_PAGE_ACCESS_INFO_FLAGS()
self.PointerProtoPte = v_ptr32()
class _unnamed_7772(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MasterIrp = v_ptr32()
class KPCR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NtTib = NT_TIB()
self.SelfPcr = v_ptr32()
self.Prcb = v_ptr32()
self.Irql = v_uint8()
self._pad0028 = v_bytes(size=3)
self.IRR = v_uint32()
self.IrrActive = v_uint32()
self.IDR = v_uint32()
self.KdVersionBlock = v_ptr32()
self.IDT = v_ptr32()
self.GDT = v_ptr32()
self.TSS = v_ptr32()
self.MajorVersion = v_uint16()
self.MinorVersion = v_uint16()
self.SetMember = v_uint32()
self.StallScaleFactor = v_uint32()
self.SpareUnused = v_uint8()
self.Number = v_uint8()
self.Spare0 = v_uint8()
self.SecondLevelCacheAssociativity = v_uint8()
self.VdmAlert = v_uint32()
self.KernelReserved = v_bytes(size=56) # FIXME Unknown Array Type
self.SecondLevelCacheSize = v_uint32()
self.HalReserved = v_bytes(size=64) # FIXME Unknown Array Type
self.InterruptMode = v_uint32()
self.Spare1 = v_uint8()
self._pad00dc = v_bytes(size=3)
self.KernelReserved2 = v_bytes(size=68) # FIXME Unknown Array Type
self.PrcbData = KPRCB()
class IMAGE_FILE_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Machine = v_uint16()
self.NumberOfSections = v_uint16()
self.TimeDateStamp = v_uint32()
self.PointerToSymbolTable = v_uint32()
self.NumberOfSymbols = v_uint32()
self.SizeOfOptionalHeader = v_uint16()
self.Characteristics = v_uint16()
class LFH_BLOCK_ZONE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
self.FreePointer = v_ptr32()
self.Limit = v_ptr32()
class _unnamed_7778(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Overlay = _unnamed_7851()
class FILE_STANDARD_INFORMATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AllocationSize = LARGE_INTEGER()
self.EndOfFile = LARGE_INTEGER()
self.NumberOfLinks = v_uint32()
self.DeletePending = v_uint8()
self.Directory = v_uint8()
class LFH_HEAP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = RTL_CRITICAL_SECTION()
self.SubSegmentZones = LIST_ENTRY()
self.ZoneBlockSize = v_uint32()
self.Heap = v_ptr32()
self.SegmentChange = v_uint32()
self.SegmentCreate = v_uint32()
self.SegmentInsertInFree = v_uint32()
self.SegmentDelete = v_uint32()
self.CacheAllocs = v_uint32()
self.CacheFrees = v_uint32()
self.SizeInCache = v_uint32()
self._pad0048 = v_bytes(size=4)
self.RunInfo = HEAP_BUCKET_RUN_INFO()
        self.UserBlockCache = HEAP_BUCKET_RUN_INFO()  # FIXME generator collapsed USER_MEMORY_CACHE_ENTRY[12] to this type
        self.Buckets = HEAP_BUCKET_RUN_INFO()  # FIXME generator collapsed HEAP_BUCKET[128] to this type
        self.LocalData = HEAP_BUCKET_RUN_INFO()  # FIXME generator collapsed HEAP_LOCAL_DATA[1] to this type
class _unnamed_7982(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.FileInformationClass = v_uint32()
self.FileObject = v_ptr32()
self.ReplaceIfExists = v_uint8()
self.AdvanceOnly = v_uint8()
class HEAP_BUCKET_RUN_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Bucket = v_uint32()
self.RunLength = v_uint32()
class PEB_LDR_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.Initialized = v_uint8()
self._pad0008 = v_bytes(size=3)
self.SsHandle = v_ptr32()
self.InLoadOrderModuleList = LIST_ENTRY()
self.InMemoryOrderModuleList = LIST_ENTRY()
self.InInitializationOrderModuleList = LIST_ENTRY()
self.EntryInProgress = v_ptr32()
self.ShutdownInProgress = v_uint8()
self._pad002c = v_bytes(size=3)
self.ShutdownThreadId = v_ptr32()
class _unnamed_5768(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LowPart = v_uint32()
self.HighPart = v_uint32()
class HEAP_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Flags = v_uint8()
self.SmallTagIndex = v_uint8()
self.PreviousSize = v_uint16()
self.SegmentOffset = v_uint8()
self.UnusedBytes = v_uint8()
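
# Illustrative sketch (not part of the generated definitions): on Win7, busy
# heap entries are XOR-encoded with the 8-byte HEAP.Encoding value when
# encoding is enabled. encoding_bytes is assumed to hold that value; raw8 is
# the 8 raw bytes of one entry.
def decode_heap_entry(raw8, encoding_bytes):
    plain = bytearray(a ^ b for a, b in zip(bytearray(raw8), bytearray(encoding_bytes)))
    entry = HEAP_ENTRY()
    entry.vsParse(bytes(plain))
    return entry
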
class MM_PAGE_ACCESS_INFO_FLAGS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.File = _unnamed_8980()
class SECURITY_SUBJECT_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ClientToken = v_ptr32()
self.ImpersonationLevel = v_uint32()
self.PrimaryToken = v_ptr32()
self.ProcessAuditId = v_ptr32()
class _unnamed_7979(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.FileInformationClass = v_uint32()
class _unnamed_10323(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Priority = v_uint32()
self.Reserved1 = v_uint32()
self.Reserved2 = v_uint32()
class _unnamed_10327(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length40 = v_uint32()
self.Alignment40 = v_uint32()
self.MinimumAddress = LARGE_INTEGER()
self.MaximumAddress = LARGE_INTEGER()
class _unnamed_7976(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.CompletionFilter = v_uint32()
class _unnamed_7970(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint32()
self.FileName = v_ptr32()
self.FileInformationClass = v_uint32()
self.FileIndex = v_uint32()
class _unnamed_7874(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityContext = v_ptr32()
self.Options = v_uint32()
self.FileAttributes = v_uint16()
self.ShareAccess = v_uint16()
self.EaLength = v_uint32()
class INTERFACE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Version = v_uint16()
self.Context = v_ptr32()
self.InterfaceReference = v_ptr32()
self.InterfaceDereference = v_ptr32()
class SLIST_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Alignment = v_uint64()
class _unnamed_5798(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
class IMAGE_DATA_DIRECTORY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.VirtualAddress = v_uint32()
self.Size = v_uint32()
class FILE_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.DeviceObject = v_ptr32()
self.Vpb = v_ptr32()
self.FsContext = v_ptr32()
self.FsContext2 = v_ptr32()
self.SectionObjectPointer = v_ptr32()
self.PrivateCacheMap = v_ptr32()
self.FinalStatus = v_uint32()
self.RelatedFileObject = v_ptr32()
self.LockOperation = v_uint8()
self.DeletePending = v_uint8()
self.ReadAccess = v_uint8()
self.WriteAccess = v_uint8()
self.DeleteAccess = v_uint8()
self.SharedRead = v_uint8()
self.SharedWrite = v_uint8()
self.SharedDelete = v_uint8()
self.Flags = v_uint32()
self.FileName = UNICODE_STRING()
self.CurrentByteOffset = LARGE_INTEGER()
self.Waiters = v_uint32()
self.Busy = v_uint32()
self.LastLock = v_ptr32()
self.Lock = KEVENT()
self.Event = KEVENT()
self.CompletionContext = v_ptr32()
self.IrpListLock = v_uint32()
self.IrpList = LIST_ENTRY()
self.FileObjectExtension = v_ptr32()
class PPM_IDLE_STATES(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
self.Flags = _unnamed_9370()
self.TargetState = v_uint32()
self.ActualState = v_uint32()
self.OldState = v_uint32()
self.NewlyUnparked = v_uint8()
self._pad0018 = v_bytes(size=3)
self.TargetProcessors = KAFFINITY_EX()
        self.State = KAFFINITY_EX()  # FIXME generator reused KAFFINITY_EX; the trailing field is a PPM_IDLE_STATE array
class _unnamed_8142(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Lock = v_uint8()
class HEAP_SUBSEGMENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.LocalInfo = v_ptr32()
self.UserBlocks = v_ptr32()
self.AggregateExchg = INTERLOCK_SEQ()
self.BlockSize = v_uint16()
self.Flags = v_uint16()
self.BlockCount = v_uint16()
self.SizeIndex = v_uint8()
self.AffinityIndex = v_uint8()
self.SFreeListEntry = SINGLE_LIST_ENTRY()
self.Lock = v_uint32()
class ERESOURCE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SystemResourcesList = LIST_ENTRY()
self.OwnerTable = v_ptr32()
self.ActiveCount = v_uint16()
self.Flag = v_uint16()
self.SharedWaiters = v_ptr32()
self.ExclusiveWaiters = v_ptr32()
self.OwnerEntry = OWNER_ENTRY()
self.ActiveEntries = v_uint32()
self.ContentionCount = v_uint32()
self.NumberOfSharedWaiters = v_uint32()
self.NumberOfExclusiveWaiters = v_uint32()
self.Address = v_ptr32()
self.SpinLock = v_uint32()
class _unnamed_8220(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AllocatedResources = v_ptr32()
self.AllocatedResourcesTranslated = v_ptr32()
class _unnamed_8224(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ProviderId = v_uint32()
self.DataPath = v_ptr32()
self.BufferSize = v_uint32()
self.Buffer = v_ptr32()
class _unnamed_8229(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Argument1 = v_ptr32()
self.Argument2 = v_ptr32()
self.Argument3 = v_ptr32()
self.Argument4 = v_ptr32()
class PEB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InheritedAddressSpace = v_uint8()
self.ReadImageFileExecOptions = v_uint8()
self.BeingDebugged = v_uint8()
self.BitField = v_uint8()
self.Mutant = v_ptr32()
self.ImageBaseAddress = v_ptr32()
self.Ldr = v_ptr32()
self.ProcessParameters = v_ptr32()
self.SubSystemData = v_ptr32()
self.ProcessHeap = v_ptr32()
self.FastPebLock = v_ptr32()
self.AtlThunkSListPtr = v_ptr32()
self.IFEOKey = v_ptr32()
self.CrossProcessFlags = v_uint32()
self.KernelCallbackTable = v_ptr32()
self.SystemReserved = v_bytes(size=4) # FIXME Unknown Array Type
self.AtlThunkSListPtr32 = v_uint32()
self.ApiSetMap = v_ptr32()
self.TlsExpansionCounter = v_uint32()
self.TlsBitmap = v_ptr32()
self.TlsBitmapBits = v_bytes(size=8) # FIXME Unknown Array Type
self.ReadOnlySharedMemoryBase = v_ptr32()
self.HotpatchInformation = v_ptr32()
self.ReadOnlyStaticServerData = v_ptr32()
self.AnsiCodePageData = v_ptr32()
self.OemCodePageData = v_ptr32()
self.UnicodeCaseTableData = v_ptr32()
self.NumberOfProcessors = v_uint32()
self.NtGlobalFlag = v_uint32()
self._pad0070 = v_bytes(size=4)
self.CriticalSectionTimeout = LARGE_INTEGER()
self.HeapSegmentReserve = v_uint32()
self.HeapSegmentCommit = v_uint32()
self.HeapDeCommitTotalFreeThreshold = v_uint32()
self.HeapDeCommitFreeBlockThreshold = v_uint32()
self.NumberOfHeaps = v_uint32()
self.MaximumNumberOfHeaps = v_uint32()
self.ProcessHeaps = v_ptr32()
self.GdiSharedHandleTable = v_ptr32()
self.ProcessStarterHelper = v_ptr32()
self.GdiDCAttributeList = v_uint32()
self.LoaderLock = v_ptr32()
self.OSMajorVersion = v_uint32()
self.OSMinorVersion = v_uint32()
self.OSBuildNumber = v_uint16()
self.OSCSDVersion = v_uint16()
self.OSPlatformId = v_uint32()
self.ImageSubsystem = v_uint32()
self.ImageSubsystemMajorVersion = v_uint32()
self.ImageSubsystemMinorVersion = v_uint32()
self.ActiveProcessAffinityMask = v_uint32()
self.GdiHandleBuffer = v_bytes(size=136) # FIXME Unknown Array Type
self.PostProcessInitRoutine = v_ptr32()
self.TlsExpansionBitmap = v_ptr32()
self.TlsExpansionBitmapBits = v_bytes(size=128) # FIXME Unknown Array Type
self.SessionId = v_uint32()
self.AppCompatFlags = ULARGE_INTEGER()
self.AppCompatFlagsUser = ULARGE_INTEGER()
self.pShimData = v_ptr32()
self.AppCompatInfo = v_ptr32()
self.CSDVersion = UNICODE_STRING()
self.ActivationContextData = v_ptr32()
self.ProcessAssemblyStorageMap = v_ptr32()
self.SystemDefaultActivationContextData = v_ptr32()
self.SystemAssemblyStorageMap = v_ptr32()
self.MinimumStackCommit = v_uint32()
self.FlsCallback = v_ptr32()
self.FlsListHead = LIST_ENTRY()
self.FlsBitmap = v_ptr32()
self.FlsBitmapBits = v_bytes(size=16) # FIXME Unknown Array Type
self.FlsHighIndex = v_uint32()
self.WerRegistrationData = v_ptr32()
self.WerShipAssertPtr = v_ptr32()
self.pContextData = v_ptr32()
self.pImageHeaderHash = v_ptr32()
self.TracingFlags = v_uint32()
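
# Illustrative sketch (not part of the generated definitions): summarizing a
# few well-known fields from a PEB captured out of a 32-bit Win7 process
# dump. peb_bytes is assumed to start at the PEB base address.
def peb_summary(peb_bytes):
    peb = PEB()
    peb.vsParse(peb_bytes)
    return {
        'BeingDebugged': bool(peb.BeingDebugged),
        'ImageBaseAddress': peb.ImageBaseAddress,
        'NumberOfHeaps': peb.NumberOfHeaps,
        'OSVersion': (peb.OSMajorVersion, peb.OSMinorVersion, peb.OSBuildNumber),
    }
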
class TP_TASK_CALLBACKS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ExecuteCallback = v_ptr32()
self.Unposted = v_ptr32()
class RTL_BALANCED_LINKS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Parent = v_ptr32()
self.LeftChild = v_ptr32()
self.RightChild = v_ptr32()
self.Balance = v_uint8()
self.Reserved = v_bytes(size=3) # FIXME Unknown Array Type
class _unnamed_9722(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Port = _unnamed_10298()
class EX_PUSH_LOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Locked = v_uint32()
class XSTATE_CONTEXT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Mask = v_uint64()
self.Length = v_uint32()
self.Reserved1 = v_uint32()
self.Area = v_ptr32()
self.Reserved2 = v_uint32()
self.Buffer = v_ptr32()
self.Reserved3 = v_uint32()
class HEAP_FREE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Size = v_uint16()
self.Flags = v_uint8()
self.SmallTagIndex = v_uint8()
self.PreviousSize = v_uint16()
self.SegmentOffset = v_uint8()
self.UnusedBytes = v_uint8()
self.FreeList = LIST_ENTRY()
class KSTACK_COUNT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Value = v_uint32()
class _unnamed_8030(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.SecurityInformation = v_uint32()
self.SecurityDescriptor = v_ptr32()
class MDL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Next = v_ptr32()
self.Size = v_uint16()
self.MdlFlags = v_uint16()
self.Process = v_ptr32()
self.MappedSystemVa = v_ptr32()
self.StartVa = v_ptr32()
self.ByteCount = v_uint32()
self.ByteOffset = v_uint32()
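
# Illustrative sketch (not part of the generated definitions): the buffer an
# MDL describes starts ByteOffset bytes into the page-aligned StartVa and
# runs for ByteCount bytes.
def mdl_buffer_range(mdl):
    start = mdl.StartVa + mdl.ByteOffset
    return start, start + mdl.ByteCount
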
class _unnamed_8134(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.IoResourceRequirementList = v_ptr32()
class _unnamed_8137(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WhichSpace = v_uint32()
self.Buffer = v_ptr32()
self.Offset = v_uint32()
self.Length = v_uint32()
class HEAP_SEGMENT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Entry = HEAP_ENTRY()
self.SegmentSignature = v_uint32()
self.SegmentFlags = v_uint32()
self.SegmentListEntry = LIST_ENTRY()
self.Heap = v_ptr32()
self.BaseAddress = v_ptr32()
self.NumberOfPages = v_uint32()
self.FirstEntry = v_ptr32()
self.LastValidEntry = v_ptr32()
self.NumberOfUnCommittedPages = v_uint32()
self.NumberOfUnCommittedRanges = v_uint32()
self.SegmentAllocatorBackTraceIndex = v_uint16()
self.Reserved = v_uint16()
self.UCRSegmentList = LIST_ENTRY()
class WHEA_ERROR_RECORD_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.Revision = WHEA_REVISION()
self.SignatureEnd = v_uint32()
self.SectionCount = v_uint16()
self.Severity = v_uint32()
self.ValidBits = WHEA_ERROR_RECORD_HEADER_VALIDBITS()
self.Length = v_uint32()
self.Timestamp = WHEA_TIMESTAMP()
self.PlatformId = GUID()
self.PartitionId = GUID()
self.CreatorId = GUID()
self.NotifyType = GUID()
self.RecordId = v_uint64()
self.Flags = WHEA_ERROR_RECORD_HEADER_FLAGS()
self.PersistenceInfo = WHEA_PERSISTENCE_INFO()
self.Reserved = v_bytes(size=12) # FIXME Unknown Array Type
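
# Illustrative sketch (not part of the generated definitions): a WHEA/CPER
# record starts with the ASCII signature 'CPER' (0x52455043 little-endian);
# this is a quick validity check before trusting the rest of the header.
def is_cper_record(buf):
    hdr = WHEA_ERROR_RECORD_HEADER()
    hdr.vsParse(buf)
    return hdr.Signature == 0x52455043
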
class EVENT_DESCRIPTOR(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Id = v_uint16()
self.Version = v_uint8()
self.Channel = v_uint8()
self.Level = v_uint8()
self.Opcode = v_uint8()
self.Task = v_uint16()
self.Keyword = v_uint64()
class _unnamed_9855(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ResourceToRelease = v_ptr32()
class _unnamed_9854(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.EndingOffset = v_ptr32()
self.ResourceToRelease = v_ptr32()
class _unnamed_9857(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NotificationType = v_uint32()
self.SafeToRecurse = v_uint8()
class MMSUPPORT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WorkingSetMutex = EX_PUSH_LOCK()
self.ExitGate = v_ptr32()
self.AccessLog = v_ptr32()
self.WorkingSetExpansionLinks = LIST_ENTRY()
self.AgeDistribution = v_bytes(size=28) # FIXME Unknown Array Type
self.MinimumWorkingSetSize = v_uint32()
self.WorkingSetSize = v_uint32()
self.WorkingSetPrivateSize = v_uint32()
self.MaximumWorkingSetSize = v_uint32()
self.ChargedWslePages = v_uint32()
self.ActualWslePages = v_uint32()
self.WorkingSetSizeOverhead = v_uint32()
self.PeakWorkingSetSize = v_uint32()
self.HardFaultCount = v_uint32()
self.VmWorkingSetList = v_ptr32()
self.NextPageColor = v_uint16()
self.LastTrimStamp = v_uint16()
self.PageFaultCount = v_uint32()
self.RepurposeCount = v_uint32()
self.Spare = v_bytes(size=4) # FIXME Unknown Array Type
self.Flags = MMSUPPORT_FLAGS()
class _unnamed_9858(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Argument1 = v_ptr32()
self.Argument2 = v_ptr32()
self.Argument3 = v_ptr32()
self.Argument4 = v_ptr32()
self.Argument5 = v_ptr32()
class FLS_CALLBACK_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
class ACL(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.AclRevision = v_uint8()
self.Sbz1 = v_uint8()
self.AclSize = v_uint16()
self.AceCount = v_uint16()
self.Sbz2 = v_uint16()
class LIST_ENTRY64(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flink = v_uint64()
self.Blink = v_uint64()
class WAIT_CONTEXT_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WaitQueueEntry = KDEVICE_QUEUE_ENTRY()
self.DeviceRoutine = v_ptr32()
self.DeviceContext = v_ptr32()
self.NumberOfMapRegisters = v_uint32()
self.DeviceObject = v_ptr32()
self.CurrentIrp = v_ptr32()
self.BufferChainingDpc = v_ptr32()
class SE_AUDIT_PROCESS_CREATION_INFO(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ImageFileName = v_ptr32()
class ACTIVATION_CONTEXT_STACK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ActiveFrame = v_ptr32()
self.FrameListCache = LIST_ENTRY()
self.Flags = v_uint32()
self.NextCookieSequenceNumber = v_uint32()
self.StackId = v_uint32()
class LDR_DATA_TABLE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InLoadOrderLinks = LIST_ENTRY()
self.InMemoryOrderLinks = LIST_ENTRY()
self.InInitializationOrderLinks = LIST_ENTRY()
self.DllBase = v_ptr32()
self.EntryPoint = v_ptr32()
self.SizeOfImage = v_uint32()
self.FullDllName = UNICODE_STRING()
self.BaseDllName = UNICODE_STRING()
self.Flags = v_uint32()
self.LoadCount = v_uint16()
self.TlsIndex = v_uint16()
self.HashLinks = LIST_ENTRY()
self.TimeDateStamp = v_uint32()
self.EntryPointActivationContext = v_ptr32()
self.PatchInformation = v_ptr32()
self.ForwarderLinks = LIST_ENTRY()
self.ServiceTagLinks = LIST_ENTRY()
self.StaticLinks = LIST_ENTRY()
self.ContextInformation = v_ptr32()
self.OriginalBase = v_uint32()
self.LoadTime = LARGE_INTEGER()
class LOOKASIDE_LIST_EX(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.L = GENERAL_LOOKASIDE_POOL()
class TEB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.NtTib = NT_TIB()
self.EnvironmentPointer = v_ptr32()
self.ClientId = CLIENT_ID()
self.ActiveRpcHandle = v_ptr32()
self.ThreadLocalStoragePointer = v_ptr32()
self.ProcessEnvironmentBlock = v_ptr32()
self.LastErrorValue = v_uint32()
self.CountOfOwnedCriticalSections = v_uint32()
self.CsrClientThread = v_ptr32()
self.Win32ThreadInfo = v_ptr32()
self.User32Reserved = v_bytes(size=104) # FIXME Unknown Array Type
self.UserReserved = v_bytes(size=20) # FIXME Unknown Array Type
self.WOW32Reserved = v_ptr32()
self.CurrentLocale = v_uint32()
self.FpSoftwareStatusRegister = v_uint32()
self.SystemReserved1 = v_bytes(size=216) # FIXME Unknown Array Type
self.ExceptionCode = v_uint32()
self.ActivationContextStackPointer = v_ptr32()
self.SpareBytes = v_bytes(size=36) # FIXME Unknown Array Type
self.TxFsContext = v_uint32()
self.GdiTebBatch = GDI_TEB_BATCH()
self.RealClientId = CLIENT_ID()
self.GdiCachedProcessHandle = v_ptr32()
self.GdiClientPID = v_uint32()
self.GdiClientTID = v_uint32()
self.GdiThreadLocalInfo = v_ptr32()
self.Win32ClientInfo = v_bytes(size=248) # FIXME Unknown Array Type
self.glDispatchTable = v_bytes(size=932) # FIXME Unknown Array Type
self.glReserved1 = v_bytes(size=116) # FIXME Unknown Array Type
self.glReserved2 = v_ptr32()
self.glSectionInfo = v_ptr32()
self.glSection = v_ptr32()
self.glTable = v_ptr32()
self.glCurrentRC = v_ptr32()
self.glContext = v_ptr32()
self.LastStatusValue = v_uint32()
self.StaticUnicodeString = UNICODE_STRING()
self.StaticUnicodeBuffer = v_bytes(size=522) # FIXME Unknown Array Type
self._pad0e0c = v_bytes(size=2)
self.DeallocationStack = v_ptr32()
self.TlsSlots = v_bytes(size=256) # FIXME Unknown Array Type
self.TlsLinks = LIST_ENTRY()
self.Vdm = v_ptr32()
self.ReservedForNtRpc = v_ptr32()
self.DbgSsReserved = v_bytes(size=8) # FIXME Unknown Array Type
self.HardErrorMode = v_uint32()
self.Instrumentation = v_bytes(size=36) # FIXME Unknown Array Type
self.ActivityId = GUID()
self.SubProcessTag = v_ptr32()
self.EtwLocalData = v_ptr32()
self.EtwTraceData = v_ptr32()
self.WinSockData = v_ptr32()
self.GdiBatchCount = v_uint32()
self.CurrentIdealProcessor = PROCESSOR_NUMBER()
self.GuaranteedStackBytes = v_uint32()
self.ReservedForPerf = v_ptr32()
self.ReservedForOle = v_ptr32()
self.WaitingOnLoaderLock = v_uint32()
self.SavedPriorityState = v_ptr32()
self.SoftPatchPtr1 = v_uint32()
self.ThreadPoolData = v_ptr32()
self.TlsExpansionSlots = v_ptr32()
self.MuiGeneration = v_uint32()
self.IsImpersonating = v_uint32()
self.NlsCache = v_ptr32()
self.pShimData = v_ptr32()
self.HeapVirtualAffinity = v_uint32()
self.CurrentTransactionHandle = v_ptr32()
self.ActiveFrame = v_ptr32()
self.FlsData = v_ptr32()
self.PreferredLanguages = v_ptr32()
self.UserPrefLanguages = v_ptr32()
self.MergedPrefLanguages = v_ptr32()
self.MuiImpersonation = v_uint32()
self.CrossTebFlags = v_uint16()
self.SameTebFlags = v_uint16()
self.TxnScopeEnterCallback = v_ptr32()
self.TxnScopeExitCallback = v_ptr32()
self.TxnScopeContext = v_ptr32()
self.LockCount = v_uint32()
self.SpareUlong0 = v_uint32()
self.ResourceRetValue = v_ptr32()
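
# Illustrative sketch (not part of the generated definitions): recovering the
# thread stack bounds from a parsed TEB via the embedded NT_TIB. teb_bytes is
# assumed to start at the TEB base of a 32-bit Win7 thread.
def teb_stack_bounds(teb_bytes):
    teb = TEB()
    teb.vsParse(teb_bytes)
    return teb.NtTib.StackLimit, teb.NtTib.StackBase
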
class EX_RUNDOWN_REF(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint32()
class XSAVE_FORMAT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ControlWord = v_uint16()
self.StatusWord = v_uint16()
self.TagWord = v_uint8()
self.Reserved1 = v_uint8()
self.ErrorOpcode = v_uint16()
self.ErrorOffset = v_uint32()
self.ErrorSelector = v_uint16()
self.Reserved2 = v_uint16()
self.DataOffset = v_uint32()
self.DataSelector = v_uint16()
self.Reserved3 = v_uint16()
self.MxCsr = v_uint32()
self.MxCsr_Mask = v_uint32()
        self.FloatRegisters = vstruct.VArray([M128A() for i in range(8)])  # M128A[8]; the generator had collapsed this array to a single uint32
        self.XmmRegisters = vstruct.VArray([M128A() for i in range(8)])  # M128A[8]; the generator had collapsed this array to a single uint32
self.Reserved4 = v_bytes(size=192) # FIXME Unknown Array Type
self.StackControl = v_bytes(size=28) # FIXME Unknown Array Type
self.Cr0NpxState = v_uint32()
class PO_DIAG_STACK_RECORD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.StackDepth = v_uint32()
self.Stack = v_bytes(size=4) # FIXME Unknown Array Type
class IMAGE_DOS_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.e_magic = v_uint16()
self.e_cblp = v_uint16()
self.e_cp = v_uint16()
self.e_crlc = v_uint16()
self.e_cparhdr = v_uint16()
self.e_minalloc = v_uint16()
self.e_maxalloc = v_uint16()
self.e_ss = v_uint16()
self.e_sp = v_uint16()
self.e_csum = v_uint16()
self.e_ip = v_uint16()
self.e_cs = v_uint16()
self.e_lfarlc = v_uint16()
self.e_ovno = v_uint16()
self.e_res = v_bytes(size=8) # FIXME Unknown Array Type
self.e_oemid = v_uint16()
self.e_oeminfo = v_uint16()
self.e_res2 = v_bytes(size=20) # FIXME Unknown Array Type
self.e_lfanew = v_uint32()
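
# Illustrative sketch (not part of the generated definitions): chaining the
# PE header structures defined in this module. Parse the DOS header, verify
# the 'MZ' magic, then parse the COFF file header and optional header that
# follow the 4-byte 'PE\0\0' signature at e_lfanew.
def parse_pe_headers(data):
    dos = IMAGE_DOS_HEADER()
    dos.vsParse(data)
    if dos.e_magic != 0x5a4d:
        raise ValueError('not an MZ image')
    off = dos.e_lfanew
    if data[off:off + 4] != b'PE\x00\x00':
        raise ValueError('missing PE signature')
    fhdr = IMAGE_FILE_HEADER()
    fhdr.vsParse(data, offset=off + 4)
    opt = IMAGE_OPTIONAL_HEADER()
    opt.vsParse(data, offset=off + 4 + len(fhdr))
    return dos, fhdr, opt
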
class RTL_DYNAMIC_HASH_TABLE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Linkage = LIST_ENTRY()
self.Signature = v_uint32()
class MMADDRESS_NODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.u1 = _unnamed_9563()
self.LeftChild = v_ptr32()
self.RightChild = v_ptr32()
self.StartingVpn = v_uint32()
self.EndingVpn = v_uint32()
class _unnamed_7372(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListEntry = LIST_ENTRY()
class TXN_PARAMETER_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Length = v_uint16()
self.TxFsContext = v_uint16()
self.TransactionObject = v_ptr32()
class _unnamed_8980(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FilePointerIndex = v_uint32()
class _unnamed_8981(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.FilePointerIndex = v_uint32()
class QUAD(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UseThisFieldToCopy = v_uint64()
class HEAP_TUNING_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CommittThresholdShift = v_uint32()
self.MaxPreCommittThreshold = v_uint32()
class KPRCB(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MinorVersion = v_uint16()
self.MajorVersion = v_uint16()
self.CurrentThread = v_ptr32()
self.NextThread = v_ptr32()
self.IdleThread = v_ptr32()
self.LegacyNumber = v_uint8()
self.NestingLevel = v_uint8()
self.BuildType = v_uint16()
self.CpuType = v_uint8()
self.CpuID = v_uint8()
self.CpuStep = v_uint16()
self.ProcessorState = KPROCESSOR_STATE()
self.KernelReserved = v_bytes(size=64) # FIXME Unknown Array Type
self.HalReserved = v_bytes(size=64) # FIXME Unknown Array Type
self.CFlushSize = v_uint32()
self.CoresPerPhysicalProcessor = v_uint8()
self.LogicalProcessorsPerCore = v_uint8()
self.PrcbPad0 = v_bytes(size=2) # FIXME Unknown Array Type
self.MHz = v_uint32()
self.CpuVendor = v_uint8()
self.GroupIndex = v_uint8()
self.Group = v_uint16()
self.GroupSetMember = v_uint32()
self.Number = v_uint32()
self.PrcbPad1 = v_bytes(size=72) # FIXME Unknown Array Type
self.LockQueue = v_bytes(size=72) # FIXME Unknown Array Type
self.NpxThread = v_ptr32()
self.InterruptCount = v_uint32()
self.KernelTime = v_uint32()
self.UserTime = v_uint32()
self.DpcTime = v_uint32()
self.DpcTimeCount = v_uint32()
self.InterruptTime = v_uint32()
self.AdjustDpcThreshold = v_uint32()
self.PageColor = v_uint32()
self.DebuggerSavedIRQL = v_uint8()
self.NodeColor = v_uint8()
self.PrcbPad20 = v_bytes(size=2) # FIXME Unknown Array Type
self.NodeShiftedColor = v_uint32()
self.ParentNode = v_ptr32()
self.SecondaryColorMask = v_uint32()
self.DpcTimeLimit = v_uint32()
self.PrcbPad21 = v_bytes(size=8) # FIXME Unknown Array Type
self.CcFastReadNoWait = v_uint32()
self.CcFastReadWait = v_uint32()
self.CcFastReadNotPossible = v_uint32()
self.CcCopyReadNoWait = v_uint32()
self.CcCopyReadWait = v_uint32()
self.CcCopyReadNoWaitMiss = v_uint32()
self.MmSpinLockOrdering = v_uint32()
self.IoReadOperationCount = v_uint32()
self.IoWriteOperationCount = v_uint32()
self.IoOtherOperationCount = v_uint32()
self.IoReadTransferCount = LARGE_INTEGER()
self.IoWriteTransferCount = LARGE_INTEGER()
self.IoOtherTransferCount = LARGE_INTEGER()
self.CcFastMdlReadNoWait = v_uint32()
self.CcFastMdlReadWait = v_uint32()
self.CcFastMdlReadNotPossible = v_uint32()
self.CcMapDataNoWait = v_uint32()
self.CcMapDataWait = v_uint32()
self.CcPinMappedDataCount = v_uint32()
self.CcPinReadNoWait = v_uint32()
self.CcPinReadWait = v_uint32()
self.CcMdlReadNoWait = v_uint32()
self.CcMdlReadWait = v_uint32()
self.CcLazyWriteHotSpots = v_uint32()
self.CcLazyWriteIos = v_uint32()
self.CcLazyWritePages = v_uint32()
self.CcDataFlushes = v_uint32()
self.CcDataPages = v_uint32()
self.CcLostDelayedWrites = v_uint32()
self.CcFastReadResourceMiss = v_uint32()
self.CcCopyReadWaitMiss = v_uint32()
self.CcFastMdlReadResourceMiss = v_uint32()
self.CcMapDataNoWaitMiss = v_uint32()
self.CcMapDataWaitMiss = v_uint32()
self.CcPinReadNoWaitMiss = v_uint32()
self.CcPinReadWaitMiss = v_uint32()
self.CcMdlReadNoWaitMiss = v_uint32()
self.CcMdlReadWaitMiss = v_uint32()
self.CcReadAheadIos = v_uint32()
self.KeAlignmentFixupCount = v_uint32()
self.KeExceptionDispatchCount = v_uint32()
self.KeSystemCalls = v_uint32()
self.AvailableTime = v_uint32()
self.PrcbPad22 = v_bytes(size=8) # FIXME Unknown Array Type
self.PPLookasideList = v_bytes(size=8) # FIXME Unknown Array Type
self.PPNPagedLookasideList = v_bytes(size=8) # FIXME Unknown Array Type
self.PPPagedLookasideList = v_bytes(size=8) # FIXME Unknown Array Type
self.PacketBarrier = v_uint32()
self.ReverseStall = v_uint32()
self.IpiFrame = v_ptr32()
self.PrcbPad3 = v_bytes(size=52) # FIXME Unknown Array Type
self.CurrentPacket = v_bytes(size=12) # FIXME Unknown Array Type
self.TargetSet = v_uint32()
self.WorkerRoutine = v_ptr32()
self.IpiFrozen = v_uint32()
self.PrcbPad4 = v_bytes(size=40) # FIXME Unknown Array Type
self.RequestSummary = v_uint32()
self.SignalDone = v_ptr32()
self.PrcbPad50 = v_bytes(size=56) # FIXME Unknown Array Type
self.DpcData = v_bytes(size=56) # FIXME Unknown Array Type
self.DpcStack = v_ptr32()
self.MaximumDpcQueueDepth = v_uint32()
self.DpcRequestRate = v_uint32()
self.MinimumDpcRate = v_uint32()
self.DpcLastCount = v_uint32()
self.PrcbLock = v_uint32()
self.DpcGate = KGATE()
self.ThreadDpcEnable = v_uint8()
self.QuantumEnd = v_uint8()
self.DpcRoutineActive = v_uint8()
self.IdleSchedule = v_uint8()
self.DpcRequestSummary = v_uint32()
self.TimerHand = v_uint32()
self.LastTick = v_uint32()
self.MasterOffset = v_uint32()
self.PrcbPad41 = v_bytes(size=8) # FIXME Unknown Array Type
self.PeriodicCount = v_uint32()
self.PeriodicBias = v_uint32()
self._pad1958 = v_bytes(size=4)
self.TickOffset = v_uint64()
self.TimerTable = KTIMER_TABLE()
self.CallDpc = KDPC()
self.ClockKeepAlive = v_uint32()
self.ClockCheckSlot = v_uint8()
self.ClockPollCycle = v_uint8()
self.PrcbPad6 = v_bytes(size=2) # FIXME Unknown Array Type
self.DpcWatchdogPeriod = v_uint32()
self.DpcWatchdogCount = v_uint32()
self.ThreadWatchdogPeriod = v_uint32()
self.ThreadWatchdogCount = v_uint32()
self.KeSpinLockOrdering = v_uint32()
self.PrcbPad70 = v_bytes(size=4) # FIXME Unknown Array Type
self.WaitListHead = LIST_ENTRY()
self.WaitLock = v_uint32()
self.ReadySummary = v_uint32()
self.QueueIndex = v_uint32()
self.DeferredReadyListHead = SINGLE_LIST_ENTRY()
self.StartCycles = v_uint64()
self.CycleTime = v_uint64()
self.HighCycleTime = v_uint32()
self.PrcbPad71 = v_uint32()
self.PrcbPad72 = v_bytes(size=16) # FIXME Unknown Array Type
self.DispatcherReadyListHead = v_bytes(size=16) # FIXME Unknown Array Type
self.ChainedInterruptList = v_ptr32()
self.LookasideIrpFloat = v_uint32()
self.MmPageFaultCount = v_uint32()
self.MmCopyOnWriteCount = v_uint32()
self.MmTransitionCount = v_uint32()
self.MmCacheTransitionCount = v_uint32()
self.MmDemandZeroCount = v_uint32()
self.MmPageReadCount = v_uint32()
self.MmPageReadIoCount = v_uint32()
self.MmCacheReadCount = v_uint32()
self.MmCacheIoCount = v_uint32()
self.MmDirtyPagesWriteCount = v_uint32()
self.MmDirtyWriteIoCount = v_uint32()
self.MmMappedPagesWriteCount = v_uint32()
self.MmMappedWriteIoCount = v_uint32()
self.CachedCommit = v_uint32()
self.CachedResidentAvailable = v_uint32()
self.HyperPte = v_ptr32()
self.PrcbPad8 = v_bytes(size=4) # FIXME Unknown Array Type
self.VendorString = v_bytes(size=13) # FIXME Unknown Array Type
self.InitialApicId = v_uint8()
self.LogicalProcessorsPerPhysicalProcessor = v_uint8()
self.PrcbPad9 = v_bytes(size=5) # FIXME Unknown Array Type
self.FeatureBits = v_uint32()
self._pad3388 = v_bytes(size=4)
self.UpdateSignature = LARGE_INTEGER()
self.IsrTime = v_uint64()
self.RuntimeAccumulation = v_uint64()
self.PowerState = PROCESSOR_POWER_STATE()
self.DpcWatchdogDpc = KDPC()
self.DpcWatchdogTimer = KTIMER()
self.WheaInfo = v_ptr32()
self.EtwSupport = v_ptr32()
self.InterruptObjectPool = SLIST_HEADER()
self.HypercallPageList = SLIST_HEADER()
self.HypercallPageVirtual = v_ptr32()
self.VirtualApicAssist = v_ptr32()
self.StatisticsPage = v_ptr32()
self.RateControl = v_ptr32()
self.Cache = v_ptr32()
self.CacheCount = v_uint32()
self.CacheProcessorMask = v_bytes(size=20) # FIXME Unknown Array Type
self.PackageProcessorSet = KAFFINITY_EX()
self.PrcbPad91 = v_bytes(size=56) # FIXME Unknown Array Type
self.CoreProcessorSet = v_uint32()
self.TimerExpirationDpc = KDPC()
self.SpinLockAcquireCount = v_uint32()
self.SpinLockContentionCount = v_uint32()
self.SpinLockSpinCount = v_uint32()
self.IpiSendRequestBroadcastCount = v_uint32()
self.IpiSendRequestRoutineCount = v_uint32()
self.IpiSendSoftwareInterruptCount = v_uint32()
self.ExInitializeResourceCount = v_uint32()
self.ExReInitializeResourceCount = v_uint32()
self.ExDeleteResourceCount = v_uint32()
self.ExecutiveResourceAcquiresCount = v_uint32()
self.ExecutiveResourceContentionsCount = v_uint32()
self.ExecutiveResourceReleaseExclusiveCount = v_uint32()
self.ExecutiveResourceReleaseSharedCount = v_uint32()
self.ExecutiveResourceConvertsCount = v_uint32()
self.ExAcqResExclusiveAttempts = v_uint32()
self.ExAcqResExclusiveAcquiresExclusive = v_uint32()
self.ExAcqResExclusiveAcquiresExclusiveRecursive = v_uint32()
self.ExAcqResExclusiveWaits = v_uint32()
self.ExAcqResExclusiveNotAcquires = v_uint32()
self.ExAcqResSharedAttempts = v_uint32()
self.ExAcqResSharedAcquiresExclusive = v_uint32()
self.ExAcqResSharedAcquiresShared = v_uint32()
self.ExAcqResSharedAcquiresSharedRecursive = v_uint32()
self.ExAcqResSharedWaits = v_uint32()
self.ExAcqResSharedNotAcquires = v_uint32()
self.ExAcqResSharedStarveExclusiveAttempts = v_uint32()
self.ExAcqResSharedStarveExclusiveAcquiresExclusive = v_uint32()
self.ExAcqResSharedStarveExclusiveAcquiresShared = v_uint32()
self.ExAcqResSharedStarveExclusiveAcquiresSharedRecursive = v_uint32()
self.ExAcqResSharedStarveExclusiveWaits = v_uint32()
self.ExAcqResSharedStarveExclusiveNotAcquires = v_uint32()
self.ExAcqResSharedWaitForExclusiveAttempts = v_uint32()
self.ExAcqResSharedWaitForExclusiveAcquiresExclusive = v_uint32()
self.ExAcqResSharedWaitForExclusiveAcquiresShared = v_uint32()
self.ExAcqResSharedWaitForExclusiveAcquiresSharedRecursive = v_uint32()
self.ExAcqResSharedWaitForExclusiveWaits = v_uint32()
self.ExAcqResSharedWaitForExclusiveNotAcquires = v_uint32()
self.ExSetResOwnerPointerExclusive = v_uint32()
self.ExSetResOwnerPointerSharedNew = v_uint32()
self.ExSetResOwnerPointerSharedOld = v_uint32()
self.ExTryToAcqExclusiveAttempts = v_uint32()
self.ExTryToAcqExclusiveAcquires = v_uint32()
self.ExBoostExclusiveOwner = v_uint32()
self.ExBoostSharedOwners = v_uint32()
self.ExEtwSynchTrackingNotificationsCount = v_uint32()
self.ExEtwSynchTrackingNotificationsAccountedCount = v_uint32()
self.Context = v_ptr32()
self.ContextFlags = v_uint32()
self.ExtendedState = v_ptr32()
class RTL_DYNAMIC_HASH_TABLE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Flags = v_uint32()
self.Shift = v_uint32()
self.TableSize = v_uint32()
self.Pivot = v_uint32()
self.DivisorMask = v_uint32()
self.NumEntries = v_uint32()
self.NonEmptyBuckets = v_uint32()
self.NumEnumerators = v_uint32()
self.Directory = v_ptr32()
class KAFFINITY_EX(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Count = v_uint16()
self.Size = v_uint16()
self.Reserved = v_uint32()
self.Bitmap = v_bytes(size=16) # FIXME Unknown Array Type
class DEVICE_OBJECT(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Type = v_uint16()
self.Size = v_uint16()
self.ReferenceCount = v_uint32()
self.DriverObject = v_ptr32()
self.NextDevice = v_ptr32()
self.AttachedDevice = v_ptr32()
self.CurrentIrp = v_ptr32()
self.Timer = v_ptr32()
self.Flags = v_uint32()
self.Characteristics = v_uint32()
self.Vpb = v_ptr32()
self.DeviceExtension = v_ptr32()
self.DeviceType = v_uint32()
self.StackSize = v_uint8()
self._pad0034 = v_bytes(size=3)
self.Queue = _unnamed_7372()
self.AlignmentRequirement = v_uint32()
self.DeviceQueue = KDEVICE_QUEUE()
self.Dpc = KDPC()
self.ActiveThreadCount = v_uint32()
self.SecurityDescriptor = v_ptr32()
self.DeviceLock = KEVENT()
self.SectorSize = v_uint16()
self.Spare1 = v_uint16()
self.DeviceObjectExtension = v_ptr32()
self.Reserved = v_ptr32()
class USER_MEMORY_CACHE_ENTRY(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.UserBlocks = SLIST_HEADER()
self.AvailableBlocks = v_uint32()
class EX_PUSH_LOCK_WAIT_BLOCK(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.WakeEvent = KEVENT()
self.Next = v_ptr32()
self.Last = v_ptr32()
self.Previous = v_ptr32()
self.ShareCount = v_uint32()
self.Flags = v_uint32()
class _unnamed_8182(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PowerSequence = v_ptr32()
class _unnamed_9780(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Raw = _unnamed_9775()
class IMAGE_NT_HEADERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Signature = v_uint32()
self.FileHeader = IMAGE_FILE_HEADER()
self.OptionalHeader = IMAGE_OPTIONAL_HEADER()
class IO_STACK_LOCATION(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MajorFunction = v_uint8()
self.MinorFunction = v_uint8()
self.Flags = v_uint8()
self.Control = v_uint8()
self.Parameters = _unnamed_7812()
self.DeviceObject = v_ptr32()
self.FileObject = v_ptr32()
self.CompletionRoutine = v_ptr32()
self.Context = v_ptr32()
class KNODE(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PagedPoolSListHead = SLIST_HEADER()
self.NonPagedPoolSListHead = SLIST_HEADER()
self.Affinity = GROUP_AFFINITY()
self.ProximityId = v_uint32()
self.NodeNumber = v_uint16()
self.PrimaryNodeNumber = v_uint16()
self.MaximumProcessors = v_uint8()
self.Color = v_uint8()
self.Flags = flags()
self.NodePad0 = v_uint8()
self.Seed = v_uint32()
self.MmShiftedColor = v_uint32()
self.FreeCount = v_bytes(size=8) # FIXME Unknown Array Type
self.CachedKernelStacks = CACHED_KSTACK_LIST()
self.ParkLock = v_uint32()
self.NodePad1 = v_uint32()
class _unnamed_8078(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.InterfaceType = v_ptr32()
self.Size = v_uint16()
self.Version = v_uint16()
self.Interface = v_ptr32()
self.InterfaceSpecificData = v_ptr32()
class XSAVE_AREA_HEADER(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.Mask = v_uint64()
self.Reserved = v_bytes(size=56) # FIXME Unknown Array Type
class PSP_CPU_SHARE_CAPTURED_WEIGHT_DATA(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.CapturedCpuShareWeight = v_uint32()
self.CapturedTotalWeight = v_uint32()
class RTL_USER_PROCESS_PARAMETERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.MaximumLength = v_uint32()
self.Length = v_uint32()
self.Flags = v_uint32()
self.DebugFlags = v_uint32()
self.ConsoleHandle = v_ptr32()
self.ConsoleFlags = v_uint32()
self.StandardInput = v_ptr32()
self.StandardOutput = v_ptr32()
self.StandardError = v_ptr32()
self.CurrentDirectory = CURDIR()
self.DllPath = UNICODE_STRING()
self.ImagePathName = UNICODE_STRING()
self.CommandLine = UNICODE_STRING()
self.Environment = v_ptr32()
self.StartingX = v_uint32()
self.StartingY = v_uint32()
self.CountX = v_uint32()
self.CountY = v_uint32()
self.CountCharsX = v_uint32()
self.CountCharsY = v_uint32()
self.FillAttribute = v_uint32()
self.WindowFlags = v_uint32()
self.ShowWindowFlags = v_uint32()
self.WindowTitle = UNICODE_STRING()
self.DesktopInfo = UNICODE_STRING()
self.ShellInfo = UNICODE_STRING()
self.RuntimeData = UNICODE_STRING()
self.CurrentDirectores = UNICODE_STRING()
self.EnvironmentSize = v_uint32()
self.EnvironmentVersion = v_uint32()
class _unnamed_8176(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.PowerState = v_uint32()
class IO_RESOURCE_REQUIREMENTS_LIST(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.ListSize = v_uint32()
self.InterfaceType = v_uint32()
self.BusNumber = v_uint32()
self.SlotNumber = v_uint32()
self.Reserved = v_bytes(size=12) # FIXME Unknown Array Type
self.AlternativeLists = v_uint32()
self.List = v_uint32()
class HEAP_BUCKET_COUNTERS(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.TotalBlocks = v_uint32()
self.SubSegmentCounts = v_uint32()
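# Hypothetical usage sketch (not part of the generated file): these classes
# appear to follow the vivisect-style vstruct API, so a definition such as
# HEAP_BUCKET_COUNTERS above should parse raw little-endian bytes into named
# fields via vsParse(); the assertions below assume that API.
if __name__ == '__main__':
    buf = b'\x01\x00\x00\x00\x02\x00\x00\x00'
    hbc = HEAP_BUCKET_COUNTERS()
    hbc.vsParse(buf)                  # populate fields from the byte buffer
    assert hbc.TotalBlocks == 1
    assert hbc.SubSegmentCounts == 2
    assert len(hbc) == 8              # total struct size in bytes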
|
{
"content_hash": "809560fc7ad680acdebb6a4c93825be2",
"timestamp": "",
"source": "github",
"line_count": 5137,
"max_line_length": 94,
"avg_line_length": 34.36791901888262,
"alnum_prop": 0.6159514692888053,
"repo_name": "columbia/libtrack",
"id": "33ce5524eabedce821ef8e479f5075bebcef00ac",
"size": "176586",
"binary": false,
"copies": "19",
"ref": "refs/heads/master",
"path": "libtrack/elfmod/vstruct/defs/windows/win_6_1_wow64/ntdll.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "11274"
},
{
"name": "C",
"bytes": "625270"
},
{
"name": "C++",
"bytes": "20192"
},
{
"name": "Makefile",
"bytes": "670"
},
{
"name": "Perl",
"bytes": "19070"
},
{
"name": "Perl6",
"bytes": "31592"
},
{
"name": "Python",
"bytes": "2497184"
},
{
"name": "Shell",
"bytes": "44042"
}
],
"symlink_target": ""
}
|
from typing import List
class Solution:
    def longestCommonPrefix(self, strs: List[str]) -> str:
        """Shrink a candidate prefix against each string in turn."""
        if len(strs) == 0:
            return ""
        if len(strs) == 1:
            return strs[0]
        common_prefix = strs[0]
        l_common_prefix = len(common_prefix)
        s_idx = 1
        # Truncate the running prefix at the first mismatch with each string;
        # stop early once the prefix is empty.
        while s_idx < len(strs) and common_prefix:
            s = strs[s_idx]
            ls = len(s)
            l_min = min(l_common_prefix, ls)
            idx = 0
            while idx < l_min and s[idx] == common_prefix[idx]:
                idx += 1
            common_prefix = s[:idx]
            l_common_prefix = len(common_prefix)
            s_idx += 1
        return common_prefix
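# Hypothetical quick check for the solution above (not part of the original
# file); the vertical scan shrinks the candidate prefix string by string.
if __name__ == "__main__":
    s = Solution()
    assert s.longestCommonPrefix(["flower", "flow", "flight"]) == "fl"
    assert s.longestCommonPrefix(["dog", "racecar", "car"]) == ""
    assert s.longestCommonPrefix(["single"]) == "single"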
|
{
"content_hash": "de2c2961e857900e8625fe5b56ac7969",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 63,
"avg_line_length": 31.454545454545453,
"alnum_prop": 0.4869942196531792,
"repo_name": "y-usuzumi/survive-the-course",
"id": "ff45817097c92498292d71358d09fa2c8953bbdb",
"size": "692",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leetcode-cn/14.最长公共前缀/main.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "10933"
},
{
"name": "C#",
"bytes": "12308"
},
{
"name": "Haskell",
"bytes": "39589"
},
{
"name": "Java",
"bytes": "55330"
},
{
"name": "JavaScript",
"bytes": "123"
},
{
"name": "Kotlin",
"bytes": "1371"
},
{
"name": "Perl",
"bytes": "716"
},
{
"name": "Python",
"bytes": "348020"
},
{
"name": "Racket",
"bytes": "21082"
},
{
"name": "Rust",
"bytes": "220778"
},
{
"name": "Scala",
"bytes": "5023"
},
{
"name": "Shell",
"bytes": "695"
},
{
"name": "Standard ML",
"bytes": "42393"
},
{
"name": "TypeScript",
"bytes": "83468"
}
],
"symlink_target": ""
}
|
from numpy.random import RandomState
import numpy as np
__all__ = [
'assert_is_type',
'assert_valid_percent',
'get_random_state'
]
def assert_is_type(x, t):
if not isinstance(x, t):
raise TypeError('expected %r but got type=%s'
% (t, type(x)))
return x
def assert_valid_percent(x, eq_lower=False, eq_upper=False):
    # these are all castable to float (np.float / np.int / np.long are
    # deprecated aliases removed from modern NumPy; the abstract scalar
    # types below cover their instances)
    assert_is_type(x, (int, float, np.integer, np.floating))
x = float(x)
# test lower bound:
if not ((eq_lower and 0. <= x) or ((not eq_lower) and 0. < x)):
raise ValueError('Expected 0. %s x, but got x=%r'
% ('<=' if eq_lower else '<', x))
if not ((eq_upper and x <= 1.) or ((not eq_upper) and x < 1.)):
raise ValueError('Expected x %s 1., but got x=%r'
% ('<=' if eq_upper else '<', x))
return x
def get_random_state(random_state):
# if it's a seed, return a new seeded RandomState
    if random_state is None or \
            isinstance(random_state, (int, np.integer)):
return RandomState(random_state)
# if it's a RandomState, it's been initialized
elif isinstance(random_state, RandomState):
return random_state
else:
raise TypeError('cannot seed new RandomState with type=%s'
% type(random_state))
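# Hypothetical usage sketch (not part of the original module), showing how
# the validators compose when checking user-supplied hyperparameters.
if __name__ == "__main__":
    assert_valid_percent(0.3)                  # ok: strictly inside (0, 1)
    assert_valid_percent(1.0, eq_upper=True)   # ok: closed upper bound
    rs = get_random_state(42)                  # seed -> fresh RandomState
    assert get_random_state(rs) is rs          # existing state passes through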
|
{
"content_hash": "8a4f9f5b8fa9e3cee2f377173d47f620",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 67,
"avg_line_length": 32.714285714285715,
"alnum_prop": 0.5684133915574964,
"repo_name": "mbernico/snape",
"id": "1983a155afac6d27373125f4cb21f0a21c6ce51e",
"size": "1375",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "snape/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "79387"
},
{
"name": "Shell",
"bytes": "3215"
}
],
"symlink_target": ""
}
|
import sys, random, time, base64
import getopt
from array import array
from crypto import slowaes
from crypto import rsa
def _decrypt(data, pri):
""" Decrypt the encrypted data collected and sent from bot
Two blocks: len + enc(keyinfo) + sym_enc(data)
format:
0-3 len
4 sym key type
5- key info
"""
#msg = base64.b64decode(data)
msg = data
if len(msg) < 4:
return
try:
keyinfo_len = int(msg[0:4])
except ValueError:
print("Error: Invalide data")
return
keyinfo_enc = msg[4:4+keyinfo_len]
keyinfo = rsa.decrypt(keyinfo_enc, pri)
cipher = array('B', msg[4+keyinfo_len:])
alg = keyinfo[0]
if alg.upper() != 'A':
print "We only support AES currently."
return
try:
mode = int(keyinfo[1:3])
aes_len = int(keyinfo[3:7])
print "aes_len=", aes_len
aes_key_str = keyinfo[7:aes_len+7]
iv_len = int(keyinfo[aes_len+7:aes_len+7+4])
iv_str = keyinfo[7+aes_len+4:7+aes_len+4+iv_len]
orig_len = int(keyinfo[-4:])
print "alg=", alg, "mode=", mode, "aes_len=", aes_len, " iv_len=", iv_len, " orig=", orig_len
except ValueError:
print("Unable to parse keyinfo segment, quit.")
return
moo = slowaes.AESModeOfOperation()
aes_key = array('B', aes_key_str)
iv = array('B', iv_str)
#print aes_key, iv
decr = moo.decrypt(cipher, orig_len, mode, aes_key, moo.aes.keySize["SIZE_256"], iv)
return decr
def _encrypt(data, pub):
""" Encrypt data collected by bot
Two blocks: len + enc(keyinfo) + sym_enc(data)
format:
0-3 len
4 sym key type
5- key info
"""
# 0. TODO: zip the file
# 1. generate 256-bit AES key and IV
aes_key_str = rsa.randnum.read_random_bits(256)
iv_str = rsa.randnum.read_random_bits(256)
aes_key = array('B', aes_key_str)
iv = array('B', iv_str)
# 2. encrypt data with generated AES/IV, and encode the cipher text by Base64
moo = slowaes.AESModeOfOperation()
mode, orig_len, cipher = moo.encrypt(data, moo.modeOfOperation["CBC"], aes_key, moo.aes.keySize["SIZE_256"], iv)
print aes_key, iv, orig_len
#print("Raw_msg AES_mode=%d, len=%d" % (mode, orig_len))
# 3. encrypt keyinfo(AES/IV) with public key, and encode it together with ciphertext by Base64
keyinfo = ''.join(('A', str(mode).zfill(2), str(len(aes_key_str)).zfill(4), aes_key_str, str(len(iv_str)).zfill(4), iv_str, str(orig_len).zfill(4)))
key_enc = rsa.encrypt(keyinfo, pub)
# 4. return the result
msg = str(len(key_enc)).zfill(4) + key_enc + str(bytearray(cipher))
#b64msg = base64.b64encode(msg)
return msg
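# Hypothetical sketch of the keyinfo layout shared by _encrypt/_decrypt above
# (field widths taken from the code; the helper name is illustrative only):
#   [0]      algorithm tag ('A' = AES)
#   [1:3]    AES mode of operation, zero-padded
#   [3:7]    AES key length k, zero-padded
#   [7:7+k]  AES key bytes, then a 4-digit IV length and the IV bytes
#   [-4:]    original plaintext length, zero-padded
def _pack_keyinfo_demo(mode, key, iv, orig_len):
    return ''.join(('A', str(mode).zfill(2), str(len(key)).zfill(4), key,
                    str(len(iv)).zfill(4), iv, str(orig_len).zfill(4)))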
def encode(rawfile, keyfile):
print("Encrypt file with AES key and protect with public key ...")
try:
with open(keyfile, 'rb') as f:
pub = rsa.PublicKey.load_pkcs1(f.read(), "PEM")
    except (IOError, ValueError):
        print("Error: fail to read key file %s." % keyfile)
        return
    encrypted = None
    try:
        with open(rawfile, 'rb') as fin:
            encrypted = _encrypt(fin.read(), pub)
        with open(rawfile + '.enc', 'wb') as fout:
            fout.write(encrypted)
    except IOError:
        print("Error: failed to encrypt %s and write the result to file." % rawfile)
    if encrypted is not None:
        print encrypted
def decode(encfile, keyfile):
print("Decrypt the encrypted file with private key ...")
try:
with open(keyfile, 'rb') as f:
pri = rsa.PrivateKey.load_pkcs1(f.read(), "PEM")
    except (IOError, ValueError):
        print("Error: fail to read key file %s." % keyfile)
        return
    decrypted = None
    try:
        with open(encfile, 'rb') as fin:
            decrypted = _decrypt(fin.read(), pri)
        with open(encfile + '.dec', 'wb') as fout:
            fout.write(decrypted)
    except IOError:
        print("Error: failed to decrypt %s and write the result to file." % encfile)
    if decrypted is not None:
        print decrypted
def usage():
print("Usage: %s [option] <file>" % sys.argv[0])
print(" -d,--decode Decode the file")
print(" -e,--encode Encode the file")
print(" -k,--keyfile key file")
if __name__ == "__main__":
try:
        opts, args = getopt.getopt(sys.argv[1:], "d:e:k:i:",
                                   ["decode=", "encode=", "keyfile=", "input="])
    except getopt.GetoptError as err:
print str(err)
usage()
sys.exit(2)
print opts, args
datafile=None
keyfile=None
_proc=None
for o, a in opts:
print o, a
if o in ("-d", "--decode"):
datafile = a
_proc=decode
elif o in ("-e", "--encode"):
datafile = a
_proc=encode
elif o in ("-k", "--keyfile"):
keyfile = a
else:
print("Unhandled options ", o)
if keyfile is None or datafile is None:
usage()
sys.exit(2)
_proc(datafile, keyfile)
|
{
"content_hash": "643cceece240cc840f2fd019989bbd5f",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 149,
"avg_line_length": 26.78787878787879,
"alnum_prop": 0.6515837104072398,
"repo_name": "sfluo/Mr.Bot",
"id": "096ddf3507ca35eea4dd865b239fbffce179d0c3",
"size": "4525",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "encopt.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4519"
},
{
"name": "C",
"bytes": "677502"
},
{
"name": "C++",
"bytes": "3268"
},
{
"name": "Makefile",
"bytes": "4764"
},
{
"name": "Python",
"bytes": "1985775"
},
{
"name": "Shell",
"bytes": "23642"
}
],
"symlink_target": ""
}
|
import os
import sys
import cgi
from urllib2 import quote, unquote
import urlparse
from cStringIO import StringIO
from ConfigParser import SafeConfigParser
from pycsw.core.etree import etree
from pycsw import oaipmh, opensearch, sru
from pycsw.plugins.profiles import profile as pprofile
import pycsw.plugins.outputschemas
from pycsw.core import config, log, metadata, util
from pycsw.ogc.fes import fes1
import logging
LOGGER = logging.getLogger(__name__)
class Csw2(object):
''' CSW 2.x server '''
def __init__(self, server_csw):
''' Initialize CSW2 '''
self.parent = server_csw
self.version = '2.0.2'
def getcapabilities(self):
''' Handle GetCapabilities request '''
serviceidentification = True
serviceprovider = True
operationsmetadata = True
if 'sections' in self.parent.kvp:
serviceidentification = False
serviceprovider = False
operationsmetadata = False
for section in self.parent.kvp['sections'].split(','):
if section == 'ServiceIdentification':
serviceidentification = True
if section == 'ServiceProvider':
serviceprovider = True
if section == 'OperationsMetadata':
operationsmetadata = True
# check extra parameters that may be def'd by profiles
if self.parent.profiles is not None:
for prof in self.parent.profiles['loaded'].keys():
result = \
self.parent.profiles['loaded'][prof].check_parameters(self.parent.kvp)
if result is not None:
return self.exceptionreport(result['code'],
result['locator'], result['text'])
# @updateSequence: get latest update to repository
try:
updatesequence = \
util.get_time_iso2unix(self.parent.repository.query_insert())
        except Exception:
updatesequence = None
node = etree.Element(util.nspath_eval('csw:Capabilities',
self.parent.context.namespaces),
nsmap=self.parent.context.namespaces, version='2.0.2',
updateSequence=str(updatesequence))
if 'updatesequence' in self.parent.kvp:
if int(self.parent.kvp['updatesequence']) == updatesequence:
return node
elif int(self.parent.kvp['updatesequence']) > updatesequence:
return self.exceptionreport('InvalidUpdateSequence',
'updatesequence',
'outputsequence specified (%s) is higher than server\'s \
updatesequence (%s)' % (self.parent.kvp['updatesequence'],
updatesequence))
node.attrib[util.nspath_eval('xsi:schemaLocation',
self.parent.context.namespaces)] = '%s %s/csw/2.0.2/CSW-discovery.xsd' % \
(self.parent.context.namespaces['csw'],
self.parent.config.get('server', 'ogc_schemas_base'))
metadata_main = dict(self.parent.config.items('metadata:main'))
if serviceidentification:
LOGGER.debug('Writing section ServiceIdentification.')
serviceidentification = etree.SubElement(node, \
util.nspath_eval('ows:ServiceIdentification',
self.parent.context.namespaces))
etree.SubElement(serviceidentification,
util.nspath_eval('ows:Title', self.parent.context.namespaces)).text = \
metadata_main.get('identification_title', 'missing')
etree.SubElement(serviceidentification,
util.nspath_eval('ows:Abstract', self.parent.context.namespaces)).text = \
metadata_main.get('identification_abstract', 'missing')
keywords = etree.SubElement(serviceidentification,
util.nspath_eval('ows:Keywords', self.parent.context.namespaces))
for k in \
metadata_main.get('identification_keywords').split(','):
etree.SubElement(
keywords, util.nspath_eval('ows:Keyword',
self.parent.context.namespaces)).text = k
etree.SubElement(keywords,
util.nspath_eval('ows:Type', self.parent.context.namespaces),
codeSpace='ISOTC211/19115').text = \
metadata_main.get('identification_keywords_type', 'missing')
etree.SubElement(serviceidentification,
util.nspath_eval('ows:ServiceType', self.parent.context.namespaces),
codeSpace='OGC').text = 'CSW'
for stv in self.parent.context.model['parameters']['version']['values']:
etree.SubElement(serviceidentification,
util.nspath_eval('ows:ServiceTypeVersion',
self.parent.context.namespaces)).text = stv
etree.SubElement(serviceidentification,
util.nspath_eval('ows:Fees', self.parent.context.namespaces)).text = \
metadata_main.get('identification_fees', 'missing')
etree.SubElement(serviceidentification,
util.nspath_eval('ows:AccessConstraints',
self.parent.context.namespaces)).text = \
metadata_main.get('identification_accessconstraints', 'missing')
if serviceprovider:
LOGGER.debug('Writing section ServiceProvider.')
serviceprovider = etree.SubElement(node,
util.nspath_eval('ows:ServiceProvider', self.parent.context.namespaces))
etree.SubElement(serviceprovider,
util.nspath_eval('ows:ProviderName', self.parent.context.namespaces)).text = \
metadata_main.get('provider_name', 'missing')
providersite = etree.SubElement(serviceprovider,
util.nspath_eval('ows:ProviderSite', self.parent.context.namespaces))
providersite.attrib[util.nspath_eval('xlink:type',
self.parent.context.namespaces)] = 'simple'
providersite.attrib[util.nspath_eval('xlink:href',
self.parent.context.namespaces)] = \
metadata_main.get('provider_url', 'missing')
servicecontact = etree.SubElement(serviceprovider,
util.nspath_eval('ows:ServiceContact', self.parent.context.namespaces))
etree.SubElement(servicecontact,
util.nspath_eval('ows:IndividualName',
self.parent.context.namespaces)).text = \
metadata_main.get('contact_name', 'missing')
etree.SubElement(servicecontact,
util.nspath_eval('ows:PositionName',
self.parent.context.namespaces)).text = \
metadata_main.get('contact_position', 'missing')
contactinfo = etree.SubElement(servicecontact,
util.nspath_eval('ows:ContactInfo', self.parent.context.namespaces))
phone = etree.SubElement(contactinfo, util.nspath_eval('ows:Phone',
self.parent.context.namespaces))
etree.SubElement(phone, util.nspath_eval('ows:Voice',
self.parent.context.namespaces)).text = \
metadata_main.get('contact_phone', 'missing')
etree.SubElement(phone, util.nspath_eval('ows:Facsimile',
self.parent.context.namespaces)).text = \
metadata_main.get('contact_fax', 'missing')
address = etree.SubElement(contactinfo,
util.nspath_eval('ows:Address', self.parent.context.namespaces))
etree.SubElement(address,
util.nspath_eval('ows:DeliveryPoint',
self.parent.context.namespaces)).text = \
metadata_main.get('contact_address', 'missing')
etree.SubElement(address, util.nspath_eval('ows:City',
self.parent.context.namespaces)).text = \
metadata_main.get('contact_city', 'missing')
etree.SubElement(address,
util.nspath_eval('ows:AdministrativeArea',
self.parent.context.namespaces)).text = \
metadata_main.get('contact_stateorprovince', 'missing')
etree.SubElement(address,
util.nspath_eval('ows:PostalCode',
self.parent.context.namespaces)).text = \
metadata_main.get('contact_postalcode', 'missing')
etree.SubElement(address,
util.nspath_eval('ows:Country', self.parent.context.namespaces)).text = \
metadata_main.get('contact_country', 'missing')
etree.SubElement(address,
util.nspath_eval('ows:ElectronicMailAddress',
self.parent.context.namespaces)).text = \
metadata_main.get('contact_email', 'missing')
url = etree.SubElement(contactinfo,
util.nspath_eval('ows:OnlineResource', self.parent.context.namespaces))
url.attrib[util.nspath_eval('xlink:type',
self.parent.context.namespaces)] = 'simple'
url.attrib[util.nspath_eval('xlink:href',
self.parent.context.namespaces)] = \
metadata_main.get('contact_url', 'missing')
etree.SubElement(contactinfo,
util.nspath_eval('ows:HoursOfService',
self.parent.context.namespaces)).text = \
metadata_main.get('contact_hours', 'missing')
etree.SubElement(contactinfo,
util.nspath_eval('ows:ContactInstructions',
self.parent.context.namespaces)).text = \
metadata_main.get('contact_instructions', 'missing')
etree.SubElement(servicecontact,
util.nspath_eval('ows:Role', self.parent.context.namespaces),
codeSpace='ISOTC211/19115').text = \
metadata_main.get('contact_role', 'missing')
if operationsmetadata:
LOGGER.debug('Writing section OperationsMetadata.')
operationsmetadata = etree.SubElement(node,
util.nspath_eval('ows:OperationsMetadata',
self.parent.context.namespaces))
for operation in self.parent.context.model['operations'].keys():
oper = etree.SubElement(operationsmetadata,
util.nspath_eval('ows:Operation', self.parent.context.namespaces),
name=operation)
dcp = etree.SubElement(oper, util.nspath_eval('ows:DCP',
self.parent.context.namespaces))
http = etree.SubElement(dcp, util.nspath_eval('ows:HTTP',
self.parent.context.namespaces))
if self.parent.context.model['operations'][operation]['methods']['get']:
get = etree.SubElement(http, util.nspath_eval('ows:Get',
self.parent.context.namespaces))
get.attrib[util.nspath_eval('xlink:type',\
self.parent.context.namespaces)] = 'simple'
get.attrib[util.nspath_eval('xlink:href',\
self.parent.context.namespaces)] = self.parent.config.get('server', 'url')
if self.parent.context.model['operations'][operation]['methods']['post']:
post = etree.SubElement(http, util.nspath_eval('ows:Post',
self.parent.context.namespaces))
post.attrib[util.nspath_eval('xlink:type',
self.parent.context.namespaces)] = 'simple'
post.attrib[util.nspath_eval('xlink:href',
self.parent.context.namespaces)] = \
self.parent.config.get('server', 'url')
for parameter in \
self.parent.context.model['operations'][operation]['parameters']:
param = etree.SubElement(oper,
util.nspath_eval('ows:Parameter',
self.parent.context.namespaces), name=parameter)
for val in \
self.parent.context.model['operations'][operation]\
['parameters'][parameter]['values']:
etree.SubElement(param,
util.nspath_eval('ows:Value',
self.parent.context.namespaces)).text = val
if operation == 'GetRecords': # advertise queryables
for qbl in self.parent.repository.queryables.keys():
if qbl != '_all':
param = etree.SubElement(oper,
util.nspath_eval('ows:Constraint',
self.parent.context.namespaces), name=qbl)
for qbl2 in self.parent.repository.queryables[qbl]:
etree.SubElement(param,
util.nspath_eval('ows:Value',
self.parent.context.namespaces)).text = qbl2
if self.parent.profiles is not None:
for con in self.parent.context.model[\
'operations']['GetRecords']['constraints'].keys():
param = etree.SubElement(oper,
util.nspath_eval('ows:Constraint',
self.parent.context.namespaces), name = con)
for val in self.parent.context.model['operations']\
['GetRecords']['constraints'][con]['values']:
etree.SubElement(param,
util.nspath_eval('ows:Value',
self.parent.context.namespaces)).text = val
for parameter in self.parent.context.model['parameters'].keys():
param = etree.SubElement(operationsmetadata,
util.nspath_eval('ows:Parameter', self.parent.context.namespaces),
name=parameter)
for val in self.parent.context.model['parameters'][parameter]['values']:
etree.SubElement(param, util.nspath_eval('ows:Value',
self.parent.context.namespaces)).text = val
for constraint in self.parent.context.model['constraints'].keys():
param = etree.SubElement(operationsmetadata,
util.nspath_eval('ows:Constraint', self.parent.context.namespaces),
name=constraint)
for val in self.parent.context.model['constraints'][constraint]['values']:
etree.SubElement(param, util.nspath_eval('ows:Value',
self.parent.context.namespaces)).text = val
if self.parent.profiles is not None:
for prof in self.parent.profiles['loaded'].keys():
ecnode = \
self.parent.profiles['loaded'][prof].get_extendedcapabilities()
if ecnode is not None:
operationsmetadata.append(ecnode)
# always write out Filter_Capabilities
LOGGER.debug('Writing section Filter_Capabilities.')
fltcaps = etree.SubElement(node,
util.nspath_eval('ogc:Filter_Capabilities', self.parent.context.namespaces))
spatialcaps = etree.SubElement(fltcaps,
util.nspath_eval('ogc:Spatial_Capabilities', self.parent.context.namespaces))
geomops = etree.SubElement(spatialcaps,
util.nspath_eval('ogc:GeometryOperands', self.parent.context.namespaces))
for geomtype in \
fes1.MODEL['GeometryOperands']['values']:
etree.SubElement(geomops,
util.nspath_eval('ogc:GeometryOperand',
self.parent.context.namespaces)).text = geomtype
spatialops = etree.SubElement(spatialcaps,
util.nspath_eval('ogc:SpatialOperators', self.parent.context.namespaces))
for spatial_comparison in \
fes1.MODEL['SpatialOperators']['values']:
etree.SubElement(spatialops,
util.nspath_eval('ogc:SpatialOperator', self.parent.context.namespaces),
name=spatial_comparison)
scalarcaps = etree.SubElement(fltcaps,
util.nspath_eval('ogc:Scalar_Capabilities', self.parent.context.namespaces))
etree.SubElement(scalarcaps, util.nspath_eval('ogc:LogicalOperators',
self.parent.context.namespaces))
cmpops = etree.SubElement(scalarcaps,
util.nspath_eval('ogc:ComparisonOperators', self.parent.context.namespaces))
for cmpop in fes1.MODEL['ComparisonOperators'].keys():
etree.SubElement(cmpops,
util.nspath_eval('ogc:ComparisonOperator',
self.parent.context.namespaces)).text = \
fes1.MODEL['ComparisonOperators'][cmpop]['opname']
arithops = etree.SubElement(scalarcaps,
util.nspath_eval('ogc:ArithmeticOperators', self.parent.context.namespaces))
functions = etree.SubElement(arithops,
util.nspath_eval('ogc:Functions', self.parent.context.namespaces))
functionames = etree.SubElement(functions,
util.nspath_eval('ogc:FunctionNames', self.parent.context.namespaces))
for fnop in sorted(fes1.MODEL['Functions'].keys()):
etree.SubElement(functionames,
util.nspath_eval('ogc:FunctionName', self.parent.context.namespaces),
nArgs=fes1.MODEL['Functions'][fnop]['args']).text = fnop
idcaps = etree.SubElement(fltcaps,
util.nspath_eval('ogc:Id_Capabilities', self.parent.context.namespaces))
for idcap in fes1.MODEL['Ids']['values']:
etree.SubElement(idcaps, util.nspath_eval('ogc:%s' % idcap,
self.parent.context.namespaces))
return node
def describerecord(self):
''' Handle DescribeRecord request '''
if 'typename' not in self.parent.kvp or \
len(self.parent.kvp['typename']) == 0: # missing typename
# set to return all typenames
self.parent.kvp['typename'] = ['csw:Record']
if self.parent.profiles is not None:
for prof in self.parent.profiles['loaded'].keys():
self.parent.kvp['typename'].append(
self.parent.profiles['loaded'][prof].typename)
elif self.parent.requesttype == 'GET': # pass via GET
self.parent.kvp['typename'] = self.parent.kvp['typename'].split(',')
if ('outputformat' in self.parent.kvp and
self.parent.kvp['outputformat'] not in
self.parent.context.model['operations']['DescribeRecord']
['parameters']['outputFormat']['values']): # bad outputformat
return self.exceptionreport('InvalidParameterValue',
'outputformat', 'Invalid value for outputformat: %s' %
self.parent.kvp['outputformat'])
if ('schemalanguage' in self.parent.kvp and
self.parent.kvp['schemalanguage'] not in
self.parent.context.model['operations']['DescribeRecord']['parameters']
['schemaLanguage']['values']): # bad schemalanguage
return self.exceptionreport('InvalidParameterValue',
'schemalanguage', 'Invalid value for schemalanguage: %s' %
self.parent.kvp['schemalanguage'])
node = etree.Element(util.nspath_eval('csw:DescribeRecordResponse',
self.parent.context.namespaces), nsmap=self.parent.context.namespaces)
node.attrib[util.nspath_eval('xsi:schemaLocation',
self.parent.context.namespaces)] = \
'%s %s/csw/2.0.2/CSW-discovery.xsd' % (self.parent.context.namespaces['csw'],
self.parent.config.get('server', 'ogc_schemas_base'))
for typename in self.parent.kvp['typename']:
if typename.find(':') == -1: # unqualified typename
return self.exceptionreport('InvalidParameterValue',
'typename', 'Typename not qualified: %s' % typename)
if typename == 'csw:Record': # load core schema
LOGGER.debug('Writing csw:Record schema.')
schemacomponent = etree.SubElement(node,
util.nspath_eval('csw:SchemaComponent', self.parent.context.namespaces),
schemaLanguage='XMLSCHEMA',
targetNamespace=self.parent.context.namespaces['csw'])
path = os.path.join(self.parent.config.get('server', 'home'),
'core', 'schemas', 'ogc', 'csw', '2.0.2', 'record.xsd')
dublincore = etree.parse(path).getroot()
schemacomponent.append(dublincore)
if self.parent.profiles is not None:
for prof in self.parent.profiles['loaded'].keys():
if self.parent.profiles['loaded'][prof].typename == typename:
scnodes = \
self.parent.profiles['loaded'][prof].get_schemacomponents()
if scnodes is not None:
map(node.append, scnodes)
return node
def getdomain(self):
''' Handle GetDomain request '''
if ('parametername' not in self.parent.kvp and
'propertyname' not in self.parent.kvp):
return self.exceptionreport('MissingParameterValue',
'parametername', 'Missing value. \
One of propertyname or parametername must be specified')
node = etree.Element(util.nspath_eval('csw:GetDomainResponse',
self.parent.context.namespaces), nsmap=self.parent.context.namespaces)
node.attrib[util.nspath_eval('xsi:schemaLocation',
self.parent.context.namespaces)] = '%s %s/csw/2.0.2/CSW-discovery.xsd' % \
(self.parent.context.namespaces['csw'],
self.parent.config.get('server', 'ogc_schemas_base'))
if 'parametername' in self.parent.kvp:
for pname in self.parent.kvp['parametername'].split(','):
LOGGER.debug('Parsing parametername %s.' % pname)
domainvalue = etree.SubElement(node,
util.nspath_eval('csw:DomainValues', self.parent.context.namespaces),
type='csw:Record')
etree.SubElement(domainvalue,
util.nspath_eval('csw:ParameterName',
self.parent.context.namespaces)).text = pname
try:
operation, parameter = pname.split('.')
                except Exception:
return node
if (operation in self.parent.context.model['operations'].keys() and
parameter in
self.parent.context.model['operations'][operation]['parameters'].keys()):
listofvalues = etree.SubElement(domainvalue,
util.nspath_eval('csw:ListOfValues', self.parent.context.namespaces))
for val in \
self.parent.context.model['operations'][operation]\
['parameters'][parameter]['values']:
etree.SubElement(listofvalues,
util.nspath_eval('csw:Value',
self.parent.context.namespaces)).text = val
if 'propertyname' in self.parent.kvp:
for pname in self.parent.kvp['propertyname'].split(','):
LOGGER.debug('Parsing propertyname %s.' % pname)
if pname.find('/') == 0: # it's an XPath
pname2 = pname
else: # it's a core queryable, map to internal typename model
try:
pname2 = self.parent.repository.queryables['_all'][pname]['dbcol']
                    except Exception:
pname2 = pname
# decipher typename
dvtype = None
if self.parent.profiles is not None:
for prof in self.parent.profiles['loaded'].keys():
for prefix in self.parent.profiles['loaded'][prof].prefixes:
if pname2.find(prefix) != -1:
dvtype = self.parent.profiles['loaded'][prof].typename
break
if not dvtype:
dvtype = 'csw:Record'
domainvalue = etree.SubElement(node,
util.nspath_eval('csw:DomainValues', self.parent.context.namespaces),
type=dvtype)
etree.SubElement(domainvalue,
util.nspath_eval('csw:PropertyName',
self.parent.context.namespaces)).text = pname
try:
LOGGER.debug(
'Querying repository property %s, typename %s, \
domainquerytype %s.' % \
(pname2, dvtype, self.parent.domainquerytype))
count = False
if (self.parent.config.has_option('server', 'domaincounts') and
self.parent.config.get('server', 'domaincounts') == 'true'):
count = True
results = self.parent.repository.query_domain(
pname2, dvtype, self.parent.domainquerytype, count)
LOGGER.debug('Results: %s' % str(len(results)))
if self.parent.domainquerytype == 'range':
rangeofvalues = etree.SubElement(domainvalue,
util.nspath_eval('csw:RangeOfValues',
self.parent.context.namespaces))
etree.SubElement(rangeofvalues,
util.nspath_eval('csw:MinValue',
self.parent.context.namespaces)).text = results[0][0]
etree.SubElement(rangeofvalues,
util.nspath_eval('csw:MaxValue',
self.parent.context.namespaces)).text = results[0][1]
else:
listofvalues = etree.SubElement(domainvalue,
util.nspath_eval('csw:ListOfValues',
self.parent.context.namespaces))
for result in results:
LOGGER.debug(str(result))
if (result is not None and
result[0] is not None): # drop null values
if count: # show counts
val = '%s (%s)' % (result[0], result[1])
else:
val = result[0]
etree.SubElement(listofvalues,
util.nspath_eval('csw:Value',
self.parent.context.namespaces)).text = val
except Exception as err:
LOGGER.debug('No results for propertyname %s: %s.' %
(pname2, str(err)))
return node
def getrecords(self):
''' Handle GetRecords request '''
timestamp = util.get_today_and_now()
if ('elementsetname' not in self.parent.kvp and
'elementname' not in self.parent.kvp):
# mutually exclusive required
return self.exceptionreport('MissingParameterValue',
'elementsetname',
'Missing one of ElementSetName or ElementName parameter(s)')
if 'outputschema' not in self.parent.kvp:
self.parent.kvp['outputschema'] = self.parent.context.namespaces['csw']
if (self.parent.kvp['outputschema'] not in self.parent.context.model['operations']
['GetRecords']['parameters']['outputSchema']['values']):
return self.exceptionreport('InvalidParameterValue',
'outputschema', 'Invalid outputSchema parameter value: %s' %
self.parent.kvp['outputschema'])
if 'outputformat' not in self.parent.kvp:
self.parent.kvp['outputformat'] = 'application/xml'
if (self.parent.kvp['outputformat'] not in self.parent.context.model['operations']
['GetRecords']['parameters']['outputFormat']['values']):
return self.exceptionreport('InvalidParameterValue',
'outputformat', 'Invalid outputFormat parameter value: %s' %
self.parent.kvp['outputformat'])
if 'resulttype' not in self.parent.kvp:
self.parent.kvp['resulttype'] = 'hits'
if self.parent.kvp['resulttype'] is not None:
if (self.parent.kvp['resulttype'] not in self.parent.context.model['operations']
['GetRecords']['parameters']['resultType']['values']):
return self.exceptionreport('InvalidParameterValue',
'resulttype', 'Invalid resultType parameter value: %s' %
self.parent.kvp['resulttype'])
if (('elementname' not in self.parent.kvp or
len(self.parent.kvp['elementname']) == 0) and
self.parent.kvp['elementsetname'] not in
self.parent.context.model['operations']['GetRecords']['parameters']
['ElementSetName']['values']):
return self.exceptionreport('InvalidParameterValue',
'elementsetname', 'Invalid ElementSetName parameter value: %s' %
self.parent.kvp['elementsetname'])
if ('elementname' in self.parent.kvp and
self.parent.requesttype == 'GET'): # passed via GET
self.parent.kvp['elementname'] = self.parent.kvp['elementname'].split(',')
self.parent.kvp['elementsetname'] = 'summary'
if 'typenames' not in self.parent.kvp:
return self.exceptionreport('MissingParameterValue',
'typenames', 'Missing typenames parameter')
if ('typenames' in self.parent.kvp and
self.parent.requesttype == 'GET'): # passed via GET
self.parent.kvp['typenames'] = self.parent.kvp['typenames'].split(',')
if 'typenames' in self.parent.kvp:
for tname in self.parent.kvp['typenames']:
if (tname not in self.parent.context.model['operations']['GetRecords']
['parameters']['typeNames']['values']):
return self.exceptionreport('InvalidParameterValue',
'typenames', 'Invalid typeNames parameter value: %s' %
tname)
# check elementname's
if 'elementname' in self.parent.kvp:
for ename in self.parent.kvp['elementname']:
enamelist = self.parent.repository.queryables['_all'].keys()
if ename not in enamelist:
return self.exceptionreport('InvalidParameterValue',
'elementname', 'Invalid ElementName parameter value: %s' %
ename)
if self.parent.kvp['resulttype'] == 'validate':
return self._write_acknowledgement()
maxrecords_cfg = -1 # not set in config server.maxrecords
if self.parent.config.has_option('server', 'maxrecords'):
maxrecords_cfg = int(self.parent.config.get('server', 'maxrecords'))
if 'maxrecords' not in self.parent.kvp: # not specified by client
if maxrecords_cfg > -1: # specified in config
self.parent.kvp['maxrecords'] = maxrecords_cfg
else: # spec default
self.parent.kvp['maxrecords'] = 10
else: # specified by client
if self.parent.kvp['maxrecords'] == '':
self.parent.kvp['maxrecords'] = 10
if maxrecords_cfg > -1: # set in config
if int(self.parent.kvp['maxrecords']) > maxrecords_cfg:
self.parent.kvp['maxrecords'] = maxrecords_cfg
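        # Worked example of the negotiation above (numbers illustrative):
        # config maxrecords=50, client asks for 100 -> capped to 50; client
        # sends nothing and config is set -> 50; neither set -> spec default 10.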
if any(x in ['bbox', 'q', 'time'] for x in self.parent.kvp):
LOGGER.debug('OpenSearch Geo/Time parameters detected.')
self.parent.kvp['constraintlanguage'] = 'FILTER'
tmp_filter = opensearch.kvp2filterxml(self.parent.kvp, self.parent.context)
            if tmp_filter != "":
self.parent.kvp['constraint'] = tmp_filter
LOGGER.debug('OpenSearch Geo/Time parameters to Filter: %s.' % self.parent.kvp['constraint'])
if self.parent.requesttype == 'GET':
if 'constraint' in self.parent.kvp:
# GET request
LOGGER.debug('csw:Constraint passed over HTTP GET.')
if 'constraintlanguage' not in self.parent.kvp:
return self.exceptionreport('MissingParameterValue',
'constraintlanguage',
'constraintlanguage required when constraint specified')
if (self.parent.kvp['constraintlanguage'] not in
self.parent.context.model['operations']['GetRecords']['parameters']
['CONSTRAINTLANGUAGE']['values']):
return self.exceptionreport('InvalidParameterValue',
'constraintlanguage', 'Invalid constraintlanguage: %s'
% self.parent.kvp['constraintlanguage'])
if self.parent.kvp['constraintlanguage'] == 'CQL_TEXT':
tmp = self.parent.kvp['constraint']
self.parent.kvp['constraint'] = {}
self.parent.kvp['constraint']['type'] = 'cql'
self.parent.kvp['constraint']['where'] = \
self.parent._cql_update_queryables_mappings(tmp,
self.parent.repository.queryables['_all'])
self.parent.kvp['constraint']['values'] = {}
elif self.parent.kvp['constraintlanguage'] == 'FILTER':
# validate filter XML
try:
schema = os.path.join(self.parent.config.get('server', 'home'),
'core', 'schemas', 'ogc', 'filter', '1.1.0', 'filter.xsd')
LOGGER.debug('Validating Filter %s.' %
self.parent.kvp['constraint'])
schema = etree.XMLSchema(file=schema)
parser = etree.XMLParser(schema=schema, resolve_entities=False)
doc = etree.fromstring(self.parent.kvp['constraint'], parser)
LOGGER.debug('Filter is valid XML.')
self.parent.kvp['constraint'] = {}
self.parent.kvp['constraint']['type'] = 'filter'
self.parent.kvp['constraint']['where'], self.parent.kvp['constraint']['values'] = \
fes1.parse(doc,
self.parent.repository.queryables['_all'],
self.parent.repository.dbtype,
self.parent.context.namespaces, self.parent.orm, self.parent.language['text'], self.parent.repository.fts)
except Exception as err:
errortext = \
'Exception: document not valid.\nError: %s.' % str(err)
LOGGER.debug(errortext)
return self.exceptionreport('InvalidParameterValue',
'constraint', 'Invalid Filter query: %s' % errortext)
else:
self.parent.kvp['constraint'] = {}
if 'sortby' not in self.parent.kvp:
self.parent.kvp['sortby'] = None
elif 'sortby' in self.parent.kvp and self.parent.requesttype == 'GET':
LOGGER.debug('Sorted query specified.')
tmp = self.parent.kvp['sortby']
self.parent.kvp['sortby'] = {}
try:
name, order = tmp.rsplit(':', 1)
            except Exception:
return self.exceptionreport('InvalidParameterValue',
'sortby', 'Invalid SortBy value: must be in the format\
propertyname:A or propertyname:D')
try:
self.parent.kvp['sortby']['propertyname'] = \
self.parent.repository.queryables['_all'][name]['dbcol']
if name.find('BoundingBox') != -1 or name.find('Envelope') != -1:
# it's a spatial sort
self.parent.kvp['sortby']['spatial'] = True
except Exception as err:
return self.exceptionreport('InvalidParameterValue',
'sortby', 'Invalid SortBy propertyname: %s' % name)
if order not in ['A', 'D']:
return self.exceptionreport('InvalidParameterValue',
'sortby', 'Invalid SortBy value: sort order must be "A" or "D"')
if order == 'D':
self.parent.kvp['sortby']['order'] = 'DESC'
else:
self.parent.kvp['sortby']['order'] = 'ASC'
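        # e.g. sortby=dc:title:A maps to an ascending sort on the mapped dbcol;
        # the rsplit(':', 1) above keeps namespaced names like dc:title intact.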
if 'startposition' not in self.parent.kvp:
self.parent.kvp['startposition'] = 1
# query repository
LOGGER.debug('Querying repository with constraint: %s,\
sortby: %s, typenames: %s, maxrecords: %s, startposition: %s.' %
(self.parent.kvp['constraint'], self.parent.kvp['sortby'], self.parent.kvp['typenames'],
self.parent.kvp['maxrecords'], self.parent.kvp['startposition']))
try:
matched, results = self.parent.repository.query(
constraint=self.parent.kvp['constraint'],
sortby=self.parent.kvp['sortby'], typenames=self.parent.kvp['typenames'],
maxrecords=self.parent.kvp['maxrecords'],
startposition=int(self.parent.kvp['startposition'])-1)
except Exception as err:
return self.exceptionreport('InvalidParameterValue', 'constraint',
'Invalid query: %s' % err)
dsresults = []
if (self.parent.config.has_option('server', 'federatedcatalogues') and
'distributedsearch' in self.parent.kvp and
self.parent.kvp['distributedsearch'] and self.parent.kvp['hopcount'] > 0):
# do distributed search
LOGGER.debug('DistributedSearch specified (hopCount: %s).' %
self.parent.kvp['hopcount'])
from owslib.csw import CatalogueServiceWeb
from owslib.ows import ExceptionReport
for fedcat in \
self.parent.config.get('server', 'federatedcatalogues').split(','):
LOGGER.debug('Performing distributed search on federated \
catalogue: %s.' % fedcat)
remotecsw = CatalogueServiceWeb(fedcat, skip_caps=True)
try:
remotecsw.getrecords2(xml=self.parent.request)
if hasattr(remotecsw, 'results'):
LOGGER.debug(
'Distributed search results from catalogue \
%s: %s.' % (fedcat, remotecsw.results))
remotecsw_matches = int(remotecsw.results['matches'])
plural = 's' if remotecsw_matches != 1 else ''
if remotecsw_matches > 0:
matched = str(int(matched) + remotecsw_matches)
dsresults.append(etree.Comment(
' %d result%s from %s ' %
(remotecsw_matches, plural, fedcat)))
dsresults.append(remotecsw.records)
except ExceptionReport as err:
error_string = 'remote CSW %s returned exception: ' % fedcat
dsresults.append(etree.Comment(
' %s\n\n%s ' % (error_string, err)))
LOGGER.debug(str(err))
except Exception as err:
error_string = 'remote CSW %s returned error: ' % fedcat
dsresults.append(etree.Comment(
' %s\n\n%s ' % (error_string, err)))
LOGGER.debug(str(err))
if int(matched) == 0:
returned = nextrecord = '0'
else:
if int(matched) < int(self.parent.kvp['maxrecords']):
returned = matched
nextrecord = '0'
else:
returned = str(self.parent.kvp['maxrecords'])
if int(self.parent.kvp['startposition']) + int(self.parent.kvp['maxrecords']) >= int(matched):
nextrecord = '0'
else:
nextrecord = str(int(self.parent.kvp['startposition']) + \
int(self.parent.kvp['maxrecords']))
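        # Worked paging example (numbers illustrative): matched=25,
        # startposition=1, maxrecords=10 -> returned=10, nextrecord=11;
        # on the last page (startposition=21) 21+10 >= 25, so nextrecord=0,
        # signalling the client that no further records remain.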
LOGGER.debug('Results: matched: %s, returned: %s, next: %s.' % \
(matched, returned, nextrecord))
node = etree.Element(util.nspath_eval('csw:GetRecordsResponse',
self.parent.context.namespaces),
nsmap=self.parent.context.namespaces, version='2.0.2')
node.attrib[util.nspath_eval('xsi:schemaLocation',
self.parent.context.namespaces)] = \
'%s %s/csw/2.0.2/CSW-discovery.xsd' % \
(self.parent.context.namespaces['csw'], self.parent.config.get('server', 'ogc_schemas_base'))
if 'requestid' in self.parent.kvp and self.parent.kvp['requestid'] is not None:
etree.SubElement(node, util.nspath_eval('csw:RequestId',
self.parent.context.namespaces)).text = self.parent.kvp['requestid']
etree.SubElement(node, util.nspath_eval('csw:SearchStatus',
self.parent.context.namespaces), timestamp=timestamp)
if 'where' not in self.parent.kvp['constraint'] and \
self.parent.kvp['resulttype'] is None:
returned = '0'
searchresults = etree.SubElement(node,
util.nspath_eval('csw:SearchResults', self.parent.context.namespaces),
numberOfRecordsMatched=matched, numberOfRecordsReturned=returned,
nextRecord=nextrecord, recordSchema=self.parent.kvp['outputschema'])
if self.parent.kvp['elementsetname'] is not None:
searchresults.attrib['elementSet'] = self.parent.kvp['elementsetname']
if 'where' not in self.parent.kvp['constraint'] \
and self.parent.kvp['resulttype'] is None:
LOGGER.debug('Empty result set returned.')
return node
if self.parent.kvp['resulttype'] == 'hits':
return node
if results is not None:
if len(results) < self.parent.kvp['maxrecords']:
max1 = len(results)
else:
max1 = int(self.parent.kvp['startposition']) + (int(self.parent.kvp['maxrecords'])-1)
LOGGER.debug('Presenting records %s - %s.' %
(self.parent.kvp['startposition'], max1))
for res in results:
try:
if (self.parent.kvp['outputschema'] ==
'http://www.opengis.net/cat/csw/2.0.2' and
'csw:Record' in self.parent.kvp['typenames']):
# serialize csw:Record inline
searchresults.append(self._write_record(
res, self.parent.repository.queryables['_all']))
elif (self.parent.kvp['outputschema'] ==
'http://www.opengis.net/cat/csw/2.0.2' and
'csw:Record' not in self.parent.kvp['typenames']):
# serialize into csw:Record model
for prof in self.parent.profiles['loaded']:
# find source typename
if self.parent.profiles['loaded'][prof].typename in \
self.parent.kvp['typenames']:
typename = self.parent.profiles['loaded'][prof].typename
break
util.transform_mappings(self.parent.repository.queryables['_all'],
self.parent.context.model['typenames'][typename]\
['mappings']['csw:Record'], reverse=True)
searchresults.append(self._write_record(
res, self.parent.repository.queryables['_all']))
elif self.parent.kvp['outputschema'] in self.parent.outputschemas.keys(): # use outputschema serializer
searchresults.append(self.parent.outputschemas[self.parent.kvp['outputschema']].write_record(res, self.parent.kvp['elementsetname'], self.parent.context, self.parent.config.get('server', 'url')))
else: # use profile serializer
searchresults.append(
self.parent.profiles['loaded'][self.parent.kvp['outputschema']].\
write_record(res, self.parent.kvp['elementsetname'],
self.parent.kvp['outputschema'],
self.parent.repository.queryables['_all']))
except Exception as err:
self.parent.response = self.exceptionreport(
'NoApplicableCode', 'service',
'Record serialization failed: %s' % str(err))
return self.parent.response
if len(dsresults) > 0: # return DistributedSearch results
for resultset in dsresults:
if isinstance(resultset, etree._Comment):
searchresults.append(resultset)
for rec in resultset:
searchresults.append(etree.fromstring(resultset[rec].xml))
if 'responsehandler' in self.parent.kvp: # process the handler
self.parent._process_responsehandler(etree.tostring(node,
pretty_print=self.parent.pretty_print))
else:
return node
def getrecordbyid(self, raw=False):
''' Handle GetRecordById request '''
if 'id' not in self.parent.kvp:
return self.exceptionreport('MissingParameterValue', 'id',
'Missing id parameter')
if len(self.parent.kvp['id']) < 1:
return self.exceptionreport('InvalidParameterValue', 'id',
'Invalid id parameter')
if 'outputschema' not in self.parent.kvp:
self.parent.kvp['outputschema'] = self.parent.context.namespaces['csw']
if self.parent.requesttype == 'GET':
self.parent.kvp['id'] = self.parent.kvp['id'].split(',')
if ('outputformat' in self.parent.kvp and
self.parent.kvp['outputformat'] not in
self.parent.context.model['operations']['GetRecordById']['parameters']
['outputFormat']['values']):
return self.exceptionreport('InvalidParameterValue',
'outputformat', 'Invalid outputformat parameter %s' %
self.parent.kvp['outputformat'])
if ('outputschema' in self.parent.kvp and self.parent.kvp['outputschema'] not in
self.parent.context.model['operations']['GetRecordById']['parameters']
['outputSchema']['values']):
return self.exceptionreport('InvalidParameterValue',
'outputschema', 'Invalid outputschema parameter %s' %
self.parent.kvp['outputschema'])
if 'elementsetname' not in self.parent.kvp:
self.parent.kvp['elementsetname'] = 'summary'
else:
if (self.parent.kvp['elementsetname'] not in
self.parent.context.model['operations']['GetRecordById']['parameters']
['ElementSetName']['values']):
return self.exceptionreport('InvalidParameterValue',
'elementsetname', 'Invalid elementsetname parameter %s' %
self.parent.kvp['elementsetname'])
node = etree.Element(util.nspath_eval('csw:GetRecordByIdResponse',
self.parent.context.namespaces), nsmap=self.parent.context.namespaces)
node.attrib[util.nspath_eval('xsi:schemaLocation',
self.parent.context.namespaces)] = '%s %s/csw/2.0.2/CSW-discovery.xsd' % \
(self.parent.context.namespaces['csw'], self.parent.config.get('server', 'ogc_schemas_base'))
# query repository
LOGGER.debug('Querying repository with ids: %s.' % self.parent.kvp['id'][0])
results = self.parent.repository.query_ids(self.parent.kvp['id'])
if raw: # GetRepositoryItem request
LOGGER.debug('GetRepositoryItem request.')
if len(results) > 0:
return etree.fromstring(util.getqattr(results[0],
self.parent.context.md_core_model['mappings']['pycsw:XML']))
for result in results:
if (util.getqattr(result,
self.parent.context.md_core_model['mappings']['pycsw:Typename']) == 'csw:Record'
and self.parent.kvp['outputschema'] ==
'http://www.opengis.net/cat/csw/2.0.2'):
# serialize record inline
node.append(self._write_record(
result, self.parent.repository.queryables['_all']))
elif (self.parent.kvp['outputschema'] ==
'http://www.opengis.net/cat/csw/2.0.2'):
# serialize into csw:Record model
typename = None
for prof in self.parent.profiles['loaded']: # find source typename
if self.parent.profiles['loaded'][prof].typename in \
[util.getqattr(result, self.parent.context.md_core_model['mappings']['pycsw:Typename'])]:
typename = self.parent.profiles['loaded'][prof].typename
break
if typename is not None:
util.transform_mappings(self.parent.repository.queryables['_all'],
self.parent.context.model['typenames'][typename]\
['mappings']['csw:Record'], reverse=True)
node.append(self._write_record(
result, self.parent.repository.queryables['_all']))
elif self.parent.kvp['outputschema'] in self.parent.outputschemas.keys(): # use outputschema serializer
node.append(self.parent.outputschemas[self.parent.kvp['outputschema']].write_record(result, self.parent.kvp['elementsetname'], self.parent.context, self.parent.config.get('server', 'url')))
else: # it's a profile output
node.append(
self.parent.profiles['loaded'][self.parent.kvp['outputschema']].write_record(
result, self.parent.kvp['elementsetname'],
self.parent.kvp['outputschema'], self.parent.repository.queryables['_all']))
if raw and len(results) == 0:
return None
return node
def getrepositoryitem(self):
''' Handle GetRepositoryItem request '''
# similar to GetRecordById without csw:* wrapping
node = self.parent.getrecordbyid(raw=True)
if node is None:
return self.exceptionreport('NotFound', 'id',
'No repository item found for \'%s\'' % self.parent.kvp['id'])
else:
return node
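# A hedged sketch of a csw:Transaction insert payload handled below
# (the identifier and title are hypothetical examples):
#   <csw:Transaction service="CSW" version="2.0.2">
#     <csw:Insert>
#       <csw:Record>
#         <dc:identifier>urn:uuid:example-1</dc:identifier>
#         <dc:title>Example record</dc:title>
#       </csw:Record>
#     </csw:Insert>
#   </csw:Transaction>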
def transaction(self):
''' Handle Transaction request '''
try:
self.parent._test_manager()
except Exception as err:
return self.exceptionreport('NoApplicableCode', 'transaction',
str(err))
inserted = 0
updated = 0
deleted = 0
insertresults = []
LOGGER.debug('Transaction list: %s' % self.parent.kvp['transactions'])
for ttype in self.parent.kvp['transactions']:
if ttype['type'] == 'insert':
try:
record = metadata.parse_record(self.parent.context,
ttype['xml'], self.parent.repository)[0]
except Exception as err:
return self.exceptionreport('NoApplicableCode', 'insert',
'Transaction (insert) failed: record parsing failed: %s' \
% str(err))
LOGGER.debug('Transaction operation: %s' % record)
if not hasattr(record,
self.parent.context.md_core_model['mappings']['pycsw:Identifier']):
return self.exceptionreport('NoApplicableCode',
'insert', 'Record requires an identifier')
# insert new record
try:
self.parent.repository.insert(record, 'local',
util.get_today_and_now())
inserted += 1
insertresults.append(
{'identifier': getattr(record,
self.parent.context.md_core_model['mappings']['pycsw:Identifier']),
'title': getattr(record,
self.parent.context.md_core_model['mappings']['pycsw:Title'])})
except Exception as err:
return self.exceptionreport('NoApplicableCode',
'insert', 'Transaction (insert) failed: %s.' % str(err))
elif ttype['type'] == 'update':
if 'constraint' not in ttype:
# update full existing resource in repository
try:
record = metadata.parse_record(self.parent.context,
ttype['xml'], self.parent.repository)[0]
identifier = getattr(record,
self.parent.context.md_core_model['mappings']['pycsw:Identifier'])
except Exception as err:
return self.exceptionreport('NoApplicableCode', 'update',
'Transaction (update) failed: record parsing failed: %s' \
% str(err))
# query repository to see if record already exists
LOGGER.debug('checking if record exists (%s)' % \
identifier)
results = self.parent.repository.query_ids(ids=[identifier])
if len(results) == 0:
LOGGER.debug('id %s does not exist in repository' % \
identifier)
else: # existing record, it's an update
try:
self.parent.repository.update(record)
updated += 1
except Exception as err:
return self.exceptionreport('NoApplicableCode',
'update',
'Transaction (update) failed: %s.' % str(err))
else: # update by record property and constraint
# get / set XPath for property names
for rp in ttype['recordproperty']:
if rp['name'] not in self.parent.repository.queryables['_all']:
# is it an XPath?
if rp['name'].find('/') != -1:
# scan outputschemas; if match, bind
for osch in self.parent.outputschemas.values():
for key, value in osch.XPATH_MAPPINGS.iteritems():
if value == rp['name']: # match
rp['rp'] = {'xpath': value, 'name': key}
rp['rp']['dbcol'] = self.parent.repository.queryables['_all'][key]
break
else:
return self.exceptionreport('NoApplicableCode',
'update', 'Transaction (update) failed: invalid property: %s.' % str(rp['name']))
else:
rp['rp'] = \
self.parent.repository.queryables['_all'][rp['name']]
LOGGER.debug('Record Properties: %s.' %
ttype['recordproperty'])
try:
updated += self.parent.repository.update(record=None,
recprops=ttype['recordproperty'],
constraint=ttype['constraint'])
except Exception as err:
return self.exceptionreport('NoApplicableCode',
'update',
'Transaction (update) failed: %s.' % str(err))
elif ttype['type'] == 'delete':
deleted += self.parent.repository.delete(ttype['constraint'])
node = etree.Element(util.nspath_eval('csw:TransactionResponse',
self.parent.context.namespaces), nsmap=self.parent.context.namespaces, version='2.0.2')
node.attrib[util.nspath_eval('xsi:schemaLocation',
self.parent.context.namespaces)] = '%s %s/csw/2.0.2/CSW-publication.xsd' % \
(self.parent.context.namespaces['csw'], self.parent.config.get('server', 'ogc_schemas_base'))
node.append(
self._write_transactionsummary(
inserted=inserted, updated=updated, deleted=deleted))
if (len(insertresults) > 0 and self.parent.kvp['verboseresponse']):
# show insert result identifiers
node.append(self._write_verboseresponse(insertresults))
return node
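# A hedged sketch of a Harvest GET request this handler accepts (the
# source URL is a hypothetical example):
#   ?service=CSW&version=2.0.2&request=Harvest
#   &resourcetype=http://www.opengis.net/cat/csw/2.0.2
#   &source=http://example.org/csw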
def harvest(self):
''' Handle Harvest request '''
service_identifier = None
old_identifier = None
deleted = []
try:
self.parent._test_manager()
except Exception as err:
return self.exceptionreport('NoApplicableCode', 'harvest', str(err))
if self.parent.requesttype == 'GET':
if 'resourcetype' not in self.parent.kvp:
return self.exceptionreport('MissingParameterValue',
'resourcetype', 'Missing resourcetype parameter')
if 'source' not in self.parent.kvp:
return self.exceptionreport('MissingParameterValue',
'source', 'Missing source parameter')
# validate resourcetype
if (self.parent.kvp['resourcetype'] not in
self.parent.context.model['operations']['Harvest']['parameters']['ResourceType']
['values']):
return self.exceptionreport('InvalidParameterValue',
'resourcetype', 'Invalid resource type parameter: %s.\
Allowable resourcetype values: %s' % (self.parent.kvp['resourcetype'],
','.join(self.parent.context.model['operations']['Harvest']['parameters']
['ResourceType']['values'])))
if (self.parent.kvp['resourcetype'].find('opengis.net') == -1 and
self.parent.kvp['resourcetype'].find('urn:geoss:waf') == -1):
# fetch content-based resource
LOGGER.debug('Fetching resource %s' % self.parent.kvp['source'])
try:
content = util.http_request('GET', self.parent.kvp['source'])
except Exception as err:
errortext = 'Error fetching resource %s.\nError: %s.' % \
(self.parent.kvp['source'], str(err))
LOGGER.debug(errortext)
return self.exceptionreport('InvalidParameterValue', 'source',
errortext)
else: # it's a service URL
content = self.parent.kvp['source']
# query repository to see if service already exists
LOGGER.debug('checking if service exists (%s)' % content)
results = self.parent.repository.query_source(content)
if len(results) > 0: # exists, keep identifier for update
LOGGER.debug('Service already exists, keeping identifier and results')
service_identifier = results[0].identifier
service_results = results
LOGGER.debug('Identifier is %s' % service_identifier)
# return self.exceptionreport('NoApplicableCode', 'source',
# 'Insert failed: service %s already in repository' % content)
# parse resource into record
try:
records_parsed = metadata.parse_record(self.parent.context,
content, self.parent.repository, self.parent.kvp['resourcetype'],
pagesize=self.parent.csw_harvest_pagesize)
except Exception as err:
LOGGER.exception(err)
return self.exceptionreport('NoApplicableCode', 'source',
'Harvest failed: record parsing failed: %s' % str(err))
inserted = 0
updated = 0
ir = []
LOGGER.debug('Total Records parsed: %d' % len(records_parsed))
for record in records_parsed:
if self.parent.kvp['resourcetype'] == 'urn:geoss:waf':
src = record.source
else:
src = self.parent.kvp['source']
setattr(record, self.parent.context.md_core_model['mappings']['pycsw:Source'],
src)
setattr(record, self.parent.context.md_core_model['mappings']['pycsw:InsertDate'],
util.get_today_and_now())
identifier = getattr(record,
self.parent.context.md_core_model['mappings']['pycsw:Identifier'])
source = getattr(record,
self.parent.context.md_core_model['mappings']['pycsw:Source'])
insert_date = getattr(record,
self.parent.context.md_core_model['mappings']['pycsw:InsertDate'])
title = getattr(record,
self.parent.context.md_core_model['mappings']['pycsw:Title'])
if record.type == 'service' and service_identifier is not None: # service endpoint
LOGGER.debug('Replacing service identifier from %s to %s' % (record.identifier, service_identifier))
old_identifier = record.identifier
identifier = record.identifier = service_identifier
if (record.type != 'service' and service_identifier is not None
and old_identifier is not None): # service resource
if record.identifier.find(old_identifier) != -1:
new_identifier = record.identifier.replace(old_identifier, service_identifier)
LOGGER.debug('Replacing service resource identifier from %s to %s' % (record.identifier, new_identifier))
identifier = record.identifier = new_identifier
ir.append({'identifier': identifier, 'title': title})
# query repository to see if record already exists
LOGGER.debug('checking if record exists (%s)' % identifier)
results = self.parent.repository.query_ids(ids=[identifier])
if len(results) == 0: # check for service identifier
LOGGER.debug('checking if service id exists (%s)' % service_identifier)
results = self.parent.repository.query_ids(ids=[service_identifier])
LOGGER.debug(str(results))
if len(results) == 0: # new record, it's a new insert
inserted += 1
try:
self.parent.repository.insert(record, source, insert_date)
except Exception as err:
return self.exceptionreport('NoApplicableCode',
'source', 'Harvest (insert) failed: %s.' % str(err))
else: # existing record, it's an update
if source != results[0].source:
# same identifier, but different source
return self.exceptionreport('NoApplicableCode',
'source', 'Insert failed: identifier %s in repository\
has source %s.' % (identifier, source))
try:
self.parent.repository.update(record)
except Exception as err:
return self.exceptionreport('NoApplicableCode',
'source', 'Harvest (update) failed: %s.' % str(err))
updated += 1
node = etree.Element(util.nspath_eval('csw:HarvestResponse',
self.parent.context.namespaces), nsmap=self.parent.context.namespaces)
node.attrib[util.nspath_eval('xsi:schemaLocation',
self.parent.context.namespaces)] = \
'%s %s/csw/2.0.2/CSW-publication.xsd' % (self.parent.context.namespaces['csw'],
self.parent.config.get('server', 'ogc_schemas_base'))
node2 = etree.SubElement(node,
util.nspath_eval('csw:TransactionResponse',
self.parent.context.namespaces), version='2.0.2')
if service_identifier is not None:
fresh_records = [str(i['identifier']) for i in ir]
existing_records = [str(i.identifier) for i in service_results]
deleted = set(existing_records) - set(fresh_records)
LOGGER.debug('Records to delete: %s' % str(deleted))
for to_delete in deleted:
delete_constraint = {
'type': 'filter',
'values': [to_delete],
'where': 'identifier = :pvalue0'
}
self.parent.repository.delete(delete_constraint)
node2.append(
self._write_transactionsummary(inserted=inserted, updated=updated,
deleted=len(deleted)))
if inserted > 0:
# show insert result identifiers
node2.append(self._write_verboseresponse(ir))
if 'responsehandler' in self.parent.kvp: # process the handler
self.parent._process_responsehandler(etree.tostring(node,
pretty_print=self.parent.pretty_print))
else:
return node
def _write_record(self, recobj, queryables):
''' Generate csw:Record '''
if self.parent.kvp['elementsetname'] == 'brief':
elname = 'BriefRecord'
elif self.parent.kvp['elementsetname'] == 'summary':
elname = 'SummaryRecord'
else:
elname = 'Record'
record = etree.Element(util.nspath_eval('csw:%s' % elname,
self.parent.context.namespaces))
if ('elementname' in self.parent.kvp and
len(self.parent.kvp['elementname']) > 0):
for elemname in self.parent.kvp['elementname']:
if (elemname.find('BoundingBox') != -1 or
elemname.find('Envelope') != -1):
bboxel = write_boundingbox(util.getqattr(recobj,
self.parent.context.md_core_model['mappings']['pycsw:BoundingBox']),
self.parent.context.namespaces)
if bboxel is not None:
record.append(bboxel)
else:
value = util.getqattr(recobj, queryables[elemname]['dbcol'])
if value:
etree.SubElement(record,
util.nspath_eval(elemname,
self.parent.context.namespaces)).text = value
elif 'elementsetname' in self.parent.kvp:
if (self.parent.kvp['elementsetname'] == 'full' and
util.getqattr(recobj, self.parent.context.md_core_model['mappings']\
['pycsw:Typename']) == 'csw:Record' and
util.getqattr(recobj, self.parent.context.md_core_model['mappings']\
['pycsw:Schema']) == 'http://www.opengis.net/cat/csw/2.0.2' and
util.getqattr(recobj, self.parent.context.md_core_model['mappings']\
['pycsw:Type']) != 'service'):
# dump record as is and exit
return etree.fromstring(util.getqattr(recobj,
self.parent.context.md_core_model['mappings']['pycsw:XML']))
etree.SubElement(record,
util.nspath_eval('dc:identifier', self.parent.context.namespaces)).text = \
util.getqattr(recobj,
self.parent.context.md_core_model['mappings']['pycsw:Identifier'])
for i in ['dc:title', 'dc:type']:
val = util.getqattr(recobj, queryables[i]['dbcol'])
if not val:
val = ''
etree.SubElement(record, util.nspath_eval(i,
self.parent.context.namespaces)).text = val
if self.parent.kvp['elementsetname'] in ['summary', 'full']:
# add summary elements
keywords = util.getqattr(recobj, queryables['dc:subject']['dbcol'])
if keywords is not None:
for keyword in keywords.split(','):
etree.SubElement(record,
util.nspath_eval('dc:subject',
self.parent.context.namespaces)).text = keyword
val = util.getqattr(recobj, queryables['dc:format']['dbcol'])
if val:
etree.SubElement(record,
util.nspath_eval('dc:format',
self.parent.context.namespaces)).text = val
# links
rlinks = util.getqattr(recobj,
self.parent.context.md_core_model['mappings']['pycsw:Links'])
if rlinks:
links = rlinks.split('^')
for link in links:
linkset = link.split(',')
etree.SubElement(record,
util.nspath_eval('dct:references',
self.parent.context.namespaces),
scheme=linkset[2]).text = linkset[-1]
for i in ['dc:relation', 'dct:modified', 'dct:abstract']:
val = util.getqattr(recobj, queryables[i]['dbcol'])
if val is not None:
etree.SubElement(record,
util.nspath_eval(i, self.parent.context.namespaces)).text = val
if self.parent.kvp['elementsetname'] == 'full': # add full elements
for i in ['dc:date', 'dc:creator', \
'dc:publisher', 'dc:contributor', 'dc:source', \
'dc:language', 'dc:rights']:
val = util.getqattr(recobj, queryables[i]['dbcol'])
if val:
etree.SubElement(record,
util.nspath_eval(i, self.parent.context.namespaces)).text = val
# always write out ows:BoundingBox
bboxel = write_boundingbox(getattr(recobj,
self.parent.context.md_core_model['mappings']['pycsw:BoundingBox']),
self.parent.context.namespaces)
if bboxel is not None:
record.append(bboxel)
return record
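# A hedged sketch of a brief record produced above (values illustrative,
# not from a real repository):
#   <csw:BriefRecord>
#     <dc:identifier>urn:uuid:example-1</dc:identifier>
#     <dc:title>Example record</dc:title>
#     <dc:type>dataset</dc:type>
#     <ows:BoundingBox>...</ows:BoundingBox>
#   </csw:BriefRecord>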
def _parse_constraint(self, element):
''' Parse csw:Constraint '''
query = {}
tmp = element.find(util.nspath_eval('ogc:Filter', self.parent.context.namespaces))
if tmp is not None:
LOGGER.debug('Filter constraint specified.')
try:
query['type'] = 'filter'
query['where'], query['values'] = fes1.parse(tmp,
self.parent.repository.queryables['_all'], self.parent.repository.dbtype,
self.parent.context.namespaces, self.parent.orm, self.parent.language['text'], self.parent.repository.fts)
except Exception as err:
return 'Invalid Filter request: %s' % err
tmp = element.find(util.nspath_eval('csw:CqlText', self.parent.context.namespaces))
if tmp is not None:
LOGGER.debug('CQL specified: %s.' % tmp.text)
query['type'] = 'cql'
query['where'] = self.parent._cql_update_queryables_mappings(tmp.text,
self.parent.repository.queryables['_all'])
query['values'] = {}
return query
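# A hedged sketch of the dict returned above; the filter form mirrors the
# delete_constraint built in harvest() (values illustrative):
#   {'type': 'filter', 'where': 'identifier = :pvalue0',
#    'values': ['urn:uuid:example-1']}
#   {'type': 'cql', 'where': "title = 'foo'", 'values': {}}
# On parse failure a plain error string is returned instead, which callers
# detect with isinstance(..., str).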
def parse_postdata(self, postdata):
''' Parse POST XML '''
request = {}
try:
LOGGER.debug('Parsing %s.' % postdata)
doc = etree.fromstring(postdata)
except Exception as err:
errortext = \
'Exception: document not well-formed.\nError: %s.' % str(err)
LOGGER.debug(errortext)
return errortext
# if this is a SOAP request, get to SOAP-ENV:Body/csw:*
if (doc.tag == util.nspath_eval('soapenv:Envelope',
self.parent.context.namespaces)):
LOGGER.debug('SOAP request specified.')
self.parent.soap = True
doc = doc.find(
util.nspath_eval('soapenv:Body',
self.parent.context.namespaces)).xpath('child::*')[0]
if (doc.tag in [util.nspath_eval('csw:Transaction',
self.parent.context.namespaces), util.nspath_eval('csw:Harvest',
self.parent.context.namespaces)]):
schema = os.path.join(self.parent.config.get('server', 'home'),
'core', 'schemas', 'ogc', 'csw', '2.0.2', 'CSW-publication.xsd')
else:
schema = os.path.join(self.parent.config.get('server', 'home'),
'core', 'schemas', 'ogc', 'csw', '2.0.2', 'CSW-discovery.xsd')
try:
# it is virtually impossible to validate a csw:Transaction
# csw:Insert|csw:Update (with single child) XML document.
# Only validate non csw:Transaction XML
if doc.find('.//%s' % util.nspath_eval('csw:Insert',
self.parent.context.namespaces)) is None and \
len(doc.xpath('//csw:Update/child::*',
namespaces=self.parent.context.namespaces)) == 0:
LOGGER.debug('Validating %s.' % postdata)
schema = etree.XMLSchema(file=schema)
parser = etree.XMLParser(schema=schema, resolve_entities=False)
if hasattr(self.parent, 'soap') and self.parent.soap:
# validate the body of the SOAP request
doc = etree.fromstring(etree.tostring(doc), parser)
else: # validate the request normally
doc = etree.fromstring(postdata, parser)
LOGGER.debug('Request is valid XML.')
else: # parse Transaction without validation
doc = etree.fromstring(postdata)
except Exception as err:
errortext = \
'Exception: the document is not valid.\nError: %s' % str(err)
LOGGER.debug(errortext)
return errortext
request['request'] = util.xmltag_split(doc.tag)
LOGGER.debug('Request operation %s specified.' % request['request'])
tmp = doc.find('.').attrib.get('service')
if tmp is not None:
request['service'] = tmp
tmp = doc.find('.').attrib.get('version')
if tmp is not None:
request['version'] = tmp
tmp = doc.find('.//%s' % util.nspath_eval('ows:Version',
self.parent.context.namespaces))
if tmp is not None:
request['version'] = tmp.text
tmp = doc.find('.').attrib.get('updateSequence')
if tmp is not None:
request['updatesequence'] = tmp
# GetCapabilities
if request['request'] == 'GetCapabilities':
tmp = doc.find(util.nspath_eval('ows:Sections',
self.parent.context.namespaces))
if tmp is not None:
request['sections'] = ','.join([section.text for section in \
doc.findall(util.nspath_eval('ows:Sections/ows:Section',
self.parent.context.namespaces))])
# DescribeRecord
if request['request'] == 'DescribeRecord':
request['typename'] = [typename.text for typename in \
doc.findall(util.nspath_eval('csw:TypeName',
self.parent.context.namespaces))]
tmp = doc.find('.').attrib.get('schemaLanguage')
if tmp is not None:
request['schemalanguage'] = tmp
tmp = doc.find('.').attrib.get('outputFormat')
if tmp is not None:
request['outputformat'] = tmp
# GetDomain
if request['request'] == 'GetDomain':
tmp = doc.find(util.nspath_eval('csw:ParameterName',
self.parent.context.namespaces))
if tmp is not None:
request['parametername'] = tmp.text
tmp = doc.find(util.nspath_eval('csw:PropertyName',
self.parent.context.namespaces))
if tmp is not None:
request['propertyname'] = tmp.text
# GetRecords
if request['request'] == 'GetRecords':
tmp = doc.find('.').attrib.get('outputSchema')
request['outputschema'] = tmp if tmp is not None \
else self.parent.context.namespaces['csw']
tmp = doc.find('.').attrib.get('resultType')
request['resulttype'] = tmp if tmp is not None else None
tmp = doc.find('.').attrib.get('outputFormat')
request['outputformat'] = tmp if tmp is not None \
else 'application/xml'
tmp = doc.find('.').attrib.get('startPosition')
request['startposition'] = tmp if tmp is not None else 1
tmp = doc.find('.').attrib.get('requestId')
request['requestid'] = tmp if tmp is not None else None
tmp = doc.find('.').attrib.get('maxRecords')
if tmp is not None:
request['maxrecords'] = tmp
tmp = doc.find(util.nspath_eval('csw:DistributedSearch',
self.parent.context.namespaces))
if tmp is not None:
request['distributedsearch'] = True
hopcount = tmp.attrib.get('hopCount')
request['hopcount'] = int(hopcount)-1 if hopcount is not None \
else 1
else:
request['distributedsearch'] = False
tmp = doc.find(util.nspath_eval('csw:ResponseHandler',
self.parent.context.namespaces))
if tmp is not None:
request['responsehandler'] = tmp.text
tmp = doc.find(util.nspath_eval('csw:Query/csw:ElementSetName',
self.parent.context.namespaces))
request['elementsetname'] = tmp.text if tmp is not None else None
tmp = doc.find(util.nspath_eval(
'csw:Query', self.parent.context.namespaces)).attrib.get('typeNames')
request['typenames'] = tmp.split() if tmp is not None \
else 'csw:Record'
request['elementname'] = [elname.text for elname in \
doc.findall(util.nspath_eval('csw:Query/csw:ElementName',
self.parent.context.namespaces))]
request['constraint'] = {}
tmp = doc.find(util.nspath_eval('csw:Query/csw:Constraint',
self.parent.context.namespaces))
if tmp is not None:
request['constraint'] = self._parse_constraint(tmp)
if isinstance(request['constraint'], str): # parse error
return 'Invalid Constraint: %s' % request['constraint']
else:
LOGGER.debug('No csw:Constraint (ogc:Filter or csw:CqlText) \
specified.')
tmp = doc.find(util.nspath_eval('csw:Query/ogc:SortBy',
self.parent.context.namespaces))
if tmp is not None:
LOGGER.debug('Sorted query specified.')
request['sortby'] = {}
try:
elname = tmp.find(util.nspath_eval(
'ogc:SortProperty/ogc:PropertyName',
self.parent.context.namespaces)).text
request['sortby']['propertyname'] = \
self.parent.repository.queryables['_all'][elname]['dbcol']
if (elname.find('BoundingBox') != -1 or
elname.find('Envelope') != -1):
# it's a spatial sort
request['sortby']['spatial'] = True
except Exception as err:
errortext = \
'Invalid ogc:SortProperty/ogc:PropertyName: %s' % str(err)
LOGGER.debug(errortext)
return errortext
tmp2 = tmp.find(util.nspath_eval(
'ogc:SortProperty/ogc:SortOrder', self.parent.context.namespaces))
request['sortby']['order'] = tmp2.text if tmp2 is not None \
else 'ASC'
else:
request['sortby'] = None
# GetRecordById
if request['request'] == 'GetRecordById':
request['id'] = [id1.text for id1 in \
doc.findall(util.nspath_eval('csw:Id', self.parent.context.namespaces))]
tmp = doc.find(util.nspath_eval('csw:ElementSetName',
self.parent.context.namespaces))
request['elementsetname'] = tmp.text if tmp is not None \
else 'summary'
tmp = doc.find('.').attrib.get('outputSchema')
request['outputschema'] = tmp if tmp is not None \
else self.parent.context.namespaces['csw']
tmp = doc.find('.').attrib.get('outputFormat')
if tmp is not None:
request['outputformat'] = tmp
# Transaction
if request['request'] == 'Transaction':
request['verboseresponse'] = True
tmp = doc.find('.').attrib.get('verboseResponse')
if tmp is not None:
if tmp in ['false', '0']:
request['verboseresponse'] = False
tmp = doc.find('.').attrib.get('requestId')
request['requestid'] = tmp if tmp is not None else None
request['transactions'] = []
for ttype in \
doc.xpath('//csw:Insert', namespaces=self.parent.context.namespaces):
tname = ttype.attrib.get('typeName')
for mdrec in ttype.xpath('child::*'):
xml = mdrec
request['transactions'].append(
{'type': 'insert', 'typename': tname, 'xml': xml})
for ttype in \
doc.xpath('//csw:Update', namespaces=self.parent.context.namespaces):
child = ttype.xpath('child::*')
update = {'type': 'update'}
if len(child) == 1: # it's a wholesale update
update['xml'] = child[0]
else: # it's a RecordProperty with Constraint Update
update['recordproperty'] = []
for recprop in ttype.findall(
util.nspath_eval('csw:RecordProperty',
self.parent.context.namespaces)):
rpname = recprop.find(util.nspath_eval('csw:Name',
self.parent.context.namespaces)).text
rpvalue = recprop.find(
util.nspath_eval('csw:Value',
self.parent.context.namespaces)).text
update['recordproperty'].append(
{'name': rpname, 'value': rpvalue})
update['constraint'] = self._parse_constraint(
ttype.find(util.nspath_eval('csw:Constraint',
self.parent.context.namespaces)))
request['transactions'].append(update)
for ttype in \
doc.xpath('//csw:Delete', namespaces=self.parent.context.namespaces):
tname = ttype.attrib.get('typeName')
constraint = self._parse_constraint(
ttype.find(util.nspath_eval('csw:Constraint',
self.parent.context.namespaces)))
if isinstance(constraint, str): # parse error
return 'Invalid Constraint: %s' % constraint
request['transactions'].append(
{'type': 'delete', 'typename': tname, 'constraint': constraint})
# Harvest
if request['request'] == 'Harvest':
request['source'] = doc.find(util.nspath_eval('csw:Source',
self.parent.context.namespaces)).text
request['resourcetype'] = \
doc.find(util.nspath_eval('csw:ResourceType',
self.parent.context.namespaces)).text
tmp = doc.find(util.nspath_eval('csw:ResourceFormat',
self.parent.context.namespaces))
if tmp is not None:
request['resourceformat'] = tmp.text
else:
request['resourceformat'] = 'application/xml'
tmp = doc.find(util.nspath_eval('csw:HarvestInterval',
self.parent.context.namespaces))
if tmp is not None:
request['harvestinterval'] = tmp.text
tmp = doc.find(util.nspath_eval('csw:ResponseHandler',
self.parent.context.namespaces))
if tmp is not None:
request['responsehandler'] = tmp.text
return request
def _write_transactionsummary(self, inserted=0, updated=0, deleted=0):
''' Write csw:TransactionSummary construct '''
node = etree.Element(util.nspath_eval('csw:TransactionSummary',
self.parent.context.namespaces))
if 'requestid' in self.parent.kvp and self.parent.kvp['requestid'] is not None:
node.attrib['requestId'] = self.parent.kvp['requestid']
etree.SubElement(node, util.nspath_eval('csw:totalInserted',
self.parent.context.namespaces)).text = str(inserted)
etree.SubElement(node, util.nspath_eval('csw:totalUpdated',
self.parent.context.namespaces)).text = str(updated)
etree.SubElement(node, util.nspath_eval('csw:totalDeleted',
self.parent.context.namespaces)).text = str(deleted)
return node
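# A hedged sketch of the element written above (counts illustrative):
#   <csw:TransactionSummary>
#     <csw:totalInserted>1</csw:totalInserted>
#     <csw:totalUpdated>0</csw:totalUpdated>
#     <csw:totalDeleted>0</csw:totalDeleted>
#   </csw:TransactionSummary>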
def _write_acknowledgement(self, root=True):
''' Generate csw:Acknowledgement '''
node = etree.Element(util.nspath_eval('csw:Acknowledgement',
self.parent.context.namespaces),
nsmap = self.parent.context.namespaces, timeStamp=util.get_today_and_now())
if root:
node.attrib[util.nspath_eval('xsi:schemaLocation',
self.parent.context.namespaces)] = \
'%s %s/csw/2.0.2/CSW-discovery.xsd' % (self.parent.context.namespaces['csw'], \
self.parent.config.get('server', 'ogc_schemas_base'))
node1 = etree.SubElement(node, util.nspath_eval('csw:EchoedRequest',
self.parent.context.namespaces))
if self.parent.requesttype == 'POST':
node1.append(etree.fromstring(self.parent.request))
else: # GET
node2 = etree.SubElement(node1, util.nspath_eval('ows:Get',
self.parent.context.namespaces))
node2.text = self.parent.request
if self.parent.async:
etree.SubElement(node, util.nspath_eval('csw:RequestId',
self.parent.context.namespaces)).text = self.parent.kvp['requestid']
return node
def _write_verboseresponse(self, insertresults):
''' show insert result identifiers '''
insertresult = etree.Element(util.nspath_eval('csw:InsertResult',
self.parent.context.namespaces))
for ir in insertresults:
briefrec = etree.SubElement(insertresult,
util.nspath_eval('csw:BriefRecord',
self.parent.context.namespaces))
etree.SubElement(briefrec,
util.nspath_eval('dc:identifier',
self.parent.context.namespaces)).text = ir['identifier']
etree.SubElement(briefrec,
util.nspath_eval('dc:title',
self.parent.context.namespaces)).text = ir['title']
return insertresult
def exceptionreport(self, code, locator, text):
''' Generate ExceptionReport '''
self.parent.exception = True
self.parent.status = 'OK'
try:
language = self.parent.config.get('server', 'language')
ogc_schemas_base = self.parent.config.get('server', 'ogc_schemas_base')
except:
language = 'en-US'
ogc_schemas_base = self.parent.context.ogc_schemas_base
node = etree.Element(util.nspath_eval('ows:ExceptionReport',
self.parent.context.namespaces), nsmap=self.parent.context.namespaces,
version='1.2.0', language=language)
node.attrib[util.nspath_eval('xsi:schemaLocation',
self.parent.context.namespaces)] = \
'%s %s/ows/1.0.0/owsExceptionReport.xsd' % \
(self.parent.context.namespaces['ows'], ogc_schemas_base)
exception = etree.SubElement(node, util.nspath_eval('ows:Exception',
self.parent.context.namespaces),
exceptionCode=code, locator=locator)
etree.SubElement(exception,
util.nspath_eval('ows:ExceptionText',
self.parent.context.namespaces)).text = text
return node
def write_boundingbox(bbox, nsmap):
''' Generate ows:BoundingBox '''
if bbox is not None:
try:
bbox2 = util.wkt2geom(bbox)
except:
return None
if len(bbox2) == 4:
boundingbox = etree.Element(util.nspath_eval('ows:BoundingBox',
nsmap), crs='urn:x-ogc:def:crs:EPSG:6.11:4326',
dimensions='2')
etree.SubElement(boundingbox, util.nspath_eval('ows:LowerCorner',
nsmap)).text = '%s %s' % (bbox2[1], bbox2[0])
etree.SubElement(boundingbox, util.nspath_eval('ows:UpperCorner',
nsmap)).text = '%s %s' % (bbox2[3], bbox2[2])
return boundingbox
else:
return None
else:
return None
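# A hedged example of the output (coordinates illustrative): for a WKT
# envelope spanning (-180, -90, 180, 90) the function above yields
#   <ows:BoundingBox crs="urn:x-ogc:def:crs:EPSG:6.11:4326" dimensions="2">
#     <ows:LowerCorner>-90 -180</ows:LowerCorner>
#     <ows:UpperCorner>90 180</ows:UpperCorner>
#   </ows:BoundingBox>
# Note LowerCorner is written as '%s %s' % (miny, minx), i.e. axis order
# follows the EPSG:4326 urn convention.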
|
{
"content_hash": "7144c6498d1c75ac52d0a428af05e6d5",
"timestamp": "",
"source": "github",
"line_count": 1930,
"max_line_length": 219,
"avg_line_length": 46.21709844559585,
"alnum_prop": 0.5569457056693461,
"repo_name": "benhowell/pycsw",
"id": "64e47f4525fbc646efbde6c261661a77af358920",
"size": "90601",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pycsw/ogc/csw/csw2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "65"
},
{
"name": "Python",
"bytes": "625074"
},
{
"name": "Shell",
"bytes": "2584"
}
],
"symlink_target": ""
}
|
import pytest
from asphalt.core.context import Context
from asphalt.templating.api import TemplateRenderer
from asphalt.templating.component import TemplatingComponent
from asphalt.templating.renderers.jinja2 import Jinja2Renderer
@pytest.mark.asyncio
async def test_single_renderer():
async with Context() as ctx:
ctx.add_resource("åäö")
component = TemplatingComponent(
backend="jinja2", options={"package_name": "tests"}
)
await component.start(ctx)
for cls in (TemplateRenderer, Jinja2Renderer):
renderer = ctx.require_resource(cls)
assert isinstance(renderer, Jinja2Renderer)
assert type(renderer.environment).__name__ == "Environment"
|
{
"content_hash": "4e1845f5d81be4e37af8869d7fb4642b",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 67,
"avg_line_length": 33.36363636363637,
"alnum_prop": 0.7084468664850136,
"repo_name": "asphalt-framework/asphalt-templating",
"id": "62f3bd0414dcdd2b87bdf0062f2bbee5d069631a",
"size": "737",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_component.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "413"
},
{
"name": "Mako",
"bytes": "146"
},
{
"name": "Python",
"bytes": "15504"
}
],
"symlink_target": ""
}
|
from .utils import str_list
class SignalFunctionType(object):
def __init__(self, type_name, signal_inputs, signal_outputs):
self.type_name = type_name
self.signal_inputs = set(signal_inputs)
self.signal_inputs_multiple = set(s for s in signal_inputs if SignalFunctionType.is_signal_list(s))
self.signal_outputs = set(signal_outputs)
self.signal_outputs_multiple = set(s for s in signal_outputs if SignalFunctionType.is_signal_list(s))
@staticmethod
def is_signal_list(signal_name):
return signal_name[0] == "*"
@staticmethod
def get_multi_signal_name(signal_name):
return signal_name[1:]
@staticmethod
def make_multi_signal_name(signal_name):
return "*" + signal_name
def __repr__(self):
return "%s :: %s => %s" % (self.type_name, str_list(self.signal_inputs), str_list(self.signal_outputs))
class SignalList(list):
pass
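# A hedged usage sketch (the names are hypothetical, not from this module):
# inputs prefixed with "*" are treated as signal lists by is_signal_list().
#   sft = SignalFunctionType('concat', ['*frames'], ['clip'])
#   sft.signal_inputs_multiple                            # -> {'*frames'}
#   SignalFunctionType.get_multi_signal_name('*frames')   # -> 'frames'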
|
{
"content_hash": "396ef0fc40f218865a4d06d4bda224e2",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 111,
"avg_line_length": 30.193548387096776,
"alnum_prop": 0.6538461538461539,
"repo_name": "CurryBoy/ProtoML-Deprecated",
"id": "af42203888c2c8487acdc9512e732cc0e178167b",
"size": "936",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "protoml/signal.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "9383"
},
{
"name": "C++",
"bytes": "43265"
},
{
"name": "Matlab",
"bytes": "2905"
},
{
"name": "Python",
"bytes": "126308"
},
{
"name": "Shell",
"bytes": "1345"
}
],
"symlink_target": ""
}
|
from setuptools import setup, find_packages
# see https://github.com/GaretJax/i18n-utils/blob/master/setup.py
# and https://github.com/elastic/curator/blob/master/setup.py
setup(
name='polyglot',
version='0.1',
url='https://github.com/polyrabbit/polyglot',
license='MIT',
author='poly',
author_email='mcx_221@foxmail.com',
description='A computer language savant',
packages=find_packages(exclude=['tests']),
include_package_data=True,
platforms='any',
install_requires=open('./requirements.txt').read().split('\n'),
setup_requires=['nose'],
test_suite='nose.collector',
tests_require=['nose'],
entry_points={
"console_scripts": ["polyglot=polyglot.cli:run"]
}
)
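# A hedged usage sketch (assuming the requirements.txt dependencies resolve):
#   pip install .    # installs the package and the 'polyglot' console script
#   polyglot         # dispatches to polyglot.cli:run via the entry point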
|
{
"content_hash": "3fdf148cfe4cb451e2bdee933717a89e",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 67,
"avg_line_length": 31.956521739130434,
"alnum_prop": 0.6666666666666666,
"repo_name": "polyrabbit/polyglot",
"id": "03cfcbd557f099aba858fdacb372ad405d449e68",
"size": "735",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ABAP",
"bytes": "8239"
},
{
"name": "AGS Script",
"bytes": "26386"
},
{
"name": "AMPL",
"bytes": "448"
},
{
"name": "APL",
"bytes": "11748"
},
{
"name": "ATS",
"bytes": "27247"
},
{
"name": "Agda",
"bytes": "3995"
},
{
"name": "Alloy",
"bytes": "9646"
},
{
"name": "ApacheConf",
"bytes": "37117"
},
{
"name": "Apex",
"bytes": "51102"
},
{
"name": "AppleScript",
"bytes": "17650"
},
{
"name": "Arduino",
"bytes": "81"
},
{
"name": "AspectJ",
"bytes": "2285"
},
{
"name": "Assembly",
"bytes": "60291"
},
{
"name": "AutoHotkey",
"bytes": "23"
},
{
"name": "Awk",
"bytes": "4528"
},
{
"name": "BitBake",
"bytes": "1643"
},
{
"name": "BlitzBasic",
"bytes": "11710"
},
{
"name": "BlitzMax",
"bytes": "270"
},
{
"name": "Bluespec",
"bytes": "8668"
},
{
"name": "Brainfuck",
"bytes": "10189"
},
{
"name": "Brightscript",
"bytes": "12972"
},
{
"name": "C",
"bytes": "819142"
},
{
"name": "C#",
"bytes": "10322"
},
{
"name": "C++",
"bytes": "227914"
},
{
"name": "CLIPS",
"bytes": "16994"
},
{
"name": "CMake",
"bytes": "8496"
},
{
"name": "COBOL",
"bytes": "607"
},
{
"name": "CSS",
"bytes": "994"
},
{
"name": "CartoCSS",
"bytes": "60760"
},
{
"name": "Ceylon",
"bytes": "332"
},
{
"name": "Chapel",
"bytes": "71564"
},
{
"name": "Cirru",
"bytes": "1312"
},
{
"name": "Clean",
"bytes": "7294"
},
{
"name": "Clojure",
"bytes": "13011"
},
{
"name": "CoffeeScript",
"bytes": "49624"
},
{
"name": "ColdFusion",
"bytes": "8982"
},
{
"name": "Common Lisp",
"bytes": "18767"
},
{
"name": "Component Pascal",
"bytes": "6679"
},
{
"name": "Cool",
"bytes": "2098"
},
{
"name": "Coq",
"bytes": "170649"
},
{
"name": "Crystal",
"bytes": "14282"
},
{
"name": "Cuda",
"bytes": "3055"
},
{
"name": "Cycript",
"bytes": "15319"
},
{
"name": "D",
"bytes": "10563"
},
{
"name": "DM",
"bytes": "1506"
},
{
"name": "DTrace",
"bytes": "7193"
},
{
"name": "Dart",
"bytes": "338"
},
{
"name": "Dogescript",
"bytes": "289"
},
{
"name": "E",
"bytes": "4881"
},
{
"name": "Eagle",
"bytes": "229555"
},
{
"name": "Ecl",
"bytes": "1731"
},
{
"name": "Eiffel",
"bytes": "2824"
},
{
"name": "Elixir",
"bytes": "381"
},
{
"name": "Elm",
"bytes": "7807"
},
{
"name": "Emacs Lisp",
"bytes": "20390"
},
{
"name": "EmberScript",
"bytes": "536"
},
{
"name": "Erlang",
"bytes": "38144"
},
{
"name": "F#",
"bytes": "32193"
},
{
"name": "FORTRAN",
"bytes": "1360"
},
{
"name": "Forth",
"bytes": "138239"
},
{
"name": "Frege",
"bytes": "32579"
},
{
"name": "GAMS",
"bytes": "2135"
},
{
"name": "GAP",
"bytes": "92512"
},
{
"name": "GDScript",
"bytes": "12597"
},
{
"name": "GLSL",
"bytes": "23432"
},
{
"name": "Game Maker Language",
"bytes": "298131"
},
{
"name": "Gnuplot",
"bytes": "8344"
},
{
"name": "Go",
"bytes": "27122"
},
{
"name": "Golo",
"bytes": "37829"
},
{
"name": "Gosu",
"bytes": "10889"
},
{
"name": "Grace",
"bytes": "15681"
},
{
"name": "Grammatical Framework",
"bytes": "62817"
},
{
"name": "Groff",
"bytes": "115"
},
{
"name": "Groovy",
"bytes": "865"
},
{
"name": "HTML",
"bytes": "17581"
},
{
"name": "Hack",
"bytes": "32621"
},
{
"name": "Handlebars",
"bytes": "310"
},
{
"name": "Haskell",
"bytes": "7928"
},
{
"name": "Hy",
"bytes": "555"
},
{
"name": "IDL",
"bytes": "2417"
},
{
"name": "IGOR Pro",
"bytes": "789"
},
{
"name": "Idris",
"bytes": "1014"
},
{
"name": "Inform 7",
"bytes": "470"
},
{
"name": "Inno Setup",
"bytes": "4335"
},
{
"name": "Isabelle",
"bytes": "1252"
},
{
"name": "J",
"bytes": "2048"
},
{
"name": "JSONiq",
"bytes": "892"
},
{
"name": "Jasmin",
"bytes": "10685"
},
{
"name": "Java",
"bytes": "77191"
},
{
"name": "JavaScript",
"bytes": "206966"
},
{
"name": "Julia",
"bytes": "1444"
},
{
"name": "KRL",
"bytes": "239"
},
{
"name": "Kit",
"bytes": "133"
},
{
"name": "Kotlin",
"bytes": "971"
},
{
"name": "LOLCODE",
"bytes": "32116"
},
{
"name": "LSL",
"bytes": "2007"
},
{
"name": "Lasso",
"bytes": "834726"
},
{
"name": "Lean",
"bytes": "4576"
},
{
"name": "Liquid",
"bytes": "6784"
},
{
"name": "LiveScript",
"bytes": "583"
},
{
"name": "Logos",
"bytes": "306"
},
{
"name": "Logtalk",
"bytes": "260"
},
{
"name": "LookML",
"bytes": "723"
},
{
"name": "LoomScript",
"bytes": "4586"
},
{
"name": "Lua",
"bytes": "5655"
},
{
"name": "M",
"bytes": "289750"
},
{
"name": "MTML",
"bytes": "1320"
},
{
"name": "Makefile",
"bytes": "1331"
},
{
"name": "Mask",
"bytes": "1419"
},
{
"name": "Mathematica",
"bytes": "231901"
},
{
"name": "Matlab",
"bytes": "89332"
},
{
"name": "Max",
"bytes": "15467"
},
{
"name": "Mercury",
"bytes": "709952"
},
{
"name": "Monkey",
"bytes": "2587"
},
{
"name": "Moocode",
"bytes": "55340"
},
{
"name": "MoonScript",
"bytes": "21721"
},
{
"name": "NSIS",
"bytes": "8967"
},
{
"name": "Nemerle",
"bytes": "99"
},
{
"name": "NetLinx",
"bytes": "10334"
},
{
"name": "NetLinx+ERB",
"bytes": "5908"
},
{
"name": "NetLogo",
"bytes": "1209"
},
{
"name": "NewLisp",
"bytes": "20621"
},
{
"name": "Nginx",
"bytes": "1781"
},
{
"name": "Nimrod",
"bytes": "43"
},
{
"name": "Nit",
"bytes": "87621"
},
{
"name": "Nix",
"bytes": "2448"
},
{
"name": "Nu",
"bytes": "1000"
},
{
"name": "OCaml",
"bytes": "174815"
},
{
"name": "Objective-C",
"bytes": "495392"
},
{
"name": "Objective-C++",
"bytes": "72326"
},
{
"name": "Objective-J",
"bytes": "20723"
},
{
"name": "Omgrofl",
"bytes": "299"
},
{
"name": "Opa",
"bytes": "273"
},
{
"name": "Opal",
"bytes": "500"
},
{
"name": "OpenEdge ABL",
"bytes": "35950"
},
{
"name": "OpenSCAD",
"bytes": "442"
},
{
"name": "Ox",
"bytes": "5417"
},
{
"name": "Oxygene",
"bytes": "2299"
},
{
"name": "Oz",
"bytes": "1330"
},
{
"name": "PAWN",
"bytes": "22389"
},
{
"name": "PHP",
"bytes": "223781"
},
{
"name": "PLSQL",
"bytes": "5952"
},
{
"name": "PLpgSQL",
"bytes": "11792"
},
{
"name": "Pan",
"bytes": "1241"
},
{
"name": "Papyrus",
"bytes": "4527"
},
{
"name": "Parrot",
"bytes": "132"
},
{
"name": "Pascal",
"bytes": "8566"
},
{
"name": "Perl",
"bytes": "186949"
},
{
"name": "Perl6",
"bytes": "132463"
},
{
"name": "PigLatin",
"bytes": "164"
},
{
"name": "Pike",
"bytes": "8547"
},
{
"name": "PogoScript",
"bytes": "1418"
},
{
"name": "PostScript",
"bytes": "738"
},
{
"name": "PowerShell",
"bytes": "6951"
},
{
"name": "Processing",
"bytes": "385"
},
{
"name": "Prolog",
"bytes": "45743"
},
{
"name": "Propeller Spin",
"bytes": "241557"
},
{
"name": "Protocol Buffer",
"bytes": "487"
},
{
"name": "Puppet",
"bytes": "13342"
},
{
"name": "PureBasic",
"bytes": "8335"
},
{
"name": "PureScript",
"bytes": "11375"
},
{
"name": "Python",
"bytes": "110612"
},
{
"name": "QMake",
"bytes": "1031"
},
{
"name": "R",
"bytes": "15335"
},
{
"name": "Racket",
"bytes": "1678"
},
{
"name": "Ragel in Ruby Host",
"bytes": "4672"
},
{
"name": "Rebol",
"bytes": "6700"
},
{
"name": "Red",
"bytes": "10243"
},
{
"name": "RenderScript",
"bytes": "12865"
},
{
"name": "RobotFramework",
"bytes": "3713"
},
{
"name": "Ruby",
"bytes": "111536"
},
{
"name": "Rust",
"bytes": "34003"
},
{
"name": "SAS",
"bytes": "11938"
},
{
"name": "SQF",
"bytes": "2203"
},
{
"name": "SQLPL",
"bytes": "3352"
},
{
"name": "Scala",
"bytes": "10081"
},
{
"name": "Scheme",
"bytes": "15118"
},
{
"name": "Scilab",
"bytes": "395"
},
{
"name": "Shell",
"bytes": "49837"
},
{
"name": "ShellSession",
"bytes": "1806"
},
{
"name": "Shen",
"bytes": "17709"
},
{
"name": "Slash",
"bytes": "2314"
},
{
"name": "Smalltalk",
"bytes": "148143"
},
{
"name": "SourcePawn",
"bytes": "39475"
},
{
"name": "Squirrel",
"bytes": "803"
},
{
"name": "Standard ML",
"bytes": "79688"
},
{
"name": "Stata",
"bytes": "23670"
},
{
"name": "SuperCollider",
"bytes": "15212"
},
{
"name": "Swift",
"bytes": "9400"
},
{
"name": "SystemVerilog",
"bytes": "6790"
},
{
"name": "TXL",
"bytes": "1317"
},
{
"name": "Tcl",
"bytes": "9122"
},
{
"name": "TeX",
"bytes": "56664"
},
{
"name": "Tea",
"bytes": "20"
},
{
"name": "Turing",
"bytes": "384"
},
{
"name": "TypeScript",
"bytes": "535"
},
{
"name": "UnrealScript",
"bytes": "36506"
},
{
"name": "VCL",
"bytes": "8653"
},
{
"name": "VHDL",
"bytes": "217"
},
{
"name": "Verilog",
"bytes": "47617"
},
{
"name": "VimL",
"bytes": "52204"
},
{
"name": "Visual Basic",
"bytes": "12799"
},
{
"name": "Volt",
"bytes": "2207"
},
{
"name": "Web Ontology Language",
"bytes": "237587"
},
{
"name": "WebIDL",
"bytes": "2234"
},
{
"name": "XC",
"bytes": "82"
},
{
"name": "XProc",
"bytes": "336"
},
{
"name": "XQuery",
"bytes": "2719"
},
{
"name": "XS",
"bytes": "10674"
},
{
"name": "XSLT",
"bytes": "547"
},
{
"name": "Xojo",
"bytes": "11219"
},
{
"name": "Xtend",
"bytes": "4273"
},
{
"name": "Zephir",
"bytes": "16630"
},
{
"name": "Zimpl",
"bytes": "615"
},
{
"name": "eC",
"bytes": "9132"
},
{
"name": "wisp",
"bytes": "11605"
}
],
"symlink_target": ""
}
|
import testtools
from openstack.network.v2 import load_balancer
IDENTIFIER = 'IDENTIFIER'
EXAMPLE = {
'admin_state_up': True,
'description': '2',
'id': IDENTIFIER,
'listeners': '4',
'name': '5',
'operating_status': '6',
'provisioning_status': '7',
'tenant_id': '8',
'vip_address': '9',
'vip_subnet_id': '10',
}
class TestLoadBalancer(testtools.TestCase):
def test_basic(self):
sot = load_balancer.LoadBalancer()
self.assertEqual('loadbalancer', sot.resource_key)
self.assertEqual('loadbalancers', sot.resources_key)
self.assertEqual('/lbaas/loadbalancers', sot.base_path)
self.assertEqual('network', sot.service.service_type)
self.assertTrue(sot.allow_create)
self.assertTrue(sot.allow_retrieve)
self.assertTrue(sot.allow_update)
self.assertTrue(sot.allow_delete)
self.assertTrue(sot.allow_list)
def test_make_it(self):
sot = load_balancer.LoadBalancer(EXAMPLE)
self.assertEqual(EXAMPLE['admin_state_up'], sot.admin_state_up)
self.assertEqual(EXAMPLE['description'], sot.description)
self.assertEqual(EXAMPLE['id'], sot.id)
self.assertEqual(EXAMPLE['listeners'], sot.listeners)
self.assertEqual(EXAMPLE['name'], sot.name)
self.assertEqual(EXAMPLE['operating_status'], sot.operating_status)
self.assertEqual(EXAMPLE['provisioning_status'],
sot.provisioning_status)
self.assertEqual(EXAMPLE['tenant_id'], sot.project_id)
self.assertEqual(EXAMPLE['vip_address'], sot.vip_address)
self.assertEqual(EXAMPLE['vip_subnet_id'], sot.vip_subnet_id)
|
{
"content_hash": "adfdf95d07fac85b90f666f3d2433992",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 75,
"avg_line_length": 36.65217391304348,
"alnum_prop": 0.6518386714116251,
"repo_name": "sjsucohort6/openstack",
"id": "7f1dc6a0c9fa6323e1e2fe75fc8b9a15633d67c0",
"size": "2232",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/venv/lib/python2.7/site-packages/openstack/tests/unit/network/v2/test_load_balancer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "410"
},
{
"name": "CSS",
"bytes": "144982"
},
{
"name": "FreeMarker",
"bytes": "14104"
},
{
"name": "HTML",
"bytes": "8308"
},
{
"name": "Java",
"bytes": "243125"
},
{
"name": "JavaScript",
"bytes": "1493715"
},
{
"name": "Python",
"bytes": "16921939"
},
{
"name": "Shell",
"bytes": "13926"
}
],
"symlink_target": ""
}
|
from sympycore import Algebra, Verbatim
def test_basic():
a = Algebra('a')
assert str(a)=='a'
assert repr(a)=="Algebra('a')",`repr(a)`
assert bool(a)==True
assert a.as_algebra(Algebra)==a
assert a.as_algebra(Verbatim)==Verbatim('a')
assert Verbatim('a').as_algebra(Algebra)==a
|
{
"content_hash": "17e136f013f56804153a03ca82855e76",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 48,
"avg_line_length": 30.6,
"alnum_prop": 0.6339869281045751,
"repo_name": "pearu/sympycore",
"id": "1ef1206484a3bb35f348191666a12af81c70e1f8",
"size": "307",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sympycore/basealgebra/tests/test_algebra.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "79419"
},
{
"name": "Python",
"bytes": "935700"
}
],
"symlink_target": ""
}
|
from T.tings.models.models_collections import TCollection
from T.tings.models.models_subscriptions import TSubscription
from T.tings.models.models_tings import TTing
|
{
"content_hash": "8b342d0374c13e4de51a72038817297f",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 61,
"avg_line_length": 55,
"alnum_prop": 0.8606060606060606,
"repo_name": "allynt/tings",
"id": "3a393ecfaef66c985ec26c35e8e335b0b4f5dedb",
"size": "165",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "T/tings/models/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "5408"
},
{
"name": "JavaScript",
"bytes": "3694"
},
{
"name": "Python",
"bytes": "26680"
}
],
"symlink_target": ""
}
|
"""This code example updates content metadata key hierarchies.
To determine which content metadata key hierarchies exist, run
get_all_content_metadata_key_hierarchies.py.
This feature is only available to Video Solutions publishers.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
# Set the ID of the content metadata key hierarchy to update.
CONTENT_METADATA_KEY_HIERARCHY_ID = (
'INSERT_CONTENT_METADATA_KEY_HIERARCHY_ID_HERE')
# Set the ID of the custom targeting key to be added as a hierarchy level.
CUSTOM_TARGETING_KEY_ID = 'INSERT_CUSTOM_TARGETING_KEY_ID_HERE'
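# A hedged sketch of what main() below constructs and appends as a new
# hierarchy level (the key ID and level number are hypothetical):
#   {'customTargetingKeyId': '12345', 'hierarchyLevel': '3'}
# 'hierarchyLevel' is one-based, hence the len(hierarchy_levels) + 1 below.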
def main(client, content_metadata_key_hierarchy_id, custom_targeting_key_id):
# Initialize appropriate service.
content_metadata_key_hierarchy_service = client.GetService(
'ContentMetadataKeyHierarchyService', version='v201811')
# Create a query to select a single content metadata key hierarchy.
statement = (ad_manager.StatementBuilder(version='v201811')
.Where('id = :id')
.OrderBy('id', ascending=True)
.WithBindVariable('id', long(content_metadata_key_hierarchy_id))
.Limit(1))
# Get a single content metadata key hierarchy by statement.
response = (content_metadata_key_hierarchy_service
.getContentMetadataKeyHierarchiesByStatement(
statement.ToStatement()))[0]
content_metadata_key_hierarchies = (response['results']
if 'results' in response else None)
if content_metadata_key_hierarchies:
content_metadata_key_hierarchy = content_metadata_key_hierarchies[0]
# Update the content metadata key hierarchy by adding a hierarchy level.
hierarchy_levels = content_metadata_key_hierarchy['hierarchyLevels']
hierarchy_level = {
'customTargetingKeyId': custom_targeting_key_id,
'hierarchyLevel': str(len(hierarchy_levels) + 1)
}
# Note: list.append returns None and mutates hierarchy_levels in place,
# which already updates the hierarchy dict; do not reassign its result.
hierarchy_levels.append(hierarchy_level)
content_metadata_key_hierarchies = (
content_metadata_key_hierarchy_service
.updateContentMetadataKeyHierarchies(content_metadata_key_hierarchies))
# Display results.
for content_metadata_key_hierarchy in content_metadata_key_hierarchies:
print ('Content metadata key hierarchy with id "%s" and name "%s"'
' was updated.' % (content_metadata_key_hierarchy['id'],
content_metadata_key_hierarchy['name']))
else:
print 'No content metadata key hierarchies were found'
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client, CONTENT_METADATA_KEY_HIERARCHY_ID,
CUSTOM_TARGETING_KEY_ID)
|
{
"content_hash": "bad5d41c2e79326e25fdfa77ad1e3814",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 79,
"avg_line_length": 39.3974358974359,
"alnum_prop": 0.7103807354376831,
"repo_name": "Aloomaio/googleads-python-lib",
"id": "96f131ac7fd1ddb13e679ecc697173bee78d9262",
"size": "3695",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/ad_manager/v201811/content_metadata_key_hierarchy_service/update_content_metadata_key_hierarchies.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "491015"
}
],
"symlink_target": ""
}
|
import os
import sys
# extend sys.path before importing taran so the local package is importable
sys.path.insert(0, os.path.abspath('../lib'))
from taran import (__title__, __version__, __author__, __copyright__)
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
]
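# A hedged note: with sphinx.ext.napoleon enabled above, Google- and
# NumPy-style docstrings are parsed without extra configuration; the other
# extensions add autodoc summaries, todo directives, documentation coverage
# checks and links to highlighted source code.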
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = __title__
copyright = __copyright__
author = __author__
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'taran v0.0.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'tarandoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
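# For instance (a hedged editorial sketch, not part of the original config),
# A4 paper at 11pt would be selected with:
#
#   latex_elements = {'papersize': 'a4paper', 'pointsize': '11pt'}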
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'taran.tex', u'taran Documentation',
u'Jon Hadfield \\textless{}jon@lessknown.co.uk\\textgreater{}', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... This helps avoid clashes with
# user-added packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'taran', u'taran Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'taran', u'taran Documentation',
author, 'taran', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
|
{
"content_hash": "f179cb06a1dc175f065ae8b95907b707",
"timestamp": "",
"source": "github",
"line_count": 330,
"max_line_length": 80,
"avg_line_length": 28.40909090909091,
"alnum_prop": 0.68512,
"repo_name": "jonhadfield/taran",
"id": "cf73bc1374a12da222c74de9fd1dbf99e7a51703",
"size": "10033",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "74548"
}
],
"symlink_target": ""
}
|
from asyncio import coroutine
from .viewset import ObjectViewSet
class Ticket(ObjectViewSet):
name = 'Ticket'
uri_prefix = 'org.fetsy'
new_object_timestamp = True
new_object_schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "New ticket",
"description": "A new ticket without ID",
"type": "object",
"properties": {
"content": {
"description": "The content of the ticket",
"type": "string"
},
"period": {
"description": "The period in which the ticket has to be "
"solved",
"type": "integer"
}
},
"additionalProperties": False,
"required": ["content"]
}
update_object_schema = {
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Changed ticket",
"description": "A subset of a ticket to be changed",
"type": "object",
"properties": {
"id": {
"description": "The ID of the ticket",
"type": "integer"
},
"content": {
"description": "The content of the ticket",
"type": "string"
},
"status": {
"description": "The status of the ticket",
"type": "string",
"enum": [
"New",
"Work in progress",
"Closed"
]
},
"priority": {
"description": "The priority of the ticket from low (1) to "
"high (5)",
"type": "integer",
"minimum": 1,
"maximun": 5
},
"assignee": {
"description": "The person who is resposible to solved the "
"ticket",
"type": "string",
"minLength": 1
},
"period": {
"description": "The period in which the ticket has to be "
"solved",
"type": "integer"
}
},
"additionalProperties": False,
"required": ["id"]
}
@coroutine
def register_viewset(self):
"""
        Registers all default procedures for this viewset. Additionally
        registers the list_ticket_assignees procedure.
"""
yield from super().register_viewset()
yield from self.app_session.register(
self.list_ticket_assignees,
self.uri_prefix + '.listTicketAssignees')
self.logger.debug('Remote procedure to list ticket assignees '
'registered.')
def set_defaults(self, obj):
"""
Set defaults for new tickets.
"""
obj.setdefault('period', 120)
obj['status'] = 'New'
obj['priority'] = 3
obj['assignee'] = '–'
return obj
@coroutine
def list_ticket_assignees(self, *args, **kwargs):
"""
Async method to get all assignees of all tickets.
"""
self.logger.debug('Remote procedure list_ticket_assignees called.')
        cursor = self.database[self.name].find()
        # TODO: For use with Mongo >= 3.2, use the $text operator.
        assignees = set()
        while (yield from cursor.fetch_next):
            ticket = cursor.next_object()
assignees.add(ticket.get('assignee', ''))
result = [assignee for assignee in assignees
if kwargs.get('filterValue', '').lower() in assignee.lower()]
return result
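# Hedged usage sketch (editorial addition, not part of the original module):
# assuming the third-party `jsonschema` package, the draft-04 schemas above
# can be exercised directly. Left commented out because this module only
# imports cleanly as part of its package:
#
#   from jsonschema import validate, ValidationError
#   validate({'content': 'Projector broken', 'period': 60},
#            Ticket.new_object_schema)    # passes
#   validate({'content': 'Projector broken', 'id': 1},
#            Ticket.new_object_schema)    # raises ValidationError, because
#                                         # additionalProperties is False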
|
{
"content_hash": "e6f19be61f4e753c1b07bef28cf1f5f8",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 79,
"avg_line_length": 33.205357142857146,
"alnum_prop": 0.47163215918257595,
"repo_name": "normanjaeckel/FeTSy",
"id": "1dbeb55e4f2f8f2efd4cb8d97c94b8639ad86938",
"size": "3721",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fetsy/database/ticket.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "438"
},
{
"name": "CoffeeScript",
"bytes": "5106"
},
{
"name": "HTML",
"bytes": "23447"
},
{
"name": "JavaScript",
"bytes": "644"
},
{
"name": "Python",
"bytes": "17107"
}
],
"symlink_target": ""
}
|
import pytest
from .. import mockhttp
from ... import di, Story
def test_get_mockhttp_as_dep():
story = Story()
story.use(mockhttp.MockHttpInterface())
with di.child_scope():
@di.desc()
class OneClass:
@di.inject()
def deps(self, http):
self.http = http
assert isinstance(di.injector.get('one_class').http, mockhttp.MockHttpInterface)
|
{
"content_hash": "f4f49eae0c3fa78ef0568f794d8b22f2",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 88,
"avg_line_length": 24.41176470588235,
"alnum_prop": 0.6,
"repo_name": "botstory/botstory",
"id": "d97efe2ff15634ad79365bca28ef764ca25d8eff",
"size": "415",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "botstory/integrations/mockhttp/mockhttp_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "340760"
},
{
"name": "Shell",
"bytes": "2009"
}
],
"symlink_target": ""
}
|
from ggrc.models import Policy
from ggrc.models import Relationship
from ggrc.converters import errors
from integration.ggrc.converters import TestCase
from integration.ggrc.generator import ObjectGenerator
class TestUnmappings(TestCase):
def setUp(self):
TestCase.setUp(self)
self.generator = ObjectGenerator()
self.client.get("/login")
def test_policy_basic_import(self):
filename = "multi_basic_policy_orggroup_product_with_mappings.csv"
self.import_file(filename)
self.assertEqual(Relationship.query.count(), 13)
filename = "multi_basic_policy_orggroup_product_with_unmappings.csv"
self.import_file(filename)
self.assertEqual(Relationship.query.count(), 0)
|
{
"content_hash": "8b7ce3fad4b418f4aab4e2bacbd7bc23",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 72,
"avg_line_length": 32.09090909090909,
"alnum_prop": 0.7662889518413598,
"repo_name": "prasannav7/ggrc-core",
"id": "cb7a673a4a4df4126d5212950e7681a947c212ef",
"size": "946",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "test/integration/ggrc/converters/test_import_unmapping.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "167445"
},
{
"name": "Cucumber",
"bytes": "139629"
},
{
"name": "HTML",
"bytes": "1098331"
},
{
"name": "JavaScript",
"bytes": "1447363"
},
{
"name": "Makefile",
"bytes": "6225"
},
{
"name": "Mako",
"bytes": "2559"
},
{
"name": "Python",
"bytes": "2370461"
},
{
"name": "Shell",
"bytes": "33089"
}
],
"symlink_target": ""
}
|
import os
import sys
# internal imports
import numpy as np
import tensorflow as tf
import ntpath
import librosa
from magenta.models.nsynth import utils
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("expdir", "",
"The log directory for this experiment. Required if "
"`checkpoint_path` is not given.")
tf.app.flags.DEFINE_string("checkpoint_path", "",
"A path to the checkpoint. If not given, the latest "
"checkpoint in `expdir` will be used.")
tf.app.flags.DEFINE_string("wavdir", "",
"The directory of WAVs to yield embeddings from.")
tf.app.flags.DEFINE_string("config", "model", "Model configuration name")
tf.app.flags.DEFINE_integer("sample_length", 64000, "Sample length.")
tf.app.flags.DEFINE_integer("batch_size", 1, "Sample length.")
tf.app.flags.DEFINE_string("wav_savedir", "", "Where to save the generated wav files.")
tf.app.flags.DEFINE_integer("sample_rate", 16000, "Sample length.")
tf.app.flags.DEFINE_string("log", "INFO",
"The threshold for what messages will be logged."
"DEBUG, INFO, WARN, ERROR, or FATAL.")
def write_wav(waveform, sample_rate, pathname, wavfile_name):
filename = "%s_decode.wav" % wavfile_name.strip(".wav")
pathname += "/"+filename
y = np.array(waveform)
librosa.output.write_wav(pathname, y, sample_rate)
print('Updated wav file at {}'.format(pathname))
def sampled(prediction):
return tf.multinomial(prediction, 1)
def generate(sample):
#return mu_law_decode(sample, 256)
return utils.inv_mu_law(sample - 128)
def main(unused_argv=None):
tf.logging.set_verbosity(FLAGS.log)
if FLAGS.config is None:
raise RuntimeError("No config name specified.")
config = utils.get_module("ours." + FLAGS.config).Config(FLAGS.batch_size)
if FLAGS.checkpoint_path:
checkpoint_path = FLAGS.checkpoint_path
else:
expdir = FLAGS.expdir
tf.logging.info("Will load latest checkpoint from %s.", expdir)
    if not tf.gfile.Exists(expdir):
      tf.logging.fatal("\tExperiment save dir '%s' does not exist!", expdir)
      sys.exit(1)
try:
checkpoint_path = tf.train.latest_checkpoint(expdir)
except tf.errors.NotFoundError:
tf.logging.fatal("There was a problem determining the latest checkpoint.")
sys.exit(1)
if not tf.train.checkpoint_exists(checkpoint_path):
tf.logging.fatal("Invalid checkpoint path: %s", checkpoint_path)
sys.exit(1)
tf.logging.info("Will restore from checkpoint: %s", checkpoint_path)
wavdir = FLAGS.wavdir
tf.logging.info("Will load Wavs from %s." % wavdir)
######################
# restore the model #
######################
tf.logging.info("Building graph")
with tf.Graph().as_default(), tf.device("/gpu:0"):
with tf.variable_scope('ours_model_var_scope') as var_scope:
sample_length = FLAGS.sample_length
batch_size = FLAGS.batch_size
wav_placeholder = tf.placeholder(
tf.float32, shape=[batch_size, sample_length])
wav_names = tf.placeholder(tf.string, shape=[batch_size])
encode_op = config.encode(wav_placeholder)["encoding"]
decode_op = config.decode(encode_op)["logits"] # predictions"]
sample = sampled(decode_op)
reshaped_sample = tf.reshape(sample, [batch_size, sample_length])
generate_wav = generate(reshaped_sample)
ema = tf.train.ExponentialMovingAverage(decay=0.9999)
variables_to_restore = ema.variables_to_restore()
# Create a saver, which is used to restore the parameters from checkpoints
saver = tf.train.Saver(variables_to_restore)
session_config = tf.ConfigProto(allow_soft_placement=True)
# Set the opt_level to prevent py_funcs from being executed multiple times.
session_config.graph_options.optimizer_options.opt_level = 2
sess = tf.Session("", config=session_config)
tf.logging.info("\tRestoring from checkpoint.")
saver.restore(sess, checkpoint_path)
def is_wav(f):
return f.lower().endswith(".wav")
wavfiles = sorted([
os.path.join(wavdir, fname) for fname in tf.gfile.ListDirectory(wavdir)
if is_wav(fname)
])
def get_fnames(files):
fnames_list = []
for f in files:
fnames_list.append(ntpath.basename(f))
return fnames_list
for start_file in xrange(0, len(wavfiles), batch_size):
batch_number = (start_file / batch_size) + 1
tf.logging.info("On batch %d.", batch_number)
end_file = start_file + batch_size
files = wavfiles[start_file:end_file]
wavfile_names = get_fnames(files)
# Ensure that files has batch_size elements.
batch_filler = batch_size - len(files)
files.extend(batch_filler * [files[-1]])
wavdata = np.array([utils.load_wav(f)[:sample_length] for f in files])
try:
res = sess.run(generate_wav,
feed_dict={wav_placeholder: wavdata, wav_names: wavfile_names})
    except Exception as e:
tf.logging.info("Unexpected error happened: %s.", e)
raise
for decoded_wav, filename in zip(res, wavfile_names):
write_wav(decoded_wav, FLAGS.sample_rate, FLAGS.wav_savedir, filename)
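# Hedged reference sketch (editorial addition): `utils.inv_mu_law` above is
# assumed to implement standard inverse mu-law companding. This NumPy-only
# equivalent for 8-bit codes (mu = 255) is provided for illustration and is
# not called anywhere in this script.
def inv_mu_law_sketch(codes, mu=255.0):
  """Map centred mu-law codes in [-128, 127] back to waveform in [-1, 1]."""
  y = np.asarray(codes, dtype=np.float32) / 128.0
  return np.sign(y) * ((1.0 + mu) ** np.abs(y) - 1.0) / mu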
if __name__ == "__main__":
tf.app.run()
|
{
"content_hash": "c85eaa5a3a8da1b09958a8f8a433a4b8",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 87,
"avg_line_length": 36.054054054054056,
"alnum_prop": 0.6506746626686657,
"repo_name": "bda2017-shallowermind/MusTGAN",
"id": "1d7b2c92c7e30183bc2a1290008e0cb2da7a6aee",
"size": "5932",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "magenta/magenta/models/nsynth/ours/generate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12668"
},
{
"name": "HTML",
"bytes": "721"
},
{
"name": "JavaScript",
"bytes": "43259"
},
{
"name": "Jupyter Notebook",
"bytes": "2115912"
},
{
"name": "Protocol Buffer",
"bytes": "12931"
},
{
"name": "Python",
"bytes": "1389487"
},
{
"name": "Shell",
"bytes": "8783"
}
],
"symlink_target": ""
}
|
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import BaseWebDriver, WebDriverElement as BaseWebDriverElement
from splinter.driver.webdriver.cookie_manager import CookieManager
class WebDriver(BaseWebDriver):
driver_name = "Firefox"
def __init__(self, profile=None, extensions=None, user_agent=None, profile_preferences=None, wait_time=2):
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference('extensions.logging.enabled', False)
firefox_profile.set_preference('network.dns.disableIPv6', False)
if user_agent is not None:
firefox_profile.set_preference('general.useragent.override', user_agent)
if profile_preferences:
for key, value in profile_preferences.iteritems():
firefox_profile.set_preference(key, value)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
self.driver = Firefox(firefox_profile)
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__(wait_time)
class WebDriverElement(BaseWebDriverElement):
def mouse_over(self):
"""
Firefox doesn't support mouseover.
"""
raise NotImplementedError("Firefox doesn't support mouse over")
def mouse_out(self):
"""
Firefox doesn't support mouseout.
"""
raise NotImplementedError("Firefox doesn't support mouseout")
mouseover = mouse_over
mouseout = mouse_out
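# Hedged usage sketch (editorial addition, not part of the original module).
# Constructing the driver with a custom user agent and extra preferences
# might look like this; left commented out because it launches a real
# Firefox instance:
#
#   browser = WebDriver(user_agent='MyBot/1.0',
#                       profile_preferences={'browser.startup.page': 0},
#                       wait_time=5)
#   browser.driver.get('http://example.com')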
|
{
"content_hash": "cc8dc867230386fa188e323d704cadf6",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 110,
"avg_line_length": 32.88235294117647,
"alnum_prop": 0.6881335718545021,
"repo_name": "moorwu/CloudDataHIveSublime",
"id": "be7e99d3db1ac745759d3c30b765764f4d8e1811",
"size": "1861",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "splinter/driver/webdriver/firefox.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "394454"
}
],
"symlink_target": ""
}
|
import wx
#---------------------------------------------------------------------------
class TestPanel(wx.Panel):
def __init__(self, parent, log):
self.log = log
wx.Panel.__init__(self, parent, -1)
b = wx.Button(self, -1, "Create and Show a TextEntryDialog", (50,50))
self.Bind(wx.EVT_BUTTON, self.OnButton, b)
def OnButton(self, evt):
dlg = wx.TextEntryDialog(
self, 'What is your favorite programming language?',
'Eh??', 'Python')
dlg.SetValue("Python is the best!")
if dlg.ShowModal() == wx.ID_OK:
self.log.WriteText('You entered: %s\n' % dlg.GetValue())
dlg.Destroy()
#---------------------------------------------------------------------------
def runTest(frame, nb, log):
win = TestPanel(nb, log)
return win
#---------------------------------------------------------------------------
overview = """\
This class represents a dialog that requests a one-line text string from the user.
It is implemented as a generic wxWindows dialog. Along with the usual wx.Dialog
style flags, all of the wx.TextCtrl TE_* style flags are accepted, so, for example,
wx.TE_PASSWORD could be used to create a password dialog.
As with other dialogs of this type, the user input must be retrieved prior to
destroying the dialog.
"""
if __name__ == '__main__':
    import sys, os
import run
run.main(['', os.path.basename(sys.argv[0])] + sys.argv[1:])
|
{
"content_hash": "dc473c647677f3c289529c156e062e1c",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 83,
"avg_line_length": 27.072727272727274,
"alnum_prop": 0.5271994627266622,
"repo_name": "dnxbjyj/python-basic",
"id": "384ed0e6e80550d9d3d5b0c33881ad58102730cb",
"size": "1512",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gui/wxpython/wxPython-demo-4.0.1/demo/TextEntryDialog.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "70"
},
{
"name": "HTML",
"bytes": "274934"
},
{
"name": "Jupyter Notebook",
"bytes": "868723"
},
{
"name": "Python",
"bytes": "4032747"
},
{
"name": "Shell",
"bytes": "446"
}
],
"symlink_target": ""
}
|
from rpc import RPC, RPCReply
from ncclient.xml_ import *
from lxml import etree
import util
class GetReply(RPCReply):
"""Adds attributes for the *data* element to `RPCReply`."""
def _parsing_hook(self, root):
self._data = None
if not self._errors:
self._data = root.find(qualify("data"))
@property
def data_ele(self):
"*data* element as an :class:`~xml.etree.ElementTree.Element`"
if not self._parsed:
self.parse()
return self._data
@property
def data_xml(self):
"*data* element as an XML string"
if not self._parsed:
self.parse()
return to_xml(self._data)
data = data_ele
"Same as :attr:`data_ele`"
class Get(RPC):
"The *get* RPC."
REPLY_CLS = GetReply
"See :class:`GetReply`."
def request(self, filter=None):
"""Retrieve running configuration and device state information.
        *filter* specifies the portion of the configuration to retrieve (by default the entire configuration is retrieved)
:seealso: :ref:`filter_params`
"""
node = new_ele("get")
if filter is not None:
node.append(util.build_filter(filter))
return self._request(node)
class GetConfig(RPC):
"""The *get-config* RPC."""
REPLY_CLS = GetReply
"""See :class:`GetReply`."""
def request(self, source, filter=None):
"""Retrieve all or part of a specified configuration.
*source* name of the configuration datastore being queried
        *filter* specifies the portion of the configuration to retrieve (by default the entire configuration is retrieved)
:seealso: :ref:`filter_params`"""
node = new_ele("get-config")
node.append(util.datastore_or_url("source", source, self._assert))
if filter is not None:
node.append(util.build_filter(filter))
return self._request(node)
class GetSchema(RPC):
"""The *get-schema* RPC."""
REPLY_CLS = GetReply
"""See :class:`GetReply`."""
def request(self, identifier, version=None, format=None):
"""Retrieve a named schema, with optional revision and type.
*identifier* name of the schema to be retrieved
*version* version of schema to get
*format* format of the schema to be retrieved, yang is the default
:seealso: :ref:`filter_params`"""
node = etree.Element(qualify("get-schema",NETCONF_MONITORING_NS))
if identifier is not None:
elem = etree.Element(qualify("identifier",NETCONF_MONITORING_NS))
elem.text = identifier
node.append(elem)
if version is not None:
elem = etree.Element(qualify("version",NETCONF_MONITORING_NS))
elem.text = version
node.append(elem)
if format is not None:
elem = etree.Element(qualify("format",NETCONF_MONITORING_NS))
elem.text = format
node.append(elem)
return self._request(node)
class Dispatch(RPC):
"""Generic retrieving wrapper"""
REPLY_CLS = GetReply
"""See :class:`GetReply`."""
def request(self, rpc_command, source=None, filter=None):
"""
*rpc_command* specifies rpc command to be dispatched either in plain text or in xml element format (depending on command)
*source* name of the configuration datastore being queried
        *filter* specifies the portion of the configuration to retrieve (by default the entire configuration is retrieved)
:seealso: :ref:`filter_params`
Examples of usage::
dispatch('clear-arp-table')
or dispatch element like ::
xsd_fetch = new_ele('get-xnm-information')
sub_ele(xsd_fetch, 'type').text="xml-schema"
sub_ele(xsd_fetch, 'namespace').text="junos-configuration"
dispatch(xsd_fetch)
"""
if etree.iselement(rpc_command):
node = rpc_command
else:
node = new_ele(rpc_command)
if source is not None:
node.append(util.datastore_or_url("source", source, self._assert))
if filter is not None:
node.append(util.build_filter(filter))
return self._request(node)
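# Hedged usage sketch (editorial addition, not part of the original module):
# these operations are normally reached through ncclient's high-level manager
# API, roughly as follows (host and credentials are placeholders):
#
#   from ncclient import manager
#   with manager.connect(host='192.0.2.1', username='admin',
#                        password='secret', hostkey_verify=False) as m:
#       print(m.get_config(source='running').data_xml)
#       print(m.get_schema('ietf-interfaces').data_xml)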
|
{
"content_hash": "ad2a4e66526bc98bf71440168c5522d3",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 129,
"avg_line_length": 29.108843537414966,
"alnum_prop": 0.6111240944145828,
"repo_name": "mith1979/ansible_automation",
"id": "b7d2b69257d80bff247909c23c1bcd40c60416f6",
"size": "4858",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "applied_python/applied_python/lib/python2.7/site-packages/ncclient/operations/retrieve.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1005"
},
{
"name": "C",
"bytes": "84868"
},
{
"name": "CSS",
"bytes": "50289"
},
{
"name": "HTML",
"bytes": "70428"
},
{
"name": "JavaScript",
"bytes": "105262"
},
{
"name": "PowerShell",
"bytes": "51840"
},
{
"name": "Python",
"bytes": "19073705"
},
{
"name": "Shell",
"bytes": "3747"
},
{
"name": "XSLT",
"bytes": "152770"
}
],
"symlink_target": ""
}
|
import django.contrib.messages as django_messages
from django.contrib.messages.storage import default_storage
from django.http import HttpRequest
from nose.tools import eq_
from tower import ugettext as _
from amo.messages import _make_message, info
def test_xss():
title = "<script>alert(1)</script>"
message = "<script>alert(2)</script>"
r = _make_message(title)
assert "<script>alert(1)</script>" in r
r = _make_message(None, message)
assert "<script>alert(2)</script>" in r
r = _make_message(title, title_safe=True)
assert "<script>alert(1)</script>" in r
r = _make_message(None, message, message_safe=True)
assert "<script>alert(2)</script>" in r
# Make sure safe flags are independent
r = _make_message(title, message_safe=True)
assert "<script>alert(1)</script>" in r
r = _make_message(None, message, title_safe=True)
assert "<script>alert(2)</script>" in r
def test_no_dupes():
"""Test that duplicate messages aren't saved."""
request = HttpRequest()
setattr(request, '_messages', default_storage(request))
info(request, 'Title', 'Body')
info(request, 'Title', 'Body')
info(request, 'Another Title', 'Another Body')
storage = django_messages.get_messages(request)
eq_(len(storage), 2, 'Too few or too many messages recorded.')
def test_l10n_dups():
"""Test that L10n values are preserved."""
request = HttpRequest()
setattr(request, '_messages', default_storage(request))
info(request, _('Title'), _('Body'))
info(request, _('Title'), _('Body'))
info(request, _('Another Title'), _('Another Body'))
storage = django_messages.get_messages(request)
eq_(len(storage), 2, 'Too few or too many messages recorded.')
def test_unicode_dups():
"""Test that unicode values are preserved."""
request = HttpRequest()
setattr(request, '_messages', default_storage(request))
info(request, u'Titlé', u'Body')
info(request, u'Titlé', u'Body')
info(request, u'Another Titlé', u'Another Body')
storage = django_messages.get_messages(request)
eq_(len(storage), 2, 'Too few or too many messages recorded.')
|
{
"content_hash": "47e24346bad7dc441eca017c89c43b9c",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 66,
"avg_line_length": 31.714285714285715,
"alnum_prop": 0.6630630630630631,
"repo_name": "jinankjain/zamboni",
"id": "f8060c83e2e4d61e16a51f51f8a65419427b5230",
"size": "2247",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "apps/amo/tests/test_messages.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from setuptools import setup, find_packages
setup(
name="procgen_build",
packages=find_packages(),
version="0.0.1",
install_requires=[
# rather than rely on system cmake, install it here
"cmake==3.21.3",
# this is required by procgen/build.py
"gym3==0.3.0",
],
)
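# Hedged usage note (editorial addition): a helper package like this is
# typically installed into the build environment with something like
# `pip install ./procgen-build` before invoking the main build.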
|
{
"content_hash": "f955a77beb68d8558fa8a033da7e2670",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 59,
"avg_line_length": 22.642857142857142,
"alnum_prop": 0.5962145110410094,
"repo_name": "openai/procgen",
"id": "efbb93ee660f9a9781813430adf9e27d4684a5b6",
"size": "317",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "procgen-build/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1092"
},
{
"name": "C++",
"bytes": "331483"
},
{
"name": "CMake",
"bytes": "2099"
},
{
"name": "Dockerfile",
"bytes": "226"
},
{
"name": "Python",
"bytes": "38991"
}
],
"symlink_target": ""
}
|
from naglib.config.base import *
""" A representation of a nagios host"""
class Host(BaseObject):
TYPE = 'host'
TEMPLATE_CLASS = 'host.HostTemplate'
PARAMS = (
'use',
'2d_coords',
'3d_coords',
'action_url',
'active_checks_enabled',
'address',
'alias',
'check_command',
'check_freshness',
'check_interval',
'check_period',
'contact_groups',
'contacts',
'display_name',
'event_handler',
'event_handler_enabled',
'first_notification_delay',
'flap_detection_enabled',
'flap_detection_options',
'freshness_threshold',
'high_flap_threshold',
'host_name',
'hostgroups',
'icon_image',
'icon_image_alt',
'initial_state',
'low_flap_threshold',
'max_check_attempts',
'notes',
'notes_url',
'notification_interval',
'notification_options',
'notification_period',
'notifications_enabled',
'obsess_over_host',
'parents',
'passive_checks_enabled',
'process_perf_data',
'retain_nonstatus_information',
'retain_status_information',
'retry_interval',
'stalking_options',
'statusmap_image',
'vrml_image',
)
REQUIRED_PARAMS = (
'address',
'host_name',
'alias',
'max_check_attempts',
'notification_interval',
'notification_period',
)
    def __init__(self, host_name, network_site=None, registry=None, **kwargs):
self.props = dict()
self.network_site = network_site
self.props.update(self.get_hostname(host_name, network_site, **kwargs))
self._registry_prefix = self._datacenter
super(Host, self).__init__(registry=registry, **kwargs)
@staticmethod
    def get_hostname(host_name=None, network_site=None, **kwargs):
if network_site:
_datacenter = network_site
else:
_datacenter = kwargs.get('use', 'generic-host')
        if not kwargs.get('qualified', False):
            host = host_name.split('.')[0]
            host_name = "%s.%s" % (host, _datacenter)
if kwargs.get('alias', None):
alias = kwargs['alias']
else:
alias = host_name
return dict(host_name=host_name,
alias=alias,
_datacenter=_datacenter)
@staticmethod
def identity_for(**kwargs):
host_config = Host.get_hostname(**kwargs)
return host_config['host_name']
@property
def identity(self):
return self.host_name
class HostTemplate(BaseTemplate):
    PARAMS = Host.PARAMS + ('name', 'register')
TYPE = 'host'
TEMPLATE_CLASS = 'host.HostTemplate'
|
{
"content_hash": "c9d42f0dc7cfa737f953daedf58d7e88",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 80,
"avg_line_length": 26.787037037037038,
"alnum_prop": 0.5416522640857242,
"repo_name": "johnskopis/naglib",
"id": "db8178d9e01814225898e27739dd7e7a4f2f69af",
"size": "2916",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "naglib/config/host.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "30840"
}
],
"symlink_target": ""
}
|