repo_name
stringlengths
5
100
ref
stringlengths
12
67
path
stringlengths
4
244
copies
stringlengths
1
8
content
stringlengths
0
1.05M
minhphung171093/GreenERP_V9
refs/heads/master
openerp/addons/base/module/wizard/base_module_configuration.py
46
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from openerp.osv import osv
from openerp.tools.translate import _


class base_module_configuration(osv.osv_memory):
    """Transient wizard that chains through pending configuration steps."""

    _name = "base.module.configuration"

    def start(self, cr, uid, ids, context=None):
        """Launch the next pending configuration wizard, or report completion.

        Searches ``ir.actions.todo`` for records that are recurring or still
        open.  If any exist, delegates to ``res.config.start`` which walks
        through them; otherwise returns an ``ir.actions.act_window`` that
        displays the "configuration done" form.
        """
        todo_obj = self.pool.get('ir.actions.todo')
        pending = todo_obj.search(
            cr, uid,
            ['|', ('type', '=', 'recurring'), ('state', '=', 'open')])
        if pending:
            # At least one wizard remains: let res.config drive the sequence.
            config_obj = self.pool.get('res.config')
            return config_obj.start(cr, uid, ids, context=context)

        # Nothing left to configure: show the completion message form.
        data_obj = self.pool.get('ir.model.data')
        xml_id = data_obj._get_id(
            cr, uid, 'base', 'view_base_module_configuration_form')
        form_view_id = data_obj.browse(cr, uid, xml_id).res_id
        return {
            'name': _('System Configuration done'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'base.module.configuration',
            'view_id': [form_view_id],
            'type': 'ir.actions.act_window',
            'target': 'new',
        }

base_module_configuration()
dmsimard/ara
refs/heads/master
ara/ui/templatetags/datetime_formatting.py
1
# Copyright (c) 2019 Red Hat, Inc.
#
# This file is part of ARA Records Ansible.
#
# ARA is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ARA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ARA. If not, see <http://www.gnu.org/licenses/>.

import datetime

from django import template
from django.utils.dateparse import parse_datetime

register = template.Library()


@register.filter(name="format_duration")
def format_duration(duration):
    """Trim the last four characters from a duration string.

    Presumably strips trailing sub-second precision — TODO confirm the
    expected input format against the callers/templates.
    """
    if duration is not None:
        return duration[:-4]
    return duration


@register.filter(name="format_date")
def format_datetime(value):
    """Parse an ISO-ish datetime string and render it for display.

    Note: the parameter used to be named ``datetime``, shadowing the
    ``datetime`` module imported above; renamed to avoid the shadowing.
    """
    return parse_datetime(value).strftime("%d %b %Y %H:%M:%S %z")


@register.simple_tag(name="past_timestamp")
def past_timestamp(weeks=0, days=0, hours=0, minutes=0, seconds=0):
    """
    Produces a timestamp from the past compatible with the API.
    Used to provide time ranges by templates, for example:
        {% past_timestamp hours=24 %}
        {% past_timestamp days=7 %}
    See: https://docs.python.org/3/library/datetime.html#datetime.timedelta
    """
    # timedelta treats zero components as no offset, so the components can
    # be passed straight through instead of building a kwargs dict first.
    delta = datetime.timedelta(
        weeks=weeks, days=days, hours=hours, minutes=minutes, seconds=seconds
    )
    return (datetime.datetime.now() - delta).isoformat()
coffenbacher/askbot-devel
refs/heads/master
askbot/management/commands/askbot_update_index.py
9
import sys
import traceback

from optparse import make_option

from django.core.management import get_commands, load_command_class
from django.utils.translation import activate as activate_language
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils.encoding import smart_str

try:
    from haystack.management.commands.update_index import Command as UpdateCommand
    # Reuse haystack's update_index options, minus --verbosity which
    # BaseCommand already provides (duplicate options crash optparse).
    haystack_option_list = [option for option in UpdateCommand.base_options
                            if option.get_opt_string() != '--verbosity']
except ImportError:
    haystack_option_list = []


class Command(BaseCommand):
    """Rebuild the haystack search index for one language.

    Delegates the actual indexing to haystack's ``update_index`` command,
    after activating the requested language and pointing it at the
    per-language search backend connection (``default_<lang>``).
    """
    help = "Completely rebuilds the search index by removing the old data and then updating."
    base_options = [
        make_option("-l", "--language",
                    action="store",
                    type="string",
                    dest="language",
                    help='Language to use, in language code format'),
    ]
    option_list = list(BaseCommand.option_list) + haystack_option_list + base_options

    def handle(self, *args, **options):
        # optparse populates options['language'] with None when the flag is
        # absent, so dict.get()'s default never applied; use `or` so the
        # settings fallback actually takes effect.
        lang_code = options.get('language') or settings.LANGUAGE_CODE.lower()
        activate_language(lang_code)
        # Route indexing to the per-language backend connection.
        options['using'] = ['default_%s' % lang_code[:2], ]
        klass = self._get_command_class('update_index')
        klass.handle(*args, **options)

    def _get_command_class(self, name):
        """Resolve a management command by name, loading it if necessary."""
        try:
            app_name = get_commands()[name]
            if isinstance(app_name, BaseCommand):
                # If the command is already loaded, use it directly.
                klass = app_name
            else:
                klass = load_command_class(app_name, name)
        except KeyError:
            raise CommandError("Unknown command: %r" % name)
        return klass

    def execute(self, *args, **options):
        """
        Try to execute this command, performing model validation if
        needed (as controlled by the attribute
        ``self.requires_model_validation``). If the command raises a
        ``CommandError``, intercept it and print it sensibly to stderr.
        """
        # NOTE: the original contained a dead ``try: pass except ImportError``
        # guard (leftover from removed language handling) that referenced
        # ``traceback`` and ``smart_str`` without importing them; the dead
        # block is gone and both names are now imported at module level for
        # the live error path below.
        show_traceback = options.get('traceback', False)
        try:
            self.stdout = options.get('stdout', sys.stdout)
            self.stderr = options.get('stderr', sys.stderr)
            if self.requires_model_validation:
                self.validate()
            output = self.handle(*args, **options)
            if output:
                if self.output_transaction:
                    # This needs to be imported here, because it relies on
                    # settings.
                    from django.db import connections, DEFAULT_DB_ALIAS
                    connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
                    if connection.ops.start_transaction_sql():
                        self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()) + '\n')
                self.stdout.write(output)
                if self.output_transaction:
                    self.stdout.write('\n' + self.style.SQL_KEYWORD("COMMIT;") + '\n')
        except CommandError as e:
            if show_traceback:
                traceback.print_exc()
            else:
                self.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
            sys.exit(1)
cjh1/VTK
refs/heads/master
Rendering/Core/Testing/Python/cameraWarpedCone.py
28
#!/usr/bin/env python
"""Render a cone through a camera warped by a user transform (x scaled 0.5)."""
import vtk

# Rendering window with a single renderer.
renderer = vtk.vtkRenderer()
render_window = vtk.vtkRenderWindow()
render_window.AddRenderer(renderer)
render_window.SetSize(300, 300)

# Cone geometry -> mapper -> actor pipeline.
cone_source = vtk.vtkConeSource()
cone_source.SetResolution(8)
cone_mapper = vtk.vtkPolyDataMapper()
cone_mapper.SetInputConnection(cone_source.GetOutputPort())
cone_actor = vtk.vtkActor()
cone_actor.SetMapper(cone_mapper)

# Build a 4x4 matrix that is the identity except for a 0.5 scale on x,
# and wrap it in a transform used to distort the camera.
matrix = vtk.vtkMatrix4x4()
for row in range(4):
    for col in range(4):
        matrix.SetElement(row, col, 1.0 if row == col else 0.0)
matrix.SetElement(0, 0, 0.5)
transform = vtk.vtkTransform()
transform.SetMatrix(matrix)

# Assemble the scene and render through the warped camera.
renderer.AddActor(cone_actor)
renderer.ResetCamera()
renderer.GetActiveCamera().SetUserTransform(transform)
render_window.Render()
poojavade/Genomics_Docker
refs/heads/master
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/radical.utils-0.7.7-py2.7.egg/radical/utils/test_config.py
5
__author__ = "Radical.Utils Development Team (Andre Merzky, Ole Weidner)"
__copyright__ = "Copyright 2013, RADICAL@Rutgers"
__license__ = "MIT"


import radical.utils.testing as rut


class TestConfig(rut.TestConfig):
    """Test configuration restricted to the 'radical.utils.tests' section."""

    def __init__(self, cfg_file):
        """Load *cfg_file*, keeping only the 'radical.utils.tests' category."""
        rut.TestConfig.__init__(self, cfg_file, 'radical.utils.tests')
CalthorpeAnalytics/urbanfootprint
refs/heads/master
footprint/client/configuration/scag_dm/config_entity/scag_dm_project.py
1
# UrbanFootprint v1.5
# Copyright (C) 2017 Calthorpe Analytics
#
# This file is part of UrbanFootprint version 1.5
#
# UrbanFootprint is distributed under the terms of the GNU General
# Public License version 3, as published by the Free Software Foundation. This
# code is distributed WITHOUT ANY WARRANTY, without implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License v3 for more details; see <http://www.gnu.org/licenses/>.

from footprint.client.configuration.scag_dm.base.existing_land_use_parcel import ExistingLandUseParcel
from footprint.client.configuration.scag_dm.base.city_boundary import CityBoundary
from footprint.client.configuration.fixture import ProjectFixture
from footprint.client.configuration.scag_dm.config_entity.scag_dm_config_entities import ScagDmDbEntityKey
from footprint.main.models.geospatial.db_entity import DbEntity
from footprint.main.models.geospatial.feature_behavior import FeatureBehavior
from footprint.main.models.category import Category
from footprint.main.models.geospatial.behavior import BehaviorKey, Behavior
from footprint.main.models.geospatial.db_entity_configuration import update_or_create_db_entity
from footprint.main.models.geospatial.feature_class_configuration import FeatureClassConfiguration
from footprint.main.models.geospatial.intersection import GeographicIntersection, AttributeIntersection
from footprint.main.models.keys.db_entity_category_key import DbEntityCategoryKey

__author__ = 'calthorpe_analytics'


class ScagDmProjectFixture(ProjectFixture):
    # Fixture declaring the SCAG DM project-scope DbEntities on top of
    # whatever the parent ProjectFixture already provides.

    def default_db_entities(self, **kwargs):
        """
            Declare project-specific additional db_entities for the SCAG DM
            client (existing land-use parcels and the city boundary), appended
            to the parent fixture's defaults.
        :param default_dict:
        :return: list of DbEntity instances (created or updated in place)
        """
        # self.config_entity is the Project this fixture is being built for;
        # its name is used below to filter region-wide layers down to one city.
        project = self.config_entity
        # The DbEntity keyspace. These keys have no prefix
        Key = ScagDmDbEntityKey
        # The Behavior keyspace
        behavior_key = BehaviorKey.Fab.ricate
        # Used to load Behaviors defined elsewhere
        get_behavior = lambda key: Behavior.objects.get(key=behavior_key(key))
        return super(ScagDmProjectFixture, self).default_db_entities() + [
            # Existing land-use parcels, subset from the region-level layer to
            # this project's city, with the land-use code joined to the SCAG
            # land-use definition table via the 'scag_lu'/'land_use' fields.
            update_or_create_db_entity(project, DbEntity(
                name='SCAG Existing Land Use Parcels 2012',
                key=Key.EXISTING_LAND_USE_PARCELS_2012,
                feature_class_configuration=FeatureClassConfiguration(
                    abstract_class=ExistingLandUseParcel,
                    import_from_db_entity_key=Key.REGION_EXISTING_LAND_USE_PARCELS,
                    # Only rows whose city matches the project name are imported.
                    filter_query=dict(city=project.name),
                    fields=dict(),
                    related_fields=dict(land_use_definition=dict(
                        single=True,
                        related_class_name='footprint.client.configuration.scag_dm.built_form.scag_dm_land_use_definition.ScagDmLandUseDefinition',
                        related_class_join_field_name='land_use',
                        source_class_join_field_name='scag_lu')
                    )
                ),
                feature_behavior=FeatureBehavior(
                    behavior=get_behavior('reference')
                ),
                _categories=[Category(key=DbEntityCategoryKey.KEY_CLASSIFICATION, value=DbEntityCategoryKey.REFERENCE)]
            )),
            # City boundary, also subset from the region layer; marked
            # use_for_bounds so it defines the project's map extent.
            update_or_create_db_entity(project, DbEntity(
                key=Key.CITY_BOUNDARY,
                feature_class_configuration=FeatureClassConfiguration(
                    abstract_class=CityBoundary,
                    import_from_db_entity_key=Key.REGION_CITY_BOUNDARY,
                    filter_query=dict(city=project.name),
                    use_for_bounds=True
                ),
                feature_behavior=FeatureBehavior(
                    behavior=get_behavior('reference'),
                    intersection=GeographicIntersection.polygon_to_centroid
                ),
                _categories=[Category(key=DbEntityCategoryKey.KEY_CLASSIFICATION, value=DbEntityCategoryKey.REFERENCE)]
            ))
        ]
olavvatne/CNN
refs/heads/master
elements/convpool.py
1
import theano
from theano import tensor as T
from theano.tensor.nnet import conv
from theano.sandbox.cuda import dnn
from theano.tensor.signal import pool
import numpy as np
from elements.util import BaseLayer


class ConvPoolLayer(BaseLayer):
    '''
    This class initialize a convolutional layer. Parameters supplied in the
    init decide the number of kernels, the kernel sizing, the activation
    function. The layer can also initialize from existing weights and biases
    (stored models and so forth). The layer support dropout, strides, and max
    pooling. The pooling step is not treated as a separate layer, but belongs
    to a convolutional layer. To deactivate pooling the poolsize should be
    set (1,1).
    '''

    def __init__(self, rng, input, filter_shape, image_shape, drop, poolsize=(2,2), strides=(1, 1),
                 activation=T.tanh, W=None, b=None, verbose=True, dropout_rate=1.0):
        '''
        :param rng: random number generator used to initialize weights
        :param input: symbolic image tensor
        :param filter_shape: (number of filters, num input feature maps, filter height, filter width)
        :param image_shape: (batch size, num input feature maps, image height, image width)
        :param drop: symbolic flag; nonzero selects the dropped-out output
        :param poolsize: the downsampling (pooling) factor (#rows, #cols)
        :param strides: convolution subsample; anything other than (1, 1)
                        switches to the cuDNN code path (requires GPU/CUDA)
        :param activation: Choice of activation function
        :param W: Supplied layer weights. Useful if loading from params.pkl.
        :param b: Supplied biases. If loading model from params.pkl
        :param verbose: Print layer arch. in console.
        :param dropout_rate: keep probability passed to BaseLayer.dropout
        :return:
        '''
        super(ConvPoolLayer, self).__init__(rng, input, dropout_rate)
        # Input feature-map count must match the filters' channel dimension.
        assert image_shape[1] == filter_shape[1]
        self._verbose_print(verbose, filter_shape, poolsize, image_shape, strides, dropout_rate)

        # Glorot/Bengio-style bound: fan_in is inputs per hidden unit.
        fan_in = np.prod(filter_shape[1:])
        # each unit in the lower layer receives a gradient from:
        # "num output feature maps * filter height * filter width" /
        # pooling size
        fan_out = (filter_shape[0] * np.prod(filter_shape[2:]) /
                   np.prod(poolsize))
        # initialize weights with random weights
        W_bound = np.sqrt(6. / (fan_in + fan_out))
        self.set_weight(W, -W_bound, W_bound, filter_shape)
        self.set_bias(b, filter_shape[0])
        # NOTE(review): debug print left in the original; consider removing.
        print(strides)
        if strides[0] == 1 and strides[1] == 1:
            #Strides make the system run impossibly slow because of legacy OP.
            print("No stride, use default conv2d")
            conv_out = conv.conv2d(
                input=input,
                filters=self.W,
                filter_shape=filter_shape,
                image_shape=image_shape,
            )
        else:
            #When using stride/subsample the system require a GPU and CUDA.
            #Using GPU OP directly.
            #The memory layout to use is bc01, that is batch, channel,
            #first dim, second dim in that order.
            print("DNN convolution and pooling, stride support")
            conv_out = dnn.dnn_conv(input, self.W, subsample=strides)
        # Max-pool with stride == poolsize (non-overlapping windows); with
        # poolsize (1,1) this is effectively a no-op pass-through.
        #pooled_out = dnn.dnn_pool(conv_out, poolsize, stride=poolsize)
        pooled_out = pool.pool_2d(
            input=conv_out,
            ds=poolsize,
            st=poolsize,
            ignore_border=True,
            mode='max'
        )
        # Bias is broadcast across batch and spatial dims (one bias per map).
        out = activation(pooled_out + self.b.dimshuffle('x', 0, 'x', 'x'))
        droppedOutput = self.dropout(out, dropout_rate)
        # `drop` switches between the train (dropout) and test graphs.
        self.output = T.switch(T.neq(drop, 0), droppedOutput, out)
        self.params = [self.W, self.b]

    def _verbose_print(self, is_verbose, filter_shape, poolsize, image_shape, strides, dropout_rate):
        # Console summary of the layer architecture (only when verbose=True).
        if is_verbose:
            print('Convolutional layer with {} kernels'.format(filter_shape[0]))
            print('---- Kernel size \t {}x{}'.format(filter_shape[2], filter_shape[3]))
            print('---- Pooling size \t {}x{}'.format(poolsize[0], poolsize[1]))
            print('---- Input size \t {}x{}'.format(image_shape[2],image_shape[3]))
            print('---- Stride \t \t {}x{}'.format(strides[0],strides[1]))
            print('---- Input number of feature maps is {}'.format(image_shape[1]))
            print('---- Dropout rate is {}'.format(dropout_rate))
            print('')
jrjang/mbed
refs/heads/master
workspace_tools/host_tests/host_tests_plugins/host_test_plugins.py
92
""" mbed SDK Copyright (c) 2011-2013 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from os import access, F_OK from sys import stdout from time import sleep from subprocess import call class HostTestPluginBase: """ Base class for all plug-ins used with host tests. """ ########################################################################### # Interface: ########################################################################### ########################################################################### # Interface attributes defining plugin name, type etc. ########################################################################### name = "HostTestPluginBase" # Plugin name, can be plugin class name type = "BasePlugin" # Plugin type: ResetMethod, Copymethod etc. capabilities = [] # Capabilities names: what plugin can achieve # (e.g. reset using some external command line tool) stable = False # Determine if plugin is stable and can be used ########################################################################### # Interface methods ########################################################################### def setup(self, *args, **kwargs): """ Configure plugin, this function should be called before plugin execute() method is used. """ return False def execute(self, capabilitity, *args, **kwargs): """ Executes capability by name. Each capability e.g. 
may directly just call some command line program or execute building pythonic function """ return False ########################################################################### # Interface helper methods - overload only if you need to have custom behaviour ########################################################################### def print_plugin_error(self, text): """ Function prints error in console and exits always with False """ print "Plugin error: %s::%s: %s"% (self.name, self.type, text) return False def print_plugin_info(self, text, NL=True): """ Function prints notification in console and exits always with True """ if NL: print "Plugin info: %s::%s: %s"% (self.name, self.type, text) else: print "Plugin info: %s::%s: %s"% (self.name, self.type, text), return True def print_plugin_char(self, char): """ Function prints char on stdout """ stdout.write(char) stdout.flush() return True def check_mount_point_ready(self, destination_disk, init_delay=0.2, loop_delay=0.25): """ Checks if destination_disk is ready and can be accessed by e.g. copy commands @init_delay - Initial delay time before first access check @loop_delay - pooling delay for access check """ if not access(destination_disk, F_OK): self.print_plugin_info("Waiting for mount point '%s' to be ready..."% destination_disk, NL=False) sleep(init_delay) while not access(destination_disk, F_OK): sleep(loop_delay) self.print_plugin_char('.') def check_parameters(self, capabilitity, *args, **kwargs): """ This function should be ran each time we call execute() to check if none of the required parameters is missing. """ missing_parameters = [] for parameter in self.required_parameters: if parameter not in kwargs: missing_parameters.append(parameter) if len(missing_parameters) > 0: self.print_plugin_error("execute parameter(s) '%s' missing!"% (', '.join(parameter))) return False return True def run_command(self, cmd, shell=True): """ Runs command from command line. 
""" result = True ret = 0 try: ret = call(cmd, shell=shell) if ret: self.print_plugin_error("[ret=%d] Command: %s"% (int(ret), cmd)) return False except Exception as e: result = False self.print_plugin_error("[ret=%d] Command: %s"% (int(ret), cmd)) self.print_plugin_error(str(e)) return result
SlimRoms/android_external_chromium_org
refs/heads/lp5.0
ppapi/native_client/tools/browser_tester/browsertester/__init__.py
176
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # Empty but necessary
KyleJamesWalker/ansible
refs/heads/devel
lib/ansible/modules/cloud/ovirt/ovirt_nics_facts.py
26
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: ovirt_nics_facts
short_description: Retrieve facts about one or more oVirt virtual machine network interfaces
author: "Ondra Machacek (@machacekondra)"
version_added: "2.3"
description:
    - "Retrieve facts about one or more oVirt virtual machine network interfaces."
notes:
    - "This module creates a new top-level C(ovirt_nics) fact, which contains a list of NICs."
options:
    vm:
        description:
            - "Name of the VM where NIC is attached."
        required: true
    name:
        description:
            - "Name of the NIC, can be used as glob expression."
extends_documentation_fragment: ovirt_facts
'''

EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:

# Gather facts about all NICs which names start with C(eth) for VM named C(centos7):
- ovirt_nics_facts:
    vm: centos7
    name: eth*
- debug:
    var: ovirt_nics
'''

RETURN = '''
ovirt_nics:
    description: "List of dictionaries describing the network interfaces. NIC attribues are mapped to dictionary keys,
                  all NICs attributes can be found at following url: https://ovirt.example.com/ovirt-engine/api/model#types/nic."
    returned: On success.
    type: list
'''

import fnmatch
import traceback

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
    check_sdk,
    create_connection,
    get_dict_of_struct,
    ovirt_facts_full_argument_spec,
    search_by_name,
)


def main():
    # Entry point: gather NIC facts for one VM and return them via exit_json.
    argument_spec = ovirt_facts_full_argument_spec(
        vm=dict(required=True),
        name=dict(default=None),
    )
    module = AnsibleModule(argument_spec)
    check_sdk(module)

    try:
        auth = module.params.pop('auth')
        connection = create_connection(auth)
        vms_service = connection.system_service().vms_service()
        vm_name = module.params['vm']
        vm = search_by_name(vms_service, vm_name)
        if vm is None:
            raise Exception("VM '%s' was not found." % vm_name)

        nics_service = vms_service.service(vm.id).nics_service()
        # Optional glob filter on NIC name (fnmatch semantics, e.g. 'eth*').
        if module.params['name']:
            nics = [
                e for e in nics_service.list()
                if fnmatch.fnmatch(e.name, module.params['name'])
            ]
        else:
            nics = nics_service.list()
        # Serialize SDK NIC structs to plain dicts for the ansible fact.
        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_nics=[
                    get_dict_of_struct(
                        struct=c,
                        connection=connection,
                        fetch_nested=module.params.get('fetch_nested'),
                        attributes=module.params.get('nested_attributes'),
                    ) for c in nics
                ],
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # NOTE(review): if create_connection (or the auth pop) raises,
        # `connection`/`auth` are unbound here and this raises NameError —
        # pre-existing behavior, left unchanged in this documentation pass.
        connection.close(logout=auth.get('token') is None)


if __name__ == '__main__':
    main()
eric-haibin-lin/mxnet
refs/heads/master
tools/ipynb2md.py
11
#!/usr/bin/env python # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ Convert jupyter notebook into the markdown format. The notebook outputs will be removed. It is heavily adapted from https://gist.github.com/decabyte/0ed87372774cf5d34d7e """ from __future__ import print_function import sys import io import os import argparse import nbformat def remove_outputs(nb): """Removes the outputs cells for a jupyter notebook.""" for cell in nb.cells: if cell.cell_type == 'code': cell.outputs = [] def clear_notebook(old_ipynb, new_ipynb): with open(old_ipynb, 'r') as f: nb = nbformat.read(f, nbformat.NO_CONVERT) remove_outputs(nb) with open(new_ipynb, 'w', encoding='utf8') as f: nbformat.write(nb, f, nbformat.NO_CONVERT) def main(): parser = argparse.ArgumentParser( description="Jupyter Notebooks to markdown" ) parser.add_argument("notebook", nargs=1, help="The notebook to be converted.") parser.add_argument("-o", "--output", help="output markdown file") args = parser.parse_args() old_ipynb = args.notebook[0] new_ipynb = 'tmp.ipynb' md_file = args.output print(md_file) if not md_file: md_file = os.path.splitext(old_ipynb)[0] + '.md' clear_notebook(old_ipynb, new_ipynb) os.system('jupyter nbconvert ' + new_ipynb + ' --to markdown --output ' + md_file) 
with open(md_file, 'a') as f: f.write('<!-- INSERT SOURCE DOWNLOAD BUTTONS -->') os.system('rm ' + new_ipynb) if __name__ == '__main__': main()
brian-yang/mozillians
refs/heads/master
vendor-local/lib/python/celery/tests/test_backends/test_amqp.py
14
from __future__ import absolute_import from __future__ import with_statement import socket import sys from datetime import timedelta from Queue import Empty, Queue from celery import current_app from celery import states from celery.app import app_or_default from celery.backends.amqp import AMQPBackend from celery.datastructures import ExceptionInfo from celery.exceptions import TimeoutError from celery.utils import uuid from celery.tests.utils import Case, sleepdeprived class SomeClass(object): def __init__(self, data): self.data = data class test_AMQPBackend(Case): def create_backend(self, **opts): opts = dict(dict(serializer="pickle", persistent=False), **opts) return AMQPBackend(**opts) def test_mark_as_done(self): tb1 = self.create_backend() tb2 = self.create_backend() tid = uuid() tb1.mark_as_done(tid, 42) self.assertEqual(tb2.get_status(tid), states.SUCCESS) self.assertEqual(tb2.get_result(tid), 42) self.assertTrue(tb2._cache.get(tid)) self.assertTrue(tb2.get_result(tid), 42) def test_is_pickled(self): tb1 = self.create_backend() tb2 = self.create_backend() tid2 = uuid() result = {"foo": "baz", "bar": SomeClass(12345)} tb1.mark_as_done(tid2, result) # is serialized properly. 
rindb = tb2.get_result(tid2) self.assertEqual(rindb.get("foo"), "baz") self.assertEqual(rindb.get("bar").data, 12345) def test_mark_as_failure(self): tb1 = self.create_backend() tb2 = self.create_backend() tid3 = uuid() try: raise KeyError("foo") except KeyError, exception: einfo = ExceptionInfo(sys.exc_info()) tb1.mark_as_failure(tid3, exception, traceback=einfo.traceback) self.assertEqual(tb2.get_status(tid3), states.FAILURE) self.assertIsInstance(tb2.get_result(tid3), KeyError) self.assertEqual(tb2.get_traceback(tid3), einfo.traceback) def test_repair_uuid(self): from celery.backends.amqp import repair_uuid for i in range(10): tid = uuid() self.assertEqual(repair_uuid(tid.replace("-", "")), tid) def test_expires_defaults_to_config_deprecated_setting(self): app = app_or_default() prev = app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = 10 try: b = self.create_backend() self.assertEqual(b.queue_arguments.get("x-expires"), 10 * 1000.0) finally: app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = prev def test_expires_is_int(self): b = self.create_backend(expires=48) self.assertEqual(b.queue_arguments.get("x-expires"), 48 * 1000.0) def test_expires_is_float(self): b = self.create_backend(expires=48.3) self.assertEqual(b.queue_arguments.get("x-expires"), 48.3 * 1000.0) def test_expires_is_timedelta(self): b = self.create_backend(expires=timedelta(minutes=1)) self.assertEqual(b.queue_arguments.get("x-expires"), 60 * 1000.0) @sleepdeprived() def test_store_result_retries(self): class _Producer(object): iterations = 0 stop_raising_at = 5 def __init__(self, *args, **kwargs): pass def publish(self, msg, *args, **kwargs): if self.iterations > self.stop_raising_at: return raise KeyError("foo") class Backend(AMQPBackend): Producer = _Producer backend = Backend() with self.assertRaises(KeyError): backend.store_result("foo", "bar", "STARTED", max_retries=None) with self.assertRaises(KeyError): backend.store_result("foo", "bar", "STARTED", 
max_retries=10) def assertState(self, retval, state): self.assertEqual(retval["status"], state) def test_poll_no_messages(self): b = self.create_backend() self.assertState(b.poll(uuid()), states.PENDING) def test_poll_result(self): results = Queue() class Message(object): def __init__(self, **merge): self.payload = dict({"status": states.STARTED, "result": None}, **merge) class MockBinding(object): def __init__(self, *args, **kwargs): pass def __call__(self, *args, **kwargs): return self def declare(self): pass def get(self, no_ack=False): try: return results.get(block=False) except Empty: pass class MockBackend(AMQPBackend): Queue = MockBinding backend = MockBackend() # FFWD's to the latest state. results.put(Message(status=states.RECEIVED, seq=1)) results.put(Message(status=states.STARTED, seq=2)) results.put(Message(status=states.FAILURE, seq=3)) r1 = backend.poll(uuid()) self.assertDictContainsSubset({"status": states.FAILURE, "seq": 3}, r1, "FFWDs to the last state") # Caches last known state. results.put(Message()) tid = uuid() backend.poll(tid) self.assertIn(tid, backend._cache, "Caches last known state") # Returns cache if no new states. 
results.queue.clear() assert not results.qsize() backend._cache[tid] = "hello" self.assertEqual(backend.poll(tid), "hello", "Returns cache if no new states") def test_wait_for(self): b = self.create_backend() tid = uuid() with self.assertRaises(TimeoutError): b.wait_for(tid, timeout=0.1) b.store_result(tid, None, states.STARTED) with self.assertRaises(TimeoutError): b.wait_for(tid, timeout=0.1) b.store_result(tid, None, states.RETRY) with self.assertRaises(TimeoutError): b.wait_for(tid, timeout=0.1) b.store_result(tid, 42, states.SUCCESS) self.assertEqual(b.wait_for(tid, timeout=1), 42) b.store_result(tid, 56, states.SUCCESS) self.assertEqual(b.wait_for(tid, timeout=1), 42, "result is cached") self.assertEqual(b.wait_for(tid, timeout=1, cache=False), 56) b.store_result(tid, KeyError("foo"), states.FAILURE) with self.assertRaises(KeyError): b.wait_for(tid, timeout=1, cache=False) def test_drain_events_remaining_timeouts(self): class Connection(object): def drain_events(self, timeout=None): pass b = self.create_backend() with current_app.pool.acquire_channel(block=False) as (_, channel): binding = b._create_binding(uuid()) consumer = b._create_consumer(binding, channel) with self.assertRaises(socket.timeout): b.drain_events(Connection(), consumer, timeout=0.1) def test_get_many(self): b = self.create_backend() tids = [] for i in xrange(10): tid = uuid() b.store_result(tid, i, states.SUCCESS) tids.append(tid) res = list(b.get_many(tids, timeout=1)) expected_results = [(tid, {"status": states.SUCCESS, "result": i, "traceback": None, "task_id": tid}) for i, tid in enumerate(tids)] self.assertEqual(sorted(res), sorted(expected_results)) self.assertDictEqual(b._cache[res[0][0]], res[0][1]) cached_res = list(b.get_many(tids, timeout=1)) self.assertEqual(sorted(cached_res), sorted(expected_results)) b._cache[res[0][0]]["status"] = states.RETRY with self.assertRaises(socket.timeout): list(b.get_many(tids, timeout=0.01)) def test_test_get_many_raises_outer_block(self): class 
Backend(AMQPBackend): def _create_consumer(self, *args, **kwargs): raise KeyError("foo") b = Backend() with self.assertRaises(KeyError): b.get_many(["id1"]).next() def test_test_get_many_raises_inner_block(self): class Backend(AMQPBackend): def drain_events(self, *args, **kwargs): raise KeyError("foo") b = Backend() with self.assertRaises(KeyError): b.get_many(["id1"]).next() def test_no_expires(self): b = self.create_backend(expires=None) app = app_or_default() prev = app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = None try: b = self.create_backend(expires=None) with self.assertRaises(KeyError): b.queue_arguments["x-expires"] finally: app.conf.CELERY_AMQP_TASK_RESULT_EXPIRES = prev def test_process_cleanup(self): self.create_backend().process_cleanup() def test_reload_task_result(self): with self.assertRaises(NotImplementedError): self.create_backend().reload_task_result("x") def test_reload_taskset_result(self): with self.assertRaises(NotImplementedError): self.create_backend().reload_taskset_result("x") def test_save_taskset(self): with self.assertRaises(NotImplementedError): self.create_backend().save_taskset("x", "x") def test_restore_taskset(self): with self.assertRaises(NotImplementedError): self.create_backend().restore_taskset("x") def test_delete_taskset(self): with self.assertRaises(NotImplementedError): self.create_backend().delete_taskset("x")
aruizramon/alec_erpnext
refs/heads/master
erpnext/hr/doctype/earning_type/earning_type.py
66
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import frappe from frappe.model.document import Document class EarningType(Document): pass
Akhavi/bigcouch
refs/heads/master
couchjs/scons/scons-local-2.0.1/SCons/Tool/sunlink.py
61
"""SCons.Tool.sunlink Tool-specific initialization for the Sun Solaris (Forte) linker. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/sunlink.py 5134 2010/08/16 23:02:40 bdeegan" import os import os.path import SCons.Util import link ccLinker = None # search for the acc compiler and linker front end try: dirs = os.listdir('/opt') except (IOError, OSError): # Not being able to read the directory because it doesn't exist # (IOError) or isn't readable (OSError) is okay. 
dirs = [] for d in dirs: linker = '/opt/' + d + '/bin/CC' if os.path.exists(linker): ccLinker = linker break def generate(env): """Add Builders and construction variables for Forte to an Environment.""" link.generate(env) env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -G') env.Append(LINKFLAGS=['$__RPATH']) env['RPATHPREFIX'] = '-R' env['RPATHSUFFIX'] = '' env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}' def exists(env): return ccLinker # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
habnabit/divmod-sine
refs/heads/master
sine/test/historic/stub_sipDispatcherService2to3.py
2
# -*- test-case-name: sine.test.historic.test_sipDispatcher2to3 -*- from axiom.test.historic.stubloader import saveStub from axiom.dependency import installOn from sine.sipserver import SIPDispatcherService def createDatabase(s): installOn(SIPDispatcherService(store=s), s) if __name__ == '__main__': saveStub(createDatabase, 17606)
mancoast/CPythonPyc_test
refs/heads/master
cpython/275_test_difflib.py
87
import difflib from test.test_support import run_unittest, findfile import unittest import doctest import sys class TestWithAscii(unittest.TestCase): def test_one_insert(self): sm = difflib.SequenceMatcher(None, 'b' * 100, 'a' + 'b' * 100) self.assertAlmostEqual(sm.ratio(), 0.995, places=3) self.assertEqual(list(sm.get_opcodes()), [ ('insert', 0, 0, 0, 1), ('equal', 0, 100, 1, 101)]) sm = difflib.SequenceMatcher(None, 'b' * 100, 'b' * 50 + 'a' + 'b' * 50) self.assertAlmostEqual(sm.ratio(), 0.995, places=3) self.assertEqual(list(sm.get_opcodes()), [ ('equal', 0, 50, 0, 50), ('insert', 50, 50, 50, 51), ('equal', 50, 100, 51, 101)]) def test_one_delete(self): sm = difflib.SequenceMatcher(None, 'a' * 40 + 'c' + 'b' * 40, 'a' * 40 + 'b' * 40) self.assertAlmostEqual(sm.ratio(), 0.994, places=3) self.assertEqual(list(sm.get_opcodes()), [ ('equal', 0, 40, 0, 40), ('delete', 40, 41, 40, 40), ('equal', 41, 81, 40, 80)]) class TestAutojunk(unittest.TestCase): """Tests for the autojunk parameter added in 2.7""" def test_one_insert_homogenous_sequence(self): # By default autojunk=True and the heuristic kicks in for a sequence # of length 200+ seq1 = 'b' * 200 seq2 = 'a' + 'b' * 200 sm = difflib.SequenceMatcher(None, seq1, seq2) self.assertAlmostEqual(sm.ratio(), 0, places=3) # Now turn the heuristic off sm = difflib.SequenceMatcher(None, seq1, seq2, autojunk=False) self.assertAlmostEqual(sm.ratio(), 0.9975, places=3) class TestSFbugs(unittest.TestCase): def test_ratio_for_null_seqn(self): # Check clearing of SF bug 763023 s = difflib.SequenceMatcher(None, [], []) self.assertEqual(s.ratio(), 1) self.assertEqual(s.quick_ratio(), 1) self.assertEqual(s.real_quick_ratio(), 1) def test_comparing_empty_lists(self): # Check fix for bug #979794 group_gen = difflib.SequenceMatcher(None, [], []).get_grouped_opcodes() self.assertRaises(StopIteration, group_gen.next) diff_gen = difflib.unified_diff([], []) self.assertRaises(StopIteration, diff_gen.next) def test_added_tab_hint(self): # 
Check fix for bug #1488943 diff = list(difflib.Differ().compare(["\tI am a buggy"],["\t\tI am a bug"])) self.assertEqual("- \tI am a buggy", diff[0]) self.assertEqual("? --\n", diff[1]) self.assertEqual("+ \t\tI am a bug", diff[2]) self.assertEqual("? +\n", diff[3]) patch914575_from1 = """ 1. Beautiful is beTTer than ugly. 2. Explicit is better than implicit. 3. Simple is better than complex. 4. Complex is better than complicated. """ patch914575_to1 = """ 1. Beautiful is better than ugly. 3. Simple is better than complex. 4. Complicated is better than complex. 5. Flat is better than nested. """ patch914575_from2 = """ \t\tLine 1: preceeded by from:[tt] to:[ssss] \t\tLine 2: preceeded by from:[sstt] to:[sssst] \t \tLine 3: preceeded by from:[sstst] to:[ssssss] Line 4: \thas from:[sst] to:[sss] after : Line 5: has from:[t] to:[ss] at end\t """ patch914575_to2 = """ Line 1: preceeded by from:[tt] to:[ssss] \tLine 2: preceeded by from:[sstt] to:[sssst] Line 3: preceeded by from:[sstst] to:[ssssss] Line 4: has from:[sst] to:[sss] after : Line 5: has from:[t] to:[ss] at end """ patch914575_from3 = """line 0 1234567890123456789012345689012345 line 1 line 2 line 3 line 4 changed line 5 changed line 6 changed line 7 line 8 subtracted line 9 1234567890123456789012345689012345 short line just fits in!! just fits in two lines yup!! the end""" patch914575_to3 = """line 0 1234567890123456789012345689012345 line 1 line 2 added line 3 line 4 chanGEd line 5a chanGed line 6a changEd line 7 line 8 line 9 1234567890 another long line that needs to be wrapped just fitS in!! just fits in two lineS yup!! 
the end""" class TestSFpatches(unittest.TestCase): def test_html_diff(self): # Check SF patch 914575 for generating HTML differences f1a = ((patch914575_from1 + '123\n'*10)*3) t1a = (patch914575_to1 + '123\n'*10)*3 f1b = '456\n'*10 + f1a t1b = '456\n'*10 + t1a f1a = f1a.splitlines() t1a = t1a.splitlines() f1b = f1b.splitlines() t1b = t1b.splitlines() f2 = patch914575_from2.splitlines() t2 = patch914575_to2.splitlines() f3 = patch914575_from3 t3 = patch914575_to3 i = difflib.HtmlDiff() j = difflib.HtmlDiff(tabsize=2) k = difflib.HtmlDiff(wrapcolumn=14) full = i.make_file(f1a,t1a,'from','to',context=False,numlines=5) tables = '\n'.join( [ '<h2>Context (first diff within numlines=5(default))</h2>', i.make_table(f1a,t1a,'from','to',context=True), '<h2>Context (first diff after numlines=5(default))</h2>', i.make_table(f1b,t1b,'from','to',context=True), '<h2>Context (numlines=6)</h2>', i.make_table(f1a,t1a,'from','to',context=True,numlines=6), '<h2>Context (numlines=0)</h2>', i.make_table(f1a,t1a,'from','to',context=True,numlines=0), '<h2>Same Context</h2>', i.make_table(f1a,f1a,'from','to',context=True), '<h2>Same Full</h2>', i.make_table(f1a,f1a,'from','to',context=False), '<h2>Empty Context</h2>', i.make_table([],[],'from','to',context=True), '<h2>Empty Full</h2>', i.make_table([],[],'from','to',context=False), '<h2>tabsize=2</h2>', j.make_table(f2,t2), '<h2>tabsize=default</h2>', i.make_table(f2,t2), '<h2>Context (wrapcolumn=14,numlines=0)</h2>', k.make_table(f3.splitlines(),t3.splitlines(),context=True,numlines=0), '<h2>wrapcolumn=14,splitlines()</h2>', k.make_table(f3.splitlines(),t3.splitlines()), '<h2>wrapcolumn=14,splitlines(True)</h2>', k.make_table(f3.splitlines(True),t3.splitlines(True)), ]) actual = full.replace('</body>','\n%s\n</body>' % tables) # temporarily uncomment next two lines to baseline this test #with open('test_difflib_expect.html','w') as fp: # fp.write(actual) with open(findfile('test_difflib_expect.html')) as fp: self.assertEqual(actual, 
fp.read()) def test_recursion_limit(self): # Check if the problem described in patch #1413711 exists. limit = sys.getrecursionlimit() old = [(i%2 and "K:%d" or "V:A:%d") % i for i in range(limit*2)] new = [(i%2 and "K:%d" or "V:B:%d") % i for i in range(limit*2)] difflib.SequenceMatcher(None, old, new).get_opcodes() class TestOutputFormat(unittest.TestCase): def test_tab_delimiter(self): args = ['one', 'two', 'Original', 'Current', '2005-01-26 23:30:50', '2010-04-02 10:20:52'] ud = difflib.unified_diff(*args, lineterm='') self.assertEqual(list(ud)[0:2], [ "--- Original\t2005-01-26 23:30:50", "+++ Current\t2010-04-02 10:20:52"]) cd = difflib.context_diff(*args, lineterm='') self.assertEqual(list(cd)[0:2], [ "*** Original\t2005-01-26 23:30:50", "--- Current\t2010-04-02 10:20:52"]) def test_no_trailing_tab_on_empty_filedate(self): args = ['one', 'two', 'Original', 'Current'] ud = difflib.unified_diff(*args, lineterm='') self.assertEqual(list(ud)[0:2], ["--- Original", "+++ Current"]) cd = difflib.context_diff(*args, lineterm='') self.assertEqual(list(cd)[0:2], ["*** Original", "--- Current"]) def test_range_format_unified(self): # Per the diff spec at http://www.unix.org/single_unix_specification/ spec = '''\ Each <range> field shall be of the form: %1d", <beginning line number> if the range contains exactly one line, and: "%1d,%1d", <beginning line number>, <number of lines> otherwise. If a range is empty, its beginning line number shall be the number of the line just before the range, or 0 if the empty range starts the file. 
''' fmt = difflib._format_range_unified self.assertEqual(fmt(3,3), '3,0') self.assertEqual(fmt(3,4), '4') self.assertEqual(fmt(3,5), '4,2') self.assertEqual(fmt(3,6), '4,3') self.assertEqual(fmt(0,0), '0,0') def test_range_format_context(self): # Per the diff spec at http://www.unix.org/single_unix_specification/ spec = '''\ The range of lines in file1 shall be written in the following format if the range contains two or more lines: "*** %d,%d ****\n", <beginning line number>, <ending line number> and the following format otherwise: "*** %d ****\n", <ending line number> The ending line number of an empty range shall be the number of the preceding line, or 0 if the range is at the start of the file. Next, the range of lines in file2 shall be written in the following format if the range contains two or more lines: "--- %d,%d ----\n", <beginning line number>, <ending line number> and the following format otherwise: "--- %d ----\n", <ending line number> ''' fmt = difflib._format_range_context self.assertEqual(fmt(3,3), '3') self.assertEqual(fmt(3,4), '4') self.assertEqual(fmt(3,5), '4,5') self.assertEqual(fmt(3,6), '4,6') self.assertEqual(fmt(0,0), '0') def test_main(): difflib.HtmlDiff._default_prefix = 0 Doctests = doctest.DocTestSuite(difflib) run_unittest( TestWithAscii, TestAutojunk, TestSFpatches, TestSFbugs, TestOutputFormat, Doctests) if __name__ == '__main__': test_main()
sh4wn/vispy
refs/heads/master
examples/basics/scene/shared_context.py
17
# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright (c) 2015, Vispy Development Team. All Rights Reserved. # Distributed under the (new) BSD License. See LICENSE.txt for more info. # ----------------------------------------------------------------------------- """ This example demonstrates the use of multiple canvases with visuals shared between them. """ import sys import numpy as np from vispy import app, scene from vispy.util.filter import gaussian_filter canvas1 = scene.SceneCanvas(keys='interactive', show=True) view1 = canvas1.central_widget.add_view() view1.camera = scene.TurntableCamera(fov=60) canvas2 = scene.SceneCanvas(keys='interactive', show=True, shared=canvas1.context) view2 = canvas2.central_widget.add_view() view2.camera = 'panzoom' # Simple surface plot example # x, y values are not specified, so assumed to be 0:50 z = gaussian_filter(np.random.normal(size=(50, 50)), (1, 1)) * 10 p1 = scene.visuals.SurfacePlot(z=z, color=(0.5, 0.5, 1, 1), shading='smooth') p1.transform = scene.transforms.MatrixTransform() p1.transform.scale([1/49., 1/49., 0.02]) p1.transform.translate([-0.5, -0.5, 0]) view1.add(p1) view2.add(p1) # Add a 3D axis to keep us oriented axis = scene.visuals.XYZAxis(parent=view1.scene) canvas = canvas1 # allow running this example in our test suite if __name__ == '__main__': if sys.flags.interactive == 0: app.run()
noba3/KoTos
refs/heads/master
addons/plugin.video.netzkino_de/resources/lib/kodion/constants/__init__.py
6
__all__ = [] import const_settings as setting import const_log as log import const_localize as localize import const_sort_methods as sort_method import const_content_types as content_type import const_paths as paths
matrixise/odoo
refs/heads/8.0
openerp/addons/base/tests/test_xmlrpc.py
90
# -*- coding: utf-8 -*- import openerp.tests.common DB = openerp.tests.common.DB class test_xmlrpc(openerp.tests.common.HttpCase): at_install = False post_install = True def test_01_xmlrpc_login(self): """ Try to login on the common service. """ uid = self.xmlrpc_common.login(DB, 'admin', 'admin') self.assertEqual(uid, 1) def test_xmlrpc_ir_model_search(self): """ Try a search on the object service. """ o = self.xmlrpc_object ids = o.execute(DB, 1, 'admin', 'ir.model', 'search', []) self.assertIsInstance(ids, list) ids = o.execute(DB, 1, 'admin', 'ir.model', 'search', [], {}) self.assertIsInstance(ids, list) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
pidydx/grr
refs/heads/master
grr/lib/rdfvalues/standard.py
2
#!/usr/bin/env python """Standard RDFValues.""" import re import urlparse from grr.lib import config_lib from grr.lib import rdfvalue from grr.lib import type_info from grr.lib.rdfvalues import structs as rdf_structs from grr.proto import jobs_pb2 from grr.proto import sysinfo_pb2 class RegularExpression(rdfvalue.RDFString): """A semantic regular expression.""" context_help_url = "user_manual.html#_regex_matches" def ParseFromString(self, value): super(RegularExpression, self).ParseFromString(value) # Check that this is a valid regex. try: self._regex = re.compile(self._value, flags=re.I | re.S | re.M) except re.error: raise type_info.TypeValueError("Not a valid regular expression.") def Search(self, text): """Search the text for our value.""" if isinstance(text, rdfvalue.RDFString): text = str(text) return self._regex.search(text) def Match(self, text): if isinstance(text, rdfvalue.RDFString): text = str(text) return self._regex.match(text) def FindIter(self, text): if isinstance(text, rdfvalue.RDFString): text = str(text) return self._regex.finditer(text) def __str__(self): return "<RegularExpression: %r/>" % self._value class LiteralExpression(rdfvalue.RDFBytes): """A RDFBytes literal for use in GrepSpec.""" context_help_url = "user_manual.html#_literal_matches" class EmailAddress(rdfvalue.RDFString): """An email address must be well formed.""" _EMAIL_REGEX = re.compile(r"[^@]+@([^@]+)$") def ParseFromString(self, value): super(EmailAddress, self).ParseFromString(value) self._match = self._EMAIL_REGEX.match(self._value) if not self._match: raise ValueError("Email address %r not well formed." 
% self._value) class DomainEmailAddress(EmailAddress): """A more restricted email address may only address the domain.""" def ParseFromString(self, value): super(DomainEmailAddress, self).ParseFromString(value) domain = config_lib.CONFIG["Logging.domain"] if domain and self._match.group(1) != domain: raise ValueError("Email address '%s' does not belong to the configured " "domain '%s'" % (self._match.group(1), domain)) class AuthenticodeSignedData(rdf_structs.RDFProtoStruct): protobuf = jobs_pb2.AuthenticodeSignedData class PersistenceFile(rdf_structs.RDFProtoStruct): protobuf = jobs_pb2.PersistenceFile class URI(rdf_structs.RDFProtoStruct): """Represets a URI with its individual components seperated.""" protobuf = sysinfo_pb2.URI def ParseFromString(self, value): url = urlparse.urlparse(value) if url.scheme: self.transport = url.scheme if url.netloc: self.host = url.netloc if url.path: self.path = url.path if url.query: self.query = url.query if url.fragment: self.fragment = url.fragment def SerializeToString(self): url = (self.transport, self.host, self.path, self.query, self.fragment) return str(urlparse.urlunsplit(url))
DarkRebel/myrobotlab
refs/heads/master
src/resource/Python/examples/Tracking.safe.modified.worky.py
5
# a safe tracking script - servos are created seperately # and their limits are programmed, they are then "bound" to # the tracking service tracker = Runtime.create("tracker","Tracking") # create servos BEFORE starting the tracking service # so we can specify values for the servos and specify names # before it starts tracking rotation = Runtime.create("rotation","Servo") neck = Runtime.create("neck","Servo") arduino = Runtime.create("arduino","Arduino") arduino.connect("COM3", 57600, 8, 1, 0) eye = Runtime.create("eye","OpenCV") eye.setCameraIndex(1) #attaching servos to arduino using PIN 3 and PIN 9 arduino.attach(rotation.getName() , 13) arduino.attach(neck.getName(), 12) # set safety limits - servos # will not go beyond these limits rotation.setMinMax(50,170) neck.setMinMax(50,170) # here we are binding are new servos with different names # to the tracking service. If not specified the tracking service # will create a servo named x and y tracker.attach(arduino) tracker.attachServos(rotation, neck) tracker.attach(eye) tracker.setRestPosition(90, 90) # setXMinMax & setYMinMax (min, max) - this will set the min and maximum # x value it will send the servo - typically this is not needed # because the tracking service will pull the min and max positions from # the servos it attaches too tracker.setXMinMax(10, 170) tracker.setYMinMax(10, 170) # setServoPins (x, y) set the servo of the pan and tilt repectively tracker.setServoPins(13,12) # tracker.setCameraIndex(1) #change cameras if necessary tracker.startService() tracker.trackLKPoint() #tracker.learnBackground()
Bachaco-ve/odoo
refs/heads/8.0
addons/website_gengo/controllers/main.py
350
# -*- coding: utf-8 -*- import openerp from openerp import http, SUPERUSER_ID from openerp.http import request import time GENGO_DEFAULT_LIMIT = 20 class website_gengo(http.Controller): @http.route('/website/get_translated_length', type='json', auth='user', website=True) def get_translated_length(self, translated_ids, lang): ir_translation_obj = request.registry['ir.translation'] result={"done":0} gengo_translation_ids = ir_translation_obj.search(request.cr, request.uid, [('id','in',translated_ids),('gengo_translation','!=', False)]) for trans in ir_translation_obj.browse(request.cr, request.uid, gengo_translation_ids): result['done'] += len(trans.source.split()) return result @http.route('/website/check_gengo_set', type='json', auth='user', website=True) def check_gengo_set(self): user = request.registry['res.users'].browse(request.cr, SUPERUSER_ID, request.uid) company_flag = 0 if not user.company_id.gengo_public_key or not user.company_id.gengo_private_key: company_flag = user.company_id.id return company_flag @http.route('/website/set_gengo_config', type='json', auth='user', website=True) def set_gengo_config(self,config): user = request.registry['res.users'].browse(request.cr, request.uid, request.uid) if user.company_id: request.registry['res.company'].write(request.cr, request.uid, user.company_id.id, config) return True @http.route('/website/post_gengo_jobs', type='json', auth='user', website=True) def post_gengo_jobs(self): request.registry['base.gengo.translations']._sync_request(request.cr, request.uid, limit=GENGO_DEFAULT_LIMIT, context=request.context) return True
starvingprogrammer/pynag
refs/heads/master
examples/Parsers/get_contactgroup.py
3
#!/usr/bin/python import sys if len(sys.argv) != 2: sys.stderr.write("Usage: %s 'Contactgroup Name'\n" % (sys.argv[0])) sys.exit(2) ## This is for the custom nagios module sys.path.insert(1, '../') from pynag.Parsers import config target_item = sys.argv[1] ## Create the plugin option nc = config('/etc/nagios/nagios.cfg') nc.parse() item = nc.get_contactgroup(target_item) if not item: sys.stderr.write("Item not found: %s\n" % item) sys.exit(2) print nc.print_conf(item)
trg1984/kooditunti
refs/heads/master
public/blockly-games/appengine/storage.py
3
"""Blockly Games: Storage Copyright 2012 Google Inc. https://github.com/google/blockly-games Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ """Store and retrieve XML with App Engine. """ __author__ = "q.neutron@gmail.com (Quynh Neutron)" import cgi import logging from random import randint from google.appengine.ext import db from google.appengine.api import memcache def keyGen(): # Generate a random string of length KEY_LEN. KEY_LEN = 6 CHARS = "abcdefghijkmnopqrstuvwxyz23456789" # Exclude l, 0, 1. max_index = len(CHARS) - 1 return "".join([CHARS[randint(0, max_index)] for x in range(KEY_LEN)]) class Xml(db.Model): # A row in the database. xml_hash = db.IntegerProperty() xml_content = db.TextProperty() def xmlToKey(xml_content): # Store XML and return a generated key. xml_hash = hash(xml_content) lookup_query = db.Query(Xml) lookup_query.filter("xml_hash =", xml_hash) lookup_result = lookup_query.get() if lookup_result: xml_key = lookup_result.key().name() else: trials = 0 result = True while result: trials += 1 if trials == 100: raise Exception("Sorry, the generator failed to get a key for you.") xml_key = keyGen() result = db.get(db.Key.from_path("Xml", xml_key)) xml = db.Text(xml_content, encoding="utf_8") row = Xml(key_name = xml_key, xml_hash = xml_hash, xml_content = xml) row.put() return xml_key def keyToXml(key_provided): # Retrieve stored XML based on the provided key. # Normalize the string. key_provided = key_provided.lower().strip() # Check memcache for a quick match. 
xml = memcache.get("XML_" + key_provided) if xml is None: # Check datastore for a definitive match. result = db.get(db.Key.from_path("Xml", key_provided)) if not result: xml = "" else: xml = result.xml_content # Save to memcache for next hit. if not memcache.add("XML_" + key_provided, xml, 3600): logging.error("Memcache set failed.") return xml.encode("utf-8") if __name__ == "__main__": print "Content-Type: text/plain\n" forms = cgi.FieldStorage() if "xml" in forms: print(xmlToKey(forms["xml"].value)) if "key" in forms: print(keyToXml(forms["key"].value))
CWVanderReyden/originalMyHomeNet
refs/heads/master
Lib/encodings/mac_cyrillic.py
593
""" Python Character Mapping Codec mac_cyrillic generated from 'MAPPINGS/VENDORS/APPLE/CYRILLIC.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='mac-cyrillic', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( u'\x00' # 0x00 -> CONTROL CHARACTER u'\x01' # 0x01 -> CONTROL CHARACTER u'\x02' # 0x02 -> CONTROL CHARACTER u'\x03' # 0x03 -> CONTROL CHARACTER u'\x04' # 0x04 -> CONTROL CHARACTER u'\x05' # 0x05 -> CONTROL CHARACTER u'\x06' # 0x06 -> CONTROL CHARACTER u'\x07' # 0x07 -> CONTROL CHARACTER u'\x08' # 0x08 -> CONTROL CHARACTER u'\t' # 0x09 -> CONTROL CHARACTER u'\n' # 0x0A -> CONTROL CHARACTER u'\x0b' # 0x0B -> CONTROL CHARACTER u'\x0c' # 0x0C -> CONTROL CHARACTER u'\r' # 0x0D -> CONTROL CHARACTER u'\x0e' # 0x0E -> CONTROL CHARACTER u'\x0f' # 0x0F -> CONTROL CHARACTER u'\x10' # 0x10 -> CONTROL CHARACTER u'\x11' # 0x11 -> CONTROL CHARACTER u'\x12' # 0x12 -> CONTROL CHARACTER u'\x13' # 0x13 -> CONTROL CHARACTER u'\x14' # 0x14 -> CONTROL CHARACTER u'\x15' # 0x15 -> CONTROL CHARACTER u'\x16' # 0x16 -> CONTROL CHARACTER u'\x17' # 0x17 -> CONTROL CHARACTER u'\x18' # 0x18 -> CONTROL CHARACTER 
u'\x19' # 0x19 -> CONTROL CHARACTER u'\x1a' # 0x1A -> CONTROL CHARACTER u'\x1b' # 0x1B -> CONTROL CHARACTER u'\x1c' # 0x1C -> CONTROL CHARACTER u'\x1d' # 0x1D -> CONTROL CHARACTER u'\x1e' # 0x1E -> CONTROL CHARACTER u'\x1f' # 0x1F -> CONTROL CHARACTER u' ' # 0x20 -> SPACE u'!' # 0x21 -> EXCLAMATION MARK u'"' # 0x22 -> QUOTATION MARK u'#' # 0x23 -> NUMBER SIGN u'$' # 0x24 -> DOLLAR SIGN u'%' # 0x25 -> PERCENT SIGN u'&' # 0x26 -> AMPERSAND u"'" # 0x27 -> APOSTROPHE u'(' # 0x28 -> LEFT PARENTHESIS u')' # 0x29 -> RIGHT PARENTHESIS u'*' # 0x2A -> ASTERISK u'+' # 0x2B -> PLUS SIGN u',' # 0x2C -> COMMA u'-' # 0x2D -> HYPHEN-MINUS u'.' # 0x2E -> FULL STOP u'/' # 0x2F -> SOLIDUS u'0' # 0x30 -> DIGIT ZERO u'1' # 0x31 -> DIGIT ONE u'2' # 0x32 -> DIGIT TWO u'3' # 0x33 -> DIGIT THREE u'4' # 0x34 -> DIGIT FOUR u'5' # 0x35 -> DIGIT FIVE u'6' # 0x36 -> DIGIT SIX u'7' # 0x37 -> DIGIT SEVEN u'8' # 0x38 -> DIGIT EIGHT u'9' # 0x39 -> DIGIT NINE u':' # 0x3A -> COLON u';' # 0x3B -> SEMICOLON u'<' # 0x3C -> LESS-THAN SIGN u'=' # 0x3D -> EQUALS SIGN u'>' # 0x3E -> GREATER-THAN SIGN u'?' 
# 0x3F -> QUESTION MARK u'@' # 0x40 -> COMMERCIAL AT u'A' # 0x41 -> LATIN CAPITAL LETTER A u'B' # 0x42 -> LATIN CAPITAL LETTER B u'C' # 0x43 -> LATIN CAPITAL LETTER C u'D' # 0x44 -> LATIN CAPITAL LETTER D u'E' # 0x45 -> LATIN CAPITAL LETTER E u'F' # 0x46 -> LATIN CAPITAL LETTER F u'G' # 0x47 -> LATIN CAPITAL LETTER G u'H' # 0x48 -> LATIN CAPITAL LETTER H u'I' # 0x49 -> LATIN CAPITAL LETTER I u'J' # 0x4A -> LATIN CAPITAL LETTER J u'K' # 0x4B -> LATIN CAPITAL LETTER K u'L' # 0x4C -> LATIN CAPITAL LETTER L u'M' # 0x4D -> LATIN CAPITAL LETTER M u'N' # 0x4E -> LATIN CAPITAL LETTER N u'O' # 0x4F -> LATIN CAPITAL LETTER O u'P' # 0x50 -> LATIN CAPITAL LETTER P u'Q' # 0x51 -> LATIN CAPITAL LETTER Q u'R' # 0x52 -> LATIN CAPITAL LETTER R u'S' # 0x53 -> LATIN CAPITAL LETTER S u'T' # 0x54 -> LATIN CAPITAL LETTER T u'U' # 0x55 -> LATIN CAPITAL LETTER U u'V' # 0x56 -> LATIN CAPITAL LETTER V u'W' # 0x57 -> LATIN CAPITAL LETTER W u'X' # 0x58 -> LATIN CAPITAL LETTER X u'Y' # 0x59 -> LATIN CAPITAL LETTER Y u'Z' # 0x5A -> LATIN CAPITAL LETTER Z u'[' # 0x5B -> LEFT SQUARE BRACKET u'\\' # 0x5C -> REVERSE SOLIDUS u']' # 0x5D -> RIGHT SQUARE BRACKET u'^' # 0x5E -> CIRCUMFLEX ACCENT u'_' # 0x5F -> LOW LINE u'`' # 0x60 -> GRAVE ACCENT u'a' # 0x61 -> LATIN SMALL LETTER A u'b' # 0x62 -> LATIN SMALL LETTER B u'c' # 0x63 -> LATIN SMALL LETTER C u'd' # 0x64 -> LATIN SMALL LETTER D u'e' # 0x65 -> LATIN SMALL LETTER E u'f' # 0x66 -> LATIN SMALL LETTER F u'g' # 0x67 -> LATIN SMALL LETTER G u'h' # 0x68 -> LATIN SMALL LETTER H u'i' # 0x69 -> LATIN SMALL LETTER I u'j' # 0x6A -> LATIN SMALL LETTER J u'k' # 0x6B -> LATIN SMALL LETTER K u'l' # 0x6C -> LATIN SMALL LETTER L u'm' # 0x6D -> LATIN SMALL LETTER M u'n' # 0x6E -> LATIN SMALL LETTER N u'o' # 0x6F -> LATIN SMALL LETTER O u'p' # 0x70 -> LATIN SMALL LETTER P u'q' # 0x71 -> LATIN SMALL LETTER Q u'r' # 0x72 -> LATIN SMALL LETTER R u's' # 0x73 -> LATIN SMALL LETTER S u't' # 0x74 -> LATIN SMALL LETTER T u'u' # 0x75 -> LATIN SMALL LETTER U u'v' # 0x76 -> 
LATIN SMALL LETTER V u'w' # 0x77 -> LATIN SMALL LETTER W u'x' # 0x78 -> LATIN SMALL LETTER X u'y' # 0x79 -> LATIN SMALL LETTER Y u'z' # 0x7A -> LATIN SMALL LETTER Z u'{' # 0x7B -> LEFT CURLY BRACKET u'|' # 0x7C -> VERTICAL LINE u'}' # 0x7D -> RIGHT CURLY BRACKET u'~' # 0x7E -> TILDE u'\x7f' # 0x7F -> CONTROL CHARACTER u'\u0410' # 0x80 -> CYRILLIC CAPITAL LETTER A u'\u0411' # 0x81 -> CYRILLIC CAPITAL LETTER BE u'\u0412' # 0x82 -> CYRILLIC CAPITAL LETTER VE u'\u0413' # 0x83 -> CYRILLIC CAPITAL LETTER GHE u'\u0414' # 0x84 -> CYRILLIC CAPITAL LETTER DE u'\u0415' # 0x85 -> CYRILLIC CAPITAL LETTER IE u'\u0416' # 0x86 -> CYRILLIC CAPITAL LETTER ZHE u'\u0417' # 0x87 -> CYRILLIC CAPITAL LETTER ZE u'\u0418' # 0x88 -> CYRILLIC CAPITAL LETTER I u'\u0419' # 0x89 -> CYRILLIC CAPITAL LETTER SHORT I u'\u041a' # 0x8A -> CYRILLIC CAPITAL LETTER KA u'\u041b' # 0x8B -> CYRILLIC CAPITAL LETTER EL u'\u041c' # 0x8C -> CYRILLIC CAPITAL LETTER EM u'\u041d' # 0x8D -> CYRILLIC CAPITAL LETTER EN u'\u041e' # 0x8E -> CYRILLIC CAPITAL LETTER O u'\u041f' # 0x8F -> CYRILLIC CAPITAL LETTER PE u'\u0420' # 0x90 -> CYRILLIC CAPITAL LETTER ER u'\u0421' # 0x91 -> CYRILLIC CAPITAL LETTER ES u'\u0422' # 0x92 -> CYRILLIC CAPITAL LETTER TE u'\u0423' # 0x93 -> CYRILLIC CAPITAL LETTER U u'\u0424' # 0x94 -> CYRILLIC CAPITAL LETTER EF u'\u0425' # 0x95 -> CYRILLIC CAPITAL LETTER HA u'\u0426' # 0x96 -> CYRILLIC CAPITAL LETTER TSE u'\u0427' # 0x97 -> CYRILLIC CAPITAL LETTER CHE u'\u0428' # 0x98 -> CYRILLIC CAPITAL LETTER SHA u'\u0429' # 0x99 -> CYRILLIC CAPITAL LETTER SHCHA u'\u042a' # 0x9A -> CYRILLIC CAPITAL LETTER HARD SIGN u'\u042b' # 0x9B -> CYRILLIC CAPITAL LETTER YERU u'\u042c' # 0x9C -> CYRILLIC CAPITAL LETTER SOFT SIGN u'\u042d' # 0x9D -> CYRILLIC CAPITAL LETTER E u'\u042e' # 0x9E -> CYRILLIC CAPITAL LETTER YU u'\u042f' # 0x9F -> CYRILLIC CAPITAL LETTER YA u'\u2020' # 0xA0 -> DAGGER u'\xb0' # 0xA1 -> DEGREE SIGN u'\u0490' # 0xA2 -> CYRILLIC CAPITAL LETTER GHE WITH UPTURN u'\xa3' # 0xA3 -> POUND SIGN 
u'\xa7' # 0xA4 -> SECTION SIGN u'\u2022' # 0xA5 -> BULLET u'\xb6' # 0xA6 -> PILCROW SIGN u'\u0406' # 0xA7 -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I u'\xae' # 0xA8 -> REGISTERED SIGN u'\xa9' # 0xA9 -> COPYRIGHT SIGN u'\u2122' # 0xAA -> TRADE MARK SIGN u'\u0402' # 0xAB -> CYRILLIC CAPITAL LETTER DJE u'\u0452' # 0xAC -> CYRILLIC SMALL LETTER DJE u'\u2260' # 0xAD -> NOT EQUAL TO u'\u0403' # 0xAE -> CYRILLIC CAPITAL LETTER GJE u'\u0453' # 0xAF -> CYRILLIC SMALL LETTER GJE u'\u221e' # 0xB0 -> INFINITY u'\xb1' # 0xB1 -> PLUS-MINUS SIGN u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO u'\u0456' # 0xB4 -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I u'\xb5' # 0xB5 -> MICRO SIGN u'\u0491' # 0xB6 -> CYRILLIC SMALL LETTER GHE WITH UPTURN u'\u0408' # 0xB7 -> CYRILLIC CAPITAL LETTER JE u'\u0404' # 0xB8 -> CYRILLIC CAPITAL LETTER UKRAINIAN IE u'\u0454' # 0xB9 -> CYRILLIC SMALL LETTER UKRAINIAN IE u'\u0407' # 0xBA -> CYRILLIC CAPITAL LETTER YI u'\u0457' # 0xBB -> CYRILLIC SMALL LETTER YI u'\u0409' # 0xBC -> CYRILLIC CAPITAL LETTER LJE u'\u0459' # 0xBD -> CYRILLIC SMALL LETTER LJE u'\u040a' # 0xBE -> CYRILLIC CAPITAL LETTER NJE u'\u045a' # 0xBF -> CYRILLIC SMALL LETTER NJE u'\u0458' # 0xC0 -> CYRILLIC SMALL LETTER JE u'\u0405' # 0xC1 -> CYRILLIC CAPITAL LETTER DZE u'\xac' # 0xC2 -> NOT SIGN u'\u221a' # 0xC3 -> SQUARE ROOT u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK u'\u2248' # 0xC5 -> ALMOST EQUAL TO u'\u2206' # 0xC6 -> INCREMENT u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS u'\xa0' # 0xCA -> NO-BREAK SPACE u'\u040b' # 0xCB -> CYRILLIC CAPITAL LETTER TSHE u'\u045b' # 0xCC -> CYRILLIC SMALL LETTER TSHE u'\u040c' # 0xCD -> CYRILLIC CAPITAL LETTER KJE u'\u045c' # 0xCE -> CYRILLIC SMALL LETTER KJE u'\u0455' # 0xCF -> CYRILLIC SMALL LETTER DZE u'\u2013' # 0xD0 -> EN DASH u'\u2014' # 0xD1 -> EM DASH u'\u201c' # 0xD2 -> 
LEFT DOUBLE QUOTATION MARK u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK u'\xf7' # 0xD6 -> DIVISION SIGN u'\u201e' # 0xD7 -> DOUBLE LOW-9 QUOTATION MARK u'\u040e' # 0xD8 -> CYRILLIC CAPITAL LETTER SHORT U u'\u045e' # 0xD9 -> CYRILLIC SMALL LETTER SHORT U u'\u040f' # 0xDA -> CYRILLIC CAPITAL LETTER DZHE u'\u045f' # 0xDB -> CYRILLIC SMALL LETTER DZHE u'\u2116' # 0xDC -> NUMERO SIGN u'\u0401' # 0xDD -> CYRILLIC CAPITAL LETTER IO u'\u0451' # 0xDE -> CYRILLIC SMALL LETTER IO u'\u044f' # 0xDF -> CYRILLIC SMALL LETTER YA u'\u0430' # 0xE0 -> CYRILLIC SMALL LETTER A u'\u0431' # 0xE1 -> CYRILLIC SMALL LETTER BE u'\u0432' # 0xE2 -> CYRILLIC SMALL LETTER VE u'\u0433' # 0xE3 -> CYRILLIC SMALL LETTER GHE u'\u0434' # 0xE4 -> CYRILLIC SMALL LETTER DE u'\u0435' # 0xE5 -> CYRILLIC SMALL LETTER IE u'\u0436' # 0xE6 -> CYRILLIC SMALL LETTER ZHE u'\u0437' # 0xE7 -> CYRILLIC SMALL LETTER ZE u'\u0438' # 0xE8 -> CYRILLIC SMALL LETTER I u'\u0439' # 0xE9 -> CYRILLIC SMALL LETTER SHORT I u'\u043a' # 0xEA -> CYRILLIC SMALL LETTER KA u'\u043b' # 0xEB -> CYRILLIC SMALL LETTER EL u'\u043c' # 0xEC -> CYRILLIC SMALL LETTER EM u'\u043d' # 0xED -> CYRILLIC SMALL LETTER EN u'\u043e' # 0xEE -> CYRILLIC SMALL LETTER O u'\u043f' # 0xEF -> CYRILLIC SMALL LETTER PE u'\u0440' # 0xF0 -> CYRILLIC SMALL LETTER ER u'\u0441' # 0xF1 -> CYRILLIC SMALL LETTER ES u'\u0442' # 0xF2 -> CYRILLIC SMALL LETTER TE u'\u0443' # 0xF3 -> CYRILLIC SMALL LETTER U u'\u0444' # 0xF4 -> CYRILLIC SMALL LETTER EF u'\u0445' # 0xF5 -> CYRILLIC SMALL LETTER HA u'\u0446' # 0xF6 -> CYRILLIC SMALL LETTER TSE u'\u0447' # 0xF7 -> CYRILLIC SMALL LETTER CHE u'\u0448' # 0xF8 -> CYRILLIC SMALL LETTER SHA u'\u0449' # 0xF9 -> CYRILLIC SMALL LETTER SHCHA u'\u044a' # 0xFA -> CYRILLIC SMALL LETTER HARD SIGN u'\u044b' # 0xFB -> CYRILLIC SMALL LETTER YERU u'\u044c' # 0xFC -> CYRILLIC SMALL LETTER SOFT SIGN u'\u044d' # 0xFD -> CYRILLIC SMALL LETTER E 
u'\u044e' # 0xFE -> CYRILLIC SMALL LETTER YU u'\u20ac' # 0xFF -> EURO SIGN ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
Symmetry-Innovations-Pty-Ltd/Python-2.7-for-QNX6.5.0-x86
refs/heads/master
usr/pkg/lib/python2.7/__phello__.foo.py
516
# This file exists as a helper for the test.test_frozen module.
codrut3/tensorflow
refs/heads/master
tensorflow/python/ops/distributions/bijector_test_util.py
81
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Bijector unit-test utilities."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import uniform as uniform_lib


def assert_finite(array):
  # Raises rather than using np.testing so callers get a short sample of
  # the offending values (first 15 entries) in the message.
  if not np.isfinite(array).all():
    raise AssertionError("array was not all finite. %s" % array[:15])


def assert_strictly_increasing(array):
  # Every consecutive difference must be strictly positive.
  np.testing.assert_array_less(0., np.diff(array))


def assert_strictly_decreasing(array):
  # Every consecutive difference must be strictly negative.
  np.testing.assert_array_less(np.diff(array), 0.)


def assert_strictly_monotonic(array):
  # Direction is inferred from the endpoints, then checked everywhere.
  if array[0] < array[-1]:
    assert_strictly_increasing(array)
  else:
    assert_strictly_decreasing(array)


def assert_scalar_congruency(bijector,
                             lower_x,
                             upper_x,
                             n=int(10e3),
                             rtol=0.01,
                             sess=None):
  """Assert `bijector`'s forward/inverse/inverse_log_det_jacobian are congruent.

  We draw samples `X ~ U(lower_x, upper_x)`, then feed these through the
  `bijector` in order to check that:

  1. the forward is strictly monotonic.
  2. the forward/inverse methods are inverses of each other.
  3. the jacobian is the correct change of measure.

  This can only be used for a Bijector mapping open subsets of the real
  line to themselves.  This is due to the fact that this test compares the
  `prob` before/after transformation with the Lebesgue measure on the
  line.

  Args:
    bijector:  Instance of Bijector
    lower_x:  Python scalar.
    upper_x:  Python scalar.  Must have `lower_x < upper_x`, and both must be
      in the domain of the `bijector`.  The `bijector` should probably not
      produce huge variation in values in the interval `(lower_x, upper_x)`,
      or else the variance based check of the Jacobian will require small
      `rtol` or huge `n`.
    n:  Number of samples to draw for the checks.
    rtol:  Positive number.  Used for the Jacobian check.
    sess:  `tf.Session`.  Defaults to the default session.

  Raises:
    AssertionError:  If tests fail.
  """
  # Checks and defaults.
  # NOTE(review): only scalar-event bijectors are supported here.
  assert bijector.event_ndims.eval() == 0
  if sess is None:
    sess = ops.get_default_session()

  # Should be monotonic over this interval
  ten_x_pts = np.linspace(lower_x, upper_x, num=10).astype(np.float32)
  if bijector.dtype is not None:
    ten_x_pts = ten_x_pts.astype(bijector.dtype.as_numpy_dtype)
  forward_on_10_pts = bijector.forward(ten_x_pts)

  # Set the lower/upper limits in the range of the bijector.
  lower_y, upper_y = sess.run(
      [bijector.forward(lower_x), bijector.forward(upper_x)])
  if upper_y < lower_y:  # If bijector.forward is a decreasing function.
    lower_y, upper_y = upper_y, lower_y

  # Uniform samples from the domain, range.
  uniform_x_samps = uniform_lib.Uniform(
      low=lower_x, high=upper_x).sample(n, seed=0)
  uniform_y_samps = uniform_lib.Uniform(
      low=lower_y, high=upper_y).sample(n, seed=1)

  # These compositions should be the identity.
  inverse_forward_x = bijector.inverse(bijector.forward(uniform_x_samps))
  forward_inverse_y = bijector.forward(bijector.inverse(uniform_y_samps))

  # For a < b, and transformation y = y(x),
  # (b - a) = \int_a^b dx = \int_{y(a)}^{y(b)} |dx/dy| dy
  # "change_measure_dy_dx" below is a Monte Carlo approximation to the right
  # hand side, which should then be close to the left, which is (b - a).
  dy_dx = math_ops.exp(bijector.inverse_log_det_jacobian(uniform_y_samps))
  # E[|dx/dy|] under Uniform[lower_y, upper_y]
  # = \int_{y(a)}^{y(b)} |dx/dy| dP(u), where dP(u) is the uniform measure
  expectation_of_dy_dx_under_uniform = math_ops.reduce_mean(dy_dx)
  # dy = dP(u) * (upper_y - lower_y)
  change_measure_dy_dx = (
      (upper_y - lower_y) * expectation_of_dy_dx_under_uniform)

  # We'll also check that dy_dx = 1 / dx_dy.
  dx_dy = math_ops.exp(
      bijector.forward_log_det_jacobian(bijector.inverse(uniform_y_samps)))

  # Evaluate everything in one run so all tensors see the same sampled
  # values (the samples are stochastic ops).
  [
      forward_on_10_pts_v,
      dy_dx_v,
      dx_dy_v,
      change_measure_dy_dx_v,
      uniform_x_samps_v,
      uniform_y_samps_v,
      inverse_forward_x_v,
      forward_inverse_y_v,
  ] = sess.run([
      forward_on_10_pts,
      dy_dx,
      dx_dy,
      change_measure_dy_dx,
      uniform_x_samps,
      uniform_y_samps,
      inverse_forward_x,
      forward_inverse_y,
  ])

  assert_strictly_monotonic(forward_on_10_pts_v)
  # Composition of forward/inverse should be the identity.
  np.testing.assert_allclose(
      inverse_forward_x_v, uniform_x_samps_v, atol=1e-5, rtol=1e-3)
  np.testing.assert_allclose(
      forward_inverse_y_v, uniform_y_samps_v, atol=1e-5, rtol=1e-3)
  # Change of measure should be correct.
  np.testing.assert_allclose(
      upper_x - lower_x, change_measure_dy_dx_v, atol=0, rtol=rtol)
  # Inverse Jacobian should be equivalent to the reciprocal of the forward
  # Jacobian.
  np.testing.assert_allclose(
      dy_dx_v, np.divide(1., dx_dy_v), atol=1e-5, rtol=1e-3)


def assert_bijective_and_finite(bijector, x, y, atol=0, rtol=1e-5, sess=None):
  """Assert that forward/inverse (along with jacobians) are inverses and finite.

  It is recommended to use x and y values that are very very close to the edge
  of the Bijector's domain.

  Args:
    bijector:  A Bijector instance.
    x:  np.array of values in the domain of bijector.forward.
    y:  np.array of values in the domain of bijector.inverse.
    atol:  Absolute tolerance.
    rtol:  Relative tolerance.
    sess:  TensorFlow session.  Defaults to the default session.

  Raises:
    AssertionError:  If tests fail.
  """
  sess = sess or ops.get_default_session()

  # These are the incoming points, but people often create a crazy range of
  # values for which these end up being bad, especially in 16bit.
  assert_finite(x)
  assert_finite(y)

  f_x = bijector.forward(x)
  g_y = bijector.inverse(y)

  # One sess.run so all round trips see consistent values.
  [
      x_from_x,
      y_from_y,
      ildj_f_x,
      fldj_x,
      ildj_y,
      fldj_g_y,
      f_x_v,
      g_y_v,
  ] = sess.run([
      bijector.inverse(f_x),
      bijector.forward(g_y),
      bijector.inverse_log_det_jacobian(f_x),
      bijector.forward_log_det_jacobian(x),
      bijector.inverse_log_det_jacobian(y),
      bijector.forward_log_det_jacobian(g_y),
      f_x,
      g_y,
  ])

  assert_finite(x_from_x)
  assert_finite(y_from_y)
  assert_finite(ildj_f_x)
  assert_finite(fldj_x)
  assert_finite(ildj_y)
  assert_finite(fldj_g_y)
  assert_finite(f_x_v)
  assert_finite(g_y_v)

  np.testing.assert_allclose(x_from_x, x, atol=atol, rtol=rtol)
  np.testing.assert_allclose(y_from_y, y, atol=atol, rtol=rtol)
  # Forward and inverse log-det-jacobians must be negatives of each other
  # at corresponding points.
  np.testing.assert_allclose(-ildj_f_x, fldj_x, atol=atol, rtol=rtol)
  np.testing.assert_allclose(-ildj_y, fldj_g_y, atol=atol, rtol=rtol)
rockyzhang/zhangyanhit-python-for-android-mips
refs/heads/master
python3-alpha/python3-src/Lib/ctypes/test/test_stringptr.py
50
import unittest
from ctypes import *

import _ctypes_test

lib = CDLL(_ctypes_test.__file__)

class StringPtrTestCase(unittest.TestCase):
    """Tests for passing strings/buffers through POINTER(c_char) vs c_char_p
    struct fields and function arguments.

    NOTE: the exact getrefcount() values asserted below depend on this exact
    statement sequence — do not add or remove references to `b`.
    """

    def test__POINTER_c_char(self):
        class X(Structure):
            _fields_ = [("str", POINTER(c_char))]
        x = X()

        # NULL pointer access
        self.assertRaises(ValueError, getattr, x.str, "contents")
        b = c_buffer(b"Hello, World")
        from sys import getrefcount as grc
        self.assertEqual(grc(b), 2)
        # Assigning the buffer to the field keeps a reference alive.
        x.str = b
        self.assertEqual(grc(b), 3)

        # POINTER(c_char) and Python string is NOT compatible
        # POINTER(c_char) and c_buffer() is compatible
        for i in range(len(b)):
            self.assertEqual(b[i], x.str[i])
        self.assertRaises(TypeError, setattr, x, "str", "Hello, World")

    def test__c_char_p(self):
        class X(Structure):
            _fields_ = [("str", c_char_p)]

        x = X()
        # c_char_p and Python string is compatible
        # c_char_p and c_buffer is NOT compatible
        self.assertEqual(x.str, None)
        x.str = b"Hello, World"
        self.assertEqual(x.str, b"Hello, World")
        b = c_buffer(b"Hello, World")
        self.assertRaises(TypeError, setattr, x, b"str", b)

    def test_functions(self):
        # my_strchr comes from the _ctypes_test helper extension module.
        strchr = lib.my_strchr
        strchr.restype = c_char_p

        # c_char_p and Python string is compatible
        # c_char_p and c_buffer are now compatible
        strchr.argtypes = c_char_p, c_char
        self.assertEqual(strchr(b"abcdef", b"c"), b"cdef")
        self.assertEqual(strchr(c_buffer(b"abcdef"), b"c"), b"cdef")

        # POINTER(c_char) and Python string is NOT compatible
        # POINTER(c_char) and c_buffer() is compatible
        strchr.argtypes = POINTER(c_char), c_char
        buf = c_buffer(b"abcdef")
        self.assertEqual(strchr(buf, b"c"), b"cdef")
        self.assertEqual(strchr(b"abcdef", b"c"), b"cdef")

        # XXX These calls are dangerous, because the first argument
        # to strchr is no longer valid after the function returns!
        # So we must keep a reference to buf separately
        strchr.restype = POINTER(c_char)
        buf = c_buffer(b"abcdef")
        r = strchr(buf, b"c")
        x = r[0], r[1], r[2], r[3], r[4]
        self.assertEqual(x, (b"c", b"d", b"e", b"f", b"\000"))
        del buf
        # x1 will NOT be the same as x, usually:
        x1 = r[0], r[1], r[2], r[3], r[4]

if __name__ == '__main__':
    unittest.main()
gpetretto/pymatgen
refs/heads/master
pymatgen/entries/exp_entries.py
7
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.

"""
This module defines Entry classes for containing experimental data.
"""
# Fix: the module docstring must precede the __future__ import, otherwise
# it is an ordinary string expression and never becomes __doc__.

from __future__ import division, unicode_literals

__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Jun 27, 2012"

from pymatgen.analysis.phase_diagram import PDEntry
from pymatgen.core.composition import Composition
from monty.json import MSONable
from pymatgen.analysis.thermochemistry import ThermoData


class ExpEntry(PDEntry, MSONable):
    """
    An lightweight ExpEntry object containing experimental data for a
    composition for many purposes. Extends a PDEntry so that it can be used for
    phase diagram generation and reaction calculation.

    Current version works only with solid phases and at 298K. Further
    extensions for temperature dependence are planned.

    Args:
        composition: Composition of the entry. For flexibility, this can take
            the form of all the typical input taken by a Composition, including
            a {symbol: amt} dict, a string formula, and others.
        thermodata: A sequence of ThermoData associated with the entry.
        temperature: A temperature for the entry in Kelvin. Defaults to 298K.
    """

    def __init__(self, composition, thermodata, temperature=298):
        comp = Composition(composition)
        self._thermodata = thermodata
        found = False
        enthalpy = float("inf")
        # Pick the minimum formation enthalpy ("fH") among non-gas,
        # non-liquid entries (solids only at this version).
        for data in self._thermodata:
            if data.type == "fH" and data.value < enthalpy and \
                    data.phaseinfo not in ("gas", "liquid"):
                enthalpy = data.value
                found = True
        if not found:
            raise ValueError("List of Thermodata does not contain enthalpy "
                             "values.")
        self.temperature = temperature
        super(ExpEntry, self).__init__(comp, enthalpy)

    def __repr__(self):
        return "ExpEntry {}, Energy = {:.4f}".format(self.composition.formula,
                                                     self.energy)

    def __str__(self):
        return self.__repr__()

    @classmethod
    def from_dict(cls, d):
        """Reconstruct an ExpEntry from its as_dict() representation."""
        thermodata = [ThermoData.from_dict(td) for td in d["thermodata"]]
        return cls(d["composition"], thermodata, d["temperature"])

    def as_dict(self):
        """Return a JSON-serializable dict (MSONable contract)."""
        return {"@module": self.__class__.__module__,
                "@class": self.__class__.__name__,
                "thermodata": [td.as_dict() for td in self._thermodata],
                "composition": self.composition.as_dict(),
                "temperature": self.temperature}
chuchiperriman/FolderFilesAnalyzer
refs/heads/master
test_liten2.py
2
#!/usr/bin/env python
"""
Tests for Liten2
"""
# Fixes: removed a duplicate `import unittest` that preceded the shebang
# comment; narrowed a bare `except:` that silently swallowed every error
# (including typos/NameError) in test_db_connect; dropped the
# auto-generated commented-out test stubs.

import os
import sqlite3
import unittest
from time import strftime

import liten2


class TestWalk(unittest.TestCase):

    def test_findthis(self):
        """Walk.findthis() should create the dated SQL dump in the cwd."""
        path = os.getcwd()
        size = 1048576  # 1 MiB minimum file size for the walk
        walk = liten2.Walk(path, size)
        walk.findthis()
        expected = path + '/' + strftime('%Y-%m-%d.sql')
        result = os.path.isfile(expected)
        self.assertTrue(result)


class TestReport(unittest.TestCase):

    def test_db_connect(self):
        """Connectivity to the DB"""
        self.dump = strftime('%Y-%m-%d.sql')
        connection = True
        try:
            self.sqlfile = open(self.dump)
            self.conn = sqlite3.connect(':memory:',
                                        isolation_level='exclusive')
        except (IOError, sqlite3.Error):
            # Only a missing dump file or an sqlite failure means "no
            # connection"; anything else is a genuine bug and should raise.
            connection = False
        self.assertTrue(connection)

    def test_file_num(self):
        """Every row yielded by Report.file_num() should have a falsy count."""
        report = liten2.Report(full=False)
        for row in report.file_num():
            self.assertFalse(row[0])


if __name__ == '__main__':
    unittest.main()
BackupGGCode/python-for-android
refs/heads/master
python3-alpha/extra_modules/gdata/tlslite/utils/compat.py
48
"""Miscellaneous functions to mask Python version differences.""" import sys import os if sys.version_info < (2,2): raise AssertionError("Python 2.2 or later required") if sys.version_info < (2,3): def enumerate(collection): return list(zip(list(range(len(collection))), collection)) class Set: def __init__(self, seq=None): self.values = {} if seq: for e in seq: self.values[e] = None def add(self, e): self.values[e] = None def discard(self, e): if e in list(self.values.keys()): del(self.values[e]) def union(self, s): ret = Set() for e in list(self.values.keys()): ret.values[e] = None for e in list(s.values.keys()): ret.values[e] = None return ret def issubset(self, other): for e in list(self.values.keys()): if e not in list(other.values.keys()): return False return True def __bool__( self): return len(list(self.values.keys())) def __contains__(self, e): return e in list(self.values.keys()) def __iter__(self): return iter(list(set.values.keys())) if os.name != "java": import array def createByteArraySequence(seq): return array.array('B', seq) def createByteArrayZeros(howMany): return array.array('B', [0] * howMany) def concatArrays(a1, a2): return a1+a2 def bytesToString(bytes): return bytes.tostring() def stringToBytes(s): bytes = createByteArrayZeros(0) bytes.fromstring(s) return bytes import math def numBits(n): if n==0: return 0 s = "%x" % n return ((len(s)-1)*4) + \ {'0':0, '1':1, '2':2, '3':2, '4':3, '5':3, '6':3, '7':3, '8':4, '9':4, 'a':4, 'b':4, 'c':4, 'd':4, 'e':4, 'f':4, }[s[0]] return int(math.floor(math.log(n, 2))+1) BaseException = Exception import sys import traceback def formatExceptionTrace(e): newStr = "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])) return newStr else: #Jython 2.1 is missing lots of python 2.3 stuff, #which we have to emulate here: #NOTE: JYTHON SUPPORT NO LONGER WORKS, DUE TO USE OF GENERATORS. #THIS CODE IS LEFT IN SO THAT ONE JYTHON UPDATES TO 2.2, IT HAS A #CHANCE OF WORKING AGAIN. 
import java import jarray def createByteArraySequence(seq): if isinstance(seq, type("")): #If it's a string, convert seq = [ord(c) for c in seq] return jarray.array(seq, 'h') #use short instead of bytes, cause bytes are signed def createByteArrayZeros(howMany): return jarray.zeros(howMany, 'h') #use short instead of bytes, cause bytes are signed def concatArrays(a1, a2): l = list(a1)+list(a2) return createByteArraySequence(l) #WAY TOO SLOW - MUST BE REPLACED------------ def bytesToString(bytes): return "".join([chr(b) for b in bytes]) def stringToBytes(s): bytes = createByteArrayZeros(len(s)) for count, c in enumerate(s): bytes[count] = ord(c) return bytes #WAY TOO SLOW - MUST BE REPLACED------------ def numBits(n): if n==0: return 0 n= 1 * n; #convert to long, if it isn't already return n.__tojava__(java.math.BigInteger).bitLength() #Adjust the string to an array of bytes def stringToJavaByteArray(s): bytes = jarray.zeros(len(s), 'b') for count, c in enumerate(s): x = ord(c) if x >= 128: x -= 256 bytes[count] = x return bytes BaseException = java.lang.Exception import sys import traceback def formatExceptionTrace(e): newStr = "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])) return newStr
syaiful6/django
refs/heads/master
tests/auth_tests/test_forms.py
228
from __future__ import unicode_literals import datetime import re from django import forms from django.contrib.auth.forms import ( AdminPasswordChangeForm, AuthenticationForm, PasswordChangeForm, PasswordResetForm, ReadOnlyPasswordHashField, ReadOnlyPasswordHashWidget, SetPasswordForm, UserChangeForm, UserCreationForm, ) from django.contrib.auth.models import User from django.contrib.sites.models import Site from django.core import mail from django.core.mail import EmailMultiAlternatives from django.forms.fields import CharField, Field from django.test import SimpleTestCase, TestCase, mock, override_settings from django.utils import translation from django.utils.encoding import force_text from django.utils.text import capfirst from django.utils.translation import ugettext as _ from .settings import AUTH_TEMPLATES class TestDataMixin(object): @classmethod def setUpTestData(cls): cls.u1 = User.objects.create( password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='testclient', first_name='Test', last_name='Client', email='testclient@example.com', is_staff=False, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31) ) cls.u2 = User.objects.create( password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='inactive', first_name='Inactive', last_name='User', email='testclient2@example.com', is_staff=False, is_active=False, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31) ) cls.u3 = User.objects.create( password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='staff', first_name='Staff', last_name='Member', email='staffmember@example.com', is_staff=True, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31) ) cls.u4 = User.objects.create( password='', 
last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='empty_password', first_name='Empty', last_name='Password', email='empty_password@example.com', is_staff=False, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31) ) cls.u5 = User.objects.create( password='$', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='unmanageable_password', first_name='Unmanageable', last_name='Password', email='unmanageable_password@example.com', is_staff=False, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31) ) cls.u6 = User.objects.create( password='foo$bar', last_login=datetime.datetime(2006, 12, 17, 7, 3, 31), is_superuser=False, username='unknown_password', first_name='Unknown', last_name='Password', email='unknown_password@example.com', is_staff=False, is_active=True, date_joined=datetime.datetime(2006, 12, 17, 7, 3, 31) ) @override_settings(USE_TZ=False, PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher']) class UserCreationFormTest(TestDataMixin, TestCase): def test_user_already_exists(self): data = { 'username': 'testclient', 'password1': 'test123', 'password2': 'test123', } form = UserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form["username"].errors, [force_text(User._meta.get_field('username').error_messages['unique'])]) def test_invalid_data(self): data = { 'username': 'jsmith!', 'password1': 'test123', 'password2': 'test123', } form = UserCreationForm(data) self.assertFalse(form.is_valid()) validator = next(v for v in User._meta.get_field('username').validators if v.code == 'invalid') self.assertEqual(form["username"].errors, [force_text(validator.message)]) def test_password_verification(self): # The verification password is incorrect. 
data = { 'username': 'jsmith', 'password1': 'test123', 'password2': 'test', } form = UserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form["password2"].errors, [force_text(form.error_messages['password_mismatch'])]) def test_both_passwords(self): # One (or both) passwords weren't given data = {'username': 'jsmith'} form = UserCreationForm(data) required_error = [force_text(Field.default_error_messages['required'])] self.assertFalse(form.is_valid()) self.assertEqual(form['password1'].errors, required_error) self.assertEqual(form['password2'].errors, required_error) data['password2'] = 'test123' form = UserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form['password1'].errors, required_error) self.assertEqual(form['password2'].errors, []) @mock.patch('django.contrib.auth.password_validation.password_changed') def test_success(self, password_changed): # The success case. data = { 'username': 'jsmith@example.com', 'password1': 'test123', 'password2': 'test123', } form = UserCreationForm(data) self.assertTrue(form.is_valid()) form.save(commit=False) self.assertEqual(password_changed.call_count, 0) u = form.save() self.assertEqual(password_changed.call_count, 1) self.assertEqual(repr(u), '<User: jsmith@example.com>') @override_settings(AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'}, {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': { 'min_length': 12, }}, ]) def test_validates_password(self): data = { 'username': 'testclient', 'password1': 'testclient', 'password2': 'testclient', } form = UserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual(len(form['password2'].errors), 2) self.assertIn('The password is too similar to the username.', form['password2'].errors) self.assertIn( 'This password is too short. 
It must contain at least 12 characters.', form['password2'].errors ) @override_settings(USE_TZ=False, PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher']) class AuthenticationFormTest(TestDataMixin, TestCase): def test_invalid_username(self): # The user submits an invalid username. data = { 'username': 'jsmith_does_not_exist', 'password': 'test123', } form = AuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual(form.non_field_errors(), [force_text(form.error_messages['invalid_login'] % { 'username': User._meta.get_field('username').verbose_name })]) def test_inactive_user(self): # The user is inactive. data = { 'username': 'inactive', 'password': 'password', } form = AuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual(form.non_field_errors(), [force_text(form.error_messages['inactive'])]) def test_inactive_user_i18n(self): with self.settings(USE_I18N=True), translation.override('pt-br', deactivate=True): # The user is inactive. data = { 'username': 'inactive', 'password': 'password', } form = AuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual(form.non_field_errors(), [force_text(form.error_messages['inactive'])]) def test_custom_login_allowed_policy(self): # The user is inactive, but our custom form policy allows them to log in. data = { 'username': 'inactive', 'password': 'password', } class AuthenticationFormWithInactiveUsersOkay(AuthenticationForm): def confirm_login_allowed(self, user): pass form = AuthenticationFormWithInactiveUsersOkay(None, data) self.assertTrue(form.is_valid()) # If we want to disallow some logins according to custom logic, # we should raise a django.forms.ValidationError in the form. 
class PickyAuthenticationForm(AuthenticationForm): def confirm_login_allowed(self, user): if user.username == "inactive": raise forms.ValidationError("This user is disallowed.") raise forms.ValidationError("Sorry, nobody's allowed in.") form = PickyAuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual(form.non_field_errors(), ['This user is disallowed.']) data = { 'username': 'testclient', 'password': 'password', } form = PickyAuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual(form.non_field_errors(), ["Sorry, nobody's allowed in."]) def test_success(self): # The success case data = { 'username': 'testclient', 'password': 'password', } form = AuthenticationForm(None, data) self.assertTrue(form.is_valid()) self.assertEqual(form.non_field_errors(), []) def test_username_field_label(self): class CustomAuthenticationForm(AuthenticationForm): username = CharField(label="Name", max_length=75) form = CustomAuthenticationForm() self.assertEqual(form['username'].label, "Name") def test_username_field_label_not_set(self): class CustomAuthenticationForm(AuthenticationForm): username = CharField() form = CustomAuthenticationForm() username_field = User._meta.get_field(User.USERNAME_FIELD) self.assertEqual(form.fields['username'].label, capfirst(username_field.verbose_name)) def test_username_field_label_empty_string(self): class CustomAuthenticationForm(AuthenticationForm): username = CharField(label='') form = CustomAuthenticationForm() self.assertEqual(form.fields['username'].label, "") @override_settings(USE_TZ=False, PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher']) class SetPasswordFormTest(TestDataMixin, TestCase): def test_password_verification(self): # The two new passwords do not match. 
user = User.objects.get(username='testclient') data = { 'new_password1': 'abc123', 'new_password2': 'abc', } form = SetPasswordForm(user, data) self.assertFalse(form.is_valid()) self.assertEqual(form["new_password2"].errors, [force_text(form.error_messages['password_mismatch'])]) @mock.patch('django.contrib.auth.password_validation.password_changed') def test_success(self, password_changed): user = User.objects.get(username='testclient') data = { 'new_password1': 'abc123', 'new_password2': 'abc123', } form = SetPasswordForm(user, data) self.assertTrue(form.is_valid()) form.save(commit=False) self.assertEqual(password_changed.call_count, 0) form.save() self.assertEqual(password_changed.call_count, 1) @override_settings(AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'}, {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': { 'min_length': 12, }}, ]) def test_validates_password(self): user = User.objects.get(username='testclient') data = { 'new_password1': 'testclient', 'new_password2': 'testclient', } form = SetPasswordForm(user, data) self.assertFalse(form.is_valid()) self.assertEqual(len(form["new_password2"].errors), 2) self.assertIn('The password is too similar to the username.', form["new_password2"].errors) self.assertIn( 'This password is too short. 
It must contain at least 12 characters.', form["new_password2"].errors ) @override_settings(USE_TZ=False, PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher']) class PasswordChangeFormTest(TestDataMixin, TestCase): def test_incorrect_password(self): user = User.objects.get(username='testclient') data = { 'old_password': 'test', 'new_password1': 'abc123', 'new_password2': 'abc123', } form = PasswordChangeForm(user, data) self.assertFalse(form.is_valid()) self.assertEqual(form["old_password"].errors, [force_text(form.error_messages['password_incorrect'])]) def test_password_verification(self): # The two new passwords do not match. user = User.objects.get(username='testclient') data = { 'old_password': 'password', 'new_password1': 'abc123', 'new_password2': 'abc', } form = PasswordChangeForm(user, data) self.assertFalse(form.is_valid()) self.assertEqual(form["new_password2"].errors, [force_text(form.error_messages['password_mismatch'])]) @mock.patch('django.contrib.auth.password_validation.password_changed') def test_success(self, password_changed): # The success case. 
user = User.objects.get(username='testclient') data = { 'old_password': 'password', 'new_password1': 'abc123', 'new_password2': 'abc123', } form = PasswordChangeForm(user, data) self.assertTrue(form.is_valid()) form.save(commit=False) self.assertEqual(password_changed.call_count, 0) form.save() self.assertEqual(password_changed.call_count, 1) def test_field_order(self): # Regression test - check the order of fields: user = User.objects.get(username='testclient') self.assertEqual(list(PasswordChangeForm(user, {}).fields), ['old_password', 'new_password1', 'new_password2']) @override_settings(USE_TZ=False, PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher']) class UserChangeFormTest(TestDataMixin, TestCase): def test_username_validity(self): user = User.objects.get(username='testclient') data = {'username': 'not valid'} form = UserChangeForm(data, instance=user) self.assertFalse(form.is_valid()) validator = next(v for v in User._meta.get_field('username').validators if v.code == 'invalid') self.assertEqual(form["username"].errors, [force_text(validator.message)]) def test_bug_14242(self): # A regression test, introduce by adding an optimization for the # UserChangeForm. 
class MyUserForm(UserChangeForm): def __init__(self, *args, **kwargs): super(MyUserForm, self).__init__(*args, **kwargs) self.fields['groups'].help_text = 'These groups give users different permissions' class Meta(UserChangeForm.Meta): fields = ('groups',) # Just check we can create it MyUserForm({}) def test_unsuable_password(self): user = User.objects.get(username='empty_password') user.set_unusable_password() user.save() form = UserChangeForm(instance=user) self.assertIn(_("No password set."), form.as_table()) def test_bug_17944_empty_password(self): user = User.objects.get(username='empty_password') form = UserChangeForm(instance=user) self.assertIn(_("No password set."), form.as_table()) def test_bug_17944_unmanageable_password(self): user = User.objects.get(username='unmanageable_password') form = UserChangeForm(instance=user) self.assertIn(_("Invalid password format or unknown hashing algorithm."), form.as_table()) def test_bug_17944_unknown_password_algorithm(self): user = User.objects.get(username='unknown_password') form = UserChangeForm(instance=user) self.assertIn(_("Invalid password format or unknown hashing algorithm."), form.as_table()) def test_bug_19133(self): "The change form does not return the password value" # Use the form to construct the POST data user = User.objects.get(username='testclient') form_for_data = UserChangeForm(instance=user) post_data = form_for_data.initial # The password field should be readonly, so anything # posted here should be ignored; the form will be # valid, and give back the 'initial' value for the # password field. 
post_data['password'] = 'new password' form = UserChangeForm(instance=user, data=post_data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['password'], 'sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161') def test_bug_19349_bound_password_field(self): user = User.objects.get(username='testclient') form = UserChangeForm(data={}, instance=user) # When rendering the bound password field, # ReadOnlyPasswordHashWidget needs the initial # value to render correctly self.assertEqual(form.initial['password'], form['password'].value()) @override_settings( PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], TEMPLATES=AUTH_TEMPLATES, USE_TZ=False, ) class PasswordResetFormTest(TestDataMixin, TestCase): @classmethod def setUpClass(cls): super(PasswordResetFormTest, cls).setUpClass() # This cleanup is necessary because contrib.sites cache # makes tests interfere with each other, see #11505 Site.objects.clear_cache() def create_dummy_user(self): """ Create a user and return a tuple (user_object, username, email). """ username = 'jsmith' email = 'jsmith@example.com' user = User.objects.create_user(username, email, 'test123') return (user, username, email) def test_invalid_email(self): data = {'email': 'not valid'} form = PasswordResetForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form['email'].errors, [_('Enter a valid email address.')]) def test_nonexistent_email(self): """ Test nonexistent email address. This should not fail because it would expose information about registered users. 
""" data = {'email': 'foo@bar.com'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) self.assertEqual(len(mail.outbox), 0) def test_cleaned_data(self): (user, username, email) = self.create_dummy_user() data = {'email': email} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) form.save(domain_override='example.com') self.assertEqual(form.cleaned_data['email'], email) self.assertEqual(len(mail.outbox), 1) def test_custom_email_subject(self): data = {'email': 'testclient@example.com'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) # Since we're not providing a request object, we must provide a # domain_override to prevent the save operation from failing in the # potential case where contrib.sites is not installed. Refs #16412. form.save(domain_override='example.com') self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Custom password reset on example.com') def test_custom_email_constructor(self): data = {'email': 'testclient@example.com'} class CustomEmailPasswordResetForm(PasswordResetForm): def send_mail(self, subject_template_name, email_template_name, context, from_email, to_email, html_email_template_name=None): EmailMultiAlternatives( "Forgot your password?", "Sorry to hear you forgot your password.", None, [to_email], ['site_monitor@example.com'], headers={'Reply-To': 'webmaster@example.com'}, alternatives=[("Really sorry to hear you forgot your password.", "text/html")]).send() form = CustomEmailPasswordResetForm(data) self.assertTrue(form.is_valid()) # Since we're not providing a request object, we must provide a # domain_override to prevent the save operation from failing in the # potential case where contrib.sites is not installed. Refs #16412. 
form.save(domain_override='example.com') self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Forgot your password?') self.assertEqual(mail.outbox[0].bcc, ['site_monitor@example.com']) self.assertEqual(mail.outbox[0].content_subtype, "plain") def test_preserve_username_case(self): """ Preserve the case of the user name (before the @ in the email address) when creating a user (#5605). """ user = User.objects.create_user('forms_test2', 'tesT@EXAMple.com', 'test') self.assertEqual(user.email, 'tesT@example.com') user = User.objects.create_user('forms_test3', 'tesT', 'test') self.assertEqual(user.email, 'tesT') def test_inactive_user(self): """ Test that inactive user cannot receive password reset email. """ (user, username, email) = self.create_dummy_user() user.is_active = False user.save() form = PasswordResetForm({'email': email}) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 0) def test_unusable_password(self): user = User.objects.create_user('testuser', 'test@example.com', 'test') data = {"email": "test@example.com"} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) user.set_unusable_password() user.save() form = PasswordResetForm(data) # The form itself is valid, but no email is sent self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 0) def test_save_plaintext_email(self): """ Test the PasswordResetForm.save() method with no html_email_template_name parameter passed in. Test to ensure original behavior is unchanged after the parameter was added. 
""" (user, username, email) = self.create_dummy_user() form = PasswordResetForm({"email": email}) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 1) message = mail.outbox[0].message() self.assertFalse(message.is_multipart()) self.assertEqual(message.get_content_type(), 'text/plain') self.assertEqual(message.get('subject'), 'Custom password reset on example.com') self.assertEqual(len(mail.outbox[0].alternatives), 0) self.assertEqual(message.get_all('to'), [email]) self.assertTrue(re.match(r'^http://example.com/reset/[\w+/-]', message.get_payload())) def test_save_html_email_template_name(self): """ Test the PasswordResetFOrm.save() method with html_email_template_name parameter specified. Test to ensure that a multipart email is sent with both text/plain and text/html parts. """ (user, username, email) = self.create_dummy_user() form = PasswordResetForm({"email": email}) self.assertTrue(form.is_valid()) form.save(html_email_template_name='registration/html_password_reset_email.html') self.assertEqual(len(mail.outbox), 1) self.assertEqual(len(mail.outbox[0].alternatives), 1) message = mail.outbox[0].message() self.assertEqual(message.get('subject'), 'Custom password reset on example.com') self.assertEqual(len(message.get_payload()), 2) self.assertTrue(message.is_multipart()) self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain') self.assertEqual(message.get_payload(1).get_content_type(), 'text/html') self.assertEqual(message.get_all('to'), [email]) self.assertTrue(re.match(r'^http://example.com/reset/[\w/-]+', message.get_payload(0).get_payload())) self.assertTrue( re.match(r'^<html><a href="http://example.com/reset/[\w/-]+/">Link</a></html>$', message.get_payload(1).get_payload()) ) class ReadOnlyPasswordHashTest(SimpleTestCase): def test_bug_19349_render_with_none_value(self): # Rendering the widget with value set to None # mustn't raise an exception. 
widget = ReadOnlyPasswordHashWidget() html = widget.render(name='password', value=None, attrs={}) self.assertIn(_("No password set."), html) def test_readonly_field_has_changed(self): field = ReadOnlyPasswordHashField() self.assertFalse(field.has_changed('aaa', 'bbb')) @override_settings(USE_TZ=False, PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher']) class AdminPasswordChangeFormTest(TestDataMixin, TestCase): @mock.patch('django.contrib.auth.password_validation.password_changed') def test_success(self, password_changed): user = User.objects.get(username='testclient') data = { 'password1': 'test123', 'password2': 'test123', } form = AdminPasswordChangeForm(user, data) self.assertTrue(form.is_valid()) form.save(commit=False) self.assertEqual(password_changed.call_count, 0) form.save() self.assertEqual(password_changed.call_count, 1)
Jgarcia-IAS/localizacion
refs/heads/master
openerp/addons-extra/odoo-pruebas/odoo-server/addons/event_sale/event_sale.py
48
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

# Bridge between the Sales and Event modules: selling an "event" product on a
# sale order automatically creates event registrations on order confirmation.
# NOTE(review): this file mixes the legacy osv API (_columns/_constraints)
# with new-API fields (openerp.fields) on event_event — intentional for the
# 8.0 transition period.

from openerp import api
from openerp.fields import Integer, One2many, Html
from openerp.osv import fields, osv
from openerp.tools.translate import _


class product_template(osv.osv):
    """Extend product templates with an 'event subscription' flag."""
    _inherit = 'product.template'
    _columns = {
        'event_ok': fields.boolean('Event Subscription', help='Determine if a product needs to create automatically an event registration at the confirmation of a sales order line.'),
        'event_type_id': fields.many2one('event.type', 'Type of Event', help='Select event types so when we use this product in sales order lines, it will filter events of this type only.'),
    }

    def onchange_event_ok(self, cr, uid, ids, type, event_ok, context=None):
        # An event product is always sold as a service, so force the product
        # type when the flag is ticked.  (`type` shadows the builtin here —
        # kept for API compatibility with the view's onchange signature.)
        if event_ok:
            return {'value': {'type': 'service'}}
        return {}


class product(osv.osv):
    """Product variants: link to event tickets and mirror the onchange."""
    _inherit = 'product.product'
    _columns = {
        'event_ticket_ids': fields.one2many('event.event.ticket', 'product_id', 'Event Tickets'),
    }

    def onchange_event_ok(self, cr, uid, ids, type, event_ok, context=None):
        # cannot directly forward to product.template as the ids are theoretically different
        if event_ok:
            return {'value': {'type': 'service'}}
        return {}


class sale_order_line(osv.osv):
    """Sale order lines that can book event registrations on confirmation."""
    _inherit = 'sale.order.line'
    _columns = {
        'event_id': fields.many2one('event.event', 'Event', help="Choose an event and it will automatically create a registration for this event."),
        'event_ticket_id': fields.many2one('event.event.ticket', 'Event Ticket', help="Choose an event ticket and it will automatically create a registration for this event ticket."),
        # Those 2 fields are used for dynamic domains and filled by onchange.
        'event_type_id': fields.related('product_id','event_type_id', type='many2one', relation="event.type", string="Event Type"),
        'event_ok': fields.related('product_id', 'event_ok', string='event_ok', type='boolean'),
    }

    def product_id_change(self, cr, uid, ids, pricelist, product, qty=0, uom=False, qty_uos=0, uos=False, name='', partner_id=False, lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False, context=None):
        """ check product if event type """
        # Delegate to the standard onchange, then copy the event flags from
        # the selected product so the view can show/filter the event fields.
        res = super(sale_order_line,self).product_id_change(cr, uid, ids, pricelist, product, qty=qty, uom=uom, qty_uos=qty_uos, uos=uos, name=name, partner_id=partner_id, lang=lang, update_tax=update_tax, date_order=date_order, packaging=packaging, fiscal_position=fiscal_position, flag=flag, context=context)
        if product:
            product_res = self.pool.get('product.product').browse(cr, uid, product, context=context)
            if product_res.event_ok:
                res['value'].update(event_type_id=product_res.event_type_id.id, event_ok=product_res.event_ok)
            else:
                res['value'].update(event_type_id=False, event_ok=False)
        return res

    def button_confirm(self, cr, uid, ids, context=None):
        '''
        create registration with sales order

        For every confirmed line linked to an event, create one
        event.registration (nb_register = ordered quantity) and log a message
        on it, then run the standard confirmation.
        '''
        context = dict(context or {})
        registration_obj = self.pool.get('event.registration')
        for order_line in self.browse(cr, uid, ids, context=context):
            if order_line.event_id:
                dic = {
                    'name': order_line.order_id.partner_invoice_id.name,
                    'partner_id': order_line.order_id.partner_id.id,
                    'nb_register': int(order_line.product_uom_qty),
                    'email': order_line.order_id.partner_id.email,
                    'phone': order_line.order_id.partner_id.phone,
                    'origin': order_line.order_id.name,
                    'event_id': order_line.event_id.id,
                    'event_ticket_id': order_line.event_ticket_id and order_line.event_ticket_id.id or None,
                }
                if order_line.event_ticket_id:
                    message = _("The registration has been created for event <i>%s</i> with the ticket <i>%s</i> from the Sale Order %s. ") % (order_line.event_id.name, order_line.event_ticket_id.name, order_line.order_id.name)
                else:
                    message = _("The registration has been created for event <i>%s</i> from the Sale Order %s. ") % (order_line.event_id.name, order_line.order_id.name)
                # mail_create_nolog: skip the automatic "document created" log,
                # we post our own message right after.
                context.update({'mail_create_nolog': True})
                registration_id = registration_obj.create(cr, uid, dic, context=context)
                registration_obj.message_post(cr, uid, [registration_id], body=message, context=context)
        return super(sale_order_line, self).button_confirm(cr, uid, ids, context=context)

    def onchange_event_ticket_id(self, cr, uid, ids, event_ticket_id=False, context=None):
        # Propagate the ticket price to the line's unit price.
        price = event_ticket_id and self.pool.get("event.event.ticket").browse(cr, uid, event_ticket_id, context=context).price or False
        return {'value': {'price_unit': price}}


class event_event(osv.osv):
    """Events gain tickets, a computed max-seat total and badge layouts."""
    _inherit = 'event.event'

    event_ticket_ids = One2many('event.event.ticket', 'event_id', string='Event Ticket', default=lambda rec: rec._default_tickets())
    # seats_max is the sum over tickets; stored and read-only here.
    seats_max = Integer(string='Maximum Available Seats',
        help="The maximum registration level is equal to the sum of the maximum registration of event ticket. " +
             "If you have too much registrations you are not able to confirm your event. (0 to ignore this rule )",
        store=True, readonly=True, compute='_compute_seats_max')

    badge_back = Html('Badge Back', translate=True, states={'done': [('readonly', True)]})
    # NOTE(review): "Innner" typo in the user-facing label below — fixing it
    # would invalidate existing translations; confirm before changing.
    badge_innerleft = Html('Badge Innner Left', translate=True, states={'done': [('readonly', True)]})
    badge_innerright = Html('Badge Inner Right', translate=True, states={'done': [('readonly', True)]})

    @api.model
    def _default_tickets(self):
        """Return one default 'Subscription' ticket, or no tickets if the
        reference product has been deleted (ValueError from env.ref)."""
        try:
            product = self.env.ref('event_sale.product_product_event')
            return [{
                'name': _('Subscription'),
                'product_id': product.id,
                'price': 0,
            }]
        except ValueError:
            return self.env['event.event.ticket']

    @api.one
    @api.depends('event_ticket_ids.seats_max')
    def _compute_seats_max(self):
        # Sum of the per-ticket caps; 0-capped tickets contribute nothing.
        self.seats_max = sum(ticket.seats_max for ticket in self.event_ticket_ids)


class event_ticket(osv.osv):
    """A ticket type for an event: price, sales deadline and seat limits."""
    _name = 'event.event.ticket'

    def _get_seats(self, cr, uid, ids, fields, args, context=None):
        """Get reserved, available, reserved but unconfirmed and used seats for each event tickets.
        @return: Dictionary of function field values.
        """
        # NOTE: `id` shadows the builtin in the seed dict below (legacy code).
        res = dict([(id, {}) for id in ids])
        for ticket in self.browse(cr, uid, ids, context=context):
            res[ticket.id]['seats_reserved'] = sum(reg.nb_register for reg in ticket.registration_ids if reg.state == "open")
            res[ticket.id]['seats_used'] = sum(reg.nb_register for reg in ticket.registration_ids if reg.state == "done")
            res[ticket.id]['seats_unconfirmed'] = sum(reg.nb_register for reg in ticket.registration_ids if reg.state == "draft")
            # seats_max == 0 means "unlimited": availability is then None.
            res[ticket.id]['seats_available'] = ticket.seats_max - \
                (res[ticket.id]['seats_reserved'] + res[ticket.id]['seats_used']) \
                if ticket.seats_max > 0 else None
        return res

    def _is_expired(self, cr, uid, ids, field_name, args, context=None):
        # FIXME: A ticket is considered expired when the deadline is passed. The deadline should
        #  be considered in the timezone of the event, not the timezone of the user!
        #  Until we add a TZ on the event we'll use the context's current date, more accurate
        #  than using UTC all the time.
        current_date = fields.date.context_today(self, cr, uid, context=context)
        return {ticket.id: ticket.deadline and ticket.deadline < current_date
                for ticket in self.browse(cr, uid, ids, context=context)}

    _columns = {
        'name': fields.char('Name', required=True, translate=True),
        'event_id': fields.many2one('event.event', "Event", required=True, ondelete='cascade'),
        'product_id': fields.many2one('product.product', 'Product', required=True, domain=[("event_type_id", "!=", False)]),
        'registration_ids': fields.one2many('event.registration', 'event_ticket_id', 'Registrations'),
        'deadline': fields.date("Sales End"),
        'is_expired': fields.function(_is_expired, type='boolean', string='Is Expired'),
        'price': fields.float('Price'),
        'seats_max': fields.integer('Maximum Available Seats', oldname='register_max', help="You can for each event define a maximum registration level. If you have too much registrations you are not able to confirm your event. (put 0 to ignore this rule )"),
        # All four seat counters come from one _get_seats call (multi=).
        'seats_reserved': fields.function(_get_seats, string='Reserved Seats', type='integer', multi='seats_reserved'),
        'seats_available': fields.function(_get_seats, string='Available Seats', type='integer', multi='seats_reserved'),
        'seats_unconfirmed': fields.function(_get_seats, string='Unconfirmed Seat Reservations', type='integer', multi='seats_reserved'),
        'seats_used': fields.function(_get_seats, string='Number of Participations', type='integer', multi='seats_reserved'),
    }

    # NOTE(review): mutable default argument `context={}` — harmless here
    # because context is only passed through, but should be `context=None`;
    # confirm before changing (legacy-API default resolution).
    def _default_product_id(self, cr, uid, context={}):
        # Default to the module's reference event product when it exists.
        imd = self.pool.get('ir.model.data')
        try:
            product = imd.get_object(cr, uid, 'event_sale', 'product_product_event')
        except ValueError:
            return False
        return product.id

    _defaults = {
        'product_id': _default_product_id
    }

    def _check_seats_limit(self, cr, uid, ids, context=None):
        # Constraint: a capped ticket may never be oversold.
        for ticket in self.browse(cr, uid, ids, context=context):
            if ticket.seats_max and ticket.seats_available < 0:
                return False
        return True

    _constraints = [
        (_check_seats_limit, 'No more available tickets.', ['registration_ids','seats_max']),
    ]

    def onchange_product_id(self, cr, uid, ids, product_id=False, context=None):
        # Propose the product's list price as the ticket price.
        price = self.pool.get("product.product").browse(cr, uid, product_id).list_price if product_id else 0
        return {'value': {'price': price}}


class event_registration(osv.osv):
    """Event Registration"""
    _inherit= 'event.registration'
    _columns = {
        'event_ticket_id': fields.many2one('event.event.ticket', 'Event Ticket'),
    }

    def _check_ticket_seats_limit(self, cr, uid, ids, context=None):
        # Same overselling guard as on the ticket, checked from the
        # registration side so state changes re-trigger it.
        for registration in self.browse(cr, uid, ids, context=context):
            if registration.event_ticket_id.seats_max and registration.event_ticket_id.seats_available < 0:
                return False
        return True

    _constraints = [
        (_check_ticket_seats_limit, 'No more available tickets.', ['event_ticket_id','nb_register','state']),
    ]
curtisstpierre/django
refs/heads/master
tests/i18n/sampleproject/update_catalogs.py
344
#!/usr/bin/env python """ Helper script to update sampleproject's translation catalogs. When a bug has been identified related to i18n, this helps capture the issue by using catalogs created from management commands. Example: The string "Two %% Three %%%" renders differently using trans and blocktrans. This issue is difficult to debug, it could be a problem with extraction, interpolation, or both. How this script helps: * Add {% trans "Two %% Three %%%" %} and blocktrans equivalent to templates. * Run this script. * Test extraction - verify the new msgid in sampleproject's django.po. * Add a translation to sampleproject's django.po. * Run this script. * Test interpolation - verify templatetag rendering, test each in a template that is rendered using an activated language from sampleproject's locale. * Tests should fail, issue captured. * Fix issue. * Run this script. * Tests all pass. """ import os import re import sys proj_dir = os.path.dirname(os.path.abspath(__file__)) sys.path.append(os.path.abspath(os.path.join(proj_dir, '..', '..', '..'))) def update_translation_catalogs(): """Run makemessages and compilemessages in sampleproject.""" from django.core.management import call_command prev_cwd = os.getcwd() os.chdir(proj_dir) call_command('makemessages') call_command('compilemessages') # keep the diff friendly - remove 'POT-Creation-Date' pofile = os.path.join(proj_dir, 'locale', 'fr', 'LC_MESSAGES', 'django.po') with open(pofile) as f: content = f.read() content = re.sub(r'^"POT-Creation-Date.+$\s', '', content, flags=re.MULTILINE) with open(pofile, 'w') as f: f.write(content) os.chdir(prev_cwd) if __name__ == "__main__": update_translation_catalogs()
StealthMicro/OctoPi-Makerbot
refs/heads/master
env/Lib/site-packages/pip/_vendor/distlib/scripts.py
163
# -*- coding: utf-8 -*- # # Copyright (C) 2013 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # from io import BytesIO import logging import os import re import struct import sys from .compat import sysconfig, fsencode, detect_encoding, ZipFile from .resources import finder from .util import (FileOperator, get_export_entry, convert_path, get_executable, in_venv) logger = logging.getLogger(__name__) _DEFAULT_MANIFEST = ''' <?xml version="1.0" encoding="UTF-8" standalone="yes"?> <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> <assemblyIdentity version="1.0.0.0" processorArchitecture="X86" name="%s" type="win32"/> <!-- Identify the application security requirements. --> <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> <security> <requestedPrivileges> <requestedExecutionLevel level="asInvoker" uiAccess="false"/> </requestedPrivileges> </security> </trustInfo> </assembly>'''.strip() # check if Python is called on the first line with this expression FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*- if __name__ == '__main__': import sys, re def _resolve(module, func): __import__(module) mod = sys.modules[module] parts = func.split('.') result = getattr(mod, parts.pop(0)) for p in parts: result = getattr(result, p) return result try: sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) func = _resolve('%(module)s', '%(func)s') rc = func() # None interpreted as 0 except Exception as e: # only supporting Python >= 2.6 sys.stderr.write('%%s\\n' %% e) rc = 1 sys.exit(rc) ''' class ScriptMaker(object): """ A class to copy or create scripts from source scripts or callable specifications. 
""" script_template = SCRIPT_TEMPLATE executable = None # for shebangs def __init__(self, source_dir, target_dir, add_launchers=True, dry_run=False, fileop=None): self.source_dir = source_dir self.target_dir = target_dir self.add_launchers = add_launchers self.force = False self.clobber = False # It only makes sense to set mode bits on POSIX. self.set_mode = (os.name == 'posix') self.variants = set(('', 'X.Y')) self._fileop = fileop or FileOperator(dry_run) def _get_alternate_executable(self, executable, options): if options.get('gui', False) and os.name == 'nt': dn, fn = os.path.split(executable) fn = fn.replace('python', 'pythonw') executable = os.path.join(dn, fn) return executable def _get_shebang(self, encoding, post_interp=b'', options=None): if self.executable: executable = self.executable elif not sysconfig.is_python_build(): executable = get_executable() elif in_venv(): executable = os.path.join(sysconfig.get_path('scripts'), 'python%s' % sysconfig.get_config_var('EXE')) else: executable = os.path.join( sysconfig.get_config_var('BINDIR'), 'python%s%s' % (sysconfig.get_config_var('VERSION'), sysconfig.get_config_var('EXE'))) if options: executable = self._get_alternate_executable(executable, options) executable = fsencode(executable) shebang = b'#!' + executable + post_interp + b'\n' # Python parser starts to read a script using UTF-8 until # it gets a #coding:xxx cookie. The shebang has to be the # first line of a file, the #coding:xxx cookie cannot be # written before. So the shebang has to be decodable from # UTF-8. try: shebang.decode('utf-8') except UnicodeDecodeError: raise ValueError( 'The shebang (%r) is not decodable from utf-8' % shebang) # If the script is encoded to a custom encoding (use a # #coding:xxx cookie), the shebang has to be decodable from # the script encoding too. 
if encoding != 'utf-8': try: shebang.decode(encoding) except UnicodeDecodeError: raise ValueError( 'The shebang (%r) is not decodable ' 'from the script encoding (%r)' % (shebang, encoding)) return shebang def _get_script_text(self, entry): return self.script_template % dict(module=entry.prefix, func=entry.suffix) manifest = _DEFAULT_MANIFEST def get_manifest(self, exename): base = os.path.basename(exename) return self.manifest % base def _write_script(self, names, shebang, script_bytes, filenames, ext): use_launcher = self.add_launchers and os.name == 'nt' linesep = os.linesep.encode('utf-8') if not use_launcher: script_bytes = shebang + linesep + script_bytes else: if ext == 'py': launcher = self._get_launcher('t') else: launcher = self._get_launcher('w') stream = BytesIO() with ZipFile(stream, 'w') as zf: zf.writestr('__main__.py', script_bytes) zip_data = stream.getvalue() script_bytes = launcher + shebang + linesep + zip_data for name in names: outname = os.path.join(self.target_dir, name) if use_launcher: n, e = os.path.splitext(outname) if e.startswith('.py'): outname = n outname = '%s.exe' % outname try: self._fileop.write_binary_file(outname, script_bytes) except Exception: # Failed writing an executable - it might be in use. logger.warning('Failed to write executable - trying to ' 'use .deleteme logic') dfname = '%s.deleteme' % outname if os.path.exists(dfname): os.remove(dfname) # Not allowed to fail here os.rename(outname, dfname) # nor here self._fileop.write_binary_file(outname, script_bytes) logger.debug('Able to replace executable using ' '.deleteme logic') try: os.remove(dfname) except Exception: pass # still in use - ignore error else: if os.name == 'nt' and not outname.endswith('.' 
+ ext): outname = '%s.%s' % (outname, ext) if os.path.exists(outname) and not self.clobber: logger.warning('Skipping existing file %s', outname) continue self._fileop.write_binary_file(outname, script_bytes) if self.set_mode: self._fileop.set_executable_mode([outname]) filenames.append(outname) def _make_script(self, entry, filenames, options=None): shebang = self._get_shebang('utf-8', options=options) script = self._get_script_text(entry).encode('utf-8') name = entry.name scriptnames = set() if '' in self.variants: scriptnames.add(name) if 'X' in self.variants: scriptnames.add('%s%s' % (name, sys.version[0])) if 'X.Y' in self.variants: scriptnames.add('%s-%s' % (name, sys.version[:3])) if options and options.get('gui', False): ext = 'pyw' else: ext = 'py' self._write_script(scriptnames, shebang, script, filenames, ext) def _copy_script(self, script, filenames): adjust = False script = os.path.join(self.source_dir, convert_path(script)) outname = os.path.join(self.target_dir, os.path.basename(script)) if not self.force and not self._fileop.newer(script, outname): logger.debug('not copying %s (up-to-date)', script) return # Always open the file, but ignore failures in dry-run mode -- # that way, we'll get accurate feedback if we can read the # script. 
try: f = open(script, 'rb') except IOError: if not self.dry_run: raise f = None else: encoding, lines = detect_encoding(f.readline) f.seek(0) first_line = f.readline() if not first_line: logger.warning('%s: %s is an empty file (skipping)', self.get_command_name(), script) return match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) if match: adjust = True post_interp = match.group(1) or b'' if not adjust: if f: f.close() self._fileop.copy_file(script, outname) if self.set_mode: self._fileop.set_executable_mode([outname]) filenames.append(outname) else: logger.info('copying and adjusting %s -> %s', script, self.target_dir) if not self._fileop.dry_run: shebang = self._get_shebang(encoding, post_interp) if b'pythonw' in first_line: ext = 'pyw' else: ext = 'py' n = os.path.basename(outname) self._write_script([n], shebang, f.read(), filenames, ext) if f: f.close() @property def dry_run(self): return self._fileop.dry_run @dry_run.setter def dry_run(self, value): self._fileop.dry_run = value if os.name == 'nt': # Executable launcher support. # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ def _get_launcher(self, kind): if struct.calcsize('P') == 8: # 64-bit bits = '64' else: bits = '32' name = '%s%s.exe' % (kind, bits) # Issue 31: don't hardcode an absolute package name, but # determine it relative to the current package distlib_package = __name__.rsplit('.', 1)[0] result = finder(distlib_package).find(name).bytes return result # Public API follows def make(self, specification, options=None): """ Make a script. :param specification: The specification, which is either a valid export entry specification (to make a script from a callable) or a filename (to make a script by copying from a source location). :param options: A dictionary of options controlling script generation. :return: A list of all absolute pathnames written to. 
""" filenames = [] entry = get_export_entry(specification) if entry is None: self._copy_script(specification, filenames) else: self._make_script(entry, filenames, options=options) return filenames def make_multiple(self, specifications, options=None): """ Take a list of specifications and make scripts from them, :param specifications: A list of specifications. :return: A list of all absolute pathnames written to, """ filenames = [] for specification in specifications: filenames.extend(self.make(specification, options)) return filenames
geopython/pycsw
refs/heads/master
tests/functionaltests/conftest.py
4
# ================================================================= # # Authors: Ricardo Garcia Silva <ricardo.garcia.silva@gmail.com> # # Copyright (c) 2016 Ricardo Garcia Silva # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without # restriction, including without limitation the rights to use, # copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following # conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. # # ================================================================= """pytest configuration file for functional tests""" import codecs from collections import namedtuple import logging import os import re import configparser import apipkg import pytest from pycsw.core import admin from pycsw.core.config import StaticContext apipkg.initpkg("optionaldependencies", { "psycopg2": "psycopg2", }) from optionaldependencies import psycopg2 # NOQA: E402 TESTS_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SuiteDirs = namedtuple("SuiteDirs", [ "get_tests_dir", "post_tests_dir", "data_tests_dir", "expected_results_dir", "export_tests_dir", ]) def pytest_generate_tests(metafunc): """Parametrize tests programmatically. 
This function scans the filesystem directories under ``tests/functionaltests/suites`` and automatically generates pytest tests based on the available test suites. Each suite directory has the following structure: * A mandatory ``default.cfg`` file specifying the configuration for the pycsw instance to use in the tests of the suite. * An optional ``get/`` subdirectory containing a ``requests.txt`` file with any HTTP GET requests for which to generate tests for. Each request is specified in a new line, with the following pattern: * <test_name>,<request_query_string> * An optional ``post/`` subdirectory containing files that are used as the payload for HTTP POST requests. The name of each file is used as the name of the test (without the file's extension); * An optional ``data/`` subdirectory. This directory, if present, indicates that the suite uses a custom database. The database is populated with any additional files that are contained inside this directory. If the ``data`` directory does not exist then the suite's tests will use the CITE database; * An ``expected/`` subdirectory containing a file for each of the expected test outcomes. 
The tests are autogenerated by parametrizing the ``tests/functionaltests/test_suites_functional::test_suites`` function Notes ----- Check pytest's documentation for information on autogenerating parametrized tests for further details on how the ``pytest_generate_tests`` function can be used: http://pytest.org/latest/parametrize.html#basic-pytest-generate-tests-example """ global TESTS_ROOT if metafunc.function.__name__ == "test_suites": suites_root_dir = os.path.join(TESTS_ROOT, "functionaltests", "suites") suite_names = os.listdir(suites_root_dir) arg_values = [] test_ids = [] logging.basicConfig(level=getattr( logging, metafunc.config.getoption("--pycsw-loglevel").upper())) if metafunc.config.getoption("--database-backend") == "postgresql": _recreate_postgresql_database(metafunc.config) for suite in suite_names: suite_dir = os.path.join(suites_root_dir, suite) config_path = os.path.join(suite_dir, "default.cfg") if not os.path.isfile(config_path): print("Directory {0!r} does not have a suite " "configuration file".format(suite_dir)) continue print("Generating tests for suite {0!r}...".format(suite)) normalize_ids = True if suite in ("harvesting", "manager") else False suite_dirs = _get_suite_dirs(suite) if suite_dirs.post_tests_dir is not None: post_argvalues, post_ids = _get_post_parameters( post_tests_dir=suite_dirs.post_tests_dir, expected_tests_dir=suite_dirs.expected_results_dir, config_path=config_path, suite_name=suite, normalize_ids=normalize_ids, ) arg_values.extend(post_argvalues) test_ids.extend(post_ids) if suite_dirs.get_tests_dir is not None: get_argvalues, get_ids = _get_get_parameters( get_tests_dir=suite_dirs.get_tests_dir, expected_tests_dir=suite_dirs.expected_results_dir, config_path=config_path, suite_name=suite, normalize_ids=normalize_ids, ) arg_values.extend(get_argvalues) test_ids.extend(get_ids) metafunc.parametrize( argnames=["configuration", "request_method", "request_data", "expected_result", "normalize_identifier_fields",], 
argvalues=arg_values, indirect=["configuration"], ids=test_ids, ) @pytest.fixture() def test_identifier(request): """Extract a meaningful identifier from the request's node.""" return re.search(r"[\w_]+\[(.*)\]", request.node.name).group(1) @pytest.fixture() def use_xml_canonicalisation(request): return not request.config.getoption("--functional-prefer-diffs") @pytest.fixture() def save_results_directory(request): return request.config.getoption("--functional-save-results-directory") @pytest.fixture() def configuration(request, tests_directory, log_level): """Configure a suite for execution in tests. This function is executed once for each individual test request, after tests have been collected. The configuration file for each test suite is read into memory. Some configuration parameters, like the repository's url and table name are adjusted. The suite's repository is also created, if needed. Parameters ---------- request: pytest.fixtures.FixtureRequest tests_directory: py.path.local Directory created by pytest where any test artifacts are to be saved log_level: str Log level for the pycsw server instance that will be created during tests. 
""" config_path = request.param config = configparser.ConfigParser() with codecs.open(config_path, encoding="utf-8") as fh: config.read_file(fh) suite_name = config_path.split(os.path.sep)[-2] suite_dirs = _get_suite_dirs(suite_name) data_dir = suite_dirs.data_tests_dir export_dir = suite_dirs.export_tests_dir if data_dir is not None: # suite has its own database repository_url = _get_repository_url(request.config, suite_name, tests_directory) else: # suite uses the CITE database data_dir, export_dir = _get_cite_suite_dirs() repository_url = _get_repository_url(request.config, "cite", tests_directory) table_name = _get_table_name(suite_name, config, repository_url) if not _repository_exists(repository_url, table_name): _initialize_database(repository_url=repository_url, table_name=table_name, data_dir=data_dir, test_dir=tests_directory, export_dir=export_dir) config.set("server", "loglevel", log_level) config.set("server", "logfile", "") config.set("repository", "database", repository_url) config.set("repository", "table", table_name) return config @pytest.fixture(scope="session", name="tests_directory") def fixture_tests_directory(tmpdir_factory): """Create a temporary directory for each test session. This directory is typically situated under ``/tmp`` and is used to create eventual sqlite databases for each suite. This functionality is mostly provided by pytest's built-in ``tmpdir_factory`` fixture. 
More information on this is available at: http://doc.pytest.org/en/2.9.0/tmpdir.html#the-tmpdir-factory-fixture """ tests_dir = tmpdir_factory.mktemp("functional_tests") return tests_dir def _get_cite_suite_dirs(): """Return the path to the data directory of the CITE test suite.""" global TESTS_ROOT suites_root_dir = os.path.join(TESTS_ROOT, "functionaltests", "suites") suite_dir = os.path.join(suites_root_dir, "cite") data_tests_dir = os.path.join(suite_dir, "data") export_tests_dir = os.path.join(suite_dir, "export") data_dir = data_tests_dir if os.path.isdir(data_tests_dir) else None export_dir = export_tests_dir if os.path.isdir(export_tests_dir) else None return data_dir, export_dir def _get_get_parameters(get_tests_dir, expected_tests_dir, config_path, suite_name, normalize_ids): """Return the parameters suitable for parametrizing HTTP GET tests.""" method = "GET" test_argvalues = [] test_ids = [] requests_file_path = os.path.join(get_tests_dir, "requests.txt") with open(requests_file_path) as fh: for line in fh: test_name, test_params = [i.strip() for i in line.partition(",")[::2]] expected_result_path = os.path.join( expected_tests_dir, "{method}_{name}.xml".format(method=method.lower(), name=test_name) ) test_argvalues.append( (config_path, method, test_params, expected_result_path, normalize_ids) ) test_ids.append( "{suite}_{http_method}_{name}".format( suite=suite_name, http_method=method.lower(), name=test_name) ) return test_argvalues, test_ids def _get_post_parameters(post_tests_dir, expected_tests_dir, config_path, suite_name, normalize_ids): """Return the parameters suitable for parametrizing HTTP POST tests.""" method = "POST" test_argvalues = [] test_ids = [] # we are sorting the directory contents because the # `harvesting` suite requires tests to be executed in alphabetical order directory_contents = sorted(os.listdir(post_tests_dir)) for request_file_name in directory_contents: request_path = os.path.join(post_tests_dir, request_file_name) 
        expected_result_path = os.path.join(
            expected_tests_dir,
            "{method}_{filename}".format(
                method=method.lower(),
                filename=request_file_name
            )
        )
        test_argvalues.append(
            (config_path, method, request_path,
             expected_result_path, normalize_ids)
        )
        test_ids.append(
            "{suite}_{http_method}_{file_name}".format(
                suite=suite_name, http_method=method.lower(),
                file_name=os.path.splitext(
                    request_file_name)[0])
        )
    return test_argvalues, test_ids


def _get_repository_url(conf, suite_name, test_dir):
    """Return the repository_url for the input parameters.

    Returns
    -------
    repository_url: str
        SQLAlchemy URL for the repository in use.

    """
    db_type = conf.getoption("--database-backend")
    if db_type == "sqlite":
        repository_url = "sqlite:///{test_dir}/{suite}.db".format(
            test_dir=test_dir, suite=suite_name)
    elif db_type == "postgresql":
        repository_url = (
            "postgresql://{user}:{password}@{host}:{port}/{database}".format(
                user=conf.getoption("--database-user-postgresql"),
                password=conf.getoption("--database-password-postgresql"),
                host=conf.getoption("--database-host-postgresql"),
                port=conf.getoption("--database-port-postgresql"),
                database=conf.getoption("--database-name-postgresql"))
        )
    else:
        raise NotImplementedError
    return repository_url


def _get_suite_dirs(suite_name):
    """Get the paths to relevant suite directories.

    Parameters
    ----------
    suite_name: str
        Name of the site

    Returns
    -------
    SuiteDirs
        A four element named tuple with the input suite's relevant test
        directories.
""" global TESTS_ROOT suites_root_dir = os.path.join(TESTS_ROOT, "functionaltests", "suites") suite_dir = os.path.join(suites_root_dir, suite_name) data_tests_dir = os.path.join(suite_dir, "data") post_tests_dir = os.path.join(suite_dir, "post") get_tests_dir = os.path.join(suite_dir, "get") export_tests_dir = os.path.join(suite_dir, "export") expected_results_dir = os.path.join(suite_dir, "expected") data_dir = data_tests_dir if os.path.isdir(data_tests_dir) else None posts_dir = post_tests_dir if os.path.isdir(post_tests_dir) else None gets_dir = get_tests_dir if os.path.isdir(get_tests_dir) else None expected_dir = (expected_results_dir if os.path.isdir( expected_results_dir) else None) export_dir = export_tests_dir if os.path.isdir(export_tests_dir) else None return SuiteDirs(get_tests_dir=gets_dir, post_tests_dir=posts_dir, data_tests_dir=data_dir, expected_results_dir=expected_dir, export_tests_dir=export_tests_dir) def _get_table_name(suite, config, repository_url): """Get the name of the table used to store records in the database. Parameters ---------- suite: str Name of the suite. config: ConfigParser Configuration for the suite. repository_url: str SQLAlchemy URL for the repository in use. Returns ------- str Name of the table to use in the database """ if repository_url.startswith("sqlite"): result = config.get("repository", "table") elif repository_url.startswith("postgresql"): result = "{suite}_records".format(suite=suite) else: raise NotImplementedError return result def _initialize_database(repository_url, table_name, data_dir, test_dir, export_dir): """Initialize database for tests. This function will create the database and load any test data that the suite may require. 
Parameters ---------- repository_url: str URL for the repository, as used by SQLAlchemy engines table_name: str Name of the table that is to be used to store pycsw records data_dir: str Path to a directory that contains sample data records to be loaded into the database test_dir: str Directory where the database is to be created, in case of sqlite. export_dir: str Diretory where the exported records are to be saved, if any """ print("Setting up {0!r} repository...".format(repository_url)) if repository_url.startswith("postgresql"): extra_kwargs = { "create_sfsql_tables": True, "create_plpythonu_functions": False } else: extra_kwargs = {} admin.setup_db(database=repository_url, table=table_name, home=test_dir, **extra_kwargs) if len(os.listdir(data_dir)) > 0: print("Loading database data...") loaded = admin.load_records( context=StaticContext(), database=repository_url, table=table_name, xml_dirpath=data_dir, recursive=True ) admin.optimize_db(context=StaticContext(), database=repository_url, table=table_name) if export_dir is not None: # Attempt to export files exported = admin.export_records( context=StaticContext(), database=repository_url, table=table_name, xml_dirpath=export_dir ) if len(loaded) != len(exported): raise ValueError( "Loaded records (%s) is different from exported records (%s)" % (len(loaded), len(exported)) ) # Remove the files that were exported since this was just a test for toremove in exported: os.remove(toremove) def _parse_postgresql_repository_url(repository_url): """Parse a SQLAlchemy engine URL describing a postgresql database. Parameters ---------- repository_url: str SQLAlchemy URL for the repository in use. Returns ------- dict A mapping with the database's connection parameters. 
""" info_re = re.search(r"postgresql://(?P<user>[\w_]+):(?P<password>.*?)@" r"(?P<host>[\w_.]+):(?P<port>\d+)/" r"(?P<database>[\w_]+)", repository_url, flags=re.UNICODE) try: db_info = info_re.groupdict() except AttributeError: raise RuntimeError("Could not parse repository url {0!r}".format( repository_url)) else: return db_info def _recreate_postgresql_database(configuration): """Recreate a postgresql database. This function will try to create a new postgresql database for testing purposes. If the database already exists it is deleted and then recreated. Parameters ---------- configuration: _pytest.config.Config The configuration object used by pytest Raises ------ RuntimeError If a connection to the postgresql server cannot be made """ connection = psycopg2.connect( database="postgres", user=configuration.getoption("--database-user-postgresql"), password=configuration.getoption("--database-password-postgresql"), host=configuration.getoption("--database-host-postgresql"), port=configuration.getoption("--database-port-postgresql") ) connection.set_isolation_level( psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) cursor = connection.cursor() db_name = configuration.getoption("--database-name-postgresql") cursor.execute("DROP DATABASE IF EXISTS {database}".format( database=db_name)) cursor.execute("CREATE DATABASE {database}".format(database=db_name)) cursor.execute( "SELECT COUNT(1) FROM pg_available_extensions WHERE name='postgis'") postgis_available = bool(cursor.fetchone()[0]) cursor.close() connection.close() if postgis_available: _create_postgresql_extension(configuration, extension="postgis") else: _create_postgresql_extension(configuration, extension="plpythonu") def _create_postgresql_extension(configuration, extension): """Create a postgresql extension in a previously created database. 
Parameters ---------- configuration: _pytest.config.Config The configuration object used by pytest extension: str Name of the extension to be created """ connection = psycopg2.connect( database=configuration.getoption("--database-name-postgresql"), user=configuration.getoption("--database-user-postgresql"), password=configuration.getoption("--database-password-postgresql"), host=configuration.getoption("--database-host-postgresql"), port=configuration.getoption("--database-port-postgresql") ) connection.set_isolation_level( psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) cursor = connection.cursor() cursor.execute("CREATE EXTENSION {0}".format(extension)) cursor.close() connection.close() def _repository_exists(repository_url, table_name): """Test if the database already exists. Parameters ---------- repository_url: str URL for the repository, as used by SQLAlchemy engines table_name: str Name of the table that is to be used to store pycsw records Returns ------- bool Whether the repository exists or not. """ if repository_url.startswith("sqlite"): repository_path = repository_url.replace("sqlite:///", "") result = os.path.isfile(repository_path) elif repository_url.startswith("postgresql"): db_info = _parse_postgresql_repository_url(repository_url) try: connection = psycopg2.connect(user=db_info["user"], password=db_info["password"], host=db_info["host"], port=db_info["port"], database=db_info["database"]) cursor = connection.cursor() cursor.execute("SELECT COUNT(1) FROM {table_name}".format( table_name=table_name)) except (psycopg2.OperationalError, psycopg2.ProgrammingError): # database or table does not exist yet result = False else: result = True else: raise NotImplementedError return result
axbaretto/beam
refs/heads/master
sdks/python/.tox/lint/lib/python2.7/site-packages/pylint/test/input/func_w0401_package/thing2.py
15
"""The second thing.""" # pylint: disable=no-absolute-import from .all_the_things import THING1 __revision__ = None THING2 = "I am thing2" THING1_PLUS_THING2 = "%s, plus %s" % (THING1, THING2)
anirudhjayaraman/Dato-Core
refs/heads/master
src/unity/python/graphlab/data_structures/gframe.py
13
'''
Copyright (C) 2015 Dato, Inc.
All rights reserved.

This software may be modified and distributed under the terms
of the BSD license. See the DATO-PYTHON-LICENSE file for details.
'''
from graphlab.data_structures.sframe import SFrame
# NOTE(review): SArray is imported twice; the later import from
# data_structures.sarray below shadows this binding.
from graphlab.data_structures.sframe import SArray
from graphlab.cython.context import debug_trace as cython_context
from graphlab.data_structures.sarray import SArray, _create_sequential_sarray
import copy

# Discriminator values for GFrame.__type__: whether the frame views the
# graph's vertex table or its edge table.
VERTEX_GFRAME = 0
EDGE_GFRAME = 1

class GFrame(SFrame):
    """
    GFrame is similar to SFrame but is associated with an SGraph.

    - GFrame can be obtained from either the `vertices` or `edges` attribute
    of any SGraph:

    >>> import graphlab
    >>> g = graphlab.load_sgraph(...)
    >>> vertices_gf = g.vertices
    >>> edges_gf = g.edges

    - GFrame has the same API as SFrame:

    >>> sa = vertices_gf['pagerank']

    >>> # column lambda transform
    >>> vertices_gf['pagerank'] = vertices_gf['pagerank'].apply(lambda x: 0.15 + 0.85 * x)

    >>> # frame lambda transform
    >>> vertices_gf['score'] = vertices_gf.apply(lambda x: 0.2 * x['triangle_count'] + 0.8 * x['pagerank'])

    >>> del vertices_gf['pagerank']

    - GFrame can be converted to SFrame:

    >>> # extract an SFrame
    >>> sf = vertices_gf.__to_sframe__()
    """
    def __init__(self, graph, gframe_type):
        # gframe_type is VERTEX_GFRAME or EDGE_GFRAME; the frame is a live
        # view over the given SGraph, materialized lazily via _get_cache().
        self.__type__ = gframe_type
        self.__graph__ = graph
        self.__sframe_cache__ = None  # cached SFrame snapshot of the view
        self.__is_dirty__ = False     # True when a mutation invalidated the cache

    def __to_sframe__(self):
        # Shallow-copy the cached snapshot so callers cannot mutate our cache.
        return copy.copy(self._get_cache())

    #/**************************************************************************/
    #/*                                                                        */
    #/*                             Modifiers                                  */
    #/*                                                                        */
    #/**************************************************************************/
    def add_column(self, data, name=""):
        """
        Adds the specified column to this SFrame.  The number of elements in
        the data given must match every other column of the SFrame.

        Parameters
        ----------
        data : SArray
            The 'column' of data.

        name : string
            The name of the column. If no name is given, a default name is chosen.
        """
        # Check type for pandas dataframe or SArray?
        if not isinstance(data, SArray):
            raise TypeError("Must give column as SArray")
        if not isinstance(name, str):
            raise TypeError("Invalid column name: must be str")
        # Mutation goes straight to the graph proxy; invalidate the cache first.
        self.__is_dirty__ = True
        with cython_context():
            if self._is_vertex_frame():
                graph_proxy = self.__graph__.__proxy__.add_vertex_field(data.__proxy__, name)
                self.__graph__.__proxy__ = graph_proxy
            elif self._is_edge_frame():
                graph_proxy = self.__graph__.__proxy__.add_edge_field(data.__proxy__, name)
                self.__graph__.__proxy__ = graph_proxy

    def add_columns(self, datalist, namelist):
        """
        Adds columns to the SFrame.  The number of elements in all columns must
        match every other column of the SFrame.

        Parameters
        ----------
        datalist : list of SArray
            A list of columns

        namelist : list of string
            A list of column names. All names must be specified.
        """
        if not hasattr(datalist, '__iter__'):
            raise TypeError("datalist must be an iterable")
        if not hasattr(namelist, '__iter__'):
            raise TypeError("namelist must be an iterable")
        if not all([isinstance(x, SArray) for x in datalist]):
            raise TypeError("Must give column as SArray")
        if not all([isinstance(x, str) for x in namelist]):
            raise TypeError("Invalid column name in list: must all be str")
        # Delegate each pair so add_column's validation/cache logic applies.
        for (data, name) in zip(datalist, namelist):
            self.add_column(data, name)

    def remove_column(self, name):
        """
        Removes the column with the given name from the SFrame.

        Parameters
        ----------
        name : string
            The name of the column to remove.
        """
        if name not in self.column_names():
            raise KeyError('Cannot find column %s' % name)
        self.__is_dirty__ = True
        try:
            with cython_context():
                if self._is_vertex_frame():
                    # The structural id columns can never be removed.
                    assert name != '__id', 'Cannot remove \"__id\" column'
                    graph_proxy = self.__graph__.__proxy__.delete_vertex_field(name)
                    self.__graph__.__proxy__ = graph_proxy
                elif self._is_edge_frame():
                    assert name != '__src_id', 'Cannot remove \"__src_id\" column'
                    assert name != '__dst_id', 'Cannot remove \"__dst_id\" column'
                    graph_proxy = self.__graph__.__proxy__.delete_edge_field(name)
                    self.__graph__.__proxy__ = graph_proxy
        except:
            # Mutation failed: the cached snapshot is still valid.
            self.__is_dirty__ = False
            raise

    def swap_columns(self, column_1, column_2):
        """
        Swaps the columns with the given names.

        Parameters
        ----------
        column_1 : string
            Name of column to swap

        column_2 : string
            Name of other column to swap
        """
        self.__is_dirty__ = True
        with cython_context():
            if self._is_vertex_frame():
                graph_proxy = self.__graph__.__proxy__.swap_vertex_fields(column_1, column_2)
                self.__graph__.__proxy__ = graph_proxy
            elif self._is_edge_frame():
                graph_proxy = self.__graph__.__proxy__.swap_edge_fields(column_1, column_2)
                self.__graph__.__proxy__ = graph_proxy

    def rename(self, names):
        """
        Rename the columns using the 'names' dict.  This changes the names of
        the columns given as the keys and replaces them with the names given as
        the values.

        Parameters
        ----------
        names : dict[string, string]
            Dictionary of [old_name, new_name]
        """
        if (type(names) is not dict):
            raise TypeError('names must be a dictionary: oldname -> newname')
        self.__is_dirty__ = True
        with cython_context():
            if self._is_vertex_frame():
                graph_proxy = self.__graph__.__proxy__.rename_vertex_fields(names.keys(), names.values())
                self.__graph__.__proxy__ = graph_proxy
            elif self._is_edge_frame():
                graph_proxy = self.__graph__.__proxy__.rename_edge_fields(names.keys(), names.values())
                self.__graph__.__proxy__ = graph_proxy

    def add_row_number(self, column_name='id', start=0):
        # Add a sequential integer column (start, start+1, ...) named
        # column_name; returns self for chaining.
        if type(column_name) is not str:
            raise TypeError("Must give column_name as str")
        if column_name in self.column_names():
            raise RuntimeError("Column name %s already exists" % str(column_name))
        if type(start) is not int:
            raise TypeError("Must give start as int")
        the_col = _create_sequential_sarray(self.num_rows(), start)
        self[column_name] = the_col
        return self

    def __setitem__(self, key, value):
        """
        A wrapper around add_column(s).  Key can be either a list or a str.  If
        value is an SArray, it is added to the SFrame as a column.  If it is a
        constant value (int, str, or float), then a column is created where
        every entry is equal to the constant value.  Existing columns can also
        be replaced using this wrapper.
        """
        if (key in ['__id', '__src_id', '__dst_id']):
            raise KeyError('Cannot modify column %s. Changing __id column will\
                    change the graph structure' % key)
        else:
            self.__is_dirty__ = True
            super(GFrame, self).__setitem__(key, value)

    #/**************************************************************************/
    #/*                                                                        */
    #/*                         Read-only Accessor                             */
    #/*                                                                        */
    #/**************************************************************************/
    def num_rows(self):
        """
        Returns the number of rows.

        Returns
        -------
        out : int
            Number of rows in the SFrame.
        """
        if self._is_vertex_frame():
            return self.__graph__.summary()['num_vertices']
        elif self._is_edge_frame():
            return self.__graph__.summary()['num_edges']

    def num_cols(self):
        """
        Returns the number of columns.

        Returns
        -------
        out : int
            Number of columns in the SFrame.
        """
        return len(self.column_names())

    def column_names(self):
        """
        Returns the column names.

        Returns
        -------
        out : list[string]
            Column names of the SFrame.
        """
        if self._is_vertex_frame():
            return self.__graph__.__proxy__.get_vertex_fields()
        elif self._is_edge_frame():
            return self.__graph__.__proxy__.get_edge_fields()

    def column_types(self):
        """
        Returns the column types.

        Returns
        -------
        out : list[type]
            Column types of the SFrame.
        """
        # NOTE(review): compares __type__ directly instead of using the
        # _is_vertex_frame()/_is_edge_frame() helpers the other accessors use.
        if self.__type__ == VERTEX_GFRAME:
            return self.__graph__.__proxy__.get_vertex_field_types()
        elif self.__type__ == EDGE_GFRAME:
            return self.__graph__.__proxy__.get_edge_field_types()

    #/**************************************************************************/
    #/*                                                                        */
    #/*                       Internal Private Methods                         */
    #/*                                                                        */
    #/**************************************************************************/
    def _get_cache(self):
        # Lazily (re)materialize the SFrame snapshot of the graph view.
        if self.__sframe_cache__ is None or self.__is_dirty__:
            if self._is_vertex_frame():
                self.__sframe_cache__ = self.__graph__.get_vertices()
            elif self._is_edge_frame():
                self.__sframe_cache__ = self.__graph__.get_edges()
            else:
                raise TypeError
            self.__is_dirty__ = False
        return self.__sframe_cache__

    def _is_vertex_frame(self):
        return self.__type__ == VERTEX_GFRAME

    def _is_edge_frame(self):
        return self.__type__ == EDGE_GFRAME

    @property
    def __proxy__(self):
        # SFrame machinery reads __proxy__; serve it from the cached snapshot.
        return self._get_cache().__proxy__
HopeFOAM/HopeFOAM
refs/heads/master
ThirdParty-0.1/ParaView-5.0.1/VTK/Filters/Core/Testing/Python/TestFlyingEdgesPlaneCutter.py
5
#!/usr/bin/env python
# Regression test: cut a volume-sampled implicit sphere with a plane and time
# the cutter. Can exercise either vtkFlyingEdgesPlaneCutter or the classic
# vtkCutter pipeline for comparison.
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()

# Toggle: 1 = flying-edges plane cutter, 0 = generic vtkCutter baseline.
useFECutter = 1
# Volume resolution: the sample grid is res x res x res points.
res = 100

# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.SetMultiSamples(0)
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)

# Create a synthetic source: sample a sphere across a volume
sphere = vtk.vtkSphere()
sphere.SetCenter( 0.0,0.0,0.0)
sphere.SetRadius(0.25)

sample = vtk.vtkSampleFunction()
sample.SetImplicitFunction(sphere)
sample.SetModelBounds(-0.5,0.5, -0.5,0.5, -0.5,0.5)
sample.SetSampleDimensions(res,res,res)
sample.Update()

# The cut plane
plane = vtk.vtkPlane()
plane.SetOrigin(0,0,0)
plane.SetNormal(1,1,1)

if useFECutter:
    cut = vtk.vtkFlyingEdgesPlaneCutter()
    cut.SetInputConnection(sample.GetOutputPort())
    cut.SetPlane(plane)
    cut.ComputeNormalsOff() #make it equivalent to vtkCutter
else:
    # Compare against previous method
    cut = vtk.vtkCutter()
    cut.SetInputConnection(sample.GetOutputPort())
    cut.SetCutFunction(plane)
    cut.SetValue(0,0.0)

# Time the execution of the filter w/out scalar tree
CG_timer = vtk.vtkExecutionTimer()
CG_timer.SetFilter(cut)
cut.Update()
CG = CG_timer.GetElapsedWallClockTime()
print ("Cut volume:", CG)

# Render the cut surface in white.
cutMapper = vtk.vtkPolyDataMapper()
cutMapper.SetInputConnection(cut.GetOutputPort())
cutActor = vtk.vtkActor()
cutActor.SetMapper(cutMapper)
cutActor.GetProperty().SetColor(1,1,1)
cutActor.GetProperty().SetOpacity(1)

# Outline of the sampled volume for spatial context.
outline = vtk.vtkOutlineFilter()
outline.SetInputConnection(sample.GetOutputPort())
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)
outlineProp = outlineActor.GetProperty()

# Add the actors to the renderer, set the background and size
#
ren1.AddActor(outlineActor)
ren1.AddActor(cutActor)
ren1.SetBackground(0,0,0)
renWin.SetSize(400,400)
ren1.ResetCamera()
iren.Initialize()
renWin.Render()
# --- end of script --
#iren.Start()
spock1104/android_kernel_zte_msm8960
refs/heads/master
Documentation/target/tcm_mod_builder.py
4981
#!/usr/bin/python # The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD # # Copyright (c) 2010 Rising Tide Systems # Copyright (c) 2010 Linux-iSCSI.org # # Author: nab@kernel.org # import os, sys import subprocess as sub import string import re import optparse tcm_dir = "" fabric_ops = [] fabric_mod_dir = "" fabric_mod_port = "" fabric_mod_init_port = "" def tcm_mod_err(msg): print msg sys.exit(1) def tcm_mod_create_module_subdir(fabric_mod_dir_var): if os.path.isdir(fabric_mod_dir_var) == True: return 1 print "Creating fabric_mod_dir: " + fabric_mod_dir_var ret = os.mkdir(fabric_mod_dir_var) if ret: tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var) return def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name): global fabric_mod_port global fabric_mod_init_port buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n" buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n" buf += "\n" buf += "struct " + fabric_mod_name + "_nacl {\n" buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n" buf += " u64 nport_wwpn;\n" buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n" buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n" buf += " struct se_node_acl se_node_acl;\n" buf += "};\n" buf += "\n" buf += "struct " + fabric_mod_name + "_tpg {\n" buf += " /* FC lport target portal group tag for TCM */\n" buf += " u16 lport_tpgt;\n" buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n" buf += " struct " + fabric_mod_name + "_lport *lport;\n" buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n" buf += " struct se_portal_group se_tpg;\n" buf += "};\n" buf += "\n" buf += "struct " + fabric_mod_name + "_lport {\n" buf += " 
/* SCSI protocol the lport is providing */\n" buf += " u8 lport_proto_id;\n" buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n" buf += " u64 lport_wwpn;\n" buf += " /* ASCII formatted WWPN for FC Target Lport */\n" buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n" buf += " struct se_wwn lport_wwn;\n" buf += "};\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() fabric_mod_port = "lport" fabric_mod_init_port = "nport" return def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name): global fabric_mod_port global fabric_mod_init_port buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n" buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n" buf += "\n" buf += "struct " + fabric_mod_name + "_nacl {\n" buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n" buf += " u64 iport_wwpn;\n" buf += " /* ASCII formatted WWPN for Sas Initiator port */\n" buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n" buf += " struct se_node_acl se_node_acl;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tpg {\n" buf += " /* SAS port target portal group tag for TCM */\n" buf += " u16 tport_tpgt;\n" buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n" buf += " struct " + fabric_mod_name + "_tport *tport;\n" buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n" buf += " struct se_portal_group se_tpg;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tport {\n" buf += " /* SCSI protocol the tport is providing */\n" buf += " u8 tport_proto_id;\n" buf += " /* Binary World Wide unique Port Name for SAS Target port */\n" buf += 
" u64 tport_wwpn;\n" buf += " /* ASCII formatted WWPN for SAS Target port */\n" buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n" buf += " struct se_wwn tport_wwn;\n" buf += "};\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() fabric_mod_port = "tport" fabric_mod_init_port = "iport" return def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name): global fabric_mod_port global fabric_mod_init_port buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n" buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n" buf += "\n" buf += "struct " + fabric_mod_name + "_nacl {\n" buf += " /* ASCII formatted InitiatorName */\n" buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n" buf += " struct se_node_acl se_node_acl;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tpg {\n" buf += " /* iSCSI target portal group tag for TCM */\n" buf += " u16 tport_tpgt;\n" buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n" buf += " struct " + fabric_mod_name + "_tport *tport;\n" buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n" buf += " struct se_portal_group se_tpg;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tport {\n" buf += " /* SCSI protocol the tport is providing */\n" buf += " u8 tport_proto_id;\n" buf += " /* ASCII formatted TargetName for IQN */\n" buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n" buf += " struct se_wwn tport_wwn;\n" buf += "};\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() fabric_mod_port = "tport" 
fabric_mod_init_port = "iport" return def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name): if proto_ident == "FC": tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name) elif proto_ident == "SAS": tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name) elif proto_ident == "iSCSI": tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name) else: print "Unsupported proto_ident: " + proto_ident sys.exit(1) return def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name): buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#include <linux/module.h>\n" buf += "#include <linux/moduleparam.h>\n" buf += "#include <linux/version.h>\n" buf += "#include <generated/utsrelease.h>\n" buf += "#include <linux/utsname.h>\n" buf += "#include <linux/init.h>\n" buf += "#include <linux/slab.h>\n" buf += "#include <linux/kthread.h>\n" buf += "#include <linux/types.h>\n" buf += "#include <linux/string.h>\n" buf += "#include <linux/configfs.h>\n" buf += "#include <linux/ctype.h>\n" buf += "#include <asm/unaligned.h>\n\n" buf += "#include <target/target_core_base.h>\n" buf += "#include <target/target_core_fabric.h>\n" buf += "#include <target/target_core_fabric_configfs.h>\n" buf += "#include <target/target_core_configfs.h>\n" buf += "#include <target/configfs_macros.h>\n\n" buf += "#include \"" + fabric_mod_name + "_base.h\"\n" buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n" buf += "/* Local pointer to allocated TCM configfs fabric module */\n" buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n" buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct config_group *group,\n" buf += " const char *name)\n" buf += "{\n" buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n" buf += " 
struct " + fabric_mod_name + "_nacl *nacl;\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " u64 wwpn = 0;\n" buf += " u32 nexus_depth;\n\n" buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n" buf += " return ERR_PTR(-EINVAL); */\n" buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n" buf += " if (!se_nacl_new)\n" buf += " return ERR_PTR(-ENOMEM);\n" buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n" buf += " nexus_depth = 1;\n" buf += " /*\n" buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n" buf += " * when converting a NodeACL from demo mode -> explict\n" buf += " */\n" buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n" buf += " name, nexus_depth);\n" buf += " if (IS_ERR(se_nacl)) {\n" buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n" buf += " return se_nacl;\n" buf += " }\n" buf += " /*\n" buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n" buf += " */\n" buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n" buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n" buf += " return se_nacl;\n" buf += "}\n\n" buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n" buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n" buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n" buf += " kfree(nacl);\n" buf += "}\n\n" buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n" buf += " struct se_wwn *wwn,\n" buf += " struct config_group *group,\n" buf += " const char 
*name)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n" buf += " struct " + fabric_mod_name + "_tpg *tpg;\n" buf += " unsigned long tpgt;\n" buf += " int ret;\n\n" buf += " if (strstr(name, \"tpgt_\") != name)\n" buf += " return ERR_PTR(-EINVAL);\n" buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n" buf += " return ERR_PTR(-EINVAL);\n\n" buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n" buf += " if (!tpg) {\n" buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n" buf += " return ERR_PTR(-ENOMEM);\n" buf += " }\n" buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n" buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n" buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n" buf += " &tpg->se_tpg, (void *)tpg,\n" buf += " TRANSPORT_TPG_TYPE_NORMAL);\n" buf += " if (ret < 0) {\n" buf += " kfree(tpg);\n" buf += " return NULL;\n" buf += " }\n" buf += " return &tpg->se_tpg;\n" buf += "}\n\n" buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n" buf += " core_tpg_deregister(se_tpg);\n" buf += " kfree(tpg);\n" buf += "}\n\n" buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n" buf += " struct target_fabric_configfs *tf,\n" buf += " struct config_group *group,\n" buf += " const char *name)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " u64 wwpn = 0;\n\n" buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n" buf += 
" return ERR_PTR(-EINVAL); */\n\n" buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n" buf += " if (!" + fabric_mod_port + ") {\n" buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n" buf += " return ERR_PTR(-ENOMEM);\n" buf += " }\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n" buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n" buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n" buf += "}\n\n" buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n" buf += " kfree(" + fabric_mod_port + ");\n" buf += "}\n\n" buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n" buf += " struct target_fabric_configfs *tf,\n" buf += " char *page)\n" buf += "{\n" buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n" buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n" buf += " utsname()->machine);\n" buf += "}\n\n" buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n" buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n" buf += " &" + fabric_mod_name + "_wwn_version.attr,\n" buf += " NULL,\n" buf += "};\n\n" buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n" buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n" buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n" buf += " 
.tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n" buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n" buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n" buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n" buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n" buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n" buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n" buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n" buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n" buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n" buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n" buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n" buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n" buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n" buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n" buf += " .close_session = " + fabric_mod_name + "_close_session,\n" buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n" buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n" buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n" buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n" buf += " .sess_get_initiator_sid = NULL,\n" buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n" buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n" buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n" buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n" buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n" buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n" 
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n" buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n" buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n" buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n" buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n" buf += " /*\n" buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n" buf += " */\n" buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n" buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n" buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n" buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n" buf += " .fabric_post_link = NULL,\n" buf += " .fabric_pre_unlink = NULL,\n" buf += " .fabric_make_np = NULL,\n" buf += " .fabric_drop_np = NULL,\n" buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n" buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n" buf += "};\n\n" buf += "static int " + fabric_mod_name + "_register_configfs(void)\n" buf += "{\n" buf += " struct target_fabric_configfs *fabric;\n" buf += " int ret;\n\n" buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n" buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n" buf += " utsname()->machine);\n" buf += " /*\n" buf += " * Register the top level struct config_item_type with TCM core\n" buf += " */\n" buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n" buf += " if (IS_ERR(fabric)) {\n" buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n" buf += " return PTR_ERR(fabric);\n" buf += " }\n" buf += " /*\n" buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n" buf += " */\n" buf += " fabric->tf_ops = " + fabric_mod_name + 
"_ops;\n" buf += " /*\n" buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n" buf += " */\n" buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n" buf += " /*\n" buf += " * Register the fabric for use within TCM\n" buf += " */\n" buf += " ret = target_fabric_configfs_register(fabric);\n" buf += " if (ret < 0) {\n" buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n" buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n" buf += " return ret;\n" buf += " }\n" buf += " /*\n" buf += " * Setup our local pointer to *fabric\n" buf += " */\n" buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n" buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n" buf += " return 0;\n" buf += "};\n\n" buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n" buf += "{\n" buf += " if (!" 
+ fabric_mod_name + "_fabric_configfs)\n" buf += " return;\n\n" buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n" buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n" buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n" buf += "};\n\n" buf += "static int __init " + fabric_mod_name + "_init(void)\n" buf += "{\n" buf += " int ret;\n\n" buf += " ret = " + fabric_mod_name + "_register_configfs();\n" buf += " if (ret < 0)\n" buf += " return ret;\n\n" buf += " return 0;\n" buf += "};\n\n" buf += "static void __exit " + fabric_mod_name + "_exit(void)\n" buf += "{\n" buf += " " + fabric_mod_name + "_deregister_configfs();\n" buf += "};\n\n" buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n" buf += "MODULE_LICENSE(\"GPL\");\n" buf += "module_init(" + fabric_mod_name + "_init);\n" buf += "module_exit(" + fabric_mod_name + "_exit);\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() return def tcm_mod_scan_fabric_ops(tcm_dir): fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h" print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api process_fo = 0; p = open(fabric_ops_api, 'r') line = p.readline() while line: if process_fo == 0 and re.search('struct target_core_fabric_ops {', line): line = p.readline() continue if process_fo == 0: process_fo = 1; line = p.readline() # Search for function pointer if not re.search('\(\*', line): continue fabric_ops.append(line.rstrip()) continue line = p.readline() # Search for function pointer if not re.search('\(\*', line): continue fabric_ops.append(line.rstrip()) p.close() return def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name): buf = "" bufi = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c" print "Writing file: " + f p = open(f, 'w') if not p: tcm_mod_err("Unable to open file: " + f) fi = 
fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h" print "Writing file: " + fi pi = open(fi, 'w') if not pi: tcm_mod_err("Unable to open file: " + fi) buf = "#include <linux/slab.h>\n" buf += "#include <linux/kthread.h>\n" buf += "#include <linux/types.h>\n" buf += "#include <linux/list.h>\n" buf += "#include <linux/types.h>\n" buf += "#include <linux/string.h>\n" buf += "#include <linux/ctype.h>\n" buf += "#include <asm/unaligned.h>\n" buf += "#include <scsi/scsi.h>\n" buf += "#include <scsi/scsi_host.h>\n" buf += "#include <scsi/scsi_device.h>\n" buf += "#include <scsi/scsi_cmnd.h>\n" buf += "#include <scsi/libfc.h>\n\n" buf += "#include <target/target_core_base.h>\n" buf += "#include <target/target_core_fabric.h>\n" buf += "#include <target/target_core_configfs.h>\n\n" buf += "#include \"" + fabric_mod_name + "_base.h\"\n" buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n" buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 1;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n" buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n" total_fabric_ops = len(fabric_ops) i = 0 while i < total_fabric_ops: fo = fabric_ops[i] i += 1 # print "fabric_ops: " + fo if re.search('get_fabric_name', fo): buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n" buf += "{\n" buf += " return \"" + fabric_mod_name[4:] + "\";\n" buf += "}\n\n" bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n" continue if re.search('get_fabric_proto_ident', fo): buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct 
" + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " u8 proto_id;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n" buf += " break;\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n" buf += " break;\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n" buf += " break;\n" buf += " }\n\n" buf += " return proto_id;\n" buf += "}\n\n" bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n" if re.search('get_wwn', fo): buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n" buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n" buf += "}\n\n" bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n" if re.search('get_tag', fo): buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " return tpg->" + fabric_mod_port + "_tpgt;\n" buf += "}\n\n" bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n" if re.search('get_default_depth', fo): buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 1;\n" buf += "}\n\n" bufi 
+= "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n" if re.search('get_pr_transport_id\)\(', fo): buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct se_node_acl *se_nacl,\n" buf += " struct t10_pr_registration *pr_reg,\n" buf += " int *format_code,\n" buf += " unsigned char *buf)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " int ret = 0;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n" buf += " format_code, buf);\n" buf += " break;\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n" buf += " format_code, buf);\n" buf += " break;\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n" buf += " format_code, buf);\n" buf += " break;\n" buf += " }\n\n" buf += " return ret;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n" bufi += " struct se_node_acl *, struct t10_pr_registration *,\n" bufi += " int *, unsigned char *);\n" if re.search('get_pr_transport_id_len\)\(', fo): buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct se_node_acl *se_nacl,\n" buf += " struct t10_pr_registration *pr_reg,\n" buf += " int *format_code)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + 
fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " int ret = 0;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n" buf += " format_code);\n" buf += " break;\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n" buf += " format_code);\n" buf += " break;\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n" buf += " format_code);\n" buf += " break;\n" buf += " }\n\n" buf += " return ret;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n" bufi += " struct se_node_acl *, struct t10_pr_registration *,\n" bufi += " int *);\n" if re.search('parse_pr_out_transport_id\)\(', fo): buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " const char *buf,\n" buf += " u32 *out_tid_len,\n" buf += " char **port_nexus_ptr)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " char *tid = NULL;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n" buf += " port_nexus_ptr);\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " 
default:\n" buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n" buf += " port_nexus_ptr);\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n" buf += " port_nexus_ptr);\n" buf += " }\n\n" buf += " return tid;\n" buf += "}\n\n" bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n" bufi += " const char *, u32 *, char **);\n" if re.search('alloc_fabric_acl\)\(', fo): buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n" buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n" buf += " if (!nacl) {\n" buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n" buf += " return NULL;\n" buf += " }\n\n" buf += " return &nacl->se_node_acl;\n" buf += "}\n\n" bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n" if re.search('release_fabric_acl\)\(', fo): buf += "void " + fabric_mod_name + "_release_fabric_acl(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct se_node_acl *se_nacl)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n" buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n" buf += " kfree(nacl);\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n" bufi += " struct se_node_acl *);\n" if re.search('tpg_get_inst_index\)\(', fo): buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 1;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n" if re.search('\*release_cmd\)\(', fo): buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd 
*se_cmd)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n" if re.search('shutdown_session\)\(', fo): buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n" if re.search('close_session\)\(', fo): buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n" if re.search('stop_session\)\(', fo): buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n" if re.search('fall_back_to_erl0\)\(', fo): buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n" if re.search('sess_logged_in\)\(', fo): buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n" if re.search('sess_get_index\)\(', fo): buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n" if re.search('write_pending\)\(', fo): buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n" if re.search('write_pending_status\)\(', fo): buf += "int " + fabric_mod_name + 
"_write_pending_status(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n" if re.search('set_default_node_attributes\)\(', fo): buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n" if re.search('get_task_tag\)\(', fo): buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n" if re.search('get_cmd_state\)\(', fo): buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n" if re.search('queue_data_in\)\(', fo): buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n" if re.search('queue_status\)\(', fo): buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n" if re.search('queue_tm_rsp\)\(', fo): buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n" if re.search('get_fabric_sense_len\)\(', fo): buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n" if re.search('set_fabric_sense_len\)\(', fo): buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 
sense_length)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n" if re.search('is_state_remove\)\(', fo): buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() ret = pi.write(bufi) if ret: tcm_mod_err("Unable to write fi: " + fi) pi.close() return def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name): buf = "" f = fabric_mod_dir_var + "/Makefile" print "Writing file: " + f p = open(f, 'w') if not p: tcm_mod_err("Unable to open file: " + f) buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n" buf += " " + fabric_mod_name + "_configfs.o\n" buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() return def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name): buf = "" f = fabric_mod_dir_var + "/Kconfig" print "Writing file: " + f p = open(f, 'w') if not p: tcm_mod_err("Unable to open file: " + f) buf = "config " + fabric_mod_name.upper() + "\n" buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n" buf += " depends on TARGET_CORE && CONFIGFS_FS\n" buf += " default n\n" buf += " ---help---\n" buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() return def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name): buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n" kbuild = tcm_dir + "/drivers/target/Makefile" f = open(kbuild, 'a') f.write(buf) f.close() return def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name): buf = "source \"drivers/target/" + fabric_mod_name.lower() + 
"/Kconfig\"\n" kconfig = tcm_dir + "/drivers/target/Kconfig" f = open(kconfig, 'a') f.write(buf) f.close() return def main(modname, proto_ident): # proto_ident = "FC" # proto_ident = "SAS" # proto_ident = "iSCSI" tcm_dir = os.getcwd(); tcm_dir += "/../../" print "tcm_dir: " + tcm_dir fabric_mod_name = modname fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name print "Set fabric_mod_name: " + fabric_mod_name print "Set fabric_mod_dir: " + fabric_mod_dir print "Using proto_ident: " + proto_ident if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI": print "Unsupported proto_ident: " + proto_ident sys.exit(1) ret = tcm_mod_create_module_subdir(fabric_mod_dir) if ret: print "tcm_mod_create_module_subdir() failed because module already exists!" sys.exit(1) tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name) tcm_mod_scan_fabric_ops(tcm_dir) tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name) tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name) tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name) tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name) input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ") if input == "yes" or input == "y": tcm_mod_add_kbuild(tcm_dir, fabric_mod_name) input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? 
[yes,no]: ") if input == "yes" or input == "y": tcm_mod_add_kconfig(tcm_dir, fabric_mod_name) return parser = optparse.OptionParser() parser.add_option('-m', '--modulename', help='Module name', dest='modname', action='store', nargs=1, type='string') parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident', action='store', nargs=1, type='string') (opts, args) = parser.parse_args() mandatories = ['modname', 'protoident'] for m in mandatories: if not opts.__dict__[m]: print "mandatory option is missing\n" parser.print_help() exit(-1) if __name__ == "__main__": main(str(opts.modname), opts.protoident)
ihsanudin/odoo
refs/heads/8.0
addons/auth_crypt/auth_crypt.py
179
import logging from passlib.context import CryptContext import openerp from openerp.osv import fields, osv from openerp.addons.base.res import res_users res_users.USER_PRIVATE_FIELDS.append('password_crypt') _logger = logging.getLogger(__name__) default_crypt_context = CryptContext( # kdf which can be verified by the context. The default encryption kdf is # the first of the list ['pbkdf2_sha512', 'md5_crypt'], # deprecated algorithms are still verified as usual, but ``needs_update`` # will indicate that the stored hash should be replaced by a more recent # algorithm. Passlib 1.6 supports an `auto` value which deprecates any # algorithm but the default, but Debian only provides 1.5 so... deprecated=['md5_crypt'], ) class res_users(osv.osv): _inherit = "res.users" def init(self, cr): _logger.info("Hashing passwords, may be slow for databases with many users...") cr.execute("SELECT id, password FROM res_users" " WHERE password IS NOT NULL" " AND password != ''") for uid, pwd in cr.fetchall(): self._set_password(cr, openerp.SUPERUSER_ID, uid, pwd) def set_pw(self, cr, uid, id, name, value, args, context): if value: self._set_password(cr, uid, id, value, context=context) self.invalidate_cache(cr, uid, context=context) def get_pw( self, cr, uid, ids, name, args, context ): cr.execute('select id, password from res_users where id in %s', (tuple(map(int, ids)),)) return dict(cr.fetchall()) _columns = { 'password': fields.function(get_pw, fnct_inv=set_pw, type='char', string='Password', invisible=True, store=True), 'password_crypt': fields.char(string='Encrypted Password', invisible=True, copy=False), } def check_credentials(self, cr, uid, password): # convert to base_crypt if needed cr.execute('SELECT password, password_crypt FROM res_users WHERE id=%s AND active', (uid,)) encrypted = None if cr.rowcount: stored, encrypted = cr.fetchone() if stored and not encrypted: self._set_password(cr, uid, uid, stored) self.invalidate_cache(cr, uid) try: return super(res_users, 
self).check_credentials(cr, uid, password) except openerp.exceptions.AccessDenied: if encrypted: valid_pass, replacement = self._crypt_context(cr, uid, uid)\ .verify_and_update(password, encrypted) if replacement is not None: self._set_encrypted_password(cr, uid, uid, replacement) if valid_pass: return raise def _set_password(self, cr, uid, id, password, context=None): """ Encrypts then stores the provided plaintext password for the user ``id`` """ encrypted = self._crypt_context(cr, uid, id, context=context).encrypt(password) self._set_encrypted_password(cr, uid, id, encrypted, context=context) def _set_encrypted_password(self, cr, uid, id, encrypted, context=None): """ Store the provided encrypted password to the database, and clears any plaintext password :param uid: id of the current user :param id: id of the user on which the password should be set """ cr.execute( "UPDATE res_users SET password='', password_crypt=%s WHERE id=%s", (encrypted, id)) def _crypt_context(self, cr, uid, id, context=None): """ Passlib CryptContext instance used to encrypt and verify passwords. Can be overridden if technical, legal or political matters require different kdfs than the provided default. Requires a CryptContext as deprecation and upgrade notices are used internally """ return default_crypt_context # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
jicruz/heroku-bot
refs/heads/master
lib/youtube_dl/extractor/curiositystream.py
32
# coding: utf-8 from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ( int_or_none, urlencode_postdata, compat_str, ExtractorError, ) class CuriosityStreamBaseIE(InfoExtractor): _NETRC_MACHINE = 'curiositystream' _auth_token = None _API_BASE_URL = 'https://api.curiositystream.com/v1/' def _handle_errors(self, result): error = result.get('error', {}).get('message') if error: if isinstance(error, dict): error = ', '.join(error.values()) raise ExtractorError( '%s said: %s' % (self.IE_NAME, error), expected=True) def _call_api(self, path, video_id): headers = {} if self._auth_token: headers['X-Auth-Token'] = self._auth_token result = self._download_json( self._API_BASE_URL + path, video_id, headers=headers) self._handle_errors(result) return result['data'] def _real_initialize(self): (email, password) = self._get_login_info() if email is None: return result = self._download_json( self._API_BASE_URL + 'login', None, data=urlencode_postdata({ 'email': email, 'password': password, })) self._handle_errors(result) self._auth_token = result['message']['auth_token'] def _extract_media_info(self, media): video_id = compat_str(media['id']) title = media['title'] formats = [] for encoding in media.get('encodings', []): m3u8_url = encoding.get('master_playlist_url') if m3u8_url: formats.extend(self._extract_m3u8_formats( m3u8_url, video_id, 'mp4', 'm3u8_native', m3u8_id='hls', fatal=False)) encoding_url = encoding.get('url') file_url = encoding.get('file_url') if not encoding_url and not file_url: continue f = { 'width': int_or_none(encoding.get('width')), 'height': int_or_none(encoding.get('height')), 'vbr': int_or_none(encoding.get('video_bitrate')), 'abr': int_or_none(encoding.get('audio_bitrate')), 'filesize': int_or_none(encoding.get('size_in_bytes')), 'vcodec': encoding.get('video_codec'), 'acodec': encoding.get('audio_codec'), 'container': encoding.get('container_type'), } for f_url in (encoding_url, file_url): if not 
f_url: continue fmt = f.copy() rtmp = re.search(r'^(?P<url>rtmpe?://(?P<host>[^/]+)/(?P<app>.+))/(?P<playpath>mp[34]:.+)$', f_url) if rtmp: fmt.update({ 'url': rtmp.group('url'), 'play_path': rtmp.group('playpath'), 'app': rtmp.group('app'), 'ext': 'flv', 'format_id': 'rtmp', }) else: fmt.update({ 'url': f_url, 'format_id': 'http', }) formats.append(fmt) self._sort_formats(formats) subtitles = {} for closed_caption in media.get('closed_captions', []): sub_url = closed_caption.get('file') if not sub_url: continue lang = closed_caption.get('code') or closed_caption.get('language') or 'en' subtitles.setdefault(lang, []).append({ 'url': sub_url, }) return { 'id': video_id, 'formats': formats, 'title': title, 'description': media.get('description'), 'thumbnail': media.get('image_large') or media.get('image_medium') or media.get('image_small'), 'duration': int_or_none(media.get('duration')), 'tags': media.get('tags'), 'subtitles': subtitles, } class CuriosityStreamIE(CuriosityStreamBaseIE): IE_NAME = 'curiositystream' _VALID_URL = r'https?://app\.curiositystream\.com/video/(?P<id>\d+)' _TEST = { 'url': 'https://app.curiositystream.com/video/2', 'md5': '262bb2f257ff301115f1973540de8983', 'info_dict': { 'id': '2', 'ext': 'mp4', 'title': 'How Did You Develop The Internet?', 'description': 'Vint Cerf, Google\'s Chief Internet Evangelist, describes how he and Bob Kahn created the internet.', } } def _real_extract(self, url): video_id = self._match_id(url) media = self._call_api('media/' + video_id, video_id) return self._extract_media_info(media) class CuriosityStreamCollectionIE(CuriosityStreamBaseIE): IE_NAME = 'curiositystream:collection' _VALID_URL = r'https?://app\.curiositystream\.com/collection/(?P<id>\d+)' _TEST = { 'url': 'https://app.curiositystream.com/collection/2', 'info_dict': { 'id': '2', 'title': 'Curious Minds: The Internet', 'description': 'How is the internet shaping our lives in the 21st Century?', }, 'playlist_mincount': 12, } def _real_extract(self, 
url): collection_id = self._match_id(url) collection = self._call_api( 'collections/' + collection_id, collection_id) entries = [] for media in collection.get('media', []): entries.append(self._extract_media_info(media)) return self.playlist_result( entries, collection_id, collection.get('title'), collection.get('description'))
11craft/immercv
refs/heads/master
immercv/contrib/__init__.py
14224
# -*- coding: utf-8 -*-
disruptek/boto
refs/heads/develop
boto/ec2/spotpricehistory.py
152
# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. """ Represents an EC2 Spot Instance Request """ from boto.ec2.ec2object import EC2Object class SpotPriceHistory(EC2Object): def __init__(self, connection=None): super(SpotPriceHistory, self).__init__(connection) self.price = 0.0 self.instance_type = None self.product_description = None self.timestamp = None self.availability_zone = None def __repr__(self): return 'SpotPriceHistory(%s):%2f' % (self.instance_type, self.price) def endElement(self, name, value, connection): if name == 'instanceType': self.instance_type = value elif name == 'spotPrice': self.price = float(value) elif name == 'productDescription': self.product_description = value elif name == 'timestamp': self.timestamp = value elif name == 'availabilityZone': self.availability_zone = value else: setattr(self, name, value)
int19h/PTVS
refs/heads/master
Python/Tests/TestData/VirtualEnv/env/Lib/genericpath.py
246
""" Path operations common to more than one OS Do not use directly. The OS specific modules import the appropriate functions from this module themselves. """ import os import stat __all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime', 'getsize', 'isdir', 'isfile'] # Does a path exist? # This is false for dangling symbolic links on systems that support them. def exists(path): """Test whether a path exists. Returns False for broken symbolic links""" try: os.stat(path) except os.error: return False return True # This follows symbolic links, so both islink() and isdir() can be true # for the same path ono systems that support symlinks def isfile(path): """Test whether a path is a regular file""" try: st = os.stat(path) except os.error: return False return stat.S_ISREG(st.st_mode) # Is a path a directory? # This follows symbolic links, so both islink() and isdir() # can be true for the same path on systems that support symlinks def isdir(s): """Return true if the pathname refers to an existing directory.""" try: st = os.stat(s) except os.error: return False return stat.S_ISDIR(st.st_mode) def getsize(filename): """Return the size of a file, reported by os.stat().""" return os.stat(filename).st_size def getmtime(filename): """Return the last modification time of a file, reported by os.stat().""" return os.stat(filename).st_mtime def getatime(filename): """Return the last access time of a file, reported by os.stat().""" return os.stat(filename).st_atime def getctime(filename): """Return the metadata change time of a file, reported by os.stat().""" return os.stat(filename).st_ctime # Return the longest prefix of all list elements. def commonprefix(m): "Given a list of pathnames, returns the longest common leading component" if not m: return '' s1 = min(m) s2 = max(m) for i, c in enumerate(s1): if c != s2[i]: return s1[:i] return s1 # Split a path in root and extension. 
# The extension is everything starting at the last dot in the last # pathname component; the root is everything before that. # It is always true that root + ext == p. # Generic implementation of splitext, to be parametrized with # the separators def _splitext(p, sep, altsep, extsep): """Split the extension from a pathname. Extension is everything from the last dot to the end, ignoring leading dots. Returns "(root, ext)"; ext may be empty.""" sepIndex = p.rfind(sep) if altsep: altsepIndex = p.rfind(altsep) sepIndex = max(sepIndex, altsepIndex) dotIndex = p.rfind(extsep) if dotIndex > sepIndex: # skip all leading dots filenameIndex = sepIndex + 1 while filenameIndex < dotIndex: if p[filenameIndex] != extsep: return p[:dotIndex], p[dotIndex:] filenameIndex += 1 return p, ''
mzizzi/ansible
refs/heads/devel
lib/ansible/modules/monitoring/sensu_subscription.py
17
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2014, Anders Ingemann <aim@secoya.dk> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: sensu_subscription short_description: Manage Sensu subscriptions version_added: 2.2 description: - Manage which I(sensu channels) a machine should subscribe to options: name: description: - The name of the channel required: true state: description: - Whether the machine should subscribe or unsubscribe from the channel choices: [ 'present', 'absent' ] required: false default: present path: description: - Path to the subscriptions json file required: false default: /etc/sensu/conf.d/subscriptions.json backup: description: - Create a backup file (if yes), including the timestamp information so you - can get the original file back if you somehow clobbered it incorrectly. 
choices: [ 'yes', 'no' ] required: false default: no requirements: [ ] author: Anders Ingemann ''' RETURN = ''' reasons: description: the reasons why the moule changed or did not change something returned: success type: list sample: ["channel subscription was absent and state is `present'"] ''' EXAMPLES = ''' # Subscribe to the nginx channel - name: subscribe to nginx checks sensu_subscription: name=nginx # Unsubscribe from the common checks channel - name: unsubscribe from common checks sensu_subscription: name=common state=absent ''' def sensu_subscription(module, path, name, state='present', backup=False): changed = False reasons = [] try: import json except ImportError: import simplejson as json try: config = json.load(open(path)) except IOError: e = get_exception() if e.errno is 2: # File not found, non-fatal if state == 'absent': reasons.append('file did not exist and state is `absent\'') return changed, reasons config = {} else: module.fail_json(msg=str(e)) except ValueError: msg = '{path} contains invalid JSON'.format(path=path) module.fail_json(msg=msg) if 'client' not in config: if state == 'absent': reasons.append('`client\' did not exist and state is `absent\'') return changed, reasons config['client'] = {} changed = True reasons.append('`client\' did not exist') if 'subscriptions' not in config['client']: if state == 'absent': reasons.append('`client.subscriptions\' did not exist and state is `absent\'') return changed, reasons config['client']['subscriptions'] = [] changed = True reasons.append('`client.subscriptions\' did not exist') if name not in config['client']['subscriptions']: if state == 'absent': reasons.append('channel subscription was absent') return changed, reasons config['client']['subscriptions'].append(name) changed = True reasons.append('channel subscription was absent and state is `present\'') else: if state == 'absent': config['client']['subscriptions'].remove(name) changed = True reasons.append('channel subscription was present and 
state is `absent\'') if changed and not module.check_mode: if backup: module.backup_local(path) try: open(path, 'w').write(json.dumps(config, indent=2) + '\n') except IOError: e = get_exception() module.fail_json(msg='Failed to write to file %s: %s' % (path, str(e))) return changed, reasons def main(): arg_spec = {'name': {'type': 'str', 'required': True}, 'path': {'type': 'str', 'default': '/etc/sensu/conf.d/subscriptions.json'}, 'state': {'type': 'str', 'default': 'present', 'choices': ['present', 'absent']}, 'backup': {'type': 'str', 'default': 'no', 'type': 'bool'}, } module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True) path = module.params['path'] name = module.params['name'] state = module.params['state'] backup = module.params['backup'] changed, reasons = sensu_subscription(module, path, name, state, backup) module.exit_json(path=path, name=name, changed=changed, msg='OK', reasons=reasons) from ansible.module_utils.basic import * if __name__ == '__main__': main()
postla/e2-gui
refs/heads/master
lib/python/Components/ResourceManager.py
33
class ResourceManager: def __init__(self): self.resourceList = {} def addResource(self, name, resource): print "adding Resource", name self.resourceList[name] = resource print "resources:", self.resourceList def getResource(self, name): if not self.hasResource(name): return None return self.resourceList[name] def hasResource(self, name): return self.resourceList.has_key(name) def removeResource(self, name): if self.hasResource(name): del self.resourceList[name] resourcemanager = ResourceManager()
jjscarafia/odoo
refs/heads/master
addons/marketing_campaign_crm_demo/__init__.py
450
# Debian packaging removes blank files, so this comment is added. # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
mattf-horton/metron
refs/heads/master
build_utils/verify_license.py
25
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import sets import sys def read_component(i): with open(i, 'r') as fp: component_lines = fp.readlines() ret = [] for line in component_lines: if len(line) > 0: l = line.split(',')[0].strip() ret.append(l) return sets.Set(ret) if __name__ == '__main__': components = read_component(sys.argv[1]) components_not_found = [] for line in sys.stdin: component = line.strip() if len(component) == 0 or component == 'none' or component in components: continue else: if len(sys.argv) > 2: print component else: components_not_found.append(component) if len(components_not_found) > 0: raise ValueError("Unable to find these components: \n " + "\n ".join(components_not_found) + "\nin the acceptable list of components: " + sys.argv[1])
BigBrother-International/gst-cerbero
refs/heads/master
setup.py
19
import os import sys import shutil from setuptools import setup, find_packages from cerbero.utils import shell sys.path.insert(0, './cerbero') # Utility function to read the README file. def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() # Utility function to parse directories def parse_dir(dirpath, extension=None): if os.path.exists('.git'): files = shell.check_call('git ls-files %s' % dirpath).split('\n') files.remove('') else: files = shell.check_call('find %s -type f' % dirpath).split('\n') files.remove('') if extension is None: return files return [f for f in files if f.endswith(extension)] # Utility function to create the list of data files def datafiles(prefix): files = [] datadir = os.path.join(prefix, 'share', 'cerbero') for dirname, extension in [('recipes', '.recipe'), ('packages', '.package')]: for f in parse_dir(dirname, extension): files.append((os.path.join(datadir, dirname), [f])) for dirname in ['config']: for f in parse_dir(dirname): files.append((os.path.join(datadir, dirname), [f])) for dirname in ['data']: for f in parse_dir(dirname): dirpath = os.path.split(f.split('/', 1)[1])[0] files.append((os.path.join(datadir, dirpath), [f])) return files #Fill manifest shutil.copy('MANIFEST.in.in', 'MANIFEST.in') with open('MANIFEST.in', 'a+') as f: for dirname in ['recipes', 'packages', 'data', 'config', 'tools']: f.write('\n'.join(['include %s' % x for x in parse_dir(dirname)])) f.write('\n') # Intercept prefix prefix = [x for x in sys.argv if x.startswith('--prefix=')] if len(prefix) == 1: prefix = prefix[0].split('--prefix=')[1] else: prefix = '/usr/local' setup( name = "cerbero", version = "0.1.0", author = "Andoni Morales", author_email = "amorales@fluendo.com", description = ("Multi platform build system for Open Source projects"), license = "LGPL", url = "http://gstreamer.com", packages = find_packages(exclude=['tests']), long_description=read('README'), zip_safe = False, include_package_data=True, data_files = 
datafiles(prefix), entry_points = """ [console_scripts] cerbero = cerbero.main:main""", classifiers=[ "License :: OSI Approved :: LGPL License", ], )
splunk/splunk-webframework-toolkit
refs/heads/master
splunk_wftoolkit/django/splunk_wftoolkit/forms.py
1
from splunkdj.setup import forms # (1): NOT from django import forms class SetupForm(forms.Form): email = forms.EmailField( endpoint='configs/conf-setup', entity='auth', field='email', # (2) max_length=100) password = forms.CharField( endpoint='configs/conf-setup', entity='auth', field='password', # (2) max_length=100, widget=forms.PasswordInput(render_value=True))
kpgriffith/kubernetes
refs/heads/master
translations/extract.py
136
#!/usr/bin/env python # Copyright 2017 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Extract strings from command files and externalize into translation files. Expects to be run from the root directory of the repository. Usage: extract.py pkg/kubectl/cmd/apply.go """ import fileinput import sys import re class MatchHandler(object): """ Simple holder for a regular expression and a function to run if that regular expression matches a line. The function should expect (re.match, file, linenumber) as parameters """ def __init__(self, regex, replace_fn): self.regex = re.compile(regex) self.replace_fn = replace_fn def short_replace(match, file, line_number): """Replace a Short: ... cobra command description with an internationalization """ sys.stdout.write('{}i18n.T({}),\n'.format(match.group(1), match.group(2))) SHORT_MATCH = MatchHandler(r'(\s+Short:\s+)("[^"]+"),', short_replace) def import_replace(match, file, line_number): """Add an extra import for the i18n library. Doesn't try to be smart and detect if it's already present, assumes a gofmt round wil fix things. 
""" sys.stdout.write('{}\n"k8s.io/kubectl/pkg/util/i18n"\n'.format(match.group(1))) IMPORT_MATCH = MatchHandler('(.*"k8s.io/kubectl/pkg/cmd/util")', import_replace) def string_flag_replace(match, file, line_number): """Replace a cmd.Flags().String("...", "", "...") with an internationalization """ sys.stdout.write('{}i18n.T("{})"))\n'.format(match.group(1), match.group(2))) STRING_FLAG_MATCH = MatchHandler('(\s+cmd\.Flags\(\).String\("[^"]*", "[^"]*", )"([^"]*)"\)', string_flag_replace) def long_string_replace(match, file, line_number): return '{}i18n.T({}){}'.format(match.group(1), match.group(2), match.group(3)) LONG_DESC_MATCH = MatchHandler('(LongDesc\()(`[^`]+`)([^\n]\n)', long_string_replace) EXAMPLE_MATCH = MatchHandler('(Examples\()(`[^`]+`)([^\n]\n)', long_string_replace) def replace(filename, matchers, multiline_matchers): """Given a file and a set of matchers, run those matchers across the file and replace it with the results. """ # Run all the matchers line_number = 0 for line in fileinput.input(filename, inplace=True): line_number += 1 matched = False for matcher in matchers: match = matcher.regex.match(line) if match: matcher.replace_fn(match, filename, line_number) matched = True break if not matched: sys.stdout.write(line) sys.stdout.flush() with open(filename, 'r') as datafile: content = datafile.read() for matcher in multiline_matchers: match = matcher.regex.search(content) while match: rep = matcher.replace_fn(match, filename, 0) # Escape back references in the replacement string # (And escape for Python) # (And escape for regex) rep = re.sub('\\\\(\\d)', '\\\\\\\\\\1', rep) content = matcher.regex.sub(rep, content, 1) match = matcher.regex.search(content) sys.stdout.write(content) # gofmt the file again from subprocess import call call(["goimports", "-w", filename]) replace(sys.argv[1], [SHORT_MATCH, IMPORT_MATCH, STRING_FLAG_MATCH], [LONG_DESC_MATCH, EXAMPLE_MATCH])
jcoady9/python-for-android
refs/heads/master
python3-alpha/python3-src/Lib/urllib/robotparser.py
55
""" robotparser.py Copyright (C) 2000 Bastian Kleineidam You can choose between two licenses when using this package: 1) GNU GPLv2 2) PSF license for Python 2.2 The robots.txt Exclusion Protocol is implemented as specified in http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html """ import urllib.parse, urllib.request __all__ = ["RobotFileParser"] class RobotFileParser: """ This class provides a set of methods to read, parse and answer questions about a single robots.txt file. """ def __init__(self, url=''): self.entries = [] self.default_entry = None self.disallow_all = False self.allow_all = False self.set_url(url) self.last_checked = 0 def mtime(self): """Returns the time the robots.txt file was last fetched. This is useful for long-running web spiders that need to check for new robots.txt files periodically. """ return self.last_checked def modified(self): """Sets the time the robots.txt file was last fetched to the current time. """ import time self.last_checked = time.time() def set_url(self, url): """Sets the URL referring to a robots.txt file.""" self.url = url self.host, self.path = urllib.parse.urlparse(url)[1:3] def read(self): """Reads the robots.txt URL and feeds it to the parser.""" try: f = urllib.request.urlopen(self.url) except urllib.error.HTTPError as err: if err.code in (401, 403): self.disallow_all = True elif err.code >= 400: self.allow_all = True else: raw = f.read() self.parse(raw.decode("utf-8").splitlines()) def _add_entry(self, entry): if "*" in entry.useragents: # the default entry is considered last if self.default_entry is None: # the first default entry wins self.default_entry = entry else: self.entries.append(entry) def parse(self, lines): """Parse the input lines from a robots.txt file. We allow that a user-agent: line is not preceded by one or more blank lines. 
""" # states: # 0: start state # 1: saw user-agent line # 2: saw an allow or disallow line state = 0 entry = Entry() for line in lines: if not line: if state == 1: entry = Entry() state = 0 elif state == 2: self._add_entry(entry) entry = Entry() state = 0 # remove optional comment and strip line i = line.find('#') if i >= 0: line = line[:i] line = line.strip() if not line: continue line = line.split(':', 1) if len(line) == 2: line[0] = line[0].strip().lower() line[1] = urllib.parse.unquote(line[1].strip()) if line[0] == "user-agent": if state == 2: self._add_entry(entry) entry = Entry() entry.useragents.append(line[1]) state = 1 elif line[0] == "disallow": if state != 0: entry.rulelines.append(RuleLine(line[1], False)) state = 2 elif line[0] == "allow": if state != 0: entry.rulelines.append(RuleLine(line[1], True)) state = 2 if state == 2: self._add_entry(entry) def can_fetch(self, useragent, url): """using the parsed robots.txt decide if useragent can fetch url""" if self.disallow_all: return False if self.allow_all: return True # search for given user agent matches # the first match counts parsed_url = urllib.parse.urlparse(urllib.parse.unquote(url)) url = urllib.parse.urlunparse(('','',parsed_url.path, parsed_url.params,parsed_url.query, parsed_url.fragment)) url = urllib.parse.quote(url) if not url: url = "/" for entry in self.entries: if entry.applies_to(useragent): return entry.allowance(url) # try the default entry last if self.default_entry: return self.default_entry.allowance(url) # agent not found ==> access granted return True def __str__(self): return ''.join([str(entry) + "\n" for entry in self.entries]) class RuleLine: """A rule line is a single "Allow:" (allowance==True) or "Disallow:" (allowance==False) followed by a path.""" def __init__(self, path, allowance): if path == '' and not allowance: # an empty value means allow all allowance = True self.path = urllib.parse.quote(path) self.allowance = allowance def applies_to(self, filename): return 
self.path == "*" or filename.startswith(self.path) def __str__(self): return (self.allowance and "Allow" or "Disallow") + ": " + self.path class Entry: """An entry has one or more user-agents and zero or more rulelines""" def __init__(self): self.useragents = [] self.rulelines = [] def __str__(self): ret = [] for agent in self.useragents: ret.extend(["User-agent: ", agent, "\n"]) for line in self.rulelines: ret.extend([str(line), "\n"]) return ''.join(ret) def applies_to(self, useragent): """check if this entry applies to the specified agent""" # split the name token and make it lower case useragent = useragent.split("/")[0].lower() for agent in self.useragents: if agent == '*': # we have the catch-all agent return True agent = agent.lower() if agent in useragent: return True return False def allowance(self, filename): """Preconditions: - our agent applies to this entry - filename is URL decoded""" for line in self.rulelines: if line.applies_to(filename): return line.allowance return True
be-cloud-be/horizon-addons
refs/heads/9.0
server-tools/datetime_formatter/__openerp__.py
2
# -*- coding: utf-8 -*- # © 2015 Grupo ESOC Ingeniería de Servicios, S.L.U. - Jairo Llopis # © 2016 Tecnativa, S.L. - Vicent Cubells # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). { "name": "Date & Time Formatter", "summary": "Helper functions to give correct format to date[time] fields", "version": "9.0.1.0.0", "category": "Tools", "website": "https://tecnativa.com", "author": "Grupo ESOC Ingeniería de Servicios, " "Tecnativa," "Odoo Community Association (OCA)", "license": "AGPL-3", "installable": True, "depends": [ "base", ], }
ccomb/OpenUpgrade
refs/heads/master
openerp/addons/base/tests/test_db_cursor.py
175
# -*- coding: utf-8 -*- import unittest2 import openerp from openerp.tools.misc import mute_logger from openerp.tests import common DB = common.DB ADMIN_USER_ID = common.ADMIN_USER_ID def registry(): return openerp.modules.registry.RegistryManager.get(DB) class test_cr_execute(unittest2.TestCase): """ Try cr.execute with wrong parameters """ @mute_logger('openerp.sql_db') def test_execute_bad_params(self): """ Try to use iterable but non-list or int params in query parameters. """ with registry().cursor() as cr: with self.assertRaises(ValueError): cr.execute("SELECT id FROM res_users WHERE login=%s", 'admin') with self.assertRaises(ValueError): cr.execute("SELECT id FROM res_users WHERE id=%s", 1) with self.assertRaises(ValueError): cr.execute("SELECT id FROM res_users WHERE id=%s", '1') # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
grace-/opencv-3.0.0-cvpr
refs/heads/master
opencv/samples/python2/color_histogram.py
8
#!/usr/bin/env python import numpy as np import cv2 from time import clock import sys import video if __name__ == '__main__': hsv_map = np.zeros((180, 256, 3), np.uint8) h, s = np.indices(hsv_map.shape[:2]) hsv_map[:,:,0] = h hsv_map[:,:,1] = s hsv_map[:,:,2] = 255 hsv_map = cv2.cvtColor(hsv_map, cv2.COLOR_HSV2BGR) cv2.imshow('hsv_map', hsv_map) cv2.namedWindow('hist', 0) hist_scale = 10 def set_scale(val): global hist_scale hist_scale = val cv2.createTrackbar('scale', 'hist', hist_scale, 32, set_scale) try: fn = sys.argv[1] except: fn = 0 cam = video.create_capture(fn, fallback='synth:bg=../cpp/baboon.jpg:class=chess:noise=0.05') while True: flag, frame = cam.read() cv2.imshow('camera', frame) small = cv2.pyrDown(frame) hsv = cv2.cvtColor(small, cv2.COLOR_BGR2HSV) dark = hsv[...,2] < 32 hsv[dark] = 0 h = cv2.calcHist( [hsv], [0, 1], None, [180, 256], [0, 180, 0, 256] ) h = np.clip(h*0.005*hist_scale, 0, 1) vis = hsv_map*h[:,:,np.newaxis] / 255.0 cv2.imshow('hist', vis) ch = 0xFF & cv2.waitKey(1) if ch == 27: break cv2.destroyAllWindows()
tchellomello/home-assistant
refs/heads/dev
homeassistant/components/fan/device_trigger.py
9
"""Provides device automations for Fan.""" from typing import List import voluptuous as vol from homeassistant.components.automation import AutomationActionType from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA from homeassistant.components.homeassistant.triggers import state as state_trigger from homeassistant.const import ( CONF_DEVICE_ID, CONF_DOMAIN, CONF_ENTITY_ID, CONF_PLATFORM, CONF_TYPE, STATE_OFF, STATE_ON, ) from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers import config_validation as cv, entity_registry from homeassistant.helpers.typing import ConfigType from . import DOMAIN TRIGGER_TYPES = {"turned_on", "turned_off"} TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend( { vol.Required(CONF_ENTITY_ID): cv.entity_id, vol.Required(CONF_TYPE): vol.In(TRIGGER_TYPES), } ) async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]: """List device triggers for Fan devices.""" registry = await entity_registry.async_get_registry(hass) triggers = [] # Get all the integrations entities for this device for entry in entity_registry.async_entries_for_device(registry, device_id): if entry.domain != DOMAIN: continue # Add triggers for each entity that belongs to this integration triggers.append( { CONF_PLATFORM: "device", CONF_DEVICE_ID: device_id, CONF_DOMAIN: DOMAIN, CONF_ENTITY_ID: entry.entity_id, CONF_TYPE: "turned_on", } ) triggers.append( { CONF_PLATFORM: "device", CONF_DEVICE_ID: device_id, CONF_DOMAIN: DOMAIN, CONF_ENTITY_ID: entry.entity_id, CONF_TYPE: "turned_off", } ) return triggers async def async_attach_trigger( hass: HomeAssistant, config: ConfigType, action: AutomationActionType, automation_info: dict, ) -> CALLBACK_TYPE: """Attach a trigger.""" config = TRIGGER_SCHEMA(config) if config[CONF_TYPE] == "turned_on": from_state = STATE_OFF to_state = STATE_ON else: from_state = STATE_ON to_state = STATE_OFF state_config = { state_trigger.CONF_PLATFORM: "state", CONF_ENTITY_ID: 
config[CONF_ENTITY_ID], state_trigger.CONF_FROM: from_state, state_trigger.CONF_TO: to_state, } state_config = state_trigger.TRIGGER_SCHEMA(state_config) return await state_trigger.async_attach_trigger( hass, state_config, action, automation_info, platform_type="device" )
WesternStar/tilinux
refs/heads/master
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py
4653
# EventClass.py # # This is a library defining some events types classes, which could # be used by other scripts to analyzing the perf samples. # # Currently there are just a few classes defined for examples, # PerfEvent is the base class for all perf event sample, PebsEvent # is a HW base Intel x86 PEBS event, and user could add more SW/HW # event classes based on requirements. import struct # Event types, user could add more here EVTYPE_GENERIC = 0 EVTYPE_PEBS = 1 # Basic PEBS event EVTYPE_PEBS_LL = 2 # PEBS event with load latency info EVTYPE_IBS = 3 # # Currently we don't have good way to tell the event type, but by # the size of raw buffer, raw PEBS event with load latency data's # size is 176 bytes, while the pure PEBS event's size is 144 bytes. # def create_event(name, comm, dso, symbol, raw_buf): if (len(raw_buf) == 144): event = PebsEvent(name, comm, dso, symbol, raw_buf) elif (len(raw_buf) == 176): event = PebsNHM(name, comm, dso, symbol, raw_buf) else: event = PerfEvent(name, comm, dso, symbol, raw_buf) return event class PerfEvent(object): event_num = 0 def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC): self.name = name self.comm = comm self.dso = dso self.symbol = symbol self.raw_buf = raw_buf self.ev_type = ev_type PerfEvent.event_num += 1 def show(self): print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso) # # Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer # contains the context info when that event happened: the EFLAGS and # linear IP info, as well as all the registers. 
# class PebsEvent(PerfEvent): pebs_num = 0 def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS): tmp_buf=raw_buf[0:80] flags, ip, ax, bx, cx, dx, si, di, bp, sp = struct.unpack('QQQQQQQQQQ', tmp_buf) self.flags = flags self.ip = ip self.ax = ax self.bx = bx self.cx = cx self.dx = dx self.si = si self.di = di self.bp = bp self.sp = sp PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type) PebsEvent.pebs_num += 1 del tmp_buf # # Intel Nehalem and Westmere support PEBS plus Load Latency info which lie # in the four 64 bit words write after the PEBS data: # Status: records the IA32_PERF_GLOBAL_STATUS register value # DLA: Data Linear Address (EIP) # DSE: Data Source Encoding, where the latency happens, hit or miss # in L1/L2/L3 or IO operations # LAT: the actual latency in cycles # class PebsNHM(PebsEvent): pebs_nhm_num = 0 def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL): tmp_buf=raw_buf[144:176] status, dla, dse, lat = struct.unpack('QQQQ', tmp_buf) self.status = status self.dla = dla self.dse = dse self.lat = lat PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type) PebsNHM.pebs_nhm_num += 1 del tmp_buf
simonpatrick/bite-project
refs/heads/master
deps/gdata-python-client/src/gdata/Crypto/PublicKey/__init__.py
273
"""Public-key encryption and signature algorithms. Public-key encryption uses two different keys, one for encryption and one for decryption. The encryption key can be made public, and the decryption key is kept private. Many public-key algorithms can also be used to sign messages, and some can *only* be used for signatures. Crypto.PublicKey.DSA Digital Signature Algorithm. (Signature only) Crypto.PublicKey.ElGamal (Signing and encryption) Crypto.PublicKey.RSA (Signing, encryption, and blinding) Crypto.PublicKey.qNEW (Signature only) """ __all__ = ['RSA', 'DSA', 'ElGamal', 'qNEW'] __revision__ = "$Id: __init__.py,v 1.4 2003/04/03 20:27:13 akuchling Exp $"
miaoski/shadowsocks
refs/heads/master
shadowsocks/tcprelay.py
922
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright 2015 clowwindy # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from __future__ import absolute_import, division, print_function, \ with_statement import time import socket import errno import struct import logging import traceback import random from shadowsocks import encrypt, eventloop, shell, common from shadowsocks.common import parse_header # we clear at most TIMEOUTS_CLEAN_SIZE timeouts each time TIMEOUTS_CLEAN_SIZE = 512 MSG_FASTOPEN = 0x20000000 # SOCKS command definition CMD_CONNECT = 1 CMD_BIND = 2 CMD_UDP_ASSOCIATE = 3 # for each opening port, we have a TCP Relay # for each connection, we have a TCP Relay Handler to handle the connection # for each handler, we have 2 sockets: # local: connected to the client # remote: connected to remote server # for each handler, it could be at one of several stages: # as sslocal: # stage 0 SOCKS hello received from local, send hello to local # stage 1 addr received from local, query DNS for remote # stage 2 UDP assoc # stage 3 DNS resolved, connect to remote # stage 4 still connecting, more data from local received # stage 5 remote connected, piping local and remote # as ssserver: # stage 0 just jump to stage 1 # stage 1 addr received from local, query DNS for remote # stage 3 DNS resolved, connect to remote # stage 4 still connecting, more data from local received # stage 5 remote connected, piping local and remote STAGE_INIT = 0 STAGE_ADDR = 1 STAGE_UDP_ASSOC = 2 STAGE_DNS = 3 
STAGE_CONNECTING = 4 STAGE_STREAM = 5 STAGE_DESTROYED = -1 # for each handler, we have 2 stream directions: # upstream: from client to server direction # read local and write to remote # downstream: from server to client direction # read remote and write to local STREAM_UP = 0 STREAM_DOWN = 1 # for each stream, it's waiting for reading, or writing, or both WAIT_STATUS_INIT = 0 WAIT_STATUS_READING = 1 WAIT_STATUS_WRITING = 2 WAIT_STATUS_READWRITING = WAIT_STATUS_READING | WAIT_STATUS_WRITING BUF_SIZE = 32 * 1024 class TCPRelayHandler(object): def __init__(self, server, fd_to_handlers, loop, local_sock, config, dns_resolver, is_local): self._server = server self._fd_to_handlers = fd_to_handlers self._loop = loop self._local_sock = local_sock self._remote_sock = None self._config = config self._dns_resolver = dns_resolver # TCP Relay works as either sslocal or ssserver # if is_local, this is sslocal self._is_local = is_local self._stage = STAGE_INIT self._encryptor = encrypt.Encryptor(config['password'], config['method']) self._fastopen_connected = False self._data_to_write_to_local = [] self._data_to_write_to_remote = [] self._upstream_status = WAIT_STATUS_READING self._downstream_status = WAIT_STATUS_INIT self._client_address = local_sock.getpeername()[:2] self._remote_address = None if 'forbidden_ip' in config: self._forbidden_iplist = config['forbidden_ip'] else: self._forbidden_iplist = None if is_local: self._chosen_server = self._get_a_server() fd_to_handlers[local_sock.fileno()] = self local_sock.setblocking(False) local_sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1) loop.add(local_sock, eventloop.POLL_IN | eventloop.POLL_ERR, self._server) self.last_activity = 0 self._update_activity() def __hash__(self): # default __hash__ is id / 16 # we want to eliminate collisions return id(self) @property def remote_address(self): return self._remote_address def _get_a_server(self): server = self._config['server'] server_port = self._config['server_port'] if 
type(server_port) == list: server_port = random.choice(server_port) if type(server) == list: server = random.choice(server) logging.debug('chosen server: %s:%d', server, server_port) return server, server_port def _update_activity(self, data_len=0): # tell the TCP Relay we have activities recently # else it will think we are inactive and timed out self._server.update_activity(self, data_len) def _update_stream(self, stream, status): # update a stream to a new waiting status # check if status is changed # only update if dirty dirty = False if stream == STREAM_DOWN: if self._downstream_status != status: self._downstream_status = status dirty = True elif stream == STREAM_UP: if self._upstream_status != status: self._upstream_status = status dirty = True if dirty: if self._local_sock: event = eventloop.POLL_ERR if self._downstream_status & WAIT_STATUS_WRITING: event |= eventloop.POLL_OUT if self._upstream_status & WAIT_STATUS_READING: event |= eventloop.POLL_IN self._loop.modify(self._local_sock, event) if self._remote_sock: event = eventloop.POLL_ERR if self._downstream_status & WAIT_STATUS_READING: event |= eventloop.POLL_IN if self._upstream_status & WAIT_STATUS_WRITING: event |= eventloop.POLL_OUT self._loop.modify(self._remote_sock, event) def _write_to_sock(self, data, sock): # write data to sock # if only some of the data are written, put remaining in the buffer # and update the stream to wait for writing if not data or not sock: return False uncomplete = False try: l = len(data) s = sock.send(data) if s < l: data = data[s:] uncomplete = True except (OSError, IOError) as e: error_no = eventloop.errno_from_exception(e) if error_no in (errno.EAGAIN, errno.EINPROGRESS, errno.EWOULDBLOCK): uncomplete = True else: shell.print_exception(e) self.destroy() return False if uncomplete: if sock == self._local_sock: self._data_to_write_to_local.append(data) self._update_stream(STREAM_DOWN, WAIT_STATUS_WRITING) elif sock == self._remote_sock: 
self._data_to_write_to_remote.append(data) self._update_stream(STREAM_UP, WAIT_STATUS_WRITING) else: logging.error('write_all_to_sock:unknown socket') else: if sock == self._local_sock: self._update_stream(STREAM_DOWN, WAIT_STATUS_READING) elif sock == self._remote_sock: self._update_stream(STREAM_UP, WAIT_STATUS_READING) else: logging.error('write_all_to_sock:unknown socket') return True def _handle_stage_connecting(self, data): if self._is_local: data = self._encryptor.encrypt(data) self._data_to_write_to_remote.append(data) if self._is_local and not self._fastopen_connected and \ self._config['fast_open']: # for sslocal and fastopen, we basically wait for data and use # sendto to connect try: # only connect once self._fastopen_connected = True remote_sock = \ self._create_remote_socket(self._chosen_server[0], self._chosen_server[1]) self._loop.add(remote_sock, eventloop.POLL_ERR, self._server) data = b''.join(self._data_to_write_to_remote) l = len(data) s = remote_sock.sendto(data, MSG_FASTOPEN, self._chosen_server) if s < l: data = data[s:] self._data_to_write_to_remote = [data] else: self._data_to_write_to_remote = [] self._update_stream(STREAM_UP, WAIT_STATUS_READWRITING) except (OSError, IOError) as e: if eventloop.errno_from_exception(e) == errno.EINPROGRESS: # in this case data is not sent at all self._update_stream(STREAM_UP, WAIT_STATUS_READWRITING) elif eventloop.errno_from_exception(e) == errno.ENOTCONN: logging.error('fast open not supported on this OS') self._config['fast_open'] = False self.destroy() else: shell.print_exception(e) if self._config['verbose']: traceback.print_exc() self.destroy() def _handle_stage_addr(self, data): try: if self._is_local: cmd = common.ord(data[1]) if cmd == CMD_UDP_ASSOCIATE: logging.debug('UDP associate') if self._local_sock.family == socket.AF_INET6: header = b'\x05\x00\x00\x04' else: header = b'\x05\x00\x00\x01' addr, port = self._local_sock.getsockname()[:2] addr_to_send = socket.inet_pton(self._local_sock.family, 
addr) port_to_send = struct.pack('>H', port) self._write_to_sock(header + addr_to_send + port_to_send, self._local_sock) self._stage = STAGE_UDP_ASSOC # just wait for the client to disconnect return elif cmd == CMD_CONNECT: # just trim VER CMD RSV data = data[3:] else: logging.error('unknown command %d', cmd) self.destroy() return header_result = parse_header(data) if header_result is None: raise Exception('can not parse header') addrtype, remote_addr, remote_port, header_length = header_result logging.info('connecting %s:%d from %s:%d' % (common.to_str(remote_addr), remote_port, self._client_address[0], self._client_address[1])) self._remote_address = (common.to_str(remote_addr), remote_port) # pause reading self._update_stream(STREAM_UP, WAIT_STATUS_WRITING) self._stage = STAGE_DNS if self._is_local: # forward address to remote self._write_to_sock((b'\x05\x00\x00\x01' b'\x00\x00\x00\x00\x10\x10'), self._local_sock) data_to_send = self._encryptor.encrypt(data) self._data_to_write_to_remote.append(data_to_send) # notice here may go into _handle_dns_resolved directly self._dns_resolver.resolve(self._chosen_server[0], self._handle_dns_resolved) else: if len(data) > header_length: self._data_to_write_to_remote.append(data[header_length:]) # notice here may go into _handle_dns_resolved directly self._dns_resolver.resolve(remote_addr, self._handle_dns_resolved) except Exception as e: self._log_error(e) if self._config['verbose']: traceback.print_exc() self.destroy() def _create_remote_socket(self, ip, port): addrs = socket.getaddrinfo(ip, port, 0, socket.SOCK_STREAM, socket.SOL_TCP) if len(addrs) == 0: raise Exception("getaddrinfo failed for %s:%d" % (ip, port)) af, socktype, proto, canonname, sa = addrs[0] if self._forbidden_iplist: if common.to_str(sa[0]) in self._forbidden_iplist: raise Exception('IP %s is in forbidden list, reject' % common.to_str(sa[0])) remote_sock = socket.socket(af, socktype, proto) self._remote_sock = remote_sock 
self._fd_to_handlers[remote_sock.fileno()] = self remote_sock.setblocking(False) remote_sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1) return remote_sock def _handle_dns_resolved(self, result, error): if error: self._log_error(error) self.destroy() return if result: ip = result[1] if ip: try: self._stage = STAGE_CONNECTING remote_addr = ip if self._is_local: remote_port = self._chosen_server[1] else: remote_port = self._remote_address[1] if self._is_local and self._config['fast_open']: # for fastopen: # wait for more data to arrive and send them in one SYN self._stage = STAGE_CONNECTING # we don't have to wait for remote since it's not # created self._update_stream(STREAM_UP, WAIT_STATUS_READING) # TODO when there is already data in this packet else: # else do connect remote_sock = self._create_remote_socket(remote_addr, remote_port) try: remote_sock.connect((remote_addr, remote_port)) except (OSError, IOError) as e: if eventloop.errno_from_exception(e) == \ errno.EINPROGRESS: pass self._loop.add(remote_sock, eventloop.POLL_ERR | eventloop.POLL_OUT, self._server) self._stage = STAGE_CONNECTING self._update_stream(STREAM_UP, WAIT_STATUS_READWRITING) self._update_stream(STREAM_DOWN, WAIT_STATUS_READING) return except Exception as e: shell.print_exception(e) if self._config['verbose']: traceback.print_exc() self.destroy() def _on_local_read(self): # handle all local read events and dispatch them to methods for # each stage if not self._local_sock: return is_local = self._is_local data = None try: data = self._local_sock.recv(BUF_SIZE) except (OSError, IOError) as e: if eventloop.errno_from_exception(e) in \ (errno.ETIMEDOUT, errno.EAGAIN, errno.EWOULDBLOCK): return if not data: self.destroy() return self._update_activity(len(data)) if not is_local: data = self._encryptor.decrypt(data) if not data: return if self._stage == STAGE_STREAM: if self._is_local: data = self._encryptor.encrypt(data) self._write_to_sock(data, self._remote_sock) return elif is_local and 
self._stage == STAGE_INIT: # TODO check auth method self._write_to_sock(b'\x05\00', self._local_sock) self._stage = STAGE_ADDR return elif self._stage == STAGE_CONNECTING: self._handle_stage_connecting(data) elif (is_local and self._stage == STAGE_ADDR) or \ (not is_local and self._stage == STAGE_INIT): self._handle_stage_addr(data) def _on_remote_read(self): # handle all remote read events data = None try: data = self._remote_sock.recv(BUF_SIZE) except (OSError, IOError) as e: if eventloop.errno_from_exception(e) in \ (errno.ETIMEDOUT, errno.EAGAIN, errno.EWOULDBLOCK): return if not data: self.destroy() return self._update_activity(len(data)) if self._is_local: data = self._encryptor.decrypt(data) else: data = self._encryptor.encrypt(data) try: self._write_to_sock(data, self._local_sock) except Exception as e: shell.print_exception(e) if self._config['verbose']: traceback.print_exc() # TODO use logging when debug completed self.destroy() def _on_local_write(self): # handle local writable event if self._data_to_write_to_local: data = b''.join(self._data_to_write_to_local) self._data_to_write_to_local = [] self._write_to_sock(data, self._local_sock) else: self._update_stream(STREAM_DOWN, WAIT_STATUS_READING) def _on_remote_write(self): # handle remote writable event self._stage = STAGE_STREAM if self._data_to_write_to_remote: data = b''.join(self._data_to_write_to_remote) self._data_to_write_to_remote = [] self._write_to_sock(data, self._remote_sock) else: self._update_stream(STREAM_UP, WAIT_STATUS_READING) def _on_local_error(self): logging.debug('got local error') if self._local_sock: logging.error(eventloop.get_sock_error(self._local_sock)) self.destroy() def _on_remote_error(self): logging.debug('got remote error') if self._remote_sock: logging.error(eventloop.get_sock_error(self._remote_sock)) self.destroy() def handle_event(self, sock, event): # handle all events in this handler and dispatch them to methods if self._stage == STAGE_DESTROYED: 
logging.debug('ignore handle_event: destroyed') return # order is important if sock == self._remote_sock: if event & eventloop.POLL_ERR: self._on_remote_error() if self._stage == STAGE_DESTROYED: return if event & (eventloop.POLL_IN | eventloop.POLL_HUP): self._on_remote_read() if self._stage == STAGE_DESTROYED: return if event & eventloop.POLL_OUT: self._on_remote_write() elif sock == self._local_sock: if event & eventloop.POLL_ERR: self._on_local_error() if self._stage == STAGE_DESTROYED: return if event & (eventloop.POLL_IN | eventloop.POLL_HUP): self._on_local_read() if self._stage == STAGE_DESTROYED: return if event & eventloop.POLL_OUT: self._on_local_write() else: logging.warn('unknown socket') def _log_error(self, e): logging.error('%s when handling connection from %s:%d' % (e, self._client_address[0], self._client_address[1])) def destroy(self): # destroy the handler and release any resources # promises: # 1. destroy won't make another destroy() call inside # 2. destroy releases resources so it prevents future call to destroy # 3. destroy won't raise any exceptions # if any of the promises are broken, it indicates a bug has been # introduced! 
mostly likely memory leaks, etc if self._stage == STAGE_DESTROYED: # this couldn't happen logging.debug('already destroyed') return self._stage = STAGE_DESTROYED if self._remote_address: logging.debug('destroy: %s:%d' % self._remote_address) else: logging.debug('destroy') if self._remote_sock: logging.debug('destroying remote') self._loop.remove(self._remote_sock) del self._fd_to_handlers[self._remote_sock.fileno()] self._remote_sock.close() self._remote_sock = None if self._local_sock: logging.debug('destroying local') self._loop.remove(self._local_sock) del self._fd_to_handlers[self._local_sock.fileno()] self._local_sock.close() self._local_sock = None self._dns_resolver.remove_callback(self._handle_dns_resolved) self._server.remove_handler(self) class TCPRelay(object): def __init__(self, config, dns_resolver, is_local, stat_callback=None): self._config = config self._is_local = is_local self._dns_resolver = dns_resolver self._closed = False self._eventloop = None self._fd_to_handlers = {} self._timeout = config['timeout'] self._timeouts = [] # a list for all the handlers # we trim the timeouts once a while self._timeout_offset = 0 # last checked position for timeout self._handler_to_timeouts = {} # key: handler value: index in timeouts if is_local: listen_addr = config['local_address'] listen_port = config['local_port'] else: listen_addr = config['server'] listen_port = config['server_port'] self._listen_port = listen_port addrs = socket.getaddrinfo(listen_addr, listen_port, 0, socket.SOCK_STREAM, socket.SOL_TCP) if len(addrs) == 0: raise Exception("can't get addrinfo for %s:%d" % (listen_addr, listen_port)) af, socktype, proto, canonname, sa = addrs[0] server_socket = socket.socket(af, socktype, proto) server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) server_socket.bind(sa) server_socket.setblocking(False) if config['fast_open']: try: server_socket.setsockopt(socket.SOL_TCP, 23, 5) except socket.error: logging.error('warning: fast open is not 
available') self._config['fast_open'] = False server_socket.listen(1024) self._server_socket = server_socket self._stat_callback = stat_callback def add_to_loop(self, loop): if self._eventloop: raise Exception('already add to loop') if self._closed: raise Exception('already closed') self._eventloop = loop self._eventloop.add(self._server_socket, eventloop.POLL_IN | eventloop.POLL_ERR, self) self._eventloop.add_periodic(self.handle_periodic) def remove_handler(self, handler): index = self._handler_to_timeouts.get(hash(handler), -1) if index >= 0: # delete is O(n), so we just set it to None self._timeouts[index] = None del self._handler_to_timeouts[hash(handler)] def update_activity(self, handler, data_len): if data_len and self._stat_callback: self._stat_callback(self._listen_port, data_len) # set handler to active now = int(time.time()) if now - handler.last_activity < eventloop.TIMEOUT_PRECISION: # thus we can lower timeout modification frequency return handler.last_activity = now index = self._handler_to_timeouts.get(hash(handler), -1) if index >= 0: # delete is O(n), so we just set it to None self._timeouts[index] = None length = len(self._timeouts) self._timeouts.append(handler) self._handler_to_timeouts[hash(handler)] = length def _sweep_timeout(self): # tornado's timeout memory management is more flexible than we need # we just need a sorted last_activity queue and it's faster than heapq # in fact we can do O(1) insertion/remove so we invent our own if self._timeouts: logging.log(shell.VERBOSE_LEVEL, 'sweeping timeouts') now = time.time() length = len(self._timeouts) pos = self._timeout_offset while pos < length: handler = self._timeouts[pos] if handler: if now - handler.last_activity < self._timeout: break else: if handler.remote_address: logging.warn('timed out: %s:%d' % handler.remote_address) else: logging.warn('timed out') handler.destroy() self._timeouts[pos] = None # free memory pos += 1 else: pos += 1 if pos > TIMEOUTS_CLEAN_SIZE and pos > length >> 
1: # clean up the timeout queue when it gets larger than half # of the queue self._timeouts = self._timeouts[pos:] for key in self._handler_to_timeouts: self._handler_to_timeouts[key] -= pos pos = 0 self._timeout_offset = pos def handle_event(self, sock, fd, event): # handle events and dispatch to handlers if sock: logging.log(shell.VERBOSE_LEVEL, 'fd %d %s', fd, eventloop.EVENT_NAMES.get(event, event)) if sock == self._server_socket: if event & eventloop.POLL_ERR: # TODO raise Exception('server_socket error') try: logging.debug('accept') conn = self._server_socket.accept() TCPRelayHandler(self, self._fd_to_handlers, self._eventloop, conn[0], self._config, self._dns_resolver, self._is_local) except (OSError, IOError) as e: error_no = eventloop.errno_from_exception(e) if error_no in (errno.EAGAIN, errno.EINPROGRESS, errno.EWOULDBLOCK): return else: shell.print_exception(e) if self._config['verbose']: traceback.print_exc() else: if sock: handler = self._fd_to_handlers.get(fd, None) if handler: handler.handle_event(sock, event) else: logging.warn('poll removed fd') def handle_periodic(self): if self._closed: if self._server_socket: self._eventloop.remove(self._server_socket) self._server_socket.close() self._server_socket = None logging.info('closed TCP port %d', self._listen_port) if not self._fd_to_handlers: logging.info('stopping') self._eventloop.stop() self._sweep_timeout() def close(self, next_tick=False): logging.debug('TCP close') self._closed = True if not next_tick: if self._eventloop: self._eventloop.remove_periodic(self.handle_periodic) self._eventloop.remove(self._server_socket) self._server_socket.close() for handler in list(self._fd_to_handlers.values()): handler.destroy()
dpiers/coderang-meteor
refs/heads/master
public/jsrepl/extern/python/closured/lib/python2.7/distutils/extension.py
250
"""distutils.extension Provides the Extension class, used to describe C/C++ extension modules in setup scripts.""" __revision__ = "$Id$" import os, string, sys from types import * try: import warnings except ImportError: warnings = None # This class is really only used by the "build_ext" command, so it might # make sense to put it in distutils.command.build_ext. However, that # module is already big enough, and I want to make this class a bit more # complex to simplify some common cases ("foo" module in "foo.c") and do # better error-checking ("foo.c" actually exists). # # Also, putting this in build_ext.py means every setup script would have to # import that large-ish module (indirectly, through distutils.core) in # order to do anything. class Extension: """Just a collection of attributes that describes an extension module and everything needed to build it (hopefully in a portable way, but there are hooks that let you be as unportable as you need). Instance attributes: name : string the full name of the extension, including any packages -- ie. *not* a filename or pathname, but Python dotted name sources : [string] list of source filenames, relative to the distribution root (where the setup script lives), in Unix form (slash-separated) for portability. Source files may be C, C++, SWIG (.i), platform-specific resource files, or whatever else is recognized by the "build_ext" command as source for a Python extension. 
include_dirs : [string] list of directories to search for C/C++ header files (in Unix form for portability) define_macros : [(name : string, value : string|None)] list of macros to define; each macro is defined using a 2-tuple, where 'value' is either the string to define it to or None to define it without a particular value (equivalent of "#define FOO" in source or -DFOO on Unix C compiler command line) undef_macros : [string] list of macros to undefine explicitly library_dirs : [string] list of directories to search for C/C++ libraries at link time libraries : [string] list of library names (not filenames or paths) to link against runtime_library_dirs : [string] list of directories to search for C/C++ libraries at run time (for shared extensions, this is when the extension is loaded) extra_objects : [string] list of extra files to link with (eg. object files not implied by 'sources', static library that must be explicitly specified, binary resource files, etc.) extra_compile_args : [string] any extra platform- and compiler-specific information to use when compiling the source files in 'sources'. For platforms and compilers where "command line" makes sense, this is typically a list of command-line arguments, but for other platforms it could be anything. extra_link_args : [string] any extra platform- and compiler-specific information to use when linking object files together to create the extension (or to create a new static Python interpreter). Similar interpretation as for 'extra_compile_args'. export_symbols : [string] list of symbols to be exported from a shared extension. Not used on all platforms, and not generally necessary for Python extensions, which typically export exactly one symbol: "init" + extension_name. swig_opts : [string] any extra options to pass to SWIG if a source file has the .i extension. depends : [string] list of files that the extension depends on language : string extension language (i.e. "c", "c++", "objc"). 
Will be detected from the source extensions if not provided. """ # When adding arguments to this constructor, be sure to update # setup_keywords in core.py. def __init__ (self, name, sources, include_dirs=None, define_macros=None, undef_macros=None, library_dirs=None, libraries=None, runtime_library_dirs=None, extra_objects=None, extra_compile_args=None, extra_link_args=None, export_symbols=None, swig_opts = None, depends=None, language=None, **kw # To catch unknown keywords ): assert type(name) is StringType, "'name' must be a string" assert (type(sources) is ListType and map(type, sources) == [StringType]*len(sources)), \ "'sources' must be a list of strings" self.name = name self.sources = sources self.include_dirs = include_dirs or [] self.define_macros = define_macros or [] self.undef_macros = undef_macros or [] self.library_dirs = library_dirs or [] self.libraries = libraries or [] self.runtime_library_dirs = runtime_library_dirs or [] self.extra_objects = extra_objects or [] self.extra_compile_args = extra_compile_args or [] self.extra_link_args = extra_link_args or [] self.export_symbols = export_symbols or [] self.swig_opts = swig_opts or [] self.depends = depends or [] self.language = language # If there are unknown keyword options, warn about them if len(kw): L = kw.keys() ; L.sort() L = map(repr, L) msg = "Unknown Extension options: " + string.join(L, ', ') if warnings is not None: warnings.warn(msg) else: sys.stderr.write(msg + '\n') # class Extension def read_setup_file (filename): from distutils.sysconfig import \ parse_makefile, expand_makefile_vars, _variable_rx from distutils.text_file import TextFile from distutils.util import split_quoted # First pass over the file to gather "VAR = VALUE" assignments. vars = parse_makefile(filename) # Second pass to gobble up the real content: lines of the form # <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...] 
file = TextFile(filename, strip_comments=1, skip_blanks=1, join_lines=1, lstrip_ws=1, rstrip_ws=1) try: extensions = [] while 1: line = file.readline() if line is None: # eof break if _variable_rx.match(line): # VAR=VALUE, handled in first pass continue if line[0] == line[-1] == "*": file.warn("'%s' lines not handled yet" % line) continue #print "original line: " + line line = expand_makefile_vars(line, vars) words = split_quoted(line) #print "expanded line: " + line # NB. this parses a slightly different syntax than the old # makesetup script: here, there must be exactly one extension per # line, and it must be the first word of the line. I have no idea # why the old syntax supported multiple extensions per line, as # they all wind up being the same. module = words[0] ext = Extension(module, []) append_next_word = None for word in words[1:]: if append_next_word is not None: append_next_word.append(word) append_next_word = None continue suffix = os.path.splitext(word)[1] switch = word[0:2] ; value = word[2:] if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"): # hmm, should we do something about C vs. C++ sources? # or leave it up to the CCompiler implementation to # worry about? ext.sources.append(word) elif switch == "-I": ext.include_dirs.append(value) elif switch == "-D": equals = string.find(value, "=") if equals == -1: # bare "-DFOO" -- no value ext.define_macros.append((value, None)) else: # "-DFOO=blah" ext.define_macros.append((value[0:equals], value[equals+2:])) elif switch == "-U": ext.undef_macros.append(value) elif switch == "-C": # only here 'cause makesetup has it! 
ext.extra_compile_args.append(word) elif switch == "-l": ext.libraries.append(value) elif switch == "-L": ext.library_dirs.append(value) elif switch == "-R": ext.runtime_library_dirs.append(value) elif word == "-rpath": append_next_word = ext.runtime_library_dirs elif word == "-Xlinker": append_next_word = ext.extra_link_args elif word == "-Xcompiler": append_next_word = ext.extra_compile_args elif switch == "-u": ext.extra_link_args.append(word) if not value: append_next_word = ext.extra_link_args elif word == "-Xcompiler": append_next_word = ext.extra_compile_args elif switch == "-u": ext.extra_link_args.append(word) if not value: append_next_word = ext.extra_link_args elif suffix in (".a", ".so", ".sl", ".o", ".dylib"): # NB. a really faithful emulation of makesetup would # append a .o file to extra_objects only if it # had a slash in it; otherwise, it would s/.o/.c/ # and append it to sources. Hmmmm. ext.extra_objects.append(word) else: file.warn("unrecognized argument '%s'" % word) extensions.append(ext) finally: file.close() #print "module:", module #print "source files:", source_files #print "cpp args:", cpp_args #print "lib args:", library_args #extensions[module] = { 'sources': source_files, # 'cpp_args': cpp_args, # 'lib_args': library_args } return extensions # read_setup_file ()
CoDEmanX/ArangoDB
refs/heads/devel
3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/win32/lib/win32con.py
21
# Generated by h2py from commdlg.h (plus modifications 4jan98) WINVER = 1280 WM_USER = 1024 PY_0U = 0 OFN_READONLY = 1 OFN_OVERWRITEPROMPT = 2 OFN_HIDEREADONLY = 4 OFN_NOCHANGEDIR = 8 OFN_SHOWHELP = 16 OFN_ENABLEHOOK = 32 OFN_ENABLETEMPLATE = 64 OFN_ENABLETEMPLATEHANDLE = 128 OFN_NOVALIDATE = 256 OFN_ALLOWMULTISELECT = 512 OFN_EXTENSIONDIFFERENT = 1024 OFN_PATHMUSTEXIST = 2048 OFN_FILEMUSTEXIST = 4096 OFN_CREATEPROMPT = 8192 OFN_SHAREAWARE = 16384 OFN_NOREADONLYRETURN = 32768 OFN_NOTESTFILECREATE = 65536 OFN_NONETWORKBUTTON = 131072 OFN_NOLONGNAMES = 262144 OFN_EXPLORER = 524288 # new look commdlg OFN_NODEREFERENCELINKS = 1048576 OFN_LONGNAMES = 2097152 # force long names for 3.x modules OFN_ENABLEINCLUDENOTIFY = 4194304 # send include message to callback OFN_ENABLESIZING = 8388608 OFN_DONTADDTORECENT = 33554432 OFN_FORCESHOWHIDDEN = 268435456 # Show All files including System and hidden files OFN_EX_NOPLACESBAR = 1 OFN_SHAREFALLTHROUGH = 2 OFN_SHARENOWARN = 1 OFN_SHAREWARN = 0 CDN_FIRST = (PY_0U-601) CDN_LAST = (PY_0U-699) CDN_INITDONE = (CDN_FIRST - 0) CDN_SELCHANGE = (CDN_FIRST - 1) CDN_FOLDERCHANGE = (CDN_FIRST - 2) CDN_SHAREVIOLATION = (CDN_FIRST - 3) CDN_HELP = (CDN_FIRST - 4) CDN_FILEOK = (CDN_FIRST - 5) CDN_TYPECHANGE = (CDN_FIRST - 6) CDN_INCLUDEITEM = (CDN_FIRST - 7) CDM_FIRST = (WM_USER + 100) CDM_LAST = (WM_USER + 200) CDM_GETSPEC = (CDM_FIRST + 0) CDM_GETFILEPATH = (CDM_FIRST + 1) CDM_GETFOLDERPATH = (CDM_FIRST + 2) CDM_GETFOLDERIDLIST = (CDM_FIRST + 3) CDM_SETCONTROLTEXT = (CDM_FIRST + 4) CDM_HIDECONTROL = (CDM_FIRST + 5) CDM_SETDEFEXT = (CDM_FIRST + 6) CC_RGBINIT = 1 CC_FULLOPEN = 2 CC_PREVENTFULLOPEN = 4 CC_SHOWHELP = 8 CC_ENABLEHOOK = 16 CC_ENABLETEMPLATE = 32 CC_ENABLETEMPLATEHANDLE = 64 CC_SOLIDCOLOR = 128 CC_ANYCOLOR = 256 FR_DOWN = 1 FR_WHOLEWORD = 2 FR_MATCHCASE = 4 FR_FINDNEXT = 8 FR_REPLACE = 16 FR_REPLACEALL = 32 FR_DIALOGTERM = 64 FR_SHOWHELP = 128 FR_ENABLEHOOK = 256 FR_ENABLETEMPLATE = 512 FR_NOUPDOWN = 1024 FR_NOMATCHCASE = 2048 
FR_NOWHOLEWORD = 4096 FR_ENABLETEMPLATEHANDLE = 8192 FR_HIDEUPDOWN = 16384 FR_HIDEMATCHCASE = 32768 FR_HIDEWHOLEWORD = 65536 CF_SCREENFONTS = 1 CF_PRINTERFONTS = 2 CF_BOTH = (CF_SCREENFONTS | CF_PRINTERFONTS) CF_SHOWHELP = 4 CF_ENABLEHOOK = 8 CF_ENABLETEMPLATE = 16 CF_ENABLETEMPLATEHANDLE = 32 CF_INITTOLOGFONTSTRUCT = 64 CF_USESTYLE = 128 CF_EFFECTS = 256 CF_APPLY = 512 CF_ANSIONLY = 1024 CF_SCRIPTSONLY = CF_ANSIONLY CF_NOVECTORFONTS = 2048 CF_NOOEMFONTS = CF_NOVECTORFONTS CF_NOSIMULATIONS = 4096 CF_LIMITSIZE = 8192 CF_FIXEDPITCHONLY = 16384 CF_WYSIWYG = 32768 # must also have CF_SCREENFONTS & CF_PRINTERFONTS CF_FORCEFONTEXIST = 65536 CF_SCALABLEONLY = 131072 CF_TTONLY = 262144 CF_NOFACESEL = 524288 CF_NOSTYLESEL = 1048576 CF_NOSIZESEL = 2097152 CF_SELECTSCRIPT = 4194304 CF_NOSCRIPTSEL = 8388608 CF_NOVERTFONTS = 16777216 SIMULATED_FONTTYPE = 32768 PRINTER_FONTTYPE = 16384 SCREEN_FONTTYPE = 8192 BOLD_FONTTYPE = 256 ITALIC_FONTTYPE = 512 REGULAR_FONTTYPE = 1024 OPENTYPE_FONTTYPE = 65536 TYPE1_FONTTYPE = 131072 DSIG_FONTTYPE = 262144 WM_CHOOSEFONT_GETLOGFONT = (WM_USER + 1) WM_CHOOSEFONT_SETLOGFONT = (WM_USER + 101) WM_CHOOSEFONT_SETFLAGS = (WM_USER + 102) LBSELCHSTRINGA = "commdlg_LBSelChangedNotify" SHAREVISTRINGA = "commdlg_ShareViolation" FILEOKSTRINGA = "commdlg_FileNameOK" COLOROKSTRINGA = "commdlg_ColorOK" SETRGBSTRINGA = "commdlg_SetRGBColor" HELPMSGSTRINGA = "commdlg_help" FINDMSGSTRINGA = "commdlg_FindReplace" LBSELCHSTRING = LBSELCHSTRINGA SHAREVISTRING = SHAREVISTRINGA FILEOKSTRING = FILEOKSTRINGA COLOROKSTRING = COLOROKSTRINGA SETRGBSTRING = SETRGBSTRINGA HELPMSGSTRING = HELPMSGSTRINGA FINDMSGSTRING = FINDMSGSTRINGA CD_LBSELNOITEMS = -1 CD_LBSELCHANGE = 0 CD_LBSELSUB = 1 CD_LBSELADD = 2 PD_ALLPAGES = 0 PD_SELECTION = 1 PD_PAGENUMS = 2 PD_NOSELECTION = 4 PD_NOPAGENUMS = 8 PD_COLLATE = 16 PD_PRINTTOFILE = 32 PD_PRINTSETUP = 64 PD_NOWARNING = 128 PD_RETURNDC = 256 PD_RETURNIC = 512 PD_RETURNDEFAULT = 1024 PD_SHOWHELP = 2048 PD_ENABLEPRINTHOOK = 4096 
PD_ENABLESETUPHOOK = 8192 PD_ENABLEPRINTTEMPLATE = 16384 PD_ENABLESETUPTEMPLATE = 32768 PD_ENABLEPRINTTEMPLATEHANDLE = 65536 PD_ENABLESETUPTEMPLATEHANDLE = 131072 PD_USEDEVMODECOPIES = 262144 PD_DISABLEPRINTTOFILE = 524288 PD_HIDEPRINTTOFILE = 1048576 PD_NONETWORKBUTTON = 2097152 DN_DEFAULTPRN = 1 WM_PSD_PAGESETUPDLG = (WM_USER ) WM_PSD_FULLPAGERECT = (WM_USER+1) WM_PSD_MINMARGINRECT = (WM_USER+2) WM_PSD_MARGINRECT = (WM_USER+3) WM_PSD_GREEKTEXTRECT = (WM_USER+4) WM_PSD_ENVSTAMPRECT = (WM_USER+5) WM_PSD_YAFULLPAGERECT = (WM_USER+6) PSD_DEFAULTMINMARGINS = 0 # default (printer's) PSD_INWININIINTLMEASURE = 0 # 1st of 4 possible PSD_MINMARGINS = 1 # use caller's PSD_MARGINS = 2 # use caller's PSD_INTHOUSANDTHSOFINCHES = 4 # 2nd of 4 possible PSD_INHUNDREDTHSOFMILLIMETERS = 8 # 3rd of 4 possible PSD_DISABLEMARGINS = 16 PSD_DISABLEPRINTER = 32 PSD_NOWARNING = 128 # must be same as PD_* PSD_DISABLEORIENTATION = 256 PSD_RETURNDEFAULT = 1024 # must be same as PD_* PSD_DISABLEPAPER = 512 PSD_SHOWHELP = 2048 # must be same as PD_* PSD_ENABLEPAGESETUPHOOK = 8192 # must be same as PD_* PSD_ENABLEPAGESETUPTEMPLATE = 32768 # must be same as PD_* PSD_ENABLEPAGESETUPTEMPLATEHANDLE = 131072 # must be same as PD_* PSD_ENABLEPAGEPAINTHOOK = 262144 PSD_DISABLEPAGEPAINTING = 524288 PSD_NONETWORKBUTTON = 2097152 # must be same as PD_* # Generated by h2py from winreg.h HKEY_CLASSES_ROOT = -2147483648 HKEY_CURRENT_USER = -2147483647 HKEY_LOCAL_MACHINE = -2147483646 HKEY_USERS = -2147483645 HKEY_PERFORMANCE_DATA = -2147483644 HKEY_CURRENT_CONFIG = -2147483643 HKEY_DYN_DATA = -2147483642 HKEY_PERFORMANCE_TEXT = -2147483568 # ?? 4Jan98 HKEY_PERFORMANCE_NLSTEXT = -2147483552 # ?? 
4Jan98 # Generated by h2py from winuser.h HWND_BROADCAST = 65535 HWND_DESKTOP = 0 HWND_TOP = 0 HWND_BOTTOM = 1 HWND_TOPMOST = -1 HWND_NOTOPMOST = -2 HWND_MESSAGE = -3 # winuser.h line 4601 SM_CXSCREEN = 0 SM_CYSCREEN = 1 SM_CXVSCROLL = 2 SM_CYHSCROLL = 3 SM_CYCAPTION = 4 SM_CXBORDER = 5 SM_CYBORDER = 6 SM_CXDLGFRAME = 7 SM_CYDLGFRAME = 8 SM_CYVTHUMB = 9 SM_CXHTHUMB = 10 SM_CXICON = 11 SM_CYICON = 12 SM_CXCURSOR = 13 SM_CYCURSOR = 14 SM_CYMENU = 15 SM_CXFULLSCREEN = 16 SM_CYFULLSCREEN = 17 SM_CYKANJIWINDOW = 18 SM_MOUSEPRESENT = 19 SM_CYVSCROLL = 20 SM_CXHSCROLL = 21 SM_DEBUG = 22 SM_SWAPBUTTON = 23 SM_RESERVED1 = 24 SM_RESERVED2 = 25 SM_RESERVED3 = 26 SM_RESERVED4 = 27 SM_CXMIN = 28 SM_CYMIN = 29 SM_CXSIZE = 30 SM_CYSIZE = 31 SM_CXFRAME = 32 SM_CYFRAME = 33 SM_CXMINTRACK = 34 SM_CYMINTRACK = 35 SM_CXDOUBLECLK = 36 SM_CYDOUBLECLK = 37 SM_CXICONSPACING = 38 SM_CYICONSPACING = 39 SM_MENUDROPALIGNMENT = 40 SM_PENWINDOWS = 41 SM_DBCSENABLED = 42 SM_CMOUSEBUTTONS = 43 SM_CXFIXEDFRAME = SM_CXDLGFRAME SM_CYFIXEDFRAME = SM_CYDLGFRAME SM_CXSIZEFRAME = SM_CXFRAME SM_CYSIZEFRAME = SM_CYFRAME SM_SECURE = 44 SM_CXEDGE = 45 SM_CYEDGE = 46 SM_CXMINSPACING = 47 SM_CYMINSPACING = 48 SM_CXSMICON = 49 SM_CYSMICON = 50 SM_CYSMCAPTION = 51 SM_CXSMSIZE = 52 SM_CYSMSIZE = 53 SM_CXMENUSIZE = 54 SM_CYMENUSIZE = 55 SM_ARRANGE = 56 SM_CXMINIMIZED = 57 SM_CYMINIMIZED = 58 SM_CXMAXTRACK = 59 SM_CYMAXTRACK = 60 SM_CXMAXIMIZED = 61 SM_CYMAXIMIZED = 62 SM_NETWORK = 63 SM_CLEANBOOT = 67 SM_CXDRAG = 68 SM_CYDRAG = 69 SM_SHOWSOUNDS = 70 SM_CXMENUCHECK = 71 SM_CYMENUCHECK = 72 SM_SLOWMACHINE = 73 SM_MIDEASTENABLED = 74 SM_MOUSEWHEELPRESENT = 75 SM_XVIRTUALSCREEN = 76 SM_YVIRTUALSCREEN = 77 SM_CXVIRTUALSCREEN = 78 SM_CYVIRTUALSCREEN = 79 SM_CMONITORS = 80 SM_SAMEDISPLAYFORMAT = 81 SM_CMETRICS = 83 MNC_IGNORE = 0 MNC_CLOSE = 1 MNC_EXECUTE = 2 MNC_SELECT = 3 MNS_NOCHECK = -2147483648 MNS_MODELESS = 1073741824 MNS_DRAGDROP = 536870912 MNS_AUTODISMISS = 268435456 MNS_NOTIFYBYPOS = 134217728 MNS_CHECKORBMP 
= 67108864 MIM_MAXHEIGHT = 1 MIM_BACKGROUND = 2 MIM_HELPID = 4 MIM_MENUDATA = 8 MIM_STYLE = 16 MIM_APPLYTOSUBMENUS = -2147483648 MND_CONTINUE = 0 MND_ENDMENU = 1 MNGOF_GAP = 3 MNGO_NOINTERFACE = 0 MNGO_NOERROR = 1 MIIM_STATE = 1 MIIM_ID = 2 MIIM_SUBMENU = 4 MIIM_CHECKMARKS = 8 MIIM_TYPE = 16 MIIM_DATA = 32 MIIM_STRING = 64 MIIM_BITMAP = 128 MIIM_FTYPE = 256 HBMMENU_CALLBACK = -1 HBMMENU_SYSTEM = 1 HBMMENU_MBAR_RESTORE = 2 HBMMENU_MBAR_MINIMIZE = 3 HBMMENU_MBAR_CLOSE = 5 HBMMENU_MBAR_CLOSE_D = 6 HBMMENU_MBAR_MINIMIZE_D = 7 HBMMENU_POPUP_CLOSE = 8 HBMMENU_POPUP_RESTORE = 9 HBMMENU_POPUP_MAXIMIZE = 10 HBMMENU_POPUP_MINIMIZE = 11 GMDI_USEDISABLED = 1 GMDI_GOINTOPOPUPS = 2 TPM_LEFTBUTTON = 0 TPM_RIGHTBUTTON = 2 TPM_LEFTALIGN = 0 TPM_CENTERALIGN = 4 TPM_RIGHTALIGN = 8 TPM_TOPALIGN = 0 TPM_VCENTERALIGN = 16 TPM_BOTTOMALIGN = 32 TPM_HORIZONTAL = 0 TPM_VERTICAL = 64 TPM_NONOTIFY = 128 TPM_RETURNCMD = 256 TPM_RECURSE = 1 DOF_EXECUTABLE = 32769 DOF_DOCUMENT = 32770 DOF_DIRECTORY = 32771 DOF_MULTIPLE = 32772 DOF_PROGMAN = 1 DOF_SHELLDATA = 2 DO_DROPFILE = 1162627398 DO_PRINTFILE = 1414419024 DT_TOP = 0 DT_LEFT = 0 DT_CENTER = 1 DT_RIGHT = 2 DT_VCENTER = 4 DT_BOTTOM = 8 DT_WORDBREAK = 16 DT_SINGLELINE = 32 DT_EXPANDTABS = 64 DT_TABSTOP = 128 DT_NOCLIP = 256 DT_EXTERNALLEADING = 512 DT_CALCRECT = 1024 DT_NOPREFIX = 2048 DT_INTERNAL = 4096 DT_EDITCONTROL = 8192 DT_PATH_ELLIPSIS = 16384 DT_END_ELLIPSIS = 32768 DT_MODIFYSTRING = 65536 DT_RTLREADING = 131072 DT_WORD_ELLIPSIS = 262144 DST_COMPLEX = 0 DST_TEXT = 1 DST_PREFIXTEXT = 2 DST_ICON = 3 DST_BITMAP = 4 DSS_NORMAL = 0 DSS_UNION = 16 DSS_DISABLED = 32 DSS_MONO = 128 DSS_RIGHT = 32768 DCX_WINDOW = 1 DCX_CACHE = 2 DCX_NORESETATTRS = 4 DCX_CLIPCHILDREN = 8 DCX_CLIPSIBLINGS = 16 DCX_PARENTCLIP = 32 DCX_EXCLUDERGN = 64 DCX_INTERSECTRGN = 128 DCX_EXCLUDEUPDATE = 256 DCX_INTERSECTUPDATE = 512 DCX_LOCKWINDOWUPDATE = 1024 DCX_VALIDATE = 2097152 CUDR_NORMAL = 0 CUDR_NOSNAPTOGRID = 1 CUDR_NORESOLVEPOSITIONS = 2 CUDR_NOCLOSEGAPS = 4 
CUDR_NEGATIVECOORDS = 8 CUDR_NOPRIMARY = 16 RDW_INVALIDATE = 1 RDW_INTERNALPAINT = 2 RDW_ERASE = 4 RDW_VALIDATE = 8 RDW_NOINTERNALPAINT = 16 RDW_NOERASE = 32 RDW_NOCHILDREN = 64 RDW_ALLCHILDREN = 128 RDW_UPDATENOW = 256 RDW_ERASENOW = 512 RDW_FRAME = 1024 RDW_NOFRAME = 2048 SW_SCROLLCHILDREN = 1 SW_INVALIDATE = 2 SW_ERASE = 4 SW_SMOOTHSCROLL = 16 # Use smooth scrolling ESB_ENABLE_BOTH = 0 ESB_DISABLE_BOTH = 3 ESB_DISABLE_LEFT = 1 ESB_DISABLE_RIGHT = 2 ESB_DISABLE_UP = 1 ESB_DISABLE_DOWN = 2 ESB_DISABLE_LTUP = ESB_DISABLE_LEFT ESB_DISABLE_RTDN = ESB_DISABLE_RIGHT HELPINFO_WINDOW = 1 HELPINFO_MENUITEM = 2 MB_OK = 0 MB_OKCANCEL = 1 MB_ABORTRETRYIGNORE = 2 MB_YESNOCANCEL = 3 MB_YESNO = 4 MB_RETRYCANCEL = 5 MB_ICONHAND = 16 MB_ICONQUESTION = 32 MB_ICONEXCLAMATION = 48 MB_ICONASTERISK = 64 MB_ICONWARNING = MB_ICONEXCLAMATION MB_ICONERROR = MB_ICONHAND MB_ICONINFORMATION = MB_ICONASTERISK MB_ICONSTOP = MB_ICONHAND MB_DEFBUTTON1 = 0 MB_DEFBUTTON2 = 256 MB_DEFBUTTON3 = 512 MB_DEFBUTTON4 = 768 MB_APPLMODAL = 0 MB_SYSTEMMODAL = 4096 MB_TASKMODAL = 8192 MB_HELP = 16384 MB_NOFOCUS = 32768 MB_SETFOREGROUND = 65536 MB_DEFAULT_DESKTOP_ONLY = 131072 MB_TOPMOST = 262144L MB_RIGHT = 524288 MB_RTLREADING = 1048576 MB_SERVICE_NOTIFICATION = 2097152 MB_TYPEMASK = 15 MB_USERICON = 128 MB_ICONMASK = 240 MB_DEFMASK = 3840 MB_MODEMASK = 12288 MB_MISCMASK = 49152 # winuser.h line 6373 CWP_ALL = 0 CWP_SKIPINVISIBLE = 1 CWP_SKIPDISABLED = 2 CWP_SKIPTRANSPARENT = 4 CTLCOLOR_MSGBOX = 0 CTLCOLOR_EDIT = 1 CTLCOLOR_LISTBOX = 2 CTLCOLOR_BTN = 3 CTLCOLOR_DLG = 4 CTLCOLOR_SCROLLBAR = 5 CTLCOLOR_STATIC = 6 CTLCOLOR_MAX = 7 COLOR_SCROLLBAR = 0 COLOR_BACKGROUND = 1 COLOR_ACTIVECAPTION = 2 COLOR_INACTIVECAPTION = 3 COLOR_MENU = 4 COLOR_WINDOW = 5 COLOR_WINDOWFRAME = 6 COLOR_MENUTEXT = 7 COLOR_WINDOWTEXT = 8 COLOR_CAPTIONTEXT = 9 COLOR_ACTIVEBORDER = 10 COLOR_INACTIVEBORDER = 11 COLOR_APPWORKSPACE = 12 COLOR_HIGHLIGHT = 13 COLOR_HIGHLIGHTTEXT = 14 COLOR_BTNFACE = 15 COLOR_BTNSHADOW = 16 COLOR_GRAYTEXT = 17 
COLOR_BTNTEXT = 18 COLOR_INACTIVECAPTIONTEXT = 19 COLOR_BTNHIGHLIGHT = 20 COLOR_3DDKSHADOW = 21 COLOR_3DLIGHT = 22 COLOR_INFOTEXT = 23 COLOR_INFOBK = 24 COLOR_HOTLIGHT = 26 COLOR_GRADIENTACTIVECAPTION = 27 COLOR_GRADIENTINACTIVECAPTION = 28 COLOR_DESKTOP = COLOR_BACKGROUND COLOR_3DFACE = COLOR_BTNFACE COLOR_3DSHADOW = COLOR_BTNSHADOW COLOR_3DHIGHLIGHT = COLOR_BTNHIGHLIGHT COLOR_3DHILIGHT = COLOR_BTNHIGHLIGHT COLOR_BTNHILIGHT = COLOR_BTNHIGHLIGHT GW_HWNDFIRST = 0 GW_HWNDLAST = 1 GW_HWNDNEXT = 2 GW_HWNDPREV = 3 GW_OWNER = 4 GW_CHILD = 5 GW_ENABLEDPOPUP = 6 GW_MAX = 6 MF_INSERT = 0 MF_CHANGE = 128 MF_APPEND = 256 MF_DELETE = 512 MF_REMOVE = 4096 MF_BYCOMMAND = 0 MF_BYPOSITION = 1024 MF_SEPARATOR = 2048 MF_ENABLED = 0 MF_GRAYED = 1 MF_DISABLED = 2 MF_UNCHECKED = 0 MF_CHECKED = 8 MF_USECHECKBITMAPS = 512 MF_STRING = 0 MF_BITMAP = 4 MF_OWNERDRAW = 256 MF_POPUP = 16 MF_MENUBARBREAK = 32 MF_MENUBREAK = 64 MF_UNHILITE = 0 MF_HILITE = 128 MF_DEFAULT = 4096 MF_SYSMENU = 8192 MF_HELP = 16384 MF_RIGHTJUSTIFY = 16384 MF_MOUSESELECT = 32768 MF_END = 128 MFT_STRING = MF_STRING MFT_BITMAP = MF_BITMAP MFT_MENUBARBREAK = MF_MENUBARBREAK MFT_MENUBREAK = MF_MENUBREAK MFT_OWNERDRAW = MF_OWNERDRAW MFT_RADIOCHECK = 512 MFT_SEPARATOR = MF_SEPARATOR MFT_RIGHTORDER = 8192 MFT_RIGHTJUSTIFY = MF_RIGHTJUSTIFY MFS_GRAYED = 3 MFS_DISABLED = MFS_GRAYED MFS_CHECKED = MF_CHECKED MFS_HILITE = MF_HILITE MFS_ENABLED = MF_ENABLED MFS_UNCHECKED = MF_UNCHECKED MFS_UNHILITE = MF_UNHILITE MFS_DEFAULT = MF_DEFAULT MFS_MASK = 4235L MFS_HOTTRACKDRAWN = 268435456 MFS_CACHEDBMP = 536870912 MFS_BOTTOMGAPDROP = 1073741824 MFS_TOPGAPDROP = -2147483648 MFS_GAPDROP = -1073741824 SC_SIZE = 61440 SC_MOVE = 61456 SC_MINIMIZE = 61472 SC_MAXIMIZE = 61488 SC_NEXTWINDOW = 61504 SC_PREVWINDOW = 61520 SC_CLOSE = 61536 SC_VSCROLL = 61552 SC_HSCROLL = 61568 SC_MOUSEMENU = 61584 SC_KEYMENU = 61696 SC_ARRANGE = 61712 SC_RESTORE = 61728 SC_TASKLIST = 61744 SC_SCREENSAVE = 61760 SC_HOTKEY = 61776 SC_DEFAULT = 61792 SC_MONITORPOWER 
= 61808 SC_CONTEXTHELP = 61824 SC_SEPARATOR = 61455 SC_ICON = SC_MINIMIZE SC_ZOOM = SC_MAXIMIZE IDC_ARROW = 32512 IDC_IBEAM = 32513 IDC_WAIT = 32514 IDC_CROSS = 32515 IDC_UPARROW = 32516 IDC_SIZE = 32640 # OBSOLETE: use IDC_SIZEALL IDC_ICON = 32641 # OBSOLETE: use IDC_ARROW IDC_SIZENWSE = 32642 IDC_SIZENESW = 32643 IDC_SIZEWE = 32644 IDC_SIZENS = 32645 IDC_SIZEALL = 32646 IDC_NO = 32648 IDC_HAND = 32649 IDC_APPSTARTING = 32650 IDC_HELP = 32651 IMAGE_BITMAP = 0 IMAGE_ICON = 1 IMAGE_CURSOR = 2 IMAGE_ENHMETAFILE = 3 LR_DEFAULTCOLOR = 0 LR_MONOCHROME = 1 LR_COLOR = 2 LR_COPYRETURNORG = 4 LR_COPYDELETEORG = 8 LR_LOADFROMFILE = 16 LR_LOADTRANSPARENT = 32 LR_DEFAULTSIZE = 64 LR_LOADREALSIZE = 128 LR_LOADMAP3DCOLORS = 4096 LR_CREATEDIBSECTION = 8192 LR_COPYFROMRESOURCE = 16384 LR_SHARED = 32768 DI_MASK = 1 DI_IMAGE = 2 DI_NORMAL = 3 DI_COMPAT = 4 DI_DEFAULTSIZE = 8 RES_ICON = 1 RES_CURSOR = 2 OBM_CLOSE = 32754 OBM_UPARROW = 32753 OBM_DNARROW = 32752 OBM_RGARROW = 32751 OBM_LFARROW = 32750 OBM_REDUCE = 32749 OBM_ZOOM = 32748 OBM_RESTORE = 32747 OBM_REDUCED = 32746 OBM_ZOOMD = 32745 OBM_RESTORED = 32744 OBM_UPARROWD = 32743 OBM_DNARROWD = 32742 OBM_RGARROWD = 32741 OBM_LFARROWD = 32740 OBM_MNARROW = 32739 OBM_COMBO = 32738 OBM_UPARROWI = 32737 OBM_DNARROWI = 32736 OBM_RGARROWI = 32735 OBM_LFARROWI = 32734 OBM_OLD_CLOSE = 32767 OBM_SIZE = 32766 OBM_OLD_UPARROW = 32765 OBM_OLD_DNARROW = 32764 OBM_OLD_RGARROW = 32763 OBM_OLD_LFARROW = 32762 OBM_BTSIZE = 32761 OBM_CHECK = 32760 OBM_CHECKBOXES = 32759 OBM_BTNCORNERS = 32758 OBM_OLD_REDUCE = 32757 OBM_OLD_ZOOM = 32756 OBM_OLD_RESTORE = 32755 OCR_NORMAL = 32512 OCR_IBEAM = 32513 OCR_WAIT = 32514 OCR_CROSS = 32515 OCR_UP = 32516 OCR_SIZE = 32640 OCR_ICON = 32641 OCR_SIZENWSE = 32642 OCR_SIZENESW = 32643 OCR_SIZEWE = 32644 OCR_SIZENS = 32645 OCR_SIZEALL = 32646 OCR_ICOCUR = 32647 OCR_NO = 32648 OCR_HAND = 32649 OCR_APPSTARTING = 32650 # winuser.h line 7455 OIC_SAMPLE = 32512 OIC_HAND = 32513 OIC_QUES = 32514 OIC_BANG = 32515 OIC_NOTE 
= 32516 OIC_WINLOGO = 32517 OIC_WARNING = OIC_BANG OIC_ERROR = OIC_HAND OIC_INFORMATION = OIC_NOTE ORD_LANGDRIVER = 1 IDI_APPLICATION = 32512 IDI_HAND = 32513 IDI_QUESTION = 32514 IDI_EXCLAMATION = 32515 IDI_ASTERISK = 32516 IDI_WINLOGO = 32517 IDI_WARNING = IDI_EXCLAMATION IDI_ERROR = IDI_HAND IDI_INFORMATION = IDI_ASTERISK IDOK = 1 IDCANCEL = 2 IDABORT = 3 IDRETRY = 4 IDIGNORE = 5 IDYES = 6 IDNO = 7 IDCLOSE = 8 IDHELP = 9 ES_LEFT = 0 ES_CENTER = 1 ES_RIGHT = 2 ES_MULTILINE = 4 ES_UPPERCASE = 8 ES_LOWERCASE = 16 ES_PASSWORD = 32 ES_AUTOVSCROLL = 64 ES_AUTOHSCROLL = 128 ES_NOHIDESEL = 256 ES_OEMCONVERT = 1024 ES_READONLY = 2048 ES_WANTRETURN = 4096 ES_NUMBER = 8192 EN_SETFOCUS = 256 EN_KILLFOCUS = 512 EN_CHANGE = 768 EN_UPDATE = 1024 EN_ERRSPACE = 1280 EN_MAXTEXT = 1281 EN_HSCROLL = 1537 EN_VSCROLL = 1538 EC_LEFTMARGIN = 1 EC_RIGHTMARGIN = 2 EC_USEFONTINFO = 65535 EMSIS_COMPOSITIONSTRING = 1 EIMES_GETCOMPSTRATONCE = 1 EIMES_CANCELCOMPSTRINFOCUS = 2 EIMES_COMPLETECOMPSTRKILLFOCUS = 4 EM_GETSEL = 176 EM_SETSEL = 177 EM_GETRECT = 178 EM_SETRECT = 179 EM_SETRECTNP = 180 EM_SCROLL = 181 EM_LINESCROLL = 182 EM_SCROLLCARET = 183 EM_GETMODIFY = 184 EM_SETMODIFY = 185 EM_GETLINECOUNT = 186 EM_LINEINDEX = 187 EM_SETHANDLE = 188 EM_GETHANDLE = 189 EM_GETTHUMB = 190 EM_LINELENGTH = 193 EM_REPLACESEL = 194 EM_GETLINE = 196 EM_LIMITTEXT = 197 EM_CANUNDO = 198 EM_UNDO = 199 EM_FMTLINES = 200 EM_LINEFROMCHAR = 201 EM_SETTABSTOPS = 203 EM_SETPASSWORDCHAR = 204 EM_EMPTYUNDOBUFFER = 205 EM_GETFIRSTVISIBLELINE = 206 EM_SETREADONLY = 207 EM_SETWORDBREAKPROC = 208 EM_GETWORDBREAKPROC = 209 EM_GETPASSWORDCHAR = 210 EM_SETMARGINS = 211 EM_GETMARGINS = 212 EM_SETLIMITTEXT = EM_LIMITTEXT EM_GETLIMITTEXT = 213 EM_POSFROMCHAR = 214 EM_CHARFROMPOS = 215 EM_SETIMESTATUS = 216 EM_GETIMESTATUS = 217 WB_LEFT = 0 WB_RIGHT = 1 WB_ISDELIMITER = 2 BS_PUSHBUTTON = 0 BS_DEFPUSHBUTTON = 1 BS_CHECKBOX = 2 BS_AUTOCHECKBOX = 3 BS_RADIOBUTTON = 4 BS_3STATE = 5 BS_AUTO3STATE = 6 BS_GROUPBOX = 7 BS_USERBUTTON 
= 8 BS_AUTORADIOBUTTON = 9 BS_OWNERDRAW = 11L BS_LEFTTEXT = 32 BS_TEXT = 0 BS_ICON = 64 BS_BITMAP = 128 BS_LEFT = 256 BS_RIGHT = 512 BS_CENTER = 768 BS_TOP = 1024 BS_BOTTOM = 2048 BS_VCENTER = 3072 BS_PUSHLIKE = 4096 BS_MULTILINE = 8192 BS_NOTIFY = 16384 BS_FLAT = 32768 BS_RIGHTBUTTON = BS_LEFTTEXT BN_CLICKED = 0 BN_PAINT = 1 BN_HILITE = 2 BN_UNHILITE = 3 BN_DISABLE = 4 BN_DOUBLECLICKED = 5 BN_PUSHED = BN_HILITE BN_UNPUSHED = BN_UNHILITE BN_DBLCLK = BN_DOUBLECLICKED BN_SETFOCUS = 6 BN_KILLFOCUS = 7 BM_GETCHECK = 240 BM_SETCHECK = 241 BM_GETSTATE = 242 BM_SETSTATE = 243 BM_SETSTYLE = 244 BM_CLICK = 245 BM_GETIMAGE = 246 BM_SETIMAGE = 247 BST_UNCHECKED = 0 BST_CHECKED = 1 BST_INDETERMINATE = 2 BST_PUSHED = 4 BST_FOCUS = 8 SS_LEFT = 0 SS_CENTER = 1 SS_RIGHT = 2 SS_ICON = 3 SS_BLACKRECT = 4 SS_GRAYRECT = 5 SS_WHITERECT = 6 SS_BLACKFRAME = 7 SS_GRAYFRAME = 8 SS_WHITEFRAME = 9 SS_USERITEM = 10 SS_SIMPLE = 11 SS_LEFTNOWORDWRAP = 12 SS_BITMAP = 14 SS_OWNERDRAW = 13 SS_ENHMETAFILE = 15 SS_ETCHEDHORZ = 16 SS_ETCHEDVERT = 17 SS_ETCHEDFRAME = 18 SS_TYPEMASK = 31 SS_NOPREFIX = 128 SS_NOTIFY = 256 SS_CENTERIMAGE = 512 SS_RIGHTJUST = 1024 SS_REALSIZEIMAGE = 2048 SS_SUNKEN = 4096 SS_ENDELLIPSIS = 16384 SS_PATHELLIPSIS = 32768 SS_WORDELLIPSIS = 49152 SS_ELLIPSISMASK = 49152 STM_SETICON = 368 STM_GETICON = 369 STM_SETIMAGE = 370 STM_GETIMAGE = 371 STN_CLICKED = 0 STN_DBLCLK = 1 STN_ENABLE = 2 STN_DISABLE = 3 STM_MSGMAX = 372 DWL_MSGRESULT = 0 DWL_DLGPROC = 4 DWL_USER = 8 DDL_READWRITE = 0 DDL_READONLY = 1 DDL_HIDDEN = 2 DDL_SYSTEM = 4 DDL_DIRECTORY = 16 DDL_ARCHIVE = 32 DDL_POSTMSGS = 8192 DDL_DRIVES = 16384 DDL_EXCLUSIVE = 32768 #from winuser.h line 153 RT_CURSOR = 1 RT_BITMAP = 2 RT_ICON = 3 RT_MENU = 4 RT_DIALOG = 5 RT_STRING = 6 RT_FONTDIR = 7 RT_FONT = 8 RT_ACCELERATOR = 9 RT_RCDATA = 10 RT_MESSAGETABLE = 11 DIFFERENCE = 11 RT_GROUP_CURSOR = (RT_CURSOR + DIFFERENCE) RT_GROUP_ICON = (RT_ICON + DIFFERENCE) RT_VERSION = 16 RT_DLGINCLUDE = 17 RT_PLUGPLAY = 19 RT_VXD = 20 
# Generated Win32 constants: remaining resource types (RT_ANICURSOR..RT_HTML),
# scroll-bar codes (SB_*), ShowWindow commands (SW_*), AnimateWindow flags
# (AW_*), key-state flags (KF_*), virtual-key codes (VK_*), and windows-hook
# ids/codes (WH_*, HC_*, HCBT_*, MSGF_*, HSHELL_*) from winuser.h.
# Values and name aliases (e.g. VK_HANGEUL == VK_HANGUL == 21) mirror the C
# headers exactly; do not "deduplicate" them.
RT_ANICURSOR = 21 RT_ANIICON = 22 RT_HTML = 23 # from winuser.h line 218 SB_HORZ = 0 SB_VERT = 1 SB_CTL = 2 SB_BOTH = 3 SB_LINEUP = 0 SB_LINELEFT = 0 SB_LINEDOWN = 1 SB_LINERIGHT = 1 SB_PAGEUP = 2 SB_PAGELEFT = 2 SB_PAGEDOWN = 3 SB_PAGERIGHT = 3 SB_THUMBPOSITION = 4 SB_THUMBTRACK = 5 SB_TOP = 6 SB_LEFT = 6 SB_BOTTOM = 7 SB_RIGHT = 7 SB_ENDSCROLL = 8 SW_HIDE = 0 SW_SHOWNORMAL = 1 SW_NORMAL = 1 SW_SHOWMINIMIZED = 2 SW_SHOWMAXIMIZED = 3 SW_MAXIMIZE = 3 SW_SHOWNOACTIVATE = 4 SW_SHOW = 5 SW_MINIMIZE = 6 SW_SHOWMINNOACTIVE = 7 SW_SHOWNA = 8 SW_RESTORE = 9 SW_SHOWDEFAULT = 10 SW_FORCEMINIMIZE = 11 SW_MAX = 11 HIDE_WINDOW = 0 SHOW_OPENWINDOW = 1 SHOW_ICONWINDOW = 2 SHOW_FULLSCREEN = 3 SHOW_OPENNOACTIVATE = 4 SW_PARENTCLOSING = 1 SW_OTHERZOOM = 2 SW_PARENTOPENING = 3 SW_OTHERUNZOOM = 4 AW_HOR_POSITIVE = 1 AW_HOR_NEGATIVE = 2 AW_VER_POSITIVE = 4 AW_VER_NEGATIVE = 8 AW_CENTER = 16 AW_HIDE = 65536 AW_ACTIVATE = 131072 AW_SLIDE = 262144 AW_BLEND = 524288 KF_EXTENDED = 256 KF_DLGMODE = 2048 KF_MENUMODE = 4096 KF_ALTDOWN = 8192 KF_REPEAT = 16384 KF_UP = 32768 VK_LBUTTON = 1 VK_RBUTTON = 2 VK_CANCEL = 3 VK_MBUTTON = 4 VK_BACK = 8 VK_TAB = 9 VK_CLEAR = 12 VK_RETURN = 13 VK_SHIFT = 16 VK_CONTROL = 17 VK_MENU = 18 VK_PAUSE = 19 VK_CAPITAL = 20 VK_KANA = 21 VK_HANGEUL = 21 # old name - should be here for compatibility VK_HANGUL = 21 VK_JUNJA = 23 VK_FINAL = 24 VK_HANJA = 25 VK_KANJI = 25 VK_ESCAPE = 27 VK_CONVERT = 28 VK_NONCONVERT = 29 VK_ACCEPT = 30 VK_MODECHANGE = 31 VK_SPACE = 32 VK_PRIOR = 33 VK_NEXT = 34 VK_END = 35 VK_HOME = 36 VK_LEFT = 37 VK_UP = 38 VK_RIGHT = 39 VK_DOWN = 40 VK_SELECT = 41 VK_PRINT = 42 VK_EXECUTE = 43 VK_SNAPSHOT = 44 VK_INSERT = 45 VK_DELETE = 46 VK_HELP = 47 VK_LWIN = 91 VK_RWIN = 92 VK_APPS = 93 VK_NUMPAD0 = 96 VK_NUMPAD1 = 97 VK_NUMPAD2 = 98 VK_NUMPAD3 = 99 VK_NUMPAD4 = 100 VK_NUMPAD5 = 101 VK_NUMPAD6 = 102 VK_NUMPAD7 = 103 VK_NUMPAD8 = 104 VK_NUMPAD9 = 105 VK_MULTIPLY = 106 VK_ADD = 107 VK_SEPARATOR = 108 VK_SUBTRACT = 109 VK_DECIMAL = 110 VK_DIVIDE = 
111 VK_F1 = 112 VK_F2 = 113 VK_F3 = 114 VK_F4 = 115 VK_F5 = 116 VK_F6 = 117 VK_F7 = 118 VK_F8 = 119 VK_F9 = 120 VK_F10 = 121 VK_F11 = 122 VK_F12 = 123 VK_F13 = 124 VK_F14 = 125 VK_F15 = 126 VK_F16 = 127 VK_F17 = 128 VK_F18 = 129 VK_F19 = 130 VK_F20 = 131 VK_F21 = 132 VK_F22 = 133 VK_F23 = 134 VK_F24 = 135 VK_NUMLOCK = 144 VK_SCROLL = 145 VK_LSHIFT = 160 VK_RSHIFT = 161 VK_LCONTROL = 162 VK_RCONTROL = 163 VK_LMENU = 164 VK_RMENU = 165 VK_PROCESSKEY = 229 VK_ATTN = 246 VK_CRSEL = 247 VK_EXSEL = 248 VK_EREOF = 249 VK_PLAY = 250 VK_ZOOM = 251 VK_NONAME = 252 VK_PA1 = 253 VK_OEM_CLEAR = 254 # multi-media related "keys" MOUSEEVENTF_XDOWN = 0x0080 MOUSEEVENTF_XUP = 0x0100 MOUSEEVENTF_WHEEL = 0x0800 VK_XBUTTON1 = 0x05 VK_XBUTTON2 = 0x06 VK_VOLUME_MUTE = 0xAD VK_VOLUME_DOWN = 0xAE VK_VOLUME_UP = 0xAF VK_MEDIA_NEXT_TRACK = 0xB0 VK_MEDIA_PREV_TRACK = 0xB1 VK_MEDIA_PLAY_PAUSE = 0xB3 VK_BROWSER_BACK = 0xA6 VK_BROWSER_FORWARD = 0xA7 WH_MIN = (-1) WH_MSGFILTER = (-1) WH_JOURNALRECORD = 0 WH_JOURNALPLAYBACK = 1 WH_KEYBOARD = 2 WH_GETMESSAGE = 3 WH_CALLWNDPROC = 4 WH_CBT = 5 WH_SYSMSGFILTER = 6 WH_MOUSE = 7 WH_HARDWARE = 8 WH_DEBUG = 9 WH_SHELL = 10 WH_FOREGROUNDIDLE = 11 WH_CALLWNDPROCRET = 12 WH_KEYBOARD_LL = 13 WH_MOUSE_LL = 14 WH_MAX = 14 WH_MINHOOK = WH_MIN WH_MAXHOOK = WH_MAX HC_ACTION = 0 HC_GETNEXT = 1 HC_SKIP = 2 HC_NOREMOVE = 3 HC_NOREM = HC_NOREMOVE HC_SYSMODALON = 4 HC_SYSMODALOFF = 5 HCBT_MOVESIZE = 0 HCBT_MINMAX = 1 HCBT_QS = 2 HCBT_CREATEWND = 3 HCBT_DESTROYWND = 4 HCBT_ACTIVATE = 5 HCBT_CLICKSKIPPED = 6 HCBT_KEYSKIPPED = 7 HCBT_SYSCOMMAND = 8 HCBT_SETFOCUS = 9 MSGF_DIALOGBOX = 0 MSGF_MESSAGEBOX = 1 MSGF_MENU = 2 #MSGF_MOVE = 3 #MSGF_SIZE = 4 MSGF_SCROLLBAR = 5 MSGF_NEXTWINDOW = 6 #MSGF_MAINLOOP = 8 MSGF_MAX = 8 MSGF_USER = 4096 HSHELL_WINDOWCREATED = 1 HSHELL_WINDOWDESTROYED = 2 HSHELL_ACTIVATESHELLWINDOW = 3 HSHELL_WINDOWACTIVATED = 4 HSHELL_GETMINRECT = 5 HSHELL_REDRAW = 6 HSHELL_TASKMAN = 7 HSHELL_LANGUAGE = 8 HSHELL_ACCESSIBILITYSTATE = 11 ACCESS_STICKYKEYS = 1 
# Generated Win32 constants: low-level hook flags (LLKHF_*/LLMHF_*), keyboard
# layout flags (HKL_*/KLF_*), desktop and window-station access rights
# (DESKTOP_*/WINSTA_*), Get/SetWindowLong and Get/SetClassLong offsets
# (GWL_*/GCL_* - note the negative byte offsets come straight from winuser.h),
# and the first stretch of window-message ids (WM_NULL..WM_MOUSEMOVE) with
# their companion notification codes (WA_*, PWR_*, NFR_*/NF_*).
ACCESS_FILTERKEYS = 2 ACCESS_MOUSEKEYS = 3 # winuser.h line 624 LLKHF_EXTENDED = 1 LLKHF_INJECTED = 16 LLKHF_ALTDOWN = 32 LLKHF_UP = 128 LLMHF_INJECTED = 1 # line 692 HKL_PREV = 0 HKL_NEXT = 1 KLF_ACTIVATE = 1 KLF_SUBSTITUTE_OK = 2 KLF_UNLOADPREVIOUS = 4 KLF_REORDER = 8 KLF_REPLACELANG = 16 KLF_NOTELLSHELL = 128 KLF_SETFORPROCESS = 256 KL_NAMELENGTH = 9 DESKTOP_READOBJECTS = 1 DESKTOP_CREATEWINDOW = 2 DESKTOP_CREATEMENU = 4 DESKTOP_HOOKCONTROL = 8 DESKTOP_JOURNALRECORD = 16 DESKTOP_JOURNALPLAYBACK = 32 DESKTOP_ENUMERATE = 64 DESKTOP_WRITEOBJECTS = 128 DESKTOP_SWITCHDESKTOP = 256 DF_ALLOWOTHERACCOUNTHOOK = 1 WINSTA_ENUMDESKTOPS = 1 WINSTA_READATTRIBUTES = 2 WINSTA_ACCESSCLIPBOARD = 4 WINSTA_CREATEDESKTOP = 8 WINSTA_WRITEATTRIBUTES = 16 WINSTA_ACCESSGLOBALATOMS = 32 WINSTA_EXITWINDOWS = 64 WINSTA_ENUMERATE = 256 WINSTA_READSCREEN = 512 WSF_VISIBLE = 1 UOI_FLAGS = 1 UOI_NAME = 2 UOI_TYPE = 3 UOI_USER_SID = 4 GWL_WNDPROC = (-4) GWL_HINSTANCE = (-6) GWL_HWNDPARENT = (-8) GWL_STYLE = (-16) GWL_EXSTYLE = (-20) GWL_USERDATA = (-21) GWL_ID = (-12) GCL_MENUNAME = (-8) GCL_HBRBACKGROUND = (-10) GCL_HCURSOR = (-12) GCL_HICON = (-14) GCL_HMODULE = (-16) GCL_CBWNDEXTRA = (-18) GCL_CBCLSEXTRA = (-20) GCL_WNDPROC = (-24) GCL_STYLE = (-26) GCW_ATOM = (-32) GCL_HICONSM = (-34) # line 1291 WM_NULL = 0 WM_CREATE = 1 WM_DESTROY = 2 WM_MOVE = 3 WM_SIZE = 5 WM_ACTIVATE = 6 WA_INACTIVE = 0 WA_ACTIVE = 1 WA_CLICKACTIVE = 2 WM_SETFOCUS = 7 WM_KILLFOCUS = 8 WM_ENABLE = 10 WM_SETREDRAW = 11 WM_SETTEXT = 12 WM_GETTEXT = 13 WM_GETTEXTLENGTH = 14 WM_PAINT = 15 WM_CLOSE = 16 WM_QUERYENDSESSION = 17 WM_QUIT = 18 WM_QUERYOPEN = 19 WM_ERASEBKGND = 20 WM_SYSCOLORCHANGE = 21 WM_ENDSESSION = 22 WM_SHOWWINDOW = 24 WM_WININICHANGE = 26 WM_SETTINGCHANGE = WM_WININICHANGE WM_DEVMODECHANGE = 27 WM_ACTIVATEAPP = 28 WM_FONTCHANGE = 29 WM_TIMECHANGE = 30 WM_CANCELMODE = 31 WM_SETCURSOR = 32 WM_MOUSEACTIVATE = 33 WM_CHILDACTIVATE = 34 WM_QUEUESYNC = 35 WM_GETMINMAXINFO = 36 WM_PAINTICON = 38 WM_ICONERASEBKGND = 
39 WM_NEXTDLGCTL = 40 WM_SPOOLERSTATUS = 42 WM_DRAWITEM = 43 WM_MEASUREITEM = 44 WM_DELETEITEM = 45 WM_VKEYTOITEM = 46 WM_CHARTOITEM = 47 WM_SETFONT = 48 WM_GETFONT = 49 WM_SETHOTKEY = 50 WM_GETHOTKEY = 51 WM_QUERYDRAGICON = 55 WM_COMPAREITEM = 57 WM_GETOBJECT = 61 WM_COMPACTING = 65 WM_COMMNOTIFY = 68 WM_WINDOWPOSCHANGING = 70 WM_WINDOWPOSCHANGED = 71 WM_POWER = 72 PWR_OK = 1 PWR_FAIL = (-1) PWR_SUSPENDREQUEST = 1 PWR_SUSPENDRESUME = 2 PWR_CRITICALRESUME = 3 WM_COPYDATA = 74 WM_CANCELJOURNAL = 75 WM_NOTIFY = 78 WM_INPUTLANGCHANGEREQUEST = 80 WM_INPUTLANGCHANGE = 81 WM_TCARD = 82 WM_HELP = 83 WM_USERCHANGED = 84 WM_NOTIFYFORMAT = 85 NFR_ANSI = 1 NFR_UNICODE = 2 NF_QUERY = 3 NF_REQUERY = 4 WM_CONTEXTMENU = 123 WM_STYLECHANGING = 124 WM_STYLECHANGED = 125 WM_DISPLAYCHANGE = 126 WM_GETICON = 127 WM_SETICON = 128 WM_NCCREATE = 129 WM_NCDESTROY = 130 WM_NCCALCSIZE = 131 WM_NCHITTEST = 132 WM_NCPAINT = 133 WM_NCACTIVATE = 134 WM_GETDLGCODE = 135 WM_SYNCPAINT = 136 WM_NCMOUSEMOVE = 160 WM_NCLBUTTONDOWN = 161 WM_NCLBUTTONUP = 162 WM_NCLBUTTONDBLCLK = 163 WM_NCRBUTTONDOWN = 164 WM_NCRBUTTONUP = 165 WM_NCRBUTTONDBLCLK = 166 WM_NCMBUTTONDOWN = 167 WM_NCMBUTTONUP = 168 WM_NCMBUTTONDBLCLK = 169 WM_KEYFIRST = 256 WM_KEYDOWN = 256 WM_KEYUP = 257 WM_CHAR = 258 WM_DEADCHAR = 259 WM_SYSKEYDOWN = 260 WM_SYSKEYUP = 261 WM_SYSCHAR = 262 WM_SYSDEADCHAR = 263 WM_KEYLAST = 264 WM_IME_STARTCOMPOSITION = 269 WM_IME_ENDCOMPOSITION = 270 WM_IME_COMPOSITION = 271 WM_IME_KEYLAST = 271 WM_INITDIALOG = 272 WM_COMMAND = 273 WM_SYSCOMMAND = 274 WM_TIMER = 275 WM_HSCROLL = 276 WM_VSCROLL = 277 WM_INITMENU = 278 WM_INITMENUPOPUP = 279 WM_MENUSELECT = 287 WM_MENUCHAR = 288 WM_ENTERIDLE = 289 WM_MENURBUTTONUP = 290 WM_MENUDRAG = 291 WM_MENUGETOBJECT = 292 WM_UNINITMENUPOPUP = 293 WM_MENUCOMMAND = 294 WM_CTLCOLORMSGBOX = 306 WM_CTLCOLOREDIT = 307 WM_CTLCOLORLISTBOX = 308 WM_CTLCOLORBTN = 309 WM_CTLCOLORDLG = 310 WM_CTLCOLORSCROLLBAR = 311 WM_CTLCOLORSTATIC = 312 WM_MOUSEFIRST = 512 WM_MOUSEMOVE = 512 
# Generated Win32 constants: mouse/clipboard/IME/MDI window messages (WM_*),
# sizing edges (WMSZ_*), hit-test codes (HT*), WM_SIZE/WM_NCCALCSIZE result
# codes, window styles and extended styles (WS_*/WS_EX_* - several are
# signed-32-bit values such as WS_POPUP = -2147483648, matching the DWORD bit
# patterns from winuser.h), class styles (CS_*), border/edge flags (BDR_*,
# EDGE_*), dialog styles (DS_*), dialog-code flags (DLGC_*), and list-box
# messages/styles (LB_*/LBN_*/LBS_*).  The composite styles such as
# WS_OVERLAPPEDWINDOW are OR-combinations of the simple styles defined just
# above them.
WM_LBUTTONDOWN = 513 WM_LBUTTONUP = 514 WM_LBUTTONDBLCLK = 515 WM_RBUTTONDOWN = 516 WM_RBUTTONUP = 517 WM_RBUTTONDBLCLK = 518 WM_MBUTTONDOWN = 519 WM_MBUTTONUP = 520 WM_MBUTTONDBLCLK = 521 WM_MOUSEWHEEL = 522 WM_MOUSELAST = 522 WHEEL_DELTA = 120 # Value for rolling one detent WHEEL_PAGESCROLL = -1 # Scroll one page WM_PARENTNOTIFY = 528 MENULOOP_WINDOW = 0 MENULOOP_POPUP = 1 WM_ENTERMENULOOP = 529 WM_EXITMENULOOP = 530 WM_NEXTMENU = 531 WM_SIZING = 532 WM_CAPTURECHANGED = 533 WM_MOVING = 534 WM_POWERBROADCAST = 536 PBT_APMQUERYSUSPEND = 0 PBT_APMQUERYSTANDBY = 1 PBT_APMQUERYSUSPENDFAILED = 2 PBT_APMQUERYSTANDBYFAILED = 3 PBT_APMSUSPEND = 4 PBT_APMSTANDBY = 5 PBT_APMRESUMECRITICAL = 6 PBT_APMRESUMESUSPEND = 7 PBT_APMRESUMESTANDBY = 8 PBTF_APMRESUMEFROMFAILURE = 1 PBT_APMBATTERYLOW = 9 PBT_APMPOWERSTATUSCHANGE = 10 PBT_APMOEMEVENT = 11 PBT_APMRESUMEAUTOMATIC = 18 WM_DEVICECHANGE = 537 WM_MDICREATE = 544 WM_MDIDESTROY = 545 WM_MDIACTIVATE = 546 WM_MDIRESTORE = 547 WM_MDINEXT = 548 WM_MDIMAXIMIZE = 549 WM_MDITILE = 550 WM_MDICASCADE = 551 WM_MDIICONARRANGE = 552 WM_MDIGETACTIVE = 553 WM_MDISETMENU = 560 WM_ENTERSIZEMOVE = 561 WM_EXITSIZEMOVE = 562 WM_DROPFILES = 563 WM_MDIREFRESHMENU = 564 WM_IME_SETCONTEXT = 641 WM_IME_NOTIFY = 642 WM_IME_CONTROL = 643 WM_IME_COMPOSITIONFULL = 644 WM_IME_SELECT = 645 WM_IME_CHAR = 646 WM_IME_REQUEST = 648 WM_IME_KEYDOWN = 656 WM_IME_KEYUP = 657 WM_MOUSEHOVER = 673 WM_MOUSELEAVE = 675 WM_CUT = 768 WM_COPY = 769 WM_PASTE = 770 WM_CLEAR = 771 WM_UNDO = 772 WM_RENDERFORMAT = 773 WM_RENDERALLFORMATS = 774 WM_DESTROYCLIPBOARD = 775 WM_DRAWCLIPBOARD = 776 WM_PAINTCLIPBOARD = 777 WM_VSCROLLCLIPBOARD = 778 WM_SIZECLIPBOARD = 779 WM_ASKCBFORMATNAME = 780 WM_CHANGECBCHAIN = 781 WM_HSCROLLCLIPBOARD = 782 WM_QUERYNEWPALETTE = 783 WM_PALETTEISCHANGING = 784 WM_PALETTECHANGED = 785 WM_HOTKEY = 786 WM_PRINT = 791 WM_PRINTCLIENT = 792 WM_HANDHELDFIRST = 856 WM_HANDHELDLAST = 863 WM_AFXFIRST = 864 WM_AFXLAST = 895 WM_PENWINFIRST = 896 WM_PENWINLAST = 
911 WM_APP = 32768 WMSZ_LEFT = 1 WMSZ_RIGHT = 2 WMSZ_TOP = 3 WMSZ_TOPLEFT = 4 WMSZ_TOPRIGHT = 5 WMSZ_BOTTOM = 6 WMSZ_BOTTOMLEFT = 7 WMSZ_BOTTOMRIGHT = 8 #ST_BEGINSWP = 0 #ST_ENDSWP = 1 HTERROR = (-2) HTTRANSPARENT = (-1) HTNOWHERE = 0 HTCLIENT = 1 HTCAPTION = 2 HTSYSMENU = 3 HTGROWBOX = 4 HTSIZE = HTGROWBOX HTMENU = 5 HTHSCROLL = 6 HTVSCROLL = 7 HTMINBUTTON = 8 HTMAXBUTTON = 9 HTLEFT = 10 HTRIGHT = 11 HTTOP = 12 HTTOPLEFT = 13 HTTOPRIGHT = 14 HTBOTTOM = 15 HTBOTTOMLEFT = 16 HTBOTTOMRIGHT = 17 HTBORDER = 18 HTREDUCE = HTMINBUTTON HTZOOM = HTMAXBUTTON HTSIZEFIRST = HTLEFT HTSIZELAST = HTBOTTOMRIGHT HTOBJECT = 19 HTCLOSE = 20 HTHELP = 21 SMTO_NORMAL = 0 SMTO_BLOCK = 1 SMTO_ABORTIFHUNG = 2 SMTO_NOTIMEOUTIFNOTHUNG = 8 MA_ACTIVATE = 1 MA_ACTIVATEANDEAT = 2 MA_NOACTIVATE = 3 MA_NOACTIVATEANDEAT = 4 ICON_SMALL = 0 ICON_BIG = 1 SIZE_RESTORED = 0 SIZE_MINIMIZED = 1 SIZE_MAXIMIZED = 2 SIZE_MAXSHOW = 3 SIZE_MAXHIDE = 4 SIZENORMAL = SIZE_RESTORED SIZEICONIC = SIZE_MINIMIZED SIZEFULLSCREEN = SIZE_MAXIMIZED SIZEZOOMSHOW = SIZE_MAXSHOW SIZEZOOMHIDE = SIZE_MAXHIDE WVR_ALIGNTOP = 16 WVR_ALIGNLEFT = 32 WVR_ALIGNBOTTOM = 64 WVR_ALIGNRIGHT = 128 WVR_HREDRAW = 256 WVR_VREDRAW = 512 WVR_REDRAW = (WVR_HREDRAW | WVR_VREDRAW) WVR_VALIDRECTS = 1024 MK_LBUTTON = 1 MK_RBUTTON = 2 MK_SHIFT = 4 MK_CONTROL = 8 MK_MBUTTON = 16 TME_HOVER = 1 TME_LEAVE = 2 TME_QUERY = 1073741824 TME_CANCEL = -2147483648 HOVER_DEFAULT = -1 WS_OVERLAPPED = 0 WS_POPUP = -2147483648 WS_CHILD = 1073741824 WS_MINIMIZE = 536870912 WS_VISIBLE = 268435456 WS_DISABLED = 134217728 WS_CLIPSIBLINGS = 67108864 WS_CLIPCHILDREN = 33554432 WS_MAXIMIZE = 16777216 WS_CAPTION = 12582912 WS_BORDER = 8388608 WS_DLGFRAME = 4194304 WS_VSCROLL = 2097152 WS_HSCROLL = 1048576 WS_SYSMENU = 524288 WS_THICKFRAME = 262144 WS_GROUP = 131072 WS_TABSTOP = 65536 WS_MINIMIZEBOX = 131072 WS_MAXIMIZEBOX = 65536 WS_TILED = WS_OVERLAPPED WS_ICONIC = WS_MINIMIZE WS_SIZEBOX = WS_THICKFRAME WS_OVERLAPPEDWINDOW = (WS_OVERLAPPED | \ WS_CAPTION | \ WS_SYSMENU | 
\ WS_THICKFRAME | \ WS_MINIMIZEBOX | \ WS_MAXIMIZEBOX) WS_POPUPWINDOW = (WS_POPUP | \ WS_BORDER | \ WS_SYSMENU) WS_CHILDWINDOW = (WS_CHILD) WS_TILEDWINDOW = WS_OVERLAPPEDWINDOW WS_EX_DLGMODALFRAME = 1 WS_EX_NOPARENTNOTIFY = 4 WS_EX_TOPMOST = 8 WS_EX_ACCEPTFILES = 16 WS_EX_TRANSPARENT = 32 WS_EX_MDICHILD = 64 WS_EX_TOOLWINDOW = 128 WS_EX_WINDOWEDGE = 256 WS_EX_CLIENTEDGE = 512 WS_EX_CONTEXTHELP = 1024 WS_EX_RIGHT = 4096 WS_EX_LEFT = 0 WS_EX_RTLREADING = 8192 WS_EX_LTRREADING = 0 WS_EX_LEFTSCROLLBAR = 16384 WS_EX_RIGHTSCROLLBAR = 0 WS_EX_CONTROLPARENT = 65536 WS_EX_STATICEDGE = 131072 WS_EX_APPWINDOW = 262144 WS_EX_OVERLAPPEDWINDOW = (WS_EX_WINDOWEDGE | WS_EX_CLIENTEDGE) WS_EX_PALETTEWINDOW = (WS_EX_WINDOWEDGE | WS_EX_TOOLWINDOW | WS_EX_TOPMOST) WS_EX_LAYERED = 0x00080000 WS_EX_NOINHERITLAYOUT = 0x00100000 WS_EX_LAYOUTRTL = 0x00400000 WS_EX_COMPOSITED = 0x02000000 WS_EX_NOACTIVATE = 0x08000000 CS_VREDRAW = 1 CS_HREDRAW = 2 #CS_KEYCVTWINDOW = 0x0004 CS_DBLCLKS = 8 CS_OWNDC = 32 CS_CLASSDC = 64 CS_PARENTDC = 128 #CS_NOKEYCVT = 0x0100 CS_NOCLOSE = 512 CS_SAVEBITS = 2048 CS_BYTEALIGNCLIENT = 4096 CS_BYTEALIGNWINDOW = 8192 CS_GLOBALCLASS = 16384 CS_IME = 65536 PRF_CHECKVISIBLE = 1 PRF_NONCLIENT = 2 PRF_CLIENT = 4 PRF_ERASEBKGND = 8 PRF_CHILDREN = 16 PRF_OWNED = 32 BDR_RAISEDOUTER = 1 BDR_SUNKENOUTER = 2 BDR_RAISEDINNER = 4 BDR_SUNKENINNER = 8 BDR_OUTER = 3 BDR_INNER = 12 #BDR_RAISED = 0x0005 #BDR_SUNKEN = 0x000a EDGE_RAISED = (BDR_RAISEDOUTER | BDR_RAISEDINNER) EDGE_SUNKEN = (BDR_SUNKENOUTER | BDR_SUNKENINNER) EDGE_ETCHED = (BDR_SUNKENOUTER | BDR_RAISEDINNER) EDGE_BUMP = (BDR_RAISEDOUTER | BDR_SUNKENINNER) # winuser.h line 2879 ISMEX_NOSEND = 0 ISMEX_SEND = 1 ISMEX_NOTIFY = 2 ISMEX_CALLBACK = 4 ISMEX_REPLIED = 8 CW_USEDEFAULT = -2147483648 FLASHW_STOP = 0 FLASHW_CAPTION = 1 FLASHW_TRAY = 2 FLASHW_ALL = (FLASHW_CAPTION | FLASHW_TRAY) FLASHW_TIMER = 4 FLASHW_TIMERNOFG = 12 # winuser.h line 7963 DS_ABSALIGN = 1 DS_SYSMODAL = 2 DS_LOCALEDIT = 32 DS_SETFONT = 64 DS_MODALFRAME 
= 128 DS_NOIDLEMSG = 256 DS_SETFOREGROUND = 512 DS_3DLOOK = 4 DS_FIXEDSYS = 8 DS_NOFAILCREATE = 16 DS_CONTROL = 1024 DS_CENTER = 2048 DS_CENTERMOUSE = 4096 DS_CONTEXTHELP = 8192 DM_GETDEFID = (WM_USER+0) DM_SETDEFID = (WM_USER+1) DM_REPOSITION = (WM_USER+2) #PSM_PAGEINFO = (WM_USER+100) #PSM_SHEETINFO = (WM_USER+101) #PSI_SETACTIVE = 0x0001 #PSI_KILLACTIVE = 0x0002 #PSI_APPLY = 0x0003 #PSI_RESET = 0x0004 #PSI_HASHELP = 0x0005 #PSI_HELP = 0x0006 #PSI_CHANGED = 0x0001 #PSI_GUISTART = 0x0002 #PSI_REBOOT = 0x0003 #PSI_GETSIBLINGS = 0x0004 DC_HASDEFID = 21323 DLGC_WANTARROWS = 1 DLGC_WANTTAB = 2 DLGC_WANTALLKEYS = 4 DLGC_WANTMESSAGE = 4 DLGC_HASSETSEL = 8 DLGC_DEFPUSHBUTTON = 16 DLGC_UNDEFPUSHBUTTON = 32 DLGC_RADIOBUTTON = 64 DLGC_WANTCHARS = 128 DLGC_STATIC = 256 DLGC_BUTTON = 8192 LB_CTLCODE = 0 LB_OKAY = 0 LB_ERR = (-1) LB_ERRSPACE = (-2) LBN_ERRSPACE = (-2) LBN_SELCHANGE = 1 LBN_DBLCLK = 2 LBN_SELCANCEL = 3 LBN_SETFOCUS = 4 LBN_KILLFOCUS = 5 LB_ADDSTRING = 384 LB_INSERTSTRING = 385 LB_DELETESTRING = 386 LB_SELITEMRANGEEX = 387 LB_RESETCONTENT = 388 LB_SETSEL = 389 LB_SETCURSEL = 390 LB_GETSEL = 391 LB_GETCURSEL = 392 LB_GETTEXT = 393 LB_GETTEXTLEN = 394 LB_GETCOUNT = 395 LB_SELECTSTRING = 396 LB_DIR = 397 LB_GETTOPINDEX = 398 LB_FINDSTRING = 399 LB_GETSELCOUNT = 400 LB_GETSELITEMS = 401 LB_SETTABSTOPS = 402 LB_GETHORIZONTALEXTENT = 403 LB_SETHORIZONTALEXTENT = 404 LB_SETCOLUMNWIDTH = 405 LB_ADDFILE = 406 LB_SETTOPINDEX = 407 LB_GETITEMRECT = 408 LB_GETITEMDATA = 409 LB_SETITEMDATA = 410 LB_SELITEMRANGE = 411 LB_SETANCHORINDEX = 412 LB_GETANCHORINDEX = 413 LB_SETCARETINDEX = 414 LB_GETCARETINDEX = 415 LB_SETITEMHEIGHT = 416 LB_GETITEMHEIGHT = 417 LB_FINDSTRINGEXACT = 418 LB_SETLOCALE = 421 LB_GETLOCALE = 422 LB_SETCOUNT = 423 LB_INITSTORAGE = 424 LB_ITEMFROMPOINT = 425 LB_MSGMAX = 432 LBS_NOTIFY = 1 LBS_SORT = 2 LBS_NOREDRAW = 4 LBS_MULTIPLESEL = 8 LBS_OWNERDRAWFIXED = 16 LBS_OWNERDRAWVARIABLE = 32 LBS_HASSTRINGS = 64 LBS_USETABSTOPS = 128 LBS_NOINTEGRALHEIGHT = 256 
LBS_MULTICOLUMN = 512 LBS_WANTKEYBOARDINPUT = 1024 LBS_EXTENDEDSEL = 2048 LBS_DISABLENOSCROLL = 4096 LBS_NODATA = 8192 LBS_NOSEL = 16384 LBS_STANDARD = (LBS_NOTIFY | LBS_SORT | WS_VSCROLL | WS_BORDER) CB_OKAY = 0 CB_ERR = (-1) CB_ERRSPACE = (-2) CBN_ERRSPACE = (-1) CBN_SELCHANGE = 1 CBN_DBLCLK = 2 CBN_SETFOCUS = 3 CBN_KILLFOCUS = 4 CBN_EDITCHANGE = 5 CBN_EDITUPDATE = 6 CBN_DROPDOWN = 7 CBN_CLOSEUP = 8 CBN_SELENDOK = 9 CBN_SELENDCANCEL = 10 CBS_SIMPLE = 1 CBS_DROPDOWN = 2 CBS_DROPDOWNLIST = 3 CBS_OWNERDRAWFIXED = 16 CBS_OWNERDRAWVARIABLE = 32 CBS_AUTOHSCROLL = 64 CBS_OEMCONVERT = 128 CBS_SORT = 256 CBS_HASSTRINGS = 512 CBS_NOINTEGRALHEIGHT = 1024 CBS_DISABLENOSCROLL = 2048 CBS_UPPERCASE = 8192 CBS_LOWERCASE = 16384 CB_GETEDITSEL = 320 CB_LIMITTEXT = 321 CB_SETEDITSEL = 322 CB_ADDSTRING = 323 CB_DELETESTRING = 324 CB_DIR = 325 CB_GETCOUNT = 326 CB_GETCURSEL = 327 CB_GETLBTEXT = 328 CB_GETLBTEXTLEN = 329 CB_INSERTSTRING = 330 CB_RESETCONTENT = 331 CB_FINDSTRING = 332 CB_SELECTSTRING = 333 CB_SETCURSEL = 334 CB_SHOWDROPDOWN = 335 CB_GETITEMDATA = 336 CB_SETITEMDATA = 337 CB_GETDROPPEDCONTROLRECT = 338 CB_SETITEMHEIGHT = 339 CB_GETITEMHEIGHT = 340 CB_SETEXTENDEDUI = 341 CB_GETEXTENDEDUI = 342 CB_GETDROPPEDSTATE = 343 CB_FINDSTRINGEXACT = 344 CB_SETLOCALE = 345 CB_GETLOCALE = 346 CB_GETTOPINDEX = 347 CB_SETTOPINDEX = 348 CB_GETHORIZONTALEXTENT = 349 CB_SETHORIZONTALEXTENT = 350 CB_GETDROPPEDWIDTH = 351 CB_SETDROPPEDWIDTH = 352 CB_INITSTORAGE = 353 CB_MSGMAX = 354 SBS_HORZ = 0 SBS_VERT = 1 SBS_TOPALIGN = 2 SBS_LEFTALIGN = 2 SBS_BOTTOMALIGN = 4 SBS_RIGHTALIGN = 4 SBS_SIZEBOXTOPLEFTALIGN = 2 SBS_SIZEBOXBOTTOMRIGHTALIGN = 4 SBS_SIZEBOX = 8 SBS_SIZEGRIP = 16 SBM_SETPOS = 224 SBM_GETPOS = 225 SBM_SETRANGE = 226 SBM_SETRANGEREDRAW = 230 SBM_GETRANGE = 227 SBM_ENABLE_ARROWS = 228 SBM_SETSCROLLINFO = 233 SBM_GETSCROLLINFO = 234 SIF_RANGE = 1 SIF_PAGE = 2 SIF_POS = 4 SIF_DISABLENOSCROLL = 8 SIF_TRACKPOS = 16 SIF_ALL = (SIF_RANGE | SIF_PAGE | SIF_POS | SIF_TRACKPOS) 
# Generated Win32 constants (winnt.h section begins here): MDI tiling flags,
# IME control codes (IMC_*), the standard/generic access-right masks
# (DELETE..GENERIC_ALL - GENERIC_READ is -2147483648, the signed form of
# 0x80000000), service type/start/error codes (SERVICE_*), and the tape-device
# command and capability flags (TAPE_*).  The large negative TAPE_DRIVE_*
# values are signed-32-bit renderings of high-bit DWORD flags; they come
# straight out of h2py and must not be "corrected".
MDIS_ALLCHILDSTYLES = 1 MDITILE_VERTICAL = 0 MDITILE_HORIZONTAL = 1 MDITILE_SKIPDISABLED = 2 IMC_GETCANDIDATEPOS = 7 IMC_SETCANDIDATEPOS = 8 IMC_GETCOMPOSITIONFONT = 9 IMC_SETCOMPOSITIONFONT = 10 IMC_GETCOMPOSITIONWINDOW = 11 IMC_SETCOMPOSITIONWINDOW = 12 IMC_GETSTATUSWINDOWPOS = 15 IMC_SETSTATUSWINDOWPOS = 16 IMC_CLOSESTATUSWINDOW = 33 IMC_OPENSTATUSWINDOW = 34 # Generated by h2py from \msvc20\include\winnt.h # hacked and split by mhammond. DELETE = (65536) READ_CONTROL = (131072) WRITE_DAC = (262144) WRITE_OWNER = (524288) SYNCHRONIZE = (1048576) STANDARD_RIGHTS_REQUIRED = (983040) STANDARD_RIGHTS_READ = (READ_CONTROL) STANDARD_RIGHTS_WRITE = (READ_CONTROL) STANDARD_RIGHTS_EXECUTE = (READ_CONTROL) STANDARD_RIGHTS_ALL = (2031616) SPECIFIC_RIGHTS_ALL = (65535) ACCESS_SYSTEM_SECURITY = (16777216) MAXIMUM_ALLOWED = (33554432) GENERIC_READ = (-2147483648) GENERIC_WRITE = (1073741824) GENERIC_EXECUTE = (536870912) GENERIC_ALL = (268435456) SERVICE_KERNEL_DRIVER = 1 SERVICE_FILE_SYSTEM_DRIVER = 2 SERVICE_ADAPTER = 4 SERVICE_RECOGNIZER_DRIVER = 8 SERVICE_DRIVER = (SERVICE_KERNEL_DRIVER | \ SERVICE_FILE_SYSTEM_DRIVER | \ SERVICE_RECOGNIZER_DRIVER) SERVICE_WIN32_OWN_PROCESS = 16 SERVICE_WIN32_SHARE_PROCESS = 32 SERVICE_WIN32 = (SERVICE_WIN32_OWN_PROCESS | \ SERVICE_WIN32_SHARE_PROCESS) SERVICE_INTERACTIVE_PROCESS = 256 SERVICE_TYPE_ALL = (SERVICE_WIN32 | \ SERVICE_ADAPTER | \ SERVICE_DRIVER | \ SERVICE_INTERACTIVE_PROCESS) SERVICE_BOOT_START = 0 SERVICE_SYSTEM_START = 1 SERVICE_AUTO_START = 2 SERVICE_DEMAND_START = 3 SERVICE_DISABLED = 4 SERVICE_ERROR_IGNORE = 0 SERVICE_ERROR_NORMAL = 1 SERVICE_ERROR_SEVERE = 2 SERVICE_ERROR_CRITICAL = 3 TAPE_ERASE_SHORT = 0 TAPE_ERASE_LONG = 1 TAPE_LOAD = 0 TAPE_UNLOAD = 1 TAPE_TENSION = 2 TAPE_LOCK = 3 TAPE_UNLOCK = 4 TAPE_FORMAT = 5 TAPE_SETMARKS = 0 TAPE_FILEMARKS = 1 TAPE_SHORT_FILEMARKS = 2 TAPE_LONG_FILEMARKS = 3 TAPE_ABSOLUTE_POSITION = 0 TAPE_LOGICAL_POSITION = 1 TAPE_PSEUDO_LOGICAL_POSITION = 2 TAPE_REWIND = 0 TAPE_ABSOLUTE_BLOCK 
= 1 TAPE_LOGICAL_BLOCK = 2 TAPE_PSEUDO_LOGICAL_BLOCK = 3 TAPE_SPACE_END_OF_DATA = 4 TAPE_SPACE_RELATIVE_BLOCKS = 5 TAPE_SPACE_FILEMARKS = 6 TAPE_SPACE_SEQUENTIAL_FMKS = 7 TAPE_SPACE_SETMARKS = 8 TAPE_SPACE_SEQUENTIAL_SMKS = 9 TAPE_DRIVE_FIXED = 1 TAPE_DRIVE_SELECT = 2 TAPE_DRIVE_INITIATOR = 4 TAPE_DRIVE_ERASE_SHORT = 16 TAPE_DRIVE_ERASE_LONG = 32 TAPE_DRIVE_ERASE_BOP_ONLY = 64 TAPE_DRIVE_ERASE_IMMEDIATE = 128 TAPE_DRIVE_TAPE_CAPACITY = 256 TAPE_DRIVE_TAPE_REMAINING = 512 TAPE_DRIVE_FIXED_BLOCK = 1024 TAPE_DRIVE_VARIABLE_BLOCK = 2048 TAPE_DRIVE_WRITE_PROTECT = 4096 TAPE_DRIVE_EOT_WZ_SIZE = 8192 TAPE_DRIVE_ECC = 65536 TAPE_DRIVE_COMPRESSION = 131072 TAPE_DRIVE_PADDING = 262144 TAPE_DRIVE_REPORT_SMKS = 524288 TAPE_DRIVE_GET_ABSOLUTE_BLK = 1048576 TAPE_DRIVE_GET_LOGICAL_BLK = 2097152 TAPE_DRIVE_SET_EOT_WZ_SIZE = 4194304 TAPE_DRIVE_LOAD_UNLOAD = -2147483647 TAPE_DRIVE_TENSION = -2147483646 TAPE_DRIVE_LOCK_UNLOCK = -2147483644 TAPE_DRIVE_REWIND_IMMEDIATE = -2147483640 TAPE_DRIVE_SET_BLOCK_SIZE = -2147483632 TAPE_DRIVE_LOAD_UNLD_IMMED = -2147483616 TAPE_DRIVE_TENSION_IMMED = -2147483584 TAPE_DRIVE_LOCK_UNLK_IMMED = -2147483520 TAPE_DRIVE_SET_ECC = -2147483392 TAPE_DRIVE_SET_COMPRESSION = -2147483136 TAPE_DRIVE_SET_PADDING = -2147482624 TAPE_DRIVE_SET_REPORT_SMKS = -2147481600 TAPE_DRIVE_ABSOLUTE_BLK = -2147479552 TAPE_DRIVE_ABS_BLK_IMMED = -2147475456 TAPE_DRIVE_LOGICAL_BLK = -2147467264 TAPE_DRIVE_LOG_BLK_IMMED = -2147450880 TAPE_DRIVE_END_OF_DATA = -2147418112 TAPE_DRIVE_RELATIVE_BLKS = -2147352576 TAPE_DRIVE_FILEMARKS = -2147221504 TAPE_DRIVE_SEQUENTIAL_FMKS = -2146959360 TAPE_DRIVE_SETMARKS = -2146435072 TAPE_DRIVE_SEQUENTIAL_SMKS = -2145386496 TAPE_DRIVE_REVERSE_POSITION = -2143289344 TAPE_DRIVE_SPACE_IMMEDIATE = -2139095040 TAPE_DRIVE_WRITE_SETMARKS = -2130706432 TAPE_DRIVE_WRITE_FILEMARKS = -2113929216 TAPE_DRIVE_WRITE_SHORT_FMKS = -2080374784 TAPE_DRIVE_WRITE_LONG_FMKS = -2013265920 TAPE_DRIVE_WRITE_MARK_IMMED = -1879048192 TAPE_DRIVE_FORMAT = -1610612736 
TAPE_DRIVE_FORMAT_IMMEDIATE = -1073741824 TAPE_FIXED_PARTITIONS = 0 TAPE_SELECT_PARTITIONS = 1 TAPE_INITIATOR_PARTITIONS = 2 # Generated by h2py from \msvc20\include\winnt.h # hacked and split by mhammond. APPLICATION_ERROR_MASK = 536870912 ERROR_SEVERITY_SUCCESS = 0 ERROR_SEVERITY_INFORMATIONAL = 1073741824 ERROR_SEVERITY_WARNING = -2147483648 ERROR_SEVERITY_ERROR = -1073741824 MINCHAR = 128 MAXCHAR = 127 MINSHORT = 32768 MAXSHORT = 32767 MINLONG = -2147483648 MAXLONG = 2147483647 MAXBYTE = 255 MAXWORD = 65535 MAXDWORD = -1 LANG_NEUTRAL = 0 LANG_BULGARIAN = 2 LANG_CHINESE = 4 LANG_CROATIAN = 26 LANG_CZECH = 5 LANG_DANISH = 6 LANG_DUTCH = 19 LANG_ENGLISH = 9 LANG_FINNISH = 11 LANG_FRENCH = 12 LANG_GERMAN = 7 LANG_GREEK = 8 LANG_HUNGARIAN = 14 LANG_ICELANDIC = 15 LANG_ITALIAN = 16 LANG_JAPANESE = 17 LANG_KOREAN = 18 LANG_NORWEGIAN = 20 LANG_POLISH = 21 LANG_PORTUGUESE = 22 LANG_ROMANIAN = 24 LANG_RUSSIAN = 25 LANG_SLOVAK = 27 LANG_SLOVENIAN = 36 LANG_SPANISH = 10 LANG_SWEDISH = 29 LANG_TURKISH = 31 SUBLANG_NEUTRAL = 0 SUBLANG_DEFAULT = 1 SUBLANG_SYS_DEFAULT = 2 SUBLANG_CHINESE_TRADITIONAL = 1 SUBLANG_CHINESE_SIMPLIFIED = 2 SUBLANG_CHINESE_HONGKONG = 3 SUBLANG_CHINESE_SINGAPORE = 4 SUBLANG_DUTCH = 1 SUBLANG_DUTCH_BELGIAN = 2 SUBLANG_ENGLISH_US = 1 SUBLANG_ENGLISH_UK = 2 SUBLANG_ENGLISH_AUS = 3 SUBLANG_ENGLISH_CAN = 4 SUBLANG_ENGLISH_NZ = 5 SUBLANG_ENGLISH_EIRE = 6 SUBLANG_FRENCH = 1 SUBLANG_FRENCH_BELGIAN = 2 SUBLANG_FRENCH_CANADIAN = 3 SUBLANG_FRENCH_SWISS = 4 SUBLANG_GERMAN = 1 SUBLANG_GERMAN_SWISS = 2 SUBLANG_GERMAN_AUSTRIAN = 3 SUBLANG_ITALIAN = 1 SUBLANG_ITALIAN_SWISS = 2 SUBLANG_NORWEGIAN_BOKMAL = 1 SUBLANG_NORWEGIAN_NYNORSK = 2 SUBLANG_PORTUGUESE = 2 SUBLANG_PORTUGUESE_BRAZILIAN = 1 SUBLANG_SPANISH = 1 SUBLANG_SPANISH_MEXICAN = 2 SUBLANG_SPANISH_MODERN = 3 SORT_DEFAULT = 0 SORT_JAPANESE_XJIS = 0 SORT_JAPANESE_UNICODE = 1 SORT_CHINESE_BIG5 = 0 SORT_CHINESE_UNICODE = 1 SORT_KOREAN_KSC = 0 SORT_KOREAN_UNICODE = 1 def PRIMARYLANGID(lgid): return ((lgid) & 1023) 
def SUBLANGID(lgid):
    """Extract the sublanguage identifier from a Windows language ID.

    Mirrors the SUBLANGID macro in winnt.h: the sublanguage occupies the
    high 6 bits of the 16-bit LANGID (the primary language is the low 10,
    see PRIMARYLANGID above).
    """
    return (lgid) >> 10

NLS_VALID_LOCALE_MASK = 1048575

# CONTEXT_* flags (Alpha variant first, then the x86 values which
# deliberately re-bind the same names — the last assignment wins, matching
# the original h2py output).
CONTEXT_PORTABLE_32BIT = 1048576
CONTEXT_ALPHA = 131072
CONTEXT_CONTROL = (CONTEXT_ALPHA | 1)
CONTEXT_FLOATING_POINT = (CONTEXT_ALPHA | 2)
CONTEXT_INTEGER = (CONTEXT_ALPHA | 4)
CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER)
SIZE_OF_80387_REGISTERS = 80
CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER)
CONTEXT_CONTROL = 1
CONTEXT_FLOATING_POINT = 2
CONTEXT_INTEGER = 4
CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER)

# Process access rights.
PROCESS_TERMINATE = (1)
PROCESS_CREATE_THREAD = (2)
PROCESS_VM_OPERATION = (8)
PROCESS_VM_READ = (16)
PROCESS_VM_WRITE = (32)
PROCESS_DUP_HANDLE = (64)
PROCESS_CREATE_PROCESS = (128)
PROCESS_SET_QUOTA = (256)
PROCESS_SET_INFORMATION = (512)
PROCESS_QUERY_INFORMATION = (1024)
# NOTE: STANDARD_RIGHTS_REQUIRED and SYNCHRONIZE are defined earlier in
# this module (outside this section).
PROCESS_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 4095)

# Thread access rights.
THREAD_TERMINATE = (1)
THREAD_SUSPEND_RESUME = (2)
THREAD_GET_CONTEXT = (8)
THREAD_SET_CONTEXT = (16)
THREAD_SET_INFORMATION = (32)
THREAD_QUERY_INFORMATION = (64)
THREAD_SET_THREAD_TOKEN = (128)
THREAD_IMPERSONATE = (256)
THREAD_DIRECT_IMPERSONATION = (512)

TLS_MINIMUM_AVAILABLE = 64
EVENT_MODIFY_STATE = 2
MUTANT_QUERY_STATE = 1
SEMAPHORE_MODIFY_STATE = 2

TIME_ZONE_ID_UNKNOWN = 0
TIME_ZONE_ID_STANDARD = 1
TIME_ZONE_ID_DAYLIGHT = 2

# Processor type codes (GetSystemInfo).
PROCESSOR_INTEL_386 = 386
PROCESSOR_INTEL_486 = 486
PROCESSOR_INTEL_PENTIUM = 586
PROCESSOR_INTEL_860 = 860
PROCESSOR_MIPS_R2000 = 2000
PROCESSOR_MIPS_R3000 = 3000
PROCESSOR_MIPS_R4000 = 4000
PROCESSOR_ALPHA_21064 = 21064
PROCESSOR_PPC_601 = 601
PROCESSOR_PPC_603 = 603
PROCESSOR_PPC_604 = 604
PROCESSOR_PPC_620 = 620

# Section object access rights.
SECTION_QUERY = 1
SECTION_MAP_WRITE = 2
SECTION_MAP_READ = 4
SECTION_MAP_EXECUTE = 8
SECTION_EXTEND_SIZE = 16

# Memory page protection flags (VirtualAlloc / VirtualProtect).
PAGE_NOACCESS = 1
PAGE_READONLY = 2
PAGE_READWRITE = 4
PAGE_WRITECOPY = 8
PAGE_EXECUTE = 16
PAGE_EXECUTE_READ = 32
PAGE_EXECUTE_READWRITE = 64
PAGE_EXECUTE_WRITECOPY = 128
PAGE_GUARD = 256
PAGE_NOCACHE = 512
MEM_COMMIT = 4096 MEM_RESERVE = 8192 MEM_DECOMMIT = 16384 MEM_RELEASE = 32768 MEM_FREE = 65536 MEM_PRIVATE = 131072 MEM_MAPPED = 262144 MEM_TOP_DOWN = 1048576 # Generated by h2py from \msvc20\include\winnt.h # hacked and split by mhammond. SEC_FILE = 8388608 SEC_IMAGE = 16777216 SEC_RESERVE = 67108864 SEC_COMMIT = 134217728 SEC_NOCACHE = 268435456 MEM_IMAGE = SEC_IMAGE FILE_SHARE_READ = 1 FILE_SHARE_WRITE = 2 FILE_SHARE_DELETE = 4 FILE_ATTRIBUTE_READONLY = 1 FILE_ATTRIBUTE_HIDDEN = 2 FILE_ATTRIBUTE_SYSTEM = 4 FILE_ATTRIBUTE_DIRECTORY = 16 FILE_ATTRIBUTE_ARCHIVE = 32 FILE_ATTRIBUTE_NORMAL = 128 FILE_ATTRIBUTE_TEMPORARY = 256 FILE_ATTRIBUTE_ATOMIC_WRITE = 512 FILE_ATTRIBUTE_XACTION_WRITE = 1024 FILE_ATTRIBUTE_COMPRESSED = 2048 FILE_NOTIFY_CHANGE_FILE_NAME = 1 FILE_NOTIFY_CHANGE_DIR_NAME = 2 FILE_NOTIFY_CHANGE_ATTRIBUTES = 4 FILE_NOTIFY_CHANGE_SIZE = 8 FILE_NOTIFY_CHANGE_LAST_WRITE = 16 FILE_NOTIFY_CHANGE_SECURITY = 256 FILE_CASE_SENSITIVE_SEARCH = 1 FILE_CASE_PRESERVED_NAMES = 2 FILE_UNICODE_ON_DISK = 4 FILE_PERSISTENT_ACLS = 8 FILE_FILE_COMPRESSION = 16 FILE_VOLUME_IS_COMPRESSED = 32768 IO_COMPLETION_MODIFY_STATE = 2 DUPLICATE_CLOSE_SOURCE = 1 DUPLICATE_SAME_ACCESS = 2 SID_MAX_SUB_AUTHORITIES = (15) SECURITY_NULL_RID = (0) SECURITY_WORLD_RID = (0) SECURITY_LOCAL_RID = (0X00000000) SECURITY_CREATOR_OWNER_RID = (0) SECURITY_CREATOR_GROUP_RID = (1) SECURITY_DIALUP_RID = (1) SECURITY_NETWORK_RID = (2) SECURITY_BATCH_RID = (3) SECURITY_INTERACTIVE_RID = (4) SECURITY_SERVICE_RID = (6) SECURITY_ANONYMOUS_LOGON_RID = (7) SECURITY_LOGON_IDS_RID = (5) SECURITY_LOGON_IDS_RID_COUNT = (3) SECURITY_LOCAL_SYSTEM_RID = (18) SECURITY_NT_NON_UNIQUE = (21) SECURITY_BUILTIN_DOMAIN_RID = (32) DOMAIN_USER_RID_ADMIN = (500) DOMAIN_USER_RID_GUEST = (501) DOMAIN_GROUP_RID_ADMINS = (512) DOMAIN_GROUP_RID_USERS = (513) DOMAIN_GROUP_RID_GUESTS = (514) DOMAIN_ALIAS_RID_ADMINS = (544) DOMAIN_ALIAS_RID_USERS = (545) DOMAIN_ALIAS_RID_GUESTS = (546) DOMAIN_ALIAS_RID_POWER_USERS = (547) 
DOMAIN_ALIAS_RID_ACCOUNT_OPS = (548) DOMAIN_ALIAS_RID_SYSTEM_OPS = (549) DOMAIN_ALIAS_RID_PRINT_OPS = (550) DOMAIN_ALIAS_RID_BACKUP_OPS = (551) DOMAIN_ALIAS_RID_REPLICATOR = (552) SE_GROUP_MANDATORY = (1) SE_GROUP_ENABLED_BY_DEFAULT = (2) SE_GROUP_ENABLED = (4) SE_GROUP_OWNER = (8) SE_GROUP_LOGON_ID = (-1073741824) ACL_REVISION = (2) ACL_REVISION1 = (1) ACL_REVISION2 = (2) ACCESS_ALLOWED_ACE_TYPE = (0) ACCESS_DENIED_ACE_TYPE = (1) SYSTEM_AUDIT_ACE_TYPE = (2) SYSTEM_ALARM_ACE_TYPE = (3) OBJECT_INHERIT_ACE = (1) CONTAINER_INHERIT_ACE = (2) NO_PROPAGATE_INHERIT_ACE = (4) INHERIT_ONLY_ACE = (8) VALID_INHERIT_FLAGS = (15) SUCCESSFUL_ACCESS_ACE_FLAG = (64) FAILED_ACCESS_ACE_FLAG = (128) SECURITY_DESCRIPTOR_REVISION = (1) SECURITY_DESCRIPTOR_REVISION1 = (1) SECURITY_DESCRIPTOR_MIN_LENGTH = (20) SE_OWNER_DEFAULTED = (1) SE_GROUP_DEFAULTED = (2) SE_DACL_PRESENT = (4) SE_DACL_DEFAULTED = (8) SE_SACL_PRESENT = (16) SE_SACL_DEFAULTED = (32) SE_SELF_RELATIVE = (32768) SE_PRIVILEGE_ENABLED_BY_DEFAULT = (1) SE_PRIVILEGE_ENABLED = (2) SE_PRIVILEGE_USED_FOR_ACCESS = (-2147483648) PRIVILEGE_SET_ALL_NECESSARY = (1) SE_CREATE_TOKEN_NAME = "SeCreateTokenPrivilege" SE_ASSIGNPRIMARYTOKEN_NAME = "SeAssignPrimaryTokenPrivilege" SE_LOCK_MEMORY_NAME = "SeLockMemoryPrivilege" SE_INCREASE_QUOTA_NAME = "SeIncreaseQuotaPrivilege" SE_UNSOLICITED_INPUT_NAME = "SeUnsolicitedInputPrivilege" SE_MACHINE_ACCOUNT_NAME = "SeMachineAccountPrivilege" SE_TCB_NAME = "SeTcbPrivilege" SE_SECURITY_NAME = "SeSecurityPrivilege" SE_TAKE_OWNERSHIP_NAME = "SeTakeOwnershipPrivilege" SE_LOAD_DRIVER_NAME = "SeLoadDriverPrivilege" SE_SYSTEM_PROFILE_NAME = "SeSystemProfilePrivilege" SE_SYSTEMTIME_NAME = "SeSystemtimePrivilege" SE_PROF_SINGLE_PROCESS_NAME = "SeProfileSingleProcessPrivilege" SE_INC_BASE_PRIORITY_NAME = "SeIncreaseBasePriorityPrivilege" SE_CREATE_PAGEFILE_NAME = "SeCreatePagefilePrivilege" SE_CREATE_PERMANENT_NAME = "SeCreatePermanentPrivilege" SE_BACKUP_NAME = "SeBackupPrivilege" SE_RESTORE_NAME = 
"SeRestorePrivilege" SE_SHUTDOWN_NAME = "SeShutdownPrivilege" SE_DEBUG_NAME = "SeDebugPrivilege" SE_AUDIT_NAME = "SeAuditPrivilege" SE_SYSTEM_ENVIRONMENT_NAME = "SeSystemEnvironmentPrivilege" SE_CHANGE_NOTIFY_NAME = "SeChangeNotifyPrivilege" SE_REMOTE_SHUTDOWN_NAME = "SeRemoteShutdownPrivilege" TOKEN_ASSIGN_PRIMARY = (1) TOKEN_DUPLICATE = (2) TOKEN_IMPERSONATE = (4) TOKEN_QUERY = (8) TOKEN_QUERY_SOURCE = (16) TOKEN_ADJUST_PRIVILEGES = (32) TOKEN_ADJUST_GROUPS = (64) TOKEN_ADJUST_DEFAULT = (128) TOKEN_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED |\ TOKEN_ASSIGN_PRIMARY |\ TOKEN_DUPLICATE |\ TOKEN_IMPERSONATE |\ TOKEN_QUERY |\ TOKEN_QUERY_SOURCE |\ TOKEN_ADJUST_PRIVILEGES |\ TOKEN_ADJUST_GROUPS |\ TOKEN_ADJUST_DEFAULT) TOKEN_READ = (STANDARD_RIGHTS_READ |\ TOKEN_QUERY) TOKEN_WRITE = (STANDARD_RIGHTS_WRITE |\ TOKEN_ADJUST_PRIVILEGES |\ TOKEN_ADJUST_GROUPS |\ TOKEN_ADJUST_DEFAULT) TOKEN_EXECUTE = (STANDARD_RIGHTS_EXECUTE) TOKEN_SOURCE_LENGTH = 8 KEY_QUERY_VALUE = (1) KEY_SET_VALUE = (2) KEY_CREATE_SUB_KEY = (4) KEY_ENUMERATE_SUB_KEYS = (8) KEY_NOTIFY = (16) KEY_CREATE_LINK = (32) KEY_WOW64_32KEY = 512 KEY_WOW64_64KEY = 256 KEY_WOW64_RES = 768 KEY_READ = ((STANDARD_RIGHTS_READ |\ KEY_QUERY_VALUE |\ KEY_ENUMERATE_SUB_KEYS |\ KEY_NOTIFY) \ & \ (~SYNCHRONIZE)) KEY_WRITE = ((STANDARD_RIGHTS_WRITE |\ KEY_SET_VALUE |\ KEY_CREATE_SUB_KEY) \ & \ (~SYNCHRONIZE)) KEY_EXECUTE = ((KEY_READ) \ & \ (~SYNCHRONIZE)) KEY_ALL_ACCESS = ((STANDARD_RIGHTS_ALL |\ KEY_QUERY_VALUE |\ KEY_SET_VALUE |\ KEY_CREATE_SUB_KEY |\ KEY_ENUMERATE_SUB_KEYS |\ KEY_NOTIFY |\ KEY_CREATE_LINK) \ & \ (~SYNCHRONIZE)) REG_NOTIFY_CHANGE_ATTRIBUTES = (2) REG_NOTIFY_CHANGE_SECURITY = (8) REG_RESOURCE_REQUIREMENTS_LIST = ( 10 ) REG_NONE = ( 0 ) # No value type REG_SZ = ( 1 ) # Unicode nul terminated string REG_EXPAND_SZ = ( 2 ) # Unicode nul terminated string # (with environment variable references) REG_BINARY = ( 3 ) # Free form binary REG_DWORD = ( 4 ) # 32-bit number REG_DWORD_LITTLE_ENDIAN = ( 4 ) # 32-bit number 
# Registry value types (continued; REG_DWORD_LITTLE_ENDIAN above is the
# same as REG_DWORD).
REG_DWORD_BIG_ENDIAN = ( 5 )                    # 32-bit number
REG_LINK = ( 6 )                                # Symbolic Link (unicode)
REG_MULTI_SZ = ( 7 )                            # Multiple Unicode strings
REG_RESOURCE_LIST = ( 8 )                       # Resource list in the resource map
REG_FULL_RESOURCE_DESCRIPTOR =( 9 )             # Resource list in the hardware description
REG_RESOURCE_REQUIREMENTS_LIST = ( 10 )
REG_QWORD = ( 11 )                              # 64-bit number
REG_QWORD_LITTLE_ENDIAN = ( 11 )                # 64-bit number (same as REG_QWORD)

# Generated by h2py from \msvc20\include\winnt.h
# hacked and split by mhammond.

# Included from string.h
_NLSCMPERROR = 2147483647
NULL = 0

# Heap creation/allocation flags (HeapCreate / HeapAlloc).
HEAP_NO_SERIALIZE = 1
HEAP_GROWABLE = 2
HEAP_GENERATE_EXCEPTIONS = 4
HEAP_ZERO_MEMORY = 8
HEAP_REALLOC_IN_PLACE_ONLY = 16
HEAP_TAIL_CHECKING_ENABLED = 32
HEAP_FREE_CHECKING_ENABLED = 64
HEAP_DISABLE_COALESCE_ON_FREE = 128

# IsTextUnicode test flags and result masks.
IS_TEXT_UNICODE_ASCII16 = 1
IS_TEXT_UNICODE_REVERSE_ASCII16 = 16
IS_TEXT_UNICODE_STATISTICS = 2
IS_TEXT_UNICODE_REVERSE_STATISTICS = 32
IS_TEXT_UNICODE_CONTROLS = 4
IS_TEXT_UNICODE_REVERSE_CONTROLS = 64
IS_TEXT_UNICODE_SIGNATURE = 8
IS_TEXT_UNICODE_REVERSE_SIGNATURE = 128
IS_TEXT_UNICODE_ILLEGAL_CHARS = 256
IS_TEXT_UNICODE_ODD_LENGTH = 512
IS_TEXT_UNICODE_DBCS_LEADBYTE = 1024
IS_TEXT_UNICODE_NULL_BYTES = 4096
IS_TEXT_UNICODE_UNICODE_MASK = 15
IS_TEXT_UNICODE_REVERSE_MASK = 240
IS_TEXT_UNICODE_NOT_UNICODE_MASK = 3840
IS_TEXT_UNICODE_NOT_ASCII_MASK = 61440

# NTFS file compression constants.
COMPRESSION_FORMAT_NONE = (0)
COMPRESSION_FORMAT_DEFAULT = (1)
COMPRESSION_FORMAT_LZNT1 = (2)
COMPRESSION_ENGINE_STANDARD = (0)
COMPRESSION_ENGINE_MAXIMUM = (256)

MESSAGE_RESOURCE_UNICODE = 1
RTL_CRITSECT_TYPE = 0
RTL_RESOURCE_TYPE = 1

# DllMain notification reasons.
DLL_PROCESS_ATTACH = 1
DLL_THREAD_ATTACH = 2
DLL_THREAD_DETACH = 3
DLL_PROCESS_DETACH = 0

# Event log read flags and record types (ReadEventLog / ReportEvent).
EVENTLOG_SEQUENTIAL_READ = 0X0001
EVENTLOG_SEEK_READ = 0X0002
EVENTLOG_FORWARDS_READ = 0X0004
EVENTLOG_BACKWARDS_READ = 0X0008
EVENTLOG_SUCCESS = 0X0000
EVENTLOG_ERROR_TYPE = 1
EVENTLOG_WARNING_TYPE = 2
EVENTLOG_INFORMATION_TYPE = 4
EVENTLOG_AUDIT_SUCCESS = 8
EVENTLOG_AUDIT_FAILURE = 16
# Paired event-log record flags.
EVENTLOG_START_PAIRED_EVENT = 1
EVENTLOG_END_PAIRED_EVENT = 2
EVENTLOG_END_ALL_PAIRED_EVENTS = 4
EVENTLOG_PAIRED_EVENT_ACTIVE = 8
EVENTLOG_PAIRED_EVENT_INACTIVE = 16

# Generated by h2py from \msvc20\include\winnt.h
# hacked and split by mhammond.

# SECURITY_INFORMATION bits (Get/SetFileSecurity etc.).
OWNER_SECURITY_INFORMATION = (0X00000001)
GROUP_SECURITY_INFORMATION = (0X00000002)
DACL_SECURITY_INFORMATION = (0X00000004)
SACL_SECURITY_INFORMATION = (0X00000008)

# PE/COFF image-file layout constants.
IMAGE_SIZEOF_FILE_HEADER = 20
IMAGE_FILE_MACHINE_UNKNOWN = 0
IMAGE_NUMBEROF_DIRECTORY_ENTRIES = 16
IMAGE_SIZEOF_ROM_OPTIONAL_HEADER = 56
IMAGE_SIZEOF_STD_OPTIONAL_HEADER = 28
IMAGE_SIZEOF_NT_OPTIONAL_HEADER = 224
IMAGE_NT_OPTIONAL_HDR_MAGIC = 267
IMAGE_ROM_OPTIONAL_HDR_MAGIC = 263
IMAGE_SIZEOF_SHORT_NAME = 8
IMAGE_SIZEOF_SECTION_HEADER = 40
IMAGE_SIZEOF_SYMBOL = 18

# COFF symbol storage classes.
IMAGE_SYM_CLASS_NULL = 0
IMAGE_SYM_CLASS_AUTOMATIC = 1
IMAGE_SYM_CLASS_EXTERNAL = 2
IMAGE_SYM_CLASS_STATIC = 3
IMAGE_SYM_CLASS_REGISTER = 4
IMAGE_SYM_CLASS_EXTERNAL_DEF = 5
IMAGE_SYM_CLASS_LABEL = 6
IMAGE_SYM_CLASS_UNDEFINED_LABEL = 7
IMAGE_SYM_CLASS_MEMBER_OF_STRUCT = 8
IMAGE_SYM_CLASS_ARGUMENT = 9
IMAGE_SYM_CLASS_STRUCT_TAG = 10
IMAGE_SYM_CLASS_MEMBER_OF_UNION = 11
IMAGE_SYM_CLASS_UNION_TAG = 12
IMAGE_SYM_CLASS_TYPE_DEFINITION = 13
IMAGE_SYM_CLASS_UNDEFINED_STATIC = 14
IMAGE_SYM_CLASS_ENUM_TAG = 15
IMAGE_SYM_CLASS_MEMBER_OF_ENUM = 16
IMAGE_SYM_CLASS_REGISTER_PARAM = 17
IMAGE_SYM_CLASS_BIT_FIELD = 18
IMAGE_SYM_CLASS_BLOCK = 100
IMAGE_SYM_CLASS_FUNCTION = 101
IMAGE_SYM_CLASS_END_OF_STRUCT = 102
IMAGE_SYM_CLASS_FILE = 103
IMAGE_SYM_CLASS_SECTION = 104
IMAGE_SYM_CLASS_WEAK_EXTERNAL = 105

# COFF symbol type masks/shifts.  These were octal in winnt.h; the
# Python 2 spelling (017, 060, ...) is a syntax error in Python 3, so
# the values use the 0o prefix (identical numeric values).
N_BTMASK = 0o17
N_TMASK = 0o60
N_TMASK1 = 0o300
N_TMASK2 = 0o360
N_BTSHFT = 4
N_TSHIFT = 2

IMAGE_SIZEOF_AUX_SYMBOL = 18

# COMDAT section selection types.
IMAGE_COMDAT_SELECT_NODUPLICATES = 1
IMAGE_COMDAT_SELECT_ANY = 2
IMAGE_COMDAT_SELECT_SAME_SIZE = 3
IMAGE_COMDAT_SELECT_EXACT_MATCH = 4
IMAGE_COMDAT_SELECT_ASSOCIATIVE = 5

IMAGE_WEAK_EXTERN_SEARCH_NOLIBRARY = 1
IMAGE_WEAK_EXTERN_SEARCH_LIBRARY = 2
IMAGE_WEAK_EXTERN_SEARCH_ALIAS = 3

IMAGE_SIZEOF_RELOCATION = 10

# Relocation types (i386 / MIPS / Alpha).  Octal literals converted to
# Python 3 form as above.
IMAGE_REL_I386_SECTION = 0o12
IMAGE_REL_I386_SECREL = 0o13
IMAGE_REL_MIPS_REFHALF = 0o1
IMAGE_REL_MIPS_REFWORD = 0o2
IMAGE_REL_MIPS_JMPADDR = 0o3
IMAGE_REL_MIPS_REFHI = 0o4
IMAGE_REL_MIPS_REFLO = 0o5
IMAGE_REL_MIPS_GPREL = 0o6
IMAGE_REL_MIPS_LITERAL = 0o7
IMAGE_REL_MIPS_SECTION = 0o12
IMAGE_REL_MIPS_SECREL = 0o13
IMAGE_REL_MIPS_REFWORDNB = 0o42
IMAGE_REL_MIPS_PAIR = 0o45
IMAGE_REL_ALPHA_ABSOLUTE = 0
IMAGE_REL_ALPHA_REFLONG = 1
IMAGE_REL_ALPHA_REFQUAD = 2
IMAGE_REL_ALPHA_GPREL32 = 3
IMAGE_REL_ALPHA_LITERAL = 4
IMAGE_REL_ALPHA_LITUSE = 5
IMAGE_REL_ALPHA_GPDISP = 6
IMAGE_REL_ALPHA_BRADDR = 7
IMAGE_REL_ALPHA_HINT = 8
IMAGE_REL_ALPHA_INLINE_REFLONG = 9
IMAGE_REL_ALPHA_REFHI = 10
IMAGE_REL_ALPHA_REFLO = 11
IMAGE_REL_ALPHA_PAIR = 12
IMAGE_REL_ALPHA_MATCH = 13
IMAGE_REL_ALPHA_SECTION = 14
IMAGE_REL_ALPHA_SECREL = 15
IMAGE_REL_ALPHA_REFLONGNB = 16

IMAGE_SIZEOF_BASE_RELOCATION = 8

# Base relocation types.
IMAGE_REL_BASED_ABSOLUTE = 0
IMAGE_REL_BASED_HIGH = 1
IMAGE_REL_BASED_LOW = 2
IMAGE_REL_BASED_HIGHLOW = 3
IMAGE_REL_BASED_HIGHADJ = 4
IMAGE_REL_BASED_MIPS_JMPADDR = 5

IMAGE_SIZEOF_LINENUMBER = 6

# COFF archive (.lib) format constants.
IMAGE_ARCHIVE_START_SIZE = 8
IMAGE_ARCHIVE_START = "!<arch>\n"
IMAGE_ARCHIVE_END = "`\n"
IMAGE_ARCHIVE_PAD = "\n"
IMAGE_ARCHIVE_LINKER_MEMBER = "/ "
IMAGE_ARCHIVE_LONGNAMES_MEMBER = "// "
IMAGE_SIZEOF_ARCHIVE_MEMBER_HDR = 60

# 0x80000000 as a signed 32-bit value, matching the h2py output.
IMAGE_ORDINAL_FLAG = -2147483648

def IMAGE_SNAP_BY_ORDINAL(Ordinal):
    """Return True if an import thunk value is an import-by-ordinal
    (i.e. its IMAGE_ORDINAL_FLAG bit is set)."""
    return (Ordinal & IMAGE_ORDINAL_FLAG) != 0

def IMAGE_ORDINAL(Ordinal):
    """Extract the 16-bit ordinal number from an import thunk value."""
    return Ordinal & 65535

IMAGE_RESOURCE_NAME_IS_STRING = -2147483648
IMAGE_RESOURCE_DATA_IS_DIRECTORY = -2147483648

# Debug directory entry types.
IMAGE_DEBUG_TYPE_UNKNOWN = 0
IMAGE_DEBUG_TYPE_COFF = 1
IMAGE_DEBUG_TYPE_CODEVIEW = 2
IMAGE_DEBUG_TYPE_FPO = 3
IMAGE_DEBUG_TYPE_MISC = 4
IMAGE_DEBUG_TYPE_EXCEPTION = 5
IMAGE_DEBUG_TYPE_FIXUP = 6
IMAGE_DEBUG_TYPE_OMAP_TO_SRC = 7
IMAGE_DEBUG_TYPE_OMAP_FROM_SRC = 8

# FPO (frame pointer omission) frame types.
FRAME_FPO = 0
FRAME_TRAP = 1
FRAME_TSS = 2
SIZEOF_RFPO_DATA = 16
IMAGE_DEBUG_MISC_EXENAME = 1
IMAGE_SEPARATE_DEBUG_SIGNATURE = 18756

# Generated by h2py from \msvcnt\include\wingdi.h
# hacked and split manually by mhammond. NEWFRAME = 1 ABORTDOC = 2 NEXTBAND = 3 SETCOLORTABLE = 4 GETCOLORTABLE = 5 FLUSHOUTPUT = 6 DRAFTMODE = 7 QUERYESCSUPPORT = 8 SETABORTPROC = 9 STARTDOC = 10 ENDDOC = 11 GETPHYSPAGESIZE = 12 GETPRINTINGOFFSET = 13 GETSCALINGFACTOR = 14 MFCOMMENT = 15 GETPENWIDTH = 16 SETCOPYCOUNT = 17 SELECTPAPERSOURCE = 18 DEVICEDATA = 19 PASSTHROUGH = 19 GETTECHNOLGY = 20 GETTECHNOLOGY = 20 SETLINECAP = 21 SETLINEJOIN = 22 SETMITERLIMIT = 23 BANDINFO = 24 DRAWPATTERNRECT = 25 GETVECTORPENSIZE = 26 GETVECTORBRUSHSIZE = 27 ENABLEDUPLEX = 28 GETSETPAPERBINS = 29 GETSETPRINTORIENT = 30 ENUMPAPERBINS = 31 SETDIBSCALING = 32 EPSPRINTING = 33 ENUMPAPERMETRICS = 34 GETSETPAPERMETRICS = 35 POSTSCRIPT_DATA = 37 POSTSCRIPT_IGNORE = 38 MOUSETRAILS = 39 GETDEVICEUNITS = 42 GETEXTENDEDTEXTMETRICS = 256 GETEXTENTTABLE = 257 GETPAIRKERNTABLE = 258 GETTRACKKERNTABLE = 259 EXTTEXTOUT = 512 GETFACENAME = 513 DOWNLOADFACE = 514 ENABLERELATIVEWIDTHS = 768 ENABLEPAIRKERNING = 769 SETKERNTRACK = 770 SETALLJUSTVALUES = 771 SETCHARSET = 772 STRETCHBLT = 2048 GETSETSCREENPARAMS = 3072 BEGIN_PATH = 4096 CLIP_TO_PATH = 4097 END_PATH = 4098 EXT_DEVICE_CAPS = 4099 RESTORE_CTM = 4100 SAVE_CTM = 4101 SET_ARC_DIRECTION = 4102 SET_BACKGROUND_COLOR = 4103 SET_POLY_MODE = 4104 SET_SCREEN_ANGLE = 4105 SET_SPREAD = 4106 TRANSFORM_CTM = 4107 SET_CLIP_BOX = 4108 SET_BOUNDS = 4109 SET_MIRROR_MODE = 4110 OPENCHANNEL = 4110 DOWNLOADHEADER = 4111 CLOSECHANNEL = 4112 POSTSCRIPT_PASSTHROUGH = 4115 ENCAPSULATED_POSTSCRIPT = 4116 SP_NOTREPORTED = 16384 SP_ERROR = (-1) SP_APPABORT = (-2) SP_USERABORT = (-3) SP_OUTOFDISK = (-4) SP_OUTOFMEMORY = (-5) PR_JOBSTATUS = 0 ## GDI object types OBJ_PEN = 1 OBJ_BRUSH = 2 OBJ_DC = 3 OBJ_METADC = 4 OBJ_PAL = 5 OBJ_FONT = 6 OBJ_BITMAP = 7 OBJ_REGION = 8 OBJ_METAFILE = 9 OBJ_MEMDC = 10 OBJ_EXTPEN = 11 OBJ_ENHMETADC = 12 OBJ_ENHMETAFILE = 13 OBJ_COLORSPACE = 14 MWT_IDENTITY = 1 MWT_LEFTMULTIPLY = 2 MWT_RIGHTMULTIPLY = 3 MWT_MIN = MWT_IDENTITY MWT_MAX = 
MWT_RIGHTMULTIPLY BI_RGB = 0 BI_RLE8 = 1 BI_RLE4 = 2 BI_BITFIELDS = 3 TMPF_FIXED_PITCH = 1 TMPF_VECTOR = 2 TMPF_DEVICE = 8 TMPF_TRUETYPE = 4 NTM_REGULAR = 64 NTM_BOLD = 32 NTM_ITALIC = 1 LF_FACESIZE = 32 LF_FULLFACESIZE = 64 OUT_DEFAULT_PRECIS = 0 OUT_STRING_PRECIS = 1 OUT_CHARACTER_PRECIS = 2 OUT_STROKE_PRECIS = 3 OUT_TT_PRECIS = 4 OUT_DEVICE_PRECIS = 5 OUT_RASTER_PRECIS = 6 OUT_TT_ONLY_PRECIS = 7 OUT_OUTLINE_PRECIS = 8 CLIP_DEFAULT_PRECIS = 0 CLIP_CHARACTER_PRECIS = 1 CLIP_STROKE_PRECIS = 2 CLIP_MASK = 15 CLIP_LH_ANGLES = (1<<4) CLIP_TT_ALWAYS = (2<<4) CLIP_EMBEDDED = (8<<4) DEFAULT_QUALITY = 0 DRAFT_QUALITY = 1 PROOF_QUALITY = 2 NONANTIALIASED_QUALITY = 3 ANTIALIASED_QUALITY = 4 CLEARTYPE_QUALITY = 5 CLEARTYPE_NATURAL_QUALITY = 6 DEFAULT_PITCH = 0 FIXED_PITCH = 1 VARIABLE_PITCH = 2 ANSI_CHARSET = 0 DEFAULT_CHARSET = 1 SYMBOL_CHARSET = 2 SHIFTJIS_CHARSET = 128 HANGEUL_CHARSET = 129 CHINESEBIG5_CHARSET = 136 OEM_CHARSET = 255 JOHAB_CHARSET = 130 HEBREW_CHARSET = 177 ARABIC_CHARSET = 178 GREEK_CHARSET = 161 TURKISH_CHARSET = 162 VIETNAMESE_CHARSET = 163 THAI_CHARSET = 222 EASTEUROPE_CHARSET = 238 RUSSIAN_CHARSET = 204 MAC_CHARSET = 77 BALTIC_CHARSET = 186 FF_DONTCARE = (0<<4) FF_ROMAN = (1<<4) FF_SWISS = (2<<4) FF_MODERN = (3<<4) FF_SCRIPT = (4<<4) FF_DECORATIVE = (5<<4) FW_DONTCARE = 0 FW_THIN = 100 FW_EXTRALIGHT = 200 FW_LIGHT = 300 FW_NORMAL = 400 FW_MEDIUM = 500 FW_SEMIBOLD = 600 FW_BOLD = 700 FW_EXTRABOLD = 800 FW_HEAVY = 900 FW_ULTRALIGHT = FW_EXTRALIGHT FW_REGULAR = FW_NORMAL FW_DEMIBOLD = FW_SEMIBOLD FW_ULTRABOLD = FW_EXTRABOLD FW_BLACK = FW_HEAVY # Generated by h2py from \msvcnt\include\wingdi.h # hacked and split manually by mhammond. 
BS_SOLID = 0 BS_NULL = 1 BS_HOLLOW = BS_NULL BS_HATCHED = 2 BS_PATTERN = 3 BS_INDEXED = 4 BS_DIBPATTERN = 5 BS_DIBPATTERNPT = 6 BS_PATTERN8X8 = 7 BS_DIBPATTERN8X8 = 8 HS_HORIZONTAL = 0 HS_VERTICAL = 1 HS_FDIAGONAL = 2 HS_BDIAGONAL = 3 HS_CROSS = 4 HS_DIAGCROSS = 5 HS_FDIAGONAL1 = 6 HS_BDIAGONAL1 = 7 HS_SOLID = 8 HS_DENSE1 = 9 HS_DENSE2 = 10 HS_DENSE3 = 11 HS_DENSE4 = 12 HS_DENSE5 = 13 HS_DENSE6 = 14 HS_DENSE7 = 15 HS_DENSE8 = 16 HS_NOSHADE = 17 HS_HALFTONE = 18 HS_SOLIDCLR = 19 HS_DITHEREDCLR = 20 HS_SOLIDTEXTCLR = 21 HS_DITHEREDTEXTCLR = 22 HS_SOLIDBKCLR = 23 HS_DITHEREDBKCLR = 24 HS_API_MAX = 25 PS_SOLID = 0 PS_DASH = 1 PS_DOT = 2 PS_DASHDOT = 3 PS_DASHDOTDOT = 4 PS_NULL = 5 PS_INSIDEFRAME = 6 PS_USERSTYLE = 7 PS_ALTERNATE = 8 PS_STYLE_MASK = 15 PS_ENDCAP_ROUND = 0 PS_ENDCAP_SQUARE = 256 PS_ENDCAP_FLAT = 512 PS_ENDCAP_MASK = 3840 PS_JOIN_ROUND = 0 PS_JOIN_BEVEL = 4096 PS_JOIN_MITER = 8192 PS_JOIN_MASK = 61440 PS_COSMETIC = 0 PS_GEOMETRIC = 65536 PS_TYPE_MASK = 983040 AD_COUNTERCLOCKWISE = 1 AD_CLOCKWISE = 2 DRIVERVERSION = 0 TECHNOLOGY = 2 HORZSIZE = 4 VERTSIZE = 6 HORZRES = 8 VERTRES = 10 BITSPIXEL = 12 PLANES = 14 NUMBRUSHES = 16 NUMPENS = 18 NUMMARKERS = 20 NUMFONTS = 22 NUMCOLORS = 24 PDEVICESIZE = 26 CURVECAPS = 28 LINECAPS = 30 POLYGONALCAPS = 32 TEXTCAPS = 34 CLIPCAPS = 36 RASTERCAPS = 38 ASPECTX = 40 ASPECTY = 42 ASPECTXY = 44 LOGPIXELSX = 88 LOGPIXELSY = 90 SIZEPALETTE = 104 NUMRESERVED = 106 COLORRES = 108 PHYSICALWIDTH = 110 PHYSICALHEIGHT = 111 PHYSICALOFFSETX = 112 PHYSICALOFFSETY = 113 SCALINGFACTORX = 114 SCALINGFACTORY = 115 VREFRESH = 116 DESKTOPVERTRES = 117 DESKTOPHORZRES = 118 BLTALIGNMENT = 119 SHADEBLENDCAPS = 120 COLORMGMTCAPS = 121 DT_PLOTTER = 0 DT_RASDISPLAY = 1 DT_RASPRINTER = 2 DT_RASCAMERA = 3 DT_CHARSTREAM = 4 DT_METAFILE = 5 DT_DISPFILE = 6 CC_NONE = 0 CC_CIRCLES = 1 CC_PIE = 2 CC_CHORD = 4 CC_ELLIPSES = 8 CC_WIDE = 16 CC_STYLED = 32 CC_WIDESTYLED = 64 CC_INTERIORS = 128 CC_ROUNDRECT = 256 LC_NONE = 0 LC_POLYLINE = 2 LC_MARKER = 4 
LC_POLYMARKER = 8 LC_WIDE = 16 LC_STYLED = 32 LC_WIDESTYLED = 64 LC_INTERIORS = 128 PC_NONE = 0 PC_POLYGON = 1 PC_RECTANGLE = 2 PC_WINDPOLYGON = 4 PC_TRAPEZOID = 4 PC_SCANLINE = 8 PC_WIDE = 16 PC_STYLED = 32 PC_WIDESTYLED = 64 PC_INTERIORS = 128 CP_NONE = 0 CP_RECTANGLE = 1 CP_REGION = 2 TC_OP_CHARACTER = 1 TC_OP_STROKE = 2 TC_CP_STROKE = 4 TC_CR_90 = 8 TC_CR_ANY = 16 TC_SF_X_YINDEP = 32 TC_SA_DOUBLE = 64 TC_SA_INTEGER = 128 TC_SA_CONTIN = 256 TC_EA_DOUBLE = 512 TC_IA_ABLE = 1024 TC_UA_ABLE = 2048 TC_SO_ABLE = 4096 TC_RA_ABLE = 8192 TC_VA_ABLE = 16384 TC_RESERVED = 32768 TC_SCROLLBLT = 65536 RC_BITBLT = 1 RC_BANDING = 2 RC_SCALING = 4 RC_BITMAP64 = 8 RC_GDI20_OUTPUT = 16 RC_GDI20_STATE = 32 RC_SAVEBITMAP = 64 RC_DI_BITMAP = 128 RC_PALETTE = 256 RC_DIBTODEV = 512 RC_BIGFONT = 1024 RC_STRETCHBLT = 2048 RC_FLOODFILL = 4096 RC_STRETCHDIB = 8192 RC_OP_DX_OUTPUT = 16384 RC_DEVBITS = 32768 DIB_RGB_COLORS = 0 DIB_PAL_COLORS = 1 DIB_PAL_INDICES = 2 DIB_PAL_PHYSINDICES = 2 DIB_PAL_LOGINDICES = 4 SYSPAL_ERROR = 0 SYSPAL_STATIC = 1 SYSPAL_NOSTATIC = 2 CBM_CREATEDIB = 2 CBM_INIT = 4 FLOODFILLBORDER = 0 FLOODFILLSURFACE = 1 CCHDEVICENAME = 32 CCHFORMNAME = 32 # Generated by h2py from \msvcnt\include\wingdi.h # hacked and split manually by mhammond. 
# DEVMODE.dmFields DM_SPECVERSION = 800 DM_ORIENTATION = 1 DM_PAPERSIZE = 2 DM_PAPERLENGTH = 4 DM_PAPERWIDTH = 8 DM_SCALE = 16 DM_POSITION = 32 DM_NUP = 64 DM_DISPLAYORIENTATION = 128 DM_COPIES = 256 DM_DEFAULTSOURCE = 512 DM_PRINTQUALITY = 1024 DM_COLOR = 2048 DM_DUPLEX = 4096 DM_YRESOLUTION = 8192 DM_TTOPTION = 16384 DM_COLLATE = 32768 DM_FORMNAME = 65536 DM_LOGPIXELS = 131072 DM_BITSPERPEL = 262144 DM_PELSWIDTH = 524288 DM_PELSHEIGHT = 1048576 DM_DISPLAYFLAGS = 2097152 DM_DISPLAYFREQUENCY = 4194304 DM_ICMMETHOD = 8388608 DM_ICMINTENT = 16777216 DM_MEDIATYPE = 33554432 DM_DITHERTYPE = 67108864 DM_PANNINGWIDTH = 134217728 DM_PANNINGHEIGHT = 268435456 DM_DISPLAYFIXEDOUTPUT = 536870912 # DEVMODE.dmOrientation DMORIENT_PORTRAIT = 1 DMORIENT_LANDSCAPE = 2 # DEVMODE.dmDisplayOrientation DMDO_DEFAULT = 0 DMDO_90 = 1 DMDO_180 = 2 DMDO_270 = 3 # DEVMODE.dmDisplayFixedOutput DMDFO_DEFAULT = 0 DMDFO_STRETCH = 1 DMDFO_CENTER = 2 # DEVMODE.dmPaperSize DMPAPER_LETTER = 1 DMPAPER_LETTERSMALL = 2 DMPAPER_TABLOID = 3 DMPAPER_LEDGER = 4 DMPAPER_LEGAL = 5 DMPAPER_STATEMENT = 6 DMPAPER_EXECUTIVE = 7 DMPAPER_A3 = 8 DMPAPER_A4 = 9 DMPAPER_A4SMALL = 10 DMPAPER_A5 = 11 DMPAPER_B4 = 12 DMPAPER_B5 = 13 DMPAPER_FOLIO = 14 DMPAPER_QUARTO = 15 DMPAPER_10X14 = 16 DMPAPER_11X17 = 17 DMPAPER_NOTE = 18 DMPAPER_ENV_9 = 19 DMPAPER_ENV_10 = 20 DMPAPER_ENV_11 = 21 DMPAPER_ENV_12 = 22 DMPAPER_ENV_14 = 23 DMPAPER_CSHEET = 24 DMPAPER_DSHEET = 25 DMPAPER_ESHEET = 26 DMPAPER_ENV_DL = 27 DMPAPER_ENV_C5 = 28 DMPAPER_ENV_C3 = 29 DMPAPER_ENV_C4 = 30 DMPAPER_ENV_C6 = 31 DMPAPER_ENV_C65 = 32 DMPAPER_ENV_B4 = 33 DMPAPER_ENV_B5 = 34 DMPAPER_ENV_B6 = 35 DMPAPER_ENV_ITALY = 36 DMPAPER_ENV_MONARCH = 37 DMPAPER_ENV_PERSONAL = 38 DMPAPER_FANFOLD_US = 39 DMPAPER_FANFOLD_STD_GERMAN = 40 DMPAPER_FANFOLD_LGL_GERMAN = 41 DMPAPER_ISO_B4 = 42 DMPAPER_JAPANESE_POSTCARD = 43 DMPAPER_9X11 = 44 DMPAPER_10X11 = 45 DMPAPER_15X11 = 46 DMPAPER_ENV_INVITE = 47 DMPAPER_RESERVED_48 = 48 DMPAPER_RESERVED_49 = 49 DMPAPER_LETTER_EXTRA = 
50 DMPAPER_LEGAL_EXTRA = 51 DMPAPER_TABLOID_EXTRA = 52 DMPAPER_A4_EXTRA = 53 DMPAPER_LETTER_TRANSVERSE = 54 DMPAPER_A4_TRANSVERSE = 55 DMPAPER_LETTER_EXTRA_TRANSVERSE = 56 DMPAPER_A_PLUS = 57 DMPAPER_B_PLUS = 58 DMPAPER_LETTER_PLUS = 59 DMPAPER_A4_PLUS = 60 DMPAPER_A5_TRANSVERSE = 61 DMPAPER_B5_TRANSVERSE = 62 DMPAPER_A3_EXTRA = 63 DMPAPER_A5_EXTRA = 64 DMPAPER_B5_EXTRA = 65 DMPAPER_A2 = 66 DMPAPER_A3_TRANSVERSE = 67 DMPAPER_A3_EXTRA_TRANSVERSE = 68 DMPAPER_DBL_JAPANESE_POSTCARD = 69 DMPAPER_A6 = 70 DMPAPER_JENV_KAKU2 = 71 DMPAPER_JENV_KAKU3 = 72 DMPAPER_JENV_CHOU3 = 73 DMPAPER_JENV_CHOU4 = 74 DMPAPER_LETTER_ROTATED = 75 DMPAPER_A3_ROTATED = 76 DMPAPER_A4_ROTATED = 77 DMPAPER_A5_ROTATED = 78 DMPAPER_B4_JIS_ROTATED = 79 DMPAPER_B5_JIS_ROTATED = 80 DMPAPER_JAPANESE_POSTCARD_ROTATED = 81 DMPAPER_DBL_JAPANESE_POSTCARD_ROTATED = 82 DMPAPER_A6_ROTATED = 83 DMPAPER_JENV_KAKU2_ROTATED = 84 DMPAPER_JENV_KAKU3_ROTATED = 85 DMPAPER_JENV_CHOU3_ROTATED = 86 DMPAPER_JENV_CHOU4_ROTATED = 87 DMPAPER_B6_JIS = 88 DMPAPER_B6_JIS_ROTATED = 89 DMPAPER_12X11 = 90 DMPAPER_JENV_YOU4 = 91 DMPAPER_JENV_YOU4_ROTATED = 92 DMPAPER_P16K = 93 DMPAPER_P32K = 94 DMPAPER_P32KBIG = 95 DMPAPER_PENV_1 = 96 DMPAPER_PENV_2 = 97 DMPAPER_PENV_3 = 98 DMPAPER_PENV_4 = 99 DMPAPER_PENV_5 = 100 DMPAPER_PENV_6 = 101 DMPAPER_PENV_7 = 102 DMPAPER_PENV_8 = 103 DMPAPER_PENV_9 = 104 DMPAPER_PENV_10 = 105 DMPAPER_P16K_ROTATED = 106 DMPAPER_P32K_ROTATED = 107 DMPAPER_P32KBIG_ROTATED = 108 DMPAPER_PENV_1_ROTATED = 109 DMPAPER_PENV_2_ROTATED = 110 DMPAPER_PENV_3_ROTATED = 111 DMPAPER_PENV_4_ROTATED = 112 DMPAPER_PENV_5_ROTATED = 113 DMPAPER_PENV_6_ROTATED = 114 DMPAPER_PENV_7_ROTATED = 115 DMPAPER_PENV_8_ROTATED = 116 DMPAPER_PENV_9_ROTATED = 117 DMPAPER_PENV_10_ROTATED = 118 DMPAPER_LAST = DMPAPER_PENV_10_ROTATED DMPAPER_USER = 256 # DEVMODE.dmDefaultSource DMBIN_UPPER = 1 DMBIN_ONLYONE = 1 DMBIN_LOWER = 2 DMBIN_MIDDLE = 3 DMBIN_MANUAL = 4 DMBIN_ENVELOPE = 5 DMBIN_ENVMANUAL = 6 DMBIN_AUTO = 7 DMBIN_TRACTOR = 8 
DMBIN_SMALLFMT = 9 DMBIN_LARGEFMT = 10 DMBIN_LARGECAPACITY = 11 DMBIN_CASSETTE = 14 DMBIN_FORMSOURCE = 15 DMBIN_LAST = DMBIN_FORMSOURCE DMBIN_USER = 256 # DEVMODE.dmPrintQuality DMRES_DRAFT = (-1) DMRES_LOW = (-2) DMRES_MEDIUM = (-3) DMRES_HIGH = (-4) # DEVMODE.dmColor DMCOLOR_MONOCHROME = 1 DMCOLOR_COLOR = 2 # DEVMODE.dmDuplex DMDUP_SIMPLEX = 1 DMDUP_VERTICAL = 2 DMDUP_HORIZONTAL = 3 # DEVMODE.dmTTOption DMTT_BITMAP = 1 DMTT_DOWNLOAD = 2 DMTT_SUBDEV = 3 DMTT_DOWNLOAD_OUTLINE = 4 # DEVMODE.dmCollate DMCOLLATE_FALSE = 0 DMCOLLATE_TRUE = 1 # DEVMODE.dmDisplayFlags DM_GRAYSCALE = 1 DM_INTERLACED = 2 # DEVMODE.dmICMMethod DMICMMETHOD_NONE = 1 DMICMMETHOD_SYSTEM = 2 DMICMMETHOD_DRIVER = 3 DMICMMETHOD_DEVICE = 4 DMICMMETHOD_USER = 256 # DEVMODE.dmICMIntent DMICM_SATURATE = 1 DMICM_CONTRAST = 2 DMICM_COLORIMETRIC = 3 DMICM_ABS_COLORIMETRIC = 4 DMICM_USER = 256 # DEVMODE.dmMediaType DMMEDIA_STANDARD = 1 DMMEDIA_TRANSPARENCY = 2 DMMEDIA_GLOSSY = 3 DMMEDIA_USER = 256 # DEVMODE.dmDitherType DMDITHER_NONE = 1 DMDITHER_COARSE = 2 DMDITHER_FINE = 3 DMDITHER_LINEART = 4 DMDITHER_ERRORDIFFUSION = 5 DMDITHER_RESERVED6 = 6 DMDITHER_RESERVED7 = 7 DMDITHER_RESERVED8 = 8 DMDITHER_RESERVED9 = 9 DMDITHER_GRAYSCALE = 10 DMDITHER_USER = 256 # DEVMODE.dmNup DMNUP_SYSTEM = 1 DMNUP_ONEUP = 2 # used with ExtEscape FEATURESETTING_NUP = 0 FEATURESETTING_OUTPUT = 1 FEATURESETTING_PSLEVEL = 2 FEATURESETTING_CUSTPAPER = 3 FEATURESETTING_MIRROR = 4 FEATURESETTING_NEGATIVE = 5 FEATURESETTING_PROTOCOL = 6 FEATURESETTING_PRIVATE_BEGIN = 0x1000 FEATURESETTING_PRIVATE_END = 0x1FFF RDH_RECTANGLES = 1 GGO_METRICS = 0 GGO_BITMAP = 1 GGO_NATIVE = 2 TT_POLYGON_TYPE = 24 TT_PRIM_LINE = 1 TT_PRIM_QSPLINE = 2 TT_AVAILABLE = 1 TT_ENABLED = 2 DM_UPDATE = 1 DM_COPY = 2 DM_PROMPT = 4 DM_MODIFY = 8 DM_IN_BUFFER = DM_MODIFY DM_IN_PROMPT = DM_PROMPT DM_OUT_BUFFER = DM_COPY DM_OUT_DEFAULT = DM_UPDATE # DISPLAY_DEVICE.StateFlags DISPLAY_DEVICE_ATTACHED_TO_DESKTOP = 1 DISPLAY_DEVICE_MULTI_DRIVER = 2 
DISPLAY_DEVICE_PRIMARY_DEVICE = 4 DISPLAY_DEVICE_MIRRORING_DRIVER = 8 DISPLAY_DEVICE_VGA_COMPATIBLE = 16 DISPLAY_DEVICE_REMOVABLE = 32 DISPLAY_DEVICE_MODESPRUNED = 134217728 DISPLAY_DEVICE_REMOTE = 67108864 DISPLAY_DEVICE_DISCONNECT = 33554432 # DeviceCapabilities types DC_FIELDS = 1 DC_PAPERS = 2 DC_PAPERSIZE = 3 DC_MINEXTENT = 4 DC_MAXEXTENT = 5 DC_BINS = 6 DC_DUPLEX = 7 DC_SIZE = 8 DC_EXTRA = 9 DC_VERSION = 10 DC_DRIVER = 11 DC_BINNAMES = 12 DC_ENUMRESOLUTIONS = 13 DC_FILEDEPENDENCIES = 14 DC_TRUETYPE = 15 DC_PAPERNAMES = 16 DC_ORIENTATION = 17 DC_COPIES = 18 DC_BINADJUST = 19 DC_EMF_COMPLIANT = 20 DC_DATATYPE_PRODUCED = 21 DC_COLLATE = 22 DC_MANUFACTURER = 23 DC_MODEL = 24 DC_PERSONALITY = 25 DC_PRINTRATE = 26 DC_PRINTRATEUNIT = 27 DC_PRINTERMEM = 28 DC_MEDIAREADY = 29 DC_STAPLE = 30 DC_PRINTRATEPPM = 31 DC_COLORDEVICE = 32 DC_NUP = 33 DC_MEDIATYPENAMES = 34 DC_MEDIATYPES = 35 PRINTRATEUNIT_PPM = 1 PRINTRATEUNIT_CPS = 2 PRINTRATEUNIT_LPM = 3 PRINTRATEUNIT_IPM = 4 # TrueType constants DCTT_BITMAP = 1 DCTT_DOWNLOAD = 2 DCTT_SUBDEV = 4 DCTT_DOWNLOAD_OUTLINE = 8 DCBA_FACEUPNONE = 0 DCBA_FACEUPCENTER = 1 DCBA_FACEUPLEFT = 2 DCBA_FACEUPRIGHT = 3 DCBA_FACEDOWNNONE = 256 DCBA_FACEDOWNCENTER = 257 DCBA_FACEDOWNLEFT = 258 DCBA_FACEDOWNRIGHT = 259 CA_NEGATIVE = 1 CA_LOG_FILTER = 2 ILLUMINANT_DEVICE_DEFAULT = 0 ILLUMINANT_A = 1 ILLUMINANT_B = 2 ILLUMINANT_C = 3 ILLUMINANT_D50 = 4 ILLUMINANT_D55 = 5 ILLUMINANT_D65 = 6 ILLUMINANT_D75 = 7 ILLUMINANT_F2 = 8 ILLUMINANT_MAX_INDEX = ILLUMINANT_F2 ILLUMINANT_TUNGSTEN = ILLUMINANT_A ILLUMINANT_DAYLIGHT = ILLUMINANT_C ILLUMINANT_FLUORESCENT = ILLUMINANT_F2 ILLUMINANT_NTSC = ILLUMINANT_C # Generated by h2py from \msvcnt\include\wingdi.h # hacked and split manually by mhammond. 
FONTMAPPER_MAX = 10 ENHMETA_SIGNATURE = 1179469088 ENHMETA_STOCK_OBJECT = -2147483648 EMR_HEADER = 1 EMR_POLYBEZIER = 2 EMR_POLYGON = 3 EMR_POLYLINE = 4 EMR_POLYBEZIERTO = 5 EMR_POLYLINETO = 6 EMR_POLYPOLYLINE = 7 EMR_POLYPOLYGON = 8 EMR_SETWINDOWEXTEX = 9 EMR_SETWINDOWORGEX = 10 EMR_SETVIEWPORTEXTEX = 11 EMR_SETVIEWPORTORGEX = 12 EMR_SETBRUSHORGEX = 13 EMR_EOF = 14 EMR_SETPIXELV = 15 EMR_SETMAPPERFLAGS = 16 EMR_SETMAPMODE = 17 EMR_SETBKMODE = 18 EMR_SETPOLYFILLMODE = 19 EMR_SETROP2 = 20 EMR_SETSTRETCHBLTMODE = 21 EMR_SETTEXTALIGN = 22 EMR_SETCOLORADJUSTMENT = 23 EMR_SETTEXTCOLOR = 24 EMR_SETBKCOLOR = 25 EMR_OFFSETCLIPRGN = 26 EMR_MOVETOEX = 27 EMR_SETMETARGN = 28 EMR_EXCLUDECLIPRECT = 29 EMR_INTERSECTCLIPRECT = 30 EMR_SCALEVIEWPORTEXTEX = 31 EMR_SCALEWINDOWEXTEX = 32 EMR_SAVEDC = 33 EMR_RESTOREDC = 34 EMR_SETWORLDTRANSFORM = 35 EMR_MODIFYWORLDTRANSFORM = 36 EMR_SELECTOBJECT = 37 EMR_CREATEPEN = 38 EMR_CREATEBRUSHINDIRECT = 39 EMR_DELETEOBJECT = 40 EMR_ANGLEARC = 41 EMR_ELLIPSE = 42 EMR_RECTANGLE = 43 EMR_ROUNDRECT = 44 EMR_ARC = 45 EMR_CHORD = 46 EMR_PIE = 47 EMR_SELECTPALETTE = 48 EMR_CREATEPALETTE = 49 EMR_SETPALETTEENTRIES = 50 EMR_RESIZEPALETTE = 51 EMR_REALIZEPALETTE = 52 EMR_EXTFLOODFILL = 53 EMR_LINETO = 54 EMR_ARCTO = 55 EMR_POLYDRAW = 56 EMR_SETARCDIRECTION = 57 EMR_SETMITERLIMIT = 58 EMR_BEGINPATH = 59 EMR_ENDPATH = 60 EMR_CLOSEFIGURE = 61 EMR_FILLPATH = 62 EMR_STROKEANDFILLPATH = 63 EMR_STROKEPATH = 64 EMR_FLATTENPATH = 65 EMR_WIDENPATH = 66 EMR_SELECTCLIPPATH = 67 EMR_ABORTPATH = 68 EMR_GDICOMMENT = 70 EMR_FILLRGN = 71 EMR_FRAMERGN = 72 EMR_INVERTRGN = 73 EMR_PAINTRGN = 74 EMR_EXTSELECTCLIPRGN = 75 EMR_BITBLT = 76 EMR_STRETCHBLT = 77 EMR_MASKBLT = 78 EMR_PLGBLT = 79 EMR_SETDIBITSTODEVICE = 80 EMR_STRETCHDIBITS = 81 EMR_EXTCREATEFONTINDIRECTW = 82 EMR_EXTTEXTOUTA = 83 EMR_EXTTEXTOUTW = 84 EMR_POLYBEZIER16 = 85 EMR_POLYGON16 = 86 EMR_POLYLINE16 = 87 EMR_POLYBEZIERTO16 = 88 EMR_POLYLINETO16 = 89 EMR_POLYPOLYLINE16 = 90 EMR_POLYPOLYGON16 = 91 
EMR_POLYDRAW16 = 92 EMR_CREATEMONOBRUSH = 93 EMR_CREATEDIBPATTERNBRUSHPT = 94 EMR_EXTCREATEPEN = 95 EMR_POLYTEXTOUTA = 96 EMR_POLYTEXTOUTW = 97 EMR_MIN = 1 EMR_MAX = 97 # Generated by h2py from \msvcnt\include\wingdi.h # hacked and split manually by mhammond. PANOSE_COUNT = 10 PAN_FAMILYTYPE_INDEX = 0 PAN_SERIFSTYLE_INDEX = 1 PAN_WEIGHT_INDEX = 2 PAN_PROPORTION_INDEX = 3 PAN_CONTRAST_INDEX = 4 PAN_STROKEVARIATION_INDEX = 5 PAN_ARMSTYLE_INDEX = 6 PAN_LETTERFORM_INDEX = 7 PAN_MIDLINE_INDEX = 8 PAN_XHEIGHT_INDEX = 9 PAN_CULTURE_LATIN = 0 PAN_ANY = 0 PAN_NO_FIT = 1 PAN_FAMILY_TEXT_DISPLAY = 2 PAN_FAMILY_SCRIPT = 3 PAN_FAMILY_DECORATIVE = 4 PAN_FAMILY_PICTORIAL = 5 PAN_SERIF_COVE = 2 PAN_SERIF_OBTUSE_COVE = 3 PAN_SERIF_SQUARE_COVE = 4 PAN_SERIF_OBTUSE_SQUARE_COVE = 5 PAN_SERIF_SQUARE = 6 PAN_SERIF_THIN = 7 PAN_SERIF_BONE = 8 PAN_SERIF_EXAGGERATED = 9 PAN_SERIF_TRIANGLE = 10 PAN_SERIF_NORMAL_SANS = 11 PAN_SERIF_OBTUSE_SANS = 12 PAN_SERIF_PERP_SANS = 13 PAN_SERIF_FLARED = 14 PAN_SERIF_ROUNDED = 15 PAN_WEIGHT_VERY_LIGHT = 2 PAN_WEIGHT_LIGHT = 3 PAN_WEIGHT_THIN = 4 PAN_WEIGHT_BOOK = 5 PAN_WEIGHT_MEDIUM = 6 PAN_WEIGHT_DEMI = 7 PAN_WEIGHT_BOLD = 8 PAN_WEIGHT_HEAVY = 9 PAN_WEIGHT_BLACK = 10 PAN_WEIGHT_NORD = 11 PAN_PROP_OLD_STYLE = 2 PAN_PROP_MODERN = 3 PAN_PROP_EVEN_WIDTH = 4 PAN_PROP_EXPANDED = 5 PAN_PROP_CONDENSED = 6 PAN_PROP_VERY_EXPANDED = 7 PAN_PROP_VERY_CONDENSED = 8 PAN_PROP_MONOSPACED = 9 PAN_CONTRAST_NONE = 2 PAN_CONTRAST_VERY_LOW = 3 PAN_CONTRAST_LOW = 4 PAN_CONTRAST_MEDIUM_LOW = 5 PAN_CONTRAST_MEDIUM = 6 PAN_CONTRAST_MEDIUM_HIGH = 7 PAN_CONTRAST_HIGH = 8 PAN_CONTRAST_VERY_HIGH = 9 PAN_STROKE_GRADUAL_DIAG = 2 PAN_STROKE_GRADUAL_TRAN = 3 PAN_STROKE_GRADUAL_VERT = 4 PAN_STROKE_GRADUAL_HORZ = 5 PAN_STROKE_RAPID_VERT = 6 PAN_STROKE_RAPID_HORZ = 7 PAN_STROKE_INSTANT_VERT = 8 PAN_STRAIGHT_ARMS_HORZ = 2 PAN_STRAIGHT_ARMS_WEDGE = 3 PAN_STRAIGHT_ARMS_VERT = 4 PAN_STRAIGHT_ARMS_SINGLE_SERIF = 5 PAN_STRAIGHT_ARMS_DOUBLE_SERIF = 6 PAN_BENT_ARMS_HORZ = 7 PAN_BENT_ARMS_WEDGE = 8 
# Arm-style digit (continued).
PAN_BENT_ARMS_VERT = 9
PAN_BENT_ARMS_SINGLE_SERIF = 10
PAN_BENT_ARMS_DOUBLE_SERIF = 11
# Letterform digit.
PAN_LETT_NORMAL_CONTACT = 2
PAN_LETT_NORMAL_WEIGHTED = 3
PAN_LETT_NORMAL_BOXED = 4
PAN_LETT_NORMAL_FLATTENED = 5
PAN_LETT_NORMAL_ROUNDED = 6
PAN_LETT_NORMAL_OFF_CENTER = 7
PAN_LETT_NORMAL_SQUARE = 8
PAN_LETT_OBLIQUE_CONTACT = 9
PAN_LETT_OBLIQUE_WEIGHTED = 10
PAN_LETT_OBLIQUE_BOXED = 11
PAN_LETT_OBLIQUE_FLATTENED = 12
PAN_LETT_OBLIQUE_ROUNDED = 13
PAN_LETT_OBLIQUE_OFF_CENTER = 14
PAN_LETT_OBLIQUE_SQUARE = 15
# Midline digit.
PAN_MIDLINE_STANDARD_TRIMMED = 2
PAN_MIDLINE_STANDARD_POINTED = 3
PAN_MIDLINE_STANDARD_SERIFED = 4
PAN_MIDLINE_HIGH_TRIMMED = 5
PAN_MIDLINE_HIGH_POINTED = 6
PAN_MIDLINE_HIGH_SERIFED = 7
PAN_MIDLINE_CONSTANT_TRIMMED = 8
PAN_MIDLINE_CONSTANT_POINTED = 9
PAN_MIDLINE_CONSTANT_SERIFED = 10
PAN_MIDLINE_LOW_TRIMMED = 11
PAN_MIDLINE_LOW_POINTED = 12
PAN_MIDLINE_LOW_SERIFED = 13
# X-height digit.
PAN_XHEIGHT_CONSTANT_SMALL = 2
PAN_XHEIGHT_CONSTANT_STD = 3
PAN_XHEIGHT_CONSTANT_LARGE = 4
PAN_XHEIGHT_DUCKING_SMALL = 5
PAN_XHEIGHT_DUCKING_STD = 6
PAN_XHEIGHT_DUCKING_LARGE = 7
# Extended logical-font (ENUMLOGFONTEX) constants.
ELF_VENDOR_SIZE = 4
ELF_VERSION = 0
ELF_CULTURE_LATIN = 0
# Font-type flags reported by font-enumeration callbacks.
RASTER_FONTTYPE = 1
DEVICE_FONTTYPE = 2
TRUETYPE_FONTTYPE = 4


def PALETTEINDEX(i):
    """Return a palette-index COLORREF specifier: bit 24 set plus index *i*."""
    return 0x01000000 | i


# Palette entry flags.
PC_RESERVED = 1
PC_EXPLICIT = 2
PC_NOCOLLAPSE = 4


# COLORREF accessors.  A COLORREF is laid out 0x00BBGGRR: red in the low
# byte, green in bits 8-15, blue in bits 16-23.
def GetRValue(rgb):
    """Extract the red component (low byte) of a COLORREF."""
    return rgb & 0xFF


def GetGValue(rgb):
    """Extract the green component (bits 8-15) of a COLORREF."""
    return (rgb >> 8) & 0xFF


def GetBValue(rgb):
    """Extract the blue component (bits 16-23) of a COLORREF."""
    return (rgb >> 16) & 0xFF


# Background modes (SetBkMode).
TRANSPARENT = 1
OPAQUE = 2
BKMODE_LAST = 2
# Graphics modes (SetGraphicsMode).
GM_COMPATIBLE = 1
GM_ADVANCED = 2
GM_LAST = 2
# Path point flags (GetPath).
PT_CLOSEFIGURE = 1
PT_LINETO = 2
PT_BEZIERTO = 4
PT_MOVETO = 6
# Mapping modes (SetMapMode).
MM_TEXT = 1
MM_LOMETRIC = 2
MM_HIMETRIC = 3
MM_LOENGLISH = 4
MM_HIENGLISH = 5
MM_TWIPS = 6
MM_ISOTROPIC = 7
MM_ANISOTROPIC = 8
MM_MIN = MM_TEXT
MM_MAX = MM_ANISOTROPIC
MM_MAX_FIXEDSCALE = MM_TWIPS
# Coordinate modes (SetPolyFillMode-adjacent ABSOLUTE/RELATIVE).
ABSOLUTE = 1
RELATIVE = 2
# Stock object indices (GetStockObject).
WHITE_BRUSH = 0
LTGRAY_BRUSH = 1
GRAY_BRUSH = 2
DKGRAY_BRUSH = 3
BLACK_BRUSH = 4
NULL_BRUSH = 5
HOLLOW_BRUSH = NULL_BRUSH
WHITE_PEN = 6
BLACK_PEN = 7
NULL_PEN = 8
OEM_FIXED_FONT = 10
ANSI_FIXED_FONT = 11
ANSI_VAR_FONT = 12
SYSTEM_FONT = 13 DEVICE_DEFAULT_FONT = 14 DEFAULT_PALETTE = 15 SYSTEM_FIXED_FONT = 16 STOCK_LAST = 16 CLR_INVALID = -1 DC_BRUSH = 18 DC_PEN = 19 # Exception/Status codes from winuser.h and winnt.h STATUS_WAIT_0 = 0 STATUS_ABANDONED_WAIT_0 = 128 STATUS_USER_APC = 192 STATUS_TIMEOUT = 258 STATUS_PENDING = 259 STATUS_SEGMENT_NOTIFICATION = 1073741829 STATUS_GUARD_PAGE_VIOLATION = -2147483647 STATUS_DATATYPE_MISALIGNMENT = -2147483646 STATUS_BREAKPOINT = -2147483645 STATUS_SINGLE_STEP = -2147483644 STATUS_ACCESS_VIOLATION = -1073741819 STATUS_IN_PAGE_ERROR = -1073741818 STATUS_INVALID_HANDLE = -1073741816 STATUS_NO_MEMORY = -1073741801 STATUS_ILLEGAL_INSTRUCTION = -1073741795 STATUS_NONCONTINUABLE_EXCEPTION = -1073741787 STATUS_INVALID_DISPOSITION = -1073741786 STATUS_ARRAY_BOUNDS_EXCEEDED = -1073741684 STATUS_FLOAT_DENORMAL_OPERAND = -1073741683 STATUS_FLOAT_DIVIDE_BY_ZERO = -1073741682 STATUS_FLOAT_INEXACT_RESULT = -1073741681 STATUS_FLOAT_INVALID_OPERATION = -1073741680 STATUS_FLOAT_OVERFLOW = -1073741679 STATUS_FLOAT_STACK_CHECK = -1073741678 STATUS_FLOAT_UNDERFLOW = -1073741677 STATUS_INTEGER_DIVIDE_BY_ZERO = -1073741676 STATUS_INTEGER_OVERFLOW = -1073741675 STATUS_PRIVILEGED_INSTRUCTION = -1073741674 STATUS_STACK_OVERFLOW = -1073741571 STATUS_CONTROL_C_EXIT = -1073741510 WAIT_FAILED = -1 WAIT_OBJECT_0 = STATUS_WAIT_0 + 0 WAIT_ABANDONED = STATUS_ABANDONED_WAIT_0 + 0 WAIT_ABANDONED_0 = STATUS_ABANDONED_WAIT_0 + 0 WAIT_TIMEOUT = STATUS_TIMEOUT WAIT_IO_COMPLETION = STATUS_USER_APC STILL_ACTIVE = STATUS_PENDING EXCEPTION_ACCESS_VIOLATION = STATUS_ACCESS_VIOLATION EXCEPTION_DATATYPE_MISALIGNMENT = STATUS_DATATYPE_MISALIGNMENT EXCEPTION_BREAKPOINT = STATUS_BREAKPOINT EXCEPTION_SINGLE_STEP = STATUS_SINGLE_STEP EXCEPTION_ARRAY_BOUNDS_EXCEEDED = STATUS_ARRAY_BOUNDS_EXCEEDED EXCEPTION_FLT_DENORMAL_OPERAND = STATUS_FLOAT_DENORMAL_OPERAND EXCEPTION_FLT_DIVIDE_BY_ZERO = STATUS_FLOAT_DIVIDE_BY_ZERO EXCEPTION_FLT_INEXACT_RESULT = STATUS_FLOAT_INEXACT_RESULT 
EXCEPTION_FLT_INVALID_OPERATION = STATUS_FLOAT_INVALID_OPERATION EXCEPTION_FLT_OVERFLOW = STATUS_FLOAT_OVERFLOW EXCEPTION_FLT_STACK_CHECK = STATUS_FLOAT_STACK_CHECK EXCEPTION_FLT_UNDERFLOW = STATUS_FLOAT_UNDERFLOW EXCEPTION_INT_DIVIDE_BY_ZERO = STATUS_INTEGER_DIVIDE_BY_ZERO EXCEPTION_INT_OVERFLOW = STATUS_INTEGER_OVERFLOW EXCEPTION_PRIV_INSTRUCTION = STATUS_PRIVILEGED_INSTRUCTION EXCEPTION_IN_PAGE_ERROR = STATUS_IN_PAGE_ERROR EXCEPTION_ILLEGAL_INSTRUCTION = STATUS_ILLEGAL_INSTRUCTION EXCEPTION_NONCONTINUABLE_EXCEPTION = STATUS_NONCONTINUABLE_EXCEPTION EXCEPTION_STACK_OVERFLOW = STATUS_STACK_OVERFLOW EXCEPTION_INVALID_DISPOSITION = STATUS_INVALID_DISPOSITION EXCEPTION_GUARD_PAGE = STATUS_GUARD_PAGE_VIOLATION EXCEPTION_INVALID_HANDLE = STATUS_INVALID_HANDLE CONTROL_C_EXIT = STATUS_CONTROL_C_EXIT # winuser.h line 8594 # constants used with SystemParametersInfo SPI_GETBEEP = 1 SPI_SETBEEP = 2 SPI_GETMOUSE = 3 SPI_SETMOUSE = 4 SPI_GETBORDER = 5 SPI_SETBORDER = 6 SPI_GETKEYBOARDSPEED = 10 SPI_SETKEYBOARDSPEED = 11 SPI_LANGDRIVER = 12 SPI_ICONHORIZONTALSPACING = 13 SPI_GETSCREENSAVETIMEOUT = 14 SPI_SETSCREENSAVETIMEOUT = 15 SPI_GETSCREENSAVEACTIVE = 16 SPI_SETSCREENSAVEACTIVE = 17 SPI_GETGRIDGRANULARITY = 18 SPI_SETGRIDGRANULARITY = 19 SPI_SETDESKWALLPAPER = 20 SPI_SETDESKPATTERN = 21 SPI_GETKEYBOARDDELAY = 22 SPI_SETKEYBOARDDELAY = 23 SPI_ICONVERTICALSPACING = 24 SPI_GETICONTITLEWRAP = 25 SPI_SETICONTITLEWRAP = 26 SPI_GETMENUDROPALIGNMENT = 27 SPI_SETMENUDROPALIGNMENT = 28 SPI_SETDOUBLECLKWIDTH = 29 SPI_SETDOUBLECLKHEIGHT = 30 SPI_GETICONTITLELOGFONT = 31 SPI_SETDOUBLECLICKTIME = 32 SPI_SETMOUSEBUTTONSWAP = 33 SPI_SETICONTITLELOGFONT = 34 SPI_GETFASTTASKSWITCH = 35 SPI_SETFASTTASKSWITCH = 36 SPI_SETDRAGFULLWINDOWS = 37 SPI_GETDRAGFULLWINDOWS = 38 SPI_GETNONCLIENTMETRICS = 41 SPI_SETNONCLIENTMETRICS = 42 SPI_GETMINIMIZEDMETRICS = 43 SPI_SETMINIMIZEDMETRICS = 44 SPI_GETICONMETRICS = 45 SPI_SETICONMETRICS = 46 SPI_SETWORKAREA = 47 SPI_GETWORKAREA = 48 SPI_SETPENWINDOWS = 49 
SPI_GETFILTERKEYS = 50 SPI_SETFILTERKEYS = 51 SPI_GETTOGGLEKEYS = 52 SPI_SETTOGGLEKEYS = 53 SPI_GETMOUSEKEYS = 54 SPI_SETMOUSEKEYS = 55 SPI_GETSHOWSOUNDS = 56 SPI_SETSHOWSOUNDS = 57 SPI_GETSTICKYKEYS = 58 SPI_SETSTICKYKEYS = 59 SPI_GETACCESSTIMEOUT = 60 SPI_SETACCESSTIMEOUT = 61 SPI_GETSERIALKEYS = 62 SPI_SETSERIALKEYS = 63 SPI_GETSOUNDSENTRY = 64 SPI_SETSOUNDSENTRY = 65 SPI_GETHIGHCONTRAST = 66 SPI_SETHIGHCONTRAST = 67 SPI_GETKEYBOARDPREF = 68 SPI_SETKEYBOARDPREF = 69 SPI_GETSCREENREADER = 70 SPI_SETSCREENREADER = 71 SPI_GETANIMATION = 72 SPI_SETANIMATION = 73 SPI_GETFONTSMOOTHING = 74 SPI_SETFONTSMOOTHING = 75 SPI_SETDRAGWIDTH = 76 SPI_SETDRAGHEIGHT = 77 SPI_SETHANDHELD = 78 SPI_GETLOWPOWERTIMEOUT = 79 SPI_GETPOWEROFFTIMEOUT = 80 SPI_SETLOWPOWERTIMEOUT = 81 SPI_SETPOWEROFFTIMEOUT = 82 SPI_GETLOWPOWERACTIVE = 83 SPI_GETPOWEROFFACTIVE = 84 SPI_SETLOWPOWERACTIVE = 85 SPI_SETPOWEROFFACTIVE = 86 SPI_SETCURSORS = 87 SPI_SETICONS = 88 SPI_GETDEFAULTINPUTLANG = 89 SPI_SETDEFAULTINPUTLANG = 90 SPI_SETLANGTOGGLE = 91 SPI_GETWINDOWSEXTENSION = 92 SPI_SETMOUSETRAILS = 93 SPI_GETMOUSETRAILS = 94 SPI_GETSNAPTODEFBUTTON = 95 SPI_SETSNAPTODEFBUTTON = 96 SPI_SETSCREENSAVERRUNNING = 97 SPI_SCREENSAVERRUNNING = SPI_SETSCREENSAVERRUNNING SPI_GETMOUSEHOVERWIDTH = 98 SPI_SETMOUSEHOVERWIDTH = 99 SPI_GETMOUSEHOVERHEIGHT = 100 SPI_SETMOUSEHOVERHEIGHT = 101 SPI_GETMOUSEHOVERTIME = 102 SPI_SETMOUSEHOVERTIME = 103 SPI_GETWHEELSCROLLLINES = 104 SPI_SETWHEELSCROLLLINES = 105 SPI_GETMENUSHOWDELAY = 106 SPI_SETMENUSHOWDELAY = 107 SPI_GETSHOWIMEUI = 110 SPI_SETSHOWIMEUI = 111 SPI_GETMOUSESPEED = 112 SPI_SETMOUSESPEED = 113 SPI_GETSCREENSAVERRUNNING = 114 SPI_GETDESKWALLPAPER = 115 SPI_GETACTIVEWINDOWTRACKING = 4096 SPI_SETACTIVEWINDOWTRACKING = 4097 SPI_GETMENUANIMATION = 4098 SPI_SETMENUANIMATION = 4099 SPI_GETCOMBOBOXANIMATION = 4100 SPI_SETCOMBOBOXANIMATION = 4101 SPI_GETLISTBOXSMOOTHSCROLLING = 4102 SPI_SETLISTBOXSMOOTHSCROLLING = 4103 SPI_GETGRADIENTCAPTIONS = 4104 SPI_SETGRADIENTCAPTIONS = 
4105 SPI_GETKEYBOARDCUES = 4106 SPI_SETKEYBOARDCUES = 4107 SPI_GETMENUUNDERLINES = 4106 SPI_SETMENUUNDERLINES = 4107 SPI_GETACTIVEWNDTRKZORDER = 4108 SPI_SETACTIVEWNDTRKZORDER = 4109 SPI_GETHOTTRACKING = 4110 SPI_SETHOTTRACKING = 4111 SPI_GETMENUFADE = 4114 SPI_SETMENUFADE = 4115 SPI_GETSELECTIONFADE = 4116 SPI_SETSELECTIONFADE = 4117 SPI_GETTOOLTIPANIMATION = 4118 SPI_SETTOOLTIPANIMATION = 4119 SPI_GETTOOLTIPFADE = 4120 SPI_SETTOOLTIPFADE = 4121 SPI_GETCURSORSHADOW = 4122 SPI_SETCURSORSHADOW = 4123 SPI_GETMOUSESONAR = 4124 SPI_SETMOUSESONAR = 4125 SPI_GETMOUSECLICKLOCK = 4126 SPI_SETMOUSECLICKLOCK = 4127 SPI_GETMOUSEVANISH = 4128 SPI_SETMOUSEVANISH = 4129 SPI_GETFLATMENU = 4130 SPI_SETFLATMENU = 4131 SPI_GETDROPSHADOW = 4132 SPI_SETDROPSHADOW = 4133 SPI_GETBLOCKSENDINPUTRESETS = 4134 SPI_SETBLOCKSENDINPUTRESETS = 4135 SPI_GETUIEFFECTS = 4158 SPI_SETUIEFFECTS = 4159 SPI_GETFOREGROUNDLOCKTIMEOUT = 8192 SPI_SETFOREGROUNDLOCKTIMEOUT = 8193 SPI_GETACTIVEWNDTRKTIMEOUT = 8194 SPI_SETACTIVEWNDTRKTIMEOUT = 8195 SPI_GETFOREGROUNDFLASHCOUNT = 8196 SPI_SETFOREGROUNDFLASHCOUNT = 8197 SPI_GETCARETWIDTH = 8198 SPI_SETCARETWIDTH = 8199 SPI_GETMOUSECLICKLOCKTIME = 8200 SPI_SETMOUSECLICKLOCKTIME = 8201 SPI_GETFONTSMOOTHINGTYPE = 8202 SPI_SETFONTSMOOTHINGTYPE = 8203 SPI_GETFONTSMOOTHINGCONTRAST = 8204 SPI_SETFONTSMOOTHINGCONTRAST = 8205 SPI_GETFOCUSBORDERWIDTH = 8206 SPI_SETFOCUSBORDERWIDTH = 8207 SPI_GETFOCUSBORDERHEIGHT = 8208 SPI_SETFOCUSBORDERHEIGHT = 8209 SPI_GETFONTSMOOTHINGORIENTATION = 8210 SPI_SETFONTSMOOTHINGORIENTATION = 8211 # fWinIni flags for SystemParametersInfo SPIF_UPDATEINIFILE = 1 SPIF_SENDWININICHANGE = 2 SPIF_SENDCHANGE = SPIF_SENDWININICHANGE # used with SystemParametersInfo and SPI_GETFONTSMOOTHINGTYPE/SPI_SETFONTSMOOTHINGTYPE FE_FONTSMOOTHINGSTANDARD = 1 FE_FONTSMOOTHINGCLEARTYPE = 2 FE_FONTSMOOTHINGDOCKING = 32768 METRICS_USEDEFAULT = -1 ARW_BOTTOMLEFT = 0 ARW_BOTTOMRIGHT = 1 ARW_TOPLEFT = 2 ARW_TOPRIGHT = 3 ARW_STARTMASK = 3 ARW_STARTRIGHT = 1 ARW_STARTTOP 
= 2 ARW_LEFT = 0 ARW_RIGHT = 0 ARW_UP = 4 ARW_DOWN = 4 ARW_HIDE = 8 #ARW_VALID = 0x000F SERKF_SERIALKEYSON = 1 SERKF_AVAILABLE = 2 SERKF_INDICATOR = 4 HCF_HIGHCONTRASTON = 1 HCF_AVAILABLE = 2 HCF_HOTKEYACTIVE = 4 HCF_CONFIRMHOTKEY = 8 HCF_HOTKEYSOUND = 16 HCF_INDICATOR = 32 HCF_HOTKEYAVAILABLE = 64 CDS_UPDATEREGISTRY = 1 CDS_TEST = 2 CDS_FULLSCREEN = 4 CDS_GLOBAL = 8 CDS_SET_PRIMARY = 16 CDS_RESET = 1073741824 CDS_SETRECT = 536870912 CDS_NORESET = 268435456 # return values from ChangeDisplaySettings and ChangeDisplaySettingsEx DISP_CHANGE_SUCCESSFUL = 0 DISP_CHANGE_RESTART = 1 DISP_CHANGE_FAILED = -1 DISP_CHANGE_BADMODE = -2 DISP_CHANGE_NOTUPDATED = -3 DISP_CHANGE_BADFLAGS = -4 DISP_CHANGE_BADPARAM = -5 DISP_CHANGE_BADDUALVIEW = -6 ENUM_CURRENT_SETTINGS = -1 ENUM_REGISTRY_SETTINGS = -2 FKF_FILTERKEYSON = 1 FKF_AVAILABLE = 2 FKF_HOTKEYACTIVE = 4 FKF_CONFIRMHOTKEY = 8 FKF_HOTKEYSOUND = 16 FKF_INDICATOR = 32 FKF_CLICKON = 64 SKF_STICKYKEYSON = 1 SKF_AVAILABLE = 2 SKF_HOTKEYACTIVE = 4 SKF_CONFIRMHOTKEY = 8 SKF_HOTKEYSOUND = 16 SKF_INDICATOR = 32 SKF_AUDIBLEFEEDBACK = 64 SKF_TRISTATE = 128 SKF_TWOKEYSOFF = 256 SKF_LALTLATCHED = 268435456 SKF_LCTLLATCHED = 67108864 SKF_LSHIFTLATCHED = 16777216 SKF_RALTLATCHED = 536870912 SKF_RCTLLATCHED = 134217728 SKF_RSHIFTLATCHED = 33554432 SKF_LWINLATCHED = 1073741824 SKF_RWINLATCHED = -2147483648 SKF_LALTLOCKED = 1048576 SKF_LCTLLOCKED = 262144 SKF_LSHIFTLOCKED = 65536 SKF_RALTLOCKED = 2097152 SKF_RCTLLOCKED = 524288 SKF_RSHIFTLOCKED = 131072 SKF_LWINLOCKED = 4194304 SKF_RWINLOCKED = 8388608 MKF_MOUSEKEYSON = 1 MKF_AVAILABLE = 2 MKF_HOTKEYACTIVE = 4 MKF_CONFIRMHOTKEY = 8 MKF_HOTKEYSOUND = 16 MKF_INDICATOR = 32 MKF_MODIFIERS = 64 MKF_REPLACENUMBERS = 128 MKF_LEFTBUTTONSEL = 268435456 MKF_RIGHTBUTTONSEL = 536870912 MKF_LEFTBUTTONDOWN = 16777216 MKF_RIGHTBUTTONDOWN = 33554432 MKF_MOUSEMODE = -2147483648 ATF_TIMEOUTON = 1 ATF_ONOFFFEEDBACK = 2 SSGF_NONE = 0 SSGF_DISPLAY = 3 SSTF_NONE = 0 SSTF_CHARS = 1 SSTF_BORDER = 2 SSTF_DISPLAY = 3 
SSWF_NONE = 0 SSWF_TITLE = 1 SSWF_WINDOW = 2 SSWF_DISPLAY = 3 SSWF_CUSTOM = 4 SSF_SOUNDSENTRYON = 1 SSF_AVAILABLE = 2 SSF_INDICATOR = 4 TKF_TOGGLEKEYSON = 1 TKF_AVAILABLE = 2 TKF_HOTKEYACTIVE = 4 TKF_CONFIRMHOTKEY = 8 TKF_HOTKEYSOUND = 16 TKF_INDICATOR = 32 SLE_ERROR = 1 SLE_MINORERROR = 2 SLE_WARNING = 3 MONITOR_DEFAULTTONULL = 0 MONITOR_DEFAULTTOPRIMARY = 1 MONITOR_DEFAULTTONEAREST = 2 MONITORINFOF_PRIMARY = 1 CCHDEVICENAME = 32 CHILDID_SELF = 0 INDEXID_OBJECT = 0 INDEXID_CONTAINER = 0 OBJID_WINDOW = 0 OBJID_SYSMENU = -1 OBJID_TITLEBAR = -2 OBJID_MENU = -3 OBJID_CLIENT = -4 OBJID_VSCROLL = -5 OBJID_HSCROLL = -6 OBJID_SIZEGRIP = -7 OBJID_CARET = -8 OBJID_CURSOR = -9 OBJID_ALERT = -10 OBJID_SOUND = -11 EVENT_MIN = 1 EVENT_MAX = 2147483647 EVENT_SYSTEM_SOUND = 1 EVENT_SYSTEM_ALERT = 2 EVENT_SYSTEM_FOREGROUND = 3 EVENT_SYSTEM_MENUSTART = 4 EVENT_SYSTEM_MENUEND = 5 EVENT_SYSTEM_MENUPOPUPSTART = 6 EVENT_SYSTEM_MENUPOPUPEND = 7 EVENT_SYSTEM_CAPTURESTART = 8 EVENT_SYSTEM_CAPTUREEND = 9 EVENT_SYSTEM_MOVESIZESTART = 10 EVENT_SYSTEM_MOVESIZEEND = 11 EVENT_SYSTEM_CONTEXTHELPSTART = 12 EVENT_SYSTEM_CONTEXTHELPEND = 13 EVENT_SYSTEM_DRAGDROPSTART = 14 EVENT_SYSTEM_DRAGDROPEND = 15 EVENT_SYSTEM_DIALOGSTART = 16 EVENT_SYSTEM_DIALOGEND = 17 EVENT_SYSTEM_SCROLLINGSTART = 18 EVENT_SYSTEM_SCROLLINGEND = 19 EVENT_SYSTEM_SWITCHSTART = 20 EVENT_SYSTEM_SWITCHEND = 21 EVENT_SYSTEM_MINIMIZESTART = 22 EVENT_SYSTEM_MINIMIZEEND = 23 EVENT_OBJECT_CREATE = 32768 EVENT_OBJECT_DESTROY = 32769 EVENT_OBJECT_SHOW = 32770 EVENT_OBJECT_HIDE = 32771 EVENT_OBJECT_REORDER = 32772 EVENT_OBJECT_FOCUS = 32773 EVENT_OBJECT_SELECTION = 32774 EVENT_OBJECT_SELECTIONADD = 32775 EVENT_OBJECT_SELECTIONREMOVE = 32776 EVENT_OBJECT_SELECTIONWITHIN = 32777 EVENT_OBJECT_STATECHANGE = 32778 EVENT_OBJECT_LOCATIONCHANGE = 32779 EVENT_OBJECT_NAMECHANGE = 32780 EVENT_OBJECT_DESCRIPTIONCHANGE = 32781 EVENT_OBJECT_VALUECHANGE = 32782 EVENT_OBJECT_PARENTCHANGE = 32783 EVENT_OBJECT_HELPCHANGE = 32784 
EVENT_OBJECT_DEFACTIONCHANGE = 32785 EVENT_OBJECT_ACCELERATORCHANGE = 32786 SOUND_SYSTEM_STARTUP = 1 SOUND_SYSTEM_SHUTDOWN = 2 SOUND_SYSTEM_BEEP = 3 SOUND_SYSTEM_ERROR = 4 SOUND_SYSTEM_QUESTION = 5 SOUND_SYSTEM_WARNING = 6 SOUND_SYSTEM_INFORMATION = 7 SOUND_SYSTEM_MAXIMIZE = 8 SOUND_SYSTEM_MINIMIZE = 9 SOUND_SYSTEM_RESTOREUP = 10 SOUND_SYSTEM_RESTOREDOWN = 11 SOUND_SYSTEM_APPSTART = 12 SOUND_SYSTEM_FAULT = 13 SOUND_SYSTEM_APPEND = 14 SOUND_SYSTEM_MENUCOMMAND = 15 SOUND_SYSTEM_MENUPOPUP = 16 CSOUND_SYSTEM = 16 ALERT_SYSTEM_INFORMATIONAL = 1 ALERT_SYSTEM_WARNING = 2 ALERT_SYSTEM_ERROR = 3 ALERT_SYSTEM_QUERY = 4 ALERT_SYSTEM_CRITICAL = 5 CALERT_SYSTEM = 6 WINEVENT_OUTOFCONTEXT = 0 WINEVENT_SKIPOWNTHREAD = 1 WINEVENT_SKIPOWNPROCESS = 2 WINEVENT_INCONTEXT = 4 GUI_CARETBLINKING = 1 GUI_INMOVESIZE = 2 GUI_INMENUMODE = 4 GUI_SYSTEMMENUMODE = 8 GUI_POPUPMENUMODE = 16 STATE_SYSTEM_UNAVAILABLE = 1 STATE_SYSTEM_SELECTED = 2 STATE_SYSTEM_FOCUSED = 4 STATE_SYSTEM_PRESSED = 8 STATE_SYSTEM_CHECKED = 16 STATE_SYSTEM_MIXED = 32 STATE_SYSTEM_READONLY = 64 STATE_SYSTEM_HOTTRACKED = 128 STATE_SYSTEM_DEFAULT = 256 STATE_SYSTEM_EXPANDED = 512 STATE_SYSTEM_COLLAPSED = 1024 STATE_SYSTEM_BUSY = 2048 STATE_SYSTEM_FLOATING = 4096 STATE_SYSTEM_MARQUEED = 8192 STATE_SYSTEM_ANIMATED = 16384 STATE_SYSTEM_INVISIBLE = 32768 STATE_SYSTEM_OFFSCREEN = 65536 STATE_SYSTEM_SIZEABLE = 131072 STATE_SYSTEM_MOVEABLE = 262144 STATE_SYSTEM_SELFVOICING = 524288 STATE_SYSTEM_FOCUSABLE = 1048576 STATE_SYSTEM_SELECTABLE = 2097152 STATE_SYSTEM_LINKED = 4194304 STATE_SYSTEM_TRAVERSED = 8388608 STATE_SYSTEM_MULTISELECTABLE = 16777216 STATE_SYSTEM_EXTSELECTABLE = 33554432 STATE_SYSTEM_ALERT_LOW = 67108864 STATE_SYSTEM_ALERT_MEDIUM = 134217728 STATE_SYSTEM_ALERT_HIGH = 268435456 STATE_SYSTEM_VALID = 536870911 CCHILDREN_TITLEBAR = 5 CCHILDREN_SCROLLBAR = 5 CURSOR_SHOWING = 1 WS_ACTIVECAPTION = 1 GA_MIC = 1 GA_PARENT = 1 GA_ROOT = 2 GA_ROOTOWNER = 3 GA_MAC = 4 # winuser.h line 1979 BF_LEFT = 1 BF_TOP = 2 BF_RIGHT = 4 
BF_BOTTOM = 8 BF_TOPLEFT = (BF_TOP | BF_LEFT) BF_TOPRIGHT = (BF_TOP | BF_RIGHT) BF_BOTTOMLEFT = (BF_BOTTOM | BF_LEFT) BF_BOTTOMRIGHT = (BF_BOTTOM | BF_RIGHT) BF_RECT = (BF_LEFT | BF_TOP | BF_RIGHT | BF_BOTTOM) BF_DIAGONAL = 16 BF_DIAGONAL_ENDTOPRIGHT = (BF_DIAGONAL | BF_TOP | BF_RIGHT) BF_DIAGONAL_ENDTOPLEFT = (BF_DIAGONAL | BF_TOP | BF_LEFT) BF_DIAGONAL_ENDBOTTOMLEFT = (BF_DIAGONAL | BF_BOTTOM | BF_LEFT) BF_DIAGONAL_ENDBOTTOMRIGHT = (BF_DIAGONAL | BF_BOTTOM | BF_RIGHT) BF_MIDDLE = 2048 BF_SOFT = 4096 BF_ADJUST = 8192 BF_FLAT = 16384 BF_MONO = 32768 DFC_CAPTION = 1 DFC_MENU = 2 DFC_SCROLL = 3 DFC_BUTTON = 4 DFC_POPUPMENU = 5 DFCS_CAPTIONCLOSE = 0 DFCS_CAPTIONMIN = 1 DFCS_CAPTIONMAX = 2 DFCS_CAPTIONRESTORE = 3 DFCS_CAPTIONHELP = 4 DFCS_MENUARROW = 0 DFCS_MENUCHECK = 1 DFCS_MENUBULLET = 2 DFCS_MENUARROWRIGHT = 4 DFCS_SCROLLUP = 0 DFCS_SCROLLDOWN = 1 DFCS_SCROLLLEFT = 2 DFCS_SCROLLRIGHT = 3 DFCS_SCROLLCOMBOBOX = 5 DFCS_SCROLLSIZEGRIP = 8 DFCS_SCROLLSIZEGRIPRIGHT = 16 DFCS_BUTTONCHECK = 0 DFCS_BUTTONRADIOIMAGE = 1 DFCS_BUTTONRADIOMASK = 2 DFCS_BUTTONRADIO = 4 DFCS_BUTTON3STATE = 8 DFCS_BUTTONPUSH = 16 DFCS_INACTIVE = 256 DFCS_PUSHED = 512 DFCS_CHECKED = 1024 DFCS_TRANSPARENT = 2048 DFCS_HOT = 4096 DFCS_ADJUSTRECT = 8192 DFCS_FLAT = 16384 DFCS_MONO = 32768 DC_ACTIVE = 1 DC_SMALLCAP = 2 DC_ICON = 4 DC_TEXT = 8 DC_INBUTTON = 16 DC_GRADIENT = 32 IDANI_OPEN = 1 IDANI_CLOSE = 2 IDANI_CAPTION = 3 CF_TEXT = 1 CF_BITMAP = 2 CF_METAFILEPICT = 3 CF_SYLK = 4 CF_DIF = 5 CF_TIFF = 6 CF_OEMTEXT = 7 CF_DIB = 8 CF_PALETTE = 9 CF_PENDATA = 10 CF_RIFF = 11 CF_WAVE = 12 CF_UNICODETEXT = 13 CF_ENHMETAFILE = 14 CF_HDROP = 15 CF_LOCALE = 16 CF_DIBV5 = 17 CF_MAX = 18 CF_OWNERDISPLAY = 128 CF_DSPTEXT = 129 CF_DSPBITMAP = 130 CF_DSPMETAFILEPICT = 131 CF_DSPENHMETAFILE = 142 CF_PRIVATEFIRST = 512 CF_PRIVATELAST = 767 CF_GDIOBJFIRST = 768 CF_GDIOBJLAST = 1023 FVIRTKEY =1 FNOINVERT = 2 FSHIFT = 4 FCONTROL = 8 FALT = 16 WPF_SETMINPOSITION = 1 WPF_RESTORETOMAXIMIZED = 2 ODT_MENU = 1 ODT_LISTBOX = 2 
ODT_COMBOBOX = 3 ODT_BUTTON = 4 ODT_STATIC = 5 ODA_DRAWENTIRE = 1 ODA_SELECT = 2 ODA_FOCUS = 4 ODS_SELECTED = 1 ODS_GRAYED = 2 ODS_DISABLED = 4 ODS_CHECKED = 8 ODS_FOCUS = 16 ODS_DEFAULT = 32 ODS_COMBOBOXEDIT = 4096 ODS_HOTLIGHT = 64 ODS_INACTIVE = 128 PM_NOREMOVE = 0 PM_REMOVE = 1 PM_NOYIELD = 2 MOD_ALT = 1 MOD_CONTROL = 2 MOD_SHIFT = 4 MOD_WIN = 8 IDHOT_SNAPWINDOW = (-1) IDHOT_SNAPDESKTOP = (-2) #EW_RESTARTWINDOWS = 0x0042 #EW_REBOOTSYSTEM = 0x0043 #EW_EXITANDEXECAPP = 0x0044 ENDSESSION_LOGOFF = -2147483648 EWX_LOGOFF = 0 EWX_SHUTDOWN = 1 EWX_REBOOT = 2 EWX_FORCE = 4 EWX_POWEROFF = 8 EWX_FORCEIFHUNG = 16 BSM_ALLCOMPONENTS = 0 BSM_VXDS = 1 BSM_NETDRIVER = 2 BSM_INSTALLABLEDRIVERS = 4 BSM_APPLICATIONS = 8 BSM_ALLDESKTOPS = 16 BSF_QUERY = 1 BSF_IGNORECURRENTTASK = 2 BSF_FLUSHDISK = 4 BSF_NOHANG = 8 BSF_POSTMESSAGE = 16 BSF_FORCEIFHUNG = 32 BSF_NOTIMEOUTIFNOTHUNG = 64 BROADCAST_QUERY_DENY = 1112363332 # Return this value to deny a query. DBWF_LPARAMPOINTER = 32768 # winuser.h line 3232 SWP_NOSIZE = 1 SWP_NOMOVE = 2 SWP_NOZORDER = 4 SWP_NOREDRAW = 8 SWP_NOACTIVATE = 16 SWP_FRAMECHANGED = 32 SWP_SHOWWINDOW = 64 SWP_HIDEWINDOW = 128 SWP_NOCOPYBITS = 256 SWP_NOOWNERZORDER = 512 SWP_NOSENDCHANGING = 1024 SWP_DRAWFRAME = SWP_FRAMECHANGED SWP_NOREPOSITION = SWP_NOOWNERZORDER SWP_DEFERERASE = 8192 SWP_ASYNCWINDOWPOS = 16384 DLGWINDOWEXTRA = 30 # winuser.h line 4249 KEYEVENTF_EXTENDEDKEY = 1 KEYEVENTF_KEYUP = 2 MOUSEEVENTF_MOVE = 1 MOUSEEVENTF_LEFTDOWN = 2 MOUSEEVENTF_LEFTUP = 4 MOUSEEVENTF_RIGHTDOWN = 8 MOUSEEVENTF_RIGHTUP = 16 MOUSEEVENTF_MIDDLEDOWN = 32 MOUSEEVENTF_MIDDLEUP = 64 MOUSEEVENTF_ABSOLUTE = 32768 INPUT_MOUSE = 0 INPUT_KEYBOARD = 1 INPUT_HARDWARE = 2 MWMO_WAITALL = 1 MWMO_ALERTABLE = 2 MWMO_INPUTAVAILABLE = 4 QS_KEY = 1 QS_MOUSEMOVE = 2 QS_MOUSEBUTTON = 4 QS_POSTMESSAGE = 8 QS_TIMER = 16 QS_PAINT = 32 QS_SENDMESSAGE = 64 QS_HOTKEY = 128 QS_MOUSE = (QS_MOUSEMOVE | \ QS_MOUSEBUTTON) QS_INPUT = (QS_MOUSE | \ QS_KEY) QS_ALLEVENTS = (QS_INPUT | \ QS_POSTMESSAGE | \ 
QS_TIMER | \ QS_PAINT | \ QS_HOTKEY) QS_ALLINPUT = (QS_INPUT | \ QS_POSTMESSAGE | \ QS_TIMER | \ QS_PAINT | \ QS_HOTKEY | \ QS_SENDMESSAGE) IMN_CLOSESTATUSWINDOW = 1 IMN_OPENSTATUSWINDOW = 2 IMN_CHANGECANDIDATE = 3 IMN_CLOSECANDIDATE = 4 IMN_OPENCANDIDATE = 5 IMN_SETCONVERSIONMODE = 6 IMN_SETSENTENCEMODE = 7 IMN_SETOPENSTATUS = 8 IMN_SETCANDIDATEPOS = 9 IMN_SETCOMPOSITIONFONT = 10 IMN_SETCOMPOSITIONWINDOW = 11 IMN_SETSTATUSWINDOWPOS = 12 IMN_GUIDELINE = 13 IMN_PRIVATE = 14 # winuser.h line 8518 HELP_CONTEXT = 1 HELP_QUIT = 2 HELP_INDEX = 3 HELP_CONTENTS = 3 HELP_HELPONHELP = 4 HELP_SETINDEX = 5 HELP_SETCONTENTS = 5 HELP_CONTEXTPOPUP = 8 HELP_FORCEFILE = 9 HELP_KEY = 257 HELP_COMMAND = 258 HELP_PARTIALKEY = 261 HELP_MULTIKEY = 513 HELP_SETWINPOS = 515 HELP_CONTEXTMENU = 10 HELP_FINDER = 11 HELP_WM_HELP = 12 HELP_SETPOPUP_POS = 13 HELP_TCARD = 32768 HELP_TCARD_DATA = 16 HELP_TCARD_OTHER_CALLER = 17 IDH_NO_HELP = 28440 IDH_MISSING_CONTEXT = 28441 # Control doesn't have matching help context IDH_GENERIC_HELP_BUTTON = 28442 # Property sheet help button IDH_OK = 28443 IDH_CANCEL = 28444 IDH_HELP = 28445 GR_GDIOBJECTS = 0 # Count of GDI objects GR_USEROBJECTS = 1 # Count of USER objects # Generated by h2py from \msvcnt\include\wingdi.h # manually added (missed by generation some how! SRCCOPY = 13369376 # dest = source SRCPAINT = 15597702 # dest = source OR dest SRCAND = 8913094 # dest = source AND dest SRCINVERT = 6684742 # dest = source XOR dest SRCERASE = 4457256 # dest = source AND (NOT dest ) NOTSRCCOPY = 3342344 # dest = (NOT source) NOTSRCERASE = 1114278 # dest = (NOT src) AND (NOT dest) MERGECOPY = 12583114 # dest = (source AND pattern) MERGEPAINT = 12255782 # dest = (NOT source) OR dest PATCOPY = 15728673 # dest = pattern PATPAINT = 16452105 # dest = DPSnoo PATINVERT = 5898313 # dest = pattern XOR dest DSTINVERT = 5570569 # dest = (NOT dest) BLACKNESS = 66 # dest = BLACK WHITENESS = 16711778 # dest = WHITE # hacked and split manually by mhammond. 
R2_BLACK = 1 R2_NOTMERGEPEN = 2 R2_MASKNOTPEN = 3 R2_NOTCOPYPEN = 4 R2_MASKPENNOT = 5 R2_NOT = 6 R2_XORPEN = 7 R2_NOTMASKPEN = 8 R2_MASKPEN = 9 R2_NOTXORPEN = 10 R2_NOP = 11 R2_MERGENOTPEN = 12 R2_COPYPEN = 13 R2_MERGEPENNOT = 14 R2_MERGEPEN = 15 R2_WHITE = 16 R2_LAST = 16 GDI_ERROR = (-1) ERROR = 0 NULLREGION = 1 SIMPLEREGION = 2 COMPLEXREGION = 3 RGN_ERROR = ERROR RGN_AND = 1 RGN_OR = 2 RGN_XOR = 3 RGN_DIFF = 4 RGN_COPY = 5 RGN_MIN = RGN_AND RGN_MAX = RGN_COPY ## Stretching modes used with Get/SetStretchBltMode BLACKONWHITE = 1 WHITEONBLACK = 2 COLORONCOLOR = 3 HALFTONE = 4 MAXSTRETCHBLTMODE = 4 STRETCH_ANDSCANS = BLACKONWHITE STRETCH_ORSCANS = WHITEONBLACK STRETCH_DELETESCANS = COLORONCOLOR STRETCH_HALFTONE = HALFTONE ALTERNATE = 1 WINDING = 2 POLYFILL_LAST = 2 ## flags used with SetLayout LAYOUT_RTL = 1 LAYOUT_BTT = 2 LAYOUT_VBH = 4 LAYOUT_ORIENTATIONMASK = LAYOUT_RTL|LAYOUT_BTT|LAYOUT_VBH LAYOUT_BITMAPORIENTATIONPRESERVED = 8 TA_NOUPDATECP = 0 TA_UPDATECP = 1 TA_LEFT = 0 TA_RIGHT = 2 TA_CENTER = 6 TA_TOP = 0 TA_BOTTOM = 8 TA_BASELINE = 24 TA_MASK = (TA_BASELINE+TA_CENTER+TA_UPDATECP) VTA_BASELINE = TA_BASELINE VTA_LEFT = TA_BOTTOM VTA_RIGHT = TA_TOP VTA_CENTER = TA_CENTER VTA_BOTTOM = TA_RIGHT VTA_TOP = TA_LEFT ETO_GRAYED = 1 ETO_OPAQUE = 2 ETO_CLIPPED = 4 ASPECT_FILTERING = 1 DCB_RESET = 1 DCB_ACCUMULATE = 2 DCB_DIRTY = DCB_ACCUMULATE DCB_SET = (DCB_RESET | DCB_ACCUMULATE) DCB_ENABLE = 4 DCB_DISABLE = 8 META_SETBKCOLOR = 513 META_SETBKMODE = 258 META_SETMAPMODE = 259 META_SETROP2 = 260 META_SETRELABS = 261 META_SETPOLYFILLMODE = 262 META_SETSTRETCHBLTMODE = 263 META_SETTEXTCHAREXTRA = 264 META_SETTEXTCOLOR = 521 META_SETTEXTJUSTIFICATION = 522 META_SETWINDOWORG = 523 META_SETWINDOWEXT = 524 META_SETVIEWPORTORG = 525 META_SETVIEWPORTEXT = 526 META_OFFSETWINDOWORG = 527 META_SCALEWINDOWEXT = 1040 META_OFFSETVIEWPORTORG = 529 META_SCALEVIEWPORTEXT = 1042 META_LINETO = 531 META_MOVETO = 532 META_EXCLUDECLIPRECT = 1045 META_INTERSECTCLIPRECT = 1046 META_ARC = 2071 
META_ELLIPSE = 1048 META_FLOODFILL = 1049 META_PIE = 2074 META_RECTANGLE = 1051 META_ROUNDRECT = 1564 META_PATBLT = 1565 META_SAVEDC = 30 META_SETPIXEL = 1055 META_OFFSETCLIPRGN = 544 META_TEXTOUT = 1313 META_BITBLT = 2338 META_STRETCHBLT = 2851 META_POLYGON = 804 META_POLYLINE = 805 META_ESCAPE = 1574 META_RESTOREDC = 295 META_FILLREGION = 552 META_FRAMEREGION = 1065 META_INVERTREGION = 298 META_PAINTREGION = 299 META_SELECTCLIPREGION = 300 META_SELECTOBJECT = 301 META_SETTEXTALIGN = 302 META_CHORD = 2096 META_SETMAPPERFLAGS = 561 META_EXTTEXTOUT = 2610 META_SETDIBTODEV = 3379 META_SELECTPALETTE = 564 META_REALIZEPALETTE = 53 META_ANIMATEPALETTE = 1078 META_SETPALENTRIES = 55 META_POLYPOLYGON = 1336 META_RESIZEPALETTE = 313 META_DIBBITBLT = 2368 META_DIBSTRETCHBLT = 2881 META_DIBCREATEPATTERNBRUSH = 322 META_STRETCHDIB = 3907 META_EXTFLOODFILL = 1352 META_DELETEOBJECT = 496 META_CREATEPALETTE = 247 META_CREATEPATTERNBRUSH = 505 META_CREATEPENINDIRECT = 762 META_CREATEFONTINDIRECT = 763 META_CREATEBRUSHINDIRECT = 764 META_CREATEREGION = 1791 FILE_BEGIN = 0 FILE_CURRENT = 1 FILE_END = 2 FILE_FLAG_WRITE_THROUGH = -2147483648 FILE_FLAG_OVERLAPPED = 1073741824 FILE_FLAG_NO_BUFFERING = 536870912 FILE_FLAG_RANDOM_ACCESS = 268435456 FILE_FLAG_SEQUENTIAL_SCAN = 134217728 FILE_FLAG_DELETE_ON_CLOSE = 67108864 FILE_FLAG_BACKUP_SEMANTICS = 33554432 FILE_FLAG_POSIX_SEMANTICS = 16777216 CREATE_NEW = 1 CREATE_ALWAYS = 2 OPEN_EXISTING = 3 OPEN_ALWAYS = 4 TRUNCATE_EXISTING = 5 PIPE_ACCESS_INBOUND = 1 PIPE_ACCESS_OUTBOUND = 2 PIPE_ACCESS_DUPLEX = 3 PIPE_CLIENT_END = 0 PIPE_SERVER_END = 1 PIPE_WAIT = 0 PIPE_NOWAIT = 1 PIPE_READMODE_BYTE = 0 PIPE_READMODE_MESSAGE = 2 PIPE_TYPE_BYTE = 0 PIPE_TYPE_MESSAGE = 4 PIPE_UNLIMITED_INSTANCES = 255 SECURITY_CONTEXT_TRACKING = 262144 SECURITY_EFFECTIVE_ONLY = 524288 SECURITY_SQOS_PRESENT = 1048576 SECURITY_VALID_SQOS_FLAGS = 2031616 DTR_CONTROL_DISABLE = 0 DTR_CONTROL_ENABLE = 1 DTR_CONTROL_HANDSHAKE = 2 RTS_CONTROL_DISABLE = 0 RTS_CONTROL_ENABLE 
= 1 RTS_CONTROL_HANDSHAKE = 2 RTS_CONTROL_TOGGLE = 3 GMEM_FIXED = 0 GMEM_MOVEABLE = 2 GMEM_NOCOMPACT = 16 GMEM_NODISCARD = 32 GMEM_ZEROINIT = 64 GMEM_MODIFY = 128 GMEM_DISCARDABLE = 256 GMEM_NOT_BANKED = 4096 GMEM_SHARE = 8192 GMEM_DDESHARE = 8192 GMEM_NOTIFY = 16384 GMEM_LOWER = GMEM_NOT_BANKED GMEM_VALID_FLAGS = 32626 GMEM_INVALID_HANDLE = 32768 GHND = (GMEM_MOVEABLE | GMEM_ZEROINIT) GPTR = (GMEM_FIXED | GMEM_ZEROINIT) GMEM_DISCARDED = 16384 GMEM_LOCKCOUNT = 255 LMEM_FIXED = 0 LMEM_MOVEABLE = 2 LMEM_NOCOMPACT = 16 LMEM_NODISCARD = 32 LMEM_ZEROINIT = 64 LMEM_MODIFY = 128 LMEM_DISCARDABLE = 3840 LMEM_VALID_FLAGS = 3954 LMEM_INVALID_HANDLE = 32768 LHND = (LMEM_MOVEABLE | LMEM_ZEROINIT) LPTR = (LMEM_FIXED | LMEM_ZEROINIT) NONZEROLHND = (LMEM_MOVEABLE) NONZEROLPTR = (LMEM_FIXED) LMEM_DISCARDED = 16384 LMEM_LOCKCOUNT = 255 DEBUG_PROCESS = 1 DEBUG_ONLY_THIS_PROCESS = 2 CREATE_SUSPENDED = 4 DETACHED_PROCESS = 8 CREATE_NEW_CONSOLE = 16 NORMAL_PRIORITY_CLASS = 32 IDLE_PRIORITY_CLASS = 64 HIGH_PRIORITY_CLASS = 128 REALTIME_PRIORITY_CLASS = 256 CREATE_NEW_PROCESS_GROUP = 512 CREATE_UNICODE_ENVIRONMENT = 1024 CREATE_SEPARATE_WOW_VDM = 2048 CREATE_SHARED_WOW_VDM = 4096 CREATE_DEFAULT_ERROR_MODE = 67108864 CREATE_NO_WINDOW = 134217728 PROFILE_USER = 268435456 PROFILE_KERNEL = 536870912 PROFILE_SERVER = 1073741824 THREAD_BASE_PRIORITY_LOWRT = 15 THREAD_BASE_PRIORITY_MAX = 2 THREAD_BASE_PRIORITY_MIN = -2 THREAD_BASE_PRIORITY_IDLE = -15 THREAD_PRIORITY_LOWEST = THREAD_BASE_PRIORITY_MIN THREAD_PRIORITY_BELOW_NORMAL = THREAD_PRIORITY_LOWEST+1 THREAD_PRIORITY_HIGHEST = THREAD_BASE_PRIORITY_MAX THREAD_PRIORITY_ABOVE_NORMAL = THREAD_PRIORITY_HIGHEST-1 THREAD_PRIORITY_ERROR_RETURN = MAXLONG THREAD_PRIORITY_TIME_CRITICAL = THREAD_BASE_PRIORITY_LOWRT THREAD_PRIORITY_IDLE = THREAD_BASE_PRIORITY_IDLE THREAD_PRIORITY_NORMAL = 0 THREAD_MODE_BACKGROUND_BEGIN = 0x00010000 THREAD_MODE_BACKGROUND_END = 0x00020000 EXCEPTION_DEBUG_EVENT = 1 CREATE_THREAD_DEBUG_EVENT = 2 CREATE_PROCESS_DEBUG_EVENT = 
3 EXIT_THREAD_DEBUG_EVENT = 4 EXIT_PROCESS_DEBUG_EVENT = 5 LOAD_DLL_DEBUG_EVENT = 6 UNLOAD_DLL_DEBUG_EVENT = 7 OUTPUT_DEBUG_STRING_EVENT = 8 RIP_EVENT = 9 DRIVE_UNKNOWN = 0 DRIVE_NO_ROOT_DIR = 1 DRIVE_REMOVABLE = 2 DRIVE_FIXED = 3 DRIVE_REMOTE = 4 DRIVE_CDROM = 5 DRIVE_RAMDISK = 6 FILE_TYPE_UNKNOWN = 0 FILE_TYPE_DISK = 1 FILE_TYPE_CHAR = 2 FILE_TYPE_PIPE = 3 FILE_TYPE_REMOTE = 32768 NOPARITY = 0 ODDPARITY = 1 EVENPARITY = 2 MARKPARITY = 3 SPACEPARITY = 4 ONESTOPBIT = 0 ONE5STOPBITS = 1 TWOSTOPBITS = 2 CBR_110 = 110 CBR_300 = 300 CBR_600 = 600 CBR_1200 = 1200 CBR_2400 = 2400 CBR_4800 = 4800 CBR_9600 = 9600 CBR_14400 = 14400 CBR_19200 = 19200 CBR_38400 = 38400 CBR_56000 = 56000 CBR_57600 = 57600 CBR_115200 = 115200 CBR_128000 = 128000 CBR_256000 = 256000 S_QUEUEEMPTY = 0 S_THRESHOLD = 1 S_ALLTHRESHOLD = 2 S_NORMAL = 0 S_LEGATO = 1 S_STACCATO = 2 NMPWAIT_WAIT_FOREVER = -1 NMPWAIT_NOWAIT = 1 NMPWAIT_USE_DEFAULT_WAIT = 0 OF_READ = 0 OF_WRITE = 1 OF_READWRITE = 2 OF_SHARE_COMPAT = 0 OF_SHARE_EXCLUSIVE = 16 OF_SHARE_DENY_WRITE = 32 OF_SHARE_DENY_READ = 48 OF_SHARE_DENY_NONE = 64 OF_PARSE = 256 OF_DELETE = 512 OF_VERIFY = 1024 OF_CANCEL = 2048 OF_CREATE = 4096 OF_PROMPT = 8192 OF_EXIST = 16384 OF_REOPEN = 32768 OFS_MAXPATHNAME = 128 MAXINTATOM = 49152 # winbase.h PROCESS_HEAP_REGION = 1 PROCESS_HEAP_UNCOMMITTED_RANGE = 2 PROCESS_HEAP_ENTRY_BUSY = 4 PROCESS_HEAP_ENTRY_MOVEABLE = 16 PROCESS_HEAP_ENTRY_DDESHARE = 32 SCS_32BIT_BINARY = 0 SCS_DOS_BINARY = 1 SCS_WOW_BINARY = 2 SCS_PIF_BINARY = 3 SCS_POSIX_BINARY = 4 SCS_OS216_BINARY = 5 SEM_FAILCRITICALERRORS = 1 SEM_NOGPFAULTERRORBOX = 2 SEM_NOALIGNMENTFAULTEXCEPT = 4 SEM_NOOPENFILEERRORBOX = 32768 LOCKFILE_FAIL_IMMEDIATELY = 1 LOCKFILE_EXCLUSIVE_LOCK = 2 HANDLE_FLAG_INHERIT = 1 HANDLE_FLAG_PROTECT_FROM_CLOSE = 2 HINSTANCE_ERROR = 32 GET_TAPE_MEDIA_INFORMATION = 0 GET_TAPE_DRIVE_INFORMATION = 1 SET_TAPE_MEDIA_INFORMATION = 0 SET_TAPE_DRIVE_INFORMATION = 1 FORMAT_MESSAGE_ALLOCATE_BUFFER = 256 FORMAT_MESSAGE_IGNORE_INSERTS = 512 
FORMAT_MESSAGE_FROM_STRING = 1024 FORMAT_MESSAGE_FROM_HMODULE = 2048 FORMAT_MESSAGE_FROM_SYSTEM = 4096 FORMAT_MESSAGE_ARGUMENT_ARRAY = 8192 FORMAT_MESSAGE_MAX_WIDTH_MASK = 255 BACKUP_INVALID = 0 BACKUP_DATA = 1 BACKUP_EA_DATA = 2 BACKUP_SECURITY_DATA = 3 BACKUP_ALTERNATE_DATA = 4 BACKUP_LINK = 5 BACKUP_PROPERTY_DATA = 6 BACKUP_OBJECT_ID = 7 BACKUP_REPARSE_DATA = 8 BACKUP_SPARSE_BLOCK = 9 STREAM_NORMAL_ATTRIBUTE = 0 STREAM_MODIFIED_WHEN_READ = 1 STREAM_CONTAINS_SECURITY = 2 STREAM_CONTAINS_PROPERTIES = 4 STARTF_USESHOWWINDOW = 1 STARTF_USESIZE = 2 STARTF_USEPOSITION = 4 STARTF_USECOUNTCHARS = 8 STARTF_USEFILLATTRIBUTE = 16 STARTF_FORCEONFEEDBACK = 64 STARTF_FORCEOFFFEEDBACK = 128 STARTF_USESTDHANDLES = 256 STARTF_USEHOTKEY = 512 SHUTDOWN_NORETRY = 1 DONT_RESOLVE_DLL_REFERENCES = 1 LOAD_LIBRARY_AS_DATAFILE = 2 LOAD_WITH_ALTERED_SEARCH_PATH = 8 DDD_RAW_TARGET_PATH = 1 DDD_REMOVE_DEFINITION = 2 DDD_EXACT_MATCH_ON_REMOVE = 4 MOVEFILE_REPLACE_EXISTING = 1 MOVEFILE_COPY_ALLOWED = 2 MOVEFILE_DELAY_UNTIL_REBOOT = 4 MAX_COMPUTERNAME_LENGTH = 15 LOGON32_LOGON_INTERACTIVE = 2 LOGON32_LOGON_BATCH = 4 LOGON32_LOGON_SERVICE = 5 LOGON32_PROVIDER_DEFAULT = 0 LOGON32_PROVIDER_WINNT35 = 1 VER_PLATFORM_WIN32s = 0 VER_PLATFORM_WIN32_WINDOWS = 1 VER_PLATFORM_WIN32_NT = 2 TC_NORMAL = 0 TC_HARDERR = 1 TC_GP_TRAP = 2 TC_SIGNAL = 3 AC_LINE_OFFLINE = 0 AC_LINE_ONLINE = 1 AC_LINE_BACKUP_POWER = 2 AC_LINE_UNKNOWN = 255 BATTERY_FLAG_HIGH = 1 BATTERY_FLAG_LOW = 2 BATTERY_FLAG_CRITICAL = 4 BATTERY_FLAG_CHARGING = 8 BATTERY_FLAG_NO_BATTERY = 128 BATTERY_FLAG_UNKNOWN = 255 BATTERY_PERCENTAGE_UNKNOWN = 255 BATTERY_LIFE_UNKNOWN = -1 # Generated by h2py from d:\msdev\include\richedit.h cchTextLimitDefault = 32767 WM_CONTEXTMENU = 123 WM_PRINTCLIENT = 792 EN_MSGFILTER = 1792 EN_REQUESTRESIZE = 1793 EN_SELCHANGE = 1794 EN_DROPFILES = 1795 EN_PROTECTED = 1796 EN_CORRECTTEXT = 1797 EN_STOPNOUNDO = 1798 EN_IMECHANGE = 1799 EN_SAVECLIPBOARD = 1800 EN_OLEOPFAILED = 1801 ENM_NONE = 0 ENM_CHANGE = 1 ENM_UPDATE 
= 2 ENM_SCROLL = 4 ENM_KEYEVENTS = 65536 ENM_MOUSEEVENTS = 131072 ENM_REQUESTRESIZE = 262144 ENM_SELCHANGE = 524288 ENM_DROPFILES = 1048576 ENM_PROTECTED = 2097152 ENM_CORRECTTEXT = 4194304 ENM_IMECHANGE = 8388608 ES_SAVESEL = 32768 ES_SUNKEN = 16384 ES_DISABLENOSCROLL = 8192 ES_SELECTIONBAR = 16777216 ES_EX_NOCALLOLEINIT = 16777216 ES_VERTICAL = 4194304 ES_NOIME = 524288 ES_SELFIME = 262144 ECO_AUTOWORDSELECTION = 1 ECO_AUTOVSCROLL = 64 ECO_AUTOHSCROLL = 128 ECO_NOHIDESEL = 256 ECO_READONLY = 2048 ECO_WANTRETURN = 4096 ECO_SAVESEL = 32768 ECO_SELECTIONBAR = 16777216 ECO_VERTICAL = 4194304 ECOOP_SET = 1 ECOOP_OR = 2 ECOOP_AND = 3 ECOOP_XOR = 4 WB_CLASSIFY = 3 WB_MOVEWORDLEFT = 4 WB_MOVEWORDRIGHT = 5 WB_LEFTBREAK = 6 WB_RIGHTBREAK = 7 WB_MOVEWORDPREV = 4 WB_MOVEWORDNEXT = 5 WB_PREVBREAK = 6 WB_NEXTBREAK = 7 PC_FOLLOWING = 1 PC_LEADING = 2 PC_OVERFLOW = 3 PC_DELIMITER = 4 WBF_WORDWRAP = 16 WBF_WORDBREAK = 32 WBF_OVERFLOW = 64 WBF_LEVEL1 = 128 WBF_LEVEL2 = 256 WBF_CUSTOM = 512 CFM_BOLD = 1 CFM_ITALIC = 2 CFM_UNDERLINE = 4 CFM_STRIKEOUT = 8 CFM_PROTECTED = 16 CFM_SIZE = -2147483648 CFM_COLOR = 1073741824 CFM_FACE = 536870912 CFM_OFFSET = 268435456 CFM_CHARSET = 134217728 CFE_BOLD = 1 CFE_ITALIC = 2 CFE_UNDERLINE = 4 CFE_STRIKEOUT = 8 CFE_PROTECTED = 16 CFE_AUTOCOLOR = 1073741824 yHeightCharPtsMost = 1638 SCF_SELECTION = 1 SCF_WORD = 2 SF_TEXT = 1 SF_RTF = 2 SF_RTFNOOBJS = 3 SF_TEXTIZED = 4 SFF_SELECTION = 32768 SFF_PLAINRTF = 16384 MAX_TAB_STOPS = 32 lDefaultTab = 720 PFM_STARTINDENT = 1 PFM_RIGHTINDENT = 2 PFM_OFFSET = 4 PFM_ALIGNMENT = 8 PFM_TABSTOPS = 16 PFM_NUMBERING = 32 PFM_OFFSETINDENT = -2147483648 PFN_BULLET = 1 PFA_LEFT = 1 PFA_RIGHT = 2 PFA_CENTER = 3 WM_NOTIFY = 78 SEL_EMPTY = 0 SEL_TEXT = 1 SEL_OBJECT = 2 SEL_MULTICHAR = 4 SEL_MULTIOBJECT = 8 OLEOP_DOVERB = 1 CF_RTF = "Rich Text Format" CF_RTFNOOBJS = "Rich Text Format Without Objects" CF_RETEXTOBJ = "RichEdit Text and Objects" # From wincon.h RIGHT_ALT_PRESSED = 1 # the right alt key is pressed. 
LEFT_ALT_PRESSED = 2 # the left alt key is pressed. RIGHT_CTRL_PRESSED = 4 # the right ctrl key is pressed. LEFT_CTRL_PRESSED = 8 # the left ctrl key is pressed. SHIFT_PRESSED = 16 # the shift key is pressed. NUMLOCK_ON = 32 # the numlock light is on. SCROLLLOCK_ON = 64 # the scrolllock light is on. CAPSLOCK_ON = 128 # the capslock light is on. ENHANCED_KEY = 256 # the key is enhanced. NLS_DBCSCHAR = 65536 # DBCS for JPN: SBCS/DBCS mode. NLS_ALPHANUMERIC = 0 # DBCS for JPN: Alphanumeric mode. NLS_KATAKANA = 131072 # DBCS for JPN: Katakana mode. NLS_HIRAGANA = 262144 # DBCS for JPN: Hiragana mode. NLS_ROMAN = 4194304 # DBCS for JPN: Roman/Noroman mode. NLS_IME_CONVERSION = 8388608 # DBCS for JPN: IME conversion. NLS_IME_DISABLE = 536870912 # DBCS for JPN: IME enable/disable. FROM_LEFT_1ST_BUTTON_PRESSED = 1 RIGHTMOST_BUTTON_PRESSED = 2 FROM_LEFT_2ND_BUTTON_PRESSED = 4 FROM_LEFT_3RD_BUTTON_PRESSED = 8 FROM_LEFT_4TH_BUTTON_PRESSED = 16 CTRL_C_EVENT = 0 CTRL_BREAK_EVENT = 1 CTRL_CLOSE_EVENT = 2 CTRL_LOGOFF_EVENT = 5 CTRL_SHUTDOWN_EVENT = 6 MOUSE_MOVED = 1 DOUBLE_CLICK = 2 MOUSE_WHEELED = 4 #property sheet window messages from prsht.h PSM_SETCURSEL = (WM_USER + 101) PSM_REMOVEPAGE = (WM_USER + 102) PSM_ADDPAGE = (WM_USER + 103) PSM_CHANGED = (WM_USER + 104) PSM_RESTARTWINDOWS = (WM_USER + 105) PSM_REBOOTSYSTEM = (WM_USER + 106) PSM_CANCELTOCLOSE = (WM_USER + 107) PSM_QUERYSIBLINGS = (WM_USER + 108) PSM_UNCHANGED = (WM_USER + 109) PSM_APPLY = (WM_USER + 110) PSM_SETTITLEA = (WM_USER + 111) PSM_SETTITLEW = (WM_USER + 120) PSM_SETWIZBUTTONS = (WM_USER + 112) PSM_PRESSBUTTON = (WM_USER + 113) PSM_SETCURSELID = (WM_USER + 114) PSM_SETFINISHTEXTA = (WM_USER + 115) PSM_SETFINISHTEXTW = (WM_USER + 121) PSM_GETTABCONTROL = (WM_USER + 116) PSM_ISDIALOGMESSAGE = (WM_USER + 117) PSM_GETCURRENTPAGEHWND = (WM_USER + 118) PSM_INSERTPAGE = (WM_USER + 119) PSM_SETHEADERTITLEA = (WM_USER + 125) PSM_SETHEADERTITLEW = (WM_USER + 126) PSM_SETHEADERSUBTITLEA = (WM_USER + 127) 
PSM_SETHEADERSUBTITLEW = (WM_USER + 128) PSM_HWNDTOINDEX = (WM_USER + 129) PSM_INDEXTOHWND = (WM_USER + 130) PSM_PAGETOINDEX = (WM_USER + 131) PSM_INDEXTOPAGE = (WM_USER + 132) PSM_IDTOINDEX = (WM_USER + 133) PSM_INDEXTOID = (WM_USER + 134) PSM_GETRESULT = (WM_USER + 135) PSM_RECALCPAGESIZES = (WM_USER + 136) # GetUserNameEx/GetComputerNameEx NameUnknown = 0 NameFullyQualifiedDN = 1 NameSamCompatible = 2 NameDisplay = 3 NameUniqueId = 6 NameCanonical = 7 NameUserPrincipal = 8 NameCanonicalEx = 9 NameServicePrincipal = 10 NameDnsDomain = 12 ComputerNameNetBIOS = 0 ComputerNameDnsHostname = 1 ComputerNameDnsDomain = 2 ComputerNameDnsFullyQualified = 3 ComputerNamePhysicalNetBIOS = 4 ComputerNamePhysicalDnsHostname = 5 ComputerNamePhysicalDnsDomain = 6 ComputerNamePhysicalDnsFullyQualified = 7 LWA_COLORKEY = 0x00000001 LWA_ALPHA = 0x00000002 ULW_COLORKEY = 0x00000001 ULW_ALPHA = 0x00000002 ULW_OPAQUE = 0x00000004 # WinDef.h TRUE = 1 FALSE = 0 MAX_PATH = 260 # WinGDI.h AC_SRC_OVER = 0 AC_SRC_ALPHA = 1 GRADIENT_FILL_RECT_H = 0 GRADIENT_FILL_RECT_V = 1 GRADIENT_FILL_TRIANGLE = 2 GRADIENT_FILL_OP_FLAG = 255 ## flags used with Get/SetSystemFileCacheSize MM_WORKING_SET_MAX_HARD_ENABLE = 1 MM_WORKING_SET_MAX_HARD_DISABLE = 2 MM_WORKING_SET_MIN_HARD_ENABLE = 4 MM_WORKING_SET_MIN_HARD_DISABLE = 8 ## Flags for GetFinalPathNameByHandle VOLUME_NAME_DOS = 0 VOLUME_NAME_GUID = 1 VOLUME_NAME_NT = 2 VOLUME_NAME_NONE = 4 FILE_NAME_NORMALIZED = 0 FILE_NAME_OPENED = 8 DEVICE_NOTIFY_WINDOW_HANDLE = 0x00000000 DEVICE_NOTIFY_SERVICE_HANDLE = 0x00000001 # From Dbt.h # Generated by h2py from Dbt.h WM_DEVICECHANGE = 0x0219 BSF_QUERY = 0x00000001 BSF_IGNORECURRENTTASK = 0x00000002 BSF_FLUSHDISK = 0x00000004 BSF_NOHANG = 0x00000008 BSF_POSTMESSAGE = 0x00000010 BSF_FORCEIFHUNG = 0x00000020 BSF_NOTIMEOUTIFNOTHUNG = 0x00000040 BSF_MSGSRV32ISOK = (-2147483648) BSF_MSGSRV32ISOK_BIT = 31 BSM_ALLCOMPONENTS = 0x00000000 BSM_VXDS = 0x00000001 BSM_NETDRIVER = 0x00000002 BSM_INSTALLABLEDRIVERS = 
0x00000004 BSM_APPLICATIONS = 0x00000008 DBT_APPYBEGIN = 0x0000 DBT_APPYEND = 0x0001 DBT_DEVNODES_CHANGED = 0x0007 DBT_QUERYCHANGECONFIG = 0x0017 DBT_CONFIGCHANGED = 0x0018 DBT_CONFIGCHANGECANCELED = 0x0019 DBT_MONITORCHANGE = 0x001B DBT_SHELLLOGGEDON = 0x0020 DBT_CONFIGMGAPI32 = 0x0022 DBT_VXDINITCOMPLETE = 0x0023 DBT_VOLLOCKQUERYLOCK = 0x8041 DBT_VOLLOCKLOCKTAKEN = 0x8042 DBT_VOLLOCKLOCKFAILED = 0x8043 DBT_VOLLOCKQUERYUNLOCK = 0x8044 DBT_VOLLOCKLOCKRELEASED = 0x8045 DBT_VOLLOCKUNLOCKFAILED = 0x8046 LOCKP_ALLOW_WRITES = 0x01 LOCKP_FAIL_WRITES = 0x00 LOCKP_FAIL_MEM_MAPPING = 0x02 LOCKP_ALLOW_MEM_MAPPING = 0x00 LOCKP_USER_MASK = 0x03 LOCKP_LOCK_FOR_FORMAT = 0x04 LOCKF_LOGICAL_LOCK = 0x00 LOCKF_PHYSICAL_LOCK = 0x01 DBT_NO_DISK_SPACE = 0x0047 DBT_LOW_DISK_SPACE = 0x0048 DBT_CONFIGMGPRIVATE = 0x7FFF DBT_DEVICEARRIVAL = 0x8000 DBT_DEVICEQUERYREMOVE = 0x8001 DBT_DEVICEQUERYREMOVEFAILED = 0x8002 DBT_DEVICEREMOVEPENDING = 0x8003 DBT_DEVICEREMOVECOMPLETE = 0x8004 DBT_DEVICETYPESPECIFIC = 0x8005 DBT_CUSTOMEVENT = 0x8006 DBT_DEVTYP_OEM = 0x00000000 DBT_DEVTYP_DEVNODE = 0x00000001 DBT_DEVTYP_VOLUME = 0x00000002 DBT_DEVTYP_PORT = 0x00000003 DBT_DEVTYP_NET = 0x00000004 DBT_DEVTYP_DEVICEINTERFACE = 0x00000005 DBT_DEVTYP_HANDLE = 0x00000006 DBTF_MEDIA = 0x0001 DBTF_NET = 0x0002 DBTF_RESOURCE = 0x00000001 DBTF_XPORT = 0x00000002 DBTF_SLOWNET = 0x00000004 DBT_VPOWERDAPI = 0x8100 DBT_USERDEFINED = 0xFFFF
dagbldr/dagbldr
refs/heads/master
dagbldr/externals/scripts/get_objgraph.py
4
#!/usr/bin/env python # # Author: Mike McKerns (mmckerns @caltech and @uqfoundation) # Copyright (c) 2008-2015 California Institute of Technology. # License: 3-clause BSD. The full license text is available at: # - http://trac.mystic.cacr.caltech.edu/project/pathos/browser/dill/LICENSE """ use objgraph to plot the reference paths for types found in dill.types """ #XXX: useful if could read .pkl file and generate the graph... ? import dill as pickle #pickle.debug.trace(True) #import pickle # get all objects for testing from dill import load_types load_types(pickleable=True,unpickleable=True) from dill import objects if __name__ == "__main__": import sys if len(sys.argv) != 2: print ("Please provide exactly one type name (e.g. 'IntType')") msg = "\n" for objtype in list(objects.keys())[:40]: msg += objtype + ', ' print (msg + "...") else: objtype = str(sys.argv[-1]) obj = objects[objtype] try: import objgraph objgraph.show_refs(obj, filename=objtype+'.png') except ImportError: print ("Please install 'objgraph' to view object graphs") # EOF
akras14/cs-101
refs/heads/master
coursera/stanford-algo/median-maintenance/load.py
1
"""Load test data""" import math import heap def shouldBalance(left, right): """Check if 2 heaps are more than 1 node apart""" return math.fabs(left.size() - right.size()) > 1 def balance(left, right): """Balance two heaps that are off by 1 value""" if left.size() > right.size(): temp = left.remove() right.insert(temp) elif left.size() < right.size(): temp = right.remove() left.insert(temp) else: raise ValueError("Heaps were of same size") if shouldBalance(left, right): raise ValueError("Balances was called too late") return True # FILENAME = "test.txt" FILENAME = "median.txt" data = [] with open(FILENAME) as f: for num in f: data.append(int(num)) median = None leftHeap = heap.Heap(heap.MAX) rightHeap = heap.Heap(heap.MIN) medianSum = 0 for i, d in enumerate(data): if d < median: leftHeap.insert(d) else: rightHeap.insert(d) if shouldBalance(leftHeap, rightHeap): balance(leftHeap, rightHeap) if leftHeap.size() > rightHeap.size(): median = leftHeap.top() elif rightHeap.size() > leftHeap.size(): median = rightHeap.top() else: median = leftHeap.top() medianSum += median print medianSum print medianSum % 10000
lyoniionly/django-cobra
refs/heads/master
src/cobra/apps/accounts/migrations/0001_initial.py
1
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django.utils.timezone import cobra.models.fields.bounded class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='User', fields=[ ('password', models.CharField(max_length=128, verbose_name='password')), ('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')), ('id', cobra.models.fields.bounded.BoundedBigAutoField(serialize=False, primary_key=True)), ('username', models.CharField(unique=True, max_length=128, verbose_name='username')), ('first_name', models.CharField(max_length=30, verbose_name='first name', blank=True)), ('last_name', models.CharField(max_length=30, verbose_name='last name', blank=True)), ('email', models.EmailField(max_length=75, verbose_name='email address', blank=True)), ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), ('is_managed', models.BooleanField(default=False, help_text='Designates whether this user should be treated as managed. Select this to disallow the user from modifying their account (username, password, etc).', verbose_name='managed')), ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), ], options={ 'abstract': False, 'db_table': 'auth_user', 'verbose_name': 'User', 'swappable': 'AUTH_USER_MODEL', 'verbose_name_plural': 'Users', }, bases=(models.Model,), ), ]
mrquim/mrquimrepo
refs/heads/master
plugin.program.indigo/libs/requests/certs.py
516
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
requests.certs
~~~~~~~~~~~~~~

Locate the default CA certificate bundle used for TLS verification.

Downstream packagers (e.g. Linux distributions or managed environments)
may redefine where() so that it returns a separately packaged CA bundle
instead of the vendored one.
"""
import os.path

try:
    # Prefer the certifi package's bundle when it is installed.
    from certifi import where
except ImportError:
    # certifi is unavailable: fall back to the copy of the bundle that
    # ships alongside this module.
    def where():
        """Return the path of the preferred certificate bundle."""
        here = os.path.dirname(__file__)
        return os.path.join(here, 'cacert.pem')

if __name__ == '__main__':
    print(where())
firerszd/kbengine
refs/heads/master
kbe/res/scripts/common/Lib/test/test_osx_env.py
112
""" Test suite for OS X interpreter environment variables. """ from test.support import EnvironmentVarGuard, run_unittest import subprocess import sys import sysconfig import unittest @unittest.skipUnless(sys.platform == 'darwin' and sysconfig.get_config_var('WITH_NEXT_FRAMEWORK'), 'unnecessary on this platform') class OSXEnvironmentVariableTestCase(unittest.TestCase): def _check_sys(self, ev, cond, sv, val = sys.executable + 'dummy'): with EnvironmentVarGuard() as evg: subpc = [str(sys.executable), '-c', 'import sys; sys.exit(2 if "%s" %s %s else 3)' % (val, cond, sv)] # ensure environment variable does not exist evg.unset(ev) # test that test on sys.xxx normally fails rc = subprocess.call(subpc) self.assertEqual(rc, 3, "expected %s not %s %s" % (ev, cond, sv)) # set environ variable evg.set(ev, val) # test that sys.xxx has been influenced by the environ value rc = subprocess.call(subpc) self.assertEqual(rc, 2, "expected %s %s %s" % (ev, cond, sv)) def test_pythonexecutable_sets_sys_executable(self): self._check_sys('PYTHONEXECUTABLE', '==', 'sys.executable') if __name__ == "__main__": unittest.main()
demon-ru/iml-crm
refs/heads/master
addons/marketing_crm/__init__.py
378
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-TODAY OpenERP SA (http://www.openerp.com) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import models
hryamzik/ansible
refs/heads/devel
lib/ansible/module_utils/database.py
108
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
#    * Redistributions of source code must retain the above copyright
#      notice, this list of conditions and the following disclaimer.
#    * Redistributions in binary form must reproduce the above copyright notice,
#      this list of conditions and the following disclaimer in the documentation
#      and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


class SQLParseError(Exception):
    """Base error for malformed SQL identifiers."""
    pass


class UnclosedQuoteError(SQLParseError):
    """Raised when a quoted identifier has no matching closing quote."""
    pass


# maps a type of identifier to the maximum number of dot levels that are
# allowed to specify that identifier.  For example, a database column can be
# specified by up to 4 levels: database.schema.table.column
_PG_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, schema=2, table=3, column=4, role=1)
_MYSQL_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, table=2, column=3, role=1, vars=1)


def _find_end_quote(identifier, quote_char):
    """Return the index of the closing quote within *identifier*.

    *identifier* is the text AFTER an opening quote character.  Doubled
    quote characters (the SQL way of escaping a quote inside a quoted
    identifier) are skipped over.  Raises UnclosedQuoteError when no
    closing quote is found.
    """
    accumulate = 0
    while True:
        try:
            quote = identifier.index(quote_char)
        except ValueError:
            # No quote character left at all: the identifier never closes.
            raise UnclosedQuoteError
        accumulate = accumulate + quote
        try:
            next_char = identifier[quote + 1]
        except IndexError:
            # Quote is the last character: it closes the identifier.
            return accumulate
        if next_char == quote_char:
            # Doubled quote = escaped quote; continue scanning after it.
            try:
                identifier = identifier[quote + 2:]
                accumulate = accumulate + 2
            except IndexError:
                raise UnclosedQuoteError
        else:
            return accumulate


def _identifier_parse(identifier, quote_char):
    """Split a dotted identifier into quoted fragments (recursive).

    Returns a list of fragments, each wrapped in *quote_char* with any
    embedded quote characters doubled.  Fragments that arrive already
    quoted are kept as-is; a dot immediately after a closing quote
    separates fragments.  Raises SQLParseError on empty fragments or on
    improperly escaped user-quoted input.
    """
    if not identifier:
        raise SQLParseError('Identifier name unspecified or unquoted trailing dot')

    already_quoted = False
    if identifier.startswith(quote_char):
        already_quoted = True
        try:
            end_quote = _find_end_quote(identifier[1:], quote_char=quote_char) + 1
        except UnclosedQuoteError:
            # Leading quote never closes: treat the text as unquoted below.
            already_quoted = False
        else:
            if end_quote < len(identifier) - 1:
                if identifier[end_quote + 1] == '.':
                    # Quoted fragment followed by a dot: recurse on the rest.
                    dot = end_quote + 1
                    first_identifier = identifier[:dot]
                    next_identifier = identifier[dot + 1:]
                    further_identifiers = _identifier_parse(next_identifier, quote_char)
                    further_identifiers.insert(0, first_identifier)
                else:
                    # Trailing characters after the closing quote that are
                    # not a separator: the caller escaped incorrectly.
                    raise SQLParseError('User escaped identifiers must escape extra quotes')
            else:
                # The whole string is one already-quoted fragment.
                further_identifiers = [identifier]

    if not already_quoted:
        try:
            dot = identifier.index('.')
        except ValueError:
            # No dot: quote the whole thing as a single fragment.
            identifier = identifier.replace(quote_char, quote_char * 2)
            identifier = ''.join((quote_char, identifier, quote_char))
            further_identifiers = [identifier]
        else:
            if dot == 0 or dot >= len(identifier) - 1:
                # Leading or trailing dot: keep it inside the quoted fragment.
                identifier = identifier.replace(quote_char, quote_char * 2)
                identifier = ''.join((quote_char, identifier, quote_char))
                further_identifiers = [identifier]
            else:
                # Split on the first dot, quote the head, recurse on the tail.
                first_identifier = identifier[:dot]
                next_identifier = identifier[dot + 1:]
                further_identifiers = _identifier_parse(next_identifier, quote_char)
                first_identifier = first_identifier.replace(quote_char, quote_char * 2)
                first_identifier = ''.join((quote_char, first_identifier, quote_char))
                further_identifiers.insert(0, first_identifier)

    return further_identifiers


def pg_quote_identifier(identifier, id_type):
    """Safely quote a dotted PostgreSQL identifier of the given *id_type*.

    *id_type* is one of the keys of _PG_IDENTIFIER_TO_DOT_LEVEL; raises
    SQLParseError when the identifier has more dotted levels than that
    type allows.
    """
    identifier_fragments = _identifier_parse(identifier, quote_char='"')
    if len(identifier_fragments) > _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]:
        raise SQLParseError('PostgreSQL does not support %s with more than %i dots' % (id_type, _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]))
    return '.'.join(identifier_fragments)


def mysql_quote_identifier(identifier, id_type):
    """Safely quote a dotted MySQL identifier of the given *id_type*.

    Same contract as pg_quote_identifier but with backtick quoting and
    MySQL's dot-level limits.  A fragment that is exactly `*` is emitted
    unquoted so that wildcards (e.g. in GRANT statements) keep working.
    """
    identifier_fragments = _identifier_parse(identifier, quote_char='`')
    if len(identifier_fragments) > _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]:
        raise SQLParseError('MySQL does not support %s with more than %i dots' % (id_type, _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]))

    special_cased_fragments = []
    for fragment in identifier_fragments:
        if fragment == '`*`':
            special_cased_fragments.append('*')
        else:
            special_cased_fragments.append(fragment)

    return '.'.join(special_cased_fragments)
ayepezv/GAD_ERP
refs/heads/master
addons/calendar/models/ir_http.py
2
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. import odoo from odoo import models, SUPERUSER_ID from odoo.http import request from odoo.api import Environment from werkzeug.exceptions import BadRequest class IrHttp(models.AbstractModel): _inherit = 'ir.http' def _auth_method_calendar(self): token = request.params['token'] dbname = request.params['db'] registry = odoo.modules.registry.RegistryManager.get(dbname) error_message = False with registry.cursor() as cr: env = Environment(cr, SUPERUSER_ID, {}) attendee = env['calendar.attendee'].sudo().search([('access_token', '=', token)], limit=1) if not attendee: error_message = """Invalid Invitation Token.""" elif request.session.uid and request.session.login != 'anonymous': # if valid session but user is not match user = env['res.users'].sudo().browse(request.session.uid) if attendee.partner_id != user.partner_id: error_message = """Invitation cannot be forwarded via email. This event/meeting belongs to %s and you are logged in as %s. Please ask organizer to add you.""" % (attendee.email, user.email) if error_message: raise BadRequest(error_message) return True
osm-fr/osmose-backend
refs/heads/master
plugins/TagFix_Area.py
4
#-*- coding: utf-8 -*-

###########################################################################
## Copyrights Frédéric Rodrigo 2014
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program.  If not, see <http://www.gnu.org/licenses/>.
###########################################################################

from modules.OsmoseTranslation import T_
from plugins.Plugin import Plugin


class TagFix_Area(Plugin):
    """Osmose plugin flagging redundant or meaningless area=yes / area=no tags
    on ways."""

    def init(self, logger):
        Plugin.init(self, logger)
        # Top-level keys for which area=yes is a meaningful refinement.
        self.area_yes_good = set(('aerialway', 'aeroway', 'amenity', 'barrier', 'highway', 'historic', 'leisure', 'man_made', 'military', 'power', 'public_transport', 'sport', 'tourism', 'waterway'))
        # Top-level keys that already imply an area, making area=yes redundant.
        self.area_yes_bad = set(('boundary', 'building', 'craft', 'geological', 'landuse', 'natural', 'office', 'place', 'shop', 'indoor'))
        self.errors[32001] = self.def_class(item = 3200, level = 3, tags = ['tag', 'fix:chair'],
            title = T_('Redundant area tagging'),
            detail = T_('This feature is already implicitly an area due to another tag.'),
            fix = T_('Remove the `{0}` tag.', 'area=yes')
        )
        self.errors[32002] = self.def_class(item = 3200, level = 3, tags = ['tag', 'fix:chair'],
            title = T_('Untagged area object'),
            detail = T_('The object is missing any tag which defines what kind of feature it is. This is unexpected for something tagged with `area=yes`.'),
            fix = self.merge_doc(
                T_('Add a top level tag to state what this feature is. Considered acceptable `area=yes` features are:'),
                {'en': ', '.join(map(lambda x: '`{}`'.format(x), sorted(self.area_yes_good)))}
            ),
            trap = T_('It may be more appropriate to remove the object completely if it isn\'t useful.')
        )
        self.errors[32003] = self.def_class(item = 3200, level = 3, tags = ['tag', 'fix:chair'],
            title = T_('Redundant area negation'),
            detail = T_('This feature is already implicitly not an area.'),
            fix = T_('Remove the `{0}` tag.', 'area=no')
        )

    def way(self, data, tags, nds):
        """Check one way's tags; return a list of error dicts (possibly empty)."""
        err = []
        key_set = set(tags.keys())

        if tags.get("area") == "yes":
            # area=yes combined with an implicitly-areal key -> redundant (32001).
            tagged_as_bad = set(key_set & self.area_yes_bad)
            if len(tagged_as_bad) > 0:
                err.append({
                    "class": 32001,
                    "subclass": 1,
                    "text": T_('Tags, {0}, already make this an area.', '/'.join(map(lambda x: '`{}`'.format(x), tagged_as_bad)))
                })
            # area=yes with no meaningful top-level key at all -> untagged
            # area (32002); railway=platform is explicitly tolerated.
            elif not (len(key_set & self.area_yes_good) > 0 or tags.get("railway") == "platform"):
                err.append({"class": 32002, "subclass": 1})

        # area=no is only meaningful alongside keys that default to areas.
        if tags.get("area") == "no" and not "aeroway" in tags and not "building" in tags and not "landuse" in tags and not "leisure" in tags and not "natural" in tags:
            err.append({"class": 32003, "subclass": 1})

        return err

###########################################################################
from plugins.Plugin import TestPluginCommon


class Test(TestPluginCommon):
    def test(self):
        a = TagFix_Area(None)
        a.init(None)
        # Each of these should raise exactly one of the three error classes.
        for t in [{"area":"yes", "railway": "rail"},
                  {"area":"yes", "building": "yes"},
                  {"area":"yes", "landuse": "farm"},
                  {"area":"no", "amenity": "bakery"},
                  {"area":"yes", "indoor": "room"},
                 ]:
            self.check_err(a.way(None, t, None), t)

        # These combinations are legitimate and must not be flagged.
        for t in [{"area":"yes", "railway": "platform"},
                  {"area":"yes", "amenity": "bakery"},
                  {"area":"no", "building": "yes"},
                 ]:
            assert not a.way(None, t, None), t
dparks1134/CompareM
refs/heads/master
comparem/amino_acid_usage.py
1
###############################################################################
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
###############################################################################

__author__ = 'Donovan Parks'
__copyright__ = 'Copyright 2014'
__credits__ = ['Donovan Parks']
__license__ = 'GPL3'
__maintainer__ = 'Donovan Parks'
__email__ = 'donovan.parks@gmail.com'

import os
import logging
import ntpath
from collections import defaultdict, namedtuple

import biolib.seq_io as seq_io
from biolib.parallel import Parallel


class AminoAcidUsage(object):
    """Calculate amino acid usage over a set of genomes."""

    def __init__(self, cpus=1):
        """Initialization.

        Parameters
        ----------
        cpus : int
            Number of cpus to use.
        """
        self.logger = logging.getLogger('timestamp')
        self.cpus = cpus

    def amino_acid_usage(self, seqs):
        """Calculate amino acid usage within sequences.

        Parameters
        ----------
        seqs : dict[seq_id] -> seq
            Sequences indexed by sequence id.

        Returns
        -------
        dict : dict[aa] -> count
            Occurrence of each amino acid.
        """
        aa_usage = defaultdict(int)
        # Count every residue (case-normalized), skipping the '*'
        # stop-codon marker emitted by gene callers.
        for seq in seqs.values():
            for aa in seq:
                if aa != '*':
                    aa_usage[aa.upper()] += 1

        return aa_usage

    def _producer(self, gene_file):
        """Calculate amino acid usage of a single genome.

        Parameters
        ----------
        gene_file : str
            Fasta file containing amino acid sequences.

        Returns
        -------
        list
            Unique identifier of genome followed by a dict giving the
            occurrence of each amino acid.
        """
        # Derive the genome id from the file name: strip the conventional
        # '.genes.faa' suffix, then any remaining extension.
        genome_id = ntpath.basename(gene_file)
        genome_id = genome_id.replace('.genes.faa', '')
        genome_id = os.path.splitext(genome_id)[0]

        seqs = seq_io.read_fasta(gene_file)
        aa_usage = self.amino_acid_usage(seqs)

        return [genome_id, aa_usage]

    def _consumer(self, produced_data, consumer_data):
        """Consume results from producer processes.

        Parameters
        ----------
        produced_data : list -> [genome_id, aa_usage]
            Unique id of a genome followed by a dictionary
            indicating its amino acid usage.
        consumer_data : namedtuple
            Set of amino acids observed across all genomes (aa_set),
            along with the amino acid usage of each genome (genome_aa_usage).

        Returns
        -------
        consumer_data
            The consumer data structure or None must be returned
        """
        # First result: set up the data structure returned by the consumer.
        # ('is None' instead of the original '== None' identity-vs-equality mixup)
        if consumer_data is None:
            ConsumerData = namedtuple('ConsumerData', 'aa_set genome_aa_usage')
            consumer_data = ConsumerData(set(), dict())

        genome_id, aa_usage = produced_data

        consumer_data.aa_set.update(aa_usage.keys())
        consumer_data.genome_aa_usage[genome_id] = aa_usage

        return consumer_data

    def _progress(self, processed_items, total_items):
        """Report progress of consumer processes.

        Parameters
        ----------
        processed_items : int
            Number of genomes processed.
        total_items : int
            Total number of genomes to process.

        Returns
        -------
        str
            String indicating progress of data processing.
        """
        return ' Finished processing %d of %d (%.2f%%) genomes.' % (processed_items, total_items, float(processed_items) * 100 / total_items)

    def run(self, gene_files):
        """Calculate amino acid usage over a set of genomes.

        Parameters
        ----------
        gene_files : list
            Fasta files containing called genes.

        Returns
        -------
        dict of dict : dict[genome_id][aa] -> count
            Amino acid usage of each genome.
        set
            Set with all identified amino acids.
        """
        self.logger.info('Calculating amino acid usage for each genome:')

        # Suppress per-item progress reporting when the logger is silent.
        progress_func = self._progress
        if self.logger.is_silent:
            progress_func = None

        parallel = Parallel(self.cpus)
        consumer_data = parallel.run(self._producer, self._consumer, gene_files, progress_func)

        return consumer_data.genome_aa_usage, consumer_data.aa_set
nortikin/sverchok
refs/heads/master
nodes/generators_extended/hilbert.py
2
# ##### BEGIN GPL LICENSE BLOCK #####
#
#  This program is free software; you can redistribute it and/or
#  modify it under the terms of the GNU General Public License
#  as published by the Free Software Foundation; either version 2
#  of the License, or (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software Foundation,
#  Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####

import bpy
from bpy.props import IntProperty, FloatProperty

from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import updateNode


class HilbertNode(bpy.types.Node, SverchCustomTreeNode):
    ''' Hilbert line '''
    bl_idname = 'HilbertNode'
    bl_label = 'Hilbert'
    bl_icon = 'OUTLINER_OB_EMPTY'
    sv_icon = 'SV_HILBERT2D'

    # Recursion depth of the Hilbert curve (1..6).
    level_: IntProperty(
        name='level', description='Level',
        default=2, min=1, max=6,
        options={'ANIMATABLE'}, update=updateNode)
    # Overall edge length of the curve's bounding square.
    size_: FloatProperty(
        name='size', description='Size',
        default=1.0, min=0.1,
        options={'ANIMATABLE'}, update=updateNode)

    def sv_init(self, context):
        """Create the node's input/output sockets."""
        self.inputs.new('SvStringsSocket', "Level").prop_name = 'level_'
        self.inputs.new('SvStringsSocket', "Size").prop_name = 'size_'
        self.outputs.new('SvVerticesSocket', "Vertices")
        self.outputs.new('SvStringsSocket', "Edges")

    def draw_buttons(self, context, layout):
        # No extra UI beyond the socket-bound properties.
        pass

    def process(self):
        """Generate the Hilbert curve vertices and, if requested, its edges."""
        level_socket, size_socket = self.inputs
        verts_socket, edges_socket = self.outputs

        # Only compute when someone actually consumes the vertices.
        if verts_socket.is_linked:
            Integer = int(level_socket.sv_get()[0][0])
            Step = size_socket.sv_get()[0][0]

            verts = self.hilbert(0.0, 0.0, Step*1.0, 0.0, 0.0, Step*1.0, Integer)
            verts_socket.sv_set([verts])

            if edges_socket.is_linked:
                # Consecutive vertices form the polyline edges.
                listEdg = []
                r = len(verts)-1
                for i in range(r):
                    listEdg.append((i, i+1))

                edg = list(listEdg)
                edges_socket.sv_set([edg])

    def hilbert(self, x0, y0, xi, xj, yi, yj, n):
        """Recursively build the 2D Hilbert curve as a flat list of [x, y, 0]
        points.

        (x0, y0) is the cell origin; (xi, xj) and (yi, yj) are the cell's
        two frame vectors; n is the remaining recursion depth.  At n <= 0
        the cell's center point is emitted.
        """
        out = []
        if n <= 0:
            X = x0 + (xi + yi)/2
            Y = y0 + (xj + yj)/2
            out.append(X)
            out.append(Y)
            out.append(0)
            return [out]
        else:
            # Four sub-cells; the first and last swap/negate the frame
            # vectors so the sub-curves connect end to end.
            out.extend(self.hilbert(x0, y0, yi/2, yj/2, xi/2, xj/2, n - 1))
            out.extend(self.hilbert(x0 + xi/2, y0 + xj/2, xi/2, xj/2, yi/2, yj/2, n - 1))
            out.extend(self.hilbert(x0 + xi/2 + yi/2, y0 + xj/2 + yj/2, xi/2, xj/2, yi/2, yj/2, n - 1))
            out.extend(self.hilbert(x0 + xi/2 + yi, y0 + xj/2 + yj, -yi/2,-yj/2,-xi/2,-xj/2, n - 1))
            return out


def register():
    bpy.utils.register_class(HilbertNode)


def unregister():
    bpy.utils.unregister_class(HilbertNode)
devanshdalal/scikit-learn
refs/heads/master
sklearn/isotonic.py
10
# Authors: Fabian Pedregosa <fabian@fseoane.net>
#          Alexandre Gramfort <alexandre.gramfort@inria.fr>
#          Nelle Varoquaux <nelle.varoquaux@gmail.com>
# License: BSD 3 clause

import numpy as np
from scipy import interpolate
from scipy.stats import spearmanr
from .base import BaseEstimator, TransformerMixin, RegressorMixin
from .utils import as_float_array, check_array, check_consistent_length
from .utils import deprecated
from .utils.fixes import astype
from ._isotonic import _inplace_contiguous_isotonic_regression, _make_unique
import warnings
import math


__all__ = ['check_increasing', 'isotonic_regression', 'IsotonicRegression']


def check_increasing(x, y):
    """Determine whether y is monotonically correlated with x.

    y is found increasing or decreasing with respect to x based on a
    Spearman correlation test.

    Parameters
    ----------
    x : array-like, shape=(n_samples,)
        Training data.

    y : array-like, shape=(n_samples,)
        Training target.

    Returns
    -------
    increasing_bool : boolean
        Whether the relationship is increasing or decreasing.

    Notes
    -----
    The Spearman correlation coefficient is estimated from the data, and the
    sign of the resulting estimate is used as the result.

    In the event that the 95% confidence interval based on Fisher transform
    spans zero, a warning is raised.

    References
    ----------
    Fisher transformation. Wikipedia.
    https://en.wikipedia.org/wiki/Fisher_transformation
    """

    # Calculate Spearman rho estimate and set return accordingly.
    rho, _ = spearmanr(x, y)
    increasing_bool = rho >= 0

    # Run Fisher transform to get the rho CI, but handle rho=+/-1.
    # BUG FIX: the standard error 1/sqrt(n - 3) is undefined for n <= 3
    # (division by zero or sqrt of a negative number), so the CI sanity
    # check is now skipped for such tiny inputs instead of raising.
    if rho not in [-1.0, 1.0] and len(x) > 3:
        F = 0.5 * math.log((1. + rho) / (1. - rho))
        F_se = 1 / math.sqrt(len(x) - 3)

        # Use a 95% CI, i.e., +/-1.96 S.E.
        # https://en.wikipedia.org/wiki/Fisher_transformation
        rho_0 = math.tanh(F - 1.96 * F_se)
        rho_1 = math.tanh(F + 1.96 * F_se)

        # Warn if the CI spans zero.
        if np.sign(rho_0) != np.sign(rho_1):
            warnings.warn("Confidence interval of the Spearman "
                          "correlation coefficient spans zero. "
                          "Determination of ``increasing`` may be "
                          "suspect.")

    return increasing_bool


def isotonic_regression(y, sample_weight=None, y_min=None, y_max=None,
                        increasing=True):
    """Solve the isotonic regression model::

        min sum w[i] (y[i] - y_[i]) ** 2

        subject to y_min = y_[1] <= y_[2] ... <= y_[n] = y_max

    where:
        - y[i] are inputs (real numbers)
        - y_[i] are fitted
        - w[i] are optional strictly positive weights (default to 1.0)

    Read more in the :ref:`User Guide <isotonic>`.

    Parameters
    ----------
    y : iterable of floating-point values
        The data.

    sample_weight : iterable of floating-point values, optional, default: None
        Weights on each point of the regression.
        If None, weight is set to 1 (equal weights).

    y_min : optional, default: None
        If not None, set the lowest value of the fit to y_min.

    y_max : optional, default: None
        If not None, set the highest value of the fit to y_max.

    increasing : boolean, optional, default: True
        Whether to compute ``y_`` is increasing (if set to True) or decreasing
        (if set to False)

    Returns
    -------
    y_ : list of floating-point values
        Isotonic fit of y.

    References
    ----------
    "Active set algorithms for isotonic regression; A unifying framework"
    by Michael J. Best and Nilotpal Chakravarti, section 3.
    """
    # Work on a reversed view for decreasing fits so the Cython PAVA routine
    # only ever has to solve the increasing problem.
    order = np.s_[:] if increasing else np.s_[::-1]
    y = np.array(y[order], dtype=np.float64)
    if sample_weight is None:
        sample_weight = np.ones(len(y), dtype=np.float64)
    else:
        sample_weight = np.array(sample_weight[order], dtype=np.float64)

    _inplace_contiguous_isotonic_regression(y, sample_weight)
    if y_min is not None or y_max is not None:
        # Older versions of np.clip don't accept None as a bound, so use np.inf
        if y_min is None:
            y_min = -np.inf
        if y_max is None:
            y_max = np.inf
        np.clip(y, y_min, y_max, y)
    # Undo the (possible) reversal so the result matches the input order.
    return y[order]


class IsotonicRegression(BaseEstimator, TransformerMixin, RegressorMixin):
    """Isotonic regression model.

    The isotonic regression optimization problem is defined by::

        min sum w_i (y[i] - y_[i]) ** 2

        subject to y_[i] <= y_[j] whenever X[i] <= X[j]
        and min(y_) = y_min, max(y_) = y_max

    where:
        - ``y[i]`` are inputs (real numbers)
        - ``y_[i]`` are fitted
        - ``X`` specifies the order.
          If ``X`` is non-decreasing then ``y_`` is non-decreasing.
        - ``w[i]`` are optional strictly positive weights (default to 1.0)

    Read more in the :ref:`User Guide <isotonic>`.

    Parameters
    ----------
    y_min : optional, default: None
        If not None, set the lowest value of the fit to y_min.

    y_max : optional, default: None
        If not None, set the highest value of the fit to y_max.

    increasing : boolean or string, optional, default: True
        If boolean, whether or not to fit the isotonic regression with y
        increasing or decreasing.

        The string value "auto" determines whether y should
        increase or decrease based on the Spearman correlation estimate's
        sign.

    out_of_bounds : string, optional, default: "nan"
        The ``out_of_bounds`` parameter handles how x-values outside of the
        training domain are handled.  When set to "nan", predicted y-values
        will be NaN.  When set to "clip", predicted y-values will be
        set to the value corresponding to the nearest train interval endpoint.
        When set to "raise", allow ``interp1d`` to throw ValueError.

    Attributes
    ----------
    X_min_ : float
        Minimum value of input array `X_` for left bound.

    X_max_ : float
        Maximum value of input array `X_` for right bound.

    f_ : function
        The stepwise interpolating function that covers the domain `X_`.

    Notes
    -----
    Ties are broken using the secondary method from Leeuw, 1977.

    References
    ----------
    Isotonic Median Regression: A Linear Programming Approach
    Nilotpal Chakravarti
    Mathematics of Operations Research
    Vol. 14, No. 2 (May, 1989), pp. 303-308

    Isotone Optimization in R : Pool-Adjacent-Violators
    Algorithm (PAVA) and Active Set Methods
    Leeuw, Hornik, Mair
    Journal of Statistical Software 2009

    Correctness of Kruskal's algorithms for monotone regression with ties
    Leeuw, Psychometrica, 1977
    """
    def __init__(self, y_min=None, y_max=None, increasing=True,
                 out_of_bounds='nan'):
        self.y_min = y_min
        self.y_max = y_max
        self.increasing = increasing
        self.out_of_bounds = out_of_bounds

    @property
    @deprecated("Attribute ``X_`` is deprecated in version 0.18 and will be"
                " removed in version 0.20.")
    def X_(self):
        return self._X_

    @X_.setter
    def X_(self, value):
        self._X_ = value

    @X_.deleter
    def X_(self):
        del self._X_

    @property
    @deprecated("Attribute ``y_`` is deprecated in version 0.18 and will"
                " be removed in version 0.20.")
    def y_(self):
        return self._y_

    @y_.setter
    def y_(self, value):
        self._y_ = value

    @y_.deleter
    def y_(self):
        del self._y_

    def _check_fit_data(self, X, y, sample_weight=None):
        # Isotonic regression is only defined for 1-d inputs.
        if len(X.shape) != 1:
            raise ValueError("X should be a 1d array")

    def _build_f(self, X, y):
        """Build the f_ interp1d function."""

        # Handle the out_of_bounds argument by setting bounds_error
        if self.out_of_bounds not in ["raise", "nan", "clip"]:
            raise ValueError("The argument ``out_of_bounds`` must be in "
                             "'nan', 'clip', 'raise'; got {0}"
                             .format(self.out_of_bounds))

        bounds_error = self.out_of_bounds == "raise"
        if len(y) == 1:
            # single y, constant prediction
            self.f_ = lambda x: y.repeat(x.shape)
        else:
            self.f_ = interpolate.interp1d(X, y, kind='linear',
                                           bounds_error=bounds_error)

    def _build_y(self, X, y, sample_weight, trim_duplicates=True):
        """Build the y_ IsotonicRegression."""
        check_consistent_length(X, y, sample_weight)
        X, y = [check_array(x, ensure_2d=False) for x in [X, y]]

        y = as_float_array(y)
        self._check_fit_data(X, y, sample_weight)

        # Determine increasing if auto-determination requested
        if self.increasing == 'auto':
            self.increasing_ = check_increasing(X, y)
        else:
            self.increasing_ = self.increasing

        # If sample_weight is passed, remove zero-weight values and clean
        # the order
        if sample_weight is not None:
            sample_weight = check_array(sample_weight, ensure_2d=False)
            mask = sample_weight > 0
            X, y, sample_weight = X[mask], y[mask], sample_weight[mask]
        else:
            sample_weight = np.ones(len(y))

        # Sort by X first, breaking X-ties by y (secondary tie-breaking,
        # Leeuw 1977), then collapse duplicate X values in Cython.
        order = np.lexsort((y, X))
        X, y, sample_weight = [astype(array[order], np.float64, copy=False)
                               for array in [X, y, sample_weight]]
        unique_X, unique_y, unique_sample_weight = _make_unique(
            X, y, sample_weight)

        # Store _X_ and _y_ to maintain backward compat during the deprecation
        # period of X_ and y_
        self._X_ = X = unique_X
        self._y_ = y = isotonic_regression(unique_y, unique_sample_weight,
                                           self.y_min, self.y_max,
                                           increasing=self.increasing_)

        # Handle the left and right bounds on X
        self.X_min_, self.X_max_ = np.min(X), np.max(X)

        if trim_duplicates:
            # Remove unnecessary points for faster prediction
            keep_data = np.ones((len(y),), dtype=bool)
            # Aside from the 1st and last point, remove points whose y values
            # are equal to both the point before and the point after it.
            keep_data[1:-1] = np.logical_or(
                np.not_equal(y[1:-1], y[:-2]),
                np.not_equal(y[1:-1], y[2:])
            )
            return X[keep_data], y[keep_data]
        else:
            # The ability to turn off trim_duplicates is only used to make it
            # easier to unit test that removing duplicates in y does not have
            # any impact on the resulting interpolation function (besides
            # prediction speed).
            return X, y

    def fit(self, X, y, sample_weight=None):
        """Fit the model using X, y as training data.

        Parameters
        ----------
        X : array-like, shape=(n_samples,)
            Training data.

        y : array-like, shape=(n_samples,)
            Training target.

        sample_weight : array-like, shape=(n_samples,), optional, default: None
            Weights. If set to None, all weights will be set to 1 (equal
            weights).

        Returns
        -------
        self : object
            Returns an instance of self.

        Notes
        -----
        X is stored for future use, as `transform` needs X to interpolate
        new input data.
        """
        # Transform y by running the isotonic regression algorithm and
        # transform X accordingly.
        X, y = self._build_y(X, y, sample_weight)

        # It is necessary to store the non-redundant part of the training set
        # on the model to make it possible to support model persistence via
        # the pickle module as the object built by scipy.interp1d is not
        # picklable directly.
        self._necessary_X_, self._necessary_y_ = X, y

        # Build the interpolation function
        self._build_f(X, y)
        return self

    def transform(self, T):
        """Transform new data by linear interpolation

        Parameters
        ----------
        T : array-like, shape=(n_samples,)
            Data to transform.

        Returns
        -------
        T_ : array, shape=(n_samples,)
            The transformed data
        """
        T = as_float_array(T)
        if len(T.shape) != 1:
            raise ValueError("Isotonic regression input should be a 1d array")

        # Handle the out_of_bounds argument by clipping if needed
        if self.out_of_bounds not in ["raise", "nan", "clip"]:
            raise ValueError("The argument ``out_of_bounds`` must be in "
                             "'nan', 'clip', 'raise'; got {0}"
                             .format(self.out_of_bounds))

        if self.out_of_bounds == "clip":
            T = np.clip(T, self.X_min_, self.X_max_)
        return self.f_(T)

    def predict(self, T):
        """Predict new data by linear interpolation.

        Parameters
        ----------
        T : array-like, shape=(n_samples,)
            Data to transform.

        Returns
        -------
        T_ : array, shape=(n_samples,)
            Transformed data.
        """
        return self.transform(T)

    def __getstate__(self):
        """Pickle-protocol - return state of the estimator. """
        state = super(IsotonicRegression, self).__getstate__()
        # remove interpolation method, which scipy builds unpicklable
        state.pop('f_', None)
        return state

    def __setstate__(self, state):
        """Pickle-protocol - set state of the estimator.

        We need to rebuild the interpolation function.
        """
        super(IsotonicRegression, self).__setstate__(state)
        if hasattr(self, '_necessary_X_') and hasattr(self, '_necessary_y_'):
            self._build_f(self._necessary_X_, self._necessary_y_)
CiscoDevNet/coding-skills-sample-code
refs/heads/master
coding102-REST-python-dcloud/create-ticket.py
1
# import requests library import requests #import json library import json # put the ip address or dns of your apic-em controller in this url url = 'https://198.18.129.100/api/v1/ticket' #the username and password to access the APIC-EM Controller payload = {"username":"admin","password":"C1sco12345"} #Content type must be included in the header header = {"content-type": "application/json"} #Performs a POST on the specified url. response= requests.post(url,data=json.dumps(payload), headers=header, verify=False) # print the json that is returned print(response.text)
molobrakos/home-assistant
refs/heads/master
homeassistant/components/yamaha/__init__.py
36
"""The yamaha component."""
suutari-ai/shoop
refs/heads/master
shuup_tests/utils/test_namemixin.py
3
# -*- coding: utf-8 -*- # This file is part of Shuup. # # Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved. # # This source code is licensed under the OSL-3.0 license found in the # LICENSE file in the root directory of this source tree. from shuup.core.utils.name_mixin import NameMixin class Ahnuld(NameMixin): def __init__(self, first_name, last_name="", prefix="", suffix=""): self.first_name_str = first_name self.last_name_str = last_name self.name = "%s %s" % (first_name, last_name) self.prefix = prefix self.suffix = suffix def get_fullname(self): return "%s %s" % (self.first_name_str, self.last_name_str) def test_basic_name(): ahnuld = Ahnuld(first_name="Ahnuld", last_name="Strong") assert ahnuld.first_name == ahnuld.first_name_str assert ahnuld.last_name == ahnuld.last_name_str assert ahnuld.full_name == ahnuld.get_fullname() def test_only_firstname(): ahnuld = Ahnuld(first_name="Ahnuld") assert ahnuld.first_name == ahnuld.first_name_str assert ahnuld.last_name == ahnuld.last_name_str assert ahnuld.full_name == ahnuld.first_name # full_name should be first name def test_prefixes(): ahnuld = Ahnuld(first_name="Ahnuld", last_name="Strong", prefix="mr.") assert ahnuld.first_name == ahnuld.first_name_str assert ahnuld.last_name == ahnuld.last_name_str assert ahnuld.full_name == ("%s %s" % (ahnuld.prefix, ahnuld.get_fullname())) def test_prefix_and_suffix(): ahnuld = Ahnuld(first_name="Ahnuld", last_name="Strong", prefix="mr.", suffix="the oak") assert ahnuld.first_name == ahnuld.first_name_str assert ahnuld.last_name == ahnuld.last_name_str assert ahnuld.full_name == ("%s %s %s" % (ahnuld.prefix, ahnuld.get_fullname(), ahnuld.suffix)) def test_awkward_names(): ahnuld = Ahnuld(first_name="Ahnuld", last_name="Super Strong in The Sky") assert ahnuld.first_name == ahnuld.first_name_str assert ahnuld.last_name == ahnuld.last_name_str assert ahnuld.full_name == ahnuld.get_fullname()
seanli9jan/tensorflow
refs/heads/master
tensorflow/tools/docs/parser_test.py
22
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for documentation parser."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import collections
import functools
import os
import sys

from tensorflow.python.platform import googletest
from tensorflow.python.util import tf_inspect
from tensorflow.tools.docs import doc_controls
from tensorflow.tools.docs import parser

# The test needs a real module. `types.ModuleType()` doesn't work, as the result
# is a `builtin` module. Using "parser" here is arbitrary. The tests don't
# depend on the module contents. At this point in the process the public api
# has already been extracted.
test_module = parser


# NOTE(review): the docstrings on the fixtures below (test_function,
# TestClass and its members) are test DATA -- several tests compare
# tf_inspect.getdoc(...) output against parser results. Do not edit them.
def test_function(unused_arg, unused_kwarg='default'):
  """Docstring for test function."""
  pass


def test_function_with_args_kwargs(unused_arg, *unused_args, **unused_kwargs):
  """Docstring for second test function."""
  pass


class ParentClass(object):

  @doc_controls.do_not_doc_inheritable
  def hidden_method(self):
    pass


class TestClass(ParentClass):
  """Docstring for TestClass itself."""

  def a_method(self, arg='default'):
    """Docstring for a method."""
    pass

  def hidden_method(self):
    pass

  @doc_controls.do_not_generate_docs
  def hidden_method2(self):
    pass

  class ChildClass(object):
    """Docstring for a child class."""
    pass

  @property
  def a_property(self):
    """Docstring for a property."""
    pass

  CLASS_MEMBER = 'a class member'


class DummyVisitor(object):
  """Stand-in for the API-traversal visitor: just holds index/duplicate maps."""

  def __init__(self, index, duplicate_of):
    self.index = index
    self.duplicate_of = duplicate_of


class ParserTest(googletest.TestCase):

  def test_documentation_path(self):
    self.assertEqual('test.md', parser.documentation_path('test'))
    self.assertEqual('test/module.md', parser.documentation_path('test.module'))

  def test_replace_references(self):

    class HasOneMember(object):

      def foo(self):
        pass

    string = (
        'A @{tf.reference}, another @{tf.reference$with\nnewline}, a member '
        '@{tf.reference.foo}, and a @{tf.third$link `text` with `code` in '
        'it}.')
    duplicate_of = {'tf.third': 'tf.fourth'}
    index = {'tf.reference': HasOneMember,
             'tf.reference.foo': HasOneMember.foo,
             'tf.third': HasOneMember,
             'tf.fourth': HasOneMember}

    visitor = DummyVisitor(index, duplicate_of)

    reference_resolver = parser.ReferenceResolver.from_visitor(
        visitor=visitor, doc_index={}, py_module_names=['tf'])
    result = reference_resolver.replace_references(string, '../..')
    self.assertEqual('A <a href="../../tf/reference.md">'
                     '<code>tf.reference</code></a>, '
                     'another <a href="../../tf/reference.md">'
                     'with\nnewline</a>, '
                     'a member <a href="../../tf/reference.md#foo">'
                     '<code>tf.reference.foo</code></a>, '
                     'and a <a href="../../tf/fourth.md">link '
                     '<code>text</code> with '
                     '<code>code</code> in it</a>.', result)

  def test_doc_replace_references(self):
    string = '@{$doc1} @{$doc1#abc} @{$doc1$link} @{$doc1#def$zelda} @{$do/c2}'

    class DocInfo(object):
      pass

    doc1 = DocInfo()
    doc1.title = 'Title1'
    doc1.url = 'URL1'
    doc2 = DocInfo()
    doc2.title = 'Two words'
    doc2.url = 'somewhere/else'
    doc_index = {'doc1': doc1, 'do/c2': doc2}

    visitor = DummyVisitor(index={}, duplicate_of={})

    reference_resolver = parser.ReferenceResolver.from_visitor(
        visitor=visitor, doc_index=doc_index, py_module_names=['tf'])
    result = reference_resolver.replace_references(string, 'python')
    self.assertEqual('<a href="../URL1">Title1</a> '
                     '<a href="../URL1#abc">Title1</a> '
                     '<a href="../URL1">link</a> '
                     '<a href="../URL1#def">zelda</a> '
                     '<a href="../somewhere/else">Two words</a>', result)

  def test_docs_for_class(self):

    index = {
        'TestClass': TestClass,
        'TestClass.a_method': TestClass.a_method,
        'TestClass.a_property': TestClass.a_property,
        'TestClass.ChildClass': TestClass.ChildClass,
        'TestClass.CLASS_MEMBER': TestClass.CLASS_MEMBER
    }

    visitor = DummyVisitor(index=index, duplicate_of={})

    reference_resolver = parser.ReferenceResolver.from_visitor(
        visitor=visitor, doc_index={}, py_module_names=['tf'])

    tree = {
        'TestClass': ['a_method', 'a_property', 'ChildClass', 'CLASS_MEMBER']
    }
    parser_config = parser.ParserConfig(
        reference_resolver=reference_resolver,
        duplicates={},
        duplicate_of={},
        tree=tree,
        index=index,
        reverse_index={},
        guide_index={},
        base_dir='/')

    page_info = parser.docs_for_object(
        full_name='TestClass', py_object=TestClass, parser_config=parser_config)

    # Make sure the brief docstring is present
    self.assertEqual(
        tf_inspect.getdoc(TestClass).split('\n')[0], page_info.doc.brief)

    # Make sure the method is present
    self.assertEqual(TestClass.a_method, page_info.methods[0].obj)

    # Make sure that the signature is extracted properly and omits self.
    self.assertEqual(["arg='default'"], page_info.methods[0].signature)

    # Make sure the property is present
    self.assertIs(TestClass.a_property, page_info.properties[0].obj)

    # Make sure there is a link to the child class and it points the right way.
    self.assertIs(TestClass.ChildClass, page_info.classes[0].obj)

    # Make sure this file is contained as the definition location.
    self.assertEqual(os.path.relpath(__file__, '/'), page_info.defined_in.path)

  def test_namedtuple_field_order(self):
    # A set literal is used deliberately so field order is not input order.
    namedtupleclass = collections.namedtuple('namedtupleclass',
                                             {'z', 'y', 'x', 'w', 'v', 'u'})

    index = {
        'namedtupleclass': namedtupleclass,
        'namedtupleclass.u': namedtupleclass.u,
        'namedtupleclass.v': namedtupleclass.v,
        'namedtupleclass.w': namedtupleclass.w,
        'namedtupleclass.x': namedtupleclass.x,
        'namedtupleclass.y': namedtupleclass.y,
        'namedtupleclass.z': namedtupleclass.z,
    }

    visitor = DummyVisitor(index=index, duplicate_of={})

    reference_resolver = parser.ReferenceResolver.from_visitor(
        visitor=visitor, doc_index={}, py_module_names=['tf'])

    tree = {'namedtupleclass': {'u', 'v', 'w', 'x', 'y', 'z'}}
    parser_config = parser.ParserConfig(
        reference_resolver=reference_resolver,
        duplicates={},
        duplicate_of={},
        tree=tree,
        index=index,
        reverse_index={},
        guide_index={},
        base_dir='/')

    page_info = parser.docs_for_object(
        full_name='namedtupleclass',
        py_object=namedtupleclass,
        parser_config=parser_config)

    # Each namedtuple field has a docstring of the form:
    #   'Alias for field number ##'. These props are returned sorted.

    def sort_key(prop_info):
      return int(prop_info.obj.__doc__.split(' ')[-1])

    self.assertSequenceEqual(page_info.properties,
                             sorted(page_info.properties, key=sort_key))

  def test_docs_for_class_should_skip(self):

    class Parent(object):

      @doc_controls.do_not_doc_inheritable
      def a_method(self, arg='default'):
        pass

    class Child(Parent):

      def a_method(self, arg='default'):
        pass

    index = {
        'Child': Child,
        'Child.a_method': Child.a_method,
    }

    visitor = DummyVisitor(index=index, duplicate_of={})

    reference_resolver = parser.ReferenceResolver.from_visitor(
        visitor=visitor, doc_index={}, py_module_names=['tf'])

    tree = {
        'Child': ['a_method'],
    }

    parser_config = parser.ParserConfig(
        reference_resolver=reference_resolver,
        duplicates={},
        duplicate_of={},
        tree=tree,
        index=index,
        reverse_index={},
        guide_index={},
        base_dir='/')

    page_info = parser.docs_for_object(
        full_name='Child', py_object=Child, parser_config=parser_config)

    # Make sure the `a_method` is not present
    self.assertEqual(0, len(page_info.methods))

  def test_docs_for_message_class(self):

    class CMessage(object):

      def hidden(self):
        pass

    class Message(object):

      def hidden2(self):
        pass

    class MessageMeta(object):

      def hidden3(self):
        pass

    class ChildMessage(CMessage, Message, MessageMeta):

      def my_method(self):
        pass

    index = {
        'ChildMessage': ChildMessage,
        'ChildMessage.hidden': ChildMessage.hidden,
        'ChildMessage.hidden2': ChildMessage.hidden2,
        'ChildMessage.hidden3': ChildMessage.hidden3,
        'ChildMessage.my_method': ChildMessage.my_method,
    }

    visitor = DummyVisitor(index=index, duplicate_of={})

    reference_resolver = parser.ReferenceResolver.from_visitor(
        visitor=visitor, doc_index={}, py_module_names=['tf'])

    tree = {'ChildMessage': ['hidden', 'hidden2', 'hidden3', 'my_method']}

    parser_config = parser.ParserConfig(
        reference_resolver=reference_resolver,
        duplicates={},
        duplicate_of={},
        tree=tree,
        index=index,
        reverse_index={},
        guide_index={},
        base_dir='/')

    page_info = parser.docs_for_object(
        full_name='ChildMessage',
        py_object=ChildMessage,
        parser_config=parser_config)

    self.assertEqual(1, len(page_info.methods))
    self.assertEqual('my_method', page_info.methods[0].short_name)

  def test_docs_for_module(self):

    index = {
        'TestModule': test_module,
        'TestModule.test_function': test_function,
        'TestModule.test_function_with_args_kwargs':
            test_function_with_args_kwargs,
        'TestModule.TestClass': TestClass,
    }

    visitor = DummyVisitor(index=index, duplicate_of={})

    reference_resolver = parser.ReferenceResolver.from_visitor(
        visitor=visitor, doc_index={}, py_module_names=['tf'])

    tree = {
        'TestModule': ['TestClass', 'test_function',
                       'test_function_with_args_kwargs']
    }

    parser_config = parser.ParserConfig(
        reference_resolver=reference_resolver,
        duplicates={},
        duplicate_of={},
        tree=tree,
        index=index,
        reverse_index={},
        guide_index={},
        base_dir='/')

    page_info = parser.docs_for_object(
        full_name='TestModule',
        py_object=test_module,
        parser_config=parser_config)

    # Make sure the brief docstring is present
    self.assertEqual(
        tf_inspect.getdoc(test_module).split('\n')[0], page_info.doc.brief)

    # Make sure that the members are there
    funcs = {f_info.obj for f_info in page_info.functions}
    self.assertEqual({test_function, test_function_with_args_kwargs}, funcs)

    classes = {cls_info.obj for cls_info in page_info.classes}
    self.assertEqual({TestClass}, classes)

    # Make sure the module's file is contained as the definition location.
    self.assertEqual(
        os.path.relpath(test_module.__file__, '/'), page_info.defined_in.path)

  def test_docs_for_function(self):
    index = {
        'test_function': test_function
    }

    visitor = DummyVisitor(index=index, duplicate_of={})

    reference_resolver = parser.ReferenceResolver.from_visitor(
        visitor=visitor, doc_index={}, py_module_names=['tf'])

    tree = {
        '': ['test_function']
    }
    parser_config = parser.ParserConfig(
        reference_resolver=reference_resolver,
        duplicates={},
        duplicate_of={},
        tree=tree,
        index=index,
        reverse_index={},
        guide_index={},
        base_dir='/')

    page_info = parser.docs_for_object(
        full_name='test_function',
        py_object=test_function,
        parser_config=parser_config)

    # Make sure the brief docstring is present
    self.assertEqual(
        tf_inspect.getdoc(test_function).split('\n')[0], page_info.doc.brief)

    # Make sure the extracted signature is good.
    self.assertEqual(['unused_arg', "unused_kwarg='default'"],
                     page_info.signature)

    # Make sure this file is contained as the definition location.
    self.assertEqual(os.path.relpath(__file__, '/'), page_info.defined_in.path)

  def test_docs_for_function_with_kwargs(self):
    index = {
        'test_function_with_args_kwargs': test_function_with_args_kwargs
    }

    visitor = DummyVisitor(index=index, duplicate_of={})

    reference_resolver = parser.ReferenceResolver.from_visitor(
        visitor=visitor, doc_index={}, py_module_names=['tf'])

    tree = {
        '': ['test_function_with_args_kwargs']
    }
    parser_config = parser.ParserConfig(
        reference_resolver=reference_resolver,
        duplicates={},
        duplicate_of={},
        tree=tree,
        index=index,
        reverse_index={},
        guide_index={},
        base_dir='/')

    page_info = parser.docs_for_object(
        full_name='test_function_with_args_kwargs',
        py_object=test_function_with_args_kwargs,
        parser_config=parser_config)

    # Make sure the brief docstring is present
    self.assertEqual(
        tf_inspect.getdoc(test_function_with_args_kwargs).split('\n')[0],
        page_info.doc.brief)

    # Make sure the extracted signature is good.
    self.assertEqual(['unused_arg', '*unused_args', '**unused_kwargs'],
                     page_info.signature)

  def test_parse_md_docstring(self):

    # NOTE(review): this nested docstring is parser input fixture data; its
    # exact @{...} and @compatibility markup is what the assertions check.
    def test_function_with_fancy_docstring(arg):
      """Function with a fancy docstring.

      And a bunch of references: @{tf.reference}, another @{tf.reference},
          a member @{tf.reference.foo}, and a @{tf.third}.

      Args:
        arg: An argument.

      Raises:
        an exception

      Returns:
        arg: the input, and
        arg: the input, again.

      @compatibility(numpy)
      NumPy has nothing as awesome as this function.
      @end_compatibility

      @compatibility(theano)
      Theano has nothing as awesome as this function.

      Check it out.
      @end_compatibility

      """
      return arg, arg

    class HasOneMember(object):

      def foo(self):
        pass

    duplicate_of = {'tf.third': 'tf.fourth'}
    index = {
        'tf': test_module,
        'tf.fancy': test_function_with_fancy_docstring,
        'tf.reference': HasOneMember,
        'tf.reference.foo': HasOneMember.foo,
        'tf.third': HasOneMember,
        'tf.fourth': HasOneMember
    }

    visitor = DummyVisitor(index=index, duplicate_of=duplicate_of)

    reference_resolver = parser.ReferenceResolver.from_visitor(
        visitor=visitor, doc_index={}, py_module_names=['tf'])

    doc_info = parser._parse_md_docstring(test_function_with_fancy_docstring,
                                          '../..', reference_resolver)

    self.assertNotIn('@', doc_info.docstring)
    self.assertNotIn('compatibility', doc_info.docstring)
    self.assertNotIn('Raises:', doc_info.docstring)

    self.assertEqual(len(doc_info.function_details), 3)
    self.assertEqual(set(doc_info.compatibility.keys()), {'numpy', 'theano'})

    self.assertEqual(doc_info.compatibility['numpy'],
                     'NumPy has nothing as awesome as this function.\n')

  def test_generate_index(self):

    index = {
        'tf': test_module,
        'tf.TestModule': test_module,
        'tf.test_function': test_function,
        'tf.TestModule.test_function': test_function,
        'tf.TestModule.TestClass': TestClass,
        'tf.TestModule.TestClass.a_method': TestClass.a_method,
        'tf.TestModule.TestClass.a_property': TestClass.a_property,
        'tf.TestModule.TestClass.ChildClass': TestClass.ChildClass,
    }
    duplicate_of = {'tf.TestModule.test_function': 'tf.test_function'}

    visitor = DummyVisitor(index=index, duplicate_of=duplicate_of)

    reference_resolver = parser.ReferenceResolver.from_visitor(
        visitor=visitor, doc_index={}, py_module_names=['tf'])

    docs = parser.generate_global_index('TestLibrary', index=index,
                                        reference_resolver=reference_resolver)

    # Make sure duplicates and non-top-level symbols are in the index, but
    # methods and properties are not.
    self.assertNotIn('a_method', docs)
    self.assertNotIn('a_property', docs)
    self.assertIn('TestModule.TestClass', docs)
    self.assertIn('TestModule.TestClass.ChildClass', docs)
    self.assertIn('TestModule.test_function', docs)
    # Leading backtick to make sure it's included top-level.
    # This depends on formatting, but should be stable.
    self.assertIn('<code>tf.test_function', docs)

  def test_argspec_for_functools_partial(self):
    # pylint: disable=unused-argument
    def test_function_for_partial1(arg1, arg2, kwarg1=1, kwarg2=2):
      pass

    def test_function_for_partial2(arg1, arg2, *my_args, **my_kwargs):
      pass
    # pylint: enable=unused-argument

    # pylint: disable=protected-access
    # Make sure everything works for regular functions.
    expected = tf_inspect.FullArgSpec(
        args=['arg1', 'arg2', 'kwarg1', 'kwarg2'],
        varargs=None,
        varkw=None,
        defaults=(1, 2),
        kwonlyargs=[],
        kwonlydefaults=None,
        annotations={})
    self.assertEqual(expected, parser._get_arg_spec(test_function_for_partial1))

    # Make sure doing nothing works.
    expected = tf_inspect.FullArgSpec(
        args=['arg1', 'arg2', 'kwarg1', 'kwarg2'],
        varargs=None,
        varkw=None,
        defaults=(1, 2),
        kwonlyargs=[],
        kwonlydefaults=None,
        annotations={})
    partial = functools.partial(test_function_for_partial1)
    self.assertEqual(expected, parser._get_arg_spec(partial))

    # Make sure setting args from the front works.
    expected = tf_inspect.FullArgSpec(
        args=['arg2', 'kwarg1', 'kwarg2'],
        varargs=None,
        varkw=None,
        defaults=(1, 2),
        kwonlyargs=[],
        kwonlydefaults=None,
        annotations={})
    partial = functools.partial(test_function_for_partial1, 1)
    self.assertEqual(expected, parser._get_arg_spec(partial))

    expected = tf_inspect.FullArgSpec(
        args=['kwarg2'],
        varargs=None,
        varkw=None,
        defaults=(2,),
        kwonlyargs=[],
        kwonlydefaults=None,
        annotations={})
    partial = functools.partial(test_function_for_partial1, 1, 2, 3)
    self.assertEqual(expected, parser._get_arg_spec(partial))

    # Make sure setting kwargs works.
    expected = tf_inspect.FullArgSpec(
        args=['arg1', 'arg2', 'kwarg2'],
        varargs=None,
        varkw=None,
        defaults=(2,),
        kwonlyargs=[],
        kwonlydefaults=None,
        annotations={})
    partial = functools.partial(test_function_for_partial1, kwarg1=0)
    self.assertEqual(expected, parser._get_arg_spec(partial))

    expected = tf_inspect.FullArgSpec(
        args=['arg1', 'arg2', 'kwarg1'],
        varargs=None,
        varkw=None,
        defaults=(1,),
        kwonlyargs=[],
        kwonlydefaults=None,
        annotations={})
    partial = functools.partial(test_function_for_partial1, kwarg2=0)
    self.assertEqual(expected, parser._get_arg_spec(partial))

    expected = tf_inspect.FullArgSpec(
        args=['arg1'],
        varargs=None,
        varkw=None,
        defaults=(),
        kwonlyargs=[],
        kwonlydefaults=None,
        annotations={})
    partial = functools.partial(test_function_for_partial1,
                                arg2=0, kwarg1=0, kwarg2=0)
    self.assertEqual(expected, parser._get_arg_spec(partial))

    # Make sure *args, *kwargs is accounted for.
    expected = tf_inspect.FullArgSpec(
        args=[],
        varargs='my_args',
        varkw='my_kwargs',
        defaults=(),
        kwonlyargs=[],
        kwonlydefaults=None,
        annotations={})
    partial = functools.partial(test_function_for_partial2, 0, 1)
    self.assertEqual(expected, parser._get_arg_spec(partial))

    # pylint: enable=protected-access

  def testSaveReferenceResolver(self):
    you_cant_serialize_this = object()

    duplicate_of = {'AClass': ['AClass2']}
    doc_index = {'doc': you_cant_serialize_this}
    is_fragment = {
        'tf': False,
        'tf.VERSION': True,
        'tf.AClass': False,
        'tf.AClass.method': True,
        'tf.AClass2': False,
        'tf.function': False
    }
    py_module_names = ['tf', 'tfdbg']

    resolver = parser.ReferenceResolver(duplicate_of, doc_index, is_fragment,
                                        py_module_names)

    outdir = googletest.GetTempDir()

    filepath = os.path.join(outdir, 'resolver.json')

    resolver.to_json_file(filepath)
    resolver2 = parser.ReferenceResolver.from_json_file(filepath, doc_index)

    # There are no __slots__, so all fields are visible in __dict__.
    self.assertEqual(resolver.__dict__, resolver2.__dict__)

  def testIsFreeFunction(self):

    result = parser.is_free_function(test_function, 'test_module.test_function',
                                     {'test_module': test_module})
    self.assertTrue(result)

    result = parser.is_free_function(test_function, 'TestClass.test_function',
                                     {'TestClass': TestClass})
    self.assertFalse(result)

    result = parser.is_free_function(TestClass, 'TestClass', {})
    self.assertFalse(result)

    result = parser.is_free_function(test_module, 'test_module', {})
    self.assertFalse(result)


# Fixture docstring for _parse_function_details; the assertions below depend
# on its exact line layout, so do not re-wrap it.
RELU_DOC = """Computes rectified linear: `max(features, 0)`

Args:
  features: A `Tensor`. Must be one of the following types: `float32`,
    `float64`, `int32`, `int64`, `uint8`, `int16`, `int8`, `uint16`,
    `half`.
  name: A name for the operation (optional)

Returns:
  A `Tensor`. Has the same type as `features`
"""


class TestParseFunctionDetails(googletest.TestCase):

  def test_parse_function_details(self):
    docstring, function_details = parser._parse_function_details(RELU_DOC)

    self.assertEqual(len(function_details), 2)
    args = function_details[0]
    self.assertEqual(args.keyword, 'Args')
    self.assertEqual(len(args.header), 0)
    self.assertEqual(len(args.items), 2)
    self.assertEqual(args.items[0][0], 'features')
    self.assertEqual(args.items[1][0], 'name')
    self.assertEqual(args.items[1][1],
                     'A name for the operation (optional)\n\n')
    returns = function_details[1]
    self.assertEqual(returns.keyword, 'Returns')

    relu_doc_lines = RELU_DOC.split('\n')
    self.assertEqual(docstring, relu_doc_lines[0] + '\n\n')
    self.assertEqual(returns.header, relu_doc_lines[-2] + '\n')

    self.assertEqual(
        RELU_DOC,
        docstring + ''.join(str(detail) for detail in function_details))


class TestGenerateSignature(googletest.TestCase):

  def test_known_object(self):
    known_object = object()
    reverse_index = {id(known_object): 'location.of.object.in.api'}

    def example_fun(arg=known_object):  # pylint: disable=unused-argument
      pass

    sig = parser._generate_signature(example_fun, reverse_index)
    self.assertEqual(sig, ['arg=location.of.object.in.api'])

  def test_literals(self):
    if sys.version_info >= (3, 0):
      print('Warning: Doc generation is not supported from python3.')
      return

    def example_fun(a=5, b=5.0, c=None, d=True, e='hello',
                    f=(1, (2, 3))):  # pylint: disable=g-bad-name, unused-argument
      pass

    sig = parser._generate_signature(example_fun, reverse_index={})
    self.assertEqual(
        sig, ['a=5', 'b=5.0', 'c=None', 'd=True', "e='hello'",
              'f=(1, (2, 3))'])

  def test_dotted_name(self):
    if sys.version_info >= (3, 0):
      print('Warning: Doc generation is not supported from python3.')
      return

    # pylint: disable=g-bad-name
    class a(object):

      class b(object):

        class c(object):

          class d(object):

            def __init__(self, *args):
              pass
    # pylint: enable=g-bad-name

    e = {'f': 1}

    def example_fun(arg1=a.b.c.d, arg2=a.b.c.d(1, 2), arg3=e['f']):  # pylint: disable=unused-argument
      pass

    sig = parser._generate_signature(example_fun, reverse_index={})
    self.assertEqual(sig, ['arg1=a.b.c.d', 'arg2=a.b.c.d(1, 2)', "arg3=e['f']"])


if __name__ == '__main__':
  googletest.main()
sumpfgottheit/pdu1800_data_provider
refs/heads/master
pygame32/pygame/tests/test_utils/arrinter.py
18
import sys import ctypes from ctypes import * import unittest __all__ = ['PAI_CONTIGUOUS', 'PAI_FORTRAN', 'PAI_ALIGNED', 'PAI_NOTSWAPPED', 'PAI_WRITEABLE', 'PAI_ARR_HAS_DESCR', 'ArrayInterface',] try: c_ssize_t # Undefined in early Python versions except NameError: if sizeof(c_uint) == sizeof(c_void_p): c_size_t = c_uint c_ssize_t = c_int elif sizeof(c_ulong) == sizeof(c_void_p): c_size_t = c_ulong c_ssize_t = c_long elif sizeof(c_ulonglong) == sizeof(c_void_p): c_size_t = c_ulonglong c_ssize_t = c_longlong PY3 = 0 if sys.version_info >= (3,): PY3 = 1 SIZEOF_VOID_P = sizeof(c_void_p) if SIZEOF_VOID_P <= sizeof(c_int): Py_intptr_t = c_int elif SIZEOF_VOID_P <= sizeof(c_long): Py_intptr_t = c_long elif 'c_longlong' in globals() and SIZEOF_VOID_P <= sizeof(c_longlong): Py_intptr_t = c_longlong else: raise RuntimeError("Unrecognized pointer size %i" % (pointer_size,)) class PyArrayInterface(Structure): _fields_ = [('two', c_int), ('nd', c_int), ('typekind', c_char), ('itemsize', c_int), ('flags', c_int), ('shape', POINTER(Py_intptr_t)), ('strides', POINTER(Py_intptr_t)), ('data', c_void_p), ('descr', py_object)] PAI_Ptr = POINTER(PyArrayInterface) try: PyCObject_AsVoidPtr = pythonapi.PyCObject_AsVoidPtr except AttributeError: def PyCObject_AsVoidPtr(o): raise TypeError("Not available") else: PyCObject_AsVoidPtr.restype = c_void_p PyCObject_AsVoidPtr.argtypes = [py_object] PyCObject_GetDesc = pythonapi.PyCObject_GetDesc PyCObject_GetDesc.restype = c_void_p PyCObject_GetDesc.argtypes = [py_object] try: PyCapsule_IsValid = pythonapi.PyCapsule_IsValid except AttributeError: def PyCapsule_IsValid(capsule, name): return 0 else: PyCapsule_IsValid.restype = c_int PyCapsule_IsValid.argtypes = [py_object, c_char_p] PyCapsule_GetPointer = pythonapi.PyCapsule_GetPointer PyCapsule_GetPointer.restype = c_void_p PyCapsule_GetPointer.argtypes = [py_object, c_char_p] PyCapsule_GetContext = pythonapi.PyCapsule_GetContext PyCapsule_GetContext.restype = c_void_p 
PyCapsule_GetContext.argtypes = [py_object] if PY3: PyCapsule_Destructor = CFUNCTYPE(None, py_object) PyCapsule_New = pythonapi.PyCapsule_New PyCapsule_New.restype = py_object PyCapsule_New.argtypes = [c_void_p, c_char_p, POINTER(PyCapsule_Destructor)] def capsule_new(p): return PyCapsule_New(addressof(p), None, None) else: PyCObject_Destructor = CFUNCTYPE(None, c_void_p) PyCObject_FromVoidPtr = pythonapi.PyCObject_FromVoidPtr PyCObject_FromVoidPtr.restype = py_object PyCObject_FromVoidPtr.argtypes = [c_void_p, POINTER(PyCObject_Destructor)] def capsule_new(p): return PyCObject_FromVoidPtr(addressof(p), None) PAI_CONTIGUOUS = 0x01 PAI_FORTRAN = 0x02 PAI_ALIGNED = 0x100 PAI_NOTSWAPPED = 0x200 PAI_WRITEABLE = 0x400 PAI_ARR_HAS_DESCR = 0x800 class ArrayInterface(object): def __init__(self, arr): try: self._cobj = arr.__array_struct__ except AttributeError: raise TypeError("The array object lacks an array structure") if not self._cobj: raise TypeError("The array object has a NULL array structure value") try: vp = PyCObject_AsVoidPtr(self._cobj) except TypeError: if PyCapsule_IsValid(self._cobj, None): vp = PyCapsule_GetPointer(self._cobj, None) else: raise TypeError("The array object has an invalid array structure") self.desc = PyCapsule_GetContext(self._cobj) else: self.desc = PyCObject_GetDesc(self._cobj) self._inter = cast(vp, PAI_Ptr)[0] def __getattr__(self, name): if (name == 'typekind'): return self._inter.typekind.decode('latin-1') return getattr(self._inter, name) def __str__(self): if isinstance(self.desc, tuple): ver = self.desc[0] else: ver = "N/A" return ("nd: %i\n" "typekind: %s\n" "itemsize: %i\n" "flags: %s\n" "shape: %s\n" "strides: %s\n" "ver: %s\n" % (self.nd, self.typekind, self.itemsize, format_flags(self.flags), format_shape(self.nd, self.shape), format_strides(self.nd, self.strides), ver)) def format_flags(flags): names = [] for flag, name in [(PAI_CONTIGUOUS, 'CONTIGUOUS'), (PAI_FORTRAN, 'FORTRAN'), (PAI_ALIGNED, 'ALIGNED'), (PAI_NOTSWAPPED, 
'NOTSWAPPED'), (PAI_WRITEABLE, 'WRITEABLE'), (PAI_ARR_HAS_DESCR, 'ARR_HAS_DESCR')]: if flag & flags: names.append(name) return ', '.join(names) def format_shape(nd, shape): return ', '.join([str(shape[i]) for i in range(nd)]) def format_strides(nd, strides): return ', '.join([str(strides[i]) for i in range(nd)]) class Exporter(object): def __init__(self, shape, typekind=None, itemsize=None, strides=None, descr=None, flags=None): if typekind is None: typekind = 'u' if itemsize is None: itemsize = 1 if flags is None: flags = PAI_WRITEABLE | PAI_ALIGNED | PAI_NOTSWAPPED if descr is not None: flags |= PAI_ARR_HAS_DESCR if len(typekind) != 1: raise ValueError("Argument 'typekind' must be length 1 string") nd = len(shape) self.typekind = typekind self.itemsize = itemsize self.nd = nd self.shape = tuple(shape) self._shape = (c_ssize_t * self.nd)(*self.shape) if strides is None: self._strides = (c_ssize_t * self.nd)() self._strides[self.nd - 1] = self.itemsize for i in range(self.nd - 1, 0, -1): self._strides[i - 1] = self.shape[i] * self._strides[i] strides = tuple(self._strides) self.strides = strides elif len(strides) == nd: self.strides = tuple(strides) self._strides = (c_ssize_t * self.nd)(*self.strides) else: raise ValueError("Mismatch in length of strides and shape") self.descr = descr if self.is_contiguous('C'): flags |= PAI_CONTIGUOUS if self.is_contiguous('F'): flags |= PAI_FORTRAN self.flags = flags sz = max(shape[i] * strides[i] for i in range(nd)) self._data = (c_ubyte * sz)() self.data = addressof(self._data) self._inter = PyArrayInterface(2, nd, typekind.encode('latin_1'), itemsize, flags, self._shape, self._strides, self.data, descr) self.len = itemsize for i in range(nd): self.len *= self.shape[i] __array_struct__ = property(lambda self: capsule_new(self._inter)) def is_contiguous(self, fortran): if fortran in "CA": if self.strides[-1] == self.itemsize: for i in range(self.nd - 1, 0, -1): if self.strides[i - 1] != self.shape[i] * self.strides[i]: break 
else: return True if fortran in "FA": if self.strides[0] == self.itemsize: for i in range(0, self.nd - 1): if self.strides[i + 1] != self.shape[i] * self.strides[i]: break else: return True return False class Array(Exporter): _ctypes = {('u', 1): c_uint8, ('u', 2): c_uint16, ('u', 4): c_uint32, ('u', 8): c_uint64, ('i', 1): c_int8, ('i', 2): c_int16, ('i', 4): c_int32, ('i', 8): c_int64} def __init__(self, *args, **kwds): super(Array, self).__init__(*args, **kwds) try: if self.flags & PAI_NOTSWAPPED: ct = self._ctypes[self.typekind, self.itemsize] elif c_int.__ctype_le__ is c_int: ct = self._ctypes[self.typekind, self.itemsize].__ctype_be__ else: ct = self._ctypes[self.typekind, self.itemsize].__ctype_le__ except KeyError: ct = c_uint8 * self.itemsize self._ctype = ct self._ctype_p = POINTER(ct) def __getitem__(self, key): return cast(self._addr_at(key), self._ctype_p)[0] def __setitem__(self, key, value): cast(self._addr_at(key), self._ctype_p)[0] = value def _addr_at(self, key): if not isinstance(key, tuple): key = key, if len(key) != self.nd: raise ValueError("wrong number of indexes") for i in range(self.nd): if not (0 <= key[i] < self.shape[i]): raise IndexError("index {} out of range".format(i)) return self.data + sum(i * s for i, s in zip(key, self.strides)) class ExporterTest(unittest.TestCase): def test_strides(self): self.check_args(0, (10,), 'u', (2,), 20, 20, 2) self.check_args(0, (5, 3), 'u', (6, 2), 30, 30, 2) self.check_args(0, (7, 3, 5), 'u', (30, 10, 2), 210, 210, 2) self.check_args(0, (13, 5, 11, 3), 'u', (330, 66, 6, 2), 4290, 4290, 2) self.check_args(3, (7, 3, 5), 'i', (2, 14, 42), 210, 210, 2) self.check_args(3, (7, 3, 5), 'x', (2, 16, 48), 210, 240, 2) self.check_args(3, (13, 5, 11, 3), '%', (440, 88, 8, 2), 4290, 5720, 2) self.check_args(3, (7, 5), '-', (15, 3), 105, 105, 3) self.check_args(3, (7, 5), '*', (3, 21), 105, 105, 3) self.check_args(3, (7, 5), ' ', (3, 24), 105, 120, 3) def test_is_contiguous(self): a = Exporter((10,), itemsize=2) 
self.assertTrue(a.is_contiguous('C')) self.assertTrue(a.is_contiguous('F')) self.assertTrue(a.is_contiguous('A')) a = Exporter((10, 4), itemsize=2) self.assertTrue(a.is_contiguous('C')) self.assertTrue(a.is_contiguous('A')) self.assertFalse(a.is_contiguous('F')) a = Exporter((13, 5, 11, 3), itemsize=2, strides=(330, 66, 6, 2)) self.assertTrue(a.is_contiguous('C')) self.assertTrue(a.is_contiguous('A')) self.assertFalse(a.is_contiguous('F')) a = Exporter((10, 4), itemsize=2, strides=(2, 20)) self.assertTrue(a.is_contiguous('F')) self.assertTrue(a.is_contiguous('A')) self.assertFalse(a.is_contiguous('C')) a = Exporter((13, 5, 11, 3), itemsize=2, strides=(2, 26, 130, 1430)) self.assertTrue(a.is_contiguous('F')) self.assertTrue(a.is_contiguous('A')) self.assertFalse(a.is_contiguous('C')) a = Exporter((2, 11, 6, 4), itemsize=2, strides=(576, 48, 8, 2)) self.assertFalse(a.is_contiguous('A')) a = Exporter((2, 11, 6, 4), itemsize=2, strides=(2, 4, 48, 288)) self.assertFalse(a.is_contiguous('A')) a = Exporter((3, 2, 2), itemsize=2, strides=(16, 8, 4)) self.assertFalse(a.is_contiguous('A')) a = Exporter((3, 2, 2), itemsize=2, strides=(4, 12, 24)) self.assertFalse(a.is_contiguous('A')) def check_args(self, call_flags, shape, typekind, strides, length, bufsize, itemsize, offset=0): if call_flags & 1: typekind_arg = typekind else: typekind_arg = None if call_flags & 2: strides_arg = strides else: strides_arg = None a = Exporter(shape, itemsize=itemsize, strides=strides_arg) self.assertEqual(sizeof(a._data), bufsize) self.assertEqual(a.data, ctypes.addressof(a._data) + offset) m = ArrayInterface(a) self.assertEqual(m.data, a.data) self.assertEqual(m.itemsize, itemsize) self.assertEqual(tuple(m.shape[0:m.nd]), shape) self.assertEqual(tuple(m.strides[0:m.nd]), strides) class ArrayTest(unittest.TestCase): def __init__(self, *args, **kwds): unittest.TestCase.__init__(self, *args, **kwds) self.a = Array((20, 15), 'i', 4) def setUp(self): # Every test starts with a zeroed array. 
memset(self.a.data, 0, sizeof(self.a._data)) def test__addr_at(self): a = self.a self.assertEqual(a._addr_at((0, 0)), a.data) self.assertEqual(a._addr_at((0, 1)), a.data + 4) self.assertEqual(a._addr_at((1, 0)), a.data + 60) self.assertEqual(a._addr_at((1, 1)), a.data + 64) def test_indices(self): a = self.a self.assertEqual(a[0, 0], 0) self.assertEqual(a[19, 0], 0) self.assertEqual(a[0, 14], 0) self.assertEqual(a[19, 14], 0) self.assertEqual(a[5, 8], 0) a[0, 0] = 12 a[5, 8] = 99 self.assertEqual(a[0, 0], 12) self.assertEqual(a[5, 8], 99) self.assertRaises(IndexError, a.__getitem__, (-1, 0)) self.assertRaises(IndexError, a.__getitem__, (0, -1)) self.assertRaises(IndexError, a.__getitem__, (20, 0)) self.assertRaises(IndexError, a.__getitem__, (0, 15)) self.assertRaises(ValueError, a.__getitem__, 0) self.assertRaises(ValueError, a.__getitem__, (0, 0, 0)) a = Array((3,), 'i', 4) a[1] = 333 self.assertEqual(a[1], 333) def test_typekind(self): a = Array((1,), 'i', 4) self.assertTrue(a._ctype is c_int32) self.assertTrue(a._ctype_p is POINTER(c_int32)) a = Array((1,), 'u', 4) self.assertTrue(a._ctype is c_uint32) self.assertTrue(a._ctype_p is POINTER(c_uint32)) a = Array((1,), 'f', 4) # float types unsupported: size system dependent ct = a._ctype self.assertTrue(issubclass(ct, ctypes.Array)) self.assertEqual(sizeof(ct), 4) def test_itemsize(self): for size in [1, 2, 4, 8]: a = Array((1,), 'i', size) ct = a._ctype self.assertTrue(issubclass(ct, ctypes._SimpleCData)) self.assertEqual(sizeof(ct), size) def test_oddball_itemsize(self): for size in [3, 5, 6, 7, 9]: a = Array((1,), 'i', size) ct = a._ctype self.assertTrue(issubclass(ct, ctypes.Array)) self.assertEqual(sizeof(ct), size) def test_byteswapped(self): a = Array((1,), 'u', 4, flags=(PAI_ALIGNED | PAI_WRITEABLE)) ct = a._ctype self.assertTrue(ct is not c_uint32) if sys.byteorder == 'little': self.assertTrue(ct is c_uint32.__ctype_be__) else: self.assertTrue(ct is c_uint32.__ctype_le__) i = 0xa0b0c0d n = c_uint32(i) 
a[0] = i self.assertEqual(a[0], i) self.assertEqual(a._data[0:4], cast(addressof(n), POINTER(c_uint8))[3:-1:-1]) if __name__ == '__main__': unittest.main()
PetePriority/home-assistant
refs/heads/dev
homeassistant/components/mysensors/light.py
5
"""
Support for MySensors lights.

For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.mysensors/
"""
from homeassistant.components import mysensors
from homeassistant.components.light import (
    ATTR_BRIGHTNESS, ATTR_HS_COLOR, ATTR_WHITE_VALUE, DOMAIN,
    SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_WHITE_VALUE, Light)
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.util.color import rgb_hex_to_rgb_list
import homeassistant.util.color as color_util

# RGBW lights support both color and a separate white channel.
SUPPORT_MYSENSORS_RGBW = SUPPORT_COLOR | SUPPORT_WHITE_VALUE


async def async_setup_platform(
        hass, config, async_add_entities, discovery_info=None):
    """Set up the mysensors platform for lights."""
    # Map MySensors S_* child types to the matching entity class.
    device_class_map = {
        'S_DIMMER': MySensorsLightDimmer,
        'S_RGB_LIGHT': MySensorsLightRGB,
        'S_RGBW_LIGHT': MySensorsLightRGBW,
    }
    mysensors.setup_mysensors_platform(
        hass, DOMAIN, discovery_info, device_class_map,
        async_add_entities=async_add_entities)


class MySensorsLight(mysensors.device.MySensorsEntity, Light):
    """Representation of a MySensors Light child node."""

    def __init__(self, *args):
        """Initialize a MySensors Light."""
        super().__init__(*args)
        # Cached entity state; populated from gateway values on update.
        self._state = None
        self._brightness = None
        self._hs = None
        self._white = None

    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        return self._brightness

    @property
    def hs_color(self):
        """Return the hs color value [int, int]."""
        return self._hs

    @property
    def white_value(self):
        """Return the white value of this light between 0..255."""
        return self._white

    @property
    def assumed_state(self):
        """Return true if unable to access real state of entity."""
        return self.gateway.optimistic

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._state

    def _turn_on_light(self):
        """Turn on light child device."""
        set_req = self.gateway.const.SetReq

        # Already on: nothing to send.
        if self._state:
            return
        self.gateway.set_child_value(
            self.node_id, self.child_id, set_req.V_LIGHT, 1)

        if self.gateway.optimistic:
            # optimistically assume that light has changed state
            self._state = True
            self._values[set_req.V_LIGHT] = STATE_ON

    def _turn_on_dimmer(self, **kwargs):
        """Turn on dimmer child device."""
        set_req = self.gateway.const.SetReq
        brightness = self._brightness

        # Skip if no brightness requested, unchanged, or unsupported by child.
        if ATTR_BRIGHTNESS not in kwargs or \
                kwargs[ATTR_BRIGHTNESS] == self._brightness or \
                set_req.V_DIMMER not in self._values:
            return
        brightness = kwargs[ATTR_BRIGHTNESS]
        # MySensors dimmers take a 0..100 percentage, HA uses 0..255.
        percent = round(100 * brightness / 255)
        self.gateway.set_child_value(
            self.node_id, self.child_id, set_req.V_DIMMER, percent)

        if self.gateway.optimistic:
            # optimistically assume that light has changed state
            self._brightness = brightness
            self._values[set_req.V_DIMMER] = percent

    def _turn_on_rgb_and_w(self, hex_template, **kwargs):
        """Turn on RGB or RGBW child device."""
        rgb = list(color_util.color_hs_to_RGB(*self._hs))
        white = self._white
        hex_color = self._values.get(self.value_type)
        hs_color = kwargs.get(ATTR_HS_COLOR)
        if hs_color is not None:
            new_rgb = color_util.color_hs_to_RGB(*hs_color)
        else:
            new_rgb = None
        new_white = kwargs.get(ATTR_WHITE_VALUE)

        if new_rgb is None and new_white is None:
            return
        if new_rgb is not None:
            rgb = list(new_rgb)
        # The 4-byte template means RGBW: append the white channel byte.
        if hex_template == '%02x%02x%02x%02x':
            if new_white is not None:
                rgb.append(new_white)
            else:
                rgb.append(white)
        hex_color = hex_template % tuple(rgb)
        if len(rgb) > 3:
            white = rgb.pop()
        self.gateway.set_child_value(
            self.node_id, self.child_id, self.value_type, hex_color)

        if self.gateway.optimistic:
            # optimistically assume that light has changed state
            self._hs = color_util.color_RGB_to_hs(*rgb)
            self._white = white
            self._values[self.value_type] = hex_color

    async def async_turn_off(self, **kwargs):
        """Turn the device off."""
        value_type = self.gateway.const.SetReq.V_LIGHT
        self.gateway.set_child_value(
            self.node_id, self.child_id, value_type, 0)
        if self.gateway.optimistic:
            # optimistically assume that light has changed state
            self._state = False
            self._values[value_type] = STATE_OFF
            self.async_schedule_update_ha_state()

    def _async_update_light(self):
        """Update the controller with values from light child."""
        value_type = self.gateway.const.SetReq.V_LIGHT
        self._state = self._values[value_type] == STATE_ON

    def _async_update_dimmer(self):
        """Update the controller with values from dimmer child."""
        value_type = self.gateway.const.SetReq.V_DIMMER
        if value_type in self._values:
            # Scale the 0..100 gateway percentage back to HA's 0..255.
            self._brightness = round(255 * int(self._values[value_type]) / 100)
            if self._brightness == 0:
                self._state = False

    def _async_update_rgb_or_w(self):
        """Update the controller with values from RGB or RGBW child."""
        value = self._values[self.value_type]
        color_list = rgb_hex_to_rgb_list(value)
        # A 4th component in the hex value is the white channel (RGBW).
        if len(color_list) > 3:
            self._white = color_list.pop()
        self._hs = color_util.color_RGB_to_hs(*color_list)


class MySensorsLightDimmer(MySensorsLight):
    """Dimmer child class to MySensorsLight."""

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_BRIGHTNESS

    async def async_turn_on(self, **kwargs):
        """Turn the device on."""
        self._turn_on_light()
        self._turn_on_dimmer(**kwargs)
        if self.gateway.optimistic:
            self.async_schedule_update_ha_state()

    async def async_update(self):
        """Update the controller with the latest value from a sensor."""
        await super().async_update()
        self._async_update_light()
        self._async_update_dimmer()


class MySensorsLightRGB(MySensorsLight):
    """RGB child class to MySensorsLight."""

    @property
    def supported_features(self):
        """Flag supported features."""
        set_req = self.gateway.const.SetReq
        if set_req.V_DIMMER in self._values:
            return SUPPORT_BRIGHTNESS | SUPPORT_COLOR
        return SUPPORT_COLOR

    async def async_turn_on(self, **kwargs):
        """Turn the device on."""
        self._turn_on_light()
        self._turn_on_dimmer(**kwargs)
        # 3-byte hex template: plain RGB, no white channel.
        self._turn_on_rgb_and_w('%02x%02x%02x', **kwargs)
        if self.gateway.optimistic:
            self.async_schedule_update_ha_state()

    async def async_update(self):
        """Update the controller with the latest value from a sensor."""
        await super().async_update()
        self._async_update_light()
        self._async_update_dimmer()
        self._async_update_rgb_or_w()


class MySensorsLightRGBW(MySensorsLightRGB):
    """RGBW child class to MySensorsLightRGB."""

    # pylint: disable=too-many-ancestors

    @property
    def supported_features(self):
        """Flag supported features."""
        set_req = self.gateway.const.SetReq
        if set_req.V_DIMMER in self._values:
            return SUPPORT_BRIGHTNESS | SUPPORT_MYSENSORS_RGBW
        return SUPPORT_MYSENSORS_RGBW

    async def async_turn_on(self, **kwargs):
        """Turn the device on."""
        self._turn_on_light()
        self._turn_on_dimmer(**kwargs)
        # 4-byte hex template: RGB plus white channel.
        self._turn_on_rgb_and_w('%02x%02x%02x%02x', **kwargs)
        if self.gateway.optimistic:
            self.async_schedule_update_ha_state()
xbmc/xbmc-antiquated
refs/heads/master
xbmc/lib/libPython/Python/Lib/test/test_socket.py
5
#!/usr/bin/env python

# NOTE: This is Python 2 code (`raise X, "msg"`, `except E, v`, `Queue`,
# `thread` module); it must not be run under Python 3 unmodified.

import unittest
from test import test_support

import socket
import select
import time
import thread, threading
import Queue
import sys
from weakref import proxy

PORT = 50007
HOST = 'localhost'
MSG = 'Michael Gilfix was here\n'

class SocketTCPTest(unittest.TestCase):
    """Fixture providing a listening TCP server socket on HOST:PORT."""

    def setUp(self):
        self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # bind_port may pick a different free port; remember it globally
        # so client threads connect to the right one.
        global PORT
        PORT = test_support.bind_port(self.serv, HOST, PORT)
        self.serv.listen(1)

    def tearDown(self):
        self.serv.close()
        self.serv = None

class SocketUDPTest(unittest.TestCase):
    """Fixture providing a bound UDP server socket on HOST:PORT."""

    def setUp(self):
        self.serv = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        global PORT
        PORT = test_support.bind_port(self.serv, HOST, PORT)

    def tearDown(self):
        self.serv.close()
        self.serv = None

class ThreadableTest:
    """Threadable Test class

    The ThreadableTest class makes it easy to create a threaded
    client/server pair from an existing unit test. To create a
    new threaded class from an existing unit test, use multiple
    inheritance:

        class NewClass (OldClass, ThreadableTest):
            pass

    This class defines two new fixture functions with obvious
    purposes for overriding:

        clientSetUp ()
        clientTearDown ()

    Any new test functions within the class must then define
    tests in pairs, where the test name is preceded with a
    '_' to indicate the client portion of the test. Ex:

        def testFoo(self):
            # Server portion

        def _testFoo(self):
            # Client portion

    Any exceptions raised by the clients during their tests
    are caught and transferred to the main thread to alert
    the testing framework.

    Note, the server setup function cannot call any blocking
    functions that rely on the client thread during setup,
    unless serverExplicitReady() is called just before
    the blocking call (such as in setting up a client/server
    connection and performing the accept() in setUp().
    """

    def __init__(self):
        # Swap the true setup function
        self.__setUp = self.setUp
        self.__tearDown = self.tearDown
        self.setUp = self._setUp
        self.tearDown = self._tearDown

    def serverExplicitReady(self):
        """This method allows the server to explicitly indicate that
        it wants the client thread to proceed. This is useful if the
        server is about to execute a blocking routine that is
        dependent upon the client thread during its setup routine."""
        self.server_ready.set()

    def _setUp(self):
        # Events coordinating the server/client thread handshake.
        self.server_ready = threading.Event()
        self.client_ready = threading.Event()
        self.done = threading.Event()
        self.queue = Queue.Queue(1)

        # Do some munging to start the client test.
        methodname = self.id()
        i = methodname.rfind('.')
        methodname = methodname[i+1:]
        # The client half of a test is the same name prefixed with '_'.
        test_method = getattr(self, '_' + methodname)
        self.client_thread = thread.start_new_thread(
            self.clientRun, (test_method,))

        self.__setUp()
        if not self.server_ready.isSet():
            self.server_ready.set()
        self.client_ready.wait()

    def _tearDown(self):
        self.__tearDown()
        self.done.wait()

        # Any exception the client thread queued fails the test here.
        if not self.queue.empty():
            msg = self.queue.get()
            self.fail(msg)

    def clientRun(self, test_func):
        # Runs in the client thread: wait for the server, then run the
        # '_'-prefixed client half of the test.
        self.server_ready.wait()
        self.client_ready.set()
        self.clientSetUp()
        if not callable(test_func):
            raise TypeError, "test_func must be a callable function"
        try:
            test_func()
        except Exception, strerror:
            # Transfer the failure to the main thread via the queue.
            self.queue.put(strerror)
        self.clientTearDown()

    def clientSetUp(self):
        raise NotImplementedError, "clientSetUp must be implemented."

    def clientTearDown(self):
        self.done.set()
        thread.exit()

class ThreadedTCPSocketTest(SocketTCPTest, ThreadableTest):
    """TCP server fixture plus a client socket created in the client thread."""

    def __init__(self, methodName='runTest'):
        SocketTCPTest.__init__(self, methodName=methodName)
        ThreadableTest.__init__(self)

    def clientSetUp(self):
        self.cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    def clientTearDown(self):
        self.cli.close()
        self.cli = None
        ThreadableTest.clientTearDown(self)

class ThreadedUDPSocketTest(SocketUDPTest, ThreadableTest):
    """UDP server fixture plus a client socket created in the client thread."""

    def __init__(self, methodName='runTest'):
        SocketUDPTest.__init__(self, methodName=methodName)
        ThreadableTest.__init__(self)

    def clientSetUp(self):
        self.cli = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

class SocketConnectedTest(ThreadedTCPSocketTest):
    """Fixture with an established TCP connection (server + client side)."""

    def __init__(self, methodName='runTest'):
        ThreadedTCPSocketTest.__init__(self, methodName=methodName)

    def setUp(self):
        ThreadedTCPSocketTest.setUp(self)
        # Indicate explicitly we're ready for the client thread to
        # proceed and then perform the blocking call to accept
        self.serverExplicitReady()
        conn, addr = self.serv.accept()
        self.cli_conn = conn

    def tearDown(self):
        self.cli_conn.close()
        self.cli_conn = None
        ThreadedTCPSocketTest.tearDown(self)

    def clientSetUp(self):
        ThreadedTCPSocketTest.clientSetUp(self)
        self.cli.connect((HOST, PORT))
        self.serv_conn = self.cli

    def clientTearDown(self):
        self.serv_conn.close()
        self.serv_conn = None
        ThreadedTCPSocketTest.clientTearDown(self)

class SocketPairTest(unittest.TestCase, ThreadableTest):
    """Fixture built on socket.socketpair() instead of a listening server."""

    def __init__(self, methodName='runTest'):
        unittest.TestCase.__init__(self, methodName=methodName)
        ThreadableTest.__init__(self)

    def setUp(self):
        self.serv, self.cli = socket.socketpair()

    def tearDown(self):
        self.serv.close()
        self.serv = None

    def clientSetUp(self):
        pass

    def clientTearDown(self):
        self.cli.close()
        self.cli = None
        ThreadableTest.clientTearDown(self)

#######################################################################
## Begin Tests

class GeneralModuleTests(unittest.TestCase):

    def test_weakref(self):
        s = 
socket.socket(socket.AF_INET, socket.SOCK_STREAM) p = proxy(s) self.assertEqual(p.fileno(), s.fileno()) s.close() s = None try: p.fileno() except ReferenceError: pass else: self.fail('Socket proxy still exists') def testSocketError(self): # Testing socket module exceptions def raise_error(*args, **kwargs): raise socket.error def raise_herror(*args, **kwargs): raise socket.herror def raise_gaierror(*args, **kwargs): raise socket.gaierror self.failUnlessRaises(socket.error, raise_error, "Error raising socket exception.") self.failUnlessRaises(socket.error, raise_herror, "Error raising socket exception.") self.failUnlessRaises(socket.error, raise_gaierror, "Error raising socket exception.") def testCrucialConstants(self): # Testing for mission critical constants socket.AF_INET socket.SOCK_STREAM socket.SOCK_DGRAM socket.SOCK_RAW socket.SOCK_RDM socket.SOCK_SEQPACKET socket.SOL_SOCKET socket.SO_REUSEADDR def testHostnameRes(self): # Testing hostname resolution mechanisms hostname = socket.gethostname() try: ip = socket.gethostbyname(hostname) except socket.error: # Probably name lookup wasn't set up right; skip this test return self.assert_(ip.find('.') >= 0, "Error resolving host to ip.") try: hname, aliases, ipaddrs = socket.gethostbyaddr(ip) except socket.error: # Probably a similar problem as above; skip this test return all_host_names = [hostname, hname] + aliases fqhn = socket.getfqdn(ip) if not fqhn in all_host_names: self.fail("Error testing host resolution mechanisms.") def testRefCountGetNameInfo(self): # Testing reference count for getnameinfo import sys if hasattr(sys, "getrefcount"): try: # On some versions, this loses a reference orig = sys.getrefcount(__name__) socket.getnameinfo(__name__,0) except SystemError: if sys.getrefcount(__name__) <> orig: self.fail("socket.getnameinfo loses a reference") def testInterpreterCrash(self): # Making sure getnameinfo doesn't crash the interpreter try: # On some versions, this crashes the interpreter. 
socket.getnameinfo(('x', 0, 0, 0), 0) except socket.error: pass def testNtoH(self): # This just checks that htons etc. are their own inverse, # when looking at the lower 16 or 32 bits. sizes = {socket.htonl: 32, socket.ntohl: 32, socket.htons: 16, socket.ntohs: 16} for func, size in sizes.items(): mask = (1L<<size) - 1 for i in (0, 1, 0xffff, ~0xffff, 2, 0x01234567, 0x76543210): self.assertEqual(i & mask, func(func(i&mask)) & mask) swapped = func(mask) self.assertEqual(swapped & mask, mask) self.assertRaises(OverflowError, func, 1L<<34) def testGetServBy(self): eq = self.assertEqual # Find one service that exists, then check all the related interfaces. # I've ordered this by protocols that have both a tcp and udp # protocol, at least for modern Linuxes. if sys.platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', 'darwin'): # avoid the 'echo' service on this platform, as there is an # assumption breaking non-standard port/protocol entry services = ('daytime', 'qotd', 'domain') else: services = ('echo', 'daytime', 'domain') for service in services: try: port = socket.getservbyname(service, 'tcp') break except socket.error: pass else: raise socket.error # Try same call with optional protocol omitted port2 = socket.getservbyname(service) eq(port, port2) # Try udp, but don't barf it it doesn't exist try: udpport = socket.getservbyname(service, 'udp') except socket.error: udpport = None else: eq(udpport, port) # Now make sure the lookup by port returns the same service name eq(socket.getservbyport(port2), service) eq(socket.getservbyport(port, 'tcp'), service) if udpport is not None: eq(socket.getservbyport(udpport, 'udp'), service) def testDefaultTimeout(self): # Testing default timeout # The default timeout should initially be None self.assertEqual(socket.getdefaulttimeout(), None) s = socket.socket() self.assertEqual(s.gettimeout(), None) s.close() # Set the default timeout to 10, and see if it propagates socket.setdefaulttimeout(10) 
self.assertEqual(socket.getdefaulttimeout(), 10) s = socket.socket() self.assertEqual(s.gettimeout(), 10) s.close() # Reset the default timeout to None, and see if it propagates socket.setdefaulttimeout(None) self.assertEqual(socket.getdefaulttimeout(), None) s = socket.socket() self.assertEqual(s.gettimeout(), None) s.close() # Check that setting it to an invalid value raises ValueError self.assertRaises(ValueError, socket.setdefaulttimeout, -1) # Check that setting it to an invalid type raises TypeError self.assertRaises(TypeError, socket.setdefaulttimeout, "spam") def testIPv4toString(self): if not hasattr(socket, 'inet_pton'): return # No inet_pton() on this platform from socket import inet_aton as f, inet_pton, AF_INET g = lambda a: inet_pton(AF_INET, a) self.assertEquals('\x00\x00\x00\x00', f('0.0.0.0')) self.assertEquals('\xff\x00\xff\x00', f('255.0.255.0')) self.assertEquals('\xaa\xaa\xaa\xaa', f('170.170.170.170')) self.assertEquals('\x01\x02\x03\x04', f('1.2.3.4')) self.assertEquals('\xff\xff\xff\xff', f('255.255.255.255')) self.assertEquals('\x00\x00\x00\x00', g('0.0.0.0')) self.assertEquals('\xff\x00\xff\x00', g('255.0.255.0')) self.assertEquals('\xaa\xaa\xaa\xaa', g('170.170.170.170')) self.assertEquals('\xff\xff\xff\xff', g('255.255.255.255')) def testIPv6toString(self): if not hasattr(socket, 'inet_pton'): return # No inet_pton() on this platform try: from socket import inet_pton, AF_INET6, has_ipv6 if not has_ipv6: return except ImportError: return f = lambda a: inet_pton(AF_INET6, a) self.assertEquals('\x00' * 16, f('::')) self.assertEquals('\x00' * 16, f('0::0')) self.assertEquals('\x00\x01' + '\x00' * 14, f('1::')) self.assertEquals( '\x45\xef\x76\xcb\x00\x1a\x56\xef\xaf\xeb\x0b\xac\x19\x24\xae\xae', f('45ef:76cb:1a:56ef:afeb:bac:1924:aeae') ) def testStringToIPv4(self): if not hasattr(socket, 'inet_ntop'): return # No inet_ntop() on this platform from socket import inet_ntoa as f, inet_ntop, AF_INET g = lambda a: inet_ntop(AF_INET, a) 
self.assertEquals('1.0.1.0', f('\x01\x00\x01\x00')) self.assertEquals('170.85.170.85', f('\xaa\x55\xaa\x55')) self.assertEquals('255.255.255.255', f('\xff\xff\xff\xff')) self.assertEquals('1.2.3.4', f('\x01\x02\x03\x04')) self.assertEquals('1.0.1.0', g('\x01\x00\x01\x00')) self.assertEquals('170.85.170.85', g('\xaa\x55\xaa\x55')) self.assertEquals('255.255.255.255', g('\xff\xff\xff\xff')) def testStringToIPv6(self): if not hasattr(socket, 'inet_ntop'): return # No inet_ntop() on this platform try: from socket import inet_ntop, AF_INET6, has_ipv6 if not has_ipv6: return except ImportError: return f = lambda a: inet_ntop(AF_INET6, a) self.assertEquals('::', f('\x00' * 16)) self.assertEquals('::1', f('\x00' * 15 + '\x01')) self.assertEquals( 'aef:b01:506:1001:ffff:9997:55:170', f('\x0a\xef\x0b\x01\x05\x06\x10\x01\xff\xff\x99\x97\x00\x55\x01\x70') ) # XXX The following don't test module-level functionality... def testSockName(self): # Testing getsockname() sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.bind(("0.0.0.0", PORT+1)) name = sock.getsockname() self.assertEqual(name, ("0.0.0.0", PORT+1)) def testGetSockOpt(self): # Testing getsockopt() # We know a socket should start without reuse==0 sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) reuse = sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) self.failIf(reuse != 0, "initial mode is reuse") def testSetSockOpt(self): # Testing setsockopt() sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) reuse = sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) self.failIf(reuse == 0, "failed to set reuse mode") def testSendAfterClose(self): # testing send() after close() with timeout sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.settimeout(1) sock.close() self.assertRaises(socket.error, sock.send, "spam") class BasicTCPTest(SocketConnectedTest): def __init__(self, methodName='runTest'): 
SocketConnectedTest.__init__(self, methodName=methodName) def testRecv(self): # Testing large receive over TCP msg = self.cli_conn.recv(1024) self.assertEqual(msg, MSG) def _testRecv(self): self.serv_conn.send(MSG) def testOverFlowRecv(self): # Testing receive in chunks over TCP seg1 = self.cli_conn.recv(len(MSG) - 3) seg2 = self.cli_conn.recv(1024) msg = seg1 + seg2 self.assertEqual(msg, MSG) def _testOverFlowRecv(self): self.serv_conn.send(MSG) def testRecvFrom(self): # Testing large recvfrom() over TCP msg, addr = self.cli_conn.recvfrom(1024) self.assertEqual(msg, MSG) def _testRecvFrom(self): self.serv_conn.send(MSG) def testOverFlowRecvFrom(self): # Testing recvfrom() in chunks over TCP seg1, addr = self.cli_conn.recvfrom(len(MSG)-3) seg2, addr = self.cli_conn.recvfrom(1024) msg = seg1 + seg2 self.assertEqual(msg, MSG) def _testOverFlowRecvFrom(self): self.serv_conn.send(MSG) def testSendAll(self): # Testing sendall() with a 2048 byte string over TCP msg = '' while 1: read = self.cli_conn.recv(1024) if not read: break msg += read self.assertEqual(msg, 'f' * 2048) def _testSendAll(self): big_chunk = 'f' * 2048 self.serv_conn.sendall(big_chunk) def testFromFd(self): # Testing fromfd() if not hasattr(socket, "fromfd"): return # On Windows, this doesn't exist fd = self.cli_conn.fileno() sock = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM) msg = sock.recv(1024) self.assertEqual(msg, MSG) def _testFromFd(self): self.serv_conn.send(MSG) def testShutdown(self): # Testing shutdown() msg = self.cli_conn.recv(1024) self.assertEqual(msg, MSG) def _testShutdown(self): self.serv_conn.send(MSG) self.serv_conn.shutdown(2) class BasicUDPTest(ThreadedUDPSocketTest): def __init__(self, methodName='runTest'): ThreadedUDPSocketTest.__init__(self, methodName=methodName) def testSendtoAndRecv(self): # Testing sendto() and Recv() over UDP msg = self.serv.recv(len(MSG)) self.assertEqual(msg, MSG) def _testSendtoAndRecv(self): self.cli.sendto(MSG, 0, (HOST, PORT)) def 
testRecvFrom(self): # Testing recvfrom() over UDP msg, addr = self.serv.recvfrom(len(MSG)) self.assertEqual(msg, MSG) def _testRecvFrom(self): self.cli.sendto(MSG, 0, (HOST, PORT)) class BasicSocketPairTest(SocketPairTest): def __init__(self, methodName='runTest'): SocketPairTest.__init__(self, methodName=methodName) def testRecv(self): msg = self.serv.recv(1024) self.assertEqual(msg, MSG) def _testRecv(self): self.cli.send(MSG) def testSend(self): self.serv.send(MSG) def _testSend(self): msg = self.cli.recv(1024) self.assertEqual(msg, MSG) class NonBlockingTCPTests(ThreadedTCPSocketTest): def __init__(self, methodName='runTest'): ThreadedTCPSocketTest.__init__(self, methodName=methodName) def testSetBlocking(self): # Testing whether set blocking works self.serv.setblocking(0) start = time.time() try: self.serv.accept() except socket.error: pass end = time.time() self.assert_((end - start) < 1.0, "Error setting non-blocking mode.") def _testSetBlocking(self): pass def testAccept(self): # Testing non-blocking accept self.serv.setblocking(0) try: conn, addr = self.serv.accept() except socket.error: pass else: self.fail("Error trying to do non-blocking accept.") read, write, err = select.select([self.serv], [], []) if self.serv in read: conn, addr = self.serv.accept() else: self.fail("Error trying to do accept after select.") def _testAccept(self): time.sleep(0.1) self.cli.connect((HOST, PORT)) def testConnect(self): # Testing non-blocking connect conn, addr = self.serv.accept() def _testConnect(self): self.cli.settimeout(10) self.cli.connect((HOST, PORT)) def testRecv(self): # Testing non-blocking recv conn, addr = self.serv.accept() conn.setblocking(0) try: msg = conn.recv(len(MSG)) except socket.error: pass else: self.fail("Error trying to do non-blocking recv.") read, write, err = select.select([conn], [], []) if conn in read: msg = conn.recv(len(MSG)) self.assertEqual(msg, MSG) else: self.fail("Error during select call to non-blocking socket.") def 
_testRecv(self): self.cli.connect((HOST, PORT)) time.sleep(0.1) self.cli.send(MSG) class FileObjectClassTestCase(SocketConnectedTest): bufsize = -1 # Use default buffer size def __init__(self, methodName='runTest'): SocketConnectedTest.__init__(self, methodName=methodName) def setUp(self): SocketConnectedTest.setUp(self) self.serv_file = self.cli_conn.makefile('rb', self.bufsize) def tearDown(self): self.serv_file.close() self.assert_(self.serv_file.closed) self.serv_file = None SocketConnectedTest.tearDown(self) def clientSetUp(self): SocketConnectedTest.clientSetUp(self) self.cli_file = self.serv_conn.makefile('wb') def clientTearDown(self): self.cli_file.close() self.assert_(self.cli_file.closed) self.cli_file = None SocketConnectedTest.clientTearDown(self) def testSmallRead(self): # Performing small file read test first_seg = self.serv_file.read(len(MSG)-3) second_seg = self.serv_file.read(3) msg = first_seg + second_seg self.assertEqual(msg, MSG) def _testSmallRead(self): self.cli_file.write(MSG) self.cli_file.flush() def testFullRead(self): # read until EOF msg = self.serv_file.read() self.assertEqual(msg, MSG) def _testFullRead(self): self.cli_file.write(MSG) self.cli_file.close() def testUnbufferedRead(self): # Performing unbuffered file read test buf = '' while 1: char = self.serv_file.read(1) if not char: break buf += char self.assertEqual(buf, MSG) def _testUnbufferedRead(self): self.cli_file.write(MSG) self.cli_file.flush() def testReadline(self): # Performing file readline test line = self.serv_file.readline() self.assertEqual(line, MSG) def _testReadline(self): self.cli_file.write(MSG) self.cli_file.flush() def testClosedAttr(self): self.assert_(not self.serv_file.closed) def _testClosedAttr(self): self.assert_(not self.cli_file.closed) class UnbufferedFileObjectClassTestCase(FileObjectClassTestCase): """Repeat the tests from FileObjectClassTestCase with bufsize==0. 
In this case (and in this case only), it should be possible to create a file object, read a line from it, create another file object, read another line from it, without loss of data in the first file object's buffer. Note that httplib relies on this when reading multiple requests from the same socket.""" bufsize = 0 # Use unbuffered mode def testUnbufferedReadline(self): # Read a line, create a new file object, read another line with it line = self.serv_file.readline() # first line self.assertEqual(line, "A. " + MSG) # first line self.serv_file = self.cli_conn.makefile('rb', 0) line = self.serv_file.readline() # second line self.assertEqual(line, "B. " + MSG) # second line def _testUnbufferedReadline(self): self.cli_file.write("A. " + MSG) self.cli_file.write("B. " + MSG) self.cli_file.flush() class LineBufferedFileObjectClassTestCase(FileObjectClassTestCase): bufsize = 1 # Default-buffered for reading; line-buffered for writing class SmallBufferedFileObjectClassTestCase(FileObjectClassTestCase): bufsize = 2 # Exercise the buffering code class TCPTimeoutTest(SocketTCPTest): def testTCPTimeout(self): def raise_timeout(*args, **kwargs): self.serv.settimeout(1.0) self.serv.accept() self.failUnlessRaises(socket.timeout, raise_timeout, "Error generating a timeout exception (TCP)") def testTimeoutZero(self): ok = False try: self.serv.settimeout(0.0) foo = self.serv.accept() except socket.timeout: self.fail("caught timeout instead of error (TCP)") except socket.error: ok = True except: self.fail("caught unexpected exception (TCP)") if not ok: self.fail("accept() returned success when we did not expect it") class UDPTimeoutTest(SocketTCPTest): def testUDPTimeout(self): def raise_timeout(*args, **kwargs): self.serv.settimeout(1.0) self.serv.recv(1024) self.failUnlessRaises(socket.timeout, raise_timeout, "Error generating a timeout exception (UDP)") def testTimeoutZero(self): ok = False try: self.serv.settimeout(0.0) foo = self.serv.recv(1024) except socket.timeout: 
self.fail("caught timeout instead of error (UDP)") except socket.error: ok = True except: self.fail("caught unexpected exception (UDP)") if not ok: self.fail("recv() returned success when we did not expect it") class TestExceptions(unittest.TestCase): def testExceptionTree(self): self.assert_(issubclass(socket.error, Exception)) self.assert_(issubclass(socket.herror, socket.error)) self.assert_(issubclass(socket.gaierror, socket.error)) self.assert_(issubclass(socket.timeout, socket.error)) def test_main(): tests = [GeneralModuleTests, BasicTCPTest, TCPTimeoutTest, TestExceptions] if sys.platform != 'mac': tests.extend([ BasicUDPTest, UDPTimeoutTest ]) tests.extend([ NonBlockingTCPTests, FileObjectClassTestCase, UnbufferedFileObjectClassTestCase, LineBufferedFileObjectClassTestCase, SmallBufferedFileObjectClassTestCase ]) if hasattr(socket, "socketpair"): tests.append(BasicSocketPairTest) test_support.run_unittest(*tests) if __name__ == "__main__": test_main()
ZerpaTechnology/AsenZor
refs/heads/master
apps/votSys/user/vistas/widgets/previewImg-marco.py
1
#!/usr/bin/python # -*- coding: utf-8 -*- print '''<div><style> .thumb { height: 150px; border: 1px solid #000; margin: 10px 5px 0 0; }</style><output id="'''+str(data['output'])+'''"></output></div>'''
vrsys/avangong
refs/heads/master
avango-menu/python/avango/menu/widget/_PushButton.py
6
# -*- Mode:Python -*- ########################################################################## # # # This file is part of AVANGO. # # # # Copyright 1997 - 2010 Fraunhofer-Gesellschaft zur Foerderung der # # angewandten Forschung (FhG), Munich, Germany. # # # # AVANGO is free software: you can redistribute it and/or modify # # it under the terms of the GNU Lesser General Public License as # # published by the Free Software Foundation, version 3. # # # # AVANGO is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU General Public License for more details. # # # # You should have received a copy of the GNU Lesser General Public # # License along with AVANGO. If not, see <http://www.gnu.org/licenses/>. # # # ########################################################################## from avango.menu.widget import WidgetBase import avango.menu.Preferences import avango.osg class PushButton(WidgetBase): Title = avango.SFString() IconFilenames = avango.MFString() IconSize = avango.SFFloat() IconPadding = avango.SFFloat() IconColor = avango.osg.SFVec4() IconDisabledColor = avango.osg.SFVec4() TextSize = avango.SFFloat() TextColor = avango.osg.SFVec4() TextDisabledColor = avango.osg.SFVec4() TextFontname = avango.SFString() CloseOnRelease = avango.SFBool() def __init__(self): self.super(PushButton).__init__() # preferences field connections self.IconSize.connect_from(avango.menu.Preferences.widget.IconSize) self.IconPadding.connect_from(avango.menu.Preferences.widget.IconPadding) self.IconColor.connect_from(avango.menu.Preferences.widget.IconColor) self.IconDisabledColor.connect_from(avango.menu.Preferences.widget.IconDisabledColor) self.TextSize.connect_from(avango.menu.Preferences.widget.TextSize) self.TextColor.connect_from(avango.menu.Preferences.widget.TextColor) 
self.TextDisabledColor.connect_from(avango.menu.Preferences.widget.TextDisabledColor) self.TextFontname.connect_from(avango.menu.Preferences.widget.TextFontname) self.CloseOnRelease.value = avango.menu.Preferences.pushbutton_close_on_release self.TextOnly.connect_from(avango.menu.Preferences.widget.TextOnly) self.init_defaults() def init_defaults(self): self.Title.value = avango.menu.Preferences.pushbutton_text self.IconFilenames.value = avango.menu.Preferences.pushbutton_icon def create_layouter(self): layouter = avango.menu.layout.PushButtonLayouter(Widget=self) return layouter def cleanup(self): self.pushbutton_cleanup() def pushbutton_cleanup(self): self.disconnect_all_fields() self.super(PushButton).widgetbase_cleanup() def __del__(self): if avango.menu.Preferences.print_destruction_of_menu_objects: print "PushButton Deleted"
luo66/scikit-learn
refs/heads/master
examples/linear_model/plot_sgd_comparison.py
77
""" ================================== Comparing various online solvers ================================== An example showing how different online solvers perform on the hand-written digits dataset. """ # Author: Rob Zinkov <rob at zinkov dot com> # License: BSD 3 clause import numpy as np import matplotlib.pyplot as plt from sklearn import datasets from sklearn.cross_validation import train_test_split from sklearn.linear_model import SGDClassifier, Perceptron from sklearn.linear_model import PassiveAggressiveClassifier from sklearn.linear_model import LogisticRegression heldout = [0.95, 0.90, 0.75, 0.50, 0.01] rounds = 20 digits = datasets.load_digits() X, y = digits.data, digits.target classifiers = [ ("SGD", SGDClassifier()), ("ASGD", SGDClassifier(average=True)), ("Perceptron", Perceptron()), ("Passive-Aggressive I", PassiveAggressiveClassifier(loss='hinge', C=1.0)), ("Passive-Aggressive II", PassiveAggressiveClassifier(loss='squared_hinge', C=1.0)), ("SAG", LogisticRegression(solver='sag', tol=1e-1, C=1.e4 / X.shape[0])) ] xx = 1. - np.array(heldout) for name, clf in classifiers: print("training %s" % name) rng = np.random.RandomState(42) yy = [] for i in heldout: yy_ = [] for r in range(rounds): X_train, X_test, y_train, y_test = \ train_test_split(X, y, test_size=i, random_state=rng) clf.fit(X_train, y_train) y_pred = clf.predict(X_test) yy_.append(1 - np.mean(y_pred == y_test)) yy.append(np.mean(yy_)) plt.plot(xx, yy, label=name) plt.legend(loc="upper right") plt.xlabel("Proportion train") plt.ylabel("Test Error Rate") plt.show()
alfa-addon/addon
refs/heads/master
plugin.video.alfa/channels/playview.py
1
# -*- coding: utf-8 -*- # -*- Channel Playview -*- # -*- Created for Alfa-addon -*- # -*- By the Alfa Develop Group -*- import sys PY3 = False if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int import re from channelselector import get_thumb from core import httptools from core import scrapertools from core import servertools from core import tmdb from core.item import Item from platformcode import config, logger from channels import autoplay from channels import filtertools from bs4 import BeautifulSoup import base64 import datetime host = 'https://playview.io/' IDIOMAS = {"Latino": "LAT", "Español": "CAST", "Subtitulado": "VOSE"} list_language = list(IDIOMAS.values()) list_quality = list() list_servers = ['upstream', 'cloudvideo', 'mixdrop', 'mystream', 'doodstream'] date = datetime.datetime.now() def mainlist(item): logger.info() autoplay.init(item.channel, list_servers, list_quality) itemlist = list() itemlist.append(Item(channel=item.channel, title='Películas', action='sub_menu', thumbnail=get_thumb('movies', auto=True))) itemlist.append(Item(channel=item.channel, title='Series', action='list_all', url=host + 'series-online', thumbnail=get_thumb('tvshows', auto=True), first=0)) itemlist.append(Item(channel=item.channel, title='Anime', action='list_all', url=host + 'anime-online', thumbnail=get_thumb('tvshows', auto=True), first=0)) itemlist.append(Item(channel=item.channel, title='Buscar', action='search', url=host + 'search/', thumbnail=get_thumb('search', auto=True))) autoplay.show_option(item.channel, itemlist) return itemlist def sub_menu(item): logger.info() itemlist = list() itemlist.append( Item(channel=item.channel, title='Ultimas', action='list_all', url=host + 'estrenos-%s' % date.year, first=0, thumbnail=get_thumb('last', auto=True))) itemlist.append( Item(channel=item.channel, title='Todas', action='list_all', url=host + 'peliculas-online', first=0, thumbnail=get_thumb('all', auto=True))) itemlist.append( 
Item(channel=item.channel, title='Generos', action='section', thumbnail=get_thumb('genres', auto=True))) return itemlist def create_soup(url, referer=None, unescape=False, forced_proxy_opt=None): logger.info() if referer: data = httptools.downloadpage(url, headers={'Referer': referer}, forced_proxy_opt=forced_proxy_opt).data else: data = httptools.downloadpage(url, forced_proxy_opt=forced_proxy_opt).data if unescape: data = scrapertools.unescape(data) soup = BeautifulSoup(data, "html5lib", from_encoding="utf-8") return soup def list_all(item): logger.info() itemlist = list() listed = list() next = False soup = create_soup(item.url) matches = soup.find_all("div", class_="spotlight_container") first = item.first last = first + 20 if last >= len(matches): last = len(matches) next = True for elem in matches[first:last]: url = elem.find("a", class_="overviewPlay playLink")["href"] title = re.sub("\s{2}.*", "", elem.find("div", class_="spotlight_title").text.strip()) thumb = elem.find("div", class_="spotlight_image lazy")["data-original"] try: year = elem.find_all("span", class_="slqual sres")[0].text except: year = "-" new_item = Item(channel=item.channel, title=title, thumbnail=thumb, infoLabels={'year': year}) if elem.find("div", class_="info-series"): new_item.contentSerieName = title season_path = re.sub("-temp-\d+", "", scrapertools.find_single_match(url, "%s(.+)" % host)) new_item.url = "%sver-temporadas-completas-de/%s" % (host, season_path) new_item.action = "seasons" else: new_item.contentTitle = title new_item.url = url new_item.action = "findvideos" if url not in listed: itemlist.append(new_item) listed.append(url) tmdb.set_infoLabels_itemlist(itemlist, True) if not next: url_next_page = item.url first = last else: try: url_next_page = soup.find_all("a", class_="page-link")[-1]["href"] first = 0 except: url_next_page = None if url_next_page: itemlist.append(item.clone(title="Siguiente >>", url=url_next_page, action='list_all', first=first)) return itemlist 
def section(item): logger.info() itemlist = list() soup = create_soup(host).find("li", class_="dropdown") if item.title == "Generos": matches = soup.find_all("li") for elem in matches: url = elem.a["href"] title = elem.a.text if "serie" in title.lower(): continue itemlist.append(Item(channel=item.channel, title=title, url=url, action="list_all", first=0 )) itemlist = sorted(itemlist, key=lambda i: i.title) return itemlist def seasons(item): logger.info() itemlist = list() infoLabels = item.infoLabels soup = create_soup(item.url) matches = soup.find_all("div", class_="spotlight_container") if len(matches) == 1: season_info = scrapertools.find_single_match(matches[0], "Temporada (\d+)") if not season_info: season_info = 1 url = matches[0].a["href"] infoLabels["season"] = season_info title = "Temporada %s" % season_info itemlist.append(Item(channel=item.channel, title=title, url=url, action="episodesxseason", infoLabels=infoLabels)) else: for elem in matches: full_title = elem.find("div", class_="spotlight_title").text title = re.sub(r"%s|/.*" % item.infoLabels["tvshowtitle"].lower(), "", full_title.lower()).strip() url = elem.a["href"] infoLabels["season"] = scrapertools.find_single_match(title, "\d+") itemlist.append(Item(channel=item.channel, title=title.capitalize(), url=url, action="episodesxseason", infoLabels=infoLabels)) tmdb.set_infoLabels_itemlist(itemlist, True) itemlist = sorted(itemlist, key=lambda i: i.title) if config.get_videolibrary_support() and len(itemlist) > 0: itemlist.append(Item(channel=item.channel, title='[COLOR yellow]Añadir esta serie a la videoteca[/COLOR]', url=item.url, action="add_serie_to_library", extra="episodios", contentSerieName=item.contentSerieName)) return itemlist def episodios(item): logger.info() itemlist = [] templist = seasons(item) for tempitem in templist: itemlist += episodesxseason(tempitem) return itemlist def episodesxseason(item): logger.info() itemlist = list() info_soup = create_soup(item.url) set_option = 
"LoadOptionsEpisode" info = info_soup.find("div", id="ficha") post = {"set": set_option, 'action': "EpisodesInfo", "id": info["data-id"], "type": info["data-type"]} episodesinfo = httptools.downloadpage(host + 'playview', post=post).data matches = BeautifulSoup(episodesinfo, "html5lib").find_all("div", class_="episodeBlock") infoLabels = item.infoLabels for elem in matches: epi_num = elem.find("div", class_="episodeNumber").text title = "%sx%s" % (infoLabels["season"], epi_num) infoLabels["episode"] = epi_num post = {"set": set_option, "action": "Step1", "id": elem["data-id"], "type": "1", 'episode': epi_num} itemlist.append(Item(channel=item.channel, title=title,action="findvideos", post=post, infoLabels=infoLabels)) tmdb.set_infoLabels_itemlist(itemlist, True) return itemlist def findvideos(item): logger.info() itemlist = list() set_option = "LoadOptions" episode = "" if item.post: post = item.post id = post['id'] episode = post['episode'] dtype = post['type'] set_option = 'LoadOptionsEpisode' else: info_soup = create_soup(item.url) info = info_soup.find("div", id="ficha") id = info["data-id"] dtype = info["data-type"] post = {"set": set_option, 'action': "Step1", "id": id, "type": dtype} step1 = httptools.downloadpage(host + 'playview', post=post).data matches = BeautifulSoup(step1, "html5lib").find_all("button", class_="select-quality") for step2 in matches: post = {"set": set_option, "action": "Step2", "id": id, "type": dtype, "quality": step2["data-quality"], "episode": episode} options = httptools.downloadpage(host + 'playview', post=post).data soup = BeautifulSoup(options, "html5lib").find_all("li", class_="tb-data-single") for elem in soup: lang = elem.find("h4").text srv = re.sub(r"(\..+|\s.+)", "", elem.find("img")["title"]) video_id = elem.find("button", class_="btn-link")["data-id"] qlty = scrapertools.find_single_match(step2["data-quality"], r"\d+p") post = {"set": set_option, "action": "Step3", "id": video_id, "type": dtype} if not srv: srv = 
"directo" itemlist.append(Item(channel=item.channel, title=srv.capitalize(), server=srv, action="play", post=post, language=IDIOMAS.get(lang, "LAT"), quality=qlty, infoLabels=item.infoLabels)) itemlist = sorted(itemlist, key=lambda i: i.language) # Requerido para FilterTools itemlist = filtertools.get_links(itemlist, item, list_language) # Requerido para AutoPlay autoplay.start(itemlist, item) if item.contentType == 'movie': if config.get_videolibrary_support() and len(itemlist) > 0 and item.extra != 'findvideos': itemlist.append( Item(channel=item.channel, title='[COLOR yellow]Añadir esta pelicula a la videoteca[/COLOR]', url=item.url, action="add_pelicula_to_library", extra="findvideos", contentTitle=item.contentTitle)) return itemlist def play(item): logger.info() data = httptools.downloadpage(host + 'playview', post=item.post).data url_data = BeautifulSoup(data, "html5lib") try: iframe = url_data.find("iframe", class_="embed-responsive-item")["src"] url = httptools.downloadpage(iframe).url except: url_data = url_data.find("button", class_="linkfull")["data-url"] url = base64.b64decode(scrapertools.find_single_match(url_data, "/go/(.+)")) srv = servertools.get_server_from_url(url) item = item.clone(url=url, server=srv) return [item] def search(item, texto): logger.info() try: texto = texto.replace(" ", "+") item.url = item.url + texto item.first = 0 if texto != '': return list_all(item) else: return [] # Se captura la excepción, para no interrumpir al buscador global si un canal falla except: import sys for line in sys.exc_info(): logger.error("%s" % line) return [] def newest(categoria): logger.info() item = Item() item.type = 'movie' item.first = 0 try: if categoria == 'peliculas': item.url = host + 'peliculas-online' elif categoria == 'infantiles': item.url = host + 'peliculas-online/animacion' elif categoria == 'terror': item.url = host + 'peliculas-online/terror' itemlist = list_all(item) if itemlist[-1].title == 'Siguiente >>': itemlist.pop() except: 
import sys for line in sys.exc_info(): logger.error("{0}".format(line)) return [] return itemlist
mlavin/django
refs/heads/master
tests/admin_changelist/models.py
42
from django.db import models

# NOTE(review): judging by the path (tests/admin_changelist/models.py) these
# models exist to exercise the admin changelist; field names and db_column
# values are part of the test contract and must not be renamed.


class Event(models.Model):
    # Oracle can have problems with a column named "date"
    date = models.DateField(db_column="event_date")


class Parent(models.Model):
    name = models.CharField(max_length=128)


class Child(models.Model):
    # FK is nullable and hidden from admin forms (editable=False).
    parent = models.ForeignKey(Parent, models.SET_NULL, editable=False, null=True)
    name = models.CharField(max_length=30, blank=True)
    age = models.IntegerField(null=True, blank=True)


class Genre(models.Model):
    name = models.CharField(max_length=20)


class Band(models.Model):
    name = models.CharField(max_length=20)
    nr_of_members = models.PositiveIntegerField()
    genres = models.ManyToManyField(Genre)


class Musician(models.Model):
    name = models.CharField(max_length=30)

    def __str__(self):
        return self.name


class Group(models.Model):
    name = models.CharField(max_length=30)
    # M2M with an explicit intermediate model (Membership).
    members = models.ManyToManyField(Musician, through='Membership')

    def __str__(self):
        return self.name


class Concert(models.Model):
    name = models.CharField(max_length=30)
    group = models.ForeignKey(Group, models.CASCADE)


class Membership(models.Model):
    music = models.ForeignKey(Musician, models.CASCADE)
    group = models.ForeignKey(Group, models.CASCADE)
    role = models.CharField(max_length=15)


class Quartet(Group):
    pass


class ChordsMusician(Musician):
    pass


class ChordsBand(models.Model):
    name = models.CharField(max_length=30)
    members = models.ManyToManyField(ChordsMusician, through='Invitation')


class Invitation(models.Model):
    player = models.ForeignKey(ChordsMusician, models.CASCADE)
    band = models.ForeignKey(ChordsBand, models.CASCADE)
    instrument = models.CharField(max_length=15)


class Swallow(models.Model):
    origin = models.CharField(max_length=255)
    load = models.FloatField()
    speed = models.FloatField()

    class Meta:
        # Default ordering on two fields.
        ordering = ('speed', 'load')


class SwallowOneToOne(models.Model):
    swallow = models.OneToOneField(Swallow, models.CASCADE)


class UnorderedObject(models.Model):
    """
    Model without any defined `Meta.ordering`. Refs #17198.
    """
    bool = models.BooleanField(default=True)


class OrderedObjectManager(models.Manager):
    def get_queryset(self):
        # Impose a default order through the manager rather than Meta.
        return super().get_queryset().order_by('number')


class OrderedObject(models.Model):
    """
    Model with Manager that defines a default order. Refs #17198.
    """
    name = models.CharField(max_length=255)
    bool = models.BooleanField(default=True)
    number = models.IntegerField(default=0, db_column='number_val')

    objects = OrderedObjectManager()


class CustomIdUser(models.Model):
    uuid = models.AutoField(primary_key=True)
rohanp/scikit-learn
refs/heads/master
sklearn/utils/metaestimators.py
283
"""Utilities for meta-estimators"""
# Author: Joel Nothman
#         Andreas Mueller
# Licence: BSD

from operator import attrgetter
from functools import update_wrapper

__all__ = ['if_delegate_has_method']


class _IffHasAttrDescriptor(object):
    """Implements a conditional property using the descriptor protocol.

    Using this class to create a decorator will raise an ``AttributeError``
    if the ``attribute_name`` is not present on the base object. This allows
    ducktyping of the decorated method based on ``attribute_name``.

    See https://docs.python.org/3/howto/descriptor.html for an explanation of
    descriptors.
    """

    def __init__(self, fn, attribute_name):
        self.fn = fn
        self.get_attribute = attrgetter(attribute_name)
        # Copy the wrapped function's metadata (docstring, name, ...) onto
        # the descriptor itself so class-level access shows proper docs.
        update_wrapper(self, fn)

    def __get__(self, obj, type=None):
        if obj is not None:
            # Only probe the delegate on instances, never on the class, so
            # the docstring stays reachable via class attribute access.
            # This line raises AttributeError when the delegate lacks the
            # attribute, which is what makes hasattr() report False.
            self.get_attribute(obj)

        # A plain closure (rather than functools.partial) keeps help()
        # working once update_wrapper has copied the metadata over.
        def wrapped(*args, **kwargs):
            return self.fn(obj, *args, **kwargs)

        update_wrapper(wrapped, self.fn)
        return wrapped


def if_delegate_has_method(delegate):
    """Create a decorator for methods that are delegated to a sub-estimator

    This enables ducktyping by hasattr returning True according to the
    sub-estimator.

    >>> from sklearn.utils.metaestimators import if_delegate_has_method
    >>>
    >>> class MetaEst(object):
    ...     def __init__(self, sub_est):
    ...         self.sub_est = sub_est
    ...
    ...     @if_delegate_has_method(delegate='sub_est')
    ...     def predict(self, X):
    ...         return self.sub_est.predict(X)
    ...
    >>> class HasPredict(object):
    ...     def predict(self, X):
    ...         return X.sum(axis=1)
    ...
    >>> class HasNoPredict(object):
    ...     pass
    ...
    >>> hasattr(MetaEst(HasPredict()), 'predict')
    True
    >>> hasattr(MetaEst(HasNoPredict()), 'predict')
    False
    """
    def decorate(fn):
        return _IffHasAttrDescriptor(fn, '%s.%s' % (delegate, fn.__name__))

    return decorate
phac-nml/bioconda-recipes
refs/heads/master
recipes/phylip/drawgram.py
59
#!/usr/bin/env python
#
# Wrapper script for Java Conda packages that ensures that the java runtime
# is invoked with the right options. Adapted from the bash script
# (http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in/246128#246128).
#
# Program Parameters
#
import os
import re
import shutil
import subprocess
import sys
from os import access
from os import getenv
from os import X_OK

jar_file = 'DrawGram.jar'

default_jvm_mem_opts = ['-Xms512m', '-Xmx1g']

# !!! End of parameter section. No user-serviceable code below this line !!!


def real_dirname(path):
    """Return the symlink-resolved, canonicalized directory-portion of path."""
    return os.path.dirname(os.path.realpath(path))


def java_executable():
    """Return the executable name of the Java interpreter.

    Prefers ``$JAVA_HOME/bin/java`` when it exists and is executable,
    otherwise falls back to whatever ``java`` resolves to on the PATH.
    """
    java_home = getenv('JAVA_HOME')
    java_bin = os.path.join('bin', 'java')
    if java_home and access(os.path.join(java_home, java_bin), X_OK):
        return os.path.join(java_home, java_bin)
    else:
        return 'java'


def jvm_opts(argv):
    """Construct list of Java arguments based on our argument list.

    The argument list passed in argv must not include the script name.
    The return value is a 4-tuple of lists/values of the form:
        (memory_options, prop_options, passthrough_options, exec_dir)
    where exec_dir is None unless ``--exec_dir=DIR`` was supplied.
    """
    mem_opts = []
    prop_opts = []
    pass_args = []
    exec_dir = None

    for arg in argv:
        if arg.startswith(('-D', '-XX')):
            # JVM system properties and advanced runtime options.
            prop_opts.append(arg)
        elif arg.startswith('-Xm'):
            # JVM heap options (-Xms / -Xmx).
            mem_opts.append(arg)
        elif arg.startswith('--exec_dir='):
            exec_dir = arg.split('=')[1].strip('"').strip("'")
            if not os.path.exists(exec_dir):
                # First use of this exec_dir: copy the jar, lib and
                # resources next to it (multiuser installs).
                shutil.copytree(real_dirname(sys.argv[0]), exec_dir,
                                symlinks=False, ignore=None)
        else:
            pass_args.append(arg)

    # In the original shell script the test coded below read:
    #   if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
    # To reproduce the behaviour of the above shell code fragment
    # it is important to explicitly check for equality with None
    # in the second condition, so a null envar value counts as True!
    if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
        mem_opts = default_jvm_mem_opts

    return (mem_opts, prop_opts, pass_args, exec_dir)


def main():
    """Launch the bundled jar with the JVM options gathered from argv.

    This program resolves files relative to the path of the jar file. In a
    multiuser setting, the option --exec_dir="exec_dir" can be used as the
    location for the distribution; if exec_dir does not exist, the jar
    file, lib and resources are copied there first (see jvm_opts).
    """
    java = java_executable()
    (mem_opts, prop_opts, pass_args, exec_dir) = jvm_opts(sys.argv[1:])
    jar_dir = "{jd}/java".format(jd=exec_dir if exec_dir else real_dirname(sys.argv[0]))

    # Arguments starting with 'eu' select a class from the classpath rather
    # than running the jar directly (behavior preserved from the original).
    if pass_args != [] and pass_args[0].startswith('eu'):
        jar_arg = '-cp'
    else:
        jar_arg = '-jar'
    jar_path = os.path.join(jar_dir, jar_file)

    java_args = [java] + mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args

    # Trying to fix font problems: phylip appears to require that fonts are
    # placed in the working directory. Current workaround is to temporarily
    # link them there and then remove the links. Far from optimal, but
    # nothing else seems to work.
    subprocess.call("ln -s {fd}/* .".format(fd=re.sub("java", "fonts", jar_dir)),
                    shell=True)

    # Execute the jar file and record its exit status.
    sig = subprocess.call(java_args)

    # Remove the temporary font links again.
    subprocess.call(["rm -f font[1-6] fontfile"], shell=True)

    # Propagate the jar's exit status to our caller.
    sys.exit(sig)


if __name__ == '__main__':
    main()
sajuptpm/manila
refs/heads/master
contrib/tempest/tempest/api/share/test_limits.py
2
# Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tempest.api.share import base
from tempest import test


class ShareLimitsTest(base.BaseSharesTest):
    """Smoke checks of the share limits API response structure."""

    # Every expected entry under the "absolute" section of the response.
    ABSOLUTE_KEYS = (
        "maxTotalShareGigabytes",
        "maxTotalShares",
        "maxTotalShareSnapshots",
        "maxTotalShareNetworks",
        "totalSharesUsed",
        "totalShareSnapshotsUsed",
        "totalShareNetworksUsed",
        "totalShareGigabytesUsed",
    )

    @test.attr(type=["gate", "smoke", ])
    def test_limits_keys(self):
        # list limits
        limits = self.shares_client.get_limits()

        # verify the top-level sections are present
        # (plain loops instead of side-effect list comprehensions)
        for key in ("rate", "absolute"):
            self.assertIn(key, limits.keys())

        # verify every absolute-limit entry is reported
        for key in self.ABSOLUTE_KEYS:
            self.assertIn(key, limits["absolute"].keys())

    @test.attr(type=["gate", "smoke", ])
    def test_limits_values(self):
        # list limits
        limits = self.shares_client.get_limits()

        # verify integer values for absolute limits; values down to -1
        # (presumably meaning "unlimited" -- hence the -2 bound) are allowed
        abs_l = limits["absolute"]
        for key in self.ABSOLUTE_KEYS:
            self.assertGreater(int(abs_l[key]), -2)
antoviaque/edx-platform
refs/heads/master
openedx/core/djangoapps/user_api/tests/test_constants.py
121
# -*- coding: utf-8 -*- """Constants used in the test suite. """ SORTED_COUNTRIES = [ (u"AF", u"Afghanistan"), (u"AX", u"Åland Islands"), (u"AL", u"Albania"), (u"DZ", u"Algeria"), (u"AS", u"American Samoa"), (u"AD", u"Andorra"), (u"AO", u"Angola"), (u"AI", u"Anguilla"), (u"AQ", u"Antarctica"), (u"AG", u"Antigua and Barbuda"), (u"AR", u"Argentina"), (u"AM", u"Armenia"), (u"AW", u"Aruba"), (u"AU", u"Australia"), (u"AT", u"Austria"), (u"AZ", u"Azerbaijan"), (u"BS", u"Bahamas"), (u"BH", u"Bahrain"), (u"BD", u"Bangladesh"), (u"BB", u"Barbados"), (u"BY", u"Belarus"), (u"BE", u"Belgium"), (u"BZ", u"Belize"), (u"BJ", u"Benin"), (u"BM", u"Bermuda"), (u"BT", u"Bhutan"), (u"BO", u"Bolivia"), (u"BQ", u"Bonaire, Sint Eustatius and Saba"), (u"BA", u"Bosnia and Herzegovina"), (u"BW", u"Botswana"), (u"BV", u"Bouvet Island"), (u"BR", u"Brazil"), (u"IO", u"British Indian Ocean Territory"), (u"BN", u"Brunei"), (u"BG", u"Bulgaria"), (u"BF", u"Burkina Faso"), (u"BI", u"Burundi"), (u"CV", u"Cabo Verde"), (u"KH", u"Cambodia"), (u"CM", u"Cameroon"), (u"CA", u"Canada"), (u"KY", u"Cayman Islands"), (u"CF", u"Central African Republic"), (u"TD", u"Chad"), (u"CL", u"Chile"), (u"CN", u"China"), (u"CX", u"Christmas Island"), (u"CC", u"Cocos (Keeling) Islands"), (u"CO", u"Colombia"), (u"KM", u"Comoros"), (u"CG", u"Congo"), (u"CD", u"Congo (the Democratic Republic of the)"), (u"CK", u"Cook Islands"), (u"CR", u"Costa Rica"), (u"CI", u"Côte d'Ivoire"), (u"HR", u"Croatia"), (u"CU", u"Cuba"), (u"CW", u"Curaçao"), (u"CY", u"Cyprus"), (u"CZ", u"Czech Republic"), (u"DK", u"Denmark"), (u"DJ", u"Djibouti"), (u"DM", u"Dominica"), (u"DO", u"Dominican Republic"), (u"EC", u"Ecuador"), (u"EG", u"Egypt"), (u"SV", u"El Salvador"), (u"GQ", u"Equatorial Guinea"), (u"ER", u"Eritrea"), (u"EE", u"Estonia"), (u"ET", u"Ethiopia"), (u"FK", u"Falkland Islands [Malvinas]"), (u"FO", u"Faroe Islands"), (u"FJ", u"Fiji"), (u"FI", u"Finland"), (u"FR", u"France"), (u"GF", u"French Guiana"), (u"PF", u"French Polynesia"), (u"TF", 
u"French Southern Territories"), (u"GA", u"Gabon"), (u"GM", u"Gambia"), (u"GE", u"Georgia"), (u"DE", u"Germany"), (u"GH", u"Ghana"), (u"GI", u"Gibraltar"), (u"GR", u"Greece"), (u"GL", u"Greenland"), (u"GD", u"Grenada"), (u"GP", u"Guadeloupe"), (u"GU", u"Guam"), (u"GT", u"Guatemala"), (u"GG", u"Guernsey"), (u"GN", u"Guinea"), (u"GW", u"Guinea-Bissau"), (u"GY", u"Guyana"), (u"HT", u"Haiti"), (u"HM", u"Heard Island and McDonald Islands"), (u"VA", u"Holy See"), (u"HN", u"Honduras"), (u"HK", u"Hong Kong"), (u"HU", u"Hungary"), (u"IS", u"Iceland"), (u"IN", u"India"), (u"ID", u"Indonesia"), (u"IR", u"Iran"), (u"IQ", u"Iraq"), (u"IE", u"Ireland"), (u"IM", u"Isle of Man"), (u"IL", u"Israel"), (u"IT", u"Italy"), (u"JM", u"Jamaica"), (u"JP", u"Japan"), (u"JE", u"Jersey"), (u"JO", u"Jordan"), (u"KZ", u"Kazakhstan"), (u"KE", u"Kenya"), (u"KI", u"Kiribati"), (u"KW", u"Kuwait"), (u"KG", u"Kyrgyzstan"), (u"LA", u"Laos"), (u"LV", u"Latvia"), (u"LB", u"Lebanon"), (u"LS", u"Lesotho"), (u"LR", u"Liberia"), (u"LY", u"Libya"), (u"LI", u"Liechtenstein"), (u"LT", u"Lithuania"), (u"LU", u"Luxembourg"), (u"MO", u"Macao"), (u"MK", u"Macedonia"), (u"MG", u"Madagascar"), (u"MW", u"Malawi"), (u"MY", u"Malaysia"), (u"MV", u"Maldives"), (u"ML", u"Mali"), (u"MT", u"Malta"), (u"MH", u"Marshall Islands"), (u"MQ", u"Martinique"), (u"MR", u"Mauritania"), (u"MU", u"Mauritius"), (u"YT", u"Mayotte"), (u"MX", u"Mexico"), (u"FM", u"Micronesia (Federated States of)"), (u"MD", u"Moldovia"), (u"MC", u"Monaco"), (u"MN", u"Mongolia"), (u"ME", u"Montenegro"), (u"MS", u"Montserrat"), (u"MA", u"Morocco"), (u"MZ", u"Mozambique"), (u"MM", u"Myanmar"), (u"NA", u"Namibia"), (u"NR", u"Nauru"), (u"NP", u"Nepal"), (u"NL", u"Netherlands"), (u"NC", u"New Caledonia"), (u"NZ", u"New Zealand"), (u"NI", u"Nicaragua"), (u"NE", u"Niger"), (u"NG", u"Nigeria"), (u"NU", u"Niue"), (u"NF", u"Norfolk Island"), (u"KP", u"North Korea"), (u"MP", u"Northern Mariana Islands"), (u"NO", u"Norway"), (u"OM", u"Oman"), (u"PK", u"Pakistan"), 
(u"PW", u"Palau"), (u"PS", u"Palestine, State of"), (u"PA", u"Panama"), (u"PG", u"Papua New Guinea"), (u"PY", u"Paraguay"), (u"PE", u"Peru"), (u"PH", u"Philippines"), (u"PN", u"Pitcairn"), (u"PL", u"Poland"), (u"PT", u"Portugal"), (u"PR", u"Puerto Rico"), (u"QA", u"Qatar"), (u"RE", u"Réunion"), (u"RO", u"Romania"), (u"RU", u"Russia"), (u"RW", u"Rwanda"), (u"BL", u"Saint Barthélemy"), (u"SH", u"Saint Helena, Ascension and Tristan da Cunha"), (u"KN", u"Saint Kitts and Nevis"), (u"LC", u"Saint Lucia"), (u"MF", u"Saint Martin (French part)"), (u"PM", u"Saint Pierre and Miquelon"), (u"VC", u"Saint Vincent and the Grenadines"), (u"WS", u"Samoa"), (u"SM", u"San Marino"), (u"ST", u"Sao Tome and Principe"), (u"SA", u"Saudi Arabia"), (u"SN", u"Senegal"), (u"RS", u"Serbia"), (u"SC", u"Seychelles"), (u"SL", u"Sierra Leone"), (u"SG", u"Singapore"), (u"SX", u"Sint Maarten (Dutch part)"), (u"SK", u"Slovakia"), (u"SI", u"Slovenia"), (u"SB", u"Solomon Islands"), (u"SO", u"Somalia"), (u"ZA", u"South Africa"), (u"GS", u"South Georgia and the South Sandwich Islands"), (u"KR", u"South Korea"), (u"SS", u"South Sudan"), (u"ES", u"Spain"), (u"LK", u"Sri Lanka"), (u"SD", u"Sudan"), (u"SR", u"Suriname"), (u"SJ", u"Svalbard and Jan Mayen"), (u"SZ", u"Swaziland"), (u"SE", u"Sweden"), (u"CH", u"Switzerland"), (u"SY", u"Syria"), (u"TW", u"Taiwan"), (u"TJ", u"Tajikistan"), (u"TZ", u"Tanzania"), (u"TH", u"Thailand"), (u"TL", u"Timor-Leste"), (u"TG", u"Togo"), (u"TK", u"Tokelau"), (u"TO", u"Tonga"), (u"TT", u"Trinidad and Tobago"), (u"TN", u"Tunisia"), (u"TR", u"Turkey"), (u"TM", u"Turkmenistan"), (u"TC", u"Turks and Caicos Islands"), (u"TV", u"Tuvalu"), (u"UG", u"Uganda"), (u"UA", u"Ukraine"), (u"AE", u"United Arab Emirates"), (u"GB", u"United Kingdom of Great Britain and Northern Ireland"), (u"UM", u"United States Minor Outlying Islands"), (u"US", u"United States of America"), (u"UY", u"Uruguay"), (u"UZ", u"Uzbekistan"), (u"VU", u"Vanuatu"), (u"VE", u"Venezuela"), (u"VN", u"Vietnam"), (u"VG", 
u"Virgin Islands (British)"), (u"VI", u"Virgin Islands (U.S.)"), (u"WF", u"Wallis and Futuna"), (u"EH", u"Western Sahara"), (u"YE", u"Yemen"), (u"ZM", u"Zambia"), (u"ZW", u"Zimbabwe"), ]
eneldoserrata/marcos_openerp
refs/heads/master
addons/report_geraldo/lib/geraldo/site/newsite/site-geraldo/django/conf/app_template/views.py
6027
# Create your views here.
coco-team/Ikos-Api
refs/heads/master
PyIkos/ikos/api.py
1
################################################################################# # # Low-level API for IKOS # # Author: Maxime Arthaud (maxime@arthaud.me) # # Copyright (c) 2014 Carnegie Mellon University # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# #################################################################################

from fractions import Fraction

# NOTE: the compiled backend (ikos.apicore) is imported lazily inside
# compute_fixpoint() so that the pure-Python constraint DSL below remains
# usable even when the native extension is not installed.


class Types:
    """Enumeration of the scalar types supported by the analysis."""
    INT = 'int'
    RATIONAL = 'rational'

    @classmethod
    def all(cls):
        """Return the tuple of every supported type tag."""
        return (cls.INT, cls.RATIONAL)


class TypeException(Exception):
    """Raised when an expression, statement or constraint is ill-typed."""
    pass


class Var:
    """A typed program variable (name plus one of Types.all())."""

    def __init__(self, name, type):
        assert type in Types.all()
        self.name = name
        self.type = type

    def __repr__(self):
        return 'Var(%s, %s)' % (self.type, self.name)

    def __str__(self):
        return self.name


def IntVar(name):
    """Convenience constructor for an integer variable."""
    return Var(name, Types.INT)


def RationalVar(name):
    """Convenience constructor for a rational variable."""
    return Var(name, Types.RATIONAL)


class LinearExpression:
    """A linear expression: a list of (factor, Var) terms and bare constants."""

    def __init__(self, terms):
        assert terms, 'empty expression'
        self.terms = terms

    @staticmethod
    def _constant_type(cst):
        """Return the Types tag of a raw constant, or raise TypeException."""
        if isinstance(cst, Fraction) or isinstance(cst, float):
            return Types.RATIONAL
        elif isinstance(cst, int):
            return Types.INT
        else:
            raise TypeException('invalid constant: %s' % repr(cst))

    @staticmethod
    def _constant_value(cst, type):
        """Coerce a raw constant to the given type, or raise TypeException."""
        if type == Types.RATIONAL:
            if isinstance(cst, Fraction) or isinstance(cst, float) or isinstance(cst, int):
                return Fraction(cst)
            else:
                raise TypeException('invalid constant of type %s: %s' % (type, repr(cst)))
        else:
            if isinstance(cst, int):
                return cst
            else:
                raise TypeException('invalid constant of type %s: %s' % (type, repr(cst)))

    def is_constant(self):
        """True when the expression contains no variable terms."""
        return all(not isinstance(term, tuple) for term in self.terms)

    def constant(self):
        """Return the sum of the constant parts, coerced to this expression's type."""
        type = self.type
        cst = LinearExpression._constant_value(0, type)
        for term in self.terms:
            if not isinstance(term, tuple):
                cst += LinearExpression._constant_value(term, type)
        return cst

    def variables(self):
        """Return the set of Vars appearing in the expression."""
        # set comprehension instead of the original manual accumulation loop
        return {term[1] for term in self.terms if isinstance(term, tuple)}

    def normalize(self):
        """Return (coerced (factor, var) terms, summed constant)."""
        ty = self.type  # renamed from `type`: avoid shadowing the builtin
        terms = []
        cst = LinearExpression._constant_value(0, ty)
        for term in self.terms:
            if isinstance(term, tuple):
                factor, variable = term
                factor = LinearExpression._constant_value(factor, ty)
                terms.append((factor, variable))
            else:
                cst += LinearExpression._constant_value(term, ty)
        return terms, cst

    @property
    def type(self):
        """Infer the expression's type, raising TypeException on a mismatch.

        INT constants may be absorbed by a RATIONAL expression, but once a
        variable fixes the type, every further term must agree with it.
        """
        assert self.terms, 'empty expression'
        current_type = None
        only_constant = False
        for term in self.terms:
            if isinstance(term, tuple):
                factor, variable = term
                factor_type = LinearExpression._constant_type(factor)
                if factor_type == Types.RATIONAL and variable.type == Types.INT:
                    raise TypeException('invalid operand types for *: %s and %s' % (factor_type, variable.type))
                # term is well typed
                if current_type is None:
                    current_type = variable.type
                elif only_constant:
                    if current_type == Types.RATIONAL and variable.type == Types.INT:
                        raise TypeException('invalid operand types for +: %s and %s' % (current_type, variable.type))
                    current_type = variable.type
                    only_constant = False
                else:
                    if current_type != variable.type:
                        raise TypeException('invalid operand types for +: %s and %s' % (current_type, variable.type))
            else:
                cst = term
                cst_type = LinearExpression._constant_type(cst)
                if current_type is None:
                    current_type = cst_type
                    only_constant = True
                elif only_constant:
                    if current_type == Types.RATIONAL or cst_type == Types.RATIONAL:
                        current_type = Types.RATIONAL
                    else:
                        current_type = Types.INT
                else:
                    if current_type == Types.INT and cst_type == Types.RATIONAL:
                        raise TypeException('invalid operand types for +: %s and %s' % (current_type, cst_type))
        return current_type

    def multiply(self, constant):
        """Multiply every term (factors and constants) by *constant*, in place."""
        def multiply_term(term):
            if isinstance(term, tuple):
                factor, var = term
                return factor * constant, var
            else:
                return term * constant
        self.terms = [multiply_term(term) for term in self.terms]

    def __repr__(self):
        return 'LinearExpression(%s)' % repr(self.terms)

    def __str__(self):
        terms, cst = self.normalize()
        r = ''
        for factor, variable in terms:
            # sign separator (bare '-' for a leading negative factor)
            if not r:
                if factor < 0:
                    r += '-'
            else:
                if factor < 0:
                    r += ' - '
                else:
                    r += ' + '
            if abs(factor) != 1:
                r += str(abs(factor))
            r += variable.name
        # constant part: printed when non-zero, or alone when no terms exist
        if not r or cst != 0:
            if not r:
                r += str(cst)
            else:
                if cst < 0:
                    r += ' - '
                else:
                    r += ' + '
                r += str(abs(cst))
        return r


class LinearConstraint:
    """A constraint `expression OP 0` with OP in {<=, =, !=}."""

    INF_EQ = '<='
    EQ = '='
    NOT_EQ = '!='

    def __init__(self, expression, operator):
        assert isinstance(expression, LinearExpression)
        assert operator in (LinearConstraint.INF_EQ, LinearConstraint.EQ, LinearConstraint.NOT_EQ)
        self.expression = expression
        self.operator = operator

    def is_tautology(self):
        """True when the constraint is constant and always holds."""
        if self.expression.is_constant():
            cst = self.expression.constant()  # hoisted: evaluate once
            if self.operator == LinearConstraint.INF_EQ:
                return cst <= 0
            elif self.operator == LinearConstraint.EQ:
                return cst == 0
            else:
                return cst != 0
        else:
            return False

    def is_contradiction(self):
        """True when the constraint is constant and never holds."""
        if self.expression.is_constant():
            cst = self.expression.constant()  # hoisted: evaluate once
            if self.operator == LinearConstraint.INF_EQ:
                return cst > 0
            elif self.operator == LinearConstraint.EQ:
                return cst != 0
            else:
                return cst == 0
        else:
            return False

    @property
    def type(self):
        return self.expression.type

    def __repr__(self):
        return 'LinearConstraint(%s, %s)' % (repr(self.expression), self.operator)

    def __str__(self):
        if self.is_tautology():
            return 'true'
        elif self.is_contradiction():
            return 'false'
        else:
            # move the constant to the right-hand side for display
            cst = self.expression.constant()
            return '%s %s %s' % (LinearExpression(self.expression.terms + [-cst]), self.operator, -cst)


class Statement:
    """Abstract base for statements; subclasses must implement check_types."""

    def check_types(self):
        raise NotImplementedError()


class BinaryOperation(Statement):
    """var = left OP right, with OP in {+, -, *, /}."""

    ADD = '+'
    SUB = '-'
    MUL = '*'
    DIV = '/'

    def __init__(self, var, left, operator, right):
        assert isinstance(var, Var)
        assert isinstance(left, Var)
        assert operator in (BinaryOperation.ADD, BinaryOperation.SUB, BinaryOperation.MUL, BinaryOperation.DIV)
        assert isinstance(right, Var)
        self.var = var
        self.left = left
        self.operator = operator
        self.right = right

    def check_types(self):
        # all three variables must share one type
        if not(self.var.type == self.left.type and self.var.type == self.right.type):
            raise TypeException('invalid operand types for operation %s: %s, %s and %s' % (self.operator, self.var.type, self.left.type, self.right.type))

    def __repr__(self):
        return 'BinaryOperation(var=%s, left=%s, op=%s, right=%s)' % (repr(self.var), repr(self.left), self.operator, repr(self.right))

    def __str__(self):
        return '%s = %s %s %s;' % (self.var.name, self.left.name, self.operator, self.right.name)


def AddOperation(var, left, right):
    return BinaryOperation(var, left, BinaryOperation.ADD, right)


def SubOperation(var, left, right):
    return BinaryOperation(var, left, BinaryOperation.SUB, right)


def MulOperation(var, left, right):
    return BinaryOperation(var, left, BinaryOperation.MUL, right)


def DivOperation(var, left, right):
    return BinaryOperation(var, left, BinaryOperation.DIV, right)


class Assign(Statement):
    """var = linear_expression."""

    def __init__(self, var, expression):
        assert isinstance(var, Var)
        assert isinstance(expression, LinearExpression)
        self.var = var
        self.expression = expression

    def check_types(self):
        if self.var.type != self.expression.type:
            raise TypeException('invalid assign statement, type mismatch: %s and %s' % (self.var.type, self.expression.type))

    def __repr__(self):
        return 'Assign(%s, %s)' % (repr(self.var), repr(self.expression))

    def __str__(self):
        return '%s = %s;' % (self.var.name, self.expression)


class Assert(Statement):
    """assert(linear_constraint)."""

    def __init__(self, constraint):
        assert isinstance(constraint, LinearConstraint)
        self.constraint = constraint

    def check_types(self):
        self.constraint.type  # will check expression type

    def __repr__(self):
        return 'Assert(%s)' % repr(self.constraint)

    def __str__(self):
        return 'assert(%s);' % self.constraint


class Checkpoint(Statement):
    """Named point where the fixpoint engine reports its invariants."""

    def __init__(self, name):
        self.name = name

    def check_types(self):
        pass

    def __repr__(self):
        return 'Checkpoint(%s)' % self.name

    def __str__(self):
        return 'checkpoint(%s);' % self.name


class BasicBlock:
    """A named sequence of statements with successor blocks."""

    def __init__(self, name):
        self.name = name
        self.statements = []
        self.next_blocks = []

    def add_statement(self, stmt):
        assert isinstance(stmt, Statement)
        self.statements.append(stmt)

    def add_statements(self, stmts):
        self.statements += stmts

    def add_next(self, next_block):
        assert isinstance(next_block, BasicBlock)
        self.next_blocks.append(next_block)

    def check_types(self):
        for stmt in self.statements:
            stmt.check_types()

    def __repr__(self):
        return 'BasicBlock(name=%s, statements=[\n%s\n], next_blocks=[%s])' % (
            self.name,
            ',\n'.join('\t' + repr(stmt) for stmt in self.statements),
            ', '.join(b.name for b in self.next_blocks))

    def __str__(self):
        return '%s:\n%s\n--> [%s]' % (
            self.name,
            '\n'.join('\t' + str(stmt) for stmt in self.statements),
            ', '.join(b.name for b in self.next_blocks))


class Cfg:
    """Represents a Control Flow Graph."""

    def __init__(self, entry):
        self.entry = None
        self.blocks = []
        self.set_entry(entry)

    def add_block(self, block):
        assert isinstance(block, BasicBlock)
        if block not in self.blocks:
            self.blocks.append(block)

    def set_entry(self, entry):
        assert isinstance(entry, BasicBlock)
        self.entry = entry
        self.add_block(entry)

    def check_types(self):
        for block in self.blocks:
            block.check_types()

    def __repr__(self):
        return 'Cfg(entry=%s, blocks=[\n%s\n])' % (
            self.entry.name,
            ',\n'.join('\t' + repr(b).replace('\n', '\n\t') for b in self.blocks))

    def __str__(self):
        return '\n\n'.join(str(b) for b in self.blocks)


class Constraint:
    """A result constraint: `expression OP 0`, or `expression = 0 [modulus]`."""

    INF = '<'
    INF_EQ = '<='
    SUP = '>'
    SUP_EQ = '>='
    EQ = '='
    NOT_EQ = '!='
    MOD = 'mod'

    def __init__(self, expression, operator, modulus=None):
        assert isinstance(expression, LinearExpression)
        assert operator in (Constraint.INF, Constraint.INF_EQ, Constraint.SUP, Constraint.SUP_EQ, Constraint.EQ, Constraint.NOT_EQ, Constraint.MOD)
        self.expression = expression
        self.operator = operator
        self.modulus = modulus

    def is_tautology(self):
        """True when the constraint is constant and always holds."""
        if self.expression.is_constant():
            cst = self.expression.constant()  # hoisted: evaluate once
            if self.operator == Constraint.INF:
                return cst < 0
            elif self.operator == Constraint.INF_EQ:
                return cst <= 0
            elif self.operator == Constraint.SUP:
                return cst > 0
            elif self.operator == Constraint.SUP_EQ:
                return cst >= 0
            elif self.operator == Constraint.EQ:
                return cst == 0
            elif self.operator == Constraint.NOT_EQ:
                return cst != 0
            else:
                return cst % self.modulus == 0
        else:
            return False

    def is_contradiction(self):
        """True when the constraint is constant and never holds."""
        if self.expression.is_constant():
            cst = self.expression.constant()  # hoisted: evaluate once
            if self.operator == Constraint.INF:
                return cst >= 0
            elif self.operator == Constraint.INF_EQ:
                return cst > 0
            elif self.operator == Constraint.SUP:
                return cst <= 0
            elif self.operator == Constraint.SUP_EQ:
                return cst < 0
            elif self.operator == Constraint.EQ:
                return cst != 0
            elif self.operator == Constraint.NOT_EQ:
                return cst == 0
            else:
                return cst % self.modulus != 0
        else:
            return False

    @property
    def type(self):
        return self.expression.type

    def __repr__(self):
        if self.operator == Constraint.MOD:
            return 'Constraint(%s, %s, modulus=%s)' % (repr(self.expression), self.operator, self.modulus)
        else:
            return 'Constraint(%s, %s)' % (repr(self.expression), self.operator)

    def __str__(self):
        if self.is_tautology():
            return 'true'
        elif self.is_contradiction():
            return 'false'
        else:
            cst = self.expression.constant()
            if self.operator == Constraint.MOD:
                return '%s = %s [%s]' % (LinearExpression(self.expression.terms + [-cst]), -cst, self.modulus)
            else:
                return '%s %s %s' % (LinearExpression(self.expression.terms + [-cst]), self.operator, -cst)


class Constraints(list):
    """A printable list of Constraint objects."""

    def __repr__(self):
        return 'Constraints(%s)' % ', '.join(map(repr, self))

    def __str__(self):
        if not self:
            return '{}'
        else:
            return '{ %s }' % ', '.join(map(str, self))


class Domain:
    """Abstract-domain identifiers understood by the backend."""
    CONSTANT = 'constant'
    INTERVAL = 'interval'
    OCTAGON = 'octagon'


def compute_fixpoint(cfg, z_domain=Domain.INTERVAL, q_domain=Domain.INTERVAL):
    """Run the IKOS fixpoint engine on *cfg*.

    Returns a dict mapping each checkpoint to a pair
    (integer Constraints, rational Constraints).
    Raises TypeException when the CFG is ill-typed.
    """
    # Imported lazily so the pure-Python DSL above works without the
    # compiled backend being installed.
    from ikos import apicore

    cfg.check_types()
    result = apicore.compute_fixpoint(cfg, z_domain, q_domain)

    def convert_z_term(term):
        if isinstance(term, tuple):
            return term[0], Var(term[1], Types.INT)
        else:
            return term

    def convert_q_term(term):
        if isinstance(term, tuple):
            return term[0], Var(term[1], Types.RATIONAL)
        else:
            return term

    def convert_z_constraint(cst):
        expr = LinearExpression(list(map(convert_z_term, cst[0])))
        return Constraint(expr, *cst[1:])

    def convert_q_constraint(cst):
        expr = LinearExpression(list(map(convert_q_term, cst[0])))
        return Constraint(expr, *cst[1:])

    for key, (z_csts, q_csts) in result.items():
        result[key] = Constraints(map(convert_z_constraint, z_csts)), Constraints(map(convert_q_constraint, q_csts))

    return result


# Convenience: allow cfg.fixpoint(...) as well as compute_fixpoint(cfg, ...).
Cfg.fixpoint = compute_fixpoint
Gadal/sympy
refs/heads/master
sympy/combinatorics/testutil.py
51
from __future__ import print_function, division

from sympy.core.compatibility import range
from sympy.combinatorics.util import _distribute_gens_by_base
from sympy.combinatorics import Permutation

rmul = Permutation.rmul


def _cmp_perm_lists(first, second):
    """
    Compare two lists of permutations as sets.

    This is used for testing purposes. Since the array form of a
    permutation is currently a list, Permutation is not hashable
    and cannot be put into a set.

    Examples
    ========

    >>> from sympy.combinatorics.permutations import Permutation
    >>> from sympy.combinatorics.testutil import _cmp_perm_lists
    >>> a = Permutation([0, 2, 3, 4, 1])
    >>> b = Permutation([1, 2, 0, 4, 3])
    >>> c = Permutation([3, 4, 0, 1, 2])
    >>> ls1 = [a, b, c]
    >>> ls2 = [b, c, a]
    >>> _cmp_perm_lists(ls1, ls2)
    True

    """
    # Set comprehensions instead of set([...]): same semantics, no
    # throwaway intermediate lists.
    return {tuple(a) for a in first} == {tuple(a) for a in second}


def _naive_list_centralizer(self, other, af=False):
    """
    Return a list of elements for the centralizer of a subgroup/set/element.

    This is a brute force implementation that goes over all elements of the
    group and checks for membership in the centralizer. It is used to
    test ``.centralizer()`` from ``sympy.combinatorics.perm_groups``.

    Examples
    ========

    >>> from sympy.combinatorics.testutil import _naive_list_centralizer
    >>> from sympy.combinatorics.named_groups import DihedralGroup
    >>> D = DihedralGroup(4)
    >>> _naive_list_centralizer(D, D)
    [Permutation([0, 1, 2, 3]), Permutation([2, 3, 0, 1])]

    See Also
    ========

    sympy.combinatorics.perm_groups.centralizer

    """
    # Imports moved below the docstring: previously the triple-quoted string
    # followed an import statement and was therefore a dead expression, not
    # the function's docstring.
    from sympy.combinatorics.perm_groups import PermutationGroup
    from sympy.combinatorics.permutations import _af_commutes_with
    if hasattr(other, 'generators'):
        elements = list(self.generate_dimino(af=True))
        gens = [x._array_form for x in other.generators]
        commutes_with_gens = lambda x: all(_af_commutes_with(x, gen)
                                           for gen in gens)
        centralizer_list = []
        if not af:
            for element in elements:
                if commutes_with_gens(element):
                    centralizer_list.append(Permutation._af_new(element))
        else:
            for element in elements:
                if commutes_with_gens(element):
                    centralizer_list.append(element)
        return centralizer_list
    elif hasattr(other, '__getitem__'):
        # Bug fix: this previously tested hasattr(other, 'getitem'), which is
        # never true, so lists/tuples of permutations silently fell through to
        # the 'array_form' branch (and then returned None). The sibling
        # _verify_normal_closure already tests '__getitem__' correctly.
        return _naive_list_centralizer(self, PermutationGroup(other), af)
    elif hasattr(other, 'array_form'):
        return _naive_list_centralizer(self, PermutationGroup([other]), af)


def _verify_bsgs(group, base, gens):
    """
    Verify the correctness of a base and strong generating set.

    This is a naive implementation using the definition of a base and a
    strong generating set relative to it. There are other procedures for
    verifying a base and strong generating set, but this one will serve
    for more robust testing.

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import AlternatingGroup
    >>> from sympy.combinatorics.testutil import _verify_bsgs
    >>> A = AlternatingGroup(4)
    >>> A.schreier_sims()
    >>> _verify_bsgs(A, A.base, A.strong_gens)
    True

    See Also
    ========

    sympy.combinatorics.perm_groups.PermutationGroup.schreier_sims

    """
    from sympy.combinatorics.perm_groups import PermutationGroup
    strong_gens_distr = _distribute_gens_by_base(base, gens)
    current_stabilizer = group
    for i in range(len(base)):
        candidate = PermutationGroup(strong_gens_distr[i])
        if current_stabilizer.order() != candidate.order():
            return False
        current_stabilizer = current_stabilizer.stabilizer(base[i])
    # The chain must terminate in the trivial group.
    return current_stabilizer.order() == 1


def _verify_centralizer(group, arg, centr=None):
    """
    Verify the centralizer of a group/set/element inside another group.

    This is used for testing ``.centralizer()`` from
    ``sympy.combinatorics.perm_groups``

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import (SymmetricGroup,
    ... AlternatingGroup)
    >>> from sympy.combinatorics.perm_groups import PermutationGroup
    >>> from sympy.combinatorics.permutations import Permutation
    >>> from sympy.combinatorics.testutil import _verify_centralizer
    >>> S = SymmetricGroup(5)
    >>> A = AlternatingGroup(5)
    >>> centr = PermutationGroup([Permutation([0, 1, 2, 3, 4])])
    >>> _verify_centralizer(S, A, centr)
    True

    See Also
    ========

    _naive_list_centralizer,
    sympy.combinatorics.perm_groups.PermutationGroup.centralizer,
    _cmp_perm_lists

    """
    if centr is None:
        centr = group.centralizer(arg)
    centr_list = list(centr.generate_dimino(af=True))
    centr_list_naive = _naive_list_centralizer(group, arg, af=True)
    return _cmp_perm_lists(centr_list, centr_list_naive)


def _verify_normal_closure(group, arg, closure=None):
    """
    Verify the normal closure of a subgroup/subset/element in a group.

    This is used to test
    sympy.combinatorics.perm_groups.PermutationGroup.normal_closure

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import (SymmetricGroup,
    ... AlternatingGroup)
    >>> from sympy.combinatorics.testutil import _verify_normal_closure
    >>> S = SymmetricGroup(3)
    >>> A = AlternatingGroup(3)
    >>> _verify_normal_closure(S, A, closure=A)
    True

    See Also
    ========

    sympy.combinatorics.perm_groups.PermutationGroup.normal_closure

    """
    # Import moved below the docstring (it previously preceded it, leaving
    # the string above as a dead statement rather than the docstring).
    from sympy.combinatorics.perm_groups import PermutationGroup
    if closure is None:
        closure = group.normal_closure(arg)
    conjugates = set()
    if hasattr(arg, 'generators'):
        subgr_gens = arg.generators
    elif hasattr(arg, '__getitem__'):
        subgr_gens = arg
    elif hasattr(arg, 'array_form'):
        subgr_gens = [arg]
    # The naive closure is the group generated by all conjugates of the
    # generators of ``arg`` by all group elements.
    for el in group.generate_dimino():
        for gen in subgr_gens:
            conjugates.add(gen ^ el)
    naive_closure = PermutationGroup(list(conjugates))
    return closure.is_subgroup(naive_closure)


def canonicalize_naive(g, dummies, sym, *v):
    """
    Canonicalize tensor formed by tensors of the different types

    g  permutation representing the tensor
    dummies  list of dummy indices
    sym  symmetry of the metric
    v is a list of (base_i, gens_i, n_i, sym_i) for tensors of type `i`
    base_i, gens_i BSGS for tensors of this type
    n_i  number of tensors of type `i`

    sym_i symmetry under exchange of two component tensors of type `i`
          None  no symmetry
          0     commuting
          1     anticommuting

    Return 0 if the tensor is zero, else return the array form of
    the permutation representing the canonical form of the tensor.

    Examples
    ========

    >>> from sympy.combinatorics.testutil import canonicalize_naive
    >>> from sympy.combinatorics.tensor_can import get_symmetric_group_sgs
    >>> from sympy.combinatorics import Permutation, PermutationGroup
    >>> g = Permutation([1, 3, 2, 0, 4, 5])
    >>> base2, gens2 = get_symmetric_group_sgs(2)
    >>> canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2, 0))
    [0, 2, 1, 3, 4, 5]
    """
    from sympy.combinatorics.perm_groups import PermutationGroup
    from sympy.combinatorics.tensor_can import gens_products, dummy_sgs
    from sympy.combinatorics.permutations import Permutation, _af_rmul
    v1 = []
    for i in range(len(v)):
        base_i, gens_i, n_i, sym_i = v[i]
        v1.append((base_i, gens_i, [[]]*n_i, sym_i))
    size, sbase, sgens = gens_products(*v1)
    # Normalize the single-type call form (``sym`` given as a plain int)
    # to the general list form.
    # (A dead ``dgens = dummy_sgs(dummies, sym, size - 2)`` assignment that
    # was immediately overwritten below has been removed.)
    if isinstance(sym, int):
        num_types = 1
        dummies = [dummies]
        sym = [sym]
    else:
        num_types = len(sym)
    dgens = []
    for i in range(num_types):
        dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2))
    S = PermutationGroup(sgens)
    D = PermutationGroup([Permutation(x) for x in dgens])
    dlist = list(D.generate(af=True))
    g = g.array_form
    # Enumerate the full double coset D*g*S and keep the sorted orbit.
    st = set()
    for s in S.generate(af=True):
        h = _af_rmul(g, s)
        for d in dlist:
            q = tuple(_af_rmul(d, h))
            st.add(q)
    a = list(st)
    a.sort()
    # If two elements differ only in the sign slot the tensor vanishes.
    prev = (0,)*size
    for h in a:
        if h[:-2] == prev[:-2]:
            if h[-1] != prev[-1]:
                return 0
        prev = h
    return list(a[0])


def graph_certificate(gr):
    """
    Return a certificate for the graph

    gr adjacency list

    The graph is assumed to be unoriented and without external lines.

    Associate to each vertex of the graph a symmetric tensor with
    number of indices equal to the degree of the vertex; indices
    are contracted when they correspond to the same line of the graph.
    The canonical form of the tensor gives a certificate for the graph.

    This is not an efficient algorithm to get the certificate of a graph.

    Examples
    ========

    >>> from sympy.combinatorics.testutil import graph_certificate
    >>> gr1 = {0:[1, 2, 3, 5], 1:[0, 2, 4], 2:[0, 1, 3, 4], 3:[0, 2, 4], 4:[1, 2, 3, 5], 5:[0, 4]}
    >>> gr2 = {0:[1, 5], 1:[0, 2, 3, 4], 2:[1, 3, 5], 3:[1, 2, 4, 5], 4:[1, 3, 5], 5:[0, 2, 3, 4]}
    >>> c1 = graph_certificate(gr1)
    >>> c2 = graph_certificate(gr2)
    >>> c1
    [0, 2, 4, 6, 1, 8, 10, 12, 3, 14, 16, 18, 5, 9, 15, 7, 11, 17, 13, 19, 20, 21]
    >>> c1 == c2
    True
    """
    from sympy.combinatorics.permutations import _af_invert
    from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize
    # Sort vertices by decreasing degree and build the inverse position map.
    items = list(gr.items())
    items.sort(key=lambda x: len(x[1]), reverse=True)
    pvert = [x[0] for x in items]
    pvert = _af_invert(pvert)

    # the indices of the tensor are twice the number of lines of the graph
    num_indices = 0
    for v, neigh in items:
        num_indices += len(neigh)
    # associate to each vertex its indices; for each line
    # between two vertices assign the
    # even index to the vertex which comes first in items,
    # the odd index to the other vertex
    vertices = [[] for i in items]
    i = 0
    for v, neigh in items:
        for v2 in neigh:
            if pvert[v] < pvert[v2]:
                vertices[pvert[v]].append(i)
                vertices[pvert[v2]].append(i + 1)
                i += 2
    g = []
    for v in vertices:
        g.extend(v)
    assert len(g) == num_indices
    g += [num_indices, num_indices + 1]
    size = num_indices + 2
    assert sorted(g) == list(range(size))
    g = Permutation(g)
    # vlen[d] counts the vertices of degree d.
    vlen = [0]*(len(vertices[0]) + 1)
    for neigh in vertices:
        vlen[len(neigh)] += 1
    v = []
    for i in range(len(vlen)):
        n = vlen[i]
        if n:
            base, gens = get_symmetric_group_sgs(i)
            v.append((base, gens, n, 0))
    v.reverse()
    dummies = list(range(num_indices))
    can = canonicalize(g, dummies, 0, *v)
    return can
RicardoJohann/frappe
refs/heads/develop
frappe/core/doctype/report/__init__.py
1829
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals
hitszxp/scikit-learn
refs/heads/master
examples/cluster/plot_dbscan.py
346
# -*- coding: utf-8 -*-
"""
===================================
Demo of DBSCAN clustering algorithm
===================================

Finds core samples of high density and expands clusters from them.

"""
print(__doc__)

import numpy as np

from sklearn.cluster import DBSCAN
from sklearn import metrics
from sklearn.datasets.samples_generator import make_blobs
from sklearn.preprocessing import StandardScaler


##############################################################################
# Generate sample data
# Three isotropic Gaussian blobs; random_state pins the draw so the metrics
# printed below are reproducible run-to-run.
centers = [[1, 1], [-1, -1], [1, -1]]
X, labels_true = make_blobs(n_samples=750, centers=centers, cluster_std=0.4,
                            random_state=0)

# Scale to zero mean / unit variance so a single eps applies to all features.
X = StandardScaler().fit_transform(X)

##############################################################################
# Compute DBSCAN
db = DBSCAN(eps=0.3, min_samples=10).fit(X)
# Boolean mask over all samples marking the core samples DBSCAN found.
core_samples_mask = np.zeros_like(db.labels_, dtype=bool)
core_samples_mask[db.core_sample_indices_] = True
labels = db.labels_

# Number of clusters in labels, ignoring noise if present.
# (DBSCAN assigns the label -1 to noise samples.)
n_clusters_ = len(set(labels)) - (1 if -1 in labels else 0)

# Supervised metrics below compare against labels_true; the silhouette
# coefficient is unsupervised and uses only X and the predicted labels.
print('Estimated number of clusters: %d' % n_clusters_)
print("Homogeneity: %0.3f" % metrics.homogeneity_score(labels_true, labels))
print("Completeness: %0.3f" % metrics.completeness_score(labels_true, labels))
print("V-measure: %0.3f" % metrics.v_measure_score(labels_true, labels))
print("Adjusted Rand Index: %0.3f"
      % metrics.adjusted_rand_score(labels_true, labels))
print("Adjusted Mutual Information: %0.3f"
      % metrics.adjusted_mutual_info_score(labels_true, labels))
print("Silhouette Coefficient: %0.3f"
      % metrics.silhouette_score(X, labels))

##############################################################################
# Plot result
import matplotlib.pyplot as plt

# Black removed and is used for noise instead.
unique_labels = set(labels)
colors = plt.cm.Spectral(np.linspace(0, 1, len(unique_labels)))
for k, col in zip(unique_labels, colors):
    if k == -1:
        # Black used for noise.
        col = 'k'

    class_member_mask = (labels == k)

    # Core samples of this cluster: drawn with large markers.
    xy = X[class_member_mask & core_samples_mask]
    plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col,
             markeredgecolor='k', markersize=14)

    # Non-core samples (border points / noise): small markers.
    xy = X[class_member_mask & ~core_samples_mask]
    plt.plot(xy[:, 0], xy[:, 1], 'o', markerfacecolor=col,
             markeredgecolor='k', markersize=6)

plt.title('Estimated number of clusters: %d' % n_clusters_)
plt.show()
dotmagic/python-fu
refs/heads/master
kaleidosix.py
1
#!/usr/bin/env python

# Copyright (C) 2005 Werner Hartnagel <info@dotmagic.de>
# based on Perl plugin from Magnus Enger
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.

from gimpfu import *

import sys, math

def python_fu_kaleidosix(old, drawable, width, height):
    """Turn the current selection of ``old`` into a new width x height image
    tiled with six-fold kaleidoscopic cells.

    old      -- source image (must have an active selection)
    drawable -- drawable of the source image to copy pixels from
    width    -- width of the generated image
    height   -- height of the generated image
    """
    # Find the area that has been selected in the original image
    old_is_sel, old_x1, old_y1, old_x2, old_y2 = pdb.gimp_selection_bounds(old)
    if not old_is_sel:
        pdb.gimp_message("FATAL: Missing selection in old image!")
        # Bug fix: previously only a commented-out sys.exit(1) followed the
        # message, so the plugin continued with meaningless bounds. Abort.
        return

    old_height = old_y2 - old_y1

    # Turn the initial selection into a rectangle with equal sides,
    # pointing to the right (an equilateral triangle selection).
    xm = int(old_y1 + (old_height / 2))
    inner_width = int(math.sqrt((old_height**2) - ((old_height/2)**2)))
    ym = int(old_x1 + inner_width)
    points = (old_x1, old_y1, ym, xm, old_x1, old_y2)
    pdb.gimp_free_select(old, 6, points, CHANNEL_OP_REPLACE, 0, 0, 0)

    # Copy the selected area
    pdb.gimp_selection_grow(old, 1)
    # gimp_selection_feather($old, 0);
    pdb.gimp_edit_copy(drawable)

    # Create a new image
    img = gimp.Image(width, height, RGB)

    # Disable Undo
    # img.undo_group_start()

    # Create a new layer sized like the initial selection.
    layer = gimp.Layer(img, "first", inner_width, old_y2 - old_y1, 0, 100, 0)
    img.add_layer(layer, 0)
    layer.add_alpha()

    # Clear the layer of any initial garbage
    pdb.gimp_edit_clear(layer)

    # Add the copied selection to the layer, and the layer to the image
    layer.fill(3)
    pdb.gimp_edit_paste(layer, 1)
    pdb.gimp_floating_sel_anchor(pdb.gimp_image_get_floating_sel(img))
    move_down = (old_y2 - old_y1) / 2
    layer.translate(0, move_down)

    # Create the mirrored layer
    mirror = gimp.Layer(img, "mirror", inner_width, old_y2 - old_y1, 0, 100, 0)
    img.add_layer(mirror, 0)
    mirror.add_alpha()
    pdb.gimp_edit_clear(mirror)
    mirror.fill(3)
    pdb.gimp_edit_paste(mirror, 1)
    pdb.gimp_floating_sel_anchor(pdb.gimp_image_get_floating_sel(img))
    pdb.gimp_layer_translate(mirror, inner_width, move_down)
    #pdb.gimp_flip(mirror, 0)
    pdb.gimp_drawable_transform_flip_simple(mirror, ORIENTATION_HORIZONTAL, 1, 0, 0)

    # Make copies of the first pair and rotate them (2.094 rad = 120 deg).
    # Merge the two layers
    combo = img.merge_visible_layers(1)
    # Make a copy for rotating
    copy1 = combo.copy(0)
    img.add_layer(copy1, 0)
    #pdb.gimp_rotate(copy1, 0, 2.094)
    pdb.gimp_drawable_transform_rotate_default(copy1, 2.094, 0, inner_width, old_y2 - old_y1, 1, 0)
    pdb.gimp_selection_none(img)
    # Make a second copy for the second rotation
    copy2 = combo.copy(0)
    img.add_layer(copy2, 0)
    #pdb.gimp_rotate(copy2, 0, -2.094)
    pdb.gimp_drawable_transform_rotate_default(copy2, -2.094, 0, inner_width, old_y2 - old_y1, 1, 0)
    pdb.gimp_selection_none(img)

    # Fill the available space with cells!
    # Turn all the "temporary" layers into one
    #cell = pdb.gimp_image_merge_visible_layers(img, 1)
    cell = img.merge_visible_layers(1)

    # Position the cells
    # NOTE(review): rows is derived from width and cols from height — looks
    # swapped, but kept as-is since the offsets below compensate; confirm
    # against non-square targets before changing.
    rows = int(width / old_height) + 1
    cols = int(height / old_height) + 1
    for i in range(0, rows):
        for j in range(0, cols):
            this_layer = cell.copy(0)
            img.add_layer(this_layer, 0)
            # Every other row is shifted half a cell to interlock the tiles.
            if i % 2 == 0:
                offx = 2*j - 1
            else:
                offx = 2*j
            offy = 1.5*i - 1
            # This feels like a bit of a hack, but it seems to work:
            pdb.gimp_layer_set_offsets(this_layer, (offx*inner_width) - offx, (offy*old_height) - (offy + 6*i))

    #pdb.gimp_image_merge_visible_layers(img, 1)
    img.merge_visible_layers(1)
    gimp.Display(img)

    # Enable Undo
    # img.undo_group_end()

# Register with The Gimp
register(
    "python_fu_kaleidosix",
    "Turn selection into tiled kaleidoscopic image",
    "Turn selection into tiled kaleidoscopic image",
    "Werner Hartnagel, Magnus Enger",
    "(c) 2005, Werner Hartnagel",
    "2005-08-25",
    "<Image>/Python-Fu/Patterns/Kaleidosix...",
    "*",
    [
        (PF_INT32, "width", "Width", 500),
        (PF_INT32, "height", "Height", 500),
    ],
    [],
    python_fu_kaleidosix)

main()
RamezIssac/django-sitetree
refs/heads/master
sitetree/south_migrations/0005_auto__add_field_treeitem_access_guest.py
5
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    """Add the ``access_guest`` permission flag to ``sitetree.TreeItem``."""

    def forwards(self, orm):
        """Apply the migration: create the indexed boolean column."""
        # Adding field 'TreeItem.access_guest'
        db.add_column('sitetree_treeitem', 'access_guest',
                      self.gf('django.db.models.fields.BooleanField')(default=False, db_index=True),
                      keep_default=False)

    def backwards(self, orm):
        """Revert the migration: drop the column again."""
        # Deleting field 'TreeItem.access_guest'
        db.delete_column('sitetree_treeitem', 'access_guest')

    # Frozen ORM snapshot used by South when running this migration.
    # Auto-generated — do not edit by hand.
    models = {
        'auth.permission': {
            'Meta': {'object_name': 'Permission', 'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)"},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'contenttypes.contenttype': {
            'Meta': {'object_name': 'ContentType', 'db_table': "'django_content_type'", 'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'sitetree.tree': {
            'Meta': {'object_name': 'Tree'},
            'alias': ('django.db.models.fields.CharField', [], {'unique': 'True', 'db_index': 'True', 'max_length': '80'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        'sitetree.treeitem': {
            'Meta': {'object_name': 'TreeItem', 'unique_together': "(('tree', 'alias'),)"},
            'access_guest': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'access_loggedin': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'access_perm_type': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'access_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'access_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'alias': ('sitetree.models.CharFieldNullable', [], {'null': 'True', 'blank': 'True', 'db_index': 'True', 'max_length': '80'}),
            'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
            'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'hint': ('django.db.models.fields.CharField', [], {'max_length': '200', 'default': "''", 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'inbreadcrumbs': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'inmenu': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'insitetree': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'null': 'True', 'related_name': "'treeitem_parent'", 'to': "orm['sitetree.TreeItem']", 'blank': 'True'}),
            'sort_order': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'tree': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'treeitem_tree'", 'to': "orm['sitetree.Tree']"}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
            'urlaspattern': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'})
        }
    }

    complete_apps = ['sitetree']
ad-m/django-atom
refs/heads/master
atom/ext/guardian/views.py
1
from braces.views import LoginRequiredMixin  # django.contrib.auth.mixins lack of redirect_unauthenticated_users
from guardian.mixins import PermissionRequiredMixin


class RaisePermissionRequiredMixin(LoginRequiredMixin, PermissionRequiredMixin):
    """Check object permissions while preserving the correct HTTP status code.

    Authenticated users lacking the permission get an exception (403)
    rather than a redirect; unauthenticated users are redirected to login.
    """
    raise_exception = True
    redirect_unauthenticated_users = True


class AttrPermissionRequiredMixin(RaisePermissionRequiredMixin):
    """Verify object permission in a SingleObject-style view.

    Attributes:
        permission_attribute (str): A path to traverse from object to permission object
    """
    permission_attribute = None

    @staticmethod
    def _resolve_path(obj, path=None):
        """Resolve a django-like ``a__b__c`` attribute path against *obj*.

        Args:
            obj: The object the view is displaying.
            path (str, optional): ``__``-separated chain of attribute names.

        Returns:
            The object reached at the end of the resolved path (or *obj*
            itself when *path* is empty).
        """
        if not path:
            return obj
        target = obj
        for attr_name in path.split('__'):
            target = getattr(target, attr_name)
        return target

    def get_permission_object(self):
        # Fetch the displayed object, then walk permission_attribute to the
        # object the permission check should run against.
        root = super(AttrPermissionRequiredMixin, self).get_object()
        return self._resolve_path(root, self.permission_attribute)

    def get_object(self):
        # Cache the object on the view so repeated calls hit the DB once.
        try:
            return self.object
        except AttributeError:
            self.object = super(AttrPermissionRequiredMixin, self).get_object()
            return self.object
peiwei/zulip
refs/heads/master
zilencer/models.py
126
from django.db import models

import zerver.models

def get_deployment_by_domain(domain):
    """Return the Deployment owning the realm registered under *domain*."""
    return Deployment.objects.get(realms__domain=domain)

class Deployment(models.Model):
    """A remote Zulip deployment and the realms it serves."""
    realms = models.ManyToManyField(zerver.models.Realm, related_name="_deployments")
    is_active = models.BooleanField(default=True)

    # TODO: This should really become the public portion of a keypair, and
    # it should be settable only with an initial bearer "activation key"
    api_key = models.CharField(max_length=32, null=True)

    base_api_url = models.CharField(max_length=128)
    base_site_url = models.CharField(max_length=128)

    @property
    def endpoints(self):
        """Dict with the deployment's API and site base URLs."""
        urls = {}
        urls['base_api_url'] = self.base_api_url
        urls['base_site_url'] = self.base_site_url
        return urls

    @property
    def name(self):
        # TODO: This only does the right thing for prod because prod authenticates to
        # staging with the zulip.com deployment key, while staging is technically the
        # deployment for the zulip.com realm.
        # This also doesn't necessarily handle other multi-realm deployments correctly.
        first_realm = self.realms.order_by('pk')[0]
        return first_realm.domain
ecoPlanos/SputnikRedeSensores
refs/heads/master
Arduino/Temp_HR/Central_Temp_HR/lib/Arduino-master/tests/device/test_http_server/test_http_server.py
11
from mock_decorators import setup, teardown
from threading import Thread
from poster.encode import MultipartParam
from poster.encode import multipart_encode
from poster.streaminghttp import register_openers
import urllib2
import urllib


def http_test(res, url, get=None, post=None):
    """Request *url* (appending *get* as a query string, sending *post* as a
    urlencoded body) and compare the response to *res*.

    Returns 0 when the body matches, 1 on mismatch or any error.
    """
    body = ''
    try:
        if get:
            url = url + '?' + urllib.urlencode(get)
        payload = urllib.urlencode(post) if post else post
        body = urllib2.urlopen(url, payload, 2).read()
    except:  # best-effort: any failure counts as a test failure
        return 1
    return 0 if body == res else 1


@setup('HTTP GET Parameters')
def setup_http_get_params(e):
    def run():
        return http_test('var1=val with spaces&var+=some%',
                         'http://etd.local/get',
                         {'var1': 'val with spaces', 'var+': 'some%'})
    Thread(target=run).start()

@teardown('HTTP GET Parameters')
def teardown_http_get_params(e):
    return 0


@setup('HTTP POST Parameters')
def setup_http_post_params(e):
    def run():
        return http_test('var2=val with spaces',
                         'http://etd.local/post',
                         None,
                         {'var2': 'val with spaces'})
    Thread(target=run).start()

@teardown('HTTP POST Parameters')
def teardown_http_post_params(e):
    return 0


@setup('HTTP GET+POST Parameters')
def setup_http_getpost_params(e):
    def run():
        return http_test('var3=val with spaces&var+=some%',
                         'http://etd.local/get_and_post',
                         {'var3': 'val with spaces'},
                         {'var+': 'some%'})
    Thread(target=run).start()

@teardown('HTTP GET+POST Parameters')
def teardown_http_getpost_params(e):
    return 0


@setup('HTTP Upload')
def setup_http_upload(e):
    def run():
        # Multipart upload of a small file plus one form field.
        try:
            register_openers()
            part = MultipartParam("file", "0123456789abcdef", "test.txt",
                                  "text/plain; charset=utf8")
            datagen, headers = multipart_encode([("var4", "val with spaces"), part])
            req = urllib2.Request('http://etd.local/upload', datagen, headers)
            body = urllib2.urlopen(req, None, 2).read()
        except:  # best-effort: any failure counts as a test failure
            return 1
        return 0 if body == 'test.txt:16&var4=val with spaces' else 1
    Thread(target=run).start()

@teardown('HTTP Upload')
def teardown_http_upload(e):
    return 0