| repo_name | ref | path | copies | content |
|---|---|---|---|---|
awsdocs/aws-doc-sdk-examples
|
refs/heads/master
|
python/test_tools/fixtures/common.py
|
1
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Common test fixtures that can be used throughout the unit tests for all Python
code examples.
"""
import contextlib
import time
import pytest
from test_tools.stubber_factory import stubber_factory
def pytest_addoption(parser):
"""Add an option to run tests against an actual AWS account instead of
the Stubber."""
parser.addoption(
"--use-real-aws-may-incur-charges", action="store_true", default=False,
help="Connect to real AWS services while testing. **Warning: this might incur "
"charges on your account!**"
)
def pytest_configure(config):
"""Register the skip_if_real_aws marker with Pytest."""
config.addinivalue_line(
"markers", "skip_if_real_aws: mark test to run only when stubbed."
)
def pytest_runtest_setup(item):
"""Handle the custom marker skip_if_real_aws, which skips a test when it is
run against actual AWS services."""
skip_if_real_aws = 'skip_if_real_aws' in [m.name for m in item.iter_markers()]
if skip_if_real_aws:
if item.config.getoption("--use-real-aws-may-incur-charges"):
pytest.skip("When run with actual AWS services instead of stub functions, "
"this test will fail because it uses test data. To run this "
"test with AWS services, you must first substitute actual "
"data, such as user IDs, for test data.")
@pytest.fixture(name="use_real_aws")
def fixture_use_real_aws(request):
"""Indicates whether the 'use_real_aws' option is on or off."""
return request.config.getoption("--use-real-aws-may-incur-charges")
@pytest.fixture(name='make_stubber')
def fixture_make_stubber(request, monkeypatch):
"""
Return a factory function that makes an object configured either
to pass calls through to AWS or to use stubs.
:param request: An object that contains configuration parameters.
:param monkeypatch: The Pytest monkeypatch object.
:return: A factory function that makes the stubber object.
"""
def _make_stubber(service_client):
"""
Create a class that wraps the botocore Stubber and implements a variety of
stub functions that can be used in unit tests for the specified service client.
After tests complete, the stubber checks that no more responses remain in its
queue. This lets tests verify that all expected calls were actually made during
the test.
When tests are run against an actual AWS account, the stubber does not
set up stubs and passes all calls through to the Boto 3 client.
:param service_client: The Boto 3 service client to stub.
:return: The stubber object, configured either for actual AWS or for stubbing.
"""
fact = stubber_factory(service_client.meta.service_model.service_name)
stubber = fact(
service_client,
not request.config.getoption("--use-real-aws-may-incur-charges")
)
if stubber.use_stubs:
def fin():
stubber.assert_no_pending_responses()
stubber.deactivate()
request.addfinalizer(fin)
stubber.activate()
return stubber
return _make_stubber
@pytest.fixture(name='make_unique_name')
def fixture_make_unique_name():
"""
Return a factory function that can be used to create a unique name.
:return: The function to create a unique name.
"""
def _make_unique_name(prefix):
"""
Creates a unique name based on a prefix and the current time in nanoseconds.
:return: A unique name that can be used to create something, such as
an Amazon S3 bucket.
"""
return f"{prefix}{time.time_ns()}"
return _make_unique_name
@pytest.fixture(name='make_bucket')
def fixture_make_bucket(request, make_unique_name):
"""
Return a factory function that can be used to make a bucket for testing.
:param request: The Pytest request object that contains configuration data.
:param make_unique_name: A function that creates a unique name.
:return: The factory function to make a test bucket.
"""
def _make_bucket(s3_stubber, s3_resource, region_name=None):
"""
Make a bucket that can be used for testing. When stubbing is used, a stubbed
bucket is created. When AWS services are used, the bucket is deleted after
the test completes.
:param s3_stubber: The S3Stubber object, configured for stubbing or AWS.
:param s3_resource: The Boto 3 S3 resource used to create the bucket.
:param region_name: The AWS Region in which to create the bucket.
:return: The test bucket.
"""
bucket_name = make_unique_name('bucket')
if not region_name:
region_name = s3_resource.meta.client.meta.region_name
s3_stubber.stub_create_bucket(bucket_name, region_name)
bucket = s3_resource.create_bucket(
Bucket=bucket_name,
CreateBucketConfiguration={
'LocationConstraint': region_name
}
)
def fin():
if not s3_stubber.use_stubs:
bucket.delete()
request.addfinalizer(fin)
return bucket
return _make_bucket
class StubRunner:
"""
Adds stubbed responses until a specified method name is encountered.
Stub functions from any stubber can be added. The error code is
automatically added to the specified stop method.
"""
def __init__(self, error_code, stop_on_method):
self.stubs = []
self.error_code = error_code
self.stop_on_method = stop_on_method
def add(self, func, *func_args, keep_going=False, **func_kwargs):
"""
Adds a stubbed function response to the list.
:param func: The stub function, bound to the stubber that implements it.
:param keep_going: When True, continue to process stub responses that follow
this function. Otherwise, stop on this function.
:param func_args: The positional arguments of the stub function.
:param func_kwargs: The keyword arguments of the stub function.
"""
self.stubs.append({
'func': func, 'keep_going': keep_going,
'func_args': func_args, 'func_kwargs': func_kwargs
})
def run(self):
"""
Adds stubbed responses until the specified method is encountered. The
specified error code is added to the kwargs for the final method and no
more responses are added to the stubbers. When no method is specified, all
responses are added and the error code is not used.
This makes it possible to write flexible tests that either run successfully
through all stubbed calls or raise an error partway through and exit early.
"""
for stub in self.stubs:
if self.stop_on_method == stub['func'].__name__ and not stub['keep_going']:
stub['func_kwargs']['error_code'] = self.error_code
stub['func'](*stub['func_args'], **stub['func_kwargs'])
if self.stop_on_method == stub['func'].__name__ and not stub['keep_going']:
break
@pytest.fixture
def stub_runner():
"""
Encapsulates the StubRunner in a context manager so its run function is called
when the context is exited.
"""
@contextlib.contextmanager
def _runner(err, stop):
runner = StubRunner(err, stop)
yield runner
runner.run()
return _runner
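# --- Editor's usage sketch (hypothetical, not part of the original file) ---
# A minimal example of how the fixtures above might be combined in a test.
# The stub function name `stub_list_buckets` is an assumption; actual names
# depend on the stubber classes that stubber_factory returns.
import boto3

def test_stub_runner_sketch(make_stubber, stub_runner):
    s3_client = boto3.client('s3')
    s3_stubber = make_stubber(s3_client)
    with stub_runner('TestException', 'stub_list_buckets') as runner:
        # run() fires on context exit; the error code is injected into the
        # kwargs of the stop_on_method function, so the same test can cover
        # both the happy path (stop=None) and early-exit error paths.
        runner.add(s3_stubber.stub_list_buckets, ['bucket-1', 'bucket-2'])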
|
amith01994/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyTypeCheckerInspection/FunctionParameterReturnType.py
|
49
|
def func(f, seq):
"""
:param f: my param
:type f: (unknown) -> str
:rtype: list[str]
"""
return [f(v) for v in seq]
def f(x):
return int(x)
def test():
for item in func(<warning descr="Expected type '(Any) -> str', got '(x: Any) -> int' instead">f</warning>, []):
pass
for item in func(<warning descr="Expected type '(Any) -> str', got 'int' instead">int</warning>, []):
pass
for item in func(<warning descr="Expected type '(Any) -> str', got '(x: Any) -> int' instead">lambda x: int(x)</warning>, []):
pass
for item in func(lambda x: str(x), []):
pass
for item in func(str, []):
pass
|
mitsuhiko/django
|
refs/heads/master
|
django/contrib/localflavor/fi/forms.py
|
2
|
"""
FI-specific Form helpers
"""
import re
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select
from django.utils.translation import ugettext_lazy as _
class FIZipCodeField(RegexField):
default_error_messages = {
'invalid': _('Enter a zip code in the format XXXXX.'),
}
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
super(FIZipCodeField, self).__init__(r'^\d{5}$',
max_length, min_length, *args, **kwargs)
class FIMunicipalitySelect(Select):
"""
A Select widget that uses a list of Finnish municipalities as its choices.
"""
def __init__(self, attrs=None):
from fi_municipalities import MUNICIPALITY_CHOICES
super(FIMunicipalitySelect, self).__init__(attrs, choices=MUNICIPALITY_CHOICES)
class FISocialSecurityNumber(Field):
default_error_messages = {
'invalid': _('Enter a valid Finnish social security number.'),
}
def clean(self, value):
super(FISocialSecurityNumber, self).clean(value)
if value in EMPTY_VALUES:
return u''
checkmarks = "0123456789ABCDEFHJKLMNPRSTUVWXY"
result = re.match(r"""^
(?P<date>([0-2]\d|3[01])
(0\d|1[012])
(\d{2}))
[A+-]
(?P<serial>(\d{3}))
(?P<checksum>[%s])$""" % checkmarks, value, re.VERBOSE | re.IGNORECASE)
if not result:
raise ValidationError(self.error_messages['invalid'])
gd = result.groupdict()
checksum = int(gd['date'] + gd['serial'])
if checkmarks[checksum % len(checkmarks)] == gd['checksum'].upper():
return u'%s' % value.upper()
raise ValidationError(self.error_messages['invalid'])
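# --- Editor's sketch (illustrative, not part of the original module) ---
# A standalone restatement of the checksum rule used in clean() above: the
# check character is drawn from `checkmarks` at index (ddmmyy + serial) mod 31.
def _expected_check_character(date_part, serial):
    """Return the check character that makes 'ddmmyy[-+A]sss<char>' valid.
    Inputs here are for illustration only."""
    checkmarks = "0123456789ABCDEFHJKLMNPRSTUVWXY"
    return checkmarks[int(date_part + serial) % len(checkmarks)]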
|
astrorafael/tessflux
|
refs/heads/master
|
tessflux/tessflux.py
|
1
|
# ----------------------------------------------------------------------
# Copyright (c) 2014 Rafael Gonzalez.
#
# See the LICENSE file for details
# ----------------------------------------------------------------------
#--------------------
# System wide imports
# -------------------
from __future__ import division, absolute_import
import sys
import datetime
import random
import os
from collections import deque
# ---------------
# Twisted imports
# ---------------
from twisted import __version__ as __twisted_version__
from twisted.logger import Logger, LogLevel
from twisted.internet import task, reactor, defer
from twisted.internet.defer import inlineCallbacks
from twisted.internet.threads import deferToThread
#--------------
# local imports
# -------------
from tessflux import __version__
from tessflux.config import VERSION_STRING, loadCfgFile
from tessflux.logger import setLogLevel
from tessflux.service.reloadable import MultiService
from tessflux.influxdb import InfluxDBService
from tessflux.mqttsubs import MQTTService
from tessflux.defer import DeferredQueue
# ----------------
# Module constants
# ----------------
# -----------------------
# Module global variables
# -----------------------
log = Logger(namespace='tessflux')
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
class TESSFluxDBService(MultiService):
# Service name
NAME = 'MQTT To InfluxDB converter'
# Stats period task in seconds
T_STAT = 3600
def __init__(self, options, cfgFilePath):
MultiService.__init__(self)
setLogLevel(namespace='tessflux', levelStr=options['log_level'])
self.cfgFilePath = cfgFilePath
self.options = options
self.queue = DeferredQueue(backlog=1)
self.statsTask = task.LoopingCall(self.logCounters)
@inlineCallbacks
def reloadService(self, options):
'''
Reload application parameters
'''
log.warn("{tessflux} config being reloaded", tessflux=VERSION_STRING)
try:
options = yield deferToThread(loadCfgFile, self.cfgFilePath)
except Exception as e:
log.error("Error trying to reload: {excp!s}", excp=e)
else:
self.options = options['tessflux']
MultiService.reloadService(self, options)
def startService(self):
'''
Starts the database service and checks whether we can continue.
'''
log.info('starting {name} {version} using Twisted {tw_version}',
name=self.name,
version=__version__,
tw_version=__twisted_version__)
self.influxdbService = self.getServiceNamed(InfluxDBService.NAME)
self.mqttService = self.getServiceNamed(MQTTService.NAME)
self.statsTask.start(self.T_STAT, now=False) # call every T seconds
try:
self.influxdbService.startService()
except Exception as e:
log.failure("{excp!s}", excp=e)
log.critical("Problems initializing {name}. Exiting gracefully", name=InfluxDBService.NAME)
reactor.callLater(0, reactor.stop)
else:
self.mqttService.startService()
# -------------
# log stats API
# -------------
def resetCounters(self):
'''Resets stat counters'''
self.mqttService.resetCounters()
self.influxdbService.resetCounters()
def logCounters(self):
'''log stat counters'''
self.mqttService.logCounters()
self.influxdbService.logCounters()
self.resetCounters()
__all__ = [ "TESSFluxDBService" ]
|
atlassian/dd-agent
|
refs/heads/master
|
checks.d/vsphere.py
|
7
|
# stdlib
from copy import deepcopy
from datetime import datetime, timedelta
from hashlib import md5
from Queue import Empty, Queue
import re
import ssl
import time
import traceback
# 3p
from pyVim import connect
from pyVmomi import vim
# project
from checks import AgentCheck
from checks.libs.thread_pool import Pool
from checks.libs.vmware.basic_metrics import BASIC_METRICS
from checks.libs.vmware.all_metrics import ALL_METRICS
from util import Timer
SOURCE_TYPE = 'vsphere'
REAL_TIME_INTERVAL = 20 # Default vCenter sampling interval
# The size of the ThreadPool used to process the request queue
DEFAULT_SIZE_POOL = 4
# The interval in seconds between two refreshes of the entities list
REFRESH_MORLIST_INTERVAL = 3 * 60
# The interval in seconds between two refreshes of the metrics metadata (id<->name)
REFRESH_METRICS_METADATA_INTERVAL = 10 * 60
# The number of jobs batched at the same time in the queue to query available metrics
BATCH_MORLIST_SIZE = 50
# Time after which we reap the jobs that clog the queue
# TODO: use it
JOB_TIMEOUT = 10
EXCLUDE_FILTERS = {
'AlarmStatusChangedEvent': [r'Gray'],
'TaskEvent': [
r'Initialize powering On',
r'Power Off virtual machine',
r'Power On virtual machine',
r'Reconfigure virtual machine',
r'Relocate virtual machine',
r'Suspend virtual machine',
r'Migrate virtual machine',
],
'VmBeingHotMigratedEvent': [],
'VmMessageEvent': [],
'VmMigratedEvent': [],
'VmPoweredOnEvent': [],
'VmPoweredOffEvent': [],
'VmReconfiguredEvent': [],
'VmResumedEvent': [],
'VmSuspendedEvent': [],
}
MORLIST = 'morlist'
METRICS_METADATA = 'metrics_metadata'
LAST = 'last'
INTERVAL = 'interval'
class VSphereEvent(object):
UNKNOWN = 'unknown'
def __init__(self, raw_event, event_config=None):
self.raw_event = raw_event
if self.raw_event and self.raw_event.__class__.__name__.startswith('vim.event'):
self.event_type = self.raw_event.__class__.__name__[10:]
else:
self.event_type = VSphereEvent.UNKNOWN
self.timestamp = int((self.raw_event.createdTime.replace(tzinfo=None) - datetime(1970, 1, 1)).total_seconds())
self.payload = {
"timestamp": self.timestamp,
"event_type": SOURCE_TYPE,
"source_type_name": SOURCE_TYPE,
}
if event_config is None:
self.event_config = {}
else:
self.event_config = event_config
def _is_filtered(self):
# Filter the unwanted types
if self.event_type not in EXCLUDE_FILTERS:
return True
filters = EXCLUDE_FILTERS[self.event_type]
for f in filters:
if re.search(f, self.raw_event.fullFormattedMessage):
return True
return False
def get_datadog_payload(self):
if self._is_filtered():
return None
transform_method = getattr(self, 'transform_%s' % self.event_type.lower(), None)
if callable(transform_method):
return transform_method()
# Default event transformation
self.payload["msg_title"] = u"{0}".format(self.event_type)
self.payload["msg_text"] = u"@@@\n{0}\n@@@".format(self.raw_event.fullFormattedMessage)
return self.payload
def transform_vmbeinghotmigratedevent(self):
self.payload["msg_title"] = u"VM {0} is being migrated".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"{user} has launched a hot migration of this virtual machine:\n".format(user=self.raw_event.userName)
changes = []
pre_host = self.raw_event.host.name
new_host = self.raw_event.destHost.name
pre_dc = self.raw_event.datacenter.name
new_dc = self.raw_event.destDatacenter.name
pre_ds = self.raw_event.ds.name
new_ds = self.raw_event.destDatastore.name
if pre_host == new_host:
changes.append(u"- No host migration: still {0}".format(new_host))
else:
# Insert in front if it's a change
changes = [u"- Host MIGRATION: from {0} to {1}".format(pre_host, new_host)] + changes
if pre_dc == new_dc:
changes.append(u"- No datacenter migration: still {0}".format(new_dc))
else:
# Insert in front if it's a change
changes = [u"- Datacenter MIGRATION: from {0} to {1}".format(pre_dc, new_dc)] + changes
if pre_ds == new_ds:
changes.append(u"- No datastore migration: still {0}".format(new_ds))
else:
# Insert in front if it's a change
changes = [u"- Datastore MIGRATION: from {0} to {1}".format(pre_ds, new_ds)] + changes
self.payload["msg_text"] += "\n".join(changes)
self.payload['host'] = self.raw_event.vm.name
self.payload['tags'] = [
'vsphere_host:%s' % pre_host,
'vsphere_host:%s' % new_host,
'vsphere_datacenter:%s' % pre_dc,
'vsphere_datacenter:%s' % new_dc,
]
return self.payload
def transform_alarmstatuschangedevent(self):
if self.event_config.get('collect_vcenter_alarms') is None:
return None
def get_transition(before, after):
vals = {
'gray': -1,
'green': 0,
'yellow': 1,
'red': 2
}
before = before.lower()
after = after.lower()
if before not in vals or after not in vals:
return None
if vals[before] < vals[after]:
return 'Triggered'
else:
return 'Recovered'
TO_ALERT_TYPE = {
'green': 'success',
'yellow': 'warning',
'red': 'error'
}
def get_agg_key(alarm_event):
return 'h:{0}|dc:{1}|a:{2}'.format(
md5(alarm_event.entity.name).hexdigest()[:10],
md5(alarm_event.datacenter.name).hexdigest()[:10],
md5(alarm_event.alarm.name).hexdigest()[:10]
)
# Get the entity type/name
if self.raw_event.entity.entity.__class__ == vim.VirtualMachine:
host_type = 'VM'
elif self.raw_event.entity.entity.__class__ == vim.HostSystem:
host_type = 'host'
else:
return None
host_name = self.raw_event.entity.name
# Need a getattr because from is a reserved keyword...
trans_before = getattr(self.raw_event, 'from')
trans_after = self.raw_event.to
transition = get_transition(trans_before, trans_after)
# Bad transition, we shouldn't have got this transition
if transition is None:
return None
self.payload['msg_title'] = u"[{transition}] {monitor} on {host_type} {host_name} is now {status}".format(
transition=transition,
monitor=self.raw_event.alarm.name,
host_type=host_type,
host_name=host_name,
status=trans_after
)
self.payload['alert_type'] = TO_ALERT_TYPE[trans_after]
self.payload['event_object'] = get_agg_key(self.raw_event)
self.payload['msg_text'] = u"""vCenter monitor status changed on this alarm, it was {before} and it's now {after}.""".format(
before=trans_before,
after=trans_after
)
self.payload['host'] = host_name
return self.payload
def transform_vmmessageevent(self):
self.payload["msg_title"] = u"VM {0} is reporting".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"@@@\n{0}\n@@@".format(self.raw_event.fullFormattedMessage)
self.payload['host'] = self.raw_event.vm.name
return self.payload
def transform_vmmigratedevent(self):
self.payload["msg_title"] = u"VM {0} has been migrated".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"@@@\n{0}\n@@@".format(self.raw_event.fullFormattedMessage)
self.payload['host'] = self.raw_event.vm.name
return self.payload
def transform_vmpoweredoffevent(self):
self.payload["msg_title"] = u"VM {0} has been powered OFF".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"""{user} has powered off this virtual machine. It was running on:
- datacenter: {dc}
- host: {host}
""".format(
user=self.raw_event.userName,
dc=self.raw_event.datacenter.name,
host=self.raw_event.host.name
)
self.payload['host'] = self.raw_event.vm.name
return self.payload
def transform_vmpoweredonevent(self):
self.payload["msg_title"] = u"VM {0} has been powered ON".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"""{user} has powered on this virtual machine. It is running on:
- datacenter: {dc}
- host: {host}
""".format(
user=self.raw_event.userName,
dc=self.raw_event.datacenter.name,
host=self.raw_event.host.name
)
self.payload['host'] = self.raw_event.vm.name
return self.payload
def transform_vmresumingevent(self):
self.payload["msg_title"] = u"VM {0} is RESUMING".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"""{user} has resumed {vm}. It will soon be powered on.""".format(
user=self.raw_event.userName,
vm=self.raw_event.vm.name
)
self.payload['host'] = self.raw_event.vm.name
return self.payload
def transform_vmsuspendedevent(self):
self.payload["msg_title"] = u"VM {0} has been SUSPENDED".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"""{user} has suspended this virtual machine. It was running on:
- datacenter: {dc}
- host: {host}
""".format(
user=self.raw_event.userName,
dc=self.raw_event.datacenter.name,
host=self.raw_event.host.name
)
self.payload['host'] = self.raw_event.vm.name
return self.payload
def transform_vmreconfiguredevent(self):
self.payload["msg_title"] = u"VM {0} configuration has been changed".format(self.raw_event.vm.name)
self.payload["msg_text"] = u"{user} saved the new configuration:\n@@@\n".format(user=self.raw_event.userName)
# Add lines describing the configuration change; skip 'unset' lines (hacky, but keeps the message short)
config_change_lines = [line for line in self.raw_event.configSpec.__repr__().splitlines() if 'unset' not in line]
self.payload["msg_text"] += u"\n".join(config_change_lines)
self.payload["msg_text"] += u"\n@@@"
self.payload['host'] = self.raw_event.vm.name
return self.payload
def atomic_method(method):
""" Decorator to catch the exceptions that happen in detached thread atomic tasks
and display them in the logs.
"""
def wrapper(*args, **kwargs):
try:
method(*args, **kwargs)
except Exception:
args[0].exceptionq.put("A worker thread crashed:\n" + traceback.format_exc())
return wrapper
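# --- Editor's usage sketch (illustrative, not part of the check) ---
# atomic_method expects the decorated method's instance to carry an
# `exceptionq` Queue: exceptions raised in a worker thread are reported
# there as formatted tracebacks instead of silently killing the thread.
class _SketchWorker(object):
    def __init__(self):
        self.exceptionq = Queue()

    @atomic_method
    def do_work(self):
        raise RuntimeError("boom")  # ends up on self.exceptionq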
class VSphereCheck(AgentCheck):
""" Get performance metrics from a vCenter server and upload them to Datadog
References:
http://pubs.vmware.com/vsphere-51/index.jsp#com.vmware.wssdk.apiref.doc/vim.PerformanceManager.html
*_atomic jobs perform one single task asynchronously in the ThreadPool, we
don't know exactly when they will finish, but we reap them if they're stuck.
The other calls are performed synchronously.
"""
SERVICE_CHECK_NAME = 'vcenter.can_connect'
def __init__(self, name, init_config, agentConfig, instances):
AgentCheck.__init__(self, name, init_config, agentConfig, instances)
self.time_started = time.time()
self.pool_started = False
self.exceptionq = Queue()
# Connections open to vCenter instances
self.server_instances = {}
# Event configuration
self.event_config = {}
# Caching resources, timeouts
self.cache_times = {}
for instance in self.instances:
i_key = self._instance_key(instance)
self.cache_times[i_key] = {
MORLIST: {
LAST: 0,
INTERVAL: init_config.get('refresh_morlist_interval',
REFRESH_MORLIST_INTERVAL)
},
METRICS_METADATA: {
LAST: 0,
INTERVAL: init_config.get('refresh_metrics_metadata_interval',
REFRESH_METRICS_METADATA_INTERVAL)
}
}
self.event_config[i_key] = instance.get('event_config')
# First layer of cache (get entities from the tree)
self.morlist_raw = {}
# Second layer, processed from the first one
self.morlist = {}
# Metrics metadata, basically perfCounterId -> {name, group, description}
self.metrics_metadata = {}
self.latest_event_query = {}
def stop(self):
self.stop_pool()
def start_pool(self):
self.log.info("Starting Thread Pool")
self.pool_size = int(self.init_config.get('threads_count', DEFAULT_SIZE_POOL))
self.pool = Pool(self.pool_size)
self.pool_started = True
self.jobs_status = {}
def stop_pool(self):
self.log.info("Stopping Thread Pool")
if self.pool_started:
self.pool.terminate()
self.pool.join()
self.jobs_status.clear()
assert self.pool.get_nworkers() == 0
self.pool_started = False
def restart_pool(self):
self.stop_pool()
self.start_pool()
def _clean(self):
now = time.time()
# TODO: use that
for name in self.jobs_status.keys():
start_time = self.jobs_status[name]
if now - start_time > JOB_TIMEOUT:
self.log.critical("Restarting Pool. One check is stuck.")
self.restart_pool()
break
def _query_event(self, instance):
i_key = self._instance_key(instance)
last_time = self.latest_event_query.get(i_key)
server_instance = self._get_server_instance(instance)
event_manager = server_instance.content.eventManager
# Be sure we don't duplicate any event, never query the "past"
if not last_time:
last_time = self.latest_event_query[i_key] = \
event_manager.latestEvent.createdTime + timedelta(seconds=1)
query_filter = vim.event.EventFilterSpec()
time_filter = vim.event.EventFilterSpec.ByTime(beginTime=self.latest_event_query[i_key])
query_filter.time = time_filter
try:
new_events = event_manager.QueryEvents(query_filter)
self.log.debug("Got {0} events from vCenter event manager".format(len(new_events)))
for event in new_events:
normalized_event = VSphereEvent(event, self.event_config[i_key])
# Can return None if the event is filtered out
event_payload = normalized_event.get_datadog_payload()
if event_payload is not None:
self.event(event_payload)
last_time = event.createdTime + timedelta(seconds=1)
except Exception as e:
# Don't get stuck on a failure to fetch an event
# Ignore them for next pass
self.log.warning("Unable to fetch Events %s", e)
last_time = event_manager.latestEvent.createdTime + timedelta(seconds=1)
self.latest_event_query[i_key] = last_time
def _instance_key(self, instance):
i_key = instance.get('name')
if i_key is None:
raise Exception("Must define a unique 'name' per vCenter instance")
return i_key
def _should_cache(self, instance, entity):
i_key = self._instance_key(instance)
now = time.time()
return now - self.cache_times[i_key][entity][LAST] > self.cache_times[i_key][entity][INTERVAL]
def _get_server_instance(self, instance):
i_key = self._instance_key(instance)
service_check_tags = [
'vcenter_server:{0}'.format(instance.get('name')),
'vcenter_host:{0}'.format(instance.get('host')),
]
# Check for ssl configs and generate an appropriate ssl context object
ssl_verify = instance.get('ssl_verify', True)
ssl_capath = instance.get('ssl_capath', None)
if not ssl_verify:
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_NONE
elif ssl_capath:
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(capath=ssl_capath)
# If both configs are used, log a message explaining the default
if not ssl_verify and ssl_capath:
self.log.debug("Your configuration is incorrectly attempting to "
"specify both a CA path, and to disable SSL "
"verification. You cannot do both. Proceeding with "
"disabling ssl verification.")
if i_key not in self.server_instances:
try:
server_instance = connect.SmartConnect(
host = instance.get('host'),
user = instance.get('username'),
pwd = instance.get('password'),
sslContext = context if not ssl_verify or ssl_capath else None
)
except Exception as e:
err_msg = "Connection to %s failed: %s" % (instance.get('host'), e)
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
tags=service_check_tags, message=err_msg)
raise Exception(err_msg)
self.server_instances[i_key] = server_instance
# Test if the connection is working
try:
self.server_instances[i_key].RetrieveContent()
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK,
tags=service_check_tags)
except Exception as e:
err_msg = "Connection to %s died unexpectedly: %s" % (instance.get('host'), e)
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
tags=service_check_tags, message=err_msg)
raise Exception(err_msg)
return self.server_instances[i_key]
def _compute_needed_metrics(self, instance, available_metrics):
""" Compare the available metrics for one MOR we have computed and intersect them
with the set of metrics we want to report
"""
if instance.get('all_metrics', False):
return available_metrics
i_key = self._instance_key(instance)
wanted_metrics = []
# Get only the basic metrics
for metric in available_metrics:
# No cache yet, skip it for now
if (i_key not in self.metrics_metadata
or metric.counterId not in self.metrics_metadata[i_key]):
continue
if self.metrics_metadata[i_key][metric.counterId]['name'] in BASIC_METRICS:
wanted_metrics.append(metric)
return wanted_metrics
def get_external_host_tags(self):
""" Returns a list of tags for every host that is detected by the vSphere
integration.
List of pairs (hostname, list_of_tags)
"""
self.log.info("Sending external_host_tags now")
external_host_tags = []
for instance in self.instances:
i_key = self._instance_key(instance)
mor_list = self.morlist[i_key].items()
for mor_name, mor in mor_list:
external_host_tags.append((mor['hostname'], {SOURCE_TYPE: mor['tags']}))
return external_host_tags
@atomic_method
def _cache_morlist_raw_atomic(self, i_key, obj_type, obj, tags, regexes=None):
""" Compute tags for a single node in the vCenter rootFolder
and queue other such jobs for children nodes.
Usual hierarchy:
rootFolder
- datacenter1
- compute_resource1 == cluster
- host1
- host2
- host3
- compute_resource2
- host5
- vm1
- vm2
If it's a node we want to query metric for, queue it in self.morlist_raw
that will be processed by another job.
"""
### <TEST-INSTRUMENTATION>
t = Timer()
self.log.debug("job_atomic: Exploring MOR {0} (type={1})".format(obj, obj_type))
### </TEST-INSTRUMENTATION>
tags_copy = deepcopy(tags)
if obj_type == 'rootFolder':
for datacenter in obj.childEntity:
# Skip non-datacenter
if not hasattr(datacenter, 'hostFolder'):
continue
self.pool.apply_async(
self._cache_morlist_raw_atomic,
args=(i_key, 'datacenter', datacenter, tags_copy, regexes)
)
elif obj_type == 'datacenter':
dc_tag = "vsphere_datacenter:%s" % obj.name
tags_copy.append(dc_tag)
for compute_resource in obj.hostFolder.childEntity:
# Skip non-compute resource
if not hasattr(compute_resource, 'host'):
continue
self.pool.apply_async(
self._cache_morlist_raw_atomic,
args=(i_key, 'compute_resource', compute_resource, tags_copy, regexes)
)
elif obj_type == 'compute_resource':
if obj.__class__ == vim.ClusterComputeResource:
cluster_tag = "vsphere_cluster:%s" % obj.name
tags_copy.append(cluster_tag)
for host in obj.host:
# Skip non-host
if not hasattr(host, 'vm'):
continue
self.pool.apply_async(
self._cache_morlist_raw_atomic,
args=(i_key, 'host', host, tags_copy, regexes)
)
elif obj_type == 'host':
if regexes and regexes.get('host_include') is not None:
match = re.search(regexes['host_include'], obj.name)
if not match:
self.log.debug(u"Filtered out VM {0} because of host_include_only_regex".format(obj.name))
return
watched_mor = dict(mor_type='host', mor=obj, hostname=obj.name, tags=tags_copy+['vsphere_type:host'])
self.morlist_raw[i_key].append(watched_mor)
host_tag = "vsphere_host:%s" % obj.name
tags_copy.append(host_tag)
for vm in obj.vm:
if vm.runtime.powerState != 'poweredOn':
continue
self.pool.apply_async(
self._cache_morlist_raw_atomic,
args=(i_key, 'vm', vm, tags_copy, regexes)
)
elif obj_type == 'vm':
if regexes and regexes.get('vm_include') is not None:
match = re.search(regexes['vm_include'], obj.name)
if not match:
self.log.debug(u"Filtered out VM {0} because of vm_include_only_regex".format(obj.name))
return
watched_mor = dict(mor_type='vm', mor=obj, hostname=obj.name, tags=tags_copy+['vsphere_type:vm'])
self.morlist_raw[i_key].append(watched_mor)
### <TEST-INSTRUMENTATION>
self.histogram('datadog.agent.vsphere.morlist_raw_atomic.time', t.total())
### </TEST-INSTRUMENTATION>
def _cache_morlist_raw(self, instance):
""" Initiate the first layer to refresh self.morlist by queueing
_cache_morlist_raw_atomic on the rootFolder in a recursive/asncy approach
"""
i_key = self._instance_key(instance)
self.log.debug("Caching the morlist for vcenter instance %s" % i_key)
if i_key in self.morlist_raw and len(self.morlist_raw[i_key]) > 0:
self.log.debug(
"Skipping morlist collection now, RAW results "
"processing not over (latest refresh was {0}s ago)".format(
time.time() - self.cache_times[i_key][MORLIST][LAST])
)
return
self.morlist_raw[i_key] = []
server_instance = self._get_server_instance(instance)
root_folder = server_instance.content.rootFolder
instance_tag = "vcenter_server:%s" % instance.get('name')
regexes = {
'host_include': instance.get('host_include_only_regex'),
'vm_include': instance.get('vm_include_only_regex')
}
self.pool.apply_async(
self._cache_morlist_raw_atomic,
args=(i_key, 'rootFolder', root_folder, [instance_tag], regexes)
)
self.cache_times[i_key][MORLIST][LAST] = time.time()
@atomic_method
def _cache_morlist_process_atomic(self, instance, mor):
""" Process one item of the self.morlist_raw list by querying the available
metrics for this MOR and then putting it in self.morlist
"""
### <TEST-INSTRUMENTATION>
t = Timer()
### </TEST-INSTRUMENTATION>
i_key = self._instance_key(instance)
server_instance = self._get_server_instance(instance)
perfManager = server_instance.content.perfManager
self.log.debug(
"job_atomic: Querying available metrics"
" for MOR {0} (type={1})".format(mor['mor'], mor['mor_type'])
)
available_metrics = perfManager.QueryAvailablePerfMetric(
mor['mor'], intervalId=REAL_TIME_INTERVAL)
mor['metrics'] = self._compute_needed_metrics(instance, available_metrics)
mor_name = str(mor['mor'])
if mor_name in self.morlist[i_key]:
# Was already here last iteration
self.morlist[i_key][mor_name]['metrics'] = mor['metrics']
else:
self.morlist[i_key][mor_name] = mor
self.morlist[i_key][mor_name]['last_seen'] = time.time()
### <TEST-INSTRUMENTATION>
self.histogram('datadog.agent.vsphere.morlist_process_atomic.time', t.total())
### </TEST-INSTRUMENTATION>
def _cache_morlist_process(self, instance):
""" Empties the self.morlist_raw by popping items and running asynchronously
the _cache_morlist_process_atomic operation that will get the available
metrics for this MOR and put it in self.morlist
"""
i_key = self._instance_key(instance)
if i_key not in self.morlist:
self.morlist[i_key] = {}
batch_size = self.init_config.get('batch_morlist_size', BATCH_MORLIST_SIZE)
for i in xrange(batch_size):
try:
mor = self.morlist_raw[i_key].pop()
self.pool.apply_async(self._cache_morlist_process_atomic, args=(instance, mor))
except (IndexError, KeyError):
self.log.debug("No more work to process in morlist_raw")
return
def _vacuum_morlist(self, instance):
""" Check if self.morlist doesn't have some old MORs that are gone, ie
we cannot get any metrics from them anyway (or =0)
"""
i_key = self._instance_key(instance)
morlist = self.morlist[i_key].items()
for mor_name, mor in morlist:
last_seen = mor['last_seen']
if (time.time() - last_seen) > 2 * REFRESH_MORLIST_INTERVAL:
del self.morlist[i_key][mor_name]
def _cache_metrics_metadata(self, instance):
""" Get from the server instance, all the performance counters metadata
meaning name/group/description... attached with the corresponding ID
"""
### <TEST-INSTRUMENTATION>
t = Timer()
### </TEST-INSTRUMENTATION>
i_key = self._instance_key(instance)
self.log.info("Warming metrics metadata cache for instance {0}".format(i_key))
server_instance = self._get_server_instance(instance)
perfManager = server_instance.content.perfManager
new_metadata = {}
for counter in perfManager.perfCounter:
d = dict(
name = "%s.%s" % (counter.groupInfo.key, counter.nameInfo.key),
unit = counter.unitInfo.key,
instance_tag = 'instance' # FIXME: replace by what we want to tag!
)
new_metadata[counter.key] = d
self.cache_times[i_key][METRICS_METADATA][LAST] = time.time()
self.log.info("Finished metadata collection for instance {0}".format(i_key))
# Reset metadata
self.metrics_metadata[i_key] = new_metadata
### <TEST-INSTRUMENTATION>
self.histogram('datadog.agent.vsphere.metric_metadata_collection.time', t.total())
### </TEST-INSTRUMENTATION>
def _transform_value(self, instance, counter_id, value):
""" Given the counter_id, look up for the metrics metadata to check the vsphere
type of the counter and apply pre-reporting transformation if needed.
"""
i_key = self._instance_key(instance)
if counter_id in self.metrics_metadata[i_key]:
unit = self.metrics_metadata[i_key][counter_id]['unit']
if unit == 'percent':
return float(value) / 100
# Defaults to return the value without transformation
return value
@atomic_method
def _collect_metrics_atomic(self, instance, mor):
""" Task that collects the metrics listed in the morlist for one MOR
"""
### <TEST-INSTRUMENTATION>
t = Timer()
### </TEST-INSTRUMENTATION>
i_key = self._instance_key(instance)
server_instance = self._get_server_instance(instance)
perfManager = server_instance.content.perfManager
query = vim.PerformanceManager.QuerySpec(maxSample=1,
entity=mor['mor'],
metricId=mor['metrics'],
intervalId=20,
format='normal')
results = perfManager.QueryPerf(querySpec=[query])
if results:
for result in results[0].value:
if result.id.counterId not in self.metrics_metadata[i_key]:
self.log.debug("Skipping this metric value, because there is no metadata about it")
continue
instance_name = result.id.instance or "none"
value = self._transform_value(instance, result.id.counterId, result.value[0])
# Metric types are absolute, delta, and rate
if ALL_METRICS[self.metrics_metadata[i_key][result.id.counterId]['name']]['s_type'] == 'rate':
record_metric = self.rate
else:
record_metric = self.gauge
record_metric(
"vsphere.%s" % self.metrics_metadata[i_key][result.id.counterId]['name'],
value,
hostname=mor['hostname'],
tags=['instance:%s' % instance_name]
)
### <TEST-INSTRUMENTATION>
self.histogram('datadog.agent.vsphere.metric_colection.time', t.total())
### </TEST-INSTRUMENTATION>
def collect_metrics(self, instance):
""" Calls asynchronously _collect_metrics_atomic on all MORs, as the
job queue is processed the Aggregator will receive the metrics.
"""
i_key = self._instance_key(instance)
if i_key not in self.morlist:
self.log.debug("Not collecting metrics for this instance, nothing to do yet: {0}".format(i_key))
return
mors = self.morlist[i_key].items()
self.log.debug("Collecting metrics of %d mors" % len(mors))
vm_count = 0
for mor_name, mor in mors:
if mor['mor_type'] == 'vm':
vm_count += 1
if 'metrics' not in mor:
# self.log.debug("Skipping entity %s collection because we didn't cache its metrics yet" % mor['hostname'])
continue
self.pool.apply_async(self._collect_metrics_atomic, args=(instance, mor))
self.gauge('vsphere.vm.count', vm_count, tags=["vcenter_server:%s" % instance.get('name')])
def check(self, instance):
if not self.pool_started:
self.start_pool()
### <TEST-INSTRUMENTATION>
self.gauge('datadog.agent.vsphere.queue_size', self.pool._workq.qsize(), tags=['instant:initial'])
### </TEST-INSTRUMENTATION>
# First part: make sure our object repository is neat & clean
if self._should_cache(instance, METRICS_METADATA):
self._cache_metrics_metadata(instance)
if self._should_cache(instance, MORLIST):
self._cache_morlist_raw(instance)
self._cache_morlist_process(instance)
self._vacuum_morlist(instance)
# Second part: do the job
self.collect_metrics(instance)
self._query_event(instance)
# For our own sanity
self._clean()
thread_crashed = False
try:
while True:
self.log.critical(self.exceptionq.get_nowait())
thread_crashed = True
except Empty:
pass
if thread_crashed:
self.stop_pool()
raise Exception("One thread in the pool crashed, check the logs")
### <TEST-INSTRUMENTATION>
self.gauge('datadog.agent.vsphere.queue_size', self.pool._workq.qsize(), tags=['instant:final'])
### </TEST-INSTRUMENTATION>
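# --- Editor's sketch (illustrative, not part of the check) ---
# Despite its name, EXCLUDE_FILTERS acts as an allowlist of event types: an
# event whose type is missing from the dict is dropped entirely, and an event
# whose fullFormattedMessage matches one of the listed regexes is also dropped.
def _sketch_is_filtered(event_type, message):
    """Standalone restatement of VSphereEvent._is_filtered."""
    if event_type not in EXCLUDE_FILTERS:
        return True  # unknown types are filtered out
    return any(re.search(f, message) for f in EXCLUDE_FILTERS[event_type])
# e.g. _sketch_is_filtered('AlarmStatusChangedEvent', 'alarm went Gray') -> True
#      _sketch_is_filtered('VmPoweredOnEvent', 'VM powered on') -> False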
if __name__ == '__main__':
check, _instances = VSphereCheck.from_yaml('conf.d/vsphere.yaml')
try:
for i in xrange(200):
print "Loop %d" % i
for instance in check.instances:
check.check(instance)
if check.has_events():
print 'Events: %s' % (check.get_events())
print 'Metrics: %d' % (len(check.get_metrics()))
time.sleep(10)
except Exception as e:
print "Whoops something happened {0}".format(traceback.format_exc())
finally:
check.stop()
|
houqp/rumrunner
|
refs/heads/master
|
test.py
|
1
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import unittest
import random
import os
import zmq
from rumrunner import Rumrunner
class TestRumrunner(unittest.TestCase):
def test_send_counter_metric(self):
ctx = zmq.Context()
recv_socket = ctx.socket(zmq.PULL)
tmp_metric_socket = '/var/tmp/test_metric_{0}'.format(random.random())
recv_socket.bind('ipc://{0}'.format(tmp_metric_socket))
Rumrunner(tmp_metric_socket, 'test_app').counter('test_counter')
recv_socket.recv() # suck out empty string for write test
self.assertEqual(recv_socket.recv(),
'["test_app", "test_counter", "COUNTER", 1]')
os.remove(tmp_metric_socket)
def test_error_out_on_not_writable_socket_disable(self):
ctx = zmq.Context()
recv_socket = ctx.socket(zmq.PULL)
tmp_metric_socket = '/var/tmp/test_metric_{0}'.format(random.random())
recv_socket.bind('ipc://{0}'.format(tmp_metric_socket))
Rumrunner(tmp_metric_socket, 'test_app', strict_check_socket=False)
os.chmod(tmp_metric_socket, 0444)
# Should not raise an exception due to permissions
Rumrunner(tmp_metric_socket, 'test_app', strict_check_socket=False)
os.remove(tmp_metric_socket)
def test_error_out_on_not_writable_socket(self):
ctx = zmq.Context()
recv_socket = ctx.socket(zmq.PULL)
tmp_metric_socket = '/var/tmp/test_metric_{0}'.format(random.random())
recv_socket.bind('ipc://{0}'.format(tmp_metric_socket))
Rumrunner(tmp_metric_socket, 'test_app')
os.chmod(tmp_metric_socket, 0444)
self.assertRaises(Exception, Rumrunner, tmp_metric_socket, 'test_app')
os.remove(tmp_metric_socket)
def test_mock_rumrunner(self):
from rumrunner import unmock_rumrunner, mock_rumrunner, MockRumrunner
self.assertTrue(isinstance(Rumrunner('/var/tmp/test', 'test_app',
strict_check_socket=False),
Rumrunner))
mock_rumrunner()
self.assertTrue(isinstance(Rumrunner('/var/tmp/test', 'test_app',
strict_check_socket=False),
MockRumrunner))
unmock_rumrunner()
self.assertTrue(isinstance(Rumrunner('/var/tmp/test', 'test_app',
strict_check_socket=False),
Rumrunner))
if __name__ == '__main__':
unittest.main()
|
ThiagoGarciaAlves/intellij-community
|
refs/heads/master
|
python/lib/Lib/encodings/mac_croatian.py
|
593
|
""" Python Character Mapping Codec mac_croatian generated from 'MAPPINGS/VENDORS/APPLE/CROATIAN.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='mac-croatian',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe3' # 0x8B -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x8C -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE
u'\xec' # 0x93 -> LATIN SMALL LETTER I WITH GRAVE
u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE
u'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf2' # 0x98 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE
u'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE
u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u2020' # 0xA0 -> DAGGER
u'\xb0' # 0xA1 -> DEGREE SIGN
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa7' # 0xA4 -> SECTION SIGN
u'\u2022' # 0xA5 -> BULLET
u'\xb6' # 0xA6 -> PILCROW SIGN
u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\u0160' # 0xA9 -> LATIN CAPITAL LETTER S WITH CARON
u'\u2122' # 0xAA -> TRADE MARK SIGN
u'\xb4' # 0xAB -> ACUTE ACCENT
u'\xa8' # 0xAC -> DIAERESIS
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\u017d' # 0xAE -> LATIN CAPITAL LETTER Z WITH CARON
u'\xd8' # 0xAF -> LATIN CAPITAL LETTER O WITH STROKE
u'\u221e' # 0xB0 -> INFINITY
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\u2206' # 0xB4 -> INCREMENT
u'\xb5' # 0xB5 -> MICRO SIGN
u'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL
u'\u2211' # 0xB7 -> N-ARY SUMMATION
u'\u220f' # 0xB8 -> N-ARY PRODUCT
u'\u0161' # 0xB9 -> LATIN SMALL LETTER S WITH CARON
u'\u222b' # 0xBA -> INTEGRAL
u'\xaa' # 0xBB -> FEMININE ORDINAL INDICATOR
u'\xba' # 0xBC -> MASCULINE ORDINAL INDICATOR
u'\u03a9' # 0xBD -> GREEK CAPITAL LETTER OMEGA
u'\u017e' # 0xBE -> LATIN SMALL LETTER Z WITH CARON
u'\xf8' # 0xBF -> LATIN SMALL LETTER O WITH STROKE
u'\xbf' # 0xC0 -> INVERTED QUESTION MARK
u'\xa1' # 0xC1 -> INVERTED EXCLAMATION MARK
u'\xac' # 0xC2 -> NOT SIGN
u'\u221a' # 0xC3 -> SQUARE ROOT
u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u0106' # 0xC6 -> LATIN CAPITAL LETTER C WITH ACUTE
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\xc0' # 0xCB -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc3' # 0xCC -> LATIN CAPITAL LETTER A WITH TILDE
u'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE
u'\u0152' # 0xCE -> LATIN CAPITAL LIGATURE OE
u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
u'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
u'\u2014' # 0xD1 -> EM DASH
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u25ca' # 0xD7 -> LOZENGE
u'\uf8ff' # 0xD8 -> Apple logo
u'\xa9' # 0xD9 -> COPYRIGHT SIGN
u'\u2044' # 0xDA -> FRACTION SLASH
u'\u20ac' # 0xDB -> EURO SIGN
u'\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\xc6' # 0xDE -> LATIN CAPITAL LETTER AE
u'\xbb' # 0xDF -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2013' # 0xE0 -> EN DASH
u'\xb7' # 0xE1 -> MIDDLE DOT
u'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK
u'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2030' # 0xE4 -> PER MILLE SIGN
u'\xc2' # 0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\u0107' # 0xE6 -> LATIN SMALL LETTER C WITH ACUTE
u'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
u'\xc8' # 0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0xED -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
u'\xd2' # 0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\u0131' # 0xF5 -> LATIN SMALL LETTER DOTLESS I
u'\u02c6' # 0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u02dc' # 0xF7 -> SMALL TILDE
u'\xaf' # 0xF8 -> MACRON
u'\u03c0' # 0xF9 -> GREEK SMALL LETTER PI
u'\xcb' # 0xFA -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\u02da' # 0xFB -> RING ABOVE
u'\xb8' # 0xFC -> CEDILLA
u'\xca' # 0xFD -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xe6' # 0xFE -> LATIN SMALL LETTER AE
u'\u02c7' # 0xFF -> CARON
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
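### Editor's usage sketch (illustrative, not part of the generated codec)
# The table-driven Codec APIs above can be exercised directly, without
# registering the codec: byte 0x80 maps to U+00C4 per the decoding table,
# and the encoding table reverses the mapping.
def _sketch_roundtrip():
    decoded, _ = codecs.charmap_decode('\x80', 'strict', decoding_table)
    encoded, _ = codecs.charmap_encode(decoded, 'strict', encoding_table)
    return decoded, encoded  # (u'\xc4', '\x80')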
|
tgerla/ansible
|
refs/heads/devel
|
lib/ansible/vars/hostvars.py
|
43
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import collections
import sys
from jinja2 import Undefined as j2undefined
from ansible import constants as C
from ansible.inventory.host import Host
from ansible.template import Templar
__all__ = ['HostVars']
# Note -- this is a Mapping, not a MutableMapping
class HostVars(collections.Mapping):
''' A special view of vars_cache that adds values from the inventory when needed. '''
def __init__(self, vars_manager, play, inventory, loader):
self._lookup = {}
self._loader = loader
# temporarily remove the inventory filter restriction
# so we can compile the variables for all of the hosts
# in inventory
restriction = inventory._restriction
inventory.remove_restriction()
hosts = inventory.get_hosts(ignore_limits_and_restrictions=True)
inventory.restrict_to_hosts(restriction)
# check to see if localhost is in the hosts list, as we
# may have it referenced via hostvars, but if it was created
# implicitly it doesn't show up in the hosts list
has_localhost = False
for host in hosts:
if host.name in C.LOCALHOST:
has_localhost = True
break
# we don't use the method in inventory to create the implicit host,
# because it also adds it to the 'ungrouped' group, and we want to
# avoid any side-effects
if not has_localhost:
new_host = Host(name='localhost')
new_host.set_variable("ansible_python_interpreter", sys.executable)
new_host.set_variable("ansible_connection", "local")
new_host.ipv4_address = '127.0.0.1'
hosts.append(new_host)
for host in hosts:
self._lookup[host.name] = vars_manager.get_vars(loader=loader, play=play, host=host, include_hostvars=False)
def __getitem__(self, host_name):
if host_name not in self._lookup:
return j2undefined
data = self._lookup.get(host_name)
templar = Templar(variables=data, loader=self._loader)
return templar.template(data, fail_on_undefined=False)
def __contains__(self, host_name):
item = self.get(host_name)
if item and item is not j2undefined:
return True
return False
def __iter__(self):
raise NotImplementedError('HostVars does not support iteration as hosts are discovered on an as needed basis.')
def __len__(self):
raise NotImplementedError('HostVars does not support len. hosts entries are discovered dynamically as needed')
def __getstate__(self):
data = self._lookup.copy()
return dict(loader=self._loader, data=data)
def __setstate__(self, data):
self._lookup = data.get('data')
self._loader = data.get('loader')
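# --- Editor's note (illustrative, not part of Ansible) ---
# HostVars behaves like a lazy, read-only mapping exposed to templates as
# `hostvars`: an expression such as
#     {{ hostvars['web01']['ansible_host'] }}
# goes through __getitem__ above, which re-templates the cached vars on each
# access, and an unknown host name yields Jinja2's Undefined instead of
# raising a KeyError.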
|
ihousman/earthengine-api
|
refs/heads/master
|
python/examples/FeatureCollection/join.py
|
5
|
#!/usr/bin/env python
"""FeatureCollection Join example.
Show parks in San Francisco within 2 kilometers of a BART station.
"""
import ee
import ee.mapclient
ee.Initialize()
ee.mapclient.centerMap(-122.45, 37.75, 13)
bart = ee.FeatureCollection('ft:1xCCZkVn8DIkB7i7RVkvsYWxAxsdsQZ6SbD9PCXw')
parks = ee.FeatureCollection('ft:10KC6VfBWMUvNcuxU7mbSEg__F_4UVe9uDkCldBw')
buffered_bart = bart.map(lambda f: f.buffer(2000))
join_filter = ee.Filter.withinDistance(2000, '.geo', None, '.geo')
close_parks = ee.Join.simple().apply(parks, bart, join_filter)
ee.mapclient.addToMap(buffered_bart, {'color': 'b0b0b0'})
ee.mapclient.addToMap(close_parks, {'color': '008000'})
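# Optional sanity check (illustrative; assumes the Fusion Table assets above
# still resolve and the session is authenticated): report the match count.
print(close_parks.size().getInfo())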
|
Code4SA/ecd-crowdsourcing
|
refs/heads/master
|
address.py
|
1
|
import csv
import re
field_names = 'id,source_url,status,category,fax,name,postal_address,phone,reg_no,physical_address,legal_form,reg_date,contact_name,email,reg_status_cell,reg_no_cell,category1,category3,category2'.split(',')
def gen_re():
    # Build one alternation pattern from the place names; re.escape guards
    # against regex metacharacters appearing in a name.
    places = [el.strip() for el in open('zululand_places.txt')]
    piped = "|".join(re.escape(el) for el in places)
    return re.compile(piped)
re_place = gen_re()
reader = csv.DictReader(open('npo.csv'))
with open('ecd_npo.csv', 'w') as fp:
    writer = csv.DictWriter(fp, field_names)
    writer.writeheader()  # write the header row so the output is a standalone CSV
for datum in reader:
address = datum['physical_address']
if re_place.search(address):
writer.writerow(datum)
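# Illustrative note: with a places file containing, say, "Vryheid" and "Ulundi"
# (hypothetical entries), gen_re() compiles the pattern r"Vryheid|Ulundi", so a
# row is kept whenever its physical_address mentions either place.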
|
ingokegel/intellij-community
|
refs/heads/master
|
python/testData/findUsages/Imports.py
|
83
|
import r<caret>e
x = re.compile('')
|
stack-of-tasks/rbdlpy
|
refs/heads/master
|
tutorial/lib/python2.7/site-packages/OpenGL/GL/ARB/robustness_isolation.py
|
9
|
'''OpenGL extension ARB.robustness_isolation
This module customises the behaviour of the
OpenGL.raw.GL.ARB.robustness_isolation to provide a more
Python-friendly API
Overview (from the spec)
GL_ARB_robustness and supporting window system extensions allow
creating an OpenGL context supporting graphics reset notification
behavior. GL_ARB_robustness_isolation provides stronger
guarantees about the possible side-effects of a graphics reset.
It is expected that there may be a performance cost associated
with isolating an application or share group from other contexts
on the GPU. For this reason, GL_ARB_robustness_isolation is
phrased as an opt-in mechanism, with a new context creation bit
defined in the window system bindings. It is expected that
implementations might only advertise the strings in this extension
if both the implementation supports the desired isolation
properties, and the context was created with the appropriate reset
isolation bit.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ARB/robustness_isolation.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.ARB.robustness_isolation import *
from OpenGL.raw.GL.ARB.robustness_isolation import _EXTENSION_NAME
def glInitRobustnessIsolationARB():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION
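# Usage sketch (illustrative; only meaningful after a windowing toolkit such as
# GLUT or GLFW has created an OpenGL context):
#     if glInitRobustnessIsolationARB():
#         pass  # the reset-isolation guarantees described above are available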
|
jetskijoe/headphones
|
refs/heads/master
|
lib/requests/packages/chardet/jpcntx.py
|
1776
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .compat import wrap_ord
NUM_OF_CATEGORY = 6
DONT_KNOW = -1
ENOUGH_REL_THRESHOLD = 100
MAX_REL_THRESHOLD = 1000
MINIMUM_DATA_THRESHOLD = 4
# This is the hiragana 2-char sequence table; the number in each cell represents its frequency category
jp2CharContext = (
(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)
class JapaneseContextAnalysis:
def __init__(self):
self.reset()
def reset(self):
        self._mTotalRel = 0  # total sequences received
        # category counters; each integer counts sequences in its category
self._mRelSample = [0] * NUM_OF_CATEGORY
# if last byte in current buffer is not the last byte of a character,
# we need to know how many bytes to skip in next buffer
self._mNeedToSkipCharNum = 0
self._mLastCharOrder = -1 # The order of previous char
# If this flag is set to True, detection is done and conclusion has
# been made
self._mDone = False
def feed(self, aBuf, aLen):
if self._mDone:
return
        # The buffer we got is byte oriented, and a character may span more
        # than one buffer. In case the last one or two bytes of the previous
        # buffer were incomplete, we recorded how many bytes are needed to
        # complete that character, and we skip those bytes here. We could
        # record those bytes as well and analyse the character once it is
        # complete, but one character will not make much difference, so simply
        # skipping it simplifies our logic and improves performance.
i = self._mNeedToSkipCharNum
while i < aLen:
order, charLen = self.get_order(aBuf[i:i + 2])
i += charLen
if i > aLen:
self._mNeedToSkipCharNum = i - aLen
self._mLastCharOrder = -1
else:
if (order != -1) and (self._mLastCharOrder != -1):
self._mTotalRel += 1
if self._mTotalRel > MAX_REL_THRESHOLD:
self._mDone = True
break
self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
self._mLastCharOrder = order
def got_enough_data(self):
return self._mTotalRel > ENOUGH_REL_THRESHOLD
def get_confidence(self):
# This is just one way to calculate confidence. It works well for me.
if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
            # float() keeps this a true division under Python 2 as well
            return (self._mTotalRel - self._mRelSample[0]) / float(self._mTotalRel)
else:
return DONT_KNOW
def get_order(self, aBuf):
return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
    def __init__(self):
        JapaneseContextAnalysis.__init__(self)
        self.charset_name = "SHIFT_JIS"
def get_charset_name(self):
return self.charset_name
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
charLen = 2
if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
self.charset_name = "CP932"
else:
charLen = 1
        # return its order if it is hiragana (the Shift_JIS hiragana lead byte
        # is 0x82; the original C source wrote it as octal \202, which this
        # port had misread as decimal 202)
        if len(aBuf) > 1:
            second_char = wrap_ord(aBuf[1])
            if (first_char == 0x82) and (0x9F <= second_char <= 0xF1):
                return second_char - 0x9F, charLen
return -1, charLen
class EUCJPContextAnalysis(JapaneseContextAnalysis):
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
charLen = 2
elif first_char == 0x8F:
charLen = 3
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
return second_char - 0xA1, charLen
return -1, charLen
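# Illustrative driver (not part of chardet): this is roughly how a prober
# exercises the analyser -- construct, feed raw bytes, then read the
# confidence. The sample is a hypothetical EUC-JP hiragana run; because of the
# relative import of wrap_ord above, run this from within the package.
def _demo_euc_jp():
    analyser = EUCJPContextAnalysis()
    sample = b'\xa4\xb3\xa4\xf3' * 4  # eight 2-byte hiragana characters
    analyser.feed(sample, len(sample))
    # Seven adjacent pairs were seen, which clears MINIMUM_DATA_THRESHOLD,
    # so get_confidence() returns a real ratio rather than DONT_KNOW.
    return analyser.got_enough_data(), analyser.get_confidence()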
# flake8: noqa
|
martinb07/mysmarthome
|
refs/heads/develop
|
plugins/sml/__init__.py
|
2
|
#!/usr/bin/env python3
# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab
#########################################################################
# Copyright 2012-2014 Oliver Hinckel github@ollisnet.de
#########################################################################
#
# This file is part of SmartHome.py. http://mknx.github.io/smarthome/
#
# SmartHome.py is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SmartHome.py is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SmartHome.py. If not, see <http://www.gnu.org/licenses/>.
#########################################################################
import logging
import time
import re
import serial
import threading
import struct
import socket
import errno
logger = logging.getLogger('')
class Sml():
_units = { # Blue book @ http://www.dlms.com/documentation/overviewexcerptsofthedlmsuacolouredbooks/index.html
1 : 'a', 2 : 'mo', 3 : 'wk', 4 : 'd', 5 : 'h', 6 : 'min.', 7 : 's', 8 : '°', 9 : '°C', 10 : 'currency',
11 : 'm', 12 : 'm/s', 13 : 'm³', 14 : 'm³', 15 : 'm³/h', 16 : 'm³/h', 17 : 'm³/d', 18 : 'm³/d', 19 : 'l', 20 : 'kg',
21 : 'N', 22 : 'Nm', 23 : 'Pa', 24 : 'bar', 25 : 'J', 26 : 'J/h', 27 : 'W', 28 : 'VA', 29 : 'var', 30 : 'Wh',
31 : 'WAh', 32 : 'varh', 33 : 'A', 34 : 'C', 35 : 'V', 36 : 'V/m', 37 : 'F', 38 : 'Ω', 39 : 'Ωm²/h', 40 : 'Wb',
41 : 'T', 42 : 'A/m', 43 : 'H', 44 : 'Hz', 45 : 'Rac', 46 : 'Rre', 47 : 'Rap', 48 : 'V²h', 49 : 'A²h', 50 : 'kg/s',
51 : 'Smho'
}
_devices = {
'smart-meter-gateway-com-1' : 'hex'
}
def __init__(self, smarthome, host=None, port=0, serialport=None, device="raw", cycle=300):
self._sh = smarthome
self.host = host
self.port = int(port)
self.serialport = serialport
self.cycle = cycle
self.connected = False
self._serial = None
self._sock = None
self._target = None
self._dataoffset = 0
self._items = {}
self._lock = threading.Lock()
if device in self._devices:
device = self._devices[device]
if device == "hex":
self._prepare = self._prepareHex
elif device == "raw":
self._prepare = self._prepareRaw
else:
logger.warning("Device type \"{}\" not supported - defaulting to \"raw\"".format(device))
self._prepare = self._prepareRaw
smarthome.connections.monitor(self)
def run(self):
self.alive = True
self._sh.scheduler.add('Sml', self._refresh, cycle=self.cycle)
def stop(self):
self.alive = False
self.disconnect()
def parse_item(self, item):
if 'sml_obis' in item.conf:
obis = item.conf['sml_obis']
prop = item.conf['sml_prop'] if 'sml_prop' in item.conf else 'valueReal'
if obis not in self._items:
self._items[obis] = {}
if prop not in self._items[obis]:
self._items[obis][prop] = []
self._items[obis][prop].append(item)
return self.update_item
return None
def parse_logic(self, logic):
pass
def update_item(self, item, caller=None, source=None, dest=None):
if caller != 'Sml':
pass
def connect(self):
self._lock.acquire()
target = None
try:
if self.serialport is not None:
self._target = 'serial://{}'.format(self.serialport)
self._serial = serial.Serial(
self.serialport, 9600, serial.EIGHTBITS, serial.PARITY_NONE, serial.STOPBITS_ONE, timeout=0)
elif self.host is not None:
self._target = 'tcp://{}:{}'.format(self.host, self.port)
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._sock.settimeout(2)
self._sock.connect((self.host, self.port))
self._sock.setblocking(False)
except Exception as e:
logger.error('Sml: Could not connect to {}: {}'.format(self._target, e))
self._lock.release()
return
else:
logger.info('Sml: Connected to {}'.format(self._target))
self.connected = True
self._lock.release()
def disconnect(self):
if self.connected:
try:
if self._serial is not None:
self._serial.close()
self._serial = None
elif self._sock is not None:
self._sock.shutdown(socket.SHUT_RDWR)
self._sock = None
            except Exception:
                pass
logger.info('Sml: Disconnected!')
self.connected = False
self._target = None
def _read(self, length):
total = []
while 1:
if self._serial is not None:
data = self._serial.read(length)
if data:
total.append(data)
else:
break
elif self._sock is not None:
try:
data = self._sock.recv(length)
if data:
total.append(data)
except socket.error as e:
if e.args[0] == errno.EAGAIN or e.args[0] == errno.EWOULDBLOCK:
break
else:
raise e
return b''.join(total)
def _refresh(self):
if self.connected:
start = time.time()
retry = 5
while retry > 0:
try:
data = self._read(512)
retry = 0
values = self._parse(self._prepare(data))
for obis in values:
logger.debug('Entry {}'.format(values[obis]))
if obis in self._items:
for prop in self._items[obis]:
for item in self._items[obis][prop]:
item(values[obis][prop], 'Sml')
except Exception as e:
logger.error('Reading data from {0} failed: {1} - reconnecting!'.format(self._target, e))
self.disconnect()
time.sleep(1)
self.connect()
retry = retry - 1
if retry == 0:
                        logger.warning('Trying to read data in next cycle due to connection errors!')
cycletime = time.time() - start
logger.debug("cycle takes {0} seconds".format(cycletime))
def _parse(self, data):
        # Search for SML List Entry sequences like:
        # "77 07 81 81 c7 82 03 ff 01 01 01 01 04 xx xx xx xx" - manufacturer
        # "77 07 01 00 00 00 09 ff 01 01 01 01 0b xx xx xx xx xx xx xx xx xx xx 01" - server id
        # "77 07 01 00 01 08 00 ff 63 01 80 01 62 1e 52 ff 56 00 00 00 29 85 01"
        # For details see http://wiki.volkszaehler.org/software/sml
values = {}
packetsize = 7
logger.debug('Data:{}'.format(''.join(' {:02x}'.format(x) for x in data)))
self._dataoffset = 0
while self._dataoffset < len(data)-packetsize:
# Find SML_ListEntry starting with 0x77 0x07 and OBIS code end with 0xFF
if data[self._dataoffset] == 0x77 and data[self._dataoffset+1] == 0x07 and data[self._dataoffset+packetsize] == 0xff:
packetstart = self._dataoffset
self._dataoffset += 1
try:
entry = {
'objName' : self._read_entity(data),
'status' : self._read_entity(data),
'valTime' : self._read_entity(data),
'unit' : self._read_entity(data),
'scaler' : self._read_entity(data),
'value' : self._read_entity(data),
'signature' : self._read_entity(data)
}
# add additional calculated fields
entry['obis'] = '{}-{}:{}.{}.{}*{}'.format(entry['objName'][0], entry['objName'][1], entry['objName'][2], entry['objName'][3], entry['objName'][4], entry['objName'][5])
entry['valueReal'] = entry['value'] * 10 ** entry['scaler'] if entry['scaler'] is not None else entry['value']
                    entry['unitName'] = self._units[entry['unit']] if entry['unit'] is not None and entry['unit'] in self._units else None
values[entry['obis']] = entry
except Exception as e:
if self._dataoffset < len(data) - 1:
logger.warning('Can not parse entity at position {}, byte {}: {}:{}...'.format(self._dataoffset, self._dataoffset - packetstart, e, ''.join(' {:02x}'.format(x) for x in data[packetstart:packetstart+64])))
self._dataoffset = packetstart + packetsize - 1
else:
self._dataoffset += 1
return values
    def _read_entity(self, data):
        # Decode one TL-prefixed SML entity (octet string, int/uint or list).
        upack = {
            5 : { 1 : '>b', 2 : '>h', 4 : '>i', 8 : '>q' }, # int
            6 : { 1 : '>B', 2 : '>H', 4 : '>I', 8 : '>Q' }  # uint
        }
        result = None
        tlf = data[self._dataoffset]
        entity_type = (tlf & 112) >> 4
        more = tlf & 128
        length = tlf & 15
        self._dataoffset += 1
        if more > 0:
            tlf = data[self._dataoffset]
            length = (length << 4) + (tlf & 15)
            self._dataoffset += 1
        length -= 1
        if length == 0: # skip empty optional value
            return result
        if self._dataoffset + length >= len(data):
            raise Exception("Tried to read {} bytes, but only {} are left".format(length, len(data) - self._dataoffset))
        if entity_type == 0: # octet string
            result = data[self._dataoffset:self._dataoffset+length]
        elif entity_type == 5 or entity_type == 6: # int or uint
            d = data[self._dataoffset:self._dataoffset+length]
            ulen = length
            while ulen not in upack[entity_type]: # zero-pad to the next greater unpack unit
                d = b'\x00' + d
                ulen += 1
            result = struct.unpack(upack[entity_type][ulen], d)[0]
        elif entity_type == 7: # list
            result = []
            self._dataoffset += 1
            for i in range(0, length + 1):
                result.append(self._read_entity(data))
            return result
        else:
            logger.warning('Skipping unknown field {}'.format(hex(tlf)))
        self._dataoffset += length
        return result
def _prepareRaw(self, data):
return data
    def _prepareHex(self, data):
        # Input arrives as an ASCII hex dump; strip everything that is not part
        # of a hex digit pair, then convert the pairs back to raw bytes.
        data = data.decode("iso-8859-1").lower()
        data = re.sub("[^a-f0-9]", " ", data)
        data = re.sub("( +[a-f0-9]|[a-f0-9] +)", "", data) # drop isolated half-bytes
        data = data.encode()
        return bytes(''.join(chr(int(data[i:i+2], 16)) for i in range(0, len(data), 2)), "iso8859-1")
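# Illustrative check (independent of SmartHome.py): decode the unit field
# "62 1e" from the sample List Entry quoted in _parse() above. TL byte 0x62
# means type 6 (uint) with one payload byte, and 0x1e == 30 is the OBIS unit
# code that the _units table maps to 'Wh'.
def _demo_tl_decode():
    tlf = 0x62
    assert (tlf & 112) >> 4 == 6  # entity type: uint
    assert (tlf & 15) - 1 == 1    # one payload byte follows the TL byte
    value = struct.unpack('>B', b'\x1e')[0]
    return value, Sml._units[value]  # -> (30, 'Wh')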
|
CCI-MOC/haas
|
refs/heads/master
|
hil/client/user.py
|
4
|
"""Client library for user-oriented api calls.
These are only meaningful if the server is configured to use
username & password auth.
"""
import json
from hil.client.base import ClientBase
from hil.client.base import check_reserved_chars
class User(ClientBase):
"""Consists of calls to query and
manipulate users related objects and relations.
"""
def list(self):
"""List all users"""
url = self.object_url('auth/basic/users')
return self.check_response(self.httpClient.request("GET", url))
@check_reserved_chars(dont_check=['password', 'is_admin'])
def create(self, username, password, is_admin):
"""Create a user <username> with password <password>.
<is_admin> is a boolean,
and determines whether a user is authorized for
administrative privileges.
"""
url = self.object_url('auth/basic/user', username)
payload = json.dumps({
'password': password, 'is_admin': is_admin,
})
return self.check_response(
self.httpClient.request("PUT", url, data=payload)
)
@check_reserved_chars()
def delete(self, username):
"""Deletes the user <username>. """
url = self.object_url('auth/basic/user', username)
return self.check_response(
self.httpClient.request("DELETE", url)
)
@check_reserved_chars()
def add(self, user, project):
"""Adds <user> to a <project>. """
url = self.object_url('auth/basic/user', user, 'add_project')
payload = json.dumps({'project': project})
return self.check_response(
self.httpClient.request("POST", url, data=payload)
)
@check_reserved_chars()
def remove(self, user, project):
"""Removes all access of <user> to <project>. """
url = self.object_url('auth/basic/user', user, 'remove_project')
payload = json.dumps({'project': project})
return self.check_response(
self.httpClient.request("POST", url, data=payload)
)
@check_reserved_chars(dont_check=['is_admin'])
def set_admin(self, username, is_admin):
"""Changes the admin status of <username>.
<is_admin> is a boolean that determines
whether a user is authorized for
administrative privileges.
"""
url = self.object_url('auth/basic/user', username)
payload = json.dumps({'is_admin': is_admin})
return self.check_response(
self.httpClient.request("PATCH", url, data=payload)
)
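# Illustrative usage (hypothetical wiring; the constructor arguments come from
# hil.client.base's ClientBase and are assumed here to be an endpoint URL plus
# an HTTP client object):
#     user_api = User(endpoint, http_client)
#     user_api.create('alice', 's3cret', is_admin=False)
#     user_api.add('alice', 'atlas')
#     print(user_api.list())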
|
codemac/servo
|
refs/heads/master
|
tests/wpt/css-tests/tools/wptserve/wptserve/request.py
|
57
|
import base64
import cgi
import Cookie
import StringIO
import tempfile
import urlparse
import stash
from utils import HTTPException
missing = object()
class Server(object):
"""Data about the server environment
.. attribute:: config
Environment configuration information with information about the
various servers running, their hostnames and ports.
.. attribute:: stash
Stash object holding state stored on the server between requests.
"""
config = None
def __init__(self, request):
self.stash = stash.Stash(request.url_parts.path)
class InputFile(object):
max_buffer_size = 1024*1024
def __init__(self, rfile, length):
"""File-like object used to provide a seekable view of request body data"""
self._file = rfile
self.length = length
self._file_position = 0
if length > self.max_buffer_size:
            # "w+b" is the valid read/write binary mode ("rw+b" is not a real mode string)
            self._buf = tempfile.TemporaryFile(mode="w+b")
else:
self._buf = StringIO.StringIO()
@property
def _buf_position(self):
rv = self._buf.tell()
assert rv <= self._file_position
return rv
def read(self, bytes=-1):
assert self._buf_position <= self._file_position
if bytes < 0:
bytes = self.length - self._buf_position
bytes_remaining = min(bytes, self.length - self._buf_position)
if bytes_remaining == 0:
return ""
if self._buf_position != self._file_position:
buf_bytes = min(bytes_remaining, self._file_position - self._buf_position)
old_data = self._buf.read(buf_bytes)
bytes_remaining -= buf_bytes
else:
old_data = ""
assert self._buf_position == self._file_position, (
"Before reading buffer position (%i) didn't match file position (%i)" %
(self._buf_position, self._file_position))
new_data = self._file.read(bytes_remaining)
self._buf.write(new_data)
self._file_position += bytes_remaining
assert self._buf_position == self._file_position, (
"After reading buffer position (%i) didn't match file position (%i)" %
(self._buf_position, self._file_position))
return old_data + new_data
def tell(self):
return self._buf_position
def seek(self, offset):
if offset > self.length or offset < 0:
raise ValueError
if offset <= self._file_position:
self._buf.seek(offset)
else:
self.read(offset - self._file_position)
def readline(self, max_bytes=None):
if max_bytes is None:
max_bytes = self.length - self._buf_position
if self._buf_position < self._file_position:
data = self._buf.readline(max_bytes)
if data.endswith("\n") or len(data) == max_bytes:
return data
else:
data = ""
assert self._buf_position == self._file_position
initial_position = self._file_position
found = False
buf = []
max_bytes -= len(data)
while not found:
readahead = self.read(min(2, max_bytes))
max_bytes -= len(readahead)
for i, c in enumerate(readahead):
if c == "\n":
buf.append(readahead[:i+1])
found = True
break
if not found:
buf.append(readahead)
if not readahead or not max_bytes:
break
new_data = "".join(buf)
data += new_data
self.seek(initial_position + len(new_data))
return data
def readlines(self):
rv = []
while True:
data = self.readline()
if data:
rv.append(data)
else:
break
return rv
def next(self):
data = self.readline()
if data:
return data
else:
raise StopIteration
def __iter__(self):
return self
class Request(object):
"""Object representing a HTTP request.
.. attribute:: doc_root
The local directory to use as a base when resolving paths
.. attribute:: route_match
Regexp match object from matching the request path to the route
selected for the request.
.. attribute:: protocol_version
HTTP version specified in the request.
.. attribute:: method
HTTP method in the request.
.. attribute:: request_path
Request path as it appears in the HTTP request.
.. attribute:: url
Absolute URL for the request.
.. attribute:: headers
List of request headers.
.. attribute:: raw_input
File-like object representing the body of the request.
.. attribute:: url_parts
Parts of the requested URL as obtained by urlparse.urlsplit(path)
.. attribute:: request_line
Raw request line
.. attribute:: headers
RequestHeaders object providing a dictionary-like representation of
the request headers.
.. attribute:: body
Request body as a string
.. attribute:: GET
MultiDict representing the parameters supplied with the request.
Note that these may be present on non-GET requests; the name is
chosen to be familiar to users of other systems such as PHP.
.. attribute:: POST
MultiDict representing the request body parameters. Most parameters
are present as string values, but file uploads have file-like
values.
.. attribute:: cookies
Cookies object representing cookies sent with the request with a
dictionary-like interface.
.. attribute:: auth
Object with username and password properties representing any
credentials supplied using HTTP authentication.
.. attribute:: server
Server object containing information about the server environment.
"""
def __init__(self, request_handler):
self.doc_root = request_handler.server.router.doc_root
self.route_match = None # Set by the router
self.protocol_version = request_handler.protocol_version
self.method = request_handler.command
scheme = request_handler.server.scheme
host = request_handler.headers.get("Host")
port = request_handler.server.server_address[1]
if host is None:
host = request_handler.server.server_address[0]
else:
if ":" in host:
host, port = host.split(":", 1)
self.request_path = request_handler.path
if self.request_path.startswith(scheme + "://"):
self.url = request_handler.path
else:
self.url = "%s://%s:%s%s" % (scheme,
host,
port,
self.request_path)
self.url_parts = urlparse.urlsplit(self.url)
self._raw_headers = request_handler.headers
self.request_line = request_handler.raw_requestline
self._headers = None
self.raw_input = InputFile(request_handler.rfile,
int(self.headers.get("Content-Length", 0)))
self._body = None
self._GET = None
self._POST = None
self._cookies = None
self._auth = None
self.server = Server(self)
def __repr__(self):
return "<Request %s %s>" % (self.method, self.url)
@property
def GET(self):
if self._GET is None:
params = urlparse.parse_qsl(self.url_parts.query, keep_blank_values=True)
self._GET = MultiDict()
for key, value in params:
self._GET.add(key, value)
return self._GET
@property
def POST(self):
if self._POST is None:
            # Work out the POST parameters
pos = self.raw_input.tell()
self.raw_input.seek(0)
fs = cgi.FieldStorage(fp=self.raw_input,
environ={"REQUEST_METHOD": self.method},
headers=self.headers,
keep_blank_values=True)
self._POST = MultiDict.from_field_storage(fs)
self.raw_input.seek(pos)
return self._POST
@property
def cookies(self):
if self._cookies is None:
parser = Cookie.BaseCookie()
cookie_headers = self.headers.get("cookie", "")
parser.load(cookie_headers)
cookies = Cookies()
for key, value in parser.iteritems():
cookies[key] = CookieValue(value)
self._cookies = cookies
return self._cookies
@property
def headers(self):
if self._headers is None:
self._headers = RequestHeaders(self._raw_headers)
return self._headers
@property
def body(self):
if self._body is None:
pos = self.raw_input.tell()
self.raw_input.seek(0)
self._body = self.raw_input.read()
self.raw_input.seek(pos)
return self._body
@property
def auth(self):
if self._auth is None:
self._auth = Authentication(self.headers)
return self._auth
class RequestHeaders(dict):
"""Dictionary-like API for accessing request headers."""
def __init__(self, items):
for key, value in zip(items.keys(), items.values()):
key = key.lower()
if key in self:
self[key].append(value)
else:
dict.__setitem__(self, key, [value])
def __getitem__(self, key):
"""Get all headers of a certain (case-insensitive) name. If there is
more than one, the values are returned comma separated"""
values = dict.__getitem__(self, key.lower())
if len(values) == 1:
return values[0]
else:
return ", ".join(values)
def __setitem__(self, name, value):
raise Exception
def get(self, key, default=None):
"""Get a string representing all headers with a particular value,
with multiple headers separated by a comma. If no header is found
return a default value
:param key: The header name to look up (case-insensitive)
:param default: The value to return in the case of no match
"""
try:
return self[key]
except KeyError:
return default
def get_list(self, key, default=missing):
"""Get all the header values for a particular field name as
a list"""
try:
return dict.__getitem__(self, key.lower())
except KeyError:
if default is not missing:
return default
else:
raise
def __contains__(self, key):
return dict.__contains__(self, key.lower())
def iteritems(self):
for item in self:
yield item, self[item]
def itervalues(self):
for item in self:
yield self[item]
class CookieValue(object):
"""Representation of cookies.
Note that cookies are considered read-only and the string value
of the cookie will not change if you update the field values.
However this is not enforced.
.. attribute:: key
The name of the cookie.
.. attribute:: value
The value of the cookie
.. attribute:: expires
The expiry date of the cookie
.. attribute:: path
The path of the cookie
.. attribute:: comment
The comment of the cookie.
.. attribute:: domain
The domain with which the cookie is associated
.. attribute:: max_age
The max-age value of the cookie.
.. attribute:: secure
Whether the cookie is marked as secure
.. attribute:: httponly
Whether the cookie is marked as httponly
"""
def __init__(self, morsel):
self.key = morsel.key
self.value = morsel.value
for attr in ["expires", "path",
"comment", "domain", "max-age",
"secure", "version", "httponly"]:
setattr(self, attr.replace("-", "_"), morsel[attr])
self._str = morsel.OutputString()
def __str__(self):
return self._str
def __repr__(self):
return self._str
def __eq__(self, other):
"""Equality comparison for cookies. Compares to other cookies
based on value alone and on non-cookies based on the equality
of self.value with the other object so that a cookie with value
"ham" compares equal to the string "ham"
"""
if hasattr(other, "value"):
return self.value == other.value
return self.value == other
class MultiDict(dict):
"""Dictionary type that holds multiple values for each
key"""
#TODO: this should perhaps also order the keys
def __init__(self):
pass
def __setitem__(self, name, value):
dict.__setitem__(self, name, [value])
def add(self, name, value):
if name in self:
dict.__getitem__(self, name).append(value)
else:
dict.__setitem__(self, name, [value])
def __getitem__(self, key):
"""Get the first value with a given key"""
#TODO: should this instead be the last value?
return self.first(key)
def first(self, key, default=missing):
"""Get the first value with a given key
:param key: The key to lookup
:param default: The default to return if key is
not found (throws if nothing is
specified)
"""
if key in self and dict.__getitem__(self, key):
return dict.__getitem__(self, key)[0]
elif default is not missing:
return default
raise KeyError
def last(self, key, default=missing):
"""Get the last value with a given key
:param key: The key to lookup
:param default: The default to return if key is
not found (throws if nothing is
specified)
"""
if key in self and dict.__getitem__(self, key):
return dict.__getitem__(self, key)[-1]
elif default is not missing:
return default
raise KeyError
def get_list(self, key):
"""Get all values with a given key as a list
:param key: The key to lookup
"""
return dict.__getitem__(self, key)
@classmethod
def from_field_storage(cls, fs):
self = cls()
if fs.list is None:
return self
for key in fs:
values = fs[key]
if not isinstance(values, list):
values = [values]
for value in values:
if value.filename:
value = value
else:
value = value.value
self.add(key, value)
return self
class Cookies(MultiDict):
"""MultiDict specialised for Cookie values"""
def __init__(self):
pass
def __getitem__(self, key):
return self.last(key)
class Authentication(object):
"""Object for dealing with HTTP Authentication
.. attribute:: username
The username supplied in the HTTP Authorization
header, or None
.. attribute:: password
The password supplied in the HTTP Authorization
header, or None
"""
def __init__(self, headers):
self.username = None
self.password = None
auth_schemes = {"Basic": self.decode_basic}
if "authorization" in headers:
header = headers.get("authorization")
auth_type, data = header.split(" ", 1)
if auth_type in auth_schemes:
self.username, self.password = auth_schemes[auth_type](data)
else:
raise HTTPException(400, "Unsupported authentication scheme %s" % auth_type)
def decode_basic(self, data):
decoded_data = base64.decodestring(data)
return decoded_data.split(":", 1)
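# Minimal sanity sketch for the container types above (illustrative only; the
# module-level imports tie this file to wptserve, so exercise it from within
# that package):
def _demo_containers():
    md = MultiDict()
    md.add("a", "1")
    md.add("a", "2")
    assert md["a"] == "1"                 # __getitem__ returns the first value
    assert md.last("a") == "2"
    assert md.get_list("a") == ["1", "2"]
    # A plain dict satisfies the keys()/values() interface RequestHeaders
    # expects, though real input comes from the request handler.
    headers = RequestHeaders({"Content-Type": "text/html"})
    assert headers["content-type"] == "text/html"  # case-insensitive lookup
    assert headers.get("x-missing", "?") == "?"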
|
galengold/split70
|
refs/heads/master
|
qmk_firmware/lib/googletest/googletest/test/gtest_shuffle_test.py
|
3023
|
#!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that test shuffling works."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Command to run the gtest_shuffle_test_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_shuffle_test_')
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
TEST_FILTER = 'A*.A:A*.B:C*'
ALL_TESTS = []
ACTIVE_TESTS = []
FILTERED_TESTS = []
SHARDED_TESTS = []
SHUFFLED_ALL_TESTS = []
SHUFFLED_ACTIVE_TESTS = []
SHUFFLED_FILTERED_TESTS = []
SHUFFLED_SHARDED_TESTS = []
def AlsoRunDisabledTestsFlag():
return '--gtest_also_run_disabled_tests'
def FilterFlag(test_filter):
return '--gtest_filter=%s' % (test_filter,)
def RepeatFlag(n):
return '--gtest_repeat=%s' % (n,)
def ShuffleFlag():
return '--gtest_shuffle'
def RandomSeedFlag(n):
return '--gtest_random_seed=%s' % (n,)
def RunAndReturnOutput(extra_env, args):
"""Runs the test program and returns its output."""
environ_copy = os.environ.copy()
environ_copy.update(extra_env)
return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output
def GetTestsForAllIterations(extra_env, args):
"""Runs the test program and returns a list of test lists.
Args:
extra_env: a map from environment variables to their values
args: command line flags to pass to gtest_shuffle_test_
Returns:
A list where the i-th element is the list of tests run in the i-th
test iteration.
"""
test_iterations = []
for line in RunAndReturnOutput(extra_env, args).split('\n'):
if line.startswith('----'):
tests = []
test_iterations.append(tests)
elif line.strip():
tests.append(line.strip()) # 'TestCaseName.TestName'
return test_iterations
def GetTestCases(tests):
"""Returns a list of test cases in the given full test names.
Args:
tests: a list of full test names
Returns:
    A list of test cases from 'tests', in the order of first appearance.
    Duplicates are removed (only the first occurrence of each case is kept).
"""
test_cases = []
for test in tests:
test_case = test.split('.')[0]
if not test_case in test_cases:
test_cases.append(test_case)
return test_cases
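# For example (illustrative): GetTestCases(['A.x', 'A.y', 'B.z', 'A.w'])
# returns ['A', 'B'].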
def CalculateTestLists():
"""Calculates the list of tests run under different flags."""
if not ALL_TESTS:
ALL_TESTS.extend(
GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])
if not ACTIVE_TESTS:
ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])
if not FILTERED_TESTS:
FILTERED_TESTS.extend(
GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])
if not SHARDED_TESTS:
SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[])[0])
if not SHUFFLED_ALL_TESTS:
SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(
{}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_ACTIVE_TESTS:
SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])[0])
if not SHUFFLED_FILTERED_TESTS:
SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0])
if not SHUFFLED_SHARDED_TESTS:
SHUFFLED_SHARDED_TESTS.extend(
GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(1)])[0])
class GTestShuffleUnitTest(gtest_test_utils.TestCase):
"""Tests test shuffling."""
def setUp(self):
CalculateTestLists()
def testShufflePreservesNumberOfTests(self):
self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))
self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))
self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))
self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))
def testShuffleChangesTestOrder(self):
self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)
self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)
self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,
SHUFFLED_FILTERED_TESTS)
self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,
SHUFFLED_SHARDED_TESTS)
def testShuffleChangesTestCaseOrder(self):
self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),
GetTestCases(SHUFFLED_ALL_TESTS))
self.assert_(
GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),
GetTestCases(SHUFFLED_ACTIVE_TESTS))
self.assert_(
GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),
GetTestCases(SHUFFLED_FILTERED_TESTS))
self.assert_(
GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),
GetTestCases(SHUFFLED_SHARDED_TESTS))
def testShuffleDoesNotRepeatTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),
'%s appears more than once' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),
'%s appears more than once' % (test,))
def testShuffleDoesNotCreateNewTest(self):
for test in SHUFFLED_ALL_TESTS:
self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_ACTIVE_TESTS:
self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_FILTERED_TESTS:
self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))
for test in SHUFFLED_SHARDED_TESTS:
self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))
def testShuffleIncludesAllTests(self):
for test in ALL_TESTS:
self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))
for test in ACTIVE_TESTS:
self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))
for test in FILTERED_TESTS:
self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))
for test in SHARDED_TESTS:
self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))
def testShuffleLeavesDeathTestsAtFront(self):
non_death_test_found = False
for test in SHUFFLED_ACTIVE_TESTS:
if 'DeathTest.' in test:
self.assert_(not non_death_test_found,
'%s appears after a non-death test' % (test,))
else:
non_death_test_found = True
def _VerifyTestCasesDoNotInterleave(self, tests):
test_cases = []
for test in tests:
[test_case, _] = test.split('.')
if test_cases and test_cases[-1] != test_case:
test_cases.append(test_case)
self.assertEqual(1, test_cases.count(test_case),
'Test case %s is not grouped together in %s' %
(test_case, tests))
def testShuffleDoesNotInterleaveTestCases(self):
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)
self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)
def testShuffleRestoresOrderAfterEachIteration(self):
# Get the test lists in all 3 iterations, using random seed 1, 2,
# and 3 respectively. Google Test picks a different seed in each
# iteration, and this test depends on the current implementation
# picking successive numbers. This dependency is not ideal, but
# makes the test much easier to write.
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
# Make sure running the tests with random seed 1 gets the same
# order as in iteration 1 above.
[tests_with_seed1] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1)])
self.assertEqual(tests_in_iteration1, tests_with_seed1)
# Make sure running the tests with random seed 2 gets the same
# order as in iteration 2 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 2.
[tests_with_seed2] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(2)])
self.assertEqual(tests_in_iteration2, tests_with_seed2)
# Make sure running the tests with random seed 3 gets the same
# order as in iteration 3 above. Success means that Google Test
# correctly restores the test order before re-shuffling at the
# beginning of iteration 3.
[tests_with_seed3] = GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(3)])
self.assertEqual(tests_in_iteration3, tests_with_seed3)
def testShuffleGeneratesNewOrderInEachIteration(self):
[tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (
GetTestsForAllIterations(
{}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))
self.assert_(tests_in_iteration1 != tests_in_iteration2,
tests_in_iteration1)
self.assert_(tests_in_iteration1 != tests_in_iteration3,
tests_in_iteration1)
self.assert_(tests_in_iteration2 != tests_in_iteration3,
tests_in_iteration2)
def testShuffleShardedTestsPreservesPartition(self):
# If we run M tests on N shards, the same M tests should be run in
# total, regardless of the random seeds used by the shards.
[tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '0'},
[ShuffleFlag(), RandomSeedFlag(1)])
[tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '1'},
[ShuffleFlag(), RandomSeedFlag(20)])
[tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',
SHARD_INDEX_ENV_VAR: '2'},
[ShuffleFlag(), RandomSeedFlag(25)])
sorted_sharded_tests = tests1 + tests2 + tests3
sorted_sharded_tests.sort()
sorted_active_tests = []
sorted_active_tests.extend(ACTIVE_TESTS)
sorted_active_tests.sort()
self.assertEqual(sorted_active_tests, sorted_sharded_tests)
if __name__ == '__main__':
gtest_test_utils.Main()
|
Facetracker-project/facetracker-core
|
refs/heads/master
|
lib/youtube-dl/youtube_dl/extractor/keezmovies.py
|
107
|
from __future__ import unicode_literals
import os
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse_urlparse,
compat_urllib_request,
)
class KeezMoviesIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?keezmovies\.com/video/.+?(?P<id>[0-9]+)(?:[/?&]|$)'
_TEST = {
'url': 'http://www.keezmovies.com/video/petite-asian-lady-mai-playing-in-bathtub-1214711',
'md5': '6e297b7e789329923fcf83abb67c9289',
'info_dict': {
'id': '1214711',
'ext': 'mp4',
'title': 'Petite Asian Lady Mai Playing In Bathtub',
'age_limit': 18,
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
req = compat_urllib_request.Request(url)
req.add_header('Cookie', 'age_verified=1')
webpage = self._download_webpage(req, video_id)
# embedded video
mobj = re.search(r'href="([^"]+)"></iframe>', webpage)
if mobj:
embedded_url = mobj.group(1)
return self.url_result(embedded_url)
video_title = self._html_search_regex(
r'<h1 [^>]*>([^<]+)', webpage, 'title')
video_url = self._html_search_regex(
r'(?s)html5VideoPlayer = .*?src="([^"]+)"', webpage, 'video URL')
path = compat_urllib_parse_urlparse(video_url).path
extension = os.path.splitext(path)[1][1:]
format = path.split('/')[4].split('_')[:2]
format = "-".join(format)
age_limit = self._rta_search(webpage)
return {
'id': video_id,
'title': video_title,
'url': video_url,
'ext': extension,
'format': format,
'format_id': format,
'age_limit': age_limit,
}
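# Quick illustration (a sketch, not part of the extractor): _VALID_URL
# captures the trailing numeric id of a video URL.
#
#   >>> import re
#   >>> m = re.match(KeezMoviesIE._VALID_URL,
#   ...              'http://www.keezmovies.com/video/some-title-1214711')
#   >>> m.group('id')
#   '1214711'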
|
sqrf/nanoacoustics-mie
|
refs/heads/master
|
Scripts/lambdalist.py
|
1
|
"""
File lambdalist.py
Author Rafael Silva Quiroz
email sqrf87@gmail.com
License educational use
Description List of wavelengths used by Epsilon.py to calculate the corrected dielectric function
Return Wavelengths
Reference Bohren & Hoffman data base
"""
from variables import *
from energies import *
lambdalist = [float(h * ls) / energy for energy in energies]
l2 = lambdalist
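# Worked example (a sketch; assumes h and ls from variables.py hold Planck's
# constant and the speed of light in consistent units): for a photon energy
# E = 2.0 eV and h * c ~= 1239.84 eV*nm, lambda = h * c / E ~= 619.9 nm.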
|
giovannicode/djangoseller
|
refs/heads/master
|
users/migrations/0001_initial.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('first_name', models.CharField(max_length=30)),
('last_name', models.CharField(max_length=30)),
('email', models.EmailField(unique=True, max_length=75)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('groups', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of his/her group.', verbose_name='groups')),
('user_permissions', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions')),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
]
|
agvergara/Python
|
refs/heads/master
|
X-Serv-15.6-Django-CMS_PUT/cms_put/views.py
|
1
|
from django.shortcuts import render
from cms.models import Pages
from django.http import HttpResponse, HttpResponseNotAllowed
from django.views.decorators.csrf import csrf_exempt
# Create your views here.
@csrf_exempt
def insertcontent(request):
info = request.body
    try:
        # Split on the first '=' only, so the content may itself contain '='.
        name, content = info.split('=', 1)
    except ValueError:
        ans = "Bad input: expected name=content"
        return HttpResponse(ans)
    if request.method in ("PUT", "POST"):
page = Pages(name=name, page=content)
page.save()
ans = "Todo ha ido ok"
else:
return HttpResponseNotAllowed("Method Not Allowed")
return HttpResponse(ans)
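# Example client call (a sketch, not part of the app; the URL route is
# hypothetical): send "name=content" as the body of a PUT or POST request.
#
#   import requests
#   requests.put('http://localhost:8000/insertcontent', data='mypage=Hello')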
|
bmanojlovic/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/openstack/os_user.py
|
4
|
#!/usr/bin/python
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: os_user
short_description: Manage OpenStack Identity Users
extends_documentation_fragment: openstack
version_added: "2.0"
description:
- Manage OpenStack Identity users. Users can be created,
updated or deleted using this module. A user will be updated
if I(name) matches an existing user and I(state) is present.
The value for I(name) cannot be updated without deleting and
re-creating the user.
options:
name:
description:
- Username for the user
required: true
password:
description:
- Password for the user
required: false
default: None
update_password:
required: false
default: always
choices: ['always', 'on_create']
version_added: "2.3"
description:
- C(always) will attempt to update password. C(on_create) will only
set the password for newly created users.
email:
description:
- Email address for the user
required: false
default: None
default_project:
description:
- Project name or ID that the user should be associated with by default
required: false
default: None
domain:
description:
- Domain to create the user in if the cloud supports domains
required: false
default: None
enabled:
description:
- Is the user enabled
required: false
default: True
state:
description:
- Should the resource be present or absent.
choices: [present, absent]
default: present
requirements:
- "python >= 2.6"
- "shade"
'''
EXAMPLES = '''
# Create a user
- os_user:
cloud: mycloud
state: present
name: demouser
password: secret
email: demo@example.com
domain: default
default_project: demo
# Delete a user
- os_user:
cloud: mycloud
state: absent
name: demouser
# Create a user but don't update password if user exists
- os_user:
cloud: mycloud
state: present
name: demouser
password: secret
update_password: on_create
email: demo@example.com
domain: default
default_project: demo
'''
RETURN = '''
user:
description: Dictionary describing the user.
returned: On success when I(state) is 'present'
type: dictionary
contains:
default_project_id:
description: User default project ID. Only present with Keystone >= v3.
type: string
sample: "4427115787be45f08f0ec22a03bfc735"
domain_id:
description: User domain ID. Only present with Keystone >= v3.
type: string
sample: "default"
email:
description: User email address
type: string
sample: "demo@example.com"
id:
description: User ID
type: string
sample: "f59382db809c43139982ca4189404650"
name:
description: User name
type: string
sample: "demouser"
'''
def _needs_update(params_dict, user):
for k, v in params_dict.items():
if k not in ('password', 'update_password') and user[k] != v:
return True
# We don't get password back in the user object, so assume any supplied
# password is a change.
if (params_dict['password'] is not None and
params_dict['update_password'] == 'always'):
return True
return False
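# Quick illustration (a sketch): a changed email triggers an update even when
# no password change is requested.
#
#   _needs_update({'email': 'new@example.com', 'enabled': True,
#                  'password': None, 'update_password': 'on_create'},
#                 {'email': 'old@example.com', 'enabled': True})  # -> True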
def _get_domain_id(cloud, domain):
try:
# We assume admin is passing domain id
domain_id = cloud.get_domain(domain)['id']
    except Exception:
# If we fail, maybe admin is passing a domain name.
# Note that domains have unique names, just like id.
try:
domain_id = cloud.search_domains(filters={'name': domain})[0]['id']
        except Exception:
# Ok, let's hope the user is non-admin and passing a sane id
domain_id = domain
return domain_id
def _get_default_project_id(cloud, default_project, module):
    project = cloud.get_project(default_project)
    if not project:
        # module must be passed in; it is local to main() and referencing it
        # here as a global would raise a NameError.
        module.fail_json(msg='Default project %s is not valid' % default_project)
    return project['id']
def main():
argument_spec = openstack_full_argument_spec(
name=dict(required=True),
        password=dict(required=False, default=None, no_log=True),
email=dict(required=False, default=None),
default_project=dict(required=False, default=None),
domain=dict(required=False, default=None),
enabled=dict(default=True, type='bool'),
state=dict(default='present', choices=['absent', 'present']),
update_password=dict(default='always', choices=['always',
'on_create']),
)
module_kwargs = openstack_module_kwargs()
module = AnsibleModule(
argument_spec,
**module_kwargs)
if not HAS_SHADE:
module.fail_json(msg='shade is required for this module')
name = module.params['name']
password = module.params.pop('password')
email = module.params['email']
default_project = module.params['default_project']
domain = module.params['domain']
enabled = module.params['enabled']
state = module.params['state']
update_password = module.params['update_password']
try:
cloud = shade.openstack_cloud(**module.params)
user = cloud.get_user(name)
domain_id = None
if domain:
opcloud = shade.operator_cloud(**module.params)
domain_id = _get_domain_id(opcloud, domain)
if state == 'present':
if update_password in ('always', 'on_create'):
if not password:
msg = ("update_password is %s but a password value is "
"missing") % update_password
module.fail_json(msg=msg)
default_project_id = None
if default_project:
                default_project_id = _get_default_project_id(cloud, default_project, module)
if user is None:
user = cloud.create_user(
name=name, password=password, email=email,
default_project=default_project_id, domain_id=domain_id,
enabled=enabled)
changed = True
else:
params_dict = {'email': email, 'enabled': enabled,
'password': password,
'update_password': update_password}
if domain_id is not None:
params_dict['domain_id'] = domain_id
if default_project_id is not None:
params_dict['default_project_id'] = default_project_id
if _needs_update(params_dict, user):
if update_password == 'always':
user = cloud.update_user(
user['id'], password=password, email=email,
default_project=default_project_id,
domain_id=domain_id, enabled=enabled)
else:
user = cloud.update_user(
user['id'], email=email,
default_project=default_project_id,
domain_id=domain_id, enabled=enabled)
changed = True
else:
changed = False
module.exit_json(changed=changed, user=user)
elif state == 'absent':
            if user is None:
                changed = False
            else:
                cloud.delete_user(user['id'])
                changed = True
module.exit_json(changed=changed)
except shade.OpenStackCloudException as e:
module.fail_json(msg=str(e), extra_data=e.extra_data)
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
|
ourbest/sns_app
|
refs/heads/master
|
backend/schedulers.py
|
1
|
from datetime import datetime, timedelta
import django_rq
scheduler = None
def get_scheduler():
global scheduler
if scheduler:
return scheduler
scheduler = django_rq.get_scheduler('default')
return scheduler
def run_at(timestamp, func, *args, **kwargs):
if isinstance(timestamp, datetime):
timestamp = timestamp.timestamp()
    # The delay is the time remaining until the requested timestamp (the
    # original subtraction was reversed and produced a negative delay).
    get_scheduler().enqueue_in(timedelta(seconds=timestamp - datetime.now().timestamp()), func, *args, **kwargs)
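# Example usage (a sketch; assumes django_rq and rq-scheduler are configured):
#
#   from datetime import datetime, timedelta
#
#   def send_reminder(user_id):
#       ...
#
#   run_at(datetime.now() + timedelta(minutes=5), send_reminder, 42)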
|
Compunctus/android_kernel_lge_g3
|
refs/heads/cm-13.0
|
tools/perf/util/setup.py
|
4998
|
#!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='acme@redhat.com',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
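# Note (a sketch; the exact invocation comes from the perf Makefile): this
# script expects PYTHON_EXTBUILD_LIB and PYTHON_EXTBUILD_TMP in the
# environment, e.g.
#
#   PYTHON_EXTBUILD_LIB=/tmp/ext/lib PYTHON_EXTBUILD_TMP=/tmp/ext/tmp \
#       python2 util/setup.py install --root=/tmp/perf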
|
MattsFleaMarket/python-for-android
|
refs/heads/master
|
python-build/python-libs/gdata/tests/gdata_tests/apps/groups/service_test.py
|
128
|
#!/usr/bin/python
#
# Copyright (C) 2008 Google
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for Groups service."""
__author__ = 'google-apps-apis@googlegroups.com'
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
import gdata.apps
import gdata.apps.service
import gdata.apps.groups.service
import getpass
import time
domain = ''
admin_email = ''
admin_password = ''
username = ''
class GroupsTest(unittest.TestCase):
"""Test for the GroupsService."""
def setUp(self):
self.postfix = time.strftime("%Y%m%d%H%M%S")
self.apps_client = gdata.apps.service.AppsService(
email=admin_email, domain=domain, password=admin_password,
source='GroupsClient "Unit" Tests')
self.apps_client.ProgrammaticLogin()
self.groups_client = gdata.apps.groups.service.GroupsService(
email=admin_email, domain=domain, password=admin_password,
source='GroupsClient "Unit" Tests')
self.groups_client.ProgrammaticLogin()
self.created_users = []
self.created_groups = []
    self.createUsers()
def createUsers(self):
user_name = 'yujimatsuo-' + self.postfix
family_name = 'Matsuo'
given_name = 'Yuji'
password = '123$$abc'
suspended = 'false'
try:
self.user_yuji = self.apps_client.CreateUser(
user_name=user_name, family_name=family_name, given_name=given_name,
password=password, suspended=suspended)
print 'User ' + user_name + ' created'
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.created_users.append(self.user_yuji)
user_name = 'taromatsuo-' + self.postfix
family_name = 'Matsuo'
given_name = 'Taro'
password = '123$$abc'
suspended = 'false'
try:
self.user_taro = self.apps_client.CreateUser(
user_name=user_name, family_name=family_name, given_name=given_name,
password=password, suspended=suspended)
print 'User ' + user_name + ' created'
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.created_users.append(self.user_taro)
def tearDown(self):
print '\n'
for user in self.created_users:
try:
self.apps_client.DeleteUser(user.login.user_name)
print 'User ' + user.login.user_name + ' deleted'
except Exception, e:
print e
for group in self.created_groups:
try:
self.groups_client.DeleteGroup(group)
print 'Group ' + group + ' deleted'
except Exception, e:
print e
def test001GroupsMethods(self):
# tests CreateGroup method
group01_id = 'group01-' + self.postfix
group02_id = 'group02-' + self.postfix
try:
created_group01 = self.groups_client.CreateGroup(group01_id, 'US Sales 1',
'Testing', gdata.apps.groups.service.PERMISSION_OWNER)
created_group02 = self.groups_client.CreateGroup(group02_id, 'US Sales 2',
'Testing', gdata.apps.groups.service.PERMISSION_OWNER)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(created_group01['groupId'], group01_id)
self.assertEquals(created_group02['groupId'], group02_id)
self.created_groups.append(group01_id)
self.created_groups.append(group02_id)
# tests UpdateGroup method
try:
updated_group = self.groups_client.UpdateGroup(group01_id, 'Updated!',
'Testing', gdata.apps.groups.service.PERMISSION_OWNER)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(updated_group['groupName'], 'Updated!')
# tests RetrieveGroup method
try:
retrieved_group = self.groups_client.RetrieveGroup(group01_id)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(retrieved_group['groupId'], group01_id + '@' + domain)
# tests RetrieveAllGroups method
try:
retrieved_groups = self.groups_client.RetrieveAllGroups()
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(len(retrieved_groups),
len(self.apps_client.RetrieveAllEmailLists().entry))
# tests AddMemberToGroup
try:
added_member = self.groups_client.AddMemberToGroup(
self.user_yuji.login.user_name, group01_id)
self.groups_client.AddMemberToGroup(
self.user_taro.login.user_name, group02_id)
self.groups_client.AddMemberToGroup(
group01_id, group02_id)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(added_member['memberId'],
self.user_yuji.login.user_name)
# tests RetrieveGroups method
try:
retrieved_direct_groups = self.groups_client.RetrieveGroups(
self.user_yuji.login.user_name, True)
retrieved_groups = self.groups_client.RetrieveGroups(
self.user_yuji.login.user_name, False)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(len(retrieved_direct_groups), 1)
# TODO: Enable this test after a directOnly bug is fixed
#self.assertEquals(len(retrieved_groups), 2)
# tests IsMember method
try:
result = self.groups_client.IsMember(
self.user_yuji.login.user_name, group01_id)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(result, True)
# tests RetrieveMember method
try:
retrieved_member = self.groups_client.RetrieveMember(
self.user_yuji.login.user_name, group01_id)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(retrieved_member['memberId'],
self.user_yuji.login.user_name + '@' + domain)
# tests RetrieveAllMembers method
try:
retrieved_members = self.groups_client.RetrieveAllMembers(group01_id)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(len(retrieved_members), 1)
# tests RemoveMemberFromGroup method
try:
self.groups_client.RemoveMemberFromGroup(self.user_yuji.login.user_name,
group01_id)
retrieved_members = self.groups_client.RetrieveAllMembers(group01_id)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(len(retrieved_members), 0)
# tests AddOwnerToGroup
try:
added_owner = self.groups_client.AddOwnerToGroup(
self.user_yuji.login.user_name, group01_id)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(added_owner['email'],
self.user_yuji.login.user_name)
# tests IsOwner method
try:
result = self.groups_client.IsOwner(
self.user_yuji.login.user_name, group01_id)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(result, True)
# tests RetrieveOwner method
try:
retrieved_owner = self.groups_client.RetrieveOwner(
self.user_yuji.login.user_name, group01_id)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(retrieved_owner['email'],
self.user_yuji.login.user_name + '@' + domain)
# tests RetrieveAllOwners method
try:
retrieved_owners = self.groups_client.RetrieveAllOwners(group01_id)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(len(retrieved_owners), 1)
# tests RemoveOwnerFromGroup method
try:
self.groups_client.RemoveOwnerFromGroup(self.user_yuji.login.user_name,
group01_id)
retrieved_owners = self.groups_client.RetrieveAllOwners(group01_id)
except Exception, e:
self.fail('Unexpected exception occurred: %s' % e)
self.assertEquals(len(retrieved_owners), 0)
if __name__ == '__main__':
print("""Google Apps Groups Service Tests
NOTE: Please run these tests only with a test user account.
""")
domain = raw_input('Google Apps domain: ')
admin_email = '%s@%s' % (raw_input('Administrator username: '), domain)
admin_password = getpass.getpass('Administrator password: ')
unittest.main()
|
CALlanoR/VirtShell
|
refs/heads/master
|
virtshell_server/virtshell_server/errors.py
|
2
|
"""
VirtShell-Server Errors
"""
class VirtShellServerError(Exception):
def __init__(self, message, code):
self.message = message
self.code = code
        super(VirtShellServerError, self).__init__(message)
# 400
class MissingField(VirtShellServerError):
def __init__(self, field):
        VirtShellServerError.__init__(self,
"%s field was not provided" % field,
400
)
class WrongFieldType(VirtShellServerError):
def __init__(self, field, _given, _required):
        VirtShellServerError.__init__(self,
"Field %s - %s is type %s but should be %s" %
(field, _given, type(_given), _required),
400
)
class InvalidJSON(VirtShellServerError):
def __init__(self):
        VirtShellServerError.__init__(self,
"No JSON object could be decoded.",
400
)
# 401
class AuthError(VirtShellServerError):
def __init__(self):
        VirtShellServerError.__init__(self,
"User not authenticated",
401
)
# 404
class RouteNotFound(VirtShellServerError):
def __init__(self, action):
        VirtShellServerError.__init__(self,
"%s route could not be found" % action,
404
)
# 500
class MongoError(VirtShellServerError):
def __init__(self, message):
        VirtShellServerError.__init__(self,
message,
500
)
class ServerError(VirtShellServerError):
def __init__(self):
        VirtShellServerError.__init__(self,
"we screwed up and have some debugging to do",
500
)
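# Example usage (a sketch): raise a typed error and map it to an HTTP status.
#
#   try:
#       raise MissingField('name')
#   except VirtShellServerError as e:
#       print(e.code, e.message)  # -> 400 "name field was not provided"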
|
virneo/nupic
|
refs/heads/master
|
tests/unit/nupic/encoders/category_test.py
|
7
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Unit tests for category encoder"""
import tempfile
import unittest
import capnp
import numpy
from nupic.data import SENTINEL_VALUE_FOR_MISSING_DATA
from nupic.encoders.base import defaultDtype
from nupic.encoders.category import CategoryEncoder, UNKNOWN
from nupic.encoders.category_capnp import CategoryEncoderProto
class CategoryEncoderTest(unittest.TestCase):
'''Unit tests for CategoryEncoder class'''
def testCategoryEncoder(self):
categories = ["ES", "GB", "US"]
# forced: is not recommended, but is used here for readability.
# see scalar.py
e = CategoryEncoder(w=3, categoryList=categories, forced=True)
output = e.encode("US")
expected = numpy.array([0,0,0,0,0,0,0,0,0,1,1,1], dtype=defaultDtype)
self.assertTrue(numpy.array_equal(output, expected))
# Test reverse lookup
decoded = e.decode(output)
(fieldsDict, fieldNames) = decoded
self.assertEqual(len(fieldNames), 1)
self.assertEqual(len(fieldsDict), 1)
self.assertEqual(fieldNames[0], fieldsDict.keys()[0])
(ranges, desc) = fieldsDict.values()[0]
self.assertEqual(desc, "US")
self.assertEqual(len(ranges), 1)
self.assertTrue(numpy.array_equal(ranges[0], [3, 3]))
# Test topdown compute
for v in categories:
output = e.encode(v)
topDown = e.topDownCompute(output)
self.assertEqual(topDown.value, v)
self.assertEqual(topDown.scalar, e.getScalars(v)[0])
bucketIndices = e.getBucketIndices(v)
topDown = e.getBucketInfo(bucketIndices)[0]
self.assertEqual(topDown.value, v)
self.assertEqual(topDown.scalar, e.getScalars(v)[0])
self.assertTrue(numpy.array_equal(topDown.encoding, output))
self.assertEqual(topDown.value, e.getBucketValues()[bucketIndices[0]])
# ---------------------
# unknown category
output = e.encode("NA")
expected = numpy.array([1,1,1,0,0,0,0,0,0,0,0,0], dtype=defaultDtype)
self.assertTrue(numpy.array_equal(output, expected))
# Test reverse lookup
decoded = e.decode(output)
(fieldsDict, fieldNames) = decoded
self.assertEqual(len(fieldNames), 1)
self.assertEqual(len(fieldsDict), 1)
self.assertEqual(fieldNames[0], fieldsDict.keys()[0])
(ranges, desc) = fieldsDict.values()[0]
self.assertEqual(len(ranges), 1)
self.assertTrue(numpy.array_equal(ranges[0], [0, 0]))
# Test topdown compute
topDown = e.topDownCompute(output)
self.assertEqual(topDown.value, UNKNOWN)
self.assertEqual(topDown.scalar, 0)
# --------------------------------
# ES
output = e.encode("ES")
expected = numpy.array([0,0,0,1,1,1,0,0,0,0,0,0], dtype=defaultDtype)
self.assertTrue(numpy.array_equal(output, expected))
# MISSING VALUE
outputForMissing = e.encode(SENTINEL_VALUE_FOR_MISSING_DATA)
self.assertEqual(sum(outputForMissing), 0)
# Test reverse lookup
decoded = e.decode(output)
(fieldsDict, fieldNames) = decoded
self.assertEqual(len(fieldNames), 1)
self.assertEqual(len(fieldsDict), 1)
self.assertEqual(fieldNames[0], fieldsDict.keys()[0])
(ranges, desc) = fieldsDict.values()[0]
self.assertEqual(len(ranges), 1)
self.assertTrue(numpy.array_equal(ranges[0], [1, 1]))
# Test topdown compute
topDown = e.topDownCompute(output)
self.assertEqual(topDown.value, "ES")
self.assertEqual(topDown.scalar, e.getScalars("ES")[0])
# --------------------------------
# Multiple categories
output.fill(1)
# Test reverse lookup
decoded = e.decode(output)
(fieldsDict, fieldNames) = decoded
self.assertEqual(len(fieldNames), 1)
self.assertEqual(len(fieldsDict), 1)
self.assertEqual(fieldNames[0], fieldsDict.keys()[0])
(ranges, desc) = fieldsDict.values()[0]
self.assertEqual(len(ranges), 1)
self.assertTrue(numpy.array_equal(ranges[0], [0, 3]))
# -------------------------------------------------------------
# Test with width = 1
categories = ["cat1", "cat2", "cat3", "cat4", "cat5"]
# forced: is not recommended, but is used here for readability.
# see scalar.py
e = CategoryEncoder(w=1, categoryList=categories, forced=True)
for cat in categories:
output = e.encode(cat)
topDown = e.topDownCompute(output)
self.assertEqual(topDown.value, cat)
self.assertEqual(topDown.scalar, e.getScalars(cat)[0])
# -------------------------------------------------------------
    # Test with width = 9, removing some bits from the encoded output
categories = ["cat%d" % (x) for x in range(1, 10)]
# forced: is not recommended, but is used here for readability.
# see scalar.py
e = CategoryEncoder(w=9, categoryList=categories, forced=True)
for cat in categories:
output = e.encode(cat)
topDown = e.topDownCompute(output)
self.assertEqual(topDown.value, cat)
self.assertEqual(topDown.scalar, e.getScalars(cat)[0])
# Get rid of 1 bit on the left
outputNZs = output.nonzero()[0]
output[outputNZs[0]] = 0
topDown = e.topDownCompute(output)
self.assertEqual(topDown.value, cat)
self.assertEqual(topDown.scalar, e.getScalars(cat)[0])
# Get rid of 1 bit on the right
output[outputNZs[0]] = 1
output[outputNZs[-1]] = 0
topDown = e.topDownCompute(output)
self.assertEqual(topDown.value, cat)
self.assertEqual(topDown.scalar, e.getScalars(cat)[0])
# Get rid of 4 bits on the left
output.fill(0)
output[outputNZs[-5:]] = 1
topDown = e.topDownCompute(output)
self.assertEqual(topDown.value, cat)
self.assertEqual(topDown.scalar, e.getScalars(cat)[0])
# Get rid of 4 bits on the right
output.fill(0)
output[outputNZs[0:5]] = 1
topDown = e.topDownCompute(output)
self.assertEqual(topDown.value, cat)
self.assertEqual(topDown.scalar, e.getScalars(cat)[0])
# OR together the output of 2 different categories, we should not get
# back the mean, but rather one or the other
output1 = e.encode("cat1")
output2 = e.encode("cat9")
output = output1 + output2
topDown = e.topDownCompute(output)
self.assertTrue(topDown.scalar == e.getScalars("cat1")[0] \
or topDown.scalar == e.getScalars("cat9")[0])
def testReadWrite(self):
categories = ["ES", "GB", "US"]
# forced: is not recommended, but is used here for readability. see
# scalar.py
original = CategoryEncoder(w=3, categoryList=categories, forced=True)
output = original.encode("US")
target = numpy.array([0,0,0,0,0,0,0,0,0,1,1,1], dtype=defaultDtype)
self.assertTrue(numpy.array_equal(output, target))
decoded = original.decode(output)
proto1 = CategoryEncoderProto.new_message()
original.write(proto1)
# Write the proto to a temp file and read it back into a new proto
with tempfile.TemporaryFile() as f:
proto1.write(f)
f.seek(0)
proto2 = CategoryEncoderProto.read(f)
encoder = CategoryEncoder.read(proto2)
self.assertIsInstance(encoder, CategoryEncoder)
self.assertEqual(encoder.verbosity, original.verbosity)
self.assertEqual(encoder.width, original.width)
self.assertEqual(encoder.description, original.description)
self.assertEqual(encoder.name, original.name)
self.assertDictEqual(encoder.categoryToIndex, original.categoryToIndex)
self.assertDictEqual(encoder.indexToCategory, original.indexToCategory)
self.assertTrue(numpy.array_equal(encoder.encode("US"), output))
self.assertEqual(original.decode(encoder.encode("US")),
encoder.decode(original.encode("US")))
self.assertEqual(decoded, encoder.decode(output))
if __name__ == '__main__':
unittest.main()
|
tscohen/chainer
|
refs/heads/master
|
tests/chainer_tests/functions_tests/loss_tests/test_hierarchical_softmax.py
|
9
|
import copy
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
class TestHuffmanTree(unittest.TestCase):
def test_empty(self):
with self.assertRaises(ValueError):
functions.BinaryHierarchicalSoftmax.create_huffman_tree({})
def test_simple(self):
tree = functions.BinaryHierarchicalSoftmax.create_huffman_tree(
{'x': 8, 'y': 6, 'z': 5, 'w': 4, 'v': 3})
expect = (('z', 'y'), (('v', 'w'), 'x'))
self.assertEqual(expect, tree)
def test_same_count(self):
tree = functions.BinaryHierarchicalSoftmax.create_huffman_tree(
{'x': 1, 'y': 2, 'z': 3})
    # Order of items with the same count is not defined.
self.assertTrue((('x', 'y'), 'z') == tree or
('z', ('x', 'y')) == tree)
class TestBinaryHierarchicalSoftmax(unittest.TestCase):
def setUp(self):
tree = ((0, 1), ((2, 3), 4))
self.func = functions.BinaryHierarchicalSoftmax(3, tree)
self.func.gW.fill(0)
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
self.t = numpy.array([0, 2]).astype(numpy.int32)
self.gy = numpy.random.uniform(-1, 1, ()).astype(numpy.float32)
self.W = self.func.W.copy()
def check_sum(self, x, gpu=False):
total = 0
for i in range(5):
t = numpy.array([i], dtype=numpy.int32)
if gpu:
t = cuda.to_gpu(t)
loss, = self.func.forward((x, t))
self.assertEqual(loss.dtype, numpy.float32)
self.assertEqual(loss.shape, ())
total += numpy.exp(-cuda.to_cpu(loss))
self.assertAlmostEqual(1.0, float(total), delta=1.0e-5)
@condition.retry(3)
def test_sum_cpu(self):
x = numpy.array([[1.0, 2.0, 3.0]], numpy.float32)
self.check_sum(x)
@attr.gpu
@condition.retry(3)
def test_sum_gpu(self):
x = numpy.array([[1.0, 2.0, 3.0]], numpy.float32)
self.func.to_gpu()
self.check_sum(cuda.to_gpu(x), gpu=True)
@attr.gpu
def test_forward(self):
# TODO(unno): We need to test return values of forward function.
cpu_loss, = self.func.forward((self.x, self.t))
self.func.to_gpu()
gpu_loss, = self.func.forward((cuda.to_gpu(self.x),
cuda.to_gpu(self.t)))
gradient_check.assert_allclose(
cpu_loss, cuda.to_cpu(gpu_loss))
def check_backward(self, x_data, t_data, y_grad):
x = chainer.Variable(x_data)
t = chainer.Variable(t_data)
y = self.func(x, t)
y.grad = y_grad
y.backward()
func = y.creator
f = lambda: func.forward((x.data, t.data))
gx, _, gW = gradient_check.numerical_grad(
f, (x.data, t.data, func.W), (y.grad,), eps=1e-2)
gradient_check.assert_allclose(cuda.to_cpu(gx), cuda.to_cpu(x.grad))
gradient_check.assert_allclose(cuda.to_cpu(gW), cuda.to_cpu(func.gW))
@condition.retry(3)
def test_backward_cpu(self):
self.check_backward(self.x, self.t, self.gy)
@attr.gpu
@condition.retry(3)
def test_backward_gpu(self):
self.func.to_gpu()
self.check_backward(cuda.to_gpu(self.x),
cuda.to_gpu(self.t),
cuda.to_gpu(self.gy))
@attr.gpu
def test_to_cpu(self):
f = copy.deepcopy(self.func)
self.func.to_gpu()
self.func.to_cpu()
self.assertTrue((f.begins == self.func.begins).all())
self.assertTrue((f.paths == self.func.paths).all())
self.assertTrue((f.codes == self.func.codes).all())
testing.run_module(__name__, __file__)
|
Endika/edx-platform
|
refs/heads/master
|
cms/djangoapps/contentstore/features/transcripts.py
|
56
|
# disable missing docstring
# pylint: disable=missing-docstring
import os
from lettuce import world, step
from django.conf import settings
from xmodule.contentstore.content import StaticContent
from xmodule.contentstore.django import contentstore
from xmodule.exceptions import NotFoundError
from splinter.request_handler.request_handler import RequestHandler
TEST_ROOT = settings.COMMON_TEST_DATA_ROOT
# We should wait 300 ms for event handler invocation + 200ms for safety.
DELAY = 0.5
ERROR_MESSAGES = {
'url_format': u'Incorrect url format.',
'file_type': u'Link types should be unique.',
'links_duplication': u'Links should be unique.',
}
STATUSES = {
'found': u'Timed Transcript Found',
'not found on edx': u'No EdX Timed Transcript',
'not found': u'No Timed Transcript',
'replace': u'Timed Transcript Conflict',
'uploaded_successfully': u'Timed Transcript Uploaded Successfully',
'use existing': u'Confirm Timed Transcript',
}
SELECTORS = {
'error_bar': '.transcripts-error-message',
'url_inputs': '.videolist-settings-item input.input',
'collapse_link': '.collapse-action.collapse-setting',
'collapse_bar': '.videolist-extra-videos',
'status_bar': '.transcripts-message-status',
}
# button type , button css selector, button message
TRANSCRIPTS_BUTTONS = {
'import': ('.setting-import', 'Import YouTube Transcript'),
'download_to_edit': ('.setting-download', 'Download Transcript for Editing'),
'disabled_download_to_edit': ('.setting-download.is-disabled', 'Download Transcript for Editing'),
'upload_new_timed_transcripts': ('.setting-upload', 'Upload New Transcript'),
'replace': ('.setting-replace', 'Yes, replace the edX transcript with the YouTube transcript'),
'choose': ('.setting-choose', 'Timed Transcript from {}'),
'use_existing': ('.setting-use-existing', 'Use Current Transcript'),
}
@step('I clear fields$')
def clear_fields(_step):
# Clear the input fields and trigger an 'input' event
script = """
$('{selector}')
.prop('disabled', false)
.removeClass('is-disabled')
.attr('aria-disabled', false)
.val('')
.trigger('input');
""".format(selector=SELECTORS['url_inputs'])
world.browser.execute_script(script)
world.wait(DELAY)
world.wait_for_ajax_complete()
@step('I clear field number (.+)$')
def clear_field(_step, index):
index = int(index) - 1
world.css_fill(SELECTORS['url_inputs'], '', index)
# For some reason ChromeDriver doesn't trigger an 'input' event after filling
# the field with an empty value. That's why we trigger it manually via jQuery.
world.trigger_event(SELECTORS['url_inputs'], event='input', index=index)
world.wait(DELAY)
world.wait_for_ajax_complete()
@step('I expect (.+) inputs are disabled$')
def inputs_are_disabled(_step, indexes):
index_list = [int(i.strip()) - 1 for i in indexes.split(',')]
for index in index_list:
el = world.css_find(SELECTORS['url_inputs'])[index]
assert el['disabled']
@step('I expect inputs are enabled$')
def inputs_are_enabled(_step):
for index in range(3):
el = world.css_find(SELECTORS['url_inputs'])[index]
assert not el['disabled']
@step('I do not see error message$')
def i_do_not_see_error_message(_step):
assert not world.css_visible(SELECTORS['error_bar'])
@step('I see error message "([^"]*)"$')
def i_see_error_message(_step, error):
assert world.css_has_text(SELECTORS['error_bar'], ERROR_MESSAGES[error])
@step('I do not see status message$')
def i_do_not_see_status_message(_step):
assert not world.css_visible(SELECTORS['status_bar'])
@step('I see status message "([^"]*)"$')
def i_see_status_message(_step, status):
assert not world.css_visible(SELECTORS['error_bar'])
assert world.css_has_text(SELECTORS['status_bar'], STATUSES[status])
    DOWNLOAD_BUTTON = TRANSCRIPTS_BUTTONS["download_to_edit"][0]
    if world.is_css_present(DOWNLOAD_BUTTON, wait_time=1) and not world.css_find(DOWNLOAD_BUTTON)[0].has_class('is-disabled'):
        assert _transcripts_are_downloaded()
@step('I (.*)see button "([^"]*)"$')
def i_see_button(_step, not_see, button_type):
button = button_type.strip()
if not_see.strip():
assert world.is_css_not_present(TRANSCRIPTS_BUTTONS[button][0])
else:
assert world.css_has_text(TRANSCRIPTS_BUTTONS[button][0], TRANSCRIPTS_BUTTONS[button][1])
@step('I (.*)see (.*)button "([^"]*)" number (\d+)$')
def i_see_button_with_custom_text(_step, not_see, button_type, custom_text, index):
button = button_type.strip()
custom_text = custom_text.strip()
index = int(index.strip()) - 1
if not_see.strip():
assert world.is_css_not_present(TRANSCRIPTS_BUTTONS[button][0])
else:
assert world.css_has_text(TRANSCRIPTS_BUTTONS[button][0], TRANSCRIPTS_BUTTONS[button][1].format(custom_text), index)
@step('I click transcript button "([^"]*)"$')
def click_button_transcripts_variant(_step, button_type):
button = button_type.strip()
world.css_click(TRANSCRIPTS_BUTTONS[button][0])
world.wait_for_ajax_complete()
@step('I click transcript button "([^"]*)" number (\d+)$')
def click_button_index(_step, button_type, index):
button = button_type.strip()
index = int(index.strip()) - 1
world.css_click(TRANSCRIPTS_BUTTONS[button][0], index)
world.wait_for_ajax_complete()
@step('I remove "([^"]+)" transcripts id from store')
def remove_transcripts_from_store(_step, subs_id):
"""Remove from store, if transcripts content exists."""
filename = 'subs_{0}.srt.sjson'.format(subs_id.strip())
content_location = StaticContent.compute_location(
world.scenario_dict['COURSE'].id,
filename
)
try:
content = contentstore().find(content_location)
contentstore().delete(content.location)
print 'Transcript file was removed from store.'
except NotFoundError:
print 'Transcript file was NOT found and not removed.'
@step('I enter a "([^"]+)" source to field number (\d+)$')
def i_enter_a_source(_step, link, index):
index = int(index) - 1
    if index != 0 and not world.css_visible(SELECTORS['collapse_bar']):
world.css_click(SELECTORS['collapse_link'])
assert world.css_visible(SELECTORS['collapse_bar'])
world.css_fill(SELECTORS['url_inputs'], link, index)
world.wait(DELAY)
world.wait_for_ajax_complete()
@step('I upload the transcripts file "([^"]*)"$')
def upload_file(_step, file_name):
path = os.path.join(TEST_ROOT, 'uploads/', file_name.strip())
world.browser.execute_script("$('form.file-chooser').show()")
world.browser.attach_file('transcript-file', os.path.abspath(path))
world.wait_for_ajax_complete()
@step('I see "([^"]*)" text in the captions')
def check_text_in_the_captions(_step, text):
world.wait_for_present('.video.is-captions-rendered')
world.wait_for(lambda _: world.css_text('.subtitles'), timeout=30)
actual_text = world.css_text('.subtitles')
assert text in actual_text
@step('I see value "([^"]*)" in the field "([^"]*)"$')
def check_transcripts_field(_step, values, field_name):
world.select_editor_tab('Advanced')
tab = world.css_find('#settings-tab').first
field_id = '#' + tab.find_by_xpath('.//label[text()="%s"]' % field_name.strip())[0]['for']
values_list = [i.strip() == world.css_value(field_id) for i in values.split('|')]
assert any(values_list)
world.select_editor_tab('Basic')
@step('I save changes$')
def save_changes(_step):
world.save_component()
@step('I open tab "([^"]*)"$')
def open_tab(_step, tab_name):
world.select_editor_tab(tab_name)
@step('I set value "([^"]*)" to the field "([^"]*)"$')
def set_value_transcripts_field(_step, value, field_name):
tab = world.css_find('#settings-tab').first
XPATH = './/label[text()="{name}"]'.format(name=field_name)
SELECTOR = '#' + tab.find_by_xpath(XPATH)[0]['for']
element = world.css_find(SELECTOR).first
if element['type'] == 'text':
SCRIPT = '$("{selector}").val("{value}").change()'.format(
selector=SELECTOR,
value=value
)
world.browser.execute_script(SCRIPT)
assert world.css_has_value(SELECTOR, value)
else:
assert False, 'Incorrect element type.'
world.wait_for_ajax_complete()
@step('I revert the transcript field "([^"]*)"$')
def revert_transcripts_field(_step, field_name):
world.revert_setting_entry(field_name)
def _transcripts_are_downloaded():
world.wait_for_ajax_complete()
request = RequestHandler()
DOWNLOAD_BUTTON = world.css_find(TRANSCRIPTS_BUTTONS["download_to_edit"][0]).first
url = DOWNLOAD_BUTTON['href']
request.connect(url)
return request.status_code.is_success()
|
pheanex/xpython
|
refs/heads/master
|
exercises/tree-building/example.py
|
3
|
class Record():
def __init__(self, record_id, parent_id):
self.record_id = record_id
self.parent_id = parent_id
def equal_id(self):
return self.record_id == self.parent_id
class Node():
def __init__(self, node_id):
self.node_id = node_id
self.children = []
def validateRecord(record):
if record.equal_id() and record.record_id != 0:
raise ValueError("Only root should have equal record and parent id")
    elif not record.equal_id() and record.parent_id >= record.record_id:
        raise ValueError("Node parent_id should be smaller than its record_id")
def BuildTree(records):
parent_dict = {}
node_dict = {}
ordered_id = sorted((i.record_id for i in records))
for record in records:
validateRecord(record)
parent_dict[record.record_id] = record.parent_id
node_dict[record.record_id] = Node(record.record_id)
root_id = 0
root = None
for index, record_id in enumerate(ordered_id):
if index != record_id:
raise ValueError("Record id is invalid or out of order")
if record_id == root_id:
root = node_dict[record_id]
else:
parent_id = parent_dict[record_id]
node_dict[parent_id].children.append(node_dict[record_id])
return root
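# Example usage (a sketch): build a three-node tree from flat records.
#
#   records = [Record(0, 0), Record(1, 0), Record(2, 0)]
#   root = BuildTree(records)
#   root.node_id                          # -> 0
#   [c.node_id for c in root.children]    # -> [1, 2]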
|
martyngigg/pyqt-msvc
|
refs/heads/master
|
examples/painting/basicdrawing/basicdrawing_rc2.py
|
5
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Wed Mar 20 13:48:13 2013
# by: The Resource Compiler for PyQt (Qt v4.8.4)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = "\
\x00\x00\x02\xff\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x10\x08\x02\x00\x00\x00\xf8\x62\xea\x0e\
\x00\x00\x02\xc6\x49\x44\x41\x54\x78\x5e\x2d\x94\x5b\x6e\x14\x31\
\x10\x45\xaf\x1f\xdd\x93\x87\x26\x3b\x61\x13\x6c\x82\x2c\x05\x09\
\x85\xf5\xc0\x17\x1f\x91\xf8\x60\x09\x2c\x01\x96\x00\x4a\x08\x93\
\x7e\xda\x2e\xd3\x07\x5b\xb2\x3c\xb7\xed\xaa\xba\xe5\x5b\x55\xe3\
\x7e\xff\xf8\xe4\x83\x8e\x25\x2f\xef\x24\xa7\x5a\xa5\xaa\x6a\x32\
\x93\x8a\x8a\xa9\x16\xf1\x79\xec\x19\x33\xe7\xe4\x3c\x27\xcd\x06\
\x50\xba\x57\x18\x15\x4f\x0a\x83\x42\x94\x8b\xba\x7f\x77\x1f\xe5\
\xb4\x5d\x7e\x1e\xa7\xde\xcb\x60\x92\x0b\x38\x94\x24\xcb\xca\x3b\
\x7b\x2d\x44\xb7\xa4\x92\x89\xee\xa3\x24\x61\x73\x2c\x0c\xb8\x75\
\x9c\x11\x74\xb8\xd6\xd5\x59\xe1\xa4\x8f\x0f\x0f\xd5\x14\x49\xdf\
\x6b\x7b\xf9\x1e\x07\xf9\xa1\xa7\xa0\x22\xdb\xb4\x4d\x4a\xb3\xd2\
\xda\x92\x85\xac\xac\x30\xf9\x08\x4d\x49\x4a\x0b\x87\x32\x59\x85\
\xd2\x7b\x72\x1f\x6f\x95\xee\xf4\xf5\xdb\xa3\x19\x0f\x8d\x3e\xca\
\x07\xd0\x7e\x79\x3c\xf0\xe9\x2c\x13\x49\xa5\x49\xeb\x93\xd6\xbf\
\x5a\x9e\xb5\xbd\x76\x11\xf6\x49\x79\xc3\xd8\xb2\x20\x98\x94\x93\
\xbc\x07\xd7\x8a\x2c\xa7\x3b\xd2\x1f\x6e\x39\x21\x90\x14\x11\x34\
\x70\x67\x41\xf1\x4a\x68\x35\x08\xb9\xb9\xee\xc5\xc8\x8b\x0a\x42\
\x69\x3f\xc0\x26\x09\x17\xcb\xec\x82\x09\x25\x11\x30\xe2\xbe\x47\
\xc8\x10\x16\x5f\x08\xc4\x93\x79\x84\xc2\x08\x46\x90\x4a\xc4\x7d\
\xd6\xf2\x87\xac\xab\xc0\x79\x26\x5f\x3c\x0d\x29\x9c\x47\x3d\x21\
\x9d\xd2\x84\x4b\x74\x94\x24\x9c\x90\x8e\xc7\xad\x70\x53\x03\x08\
\xbc\x58\x4e\x56\x10\x61\xbb\x68\x7d\x39\xa2\x03\xa8\x81\x61\x3d\
\x3f\xc1\x44\x6d\x33\x42\x43\x30\x37\x2f\x70\x2d\x00\x33\x82\xaa\
\xe2\x95\x56\x42\x21\x11\x46\xad\x6e\x99\x23\x42\x1f\xeb\xb9\xfb\
\x13\x55\xec\x14\x79\x07\xca\xa1\x80\x84\xb1\x0f\xc8\x32\xdc\x48\
\xb5\xe9\x86\x86\xbe\x35\x71\x81\x2f\x9a\x61\x1a\xbc\x6c\x04\x94\
\x0d\xe6\xfd\x42\xee\xfb\x2b\x5a\xf1\xa6\x05\x07\x1f\xa1\x2c\x49\
\x00\x4f\xd9\x54\x7b\xdc\x38\x8a\x11\xf2\x54\x51\xea\x1d\xc1\x51\
\x55\xac\xd6\x8a\x46\x2e\x56\xf0\xa7\xb5\x0d\x4c\x9b\x2e\xad\x23\
\x49\xb6\x4b\x81\x08\xec\x36\x62\x23\x75\xee\x10\x28\xcc\x70\xcb\
\x67\x97\xce\xc3\x11\xf9\x31\x59\xa6\x43\xbc\x27\x90\x25\xc8\x5a\
\xe5\x6b\xe5\x24\x2f\x90\x95\x95\xd4\xe4\xe0\xa8\x4e\xb6\x73\x9b\
\x92\xe4\x35\xde\xc8\x0d\xaa\x6a\xd5\x6e\x94\x3d\xa1\x48\xc3\x24\
\x5a\x65\xfa\x45\x0a\x96\x7b\x3d\x10\x77\x54\x0a\xca\x85\x39\xd8\
\xa7\x3e\x50\x88\x96\xb9\x95\xa3\x2a\xb5\x80\xd3\xcc\x5e\x79\x19\
\x41\xb9\x45\x15\x02\x46\xcb\xe8\x30\x3f\x43\x30\x9e\xb9\x2b\x3b\
\x59\xcb\x14\x22\x4b\x7d\x1a\xda\x28\xc1\x9d\x50\xb5\xff\x3d\xc4\
\x6b\x85\x13\x98\x96\x15\xb8\x71\x93\x62\xc0\x3e\xa2\x7b\xd6\x97\
\xcf\x4c\xf6\xe9\xdc\xa7\x54\x28\x40\xd6\xfb\x2c\xd2\xbf\x40\x69\
\xd6\x95\xad\x6d\x0c\x1d\xd8\x2f\x0a\x9b\xc2\xd4\xab\xc8\x1a\x60\
\x8a\x9d\x09\x02\x7d\x78\xff\xd0\x06\x2f\x8c\x30\x4b\x62\xfa\x33\
\xba\x1b\x93\xd5\x0b\x53\xba\x32\x92\x41\x46\x94\x91\x70\x2e\x61\
\x2f\x07\x06\x08\x62\x30\x25\x94\x7b\xfb\x46\xf2\x78\x49\x5d\x38\
\x80\x5a\x3b\x61\x41\x38\x74\x07\x34\x3b\xd2\xaf\xed\x05\x70\x74\
\x17\x34\x81\xc0\x35\x8e\xd0\x5b\xee\x1f\xb6\x84\x15\x24\xe2\x59\
\x92\x70\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x0e\x70\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x50\x00\x00\x00\x50\x08\x06\x00\x00\x00\x8e\x11\xf2\xad\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x01\x3d\x00\x00\x01\x3d\
\x01\x9f\xdc\x2b\xea\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x0d\xed\x49\x44\
\x41\x54\x78\x9c\xed\x5c\x79\x70\x1c\xd5\x9d\xfe\x7a\x46\x73\x69\
\x34\xba\x0f\xcb\x96\x35\xbe\x8d\x90\x10\xbe\x30\x8e\x85\xed\x10\
\x67\x51\x44\x4c\x16\x58\x76\x97\xb0\xb0\x04\x36\x45\x2a\x86\x62\
\xf1\x06\xb6\x48\xc2\x62\x90\x8b\x50\xd9\xb0\x5b\x9b\x84\x85\x62\
\xd9\x0a\x09\x6c\x41\xc0\x2c\xb5\x0e\x87\x11\xc2\x16\xb2\x25\x1b\
\xb0\x6c\xd9\x96\xe4\xdb\xb2\x75\x59\xb2\x67\xac\xd1\x68\xa6\xef\
\xee\xf7\xf6\x8f\xd6\x31\xb6\xa7\xbb\x67\xa4\x19\x8d\x30\xf9\xaa\
\x5c\xe5\x79\xfd\xbd\xee\x5f\x7f\xfa\xbd\x7e\xc7\xef\xf7\x1e\x43\
\x29\x45\xbc\xf0\xd4\x78\xcb\x01\x54\x03\xa8\x04\x90\xe3\x70\x5a\
\x0a\x2a\x96\x67\x2c\xc8\xcc\xb6\x7a\xac\x69\x8c\xcd\x66\x63\x60\
\xb3\x5b\xa8\xa2\x50\x91\xe7\xd4\x7e\x91\xa7\xc7\x65\x89\xb4\xc8\
\x32\xdd\xa9\x2a\xb4\x69\xcf\x96\x0e\x12\xf7\x43\xa7\x29\x98\x58\
\x05\xf4\xd4\x78\xad\x00\x9e\x06\xf0\x20\x80\x12\x00\xb0\x58\x19\
\x2c\x2a\x4f\x47\xc5\x72\x37\xec\x0e\x4b\x4c\xf7\x51\x15\x8a\x70\
\x48\xe5\x78\x96\x9c\x17\x05\x72\x52\x92\xc8\x7e\x45\xa2\x0d\x8a\
\x42\x1b\xf7\x6c\xe9\x90\x26\xf8\x1e\x29\x43\x4c\x02\x7a\x6a\xbc\
\x33\x00\xfc\x11\xc0\xba\xd1\x32\xbb\xc3\x82\xea\x3b\xf3\xe0\xc9\
\xb2\x26\xc4\x10\xa2\x52\x84\x43\xaa\xc0\xb3\xe4\x82\x28\x90\x53\
\x92\x48\x0e\xc8\x32\xfd\x4c\x91\xe9\x8e\x3d\x5b\x3a\x84\x84\x3c\
\x24\x09\x30\x15\xd0\x53\xe3\xf5\x02\xf8\x1c\xc0\x8c\xc8\xf2\xb5\
\xd5\xd9\x28\x99\xeb\x4c\xa2\x69\x1a\x08\x01\xd8\x90\x2a\xf2\xac\
\xea\x13\x04\xd2\x29\x89\xa4\x55\x96\xe8\x2e\x55\xa1\x9f\x34\xd7\
\x76\x84\x93\x6e\x80\x09\x62\x11\xf0\x1d\x00\x7f\x1d\x59\xb6\xb0\
\x3c\x1d\x37\xac\xc9\x4c\xa6\x5d\xa6\xa0\x14\x60\xc3\xaa\xc4\x85\
\x55\xbf\xc8\x93\x33\x92\x48\x0f\xca\x12\xd9\xad\x28\xb4\xbe\xb9\
\xb6\x63\x70\xaa\xec\x30\x14\xd0\x53\xe3\xfd\x26\x80\x86\xc8\x32\
\xb7\xc7\x8a\x0d\x77\xe7\xc3\x6a\x65\x92\x6c\xda\xc4\x40\x29\xc0\
\x85\x55\x99\x63\xd5\x41\x41\x13\xf6\xb0\x2c\x91\x26\x45\xa6\x75\
\xcd\xb5\x1d\x17\x12\xfd\x3c\x33\x01\x9b\x01\xac\x8e\x2c\x9b\xb7\
\xd8\x85\x55\x37\x67\x25\xda\x8e\x29\x01\xc7\xaa\x0a\x17\x26\x01\
\x81\x27\x5d\x92\x40\xda\x64\x89\xec\x91\x15\x5a\xd7\xfc\x6c\x47\
\xcf\x44\xef\xa9\x2b\xa0\xa7\xc6\x9b\x06\x80\x05\x60\x8f\x2c\x5f\
\x71\x53\x26\x16\x55\xa4\x4f\xf4\x79\xd3\x12\x3c\x47\x54\x2e\xac\
\x0e\x09\x1c\xe9\x11\x45\xd2\x2e\x4b\x74\xaf\x22\x93\xba\xa6\x67\
\x3b\x4e\x9b\xd5\x35\x12\xf0\x5a\x00\x1d\x97\x97\xdf\x72\x47\x1e\
\xf2\x8b\x6c\x93\xb7\xfa\x2b\x00\x91\x27\x84\x0d\xab\xc1\xbe\x6e\
\xb1\x64\x4f\x6d\x07\x17\x8d\x63\x34\x78\xab\xb8\xbc\x80\x61\x80\
\x9c\xbc\xb4\x84\x19\x38\xdd\xe1\x70\x59\x2c\xb9\x05\xb6\x1c\xe6\
\xb2\x56\x18\x09\x23\x01\xe7\x5c\x5e\x60\xb3\x5b\x60\x4d\x9b\x9e\
\x9d\x47\xaa\x60\x24\x60\x6c\x53\x8b\xaf\x39\xfe\x2c\xd2\x24\xf1\
\x67\x01\x27\x89\x94\xf6\x08\x92\x48\xc0\xb1\x04\x16\xd8\x30\x23\
\xb7\x00\x79\x99\x39\x10\x55\x0e\x41\xd1\x0f\x51\xe1\x53\x69\x5a\
\xcc\x98\x52\x01\x55\x15\xe8\xeb\x12\x70\xf6\xa4\x00\x22\xda\xb0\
\x7e\xc5\x6a\x3c\x76\xeb\x46\x5c\xef\x5d\x72\x09\x8f\x52\x82\xfe\
\xf0\x59\x9c\x09\x74\xe0\xb8\x7f\x3f\x5a\xfa\xea\xa1\x12\x65\x2a\
\x4d\x8d\x19\x53\x26\xa0\x6f\x40\xc2\xde\x86\x61\x88\x3c\xc1\xa3\
\x77\xdf\x83\x67\xbe\xff\x9c\x2e\x97\x61\x2c\x98\xe9\x99\x87\x99\
\x9e\x79\xa8\x2a\xbd\x0d\x77\x94\xfd\x18\x75\x27\xdf\x40\x53\xd7\
\x36\xc8\x64\x7a\xad\x78\x4d\x89\x80\xed\xfb\x59\xb4\xed\x0b\x21\
\x27\xd7\x89\x4f\x5f\xdc\x8a\xf2\xd9\x95\x71\xd5\xcf\x71\x15\xe1\
\xee\xca\xc7\xf1\x8d\xd2\xef\xe2\xa5\x2f\x1e\xc7\x90\xe0\x37\xe4\
\xab\x32\xc5\x91\x43\x2c\x00\xe0\xae\x6f\xdd\x8a\x9c\xac\x0c\x0c\
\x09\x3e\x04\xf8\x0b\x18\xe2\x2f\x40\x54\x13\xf7\x79\x48\xba\x80\
\xc7\xdb\x39\x1c\xde\x17\x82\xdb\x6d\xc3\x07\xcf\xbf\x13\xb7\x78\
\x91\xf0\x66\x97\xe1\xa7\xeb\x7e\x8f\x17\x3f\xff\x09\x7a\x82\xc7\
\x75\x79\xbe\xf3\x32\xda\x5a\xc2\x60\x18\x06\xdb\x9e\x78\x02\x05\
\x99\x85\x63\xd7\x5a\xfa\xea\xf1\x6a\xcb\x53\x13\xb6\xe1\x72\x24\
\xb5\x17\xee\xef\x11\x71\xa0\x69\x18\x0c\xc3\xe0\xf5\xa7\x7f\x8b\
\xeb\x4a\xaf\x9f\xf4\x3d\xb3\x9d\x05\x78\x64\xd5\xbf\x23\xc3\x9e\
\xad\xcb\xf1\x0d\x68\xcd\xbc\xb0\xd0\x7d\x89\x78\xc9\x40\xd2\x04\
\xa4\x00\x5a\xf7\x86\x40\x01\x54\xdd\x50\x86\x5b\x2a\x6b\x4c\xeb\
\x88\xb2\x84\xf7\xf6\xbe\x8b\x8f\x0f\x7e\x68\xc8\xcb\x76\xe6\xe3\
\x81\x65\x9b\xc1\x20\xfa\xac\xc8\xd7\xaf\x09\x58\xb1\x70\x41\xbc\
\x66\xc7\x8d\xa4\x09\xd8\x79\x8c\xc7\xd0\xa0\x82\x34\x9b\x05\xbf\
\xdd\xf8\x6b\x53\xfe\xfe\x73\x3b\xb0\xa5\xf1\xfb\x78\x69\x57\x2d\
\xee\xd9\xfc\x30\xca\x7f\xb4\x04\x6f\x34\xfe\x4e\x97\x5f\x51\xb4\
\x1a\xab\x4b\x37\x5c\x51\x4e\x08\x85\xef\x82\x0c\x00\x58\x5b\x59\
\x35\xf1\x17\x88\x11\x49\x11\x90\x52\x8a\xb6\x7d\xda\x6a\xfb\x77\
\xd6\xac\xc4\x82\xa2\x45\x86\xfc\xfe\xd0\x19\xfc\xfe\x40\x2d\x7c\
\x6c\x2f\xe6\x5f\xe3\xc2\xcd\xdf\xcd\x45\x77\x4f\x00\x8f\xfe\xaa\
\x16\xfb\x3b\xbf\xd0\xad\x57\xbd\xf0\xbe\x2b\xbc\x30\x70\x51\x81\
\x2a\x6b\x2b\x4c\x7f\xb9\xf2\xce\x49\xbe\x89\x39\x92\x22\xa0\x6f\
\x40\x06\xc7\xaa\x00\x80\x1f\xd6\x3c\x68\xca\x7f\xb5\xe5\xe7\x90\
\xd4\xf1\xb8\x51\x7e\x91\x0d\x73\x16\x38\xa1\xa8\x14\x0f\xbe\xb0\
\x51\xb7\x5e\x51\x86\x17\x95\x33\x6e\xba\xec\xd9\x5a\xf3\xcd\xce\
\x71\x62\x7e\xd1\x57\xb4\x09\xf7\x76\x8a\x00\x80\xac\x6c\x07\xd6\
\x57\x54\x1b\x72\xfd\x6c\x1f\xfa\x86\xaf\x5c\xb7\xac\x5c\xe9\x01\
\x00\x74\x9e\xf1\xe3\x95\x4f\x5e\xd4\xad\xbf\x6e\xee\x5d\x97\xfc\
\xf6\xf5\x6b\xcd\xb7\x6c\x5e\x69\x5c\x36\x4f\x14\x49\x19\xc6\xf4\
\x9c\xd5\xbc\x69\xf5\x12\xf3\x21\x4b\x67\xa0\x3d\x6a\x79\x86\xc7\
\x0a\x57\xba\x05\x3c\x47\xb0\xf3\x50\x23\x7e\x74\xcb\x23\x51\x79\
\x8b\xf2\x97\x61\xef\x27\x3c\xba\xbb\x82\xf8\x76\xd5\x52\xfc\xdf\
\x53\x7f\x40\x8f\xbf\x1b\xc5\x39\xc5\x51\xf9\x59\xce\x02\x2c\xce\
\x5f\x0e\x56\x1e\x06\x2b\x0d\x83\x93\x86\x27\x35\x2e\x4c\xb8\x80\
\xc3\x01\x15\x6c\x48\x6b\xbe\x77\x54\x7d\xcf\x94\xaf\x27\x20\x00\
\xe4\x16\xda\xd1\x77\x56\xc0\xd1\x4e\xfd\x95\x75\x9b\xc5\x8e\x55\
\x65\x2b\x71\xa6\xb3\x1e\x84\x02\x8b\x8a\xaf\xc1\xa2\xe2\x6b\x74\
\xf9\x0b\xf3\x96\xe0\x9f\xaa\x5e\xba\xa4\x6c\x4f\xf7\x07\xf8\x43\
\xeb\x16\x53\x5b\xa3\x21\xe1\x4d\x78\x28\x20\x8f\xfd\x7f\xcd\x35\
\xeb\x0c\x98\x1a\xce\x04\xae\x88\x1a\x8c\x21\xaf\x40\x0b\x1d\xf4\
\x9e\x0b\x80\x50\xfd\x6c\x90\xf5\xcb\xd6\xc6\x61\xe1\x95\x60\x98\
\x89\x2f\x12\x27\x5c\xc0\xe1\x80\xe6\x7d\x4e\x57\x1a\x4a\xf2\xbd\
\x86\x5c\x0a\x8a\xde\xe0\x09\xdd\xeb\x79\x85\x5a\x03\x91\x25\x82\
\xcf\x4f\x34\xe9\xf2\x16\xcf\x5e\x3c\x01\x4b\x13\x83\x24\x08\xa8\
\xad\x9a\x14\xe5\x9b\x07\xde\x25\x45\x80\x42\x64\xdd\xeb\x99\xd9\
\xe3\xc1\xab\x96\x53\xfb\x74\x79\x79\x9e\xdc\x38\x2c\x4c\x2c\x12\
\xfe\x0d\x0c\x8e\x34\xe1\xd2\x99\xd1\x3f\xe2\x91\x30\xfb\x78\xdb\
\x22\x42\x39\x41\x2e\xa4\xcb\xcb\x72\xe5\xe2\x5b\x1b\x72\x11\x0e\
\xf6\x63\xc9\xc3\x2b\x90\x9b\x99\x89\x9d\xcf\xef\x8c\xca\x1d\x08\
\xf6\xe0\xc8\xc0\x21\xd8\x2d\x76\x38\x6d\x6e\x64\xa7\x67\x21\xc8\
\x5f\x34\xb5\x55\x0f\x09\x17\x90\x63\xb5\x6f\xd5\xac\xfc\x22\x53\
\xae\xa8\x44\x8d\x14\x8e\x21\xcd\x36\xde\x40\x38\x81\xd5\xe5\x65\
\xd8\x73\x50\x5c\xe2\xc0\x59\x81\xc3\xe9\xce\x20\x7a\x1c\xfa\x82\
\xf4\x86\x8f\xe1\xed\x63\x13\xeb\x30\xa2\x21\xe1\x4d\x58\x91\x35\
\x01\xdd\xae\x0c\x53\xae\xa8\x1a\x27\x5d\x59\x2c\xda\x3f\x00\x60\
\x45\x7d\x01\x2d\x8c\x05\x6e\x7b\x6a\xb2\x25\x12\x2a\x20\x21\x14\
\xaa\xd6\x87\x20\x33\x16\x01\x4d\x3c\x10\xc0\x58\x18\x95\xe3\x8d\
\x9b\xbb\xc7\x91\x63\x6e\x60\x12\x90\x50\x01\xd5\x88\x55\xf7\x98\
\x3c\x30\x86\xb8\x87\x6d\xa4\x19\x73\xe2\xd7\x40\x40\x59\x1e\x1f\
\xab\x65\xb9\xcc\x7b\xe1\x58\x66\x00\x69\x23\x1d\xb1\x99\x80\x99\
\x8e\xd4\xf4\xc4\x89\x6d\xc2\xca\x78\x9e\x8d\xdb\xe5\x36\xe5\x4b\
\x31\x78\xe0\x68\x47\x22\x88\xc6\xdf\x4b\x8f\xfd\x2a\xf0\x40\x5b\
\x44\x9e\xf4\x60\xc8\x3c\xc7\xd1\x6a\x89\x61\x10\x30\x92\xfc\xa4\
\xb7\x78\x3a\x0a\x0b\x93\x9a\x10\x77\x42\x9f\x6a\x77\x30\x18\x7d\
\xcf\x0b\x41\x9f\x39\xdf\xea\x32\xe5\x28\x23\x5e\xed\x72\x38\x0c\
\x79\xac\x3c\x6c\x6e\x60\x12\x90\x50\x01\x19\x86\x81\xcd\xa6\x29\
\x78\x31\x68\x3e\x38\xb5\xa7\x99\xe7\x58\x8f\x76\x4c\x0e\xbb\x31\
\x97\x95\xae\x02\x01\x01\xc0\xe1\xd4\x6e\x39\x38\x1c\x30\xe7\xc6\
\xe0\x81\xb2\x3c\xea\x81\x26\x02\x5e\x0d\x1e\x08\x00\x0e\xa7\xb6\
\xed\x61\x30\x6c\xfe\x42\x0e\x6b\x0c\x1e\xa8\x8e\x0a\x68\x2c\xf6\
\x55\xe3\x81\x4e\xa7\xd6\x84\x87\x43\xe6\x3b\x10\xec\x26\x02\x52\
\x68\x1b\x73\x00\x20\xdd\x44\x40\xee\x6a\xf1\x40\xfb\x48\x13\x0e\
\x71\xe6\xb3\x0c\x7b\x9a\xb1\x28\x6a\xc4\xb8\xd2\xed\x30\xce\xcb\
\xbe\x6a\x3c\xd0\x31\x32\x94\x61\x39\xf3\xcd\x45\x66\xdf\x40\x25\
\x62\x66\xe3\x72\xea\x0b\x28\x28\x2c\x08\x55\x63\x33\x30\xc1\x48\
\x9a\x07\xf2\x9c\xfe\x3a\xdf\x28\x1c\x69\x2e\xc3\x29\x98\x24\x8e\
\x7b\x60\x61\x56\x81\x2e\x2f\x68\x92\x2b\x93\x4c\x24\x5c\x40\x4f\
\xd6\xe8\xcc\x41\xc5\xc0\xd0\x39\x53\x7e\x69\x96\x7e\xfc\x22\xe0\
\x1f\xff\x23\x7c\xb3\x62\xbd\x2e\xef\x84\xbf\x35\x0e\x0b\x13\x8b\
\x84\x0b\x58\x50\x3c\xb2\x0a\x4a\x81\x6d\x5f\xbe\x67\xca\xf7\x66\
\x97\xe9\x5e\x1b\xf4\x6b\x6d\xd8\xe3\xb1\x63\x6e\xe1\x7c\x5d\xde\
\x51\xdf\x97\xf1\x19\x99\x40\x24\x5c\xc0\x74\xb7\x15\x6e\x8f\x36\
\x94\x69\x38\xb8\xcb\x94\x3f\x27\x47\x5f\xc0\x80\x6f\x64\x75\x7b\
\x96\x7e\x82\x10\xa5\x04\xc7\xfc\x2d\x31\xdb\x67\xd6\xf3\xc7\x8b\
\xa4\x4c\x20\x0b\x47\xbc\xf0\xd0\x49\xfd\x14\xb4\x51\xc4\xe2\x81\
\xd7\xcd\xd7\x4f\x0d\xe9\x0e\x9e\x00\x2b\x05\xb5\x1f\x31\x44\xd7\
\x66\x66\xea\x7b\xf2\x44\x90\x14\x01\x4b\xe7\x69\xf3\xd6\xbe\xbe\
\x20\x8e\xf4\xb6\x19\x72\xb3\x9d\x05\x51\x5f\x2a\x38\xa8\x8c\x75\
\x22\x35\x2b\xbe\xa3\x5b\xff\x58\x44\xf3\xb5\x3b\x34\x01\x65\x89\
\x42\x52\xc4\xa8\xfc\xbc\xf4\x62\x38\xd3\x12\xb7\x55\x2d\x29\x02\
\xce\xf4\x3a\xe0\x72\x5b\x41\x29\xc5\x0b\xef\xbd\x60\xca\xff\x9b\
\x8a\xc7\xae\x28\x3b\xf8\xa5\x36\x10\x5f\x5a\x31\x07\x77\xae\xfa\
\x5b\xdd\xba\x47\x22\x04\x4c\x77\x6b\x9f\x0e\x4a\x29\x4e\xf6\x47\
\xf7\x7e\x06\x0c\xfe\xaa\x7c\x13\x2e\x76\x66\xe2\xf4\xbe\x0c\xe4\
\x0e\xaf\xc5\x43\x95\xe6\xd9\x63\x7a\x48\x4a\x6a\x07\xc3\x30\x98\
\xb7\xd8\x85\x8e\x03\x61\x7c\xdc\xdc\x0c\xf5\x11\xc5\x70\xe9\xaa\
\xac\x60\x25\x2a\x67\xac\xc1\xe1\x81\xdd\x00\x80\xbe\x6e\x11\x7d\
\x67\x05\xd8\x1d\x16\xfc\xee\xf1\x57\x75\xeb\xf5\x87\xce\xe0\xb8\
\x7f\xff\xd8\x6f\x77\xc6\xf8\xee\xf9\xe3\x7d\x47\x75\xb3\x61\xd7\
\xce\xf9\x1e\xd6\x6e\x1a\xcf\x9a\x50\x88\x04\x1c\x8e\xf9\xf5\x2e\
\x41\xd2\x16\xd1\x16\x57\xa4\xc3\x66\x67\x10\x1a\x96\x51\xfb\xce\
\x66\x53\xfe\xbd\xd7\x3f\x89\xb2\x82\x1b\x71\xe4\x40\x18\xcd\xf5\
\x41\xac\x5c\x3a\x1f\x6f\xd5\xbe\x64\x98\x1a\xb7\xed\xe8\x2b\xa0\
\x11\x19\x0b\x69\x36\x06\xd9\xb9\xda\x1f\x6a\xc7\xa1\xe8\x61\xcd\
\x68\x88\xff\xd8\x8d\x71\x24\x4d\x40\x67\xba\x05\x4b\x57\x69\x19\
\x56\x2f\xfe\xf1\x4d\x9c\x3e\x7f\xca\x90\x9f\xe5\xcc\xc7\x63\xab\
\x7f\x83\xad\x8f\xd5\xa3\xfd\xb5\x5d\xd8\xf1\x8b\x9d\x86\x59\xad\
\x27\xfc\x07\xd0\xda\xdf\x70\x45\xf9\x75\x2b\xb4\x58\xcc\x3b\x1f\
\xd7\xc3\x1f\x8a\x6d\x80\x3d\x99\xdd\x7f\x49\x5d\xc6\x5d\x50\x96\
\x8e\xa2\x59\x76\x48\x22\xc1\xbd\xbf\xfc\x7b\xc3\xfc\x96\x51\xcc\
\xc8\x99\x89\x59\x79\xb3\x0d\x39\x01\xfe\x3c\xfe\xab\xe5\x67\x51\
\xaf\xcd\x9e\xeb\x44\xc9\x5c\x27\x04\x41\xc5\x7d\x2f\xdc\xa7\xdb\
\x99\x8c\xe2\xdc\xf0\x69\x34\x74\x6e\x35\xb5\x4b\x0f\xc9\xcd\xd2\
\x67\x80\xb5\xd5\x39\xd8\xf9\xc1\x20\xda\x8f\xf6\xe1\xb6\xda\x0d\
\xf8\x70\xf3\x47\x93\xba\x65\x48\x0c\xe0\x3f\xbf\x78\x1c\x21\x51\
\x67\xbd\x91\xd1\x0e\xc4\xe8\x3c\xc6\xe3\xd4\xc9\x53\x58\xb9\xe9\
\x1b\x78\xa0\xfa\x1e\xdc\xb8\x78\x15\x96\xcd\xbd\x01\x61\x39\x80\
\x63\xbe\x16\x1c\xf3\xed\xc3\x51\xdf\x3e\x0c\x8b\x13\xcf\x4a\x00\
\x8c\x37\x5c\x3f\x09\xe0\xf9\xc8\x32\xbb\xc3\x82\xbb\x1e\x88\x3f\
\xeb\x5d\x96\x28\x76\xd5\x05\x70\xbe\x4f\xc2\x9a\x1b\xcb\xf1\xf6\
\x93\x6f\xc3\xe3\xf4\xc4\x7d\x9f\xee\xe0\x71\xbc\xfc\xc5\x3f\x63\
\x90\x1f\x88\xbb\xee\x64\xd0\xbe\x3f\x9c\xd3\x5c\xdb\x31\x14\xed\
\xda\x94\x44\x62\x6c\x76\x06\xeb\x37\xe4\xe2\xc6\x75\x59\xf8\xbc\
\xf5\x08\xca\xff\x61\x05\xb6\xee\x79\x2b\xe6\xfa\x7e\xb6\x0f\x6f\
\x1e\xfe\x57\xfc\x72\xd7\x0f\xa7\x5c\x3c\x33\x4c\x5d\x28\x8b\x01\
\xe6\x97\xb9\x70\xc7\xfd\x85\x28\x5f\xee\xc4\x53\xaf\x6f\xc6\x82\
\x1f\x94\x63\xe3\xcb\x0f\x19\x2e\x3a\x10\xaa\x62\xcb\x67\xf7\xa2\
\xf1\xcc\xff\x6a\xc3\x8d\x69\x86\x29\x8f\x05\xa6\x59\x19\x94\xce\
\x77\x62\x6d\x75\x0e\x06\x03\x2c\xde\xf8\x53\x1d\x9e\xdb\xaa\x9f\
\xec\x63\x61\xac\x58\x98\xb7\x74\x0a\x2d\x8c\x0f\x29\xdb\x2f\x6c\
\xb1\x00\xb3\x4a\xb5\x89\xfd\x8e\x2f\xf7\x18\x72\xab\xbc\xb7\x4d\
\x85\x49\x51\xa1\x28\x14\x94\x42\x77\xb9\x3b\xa5\x1b\xae\x4b\xe6\
\x6a\x73\xe6\xde\xde\x21\x9c\x3a\xaf\x9f\xa9\xba\xb4\xf8\x66\xd4\
\x2c\xbc\x3f\x25\xf9\x2f\x6c\x48\xe5\x8d\x4e\x9b\x4b\xa9\x80\x33\
\x4b\x1d\xb0\x5a\xb5\x99\xc0\x7f\xd7\xbd\x62\xc8\xbd\xfd\xda\x8d\
\xf8\x55\xf5\x47\xf8\xc7\x55\x2f\xe3\xf6\xb2\x87\xa7\xc6\x40\x00\
\x02\x47\x0c\x33\x04\x52\x2a\x60\x9a\x8d\x81\x77\x81\x16\x17\x79\
\xaf\xa1\x0e\xb2\x6a\x1c\x06\x60\x18\x0b\xae\x2d\x5a\x86\xf9\xb9\
\x57\x9c\xc8\x92\x34\xf0\x1c\x31\x5c\x4e\x4a\xf9\x99\x09\xcb\xab\
\x32\x91\xe1\xb1\xa2\x7f\x20\x84\x07\xfe\xe3\xfe\x54\x9b\x73\x09\
\x14\x85\x82\x0d\x29\x9b\x8c\x38\x29\x17\xd0\x66\x67\x50\xf5\x17\
\xd9\xb0\x58\x80\x3f\x7d\xda\x8c\x37\x76\xbd\x96\x6a\x93\xc6\x70\
\xbe\x57\x3a\xd8\xf4\x6c\xc7\x49\x23\x4e\xca\x05\x04\x80\xbc\x42\
\x1b\xd6\xd5\xe4\x20\x3b\x2f\x0d\x3f\xf9\xf5\x16\xfc\xdb\xfb\xcf\
\xa5\xfc\x8c\x04\x49\x24\x34\x18\x50\x1e\x32\xe3\x4d\x9b\x73\x9c\
\x8a\x67\x3b\x50\x5c\xe2\x40\x7f\x9f\x88\x6d\xad\x6f\xe1\xc3\xfd\
\xef\x63\xd1\xac\x85\x58\xb2\xb0\x12\x4b\xe7\x5d\x07\xc2\xa8\x60\
\xa5\x20\xfc\xec\x39\x74\x1b\xec\x56\x4f\x04\x64\x89\xd2\xce\xe3\
\xfc\xbd\xbb\x9f\x69\xd7\xdf\x5b\x31\x82\x69\x23\x20\x00\x80\x01\
\x8a\x4b\x34\x21\xb5\x6d\x38\x27\xd0\x1a\x3c\x81\xd6\xd6\x77\xa7\
\xcc\x04\x81\x27\x6a\xd7\x29\xe1\xc7\x8d\xff\xd2\xfe\x66\x2c\xfc\
\xe9\x25\x60\x8a\x40\x29\x30\x3c\xa4\x84\x2e\x5e\x90\xff\x47\x12\
\xc8\x13\x8d\x4f\xb7\xeb\x6f\x09\xb8\x0c\x5f\x2b\x01\x47\x4e\xb7\
\x94\x38\x56\xf5\x89\x02\x3d\x2d\x8b\xe4\x80\xaa\xd2\xcf\xd2\xdd\
\xd6\xfa\xf7\x1f\x3d\x68\x9e\xcc\x13\x05\x57\xa5\x80\x94\x02\x1c\
\xab\x4a\x5c\x98\xf8\xc5\x91\x83\x6b\x55\x85\x36\x38\x5d\x96\xfa\
\x8f\x36\x1d\x4a\xe8\xc1\xb5\x5f\x79\x01\xb5\xf3\x52\xc9\x45\x81\
\x1f\x13\xaa\xd1\x62\x61\x3e\xf9\xf4\xc9\xc3\xc1\xa9\x78\xbe\x91\
\x80\x93\x89\xb5\x24\x1c\x1c\xab\xca\x5c\x98\x8c\x1c\x2c\x4b\x0e\
\xaa\x0a\xdd\xc5\x30\xf8\x78\xc7\x4f\xdb\xcc\x53\x61\x93\x08\x23\
\x01\x5f\x03\x70\x37\x80\x25\x06\x9c\x84\x83\x67\x89\xc2\x86\x47\
\x4f\xe0\x25\x87\x14\x99\xee\x06\x83\xba\x86\x9f\xb5\x99\x67\xad\
\xa7\x00\xba\x02\x86\xb6\x77\x5d\x18\x39\x06\xf9\x03\x00\x37\xe9\
\xf1\x26\x0a\x9e\x25\x0a\xc7\xaa\x01\x81\x23\x5d\xe2\xa8\x50\xc0\
\xf6\x86\x9f\xb7\x25\xfc\xa8\xe2\x64\xc2\xf0\x1b\x18\xda\xde\x15\
\xf4\xd4\x78\x6f\x01\xf0\x2e\x80\x5b\x27\xf2\x80\x91\x13\x72\x47\
\x85\x3a\x2c\x4b\xb4\x89\x12\xba\xbd\xf1\xe9\xf6\xfe\x89\xdc\x6f\
\xba\xc1\xb4\x13\x09\x6d\xef\xe2\x3d\x35\xde\xdb\x01\xbc\x0e\xad\
\x49\x47\x85\xc0\x13\x95\x0d\xa9\x41\x81\x27\x5d\xa2\x40\xda\x14\
\x99\x36\x29\x32\xdd\xbe\xfb\x99\xf6\xde\x44\x1a\x3c\xdd\x10\x53\
\x2f\x1c\xda\xde\x25\x7b\x6a\xbc\x7f\x67\xb5\x82\x13\x78\xf2\x03\
\x36\xa4\x0e\x09\x3c\xe9\x91\x04\xd2\x2e\x49\xb4\x59\x55\xe8\xf6\
\xdd\xcf\xb4\x9f\x4d\xb2\xad\xd3\x12\xff\x0f\xd4\xc9\xcc\xde\x09\
\x31\x84\xf7\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = "\
\x00\x06\
\x07\x03\x7d\xc3\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\
\x00\x09\
\x0f\x9e\x84\x47\
\x00\x62\
\x00\x72\x00\x69\x00\x63\x00\x6b\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0b\
\x05\x52\xbf\x27\
\x00\x71\
\x00\x74\x00\x2d\x00\x6c\x00\x6f\x00\x67\x00\x6f\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = "\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x02\
\x00\x00\x00\x2a\x00\x00\x00\x00\x00\x01\x00\x00\x03\x03\
\x00\x00\x00\x12\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
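# --- Illustrative usage (not part of the original resource file) ---
# A minimal sketch, assuming the PyQt4 bindings this generated module targets:
# once qInitResources() has run, the embedded files are addressable through the
# ':/' resource prefix.
#
#   from PyQt4 import QtGui
#   app = QtGui.QApplication([])
#   pixmap = QtGui.QPixmap(':/images/brick.png')  # PNG registered above
#   assert not pixmap.isNull()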
|
rvalyi/OpenUpgrade
|
refs/heads/master
|
addons/hr_gamification/models/gamification.py
|
388
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import fields, osv
class hr_gamification_badge_user(osv.Model):
"""User having received a badge"""
_name = 'gamification.badge.user'
_inherit = ['gamification.badge.user']
_columns = {
'employee_id': fields.many2one("hr.employee", string='Employee'),
}
def _check_employee_related_user(self, cr, uid, ids, context=None):
for badge_user in self.browse(cr, uid, ids, context=context):
if badge_user.user_id and badge_user.employee_id:
if badge_user.employee_id not in badge_user.user_id.employee_ids:
return False
return True
_constraints = [
(_check_employee_related_user, "The selected employee does not correspond to the selected user.", ['employee_id']),
]
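# Illustrative note (not part of the original module): the constraint above
# rejects a badge_user whose employee is not linked to the selected user, e.g.
#
#   badge_user_obj = self.pool.get('gamification.badge.user')
#   badge_user_obj.create(cr, uid, {'user_id': some_uid,
#                                   'employee_id': unrelated_employee_id})
#   # -> validation error: "The selected employee does not correspond to the
#   #    selected user."
# (some_uid and unrelated_employee_id are hypothetical identifiers.)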
class gamification_badge(osv.Model):
_name = 'gamification.badge'
_inherit = ['gamification.badge']
def get_granted_employees(self, cr, uid, badge_ids, context=None):
if context is None:
context = {}
employee_ids = []
badge_user_ids = self.pool.get('gamification.badge.user').search(cr, uid, [('badge_id', 'in', badge_ids), ('employee_id', '!=', False)], context=context)
for badge_user in self.pool.get('gamification.badge.user').browse(cr, uid, badge_user_ids, context):
employee_ids.append(badge_user.employee_id.id)
# remove duplicates
employee_ids = list(set(employee_ids))
return {
'type': 'ir.actions.act_window',
'name': 'Granted Employees',
'view_mode': 'kanban,tree,form',
'view_type': 'form',
'res_model': 'hr.employee',
'domain': [('id', 'in', employee_ids)]
}
class hr_employee(osv.osv):
_name = "hr.employee"
_inherit = "hr.employee"
def _get_employee_goals(self, cr, uid, ids, field_name, arg, context=None):
"""Return the list of goals assigned to the employee"""
res = {}
for employee in self.browse(cr, uid, ids, context=context):
res[employee.id] = self.pool.get('gamification.goal').search(cr,uid,[('user_id', '=', employee.user_id.id), ('challenge_id.category', '=', 'hr')], context=context)
return res
def _get_employee_badges(self, cr, uid, ids, field_name, arg, context=None):
"""Return the list of badge_users assigned to the employee"""
res = {}
for employee in self.browse(cr, uid, ids, context=context):
res[employee.id] = self.pool.get('gamification.badge.user').search(cr, uid, [
'|',
('employee_id', '=', employee.id),
'&',
('employee_id', '=', False),
('user_id', '=', employee.user_id.id)
], context=context)
return res
def _has_badges(self, cr, uid, ids, field_name, arg, context=None):
"""Return the list of badge_users assigned to the employee"""
res = {}
for employee in self.browse(cr, uid, ids, context=context):
employee_badge_ids = self.pool.get('gamification.badge.user').search(cr, uid, [
'|',
('employee_id', '=', employee.id),
'&',
('employee_id', '=', False),
('user_id', '=', employee.user_id.id)
], context=context)
res[employee.id] = len(employee_badge_ids) > 0
return res
_columns = {
'goal_ids': fields.function(_get_employee_goals, type="one2many", obj='gamification.goal', string="Employee HR Goals"),
'badge_ids': fields.function(_get_employee_badges, type="one2many", obj='gamification.badge.user', string="Employee Badges"),
'has_badges': fields.function(_has_badges, type="boolean", string="Has Badges"),
}
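# --- Illustrative usage (not part of the original module) ---
# A minimal sketch, assuming a legacy OpenERP 7 environment with cr/uid in
# scope; the function fields above are computed on browse-record access.
#
#   employee_obj = self.pool.get('hr.employee')
#   emp = employee_obj.browse(cr, uid, employee_id)  # employee_id is hypothetical
#   if emp.has_badges:
#       badge_user_ids = [b.id for b in emp.badge_ids]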
|
centralniak/railworks-dsd
|
refs/heads/master
|
setup.py
|
1
|
#!/usr/bin/env python
import os
import setuptools
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Topic :: Games/Entertainment :: Simulation',
]
setuptools.setup(
author='Piotr Kilczuk',
author_email='piotr@tymaszweb.pl',
name='railworks-dsd',
version='0.0.1',
    description='Makes your USB footswitch work as a driver vigilance device in Train Simulator 2016',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
url='https://github.com/centralniak/railworks-dsd',
license='MIT License',
platforms=['Windows'],
classifiers=CLASSIFIERS,
entry_points={
'console_scripts': [
'railworksdsd = dsd:__main__'
]
},
install_requires=open('requirements.txt').read(),
tests_require=open('test_requirements.txt').read(),
packages=setuptools.find_packages(),
include_package_data=False,
zip_safe=False,
test_suite='nose.collector',
)
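# Illustrative install flow (not part of the original file), assuming a local
# checkout on Windows; the console script name comes from entry_points above.
#
#   pip install .
#   railworksdsd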
|
julienbaladier/fuddly
|
refs/heads/master
|
framework/database.py
|
1
|
import sys
import os
import re
import math
import threading
from datetime import datetime
import framework.global_resources as gr
import libs.external_modules as em
from libs.external_modules import *
from libs.utils import ensure_dir, chunk_lines
def regexp(expr, item):
reg = re.compile(expr)
if item is None:
return False
robj = reg.search(item)
return robj is not None
def regexp_bin(expr, item):
expr = bytes(expr)
reg = re.compile(expr)
if item is None:
return False
robj = reg.search(item)
return robj is not None
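# --- Illustrative usage (not part of the original file) ---
# A minimal sketch of how the REGEXP helpers above plug into sqlite3; the table
# and data are hypothetical. SQLite rewrites "X REGEXP Y" as regexp(Y, X),
# which matches the (expr, item) signature used here.
#
#   import sqlite3
#   con = sqlite3.connect(':memory:')
#   con.create_function("REGEXP", 2, regexp)
#   con.execute("CREATE TABLE t(name TEXT)")
#   con.execute("INSERT INTO t VALUES ('fuddly')")
#   con.execute("SELECT name FROM t WHERE name REGEXP ?", ('fud.*',)).fetchall()
#   # -> [('fuddly',)]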
class Database(object):
DDL_fname = 'fmk_db.sql'
DEFAULT_DM_NAME = '__DEFAULT_DATAMODEL'
DEFAULT_GTYPE_NAME = '__DEFAULT_GTYPE'
DEFAULT_GEN_NAME = '__DEFAULT_GNAME'
OUTCOME_ROWID = 1
OUTCOME_DATA = 2
def __init__(self, fmkdb_path=None):
self.name = 'fmkDB.db'
if fmkdb_path is None:
self.fmk_db_path = os.path.join(gr.fuddly_data_folder, self.name)
else:
self.fmk_db_path = fmkdb_path
# self._con = None
# self._cur = None
self.enabled = False
self.last_feedback = {}
self.last_data_id = None
self._data_id = None
self._sql_handler_thread = None
self._sql_handler_stop_event = threading.Event()
self._thread_initialized = threading.Event()
self._sql_stmt_submitted_cond = threading.Condition()
self._sql_stmt_list = []
self._sql_stmt_handled = threading.Event()
self._sql_stmt_outcome_lock = threading.Lock()
self._sql_stmt_outcome = None
self._sync_lock = threading.Lock()
self._ok = None
def _is_valid(self, connection, cursor):
valid = False
with connection:
tmp_con = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
with open(gr.fmk_folder + self.DDL_fname) as fd:
fmk_db_sql = fd.read()
with tmp_con:
cur = tmp_con.cursor()
cur.executescript(fmk_db_sql)
cur.execute("select name from sqlite_master WHERE type='table'")
tables = map(lambda x: x[0], cur.fetchall())
tables = filter(lambda x: not x.startswith('sqlite'), tables)
for t in tables:
cur.execute('select * from {!s}'.format(t))
ref_names = list(map(lambda x: x[0], cur.description))
cursor.execute('select * from {!s}'.format(t))
names = list(map(lambda x: x[0], cursor.description))
if ref_names != names:
valid = False
break
else:
valid = True
return valid
def _sql_handler(self):
if os.path.isfile(self.fmk_db_path):
connection = sqlite3.connect(self.fmk_db_path, detect_types=sqlite3.PARSE_DECLTYPES)
cursor = connection.cursor()
self._ok = self._is_valid(connection, cursor)
else:
connection = sqlite3.connect(self.fmk_db_path, detect_types=sqlite3.PARSE_DECLTYPES)
fmk_db_sql = open(gr.fmk_folder + self.DDL_fname).read()
self._ok = False
with connection:
cursor = connection.cursor()
cursor.executescript(fmk_db_sql)
self._ok = True
self._thread_initialized.set()
if not self._ok:
return
connection.create_function("REGEXP", 2, regexp)
connection.create_function("BINREGEXP", 2, regexp_bin)
while not self._sql_handler_stop_event.is_set():
with self._sql_stmt_submitted_cond:
self._sql_stmt_submitted_cond.wait(0.01)
if self._sql_stmt_list:
sql_stmts = self._sql_stmt_list
self._sql_stmt_list = []
else:
continue
last_stmt_error = True
for stmt in sql_stmts:
sql_stmt, sql_params, outcome_type, sql_error = stmt
try:
if sql_params is None:
cursor.execute(sql_stmt)
else:
cursor.execute(sql_stmt, sql_params)
connection.commit()
except sqlite3.Error as e:
connection.rollback()
print("\n*** ERROR[SQL:{:s}] ".format(e.args[0])+sql_error)
last_stmt_error = True
else:
last_stmt_error = False
if outcome_type is not None:
with self._sql_stmt_outcome_lock:
if self._sql_stmt_outcome is not None:
print("\n*** WARNING: SQL statement outcomes have not been consumed."
"\n Will be overwritten!")
if last_stmt_error:
self._sql_stmt_outcome = None
elif outcome_type == Database.OUTCOME_ROWID:
self._sql_stmt_outcome = cursor.lastrowid
elif outcome_type == Database.OUTCOME_DATA:
self._sql_stmt_outcome = cursor.fetchall()
else:
print("\n*** ERROR: Unrecognized outcome type request")
self._sql_stmt_outcome = None
self._sql_stmt_handled.set()
self._sql_handler_stop_event.wait(0.01)
if connection:
connection.close()
def _stop_sql_handler(self):
with self._sync_lock:
self._sql_handler_stop_event.set()
self._sql_handler_thread.join()
def submit_sql_stmt(self, stmt, params=None, outcome_type=None, error_msg=''):
"""
        This method is the only one that should submit requests to the threaded SQL handler.
        It is also synchronized to guarantee request ordering (especially needed when you wait
        for the outcomes of a submitted SQL statement).
Args:
stmt (str): SQL statement
params (tuple): parameters
outcome_type (int): type of the expected outcomes. If `None`, no outcomes are expected
error_msg (str): specific error message to display in case of an error
Returns:
`None` or the expected outcomes
"""
with self._sync_lock:
with self._sql_stmt_submitted_cond:
self._sql_stmt_list.append((stmt, params, outcome_type, error_msg))
self._sql_stmt_submitted_cond.notify()
if outcome_type is not None:
# If we care about outcomes, then we are sure to get outcomes from the just
# submitted SQL statement as this method is 'synchronized'.
while not self._sql_stmt_handled.is_set():
self._sql_stmt_handled.wait(0.1)
self._sql_stmt_handled.clear()
with self._sql_stmt_outcome_lock:
ret = self._sql_stmt_outcome
self._sql_stmt_outcome = None
return ret
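    # Illustrative call (not part of the original file): a synchronous read
    # through the handler thread, assuming 'db' is a started Database instance.
    #
    #   rows = db.submit_sql_stmt("SELECT ID FROM DATA;",
    #                             outcome_type=Database.OUTCOME_DATA)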
def start(self):
if self._sql_handler_thread is not None:
return
if not sqlite3_module:
print("/!\\ WARNING /!\\: Fuddly's FmkDB unavailable because python-sqlite3 is not installed!")
return False
self._sql_handler_thread = threading.Thread(None, self._sql_handler, 'db_handler')
self._sql_handler_thread.start()
while not self._thread_initialized.is_set():
self._thread_initialized.wait(0.1)
self.enabled = self._ok
return self._ok
def stop(self):
self._stop_sql_handler()
self.enabled = False
def enable(self):
self.enabled = True
def disable(self):
self.enabled = False
def cleanup_current_state(self):
self.last_feedback = {}
def execute_sql_statement(self, sql_stmt, params=None):
return self.submit_sql_stmt(sql_stmt, params=params, outcome_type=Database.OUTCOME_DATA)
def insert_data_model(self, dm_name):
stmt = "INSERT INTO DATAMODEL(NAME) VALUES(?)"
params = (dm_name,)
err_msg = 'while inserting a value into table DATAMODEL!'
self.submit_sql_stmt(stmt, params=params, error_msg=err_msg)
def insert_project(self, prj_name):
stmt = "INSERT INTO PROJECT(NAME) VALUES(?)"
params = (prj_name,)
err_msg = 'while inserting a value into table PROJECT!'
self.submit_sql_stmt(stmt, params=params, error_msg=err_msg)
def insert_dmaker(self, dm_name, dtype, name, is_gen, stateful, clone_type=None):
clone_name = None if clone_type is None else name
stmt = "INSERT INTO DMAKERS(DM_NAME,TYPE,NAME,CLONE_TYPE,CLONE_NAME,GENERATOR,STATEFUL)"\
" VALUES(?,?,?,?,?,?,?)"
params = (dm_name, dtype, name, clone_type, clone_name, is_gen, stateful)
        err_msg = 'while inserting a value into table DMAKERS!'
self.submit_sql_stmt(stmt, params=params, error_msg=err_msg)
def insert_data(self, dtype, dm_name, raw_data, sz, sent_date, ack_date,
target_name, prj_name, group_id=None):
self.cleanup_current_state()
if not self.enabled:
return None
blob = sqlite3.Binary(raw_data)
stmt = "INSERT INTO DATA(GROUP_ID,TYPE,DM_NAME,CONTENT,SIZE,SENT_DATE,ACK_DATE,"\
"TARGET,PRJ_NAME)"\
" VALUES(?,?,?,?,?,?,?,?,?)"
params = (group_id, dtype, dm_name, blob, sz, sent_date, ack_date, target_name, prj_name)
err_msg = 'while inserting a value into table DATA!'
if self._data_id is None:
did = self.submit_sql_stmt(stmt, params=params, outcome_type=Database.OUTCOME_ROWID,
error_msg=err_msg)
self._data_id = did
else:
self.submit_sql_stmt(stmt, params=params, error_msg=err_msg)
self._data_id += 1
return self._data_id
def insert_steps(self, data_id, step_id, dmaker_type, dmaker_name, data_id_src,
user_input, info):
if not self.enabled:
return None
if info:
info = sqlite3.Binary(info)
stmt = "INSERT INTO STEPS(DATA_ID,STEP_ID,DMAKER_TYPE,DMAKER_NAME,DATA_ID_SRC,USER_INPUT,INFO)"\
" VALUES(?,?,?,?,?,?,?)"
params = (data_id, step_id, dmaker_type, dmaker_name, data_id_src, user_input, info)
err_msg = 'while inserting a value into table STEPS!'
self.submit_sql_stmt(stmt, params=params, error_msg=err_msg)
def insert_feedback(self, data_id, source, timestamp, content, status_code=None):
if data_id != self.last_data_id:
self.last_data_id = data_id
self.last_feedback = {}
if source not in self.last_feedback:
self.last_feedback[source] = []
self.last_feedback[source].append(
{
'timestamp': timestamp,
'content': content,
'status': status_code
}
)
if not self.enabled:
return None
if content:
content = sqlite3.Binary(content)
stmt = "INSERT INTO FEEDBACK(DATA_ID,SOURCE,DATE,CONTENT,STATUS)"\
" VALUES(?,?,?,?,?)"
params = (data_id, source, timestamp, content, status_code)
err_msg = 'while inserting a value into table FEEDBACK!'
self.submit_sql_stmt(stmt, params=params, error_msg=err_msg)
def insert_comment(self, data_id, content, date):
if not self.enabled:
return None
stmt = "INSERT INTO COMMENTS(DATA_ID,CONTENT,DATE)" \
" VALUES(?,?,?)"
params = (data_id, content, date)
err_msg = 'while inserting a value into table COMMENTS!'
self.submit_sql_stmt(stmt, params=params, error_msg=err_msg)
def insert_fmk_info(self, data_id, content, date, error=False):
if not self.enabled:
return None
stmt = "INSERT INTO FMKINFO(DATA_ID,CONTENT,DATE,ERROR)"\
" VALUES(?,?,?,?)"
params = (data_id, content, date, error)
err_msg = 'while inserting a value into table FMKINFO!'
self.submit_sql_stmt(stmt, params=params, error_msg=err_msg)
def fetch_data(self, start_id=1, end_id=-1):
ign_end_id = '--' if end_id < 1 else ''
stmt = \
'''
SELECT DATA.ID, DATA.CONTENT, DATA.TYPE, DMAKERS.NAME, DATA.DM_NAME
FROM DATA INNER JOIN DMAKERS
ON DATA.TYPE = DMAKERS.TYPE AND DMAKERS.CLONE_TYPE IS NULL
WHERE DATA.ID >= {sid:d} {ign_eid:s} AND DATA.ID <= {eid:d}
UNION ALL
SELECT DATA.ID, DATA.CONTENT, DMAKERS.CLONE_TYPE AS TYPE, DMAKERS.CLONE_NAME AS NAME,
DATA.DM_NAME
FROM DATA INNER JOIN DMAKERS
ON DATA.TYPE = DMAKERS.TYPE AND DMAKERS.CLONE_TYPE IS NOT NULL
WHERE DATA.ID >= {sid:d} {ign_eid:s} AND DATA.ID <= {eid:d}
'''.format(sid = start_id, eid = end_id, ign_eid = ign_end_id)
ret = self.submit_sql_stmt(stmt, outcome_type=Database.OUTCOME_DATA)
return ret
def _get_color_function(self, colorized):
if not colorized:
def colorize(string, rgb=None, ansi=None, bg=None, ansi_bg=None, fd=1):
return string
else:
colorize = em.colorize
return colorize
def check_data_existence(self, data_id, colorized=True):
colorize = self._get_color_function(colorized)
data = self.execute_sql_statement(
"SELECT * FROM DATA "
"WHERE ID == {data_id:d};".format(data_id=data_id)
)
if not data:
print(colorize("*** ERROR: The provided DATA ID does not exist ***", rgb=Color.ERROR))
return data
def display_data_info(self, data_id, with_data=False, with_fbk=False, with_fmkinfo=True,
fbk_src=None, limit_data_sz=None, page_width=100, colorized=True,
raw=False):
colorize = self._get_color_function(colorized)
data = self.check_data_existence(data_id=data_id, colorized=colorized)
if not data:
return
prt = sys.stdout.write
data_id, gr_id, data_type, dm_name, data_content, size, sent_date, ack_date, tg, prj = data[0]
steps = self.execute_sql_statement(
"SELECT * FROM STEPS "
"WHERE DATA_ID == {data_id:d} "
"ORDER BY STEP_ID ASC;".format(data_id=data_id)
)
if not steps:
print(colorize("*** BUG with data ID '{:d}' (data should always have at least 1 step) "
"***".format(data_id),
rgb=Color.ERROR))
return
if fbk_src:
feedback = self.execute_sql_statement(
"SELECT SOURCE, DATE, STATUS, CONTENT FROM FEEDBACK "
"WHERE DATA_ID == ? AND SOURCE REGEXP ? "
"ORDER BY SOURCE ASC;",
params=(data_id, fbk_src)
)
else:
feedback = self.execute_sql_statement(
"SELECT SOURCE, DATE, STATUS, CONTENT FROM FEEDBACK "
"WHERE DATA_ID == {data_id:d} "
"ORDER BY SOURCE"
" ASC;".format(data_id=data_id)
)
comments = self.execute_sql_statement(
"SELECT CONTENT, DATE FROM COMMENTS "
"WHERE DATA_ID == {data_id:d} "
"ORDER BY DATE ASC;".format(data_id=data_id)
)
fmkinfo = self.execute_sql_statement(
"SELECT CONTENT, DATE, ERROR FROM FMKINFO "
"WHERE DATA_ID == {data_id:d} "
"ORDER BY ERROR DESC;".format(data_id=data_id)
)
line_pattern = '-' * page_width
data_id_pattern = " Data ID #{:d} ".format(data_id)
msg = colorize("[".rjust((page_width - 20), '='), rgb=Color.NEWLOGENTRY)
msg += colorize(data_id_pattern, rgb=Color.FMKHLIGHT)
msg += colorize("]".ljust(page_width - (page_width - 20) - len(data_id_pattern), "="),
rgb=Color.NEWLOGENTRY)
msg += colorize("\n Project: ", rgb=Color.FMKINFO)
msg += colorize("{:s}".format(prj), rgb=Color.FMKSUBINFO)
msg += colorize(" | Target: ", rgb=Color.FMKINFO)
msg += colorize("{:s}".format(tg), rgb=Color.FMKSUBINFO)
status_prefix = " Status: "
msg += colorize('\n' + status_prefix, rgb=Color.FMKINFO)
src_max_sz = 0
for idx, fbk in enumerate(feedback):
src, tstamp, status, _ = fbk
src_sz = len(src)
src_max_sz = src_sz if src_sz > src_max_sz else src_max_sz
if status is None:
continue
msg += colorize("{!s}".format(status), rgb=Color.FMKSUBINFO) + \
colorize(" by ", rgb=Color.FMKINFO) + \
colorize("{!s}".format(src), rgb=Color.FMKSUBINFO)
if idx < len(feedback) - 1:
msg += colorize(",\n".format(src) + ' '*len(status_prefix), rgb=Color.FMKINFO)
msg += '\n'
sentd = sent_date.strftime("%d/%m/%Y - %H:%M:%S") if sent_date else 'None'
ackd = ack_date.strftime("%d/%m/%Y - %H:%M:%S") if ack_date else 'None'
msg += colorize(" Sent: ", rgb=Color.FMKINFO) + colorize(sentd, rgb=Color.DATE)
msg += colorize("\n Received: ", rgb=Color.FMKINFO) + colorize(ackd, rgb=Color.DATE)
msg += colorize("\n Size: ", rgb=Color.FMKINFO) + colorize(str(size) + ' Bytes',
rgb=Color.FMKSUBINFO)
msg += colorize('\n' + line_pattern, rgb=Color.NEWLOGENTRY)
prt(msg)
def handle_dmaker(dmk_pattern, info, dmk_type, dmk_name, name_sep_sz, id_src=None):
msg = ''
msg += colorize("\n |_ {:s}: ".format(dmk_pattern), rgb=Color.FMKINFO)
msg += colorize(str(dmk_type).ljust(name_sep_sz, ' '), rgb=Color.FMKSUBINFO)
if id_src is None:
msg += colorize(" | Name: ", rgb=Color.FMKINFO)
msg += colorize(str(dmk_name), rgb=Color.FMKSUBINFO)
msg += colorize(" | UI: ", rgb=Color.FMKINFO)
msg += colorize(str(ui), rgb=Color.FMKSUBINFO)
else:
msg += colorize(" | ID source: ", rgb=Color.FMKINFO)
msg += colorize(str(id_src), rgb=Color.FMKSUBINFO)
if info is not None:
info = gr.unconvert_from_internal_repr(info)
info = info.split('\n')
for i in info:
chks = chunk_lines(i, page_width - prefix_sz - 10)
for idx, c in enumerate(chks):
spc = 1 if idx > 0 else 0
msg += '\n' + colorize(' ' * prefix_sz + '| ', rgb=Color.FMKINFO) + \
colorize(' ' * spc + c, rgb=Color.DATAINFO_ALT)
return msg
msg = ''
first_pass = True
prefix_sz = 7
name_sep_sz = len(data_type)
for _, _, dmk_type, _, _, _, _ in steps:
dmk_type_sz = 0 if dmk_type is None else len(dmk_type)
name_sep_sz = dmk_type_sz if dmk_type_sz > name_sep_sz else name_sep_sz
sid = 1
for _, step_id, dmk_type, dmk_name, id_src, ui, info in steps:
if first_pass:
if dmk_type is None:
assert (id_src is not None)
continue
else:
first_pass = False
msg += colorize("\n Step #{:d}:".format(sid), rgb=Color.FMKINFOGROUP)
if dmk_type != data_type:
msg += colorize("\n |_ Generator: ", rgb=Color.FMKINFO)
msg += colorize(str(data_type), rgb=Color.FMKSUBINFO)
msg += colorize(" | UI: ", rgb=Color.FMKINFO)
msg += colorize(str(ui), rgb=Color.FMKSUBINFO)
sid += 1
msg += colorize("\n Step #{:d}:".format(sid), rgb=Color.FMKINFOGROUP)
msg += handle_dmaker('Disruptor', info, dmk_type, dmk_name, len(data_type))
else:
msg += handle_dmaker('Generator', info, dmk_type, dmk_name, name_sep_sz,
id_src=id_src)
else:
msg += colorize("\n Step #{:d}:".format(sid), rgb=Color.FMKINFOGROUP)
msg += handle_dmaker('Disruptor', info, dmk_type, dmk_name, name_sep_sz)
sid += 1
msg += colorize('\n' + line_pattern, rgb=Color.NEWLOGENTRY)
prt(msg)
msg = ''
for idx, com in enumerate(comments, start=1):
content, tstamp = com
date_str = tstamp.strftime("%d/%m/%Y - %H:%M:%S") if tstamp else 'None'
msg += colorize("\n Comment #{:d}: ".format(idx), rgb=Color.FMKINFOGROUP) + \
colorize(date_str, rgb=Color.DATE)
chks = chunk_lines(content, page_width - 10)
for c in chks:
msg += '\n' + colorize(' ' * 2 + '| ', rgb=Color.FMKINFOGROUP) + \
colorize(str(c), rgb=Color.DATAINFO_ALT)
if comments:
msg += colorize('\n' + line_pattern, rgb=Color.NEWLOGENTRY)
prt(msg)
msg = ''
for idx, info in enumerate(fmkinfo, start=1):
content, tstamp, error = info
if not with_fmkinfo and not error:
continue
date_str = tstamp.strftime("%d/%m/%Y - %H:%M:%S") if tstamp else 'None'
if error:
msg += colorize("\n FMK Error: ", rgb=Color.ERROR)
else:
msg += colorize("\n FMK Info: ", rgb=Color.FMKINFOGROUP)
msg += colorize(date_str, rgb=Color.DATE)
chks = chunk_lines(content, page_width - 10)
for c in chks:
color = Color.FMKHLIGHT if error else Color.DATAINFO_ALT
msg += '\n' + colorize(' ' * 2 + '| ', rgb=Color.FMKINFOGROUP) + \
colorize(str(c), rgb=color)
if msg:
msg += colorize('\n' + line_pattern, rgb=Color.NEWLOGENTRY)
prt(msg)
msg = ''
if with_data:
msg += colorize("\n Sent Data:\n", rgb=Color.FMKINFOGROUP)
data_content = gr.unconvert_from_internal_repr(data_content)
data_content = self._handle_binary_content(data_content, sz_limit=limit_data_sz, raw=raw,
colorized=colorized)
msg += data_content
msg += colorize('\n' + line_pattern, rgb=Color.NEWLOGENTRY)
if with_fbk:
for src, tstamp, status, content in feedback:
formatted_ts = None if tstamp is None else tstamp.strftime("%d/%m/%Y - %H:%M:%S")
msg += colorize("\n Status(", rgb=Color.FMKINFOGROUP)
msg += colorize("{!s}".format(src), rgb=Color.FMKSUBINFO)
msg += colorize(" | ", rgb=Color.FMKINFOGROUP)
msg += colorize("{!s}".format(formatted_ts),
rgb=Color.FMKSUBINFO)
msg += colorize(")", rgb=Color.FMKINFOGROUP)
msg += colorize(" = {!s}".format(status), rgb=Color.FMKSUBINFO)
if content:
content = gr.unconvert_from_internal_repr(content)
content = self._handle_binary_content(content, sz_limit=limit_data_sz, raw=raw,
colorized=colorized)
chks = chunk_lines(content, page_width - 4)
for c in chks:
                        c = c[:-1] if c.endswith('\n') else c  # strip a single trailing newline
msg += colorize('\n' + ' ' * 2 + '| ', rgb=Color.FMKINFOGROUP) + \
colorize(str(c), rgb=Color.DATAINFO_ALT)
if feedback:
msg += colorize('\n' + line_pattern, rgb=Color.NEWLOGENTRY)
prt(msg + '\n')
def _handle_binary_content(self, content, sz_limit=None, raw=False, colorized=True):
colorize = self._get_color_function(colorized)
if sys.version_info[0] > 2:
content = content if raw else '{!a}'.format(content)
else:
content = content if raw else repr(content)
if sz_limit is not None and len(content) > sz_limit:
content = content[:sz_limit]
content += colorize(' ...', rgb=Color.FMKHLIGHT)
return content
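    # Illustrative behaviour (not part of the original file), under Python 3:
    # _handle_binary_content(b'\x00ABC', sz_limit=4) first renders the bytes
    # with '{!a}' (giving the 10-character string "b'\x00ABC'"), truncates it
    # to 4 characters ("b'\x"), then appends a highlighted ' ...' marker.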
def display_data_info_by_date(self, start, end, with_data=False, with_fbk=False, with_fmkinfo=True,
fbk_src=None, prj_name=None,
limit_data_sz=None, raw=False, page_width=100, colorized=True):
colorize = self._get_color_function(colorized)
if prj_name:
records = self.execute_sql_statement(
"SELECT ID FROM DATA "
"WHERE ? <= SENT_DATE and SENT_DATE <= ? and PRJ_NAME == ?;",
params=(start, end, prj_name)
)
else:
records = self.execute_sql_statement(
"SELECT ID FROM DATA "
"WHERE ? <= SENT_DATE and SENT_DATE <= ?;",
params=(start, end)
)
if records:
for rec in records:
data_id = rec[0]
self.display_data_info(data_id, with_data=with_data, with_fbk=with_fbk,
with_fmkinfo=with_fmkinfo, fbk_src=fbk_src,
limit_data_sz=limit_data_sz, raw=raw, page_width=page_width,
colorized=colorized)
else:
print(colorize("*** ERROR: No data found between {!s} and {!s} ***".format(start, end),
rgb=Color.ERROR))
def display_data_info_by_range(self, first_id, last_id, with_data=False, with_fbk=False, with_fmkinfo=True,
fbk_src=None, prj_name=None,
limit_data_sz=None, raw=False, page_width=100, colorized=True):
colorize = self._get_color_function(colorized)
if prj_name:
records = self.execute_sql_statement(
"SELECT ID FROM DATA "
"WHERE ? <= ID and ID <= ? and PRJ_NAME == ?;",
params=(first_id, last_id, prj_name)
)
else:
records = self.execute_sql_statement(
"SELECT ID FROM DATA "
"WHERE ? <= ID and ID <= ?;",
params=(first_id, last_id)
)
if records:
for rec in records:
data_id = rec[0]
self.display_data_info(data_id, with_data=with_data, with_fbk=with_fbk,
with_fmkinfo=with_fmkinfo, fbk_src=fbk_src,
limit_data_sz=limit_data_sz, raw=raw, page_width=page_width,
colorized=colorized)
else:
print(colorize("*** ERROR: No data found between {!s} and {!s} ***".format(first_id,
last_id),
rgb=Color.ERROR))
def display_stats(self, colorized=True):
colorize = self._get_color_function(colorized)
records = self.execute_sql_statement(
"SELECT TARGET, TYPE, TOTAL FROM STATS_BY_TARGET;"
)
if records:
current_target = None
max_len = 0
for rec in records:
_, data_type, _ = rec
data_type_len = len(data_type)
if max_len < data_type_len:
max_len = data_type_len
data_type_pattern = "{:>" + str(max_len + 1) + "s}"
for rec in records:
tg, data_type, total = rec
if tg != current_target:
current_target = tg
print(colorize("*** {:s} ***".format(tg), rgb=Color.FMKINFOGROUP))
format_string = data_type_pattern + " : {:d}"
print(colorize(format_string.format(data_type, total),
rgb=Color.FMKSUBINFO))
else:
print(colorize("*** ERROR: Statistics are unavailable ***", rgb=Color.ERROR))
data_records = self.execute_sql_statement(
"SELECT ID FROM DATA;"
)
nb_data_records = len(data_records)
title = colorize("Number of Data IDs: ", rgb=Color.FMKINFOGROUP)
content = colorize("{:d}".format(nb_data_records), rgb=Color.FMKSUBINFO)
print(title + content)
def export_data(self, first, last=None, colorized=True):
colorize = self._get_color_function(colorized)
if last is not None:
records = self.execute_sql_statement(
"SELECT ID, TYPE, DM_NAME, SENT_DATE, CONTENT FROM DATA "
"WHERE {start:d} <= ID and ID <= {end:d};".format(start=first,
end=last)
)
else:
records = self.execute_sql_statement(
"SELECT ID, TYPE, DM_NAME, SENT_DATE, CONTENT FROM DATA "
"WHERE ID == {data_id:d};".format(data_id=first)
)
if records:
base_dir = gr.exported_data_folder
prev_export_date = None
export_cpt = 0
for rec in records:
data_id, data_type, dm_name, sent_date, content = rec
file_extension = dm_name
if sent_date is None:
current_export_date = datetime.now().strftime("%Y-%m-%d-%H%M%S")
else:
current_export_date = sent_date.strftime("%Y-%m-%d-%H%M%S")
if current_export_date != prev_export_date:
prev_export_date = current_export_date
export_cpt = 0
else:
export_cpt += 1
export_fname = '{typ:s}_{date:s}_{cpt:0>2d}.{ext:s}'.format(
date=current_export_date,
cpt=export_cpt,
ext=file_extension,
typ=data_type)
export_full_fn = os.path.join(base_dir, dm_name, export_fname)
ensure_dir(export_full_fn)
with open(export_full_fn, 'wb') as fd:
fd.write(content)
print(colorize("Data ID #{:d} --> {:s}".format(data_id, export_full_fn),
rgb=Color.FMKINFO))
else:
print(colorize("*** ERROR: The provided DATA IDs do not exist ***", rgb=Color.ERROR))
def remove_data(self, data_id, colorized=True):
colorize = self._get_color_function(colorized)
if not self.check_data_existence(data_id, colorized=colorized):
return
comments = self.execute_sql_statement(
"DELETE FROM COMMENTS "
"WHERE DATA_ID == {data_id:d};".format(data_id=data_id)
)
fmkinfo = self.execute_sql_statement(
"DELETE FROM FMKINFO "
"WHERE DATA_ID == {data_id:d};".format(data_id=data_id)
)
fbk = self.execute_sql_statement(
"DELETE FROM FEEDBACK "
"WHERE DATA_ID == {data_id:d};".format(data_id=data_id)
)
steps = self.execute_sql_statement(
"DELETE FROM STEPS "
"WHERE DATA_ID == {data_id:d};".format(data_id=data_id)
)
data = self.execute_sql_statement(
"DELETE FROM DATA "
"WHERE ID == {data_id:d};".format(data_id=data_id)
)
print(colorize("*** Data {:d} and all related records have been removed ***".format(data_id),
rgb=Color.FMKINFO))
def get_project_record(self, prj_name=None):
if prj_name:
prj_records = self.execute_sql_statement(
"SELECT ID, TARGET, PRJ_NAME FROM DATA "
"WHERE PRJ_NAME == ? "
"ORDER BY PRJ_NAME ASC, TARGET ASC;",
params=(prj_name,)
)
else:
prj_records = self.execute_sql_statement(
"SELECT ID, TARGET, PRJ_NAME FROM DATA "
"ORDER BY PRJ_NAME ASC, TARGET ASC;",
)
return prj_records
def get_data_with_impact(self, prj_name=None, fbk_src=None, display=True, verbose=False,
colorized=True):
colorize = self._get_color_function(colorized)
if fbk_src:
fbk_records = self.execute_sql_statement(
"SELECT DATA_ID, STATUS, SOURCE FROM FEEDBACK "
"WHERE STATUS < 0 and SOURCE REGEXP ?;",
params=(fbk_src,)
)
else:
fbk_records = self.execute_sql_statement(
"SELECT DATA_ID, STATUS, SOURCE FROM FEEDBACK "
"WHERE STATUS < 0;"
)
prj_records = self.get_project_record(prj_name)
data_list = []
if fbk_records and prj_records:
id2fbk = {}
for rec in fbk_records:
data_id, status, src = rec
if data_id not in id2fbk:
id2fbk[data_id] = {}
if src not in id2fbk[data_id]:
id2fbk[data_id][src] = []
id2fbk[data_id][src].append(status)
data_id_pattern = "{:>" + str(int(math.log10(len(prj_records))) + 2) + "s}"
format_string = " [DataID " + data_id_pattern + "] --> {:s}"
current_prj = None
for rec in prj_records:
data_id, target, prj = rec
if data_id in id2fbk:
data_list.append(data_id)
if display:
if prj != current_prj:
current_prj = prj
print(
colorize("*** Project '{:s}' ***".format(prj), rgb=Color.FMKINFOGROUP))
print(colorize(format_string.format('#' + str(data_id), target),
rgb=Color.DATAINFO))
if verbose:
for src, status in id2fbk[data_id].items():
status_str = ''.join([str(s) + ',' for s in status])[:-1]
print(colorize(" |_ status={:s} from {:s}".format(status_str,
src),
rgb=Color.FMKSUBINFO))
else:
print(colorize("*** No data has negatively impacted a target ***", rgb=Color.FMKINFO))
return data_list
def get_data_without_fbk(self, prj_name=None, fbk_src=None, display=True, colorized=True):
colorize = self._get_color_function(colorized)
if fbk_src:
fbk_records = self.execute_sql_statement(
"SELECT DATA_ID, STATUS, SOURCE, CONTENT FROM FEEDBACK "
"WHERE SOURCE REGEXP ?;",
params=(fbk_src,)
)
else:
fbk_records = self.execute_sql_statement(
"SELECT DATA_ID, STATUS, SOURCE, CONTENT FROM FEEDBACK;"
)
prj_records = self.get_project_record(prj_name)
data_list = []
if fbk_records and prj_records:
id2fbk = {}
for rec in fbk_records:
data_id, status, src, content = rec
if data_id not in id2fbk:
id2fbk[data_id] = {}
if src not in id2fbk[data_id]:
id2fbk[data_id][src] = []
id2fbk[data_id][src].append((status, content))
data_id_pattern = "{:>" + str(int(math.log10(len(prj_records))) + 2) + "s}"
format_string = " [DataID " + data_id_pattern + "] --> {:s}"
current_prj = None
for rec in prj_records:
data_id, target, prj = rec
to_gather = True
if data_id in id2fbk:
                    current_fbk = id2fbk[data_id]  # the dictionary is never empty
for src, fbk_list in current_fbk.items():
for fbk in fbk_list:
if fbk[1] is None or \
(isinstance(fbk[1], bytes) and fbk[1].strip() == b''):
continue
else:
to_gather = False
break
if not to_gather:
break
if to_gather:
data_list.append(data_id)
if display:
if prj != current_prj:
current_prj = prj
print(
colorize("*** Project '{:s}' ***".format(prj), rgb=Color.FMKINFOGROUP))
print(colorize(format_string.format('#' + str(data_id), target),
rgb=Color.DATAINFO))
else:
print(colorize("*** No data has been found for analysis ***", rgb=Color.FMKINFO))
return data_list
def get_data_with_specific_fbk(self, fbk, prj_name=None, fbk_src=None, display=True,
colorized=True):
colorize = self._get_color_function(colorized)
fbk = gr.convert_to_internal_repr(fbk)
if fbk_src:
fbk_records = self.execute_sql_statement(
"SELECT DATA_ID, CONTENT, SOURCE FROM FEEDBACK "
"WHERE SOURCE REGEXP ? AND BINREGEXP(?,CONTENT);",
params=(fbk_src, fbk)
)
else:
fbk_records = self.execute_sql_statement(
"SELECT DATA_ID, CONTENT, SOURCE FROM FEEDBACK "
"WHERE BINREGEXP(?,CONTENT);",
params=(fbk,)
)
prj_records = self.get_project_record(prj_name)
data_list = []
data_id_pattern = "{:>" + str(int(math.log10(len(prj_records))) + 2) + "s}"
format_string = " [DataID " + data_id_pattern + "] --> {:s}"
if fbk_records and prj_records:
ids_to_display = {}
for rec in fbk_records:
data_id, content, src = rec
if data_id not in ids_to_display:
ids_to_display[data_id] = {}
if src not in ids_to_display[data_id]:
ids_to_display[data_id][src] = []
ids_to_display[data_id][src].append(content)
current_prj = None
for rec in prj_records:
data_id, target, prj = rec
if data_id in ids_to_display:
data_list.append(data_id)
if display:
fbk = ids_to_display[data_id]
if prj != current_prj:
current_prj = prj
print(
colorize("*** Project '{:s}' ***".format(prj), rgb=Color.FMKINFOGROUP))
print(colorize(format_string.format('#' + str(data_id), target),
rgb=Color.DATAINFO))
for src, contents in fbk.items():
print(colorize(" |_ From [{:s}]:".format(src), rgb=Color.FMKSUBINFO))
for ct in contents:
print(
colorize(" {:s}".format(str(ct)), rgb=Color.DATAINFO_ALT))
else:
print(colorize("*** No data has been found for analysis ***", rgb=Color.FMKINFO))
return data_list
|
thnee/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/oneandone/oneandone_firewall_policy.py
|
21
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: oneandone_firewall_policy
short_description: Configure 1&1 firewall policy.
description:
- Create, remove, reconfigure, update firewall policies.
This module has a dependency on 1and1 >= 1.0
version_added: "2.5"
options:
state:
description:
- Define a firewall policy state to create, remove, or update.
required: false
default: 'present'
choices: [ "present", "absent", "update" ]
auth_token:
description:
- Authenticating API token provided by 1&1.
required: true
api_url:
description:
- Custom API URL. Overrides the
ONEANDONE_API_URL environment variable.
required: false
name:
description:
- Firewall policy name used with present state. Used as identifier (id or name) when used with absent state.
maxLength=128
required: true
firewall_policy:
description:
- The identifier (id or name) of the firewall policy used with update state.
required: true
rules:
description:
- A list of rules that will be set for the firewall policy.
Each rule must contain protocol parameter, in addition to three optional parameters
(port_from, port_to, and source)
add_server_ips:
description:
- A list of server identifiers (id or name) to be assigned to a firewall policy.
Used in combination with update state.
required: false
remove_server_ips:
description:
- A list of server IP ids to be unassigned from a firewall policy. Used in combination with update state.
required: false
add_rules:
description:
- A list of rules that will be added to an existing firewall policy.
              Its syntax is the same as the one used for the rules parameter. Used in combination with update state.
required: false
remove_rules:
description:
- A list of rule ids that will be removed from an existing firewall policy. Used in combination with update state.
required: false
description:
description:
- Firewall policy description. maxLength=256
required: false
wait:
description:
- wait for the instance to be in state 'running' before returning
required: false
default: "yes"
type: bool
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 600
wait_interval:
description:
- Defines the number of seconds to wait when using the _wait_for methods
default: 5
requirements:
- "1and1"
- "python >= 2.6"
author:
- "Amel Ajdinovic (@aajdinov)"
- "Ethan Devenport (@edevenport)"
'''
EXAMPLES = '''
# Provisioning example. Create and destroy a firewall policy.
- oneandone_firewall_policy:
auth_token: oneandone_private_api_key
name: ansible-firewall-policy
description: Testing creation of firewall policies with ansible
rules:
-
protocol: TCP
port_from: 80
port_to: 80
source: 0.0.0.0
wait: true
wait_timeout: 500
- oneandone_firewall_policy:
auth_token: oneandone_private_api_key
state: absent
name: ansible-firewall-policy
# Update a firewall policy.
- oneandone_firewall_policy:
auth_token: oneandone_private_api_key
state: update
firewall_policy: ansible-firewall-policy
name: ansible-firewall-policy-updated
description: Testing creation of firewall policies with ansible - updated
# Add server to a firewall policy.
- oneandone_firewall_policy:
auth_token: oneandone_private_api_key
firewall_policy: ansible-firewall-policy-updated
add_server_ips:
- server_identifier (id or name)
- server_identifier #2 (id or name)
wait: true
wait_timeout: 500
state: update
# Remove server from a firewall policy.
- oneandone_firewall_policy:
auth_token: oneandone_private_api_key
firewall_policy: ansible-firewall-policy-updated
remove_server_ips:
- B2504878540DBC5F7634EB00A07C1EBD (server's IP id)
wait: true
wait_timeout: 500
state: update
# Add rules to a firewall policy.
- oneandone_firewall_policy:
auth_token: oneandone_private_api_key
firewall_policy: ansible-firewall-policy-updated
description: Adding rules to an existing firewall policy
add_rules:
-
protocol: TCP
port_from: 70
port_to: 70
source: 0.0.0.0
-
protocol: TCP
port_from: 60
port_to: 60
source: 0.0.0.0
wait: true
wait_timeout: 500
state: update
# Remove rules from a firewall policy.
- oneandone_firewall_policy:
auth_token: oneandone_private_api_key
firewall_policy: ansible-firewall-policy-updated
remove_rules:
- rule_id #1
- rule_id #2
- ...
wait: true
wait_timeout: 500
state: update
'''
RETURN = '''
firewall_policy:
description: Information about the firewall policy that was processed
type: dict
sample: '{"id": "92B74394A397ECC3359825C1656D67A6", "name": "Default Policy"}'
returned: always
'''
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.oneandone import (
get_firewall_policy,
get_server,
OneAndOneResources,
wait_for_resource_creation_completion
)
HAS_ONEANDONE_SDK = True
try:
import oneandone.client
except ImportError:
HAS_ONEANDONE_SDK = False
def _check_mode(module, result):
if module.check_mode:
module.exit_json(
changed=result
)
def _add_server_ips(module, oneandone_conn, firewall_id, server_ids):
"""
Assigns servers to a firewall policy.
"""
try:
attach_servers = []
for _server_id in server_ids:
server = get_server(oneandone_conn, _server_id, True)
attach_server = oneandone.client.AttachServer(
server_id=server['id'],
server_ip_id=next(iter(server['ips'] or []), None)['id']
)
attach_servers.append(attach_server)
if module.check_mode:
if attach_servers:
return True
return False
firewall_policy = oneandone_conn.attach_server_firewall_policy(
firewall_id=firewall_id,
server_ips=attach_servers)
return firewall_policy
except Exception as e:
module.fail_json(msg=str(e))
def _remove_firewall_server(module, oneandone_conn, firewall_id, server_ip_id):
"""
Unassigns a server/IP from a firewall policy.
"""
try:
if module.check_mode:
firewall_server = oneandone_conn.get_firewall_server(
firewall_id=firewall_id,
server_ip_id=server_ip_id)
if firewall_server:
return True
return False
firewall_policy = oneandone_conn.remove_firewall_server(
firewall_id=firewall_id,
server_ip_id=server_ip_id)
return firewall_policy
except Exception as e:
module.fail_json(msg=str(e))
def _add_firewall_rules(module, oneandone_conn, firewall_id, rules):
"""
Adds new rules to a firewall policy.
"""
try:
firewall_rules = []
for rule in rules:
firewall_rule = oneandone.client.FirewallPolicyRule(
protocol=rule['protocol'],
port_from=rule['port_from'],
port_to=rule['port_to'],
source=rule['source'])
firewall_rules.append(firewall_rule)
if module.check_mode:
firewall_policy_id = get_firewall_policy(oneandone_conn, firewall_id)
if (firewall_rules and firewall_policy_id):
return True
return False
firewall_policy = oneandone_conn.add_firewall_policy_rule(
firewall_id=firewall_id,
firewall_policy_rules=firewall_rules
)
return firewall_policy
except Exception as e:
module.fail_json(msg=str(e))
def _remove_firewall_rule(module, oneandone_conn, firewall_id, rule_id):
"""
Removes a rule from a firewall policy.
"""
try:
if module.check_mode:
rule = oneandone_conn.get_firewall_policy_rule(
firewall_id=firewall_id,
rule_id=rule_id)
if rule:
return True
return False
firewall_policy = oneandone_conn.remove_firewall_rule(
firewall_id=firewall_id,
rule_id=rule_id
)
return firewall_policy
except Exception as e:
module.fail_json(msg=str(e))
def update_firewall_policy(module, oneandone_conn):
"""
Updates a firewall policy based on input arguments.
    Firewall rules and server IPs can be added to or removed from
    a firewall policy. The firewall policy name and description can
    be updated as well.
module : AnsibleModule object
oneandone_conn: authenticated oneandone object
"""
try:
firewall_policy_id = module.params.get('firewall_policy')
name = module.params.get('name')
description = module.params.get('description')
add_server_ips = module.params.get('add_server_ips')
remove_server_ips = module.params.get('remove_server_ips')
add_rules = module.params.get('add_rules')
remove_rules = module.params.get('remove_rules')
changed = False
firewall_policy = get_firewall_policy(oneandone_conn, firewall_policy_id, True)
if firewall_policy is None:
_check_mode(module, False)
if name or description:
_check_mode(module, True)
firewall_policy = oneandone_conn.modify_firewall(
firewall_id=firewall_policy['id'],
name=name,
description=description)
changed = True
if add_server_ips:
if module.check_mode:
_check_mode(module, _add_server_ips(module,
oneandone_conn,
firewall_policy['id'],
add_server_ips))
firewall_policy = _add_server_ips(module, oneandone_conn, firewall_policy['id'], add_server_ips)
changed = True
if remove_server_ips:
chk_changed = False
for server_ip_id in remove_server_ips:
if module.check_mode:
chk_changed |= _remove_firewall_server(module,
oneandone_conn,
firewall_policy['id'],
server_ip_id)
_remove_firewall_server(module,
oneandone_conn,
firewall_policy['id'],
server_ip_id)
_check_mode(module, chk_changed)
firewall_policy = get_firewall_policy(oneandone_conn, firewall_policy['id'], True)
changed = True
if add_rules:
firewall_policy = _add_firewall_rules(module,
oneandone_conn,
firewall_policy['id'],
add_rules)
_check_mode(module, firewall_policy)
changed = True
if remove_rules:
chk_changed = False
for rule_id in remove_rules:
if module.check_mode:
chk_changed |= _remove_firewall_rule(module,
oneandone_conn,
firewall_policy['id'],
rule_id)
_remove_firewall_rule(module,
oneandone_conn,
firewall_policy['id'],
rule_id)
_check_mode(module, chk_changed)
firewall_policy = get_firewall_policy(oneandone_conn, firewall_policy['id'], True)
changed = True
return (changed, firewall_policy)
except Exception as e:
module.fail_json(msg=str(e))
def create_firewall_policy(module, oneandone_conn):
"""
Create a new firewall policy.
module : AnsibleModule object
oneandone_conn: authenticated oneandone object
"""
try:
name = module.params.get('name')
description = module.params.get('description')
rules = module.params.get('rules')
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
wait_interval = module.params.get('wait_interval')
firewall_rules = []
for rule in rules:
firewall_rule = oneandone.client.FirewallPolicyRule(
protocol=rule['protocol'],
port_from=rule['port_from'],
port_to=rule['port_to'],
source=rule['source'])
firewall_rules.append(firewall_rule)
firewall_policy_obj = oneandone.client.FirewallPolicy(
name=name,
description=description
)
_check_mode(module, True)
firewall_policy = oneandone_conn.create_firewall_policy(
firewall_policy=firewall_policy_obj,
firewall_policy_rules=firewall_rules
)
if wait:
wait_for_resource_creation_completion(
oneandone_conn,
OneAndOneResources.firewall_policy,
firewall_policy['id'],
wait_timeout,
wait_interval)
firewall_policy = get_firewall_policy(oneandone_conn, firewall_policy['id'], True) # refresh
changed = True if firewall_policy else False
_check_mode(module, False)
return (changed, firewall_policy)
except Exception as e:
module.fail_json(msg=str(e))
def remove_firewall_policy(module, oneandone_conn):
"""
Removes a firewall policy.
module : AnsibleModule object
oneandone_conn: authenticated oneandone object
"""
try:
fp_id = module.params.get('name')
firewall_policy_id = get_firewall_policy(oneandone_conn, fp_id)
if module.check_mode:
if firewall_policy_id is None:
_check_mode(module, False)
_check_mode(module, True)
firewall_policy = oneandone_conn.delete_firewall(firewall_policy_id)
changed = True if firewall_policy else False
return (changed, {
'id': firewall_policy['id'],
'name': firewall_policy['name']
})
except Exception as e:
module.fail_json(msg=str(e))
def main():
module = AnsibleModule(
argument_spec=dict(
auth_token=dict(
type='str',
default=os.environ.get('ONEANDONE_AUTH_TOKEN')),
api_url=dict(
type='str',
default=os.environ.get('ONEANDONE_API_URL')),
name=dict(type='str'),
firewall_policy=dict(type='str'),
description=dict(type='str'),
rules=dict(type='list', default=[]),
add_server_ips=dict(type='list', default=[]),
remove_server_ips=dict(type='list', default=[]),
add_rules=dict(type='list', default=[]),
remove_rules=dict(type='list', default=[]),
wait=dict(type='bool', default=True),
wait_timeout=dict(type='int', default=600),
wait_interval=dict(type='int', default=5),
state=dict(type='str', default='present', choices=['present', 'absent', 'update']),
),
supports_check_mode=True
)
if not HAS_ONEANDONE_SDK:
module.fail_json(msg='1and1 required for this module')
if not module.params.get('auth_token'):
module.fail_json(
msg='The "auth_token" parameter or ' +
'ONEANDONE_AUTH_TOKEN environment variable is required.')
if not module.params.get('api_url'):
oneandone_conn = oneandone.client.OneAndOneService(
api_token=module.params.get('auth_token'))
else:
oneandone_conn = oneandone.client.OneAndOneService(
api_token=module.params.get('auth_token'), api_url=module.params.get('api_url'))
state = module.params.get('state')
if state == 'absent':
if not module.params.get('name'):
module.fail_json(
msg="'name' parameter is required to delete a firewall policy.")
try:
(changed, firewall_policy) = remove_firewall_policy(module, oneandone_conn)
except Exception as e:
module.fail_json(msg=str(e))
elif state == 'update':
if not module.params.get('firewall_policy'):
module.fail_json(
msg="'firewall_policy' parameter is required to update a firewall policy.")
try:
(changed, firewall_policy) = update_firewall_policy(module, oneandone_conn)
except Exception as e:
module.fail_json(msg=str(e))
elif state == 'present':
for param in ('name', 'rules'):
if not module.params.get(param):
module.fail_json(
msg="%s parameter is required for new firewall policies." % param)
try:
(changed, firewall_policy) = create_firewall_policy(module, oneandone_conn)
except Exception as e:
module.fail_json(msg=str(e))
module.exit_json(changed=changed, firewall_policy=firewall_policy)
if __name__ == '__main__':
main()
|
n4hy/gnuradio
|
refs/heads/master
|
gr-pager/apps/usrp_flex_band.py
|
6
|
#!/usr/bin/env python
#
# Copyright 2006,2007,2009,2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gru, uhd, optfir, eng_notation, blks2, pager
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import sys
class app_top_block(gr.top_block):
def __init__(self, options, queue):
gr.top_block.__init__(self, "usrp_flex_all")
if options.from_file is not None:
self.u = gr.file_source(gr.sizeof_gr_complex, options.from_file)
if options.verbose:
print "Reading samples from file", options.from_file
else:
# Set up USRP source
self.u = uhd.usrp_source(device_addr=options.address, stream_args=uhd.stream_args('fc32'))
# Grab 1 MHz of spectrum
# (A UHD facility to get sample rate range and granularity would be useful)
self.u.set_samp_rate(1e6)
rate = self.u.get_samp_rate()
if rate != 1e6:
print "Unable to set required sample rate of 1 Msps (got %f)" % rate
sys.exit(1)
# Tune daughterboard
r = self.u.set_center_freq(options.freq+options.calibration, 0)
if not r:
frange = self.u.get_freq_range()
sys.stderr.write(("\nRequested frequency (%f) out or range [%f, %f]\n") % \
(freq, frange.start(), frange.stop()))
sys.exit(1)
# if no gain was specified, use the mid-point in dB
if options.rx_gain is None:
grange = self.u.get_gain_range()
options.rx_gain = float(grange.start()+grange.stop())/2.0
print "\nNo gain specified."
print "Setting gain to %f (from [%f, %f])" % \
(options.rx_gain, grange.start(), grange.stop())
self.u.set_gain(options.rx_gain, 0)
taps = gr.firdes.low_pass(1.0,
1.0,
1.0/40.0*0.4,
1.0/40.0*0.1,
gr.firdes.WIN_HANN)
if options.verbose:
print "Channel filter has", len(taps), "taps"
bank = blks2.analysis_filterbank(40, taps)
self.connect(self.u, bank)
if options.log and options.from_file == None:
src_sink = gr.file_sink(gr.sizeof_gr_complex, 'usrp.dat')
self.connect(self.u, src_sink)
for i in range(40):
if i < 20:
freq = options.freq+i*25e3
else:
freq = options.freq-0.5e6+(i-20)*25e3
self.connect((bank, i), pager.flex_demod(queue, freq, options.verbose, options.log))
if options.log:
self.connect((bank, i), gr.file_sink(gr.sizeof_gr_complex, 'chan_'+'%3.3f'%(freq/1e6)+'.dat'))
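        # Illustrative mapping (not part of the original file): with
        # options.freq set to e.g. 930.5e6, channels i = 0..19 land on
        # 930.500, 930.525, ... 930.975 MHz and channels i = 20..39 on
        # 930.000, 930.025, ... 930.475 MHz -- the captured 1 MHz of
        # spectrum split into 40 contiguous 25 kHz FLEX channels.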
def get_options():
parser = OptionParser(option_class=eng_option)
parser.add_option('-f', '--freq', type="eng_float", default=None,
help="Set receive frequency to FREQ [default=%default]",
metavar="FREQ")
parser.add_option("-a", "--address", type="string", default="addr=192.168.10.2",
help="Address of UHD device, [default=%default]")
parser.add_option("-A", "--antenna", type="string", default=None,
help="select Rx Antenna where appropriate")
parser.add_option("", "--rx-gain", type="eng_float", default=None,
help="set receive gain in dB (default is midpoint)")
parser.add_option("-c", "--calibration", type="eng_float", default=0.0,
help="set frequency offset to Hz", metavar="Hz")
parser.add_option("-v", "--verbose", action="store_true", default=False)
parser.add_option("-l", "--log", action="store_true", default=False,
help="log flowgraph to files (LOTS of data)")
parser.add_option("-F", "--from-file", default=None,
help="read samples from file instead of USRP")
(options, args) = parser.parse_args()
if len(args) > 0:
print "Run 'usrp_flex_band.py -h' for options."
sys.exit(1)
if (options.freq is None):
sys.stderr.write("You must specify -f FREQ or --freq FREQ\n")
sys.exit(1)
return (options, args)
if __name__ == "__main__":
(options, args) = get_options()
queue = gr.msg_queue()
tb = app_top_block(options, queue)
runner = pager.queue_runner(queue)
try:
tb.run()
except KeyboardInterrupt:
pass
runner.end()
|
yuruofeifei/mxnet
|
refs/heads/master
|
example/bi-lstm-sort/rnn_model.py
|
19
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C0111,too-many-arguments,too-many-instance-attributes,too-many-locals,redefined-outer-name,fixme
# pylint: disable=superfluous-parens, no-member, invalid-name
import sys
import numpy as np
import mxnet as mx
from lstm import LSTMState, LSTMParam, lstm, bi_lstm_inference_symbol
class BiLSTMInferenceModel(object):
def __init__(self,
seq_len,
input_size,
num_hidden,
num_embed,
num_label,
arg_params,
ctx=mx.cpu(),
dropout=0.):
self.sym = bi_lstm_inference_symbol(input_size, seq_len,
num_hidden,
num_embed,
num_label,
dropout)
batch_size = 1
init_c = [('l%d_init_c'%l, (batch_size, num_hidden)) for l in range(2)]
init_h = [('l%d_init_h'%l, (batch_size, num_hidden)) for l in range(2)]
data_shape = [("data", (batch_size, seq_len, ))]
input_shapes = dict(init_c + init_h + data_shape)
self.executor = self.sym.simple_bind(ctx=ctx, **input_shapes)
for key in self.executor.arg_dict.keys():
if key in arg_params:
arg_params[key].copyto(self.executor.arg_dict[key])
state_name = []
for i in range(2):
state_name.append("l%d_init_c" % i)
state_name.append("l%d_init_h" % i)
self.states_dict = dict(zip(state_name, self.executor.outputs[1:]))
self.input_arr = mx.nd.zeros(data_shape[0][1])
def forward(self, input_data, new_seq=False):
if new_seq:
for key in self.states_dict.keys():
self.executor.arg_dict[key][:] = 0.
input_data.copyto(self.executor.arg_dict["data"])
self.executor.forward()
for key in self.states_dict.keys():
self.states_dict[key].copyto(self.executor.arg_dict[key])
prob = self.executor.outputs[0].asnumpy()
return prob
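# A minimal usage sketch (hypothetical shapes; `arg_params` is assumed to
# come from a trained checkpoint, e.g. via mx.model.load_checkpoint):
#
#   model = BiLSTMInferenceModel(seq_len=10, input_size=128, num_hidden=50,
#                                num_embed=32, num_label=128,
#                                arg_params=arg_params)
#   probs = model.forward(mx.nd.zeros((1, 10)), new_seq=True)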
|
BrotherPhil/django
|
refs/heads/master
|
tests/migrations/test_migrations_squashed_complex_multi_apps/app1/1_auto.py
|
1155
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
operations = [
migrations.RunPython(migrations.RunPython.noop)
]
|
carze/cutlass
|
refs/heads/master
|
examples/microbiome_assay_prep.py
|
2
|
#!/usr/bin/env python
# pylint: disable=C0111, C0325
import logging
import sys
import tempfile
from pprint import pprint
from cutlass import MicrobiomeAssayPrep
from cutlass import iHMPSession
username = "test"
password = "test"
def set_logging():
""" Setup logging. """
root = logging.getLogger()
root.setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
root.addHandler(ch)
set_logging()
session = iHMPSession(username, password)
print("Required fields: ")
print(MicrobiomeAssayPrep.required_fields())
prep = MicrobiomeAssayPrep()
prep.comment = "Hello world!"
prep.pride_id = "PRIDE ID"
prep.center = "the center"
prep.contact = "first name last name"
prep.sample_name = "name of the sample"
prep.experiment_type = "PRIDE:0000429, Shotgun proteomics"
prep.prep_id = "the prep id"
prep.storage_duration = 30
prep.study = "prediabetes"
prep.title = "the title"
# Optional properties
prep.short_label = "the short label"
prep.url = ["http://prep.url"]
prep.species = "the species"
prep.cell_type = "the cell type"
prep.tissue = "test tissue"
prep.reference = "the reference"
prep.protocol_name = "name of the protocol"
prep.protocol_steps = "steps of the protocol"
prep.exp_description = "exp description"
prep.sample_description = "description of the sample"
print("Creating a temp file for example/testing purposes.")
temp_file = tempfile.NamedTemporaryFile(delete=False).name
print("Local file: %s" % temp_file)
# MicrobiomeAssayPreps are 'prepared_from' a Sample
prep.links = {"prepared_from": ["610a4911a5ca67de12cdc1e4b4011876"]}
prep.tags = ["prep", "ihmp"]
prep.add_tag("another")
prep.add_tag("and_another")
print(prep.to_json(indent=2))
if prep.is_valid():
print("Valid!")
success = prep.save()
if success:
prep_id = prep.id
print("Succesfully saved prep ID: %s" % prep_id)
prep2 = MicrobiomeAssayPrep.load(prep_id)
print(prep2.to_json(indent=2))
deletion_success = prep.delete()
if deletion_success:
print("Deleted prep with ID %s" % prep_id)
else:
print("Deletion of prep %s failed." % prep_id)
else:
print("Save failed")
else:
print("Invalid...")
validation_errors = prep.validate()
pprint(validation_errors)
|
pigshell/nhnick
|
refs/heads/vnc-websocket
|
test/www/__init__.py
|
121
|
# This file makes test/www/ into a "package" so that
# importing Python response hooks works correctly.
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
refs/heads/master
|
galaxy-dist/eggs/mercurial-2.2.3-py2.7-linux-x86_64-ucs4.egg/mercurial/hgweb/protocol.py
|
1
|
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import cgi, cStringIO, zlib, urllib
from mercurial import util, wireproto
from common import HTTP_OK
HGTYPE = 'application/mercurial-0.1'
HGERRTYPE = 'application/hg-error'
class webproto(object):
def __init__(self, req, ui):
self.req = req
self.response = ''
self.ui = ui
def getargs(self, args):
knownargs = self._args()
data = {}
keys = args.split()
for k in keys:
if k == '*':
star = {}
for key in knownargs.keys():
if key != 'cmd' and key not in keys:
star[key] = knownargs[key][0]
data['*'] = star
else:
data[k] = knownargs[k][0]
return [data[k] for k in keys]
def _args(self):
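# Large argument sets overflow URL length limits, so Mercurial clients
# spill them into numbered X-HgArg-<N> request headers; reassemble the
# chunks below and fold them back into the form arguments.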
args = self.req.form.copy()
chunks = []
i = 1
while True:
h = self.req.env.get('HTTP_X_HGARG_' + str(i))
if h is None:
break
chunks += [h]
i += 1
args.update(cgi.parse_qs(''.join(chunks), keep_blank_values=True))
return args
def getfile(self, fp):
length = int(self.req.env['CONTENT_LENGTH'])
for s in util.filechunkiter(self.req, limit=length):
fp.write(s)
def redirect(self):
self.oldio = self.ui.fout, self.ui.ferr
self.ui.ferr = self.ui.fout = cStringIO.StringIO()
def restore(self):
val = self.ui.fout.getvalue()
self.ui.ferr, self.ui.fout = self.oldio
return val
def groupchunks(self, cg):
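# Stream the changegroup back zlib-compressed in 4 KB reads so that large
# bundles are never held in memory all at once.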
z = zlib.compressobj()
while True:
chunk = cg.read(4096)
if not chunk:
break
yield z.compress(chunk)
yield z.flush()
def _client(self):
return 'remote:%s:%s:%s' % (
self.req.env.get('wsgi.url_scheme') or 'http',
urllib.quote(self.req.env.get('REMOTE_HOST', '')),
urllib.quote(self.req.env.get('REMOTE_USER', '')))
def iscmd(cmd):
return cmd in wireproto.commands
def call(repo, req, cmd):
p = webproto(req, repo.ui)
rsp = wireproto.dispatch(repo, p, cmd)
if isinstance(rsp, str):
req.respond(HTTP_OK, HGTYPE, length=len(rsp))
return [rsp]
elif isinstance(rsp, wireproto.streamres):
req.respond(HTTP_OK, HGTYPE)
return rsp.gen
elif isinstance(rsp, wireproto.pushres):
val = p.restore()
req.respond(HTTP_OK, HGTYPE)
return ['%d\n%s' % (rsp.res, val)]
elif isinstance(rsp, wireproto.pusherr):
# drain the incoming bundle
req.drain()
p.restore()
rsp = '0\n%s\n' % rsp.res
req.respond(HTTP_OK, HGTYPE, length=len(rsp))
return [rsp]
elif isinstance(rsp, wireproto.ooberror):
rsp = rsp.message
req.respond(HTTP_OK, HGERRTYPE, length=len(rsp))
return [rsp]
|
latusrepo/latus
|
refs/heads/master
|
latus/aws/util/aws_util.py
|
2
|
from pprint import pprint
import latus.aws.aws_access
def dump_all(aws_local):
access_aws = latus.aws.aws_access.AWSAccess(aws_local)
db_client = access_aws.get_db_client()
s3_client = access_aws.get_s3_client()
s3_resource = access_aws.get_s3_resource()
buckets = s3_client.list_buckets()['Buckets']
print('Buckets:')
pprint(buckets)
for bucket in buckets:
bucket_resource = s3_resource.Bucket(bucket['Name'])
for bucket_object in bucket_resource.objects.all():
print(bucket_object)
print()
tables = db_client.list_tables()['TableNames']
print('Tables : %s' % tables)
for table in tables:
paginator = db_client.get_paginator('scan')
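# The 'scan' paginator transparently follows DynamoDB's LastEvaluatedKey,
# so the loop below collects every item across all result pages.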
items = []
for page in paginator.paginate(TableName=table):
items.extend(page['Items'])
print('Table : %s' % table)
pprint(items)
print()
|
buildbot/buildbot
|
refs/heads/master
|
worker/buildbot_worker/compat.py
|
8
|
# coding=utf-8
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
Helpers for handling compatibility differences
between Python 2 and Python 3.
"""
from __future__ import absolute_import
from __future__ import print_function
from future.utils import text_type
if str != bytes:
# On Python 3 and higher, str and bytes
# are not equivalent. We must use StringIO for
# doing io on native strings.
from io import StringIO as NativeStringIO
else:
# On Python 2 and older, str and bytes
# are equivalent. We must use BytesIO for
# doing io on native strings.
from io import BytesIO as NativeStringIO
def bytes2NativeString(x, encoding='utf-8'):
"""
Convert C{bytes} to a native C{str}.
On Python 3 and higher, str and bytes
are not equivalent. In this case, decode
the bytes, and return a native string.
On Python 2 and lower, str and bytes
are equivalent. In this case, just
return the native string.
@param x: a string of type C{bytes}
@param encoding: an optional codec, default: 'utf-8'
@return: a string of type C{str}
"""
if isinstance(x, bytes) and str != bytes:
return x.decode(encoding)
return x
def unicode2bytes(x, encoding='utf-8', errors='strict'):
"""
Convert a unicode string to C{bytes}.
@param x: a unicode string, of type C{unicode} on Python 2,
or C{str} on Python 3.
@param encoding: an optional codec, default: 'utf-8'
@param errors: error handling scheme, default 'strict'
@return: a string of type C{bytes}
"""
if isinstance(x, text_type):
x = x.encode(encoding, errors)
return x
def bytes2unicode(x, encoding='utf-8', errors='strict'):
"""
Convert a C{bytes} to a unicode string.
@param x: a string of type C{bytes}; a unicode string or C{None}
is returned unchanged.
@param encoding: an optional codec, default: 'utf-8'
@param errors: error handling scheme, default 'strict'
@return: a unicode string of type C{unicode} on Python 2, or
C{str} on Python 3.
"""
if isinstance(x, (text_type, type(None))):
return x
return text_type(x, encoding, errors)
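# A quick sanity sketch of the converters (Python 3 semantics assumed):
#   unicode2bytes(u'caf\xe9')      -> b'caf\xc3\xa9'
#   bytes2unicode(b'caf\xc3\xa9')  -> u'caf\xe9'
#   bytes2unicode(None)            -> None (passed through unchanged)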
__all__ = [
"NativeStringIO",
"bytes2NativeString",
"bytes2unicode",
"unicode2bytes"
]
|
lucafavatella/intellij-community
|
refs/heads/cli-wip
|
python/testData/quickFixes/PyMakeMethodStaticQuickFixTest/usage_after.py
|
75
|
class A:
@staticmethod
def m(x):
return x
print A.m(1)
|
ResearchSoftwareInstitute/MyHPOM
|
refs/heads/myhpom-develop
|
hs_geographic_feature_resource/models.py
|
1
|
from lxml import etree
from django.db import models
from django.contrib.contenttypes.fields import GenericRelation
from mezzanine.pages.page_processors import processor_for
from dominate.tags import legend, table, tbody, tr, td, th, h4, div
from hs_core.models import BaseResource, ResourceManager, resource_processor, \
CoreMetaData, AbstractMetaDataElement
from hs_core.hydroshare.utils import add_metadata_element_to_xml
class OriginalCoverage(AbstractMetaDataElement):
term = 'OriginalCoverage'
northlimit = models.FloatField(null=False, blank=False)
southlimit = models.FloatField(null=False, blank=False)
westlimit = models.FloatField(null=False, blank=False)
eastlimit = models.FloatField(null=False, blank=False)
projection_string = models.TextField(null=True, blank=True)
projection_name = models.TextField(max_length=256, null=True, blank=True)
datum = models.TextField(max_length=256, null=True, blank=True)
unit = models.TextField(max_length=256, null=True, blank=True)
class Meta:
# OriginalCoverage element is not repeatable
unique_together = ("content_type", "object_id")
def get_html(self, pretty=True):
"""Generates html code for displaying data for this metadata element"""
root_div = div(cls="col-xs-6 col-sm-6", style="margin-bottom:40px;")
def get_th(heading_name):
return th(heading_name, cls="text-muted")
with root_div:
legend('Spatial Reference')
with table(cls='custom-table'):
with tbody():
with tr():
get_th('Coordinate Reference System')
td(self.projection_name)
with tr():
get_th('Datum')
td(self.datum)
with tr():
get_th('Coordinate String Text')
td(self.projection_string)
h4('Extent')
with table(cls='custom-table'):
with tbody():
with tr():
get_th('North')
td(self.northlimit)
with tr():
get_th('West')
td(self.westlimit)
with tr():
get_th('South')
td(self.southlimit)
with tr():
get_th('East')
td(self.eastlimit)
with tr():
get_th('Unit')
td(self.unit)
return root_div.render(pretty=pretty)
@classmethod
def get_html_form(cls, resource, element=None, allow_edit=True, file_type=False):
"""Generates html form code for an instance of this metadata element so
that this element can be edited"""
from .forms import OriginalCoverageForm
ori_cov_dict = {}
if element is not None:
ori_cov_dict['northlimit'] = element.northlimit
ori_cov_dict['eastlimit'] = element.eastlimit
ori_cov_dict['southlimit'] = element.southlimit
ori_cov_dict['westlimit'] = element.westlimit
ori_cov_dict['projection_string'] = element.projection_string
ori_cov_dict['projection_name'] = element.projection_name
ori_cov_dict['datum'] = element.datum
ori_cov_dict['unit'] = element.unit
orig_coverage_form = OriginalCoverageForm(initial=ori_cov_dict,
res_short_id=resource.short_id if
resource else None,
allow_edit=allow_edit,
element_id=element.id if element else None,
file_type=file_type)
return orig_coverage_form
def add_to_xml_container(self, container):
"""Generates xml+rdf representation of the metadata element"""
NAMESPACES = CoreMetaData.NAMESPACES
cov = etree.SubElement(container, '{%s}spatialReference' % NAMESPACES['hsterms'])
cov_term = '{%s}' + 'box'
coverage_terms = etree.SubElement(cov, cov_term % NAMESPACES['hsterms'])
rdf_coverage_value = etree.SubElement(coverage_terms,
'{%s}value' % NAMESPACES['rdf'])
# original coverage is of box type
cov_value = 'northlimit=%s; eastlimit=%s; southlimit=%s; westlimit=%s; units=%s' \
% (self.northlimit, self.eastlimit,
self.southlimit, self.westlimit,
self.unit)
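# e.g. (illustrative values only):
# 'northlimit=49.0; eastlimit=-120.0; southlimit=45.0; westlimit=-124.0; units=Decimal degrees'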
if self.projection_name:
cov_value += '; projection_name={}'.format(self.projection_name)
if self.projection_string:
cov_value += '; projection_string={}'.format(self.projection_string)
if self.datum:
cov_value += '; datum={}'.format(self.datum)
rdf_coverage_value.text = cov_value
class FieldInformation(AbstractMetaDataElement):
term = 'FieldInformation'
fieldName = models.CharField(max_length=128, null=False, blank=False)
fieldType = models.CharField(max_length=128, null=False, blank=False)
fieldTypeCode = models.CharField(max_length=50, null=True, blank=True)
fieldWidth = models.IntegerField(null=True, blank=True)
fieldPrecision = models.IntegerField(null=True, blank=True)
def get_html(self, pretty=True):
"""Generates html code for displaying data for this metadata element"""
field_infor_tr = tr(cls='row')
with field_infor_tr:
td(self.fieldName)
td(self.fieldType)
td(self.fieldWidth)
td(self.fieldPrecision)
if pretty:
return field_infor_tr.render(pretty=pretty)
return field_infor_tr
def add_to_xml_container(self, container):
"""Generates xml+rdf representation of the metadata element"""
# element attribute name : name in xml
md_fields = {
"fieldName": "fieldName",
"fieldType": "fieldType",
"fieldTypeCode": "fieldTypeCode",
"fieldWidth": "fieldWidth",
"fieldPrecision": "fieldPrecision"
}
add_metadata_element_to_xml(container, self, md_fields)
class GeometryInformation(AbstractMetaDataElement):
term = 'GeometryInformation'
featureCount = models.IntegerField(null=False, blank=False, default=0)
geometryType = models.CharField(max_length=128, null=False, blank=False)
class Meta:
# GeometryInformation element is not repeatable
unique_together = ("content_type", "object_id")
def get_html(self, pretty=True):
"""Generates html code for displaying data for this metadata element"""
root_div = div(cls="col-xs-12 col-sm-12", style="margin-bottom:40px;")
def get_th(heading_name):
return th(heading_name, cls="text-muted")
with root_div:
legend('Geometry Information')
with table(cls='custom-table'):
with tbody():
with tr():
get_th('Geometry Type')
td(self.geometryType)
with tr():
get_th('Feature Count')
td(self.featureCount)
return root_div.render(pretty=pretty)
@classmethod
def get_html_form(cls, resource, element=None, allow_edit=True, file_type=False):
"""Generates html form code for an instance of this metadata element so
that this element can be edited"""
from .forms import GeometryInformationForm
geom_info_data_dict = {}
if element is not None:
geom_info_data_dict['geometryType'] = element.geometryType
geom_info_data_dict['featureCount'] = element.featureCount
geom_information_form = GeometryInformationForm(initial=geom_info_data_dict,
res_short_id=resource.short_id if
resource else None,
allow_edit=allow_edit,
element_id=element.id if element else None,
file_type=file_type)
return geom_information_form
def add_to_xml_container(self, container):
"""Generates xml+rdf representation of the metadata element"""
# element attribute name : name in xml
md_fields = {
"geometryType": "geometryType",
"featureCount": "featureCount"
}
add_metadata_element_to_xml(container, self, md_fields)
class GeographicFeatureResource(BaseResource):
objects = ResourceManager("GeographicFeatureResource")
@property
def metadata(self):
md = GeographicFeatureMetaData()
return self._get_metadata(md)
@classmethod
def get_supported_upload_file_types(cls):
# See Shapefile format:
# http://resources.arcgis.com/en/help/main/10.2/index.html#//005600000003000000
return (".zip", ".shp", ".shx", ".dbf", ".prj",
".sbx", ".sbn", ".cpg", ".xml", ".fbn",
".fbx", ".ain", ".aih", ".atx", ".ixs",
".mxs")
def has_required_content_files(self):
if self.files.all().count() < 3:
return False
file_extensions = [f.extension for f in self.files.all()]
return all(ext in file_extensions for ext in ['.shp', '.shx', '.dbf'])
def get_hs_term_dict(self):
# get existing hs_term_dict from base class
hs_term_dict = super(GeographicFeatureResource, self).get_hs_term_dict()
geometryinformation = self.metadata.geometryinformation
if geometryinformation is not None:
hs_term_dict["HS_GFR_FEATURE_COUNT"] = geometryinformation.featureCount
else:
hs_term_dict["HS_GFR_FEATURE_COUNT"] = 0
return hs_term_dict
class Meta:
verbose_name = 'Geographic Feature (ESRI Shapefiles)'
proxy = True
processor_for(GeographicFeatureResource)(resource_processor)
class GeographicFeatureMetaDataMixin(models.Model):
"""This class must be the first class in the multi-inheritance list of classes"""
geometryinformations = GenericRelation(GeometryInformation)
fieldinformations = GenericRelation(FieldInformation)
originalcoverages = GenericRelation(OriginalCoverage)
class Meta:
abstract = True
@property
def geometryinformation(self):
return self.geometryinformations.all().first()
@property
def originalcoverage(self):
return self.originalcoverages.all().first()
@classmethod
def get_supported_element_names(cls):
# get the names of all core metadata elements
elements = super(GeographicFeatureMetaDataMixin, cls).get_supported_element_names()
# add the name of any additional element to the list
elements.append('FieldInformation')
elements.append('OriginalCoverage')
elements.append('GeometryInformation')
return elements
def has_all_required_elements(self):
if self.get_required_missing_elements():
return False
return True
def get_required_missing_elements(self): # show missing required meta
missing_required_elements = super(GeographicFeatureMetaDataMixin, self). \
get_required_missing_elements()
if not (self.coverages.all().filter(type='box').first() or
self.coverages.all().filter(type='point').first()):
missing_required_elements.append('Spatial Coverage')
if not self.originalcoverage:
missing_required_elements.append('Spatial Reference')
if not self.geometryinformation:
missing_required_elements.append('Geometry Information')
return missing_required_elements
def delete_all_elements(self):
super(GeographicFeatureMetaDataMixin, self).delete_all_elements()
self.reset()
def reset(self):
"""
This helper method should be used to reset metadata when essential files are removed
from the resource
:return:
"""
self.geometryinformations.all().delete()
self.fieldinformations.all().delete()
self.originalcoverages.all().delete()
class GeographicFeatureMetaData(GeographicFeatureMetaDataMixin, CoreMetaData):
@property
def resource(self):
return GeographicFeatureResource.objects.filter(object_id=self.id).first()
def get_xml(self, pretty_print=True, include_format_elements=True):
# get the xml string representation of the core metadata elements
xml_string = super(GeographicFeatureMetaData, self).get_xml(pretty_print=False)
# create an etree xml object
RDF_ROOT = etree.fromstring(xml_string)
# get root 'Description' element that contains all other elements
container = RDF_ROOT.find('rdf:Description', namespaces=self.NAMESPACES)
if self.geometryinformation:
self.geometryinformation.add_to_xml_container(container)
for field_info in self.fieldinformations.all():
field_info.add_to_xml_container(container)
if self.originalcoverage:
self.originalcoverage.add_to_xml_container(container)
return etree.tostring(RDF_ROOT, pretty_print=pretty_print)
|
tylerreinhart/generator-django-kaiju
|
refs/heads/master
|
app/templates/kaiju/apps/core/context_processors.py
|
5
|
from django.conf import settings
def debug(request):
return {'DEBUG': settings.DEBUG}
|
xin3liang/platform_external_chromium_org
|
refs/heads/master
|
tools/telemetry/telemetry/core/heap/live_heap_object.py
|
82
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class LiveHeapObject(object):
"""Data structure for representing an object in the heap snapshot.
Attributes:
object_id: int, identifier for the object.
type_string: str, describes the type of the node.
class_name: str, describes the class of the JavaScript object
represented by this LiveHeapObject.
edges_to: [RetainingEdge], edges whose end point this LiveHeapObject is.
edges_from: [RetainingEdge], edges whose start point this LiveHeapObject is.
string: str, for string LiveHeapObjects, contains the string the
LiveHeapObject represents. Empty string for LiveHeapObjects which are
not strings.
name: str, how to refer to this LiveHeapObject.
"""
def __init__(self, object_id, type_string, class_name):
"""Initializes the LiveHeapObject object.
Args:
object_id: int, identifier for the LiveHeapObject.
type_string: str, the type of the node.
class_name: str, the class of the object this LiveHeapObject represents.
"""
self.object_id = object_id
self.type_string = type_string
self.class_name = class_name
self.edges_to = []
self.edges_from = []
self.string = ''
self.name = ''
def AddEdgeTo(self, edge):
"""Associates an Edge with the LiveHeapObject (the end point).
Args:
edge: Edge, an edge whose end point this LiveHeapObject is.
"""
self.edges_to.append(edge)
def AddEdgeFrom(self, edge):
"""Associates an Edge with the LiveHeapObject (the start point).
Args:
edge: Edge, an edge whose start point this LiveHeapObject is.
"""
self.edges_from.append(edge)
def __str__(self):
prefix = 'LiveHeapObject(' + str(self.object_id) + ' '
if self.type_string == 'object':
return prefix + self.class_name + ')'
return prefix + self.type_string + ')'
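# Sketch: LiveHeapObject(1, 'object', 'Window') stringifies as
# 'LiveHeapObject(1 Window)'; non-object nodes fall back to the type string.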
|
Juniper/python-neutronclient
|
refs/heads/master
|
neutronclient/tests/unit/test_cli20_subnetpool.py
|
3
|
# Copyright 2015 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import sys
from mox3 import mox
from neutronclient.common import exceptions
from neutronclient.neutron.v2_0 import subnetpool
from neutronclient.tests.unit import test_cli20
class CLITestV20SubnetPoolJSON(test_cli20.CLITestV20Base):
non_admin_status_resources = ['subnetpool']
def setUp(self):
super(CLITestV20SubnetPoolJSON, self).setUp(plurals={'tags': 'tag'})
def test_create_subnetpool_shared(self):
# Create subnetpool: myname.
resource = 'subnetpool'
cmd = subnetpool.CreateSubnetPool(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
min_prefixlen = 30
prefix1 = '10.11.12.0/24'
prefix2 = '12.11.13.0/24'
args = [name, '--min-prefixlen', str(min_prefixlen),
'--pool-prefix', prefix1, '--pool-prefix', prefix2,
'--shared', '--description', 'public pool']
position_names = ['name', 'min_prefixlen', 'prefixes', 'shared']
position_values = [name, min_prefixlen, [prefix1, prefix2], True]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values,
description='public pool')
def test_create_subnetpool_not_shared(self):
# Create subnetpool: myname.
resource = 'subnetpool'
cmd = subnetpool.CreateSubnetPool(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
min_prefixlen = 30
prefix1 = '10.11.12.0/24'
prefix2 = '12.11.13.0/24'
args = [name, '--min-prefixlen', str(min_prefixlen),
'--pool-prefix', prefix1, '--pool-prefix', prefix2]
position_names = ['name', 'min_prefixlen', 'prefixes']
position_values = [name, min_prefixlen, [prefix1, prefix2]]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_subnetpool(self, default='false'):
# Create subnetpool: myname.
resource = 'subnetpool'
cmd = subnetpool.CreateSubnetPool(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
min_prefixlen = 30
prefix1 = '10.11.12.0/24'
prefix2 = '12.11.13.0/24'
args = [name, '--min-prefixlen', str(min_prefixlen),
'--pool-prefix', prefix1, '--pool-prefix', prefix2,
'--is-default', default]
position_names = ['name', 'min_prefixlen', 'prefixes', 'is_default']
position_values = [name, min_prefixlen, [prefix1, prefix2], default]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_subnetpool_default(self):
self.test_create_subnetpool(default='true')
def test_create_subnetpool_with_unicode(self):
# Create subnetpool: u'\u7f51\u7edc'.
resource = 'subnetpool'
cmd = subnetpool.CreateSubnetPool(test_cli20.MyApp(sys.stdout), None)
name = u'\u7f51\u7edc'
myid = 'myid'
min_prefixlen = 30
prefixes = '10.11.12.0/24'
args = [name, '--min-prefixlen', str(min_prefixlen),
'--pool-prefix', prefixes]
position_names = ['name', 'min_prefixlen', 'prefixes']
position_values = [name, min_prefixlen, [prefixes]]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_subnetpool_with_addrscope(self):
# Create subnetpool: myname in addrscope: foo-address-scope
resource = 'subnetpool'
cmd = subnetpool.CreateSubnetPool(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
min_prefixlen = 30
prefix1 = '11.11.11.0/24'
prefix2 = '12.12.12.0/24'
address_scope = 'foo-address-scope'
args = [name, '--min-prefixlen', str(min_prefixlen),
'--pool-prefix', prefix1, '--pool-prefix', prefix2,
'--address-scope', address_scope]
position_names = ['name', 'min_prefixlen', 'prefixes',
'address_scope_id']
position_values = [name, min_prefixlen, [prefix1, prefix2],
address_scope]
self._test_create_resource(resource, cmd, name, myid, args,
position_names, position_values)
def test_create_subnetpool_no_poolprefix(self):
# Should raise an error because --pool-prefix is required
resource = 'subnetpool'
cmd = subnetpool.CreateSubnetPool(test_cli20.MyApp(sys.stdout), None)
name = 'myname'
myid = 'myid'
args = [name]
position_names = ['name']
position_values = [name]
self.assertRaises(SystemExit, self._test_create_resource, resource,
cmd, name, myid, args, position_names,
position_values)
def test_list_subnetpool_pagination(self):
cmd = subnetpool.ListSubnetPool(test_cli20.MyApp(sys.stdout), None)
self.mox.StubOutWithMock(subnetpool.ListSubnetPool, "extend_list")
subnetpool.ListSubnetPool.extend_list(mox.IsA(list), mox.IgnoreArg())
self._test_list_resources_with_pagination("subnetpools", cmd)
self.mox.VerifyAll()
self.mox.UnsetStubs()
def test_list_subnetpools_sort(self):
# List subnetpools:
# --sort-key name --sort-key id --sort-key asc --sort-key desc
resources = "subnetpools"
cmd = subnetpool.ListSubnetPool(test_cli20.MyApp(sys.stdout), None)
self._test_list_resources(resources, cmd,
sort_key=["name", "id"],
sort_dir=["asc", "desc"])
def test_list_subnetpools_limit(self):
# List subnetpools: -P.
resources = "subnetpools"
cmd = subnetpool.ListSubnetPool(test_cli20.MyApp(sys.stdout), None)
self._test_list_resources(resources, cmd, page_size=1000)
def test_update_subnetpool_exception(self):
# Update subnetpool: myid.
resource = 'subnetpool'
cmd = subnetpool.UpdateSubnetPool(test_cli20.MyApp(sys.stdout), None)
self.assertRaises(exceptions.CommandError, self._test_update_resource,
resource, cmd, 'myid', ['myid'], {})
def test_update_subnetpool(self):
# Update subnetpool: myid --name myname.
resource = 'subnetpool'
cmd = subnetpool.UpdateSubnetPool(test_cli20.MyApp(sys.stdout), None)
self._test_update_resource(resource, cmd, 'myid',
['myid', '--name', 'myname',
'--description', ':)'],
{'name': 'myname', 'description': ':)'})
def test_update_subnetpool_with_address_scope(self):
# Update subnetpool: myid --address-scope newscope.
resource = 'subnetpool'
cmd = subnetpool.UpdateSubnetPool(test_cli20.MyApp(sys.stdout), None)
self._test_update_resource(resource, cmd, 'myid',
['myid', '--address-scope', 'newscope'],
{'address_scope_id': 'newscope'}
)
def test_update_subnetpool_with_no_address_scope(self):
# Update subnetpool: myid --no-address-scope.
resource = 'subnetpool'
cmd = subnetpool.UpdateSubnetPool(test_cli20.MyApp(sys.stdout), None)
self._test_update_resource(resource, cmd, 'myid',
['myid', '--no-address-scope'],
{'address_scope_id': None}
)
def test_show_subnetpool(self):
# Show subnetpool: --fields id --fields name myid.
resource = 'subnetpool'
cmd = subnetpool.ShowSubnetPool(test_cli20.MyApp(sys.stdout), None)
args = ['--fields', 'id', '--fields', 'name', self.test_id]
self._test_show_resource(resource, cmd, self.test_id, args,
['id', 'name'])
def test_delete_subnetpool(self):
# Delete subnetpool: subnetpoolid.
resource = 'subnetpool'
cmd = subnetpool.DeleteSubnetPool(test_cli20.MyApp(sys.stdout), None)
myid = 'myid'
args = [myid]
self._test_delete_resource(resource, cmd, myid, args)
|
Brocade-OpenSource/OpenStack-DNRM-Neutron
|
refs/heads/master
|
neutron/plugins/bigswitch/extensions/__init__.py
|
48
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Big Switch Networks, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Kevin Benton, Big Switch Networks, Inc.
|
foyzur/gpdb
|
refs/heads/master
|
gpMgmt/bin/gppylib/programs/__init__.py
|
512
|
# Make sure Python loads the modules of this package via absolute paths.
from os.path import abspath as _abspath
__path__[0] = _abspath(__path__[0])
|
AnalogJ/lexicon
|
refs/heads/master
|
lexicon/tests/providers/test_netcup.py
|
1
|
"""Integration tests for netcup"""
from unittest import TestCase
import pytest
from lexicon.tests.providers.integration_tests import IntegrationTestsV2
# Hook into the testing framework by inheriting unittest.TestCase, and
# reuse the tests which *each and every* implementation of the interface
# must pass, by inheriting from IntegrationTestsV2.
class NetcupProviderTests(TestCase, IntegrationTestsV2):
"""TestCase for netcup."""
provider_name = "netcup"
domain = "coldfix.de"
def _filter_post_data_parameters(self):
# Only the nested keys param[customerid, apikey, apipassword, apisessionid]
# are sensitive, but this method does not appear to support filtering
# nested keys, so the whole 'param' blob is filtered instead.
return ["param"]
def _test_parameters_overrides(self):
return {
"api_endpoint": "https://ccp.netcup.net/run/webservice/servers/endpoint.php?JSON"
}
@pytest.mark.skip(reason="TTL can not be set via netcup API")
def test_provider_when_calling_list_records_after_setting_ttl(self):
pass
|
obi-two/Rebelion
|
refs/heads/master
|
data/scripts/templates/object/mobile/shared_huurton_pup.py
|
2
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_huurton_pup.iff"
result.attribute_template_id = 9
result.stfName("monster_name","huurton")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
mbkumar/pymatgen
|
refs/heads/master
|
pymatgen/util/tests/test_string_utils.py
|
2
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pymatgen.util.string import formula_double_format, latexify, \
latexify_spacegroup, transformation_to_string, htmlify, unicodeify, \
disordered_formula, unicodeify_spacegroup, unicodeify_species
from pymatgen.core import Structure
class FuncTest(unittest.TestCase):
def test_latexify(self):
self.assertEqual(latexify("Li3Fe2(PO4)3"),
"Li$_{3}$Fe$_{2}$(PO$_{4}$)$_{3}$")
self.assertEqual(latexify("Li0.2Na0.8Cl"),
"Li$_{0.2}$Na$_{0.8}$Cl")
def test_latexify_spacegroup(self):
self.assertEqual(latexify_spacegroup("Fd-3m"), "Fd$\\overline{3}$m")
self.assertEqual(latexify_spacegroup("P2_1/c"), "P2$_{1}$/c")
def test_htmlify(self):
self.assertEqual(htmlify("Li3Fe2(PO4)3"),
"Li<sub>3</sub>Fe<sub>2</sub>(PO<sub>4</sub>)<sub>3</sub>")
self.assertEqual(htmlify("Li0.2Na0.8Cl"),
"Li<sub>0.2</sub>Na<sub>0.8</sub>Cl")
def test_unicodeify(self):
self.assertEqual(unicodeify("Li3Fe2(PO4)3"),
"Li₃Fe₂(PO₄)₃")
self.assertRaises(ValueError, unicodeify,
"Li0.2Na0.8Cl")
self.assertEqual(unicodeify_species("O2+"), "O²⁺")
self.assertEqual(unicodeify_spacegroup("F-3m"), "F3̅m")
def test_formula_double_format(self):
self.assertEqual(formula_double_format(1.00), "")
self.assertEqual(formula_double_format(2.00), "2")
self.assertEqual(formula_double_format(2.10), "2.1")
self.assertEqual(formula_double_format(2.10000000002), "2.1")
def test_transformation_to_string(self):
m = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
t = [0, 0, 0]
s = 'x,y,z'
ms = 'mx,my,mz'
abc = 'a,b,c'
self.assertEqual(s, transformation_to_string(m, t))
self.assertEqual(ms, transformation_to_string(m, t, c='m'))
self.assertEqual(abc, transformation_to_string(m, t, components=('a', 'b', 'c')))
m = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
t = [11, 12, 13]
s = 'x+2y+3z+11,4x+5y+6z+12,7x+8y+9z+13'
self.assertEqual(s, transformation_to_string(m, t))
m = [[-1 / 2, -2 / 3, -3 / 4], [-5 / 6, -6 / 7, -7 / 8], [-8 / 9, -9 / 10, -10 / 11]]
t = [-11 / 12, -12 / 13, -13 / 14]
s = '-x/2-2y/3-3z/4-11/12,-5x/6-6y/7-7z/8-12/13,-8x/9-9y/10-10z/11-13/14'
self.assertEqual(s, transformation_to_string(m, t))
def test_disordered_formula(self):
disordered_struct = Structure([[10, 0, 0], [0, 10, 0], [0, 0, 10]],
[{'Cu': 0.25, 'Au': 0.75}],
[[0, 0, 0]])
formula_plain = disordered_formula(disordered_struct, fmt='plain')
formula_latex = disordered_formula(disordered_struct, fmt='LaTeX')
formula_html = disordered_formula(disordered_struct, fmt='HTML')
self.assertEqual(formula_plain, 'CuxAu1-x x=0.25')
self.assertEqual(formula_latex, 'Cu_{x}Au_{1-x} x=0.25')
self.assertEqual(formula_html, 'Cu<sub>x</sub>Au<sub>1-x</sub> x=0.25')
if __name__ == "__main__":
unittest.main()
|
johnchase/scikit-bio
|
refs/heads/master
|
skbio/stats/ordination/_utils.py
|
3
|
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import numpy as np
import numpy.testing as npt
def mean_and_std(a, axis=None, weights=None, with_mean=True, with_std=True,
ddof=0):
"""Compute the weighted average and standard deviation along the
specified axis.
Parameters
----------
a : array_like
Calculate average and standard deviation of these values.
axis : int, optional
Axis along which the statistics are computed. The default is
to compute them on the flattened array.
weights : array_like, optional
An array of weights associated with the values in `a`. Each
value in `a` contributes to the average according to its
associated weight. The weights array can either be 1-D (in
which case its length must be the size of `a` along the given
axis) or of the same shape as `a`. If `weights=None`, then all
data in `a` are assumed to have a weight equal to one.
with_mean : bool, optional, defaults to True
Compute average if True.
with_std : bool, optional, defaults to True
Compute standard deviation if True.
ddof : int, optional, defaults to 0
It means delta degrees of freedom. Variance is calculated by
dividing by `n - ddof` (where `n` is the number of
elements). By default it computes the maximum likelihood
estimator.
Returns
-------
average, std
Return the average and standard deviation along the specified
axis. If either was not requested, `None` is returned instead.
"""
if not (with_mean or with_std):
raise ValueError("Either the mean or standard deviation need to be"
" computed.")
a = np.asarray(a)
if weights is None:
avg = a.mean(axis=axis) if with_mean else None
std = a.std(axis=axis, ddof=ddof) if with_std else None
else:
avg = np.average(a, axis=axis, weights=weights)
if with_std:
if axis is None:
variance = np.average((a - avg)**2, weights=weights)
else:
# Make sure that the subtraction to compute variance works for
# multidimensional arrays
a_rolled = np.rollaxis(a, axis)
# Numpy doesn't have a weighted std implementation, but this is
# stable and fast
variance = np.average((a_rolled - avg)**2, axis=0,
weights=weights)
if ddof != 0: # Don't waste time if variance doesn't need scaling
if axis is None:
variance *= a.size / (a.size - ddof)
else:
variance *= a.shape[axis] / (a.shape[axis] - ddof)
std = np.sqrt(variance)
else:
std = None
avg = avg if with_mean else None
return avg, std
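# A small worked example (values chosen for illustration):
#   >>> a = np.array([1.0, 2.0, 3.0])
#   >>> w = np.array([1.0, 1.0, 2.0])
#   >>> mean_and_std(a, weights=w)
#   (2.25, 0.82915...)   # weighted mean (1 + 2 + 2*3)/4; std = sqrt(0.6875)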
def scale(a, weights=None, with_mean=True, with_std=True, ddof=0, copy=True):
"""Scale array by columns to have weighted average 0 and standard
deviation 1.
Parameters
----------
a : array_like
2D array whose columns are standardized according to the
weights.
weights : array_like, optional
Array of weights associated with the columns of `a`. By
default, the scaling is unweighted.
with_mean : bool, optional, defaults to True
Center columns to have 0 weighted mean.
with_std : bool, optional, defaults to True
Scale columns to have unit weighted std.
ddof : int, optional, defaults to 0
If with_std is True, variance is calculated by dividing by `n
- ddof` (where `n` is the number of elements). By default it
computes the maximum likelihood estimator.
copy : bool, optional, defaults to True
Whether to perform the standardization in place, or return a
new copy of `a`.
Returns
-------
2D ndarray
Scaled array.
Notes
-----
Wherever std equals 0, it is replaced by 1 in order to avoid
division by zero.
"""
if copy:
a = a.copy()
avg, std = mean_and_std(a, axis=0, weights=weights, with_mean=with_mean,
with_std=with_std, ddof=ddof)
if with_mean:
a -= avg
if with_std:
std[std == 0] = 1.0
a /= std
return a
def svd_rank(M_shape, S, tol=None):
"""Matrix rank of `M` given its singular values `S`.
See `np.linalg.matrix_rank` for a rationale on the tolerance
(we're not using that function because it doesn't let us reuse a
precomputed SVD)."""
if tol is None:
tol = S.max() * max(M_shape) * np.finfo(S.dtype).eps
return np.sum(S > tol)
def corr(x, y=None):
"""Computes correlation between columns of `x`, or `x` and `y`.
Correlation is covariance of (columnwise) standardized matrices,
so each matrix is first centered and scaled to have variance one,
and then their covariance is computed.
Parameters
----------
x : 2D array_like
Matrix of shape (n, p). Correlation between its columns will
be computed.
y : 2D array_like, optional
Matrix of shape (n, q). If provided, the correlation is
computed between the columns of `x` and the columns of
`y`. Else, it's computed between the columns of `x`.
Returns
-------
correlation
Matrix of computed correlations. Has shape (p, p) if `y` is
not provided, else has shape (p, q).
"""
x = np.asarray(x)
if y is not None:
y = np.asarray(y)
if y.shape[0] != x.shape[0]:
raise ValueError("Both matrices must have the same number of rows")
x, y = scale(x), scale(y)
else:
x = scale(x)
y = x
# Notice that scaling was performed with ddof=0 (dividing by n,
# the default), so now we need to remove it by also using ddof=0
# (dividing by n)
return x.T.dot(y) / x.shape[0]
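# A small sanity sketch: perfectly linearly related columns correlate at 1.
#   >>> x = np.array([[1.0, 2.0], [2.0, 4.0], [3.0, 6.0]])
#   >>> corr(x)
#   array([[ 1.,  1.],
#          [ 1.,  1.]])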
def assert_ordination_results_equal(left, right):
"""Assert that ordination results objects are equal.
This is a helper function intended to be used in unit tests that need to
compare ``OrdinationResults`` objects.
For numeric attributes (e.g., eigvals, site, etc.),
``numpy.testing.assert_almost_equal`` is used. Otherwise,
``numpy.testing.assert_equal`` is used for comparisons. An assertion is
in place to ensure the two objects are exactly the same type.
Parameters
----------
left, right : OrdinationResults
Ordination results to be compared for equality.
Raises
------
AssertionError
If the two objects are not equal.
"""
npt.assert_equal(type(left) is type(right), True)
# eigvals should always be present
npt.assert_almost_equal(left.eigvals, right.eigvals)
# these attributes are strings, so can compare directly, even if one or
# both are None
npt.assert_equal(left.species_ids, right.species_ids)
npt.assert_equal(left.site_ids, right.site_ids)
# these attributes need to be checked that they are almost equal, but one
# or both can be None, which npt.assert_almost_equal doesn't like
_assert_optional_numeric_attr_equal(left.species, right.species)
_assert_optional_numeric_attr_equal(left.site, right.site)
_assert_optional_numeric_attr_equal(left.biplot, right.biplot)
_assert_optional_numeric_attr_equal(left.site_constraints,
right.site_constraints)
_assert_optional_numeric_attr_equal(left.proportion_explained,
right.proportion_explained)
def _assert_optional_numeric_attr_equal(left, right):
if left is None or right is None:
npt.assert_equal(left, right)
else:
npt.assert_almost_equal(left, right)
|
diox/app-validator
|
refs/heads/master
|
appvalidator/testcases/javascript/predefinedentities.py
|
3
|
import math
import call_definitions
from appvalidator.constants import JS_DEBUG
from call_definitions import python_wrap
from entity_values import entity
from jstypes import JSGlobal, JSLiteral
# See https://github.com/mozilla/app-validator/wiki/JS-Predefined-Entities
# for details on entity properties.
def resolve_entity(traverser, *args):
element = GLOBAL_ENTITIES[args[0]]
for layer in args[1:]:
value = element["value"]
while callable(value):
value = value(t=traverser)
element = value[layer]
return element
def get_global(*args):
return lambda trav: resolve_entity(trav, *args)
global_identity = {"value": lambda *args: GLOBAL_ENTITIES}
READONLY = {"readonly": True}
def feature(constant, fallback=None):
def wrap(t):
t.log_feature(constant)
t._debug("Found feature: %s" % constant)
if fallback:
t._debug("Feature has fallback: %s" % repr(fallback))
return lambda *a: fallback if fallback else {}
return {'value': wrap,
'return': lambda **kw: kw['traverser'].log_feature(constant)}
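# Sketch of the mechanics: feature("VIBRATE") registers below under
# NAVIGATOR[u"vibrate"], so touching navigator.vibrate in scanned JS logs
# the VIBRATE feature; with no fallback the wrapped value resolves to {}.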
MOZAPPS = {
u'installPackage': feature('PACKAGED_APPS'),
}
NAVIGATOR = {
u"apps": feature("APPS", MOZAPPS),
u"mozApps": feature("APPS", MOZAPPS),
u"pay": feature("PAY"),
u"mozPay": feature("PAY"),
u"battery": feature("BATTERY"),
u"bluetooth": feature("BLUETOOTH"),
u"mozBluetooth": feature("BLUETOOTH"),
u"contacts": feature("CONTACTS"),
u"mozContacts": feature("CONTACTS"),
u"getDeviceStorage": feature("DEVICE_STORAGE"),
u"geolocation": feature("GEOLOCATION"),
u"getCurrentPosition": feature("GEOLOCATION"),
u"addIdleObserver": feature("IDLE"),
u"removeIdleObserver": feature("IDLE"),
u"connection": feature("NETWORK_INFO"),
u"mozConnection": feature("NETWORK_INFO"),
u"mozMobileConnection": feature("NETWORK_INFO"),
u"networkStats": feature("NETWORK_STATS"),
u"mozNetworkStats": feature("NETWORK_STATS"),
u"push": feature("PUSH"),
u"mozPush": feature("PUSH"),
u"time": feature("TIME_CLOCK"),
u"mozTime": feature("TIME_CLOCK"),
u"vibrate": feature("VIBRATE"),
u"FM": feature("FM"),
u"mozFM": feature("FM"),
u"mozFMRadio": feature("FM"),
# XXX: The "SMS" API's capitalization seems to be inconsistent at the moment.
u"SMS": feature("SMS"),
u"mozSMS": feature("SMS"),
u"mozSms": feature("SMS"),
u"mozNotification": feature("NOTIFICATION"),
u"mozAlarms": feature("ALARM"),
u"getGamepad": feature("GAMEPAD"),
u"mozGetGamepad": feature("GAMEPAD"),
u"webkitGetGamepad": feature("GAMEPAD"),
u"mozTCPSocket": feature("TCPSOCKET"),
u"mozInputMethod": feature("THIRDPARTY_KEYBOARD_SUPPORT"),
u"mozMobileConnections": feature("NETWORK_INFO_MULTIPLE"),
u"getMobileIdAssertion": feature("MOBILEID"),
u"getUserMedia": entity("getUserMedia"),
}
# GLOBAL_ENTITIES is also representative of the `window` object.
GLOBAL_ENTITIES = {
u"window": global_identity,
u"null": {"literal": None},
u"document":
{"value":
{u"defaultView": global_identity,
u"cancelFullScreen": feature("FULLSCREEN"),
u"mozCancelFullScreen": feature("FULLSCREEN"),
u"webkitCancelFullScreen": feature("FULLSCREEN"),
u"fullScreenElement": feature("FULLSCREEN"),
u"mozFullScreenElement": feature("FULLSCREEN"),
u"webkitFullScreenElement": feature("FULLSCREEN"),
},
},
# The nefarious timeout brothers!
u"setTimeout": entity("setTimeout"),
u"setInterval": entity("setInterval"),
u"encodeURI": READONLY,
u"decodeURI": READONLY,
u"encodeURIComponent": READONLY,
u"decodeURIComponent": READONLY,
u"escape": READONLY,
u"unescape": READONLY,
u"isFinite": READONLY,
u"isNaN": READONLY,
u"parseFloat": READONLY,
u"parseInt": READONLY,
u"eval": entity("eval"),
u"Function": entity("Function"),
u"Object":
{"value": {u"constructor": {"value": get_global("Function")}}},
u"String":
{"value":
{u"constructor": {"value": get_global("Function")}},
"return": call_definitions.string_global,
"new": call_definitions.string_global,
"typeof": "string"},
u"Array":
{"value":
{u"constructor": {"value": get_global("Function")}},
"return": call_definitions.array_global,
"new": call_definitions.array_global},
u"Number":
{"value":
{u"constructor": {"value": get_global("Function")},
u"POSITIVE_INFINITY": {"literal": float('inf')},
u"NEGATIVE_INFINITY": {"literal": float('-inf')},
u"isNaN": get_global("isNaN")},
"return": call_definitions.number_global,
"new": call_definitions.number_global,
"typeof": "number"},
u"Boolean":
{"value":
{u"constructor": {"value": get_global("Function")}},
"return": call_definitions.boolean_global,
"new": call_definitions.boolean_global,
"typeof": "boolean"},
u"RegExp":
{"value":
{u"constructor": {"value": get_global("Function")}}},
u"Date":
{"value":
{u"constructor": {"value": get_global("Function")}}},
u"File":
{"value":
{u"constructor": {"value": get_global("Function")}}},
u"Math":
{"value":
{u"PI": {"literal": math.pi},
u"E": {"literal": math.e},
u"LN2": {"literal": math.log(2)},
u"LN10": {"literal": math.log(10)},
u"LOG2E": {"literal": math.log(math.e, 2)},
u"LOG10E": {"literal": math.log10(math.e)},
u"SQRT2": {"literal": math.sqrt(2)},
u"SQRT1_2": {"literal": math.sqrt(1/2)},
u"abs": {"return": python_wrap(abs, [("num", 0)])},
u"acos": {"return": python_wrap(math.acos, [("num", 0)])},
u"asin": {"return": python_wrap(math.asin, [("num", 0)])},
u"atan": {"return": python_wrap(math.atan, [("num", 0)])},
u"atan2": {"return": python_wrap(math.atan2, [("num", 0),
("num", 1)])},
u"ceil": {"return": python_wrap(math.ceil, [("num", 0)])},
u"cos": {"return": python_wrap(math.cos, [("num", 0)])},
u"exp": {"return": python_wrap(math.exp, [("num", 0)])},
u"floor": {"return": python_wrap(math.floor, [("num", 0)])},
u"log": {"return": call_definitions.math_log},
u"max": {"return": python_wrap(max, [("num", 0)], nargs=True)},
u"min": {"return": python_wrap(min, [("num", 0)], nargs=True)},
u"pow": {"return": python_wrap(math.pow, [("num", 0),
("num", 0)])},
# Random always returns 0.5 in our fantasy land.
u"random": {"return": lambda **kw: JSLiteral(0.5)},
u"round": {"return": call_definitions.math_round},
u"sin": {"return": python_wrap(math.sin, [("num", 0)])},
u"sqrt": {"return": python_wrap(math.sqrt, [("num", 1)])},
u"tan": {"return": python_wrap(math.tan, [("num", 0)])},
},
},
u"XMLHttpRequest": entity('XMLHttpRequest'),
# Global properties are inherently read-only, though this formalizes it.
u"Infinity": get_global("Number", "POSITIVE_INFINITY"),
u"NaN": READONLY,
u"undefined": {"readonly": True, "undefined": True, "literal": None},
u"opener": global_identity,
u"navigator": {"value": NAVIGATOR},
u"Activity": feature("ACTIVITY"),
u"MozActivity": feature("ACTIVITY"),
u"ondevicelight": feature("LIGHT_EVENTS"),
u"ArchiveReader": feature("ARCHIVE"),
u"indexedDB": feature("INDEXEDDB"),
u"mozIndexedDB": feature("INDEXEDDB"),
u"ondeviceproximity": feature("PROXIMITY"),
u"ondeviceorientation": feature("ORIENTATION"),
u"ontouchstart": feature("TOUCH"),
u"Audio": feature("AUDIO"),
u"webkitAudioContext": feature("WEBAUDIO"),
u"mozAudioContext": feature("WEBAUDIO"),
u"AudioContext": feature("WEBAUDIO"),
u"persistentStorage": feature("QUOTA"),
u"mozPersistentStorage": feature("QUOTA"),
u"webkitPersistentStorage": feature("QUOTA"),
u"StorageInfo": feature("QUOTA"),
u"fullScreen": feature("FULLSCREEN"),
U"MediaStream": feature("WEBRTC_MEDIA"),
u"DataChannel": feature("WEBRTC_DATA"),
u"RTCPeerConnection": feature("WEBRTC_PEER"),
u"mozRTCPeerConnection": feature("WEBRTC_PEER"),
u"webkitRTCPeerConnection": feature("WEBRTC_PEER"),
u"speechSynthesis": feature("SPEECH_SYN"),
u"SpeechSynthesisUtterance": feature("SPEECH_SYN"),
u"SpeechRecognition": feature("SPEECH_REC"),
}
def enable_debug():
def assert_(wrapper, arguments, traverser):
traverser.asserts = True
for arg in arguments:
if not arg.get_literal_value(traverser):
traverser.err.error(
err_id=("js", "debug", "assert"),
error="`%s` expected to be truthy" % arg,
description="Assertion error")
GLOBAL_ENTITIES[u"__assert"] = {"return": assert_}
def callable_(wrapper, arguments, traverser):
traverser.asserts = True
for arg in arguments:
if not arg.callable:
traverser.err.error(
err_id=("js", "debug", "callable"),
error="`%s` expected to be callable" % arg,
description="Assertion error")
GLOBAL_ENTITIES[u"__callable"] = {"return": assert_}
|
prutseltje/ansible
|
refs/heads/devel
|
test/units/module_utils/facts/other/test_ohai.py
|
118
|
# unit tests for ansible ohai fact collector
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
from ansible.compat.tests.mock import Mock, patch
from .. base import BaseFactsTest
from ansible.module_utils.facts.other.ohai import OhaiFactCollector
ohai_json_output = r'''
{
"kernel": {
"name": "Linux",
"release": "4.9.14-200.fc25.x86_64",
"version": "#1 SMP Mon Mar 13 19:26:40 UTC 2017",
"machine": "x86_64",
"processor": "x86_64",
"os": "GNU/Linux",
"modules": {
"binfmt_misc": {
"size": "20480",
"refcount": "1"
},
"veth": {
"size": "16384",
"refcount": "0"
},
"xfs": {
"size": "1200128",
"refcount": "1"
},
"xt_addrtype": {
"size": "16384",
"refcount": "2"
},
"br_netfilter": {
"size": "24576",
"refcount": "0"
},
"dm_thin_pool": {
"size": "65536",
"refcount": "2"
},
"dm_persistent_data": {
"size": "69632",
"refcount": "1"
},
"dm_bio_prison": {
"size": "16384",
"refcount": "1"
},
"libcrc32c": {
"size": "16384",
"refcount": "2"
},
"rfcomm": {
"size": "77824",
"refcount": "14",
"version": "1.11"
},
"fuse": {
"size": "102400",
"refcount": "3"
},
"ccm": {
"size": "20480",
"refcount": "2"
},
"xt_CHECKSUM": {
"size": "16384",
"refcount": "2"
},
"iptable_mangle": {
"size": "16384",
"refcount": "1"
},
"ipt_MASQUERADE": {
"size": "16384",
"refcount": "7"
},
"nf_nat_masquerade_ipv4": {
"size": "16384",
"refcount": "1"
},
"iptable_nat": {
"size": "16384",
"refcount": "1"
},
"nf_nat_ipv4": {
"size": "16384",
"refcount": "1"
},
"nf_nat": {
"size": "28672",
"refcount": "2"
},
"nf_conntrack_ipv4": {
"size": "16384",
"refcount": "4"
},
"nf_defrag_ipv4": {
"size": "16384",
"refcount": "1"
},
"xt_conntrack": {
"size": "16384",
"refcount": "3"
},
"nf_conntrack": {
"size": "106496",
"refcount": "5"
},
"ip6t_REJECT": {
"size": "16384",
"refcount": "2"
},
"nf_reject_ipv6": {
"size": "16384",
"refcount": "1"
},
"tun": {
"size": "28672",
"refcount": "4"
},
"bridge": {
"size": "135168",
"refcount": "1",
"version": "2.3"
},
"stp": {
"size": "16384",
"refcount": "1"
},
"llc": {
"size": "16384",
"refcount": "2"
},
"ebtable_filter": {
"size": "16384",
"refcount": "0"
},
"ebtables": {
"size": "36864",
"refcount": "1"
},
"ip6table_filter": {
"size": "16384",
"refcount": "1"
},
"ip6_tables": {
"size": "28672",
"refcount": "1"
},
"cmac": {
"size": "16384",
"refcount": "3"
},
"uhid": {
"size": "20480",
"refcount": "2"
},
"bnep": {
"size": "20480",
"refcount": "2",
"version": "1.3"
},
"btrfs": {
"size": "1056768",
"refcount": "1"
},
"xor": {
"size": "24576",
"refcount": "1"
},
"raid6_pq": {
"size": "106496",
"refcount": "1"
},
"loop": {
"size": "28672",
"refcount": "6"
},
"arc4": {
"size": "16384",
"refcount": "2"
},
"snd_hda_codec_hdmi": {
"size": "45056",
"refcount": "1"
},
"intel_rapl": {
"size": "20480",
"refcount": "0"
},
"x86_pkg_temp_thermal": {
"size": "16384",
"refcount": "0"
},
"intel_powerclamp": {
"size": "16384",
"refcount": "0"
},
"coretemp": {
"size": "16384",
"refcount": "0"
},
"kvm_intel": {
"size": "192512",
"refcount": "0"
},
"kvm": {
"size": "585728",
"refcount": "1"
},
"irqbypass": {
"size": "16384",
"refcount": "1"
},
"crct10dif_pclmul": {
"size": "16384",
"refcount": "0"
},
"crc32_pclmul": {
"size": "16384",
"refcount": "0"
},
"iTCO_wdt": {
"size": "16384",
"refcount": "0",
"version": "1.11"
},
"ghash_clmulni_intel": {
"size": "16384",
"refcount": "0"
},
"mei_wdt": {
"size": "16384",
"refcount": "0"
},
"iTCO_vendor_support": {
"size": "16384",
"refcount": "1",
"version": "1.04"
},
"iwlmvm": {
"size": "364544",
"refcount": "0"
},
"intel_cstate": {
"size": "16384",
"refcount": "0"
},
"uvcvideo": {
"size": "90112",
"refcount": "0",
"version": "1.1.1"
},
"videobuf2_vmalloc": {
"size": "16384",
"refcount": "1"
},
"intel_uncore": {
"size": "118784",
"refcount": "0"
},
"videobuf2_memops": {
"size": "16384",
"refcount": "1"
},
"videobuf2_v4l2": {
"size": "24576",
"refcount": "1"
},
"videobuf2_core": {
"size": "40960",
"refcount": "2"
},
"intel_rapl_perf": {
"size": "16384",
"refcount": "0"
},
"mac80211": {
"size": "749568",
"refcount": "1"
},
"videodev": {
"size": "172032",
"refcount": "3"
},
"snd_usb_audio": {
"size": "180224",
"refcount": "3"
},
"e1000e": {
"size": "249856",
"refcount": "0",
"version": "3.2.6-k"
}
}
},
"os": "linux",
"os_version": "4.9.14-200.fc25.x86_64",
"lsb": {
"id": "Fedora",
"description": "Fedora release 25 (Twenty Five)",
"release": "25",
"codename": "TwentyFive"
},
"platform": "fedora",
"platform_version": "25",
"platform_family": "fedora",
"packages": {
"ansible": {
"epoch": "0",
"version": "2.2.1.0",
"release": "1.fc25",
"installdate": "1486050042",
"arch": "noarch"
},
"python3": {
"epoch": "0",
"version": "3.5.3",
"release": "3.fc25",
"installdate": "1490025957",
"arch": "x86_64"
},
"kernel": {
"epoch": "0",
"version": "4.9.6",
"release": "200.fc25",
"installdate": "1486047522",
"arch": "x86_64"
},
"glibc": {
"epoch": "0",
"version": "2.24",
"release": "4.fc25",
"installdate": "1483402427",
"arch": "x86_64"
}
},
"chef_packages": {
ohai": {
"version": "13.0.0",
"ohai_root": "/home/some_user/.gem/ruby/gems/ohai-13.0.0/lib/ohai"
}
},
"dmi": {
"dmidecode_version": "3.0"
},
"uptime_seconds": 2509008,
"uptime": "29 days 00 hours 56 minutes 48 seconds",
"idletime_seconds": 19455087,
"idletime": "225 days 04 hours 11 minutes 27 seconds",
"memory": {
"swap": {
"cached": "262436kB",
"total": "8069116kB",
"free": "5154396kB"
},
"hugepages": {
"total": "0",
"free": "0",
"reserved": "0",
"surplus": "0"
},
"total": "16110540kB",
"free": "3825844kB",
"buffers": "377240kB",
"cached": "3710084kB",
"active": "8104320kB",
"inactive": "3192920kB",
"dirty": "812kB",
"writeback": "0kB",
"anon_pages": "7124992kB",
"mapped": "580700kB",
"slab": "622848kB",
"slab_reclaimable": "307300kB",
"slab_unreclaim": "315548kB",
"page_tables": "157572kB",
"nfs_unstable": "0kB",
"bounce": "0kB",
"commit_limit": "16124384kB",
"committed_as": "31345068kB",
"vmalloc_total": "34359738367kB",
"vmalloc_used": "0kB",
"vmalloc_chunk": "0kB",
"hugepage_size": "2048kB"
},
"filesystem": {
"by_device": {
"devtmpfs": {
"kb_size": "8044124",
"kb_used": "0",
"kb_available": "8044124",
"percent_used": "0%",
"total_inodes": "2011031",
"inodes_used": "629",
"inodes_available": "2010402",
"inodes_percent_used": "1%",
"fs_type": "devtmpfs",
"mount_options": [
"rw",
"nosuid",
"seclabel",
"size=8044124k",
"nr_inodes=2011031",
"mode=755"
],
"mounts": [
"/dev"
]
},
"tmpfs": {
"kb_size": "1611052",
"kb_used": "72",
"kb_available": "1610980",
"percent_used": "1%",
"total_inodes": "2013817",
"inodes_used": "36",
"inodes_available": "2013781",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700",
"uid=1000",
"gid=1000"
],
"mounts": [
"/dev/shm",
"/run",
"/sys/fs/cgroup",
"/tmp",
"/run/user/0",
"/run/user/1000"
]
},
"/dev/mapper/fedora_host--186-root": {
"kb_size": "51475068",
"kb_used": "42551284",
"kb_available": "6285960",
"percent_used": "88%",
"total_inodes": "3276800",
"inodes_used": "532908",
"inodes_available": "2743892",
"inodes_percent_used": "17%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "12312331-3449-4a6c-8179-a1feb2bca6ce",
"mounts": [
"/",
"/var/lib/docker/devicemapper"
]
},
"/dev/sda1": {
"kb_size": "487652",
"kb_used": "126628",
"kb_available": "331328",
"percent_used": "28%",
"total_inodes": "128016",
"inodes_used": "405",
"inodes_available": "127611",
"inodes_percent_used": "1%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "12312311-ef40-4691-a3b6-438c3f9bc1c0",
"mounts": [
"/boot"
]
},
"/dev/mapper/fedora_host--186-home": {
"kb_size": "185948124",
"kb_used": "105904724",
"kb_available": "70574680",
"percent_used": "61%",
"total_inodes": "11821056",
"inodes_used": "1266687",
"inodes_available": "10554369",
"inodes_percent_used": "11%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d",
"mounts": [
"/home"
]
},
"/dev/loop0": {
"kb_size": "512000",
"kb_used": "16672",
"kb_available": "429056",
"percent_used": "4%",
"fs_type": "btrfs",
"uuid": "0f031512-ab15-497d-9abd-3a512b4a9390",
"mounts": [
"/var/lib/machines"
]
},
"sysfs": {
"fs_type": "sysfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"mounts": [
"/sys"
]
},
"proc": {
"fs_type": "proc",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"mounts": [
"/proc"
]
},
"securityfs": {
"fs_type": "securityfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"mounts": [
"/sys/kernel/security"
]
},
"devpts": {
"fs_type": "devpts",
"mount_options": [
"rw",
"nosuid",
"noexec",
"relatime",
"seclabel",
"gid=5",
"mode=620",
"ptmxmode=000"
],
"mounts": [
"/dev/pts"
]
},
"cgroup": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"net_cls",
"net_prio"
],
"mounts": [
"/sys/fs/cgroup/systemd",
"/sys/fs/cgroup/devices",
"/sys/fs/cgroup/cpuset",
"/sys/fs/cgroup/perf_event",
"/sys/fs/cgroup/hugetlb",
"/sys/fs/cgroup/cpu,cpuacct",
"/sys/fs/cgroup/blkio",
"/sys/fs/cgroup/freezer",
"/sys/fs/cgroup/memory",
"/sys/fs/cgroup/pids",
"/sys/fs/cgroup/net_cls,net_prio"
]
},
"pstore": {
"fs_type": "pstore",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"mounts": [
"/sys/fs/pstore"
]
},
"configfs": {
"fs_type": "configfs",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/kernel/config"
]
},
"selinuxfs": {
"fs_type": "selinuxfs",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/fs/selinux"
]
},
"debugfs": {
"fs_type": "debugfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"mounts": [
"/sys/kernel/debug"
]
},
"hugetlbfs": {
"fs_type": "hugetlbfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"mounts": [
"/dev/hugepages"
]
},
"mqueue": {
"fs_type": "mqueue",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"mounts": [
"/dev/mqueue"
]
},
"systemd-1": {
"fs_type": "autofs",
"mount_options": [
"rw",
"relatime",
"fd=40",
"pgrp=1",
"timeout=0",
"minproto=5",
"maxproto=5",
"direct",
"pipe_ino=17610"
],
"mounts": [
"/proc/sys/fs/binfmt_misc"
]
},
"/var/lib/machines.raw": {
"fs_type": "btrfs",
"mount_options": [
"rw",
"relatime",
"seclabel",
"space_cache",
"subvolid=5",
"subvol=/"
],
"mounts": [
"/var/lib/machines"
]
},
"fusectl": {
"fs_type": "fusectl",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/fs/fuse/connections"
]
},
"gvfsd-fuse": {
"fs_type": "fuse.gvfsd-fuse",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"user_id=1000",
"group_id=1000"
],
"mounts": [
"/run/user/1000/gvfs"
]
},
"binfmt_misc": {
"fs_type": "binfmt_misc",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/proc/sys/fs/binfmt_misc"
]
},
"/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8": {
"fs_type": "xfs",
"mount_options": [
"rw",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"nouuid",
"attr2",
"inode64",
"logbsize=64k",
"sunit=128",
"swidth=128",
"noquota"
],
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123",
"mounts": [
"/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8"
]
},
"shm": {
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"size=65536k"
],
"mounts": [
"/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm"
]
},
"nsfs": {
"fs_type": "nsfs",
"mount_options": [
"rw"
],
"mounts": [
"/run/docker/netns/1ce89fd79f3d"
]
},
"tracefs": {
"fs_type": "tracefs",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/kernel/debug/tracing"
]
},
"/dev/loop1": {
"fs_type": "xfs",
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123",
"mounts": [
]
},
"/dev/mapper/docker-253:1-1180487-pool": {
"mounts": [
]
},
"/dev/sr0": {
"mounts": [
]
},
"/dev/loop2": {
"mounts": [
]
},
"/dev/sda": {
"mounts": [
]
},
"/dev/sda2": {
"fs_type": "LVM2_member",
"uuid": "66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK",
"mounts": [
]
},
"/dev/mapper/fedora_host--186-swap": {
"fs_type": "swap",
"uuid": "eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d",
"mounts": [
]
}
},
"by_mountpoint": {
"/dev": {
"kb_size": "8044124",
"kb_used": "0",
"kb_available": "8044124",
"percent_used": "0%",
"total_inodes": "2011031",
"inodes_used": "629",
"inodes_available": "2010402",
"inodes_percent_used": "1%",
"fs_type": "devtmpfs",
"mount_options": [
"rw",
"nosuid",
"seclabel",
"size=8044124k",
"nr_inodes=2011031",
"mode=755"
],
"devices": [
"devtmpfs"
]
},
"/dev/shm": {
"kb_size": "8055268",
"kb_used": "96036",
"kb_available": "7959232",
"percent_used": "2%",
"total_inodes": "2013817",
"inodes_used": "217",
"inodes_available": "2013600",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
],
"devices": [
"tmpfs"
]
},
"/run": {
"kb_size": "8055268",
"kb_used": "2280",
"kb_available": "8052988",
"percent_used": "1%",
"total_inodes": "2013817",
"inodes_used": "1070",
"inodes_available": "2012747",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel",
"mode=755"
],
"devices": [
"tmpfs"
]
},
"/sys/fs/cgroup": {
"kb_size": "8055268",
"kb_used": "0",
"kb_available": "8055268",
"percent_used": "0%",
"total_inodes": "2013817",
"inodes_used": "16",
"inodes_available": "2013801",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"ro",
"nosuid",
"nodev",
"noexec",
"seclabel",
"mode=755"
],
"devices": [
"tmpfs"
]
},
"/": {
"kb_size": "51475068",
"kb_used": "42551284",
"kb_available": "6285960",
"percent_used": "88%",
"total_inodes": "3276800",
"inodes_used": "532908",
"inodes_available": "2743892",
"inodes_percent_used": "17%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce",
"devices": [
"/dev/mapper/fedora_host--186-root"
]
},
"/tmp": {
"kb_size": "8055268",
"kb_used": "848396",
"kb_available": "7206872",
"percent_used": "11%",
"total_inodes": "2013817",
"inodes_used": "1353",
"inodes_available": "2012464",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
],
"devices": [
"tmpfs"
]
},
"/boot": {
"kb_size": "487652",
"kb_used": "126628",
"kb_available": "331328",
"percent_used": "28%",
"total_inodes": "128016",
"inodes_used": "405",
"inodes_available": "127611",
"inodes_percent_used": "1%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "32caaec3-ef40-4691-a3b6-438c3f9bc1c0",
"devices": [
"/dev/sda1"
]
},
"/home": {
"kb_size": "185948124",
"kb_used": "105904724",
"kb_available": "70574680",
"percent_used": "61%",
"total_inodes": "11821056",
"inodes_used": "1266687",
"inodes_available": "10554369",
"inodes_percent_used": "11%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d",
"devices": [
"/dev/mapper/fedora_host--186-home"
]
},
"/var/lib/machines": {
"kb_size": "512000",
"kb_used": "16672",
"kb_available": "429056",
"percent_used": "4%",
"fs_type": "btrfs",
"uuid": "0f031512-ab15-497d-9abd-3a512b4a9390",
"devices": [
"/dev/loop0",
"/var/lib/machines.raw"
],
"mount_options": [
"rw",
"relatime",
"seclabel",
"space_cache",
"subvolid=5",
"subvol=/"
]
},
"/run/user/0": {
"kb_size": "1611052",
"kb_used": "0",
"kb_available": "1611052",
"percent_used": "0%",
"total_inodes": "2013817",
"inodes_used": "7",
"inodes_available": "2013810",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700"
],
"devices": [
"tmpfs"
]
},
"/run/user/1000": {
"kb_size": "1611052",
"kb_used": "72",
"kb_available": "1610980",
"percent_used": "1%",
"total_inodes": "2013817",
"inodes_used": "36",
"inodes_available": "2013781",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700",
"uid=1000",
"gid=1000"
],
"devices": [
"tmpfs"
]
},
"/sys": {
"fs_type": "sysfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"devices": [
"sysfs"
]
},
"/proc": {
"fs_type": "proc",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"devices": [
"proc"
]
},
"/sys/kernel/security": {
"fs_type": "securityfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"devices": [
"securityfs"
]
},
"/dev/pts": {
"fs_type": "devpts",
"mount_options": [
"rw",
"nosuid",
"noexec",
"relatime",
"seclabel",
"gid=5",
"mode=620",
"ptmxmode=000"
],
"devices": [
"devpts"
]
},
"/sys/fs/cgroup/systemd": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"xattr",
"release_agent=/usr/lib/systemd/systemd-cgroups-agent",
"name=systemd"
],
"devices": [
"cgroup"
]
},
"/sys/fs/pstore": {
"fs_type": "pstore",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"devices": [
"pstore"
]
},
"/sys/fs/cgroup/devices": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"devices"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/cpuset": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpuset"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/perf_event": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"perf_event"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/hugetlb": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"hugetlb"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/cpu,cpuacct": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpu",
"cpuacct"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/blkio": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"blkio"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/freezer": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"freezer"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/memory": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"memory"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/pids": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"pids"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/net_cls,net_prio": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"net_cls",
"net_prio"
],
"devices": [
"cgroup"
]
},
"/sys/kernel/config": {
"fs_type": "configfs",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"configfs"
]
},
"/sys/fs/selinux": {
"fs_type": "selinuxfs",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"selinuxfs"
]
},
"/sys/kernel/debug": {
"fs_type": "debugfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"devices": [
"debugfs"
]
},
"/dev/hugepages": {
"fs_type": "hugetlbfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"devices": [
"hugetlbfs"
]
},
"/dev/mqueue": {
"fs_type": "mqueue",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"devices": [
"mqueue"
]
},
"/proc/sys/fs/binfmt_misc": {
"fs_type": "binfmt_misc",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"systemd-1",
"binfmt_misc"
]
},
"/sys/fs/fuse/connections": {
"fs_type": "fusectl",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"fusectl"
]
},
"/run/user/1000/gvfs": {
"fs_type": "fuse.gvfsd-fuse",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"user_id=1000",
"group_id=1000"
],
"devices": [
"gvfsd-fuse"
]
},
"/var/lib/docker/devicemapper": {
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce",
"devices": [
"/dev/mapper/fedora_host--186-root"
]
},
"/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8": {
"fs_type": "xfs",
"mount_options": [
"rw",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"nouuid",
"attr2",
"inode64",
"logbsize=64k",
"sunit=128",
"swidth=128",
"noquota"
],
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123",
"devices": [
"/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8"
]
},
"/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm": {
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"size=65536k"
],
"devices": [
"shm"
]
},
"/run/docker/netns/1ce89fd79f3d": {
"fs_type": "nsfs",
"mount_options": [
"rw"
],
"devices": [
"nsfs"
]
},
"/sys/kernel/debug/tracing": {
"fs_type": "tracefs",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"tracefs"
]
}
},
"by_pair": {
"devtmpfs,/dev": {
"device": "devtmpfs",
"kb_size": "8044124",
"kb_used": "0",
"kb_available": "8044124",
"percent_used": "0%",
"mount": "/dev",
"total_inodes": "2011031",
"inodes_used": "629",
"inodes_available": "2010402",
"inodes_percent_used": "1%",
"fs_type": "devtmpfs",
"mount_options": [
"rw",
"nosuid",
"seclabel",
"size=8044124k",
"nr_inodes=2011031",
"mode=755"
]
},
"tmpfs,/dev/shm": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "96036",
"kb_available": "7959232",
"percent_used": "2%",
"mount": "/dev/shm",
"total_inodes": "2013817",
"inodes_used": "217",
"inodes_available": "2013600",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
]
},
"tmpfs,/run": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "2280",
"kb_available": "8052988",
"percent_used": "1%",
"mount": "/run",
"total_inodes": "2013817",
"inodes_used": "1070",
"inodes_available": "2012747",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel",
"mode=755"
]
},
"tmpfs,/sys/fs/cgroup": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "0",
"kb_available": "8055268",
"percent_used": "0%",
"mount": "/sys/fs/cgroup",
"total_inodes": "2013817",
"inodes_used": "16",
"inodes_available": "2013801",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"ro",
"nosuid",
"nodev",
"noexec",
"seclabel",
"mode=755"
]
},
"/dev/mapper/fedora_host--186-root,/": {
"device": "/dev/mapper/fedora_host--186-root",
"kb_size": "51475068",
"kb_used": "42551284",
"kb_available": "6285960",
"percent_used": "88%",
"mount": "/",
"total_inodes": "3276800",
"inodes_used": "532908",
"inodes_available": "2743892",
"inodes_percent_used": "17%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce"
},
"tmpfs,/tmp": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "848396",
"kb_available": "7206872",
"percent_used": "11%",
"mount": "/tmp",
"total_inodes": "2013817",
"inodes_used": "1353",
"inodes_available": "2012464",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
]
},
"/dev/sda1,/boot": {
"device": "/dev/sda1",
"kb_size": "487652",
"kb_used": "126628",
"kb_available": "331328",
"percent_used": "28%",
"mount": "/boot",
"total_inodes": "128016",
"inodes_used": "405",
"inodes_available": "127611",
"inodes_percent_used": "1%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "32caaec3-ef40-4691-a3b6-438c3f9bc1c0"
},
"/dev/mapper/fedora_host--186-home,/home": {
"device": "/dev/mapper/fedora_host--186-home",
"kb_size": "185948124",
"kb_used": "105904724",
"kb_available": "70574680",
"percent_used": "61%",
"mount": "/home",
"total_inodes": "11821056",
"inodes_used": "1266687",
"inodes_available": "10554369",
"inodes_percent_used": "11%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d"
},
"/dev/loop0,/var/lib/machines": {
"device": "/dev/loop0",
"kb_size": "512000",
"kb_used": "16672",
"kb_available": "429056",
"percent_used": "4%",
"mount": "/var/lib/machines",
"fs_type": "btrfs",
"uuid": "0f031512-ab15-497d-9abd-3a512b4a9390"
},
"tmpfs,/run/user/0": {
"device": "tmpfs",
"kb_size": "1611052",
"kb_used": "0",
"kb_available": "1611052",
"percent_used": "0%",
"mount": "/run/user/0",
"total_inodes": "2013817",
"inodes_used": "7",
"inodes_available": "2013810",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700"
]
},
"tmpfs,/run/user/1000": {
"device": "tmpfs",
"kb_size": "1611052",
"kb_used": "72",
"kb_available": "1610980",
"percent_used": "1%",
"mount": "/run/user/1000",
"total_inodes": "2013817",
"inodes_used": "36",
"inodes_available": "2013781",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700",
"uid=1000",
"gid=1000"
]
},
"sysfs,/sys": {
"device": "sysfs",
"mount": "/sys",
"fs_type": "sysfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
]
},
"proc,/proc": {
"device": "proc",
"mount": "/proc",
"fs_type": "proc",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
]
},
"securityfs,/sys/kernel/security": {
"device": "securityfs",
"mount": "/sys/kernel/security",
"fs_type": "securityfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
]
},
"devpts,/dev/pts": {
"device": "devpts",
"mount": "/dev/pts",
"fs_type": "devpts",
"mount_options": [
"rw",
"nosuid",
"noexec",
"relatime",
"seclabel",
"gid=5",
"mode=620",
"ptmxmode=000"
]
},
"cgroup,/sys/fs/cgroup/systemd": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/systemd",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"xattr",
"release_agent=/usr/lib/systemd/systemd-cgroups-agent",
"name=systemd"
]
},
"pstore,/sys/fs/pstore": {
"device": "pstore",
"mount": "/sys/fs/pstore",
"fs_type": "pstore",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
]
},
"cgroup,/sys/fs/cgroup/devices": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/devices",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"devices"
]
},
"cgroup,/sys/fs/cgroup/cpuset": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/cpuset",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpuset"
]
},
"cgroup,/sys/fs/cgroup/perf_event": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/perf_event",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"perf_event"
]
},
"cgroup,/sys/fs/cgroup/hugetlb": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/hugetlb",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"hugetlb"
]
},
"cgroup,/sys/fs/cgroup/cpu,cpuacct": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/cpu,cpuacct",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpu",
"cpuacct"
]
},
"cgroup,/sys/fs/cgroup/blkio": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/blkio",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"blkio"
]
},
"cgroup,/sys/fs/cgroup/freezer": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/freezer",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"freezer"
]
},
"cgroup,/sys/fs/cgroup/memory": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/memory",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"memory"
]
},
"cgroup,/sys/fs/cgroup/pids": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/pids",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"pids"
]
},
"cgroup,/sys/fs/cgroup/net_cls,net_prio": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/net_cls,net_prio",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"net_cls",
"net_prio"
]
},
"configfs,/sys/kernel/config": {
"device": "configfs",
"mount": "/sys/kernel/config",
"fs_type": "configfs",
"mount_options": [
"rw",
"relatime"
]
},
"selinuxfs,/sys/fs/selinux": {
"device": "selinuxfs",
"mount": "/sys/fs/selinux",
"fs_type": "selinuxfs",
"mount_options": [
"rw",
"relatime"
]
},
"debugfs,/sys/kernel/debug": {
"device": "debugfs",
"mount": "/sys/kernel/debug",
"fs_type": "debugfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
]
},
"hugetlbfs,/dev/hugepages": {
"device": "hugetlbfs",
"mount": "/dev/hugepages",
"fs_type": "hugetlbfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
]
},
"mqueue,/dev/mqueue": {
"device": "mqueue",
"mount": "/dev/mqueue",
"fs_type": "mqueue",
"mount_options": [
"rw",
"relatime",
"seclabel"
]
},
"systemd-1,/proc/sys/fs/binfmt_misc": {
"device": "systemd-1",
"mount": "/proc/sys/fs/binfmt_misc",
"fs_type": "autofs",
"mount_options": [
"rw",
"relatime",
"fd=40",
"pgrp=1",
"timeout=0",
"minproto=5",
"maxproto=5",
"direct",
"pipe_ino=17610"
]
},
"/var/lib/machines.raw,/var/lib/machines": {
"device": "/var/lib/machines.raw",
"mount": "/var/lib/machines",
"fs_type": "btrfs",
"mount_options": [
"rw",
"relatime",
"seclabel",
"space_cache",
"subvolid=5",
"subvol=/"
]
},
"fusectl,/sys/fs/fuse/connections": {
"device": "fusectl",
"mount": "/sys/fs/fuse/connections",
"fs_type": "fusectl",
"mount_options": [
"rw",
"relatime"
]
},
"gvfsd-fuse,/run/user/1000/gvfs": {
"device": "gvfsd-fuse",
"mount": "/run/user/1000/gvfs",
"fs_type": "fuse.gvfsd-fuse",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"user_id=1000",
"group_id=1000"
]
},
"/dev/mapper/fedora_host--186-root,/var/lib/docker/devicemapper": {
"device": "/dev/mapper/fedora_host--186-root",
"mount": "/var/lib/docker/devicemapper",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce"
},
"binfmt_misc,/proc/sys/fs/binfmt_misc": {
"device": "binfmt_misc",
"mount": "/proc/sys/fs/binfmt_misc",
"fs_type": "binfmt_misc",
"mount_options": [
"rw",
"relatime"
]
},
"/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8,/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8": {
"device": "/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8",
"mount": "/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8",
"fs_type": "xfs",
"mount_options": [
"rw",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"nouuid",
"attr2",
"inode64",
"logbsize=64k",
"sunit=128",
"swidth=128",
"noquota"
],
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123"
},
"shm,/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm": {
"device": "shm",
"mount": "/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"size=65536k"
]
},
"nsfs,/run/docker/netns/1ce89fd79f3d": {
"device": "nsfs",
"mount": "/run/docker/netns/1ce89fd79f3d",
"fs_type": "nsfs",
"mount_options": [
"rw"
]
},
"tracefs,/sys/kernel/debug/tracing": {
"device": "tracefs",
"mount": "/sys/kernel/debug/tracing",
"fs_type": "tracefs",
"mount_options": [
"rw",
"relatime"
]
},
"/dev/loop1,": {
"device": "/dev/loop1",
"fs_type": "xfs",
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123"
},
"/dev/mapper/docker-253:1-1180487-pool,": {
"device": "/dev/mapper/docker-253:1-1180487-pool"
},
"/dev/sr0,": {
"device": "/dev/sr0"
},
"/dev/loop2,": {
"device": "/dev/loop2"
},
"/dev/sda,": {
"device": "/dev/sda"
},
"/dev/sda2,": {
"device": "/dev/sda2",
"fs_type": "LVM2_member",
"uuid": "66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK"
},
"/dev/mapper/fedora_host--186-swap,": {
"device": "/dev/mapper/fedora_host--186-swap",
"fs_type": "swap",
"uuid": "eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d"
}
}
},
"filesystem2": {
"by_device": {
"devtmpfs": {
"kb_size": "8044124",
"kb_used": "0",
"kb_available": "8044124",
"percent_used": "0%",
"total_inodes": "2011031",
"inodes_used": "629",
"inodes_available": "2010402",
"inodes_percent_used": "1%",
"fs_type": "devtmpfs",
"mount_options": [
"rw",
"nosuid",
"seclabel",
"size=8044124k",
"nr_inodes=2011031",
"mode=755"
],
"mounts": [
"/dev"
]
},
"tmpfs": {
"kb_size": "1611052",
"kb_used": "72",
"kb_available": "1610980",
"percent_used": "1%",
"total_inodes": "2013817",
"inodes_used": "36",
"inodes_available": "2013781",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700",
"uid=1000",
"gid=1000"
],
"mounts": [
"/dev/shm",
"/run",
"/sys/fs/cgroup",
"/tmp",
"/run/user/0",
"/run/user/1000"
]
},
"/dev/mapper/fedora_host--186-root": {
"kb_size": "51475068",
"kb_used": "42551284",
"kb_available": "6285960",
"percent_used": "88%",
"total_inodes": "3276800",
"inodes_used": "532908",
"inodes_available": "2743892",
"inodes_percent_used": "17%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce",
"mounts": [
"/",
"/var/lib/docker/devicemapper"
]
},
"/dev/sda1": {
"kb_size": "487652",
"kb_used": "126628",
"kb_available": "331328",
"percent_used": "28%",
"total_inodes": "128016",
"inodes_used": "405",
"inodes_available": "127611",
"inodes_percent_used": "1%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "32caaec3-ef40-4691-a3b6-438c3f9bc1c0",
"mounts": [
"/boot"
]
},
"/dev/mapper/fedora_host--186-home": {
"kb_size": "185948124",
"kb_used": "105904724",
"kb_available": "70574680",
"percent_used": "61%",
"total_inodes": "11821056",
"inodes_used": "1266687",
"inodes_available": "10554369",
"inodes_percent_used": "11%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d",
"mounts": [
"/home"
]
},
"/dev/loop0": {
"kb_size": "512000",
"kb_used": "16672",
"kb_available": "429056",
"percent_used": "4%",
"fs_type": "btrfs",
"uuid": "0f031512-ab15-497d-9abd-3a512b4a9390",
"mounts": [
"/var/lib/machines"
]
},
"sysfs": {
"fs_type": "sysfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"mounts": [
"/sys"
]
},
"proc": {
"fs_type": "proc",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"mounts": [
"/proc"
]
},
"securityfs": {
"fs_type": "securityfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"mounts": [
"/sys/kernel/security"
]
},
"devpts": {
"fs_type": "devpts",
"mount_options": [
"rw",
"nosuid",
"noexec",
"relatime",
"seclabel",
"gid=5",
"mode=620",
"ptmxmode=000"
],
"mounts": [
"/dev/pts"
]
},
"cgroup": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"net_cls",
"net_prio"
],
"mounts": [
"/sys/fs/cgroup/systemd",
"/sys/fs/cgroup/devices",
"/sys/fs/cgroup/cpuset",
"/sys/fs/cgroup/perf_event",
"/sys/fs/cgroup/hugetlb",
"/sys/fs/cgroup/cpu,cpuacct",
"/sys/fs/cgroup/blkio",
"/sys/fs/cgroup/freezer",
"/sys/fs/cgroup/memory",
"/sys/fs/cgroup/pids",
"/sys/fs/cgroup/net_cls,net_prio"
]
},
"pstore": {
"fs_type": "pstore",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"mounts": [
"/sys/fs/pstore"
]
},
"configfs": {
"fs_type": "configfs",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/kernel/config"
]
},
"selinuxfs": {
"fs_type": "selinuxfs",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/fs/selinux"
]
},
"debugfs": {
"fs_type": "debugfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"mounts": [
"/sys/kernel/debug"
]
},
"hugetlbfs": {
"fs_type": "hugetlbfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"mounts": [
"/dev/hugepages"
]
},
"mqueue": {
"fs_type": "mqueue",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"mounts": [
"/dev/mqueue"
]
},
"systemd-1": {
"fs_type": "autofs",
"mount_options": [
"rw",
"relatime",
"fd=40",
"pgrp=1",
"timeout=0",
"minproto=5",
"maxproto=5",
"direct",
"pipe_ino=17610"
],
"mounts": [
"/proc/sys/fs/binfmt_misc"
]
},
"/var/lib/machines.raw": {
"fs_type": "btrfs",
"mount_options": [
"rw",
"relatime",
"seclabel",
"space_cache",
"subvolid=5",
"subvol=/"
],
"mounts": [
"/var/lib/machines"
]
},
"fusectl": {
"fs_type": "fusectl",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/fs/fuse/connections"
]
},
"gvfsd-fuse": {
"fs_type": "fuse.gvfsd-fuse",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"user_id=1000",
"group_id=1000"
],
"mounts": [
"/run/user/1000/gvfs"
]
},
"binfmt_misc": {
"fs_type": "binfmt_misc",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/proc/sys/fs/binfmt_misc"
]
},
"/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8": {
"fs_type": "xfs",
"mount_options": [
"rw",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"nouuid",
"attr2",
"inode64",
"logbsize=64k",
"sunit=128",
"swidth=128",
"noquota"
],
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123",
"mounts": [
"/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8"
]
},
"shm": {
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"size=65536k"
],
"mounts": [
"/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm"
]
},
"nsfs": {
"fs_type": "nsfs",
"mount_options": [
"rw"
],
"mounts": [
"/run/docker/netns/1ce89fd79f3d"
]
},
"tracefs": {
"fs_type": "tracefs",
"mount_options": [
"rw",
"relatime"
],
"mounts": [
"/sys/kernel/debug/tracing"
]
},
"/dev/loop1": {
"fs_type": "xfs",
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123",
"mounts": [
]
},
"/dev/mapper/docker-253:1-1180487-pool": {
"mounts": [
]
},
"/dev/sr0": {
"mounts": [
]
},
"/dev/loop2": {
"mounts": [
]
},
"/dev/sda": {
"mounts": [
]
},
"/dev/sda2": {
"fs_type": "LVM2_member",
"uuid": "66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK",
"mounts": [
]
},
"/dev/mapper/fedora_host--186-swap": {
"fs_type": "swap",
"uuid": "eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d",
"mounts": [
]
}
},
"by_mountpoint": {
"/dev": {
"kb_size": "8044124",
"kb_used": "0",
"kb_available": "8044124",
"percent_used": "0%",
"total_inodes": "2011031",
"inodes_used": "629",
"inodes_available": "2010402",
"inodes_percent_used": "1%",
"fs_type": "devtmpfs",
"mount_options": [
"rw",
"nosuid",
"seclabel",
"size=8044124k",
"nr_inodes=2011031",
"mode=755"
],
"devices": [
"devtmpfs"
]
},
"/dev/shm": {
"kb_size": "8055268",
"kb_used": "96036",
"kb_available": "7959232",
"percent_used": "2%",
"total_inodes": "2013817",
"inodes_used": "217",
"inodes_available": "2013600",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
],
"devices": [
"tmpfs"
]
},
"/run": {
"kb_size": "8055268",
"kb_used": "2280",
"kb_available": "8052988",
"percent_used": "1%",
"total_inodes": "2013817",
"inodes_used": "1070",
"inodes_available": "2012747",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel",
"mode=755"
],
"devices": [
"tmpfs"
]
},
"/sys/fs/cgroup": {
"kb_size": "8055268",
"kb_used": "0",
"kb_available": "8055268",
"percent_used": "0%",
"total_inodes": "2013817",
"inodes_used": "16",
"inodes_available": "2013801",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"ro",
"nosuid",
"nodev",
"noexec",
"seclabel",
"mode=755"
],
"devices": [
"tmpfs"
]
},
"/": {
"kb_size": "51475068",
"kb_used": "42551284",
"kb_available": "6285960",
"percent_used": "88%",
"total_inodes": "3276800",
"inodes_used": "532908",
"inodes_available": "2743892",
"inodes_percent_used": "17%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce",
"devices": [
"/dev/mapper/fedora_host--186-root"
]
},
"/tmp": {
"kb_size": "8055268",
"kb_used": "848396",
"kb_available": "7206872",
"percent_used": "11%",
"total_inodes": "2013817",
"inodes_used": "1353",
"inodes_available": "2012464",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
],
"devices": [
"tmpfs"
]
},
"/boot": {
"kb_size": "487652",
"kb_used": "126628",
"kb_available": "331328",
"percent_used": "28%",
"total_inodes": "128016",
"inodes_used": "405",
"inodes_available": "127611",
"inodes_percent_used": "1%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "32caaec3-ef40-4691-a3b6-438c3f9bc1c0",
"devices": [
"/dev/sda1"
]
},
"/home": {
"kb_size": "185948124",
"kb_used": "105904724",
"kb_available": "70574680",
"percent_used": "61%",
"total_inodes": "11821056",
"inodes_used": "1266687",
"inodes_available": "10554369",
"inodes_percent_used": "11%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d",
"devices": [
"/dev/mapper/fedora_host--186-home"
]
},
"/var/lib/machines": {
"kb_size": "512000",
"kb_used": "16672",
"kb_available": "429056",
"percent_used": "4%",
"fs_type": "btrfs",
"uuid": "0f031512-ab15-497d-9abd-3a512b4a9390",
"devices": [
"/dev/loop0",
"/var/lib/machines.raw"
],
"mount_options": [
"rw",
"relatime",
"seclabel",
"space_cache",
"subvolid=5",
"subvol=/"
]
},
"/run/user/0": {
"kb_size": "1611052",
"kb_used": "0",
"kb_available": "1611052",
"percent_used": "0%",
"total_inodes": "2013817",
"inodes_used": "7",
"inodes_available": "2013810",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700"
],
"devices": [
"tmpfs"
]
},
"/run/user/1000": {
"kb_size": "1611052",
"kb_used": "72",
"kb_available": "1610980",
"percent_used": "1%",
"total_inodes": "2013817",
"inodes_used": "36",
"inodes_available": "2013781",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700",
"uid=1000",
"gid=1000"
],
"devices": [
"tmpfs"
]
},
"/sys": {
"fs_type": "sysfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"devices": [
"sysfs"
]
},
"/proc": {
"fs_type": "proc",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"devices": [
"proc"
]
},
"/sys/kernel/security": {
"fs_type": "securityfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
],
"devices": [
"securityfs"
]
},
"/dev/pts": {
"fs_type": "devpts",
"mount_options": [
"rw",
"nosuid",
"noexec",
"relatime",
"seclabel",
"gid=5",
"mode=620",
"ptmxmode=000"
],
"devices": [
"devpts"
]
},
"/sys/fs/cgroup/systemd": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"xattr",
"release_agent=/usr/lib/systemd/systemd-cgroups-agent",
"name=systemd"
],
"devices": [
"cgroup"
]
},
"/sys/fs/pstore": {
"fs_type": "pstore",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
],
"devices": [
"pstore"
]
},
"/sys/fs/cgroup/devices": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"devices"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/cpuset": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpuset"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/perf_event": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"perf_event"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/hugetlb": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"hugetlb"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/cpu,cpuacct": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpu",
"cpuacct"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/blkio": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"blkio"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/freezer": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"freezer"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/memory": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"memory"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/pids": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"pids"
],
"devices": [
"cgroup"
]
},
"/sys/fs/cgroup/net_cls,net_prio": {
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"net_cls",
"net_prio"
],
"devices": [
"cgroup"
]
},
"/sys/kernel/config": {
"fs_type": "configfs",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"configfs"
]
},
"/sys/fs/selinux": {
"fs_type": "selinuxfs",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"selinuxfs"
]
},
"/sys/kernel/debug": {
"fs_type": "debugfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"devices": [
"debugfs"
]
},
"/dev/hugepages": {
"fs_type": "hugetlbfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"devices": [
"hugetlbfs"
]
},
"/dev/mqueue": {
"fs_type": "mqueue",
"mount_options": [
"rw",
"relatime",
"seclabel"
],
"devices": [
"mqueue"
]
},
"/proc/sys/fs/binfmt_misc": {
"fs_type": "binfmt_misc",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"systemd-1",
"binfmt_misc"
]
},
"/sys/fs/fuse/connections": {
"fs_type": "fusectl",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"fusectl"
]
},
"/run/user/1000/gvfs": {
"fs_type": "fuse.gvfsd-fuse",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"user_id=1000",
"group_id=1000"
],
"devices": [
"gvfsd-fuse"
]
},
"/var/lib/docker/devicemapper": {
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce",
"devices": [
"/dev/mapper/fedora_host--186-root"
]
},
      "/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8": {
        "fs_type": "xfs",
        "mount_options": [
          "rw",
          "relatime",
          "context=\"system_u:object_r:container_file_t:s0:c523",
          "c681\"",
          "nouuid",
          "attr2",
          "inode64",
          "logbsize=64k",
          "sunit=128",
          "swidth=128",
          "noquota"
        ],
        "uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123",
        "devices": [
          "/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8"
        ]
      },
      "/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm": {
        "fs_type": "tmpfs",
        "mount_options": [
          "rw",
          "nosuid",
          "nodev",
          "noexec",
          "relatime",
          "context=\"system_u:object_r:container_file_t:s0:c523",
          "c681\"",
          "size=65536k"
        ],
        "devices": [
          "shm"
        ]
      },
"/run/docker/netns/1ce89fd79f3d": {
"fs_type": "nsfs",
"mount_options": [
"rw"
],
"devices": [
"nsfs"
]
},
"/sys/kernel/debug/tracing": {
"fs_type": "tracefs",
"mount_options": [
"rw",
"relatime"
],
"devices": [
"tracefs"
]
}
},
"by_pair": {
"devtmpfs,/dev": {
"device": "devtmpfs",
"kb_size": "8044124",
"kb_used": "0",
"kb_available": "8044124",
"percent_used": "0%",
"mount": "/dev",
"total_inodes": "2011031",
"inodes_used": "629",
"inodes_available": "2010402",
"inodes_percent_used": "1%",
"fs_type": "devtmpfs",
"mount_options": [
"rw",
"nosuid",
"seclabel",
"size=8044124k",
"nr_inodes=2011031",
"mode=755"
]
},
"tmpfs,/dev/shm": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "96036",
"kb_available": "7959232",
"percent_used": "2%",
"mount": "/dev/shm",
"total_inodes": "2013817",
"inodes_used": "217",
"inodes_available": "2013600",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
]
},
"tmpfs,/run": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "2280",
"kb_available": "8052988",
"percent_used": "1%",
"mount": "/run",
"total_inodes": "2013817",
"inodes_used": "1070",
"inodes_available": "2012747",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel",
"mode=755"
]
},
"tmpfs,/sys/fs/cgroup": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "0",
"kb_available": "8055268",
"percent_used": "0%",
"mount": "/sys/fs/cgroup",
"total_inodes": "2013817",
"inodes_used": "16",
"inodes_available": "2013801",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"ro",
"nosuid",
"nodev",
"noexec",
"seclabel",
"mode=755"
]
},
"/dev/mapper/fedora_host--186-root,/": {
"device": "/dev/mapper/fedora_host--186-root",
"kb_size": "51475068",
"kb_used": "42551284",
"kb_available": "6285960",
"percent_used": "88%",
"mount": "/",
"total_inodes": "3276800",
"inodes_used": "532908",
"inodes_available": "2743892",
"inodes_percent_used": "17%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce"
},
"tmpfs,/tmp": {
"device": "tmpfs",
"kb_size": "8055268",
"kb_used": "848396",
"kb_available": "7206872",
"percent_used": "11%",
"mount": "/tmp",
"total_inodes": "2013817",
"inodes_used": "1353",
"inodes_available": "2012464",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"seclabel"
]
},
"/dev/sda1,/boot": {
"device": "/dev/sda1",
"kb_size": "487652",
"kb_used": "126628",
"kb_available": "331328",
"percent_used": "28%",
"mount": "/boot",
"total_inodes": "128016",
"inodes_used": "405",
"inodes_available": "127611",
"inodes_percent_used": "1%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "32caaec3-ef40-4691-a3b6-438c3f9bc1c0"
},
"/dev/mapper/fedora_host--186-home,/home": {
"device": "/dev/mapper/fedora_host--186-home",
"kb_size": "185948124",
"kb_used": "105904724",
"kb_available": "70574680",
"percent_used": "61%",
"mount": "/home",
"total_inodes": "11821056",
"inodes_used": "1266687",
"inodes_available": "10554369",
"inodes_percent_used": "11%",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "2d3e4853-fa69-4ccf-8a6a-77b05ab0a42d"
},
"/dev/loop0,/var/lib/machines": {
"device": "/dev/loop0",
"kb_size": "512000",
"kb_used": "16672",
"kb_available": "429056",
"percent_used": "4%",
"mount": "/var/lib/machines",
"fs_type": "btrfs",
"uuid": "0f031512-ab15-497d-9abd-3a512b4a9390"
},
"tmpfs,/run/user/0": {
"device": "tmpfs",
"kb_size": "1611052",
"kb_used": "0",
"kb_available": "1611052",
"percent_used": "0%",
"mount": "/run/user/0",
"total_inodes": "2013817",
"inodes_used": "7",
"inodes_available": "2013810",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700"
]
},
"tmpfs,/run/user/1000": {
"device": "tmpfs",
"kb_size": "1611052",
"kb_used": "72",
"kb_available": "1610980",
"percent_used": "1%",
"mount": "/run/user/1000",
"total_inodes": "2013817",
"inodes_used": "36",
"inodes_available": "2013781",
"inodes_percent_used": "1%",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"seclabel",
"size=1611052k",
"mode=700",
"uid=1000",
"gid=1000"
]
},
"sysfs,/sys": {
"device": "sysfs",
"mount": "/sys",
"fs_type": "sysfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
]
},
"proc,/proc": {
"device": "proc",
"mount": "/proc",
"fs_type": "proc",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
]
},
"securityfs,/sys/kernel/security": {
"device": "securityfs",
"mount": "/sys/kernel/security",
"fs_type": "securityfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime"
]
},
"devpts,/dev/pts": {
"device": "devpts",
"mount": "/dev/pts",
"fs_type": "devpts",
"mount_options": [
"rw",
"nosuid",
"noexec",
"relatime",
"seclabel",
"gid=5",
"mode=620",
"ptmxmode=000"
]
},
"cgroup,/sys/fs/cgroup/systemd": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/systemd",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"xattr",
"release_agent=/usr/lib/systemd/systemd-cgroups-agent",
"name=systemd"
]
},
"pstore,/sys/fs/pstore": {
"device": "pstore",
"mount": "/sys/fs/pstore",
"fs_type": "pstore",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"seclabel"
]
},
"cgroup,/sys/fs/cgroup/devices": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/devices",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"devices"
]
},
"cgroup,/sys/fs/cgroup/cpuset": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/cpuset",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpuset"
]
},
"cgroup,/sys/fs/cgroup/perf_event": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/perf_event",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"perf_event"
]
},
"cgroup,/sys/fs/cgroup/hugetlb": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/hugetlb",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"hugetlb"
]
},
"cgroup,/sys/fs/cgroup/cpu,cpuacct": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/cpu,cpuacct",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"cpu",
"cpuacct"
]
},
"cgroup,/sys/fs/cgroup/blkio": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/blkio",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"blkio"
]
},
"cgroup,/sys/fs/cgroup/freezer": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/freezer",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"freezer"
]
},
"cgroup,/sys/fs/cgroup/memory": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/memory",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"memory"
]
},
"cgroup,/sys/fs/cgroup/pids": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/pids",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"pids"
]
},
"cgroup,/sys/fs/cgroup/net_cls,net_prio": {
"device": "cgroup",
"mount": "/sys/fs/cgroup/net_cls,net_prio",
"fs_type": "cgroup",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"net_cls",
"net_prio"
]
},
"configfs,/sys/kernel/config": {
"device": "configfs",
"mount": "/sys/kernel/config",
"fs_type": "configfs",
"mount_options": [
"rw",
"relatime"
]
},
"selinuxfs,/sys/fs/selinux": {
"device": "selinuxfs",
"mount": "/sys/fs/selinux",
"fs_type": "selinuxfs",
"mount_options": [
"rw",
"relatime"
]
},
"debugfs,/sys/kernel/debug": {
"device": "debugfs",
"mount": "/sys/kernel/debug",
"fs_type": "debugfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
]
},
"hugetlbfs,/dev/hugepages": {
"device": "hugetlbfs",
"mount": "/dev/hugepages",
"fs_type": "hugetlbfs",
"mount_options": [
"rw",
"relatime",
"seclabel"
]
},
"mqueue,/dev/mqueue": {
"device": "mqueue",
"mount": "/dev/mqueue",
"fs_type": "mqueue",
"mount_options": [
"rw",
"relatime",
"seclabel"
]
},
"systemd-1,/proc/sys/fs/binfmt_misc": {
"device": "systemd-1",
"mount": "/proc/sys/fs/binfmt_misc",
"fs_type": "autofs",
"mount_options": [
"rw",
"relatime",
"fd=40",
"pgrp=1",
"timeout=0",
"minproto=5",
"maxproto=5",
"direct",
"pipe_ino=17610"
]
},
"/var/lib/machines.raw,/var/lib/machines": {
"device": "/var/lib/machines.raw",
"mount": "/var/lib/machines",
"fs_type": "btrfs",
"mount_options": [
"rw",
"relatime",
"seclabel",
"space_cache",
"subvolid=5",
"subvol=/"
]
},
"fusectl,/sys/fs/fuse/connections": {
"device": "fusectl",
"mount": "/sys/fs/fuse/connections",
"fs_type": "fusectl",
"mount_options": [
"rw",
"relatime"
]
},
"gvfsd-fuse,/run/user/1000/gvfs": {
"device": "gvfsd-fuse",
"mount": "/run/user/1000/gvfs",
"fs_type": "fuse.gvfsd-fuse",
"mount_options": [
"rw",
"nosuid",
"nodev",
"relatime",
"user_id=1000",
"group_id=1000"
]
},
"/dev/mapper/fedora_host--186-root,/var/lib/docker/devicemapper": {
"device": "/dev/mapper/fedora_host--186-root",
"mount": "/var/lib/docker/devicemapper",
"fs_type": "ext4",
"mount_options": [
"rw",
"relatime",
"seclabel",
"data=ordered"
],
"uuid": "d34cf5e3-3449-4a6c-8179-a1feb2bca6ce"
},
"binfmt_misc,/proc/sys/fs/binfmt_misc": {
"device": "binfmt_misc",
"mount": "/proc/sys/fs/binfmt_misc",
"fs_type": "binfmt_misc",
"mount_options": [
"rw",
"relatime"
]
},
"/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8,/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8": {
"device": "/dev/mapper/docker-253:1-1180487-0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8",
"mount": "/var/lib/docker/devicemapper/mnt/0868fce108cd2524a4823aad8d665cca018ead39550ca088c440ab05deec13f8",
"fs_type": "xfs",
"mount_options": [
"rw",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"nouuid",
"attr2",
"inode64",
"logbsize=64k",
"sunit=128",
"swidth=128",
"noquota"
],
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123"
},
"shm,/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm": {
"device": "shm",
"mount": "/var/lib/docker/containers/426e513ed508a451e3f70440eed040761f81529e4bc4240e7522d331f3f3bc12/shm",
"fs_type": "tmpfs",
"mount_options": [
"rw",
"nosuid",
"nodev",
"noexec",
"relatime",
"context=\"system_u:object_r:container_file_t:s0:c523",
"c681\"",
"size=65536k"
]
},
"nsfs,/run/docker/netns/1ce89fd79f3d": {
"device": "nsfs",
"mount": "/run/docker/netns/1ce89fd79f3d",
"fs_type": "nsfs",
"mount_options": [
"rw"
]
},
"tracefs,/sys/kernel/debug/tracing": {
"device": "tracefs",
"mount": "/sys/kernel/debug/tracing",
"fs_type": "tracefs",
"mount_options": [
"rw",
"relatime"
]
},
"/dev/loop1,": {
"device": "/dev/loop1",
"fs_type": "xfs",
"uuid": "00e2aa25-20d8-4ad7-b3a5-c501f2f4c123"
},
"/dev/mapper/docker-253:1-1180487-pool,": {
"device": "/dev/mapper/docker-253:1-1180487-pool"
},
"/dev/sr0,": {
"device": "/dev/sr0"
},
"/dev/loop2,": {
"device": "/dev/loop2"
},
"/dev/sda,": {
"device": "/dev/sda"
},
"/dev/sda2,": {
"device": "/dev/sda2",
"fs_type": "LVM2_member",
"uuid": "66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK"
},
"/dev/mapper/fedora_host--186-swap,": {
"device": "/dev/mapper/fedora_host--186-swap",
"fs_type": "swap",
"uuid": "eae6059d-2fbe-4d1c-920d-a80bbeb1ac6d"
}
}
},
"virtualization": {
"systems": {
"kvm": "host"
},
"system": "kvm",
"role": "host",
"libvirt_version": "2.2.0",
"uri": "qemu:///system",
"capabilities": {
},
"nodeinfo": {
"cores": 4,
"cpus": 8,
"memory": 16110540,
"mhz": 2832,
"model": "x86_64",
"nodes": 1,
"sockets": 1,
"threads": 2
},
"domains": {
},
"networks": {
"vagrant-libvirt": {
"bridge_name": "virbr1",
"uuid": "877ddb27-b39c-427e-a7bf-1aa829389eeb"
},
"default": {
"bridge_name": "virbr0",
"uuid": "750d2567-23a8-470d-8a2b-71cd651e30d1"
}
},
"storage": {
"virt-images": {
"autostart": true,
"uuid": "d8a189fa-f98c-462f-9ea4-204eb77a96a1",
"allocation": 106412863488,
"available": 83998015488,
"capacity": 190410878976,
"state": 2,
"volumes": {
"rhel-atomic-host-standard-2014-7-1.qcow2": {
"key": "/home/some_user/virt-images/rhel-atomic-host-standard-2014-7-1.qcow2",
"name": "rhel-atomic-host-standard-2014-7-1.qcow2",
"path": "/home/some_user/virt-images/rhel-atomic-host-standard-2014-7-1.qcow2",
"allocation": 1087115264,
"capacity": 8589934592,
"type": 0
},
"atomic-beta-instance-7.qcow2": {
"key": "/home/some_user/virt-images/atomic-beta-instance-7.qcow2",
"name": "atomic-beta-instance-7.qcow2",
"path": "/home/some_user/virt-images/atomic-beta-instance-7.qcow2",
"allocation": 200704,
"capacity": 8589934592,
"type": 0
},
"os1-atomic-meta-data": {
"key": "/home/some_user/virt-images/os1-atomic-meta-data",
"name": "os1-atomic-meta-data",
"path": "/home/some_user/virt-images/os1-atomic-meta-data",
"allocation": 4096,
"capacity": 49,
"type": 0
},
"atomic-user-data": {
"key": "/home/some_user/virt-images/atomic-user-data",
"name": "atomic-user-data",
"path": "/home/some_user/virt-images/atomic-user-data",
"allocation": 4096,
"capacity": 512,
"type": 0
},
"qemu-snap.txt": {
"key": "/home/some_user/virt-images/qemu-snap.txt",
"name": "qemu-snap.txt",
"path": "/home/some_user/virt-images/qemu-snap.txt",
"allocation": 4096,
"capacity": 111,
"type": 0
},
"atomic-beta-instance-5.qcow2": {
"key": "/home/some_user/virt-images/atomic-beta-instance-5.qcow2",
"name": "atomic-beta-instance-5.qcow2",
"path": "/home/some_user/virt-images/atomic-beta-instance-5.qcow2",
"allocation": 339091456,
"capacity": 8589934592,
"type": 0
},
"meta-data": {
"key": "/home/some_user/virt-images/meta-data",
"name": "meta-data",
"path": "/home/some_user/virt-images/meta-data",
"allocation": 4096,
"capacity": 49,
"type": 0
},
"atomic-beta-instance-8.qcow2": {
"key": "/home/some_user/virt-images/atomic-beta-instance-8.qcow2",
"name": "atomic-beta-instance-8.qcow2",
"path": "/home/some_user/virt-images/atomic-beta-instance-8.qcow2",
"allocation": 322576384,
"capacity": 8589934592,
"type": 0
},
"user-data": {
"key": "/home/some_user/virt-images/user-data",
"name": "user-data",
"path": "/home/some_user/virt-images/user-data",
"allocation": 4096,
"capacity": 512,
"type": 0
},
"rhel-6-2015-10-16.qcow2": {
"key": "/home/some_user/virt-images/rhel-6-2015-10-16.qcow2",
"name": "rhel-6-2015-10-16.qcow2",
"path": "/home/some_user/virt-images/rhel-6-2015-10-16.qcow2",
"allocation": 7209422848,
"capacity": 17179869184,
"type": 0
},
"atomic_demo_notes.txt": {
"key": "/home/some_user/virt-images/atomic_demo_notes.txt",
"name": "atomic_demo_notes.txt",
"path": "/home/some_user/virt-images/atomic_demo_notes.txt",
"allocation": 4096,
"capacity": 354,
"type": 0
},
"packer-windows-2012-R2-standard": {
"key": "/home/some_user/virt-images/packer-windows-2012-R2-standard",
"name": "packer-windows-2012-R2-standard",
"path": "/home/some_user/virt-images/packer-windows-2012-R2-standard",
"allocation": 16761495552,
"capacity": 64424509440,
"type": 0
},
"atomic3-cidata.iso": {
"key": "/home/some_user/virt-images/atomic3-cidata.iso",
"name": "atomic3-cidata.iso",
"path": "/home/some_user/virt-images/atomic3-cidata.iso",
"allocation": 376832,
"capacity": 374784,
"type": 0
},
".atomic_demo_notes.txt.swp": {
"key": "/home/some_user/virt-images/.atomic_demo_notes.txt.swp",
"name": ".atomic_demo_notes.txt.swp",
"path": "/home/some_user/virt-images/.atomic_demo_notes.txt.swp",
"allocation": 12288,
"capacity": 12288,
"type": 0
},
"rhel7-2015-10-13.qcow2": {
"key": "/home/some_user/virt-images/rhel7-2015-10-13.qcow2",
"name": "rhel7-2015-10-13.qcow2",
"path": "/home/some_user/virt-images/rhel7-2015-10-13.qcow2",
"allocation": 4679413760,
"capacity": 12884901888,
"type": 0
}
}
},
"default": {
"autostart": true,
"uuid": "c8d9d160-efc0-4207-81c2-e79d6628f7e1",
"allocation": 43745488896,
"available": 8964980736,
"capacity": 52710469632,
"state": 2,
"volumes": {
"s3than-VAGRANTSLASH-trusty64_vagrant_box_image_0.0.1.img": {
"key": "/var/lib/libvirt/images/s3than-VAGRANTSLASH-trusty64_vagrant_box_image_0.0.1.img",
"name": "s3than-VAGRANTSLASH-trusty64_vagrant_box_image_0.0.1.img",
"path": "/var/lib/libvirt/images/s3than-VAGRANTSLASH-trusty64_vagrant_box_image_0.0.1.img",
"allocation": 1258622976,
"capacity": 42949672960,
"type": 0
},
"centos-7.0_vagrant_box_image.img": {
"key": "/var/lib/libvirt/images/centos-7.0_vagrant_box_image.img",
"name": "centos-7.0_vagrant_box_image.img",
"path": "/var/lib/libvirt/images/centos-7.0_vagrant_box_image.img",
"allocation": 1649414144,
"capacity": 42949672960,
"type": 0
},
"baremettle-VAGRANTSLASH-centos-5.10_vagrant_box_image_1.0.0.img": {
"key": "/var/lib/libvirt/images/baremettle-VAGRANTSLASH-centos-5.10_vagrant_box_image_1.0.0.img",
"name": "baremettle-VAGRANTSLASH-centos-5.10_vagrant_box_image_1.0.0.img",
"path": "/var/lib/libvirt/images/baremettle-VAGRANTSLASH-centos-5.10_vagrant_box_image_1.0.0.img",
"allocation": 810422272,
"capacity": 42949672960,
"type": 0
},
"centos-6_vagrant_box_image.img": {
"key": "/var/lib/libvirt/images/centos-6_vagrant_box_image.img",
"name": "centos-6_vagrant_box_image.img",
"path": "/var/lib/libvirt/images/centos-6_vagrant_box_image.img",
"allocation": 1423642624,
"capacity": 42949672960,
"type": 0
},
"centos5-ansible_default.img": {
"key": "/var/lib/libvirt/images/centos5-ansible_default.img",
"name": "centos5-ansible_default.img",
"path": "/var/lib/libvirt/images/centos5-ansible_default.img",
"allocation": 8986624,
"capacity": 42949672960,
"type": 0
},
"ubuntu_default.img": {
"key": "/var/lib/libvirt/images/ubuntu_default.img",
"name": "ubuntu_default.img",
"path": "/var/lib/libvirt/images/ubuntu_default.img",
"allocation": 3446833152,
"capacity": 42949672960,
"type": 0
}
}
},
"boot-scratch": {
"autostart": true,
"uuid": "e5ef4360-b889-4843-84fb-366e8fb30f20",
"allocation": 43745488896,
"available": 8964980736,
"capacity": 52710469632,
"state": 2,
"volumes": {
}
}
}
},
"network": {
"interfaces": {
"lo": {
"mtu": "65536",
"flags": [
"LOOPBACK",
"UP",
"LOWER_UP"
],
"encapsulation": "Loopback",
"addresses": {
"127.0.0.1": {
"family": "inet",
"prefixlen": "8",
"netmask": "255.0.0.0",
"scope": "Node",
"ip_scope": "LOOPBACK"
},
"::1": {
"family": "inet6",
"prefixlen": "128",
"scope": "Node",
"tags": [
],
"ip_scope": "LINK LOCAL LOOPBACK"
}
},
"state": "unknown"
},
"em1": {
"type": "em",
"number": "1",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST",
"UP"
],
"encapsulation": "Ethernet",
"addresses": {
"3C:97:0E:E9:28:8E": {
"family": "lladdr"
}
},
"state": "down",
"link_speed": 0,
"duplex": "Unknown! (255)",
"port": "Twisted Pair",
"transceiver": "internal",
"auto_negotiation": "on",
"mdi_x": "Unknown (auto)",
"ring_params": {
"max_rx": 4096,
"max_rx_mini": 0,
"max_rx_jumbo": 0,
"max_tx": 4096,
"current_rx": 256,
"current_rx_mini": 0,
"current_rx_jumbo": 0,
"current_tx": 256
}
},
"wlp4s0": {
"type": "wlp4s",
"number": "0",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST",
"UP",
"LOWER_UP"
],
"encapsulation": "Ethernet",
"addresses": {
"5C:51:4F:E6:A8:E3": {
"family": "lladdr"
},
"192.168.1.19": {
"family": "inet",
"prefixlen": "24",
"netmask": "255.255.255.0",
"broadcast": "192.168.1.255",
"scope": "Global",
"ip_scope": "RFC1918 PRIVATE"
},
"fe80::5e51:4fff:fee6:a8e3": {
"family": "inet6",
"prefixlen": "64",
"scope": "Link",
"tags": [
],
"ip_scope": "LINK LOCAL UNICAST"
}
},
"state": "up",
"arp": {
"192.168.1.33": "00:11:d9:39:3e:e0",
"192.168.1.20": "ac:3a:7a:a7:49:e8",
"192.168.1.17": "00:09:b0:d0:64:19",
"192.168.1.22": "ac:bc:32:82:30:bb",
"192.168.1.15": "00:11:32:2e:10:d5",
"192.168.1.1": "84:1b:5e:03:50:b2",
"192.168.1.34": "00:11:d9:5f:e8:e6",
"192.168.1.16": "dc:a5:f4:ac:22:3a",
"192.168.1.21": "74:c2:46:73:28:d8",
"192.168.1.27": "00:17:88:09:3c:bb",
"192.168.1.24": "08:62:66:90:a2:b8"
},
"routes": [
{
"destination": "default",
"family": "inet",
"via": "192.168.1.1",
"metric": "600",
"proto": "static"
},
{
"destination": "66.187.232.64",
"family": "inet",
"via": "192.168.1.1",
"metric": "600",
"proto": "static"
},
{
"destination": "192.168.1.0/24",
"family": "inet",
"scope": "link",
"metric": "600",
"proto": "kernel",
"src": "192.168.1.19"
},
{
"destination": "192.168.1.1",
"family": "inet",
"scope": "link",
"metric": "600",
"proto": "static"
},
{
"destination": "fe80::/64",
"family": "inet6",
"metric": "256",
"proto": "kernel"
}
],
"ring_params": {
"max_rx": 0,
"max_rx_mini": 0,
"max_rx_jumbo": 0,
"max_tx": 0,
"current_rx": 0,
"current_rx_mini": 0,
"current_rx_jumbo": 0,
"current_tx": 0
}
},
"virbr1": {
"type": "virbr",
"number": "1",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST",
"UP"
],
"encapsulation": "Ethernet",
"addresses": {
"52:54:00:B4:68:A9": {
"family": "lladdr"
},
"192.168.121.1": {
"family": "inet",
"prefixlen": "24",
"netmask": "255.255.255.0",
"broadcast": "192.168.121.255",
"scope": "Global",
"ip_scope": "RFC1918 PRIVATE"
}
},
"state": "1",
"routes": [
{
"destination": "192.168.121.0/24",
"family": "inet",
"scope": "link",
"proto": "kernel",
"src": "192.168.121.1"
}
],
"ring_params": {
}
},
"virbr1-nic": {
"type": "virbr",
"number": "1-nic",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST"
],
"encapsulation": "Ethernet",
"addresses": {
"52:54:00:B4:68:A9": {
"family": "lladdr"
}
},
"state": "disabled",
"link_speed": 10,
"duplex": "Full",
"port": "Twisted Pair",
"transceiver": "internal",
"auto_negotiation": "off",
"mdi_x": "Unknown",
"ring_params": {
}
},
"virbr0": {
"type": "virbr",
"number": "0",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST",
"UP"
],
"encapsulation": "Ethernet",
"addresses": {
"52:54:00:CE:82:5E": {
"family": "lladdr"
},
"192.168.137.1": {
"family": "inet",
"prefixlen": "24",
"netmask": "255.255.255.0",
"broadcast": "192.168.137.255",
"scope": "Global",
"ip_scope": "RFC1918 PRIVATE"
}
},
"state": "1",
"routes": [
{
"destination": "192.168.137.0/24",
"family": "inet",
"scope": "link",
"proto": "kernel",
"src": "192.168.137.1"
}
],
"ring_params": {
}
},
"virbr0-nic": {
"type": "virbr",
"number": "0-nic",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST"
],
"encapsulation": "Ethernet",
"addresses": {
"52:54:00:CE:82:5E": {
"family": "lladdr"
}
},
"state": "disabled",
"link_speed": 10,
"duplex": "Full",
"port": "Twisted Pair",
"transceiver": "internal",
"auto_negotiation": "off",
"mdi_x": "Unknown",
"ring_params": {
}
},
"docker0": {
"type": "docker",
"number": "0",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST",
"UP",
"LOWER_UP"
],
"encapsulation": "Ethernet",
"addresses": {
"02:42:EA:15:D8:84": {
"family": "lladdr"
},
"172.17.0.1": {
"family": "inet",
"prefixlen": "16",
"netmask": "255.255.0.0",
"scope": "Global",
"ip_scope": "RFC1918 PRIVATE"
},
"fe80::42:eaff:fe15:d884": {
"family": "inet6",
"prefixlen": "64",
"scope": "Link",
"tags": [
],
"ip_scope": "LINK LOCAL UNICAST"
}
},
"state": "0",
"arp": {
"172.17.0.2": "02:42:ac:11:00:02",
"172.17.0.4": "02:42:ac:11:00:04",
"172.17.0.3": "02:42:ac:11:00:03"
},
"routes": [
{
"destination": "172.17.0.0/16",
"family": "inet",
"scope": "link",
"proto": "kernel",
"src": "172.17.0.1"
},
{
"destination": "fe80::/64",
"family": "inet6",
"metric": "256",
"proto": "kernel"
}
],
"ring_params": {
}
},
"vethf20ff12": {
"type": "vethf20ff1",
"number": "2",
"mtu": "1500",
"flags": [
"BROADCAST",
"MULTICAST",
"UP",
"LOWER_UP"
],
"encapsulation": "Ethernet",
"addresses": {
"AE:6E:2B:1E:A1:31": {
"family": "lladdr"
},
"fe80::ac6e:2bff:fe1e:a131": {
"family": "inet6",
"prefixlen": "64",
"scope": "Link",
"tags": [
],
"ip_scope": "LINK LOCAL UNICAST"
}
},
"state": "forwarding",
"routes": [
{
"destination": "fe80::/64",
"family": "inet6",
"metric": "256",
"proto": "kernel"
}
],
"link_speed": 10000,
"duplex": "Full",
"port": "Twisted Pair",
"transceiver": "internal",
"auto_negotiation": "off",
"mdi_x": "Unknown",
"ring_params": {
}
},
"tun0": {
"type": "tun",
"number": "0",
"mtu": "1360",
"flags": [
"MULTICAST",
"NOARP",
"UP",
"LOWER_UP"
],
"addresses": {
"10.10.120.68": {
"family": "inet",
"prefixlen": "21",
"netmask": "255.255.248.0",
"broadcast": "10.10.127.255",
"scope": "Global",
"ip_scope": "RFC1918 PRIVATE"
},
"fe80::365e:885c:31ca:7670": {
"family": "inet6",
"prefixlen": "64",
"scope": "Link",
"tags": [
"flags",
"800"
],
"ip_scope": "LINK LOCAL UNICAST"
}
},
"state": "unknown",
"routes": [
{
"destination": "10.0.0.0/8",
"family": "inet",
"via": "10.10.120.1",
"metric": "50",
"proto": "static"
},
{
"destination": "10.10.120.0/21",
"family": "inet",
"scope": "link",
"metric": "50",
"proto": "kernel",
"src": "10.10.120.68"
},
{
"destination": "fe80::/64",
"family": "inet6",
"metric": "256",
"proto": "kernel"
}
]
}
},
"default_interface": "wlp4s0",
"default_gateway": "192.168.1.1"
},
"counters": {
"network": {
"interfaces": {
"lo": {
"tx": {
"queuelen": "1",
"bytes": "202568405",
"packets": "1845473",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "202568405",
"packets": "1845473",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"em1": {
"tx": {
"queuelen": "1000",
"bytes": "673898037",
"packets": "1631282",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "1536186718",
"packets": "1994394",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"wlp4s0": {
"tx": {
"queuelen": "1000",
"bytes": "3927670539",
"packets": "15146886",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "12367173401",
"packets": "23981258",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"virbr1": {
"tx": {
"queuelen": "1000",
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"virbr1-nic": {
"tx": {
"queuelen": "1000",
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"virbr0": {
"tx": {
"queuelen": "1000",
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"virbr0-nic": {
"tx": {
"queuelen": "1000",
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "0",
"packets": "0",
"errors": "0",
"drop": "0",
"overrun": "0"
}
},
"docker0": {
"rx": {
"bytes": "2471313",
"packets": "36915",
"errors": "0",
"drop": "0",
"overrun": "0"
},
"tx": {
"bytes": "413371670",
"packets": "127713",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
}
},
"vethf20ff12": {
"rx": {
"bytes": "34391",
"packets": "450",
"errors": "0",
"drop": "0",
"overrun": "0"
},
"tx": {
"bytes": "17919115",
"packets": "108069",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
}
},
"tun0": {
"tx": {
"queuelen": "100",
"bytes": "22343462",
"packets": "253442",
"errors": "0",
"drop": "0",
"carrier": "0",
"collisions": "0"
},
"rx": {
"bytes": "115160002",
"packets": "197529",
"errors": "0",
"drop": "0",
"overrun": "0"
}
}
}
}
},
"ipaddress": "192.168.1.19",
"macaddress": "5C:51:4F:E6:A8:E3",
"ip6address": "fe80::42:eaff:fe15:d884",
"cpu": {
"0": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "3238.714",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "0",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"1": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "3137.200",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "0",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"2": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "3077.050",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "1",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"3": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "2759.655",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "1",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"4": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "3419.000",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "2",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"5": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "2752.569",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "2",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"6": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "2953.619",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "3",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"7": {
"vendor_id": "GenuineIntel",
"family": "6",
"model": "60",
"model_name": "Intel(R) Core(TM) i7-4800MQ CPU @ 2.70GHz",
"stepping": "3",
"mhz": "2927.087",
"cache_size": "6144 KB",
"physical_id": "0",
"core_id": "3",
"cores": "4",
"flags": [
"fpu",
"vme",
"de",
"pse",
"tsc",
"msr",
"pae",
"mce",
"cx8",
"apic",
"sep",
"mtrr",
"pge",
"mca",
"cmov",
"pat",
"pse36",
"clflush",
"dts",
"acpi",
"mmx",
"fxsr",
"sse",
"sse2",
"ss",
"ht",
"tm",
"pbe",
"syscall",
"nx",
"pdpe1gb",
"rdtscp",
"lm",
"constant_tsc",
"arch_perfmon",
"pebs",
"bts",
"rep_good",
"nopl",
"xtopology",
"nonstop_tsc",
"aperfmperf",
"eagerfpu",
"pni",
"pclmulqdq",
"dtes64",
"monitor",
"ds_cpl",
"vmx",
"smx",
"est",
"tm2",
"ssse3",
"sdbg",
"fma",
"cx16",
"xtpr",
"pdcm",
"pcid",
"sse4_1",
"sse4_2",
"x2apic",
"movbe",
"popcnt",
"tsc_deadline_timer",
"aes",
"xsave",
"avx",
"f16c",
"rdrand",
"lahf_lm",
"abm",
"epb",
"tpr_shadow",
"vnmi",
"flexpriority",
"ept",
"vpid",
"fsgsbase",
"tsc_adjust",
"bmi1",
"avx2",
"smep",
"bmi2",
"erms",
"invpcid",
"xsaveopt",
"dtherm",
"ida",
"arat",
"pln",
"pts"
]
},
"total": 8,
"real": 1,
"cores": 4
},
"etc": {
"passwd": {
"root": {
"dir": "/root",
"gid": 0,
"uid": 0,
"shell": "/bin/bash",
"gecos": "root"
},
"bin": {
"dir": "/bin",
"gid": 1,
"uid": 1,
"shell": "/sbin/nologin",
"gecos": "bin"
},
"daemon": {
"dir": "/sbin",
"gid": 2,
"uid": 2,
"shell": "/sbin/nologin",
"gecos": "daemon"
},
"adm": {
"dir": "/var/adm",
"gid": 4,
"uid": 3,
"shell": "/sbin/nologin",
"gecos": "adm"
},
"lp": {
"dir": "/var/spool/lpd",
"gid": 7,
"uid": 4,
"shell": "/sbin/nologin",
"gecos": "lp"
},
"sync": {
"dir": "/sbin",
"gid": 0,
"uid": 5,
"shell": "/bin/sync",
"gecos": "sync"
},
"shutdown": {
"dir": "/sbin",
"gid": 0,
"uid": 6,
"shell": "/sbin/shutdown",
"gecos": "shutdown"
},
"halt": {
"dir": "/sbin",
"gid": 0,
"uid": 7,
"shell": "/sbin/halt",
"gecos": "halt"
},
"mail": {
"dir": "/var/spool/mail",
"gid": 12,
"uid": 8,
"shell": "/sbin/nologin",
"gecos": "mail"
},
"operator": {
"dir": "/root",
"gid": 0,
"uid": 11,
"shell": "/sbin/nologin",
"gecos": "operator"
},
"games": {
"dir": "/usr/games",
"gid": 100,
"uid": 12,
"shell": "/sbin/nologin",
"gecos": "games"
},
"ftp": {
"dir": "/var/ftp",
"gid": 50,
"uid": 14,
"shell": "/sbin/nologin",
"gecos": "FTP User"
},
"nobody": {
"dir": "/",
"gid": 99,
"uid": 99,
"shell": "/sbin/nologin",
"gecos": "Nobody"
},
"avahi-autoipd": {
"dir": "/var/lib/avahi-autoipd",
"gid": 170,
"uid": 170,
"shell": "/sbin/nologin",
"gecos": "Avahi IPv4LL Stack"
},
"dbus": {
"dir": "/",
"gid": 81,
"uid": 81,
"shell": "/sbin/nologin",
"gecos": "System message bus"
},
"polkitd": {
"dir": "/",
"gid": 999,
"uid": 999,
"shell": "/sbin/nologin",
"gecos": "User for polkitd"
},
"abrt": {
"dir": "/etc/abrt",
"gid": 173,
"uid": 173,
"shell": "/sbin/nologin",
"gecos": ""
},
"usbmuxd": {
"dir": "/",
"gid": 113,
"uid": 113,
"shell": "/sbin/nologin",
"gecos": "usbmuxd user"
},
"colord": {
"dir": "/var/lib/colord",
"gid": 998,
"uid": 998,
"shell": "/sbin/nologin",
"gecos": "User for colord"
},
"geoclue": {
"dir": "/var/lib/geoclue",
"gid": 997,
"uid": 997,
"shell": "/sbin/nologin",
"gecos": "User for geoclue"
},
"rpc": {
"dir": "/var/lib/rpcbind",
"gid": 32,
"uid": 32,
"shell": "/sbin/nologin",
"gecos": "Rpcbind Daemon"
},
"rpcuser": {
"dir": "/var/lib/nfs",
"gid": 29,
"uid": 29,
"shell": "/sbin/nologin",
"gecos": "RPC Service User"
},
"nfsnobody": {
"dir": "/var/lib/nfs",
"gid": 65534,
"uid": 65534,
"shell": "/sbin/nologin",
"gecos": "Anonymous NFS User"
},
"qemu": {
"dir": "/",
"gid": 107,
"uid": 107,
"shell": "/sbin/nologin",
"gecos": "qemu user"
},
"rtkit": {
"dir": "/proc",
"gid": 172,
"uid": 172,
"shell": "/sbin/nologin",
"gecos": "RealtimeKit"
},
"radvd": {
"dir": "/",
"gid": 75,
"uid": 75,
"shell": "/sbin/nologin",
"gecos": "radvd user"
},
"tss": {
"dir": "/dev/null",
"gid": 59,
"uid": 59,
"shell": "/sbin/nologin",
"gecos": "Account used by the trousers package to sandbox the tcsd daemon"
},
"unbound": {
"dir": "/etc/unbound",
"gid": 995,
"uid": 996,
"shell": "/sbin/nologin",
"gecos": "Unbound DNS resolver"
},
"openvpn": {
"dir": "/etc/openvpn",
"gid": 994,
"uid": 995,
"shell": "/sbin/nologin",
"gecos": "OpenVPN"
},
"saslauth": {
"dir": "/run/saslauthd",
"gid": 76,
"uid": 994,
"shell": "/sbin/nologin",
"gecos": "\"Saslauthd user\""
},
"avahi": {
"dir": "/var/run/avahi-daemon",
"gid": 70,
"uid": 70,
"shell": "/sbin/nologin",
"gecos": "Avahi mDNS/DNS-SD Stack"
},
"pulse": {
"dir": "/var/run/pulse",
"gid": 992,
"uid": 993,
"shell": "/sbin/nologin",
"gecos": "PulseAudio System Daemon"
},
"gdm": {
"dir": "/var/lib/gdm",
"gid": 42,
"uid": 42,
"shell": "/sbin/nologin",
"gecos": ""
},
"gnome-initial-setup": {
"dir": "/run/gnome-initial-setup/",
"gid": 990,
"uid": 992,
"shell": "/sbin/nologin",
"gecos": ""
},
"nm-openconnect": {
"dir": "/",
"gid": 989,
"uid": 991,
"shell": "/sbin/nologin",
"gecos": "NetworkManager user for OpenConnect"
},
"sshd": {
"dir": "/var/empty/sshd",
"gid": 74,
"uid": 74,
"shell": "/sbin/nologin",
"gecos": "Privilege-separated SSH"
},
"chrony": {
"dir": "/var/lib/chrony",
"gid": 988,
"uid": 990,
"shell": "/sbin/nologin",
"gecos": ""
},
"tcpdump": {
"dir": "/",
"gid": 72,
"uid": 72,
"shell": "/sbin/nologin",
"gecos": ""
},
"some_user": {
"dir": "/home/some_user",
"gid": 1000,
"uid": 1000,
"shell": "/bin/bash",
"gecos": "some_user"
},
"systemd-journal-gateway": {
"dir": "/var/log/journal",
"gid": 191,
"uid": 191,
"shell": "/sbin/nologin",
"gecos": "Journal Gateway"
},
"postgres": {
"dir": "/var/lib/pgsql",
"gid": 26,
"uid": 26,
"shell": "/bin/bash",
"gecos": "PostgreSQL Server"
},
"dockerroot": {
"dir": "/var/lib/docker",
"gid": 977,
"uid": 984,
"shell": "/sbin/nologin",
"gecos": "Docker User"
},
"apache": {
"dir": "/usr/share/httpd",
"gid": 48,
"uid": 48,
"shell": "/sbin/nologin",
"gecos": "Apache"
},
"systemd-network": {
"dir": "/",
"gid": 974,
"uid": 982,
"shell": "/sbin/nologin",
"gecos": "systemd Network Management"
},
"systemd-resolve": {
"dir": "/",
"gid": 973,
"uid": 981,
"shell": "/sbin/nologin",
"gecos": "systemd Resolver"
},
"systemd-bus-proxy": {
"dir": "/",
"gid": 972,
"uid": 980,
"shell": "/sbin/nologin",
"gecos": "systemd Bus Proxy"
},
"systemd-journal-remote": {
"dir": "//var/log/journal/remote",
"gid": 970,
"uid": 979,
"shell": "/sbin/nologin",
"gecos": "Journal Remote"
},
"systemd-journal-upload": {
"dir": "//var/log/journal/upload",
"gid": 969,
"uid": 978,
"shell": "/sbin/nologin",
"gecos": "Journal Upload"
},
"setroubleshoot": {
"dir": "/var/lib/setroubleshoot",
"gid": 967,
"uid": 977,
"shell": "/sbin/nologin",
"gecos": ""
},
"oprofile": {
"dir": "/var/lib/oprofile",
"gid": 16,
"uid": 16,
"shell": "/sbin/nologin",
"gecos": "Special user account to be used by OProfile"
}
},
"group": {
"root": {
"gid": 0,
"members": [
]
},
"bin": {
"gid": 1,
"members": [
]
},
"daemon": {
"gid": 2,
"members": [
]
},
"sys": {
"gid": 3,
"members": [
]
},
"adm": {
"gid": 4,
"members": [
"logcheck"
]
},
"tty": {
"gid": 5,
"members": [
]
},
"disk": {
"gid": 6,
"members": [
]
},
"lp": {
"gid": 7,
"members": [
]
},
"mem": {
"gid": 8,
"members": [
]
},
"kmem": {
"gid": 9,
"members": [
]
},
"wheel": {
"gid": 10,
"members": [
]
},
"cdrom": {
"gid": 11,
"members": [
]
},
"mail": {
"gid": 12,
"members": [
]
},
"man": {
"gid": 15,
"members": [
]
},
"dialout": {
"gid": 18,
"members": [
"lirc"
]
},
"floppy": {
"gid": 19,
"members": [
]
},
"games": {
"gid": 20,
"members": [
]
},
"tape": {
"gid": 30,
"members": [
]
},
"video": {
"gid": 39,
"members": [
]
},
"ftp": {
"gid": 50,
"members": [
]
},
"lock": {
"gid": 54,
"members": [
"lirc"
]
},
"audio": {
"gid": 63,
"members": [
]
},
"nobody": {
"gid": 99,
"members": [
]
},
"users": {
"gid": 100,
"members": [
]
},
"utmp": {
"gid": 22,
"members": [
]
},
"utempter": {
"gid": 35,
"members": [
]
},
"avahi-autoipd": {
"gid": 170,
"members": [
]
},
"systemd-journal": {
"gid": 190,
"members": [
]
},
"dbus": {
"gid": 81,
"members": [
]
},
"polkitd": {
"gid": 999,
"members": [
]
},
"abrt": {
"gid": 173,
"members": [
]
},
"dip": {
"gid": 40,
"members": [
]
},
"usbmuxd": {
"gid": 113,
"members": [
]
},
"colord": {
"gid": 998,
"members": [
]
},
"geoclue": {
"gid": 997,
"members": [
]
},
"ssh_keys": {
"gid": 996,
"members": [
]
},
"rpc": {
"gid": 32,
"members": [
]
},
"rpcuser": {
"gid": 29,
"members": [
]
},
"nfsnobody": {
"gid": 65534,
"members": [
]
},
"kvm": {
"gid": 36,
"members": [
"qemu"
]
},
"qemu": {
"gid": 107,
"members": [
]
},
"rtkit": {
"gid": 172,
"members": [
]
},
"radvd": {
"gid": 75,
"members": [
]
},
"tss": {
"gid": 59,
"members": [
]
},
"unbound": {
"gid": 995,
"members": [
]
},
"openvpn": {
"gid": 994,
"members": [
]
},
"saslauth": {
"gid": 76,
"members": [
]
},
"avahi": {
"gid": 70,
"members": [
]
},
"brlapi": {
"gid": 993,
"members": [
]
},
"pulse": {
"gid": 992,
"members": [
]
},
"pulse-access": {
"gid": 991,
"members": [
]
},
"gdm": {
"gid": 42,
"members": [
]
},
"gnome-initial-setup": {
"gid": 990,
"members": [
]
},
"nm-openconnect": {
"gid": 989,
"members": [
]
},
"sshd": {
"gid": 74,
"members": [
]
},
"slocate": {
"gid": 21,
"members": [
]
},
"chrony": {
"gid": 988,
"members": [
]
},
"tcpdump": {
"gid": 72,
"members": [
]
},
"some_user": {
"gid": 1000,
"members": [
"some_user"
]
},
"docker": {
"gid": 986,
"members": [
"some_user"
]
}
},
"c": {
"gcc": {
"target": "x86_64-redhat-linux",
"configured_with": "../configure --enable-bootstrap --enable-languages=c,c++,objc,obj-c++,fortran,ada,go,lto --prefix=/usr --mandir=/usr/share/man --infodir=/usr/share/info --with-bugurl=http://bugzilla.redhat.com/bugzilla --enable-shared --enable-threads=posix --enable-checking=release --enable-multilib --with-system-zlib --enable-__cxa_atexit --disable-libunwind-exceptions --enable-gnu-unique-object --enable-linker-build-id --with-linker-hash-style=gnu --enable-plugin --enable-initfini-array --disable-libgcj --with-isl --enable-libmpx --enable-gnu-indirect-function --with-tune=generic --with-arch_32=i686 --build=x86_64-redhat-linux",
"thread_model": "posix",
"description": "gcc version 6.3.1 20161221 (Red Hat 6.3.1-1) (GCC) ",
"version": "6.3.1"
},
"glibc": {
"version": "2.24",
"description": "GNU C Library (GNU libc) stable release version 2.24, by Roland McGrath et al."
}
},
"lua": {
"version": "5.3.4"
},
"ruby": {
"platform": "x86_64-linux",
"version": "2.3.3",
"release_date": "2016-11-21",
"target": "x86_64-redhat-linux-gnu",
"target_cpu": "x86_64",
"target_vendor": "redhat",
"target_os": "linux",
"host": "x86_64-redhat-linux-gnu",
"host_cpu": "x86_64",
"host_os": "linux-gnu",
"host_vendor": "redhat",
"bin_dir": "/usr/bin",
"ruby_bin": "/usr/bin/ruby",
"gems_dir": "/home/some_user/.gem/ruby",
"gem_bin": "/usr/bin/gem"
}
},
"command": {
"ps": "ps -ef"
},
"root_group": "root",
"fips": {
"kernel": {
"enabled": false
}
},
"hostname": "myhostname",
"machinename": "myhostname",
"fqdn": "myhostname",
"domain": null,
"machine_id": "1234567abcede123456123456123456a",
"privateaddress": "192.168.1.100",
"keys": {
"ssh": {
}
},
"time": {
"timezone": "EDT"
},
"sessions": {
"by_session": {
"1918": {
"session": "1918",
"uid": "1000",
"user": "some_user",
"seat": null
},
"5": {
"session": "5",
"uid": "1000",
"user": "some_user",
"seat": "seat0"
},
"3": {
"session": "3",
"uid": "0",
"user": "root",
"seat": "seat0"
}
},
"by_user": {
"some_user": [
{
"session": "1918",
"uid": "1000",
"user": "some_user",
"seat": null
},
{
"session": "5",
"uid": "1000",
"user": "some_user",
"seat": "seat0"
}
],
"root": [
{
"session": "3",
"uid": "0",
"user": "root",
"seat": "seat0"
}
]
}
},
"hostnamectl": {
"static_hostname": "myhostname",
"icon_name": "computer-laptop",
"chassis": "laptop",
"machine_id": "24dc16bd7694404c825b517ab46d9d6b",
"machine_id": "12345123451234512345123451242323",
"boot_id": "3d5d5512341234123412341234123423",
"operating_system": "Fedora 25 (Workstation Edition)",
"cpe_os_name": "cpe",
"kernel": "Linux 4.9.14-200.fc25.x86_64",
"architecture": "x86-64"
},
"block_device": {
"dm-1": {
"size": "104857600",
"removable": "0",
"rotational": "0",
"physical_block_size": "512",
"logical_block_size": "512"
},
"loop1": {
"size": "209715200",
"removable": "0",
"rotational": "1",
"physical_block_size": "512",
"logical_block_size": "512"
},
"sr0": {
"size": "2097151",
"removable": "1",
"model": "DVD-RAM UJ8E2",
"rev": "SB01",
"state": "running",
"timeout": "30",
"vendor": "MATSHITA",
"queue_depth": "1",
"rotational": "1",
"physical_block_size": "512",
"logical_block_size": "512"
},
"dm-2": {
"size": "378093568",
"removable": "0",
"rotational": "0",
"physical_block_size": "512",
"logical_block_size": "512"
},
"loop2": {
"size": "4194304",
"removable": "0",
"rotational": "1",
"physical_block_size": "512",
"logical_block_size": "512"
},
"dm-0": {
"size": "16138240",
"removable": "0",
"rotational": "0",
"physical_block_size": "512",
"logical_block_size": "512"
},
"loop0": {
"size": "1024000",
"removable": "0",
"rotational": "1",
"physical_block_size": "512",
"logical_block_size": "512"
},
"sda": {
"size": "500118192",
"removable": "0",
"model": "SAMSUNG MZ7TD256",
"rev": "2L5Q",
"state": "running",
"timeout": "30",
"vendor": "ATA",
"queue_depth": "31",
"rotational": "0",
"physical_block_size": "512",
"logical_block_size": "512"
},
"dm-5": {
"size": "20971520",
"removable": "0",
"rotational": "1",
"physical_block_size": "512",
"logical_block_size": "512"
},
"dm-3": {
"size": "209715200",
"removable": "0",
"rotational": "1",
"physical_block_size": "512",
"logical_block_size": "512"
}
},
"sysconf": {
"LINK_MAX": 65000,
"_POSIX_LINK_MAX": 65000,
"MAX_CANON": 255,
"_POSIX_MAX_CANON": 255,
"MAX_INPUT": 255,
"_POSIX_MAX_INPUT": 255,
"NAME_MAX": 255,
"_POSIX_NAME_MAX": 255,
"PATH_MAX": 4096,
"_POSIX_PATH_MAX": 4096,
"PIPE_BUF": 4096,
"_POSIX_PIPE_BUF": 4096,
"SOCK_MAXBUF": null,
"_POSIX_ASYNC_IO": null,
"_POSIX_CHOWN_RESTRICTED": 1,
"_POSIX_NO_TRUNC": 1,
"_POSIX_PRIO_IO": null,
"_POSIX_SYNC_IO": null,
"_POSIX_VDISABLE": 0,
"ARG_MAX": 2097152,
"ATEXIT_MAX": 2147483647,
"CHAR_BIT": 8,
"CHAR_MAX": 127,
"CHAR_MIN": -128,
"CHILD_MAX": 62844,
"CLK_TCK": 100,
"INT_MAX": 2147483647,
"INT_MIN": -2147483648,
"IOV_MAX": 1024,
"LOGNAME_MAX": 256,
"LONG_BIT": 64,
"MB_LEN_MAX": 16,
"NGROUPS_MAX": 65536,
"NL_ARGMAX": 4096,
"NL_LANGMAX": 2048,
"NL_MSGMAX": 2147483647,
"NL_NMAX": 2147483647,
"NL_SETMAX": 2147483647,
"NL_TEXTMAX": 2147483647,
"NSS_BUFLEN_GROUP": 1024,
"NSS_BUFLEN_PASSWD": 1024,
"NZERO": 20,
"OPEN_MAX": 1024,
"PAGESIZE": 4096,
"PAGE_SIZE": 4096,
"PASS_MAX": 8192,
"PTHREAD_DESTRUCTOR_ITERATIONS": 4,
"PTHREAD_KEYS_MAX": 1024,
"PTHREAD_STACK_MIN": 16384,
"PTHREAD_THREADS_MAX": null,
"SCHAR_MAX": 127,
"SCHAR_MIN": -128,
"SHRT_MAX": 32767,
"SHRT_MIN": -32768,
"SSIZE_MAX": 32767,
"TTY_NAME_MAX": 32,
"TZNAME_MAX": 6,
"UCHAR_MAX": 255,
"UINT_MAX": 4294967295,
"UIO_MAXIOV": 1024,
"ULONG_MAX": 18446744073709551615,
"USHRT_MAX": 65535,
"WORD_BIT": 32,
"_AVPHYS_PAGES": 955772,
"_NPROCESSORS_CONF": 8,
"_NPROCESSORS_ONLN": 8,
"_PHYS_PAGES": 4027635,
"_POSIX_ARG_MAX": 2097152,
"_POSIX_ASYNCHRONOUS_IO": 200809,
"_POSIX_CHILD_MAX": 62844,
"_POSIX_FSYNC": 200809,
"_POSIX_JOB_CONTROL": 1,
"_POSIX_MAPPED_FILES": 200809,
"_POSIX_MEMLOCK": 200809,
"_POSIX_MEMLOCK_RANGE": 200809,
"_POSIX_MEMORY_PROTECTION": 200809,
"_POSIX_MESSAGE_PASSING": 200809,
"_POSIX_NGROUPS_MAX": 65536,
"_POSIX_OPEN_MAX": 1024,
"_POSIX_PII": null,
"_POSIX_PII_INTERNET": null,
"_POSIX_PII_INTERNET_DGRAM": null,
"_POSIX_PII_INTERNET_STREAM": null,
"_POSIX_PII_OSI": null,
"_POSIX_PII_OSI_CLTS": null,
"_POSIX_PII_OSI_COTS": null,
"_POSIX_PII_OSI_M": null,
"_POSIX_PII_SOCKET": null,
"_POSIX_PII_XTI": null,
"_POSIX_POLL": null,
"_POSIX_PRIORITIZED_IO": 200809,
"_POSIX_PRIORITY_SCHEDULING": 200809,
"_POSIX_REALTIME_SIGNALS": 200809,
"_POSIX_SAVED_IDS": 1,
"_POSIX_SELECT": null,
"_POSIX_SEMAPHORES": 200809,
"_POSIX_SHARED_MEMORY_OBJECTS": 200809,
"_POSIX_SSIZE_MAX": 32767,
"_POSIX_STREAM_MAX": 16,
"_POSIX_SYNCHRONIZED_IO": 200809,
"_POSIX_THREADS": 200809,
"_POSIX_THREAD_ATTR_STACKADDR": 200809,
"_POSIX_THREAD_ATTR_STACKSIZE": 200809,
"_POSIX_THREAD_PRIORITY_SCHEDULING": 200809,
"_POSIX_THREAD_PRIO_INHERIT": 200809,
"_POSIX_THREAD_PRIO_PROTECT": 200809,
"_POSIX_THREAD_ROBUST_PRIO_INHERIT": null,
"_POSIX_THREAD_ROBUST_PRIO_PROTECT": null,
"_POSIX_THREAD_PROCESS_SHARED": 200809,
"_POSIX_THREAD_SAFE_FUNCTIONS": 200809,
"_POSIX_TIMERS": 200809,
"TIMER_MAX": null,
"_POSIX_TZNAME_MAX": 6,
"_POSIX_VERSION": 200809,
"_T_IOV_MAX": null,
"_XOPEN_CRYPT": 1,
"_XOPEN_ENH_I18N": 1,
"_XOPEN_LEGACY": 1,
"_XOPEN_REALTIME": 1,
"_XOPEN_REALTIME_THREADS": 1,
"_XOPEN_SHM": 1,
"_XOPEN_UNIX": 1,
"_XOPEN_VERSION": 700,
"_XOPEN_XCU_VERSION": 4,
"_XOPEN_XPG2": 1,
"_XOPEN_XPG3": 1,
"_XOPEN_XPG4": 1,
"BC_BASE_MAX": 99,
"BC_DIM_MAX": 2048,
"BC_SCALE_MAX": 99,
"BC_STRING_MAX": 1000,
"CHARCLASS_NAME_MAX": 2048,
"COLL_WEIGHTS_MAX": 255,
"EQUIV_CLASS_MAX": null,
"EXPR_NEST_MAX": 32,
"LINE_MAX": 2048,
"POSIX2_BC_BASE_MAX": 99,
"POSIX2_BC_DIM_MAX": 2048,
"POSIX2_BC_SCALE_MAX": 99,
"POSIX2_BC_STRING_MAX": 1000,
"POSIX2_CHAR_TERM": 200809,
"POSIX2_COLL_WEIGHTS_MAX": 255,
"POSIX2_C_BIND": 200809,
"POSIX2_C_DEV": 200809,
"POSIX2_C_VERSION": 200809,
"POSIX2_EXPR_NEST_MAX": 32,
"POSIX2_FORT_DEV": null,
"POSIX2_FORT_RUN": null,
"_POSIX2_LINE_MAX": 2048,
"POSIX2_LINE_MAX": 2048,
"POSIX2_LOCALEDEF": 200809,
"POSIX2_RE_DUP_MAX": 32767,
"POSIX2_SW_DEV": 200809,
"POSIX2_UPE": null,
"POSIX2_VERSION": 200809,
"RE_DUP_MAX": 32767,
"PATH": "/usr/bin",
"CS_PATH": "/usr/bin",
"LFS_CFLAGS": null,
"LFS_LDFLAGS": null,
"LFS_LIBS": null,
"LFS_LINTFLAGS": null,
"LFS64_CFLAGS": "-D_LARGEFILE64_SOURCE",
"LFS64_LDFLAGS": null,
"LFS64_LIBS": null,
"LFS64_LINTFLAGS": "-D_LARGEFILE64_SOURCE",
"_XBS5_WIDTH_RESTRICTED_ENVS": "XBS5_LP64_OFF64",
"XBS5_WIDTH_RESTRICTED_ENVS": "XBS5_LP64_OFF64",
"_XBS5_ILP32_OFF32": null,
"XBS5_ILP32_OFF32_CFLAGS": null,
"XBS5_ILP32_OFF32_LDFLAGS": null,
"XBS5_ILP32_OFF32_LIBS": null,
"XBS5_ILP32_OFF32_LINTFLAGS": null,
"_XBS5_ILP32_OFFBIG": null,
"XBS5_ILP32_OFFBIG_CFLAGS": null,
"XBS5_ILP32_OFFBIG_LDFLAGS": null,
"XBS5_ILP32_OFFBIG_LIBS": null,
"XBS5_ILP32_OFFBIG_LINTFLAGS": null,
"_XBS5_LP64_OFF64": 1,
"XBS5_LP64_OFF64_CFLAGS": "-m64",
"XBS5_LP64_OFF64_LDFLAGS": "-m64",
"XBS5_LP64_OFF64_LIBS": null,
"XBS5_LP64_OFF64_LINTFLAGS": null,
"_XBS5_LPBIG_OFFBIG": null,
"XBS5_LPBIG_OFFBIG_CFLAGS": null,
"XBS5_LPBIG_OFFBIG_LDFLAGS": null,
"XBS5_LPBIG_OFFBIG_LIBS": null,
"XBS5_LPBIG_OFFBIG_LINTFLAGS": null,
"_POSIX_V6_ILP32_OFF32": null,
"POSIX_V6_ILP32_OFF32_CFLAGS": null,
"POSIX_V6_ILP32_OFF32_LDFLAGS": null,
"POSIX_V6_ILP32_OFF32_LIBS": null,
"POSIX_V6_ILP32_OFF32_LINTFLAGS": null,
"_POSIX_V6_WIDTH_RESTRICTED_ENVS": "POSIX_V6_LP64_OFF64",
"POSIX_V6_WIDTH_RESTRICTED_ENVS": "POSIX_V6_LP64_OFF64",
"_POSIX_V6_ILP32_OFFBIG": null,
"POSIX_V6_ILP32_OFFBIG_CFLAGS": null,
"POSIX_V6_ILP32_OFFBIG_LDFLAGS": null,
"POSIX_V6_ILP32_OFFBIG_LIBS": null,
"POSIX_V6_ILP32_OFFBIG_LINTFLAGS": null,
"_POSIX_V6_LP64_OFF64": 1,
"POSIX_V6_LP64_OFF64_CFLAGS": "-m64",
"POSIX_V6_LP64_OFF64_LDFLAGS": "-m64",
"POSIX_V6_LP64_OFF64_LIBS": null,
"POSIX_V6_LP64_OFF64_LINTFLAGS": null,
"_POSIX_V6_LPBIG_OFFBIG": null,
"POSIX_V6_LPBIG_OFFBIG_CFLAGS": null,
"POSIX_V6_LPBIG_OFFBIG_LDFLAGS": null,
"POSIX_V6_LPBIG_OFFBIG_LIBS": null,
"POSIX_V6_LPBIG_OFFBIG_LINTFLAGS": null,
"_POSIX_V7_ILP32_OFF32": null,
"POSIX_V7_ILP32_OFF32_CFLAGS": null,
"POSIX_V7_ILP32_OFF32_LDFLAGS": null,
"POSIX_V7_ILP32_OFF32_LIBS": null,
"POSIX_V7_ILP32_OFF32_LINTFLAGS": null,
"_POSIX_V7_WIDTH_RESTRICTED_ENVS": "POSIX_V7_LP64_OFF64",
"POSIX_V7_WIDTH_RESTRICTED_ENVS": "POSIX_V7_LP64_OFF64",
"_POSIX_V7_ILP32_OFFBIG": null,
"POSIX_V7_ILP32_OFFBIG_CFLAGS": null,
"POSIX_V7_ILP32_OFFBIG_LDFLAGS": null,
"POSIX_V7_ILP32_OFFBIG_LIBS": null,
"POSIX_V7_ILP32_OFFBIG_LINTFLAGS": null,
"_POSIX_V7_LP64_OFF64": 1,
"POSIX_V7_LP64_OFF64_CFLAGS": "-m64",
"POSIX_V7_LP64_OFF64_LDFLAGS": "-m64",
"POSIX_V7_LP64_OFF64_LIBS": null,
"POSIX_V7_LP64_OFF64_LINTFLAGS": null,
"_POSIX_V7_LPBIG_OFFBIG": null,
"POSIX_V7_LPBIG_OFFBIG_CFLAGS": null,
"POSIX_V7_LPBIG_OFFBIG_LDFLAGS": null,
"POSIX_V7_LPBIG_OFFBIG_LIBS": null,
"POSIX_V7_LPBIG_OFFBIG_LINTFLAGS": null,
"_POSIX_ADVISORY_INFO": 200809,
"_POSIX_BARRIERS": 200809,
"_POSIX_BASE": null,
"_POSIX_C_LANG_SUPPORT": null,
"_POSIX_C_LANG_SUPPORT_R": null,
"_POSIX_CLOCK_SELECTION": 200809,
"_POSIX_CPUTIME": 200809,
"_POSIX_THREAD_CPUTIME": 200809,
"_POSIX_DEVICE_SPECIFIC": null,
"_POSIX_DEVICE_SPECIFIC_R": null,
"_POSIX_FD_MGMT": null,
"_POSIX_FIFO": null,
"_POSIX_PIPE": null,
"_POSIX_FILE_ATTRIBUTES": null,
"_POSIX_FILE_LOCKING": null,
"_POSIX_FILE_SYSTEM": null,
"_POSIX_MONOTONIC_CLOCK": 200809,
"_POSIX_MULTI_PROCESS": null,
"_POSIX_SINGLE_PROCESS": null,
"_POSIX_NETWORKING": null,
"_POSIX_READER_WRITER_LOCKS": 200809,
"_POSIX_SPIN_LOCKS": 200809,
"_POSIX_REGEXP": 1,
"_REGEX_VERSION": null,
"_POSIX_SHELL": 1,
"_POSIX_SIGNALS": null,
"_POSIX_SPAWN": 200809,
"_POSIX_SPORADIC_SERVER": null,
"_POSIX_THREAD_SPORADIC_SERVER": null,
"_POSIX_SYSTEM_DATABASE": null,
"_POSIX_SYSTEM_DATABASE_R": null,
"_POSIX_TIMEOUTS": 200809,
"_POSIX_TYPED_MEMORY_OBJECTS": null,
"_POSIX_USER_GROUPS": null,
"_POSIX_USER_GROUPS_R": null,
"POSIX2_PBS": null,
"POSIX2_PBS_ACCOUNTING": null,
"POSIX2_PBS_LOCATE": null,
"POSIX2_PBS_TRACK": null,
"POSIX2_PBS_MESSAGE": null,
"SYMLOOP_MAX": null,
"STREAM_MAX": 16,
"AIO_LISTIO_MAX": null,
"AIO_MAX": null,
"AIO_PRIO_DELTA_MAX": 20,
"DELAYTIMER_MAX": 2147483647,
"HOST_NAME_MAX": 64,
"LOGIN_NAME_MAX": 256,
"MQ_OPEN_MAX": null,
"MQ_PRIO_MAX": 32768,
"_POSIX_DEVICE_IO": null,
"_POSIX_TRACE": null,
"_POSIX_TRACE_EVENT_FILTER": null,
"_POSIX_TRACE_INHERIT": null,
"_POSIX_TRACE_LOG": null,
"RTSIG_MAX": 32,
"SEM_NSEMS_MAX": null,
"SEM_VALUE_MAX": 2147483647,
"SIGQUEUE_MAX": 62844,
"FILESIZEBITS": 64,
"POSIX_ALLOC_SIZE_MIN": 4096,
"POSIX_REC_INCR_XFER_SIZE": null,
"POSIX_REC_MAX_XFER_SIZE": null,
"POSIX_REC_MIN_XFER_SIZE": 4096,
"POSIX_REC_XFER_ALIGN": 4096,
"SYMLINK_MAX": null,
"GNU_LIBC_VERSION": "glibc 2.24",
"GNU_LIBPTHREAD_VERSION": "NPTL 2.24",
"POSIX2_SYMLINKS": 1,
"LEVEL1_ICACHE_SIZE": 32768,
"LEVEL1_ICACHE_ASSOC": 8,
"LEVEL1_ICACHE_LINESIZE": 64,
"LEVEL1_DCACHE_SIZE": 32768,
"LEVEL1_DCACHE_ASSOC": 8,
"LEVEL1_DCACHE_LINESIZE": 64,
"LEVEL2_CACHE_SIZE": 262144,
"LEVEL2_CACHE_ASSOC": 8,
"LEVEL2_CACHE_LINESIZE": 64,
"LEVEL3_CACHE_SIZE": 6291456,
"LEVEL3_CACHE_ASSOC": 12,
"LEVEL3_CACHE_LINESIZE": 64,
"LEVEL4_CACHE_SIZE": 0,
"LEVEL4_CACHE_ASSOC": 0,
"LEVEL4_CACHE_LINESIZE": 0,
"IPV6": 200809,
"RAW_SOCKETS": 200809,
"_POSIX_IPV6": 200809,
"_POSIX_RAW_SOCKETS": 200809
},
"init_package": "systemd",
"shells": [
"/bin/sh",
"/bin/bash",
"/sbin/nologin",
"/usr/bin/sh",
"/usr/bin/bash",
"/usr/sbin/nologin",
"/usr/bin/zsh",
"/bin/zsh"
],
"ohai_time": 1492535225.41052,
"cloud_v2": null,
"cloud": null
}
''' # noqa
class TestOhaiCollector(BaseFactsTest):
    __test__ = True
    gather_subset = ['!all', 'ohai']
    valid_subsets = ['ohai']
    fact_namespace = 'ansible_ohai'
    collector_class = OhaiFactCollector

    def _mock_module(self):
        mock_module = Mock()
        mock_module.params = {'gather_subset': self.gather_subset,
                              'gather_timeout': 10,
                              'filter': '*'}
        mock_module.get_bin_path = Mock(return_value='/not/actually/ohai')
        mock_module.run_command = Mock(return_value=(0, ohai_json_output, ''))
        return mock_module

    @patch('ansible.module_utils.facts.other.ohai.OhaiFactCollector.get_ohai_output')
    def test_bogus_json(self, mock_get_ohai_output):
        module = self._mock_module()

        # bogus json
        mock_get_ohai_output.return_value = '{'

        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module)

        self.assertIsInstance(facts_dict, dict)
        self.assertEqual(facts_dict, {})

    @patch('ansible.module_utils.facts.other.ohai.OhaiFactCollector.run_ohai')
    def test_ohai_non_zero_return_code(self, mock_run_ohai):
        module = self._mock_module()

        # valid JSON, but a non-zero return code from ohai
        mock_run_ohai.return_value = (1, '{}', '')

        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module)

        self.assertIsInstance(facts_dict, dict)

        # when ohai fails, the collector should add no 'ohai' entry at all
        self.assertNotIn('ohai', facts_dict)
        self.assertEqual(facts_dict, {})
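
# A happy-path sketch (added for illustration; not part of the original test
# suite): feeding the fixture JSON above through the same mocked module should
# populate the 'ohai' fact namespace. It reuses only names already defined
# above; the exact layout of the parsed facts depends on the collector.
#
#     @patch('ansible.module_utils.facts.other.ohai.OhaiFactCollector.run_ohai')
#     def test_ohai_parses_fixture(self, mock_run_ohai):
#         module = self._mock_module()
#         mock_run_ohai.return_value = (0, ohai_json_output, '')
#         facts_dict = self.collector_class().collect(module=module)
#         self.assertIsInstance(facts_dict, dict)
#         self.assertIn('ohai', facts_dict)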
|
luzpaz/QGIS
|
refs/heads/master
|
python/plugins/processing/core/outputs.py
|
15
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
Output.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
import sys
from qgis.core import (QgsExpressionContext,
QgsExpressionContextUtils,
QgsExpression,
QgsExpressionContextScope,
QgsProject,
QgsSettings,
QgsVectorFileWriter,
QgsProcessingUtils,
QgsProcessingParameterDefinition,
QgsProcessingOutputRasterLayer,
QgsProcessingOutputVectorLayer,
QgsProcessingOutputMapLayer,
QgsProcessingOutputHtml,
QgsProcessingOutputNumber,
QgsProcessingOutputString,
QgsProcessingOutputBoolean,
QgsProcessingOutputFolder,
QgsProcessingOutputMultipleLayers)
def getOutputFromString(s):
try:
if "|" in s and s.startswith("Output"):
tokens = s.split("|")
params = [t if str(t) != "None" else None for t in tokens[1:]]
clazz = getattr(sys.modules[__name__], tokens[0])
return clazz(*params)
else:
tokens = s.split("=")
            if not tokens[1].lower().startswith('output'):
return None
name = tokens[0]
description = tokens[0]
token = tokens[1].strip()[len('output') + 1:]
out = None
if token.lower().strip().startswith('outputraster'):
out = QgsProcessingOutputRasterLayer(name, description)
elif token.lower().strip() == 'outputvector':
out = QgsProcessingOutputVectorLayer(name, description)
elif token.lower().strip() == 'outputlayer':
out = QgsProcessingOutputMapLayer(name, description)
elif token.lower().strip() == 'outputmultilayers':
out = QgsProcessingOutputMultipleLayers(name, description)
# elif token.lower().strip() == 'vector point':
# out = OutputVector(datatype=[dataobjects.TYPE_VECTOR_POINT])
# elif token.lower().strip() == 'vector line':
# out = OutputVector(datatype=[OutputVector.TYPE_VECTOR_LINE])
# elif token.lower().strip() == 'vector polygon':
# out = OutputVector(datatype=[OutputVector.TYPE_VECTOR_POLYGON])
# elif token.lower().strip().startswith('table'):
# out = OutputTable()
elif token.lower().strip().startswith('outputhtml'):
out = QgsProcessingOutputHtml(name, description)
# elif token.lower().strip().startswith('file'):
# out = OutputFile()
# ext = token.strip()[len('file') + 1:]
# if ext:
# out.ext = ext
elif token.lower().strip().startswith('outputfolder'):
out = QgsProcessingOutputFolder(name, description)
elif token.lower().strip().startswith('outputnumber'):
out = QgsProcessingOutputNumber(name, description)
elif token.lower().strip().startswith('outputstring'):
out = QgsProcessingOutputString(name, description)
elif token.lower().strip().startswith('outputboolean'):
out = QgsProcessingOutputBoolean(name, description)
# elif token.lower().strip().startswith('extent'):
# out = OutputExtent()
return out
    except Exception:
        # any parsing problem means the string is not a recognized output definition
        return None
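
# Illustrative examples (added; not part of the original module) of the two
# string forms this parser accepts, traced from the tokenizing logic above:
#
#   "OutputFoo|name|description"  -> split on "|"; tokens[0] ("OutputFoo" is a
#       hypothetical class name) is looked up in this module and instantiated
#       with the remaining tokens as constructor arguments.
#   "cells=output outputnumber"   -> split on "="; the declaration after "="
#       maps to QgsProcessingOutputNumber, with name and description both
#       taken from tokens[0], i.e. "cells".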
|
myfreecomm/fixofx
|
refs/heads/master
|
3rdparty/dateutil/rrule.py
|
9
|
"""
Copyright (c) 2003-2005 Gustavo Niemeyer <gustavo@niemeyer.net>
This module offers extensions to the standard python 2.3+
datetime module.
"""
__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>"
__license__ = "PSF License"
import itertools
import datetime
import calendar
import thread
import sys
__all__ = ["rrule", "rruleset", "rrulestr",
"YEARLY", "MONTHLY", "WEEKLY", "DAILY",
"HOURLY", "MINUTELY", "SECONDLY",
"MO", "TU", "WE", "TH", "FR", "SA", "SU"]
# Every mask is 7 days longer to handle cross-year weekly periods.
M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30+
[7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7)
M365MASK = list(M366MASK)
M29, M30, M31 = range(1,30), range(1,31), range(1,32)
MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
MDAY365MASK = list(MDAY366MASK)
M29, M30, M31 = range(-29,0), range(-30,0), range(-31,0)
NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
NMDAY365MASK = list(NMDAY366MASK)
M366RANGE = (0,31,60,91,121,152,182,213,244,274,305,335,366)
M365RANGE = (0,31,59,90,120,151,181,212,243,273,304,334,365)
WDAYMASK = [0,1,2,3,4,5,6]*55
del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31]
MDAY365MASK = tuple(MDAY365MASK)
M365MASK = tuple(M365MASK)
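# Orientation note (added): each mask is indexed by day-of-year. In a leap
# year M366MASK[0] == 1 (Jan 1 falls in month 1) and M366MASK[31] == 2
# (Feb 1); the trailing [1]*7 lets a week starting in late December index
# into "January" without wrapping. MDAY366MASK holds the day-of-month
# (1..31) and NMDAY366MASK the negative day-of-month (-31..-1) for the
# same positions.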
(YEARLY,
MONTHLY,
WEEKLY,
DAILY,
HOURLY,
MINUTELY,
SECONDLY) = range(7)
# Imported on demand.
easter = None
parser = None
class weekday(object):
__slots__ = ["weekday", "n"]
def __init__(self, weekday, n=None):
if n == 0:
raise ValueError, "Can't create weekday with n == 0"
self.weekday = weekday
self.n = n
def __call__(self, n):
if n == self.n:
return self
else:
return self.__class__(self.weekday, n)
def __eq__(self, other):
try:
if self.weekday != other.weekday or self.n != other.n:
return False
except AttributeError:
return False
return True
def __repr__(self):
s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
if not self.n:
return s
else:
return "%s(%+d)" % (s, self.n)
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple([weekday(x) for x in range(7)])
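# Example of the weekday calling convention (added for illustration): FR is
# weekday(4), and FR(-1) builds weekday(4, -1), i.e. "the last Friday" when
# used with a monthly or yearly rule; repr(FR(-1)) == "FR(-1)". Calling with
# the same n returns the same instance, so FR(None) is FR itself.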
class rrulebase:
def __init__(self, cache=False):
if cache:
self._cache = []
self._cache_lock = thread.allocate_lock()
self._cache_gen = self._iter()
self._cache_complete = False
else:
self._cache = None
self._cache_complete = False
self._len = None
def __iter__(self):
if self._cache_complete:
return iter(self._cache)
elif self._cache is None:
return self._iter()
else:
return self._iter_cached()
def _iter_cached(self):
i = 0
gen = self._cache_gen
cache = self._cache
acquire = self._cache_lock.acquire
release = self._cache_lock.release
while gen:
if i == len(cache):
acquire()
if self._cache_complete:
break
try:
for j in range(10):
cache.append(gen.next())
except StopIteration:
self._cache_gen = gen = None
self._cache_complete = True
break
release()
yield cache[i]
i += 1
while i < self._len:
yield cache[i]
i += 1
def __getitem__(self, item):
if self._cache_complete:
return self._cache[item]
elif isinstance(item, slice):
if item.step and item.step < 0:
return list(iter(self))[item]
else:
return list(itertools.islice(self,
item.start or 0,
item.stop or sys.maxint,
item.step or 1))
elif item >= 0:
gen = iter(self)
try:
for i in range(item+1):
res = gen.next()
except StopIteration:
raise IndexError
return res
else:
return list(iter(self))[item]
def __contains__(self, item):
if self._cache_complete:
return item in self._cache
else:
for i in self:
if i == item:
return True
elif i > item:
return False
return False
    # __len__() introduces a large performance penalty.
def count(self):
if self._len is None:
for x in self: pass
return self._len
def before(self, dt, inc=False):
if self._cache_complete:
gen = self._cache
else:
gen = self
last = None
if inc:
for i in gen:
if i > dt:
break
last = i
else:
for i in gen:
if i >= dt:
break
last = i
return last
def after(self, dt, inc=False):
if self._cache_complete:
gen = self._cache
else:
gen = self
if inc:
for i in gen:
if i >= dt:
return i
else:
for i in gen:
if i > dt:
return i
return None
def between(self, after, before, inc=False):
if self._cache_complete:
gen = self._cache
else:
gen = self
started = False
l = []
if inc:
for i in gen:
if i > before:
break
elif not started:
if i >= after:
started = True
l.append(i)
else:
l.append(i)
else:
for i in gen:
if i >= before:
break
elif not started:
if i > after:
started = True
l.append(i)
else:
l.append(i)
return l
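
# Usage sketch for the iteration helpers above (added; mirrors standard
# dateutil usage):
#
#     import datetime
#     r = rrule(DAILY, dtstart=datetime.datetime(2005, 1, 1), count=5)
#     list(r)                                 # the five daily occurrences
#     r.after(datetime.datetime(2005, 1, 2))  # first occurrence after Jan 2
#     r.between(datetime.datetime(2005, 1, 1),
#               datetime.datetime(2005, 1, 4))  # occurrences strictly inside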
class rrule(rrulebase):
def __init__(self, freq, dtstart=None,
interval=1, wkst=None, count=None, until=None, bysetpos=None,
bymonth=None, bymonthday=None, byyearday=None, byeaster=None,
byweekno=None, byweekday=None,
byhour=None, byminute=None, bysecond=None,
cache=False):
rrulebase.__init__(self, cache)
global easter
if not dtstart:
dtstart = datetime.datetime.now().replace(microsecond=0)
elif not isinstance(dtstart, datetime.datetime):
dtstart = datetime.datetime.fromordinal(dtstart.toordinal())
else:
dtstart = dtstart.replace(microsecond=0)
self._dtstart = dtstart
self._tzinfo = dtstart.tzinfo
self._freq = freq
self._interval = interval
self._count = count
if until and not isinstance(until, datetime.datetime):
until = datetime.datetime.fromordinal(until.toordinal())
self._until = until
if wkst is None:
self._wkst = calendar.firstweekday()
elif type(wkst) is int:
self._wkst = wkst
else:
self._wkst = wkst.weekday
if bysetpos is None:
self._bysetpos = None
elif type(bysetpos) is int:
self._bysetpos = (bysetpos,)
else:
self._bysetpos = tuple(bysetpos)
if not (byweekno or byyearday or bymonthday or
byweekday is not None or byeaster is not None):
if freq == YEARLY:
if not bymonth:
bymonth = dtstart.month
bymonthday = dtstart.day
elif freq == MONTHLY:
bymonthday = dtstart.day
elif freq == WEEKLY:
byweekday = dtstart.weekday()
# bymonth
if not bymonth:
self._bymonth = None
elif type(bymonth) is int:
self._bymonth = (bymonth,)
else:
self._bymonth = tuple(bymonth)
# byyearday
if not byyearday:
self._byyearday = None
elif type(byyearday) is int:
self._byyearday = (byyearday,)
else:
self._byyearday = tuple(byyearday)
# byeaster
if byeaster is not None:
if not easter:
from dateutil import easter
if type(byeaster) is int:
self._byeaster = (byeaster,)
else:
self._byeaster = tuple(byeaster)
else:
self._byeaster = None
        # bymonthday
if not bymonthday:
self._bymonthday = ()
self._bynmonthday = ()
elif type(bymonthday) is int:
if bymonthday < 0:
self._bynmonthday = (bymonthday,)
self._bymonthday = ()
else:
self._bymonthday = (bymonthday,)
self._bynmonthday = ()
else:
self._bymonthday = tuple([x for x in bymonthday if x > 0])
self._bynmonthday = tuple([x for x in bymonthday if x < 0])
# byweekno
if byweekno is None:
self._byweekno = None
elif type(byweekno) is int:
self._byweekno = (byweekno,)
else:
self._byweekno = tuple(byweekno)
# byweekday / bynweekday
if byweekday is None:
self._byweekday = None
self._bynweekday = None
elif type(byweekday) is int:
self._byweekday = (byweekday,)
self._bynweekday = None
elif hasattr(byweekday, "n"):
if not byweekday.n or freq > MONTHLY:
self._byweekday = (byweekday.weekday,)
self._bynweekday = None
else:
self._bynweekday = ((byweekday.weekday, byweekday.n),)
self._byweekday = None
else:
self._byweekday = []
self._bynweekday = []
for wday in byweekday:
if type(wday) is int:
self._byweekday.append(wday)
elif not wday.n or freq > MONTHLY:
self._byweekday.append(wday.weekday)
else:
self._bynweekday.append((wday.weekday, wday.n))
self._byweekday = tuple(self._byweekday)
self._bynweekday = tuple(self._bynweekday)
if not self._byweekday:
self._byweekday = None
elif not self._bynweekday:
self._bynweekday = None
# byhour
if byhour is None:
if freq < HOURLY:
self._byhour = (dtstart.hour,)
else:
self._byhour = None
elif type(byhour) is int:
self._byhour = (byhour,)
else:
self._byhour = tuple(byhour)
# byminute
if byminute is None:
if freq < MINUTELY:
self._byminute = (dtstart.minute,)
else:
self._byminute = None
elif type(byminute) is int:
self._byminute = (byminute,)
else:
self._byminute = tuple(byminute)
# bysecond
if bysecond is None:
if freq < SECONDLY:
self._bysecond = (dtstart.second,)
else:
self._bysecond = None
elif type(bysecond) is int:
self._bysecond = (bysecond,)
else:
self._bysecond = tuple(bysecond)
if self._freq >= HOURLY:
self._timeset = None
else:
self._timeset = []
for hour in self._byhour:
for minute in self._byminute:
for second in self._bysecond:
self._timeset.append(
datetime.time(hour, minute, second,
tzinfo=self._tzinfo))
self._timeset.sort()
self._timeset = tuple(self._timeset)
def _iter(self):
year, month, day, hour, minute, second, weekday, yearday, _ = \
self._dtstart.timetuple()
# Some local variables to speed things up a bit
freq = self._freq
interval = self._interval
wkst = self._wkst
until = self._until
bymonth = self._bymonth
byweekno = self._byweekno
byyearday = self._byyearday
byweekday = self._byweekday
byeaster = self._byeaster
bymonthday = self._bymonthday
bynmonthday = self._bynmonthday
bysetpos = self._bysetpos
byhour = self._byhour
byminute = self._byminute
bysecond = self._bysecond
ii = _iterinfo(self)
ii.rebuild(year, month)
getdayset = {YEARLY:ii.ydayset,
MONTHLY:ii.mdayset,
WEEKLY:ii.wdayset,
DAILY:ii.ddayset,
HOURLY:ii.ddayset,
MINUTELY:ii.ddayset,
SECONDLY:ii.ddayset}[freq]
if freq < HOURLY:
timeset = self._timeset
else:
gettimeset = {HOURLY:ii.htimeset,
MINUTELY:ii.mtimeset,
SECONDLY:ii.stimeset}[freq]
if ((freq >= HOURLY and
self._byhour and hour not in self._byhour) or
(freq >= MINUTELY and
self._byminute and minute not in self._byminute) or
(freq >= SECONDLY and
                 self._bysecond and second not in self._bysecond)):
timeset = ()
else:
timeset = gettimeset(hour, minute, second)
total = 0
count = self._count
while True:
# Get dayset with the right frequency
dayset, start, end = getdayset(year, month, day)
# Do the "hard" work ;-)
filtered = False
for i in dayset[start:end]:
if ((bymonth and ii.mmask[i] not in bymonth) or
(byweekno and not ii.wnomask[i]) or
(byweekday and ii.wdaymask[i] not in byweekday) or
(ii.nwdaymask and not ii.nwdaymask[i]) or
(byeaster and not ii.eastermask[i]) or
((bymonthday or bynmonthday) and
ii.mdaymask[i] not in bymonthday and
ii.nmdaymask[i] not in bynmonthday) or
(byyearday and
((i < ii.yearlen and i+1 not in byyearday
and -ii.yearlen+i not in byyearday) or
(i >= ii.yearlen and i+1-ii.yearlen not in byyearday
and -ii.nextyearlen+i-ii.yearlen
not in byyearday)))):
dayset[i] = None
filtered = True
# Output results
if bysetpos and timeset:
poslist = []
for pos in bysetpos:
if pos < 0:
daypos, timepos = divmod(pos, len(timeset))
else:
daypos, timepos = divmod(pos-1, len(timeset))
try:
i = [x for x in dayset[start:end]
if x is not None][daypos]
time = timeset[timepos]
except IndexError:
pass
else:
date = datetime.date.fromordinal(ii.yearordinal+i)
res = datetime.datetime.combine(date, time)
if res not in poslist:
poslist.append(res)
poslist.sort()
for res in poslist:
if until and res > until:
self._len = total
return
elif res >= self._dtstart:
total += 1
yield res
if count:
count -= 1
if not count:
self._len = total
return
else:
for i in dayset[start:end]:
if i is not None:
date = datetime.date.fromordinal(ii.yearordinal+i)
for time in timeset:
res = datetime.datetime.combine(date, time)
if until and res > until:
self._len = total
return
elif res >= self._dtstart:
total += 1
yield res
if count:
count -= 1
if not count:
self._len = total
return
# Handle frequency and interval
fixday = False
if freq == YEARLY:
year += interval
if year > datetime.MAXYEAR:
self._len = total
return
ii.rebuild(year, month)
elif freq == MONTHLY:
month += interval
if month > 12:
div, mod = divmod(month, 12)
month = mod
year += div
if month == 0:
month = 12
year -= 1
if year > datetime.MAXYEAR:
self._len = total
return
ii.rebuild(year, month)
elif freq == WEEKLY:
if wkst > weekday:
day += -(weekday+1+(6-wkst))+self._interval*7
else:
day += -(weekday-wkst)+self._interval*7
weekday = wkst
fixday = True
elif freq == DAILY:
day += interval
fixday = True
elif freq == HOURLY:
if filtered:
# Jump to one iteration before next day
hour += ((23-hour)//interval)*interval
while True:
hour += interval
div, mod = divmod(hour, 24)
if div:
hour = mod
day += div
fixday = True
if not byhour or hour in byhour:
break
timeset = gettimeset(hour, minute, second)
elif freq == MINUTELY:
if filtered:
# Jump to one iteration before next day
minute += ((1439-(hour*60+minute))//interval)*interval
while True:
minute += interval
div, mod = divmod(minute, 60)
if div:
minute = mod
hour += div
div, mod = divmod(hour, 24)
if div:
hour = mod
day += div
fixday = True
filtered = False
if ((not byhour or hour in byhour) and
(not byminute or minute in byminute)):
break
timeset = gettimeset(hour, minute, second)
elif freq == SECONDLY:
if filtered:
# Jump to one iteration before next day
second += (((86399-(hour*3600+minute*60+second))
//interval)*interval)
while True:
                    second += interval
div, mod = divmod(second, 60)
if div:
second = mod
minute += div
div, mod = divmod(minute, 60)
if div:
minute = mod
hour += div
div, mod = divmod(hour, 24)
if div:
hour = mod
day += div
fixday = True
if ((not byhour or hour in byhour) and
(not byminute or minute in byminute) and
(not bysecond or second in bysecond)):
break
timeset = gettimeset(hour, minute, second)
if fixday and day > 28:
daysinmonth = calendar.monthrange(year, month)[1]
if day > daysinmonth:
while day > daysinmonth:
day -= daysinmonth
month += 1
if month == 13:
month = 1
year += 1
if year > datetime.MAXYEAR:
self._len = total
return
daysinmonth = calendar.monthrange(year, month)[1]
ii.rebuild(year, month)
class _iterinfo(object):
__slots__ = ["rrule", "lastyear", "lastmonth",
"yearlen", "nextyearlen", "yearordinal", "yearweekday",
"mmask", "mrange", "mdaymask", "nmdaymask",
"wdaymask", "wnomask", "nwdaymask", "eastermask"]
def __init__(self, rrule):
for attr in self.__slots__:
setattr(self, attr, None)
self.rrule = rrule
def rebuild(self, year, month):
# Every mask is 7 days longer to handle cross-year weekly periods.
rr = self.rrule
if year != self.lastyear:
self.yearlen = 365+calendar.isleap(year)
self.nextyearlen = 365+calendar.isleap(year+1)
firstyday = datetime.date(year, 1, 1)
self.yearordinal = firstyday.toordinal()
self.yearweekday = firstyday.weekday()
wday = datetime.date(year, 1, 1).weekday()
if self.yearlen == 365:
self.mmask = M365MASK
self.mdaymask = MDAY365MASK
self.nmdaymask = NMDAY365MASK
self.wdaymask = WDAYMASK[wday:]
self.mrange = M365RANGE
else:
self.mmask = M366MASK
self.mdaymask = MDAY366MASK
self.nmdaymask = NMDAY366MASK
self.wdaymask = WDAYMASK[wday:]
self.mrange = M366RANGE
if not rr._byweekno:
self.wnomask = None
else:
self.wnomask = [0]*(self.yearlen+7)
#no1wkst = firstwkst = self.wdaymask.index(rr._wkst)
no1wkst = firstwkst = (7-self.yearweekday+rr._wkst)%7
if no1wkst >= 4:
no1wkst = 0
# Number of days in the year, plus the days we got
# from last year.
wyearlen = self.yearlen+(self.yearweekday-rr._wkst)%7
else:
# Number of days in the year, minus the days we
# left in last year.
wyearlen = self.yearlen-no1wkst
div, mod = divmod(wyearlen, 7)
numweeks = div+mod//4
for n in rr._byweekno:
if n < 0:
n += numweeks+1
if not (0 < n <= numweeks):
continue
if n > 1:
i = no1wkst+(n-1)*7
if no1wkst != firstwkst:
i -= 7-firstwkst
else:
i = no1wkst
for j in range(7):
self.wnomask[i] = 1
i += 1
if self.wdaymask[i] == rr._wkst:
break
if 1 in rr._byweekno:
# Check week number 1 of next year as well
# TODO: Check -numweeks for next year.
i = no1wkst+numweeks*7
if no1wkst != firstwkst:
i -= 7-firstwkst
if i < self.yearlen:
# If week starts in next year, we
# don't care about it.
for j in range(7):
self.wnomask[i] = 1
i += 1
if self.wdaymask[i] == rr._wkst:
break
if no1wkst:
# Check last week number of last year as
# well. If no1wkst is 0, either the year
# started on week start, or week number 1
# got days from last year, so there are no
# days from last year's last week number in
# this year.
if -1 not in rr._byweekno:
lyearweekday = datetime.date(year-1,1,1).weekday()
lno1wkst = (7-lyearweekday+rr._wkst)%7
lyearlen = 365+calendar.isleap(year-1)
if lno1wkst >= 4:
lno1wkst = 0
lnumweeks = 52+(lyearlen+
(lyearweekday-rr._wkst)%7)%7//4
else:
lnumweeks = 52+(self.yearlen-no1wkst)%7//4
else:
lnumweeks = -1
if lnumweeks in rr._byweekno:
for i in range(no1wkst):
self.wnomask[i] = 1
if (rr._bynweekday and
(month != self.lastmonth or year != self.lastyear)):
ranges = []
if rr._freq == YEARLY:
if rr._bymonth:
for month in rr._bymonth:
ranges.append(self.mrange[month-1:month+1])
else:
ranges = [(0, self.yearlen)]
elif rr._freq == MONTHLY:
ranges = [self.mrange[month-1:month+1]]
if ranges:
# Weekly frequency won't get here, so we may not
# care about cross-year weekly periods.
self.nwdaymask = [0]*self.yearlen
for first, last in ranges:
last -= 1
for wday, n in rr._bynweekday:
if n < 0:
i = last+(n+1)*7
i -= (self.wdaymask[i]-wday)%7
else:
i = first+(n-1)*7
i += (7-self.wdaymask[i]+wday)%7
if first <= i <= last:
self.nwdaymask[i] = 1
if rr._byeaster:
self.eastermask = [0]*(self.yearlen+7)
eyday = easter.easter(year).toordinal()-self.yearordinal
for offset in rr._byeaster:
self.eastermask[eyday+offset] = 1
self.lastyear = year
self.lastmonth = month
def ydayset(self, year, month, day):
return range(self.yearlen), 0, self.yearlen
def mdayset(self, year, month, day):
set = [None]*self.yearlen
start, end = self.mrange[month-1:month+1]
for i in range(start, end):
set[i] = i
return set, start, end
def wdayset(self, year, month, day):
# We need to handle cross-year weeks here.
set = [None]*(self.yearlen+7)
i = datetime.date(year, month, day).toordinal()-self.yearordinal
start = i
for j in range(7):
set[i] = i
i += 1
#if (not (0 <= i < self.yearlen) or
# self.wdaymask[i] == self.rrule._wkst):
# This will cross the year boundary, if necessary.
if self.wdaymask[i] == self.rrule._wkst:
break
return set, start, i
def ddayset(self, year, month, day):
set = [None]*self.yearlen
i = datetime.date(year, month, day).toordinal()-self.yearordinal
set[i] = i
return set, i, i+1
def htimeset(self, hour, minute, second):
set = []
rr = self.rrule
for minute in rr._byminute:
for second in rr._bysecond:
set.append(datetime.time(hour, minute, second,
tzinfo=rr._tzinfo))
set.sort()
return set
def mtimeset(self, hour, minute, second):
set = []
rr = self.rrule
for second in rr._bysecond:
set.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo))
set.sort()
return set
def stimeset(self, hour, minute, second):
return (datetime.time(hour, minute, second,
tzinfo=self.rrule._tzinfo),)
class rruleset(rrulebase):
class _genitem:
def __init__(self, genlist, gen):
try:
self.dt = gen()
genlist.append(self)
except StopIteration:
pass
self.genlist = genlist
self.gen = gen
def next(self):
try:
self.dt = self.gen()
except StopIteration:
self.genlist.remove(self)
def __cmp__(self, other):
return cmp(self.dt, other.dt)
def __init__(self, cache=False):
rrulebase.__init__(self, cache)
self._rrule = []
self._rdate = []
self._exrule = []
self._exdate = []
def rrule(self, rrule):
self._rrule.append(rrule)
def rdate(self, rdate):
self._rdate.append(rdate)
def exrule(self, exrule):
self._exrule.append(exrule)
def exdate(self, exdate):
self._exdate.append(exdate)
def _iter(self):
rlist = []
self._rdate.sort()
self._genitem(rlist, iter(self._rdate).next)
for gen in [iter(x).next for x in self._rrule]:
self._genitem(rlist, gen)
rlist.sort()
exlist = []
self._exdate.sort()
self._genitem(exlist, iter(self._exdate).next)
for gen in [iter(x).next for x in self._exrule]:
self._genitem(exlist, gen)
exlist.sort()
lastdt = None
total = 0
while rlist:
ritem = rlist[0]
if not lastdt or lastdt != ritem.dt:
while exlist and exlist[0] < ritem:
exlist[0].next()
exlist.sort()
if not exlist or ritem != exlist[0]:
total += 1
yield ritem.dt
lastdt = ritem.dt
ritem.next()
rlist.sort()
self._len = total
class _rrulestr:
_freq_map = {"YEARLY": YEARLY,
"MONTHLY": MONTHLY,
"WEEKLY": WEEKLY,
"DAILY": DAILY,
"HOURLY": HOURLY,
"MINUTELY": MINUTELY,
"SECONDLY": SECONDLY}
_weekday_map = {"MO":0,"TU":1,"WE":2,"TH":3,"FR":4,"SA":5,"SU":6}
def _handle_int(self, rrkwargs, name, value, **kwargs):
rrkwargs[name.lower()] = int(value)
def _handle_int_list(self, rrkwargs, name, value, **kwargs):
rrkwargs[name.lower()] = [int(x) for x in value.split(',')]
_handle_INTERVAL = _handle_int
_handle_COUNT = _handle_int
_handle_BYSETPOS = _handle_int_list
_handle_BYMONTH = _handle_int_list
_handle_BYMONTHDAY = _handle_int_list
_handle_BYYEARDAY = _handle_int_list
_handle_BYEASTER = _handle_int_list
_handle_BYWEEKNO = _handle_int_list
_handle_BYHOUR = _handle_int_list
_handle_BYMINUTE = _handle_int_list
_handle_BYSECOND = _handle_int_list
def _handle_FREQ(self, rrkwargs, name, value, **kwargs):
rrkwargs["freq"] = self._freq_map[value]
def _handle_UNTIL(self, rrkwargs, name, value, **kwargs):
global parser
if not parser:
from dateutil import parser
try:
rrkwargs["until"] = parser.parse(value,
ignoretz=kwargs.get("ignoretz"),
tzinfos=kwargs.get("tzinfos"))
except ValueError:
raise ValueError, "invalid until date"
def _handle_WKST(self, rrkwargs, name, value, **kwargs):
rrkwargs["wkst"] = self._weekday_map[value]
    def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs):
l = []
for wday in value.split(','):
for i in range(len(wday)):
if wday[i] not in '+-0123456789':
break
n = wday[:i] or None
w = wday[i:]
if n: n = int(n)
l.append(weekdays[self._weekday_map[w]](n))
rrkwargs["byweekday"] = l
_handle_BYDAY = _handle_BYWEEKDAY
def _parse_rfc_rrule(self, line,
dtstart=None,
cache=False,
ignoretz=False,
tzinfos=None):
if line.find(':') != -1:
name, value = line.split(':')
if name != "RRULE":
raise ValueError, "unknown parameter name"
else:
value = line
rrkwargs = {}
for pair in value.split(';'):
name, value = pair.split('=')
name = name.upper()
value = value.upper()
try:
getattr(self, "_handle_"+name)(rrkwargs, name, value,
ignoretz=ignoretz,
tzinfos=tzinfos)
            except AttributeError:
                raise ValueError, "unknown parameter '%s'" % name
            except (KeyError, ValueError):
                raise ValueError, "invalid '%s': %s" % (name, value)
return rrule(dtstart=dtstart, cache=cache, **rrkwargs)
def _parse_rfc(self, s,
dtstart=None,
cache=False,
unfold=False,
forceset=False,
compatible=False,
ignoretz=False,
tzinfos=None):
global parser
if compatible:
forceset = True
unfold = True
s = s.upper()
if not s.strip():
raise ValueError, "empty string"
if unfold:
lines = s.splitlines()
i = 0
while i < len(lines):
line = lines[i].rstrip()
if not line:
del lines[i]
elif i > 0 and line[0] == " ":
lines[i-1] += line[1:]
del lines[i]
else:
i += 1
else:
lines = s.split()
if (not forceset and len(lines) == 1 and
(s.find(':') == -1 or s.startswith('RRULE:'))):
return self._parse_rfc_rrule(lines[0], cache=cache,
dtstart=dtstart, ignoretz=ignoretz,
tzinfos=tzinfos)
else:
rrulevals = []
rdatevals = []
exrulevals = []
exdatevals = []
for line in lines:
if not line:
continue
if line.find(':') == -1:
name = "RRULE"
value = line
else:
name, value = line.split(':', 1)
parms = name.split(';')
if not parms:
raise ValueError, "empty property name"
name = parms[0]
parms = parms[1:]
if name == "RRULE":
for parm in parms:
raise ValueError, "unsupported RRULE parm: "+parm
rrulevals.append(value)
elif name == "RDATE":
for parm in parms:
if parm != "VALUE=DATE-TIME":
raise ValueError, "unsupported RDATE parm: "+parm
rdatevals.append(value)
elif name == "EXRULE":
for parm in parms:
raise ValueError, "unsupported EXRULE parm: "+parm
exrulevals.append(value)
elif name == "EXDATE":
for parm in parms:
if parm != "VALUE=DATE-TIME":
raise ValueError, "unsupported RDATE parm: "+parm
exdatevals.append(value)
elif name == "DTSTART":
for parm in parms:
raise ValueError, "unsupported DTSTART parm: "+parm
if not parser:
from dateutil import parser
dtstart = parser.parse(value, ignoretz=ignoretz,
tzinfos=tzinfos)
else:
raise ValueError, "unsupported property: "+name
if (forceset or len(rrulevals) > 1 or
rdatevals or exrulevals or exdatevals):
if not parser and (rdatevals or exdatevals):
from dateutil import parser
set = rruleset(cache=cache)
for value in rrulevals:
set.rrule(self._parse_rfc_rrule(value, dtstart=dtstart,
ignoretz=ignoretz,
tzinfos=tzinfos))
for value in rdatevals:
for datestr in value.split(','):
set.rdate(parser.parse(datestr,
ignoretz=ignoretz,
tzinfos=tzinfos))
for value in exrulevals:
set.exrule(self._parse_rfc_rrule(value, dtstart=dtstart,
ignoretz=ignoretz,
tzinfos=tzinfos))
for value in exdatevals:
for datestr in value.split(','):
set.exdate(parser.parse(datestr,
ignoretz=ignoretz,
tzinfos=tzinfos))
if compatible and dtstart:
set.rdate(dtstart)
return set
else:
return self._parse_rfc_rrule(rrulevals[0],
dtstart=dtstart,
cache=cache,
ignoretz=ignoretz,
tzinfos=tzinfos)
def __call__(self, s, **kwargs):
return self._parse_rfc(s, **kwargs)
rrulestr = _rrulestr()
# vim:ts=4:sw=4:et
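# --- Illustrative usage (added sketch; not part of the original module) ---
# A minimal, hypothetical example of the classes defined above, assuming the
# frequency constants (e.g. MONTHLY) declared earlier in this module:
#
#     import datetime
#     start = datetime.datetime(2000, 1, 1)
#     # Every other month on the 15th, five occurrences in total.
#     list(rrule(MONTHLY, interval=2, bymonthday=15, count=5, dtstart=start))
#
#     # The same rule expressed as an RFC 2445 recurrence string:
#     rrulestr("RRULE:FREQ=MONTHLY;INTERVAL=2;BYMONTHDAY=15;COUNT=5",
#              dtstart=start)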
|
xrmx/xhtml2pdf
|
refs/heads/master
|
demo/tgpisa/setup.py
|
168
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from turbogears.finddata import find_package_data
import os
execfile(os.path.join("tgpisa", "release.py"))
packages=find_packages()
package_data = find_package_data(where='tgpisa',
package='tgpisa')
if os.path.isdir('locales'):
packages.append('locales')
package_data.update(find_package_data(where='locales',
exclude=('*.po',), only_in_packages=False))
setup(
name="tgpisa",
version=version,
# uncomment the following lines if you fill them out in release.py
#description=description,
#author=author,
#author_email=email,
#url=url,
#download_url=download_url,
#license=license,
install_requires=[
"TurboGears >= 1.0.4.3",
"SQLObject>=0.8,<=0.10.0"
],
zip_safe=False,
packages=packages,
package_data=package_data,
keywords=[
# Use keywords if you'll be adding your package to the
# Python Cheeseshop
# if this has widgets, uncomment the next line
# 'turbogears.widgets',
# if this has a tg-admin command, uncomment the next line
# 'turbogears.command',
# if this has identity providers, uncomment the next line
# 'turbogears.identity.provider',
# If this is a template plugin, uncomment the next line
# 'python.templating.engines',
# If this is a full application, uncomment the next line
# 'turbogears.app',
],
classifiers=[
'Development Status :: 3 - Alpha',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: TurboGears',
# if this is an application that you'll distribute through
# the Cheeseshop, uncomment the next line
# 'Framework :: TurboGears :: Applications',
# if this is a package that includes widgets that you'll distribute
# through the Cheeseshop, uncomment the next line
# 'Framework :: TurboGears :: Widgets',
],
test_suite='nose.collector',
entry_points = {
'console_scripts': [
'start-tgpisa = tgpisa.commands:start',
],
},
# Uncomment next line and create a default.cfg file in your project dir
# if you want to package a default configuration in your egg.
#data_files = [('config', ['default.cfg'])],
)
|
freedomtan/tensorflow
|
refs/heads/master
|
tensorflow/python/training/momentum.py
|
20
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Momentum for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.training import optimizer
from tensorflow.python.training import training_ops
from tensorflow.python.util.tf_export import tf_export
@tf_export(v1=["train.MomentumOptimizer"])
class MomentumOptimizer(optimizer.Optimizer):
"""Optimizer that implements the Momentum algorithm.
Computes (if `use_nesterov = False`):
```
accumulation = momentum * accumulation + gradient
variable -= learning_rate * accumulation
```
Note that in the dense version of this algorithm, `accumulation` is updated
and applied regardless of a gradient's value, whereas the sparse version (when
the gradient is an `IndexedSlices`, typically because of `tf.gather` or an
embedding) only updates variable slices and corresponding `accumulation` terms
when that part of the variable was used in the forward pass.
"""
def __init__(self, learning_rate, momentum,
use_locking=False, name="Momentum", use_nesterov=False):
"""Construct a new Momentum optimizer.
Args:
learning_rate: A `Tensor` or a floating point value. The learning rate.
momentum: A `Tensor` or a floating point value. The momentum.
use_locking: If `True` use locks for update operations.
name: Optional name prefix for the operations created when applying
gradients. Defaults to "Momentum".
use_nesterov: If `True` use Nesterov Momentum.
See (Sutskever et al., 2013).
This implementation always computes gradients at the value of the
variable(s) passed to the optimizer. Using Nesterov Momentum makes the
variable(s) track the values called `theta_t + mu*v_t` in the paper.
This implementation is an approximation of the original formula, valid
for high values of momentum. It will compute the "adjusted gradient"
in NAG by assuming that the new gradient will be estimated by the
current average gradient plus the product of momentum and the change
in the average gradient.
References:
On the importance of initialization and momentum in deep learning:
[Sutskever et al., 2013]
(http://proceedings.mlr.press/v28/sutskever13.html)
([pdf](http://proceedings.mlr.press/v28/sutskever13.pdf))
@compatibility(eager)
When eager execution is enabled, `learning_rate` and `momentum` can each be
a callable that takes no arguments and returns the actual value to use. This
can be useful for changing these values across different invocations of
optimizer functions.
@end_compatibility
"""
super(MomentumOptimizer, self).__init__(use_locking, name)
self._learning_rate = learning_rate
self._momentum = momentum
self._use_nesterov = use_nesterov
def _create_slots(self, var_list):
for v in var_list:
self._zeros_slot(v, "momentum", self._name)
def _prepare(self):
learning_rate = self._learning_rate
if callable(learning_rate):
learning_rate = learning_rate()
self._learning_rate_tensor = ops.convert_to_tensor(learning_rate,
name="learning_rate")
momentum = self._momentum
if callable(momentum):
momentum = momentum()
self._momentum_tensor = ops.convert_to_tensor(momentum, name="momentum")
def _apply_dense(self, grad, var):
mom = self.get_slot(var, "momentum")
return training_ops.apply_momentum(
var, mom,
math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype),
grad,
math_ops.cast(self._momentum_tensor, var.dtype.base_dtype),
use_locking=self._use_locking,
use_nesterov=self._use_nesterov).op
def _resource_apply_dense(self, grad, var):
mom = self.get_slot(var, "momentum")
return training_ops.resource_apply_momentum(
var.handle, mom.handle,
math_ops.cast(self._learning_rate_tensor, grad.dtype.base_dtype),
grad,
math_ops.cast(self._momentum_tensor, grad.dtype.base_dtype),
use_locking=self._use_locking,
use_nesterov=self._use_nesterov)
def _apply_sparse(self, grad, var):
mom = self.get_slot(var, "momentum")
return training_ops.sparse_apply_momentum(
var, mom,
math_ops.cast(self._learning_rate_tensor, var.dtype.base_dtype),
grad.values, grad.indices,
math_ops.cast(self._momentum_tensor, var.dtype.base_dtype),
use_locking=self._use_locking,
use_nesterov=self._use_nesterov).op
def _resource_apply_sparse(self, grad, var, indices):
mom = self.get_slot(var, "momentum")
return training_ops.resource_sparse_apply_momentum(
var.handle, mom.handle,
math_ops.cast(self._learning_rate_tensor, grad.dtype),
grad, indices,
math_ops.cast(self._momentum_tensor, grad.dtype),
use_locking=self._use_locking,
use_nesterov=self._use_nesterov)
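# --- Added illustrative sketch; not part of the TensorFlow source. ---
# A pure-Python reference of the dense, non-Nesterov update that
# training_ops.apply_momentum performs, matching the docstring's formula.
def _momentum_step_reference(variable, accumulation, gradient,
                             learning_rate, momentum):
  """Return (new_variable, new_accumulation) after one momentum step."""
  accumulation = momentum * accumulation + gradient
  variable = variable - learning_rate * accumulation
  return variable, accumulation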
|
zkan/pronto-feedback
|
refs/heads/master
|
pronto_feedback/officevibe/apis.py
|
1
|
import requests
class OfficeVibeManager(object):
def __init__(self):
self.feedback_url = 'https://app.officevibe.com/api/v2/feedback'
    def get_feedback(self):
        return requests.get(self.feedback_url)
|
MalloyPower/parsing-python
|
refs/heads/master
|
front-end/testsuite-python-lib/Python-2.4/Lib/posixpath.py
|
1
|
"""Common operations on Posix pathnames.
Instead of importing this module directly, import os and refer to
this module as os.path. The "os.path" name is an alias for this
module on Posix systems; on other systems (e.g. Mac, Windows),
os.path provides the same operations in a manner specific to that
platform, and is an alias to another module (e.g. macpath, ntpath).
Some of this can actually be useful on non-Posix systems too, e.g.
for manipulation of the pathname component of URLs.
"""
import os
import stat
__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
"basename","dirname","commonprefix","getsize","getmtime",
"getatime","getctime","islink","exists","isdir","isfile","ismount",
"walk","expanduser","expandvars","normpath","abspath",
"samefile","sameopenfile","samestat",
"curdir","pardir","sep","pathsep","defpath","altsep","extsep",
"devnull","realpath","supports_unicode_filenames"]
# strings representing various path-related bits and pieces
curdir = '.'
pardir = '..'
extsep = '.'
sep = '/'
pathsep = ':'
defpath = ':/bin:/usr/bin'
altsep = None
devnull = '/dev/null'
# Normalize the case of a pathname. Trivial in Posix, string.lower on Mac.
# On MS-DOS this may also turn slashes into backslashes; however, other
# normalizations (such as optimizing '../' away) are not allowed
# (another function should be defined to do that).
def normcase(s):
"""Normalize case of pathname. Has no effect under Posix"""
return s
# Return whether a path is absolute.
# Trivial in Posix, harder on the Mac or MS-DOS.
def isabs(s):
"""Test whether a path is absolute"""
return s.startswith('/')
# Join pathnames.
# Ignore the previous parts if a part is absolute.
# Insert a '/' unless the first part is empty or already ends in '/'.
def join(a, *p):
"""Join two or more pathname components, inserting '/' as needed"""
path = a
for b in p:
if b.startswith('/'):
path = b
elif path == '' or path.endswith('/'):
path += b
else:
path += '/' + b
return path
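# Illustrative examples (added sketch; mirrors the comment above):
#   join('usr', 'bin')   -> 'usr/bin'
#   join('usr/', 'bin')  -> 'usr/bin'
#   join('usr', '/bin')  -> '/bin'    (an absolute part discards what came before)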
# Split a path in head (everything up to the last '/') and tail (the
# rest). If the path ends in '/', tail will be empty. If there is no
# '/' in the path, head will be empty.
# Trailing '/'es are stripped from head unless it is the root.
def split(p):
"""Split a pathname. Returns tuple "(head, tail)" where "tail" is
everything after the final slash. Either part may be empty."""
i = p.rfind('/') + 1
head, tail = p[:i], p[i:]
if head and head != '/'*len(head):
head = head.rstrip('/')
return head, tail
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
def splitext(p):
"""Split the extension from a pathname. Extension is everything from the
last dot to the end. Returns "(root, ext)", either part may be empty."""
i = p.rfind('.')
if i<=p.rfind('/'):
return p, ''
else:
return p[:i], p[i:]
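# Illustrative examples (added sketch; note that root + ext == p holds):
#   splitext('foo.tar.gz')  -> ('foo.tar', '.gz')
#   splitext('foo/bar')     -> ('foo/bar', '')
#   splitext('.cshrc')      -> ('', '.cshrc')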
# Split a pathname into a drive specification and the rest of the
# path. Useful on DOS/Windows/NT; on Unix, the drive is always empty.
def splitdrive(p):
"""Split a pathname into drive and path. On Posix, drive is always
empty."""
return '', p
# Return the tail (basename) part of a path.
def basename(p):
"""Returns the final component of a pathname"""
return split(p)[1]
# Return the head (dirname) part of a path.
def dirname(p):
"""Returns the directory component of a pathname"""
return split(p)[0]
# Return the longest prefix of all list elements.
def commonprefix(m):
"Given a list of pathnames, returns the longest common leading component"
if not m: return ''
s1 = min(m)
s2 = max(m)
n = min(len(s1), len(s2))
for i in xrange(n):
if s1[i] != s2[i]:
return s1[:i]
return s1[:n]
# Get size, mtime, atime of files.
def getsize(filename):
"""Return the size of a file, reported by os.stat()."""
return os.stat(filename).st_size
def getmtime(filename):
"""Return the last modification time of a file, reported by os.stat()."""
return os.stat(filename).st_mtime
def getatime(filename):
"""Return the last access time of a file, reported by os.stat()."""
return os.stat(filename).st_atime
def getctime(filename):
"""Return the metadata change time of a file, reported by os.stat()."""
return os.stat(filename).st_ctime
# Is a path a symbolic link?
# This will always return false on systems where os.lstat doesn't exist.
def islink(path):
"""Test whether a path is a symbolic link"""
try:
st = os.lstat(path)
except (os.error, AttributeError):
return False
return stat.S_ISLNK(st.st_mode)
# Does a path exist?
# This is false for dangling symbolic links.
def exists(path):
"""Test whether a path exists. Returns False for broken symbolic links"""
try:
st = os.stat(path)
except os.error:
return False
return True
# Being true for dangling symbolic links is also useful.
def lexists(path):
"""Test whether a path exists. Returns True for broken symbolic links"""
try:
st = os.lstat(path)
except os.error:
return False
return True
# Is a path a directory?
# This follows symbolic links, so both islink() and isdir() can be true
# for the same path.
def isdir(path):
"""Test whether a path is a directory"""
try:
st = os.stat(path)
except os.error:
return False
return stat.S_ISDIR(st.st_mode)
# Is a path a regular file?
# This follows symbolic links, so both islink() and isfile() can be true
# for the same path.
def isfile(path):
"""Test whether a path is a regular file"""
try:
st = os.stat(path)
except os.error:
return False
return stat.S_ISREG(st.st_mode)
# Are two filenames really pointing to the same file?
def samefile(f1, f2):
"""Test whether two pathnames reference the same actual file"""
s1 = os.stat(f1)
s2 = os.stat(f2)
return samestat(s1, s2)
# Are two open files really referencing the same file?
# (Not necessarily the same file descriptor!)
def sameopenfile(fp1, fp2):
"""Test whether two open file objects reference the same file"""
s1 = os.fstat(fp1)
s2 = os.fstat(fp2)
return samestat(s1, s2)
# Are two stat buffers (obtained from stat, fstat or lstat)
# describing the same file?
def samestat(s1, s2):
"""Test whether two stat buffers reference the same file"""
return s1.st_ino == s2.st_ino and \
s1.st_dev == s2.st_dev
# Is a path a mount point?
# (Does this work for all UNIXes? Is it even guaranteed to work by Posix?)
def ismount(path):
"""Test whether a path is a mount point"""
try:
s1 = os.stat(path)
s2 = os.stat(join(path, '..'))
except os.error:
return False # It doesn't exist -- so not a mount point :-)
dev1 = s1.st_dev
dev2 = s2.st_dev
if dev1 != dev2:
return True # path/.. on a different device as path
ino1 = s1.st_ino
ino2 = s2.st_ino
if ino1 == ino2:
return True # path/.. is the same i-node as path
return False
# Directory tree walk.
# For each directory under top (including top itself, but excluding
# '.' and '..'), func(arg, dirname, filenames) is called, where
# dirname is the name of the directory and filenames is the list
# of files (and subdirectories etc.) in the directory.
# The func may modify the filenames list, to implement a filter,
# or to impose a different order of visiting.
def walk(top, func, arg):
"""Directory tree walk with callback function.
For each directory in the directory tree rooted at top (including top
itself, but excluding '.' and '..'), call func(arg, dirname, fnames).
dirname is the name of the directory, and fnames a list of the names of
the files and subdirectories in dirname (excluding '.' and '..'). func
may modify the fnames list in-place (e.g. via del or slice assignment),
and walk will only recurse into the subdirectories whose names remain in
fnames; this can be used to implement a filter, or to impose a specific
order of visiting. No semantics are defined for, or required of, arg,
beyond that arg is always passed to func. It can be used, e.g., to pass
a filename pattern, or a mutable object designed to accumulate
statistics. Passing None for arg is common."""
try:
names = os.listdir(top)
except os.error:
return
func(arg, top, names)
for name in names:
name = join(top, name)
try:
st = os.lstat(name)
except os.error:
continue
if stat.S_ISDIR(st.st_mode):
walk(name, func, arg)
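# Illustrative example (added sketch): count the regular files under a tree.
#   def count_files(counter, dirname, fnames):
#       counter[0] += len([f for f in fnames if isfile(join(dirname, f))])
#   total = [0]
#   walk('/tmp', count_files, total)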
# Expand paths beginning with '~' or '~user'.
# '~' means $HOME; '~user' means that user's home directory.
# If the path doesn't begin with '~', or if the user or $HOME is unknown,
# the path is returned unchanged (leaving error reporting to whatever
# function is called with the expanded path as argument).
# See also module 'glob' for expansion of *, ? and [...] in pathnames.
# (A function should also be defined to do full *sh-style environment
# variable expansion.)
def expanduser(path):
"""Expand ~ and ~user constructions. If user or $HOME is unknown,
do nothing."""
if not path.startswith('~'):
return path
i = path.find('/', 1)
if i < 0:
i = len(path)
if i == 1:
if 'HOME' not in os.environ:
import pwd
userhome = pwd.getpwuid(os.getuid()).pw_dir
else:
userhome = os.environ['HOME']
else:
import pwd
try:
pwent = pwd.getpwnam(path[1:i])
except KeyError:
return path
userhome = pwent.pw_dir
if userhome.endswith('/'):
i += 1
return userhome + path[i:]
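# Illustrative examples (added sketch; results depend on the environment,
# here assuming os.environ['HOME'] == '/home/guido'):
#   expanduser('~/bin')     -> '/home/guido/bin'
#   expanduser('~foo/bin')  -> foo's home directory + '/bin', if user foo exists
#   expanduser('/usr/bin')  -> '/usr/bin'    (no leading '~', unchanged)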
# Expand paths containing shell variable substitutions.
# This expands the forms $variable and ${variable} only.
# Non-existent variables are left unchanged.
_varprog = None
def expandvars(path):
"""Expand shell variables of form $var and ${var}. Unknown variables
are left unchanged."""
global _varprog
if '$' not in path:
return path
if not _varprog:
import re
_varprog = re.compile(r'\$(\w+|\{[^}]*\})')
i = 0
while True:
m = _varprog.search(path, i)
if not m:
break
i, j = m.span(0)
name = m.group(1)
if name.startswith('{') and name.endswith('}'):
name = name[1:-1]
if name in os.environ:
tail = path[j:]
path = path[:i] + os.environ[name]
i = len(path)
path += tail
else:
i = j
return path
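# Illustrative examples (added sketch; assuming os.environ['HOME'] is set to
# '/home/guido' and 'UNSET' is not in the environment):
#   expandvars('$HOME/bin')    -> '/home/guido/bin'
#   expandvars('${HOME}/bin')  -> '/home/guido/bin'
#   expandvars('$UNSET/bin')   -> '$UNSET/bin'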
# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
# It should be understood that this may change the meaning of the path
# if it contains symbolic links!
def normpath(path):
"""Normalize path, eliminating double slashes, etc."""
if path == '':
return '.'
initial_slashes = path.startswith('/')
# POSIX allows one or two initial slashes, but treats three or more
# as single slash.
if (initial_slashes and
path.startswith('//') and not path.startswith('///')):
initial_slashes = 2
comps = path.split('/')
new_comps = []
for comp in comps:
if comp in ('', '.'):
continue
if (comp != '..' or (not initial_slashes and not new_comps) or
(new_comps and new_comps[-1] == '..')):
new_comps.append(comp)
elif new_comps:
new_comps.pop()
comps = new_comps
path = '/'.join(comps)
if initial_slashes:
path = '/'*initial_slashes + path
return path or '.'
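# Illustrative examples (added sketch; mirrors the comment above):
#   normpath('A//B')        -> 'A/B'
#   normpath('A/./B')       -> 'A/B'
#   normpath('A/foo/../B')  -> 'A/B'
#   normpath('//A')         -> '//A'    (POSIX treats exactly two leading
#                                        slashes specially, so they are kept)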
def abspath(path):
"""Return an absolute path."""
if not isabs(path):
path = join(os.getcwd(), path)
return normpath(path)
# Return a canonical path (i.e. the absolute location of a file on the
# filesystem).
def realpath(filename):
"""Return the canonical path of the specified filename, eliminating any
symbolic links encountered in the path."""
if isabs(filename):
bits = ['/'] + filename.split('/')[1:]
else:
bits = filename.split('/')
for i in range(2, len(bits)+1):
component = join(*bits[0:i])
# Resolve symbolic links.
if islink(component):
resolved = _resolve_link(component)
if resolved is None:
# Infinite loop -- return original component + rest of the path
return abspath(join(*([component] + bits[i:])))
else:
newpath = join(*([resolved] + bits[i:]))
return realpath(newpath)
return abspath(filename)
def _resolve_link(path):
"""Internal helper function. Takes a path and follows symlinks
until we either arrive at something that isn't a symlink, or
encounter a path we've seen before (meaning that there's a loop).
"""
paths_seen = []
while islink(path):
if path in paths_seen:
# Already seen this path, so we must have a symlink loop
return None
paths_seen.append(path)
# Resolve where the link points to
resolved = os.readlink(path)
if not isabs(resolved):
dir = dirname(path)
path = normpath(join(dir, resolved))
else:
path = normpath(resolved)
return path
supports_unicode_filenames = False
|
VDuda/DjangoBookWorm
|
refs/heads/master
|
functional_tests/test_list_app.py
|
1
|
#! /usr/bin/env python3
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
import sys
class NewVisitorTest(StaticLiveServerTestCase):
@classmethod
def setUpClass(cls):
for arg in sys.argv:
if 'liveserver' in arg:
cls.server_url = 'http://' + arg.split('=')[1]
return
super().setUpClass()
cls.server_url = cls.live_server_url
@classmethod
def tearDownClass(cls):
if cls.server_url == cls.live_server_url:
super().tearDownClass()
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def check_for_row_in_list_table(self, row_text):
table = self.browser.find_element_by_id('id_list_table')
rows = table.find_elements_by_tag_name('tr')
self.assertIn(row_text, [row.text for row in rows])
def test_can_start_a_list_and_retrieve_it_later(self):
# Edith has heard about a cool new online to-do app. She goes
# to check out its homepage
self.browser.get(self.server_url)
# She notices the page title and header mention to-do lists
self.assertIn('To-Do', self.browser.title)
header_text = self.browser.find_element_by_tag_name('h1').text
self.assertIn('To-Do', header_text)
# She is invited to enter a to-do item straight away
inputbox = self.browser.find_element_by_id('id_new_item')
self.assertEqual(
inputbox.get_attribute('placeholder'),
'Enter a to-do item'
)
# She types "Buy peacock feathers" into a text box
inputbox.send_keys('Buy peacock feathers')
# When she hits enter, the page updates, and now the page lists
# "1: Buy peacock feathers" as an item in a to-do list
inputbox.send_keys(Keys.ENTER)
edith_list_url = self.browser.current_url
self.assertRegex(edith_list_url, '/lists/.+')
self.check_for_row_in_list_table('1: Buy peacock feathers')
# There is still a text box inviting her to add another item. She
# enters "Use peacock feathers to make a fly" (Edith is very methodical)
inbox = self.browser.find_element_by_id('id_new_item')
inbox.send_keys('Use peacock feathers to make a fly')
inbox.send_keys(Keys.ENTER)
# The page updates again, and now shows both items on her list
self.check_for_row_in_list_table('1: Buy peacock feathers')
self.check_for_row_in_list_table('2: Use peacock feathers to make a fly')
# Now a new user, Francis, comes along to the site.
## We use a new browser session to make sure that no information
## of Edith's is coming through from cookies etc
self.browser.quit()
self.browser = webdriver.Firefox()
        # Francis visits the home page. There is no sign of Edith's list
self.browser.get(self.server_url)
page_txt = self.browser.find_element_by_tag_name('body').text
self.assertNotIn('Buy peacock feathers', page_txt)
self.assertNotIn('make a fly', page_txt)
# Francis starts a new List by entering a new item.
        # He is less interesting than Edith...
inputbox = self.browser.find_element_by_id('id_new_item')
inputbox.send_keys('Buy milk')
inputbox.send_keys(Keys.ENTER)
# Francis gets his own unique URL
francis_list_url = self.browser.current_url
self.assertRegex(francis_list_url, '/lists/.+')
self.assertNotEqual(francis_list_url, edith_list_url)
        # Again, there is no trace of Edith's list
        page_text = self.browser.find_element_by_tag_name('body').text
        self.assertNotIn('Buy peacock feathers', page_text)
        self.assertIn('Buy milk', page_text)
        # Satisfied, he goes back to sleep.
def test_layout_and_styling(self):
# Edith goes to the Home page
self.browser.get(self.server_url)
self.browser.set_window_size(1024, 768)
# She notices the input box is nicely centered
# inputbox.send_keys('testing\n')
inputbox = self.browser.find_element_by_id('id_new_item')
self.assertAlmostEqual(
inputbox.location['x'] + inputbox.size['width'] / 2,
512,
delta=5
)
|
the-zebulan/CodeWars
|
refs/heads/master
|
katas/kyu_7/character_counter.py
|
1
|
from collections import Counter
def validate_word(word):
return len(set(Counter(word.lower()).itervalues())) == 1
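# Illustrative examples (added sketch; Python 2, hence itervalues above):
#   validate_word('abcabc')  -> True    (every letter occurs twice)
#   validate_word('Abcabc')  -> True    (counting is case-insensitive)
#   validate_word('aabbc')   -> False   ('c' occurs once, the others twice)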
|
adrienbrault/home-assistant
|
refs/heads/dev
|
tests/components/control4/__init__.py
|
18
|
"""Tests for the Control4 integration."""
|
ljwolf/pysal
|
refs/heads/master
|
pysal/spreg/tests/test_sur.py
|
7
|
import unittest
import numpy as np
import pysal
from pysal.spreg.sur_utils import sur_dictxy,sur_dictZ
from pysal.spreg.sur import SUR,ThreeSLS
from pysal.common import RTOL
PEGP = pysal.examples.get_path
def dict_compare(actual, desired, rtol):
for i in actual.keys():
np.testing.assert_allclose(actual[i],desired[i],rtol)
class Test_SUR(unittest.TestCase):
def setUp(self):
self.db = pysal.open(pysal.examples.get_path('NAT.dbf'),'r')
self.w = pysal.queen_from_shapefile(pysal.examples.get_path("NAT.shp"))
self.w.transform = 'r'
def test_SUR(self): #2 equations, same K in each, two-step estimation
y_var0 = ['HR80','HR90']
x_var0 = [['PS80','UE80'],['PS90','UE90']]
bigy0,bigX0,bigyvars0,bigXvars0 = sur_dictxy(self.db,y_var0,x_var0)
reg = SUR(bigy0,bigX0,name_bigy=bigyvars0,name_bigX=bigXvars0)
dict_compare(reg.bOLS,{0: np.array([[ 5.39719146],[ 0.6973813 ],\
[ 0.22566378]]), 1: np.array([[ 1.80829725],[ 1.03504143],[ 0.6582483 ]])},RTOL)
dict_compare(reg.bSUR,{0: np.array([[ 5.13907179],[ 0.67764814],\
[ 0.26372397]]), 1: np.array([[ 3.61394031],[ 1.02607147],[ 0.38654993]])},RTOL)
dict_compare(reg.sur_inf,{0: np.array([[ 2.62467257e-01, 1.95798587e+01, 2.29656805e-85],\
[ 1.21957836e-01, 5.55641325e+00, 2.75374482e-08],\
[ 3.43183797e-02, 7.68462769e+00, 1.53442563e-14]]),\
1: np.array([[ 2.53499643e-01, 1.42561949e+01, 4.10220329e-46],\
[ 1.12166227e-01, 9.14777552e+00, 5.81179115e-20],\
[ 3.41995564e-02, 1.13027760e+01, 1.27134462e-29]])},RTOL)
np.testing.assert_allclose(reg.corr,np.array([[ 1. , 0.46954842],\
[ 0.46954842, 1. ]]),RTOL)
np.testing.assert_allclose(reg.surchow,[(26.72917022127309, 1, 2.3406126054510838e-07),\
(8.2409218385398244, 1, 0.0040956326095295649),\
(9.3837654127686712, 1, 0.002189154327032255)],RTOL)
def test_SUR_iter(self): #2 equations, same K in each, iterated estimation, spatial test
y_var0 = ['HR80','HR90']
x_var0 = [['PS80','UE80'],['PS90','UE90']]
bigy0,bigX0,bigyvars0,bigXvars0 = sur_dictxy(self.db,y_var0,x_var0)
reg = SUR(bigy0,bigX0,w=self.w,nonspat_diag=True,spat_diag=True,iter=True,verbose=False,\
name_bigy=bigyvars0,name_bigX=bigXvars0)
dict_compare(reg.bOLS,{0: np.array([[ 5.39719146],[ 0.6973813 ],\
[ 0.22566378]]), 1: np.array([[ 1.80829725],[ 1.03504143],[ 0.6582483 ]])},RTOL)
dict_compare(reg.bSUR,{0: np.array([[ 5.18423225],[ 0.67757925],\
[ 0.25706498]]), 1: np.array([[ 3.79731807],[ 1.02411196],[ 0.35895674]])},RTOL)
dict_compare(reg.sur_inf,{0: np.array([[ 2.59392406e-01, 1.99860602e+01, 7.28237551e-89],\
[ 1.21911330e-01, 5.55796781e+00, 2.72933704e-08],\
[ 3.38051365e-02, 7.60431727e+00, 2.86411830e-14]]),\
1: np.array([[ 2.53108919e-01, 1.50027035e+01, 7.04886598e-51],\
[ 1.13329850e-01, 9.03655976e+00, 1.61679985e-19],\
[ 3.40440433e-02, 1.05438928e+01, 5.42075621e-26]])},RTOL)
np.testing.assert_allclose(reg.corr,np.array([[ 1. , 0.5079133],
[ 0.5079133, 1. ]]),RTOL)
np.testing.assert_allclose(reg.surchow,[(23.457203761752844, 1, 1.2772356421778157e-06),\
(8.6998292497532859, 1, 0.0031823985960753743),\
(6.8426866249389589, 1, 0.0089004881389025351)],RTOL)
np.testing.assert_allclose(reg.llik,-19860.067987395596)
np.testing.assert_allclose(reg.lmtest,(680.16759754291365, 1, 6.144389240997126e-150))
np.testing.assert_allclose(reg.lrtest, (854.18095147295708, 1, 8.966465468792485e-188))
np.testing.assert_allclose(reg.lmEtest,(1270.9424750801545, 2, 1.0431532803839709e-276))
def test_SUR_3eq(self): #3 equations, different K, iterated estimation, spatial test
y_var1 = ['HR60','HR70','HR80']
x_var1 = [['RD60','PS60'],['RD70','PS70','UE70'],['RD80','PS80']]
bigy1,bigX1,bigyvars1,bigXvars1 = sur_dictxy(self.db,y_var1,x_var1)
reg = SUR(bigy1,bigX1,w=self.w,spat_diag=True,iter=True,verbose=False,\
name_bigy=bigyvars1,name_bigX=bigXvars1)
dict_compare(reg.bOLS,{0: np.array([[ 4.50407527],[ 2.50426531],\
[ 0.50130802]]), 1: np.array([[ 7.41171812],[ 4.0021532 ],[ 1.32168167],\
[-0.22786048]]), 2: np.array([[ 6.92761614],[ 3.90531039],[ 1.47413939]])},RTOL)
dict_compare(reg.bSUR,{0: np.array([[ 4.50407527],[ 2.39199682],\
[ 0.52723694]]), 1: np.array([[ 7.44509818],
[ 3.74968571],[ 1.28811685],[-0.23526451]]), 2: np.array([[ 6.92761614],\
[ 3.65423052],[ 1.38247611]])},RTOL)
dict_compare(reg.sur_inf,{0: np.array([[ 9.16019177e-002, 4.91700980e+001, 0.00000000e+000],\
[ 9.18832357e-002, 2.60330060e+001, 2.09562528e-149],\
[ 9.31668754e-002, 5.65906002e+000, 1.52204326e-008]]),\
1: np.array([[ 2.31085029e-001, 3.22180031e+001, 9.87752395e-228],\
[ 1.14421850e-001, 3.27707138e+001, 1.53941252e-235],\
[ 1.14799399e-001, 1.12205888e+001, 3.23111806e-029],\
[ 4.47806286e-002, -5.25371170e+000, 1.49064159e-007]]),\
2: np.array([[ 1.00643767e-001, 6.88330371e+001, 0.00000000e+000],\
[ 1.00599909e-001, 3.63243917e+001, 6.66811571e-289],\
[ 1.02053898e-001, 1.35465291e+001, 8.30659234e-042]])},RTOL)
np.testing.assert_allclose(reg.corr,np.array([[ 1. , 0.34470181, 0.25096458],\
[ 0.34470181, 1. , 0.33527277],[ 0.25096458, 0.33527277, 1. ]]),RTOL)
np.testing.assert_allclose(reg.llik,-28695.767676078722)
np.testing.assert_allclose(reg.lmtest,(882.43543942655947, 3, 5.7128374010751484e-191))
np.testing.assert_allclose(reg.lrtest, (818.30409875688747, 3, 4.6392724270549021e-177))
np.testing.assert_allclose(reg.lmEtest,(696.64318511682916, 3, 1.1218163246066135e-150))
def test_3SLS(self): # two equations, one endog, one instrument, same k
y_var1 = ['HR80','HR90']
x_var1 = [['PS80','UE80'],['PS90','UE90']]
yend_var1 = [['RD80'],['RD90']]
q_var1 = [['FP79'],['FP89']]
bigy1,bigX1,bigyvars1,bigXvars1 = sur_dictxy(self.db,y_var1,x_var1)
bigyend1,bigyendvars1 = sur_dictZ(self.db,yend_var1)
bigq1,bigqvars1 = sur_dictZ(self.db,q_var1)
reg = ThreeSLS(bigy1,bigX1,bigyend1,bigq1)
dict_compare(reg.b3SLS,{0: np.array([[ 6.92426353e+00],[ 1.42921826e+00],[ 4.94348442e-04],\
[ 3.58292750e+00]]), 1: np.array([[ 7.62385875],[ 1.65031181],[-0.21682974],[ 3.91250428]])},RTOL)
dict_compare(reg.tsls_inf,{0: np.array([[ 2.32208525e-001, 2.98191616e+001, 2.20522747e-195],\
[ 1.03734166e-001, 1.37777004e+001, 3.47155373e-043],\
[ 3.08619277e-002, 1.60180675e-002, 9.87219978e-001],\
[ 1.11319989e-001, 3.21858412e+001, 2.78527634e-227]]),\
1: np.array([[ 2.87394149e-001, 2.65275364e+001, 4.66554915e-155],\
[ 9.59703138e-002, 1.71960655e+001, 2.84185085e-066],\
[ 4.08954707e-002, -5.30204786e+000, 1.14510807e-007],\
[ 1.35867887e-001, 2.87963872e+001, 2.38043782e-182]])},RTOL)
np.testing.assert_allclose(reg.corr,np.array([[ 1. , 0.26404959],
[ 0.26404959, 1. ]]),RTOL)
np.testing.assert_allclose(reg.surchow,[(4.398001850528483, 1, 0.035981064325265613),\
(3.3042403886525147, 1, 0.069101286634542139),\
(21.712902666281863, 1, 3.1665430446850281e-06),\
(4.4286185200127388, 1, 0.035341101907069621)],RTOL)
def test_3SLS_uneqK(self): # Three equations, unequal K, two endog variables, three instruments
y_var2 = ['HR60','HR70','HR80']
x_var2 = [['RD60','PS60'],['RD70','PS70','MA70'],['RD80','PS80']]
yend_var2 = [['UE60','DV60'],['UE70','DV70'],['UE80','DV80']]
q_var2 = [['FH60','FP59','GI59'],['FH70','FP69','GI69'],['FH80','FP79','GI79']]
bigy2,bigX2,bigyvars2,bigXvars2 = sur_dictxy(self.db,y_var2,x_var2)
bigyend2,bigyendvars2 = sur_dictZ(self.db,yend_var2)
bigq2,bigqvars2 = sur_dictZ(self.db,q_var2)
reg = ThreeSLS(bigy2,bigX2,bigyend2,bigq2,name_bigy=bigyvars2,\
name_bigX=bigXvars2,name_bigyend=bigyendvars2,\
name_bigq=bigqvars2,name_ds="natregimes")
dict_compare(reg.b2SLS,{0: np.array([[-2.04160355],[ 4.5438992 ],[ 1.65007567],[-0.73163458],\
[ 5.43071683]]), 1: np.array([[ 17.26252005],[ 5.17297895],[ 1.2893243 ],[ -0.38349609],\
[ -2.17689289],[ 4.31713382]]), 2: np.array([[-7.6809159 ],[ 3.88957396],[ 0.49973258],\
[ 0.36476446],[ 2.63375234]])},RTOL)
dict_compare(reg.b3SLS,{0: np.array([[-1.56830297],[ 4.07805179],[ 1.49694849],[-0.5376807 ],\
[ 4.65487154]]), 1: np.array([[ 16.13792395],[ 4.97265632],[ 1.31962844],[ -0.32122485],\
[ -2.12407425],[ 3.91227737]]), 2: np.array([[-6.7283657 ],[ 3.79206731],[ 0.52278922],\
[ 0.33447996],[ 2.47158609]])},RTOL)
dict_compare(reg.tsls_inf,{0: np.array([[ 9.95215966e-01, -1.57584185e+00, 1.15062254e-01],\
[ 2.26574971e-01, 1.79986861e+01, 1.99495587e-72],\
[ 1.60939740e-01, 9.30129807e+00, 1.38741353e-20],\
[ 1.19040839e-01, -4.51677511e+00, 6.27885257e-06],\
[ 5.32942876e-01, 8.73427857e+00, 2.45216107e-18]]),\
1: np.array([[ 1.59523920e+000, 1.01163035e+001, 4.67748637e-024],\
[ 1.87013008e-001, 2.65898954e+001, 8.88419907e-156],\
[ 1.44410869e-001, 9.13801331e+000, 6.36101069e-020],\
[ 3.46429228e-002, -9.27245233e+000, 1.81914372e-020],\
[ 2.49627824e-001, -8.50896434e+000, 1.75493796e-017],\
[ 4.19425249e-001, 9.32771068e+000, 1.08182251e-020]]),\
2: np.array([[ 1.09143600e+000, -6.16469102e+000, 7.06208998e-010],\
[ 1.27908896e-001, 2.96466268e+001, 3.74870055e-193],\
[ 1.32436222e-001, 3.94747912e+000, 7.89784041e-005],\
[ 8.81489692e-002, 3.79448524e+000, 1.47950082e-004],\
[ 1.95538678e-001, 1.26398834e+001, 1.27242486e-036]])},RTOL)
np.testing.assert_allclose(reg.corr,np.array([[ 1. , 0.31819323, 0.20428789],\
[ 0.31819323, 1. , 0.12492191],[ 0.20428789, 0.12492191, 1. ]]),RTOL)
if __name__ == '__main__':
unittest.main()
|
jshiv/turntable
|
refs/heads/master
|
test/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/euctwfreq.py
|
3132
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# EUCTW frequency table
# Converted from big5 work
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
# Random Distribution Ratio = 512/(5401-512) = 0.105
#
# Typical Distribution Ratio is about 25% of the ideal one, still much higher than RDR
EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
# Char to FreqOrder table
EUCTW_TABLE_SIZE = 8102
EUCTWCharToFreqOrder = (
1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
# Everything below is of no interest for detection purposes
2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
# flake8: noqa
|
edouard-lopez/ansible-modules-core
|
refs/heads/devel
|
cloud/amazon/ec2_metric_alarm.py
|
61
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
module: ec2_metric_alarm
short_description: "Create/update or delete AWS Cloudwatch 'metric alarms'"
description:
- Can create or delete AWS metric alarms
- Metrics you wish to alarm on must already exist
version_added: "1.6"
author: "Zacharie Eakin (@zeekin)"
options:
state:
description:
- register or deregister the alarm
required: true
choices: ['present', 'absent']
name:
description:
- Unique name for the alarm
required: true
metric:
description:
- Name of the monitored metric (e.g. CPUUtilization)
- Metric must already exist
required: false
namespace:
description:
      - Name of the appropriate namespace ('AWS/EC2', 'System/Linux', etc.), which determines the category it will appear under in CloudWatch
required: false
statistic:
description:
- Operation applied to the metric
- Works in conjunction with period and evaluation_periods to determine the comparison value
required: false
    choices: ['SampleCount','Average','Sum','Minimum','Maximum']
comparison:
description:
- Determines how the threshold value is compared
required: false
    choices: ['<=','<','>','>=']
threshold:
description:
- Sets the min/max bound for triggering the alarm
required: false
period:
description:
- The time (in seconds) between metric evaluations
required: false
evaluation_periods:
description:
      - The number of consecutive periods over which the metric is evaluated before the alarm state is determined
required: false
unit:
description:
- The threshold's unit of measurement
required: false
    choices: ['Seconds','Microseconds','Milliseconds','Bytes','Kilobytes','Megabytes','Gigabytes','Terabytes','Bits','Kilobits','Megabits','Gigabits','Terabits','Percent','Count','Bytes/Second','Kilobytes/Second','Megabytes/Second','Gigabytes/Second','Terabytes/Second','Bits/Second','Kilobits/Second','Megabits/Second','Gigabits/Second','Terabits/Second','Count/Second','None']
description:
description:
- A longer description of the alarm
required: false
dimensions:
description:
      - The dimensions of the metric the alarm is applied to (for example, a specific InstanceId)
required: false
alarm_actions:
description:
      - A list of the names of the action(s) to take when the alarm is in the 'alarm' status
required: false
insufficient_data_actions:
description:
- A list of the names of action(s) to take when the alarm is in the 'insufficient_data' status
required: false
ok_actions:
description:
- A list of the names of action(s) to take when the alarm is in the 'ok' status
required: false
extends_documentation_fragment: aws
"""
EXAMPLES = '''
- name: create alarm
ec2_metric_alarm:
state: present
region: ap-southeast-2
name: "cpu-low"
metric: "CPUUtilization"
namespace: "AWS/EC2"
statistic: Average
comparison: "<="
threshold: 5.0
period: 300
evaluation_periods: 3
unit: "Percent"
description: "This will alarm when a bamboo slave's cpu usage average is lower than 5% for 15 minutes "
dimensions: {'InstanceId':'i-XXX'}
alarm_actions: ["action1","action2"]
'''
import sys
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
try:
import boto.ec2.cloudwatch
from boto.ec2.cloudwatch import CloudWatchConnection, MetricAlarm
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def create_metric_alarm(connection, module):
name = module.params.get('name')
metric = module.params.get('metric')
namespace = module.params.get('namespace')
statistic = module.params.get('statistic')
comparison = module.params.get('comparison')
threshold = module.params.get('threshold')
period = module.params.get('period')
evaluation_periods = module.params.get('evaluation_periods')
unit = module.params.get('unit')
description = module.params.get('description')
dimensions = module.params.get('dimensions')
alarm_actions = module.params.get('alarm_actions')
insufficient_data_actions = module.params.get('insufficient_data_actions')
ok_actions = module.params.get('ok_actions')
alarms = connection.describe_alarms(alarm_names=[name])
if not alarms:
alm = MetricAlarm(
name=name,
metric=metric,
namespace=namespace,
statistic=statistic,
comparison=comparison,
threshold=threshold,
period=period,
evaluation_periods=evaluation_periods,
unit=unit,
description=description,
dimensions=dimensions,
alarm_actions=alarm_actions,
insufficient_data_actions=insufficient_data_actions,
ok_actions=ok_actions
)
try:
connection.create_alarm(alm)
changed = True
alarms = connection.describe_alarms(alarm_names=[name])
except BotoServerError, e:
module.fail_json(msg=str(e))
else:
alarm = alarms[0]
changed = False
for attr in ('comparison','metric','namespace','statistic','threshold','period','evaluation_periods','unit','description'):
if getattr(alarm, attr) != module.params.get(attr):
changed = True
setattr(alarm, attr, module.params.get(attr))
        # Work around a boto quirk: when modifying an existing alarm, the
        # comparison must be supplied as the full CloudWatch operator name
        # rather than the '<=', '<', '>', '>=' symbols used at creation time.
comparison = alarm.comparison
comparisons = {'<=' : 'LessThanOrEqualToThreshold', '<' : 'LessThanThreshold', '>=' : 'GreaterThanOrEqualToThreshold', '>' : 'GreaterThanThreshold'}
alarm.comparison = comparisons[comparison]
        # 'dimensions' may be None when the parameter was omitted, so
        # normalize it to a dict before comparing against the existing alarm.
        dim1 = module.params.get('dimensions') or {}
        dim2 = alarm.dimensions
        for keys in dim1:
            if not isinstance(dim1[keys], list):
                dim1[keys] = [dim1[keys]]
            if keys not in dim2 or dim1[keys] != dim2[keys]:
                changed = True
                setattr(alarm, 'dimensions', dim1)
for attr in ('alarm_actions','insufficient_data_actions','ok_actions'):
action = module.params.get(attr) or []
if getattr(alarm, attr) != action:
changed = True
                setattr(alarm, attr, action)
try:
if changed:
connection.create_alarm(alarm)
except BotoServerError, e:
module.fail_json(msg=str(e))
result = alarms[0]
module.exit_json(changed=changed, name=result.name,
actions_enabled=result.actions_enabled,
alarm_actions=result.alarm_actions,
alarm_arn=result.alarm_arn,
comparison=result.comparison,
description=result.description,
dimensions=result.dimensions,
evaluation_periods=result.evaluation_periods,
insufficient_data_actions=result.insufficient_data_actions,
last_updated=result.last_updated,
metric=result.metric,
namespace=result.namespace,
ok_actions=result.ok_actions,
period=result.period,
state_reason=result.state_reason,
state_value=result.state_value,
statistic=result.statistic,
threshold=result.threshold,
unit=result.unit)
def delete_metric_alarm(connection, module):
name = module.params.get('name')
alarms = connection.describe_alarms(alarm_names=[name])
if alarms:
try:
connection.delete_alarms([name])
module.exit_json(changed=True)
except BotoServerError, e:
module.fail_json(msg=str(e))
else:
module.exit_json(changed=False)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
name=dict(required=True, type='str'),
metric=dict(type='str'),
namespace=dict(type='str'),
statistic=dict(type='str', choices=['SampleCount', 'Average', 'Sum', 'Minimum', 'Maximum']),
comparison=dict(type='str', choices=['<=', '<', '>', '>=']),
threshold=dict(type='float'),
period=dict(type='int'),
unit=dict(type='str', choices=['Seconds', 'Microseconds', 'Milliseconds', 'Bytes', 'Kilobytes', 'Megabytes', 'Gigabytes', 'Terabytes', 'Bits', 'Kilobits', 'Megabits', 'Gigabits', 'Terabits', 'Percent', 'Count', 'Bytes/Second', 'Kilobytes/Second', 'Megabytes/Second', 'Gigabytes/Second', 'Terabytes/Second', 'Bits/Second', 'Kilobits/Second', 'Megabits/Second', 'Gigabits/Second', 'Terabits/Second', 'Count/Second', 'None']),
evaluation_periods=dict(type='int'),
description=dict(type='str'),
dimensions=dict(type='dict'),
alarm_actions=dict(type='list'),
insufficient_data_actions=dict(type='list'),
ok_actions=dict(type='list'),
state=dict(default='present', choices=['present', 'absent']),
region=dict(aliases=['aws_region', 'ec2_region']),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
state = module.params.get('state')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
try:
connection = connect_to_aws(boto.ec2.cloudwatch, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, StandardError), e:
module.fail_json(msg=str(e))
if state == 'present':
create_metric_alarm(connection, module)
elif state == 'absent':
delete_metric_alarm(connection, module)
main()
|
xaviercobain88/framework-python
|
refs/heads/master
|
openerp/addons/stock/__init__.py
|
64
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from stock import *
import partner
import product
import report
import wizard
import res_config
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
alexandrul-ci/robotframework
|
refs/heads/master
|
utest/model/test_testsuite.py
|
7
|
import unittest
from robot.utils.asserts import assert_equal, assert_true, assert_raises
from robot.model import TestSuite
from robot.utils import PY2, PY3
if PY3:
unicode = str
class TestTestSuite(unittest.TestCase):
def setUp(self):
self.suite = TestSuite(metadata={'M': 'V'})
    def test_modify_metadata(self):
self.suite.metadata['m'] = 'v'
self.suite.metadata['n'] = 'w'
assert_equal(dict(self.suite.metadata), {'M': 'v', 'n': 'w'})
def test_set_metadata(self):
self.suite.metadata = {'a': '1', 'b': '1'}
self.suite.metadata['A'] = '2'
assert_equal(dict(self.suite.metadata), {'a': '2', 'b': '1'})
def test_create_and_add_suite(self):
s1 = self.suite.suites.create(name='s1')
s2 = TestSuite(name='s2')
self.suite.suites.append(s2)
assert_true(s1.parent is self.suite)
assert_true(s2.parent is self.suite)
assert_equal(list(self.suite.suites), [s1, s2])
def test_reset_suites(self):
s1 = TestSuite(name='s1')
self.suite.suites = [s1]
s2 = self.suite.suites.create(name='s2')
assert_true(s1.parent is self.suite)
assert_true(s2.parent is self.suite)
assert_equal(list(self.suite.suites), [s1, s2])
def test_suite_name(self):
suite = TestSuite()
assert_equal(suite.name, '')
assert_equal(suite.suites.create(name='foo').name, 'foo')
assert_equal(suite.suites.create(name='bar').name, 'bar')
assert_equal(suite.name, 'foo & bar')
assert_equal(suite.suites.create(name='zap').name, 'zap')
assert_equal(suite.name, 'foo & bar & zap')
suite.name = 'new name'
assert_equal(suite.name, 'new name')
def test_nested_subsuites(self):
suite = TestSuite(name='top')
sub1 = suite.suites.create(name='sub1')
sub2 = sub1.suites.create(name='sub2')
assert_equal(list(suite.suites), [sub1])
assert_equal(list(sub1.suites), [sub2])
def test_set_tags(self):
suite = TestSuite()
suite.tests.create()
suite.tests.create(tags=['t1', 't2'])
suite.set_tags(add='a', remove=['t2', 'nonex'])
suite.tests.create()
assert_equal(list(suite.tests[0].tags), ['a'])
assert_equal(list(suite.tests[1].tags), ['a', 't1'])
assert_equal(list(suite.tests[2].tags), [])
def test_set_tags_also_to_new_child(self):
suite = TestSuite()
suite.tests.create()
suite.set_tags(add='a', remove=['t2', 'nonex'], persist=True)
suite.tests.create(tags=['t1', 't2'])
suite.tests = list(suite.tests)
suite.tests.create()
suite.suites.create().tests.create()
assert_equal(list(suite.tests[0].tags), ['a'])
assert_equal(list(suite.tests[1].tags), ['a', 't1'])
assert_equal(list(suite.tests[2].tags), ['a'])
assert_equal(list(suite.suites[0].tests[0].tags), ['a'])
def test_slots(self):
assert_raises(AttributeError, setattr, self.suite, 'attr', 'value')
class TestSuiteId(unittest.TestCase):
def test_one_suite(self):
assert_equal(TestSuite().id, 's1')
def test_sub_suites(self):
parent = TestSuite()
for i in range(10):
assert_equal(parent.suites.create().id, 's1-s%s' % (i+1))
assert_equal(parent.suites[-1].suites.create().id, 's1-s10-s1')
def test_id_is_dynamic(self):
suite = TestSuite()
sub = suite.suites.create().suites.create()
assert_equal(sub.id, 's1-s1-s1')
suite.suites = [sub]
assert_equal(sub.id, 's1-s1')
class TestStringRepresentation(unittest.TestCase):
def setUp(self):
self.empty = TestSuite()
self.ascii = TestSuite(name='Kekkonen')
self.non_ascii = TestSuite(name=u'hyv\xe4 nimi')
def test_unicode(self):
assert_equal(unicode(self.empty), '')
assert_equal(unicode(self.ascii), 'Kekkonen')
assert_equal(unicode(self.non_ascii), u'hyv\xe4 nimi')
if PY2:
def test_str(self):
assert_equal(str(self.empty), '')
assert_equal(str(self.ascii), 'Kekkonen')
assert_equal(str(self.non_ascii), u'hyv\xe4 nimi'.encode('UTF-8'))
if __name__ == '__main__':
unittest.main()
|
incorrectusername/coala
|
refs/heads/master
|
tests/processes/BearRunningTest.py
|
21
|
import multiprocessing
import queue
import unittest
from coalib.bears.GlobalBear import GlobalBear
from coalib.bears.LocalBear import LocalBear
from coalib.processes.BearRunning import (
LOG_LEVEL, LogMessage, run, send_msg, task_done)
from coalib.processes.CONTROL_ELEMENT import CONTROL_ELEMENT
from coalib.results.Result import RESULT_SEVERITY, Result
from coalib.settings.Section import Section
class LocalTestBear(LocalBear):
def run(self, filename, file):
if filename == 'file1':
raise Exception('Just to throw anything here.')
return [Result.from_values('LocalTestBear',
'something went wrong',
filename)]
class SimpleBear(LocalBear):
def run(self,
filename,
file,
*args,
dependency_results=None,
**kwargs):
return [Result.from_values('SimpleBear',
'something went wrong',
filename),
# This result should not be passed to DependentBear
Result.from_values('FakeBear',
'something went wrong',
filename),
Result.from_values('SimpleBear',
'another thing went wrong',
filename)]
class DependentBear(LocalBear):
BEAR_DEPS = {SimpleBear}
def run(self,
filename,
file,
*args,
dependency_results=None,
**kwargs):
assert len(dependency_results['SimpleBear']) == 2
class SimpleGlobalBear(GlobalBear):
def run(self,
*args,
dependency_results=None,
**kwargs):
return [Result('SimpleGlobalBear', 'something went wrong'),
# This result should not be passed to DependentBear
Result('FakeBear', 'something went wrong'),
Result('SimpleGlobalBear', 'another thing went wrong')]
class DependentGlobalBear(GlobalBear):
BEAR_DEPS = {SimpleGlobalBear}
def run(self,
*args,
dependency_results=None,
**kwargs):
assert len(dependency_results['SimpleGlobalBear']) == 3
class GlobalTestBear(GlobalBear):
def run(self):
result = []
for file, contents in self.file_dict.items():
result.append(Result.from_values('GlobalTestBear',
'Files are bad in general!',
file,
severity=RESULT_SEVERITY.INFO))
return result
class EvilBear(LocalBear):
def execute(self, *args, **kwargs):
raise NotImplementedError
class UnexpectedBear1(LocalBear):
def run(self, filename, file):
return [1,
Result('UnexpectedBear1', 'test result')]
class UnexpectedBear2(LocalBear):
def run(self, filename, file):
return 1
class BearRunningUnitTest(unittest.TestCase):
def setUp(self):
self.settings = Section('name')
self.file_name_queue = queue.Queue()
self.local_bear_list = []
self.global_bear_list = []
self.global_bear_queue = queue.Queue()
self.file_dict = {}
manager = multiprocessing.Manager()
self.local_result_dict = manager.dict()
self.global_result_dict = manager.dict()
self.message_queue = queue.Queue()
self.control_queue = queue.Queue()
def test_queue_done_marking(self):
self.message_queue.put('test')
task_done(self.message_queue) # Should make the queue joinable
self.message_queue.join()
task_done('test') # Should pass silently
def test_messaging(self):
send_msg(self.message_queue,
0,
LOG_LEVEL.DEBUG,
'test',
'messag',
delimiter='-',
end='e')
self.assertEqual(self.message_queue.get(),
LogMessage(LOG_LEVEL.DEBUG, 'test-message'))
def test_dependencies(self):
self.local_bear_list.append(SimpleBear(self.settings,
self.message_queue))
self.local_bear_list.append(DependentBear(self.settings,
self.message_queue))
self.global_bear_list.append(SimpleGlobalBear({},
self.settings,
self.message_queue))
self.global_bear_list.append(DependentGlobalBear({},
self.settings,
self.message_queue))
self.global_bear_queue.put(1)
self.global_bear_queue.put(0)
self.file_name_queue.put('t')
self.file_dict['t'] = []
run(self.file_name_queue,
self.local_bear_list,
self.global_bear_list,
self.global_bear_queue,
self.file_dict,
self.local_result_dict,
self.global_result_dict,
self.message_queue,
self.control_queue)
try:
while True:
msg = self.message_queue.get(timeout=0)
self.assertEqual(msg.log_level, LOG_LEVEL.DEBUG)
except queue.Empty:
pass
def test_evil_bear(self):
self.local_bear_list.append(EvilBear(self.settings,
self.message_queue))
self.file_name_queue.put('t')
self.file_dict['t'] = []
run(self.file_name_queue,
self.local_bear_list,
self.global_bear_list,
self.global_bear_queue,
self.file_dict,
self.local_result_dict,
self.global_result_dict,
self.message_queue,
self.control_queue)
def test_strange_bear(self):
self.local_bear_list.append(UnexpectedBear1(self.settings,
self.message_queue))
self.local_bear_list.append(UnexpectedBear2(self.settings,
self.message_queue))
self.file_name_queue.put('t')
self.file_dict['t'] = []
run(self.file_name_queue,
self.local_bear_list,
self.global_bear_list,
self.global_bear_queue,
self.file_dict,
self.local_result_dict,
self.global_result_dict,
self.message_queue,
self.control_queue)
expected_messages = [LOG_LEVEL.DEBUG,
LOG_LEVEL.ERROR,
LOG_LEVEL.DEBUG,
LOG_LEVEL.DEBUG,
LOG_LEVEL.WARNING]
for msg in expected_messages:
self.assertEqual(msg, self.message_queue.get(timeout=0).log_level)
class BearRunningIntegrationTest(unittest.TestCase):
example_file = """a
b
c
d
"""
def setUp(self):
self.settings = Section('name')
self.file_name_queue = queue.Queue()
self.local_bear_list = []
self.global_bear_list = []
self.global_bear_queue = queue.Queue()
self.file_dict = {}
manager = multiprocessing.Manager()
self.local_result_dict = manager.dict()
self.global_result_dict = manager.dict()
self.message_queue = queue.Queue()
self.control_queue = queue.Queue()
self.file1 = 'file1'
self.file2 = 'arbitrary'
self.file_name_queue.put(self.file1)
self.file_name_queue.put(self.file2)
self.file_name_queue.put('invalid file')
self.local_bear_list.append(LocalTestBear(self.settings,
self.message_queue))
self.local_bear_list.append('not a valid bear')
self.file_dict[self.file1] = self.example_file
self.file_dict[self.file2] = self.example_file
self.global_bear_list.append(GlobalTestBear(self.file_dict,
self.settings,
self.message_queue))
self.global_bear_list.append('not a valid bear')
self.global_bear_queue.put(0)
self.global_bear_queue.put(1)
def test_run(self):
run(self.file_name_queue,
self.local_bear_list,
self.global_bear_list,
self.global_bear_queue,
self.file_dict,
self.local_result_dict,
self.global_result_dict,
self.message_queue,
self.control_queue)
expected_messages = [LOG_LEVEL.DEBUG,
LOG_LEVEL.WARNING,
LOG_LEVEL.DEBUG,
LOG_LEVEL.WARNING,
LOG_LEVEL.DEBUG,
LOG_LEVEL.WARNING,
LOG_LEVEL.ERROR,
LOG_LEVEL.DEBUG,
LOG_LEVEL.DEBUG,
LOG_LEVEL.WARNING]
for msg in expected_messages:
self.assertEqual(msg, self.message_queue.get(timeout=0).log_level)
local_result_expected = [[],
[Result.from_values('LocalTestBear',
'something went wrong',
'arbitrary')]
]
for expected in local_result_expected:
control_elem, index = self.control_queue.get()
self.assertEqual(control_elem, CONTROL_ELEMENT.LOCAL)
real = self.local_result_dict[index]
self.assertEqual(real, expected)
global_results_expected = [Result.from_values(
'GlobalTestBear',
'Files are bad in general!',
'file1',
severity=RESULT_SEVERITY.INFO),
Result.from_values(
'GlobalTestBear',
'Files are bad in general!',
'arbitrary',
severity=RESULT_SEVERITY.INFO)]
control_elem, index = self.control_queue.get()
self.assertEqual(control_elem, CONTROL_ELEMENT.LOCAL_FINISHED)
control_elem, index = self.control_queue.get()
self.assertEqual(control_elem, CONTROL_ELEMENT.GLOBAL)
real = self.global_result_dict[index]
self.assertEqual(sorted(global_results_expected), sorted(real))
control_elem, none = self.control_queue.get(timeout=0)
self.assertEqual(control_elem, CONTROL_ELEMENT.GLOBAL_FINISHED)
self.assertEqual(none, None)
# The invalid bear gets a None in that dict for dependency resolution
self.assertEqual(len(self.global_result_dict), 2)
self.assertEqual(len(self.local_result_dict),
len(local_result_expected))
self.assertRaises(queue.Empty, self.message_queue.get, timeout=0)
self.assertRaises(queue.Empty, self.control_queue.get, timeout=0)
|
mikesun/xen-cow-checkpointing
|
refs/heads/master
|
tools/python/xen/util/asserts.py
|
52
|
#===========================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (C) 2005 XenSource Ltd
#============================================================================
def isCharConvertible(c):
"""Assert that the given value is convertible to a character using the %c
conversion. This implies that c is either an integer, or a character
(i.e. a string of length 1).
"""
assert (isinstance(c, int) or
(isinstance(c, str) and
len(c) == 1)), "%s is not convertible to a character" % c
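# Illustrative usage sketch (not part of the original file): the assert
# accepts ints and single-character strings and rejects everything else.
#   isCharConvertible(65)      # passes: %c accepts an integer
#   isCharConvertible('A')     # passes: string of length 1
#   isCharConvertible('AB')    # raises AssertionError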
|
hynnet/hiwifi-openwrt-HC5661-HC5761
|
refs/heads/master
|
staging_dir/host/lib/scons-2.1.0/SCons/Defaults.py
|
21
|
"""SCons.Defaults
Builders and other things for the local site. Here's where we'll
duplicate the functionality of autoconf until we move it into the
installation procedure or use something like qmconf.
The code that reads the registry to find MSVC components was borrowed
from distutils.msvccompiler.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import division
__revision__ = "src/engine/SCons/Defaults.py 5357 2011/09/09 21:31:03 bdeegan"
import os
import errno
import shutil
import stat
import time
import sys
import SCons.Action
import SCons.Builder
import SCons.CacheDir
import SCons.Environment
import SCons.PathList
import SCons.Subst
import SCons.Tool
# A placeholder for a default Environment (for fetching source files
# from source code management systems and the like). This must be
# initialized later, after the top-level directory is set by the calling
# interface.
_default_env = None
# Lazily instantiate the default environment so the overhead of creating
# it doesn't apply when it's not needed.
def _fetch_DefaultEnvironment(*args, **kw):
"""
Returns the already-created default construction environment.
"""
global _default_env
return _default_env
def DefaultEnvironment(*args, **kw):
"""
Initial public entry point for creating the default construction
Environment.
After creating the environment, we overwrite our name
(DefaultEnvironment) with the _fetch_DefaultEnvironment() function,
which more efficiently returns the initialized default construction
environment without checking for its existence.
(This function still exists with its _default_check because someone
else (*cough* Script/__init__.py *cough*) may keep a reference
to this function. So we can't use the fully functional idiom of
    having the name originally be something that *only* creates the
construction environment and then overwrites the name.)
"""
global _default_env
if not _default_env:
import SCons.Util
_default_env = SCons.Environment.Environment(*args, **kw)
if SCons.Util.md5:
_default_env.Decider('MD5')
else:
_default_env.Decider('timestamp-match')
global DefaultEnvironment
DefaultEnvironment = _fetch_DefaultEnvironment
_default_env._CacheDir_path = None
return _default_env
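# Illustrative sketch (not part of the original source): after the first call
# the module-level name is rebound, so later lookups skip the existence check.
#   env = DefaultEnvironment()     # creates and caches the environment
#   env2 = DefaultEnvironment()    # dispatches to _fetch_DefaultEnvironment
#   assert env is env2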
# Emitters for setting the shared attribute on object files,
# and an action for checking that all of the source files
# going into a shared library are, in fact, shared.
def StaticObjectEmitter(target, source, env):
for tgt in target:
tgt.attributes.shared = None
return (target, source)
def SharedObjectEmitter(target, source, env):
for tgt in target:
tgt.attributes.shared = 1
return (target, source)
def SharedFlagChecker(source, target, env):
same = env.subst('$STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME')
if same == '0' or same == '' or same == 'False':
for src in source:
try:
shared = src.attributes.shared
except AttributeError:
shared = None
if not shared:
raise SCons.Errors.UserError("Source file: %s is static and is not compatible with shared target: %s" % (src, target[0]))
SharedCheck = SCons.Action.Action(SharedFlagChecker, None)
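# Illustrative sketch (not from the original source): the emitters above are
# meant to be attached to object-file Builders so each target node is tagged
# with its shared/static flavor before SharedFlagChecker inspects it, e.g.
#   static_obj = SCons.Builder.Builder(action=CAction, emitter=StaticObjectEmitter)
#   shared_obj = SCons.Builder.Builder(action=ShCAction, emitter=SharedObjectEmitter)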
# Some people were using these variable names before we made
# SourceFileScanner part of the public interface. Don't break their
# SConscript files until we've given them some fair warning and a
# transition period.
CScan = SCons.Tool.CScanner
DScan = SCons.Tool.DScanner
LaTeXScan = SCons.Tool.LaTeXScanner
ObjSourceScan = SCons.Tool.SourceFileScanner
ProgScan = SCons.Tool.ProgramScanner
# These aren't really tool scanners, so they don't quite belong with
# the rest of those in Tool/__init__.py, but I'm not sure where else
# they should go. Leave them here for now.
import SCons.Scanner.Dir
DirScanner = SCons.Scanner.Dir.DirScanner()
DirEntryScanner = SCons.Scanner.Dir.DirEntryScanner()
# Actions for common languages.
CAction = SCons.Action.Action("$CCCOM", "$CCCOMSTR")
ShCAction = SCons.Action.Action("$SHCCCOM", "$SHCCCOMSTR")
CXXAction = SCons.Action.Action("$CXXCOM", "$CXXCOMSTR")
ShCXXAction = SCons.Action.Action("$SHCXXCOM", "$SHCXXCOMSTR")
ASAction = SCons.Action.Action("$ASCOM", "$ASCOMSTR")
ASPPAction = SCons.Action.Action("$ASPPCOM", "$ASPPCOMSTR")
LinkAction = SCons.Action.Action("$LINKCOM", "$LINKCOMSTR")
ShLinkAction = SCons.Action.Action("$SHLINKCOM", "$SHLINKCOMSTR")
LdModuleLinkAction = SCons.Action.Action("$LDMODULECOM", "$LDMODULECOMSTR")
# Common tasks that we allow users to perform in platform-independent
# ways by creating ActionFactory instances.
ActionFactory = SCons.Action.ActionFactory
def get_paths_str(dest):
# If dest is a list, we need to manually call str() on each element
if SCons.Util.is_List(dest):
elem_strs = []
for element in dest:
elem_strs.append('"' + str(element) + '"')
return '[' + ', '.join(elem_strs) + ']'
else:
return '"' + str(dest) + '"'
def chmod_func(dest, mode):
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
for element in dest:
os.chmod(str(element), mode)
def chmod_strfunc(dest, mode):
return 'Chmod(%s, 0%o)' % (get_paths_str(dest), mode)
Chmod = ActionFactory(chmod_func, chmod_strfunc)
def copy_func(dest, src):
SCons.Node.FS.invalidate_node_memos(dest)
if SCons.Util.is_List(src) and os.path.isdir(dest):
for file in src:
shutil.copy2(file, dest)
return 0
elif os.path.isfile(src):
return shutil.copy2(src, dest)
else:
return shutil.copytree(src, dest, 1)
Copy = ActionFactory(copy_func,
lambda dest, src: 'Copy("%s", "%s")' % (dest, src),
convert=str)
def delete_func(dest, must_exist=0):
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
for entry in dest:
entry = str(entry)
# os.path.exists returns False with broken links that exist
entry_exists = os.path.exists(entry) or os.path.islink(entry)
if not entry_exists and not must_exist:
continue
# os.path.isdir returns True when entry is a link to a dir
if os.path.isdir(entry) and not os.path.islink(entry):
shutil.rmtree(entry, 1)
continue
os.unlink(entry)
def delete_strfunc(dest, must_exist=0):
return 'Delete(%s)' % get_paths_str(dest)
Delete = ActionFactory(delete_func, delete_strfunc)
def mkdir_func(dest):
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
for entry in dest:
try:
os.makedirs(str(entry))
        except os.error as e:
p = str(entry)
if (e.args[0] == errno.EEXIST or
(sys.platform=='win32' and e.args[0]==183)) \
and os.path.isdir(str(entry)):
pass # not an error if already exists
else:
raise
Mkdir = ActionFactory(mkdir_func,
lambda dir: 'Mkdir(%s)' % get_paths_str(dir))
def move_func(dest, src):
SCons.Node.FS.invalidate_node_memos(dest)
SCons.Node.FS.invalidate_node_memos(src)
shutil.move(src, dest)
Move = ActionFactory(move_func,
lambda dest, src: 'Move("%s", "%s")' % (dest, src),
convert=str)
def touch_func(dest):
SCons.Node.FS.invalidate_node_memos(dest)
if not SCons.Util.is_List(dest):
dest = [dest]
for file in dest:
file = str(file)
mtime = int(time.time())
if os.path.exists(file):
atime = os.path.getatime(file)
else:
            open(file, 'w').close()  # create the file without leaking the handle
atime = mtime
os.utime(file, (atime, mtime))
Touch = ActionFactory(touch_func,
lambda file: 'Touch(%s)' % get_paths_str(file))
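# A hedged usage sketch (hypothetical SConstruct snippet): these factories are
# typically used as steps in a Command() action list, e.g.:
#
#   env.Command('out.txt', 'in.txt',
#               [Copy('$TARGET', '$SOURCE'),
#                Chmod('$TARGET', 0o644),
#                Touch('$TARGET')])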
# Internal utility functions
def _concat(prefix, list, suffix, env, f=lambda x: x, target=None, source=None):
"""
Creates a new list from 'list' by first interpolating each element
in the list using the 'env' dictionary and then calling f on the
list, and finally calling _concat_ixes to concatenate 'prefix' and
'suffix' onto each element of the list.
"""
if not list:
return list
l = f(SCons.PathList.PathList(list).subst_path(env, target, source))
if l is not None:
list = l
return _concat_ixes(prefix, list, suffix, env)
def _concat_ixes(prefix, list, suffix, env):
"""
Creates a new list from 'list' by concatenating the 'prefix' and
'suffix' arguments onto each element of the list. A trailing space
on 'prefix' or leading space on 'suffix' will cause them to be put
into separate list elements rather than being concatenated.
"""
result = []
# ensure that prefix and suffix are strings
prefix = str(env.subst(prefix, SCons.Subst.SUBST_RAW))
suffix = str(env.subst(suffix, SCons.Subst.SUBST_RAW))
for x in list:
if isinstance(x, SCons.Node.FS.File):
result.append(x)
continue
x = str(x)
if x:
if prefix:
if prefix[-1] == ' ':
result.append(prefix[:-1])
elif x[:len(prefix)] != prefix:
x = prefix + x
result.append(x)
if suffix:
if suffix[0] == ' ':
result.append(suffix[1:])
elif x[-len(suffix):] != suffix:
result[-1] = result[-1]+suffix
return result
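# For orientation (assuming env.subst() passes these literals through):
#   _concat_ixes('-I', ['inc', 'gen'], '', env)   ->  ['-Iinc', '-Igen']
#   _concat_ixes('-I ', ['inc', 'gen'], '', env)  ->  ['-I', 'inc', '-I', 'gen']
# i.e. a trailing space on the prefix keeps it as a separate list element.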
def _stripixes(prefix, itms, suffix, stripprefixes, stripsuffixes, env, c=None):
"""
This is a wrapper around _concat()/_concat_ixes() that checks for
the existence of prefixes or suffixes on list items and strips them
where it finds them. This is used by tools (like the GNU linker)
that need to turn something like 'libfoo.a' into '-lfoo'.
"""
if not itms:
return itms
if not callable(c):
env_c = env['_concat']
if env_c != _concat and callable(env_c):
# There's a custom _concat() method in the construction
# environment, and we've allowed people to set that in
# the past (see test/custom-concat.py), so preserve the
# backwards compatibility.
c = env_c
else:
c = _concat_ixes
stripprefixes = list(map(env.subst, SCons.Util.flatten(stripprefixes)))
stripsuffixes = list(map(env.subst, SCons.Util.flatten(stripsuffixes)))
stripped = []
for l in SCons.PathList.PathList(itms).subst_path(env, None, None):
if isinstance(l, SCons.Node.FS.File):
stripped.append(l)
continue
if not SCons.Util.is_String(l):
l = str(l)
for stripprefix in stripprefixes:
lsp = len(stripprefix)
if l[:lsp] == stripprefix:
l = l[lsp:]
# Do not strip more than one prefix
break
for stripsuffix in stripsuffixes:
lss = len(stripsuffix)
if l[-lss:] == stripsuffix:
l = l[:-lss]
# Do not strip more than one suffix
break
stripped.append(l)
return c(prefix, stripped, suffix, env)
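# For orientation (assuming pass-through substitution and an 'env' whose
# '_concat' entry is the default _concat above): stripping the 'lib' prefix
# and '.a' suffix before re-prefixing with '-l' turns a library file name
# into a linker flag:
#   _stripixes('-l', ['libfoo.a'], '', ['lib'], ['.a'], env)  ->  ['-lfoo']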
def processDefines(defs):
"""process defines, resolving strings, lists, dictionaries, into a list of
strings
"""
if SCons.Util.is_List(defs):
l = []
for d in defs:
if d is None:
continue
elif SCons.Util.is_List(d) or isinstance(d, tuple):
if len(d) >= 2:
l.append(str(d[0]) + '=' + str(d[1]))
else:
l.append(str(d[0]))
elif SCons.Util.is_Dict(d):
                for macro, value in d.items():
if value is not None:
l.append(str(macro) + '=' + str(value))
else:
l.append(str(macro))
elif SCons.Util.is_String(d):
l.append(str(d))
else:
raise SCons.Errors.UserError("DEFINE %s is not a list, dict, string or None."%repr(d))
elif SCons.Util.is_Dict(defs):
# The items in a dictionary are stored in random order, but
# if the order of the command-line options changes from
# invocation to invocation, then the signature of the command
# line will change and we'll get random unnecessary rebuilds.
# Consequently, we have to sort the keys to ensure a
# consistent order...
l = []
for k,v in sorted(defs.items()):
if v is None:
l.append(str(k))
else:
l.append(str(k) + '=' + str(v))
else:
l = [str(defs)]
return l
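# For orientation:
#   processDefines(['FOO', ('BAR', 1)])   ->  ['FOO', 'BAR=1']
#   processDefines({'A': None, 'B': 2})   ->  ['A', 'B=2']   (keys sorted)
#   processDefines('BAZ')                 ->  ['BAZ']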
def _defines(prefix, defs, suffix, env, c=_concat_ixes):
"""A wrapper around _concat_ixes that turns a list or string
into a list of C preprocessor command-line definitions.
"""
return c(prefix, env.subst_path(processDefines(defs)), suffix, env)
class NullCmdGenerator(object):
"""This is a callable class that can be used in place of other
command generators if you don't want them to do anything.
The __call__ method for this class simply returns the thing
you instantiated it with.
Example usage:
env["DO_NOTHING"] = NullCmdGenerator
env["LINKCOM"] = "${DO_NOTHING('$LINK $SOURCES $TARGET')}"
"""
def __init__(self, cmd):
self.cmd = cmd
def __call__(self, target, source, env, for_signature=None):
return self.cmd
class Variable_Method_Caller(object):
"""A class for finding a construction variable on the stack and
calling one of its methods.
We use this to support "construction variables" in our string
eval()s that actually stand in for methods--specifically, use
of "RDirs" in call to _concat that should actually execute the
"TARGET.RDirs" method. (We used to support this by creating a little
"build dictionary" that mapped RDirs to the method, but this got in
the way of Memoizing construction environments, because we had to
create new environment objects to hold the variables.)
"""
def __init__(self, variable, method):
self.variable = variable
self.method = method
def __call__(self, *args, **kw):
try: 1//0
except ZeroDivisionError:
# Don't start iterating with the current stack-frame to
# prevent creating reference cycles (f_back is safe).
frame = sys.exc_info()[2].tb_frame.f_back
variable = self.variable
while frame:
if variable in frame.f_locals:
v = frame.f_locals[variable]
if v:
method = getattr(v, self.method)
return method(*args, **kw)
frame = frame.f_back
return None
ConstructionEnvironment = {
'BUILDERS' : {},
'SCANNERS' : [],
'CONFIGUREDIR' : '#/.sconf_temp',
'CONFIGURELOG' : '#/config.log',
'CPPSUFFIXES' : SCons.Tool.CSuffixes,
'DSUFFIXES' : SCons.Tool.DSuffixes,
'ENV' : {},
'IDLSUFFIXES' : SCons.Tool.IDLSuffixes,
# 'LATEXSUFFIXES' : SCons.Tool.LaTeXSuffixes, # moved to the TeX tools generate functions
'_concat' : _concat,
'_defines' : _defines,
'_stripixes' : _stripixes,
'_LIBFLAGS' : '${_concat(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, __env__)}',
'_LIBDIRFLAGS' : '$( ${_concat(LIBDIRPREFIX, LIBPATH, LIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)',
'_CPPINCFLAGS' : '$( ${_concat(INCPREFIX, CPPPATH, INCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)',
'_CPPDEFFLAGS' : '${_defines(CPPDEFPREFIX, CPPDEFINES, CPPDEFSUFFIX, __env__)}',
'TEMPFILE' : NullCmdGenerator,
'Dir' : Variable_Method_Caller('TARGET', 'Dir'),
'Dirs' : Variable_Method_Caller('TARGET', 'Dirs'),
'File' : Variable_Method_Caller('TARGET', 'File'),
'RDirs' : Variable_Method_Caller('TARGET', 'RDirs'),
}
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
perlygatekeeper/glowing-robot
|
refs/heads/master
|
Little_Alchemy_2/Scraper_python/env/lib/python3.7/site-packages/bs4/formatter.py
|
3
|
from bs4.dammit import EntitySubstitution
class Formatter(EntitySubstitution):
"""Describes a strategy to use when outputting a parse tree to a string.
Some parts of this strategy come from the distinction between
HTML4, HTML5, and XML. Others are configurable by the user.
Formatters are passed in as the `formatter` argument to methods
like `PageElement.encode`. Most people won't need to think about
formatters, and most people who need to think about them can pass
in one of these predefined strings as `formatter` rather than
making a new Formatter object:
For HTML documents:
* 'html' - HTML entity substitution for generic HTML documents. (default)
* 'html5' - HTML entity substitution for HTML5 documents.
* 'minimal' - Only make the substitutions necessary to guarantee
valid HTML.
* None - Do not perform any substitution. This will be faster
but may result in invalid markup.
For XML documents:
* 'html' - Entity substitution for XHTML documents.
* 'minimal' - Only make the substitutions necessary to guarantee
valid XML. (default)
* None - Do not perform any substitution. This will be faster
but may result in invalid markup.
"""
# Registries of XML and HTML formatters.
XML_FORMATTERS = {}
HTML_FORMATTERS = {}
HTML = 'html'
XML = 'xml'
HTML_DEFAULTS = dict(
cdata_containing_tags=set(["script", "style"]),
)
def _default(self, language, value, kwarg):
if value is not None:
return value
if language == self.XML:
return set()
return self.HTML_DEFAULTS[kwarg]
def __init__(
self, language=None, entity_substitution=None,
void_element_close_prefix='/', cdata_containing_tags=None,
):
"""Constructor.
:param language: This should be Formatter.XML if you are formatting
XML markup and Formatter.HTML if you are formatting HTML markup.
:param entity_substitution: A function to call to replace special
characters with XML/HTML entities. For examples, see
bs4.dammit.EntitySubstitution.substitute_html and substitute_xml.
:param void_element_close_prefix: By default, void elements
are represented as <tag/> (XML rules) rather than <tag>
(HTML rules). To get <tag>, pass in the empty string.
:param cdata_containing_tags: The list of tags that are defined
as containing CDATA in this dialect. For example, in HTML,
<script> and <style> tags are defined as containing CDATA,
and their contents should not be formatted.
"""
self.language = language
self.entity_substitution = entity_substitution
self.void_element_close_prefix = void_element_close_prefix
self.cdata_containing_tags = self._default(
language, cdata_containing_tags, 'cdata_containing_tags'
)
def substitute(self, ns):
"""Process a string that needs to undergo entity substitution.
This may be a string encountered in an attribute value or as
text.
:param ns: A string.
:return: A string with certain characters replaced by named
or numeric entities.
"""
if not self.entity_substitution:
return ns
from .element import NavigableString
if (isinstance(ns, NavigableString)
and ns.parent is not None
and ns.parent.name in self.cdata_containing_tags):
# Do nothing.
return ns
# Substitute.
return self.entity_substitution(ns)
def attribute_value(self, value):
"""Process the value of an attribute.
        :param value: A string.
:return: A string with certain characters replaced by named
or numeric entities.
"""
return self.substitute(value)
def attributes(self, tag):
"""Reorder a tag's attributes however you want.
By default, attributes are sorted alphabetically. This makes
behavior consistent between Python 2 and Python 3, and preserves
backwards compatibility with older versions of Beautiful Soup.
"""
return sorted(tag.attrs.items())
class HTMLFormatter(Formatter):
"""A generic Formatter for HTML."""
REGISTRY = {}
def __init__(self, *args, **kwargs):
return super(HTMLFormatter, self).__init__(self.HTML, *args, **kwargs)
class XMLFormatter(Formatter):
"""A generic Formatter for XML."""
REGISTRY = {}
def __init__(self, *args, **kwargs):
return super(XMLFormatter, self).__init__(self.XML, *args, **kwargs)
# Set up aliases for the default formatters.
HTMLFormatter.REGISTRY['html'] = HTMLFormatter(
entity_substitution=EntitySubstitution.substitute_html
)
HTMLFormatter.REGISTRY["html5"] = HTMLFormatter(
entity_substitution=EntitySubstitution.substitute_html,
void_element_close_prefix = None
)
HTMLFormatter.REGISTRY["minimal"] = HTMLFormatter(
entity_substitution=EntitySubstitution.substitute_xml
)
HTMLFormatter.REGISTRY[None] = HTMLFormatter(
entity_substitution=None
)
XMLFormatter.REGISTRY["html"] = XMLFormatter(
entity_substitution=EntitySubstitution.substitute_html
)
XMLFormatter.REGISTRY["minimal"] = XMLFormatter(
entity_substitution=EntitySubstitution.substitute_xml
)
XMLFormatter.REGISTRY[None] = XMLFormatter(
    entity_substitution=None
)
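# A minimal usage sketch: callers normally pass one of the registry keys as
# the `formatter` argument rather than instantiating Formatter directly, e.g.:
#
#   from bs4 import BeautifulSoup
#   soup = BeautifulSoup("<p>Q & A</p>", "html.parser")
#   print(soup.prettify(formatter="minimal"))   # '&' becomes '&amp;'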
|
raster-foundry/raster-foundry
|
refs/heads/develop
|
app-tasks/rf/setup.py
|
1
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import print_function
import io
from glob import glob
from os.path import basename
from os.path import dirname
from os.path import join
from os.path import splitext
from setuptools import find_packages
from setuptools import setup
def read(*names, **kwargs):
return io.open(
join(dirname(__file__), *names),
encoding=kwargs.get('encoding', 'utf8')
).read()
setup(
name='rf',
version='0.1.0',
license='Apache',
description='An application to import, process, and export raster imagery.',
author='Azavea',
author_email='systems+rf@azavea.com',
url='https://github.com/azavea/raster-foundry',
packages=find_packages('src'),
package_dir={'': 'src'},
py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'rf=rf.cli:run'
]
},
classifiers=[
# complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
'Operating System :: Unix',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
# uncomment if you test on these interpreters:
# 'Programming Language :: Python :: Implementation :: IronPython',
# 'Programming Language :: Python :: Implementation :: Jython',
# 'Programming Language :: Python :: Implementation :: Stackless',
'Topic :: Utilities',
],
keywords=[
# eg: 'keyword1', 'keyword2', 'keyword3',
],
    install_requires=read('requirements.txt').splitlines(),
extras_require={
# eg:
# 'rst': ['docutils>=0.11'],
# ':python_version=="2.6"': ['argparse'],
},
)
|
wadetb/tinynumpy
|
refs/heads/master
|
docs/ext/numpydoc.py
|
9
|
"""
========
numpydoc
========
Sphinx extension that handles docstrings in the Numpy standard format. [1]
It will:
- Convert Parameters etc. sections to field lists.
- Convert See Also section to a See also entry.
- Renumber references.
- Extract the signature from the docstring, if it can't be determined otherwise.
.. [1] http://projects.scipy.org/numpy/wiki/CodingStyleGuidelines#docstring-standard
"""
import os
import re
import pydoc
import sys
from docscrape_sphinx import get_doc_object, SphinxDocString
from sphinx.util.compat import Directive
import inspect
if sys.version_info > (3,):
unicode = str
def mangle_docstrings(app, what, name, obj, options, lines,
reference_offset=[0]):
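    # NOTE: `reference_offset` uses the mutable-default-argument idiom as a
    # persistent counter across calls, so reference labels renumbered in one
    # docstring never collide with those from previously processed docstrings.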
cfg = dict(use_plots=app.config.numpydoc_use_plots,
show_class_members=app.config.numpydoc_show_class_members)
if what == 'module':
# Strip top title
title_re = re.compile(r'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*',
re.I | re.S)
lines[:] = title_re.sub(u'', u"\n".join(lines)).split(u"\n")
else:
doc = get_doc_object(obj, what, u"\n".join(lines), config=cfg)
lines[:] = unicode(doc).split(u"\n")
if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \
obj.__name__:
if hasattr(obj, '__module__'):
v = dict(full_name=u"%s.%s" % (obj.__module__, obj.__name__))
else:
v = dict(full_name=obj.__name__)
lines += [u'', u'.. htmlonly::', '']
lines += [u' %s' % x for x in
(app.config.numpydoc_edit_link % v).split("\n")]
# replace reference numbers so that there are no duplicates
references = []
for line in lines:
line = line.strip()
        m = re.match(r'^\.\. \[([a-z0-9_.-]+)\]', line, re.I)
if m:
references.append(m.group(1))
# start renaming from the longest string, to avoid overwriting parts
references.sort(key=lambda x: -len(x))
if references:
for i, line in enumerate(lines):
for r in references:
if re.match(r'^\d+$', r):
new_r = u"R%d" % (reference_offset[0] + int(r))
else:
new_r = u"%s%d" % (r, reference_offset[0])
lines[i] = lines[i].replace(u'[%s]_' % r,
u'[%s]_' % new_r)
lines[i] = lines[i].replace(u'.. [%s]' % r,
u'.. [%s]' % new_r)
reference_offset[0] += len(references)
def mangle_signature(app, what, name, obj, options, sig, retann):
# Do not try to inspect classes that don't define `__init__`
if (inspect.isclass(obj) and
(not hasattr(obj, '__init__') or
'initializes x; see ' in pydoc.getdoc(obj.__init__))):
return '', ''
if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')):
return
if not hasattr(obj, '__doc__'):
return
doc = SphinxDocString(pydoc.getdoc(obj))
if doc['Signature']:
sig = re.sub(u"^[^(]*", u"", doc['Signature'])
return sig, u''
def setup(app, get_doc_object_=get_doc_object):
global get_doc_object
get_doc_object = get_doc_object_
app.connect('autodoc-process-docstring', mangle_docstrings)
app.connect('autodoc-process-signature', mangle_signature)
app.add_config_value('numpydoc_edit_link', None, False)
app.add_config_value('numpydoc_use_plots', None, False)
app.add_config_value('numpydoc_show_class_members', True, True)
# Extra mangling domains
app.add_domain(NumpyPythonDomain)
app.add_domain(NumpyCDomain)
#------------------------------------------------------------------------------
# Docstring-mangling domains
#------------------------------------------------------------------------------
from docutils.statemachine import ViewList
from sphinx.domains.c import CDomain
from sphinx.domains.python import PythonDomain
class ManglingDomainBase(object):
directive_mangling_map = {}
def __init__(self, *a, **kw):
super(ManglingDomainBase, self).__init__(*a, **kw)
self.wrap_mangling_directives()
def wrap_mangling_directives(self):
for name, objtype in self.directive_mangling_map.items():
self.directives[name] = wrap_mangling_directive(
self.directives[name], objtype)
class NumpyPythonDomain(ManglingDomainBase, PythonDomain):
name = 'np'
directive_mangling_map = {
'function': 'function',
'class': 'class',
'exception': 'class',
'method': 'function',
'classmethod': 'function',
'staticmethod': 'function',
'attribute': 'attribute',
}
class NumpyCDomain(ManglingDomainBase, CDomain):
name = 'np-c'
directive_mangling_map = {
'function': 'function',
'member': 'attribute',
'macro': 'function',
'type': 'class',
'var': 'object',
}
def wrap_mangling_directive(base_directive, objtype):
class directive(base_directive):
def run(self):
env = self.state.document.settings.env
name = None
if self.arguments:
m = re.match(r'^(.*\s+)?(.*?)(\(.*)?', self.arguments[0])
name = m.group(2).strip()
if not name:
name = self.arguments[0]
lines = list(self.content)
mangle_docstrings(env.app, objtype, name, None, None, lines)
self.content = ViewList(lines, self.content.parent)
return base_directive.run(self)
return directive
|
itai12312/workspaces
|
refs/heads/master
|
hellodjango/venv/lib/python2.7/site-packages/django/contrib/gis/tests/test_measure.py
|
221
|
"""
Distance and Area objects allow for sensible and convenient calculations
and conversions. Here are some tests.
"""
from django.contrib.gis.measure import Distance, Area, D, A
from django.utils import unittest
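# For orientation: D and A are shorthand aliases for Distance and Area,
# e.g. D(km=1).m == 1000.0 and A(sq_km=1).sq_m == 1000000.0.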
class DistanceTest(unittest.TestCase):
"Testing the Distance object"
def testInit(self):
"Testing initialisation from valid units"
d = Distance(m=100)
self.assertEqual(d.m, 100)
d1, d2, d3 = D(m=100), D(meter=100), D(metre=100)
for d in (d1, d2, d3):
self.assertEqual(d.m, 100)
d = D(nm=100)
self.assertEqual(d.m, 185200)
y1, y2, y3 = D(yd=100), D(yard=100), D(Yard=100)
for d in (y1, y2, y3):
self.assertEqual(d.yd, 100)
mm1, mm2 = D(millimeter=1000), D(MiLLiMeTeR=1000)
for d in (mm1, mm2):
self.assertEqual(d.m, 1.0)
self.assertEqual(d.mm, 1000.0)
def testInitInvalid(self):
"Testing initialisation from invalid units"
self.assertRaises(AttributeError, D, banana=100)
def testAccess(self):
"Testing access in different units"
d = D(m=100)
self.assertEqual(d.km, 0.1)
self.assertAlmostEqual(d.ft, 328.084, 3)
def testAccessInvalid(self):
"Testing access in invalid units"
d = D(m=100)
self.assertFalse(hasattr(d, 'banana'))
def testAddition(self):
"Test addition & subtraction"
d1 = D(m=100)
d2 = D(m=200)
d3 = d1 + d2
self.assertEqual(d3.m, 300)
d3 += d1
self.assertEqual(d3.m, 400)
d4 = d1 - d2
self.assertEqual(d4.m, -100)
d4 -= d1
self.assertEqual(d4.m, -200)
with self.assertRaises(TypeError):
d5 = d1 + 1
self.fail('Distance + number should raise TypeError')
with self.assertRaises(TypeError):
d5 = d1 - 1
self.fail('Distance - number should raise TypeError')
with self.assertRaises(TypeError):
d1 += 1
self.fail('Distance += number should raise TypeError')
with self.assertRaises(TypeError):
d1 -= 1
self.fail('Distance -= number should raise TypeError')
def testMultiplication(self):
"Test multiplication & division"
d1 = D(m=100)
d3 = d1 * 2
self.assertEqual(d3.m, 200)
d3 = 2 * d1
self.assertEqual(d3.m, 200)
d3 *= 5
self.assertEqual(d3.m, 1000)
d4 = d1 / 2
self.assertEqual(d4.m, 50)
d4 /= 5
self.assertEqual(d4.m, 10)
d5 = d1 / D(m=2)
self.assertEqual(d5, 50)
a5 = d1 * D(m=10)
self.assertTrue(isinstance(a5, Area))
self.assertEqual(a5.sq_m, 100*10)
with self.assertRaises(TypeError):
d1 *= D(m=1)
self.fail('Distance *= Distance should raise TypeError')
with self.assertRaises(TypeError):
d1 /= D(m=1)
self.fail('Distance /= Distance should raise TypeError')
def testUnitConversions(self):
"Testing default units during maths"
d1 = D(m=100)
d2 = D(km=1)
d3 = d1 + d2
self.assertEqual(d3._default_unit, 'm')
d4 = d2 + d1
self.assertEqual(d4._default_unit, 'km')
d5 = d1 * 2
self.assertEqual(d5._default_unit, 'm')
d6 = d1 / 2
self.assertEqual(d6._default_unit, 'm')
def testComparisons(self):
"Testing comparisons"
d1 = D(m=100)
d2 = D(km=1)
d3 = D(km=0)
self.assertTrue(d2 > d1)
self.assertTrue(d1 == d1)
self.assertTrue(d1 < d2)
self.assertFalse(d3)
def testUnitsStr(self):
"Testing conversion to strings"
d1 = D(m=100)
d2 = D(km=3.5)
self.assertEqual(str(d1), '100.0 m')
self.assertEqual(str(d2), '3.5 km')
self.assertEqual(repr(d1), 'Distance(m=100.0)')
self.assertEqual(repr(d2), 'Distance(km=3.5)')
def testUnitAttName(self):
"Testing the `unit_attname` class method"
unit_tuple = [('Yard', 'yd'), ('Nautical Mile', 'nm'), ('German legal metre', 'german_m'),
('Indian yard', 'indian_yd'), ('Chain (Sears)', 'chain_sears'), ('Chain', 'chain')]
for nm, att in unit_tuple:
self.assertEqual(att, D.unit_attname(nm))
class AreaTest(unittest.TestCase):
"Testing the Area object"
def testInit(self):
"Testing initialisation from valid units"
a = Area(sq_m=100)
self.assertEqual(a.sq_m, 100)
a = A(sq_m=100)
self.assertEqual(a.sq_m, 100)
a = A(sq_mi=100)
self.assertEqual(a.sq_m, 258998811.0336)
    def testInitInvalid(self):
"Testing initialisation from invalid units"
self.assertRaises(AttributeError, A, banana=100)
def testAccess(self):
"Testing access in different units"
a = A(sq_m=100)
self.assertEqual(a.sq_km, 0.0001)
self.assertAlmostEqual(a.sq_ft, 1076.391, 3)
    def testAccessInvalid(self):
"Testing access in invalid units"
a = A(sq_m=100)
self.assertFalse(hasattr(a, 'banana'))
def testAddition(self):
"Test addition & subtraction"
a1 = A(sq_m=100)
a2 = A(sq_m=200)
a3 = a1 + a2
self.assertEqual(a3.sq_m, 300)
a3 += a1
self.assertEqual(a3.sq_m, 400)
a4 = a1 - a2
self.assertEqual(a4.sq_m, -100)
a4 -= a1
self.assertEqual(a4.sq_m, -200)
with self.assertRaises(TypeError):
a5 = a1 + 1
self.fail('Area + number should raise TypeError')
with self.assertRaises(TypeError):
a5 = a1 - 1
self.fail('Area - number should raise TypeError')
with self.assertRaises(TypeError):
a1 += 1
self.fail('Area += number should raise TypeError')
with self.assertRaises(TypeError):
a1 -= 1
self.fail('Area -= number should raise TypeError')
def testMultiplication(self):
"Test multiplication & division"
a1 = A(sq_m=100)
a3 = a1 * 2
self.assertEqual(a3.sq_m, 200)
a3 = 2 * a1
self.assertEqual(a3.sq_m, 200)
a3 *= 5
self.assertEqual(a3.sq_m, 1000)
a4 = a1 / 2
self.assertEqual(a4.sq_m, 50)
a4 /= 5
self.assertEqual(a4.sq_m, 10)
with self.assertRaises(TypeError):
a5 = a1 * A(sq_m=1)
self.fail('Area * Area should raise TypeError')
with self.assertRaises(TypeError):
a1 *= A(sq_m=1)
self.fail('Area *= Area should raise TypeError')
with self.assertRaises(TypeError):
a5 = a1 / A(sq_m=1)
self.fail('Area / Area should raise TypeError')
with self.assertRaises(TypeError):
a1 /= A(sq_m=1)
self.fail('Area /= Area should raise TypeError')
def testUnitConversions(self):
"Testing default units during maths"
a1 = A(sq_m=100)
a2 = A(sq_km=1)
a3 = a1 + a2
self.assertEqual(a3._default_unit, 'sq_m')
a4 = a2 + a1
self.assertEqual(a4._default_unit, 'sq_km')
a5 = a1 * 2
self.assertEqual(a5._default_unit, 'sq_m')
a6 = a1 / 2
self.assertEqual(a6._default_unit, 'sq_m')
def testComparisons(self):
"Testing comparisons"
a1 = A(sq_m=100)
a2 = A(sq_km=1)
a3 = A(sq_km=0)
self.assertTrue(a2 > a1)
self.assertTrue(a1 == a1)
self.assertTrue(a1 < a2)
self.assertFalse(a3)
def testUnitsStr(self):
"Testing conversion to strings"
a1 = A(sq_m=100)
a2 = A(sq_km=3.5)
self.assertEqual(str(a1), '100.0 sq_m')
self.assertEqual(str(a2), '3.5 sq_km')
self.assertEqual(repr(a1), 'Area(sq_m=100.0)')
self.assertEqual(repr(a2), 'Area(sq_km=3.5)')
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(DistanceTest))
s.addTest(unittest.makeSuite(AreaTest))
return s
def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
if __name__=="__main__":
run()
|
AllMyChanges/allmychanges.com
|
refs/heads/master
|
allmychanges/tests/registration.py
|
1
|
# coding: utf-8
from nose.tools import eq_
from .utils import check_status_code, create_user, refresh
from allmychanges import chat
from allmychanges.models import EmailVerificationCode
from django.core import mail
from django.test import Client
from django.core.urlresolvers import reverse
def test_user_creation_lead_to_chat_notification():
chat.clear_messages()
create_user('art')
eq_(1, len(chat.messages))
def test_first_step_sends_email_with_email_validation_code():
cl = Client()
user = create_user('art')
cl.login(username='art', password='art')
eq_(0, EmailVerificationCode.objects.count())
response = cl.post(reverse('first-step'),
dict(email='unittest@allmychanges.com',
timezone='UTC'))
check_status_code(302, response)
eq_('http://testserver' + reverse('second-step'), response['Location'])
eq_(1, EmailVerificationCode.objects.count())
user = refresh(user)
assert user.email_verification_code is not None
eq_(1, len(mail.outbox))
def test_verification_code():
cl = Client()
user = create_user('art')
code = EmailVerificationCode.new_code_for(user)
url = reverse('verify-email', kwargs=dict(code=code.hash))
eq_(False, user.email_is_valid)
response = cl.get(url)
check_status_code(200, response)
user = refresh(user)
eq_(True, user.email_is_valid)
def test_email_is_required_in_account_settings():
# email field is required now.
# here we check if error message is shown on the page
# when email is not given
cl = Client()
user = create_user('art')
cl.login(username='art', password='art')
url = reverse('account-settings')
    response = cl.post(
        url,
        dict()
    )
check_status_code(200, response)
assert 'There is some error in the form data' in response.content
with open('/tmp/allmychanges/response.html', 'w') as f:
f.write(response.content)
assert 'We need your email to deliver fresh release notes.' in response.content
|
levkar/odoo-addons
|
refs/heads/8.0
|
product_stock_location/__openerp__.py
|
1
|
# -*- coding: utf-8 -*-
{
'name': 'Product Stock Location',
'version': '1.0',
'category': 'Sales Management',
'sequence': 14,
    'summary': 'Sales, Product, Category, Classification',
'description': """
Product Stock Location
======================
""",
'author': 'Ingenieria ADHOC',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'product',
],
'data': [
'product_view.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
marcellodesales/svnedge-console
|
refs/heads/master
|
ext/windows/pkg-toolkit/pkg/vendor-packages/doc/pycurl/tests/test_post.py
|
13
|
#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
# $Id: test_post.py,v 1.9 2003/04/21 18:46:11 mfx Exp $
import urllib
import pycurl
# simple
pf = {'field1': 'value1'}
# multiple fields
pf = {'field1':'value1', 'field2':'value2 with blanks', 'field3':'value3'}
# multiple fields with & in field
pf = {'field1':'value1', 'field2':'value2 with blanks and & chars',
'field3':'value3'}
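# Note: each assignment above rebinds pf; only the last dictionary (the one
# with the '&' characters) is actually URL-encoded and posted below.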
c = pycurl.Curl()
c.setopt(c.URL, 'http://pycurl.sourceforge.net/tests/testpostvars.php')
c.setopt(c.POSTFIELDS, urllib.urlencode(pf))
c.setopt(c.VERBOSE, 1)
c.perform()
c.close()
|
lrivallain/openvolunteer
|
refs/heads/master
|
views.py
|
1
|
#-*- coding: utf-8 -*-
"""
---------------------------------------------------------------------------
OpenVolunteer
Copyright 2009, Ludovic Rivallain
---------------------------------------------------------------------------
This file is part of OpenVolunteer.
OpenVolunteer is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenVolunteer is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OpenVolunteer. If not, see <http://www.gnu.org/licenses/>.
---------------------------------------------------------------------------
"""
from views_volunteer import *
from views_event import *
from views_answer import *
from views_job import *
from views_need import *
from views_api import *
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
@login_required(redirect_field_name='next')
def index(request):
"""
    OpenVolunteer home page - displays links to the main parts
    of the interface
"""
return render_to_response('openvolunteer/index.html',{},
context_instance=RequestContext(request))
|
neurospin/pylearn-epac
|
refs/heads/master
|
examples/run_multi_processes.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 24 12:13:11 2013
@author: edouard.duchesnay@cea.fr
@author: benoit.da_mota@inria.fr
@author: jinpeng.li@cea.fr
Introduction
------------
The library Epac can create an Epac tree for machine learning algorithms.
This example shows how to run an Epac workflow with n processes
"""
import sys
import optparse
import time
import numpy as np
from sklearn import datasets
from sklearn.svm import SVC
from sklearn.feature_selection import SelectKBest
from epac import Pipe, CV, Perms, Methods, CVBestSearchRefit, range_log2
from epac.map_reduce.engine import LocalEngine
def do_all(options):
if options.k_max != "auto":
k_values = range_log2(np.minimum(int(options.k_max),
options.n_features), add_n=True)
else:
k_values = range_log2(options.n_features, add_n=True)
C_values = [1, 10]
random_state = 0
#print options
#sys.exit(0)
if options.trace:
from epac import conf
conf.TRACE_TOPDOWN = True
## 1) Build dataset
## ================
X, y = datasets.make_classification(n_samples=options.n_samples,
n_features=options.n_features,
n_informative=options.n_informative)
## 2) Build Workflow
## =================
time_start = time.time()
## CV + Grid search of a pipeline with a nested grid search
cls = Methods(*[Pipe(SelectKBest(k=k),
SVC(kernel="linear", C=C))
for C in C_values
for k in k_values])
pipeline = CVBestSearchRefit(cls,
n_folds=options.n_folds_nested,
random_state=random_state)
wf = Perms(CV(pipeline, n_folds=options.n_folds),
n_perms=options.n_perms,
permute="y",
random_state=random_state)
print "Time ellapsed, tree construction:", time.time() - time_start
## 3) Run Workflow
## ===============
time_fit_predict = time.time()
local_engine = LocalEngine(tree_root=wf, num_processes=options.n_cores)
wf = local_engine.run(X=X, y=y)
print "Time ellapsed, fit predict:", time.time() - time_fit_predict
time_reduce = time.time()
## 4) Reduce Workflow
## ==================
print wf.reduce()
print "Time ellapsed, reduce:", time.time() - time_reduce
if __name__ == "__main__":
# Set default values to parameters
n_samples = 100
n_features = int(1E03)
n_informative = 5
n_perms = 10
n_folds = 10
n_folds_nested = 5
k_max = "auto"
n_cores = 3
# parse command line options
parser = optparse.OptionParser()
parser.add_option('-n', '--n_samples',
help='(default %d)' % n_samples,
default=n_samples, type="int")
parser.add_option('-p', '--n_features',
help='(default %d)' % n_features,
default=n_features, type="int")
parser.add_option('-i', '--n_informative',
help='(default %d)' % n_informative,
default=n_informative, type="int")
parser.add_option('-m', '--n_perms',
help='(default %d)' % n_perms,
default=n_perms, type="int")
parser.add_option('-f', '--n_folds',
help='(default %d)' % n_folds,
default=n_folds, type="int")
parser.add_option('-g', '--n_folds_nested',
help='(default %d)' % n_folds_nested,
default=n_folds_nested, type="int")
parser.add_option('-k', '--k_max',
help='"auto": 1, 2, 4, ... n_features values. '
'"fixed": 1, 2, 4, ..., k_max (default %s)' % k_max,
default=k_max, type="string")
parser.add_option('-t', '--trace',
help='Trace execution (default %s)' % False,
action='store_true', default=False)
parser.add_option('-c', '--n_cores',
help='(default %d)' % n_cores,
default=n_cores, type="int")
#argv = []
#options, args = parser.parse_args(argv)
options, args = parser.parse_args(sys.argv)
do_all(options)
|
quinot/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/avi/avi_network.py
|
15
|
#!/usr/bin/python
#
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
# Avi Version: 17.1.1
#
# Copyright: (c) 2017 Gaurav Rastogi, <grastogi@avinetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_network
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of Network Avi RESTful Object
description:
    - This module is used to configure the Network object.
    - More examples at U(https://github.com/avinetworks/devops).
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
cloud_ref:
description:
- It is a reference to an object of type cloud.
configured_subnets:
description:
- List of subnet.
dhcp_enabled:
description:
- Select the ip address management scheme for this network.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
exclude_discovered_subnets:
description:
- When selected, excludes all discovered subnets in this network from consideration for virtual service placement.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
name:
description:
- Name of the object.
required: true
synced_from_se:
description:
- Boolean flag to set synced_from_se.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
tenant_ref:
description:
- It is a reference to an object of type tenant.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
vcenter_dvs:
description:
- Boolean flag to set vcenter_dvs.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
vimgrnw_ref:
description:
- It is a reference to an object of type vimgrnwruntime.
vrf_context_ref:
description:
- It is a reference to an object of type vrfcontext.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create Network object
avi_network:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_network
"""
RETURN = '''
obj:
description: Network (api/network) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
avi_api_update_method=dict(default='put',
choices=['put', 'patch']),
avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
cloud_ref=dict(type='str',),
configured_subnets=dict(type='list',),
dhcp_enabled=dict(type='bool',),
exclude_discovered_subnets=dict(type='bool',),
name=dict(type='str', required=True),
synced_from_se=dict(type='bool',),
tenant_ref=dict(type='str',),
url=dict(type='str',),
uuid=dict(type='str',),
vcenter_dvs=dict(type='bool',),
vimgrnw_ref=dict(type='str',),
vrf_context_ref=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'network',
set([]))
if __name__ == '__main__':
main()
|
kotnik/nikola
|
refs/heads/master
|
nikola/plugins/task_copy_files.py
|
3
|
# -*- coding: utf-8 -*-
# Copyright © 2012-2013 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
from nikola.plugin_categories import Task
from nikola import utils
class CopyFiles(Task):
"""Copy static files into the output folder."""
name = "copy_files"
def gen_tasks(self):
"""Copy static files into the output folder."""
kw = {
'files_folders': self.site.config['FILES_FOLDERS'],
'output_folder': self.site.config['OUTPUT_FOLDER'],
'filters': self.site.config['FILTERS'],
}
flag = False
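        # 'flag' records whether any copy task was generated; if none were,
        # an empty task is still yielded below so doit knows this basename.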
for src in kw['files_folders']:
dst = kw['output_folder']
filters = kw['filters']
real_dst = os.path.join(dst, kw['files_folders'][src])
for task in utils.copy_tree(src, real_dst, link_cutoff=dst):
flag = True
task['basename'] = self.name
task['uptodate'] = [utils.config_changed(kw)]
yield utils.apply_filters(task, filters)
if not flag:
yield {
'basename': self.name,
'actions': (),
}
|
libcrosswind/libcrosswind
|
refs/heads/master
|
platform/windows/compilers/x64/TDM-GCC-64/gdb64/bin/lib/msilib/text.py
|
159
|
import msilib
import os

dirname = os.path.dirname(__file__)
ActionText = [
(u'InstallValidate', u'Validating install', None),
(u'InstallFiles', u'Copying new files', u'File: [1], Directory: [9], Size: [6]'),
(u'InstallAdminPackage', u'Copying network install files', u'File: [1], Directory: [9], Size: [6]'),
(u'FileCost', u'Computing space requirements', None),
(u'CostInitialize', u'Computing space requirements', None),
(u'CostFinalize', u'Computing space requirements', None),
(u'CreateShortcuts', u'Creating shortcuts', u'Shortcut: [1]'),
(u'PublishComponents', u'Publishing Qualified Components', u'Component ID: [1], Qualifier: [2]'),
(u'PublishFeatures', u'Publishing Product Features', u'Feature: [1]'),
(u'PublishProduct', u'Publishing product information', None),
(u'RegisterClassInfo', u'Registering Class servers', u'Class Id: [1]'),
(u'RegisterExtensionInfo', u'Registering extension servers', u'Extension: [1]'),
(u'RegisterMIMEInfo', u'Registering MIME info', u'MIME Content Type: [1], Extension: [2]'),
(u'RegisterProgIdInfo', u'Registering program identifiers', u'ProgId: [1]'),
(u'AllocateRegistrySpace', u'Allocating registry space', u'Free space: [1]'),
(u'AppSearch', u'Searching for installed applications', u'Property: [1], Signature: [2]'),
(u'BindImage', u'Binding executables', u'File: [1]'),
(u'CCPSearch', u'Searching for qualifying products', None),
(u'CreateFolders', u'Creating folders', u'Folder: [1]'),
(u'DeleteServices', u'Deleting services', u'Service: [1]'),
(u'DuplicateFiles', u'Creating duplicate files', u'File: [1], Directory: [9], Size: [6]'),
(u'FindRelatedProducts', u'Searching for related applications', u'Found application: [1]'),
(u'InstallODBC', u'Installing ODBC components', None),
(u'InstallServices', u'Installing new services', u'Service: [2]'),
(u'LaunchConditions', u'Evaluating launch conditions', None),
(u'MigrateFeatureStates', u'Migrating feature states from related applications', u'Application: [1]'),
(u'MoveFiles', u'Moving files', u'File: [1], Directory: [9], Size: [6]'),
(u'PatchFiles', u'Patching files', u'File: [1], Directory: [2], Size: [3]'),
(u'ProcessComponents', u'Updating component registration', None),
(u'RegisterComPlus', u'Registering COM+ Applications and Components', u'AppId: [1]{{, AppType: [2], Users: [3], RSN: [4]}}'),
(u'RegisterFonts', u'Registering fonts', u'Font: [1]'),
(u'RegisterProduct', u'Registering product', u'[1]'),
(u'RegisterTypeLibraries', u'Registering type libraries', u'LibID: [1]'),
(u'RegisterUser', u'Registering user', u'[1]'),
(u'RemoveDuplicateFiles', u'Removing duplicated files', u'File: [1], Directory: [9]'),
(u'RemoveEnvironmentStrings', u'Updating environment strings', u'Name: [1], Value: [2], Action [3]'),
(u'RemoveExistingProducts', u'Removing applications', u'Application: [1], Command line: [2]'),
(u'RemoveFiles', u'Removing files', u'File: [1], Directory: [9]'),
(u'RemoveFolders', u'Removing folders', u'Folder: [1]'),
(u'RemoveIniValues', u'Removing INI files entries', u'File: [1], Section: [2], Key: [3], Value: [4]'),
(u'RemoveODBC', u'Removing ODBC components', None),
(u'RemoveRegistryValues', u'Removing system registry values', u'Key: [1], Name: [2]'),
(u'RemoveShortcuts', u'Removing shortcuts', u'Shortcut: [1]'),
(u'RMCCPSearch', u'Searching for qualifying products', None),
(u'SelfRegModules', u'Registering modules', u'File: [1], Folder: [2]'),
(u'SelfUnregModules', u'Unregistering modules', u'File: [1], Folder: [2]'),
(u'SetODBCFolders', u'Initializing ODBC directories', None),
(u'StartServices', u'Starting services', u'Service: [1]'),
(u'StopServices', u'Stopping services', u'Service: [1]'),
(u'UnpublishComponents', u'Unpublishing Qualified Components', u'Component ID: [1], Qualifier: [2]'),
(u'UnpublishFeatures', u'Unpublishing Product Features', u'Feature: [1]'),
(u'UnregisterClassInfo', u'Unregister Class servers', u'Class Id: [1]'),
(u'UnregisterComPlus', u'Unregistering COM+ Applications and Components', u'AppId: [1]{{, AppType: [2]}}'),
(u'UnregisterExtensionInfo', u'Unregistering extension servers', u'Extension: [1]'),
(u'UnregisterFonts', u'Unregistering fonts', u'Font: [1]'),
(u'UnregisterMIMEInfo', u'Unregistering MIME info', u'MIME Content Type: [1], Extension: [2]'),
(u'UnregisterProgIdInfo', u'Unregistering program identifiers', u'ProgId: [1]'),
(u'UnregisterTypeLibraries', u'Unregistering type libraries', u'LibID: [1]'),
(u'WriteEnvironmentStrings', u'Updating environment strings', u'Name: [1], Value: [2], Action [3]'),
(u'WriteIniValues', u'Writing INI files values', u'File: [1], Section: [2], Key: [3], Value: [4]'),
(u'WriteRegistryValues', u'Writing system registry values', u'Key: [1], Name: [2], Value: [3]'),
(u'Advertise', u'Advertising application', None),
(u'GenerateScript', u'Generating script operations for action:', u'[1]'),
(u'InstallSFPCatalogFile', u'Installing system catalog', u'File: [1], Dependencies: [2]'),
(u'MsiPublishAssemblies', u'Publishing assembly information', u'Application Context:[1], Assembly Name:[2]'),
(u'MsiUnpublishAssemblies', u'Unpublishing assembly information', u'Application Context:[1], Assembly Name:[2]'),
(u'Rollback', u'Rolling back action:', u'[1]'),
(u'RollbackCleanup', u'Removing backup files', u'File: [1]'),
(u'UnmoveFiles', u'Removing moved files', u'File: [1], Directory: [9]'),
(u'UnpublishProduct', u'Unpublishing product information', None),
]
UIText = [
(u'AbsentPath', None),
(u'bytes', u'bytes'),
(u'GB', u'GB'),
(u'KB', u'KB'),
(u'MB', u'MB'),
(u'MenuAbsent', u'Entire feature will be unavailable'),
(u'MenuAdvertise', u'Feature will be installed when required'),
(u'MenuAllCD', u'Entire feature will be installed to run from CD'),
(u'MenuAllLocal', u'Entire feature will be installed on local hard drive'),
(u'MenuAllNetwork', u'Entire feature will be installed to run from network'),
(u'MenuCD', u'Will be installed to run from CD'),
(u'MenuLocal', u'Will be installed on local hard drive'),
(u'MenuNetwork', u'Will be installed to run from network'),
(u'ScriptInProgress', u'Gathering required information...'),
(u'SelAbsentAbsent', u'This feature will remain uninstalled'),
(u'SelAbsentAdvertise', u'This feature will be set to be installed when required'),
(u'SelAbsentCD', u'This feature will be installed to run from CD'),
(u'SelAbsentLocal', u'This feature will be installed on the local hard drive'),
(u'SelAbsentNetwork', u'This feature will be installed to run from the network'),
(u'SelAdvertiseAbsent', u'This feature will become unavailable'),
(u'SelAdvertiseAdvertise', u'Will be installed when required'),
(u'SelAdvertiseCD', u'This feature will be available to run from CD'),
(u'SelAdvertiseLocal', u'This feature will be installed on your local hard drive'),
(u'SelAdvertiseNetwork', u'This feature will be available to run from the network'),
(u'SelCDAbsent', u"This feature will be uninstalled completely, you won't be able to run it from CD"),
(u'SelCDAdvertise', u'This feature will change from run from CD state to set to be installed when required'),
(u'SelCDCD', u'This feature will remain to be run from CD'),
(u'SelCDLocal', u'This feature will change from run from CD state to be installed on the local hard drive'),
(u'SelChildCostNeg', u'This feature frees up [1] on your hard drive.'),
(u'SelChildCostPos', u'This feature requires [1] on your hard drive.'),
(u'SelCostPending', u'Compiling cost for this feature...'),
(u'SelLocalAbsent', u'This feature will be completely removed'),
(u'SelLocalAdvertise', u'This feature will be removed from your local hard drive, but will be set to be installed when required'),
(u'SelLocalCD', u'This feature will be removed from your local hard drive, but will be still available to run from CD'),
    (u'SelLocalLocal', u'This feature will remain on your local hard drive'),
(u'SelLocalNetwork', u'This feature will be removed from your local hard drive, but will be still available to run from the network'),
(u'SelNetworkAbsent', u"This feature will be uninstalled completely, you won't be able to run it from the network"),
(u'SelNetworkAdvertise', u'This feature will change from run from network state to set to be installed when required'),
(u'SelNetworkLocal', u'This feature will change from run from network state to be installed on the local hard drive'),
(u'SelNetworkNetwork', u'This feature will remain to be run from the network'),
(u'SelParentCostNegNeg', u'This feature frees up [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures free up [4] on your hard drive.'),
(u'SelParentCostNegPos', u'This feature frees up [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures require [4] on your hard drive.'),
(u'SelParentCostPosNeg', u'This feature requires [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures free up [4] on your hard drive.'),
(u'SelParentCostPosPos', u'This feature requires [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures require [4] on your hard drive.'),
(u'TimeRemaining', u'Time remaining: {[1] minutes }{[2] seconds}'),
(u'VolumeCostAvailable', u'Available'),
(u'VolumeCostDifference', u'Difference'),
(u'VolumeCostRequired', u'Required'),
(u'VolumeCostSize', u'Disk Size'),
(u'VolumeCostVolume', u'Volume'),
]
tables=['ActionText', 'UIText']
|
cristianquaglio/odoo
|
refs/heads/master
|
addons/account/wizard/account_chart.py
|
271
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_chart(osv.osv_memory):
"""
For Chart of Accounts
"""
_name = "account.chart"
_description = "Account chart"
_columns = {
'fiscalyear': fields.many2one('account.fiscalyear', \
'Fiscal year', \
help='Keep empty for all open fiscal years'),
'period_from': fields.many2one('account.period', 'Start period'),
'period_to': fields.many2one('account.period', 'End period'),
'target_move': fields.selection([('posted', 'All Posted Entries'),
('all', 'All Entries'),
], 'Target Moves', required=True),
}
def _get_fiscalyear(self, cr, uid, context=None):
"""Return default Fiscalyear value"""
return self.pool.get('account.fiscalyear').find(cr, uid, context=context)
def onchange_fiscalyear(self, cr, uid, ids, fiscalyear_id=False, context=None):
res = {}
if fiscalyear_id:
start_period = end_period = False
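            # The UNION ALL below fetches two rows: the fiscal year's first
            # period (earliest date_start, opening/special periods preferred)
            # and its last already-started period (latest date_stop).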
cr.execute('''
SELECT * FROM (SELECT p.id
FROM account_period p
LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
WHERE f.id = %s
ORDER BY p.date_start ASC, p.special DESC
LIMIT 1) AS period_start
UNION ALL
SELECT * FROM (SELECT p.id
FROM account_period p
LEFT JOIN account_fiscalyear f ON (p.fiscalyear_id = f.id)
WHERE f.id = %s
AND p.date_start < NOW()
ORDER BY p.date_stop DESC
LIMIT 1) AS period_stop''', (fiscalyear_id, fiscalyear_id))
periods = [i[0] for i in cr.fetchall()]
if periods:
start_period = periods[0]
if len(periods) > 1:
end_period = periods[1]
res['value'] = {'period_from': start_period, 'period_to': end_period}
else:
res['value'] = {'period_from': False, 'period_to': False}
return res
def account_chart_open_window(self, cr, uid, ids, context=None):
"""
Opens chart of Accounts
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param ids: List of account chart’s IDs
@return: dictionary of Open account chart window on given fiscalyear and all Entries or posted entries
"""
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
period_obj = self.pool.get('account.period')
fy_obj = self.pool.get('account.fiscalyear')
if context is None:
context = {}
data = self.read(cr, uid, ids, context=context)[0]
result = mod_obj.get_object_reference(cr, uid, 'account', 'action_account_tree')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
fiscalyear_id = data.get('fiscalyear', False) and data['fiscalyear'][0] or False
result['periods'] = []
if data['period_from'] and data['period_to']:
period_from = data.get('period_from', False) and data['period_from'][0] or False
period_to = data.get('period_to', False) and data['period_to'][0] or False
result['periods'] = period_obj.build_ctx_periods(cr, uid, period_from, period_to)
result['context'] = str({'fiscalyear': fiscalyear_id, 'periods': result['periods'], \
'state': data['target_move']})
if fiscalyear_id:
result['name'] += ':' + fy_obj.read(cr, uid, [fiscalyear_id], context=context)[0]['code']
return result
_defaults = {
'target_move': 'posted',
'fiscalyear': _get_fiscalyear,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
GeographicaGS/moocng
|
refs/heads/master
|
moocng/api/admin.py
|
2
|
# -*- coding: utf-8 -*-
# Copyright 2012-2013 UNED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import uuid
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from adminsortable.admin import SortableAdmin
from moocng.api.models import UserApi
logger = logging.getLogger(__name__)
class UserApiAdmin(SortableAdmin):
raw_id_fields = ('user',)
autocomplete_lookup_fields = {'fk': ['user'], }
readonly_fields = ('key',)
actions = ['change_key']
def change_key(self, request, queryset):
for apikey in queryset:
apikey.key = unicode(uuid.uuid4())
apikey.save()
change_key.short_description = _("Change key of selected %(verbose_name_plural)s")
admin.site.register(UserApi, UserApiAdmin)
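# Hedged sketch (not part of the original module): change_key() above
# boils down to replacing each UserApi.key with a fresh random UUID.
# A standalone Python 2 illustration, using the uuid module already
# imported above:
if __name__ == "__main__":
    old_key = unicode(uuid.uuid4())
    new_key = unicode(uuid.uuid4())
    # uuid4 draws from os.urandom, so a rotation is overwhelmingly
    # unlikely to reproduce the previous key.
    assert old_key != new_key
    print(new_key)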
|
erhuabushuo/crossbar
|
refs/heads/master
|
crossbar/twisted/tests/test_endpoint.py
|
2
|
#####################################################################################
#
# Copyright (C) Tavendo GmbH
#
# Unless a separate license agreement exists between you and Tavendo GmbH (e.g. you
# have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import, division, print_function
import os
from uuid import uuid4
from twisted.internet.endpoints import UNIXServerEndpoint
from twisted.internet.selectreactor import SelectReactor
from twisted.internet.protocol import Factory
from twisted.protocols.wire import Echo
from twisted.trial.unittest import TestCase
from twisted.python.filepath import FilePath
from twisted.python.runtime import platform
from crossbar.twisted.endpoint import create_listening_endpoint_from_config
class ListeningEndpointTests(TestCase):
def setUp(self):
self.cbdir = self.mktemp()
FilePath(self.cbdir).makedirs()
def test_unix(self):
"""
A config with type = "unix" will create an endpoint for a UNIX socket
at the given path.
"""
path = FilePath("/tmp").child(uuid4().hex).path
self.addCleanup(os.remove, path)
reactor = SelectReactor()
config = {
"type": "unix",
"path": path
}
endpoint = create_listening_endpoint_from_config(config, self.cbdir,
reactor)
self.assertTrue(isinstance(endpoint, UNIXServerEndpoint))
factory = Factory.forProtocol(Echo)
endpoint.listen(factory)
self.assertIn(
factory,
[getattr(x, "factory", None) for x in reactor.getReaders()])
def test_unix_already_listening(self):
"""
A config with type = "unix" will create an endpoint for a UNIX socket
at the given path, and delete it if required.
"""
path = FilePath("/tmp").child(uuid4().hex).path
self.addCleanup(os.remove, path)
# Something is already there
FilePath(path).setContent(b"")
reactor = SelectReactor()
config = {
"type": "unix",
"path": path
}
endpoint = create_listening_endpoint_from_config(config, self.cbdir,
reactor)
self.assertTrue(isinstance(endpoint, UNIXServerEndpoint))
factory = Factory.forProtocol(Echo)
endpoint.listen(factory)
self.assertIn(
factory,
[getattr(x, "factory", None) for x in reactor.getReaders()])
def test_unix_already_listening_cant_delete(self):
"""
A config with type = "unix" will create an endpoint for a UNIX socket
at the given path, and delete it if required. If it can't delete it, it
will raise an exception.
"""
parent_fp = FilePath("/tmp").child(uuid4().hex)
parent_fp.makedirs()
fp = parent_fp.child(uuid4().hex)
# Something is already there
fp.setContent(b"")
fp.chmod(0o544)
parent_fp.chmod(0o544)
reactor = SelectReactor()
config = {
"type": "unix",
"path": fp.path
}
with self.assertRaises(OSError) as e:
create_listening_endpoint_from_config(config, self.cbdir, reactor)
self.assertEqual(e.exception.errno, 13) # Permission Denied
parent_fp.chmod(0o777)
parent_fp.remove()
if platform.isWindows():
_ = "Windows does not have UNIX sockets"
test_unix.skip = _
test_unix_already_listening.skip = _
test_unix_already_listening_cant_delete.skip = _
del _
|
deandunbar/html2bwml
|
refs/heads/master
|
venv/lib/python2.7/site-packages/django/db/models/options.py
|
49
|
from __future__ import unicode_literals
from bisect import bisect
from collections import OrderedDict
import warnings
from django.apps import apps
from django.conf import settings
from django.db.models.fields.related import ManyToManyRel
from django.db.models.fields import AutoField, FieldDoesNotExist
from django.db.models.fields.proxy import OrderWrt
from django.utils import six
from django.utils.deprecation import RemovedInDjango18Warning
from django.utils.encoding import force_text, smart_text, python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.text import camel_case_to_spaces
from django.utils.translation import activate, deactivate_all, get_language, string_concat
DEFAULT_NAMES = ('verbose_name', 'verbose_name_plural', 'db_table', 'ordering',
'unique_together', 'permissions', 'get_latest_by',
'order_with_respect_to', 'app_label', 'db_tablespace',
'abstract', 'managed', 'proxy', 'swappable', 'auto_created',
'index_together', 'apps', 'default_permissions',
'select_on_save')
def normalize_together(option_together):
"""
option_together can be either a tuple of tuples, or a single
tuple of two strings. Normalize it to a tuple of tuples, so that
calling code can uniformly expect that.
"""
try:
if not option_together:
return ()
if not isinstance(option_together, (tuple, list)):
raise TypeError
first_element = next(iter(option_together))
if not isinstance(first_element, (tuple, list)):
option_together = (option_together,)
# Normalize everything to tuples
return tuple(tuple(ot) for ot in option_together)
except TypeError:
# If the value of option_together isn't valid, return it
# verbatim; this will be picked up by the check framework later.
return option_together
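# Illustration (not from the original source) of the contract above:
#
#   normalize_together(None)                      -> ()
#   normalize_together(('a', 'b'))                -> (('a', 'b'),)
#   normalize_together([['a', 'b'], ['c', 'd']])  -> (('a', 'b'), ('c', 'd'))
#   normalize_together('oops')                    -> 'oops'  # left verbatim
#                                                            # for the checks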
@python_2_unicode_compatible
class Options(object):
def __init__(self, meta, app_label=None):
self.local_fields = []
self.local_many_to_many = []
self.virtual_fields = []
self.model_name = None
self.verbose_name = None
self.verbose_name_plural = None
self.db_table = ''
self.ordering = []
self.unique_together = []
self.index_together = []
self.select_on_save = False
self.default_permissions = ('add', 'change', 'delete')
self.permissions = []
self.object_name = None
self.app_label = app_label
self.get_latest_by = None
self.order_with_respect_to = None
self.db_tablespace = settings.DEFAULT_TABLESPACE
self.meta = meta
self.pk = None
self.has_auto_field = False
self.auto_field = None
self.abstract = False
self.managed = True
self.proxy = False
# For any class that is a proxy (including automatically created
# classes for deferred object loading), proxy_for_model tells us
# which class this model is proxying. Note that proxy_for_model
# can create a chain of proxy models. For non-proxy models, the
# variable is always None.
self.proxy_for_model = None
# For any non-abstract class, the concrete class is the model
# in the end of the proxy_for_model chain. In particular, for
# concrete models, the concrete_model is always the class itself.
self.concrete_model = None
self.swappable = None
self.parents = OrderedDict()
self.auto_created = False
# To handle various inheritance situations, we need to track where
# managers came from (concrete or abstract base classes).
self.abstract_managers = []
self.concrete_managers = []
# List of all lookups defined in ForeignKey 'limit_choices_to' options
# from *other* models. Needed for some admin checks. Internal use only.
self.related_fkey_lookups = []
# A custom app registry to use, if you're making a separate model set.
self.apps = apps
@property
def app_config(self):
# Don't go through get_app_config to avoid triggering imports.
return self.apps.app_configs.get(self.app_label)
@property
def installed(self):
return self.app_config is not None
def contribute_to_class(self, cls, name):
from django.db import connection
from django.db.backends.utils import truncate_name
cls._meta = self
self.model = cls
# First, construct the default values for these options.
self.object_name = cls.__name__
self.model_name = self.object_name.lower()
self.verbose_name = camel_case_to_spaces(self.object_name)
# Store the original user-defined values for each option,
# for use when serializing the model definition
self.original_attrs = {}
# Next, apply any overridden values from 'class Meta'.
if self.meta:
meta_attrs = self.meta.__dict__.copy()
for name in self.meta.__dict__:
# Ignore any private attributes that Django doesn't care about.
# NOTE: We can't modify a dictionary's contents while looping
# over it, so we loop over the *original* dictionary instead.
if name.startswith('_'):
del meta_attrs[name]
for attr_name in DEFAULT_NAMES:
if attr_name in meta_attrs:
setattr(self, attr_name, meta_attrs.pop(attr_name))
self.original_attrs[attr_name] = getattr(self, attr_name)
elif hasattr(self.meta, attr_name):
setattr(self, attr_name, getattr(self.meta, attr_name))
self.original_attrs[attr_name] = getattr(self, attr_name)
ut = meta_attrs.pop('unique_together', self.unique_together)
self.unique_together = normalize_together(ut)
it = meta_attrs.pop('index_together', self.index_together)
self.index_together = normalize_together(it)
# verbose_name_plural is a special case because it appends an 's'
# by default.
if self.verbose_name_plural is None:
self.verbose_name_plural = string_concat(self.verbose_name, 's')
# Any leftover attributes must be invalid.
if meta_attrs != {}:
raise TypeError("'class Meta' got invalid attribute(s): %s" % ','.join(meta_attrs.keys()))
else:
self.verbose_name_plural = string_concat(self.verbose_name, 's')
del self.meta
# If the db_table wasn't provided, use the app_label + model_name.
if not self.db_table:
self.db_table = "%s_%s" % (self.app_label, self.model_name)
self.db_table = truncate_name(self.db_table, connection.ops.max_name_length())
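# Worked example (illustrative): for app_label='shop' and a model class
# named OrderItem, the defaults computed above come out as
#   object_name  = 'OrderItem'
#   model_name   = 'orderitem'
#   verbose_name = 'order item'
#   db_table     = 'shop_orderitem'  # then truncated to the backend limit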
@property
def module_name(self):
"""
This property has been deprecated in favor of `model_name`. refs #19689
"""
warnings.warn(
"Options.module_name has been deprecated in favor of model_name",
RemovedInDjango18Warning, stacklevel=2)
return self.model_name
def _prepare(self, model):
if self.order_with_respect_to:
self.order_with_respect_to = self.get_field(self.order_with_respect_to)
self.ordering = ('_order',)
if not any(isinstance(field, OrderWrt) for field in model._meta.local_fields):
model.add_to_class('_order', OrderWrt())
else:
self.order_with_respect_to = None
if self.pk is None:
if self.parents:
# Promote the first parent link in lieu of adding yet another
# field.
field = next(six.itervalues(self.parents))
# Look for a local field with the same name as the
# first parent link. If a local field has already been
# created, use it instead of promoting the parent
already_created = [fld for fld in self.local_fields if fld.name == field.name]
if already_created:
field = already_created[0]
field.primary_key = True
self.setup_pk(field)
else:
auto = AutoField(verbose_name='ID', primary_key=True,
auto_created=True)
model.add_to_class('id', auto)
def add_field(self, field):
# Insert the given field in the order in which it was created, using
# the "creation_counter" attribute of the field.
# Move many-to-many related fields from self.fields into
# self.many_to_many.
if field.rel and isinstance(field.rel, ManyToManyRel):
self.local_many_to_many.insert(bisect(self.local_many_to_many, field), field)
if hasattr(self, '_m2m_cache'):
del self._m2m_cache
else:
self.local_fields.insert(bisect(self.local_fields, field), field)
self.setup_pk(field)
if hasattr(self, '_field_cache'):
del self._field_cache
del self._field_name_cache
# The fields, concrete_fields and local_concrete_fields are
# implemented as cached properties for performance reasons.
# The attributes will not exist if the cached property hasn't been
# accessed yet, hence the try/excepts.
try:
del self.fields
except AttributeError:
pass
try:
del self.concrete_fields
except AttributeError:
pass
try:
del self.local_concrete_fields
except AttributeError:
pass
if hasattr(self, '_name_map'):
del self._name_map
def add_virtual_field(self, field):
self.virtual_fields.append(field)
def setup_pk(self, field):
if not self.pk and field.primary_key:
self.pk = field
field.serialize = False
def pk_index(self):
"""
Returns the index of the primary key field in the self.concrete_fields
list.
"""
return self.concrete_fields.index(self.pk)
def setup_proxy(self, target):
"""
Does the internal setup so that the current model is a proxy for
"target".
"""
self.pk = target._meta.pk
self.proxy_for_model = target
self.db_table = target._meta.db_table
def __repr__(self):
return '<Options for %s>' % self.object_name
def __str__(self):
return "%s.%s" % (smart_text(self.app_label), smart_text(self.model_name))
def verbose_name_raw(self):
"""
There are a few places where the untranslated verbose name is needed
(so that we get the same value regardless of currently active
locale).
"""
lang = get_language()
deactivate_all()
raw = force_text(self.verbose_name)
activate(lang)
return raw
verbose_name_raw = property(verbose_name_raw)
def _swapped(self):
"""
Has this model been swapped out for another? If so, return the model
name of the replacement; otherwise, return None.
For historical reasons, model name lookups using get_model() are
case insensitive, so we make sure we are case insensitive here.
"""
if self.swappable:
model_label = '%s.%s' % (self.app_label, self.model_name)
swapped_for = getattr(settings, self.swappable, None)
if swapped_for:
try:
swapped_label, swapped_object = swapped_for.split('.')
except ValueError:
# setting not in the format app_label.model_name
# raising ImproperlyConfigured here causes problems with
# test cleanup code - instead it is raised in get_user_model
# or as part of validation.
return swapped_for
if '%s.%s' % (swapped_label, swapped_object.lower()) not in (None, model_label):
return swapped_for
return None
swapped = property(_swapped)
@cached_property
def fields(self):
"""
The getter for self.fields. This returns the list of field objects
available to this model (including through parent models).
Callers are not permitted to modify this list, since it's a reference
to this instance (not a copy).
"""
try:
self._field_name_cache
except AttributeError:
self._fill_fields_cache()
return self._field_name_cache
@cached_property
def concrete_fields(self):
return [f for f in self.fields if f.column is not None]
@cached_property
def local_concrete_fields(self):
return [f for f in self.local_fields if f.column is not None]
def get_fields_with_model(self):
"""
Returns a sequence of (field, model) pairs for all fields. The "model"
element is None for fields on the current model. Mostly of use when
constructing queries so that we know which model a field belongs to.
"""
try:
self._field_cache
except AttributeError:
self._fill_fields_cache()
return self._field_cache
def get_concrete_fields_with_model(self):
return [(field, model) for field, model in self.get_fields_with_model() if
field.column is not None]
def _fill_fields_cache(self):
cache = []
for parent in self.parents:
for field, model in parent._meta.get_fields_with_model():
if model:
cache.append((field, model))
else:
cache.append((field, parent))
cache.extend((f, None) for f in self.local_fields)
self._field_cache = tuple(cache)
self._field_name_cache = [x for x, _ in cache]
def _many_to_many(self):
try:
self._m2m_cache
except AttributeError:
self._fill_m2m_cache()
return list(self._m2m_cache)
many_to_many = property(_many_to_many)
def get_m2m_with_model(self):
"""
The many-to-many version of get_fields_with_model().
"""
try:
self._m2m_cache
except AttributeError:
self._fill_m2m_cache()
return list(six.iteritems(self._m2m_cache))
def _fill_m2m_cache(self):
cache = OrderedDict()
for parent in self.parents:
for field, model in parent._meta.get_m2m_with_model():
if model:
cache[field] = model
else:
cache[field] = parent
for field in self.local_many_to_many:
cache[field] = None
self._m2m_cache = cache
def get_field(self, name, many_to_many=True):
"""
Returns the requested field by name. Raises FieldDoesNotExist on error.
"""
to_search = (self.fields + self.many_to_many) if many_to_many else self.fields
for f in to_search:
if f.name == name:
return f
raise FieldDoesNotExist('%s has no field named %r' % (self.object_name, name))
def get_field_by_name(self, name):
"""
Returns the (field_object, model, direct, m2m), where field_object is
the Field instance for the given name, model is the model containing
this field (None for local fields), direct is True if the field exists
on this model, and m2m is True for many-to-many relations. When
'direct' is False, 'field_object' is the corresponding RelatedObject
for this field (since the field doesn't have an instance associated
with it).
Uses a cache internally, so after the first access, this is very fast.
"""
try:
try:
return self._name_map[name]
except AttributeError:
cache = self.init_name_map()
return cache[name]
except KeyError:
raise FieldDoesNotExist('%s has no field named %r'
% (self.object_name, name))
def get_all_field_names(self):
"""
Returns a list of all field names that are possible for this model
(including reverse relation names). This is used for pretty printing
debugging output (a list of choices), so any internal-only field names
are not included.
"""
try:
cache = self._name_map
except AttributeError:
cache = self.init_name_map()
names = sorted(cache.keys())
# Internal-only names end with "+" (symmetrical m2m related names being
# the main example). Trim them.
return [val for val in names if not val.endswith('+')]
def init_name_map(self):
"""
Initialises the field name -> field object mapping.
"""
cache = {}
# We intentionally handle related m2m objects first so that symmetrical
# m2m accessor names can be overridden, if necessary.
for f, model in self.get_all_related_m2m_objects_with_model():
cache[f.field.related_query_name()] = (f, model, False, True)
for f, model in self.get_all_related_objects_with_model():
cache[f.field.related_query_name()] = (f, model, False, False)
for f, model in self.get_m2m_with_model():
cache[f.name] = cache[f.attname] = (f, model, True, True)
for f, model in self.get_fields_with_model():
cache[f.name] = cache[f.attname] = (f, model, True, False)
for f in self.virtual_fields:
if hasattr(f, 'related'):
cache[f.name] = cache[f.attname] = (
f, None if f.model == self.model else f.model, True, False)
if apps.ready:
self._name_map = cache
return cache
def get_add_permission(self):
"""
This method has been deprecated in favor of
`django.contrib.auth.get_permission_codename`. refs #20642
"""
warnings.warn(
"`Options.get_add_permission` has been deprecated in favor "
"of `django.contrib.auth.get_permission_codename`.",
RemovedInDjango18Warning, stacklevel=2)
return 'add_%s' % self.model_name
def get_change_permission(self):
"""
This method has been deprecated in favor of
`django.contrib.auth.get_permission_codename`. refs #20642
"""
warnings.warn(
"`Options.get_change_permission` has been deprecated in favor "
"of `django.contrib.auth.get_permission_codename`.",
RemovedInDjango18Warning, stacklevel=2)
return 'change_%s' % self.model_name
def get_delete_permission(self):
"""
This method has been deprecated in favor of
`django.contrib.auth.get_permission_codename`. refs #20642
"""
warnings.warn(
"`Options.get_delete_permission` has been deprecated in favor "
"of `django.contrib.auth.get_permission_codename`.",
RemovedInDjango18Warning, stacklevel=2)
return 'delete_%s' % self.model_name
def get_all_related_objects(self, local_only=False, include_hidden=False,
include_proxy_eq=False):
return [k for k, v in self.get_all_related_objects_with_model(
local_only=local_only, include_hidden=include_hidden,
include_proxy_eq=include_proxy_eq)]
def get_all_related_objects_with_model(self, local_only=False,
include_hidden=False,
include_proxy_eq=False):
"""
Returns a list of (related-object, model) pairs. Similar to
get_fields_with_model().
"""
try:
self._related_objects_cache
except AttributeError:
self._fill_related_objects_cache()
predicates = []
if local_only:
predicates.append(lambda k, v: not v)
if not include_hidden:
predicates.append(lambda k, v: not k.field.rel.is_hidden())
cache = (self._related_objects_proxy_cache if include_proxy_eq
else self._related_objects_cache)
return [t for t in cache.items() if all(p(*t) for p in predicates)]
def _fill_related_objects_cache(self):
cache = OrderedDict()
parent_list = self.get_parent_list()
for parent in self.parents:
for obj, model in parent._meta.get_all_related_objects_with_model(include_hidden=True):
if (obj.field.creation_counter < 0 or obj.field.rel.parent_link) and obj.model not in parent_list:
continue
if not model:
cache[obj] = parent
else:
cache[obj] = model
# Also collect objects that are related to some proxy child/parent of self.
proxy_cache = cache.copy()
for klass in self.apps.get_models(include_auto_created=True):
if not klass._meta.swapped:
for f in klass._meta.local_fields + klass._meta.virtual_fields:
if (hasattr(f, 'rel') and f.rel and not isinstance(f.rel.to, six.string_types)
and f.generate_reverse_relation):
if self == f.rel.to._meta:
cache[f.related] = None
proxy_cache[f.related] = None
elif self.concrete_model == f.rel.to._meta.concrete_model:
proxy_cache[f.related] = None
self._related_objects_cache = cache
self._related_objects_proxy_cache = proxy_cache
def get_all_related_many_to_many_objects(self, local_only=False):
try:
cache = self._related_many_to_many_cache
except AttributeError:
cache = self._fill_related_many_to_many_cache()
if local_only:
return [k for k, v in cache.items() if not v]
return list(cache)
def get_all_related_m2m_objects_with_model(self):
"""
Returns a list of (related-m2m-object, model) pairs. Similar to
get_fields_with_model().
"""
try:
cache = self._related_many_to_many_cache
except AttributeError:
cache = self._fill_related_many_to_many_cache()
return list(six.iteritems(cache))
def _fill_related_many_to_many_cache(self):
cache = OrderedDict()
parent_list = self.get_parent_list()
for parent in self.parents:
for obj, model in parent._meta.get_all_related_m2m_objects_with_model():
if obj.field.creation_counter < 0 and obj.model not in parent_list:
continue
if not model:
cache[obj] = parent
else:
cache[obj] = model
for klass in self.apps.get_models():
if not klass._meta.swapped:
for f in klass._meta.local_many_to_many:
if (f.rel
and not isinstance(f.rel.to, six.string_types)
and self == f.rel.to._meta):
cache[f.related] = None
if apps.ready:
self._related_many_to_many_cache = cache
return cache
def get_base_chain(self, model):
"""
Returns a list of parent classes leading to 'model' (ordered from closest
to most distant ancestor). This has to handle the case where 'model' is
a grandparent or even more distant relation.
"""
if not self.parents:
return None
if model in self.parents:
return [model]
for parent in self.parents:
res = parent._meta.get_base_chain(model)
if res:
res.insert(0, parent)
return res
return None
def get_parent_list(self):
"""
Returns all the ancestors of this model, as a set. Useful for
determining if something is an ancestor, regardless of lineage.
"""
result = set()
for parent in self.parents:
result.add(parent)
result.update(parent._meta.get_parent_list())
return result
def get_ancestor_link(self, ancestor):
"""
Returns the field on the current model which points to the given
"ancestor". This is possible an indirect link (a pointer to a parent
model, which points, eventually, to the ancestor). Used when
constructing table joins for model inheritance.
Returns None if the model isn't an ancestor of this one.
"""
if ancestor in self.parents:
return self.parents[ancestor]
for parent in self.parents:
# Tries to get a link field from the immediate parent
parent_link = parent._meta.get_ancestor_link(ancestor)
if parent_link:
# In the case of a proxied model, the first link in the
# chain to the ancestor is that parent's link
return self.parents[parent] or parent_link
|
otsaloma/poor-maps
|
refs/heads/master
|
poor/test/test_voice.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright (C) 2017 Osmo Salomaa
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import poor.test
import tempfile
import time
class TestVoiceGenerator(poor.test.TestCase):
def setup_method(self, method):
self.generator = poor.VoiceGenerator()
def test_engines(self):
for engine in self.generator.engines:
if not engine.supports("en"): continue
handle, fname = tempfile.mkstemp(dir=self.generator._tmpdir)
engine("en").make_wav("just testing", fname)
assert os.path.isfile(fname)
assert os.path.getsize(fname) > 256
def test_clean(self):
self.generator.set_voice("en")
self.generator.make("just testing")
time.sleep(1)
self.generator.clean()
assert not os.listdir(self.generator._tmpdir)
def test_get(self):
self.generator.set_voice("en")
if not self.generator.active: return
self.generator.make("just testing")
time.sleep(1)
fname = self.generator.get("just testing")
assert os.path.isfile(fname)
assert os.path.getsize(fname) > 256
def test_make(self):
self.generator.set_voice("en")
self.generator.make("just testing")
time.sleep(1)
def test_quit(self):
self.generator.set_voice("en")
self.generator.make("just testing")
time.sleep(1)
self.generator.quit()
assert not os.path.isdir(self.generator._tmpdir)
def test_set_voice(self):
self.generator.set_voice("en")
self.generator.set_voice("en", "male")
self.generator.set_voice("en", "female")
self.generator.set_voice("en_US")
self.generator.set_voice("en_XX")
|
almarklein/scikit-image
|
refs/heads/master
|
skimage/exposure/exposure.py
|
1
|
import warnings
import numpy as np
from skimage import img_as_float
from skimage.util.dtype import dtype_range, dtype_limits
from skimage._shared.utils import deprecated
__all__ = ['histogram', 'cumulative_distribution', 'equalize',
'rescale_intensity', 'adjust_gamma',
'adjust_log', 'adjust_sigmoid']
def histogram(image, nbins=256):
"""Return histogram of image.
Unlike `numpy.histogram`, this function returns the centers of bins and
does not rebin integer arrays. For integer arrays, each integer value has
its own bin, which improves speed and intensity-resolution.
The histogram is computed on the flattened image: for color images, the
function should be used separately on each channel to obtain a histogram
for each color channel.
Parameters
----------
image : array
Input image.
nbins : int
Number of bins used to calculate histogram. This value is ignored for
integer arrays.
Returns
-------
hist : array
The values of the histogram.
bin_centers : array
The values at the center of the bins.
Examples
--------
>>> from skimage import data, exposure, util
>>> image = util.img_as_float(data.camera())
>>> np.histogram(image, bins=2)
(array([107432, 154712]), array([ 0. , 0.5, 1. ]))
>>> exposure.histogram(image, nbins=2)
(array([107432, 154712]), array([ 0.25, 0.75]))
"""
sh = image.shape
if len(sh) == 3 and sh[-1] < 4:
warnings.warn("This might be a color image. The histogram will be "
"computed on the flattened image. You can instead "
"apply this function to each color channel.")
# For integer types, histogramming with bincount is more efficient.
if np.issubdtype(image.dtype, np.integer):
offset = 0
if np.min(image) < 0:
offset = np.min(image)
hist = np.bincount(image.ravel() - offset)
bin_centers = np.arange(len(hist)) + offset
# clip histogram to start with a non-zero bin
idx = np.nonzero(hist)[0][0]
return hist[idx:], bin_centers[idx:]
else:
hist, bin_edges = np.histogram(image.flat, nbins)
bin_centers = (bin_edges[:-1] + bin_edges[1:]) / 2.
return hist, bin_centers
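# Worked example (illustrative) of the integer fast path above: for
# image = np.array([2, 2, 3], dtype=np.uint8) there are no negative
# values, np.bincount gives [0, 0, 2, 1], and trimming the leading
# zero bins returns hist = [2, 1] with bin_centers = [2, 3].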
def cumulative_distribution(image, nbins=256):
"""Return cumulative distribution function (cdf) for the given image.
Parameters
----------
image : array
Image array.
nbins : int
Number of bins for image histogram.
Returns
-------
img_cdf : array
Values of cumulative distribution function.
bin_centers : array
Centers of bins.
References
----------
.. [1] http://en.wikipedia.org/wiki/Cumulative_distribution_function
"""
hist, bin_centers = histogram(image, nbins)
img_cdf = hist.cumsum()
img_cdf = img_cdf / float(img_cdf[-1])
return img_cdf, bin_centers
@deprecated('equalize_hist')
def equalize(image, nbins=256):
return equalize_hist(image, nbins)
def equalize_hist(image, nbins=256):
"""Return image after histogram equalization.
Parameters
----------
image : array
Image array.
nbins : int
Number of bins for image histogram.
Returns
-------
out : float array
Image array after histogram equalization.
Notes
-----
This function is adapted from [1]_ with the author's permission.
References
----------
.. [1] http://www.janeriksolem.net/2009/06/histogram-equalization-with-python-and.html
.. [2] http://en.wikipedia.org/wiki/Histogram_equalization
"""
image = img_as_float(image)
cdf, bin_centers = cumulative_distribution(image, nbins)
out = np.interp(image.flat, bin_centers, cdf)
return out.reshape(image.shape)
def rescale_intensity(image, in_range=None, out_range=None):
"""Return image after stretching or shrinking its intensity levels.
The image intensities are uniformly rescaled such that the minimum and
maximum values given by `in_range` match those given by `out_range`.
Parameters
----------
image : array
Image array.
in_range : 2-tuple (float, float)
Min and max *allowed* intensity values of input image. If None, the
*allowed* min/max values are set to the *actual* min/max values in the
input image.
out_range : 2-tuple (float, float)
Min and max intensity values of output image. If None, use the min/max
intensities of the image data type. See `skimage.util.dtype` for
details.
Returns
-------
out : array
Image array after rescaling its intensity. This image is the same dtype
as the input image.
Examples
--------
By default, intensities are stretched to the limits allowed by the dtype:
>>> image = np.array([51, 102, 153], dtype=np.uint8)
>>> rescale_intensity(image)
array([ 0, 127, 255], dtype=uint8)
It's easy to accidentally convert an image dtype from uint8 to float:
>>> 1.0 * image
array([ 51., 102., 153.])
Use `rescale_intensity` to rescale to the proper range for float dtypes:
>>> image_float = 1.0 * image
>>> rescale_intensity(image_float)
array([ 0. , 0.5, 1. ])
To maintain the low contrast of the original, use the `in_range` parameter:
>>> rescale_intensity(image_float, in_range=(0, 255))
array([ 0.2, 0.4, 0.6])
If the min/max value of `in_range` is more/less than the min/max image
intensity, then the intensity levels are clipped:
>>> rescale_intensity(image_float, in_range=(0, 102))
array([ 0.5, 1. , 1. ])
If you have an image with signed integers but want to rescale the image to
just the positive range, use the `out_range` parameter:
>>> image = np.array([-10, 0, 10], dtype=np.int8)
>>> rescale_intensity(image, out_range=(0, 127))
array([ 0, 63, 127], dtype=int8)
"""
dtype = image.dtype.type
if in_range is None:
imin = np.min(image)
imax = np.max(image)
else:
imin, imax = in_range
if out_range is None:
omin, omax = dtype_range[dtype]
if imin >= 0:
omin = 0
else:
omin, omax = out_range
image = np.clip(image, imin, imax)
image = (image - imin) / float(imax - imin)
return dtype(image * (omax - omin) + omin)
def _assert_non_negative(image):
if np.any(image < 0):
raise ValueError('Image Correction methods work correctly only on '
'images with non-negative values. Use '
'skimage.exposure.rescale_intensity.')
def adjust_gamma(image, gamma=1, gain=1):
"""Performs Gamma Correction on the input image.
Also known as Power Law Transform.
This function transforms the input image pixelwise according to the
equation ``O = I**gamma`` after scaling each pixel to the range 0 to 1.
Parameters
----------
image : ndarray
Input image.
gamma : float
Non-negative real number. Default value is 1.
gain : float
The constant multiplier. Default value is 1.
Returns
-------
out : ndarray
Gamma corrected output image.
Notes
-----
For gamma greater than 1, the histogram will shift towards the left and
the output image will be darker than the input image.
For gamma less than 1, the histogram will shift towards the right and
the output image will be brighter than the input image.
References
----------
.. [1] http://en.wikipedia.org/wiki/Gamma_correction
"""
_assert_non_negative(image)
dtype = image.dtype.type
if gamma < 0:
raise ValueError("Gamma should be a non-negative real number.")
scale = float(dtype_limits(image, True)[1] - dtype_limits(image, True)[0])
out = ((image / scale) ** gamma) * scale * gain
return dtype(out)
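# Worked example (illustrative): for a uint8 image the scale above is
# 255.0, so with gamma=2 and gain=1 a pixel value of 128 maps to
# ((128 / 255.) ** 2) * 255 ~= 64.25, stored as 64 by the uint8 cast:
#   adjust_gamma(np.array([0, 128, 255], dtype=np.uint8), 2)
#   -> array([ 0, 64, 255], dtype=uint8)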
def adjust_log(image, gain=1, inv=False):
"""Performs Logarithmic correction on the input image.
This function transforms the input image pixelwise according to the
equation ``O = gain*log(1 + I)`` after scaling each pixel to the range 0 to 1.
For inverse logarithmic correction, the equation is ``O = gain*(2**I - 1)``.
Parameters
----------
image : ndarray
Input image.
gain : float
The constant multiplier. Default value is 1.
inv : bool
If True, it performs inverse logarithmic correction,
else correction will be logarithmic. Defaults to False.
Returns
-------
out : ndarray
Logarithm corrected output image.
References
----------
.. [1] http://www.ece.ucsb.edu/Faculty/Manjunath/courses/ece178W03/EnhancePart1.pdf
"""
_assert_non_negative(image)
dtype = image.dtype.type
scale = float(dtype_limits(image, True)[1] - dtype_limits(image, True)[0])
if inv:
out = (2 ** (image / scale) - 1) * scale * gain
return dtype(out)
out = np.log2(1 + image / scale) * scale * gain
return dtype(out)
def adjust_sigmoid(image, cutoff=0.5, gain=10, inv=False):
"""Performs Sigmoid Correction on the input image.
Also known as Contrast Adjustment.
This function transforms the input image pixelwise according to the
equation ``O = 1/(1 + exp(gain*(cutoff - I)))`` after scaling each pixel
to the range 0 to 1.
Parameters
----------
image : ndarray
Input image.
cutoff : float
Cutoff of the sigmoid function that shifts the characteristic curve
in the horizontal direction. Default value is 0.5.
gain : float
The constant multiplier in exponential's power of sigmoid function.
Default value is 10.
inv : bool
If True, returns the negative sigmoid correction. Defaults to False.
Returns
-------
out : ndarray
Sigmoid corrected output image.
References
----------
.. [1] Gustav J. Braun, "Image Lightness Rescaling Using Sigmoidal Contrast
Enhancement Functions",
http://www.cis.rit.edu/fairchild/PDFs/PAP07.pdf
"""
_assert_non_negative(image)
dtype = image.dtype.type
scale = float(dtype_limits(image, True)[1] - dtype_limits(image, True)[0])
if inv:
out = (1 - 1 / (1 + np.exp(gain * (cutoff - image/scale)))) * scale
return dtype(out)
out = (1 / (1 + np.exp(gain * (cutoff - image/scale)))) * scale
return dtype(out)
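# Worked example (illustrative): at the cutoff the exponent above is zero,
# so a float-image pixel equal to `cutoff` maps to 0.5 * scale (scale is
# 1.0 for float images), regardless of gain; inv=True gives 1 - 0.5 = 0.5
# there as well, since the two curves mirror around the cutoff.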
|
PaulWay/spacewalk
|
refs/heads/master
|
client/tools/rhn-virtualization/actions/virt.py
|
7
|
#
# Copyright (c) 2008--2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import sys
sys.path.append("/usr/share/rhn/")
from virtualization import support, errors
__rhnexport__ = [
'refresh',
'shutdown',
'suspend',
'start',
'resume',
'reboot',
'destroy',
'setMemory',
'setVCPUs',
'schedulePoller']
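# Illustrative note (not in the original source): every handler below
# follows the same return convention,
#   (0, "<success message>", {})  on success,
#   (1, str(exception), {})       when support raises a
#                                 VirtualizationException,
# so a successful start(uuid) returns (0, "Domain <uuid> started.", {}).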
##
# Refreshes the virtualization info for this host and any subdomains on the
# server.
#
def refresh(cache_only=None):
if cache_only:
return (0, "no-ops for caching", {})
try:
support.refresh()
except errors.VirtualizationException, ve:
return (1, str(ve), {})
return (0, "Virtualization Info refreshed.", {})
def shutdown(uuid, cache_only=None):
if cache_only:
return (0, "no-ops for caching", {})
try:
support.shutdown(uuid)
except errors.VirtualizationException, ve:
return (1, str(ve), {})
return (0, "Domain %s shutdown." % str(uuid), {})
def start(uuid, cache_only=None):
if cache_only:
return (0, "no-ops for caching", {})
try:
support.start(uuid)
except errors.VirtualizationException, ve:
return (1, str(ve), {})
return (0, "Domain %s started." % str(uuid), {})
def suspend(uuid, cache_only=None):
if cache_only:
return (0, "no-ops for caching", {})
try:
support.suspend(uuid)
except errors.VirtualizationException, ve:
return (1, str(ve), {})
return (0, "Domain %s suspended." % str(uuid), {})
def resume(uuid, cache_only=None):
if cache_only:
return (0, "no-ops for caching", {})
try:
support.resume(uuid)
except errors.VirtualizationException, ve:
return (1, str(ve), {})
return (0, "Domain %s resumed." % str(uuid), {})
def reboot(uuid, cache_only=None):
if cache_only:
return (0, "no-ops for caching", {})
try:
support.reboot(uuid)
except errors.VirtualizationException, ve:
return (1, str(ve), {})
return (0, "Domain %s rebooted." % str(uuid), {})
def destroy(uuid, cache_only=None):
if cache_only:
return (0, "no-ops for caching", {})
try:
support.destroy(uuid)
except errors.VirtualizationException, ve:
return (1, str(ve), {})
return (0, "Domain %s destroyed." % str(uuid), {})
def setMemory(uuid, memory, cache_only=None):
if cache_only:
return (0, "no-ops for caching", {})
try:
support.setMemory(uuid, memory)
except errors.VirtualizationException, ve:
return (1, str(ve), {})
return (0, "Memory set to %s on %s." % (str(memory), str(uuid)), {})
def setVCPUs(uuid, vcpus, cache_only=None):
if cache_only:
return (0, "no-ops for caching", {})
try:
support.setVCPUs(uuid, vcpus)
except errors.VirtualizationException, ve:
return (1, str(ve), {})
return (0, "VCPUs set to %s on %s." % (str(vcpus), str(uuid)), {})
def schedulePoller(minute, hour, dom, month, dow, cache_only=None):
if cache_only:
return (0, "no-ops for caching", {})
ret_val = support.schedulePoller(minute, hour, dom, month, dow)
return (ret_val[0], ret_val[1], {})
###############################################################################
# Test Routine
###############################################################################
if __name__ == "__main__":
import sys
import actions.virt
func = getattr(actions.virt, sys.argv[1])
print func(*sys.argv[2:])
|
aflaxman/scikit-learn
|
refs/heads/master
|
sklearn/cross_validation.py
|
21
|
"""
The :mod:`sklearn.cross_validation` module includes utilities for cross-
validation and performance evaluation.
"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>,
# Gael Varoquaux <gael.varoquaux@normalesup.org>,
# Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
from __future__ import print_function
from __future__ import division
import warnings
from itertools import chain, combinations
from math import ceil, floor, factorial
import numbers
import time
from abc import ABCMeta, abstractmethod
import numpy as np
import scipy.sparse as sp
from .base import is_classifier, clone
from .utils import indexable, check_random_state, safe_indexing
from .utils.validation import (_is_arraylike, _num_samples,
column_or_1d)
from .utils.multiclass import type_of_target
from .externals.joblib import Parallel, delayed, logger
from .externals.six import with_metaclass
from .externals.six.moves import zip
from .metrics.scorer import check_scoring
from .gaussian_process.kernels import Kernel as GPKernel
from .exceptions import FitFailedWarning
warnings.warn("This module was deprecated in version 0.18 in favor of the "
"model_selection module into which all the refactored classes "
"and functions are moved. Also note that the interface of the "
"new CV iterators are different from that of this module. "
"This module will be removed in 0.20.", DeprecationWarning)
__all__ = ['KFold',
'LabelKFold',
'LeaveOneLabelOut',
'LeaveOneOut',
'LeavePLabelOut',
'LeavePOut',
'ShuffleSplit',
'StratifiedKFold',
'StratifiedShuffleSplit',
'PredefinedSplit',
'LabelShuffleSplit',
'check_cv',
'cross_val_score',
'cross_val_predict',
'permutation_test_score',
'train_test_split']
class _PartitionIterator(with_metaclass(ABCMeta)):
"""Base class for CV iterators where train_mask = ~test_mask
Implementations must define `_iter_test_masks` or `_iter_test_indices`.
Parameters
----------
n : int
Total number of elements in dataset.
"""
def __init__(self, n):
if abs(n - int(n)) >= np.finfo('f').eps:
raise ValueError("n must be an integer")
self.n = int(n)
def __iter__(self):
ind = np.arange(self.n)
for test_index in self._iter_test_masks():
train_index = np.logical_not(test_index)
train_index = ind[train_index]
test_index = ind[test_index]
yield train_index, test_index
# Since subclasses must implement either _iter_test_masks or
# _iter_test_indices, neither can be abstract.
def _iter_test_masks(self):
"""Generates boolean masks corresponding to test sets.
By default, delegates to _iter_test_indices()
"""
for test_index in self._iter_test_indices():
test_mask = self._empty_mask()
test_mask[test_index] = True
yield test_mask
def _iter_test_indices(self):
"""Generates integer indices corresponding to test sets."""
raise NotImplementedError
def _empty_mask(self):
return np.zeros(self.n, dtype=np.bool)
class LeaveOneOut(_PartitionIterator):
"""Leave-One-Out cross validation iterator.
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :class:`sklearn.model_selection.LeaveOneOut` instead.
Provides train/test indices to split data in train test sets. Each
sample is used once as a test set (singleton) while the remaining
samples form the training set.
Note: ``LeaveOneOut(n)`` is equivalent to ``KFold(n, n_folds=n)`` and
``LeavePOut(n, p=1)``.
Due to the high number of test sets (which is the same as the
number of samples) this cross validation method can be very costly.
For large datasets one should favor KFold, StratifiedKFold or
ShuffleSplit.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
n : int
Total number of elements in dataset.
Examples
--------
>>> from sklearn import cross_validation
>>> X = np.array([[1, 2], [3, 4]])
>>> y = np.array([1, 2])
>>> loo = cross_validation.LeaveOneOut(2)
>>> len(loo)
2
>>> print(loo)
sklearn.cross_validation.LeaveOneOut(n=2)
>>> for train_index, test_index in loo:
... print("TRAIN:", train_index, "TEST:", test_index)
... X_train, X_test = X[train_index], X[test_index]
... y_train, y_test = y[train_index], y[test_index]
... print(X_train, X_test, y_train, y_test)
TRAIN: [1] TEST: [0]
[[3 4]] [[1 2]] [2] [1]
TRAIN: [0] TEST: [1]
[[1 2]] [[3 4]] [1] [2]
See also
--------
LeaveOneLabelOut for splitting the data according to explicit,
domain-specific stratification of the dataset.
"""
def _iter_test_indices(self):
return range(self.n)
def __repr__(self):
return '%s.%s(n=%i)' % (
self.__class__.__module__,
self.__class__.__name__,
self.n,
)
def __len__(self):
return self.n
class LeavePOut(_PartitionIterator):
"""Leave-P-Out cross validation iterator
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :class:`sklearn.model_selection.LeavePOut` instead.
Provides train/test indices to split data in train test sets. This results
in testing on all distinct samples of size p, while the remaining n - p
samples form the training set in each iteration.
Note: ``LeavePOut(n, p)`` is NOT equivalent to ``KFold(n, n_folds=n // p)``
which creates non-overlapping test sets.
Due to the high number of iterations, which grows combinatorially with the
number of samples, this cross-validation method can be very costly. For
large datasets one should favor KFold, StratifiedKFold or ShuffleSplit.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
n : int
Total number of elements in dataset.
p : int
Size of the test sets.
Examples
--------
>>> from sklearn import cross_validation
>>> X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
>>> y = np.array([1, 2, 3, 4])
>>> lpo = cross_validation.LeavePOut(4, 2)
>>> len(lpo)
6
>>> print(lpo)
sklearn.cross_validation.LeavePOut(n=4, p=2)
>>> for train_index, test_index in lpo:
... print("TRAIN:", train_index, "TEST:", test_index)
... X_train, X_test = X[train_index], X[test_index]
... y_train, y_test = y[train_index], y[test_index]
TRAIN: [2 3] TEST: [0 1]
TRAIN: [1 3] TEST: [0 2]
TRAIN: [1 2] TEST: [0 3]
TRAIN: [0 3] TEST: [1 2]
TRAIN: [0 2] TEST: [1 3]
TRAIN: [0 1] TEST: [2 3]
"""
def __init__(self, n, p):
super(LeavePOut, self).__init__(n)
self.p = p
def _iter_test_indices(self):
for comb in combinations(range(self.n), self.p):
yield np.array(comb)
def __repr__(self):
return '%s.%s(n=%i, p=%i)' % (
self.__class__.__module__,
self.__class__.__name__,
self.n,
self.p,
)
def __len__(self):
return int(factorial(self.n) / factorial(self.n - self.p)
/ factorial(self.p))
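# Worked example (illustrative): for n=4 and p=2 the count above is
# 4! / (2! * 2!) == 6, matching the six train/test splits listed in the
# class docstring.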
class _BaseKFold(with_metaclass(ABCMeta, _PartitionIterator)):
"""Base class to validate KFold approaches"""
@abstractmethod
def __init__(self, n, n_folds, shuffle, random_state):
super(_BaseKFold, self).__init__(n)
if abs(n_folds - int(n_folds)) >= np.finfo('f').eps:
raise ValueError("n_folds must be an integer")
self.n_folds = n_folds = int(n_folds)
if n_folds <= 1:
raise ValueError(
"k-fold cross validation requires at least one"
" train / test split by setting n_folds=2 or more,"
" got n_folds={0}.".format(n_folds))
if n_folds > self.n:
raise ValueError(
("Cannot have number of folds n_folds={0} greater"
" than the number of samples: {1}.").format(n_folds, n))
if not isinstance(shuffle, bool):
raise TypeError("shuffle must be True or False;"
" got {0}".format(shuffle))
self.shuffle = shuffle
self.random_state = random_state
class KFold(_BaseKFold):
"""K-Folds cross validation iterator.
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :class:`sklearn.model_selection.KFold` instead.
Provides train/test indices to split data in train test sets. Split
dataset into k consecutive folds (without shuffling by default).
Each fold is then used as a validation set once while the k - 1 remaining
fold(s) form the training set.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
n : int
Total number of elements.
n_folds : int, default=3
Number of folds. Must be at least 2.
shuffle : boolean, optional
Whether to shuffle the data before splitting into batches.
random_state : int, RandomState instance or None, optional, default=None
If int, random_state is the seed used by the random number
generator; If RandomState instance, random_state is the random number
generator; If None, the random number generator is the RandomState
instance used by `np.random`. Used when ``shuffle`` == True.
Examples
--------
>>> from sklearn.cross_validation import KFold
>>> X = np.array([[1, 2], [3, 4], [1, 2], [3, 4]])
>>> y = np.array([1, 2, 3, 4])
>>> kf = KFold(4, n_folds=2)
>>> len(kf)
2
>>> print(kf) # doctest: +NORMALIZE_WHITESPACE
sklearn.cross_validation.KFold(n=4, n_folds=2, shuffle=False,
random_state=None)
>>> for train_index, test_index in kf:
... print("TRAIN:", train_index, "TEST:", test_index)
... X_train, X_test = X[train_index], X[test_index]
... y_train, y_test = y[train_index], y[test_index]
TRAIN: [2 3] TEST: [0 1]
TRAIN: [0 1] TEST: [2 3]
Notes
-----
The first n % n_folds folds have size n // n_folds + 1, other folds have
size n // n_folds.
See also
--------
StratifiedKFold takes label information into account to avoid building
folds with imbalanced class distributions (for binary or multiclass
classification tasks).
LabelKFold: K-fold iterator variant with non-overlapping labels.
"""
def __init__(self, n, n_folds=3, shuffle=False,
random_state=None):
super(KFold, self).__init__(n, n_folds, shuffle, random_state)
self.idxs = np.arange(n)
if shuffle:
rng = check_random_state(self.random_state)
rng.shuffle(self.idxs)
def _iter_test_indices(self):
n = self.n
n_folds = self.n_folds
fold_sizes = (n // n_folds) * np.ones(n_folds, dtype=np.int)
fold_sizes[:n % n_folds] += 1
current = 0
for fold_size in fold_sizes:
start, stop = current, current + fold_size
yield self.idxs[start:stop]
current = stop
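# Worked example (illustrative): with n=10 and n_folds=3 the loop above
# uses fold_sizes = [4, 3, 3] (each fold gets 10 // 3 == 3 samples and
# the first 10 % 3 == 1 fold gets one extra), yielding the test blocks
# idxs[0:4], idxs[4:7] and idxs[7:10].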
def __repr__(self):
return '%s.%s(n=%i, n_folds=%i, shuffle=%s, random_state=%s)' % (
self.__class__.__module__,
self.__class__.__name__,
self.n,
self.n_folds,
self.shuffle,
self.random_state,
)
def __len__(self):
return self.n_folds
class LabelKFold(_BaseKFold):
"""K-fold iterator variant with non-overlapping labels.
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :class:`sklearn.model_selection.GroupKFold` instead.
The same label will not appear in two different folds (the number of
distinct labels has to be at least equal to the number of folds).
The folds are approximately balanced in the sense that the number of
distinct labels is approximately the same in each fold.
.. versionadded:: 0.17
Parameters
----------
labels : array-like with shape (n_samples, )
Contains a label for each sample.
The folds are built so that the same label does not appear in two
different folds.
n_folds : int, default=3
Number of folds. Must be at least 2.
Examples
--------
>>> from sklearn.cross_validation import LabelKFold
>>> X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
>>> y = np.array([1, 2, 3, 4])
>>> labels = np.array([0, 0, 2, 2])
>>> label_kfold = LabelKFold(labels, n_folds=2)
>>> len(label_kfold)
2
>>> print(label_kfold)
sklearn.cross_validation.LabelKFold(n_labels=4, n_folds=2)
>>> for train_index, test_index in label_kfold:
... print("TRAIN:", train_index, "TEST:", test_index)
... X_train, X_test = X[train_index], X[test_index]
... y_train, y_test = y[train_index], y[test_index]
... print(X_train, X_test, y_train, y_test)
...
TRAIN: [0 1] TEST: [2 3]
[[1 2]
[3 4]] [[5 6]
[7 8]] [1 2] [3 4]
TRAIN: [2 3] TEST: [0 1]
[[5 6]
[7 8]] [[1 2]
[3 4]] [3 4] [1 2]
See also
--------
LeaveOneLabelOut for splitting the data according to explicit,
domain-specific stratification of the dataset.
"""
def __init__(self, labels, n_folds=3):
super(LabelKFold, self).__init__(len(labels), n_folds,
shuffle=False, random_state=None)
unique_labels, labels = np.unique(labels, return_inverse=True)
n_labels = len(unique_labels)
if n_folds > n_labels:
raise ValueError(
("Cannot have number of folds n_folds={0} greater"
" than the number of labels: {1}.").format(n_folds,
n_labels))
# Weight labels by their number of occurrences
n_samples_per_label = np.bincount(labels)
# Distribute the most frequent labels first
indices = np.argsort(n_samples_per_label)[::-1]
n_samples_per_label = n_samples_per_label[indices]
# Total weight of each fold
n_samples_per_fold = np.zeros(n_folds)
# Mapping from label index to fold index
label_to_fold = np.zeros(len(unique_labels))
# Distribute samples by adding the largest weight to the lightest fold
for label_index, weight in enumerate(n_samples_per_label):
lightest_fold = np.argmin(n_samples_per_fold)
n_samples_per_fold[lightest_fold] += weight
label_to_fold[indices[label_index]] = lightest_fold
self.idxs = label_to_fold[labels]
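# Worked example (illustrative, with NumPy's default tie-breaking): for
# labels=[0, 0, 2, 2] and n_folds=2 both distinct labels weigh two
# samples, the greedy loop puts one label in each fold, and self.idxs
# becomes [1, 1, 0, 0]; fold 0 therefore tests samples [2, 3] and fold 1
# tests [0, 1], matching the class docstring.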
def _iter_test_indices(self):
for f in range(self.n_folds):
yield np.where(self.idxs == f)[0]
def __repr__(self):
return '{0}.{1}(n_labels={2}, n_folds={3})'.format(
self.__class__.__module__,
self.__class__.__name__,
self.n,
self.n_folds,
)
def __len__(self):
return self.n_folds
class StratifiedKFold(_BaseKFold):
"""Stratified K-Folds cross validation iterator
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :class:`sklearn.model_selection.StratifiedKFold` instead.
Provides train/test indices to split data in train test sets.
This cross-validation object is a variation of KFold that
returns stratified folds. The folds are made by preserving
the percentage of samples for each class.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
y : array-like, [n_samples]
Samples to split in K folds.
n_folds : int, default=3
Number of folds. Must be at least 2.
shuffle : boolean, optional
Whether to shuffle each stratification of the data before splitting
into batches.
random_state : int, RandomState instance or None, optional, default=None
If int, random_state is the seed used by the random number
generator; If RandomState instance, random_state is the random number
generator; If None, the random number generator is the RandomState
instance used by `np.random`. Used when ``shuffle`` == True.
Examples
--------
>>> from sklearn.cross_validation import StratifiedKFold
>>> X = np.array([[1, 2], [3, 4], [1, 2], [3, 4]])
>>> y = np.array([0, 0, 1, 1])
>>> skf = StratifiedKFold(y, n_folds=2)
>>> len(skf)
2
>>> print(skf) # doctest: +NORMALIZE_WHITESPACE
sklearn.cross_validation.StratifiedKFold(labels=[0 0 1 1], n_folds=2,
shuffle=False, random_state=None)
>>> for train_index, test_index in skf:
... print("TRAIN:", train_index, "TEST:", test_index)
... X_train, X_test = X[train_index], X[test_index]
... y_train, y_test = y[train_index], y[test_index]
TRAIN: [1 3] TEST: [0 2]
TRAIN: [0 2] TEST: [1 3]
Notes
-----
All the folds have size trunc(n_samples / n_folds); the last one has the
complementary number of samples.
See also
--------
LabelKFold: K-fold iterator variant with non-overlapping labels.
"""
def __init__(self, y, n_folds=3, shuffle=False,
random_state=None):
super(StratifiedKFold, self).__init__(
len(y), n_folds, shuffle, random_state)
y = np.asarray(y)
n_samples = y.shape[0]
unique_labels, y_inversed = np.unique(y, return_inverse=True)
label_counts = np.bincount(y_inversed)
min_labels = np.min(label_counts)
if np.all(self.n_folds > label_counts):
raise ValueError("All the n_labels for individual classes"
" are less than %d folds."
% (self.n_folds))
if self.n_folds > min_labels:
warnings.warn(("The least populated class in y has only %d"
" members, which is too few. The minimum"
" number of labels for any class cannot"
" be less than n_folds=%d."
% (min_labels, self.n_folds)), Warning)
# don't want to use the same seed in each label's shuffle
if self.shuffle:
rng = check_random_state(self.random_state)
else:
rng = self.random_state
# pre-assign each sample to a test fold index using individual KFold
# splitting strategies for each label so as to respect the
# balance of labels
per_label_cvs = [
KFold(max(c, self.n_folds), self.n_folds, shuffle=self.shuffle,
random_state=rng) for c in label_counts]
test_folds = np.zeros(n_samples, dtype=np.int)
for test_fold_idx, per_label_splits in enumerate(zip(*per_label_cvs)):
for label, (_, test_split) in zip(unique_labels, per_label_splits):
label_test_folds = test_folds[y == label]
# the test split can be too big because we used
# KFold(max(c, self.n_folds), self.n_folds) instead of
# KFold(c, self.n_folds) to make it possible to not crash even
# if the data is not 100% stratifiable for all the labels
# (we use a warning instead of raising an exception)
# If this is the case, let's trim it:
test_split = test_split[test_split < len(label_test_folds)]
label_test_folds[test_split] = test_fold_idx
test_folds[y == label] = label_test_folds
self.test_folds = test_folds
self.y = y
def _iter_test_masks(self):
for i in range(self.n_folds):
yield self.test_folds == i
def __repr__(self):
return '%s.%s(labels=%s, n_folds=%i, shuffle=%s, random_state=%s)' % (
self.__class__.__module__,
self.__class__.__name__,
self.y,
self.n_folds,
self.shuffle,
self.random_state,
)
def __len__(self):
return self.n_folds
class LeaveOneLabelOut(_PartitionIterator):
"""Leave-One-Label_Out cross-validation iterator
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :class:`sklearn.model_selection.LeaveOneGroupOut` instead.
Provides train/test indices to split data according to a third-party
provided label. This label information can be used to encode arbitrary
domain specific stratifications of the samples as integers.
For instance the labels could be the year of collection of the samples
and thus allow for cross-validation against time-based splits.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
labels : array-like of int with shape (n_samples,)
Arbitrary domain-specific stratification of the data to be used
to draw the splits.
Examples
--------
>>> from sklearn import cross_validation
>>> X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
>>> y = np.array([1, 2, 1, 2])
>>> labels = np.array([1, 1, 2, 2])
>>> lol = cross_validation.LeaveOneLabelOut(labels)
>>> len(lol)
2
>>> print(lol)
sklearn.cross_validation.LeaveOneLabelOut(labels=[1 1 2 2])
>>> for train_index, test_index in lol:
... print("TRAIN:", train_index, "TEST:", test_index)
... X_train, X_test = X[train_index], X[test_index]
... y_train, y_test = y[train_index], y[test_index]
... print(X_train, X_test, y_train, y_test)
TRAIN: [2 3] TEST: [0 1]
[[5 6]
[7 8]] [[1 2]
[3 4]] [1 2] [1 2]
TRAIN: [0 1] TEST: [2 3]
[[1 2]
[3 4]] [[5 6]
[7 8]] [1 2] [1 2]
See also
--------
LabelKFold: K-fold iterator variant with non-overlapping labels.
"""
def __init__(self, labels):
super(LeaveOneLabelOut, self).__init__(len(labels))
# We make a copy of labels to avoid side-effects during iteration
self.labels = np.array(labels, copy=True)
self.unique_labels = np.unique(labels)
self.n_unique_labels = len(self.unique_labels)
def _iter_test_masks(self):
for i in self.unique_labels:
yield self.labels == i
def __repr__(self):
return '%s.%s(labels=%s)' % (
self.__class__.__module__,
self.__class__.__name__,
self.labels,
)
def __len__(self):
return self.n_unique_labels
class LeavePLabelOut(_PartitionIterator):
"""Leave-P-Label_Out cross-validation iterator
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :class:`sklearn.model_selection.LeavePGroupsOut` instead.
Provides train/test indices to split data according to a third-party
provided label. This label information can be used to encode arbitrary
domain specific stratifications of the samples as integers.
For instance the labels could be the year of collection of the samples
and thus allow for cross-validation against time-based splits.
The difference between LeavePLabelOut and LeaveOneLabelOut is that
the former builds the test sets with all the samples assigned to
``p`` different values of the labels, while the latter builds each
test set from the samples assigned to a single label.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
labels : array-like of int with shape (n_samples,)
Arbitrary domain-specific stratification of the data to be used
to draw the splits.
p : int
Number of samples to leave out in the test split.
Examples
--------
>>> from sklearn import cross_validation
>>> X = np.array([[1, 2], [3, 4], [5, 6]])
>>> y = np.array([1, 2, 1])
>>> labels = np.array([1, 2, 3])
>>> lpl = cross_validation.LeavePLabelOut(labels, p=2)
>>> len(lpl)
3
>>> print(lpl)
sklearn.cross_validation.LeavePLabelOut(labels=[1 2 3], p=2)
>>> for train_index, test_index in lpl:
... print("TRAIN:", train_index, "TEST:", test_index)
... X_train, X_test = X[train_index], X[test_index]
... y_train, y_test = y[train_index], y[test_index]
... print(X_train, X_test, y_train, y_test)
TRAIN: [2] TEST: [0 1]
[[5 6]] [[1 2]
[3 4]] [1] [1 2]
TRAIN: [1] TEST: [0 2]
[[3 4]] [[1 2]
[5 6]] [2] [1 1]
TRAIN: [0] TEST: [1 2]
[[1 2]] [[3 4]
[5 6]] [1] [2 1]
See also
--------
LabelKFold: K-fold iterator variant with non-overlapping labels.
"""
def __init__(self, labels, p):
# We make a copy of labels to avoid side-effects during iteration
super(LeavePLabelOut, self).__init__(len(labels))
self.labels = np.array(labels, copy=True)
self.unique_labels = np.unique(labels)
self.n_unique_labels = len(self.unique_labels)
self.p = p
def _iter_test_masks(self):
comb = combinations(range(self.n_unique_labels), self.p)
for idx in comb:
test_index = self._empty_mask()
idx = np.array(idx)
for l in self.unique_labels[idx]:
test_index[self.labels == l] = True
yield test_index
def __repr__(self):
return '%s.%s(labels=%s, p=%s)' % (
self.__class__.__module__,
self.__class__.__name__,
self.labels,
self.p,
)
def __len__(self):
return int(factorial(self.n_unique_labels) /
factorial(self.n_unique_labels - self.p) /
factorial(self.p))
class BaseShuffleSplit(with_metaclass(ABCMeta)):
"""Base class for ShuffleSplit and StratifiedShuffleSplit"""
def __init__(self, n, n_iter=10, test_size=0.1, train_size=None,
random_state=None):
self.n = n
self.n_iter = n_iter
self.test_size = test_size
self.train_size = train_size
self.random_state = random_state
self.n_train, self.n_test = _validate_shuffle_split(n, test_size,
train_size)
def __iter__(self):
for train, test in self._iter_indices():
yield train, test
return
@abstractmethod
def _iter_indices(self):
"""Generate (train, test) indices"""
class ShuffleSplit(BaseShuffleSplit):
"""Random permutation cross-validation iterator.
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :class:`sklearn.model_selection.ShuffleSplit` instead.
Yields indices to split data into training and test sets.
Note: contrary to other cross-validation strategies, random splits
do not guarantee that all folds will be different, although this is
still very likely for sizeable datasets.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
n : int
Total number of elements in the dataset.
n_iter : int (default 10)
Number of re-shuffling & splitting iterations.
test_size : float (default 0.1), int, or None
If float, should be between 0.0 and 1.0 and represent the
proportion of the dataset to include in the test split. If
int, represents the absolute number of test samples. If None,
the value is automatically set to the complement of the train size.
train_size : float, int, or None (default is None)
If float, should be between 0.0 and 1.0 and represent the
proportion of the dataset to include in the train split. If
int, represents the absolute number of train samples. If None,
the value is automatically set to the complement of the test size.
random_state : int, RandomState instance or None, optional (default None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Examples
--------
>>> from sklearn import cross_validation
>>> rs = cross_validation.ShuffleSplit(4, n_iter=3,
... test_size=.25, random_state=0)
>>> len(rs)
3
>>> print(rs)
... # doctest: +ELLIPSIS
ShuffleSplit(4, n_iter=3, test_size=0.25, ...)
>>> for train_index, test_index in rs:
... print("TRAIN:", train_index, "TEST:", test_index)
...
TRAIN: [3 1 0] TEST: [2]
TRAIN: [2 1 3] TEST: [0]
TRAIN: [0 2 1] TEST: [3]
>>> rs = cross_validation.ShuffleSplit(4, n_iter=3,
... train_size=0.5, test_size=.25, random_state=0)
>>> for train_index, test_index in rs:
... print("TRAIN:", train_index, "TEST:", test_index)
...
TRAIN: [3 1] TEST: [2]
TRAIN: [2 1] TEST: [0]
TRAIN: [0 2] TEST: [3]
"""
def _iter_indices(self):
rng = check_random_state(self.random_state)
for i in range(self.n_iter):
# random partition
permutation = rng.permutation(self.n)
ind_test = permutation[:self.n_test]
ind_train = permutation[self.n_test:self.n_test + self.n_train]
yield ind_train, ind_test
def __repr__(self):
return ('%s(%d, n_iter=%d, test_size=%s, '
'random_state=%s)' % (
self.__class__.__name__,
self.n,
self.n_iter,
str(self.test_size),
self.random_state,
))
def __len__(self):
return self.n_iter
def _validate_shuffle_split(n, test_size, train_size):
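"""Resolve test_size and train_size into absolute sample counts.
A small deterministic sketch of the resolution rules (floats are read
as proportions, ints as absolute counts, and a missing size defaults
to the complement of the other):
>>> _validate_shuffle_split(10, 0.2, None)
(8, 2)
>>> _validate_shuffle_split(10, 3, 5)
(5, 3)
"""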
if test_size is None and train_size is None:
raise ValueError(
'test_size and train_size can not both be None')
if test_size is not None:
if np.asarray(test_size).dtype.kind == 'f':
if test_size >= 1.:
raise ValueError(
'test_size=%f should be smaller '
'than 1.0 or be an integer' % test_size)
elif np.asarray(test_size).dtype.kind == 'i':
if test_size >= n:
raise ValueError(
'test_size=%d should be smaller '
'than the number of samples %d' % (test_size, n))
else:
raise ValueError("Invalid value for test_size: %r" % test_size)
if train_size is not None:
if np.asarray(train_size).dtype.kind == 'f':
if train_size >= 1.:
raise ValueError("train_size=%f should be smaller "
"than 1.0 or be an integer" % train_size)
elif np.asarray(test_size).dtype.kind == 'f' and \
train_size + test_size > 1.:
raise ValueError('The sum of test_size and train_size = %f, '
'should be smaller than 1.0. Reduce '
'test_size and/or train_size.' %
(train_size + test_size))
elif np.asarray(train_size).dtype.kind == 'i':
if train_size >= n:
raise ValueError("train_size=%d should be smaller "
"than the number of samples %d" %
(train_size, n))
else:
raise ValueError("Invalid value for train_size: %r" % train_size)
if np.asarray(test_size).dtype.kind == 'f':
n_test = ceil(test_size * n)
elif np.asarray(test_size).dtype.kind == 'i':
n_test = float(test_size)
if train_size is None:
n_train = n - n_test
else:
if np.asarray(train_size).dtype.kind == 'f':
n_train = floor(train_size * n)
else:
n_train = float(train_size)
if test_size is None:
n_test = n - n_train
if n_train + n_test > n:
raise ValueError('The sum of train_size and test_size = %d, '
'should be smaller than the number of '
'samples %d. Reduce test_size and/or '
'train_size.' % (n_train + n_test, n))
return int(n_train), int(n_test)
def _approximate_mode(class_counts, n_draws, rng):
"""Computes approximate mode of multivariate hypergeometric.
This is an approximation to the mode of the multivariate
hypergeometric given by class_counts and n_draws.
It shouldn't be off by more than one.
It is the mostly likely outcome of drawing n_draws many
samples from the population given by class_counts.
Parameters
----------
class_counts : ndarray of int
Population per class.
n_draws : int
Number of draws (samples to draw) from the overall population.
rng : random state
Used to break ties.
Returns
-------
sampled_classes : ndarray of int
Number of samples drawn from each class.
np.sum(sampled_classes) == n_draws
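Examples
--------
A small deterministic sketch, calling this private helper directly;
``rng`` goes unused here because the floored allocation already sums
to ``n_draws``:
>>> import numpy as np
>>> from sklearn.utils import check_random_state
>>> _approximate_mode(np.array([4, 2]), 3, check_random_state(0))
array([2, 1])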
"""
# this computes a rough approximation to the mode of the
# multivariate hypergeometric given by class_counts and n_draws
continuous = n_draws * class_counts / class_counts.sum()
# floored means we don't overshoot n_draws, but probably undershoot
floored = np.floor(continuous)
# we add samples according to how much "left over" probability
# they had, until we arrive at n_draws
need_to_add = int(n_draws - floored.sum())
if need_to_add > 0:
remainder = continuous - floored
values = np.sort(np.unique(remainder))[::-1]
# add according to remainder, but break ties
# randomly to avoid biases
for value in values:
inds, = np.where(remainder == value)
# if need_to_add is less than the number of indices in inds,
# we draw randomly from them.
# if we need to add more, we add them all and
# go to the next value
add_now = min(len(inds), need_to_add)
inds = rng.choice(inds, size=add_now, replace=False)
floored[inds] += 1
need_to_add -= add_now
if need_to_add == 0:
break
return floored.astype(int)
class StratifiedShuffleSplit(BaseShuffleSplit):
"""Stratified ShuffleSplit cross validation iterator
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :class:`sklearn.model_selection.StratifiedShuffleSplit` instead.
Provides train/test indices to split data into train/test sets.
This cross-validation object is a merge of StratifiedKFold and
ShuffleSplit, which returns stratified randomized folds. The folds
are made by preserving the percentage of samples for each class.
Note: like the ShuffleSplit strategy, stratified random splits
do not guarantee that all folds will be different, although this is
still very likely for sizeable datasets.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
y : array, [n_samples]
Labels of samples.
n_iter : int (default 10)
Number of re-shuffling & splitting iterations.
test_size : float (default 0.1), int, or None
If float, should be between 0.0 and 1.0 and represent the
proportion of the dataset to include in the test split. If
int, represents the absolute number of test samples. If None,
the value is automatically set to the complement of the train size.
train_size : float, int, or None (default is None)
If float, should be between 0.0 and 1.0 and represent the
proportion of the dataset to include in the train split. If
int, represents the absolute number of train samples. If None,
the value is automatically set to the complement of the test size.
random_state : int, RandomState instance or None, optional (default None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Examples
--------
>>> from sklearn.cross_validation import StratifiedShuffleSplit
>>> X = np.array([[1, 2], [3, 4], [1, 2], [3, 4]])
>>> y = np.array([0, 0, 1, 1])
>>> sss = StratifiedShuffleSplit(y, 3, test_size=0.5, random_state=0)
>>> len(sss)
3
>>> print(sss) # doctest: +ELLIPSIS
StratifiedShuffleSplit(labels=[0 0 1 1], n_iter=3, ...)
>>> for train_index, test_index in sss:
... print("TRAIN:", train_index, "TEST:", test_index)
... X_train, X_test = X[train_index], X[test_index]
... y_train, y_test = y[train_index], y[test_index]
TRAIN: [1 2] TEST: [3 0]
TRAIN: [0 2] TEST: [1 3]
TRAIN: [0 2] TEST: [3 1]
"""
def __init__(self, y, n_iter=10, test_size=0.1, train_size=None,
random_state=None):
super(StratifiedShuffleSplit, self).__init__(
len(y), n_iter, test_size, train_size, random_state)
self.y = np.array(y)
self.classes, self.y_indices = np.unique(y, return_inverse=True)
n_cls = self.classes.shape[0]
if np.min(np.bincount(self.y_indices)) < 2:
raise ValueError("The least populated class in y has only 1"
" member, which is too few. The minimum"
" number of labels for any class cannot"
" be less than 2.")
if self.n_train < n_cls:
raise ValueError('The train_size = %d should be greater or '
'equal to the number of classes = %d' %
(self.n_train, n_cls))
if self.n_test < n_cls:
raise ValueError('The test_size = %d should be greater or '
'equal to the number of classes = %d' %
(self.n_test, n_cls))
def _iter_indices(self):
rng = check_random_state(self.random_state)
cls_count = np.bincount(self.y_indices)
for n in range(self.n_iter):
# if there are ties in the class-counts, we want
# to make sure to break them anew in each iteration
n_i = _approximate_mode(cls_count, self.n_train, rng)
class_counts_remaining = cls_count - n_i
t_i = _approximate_mode(class_counts_remaining, self.n_test, rng)
train = []
test = []
for i, _ in enumerate(self.classes):
permutation = rng.permutation(cls_count[i])
perm_indices_class_i = np.where(
(i == self.y_indices))[0][permutation]
train.extend(perm_indices_class_i[:n_i[i]])
test.extend(perm_indices_class_i[n_i[i]:n_i[i] + t_i[i]])
train = rng.permutation(train)
test = rng.permutation(test)
yield train, test
def __repr__(self):
return ('%s(labels=%s, n_iter=%d, test_size=%s, '
'random_state=%s)' % (
self.__class__.__name__,
self.y,
self.n_iter,
str(self.test_size),
self.random_state,
))
def __len__(self):
return self.n_iter
class PredefinedSplit(_PartitionIterator):
"""Predefined split cross validation iterator
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :class:`sklearn.model_selection.PredefinedSplit` instead.
Splits the data into training/test set folds according to a predefined
scheme. Each sample can be assigned to at most one test set fold, as
specified by the user through the ``test_fold`` parameter.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
test_fold : "array-like, shape (n_samples,)
test_fold[i] gives the test set fold of sample i. A value of -1
indicates that the corresponding sample is not part of any test set
folds, but will instead always be put into the training fold.
Examples
--------
>>> from sklearn.cross_validation import PredefinedSplit
>>> X = np.array([[1, 2], [3, 4], [1, 2], [3, 4]])
>>> y = np.array([0, 0, 1, 1])
>>> ps = PredefinedSplit(test_fold=[0, 1, -1, 1])
>>> len(ps)
2
>>> print(ps) # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
sklearn.cross_validation.PredefinedSplit(test_fold=[ 0 1 -1 1])
>>> for train_index, test_index in ps:
... print("TRAIN:", train_index, "TEST:", test_index)
... X_train, X_test = X[train_index], X[test_index]
... y_train, y_test = y[train_index], y[test_index]
TRAIN: [1 2 3] TEST: [0]
TRAIN: [0 2] TEST: [1 3]
"""
def __init__(self, test_fold):
super(PredefinedSplit, self).__init__(len(test_fold))
self.test_fold = np.array(test_fold, dtype=int)
self.test_fold = column_or_1d(self.test_fold)
self.unique_folds = np.unique(self.test_fold)
self.unique_folds = self.unique_folds[self.unique_folds != -1]
def _iter_test_indices(self):
for f in self.unique_folds:
yield np.where(self.test_fold == f)[0]
def __repr__(self):
return '%s.%s(test_fold=%s)' % (
self.__class__.__module__,
self.__class__.__name__,
self.test_fold)
def __len__(self):
return len(self.unique_folds)
class LabelShuffleSplit(ShuffleSplit):
"""Shuffle-Labels-Out cross-validation iterator
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :class:`sklearn.model_selection.GroupShuffleSplit` instead.
Provides randomized train/test indices to split data according to a
third-party provided label. This label information can be used to encode
arbitrary domain specific stratifications of the samples as integers.
For instance the labels could be the year of collection of the samples
and thus allow for cross-validation against time-based splits.
The difference between LeavePLabelOut and LabelShuffleSplit is that
the former generates splits using all subsets of size ``p`` unique labels,
whereas LabelShuffleSplit generates a user-determined number of random
test splits, each with a user-determined fraction of unique labels.
For example, a less computationally intensive alternative to
``LeavePLabelOut(labels, p=10)`` would be
``LabelShuffleSplit(labels, test_size=10, n_iter=100)``.
Note: The parameters ``test_size`` and ``train_size`` refer to labels,
not to samples as they do in ShuffleSplit.
.. versionadded:: 0.17
Parameters
----------
labels : array, [n_samples]
Labels of samples
n_iter : int (default 5)
Number of re-shuffling and splitting iterations.
test_size : float (default 0.2), int, or None
If float, should be between 0.0 and 1.0 and represent the
proportion of the labels to include in the test split. If
int, represents the absolute number of test labels. If None,
the value is automatically set to the complement of the train size.
train_size : float, int, or None (default is None)
If float, should be between 0.0 and 1.0 and represent the
proportion of the labels to include in the train split. If
int, represents the absolute number of train labels. If None,
the value is automatically set to the complement of the test size.
random_state : int, RandomState instance or None, optional (default None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
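Examples
--------
A minimal usage sketch; the split indices depend on the random state,
so the doctest output is skipped:
>>> from sklearn.cross_validation import LabelShuffleSplit
>>> labels = [1, 1, 2, 2, 3, 3]
>>> lss = LabelShuffleSplit(labels, n_iter=4, test_size=0.5,
... random_state=0)
>>> for train_index, test_index in lss: # doctest: +SKIP
... print("TRAIN:", train_index, "TEST:", test_index)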
"""
def __init__(self, labels, n_iter=5, test_size=0.2, train_size=None,
random_state=None):
classes, label_indices = np.unique(labels, return_inverse=True)
super(LabelShuffleSplit, self).__init__(
len(classes),
n_iter=n_iter,
test_size=test_size,
train_size=train_size,
random_state=random_state)
self.labels = labels
self.classes = classes
self.label_indices = label_indices
def __repr__(self):
return ('%s(labels=%s, n_iter=%d, test_size=%s, '
'random_state=%s)' % (
self.__class__.__name__,
self.labels,
self.n_iter,
str(self.test_size),
self.random_state,
))
def __len__(self):
return self.n_iter
def _iter_indices(self):
for label_train, label_test in super(LabelShuffleSplit,
self)._iter_indices():
# these are the indices of classes in the partition
# invert them into data indices
train = np.flatnonzero(np.in1d(self.label_indices, label_train))
test = np.flatnonzero(np.in1d(self.label_indices, label_test))
yield train, test
##############################################################################
def _index_param_value(X, v, indices):
"""Private helper function for parameter value indexing."""
if not _is_arraylike(v) or _num_samples(v) != _num_samples(X):
# pass through: skip indexing
return v
if sp.issparse(v):
v = v.tocsr()
return safe_indexing(v, indices)
def cross_val_predict(estimator, X, y=None, cv=None, n_jobs=1,
verbose=0, fit_params=None, pre_dispatch='2*n_jobs'):
"""Generate cross-validated estimates for each input data point
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :func:`sklearn.model_selection.cross_val_predict` instead.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
estimator : estimator object implementing 'fit' and 'predict'
The object to use to fit the data.
X : array-like
The data to fit. Can be, for example, a list or an array of at least 2 dimensions.
y : array-like, optional, default: None
The target variable to try to predict in the case of
supervised learning.
cv : int, cross-validation generator or an iterable, optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds,
- an object to be used as a cross-validation generator,
- an iterable yielding train/test splits.
For integer/None inputs, if the estimator is a classifier and ``y`` is
either binary or multiclass, :class:`StratifiedKFold` is used. In all
other cases, :class:`KFold` is used.
Refer to the :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
n_jobs : integer, optional
The number of CPUs to use to do the computation. -1 means
'all CPUs'.
verbose : integer, optional
The verbosity level.
fit_params : dict, optional
Parameters to pass to the fit method of the estimator.
pre_dispatch : int, or string, optional
Controls the number of jobs that get dispatched during parallel
execution. Reducing this number can be useful to avoid an
explosion of memory consumption when more jobs get dispatched
than CPUs can process. This parameter can be:
- None, in which case all the jobs are immediately
created and spawned. Use this for lightweight and
fast-running jobs, to avoid delays due to on-demand
spawning of the jobs
- An int, giving the exact number of total jobs that are
spawned
- A string, giving an expression as a function of n_jobs,
as in '2*n_jobs'
Returns
-------
preds : ndarray
The result of calling ``predict``.
Examples
--------
>>> from sklearn import datasets, linear_model
>>> from sklearn.cross_validation import cross_val_predict
>>> diabetes = datasets.load_diabetes()
>>> X = diabetes.data[:150]
>>> y = diabetes.target[:150]
>>> lasso = linear_model.Lasso()
>>> y_pred = cross_val_predict(lasso, X, y)
"""
X, y = indexable(X, y)
cv = check_cv(cv, X, y, classifier=is_classifier(estimator))
# We clone the estimator to make sure that all the folds are
# independent, and that it is pickle-able.
parallel = Parallel(n_jobs=n_jobs, verbose=verbose,
pre_dispatch=pre_dispatch)
preds_blocks = parallel(delayed(_fit_and_predict)(clone(estimator), X, y,
train, test, verbose,
fit_params)
for train, test in cv)
preds = [p for p, _ in preds_blocks]
locs = np.concatenate([loc for _, loc in preds_blocks])
if not _check_is_partition(locs, _num_samples(X)):
raise ValueError('cross_val_predict only works for partitions')
inv_locs = np.empty(len(locs), dtype=int)
inv_locs[locs] = np.arange(len(locs))
# Check for sparse predictions
if sp.issparse(preds[0]):
preds = sp.vstack(preds, format=preds[0].format)
else:
preds = np.concatenate(preds)
return preds[inv_locs]
def _fit_and_predict(estimator, X, y, train, test, verbose, fit_params):
"""Fit estimator and predict values for a given dataset split.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
estimator : estimator object implementing 'fit' and 'predict'
The object to use to fit the data.
X : array-like of shape at least 2D
The data to fit.
y : array-like, optional, default: None
The target variable to try to predict in the case of
supervised learning.
train : array-like, shape (n_train_samples,)
Indices of training samples.
test : array-like, shape (n_test_samples,)
Indices of test samples.
verbose : integer
The verbosity level.
fit_params : dict or None
Parameters that will be passed to ``estimator.fit``.
Returns
-------
preds : sequence
Result of calling ``estimator.predict``.
test : array-like
The value of the ``test`` parameter.
"""
# Adjust length of sample weights
fit_params = fit_params if fit_params is not None else {}
fit_params = dict([(k, _index_param_value(X, v, train))
for k, v in fit_params.items()])
X_train, y_train = _safe_split(estimator, X, y, train)
X_test, _ = _safe_split(estimator, X, y, test, train)
if y_train is None:
estimator.fit(X_train, **fit_params)
else:
estimator.fit(X_train, y_train, **fit_params)
preds = estimator.predict(X_test)
return preds, test
def _check_is_partition(locs, n):
"""Check whether locs is a reordering of the array np.arange(n)
Parameters
----------
locs : ndarray
integer array to test
n : int
number of expected elements
Returns
-------
is_partition : bool
True iff sorted(locs) is range(n)
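Examples
--------
A deterministic sketch, calling the private helper directly:
>>> import numpy as np
>>> _check_is_partition(np.array([2, 0, 1]), 3)
True
>>> _check_is_partition(np.array([0, 0, 1]), 3)
False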
"""
if len(locs) != n:
return False
hit = np.zeros(n, bool)
hit[locs] = True
if not np.all(hit):
return False
return True
def cross_val_score(estimator, X, y=None, scoring=None, cv=None, n_jobs=1,
verbose=0, fit_params=None, pre_dispatch='2*n_jobs'):
"""Evaluate a score by cross-validation
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :func:`sklearn.model_selection.cross_val_score` instead.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
estimator : estimator object implementing 'fit'
The object to use to fit the data.
X : array-like
The data to fit. Can be, for example, a list or an array of at least 2 dimensions.
y : array-like, optional, default: None
The target variable to try to predict in the case of
supervised learning.
scoring : string, callable or None, optional, default: None
A string (see model evaluation documentation) or
a scorer callable object / function with signature
``scorer(estimator, X, y)``.
cv : int, cross-validation generator or an iterable, optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds,
- an object to be used as a cross-validation generator,
- an iterable yielding train/test splits.
For integer/None inputs, if the estimator is a classifier and ``y`` is
either binary or multiclass, :class:`StratifiedKFold` is used. In all
other cases, :class:`KFold` is used.
Refer to the :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
n_jobs : integer, optional
The number of CPUs to use to do the computation. -1 means
'all CPUs'.
verbose : integer, optional
The verbosity level.
fit_params : dict, optional
Parameters to pass to the fit method of the estimator.
pre_dispatch : int, or string, optional
Controls the number of jobs that get dispatched during parallel
execution. Reducing this number can be useful to avoid an
explosion of memory consumption when more jobs get dispatched
than CPUs can process. This parameter can be:
- None, in which case all the jobs are immediately
created and spawned. Use this for lightweight and
fast-running jobs, to avoid delays due to on-demand
spawning of the jobs
- An int, giving the exact number of total jobs that are
spawned
- A string, giving an expression as a function of n_jobs,
as in '2*n_jobs'
Returns
-------
scores : array of float, shape=(len(list(cv)),)
Array of scores of the estimator for each run of the cross validation.
Examples
--------
>>> from sklearn import datasets, linear_model
>>> from sklearn.cross_validation import cross_val_score
>>> diabetes = datasets.load_diabetes()
>>> X = diabetes.data[:150]
>>> y = diabetes.target[:150]
>>> lasso = linear_model.Lasso()
>>> print(cross_val_score(lasso, X, y)) # doctest: +ELLIPSIS
[ 0.33150734 0.08022311 0.03531764]
See Also
--------
:func:`sklearn.metrics.make_scorer`:
Make a scorer from a performance metric or loss function.
"""
X, y = indexable(X, y)
cv = check_cv(cv, X, y, classifier=is_classifier(estimator))
scorer = check_scoring(estimator, scoring=scoring)
# We clone the estimator to make sure that all the folds are
# independent, and that it is pickle-able.
parallel = Parallel(n_jobs=n_jobs, verbose=verbose,
pre_dispatch=pre_dispatch)
scores = parallel(delayed(_fit_and_score)(clone(estimator), X, y, scorer,
train, test, verbose, None,
fit_params)
for train, test in cv)
return np.array(scores)[:, 0]
def _fit_and_score(estimator, X, y, scorer, train, test, verbose,
parameters, fit_params, return_train_score=False,
return_parameters=False, error_score='raise'):
"""Fit estimator and compute scores for a given dataset split.
Parameters
----------
estimator : estimator object implementing 'fit'
The object to use to fit the data.
X : array-like of shape at least 2D
The data to fit.
y : array-like, optional, default: None
The target variable to try to predict in the case of
supervised learning.
scorer : callable
A scorer callable object / function with signature
``scorer(estimator, X, y)``.
train : array-like, shape (n_train_samples,)
Indices of training samples.
test : array-like, shape (n_test_samples,)
Indices of test samples.
verbose : integer
The verbosity level.
error_score : 'raise' (default) or numeric
Value to assign to the score if an error occurs in estimator fitting.
If set to 'raise', the error is raised. If a numeric value is given,
FitFailedWarning is raised. This parameter does not affect the refit
step, which will always raise the error.
parameters : dict or None
Parameters to be set on the estimator.
fit_params : dict or None
Parameters that will be passed to ``estimator.fit``.
return_train_score : boolean, optional, default: False
Compute and return score on training set.
return_parameters : boolean, optional, default: False
Return the parameters that have been used for the estimator.
Returns
-------
train_score : float, optional
Score on training set, returned only if `return_train_score` is `True`.
test_score : float
Score on test set.
n_test_samples : int
Number of test samples.
scoring_time : float
Time spent for fitting and scoring in seconds.
parameters : dict or None, optional
The parameters that have been evaluated.
"""
if verbose > 1:
if parameters is None:
msg = ''
else:
msg = '%s' % (', '.join('%s=%s' % (k, v)
for k, v in parameters.items()))
print("[CV] %s %s" % (msg, (64 - len(msg)) * '.'))
# Adjust length of sample weights
fit_params = fit_params if fit_params is not None else {}
fit_params = dict([(k, _index_param_value(X, v, train))
for k, v in fit_params.items()])
if parameters is not None:
estimator.set_params(**parameters)
start_time = time.time()
X_train, y_train = _safe_split(estimator, X, y, train)
X_test, y_test = _safe_split(estimator, X, y, test, train)
try:
if y_train is None:
estimator.fit(X_train, **fit_params)
else:
estimator.fit(X_train, y_train, **fit_params)
except Exception as e:
if error_score == 'raise':
raise
elif isinstance(error_score, numbers.Number):
test_score = error_score
if return_train_score:
train_score = error_score
warnings.warn("Classifier fit failed. The score on this train-test"
" partition for these parameters will be set to %f. "
"Details: \n%r" % (error_score, e), FitFailedWarning)
else:
raise ValueError("error_score must be the string 'raise' or a"
" numeric value. (Hint: if using 'raise', please"
" make sure that it has been spelled correctly.)"
)
else:
test_score = _score(estimator, X_test, y_test, scorer)
if return_train_score:
train_score = _score(estimator, X_train, y_train, scorer)
scoring_time = time.time() - start_time
if verbose > 2:
msg += ", score=%f" % test_score
if verbose > 1:
end_msg = "%s -%s" % (msg, logger.short_format_time(scoring_time))
print("[CV] %s %s" % ((64 - len(end_msg)) * '.', end_msg))
ret = [train_score] if return_train_score else []
ret.extend([test_score, _num_samples(X_test), scoring_time])
if return_parameters:
ret.append(parameters)
return ret
def _safe_split(estimator, X, y, indices, train_indices=None):
"""Create subset of dataset and properly handle kernels."""
if hasattr(estimator, 'kernel') and callable(estimator.kernel) \
and not isinstance(estimator.kernel, GPKernel):
# cannot compute the kernel values with custom function
raise ValueError("Cannot use a custom kernel function. "
"Precompute the kernel matrix instead.")
if not hasattr(X, "shape"):
if getattr(estimator, "_pairwise", False):
raise ValueError("Precomputed kernels or affinity matrices have "
"to be passed as arrays or sparse matrices.")
X_subset = [X[idx] for idx in indices]
else:
if getattr(estimator, "_pairwise", False):
# X is a precomputed square kernel matrix
if X.shape[0] != X.shape[1]:
raise ValueError("X should be a square kernel matrix")
if train_indices is None:
X_subset = X[np.ix_(indices, indices)]
else:
X_subset = X[np.ix_(indices, train_indices)]
else:
X_subset = safe_indexing(X, indices)
if y is not None:
y_subset = safe_indexing(y, indices)
else:
y_subset = None
return X_subset, y_subset
def _score(estimator, X_test, y_test, scorer):
"""Compute the score of an estimator on a given test set."""
if y_test is None:
score = scorer(estimator, X_test)
else:
score = scorer(estimator, X_test, y_test)
if hasattr(score, 'item'):
try:
# e.g. unwrap memmapped scalars
score = score.item()
except ValueError:
# non-scalar?
pass
if not isinstance(score, numbers.Number):
raise ValueError("scoring must return a number, got %s (%s) instead."
% (str(score), type(score)))
return score
def _permutation_test_score(estimator, X, y, cv, scorer):
"""Auxiliary function for permutation_test_score"""
avg_score = []
for train, test in cv:
X_train, y_train = _safe_split(estimator, X, y, train)
X_test, y_test = _safe_split(estimator, X, y, test, train)
estimator.fit(X_train, y_train)
avg_score.append(scorer(estimator, X_test, y_test))
return np.mean(avg_score)
def _shuffle(y, labels, random_state):
"""Return a shuffled copy of y eventually shuffle among same labels."""
if labels is None:
ind = random_state.permutation(len(y))
else:
ind = np.arange(len(labels))
for label in np.unique(labels):
this_mask = (labels == label)
ind[this_mask] = random_state.permutation(ind[this_mask])
return safe_indexing(y, ind)
def check_cv(cv, X=None, y=None, classifier=False):
"""Input checker utility for building a CV in a user friendly way.
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :func:`sklearn.model_selection.check_cv` instead.
Parameters
----------
cv : int, cross-validation generator or an iterable, optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds,
- an object to be used as a cross-validation generator,
- an iterable yielding train/test splits.
For integer/None inputs, if classifier is True and ``y`` is binary or
multiclass, :class:`StratifiedKFold` is used. In all other cases,
:class:`KFold` is used.
Refer to the :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
X : array-like
The data the cross-val object will be applied on.
y : array-like
The target variable for a supervised learning problem.
classifier : boolean, optional
Whether the task is a classification task, in which case
stratified KFold will be used.
Returns
-------
checked_cv : a cross-validation generator instance.
The return value is guaranteed to be a cv generator instance, whatever
the input type.
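Examples
--------
A minimal sketch of the dispatch rules: with an integer ``cv``, a
classification target yields a :class:`StratifiedKFold` and anything
else a :class:`KFold`.
>>> import numpy as np
>>> from sklearn.cross_validation import check_cv, KFold, StratifiedKFold
>>> cv = check_cv(2, X=np.zeros((4, 2)), y=np.array([0, 1, 0, 1]),
... classifier=True)
>>> isinstance(cv, StratifiedKFold)
True
>>> cv = check_cv(2, X=np.zeros((4, 2)), y=np.arange(4.), classifier=False)
>>> isinstance(cv, KFold)
True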
"""
is_sparse = sp.issparse(X)
if cv is None:
cv = 3
if isinstance(cv, numbers.Integral):
if classifier:
if type_of_target(y) in ['binary', 'multiclass']:
cv = StratifiedKFold(y, cv)
else:
cv = KFold(_num_samples(y), cv)
else:
if not is_sparse:
n_samples = len(X)
else:
n_samples = X.shape[0]
cv = KFold(n_samples, cv)
return cv
def permutation_test_score(estimator, X, y, cv=None,
n_permutations=100, n_jobs=1, labels=None,
random_state=0, verbose=0, scoring=None):
"""Evaluate the significance of a cross-validated score with permutations
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :func:`sklearn.model_selection.permutation_test_score` instead.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
estimator : estimator object implementing 'fit'
The object to use to fit the data.
X : array-like of shape at least 2D
The data to fit.
y : array-like
The target variable to try to predict in the case of
supervised learning.
scoring : string, callable or None, optional, default: None
A string (see model evaluation documentation) or
a scorer callable object / function with signature
``scorer(estimator, X, y)``.
cv : int, cross-validation generator or an iterable, optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds,
- an object to be used as a cross-validation generator,
- an iterable yielding train/test splits.
For integer/None inputs, if the estimator is a classifier and ``y`` is
either binary or multiclass, :class:`StratifiedKFold` is used. In all
other cases, :class:`KFold` is used.
Refer to the :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
n_permutations : integer, optional
Number of times to permute ``y``.
n_jobs : integer, optional
The number of CPUs to use to do the computation. -1 means
'all CPUs'.
labels : array-like of shape [n_samples] (optional)
Labels constrain the permutation among groups of samples with
the same label.
random_state : int, RandomState instance or None, optional (default=0)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
verbose : integer, optional
The verbosity level.
Returns
-------
score : float
The true score without permuting targets.
permutation_scores : array, shape (n_permutations,)
The scores obtained for each permutation.
pvalue : float
The p-value, which approximates the probability that the score would
be obtained by chance. This is calculated as:
`(C + 1) / (n_permutations + 1)`
Where C is the number of permutations whose score >= the true score.
The best possible p-value is 1/(n_permutations + 1), the worst is 1.0.
Notes
-----
This function implements Test 1 in:
Ojala and Garriga. Permutation Tests for Studying Classifier
Performance. The Journal of Machine Learning Research (2010)
vol. 11
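Examples
--------
A usage sketch, skipped in doctests since the scores depend on the
permutation draws and the run takes a little while:
>>> from sklearn import datasets, svm
>>> from sklearn.cross_validation import permutation_test_score
>>> iris = datasets.load_iris()
>>> score, perm_scores, pvalue = permutation_test_score(
... svm.SVC(kernel='linear'), iris.data, iris.target,
... cv=5, n_permutations=30, random_state=0) # doctest: +SKIP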
"""
X, y = indexable(X, y)
cv = check_cv(cv, X, y, classifier=is_classifier(estimator))
scorer = check_scoring(estimator, scoring=scoring)
random_state = check_random_state(random_state)
# We clone the estimator to make sure that all the folds are
# independent, and that it is pickle-able.
score = _permutation_test_score(clone(estimator), X, y, cv, scorer)
permutation_scores = Parallel(n_jobs=n_jobs, verbose=verbose)(
delayed(_permutation_test_score)(
clone(estimator), X, _shuffle(y, labels, random_state), cv,
scorer)
for _ in range(n_permutations))
permutation_scores = np.array(permutation_scores)
pvalue = (np.sum(permutation_scores >= score) + 1.0) / (n_permutations + 1)
return score, permutation_scores, pvalue
permutation_test_score.__test__ = False  # to avoid a problem with nosetests
def train_test_split(*arrays, **options):
"""Split arrays or matrices into random train and test subsets
.. deprecated:: 0.18
This module will be removed in 0.20.
Use :func:`sklearn.model_selection.train_test_split` instead.
Quick utility that wraps input validation and
``next(iter(ShuffleSplit(n_samples)))`` and application to input
data into a single call for splitting (and optionally subsampling)
data in a one-liner.
Read more in the :ref:`User Guide <cross_validation>`.
Parameters
----------
*arrays : sequence of indexables with same length / shape[0]
Allowed inputs are lists, numpy arrays, scipy-sparse
matrices or pandas dataframes.
test_size : float, int, or None (default is None)
If float, should be between 0.0 and 1.0 and represent the
proportion of the dataset to include in the test split. If
int, represents the absolute number of test samples. If None,
the value is automatically set to the complement of the train size.
If train size is also None, test size is set to 0.25.
train_size : float, int, or None (default is None)
If float, should be between 0.0 and 1.0 and represent the
proportion of the dataset to include in the train split. If
int, represents the absolute number of train samples. If None,
the value is automatically set to the complement of the test size.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
stratify : array-like or None (default is None)
If not None, data is split in a stratified fashion, using this as
the labels array.
.. versionadded:: 0.17
*stratify* splitting
Returns
-------
splitting : list, length = 2 * len(arrays),
List containing train-test split of inputs.
.. versionadded:: 0.16
If the input is sparse, the output will be a
``scipy.sparse.csr_matrix``. Else, output type is the same as the
input type.
Examples
--------
>>> import numpy as np
>>> from sklearn.cross_validation import train_test_split
>>> X, y = np.arange(10).reshape((5, 2)), range(5)
>>> X
array([[0, 1],
[2, 3],
[4, 5],
[6, 7],
[8, 9]])
>>> list(y)
[0, 1, 2, 3, 4]
>>> X_train, X_test, y_train, y_test = train_test_split(
... X, y, test_size=0.33, random_state=42)
...
>>> X_train
array([[4, 5],
[0, 1],
[6, 7]])
>>> y_train
[2, 0, 3]
>>> X_test
array([[2, 3],
[8, 9]])
>>> y_test
[1, 4]
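A sketch of the *stratify* option (the drawn indices depend on the
random state, so the doctest is skipped):
>>> y2 = [0, 0, 0, 1, 1, 1]
>>> X2_train, X2_test, y2_train, y2_test = train_test_split(
... np.arange(12).reshape((6, 2)), y2, test_size=0.5,
... stratify=y2, random_state=0) # doctest: +SKIP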
"""
n_arrays = len(arrays)
if n_arrays == 0:
raise ValueError("At least one array required as input")
test_size = options.pop('test_size', None)
train_size = options.pop('train_size', None)
random_state = options.pop('random_state', None)
stratify = options.pop('stratify', None)
if options:
raise TypeError("Invalid parameters passed: %s" % str(options))
if test_size is None and train_size is None:
test_size = 0.25
arrays = indexable(*arrays)
if stratify is not None:
cv = StratifiedShuffleSplit(stratify, test_size=test_size,
train_size=train_size,
random_state=random_state)
else:
n_samples = _num_samples(arrays[0])
cv = ShuffleSplit(n_samples, test_size=test_size,
train_size=train_size,
random_state=random_state)
train, test = next(iter(cv))
return list(chain.from_iterable((safe_indexing(a, train),
safe_indexing(a, test)) for a in arrays))
train_test_split.__test__ = False  # to avoid a problem with nosetests
|
dnozay/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.3/tests/regressiontests/defaultfilters/tests.py
|
49
|
# -*- coding: utf-8 -*-
import datetime
from django.utils import unittest
from django.template.defaultfilters import *
class DefaultFiltersTests(unittest.TestCase):
def test_floatformat(self):
self.assertEqual(floatformat(7.7), u'7.7')
self.assertEqual(floatformat(7.0), u'7')
self.assertEqual(floatformat(0.7), u'0.7')
self.assertEqual(floatformat(0.07), u'0.1')
self.assertEqual(floatformat(0.007), u'0.0')
self.assertEqual(floatformat(0.0), u'0')
self.assertEqual(floatformat(7.7, 3), u'7.700')
self.assertEqual(floatformat(6.000000, 3), u'6.000')
self.assertEqual(floatformat(6.200000, 3), u'6.200')
self.assertEqual(floatformat(6.200000, -3), u'6.200')
self.assertEqual(floatformat(13.1031, -3), u'13.103')
self.assertEqual(floatformat(11.1197, -2), u'11.12')
self.assertEqual(floatformat(11.0000, -2), u'11')
self.assertEqual(floatformat(11.000001, -2), u'11.00')
self.assertEqual(floatformat(8.2798, 3), u'8.280')
self.assertEqual(floatformat(u'foo'), u'')
self.assertEqual(floatformat(13.1031, u'bar'), u'13.1031')
self.assertEqual(floatformat(18.125, 2), u'18.13')
self.assertEqual(floatformat(u'foo', u'bar'), u'')
self.assertEqual(floatformat(u'¿Cómo esta usted?'), u'')
self.assertEqual(floatformat(None), u'')
# Check that we're not converting to scientific notation.
self.assertEqual(floatformat(0, 6), u'0.000000')
self.assertEqual(floatformat(0, 7), u'0.0000000')
self.assertEqual(floatformat(0, 10), u'0.0000000000')
self.assertEqual(floatformat(0.000000000000000000015, 20),
u'0.00000000000000000002')
pos_inf = float(1e30000)
self.assertEqual(floatformat(pos_inf), unicode(pos_inf))
neg_inf = float(-1e30000)
self.assertEqual(floatformat(neg_inf), unicode(neg_inf))
nan = pos_inf / pos_inf
self.assertEqual(floatformat(nan), unicode(nan))
class FloatWrapper(object):
def __init__(self, value):
self.value = value
def __float__(self):
return self.value
self.assertEqual(floatformat(FloatWrapper(11.000001), -2), u'11.00')
# This fails because of Python's float handling. Floats with many zeroes
# after the decimal point should be passed in as another type such as
# unicode or Decimal.
@unittest.expectedFailure
def test_floatformat_fail(self):
self.assertEqual(floatformat(1.00000000000000015, 16), u'1.0000000000000002')
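# A hedged sketch of the workaround suggested above (a hypothetical
# test, not part of the original suite): passing the value as a string
# avoids the lossy float representation entirely.
def test_floatformat_string_input(self):
self.assertEqual(floatformat(u'1.00000000000000015', 16),
u'1.0000000000000002')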
def test_addslashes(self):
self.assertEqual(addslashes(u'"double quotes" and \'single quotes\''),
u'\\"double quotes\\" and \\\'single quotes\\\'')
self.assertEqual(addslashes(ur'\ : backslashes, too'),
u'\\\\ : backslashes, too')
def test_capfirst(self):
self.assertEqual(capfirst(u'hello world'), u'Hello world')
def test_escapejs(self):
self.assertEqual(escapejs(u'"double quotes" and \'single quotes\''),
u'\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027')
self.assertEqual(escapejs(ur'\ : backslashes, too'),
u'\\u005C : backslashes, too')
self.assertEqual(escapejs(u'and lots of whitespace: \r\n\t\v\f\b'),
u'and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008')
self.assertEqual(escapejs(ur'<script>and this</script>'),
u'\\u003Cscript\\u003Eand this\\u003C/script\\u003E')
self.assertEqual(
escapejs(u'paragraph separator:\u2029and line separator:\u2028'),
u'paragraph separator:\\u2029and line separator:\\u2028')
def test_fix_ampersands(self):
self.assertEqual(fix_ampersands(u'Jack & Jill & Jeroboam'),
u'Jack & Jill & Jeroboam')
def test_linenumbers(self):
self.assertEqual(linenumbers(u'line 1\nline 2'),
u'1. line 1\n2. line 2')
self.assertEqual(linenumbers(u'\n'.join([u'x'] * 10)),
u'01. x\n02. x\n03. x\n04. x\n05. x\n06. x\n07. '\
u'x\n08. x\n09. x\n10. x')
def test_lower(self):
self.assertEqual(lower('TEST'), u'test')
# uppercase E umlaut
self.assertEqual(lower(u'\xcb'), u'\xeb')
def test_make_list(self):
self.assertEqual(make_list('abc'), [u'a', u'b', u'c'])
self.assertEqual(make_list(1234), [u'1', u'2', u'3', u'4'])
def test_slugify(self):
self.assertEqual(slugify(' Jack & Jill like numbers 1,2,3 and 4 and'\
' silly characters ?%.$!/'),
u'jack-jill-like-numbers-123-and-4-and-silly-characters')
self.assertEqual(slugify(u"Un \xe9l\xe9phant \xe0 l'or\xe9e du bois"),
u'un-elephant-a-loree-du-bois')
def test_stringformat(self):
self.assertEqual(stringformat(1, u'03d'), u'001')
self.assertEqual(stringformat(1, u'z'), u'')
def test_title(self):
self.assertEqual(title('a nice title, isn\'t it?'),
u"A Nice Title, Isn't It?")
self.assertEqual(title(u'discoth\xe8que'), u'Discoth\xe8que')
def test_truncatewords(self):
self.assertEqual(
truncatewords(u'A sentence with a few words in it', 1), u'A ...')
self.assertEqual(
truncatewords(u'A sentence with a few words in it', 5),
u'A sentence with a few ...')
self.assertEqual(
truncatewords(u'A sentence with a few words in it', 100),
u'A sentence with a few words in it')
self.assertEqual(
truncatewords(u'A sentence with a few words in it',
'not a number'), u'A sentence with a few words in it')
def test_truncatewords_html(self):
self.assertEqual(truncatewords_html(
u'<p>one <a href="#">two - three <br>four</a> five</p>', 0), u'')
self.assertEqual(truncatewords_html(u'<p>one <a href="#">two - '\
u'three <br>four</a> five</p>', 2),
u'<p>one <a href="#">two ...</a></p>')
self.assertEqual(truncatewords_html(
u'<p>one <a href="#">two - three <br>four</a> five</p>', 4),
u'<p>one <a href="#">two - three <br>four ...</a></p>')
self.assertEqual(truncatewords_html(
u'<p>one <a href="#">two - three <br>four</a> five</p>', 5),
u'<p>one <a href="#">two - three <br>four</a> five</p>')
self.assertEqual(truncatewords_html(
u'<p>one <a href="#">two - three <br>four</a> five</p>', 100),
u'<p>one <a href="#">two - three <br>four</a> five</p>')
self.assertEqual(truncatewords_html(
u'\xc5ngstr\xf6m was here', 1), u'\xc5ngstr\xf6m ...')
def test_upper(self):
self.assertEqual(upper(u'Mixed case input'), u'MIXED CASE INPUT')
# lowercase e umlaut
self.assertEqual(upper(u'\xeb'), u'\xcb')
def test_urlencode(self):
self.assertEqual(urlencode(u'fran\xe7ois & jill'),
u'fran%C3%A7ois%20%26%20jill')
self.assertEqual(urlencode(1), u'1')
def test_iriencode(self):
self.assertEqual(iriencode(u'S\xf8r-Tr\xf8ndelag'),
u'S%C3%B8r-Tr%C3%B8ndelag')
self.assertEqual(iriencode(urlencode(u'fran\xe7ois & jill')),
u'fran%C3%A7ois%20%26%20jill')
def test_urlizetrunc(self):
self.assertEqual(urlizetrunc(u'http://short.com/', 20), u'<a href='\
u'"http://short.com/" rel="nofollow">http://short.com/</a>')
self.assertEqual(urlizetrunc(u'http://www.google.co.uk/search?hl=en'\
u'&q=some+long+url&btnG=Search&meta=', 20), u'<a href="http://'\
u'www.google.co.uk/search?hl=en&q=some+long+url&btnG=Search&'\
u'meta=" rel="nofollow">http://www.google...</a>')
self.assertEqual(urlizetrunc('http://www.google.co.uk/search?hl=en'\
u'&q=some+long+url&btnG=Search&meta=', 20), u'<a href="http://'\
u'www.google.co.uk/search?hl=en&q=some+long+url&btnG=Search'\
u'&meta=" rel="nofollow">http://www.google...</a>')
# Check truncating of URIs which are the exact length
uri = 'http://31characteruri.com/test/'
self.assertEqual(len(uri), 31)
self.assertEqual(urlizetrunc(uri, 31),
u'<a href="http://31characteruri.com/test/" rel="nofollow">'\
u'http://31characteruri.com/test/</a>')
self.assertEqual(urlizetrunc(uri, 30),
u'<a href="http://31characteruri.com/test/" rel="nofollow">'\
u'http://31characteruri.com/t...</a>')
self.assertEqual(urlizetrunc(uri, 2),
u'<a href="http://31characteruri.com/test/"'\
u' rel="nofollow">...</a>')
def test_urlize(self):
# Check normal urlize
self.assertEqual(urlize('http://google.com'),
u'<a href="http://google.com" rel="nofollow">http://google.com</a>')
self.assertEqual(urlize('http://google.com/'),
u'<a href="http://google.com/" rel="nofollow">http://google.com/</a>')
self.assertEqual(urlize('www.google.com'),
u'<a href="http://www.google.com" rel="nofollow">www.google.com</a>')
self.assertEqual(urlize('djangoproject.org'),
u'<a href="http://djangoproject.org" rel="nofollow">djangoproject.org</a>')
self.assertEqual(urlize('info@djangoproject.org'),
u'<a href="mailto:info@djangoproject.org">info@djangoproject.org</a>')
# Check urlize with https addresses
self.assertEqual(urlize('https://google.com'),
u'<a href="https://google.com" rel="nofollow">https://google.com</a>')
def test_wordcount(self):
self.assertEqual(wordcount(''), 0)
self.assertEqual(wordcount(u'oneword'), 1)
self.assertEqual(wordcount(u'lots of words'), 3)
self.assertEqual(wordwrap(u'this is a long paragraph of text that '\
u'really needs to be wrapped I\'m afraid', 14),
u"this is a long\nparagraph of\ntext that\nreally needs\nto be "\
u"wrapped\nI'm afraid")
self.assertEqual(wordwrap(u'this is a short paragraph of text.\n '\
u'But this line should be indented', 14),
u'this is a\nshort\nparagraph of\ntext.\n But this\nline '\
u'should be\nindented')
self.assertEqual(wordwrap(u'this is a short paragraph of text.\n '\
u'But this line should be indented',15), u'this is a short\n'\
u'paragraph of\ntext.\n But this line\nshould be\nindented')
def test_rjust(self):
self.assertEqual(ljust(u'test', 10), u'test ')
self.assertEqual(ljust(u'test', 3), u'test')
self.assertEqual(rjust(u'test', 10), u' test')
self.assertEqual(rjust(u'test', 3), u'test')
def test_center(self):
self.assertEqual(center(u'test', 6), u' test ')
def test_cut(self):
self.assertEqual(cut(u'a string to be mangled', 'a'),
u' string to be mngled')
self.assertEqual(cut(u'a string to be mangled', 'ng'),
u'a stri to be maled')
self.assertEqual(cut(u'a string to be mangled', 'strings'),
u'a string to be mangled')
def test_force_escape(self):
self.assertEqual(
force_escape(u'<some html & special characters > here'),
u'<some html & special characters > here')
self.assertEqual(
force_escape(u'<some html & special characters > here ĐÅ€£'),
u'<some html & special characters > here'\
u' \u0110\xc5\u20ac\xa3')
def test_linebreaks(self):
self.assertEqual(linebreaks(u'line 1'), u'<p>line 1</p>')
self.assertEqual(linebreaks(u'line 1\nline 2'),
u'<p>line 1<br />line 2</p>')
def test_removetags(self):
self.assertEqual(removetags(u'some <b>html</b> with <script>alert'\
u'("You smell")</script> disallowed <img /> tags', 'script img'),
u'some <b>html</b> with alert("You smell") disallowed tags')
self.assertEqual(striptags(u'some <b>html</b> with <script>alert'\
u'("You smell")</script> disallowed <img /> tags'),
u'some html with alert("You smell") disallowed tags')
def test_dictsort(self):
sorted_dicts = dictsort([{'age': 23, 'name': 'Barbara-Ann'},
{'age': 63, 'name': 'Ra Ra Rasputin'},
{'name': 'Jonny B Goode', 'age': 18}], 'age')
self.assertEqual([sorted(dict.items()) for dict in sorted_dicts],
[[('age', 18), ('name', 'Jonny B Goode')],
[('age', 23), ('name', 'Barbara-Ann')],
[('age', 63), ('name', 'Ra Ra Rasputin')]])
def test_dictsortreversed(self):
sorted_dicts = dictsortreversed([{'age': 23, 'name': 'Barbara-Ann'},
{'age': 63, 'name': 'Ra Ra Rasputin'},
{'name': 'Jonny B Goode', 'age': 18}],
'age')
self.assertEqual([sorted(dict.items()) for dict in sorted_dicts],
[[('age', 63), ('name', 'Ra Ra Rasputin')],
[('age', 23), ('name', 'Barbara-Ann')],
[('age', 18), ('name', 'Jonny B Goode')]])
def test_first(self):
self.assertEqual(first([0,1,2]), 0)
self.assertEqual(first(u''), u'')
self.assertEqual(first(u'test'), u't')
def test_join(self):
self.assertEqual(join([0,1,2], u'glue'), u'0glue1glue2')
def test_length(self):
self.assertEqual(length(u'1234'), 4)
self.assertEqual(length([1,2,3,4]), 4)
self.assertEqual(length_is([], 0), True)
self.assertEqual(length_is([], 1), False)
self.assertEqual(length_is('a', 1), True)
self.assertEqual(length_is(u'a', 10), False)
def test_slice(self):
self.assertEqual(slice_(u'abcdefg', u'0'), u'')
self.assertEqual(slice_(u'abcdefg', u'1'), u'a')
self.assertEqual(slice_(u'abcdefg', u'-1'), u'abcdef')
self.assertEqual(slice_(u'abcdefg', u'1:2'), u'b')
self.assertEqual(slice_(u'abcdefg', u'1:3'), u'bc')
self.assertEqual(slice_(u'abcdefg', u'0::2'), u'aceg')
def test_unordered_list(self):
self.assertEqual(unordered_list([u'item 1', u'item 2']),
u'\t<li>item 1</li>\n\t<li>item 2</li>')
self.assertEqual(unordered_list([u'item 1', [u'item 1.1']]),
u'\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t</ul>\n\t</li>')
self.assertEqual(
unordered_list([u'item 1', [u'item 1.1', u'item1.2'], u'item 2']),
u'\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t\t<li>item1.2'\
u'</li>\n\t</ul>\n\t</li>\n\t<li>item 2</li>')
self.assertEqual(
unordered_list([u'item 1', [u'item 1.1', [u'item 1.1.1',
[u'item 1.1.1.1']]]]),
u'\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1\n\t\t<ul>\n\t\t\t<li>'\
u'item 1.1.1\n\t\t\t<ul>\n\t\t\t\t<li>item 1.1.1.1</li>\n\t\t\t'\
u'</ul>\n\t\t\t</li>\n\t\t</ul>\n\t\t</li>\n\t</ul>\n\t</li>')
self.assertEqual(unordered_list(
['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]),
u'\t<li>States\n\t<ul>\n\t\t<li>Kansas\n\t\t<ul>\n\t\t\t<li>'\
u'Lawrence</li>\n\t\t\t<li>Topeka</li>\n\t\t</ul>\n\t\t</li>'\
u'\n\t\t<li>Illinois</li>\n\t</ul>\n\t</li>')
class ULItem(object):
def __init__(self, title):
self.title = title
def __unicode__(self):
return u'ulitem-%s' % str(self.title)
a = ULItem('a')
b = ULItem('b')
self.assertEqual(unordered_list([a,b]),
u'\t<li>ulitem-a</li>\n\t<li>ulitem-b</li>')
# Old format for unordered lists should still work
self.assertEqual(unordered_list([u'item 1', []]), u'\t<li>item 1</li>')
self.assertEqual(unordered_list([u'item 1', [[u'item 1.1', []]]]),
u'\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t</ul>\n\t</li>')
self.assertEqual(unordered_list([u'item 1', [[u'item 1.1', []],
[u'item 1.2', []]]]), u'\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1'\
u'</li>\n\t\t<li>item 1.2</li>\n\t</ul>\n\t</li>')
self.assertEqual(unordered_list(['States', [['Kansas', [['Lawrence',
[]], ['Topeka', []]]], ['Illinois', []]]]), u'\t<li>States\n\t'\
u'<ul>\n\t\t<li>Kansas\n\t\t<ul>\n\t\t\t<li>Lawrence</li>'\
u'\n\t\t\t<li>Topeka</li>\n\t\t</ul>\n\t\t</li>\n\t\t<li>'\
u'Illinois</li>\n\t</ul>\n\t</li>')
def test_add(self):
self.assertEqual(add(u'1', u'2'), 3)
def test_get_digit(self):
self.assertEqual(get_digit(123, 1), 3)
self.assertEqual(get_digit(123, 2), 2)
self.assertEqual(get_digit(123, 3), 1)
self.assertEqual(get_digit(123, 4), 0)
self.assertEqual(get_digit(123, 0), 123)
self.assertEqual(get_digit(u'xyz', 0), u'xyz')
def test_date(self):
# real testing of date() is in dateformat.py
self.assertEqual(date(datetime.datetime(2005, 12, 29), u"d F Y"),
u'29 December 2005')
self.assertEqual(date(datetime.datetime(2005, 12, 29), ur'jS o\f F'),
u'29th of December')
def test_time(self):
# real testing of time() is done in dateformat.py
self.assertEqual(time(datetime.time(13), u"h"), u'01')
self.assertEqual(time(datetime.time(0), u"h"), u'12')
def test_timesince(self):
# real testing is done in timesince.py, where we can provide our own 'now'
self.assertEqual(
timesince(datetime.datetime.now() - datetime.timedelta(1)),
u'1 day')
self.assertEqual(
timesince(datetime.datetime(2005, 12, 29),
datetime.datetime(2005, 12, 30)),
u'1 day')
def test_timeuntil(self):
self.assertEqual(
timeuntil(datetime.datetime.now() + datetime.timedelta(1)),
u'1 day')
self.assertEqual(timeuntil(datetime.datetime(2005, 12, 30),
datetime.datetime(2005, 12, 29)),
u'1 day')
def test_default(self):
self.assertEqual(default(u"val", u"default"), u'val')
self.assertEqual(default(None, u"default"), u'default')
self.assertEqual(default(u'', u"default"), u'default')
def test_if_none(self):
self.assertEqual(default_if_none(u"val", u"default"), u'val')
self.assertEqual(default_if_none(None, u"default"), u'default')
self.assertEqual(default_if_none(u'', u"default"), u'')
def test_divisibleby(self):
self.assertEqual(divisibleby(4, 2), True)
self.assertEqual(divisibleby(4, 3), False)
def test_yesno(self):
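        # yesno maps True/False/None onto a 'yes,no,maybe' style triple; when
        # only two custom values are given, None falls back to the 'no' value.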
self.assertEqual(yesno(True), u'yes')
self.assertEqual(yesno(False), u'no')
self.assertEqual(yesno(None), u'maybe')
self.assertEqual(yesno(True, u'certainly,get out of town,perhaps'),
u'certainly')
self.assertEqual(yesno(False, u'certainly,get out of town,perhaps'),
u'get out of town')
self.assertEqual(yesno(None, u'certainly,get out of town,perhaps'),
u'perhaps')
self.assertEqual(yesno(None, u'certainly,get out of town'),
u'get out of town')
def test_filesizeformat(self):
self.assertEqual(filesizeformat(1023), u'1023 bytes')
self.assertEqual(filesizeformat(1024), u'1.0 KB')
self.assertEqual(filesizeformat(10*1024), u'10.0 KB')
self.assertEqual(filesizeformat(1024*1024-1), u'1024.0 KB')
self.assertEqual(filesizeformat(1024*1024), u'1.0 MB')
self.assertEqual(filesizeformat(1024*1024*50), u'50.0 MB')
self.assertEqual(filesizeformat(1024*1024*1024-1), u'1024.0 MB')
self.assertEqual(filesizeformat(1024*1024*1024), u'1.0 GB')
self.assertEqual(filesizeformat(1024*1024*1024*1024), u'1.0 TB')
self.assertEqual(filesizeformat(1024*1024*1024*1024*1024), u'1.0 PB')
self.assertEqual(filesizeformat(1024*1024*1024*1024*1024*2000),
u'2000.0 PB')
self.assertEqual(filesizeformat(complex(1,-1)), u'0 bytes')
self.assertEqual(filesizeformat(""), u'0 bytes')
self.assertEqual(filesizeformat(u"\N{GREEK SMALL LETTER ALPHA}"),
u'0 bytes')
def test_localized_filesizeformat(self):
from django.utils.translation import activate, deactivate
old_localize = settings.USE_L10N
try:
activate('de')
settings.USE_L10N = True
self.assertEqual(filesizeformat(1023), u'1023 Bytes')
self.assertEqual(filesizeformat(1024), u'1,0 KB')
self.assertEqual(filesizeformat(10*1024), u'10,0 KB')
self.assertEqual(filesizeformat(1024*1024-1), u'1024,0 KB')
self.assertEqual(filesizeformat(1024*1024), u'1,0 MB')
self.assertEqual(filesizeformat(1024*1024*50), u'50,0 MB')
self.assertEqual(filesizeformat(1024*1024*1024-1), u'1024,0 MB')
self.assertEqual(filesizeformat(1024*1024*1024), u'1,0 GB')
self.assertEqual(filesizeformat(1024*1024*1024*1024), u'1,0 TB')
self.assertEqual(filesizeformat(1024*1024*1024*1024*1024),
u'1,0 PB')
self.assertEqual(filesizeformat(1024*1024*1024*1024*1024*2000),
u'2000,0 PB')
self.assertEqual(filesizeformat(complex(1,-1)), u'0 Bytes')
self.assertEqual(filesizeformat(""), u'0 Bytes')
self.assertEqual(filesizeformat(u"\N{GREEK SMALL LETTER ALPHA}"),
u'0 Bytes')
finally:
deactivate()
settings.USE_L10N = old_localize
def test_pluralize(self):
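        # pluralize defaults to appending 's'; one custom argument replaces the
        # plural suffix, 'singular,plural' supplies both forms, and anything
        # with more than two values is invalid and returns the empty string.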
self.assertEqual(pluralize(1), u'')
self.assertEqual(pluralize(0), u's')
self.assertEqual(pluralize(2), u's')
self.assertEqual(pluralize([1]), u'')
self.assertEqual(pluralize([]), u's')
self.assertEqual(pluralize([1,2,3]), u's')
self.assertEqual(pluralize(1,u'es'), u'')
self.assertEqual(pluralize(0,u'es'), u'es')
self.assertEqual(pluralize(2,u'es'), u'es')
self.assertEqual(pluralize(1,u'y,ies'), u'y')
self.assertEqual(pluralize(0,u'y,ies'), u'ies')
self.assertEqual(pluralize(2,u'y,ies'), u'ies')
self.assertEqual(pluralize(0,u'y,ies,error'), u'')
def test_phone2numeric(self):
self.assertEqual(phone2numeric(u'0800 flowers'), u'0800 3569377')
def test_non_string_input(self):
# Filters shouldn't break if passed non-strings
self.assertEqual(addslashes(123), u'123')
self.assertEqual(linenumbers(123), u'1. 123')
self.assertEqual(lower(123), u'123')
self.assertEqual(make_list(123), [u'1', u'2', u'3'])
self.assertEqual(slugify(123), u'123')
self.assertEqual(title(123), u'123')
self.assertEqual(truncatewords(123, 2), u'123')
self.assertEqual(upper(123), u'123')
self.assertEqual(urlencode(123), u'123')
self.assertEqual(urlize(123), u'123')
self.assertEqual(urlizetrunc(123, 1), u'123')
self.assertEqual(wordcount(123), 1)
self.assertEqual(wordwrap(123, 2), u'123')
self.assertEqual(ljust('123', 4), u'123 ')
self.assertEqual(rjust('123', 4), u' 123')
self.assertEqual(center('123', 5), u' 123 ')
self.assertEqual(center('123', 6), u' 123 ')
self.assertEqual(cut(123, '2'), u'13')
self.assertEqual(escape(123), u'123')
self.assertEqual(linebreaks(123), u'<p>123</p>')
self.assertEqual(linebreaksbr(123), u'123')
self.assertEqual(removetags(123, 'a'), u'123')
self.assertEqual(striptags(123), u'123')
|
pkoutsias/SickRage
|
refs/heads/master
|
lib/mako/lookup.py
|
34
|
# mako/lookup.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import os
import stat
import posixpath
import re
from mako import exceptions, util
from mako.template import Template
try:
import threading
except ImportError:
import dummy_threading as threading
class TemplateCollection(object):
"""Represent a collection of :class:`.Template` objects,
identifiable via URI.
A :class:`.TemplateCollection` is linked to the usage of
all template tags that address other templates, such
as ``<%include>``, ``<%namespace>``, and ``<%inherit>``.
The ``file`` attribute of each of those tags refers
to a string URI that is passed to that :class:`.Template`
object's :class:`.TemplateCollection` for resolution.
:class:`.TemplateCollection` is an abstract class,
with the usual default implementation being :class:`.TemplateLookup`.
"""
def has_template(self, uri):
"""Return ``True`` if this :class:`.TemplateLookup` is
capable of returning a :class:`.Template` object for the
given ``uri``.
:param uri: String URI of the template to be resolved.
"""
try:
self.get_template(uri)
return True
except exceptions.TemplateLookupException:
return False
def get_template(self, uri, relativeto=None):
"""Return a :class:`.Template` object corresponding to the given
``uri``.
The default implementation raises
:class:`.NotImplementedError`. Implementations should
raise :class:`.TemplateLookupException` if the given ``uri``
cannot be resolved.
:param uri: String URI of the template to be resolved.
:param relativeto: if present, the given ``uri`` is assumed to
be relative to this URI.
"""
raise NotImplementedError()
def filename_to_uri(self, uri, filename):
"""Convert the given ``filename`` to a URI relative to
this :class:`.TemplateCollection`."""
return uri
def adjust_uri(self, uri, filename):
"""Adjust the given ``uri`` based on the calling ``filename``.
When this method is called from the runtime, the
``filename`` parameter is taken directly to the ``filename``
attribute of the calling template. Therefore a custom
:class:`.TemplateCollection` subclass can place any string
identifier desired in the ``filename`` parameter of the
:class:`.Template` objects it constructs and have them come back
here.
"""
return uri
class TemplateLookup(TemplateCollection):
"""Represent a collection of templates that locates template source files
from the local filesystem.
The primary argument is the ``directories`` argument, the list of
directories to search:
.. sourcecode:: python
lookup = TemplateLookup(["/path/to/templates"])
some_template = lookup.get_template("/index.html")
The :class:`.TemplateLookup` can also be given :class:`.Template` objects
    programmatically using :meth:`.put_string` or :meth:`.put_template`:
.. sourcecode:: python
lookup = TemplateLookup()
lookup.put_string("base.html", '''
<html><body>${self.next()}</body></html>
''')
lookup.put_string("hello.html", '''
<%include file='base.html'/>
Hello, world !
''')
:param directories: A list of directory names which will be
searched for a particular template URI. The URI is appended
to each directory and the filesystem checked.
:param collection_size: Approximate size of the collection used
to store templates. If left at its default of ``-1``, the size
is unbounded, and a plain Python dictionary is used to
relate URI strings to :class:`.Template` instances.
Otherwise, a least-recently-used cache object is used which
will maintain the size of the collection approximately to
the number given.
:param filesystem_checks: When at its default value of ``True``,
each call to :meth:`.TemplateLookup.get_template()` will
compare the filesystem last modified time to the time in
which an existing :class:`.Template` object was created.
This allows the :class:`.TemplateLookup` to regenerate a
new :class:`.Template` whenever the original source has
been updated. Set this to ``False`` for a very minor
performance increase.
:param modulename_callable: A callable which, when present,
is passed the path of the source file as well as the
requested URI, and then returns the full path of the
generated Python module file. This is used to inject
alternate schemes for Python module location. If left at
      its default of ``None``, the built-in system of generation
based on ``module_directory`` plus ``uri`` is used.
All other keyword parameters available for
:class:`.Template` are mirrored here. When new
:class:`.Template` objects are created, the keywords
established with this :class:`.TemplateLookup` are passed on
to each new :class:`.Template`.
"""
def __init__(self,
directories=None,
module_directory=None,
filesystem_checks=True,
collection_size=-1,
format_exceptions=False,
error_handler=None,
disable_unicode=False,
bytestring_passthrough=False,
output_encoding=None,
encoding_errors='strict',
cache_args=None,
cache_impl='beaker',
cache_enabled=True,
cache_type=None,
cache_dir=None,
cache_url=None,
modulename_callable=None,
module_writer=None,
default_filters=None,
buffer_filters=(),
strict_undefined=False,
imports=None,
future_imports=None,
enable_loop=True,
input_encoding=None,
preprocessor=None,
lexer_cls=None):
        self.directories = [posixpath.normpath(d)
                            for d in util.to_list(directories, ())]
self.module_directory = module_directory
self.modulename_callable = modulename_callable
self.filesystem_checks = filesystem_checks
self.collection_size = collection_size
if cache_args is None:
cache_args = {}
# transfer deprecated cache_* args
if cache_dir:
cache_args.setdefault('dir', cache_dir)
if cache_url:
cache_args.setdefault('url', cache_url)
if cache_type:
cache_args.setdefault('type', cache_type)
self.template_args = {
'format_exceptions': format_exceptions,
'error_handler': error_handler,
'disable_unicode': disable_unicode,
'bytestring_passthrough': bytestring_passthrough,
'output_encoding': output_encoding,
'cache_impl': cache_impl,
'encoding_errors': encoding_errors,
'input_encoding': input_encoding,
'module_directory': module_directory,
'module_writer': module_writer,
'cache_args': cache_args,
'cache_enabled': cache_enabled,
'default_filters': default_filters,
'buffer_filters': buffer_filters,
'strict_undefined': strict_undefined,
'imports': imports,
'future_imports': future_imports,
'enable_loop': enable_loop,
'preprocessor': preprocessor,
'lexer_cls': lexer_cls
}
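        # With an unbounded collection a plain dict suffices; otherwise an LRU
        # cache keeps the number of retained templates near collection_size.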
if collection_size == -1:
self._collection = {}
self._uri_cache = {}
else:
self._collection = util.LRUCache(collection_size)
self._uri_cache = util.LRUCache(collection_size)
self._mutex = threading.Lock()
def get_template(self, uri):
"""Return a :class:`.Template` object corresponding to the given
``uri``.
.. note:: The ``relativeto`` argument is not supported here at
the moment.
"""
try:
if self.filesystem_checks:
return self._check(uri, self._collection[uri])
else:
return self._collection[uri]
except KeyError:
            u = re.sub(r'^/+', '', uri)
            for directory in self.directories:
                # make sure the path separators are posix - os.altsep is empty
                # on POSIX and cannot be used.
                directory = directory.replace(os.path.sep, posixpath.sep)
                srcfile = posixpath.normpath(posixpath.join(directory, u))
                if os.path.isfile(srcfile):
                    return self._load(srcfile, uri)
            else:
                # for/else: no configured directory contained the template.
                raise exceptions.TopLevelLookupException(
                    "Can't locate template for uri %r" % uri)
def adjust_uri(self, uri, relativeto):
"""Adjust the given ``uri`` based on the given relative URI."""
key = (uri, relativeto)
if key in self._uri_cache:
return self._uri_cache[key]
if uri[0] != '/':
if relativeto is not None:
v = self._uri_cache[key] = posixpath.join(
posixpath.dirname(relativeto), uri)
else:
v = self._uri_cache[key] = '/' + uri
else:
v = self._uri_cache[key] = uri
return v
def filename_to_uri(self, filename):
"""Convert the given ``filename`` to a URI relative to
this :class:`.TemplateCollection`."""
try:
return self._uri_cache[filename]
except KeyError:
value = self._relativeize(filename)
self._uri_cache[filename] = value
return value
def _relativeize(self, filename):
"""Return the portion of a filename that is 'relative'
to the directories in this lookup.
"""
filename = posixpath.normpath(filename)
        for directory in self.directories:
            if filename.startswith(directory):
                return filename[len(directory):]
        else:
            # for/else: filename does not live under any lookup directory.
            return None
def _load(self, filename, uri):
self._mutex.acquire()
try:
try:
# try returning from collection one
# more time in case concurrent thread already loaded
return self._collection[uri]
except KeyError:
pass
try:
if self.modulename_callable is not None:
module_filename = self.modulename_callable(filename, uri)
else:
module_filename = None
self._collection[uri] = template = Template(
uri=uri,
filename=posixpath.normpath(filename),
lookup=self,
module_filename=module_filename,
**self.template_args)
return template
except:
# if compilation fails etc, ensure
# template is removed from collection,
# re-raise
self._collection.pop(uri, None)
raise
finally:
self._mutex.release()
def _check(self, uri, template):
if template.filename is None:
return template
try:
template_stat = os.stat(template.filename)
if template.module._modified_time < \
template_stat[stat.ST_MTIME]:
self._collection.pop(uri, None)
return self._load(template.filename, uri)
else:
return template
except OSError:
self._collection.pop(uri, None)
raise exceptions.TemplateLookupException(
"Cant locate template for uri %r" % uri)
def put_string(self, uri, text):
"""Place a new :class:`.Template` object into this
:class:`.TemplateLookup`, based on the given string of
``text``.
"""
self._collection[uri] = Template(
text,
lookup=self,
uri=uri,
**self.template_args)
def put_template(self, uri, template):
"""Place a new :class:`.Template` object into this
:class:`.TemplateLookup`, based on the given
:class:`.Template` object.
"""
self._collection[uri] = template
|
sburnett/seattle
|
refs/heads/master
|
softwareupdater/generatekeys.py
|
1
|
# Simple keypair creator for the software updater.
import sys
import random
import repyhelper
# Translate the repy RSA implementation and pull its functions into scope.
repyhelper.translate_and_import("rsa.repy")
# rsa.repy expects a module-level randomfloat function for its randomness.
randomfloat = random.random
if len(sys.argv) not in (2, 3):
    print "usage: python generatekeys.py keyname [keylength]"
    sys.exit(1)
pubfn = sys.argv[1] + '.publickey'
privfn = sys.argv[1] + '.privatekey'
if len(sys.argv) == 3:
    keylength = int(sys.argv[2])
else:
    keylength = 1024
print "Generating key files called '"+pubfn+"' and '"+privfn+"' of length "+str(keylength)+"."
print "This may take a moment..."
keys = rsa_gen_pubpriv_keys(keylength)
rsa_publickey_to_file(keys[0],pubfn)
rsa_privatekey_to_file(keys[1],privfn)
print "Success!"
|
edulramirez/nova
|
refs/heads/master
|
nova/tests/unit/objects/test_block_device.py
|
5
|
# Copyright 2013 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import six
from nova.cells import rpcapi as cells_rpcapi
from nova import context
from nova import db
from nova import exception
from nova import objects
from nova.objects import block_device as block_device_obj
from nova import test
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
from nova.tests.unit.objects import test_objects
class _TestBlockDeviceMappingObject(object):
def fake_bdm(self, instance=None):
instance = instance or {}
fake_bdm = fake_block_device.FakeDbBlockDeviceDict({
'id': 123,
'instance_uuid': instance.get('uuid') or 'fake-instance',
'device_name': '/dev/sda2',
'source_type': 'snapshot',
'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}",
'snapshot_id': 'fake-snapshot-id-1',
'boot_index': -1
})
if instance:
fake_bdm['instance'] = instance
return fake_bdm
def _test_save(self, cell_type=None, update_device_name=False):
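        # Exercises BlockDeviceMapping.save() with and without cells enabled,
        # checking both the DB update payload and the fan-out to the cells API.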
if cell_type:
self.flags(enable=True, cell_type=cell_type, group='cells')
else:
self.flags(enable=False, group='cells')
create = False
fake_bdm = self.fake_bdm()
with test.nested(
mock.patch.object(
db, 'block_device_mapping_update', return_value=fake_bdm),
mock.patch.object(
cells_rpcapi.CellsAPI, 'bdm_update_or_create_at_top')
) as (bdm_update_mock, cells_update_mock):
bdm_object = objects.BlockDeviceMapping(context=self.context)
bdm_object.id = 123
bdm_object.volume_id = 'fake_volume_id'
if update_device_name:
bdm_object.device_name = '/dev/vda'
create = None
bdm_object.save()
if update_device_name:
bdm_update_mock.assert_called_once_with(
self.context, 123,
{'volume_id': 'fake_volume_id',
'device_name': '/dev/vda'},
legacy=False)
else:
bdm_update_mock.assert_called_once_with(
self.context, 123, {'volume_id': 'fake_volume_id'},
legacy=False)
if cell_type != 'compute':
self.assertFalse(cells_update_mock.called)
else:
self.assertEqual(1, cells_update_mock.call_count)
                self.assertGreater(len(cells_update_mock.call_args[0]), 1)
self.assertIsInstance(cells_update_mock.call_args[0][1],
block_device_obj.BlockDeviceMapping)
self.assertEqual({'create': create},
cells_update_mock.call_args[1])
def test_save_nocells(self):
self._test_save()
def test_save_apicell(self):
self._test_save(cell_type='api')
def test_save_computecell(self):
self._test_save(cell_type='compute')
def test_save_computecell_device_name_changed(self):
self._test_save(cell_type='compute', update_device_name=True)
def test_save_instance_changed(self):
bdm_object = objects.BlockDeviceMapping(context=self.context)
bdm_object.instance = objects.Instance()
self.assertRaises(exception.ObjectActionError,
bdm_object.save)
@mock.patch.object(db, 'block_device_mapping_update', return_value=None)
def test_save_not_found(self, bdm_update):
bdm_object = objects.BlockDeviceMapping(context=self.context)
bdm_object.id = 123
self.assertRaises(exception.BDMNotFound, bdm_object.save)
@mock.patch.object(db, 'block_device_mapping_get_by_volume_id')
def test_get_by_volume_id(self, get_by_vol_id):
get_by_vol_id.return_value = self.fake_bdm()
vol_bdm = objects.BlockDeviceMapping.get_by_volume_id(
self.context, 'fake-volume-id')
for attr in block_device_obj.BLOCK_DEVICE_OPTIONAL_ATTRS:
self.assertFalse(vol_bdm.obj_attr_is_set(attr))
@mock.patch.object(db, 'block_device_mapping_get_by_volume_id')
def test_get_by_volume_id_not_found(self, get_by_vol_id):
get_by_vol_id.return_value = None
self.assertRaises(exception.VolumeBDMNotFound,
objects.BlockDeviceMapping.get_by_volume_id,
self.context, 'fake-volume-id')
@mock.patch.object(db, 'block_device_mapping_get_by_volume_id')
    def test_get_by_volume_instance_uuid_mismatch(self, get_by_vol_id):
fake_bdm_vol = self.fake_bdm(instance={'uuid': 'other-fake-instance'})
get_by_vol_id.return_value = fake_bdm_vol
self.assertRaises(exception.InvalidVolume,
objects.BlockDeviceMapping.get_by_volume_id,
self.context, 'fake-volume-id',
instance_uuid='fake-instance')
@mock.patch.object(db, 'block_device_mapping_get_by_volume_id')
def test_get_by_volume_id_with_expected(self, get_by_vol_id):
get_by_vol_id.return_value = self.fake_bdm(
fake_instance.fake_db_instance())
vol_bdm = objects.BlockDeviceMapping.get_by_volume_id(
self.context, 'fake-volume-id', expected_attrs=['instance'])
for attr in block_device_obj.BLOCK_DEVICE_OPTIONAL_ATTRS:
self.assertTrue(vol_bdm.obj_attr_is_set(attr))
get_by_vol_id.assert_called_once_with(self.context, 'fake-volume-id',
['instance'])
def _test_create_mocked(self, cell_type=None, update_or_create=False,
device_name=None):
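        # Covers create() and update_or_create() under each cells mode; in an
        # API cell both operations must refuse to run locally.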
if cell_type:
self.flags(enable=True, cell_type=cell_type, group='cells')
else:
self.flags(enable=False, group='cells')
values = {'source_type': 'volume', 'volume_id': 'fake-vol-id',
'destination_type': 'volume',
'instance_uuid': 'fake-instance'}
if device_name:
values['device_name'] = device_name
fake_bdm = fake_block_device.FakeDbBlockDeviceDict(values)
with test.nested(
mock.patch.object(
db, 'block_device_mapping_create', return_value=fake_bdm),
mock.patch.object(
db, 'block_device_mapping_update_or_create',
return_value=fake_bdm),
mock.patch.object(cells_rpcapi.CellsAPI,
'bdm_update_or_create_at_top')
) as (bdm_create_mock, bdm_update_or_create_mock, cells_update_mock):
bdm = objects.BlockDeviceMapping(context=self.context, **values)
if update_or_create:
method = bdm.update_or_create
else:
method = bdm.create
if cell_type == 'api':
self.assertRaises(exception.ObjectActionError,
method)
else:
method()
if update_or_create:
bdm_update_or_create_mock.assert_called_once_with(
self.context, values, legacy=False)
else:
bdm_create_mock.assert_called_once_with(
self.context, values, legacy=False)
if cell_type == 'compute' and 'device_name' in values:
self.assertEqual(1, cells_update_mock.call_count)
                self.assertGreater(len(cells_update_mock.call_args[0]), 1)
self.assertEqual(self.context,
cells_update_mock.call_args[0][0])
self.assertIsInstance(cells_update_mock.call_args[0][1],
block_device_obj.BlockDeviceMapping)
self.assertEqual({'create': update_or_create or None},
cells_update_mock.call_args[1])
else:
self.assertFalse(cells_update_mock.called)
def test_create_nocells(self):
self._test_create_mocked()
def test_update_or_create(self):
self._test_create_mocked(update_or_create=True)
def test_create_apicell(self):
self._test_create_mocked(cell_type='api')
def test_update_or_create_apicell(self):
self._test_create_mocked(cell_type='api', update_or_create=True)
def test_create_computecell(self):
self._test_create_mocked(cell_type='compute')
def test_update_or_create_computecell(self):
self._test_create_mocked(cell_type='compute', update_or_create=True)
def test_device_name_compute_cell(self):
self._test_create_mocked(cell_type='compute', device_name='/dev/xvdb')
def test_create(self):
values = {'source_type': 'volume', 'volume_id': 'fake-vol-id',
'destination_type': 'volume',
'instance_uuid': 'fake-instance'}
bdm = objects.BlockDeviceMapping(context=self.context, **values)
with mock.patch.object(cells_rpcapi.CellsAPI,
'bdm_update_or_create_at_top'):
bdm.create()
for k, v in six.iteritems(values):
self.assertEqual(v, getattr(bdm, k))
def test_create_fails(self):
values = {'source_type': 'volume', 'volume_id': 'fake-vol-id',
'destination_type': 'volume',
'instance_uuid': 'fake-instance'}
bdm = objects.BlockDeviceMapping(context=self.context, **values)
bdm.create()
self.assertRaises(exception.ObjectActionError,
bdm.create)
def test_create_fails_instance(self):
values = {'source_type': 'volume', 'volume_id': 'fake-vol-id',
'destination_type': 'volume',
'instance_uuid': 'fake-instance',
'instance': objects.Instance()}
bdm = objects.BlockDeviceMapping(context=self.context, **values)
self.assertRaises(exception.ObjectActionError,
bdm.create)
def _test_destroy_mocked(self, cell_type=None):
values = {'source_type': 'volume', 'volume_id': 'fake-vol-id',
'destination_type': 'volume', 'id': 1,
'instance_uuid': 'fake-instance', 'device_name': 'fake'}
if cell_type:
self.flags(enable=True, cell_type=cell_type, group='cells')
else:
self.flags(enable=False, group='cells')
with test.nested(
mock.patch.object(db, 'block_device_mapping_destroy'),
mock.patch.object(cells_rpcapi.CellsAPI, 'bdm_destroy_at_top')
) as (bdm_del, cells_destroy):
bdm = objects.BlockDeviceMapping(context=self.context, **values)
bdm.destroy()
bdm_del.assert_called_once_with(self.context, values['id'])
if cell_type != 'compute':
self.assertFalse(cells_destroy.called)
else:
cells_destroy.assert_called_once_with(
self.context, values['instance_uuid'],
device_name=values['device_name'],
volume_id=values['volume_id'])
def test_destroy_nocells(self):
self._test_destroy_mocked()
def test_destroy_apicell(self):
self._test_destroy_mocked(cell_type='api')
def test_destroy_computecell(self):
self._test_destroy_mocked(cell_type='compute')
def test_is_image_true(self):
bdm = objects.BlockDeviceMapping(context=self.context,
source_type='image')
self.assertTrue(bdm.is_image)
def test_is_image_false(self):
bdm = objects.BlockDeviceMapping(context=self.context,
source_type='snapshot')
self.assertFalse(bdm.is_image)
def test_is_volume_true(self):
bdm = objects.BlockDeviceMapping(context=self.context,
destination_type='volume')
self.assertTrue(bdm.is_volume)
def test_is_volume_false(self):
bdm = objects.BlockDeviceMapping(context=self.context,
destination_type='local')
self.assertFalse(bdm.is_volume)
class TestBlockDeviceMappingObject(test_objects._LocalTest,
_TestBlockDeviceMappingObject):
pass
class TestRemoteBlockDeviceMappingObject(test_objects._RemoteTest,
_TestBlockDeviceMappingObject):
pass
class _TestBlockDeviceMappingListObject(object):
def fake_bdm(self, bdm_id):
fake_bdm = fake_block_device.FakeDbBlockDeviceDict({
'id': bdm_id, 'instance_uuid': 'fake-instance',
'device_name': '/dev/sda2',
'source_type': 'snapshot',
'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}",
'snapshot_id': 'fake-snapshot-id-1',
'boot_index': -1,
})
return fake_bdm
@mock.patch.object(db, 'block_device_mapping_get_all_by_instance')
def test_get_by_instance_uuid(self, get_all_by_inst):
fakes = [self.fake_bdm(123), self.fake_bdm(456)]
get_all_by_inst.return_value = fakes
bdm_list = (
objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, 'fake_instance_uuid'))
for faked, got in zip(fakes, bdm_list):
self.assertIsInstance(got, objects.BlockDeviceMapping)
self.assertEqual(faked['id'], got.id)
@mock.patch.object(db, 'block_device_mapping_get_all_by_instance')
def test_get_by_instance_uuid_no_result(self, get_all_by_inst):
get_all_by_inst.return_value = None
bdm_list = (
objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, 'fake_instance_uuid'))
self.assertEqual(0, len(bdm_list))
class TestBlockDeviceMappingListObject(test_objects._LocalTest,
_TestBlockDeviceMappingListObject):
pass
class TestRemoteBlockDeviceMappingListObject(
test_objects._RemoteTest, _TestBlockDeviceMappingListObject):
pass
class TestBlockDeviceUtils(test.NoDBTestCase):
def test_make_list_from_dicts(self):
ctx = context.get_admin_context()
dicts = [{'id': 1}, {'id': 2}]
objs = block_device_obj.block_device_make_list_from_dicts(ctx,
dicts)
self.assertIsInstance(objs, block_device_obj.BlockDeviceMappingList)
self.assertEqual(2, len(objs))
self.assertEqual(1, objs[0].id)
self.assertEqual(2, objs[1].id)
def test_make_list_from_dicts_empty(self):
ctx = context.get_admin_context()
objs = block_device_obj.block_device_make_list_from_dicts(ctx, [])
self.assertIsInstance(objs, block_device_obj.BlockDeviceMappingList)
self.assertEqual(0, len(objs))
|
aduric/crossfit
|
refs/heads/master
|
nonrel/tests/regressiontests/templates/nodelist.py
|
54
|
from django.template.loader import get_template_from_string
from django.template import VariableNode
from django.utils.unittest import TestCase
class NodelistTest(TestCase):
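    # Each test checks that get_nodes_by_type() finds the single {{ a }}
    # VariableNode nested inside the surrounding block tag.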
def test_for(self):
source = '{% for i in 1 %}{{ a }}{% endfor %}'
template = get_template_from_string(source)
        var_nodes = template.nodelist.get_nodes_by_type(VariableNode)
        self.assertEqual(len(var_nodes), 1)
def test_if(self):
source = '{% if x %}{{ a }}{% endif %}'
template = get_template_from_string(source)
        var_nodes = template.nodelist.get_nodes_by_type(VariableNode)
        self.assertEqual(len(var_nodes), 1)
def test_ifequal(self):
source = '{% ifequal x y %}{{ a }}{% endifequal %}'
template = get_template_from_string(source)
        var_nodes = template.nodelist.get_nodes_by_type(VariableNode)
        self.assertEqual(len(var_nodes), 1)
def test_ifchanged(self):
source = '{% ifchanged x %}{{ a }}{% endifchanged %}'
template = get_template_from_string(source)
        var_nodes = template.nodelist.get_nodes_by_type(VariableNode)
        self.assertEqual(len(var_nodes), 1)
|
Glasgow2015/team-10
|
refs/heads/master
|
env/lib/python2.7/site-packages/setuptools/script template.py
|
486
|
# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r
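# %(spec)r and %(script_name)r are placeholders that easy_install fills via
# %-substitution when it writes a concrete console script from this template.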
__requires__ = """%(spec)r"""
import pkg_resources
pkg_resources.run_script("""%(spec)r""", """%(script_name)r""")
|
srznew/heat
|
refs/heads/master
|
heat/engine/clients/os/ceilometer.py
|
8
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ceilometerclient import client as cc
from ceilometerclient import exc
from ceilometerclient.openstack.common.apiclient import exceptions as api_exc
from heat.engine.clients import client_plugin
class CeilometerClientPlugin(client_plugin.ClientPlugin):
exceptions_module = [exc, api_exc]
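    # Chained assignment: binds the METERING class attribute to 'metering' and
    # service_types to ['metering'] in a single statement.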
service_types = [METERING] = ['metering']
def _create(self):
con = self.context
endpoint_type = self._get_client_option('ceilometer', 'endpoint_type')
endpoint = self.url_for(service_type=self.METERING,
endpoint_type=endpoint_type)
args = {
'auth_url': con.auth_url,
'service_type': self.METERING,
'project_name': con.tenant,
'token': lambda: self.auth_token,
'endpoint_type': endpoint_type,
'os_endpoint': endpoint,
'cacert': self._get_client_option('ceilometer', 'ca_file'),
'cert_file': self._get_client_option('ceilometer', 'cert_file'),
'key_file': self._get_client_option('ceilometer', 'key_file'),
'insecure': self._get_client_option('ceilometer', 'insecure')
}
return cc.get_client('2', **args)
def is_not_found(self, ex):
return isinstance(ex, (exc.HTTPNotFound, api_exc.NotFound))
def is_over_limit(self, ex):
return isinstance(ex, exc.HTTPOverLimit)
def is_conflict(self, ex):
return isinstance(ex, exc.HTTPConflict)
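# Usage sketch (hedged: assumes the base ClientPlugin caches _create() behind
# a client() accessor, as it did in this era of Heat):
#   plugin = CeilometerClientPlugin(context)
#   ceilometer = plugin.client()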
|