| repo_name (string, 5-100 chars) | ref (string, 12-67) | path (string, 4-244) | copies (string, 1-8) | content (string, 0-1.05M, nullable ⌀) |
|---|---|---|---|---|
krast/suse_xen | refs/heads/master | tools/python/xen/xend/server/netif2.py | 43 |
#============================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (C) 2004, 2005 Mike Wray <mike.wray@hp.com>
# Copyright (C) 2005 XenSource Ltd
# Copyright (C) 2008 Citrix Systems Inc.
#============================================================================
#
# Based closely on netif.py.
#
"""Support for virtual network interfaces, version 2.
"""
import os
import random
import re
import time
from xen.xend import XendOptions
from xen.xend.server.DevController import DevController
from xen.xend.XendError import VmError
from xen.xend.XendXSPolicyAdmin import XSPolicyAdminInstance
from xen.xend.xenstore.xstransact import xstransact
import xen.util.xsm.xsm as security
from xen.xend.XendLogging import log
xoptions = XendOptions.instance()
def randomMAC():
"""Generate a random MAC address.
Uses OUI (Organizationally Unique Identifier) 00-16-3E, allocated to
Xensource, Inc. The OUI list is available at
http://standards.ieee.org/regauth/oui/oui.txt.
The remaining 3 fields are random, with the first bit of the first
    random field set to 0.
@return: MAC address string
"""
mac = [ 0x00, 0x16, 0x3e,
random.randint(0x00, 0x7f),
random.randint(0x00, 0xff),
random.randint(0x00, 0xff) ]
return ':'.join(map(lambda x: "%02x" % x, mac))
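# A generated address therefore always starts with the Xensource OUI,
# e.g. (hypothetical output) randomMAC() -> '00:16:3e:4c:1a:9f'.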
class NetifController2(DevController):
def __init__(self, vm):
DevController.__init__(self, vm)
def getDeviceDetails(self, config):
"""@see DevController.getDeviceDetails"""
devid = self.allocateDeviceID()
bridge = config.get('bridge')
back_mac = config.get('back_mac')
if not back_mac:
if bridge:
back_mac = "fe:ff:ff:ff:ff:ff"
else:
back_mac = randomMAC()
front_mac = config.get('front_mac') or randomMAC()
front_trust = config.get("trusted") or "0"
back_trust = config.get("back_trusted") or "1"
max_bypasses = config.get("max_bypasses") or "5"
pdev = config.get('pdev')
front_filter = config.get("front_filter_mac")
        if front_filter is None:
if back_trust == "0":
front_filter = "1"
else:
front_filter = "0"
back_filter = config.get("filter_mac")
        if back_filter is None:
if front_trust == "0":
back_filter = "1"
else:
back_filter = "0"
back = { 'mac': back_mac, 'remote-mac': front_mac,
'handle': "%i" % devid, 'local-trusted': back_trust,
'remote-trusted': front_trust, 'filter-mac': back_filter,
'max-bypasses': max_bypasses }
front = { 'mac': front_mac, 'remote-mac': back_mac,
'local-trusted': front_trust, 'remote-trusted': back_trust,
'filter-mac': front_filter }
if bridge:
back['bridge'] = bridge
if pdev:
back['pdev'] = pdev
return (devid, back, front)
def getDeviceConfiguration(self, devid, transaction = None):
"""@see DevController.configuration"""
if transaction is None:
read_fn = xstransact.Read
else:
read_fn = transaction.read
def front_read(x):
return read_fn(frontpath + x)
def back_read(x):
return read_fn(backpath + x)
result = DevController.getDeviceConfiguration(self, devid, transaction)
dev = self.convertToDeviceNumber(devid)
frontpath = self.frontendPath(dev) + "/"
backpath = front_read("backend") + "/"
front_mac = front_read("mac")
back_mac = back_read("mac")
front_trusted = back_read("remote-trusted")
back_trusted = back_read("local-trusted")
max_bypasses = back_read("max-bypasses")
bridge = back_read("bridge")
pdev = back_read("pdev")
if front_mac:
result["front_mac"] = front_mac
if back_mac:
result["back_mac"] = back_mac
if front_trusted:
result["front_trusted"] = front_trusted
if back_trusted:
result["back_trusted"] = back_trusted
if bridge:
result["bridge"] = bridge
if pdev:
result["pdev"] = pdev
if max_bypasses:
result["max-bypasses"] = max_bypasses
return result
def destroyDevice(self, devid, force):
dev = self.convertToDeviceNumber(devid)
self.writeBackend(dev, "online", "0")
if force:
self.writeBackend(dev, "shutdown-request", "force")
else:
self.writeBackend(dev, "shutdown-request", "normal")
self.vm._removeVm("device/%s/%d" % (self.deviceClass, dev))
|
jonparrott/gcloud-python | refs/heads/master | redis/google/cloud/redis_v1/__init__.py | 3 |
# -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from google.cloud.redis_v1 import types
from google.cloud.redis_v1.gapic import cloud_redis_client
from google.cloud.redis_v1.gapic import enums
class CloudRedisClient(cloud_redis_client.CloudRedisClient):
__doc__ = cloud_redis_client.CloudRedisClient.__doc__
enums = enums
__all__ = (
'enums',
'types',
'CloudRedisClient',
)
|
arenadata/ambari | refs/heads/branch-adh-1.6 | ambari-server/src/main/resources/stacks/ADH/1.4/services/ATLAS/package/scripts/atlas_client.py | 2 |
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from resource_management import *
from metadata import metadata
class AtlasClient(Script):
def get_component_name(self):
return "atlas-client"
def pre_upgrade_restart(self, env, upgrade_type=None):
import params
env.set_params(params)
def install(self, env):
self.install_packages(env)
self.configure(env)
def configure(self, env, upgrade_type=None, config_dir=None):
import params
env.set_params(params)
metadata('client')
def status(self, env):
raise ClientComponentHasNoStatus()
if __name__ == "__main__":
AtlasClient().execute()
|
antkillerfarm/antkillerfarm_crazy | refs/heads/master | python/ml/tensorflow/hello_mnist.py | 1 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
x = tf.placeholder(tf.float32, [None, 784])
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.matmul(x, W) + b
y_ = tf.placeholder(tf.float32, [None, 10])
cross_entropy = tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y))
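# Note: softmax_cross_entropy_with_logits takes the raw logits y and applies
# the softmax internally, which is numerically more stable than computing
# tf.log(tf.nn.softmax(y)) by hand.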
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()
# Train
for _ in range(1000):
batch_xs, batch_ys = mnist.train.next_batch(100)
sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
# Test trained model
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print(sess.run(accuracy, feed_dict={x: mnist.test.images,
y_: mnist.test.labels}))
|
tisnik/fabric8-analytics-common | refs/heads/master | integration-tests/features/src/attribute_checks.py | 1 |
"""Utility functions to check attributes returned in API responses and read from the AWS S3."""
import datetime
import re
def check_attribute_presence(node, attribute_name):
"""Check the attribute presence in the given dictionary or list.
To be used to check the deserialized JSON data etc.
"""
found_attributes = node if type(node) is list else node.keys()
assert attribute_name in node, \
"'%s' attribute is expected in the node, " \
"found: %s attributes " % (attribute_name, ", ".join(found_attributes))
def check_attributes_presence(node, attribute_names):
    """Check the presence of all attributes in the dictionary or in the list.
    To be used to check the deserialized JSON data etc.
    """
    for attribute_name in attribute_names:
        check_attribute_presence(node, attribute_name)
def check_and_get_attribute(node, attribute_name):
"""Check the attribute presence and if the attribute is found, return its value."""
check_attribute_presence(node, attribute_name)
return node[attribute_name]
def check_uuid(uuid):
"""Check if the string contains a proper UUID.
Supported format: 71769af6-0a39-4242-94be-1f84f04c8a56
"""
regex = re.compile(
r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z',
re.I)
match = regex.match(uuid)
return bool(match)
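# Illustrative calls (values are hypothetical), kept as comments so importing
# this module stays side-effect free:
#   check_uuid("71769af6-0a39-4242-94be-1f84f04c8a56")  # -> True
#   check_uuid("not-a-uuid")                            # -> False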
def is_string(attribute):
"""Check if given attribute is a string."""
assert attribute is not None
assert isinstance(attribute, str)
def is_posint(value):
"""Check if given attribute is positive integer."""
assert isinstance(value, int)
assert value > 0
def is_posint_or_zero(value):
"""Check if given attribute is positive integer or zero."""
assert isinstance(value, int)
assert value >= 0
def check_date(date):
"""Check if the string contains proper date value."""
is_string(date)
# just try to parse the string to check whether
# the ValueError exception is raised or not
datetime.datetime.strptime(date, "%Y-%m-%d")
def check_response_time(time):
"""Check the response time which is a real number with 'ms' appended."""
is_string(time)
regex = re.compile("^[0-9]+\\.[0-9]+ ms$")
assert regex.match(time)
def check_timestamp(timestamp):
"""Check if the string contains proper timestamp value.
The following four formats are supported:
2017-07-19 13:05:25.041688
2017-07-17T09:05:29.101780
2017-07-19 13:05:25
2017-07-17T09:05:29
"""
is_string(timestamp)
    # some attributes contain a timestamp without the fractional-seconds part,
    # so we need to take care of it
if len(timestamp) == len("YYYY-mm-dd HH:MM:SS") and '.' not in timestamp:
timestamp += '.0'
assert len(timestamp) >= len("YYYY-mm-dd HH:MM:SS.")
# we have to support the following formats:
# 2017-07-19 13:05:25.041688
# 2017-07-17T09:05:29.101780
# -> it is needed to distinguish the 'T' separator
#
    # (please see https://www.tutorialspoint.com/python/time_strptime.htm for
    # an explanation of how the time format should look)
timeformat = "%Y-%m-%d %H:%M:%S.%f"
if timestamp[10] == "T":
timeformat = "%Y-%m-%dT%H:%M:%S.%f"
# just try to parse the string to check whether
# the ValueError exception is raised or not
datetime.datetime.strptime(timestamp, timeformat)
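# Illustrative calls using the formats from the docstring above; each returns
# without raising ValueError:
#   check_timestamp("2017-07-19 13:05:25.041688")
#   check_timestamp("2017-07-17T09:05:29")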
def check_job_token_attributes(token):
"""Check that the given JOB token contains all required attributes."""
attribs = ["limit", "remaining", "reset"]
for attr in attribs:
assert attr in token
assert int(token[attr]) >= 0
def check_status_attribute(data):
"""Check the value of the status attribute, that should contain just two allowed values."""
status = check_and_get_attribute(data, "status")
assert status in ["success", "error"]
def check_summary_attribute(data):
"""Check the summary attribute that can be found all generated metadata."""
summary = check_and_get_attribute(data, "summary")
assert type(summary) is list or type(summary) is dict
def release_string(ecosystem, package, version=None):
"""Construct a string with ecosystem:package or ecosystem:package:version tuple."""
return "{e}:{p}:{v}".format(e=ecosystem, p=package, v=version)
def check_release_attribute(data, ecosystem, package, version=None):
"""Check the content of _release attribute.
Check that the attribute _release contains proper release string for given ecosystem
and package.
"""
check_attribute_presence(data, "_release")
assert data["_release"] == release_string(ecosystem, package, version)
def check_schema_attribute(data, expected_schema_name, expected_schema_version):
"""Check the content of the schema attribute.
    This attribute should contain a dictionary with name and version, which are checked as well.
"""
# read the toplevel attribute 'schema'
schema = check_and_get_attribute(data, "schema")
# read attributes from the 'schema' node
name = check_and_get_attribute(schema, "name")
version = check_and_get_attribute(schema, "version")
# check the schema name
assert name == expected_schema_name, "Schema name '{n1}' is different from " \
"expected name '{n2}'".format(n1=name, n2=expected_schema_name)
# check the schema version (ATM we are able to check just one fixed version)
assert version == expected_schema_version, "Schema version {v1} is different from expected " \
"version {v2}".format(v1=version, v2=expected_schema_version)
def check_audit_metadata(data):
"""Check the metadata stored in the _audit attribute.
Check if all common attributes can be found in the audit node
in the component or package metadata.
"""
check_attribute_presence(data, "_audit")
audit = data["_audit"]
check_attribute_presence(audit, "version")
assert audit["version"] == "v1"
check_attribute_presence(audit, "started_at")
check_timestamp(audit["started_at"])
check_attribute_presence(audit, "ended_at")
check_timestamp(audit["ended_at"])
def get_details_node(context):
"""Get content of details node, given it exists."""
data = context.s3_data
return check_and_get_attribute(data, 'details')
def check_cve_pattern(with_score):
"""Return the pattern for matching CVE entry."""
if with_score:
        # please note that in graph DB, the CVE entries have the following format:
        # CVE-2012-1150:5.0
        # don't ask me why, but the score is stored in the same field as the ID
        # itself, with the ':' character used as a separator
return r"CVE-(\d{4})-\d{4,}:(\d+\.\d+)"
else:
return r"CVE-(\d{4})-\d{4,}"
def check_cve_value(cve, with_score=False):
"""Check CVE values in CVE records."""
pattern = check_cve_pattern(with_score)
match = re.fullmatch(pattern, cve)
assert match is not None, "Improper CVE number %s" % cve
year = int(match.group(1))
current_year = datetime.datetime.utcnow().year
# well the lower limit is a bit arbitrary
# (according to SRT guys it should be 1999)
assert year >= 1999 and year <= current_year
if with_score:
score = float(match.group(2))
assert score >= 0.0 and score <= 10.0
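# Illustrative calls (the IDs are hypothetical): check_cve_value("CVE-2012-1150")
# accepts a plain ID, while check_cve_value("CVE-2012-1150:5.0", with_score=True)
# accepts the graph-DB format described above.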
def check_cve_score(score):
"""Check the CVE score value."""
assert isinstance(score, float)
assert score >= 0.0 and score <= 10.0
def check_hash_value(hash_value):
"""Check if the value is proper hash in hex."""
assert hash_value is not None
pattern = r"[A-Za-z0-9]+"
match = re.fullmatch(pattern, hash_value)
assert match is not None, "Improper hash value %s" % hash_value
def check_year(year):
"""Check the attribute with year value."""
# sometimes the attribute is stored as a string
if isinstance(year, str):
year = int(year)
assert year >= 1970
# some sane max value is needed
assert year < 2100
def check_month(month):
"""Check the attribute with month number."""
# sometimes the attribute is stored as a string
if isinstance(month, str):
month = int(month)
assert month >= 1
assert month <= 12
def check_day(day):
"""Check the attribute with day number."""
# sometimes the attribute is stored as a string
if isinstance(day, str):
day = int(day)
assert day >= 1
assert day <= 31
|
STIXProject/python-stix | refs/heads/master | stix/bindings/extensions/test_mechanism/__init__.py | 9 |
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
|
VielSoft/odoo | refs/heads/8.0 | addons/account/wizard/account_statement_from_invoice.py | 224 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
class account_statement_from_invoice_lines(osv.osv_memory):
"""
Generate Entries by Statement from Invoices
"""
_name = "account.statement.from.invoice.lines"
_description = "Entries by Statement from Invoices"
_columns = {
'line_ids': fields.many2many('account.move.line', 'account_move_line_relation', 'move_id', 'line_id', 'Invoices'),
}
def populate_statement(self, cr, uid, ids, context=None):
context = dict(context or {})
statement_id = context.get('statement_id', False)
if not statement_id:
return {'type': 'ir.actions.act_window_close'}
data = self.read(cr, uid, ids, context=context)[0]
line_ids = data['line_ids']
if not line_ids:
return {'type': 'ir.actions.act_window_close'}
line_obj = self.pool.get('account.move.line')
statement_obj = self.pool.get('account.bank.statement')
statement_line_obj = self.pool.get('account.bank.statement.line')
currency_obj = self.pool.get('res.currency')
statement = statement_obj.browse(cr, uid, statement_id, context=context)
line_date = statement.date
        # for each selected move line
for line in line_obj.browse(cr, uid, line_ids, context=context):
ctx = context.copy()
            # use the payment date for the currency conversion
ctx['date'] = line_date
amount = 0.0
if line.debit > 0:
amount = line.debit
elif line.credit > 0:
amount = -line.credit
if line.amount_currency:
if line.company_id.currency_id.id != statement.currency.id:
                    # Convert only when the company currency differs from the
                    # statement currency; when they are the same, the debit/credit
                    # field already contains the amount in the right currency, so
                    # we avoid re-converting it and creating spurious gain/loss
                    # exchange entries.
amount = currency_obj.compute(cr, uid, line.currency_id.id,
statement.currency.id, line.amount_currency, context=ctx)
elif (line.invoice and line.invoice.currency_id.id != statement.currency.id):
amount = currency_obj.compute(cr, uid, line.invoice.currency_id.id,
statement.currency.id, amount, context=ctx)
context.update({'move_line_ids': [line.id],
'invoice_id': line.invoice.id})
statement_line_obj.create(cr, uid, {
'name': line.name or '?',
'amount': amount,
'partner_id': line.partner_id.id,
'statement_id': statement_id,
'ref': line.ref,
'date': statement.date,
'amount_currency': line.amount_currency,
'currency_id': line.currency_id.id,
}, context=context)
return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
googleads/google-ads-python | refs/heads/master | google/ads/googleads/v7/services/services/keyword_view_service/transports/grpc.py | 1 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.ads.googleads.v7.resources.types import keyword_view
from google.ads.googleads.v7.services.types import keyword_view_service
from .base import KeywordViewServiceTransport, DEFAULT_CLIENT_INFO
class KeywordViewServiceGrpcTransport(KeywordViewServiceTransport):
"""gRPC backend transport for KeywordViewService.
Service to manage keyword views.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
def __init__(
self,
*,
host: str = "googleads.googleapis.com",
credentials: credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._ssl_channel_credentials = ssl_channel_credentials
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
warnings.warn(
"api_mtls_endpoint and client_cert_source are deprecated",
DeprecationWarning,
)
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
if credentials is None:
credentials, _ = auth.default(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
ssl_credentials = SslCredentials().ssl_credentials
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
ssl_credentials=ssl_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._ssl_channel_credentials = ssl_credentials
else:
host = host if ":" in host else host + ":443"
if credentials is None:
credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
ssl_credentials=ssl_channel_credentials,
scopes=self.AUTH_SCOPES,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._stubs = {} # type: Dict[str, Callable]
# Run the base constructor.
super().__init__(
host=host, credentials=credentials, client_info=client_info,
)
@classmethod
def create_channel(
cls,
host: str = "googleads.googleapis.com",
credentials: credentials.Credentials = None,
scopes: Optional[Sequence[str]] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
            host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
scopes=scopes or cls.AUTH_SCOPES,
**kwargs,
)
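    # Hedged usage sketch (comments only; assumes application default
    # credentials are available in the environment):
    #   channel = KeywordViewServiceGrpcTransport.create_channel()
    #   transport = KeywordViewServiceGrpcTransport(channel=channel)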
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def get_keyword_view(
self,
) -> Callable[
[keyword_view_service.GetKeywordViewRequest], keyword_view.KeywordView
]:
r"""Return a callable for the
get keyword view
method over gRPC.
Returns the requested keyword view in full detail.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RequestError <>`__
Returns:
Callable[[~.GetKeywordViewRequest],
~.KeywordView]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_keyword_view" not in self._stubs:
self._stubs["get_keyword_view"] = self.grpc_channel.unary_unary(
"/google.ads.googleads.v7.services.KeywordViewService/GetKeywordView",
request_serializer=keyword_view_service.GetKeywordViewRequest.serialize,
response_deserializer=keyword_view.KeywordView.deserialize,
)
return self._stubs["get_keyword_view"]
__all__ = ("KeywordViewServiceGrpcTransport",)
|
XiaodunServerGroup/xiaodun-platform | refs/heads/master | cms/djangoapps/contentstore/management/commands/__init__.py | 12133432 | |
adrn/streams | refs/heads/master | scripts/make_streams2.py | 1 |
# coding: utf-8
""" Turn our model into a generative model. """
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import os, sys
# Third-party
import astropy.units as u
import astropy.coordinates as coord
import matplotlib.pyplot as plt
import numpy as np
from scipy.signal import argrelmin
# Project
import streams.io as io
from streams.coordinates import _gc_to_hel, _hel_to_gc
from streams.coordinates.frame import heliocentric
from streams.potential.lm10 import LawMajewski2010
from streams.integrate import LeapfrogIntegrator
from streams.integrate.stream_generator import StreamGenerator
from streams.util import project_root
plot_path = os.path.join(project_root, "plots")
potential = LawMajewski2010()
nparticles = 2000
T = 3000.
dt = 0.1
_m = "2.e4"
np.random.seed(42)
# Set up a bit to specify leading or trailing
tail_bit = np.ones(nparticles)
tail_bit[:nparticles//2] = -1.
mass = float(_m)
# simulation = io.SgrSimulation(mass=_m)
# particles = simulation.particles(N=1000, expr="tub!=0")
# satellite = simulation.satellite()\
# .to_frame(heliocentric)
# s_hel = satellite._X.copy()
# s_gc = _hel_to_gc(s_hel)
s_gc = np.array([[8.363919011, 0.243352771, 16.864546659,
-0.04468993, -0.12392801, -0.01664498]]) # Pal5
s_hel = _gc_to_hel(s_gc)
# First integrate the orbit of the satellite back to get initial conditions
acc = np.zeros_like(s_gc[:,:3])
integrator = LeapfrogIntegrator(potential._acceleration_at,
np.array(s_gc[:,:3]), np.array(s_gc[:,3:]),
args=(s_gc.shape[0], acc))
t, rs, vs = integrator.run(t1=T, t2=0, dt=-dt)
init_r,init_v = rs[-1], vs[-1]
# integrate the orbit of the satellite
acc = np.zeros_like(s_gc[:,:3])
integrator = LeapfrogIntegrator(potential._acceleration_at,
init_r, init_v,
args=(1, acc))
t, rs, vs = integrator.run(t1=0, t2=T, dt=dt)
satellite_orbit = np.vstack((rs.T,vs.T)).T
# sample unbinding times uniformly
s_R_orbit = np.sqrt(np.sum(satellite_orbit[...,:3]**2, axis=-1))
pericenters, = argrelmin(np.squeeze(s_R_orbit))
pericenters = pericenters[:-1]
############################################
ppp = s_R_orbit[pericenters,0]
zero_one = (ppp - ppp.min()) / (ppp.max() - ppp.min())
#ppp = ((-zero_one + 1.)*99 + 1).astype(int)
ppp = zero_one**(0.3333333)
zero_one = (ppp - ppp.min()) / (ppp.max() - ppp.min())
ppp = ((-zero_one + 1.)*99 + 1).astype(int)
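# Weighting sketch: each pericenter receives an integer weight between 1 and
# 100 that grows as its radius shrinks, so the unbinding times sampled below
# cluster around the deepest pericentric passages.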
pp = []
for ii,peri in enumerate(pericenters):
pp += [peri]*ppp[ii]
import random
tubs = []
for ii in range(nparticles):
peri_idx = random.choice(pp)
tub = np.random.normal(peri_idx+100, 100)
tubs.append(tub)
tubs = np.array(tubs).astype(int)
acc = np.zeros((nparticles,3))
gen = StreamGenerator(potential, satellite_orbit, mass,
acc_args=(nparticles, acc))
t, orbits = gen.run(tubs, 0., T, dt)
# E_pot = potential._value_at(orbits[:,0,:3])
# E_kin = np.squeeze(0.5*np.sum(orbits[:,0,3:]**2, axis=-1))
# E_total = (E_pot + E_kin)[tubs[0]:]
# plt.clf()
# plt.subplot(211)
# plt.semilogy(np.abs(E_pot))
# plt.subplot(212)
# plt.semilogy(np.abs(E_kin))
# plt.savefig(os.path.join(plot_path, "wtf2.png"))
# plt.clf()
# plt.semilogy(np.fabs((E_total[1:]-E_total[0])/E_total[0]))
# plt.savefig(os.path.join(plot_path, "wtf.png"))
# sys.exit(0)
plt.clf()
fig = plt.figure(figsize=(6,6))
ax = fig.add_subplot(111)
ax.plot(orbits[-1,:,0], orbits[-1,:,1],
alpha=0.75, linestyle='none')
ax.plot(satellite_orbit[:,0,0], satellite_orbit[:,0,1],
alpha=0.75, marker=None)
#axes[0].set_xlim(-80,40)
#axes[0].set_ylim(-60,60)
fig.savefig(os.path.join(plot_path, "generate_xy.png"))
plt.clf()
fig = plt.figure(figsize=(6,6))
ax = fig.add_subplot(111)
fig2 = plt.figure(figsize=(6,6))
ax2 = fig2.add_subplot(111)
hel = _gc_to_hel(orbits[-1])
l,b = hel[:,0], hel[:,1]
l = coord.Angle(l*u.rad).to(u.degree).wrap_at(180*u.deg).value
b = coord.Angle(b*u.rad).to(u.degree).value
_g = coord.Galactic(l*u.deg,b*u.deg).icrs
ax.plot(l, b,
alpha=0.75, linestyle='none')
ax2.plot(_g.ra.degree, _g.dec.degree,
alpha=0.75, linestyle='none')
l = coord.Angle(s_hel[:,0]*u.rad).to(u.degree).wrap_at(180*u.deg).value
b = coord.Angle(s_hel[:,1]*u.rad).to(u.degree).value
_g = coord.Galactic(l*u.deg,b*u.deg).icrs
ax.plot(l,b)
ax2.plot(_g.ra.degree, _g.dec.degree,
alpha=0.75, linestyle='none')
ax2.set_xlim(245, 225)
ax2.set_ylim(-2, 10)
# ax.plot(satellite_orbit[:,0,0], satellite_orbit[:,0,1],
# alpha=0.75, marker=None)
#axes[0].set_xlim(-80,40)
#axes[0].set_ylim(-60,60)
fig.savefig(os.path.join(plot_path, "generate_lb.png"))
fig2.savefig(os.path.join(plot_path, "generate_ad.png"))
sys.exit(0)
########################################################
########################################################
########################################################
plt.clf()
fig,axes = plt.subplots(1, 2, figsize=(12,6),
sharex=True, sharey=True)
axes[0].plot(particles._X[:,0], particles._X[:,2],
alpha=0.75, linestyle='none')
axes[1].plot(orbits[-1,:,0], orbits[-1,:,2],
alpha=0.75, linestyle='none')
axes[0].set_xlim(-80,40)
axes[0].set_ylim(-60,60)
fig.savefig(os.path.join(plot_path, "generate2.png"))
sys.exit(0)
plt.clf()
plt.plot(particles._X[:,0], particles._X[:,2],
alpha=0.5, linestyle='none')
orbits = np.zeros((T,nparticles,6))
for ii,tub in enumerate(tubs):
print(ii)
init_r = np.random.normal(rs[tub] + a_pm[tub,ii]*r_tide[tub], r_tide[tub])
init_v = np.random.normal(vs[tub], v_disp[tub])
integrator = LeapfrogIntegrator(potential._acceleration_at,
init_r, init_v,
args=(1, np.zeros_like(init_r)))
t, R, V = integrator.run(t1=tub, t2=T, dt=dt)
# orbits[tub:,ii,:3] = R[:,0]
# orbits[tub:,ii,3:] = V[:,0]
plt.plot(R[-1,0,0], R[-1,0,2], marker='.', color='b', alpha=0.5)
plt.savefig("/Users/adrian/projects/streams/plots/generate.png")
|
dennybaa/st2 | refs/heads/master | contrib/packs/actions/pack_mgmt/setup_virtualenv.py | 1 |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import shutil
from oslo_config import cfg
from st2common.util.shell import run_command
from st2actions.runners.pythonrunner import Action
from st2common.constants.pack import PACK_NAME_WHITELIST
from st2common.constants.pack import BASE_PACK_REQUIREMENTS
from st2common.content.utils import get_packs_base_paths
from st2common.content.utils import get_pack_directory
from st2common.util.shell import quote_unix
class SetupVirtualEnvironmentAction(Action):
"""
Action which sets up virtual environment for the provided packs.
    Setup consists of the following steps:
1. Create virtual environment for the pack
2. Install base requirements which are common to all the packs
3. Install pack-specific requirements (if any)
If the 'update' parameter is set to True, the setup skips the deletion and
creation of the virtual environment and performs an update of the
current dependencies as well as an installation of new dependencies
"""
def __init__(self, config=None, action_service=None):
super(SetupVirtualEnvironmentAction, self).__init__(config=config,
action_service=action_service)
self._base_virtualenvs_path = os.path.join(cfg.CONF.system.base_path,
'virtualenvs/')
def run(self, packs, update=False):
"""
:param packs: A list of packs to create the environment for.
:type: packs: ``list``
:param update: True to update dependencies inside the virtual environment.
:type update: ``bool``
"""
for pack_name in packs:
self._setup_pack_virtualenv(pack_name=pack_name, update=update)
        message = ('Successfully set up virtualenv for the following packs: %s' %
(', '.join(packs)))
return message
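    # Hedged usage sketch (illustrative only; in practice st2 instantiates and
    # invokes this action through its Python runner):
    #   action = SetupVirtualEnvironmentAction(config={})
    #   action.run(packs=["some_pack"], update=False)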
def _setup_pack_virtualenv(self, pack_name, update=False):
"""
Setup virtual environment for the provided pack.
:param pack_name: Pack name.
:type pack_name: ``str``
"""
# Prevent directory traversal by whitelisting allowed characters in the
# pack name
if not re.match(PACK_NAME_WHITELIST, pack_name):
raise ValueError('Invalid pack name "%s"' % (pack_name))
self.logger.debug('Setting up virtualenv for pack "%s"' % (pack_name))
virtualenv_path = os.path.join(self._base_virtualenvs_path, quote_unix(pack_name))
# Ensure pack directory exists in one of the search paths
pack_path = get_pack_directory(pack_name=pack_name)
if not pack_path:
packs_base_paths = get_packs_base_paths()
search_paths = ', '.join(packs_base_paths)
msg = 'Pack "%s" is not installed. Looked in: %s' % (pack_name, search_paths)
raise Exception(msg)
if not os.path.exists(self._base_virtualenvs_path):
os.makedirs(self._base_virtualenvs_path)
# If we don't want to update, or if the virtualenv doesn't exist, let's create it.
if not update or not os.path.exists(virtualenv_path):
# 0. Delete virtual environment if it exists
self._remove_virtualenv(virtualenv_path=virtualenv_path)
# 1. Create virtual environment
self.logger.debug('Creating virtualenv for pack "%s" in "%s"' %
(pack_name, virtualenv_path))
self._create_virtualenv(virtualenv_path=virtualenv_path)
# 2. Install base requirements which are common to all the packs
self.logger.debug('Installing base requirements')
for requirement in BASE_PACK_REQUIREMENTS:
self._install_requirement(virtualenv_path=virtualenv_path,
requirement=requirement)
# 3. Install pack-specific requirements
requirements_file_path = os.path.join(pack_path, 'requirements.txt')
has_requirements = os.path.isfile(requirements_file_path)
if has_requirements:
self.logger.debug('Installing pack specific requirements from "%s"' %
(requirements_file_path))
self._install_requirements(virtualenv_path, requirements_file_path)
else:
self.logger.debug('No pack specific requirements found')
self.logger.debug('Virtualenv for pack "%s" successfully %s in "%s"' %
(pack_name,
'updated' if update else 'created',
virtualenv_path))
def _create_virtualenv(self, virtualenv_path):
python_binary = cfg.CONF.actionrunner.python_binary
virtualenv_binary = cfg.CONF.actionrunner.virtualenv_binary
virtualenv_opts = cfg.CONF.actionrunner.virtualenv_opts
if not os.path.isfile(python_binary):
raise Exception('Python binary "%s" doesn\'t exist' % (python_binary))
if not os.path.isfile(virtualenv_binary):
raise Exception('Virtualenv binary "%s" doesn\'t exist.' % (virtualenv_binary))
self.logger.debug('Creating virtualenv in "%s" using Python binary "%s"' %
(virtualenv_path, python_binary))
cmd = [virtualenv_binary, '-p', python_binary]
cmd.extend(virtualenv_opts)
cmd.extend([virtualenv_path])
self.logger.debug('Running command "%s" to create virtualenv.', ' '.join(cmd))
try:
exit_code, _, stderr = run_command(cmd=cmd)
except OSError as e:
raise Exception('Error executing command %s. %s.' % (' '.join(cmd),
e.message))
if exit_code != 0:
raise Exception('Failed to create virtualenv in "%s": %s' %
(virtualenv_path, stderr))
return True
def _remove_virtualenv(self, virtualenv_path):
if not os.path.exists(virtualenv_path):
self.logger.info('Virtualenv path "%s" doesn\'t exist' % virtualenv_path)
return True
self.logger.debug('Removing virtualenv in "%s"' % virtualenv_path)
try:
shutil.rmtree(virtualenv_path)
except Exception as error:
self.logger.error('Error while removing virtualenv at "%s": "%s"' %
(virtualenv_path, error))
raise
return True
def _install_requirements(self, virtualenv_path, requirements_file_path):
"""
Install requirements from a file.
"""
pip_path = os.path.join(virtualenv_path, 'bin/pip')
cmd = [pip_path, 'install', '-U', '-r', requirements_file_path]
env = self._get_env_for_subprocess_command()
exit_code, stdout, stderr = run_command(cmd=cmd, env=env)
if exit_code != 0:
raise Exception('Failed to install requirements from "%s": %s' %
(requirements_file_path, stdout))
return True
def _install_requirement(self, virtualenv_path, requirement):
"""
Install a single requirement.
"""
pip_path = os.path.join(virtualenv_path, 'bin/pip')
cmd = [pip_path, 'install', requirement]
env = self._get_env_for_subprocess_command()
exit_code, stdout, stderr = run_command(cmd=cmd, env=env)
if exit_code != 0:
raise Exception('Failed to install requirement "%s": %s' %
(requirement, stdout))
return True
def _get_env_for_subprocess_command(self):
"""
Retrieve environment to be used with the subprocess command.
Note: We remove PYTHONPATH from the environment so the command works
        correctly with the newly created virtualenv.
"""
env = os.environ.copy()
if 'PYTHONPATH' in env:
del env['PYTHONPATH']
return env
|
xccui/flink | refs/heads/master | flink-python/pyflink/datastream/tests/__init__.py | 406 |
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
|
Ananasus/EchelonFrontEnd | refs/heads/master | DjangoFrontendHD/DjangoFrontendHD/urls.py | 1 |
"""
Definition of urls for DjangoFrontendHD.
"""
from datetime import datetime
from django.conf.urls import patterns, url
from django.conf.urls.static import static
from django.conf import settings
from syncdb.forms import BootstrapAuthenticationForm
# Uncomment the next lines to enable the admin:
# from django.conf.urls import include
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'syncdb.views.home', name='home'),
url(r'^load/$',
'syncdb.views.load', name='loadaverage'),
url(r'^login/$',
'django.contrib.auth.views.login',
{
'template_name': 'app/login.html',
'authentication_form': BootstrapAuthenticationForm,
'extra_context':
{
'title':'Log in',
'year':datetime.now().year,
}
},
name='login'),
url(r'^logout$',
'django.contrib.auth.views.logout',
{
'next_page': '/',
},
name='logout'),
url(r'^api/get_recent', 'syncdb.views.get_recent', name='recents'),
url(r'^api/gen_data', 'syncdb.views.gen_data', name='gendata'),
url(r'^api/get_load', 'syncdb.views.get_load', name='loadaverage'),
url(r'^api/get_event', 'syncdb.views.get_event_data', name='eventget'),
url(r'^api/gen_load', 'syncdb.views.gen_load', name='eventget')
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
lodr/codeaurora_kernel_msm | refs/heads/b2g-msm-hammerhead-3.4-lollipop-release | scripts/rt-tester/rt-tester.py | 11005 |
#!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <tglx@linutronix.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
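# Test specifications are parsed below as "cmd: opcode: threadid: data" lines,
# e.g. (hypothetical lines) "C: locknowait: 0: 0" issues the lock-nowait
# command to thread 0, while "W: locked: 0: 0" waits until thread 0 reports
# mutex 0 in the "locked" state.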
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
# Print progress when not in quiet mode
def progress(str):
if not quiet:
print str
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
|
ytjiang/django | refs/heads/master | tests/template_tests/filter_tests/test_get_digit.py | 523 |
from django.template.defaultfilters import get_digit
from django.test import SimpleTestCase
class FunctionTests(SimpleTestCase):
def test_values(self):
self.assertEqual(get_digit(123, 1), 3)
self.assertEqual(get_digit(123, 2), 2)
self.assertEqual(get_digit(123, 3), 1)
self.assertEqual(get_digit(123, 4), 0)
self.assertEqual(get_digit(123, 0), 123)
def test_string(self):
self.assertEqual(get_digit('xyz', 0), 'xyz')
|
LOSP/external_skia | refs/heads/kk | tools/verify_images_for_gm_results.py | 32 |
#!/usr/bin/python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Look through skia-autogen, searching for all checksums which should have
corresponding files in Google Storage, and verify that those files exist. """
import json
import posixpath
import re
import subprocess
import sys
# TODO(borenet): Replace some/all of these with constants from gm/gm_json.py
AUTOGEN_URL = 'http://skia-autogen.googlecode.com/svn/gm-actual'
GS_URL = 'gs://chromium-skia-gm/gm'
TEST_NAME_PATTERN = re.compile('(\S+)_(\S+).png')
def FileNameToGSURL(filename, hash_type, hash_value):
""" Convert a file name given in a checksum file to the URL of the
corresponding image file in Google Storage.
filename: string; the file name to convert. Takes the form specified by
TEST_NAME_PATTERN.
hash_type: string; the type of the checksum.
hash_value: string; the checksum itself.
"""
  match = TEST_NAME_PATTERN.match(filename)
  if not match:
    raise Exception('Invalid test name for file: %s' % filename)
  test_name = match.group(1)
return '%s/%s/%s/%s.png' % (GS_URL, hash_type, test_name, hash_value)
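# Illustrative conversion (hash type and checksum are hypothetical):
#   FileNameToGSURL('sometest_565.png', 'bitmap-64bitMD5', '12345')
#   # -> 'gs://chromium-skia-gm/gm/bitmap-64bitMD5/sometest/12345.png'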
def FindURLSInJSON(json_file, gs_urls):
""" Extract Google Storage URLs from a JSON file in svn, adding them to the
gs_urls dictionary.
json_file: string; URL of the JSON file.
gs_urls: dict; stores Google Storage URLs as keys and lists of the JSON files
which reference them.
Example gs_urls:
{ 'gs://chromium-skia-gm/gm/sometest/12345.png': [
'http://skia-autogen.googlecode.com/svn/gm-actual/Test-Mac10.6-MacMini4.1-GeForce320M-x86-Debug/actual-results.json',
'http://skia-autogen.googlecode.com/svn/gm-actual/Test-Mac10.8-MacMini4.1-GeForce320M-x86-Debug/actual-results.json',
]
}
"""
output = subprocess.check_output(['svn', 'cat', json_file])
json_content = json.loads(output)
for dict_type in ['actual-results']:
for result_type in json_content[dict_type]:
if json_content[dict_type][result_type]:
for result in json_content[dict_type][result_type].keys():
hash_type, hash_value = json_content[dict_type][result_type][result]
gs_url = FileNameToGSURL(result, hash_type, str(hash_value))
if gs_urls.get(gs_url):
gs_urls[gs_url].append(json_file)
else:
gs_urls[gs_url] = [json_file]
def _FindJSONFiles(url, json_files):
""" Helper function for FindJsonFiles. Recursively explore the repository,
adding JSON files to a list.
url: string; URL of the repository (or subdirectory thereof) to explore.
json_files: list to which JSON file urls will be added.
"""
proc = subprocess.Popen(['svn', 'ls', url], stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
if proc.wait() != 0:
raise Exception('Failed to list svn directory.')
output = proc.communicate()[0].splitlines()
subdirs = []
for item in output:
if item.endswith(posixpath.sep):
subdirs.append(item)
elif item.endswith('.json'):
json_files.append(posixpath.join(url, item))
else:
print 'Warning: ignoring %s' % posixpath.join(url, item)
for subdir in subdirs:
_FindJSONFiles(posixpath.join(url, subdir), json_files)
def FindJSONFiles(url):
""" Recursively explore the given repository and return a list of the JSON
files it contains.
url: string; URL of the repository to explore.
"""
print 'Searching for JSON files in %s' % url
json_files = []
_FindJSONFiles(url, json_files)
return json_files
def FindURLs(url):
""" Find Google Storage URLs inside of JSON files in the given repository.
Returns a dictionary whose keys are Google Storage URLs and values are lists
of the JSON files which reference them.
url: string; URL of the repository to explore.
Example output:
{ 'gs://chromium-skia-gm/gm/sometest/12345.png': [
'http://skia-autogen.googlecode.com/svn/gm-actual/Test-Mac10.6-MacMini4.1-GeForce320M-x86-Debug/actual-results.json',
'http://skia-autogen.googlecode.com/svn/gm-actual/Test-Mac10.8-MacMini4.1-GeForce320M-x86-Debug/actual-results.json',
]
}
"""
gs_urls = {}
for json_file in FindJSONFiles(url):
print 'Looking for checksums in %s' % json_file
FindURLSInJSON(json_file, gs_urls)
return gs_urls
def VerifyURL(url):
""" Verify that the given URL exists.
url: string; the Google Storage URL of the image file in question.
"""
proc = subprocess.Popen(['gsutil', 'ls', url], stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
if proc.wait() != 0:
return False
return True
def VerifyURLs(urls):
""" Verify that each of the given URLs exists. Return a list of which URLs do
not exist.
urls: dictionary; URLs of the image files in question.
"""
print 'Verifying that images exist for URLs...'
missing = []
for url in urls.iterkeys():
if not VerifyURL(url):
print 'Missing: %s, referenced by: \n %s' % (url, '\n '.join(urls[url]))
missing.append(url)
return missing
def Main():
urls = FindURLs(AUTOGEN_URL)
missing = VerifyURLs(urls)
if missing:
    print 'Found %d missing files.' % len(missing)
return 1
if __name__ == '__main__':
sys.exit(Main())
|
hubsaysnuaa/odoo | refs/heads/8.0 | addons/account/project/report/quantity_cost_ledger.py | 358 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv
from openerp.report import report_sxw
class account_analytic_quantity_cost_ledger(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(account_analytic_quantity_cost_ledger, self).__init__(cr, uid, name, context=context)
self.localcontext.update( {
'time': time,
'lines_g': self._lines_g,
'lines_a': self._lines_a,
'sum_quantity': self._sum_quantity,
'account_sum_quantity': self._account_sum_quantity,
})
def _lines_g(self, account_id, date1, date2, journals):
if not journals:
self.cr.execute("SELECT sum(aal.unit_amount) AS quantity, \
aa.code AS code, aa.name AS name, aa.id AS id \
FROM account_account AS aa, account_analytic_line AS aal \
WHERE (aal.account_id=%s) AND (aal.date>=%s) \
AND (aal.date<=%s) AND (aal.general_account_id=aa.id) \
AND aa.active \
GROUP BY aa.code, aa.name, aa.id ORDER BY aa.code",
(account_id, date1, date2))
else:
journal_ids = journals
self.cr.execute("SELECT sum(aal.unit_amount) AS quantity, \
aa.code AS code, aa.name AS name, aa.id AS id \
FROM account_account AS aa, account_analytic_line AS aal \
WHERE (aal.account_id=%s) AND (aal.date>=%s) \
AND (aal.date<=%s) AND (aal.general_account_id=aa.id) \
AND aa.active \
AND (aal.journal_id IN %s ) \
GROUP BY aa.code, aa.name, aa.id ORDER BY aa.code",
(account_id, date1, date2, tuple(journal_ids)))
res = self.cr.dictfetchall()
return res
def _lines_a(self, general_account_id, account_id, date1, date2, journals):
if not journals:
self.cr.execute("SELECT aal.name AS name, aal.code AS code, \
aal.unit_amount AS quantity, aal.date AS date, \
aaj.code AS cj \
FROM account_analytic_line AS aal, \
account_analytic_journal AS aaj \
WHERE (aal.general_account_id=%s) AND (aal.account_id=%s) \
AND (aal.date>=%s) AND (aal.date<=%s) \
AND (aal.journal_id=aaj.id) \
ORDER BY aal.date, aaj.code, aal.code",
(general_account_id, account_id, date1, date2))
else:
journal_ids = journals
self.cr.execute("SELECT aal.name AS name, aal.code AS code, \
aal.unit_amount AS quantity, aal.date AS date, \
aaj.code AS cj \
FROM account_analytic_line AS aal, \
account_analytic_journal AS aaj \
WHERE (aal.general_account_id=%s) AND (aal.account_id=%s) \
AND (aal.date>=%s) AND (aal.date<=%s) \
AND (aal.journal_id=aaj.id) AND (aaj.id IN %s) \
ORDER BY aal.date, aaj.code, aal.code",
(general_account_id, account_id, date1, date2,tuple(journal_ids)))
res = self.cr.dictfetchall()
return res
def _account_sum_quantity(self, account_id, date1, date2, journals):
if not journals:
self.cr.execute("SELECT sum(unit_amount) \
FROM account_analytic_line \
WHERE account_id=%s AND date>=%s AND date<=%s",
(account_id, date1, date2))
else:
journal_ids = journals
self.cr.execute("SELECT sum(unit_amount) \
FROM account_analytic_line \
WHERE account_id = %s AND date >= %s AND date <= %s \
AND journal_id IN %s",
(account_id, date1, date2, tuple(journal_ids),))
return self.cr.fetchone()[0] or 0.0
def _sum_quantity(self, accounts, date1, date2, journals):
ids = map(lambda x: x.id, accounts)
if not ids:
return 0.0
if not journals:
self.cr.execute("SELECT sum(unit_amount) \
FROM account_analytic_line \
WHERE account_id IN %s AND date>=%s AND date<=%s",
(tuple(ids), date1, date2,))
else:
journal_ids = journals
self.cr.execute("SELECT sum(unit_amount) \
FROM account_analytic_line \
WHERE account_id IN %s AND date >= %s AND date <= %s \
AND journal_id IN %s",(tuple(ids), date1, date2, tuple(journal_ids)))
return self.cr.fetchone()[0] or 0.0
class report_analyticcostledgerquantity(osv.AbstractModel):
_name = 'report.account.report_analyticcostledgerquantity'
_inherit = 'report.abstract_report'
_template = 'account.report_analyticcostledgerquantity'
_wrapped_report_class = account_analytic_quantity_cost_ledger
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ventrixcode/yowsup
|
refs/heads/pr/2
|
yowsup/layers/protocol_presence/protocolentities/test_presence.py
|
68
|
from yowsup.layers.protocol_presence.protocolentities.presence import PresenceProtocolEntity
from yowsup.structs import ProtocolTreeNode
from yowsup.structs.protocolentity import ProtocolEntityTest
import unittest
class PresenceProtocolEntityTest(ProtocolEntityTest, unittest.TestCase):
def setUp(self):
self.ProtocolEntity = PresenceProtocolEntity
self.node = ProtocolTreeNode("presence", {"type": "presence_type", "name": "presence_name"}, None, None)
|
FNST-OpenStack/horizon
|
refs/heads/master
|
openstack_dashboard/dashboards/admin/networks/subnets/workflows.py
|
80
|
# Copyright 2013 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.networks.subnets \
import workflows as project_workflows
LOG = logging.getLogger(__name__)
class CreateSubnet(project_workflows.CreateSubnet):
def get_success_url(self):
return reverse("horizon:admin:networks:detail",
args=(self.context.get('network_id'),))
def get_failure_url(self):
return reverse("horizon:admin:networks:detail",
args=(self.context.get('network_id'),))
def handle(self, request, data):
try:
# We must specify tenant_id of the network which a subnet is
# created for if admin user does not belong to the tenant.
network = api.neutron.network_get(request,
self.context['network_id'])
except Exception:
msg = (_('Failed to retrieve network %s for a subnet') %
data['network_id'])
LOG.info(msg)
redirect = self.get_failure_url()
exceptions.handle(request, msg, redirect=redirect)
subnet = self._create_subnet(request, data,
tenant_id=network.tenant_id)
return True if subnet else False
class UpdateSubnet(project_workflows.UpdateSubnet):
success_url = "horizon:admin:networks:detail"
failure_url = "horizon:admin:networks:detail"
|
hackoon/cinnamon-screensaver-mplayer-fs
|
refs/heads/master
|
src/widgets/marqueeLabel.py
|
2
|
#! /usr/bin/python3
from gi.repository import Gtk, GObject, GLib
from util import trackers
class _fixedViewport(Gtk.Viewport):
"""
This is needed by MarqueeLabel to restrict the size of
our label, and cause the viewport to actually be functional.
Otherwise, the text is trimmed, but no scrolling occurs.
"""
def __init__(self):
super(_fixedViewport, self).__init__()
self.set_shadow_type(Gtk.ShadowType.NONE)
def do_get_preferred_width(self):
return (400, 400)
class MarqueeLabel(Gtk.Stack):
"""
A scrolling label for the PlayerControl - it uses the defined
pattern as a mapping between elapsed time and the hadjustment
of the _fixedViewport. If the label text is wider than the
widget's actual width, we will scroll according to this map
over the course of 15 seconds.
It roughly translates to:
0.0 - 2.0 seconds: no movement.
2.0 - 10.0 seconds: gradually scroll to max adjustment of the viewport.
10.0 - 12.0 seconds: no movement.
12.0 - 15.0 seconds: scroll back to the starting position.
This widget also implements a stack (similar to the one in MonitorView) which
provides for a smooth label crossfade when track info changes.
"""
PATTERN = [( 0.0, 0.0),
( 2.0, 0.0),
(10.0, 1.0),
(12.0, 1.0),
(15.0, 0.0)]
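    # Illustrative walkthrough (not part of the original source):
    # interpolate_point() below maps elapsed seconds to a scroll fraction
    # by linear interpolation between neighbouring PATTERN entries. At
    # 6.0s, between (2.0, 0.0) and (10.0, 1.0):
    #   0.0 + (6.0 - 2.0) * ((1.0 - 0.0) / (10.0 - 2.0)) = 0.5
    # so the label sits halfway through its scroll range.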
LENGTH = len(PATTERN)
def __init__(self, text):
super(MarqueeLabel, self).__init__()
self.set_transition_type(Gtk.StackTransitionType.CROSSFADE)
self.set_transition_duration(300)
self.tick_id = 0
self.current = self._make_label(text)
self.add(self.current)
self.set_visible_child(self.current)
def _make_label(self, text):
vp = _fixedViewport()
label = Gtk.Label(text)
label.set_halign(Gtk.Align.START)
vp.add(label)
vp.show_all()
return vp
def set_text(self, text):
if self.current.get_child().get_text() == text:
return
self.cancel_tick()
self.queued = self._make_label(text)
self.add(self.queued)
self.set_visible_child(self.queued)
tmp = self.current
self.current = self.queued
self.queued = None
GObject.idle_add(tmp.destroy)
if not self.current.get_realized():
trackers.con_tracker_get().connect(self.current,
"realize",
self.on_current_realized)
else:
GObject.idle_add(self._marquee_idle)
def on_current_realized(self, widget, data=None):
GObject.idle_add(self._marquee_idle)
trackers.con_tracker_get().disconnect(widget,
"realize",
self.on_current_realized)
def cancel_tick(self):
if self.tick_id > 0:
self.remove_tick_callback(self.tick_id)
self.tick_id = 0
def _marquee_idle(self):
self.hadjust = self.current.get_hadjustment()
        if self.hadjust.get_upper() == self.hadjust.get_page_size():
            # The text fits entirely within the viewport; no scrolling needed.
return False
self.start_time = self.get_frame_clock().get_frame_time()
self.end_time = self.start_time + (self.PATTERN[self.LENGTH - 1][0] * 1000 * 1000) # sec to ms to μs
if self.tick_id == 0:
self.tick_id = self.add_tick_callback(self._on_marquee_tick)
self._marquee_step(self.start_time)
return GLib.SOURCE_REMOVE
def _on_marquee_tick(self, widget, clock, data=None):
now = clock.get_frame_time()
self._marquee_step(now)
if now >= self.end_time:
self.start_time = self.end_time
self.end_time += (self.PATTERN[self.LENGTH - 1][0] * 1000 * 1000) # sec to ms to μs
return GLib.SOURCE_CONTINUE
def interpolate_point(self, now):
point = ((now - self.start_time) / 1000 / 1000)
i = 0
while i < self.LENGTH:
cindex, cval = self.PATTERN[i]
if point > cindex:
i += 1
continue
if point == cindex:
return cval
pindex, pval = self.PATTERN[i - 1]
diff = cval - pval
duration = cindex - pindex
ratio = diff / duration
additive = (point - pindex) * ratio
return pval + additive
def _marquee_step(self, now):
if now < self.end_time:
t = self.interpolate_point(now)
else:
t = self.PATTERN[self.LENGTH - 1][1]
new_position = ((self.hadjust.get_upper() - self.hadjust.get_page_size()) * t)
self.hadjust.set_value(new_position)
self.queue_draw()
|
SRabbelier/Melange
|
refs/heads/master
|
thirdparty/google_appengine/lib/django_0_96/django/utils/_threading_local.py
|
64
|
"""Thread-local objects
(Note that this module provides a Python version of the threading.local
class. Depending on the version of Python you're using, there may be a
faster one available. You should always import the local class from
threading.)
Thread-local objects support the management of thread-local data.
If you have data that you want to be local to a thread, simply create
a thread-local object and use its attributes:
>>> mydata = local()
>>> mydata.number = 42
>>> mydata.number
42
You can also access the local-object's dictionary:
>>> mydata.__dict__
{'number': 42}
>>> mydata.__dict__.setdefault('widgets', [])
[]
>>> mydata.widgets
[]
What's important about thread-local objects is that their data are
local to a thread. If we access the data in a different thread:
>>> log = []
>>> def f():
... items = mydata.__dict__.items()
... items.sort()
... log.append(items)
... mydata.number = 11
... log.append(mydata.number)
>>> import threading
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
>>> log
[[], 11]
we get different data. Furthermore, changes made in the other thread
don't affect data seen in this thread:
>>> mydata.number
42
Of course, values you get from a local object, including a __dict__
attribute, are for whatever thread was current at the time the
attribute was read. For that reason, you generally don't want to save
these values across threads, as they apply only to the thread they
came from.
You can create custom local objects by subclassing the local class:
>>> class MyLocal(local):
... number = 2
... initialized = False
... def __init__(self, **kw):
... if self.initialized:
... raise SystemError('__init__ called too many times')
... self.initialized = True
... self.__dict__.update(kw)
... def squared(self):
... return self.number ** 2
This can be useful to support default values, methods and
initialization. Note that if you define an __init__ method, it will be
called each time the local object is used in a separate thread. This
is necessary to initialize each thread's dictionary.
Now if we create a local object:
>>> mydata = MyLocal(color='red')
Now we have a default number:
>>> mydata.number
2
an initial color:
>>> mydata.color
'red'
>>> del mydata.color
And a method that operates on the data:
>>> mydata.squared()
4
As before, we can access the data in a separate thread:
>>> log = []
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
>>> log
[[('color', 'red'), ('initialized', True)], 11]
without affecting this thread's data:
>>> mydata.number
2
>>> mydata.color
Traceback (most recent call last):
...
AttributeError: 'MyLocal' object has no attribute 'color'
Note that subclasses can define slots, but they are not thread
local. They are shared across threads:
>>> class MyLocal(local):
... __slots__ = 'number'
>>> mydata = MyLocal()
>>> mydata.number = 42
>>> mydata.color = 'red'
So, the separate thread:
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
affects what we see:
>>> mydata.number
11
>>> del mydata
"""
# Threading import is at end
class _localbase(object):
__slots__ = '_local__key', '_local__args', '_local__lock'
def __new__(cls, *args, **kw):
self = object.__new__(cls)
key = '_local__key', 'thread.local.' + str(id(self))
object.__setattr__(self, '_local__key', key)
object.__setattr__(self, '_local__args', (args, kw))
object.__setattr__(self, '_local__lock', RLock())
        if (args or kw) and (cls.__init__ is object.__init__):
raise TypeError("Initialization arguments are not supported")
# We need to create the thread dict in anticipation of
# __init__ being called, to make sure we don't call it
# again ourselves.
dict = object.__getattribute__(self, '__dict__')
currentThread().__dict__[key] = dict
return self
def _patch(self):
key = object.__getattribute__(self, '_local__key')
d = currentThread().__dict__.get(key)
if d is None:
d = {}
currentThread().__dict__[key] = d
object.__setattr__(self, '__dict__', d)
# we have a new instance dict, so call out __init__ if we have
# one
cls = type(self)
if cls.__init__ is not object.__init__:
args, kw = object.__getattribute__(self, '_local__args')
cls.__init__(self, *args, **kw)
else:
object.__setattr__(self, '__dict__', d)
class local(_localbase):
def __getattribute__(self, name):
lock = object.__getattribute__(self, '_local__lock')
lock.acquire()
try:
_patch(self)
return object.__getattribute__(self, name)
finally:
lock.release()
def __setattr__(self, name, value):
lock = object.__getattribute__(self, '_local__lock')
lock.acquire()
try:
_patch(self)
return object.__setattr__(self, name, value)
finally:
lock.release()
def __delattr__(self, name):
lock = object.__getattribute__(self, '_local__lock')
lock.acquire()
try:
_patch(self)
return object.__delattr__(self, name)
finally:
lock.release()
def __del__():
threading_enumerate = enumerate
__getattribute__ = object.__getattribute__
def __del__(self):
key = __getattribute__(self, '_local__key')
try:
threads = list(threading_enumerate())
except:
# if enumerate fails, as it seems to do during
# shutdown, we'll skip cleanup under the assumption
# that there is nothing to clean up
return
for thread in threads:
try:
__dict__ = thread.__dict__
except AttributeError:
# Thread is dying, rest in peace
continue
if key in __dict__:
try:
del __dict__[key]
except KeyError:
pass # didn't have anything in this thread
return __del__
__del__ = __del__()
try:
from threading import currentThread, enumerate, RLock
except ImportError:
from dummy_threading import currentThread, enumerate, RLock
|
jchevin/ardupilot-master
|
refs/heads/master
|
Tools/autotest/apm_unit_tests/examples/arducopter_example_level.py
|
250
|
import arducopter
def unit_test(mavproxy, mav):
'''A scripted flight plan'''
if (arducopter.calibrate_level(mavproxy, mav)):
return True
return False
|
vedujoshi/tempest
|
refs/heads/master
|
tempest/tests/lib/common/utils/linux/test_remote_client.py
|
1
|
# Copyright 2017 NEC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from tempest.lib.common import ssh
from tempest.lib.common.utils.linux import remote_client
from tempest.lib import exceptions as lib_exc
from tempest.tests import base
class FakeServersClient(object):
def get_console_output(self, server_id):
return {"output": "fake_output"}
class TestRemoteClient(base.TestCase):
@mock.patch.object(ssh.Client, 'exec_command', return_value='success')
def test_exec_command(self, mock_ssh_exec_command):
client = remote_client.RemoteClient('192.168.1.10', 'username')
client.exec_command('ls')
mock_ssh_exec_command.assert_called_once_with(
'set -eu -o pipefail; PATH=$$PATH:/sbin; ls')
@mock.patch.object(ssh.Client, 'test_connection_auth')
def test_validate_authentication(self, mock_test_connection_auth):
client = remote_client.RemoteClient('192.168.1.10', 'username')
client.validate_authentication()
mock_test_connection_auth.assert_called_once_with()
@mock.patch.object(remote_client.LOG, 'debug')
@mock.patch.object(ssh.Client, 'exec_command')
def test_debug_ssh_without_console(self, mock_exec_command, mock_debug):
mock_exec_command.side_effect = lib_exc.SSHTimeout
server = {'id': 'fake_id'}
client = remote_client.RemoteClient('192.168.1.10', 'username',
server=server)
self.assertRaises(lib_exc.SSHTimeout, client.exec_command, 'ls')
mock_debug.assert_called_with(
'Caller: %s. Timeout trying to ssh to server %s',
'TestRemoteClient:test_debug_ssh_without_console', server)
@mock.patch.object(remote_client.LOG, 'debug')
@mock.patch.object(ssh.Client, 'exec_command')
def test_debug_ssh_with_console(self, mock_exec_command, mock_debug):
mock_exec_command.side_effect = lib_exc.SSHTimeout
server = {'id': 'fake_id'}
client = remote_client.RemoteClient('192.168.1.10', 'username',
server=server,
servers_client=FakeServersClient())
self.assertRaises(lib_exc.SSHTimeout, client.exec_command, 'ls')
mock_debug.assert_called_with(
'Console log for server %s: %s', server['id'], 'fake_output')
|
romain-li/edx-platform
|
refs/heads/master
|
openedx/core/djangoapps/content/course_overviews/migrations/0005_delete_courseoverviewgeneratedhistory.py
|
80
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('course_overviews', '0004_courseoverview_org'),
]
operations = [
migrations.DeleteModel(
name='CourseOverviewGeneratedHistory',
),
]
|
markoshorro/gem5
|
refs/heads/master
|
configs/ruby/MOESI_hammer.py
|
15
|
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# Copyright (c) 2009 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Brad Beckmann
import math
import m5
from m5.objects import *
from m5.defines import buildEnv
from Ruby import create_topology
from Ruby import send_evicts
#
# Note: the L1 Cache latency is only used by the sequencer on fast path hits
#
class L1Cache(RubyCache):
latency = 2
#
# Note: the L2 Cache latency is not currently used
#
class L2Cache(RubyCache):
latency = 10
#
# Probe filter is a cache, latency is not used
#
class ProbeFilter(RubyCache):
latency = 1
def define_options(parser):
parser.add_option("--allow-atomic-migration", action="store_true",
help="allow migratory sharing for atomic only accessed blocks")
parser.add_option("--pf-on", action="store_true",
help="Hammer: enable Probe Filter")
parser.add_option("--dir-on", action="store_true",
help="Hammer: enable Full-bit Directory")
def create_system(options, full_system, system, dma_ports, ruby_system):
if buildEnv['PROTOCOL'] != 'MOESI_hammer':
panic("This script requires the MOESI_hammer protocol to be built.")
cpu_sequencers = []
#
# The ruby network creation expects the list of nodes in the system to be
# consistent with the NetDest list. Therefore the l1 controller nodes must be
# listed before the directory nodes and directory nodes before dma nodes, etc.
#
l1_cntrl_nodes = []
dir_cntrl_nodes = []
dma_cntrl_nodes = []
#
# Must create the individual controllers before the network to ensure the
# controller constructors are called before the network constructor
#
block_size_bits = int(math.log(options.cacheline_size, 2))
for i in xrange(options.num_cpus):
#
# First create the Ruby objects associated with this cpu
#
l1i_cache = L1Cache(size = options.l1i_size,
assoc = options.l1i_assoc,
start_index_bit = block_size_bits,
is_icache = True)
l1d_cache = L1Cache(size = options.l1d_size,
assoc = options.l1d_assoc,
start_index_bit = block_size_bits)
l2_cache = L2Cache(size = options.l2_size,
assoc = options.l2_assoc,
start_index_bit = block_size_bits)
l1_cntrl = L1Cache_Controller(version = i,
L1Icache = l1i_cache,
L1Dcache = l1d_cache,
L2cache = l2_cache,
no_mig_atomic = not \
options.allow_atomic_migration,
send_evictions = send_evicts(options),
transitions_per_cycle = options.ports,
clk_domain=system.cpu[i].clk_domain,
ruby_system = ruby_system)
cpu_seq = RubySequencer(version = i,
icache = l1i_cache,
dcache = l1d_cache,
clk_domain=system.cpu[i].clk_domain,
ruby_system = ruby_system)
l1_cntrl.sequencer = cpu_seq
if options.recycle_latency:
l1_cntrl.recycle_latency = options.recycle_latency
exec("ruby_system.l1_cntrl%d = l1_cntrl" % i)
# Add controllers and sequencers to the appropriate lists
cpu_sequencers.append(cpu_seq)
l1_cntrl_nodes.append(l1_cntrl)
# Connect the L1 controller and the network
# Connect the buffers from the controller to network
l1_cntrl.requestFromCache = ruby_system.network.slave
l1_cntrl.responseFromCache = ruby_system.network.slave
l1_cntrl.unblockFromCache = ruby_system.network.slave
# Connect the buffers from the network to the controller
l1_cntrl.forwardToCache = ruby_system.network.master
l1_cntrl.responseToCache = ruby_system.network.master
phys_mem_size = sum(map(lambda r: r.size(), system.mem_ranges))
assert(phys_mem_size % options.num_dirs == 0)
mem_module_size = phys_mem_size / options.num_dirs
#
# determine size and index bits for probe filter
# By default, the probe filter size is configured to be twice the
# size of the L2 cache.
#
pf_size = MemorySize(options.l2_size)
pf_size.value = pf_size.value * 2
dir_bits = int(math.log(options.num_dirs, 2))
pf_bits = int(math.log(pf_size.value, 2))
if options.numa_high_bit:
if options.pf_on or options.dir_on:
# if numa high bit explicitly set, make sure it does not overlap
# with the probe filter index
assert(options.numa_high_bit - dir_bits > pf_bits)
# set the probe filter start bit to just above the block offset
pf_start_bit = block_size_bits
else:
if dir_bits > 0:
pf_start_bit = dir_bits + block_size_bits - 1
else:
pf_start_bit = block_size_bits
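    # Worked example (illustrative values, not from the original script):
    # with a 64-byte cache line, block_size_bits = log2(64) = 6, and with
    # 4 directories, dir_bits = 2, giving pf_start_bit = 2 + 6 - 1 = 7.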
# Run each of the ruby memory controllers at a ratio of the frequency of
# the ruby system
# clk_divider value is a fix to pass regression.
ruby_system.memctrl_clk_domain = DerivedClockDomain(
clk_domain=ruby_system.clk_domain,
clk_divider=3)
for i in xrange(options.num_dirs):
dir_size = MemorySize('0B')
dir_size.value = mem_module_size
pf = ProbeFilter(size = pf_size, assoc = 4,
start_index_bit = pf_start_bit)
dir_cntrl = Directory_Controller(version = i,
directory = RubyDirectoryMemory(
version = i, size = dir_size),
probeFilter = pf,
probe_filter_enabled = options.pf_on,
full_bit_dir_enabled = options.dir_on,
transitions_per_cycle = options.ports,
ruby_system = ruby_system)
if options.recycle_latency:
dir_cntrl.recycle_latency = options.recycle_latency
exec("ruby_system.dir_cntrl%d = dir_cntrl" % i)
dir_cntrl_nodes.append(dir_cntrl)
# Connect the directory controller to the network
dir_cntrl.forwardFromDir = ruby_system.network.slave
dir_cntrl.responseFromDir = ruby_system.network.slave
dir_cntrl.dmaResponseFromDir = ruby_system.network.slave
dir_cntrl.unblockToDir = ruby_system.network.master
dir_cntrl.responseToDir = ruby_system.network.master
dir_cntrl.requestToDir = ruby_system.network.master
dir_cntrl.dmaRequestToDir = ruby_system.network.master
for i, dma_port in enumerate(dma_ports):
#
# Create the Ruby objects associated with the dma controller
#
dma_seq = DMASequencer(version = i,
ruby_system = ruby_system,
slave = dma_port)
dma_cntrl = DMA_Controller(version = i,
dma_sequencer = dma_seq,
transitions_per_cycle = options.ports,
ruby_system = ruby_system)
exec("ruby_system.dma_cntrl%d = dma_cntrl" % i)
dma_cntrl_nodes.append(dma_cntrl)
if options.recycle_latency:
dma_cntrl.recycle_latency = options.recycle_latency
# Connect the dma controller to the network
dma_cntrl.responseFromDir = ruby_system.network.master
dma_cntrl.requestToDir = ruby_system.network.slave
all_cntrls = l1_cntrl_nodes + dir_cntrl_nodes + dma_cntrl_nodes
# Create the io controller and the sequencer
if full_system:
io_seq = DMASequencer(version=len(dma_ports), ruby_system=ruby_system)
ruby_system._io_port = io_seq
io_controller = DMA_Controller(version = len(dma_ports),
dma_sequencer = io_seq,
ruby_system = ruby_system)
ruby_system.io_controller = io_controller
# Connect the dma controller to the network
io_controller.responseFromDir = ruby_system.network.master
io_controller.requestToDir = ruby_system.network.slave
all_cntrls = all_cntrls + [io_controller]
topology = create_topology(all_cntrls, options)
return (cpu_sequencers, dir_cntrl_nodes, topology)
|
chhao91/QGIS
|
refs/heads/master
|
python/analysis/__init__.py
|
23
|
from qgis._analysis import *
|
DMLoy/ECommerceBasic
|
refs/heads/master
|
lib/python2.7/site-packages/django/conf/locale/pt_BR/__init__.py
|
12133432
| |
Nitaco/ansible
|
refs/heads/devel
|
test/units/modules/network/__init__.py
|
12133432
| |
hottwaj/django
|
refs/heads/master
|
tests/sites_tests/__init__.py
|
12133432
| |
aikramer2/spaCy
|
refs/heads/master
|
spacy/syntax/__init__.py
|
12133432
| |
kragniz/searchlight
|
refs/heads/master
|
searchlight/common/config.py
|
2
|
#!/usr/bin/env python
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Routines for configuring Glance
"""
import logging
import logging.config
import logging.handlers
import os
import tempfile
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_policy import policy
from paste import deploy
from searchlight import i18n
from searchlight.version import version_info as version
_ = i18n._
paste_deploy_opts = [
cfg.StrOpt('flavor',
help=_('Partial name of a pipeline in your paste configuration '
'file with the service name removed. For example, if '
'your paste section name is '
'[pipeline:searchlight-api-keystone] use the value '
'"keystone"')),
cfg.StrOpt('config_file',
help=_('Name of the paste configuration file.')),
]
common_opts = [
cfg.IntOpt('limit_param_default', default=25,
help=_('Default value for the number of items returned by a '
'request if not specified explicitly in the request')),
cfg.IntOpt('api_limit_max', default=1000,
help=_('Maximum permissible number of items that could be '
'returned by a request')),
cfg.StrOpt('pydev_worker_debug_host',
help=_('The hostname/IP of the pydev process listening for '
'debug connections')),
cfg.IntOpt('pydev_worker_debug_port', default=5678,
help=_('The port on which a pydev process is listening for '
'connections.')),
cfg.StrOpt('metadata_encryption_key', secret=True,
help=_('AES key for encrypting store \'location\' metadata. '
'This includes, if used, Swift or S3 credentials. '
'Should be set to a random string of length 16, 24 or '
'32 bytes')),
    cfg.StrOpt('digest_algorithm', default='sha1',
               help=_('Digest algorithm which will be used for digital '
                      'signature. The default is sha1, kept from Kilo to '
                      'allow a smooth upgrade process; it will be changed '
                      'to sha256 in the next release (L). Use the command '
                      '"openssl list-message-digest-algorithms" to get '
                      'the available algorithms supported by the version '
                      'of OpenSSL on the platform. Examples are "sha1", '
                      '"sha256", "sha512", etc.')),
]
CONF = cfg.CONF
CONF.register_opts(paste_deploy_opts, group='paste_deploy')
CONF.register_opts(common_opts)
policy.Enforcer(CONF)
def parse_args(args=None, usage=None, default_config_files=None):
if "OSLO_LOCK_PATH" not in os.environ:
lockutils.set_defaults(tempfile.gettempdir())
CONF(args=args,
project='searchlight',
version=version.cached_version_string(),
usage=usage,
default_config_files=default_config_files)
def parse_cache_args(args=None):
config_files = cfg.find_config_files(project='searchlight',
prog='searchlight-cache')
parse_args(args=args, default_config_files=config_files)
def _get_deployment_flavor(flavor=None):
"""
Retrieve the paste_deploy.flavor config item, formatted appropriately
for appending to the application name.
:param flavor: if specified, use this setting rather than the
paste_deploy.flavor configuration setting
"""
if not flavor:
flavor = CONF.paste_deploy.flavor
return '' if not flavor else ('-' + flavor)
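# Illustrative example (not part of the original module): with flavor
# 'keystone' this returns '-keystone', which load_paste_app() appends to
# an app name such as 'searchlight-api' to select the
# '[pipeline:searchlight-api-keystone]' section of the paste config.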
def _get_paste_config_path():
paste_suffix = '-paste.ini'
conf_suffix = '.conf'
if CONF.config_file:
# Assume paste config is in a paste.ini file corresponding
# to the last config file
path = CONF.config_file[-1].replace(conf_suffix, paste_suffix)
else:
path = CONF.prog + paste_suffix
return CONF.find_file(os.path.basename(path))
def _get_deployment_config_file():
"""
Retrieve the deployment_config_file config item, formatted as an
absolute pathname.
"""
path = CONF.paste_deploy.config_file
if not path:
path = _get_paste_config_path()
if not path:
msg = _("Unable to locate paste config file for %s.") % CONF.prog
raise RuntimeError(msg)
return os.path.abspath(path)
def load_paste_app(app_name, flavor=None, conf_file=None):
"""
Builds and returns a WSGI app from a paste config file.
We assume the last config file specified in the supplied ConfigOpts
object is the paste config file, if conf_file is None.
:param app_name: name of the application to load
:param flavor: name of the variant of the application to load
:param conf_file: path to the paste config file
:raises RuntimeError when config file cannot be located or application
cannot be loaded from config file
"""
# append the deployment flavor to the application name,
# in order to identify the appropriate paste pipeline
app_name += _get_deployment_flavor(flavor)
if not conf_file:
conf_file = _get_deployment_config_file()
try:
logger = logging.getLogger(__name__)
logger.debug("Loading %(app_name)s from %(conf_file)s",
{'conf_file': conf_file, 'app_name': app_name})
app = deploy.loadapp("config:%s" % conf_file, name=app_name)
# Log the options used when starting if we're in debug mode...
if CONF.debug:
CONF.log_opt_values(logger, logging.DEBUG)
return app
except (LookupError, ImportError) as e:
msg = (_("Unable to load %(app_name)s from "
"configuration file %(conf_file)s."
"\nGot: %(e)r") % {'app_name': app_name,
'conf_file': conf_file,
'e': e})
logger.error(msg)
raise RuntimeError(msg)
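# Minimal usage sketch (hypothetical values, mirroring the flavor
# mechanics above):
#   app = load_paste_app('searchlight-api', flavor='keystone')
# loads the '[pipeline:searchlight-api-keystone]' pipeline from the paste
# config file located via _get_deployment_config_file().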
|
fiuba08/robotframework
|
refs/heads/master
|
src/robot/running/baselibrary.py
|
6
|
# Copyright 2008-2014 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot.errors import DataError
class BaseLibrary:
def get_handler(self, name):
try:
return self.handlers[name]
except KeyError:
raise DataError("No keyword handler with name '%s' found" % name)
def has_handler(self, name):
return name in self.handlers
def __len__(self):
return len(self.handlers)
|
slisson/intellij-community
|
refs/heads/master
|
python/lib/Lib/xml/Uri.py
|
109
|
# pylint: disable-msg=C0103
#
# backported code from 4Suite with slight modifications, started from r1.89 of
# Ft/Lib/Uri.py, by syt@logilab.fr on 2005-02-09
#
# part if not all of this code should probably move to urlparse (or be used
# to fix some existant functions in this module)
#
#
# Copyright 2004 Fourthought, Inc. (USA).
# Detailed license and copyright information: http://4suite.org/COPYRIGHT
# Project home, documentation, distributions: http://4suite.org/
import os.path
import sys
import re
import urlparse, urllib, urllib2
def UnsplitUriRef(uriRefSeq):
"""should replace urlparse.urlunsplit
Given a sequence as would be produced by SplitUriRef(), assembles and
returns a URI reference as a string.
"""
if not isinstance(uriRefSeq, (tuple, list)):
raise TypeError("sequence expected, got %s" % type(uriRefSeq))
(scheme, authority, path, query, fragment) = uriRefSeq
uri = ''
if scheme is not None:
uri += scheme + ':'
if authority is not None:
uri += '//' + authority
uri += path
if query is not None:
uri += '?' + query
if fragment is not None:
uri += '#' + fragment
return uri
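# Illustrative doctest-style example (not part of the original module):
#   >>> UnsplitUriRef(('http', 'example.com', '/a/b', 'x=1', 'frag'))
#   'http://example.com/a/b?x=1#frag'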
SPLIT_URI_REF_PATTERN = re.compile(r"^(?:(?P<scheme>[^:/?#]+):)?(?://(?P<authority>[^/?#]*))?(?P<path>[^?#]*)(?:\?(?P<query>[^#]*))?(?:#(?P<fragment>.*))?$")
def SplitUriRef(uriref):
"""should replace urlparse.urlsplit
Given a valid URI reference as a string, returns a tuple representing the
generic URI components, as per RFC 2396 appendix B. The tuple's structure
is (scheme, authority, path, query, fragment).
All values will be strings (possibly empty) or None if undefined.
Note that per rfc3986, there is no distinction between a path and
an "opaque part", as there was in RFC 2396.
"""
# the pattern will match every possible string, so it's safe to
# assume there's a groupdict method to call.
g = SPLIT_URI_REF_PATTERN.match(uriref).groupdict()
scheme = g['scheme']
authority = g['authority']
path = g['path']
query = g['query']
fragment = g['fragment']
return (scheme, authority, path, query, fragment)
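# Illustrative doctest-style example (not part of the original module):
#   >>> SplitUriRef('http://example.com/a/b?x=1#frag')
#   ('http', 'example.com', '/a/b', 'x=1', 'frag')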
def Absolutize(uriRef, baseUri):
"""
Resolves a URI reference to absolute form, effecting the result of RFC
3986 section 5. The URI reference is considered to be relative to the
given base URI.
It is the caller's responsibility to ensure that the base URI matches
the absolute-URI syntax rule of RFC 3986, and that its path component
does not contain '.' or '..' segments if the scheme is hierarchical.
Unexpected results may occur otherwise.
This function only conducts a minimal sanity check in order to determine
if relative resolution is possible: it raises a UriException if the base
URI does not have a scheme component. While it is true that the base URI
is irrelevant if the URI reference has a scheme, an exception is raised
in order to signal that the given string does not even come close to
meeting the criteria to be usable as a base URI.
It is the caller's responsibility to make a determination of whether the
URI reference constitutes a "same-document reference", as defined in RFC
2396 or RFC 3986. As per the spec, dereferencing a same-document
reference "should not" involve retrieval of a new representation of the
referenced resource. Note that the two specs have different definitions
of same-document reference: RFC 2396 says it is *only* the cases where the
reference is the empty string, or "#" followed by a fragment; RFC 3986
requires making a comparison of the base URI to the absolute form of the
reference (as is returned by the spec), minus its fragment component,
if any.
This function is similar to urlparse.urljoin() and urllib.basejoin().
Those functions, however, are (as of Python 2.3) outdated, buggy, and/or
designed to produce results acceptable for use with other core Python
libraries, rather than being earnest implementations of the relevant
specs. Their problems are most noticeable in their handling of
same-document references and 'file:' URIs, both being situations that
come up far too often to consider the functions reliable enough for
general use.
"""
# Reasons to avoid using urllib.basejoin() and urlparse.urljoin():
# - Both are partial implementations of long-obsolete specs.
# - Both accept relative URLs as the base, which no spec allows.
# - urllib.basejoin() mishandles the '' and '..' references.
# - If the base URL uses a non-hierarchical or relative path,
# or if the URL scheme is unrecognized, the result is not
# always as expected (partly due to issues in RFC 1808).
# - If the authority component of a 'file' URI is empty,
# the authority component is removed altogether. If it was
# not present, an empty authority component is in the result.
# - '.' and '..' segments are not always collapsed as well as they
# should be (partly due to issues in RFC 1808).
# - Effective Python 2.4, urllib.basejoin() *is* urlparse.urljoin(),
# but urlparse.urljoin() is still based on RFC 1808.
# This procedure is based on the pseudocode in RFC 3986 sec. 5.2.
#
# ensure base URI is absolute
if not baseUri:
raise ValueError('baseUri is required and must be a non empty string')
if not IsAbsolute(baseUri):
raise ValueError('%r is not an absolute URI' % baseUri)
# shortcut for the simplest same-document reference cases
if uriRef == '' or uriRef[0] == '#':
return baseUri.split('#')[0] + uriRef
# ensure a clean slate
tScheme = tAuth = tPath = tQuery = None
# parse the reference into its components
(rScheme, rAuth, rPath, rQuery, rFrag) = SplitUriRef(uriRef)
# if the reference is absolute, eliminate '.' and '..' path segments
# and skip to the end
if rScheme is not None:
tScheme = rScheme
tAuth = rAuth
tPath = RemoveDotSegments(rPath)
tQuery = rQuery
else:
# the base URI's scheme, and possibly more, will be inherited
(bScheme, bAuth, bPath, bQuery, bFrag) = SplitUriRef(baseUri)
# if the reference is a net-path, just eliminate '.' and '..' path
# segments; no other changes needed.
if rAuth is not None:
tAuth = rAuth
tPath = RemoveDotSegments(rPath)
tQuery = rQuery
# if it's not a net-path, we need to inherit pieces of the base URI
else:
# use base URI's path if the reference's path is empty
if not rPath:
tPath = bPath
# use the reference's query, if any, or else the base URI's,
                tQuery = rQuery if rQuery is not None else bQuery
# the reference's path is not empty
else:
# just use the reference's path if it's absolute
if rPath[0] == '/':
tPath = RemoveDotSegments(rPath)
# merge the reference's relative path with the base URI's path
else:
if bAuth is not None and not bPath:
tPath = '/' + rPath
else:
tPath = bPath[:bPath.rfind('/')+1] + rPath
tPath = RemoveDotSegments(tPath)
# use the reference's query
tQuery = rQuery
# since the reference isn't a net-path,
# use the authority from the base URI
tAuth = bAuth
# inherit the scheme from the base URI
tScheme = bScheme
# always use the reference's fragment (but no need to define another var)
#tFrag = rFrag
# now compose the target URI (RFC 3986 sec. 5.3)
return UnsplitUriRef((tScheme, tAuth, tPath, tQuery, rFrag))
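# Illustrative doctest-style examples (not part of the original module),
# following the resolution rules of RFC 3986 sec. 5:
#   >>> Absolutize('../b', 'http://example.com/a/c')
#   'http://example.com/b'
#   >>> Absolutize('#s', 'http://example.com/a')
#   'http://example.com/a#s'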
REG_NAME_HOST_PATTERN = re.compile(r"^(?:(?:[0-9A-Za-z\-_\.!~*'();&=+$,]|(?:%[0-9A-Fa-f]{2}))*)$")
def MakeUrllibSafe(uriRef):
"""
Makes the given RFC 3986-conformant URI reference safe for passing
to legacy urllib functions. The result may not be a valid URI.
As of Python 2.3.3, urllib.urlopen() does not fully support
internationalized domain names, it does not strip fragment components,
and on Windows, it expects file URIs to use '|' instead of ':' in the
path component corresponding to the drivespec. It also relies on
urllib.unquote(), which mishandles unicode arguments. This function
produces a URI reference that will work around these issues, although
the IDN workaround is limited to Python 2.3 only. May raise a
UnicodeEncodeError if the URI reference is Unicode and erroneously
contains non-ASCII characters.
"""
# IDN support requires decoding any percent-encoded octets in the
# host part (if it's a reg-name) of the authority component, and when
# doing DNS lookups, applying IDNA encoding to that string first.
# As of Python 2.3, there is an IDNA codec, and the socket and httplib
# modules accept Unicode strings and apply IDNA encoding automatically
# where necessary. However, urllib.urlopen() has not yet been updated
# to do the same; it raises an exception if you give it a Unicode
# string, and does no conversion on non-Unicode strings, meaning you
# have to give it an IDNA string yourself. We will only support it on
# Python 2.3 and up.
#
# see if host is a reg-name, as opposed to IPv4 or IPv6 addr.
if isinstance(uriRef, unicode):
try:
uriRef = uriRef.encode('us-ascii') # parts of urllib are not unicode safe
except UnicodeError:
raise ValueError("uri %r must consist of ASCII characters." % uriRef)
(scheme, auth, path, query, frag) = urlparse.urlsplit(uriRef)
if auth and auth.find('@') > -1:
userinfo, hostport = auth.split('@')
else:
userinfo = None
hostport = auth
if hostport and hostport.find(':') > -1:
host, port = hostport.split(':')
else:
host = hostport
port = None
if host and REG_NAME_HOST_PATTERN.match(host):
# percent-encoded hostnames will always fail DNS lookups
host = urllib.unquote(host) #PercentDecode(host)
# IDNA-encode if possible.
# We shouldn't do this for schemes that don't need DNS lookup,
# but are there any (that you'd be calling urlopen for)?
if sys.version_info[0:2] >= (2, 3):
if isinstance(host, str):
host = host.decode('utf-8')
host = host.encode('idna')
# reassemble the authority with the new hostname
# (percent-decoded, and possibly IDNA-encoded)
auth = ''
if userinfo:
auth += userinfo + '@'
auth += host
if port:
auth += ':' + port
# On Windows, ensure that '|', not ':', is used in a drivespec.
if os.name == 'nt' and scheme == 'file':
path = path.replace(':', '|', 1)
# Note that we drop fragment, if any. See RFC 3986 sec. 3.5.
uri = urlparse.urlunsplit((scheme, auth, path, query, None))
return uri
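# Illustrative, platform-dependent example (not part of the original
# module): on Windows (os.name == 'nt'), the drivespec colon becomes '|'
# and the fragment is dropped:
#   >>> MakeUrllibSafe(u'file:///C:/temp/x.txt#frag')
#   'file:///C|/temp/x.txt'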
def BaseJoin(base, uriRef):
"""
Merges a base URI reference with another URI reference, returning a
new URI reference.
It behaves exactly the same as Absolutize(), except the arguments
are reversed, and it accepts any URI reference (even a relative URI)
as the base URI. If the base has no scheme component, it is
evaluated as if it did, and then the scheme component of the result
is removed from the result, unless the uriRef had a scheme. Thus, if
neither argument has a scheme component, the result won't have one.
This function is named BaseJoin because it is very much like
urllib.basejoin(), but it follows the current rfc3986 algorithms
for path merging, dot segment elimination, and inheritance of query
and fragment components.
WARNING: This function exists for 2 reasons: (1) because of a need
within the 4Suite repository to perform URI reference absolutization
using base URIs that are stored (inappropriately) as absolute paths
in the subjects of statements in the RDF model, and (2) because of
a similar need to interpret relative repo paths in a 4Suite product
setup.xml file as being relative to a path that can be set outside
the document. When these needs go away, this function probably will,
too, so it is not advisable to use it.
"""
if IsAbsolute(base):
return Absolutize(uriRef, base)
else:
dummyscheme = 'basejoin'
res = Absolutize(uriRef, '%s:%s' % (dummyscheme, base))
if IsAbsolute(uriRef):
# scheme will be inherited from uriRef
return res
else:
# no scheme in, no scheme out
return res[len(dummyscheme)+1:]
def RemoveDotSegments(path):
"""
Supports Absolutize() by implementing the remove_dot_segments function
described in RFC 3986 sec. 5.2. It collapses most of the '.' and '..'
segments out of a path without eliminating empty segments. It is intended
to be used during the path merging process and may not give expected
results when used independently. Use NormalizePathSegments() or
NormalizePathSegmentsInUri() if more general normalization is desired.
    It is semi-private because it is not for general use. I've implemented it
using two segment stacks, as alluded to in the spec, rather than the
explicit string-walking algorithm that would be too inefficient. (mbrown)
"""
# return empty string if entire path is just "." or ".."
if path == '.' or path == '..':
return path[0:0] # preserves string type
# remove all "./" or "../" segments at the beginning
while path:
if path[:2] == './':
path = path[2:]
elif path[:3] == '../':
path = path[3:]
else:
break
# We need to keep track of whether there was a leading slash,
# because we're going to drop it in order to prevent our list of
# segments from having an ambiguous empty first item when we call
# split().
leading_slash = 0
if path[:1] == '/':
path = path[1:]
leading_slash = 1
# replace a trailing "/." with just "/"
if path[-2:] == '/.':
path = path[:-1]
# convert the segments into a list and process each segment in
# order from left to right.
segments = path.split('/')
keepers = []
segments.reverse()
while segments:
seg = segments.pop()
# '..' means drop the previous kept segment, if any.
# If none, and if the path is relative, then keep the '..'.
# If the '..' was the last segment, ensure
# that the result ends with '/'.
if seg == '..':
if keepers:
keepers.pop()
elif not leading_slash:
keepers.append(seg)
if not segments:
keepers.append('')
# ignore '.' segments and keep all others, even empty ones
elif seg != '.':
keepers.append(seg)
# reassemble the kept segments
return leading_slash * '/' + '/'.join(keepers)
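# Illustrative doctest-style examples (not part of the original module),
# matching the RFC 3986 sec. 5.2.4 examples:
#   >>> RemoveDotSegments('/a/b/c/./../../g')
#   '/a/g'
#   >>> RemoveDotSegments('mid/content=5/../6')
#   'mid/6'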
SCHEME_PATTERN = re.compile(r'([a-zA-Z][a-zA-Z0-9+\-.]*):')
def GetScheme(uriRef):
"""
Obtains, with optimum efficiency, just the scheme from a URI reference.
Returns a string, or if no scheme could be found, returns None.
"""
# Using a regex seems to be the best option. Called 50,000 times on
# different URIs, on a 1.0-GHz PIII with FreeBSD 4.7 and Python
# 2.2.1, this method completed in 0.95s, and 0.05s if there was no
# scheme to find. By comparison,
# urllib.splittype()[0] took 1.5s always;
# Ft.Lib.Uri.SplitUriRef()[0] took 2.5s always;
# urlparse.urlparse()[0] took 3.5s always.
m = SCHEME_PATTERN.match(uriRef)
if m is None:
return None
else:
return m.group(1)
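# Illustrative doctest-style examples (not part of the original module):
#   >>> GetScheme('http://example.com/')
#   'http'
#   >>> GetScheme('/relative/path') is None
#   True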
def IsAbsolute(identifier):
"""
Given a string believed to be a URI or URI reference, tests that it is
absolute (as per RFC 2396), not relative -- i.e., that it has a scheme.
"""
# We do it this way to avoid compiling another massive regex.
return GetScheme(identifier) is not None
|
weisongchen/flaskapp
|
refs/heads/master
|
venv/lib/python2.7/site-packages/sqlalchemy/orm/path_registry.py
|
5
|
# orm/path_registry.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Path tracking utilities, representing mapper graph traversals.
"""
from .. import inspection
from .. import util
from .. import exc
from itertools import chain
from .base import class_mapper
import logging
log = logging.getLogger(__name__)
def _unreduce_path(path):
return PathRegistry.deserialize(path)
_WILDCARD_TOKEN = "*"
_DEFAULT_TOKEN = "_sa_default"
class PathRegistry(object):
"""Represent query load paths and registry functions.
Basically represents structures like:
(<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
These structures are generated by things like
query options (joinedload(), subqueryload(), etc.) and are
used to compose keys stored in the query._attributes dictionary
for various options.
They are then re-composed at query compile/result row time as
the query is formed and as rows are fetched, where they again
serve to compose keys to look up options in the context.attributes
dictionary, which is copied from query._attributes.
The path structure has a limited amount of caching, where each
"root" ultimately pulls from a fixed registry associated with
the first mapper, that also contains elements for each of its
property keys. However paths longer than two elements, which
are the exception rather than the rule, are generated on an
as-needed basis.
"""
is_token = False
is_root = False
def __eq__(self, other):
return other is not None and \
self.path == other.path
def set(self, attributes, key, value):
log.debug("set '%s' on path '%s' to '%s'", key, self, value)
attributes[(key, self.path)] = value
def setdefault(self, attributes, key, value):
log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value)
attributes.setdefault((key, self.path), value)
def get(self, attributes, key, value=None):
key = (key, self.path)
if key in attributes:
return attributes[key]
else:
return value
def __len__(self):
return len(self.path)
@property
def length(self):
return len(self.path)
def pairs(self):
path = self.path
for i in range(0, len(path), 2):
yield path[i], path[i + 1]
def contains_mapper(self, mapper):
for path_mapper in [
self.path[i] for i in range(0, len(self.path), 2)
]:
if path_mapper.is_mapper and \
path_mapper.isa(mapper):
return True
else:
return False
def contains(self, attributes, key):
return (key, self.path) in attributes
def __reduce__(self):
return _unreduce_path, (self.serialize(), )
def serialize(self):
path = self.path
return list(zip(
[m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
[path[i].key for i in range(1, len(path), 2)] + [None]
))
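    # Illustrative sketch (hypothetical User/Order mappers, not from the
    # original source): a path such as
    #   (<User mapper>, <'orders' property>, <Order mapper>)
    # serializes to [(User, 'orders'), (Order, None)]; deserialize() below
    # rebuilds the mapper/property objects from it via class_mapper().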
@classmethod
def deserialize(cls, path):
if path is None:
return None
p = tuple(chain(*[(class_mapper(mcls),
class_mapper(mcls).attrs[key]
if key is not None else None)
for mcls, key in path]))
if p and p[-1] is None:
p = p[0:-1]
return cls.coerce(p)
@classmethod
def per_mapper(cls, mapper):
return EntityRegistry(
cls.root, mapper
)
@classmethod
def coerce(cls, raw):
return util.reduce(lambda prev, next: prev[next], raw, cls.root)
def token(self, token):
if token.endswith(':' + _WILDCARD_TOKEN):
return TokenRegistry(self, token)
elif token.endswith(":" + _DEFAULT_TOKEN):
return TokenRegistry(self.root, token)
else:
raise exc.ArgumentError("invalid token: %s" % token)
def __add__(self, other):
return util.reduce(
lambda prev, next: prev[next],
other.path, self)
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.path, )
class RootRegistry(PathRegistry):
"""Root registry, defers to mappers so that
paths are maintained per-root-mapper.
"""
path = ()
has_entity = False
is_aliased_class = False
is_root = True
def __getitem__(self, entity):
return entity._path_registry
PathRegistry.root = RootRegistry()
class TokenRegistry(PathRegistry):
def __init__(self, parent, token):
self.token = token
self.parent = parent
self.path = parent.path + (token,)
has_entity = False
is_token = True
def generate_for_superclasses(self):
if not self.parent.is_aliased_class and not self.parent.is_root:
for ent in self.parent.mapper.iterate_to_root():
yield TokenRegistry(self.parent.parent[ent], self.token)
else:
yield self
def __getitem__(self, entity):
raise NotImplementedError()
class PropRegistry(PathRegistry):
def __init__(self, parent, prop):
# restate this path in terms of the
# given MapperProperty's parent.
insp = inspection.inspect(parent[-1])
if not insp.is_aliased_class or insp._use_mapper_path:
parent = parent.parent[prop.parent]
elif insp.is_aliased_class and insp.with_polymorphic_mappers:
if prop.parent is not insp.mapper and \
prop.parent in insp.with_polymorphic_mappers:
subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
parent = parent.parent[subclass_entity]
self.prop = prop
self.parent = parent
self.path = parent.path + (prop,)
def __str__(self):
return " -> ".join(
str(elem) for elem in self.path
)
@util.memoized_property
def has_entity(self):
return hasattr(self.prop, "mapper")
@util.memoized_property
def entity(self):
return self.prop.mapper
@util.memoized_property
def _wildcard_path_loader_key(self):
"""Given a path (mapper A, prop X), replace the prop with the wildcard,
e.g. (mapper A, 'relationship:.*') or (mapper A, 'column:.*'), then
return within the ("loader", path) structure.
"""
return ("loader",
self.parent.token(
"%s:%s" % (
self.prop.strategy_wildcard_key, _WILDCARD_TOKEN)
).path
)
@util.memoized_property
def _default_path_loader_key(self):
return ("loader",
self.parent.token(
"%s:%s" % (self.prop.strategy_wildcard_key,
_DEFAULT_TOKEN)
).path
)
@util.memoized_property
def _loader_key(self):
return ("loader", self.path)
@property
def mapper(self):
return self.entity
@property
def entity_path(self):
return self[self.entity]
def __getitem__(self, entity):
if isinstance(entity, (int, slice)):
return self.path[entity]
else:
return EntityRegistry(
self, entity
)
class EntityRegistry(PathRegistry, dict):
is_aliased_class = False
has_entity = True
def __init__(self, parent, entity):
self.key = entity
self.parent = parent
self.is_aliased_class = entity.is_aliased_class
self.entity = entity
self.path = parent.path + (entity,)
self.entity_path = self
@property
def mapper(self):
return inspection.inspect(self.entity).mapper
def __bool__(self):
return True
__nonzero__ = __bool__
def __getitem__(self, entity):
if isinstance(entity, (int, slice)):
return self.path[entity]
else:
return dict.__getitem__(self, entity)
def __missing__(self, key):
self[key] = item = PropRegistry(self, key)
return item
|
jboeuf/grpc
|
refs/heads/master
|
src/python/grpcio_testing/grpc_testing/_channel/_multi_callable.py
|
27
|
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc
from grpc_testing import _common
from grpc_testing._channel import _invocation
# All per-call credentials parameters are unused by this test infrastructure.
# pylint: disable=unused-argument
class UnaryUnary(grpc.UnaryUnaryMultiCallable):
def __init__(self, method_full_rpc_name, channel_handler):
self._method_full_rpc_name = method_full_rpc_name
self._channel_handler = channel_handler
def __call__(self, request, timeout=None, metadata=None, credentials=None):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name, _common.fuss_with_metadata(metadata),
[request], True, timeout)
return _invocation.blocking_unary_response(rpc_handler)
def with_call(self, request, timeout=None, metadata=None, credentials=None):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name, _common.fuss_with_metadata(metadata),
[request], True, timeout)
return _invocation.blocking_unary_response_with_call(rpc_handler)
def future(self, request, timeout=None, metadata=None, credentials=None):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name, _common.fuss_with_metadata(metadata),
[request], True, timeout)
return _invocation.future_call(rpc_handler)
class UnaryStream(grpc.UnaryStreamMultiCallable):
def __init__(self, method_full_rpc_name, channel_handler):
self._method_full_rpc_name = method_full_rpc_name
self._channel_handler = channel_handler
def __call__(self, request, timeout=None, metadata=None, credentials=None):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name, _common.fuss_with_metadata(metadata),
[request], True, timeout)
return _invocation.ResponseIteratorCall(rpc_handler)
class StreamUnary(grpc.StreamUnaryMultiCallable):
def __init__(self, method_full_rpc_name, channel_handler):
self._method_full_rpc_name = method_full_rpc_name
self._channel_handler = channel_handler
def __call__(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name, _common.fuss_with_metadata(metadata),
[], False, timeout)
_invocation.consume_requests(request_iterator, rpc_handler)
return _invocation.blocking_unary_response(rpc_handler)
def with_call(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name, _common.fuss_with_metadata(metadata),
[], False, timeout)
_invocation.consume_requests(request_iterator, rpc_handler)
return _invocation.blocking_unary_response_with_call(rpc_handler)
def future(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name, _common.fuss_with_metadata(metadata),
[], False, timeout)
_invocation.consume_requests(request_iterator, rpc_handler)
return _invocation.future_call(rpc_handler)
class StreamStream(grpc.StreamStreamMultiCallable):
def __init__(self, method_full_rpc_name, channel_handler):
self._method_full_rpc_name = method_full_rpc_name
self._channel_handler = channel_handler
def __call__(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
rpc_handler = self._channel_handler.invoke_rpc(
self._method_full_rpc_name, _common.fuss_with_metadata(metadata),
[], False, timeout)
_invocation.consume_requests(request_iterator, rpc_handler)
return _invocation.ResponseIteratorCall(rpc_handler)
# pylint: enable=unused-argument
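# --- Usage sketch (editor's addition, not part of grpc_testing) ---
# These multi-callables are normally obtained from a grpc_testing channel
# rather than constructed directly; the names below are illustrative only.
#
# multi_callable = UnaryUnary('/foo.Bar/UnaryCall', channel_handler)
# response = multi_callable(request, timeout=1)        # blocking unary call
# response, call = multi_callable.with_call(request)   # response plus call
# response_future = multi_callable.future(request)     # non-blocking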
|
smainand/scapy
|
refs/heads/master
|
scapy/layers/tls/crypto/prf.py
|
1
|
# This file is part of Scapy
# Copyright (C) 2007, 2008, 2009 Arnaud Ebalard
# 2015, 2016, 2017 Maxence Tury
# This program is published under a GPLv2 license
"""
TLS Pseudorandom Function.
"""
from __future__ import absolute_import
from scapy.error import warning
from scapy.utils import strxor
from scapy.layers.tls.crypto.hash import _tls_hash_algs
from scapy.layers.tls.crypto.h_mac import _tls_hmac_algs
from scapy.modules.six.moves import range
from scapy.compat import *
# Data expansion functions
def _tls_P_hash(secret, seed, req_len, hm):
"""
Provides the implementation of P_hash function defined in
section 5 of RFC 4346 (and section 5 of RFC 5246). Two
parameters have been added (hm and req_len):
- secret : the key to be used. If RFC 4868 is to be believed,
the length must match hm.key_len. Actually,
python hmac takes care of formatting every key.
- seed : the seed to be used.
- req_len : the length of data to be generated by iterating
the specific HMAC function (hm). This prevents
multiple calls to the function.
- hm : the hmac function class to use for iteration (either
Hmac_MD5 or Hmac_SHA1 in TLS <= 1.1 or
Hmac_SHA256 or Hmac_SHA384 in TLS 1.2)
"""
hash_len = hm.hash_alg.hash_len
n = (req_len + hash_len - 1) // hash_len
res = b""
a = hm(secret).digest(seed) # A(1)
while n > 0:
res += hm(secret).digest(a + raw(seed))
a = hm(secret).digest(a)
n -= 1
return res[:req_len]
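# --- Illustrative sketch (editor's addition, not part of Scapy) ---
# The same A(i) chaining written with only the standard library, assuming
# HMAC-SHA256; handy as an independent cross-check of _tls_P_hash.
import hashlib
import hmac as _std_hmac

def _p_sha256_reference(secret, seed, req_len):
    a = _std_hmac.new(secret, seed, hashlib.sha256).digest()  # A(1)
    out = b""
    while len(out) < req_len:
        out += _std_hmac.new(secret, a + seed, hashlib.sha256).digest()
        a = _std_hmac.new(secret, a, hashlib.sha256).digest()  # next A(i)
    return out[:req_len]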
def _tls_P_MD5(secret, seed, req_len):
return _tls_P_hash(secret, seed, req_len, _tls_hmac_algs["HMAC-MD5"])
def _tls_P_SHA1(secret, seed, req_len):
return _tls_P_hash(secret, seed, req_len, _tls_hmac_algs["HMAC-SHA"])
def _tls_P_SHA256(secret, seed, req_len):
return _tls_P_hash(secret, seed, req_len, _tls_hmac_algs["HMAC-SHA256"])
def _tls_P_SHA384(secret, seed, req_len):
return _tls_P_hash(secret, seed, req_len, _tls_hmac_algs["HMAC-SHA384"])
def _tls_P_SHA512(secret, seed, req_len):
return _tls_P_hash(secret, seed, req_len, _tls_hmac_algs["HMAC-SHA512"])
# PRF functions, according to the protocol version
def _sslv2_PRF(secret, seed, req_len):
hash_md5 = _tls_hash_algs["MD5"]()
rounds = (req_len + hash_md5.hash_len - 1) // hash_md5.hash_len
res = b""
if rounds == 1:
res += hash_md5.digest(secret + seed)
else:
r = 0
while r < rounds:
label = str(r).encode("utf8")
res += hash_md5.digest(secret + label + seed)
r += 1
return res[:req_len]
def _ssl_PRF(secret, seed, req_len):
"""
Provides the implementation of SSLv3 PRF function:
SSLv3-PRF(secret, seed) =
MD5(secret || SHA-1("A" || secret || seed)) ||
MD5(secret || SHA-1("BB" || secret || seed)) ||
MD5(secret || SHA-1("CCC" || secret || seed)) || ...
req_len should not be more than 26 x 16 = 416.
"""
if req_len > 416:
warning("_ssl_PRF() is not expected to provide more than 416 bytes")
return ""
d = [b"A", b"B", b"C", b"D", b"E", b"F", b"G", b"H", b"I", b"J", b"K", b"L", # noqa: E501
b"M", b"N", b"O", b"P", b"Q", b"R", b"S", b"T", b"U", b"V", b"W", b"X", # noqa: E501
b"Y", b"Z"]
res = b""
hash_sha1 = _tls_hash_algs["SHA"]()
hash_md5 = _tls_hash_algs["MD5"]()
rounds = (req_len + hash_md5.hash_len - 1) // hash_md5.hash_len
for i in range(rounds):
label = d[i] * (i + 1)
tmp = hash_sha1.digest(label + secret + seed)
res += hash_md5.digest(secret + tmp)
return res[:req_len]
def _tls_PRF(secret, label, seed, req_len):
"""
Provides the implementation of TLS PRF function as defined in
section 5 of RFC 4346:
PRF(secret, label, seed) = P_MD5(S1, label + seed) XOR
P_SHA-1(S2, label + seed)
Parameters are:
- secret: the secret used by the HMAC in the 2 expansion
functions (S1 and S2 are the halves of this secret).
- label: specific label as defined in various sections of the RFC
depending on the use of the generated PRF keystream
- seed: the seed used by the expansion functions.
- req_len: amount of keystream to be generated
"""
l = (len(secret) + 1) // 2
S1 = secret[:l]
S2 = secret[-l:]
a1 = _tls_P_MD5(S1, label + seed, req_len)
a2 = _tls_P_SHA1(S2, label + seed, req_len)
return strxor(a1, a2)
def _tls12_SHA256PRF(secret, label, seed, req_len):
"""
Provides the implementation of TLS 1.2 PRF function as
defined in section 5 of RFC 5246:
PRF(secret, label, seed) = P_SHA256(secret, label + seed)
Parameters are:
- secret: the secret used by the HMAC in the 2 expansion
functions (S1 and S2 are the halves of this secret).
- label: specific label as defined in various sections of the RFC
depending on the use of the generated PRF keystream
- seed: the seed used by the expansion functions.
- req_len: amount of keystream to be generated
"""
return _tls_P_SHA256(secret, label + seed, req_len)
def _tls12_SHA384PRF(secret, label, seed, req_len):
return _tls_P_SHA384(secret, label + seed, req_len)
def _tls12_SHA512PRF(secret, label, seed, req_len):
return _tls_P_SHA512(secret, label + seed, req_len)
class PRF(object):
"""
The PRF used by SSL/TLS varies based on the version of the protocol and
(for TLS 1.2) possibly the Hash algorithm of the negotiated cipher suite.
The various uses of the PRF (key derivation, computation of verify_data,
computation of pre_master_secret values) for the different versions of the
    protocol also change. To abstract those differences, the common
    PRF class is provided. It is expected to be initialised in the
    context of the connection state using the tls_version and the cipher suite.
"""
def __init__(self, hash_name="SHA256", tls_version=0x0303):
self.tls_version = tls_version
self.hash_name = hash_name
if tls_version < 0x0300: # SSLv2
self.prf = _sslv2_PRF
elif tls_version == 0x0300: # SSLv3
self.prf = _ssl_PRF
elif (tls_version == 0x0301 or # TLS 1.0
tls_version == 0x0302): # TLS 1.1
self.prf = _tls_PRF
elif tls_version == 0x0303: # TLS 1.2
if hash_name == "SHA384":
self.prf = _tls12_SHA384PRF
elif hash_name == "SHA512":
self.prf = _tls12_SHA512PRF
else:
self.prf = _tls12_SHA256PRF
else:
warning("Unknown TLS version")
def compute_master_secret(self, pre_master_secret,
client_random, server_random):
"""
Return the 48-byte master_secret, computed from pre_master_secret,
client_random and server_random. See RFC 5246, section 6.3.
"""
seed = client_random + server_random
if self.tls_version < 0x0300:
return None
elif self.tls_version == 0x0300:
return self.prf(pre_master_secret, seed, 48)
else:
return self.prf(pre_master_secret, b"master secret", seed, 48)
def derive_key_block(self, master_secret, server_random,
client_random, req_len):
"""
Perform the derivation of master_secret into a key_block of req_len
requested length. See RFC 5246, section 6.3.
"""
seed = server_random + client_random
if self.tls_version <= 0x0300:
return self.prf(master_secret, seed, req_len)
else:
return self.prf(master_secret, b"key expansion", seed, req_len)
def compute_verify_data(self, con_end, read_or_write,
handshake_msg, master_secret):
"""
Return verify_data based on handshake messages, connection end,
master secret, and read_or_write position. See RFC 5246, section 7.4.9.
Every TLS 1.2 cipher suite has a verify_data of length 12. Note also:
"This PRF with the SHA-256 hash function is used for all cipher
suites defined in this document and in TLS documents published
prior to this document when TLS 1.2 is negotiated."
Cipher suites using SHA-384 were defined later on.
"""
if self.tls_version < 0x0300:
return None
elif self.tls_version == 0x0300:
if read_or_write == "write":
d = {"client": b"CLNT", "server": b"SRVR"}
else:
d = {"client": b"SRVR", "server": b"CLNT"}
label = d[con_end]
sslv3_md5_pad1 = b"\x36" * 48
sslv3_md5_pad2 = b"\x5c" * 48
sslv3_sha1_pad1 = b"\x36" * 40
sslv3_sha1_pad2 = b"\x5c" * 40
md5 = _tls_hash_algs["MD5"]()
sha1 = _tls_hash_algs["SHA"]()
md5_hash = md5.digest(master_secret + sslv3_md5_pad2 +
md5.digest(handshake_msg + label +
master_secret + sslv3_md5_pad1))
sha1_hash = sha1.digest(master_secret + sslv3_sha1_pad2 +
sha1.digest(handshake_msg + label +
master_secret + sslv3_sha1_pad1)) # noqa: E501
verify_data = md5_hash + sha1_hash
else:
if read_or_write == "write":
d = {"client": "client", "server": "server"}
else:
d = {"client": "server", "server": "client"}
label = ("%s finished" % d[con_end]).encode()
if self.tls_version <= 0x0302:
s1 = _tls_hash_algs["MD5"]().digest(handshake_msg)
s2 = _tls_hash_algs["SHA"]().digest(handshake_msg)
verify_data = self.prf(master_secret, label, s1 + s2, 12)
else:
if self.hash_name in ["MD5", "SHA"]:
h = _tls_hash_algs["SHA256"]()
else:
h = _tls_hash_algs[self.hash_name]()
s = h.digest(handshake_msg)
verify_data = self.prf(master_secret, label, s, 12)
return verify_data
def postprocess_key_for_export(self, key, client_random, server_random,
con_end, read_or_write, req_len):
"""
        Postprocess the cipher key for an EXPORT ciphersuite, i.e. weaken it.
An export key generation example is given in section 6.3.1 of RFC 2246.
See also page 86 of EKR's book.
"""
s = con_end + read_or_write
s = (s == "clientwrite" or s == "serverread")
if self.tls_version < 0x0300:
return None
elif self.tls_version == 0x0300:
if s:
tbh = key + client_random + server_random
else:
tbh = key + server_random + client_random
export_key = _tls_hash_algs["MD5"]().digest(tbh)[:req_len]
else:
if s:
tag = b"client write key"
else:
tag = b"server write key"
export_key = self.prf(key,
tag,
client_random + server_random,
req_len)
return export_key
def generate_iv_for_export(self, client_random, server_random,
con_end, read_or_write, req_len):
"""
        Generate the IV for an EXPORT ciphersuite (derived from public values only).
An export IV generation example is given in section 6.3.1 of RFC 2246.
See also page 86 of EKR's book.
"""
s = con_end + read_or_write
s = (s == "clientwrite" or s == "serverread")
if self.tls_version < 0x0300:
return None
elif self.tls_version == 0x0300:
if s:
tbh = client_random + server_random
else:
tbh = server_random + client_random
iv = _tls_hash_algs["MD5"]().digest(tbh)[:req_len]
else:
iv_block = self.prf("",
b"IV block",
client_random + server_random,
2 * req_len)
if s:
iv = iv_block[:req_len]
else:
iv = iv_block[req_len:]
return iv
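# --- Usage sketch (editor's addition, not part of Scapy) ---
# Deriving a TLS 1.2 master_secret and key_block from toy values; the byte
# strings below are illustrative assumptions, not a real handshake.
if __name__ == "__main__":
    prf = PRF(hash_name="SHA256", tls_version=0x0303)
    pre_master_secret = b"\x03\x03" + b"\x00" * 46
    client_random, server_random = b"\x01" * 32, b"\x02" * 32
    ms = prf.compute_master_secret(pre_master_secret,
                                   client_random, server_random)
    assert len(ms) == 48
    # 104 bytes covers 2 MAC keys + 2 cipher keys + 2 IVs for an
    # AES-128-CBC / HMAC-SHA1 suite (20 + 20 + 16 + 16 + 16 + 16).
    key_block = prf.derive_key_block(ms, server_random, client_random, 104)
    assert len(key_block) == 104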
|
belokop/indico_bare
|
refs/heads/master
|
conftest.py
|
2
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
# Registered via entry point; see indico.testing.pytest_plugin
pytest_plugins = 'indico'
|
polimediaupv/edx-platform
|
refs/heads/master
|
common/djangoapps/third_party_auth/tests/__init__.py
|
12133432
| |
dvliman/jaikuengine
|
refs/heads/master
|
.google_appengine/lib/django_1_2/django/contrib/localflavor/de/__init__.py
|
12133432
| |
seem-sky/cloudtunes
|
refs/heads/master
|
cloudtunes-server/cloudtunes/services/youtube/__init__.py
|
12133432
| |
bnsantos/python-junk-code
|
refs/heads/master
|
algorithms/sort/selectionSort.py
|
1
|
__author__ = 'bruno'
def sort(unordered):
"""
worst case O(n^2)
"""
for i in range(len(unordered)):
min_index = i
for j in range(i, len(unordered)):
if unordered[j] < unordered[min_index]:
min_index = j
if min_index != i:
unordered[i], unordered[min_index] = unordered[min_index], unordered[i]
return unordered
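# --- Usage sketch (editor's addition) ---
if __name__ == '__main__':
    data = [5, 2, 9, 1, 5, 6]
    assert sort(data) == [1, 2, 5, 5, 6, 9]  # sorts in place and returns
    print(data)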
|
toshic/elliptics
|
refs/heads/master
|
example/remove_keys_from_storage_found_removed_in_eblob.py
|
4
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
sys.path.insert(0, "/usr/lib/")
sys.path.insert(0, "./.libs/")
sys.path.insert(0, "bindings/python/.libs/")
from libelliptics_python import *
import eblob
class remover:
def __init__(self, remotes=[], groups=[], log='/dev/stdout', mask=8, path=''):
self.log = elliptics_log_file(log, mask)
self.n = elliptics_node_python(self.log)
self.n.add_groups(groups)
self.n.add_remotes(remotes)
if len(self.n.get_routes()) == 0:
            raise NameError("Route table for groups " + str(groups) + " is empty")
b = eblob.blob(path)
for id in b.iterate(want_removed=True):
if b.removed():
for g in groups:
eid = elliptics_id(list(bytearray(id)), g, -1)
self.n.remove(eid, 0)
print "%s: flags: 0x%x, position: %d, data_size: %d" % \
(b.sid(count=64), b.flags, b.position, b.data_size)
if __name__ == '__main__':
        # This script runs over the index for the given blob, finds all removed
        # entries and removes them from the storage.
        # Remote addresses to connect to ('host:port:family' strings), used to
        # grab the route table.
remotes = [('elisto19f.dev:1025:2')]
# these groups
groups = [1, 2, 3]
# Path to blob to get objects from. Index file must be near with .index suffix
inpath='/opt/elliptics/eblob.2/data.0'
try:
remover(remotes=remotes, groups=groups, path=inpath)
except NameError as e:
print "Completed:", e
|
jeffreymingyue/ansible
|
refs/heads/devel
|
lib/ansible/inventory/host.py
|
45
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.inventory.group import Group
from ansible.utils.vars import combine_vars
__all__ = ['Host']
class Host:
''' a single ansible host '''
#__slots__ = [ 'name', 'vars', 'groups' ]
def __getstate__(self):
return self.serialize()
def __setstate__(self, data):
return self.deserialize(data)
def __eq__(self, other):
return self.name == other.name
def serialize(self):
groups = []
for group in self.groups:
groups.append(group.serialize())
return dict(
name=self.name,
vars=self.vars.copy(),
ipv4_address=self.ipv4_address,
ipv6_address=self.ipv6_address,
gathered_facts=self._gathered_facts,
groups=groups,
)
def deserialize(self, data):
self.__init__()
self.name = data.get('name')
self.vars = data.get('vars', dict())
self.ipv4_address = data.get('ipv4_address', '')
self.ipv6_address = data.get('ipv6_address', '')
groups = data.get('groups', [])
for group_data in groups:
g = Group()
g.deserialize(group_data)
self.groups.append(g)
def __init__(self, name=None, port=None):
self.name = name
self.vars = {}
self.groups = []
self.ipv4_address = name
self.ipv6_address = name
if port:
self.set_variable('ansible_ssh_port', int(port))
self._gathered_facts = False
def __repr__(self):
return self.get_name()
def get_name(self):
return self.name
@property
def gathered_facts(self):
return self._gathered_facts
def set_gathered_facts(self, gathered):
self._gathered_facts = gathered
def add_group(self, group):
self.groups.append(group)
def set_variable(self, key, value):
        self.vars[key] = value
def get_groups(self):
groups = {}
for g in self.groups:
groups[g.name] = g
ancestors = g.get_ancestors()
for a in ancestors:
groups[a.name] = a
return groups.values()
def get_vars(self):
results = {}
groups = self.get_groups()
for group in sorted(groups, key=lambda g: g.depth):
results = combine_vars(results, group.get_vars())
results = combine_vars(results, self.vars)
results['inventory_hostname'] = self.name
results['inventory_hostname_short'] = self.name.split('.')[0]
results['group_names'] = sorted([ g.name for g in groups if g.name != 'all'])
return results
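# --- Usage sketch (editor's addition, not part of Ansible) ---
# Building a host, attaching a group and resolving combined variables;
# assumes a Group instance as defined in ansible.inventory.group.
#
# host = Host(name='web1.example.com', port=2222)
# web = Group(name='webservers')
# host.add_group(web)
# host.set_variable('ansible_user', 'deploy')
# host_vars = host.get_vars()  # group vars merged by depth, host vars win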
|
Distrotech/intellij-community
|
refs/heads/master
|
python/testData/refactoring/extractsuperclass/moveExtends/source_module.py
|
80
|
import shared_module
class MyClass(shared_module.TheParentOfItAll):
pass
|
masayukig/tempest
|
refs/heads/master
|
tempest/tests/lib/services/identity/v3/__init__.py
|
12133432
| |
GitAngel/django
|
refs/heads/master
|
tests/delete_regress/__init__.py
|
12133432
| |
zerodb/zerodb
|
refs/heads/master
|
zerodb/permissions/__init__.py
|
12133432
| |
ajaali/django
|
refs/heads/master
|
django/conf/locale/cs/__init__.py
|
12133432
| |
SusanJL/iris
|
refs/heads/master
|
lib/iris/tests/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py
|
10
|
# (C) British Crown Copyright 2013 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Test area weighted regridding.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# import iris tests first so that some things can be initialised
# before importing anything else.
import iris.tests as tests
import copy
import random
import numpy as np
import numpy.ma as ma
from iris.experimental.regrid import \
regrid_area_weighted_rectilinear_src_and_grid as regrid_area_weighted
import iris.analysis._interpolation
import iris.tests.stock
# Run tests in no graphics mode if matplotlib is not available.
if tests.MPL_AVAILABLE:
import matplotlib.pyplot as plt
import iris.quickplot as qplt
RESULT_DIR = ('experimental', 'regrid',
'regrid_area_weighted_rectilinear_src_and_grid')
def _scaled_and_offset_grid(cube, x_scalefactor, y_scalefactor,
x_offset=0.0, y_offset=0.0):
"""
Return a cube with a horizontal grid that is scaled and offset
    from the horizontal grid of `cube`.
"""
x, y = iris.analysis._interpolation.get_xy_dim_coords(cube)
new_cube = cube.copy()
new_cube.replace_coord(x * x_scalefactor + x_offset)
new_cube.replace_coord(y * y_scalefactor + y_offset)
return new_cube
def _subsampled_coord(coord, subsamplefactor):
"""
Return a coordinate that is a subsampled copy of `coord`.
.. note:: `subsamplefactor` must be an integer >= 1.
"""
if not isinstance(subsamplefactor, int):
raise ValueError('subsamplefactor must be an integer.')
if subsamplefactor < 1:
raise ValueError('subsamplefactor must be >= 1.')
if not coord.has_bounds():
raise ValueError('The coordinate must have bounds.')
new_coord = coord[::subsamplefactor]
new_bounds = new_coord.bounds.copy()
new_bounds[:, 1] = coord.bounds[(subsamplefactor - 1)::subsamplefactor, 1]
new_bounds[-1, 1] = coord.bounds[-1, 1]
new_coord = coord.copy(points=new_coord.points, bounds=new_bounds)
return new_coord
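# Worked example (editor's addition): subsampling a coord whose bounds are
# [[0, 1], [1, 2], [2, 3], [3, 4]] by a factor of 2 keeps points 0 and 2 and
# merges the bounds into [[0, 2], [2, 4]], preserving the overall extent.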
def _subsampled_grid(cube, x_subsamplefactor, y_subsamplefactor):
"""
Return a cube that has a horizontal grid that is a subsampled
version of the horizontal grid of `cube`.
.. note:: The two subsamplefactors must both be integers >= 1.
.. note:: The data of the returned cube is populated with zeros.
"""
x, y = iris.analysis._interpolation.get_xy_dim_coords(cube)
x_dim = cube.coord_dims(x)[0]
y_dim = cube.coord_dims(y)[0]
new_x = _subsampled_coord(x, x_subsamplefactor)
new_y = _subsampled_coord(y, y_subsamplefactor)
new_shape = list(cube.shape)
new_shape[x_dim] = len(new_x.points)
new_shape[y_dim] = len(new_y.points)
new_data = np.zeros(new_shape)
new_cube = iris.cube.Cube(new_data)
new_cube.metadata = cube.metadata
new_cube.add_dim_coord(new_y, y_dim)
new_cube.add_dim_coord(new_x, x_dim)
return new_cube
def _resampled_coord(coord, samplefactor):
"""
Return a coordinate that has the same extent as `coord` but has
`samplefactor` times as many points and bounds.
"""
bounds = coord.bounds
lower = bounds[0, 0]
upper = bounds[-1, 1]
# Prevent fp-precision increasing the extent by "squeezing" the grid.
delta = 0.00001 * np.sign(upper - lower) * abs(bounds[0, 1] - bounds[0, 0])
lower = lower + delta
upper = upper - delta
new_points, step = np.linspace(lower, upper,
len(bounds) * samplefactor,
endpoint=False, retstep=True)
new_points += step * 0.5
new_coord = coord.copy(points=new_points)
new_coord.guess_bounds()
return new_coord
def _resampled_grid(cube, x_samplefactor, y_samplefactor):
"""
Return a cube that has the same horizontal extent as `cube` but has
a reduced (or increased) number of points (and bounds) along the X and Y
dimensions.
The resulting number of points for each dimension is determined by::
int(len(coord.points) * samplefactor)
This will be truncated if the result is not an integer.
.. note:: The data of the returned cube is populated with zeros.
"""
x, y = iris.analysis._interpolation.get_xy_dim_coords(cube)
x_dim = cube.coord_dims(x)[0]
y_dim = cube.coord_dims(y)[0]
new_x = _resampled_coord(x, x_samplefactor)
new_y = _resampled_coord(y, y_samplefactor)
new_shape = list(cube.shape)
new_shape[x_dim] = len(new_x.points)
new_shape[y_dim] = len(new_y.points)
new_data = np.zeros(new_shape)
new_cube = iris.cube.Cube(new_data)
new_cube.metadata = cube.metadata
new_cube.add_dim_coord(new_y, y_dim)
new_cube.add_dim_coord(new_x, x_dim)
return new_cube
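# Worked example (editor's addition): _resampled_grid(cube, 0.5, 1.0) on a
# (3, 4) cube keeps the horizontal extent but leaves int(4 * 0.5) = 2 cells
# along X, giving a (3, 2) cube of zeros with freshly guessed bounds.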
class TestAreaWeightedRegrid(tests.GraphicsTest):
def setUp(self):
# A cube with a hybrid height derived coordinate.
self.realistic_cube = iris.tests.stock.realistic_4d()[:2, :5, :20, :30]
# A simple (3, 4) cube.
self.simple_cube = iris.tests.stock.lat_lon_cube()
self.simple_cube.coord('latitude').guess_bounds(0.0)
self.simple_cube.coord('longitude').guess_bounds(0.0)
def test_no_bounds(self):
src = self.simple_cube.copy()
src.coord('latitude').bounds = None
dest = self.simple_cube.copy()
with self.assertRaises(ValueError):
regrid_area_weighted(src, dest)
src = self.simple_cube.copy()
src.coord('longitude').bounds = None
with self.assertRaises(ValueError):
regrid_area_weighted(src, dest)
src = self.simple_cube.copy()
dest = self.simple_cube.copy()
dest.coord('latitude').bounds = None
with self.assertRaises(ValueError):
regrid_area_weighted(src, dest)
dest = self.simple_cube.copy()
dest.coord('longitude').bounds = None
with self.assertRaises(ValueError):
regrid_area_weighted(src, dest)
def test_non_contiguous_bounds(self):
src = self.simple_cube.copy()
bounds = src.coord('latitude').bounds.copy()
bounds[1, 1] -= 0.1
src.coord('latitude').bounds = bounds
dest = self.simple_cube.copy()
with self.assertRaises(ValueError):
regrid_area_weighted(src, dest)
src = self.simple_cube.copy()
dest = self.simple_cube.copy()
bounds = dest.coord('longitude').bounds.copy()
bounds[1, 1] -= 0.1
dest.coord('longitude').bounds = bounds
with self.assertRaises(ValueError):
regrid_area_weighted(src, dest)
def test_missing_coords(self):
dest = self.simple_cube.copy()
# Missing src_x.
src = self.simple_cube.copy()
src.remove_coord('longitude')
with self.assertRaises(ValueError):
regrid_area_weighted(src, dest)
# Missing src_y.
src = self.simple_cube.copy()
src.remove_coord('latitude')
with self.assertRaises(ValueError):
regrid_area_weighted(src, dest)
# Missing dest_x.
src = self.simple_cube.copy()
dest = self.simple_cube.copy()
dest.remove_coord('longitude')
with self.assertRaises(ValueError):
regrid_area_weighted(src, dest)
# Missing dest_y.
src = self.simple_cube.copy()
dest = self.simple_cube.copy()
dest.remove_coord('latitude')
with self.assertRaises(ValueError):
regrid_area_weighted(src, dest)
def test_different_cs(self):
src = self.simple_cube.copy()
src_cs = copy.copy(src.coord('latitude').coord_system)
src_cs.semi_major_axis = 7000000
src.coord('longitude').coord_system = src_cs
src.coord('latitude').coord_system = src_cs
dest = self.simple_cube.copy()
dest_cs = copy.copy(src_cs)
dest_cs.semi_major_axis = 7000001
dest.coord('longitude').coord_system = dest_cs
dest.coord('latitude').coord_system = dest_cs
with self.assertRaises(ValueError):
regrid_area_weighted(src, dest)
def test_regrid_to_same_grid(self):
src = self.simple_cube
res = regrid_area_weighted(src, src)
self.assertEqual(res, src)
self.assertCMLApproxData(res, RESULT_DIR + ('simple.cml',))
def test_equal_area_numbers(self):
        # Remove coord system and units so the cube is no longer spherical.
self.simple_cube.coord('latitude').coord_system = None
self.simple_cube.coord('latitude').units = None
self.simple_cube.coord('longitude').coord_system = None
self.simple_cube.coord('longitude').units = None
# Reduce to a single cell
src = self.simple_cube.copy()
dest = _subsampled_grid(src, 4, 3)
res = regrid_area_weighted(src, dest)
expected_val = np.mean(src.data)
self.assertAlmostEqual(expected_val, res.data)
# Reduce to two cells along x
src = self.simple_cube.copy()
dest = _subsampled_grid(src, 2, 3)
res = regrid_area_weighted(src, dest)
expected_val_left = np.mean(src.data[:, 0:2])
self.assertEqual(expected_val_left, res.data[0])
expected_val_right = np.mean(src.data[:, 2:4])
self.assertAlmostEqual(expected_val_right, res.data[1])
# Reduce to two cells along x, one three times the size
# of the other.
src = self.simple_cube.copy()
dest = _subsampled_grid(src, 2, 3)
lon = dest.coord('longitude')
points = lon.points.copy()
bounds = [[-1, 0], [0, 3]]
lon = lon.copy(points=points, bounds=bounds)
dest.replace_coord(lon)
res = regrid_area_weighted(src, dest)
expected_val_left = np.mean(src.data[:, 0:1])
self.assertEqual(expected_val_left, res.data[0])
expected_val_right = np.mean(src.data[:, 1:4])
self.assertAlmostEqual(expected_val_right, res.data[1])
    def test_unequal_area_numbers(self):
        # Remove coord system and units so the cube is no longer spherical.
self.simple_cube.coord('latitude').coord_system = None
self.simple_cube.coord('latitude').units = None
self.simple_cube.coord('longitude').coord_system = None
self.simple_cube.coord('longitude').units = None
# Reduce src to two cells along x, one three times the size
# of the other.
src = self.simple_cube.copy()
src = _subsampled_grid(src, 2, 2)
lon = src.coord('longitude')
points = lon.points.copy()
bounds = [[-1, 0], [0, 3]]
lon = lon.copy(points=points, bounds=bounds)
src.replace_coord(lon)
# Reduce src to two cells along y, one 2 times the size
# of the other.
lat = src.coord('latitude')
points = lat.points.copy()
bounds = [[-1, 0], [0, 2]]
lat = lat.copy(points=points, bounds=bounds)
src.replace_coord(lat)
# Populate with data
src.data = np.arange(src.data.size).reshape(src.shape) + 1.23
# dest is a single cell over the whole area.
dest = _subsampled_grid(self.simple_cube, 4, 3)
res = regrid_area_weighted(src, dest)
expected_val = (1. / 12. * src.data[0, 0] +
2. / 12. * np.mean(src.data[1:, 0]) +
3. / 12. * np.mean(src.data[0, 1:]) +
6. / 12. * np.mean(src.data[1:, 1:]))
self.assertAlmostEqual(expected_val, res.data)
def test_regrid_latlon_reduced_res(self):
src = self.simple_cube
# Reduce from (3, 4) to (2, 2).
dest = _subsampled_grid(src, 2, 2)
res = regrid_area_weighted(src, dest)
self.assertCMLApproxData(res, RESULT_DIR + ('latlonreduced.cml',))
def test_regrid_transposed(self):
src = self.simple_cube.copy()
dest = _subsampled_grid(src, 2, 3)
# Transpose src so that the coords are not y, x ordered.
src.transpose()
res = regrid_area_weighted(src, dest)
self.assertCMLApproxData(res, RESULT_DIR + ('trasposed.cml',))
# Using original and transposing the result should give the
# same answer.
src = self.simple_cube.copy()
res = regrid_area_weighted(src, dest)
res.transpose()
self.assertCMLApproxData(res, RESULT_DIR + ('trasposed.cml',))
def test_regrid_lon_to_half_res(self):
src = self.simple_cube
dest = _resampled_grid(src, 0.5, 1.0)
res = regrid_area_weighted(src, dest)
self.assertCMLApproxData(res, RESULT_DIR + ('lonhalved.cml',))
def test_regrid_to_non_int_frac(self):
# Create dest such that bounds do not line up
# with src: src.shape = (3, 4), dest.shape = (2, 3)
src = self.simple_cube
dest = _resampled_grid(src, 0.75, 0.67)
res = regrid_area_weighted(src, dest)
self.assertCMLApproxData(res, RESULT_DIR + ('lower.cml',))
def test_regrid_to_higher_res(self):
src = self.simple_cube
frac = 3.5
dest = _resampled_grid(src, frac, frac)
res = regrid_area_weighted(src, dest)
self.assertCMLApproxData(res, RESULT_DIR + ('higher.cml',))
@tests.skip_plot
def test_hybrid_height(self):
src = self.realistic_cube
dest = _resampled_grid(src, 0.7, 0.8)
res = regrid_area_weighted(src, dest)
self.assertCMLApproxData(res, RESULT_DIR + ('hybridheight.cml',))
# Consider a single slice to allow visual tests of altitudes.
src = src[1, 2]
res = res[1, 2]
qplt.pcolormesh(res)
self.check_graphic()
plt.contourf(res.coord('grid_longitude').points,
res.coord('grid_latitude').points,
res.coord('altitude').points)
self.check_graphic()
plt.contourf(res.coord('grid_longitude').points,
res.coord('grid_latitude').points,
res.coord('surface_altitude').points)
self.check_graphic()
def test_missing_data(self):
src = self.simple_cube.copy()
src.data = ma.masked_array(src.data)
src.data[1, 2] = ma.masked
dest = _resampled_grid(self.simple_cube, 2.3, 2.4)
res = regrid_area_weighted(src, dest)
mask = np.zeros((7, 9), bool)
mask[slice(2, 5), slice(4, 7)] = True
self.assertArrayEqual(res.data.mask, mask)
def test_no_x_overlap(self):
src = self.simple_cube
dest = _scaled_and_offset_grid(src, 1.0, 1.0,
(np.max(src.coord('longitude').bounds) -
np.min(src.coord('longitude').bounds)),
0.0)
res = regrid_area_weighted(src, dest)
self.assertTrue(res.data.mask.all())
def test_no_y_overlap(self):
src = self.simple_cube
dest = _scaled_and_offset_grid(src, 1.0, 1.0,
0.0,
(np.max(src.coord('latitude').bounds) -
np.min(src.coord('latitude').bounds)))
res = regrid_area_weighted(src, dest)
self.assertTrue(res.data.mask.all())
def test_scalar(self):
src = self.realistic_cube
i = 2
j = 3
dest = src[0, 0, i, j]
res = regrid_area_weighted(src, dest)
self.assertEqual(res, src[:, :, i, j])
def test_one_point(self):
src = self.simple_cube.copy()
for n in range(10):
i = random.randint(0, src.shape[0] - 1)
j = random.randint(0, src.shape[1] - 1)
indices = tuple([slice(i, i + 1), slice(j, j + 1)])
dest = src[indices]
res = regrid_area_weighted(src, dest)
            self.assertEqual(res, src[indices])
def test_ten_by_ten_subset(self):
src = _resampled_grid(self.simple_cube, 20, 20)
for n in range(10):
i = random.randint(0, src.shape[0] - 10)
j = random.randint(0, src.shape[1] - 10)
indices = tuple([slice(i, i + 10), slice(j, j + 10)])
dest = src[indices]
res = regrid_area_weighted(src, dest)
            self.assertEqual(res, src[indices])
@tests.skip_plot
def test_cross_section(self):
# Slice to get a cross section.
# Constant latitude
src = self.realistic_cube[0, :, 10, :]
lon = _resampled_coord(src.coord('grid_longitude'), 0.6)
shape = list(src.shape)
shape[1] = len(lon.points)
data = np.zeros(shape)
dest = iris.cube.Cube(data)
dest.add_dim_coord(lon, 1)
dest.add_aux_coord(src.coord('grid_latitude').copy(), None)
res = regrid_area_weighted(src, dest)
self.assertCMLApproxData(res, RESULT_DIR +
('const_lat_cross_section.cml',))
# Plot a single slice.
qplt.plot(res[0])
qplt.plot(src[0], 'r')
self.check_graphic()
# Constant longitude
src = self.realistic_cube[0, :, :, 10]
lat = _resampled_coord(src.coord('grid_latitude'), 0.6)
shape = list(src.shape)
shape[1] = len(lat.points)
data = np.zeros(shape)
dest = iris.cube.Cube(data)
dest.add_dim_coord(lat, 1)
dest.add_aux_coord(src.coord('grid_longitude').copy(), None)
res = regrid_area_weighted(src, dest)
self.assertCMLApproxData(res, RESULT_DIR +
('const_lon_cross_section.cml',))
# Plot a single slice.
qplt.plot(res[0])
qplt.plot(src[0], 'r')
self.check_graphic()
def test_scalar_source_cube(self):
src = self.simple_cube[1, 2]
# Extend dest beyond src grid
dest = src.copy()
dest.coord('latitude').bounds = np.array([[-0.5, 1.5]])
res = regrid_area_weighted(src, dest)
self.assertTrue(res.data.mask.all())
# Shrink dest to 1/4 of src
dest = src.copy()
dest.coord('latitude').bounds = np.array([[0.25, 0.75]])
dest.coord('longitude').bounds = np.array([[1.25, 1.75]])
res = regrid_area_weighted(src, dest)
self.assertEqual(res.data, src.data)
@tests.skip_data
@tests.skip_plot
def test_global_data_reduce_res(self):
src = iris.tests.stock.global_pp()
src.coord('latitude').guess_bounds()
src.coord('longitude').guess_bounds()
dest = _resampled_grid(src, 0.4, 0.3)
res = regrid_area_weighted(src, dest)
qplt.pcolormesh(res)
self.check_graphic()
@tests.skip_data
@tests.skip_plot
def test_global_data_increase_res(self):
src = iris.tests.stock.global_pp()
src.coord('latitude').guess_bounds()
src.coord('longitude').guess_bounds()
dest = _resampled_grid(src, 1.5, 1.5)
res = regrid_area_weighted(src, dest)
qplt.pcolormesh(res)
self.check_graphic()
@tests.skip_data
@tests.skip_plot
def test_global_data_same_res(self):
src = iris.tests.stock.global_pp()
src.coord('latitude').guess_bounds()
src.coord('longitude').guess_bounds()
res = regrid_area_weighted(src, src)
qplt.pcolormesh(res)
self.check_graphic()
@tests.skip_data
@tests.skip_plot
def test_global_data_subset(self):
src = iris.tests.stock.global_pp()
src.coord('latitude').guess_bounds()
src.coord('longitude').guess_bounds()
dest_lat = src.coord('latitude')[0:40]
dest_lon = iris.coords.DimCoord(np.linspace(-160, -70, 30),
standard_name='longitude',
units='degrees',
coord_system=dest_lat.coord_system)
        # Note: the target grid is in [-180, 180] while src is in [0, 360].
dest_lon.guess_bounds()
data = np.zeros((dest_lat.shape[0], dest_lon.shape[0]))
dest = iris.cube.Cube(data)
dest.add_dim_coord(dest_lat, 0)
dest.add_dim_coord(dest_lon, 1)
res = regrid_area_weighted(src, dest)
qplt.pcolormesh(res)
plt.gca().coastlines()
self.check_graphic()
@tests.skip_data
@tests.skip_plot
def test_circular_subset(self):
src = iris.tests.stock.global_pp()
src.coord('latitude').guess_bounds()
src.coord('longitude').guess_bounds()
dest_lat = src.coord('latitude')[0:40]
dest_lon = iris.coords.DimCoord([-15., -10., -5., 0., 5., 10., 15.],
standard_name='longitude',
units='degrees',
coord_system=dest_lat.coord_system)
        # Note: the target grid is in [-180, 180] while src is in [0, 360].
dest_lon.guess_bounds()
data = np.zeros((dest_lat.shape[0], dest_lon.shape[0]))
dest = iris.cube.Cube(data)
dest.add_dim_coord(dest_lat, 0)
dest.add_dim_coord(dest_lon, 1)
res = regrid_area_weighted(src, dest)
qplt.pcolormesh(res)
plt.gca().coastlines()
self.check_graphic()
@tests.skip_data
@tests.skip_plot
def test_non_circular_subset(self):
src = iris.tests.stock.global_pp()
src.coord('latitude').guess_bounds()
src.coord('longitude').guess_bounds()
src.coord('longitude').circular = False
dest_lat = src.coord('latitude')[0:40]
dest_lon = iris.coords.DimCoord([-15., -10., -5., 0., 5., 10., 15.],
standard_name='longitude',
units='degrees',
coord_system=dest_lat.coord_system)
        # Note: the target grid is in [-180, 180] while src is in [0, 360].
dest_lon.guess_bounds()
data = np.zeros((dest_lat.shape[0], dest_lon.shape[0]))
dest = iris.cube.Cube(data)
dest.add_dim_coord(dest_lat, 0)
dest.add_dim_coord(dest_lon, 1)
res = regrid_area_weighted(src, dest)
qplt.pcolormesh(res)
plt.gca().coastlines()
self.check_graphic()
if __name__ == "__main__":
tests.main()
|
rschnapka/odoo
|
refs/heads/7.0
|
addons/l10n_gr/__init__.py
|
438
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#import sandwich_wizard
#import order_create
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
wanghaven/nupic
|
refs/heads/master
|
external/linux32/lib/python2.6/site-packages/matplotlib/numerix/linear_algebra/__init__.py
|
70
|
from matplotlib.numerix import which
if which[0] == "numarray":
from numarray.linear_algebra import *
elif which[0] == "numeric":
from LinearAlgebra import *
elif which[0] == "numpy":
try:
from numpy.oldnumeric.linear_algebra import *
except ImportError:
from numpy.linalg.old import *
else:
raise RuntimeError("invalid numerix selector")
|
Edu-Glez/Bank_sentiment_analysis
|
refs/heads/master
|
env/lib/python3.6/site-packages/nbformat/tests/base.py
|
6
|
"""
Contains base test class for nbformat
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import unittest
import io
class TestsBase(unittest.TestCase):
"""Base tests class."""
def fopen(self, f, mode=u'r',encoding='utf-8'):
return io.open(os.path.join(self._get_files_path(), f), mode, encoding=encoding)
def _get_files_path(self):
return os.path.dirname(__file__)
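# --- Usage sketch (editor's addition) ---
# A subclass reading a fixture that lives next to this module; the file
# name below is an assumption for illustration.
#
# class TestExample(TestsBase):
#     def test_read(self):
#         with self.fopen(u'test4.ipynb') as f:
#             contents = f.read()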
|
mattseymour/django
|
refs/heads/master
|
django/utils/datetime_safe.py
|
100
|
# Python's datetime strftime doesn't handle dates before 1900.
# These classes override date and datetime to support the formatting of a date
# through its full "proleptic Gregorian" date range.
#
# Based on code submitted to comp.lang.python by Andrew Dalke
#
# >>> datetime_safe.date(1850, 8, 2).strftime("%Y/%m/%d was a %A")
# '1850/08/02 was a Friday'
import re
import time as ttime
from datetime import (
date as real_date, datetime as real_datetime, time as real_time,
)
class date(real_date):
def strftime(self, fmt):
return strftime(self, fmt)
class datetime(real_datetime):
def strftime(self, fmt):
return strftime(self, fmt)
@classmethod
def combine(cls, date, time):
return cls(date.year, date.month, date.day,
time.hour, time.minute, time.second,
time.microsecond, time.tzinfo)
def date(self):
return date(self.year, self.month, self.day)
class time(real_time):
pass
def new_date(d):
"Generate a safe date from a datetime.date object."
return date(d.year, d.month, d.day)
def new_datetime(d):
"""
Generate a safe datetime from a datetime.date or datetime.datetime object.
"""
kw = [d.year, d.month, d.day]
if isinstance(d, real_datetime):
kw.extend([d.hour, d.minute, d.second, d.microsecond, d.tzinfo])
return datetime(*kw)
# This library does not support strftime's "%s" or "%y" format strings.
# Allowed if there's an even number of "%"s because they are escaped.
_illegal_formatting = re.compile(r"((^|[^%])(%%)*%[sy])")
def _findall(text, substr):
# Also finds overlaps
sites = []
i = 0
while 1:
j = text.find(substr, i)
if j == -1:
break
sites.append(j)
i = j + 1
return sites
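# Worked example (editor's addition): overlaps are included, e.g.
# _findall('aaa', 'aa') -> [0, 1] and _findall('2000-2000', '2000') -> [0, 5].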
def strftime(dt, fmt):
if dt.year >= 1900:
return super(type(dt), dt).strftime(fmt)
illegal_formatting = _illegal_formatting.search(fmt)
if illegal_formatting:
raise TypeError("strftime of dates before 1900 does not handle" + illegal_formatting.group(0))
year = dt.year
# For every non-leap year century, advance by
# 6 years to get into the 28-year repeat cycle
delta = 2000 - year
off = 6 * (delta // 100 + delta // 400)
year = year + off
# Move to around the year 2000
year = year + ((2000 - year) // 28) * 28
timetuple = dt.timetuple()
s1 = ttime.strftime(fmt, (year,) + timetuple[1:])
sites1 = _findall(s1, str(year))
s2 = ttime.strftime(fmt, (year + 28,) + timetuple[1:])
sites2 = _findall(s2, str(year + 28))
sites = []
for site in sites1:
if site in sites2:
sites.append(site)
s = s1
syear = "%04d" % (dt.year,)
for site in sites:
s = s[:site] + syear + s[site + 4:]
return s
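# --- Usage sketch (editor's addition, not part of Django) ---
if __name__ == '__main__':
    # The module header's example, runnable: a pre-1900 date formatted via
    # the 28-year calendar-cycle remapping in strftime() above.
    assert date(1850, 8, 2).strftime('%Y/%m/%d was a %A') == '1850/08/02 was a Friday'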
|
bigdatauniversity/edx-platform
|
refs/heads/master
|
lms/djangoapps/courseware/tests/test_module_render.py
|
12
|
# -*- coding: utf-8 -*-
"""
Test for lms courseware app, module render unit
"""
import ddt
import itertools
import json
from nose.plugins.attrib import attr
from functools import partial
from bson import ObjectId
from django.http import Http404, HttpResponse
from django.core.urlresolvers import reverse
from django.conf import settings
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.contrib.auth.models import AnonymousUser
from mock import MagicMock, patch, Mock
from opaque_keys.edx.keys import UsageKey, CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from pyquery import PyQuery
from courseware.module_render import hash_resource
from xblock.field_data import FieldData
from xblock.runtime import Runtime
from xblock.fields import ScopeIds
from xblock.core import XBlock
from xblock.fragment import Fragment
from capa.tests.response_xml_factory import OptionResponseXMLFactory
from course_modes.models import CourseMode
from courseware import module_render as render
from courseware.courses import get_course_with_access, get_course_info_section
from courseware.field_overrides import OverrideFieldData
from courseware.model_data import FieldDataCache
from courseware.module_render import hash_resource, get_module_for_descriptor
from courseware.models import StudentModule
from courseware.tests.factories import StudentModuleFactory, UserFactory, GlobalStaffFactory
from courseware.tests.tests import LoginEnrollmentTestCase
from courseware.tests.test_submitting_problems import TestSubmittingProblems
from lms.djangoapps.lms_xblock.runtime import quote_slashes
from lms.djangoapps.lms_xblock.field_data import LmsFieldData
from openedx.core.lib.courses import course_image_url
from student.models import anonymous_id_for_user
from xmodule.modulestore.tests.django_utils import (
TEST_DATA_MIXED_TOY_MODULESTORE,
TEST_DATA_XML_MODULESTORE,
)
from xmodule.lti_module import LTIDescriptor
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import ItemFactory, CourseFactory, ToyCourseFactory, check_mongo_calls
from xmodule.x_module import XModuleDescriptor, XModule, STUDENT_VIEW, CombinedSystem
from openedx.core.djangoapps.credit.models import CreditCourse
from openedx.core.djangoapps.credit.api import (
set_credit_requirements,
set_credit_requirement_status
)
from edx_proctoring.api import (
create_exam,
create_exam_attempt,
update_attempt_status
)
from edx_proctoring.runtime import set_runtime_service
from edx_proctoring.tests.test_services import MockCreditService
TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT
@XBlock.needs("field-data")
@XBlock.needs("i18n")
@XBlock.needs("fs")
@XBlock.needs("user")
class PureXBlock(XBlock):
"""
Pure XBlock to use in tests.
"""
pass
class EmptyXModule(XModule): # pylint: disable=abstract-method
"""
Empty XModule for testing with no dependencies.
"""
pass
class EmptyXModuleDescriptor(XModuleDescriptor): # pylint: disable=abstract-method
"""
Empty XModule for testing with no dependencies.
"""
module_class = EmptyXModule
class GradedStatelessXBlock(XBlock):
"""
This XBlock exists to test grade storage for blocks that don't store
student state in a scoped field.
"""
@XBlock.json_handler
def set_score(self, json_data, suffix): # pylint: disable=unused-argument
"""
Set the score for this testing XBlock.
"""
self.runtime.publish(
self,
'grade',
{
'value': json_data['grade'],
'max_value': 1
}
)
@attr('shard_1')
@ddt.ddt
class ModuleRenderTestCase(ModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Tests of courseware.module_render
"""
# TODO: this test relies on the specific setup of the toy course.
# It should be rewritten to build the course it needs and then test that.
def setUp(self):
"""
Set up the course and user context
"""
super(ModuleRenderTestCase, self).setUp()
self.course_key = ToyCourseFactory.create().id
self.toy_course = modulestore().get_course(self.course_key)
self.mock_user = UserFactory()
self.mock_user.id = 1
self.request_factory = RequestFactory()
# Construct a mock module for the modulestore to return
self.mock_module = MagicMock()
self.mock_module.id = 1
self.dispatch = 'score_update'
# Construct a 'standard' xqueue_callback url
self.callback_url = reverse(
'xqueue_callback',
kwargs=dict(
course_id=self.course_key.to_deprecated_string(),
userid=str(self.mock_user.id),
mod_id=self.mock_module.id,
dispatch=self.dispatch
)
)
def test_get_module(self):
self.assertEqual(
None,
render.get_module('dummyuser', None, 'invalid location', None)
)
def test_module_render_with_jump_to_id(self):
"""
        This test validates that the /jump_to_id/<id> shorthand for intracourse linking works as
        expected. Note there's an HTML element in the 'toy' course with the url_name 'toyjumpto' which
        defines this linkage.
"""
mock_request = MagicMock()
mock_request.user = self.mock_user
course = get_course_with_access(self.mock_user, 'load', self.course_key)
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course_key, self.mock_user, course, depth=2)
module = render.get_module(
self.mock_user,
mock_request,
self.course_key.make_usage_key('html', 'toyjumpto'),
field_data_cache,
)
# get the rendered HTML output which should have the rewritten link
html = module.render(STUDENT_VIEW).content
# See if the url got rewritten to the target link
# note if the URL mapping changes then this assertion will break
self.assertIn('/courses/' + self.course_key.to_deprecated_string() + '/jump_to_id/vertical_test', html)
def test_xqueue_callback_success(self):
"""
Test for happy-path xqueue_callback
"""
fake_key = 'fake key'
xqueue_header = json.dumps({'lms_key': fake_key})
data = {
'xqueue_header': xqueue_header,
'xqueue_body': 'hello world',
}
# Patch getmodule to return our mock module
with patch('courseware.module_render.load_single_xblock', return_value=self.mock_module):
# call xqueue_callback with our mocked information
request = self.request_factory.post(self.callback_url, data)
render.xqueue_callback(
request,
unicode(self.course_key),
self.mock_user.id,
self.mock_module.id,
self.dispatch
)
# Verify that handle ajax is called with the correct data
request.POST['queuekey'] = fake_key
self.mock_module.handle_ajax.assert_called_once_with(self.dispatch, request.POST)
def test_xqueue_callback_missing_header_info(self):
data = {
'xqueue_header': '{}',
'xqueue_body': 'hello world',
}
with patch('courseware.module_render.load_single_xblock', return_value=self.mock_module):
# Test with missing xqueue data
with self.assertRaises(Http404):
request = self.request_factory.post(self.callback_url, {})
render.xqueue_callback(
request,
unicode(self.course_key),
self.mock_user.id,
self.mock_module.id,
self.dispatch
)
# Test with missing xqueue_header
with self.assertRaises(Http404):
request = self.request_factory.post(self.callback_url, data)
render.xqueue_callback(
request,
unicode(self.course_key),
self.mock_user.id,
self.mock_module.id,
self.dispatch
)
def test_get_score_bucket(self):
self.assertEquals(render.get_score_bucket(0, 10), 'incorrect')
self.assertEquals(render.get_score_bucket(1, 10), 'partial')
self.assertEquals(render.get_score_bucket(10, 10), 'correct')
# get_score_bucket calls error cases 'incorrect'
self.assertEquals(render.get_score_bucket(11, 10), 'incorrect')
self.assertEquals(render.get_score_bucket(-1, 10), 'incorrect')
def test_anonymous_handle_xblock_callback(self):
dispatch_url = reverse(
'xblock_handler',
args=[
self.course_key.to_deprecated_string(),
quote_slashes(self.course_key.make_usage_key('videosequence', 'Toy_Videos').to_deprecated_string()),
'xmodule_handler',
'goto_position'
]
)
response = self.client.post(dispatch_url, {'position': 2})
self.assertEquals(403, response.status_code)
self.assertEquals('Unauthenticated', response.content)
def test_missing_position_handler(self):
"""
        Test that a POST request with a missing or invalid position argument doesn't raise a server error
"""
self.client.login(username=self.mock_user.username, password="test")
dispatch_url = reverse(
'xblock_handler',
args=[
self.course_key.to_deprecated_string(),
quote_slashes(self.course_key.make_usage_key('videosequence', 'Toy_Videos').to_deprecated_string()),
'xmodule_handler',
'goto_position'
]
)
response = self.client.post(dispatch_url)
self.assertEqual(200, response.status_code)
self.assertEqual(json.loads(response.content), {'success': True})
response = self.client.post(dispatch_url, {'position': ''})
self.assertEqual(200, response.status_code)
self.assertEqual(json.loads(response.content), {'success': True})
response = self.client.post(dispatch_url, {'position': '-1'})
self.assertEqual(200, response.status_code)
self.assertEqual(json.loads(response.content), {'success': True})
response = self.client.post(dispatch_url, {'position': "string"})
self.assertEqual(200, response.status_code)
self.assertEqual(json.loads(response.content), {'success': True})
response = self.client.post(dispatch_url, {'position': u"Φυσικά"})
self.assertEqual(200, response.status_code)
self.assertEqual(json.loads(response.content), {'success': True})
response = self.client.post(dispatch_url, {'position': None})
self.assertEqual(200, response.status_code)
self.assertEqual(json.loads(response.content), {'success': True})
@ddt.data('pure', 'vertical')
@XBlock.register_temp_plugin(PureXBlock, identifier='pure')
def test_rebinding_same_user(self, block_type):
request = self.request_factory.get('')
request.user = self.mock_user
course = CourseFactory()
descriptor = ItemFactory(category=block_type, parent=course)
field_data_cache = FieldDataCache([self.toy_course, descriptor], self.toy_course.id, self.mock_user)
# This is verifying that caching doesn't cause an error during get_module_for_descriptor, which
# is why it calls the method twice identically.
render.get_module_for_descriptor(
self.mock_user,
request,
descriptor,
field_data_cache,
self.toy_course.id,
course=self.toy_course
)
render.get_module_for_descriptor(
self.mock_user,
request,
descriptor,
field_data_cache,
self.toy_course.id,
course=self.toy_course
)
@override_settings(FIELD_OVERRIDE_PROVIDERS=(
'ccx.overrides.CustomCoursesForEdxOverrideProvider',
))
def test_rebind_different_users_ccx(self):
"""
This tests the rebinding a descriptor to a student does not result
in overly nested _field_data when CCX is enabled.
"""
request = self.request_factory.get('')
request.user = self.mock_user
course = CourseFactory.create(enable_ccx=True)
descriptor = ItemFactory(category='html', parent=course)
field_data_cache = FieldDataCache(
[course, descriptor], course.id, self.mock_user
)
# grab what _field_data was originally set to
original_field_data = descriptor._field_data # pylint: disable=protected-access, no-member
render.get_module_for_descriptor(
self.mock_user, request, descriptor, field_data_cache, course.id, course=course
)
        # check that _unwrapped_field_data is the same as the original
        # _field_data, but now _field_data has been reset.
# pylint: disable=protected-access, no-member
self.assertIs(descriptor._unwrapped_field_data, original_field_data)
self.assertIsNot(descriptor._unwrapped_field_data, descriptor._field_data)
# now bind this module to a few other students
for user in [UserFactory(), UserFactory(), UserFactory()]:
render.get_module_for_descriptor(
user,
request,
descriptor,
field_data_cache,
course.id,
course=course
)
# _field_data should now be wrapped by LmsFieldData
# pylint: disable=protected-access, no-member
self.assertIsInstance(descriptor._field_data, LmsFieldData)
# the LmsFieldData should now wrap OverrideFieldData
self.assertIsInstance(
# pylint: disable=protected-access, no-member
descriptor._field_data._authored_data._source,
OverrideFieldData
)
# the OverrideFieldData should point to the original unwrapped field_data
self.assertIs(
# pylint: disable=protected-access, no-member
descriptor._field_data._authored_data._source.fallback,
descriptor._unwrapped_field_data
)
def test_hash_resource(self):
"""
Ensure that the resource hasher works and does not fail on unicode,
decoded or otherwise.
"""
resources = ['ASCII text', u'❄ I am a special snowflake.', "❄ So am I, but I didn't tell you."]
self.assertEqual(hash_resource(resources), 'a76e27c8e80ca3efd7ce743093aa59e0')
@attr('shard_1')
class TestHandleXBlockCallback(ModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test the handle_xblock_callback function
"""
def setUp(self):
super(TestHandleXBlockCallback, self).setUp()
self.course_key = ToyCourseFactory.create().id
self.location = self.course_key.make_usage_key('chapter', 'Overview')
self.toy_course = modulestore().get_course(self.course_key)
self.mock_user = UserFactory.create()
self.request_factory = RequestFactory()
# Construct a mock module for the modulestore to return
self.mock_module = MagicMock()
self.mock_module.id = 1
self.dispatch = 'score_update'
# Construct a 'standard' xqueue_callback url
self.callback_url = reverse(
'xqueue_callback', kwargs={
'course_id': self.course_key.to_deprecated_string(),
'userid': str(self.mock_user.id),
'mod_id': self.mock_module.id,
'dispatch': self.dispatch
}
)
def _mock_file(self, name='file', size=10):
"""Create a mock file object for testing uploads"""
mock_file = MagicMock(
size=size,
read=lambda: 'x' * size
)
# We can't use `name` as a kwarg to Mock to set the name attribute
# because mock uses `name` to name the mock itself
mock_file.name = name
return mock_file
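    # e.g. self._mock_file(name='upload.txt', size=4).read() == 'x' * 4;
    # the name attribute is set after construction because Mock reserves
    # the `name` kwarg for naming the mock itself.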
def test_invalid_location(self):
request = self.request_factory.post('dummy_url', data={'position': 1})
request.user = self.mock_user
with self.assertRaises(Http404):
render.handle_xblock_callback(
request,
self.course_key.to_deprecated_string(),
'invalid Location',
                'dummy_handler',
                'dummy_dispatch',
)
def test_too_many_files(self):
request = self.request_factory.post(
'dummy_url',
data={'file_id': (self._mock_file(), ) * (settings.MAX_FILEUPLOADS_PER_INPUT + 1)}
)
request.user = self.mock_user
self.assertEquals(
render.handle_xblock_callback(
request,
self.course_key.to_deprecated_string(),
quote_slashes(self.location.to_deprecated_string()),
'dummy_handler'
).content,
json.dumps({
'success': 'Submission aborted! Maximum %d files may be submitted at once' %
settings.MAX_FILEUPLOADS_PER_INPUT
}, indent=2)
)
def test_too_large_file(self):
inputfile = self._mock_file(size=1 + settings.STUDENT_FILEUPLOAD_MAX_SIZE)
request = self.request_factory.post(
'dummy_url',
data={'file_id': inputfile}
)
request.user = self.mock_user
self.assertEquals(
render.handle_xblock_callback(
request,
self.course_key.to_deprecated_string(),
quote_slashes(self.location.to_deprecated_string()),
'dummy_handler'
).content,
json.dumps({
'success': 'Submission aborted! Your file "%s" is too large (max size: %d MB)' %
(inputfile.name, settings.STUDENT_FILEUPLOAD_MAX_SIZE / (1000 ** 2))
}, indent=2)
)
def test_xmodule_dispatch(self):
request = self.request_factory.post('dummy_url', data={'position': 1})
request.user = self.mock_user
response = render.handle_xblock_callback(
request,
self.course_key.to_deprecated_string(),
quote_slashes(self.location.to_deprecated_string()),
'xmodule_handler',
'goto_position',
)
self.assertIsInstance(response, HttpResponse)
def test_bad_course_id(self):
request = self.request_factory.post('dummy_url')
request.user = self.mock_user
with self.assertRaises(Http404):
render.handle_xblock_callback(
request,
'bad_course_id',
quote_slashes(self.location.to_deprecated_string()),
'xmodule_handler',
'goto_position',
)
def test_bad_location(self):
request = self.request_factory.post('dummy_url')
request.user = self.mock_user
with self.assertRaises(Http404):
render.handle_xblock_callback(
request,
self.course_key.to_deprecated_string(),
quote_slashes(self.course_key.make_usage_key('chapter', 'bad_location').to_deprecated_string()),
'xmodule_handler',
'goto_position',
)
def test_bad_xmodule_dispatch(self):
request = self.request_factory.post('dummy_url')
request.user = self.mock_user
with self.assertRaises(Http404):
render.handle_xblock_callback(
request,
self.course_key.to_deprecated_string(),
quote_slashes(self.location.to_deprecated_string()),
'xmodule_handler',
'bad_dispatch',
)
def test_missing_handler(self):
request = self.request_factory.post('dummy_url')
request.user = self.mock_user
with self.assertRaises(Http404):
render.handle_xblock_callback(
request,
self.course_key.to_deprecated_string(),
quote_slashes(self.location.to_deprecated_string()),
'bad_handler',
'bad_dispatch',
)
@XBlock.register_temp_plugin(GradedStatelessXBlock, identifier='stateless_scorer')
def test_score_without_student_state(self):
course = CourseFactory.create()
block = ItemFactory.create(category='stateless_scorer', parent=course)
request = self.request_factory.post(
'dummy_url',
data=json.dumps({"grade": 0.75}),
content_type='application/json'
)
request.user = self.mock_user
response = render.handle_xblock_callback(
request,
unicode(course.id),
quote_slashes(unicode(block.scope_ids.usage_id)),
'set_score',
'',
)
self.assertEquals(response.status_code, 200)
student_module = StudentModule.objects.get(
student=self.mock_user,
module_state_key=block.scope_ids.usage_id,
)
self.assertEquals(student_module.grade, 0.75)
self.assertEquals(student_module.max_grade, 1)
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_XBLOCK_VIEW_ENDPOINT': True})
def test_xblock_view_handler(self):
args = [
'edX/toy/2012_Fall',
quote_slashes('i4x://edX/toy/videosequence/Toy_Videos'),
'student_view'
]
xblock_view_url = reverse(
'xblock_view',
args=args
)
request = self.request_factory.get(xblock_view_url)
request.user = self.mock_user
response = render.xblock_view(request, *args)
self.assertEquals(200, response.status_code)
expected = ['csrf_token', 'html', 'resources']
content = json.loads(response.content)
for section in expected:
self.assertIn(section, content)
doc = PyQuery(content['html'])
self.assertEquals(len(doc('div.xblock-student_view-videosequence')), 1)
@attr('shard_1')
@ddt.ddt
class TestTOC(ModuleStoreTestCase):
"""Check the Table of Contents for a course"""
def setup_request_and_course(self, num_finds, num_sends):
"""
Sets up the toy course in the modulestore and the request object.
"""
self.course_key = ToyCourseFactory.create().id # pylint: disable=attribute-defined-outside-init
self.chapter = 'Overview'
chapter_url = '%s/%s/%s' % ('/courses', self.course_key, self.chapter)
factory = RequestFactory()
self.request = factory.get(chapter_url)
self.request.user = UserFactory()
self.modulestore = self.store._get_modulestore_for_courselike(self.course_key) # pylint: disable=protected-access, attribute-defined-outside-init
with self.modulestore.bulk_operations(self.course_key):
with check_mongo_calls(num_finds, num_sends):
self.toy_course = self.store.get_course(self.course_key, depth=2) # pylint: disable=attribute-defined-outside-init
self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course_key, self.request.user, self.toy_course, depth=2
)
# Mongo makes 3 queries to load the course to depth 2:
# - 1 for the course
# - 1 for its children
# - 1 for its grandchildren
# Split makes 6 queries to load the course to depth 2:
# - load the structure
# - load 5 definitions
# Split makes 5 queries to render the toc:
# - it loads the active version at the start of the bulk operation
# - it loads 4 definitions, because it instantiates 4 VideoModules
# each of which access a Scope.content field in __init__
@ddt.data((ModuleStoreEnum.Type.mongo, 3, 0, 0), (ModuleStoreEnum.Type.split, 6, 0, 5))
@ddt.unpack
def test_toc_toy_from_chapter(self, default_ms, setup_finds, setup_sends, toc_finds):
with self.store.default_store(default_ms):
self.setup_request_and_course(setup_finds, setup_sends)
expected = ([{'active': True, 'sections':
[{'url_name': 'Toy_Videos', 'display_name': u'Toy Videos', 'graded': True,
'format': u'Lecture Sequence', 'due': None, 'active': False},
{'url_name': 'Welcome', 'display_name': u'Welcome', 'graded': True,
'format': '', 'due': None, 'active': False},
{'url_name': 'video_123456789012', 'display_name': 'Test Video', 'graded': True,
'format': '', 'due': None, 'active': False},
{'url_name': 'video_4f66f493ac8f', 'display_name': 'Video', 'graded': True,
'format': '', 'due': None, 'active': False}],
'url_name': 'Overview', 'display_name': u'Overview', 'display_id': u'overview'},
{'active': False, 'sections':
[{'url_name': 'toyvideo', 'display_name': 'toyvideo', 'graded': True,
'format': '', 'due': None, 'active': False}],
'url_name': 'secret:magic', 'display_name': 'secret:magic', 'display_id': 'secretmagic'}])
course = self.store.get_course(self.toy_course.id, depth=2)
with check_mongo_calls(toc_finds):
actual = render.toc_for_course(
self.request.user, self.request, course, self.chapter, None, self.field_data_cache
)
for toc_section in expected:
self.assertIn(toc_section, actual)
# Mongo makes 3 queries to load the course to depth 2:
# - 1 for the course
# - 1 for its children
# - 1 for its grandchildren
# Split makes 6 queries to load the course to depth 2:
# - load the structure
# - load 5 definitions
# Split makes 5 queries to render the toc:
# - it loads the active version at the start of the bulk operation
# - it loads 4 definitions, because it instantiates 4 VideoModules
# each of which access a Scope.content field in __init__
@ddt.data((ModuleStoreEnum.Type.mongo, 3, 0, 0), (ModuleStoreEnum.Type.split, 6, 0, 5))
@ddt.unpack
def test_toc_toy_from_section(self, default_ms, setup_finds, setup_sends, toc_finds):
with self.store.default_store(default_ms):
self.setup_request_and_course(setup_finds, setup_sends)
section = 'Welcome'
expected = ([{'active': True, 'sections':
[{'url_name': 'Toy_Videos', 'display_name': u'Toy Videos', 'graded': True,
'format': u'Lecture Sequence', 'due': None, 'active': False},
{'url_name': 'Welcome', 'display_name': u'Welcome', 'graded': True,
'format': '', 'due': None, 'active': True},
{'url_name': 'video_123456789012', 'display_name': 'Test Video', 'graded': True,
'format': '', 'due': None, 'active': False},
{'url_name': 'video_4f66f493ac8f', 'display_name': 'Video', 'graded': True,
'format': '', 'due': None, 'active': False}],
'url_name': 'Overview', 'display_name': u'Overview', 'display_id': u'overview'},
{'active': False, 'sections':
[{'url_name': 'toyvideo', 'display_name': 'toyvideo', 'graded': True,
'format': '', 'due': None, 'active': False}],
'url_name': 'secret:magic', 'display_name': 'secret:magic', 'display_id': 'secretmagic'}])
with check_mongo_calls(toc_finds):
actual = render.toc_for_course(
self.request.user, self.request, self.toy_course, self.chapter, section, self.field_data_cache
)
for toc_section in expected:
self.assertIn(toc_section, actual)
@attr('shard_1')
@ddt.ddt
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_SPECIAL_EXAMS': True})
class TestProctoringRendering(ModuleStoreTestCase):
"""Check the Table of Contents for a course"""
def setUp(self):
"""
Set up the initial mongo datastores
"""
super(TestProctoringRendering, self).setUp()
self.course_key = ToyCourseFactory.create().id
self.chapter = 'Overview'
chapter_url = '%s/%s/%s' % ('/courses', self.course_key, self.chapter)
factory = RequestFactory()
self.request = factory.get(chapter_url)
self.request.user = UserFactory()
self.modulestore = self.store._get_modulestore_for_courselike(self.course_key) # pylint: disable=protected-access
with self.modulestore.bulk_operations(self.course_key):
self.toy_course = self.store.get_course(self.course_key, depth=2)
self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course_key, self.request.user, self.toy_course, depth=2
)
@ddt.data(
(CourseMode.DEFAULT_MODE_SLUG, False, None, None),
(
CourseMode.DEFAULT_MODE_SLUG,
True,
'eligible',
{
'status': 'eligible',
'short_description': 'Ungraded Practice Exam',
'suggested_icon': '',
'in_completed_state': False
}
),
(
CourseMode.DEFAULT_MODE_SLUG,
True,
'submitted',
{
'status': 'submitted',
'short_description': 'Practice Exam Completed',
'suggested_icon': 'fa-check',
'in_completed_state': True
}
),
(
CourseMode.DEFAULT_MODE_SLUG,
True,
'error',
{
'status': 'error',
'short_description': 'Practice Exam Failed',
'suggested_icon': 'fa-exclamation-triangle',
'in_completed_state': True
}
),
(
CourseMode.VERIFIED,
False,
None,
{
'status': 'eligible',
'short_description': 'Proctored Option Available',
'suggested_icon': 'fa-pencil-square-o',
'in_completed_state': False
}
),
(
CourseMode.VERIFIED,
False,
'declined',
{
'status': 'declined',
'short_description': 'Taking As Open Exam',
'suggested_icon': 'fa-pencil-square-o',
'in_completed_state': False
}
),
(
CourseMode.VERIFIED,
False,
'submitted',
{
'status': 'submitted',
'short_description': 'Pending Session Review',
'suggested_icon': 'fa-spinner fa-spin',
'in_completed_state': True
}
),
(
CourseMode.VERIFIED,
False,
'verified',
{
'status': 'verified',
'short_description': 'Passed Proctoring',
'suggested_icon': 'fa-check',
'in_completed_state': True
}
),
(
CourseMode.VERIFIED,
False,
'rejected',
{
'status': 'rejected',
'short_description': 'Failed Proctoring',
'suggested_icon': 'fa-exclamation-triangle',
'in_completed_state': True
}
),
(
CourseMode.VERIFIED,
False,
'error',
{
'status': 'error',
'short_description': 'Failed Proctoring',
'suggested_icon': 'fa-exclamation-triangle',
'in_completed_state': True
}
),
)
@ddt.unpack
def test_proctored_exam_toc(self, enrollment_mode, is_practice_exam,
attempt_status, expected):
"""
Generate TOC for a course with a single chapter/sequence which contains proctored exam
"""
self._setup_test_data(enrollment_mode, is_practice_exam, attempt_status)
actual = render.toc_for_course(
self.request.user,
self.request,
self.toy_course,
self.chapter,
'Toy_Videos',
self.field_data_cache
)
section_actual = self._find_section(actual, 'Overview', 'Toy_Videos')
if expected:
self.assertIn(expected, [section_actual['proctoring']])
else:
# we expect there not to be a 'proctoring' key in the dict
self.assertNotIn('proctoring', section_actual)
@ddt.data(
(
CourseMode.DEFAULT_MODE_SLUG,
True,
None,
'Try a proctored exam',
True
),
(
CourseMode.DEFAULT_MODE_SLUG,
True,
'submitted',
'You have submitted this practice proctored exam',
False
),
(
CourseMode.DEFAULT_MODE_SLUG,
True,
'error',
'There was a problem with your practice proctoring session',
True
),
(
CourseMode.VERIFIED,
False,
None,
'This exam is proctored',
False
),
(
CourseMode.VERIFIED,
False,
'submitted',
'You have submitted this proctored exam for review',
True
),
(
CourseMode.VERIFIED,
False,
'verified',
'Your proctoring session was reviewed and passed all requirements',
False
),
(
CourseMode.VERIFIED,
False,
'rejected',
'Your proctoring session was reviewed and did not pass requirements',
True
),
(
CourseMode.VERIFIED,
False,
'error',
'There was a problem with your proctoring session',
False
),
)
@ddt.unpack
def test_render_proctored_exam(self, enrollment_mode, is_practice_exam,
attempt_status, expected, with_credit_context):
"""
        Verifies the gated content in the student view rendering of a sequence
        that is labeled as a proctored exam
"""
usage_key = self._setup_test_data(enrollment_mode, is_practice_exam, attempt_status)
        # if requested, initialize a credit context with some credit requirements
if with_credit_context:
credit_course = CreditCourse(course_key=self.course_key, enabled=True)
credit_course.save()
set_credit_requirements(
self.course_key,
[
{
'namespace': 'reverification',
'name': 'reverification-1',
'display_name': 'ICRV1',
'criteria': {},
},
{
'namespace': 'proctored-exam',
'name': 'Exam1',
'display_name': 'A Proctored Exam',
'criteria': {}
}
]
)
set_credit_requirement_status(
self.request.user.username,
self.course_key,
'reverification',
'ICRV1'
)
module = render.get_module(
self.request.user,
self.request,
usage_key,
self.field_data_cache,
wrap_xmodule_display=True,
)
content = module.render(STUDENT_VIEW).content
self.assertIn(expected, content)
def _setup_test_data(self, enrollment_mode, is_practice_exam, attempt_status):
"""
Helper method to consolidate some courseware/proctoring/credit
test harness data
"""
usage_key = self.course_key.make_usage_key('videosequence', 'Toy_Videos')
sequence = self.modulestore.get_item(usage_key)
sequence.is_time_limited = True
sequence.is_proctored_exam = True
sequence.is_practice_exam = is_practice_exam
self.modulestore.update_item(sequence, self.user.id)
self.toy_course = self.modulestore.get_course(self.course_key)
# refresh cache after update
self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course_key, self.request.user, self.toy_course, depth=2
)
set_runtime_service(
'credit',
MockCreditService(enrollment_mode=enrollment_mode)
)
exam_id = create_exam(
course_id=unicode(self.course_key),
content_id=unicode(sequence.location),
exam_name='foo',
time_limit_mins=10,
is_proctored=True,
is_practice_exam=is_practice_exam
)
if attempt_status:
create_exam_attempt(exam_id, self.request.user.id, taking_as_proctored=True)
update_attempt_status(exam_id, self.request.user.id, attempt_status)
return usage_key
def _find_url_name(self, toc, url_name):
"""
        Helper to return the TOC entry (dict) for the chapter with the given url_name
"""
for entry in toc:
if entry['url_name'] == url_name:
return entry
return None
def _find_section(self, toc, chapter_url_name, section_url_name):
"""
        Helper to return the TOC entry (dict) for the section with the given url_name
"""
chapter = self._find_url_name(toc, chapter_url_name)
if chapter:
return self._find_url_name(chapter['sections'], section_url_name)
return None
@attr('shard_1')
@ddt.ddt
class TestHtmlModifiers(ModuleStoreTestCase):
"""
Tests to verify that standard modifications to the output of XModule/XBlock
student_view are taking place
"""
def setUp(self):
super(TestHtmlModifiers, self).setUp()
self.user = UserFactory.create()
self.request = RequestFactory().get('/')
self.request.user = self.user
self.request.session = {}
self.course = CourseFactory.create()
self.content_string = '<p>This is the content<p>'
self.rewrite_link = '<a href="/static/foo/content">Test rewrite</a>'
self.rewrite_bad_link = '<img src="/static//file.jpg" />'
self.course_link = '<a href="/course/bar/content">Test course rewrite</a>'
self.descriptor = ItemFactory.create(
category='html',
data=self.content_string + self.rewrite_link + self.rewrite_bad_link + self.course_link
)
self.location = self.descriptor.location
self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course.id,
self.user,
self.descriptor
)
def test_xmodule_display_wrapper_enabled(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
wrap_xmodule_display=True,
)
result_fragment = module.render(STUDENT_VIEW)
self.assertEquals(len(PyQuery(result_fragment.content)('div.xblock.xblock-student_view.xmodule_HtmlModule')), 1)
def test_xmodule_display_wrapper_disabled(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
wrap_xmodule_display=False,
)
result_fragment = module.render(STUDENT_VIEW)
self.assertNotIn('div class="xblock xblock-student_view xmodule_display xmodule_HtmlModule"', result_fragment.content)
def test_static_link_rewrite(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
)
result_fragment = module.render(STUDENT_VIEW)
self.assertIn(
'/c4x/{org}/{course}/asset/foo_content'.format(
org=self.course.location.org,
course=self.course.location.course,
),
result_fragment.content
)
def test_static_badlink_rewrite(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
)
result_fragment = module.render(STUDENT_VIEW)
self.assertIn(
'/c4x/{org}/{course}/asset/_file.jpg'.format(
org=self.course.location.org,
course=self.course.location.course,
),
result_fragment.content
)
def test_static_asset_path_use(self):
'''
        When a course is loaded with do_import_static=False (see xml_importer.py),
        static_asset_path is set as an lms key-value on the course. That should keep
        static paths from being mangled (i.e. not changed to c4x://).
'''
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
static_asset_path="toy_course_dir",
)
result_fragment = module.render(STUDENT_VIEW)
self.assertIn('href="/static/toy_course_dir', result_fragment.content)
def test_course_image(self):
url = course_image_url(self.course)
self.assertTrue(url.startswith('/c4x/'))
self.course.static_asset_path = "toy_course_dir"
url = course_image_url(self.course)
self.assertTrue(url.startswith('/static/toy_course_dir/'))
self.course.static_asset_path = ""
@override_settings(DEFAULT_COURSE_ABOUT_IMAGE_URL='test.png')
@override_settings(STATIC_URL='static/')
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_course_image_for_split_course(self, store):
"""
        For split courses, if course_image is empty then course_image_url will be
        the default image url defined in settings.
"""
self.course = CourseFactory.create(default_store=store)
self.course.course_image = ''
url = course_image_url(self.course)
self.assertEqual('static/test.png', url)
def test_get_course_info_section(self):
self.course.static_asset_path = "toy_course_dir"
get_course_info_section(self.request, self.course, "handouts")
# NOTE: check handouts output...right now test course seems to have no such content
# at least this makes sure get_course_info_section returns without exception
def test_course_link_rewrite(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
)
result_fragment = module.render(STUDENT_VIEW)
self.assertIn(
'/courses/{course_id}/bar/content'.format(
course_id=self.course.id.to_deprecated_string()
),
result_fragment.content
)
class XBlockWithJsonInitData(XBlock):
"""
Pure XBlock to use in tests, with JSON init data.
"""
the_json_data = None
def student_view(self, context=None): # pylint: disable=unused-argument
"""
A simple view that returns just enough to test.
"""
frag = Fragment(u"Hello there!")
frag.add_javascript(u'alert("Hi!");')
frag.initialize_js('ThumbsBlock', self.the_json_data)
return frag
@attr('shard_1')
@ddt.ddt
class JsonInitDataTest(ModuleStoreTestCase):
"""Tests for JSON data injected into the JS init function."""
@ddt.data(
({'a': 17}, '''{"a": 17}'''),
({'xss': '</script>alert("XSS")'}, r'''{"xss": "<\/script>alert(\"XSS\")"}'''),
)
@ddt.unpack
@XBlock.register_temp_plugin(XBlockWithJsonInitData, identifier='withjson')
def test_json_init_data(self, json_data, json_output):
XBlockWithJsonInitData.the_json_data = json_data
mock_user = UserFactory()
mock_request = MagicMock()
mock_request.user = mock_user
course = CourseFactory()
descriptor = ItemFactory(category='withjson', parent=course)
field_data_cache = FieldDataCache([course, descriptor], course.id, mock_user) # pylint: disable=no-member
module = render.get_module_for_descriptor(
mock_user,
mock_request,
descriptor,
field_data_cache,
course.id, # pylint: disable=no-member
course=course
)
html = module.render(STUDENT_VIEW).content
self.assertIn(json_output, html)
# No matter what data goes in, there should only be one close-script tag.
self.assertEqual(html.count("</script>"), 1)
class ViewInStudioTest(ModuleStoreTestCase):
"""Tests for the 'View in Studio' link visiblity."""
def setUp(self):
""" Set up the user and request that will be used. """
super(ViewInStudioTest, self).setUp()
self.staff_user = GlobalStaffFactory.create()
self.request = RequestFactory().get('/')
self.request.user = self.staff_user
self.request.session = {}
self.module = None
def _get_module(self, course_id, descriptor, location):
"""
        Get a module from the given course whose rendering should (or should not) contain the 'View in Studio' buttons
"""
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
course_id,
self.staff_user,
descriptor
)
return render.get_module(
self.staff_user,
self.request,
location,
field_data_cache,
)
def setup_mongo_course(self, course_edit_method='Studio'):
""" Create a mongo backed course. """
course = CourseFactory.create(
course_edit_method=course_edit_method
)
descriptor = ItemFactory.create(
category='vertical',
parent_location=course.location,
)
child_descriptor = ItemFactory.create(
category='vertical',
parent_location=descriptor.location
)
self.module = self._get_module(course.id, descriptor, descriptor.location)
# pylint: disable=attribute-defined-outside-init
self.child_module = self._get_module(course.id, child_descriptor, child_descriptor.location)
def setup_xml_course(self):
"""
Define the XML backed course to use.
Toy courses are already loaded in XML and mixed modulestores.
"""
course_key = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall')
location = course_key.make_usage_key('chapter', 'Overview')
descriptor = modulestore().get_item(location)
self.module = self._get_module(course_key, descriptor, location)
@attr('shard_1')
class MongoViewInStudioTest(ViewInStudioTest):
"""Test the 'View in Studio' link visibility in a mongo backed course."""
def test_view_in_studio_link_studio_course(self):
"""Regular Studio courses should see 'View in Studio' links."""
self.setup_mongo_course()
result_fragment = self.module.render(STUDENT_VIEW)
self.assertIn('View Unit in Studio', result_fragment.content)
def test_view_in_studio_link_only_in_top_level_vertical(self):
"""Regular Studio courses should not see 'View in Studio' for child verticals of verticals."""
self.setup_mongo_course()
# Render the parent vertical, then check that there is only a single "View Unit in Studio" link.
result_fragment = self.module.render(STUDENT_VIEW)
# The single "View Unit in Studio" link should appear before the first xmodule vertical definition.
parts = result_fragment.content.split('data-block-type="vertical"')
self.assertEqual(3, len(parts), "Did not find two vertical blocks")
self.assertIn('View Unit in Studio', parts[0])
self.assertNotIn('View Unit in Studio', parts[1])
self.assertNotIn('View Unit in Studio', parts[2])
def test_view_in_studio_link_xml_authored(self):
"""Courses that change 'course_edit_method' setting can hide 'View in Studio' links."""
self.setup_mongo_course(course_edit_method='XML')
result_fragment = self.module.render(STUDENT_VIEW)
self.assertNotIn('View Unit in Studio', result_fragment.content)
@attr('shard_1')
class MixedViewInStudioTest(ViewInStudioTest):
"""Test the 'View in Studio' link visibility in a mixed mongo backed course."""
MODULESTORE = TEST_DATA_MIXED_TOY_MODULESTORE
def test_view_in_studio_link_mongo_backed(self):
"""Mixed mongo courses that are mongo backed should see 'View in Studio' links."""
self.setup_mongo_course()
result_fragment = self.module.render(STUDENT_VIEW)
self.assertIn('View Unit in Studio', result_fragment.content)
def test_view_in_studio_link_xml_authored(self):
"""Courses that change 'course_edit_method' setting can hide 'View in Studio' links."""
self.setup_mongo_course(course_edit_method='XML')
result_fragment = self.module.render(STUDENT_VIEW)
self.assertNotIn('View Unit in Studio', result_fragment.content)
def test_view_in_studio_link_xml_backed(self):
"""Course in XML only modulestore should not see 'View in Studio' links."""
self.setup_xml_course()
result_fragment = self.module.render(STUDENT_VIEW)
self.assertNotIn('View Unit in Studio', result_fragment.content)
@attr('shard_1')
class XmlViewInStudioTest(ViewInStudioTest):
"""Test the 'View in Studio' link visibility in an xml backed course."""
MODULESTORE = TEST_DATA_XML_MODULESTORE
def test_view_in_studio_link_xml_backed(self):
"""Course in XML only modulestore should not see 'View in Studio' links."""
self.setup_xml_course()
result_fragment = self.module.render(STUDENT_VIEW)
self.assertNotIn('View Unit in Studio', result_fragment.content)
@attr('shard_1')
@patch.dict('django.conf.settings.FEATURES', {'DISPLAY_DEBUG_INFO_TO_STAFF': True, 'DISPLAY_HISTOGRAMS_TO_STAFF': True})
@patch('courseware.module_render.has_access', Mock(return_value=True, autospec=True))
class TestStaffDebugInfo(ModuleStoreTestCase):
"""Tests to verify that Staff Debug Info panel and histograms are displayed to staff."""
def setUp(self):
super(TestStaffDebugInfo, self).setUp()
self.user = UserFactory.create()
self.request = RequestFactory().get('/')
self.request.user = self.user
self.request.session = {}
self.course = CourseFactory.create()
problem_xml = OptionResponseXMLFactory().build_xml(
question_text='The correct answer is Correct',
num_inputs=2,
weight=2,
options=['Correct', 'Incorrect'],
correct_option='Correct'
)
self.descriptor = ItemFactory.create(
category='problem',
data=problem_xml,
display_name='Option Response Problem'
)
self.location = self.descriptor.location
self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course.id,
self.user,
self.descriptor
)
@patch.dict('django.conf.settings.FEATURES', {'DISPLAY_DEBUG_INFO_TO_STAFF': False})
def test_staff_debug_info_disabled(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
)
result_fragment = module.render(STUDENT_VIEW)
self.assertNotIn('Staff Debug', result_fragment.content)
def test_staff_debug_info_enabled(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
)
result_fragment = module.render(STUDENT_VIEW)
self.assertIn('Staff Debug', result_fragment.content)
@patch.dict('django.conf.settings.FEATURES', {'DISPLAY_HISTOGRAMS_TO_STAFF': False})
def test_histogram_disabled(self):
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
)
result_fragment = module.render(STUDENT_VIEW)
        self.assertNotIn('histogram', result_fragment.content)
    def test_histogram_disabled_for_unscored_xmodules(self):
        """Histograms should not display for xmodules which are not scored."""
html_descriptor = ItemFactory.create(
category='html',
data='Here are some course details.'
)
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course.id,
self.user,
            html_descriptor
)
with patch('openedx.core.lib.xblock_utils.grade_histogram') as mock_grade_histogram:
mock_grade_histogram.return_value = []
module = render.get_module(
self.user,
self.request,
html_descriptor.location,
field_data_cache,
)
module.render(STUDENT_VIEW)
self.assertFalse(mock_grade_histogram.called)
def test_histogram_enabled_for_scored_xmodules(self):
"""Histograms should display for xmodules which are scored."""
StudentModuleFactory.create(
course_id=self.course.id,
module_state_key=self.location,
student=UserFactory(),
grade=1,
max_grade=1,
state="{}",
)
with patch('openedx.core.lib.xblock_utils.grade_histogram') as mock_grade_histogram:
mock_grade_histogram.return_value = []
module = render.get_module(
self.user,
self.request,
self.location,
self.field_data_cache,
)
module.render(STUDENT_VIEW)
self.assertTrue(mock_grade_histogram.called)
PER_COURSE_ANONYMIZED_DESCRIPTORS = (LTIDescriptor, )
# The "set" here is to work around the bug that load_classes returns duplicates for multiply-delcared classes.
PER_STUDENT_ANONYMIZED_DESCRIPTORS = set(
class_ for (name, class_) in XModuleDescriptor.load_classes()
if not issubclass(class_, PER_COURSE_ANONYMIZED_DESCRIPTORS)
)
@attr('shard_1')
@ddt.ddt
class TestAnonymousStudentId(ModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test that anonymous_student_id is set correctly across a variety of XBlock types
"""
def setUp(self):
super(TestAnonymousStudentId, self).setUp(create_user=False)
self.user = UserFactory()
self.course_key = ToyCourseFactory.create().id
self.course = modulestore().get_course(self.course_key)
@patch('courseware.module_render.has_access', Mock(return_value=True, autospec=True))
def _get_anonymous_id(self, course_id, xblock_class):
location = course_id.make_usage_key('dummy_category', 'dummy_name')
descriptor = Mock(
spec=xblock_class,
_field_data=Mock(spec=FieldData, name='field_data'),
location=location,
static_asset_path=None,
_runtime=Mock(
spec=Runtime,
resources_fs=None,
mixologist=Mock(_mixins=(), name='mixologist'),
name='runtime',
),
scope_ids=Mock(spec=ScopeIds),
name='descriptor',
_field_data_cache={},
_dirty_fields={},
fields={},
days_early_for_beta=None,
)
descriptor.runtime = CombinedSystem(descriptor._runtime, None) # pylint: disable=protected-access
# Use the xblock_class's bind_for_student method
descriptor.bind_for_student = partial(xblock_class.bind_for_student, descriptor)
if hasattr(xblock_class, 'module_class'):
descriptor.module_class = xblock_class.module_class
return render.get_module_for_descriptor_internal(
user=self.user,
descriptor=descriptor,
student_data=Mock(spec=FieldData, name='student_data'),
course_id=course_id,
track_function=Mock(name='track_function'), # Track Function
xqueue_callback_url_prefix=Mock(name='xqueue_callback_url_prefix'), # XQueue Callback Url Prefix
request_token='request_token',
course=self.course,
).xmodule_runtime.anonymous_student_id
@ddt.data(*PER_STUDENT_ANONYMIZED_DESCRIPTORS)
def test_per_student_anonymized_id(self, descriptor_class):
for course_id in ('MITx/6.00x/2012_Fall', 'MITx/6.00x/2013_Spring'):
self.assertEquals(
# This value is set by observation, so that later changes to the student
# id computation don't break old data
'5afe5d9bb03796557ee2614f5c9611fb',
self._get_anonymous_id(CourseKey.from_string(course_id), descriptor_class)
)
@ddt.data(*PER_COURSE_ANONYMIZED_DESCRIPTORS)
def test_per_course_anonymized_id(self, descriptor_class):
self.assertEquals(
# This value is set by observation, so that later changes to the student
# id computation don't break old data
'e3b0b940318df9c14be59acb08e78af5',
self._get_anonymous_id(SlashSeparatedCourseKey('MITx', '6.00x', '2012_Fall'), descriptor_class)
)
self.assertEquals(
# This value is set by observation, so that later changes to the student
# id computation don't break old data
'f82b5416c9f54b5ce33989511bb5ef2e',
self._get_anonymous_id(SlashSeparatedCourseKey('MITx', '6.00x', '2013_Spring'), descriptor_class)
)
@attr('shard_1')
@patch('track.views.tracker', autospec=True)
class TestModuleTrackingContext(ModuleStoreTestCase):
"""
Ensure correct tracking information is included in events emitted during XBlock callback handling.
"""
def setUp(self):
super(TestModuleTrackingContext, self).setUp()
self.user = UserFactory.create()
self.request = RequestFactory().get('/')
self.request.user = self.user
self.request.session = {}
self.course = CourseFactory.create()
self.problem_xml = OptionResponseXMLFactory().build_xml(
question_text='The correct answer is Correct',
num_inputs=2,
weight=2,
options=['Correct', 'Incorrect'],
correct_option='Correct'
)
def test_context_contains_display_name(self, mock_tracker):
problem_display_name = u'Option Response Problem'
module_info = self.handle_callback_and_get_module_info(mock_tracker, problem_display_name)
self.assertEquals(problem_display_name, module_info['display_name'])
def handle_callback_and_get_module_info(self, mock_tracker, problem_display_name=None):
"""
Creates a fake module, invokes the callback and extracts the 'module'
metadata from the emitted problem_check event.
"""
descriptor_kwargs = {
'category': 'problem',
'data': self.problem_xml
}
if problem_display_name:
descriptor_kwargs['display_name'] = problem_display_name
descriptor = ItemFactory.create(**descriptor_kwargs)
render.handle_xblock_callback(
self.request,
self.course.id.to_deprecated_string(),
quote_slashes(descriptor.location.to_deprecated_string()),
'xmodule_handler',
'problem_check',
)
self.assertEquals(len(mock_tracker.send.mock_calls), 1)
mock_call = mock_tracker.send.mock_calls[0]
event = mock_call[1][0]
self.assertEquals(event['event_type'], 'problem_check')
return event['context']['module']
def test_missing_display_name(self, mock_tracker):
actual_display_name = self.handle_callback_and_get_module_info(mock_tracker)['display_name']
self.assertTrue(actual_display_name.startswith('problem'))
def test_library_source_information(self, mock_tracker):
"""
Check that XBlocks that are inherited from a library include the
information about their library block source in events.
We patch the modulestore to avoid having to create a library.
"""
original_usage_key = UsageKey.from_string(u'block-v1:A+B+C+type@problem+block@abcd1234')
original_usage_version = ObjectId()
mock_get_original_usage = lambda _, key: (original_usage_key, original_usage_version)
with patch('xmodule.modulestore.mixed.MixedModuleStore.get_block_original_usage', mock_get_original_usage):
module_info = self.handle_callback_and_get_module_info(mock_tracker)
self.assertIn('original_usage_key', module_info)
self.assertEqual(module_info['original_usage_key'], unicode(original_usage_key))
self.assertIn('original_usage_version', module_info)
self.assertEqual(module_info['original_usage_version'], unicode(original_usage_version))
@attr('shard_1')
class TestXmoduleRuntimeEvent(TestSubmittingProblems):
"""
    Inherit from TestSubmittingProblems to get functionality that sets up a course and problem structure
"""
def setUp(self):
super(TestXmoduleRuntimeEvent, self).setUp()
self.homework = self.add_graded_section_to_course('homework')
self.problem = self.add_dropdown_to_section(self.homework.location, 'p1', 1)
self.grade_dict = {'value': 0.18, 'max_value': 32}
self.delete_dict = {'value': None, 'max_value': None}
def get_module_for_user(self, user):
"""Helper function to get useful module at self.location in self.course_id for user"""
mock_request = MagicMock()
mock_request.user = user
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course.id, user, self.course, depth=2)
return render.get_module( # pylint: disable=protected-access
user,
mock_request,
self.problem.location,
field_data_cache,
)._xmodule
def set_module_grade_using_publish(self, grade_dict):
"""Publish the user's grade, takes grade_dict as input"""
module = self.get_module_for_user(self.student_user)
module.system.publish(module, 'grade', grade_dict)
return module
def test_xmodule_runtime_publish(self):
"""Tests the publish mechanism"""
self.set_module_grade_using_publish(self.grade_dict)
student_module = StudentModule.objects.get(student=self.student_user, module_state_key=self.problem.location)
self.assertEqual(student_module.grade, self.grade_dict['value'])
self.assertEqual(student_module.max_grade, self.grade_dict['max_value'])
def test_xmodule_runtime_publish_delete(self):
"""Test deleting the grade using the publish mechanism"""
module = self.set_module_grade_using_publish(self.grade_dict)
module.system.publish(module, 'grade', self.delete_dict)
student_module = StudentModule.objects.get(student=self.student_user, module_state_key=self.problem.location)
self.assertIsNone(student_module.grade)
self.assertIsNone(student_module.max_grade)
@patch('courseware.module_render.SCORE_CHANGED.send')
def test_score_change_signal(self, send_mock):
"""Test that a Django signal is generated when a score changes"""
self.set_module_grade_using_publish(self.grade_dict)
expected_signal_kwargs = {
'sender': None,
'points_possible': self.grade_dict['max_value'],
'points_earned': self.grade_dict['value'],
'user_id': self.student_user.id,
'course_id': unicode(self.course.id),
'usage_id': unicode(self.problem.location)
}
send_mock.assert_called_with(**expected_signal_kwargs)
@attr('shard_1')
class TestRebindModule(TestSubmittingProblems):
"""
Tests to verify the functionality of rebinding a module.
    Inherit from TestSubmittingProblems to get functionality that sets up a course structure
"""
def setUp(self):
super(TestRebindModule, self).setUp()
self.homework = self.add_graded_section_to_course('homework')
self.lti = ItemFactory.create(category='lti', parent=self.homework)
self.problem = ItemFactory.create(category='problem', parent=self.homework)
self.user = UserFactory.create()
self.anon_user = AnonymousUser()
def get_module_for_user(self, user, item=None):
"""Helper function to get useful module at self.location in self.course_id for user"""
mock_request = MagicMock()
mock_request.user = user
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.course.id, user, self.course, depth=2)
if item is None:
item = self.lti
return render.get_module( # pylint: disable=protected-access
user,
mock_request,
item.location,
field_data_cache,
)._xmodule
def test_rebind_module_to_new_users(self):
module = self.get_module_for_user(self.user, self.problem)
# Bind the module to another student, which will remove "correct_map"
# from the module's _field_data_cache and _dirty_fields.
user2 = UserFactory.create()
module.descriptor.bind_for_student(module.system, user2.id)
# XBlock's save method assumes that if a field is in _dirty_fields,
# then it's also in _field_data_cache. If this assumption
# doesn't hold, then we get an error trying to bind this module
# to a third student, since we've removed "correct_map" from
# _field_data cache, but not _dirty_fields, when we bound
# this module to the second student. (TNL-2640)
user3 = UserFactory.create()
module.descriptor.bind_for_student(module.system, user3.id)
def test_rebind_noauth_module_to_user_not_anonymous(self):
"""
Tests that an exception is thrown when rebind_noauth_module_to_user is run from a
module bound to a real user
"""
module = self.get_module_for_user(self.user)
user2 = UserFactory()
user2.id = 2
with self.assertRaisesRegexp(
render.LmsModuleRenderError,
"rebind_noauth_module_to_user can only be called from a module bound to an anonymous user"
):
self.assertTrue(module.system.rebind_noauth_module_to_user(module, user2))
def test_rebind_noauth_module_to_user_anonymous(self):
"""
        Tests that rebind_noauth_module_to_user succeeds when it is run on a
        module bound to AnonymousUser
"""
module = self.get_module_for_user(self.anon_user)
user2 = UserFactory()
user2.id = 2
module.system.rebind_noauth_module_to_user(module, user2)
self.assertTrue(module)
self.assertEqual(module.system.anonymous_student_id, anonymous_id_for_user(user2, self.course.id))
self.assertEqual(module.scope_ids.user_id, user2.id)
self.assertEqual(module.descriptor.scope_ids.user_id, user2.id)
@attr('shard_1')
@ddt.ddt
class TestEventPublishing(ModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Tests of event publishing for both XModules and XBlocks.
"""
def setUp(self):
"""
Set up the course and user context
"""
super(TestEventPublishing, self).setUp()
self.mock_user = UserFactory()
self.mock_user.id = 1
self.request_factory = RequestFactory()
@ddt.data('xblock', 'xmodule')
@XBlock.register_temp_plugin(PureXBlock, identifier='xblock')
@XBlock.register_temp_plugin(EmptyXModuleDescriptor, identifier='xmodule')
@patch.object(render, 'make_track_function')
def test_event_publishing(self, block_type, mock_track_function):
request = self.request_factory.get('')
request.user = self.mock_user
course = CourseFactory()
descriptor = ItemFactory(category=block_type, parent=course)
field_data_cache = FieldDataCache([course, descriptor], course.id, self.mock_user) # pylint: disable=no-member
block = render.get_module(self.mock_user, request, descriptor.location, field_data_cache)
event_type = 'event_type'
event = {'event': 'data'}
block.runtime.publish(block, event_type, event)
mock_track_function.assert_called_once_with(request)
mock_track_function.return_value.assert_called_once_with(event_type, event)
@attr('shard_1')
@ddt.ddt
class LMSXBlockServiceBindingTest(ModuleStoreTestCase):
"""
Tests that the LMS Module System (XBlock Runtime) provides an expected set of services.
"""
def setUp(self):
"""
Set up the user and other fields that will be used to instantiate the runtime.
"""
super(LMSXBlockServiceBindingTest, self).setUp()
self.user = UserFactory()
self.student_data = Mock()
self.course = CourseFactory.create()
self.track_function = Mock()
self.xqueue_callback_url_prefix = Mock()
self.request_token = Mock()
@XBlock.register_temp_plugin(PureXBlock, identifier='pure')
@ddt.data("user", "i18n", "fs", "field-data")
def test_expected_services_exist(self, expected_service):
"""
        Tests that the 'user', 'i18n', 'fs', and 'field-data' services are provided by the LMS runtime.
"""
descriptor = ItemFactory(category="pure", parent=self.course)
runtime, _ = render.get_module_system_for_user(
self.user,
self.student_data,
descriptor,
self.course.id,
self.track_function,
self.xqueue_callback_url_prefix,
self.request_token,
course=self.course
)
service = runtime.service(descriptor, expected_service)
self.assertIsNotNone(service)
def test_beta_tester_fields_added(self):
"""
Tests that the beta tester fields are set on LMS runtime.
"""
descriptor = ItemFactory(category="pure", parent=self.course)
descriptor.days_early_for_beta = 5
runtime, _ = render.get_module_system_for_user(
self.user,
self.student_data,
descriptor,
self.course.id,
self.track_function,
self.xqueue_callback_url_prefix,
self.request_token,
course=self.course
)
# pylint: disable=no-member
self.assertFalse(runtime.user_is_beta_tester)
self.assertEqual(runtime.days_early_for_beta, 5)
class PureXBlockWithChildren(PureXBlock):
"""
Pure XBlock with children to use in tests.
"""
has_children = True
class EmptyXModuleWithChildren(EmptyXModule): # pylint: disable=abstract-method
"""
    Empty XModule with children for testing with no dependencies.
"""
has_children = True
class EmptyXModuleDescriptorWithChildren(EmptyXModuleDescriptor): # pylint: disable=abstract-method
"""
    Empty XModuleDescriptor with children for testing with no dependencies.
"""
module_class = EmptyXModuleWithChildren
has_children = True
BLOCK_TYPES = ['xblock', 'xmodule']
USER_NUMBERS = range(2)
@attr('shard_1')
@ddt.ddt
class TestFilteredChildren(ModuleStoreTestCase):
"""
Tests that verify access to XBlock/XModule children work correctly
even when those children are filtered by the runtime when loaded.
"""
# pylint: disable=attribute-defined-outside-init, no-member
def setUp(self):
super(TestFilteredChildren, self).setUp()
self.users = {number: UserFactory() for number in USER_NUMBERS}
self.course = CourseFactory()
self._old_has_access = render.has_access
patcher = patch('courseware.module_render.has_access', self._has_access)
patcher.start()
self.addCleanup(patcher.stop)
@ddt.data(*BLOCK_TYPES)
@XBlock.register_temp_plugin(PureXBlockWithChildren, identifier='xblock')
@XBlock.register_temp_plugin(EmptyXModuleDescriptorWithChildren, identifier='xmodule')
def test_unbound(self, block_type):
block = self._load_block(block_type)
self.assertUnboundChildren(block)
@ddt.data(*itertools.product(BLOCK_TYPES, USER_NUMBERS))
@ddt.unpack
@XBlock.register_temp_plugin(PureXBlockWithChildren, identifier='xblock')
@XBlock.register_temp_plugin(EmptyXModuleDescriptorWithChildren, identifier='xmodule')
def test_unbound_then_bound_as_descriptor(self, block_type, user_number):
user = self.users[user_number]
block = self._load_block(block_type)
self.assertUnboundChildren(block)
self._bind_block(block, user)
self.assertBoundChildren(block, user)
@ddt.data(*itertools.product(BLOCK_TYPES, USER_NUMBERS))
@ddt.unpack
@XBlock.register_temp_plugin(PureXBlockWithChildren, identifier='xblock')
@XBlock.register_temp_plugin(EmptyXModuleDescriptorWithChildren, identifier='xmodule')
def test_unbound_then_bound_as_xmodule(self, block_type, user_number):
user = self.users[user_number]
block = self._load_block(block_type)
self.assertUnboundChildren(block)
self._bind_block(block, user)
# Validate direct XModule access as well
if isinstance(block, XModuleDescriptor):
self.assertBoundChildren(block._xmodule, user) # pylint: disable=protected-access
else:
self.assertBoundChildren(block, user)
@ddt.data(*itertools.product(BLOCK_TYPES, USER_NUMBERS))
@ddt.unpack
@XBlock.register_temp_plugin(PureXBlockWithChildren, identifier='xblock')
@XBlock.register_temp_plugin(EmptyXModuleDescriptorWithChildren, identifier='xmodule')
def test_bound_only_as_descriptor(self, block_type, user_number):
user = self.users[user_number]
block = self._load_block(block_type)
self._bind_block(block, user)
self.assertBoundChildren(block, user)
@ddt.data(*itertools.product(BLOCK_TYPES, USER_NUMBERS))
@ddt.unpack
@XBlock.register_temp_plugin(PureXBlockWithChildren, identifier='xblock')
@XBlock.register_temp_plugin(EmptyXModuleDescriptorWithChildren, identifier='xmodule')
def test_bound_only_as_xmodule(self, block_type, user_number):
user = self.users[user_number]
block = self._load_block(block_type)
self._bind_block(block, user)
# Validate direct XModule access as well
if isinstance(block, XModuleDescriptor):
self.assertBoundChildren(block._xmodule, user) # pylint: disable=protected-access
else:
self.assertBoundChildren(block, user)
def _load_block(self, block_type):
"""
Instantiate an XBlock of `block_type` with the appropriate set of children.
"""
self.parent = ItemFactory(category=block_type, parent=self.course)
# Create a child of each block type for each user
self.children_for_user = {
user: [
ItemFactory(category=child_type, parent=self.parent).scope_ids.usage_id
for child_type in BLOCK_TYPES
]
for user in self.users.itervalues()
}
self.all_children = sum(self.children_for_user.values(), [])
return modulestore().get_item(self.parent.scope_ids.usage_id)
def _bind_block(self, block, user):
"""
Bind `block` to the supplied `user`.
"""
course_id = self.course.id
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
course_id,
user,
block,
)
return get_module_for_descriptor(
user,
Mock(name='request', user=user),
block,
field_data_cache,
course_id,
course=self.course
)
def _has_access(self, user, action, obj, course_key=None):
"""
Mock implementation of `has_access` used to control which blocks
have access to which children during tests.
"""
if action != 'load':
return self._old_has_access(user, action, obj, course_key)
if isinstance(obj, XBlock):
key = obj.scope_ids.usage_id
elif isinstance(obj, UsageKey):
key = obj
if key == self.parent.scope_ids.usage_id:
return True
return key in self.children_for_user[user]
def assertBoundChildren(self, block, user):
"""
Ensure the bound children are indeed children.
"""
self.assertChildren(block, self.children_for_user[user])
def assertUnboundChildren(self, block):
"""
Ensure unbound children are indeed children.
"""
self.assertChildren(block, self.all_children)
def assertChildren(self, block, child_usage_ids):
"""
Used to assert that sets of children are equivalent.
"""
self.assertEquals(set(child_usage_ids), set(child.scope_ids.usage_id for child in block.get_children()))
@attr('shard_1')
@ddt.ddt
class TestDisabledXBlockTypes(ModuleStoreTestCase):
"""
Tests that verify disabled XBlock types are not loaded.
"""
# pylint: disable=no-member
def setUp(self):
super(TestDisabledXBlockTypes, self).setUp()
for store in self.store.modulestores:
store.disabled_xblock_types = ('video',)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_get_item(self, default_ms):
with self.store.default_store(default_ms):
course = CourseFactory()
for block_type in ('video',):
item = ItemFactory(category=block_type, parent=course)
item = self.store.get_item(item.scope_ids.usage_id)
self.assertEqual(item.__class__.__name__, 'RawDescriptorWithMixins')
|
savoirfairelinux/OpenUpgrade
|
refs/heads/master
|
addons/hw_escpos/escpos/supported_devices.py
|
227
|
#!/usr/bin/python
# This is a list of esc/pos compatible usb printers. The vendor and product ids can be found by
# typing lsusb in a linux terminal; this will give you the ids in the form ID VENDOR:PRODUCT
device_list = [
{ 'vendor' : 0x04b8, 'product' : 0x0e03, 'name' : 'Epson TM-T20' },
{ 'vendor' : 0x04b8, 'product' : 0x0202, 'name' : 'Epson TM-T70' },
{ 'vendor' : 0x04b8, 'product' : 0x0e15, 'name' : 'Epson TM-T20II' },
]
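# Minimal lookup sketch (an illustration, not part of the original file): given
# the vendor and product ids reported by lsusb (e.g. "04b8:0e03" parses to
# 0x04b8 and 0x0e03), return the matching supported printer entry, if any.
def find_supported_device(vendor, product, devices=device_list):
    for device in devices:
        if device['vendor'] == vendor and device['product'] == product:
            return device
    return None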
|
Vova23/limbo-android
|
refs/heads/master
|
jni/qemu/roms/seabios/tools/checkstack.py
|
42
|
#!/usr/bin/env python
# Script that tries to find how much stack space each function in an
# object is using.
#
# Copyright (C) 2008 Kevin O'Connor <kevin@koconnor.net>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
# Usage:
# objdump -m i386 -M i8086 -M suffix -d out/rom16.o | tools/checkstack.py
import sys
import re
# Functions that change stacks
STACKHOP = ['__send_disk_op']
# List of functions we can assume are never called.
#IGNORE = ['panic', '__dprintf']
IGNORE = ['panic']
OUTPUTDESC = """
#funcname1[preamble_stack_usage,max_usage_with_callers]:
# insn_addr:called_function [usage_at_call_point+caller_preamble,total_usage]
#
#funcname2[p,m,max_usage_to_yield_point]:
# insn_addr:called_function [u+c,t,usage_to_yield_point]
"""
# Find out maximum stack usage for a function
def calcmaxstack(funcs, funcaddr):
info = funcs[funcaddr]
# Find max of all nested calls.
maxusage = info[1]
maxyieldusage = doesyield = 0
if info[3] is not None:
maxyieldusage = info[3]
doesyield = 1
info[2] = maxusage
info[4] = info[3]
seenbefore = {}
totcalls = 0
for insnaddr, calladdr, usage in info[6]:
callinfo = funcs.get(calladdr)
if callinfo is None:
continue
if callinfo[2] is None:
calcmaxstack(funcs, calladdr)
if callinfo[0] not in seenbefore:
seenbefore[callinfo[0]] = 1
totcalls += 1 + callinfo[5]
funcnameroot = callinfo[0].split('.')[0]
if funcnameroot in IGNORE:
# This called function is ignored - don't contribute it to
# the max stack.
continue
if funcnameroot in STACKHOP:
if usage > maxusage:
maxusage = usage
if callinfo[4] is not None:
doesyield = 1
if usage > maxyieldusage:
maxyieldusage = usage
continue
totusage = usage + callinfo[2]
if totusage > maxusage:
maxusage = totusage
if callinfo[4] is not None:
doesyield = 1
totyieldusage = usage + callinfo[4]
if totyieldusage > maxyieldusage:
maxyieldusage = totyieldusage
info[2] = maxusage
if doesyield:
info[4] = maxyieldusage
info[5] = totcalls
# Try to arrange output so that functions that call each other are
# near each other.
def orderfuncs(funcaddrs, availfuncs):
l = [(availfuncs[funcaddr][5], availfuncs[funcaddr][0], funcaddr)
for funcaddr in funcaddrs if funcaddr in availfuncs]
l.sort()
l.reverse()
out = []
while l:
count, name, funcaddr = l.pop(0)
if funcaddr not in availfuncs:
continue
calladdrs = [calls[1] for calls in availfuncs[funcaddr][6]]
del availfuncs[funcaddr]
out = out + orderfuncs(calladdrs, availfuncs) + [funcaddr]
return out
# Update function info with a found "yield" point.
def noteYield(info, stackusage):
prevyield = info[3]
if prevyield is None or prevyield < stackusage:
info[3] = stackusage
# Update function info with a found "call" point.
def noteCall(info, subfuncs, insnaddr, calladdr, stackusage):
if (calladdr, stackusage) in subfuncs:
# Already noted a nearly identical call - ignore this one.
return
info[6].append((insnaddr, calladdr, stackusage))
subfuncs[(calladdr, stackusage)] = 1
hex_s = r'[0-9a-f]+'
re_func = re.compile(r'^(?P<funcaddr>' + hex_s + r') <(?P<func>.*)>:$')
re_asm = re.compile(
r'^[ ]*(?P<insnaddr>' + hex_s
+ r'):\t.*\t(addr32 )?(?P<insn>.+?)[ ]*((?P<calladdr>' + hex_s
+ r') <(?P<ref>.*)>)?$')
re_usestack = re.compile(
r'^(push[f]?[lw])|(sub.* [$](?P<num>0x' + hex_s + r'),%esp)$')
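# For orientation, illustrative objdump lines (not from the original file):
#   re_func matches      "00001d30 <handle_19>:"
#   re_asm matches       "    1d42:\t83 ec 10   \tsub    $0x10,%esp"
#   re_usestack matches  instructions like "pushl" or "sub    $0x10,%esp"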
def calc():
# funcs[funcaddr] = [funcname, basicstackusage, maxstackusage
# , yieldusage, maxyieldusage, totalcalls
# , [(insnaddr, calladdr, stackusage), ...]]
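    # An illustrative entry with invented values:
    #   funcs[0x1d30] = ['handle_19', 28, 164, None, None, 3,
    #                    [(0x1d42, 0x2f00, 28)]]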
funcs = {-1: ['<indirect>', 0, 0, None, None, 0, []]}
cur = None
atstart = 0
stackusage = 0
# Parse input lines
for line in sys.stdin.readlines():
m = re_func.match(line)
if m is not None:
# Found function
funcaddr = int(m.group('funcaddr'), 16)
funcs[funcaddr] = cur = [m.group('func'), 0, None, None, None, 0, []]
stackusage = 0
atstart = 1
subfuncs = {}
continue
m = re_asm.match(line)
if m is not None:
insn = m.group('insn')
im = re_usestack.match(insn)
if im is not None:
if insn.startswith('pushl') or insn.startswith('pushfl'):
stackusage += 4
continue
elif insn.startswith('pushw') or insn.startswith('pushfw'):
stackusage += 2
continue
stackusage += int(im.group('num'), 16)
if atstart:
if '%esp' in insn or insn.startswith('leal'):
# Still part of initial header
continue
cur[1] = stackusage
atstart = 0
insnaddr = m.group('insnaddr')
calladdr = m.group('calladdr')
if calladdr is None:
if insn.startswith('lcallw'):
noteCall(cur, subfuncs, insnaddr, -1, stackusage + 4)
noteYield(cur, stackusage + 4)
elif insn.startswith('int'):
noteCall(cur, subfuncs, insnaddr, -1, stackusage + 6)
noteYield(cur, stackusage + 6)
elif insn.startswith('sti'):
noteYield(cur, stackusage)
else:
# misc instruction
continue
else:
# Jump or call insn
calladdr = int(calladdr, 16)
ref = m.group('ref')
if '+' in ref:
# Inter-function jump.
pass
elif insn.startswith('j'):
# Tail call
noteCall(cur, subfuncs, insnaddr, calladdr, 0)
elif insn.startswith('calll'):
noteCall(cur, subfuncs, insnaddr, calladdr, stackusage + 4)
else:
print "unknown call", ref
noteCall(cur, subfuncs, insnaddr, calladdr, stackusage)
# Reset stack usage to preamble usage
stackusage = cur[1]
#print "other", repr(line)
# Calculate maxstackusage
for funcaddr, info in funcs.items():
if info[2] is not None:
continue
calcmaxstack(funcs, funcaddr)
# Sort functions for output
funcaddrs = orderfuncs(funcs.keys(), funcs.copy())
# Show all functions
print OUTPUTDESC
for funcaddr in funcaddrs:
name, basicusage, maxusage, yieldusage, maxyieldusage, count, calls = \
funcs[funcaddr]
if maxusage == 0 and maxyieldusage is None:
continue
yieldstr = ""
if maxyieldusage is not None:
yieldstr = ",%d" % maxyieldusage
print "\n%s[%d,%d%s]:" % (name, basicusage, maxusage, yieldstr)
for insnaddr, calladdr, stackusage in calls:
callinfo = funcs.get(calladdr, ("<unknown>", 0, 0, 0, None))
yieldstr = ""
if callinfo[4] is not None:
yieldstr = ",%d" % (stackusage + callinfo[4])
print " %04s:%-40s [%d+%d,%d%s]" % (
insnaddr, callinfo[0], stackusage, callinfo[1]
, stackusage+callinfo[2], yieldstr)
def main():
calc()
if __name__ == '__main__':
main()
|
jolyonb/edx-platform
|
refs/heads/master
|
lms/djangoapps/lms_xblock/migrations/0001_initial.py
|
87
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='XBlockAsidesConfig',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
('disabled_blocks', models.TextField(default=b'about course_info static_tab', help_text=b'Space-separated list of XBlocks on which XBlockAsides should never render.')),
('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='Changed by')),
],
options={
'ordering': ('-change_date',),
'abstract': False,
},
),
]
|
marmarek/pykickstart
|
refs/heads/master
|
pykickstart/handlers/f22.py
|
9
|
#
# Chris Lumens <clumens@redhat.com>
#
# Copyright 2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
__all__ = ["F22Handler"]
from pykickstart import commands
from pykickstart.base import BaseHandler
from pykickstart.version import F22
class F22Handler(BaseHandler):
version = F22
commandMap = {
"auth": commands.authconfig.FC3_Authconfig,
"authconfig": commands.authconfig.FC3_Authconfig,
"autopart": commands.autopart.F21_AutoPart,
"autostep": commands.autostep.FC3_AutoStep,
"bootloader": commands.bootloader.F21_Bootloader,
"btrfs": commands.btrfs.F17_BTRFS,
"cdrom": commands.cdrom.FC3_Cdrom,
"clearpart": commands.clearpart.F21_ClearPart,
"cmdline": commands.displaymode.FC3_DisplayMode,
"device": commands.device.F8_Device,
"deviceprobe": commands.deviceprobe.FC3_DeviceProbe,
"dmraid": commands.dmraid.FC6_DmRaid,
"driverdisk": commands.driverdisk.F14_DriverDisk,
"eula": commands.eula.F20_Eula,
"fcoe": commands.fcoe.F13_Fcoe,
"firewall": commands.firewall.F20_Firewall,
"firstboot": commands.firstboot.FC3_Firstboot,
"graphical": commands.displaymode.FC3_DisplayMode,
"group": commands.group.F12_Group,
"halt": commands.reboot.F18_Reboot,
"harddrive": commands.harddrive.FC3_HardDrive,
"ignoredisk": commands.ignoredisk.F14_IgnoreDisk,
"install": commands.upgrade.F11_Upgrade,
"iscsi": commands.iscsi.F17_Iscsi,
"iscsiname": commands.iscsiname.FC6_IscsiName,
"keyboard": commands.keyboard.F18_Keyboard,
"lang": commands.lang.F19_Lang,
"liveimg": commands.liveimg.F19_Liveimg,
"logging": commands.logging.FC6_Logging,
"logvol": commands.logvol.F20_LogVol,
"mediacheck": commands.mediacheck.FC4_MediaCheck,
"method": commands.method.F19_Method,
"multipath": commands.multipath.FC6_MultiPath,
"network": commands.network.F22_Network,
"nfs": commands.nfs.FC6_NFS,
"ostreesetup": commands.ostreesetup.F21_OSTreeSetup,
"part": commands.partition.F20_Partition,
"partition": commands.partition.F20_Partition,
"poweroff": commands.reboot.F18_Reboot,
"raid": commands.raid.F20_Raid,
"realm": commands.realm.F19_Realm,
"reboot": commands.reboot.F18_Reboot,
"repo": commands.repo.F21_Repo,
"rescue": commands.rescue.F10_Rescue,
"rootpw": commands.rootpw.F18_RootPw,
"selinux": commands.selinux.FC3_SELinux,
"services": commands.services.FC6_Services,
"shutdown": commands.reboot.F18_Reboot,
"skipx": commands.skipx.FC3_SkipX,
"sshpw": commands.sshpw.F13_SshPw,
"sshkey": commands.sshkey.F22_SshKey,
"text": commands.displaymode.FC3_DisplayMode,
"timezone": commands.timezone.F18_Timezone,
"updates": commands.updates.F7_Updates,
"upgrade": commands.upgrade.F20_Upgrade,
"url": commands.url.F18_Url,
"user": commands.user.F19_User,
"vnc": commands.vnc.F9_Vnc,
"volgroup": commands.volgroup.F21_VolGroup,
"xconfig": commands.xconfig.F14_XConfig,
"zerombr": commands.zerombr.F9_ZeroMbr,
"zfcp": commands.zfcp.F14_ZFCP,
}
dataMap = {
"BTRFSData": commands.btrfs.F17_BTRFSData,
"DriverDiskData": commands.driverdisk.F14_DriverDiskData,
"DeviceData": commands.device.F8_DeviceData,
"DmRaidData": commands.dmraid.FC6_DmRaidData,
"FcoeData": commands.fcoe.F13_FcoeData,
"GroupData": commands.group.F12_GroupData,
"IscsiData": commands.iscsi.F17_IscsiData,
"LogVolData": commands.logvol.F20_LogVolData,
"MultiPathData": commands.multipath.FC6_MultiPathData,
"NetworkData": commands.network.F22_NetworkData,
"PartData": commands.partition.F18_PartData,
"RaidData": commands.raid.F18_RaidData,
"RepoData": commands.repo.F21_RepoData,
"SshPwData": commands.sshpw.F13_SshPwData,
"SshKeyData": commands.sshkey.F22_SshKeyData,
"UserData": commands.user.F19_UserData,
"VolGroupData": commands.volgroup.F21_VolGroupData,
"ZFCPData": commands.zfcp.F14_ZFCPData,
}
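# Usage sketch (hedged): an F22Handler is normally obtained through
# pykickstart's version machinery rather than instantiated directly;
# "ks.cfg" below is a hypothetical kickstart file path.
#
#     from pykickstart.parser import KickstartParser
#     from pykickstart.version import makeVersion
#
#     handler = makeVersion(F22)       # returns an F22Handler instance
#     parser = KickstartParser(handler)
#     parser.readKickstart("ks.cfg")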
|
espadrine/opera
|
refs/heads/master
|
chromium/src/third_party/python_26/Lib/dummy_thread.py
|
326
|
"""Drop-in replacement for the thread module.
Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.
Suggested usage is::
try:
import thread
except ImportError:
import dummy_thread as thread
"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
'interrupt_main', 'LockType']
import traceback as _traceback
class error(Exception):
"""Dummy implementation of thread.error."""
def __init__(self, *args):
self.args = args
def start_new_thread(function, args, kwargs={}):
"""Dummy implementation of thread.start_new_thread().
Compatibility is maintained by making sure that ``args`` is a
tuple and ``kwargs`` is a dictionary. If an exception is raised
and it is SystemExit (which can be done by thread.exit()) it is
caught and nothing is done; all other exceptions are printed out
by using traceback.print_exc().
If the executed function calls interrupt_main the KeyboardInterrupt will be
raised when the function returns.
"""
if type(args) != type(tuple()):
raise TypeError("2nd arg must be a tuple")
if type(kwargs) != type(dict()):
raise TypeError("3rd arg must be a dict")
global _main
_main = False
try:
function(*args, **kwargs)
except SystemExit:
pass
except:
_traceback.print_exc()
_main = True
global _interrupt
if _interrupt:
_interrupt = False
raise KeyboardInterrupt
def exit():
"""Dummy implementation of thread.exit()."""
raise SystemExit
def get_ident():
"""Dummy implementation of thread.get_ident().
    Since this module should only be used when the thread module is not
available, it is safe to assume that the current process is the
only thread. Thus a constant can be safely returned.
"""
return -1
def allocate_lock():
"""Dummy implementation of thread.allocate_lock()."""
return LockType()
def stack_size(size=None):
"""Dummy implementation of thread.stack_size()."""
if size is not None:
raise error("setting thread stack size not supported")
return 0
class LockType(object):
"""Class implementing dummy implementation of thread.LockType.
Compatibility is maintained by maintaining self.locked_status
which is a boolean that stores the state of the lock. Pickling of
the lock, though, should not be done since if the thread module is
then used with an unpickled ``lock()`` from here problems could
occur from this class not having atomic methods.
"""
def __init__(self):
self.locked_status = False
def acquire(self, waitflag=None):
"""Dummy implementation of acquire().
For blocking calls, self.locked_status is automatically set to
True and returned appropriately based on value of
``waitflag``. If it is non-blocking, then the value is
actually checked and not set if it is already acquired. This
is all done so that threading.Condition's assert statements
aren't triggered and throw a little fit.
"""
if waitflag is None or waitflag:
self.locked_status = True
return True
else:
if not self.locked_status:
self.locked_status = True
return True
else:
return False
__enter__ = acquire
def __exit__(self, typ, val, tb):
self.release()
def release(self):
"""Release the dummy lock."""
# XXX Perhaps shouldn't actually bother to test? Could lead
# to problems for complex, threaded code.
if not self.locked_status:
raise error
self.locked_status = False
return True
def locked(self):
return self.locked_status
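# Minimal usage sketch: the dummy lock follows the same protocol as a real
# thread.allocate_lock() lock, including context-manager use.
#
#     lock = allocate_lock()
#     with lock:
#         pass                    # "critical section"
#     assert not lock.locked()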
# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True
def interrupt_main():
"""Set _interrupt flag to True to have start_new_thread raise
KeyboardInterrupt upon exiting."""
if _main:
raise KeyboardInterrupt
else:
global _interrupt
_interrupt = True
|
joansmith/openmicroscopy
|
refs/heads/develop
|
components/tools/OmeroPy/test/integration/test_delete.py
|
9
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010-2015 Glencoe Software, Inc. All Rights Reserved.
# Use is subject to license terms supplied in LICENSE.txt
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Integration tests for delete functionality
"""
import traceback
import library as lib
import pytest
import omero
import omero.callbacks
import Ice
import sys
import os
from time import time
from omero.cmd import Delete2
from omero.cmd.graphs import ChildOption
from omero.rtypes import rstring, rlist, rlong
class TestDelete(lib.ITest):
def testBasicUsage(self):
img = self.new_image(name="delete test")
tag = omero.model.TagAnnotationI()
img.linkAnnotation(tag)
img = self.update.saveAndReturnObject(img)
command = Delete2(targetObjects={"Image": [img.id.val]})
handle = self.client.sf.submit(command)
self.waitOnCmd(self.client, handle)
def testDeleteMany(self):
images = list()
for i in range(0, 5):
img = self.new_image(name="delete test")
tag = omero.model.TagAnnotationI()
img.linkAnnotation(tag)
images.append(self.update.saveAndReturnObject(img))
ids = [image.id.val for image in images]
command = Delete2(targetObjects={"Image": ids})
handle = self.client.sf.submit(command)
self.waitOnCmd(self.client, handle)
def testDeleteProjectWithoutContent(self):
uuid = self.ctx.sessionUuid
images = list()
for i in range(0, 5):
img = self.new_image(name="test-delete-image-%i" % i)
tag = omero.model.TagAnnotationI()
img.linkAnnotation(tag)
images.append(self.update.saveAndReturnObject(img))
# create dataset
dataset = self.make_dataset('DS-test-2936-%s' % uuid)
# create project
project = self.make_project('PR-test-2936-%s' % uuid)
# put dataset in project
self.link(project, dataset)
# put image in dataset
for image in images:
self.link(dataset, image)
keep = ChildOption(excludeType=[
"TagAnnotation", "TermAnnotation", "FileAnnotation",
"Dataset", "Image"])
dc = Delete2(
targetObjects={'Project': [project.id.val]}, childOptions=[keep])
handle = self.client.sf.submit(dc)
self.waitOnCmd(self.client, handle)
assert not self.query.find('Project', project.id.val)
assert dataset.id.val == self.query.find(
'Dataset', dataset.id.val).id.val
p = omero.sys.Parameters()
p.map = {}
p.map["oid"] = dataset.id
sql = "select im from Image im "\
"left outer join fetch im.datasetLinks dil "\
"left outer join fetch dil.parent d " \
"where d.id = :oid " \
"order by im.id asc"
res = self.query.findAllByQuery(sql, p)
assert 5 == len(res)
for e in res:
if e.id.val not in [i.id.val for i in images]:
                pytest.fail(
                    'Image %i is not in the list [%s]'
                    % (e.id.val, ",".join(str(i.id.val) for i in images)))
def testCheckIfDeleted(self):
uuid = self.ctx.sessionUuid
userName = self.ctx.userName
img = self.new_image(name="to delete - test")
tag = omero.model.TagAnnotationI()
img.linkAnnotation(tag)
iid = self.update.saveAndReturnObject(img).id.val
cmd = Delete2(targetObjects={"Image": [iid]})
handle = self.client.sf.submit(cmd)
callback = self.waitOnCmd(self.client, handle)
cbString = str(handle)
        callback.close(True)  # Also closes the handle
assert not self.query.find("Image", iid)
# create new session and double check
import os
import Ice
c = omero.client(
pmap=['--Ice.Config=' + (os.environ.get("ICE_CONFIG"))])
host = c.ic.getProperties().getProperty('omero.host')
port = int(c.ic.getProperties().getProperty('omero.port'))
cl1 = omero.client(host=host, port=port)
cl1.createSession(userName, userName)
with pytest.raises(Ice.ObjectNotExistException):
omero.cmd.HandlePrx.checkedCast(
cl1.ic.stringToProxy(cbString))
# join session and double check
cl2 = omero.client(host=host, port=port)
cl2.joinSession(uuid)
with pytest.raises(Ice.ObjectNotExistException):
omero.cmd.HandlePrx.checkedCast(
cl2.ic.stringToProxy(cbString))
def testCheckIfDeleted2(self):
uuid = self.ctx.sessionUuid
# dataset with many images
images = list()
for i in range(0, 50):
img = self.new_image(name="test-delete-image-%i" % i)
tag = omero.model.TagAnnotationI()
img.linkAnnotation(tag)
images.append(self.update.saveAndReturnObject(img))
# create dataset
dataset = self.make_dataset('DS-test-%s' % uuid)
# put image in dataset
for img in images:
self.link(dataset, img)
ids = [image.id.val for image in images]
command = Delete2(targetObjects={"Image": ids})
handle = self.client.sf.submit(command)
callback = self.waitOnCmd(self.client, handle, ms=1000, loops=50)
callback.close(True)
p = omero.sys.Parameters()
p.map = {}
p.map["oid"] = dataset.id
sql = "select im from Image im "\
"left outer join fetch im.datasetLinks dil "\
"left outer join fetch dil.parent d " \
"where d.id = :oid " \
"order by im.id asc"
assert 0 == len(self.query.findAllByQuery(sql, p))
def testOddMessage(self):
store = self.client.sf.createRawFileStore()
images = list()
for i in range(0, 10):
img = self.make_image()
iid = img.getId().getValue()
oFile = omero.model.OriginalFileI()
oFile.setName(rstring('companion_file.txt'))
oFile.setPath(rstring('/my/path/to/the/file/'))
oFile.setSize(rlong(7471))
oFile.setHash(rstring("pending"))
oFile.setMimetype(rstring('Companion/Deltavision'))
ofid = self.update.saveAndReturnObject(oFile).id.val
store.setFileId(ofid)
binary = 'aaa\naaaa\naaaaa'
store.write(binary, 0, 0)
of = store.save()
fa = omero.model.FileAnnotationI()
fa.setNs(rstring(omero.constants.namespaces.NSCOMPANIONFILE))
fa.setFile(of)
self.link(img, fa)
images.append(iid)
command = Delete2(targetObjects={"Image": images})
handle = self.client.sf.submit(command)
callback = self.waitOnCmd(self.client, handle)
callback.close(True)
def testDeleteComment(self):
comment = omero.model.CommentAnnotationI()
comment = self.update.saveAndReturnObject(comment)
images = list()
# Single Comment linked to 3 Images and 3 Datasets
for i in range(0, 3):
img = self.new_image(name="testDeleteComment")
img.linkAnnotation(comment)
images.append(self.update.saveAndReturnObject(img))
ds = self.make_dataset("testDeleteComment")
ds.linkAnnotation(comment)
self.update.saveAndReturnObject(ds)
cid = comment.id.val
assert self.query.find("CommentAnnotation", cid)
# Remove comment from first image
linkIds = []
for l in images[0].copyAnnotationLinks():
linkIds.append(l.id.val)
command = Delete2(targetObjects={"ImageAnnotationLink": linkIds})
handle = self.client.sf.submit(command)
self.waitOnCmd(self.client, handle)
handle.close()
# Delete Dry Run...
command = Delete2(targetObjects={"CommentAnnotation": [cid]},
dryRun=True)
handle = self.client.sf.submit(command)
self.waitOnCmd(self.client, handle)
# ...Should tell us that remaining links will be deleted
rsp = handle.getResponse()
handle.close()
assert ('ome.model.annotations.ImageAnnotationLink'
in rsp.deletedObjects)
links = rsp.deletedObjects['ome.model.annotations.ImageAnnotationLink']
assert len(links) == 2
dlnks = rsp.deletedObjects[
'ome.model.annotations.DatasetAnnotationLink']
assert len(dlnks) == 3
# Comment should not yet be deleted
assert self.query.find("CommentAnnotation", cid)
# Finally, delete Comment
command = Delete2(targetObjects={"CommentAnnotation": [cid]})
handle = self.client.sf.submit(command)
self.waitOnCmd(self.client, handle)
handle.close()
assert not self.query.find("CommentAnnotation", cid)
def test3639(self):
uuid = self.ctx.sessionUuid
images = list()
for i in range(0, 5):
images.append(self.make_image())
p = omero.sys.Parameters()
p.map = {}
p.map["oids"] = rlist([rlong(s.id.val) for s in images])
# create dataset
dataset = self.make_dataset('DS-test-2936-%s' % uuid)
# put image in dataset
for image in images:
self.link(dataset, image)
# log in as group owner:
client_o, owner = self.new_client_and_user(
group=self.group, owner=True)
query_o = client_o.sf.getQueryService()
handlers = list()
keep = ChildOption(excludeType=["Image"])
dc = Delete2(
targetObjects={'Dataset': [dataset.id.val]}, childOptions=[keep])
handlers.append(str(client_o.sf.submit(dc)))
imageToDelete = images[2].id.val
dc2 = Delete2(targetObjects={'Image': [imageToDelete]})
handlers.append(str(client_o.sf.submit(dc2)))
def _formatReport(delete_handle):
"""
            Added as a workaround for the changes made in #3006.
"""
delete_report = delete_handle.getResponse()
rv = []
if isinstance(delete_report, omero.cmd.ERR):
rv.append(str(delete_report))
if len(rv) > 0:
return "; ".join(rv)
return None
failure = list()
in_progress = 0
r = None
while(len(handlers) > 0):
for cbString in handlers:
try:
                    handle = omero.cmd.HandlePrx.checkedCast(
                        client_o.ic.stringToProxy(cbString))
cb = omero.callbacks.CmdCallbackI(client_o, handle)
if not cb.block(500): # ms.
                        # No errors possible if in progress
                        # (since no response)
print "in progress", _formatReport(handle)
in_progress += 1
else:
rsp = cb.getResponse()
if isinstance(rsp, omero.cmd.ERR):
r = _formatReport(handle)
if r is not None:
failure.append(r)
else:
failure.append("No report!!!")
else:
r = _formatReport(handle)
if r is not None:
failure.append(r)
cb.close(True) # Close handle
handlers.remove(cbString)
except Exception:
if r is not None:
failure.append(traceback.format_exc())
if len(failure) > 0:
assert False, ";".join(failure)
assert not query_o.find('Dataset', dataset.id.val)
def test5793(self):
uuid = self.ctx.sessionUuid
img = self.new_image(name="delete tagset test")
tag = omero.model.TagAnnotationI()
tag.textValue = rstring("tag %s" % uuid)
tag = self.update.saveAndReturnObject(tag)
img.linkAnnotation(tag)
img = self.update.saveAndReturnObject(img)
tagset = omero.model.TagAnnotationI()
tagset.textValue = rstring("tagset %s" % uuid)
tagset.linkAnnotation(tag)
tagset = self.update.saveAndReturnObject(tagset)
tag = tagset.linkedAnnotationList()[0]
command = Delete2(targetObjects={"Annotation": [tagset.id.val]})
handle = self.client.sf.submit(command)
self.waitOnCmd(self.client, handle)
assert not self.query.find("TagAnnotation", tagset.id.val)
assert tag.id.val == self.query.find(
"TagAnnotation", tag.id.val).id.val
def test7314(self):
"""
Test the delete of an original file when a file annotation is present
"""
o = self.client.upload(__file__)
fa = omero.model.FileAnnotationI()
fa.file = o.proxy()
fa = self.update.saveAndReturnObject(fa)
command = Delete2(targetObjects={"OriginalFile": [o.id.val]})
handle = self.client.sf.submit(command)
self.waitOnCmd(self.client, handle)
with pytest.raises(omero.ServerError):
self.query.get("FileAnnotation", fa.id.val)
def testDeleteOneDatasetFilesetErr(self):
"""
Simple example of the MIF delete bad case:
a single fileset containing 2 images is split among 2 datasets.
Delete one dataset, delete fails.
"""
datasets = self.createDatasets(2, "testDeleteOneDatasetFilesetErr")
images = self.importMIF(2)
for i in range(2):
self.link(datasets[i], images[i])
# Now delete one dataset
delete = Delete2(targetObjects={"Dataset": [datasets[0].id.val]})
self.doSubmit(delete, self.client)
# The dataset should be deleted, but not any images.
assert not self.query.find("Dataset", datasets[0].id.val)
assert images[0].id.val == self.query.find(
"Image", images[0].id.val).id.val
assert images[1].id.val == self.query.find(
"Image", images[1].id.val).id.val
def testDeleteOneImageFilesetErr(self):
"""
Simple example of the MIF delete good case:
two images in a MIF.
Delete one image, the delete should fail.
"""
images = self.importMIF(2)
# Now delete one image
delete = Delete2(targetObjects={"Image": [images[0].id.val]})
self.doSubmit(delete, self.client, test_should_pass=False)
# Neither image should be deleted.
assert images[0].id.val == self.query.find(
"Image", images[0].id.val).id.val
assert images[1].id.val == self.query.find(
"Image", images[1].id.val).id.val
def testDeleteDatasetFilesetOK(self):
"""
Simple example of the MIF delete good case:
a single fileset containing 2 images in one dataset.
Delete the dataset, the delete should succeed.
"""
ds = self.make_dataset("testDeleteDatasetFilesetOK")
images = self.importMIF(2)
fsId = self.query.get("Image", images[0].id.val).fileset.id.val
for i in range(2):
self.link(ds, images[i])
# Now delete the dataset, should succeed
delete = Delete2(targetObjects={"Dataset": [ds.id.val]})
self.doSubmit(delete, self.client)
# The dataset, fileset and both images should be deleted.
assert not self.query.find("Dataset", ds.id.val)
assert not self.query.find("Fileset", fsId)
assert not self.query.find("Image", images[0].id.val)
assert not self.query.find("Image", images[1].id.val)
def testDeleteAllDatasetsFilesetOK(self):
"""
Simple example of the MIF delete bad case:
a single fileset containing 2 images is split among 2 datasets.
Delete all datasets, delete succeeds.
"""
datasets = self.createDatasets(2, "testDeleteAllDatasetsFilesetOK")
images = self.importMIF(2)
fsId = self.query.get("Image", images[0].id.val).fileset.id.val
for i in range(2):
self.link(datasets[i], images[i])
# Now delete all datasets, should succeed
dids = [datasets[0].id.val, datasets[1].id.val]
delete = Delete2(targetObjects={"Dataset": dids})
self.doSubmit(delete, self.client)
# Both datasets, the fileset and both images should be deleted.
assert not self.query.find("Dataset", datasets[0].id.val)
assert not self.query.find("Dataset", datasets[1].id.val)
assert not self.query.find("Fileset", fsId)
assert not self.query.find("Image", images[0].id.val)
assert not self.query.find("Image", images[1].id.val)
def testDeleteAllImagesFilesetOK(self):
"""
Simple example of the MIF delete good case:
two images in a MIF.
Delete all images, the delete should succeed.
"""
images = self.importMIF(2)
fsId = self.query.get("Image", images[0].id.val).fileset.id.val
# Now delete all images, should succeed
iids = [images[0].id.val, images[1].id.val]
delete = Delete2(targetObjects={"Image": iids})
self.doSubmit(delete, self.client)
# The fileset and both images should be deleted.
assert not self.query.find("Fileset", fsId)
assert not self.query.find("Image", images[0].id.val)
assert not self.query.find("Image", images[1].id.val)
def testDeleteFilesetOK(self):
"""
Simple example of the MIF delete good case:
a single fileset containing 2 images.
Delete the fileset, the delete should succeed.
"""
images = self.importMIF(2)
fsId = self.query.get("Image", images[0].id.val).fileset.id.val
# Now delete the fileset, should succeed
delete = Delete2(targetObjects={"Fileset": [fsId]})
self.doSubmit(delete, self.client)
# The dataset, fileset and both images should be deleted.
assert not self.query.find("Fileset", fsId)
assert not self.query.find("Image", images[0].id.val)
assert not self.query.find("Image", images[1].id.val)
def testDeleteImagesTwoFilesetsErr(self):
"""
If we try to partially delete 2 Filesets, both should be returned
by the delete error
"""
# 2 filesets, each with 2 images
imagesFsOne = self.importMIF(2)
imagesFsTwo = self.importMIF(2)
# delete should fail...
iids = [imagesFsOne[0].id.val, imagesFsTwo[0].id.val]
delete = Delete2(targetObjects={"Image": iids})
self.doSubmit(delete, self.client, test_should_pass=False)
def testDeleteDatasetTwoFilesetsErr(self):
"""
If we try to partially delete 2 Filesets, both should be returned
by the delete error
"""
# 2 filesets, each with 2 images
imagesFsOne = self.importMIF(2)
imagesFsTwo = self.importMIF(2)
ds = self.make_dataset("testDeleteDatasetTwoFilesetsErr")
self.importMIF(2)
for i in (imagesFsOne, imagesFsTwo):
self.link(ds, i[0])
# delete should remove only the Dataset
delete = Delete2(targetObjects={"Dataset": [ds.id.val]})
self.doSubmit(delete, self.client)
# The dataset should be deleted.
assert not self.query.find("Dataset", ds.id.val)
# Neither image should be deleted.
for i in (imagesFsOne[0], imagesFsTwo[0]):
assert i.id.val == self.query.find("Image", i.id.val).id.val
def testDeleteProjectWithOneEmptyDataset(self):
"""
P->D
Delete P
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
p = self.make_project()
d = self.make_dataset()
self.link(p, d)
self.delete([p])
assert not self.query.find("Project", p.id.val)
assert not self.query.find("Dataset", d.id.val)
def testDeleteProjectWithEmptyDatasetLinkedToAnotherProjectDefault(self):
"""
P1->D
P2->D
Delete P1
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
p1 = self.make_project()
p2 = self.make_project()
d = self.make_dataset()
self.link(p1, d)
self.link(p2, d)
self.delete([p1])
assert self.query.find("Project", p2.id.val)
assert not self.query.find("Project", p1.id.val)
assert self.query.find("Dataset", d.id.val)
def testDeleteProjectWithEmptyDatasetLinkedToAnotherProjectHard(self):
"""
P1->D
P2->D
Delete P1
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
p1 = self.make_project()
p2 = self.make_project()
d = self.make_dataset()
self.link(p1, d)
self.link(p2, d)
hard = ChildOption(includeType=["Dataset"])
delete = Delete2(
targetObjects={"Project": [p1.id.val]}, childOptions=[hard])
self.doSubmit(delete, self.client)
assert self.query.find("Project", p2.id.val)
assert not self.query.find("Project", p1.id.val)
assert not self.query.find("Dataset", d.id.val)
def testDeleteProjectWithDatasetLinkedToAnotherProject(self):
"""
P1->D->I
P2->D->I
Delete P1
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
p1 = self.make_project()
p2 = self.make_project()
d = self.make_dataset()
i = self.make_image()
self.link(p1, d)
self.link(p2, d)
self.link(d, i)
self.delete([p1])
assert not self.query.find("Project", p1.id.val)
assert self.query.find("Project", p2.id.val)
assert self.query.find("Dataset", d.id.val)
assert self.query.find("Image", i.id.val)
def testDeleteDatasetLinkedToTwoProjects(self):
"""
P1->D->I
P2->D->I
Delete D
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
p1 = self.make_project()
p2 = self.make_project()
d = self.make_dataset()
i = self.make_image()
self.link(p1, d)
self.link(p2, d)
self.link(d, i)
self.delete([d])
assert self.query.find("Project", p1.id.val)
assert self.query.find("Project", p2.id.val)
assert not self.query.find("Image", i.id.val)
assert not self.query.find("Dataset", d.id.val)
def testDeleteDatasetWithImageLinkedToAnotherDatasetDefault(self):
"""
D1->I
D2->I
Delete D1
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
d1 = self.make_dataset()
d2 = self.make_dataset()
i = self.make_image()
self.link(d1, i)
self.link(d2, i)
self.delete([d1])
assert not self.query.find("Dataset", d1.id.val)
assert self.query.find("Dataset", d2.id.val)
assert self.query.find("Image", i.id.val)
def testDeleteDatasetWithImageLinkedToAnotherDatasetHard(self):
"""
D1->I
D2->I
Delete D1
See https://trac.openmicroscopy.org.uk/ome/ticket/12452
"""
d1 = self.make_dataset()
d2 = self.make_dataset()
i = self.make_image()
self.link(d1, i)
self.link(d2, i)
hard = ChildOption(includeType=["Image"])
delete = Delete2(
targetObjects={"Dataset": [d1.id.val]}, childOptions=[hard])
self.doSubmit(delete, self.client)
assert not self.query.find("Dataset", d1.id.val)
assert self.query.find("Dataset", d2.id.val)
assert not self.query.find("Image", i.id.val)
def testStepsDuringDelete(self):
img = self.make_image(name="delete test")
command = Delete2(targetObjects={"Image": [img.id.val]})
handle = self.client.sf.submit(command)
end_by = time() + 5
latest_step = 0
try:
while time() < end_by:
# still within five seconds of request submission
status = handle.getStatus()
# current step increases monotonically
assert latest_step <= status.currentStep
latest_step = status.currentStep
if status.stopTime > 0:
# request stops after last step commenced
assert status.currentStep == status.steps - 1
return
        finally:
            handle.close()
        raise Exception('delete did not complete within five seconds')
if __name__ == '__main__':
if "TRACE" in os.environ:
import trace
tracer = trace.Trace(ignoredirs=[sys.prefix, sys.exec_prefix], trace=1)
tracer.runfunc(pytest.main)
else:
pytest.main()
|
Jc2k/libcloudcore
|
refs/heads/master
|
contrib/ingest_profitbricks.py
|
1
|
from __future__ import absolute_import
import collections
import json
import os
import xmltodict
import requests
def xlist(v):
if not isinstance(v, list):
return [v]
return v
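# e.g. xlist("a") -> ["a"], while xlist(["a", "b"]) is returned unchanged;
# xmltodict collapses single-element sequences, so this normalises them.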
def process_data(output_path):
response = requests.get("https://api.profitbricks.com/1.3/wsdl")
wsdl = xmltodict.parse(response.text)
model = collections.OrderedDict()
model['metadata'] = collections.OrderedDict()
model['endpoints'] = []
model['operations'] = collections.OrderedDict()
model['shapes'] = collections.OrderedDict()
url = wsdl['definitions']['service']['port']['soap:address']['@location']
for shape in wsdl['definitions']['types']['xs:schema']['xs:simpleType']:
s = collections.OrderedDict()
if "xs:restriction" in shape and shape['xs:restriction']:
s['type'] = shape['xs:restriction']['@base']
s['choices'] = [v["@value"] for v in xlist(shape['xs:restriction']['xs:enumeration'])]
model['shapes'][shape['@name']] = s
for shape in wsdl['definitions']['types']['xs:schema']['xs:complexType']:
s = {}
if "xs:sequence" in shape and shape['xs:sequence']:
members = s['members'] = []
elements = shape['xs:sequence'].get('xs:element', [])
if not isinstance(elements, list):
elements = [elements]
for member in elements:
name = member['@name']
members.append({
"name": name,
"shape": member['@type'].split(":")[1],
"min": member.get('@minOccurs', 0),
})
model['shapes'][shape['@name']] = s
for operation in wsdl['definitions']['portType']['operation']:
model['operations'][operation["@name"]] = {
"input": {"shape": operation["input"]["@message"].split(":")[1]},
"output": {"shape": operation["output"]["@message"].split(":")[1]},
}
with open(os.path.join(output_path, "service.json"), "w") as fp:
json.dump(model, fp, indent=4, separators=(',', ': '))
if __name__ == "__main__":
output_path = os.path.join("libcloudcore", "data", "profitbricks")
if not os.path.exists(output_path):
os.makedirs(output_path)
process_data(output_path)
|
jiaaro/django-badges
|
refs/heads/master
|
badges/south_migrations/__init__.py
|
19
|
__author__ = 'igor'
|
burzillibus/RobHome
|
refs/heads/master
|
venv/lib/python2.7/site-packages/django/contrib/sites/middleware.py
|
234
|
from django.utils.deprecation import MiddlewareMixin
from .shortcuts import get_current_site
class CurrentSiteMiddleware(MiddlewareMixin):
"""
    Middleware that sets the `site` attribute on the request object.
"""
def process_request(self, request):
request.site = get_current_site(request)
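# Settings sketch (standard Django wiring, shown for reference): with the sites
# app installed and this middleware enabled, views can read request.site.
#
#     INSTALLED_APPS = [..., 'django.contrib.sites']
#     MIDDLEWARE = [..., 'django.contrib.sites.middleware.CurrentSiteMiddleware']
#     SITE_ID = 1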
|
Nyks45/Veno-M
|
refs/heads/Release
|
tools/perf/tests/attr.py
|
3174
|
#! /usr/bin/python
import os
import sys
import glob
import optparse
import tempfile
import logging
import shutil
import ConfigParser
class Fail(Exception):
def __init__(self, test, msg):
self.msg = msg
self.test = test
def getMsg(self):
return '\'%s\' - %s' % (self.test.path, self.msg)
class Unsup(Exception):
def __init__(self, test):
self.test = test
def getMsg(self):
return '\'%s\'' % self.test.path
class Event(dict):
terms = [
'cpu',
'flags',
'type',
'size',
'config',
'sample_period',
'sample_type',
'read_format',
'disabled',
'inherit',
'pinned',
'exclusive',
'exclude_user',
'exclude_kernel',
'exclude_hv',
'exclude_idle',
'mmap',
'comm',
'freq',
'inherit_stat',
'enable_on_exec',
'task',
'watermark',
'precise_ip',
'mmap_data',
'sample_id_all',
'exclude_host',
'exclude_guest',
'exclude_callchain_kernel',
'exclude_callchain_user',
'wakeup_events',
'bp_type',
'config1',
'config2',
'branch_sample_type',
'sample_regs_user',
'sample_stack_user',
]
def add(self, data):
for key, val in data:
log.debug(" %s = %s" % (key, val))
self[key] = val
def __init__(self, name, data, base):
log.debug(" Event %s" % name);
self.name = name;
self.group = ''
self.add(base)
self.add(data)
def compare_data(self, a, b):
# Allow multiple values in assignment separated by '|'
a_list = a.split('|')
b_list = b.split('|')
for a_item in a_list:
for b_item in b_list:
if (a_item == b_item):
return True
elif (a_item == '*') or (b_item == '*'):
return True
return False
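    # e.g. compare_data('0|1', '1') -> True; a '*' on either side matches too.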
def equal(self, other):
for t in Event.terms:
log.debug(" [%s] %s %s" % (t, self[t], other[t]));
if not self.has_key(t) or not other.has_key(t):
return False
if not self.compare_data(self[t], other[t]):
return False
return True
def diff(self, other):
for t in Event.terms:
if not self.has_key(t) or not other.has_key(t):
continue
if not self.compare_data(self[t], other[t]):
log.warning("expected %s=%s, got %s" % (t, self[t], other[t]))
# Test file description needs to have following sections:
# [config]
# - just single instance in file
# - needs to specify:
# 'command' - perf command name
# 'args' - special command arguments
# 'ret' - expected command return value (0 by default)
#
# [eventX:base]
# - one or multiple instances in file
# - expected values assignments
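#
# A hypothetical file illustrating the layout (field values made up):
#
#   [config]
#   command = record
#   args    = kill >/dev/null 2>&1
#   ret     = 1
#
#   [event:base-record]
#   sample_period = 4000
#   sample_type   = 263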
class Test(object):
def __init__(self, path, options):
parser = ConfigParser.SafeConfigParser()
parser.read(path)
log.warning("running '%s'" % path)
self.path = path
self.test_dir = options.test_dir
self.perf = options.perf
self.command = parser.get('config', 'command')
self.args = parser.get('config', 'args')
try:
self.ret = parser.get('config', 'ret')
except:
self.ret = 0
self.expect = {}
self.result = {}
log.debug(" loading expected events");
self.load_events(path, self.expect)
def is_event(self, name):
if name.find("event") == -1:
return False
else:
return True
def load_events(self, path, events):
parser_event = ConfigParser.SafeConfigParser()
parser_event.read(path)
        # The event record section header contains the word 'event',
        # optionally followed by ':', which allows a 'parent
        # event' to be loaded first as a base
for section in filter(self.is_event, parser_event.sections()):
parser_items = parser_event.items(section);
base_items = {}
# Read parent event if there's any
if (':' in section):
base = section[section.index(':') + 1:]
parser_base = ConfigParser.SafeConfigParser()
parser_base.read(self.test_dir + '/' + base)
base_items = parser_base.items('event')
e = Event(section, parser_items, base_items)
events[section] = e
def run_cmd(self, tempdir):
cmd = "PERF_TEST_ATTR=%s %s %s -o %s/perf.data %s" % (tempdir,
self.perf, self.command, tempdir, self.args)
ret = os.WEXITSTATUS(os.system(cmd))
log.info(" '%s' ret %d " % (cmd, ret))
if ret != int(self.ret):
raise Unsup(self)
def compare(self, expect, result):
match = {}
log.debug(" compare");
# For each expected event find all matching
# events in result. Fail if there's not any.
for exp_name, exp_event in expect.items():
exp_list = []
log.debug(" matching [%s]" % exp_name)
for res_name, res_event in result.items():
log.debug(" to [%s]" % res_name)
if (exp_event.equal(res_event)):
exp_list.append(res_name)
log.debug(" ->OK")
else:
log.debug(" ->FAIL");
log.debug(" match: [%s] matches %s" % (exp_name, str(exp_list)))
        # we did not find any matching event - fail
if (not exp_list):
exp_event.diff(res_event)
raise Fail(self, 'match failure');
match[exp_name] = exp_list
# For each defined group in the expected events
# check we match the same group in the result.
for exp_name, exp_event in expect.items():
group = exp_event.group
if (group == ''):
continue
for res_name in match[exp_name]:
res_group = result[res_name].group
if res_group not in match[group]:
raise Fail(self, 'group failure')
log.debug(" group: [%s] matches group leader %s" %
(exp_name, str(match[group])))
log.debug(" matched")
def resolve_groups(self, events):
for name, event in events.items():
group_fd = event['group_fd'];
if group_fd == '-1':
continue;
for iname, ievent in events.items():
if (ievent['fd'] == group_fd):
event.group = iname
log.debug('[%s] has group leader [%s]' % (name, iname))
break;
def run(self):
tempdir = tempfile.mkdtemp();
try:
# run the test script
self.run_cmd(tempdir);
# load events expectation for the test
log.debug(" loading result events");
for f in glob.glob(tempdir + '/event*'):
self.load_events(f, self.result);
# resolve group_fd to event names
self.resolve_groups(self.expect);
self.resolve_groups(self.result);
# do the expectation - results matching - both ways
self.compare(self.expect, self.result)
self.compare(self.result, self.expect)
finally:
# cleanup
shutil.rmtree(tempdir)
def run_tests(options):
for f in glob.glob(options.test_dir + '/' + options.test):
try:
Test(f, options).run()
except Unsup, obj:
log.warning("unsupp %s" % obj.getMsg())
def setup_log(verbose):
global log
level = logging.CRITICAL
if verbose == 1:
level = logging.WARNING
if verbose == 2:
level = logging.INFO
if verbose >= 3:
level = logging.DEBUG
log = logging.getLogger('test')
log.setLevel(level)
ch = logging.StreamHandler()
ch.setLevel(level)
formatter = logging.Formatter('%(message)s')
ch.setFormatter(formatter)
log.addHandler(ch)
USAGE = '''%s [OPTIONS]
-d dir # tests dir
-p path # perf binary
-t test # single test
-v # verbose level
''' % sys.argv[0]
def main():
parser = optparse.OptionParser(usage=USAGE)
parser.add_option("-t", "--test",
action="store", type="string", dest="test")
parser.add_option("-d", "--test-dir",
action="store", type="string", dest="test_dir")
parser.add_option("-p", "--perf",
action="store", type="string", dest="perf")
parser.add_option("-v", "--verbose",
action="count", dest="verbose")
options, args = parser.parse_args()
if args:
parser.error('FAILED wrong arguments %s' % ' '.join(args))
return -1
setup_log(options.verbose)
if not options.test_dir:
print 'FAILED no -d option specified'
sys.exit(-1)
if not options.test:
options.test = 'test*'
try:
run_tests(options)
except Fail, obj:
print "FAILED %s" % obj.getMsg();
sys.exit(-1)
sys.exit(0)
if __name__ == '__main__':
main()
|
node-migrator-bot/node-gyp
|
refs/heads/master
|
legacy/tools/gyp/PRESUBMIT.py
|
42
|
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for GYP.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
def CheckChangeOnUpload(input_api, output_api):
report = []
report.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api))
return report
def CheckChangeOnCommit(input_api, output_api):
report = []
license = (
r'.*? Copyright \(c\) %(year)s Google Inc\. All rights reserved\.\n'
r'.*? Use of this source code is governed by a BSD-style license that '
r'can be\n'
r'.*? found in the LICENSE file\.\n'
) % {
'year': input_api.time.strftime('%Y'),
}
report.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api, license_header=license))
report.extend(input_api.canned_checks.CheckTreeIsOpen(
input_api, output_api,
'http://gyp-status.appspot.com/status',
'http://gyp-status.appspot.com/current'))
import sys
old_sys_path = sys.path
try:
sys.path = ['pylib', 'test/lib'] + sys.path
report.extend(input_api.canned_checks.RunPylint(
input_api,
output_api))
finally:
sys.path = old_sys_path
return report
def GetPreferredTrySlaves():
return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac']
|
googleinterns/wss
|
refs/heads/master
|
third_party/deeplab/utils/get_dataset_colormap.py
|
4
|
# Lint as: python2, python3
# Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Visualizes the segmentation results via specified color map.
Visualizes the semantic segmentation results by the color map
defined by the different datasets. Supported colormaps are:
* ADE20K (http://groups.csail.mit.edu/vision/datasets/ADE20K/).
* Cityscapes dataset (https://www.cityscapes-dataset.com).
* Mapillary Vistas (https://research.mapillary.com).
* PASCAL VOC 2012 (http://host.robots.ox.ac.uk/pascal/VOC/).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from six.moves import range
# Dataset names.
_ADE20K = 'ade20k'
_CITYSCAPES = 'cityscapes'
_MAPILLARY_VISTAS = 'mapillary_vistas'
_PASCAL = 'pascal'
# Max number of entries in the colormap for each dataset.
_DATASET_MAX_ENTRIES = {
_ADE20K: 151,
_CITYSCAPES: 256,
_MAPILLARY_VISTAS: 66,
_PASCAL: 512,
}
def create_ade20k_label_colormap():
"""Creates a label colormap used in ADE20K segmentation benchmark.
Returns:
A colormap for visualizing segmentation results.
"""
return np.asarray([
[0, 0, 0],
[120, 120, 120],
[180, 120, 120],
[6, 230, 230],
[80, 50, 50],
[4, 200, 3],
[120, 120, 80],
[140, 140, 140],
[204, 5, 255],
[230, 230, 230],
[4, 250, 7],
[224, 5, 255],
[235, 255, 7],
[150, 5, 61],
[120, 120, 70],
[8, 255, 51],
[255, 6, 82],
[143, 255, 140],
[204, 255, 4],
[255, 51, 7],
[204, 70, 3],
[0, 102, 200],
[61, 230, 250],
[255, 6, 51],
[11, 102, 255],
[255, 7, 71],
[255, 9, 224],
[9, 7, 230],
[220, 220, 220],
[255, 9, 92],
[112, 9, 255],
[8, 255, 214],
[7, 255, 224],
[255, 184, 6],
[10, 255, 71],
[255, 41, 10],
[7, 255, 255],
[224, 255, 8],
[102, 8, 255],
[255, 61, 6],
[255, 194, 7],
[255, 122, 8],
[0, 255, 20],
[255, 8, 41],
[255, 5, 153],
[6, 51, 255],
[235, 12, 255],
[160, 150, 20],
[0, 163, 255],
[140, 140, 140],
[250, 10, 15],
[20, 255, 0],
[31, 255, 0],
[255, 31, 0],
[255, 224, 0],
[153, 255, 0],
[0, 0, 255],
[255, 71, 0],
[0, 235, 255],
[0, 173, 255],
[31, 0, 255],
[11, 200, 200],
[255, 82, 0],
[0, 255, 245],
[0, 61, 255],
[0, 255, 112],
[0, 255, 133],
[255, 0, 0],
[255, 163, 0],
[255, 102, 0],
[194, 255, 0],
[0, 143, 255],
[51, 255, 0],
[0, 82, 255],
[0, 255, 41],
[0, 255, 173],
[10, 0, 255],
[173, 255, 0],
[0, 255, 153],
[255, 92, 0],
[255, 0, 255],
[255, 0, 245],
[255, 0, 102],
[255, 173, 0],
[255, 0, 20],
[255, 184, 184],
[0, 31, 255],
[0, 255, 61],
[0, 71, 255],
[255, 0, 204],
[0, 255, 194],
[0, 255, 82],
[0, 10, 255],
[0, 112, 255],
[51, 0, 255],
[0, 194, 255],
[0, 122, 255],
[0, 255, 163],
[255, 153, 0],
[0, 255, 10],
[255, 112, 0],
[143, 255, 0],
[82, 0, 255],
[163, 255, 0],
[255, 235, 0],
[8, 184, 170],
[133, 0, 255],
[0, 255, 92],
[184, 0, 255],
[255, 0, 31],
[0, 184, 255],
[0, 214, 255],
[255, 0, 112],
[92, 255, 0],
[0, 224, 255],
[112, 224, 255],
[70, 184, 160],
[163, 0, 255],
[153, 0, 255],
[71, 255, 0],
[255, 0, 163],
[255, 204, 0],
[255, 0, 143],
[0, 255, 235],
[133, 255, 0],
[255, 0, 235],
[245, 0, 255],
[255, 0, 122],
[255, 245, 0],
[10, 190, 212],
[214, 255, 0],
[0, 204, 255],
[20, 0, 255],
[255, 255, 0],
[0, 153, 255],
[0, 41, 255],
[0, 255, 204],
[41, 0, 255],
[41, 255, 0],
[173, 0, 255],
[0, 245, 255],
[71, 0, 255],
[122, 0, 255],
[0, 255, 184],
[0, 92, 255],
[184, 255, 0],
[0, 133, 255],
[255, 214, 0],
[25, 194, 194],
[102, 255, 0],
[92, 0, 255],
])
def create_cityscapes_label_colormap():
"""Creates a label colormap used in CITYSCAPES segmentation benchmark.
Returns:
A colormap for visualizing segmentation results.
"""
colormap = np.zeros((256, 3), dtype=np.uint8)
colormap[0] = [128, 64, 128]
colormap[1] = [244, 35, 232]
colormap[2] = [70, 70, 70]
colormap[3] = [102, 102, 156]
colormap[4] = [190, 153, 153]
colormap[5] = [153, 153, 153]
colormap[6] = [250, 170, 30]
colormap[7] = [220, 220, 0]
colormap[8] = [107, 142, 35]
colormap[9] = [152, 251, 152]
colormap[10] = [70, 130, 180]
colormap[11] = [220, 20, 60]
colormap[12] = [255, 0, 0]
colormap[13] = [0, 0, 142]
colormap[14] = [0, 0, 70]
colormap[15] = [0, 60, 100]
colormap[16] = [0, 80, 100]
colormap[17] = [0, 0, 230]
colormap[18] = [119, 11, 32]
return colormap
def create_mapillary_vistas_label_colormap():
"""Creates a label colormap used in Mapillary Vistas segmentation benchmark.
Returns:
A colormap for visualizing segmentation results.
"""
return np.asarray([
[165, 42, 42],
[0, 192, 0],
[196, 196, 196],
[190, 153, 153],
[180, 165, 180],
[102, 102, 156],
[102, 102, 156],
[128, 64, 255],
[140, 140, 200],
[170, 170, 170],
[250, 170, 160],
[96, 96, 96],
[230, 150, 140],
[128, 64, 128],
[110, 110, 110],
[244, 35, 232],
[150, 100, 100],
[70, 70, 70],
[150, 120, 90],
[220, 20, 60],
[255, 0, 0],
[255, 0, 0],
[255, 0, 0],
[200, 128, 128],
[255, 255, 255],
[64, 170, 64],
[128, 64, 64],
[70, 130, 180],
[255, 255, 255],
[152, 251, 152],
[107, 142, 35],
[0, 170, 30],
[255, 255, 128],
[250, 0, 30],
[0, 0, 0],
[220, 220, 220],
[170, 170, 170],
[222, 40, 40],
[100, 170, 30],
[40, 40, 40],
[33, 33, 33],
[170, 170, 170],
[0, 0, 142],
[170, 170, 170],
[210, 170, 100],
[153, 153, 153],
[128, 128, 128],
[0, 0, 142],
[250, 170, 30],
[192, 192, 192],
[220, 220, 0],
[180, 165, 180],
[119, 11, 32],
[0, 0, 142],
[0, 60, 100],
[0, 0, 142],
[0, 0, 90],
[0, 0, 230],
[0, 80, 100],
[128, 64, 64],
[0, 0, 110],
[0, 0, 70],
[0, 0, 192],
[32, 32, 32],
[0, 0, 0],
[0, 0, 0],
])
def create_pascal_label_colormap():
"""Creates a label colormap used in PASCAL VOC segmentation benchmark.
Returns:
A colormap for visualizing segmentation results.
"""
colormap = np.zeros((_DATASET_MAX_ENTRIES[_PASCAL], 3), dtype=int)
ind = np.arange(_DATASET_MAX_ENTRIES[_PASCAL], dtype=int)
for shift in reversed(list(range(8))):
for channel in range(3):
colormap[:, channel] |= bit_get(ind, channel) << shift
ind >>= 3
return colormap
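# Worked example of the bit interleaving above: label 1 (binary ...001) sets
# bit 7 of the red channel on the first pass (shift=7, channel=0), giving
# [128, 0, 0]; label 2 likewise maps to [0, 128, 0], the usual VOC palette.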
def get_ade20k_name():
return _ADE20K
def get_cityscapes_name():
return _CITYSCAPES
def get_mapillary_vistas_name():
return _MAPILLARY_VISTAS
def get_pascal_name():
return _PASCAL
def bit_get(val, idx):
"""Gets the bit value.
Args:
val: Input value, int or numpy int array.
idx: Which bit of the input val.
Returns:
The "idx"-th bit of input val.
"""
return (val >> idx) & 1
def create_label_colormap(dataset=_PASCAL):
"""Creates a label colormap for the specified dataset.
Args:
dataset: The colormap used in the dataset.
Returns:
A numpy array of the dataset colormap.
Raises:
ValueError: If the dataset is not supported.
"""
if dataset == _ADE20K:
return create_ade20k_label_colormap()
elif dataset == _CITYSCAPES:
return create_cityscapes_label_colormap()
elif dataset == _MAPILLARY_VISTAS:
return create_mapillary_vistas_label_colormap()
elif dataset == _PASCAL:
return create_pascal_label_colormap()
else:
raise ValueError('Unsupported dataset.')
def label_to_color_image(label, dataset=_PASCAL):
"""Adds color defined by the dataset colormap to the label.
Args:
label: A 2D array with integer type, storing the segmentation label.
dataset: The colormap used in the dataset.
Returns:
result: A 2D array with floating type. The element of the array
is the color indexed by the corresponding element in the input label
to the dataset color map.
Raises:
ValueError: If label is not of rank 2 or its value is larger than color
map maximum entry.
"""
if label.ndim != 2:
raise ValueError('Expect 2-D input label. Got {}'.format(label.shape))
if np.max(label) >= _DATASET_MAX_ENTRIES[dataset]:
raise ValueError(
'label value too large: {} >= {}.'.format(
np.max(label), _DATASET_MAX_ENTRIES[dataset]))
colormap = create_label_colormap(dataset)
return colormap[label]
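# Minimal usage sketch (assumes a small 2-D integer label array):
#
#   label = np.array([[0, 1], [2, 1]])
#   rgb = label_to_color_image(label)  # PASCAL colormap by default
#   # rgb.shape == (2, 2, 3); rgb[0, 1] is [128, 0, 0]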
def get_dataset_colormap_max_entries(dataset):
return _DATASET_MAX_ENTRIES[dataset]
|
akrherz/dep
|
refs/heads/main
|
scripts/plots/yearly_summary.py
|
2
|
import datetime
import cStringIO
import psycopg2
from shapely.wkb import loads
import numpy as np
import sys
from geopandas import read_postgis
import matplotlib
matplotlib.use("agg")
from pyiem.plot import MapPlot
import matplotlib.pyplot as plt
from matplotlib.patches import Polygon
from matplotlib.collections import PatchCollection
import matplotlib.colors as mpcolors
import cartopy.crs as ccrs
import cartopy.feature as cfeature
from pyiem.util import get_dbconn
V2NAME = {
"avg_loss": "Detachment",
"qc_precip": "Precipitation",
"avg_delivery": "Delivery",
"avg_runoff": "Runoff",
}
V2MULTI = {
"avg_loss": 4.463,
"qc_precip": 1.0 / 25.4,
"avg_delivery": 4.463,
"avg_runoff": 1.0 / 25.4,
}
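# The multipliers above appear to be unit conversions: 1 kg/m^2 is roughly
# 4.463 US tons/acre, and 1/25.4 converts millimetres to inches.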
V2UNITS = {
"avg_loss": "tons/acre",
"qc_precip": "inches",
"avg_delivery": "tons/acre",
"avg_runoff": "inches",
}
V2RAMP = {
"avg_loss": [0, 2.5, 5, 10, 20, 40, 60],
"qc_precip": [15, 25, 35, 45, 55],
"avg_delivery": [0, 2.5, 5, 10, 20, 40, 60],
"avg_runoff": [0, 2.5, 5, 10, 15, 30],
}
year = int(sys.argv[1])
v = sys.argv[2]
ts = datetime.date(year, 1, 1)
ts2 = datetime.date(year, 12, 31)
scenario = 0
# suggested for runoff and precip
if v in ["qc_precip", "avg_runoff"]:
c = ["#ffffa6", "#9cf26d", "#76cc94", "#6399ba", "#5558a1"]
# suggested for detachment
elif v in ["avg_loss"]:
c = ["#cbe3bb", "#c4ff4d", "#ffff4d", "#ffc44d", "#ff4d4d", "#c34dee"]
# suggested for delivery
elif v in ["avg_delivery"]:
c = ["#ffffd2", "#ffff4d", "#ffe0a5", "#eeb74d", "#ba7c57", "#96504d"]
cmap = mpcolors.ListedColormap(c, "james")
cmap.set_under("white")
cmap.set_over("black")
pgconn = get_dbconn("idep")
cursor = pgconn.cursor()
title = "for %s" % (ts.strftime("%-d %B %Y"),)
if ts != ts2:
title = "for period between %s and %s" % (
ts.strftime("%-d %b %Y"),
ts2.strftime("%-d %b %Y"),
)
m = MapPlot(
axisbg="#EEEEEE",
nologo=True,
sector="iowa",
nocaption=True,
title="DEP %s %s" % (V2NAME[v], title),
caption="Daily Erosion Project",
)
# Check that we have data for this date!
cursor.execute(
"""
SELECT value from properties where key = 'last_date_0'
"""
)
lastts = datetime.datetime.strptime(cursor.fetchone()[0], "%Y-%m-%d")
floor = datetime.date(2007, 1, 1)
df = read_postgis(
"""
WITH data as (
SELECT huc_12,
sum("""
+ v
+ """) as d from results_by_huc12
WHERE scenario = %s and valid >= %s and valid <= %s
GROUP by huc_12)
SELECT ST_Transform(simple_geom, 4326) as geo, coalesce(d.d, 0) as data
from huc12 i LEFT JOIN data d
ON (i.huc_12 = d.huc_12) WHERE i.scenario = %s and i.states ~* 'IA'
""",
pgconn,
params=(scenario, ts, ts2, scenario),
geom_col="geo",
index_col=None,
)
df["data"] = df["data"] * V2MULTI[v]
if df["data"].max() < 0.01:
bins = [0.01, 0.02, 0.03, 0.04, 0.05]
else:
bins = V2RAMP[v]
norm = mpcolors.BoundaryNorm(bins, cmap.N)
patches = []
# m.ax.add_geometries(df['geo'], ccrs.PlateCarree())
for i, row in df.iterrows():
c = cmap(norm([row["data"]]))[0]
arr = np.asarray(row["geo"].exterior)
points = m.ax.projection.transform_points(
ccrs.Geodetic(), arr[:, 0], arr[:, 1]
)
p = Polygon(points[:, :2], fc=c, ec="k", zorder=2, lw=0.1)
m.ax.add_patch(p)
# m.ax.add_collection(PatchCollection(patches, match_original=True))
m.drawcounties()
m.drawcities()
lbl = [round(_, 2) for _ in bins]
u = "%s, Avg: %.2f" % (V2UNITS[v], df["data"].mean())
m.draw_colorbar(
bins,
cmap,
norm,
clevlabels=lbl,
title="%s :: %s" % (V2NAME[v], V2UNITS[v]),
)
plt.savefig("%s_%s.png" % (year, v))
|
JT5D/Alfred-Popclip-Sublime
|
refs/heads/master
|
Geeklets/forecasts.py
|
1
|
""" forecasts
ROBERT WOLTERMAN (xtacocorex) - 2012
FORECAST BASE CLASSES - ALL SERVICES SHOULD INHERIT Forecasts
AND SHOULD STORE ALL FORECAST DAY DATA INTO FCDay
"""
# CHANGELOG
# 21 APRIL 2012
# - EXTRACTION FROM THE MAIN SCRIPT AND PUT INTO THE SERVICES MODULE
# MODULE IMPORTS
import urllib2, json
import location
from globals import *
class FCDay:
def __init__(self):
# NOT ALL WEATHER SITES SUPPORT THESE FIELDS
self.condition = "--"
self.high = "--"
self.low = "--"
self.curtemp = "--"
self.date = "--"
self.day = "--"
self.code = "--"
self.imageurl = "--"
self.pcntprecip = "--"
# OTHER DATA THAT ISN'T PUT IN THE __REPR__
self.sunrise = "--"
self.sunset = "--"
self.windspeed = "--"
self.windgust = "--"
self.winddir = "--"
self.tempfeel = "--"
self.humidity = "--"
self.dewpoint = "--"
self.pressure = "--"
self.presrise = "--"
self.visibility = "--"
self.sunrise = "--"
self.sunset = "--"
def __repr__(self):
return repr((self.condition, self.high, self.low, self.curtemp, self.date, self.day, self.code, self.imageurl, self.pcntprecip))
class Forecasts:
def __init__(self):
self.url = ""
self.location = location.Location()
self.units = {}
self.forecasts = []
self.numfcasts = 0
self.jsondata = {}
def setLocationData(self,opts):
if opts.locgrabber or opts.locfeeder:
self.location.getLocation(opts)
else:
# DO NOTHING
return
def getData(self,opts):
# GET THE URL DATA
if self.url != '':
urld = urllib2.urlopen(self.url)
# READ THE JSON SHENANIGANS
lines = urld.read()
if opts.debug:
print "\n** lines after reading url data **"
print lines
print DEBUGSPACER
# CLOSE THE WEB PAGE
urld.close()
# REMOVE INVALID VARIABLES IF THEY EXIST
lines = lines.replace('\n','')
lines = lines.replace('null','"null"')
# SET THE CLASS VARIABLE FOR JSON DATA
#self.jsondata = ast.literal_eval(lines)
self.jsondata = json.loads(lines)
if opts.debug:
print "\n** actual jsondata dictionary"
print self.jsondata
else:
print "*** NO URL CREATED, PLEASE CALL setURL() PRIOR TO CALLING getJSON() ***"
|
endlisnis/weather-records
|
refs/heads/master
|
consecutive.py
|
1
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from collections import defaultdict
from fieldOperators import *
from howOften import dateDateStr, winterFromDate
from makefit import makeFit
from monthName import monthName
from plotdict import plotdict
from reversedict import reverseDict
import daily, sys, gnuplot, linear, time, getopt, argparse, fnmatch
import datetime as dt
now = dt.datetime.now().date()
data = None
def keyOfMinMaxValue(data):
minKey = None
maxKey = None
for (key, value) in data.items():
if value == None:
continue
if minKey == None:
minKey = [key]
maxKey = [key]
elif value < data[minKey[0]]:
minKey = [key]
elif value == data[minKey[0]]:
minKey.append(key)
elif value > data[maxKey[0]]:
maxKey = [key]
elif value == data[maxKey[0]]:
maxKey.append(key)
return (minKey, maxKey)
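# Ties are preserved as lists, e.g.
#   keyOfMinMaxValue({'a': 1, 'b': 3, 'c': 1}) -> (['a', 'c'], ['b'])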
def valuesFromKeyList(db, keyList):
return filter(lambda t: t!=None, [db.get(c,None) for c in keyList])
def count(cityName, data, expr, name, endDate,
minRunLen,
showNth,
verbose=True,
skipIncomplete=False,
):
runByStartDate = {}
currentRunStartDate = None
currentRun = []
todaysRun = 0
for sampleDate in daily.dayRange(min(data.keys()), endDate):
dayValues = data.get(sampleDate, None)
val = None
flag = ''
if dayValues != None:
#print day, dayValues
try:
val = dayValues.eval(expr, {'time': sampleDate})
except AttributeError:
print(dayValues)
raise
if val is not None:
if ( val is False
or ( type(val) is tuple and val[0] is False )
or skipIncomplete and 'I' in flag
):
if len(currentRun) >= minRunLen:
#print currentRunStartDate, len(currentRun), ['%.1f' % a for a in currentRun]
runByStartDate[currentRunStartDate] = currentRun
if sampleDate == endDate - dt.timedelta(1):
print('today breaks a run of %d, starting on %s' % (len(currentRun), currentRunStartDate))
currentRun = []
currentRunStartDate = None
else:
if len(currentRun) == 0:
currentRunStartDate = sampleDate
if type(val) is tuple:
currentRun.append(val[1])
else:
currentRun.append(val)
if sampleDate == endDate - dt.timedelta(1):
todaysRun = len(currentRun)
else:
if len(currentRun) >= minRunLen:
runByStartDate[currentRunStartDate] = currentRun
currentRun = []
currentRunStartDate = None
if len(currentRun) >= minRunLen:
runByStartDate[currentRunStartDate] = currentRun
#print runByStartDate
startDateByRunLen = defaultdict(list)
longestRunByWinter = defaultdict(int)
for startDate, run in sorted(runByStartDate.items()):
runlen = len(run)
winter = winterFromDate(startDate)
longestRunByWinter[winter] = max(runlen, longestRunByWinter[winter])
ds = dateDateStr( (startDate, startDate+dt.timedelta(days=runlen-1)) )
print(ds, len(run), run) #, startDateByRunLen
startDateByRunLen[runlen] = list(reversed(sorted(startDateByRunLen[runlen]+[startDate])))
lengths = sorted(startDateByRunLen.keys())
most = lengths[-1]
lastStartDate = max(runByStartDate.keys())
print('lastStartDate', lastStartDate)
if verbose:
print("most %s was %d (%s)" % (name, most, startDateByRunLen[most]))
nth = 1
for l in reversed(lengths):
for d in startDateByRunLen[l]:
if nth <= showNth or d == lastStartDate:
print('%d) %s (%d)' % (nth, dateDateStr( (d, d+dt.timedelta(days=l-1)) ), l), tuple(runByStartDate[d]))
nth += len(startDateByRunLen[l])
print("today's run was %d" % todaysRun, currentRun)
maxLengthPerModernWinter = tuple(
longestRunByWinter[x] for x in filter(
lambda t: t in range(winterFromDate(now)-30,
winterFromDate(now)),
longestRunByWinter.keys()))
print('30-year average: {:.1f}'
.format(sum(maxLengthPerModernWinter)/len(maxLengthPerModernWinter)))
#for winter, runlen in sorted(longestRunByWinter.items()):
# print(winter, runlen)
def dispatch(cityName, firstYear,
endDate,
expression,
minRunLen,
showNth,
verbose=True,
skipIncomplete=False):
data = daily.load(cityName)
if firstYear != None:
for key in tuple(data.keys()):
if key.year < firstYear:
del data[key]
return count(cityName, data, expression, '??', endDate,
minRunLen,
showNth,
verbose, skipIncomplete=skipIncomplete)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Determine the longest number of days some condition has been true.')
parser.add_argument('-x', '--exp', help='Expression', required=True)
parser.add_argument('-c', '--city', default='ottawa')
parser.add_argument('-f', '--firstYear', help='Ignore years before this value', type=int)
parser.add_argument('-e', '--end', default=str(now + dt.timedelta(1)))
parser.add_argument('-l', '--last')
parser.add_argument('--nth', type=int, default=20)
parser.add_argument('-i', help='Skip incomplete.', action='store_true', default=False)
parser.add_argument('-r', help='Minimum run', type=int, default=3)
args = parser.parse_args()
if args.end != None:
(y,m,d) = map(int, args.end.split('-'))
endDate = dt.date(y,m,d)
    if args.last is not None:
(y,m,d) = map(int, args.last.split('-'))
endDate = dt.date(y,m,d)+dt.timedelta(1)
dispatch(args.city, args.firstYear,
endDate,
args.exp,
showNth=args.nth,
skipIncomplete=args.i,
minRunLen=args.r)
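Aside (editorial sketch, not part of the original script): the run-detection loop in count() is, at heart, a grouping of consecutive truthy samples. The same idea can be expressed with itertools.groupby; all data and names below are hypothetical:

import datetime as dt
import itertools

# 20 days of made-up samples; every fifth day fails the condition.
samples = {dt.date(2021, 1, 1) + dt.timedelta(d): (d % 5 != 0) for d in range(20)}

runs = []
for truthy, group in itertools.groupby(sorted(samples), key=samples.get):
    days = list(group)
    if truthy:
        runs.append((days[0], len(days)))  # (start date, run length)

start, length = max(runs, key=lambda r: r[1])
print('longest run: %d days starting %s' % (length, start))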
|
dasseclab/dasseclab
|
refs/heads/master
|
clones/routersploit/tests/exploits/routers/huawei/test_hg530_hg520b_password_disclosure.py
|
1
|
from routersploit.modules.exploits.routers.huawei.hg530_hg520b_password_disclosure import Exploit
def test_check_success(target):
""" Test scenario - successful check """
cgi_mock = target.get_route_mock("/UD/", methods=["POST"])
cgi_mock.return_value = (
'TEST'
'<NewUserpassword>Admin1234</NewUserpassword>'
'TEST'
)
exploit = Exploit()
assert exploit.target == ""
assert exploit.port == 80
exploit.target = target.host
exploit.port = target.port
assert exploit.check()
assert exploit.run() is None
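For reference, a minimal sketch of the response parsing such a password-disclosure check implies; the tag name comes from the mocked response above, and the actual routersploit module may extract it differently:

import re

def extract_password(response_text):
    # Pull the credential out of the leaked <NewUserpassword> element.
    match = re.search(r'<NewUserpassword>(.*?)</NewUserpassword>', response_text)
    return match.group(1) if match else None

assert extract_password('TEST<NewUserpassword>Admin1234</NewUserpassword>TEST') == 'Admin1234'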
|
eeshangarg/oh-mainline
|
refs/heads/master
|
vendor/packages/Django/tests/regressiontests/views/app0/__init__.py
|
9480
|
#
|
skia-dev/oss-fuzz
|
refs/heads/master
|
infra/cifuzz/clusterfuzz_deployment_test.py
|
1
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for clusterfuzz_deployment.py"""
import os
import unittest
from unittest import mock
import urllib.error
from pyfakefs import fake_filesystem_unittest
import clusterfuzz_deployment
import config_utils
# NOTE: This integration test relies on the example project at
# https://github.com/google/oss-fuzz/tree/master/projects/example.
EXAMPLE_PROJECT = 'example'
# An example fuzzer that triggers an error.
EXAMPLE_FUZZER = 'example_crash_fuzzer'
def _create_config(**kwargs):
"""Creates a config object and then sets every attribute that is a key in
|kwargs| to the corresponding value. Asserts that each key in |kwargs| is an
attribute of Config."""
defaults = {'is_github': True, 'project_name': EXAMPLE_PROJECT}
for default_key, default_value in defaults.items():
if default_key not in kwargs:
kwargs[default_key] = default_value
with mock.patch('os.path.basename', return_value=None), mock.patch(
'config_utils.get_project_src_path',
return_value=None), mock.patch('config_utils._is_dry_run',
return_value=True):
config = config_utils.RunFuzzersConfig()
for key, value in kwargs.items():
assert hasattr(config, key), 'Config doesn\'t have attribute: ' + key
setattr(config, key, value)
return config
def _create_deployment(**kwargs):
config = _create_config(**kwargs)
return clusterfuzz_deployment.get_clusterfuzz_deployment(config)
class OSSFuzzTest(fake_filesystem_unittest.TestCase):
"""Tests OSSFuzz."""
OUT_DIR = '/out'
def setUp(self):
self.setUpPyfakefs()
self.deployment = _create_deployment()
@mock.patch('clusterfuzz_deployment.download_and_unpack_zip',
return_value=True)
def test_download_corpus(self, mocked_download_and_unpack_zip):
"""Tests that we can download a corpus for a valid project."""
result = self.deployment.download_corpus(EXAMPLE_FUZZER, self.OUT_DIR)
self.assertIsNotNone(result)
expected_corpus_dir = os.path.join(self.OUT_DIR, 'cifuzz-corpus',
EXAMPLE_FUZZER)
expected_url = ('https://storage.googleapis.com/example-backup.'
'clusterfuzz-external.appspot.com/corpus/libFuzzer/'
'example_crash_fuzzer/public.zip')
call_args, _ = mocked_download_and_unpack_zip.call_args
self.assertEqual(call_args, (expected_url, expected_corpus_dir))
@mock.patch('clusterfuzz_deployment.download_and_unpack_zip',
return_value=False)
def test_download_fail(self, _):
"""Tests that when downloading fails, None is returned."""
corpus_path = self.deployment.download_corpus(EXAMPLE_FUZZER, self.OUT_DIR)
self.assertIsNone(corpus_path)
def test_get_latest_build_name(self):
"""Tests that the latest build name can be retrieved from GCS."""
latest_build_name = self.deployment.get_latest_build_name()
self.assertTrue(latest_build_name.endswith('.zip'))
self.assertTrue('address' in latest_build_name)
class DownloadUrlTest(unittest.TestCase):
"""Tests that download_url works."""
URL = 'example.com/file'
FILE_PATH = '/tmp/file'
@mock.patch('time.sleep')
@mock.patch('urllib.request.urlretrieve', return_value=True)
def test_download_url_no_error(self, mocked_urlretrieve, _):
"""Tests that download_url works when there is no error."""
self.assertTrue(
clusterfuzz_deployment.download_url(self.URL, self.FILE_PATH))
self.assertEqual(1, mocked_urlretrieve.call_count)
@mock.patch('time.sleep')
@mock.patch('logging.error')
@mock.patch('urllib.request.urlretrieve',
side_effect=urllib.error.HTTPError(None, None, None, None, None))
def test_download_url_http_error(self, mocked_urlretrieve, mocked_error, _):
"""Tests that download_url doesn't retry when there is an HTTP error."""
self.assertFalse(
clusterfuzz_deployment.download_url(self.URL, self.FILE_PATH))
mocked_error.assert_called_with('Unable to download from: %s.', self.URL)
self.assertEqual(1, mocked_urlretrieve.call_count)
@mock.patch('time.sleep')
@mock.patch('logging.error')
@mock.patch('urllib.request.urlretrieve', side_effect=ConnectionResetError)
def test_download_url_connection_error(self, mocked_urlretrieve, mocked_error,
mocked_sleep):
"""Tests that download_url doesn't retry when there is an HTTP error."""
self.assertFalse(
clusterfuzz_deployment.download_url(self.URL, self.FILE_PATH))
self.assertEqual(3, mocked_urlretrieve.call_count)
self.assertEqual(3, mocked_sleep.call_count)
mocked_error.assert_called_with('Failed to download %s, %d times.',
self.URL, 3)
class DownloadAndUnpackZipTest(fake_filesystem_unittest.TestCase):
"""Tests download_and_unpack_zip."""
def setUp(self):
self.setUpPyfakefs()
@mock.patch('urllib.request.urlretrieve', return_value=True)
def test_bad_zip_download(self, _):
"""Tests download_and_unpack_zip returns none when a bad zip is passed."""
self.fs.create_file('/url_tmp.zip', contents='Test file.')
self.assertFalse(
clusterfuzz_deployment.download_and_unpack_zip('/not/a/real/url',
'/extract-directory'))
if __name__ == '__main__':
unittest.main()
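Taken together, the download_url tests above pin down a retry contract: HTTP errors fail immediately, connection resets are retried three times with a sleep between attempts, and the log messages match the asserted strings. A minimal sketch that satisfies those assertions (the real clusterfuzz_deployment implementation may differ):

import logging
import time
import urllib.error
import urllib.request

def download_url(url, filepath, num_attempts=3):
  """Downloads |url| to |filepath|, retrying only on connection resets."""
  for attempt in range(num_attempts):
    try:
      urllib.request.urlretrieve(url, filepath)
      return True
    except urllib.error.HTTPError:
      # Permanent failure: do not retry.
      logging.error('Unable to download from: %s.', url)
      return False
    except ConnectionResetError:
      time.sleep(attempt + 1)  # Back off before the next attempt.
  logging.error('Failed to download %s, %d times.', url, num_attempts)
  return False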
|
ClovisIRex/Snake-django
|
refs/heads/master
|
env/lib/python3.6/site-packages/astroid/tests/testdata/python2/data/package/absimport.py
|
34
|
from __future__ import absolute_import, print_function
import import_package_subpackage_module # fail
print(import_package_subpackage_module)
from . import hello as hola
|
sauloal/pycluster
|
refs/heads/master
|
venvlin64/site-packages/requests-1.1.0-py2.7.egg/requests/structures.py
|
10
|
# -*- coding: utf-8 -*-
"""
requests.structures
~~~~~~~~~~~~~~~~~~~
Data structures that power Requests.
"""
import os
from itertools import islice
class IteratorProxy(object):
"""docstring for IteratorProxy"""
def __init__(self, i):
self.i = i
# self.i = chain.from_iterable(i)
def __iter__(self):
return self.i
def __len__(self):
if hasattr(self.i, '__len__'):
return len(self.i)
if hasattr(self.i, 'len'):
return self.i.len
if hasattr(self.i, 'fileno'):
return os.fstat(self.i.fileno()).st_size
def read(self, n):
return "".join(islice(self.i, None, n))
class CaseInsensitiveDict(dict):
"""Case-insensitive Dictionary
For example, ``headers['content-encoding']`` will return the
value of a ``'Content-Encoding'`` response header."""
@property
def lower_keys(self):
if not hasattr(self, '_lower_keys') or not self._lower_keys:
self._lower_keys = dict((k.lower(), k) for k in list(self.keys()))
return self._lower_keys
def _clear_lower_keys(self):
if hasattr(self, '_lower_keys'):
self._lower_keys.clear()
def __setitem__(self, key, value):
dict.__setitem__(self, key, value)
self._clear_lower_keys()
def __delitem__(self, key):
dict.__delitem__(self, self.lower_keys.get(key.lower(), key))
self._lower_keys.clear()
def __contains__(self, key):
return key.lower() in self.lower_keys
def __getitem__(self, key):
# We allow fall-through here, so values default to None
if key in self:
return dict.__getitem__(self, self.lower_keys[key.lower()])
def get(self, key, default=None):
if key in self:
return self[key]
else:
return default
class LookupDict(dict):
"""Dictionary lookup object."""
def __init__(self, name=None):
self.name = name
super(LookupDict, self).__init__()
def __repr__(self):
return '<lookup \'%s\'>' % (self.name)
def __getitem__(self, key):
# We allow fall-through here, so values default to None
return self.__dict__.get(key, None)
def get(self, key, default=None):
return self.__dict__.get(key, default)
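A quick illustration of the case-insensitive lookup the CaseInsensitiveDict docstring promises (editorial example, not part of the module):

headers = CaseInsensitiveDict()
headers['Content-Encoding'] = 'gzip'
assert headers['content-encoding'] == 'gzip'
assert 'CONTENT-ENCODING' in headers
assert headers.get('x-missing') is None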
|
geerlingguy/ansible
|
refs/heads/devel
|
test/integration/targets/collections/collection_root_user/ansible_collections/testns/testcoll/plugins/action/plugin_lookup.py
|
51
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.plugins import loader
class ActionModule(ActionBase):
TRANSFERS_FILES = False
_VALID_ARGS = frozenset(('type', 'name'))
def run(self, tmp=None, task_vars=None):
if task_vars is None:
task_vars = dict()
result = super(ActionModule, self).run(None, task_vars)
type = self._task.args.get('type')
name = self._task.args.get('name')
result = dict(changed=False, collection_list=self._task.collections)
if all([type, name]):
attr_name = '{0}_loader'.format(type)
typed_loader = getattr(loader, attr_name, None)
if not typed_loader:
return (dict(failed=True, msg='invalid plugin type {0}'.format(type)))
result['plugin_path'] = typed_loader.find_plugin(name, collection_list=self._task.collections)
return result
|
9and3r/RPi-InfoScreen-Kivy
|
refs/heads/master
|
screens/energenie/__init__.py
|
12133432
| |
Work4Labs/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/tests/modeltests/custom_columns/__init__.py
|
12133432
| |
ecederstrand/django
|
refs/heads/master
|
tests/template_backends/apps/__init__.py
|
12133432
| |
alfasin/st2
|
refs/heads/master
|
st2api/st2api/controllers/v1/__init__.py
|
12133432
| |
lunafeng/django
|
refs/heads/master
|
tests/template_tests/syntax_tests/__init__.py
|
12133432
| |
sargas/scipy
|
refs/heads/master
|
scipy/constants/codata.py
|
1
|
# Compiled by Charles Harris, dated October 3, 2002
# updated to 2002 values by BasSw, 2006
# Updated to 2006 values by Vincent Davis June 2010
"""
Fundamental Physical Constants
------------------------------
These constants are taken from CODATA Recommended Values of the Fundamental
Physical Constants 2010.
Object
------
physical_constants : dict
A dictionary containing physical constants. Keys are the names of
physical constants, values are tuples (value, units, precision).
Functions
---------
value(key):
Returns the value of the physical constant(key).
unit(key):
Returns the units of the physical constant(key).
precision(key):
Returns the relative precision of the physical constant(key).
find(sub):
Prints or returns list of keys containing the string sub, default is
all.
Source
------
The values of the constants provided at this site are recommended for
international use by CODATA and are the latest available. Termed the "2010
CODATA recommended values," they are generally recognized worldwide for use in
all fields of science and technology. The values became available on 2 June
2011 and replaced the 2006 CODATA set. They are based on all of the data
available through 31 December 2010. The 2010 adjustment was carried out under
the auspices of the CODATA Task Group on Fundamental Constants. See References
for an introduction to the constants for nonexperts.
References
----------
Theoretical and experimental publications relevant to the fundamental
constants and closely related precision measurements published since the mid
1980s, but also including many older papers of particular interest, some of
which date back to the 1800s. To search bibliography visit
http://physics.nist.gov/cuu/Constants/
"""
from __future__ import division, print_function, absolute_import
import warnings
from math import pi, sqrt
__all__ = ['physical_constants', 'value', 'unit', 'precision', 'find',
'ConstantWarning']
"""
Source: http://physics.nist.gov/cuu/Constants/index.html
The values of the constants provided at the above site are recommended
for international use by CODATA and are the latest available. Termed
the "2006 CODATA recommended values", they are generally recognized
worldwide for use in all fields of science and technology. The values
became available in March 2007 and replaced the 2002 CODATA set. They
are based on all of the data available through 31 December 2006. The
2006 adjustment was carried out under the auspices of the CODATA Task
Group on Fundamental Constants.
"""
#
# Source: http://physics.nist.gov/cuu/Constants/index.html
#
# Quantity Value Uncertainty Unit
# ---------------------------------------------------- --------------------- -------------------- -------------
txt2002 = """\
Wien displacement law constant 2.897 7685e-3 0.000 0051e-3 m K
atomic unit of 1st hyperpolarizablity 3.206 361 51e-53 0.000 000 28e-53 C^3 m^3 J^-2
atomic unit of 2nd hyperpolarizablity 6.235 3808e-65 0.000 0011e-65 C^4 m^4 J^-3
atomic unit of electric dipole moment 8.478 353 09e-30 0.000 000 73e-30 C m
atomic unit of electric polarizablity 1.648 777 274e-41 0.000 000 016e-41 C^2 m^2 J^-1
atomic unit of electric quadrupole moment 4.486 551 24e-40 0.000 000 39e-40 C m^2
atomic unit of magn. dipole moment 1.854 801 90e-23 0.000 000 16e-23 J T^-1
atomic unit of magn. flux density 2.350 517 42e5 0.000 000 20e5 T
deuteron magn. moment 0.433 073 482e-26 0.000 000 038e-26 J T^-1
deuteron magn. moment to Bohr magneton ratio 0.466 975 4567e-3 0.000 000 0050e-3
deuteron magn. moment to nuclear magneton ratio 0.857 438 2329 0.000 000 0092
deuteron-electron magn. moment ratio -4.664 345 548e-4 0.000 000 050e-4
deuteron-proton magn. moment ratio 0.307 012 2084 0.000 000 0045
deuteron-neutron magn. moment ratio -0.448 206 52 0.000 000 11
electron gyromagn. ratio 1.760 859 74e11 0.000 000 15e11 s^-1 T^-1
electron gyromagn. ratio over 2 pi 28 024.9532 0.0024 MHz T^-1
electron magn. moment -928.476 412e-26 0.000 080e-26 J T^-1
electron magn. moment to Bohr magneton ratio -1.001 159 652 1859 0.000 000 000 0038
electron magn. moment to nuclear magneton ratio -1838.281 971 07 0.000 000 85
electron magn. moment anomaly 1.159 652 1859e-3 0.000 000 0038e-3
electron to shielded proton magn. moment ratio -658.227 5956 0.000 0071
electron to shielded helion magn. moment ratio 864.058 255 0.000 010
electron-deuteron magn. moment ratio -2143.923 493 0.000 023
electron-muon magn. moment ratio 206.766 9894 0.000 0054
electron-neutron magn. moment ratio 960.920 50 0.000 23
electron-proton magn. moment ratio -658.210 6862 0.000 0066
magn. constant 12.566 370 614...e-7 0 N A^-2
magn. flux quantum 2.067 833 72e-15 0.000 000 18e-15 Wb
muon magn. moment -4.490 447 99e-26 0.000 000 40e-26 J T^-1
muon magn. moment to Bohr magneton ratio -4.841 970 45e-3 0.000 000 13e-3
muon magn. moment to nuclear magneton ratio -8.890 596 98 0.000 000 23
muon-proton magn. moment ratio -3.183 345 118 0.000 000 089
neutron gyromagn. ratio 1.832 471 83e8 0.000 000 46e8 s^-1 T^-1
neutron gyromagn. ratio over 2 pi 29.164 6950 0.000 0073 MHz T^-1
neutron magn. moment -0.966 236 45e-26 0.000 000 24e-26 J T^-1
neutron magn. moment to Bohr magneton ratio -1.041 875 63e-3 0.000 000 25e-3
neutron magn. moment to nuclear magneton ratio -1.913 042 73 0.000 000 45
neutron to shielded proton magn. moment ratio -0.684 996 94 0.000 000 16
neutron-electron magn. moment ratio 1.040 668 82e-3 0.000 000 25e-3
neutron-proton magn. moment ratio -0.684 979 34 0.000 000 16
proton gyromagn. ratio 2.675 222 05e8 0.000 000 23e8 s^-1 T^-1
proton gyromagn. ratio over 2 pi 42.577 4813 0.000 0037 MHz T^-1
proton magn. moment 1.410 606 71e-26 0.000 000 12e-26 J T^-1
proton magn. moment to Bohr magneton ratio 1.521 032 206e-3 0.000 000 015e-3
proton magn. moment to nuclear magneton ratio 2.792 847 351 0.000 000 028
proton magn. shielding correction 25.689e-6 0.015e-6
proton-neutron magn. moment ratio -1.459 898 05 0.000 000 34
shielded helion gyromagn. ratio 2.037 894 70e8 0.000 000 18e8 s^-1 T^-1
shielded helion gyromagn. ratio over 2 pi 32.434 1015 0.000 0028 MHz T^-1
shielded helion magn. moment -1.074 553 024e-26 0.000 000 093e-26 J T^-1
shielded helion magn. moment to Bohr magneton ratio -1.158 671 474e-3 0.000 000 014e-3
shielded helion magn. moment to nuclear magneton ratio -2.127 497 723 0.000 000 025
shielded helion to proton magn. moment ratio -0.761 766 562 0.000 000 012
shielded helion to shielded proton magn. moment ratio -0.761 786 1313 0.000 000 0033
shielded helion gyromagn. ratio 2.037 894 70e8 0.000 000 18e8 s^-1 T^-1
shielded helion gyromagn. ratio over 2 pi 32.434 1015 0.000 0028 MHz T^-1
shielded proton magn. moment 1.410 570 47e-26 0.000 000 12e-26 J T^-1
shielded proton magn. moment to Bohr magneton ratio 1.520 993 132e-3 0.000 000 016e-3
shielded proton magn. moment to nuclear magneton ratio 2.792 775 604 0.000 000 030
{220} lattice spacing of silicon 192.015 5965e-12 0.000 0070e-12 m"""
txt2006 = """\
lattice spacing of silicon 192.015 5762 e-12 0.000 0050 e-12 m
alpha particle-electron mass ratio 7294.299 5365 0.000 0031
alpha particle mass 6.644 656 20 e-27 0.000 000 33 e-27 kg
alpha particle mass energy equivalent 5.971 919 17 e-10 0.000 000 30 e-10 J
alpha particle mass energy equivalent in MeV 3727.379 109 0.000 093 MeV
alpha particle mass in u 4.001 506 179 127 0.000 000 000 062 u
alpha particle molar mass 4.001 506 179 127 e-3 0.000 000 000 062 e-3 kg mol^-1
alpha particle-proton mass ratio 3.972 599 689 51 0.000 000 000 41
Angstrom star 1.000 014 98 e-10 0.000 000 90 e-10 m
atomic mass constant 1.660 538 782 e-27 0.000 000 083 e-27 kg
atomic mass constant energy equivalent 1.492 417 830 e-10 0.000 000 074 e-10 J
atomic mass constant energy equivalent in MeV 931.494 028 0.000 023 MeV
atomic mass unit-electron volt relationship 931.494 028 e6 0.000 023 e6 eV
atomic mass unit-hartree relationship 3.423 177 7149 e7 0.000 000 0049 e7 E_h
atomic mass unit-hertz relationship 2.252 342 7369 e23 0.000 000 0032 e23 Hz
atomic mass unit-inverse meter relationship 7.513 006 671 e14 0.000 000 011 e14 m^-1
atomic mass unit-joule relationship 1.492 417 830 e-10 0.000 000 074 e-10 J
atomic mass unit-kelvin relationship 1.080 9527 e13 0.000 0019 e13 K
atomic mass unit-kilogram relationship 1.660 538 782 e-27 0.000 000 083 e-27 kg
atomic unit of 1st hyperpolarizability 3.206 361 533 e-53 0.000 000 081 e-53 C^3 m^3 J^-2
atomic unit of 2nd hyperpolarizability 6.235 380 95 e-65 0.000 000 31 e-65 C^4 m^4 J^-3
atomic unit of action 1.054 571 628 e-34 0.000 000 053 e-34 J s
atomic unit of charge 1.602 176 487 e-19 0.000 000 040 e-19 C
atomic unit of charge density 1.081 202 300 e12 0.000 000 027 e12 C m^-3
atomic unit of current 6.623 617 63 e-3 0.000 000 17 e-3 A
atomic unit of electric dipole mom. 8.478 352 81 e-30 0.000 000 21 e-30 C m
atomic unit of electric field 5.142 206 32 e11 0.000 000 13 e11 V m^-1
atomic unit of electric field gradient 9.717 361 66 e21 0.000 000 24 e21 V m^-2
atomic unit of electric polarizability 1.648 777 2536 e-41 0.000 000 0034 e-41 C^2 m^2 J^-1
atomic unit of electric potential 27.211 383 86 0.000 000 68 V
atomic unit of electric quadrupole mom. 4.486 551 07 e-40 0.000 000 11 e-40 C m^2
atomic unit of energy 4.359 743 94 e-18 0.000 000 22 e-18 J
atomic unit of force 8.238 722 06 e-8 0.000 000 41 e-8 N
atomic unit of length 0.529 177 208 59 e-10 0.000 000 000 36 e-10 m
atomic unit of mag. dipole mom. 1.854 801 830 e-23 0.000 000 046 e-23 J T^-1
atomic unit of mag. flux density 2.350 517 382 e5 0.000 000 059 e5 T
atomic unit of magnetizability 7.891 036 433 e-29 0.000 000 027 e-29 J T^-2
atomic unit of mass 9.109 382 15 e-31 0.000 000 45 e-31 kg
atomic unit of momentum 1.992 851 565 e-24 0.000 000 099 e-24 kg m s^-1
atomic unit of permittivity 1.112 650 056... e-10 (exact) F m^-1
atomic unit of time 2.418 884 326 505 e-17 0.000 000 000 016 e-17 s
atomic unit of velocity 2.187 691 2541 e6 0.000 000 0015 e6 m s^-1
Avogadro constant 6.022 141 79 e23 0.000 000 30 e23 mol^-1
Bohr magneton 927.400 915 e-26 0.000 023 e-26 J T^-1
Bohr magneton in eV/T 5.788 381 7555 e-5 0.000 000 0079 e-5 eV T^-1
Bohr magneton in Hz/T 13.996 246 04 e9 0.000 000 35 e9 Hz T^-1
Bohr magneton in inverse meters per tesla 46.686 4515 0.000 0012 m^-1 T^-1
Bohr magneton in K/T 0.671 7131 0.000 0012 K T^-1
Bohr radius 0.529 177 208 59 e-10 0.000 000 000 36 e-10 m
Boltzmann constant 1.380 6504 e-23 0.000 0024 e-23 J K^-1
Boltzmann constant in eV/K 8.617 343 e-5 0.000 015 e-5 eV K^-1
Boltzmann constant in Hz/K 2.083 6644 e10 0.000 0036 e10 Hz K^-1
Boltzmann constant in inverse meters per kelvin 69.503 56 0.000 12 m^-1 K^-1
characteristic impedance of vacuum 376.730 313 461... (exact) ohm
classical electron radius 2.817 940 2894 e-15 0.000 000 0058 e-15 m
Compton wavelength 2.426 310 2175 e-12 0.000 000 0033 e-12 m
Compton wavelength over 2 pi 386.159 264 59 e-15 0.000 000 53 e-15 m
conductance quantum 7.748 091 7004 e-5 0.000 000 0053 e-5 S
conventional value of Josephson constant 483 597.9 e9 (exact) Hz V^-1
conventional value of von Klitzing constant 25 812.807 (exact) ohm
Cu x unit 1.002 076 99 e-13 0.000 000 28 e-13 m
deuteron-electron mag. mom. ratio -4.664 345 537 e-4 0.000 000 039 e-4
deuteron-electron mass ratio 3670.482 9654 0.000 0016
deuteron g factor 0.857 438 2308 0.000 000 0072
deuteron mag. mom. 0.433 073 465 e-26 0.000 000 011 e-26 J T^-1
deuteron mag. mom. to Bohr magneton ratio 0.466 975 4556 e-3 0.000 000 0039 e-3
deuteron mag. mom. to nuclear magneton ratio 0.857 438 2308 0.000 000 0072
deuteron mass 3.343 583 20 e-27 0.000 000 17 e-27 kg
deuteron mass energy equivalent 3.005 062 72 e-10 0.000 000 15 e-10 J
deuteron mass energy equivalent in MeV 1875.612 793 0.000 047 MeV
deuteron mass in u 2.013 553 212 724 0.000 000 000 078 u
deuteron molar mass 2.013 553 212 724 e-3 0.000 000 000 078 e-3 kg mol^-1
deuteron-neutron mag. mom. ratio -0.448 206 52 0.000 000 11
deuteron-proton mag. mom. ratio 0.307 012 2070 0.000 000 0024
deuteron-proton mass ratio 1.999 007 501 08 0.000 000 000 22
deuteron rms charge radius 2.1402 e-15 0.0028 e-15 m
electric constant 8.854 187 817... e-12 (exact) F m^-1
electron charge to mass quotient -1.758 820 150 e11 0.000 000 044 e11 C kg^-1
electron-deuteron mag. mom. ratio -2143.923 498 0.000 018
electron-deuteron mass ratio 2.724 437 1093 e-4 0.000 000 0012 e-4
electron g factor -2.002 319 304 3622 0.000 000 000 0015
electron gyromag. ratio 1.760 859 770 e11 0.000 000 044 e11 s^-1 T^-1
electron gyromag. ratio over 2 pi 28 024.953 64 0.000 70 MHz T^-1
electron mag. mom. -928.476 377 e-26 0.000 023 e-26 J T^-1
electron mag. mom. anomaly 1.159 652 181 11 e-3 0.000 000 000 74 e-3
electron mag. mom. to Bohr magneton ratio -1.001 159 652 181 11 0.000 000 000 000 74
electron mag. mom. to nuclear magneton ratio -1838.281 970 92 0.000 000 80
electron mass 9.109 382 15 e-31 0.000 000 45 e-31 kg
electron mass energy equivalent 8.187 104 38 e-14 0.000 000 41 e-14 J
electron mass energy equivalent in MeV 0.510 998 910 0.000 000 013 MeV
electron mass in u 5.485 799 0943 e-4 0.000 000 0023 e-4 u
electron molar mass 5.485 799 0943 e-7 0.000 000 0023 e-7 kg mol^-1
electron-muon mag. mom. ratio 206.766 9877 0.000 0052
electron-muon mass ratio 4.836 331 71 e-3 0.000 000 12 e-3
electron-neutron mag. mom. ratio 960.920 50 0.000 23
electron-neutron mass ratio 5.438 673 4459 e-4 0.000 000 0033 e-4
electron-proton mag. mom. ratio -658.210 6848 0.000 0054
electron-proton mass ratio 5.446 170 2177 e-4 0.000 000 0024 e-4
electron-tau mass ratio 2.875 64 e-4 0.000 47 e-4
electron to alpha particle mass ratio 1.370 933 555 70 e-4 0.000 000 000 58 e-4
electron to shielded helion mag. mom. ratio 864.058 257 0.000 010
electron to shielded proton mag. mom. ratio -658.227 5971 0.000 0072
electron volt 1.602 176 487 e-19 0.000 000 040 e-19 J
electron volt-atomic mass unit relationship 1.073 544 188 e-9 0.000 000 027 e-9 u
electron volt-hartree relationship 3.674 932 540 e-2 0.000 000 092 e-2 E_h
electron volt-hertz relationship 2.417 989 454 e14 0.000 000 060 e14 Hz
electron volt-inverse meter relationship 8.065 544 65 e5 0.000 000 20 e5 m^-1
electron volt-joule relationship 1.602 176 487 e-19 0.000 000 040 e-19 J
electron volt-kelvin relationship 1.160 4505 e4 0.000 0020 e4 K
electron volt-kilogram relationship 1.782 661 758 e-36 0.000 000 044 e-36 kg
elementary charge 1.602 176 487 e-19 0.000 000 040 e-19 C
elementary charge over h 2.417 989 454 e14 0.000 000 060 e14 A J^-1
Faraday constant 96 485.3399 0.0024 C mol^-1
Faraday constant for conventional electric current 96 485.3401 0.0048 C_90 mol^-1
Fermi coupling constant 1.166 37 e-5 0.000 01 e-5 GeV^-2
fine-structure constant 7.297 352 5376 e-3 0.000 000 0050 e-3
first radiation constant 3.741 771 18 e-16 0.000 000 19 e-16 W m^2
first radiation constant for spectral radiance 1.191 042 759 e-16 0.000 000 059 e-16 W m^2 sr^-1
hartree-atomic mass unit relationship 2.921 262 2986 e-8 0.000 000 0042 e-8 u
hartree-electron volt relationship 27.211 383 86 0.000 000 68 eV
Hartree energy 4.359 743 94 e-18 0.000 000 22 e-18 J
Hartree energy in eV 27.211 383 86 0.000 000 68 eV
hartree-hertz relationship 6.579 683 920 722 e15 0.000 000 000 044 e15 Hz
hartree-inverse meter relationship 2.194 746 313 705 e7 0.000 000 000 015 e7 m^-1
hartree-joule relationship 4.359 743 94 e-18 0.000 000 22 e-18 J
hartree-kelvin relationship 3.157 7465 e5 0.000 0055 e5 K
hartree-kilogram relationship 4.850 869 34 e-35 0.000 000 24 e-35 kg
helion-electron mass ratio 5495.885 2765 0.000 0052
helion mass 5.006 411 92 e-27 0.000 000 25 e-27 kg
helion mass energy equivalent 4.499 538 64 e-10 0.000 000 22 e-10 J
helion mass energy equivalent in MeV 2808.391 383 0.000 070 MeV
helion mass in u 3.014 932 2473 0.000 000 0026 u
helion molar mass 3.014 932 2473 e-3 0.000 000 0026 e-3 kg mol^-1
helion-proton mass ratio 2.993 152 6713 0.000 000 0026
hertz-atomic mass unit relationship 4.439 821 6294 e-24 0.000 000 0064 e-24 u
hertz-electron volt relationship 4.135 667 33 e-15 0.000 000 10 e-15 eV
hertz-hartree relationship 1.519 829 846 006 e-16 0.000 000 000010e-16 E_h
hertz-inverse meter relationship 3.335 640 951... e-9 (exact) m^-1
hertz-joule relationship 6.626 068 96 e-34 0.000 000 33 e-34 J
hertz-kelvin relationship 4.799 2374 e-11 0.000 0084 e-11 K
hertz-kilogram relationship 7.372 496 00 e-51 0.000 000 37 e-51 kg
inverse fine-structure constant 137.035 999 679 0.000 000 094
inverse meter-atomic mass unit relationship 1.331 025 0394 e-15 0.000 000 0019 e-15 u
inverse meter-electron volt relationship 1.239 841 875 e-6 0.000 000 031 e-6 eV
inverse meter-hartree relationship 4.556 335 252 760 e-8 0.000 000 000 030 e-8 E_h
inverse meter-hertz relationship 299 792 458 (exact) Hz
inverse meter-joule relationship 1.986 445 501 e-25 0.000 000 099 e-25 J
inverse meter-kelvin relationship 1.438 7752 e-2 0.000 0025 e-2 K
inverse meter-kilogram relationship 2.210 218 70 e-42 0.000 000 11 e-42 kg
inverse of conductance quantum 12 906.403 7787 0.000 0088 ohm
Josephson constant 483 597.891 e9 0.012 e9 Hz V^-1
joule-atomic mass unit relationship 6.700 536 41 e9 0.000 000 33 e9 u
joule-electron volt relationship 6.241 509 65 e18 0.000 000 16 e18 eV
joule-hartree relationship 2.293 712 69 e17 0.000 000 11 e17 E_h
joule-hertz relationship 1.509 190 450 e33 0.000 000 075 e33 Hz
joule-inverse meter relationship 5.034 117 47 e24 0.000 000 25 e24 m^-1
joule-kelvin relationship 7.242 963 e22 0.000 013 e22 K
joule-kilogram relationship 1.112 650 056... e-17 (exact) kg
kelvin-atomic mass unit relationship 9.251 098 e-14 0.000 016 e-14 u
kelvin-electron volt relationship 8.617 343 e-5 0.000 015 e-5 eV
kelvin-hartree relationship 3.166 8153 e-6 0.000 0055 e-6 E_h
kelvin-hertz relationship 2.083 6644 e10 0.000 0036 e10 Hz
kelvin-inverse meter relationship 69.503 56 0.000 12 m^-1
kelvin-joule relationship 1.380 6504 e-23 0.000 0024 e-23 J
kelvin-kilogram relationship 1.536 1807 e-40 0.000 0027 e-40 kg
kilogram-atomic mass unit relationship 6.022 141 79 e26 0.000 000 30 e26 u
kilogram-electron volt relationship 5.609 589 12 e35 0.000 000 14 e35 eV
kilogram-hartree relationship 2.061 486 16 e34 0.000 000 10 e34 E_h
kilogram-hertz relationship 1.356 392 733 e50 0.000 000 068 e50 Hz
kilogram-inverse meter relationship 4.524 439 15 e41 0.000 000 23 e41 m^-1
kilogram-joule relationship 8.987 551 787... e16 (exact) J
kilogram-kelvin relationship 6.509 651 e39 0.000 011 e39 K
lattice parameter of silicon 543.102 064 e-12 0.000 014 e-12 m
Loschmidt constant (273.15 K, 101.325 kPa) 2.686 7774 e25 0.000 0047 e25 m^-3
mag. constant 12.566 370 614... e-7 (exact) N A^-2
mag. flux quantum 2.067 833 667 e-15 0.000 000 052 e-15 Wb
molar gas constant 8.314 472 0.000 015 J mol^-1 K^-1
molar mass constant 1 e-3 (exact) kg mol^-1
molar mass of carbon-12 12 e-3 (exact) kg mol^-1
molar Planck constant 3.990 312 6821 e-10 0.000 000 0057 e-10 J s mol^-1
molar Planck constant times c 0.119 626 564 72 0.000 000 000 17 J m mol^-1
molar volume of ideal gas (273.15 K, 100 kPa) 22.710 981 e-3 0.000 040 e-3 m^3 mol^-1
molar volume of ideal gas (273.15 K, 101.325 kPa) 22.413 996 e-3 0.000 039 e-3 m^3 mol^-1
molar volume of silicon 12.058 8349 e-6 0.000 0011 e-6 m^3 mol^-1
Mo x unit 1.002 099 55 e-13 0.000 000 53 e-13 m
muon Compton wavelength 11.734 441 04 e-15 0.000 000 30 e-15 m
muon Compton wavelength over 2 pi 1.867 594 295 e-15 0.000 000 047 e-15 m
muon-electron mass ratio 206.768 2823 0.000 0052
muon g factor -2.002 331 8414 0.000 000 0012
muon mag. mom. -4.490 447 86 e-26 0.000 000 16 e-26 J T^-1
muon mag. mom. anomaly 1.165 920 69 e-3 0.000 000 60 e-3
muon mag. mom. to Bohr magneton ratio -4.841 970 49 e-3 0.000 000 12 e-3
muon mag. mom. to nuclear magneton ratio -8.890 597 05 0.000 000 23
muon mass 1.883 531 30 e-28 0.000 000 11 e-28 kg
muon mass energy equivalent 1.692 833 510 e-11 0.000 000 095 e-11 J
muon mass energy equivalent in MeV 105.658 3668 0.000 0038 MeV
muon mass in u 0.113 428 9256 0.000 000 0029 u
muon molar mass 0.113 428 9256 e-3 0.000 000 0029 e-3 kg mol^-1
muon-neutron mass ratio 0.112 454 5167 0.000 000 0029
muon-proton mag. mom. ratio -3.183 345 137 0.000 000 085
muon-proton mass ratio 0.112 609 5261 0.000 000 0029
muon-tau mass ratio 5.945 92 e-2 0.000 97 e-2
natural unit of action 1.054 571 628 e-34 0.000 000 053 e-34 J s
natural unit of action in eV s 6.582 118 99 e-16 0.000 000 16 e-16 eV s
natural unit of energy 8.187 104 38 e-14 0.000 000 41 e-14 J
natural unit of energy in MeV 0.510 998 910 0.000 000 013 MeV
natural unit of length 386.159 264 59 e-15 0.000 000 53 e-15 m
natural unit of mass 9.109 382 15 e-31 0.000 000 45 e-31 kg
natural unit of momentum 2.730 924 06 e-22 0.000 000 14 e-22 kg m s^-1
natural unit of momentum in MeV/c 0.510 998 910 0.000 000 013 MeV/c
natural unit of time 1.288 088 6570 e-21 0.000 000 0018 e-21 s
natural unit of velocity 299 792 458 (exact) m s^-1
neutron Compton wavelength 1.319 590 8951 e-15 0.000 000 0020 e-15 m
neutron Compton wavelength over 2 pi 0.210 019 413 82 e-15 0.000 000 000 31 e-15 m
neutron-electron mag. mom. ratio 1.040 668 82 e-3 0.000 000 25 e-3
neutron-electron mass ratio 1838.683 6605 0.000 0011
neutron g factor -3.826 085 45 0.000 000 90
neutron gyromag. ratio 1.832 471 85 e8 0.000 000 43 e8 s^-1 T^-1
neutron gyromag. ratio over 2 pi 29.164 6954 0.000 0069 MHz T^-1
neutron mag. mom. -0.966 236 41 e-26 0.000 000 23 e-26 J T^-1
neutron mag. mom. to Bohr magneton ratio -1.041 875 63 e-3 0.000 000 25 e-3
neutron mag. mom. to nuclear magneton ratio -1.913 042 73 0.000 000 45
neutron mass 1.674 927 211 e-27 0.000 000 084 e-27 kg
neutron mass energy equivalent 1.505 349 505 e-10 0.000 000 075 e-10 J
neutron mass energy equivalent in MeV 939.565 346 0.000 023 MeV
neutron mass in u 1.008 664 915 97 0.000 000 000 43 u
neutron molar mass 1.008 664 915 97 e-3 0.000 000 000 43 e-3 kg mol^-1
neutron-muon mass ratio 8.892 484 09 0.000 000 23
neutron-proton mag. mom. ratio -0.684 979 34 0.000 000 16
neutron-proton mass ratio 1.001 378 419 18 0.000 000 000 46
neutron-tau mass ratio 0.528 740 0.000 086
neutron to shielded proton mag. mom. ratio -0.684 996 94 0.000 000 16
Newtonian constant of gravitation 6.674 28 e-11 0.000 67 e-11 m^3 kg^-1 s^-2
Newtonian constant of gravitation over h-bar c 6.708 81 e-39 0.000 67 e-39 (GeV/c^2)^-2
nuclear magneton 5.050 783 24 e-27 0.000 000 13 e-27 J T^-1
nuclear magneton in eV/T 3.152 451 2326 e-8 0.000 000 0045 e-8 eV T^-1
nuclear magneton in inverse meters per tesla 2.542 623 616 e-2 0.000 000 064 e-2 m^-1 T^-1
nuclear magneton in K/T 3.658 2637 e-4 0.000 0064 e-4 K T^-1
nuclear magneton in MHz/T 7.622 593 84 0.000 000 19 MHz T^-1
Planck constant 6.626 068 96 e-34 0.000 000 33 e-34 J s
Planck constant in eV s 4.135 667 33 e-15 0.000 000 10 e-15 eV s
Planck constant over 2 pi 1.054 571 628 e-34 0.000 000 053 e-34 J s
Planck constant over 2 pi in eV s 6.582 118 99 e-16 0.000 000 16 e-16 eV s
Planck constant over 2 pi times c in MeV fm 197.326 9631 0.000 0049 MeV fm
Planck length 1.616 252 e-35 0.000 081 e-35 m
Planck mass 2.176 44 e-8 0.000 11 e-8 kg
Planck mass energy equivalent in GeV 1.220 892 e19 0.000 061 e19 GeV
Planck temperature 1.416 785 e32 0.000 071 e32 K
Planck time 5.391 24 e-44 0.000 27 e-44 s
proton charge to mass quotient 9.578 833 92 e7 0.000 000 24 e7 C kg^-1
proton Compton wavelength 1.321 409 8446 e-15 0.000 000 0019 e-15 m
proton Compton wavelength over 2 pi 0.210 308 908 61 e-15 0.000 000 000 30 e-15 m
proton-electron mass ratio 1836.152 672 47 0.000 000 80
proton g factor 5.585 694 713 0.000 000 046
proton gyromag. ratio 2.675 222 099 e8 0.000 000 070 e8 s^-1 T^-1
proton gyromag. ratio over 2 pi 42.577 4821 0.000 0011 MHz T^-1
proton mag. mom. 1.410 606 662 e-26 0.000 000 037 e-26 J T^-1
proton mag. mom. to Bohr magneton ratio 1.521 032 209 e-3 0.000 000 012 e-3
proton mag. mom. to nuclear magneton ratio 2.792 847 356 0.000 000 023
proton mag. shielding correction 25.694 e-6 0.014 e-6
proton mass 1.672 621 637 e-27 0.000 000 083 e-27 kg
proton mass energy equivalent 1.503 277 359 e-10 0.000 000 075 e-10 J
proton mass energy equivalent in MeV 938.272 013 0.000 023 MeV
proton mass in u 1.007 276 466 77 0.000 000 000 10 u
proton molar mass 1.007 276 466 77 e-3 0.000 000 000 10 e-3 kg mol^-1
proton-muon mass ratio 8.880 243 39 0.000 000 23
proton-neutron mag. mom. ratio -1.459 898 06 0.000 000 34
proton-neutron mass ratio 0.998 623 478 24 0.000 000 000 46
proton rms charge radius 0.8768 e-15 0.0069 e-15 m
proton-tau mass ratio 0.528 012 0.000 086
quantum of circulation 3.636 947 5199 e-4 0.000 000 0050 e-4 m^2 s^-1
quantum of circulation times 2 7.273 895 040 e-4 0.000 000 010 e-4 m^2 s^-1
Rydberg constant 10 973 731.568 527 0.000 073 m^-1
Rydberg constant times c in Hz 3.289 841 960 361 e15 0.000 000 000 022 e15 Hz
Rydberg constant times hc in eV 13.605 691 93 0.000 000 34 eV
Rydberg constant times hc in J 2.179 871 97 e-18 0.000 000 11 e-18 J
Sackur-Tetrode constant (1 K, 100 kPa) -1.151 7047 0.000 0044
Sackur-Tetrode constant (1 K, 101.325 kPa) -1.164 8677 0.000 0044
second radiation constant 1.438 7752 e-2 0.000 0025 e-2 m K
shielded helion gyromag. ratio 2.037 894 730 e8 0.000 000 056 e8 s^-1 T^-1
shielded helion gyromag. ratio over 2 pi 32.434 101 98 0.000 000 90 MHz T^-1
shielded helion mag. mom. -1.074 552 982 e-26 0.000 000 030 e-26 J T^-1
shielded helion mag. mom. to Bohr magneton ratio -1.158 671 471 e-3 0.000 000 014 e-3
shielded helion mag. mom. to nuclear magneton ratio -2.127 497 718 0.000 000 025
shielded helion to proton mag. mom. ratio -0.761 766 558 0.000 000 011
shielded helion to shielded proton mag. mom. ratio -0.761 786 1313 0.000 000 0033
shielded proton gyromag. ratio 2.675 153 362 e8 0.000 000 073 e8 s^-1 T^-1
shielded proton gyromag. ratio over 2 pi 42.576 3881 0.000 0012 MHz T^-1
shielded proton mag. mom. 1.410 570 419 e-26 0.000 000 038 e-26 J T^-1
shielded proton mag. mom. to Bohr magneton ratio 1.520 993 128 e-3 0.000 000 017 e-3
shielded proton mag. mom. to nuclear magneton ratio 2.792 775 598 0.000 000 030
speed of light in vacuum 299 792 458 (exact) m s^-1
standard acceleration of gravity 9.806 65 (exact) m s^-2
standard atmosphere 101 325 (exact) Pa
Stefan-Boltzmann constant 5.670 400 e-8 0.000 040 e-8 W m^-2 K^-4
tau Compton wavelength 0.697 72 e-15 0.000 11 e-15 m
tau Compton wavelength over 2 pi 0.111 046 e-15 0.000 018 e-15 m
tau-electron mass ratio 3477.48 0.57
tau mass 3.167 77 e-27 0.000 52 e-27 kg
tau mass energy equivalent 2.847 05 e-10 0.000 46 e-10 J
tau mass energy equivalent in MeV 1776.99 0.29 MeV
tau mass in u 1.907 68 0.000 31 u
tau molar mass 1.907 68 e-3 0.000 31 e-3 kg mol^-1
tau-muon mass ratio 16.8183 0.0027
tau-neutron mass ratio 1.891 29 0.000 31
tau-proton mass ratio 1.893 90 0.000 31
Thomson cross section 0.665 245 8558 e-28 0.000 000 0027 e-28 m^2
triton-electron mag. mom. ratio -1.620 514 423 e-3 0.000 000 021 e-3
triton-electron mass ratio 5496.921 5269 0.000 0051
triton g factor 5.957 924 896 0.000 000 076
triton mag. mom. 1.504 609 361 e-26 0.000 000 042 e-26 J T^-1
triton mag. mom. to Bohr magneton ratio 1.622 393 657 e-3 0.000 000 021 e-3
triton mag. mom. to nuclear magneton ratio 2.978 962 448 0.000 000 038
triton mass 5.007 355 88 e-27 0.000 000 25 e-27 kg
triton mass energy equivalent 4.500 387 03 e-10 0.000 000 22 e-10 J
triton mass energy equivalent in MeV 2808.920 906 0.000 070 MeV
triton mass in u 3.015 500 7134 0.000 000 0025 u
triton molar mass 3.015 500 7134 e-3 0.000 000 0025 e-3 kg mol^-1
triton-neutron mag. mom. ratio -1.557 185 53 0.000 000 37
triton-proton mag. mom. ratio 1.066 639 908 0.000 000 010
triton-proton mass ratio 2.993 717 0309 0.000 000 0025
unified atomic mass unit 1.660 538 782 e-27 0.000 000 083 e-27 kg
von Klitzing constant 25 812.807 557 0.000 018 ohm
weak mixing angle 0.222 55 0.000 56
Wien frequency displacement law constant 5.878 933 e10 0.000 010 e10 Hz K^-1
Wien wavelength displacement law constant 2.897 7685 e-3 0.000 0051 e-3 m K"""
txt2010 = """\
{220} lattice spacing of silicon 192.015 5714 e-12 0.000 0032 e-12 m
alpha particle-electron mass ratio 7294.299 5361 0.000 0029
alpha particle mass 6.644 656 75 e-27 0.000 000 29 e-27 kg
alpha particle mass energy equivalent 5.971 919 67 e-10 0.000 000 26 e-10 J
alpha particle mass energy equivalent in MeV 3727.379 240 0.000 082 MeV
alpha particle mass in u 4.001 506 179 125 0.000 000 000 062 u
alpha particle molar mass 4.001 506 179 125 e-3 0.000 000 000 062 e-3 kg mol^-1
alpha particle-proton mass ratio 3.972 599 689 33 0.000 000 000 36
Angstrom star 1.000 014 95 e-10 0.000 000 90 e-10 m
atomic mass constant 1.660 538 921 e-27 0.000 000 073 e-27 kg
atomic mass constant energy equivalent 1.492 417 954 e-10 0.000 000 066 e-10 J
atomic mass constant energy equivalent in MeV 931.494 061 0.000 021 MeV
atomic mass unit-electron volt relationship 931.494 061 e6 0.000 021 e6 eV
atomic mass unit-hartree relationship 3.423 177 6845 e7 0.000 000 0024 e7 E_h
atomic mass unit-hertz relationship 2.252 342 7168 e23 0.000 000 0016 e23 Hz
atomic mass unit-inverse meter relationship 7.513 006 6042 e14 0.000 000 0053 e14 m^-1
atomic mass unit-joule relationship 1.492 417 954 e-10 0.000 000 066 e-10 J
atomic mass unit-kelvin relationship 1.080 954 08 e13 0.000 000 98 e13 K
atomic mass unit-kilogram relationship 1.660 538 921 e-27 0.000 000 073 e-27 kg
atomic unit of 1st hyperpolarizability 3.206 361 449 e-53 0.000 000 071 e-53 C^3 m^3 J^-2
atomic unit of 2nd hyperpolarizability 6.235 380 54 e-65 0.000 000 28 e-65 C^4 m^4 J^-3
atomic unit of action 1.054 571 726 e-34 0.000 000 047 e-34 J s
atomic unit of charge 1.602 176 565 e-19 0.000 000 035 e-19 C
atomic unit of charge density 1.081 202 338 e12 0.000 000 024 e12 C m^-3
atomic unit of current 6.623 617 95 e-3 0.000 000 15 e-3 A
atomic unit of electric dipole mom. 8.478 353 26 e-30 0.000 000 19 e-30 C m
atomic unit of electric field 5.142 206 52 e11 0.000 000 11 e11 V m^-1
atomic unit of electric field gradient 9.717 362 00 e21 0.000 000 21 e21 V m^-2
atomic unit of electric polarizability 1.648 777 2754 e-41 0.000 000 0016 e-41 C^2 m^2 J^-1
atomic unit of electric potential 27.211 385 05 0.000 000 60 V
atomic unit of electric quadrupole mom. 4.486 551 331 e-40 0.000 000 099 e-40 C m^2
atomic unit of energy 4.359 744 34 e-18 0.000 000 19 e-18 J
atomic unit of force 8.238 722 78 e-8 0.000 000 36 e-8 N
atomic unit of length 0.529 177 210 92 e-10 0.000 000 000 17 e-10 m
atomic unit of mag. dipole mom. 1.854 801 936 e-23 0.000 000 041 e-23 J T^-1
atomic unit of mag. flux density 2.350 517 464 e5 0.000 000 052 e5 T
atomic unit of magnetizability 7.891 036 607 e-29 0.000 000 013 e-29 J T^-2
atomic unit of mass 9.109 382 91 e-31 0.000 000 40 e-31 kg
atomic unit of mom.um 1.992 851 740 e-24 0.000 000 088 e-24 kg m s^-1
atomic unit of permittivity 1.112 650 056... e-10 (exact) F m^-1
atomic unit of time 2.418 884 326 502e-17 0.000 000 000 012e-17 s
atomic unit of velocity 2.187 691 263 79 e6 0.000 000 000 71 e6 m s^-1
Avogadro constant 6.022 141 29 e23 0.000 000 27 e23 mol^-1
Bohr magneton 927.400 968 e-26 0.000 020 e-26 J T^-1
Bohr magneton in eV/T 5.788 381 8066 e-5 0.000 000 0038 e-5 eV T^-1
Bohr magneton in Hz/T 13.996 245 55 e9 0.000 000 31 e9 Hz T^-1
Bohr magneton in inverse meters per tesla 46.686 4498 0.000 0010 m^-1 T^-1
Bohr magneton in K/T 0.671 713 88 0.000 000 61 K T^-1
Bohr radius 0.529 177 210 92 e-10 0.000 000 000 17 e-10 m
Boltzmann constant 1.380 6488 e-23 0.000 0013 e-23 J K^-1
Boltzmann constant in eV/K 8.617 3324 e-5 0.000 0078 e-5 eV K^-1
Boltzmann constant in Hz/K 2.083 6618 e10 0.000 0019 e10 Hz K^-1
Boltzmann constant in inverse meters per kelvin 69.503 476 0.000 063 m^-1 K^-1
characteristic impedance of vacuum 376.730 313 461... (exact) ohm
classical electron radius 2.817 940 3267 e-15 0.000 000 0027 e-15 m
Compton wavelength 2.426 310 2389 e-12 0.000 000 0016 e-12 m
Compton wavelength over 2 pi 386.159 268 00 e-15 0.000 000 25 e-15 m
conductance quantum 7.748 091 7346 e-5 0.000 000 0025 e-5 S
conventional value of Josephson constant 483 597.9 e9 (exact) Hz V^-1
conventional value of von Klitzing constant 25 812.807 (exact) ohm
Cu x unit 1.002 076 97 e-13 0.000 000 28 e-13 m
deuteron-electron mag. mom. ratio -4.664 345 537 e-4 0.000 000 039 e-4
deuteron-electron mass ratio 3670.482 9652 0.000 0015
deuteron g factor 0.857 438 2308 0.000 000 0072
deuteron mag. mom. 0.433 073 489 e-26 0.000 000 010 e-26 J T^-1
deuteron mag. mom. to Bohr magneton ratio 0.466 975 4556 e-3 0.000 000 0039 e-3
deuteron mag. mom. to nuclear magneton ratio 0.857 438 2308 0.000 000 0072
deuteron mass 3.343 583 48 e-27 0.000 000 15 e-27 kg
deuteron mass energy equivalent 3.005 062 97 e-10 0.000 000 13 e-10 J
deuteron mass energy equivalent in MeV 1875.612 859 0.000 041 MeV
deuteron mass in u 2.013 553 212 712 0.000 000 000 077 u
deuteron molar mass 2.013 553 212 712 e-3 0.000 000 000 077 e-3 kg mol^-1
deuteron-neutron mag. mom. ratio -0.448 206 52 0.000 000 11
deuteron-proton mag. mom. ratio 0.307 012 2070 0.000 000 0024
deuteron-proton mass ratio 1.999 007 500 97 0.000 000 000 18
deuteron rms charge radius 2.1424 e-15 0.0021 e-15 m
electric constant 8.854 187 817... e-12 (exact) F m^-1
electron charge to mass quotient -1.758 820 088 e11 0.000 000 039 e11 C kg^-1
electron-deuteron mag. mom. ratio -2143.923 498 0.000 018
electron-deuteron mass ratio 2.724 437 1095 e-4 0.000 000 0011 e-4
electron g factor -2.002 319 304 361 53 0.000 000 000 000 53
electron gyromag. ratio 1.760 859 708 e11 0.000 000 039 e11 s^-1 T^-1
electron gyromag. ratio over 2 pi 28 024.952 66 0.000 62 MHz T^-1
electron-helion mass ratio 1.819 543 0761 e-4 0.000 000 0017 e-4
electron mag. mom. -928.476 430 e-26 0.000 021 e-26 J T^-1
electron mag. mom. anomaly 1.159 652 180 76 e-3 0.000 000 000 27 e-3
electron mag. mom. to Bohr magneton ratio -1.001 159 652 180 76 0.000 000 000 000 27
electron mag. mom. to nuclear magneton ratio -1838.281 970 90 0.000 000 75
electron mass 9.109 382 91 e-31 0.000 000 40 e-31 kg
electron mass energy equivalent 8.187 105 06 e-14 0.000 000 36 e-14 J
electron mass energy equivalent in MeV 0.510 998 928 0.000 000 011 MeV
electron mass in u 5.485 799 0946 e-4 0.000 000 0022 e-4 u
electron molar mass 5.485 799 0946 e-7 0.000 000 0022 e-7 kg mol^-1
electron-muon mag. mom. ratio 206.766 9896 0.000 0052
electron-muon mass ratio 4.836 331 66 e-3 0.000 000 12 e-3
electron-neutron mag. mom. ratio 960.920 50 0.000 23
electron-neutron mass ratio 5.438 673 4461 e-4 0.000 000 0032 e-4
electron-proton mag. mom. ratio -658.210 6848 0.000 0054
electron-proton mass ratio 5.446 170 2178 e-4 0.000 000 0022 e-4
electron-tau mass ratio 2.875 92 e-4 0.000 26 e-4
electron to alpha particle mass ratio 1.370 933 555 78 e-4 0.000 000 000 55 e-4
electron to shielded helion mag. mom. ratio 864.058 257 0.000 010
electron to shielded proton mag. mom. ratio -658.227 5971 0.000 0072
electron-triton mass ratio 1.819 200 0653 e-4 0.000 000 0017 e-4
electron volt 1.602 176 565 e-19 0.000 000 035 e-19 J
electron volt-atomic mass unit relationship 1.073 544 150 e-9 0.000 000 024 e-9 u
electron volt-hartree relationship 3.674 932 379 e-2 0.000 000 081 e-2 E_h
electron volt-hertz relationship 2.417 989 348 e14 0.000 000 053 e14 Hz
electron volt-inverse meter relationship 8.065 544 29 e5 0.000 000 18 e5 m^-1
electron volt-joule relationship 1.602 176 565 e-19 0.000 000 035 e-19 J
electron volt-kelvin relationship 1.160 4519 e4 0.000 0011 e4 K
electron volt-kilogram relationship 1.782 661 845 e-36 0.000 000 039 e-36 kg
elementary charge 1.602 176 565 e-19 0.000 000 035 e-19 C
elementary charge over h 2.417 989 348 e14 0.000 000 053 e14 A J^-1
Faraday constant 96 485.3365 0.0021 C mol^-1
Faraday constant for conventional electric current 96 485.3321 0.0043 C_90 mol^-1
Fermi coupling constant 1.166 364 e-5 0.000 005 e-5 GeV^-2
fine-structure constant 7.297 352 5698 e-3 0.000 000 0024 e-3
first radiation constant 3.741 771 53 e-16 0.000 000 17 e-16 W m^2
first radiation constant for spectral radiance 1.191 042 869 e-16 0.000 000 053 e-16 W m^2 sr^-1
hartree-atomic mass unit relationship 2.921 262 3246 e-8 0.000 000 0021 e-8 u
hartree-electron volt relationship 27.211 385 05 0.000 000 60 eV
Hartree energy 4.359 744 34 e-18 0.000 000 19 e-18 J
Hartree energy in eV 27.211 385 05 0.000 000 60 eV
hartree-hertz relationship 6.579 683 920 729 e15 0.000 000 000 033 e15 Hz
hartree-inverse meter relationship 2.194 746 313 708 e7 0.000 000 000 011 e7 m^-1
hartree-joule relationship 4.359 744 34 e-18 0.000 000 19 e-18 J
hartree-kelvin relationship 3.157 7504 e5 0.000 0029 e5 K
hartree-kilogram relationship 4.850 869 79 e-35 0.000 000 21 e-35 kg
helion-electron mass ratio 5495.885 2754 0.000 0050
helion g factor -4.255 250 613 0.000 000 050
helion mag. mom. -1.074 617 486 e-26 0.000 000 027 e-26 J T^-1
helion mag. mom. to Bohr magneton ratio -1.158 740 958 e-3 0.000 000 014 e-3
helion mag. mom. to nuclear magneton ratio -2.127 625 306 0.000 000 025
helion mass 5.006 412 34 e-27 0.000 000 22 e-27 kg
helion mass energy equivalent 4.499 539 02 e-10 0.000 000 20 e-10 J
helion mass energy equivalent in MeV 2808.391 482 0.000 062 MeV
helion mass in u 3.014 932 2468 0.000 000 0025 u
helion molar mass 3.014 932 2468 e-3 0.000 000 0025 e-3 kg mol^-1
helion-proton mass ratio 2.993 152 6707 0.000 000 0025
hertz-atomic mass unit relationship 4.439 821 6689 e-24 0.000 000 0031 e-24 u
hertz-electron volt relationship 4.135 667 516 e-15 0.000 000 091 e-15 eV
hertz-hartree relationship 1.519 829 8460045e-16 0.000 000 0000076e-16 E_h
hertz-inverse meter relationship 3.335 640 951... e-9 (exact) m^-1
hertz-joule relationship 6.626 069 57 e-34 0.000 000 29 e-34 J
hertz-kelvin relationship 4.799 2434 e-11 0.000 0044 e-11 K
hertz-kilogram relationship 7.372 496 68 e-51 0.000 000 33 e-51 kg
inverse fine-structure constant 137.035 999 074 0.000 000 044
inverse meter-atomic mass unit relationship 1.331 025 051 20 e-15 0.000 000 000 94 e-15 u
inverse meter-electron volt relationship 1.239 841 930 e-6 0.000 000 027 e-6 eV
inverse meter-hartree relationship 4.556 335 252 755 e-8 0.000 000 000 023 e-8 E_h
inverse meter-hertz relationship 299 792 458 (exact) Hz
inverse meter-joule relationship 1.986 445 684 e-25 0.000 000 088 e-25 J
inverse meter-kelvin relationship 1.438 7770 e-2 0.000 0013 e-2 K
inverse meter-kilogram relationship 2.210 218 902 e-42 0.000 000 098 e-42 kg
inverse of conductance quantum 12 906.403 7217 0.000 0042 ohm
Josephson constant 483 597.870 e9 0.011 e9 Hz V^-1
joule-atomic mass unit relationship 6.700 535 85 e9 0.000 000 30 e9 u
joule-electron volt relationship 6.241 509 34 e18 0.000 000 14 e18 eV
joule-hartree relationship 2.293 712 48 e17 0.000 000 10 e17 E_h
joule-hertz relationship 1.509 190 311 e33 0.000 000 067 e33 Hz
joule-inverse meter relationship 5.034 117 01 e24 0.000 000 22 e24 m^-1
joule-kelvin relationship 7.242 9716 e22 0.000 0066 e22 K
joule-kilogram relationship 1.112 650 056... e-17 (exact) kg
kelvin-atomic mass unit relationship 9.251 0868 e-14 0.000 0084 e-14 u
kelvin-electron volt relationship 8.617 3324 e-5 0.000 0078 e-5 eV
kelvin-hartree relationship 3.166 8114 e-6 0.000 0029 e-6 E_h
kelvin-hertz relationship 2.083 6618 e10 0.000 0019 e10 Hz
kelvin-inverse meter relationship 69.503 476 0.000 063 m^-1
kelvin-joule relationship 1.380 6488 e-23 0.000 0013 e-23 J
kelvin-kilogram relationship 1.536 1790 e-40 0.000 0014 e-40 kg
kilogram-atomic mass unit relationship 6.022 141 29 e26 0.000 000 27 e26 u
kilogram-electron volt relationship 5.609 588 85 e35 0.000 000 12 e35 eV
kilogram-hartree relationship 2.061 485 968 e34 0.000 000 091 e34 E_h
kilogram-hertz relationship 1.356 392 608 e50 0.000 000 060 e50 Hz
kilogram-inverse meter relationship 4.524 438 73 e41 0.000 000 20 e41 m^-1
kilogram-joule relationship 8.987 551 787... e16 (exact) J
kilogram-kelvin relationship 6.509 6582 e39 0.000 0059 e39 K
lattice parameter of silicon 543.102 0504 e-12 0.000 0089 e-12 m
Loschmidt constant (273.15 K, 100 kPa) 2.651 6462 e25 0.000 0024 e25 m^-3
Loschmidt constant (273.15 K, 101.325 kPa) 2.686 7805 e25 0.000 0024 e25 m^-3
mag. constant 12.566 370 614... e-7 (exact) N A^-2
mag. flux quantum 2.067 833 758 e-15 0.000 000 046 e-15 Wb
molar gas constant 8.314 4621 0.000 0075 J mol^-1 K^-1
molar mass constant 1 e-3 (exact) kg mol^-1
molar mass of carbon-12 12 e-3 (exact) kg mol^-1
molar Planck constant 3.990 312 7176 e-10 0.000 000 0028 e-10 J s mol^-1
molar Planck constant times c 0.119 626 565 779 0.000 000 000 084 J m mol^-1
molar volume of ideal gas (273.15 K, 100 kPa) 22.710 953 e-3 0.000 021 e-3 m^3 mol^-1
molar volume of ideal gas (273.15 K, 101.325 kPa) 22.413 968 e-3 0.000 020 e-3 m^3 mol^-1
molar volume of silicon 12.058 833 01 e-6 0.000 000 80 e-6 m^3 mol^-1
Mo x unit 1.002 099 52 e-13 0.000 000 53 e-13 m
muon Compton wavelength 11.734 441 03 e-15 0.000 000 30 e-15 m
muon Compton wavelength over 2 pi 1.867 594 294 e-15 0.000 000 047 e-15 m
muon-electron mass ratio 206.768 2843 0.000 0052
muon g factor -2.002 331 8418 0.000 000 0013
muon mag. mom. -4.490 448 07 e-26 0.000 000 15 e-26 J T^-1
muon mag. mom. anomaly 1.165 920 91 e-3 0.000 000 63 e-3
muon mag. mom. to Bohr magneton ratio -4.841 970 44 e-3 0.000 000 12 e-3
muon mag. mom. to nuclear magneton ratio -8.890 596 97 0.000 000 22
muon mass 1.883 531 475 e-28 0.000 000 096 e-28 kg
muon mass energy equivalent 1.692 833 667 e-11 0.000 000 086 e-11 J
muon mass energy equivalent in MeV 105.658 3715 0.000 0035 MeV
muon mass in u 0.113 428 9267 0.000 000 0029 u
muon molar mass 0.113 428 9267 e-3 0.000 000 0029 e-3 kg mol^-1
muon-neutron mass ratio 0.112 454 5177 0.000 000 0028
muon-proton mag. mom. ratio -3.183 345 107 0.000 000 084
muon-proton mass ratio 0.112 609 5272 0.000 000 0028
muon-tau mass ratio 5.946 49 e-2 0.000 54 e-2
natural unit of action 1.054 571 726 e-34 0.000 000 047 e-34 J s
natural unit of action in eV s 6.582 119 28 e-16 0.000 000 15 e-16 eV s
natural unit of energy 8.187 105 06 e-14 0.000 000 36 e-14 J
natural unit of energy in MeV 0.510 998 928 0.000 000 011 MeV
natural unit of length 386.159 268 00 e-15 0.000 000 25 e-15 m
natural unit of mass 9.109 382 91 e-31 0.000 000 40 e-31 kg
natural unit of mom.um 2.730 924 29 e-22 0.000 000 12 e-22 kg m s^-1
natural unit of mom.um in MeV/c 0.510 998 928 0.000 000 011 MeV/c
natural unit of time 1.288 088 668 33 e-21 0.000 000 000 83 e-21 s
natural unit of velocity 299 792 458 (exact) m s^-1
neutron Compton wavelength 1.319 590 9068 e-15 0.000 000 0011 e-15 m
neutron Compton wavelength over 2 pi 0.210 019 415 68 e-15 0.000 000 000 17 e-15 m
neutron-electron mag. mom. ratio 1.040 668 82 e-3 0.000 000 25 e-3
neutron-electron mass ratio 1838.683 6605 0.000 0011
neutron g factor -3.826 085 45 0.000 000 90
neutron gyromag. ratio 1.832 471 79 e8 0.000 000 43 e8 s^-1 T^-1
neutron gyromag. ratio over 2 pi 29.164 6943 0.000 0069 MHz T^-1
neutron mag. mom. -0.966 236 47 e-26 0.000 000 23 e-26 J T^-1
neutron mag. mom. to Bohr magneton ratio -1.041 875 63 e-3 0.000 000 25 e-3
neutron mag. mom. to nuclear magneton ratio -1.913 042 72 0.000 000 45
neutron mass 1.674 927 351 e-27 0.000 000 074 e-27 kg
neutron mass energy equivalent 1.505 349 631 e-10 0.000 000 066 e-10 J
neutron mass energy equivalent in MeV 939.565 379 0.000 021 MeV
neutron mass in u 1.008 664 916 00 0.000 000 000 43 u
neutron molar mass 1.008 664 916 00 e-3 0.000 000 000 43 e-3 kg mol^-1
neutron-muon mass ratio 8.892 484 00 0.000 000 22
neutron-proton mag. mom. ratio -0.684 979 34 0.000 000 16
neutron-proton mass difference 2.305 573 92 e-30 0.000 000 76 e-30
neutron-proton mass difference energy equivalent 2.072 146 50 e-13 0.000 000 68 e-13
neutron-proton mass difference energy equivalent in MeV 1.293 332 17 0.000 000 42
neutron-proton mass difference in u 0.001 388 449 19 0.000 000 000 45
neutron-proton mass ratio 1.001 378 419 17 0.000 000 000 45
neutron-tau mass ratio 0.528 790 0.000 048
neutron to shielded proton mag. mom. ratio -0.684 996 94 0.000 000 16
Newtonian constant of gravitation 6.673 84 e-11 0.000 80 e-11 m^3 kg^-1 s^-2
Newtonian constant of gravitation over h-bar c 6.708 37 e-39 0.000 80 e-39 (GeV/c^2)^-2
nuclear magneton 5.050 783 53 e-27 0.000 000 11 e-27 J T^-1
nuclear magneton in eV/T 3.152 451 2605 e-8 0.000 000 0022 e-8 eV T^-1
nuclear magneton in inverse meters per tesla 2.542 623 527 e-2 0.000 000 056 e-2 m^-1 T^-1
nuclear magneton in K/T 3.658 2682 e-4 0.000 0033 e-4 K T^-1
nuclear magneton in MHz/T 7.622 593 57 0.000 000 17 MHz T^-1
Planck constant 6.626 069 57 e-34 0.000 000 29 e-34 J s
Planck constant in eV s 4.135 667 516 e-15 0.000 000 091 e-15 eV s
Planck constant over 2 pi 1.054 571 726 e-34 0.000 000 047 e-34 J s
Planck constant over 2 pi in eV s 6.582 119 28 e-16 0.000 000 15 e-16 eV s
Planck constant over 2 pi times c in MeV fm 197.326 9718 0.000 0044 MeV fm
Planck length 1.616 199 e-35 0.000 097 e-35 m
Planck mass 2.176 51 e-8 0.000 13 e-8 kg
Planck mass energy equivalent in GeV 1.220 932 e19 0.000 073 e19 GeV
Planck temperature 1.416 833 e32 0.000 085 e32 K
Planck time 5.391 06 e-44 0.000 32 e-44 s
proton charge to mass quotient 9.578 833 58 e7 0.000 000 21 e7 C kg^-1
proton Compton wavelength 1.321 409 856 23 e-15 0.000 000 000 94 e-15 m
proton Compton wavelength over 2 pi 0.210 308 910 47 e-15 0.000 000 000 15 e-15 m
proton-electron mass ratio 1836.152 672 45 0.000 000 75
proton g factor 5.585 694 713 0.000 000 046
proton gyromag. ratio 2.675 222 005 e8 0.000 000 063 e8 s^-1 T^-1
proton gyromag. ratio over 2 pi 42.577 4806 0.000 0010 MHz T^-1
proton mag. mom. 1.410 606 743 e-26 0.000 000 033 e-26 J T^-1
proton mag. mom. to Bohr magneton ratio 1.521 032 210 e-3 0.000 000 012 e-3
proton mag. mom. to nuclear magneton ratio 2.792 847 356 0.000 000 023
proton mag. shielding correction 25.694 e-6 0.014 e-6
proton mass 1.672 621 777 e-27 0.000 000 074 e-27 kg
proton mass energy equivalent 1.503 277 484 e-10 0.000 000 066 e-10 J
proton mass energy equivalent in MeV 938.272 046 0.000 021 MeV
proton mass in u 1.007 276 466 812 0.000 000 000 090 u
proton molar mass 1.007 276 466 812 e-3 0.000 000 000 090 e-3 kg mol^-1
proton-muon mass ratio 8.880 243 31 0.000 000 22
proton-neutron mag. mom. ratio -1.459 898 06 0.000 000 34
proton-neutron mass ratio 0.998 623 478 26 0.000 000 000 45
proton rms charge radius 0.8775 e-15 0.0051 e-15 m
proton-tau mass ratio 0.528 063 0.000 048
quantum of circulation 3.636 947 5520 e-4 0.000 000 0024 e-4 m^2 s^-1
quantum of circulation times 2 7.273 895 1040 e-4 0.000 000 0047 e-4 m^2 s^-1
Rydberg constant 10 973 731.568 539 0.000 055 m^-1
Rydberg constant times c in Hz 3.289 841 960 364 e15 0.000 000 000 017 e15 Hz
Rydberg constant times hc in eV 13.605 692 53 0.000 000 30 eV
Rydberg constant times hc in J 2.179 872 171 e-18 0.000 000 096 e-18 J
Sackur-Tetrode constant (1 K, 100 kPa) -1.151 7078 0.000 0023
Sackur-Tetrode constant (1 K, 101.325 kPa) -1.164 8708 0.000 0023
second radiation constant 1.438 7770 e-2 0.000 0013 e-2 m K
shielded helion gyromag. ratio 2.037 894 659 e8 0.000 000 051 e8 s^-1 T^-1
shielded helion gyromag. ratio over 2 pi 32.434 100 84 0.000 000 81 MHz T^-1
shielded helion mag. mom. -1.074 553 044 e-26 0.000 000 027 e-26 J T^-1
shielded helion mag. mom. to Bohr magneton ratio -1.158 671 471 e-3 0.000 000 014 e-3
shielded helion mag. mom. to nuclear magneton ratio -2.127 497 718 0.000 000 025
shielded helion to proton mag. mom. ratio -0.761 766 558 0.000 000 011
shielded helion to shielded proton mag. mom. ratio -0.761 786 1313 0.000 000 0033
shielded proton gyromag. ratio 2.675 153 268 e8 0.000 000 066 e8 s^-1 T^-1
shielded proton gyromag. ratio over 2 pi 42.576 3866 0.000 0010 MHz T^-1
shielded proton mag. mom. 1.410 570 499 e-26 0.000 000 035 e-26 J T^-1
shielded proton mag. mom. to Bohr magneton ratio 1.520 993 128 e-3 0.000 000 017 e-3
shielded proton mag. mom. to nuclear magneton ratio 2.792 775 598 0.000 000 030
speed of light in vacuum 299 792 458 (exact) m s^-1
standard acceleration of gravity 9.806 65 (exact) m s^-2
standard atmosphere 101 325 (exact) Pa
standard-state pressure 100 000 (exact) Pa
Stefan-Boltzmann constant 5.670 373 e-8 0.000 021 e-8 W m^-2 K^-4
tau Compton wavelength 0.697 787 e-15 0.000 063 e-15 m
tau Compton wavelength over 2 pi 0.111 056 e-15 0.000 010 e-15 m
tau-electron mass ratio 3477.15 0.31
tau mass 3.167 47 e-27 0.000 29 e-27 kg
tau mass energy equivalent 2.846 78 e-10 0.000 26 e-10 J
tau mass energy equivalent in MeV 1776.82 0.16 MeV
tau mass in u 1.907 49 0.000 17 u
tau molar mass 1.907 49 e-3 0.000 17 e-3 kg mol^-1
tau-muon mass ratio 16.8167 0.0015
tau-neutron mass ratio 1.891 11 0.000 17
tau-proton mass ratio 1.893 72 0.000 17
Thomson cross section 0.665 245 8734 e-28 0.000 000 0013 e-28 m^2
triton-electron mass ratio 5496.921 5267 0.000 0050
triton g factor 5.957 924 896 0.000 000 076
triton mag. mom. 1.504 609 447 e-26 0.000 000 038 e-26 J T^-1
triton mag. mom. to Bohr magneton ratio 1.622 393 657 e-3 0.000 000 021 e-3
triton mag. mom. to nuclear magneton ratio 2.978 962 448 0.000 000 038
triton mass 5.007 356 30 e-27 0.000 000 22 e-27 kg
triton mass energy equivalent 4.500 387 41 e-10 0.000 000 20 e-10 J
triton mass energy equivalent in MeV 2808.921 005 0.000 062 MeV
triton mass in u 3.015 500 7134 0.000 000 0025 u
triton molar mass 3.015 500 7134 e-3 0.000 000 0025 e-3 kg mol^-1
triton-proton mass ratio 2.993 717 0308 0.000 000 0025
unified atomic mass unit 1.660 538 921 e-27 0.000 000 073 e-27 kg
von Klitzing constant 25 812.807 4434 0.000 0084 ohm
weak mixing angle 0.2223 0.0021
Wien frequency displacement law constant 5.878 9254 e10 0.000 0053 e10 Hz K^-1
Wien wavelength displacement law constant 2.897 7721 e-3 0.000 0026 e-3 m K"""
# -----------------------------------------------------------------------------
physical_constants = {}
def parse_constants(d):
constants = {}
for line in d.split('\n'):
name = line[:55].rstrip()
val = line[55:77].replace(' ','').replace('...','')
val = float(val)
uncert = line[77:99].replace(' ','').replace('(exact)', '0')
uncert = float(uncert)
units = line[99:].rstrip()
constants[name] = (val, units, uncert)
return constants
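# Usage sketch: parse_constants() relies on the fixed-width layout of the
# CODATA tables above (name in columns 0-54, value in 55-76, uncertainty in
# 77-98, units from column 99 on). Assuming that alignment, a single table
# line round-trips like this:
#   parse_constants(txt2010.split('\n')[-1])
#   # -> {'Wien wavelength displacement law constant': (0.0028977721, 'm K', 2.6e-09)}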
_physical_constants_2002 = parse_constants(txt2002)
_physical_constants_2006 = parse_constants(txt2006)
_physical_constants_2010 = parse_constants(txt2010)
physical_constants.update(_physical_constants_2002)
physical_constants.update(_physical_constants_2006)
physical_constants.update(_physical_constants_2010)
_current_constants = _physical_constants_2010
_current_codata = "CODATA 2010"
# check obsolete values
_obsolete_constants = {}
for k in physical_constants:
if k not in _current_constants:
_obsolete_constants[k] = True
# generate some additional aliases
_aliases = {}
for k in _physical_constants_2002:
if 'magn.' in k:
_aliases[k] = k.replace('magn.', 'mag.')
for k in _physical_constants_2006:
if 'momentum' in k:
_aliases[k] = k.replace('momentum', 'mom.um')
class ConstantWarning(DeprecationWarning):
"""Accessing a constant no longer in current CODATA data set"""
pass
def _check_obsolete(key):
if key in _obsolete_constants and key not in _aliases:
warnings.warn("Constant '%s' is not in current %s data set" % (
key, _current_codata), ConstantWarning)
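# Usage sketch (hypothetical key name): probing a constant that was dropped
# from the current data set, and has no alias, emits a ConstantWarning:
#   import warnings
#   with warnings.catch_warnings(record=True) as caught:
#       warnings.simplefilter('always')
#       _check_obsolete('some 2002-only constant')  # hypothetical obsolete key
#       # caught[0].category is ConstantWarning for a genuinely obsolete key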
def value(key):
"""
Value in physical_constants indexed by key
Parameters
----------
key : Python string or unicode
Key in dictionary `physical_constants`
Returns
-------
value : float
Value in `physical_constants` corresponding to `key`
See Also
--------
codata : Contains the description of `physical_constants`, which, as a
dictionary literal object, does not itself possess a docstring.
Examples
--------
>>> from scipy.constants import codata
>>> codata.value('elementary charge')
    1.602176565e-19
"""
_check_obsolete(key)
return physical_constants[key][0]
def unit(key):
"""
Unit in physical_constants indexed by key
Parameters
----------
key : Python string or unicode
Key in dictionary `physical_constants`
Returns
-------
unit : Python string
Unit in `physical_constants` corresponding to `key`
See Also
--------
codata : Contains the description of `physical_constants`, which, as a
dictionary literal object, does not itself possess a docstring.
Examples
--------
>>> from scipy.constants import codata
>>> codata.unit(u'proton mass')
'kg'
"""
_check_obsolete(key)
return physical_constants[key][1]
def precision(key):
"""
Relative precision in physical_constants indexed by key
Parameters
----------
key : Python string or unicode
Key in dictionary `physical_constants`
Returns
-------
prec : float
Relative precision in `physical_constants` corresponding to `key`
See Also
--------
codata : Contains the description of `physical_constants`, which, as a
dictionary literal object, does not itself possess a docstring.
Examples
--------
>>> from scipy.constants import codata
>>> codata.precision(u'proton mass')
4.96226989798e-08
"""
_check_obsolete(key)
return physical_constants[key][2] / physical_constants[key][0]
def find(sub=None, disp=False):
"""
Return list of codata.physical_constant keys containing a given string.
Parameters
----------
sub : str, unicode
Sub-string to search keys for. By default, return all keys.
disp : bool
If True, print the keys that are found, and return None.
Otherwise, return the list of keys without printing anything.
Returns
-------
keys : list or None
If `disp` is False, the list of keys is returned.
Otherwise, None is returned.
See Also
--------
codata : Contains the description of `physical_constants`, which, as a
dictionary literal object, does not itself possess a docstring.
"""
if sub is None:
result = list(_current_constants.keys())
else:
        result = [key for key in _current_constants if sub.lower() in key.lower()]
result.sort()
if disp:
for key in result:
print(key)
return
else:
return result
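# Usage sketch, grounded in the 2010 table above:
#   find('Stefan')             # -> ['Stefan-Boltzmann constant']
#   find('Stefan', disp=True)  # prints the matching key and returns None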
# Table is lacking some digits for exact values: calculate from definition
c = value('speed of light in vacuum')
mu0 = 4e-7*pi
epsilon0 = 1/(mu0*c*c)
exact_values = {
'mag. constant': (mu0, 'N A^-2', 0.0),
'electric constant': (epsilon0, 'F m^-1', 0.0),
'characteristic impedance of vacuum': (sqrt(mu0/epsilon0), 'ohm', 0.0),
'atomic unit of permittivity': (4*epsilon0*pi, 'F m^-1', 0.0), #is that the definition?
'joule-kilogram relationship': (1/(c*c), 'kg', 0.0),
'kilogram-joule relationship': (c*c, 'J', 0.0),
'hertz-inverse meter relationship': (1/c, 'm^-1', 0.0)
}
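# For reference, these derived values evaluate to approximately:
#   mu0                = 1.2566370614e-06  N A^-2
#   epsilon0           = 8.8541878176e-12  F m^-1
#   sqrt(mu0/epsilon0) = 376.730313462     ohm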
# sanity check
for key in exact_values:
val = _current_constants[key][0]
if abs(exact_values[key][0] - val) / val > 1e-9:
raise ValueError("Constants.codata: exact values too far off.")
physical_constants.update(exact_values)
# finally, insert aliases for values
for k, v in list(_aliases.items()):
if v in _current_constants:
physical_constants[k] = physical_constants[v]
else:
del _aliases[k]
|
tacid/ajenti
|
refs/heads/master
|
plugins/iptables/__init__.py
|
5
|
MODULES = ['main', 'backend']
DEPS = [
(['any'],
[
('app', 'iptables', 'iptables')
])
]
NAME = 'IP tables'
PLATFORMS = ['debian', 'arch', 'centos', 'fedora', 'gentoo']
DESCRIPTION = 'Netfilter rules control plugin'
VERSION = '0'
GENERATION = 1
AUTHOR = 'Ajenti team'
HOMEPAGE = 'http://ajenti.org'
|
nirmeshk/oh-mainline
|
refs/heads/master
|
vendor/packages/Django/django/contrib/auth/tests/__init__.py
|
101
|
from django.contrib.auth.tests.custom_user import *
from django.contrib.auth.tests.auth_backends import *
from django.contrib.auth.tests.basic import *
from django.contrib.auth.tests.context_processors import *
from django.contrib.auth.tests.decorators import *
from django.contrib.auth.tests.forms import *
from django.contrib.auth.tests.remote_user import *
from django.contrib.auth.tests.management import *
from django.contrib.auth.tests.models import *
from django.contrib.auth.tests.handlers import *
from django.contrib.auth.tests.hashers import *
from django.contrib.auth.tests.signals import *
from django.contrib.auth.tests.tokens import *
from django.contrib.auth.tests.views import *
# The password for the fixture data users is 'password'
from django.dispatch import receiver
from django.test.signals import setting_changed
@receiver(setting_changed)
def user_model_swapped(**kwargs):
if kwargs['setting'] == 'AUTH_USER_MODEL':
from django.db.models.manager import ensure_default_manager
from django.contrib.auth.models import User
# Reset User manager
setattr(User, 'objects', User._default_manager)
ensure_default_manager(User)
|
kwlzn/pants
|
refs/heads/master
|
src/python/pants/reporting/html_reporter.py
|
16
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import cgi
import os
import re
import time
import uuid
from collections import defaultdict, namedtuple
from textwrap import dedent
from six import string_types
from six.moves import range
from pants.base.build_environment import get_buildroot
from pants.base.mustache import MustacheRenderer
from pants.base.workunit import WorkUnit, WorkUnitLabel
from pants.reporting.linkify import linkify
from pants.reporting.report import Report
from pants.reporting.reporter import Reporter
from pants.reporting.reporting_utils import items_to_report_element
from pants.util.dirutil import safe_mkdir
class HtmlReporter(Reporter):
"""HTML reporting to files.
The files are intended to be served by the ReportingServer,
not accessed directly from the filesystem.
Pages are rendered using mustache templates, but individual fragments (appended to the report
of a currently running Pants run) are rendered using python string.format(), because it's
significantly faster, and profiles showed that the difference was non-trivial in short
pants runs.
TODO: The entire HTML reporting system, and the pants server that backs it, should be
rewritten to use some modern webapp framework, instead of this combination of server-side
ad-hoc templates and client-side spaghetti code.
"""
# HTML reporting settings.
# html_dir: Where the report files go.
# template_dir: Where to find mustache templates.
Settings = namedtuple('Settings', Reporter.Settings._fields + ('html_dir', 'template_dir'))
def __init__(self, run_tracker, settings):
super(HtmlReporter, self).__init__(run_tracker, settings)
# The main report, and associated tool outputs, go under this dir.
self._html_dir = settings.html_dir
# We render HTML from mustache templates.
self._renderer = MustacheRenderer(settings.template_dir, __name__)
# We serve files relative to the build root.
self._buildroot = get_buildroot()
self._html_path_base = os.path.relpath(self._html_dir, self._buildroot)
# We write the main report body to this file object.
self._report_file = None
# We redirect stdout, stderr etc. of tool invocations to these files.
self._output_files = defaultdict(dict) # workunit_id -> {path -> fileobj}.
self._linkify_memo = {}
# Map from filename to timestamp (ms since the epoch) of when we last overwrote that file.
# Useful for preventing too-frequent overwrites of, e.g., timing stats,
# which can noticeably slow down short pants runs with many workunits.
self._last_overwrite_time = {}
def report_path(self):
"""The path to the main report file."""
return os.path.join(self._html_dir, 'build.html')
def open(self):
"""Implementation of Reporter callback."""
safe_mkdir(os.path.dirname(self._html_dir))
self._report_file = open(self.report_path(), 'w')
def close(self):
"""Implementation of Reporter callback."""
self._report_file.close()
# Make sure everything's closed.
for files in self._output_files.values():
for f in files.values():
f.close()
# Creates a collapsible div in which to nest the reporting for a workunit.
# To add content to this div, append it to ${'#WORKUNITID-content'}.
# Note that definitive workunit timing is done in pants, but the client-side timer in the js
# below allows us to show a running timer in the browser while the workunit is executing.
_start_workunit_fmt_string = dedent("""
<div id="__{id}__content">
<div id="{id}">
<div class="toggle-header" id="{id}-header">
<div class="toggle-header-icon" onclick="pants.collapsible.toggle('{id}')">
<i id="{id}-icon" class="visibility-icon icon-large icon-caret-{icon_caret} hidden"></i>
</div>
<div class="toggle-header-text">
<div class="timeprefix">
<span class="timestamp">{workunit.start_time_string}</span>
<span class="timedelta">{workunit.start_delta_string}</span>
</div>
[<span id="{id}-header-text">{workunit.name}</span>]
<span class="timer" id="{id}-timer"></span>
<i class="icon-{icon}"></i>
<span class="aborted nodisplay" id="{id}-aborted">ctrl-c</span>
<span class="unaccounted-time nodisplay" id="{id}-unaccounted-time"></span>
</div>
<div id="{id}-spinner"><i class="icon-spinner icon-spin icon-large"></i></div>
</div>
<div class="toggle-content {display_class}" id="{id}-content"></div>
</div>
</div>
<script>
$(function() {{
if ('{parent_id}' !== '') {{
pants.append('#__{id}__content', '#{parent_id}-content');
$('#{parent_id}-icon').removeClass('hidden');
pants.timerManager.startTimer('{id}', '#{id}-timer', 1000 * {workunit.start_time});
}}
}});
</script>
""")
_start_tool_invocation_fmt_string = dedent("""
<div id="__{id}__tool_invocation">
{tool_invocation_details}
</div>
<script>
$(function() {{
pants.collapsible.hasContent('{id}');
pants.collapsible.hasContent('{id}-cmd');
pants.append('#__{id}__tool_invocation', '#{id}-content');
pants.appendString('{cmd}', '#{id}-cmd-content');
var startTailing = function() {{
pants.poller.startTailing('{id}_stdout', '{html_path_base}/{id}.stdout',
'#{id}-stdout-content', function() {{ pants.collapsible.hasContent('{id}-stdout'); }});
pants.poller.startTailing('{id}_stderr', '{html_path_base}/{id}.stderr',
'#{id}-stderr-content', function() {{ pants.collapsible.hasContent('{id}-stderr'); }});
}}
if ($('#{id}-content').is(':visible')) {{
startTailing();
}} else {{
$('#{id}-header').one('click', startTailing);
}}
}});
</script>
""")
def start_workunit(self, workunit):
"""Implementation of Reporter callback."""
# We use these properties of the workunit to decide how to render information about it.
is_bootstrap = workunit.has_label(WorkUnitLabel.BOOTSTRAP)
is_tool = workunit.has_label(WorkUnitLabel.TOOL)
is_multitool = workunit.has_label(WorkUnitLabel.MULTITOOL)
is_test = workunit.has_label(WorkUnitLabel.TEST)
initially_open = is_test or not (is_bootstrap or is_tool or is_multitool)
# Render the workunit's div.
s = self._start_workunit_fmt_string.format(
indent=len(workunit.ancestors()) * 10,
id=workunit.id,
parent_id=workunit.parent.id if workunit.parent else '',
workunit=workunit,
icon_caret='down' if initially_open else 'right',
display_class='' if initially_open else 'nodisplay',
icon='cog' if is_tool else 'cogs' if is_multitool else 'none'
)
self._emit(s)
if is_tool:
tool_invocation_details = '\n'.join([
self._render_tool_detail(workunit=workunit, title='cmd', class_prefix='cmd'),
# Have test framework stdout open by default, but not that of other tools.
# This is an arbitrary choice, but one that turns out to be useful to users in practice.
self._render_tool_detail(workunit=workunit, title='stdout', initially_open=is_test),
self._render_tool_detail(workunit=workunit, title='stderr'),
])
cmd = workunit.cmd or ''
linkified_cmd = linkify(self._buildroot, cmd.replace('$', '\\\\$'), self._linkify_memo)
s = self._start_tool_invocation_fmt_string.format(
tool_invocation_details=tool_invocation_details,
html_path_base=self._html_path_base,
id=workunit.id,
cmd=linkified_cmd
)
self._emit(s)
# CSS classes from pants.css that we use to style the header text to reflect the outcome.
_outcome_css_classes = ['aborted', 'failure', 'warning', 'success', 'unknown']
_end_tool_invocation_fmt_string = dedent("""
<script>
$('#{id}-header-text').addClass('{status}'); $('#{id}-spinner').hide();
pants.poller.stopTailing('{id}_stdout');
pants.poller.stopTailing('{id}_stderr');
</script>
""")
_end_workunit_fmt_string = dedent("""
<script>
$('#{id}-header-text').addClass('{status}');
$('#{id}-spinner').hide();
$('#{id}-timer').html('{timing}s');
if ({aborted}) {{
$('#{id}-aborted').show();
}} else if ('{unaccounted_time}' !== '') {{
$('#{id}-unaccounted-time').html('(Unaccounted: {unaccounted_time}s)').show();
}}
$(function(){{
pants.timerManager.stopTimer('{id}');
}});
</script>
""")
def end_workunit(self, workunit):
"""Implementation of Reporter callback."""
duration = workunit.duration()
timing = '{:.3f}'.format(duration)
unaccounted_time = ''
# Background work may be idle a lot, no point in reporting that as unaccounted.
if self.is_under_main_root(workunit):
unaccounted_time_secs = workunit.unaccounted_time()
if unaccounted_time_secs >= 1 and unaccounted_time_secs > 0.05 * duration:
unaccounted_time = '{:.3f}'.format(unaccounted_time_secs)
status = HtmlReporter._outcome_css_classes[workunit.outcome()]
if workunit.has_label(WorkUnitLabel.TOOL):
self._emit(self._end_tool_invocation_fmt_string.format(
id=workunit.id,
status=status
))
self._emit(self._end_workunit_fmt_string.format(
id=workunit.id,
status=status,
timing=timing,
unaccounted_time=unaccounted_time,
aborted='true' if workunit.outcome() == WorkUnit.ABORTED else 'false'
))
# If we're a root workunit, force an overwrite, as we may be the last ever write in this run.
force_overwrite = workunit.parent is None
# Update the timings.
def render_timings(timings):
timings_dict = timings.get_all()
for item in timings_dict:
item['timing_string'] = '{:.3f}'.format(item['timing'])
res = ['<table>']
for item in timings_dict:
res.append("""<tr><td class="timing-string">{timing:.3f}</td>
<td class="timing-label">{label}""".format(
timing=item['timing'],
label=item['label']
))
if item['is_tool']:
res.append("""<i class="icon-cog"></i>""")
res.append("""</td></tr>""")
      res.append('</table>')
return ''.join(res)
self._overwrite('cumulative_timings',
lambda: render_timings(self.run_tracker.cumulative_timings),
force=force_overwrite)
self._overwrite('self_timings',
lambda: render_timings(self.run_tracker.self_timings),
force=force_overwrite)
# Update the artifact cache stats.
def render_cache_stats(artifact_cache_stats):
def fix_detail_id(e, _id):
return e if isinstance(e, string_types) else e + (_id, )
msg_elements = []
for cache_name, stat in artifact_cache_stats.stats_per_cache.items():
# TODO consider display causes for hit/miss targets
hit_targets = [tgt for tgt, cause in stat.hit_targets]
miss_targets = [tgt for tgt, cause in stat.miss_targets]
msg_elements.extend([
cache_name + ' artifact cache: ',
# Explicitly set the detail ids, so their displayed/hidden state survives a refresh.
fix_detail_id(items_to_report_element(hit_targets, 'hit'), 'cache-hit-details'),
', ',
fix_detail_id(items_to_report_element(miss_targets, 'miss'), 'cache-miss-details'),
'.'
])
if not msg_elements:
msg_elements = ['No artifact cache use.']
return self._render_message(*msg_elements)
self._overwrite('artifact_cache_stats',
lambda: render_cache_stats(self.run_tracker.artifact_cache_stats),
force=force_overwrite)
for f in self._output_files[workunit.id].values():
f.close()
def handle_output(self, workunit, label, s):
"""Implementation of Reporter callback."""
if os.path.exists(self._html_dir): # Make sure we're not immediately after a clean-all.
path = os.path.join(self._html_dir, '{}.{}'.format(workunit.id, label))
output_files = self._output_files[workunit.id]
if path not in output_files:
f = open(path, 'w')
output_files[path] = f
else:
f = output_files[path]
f.write(self._htmlify_text(s).encode('utf-8'))
# We must flush in the same thread as the write.
f.flush()
_log_level_css_map = {
Report.FATAL: 'fatal',
Report.ERROR: 'error',
Report.WARN: 'warn',
Report.INFO: 'info',
Report.DEBUG: 'debug'
}
_log_fmt_string = dedent("""
<div id="__{content_id}"><span class="{css_class}">{message}</span></div>
<script>
$(function(){{
pants.append('#__{content_id}', '#{workunit_id}-content');
}});
</script>
""")
def do_handle_log(self, workunit, level, *msg_elements):
"""Implementation of Reporter callback."""
message = self._render_message(*msg_elements)
s = self._log_fmt_string.format(content_id=uuid.uuid4(),
workunit_id=workunit.id,
css_class=HtmlReporter._log_level_css_map[level],
message=message)
# Emit that javascript to the main report body.
self._emit(s)
_detail_a_fmt_string = dedent("""
<a href="#" onclick="$('.{detail_class}').not('#{detail_id}').hide();
$('#{detail_id}').toggle(); return false;">{text}</a>
""")
_detail_div_fmt_string = dedent("""
<div id="{detail_id}" class="{detail_class} {detail_visibility_class}">{detail}</div>
""")
def _render_message(self, *msg_elements):
# Identifies all details in this message, so that opening one can close all the others.
detail_class = str(uuid.uuid4())
html_fragments = ['<div>']
detail_divs = []
for element in msg_elements:
# Each element can be a message or a (message, detail) pair, as received by handle_log().
#
# However, as an internal implementation detail, we also allow an element to be a tuple
      # (message, detail[, detail_id[, detail_initially_visible]])
#
# - If the detail exists, clicking on the text will toggle display of the detail and close
# all other details in this message.
# - If detail_initially_visible is True, the detail will be displayed by default.
#
# We allow detail_id to be explicitly specified, so that the open/closed state can be
# preserved through refreshes. For example, when looking at the artifact cache stats,
# if "hits" are open and "misses" are closed, we want to remember that even after
# the cache stats are updated and the message re-rendered.
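      # Example (hypothetical call): a three-element message whose middle
      # element carries a toggleable detail with a stable id:
      #   self._render_message('cache: ', ('2 hits', '<div>t1, t2</div>', 'cache-hit-details'), '.')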
if isinstance(element, string_types):
element = [element]
# Map assumes None for missing values, so this will pick the default for those.
(text, detail, detail_id, detail_initially_visible) = \
map(lambda x, y: x or y, element, ('', None, None, False))
htmlified_text = self._htmlify_text(text)
if detail is None:
html_fragments.append(htmlified_text)
else:
detail_id = detail_id or str(uuid.uuid4())
detail_visibility_class = '' if detail_initially_visible else 'nodisplay'
html_fragments.append(self._detail_a_fmt_string.format(
text=htmlified_text, detail_id=detail_id, detail_class=detail_class))
detail_divs.append(self._detail_div_fmt_string.format(
detail_id=detail_id, detail=detail, detail_class=detail_class,
detail_visibility_class=detail_visibility_class
))
html_fragments.extend(detail_divs)
html_fragments.append('</div>')
return ''.join(html_fragments)
_tool_detail_fmt_string = dedent("""
<div class="{class_prefix}" id="{id}">
<div class="{class_prefix}-header toggle-header" id="{id}-header">
<div class="{class_prefix}-header-icon toggle-header-icon"
onclick="pants.collapsible.toggle('{id}')">
<i id="{id}-icon" class="visibility-icon icon-large icon-caret-{icon_caret} hidden"></i>
</div>
<div class="{class_prefix}-header-text toggle-header-text">
[<span id="{id}-header-text">{title}</span>]
</div>
</div>
<div class="{class_prefix}-content toggle-content {display_class}" id="{id}-content"></div>
</div>
""")
def _render_tool_detail(self, workunit, title, class_prefix='greyed', initially_open=False):
return self._tool_detail_fmt_string.format(
class_prefix=class_prefix,
id='{}-{}'.format(workunit.id, title),
icon_caret='down' if initially_open else 'right',
display_class='' if initially_open else 'nodisplay',
title=title,
)
def _emit(self, s):
"""Append content to the main report file."""
if os.path.exists(self._html_dir): # Make sure we're not immediately after a clean-all.
self._report_file.write(s)
self._report_file.flush() # We must flush in the same thread as the write.
def _overwrite(self, filename, func, force=False):
"""Overwrite a file with the specified contents.
Write times are tracked, too-frequent overwrites are skipped, for performance reasons.
:param filename: The path under the html dir to write to.
:param func: A no-arg function that returns the contents to write.
:param force: Whether to force a write now, regardless of the last overwrite time.
"""
now = int(time.time() * 1000)
last_overwrite_time = self._last_overwrite_time.get(filename) or now
# Overwrite only once per second.
if (now - last_overwrite_time >= 1000) or force:
if os.path.exists(self._html_dir): # Make sure we're not immediately after a clean-all.
with open(os.path.join(self._html_dir, filename), 'w') as f:
f.write(func())
self._last_overwrite_time[filename] = now
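  # Sketch: overwrites within one second of the previous recorded write are
  # coalesced unless forced, e.g. (render being any hypothetical no-arg callable):
  #   self._overwrite('self_timings', render)              # may be skipped
  #   self._overwrite('self_timings', render, force=True)  # always writes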
def _htmlify_text(self, s):
"""Make text HTML-friendly."""
colored = self._handle_ansi_color_codes(cgi.escape(s.decode('utf-8', 'replace')))
    return linkify(self._buildroot, colored, self._linkify_memo).replace('\n', '<br/>')
_ANSI_COLOR_CODE_RE = re.compile(r'\033\[((?:\d|;)*)m')
def _handle_ansi_color_codes(self, s):
"""Replace ansi escape sequences with spans of appropriately named css classes."""
parts = HtmlReporter._ANSI_COLOR_CODE_RE.split(s)
ret = []
span_depth = 0
# Note that len(parts) is always odd: text, code, text, code, ..., text.
for i in range(0, len(parts), 2):
ret.append(parts[i])
if i + 1 < len(parts):
for code in parts[i + 1].split(';'):
          if code == '0':  # Reset. The regex split yields the codes as strings.
while span_depth > 0:
ret.append('</span>')
span_depth -= 1
else:
ret.append('<span class="ansi-{}">'.format(code))
span_depth += 1
while span_depth > 0:
ret.append('</span>')
span_depth -= 1
return ''.join(ret)
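  # Sketch: '\033[31mred\033[0m plain' splits into ['', '31', 'red', '0', ' plain']
  # and renders as '<span class="ansi-31">red</span> plain', the reset code
  # closing the span opened for color 31.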
|
srilatha44/threepress
|
refs/heads/master
|
bookworm/ez_setup.py
|
358
|
#!python
"""Bootstrap setuptools installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from ez_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import sys
DEFAULT_VERSION = "0.6c9"
DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
md5_data = {
'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb',
'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b',
'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a',
'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618',
'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac',
'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5',
'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4',
'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c',
'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b',
'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27',
'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277',
'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa',
'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e',
'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e',
'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f',
'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2',
'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc',
'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167',
'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64',
'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d',
'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20',
'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab',
'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53',
'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2',
'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e',
'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372',
'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902',
'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de',
'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b',
'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03',
'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a',
'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6',
'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a',
}
import sys, os
try: from hashlib import md5
except ImportError: from md5 import md5
def _validate_md5(egg_name, data):
if egg_name in md5_data:
digest = md5(data).hexdigest()
if digest != md5_data[egg_name]:
print >>sys.stderr, (
"md5 validation of %s failed! (Possible download problem?)"
% egg_name
)
sys.exit(2)
return data
def use_setuptools(
version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
download_delay=15
):
"""Automatically find/download setuptools and make it available on sys.path
`version` should be a valid setuptools version number that is available
as an egg for download under the `download_base` URL (which should end with
a '/'). `to_dir` is the directory where setuptools will be downloaded, if
it is not already available. If `download_delay` is specified, it should
be the number of seconds that will be paused before initiating a download,
should one be required. If an older version of setuptools is installed,
this routine will print a message to ``sys.stderr`` and raise SystemExit in
an attempt to abort the calling script.
"""
was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
def do_download():
egg = download_setuptools(version, download_base, to_dir, download_delay)
sys.path.insert(0, egg)
import setuptools; setuptools.bootstrap_install_from = egg
try:
import pkg_resources
except ImportError:
return do_download()
try:
pkg_resources.require("setuptools>="+version); return
except pkg_resources.VersionConflict, e:
if was_imported:
print >>sys.stderr, (
"The required version of setuptools (>=%s) is not available, and\n"
"can't be installed while this script is running. Please install\n"
" a more recent version first, using 'easy_install -U setuptools'."
"\n\n(Currently using %r)"
) % (version, e.args[0])
sys.exit(2)
else:
del pkg_resources, sys.modules['pkg_resources'] # reload ok
return do_download()
except pkg_resources.DistributionNotFound:
return do_download()
def download_setuptools(
version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
delay = 15
):
"""Download setuptools from a specified location and return its filename
`version` should be a valid setuptools version number that is available
as an egg for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download attempt.
"""
import urllib2, shutil
egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
url = download_base + egg_name
saveto = os.path.join(to_dir, egg_name)
src = dst = None
if not os.path.exists(saveto): # Avoid repeated downloads
try:
from distutils import log
if delay:
log.warn("""
---------------------------------------------------------------------------
This script requires setuptools version %s to run (even to display
help). I will attempt to download it for you (from
%s), but
you may need to enable firewall access for this script first.
I will start the download in %d seconds.
(Note: if this machine does not have network access, please obtain the file
%s
and place it in this directory before rerunning this script.)
---------------------------------------------------------------------------""",
version, download_base, delay, url
); from time import sleep; sleep(delay)
log.warn("Downloading %s", url)
src = urllib2.urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = _validate_md5(egg_name, src.read())
dst = open(saveto,"wb"); dst.write(data)
finally:
if src: src.close()
if dst: dst.close()
return os.path.realpath(saveto)
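# Usage sketch: fetch the egg into the current directory without installing it,
# skipping the warning delay (arguments as in the signature above):
#   egg_path = download_setuptools(version=DEFAULT_VERSION, to_dir=".", delay=0)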
def main(argv, version=DEFAULT_VERSION):
"""Install or upgrade setuptools and EasyInstall"""
try:
import setuptools
except ImportError:
egg = None
try:
egg = download_setuptools(version, delay=0)
sys.path.insert(0,egg)
from setuptools.command.easy_install import main
return main(list(argv)+[egg]) # we're done here
finally:
if egg and os.path.exists(egg):
os.unlink(egg)
else:
if setuptools.__version__ == '0.0.1':
print >>sys.stderr, (
"You have an obsolete version of setuptools installed. Please\n"
"remove it from your system entirely before rerunning this script."
)
sys.exit(2)
req = "setuptools>="+version
import pkg_resources
try:
pkg_resources.require(req)
except pkg_resources.VersionConflict:
try:
from setuptools.command.easy_install import main
except ImportError:
from easy_install import main
main(list(argv)+[download_setuptools(delay=0)])
sys.exit(0) # try to force an exit
else:
if argv:
from setuptools.command.easy_install import main
main(argv)
else:
print "Setuptools version",version,"or greater has been installed."
print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
def update_md5(filenames):
"""Update our built-in md5 registry"""
import re
for name in filenames:
base = os.path.basename(name)
f = open(name,'rb')
md5_data[base] = md5(f.read()).hexdigest()
f.close()
data = [" %r: %r,\n" % it for it in md5_data.items()]
data.sort()
repl = "".join(data)
import inspect
srcfile = inspect.getsourcefile(sys.modules[__name__])
f = open(srcfile, 'rb'); src = f.read(); f.close()
match = re.search("\nmd5_data = {\n([^}]+)}", src)
if not match:
print >>sys.stderr, "Internal error!"
sys.exit(2)
src = src[:match.start(1)] + repl + src[match.end(1):]
f = open(srcfile,'w')
f.write(src)
f.close()
if __name__=='__main__':
if len(sys.argv)>2 and sys.argv[1]=='--md5update':
update_md5(sys.argv[2:])
else:
main(sys.argv[1:])
|
djeo94/CouchPotatoServer
|
refs/heads/master
|
libs/tornado/wsgi.py
|
73
|
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""WSGI support for the Tornado web framework.
WSGI is the Python standard for web servers, and allows for interoperability
between Tornado and other Python web frameworks and servers. This module
provides WSGI support in two ways:
* `WSGIAdapter` converts a `tornado.web.Application` to the WSGI application
interface. This is useful for running a Tornado app on another
HTTP server, such as Google App Engine. See the `WSGIAdapter` class
documentation for limitations that apply.
* `WSGIContainer` lets you run other WSGI applications and frameworks on the
Tornado HTTP server. For example, with this class you can mix Django
and Tornado handlers in a single server.
"""
from __future__ import absolute_import, division, print_function, with_statement
import sys
from io import BytesIO
import tornado
from tornado.concurrent import Future
from tornado import escape
from tornado import httputil
from tornado.log import access_log
from tornado import web
from tornado.escape import native_str
from tornado.util import unicode_type
try:
import urllib.parse as urllib_parse # py3
except ImportError:
import urllib as urllib_parse
# PEP 3333 specifies that WSGI on python 3 generally deals with byte strings
# that are smuggled inside objects of type unicode (via the latin1 encoding).
# These functions are like those in the tornado.escape module, but defined
# here to minimize the temptation to use them in non-wsgi contexts.
if str is unicode_type:
def to_wsgi_str(s):
assert isinstance(s, bytes)
return s.decode('latin1')
def from_wsgi_str(s):
assert isinstance(s, str)
return s.encode('latin1')
else:
def to_wsgi_str(s):
assert isinstance(s, bytes)
return s
def from_wsgi_str(s):
assert isinstance(s, str)
return s
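# Sketch of the latin1 round trip (assumed byte values): a UTF-8 path arrives
# as bytes and survives WSGI smuggling byte-for-byte:
#   raw = b"/caf\xc3\xa9"
#   from_wsgi_str(to_wsgi_str(raw)) == raw  # True on both python 2 and 3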
class WSGIApplication(web.Application):
"""A WSGI equivalent of `tornado.web.Application`.
.. deprecated:: 4.0
Use a regular `.Application` and wrap it in `WSGIAdapter` instead.
"""
def __call__(self, environ, start_response):
return WSGIAdapter(self)(environ, start_response)
# WSGI has no facilities for flow control, so just return an already-done
# Future when the interface requires it.
_dummy_future = Future()
_dummy_future.set_result(None)
class _WSGIConnection(httputil.HTTPConnection):
def __init__(self, method, start_response, context):
self.method = method
self.start_response = start_response
self.context = context
self._write_buffer = []
self._finished = False
self._expected_content_remaining = None
self._error = None
def set_close_callback(self, callback):
# WSGI has no facility for detecting a closed connection mid-request,
# so we can simply ignore the callback.
pass
def write_headers(self, start_line, headers, chunk=None, callback=None):
if self.method == 'HEAD':
self._expected_content_remaining = 0
elif 'Content-Length' in headers:
self._expected_content_remaining = int(headers['Content-Length'])
else:
self._expected_content_remaining = None
self.start_response(
'%s %s' % (start_line.code, start_line.reason),
[(native_str(k), native_str(v)) for (k, v) in headers.get_all()])
if chunk is not None:
self.write(chunk, callback)
elif callback is not None:
callback()
return _dummy_future
def write(self, chunk, callback=None):
if self._expected_content_remaining is not None:
self._expected_content_remaining -= len(chunk)
if self._expected_content_remaining < 0:
self._error = httputil.HTTPOutputError(
"Tried to write more data than Content-Length")
raise self._error
self._write_buffer.append(chunk)
if callback is not None:
callback()
return _dummy_future
def finish(self):
if (self._expected_content_remaining is not None and
self._expected_content_remaining != 0):
self._error = httputil.HTTPOutputError(
"Tried to write %d bytes less than Content-Length" %
self._expected_content_remaining)
raise self._error
self._finished = True
class _WSGIRequestContext(object):
def __init__(self, remote_ip, protocol):
self.remote_ip = remote_ip
self.protocol = protocol
def __str__(self):
return self.remote_ip
class WSGIAdapter(object):
"""Converts a `tornado.web.Application` instance into a WSGI application.
Example usage::
import tornado.web
import tornado.wsgi
import wsgiref.simple_server
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
wsgi_app = tornado.wsgi.WSGIAdapter(application)
server = wsgiref.simple_server.make_server('', 8888, wsgi_app)
server.serve_forever()
See the `appengine demo
<https://github.com/tornadoweb/tornado/tree/stable/demos/appengine>`_
for an example of using this module to run a Tornado app on Google
App Engine.
In WSGI mode asynchronous methods are not supported. This means
that it is not possible to use `.AsyncHTTPClient`, or the
`tornado.auth` or `tornado.websocket` modules.
.. versionadded:: 4.0
"""
def __init__(self, application):
if isinstance(application, WSGIApplication):
self.application = lambda request: web.Application.__call__(
application, request)
else:
self.application = application
def __call__(self, environ, start_response):
method = environ["REQUEST_METHOD"]
uri = urllib_parse.quote(from_wsgi_str(environ.get("SCRIPT_NAME", "")))
uri += urllib_parse.quote(from_wsgi_str(environ.get("PATH_INFO", "")))
if environ.get("QUERY_STRING"):
uri += "?" + environ["QUERY_STRING"]
headers = httputil.HTTPHeaders()
if environ.get("CONTENT_TYPE"):
headers["Content-Type"] = environ["CONTENT_TYPE"]
if environ.get("CONTENT_LENGTH"):
headers["Content-Length"] = environ["CONTENT_LENGTH"]
for key in environ:
if key.startswith("HTTP_"):
headers[key[5:].replace("_", "-")] = environ[key]
if headers.get("Content-Length"):
body = environ["wsgi.input"].read(
int(headers["Content-Length"]))
else:
body = b""
protocol = environ["wsgi.url_scheme"]
remote_ip = environ.get("REMOTE_ADDR", "")
if environ.get("HTTP_HOST"):
host = environ["HTTP_HOST"]
else:
host = environ["SERVER_NAME"]
connection = _WSGIConnection(method, start_response,
_WSGIRequestContext(remote_ip, protocol))
request = httputil.HTTPServerRequest(
method, uri, "HTTP/1.1", headers=headers, body=body,
host=host, connection=connection)
request._parse_body()
self.application(request)
if connection._error:
raise connection._error
if not connection._finished:
raise Exception("request did not finish synchronously")
return connection._write_buffer
class WSGIContainer(object):
r"""Makes a WSGI-compatible function runnable on Tornado's HTTP server.
.. warning::
WSGI is a *synchronous* interface, while Tornado's concurrency model
is based on single-threaded asynchronous execution. This means that
running a WSGI app with Tornado's `WSGIContainer` is *less scalable*
than running the same app in a multi-threaded WSGI server like
``gunicorn`` or ``uwsgi``. Use `WSGIContainer` only when there are
benefits to combining Tornado and WSGI in the same process that
outweigh the reduced scalability.
Wrap a WSGI function in a `WSGIContainer` and pass it to `.HTTPServer` to
run it. For example::
def simple_app(environ, start_response):
status = "200 OK"
response_headers = [("Content-type", "text/plain")]
start_response(status, response_headers)
return ["Hello world!\n"]
container = tornado.wsgi.WSGIContainer(simple_app)
http_server = tornado.httpserver.HTTPServer(container)
http_server.listen(8888)
tornado.ioloop.IOLoop.instance().start()
This class is intended to let other frameworks (Django, web.py, etc)
run on the Tornado HTTP server and I/O loop.
The `tornado.web.FallbackHandler` class is often useful for mixing
Tornado and WSGI apps in the same server. See
https://github.com/bdarnell/django-tornado-demo for a complete example.
"""
def __init__(self, wsgi_application):
self.wsgi_application = wsgi_application
def __call__(self, request):
data = {}
response = []
def start_response(status, response_headers, exc_info=None):
data["status"] = status
data["headers"] = response_headers
return response.append
app_response = self.wsgi_application(
WSGIContainer.environ(request), start_response)
try:
response.extend(app_response)
body = b"".join(response)
finally:
if hasattr(app_response, "close"):
app_response.close()
if not data:
raise Exception("WSGI app did not call start_response")
status_code = int(data["status"].split()[0])
headers = data["headers"]
header_set = set(k.lower() for (k, v) in headers)
body = escape.utf8(body)
if status_code != 304:
if "content-length" not in header_set:
headers.append(("Content-Length", str(len(body))))
if "content-type" not in header_set:
headers.append(("Content-Type", "text/html; charset=UTF-8"))
if "server" not in header_set:
headers.append(("Server", "TornadoServer/%s" % tornado.version))
parts = [escape.utf8("HTTP/1.1 " + data["status"] + "\r\n")]
for key, value in headers:
parts.append(escape.utf8(key) + b": " + escape.utf8(value) + b"\r\n")
parts.append(b"\r\n")
parts.append(body)
request.write(b"".join(parts))
request.finish()
self._log(status_code, request)
@staticmethod
def environ(request):
"""Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment.
"""
hostport = request.host.split(":")
if len(hostport) == 2:
host = hostport[0]
port = int(hostport[1])
else:
host = request.host
port = 443 if request.protocol == "https" else 80
environ = {
"REQUEST_METHOD": request.method,
"SCRIPT_NAME": "",
"PATH_INFO": to_wsgi_str(escape.url_unescape(
request.path, encoding=None, plus=False)),
"QUERY_STRING": request.query,
"REMOTE_ADDR": request.remote_ip,
"SERVER_NAME": host,
"SERVER_PORT": str(port),
"SERVER_PROTOCOL": request.version,
"wsgi.version": (1, 0),
"wsgi.url_scheme": request.protocol,
"wsgi.input": BytesIO(escape.utf8(request.body)),
"wsgi.errors": sys.stderr,
"wsgi.multithread": False,
"wsgi.multiprocess": True,
"wsgi.run_once": False,
}
if "Content-Type" in request.headers:
environ["CONTENT_TYPE"] = request.headers.pop("Content-Type")
if "Content-Length" in request.headers:
environ["CONTENT_LENGTH"] = request.headers.pop("Content-Length")
for key, value in request.headers.items():
environ["HTTP_" + key.replace("-", "_").upper()] = value
return environ
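    # Sketch (assumed request): "GET /foo?x=1 HTTP/1.1" with "Host: example.com"
    # yields, abridged:
    #   {"REQUEST_METHOD": "GET", "PATH_INFO": "/foo", "QUERY_STRING": "x=1",
    #    "SERVER_NAME": "example.com", "SERVER_PORT": "80",
    #    "wsgi.url_scheme": "http", ...}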
def _log(self, status_code, request):
if status_code < 400:
log_method = access_log.info
elif status_code < 500:
log_method = access_log.warning
else:
log_method = access_log.error
request_time = 1000.0 * request.request_time()
summary = request.method + " " + request.uri + " (" + \
request.remote_ip + ")"
log_method("%d %s %.2fms", status_code, summary, request_time)
HTTPRequest = httputil.HTTPServerRequest
|
sburnett/seattle
|
refs/heads/master
|
repy/tests/ip_junkip_trybind.py
|
1
|
# This test is run with only the
# --ip 256.256.256.256 flag; we then try to bind to an arbitrary address "128.0.1.5" to be sure waitforconn and recvmess fail
def noop(ip,port,mess,ch):
sleep(30)
def noop1(ip,port,mess,ch1, ch2):
sleep(30)
junkip = "128.0.1.5"
if callfunc == 'initialize':
try:
waitforconn("128.0.1.5", 12345, noop1)
except:
pass # This is expected
else:
print "Error! waitforconn should have failed!"
try:
recvmess("128.0.1.5", 12345, noop)
except:
pass # This is expected
else:
print "Error! recvmess should have failed!"
|
brijeshkesariya/odoo
|
refs/heads/8.0
|
addons/website_customer/controllers/__init__.py
|
7372
|
import main
|
bikong2/scikit-learn
|
refs/heads/master
|
sklearn/semi_supervised/__init__.py
|
436
|
"""
The :mod:`sklearn.semi_supervised` module implements semi-supervised learning
algorithms. These algorithms utilize small amounts of labeled data and large
amounts of unlabeled data for classification tasks. This module includes Label
Propagation.
"""
from .label_propagation import LabelPropagation, LabelSpreading
__all__ = ['LabelPropagation', 'LabelSpreading']
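# Usage sketch (standard scikit-learn estimator API; -1 marks unlabeled rows):
#   import numpy as np
#   from sklearn.semi_supervised import LabelPropagation
#   X = np.array([[0.0], [0.1], [5.0], [5.1]])
#   y = np.array([0, -1, 1, -1])
#   LabelPropagation().fit(X, y).transduction_  # inferred labels for all rows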
|
telwertowski/QGIS
|
refs/heads/master
|
tests/src/python/test_qgsserver_wms_getfeatureinfo.py
|
9
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer GetFeatureInfo WMS.
From build dir, run: ctest -R PyQgsServerWMSGetFeatureInfo -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Alessandro Pasotti'
__date__ = '11/03/2018'
__copyright__ = 'Copyright 2018, The QGIS Project'
import os
# Needed on Qt 5 so that the serialization of XML is consistent among all
# executions
os.environ['QT_HASH_SEED'] = '1'
import re
import urllib.request
import urllib.parse
import urllib.error
import xml.etree.ElementTree as ET
import json
from qgis.testing import unittest
from qgis.PyQt.QtCore import QSize
import osgeo.gdal # NOQA
from test_qgsserver_wms import TestQgsServerWMSTestBase
from qgis.core import QgsProject
class TestQgsServerWMSGetFeatureInfo(TestQgsServerWMSTestBase):
"""QGIS Server WMS Tests for GetFeatureInfo request"""
#regenerate_reference = True
def testGetFeatureInfo(self):
# Test getfeatureinfo response xml
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-xml')
self.wms_request_compare('GetFeatureInfo',
'&layers=&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-xml')
# Test getfeatureinfo on non queryable layer
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer3&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer3&X=190&Y=320',
'wms_getfeatureinfo-testlayer3-notqueryable')
# Test getfeatureinfo on group without shortname (no queryable...)
self.wms_request_compare('GetFeatureInfo',
'&layers=groupwithoutshortname&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=groupwithoutshortname&X=190&Y=320',
'wms_getfeatureinfo-groupwithoutshortname-notqueryable')
# Test getfeatureinfo on group with shortname (no queryable...)
self.wms_request_compare('GetFeatureInfo',
'&layers=group_name&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=group_name&X=190&Y=320',
'wms_getfeatureinfo-group_name-notqueryable')
# Test getfeatureinfo response html
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fhtml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-html')
# Test getfeatureinfo response html with geometry
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fhtml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'with_geometry=true',
'wms_getfeatureinfo-text-html-geometry')
# Test getfeatureinfo response html with maptip
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fhtml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'with_maptip=true',
'wms_getfeatureinfo-text-html-maptip')
# Test getfeatureinfo response text
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'info_format=text/plain',
'wms_getfeatureinfo-text-plain')
# Test getfeatureinfo default info_format
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-plain')
# Test getfeatureinfo invalid info_format
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'info_format=InvalidFormat',
'wms_getfeatureinfo-invalid-format')
# Test feature info request with filter geometry
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A4326&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER_GEOM=POLYGON((8.2035381 44.901459,8.2035562 44.901459,8.2035562 44.901418,8.2035381 44.901418,8.2035381 44.901459))',
'wms_getfeatureinfo_geometry_filter')
# Test feature info request with filter geometry in non-layer CRS
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER_GEOM=POLYGON ((913213.6839952 5606021.5399693, 913215.6988780 5606021.5399693, 913215.6988780 5606015.09643322, 913213.6839952 5606015.0964332, 913213.6839952 5606021.5399693))',
'wms_getfeatureinfo_geometry_filter_3857')
# Test feature info request with invalid query_layer
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=InvalidLayer&' +
'FEATURE_COUNT=10&FILTER_GEOM=POLYGON((8.2035381 44.901459,8.2035562 44.901459,8.2035562 44.901418,8.2035381 44.901418,8.2035381 44.901459))',
'wms_getfeatureinfo_invalid_query_layers')
# Test feature info request with '+' instead of ' ' in layers and
# query_layers parameters
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer+%C3%A8%C3%A9&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer+%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-xml')
# layer1 is a clone of layer0 but with a scale visibility. Thus,
# GetFeatureInfo response contains only a feature for layer0 and layer1
# is ignored for the required bbox. Without the scale visibility option,
# the feature for layer1 would have been in the response too.
mypath = self.testdata_path + "test_project_scalevisibility.qgs"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer0,layer1&styles=&' +
'VERSION=1.1.0&' +
'info_format=text%2Fxml&' +
'width=500&height=500&srs=EPSG%3A4326' +
'&bbox=8.1976,44.8998,8.2100,44.9027&' +
'query_layers=layer0,layer1&X=235&Y=243',
'wms_getfeatureinfo_notvisible',
'test_project_scalevisibility.qgs')
# Test GetFeatureInfo resolves "value map" widget values but also
# Server usage of qgs and gpkg file
mypath = self.testdata_path + "test_project_values.qgz"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer0&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=926&height=787&srs=EPSG%3A4326' +
'&bbox=912217,5605059,914099,5606652' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&QUERY_LAYERS=layer0&I=487&J=308',
'wms_getfeatureinfo-values0-text-xml',
'test_project_values.qgz')
# Test GetFeatureInfo on raster layer
self.wms_request_compare('GetFeatureInfo',
'&layers=landsat&styles=&' +
'info_format=text%2Fxml&transparent=true&' +
'width=500&height=500&srs=EPSG%3A3857&' +
'bbox=1989139.6,3522745.0,2015014.9,3537004.5&' +
'query_layers=landsat&X=250&Y=250',
'wms_getfeatureinfo-raster-text-xml')
def testGetFeatureInfoValueRelation(self):
"""Test GetFeatureInfo resolves "value relation" widget values. regression 18518"""
mypath = self.testdata_path + "test_project_values.qgz"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer1&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=926&height=787&srs=EPSG%3A4326' +
'&bbox=912217,5605059,914099,5606652' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=True' +
'&QUERY_LAYERS=layer1&I=487&J=308',
'wms_getfeatureinfo-values1-text-xml',
'test_project_values.qgz')
def testGetFeatureInfoValueRelationArray(self):
"""Test GetFeatureInfo on "value relation" widget with array field (multiple selections)"""
mypath = self.testdata_path + "test_project_values.qgz"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer3&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=926&height=787&srs=EPSG%3A4326' +
'&bbox=912217,5605059,914099,5606652' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=True' +
'&QUERY_LAYERS=layer3&I=487&J=308',
'wms_getfeatureinfo-values3-text-xml',
'test_project_values.qgz')
# TODO make GetFeatureInfo show what's in the display expression and
# enable test
@unittest.expectedFailure
def testGetFeatureInfoRelationReference(self):
"""Test GetFeatureInfo solves "relation reference" widget "display expression" values"""
mypath = self.testdata_path + "test_project_values.qgz"
self.wms_request_compare('GetFeatureInfo',
'&layers=layer2&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=926&height=787&srs=EPSG%3A4326' +
'&bbox=912217,5605059,914099,5606652' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=True' +
'&QUERY_LAYERS=layer2&I=487&J=308',
'wms_getfeatureinfo-values2-text-xml',
'test_project_values.qgz')
def testGetFeatureInfoFilterGPKG(self):
# 'test_project.qgz' is the same as 'test_project.qgs' but with a gpkg source + different fid
# Regression for #8656 Test getfeatureinfo response xml with gpkg datasource
# Mind the gap! (the space in the FILTER expression)
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' +
urllib.parse.quote(':"NAME" = \'two\''),
'wms_getfeatureinfo_filter_gpkg',
'test_project.qgz')
def testGetFeatureInfoFilter(self):
# Test getfeatureinfo response xml
# Regression for #8656
# Mind the gap! (the space in the FILTER expression)
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' +
urllib.parse.quote(':"NAME" = \'two\''),
'wms_getfeatureinfo_filter')
# Test a filter with NO condition results
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' +
urllib.parse.quote(
':"NAME" = \'two\' AND "utf8nameè" = \'no-results\''),
'wms_getfeatureinfo_filter_no_results')
# Test a filter with OR condition results
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' +
urllib.parse.quote(
':"NAME" = \'two\' OR "NAME" = \'three\''),
'wms_getfeatureinfo_filter_or')
# Test a filter with OR condition and UTF results
# Note that a layer name containing utf-8 chars cannot be upper-cased.
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' +
urllib.parse.quote(
':"NAME" = \'two\' OR "utf8nameè" = \'three èé↓\''),
'wms_getfeatureinfo_filter_or_utf8')
# Regression #18292 Server GetFeatureInfo FILTER search fails when
# WIDTH, HEIGHT are not specified
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'srs=EPSG%3A3857&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' +
urllib.parse.quote(':"NAME" = \'two\''),
'wms_getfeatureinfo_filter_no_width')
# Test a filter without CRS parameter
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&' +
'INFO_FORMAT=text%2Fxml&' +
'width=600&height=400&' +
'query_layers=testlayer%20%C3%A8%C3%A9&' +
'FEATURE_COUNT=10&FILTER=testlayer%20%C3%A8%C3%A9' +
urllib.parse.quote(':"NAME" = \'two\''),
'wms_getfeatureinfo_filter_no_crs')
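# Illustrative sketch (not part of the original suite): the FILTER value in
# the requests above is the percent-encoded layer name followed by a
# separately quoted expression. A hypothetical standalone reconstruction:
#   import urllib.parse
#   flt = urllib.parse.quote('testlayer èé') + urllib.parse.quote(':"NAME" = \'two\'')
#   # flt == 'testlayer%20%C3%A8%C3%A9%3A%22NAME%22%20%3D%20%27two%27'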
def testGetFeatureInfoTolerance(self):
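# Hedged note (assumption based on the QGIS Server docs, not in the
# original file): FI_POINT_TOLERANCE, FI_LINE_TOLERANCE and
# FI_POLYGON_TOLERANCE are search radii in pixels around the queried
# I/J position.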
self.wms_request_compare('GetFeatureInfo',
'&layers=layer3&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=400&height=200' +
'&bbox=913119.2,5605988.9,913316.0,5606047.4' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=False' +
'&QUERY_LAYERS=layer3&I=193&J=100' +
'&FI_POINT_TOLERANCE=0',
'wms_getfeatureinfo_point_tolerance_0_text_xml',
'test_project_values.qgz')
self.wms_request_compare('GetFeatureInfo',
'&layers=layer3&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=400&height=200' +
'&bbox=913119.2,5605988.9,913316.0,5606047.4' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=False' +
'&QUERY_LAYERS=layer3&I=193&J=100' +
'&FI_POINT_TOLERANCE=20',
'wms_getfeatureinfo_point_tolerance_20_text_xml',
'test_project_values.qgz')
self.wms_request_compare('GetFeatureInfo',
'&layers=ls2d&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=400&height=200' +
'&bbox=-50396.4,-2783.0,161715.8,114108.6' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=False' +
'&QUERY_LAYERS=ls2d&I=153&J=147' +
'&FI_LINE_TOLERANCE=0',
'wms_getfeatureinfo_line_tolerance_0_text_xml',
'test_project_values.qgz')
self.wms_request_compare('GetFeatureInfo',
'&layers=ls2d&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=400&height=200' +
'&bbox=-50396.4,-2783.0,161715.8,114108.6' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=False' +
'&QUERY_LAYERS=ls2d&I=153&J=147' +
'&FI_LINE_TOLERANCE=20',
'wms_getfeatureinfo_line_tolerance_20_text_xml',
'test_project_values.qgz')
self.wms_request_compare('GetFeatureInfo',
'&layers=p2d&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=400&height=200' +
'&bbox=-135832.0,-66482.4,240321.9,167300.4' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=False' +
'&QUERY_LAYERS=p2d&I=206&J=144' +
'&FI_POLYGON_TOLERANCE=0',
'wms_getfeatureinfo_polygon_tolerance_0_text_xml',
'test_project_values.qgz')
self.wms_request_compare('GetFeatureInfo',
'&layers=p2d&styles=&' +
'VERSION=1.3.0&' +
'info_format=text%2Fxml&' +
'width=400&height=200' +
'&bbox=-135832.0,-66482.4,240321.9,167300.4' +
'&CRS=EPSG:3857' +
'&FEATURE_COUNT=10' +
'&WITH_GEOMETRY=False' +
'&QUERY_LAYERS=p2d&I=206&J=144' +
'&FI_POLYGON_TOLERANCE=20',
'wms_getfeatureinfo_polygon_tolerance_20_text_xml',
'test_project_values.qgz')
def testGetFeatureInfoGML(self):
# Test getfeatureinfo response gml
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=application%2Fvnd.ogc.gml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo-text-gml')
# Test getfeatureinfo response gml with geometry
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=application%2Fvnd.ogc.gml&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'with_geometry=true',
'wms_getfeatureinfo-text-gml-geometry')
def testGetFeatureInfoJSON(self):
# simple test without geometry and info_format=application/json
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=application%2Fjson&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo_json',
normalizeJson=True)
# simple test without geometry and info_format=application/geo+json
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=application%2Fgeo%2Bjson&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320',
'wms_getfeatureinfo_geojson',
normalizeJson=True)
# test with several features and several layers
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9,fields_alias,exclude_attribute&styles=&' +
'info_format=application%2Fjson&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9,fields_alias,exclude_attribute&' +
'X=190&Y=320&FEATURE_COUNT=2&FI_POINT_TOLERANCE=200',
'wms_getfeatureinfo_multiple_json',
normalizeJson=True)
# simple test with geometry with underlying layer in 3857
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=application%2Fjson&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'with_geometry=true',
'wms_getfeatureinfo_geometry_json',
'test_project_epsg3857.qgs',
normalizeJson=True)
# simple test with geometry with underlying layer in 4326
self.wms_request_compare('GetFeatureInfo',
'&layers=testlayer%20%C3%A8%C3%A9&styles=&' +
'info_format=application%2Fjson&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=testlayer%20%C3%A8%C3%A9&X=190&Y=320&' +
'with_geometry=true',
'wms_getfeatureinfo_geometry_json',
'test_project.qgs',
normalizeJson=True)
# test with alias
self.wms_request_compare('GetFeatureInfo',
'&layers=fields_alias&styles=&' +
'info_format=application%2Fjson&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=fields_alias&X=190&Y=320',
'wms_getfeatureinfo_alias_json',
normalizeJson=True)
# test with excluded attributes
self.wms_request_compare('GetFeatureInfo',
'&layers=exclude_attribute&styles=&' +
'info_format=application%2Fjson&transparent=true&' +
'width=600&height=400&srs=EPSG%3A3857&bbox=913190.6389747962%2C' +
'5606005.488876367%2C913235.426296057%2C5606035.347090538&' +
'query_layers=exclude_attribute&X=190&Y=320',
'wms_getfeatureinfo_exclude_attribute_json',
normalizeJson=True)
# test with raster layer
self.wms_request_compare('GetFeatureInfo',
'&layers=landsat&styles=&' +
'info_format=application%2Fjson&transparent=true&' +
'width=500&height=500&srs=EPSG%3A3857&' +
'bbox=1989139.6,3522745.0,2015014.9,3537004.5&' +
'query_layers=landsat&X=250&Y=250',
'wms_getfeatureinfo_raster_json',
normalizeJson=True)
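# Hedged note (not in the original file): normalizeJson is assumed to be a
# helper flag of wms_request_compare that canonicalises the JSON payload
# before comparison, so key ordering and whitespace differences do not fail
# the test.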
def testGetFeatureInfoPostgresTypes(self):
# compare json list output with file
self.wms_request_compare('GetFeatureInfo',
'&layers=json' +
'&info_format=text%2Fxml' +
'&srs=EPSG%3A3857' +
'&QUERY_LAYERS=json' +
'&FILTER=json' +
urllib.parse.quote(':"pk" = 1'),
'get_postgres_types_json_list',
'test_project_postgres_types.qgs',
normalizeJson=True)
# compare dict output with file
self.wms_request_compare('GetFeatureInfo',
'&layers=json' +
'&info_format=text%2Fxml' +
'&srs=EPSG%3A3857' +
'&QUERY_LAYERS=json' +
'&FILTER=json' +
urllib.parse.quote(':"pk" = 2'),
'get_postgres_types_json_dict',
'test_project_postgres_types.qgs',
normalizeJson=True)
# compare decoded json field list
response_header, response_body, query_string = self.wms_request('GetFeatureInfo',
'&layers=json' +
'&info_format=text%2Fxml' +
'&srs=EPSG%3A3857' +
'&QUERY_LAYERS=json' +
'&FILTER=json' +
urllib.parse.quote(
':"pk" = 1'),
'test_project_postgres_types.qgs')
root = ET.fromstring(response_body)
for attribute in root.iter('Attribute'):
if attribute.get('name') == 'jvalue':
self.assertIsInstance(json.loads(attribute.get('value')), list)
self.assertEqual(json.loads(attribute.get('value')), [1, 2, 3])
self.assertEqual(
json.loads(
attribute.get('value')), [
1.0, 2.0, 3.0])
if attribute.get('name') == 'jbvalue':
self.assertIsInstance(json.loads(attribute.get('value')), list)
self.assertEqual(json.loads(attribute.get('value')), [4, 5, 6])
self.assertEqual(
json.loads(
attribute.get('value')), [
4.0, 5.0, 6.0])
# compare decoded json field dict
response_header, response_body, query_string = self.wms_request('GetFeatureInfo',
'&layers=json' +
'&info_format=text%2Fxml' +
'&srs=EPSG%3A3857' +
'&QUERY_LAYERS=json' +
'&FILTER=json' +
urllib.parse.quote(
':"pk" = 2'),
'test_project_postgres_types.qgs')
root = ET.fromstring(response_body)
for attribute in root.iter('Attribute'):
if attribute.get('name') == 'jvalue':
self.assertIsInstance(json.loads(attribute.get('value')), dict)
self.assertEqual(
json.loads(
attribute.get('value')), {
'a': 1, 'b': 2})
self.assertEqual(
json.loads(
attribute.get('value')), {
'a': 1.0, 'b': 2.0})
if attribute.get('name') == 'jbvalue':
self.assertIsInstance(json.loads(attribute.get('value')), dict)
self.assertEqual(
json.loads(
attribute.get('value')), {
'c': 4, 'd': 5})
self.assertEqual(
json.loads(
attribute.get('value')), {
'c': 4.0, 'd': 5.0})
def testGetFeatureInfoGroupedLayers(self):
"""Test that we can get feature info from the top and group layers"""
# areas+and+symbols (not nested)
self.wms_request_compare('GetFeatureInfo',
'&BBOX=52.44095517977704901,10.71171069440170776,52.440955186258563,10.71171070552261817' +
'&CRS=EPSG:4326' +
'&WIDTH=2&HEIGHT=2' +
'&QUERY_LAYERS=areas+and+symbols' +
'&INFO_FORMAT=application/json' +
'&I=0&J=1' +
'&FEATURE_COUNT=10',
'wms_getfeatureinfo_group_name_areas',
'test_project_wms_grouped_layers.qgs',
normalizeJson=True)
# areas+and+symbols (nested)
self.wms_request_compare('GetFeatureInfo',
'&BBOX=52.44095517977704901,10.71171069440170776,52.440955186258563,10.71171070552261817' +
'&CRS=EPSG:4326' +
'&WIDTH=2&HEIGHT=2' +
'&QUERY_LAYERS=areas+and+symbols' +
'&INFO_FORMAT=application/json' +
'&I=0&J=1' +
'&FEATURE_COUNT=10',
'wms_getfeatureinfo_group_name_areas',
'test_project_wms_grouped_nested_layers.qgs',
normalizeJson=True)
# as-areas-short-name
self.wms_request_compare('GetFeatureInfo',
'&BBOX=52.44095517977704901,10.71171069440170776,52.440955186258563,10.71171070552261817' +
'&CRS=EPSG:4326' +
'&WIDTH=2&HEIGHT=2' +
'&QUERY_LAYERS=as-areas-short-name' +
'&INFO_FORMAT=application/json' +
'&I=0&J=1' +
'&FEATURE_COUNT=10',
'wms_getfeatureinfo_group_name_areas',
'test_project_wms_grouped_nested_layers.qgs',
normalizeJson=True)
# Top level group: QGIS Server - Grouped Nested Layer
self.wms_request_compare('GetFeatureInfo',
'&BBOX=52.44095517977704901,10.71171069440170776,52.440955186258563,10.71171070552261817' +
'&CRS=EPSG:4326' +
'&WIDTH=2&HEIGHT=2' +
'&QUERY_LAYERS=QGIS+Server+-+Grouped Nested Layer' +
'&INFO_FORMAT=application/json' +
'&I=0&J=1' +
'&FEATURE_COUNT=10',
'wms_getfeatureinfo_group_name_top',
'test_project_wms_grouped_nested_layers.qgs',
normalizeJson=True)
# Multiple matches from 2 layer groups
self.wms_request_compare('GetFeatureInfo',
'&BBOX=52.44095517977704901,10.71171069440170776,52.440955186258563,10.71171070552261817' +
'&CRS=EPSG:4326' +
'&WIDTH=2&HEIGHT=2' +
'&QUERY_LAYERS=areas+and+symbols,city+and+district+boundaries' +
'&INFO_FORMAT=application/json' +
'&I=0&J=1' +
'&FEATURE_COUNT=10',
'wms_getfeatureinfo_group_name_areas_cities',
'test_project_wms_grouped_nested_layers.qgs',
normalizeJson=True)
# no_query group (nested)
self.wms_request_compare('GetFeatureInfo',
'&BBOX=52.44095517977704901,10.71171069440170776,52.440955186258563,10.71171070552261817' +
'&CRS=EPSG:4326' +
'&WIDTH=2&HEIGHT=2' +
'&QUERY_LAYERS=no_query' +
'&INFO_FORMAT=application/json' +
'&I=0&J=1' +
'&FEATURE_COUNT=10',
'wms_getfeatureinfo_group_no_query',
'test_project_wms_grouped_nested_layers.qgs',
normalizeJson=True)
# query_child group (nested)
self.wms_request_compare('GetFeatureInfo',
'&BBOX=52.44095517977704901,10.71171069440170776,52.440955186258563,10.71171070552261817' +
'&CRS=EPSG:4326' +
'&WIDTH=2&HEIGHT=2' +
'&QUERY_LAYERS=query_child' +
'&INFO_FORMAT=application/json' +
'&I=0&J=1' +
'&FEATURE_COUNT=10',
'wms_getfeatureinfo_group_query_child',
'test_project_wms_grouped_nested_layers.qgs',
normalizeJson=True)
# child_ok group (nested)
self.wms_request_compare('GetFeatureInfo',
'&BBOX=52.44095517977704901,10.71171069440170776,52.440955186258563,10.71171070552261817' +
'&CRS=EPSG:4326' +
'&WIDTH=2&HEIGHT=2' +
'&QUERY_LAYERS=child_ok' +
'&INFO_FORMAT=application/json' +
'&I=0&J=1' +
'&FEATURE_COUNT=10',
'wms_getfeatureinfo_group_query_child',
'test_project_wms_grouped_nested_layers.qgs',
normalizeJson=True)
# as_areas_query_copy == as-areas-short-name-query-copy (nested)
self.wms_request_compare('GetFeatureInfo',
'&BBOX=52.44095517977704901,10.71171069440170776,52.440955186258563,10.71171070552261817' +
'&CRS=EPSG:4326' +
'&WIDTH=2&HEIGHT=2' +
'&QUERY_LAYERS=as-areas-short-name-query-copy' +
'&INFO_FORMAT=application/json' +
'&I=0&J=1' +
'&FEATURE_COUNT=10',
'wms_getfeatureinfo_group_query_child',
'test_project_wms_grouped_nested_layers.qgs',
normalizeJson=True)
@unittest.skipIf(os.environ.get('TRAVIS', '') == 'true', "This test cannot run in TRAVIS because it relies on cascading external services")
def testGetFeatureInfoCascadingLayers(self):
"""Test that we can get feature info on cascading WMS layers"""
project_name = 'bug_gh31177_gfi_cascading_wms.qgs'
self.wms_request_compare('GetFeatureInfo',
'&BBOX=852729.31,5631138.51,853012.18,5631346.17' +
'&CRS=EPSG:3857' +
'&WIDTH=850&HEIGHT=624' +
'&QUERY_LAYERS=Alberate' +
'&INFO_FORMAT=application/vnd.ogc.gml' +
'&I=509&J=289' +
'&FEATURE_COUNT=10',
'wms_getfeatureinfo_cascading_issue31177',
project_name)
if __name__ == '__main__':
unittest.main()
|
maximon93/fabric-bolt
|
refs/heads/master
|
fabric_bolt/accounts/migrations/0002_auto_20140811_1921.py
|
14
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_default_groups(apps, schema_editor):
Group = apps.get_model('auth', 'Group')
Group.objects.get_or_create(name='Admin')
Group.objects.get_or_create(name='Deployer')
Group.objects.get_or_create(name='Historian')
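# Hedged addition (illustrative, not in the original migration): a reverse
# callable would make this data migration unapplyable; it could be passed as
# the second argument to migrations.RunPython below.
def remove_default_groups(apps, schema_editor):
    Group = apps.get_model('auth', 'Group')
    Group.objects.filter(name__in=['Admin', 'Deployer', 'Historian']).delete()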
class Migration(migrations.Migration):
dependencies = [
('accounts', '0001_initial'),
]
operations = [
migrations.RunPython(add_default_groups),
]
|
svn2github/django
|
refs/heads/master
|
django/contrib/gis/tests/geoadmin/urls.py
|
383
|
from django.conf.urls import patterns, include
from django.contrib import admin
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
)
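# Hedged note (not in the original file): patterns() was removed in
# Django 1.10; the equivalent modern routing would be roughly:
#   from django.conf.urls import url, include
#   urlpatterns = [url(r'^admin/', include(admin.site.urls))]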
|
erandros/py2html
|
refs/heads/master
|
src/syntax/temp.py
|
1
|
import re
from reader import Reader
from lines import RawLine
from comparators import RegexComparator, StringComparator
class Context:
def __init__(self, tree, level):
self.tree = tree
self.level = level
|
tschmidiger/CodeIgniter
|
refs/heads/3.0-stable
|
user_guide_src/source/conf.py
|
3
|
# -*- coding: utf-8 -*-
#
# CodeIgniter documentation build configuration file, created by
# sphinx-quickstart on Sun Aug 28 07:24:38 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.ifconfig', 'sphinxcontrib.phpdomain']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'CodeIgniter'
copyright = u'2014 - 2015, British Columbia Institute of Technology'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '3.0.2-dev'
# The full version, including alpha/beta/rc tags.
release = '3.0.2-dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :php:func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. php:function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'trac'
highlight_language = 'ci'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# Specifying a few options; just a starting point & we can play with it.
html_theme_options = {
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["./_themes"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = 'images/ci-icon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'CodeIgniterdoc'
html_copy_source = False
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'CodeIgniter.tex', u'CodeIgniter Documentation',
u'British Columbia Institute of Technology', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'codeigniter', u'CodeIgniter Documentation',
[u'British Columbia Institute of Technology'], 1)
]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'CodeIgniter'
epub_author = u'British Columbia Institute of Technology'
epub_publisher = u'British Columbia Institute of Technology'
epub_copyright = u'2014 - 2015, British Columbia Institute of Technology'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
|
vad/django-cms
|
refs/heads/develop
|
cms/south_migrations/0060_auto__add_field_page_xframe_options.py
|
63
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Page.xframe_options'
db.add_column(u'cms_page', 'xframe_options',
self.gf('django.db.models.fields.IntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Page.xframe_options'
db.delete_column(u'cms_page', 'xframe_options')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.globalpagepermission': {
'Meta': {'object_name': 'GlobalPagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recover_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('tree_id', 'lft')", 'unique_together': "(('publisher_is_draft', 'application_namespace'),)", 'object_name': 'Page'},
'application_namespace': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'application_urls': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'is_home': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'limit_visibility_in_menu': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'revision_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'djangocms_pages'", 'to': u"orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'default': "'INHERIT'", 'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'xframe_options': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'cms.pagemoderatorstate': {
'Meta': {'ordering': "('page', 'action', '-created')", 'object_name': 'PageModeratorState'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
},
'cms.pagepermission': {
'Meta': {'object_name': 'PagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grant_on': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.pageuser': {
'Meta': {'object_name': 'PageUser', '_ormbases': [u'auth.User']},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_users'", 'to': u"orm['auth.User']"}),
u'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
},
'cms.pageusergroup': {
'Meta': {'object_name': 'PageUserGroup', '_ormbases': [u'auth.Group']},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_usergroups'", 'to': u"orm['auth.User']"}),
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'cms.placeholderreference': {
'Meta': {'object_name': 'PlaceholderReference', 'db_table': "u'cmsplugin_placeholderreference'", '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'placeholder_ref': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'})
},
'cms.staticplaceholder': {
'Meta': {'object_name': 'StaticPlaceholder'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'blank': 'True'}),
'creation_method': ('django.db.models.fields.CharField', [], {'default': "'code'", 'max_length': '20', 'blank': 'True'}),
'dirty': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'draft': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'static_draft'", 'null': 'True', 'to': "orm['cms.Placeholder']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'public': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'static_public'", 'null': 'True', 'to': "orm['cms.Placeholder']"})
},
'cms.title': {
'Meta': {'unique_together': "(('language', 'page'),)", 'object_name': 'Title'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'has_url_overwrite': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'menu_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'meta_description': ('django.db.models.fields.TextField', [], {'max_length': '155', 'null': 'True', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'title_set'", 'to': "orm['cms.Page']"}),
'page_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'redirect': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'cms.usersettings': {
'Meta': {'object_name': 'UserSettings'},
'clipboard': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'djangocms_usersettings'", 'to': u"orm['auth.User']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sites.site': {
'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['cms']
|
MwanzanFelipe/rockletonfortune
|
refs/heads/master
|
lib/django/conf/locale/en_AU/formats.py
|
504
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j M Y' # '25 Oct 2006'
TIME_FORMAT = 'P' # '2:30 p.m.'
DATETIME_FORMAT = 'j M Y, P' # '25 Oct 2006, 2:30 p.m.'
YEAR_MONTH_FORMAT = 'F Y' # 'October 2006'
MONTH_DAY_FORMAT = 'j F' # '25 October'
SHORT_DATE_FORMAT = 'd/m/Y' # '25/10/2006'
SHORT_DATETIME_FORMAT = 'd/m/Y P' # '25/10/2006 2:30 p.m.'
FIRST_DAY_OF_WEEK = 0 # Sunday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06'
# '%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
# '%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
# '%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
# '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
]
DATETIME_INPUT_FORMATS = [
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
'%d/%m/%Y %H:%M:%S.%f', # '25/10/2006 14:30:59.000200'
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
'%d/%m/%Y', # '25/10/2006'
'%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59'
'%d/%m/%y %H:%M:%S.%f', # '25/10/06 14:30:59.000200'
'%d/%m/%y %H:%M', # '25/10/06 14:30'
'%d/%m/%y', # '25/10/06'
]
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
|
OneStopTransport/OST-FiWare-Lisbon
|
refs/heads/master
|
fiware_lisbon/fiware/tasks.py
|
1
|
#!/usr/bin/env python
# encoding: utf-8
from celery.task import task
from colorama import Fore
from utils.constants import CP_NAME
from utils.constants import AGENCY
from utils.constants import ROUTE
from utils.constants import STOP
from utils.constants import TRIP
from utils.constants import STOPTIME
from utils.constants import ID
from utils.errors import APIKeyError
from utils.errors import CrawlerError
from utils.errors import OSTError
from utils.errors import FiWareError
from utils.utils import get_error_message
from crawler import Crawler
from importer import FiWare
@task(name='transfer_gtfs_cb', ignore_result=True)
def transfer_gtfs_cb(agency_name=None):
"""
Fetches CP data from OST APIs and puts it on ContextBroker
Uses the Crawler to fetch data and FiWare to import it.
# 1st) Agency == CP
# 2nd) CP Routes
# 3rd) CP Stops
# 4th) CP Trips
# 5th) CP StopTimes
"""
try:
crawler = Crawler()
fiware = FiWare()
if agency_name is None:
agency_name = CP_NAME
print '> Inserting Agency... ',
agency = crawler.get_agency(agency_name)
agency_id = agency.get(ID)
fiware.insert_data(agency, content_type=AGENCY)
print 'Done.'
# ROUTES
print '> Inserting Routes... ',
routes = crawler.get_data_by_agency(agency_id, content_type=ROUTE)
fiware.insert_data(routes, content_type=ROUTE)
routes_cb = fiware.get_data(content_type=ROUTE)['contextResponses']
print 'Done:', len(routes_cb)
# STOPS
print '> Inserting Stops... ',
stops = crawler.get_data_by_agency(agency_id, content_type=STOP)
fiware.insert_data(stops, content_type=STOP)
stops_cb = fiware.get_data(content_type=STOP)['contextResponses']
print 'Done:', len(stops_cb)
# TRIPS
route_ids = fiware.get_ids(fiware.get_data(content_type=ROUTE))
print '> Inserting Trips... ',
trips = crawler.get_data_from_routes(route_ids, content_type=TRIP)
fiware.insert_data(trips, content_type=TRIP)
trips_cb = fiware.get_data(content_type=TRIP)['contextResponses']
print 'Done:', len(trips_cb)
# STOPTIMES
print '> Inserting StopTimes...',
times = crawler.get_data_from_routes(route_ids, content_type=STOPTIME)
fiware.insert_data(times, content_type=STOPTIME)
times_cb = fiware.get_data(content_type=STOPTIME)['contextResponses']
print 'Done:', len(times_cb)
except (APIKeyError, CrawlerError, OSTError, FiWareError) as error:
message = get_error_message(error)
print(Fore.RED + str(error) + Fore.RESET + ':' + message)
if __name__ == '__main__':
transfer_gtfs_cb()
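# Hedged usage sketch (not in the original file): as a Celery task this would
# normally be queued asynchronously rather than invoked inline, e.g.
#   transfer_gtfs_cb.delay('SomeAgency')  # agency name is a hypothetical example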
|
dazzzl/yowsup
|
refs/heads/master
|
yowsup/layers/protocol_presence/protocolentities/test_presence_available.py
|
70
|
from yowsup.layers.protocol_presence.protocolentities.presence_available import AvailablePresenceProtocolEntity
from yowsup.layers.protocol_presence.protocolentities.test_presence import PresenceProtocolEntityTest
class AvailablePresenceProtocolEntityTest(PresenceProtocolEntityTest):
def setUp(self):
super(AvailablePresenceProtocolEntityTest, self).setUp()
self.ProtocolEntity = AvailablePresenceProtocolEntity
self.node.setAttribute("type", "available")
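# Hedged note (not in the original file): the attribute set in setUp() mirrors
# the stanza this entity is expected to serialise to, i.e. a presence node of
# the form <presence type="available"/>.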
|
jymannob/CouchPotatoServer
|
refs/heads/develop
|
couchpotato/core/notifications/pushalot.py
|
81
|
import traceback
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
log = CPLog(__name__)
autoload = 'Pushalot'
class Pushalot(Notification):
urls = {
'api': 'https://pushalot.com/api/sendmessage'
}
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
data = {
'AuthorizationToken': self.conf('auth_token'),
'Title': self.default_title,
'Body': toUnicode(message),
'IsImportant': self.conf('important'),
'IsSilent': self.conf('silent'),
'Image': toUnicode(self.getNotificationImage('medium') + '?1'),
'Source': toUnicode(self.default_title)
}
headers = {
'Content-type': 'application/x-www-form-urlencoded'
}
try:
self.urlopen(self.urls['api'], headers = headers, data = data, show_error = False)
return True
except Exception:
log.error('PushAlot failed: %s', traceback.format_exc())
return False
config = [{
'name': 'pushalot',
'groups': [
{
'tab': 'notifications',
'list': 'notification_providers',
'name': 'pushalot',
'description': 'for Windows Phone and Windows 8',
'options': [
{
'name': 'enabled',
'default': 0,
'type': 'enabler',
},
{
'name': 'auth_token',
'label': 'Auth Token',
},
{
'name': 'silent',
'label': 'Silent',
'default': 0,
'type': 'bool',
'description': 'Don\'t send Toast notifications. Only update Live Tile',
},
{
'name': 'important',
'label': 'High Priority',
'default': 0,
'type': 'bool',
'description': 'Send message with High priority.',
},
{
'name': 'on_snatch',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Also send message when movie is snatched.',
},
],
}
],
}]
|
dapuck/pyleus
|
refs/heads/develop
|
pyleus/cli/commands/__init__.py
|
12133432
|