repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
canercandan/iot-project | refs/heads/master | deprecated/cegGtk/test/vncviewer.py | 1 | # IOT Copyright (C) 2010 CEG development team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2
# as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Authors: CEG <ceg@ionlythink.com>, http://www.ionlythink.com
import time

import gtk
import gtkvnc

# Build a minimal window embedding a VNC display widget.
vnc = gtkvnc.Display()
window = gtk.Window()
layout = gtk.VBox()
window.add(layout)
layout.add(vnc)

# Fix: quit the main loop when the window is closed; without this,
# gtk.main() never returns and the statements after it are unreachable.
window.connect("delete-event", lambda *args: gtk.main_quit())

vnc.realize()
vnc.open_host("localhost", "5900")

# Fix: the window was never shown, so the VNC widget stayed invisible.
window.show_all()

gtk.main()  # blocks until the window is closed

# Post-loop cleanup.
# NOTE(review): sending keys after the main loop has exited (connection
# likely torn down) looks like leftover experimentation -- confirm whether
# this was meant to run via a glib timeout inside the loop instead.
time.sleep(2)
vnc.send_keys(["Control_L", "Alt_L", "Del"])
vnc.close()
|
Cinntax/home-assistant | refs/heads/dev | homeassistant/components/sleepiq/sensor.py | 7 | """Support for SleepIQ sensors."""
from homeassistant.components import sleepiq
# Icon shown in the Home Assistant frontend for SleepIQ sensors.
ICON = "mdi:hotel"
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the SleepIQ sensors."""
    # This platform is only set up via discovery from the sleepiq component.
    if discovery_info is None:
        return

    data = sleepiq.DATA
    data.update()

    # Create one sensor per bed side that is actually present.
    entities = [
        SleepNumberSensor(data, bed_id, side)
        for bed_id, bed in data.beds.items()
        for side in sleepiq.SIDES
        if getattr(bed, side) is not None
    ]
    add_entities(entities)
class SleepNumberSensor(sleepiq.SleepIQSensor):
    """Representation of a SleepIQ sleep-number sensor."""

    def __init__(self, sleepiq_data, bed_id, side):
        """Initialize the sensor and fetch an initial reading."""
        super().__init__(sleepiq_data, bed_id, side)
        self._state = None
        self.type = sleepiq.SLEEP_NUMBER
        self._name = sleepiq.SENSOR_TYPES[self.type]
        self.update()

    @property
    def state(self):
        """Return the current sleep number."""
        return self._state

    @property
    def icon(self):
        """Return the icon to use in the frontend."""
        return ICON

    def update(self):
        """Get the latest data from SleepIQ and update the state."""
        super().update()
        self._state = self.side.sleep_number
|
rwth-ti/gr-ofdm | refs/heads/master | python/ofdm/qa_interp_cir_set_noncir_to_zero.py | 1 | #!/usr/bin/env python
#
# Copyright 2014 Institute for Theoretical Information Technology,
# RWTH Aachen University
# www.ti.rwth-aachen.de
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
import ofdm_swig as ofdm
class qa_interp_cir_set_noncir_to_zero (gr_unittest.TestCase):
    """QA test skeleton for the interp_cir_set_noncir_to_zero block."""

    def setUp (self):
        # Fresh top block (flow-graph container) for every test case.
        self.tb = gr.top_block ()

    def tearDown (self):
        # Drop the reference so the flow graph can be torn down.
        self.tb = None

    def test_001_t (self):
        # set up fg
        # NOTE(review): the flow graph is empty and nothing is asserted --
        # the block under test is never instantiated, so this only verifies
        # that an empty top block runs. TODO: add a real check.
        self.tb.run ()
        # check data
if __name__ == '__main__':
    # Run the QA suite and emit an XML report for the GNU Radio test runner.
    gr_unittest.run(qa_interp_cir_set_noncir_to_zero, "qa_interp_cir_set_noncir_to_zero.xml")
|
harukaeru/Brython-Django | refs/heads/master | static/brython/www/speed/benchmarks/create_function_complex_args.py | 9 | for i in range(1000000):
def f(x, y=1, *args, **kw):
pass
|
openstack/nova | refs/heads/master | doc/ext/versioned_notifications.py | 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This provides a sphinx extension able to list the implemented versioned
notifications into the developer documentation.
It is used via a single directive in the .rst file
.. versioned_notifications::
"""
import os
from docutils import nodes
from docutils.parsers import rst
import importlib
from oslo_serialization import jsonutils
import pkgutil
from nova.notifications.objects import base as notification
from nova.objects import base
from nova.tests import json_ref
import nova.utils
class VersionedNotificationDirective(rst.Directive):
    """Sphinx directive rendering a table of all versioned notifications.

    For every registered notification object the table lists the event
    type, the notification and payload class names, and a collapsible
    JSON sample loaded from SAMPLE_ROOT.
    """

    # Directory (relative to the doc build cwd) holding the JSON samples.
    SAMPLE_ROOT = 'doc/notification_samples/'
    # jQuery toggle snippet; all three %s placeholders receive the same
    # event-type string so each sample gets its own hide/show button.
    TOGGLE_SCRIPT = """
<!-- jQuery -->
<script type="text/javascript" src="../_static/js/jquery-3.2.1.min.js">
</script>
<script>
jQuery(document).ready(function(){
jQuery('#%s-div').toggle('show');
jQuery('#%s-hideshow').on('click', function(event) {
jQuery('#%s-div').toggle('show');
});
});
</script>
"""

    def run(self):
        """Directive entry point: collect notifications, emit the table."""
        notifications = self._collect_notifications()
        return self._build_markup(notifications)

    def _import_all_notification_packages(self):
        # Import every module under nova.notifications.objects so that all
        # notification classes register themselves with the object registry.
        list(map(lambda module: importlib.import_module(module),
                 ('nova.notifications.objects.' + name for _, name, _ in
                  pkgutil.iter_modules(nova.notifications.objects.__path__))))

    def _collect_notifications(self):
        """Return sorted (class, payload class, sample file) triples."""
        # If you do not see your notification sample showing up in the docs
        # be sure that the sample filename matches what is registered on the
        # versioned notification object class using the
        # @base.notification_sample decorator.
        self._import_all_notification_packages()
        base.NovaObjectRegistry.register_notification_objects()
        notifications = {}
        ovos = base.NovaObjectRegistry.obj_classes()
        for name, cls in ovos.items():
            # The registry maps a name to a list of versions; take the first.
            cls = cls[0]
            if (issubclass(cls, notification.NotificationBase) and
                    cls != notification.NotificationBase):
                payload_name = cls.fields['payload'].objname
                payload_cls = ovos[payload_name][0]
                for sample in cls.samples:
                    # Each sample file may only be claimed once.
                    if sample in notifications:
                        raise ValueError('Duplicated usage of %s '
                                         'sample file detected' % sample)
                    notifications[sample] = ((cls.__name__,
                                              payload_cls.__name__,
                                              sample))
        return sorted(notifications.values())

    def _build_markup(self, notifications):
        """Build the docutils table node for the given notifications."""
        content = []
        cols = ['Event type', 'Notification class', 'Payload class', 'Sample']
        table = nodes.table()
        content.append(table)
        group = nodes.tgroup(cols=len(cols))
        table.append(group)
        head = nodes.thead()
        group.append(head)
        for _ in cols:
            group.append(nodes.colspec(colwidth=1))
        body = nodes.tbody()
        group.append(body)
        # fill the table header
        row = nodes.row()
        body.append(row)
        for col_name in cols:
            col = nodes.entry()
            row.append(col)
            text = nodes.strong(text=col_name)
            col.append(text)
        # fill the table content, one notification per row
        for name, payload, sample_file in notifications:
            # e.g. "instance-create-end.json" -> "instance.create.end"
            event_type = sample_file[0: -5].replace('-', '.')
            row = nodes.row()
            body.append(row)
            col = nodes.entry()
            row.append(col)
            text = nodes.literal(text=event_type)
            col.append(text)
            col = nodes.entry()
            row.append(col)
            text = nodes.literal(text=name)
            col.append(text)
            col = nodes.entry()
            row.append(col)
            text = nodes.literal(text=payload)
            col.append(text)
            col = nodes.entry()
            row.append(col)
            # Load the sample, resolve $ref includes, and pretty-print it.
            with open(os.path.join(self.SAMPLE_ROOT, sample_file), 'r') as f:
                sample_content = f.read()
            sample_obj = jsonutils.loads(sample_content)
            sample_obj = json_ref.resolve_refs(
                sample_obj,
                base_path=os.path.abspath(self.SAMPLE_ROOT))
            sample_content = jsonutils.dumps(sample_obj,
                                             sort_keys=True, indent=4,
                                             separators=(',', ': '))
            event_type = sample_file[0: -5]
            html_str = self.TOGGLE_SCRIPT % ((event_type, ) * 3)
            html_str += ("<input type='button' id='%s-hideshow' "
                         "value='hide/show sample'>" % event_type)
            html_str += ("<div id='%s-div'><pre>%s</pre></div>"
                         % (event_type, sample_content))
            raw = nodes.raw('', html_str, format="html")
            col.append(raw)
        return content
def setup(app):
    """Sphinx extension entry point: register the directive."""
    app.add_directive('versioned_notifications',
                      VersionedNotificationDirective)
    # This extension keeps no mutable global state, so parallel builds are safe.
    return {'parallel_read_safe': True, 'parallel_write_safe': True}
|
mKeRix/home-assistant | refs/heads/dev | homeassistant/components/gios/__init__.py | 6 | """The GIOS component."""
import logging
from aiohttp.client_exceptions import ClientConnectorError
from async_timeout import timeout
from gios import ApiError, Gios, InvalidSensorsData, NoStationError
from homeassistant.core import Config, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import CONF_STATION_ID, DOMAIN, SCAN_INTERVAL
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
    """Set up configured GIOS."""
    # GIOS is configured exclusively via config entries; YAML setup is a no-op.
    return True
async def async_setup_entry(hass, config_entry):
    """Set up GIOS as config entry."""
    station_id = config_entry.data[CONF_STATION_ID]
    _LOGGER.debug("Using station_id: %s", station_id)

    session = async_get_clientsession(hass)
    coordinator = GiosDataUpdateCoordinator(hass, session, station_id)

    # Require one successful refresh before completing setup.
    await coordinator.async_refresh()
    if not coordinator.last_update_success:
        raise ConfigEntryNotReady

    hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = coordinator

    hass.async_create_task(
        hass.config_entries.async_forward_entry_setup(config_entry, "air_quality")
    )
    return True
async def async_unload_entry(hass, config_entry):
    """Unload a config entry."""
    # Drop the stored coordinator first, then tear down the platform.
    entry_id = config_entry.entry_id
    hass.data[DOMAIN].pop(entry_id)
    await hass.config_entries.async_forward_entry_unload(config_entry, "air_quality")
    return True
class GiosDataUpdateCoordinator(DataUpdateCoordinator):
    """Define an object to hold GIOS data."""

    def __init__(self, hass, session, station_id):
        """Initialize the coordinator and the underlying Gios client."""
        self.gios = Gios(station_id, session)
        super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL)

    async def _async_update_data(self):
        """Update data via library.

        Raises UpdateFailed on any library/transport error or when the
        station returns no sensor data.
        """
        try:
            # Fix: async_timeout's timeout() is an *async* context manager;
            # the plain `with` form is deprecated/removed in newer releases.
            async with timeout(30):
                await self.gios.update()
        except (
            ApiError,
            NoStationError,
            ClientConnectorError,
            InvalidSensorsData,
        ) as error:
            # Chain the original exception for a useful traceback.
            raise UpdateFailed(error) from error
        if not self.gios.data:
            raise UpdateFailed("Invalid sensors data")
        return self.gios.data
|
nteract/papermill | refs/heads/main | papermill/cli.py | 1 | # -*- coding: utf-8 -*-
"""Main `papermill` interface."""
import os
import sys
from stat import S_ISFIFO
import nbclient
import traceback
import base64
import logging
import click
import yaml
import platform
from .execute import execute_notebook
from .iorw import read_yaml_file, NoDatesSafeLoader
from .inspection import display_notebook_help
from . import __version__ as papermill_version
# Silence click's unicode-literals warning (relevant on older click/py2).
click.disable_unicode_literals_warning = True
# True when stdin is a pipe/FIFO: a notebook may be piped in.
INPUT_PIPED = S_ISFIFO(os.fstat(0).st_mode)
# True when stdout is not a TTY: output may be piped onward.
OUTPUT_PIPED = not sys.stdout.isatty()
def print_papermill_version(ctx, param, value):
    """Click eager callback: print version/location info, then exit."""
    if not value:
        # Flag not given; let normal command processing continue.
        return
    pyver = platform.python_version()
    print("%s from %s (%s)" % (papermill_version, __file__, pyver))
    ctx.exit()
@click.command(context_settings=dict(help_option_names=['-h', '--help']))
@click.pass_context
# NOTEBOOK_PATH is optional when stdin is a pipe (notebook read from stdin).
@click.argument('notebook_path', required=not INPUT_PIPED)
@click.argument('output_path', default="")
@click.option(
    '--help-notebook',
    is_flag=True,
    default=False,
    help='Display parameters information for the given notebook path.',
)
# Parameter sources, applied in this order (later sources win):
# base64 < file < yaml < typed -p pairs < raw -r pairs.
@click.option(
    '--parameters', '-p', nargs=2, multiple=True, help='Parameters to pass to the parameters cell.'
)
@click.option(
    '--parameters_raw', '-r', nargs=2, multiple=True, help='Parameters to be read as raw string.'
)
@click.option(
    '--parameters_file', '-f', multiple=True, help='Path to YAML file containing parameters.'
)
@click.option(
    '--parameters_yaml', '-y', multiple=True, help='YAML string to be used as parameters.'
)
@click.option(
    '--parameters_base64', '-b', multiple=True, help='Base64 encoded YAML string as parameters.'
)
@click.option(
    '--inject-input-path',
    is_flag=True,
    default=False,
    help="Insert the path of the input notebook as PAPERMILL_INPUT_PATH as a notebook parameter.",
)
@click.option(
    '--inject-output-path',
    is_flag=True,
    default=False,
    help="Insert the path of the output notebook as PAPERMILL_OUTPUT_PATH as a notebook parameter.",
)
@click.option(
    '--inject-paths',
    is_flag=True,
    default=False,
    help=(
        "Insert the paths of input/output notebooks as PAPERMILL_INPUT_PATH/PAPERMILL_OUTPUT_PATH"
        " as notebook parameters."
    ),
)
@click.option('--engine', help='The execution engine name to use in evaluating the notebook.')
@click.option(
    '--request-save-on-cell-execute/--no-request-save-on-cell-execute',
    default=True,
    help='Request save notebook after each cell execution',
)
@click.option(
    '--autosave-cell-every',
    default=30,
    type=int,
    help='How often in seconds to autosave the notebook during long cell executions (0 to disable)',
)
@click.option(
    '--prepare-only/--prepare-execute',
    default=False,
    help="Flag for outputting the notebook without execution, but with parameters applied.",
)
@click.option(
    '--kernel',
    '-k',
    help='Name of kernel to run. Ignores kernel name in the notebook document metadata.'
)
@click.option(
    '--language',
    '-l',
    help='Language for notebook execution. Ignores language in the notebook document metadata.'
)
@click.option('--cwd', default=None, help='Working directory to run notebook in.')
@click.option(
    '--progress-bar/--no-progress-bar', default=None, help="Flag for turning on the progress bar."
)
@click.option(
    '--log-output/--no-log-output',
    default=False,
    help="Flag for writing notebook output to the configured logger.",
)
@click.option(
    '--stdout-file',
    type=click.File(mode='w', encoding='utf-8'),
    help="File to write notebook stdout output to.",
)
@click.option(
    '--stderr-file',
    type=click.File(mode='w', encoding='utf-8'),
    help="File to write notebook stderr output to.",
)
@click.option(
    '--log-level',
    type=click.Choice(['NOTSET', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']),
    default='INFO',
    help='Set log level',
)
@click.option(
    '--start-timeout',
    '--start_timeout',  # Backwards compatible naming
    type=int,
    default=60,
    help="Time in seconds to wait for kernel to start.",
)
@click.option(
    '--execution-timeout',
    type=int,
    help="Time in seconds to wait for each cell before failing execution (default: forever)",
)
@click.option('--report-mode/--no-report-mode', default=False, help="Flag for hiding input.")
@click.option(
    '--version',
    is_flag=True,
    callback=print_papermill_version,
    expose_value=False,
    is_eager=True,
    help='Flag for displaying the version.',
)
def papermill(
    click_ctx,
    notebook_path,
    output_path,
    help_notebook,
    parameters,
    parameters_raw,
    parameters_file,
    parameters_yaml,
    parameters_base64,
    inject_input_path,
    inject_output_path,
    inject_paths,
    engine,
    request_save_on_cell_execute,
    autosave_cell_every,
    prepare_only,
    kernel,
    language,
    cwd,
    progress_bar,
    log_output,
    log_level,
    start_timeout,
    execution_timeout,
    report_mode,
    stdout_file,
    stderr_file,
):
    """This utility executes a single notebook in a subprocess.

    Papermill takes a source notebook, applies parameters to the source
    notebook, executes the notebook with the specified kernel, and saves the
    output in the destination notebook.

    The NOTEBOOK_PATH and OUTPUT_PATH can now be replaced by `-` representing
    stdout and stderr, or by the presence of pipe inputs / outputs.
    Meaning that

    `<generate input>... | papermill | ...<process output>`

    with `papermill - -` being implied by the pipes will read a notebook
    from stdin and write it out to stdout.
    """
    # OUTPUT_PATH is only mandatory when neither stdin nor stdout is piped.
    if not help_notebook:
        required_output_path = not (INPUT_PIPED or OUTPUT_PIPED)
        if required_output_path and not output_path:
            raise click.UsageError("Missing argument 'OUTPUT_PATH'")

    # With piped stdin and a single path argument, treat that argument as
    # the output path and read the input notebook from stdin.
    if INPUT_PIPED and notebook_path and not output_path:
        input_path = '-'
        output_path = notebook_path
    else:
        input_path = notebook_path or '-'
        output_path = output_path or '-'

    if output_path == '-':
        # Save notebook to stdout just once
        request_save_on_cell_execute = False

        # Reduce default log level if we pipe to stdout
        if log_level == 'INFO':
            log_level = 'ERROR'

    elif progress_bar is None:
        progress_bar = not log_output

    logging.basicConfig(level=log_level, format="%(message)s")

    # Read in Parameters
    parameters_final = {}
    if inject_input_path or inject_paths:
        parameters_final['PAPERMILL_INPUT_PATH'] = input_path
    if inject_output_path or inject_paths:
        parameters_final['PAPERMILL_OUTPUT_PATH'] = output_path
    # Merge parameter sources; later sources override earlier ones.
    for params in parameters_base64 or []:
        parameters_final.update(yaml.load(base64.b64decode(params), Loader=NoDatesSafeLoader) or {})
    for files in parameters_file or []:
        parameters_final.update(read_yaml_file(files) or {})
    for params in parameters_yaml or []:
        parameters_final.update(yaml.load(params, Loader=NoDatesSafeLoader) or {})
    for name, value in parameters or []:
        parameters_final[name] = _resolve_type(value)
    for name, value in parameters_raw or []:
        parameters_final[name] = value

    if help_notebook:
        sys.exit(display_notebook_help(click_ctx, notebook_path, parameters_final))

    try:
        execute_notebook(
            input_path=input_path,
            output_path=output_path,
            parameters=parameters_final,
            engine_name=engine,
            request_save_on_cell_execute=request_save_on_cell_execute,
            autosave_cell_every=autosave_cell_every,
            prepare_only=prepare_only,
            kernel_name=kernel,
            language=language,
            progress_bar=progress_bar,
            log_output=log_output,
            stdout_file=stdout_file,
            stderr_file=stderr_file,
            start_timeout=start_timeout,
            report_mode=report_mode,
            cwd=cwd,
            execution_timeout=execution_timeout,
        )
    except nbclient.exceptions.DeadKernelError:
        # Exiting with a special exit code for dead kernels
        traceback.print_exc()
        sys.exit(138)
def _resolve_type(value):
if value == "True":
return True
elif value == "False":
return False
elif value == "None":
return None
elif _is_int(value):
return int(value)
elif _is_float(value):
return float(value)
else:
return value
def _is_int(value):
"""Use casting to check if value can convert to an `int`."""
try:
int(value)
except ValueError:
return False
else:
return True
def _is_float(value):
"""Use casting to check if value can convert to a `float`."""
try:
float(value)
except ValueError:
return False
else:
return True
|
jamespcole/home-assistant | refs/heads/master | homeassistant/components/aquostv/media_player.py | 4 | """
Support for interface with an Aquos TV.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.aquostv/
"""
import logging
import voluptuous as vol
from homeassistant.components.media_player import (
MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.components.media_player.const import (
SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK, SUPPORT_SELECT_SOURCE, SUPPORT_TURN_OFF,
SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP)
from homeassistant.const import (
CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT, CONF_TIMEOUT,
CONF_USERNAME, STATE_OFF, STATE_ON)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['sharp_aquos_rc==0.3.2']

_LOGGER = logging.getLogger(__name__)

# Configuration defaults.
DEFAULT_NAME = 'Sharp Aquos TV'
DEFAULT_PORT = 10002
DEFAULT_USERNAME = 'admin'
DEFAULT_PASSWORD = 'password'
DEFAULT_TIMEOUT = 0.5
DEFAULT_RETRIES = 2

# Base bitmask of media-player features every Aquos TV supports;
# SUPPORT_TURN_ON is only added when 'power_on_enabled' is configured.
SUPPORT_SHARPTV = SUPPORT_TURN_OFF | \
    SUPPORT_NEXT_TRACK | SUPPORT_PAUSE | SUPPORT_PREVIOUS_TRACK | \
    SUPPORT_SELECT_SOURCE | SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_STEP | \
    SUPPORT_VOLUME_SET | SUPPORT_PLAY

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_HOST): cv.string,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
    vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
    vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
    # NOTE(review): cv.string for a timeout and a retry count looks odd,
    # and setup_platform never reads these options -- confirm intent.
    vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.string,
    vol.Optional('retries', default=DEFAULT_RETRIES): cv.string,
    vol.Optional('power_on_enabled', default=False): cv.boolean,
})

# Mapping of Aquos input numbers to human-readable source names.
SOURCES = {0: 'TV / Antenna',
           1: 'HDMI_IN_1',
           2: 'HDMI_IN_2',
           3: 'HDMI_IN_3',
           4: 'HDMI_IN_4',
           5: 'COMPONENT IN',
           6: 'VIDEO_IN_1',
           7: 'VIDEO_IN_2',
           8: 'PC_IN'}
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Sharp Aquos TV platform."""
    import sharp_aquos_rc

    name = config.get(CONF_NAME)
    port = config.get(CONF_PORT)
    username = config.get(CONF_USERNAME)
    password = config.get(CONF_PASSWORD)
    power_on_enabled = config.get('power_on_enabled')

    # Discovery info is "host" or "host:port".
    if discovery_info:
        _LOGGER.debug('%s', discovery_info)
        parts = discovery_info.split(':')
        host = parts[0]
        if len(parts) > 1:
            port = parts[1]
        # NOTE(review): timeouts/retries here (20) and below (15, 1) are
        # hard-coded; the CONF_TIMEOUT and 'retries' options are unused.
        remote = sharp_aquos_rc.TV(host, port, username, password, timeout=20)
        add_entities([SharpAquosTVDevice(name, remote, power_on_enabled)])
        return True

    host = config.get(CONF_HOST)
    remote = sharp_aquos_rc.TV(host, port, username, password, 15, 1)
    add_entities([SharpAquosTVDevice(name, remote, power_on_enabled)])
    return True
def _retry(func):
"""Handle query retries."""
def wrapper(obj, *args, **kwargs):
"""Wrap all query functions."""
update_retries = 5
while update_retries > 0:
try:
func(obj, *args, **kwargs)
break
except (OSError, TypeError, ValueError):
update_retries -= 1
if update_retries == 0:
obj.set_state(STATE_OFF)
return wrapper
class SharpAquosTVDevice(MediaPlayerDevice):
    """Representation of a Sharp Aquos TV."""

    def __init__(self, name, remote, power_on_enabled=False):
        """Initialize the aquos device."""
        self._power_on_enabled = power_on_enabled
        # Fix: compute supported features per instance instead of mutating
        # the module-level SUPPORT_SHARPTV bitmask, which leaked
        # SUPPORT_TURN_ON into every other configured Aquos TV.
        self._supported_features = SUPPORT_SHARPTV
        if self._power_on_enabled:
            self._supported_features |= SUPPORT_TURN_ON
        self._name = name
        # Assume that the TV is not muted
        self._muted = False
        self._state = None
        self._remote = remote
        self._volume = 0
        self._source = None
        self._source_list = list(SOURCES.values())

    def set_state(self, state):
        """Set TV state (used by the _retry decorator on total failure)."""
        self._state = state

    @_retry
    def update(self):
        """Retrieve the latest data."""
        if self._remote.power() == 1:
            self._state = STATE_ON
        else:
            self._state = STATE_OFF
        # Set TV to be able to remotely power on
        if self._power_on_enabled:
            self._remote.power_on_command_settings(2)
        else:
            self._remote.power_on_command_settings(0)
        # Get mute state
        if self._remote.mute() == 2:
            self._muted = False
        else:
            self._muted = True
        # Get source
        self._source = SOURCES.get(self._remote.input())
        # Get volume (remote reports 0..60; normalize to 0..1)
        self._volume = self._remote.volume() / 60

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    @property
    def source(self):
        """Return the current source."""
        return self._source

    @property
    def source_list(self):
        """Return the source list."""
        return self._source_list

    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        return self._volume

    @property
    def is_volume_muted(self):
        """Boolean if volume is currently muted."""
        return self._muted

    @property
    def supported_features(self):
        """Flag media player features that are supported."""
        return self._supported_features

    @_retry
    def turn_off(self):
        """Turn off tvplayer."""
        self._remote.power(0)

    @_retry
    def volume_up(self):
        """Volume up the media player."""
        self._remote.volume(int(self._volume * 60) + 2)

    @_retry
    def volume_down(self):
        """Volume down media player."""
        self._remote.volume(int(self._volume * 60) - 2)

    @_retry
    def set_volume_level(self, volume):
        """Set Volume media player."""
        self._remote.volume(int(volume * 60))

    @_retry
    def mute_volume(self, mute):
        """Send mute command."""
        # NOTE(review): the `mute` argument is ignored; remote.mute(0)
        # appears to toggle -- confirm against sharp_aquos_rc behavior.
        self._remote.mute(0)

    @_retry
    def turn_on(self):
        """Turn the media player on."""
        self._remote.power(1)

    @_retry
    def media_play_pause(self):
        """Simulate play pause media player."""
        self._remote.remote_button(40)

    @_retry
    def media_play(self):
        """Send play command."""
        self._remote.remote_button(16)

    @_retry
    def media_pause(self):
        """Send pause command."""
        self._remote.remote_button(16)

    @_retry
    def media_next_track(self):
        """Send next track command."""
        self._remote.remote_button(21)

    @_retry
    def media_previous_track(self):
        """Send the previous track command."""
        self._remote.remote_button(19)

    def select_source(self, source):
        """Set the input source."""
        for key, value in SOURCES.items():
            if source == value:
                self._remote.input(key)
|
seankelly/buildbot | refs/heads/master | master/buildbot/util/maildir.py | 1 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
This is a class which watches a maildir for new messages. It uses the
linux dirwatcher API (if available) to look for new files. The
.messageReceived method is invoked with the filename of the new message,
relative to the top of the maildir (so it will look like "new/blahblah").
"""
from __future__ import absolute_import
from __future__ import print_function
import os
from twisted.application import internet
from twisted.internet import defer
from twisted.internet import reactor
# We have to put it here, since we use it to provide feedback
from twisted.python import log
from twisted.python import runtime
from buildbot.util import service
dnotify = None
try:
import dnotify
except ImportError:
log.msg("unable to import dnotify, so Maildir will use polling instead")
class NoSuchMaildir(Exception):
    """Raised when the configured maildir lacks new/ and cur/ subdirectories."""
class MaildirService(service.BuildbotService):
    """Watch a maildir's new/ subdirectory and deliver new messages.

    Uses the linux dnotify API when available, otherwise polls every
    ``pollinterval`` seconds.  Subclasses must implement
    ``messageReceived``.
    """

    pollinterval = 10  # only used if we don't have DNotify

    def __init__(self, basedir=None):
        # NOTE(review): calls AsyncMultiService.__init__ although the class
        # derives from BuildbotService -- confirm this is intentional.
        service.AsyncMultiService.__init__(self)
        if basedir:
            self.setBasedir(basedir)
        self.files = []           # filenames already seen in new/
        self.dnotify = None       # active DNotify watcher, if any
        self.timerService = None  # polling fallback

    def setBasedir(self, basedir):
        # some users of MaildirService (scheduler.Try_Jobdir, in particular)
        # don't know their basedir until setServiceParent, since it is
        # relative to the buildmaster's basedir. So let them set it late. We
        # don't actually need it until our own startService.
        self.basedir = basedir
        self.newdir = os.path.join(self.basedir, "new")
        self.curdir = os.path.join(self.basedir, "cur")

    def startService(self):
        """Begin watching; raises NoSuchMaildir on a bad directory layout."""
        if not os.path.isdir(self.newdir) or not os.path.isdir(self.curdir):
            raise NoSuchMaildir("invalid maildir '%s'" % self.basedir)
        try:
            if dnotify:
                # we must hold an fd open on the directory, so we can get
                # notified when it changes.
                self.dnotify = dnotify.DNotify(self.newdir,
                                               self.dnotify_callback,
                                               [dnotify.DNotify.DN_CREATE])
        except (IOError, OverflowError):
            # IOError is probably linux<2.4.19, which doesn't support
            # dnotify. OverflowError will occur on some 64-bit machines
            # because of a python bug
            log.msg("DNotify failed, falling back to polling")
        if not self.dnotify:
            self.timerService = internet.TimerService(
                self.pollinterval, self.poll)
            self.timerService.setServiceParent(self)
        self.poll()
        return service.AsyncMultiService.startService(self)

    def dnotify_callback(self):
        log.msg("dnotify noticed something, now polling")
        # give it a moment. I found that qmail had problems when the message
        # was removed from the maildir instantly. It shouldn't, that's what
        # maildirs are made for. I wasn't able to eyeball any reason for the
        # problem, and safecat didn't behave the same way, but qmail reports
        # "Temporary_error_on_maildir_delivery" (qmail-local.c:165,
        # maildir_child() process exited with rc not in 0,2,3,4). Not sure
        # why, and I'd have to hack qmail to investigate further, so it's
        # easier to just wait a second before yanking the message out of new/
        reactor.callLater(0.1, self.poll)

    def stopService(self):
        """Tear down the dnotify watcher and/or the polling timer."""
        if self.dnotify:
            self.dnotify.remove()
            self.dnotify = None
        if self.timerService is not None:
            self.timerService.disownServiceParent()
            self.timerService = None
        return service.AsyncMultiService.stopService(self)

    @defer.inlineCallbacks
    def poll(self):
        """Scan new/ for fresh messages and dispatch messageReceived."""
        try:
            assert self.basedir
            # Forget files that have disappeared from new/.
            # Fix: the original removed entries from self.files while
            # iterating over it, which skips the element following each
            # removal; rebuild the list instead.
            self.files = [f for f in self.files
                          if os.path.isfile(os.path.join(self.newdir, f))]
            newfiles = []
            for f in os.listdir(self.newdir):
                if f not in self.files:
                    newfiles.append(f)
            self.files.extend(newfiles)
            for n in newfiles:
                try:
                    yield self.messageReceived(n)
                except Exception:
                    log.err(
                        None, "while reading '%s' from maildir '%s':" % (n, self.basedir))
        except Exception:
            log.err(None, "while polling maildir '%s':" % (self.basedir,))

    def moveToCurDir(self, filename):
        """Move a message from new/ to cur/ and return an open file for it."""
        if runtime.platformType == "posix":
            # open the file before moving it, because I'm afraid that once
            # it's in cur/, someone might delete it at any moment
            path = os.path.join(self.newdir, filename)
            f = open(path, "r")
            os.rename(os.path.join(self.newdir, filename),
                      os.path.join(self.curdir, filename))
        elif runtime.platformType == "win32":
            # do this backwards under windows, because you can't move a file
            # that somebody is holding open. This was causing a Permission
            # Denied error on bear's win32-twisted1.3 worker.
            os.rename(os.path.join(self.newdir, filename),
                      os.path.join(self.curdir, filename))
            path = os.path.join(self.curdir, filename)
            f = open(path, "r")
        return f

    def messageReceived(self, filename):
        """Handle one new message; must be overridden by subclasses."""
        raise NotImplementedError
|
abhattad4/Digi-Menu | refs/heads/master | digimenu2/django/db/backends/utils.py | 430 | from __future__ import unicode_literals
import datetime
import decimal
import hashlib
import logging
from time import time
from django.conf import settings
from django.utils.encoding import force_bytes
from django.utils.timezone import utc
logger = logging.getLogger('django.db.backends')
class CursorWrapper(object):
    """Wrap a DB-API cursor so database errors are translated via
    ``db.wrap_database_errors`` and broken transactions are detected."""

    def __init__(self, cursor, db):
        self.cursor = cursor  # the underlying DB-API cursor
        self.db = db          # the database wrapper owning this cursor

    # Attribute names whose retrieval is wrapped so errors raised while
    # fetching rows are translated as well.
    WRAP_ERROR_ATTRS = frozenset(['fetchone', 'fetchmany', 'fetchall', 'nextset'])

    def __getattr__(self, attr):
        # Delegate everything else to the underlying cursor, wrapping the
        # fetch methods listed above.
        cursor_attr = getattr(self.cursor, attr)
        if attr in CursorWrapper.WRAP_ERROR_ATTRS:
            return self.db.wrap_database_errors(cursor_attr)
        else:
            return cursor_attr

    def __iter__(self):
        # Iterating a cursor can raise lazily; translate those errors too.
        with self.db.wrap_database_errors:
            for item in self.cursor:
                yield item

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Ticket #17671 - Close instead of passing thru to avoid backend
        # specific behavior. Catch errors liberally because errors in cleanup
        # code aren't useful.
        try:
            self.close()
        except self.db.Database.Error:
            pass

    # The following methods cannot be implemented in __getattr__, because the
    # code must run when the method is invoked, not just when it is accessed.

    def callproc(self, procname, params=None):
        """Call a stored procedure, with transaction/error protection."""
        self.db.validate_no_broken_transaction()
        with self.db.wrap_database_errors:
            if params is None:
                return self.cursor.callproc(procname)
            else:
                return self.cursor.callproc(procname, params)

    def execute(self, sql, params=None):
        """Execute a query, with transaction/error protection."""
        self.db.validate_no_broken_transaction()
        with self.db.wrap_database_errors:
            if params is None:
                return self.cursor.execute(sql)
            else:
                return self.cursor.execute(sql, params)

    def executemany(self, sql, param_list):
        """Execute a query once per parameter set."""
        self.db.validate_no_broken_transaction()
        with self.db.wrap_database_errors:
            return self.cursor.executemany(sql, param_list)
class CursorDebugWrapper(CursorWrapper):
    """CursorWrapper that additionally records query text and timing in
    ``db.queries_log`` and the 'django.db.backends' logger."""

    # XXX callproc isn't instrumented at this time.

    def execute(self, sql, params=None):
        start = time()
        try:
            return super(CursorDebugWrapper, self).execute(sql, params)
        finally:
            # Log even when the query raises, so failures are timed too.
            stop = time()
            duration = stop - start
            sql = self.db.ops.last_executed_query(self.cursor, sql, params)
            self.db.queries_log.append({
                'sql': sql,
                'time': "%.3f" % duration,
            })
            logger.debug('(%.3f) %s; args=%s' % (duration, sql, params),
                         extra={'duration': duration, 'sql': sql, 'params': params}
                         )

    def executemany(self, sql, param_list):
        start = time()
        try:
            return super(CursorDebugWrapper, self).executemany(sql, param_list)
        finally:
            stop = time()
            duration = stop - start
            try:
                times = len(param_list)
            except TypeError:  # param_list could be an iterator
                times = '?'
            self.db.queries_log.append({
                'sql': '%s times: %s' % (times, sql),
                'time': "%.3f" % duration,
            })
            logger.debug('(%.3f) %s; args=%s' % (duration, sql, param_list),
                         extra={'duration': duration, 'sql': sql, 'params': param_list}
                         )
###############################################
# Converters from database (string) to Python #
###############################################
def typecast_date(s):
    """Convert an ISO 'YYYY-MM-DD' string to datetime.date (None for NULL)."""
    if not s:
        return None  # NULL comes through as empty string / None
    return datetime.date(*(int(part) for part in s.split('-')))
def typecast_time(s):  # does NOT store time zone information
    """Convert an 'HH:MM:SS[.ffffff]' string to datetime.time (None for NULL)."""
    if not s:
        return None
    hour, minute, rest = s.split(':')
    if '.' in rest:  # seconds may carry a fractional part
        seconds, fraction = rest.split('.')
    else:
        seconds, fraction = rest, '0'
    microseconds = int(float('.' + fraction) * 1000000)
    return datetime.time(int(hour), int(minute), int(seconds), microseconds)
def typecast_timestamp(s):  # does NOT store time zone information
    """Convert "YYYY-MM-DD HH:MM:SS[.ffffff][+/-TZ]" to datetime.datetime.

    Returns None for NULL. A bare date (no time portion) is delegated to
    typecast_date(). Examples of accepted input:
        "2005-07-29 15:48:00.590358-05"
        "2005-07-29 09:56:00-05"
    """
    if not s:
        return None
    if ' ' not in s:
        return typecast_date(s)
    d, t = s.split()
    # Extract timezone information, if it exists. Currently we just throw
    # it away, but in the future we may make use of it.
    for sign in ('-', '+'):
        if sign in t:
            t = t.split(sign, 1)[0]
            break
    year, month, day = d.split('-')[:3]
    hours, minutes, seconds = t.split(':')
    if '.' in seconds:  # check whether seconds have a fractional part
        seconds, fraction = seconds.split('.')
    else:
        fraction = '0'
    # Produce an aware datetime only when time-zone support is enabled.
    tzinfo = utc if settings.USE_TZ else None
    return datetime.datetime(int(year), int(month), int(day),
                             int(hours), int(minutes), int(seconds),
                             int((fraction + '000000')[:6]), tzinfo)
def typecast_decimal(s):
    """Convert a database string to decimal.Decimal (None for NULL/empty)."""
    if s is None or s == '':
        return None
    return decimal.Decimal(s)
###############################################
# Converters from Python to database (string) #
###############################################
def rev_typecast_decimal(d):
    """Convert a Decimal to its string form for the database (None stays None)."""
    return None if d is None else str(d)
def truncate_name(name, length=None, hash_len=4):
    """Shortens a string to a repeatable mangled version with the given length.

    Names already within *length* (or with no length limit) pass through
    unchanged; longer names are truncated and suffixed with an MD5-derived
    tag so collisions stay unlikely while the result is deterministic.
    """
    if length is None or len(name) <= length:
        return name
    digest = hashlib.md5(force_bytes(name)).hexdigest()[:hash_len]
    return name[:length - hash_len] + digest
def format_number(value, max_digits, decimal_places):
    """
    Formats a number into a string with the requisite number of digits and
    decimal places.

    None passes through; Decimals are rounded within a copied context so the
    global decimal context is never mutated.
    """
    if value is None:
        return None
    if not isinstance(value, decimal.Decimal):
        # Plain ints/floats: printf-style rounding is sufficient.
        if decimal_places is not None:
            return "%.*f" % (decimal_places, value)
        return "{:f}".format(value)
    context = decimal.getcontext().copy()
    if max_digits is not None:
        context.prec = max_digits
    if decimal_places is None:
        # No fixed exponent: trap Rounded so a value that does not fit the
        # allowed precision raises instead of being silently truncated.
        context.traps[decimal.Rounded] = 1
        value = context.create_decimal(value)
    else:
        value = value.quantize(
            decimal.Decimal(".1") ** decimal_places, context=context)
    return "{:f}".format(value)
|
scottpurdy/nupic | refs/heads/master | tests/unit/nupic/regions/anomaly_region_test.py | 10 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import tempfile
import unittest
import numpy
from nupic.regions.anomaly_region import AnomalyRegion
try:
import capnp
except ImportError:
capnp = None
if capnp:
from nupic.regions.AnomalyRegion_capnp import AnomalyRegionProto
class AnomalyRegionTest(unittest.TestCase):
    """Tests for anomaly region"""

    @unittest.skipUnless(
        capnp, "pycapnp is not installed, skipping serialization test.")
    def testWriteRead(self):
        # Serialize the region mid-stream, reload it, then verify the
        # original and the deserialized copy keep producing identical
        # anomaly scores for the remaining steps.
        predictedColumns = [[0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                            [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                            [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                            [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                            [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                            [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                            [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                            [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                            [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                            [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0]]
        activeColumns = [[0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                         [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1],
                         [0, 1 ,0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0],
                         [0, 0 ,0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0],
                         [1, 0 ,0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1],
                         [0, 0 ,0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1],
                         [0, 0 ,0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1],
                         [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
                         [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1],
                         [0, 1 ,1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0]]
        # Expected raw anomaly score for each of the ten steps above.
        anomalyExpected = (1.0, 0.25, 1.0/3.0, 2.0/3.0, 1.0, 2.0/3.0, 1.0,
                           0.0, 0.25, 0.25)

        anomalyRegion1 = AnomalyRegion()
        inputs = AnomalyRegion.getSpec()['inputs']
        outputs = AnomalyRegion.getSpec()['outputs']
        # Feed the first six steps before serializing the region state.
        for i in xrange(0, 6):
            inputs['predictedColumns'] = numpy.array(predictedColumns[i])
            inputs['activeColumns'] = numpy.array(activeColumns[i])
            anomalyRegion1.compute(inputs, outputs)

        proto1 = AnomalyRegionProto.new_message()
        anomalyRegion1.write(proto1)

        # Write the proto to a temp file and read it back into a new proto
        with tempfile.TemporaryFile() as f:
            proto1.write(f)
            f.seek(0)
            proto2 = AnomalyRegionProto.read(f)

        # Load the deserialized proto
        anomalyRegion2 = AnomalyRegion.read(proto2)

        self.assertEqual(anomalyRegion1, anomalyRegion2)

        # Both instances must score the remaining steps identically and
        # match the precomputed expectations.
        for i in xrange(6, 10):
            inputs['predictedColumns'] = numpy.array(predictedColumns[i])
            inputs['activeColumns'] = numpy.array(activeColumns[i])
            anomalyRegion1.compute(inputs, outputs)
            score1 = outputs['rawAnomalyScore'][0]
            anomalyRegion2.compute(inputs, outputs)
            score2 = outputs['rawAnomalyScore'][0]
            self.assertAlmostEqual(
                score1, anomalyExpected[i], places=5,
                msg="Anomaly score of %f doesn't match expected of %f" % (
                    score1, anomalyExpected[i]))
            self.assertAlmostEqual(
                score2, anomalyExpected[i], places=5,
                msg="Anomaly score of %f doesn't match expected of %f" % (
                    score2, anomalyExpected[i]))
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
bayusantoso/final-assignment-web-ontology | refs/heads/master | IMPLEMENTATION/Application/SourceCode/GOApps/flask/Lib/site-packages/pip/operations/__init__.py | 12133432 | |
thoas/i386 | refs/heads/master | src/milkshape/application/internals/profiles/management/__init__.py | 12133432 | |
sudheerchintala/LearnEraPlatForm | refs/heads/master | lms/djangoapps/open_ended_grading/__init__.py | 12133432 | |
Jay-Oh-eN/topmodel | refs/heads/master | web/views/__init__.py | 12133432 | |
arun6582/django | refs/heads/master | tests/migrate_signals/custom_migrations/__init__.py | 12133432 | |
loco-odoo/localizacion_co | refs/heads/master | openerp/addons-extra/odoo-pruebas/odoo-server/addons/sale_order_dates/sale_order_dates.py | 44 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime, timedelta
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
class sale_order_dates(osv.osv):
    """Add several date fields to Sale Orders, computed or user-entered"""
    _inherit = 'sale.order'

    def _get_date_planned(self, cr, uid, order, line, start_date, context=None):
        """Compute the expected date from the requested date, not the order date"""
        if order and order.requested_date:
            # Ship security_lead days before the customer's requested date so
            # the delivery can still arrive on time.
            date_planned = datetime.strptime(order.requested_date, DEFAULT_SERVER_DATETIME_FORMAT)
            date_planned -= timedelta(days=order.company_id.security_lead)
            return date_planned.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
        # No requested date: fall back to the standard computation.
        return super(sale_order_dates, self)._get_date_planned(
            cr, uid, order, line, start_date, context=context)

    def _get_effective_date(self, cr, uid, ids, name, arg, context=None):
        """Read the shipping date from the related packings"""
        # TODO: would be better if it returned the date the picking was processed?
        res = {}
        dates_list = []
        for order in self.browse(cr, uid, ids, context=context):
            dates_list = []
            for pick in order.picking_ids:
                dates_list.append(pick.date)
            if dates_list:
                # The earliest picking date is taken as the effective date.
                res[order.id] = min(dates_list)
            else:
                res[order.id] = False
        return res

    def _get_commitment_date(self, cr, uid, ids, name, arg, context=None):
        """Compute the commitment date"""
        res = {}
        dates_list = []
        for order in self.browse(cr, uid, ids, context=context):
            dates_list = []
            order_datetime = datetime.strptime(order.date_order, DEFAULT_SERVER_DATETIME_FORMAT)
            for line in order.order_line:
                # Each line is deliverable `delay` days after the order date;
                # the order-level commitment is the earliest of those dates.
                dt = order_datetime + timedelta(days=line.delay or 0.0)
                dt_s = dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
                dates_list.append(dt_s)
            if dates_list:
                res[order.id] = min(dates_list)
        return res

    def onchange_requested_date(self, cr, uid, ids, requested_date,
                                commitment_date, context=None):
        """Warn if the requested dates is sooner than the commitment date"""
        # String comparison is safe here because both dates use the
        # lexicographically ordered server datetime format.
        if (requested_date and commitment_date and requested_date < commitment_date):
            return {'warning': {
                'title': _('Requested date is too soon!'),
                'message': _("The date requested by the customer is "
                             "sooner than the commitment date. You may be "
                             "unable to honor the customer's request.")
                }
            }
        return {}

    _columns = {
        'commitment_date': fields.function(_get_commitment_date, store=True,
            type='datetime', string='Commitment Date',
            help="Date by which the products are sure to be delivered. This is "
                 "a date that you can promise to the customer, based on the "
                 "Product Lead Times."),
        'requested_date': fields.datetime('Requested Date',
            readonly=True, states={'draft': [('readonly', False)]}, copy=False,
            help="Date by which the customer has requested the items to be "
                 "delivered.\n"
                 "When this Order gets confirmed, the Delivery Order's "
                 "expected date will be computed based on this date and the "
                 "Company's Security Delay.\n"
                 "Leave this field empty if you want the Delivery Order to be "
                 "processed as soon as possible. In that case the expected "
                 "date will be computed using the default method: based on "
                 "the Product Lead Times and the Company's Security Delay."),
        'effective_date': fields.function(_get_effective_date, type='date',
            store=True, string='Effective Date',
            help="Date on which the first Delivery Order was created."),
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
poljeff/odoo | refs/heads/8.0 | addons/website_event_track/__openerp__.py | 323 | # -*- coding: utf-8 -*-
{
'name': 'Advanced Events',
'category': 'Website',
'summary': 'Sponsors, Tracks, Agenda, Event News',
'website': 'https://www.odoo.com/page/events',
'version': '1.0',
'description': """
Online Advanced Events
======================
Adds support for:
- sponsors
- dedicated menu per event
- news per event
- tracks
- agenda
- call for proposals
""",
'author': 'OpenERP SA',
'depends': ['website_event', 'website_blog'],
'data': [
'data/event_data.xml',
'views/website_event.xml',
'views/event_backend.xml',
'security/ir.model.access.csv',
'security/event.xml',
],
'qweb': ['static/src/xml/*.xml'],
'demo': [
'data/event_demo.xml',
'data/website_event_track_demo.xml'
],
'installable': True,
}
|
dmwyatt/django-rest-framework | refs/heads/master | tests/test_htmlrenderer.py | 5 | from __future__ import unicode_literals
import django.template.loader
import pytest
from django.conf.urls import url
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.http import Http404
from django.template import TemplateDoesNotExist, engines
from django.test import TestCase, override_settings
from django.utils import six
from rest_framework import status
from rest_framework.decorators import api_view, renderer_classes
from rest_framework.renderers import TemplateHTMLRenderer
from rest_framework.response import Response
@api_view(('GET',))
@renderer_classes((TemplateHTMLRenderer,))
def example(request):
    """
    Return a response rendered as HTML via the example.html template.
    """
    return Response({'object': 'foobar'}, template_name='example.html')
@api_view(('GET',))
@renderer_classes((TemplateHTMLRenderer,))
def permission_denied(request):
    """Always raise PermissionDenied so tests can exercise 403 rendering."""
    raise PermissionDenied()
@api_view(('GET',))
@renderer_classes((TemplateHTMLRenderer,))
def not_found(request):
    """Always raise Http404 so tests can exercise 404 rendering."""
    raise Http404()
# URLconf used via override_settings(ROOT_URLCONF=...) in the test classes.
urlpatterns = [
    url(r'^$', example),
    url(r'^permission_denied$', permission_denied),
    url(r'^not_found$', not_found),
]
@override_settings(ROOT_URLCONF='tests.test_htmlrenderer')
class TemplateHTMLRendererTests(TestCase):
    """End-to-end and unit tests for TemplateHTMLRenderer, using
    monkeypatched template loaders instead of files on disk."""

    def setUp(self):
        class MockResponse(object):
            template_name = None
        self.mock_response = MockResponse()
        self._monkey_patch_get_template()

    def _monkey_patch_get_template(self):
        """
        Monkeypatch get_template
        """
        # Keep a reference so tearDown can restore the original.
        self.get_template = django.template.loader.get_template

        def get_template(template_name, dirs=None):
            if template_name == 'example.html':
                return engines['django'].from_string("example: {{ object }}")
            raise TemplateDoesNotExist(template_name)

        def select_template(template_name_list, dirs=None, using=None):
            if template_name_list == ['example.html']:
                return engines['django'].from_string("example: {{ object }}")
            raise TemplateDoesNotExist(template_name_list[0])

        django.template.loader.get_template = get_template
        django.template.loader.select_template = select_template

    def tearDown(self):
        """
        Revert monkeypatching
        """
        django.template.loader.get_template = self.get_template

    def test_simple_html_view(self):
        response = self.client.get('/')
        self.assertContains(response, "example: foobar")
        self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')

    def test_not_found_html_view(self):
        response = self.client.get('/not_found')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertEqual(response.content, six.b("404 Not Found"))
        self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')

    def test_permission_denied_html_view(self):
        response = self.client.get('/permission_denied')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertEqual(response.content, six.b("403 Forbidden"))
        self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')

    # 2 tests below are based on order of if statements in corresponding method
    # of TemplateHTMLRenderer
    def test_get_template_names_returns_own_template_name(self):
        renderer = TemplateHTMLRenderer()
        renderer.template_name = 'test_template'
        template_name = renderer.get_template_names(self.mock_response, view={})
        assert template_name == ['test_template']

    def test_get_template_names_returns_view_template_name(self):
        renderer = TemplateHTMLRenderer()

        class MockResponse(object):
            template_name = None

        class MockView(object):
            def get_template_names(self):
                return ['template from get_template_names method']

        class MockView2(object):
            template_name = 'template from template_name attribute'

        # get_template_names() on the view takes precedence over the
        # view's template_name attribute.
        template_name = renderer.get_template_names(self.mock_response,
                                                    MockView())
        assert template_name == ['template from get_template_names method']

        template_name = renderer.get_template_names(self.mock_response,
                                                    MockView2())
        assert template_name == ['template from template_name attribute']

    def test_get_template_names_raises_error_if_no_template_found(self):
        renderer = TemplateHTMLRenderer()
        with pytest.raises(ImproperlyConfigured):
            renderer.get_template_names(self.mock_response, view=object())
@override_settings(ROOT_URLCONF='tests.test_htmlrenderer')
class TemplateHTMLRendererExceptionTests(TestCase):
    """Exercise TemplateHTMLRenderer's use of the 404.html / 403.html
    error templates, again via a monkeypatched template loader."""

    def setUp(self):
        """
        Monkeypatch get_template
        """
        # Keep a reference so tearDown can restore the original.
        self.get_template = django.template.loader.get_template

        def get_template(template_name):
            if template_name == '404.html':
                return engines['django'].from_string("404: {{ detail }}")
            if template_name == '403.html':
                return engines['django'].from_string("403: {{ detail }}")
            raise TemplateDoesNotExist(template_name)

        django.template.loader.get_template = get_template

    def tearDown(self):
        """
        Revert monkeypatching
        """
        django.template.loader.get_template = self.get_template

    def test_not_found_html_view_with_template(self):
        response = self.client.get('/not_found')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        # Either the custom template or the plain fallback body is accepted.
        self.assertTrue(response.content in (
            six.b("404: Not found"), six.b("404 Not Found")))
        self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')

    def test_permission_denied_html_view_with_template(self):
        response = self.client.get('/permission_denied')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # Either the custom template or the plain fallback body is accepted.
        self.assertTrue(response.content in (
            six.b("403: Permission denied"), six.b("403 Forbidden")))
        self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')
|
ldotlopez/appkit | refs/heads/master | appkit/keyvaluestore.py | 1 | # -*- coding: utf-8 -*-
# Copyright (C) 2015 Luis López <luis@cuarentaydos.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
from appkit.db import sqlalchemyutils as sautils
import json
import pickle
import sqlalchemy
from sqlalchemy.ext import declarative
from sqlalchemy.orm import exc
_UNDEF = object()
def keyvaluemodel_for_session(name, session, tablename=None):
    """Build a key-value model class bound to *session*'s engine.

    Args:
        name: class name for the generated model (non-empty str).
        session: SQLAlchemy session whose bind supplies the engine.
        tablename: optional table name override; defaults to name.lower().

    Bug fix: the third argument of keyvaluemodel() is an *extra class dict*,
    not a table name — passing `tablename` straight through made
    `class_dict.update(tablename)` raise for both the default None and any
    string value. Wrap the override in a dict instead.
    """
    base = declarative.declarative_base()
    base.metadata.bind = session.get_bind()

    extra = {'__tablename__': tablename} if tablename else {}
    return keyvaluemodel(name, base, extra)
def keyvaluemodel(name, base, extra_dict=None):
    """Create a declarative key-value model class named *name*.

    Args:
        name: class name (non-empty str); the table name defaults to
            name.lower().
        base: declarative base the generated class will inherit from.
        extra_dict: optional extra class attributes; may override
            '__tablename__'. (Was a mutable default `{}` — replaced with
            None to avoid the shared-mutable-default pitfall; passing {}
            still behaves identically.)

    Returns:
        A new class inheriting from _KeyValueItem and *base*.
    """
    if not (isinstance(name, str) and name != ''):
        raise TypeError('name must be a non-empty str')

    class_dict = {
        '__tablename__': name.lower()
    }
    if extra_dict:
        class_dict.update(extra_dict)

    return type(
        name,
        (_KeyValueItem, base),
        class_dict)
class _KeyValueItem:
    """Mapped-column mixin for a key/value row.

    Values are stored as text in `_value` together with a type tag in
    `_typ`, so the original Python type can be restored on read.
    """

    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    key = sqlalchemy.Column(sqlalchemy.String, name='key', nullable=False)
    _value = sqlalchemy.Column(sqlalchemy.String, name='value')
    _typ = sqlalchemy.Column(sqlalchemy.String, name='type', default='str',
                             nullable=False)
    # Sentinel marker; appears unused by the methods below — presumably a
    # cache slot for a resolved value (TODO confirm before removing).
    _resolved = _UNDEF

    def __init__(self, key, value, typ=None):
        self.key = key
        self._typ, self._value = self._native_to_internal(value)
        # An explicit typ overrides the inferred type tag.
        if typ:
            self._typ = typ

    @property
    def value(self):
        # Decode the stored text back into its native Python type.
        return self._interal_to_native(self._typ, self._value)

    @value.setter
    def value(self, v):
        self._typ, self._value = self._native_to_internal(v)

    @staticmethod
    def _native_to_internal(value):
        """Return (type_tag, text) for *value*.

        bool is tested before int because bool is an int subclass; anything
        not scalar falls back to JSON, then to pickle as a last resort.
        """
        if isinstance(value, str):
            typ = 'str'
        elif isinstance(value, bool):
            typ = 'bool'
            value = '1' if value else '0'
        elif isinstance(value, int):
            typ = 'int'
            value = str(value)
        elif isinstance(value, float):
            typ = 'float'
            value = str(value)
        else:
            try:
                value = json.dumps(value)
                typ = 'json'
            except TypeError:
                value = pickle.dumps(value)
                typ = 'pickle'

        return (typ, value)

    @staticmethod
    def _interal_to_native(typ, value):
        # NOTE(review): name looks like a typo for "_internal_to_native";
        # kept as-is because the value property depends on it.
        if typ == 'bool':
            return (value != '0')
        elif typ == 'int':
            return int(value)
        elif typ == 'float':
            return float(value)
        elif typ == 'str':
            return str(value)
        elif typ == 'json':
            return json.loads(value)
        elif typ == 'pickle':
            return pickle.loads(value)

        # Unknown type tag: refuse rather than guess.
        raise ValueError((typ, value))

    def __repr__(self):
        return "<{classname} {key}={value}>".format(
            classname=self.__class__.__name__,
            key=self.key,
            value=self.value)
class KeyValueManager:
    """Simple get/set/reset facade over a model built by keyvaluemodel()."""

    def __init__(self, model, session=None):
        """Wrap *model*; build a session from its bound engine if none given.

        Raises TypeError when no session is supplied and the model's
        metadata is not bound to an engine.
        """
        if not session:
            engine = model.metadata.bind
            if not engine:
                msg = ("Model '{model}' is not bind to any engine an session "
                       "argument is None")
                msg = msg.format(model=repr(model))
                raise TypeError(msg)

            session = sautils.create_session(engine=model.metadata.bind)

        self._sess = session
        self._model = model

    @property
    def _query(self):
        # Fresh query over the wrapped model for each operation.
        return self._sess.query(self._model)

    def get(self, k, default=_UNDEF):
        """Return the value stored under *k*.

        Raises KeyError when the key is missing and no default was given.
        """
        try:
            item = self._query.filter(self._model.key == k).one()
        except exc.NoResultFound:
            if default is _UNDEF:
                raise KeyError(k)
            else:
                return default

        return item.value

    def set(self, k, v):
        """Insert or update the value stored under *k* and commit."""
        try:
            item = self._query.filter(self._model.key == k).one()
            item.value = v
        except exc.NoResultFound:
            item = self._model(key=k, value=v)

        self._sess.add(item)
        self._sess.commit()

    def reset(self, k):
        """Delete the entry stored under *k*; silently ignore a missing key.

        Bug fix: the lookup raises sqlalchemy's NoResultFound (not KeyError),
        and the old code went on to delete an unbound `item` after a miss,
        raising NameError. Now a missing key is a clean no-op.
        """
        try:
            item = self._query.filter(self._model.key == k).one()
        except exc.NoResultFound:
            return

        self._sess.delete(item)
        self._sess.commit()

    def children(self, k):
        """Iterate over the keys that are dotted children of *k*."""
        return map(
            lambda x: x.key,
            self._query.filter(self._model.key.startswith(k+".")))
|
andybondar/CloudFerry | refs/heads/master | cloudferrylib/scheduler/__init__.py | 12133432 | |
liaods/git-repo | refs/heads/master | pager.py | 86 | #
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import select
import sys
active = False
def RunPager(globalConfig):
    """Fork and turn the current process into the user's pager.

    On return (in the child), stdout/stderr are piped into the pager running
    in the parent process. No-op when stdin/stdout are not ttys or when the
    configured pager is empty or 'cat'.
    """
    global active
    if not os.isatty(0) or not os.isatty(1):
        return
    pager = _SelectPager(globalConfig)
    if pager == '' or pager == 'cat':
        return

    # This process turns into the pager; a child it forks will
    # do the real processing and output back to the pager. This
    # is necessary to keep the pager in control of the tty.
    #
    try:
        r, w = os.pipe()
        pid = os.fork()
        if not pid:
            # Child: write both stdout and stderr into the pipe and resume
            # normal execution with the pager flag set.
            os.dup2(w, 1)
            os.dup2(w, 2)
            os.close(r)
            os.close(w)
            active = True
            return

        # Parent: read the pipe on stdin and exec the pager.
        os.dup2(r, 0)
        os.close(r)
        os.close(w)

        _BecomePager(pager)
    except Exception:
        print("fatal: cannot start pager '%s'" % pager, file=sys.stderr)
        sys.exit(255)
def _SelectPager(globalConfig):
    """Pick the pager command: $GIT_PAGER, then core.pager, then $PAGER,
    falling back to 'less'."""
    pager = os.environ.get('GIT_PAGER')
    if pager is not None:
        return pager

    pager = globalConfig.GetString('core.pager')
    if pager:
        return pager

    return os.environ.get('PAGER', 'less')
def _BecomePager(pager):
    """Replace the current process image with *pager* (never returns)."""
    # Delaying execution of the pager until we have output
    # ready works around a long-standing bug in popularly
    # available versions of 'less', a better 'more'.
    #
    # Block until there is something to read (or an error) on stdin.
    _a, _b, _c = select.select([0], [], [0])

    # F: quit if one screen, R: raw control chars, S: chop long lines,
    # X: no termcap init/deinit.
    os.environ['LESS'] = 'FRSX'

    try:
        os.execvp(pager, [pager])
    except OSError:
        # Not directly executable: let the shell interpret the pager string.
        os.execv('/bin/sh', ['sh', '-c', pager])
|
diegoguimaraes/django | refs/heads/master | tests/user_commands/management/commands/leave_locale_alone_true.py | 428 | from django.core.management.base import BaseCommand
from django.utils import translation
class Command(BaseCommand):
    # Test command used to verify that leave_locale_alone = True keeps the
    # active locale untouched while the command runs.
    can_import_settings = True
    leave_locale_alone = True

    def handle(self, *args, **options):
        # Report the currently active language so the caller can assert on it.
        return translation.get_language()
|
jlspyaozhongkai/Uter | refs/heads/master | third_party_backup/Python-2.7.9/Lib/lib-tk/tkColorChooser.py | 37 | # tk common color chooser dialogue
#
# this module provides an interface to the native color dialogue
# available in Tk 4.2 and newer.
#
# written by Fredrik Lundh, May 1997
#
# fixed initialcolor handling in August 1998
#
#
# options (all have default values):
#
# - initialcolor: color to mark as selected when dialog is displayed
# (given as an RGB triplet or a Tk color string)
#
# - parent: which window to place the dialog on top of
#
# - title: dialog title
#
from tkCommonDialog import Dialog
#
# color chooser class
class Chooser(Dialog):
    "Ask for a color"

    command = "tk_chooseColor"

    def _fixoptions(self):
        try:
            # make sure initialcolor is a tk color string
            color = self.options["initialcolor"]
            if isinstance(color, tuple):
                # assume an RGB triplet
                self.options["initialcolor"] = "#%02x%02x%02x" % color
        except KeyError:
            # no initialcolor given; nothing to normalize
            pass

    def _fixresult(self, widget, result):
        # result can be somethings: an empty tuple, an empty string or
        # a Tcl_Obj, so this somewhat weird check handles that
        if not result or not str(result):
            return None, None # canceled

        # to simplify application code, the color chooser returns
        # an RGB tuple together with the Tk color string
        # (winfo_rgb yields 16-bit components; divide to get 0-255 values)
        r, g, b = widget.winfo_rgb(result)
        return (r/256, g/256, b/256), str(result)
#
# convenience stuff
def askcolor(color=None, **options):
    """Pop up the color dialog; optionally preselect *color*."""
    if color:
        # Merge without mutating the caller's keyword dict.
        options = dict(options, initialcolor=color)
    return Chooser(**options).show()
# --------------------------------------------------------------------
# test stuff
if __name__ == "__main__":
    # Python 2 print statement; this module targets the Python 2 Tkinter API.
    print "color", askcolor()
|
cloudbase/neutron-virtualbox | refs/heads/virtualbox_agent | neutron/tests/unit/ml2/test_type_flat.py | 2 | # Copyright (c) 2014 Thales Services SAS
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.common import exceptions as exc
import neutron.db.api as db
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers import type_flat
from neutron.tests.unit import testlib_api
from oslo_config import cfg
FLAT_NETWORKS = ['flat_net1', 'flat_net2']
class FlatTypeTest(testlib_api.SqlTestCase):
    """Unit tests for the ML2 flat network type driver."""

    def setUp(self):
        super(FlatTypeTest, self).setUp()
        # Restrict flat provider networks to the two configured physnets.
        cfg.CONF.set_override('flat_networks', FLAT_NETWORKS,
                              group='ml2_type_flat')
        self.driver = type_flat.FlatTypeDriver()
        self.session = db.get_session()

    def _get_allocation(self, session, segment):
        # Fetch the FlatAllocation row for the segment's physical network,
        # or None when nothing has been reserved.
        return session.query(type_flat.FlatAllocation).filter_by(
            physical_network=segment[api.PHYSICAL_NETWORK]).first()

    def test_is_partial_segment(self):
        segment = {api.NETWORK_TYPE: p_const.TYPE_FLAT,
                   api.PHYSICAL_NETWORK: 'flat_net1'}
        self.assertFalse(self.driver.is_partial_segment(segment))

    def test_validate_provider_segment(self):
        segment = {api.NETWORK_TYPE: p_const.TYPE_FLAT,
                   api.PHYSICAL_NETWORK: 'flat_net1'}
        self.driver.validate_provider_segment(segment)

    def test_validate_provider_phynet_name(self):
        # Empty physnet names in the config must be rejected.
        self.assertRaises(exc.InvalidInput,
                          self.driver._parse_networks,
                          entries=[''])

    def test_validate_provider_phynet_name_multiple(self):
        self.assertRaises(exc.InvalidInput,
                          self.driver._parse_networks,
                          entries=['flat_net1', ''])

    def test_validate_provider_segment_without_physnet_restriction(self):
        # '*' permits any physical network name.
        self.driver._parse_networks('*')
        segment = {api.NETWORK_TYPE: p_const.TYPE_FLAT,
                   api.PHYSICAL_NETWORK: 'other_flat_net'}
        self.driver.validate_provider_segment(segment)

    def test_validate_provider_segment_with_missing_physical_network(self):
        segment = {api.NETWORK_TYPE: p_const.TYPE_FLAT}
        self.assertRaises(exc.InvalidInput,
                          self.driver.validate_provider_segment,
                          segment)

    def test_validate_provider_segment_with_unsupported_physical_network(self):
        segment = {api.NETWORK_TYPE: p_const.TYPE_FLAT,
                   api.PHYSICAL_NETWORK: 'other_flat_net'}
        self.assertRaises(exc.InvalidInput,
                          self.driver.validate_provider_segment,
                          segment)

    def test_validate_provider_segment_with_unallowed_segmentation_id(self):
        # Flat networks carry no segmentation id; supplying one is invalid.
        segment = {api.NETWORK_TYPE: p_const.TYPE_FLAT,
                   api.PHYSICAL_NETWORK: 'flat_net1',
                   api.SEGMENTATION_ID: 1234}
        self.assertRaises(exc.InvalidInput,
                          self.driver.validate_provider_segment,
                          segment)

    def test_reserve_provider_segment(self):
        segment = {api.NETWORK_TYPE: p_const.TYPE_FLAT,
                   api.PHYSICAL_NETWORK: 'flat_net1'}
        observed = self.driver.reserve_provider_segment(self.session, segment)
        alloc = self._get_allocation(self.session, observed)
        self.assertEqual(segment[api.PHYSICAL_NETWORK], alloc.physical_network)

    def test_release_segment(self):
        segment = {api.NETWORK_TYPE: p_const.TYPE_FLAT,
                   api.PHYSICAL_NETWORK: 'flat_net1'}
        self.driver.reserve_provider_segment(self.session, segment)
        self.driver.release_segment(self.session, segment)
        # The allocation row must be gone after release.
        alloc = self._get_allocation(self.session, segment)
        self.assertIsNone(alloc)

    def test_reserve_provider_segment_already_reserved(self):
        segment = {api.NETWORK_TYPE: p_const.TYPE_FLAT,
                   api.PHYSICAL_NETWORK: 'flat_net1'}
        self.driver.reserve_provider_segment(self.session, segment)
        # Only one flat network may exist per physical network.
        self.assertRaises(exc.FlatNetworkInUse,
                          self.driver.reserve_provider_segment,
                          self.session, segment)

    def test_allocate_tenant_segment(self):
        # Flat networks cannot be auto-allocated to tenants.
        observed = self.driver.allocate_tenant_segment(self.session)
        self.assertIsNone(observed)
|
nelmiux/CarnotKE | refs/heads/master | jyhton/lib-python/2.7/test/test_global.py | 139 | """Verify that warnings are issued for global statements following use."""
from test.test_support import run_unittest, check_syntax_error
import unittest
import warnings
class GlobalTests(unittest.TestCase):
    """Compile small programs and assert that misplaced ``global``
    statements raise (warnings are escalated to errors by test_main)."""

    def test1(self):
        # 'global' after both names were already assigned locally.
        prog_text_1 = """\
def wrong1():
    a = 1
    b = 2
    global a
    global b
"""
        check_syntax_error(self, prog_text_1)

    def test2(self):
        # 'global' after the name was used (read) in the function.
        prog_text_2 = """\
def wrong2():
    print x
    global x
"""
        check_syntax_error(self, prog_text_2)

    def test3(self):
        # 'global' after the name was both used and assigned.
        prog_text_3 = """\
def wrong3():
    print x
    x = 2
    global x
"""
        check_syntax_error(self, prog_text_3)

    def test4(self):
        # Module-level 'global' before assignment is legal.
        prog_text_4 = """\
global x
x = 2
"""
        # this should work
        compile(prog_text_4, "<test string>", "exec")
def test_main():
    """Run GlobalTests with the test snippets' warnings raised as errors."""
    with warnings.catch_warnings():
        # Warnings emitted while compiling the '<test string>' programs must
        # surface as exceptions for check_syntax_error() to catch.
        warnings.filterwarnings("error", module="<test string>")
        run_unittest(GlobalTests)
if __name__ == "__main__":
test_main()
|
MiltosD/CEF-ELRC | refs/heads/master | lib/python2.7/site-packages/django/conf/locale/de/formats.py | 329 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = 'j. F Y H:i:s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i:s'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
'%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
|
mortcanty/SARDocker | refs/heads/master | src/setup.py | 1 | from distutils.core import setup
setup(name = 'auxil',
version = '1.1',
author = 'Mort Canty',
author_email = 'mort.canty@gmail.com',
url = 'http://mcanty.homepage.t-online.de/',
description = 'Auxiliary package for M. J.Canty, Image Analysis, Classificatiion and Change Detection in Remote Sensing, 3rd Ed.',
long_description = 'Auxiliary package for M. J.Canty, Image Analysis, Classificatiion and Change Detection in Remote Sensing, 3rd Ed.',
license = 'GNU General Public License',
platforms = ['Windows','Linux'],
packages = ['auxil'])
|
shoyer/numpy | refs/heads/master | numpy/testing/setup.py | 19 | #!/usr/bin/env python
from __future__ import division, print_function
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for numpy.testing.

    Registers the private implementation subpackage and the test data
    directory so both ship with the installed package.
    """
    from numpy.distutils.misc_util import Configuration

    cfg = Configuration('testing', parent_package, top_path)
    cfg.add_subpackage('_private')
    cfg.add_data_dir('tests')
    return cfg
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer="NumPy Developers",
maintainer_email="numpy-dev@numpy.org",
description="NumPy test module",
url="https://www.numpy.org",
license="NumPy License (BSD Style)",
configuration=configuration,
)
|
miniconfig/home-assistant | refs/heads/dev | homeassistant/components/light/insteon_local.py | 4 | """
Support for Insteon dimmers via local hub control.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/light.insteon_local/
"""
import json
import logging
import os
from datetime import timedelta
from homeassistant.components.light import (
ATTR_BRIGHTNESS, SUPPORT_BRIGHTNESS, Light)
from homeassistant.loader import get_component
import homeassistant.util as util
_CONFIGURING = {}
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['insteon_local']
DOMAIN = 'light'
INSTEON_LOCAL_LIGHTS_CONF = 'insteon_local_lights.conf'
MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(milliseconds=100)
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=5)
SUPPORT_INSTEON_LOCAL = SUPPORT_BRIGHTNESS
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Insteon local light platform.

    If a saved device-name file exists, one light entity is created per
    saved device.  Otherwise the hub is queried for linked devices and the
    user is asked (via the configurator) to name each dimmer found.
    """
    insteonhub = hass.data['insteon_local']

    conf_lights = config_from_file(hass.config.path(INSTEON_LOCAL_LIGHTS_CONF))
    if len(conf_lights):
        # Known devices: set each one up directly with its saved name.
        for device_id in conf_lights:
            setup_light(device_id, conf_lights[device_id], insteonhub, hass,
                        add_devices)
    else:
        # No saved config: discover dimmers linked to the hub and request
        # interactive configuration for each one.
        linked = insteonhub.get_linked()
        for device_id in linked:
            if (linked[device_id]['cat_type'] == 'dimmer' and
                    device_id not in conf_lights):
                request_configuration(device_id,
                                      insteonhub,
                                      linked[device_id]['model_name'] + ' ' +
                                      linked[device_id]['sku'],
                                      hass, add_devices)
def request_configuration(device_id, insteonhub, model, hass,
                          add_devices_callback):
    """Request configuration steps from the user.

    Shows a configurator card asking for a friendly name for the
    discovered dimmer; on submit the device is set up and persisted.
    """
    configurator = get_component('configurator')

    # We got an error if this method is called while we are configuring
    if device_id in _CONFIGURING:
        configurator.notify_errors(
            _CONFIGURING[device_id], 'Failed to register, please try again.')
        return

    def insteon_light_config_callback(data):
        """The actions to do when our configuration callback is called."""
        setup_light(device_id, data.get('name'), insteonhub, hass,
                    add_devices_callback)

    # Track the pending request so setup_light() can later mark it done.
    _CONFIGURING[device_id] = configurator.request_config(
        hass, 'Insteon ' + model + ' addr: ' + device_id,
        insteon_light_config_callback,
        description=('Enter a name for ' + model + ' addr: ' + device_id),
        entity_picture='/static/images/config_insteon.png',
        submit_caption='Confirm',
        fields=[{'id': 'name', 'name': 'Name', 'type': ''}]
    )
def setup_light(device_id, name, insteonhub, hass, add_devices_callback):
    """Set up the light.

    Closes any pending configurator request for the device, persists the
    device name to the JSON config file, and adds the entity.
    """
    if device_id in _CONFIGURING:
        # The device was just named via the configurator: close the card.
        request_id = _CONFIGURING.pop(device_id)
        configurator = get_component('configurator')
        configurator.request_done(request_id)
        _LOGGER.info("Device configuration done!")

    conf_lights = config_from_file(hass.config.path(INSTEON_LOCAL_LIGHTS_CONF))
    if device_id not in conf_lights:
        # Remember the device so discovery is skipped on the next start.
        conf_lights[device_id] = name
        if not config_from_file(
                hass.config.path(INSTEON_LOCAL_LIGHTS_CONF),
                conf_lights):
            _LOGGER.error("Failed to save configuration file")

    device = insteonhub.dimmer(device_id)
    add_devices_callback([InsteonLocalDimmerDevice(device, name)])
def config_from_file(filename, config=None):
    """Small configuration file management function.

    With ``config`` given, write it to ``filename`` as JSON and return
    True on success, False on failure.  Without ``config``, read and
    return the stored mapping; a missing or unreadable file yields an
    empty dict so callers can safely apply len()/'in' to the result.
    """
    if config:
        # We're writing configuration
        try:
            with open(filename, 'w') as fdesc:
                fdesc.write(json.dumps(config))
        except IOError as error:
            _LOGGER.error('Saving config file failed: %s', error)
            return False
        return True
    else:
        # We're reading config
        if os.path.isfile(filename):
            try:
                with open(filename, 'r') as fdesc:
                    return json.loads(fdesc.read())
            except IOError as error:
                _LOGGER.error("Reading configuration file failed: %s", error)
                # Fix: return an empty mapping rather than False -- callers
                # iterate and len() the result, and a bool would crash them.
                return {}
        else:
            return {}
class InsteonLocalDimmerDevice(Light):
    """An abstract Class for an Insteon node."""

    def __init__(self, node, name):
        """Initialize the device."""
        self.node = node
        self.node.deviceName = name
        # Last known brightness level, 0-255 (0 == off).
        self._value = 0

    @property
    def name(self):
        """Return the the name of the node."""
        return self.node.deviceName

    @property
    def unique_id(self):
        """Return the ID of this Insteon node."""
        return 'insteon_local_{}'.format(self.node.device_id)

    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        return self._value

    @util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
    def update(self):
        """Update state of the light."""
        resp = self.node.status(0)
        # NOTE(review): retries unconditionally while the hub reports an
        # error -- presumably transient; confirm this cannot busy-loop.
        while 'error' in resp and resp['error'] is True:
            resp = self.node.status(0)

        if 'cmd2' in resp:
            # cmd2 carries the current on-level as a hex byte string.
            self._value = int(resp['cmd2'], 16)

    @property
    def is_on(self):
        """Return the boolean response if the node is on."""
        return self._value != 0

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_INSTEON_LOCAL

    def turn_on(self, **kwargs):
        """Turn device on."""
        brightness = 100
        if ATTR_BRIGHTNESS in kwargs:
            # Convert HA's 0-255 brightness scale to the hub's 0-100 scale.
            brightness = int(kwargs[ATTR_BRIGHTNESS]) / 255 * 100

        self.node.on(brightness)

    def turn_off(self, **kwargs):
        """Turn device off."""
        self.node.off()
|
jwren/intellij-community | refs/heads/master | python/testData/editing/closedTripleQuoteBackspace.before.py | 76 | '''''' |
jborean93/ansible | refs/heads/devel | test/units/module_utils/urls/test_urls.py | 74 | # -*- coding: utf-8 -*-
# (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils import urls
from ansible.module_utils._text import to_native
import pytest
def test_build_ssl_validation_error(mocker):
    """build_ssl_validation_error() tailors its advice to the SSL stack present."""
    # No usable SSL context at all: the message should suggest every remedy.
    mocker.patch.object(urls, 'HAS_SSLCONTEXT', new=False)
    mocker.patch.object(urls, 'HAS_URLLIB3_PYOPENSSLCONTEXT', new=False)
    mocker.patch.object(urls, 'HAS_URLLIB3_SSL_WRAP_SOCKET', new=False)
    with pytest.raises(urls.SSLValidationError) as excinfo:
        urls.build_ssl_validation_error('hostname', 'port', 'paths', exc=None)

    assert 'python >= 2.7.9' in to_native(excinfo.value)
    assert 'the python executable used' in to_native(excinfo.value)
    assert 'urllib3' in to_native(excinfo.value)
    assert 'python >= 2.6' in to_native(excinfo.value)
    assert 'validate_certs=False' in to_native(excinfo.value)

    # With SSLContext available the validate_certs escape hatch is still offered.
    mocker.patch.object(urls, 'HAS_SSLCONTEXT', new=True)
    with pytest.raises(urls.SSLValidationError) as excinfo:
        urls.build_ssl_validation_error('hostname', 'port', 'paths', exc=None)

    assert 'validate_certs=False' in to_native(excinfo.value)

    mocker.patch.object(urls, 'HAS_SSLCONTEXT', new=False)
    mocker.patch.object(urls, 'HAS_URLLIB3_PYOPENSSLCONTEXT', new=True)
    mocker.patch.object(urls, 'HAS_URLLIB3_SSL_WRAP_SOCKET', new=True)
    mocker.patch.object(urls, 'HAS_SSLCONTEXT', new=True)
    with pytest.raises(urls.SSLValidationError) as excinfo:
        urls.build_ssl_validation_error('hostname', 'port', 'paths', exc=None)

    # urllib3 is usable here, so it must not be recommended again.
    assert 'urllib3' not in to_native(excinfo.value)

    # An underlying exception is passed through into the message.
    with pytest.raises(urls.SSLValidationError) as excinfo:
        urls.build_ssl_validation_error('hostname', 'port', 'paths', exc='BOOM')

    assert 'BOOM' in to_native(excinfo.value)
def test_maybe_add_ssl_handler(mocker):
    """maybe_add_ssl_handler() requires SSL and extracts host/port from the URL."""
    # Without an SSL module, requesting validation for https must fail.
    mocker.patch.object(urls, 'HAS_SSL', new=False)
    with pytest.raises(urls.NoSSLError):
        urls.maybe_add_ssl_handler('https://ansible.com/', True)

    mocker.patch.object(urls, 'HAS_SSL', new=True)

    # Credentials in the netloc are not part of the parsed hostname.
    url = 'https://user:passwd@ansible.com/'
    handler = urls.maybe_add_ssl_handler(url, True)
    assert handler.hostname == 'ansible.com'
    assert handler.port == 443

    # Explicit ports are honoured.
    url = 'https://ansible.com:4433/'
    handler = urls.maybe_add_ssl_handler(url, True)
    assert handler.hostname == 'ansible.com'
    assert handler.port == 4433

    url = 'https://user:passwd@ansible.com:4433/'
    handler = urls.maybe_add_ssl_handler(url, True)
    assert handler.hostname == 'ansible.com'
    assert handler.port == 4433

    # Default https port is 443.
    url = 'https://ansible.com/'
    handler = urls.maybe_add_ssl_handler(url, True)
    assert handler.hostname == 'ansible.com'
    assert handler.port == 443

    # Plain http gets no SSL handler at all.
    url = 'http://ansible.com/'
    handler = urls.maybe_add_ssl_handler(url, True)
    assert handler is None

    # Bracketed IPv6 literals parse correctly, with and without a port.
    url = 'https://[2a00:16d8:0:7::205]:4443/'
    handler = urls.maybe_add_ssl_handler(url, True)
    assert handler.hostname == '2a00:16d8:0:7::205'
    assert handler.port == 4443

    url = 'https://[2a00:16d8:0:7::205]/'
    handler = urls.maybe_add_ssl_handler(url, True)
    assert handler.hostname == '2a00:16d8:0:7::205'
    assert handler.port == 443
def test_basic_auth_header():
    """basic_auth_header() base64-encodes 'user:password' credentials."""
    expected = b'Basic dXNlcjpwYXNzd2Q='
    assert urls.basic_auth_header('user', 'passwd') == expected
def test_ParseResultDottedDict():
    """ParseResultDottedDict mirrors urlparse results via attribute access."""
    parsed = urls.urlparse('https://ansible.com/blog')
    dotted = urls.ParseResultDottedDict(parsed._asdict())
    assert dotted.scheme == parsed[0]
    assert list(parsed) == dotted.as_list()
def test_unix_socket_patch_httpconnection_connect(mocker):
    """Inside the patch context, HTTPConnection.connect routes to UnixHTTPConnection."""
    unix_connect = mocker.patch.object(urls.UnixHTTPConnection, 'connect')
    connection = urls.httplib.HTTPConnection('ansible.com')
    with urls.unix_socket_patch_httpconnection_connect():
        connection.connect()

    assert unix_connect.call_count == 1
|
felipenaselva/repo.felipe | refs/heads/master | plugin.video.salts/scrapers/proxy.py | 1 | """
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import abc
import log_utils
import scraper
class Proxy(scraper.Scraper):
    """Base class for scrapers that delegate to a wrapped 'real' scraper.

    Subclasses set ``real_scraper``; one instance of it is built lazily in
    __init__ and every operation is forwarded to it.  If construction
    fails, the delegating methods quietly no-op (returning None/empty).
    """
    __metaclass__ = abc.ABCMeta
    base_url = ''
    real_scraper = None

    def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
        self.timeout = timeout
        self.__scraper = None
        try:
            self.__scraper = self.real_scraper(timeout)
        except Exception as e:
            # Leave self.__scraper as None; all delegating methods then no-op.
            log_utils.log('Failure during %s scraper creation: %s' % (self.get_name(), e), log_utils.LOGDEBUG)

    @classmethod
    def provides(cls):
        # NOTE(review): the bare excepts below also hide programming errors
        # (real_scraper unset, etc.); kept as-is since it is deliberate
        # best-effort behavior.
        try:
            return cls.real_scraper.provides()
        except:
            return frozenset([])

    @classmethod
    def get_name(cls):
        try:
            return cls.real_scraper.get_name()
        except:
            return ''

    @classmethod
    def get_settings(cls):
        try:
            settings = cls.real_scraper.get_settings()
        except:
            # Fall back to the generic Scraper settings.
            settings = super(cls, cls).get_settings()
        return settings

    def resolve_link(self, link):
        if self.__scraper is not None:
            return self.__scraper.resolve_link(link)

    def format_source_label(self, item):
        if self.__scraper is not None:
            return self.__scraper.format_source_label(item)

    def get_sources(self, video):
        if self.__scraper is not None:
            return self.__scraper.get_sources(video)

    def get_url(self, video):
        if self.__scraper is not None:
            return self.__scraper.get_url(video)

    def search(self, video_type, title, year, season=''):
        if self.__scraper is not None:
            return self.__scraper.search(video_type, title, year, season)
        else:
            return []

    def _get_episode_url(self, show_url, video):
        if self.__scraper is not None:
            return self.__scraper._get_episode_url(show_url, video)
|
adw0rd/lettuce | refs/heads/master | tests/integration/lib/Django-1.3/django/contrib/humanize/__init__.py | 12133432 | |
isendel/algorithms | refs/heads/master | algorithms/prime_numbers/__init__.py | 12133432 | |
mbauskar/frappe | refs/heads/develop | frappe/patches/v8_0/drop_unwanted_indexes.py | 19 | # Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import frappe
def execute():
    """Drop redundant indexes from `tabCommunication`.

    Best-effort: each DROP INDEX is attempted independently and failures
    (e.g. the index does not exist on this site) are ignored.
    """
    # communication
    unwanted_indexes = ["communication_date_index", "message_id_index",
                        "modified_index", "creation_index", "reference_owner",
                        "communication_date"]
    for index_name in unwanted_indexes:
        try:
            frappe.db.sql("drop index {0} on `tabCommunication`".format(index_name))
        except Exception:
            # Deliberate best-effort, but narrowed from the original bare
            # `except:` so KeyboardInterrupt/SystemExit still propagate.
            pass
industrydive/mezzanine | refs/heads/master | mezzanine/generic/migrations/0002_auto_20141227_0224.py | 46 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import mezzanine.core.fields
class Migration(migrations.Migration):
    """Make AssignedKeyword._order nullable (auto-generated field change)."""

    dependencies = [
        ('generic', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='assignedkeyword',
            name='_order',
            field=mezzanine.core.fields.OrderField(null=True, verbose_name='Order'),
            preserve_default=True,
        ),
    ]
|
enthought/etsproxy | refs/heads/master | enthought/pyface/wizard/__init__.py | 24 | #------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
|
jmvasquez/redashtest | refs/heads/master | migrations/0014_migrate_existing_es_to_kibana.py | 20 | __author__ = 'lior'
from redash.models import DataSource
if __name__ == '__main__':
    # One-off migration: Elasticsearch data sources are now handled by the
    # 'kibana' query runner, so rewrite their type in place.
    for ds in DataSource.select(DataSource.id, DataSource.type):
        if ds.type == 'elasticsearch':
            ds.type = 'kibana'
            # Persist only the modified column.
            ds.save(only=ds.dirty_fields)
|
Endika/odoo-saas-tools | refs/heads/8.0 | saas_server/controllers/__init__.py | 7372 | import main
|
rmmh/kubernetes-test-infra | refs/heads/master | gubernator/third_party/cloudstorage/common.py | 129 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Helpers shared by cloudstorage_stub and cloudstorage_api."""
__all__ = ['CS_XML_NS',
'CSFileStat',
'dt_str_to_posix',
'local_api_url',
'LOCAL_GCS_ENDPOINT',
'local_run',
'get_access_token',
'get_stored_content_length',
'get_metadata',
'GCSFileStat',
'http_time_to_posix',
'memory_usage',
'posix_time_to_http',
'posix_to_dt_str',
'set_access_token',
'validate_options',
'validate_bucket_name',
'validate_bucket_path',
'validate_file_path',
]
import calendar
import datetime
from email import utils as email_utils
import logging
import os
import re
try:
from google.appengine.api import runtime
except ImportError:
from google.appengine.api import runtime
_GCS_BUCKET_REGEX_BASE = r'[a-z0-9\.\-_]{3,63}'
_GCS_BUCKET_REGEX = re.compile(_GCS_BUCKET_REGEX_BASE + r'$')
_GCS_BUCKET_PATH_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'$')
_GCS_PATH_PREFIX_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'.*')
_GCS_FULLPATH_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'/.*')
_GCS_METADATA = ['x-goog-meta-',
'content-disposition',
'cache-control',
'content-encoding']
_GCS_OPTIONS = _GCS_METADATA + ['x-goog-acl']
CS_XML_NS = 'http://doc.s3.amazonaws.com/2006-03-01'
LOCAL_GCS_ENDPOINT = '/_ah/gcs'
_access_token = ''
_MAX_GET_BUCKET_RESULT = 1000
def set_access_token(access_token):
"""Set the shared access token to authenticate with Google Cloud Storage.
When set, the library will always attempt to communicate with the
real Google Cloud Storage with this token even when running on dev appserver.
Note the token could expire so it's up to you to renew it.
When absent, the library will automatically request and refresh a token
on appserver, or when on dev appserver, talk to a Google Cloud Storage
stub.
Args:
access_token: you can get one by run 'gsutil -d ls' and copy the
str after 'Bearer'.
"""
global _access_token
_access_token = access_token
def get_access_token():
"""Returns the shared access token."""
return _access_token
class GCSFileStat(object):
    """Container for GCS file stat."""

    def __init__(self,
                 filename,
                 st_size,
                 etag,
                 st_ctime,
                 content_type=None,
                 metadata=None,
                 is_dir=False):
        """Initialize.

        For files, the non optional arguments are always set.
        For directories, only filename and is_dir is set.

        Args:
          filename: a Google Cloud Storage filename of form '/bucket/filename'.
          st_size: file size in bytes. long compatible.
          etag: hex digest of the md5 hash of the file's content. str.
          st_ctime: posix file creation time. float compatible.
          content_type: content type. str.
          metadata: a str->str dict of user specified options when creating
            the file. Possible keys are x-goog-meta-, content-disposition,
            content-encoding, and cache-control.
          is_dir: True if this represents a directory. False if this is a real
            file.
        """
        self.filename = filename
        self.is_dir = is_dir
        self.st_size = None
        self.st_ctime = None
        self.etag = None
        self.content_type = content_type
        self.metadata = metadata
        if not is_dir:
            # Python 2 code: long() here (and __cmp__/basestring elsewhere in
            # this module) do not exist on Python 3.
            self.st_size = long(st_size)
            self.st_ctime = float(st_ctime)
            if etag[0] == '"' and etag[-1] == '"':
                # Strip the surrounding quotes some responses include.
                etag = etag[1:-1]
            self.etag = etag

    def __repr__(self):
        if self.is_dir:
            return '(directory: %s)' % self.filename

        return (
            '(filename: %(filename)s, st_size: %(st_size)s, '
            'st_ctime: %(st_ctime)s, etag: %(etag)s, '
            'content_type: %(content_type)s, '
            'metadata: %(metadata)s)' %
            dict(filename=self.filename,
                 st_size=self.st_size,
                 st_ctime=self.st_ctime,
                 etag=self.etag,
                 content_type=self.content_type,
                 metadata=self.metadata))

    def __cmp__(self, other):
        # Ordering is by filename only (Python 2 comparison protocol).
        if not isinstance(other, self.__class__):
            # NOTE(review): the %s placeholders are never interpolated --
            # ValueError receives them as extra args, not a formatted message.
            raise ValueError('Argument to cmp must have the same type. '
                             'Expect %s, got %s', self.__class__.__name__,
                             other.__class__.__name__)
        if self.filename > other.filename:
            return 1
        elif self.filename < other.filename:
            return -1
        return 0

    def __hash__(self):
        # Hash by etag when available (content identity), else by filename.
        if self.etag:
            return hash(self.etag)
        return hash(self.filename)
CSFileStat = GCSFileStat
def get_stored_content_length(headers):
    """Return the content length (in bytes) of the object as stored in GCS.

    x-goog-stored-content-length should always be present except when called
    via the local dev_appserver, in which case the standard content-length
    header is used as a fallback.

    Args:
      headers: a dict of headers from the http response.

    Returns:
      the stored content length, or None if neither header is present.
    """
    stored = headers.get('x-goog-stored-content-length')
    if stored is not None:
        return stored
    return headers.get('content-length')
def get_metadata(headers):
"""Get user defined options from HTTP response headers."""
return dict((k, v) for k, v in headers.iteritems()
if any(k.lower().startswith(valid) for valid in _GCS_METADATA))
def validate_bucket_name(name):
"""Validate a Google Storage bucket name.
Args:
name: a Google Storage bucket name with no prefix or suffix.
Raises:
ValueError: if name is invalid.
"""
_validate_path(name)
if not _GCS_BUCKET_REGEX.match(name):
raise ValueError('Bucket should be 3-63 characters long using only a-z,'
'0-9, underscore, dash or dot but got %s' % name)
def validate_bucket_path(path):
"""Validate a Google Cloud Storage bucket path.
Args:
path: a Google Storage bucket path. It should have form '/bucket'.
Raises:
ValueError: if path is invalid.
"""
_validate_path(path)
if not _GCS_BUCKET_PATH_REGEX.match(path):
raise ValueError('Bucket should have format /bucket '
'but got %s' % path)
def validate_file_path(path):
"""Validate a Google Cloud Storage file path.
Args:
path: a Google Storage file path. It should have form '/bucket/filename'.
Raises:
ValueError: if path is invalid.
"""
_validate_path(path)
if not _GCS_FULLPATH_REGEX.match(path):
raise ValueError('Path should have format /bucket/filename '
'but got %s' % path)
def _process_path_prefix(path_prefix):
"""Validate and process a Google Cloud Stoarge path prefix.
Args:
path_prefix: a Google Cloud Storage path prefix of format '/bucket/prefix'
or '/bucket/' or '/bucket'.
Raises:
ValueError: if path is invalid.
Returns:
a tuple of /bucket and prefix. prefix can be None.
"""
_validate_path(path_prefix)
if not _GCS_PATH_PREFIX_REGEX.match(path_prefix):
raise ValueError('Path prefix should have format /bucket, /bucket/, '
'or /bucket/prefix but got %s.' % path_prefix)
bucket_name_end = path_prefix.find('/', 1)
bucket = path_prefix
prefix = None
if bucket_name_end != -1:
bucket = path_prefix[:bucket_name_end]
prefix = path_prefix[bucket_name_end + 1:] or None
return bucket, prefix
def _validate_path(path):
"""Basic validation of Google Storage paths.
Args:
path: a Google Storage path. It should have form '/bucket/filename'
or '/bucket'.
Raises:
ValueError: if path is invalid.
TypeError: if path is not of type basestring.
"""
if not path:
raise ValueError('Path is empty')
if not isinstance(path, basestring):
raise TypeError('Path should be a string but is %s (%s).' %
(path.__class__, path))
def validate_options(options):
"""Validate Google Cloud Storage options.
Args:
options: a str->basestring dict of options to pass to Google Cloud Storage.
Raises:
ValueError: if option is not supported.
TypeError: if option is not of type str or value of an option
is not of type basestring.
"""
if not options:
return
for k, v in options.iteritems():
if not isinstance(k, str):
raise TypeError('option %r should be a str.' % k)
if not any(k.lower().startswith(valid) for valid in _GCS_OPTIONS):
raise ValueError('option %s is not supported.' % k)
if not isinstance(v, basestring):
raise TypeError('value %r for option %s should be of type basestring.' %
(v, k))
def http_time_to_posix(http_time):
    """Convert HTTP time format to posix time.

    See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.3.1
    for the HTTP time format.

    Args:
      http_time: time in RFC 2616 format, e.g. "Mon, 20 Nov 1995 19:12:08 GMT".

    Returns:
      Seconds from the unix epoch, or None when http_time is None.
    """
    if http_time is None:
        return None
    parsed = email_utils.parsedate_tz(http_time)
    return email_utils.mktime_tz(parsed)
def posix_time_to_http(posix_time):
    """Convert posix time to HTML header time format.

    Args:
      posix_time: unix time.

    Returns:
      A datetime str in RFC 2616 format, or None when posix_time is None.
    """
    # Compare against None explicitly (matching http_time_to_posix above) so
    # the epoch itself (posix_time == 0, which is falsy) still converts
    # instead of silently returning None.
    if posix_time is not None:
        return email_utils.formatdate(posix_time, usegmt=True)
# Wire format of GCS XML timestamps, without the fractional-seconds suffix.
_DT_FORMAT = '%Y-%m-%dT%H:%M:%S'


def dt_str_to_posix(dt_str):
    """Convert a GCS XML timestamp string to posix time.

    The input has format %Y-%m-%dT%H:%M:%S.%fZ, e.g. 2013-04-12T00:22:27.978Z
    (ISO 8601; T separates date and time, Z indicates UTC).  Used to parse
    the LastModified node from GCS's GET bucket XML response.

    Args:
      dt_str: A datetime str.

    Returns:
      Seconds from the unix epoch (midnight 1970/1/1 UTC); the fractional
      part of the input is discarded.
    """
    whole_seconds, _ = dt_str.split('.')
    parsed = datetime.datetime.strptime(whole_seconds, _DT_FORMAT)
    return calendar.timegm(parsed.utctimetuple())
def posix_to_dt_str(posix):
    """Reverse of str_to_datetime.

    This is used by the GCS stub to generate GET bucket XML responses.

    Args:
      posix: A float of secs from unix epoch.

    Returns:
      A datetime str, always carrying a fixed '.000Z' fractional suffix.
    """
    stamp = datetime.datetime.utcfromtimestamp(posix)
    return stamp.strftime(_DT_FORMAT) + '.000Z'
def local_run():
    """Whether we should hit GCS dev appserver stub."""
    server_software = os.environ.get('SERVER_SOFTWARE')
    if server_software is None:
        # No server environment at all: assume a local run.
        return True
    if 'remote_api' in server_software:
        return False
    return server_software.startswith(('Development', 'testutil'))
def local_api_url():
"""Return URL for GCS emulation on dev appserver."""
return 'http://%s%s' % (os.environ.get('HTTP_HOST'), LOCAL_GCS_ENDPOINT)
def memory_usage(method):
"""Log memory usage before and after a method."""
def wrapper(*args, **kwargs):
logging.info('Memory before method %s is %s.',
method.__name__, runtime.memory_usage().current())
result = method(*args, **kwargs)
logging.info('Memory after method %s is %s',
method.__name__, runtime.memory_usage().current())
return result
return wrapper
def _add_ns(tagname):
return '{%(ns)s}%(tag)s' % {'ns': CS_XML_NS,
'tag': tagname}
_T_CONTENTS = _add_ns('Contents')
_T_LAST_MODIFIED = _add_ns('LastModified')
_T_ETAG = _add_ns('ETag')
_T_KEY = _add_ns('Key')
_T_SIZE = _add_ns('Size')
_T_PREFIX = _add_ns('Prefix')
_T_COMMON_PREFIXES = _add_ns('CommonPrefixes')
_T_NEXT_MARKER = _add_ns('NextMarker')
_T_IS_TRUNCATED = _add_ns('IsTruncated')
|
khertan/PyGTKEditor | refs/heads/master | pge_recentchooser.py | 1 | #!/usr/bin/env python2.5
#
# PyGTKEditor
#
# Copyright (c) 2007 Khertan (Benoit HERVIER)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# Khertan (Benoit HERVIER) khertan@khertan.net
import hildon
import gtk
import osso
import pango
from portrait import FremantleRotation
import pge_editor
import os
from subprocess import *
import commands
import gobject
import pge_preferences
class Dialog(hildon.Dialog):
    """Hildon dialog listing recent files; .get() returns the chosen path."""

    def __init__(self):
        hildon.Dialog.__init__ (self)
        self.set_title('Choose a file to open')
        # Display path of the picked file, or None while nothing is chosen.
        self.selected = None
        vbox = gtk.VBox()
        p1 = hildon.PannableArea()
        p1.add_with_viewport(vbox)
        rm = gtk.recent_manager_get_default()
        ritems = rm.get_items()
        # Most recently modified entries first.
        ritems.sort(lambda x,y: y.get_modified()-x.get_modified())
        if (len(ritems)>0):
            # vbox.add(gtk.Label('Recent Files'))
            # One finger-friendly button per recent item.
            for index,item in enumerate(ritems):
                b=hildon.Button(0,1)
                label = item.get_uri_display()
                i = gtk.image_new_from_stock(gtk.STOCK_FILE,gtk.ICON_SIZE_BUTTON)
                # i = gtk.image_new_from_icon_name(item.get_mime_type(),gtk.ICON_SIZE_BUTTON)
                # i = gtk.image_new_from_pixbuf(item.get_icon(gtk.ICON_SIZE_LARGE_TOOLBAR))
                b.set_image(i)
                b.set_title(os.path.basename(label))
                b.set_value(label)
                b.set_alignment(0.0,0.0,0.0,0.5)
                vbox.add(b)
                b.connect("clicked", self._clicked, label)
        # vbox
        p1.set_size_request(-1,350)
        self.vbox.add(p1)
        self.vbox.show_all()

    def _clicked(self,w,label):
        # Remember the selection and close; get() then returns it.
        self.selected = label
        self.destroy()

    def get(self):
        """Run the dialog modally and return the selected path (or None)."""
        self.run()
        return self.selected
if __name__ == "__main__":
print Dialog().get()
gtk.main()
|
tejoesperanto/pasportaservo | refs/heads/master | hosting/migrations/0054_phones_ordering.py | 3 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2018-02-13 08:15
from __future__ import unicode_literals
from django.db import migrations
def set_priority_for_deleted_phones(app_registry, schema_editor):
    """
    Deleted phones should have higher priority rating, to appear lower on
    the list (lower rating = higher on list = more important).
    """
    # Use the historical model from the migration registry; all_objects is
    # the manager that includes soft-deleted rows (deleted_on set).
    Phone = app_registry.get_model('hosting', 'Phone')
    Phone.all_objects.filter(deleted_on__isnull=False).update(_order=100)
class Migration(migrations.Migration):
dependencies = [
('hosting', '0053_profile_death_date'),
]
operations = [
migrations.AlterOrderWithRespectTo(
name='phone',
order_with_respect_to='profile',
),
migrations.RunPython(
set_priority_for_deleted_phones, reverse_code=migrations.RunPython.noop
),
]
|
citrix-openstack-build/swift | refs/heads/master | test/unit/common/middleware/test_tempauth.py | 2 | # Copyright (c) 2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from contextlib import contextmanager
from base64 import b64encode
from time import time
from swift.common.middleware import tempauth as auth
from swift.common.swob import Request, Response
class FakeMemcache(object):
    """In-memory stand-in for the swift memcache client used by tempauth."""

    def __init__(self):
        self.store = {}

    def get(self, key):
        """Return the cached value for *key*, or None when absent."""
        return self.store.get(key)

    def set(self, key, value, time=0):
        """Store *key* -> *value*; the timeout argument is ignored."""
        self.store[key] = value
        return True

    def incr(self, key, time=0):
        """Increment *key*, creating it at 0 first when missing."""
        new_value = self.store.setdefault(key, 0) + 1
        self.store[key] = new_value
        return new_value

    @contextmanager
    def soft_lock(self, key, timeout=0, retries=5):
        """Pretend to take the lock; always succeeds immediately."""
        yield True

    def delete(self, key):
        """Remove *key* if present; always report success."""
        try:
            del self.store[key]
        except Exception:
            pass
        return True
class FakeApp(object):
    """WSGI app double that replays scripted responses and honours swift.authorize."""

    def __init__(self, status_headers_body_iter=None, acl=None, sync_key=None):
        # Number of times the app has been invoked.
        self.calls = 0
        self.status_headers_body_iter = status_headers_body_iter
        if not self.status_headers_body_iter:
            # Default script: every call 404s with an empty body.
            self.status_headers_body_iter = iter([('404 Not Found', {}, '')])
        self.acl = acl
        self.sync_key = sync_key

    def __call__(self, env, start_response):
        self.calls += 1
        self.request = Request.blank('', environ=env)
        if self.acl:
            self.request.acl = self.acl
        if self.sync_key:
            self.request.environ['swift_sync_key'] = self.sync_key
        # Run any authorize callback the auth middleware installed; a truthy
        # result short-circuits with the denial response.
        if 'swift.authorize' in env:
            resp = env['swift.authorize'](self.request)
            if resp:
                return resp(env, start_response)
        status, headers, body = self.status_headers_body_iter.next()
        return Response(status=status, headers=headers,
                        body=body)(env, start_response)
class FakeConn(object):
    """Minimal HTTP connection double driven by a scripted response iterator."""

    def __init__(self, status_headers_body_iter=None):
        # Number of request() calls made so far.
        self.calls = 0
        self.status_headers_body_iter = status_headers_body_iter
        if not self.status_headers_body_iter:
            self.status_headers_body_iter = iter([('404 Not Found', {}, '')])

    def request(self, method, path, headers):
        """Record the request and pop the next scripted response (py2 .next())."""
        self.calls += 1
        self.request_path = path
        self.status, self.headers, self.body = \
            self.status_headers_body_iter.next()
        # Split '404 Not Found' into an int status and a reason phrase.
        self.status, self.reason = self.status.split(' ', 1)
        self.status = int(self.status)

    def getresponse(self):
        # The connection object doubles as its own response.
        return self

    def read(self):
        # One-shot read: subsequent calls return ''.
        body = self.body
        self.body = ''
        return body
class TestAuth(unittest.TestCase):
    """Tests for the tempauth middleware's authentication/authorization
    behavior: reseller/auth prefix handling, token and ACL checks, storage
    URL generation, container-sync access, and swift_owner marking."""
    def setUp(self):
        # Middleware under test, wrapping a FakeApp that 404s by default.
        self.test_auth = auth.filter_factory({})(FakeApp())
    def _make_request(self, path, **kwargs):
        # Helper: build a request with the in-memory memcache stub attached.
        req = Request.blank(path, **kwargs)
        req.environ['swift.cache'] = FakeMemcache()
        return req
    def test_reseller_prefix_init(self):
        # The reseller prefix is always normalized to end with '_'.
        app = FakeApp()
        ath = auth.filter_factory({})(app)
        self.assertEquals(ath.reseller_prefix, 'AUTH_')
        ath = auth.filter_factory({'reseller_prefix': 'TEST'})(app)
        self.assertEquals(ath.reseller_prefix, 'TEST_')
        ath = auth.filter_factory({'reseller_prefix': 'TEST_'})(app)
        self.assertEquals(ath.reseller_prefix, 'TEST_')
    def test_auth_prefix_init(self):
        # The auth prefix is normalized to have leading and trailing slashes.
        app = FakeApp()
        ath = auth.filter_factory({})(app)
        self.assertEquals(ath.auth_prefix, '/auth/')
        ath = auth.filter_factory({'auth_prefix': ''})(app)
        self.assertEquals(ath.auth_prefix, '/auth/')
        ath = auth.filter_factory({'auth_prefix': '/'})(app)
        self.assertEquals(ath.auth_prefix, '/auth/')
        ath = auth.filter_factory({'auth_prefix': '/test/'})(app)
        self.assertEquals(ath.auth_prefix, '/test/')
        ath = auth.filter_factory({'auth_prefix': '/test'})(app)
        self.assertEquals(ath.auth_prefix, '/test/')
        ath = auth.filter_factory({'auth_prefix': 'test/'})(app)
        self.assertEquals(ath.auth_prefix, '/test/')
        ath = auth.filter_factory({'auth_prefix': 'test'})(app)
        self.assertEquals(ath.auth_prefix, '/test/')
    def test_top_level_deny(self):
        req = self._make_request('/')
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
        self.assertEquals(req.environ['swift.authorize'],
                          self.test_auth.denied_response)
    def test_anon(self):
        req = self._make_request('/v1/AUTH_account')
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
        self.assertEquals(req.environ['swift.authorize'],
                          self.test_auth.authorize)
    def test_override_asked_for_but_not_allowed(self):
        self.test_auth = \
            auth.filter_factory({'allow_overrides': 'false'})(FakeApp())
        req = self._make_request('/v1/AUTH_account',
                                 environ={'swift.authorize_override': True})
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
        self.assertEquals(req.environ['swift.authorize'],
                          self.test_auth.authorize)
    def test_override_asked_for_and_allowed(self):
        self.test_auth = \
            auth.filter_factory({'allow_overrides': 'true'})(FakeApp())
        req = self._make_request('/v1/AUTH_account',
                                 environ={'swift.authorize_override': True})
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 404)
        self.assertTrue('swift.authorize' not in req.environ)
    def test_override_default_allowed(self):
        req = self._make_request('/v1/AUTH_account',
                                 environ={'swift.authorize_override': True})
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 404)
        self.assertTrue('swift.authorize' not in req.environ)
    def test_auth_deny_non_reseller_prefix(self):
        req = self._make_request('/v1/BLAH_account',
                                 headers={'X-Auth-Token': 'BLAH_t'})
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
        self.assertEquals(req.environ['swift.authorize'],
                          self.test_auth.denied_response)
    def test_auth_deny_non_reseller_prefix_no_override(self):
        # A pre-existing swift.authorize must not be replaced by tempauth.
        fake_authorize = lambda x: Response(status='500 Fake')
        req = self._make_request('/v1/BLAH_account',
                                 headers={'X-Auth-Token': 'BLAH_t'},
                                 environ={'swift.authorize': fake_authorize}
                                 )
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 500)
        self.assertEquals(req.environ['swift.authorize'], fake_authorize)
    def test_auth_no_reseller_prefix_deny(self):
        # Ensures that when we have no reseller prefix, we don't deny a request
        # outright but set up a denial swift.authorize and pass the request on
        # down the chain.
        local_app = FakeApp()
        local_auth = auth.filter_factory({'reseller_prefix': ''})(local_app)
        req = self._make_request('/v1/account',
                                 headers={'X-Auth-Token': 't'})
        resp = req.get_response(local_auth)
        self.assertEquals(resp.status_int, 401)
        self.assertEquals(local_app.calls, 1)
        self.assertEquals(req.environ['swift.authorize'],
                          local_auth.denied_response)
    def test_auth_no_reseller_prefix_no_token(self):
        # Check that normally we set up a call back to our authorize.
        local_auth = \
            auth.filter_factory({'reseller_prefix': ''})(FakeApp(iter([])))
        req = self._make_request('/v1/account')
        resp = req.get_response(local_auth)
        self.assertEquals(resp.status_int, 401)
        self.assertEquals(req.environ['swift.authorize'],
                          local_auth.authorize)
        # Now make sure we don't override an existing swift.authorize when we
        # have no reseller prefix.
        local_auth = \
            auth.filter_factory({'reseller_prefix': ''})(FakeApp())
        local_authorize = lambda req: Response('test')
        req = self._make_request('/v1/account', environ={'swift.authorize':
                                 local_authorize})
        resp = req.get_response(local_auth)
        self.assertEquals(resp.status_int, 200)
        self.assertEquals(req.environ['swift.authorize'], local_authorize)
    def test_auth_fail(self):
        resp = self._make_request(
            '/v1/AUTH_cfa',
            headers={'X-Auth-Token': 'AUTH_t'}).get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
    def test_authorize_bad_path(self):
        # 401 for anonymous, 403 once an identity is present.
        req = self._make_request('/badpath')
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 401)
        req = self._make_request('/badpath')
        req.remote_user = 'act:usr,act,AUTH_cfa'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
    def test_authorize_account_access(self):
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act,AUTH_cfa'
        self.assertEquals(self.test_auth.authorize(req), None)
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
    def test_authorize_acl_group_access(self):
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act'
        req.acl = 'act'
        self.assertEquals(self.test_auth.authorize(req), None)
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act'
        req.acl = 'act:usr'
        self.assertEquals(self.test_auth.authorize(req), None)
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act'
        req.acl = 'act2'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act'
        req.acl = 'act:usr2'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
    def test_deny_cross_reseller(self):
        # Tests that cross-reseller is denied, even if ACLs/group names match
        req = self._make_request('/v1/OTHER_cfa')
        req.remote_user = 'act:usr,act,AUTH_cfa'
        req.acl = 'act'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
    def test_authorize_acl_referer_after_user_groups(self):
        req = self._make_request('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr'
        req.acl = '.r:*,act:usr'
        self.assertEquals(self.test_auth.authorize(req), None)
    def test_authorize_acl_referrer_access(self):
        req = self._make_request('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        req = self._make_request('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        req.acl = '.r:*,.rlistings'
        self.assertEquals(self.test_auth.authorize(req), None)
        req = self._make_request('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        req.acl = '.r:*'  # No listings allowed
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        req = self._make_request('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        req.acl = '.r:.example.com,.rlistings'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        req = self._make_request('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        req.referer = 'http://www.example.com/index.html'
        req.acl = '.r:.example.com,.rlistings'
        self.assertEquals(self.test_auth.authorize(req), None)
        # Same matrix again, but for anonymous requests (401 instead of 403).
        req = self._make_request('/v1/AUTH_cfa/c')
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 401)
        req = self._make_request('/v1/AUTH_cfa/c')
        req.acl = '.r:*,.rlistings'
        self.assertEquals(self.test_auth.authorize(req), None)
        req = self._make_request('/v1/AUTH_cfa/c')
        req.acl = '.r:*'  # No listings allowed
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 401)
        req = self._make_request('/v1/AUTH_cfa/c')
        req.acl = '.r:.example.com,.rlistings'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 401)
        req = self._make_request('/v1/AUTH_cfa/c')
        req.referer = 'http://www.example.com/index.html'
        req.acl = '.r:.example.com,.rlistings'
        self.assertEquals(self.test_auth.authorize(req), None)
    def test_detect_reseller_request(self):
        # A cached token mapping to .reseller_admin marks the request.
        req = self._make_request('/v1/AUTH_admin',
                                 headers={'X-Auth-Token': 'AUTH_t'})
        cache_key = 'AUTH_/token/AUTH_t'
        cache_entry = (time() + 3600, '.reseller_admin')
        req.environ['swift.cache'].set(cache_key, cache_entry)
        req.get_response(self.test_auth)
        self.assertTrue(req.environ.get('reseller_request', False))
    def test_account_put_permissions(self):
        # Only .reseller_admin may PUT accounts.
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'PUT'})
        req.remote_user = 'act:usr,act'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'PUT'})
        req.remote_user = 'act:usr,act,AUTH_other'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        # Even PUTs to your own account as account admin should fail
        req = self._make_request('/v1/AUTH_old',
                                 environ={'REQUEST_METHOD': 'PUT'})
        req.remote_user = 'act:usr,act,AUTH_old'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'PUT'})
        req.remote_user = 'act:usr,act,.reseller_admin'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp, None)
        # .super_admin is not something the middleware should ever see or care
        # about
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'PUT'})
        req.remote_user = 'act:usr,act,.super_admin'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
    def test_account_delete_permissions(self):
        # Only .reseller_admin may DELETE accounts.
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'DELETE'})
        req.remote_user = 'act:usr,act'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'DELETE'})
        req.remote_user = 'act:usr,act,AUTH_other'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        # Even DELETEs to your own account as account admin should fail
        req = self._make_request('/v1/AUTH_old',
                                 environ={'REQUEST_METHOD': 'DELETE'})
        req.remote_user = 'act:usr,act,AUTH_old'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'DELETE'})
        req.remote_user = 'act:usr,act,.reseller_admin'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp, None)
        # .super_admin is not something the middleware should ever see or care
        # about
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'DELETE'})
        req.remote_user = 'act:usr,act,.super_admin'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
    def test_get_token_fail(self):
        resp = self._make_request('/auth/v1.0').get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
        resp = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'act:usr',
                     'X-Auth-Key': 'key'}).get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
    def test_get_token_fail_invalid_x_auth_user_format(self):
        resp = self._make_request(
            '/auth/v1/act/auth',
            headers={'X-Auth-User': 'usr',
                     'X-Auth-Key': 'key'}).get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
    def test_get_token_fail_non_matching_account_in_request(self):
        resp = self._make_request(
            '/auth/v1/act/auth',
            headers={'X-Auth-User': 'act2:usr',
                     'X-Auth-Key': 'key'}).get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
    def test_get_token_fail_bad_path(self):
        resp = self._make_request(
            '/auth/v1/act/auth/invalid',
            headers={'X-Auth-User': 'act:usr',
                     'X-Auth-Key': 'key'}).get_response(self.test_auth)
        self.assertEquals(resp.status_int, 400)
    def test_get_token_fail_missing_key(self):
        resp = self._make_request(
            '/auth/v1/act/auth',
            headers={'X-Auth-User': 'act:usr'}).get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
    def test_storage_url_default(self):
        # With no Host header the URL falls back to SERVER_NAME:SERVER_PORT.
        self.test_auth = \
            auth.filter_factory({'user_test_tester': 'testing'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})
        del req.environ['HTTP_HOST']
        req.environ['SERVER_NAME'] = 'bob'
        req.environ['SERVER_PORT'] = '1234'
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 200)
        self.assertEquals(resp.headers['x-storage-url'],
                          'http://bob:1234/v1/AUTH_test')
    def test_storage_url_based_on_host(self):
        # The Host header takes precedence over SERVER_NAME/SERVER_PORT.
        self.test_auth = \
            auth.filter_factory({'user_test_tester': 'testing'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})
        req.environ['HTTP_HOST'] = 'somehost:5678'
        req.environ['SERVER_NAME'] = 'bob'
        req.environ['SERVER_PORT'] = '1234'
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 200)
        self.assertEquals(resp.headers['x-storage-url'],
                          'http://somehost:5678/v1/AUTH_test')
    def test_storage_url_overriden_scheme(self):
        # storage_url_scheme in the config overrides the URL scheme.
        self.test_auth = \
            auth.filter_factory({'user_test_tester': 'testing',
                                 'storage_url_scheme': 'fake'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})
        req.environ['HTTP_HOST'] = 'somehost:5678'
        req.environ['SERVER_NAME'] = 'bob'
        req.environ['SERVER_PORT'] = '1234'
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 200)
        self.assertEquals(resp.headers['x-storage-url'],
                          'fake://somehost:5678/v1/AUTH_test')
    def test_use_old_token_from_memcached(self):
        # A still-valid cached token is reused instead of minting a new one.
        self.test_auth = \
            auth.filter_factory({'user_test_tester': 'testing',
                                 'storage_url_scheme': 'fake'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})
        req.environ['HTTP_HOST'] = 'somehost:5678'
        req.environ['SERVER_NAME'] = 'bob'
        req.environ['SERVER_PORT'] = '1234'
        req.environ['swift.cache'].set('AUTH_/user/test:tester', 'uuid_token')
        req.environ['swift.cache'].set('AUTH_/token/uuid_token',
                                       (time() + 180, 'test,test:tester'))
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 200)
        self.assertEquals(resp.headers['x-auth-token'], 'uuid_token')
    def test_old_token_overdate(self):
        # An expired cached token (expiry 0) forces a fresh AUTH_tk token.
        self.test_auth = \
            auth.filter_factory({'user_test_tester': 'testing',
                                 'storage_url_scheme': 'fake'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})
        req.environ['HTTP_HOST'] = 'somehost:5678'
        req.environ['SERVER_NAME'] = 'bob'
        req.environ['SERVER_PORT'] = '1234'
        req.environ['swift.cache'].set('AUTH_/user/test:tester', 'uuid_token')
        req.environ['swift.cache'].set('AUTH_/token/uuid_token',
                                       (0, 'test,test:tester'))
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 200)
        self.assertNotEquals(resp.headers['x-auth-token'], 'uuid_token')
        self.assertEquals(resp.headers['x-auth-token'][:7], 'AUTH_tk')
    def test_old_token_with_old_data(self):
        # A cached token whose group data no longer matches is replaced.
        self.test_auth = \
            auth.filter_factory({'user_test_tester': 'testing',
                                 'storage_url_scheme': 'fake'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})
        req.environ['HTTP_HOST'] = 'somehost:5678'
        req.environ['SERVER_NAME'] = 'bob'
        req.environ['SERVER_PORT'] = '1234'
        req.environ['swift.cache'].set('AUTH_/user/test:tester', 'uuid_token')
        req.environ['swift.cache'].set('AUTH_/token/uuid_token',
                                       (time() + 99, 'test,test:tester,.role'))
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 200)
        self.assertNotEquals(resp.headers['x-auth-token'], 'uuid_token')
        self.assertEquals(resp.headers['x-auth-token'][:7], 'AUTH_tk')
    def test_reseller_admin_is_owner(self):
        # Wrap authorize to observe the swift_owner flag it sets.
        orig_authorize = self.test_auth.authorize
        owner_values = []
        def mitm_authorize(req):
            rv = orig_authorize(req)
            owner_values.append(req.environ.get('swift_owner', False))
            return rv
        self.test_auth.authorize = mitm_authorize
        req = self._make_request('/v1/AUTH_cfa',
                                 headers={'X-Auth-Token': 'AUTH_t'})
        req.remote_user = '.reseller_admin'
        self.test_auth.authorize(req)
        self.assertEquals(owner_values, [True])
    def test_admin_is_owner(self):
        orig_authorize = self.test_auth.authorize
        owner_values = []
        def mitm_authorize(req):
            rv = orig_authorize(req)
            owner_values.append(req.environ.get('swift_owner', False))
            return rv
        self.test_auth.authorize = mitm_authorize
        req = self._make_request(
            '/v1/AUTH_cfa',
            headers={'X-Auth-Token': 'AUTH_t'})
        req.remote_user = 'AUTH_cfa'
        self.test_auth.authorize(req)
        self.assertEquals(owner_values, [True])
    def test_regular_is_not_owner(self):
        orig_authorize = self.test_auth.authorize
        owner_values = []
        def mitm_authorize(req):
            rv = orig_authorize(req)
            owner_values.append(req.environ.get('swift_owner', False))
            return rv
        self.test_auth.authorize = mitm_authorize
        req = self._make_request(
            '/v1/AUTH_cfa/c',
            headers={'X-Auth-Token': 'AUTH_t'})
        req.remote_user = 'act:usr'
        self.test_auth.authorize(req)
        self.assertEquals(owner_values, [False])
    def test_sync_request_success(self):
        # Matching x-container-sync-key + timestamp allows the request.
        self.test_auth.app = FakeApp(iter([('204 No Content', {}, '')]),
                                     sync_key='secret')
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'secret',
                     'x-timestamp': '123.456'})
        req.remote_addr = '127.0.0.1'
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 204)
    def test_sync_request_fail_key(self):
        # Wrong key, mismatched key, and missing container key all fail.
        self.test_auth.app = FakeApp(iter([('204 No Content', {}, '')]),
                                     sync_key='secret')
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'wrongsecret',
                     'x-timestamp': '123.456'})
        req.remote_addr = '127.0.0.1'
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
        self.test_auth.app = FakeApp(iter([('204 No Content', {}, '')]),
                                     sync_key='othersecret')
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'secret',
                     'x-timestamp': '123.456'})
        req.remote_addr = '127.0.0.1'
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
        self.test_auth.app = FakeApp(iter([('204 No Content', {}, '')]),
                                     sync_key=None)
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'secret',
                     'x-timestamp': '123.456'})
        req.remote_addr = '127.0.0.1'
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
    def test_sync_request_fail_no_timestamp(self):
        self.test_auth.app = FakeApp(iter([('204 No Content', {}, '')]),
                                     sync_key='secret')
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'secret'})
        req.remote_addr = '127.0.0.1'
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)
    def test_sync_request_success_lb_sync_host(self):
        # Load balancers: client IP may come via x-forwarded-for or
        # x-cluster-client-ip rather than remote_addr.
        self.test_auth.app = FakeApp(iter([('204 No Content', {}, '')]),
                                     sync_key='secret')
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'secret',
                     'x-timestamp': '123.456',
                     'x-forwarded-for': '127.0.0.1'})
        req.remote_addr = '127.0.0.2'
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 204)
        self.test_auth.app = FakeApp(iter([('204 No Content', {}, '')]),
                                     sync_key='secret')
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'secret',
                     'x-timestamp': '123.456',
                     'x-cluster-client-ip': '127.0.0.1'})
        req.remote_addr = '127.0.0.2'
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 204)
    def test_options_call(self):
        # OPTIONS (CORS preflight) is never denied by authorize.
        req = self._make_request('/v1/AUTH_cfa/c/o',
                                 environ={'REQUEST_METHOD': 'OPTIONS'})
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp, None)
    def test_get_user_group(self):
        # .admin membership adds the account owner group (AUTH_<account>).
        app = FakeApp()
        ath = auth.filter_factory({})(app)
        ath.users = {'test:tester': {'groups': ['.admin']}}
        groups = ath._get_user_groups('test', 'test:tester', 'AUTH_test')
        self.assertEquals(groups, 'test,test:tester,AUTH_test')
        ath.users = {'test:tester': {'groups': []}}
        groups = ath._get_user_groups('test', 'test:tester', 'AUTH_test')
        self.assertEquals(groups, 'test,test:tester')
class TestParseUserCreation(unittest.TestCase):
    """Tests for parsing user_* / user64_* entries from the filter config."""
    def test_parse_user_creation(self):
        auth_filter = auth.filter_factory({
            'reseller_prefix': 'ABC',
            'user_test_tester3': 'testing',
            'user_has_url': 'urlly .admin http://a.b/v1/DEF_has',
            'user_admin_admin': 'admin .admin .reseller_admin',
        })(FakeApp())
        self.assertEquals(auth_filter.users, {
            'admin:admin': {
                'url': '$HOST/v1/ABC_admin',
                'groups': ['.admin', '.reseller_admin'],
                'key': 'admin'
            }, 'test:tester3': {
                'url': '$HOST/v1/ABC_test',
                'groups': [],
                'key': 'testing'
            }, 'has:url': {
                'url': 'http://a.b/v1/DEF_has',
                'groups': ['.admin'],
                'key': 'urlly'
            },
        })
    def test_base64_encoding(self):
        # b64encode operates on bytes; decode back to text so the config keys
        # are native strings on both Python 2 and Python 3.  (The old code
        # passed str literals straight in, which raises TypeError on py3.)
        def b64_label(value):
            return b64encode(value).rstrip(b'=').decode('ascii')
        auth_filter = auth.filter_factory({
            'reseller_prefix': 'ABC',
            'user64_%s_%s' % (b64_label(b'test'), b64_label(b'tester3')):
                'testing .reseller_admin',
            'user64_%s_%s' % (b64_label(b'user_foo'), b64_label(b'ab')):
                'urlly .admin http://a.b/v1/DEF_has',
        })(FakeApp())
        self.assertEquals(auth_filter.users, {
            'test:tester3': {
                'url': '$HOST/v1/ABC_test',
                'groups': ['.reseller_admin'],
                'key': 'testing'
            }, 'user_foo:ab': {
                'url': 'http://a.b/v1/DEF_has',
                'groups': ['.admin'],
                'key': 'urlly'
            },
        })
    def test_key_with_no_value(self):
        # An empty key for a user entry must raise ValueError at parse time.
        self.assertRaises(ValueError, auth.filter_factory({
            'user_test_tester3': 'testing',
            'user_bob_bobby': '',
            'user_admin_admin': 'admin .admin .reseller_admin',
        }), FakeApp())
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
ClusterLabs/pcs | refs/heads/master | pcs/snmp/pcs_snmp_agent.py | 3 | import os
import sys
import logging
import logging.handlers
# pylint: disable=import-error
import pyagentx
import pcs.utils
from pcs.snmp import settings
from pcs.snmp.updaters.v1 import ClusterPcsV1Updater
# Module-level logger; the NullHandler suppresses "no handlers could be
# found" warnings when the embedding application never configures logging.
logger = logging.getLogger("pcs.snmp")
logger.addHandler(logging.NullHandler())
def is_debug():
    """Return True when PCS_SNMP_AGENT_DEBUG enables debug mode."""
    flag = os.environ.get("PCS_SNMP_AGENT_DEBUG", "").lower()
    return flag in ("true", "on", "1")
def get_update_interval():
    """Return the SNMP updater refresh interval in seconds.

    Reads PCS_SNMP_AGENT_UPDATE_INTERVAL from the environment and falls back
    to settings.DEFAULT_UPDATE_INTERVAL when the variable is unset, empty,
    not a float, or smaller than 1.0.
    """
    interval = os.environ.get("PCS_SNMP_AGENT_UPDATE_INTERVAL")
    if not interval:
        return settings.DEFAULT_UPDATE_INTERVAL

    def _log_invalid_value(_value):
        logger.warning(
            "Invalid update interval value: '%s' is not >= 1.0", str(_value)
        )
        logger.debug(
            "Using default update interval: %s",
            str(settings.DEFAULT_UPDATE_INTERVAL),
        )

    try:
        interval = float(interval)
    except ValueError:
        _log_invalid_value(interval)
        return settings.DEFAULT_UPDATE_INTERVAL
    # Bug fix: the warning declares values >= 1.0 valid, but the old check
    # used "<= 1.0" and therefore also rejected exactly 1.0.  Use a strict
    # less-than so the code matches the documented contract.
    if interval < 1.0:
        _log_invalid_value(interval)
        return settings.DEFAULT_UPDATE_INTERVAL
    return interval
def setup_logging(debug=False):
    """Attach a file handler to the 'pyagentx' and 'pcs' loggers.

    When `debug` is True the log level is lowered to DEBUG and the flag is
    propagated into pcs.utils.pcs_options so the ruby side of pcs also runs
    in debug mode.
    """
    level = logging.DEBUG if debug else logging.INFO
    # this is required to enable debug also in the ruby code
    # key '--debug' has to be added
    pcs.utils.pcs_options["--debug"] = debug
    handler = logging.handlers.WatchedFileHandler(
        settings.LOG_FILE, encoding="utf8"
    )
    handler.setLevel(level)
    handler.setFormatter(
        logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        )
    )
    for logger_name in ("pyagentx", "pcs"):
        target_logger = logging.getLogger(logger_name)
        target_logger.setLevel(level)
        target_logger.addHandler(handler)
class PcsAgent(pyagentx.Agent):
    """AgentX subagent exposing pcs cluster data over SNMP."""
    def setup(self):
        # Register the v1 updater under <PCS_OID>.1; pyagentx will invoke
        # its update() every `freq` seconds.
        update_interval = get_update_interval()
        logger.info("Update interval set to: %s", str(update_interval))
        self.register(
            settings.PCS_OID + ".1",
            ClusterPcsV1Updater,
            freq=update_interval,
        )
def main():
    """Entry point: configure logging and run the AgentX agent loop."""
    setup_logging(is_debug())
    # Bind `agent` outside the try block: the old code called agent.stop()
    # in both handlers even when PcsAgent() itself raised, which would
    # crash with a NameError instead of reporting the real problem.
    agent = None
    try:
        agent = PcsAgent()
        agent.start()
    # pylint: disable=broad-except
    except Exception as e:
        print("Unhandled exception: {0}".format(str(e)))
        if agent is not None:
            agent.stop()
        sys.exit(1)
    except KeyboardInterrupt:
        if agent is not None:
            agent.stop()
|
antonve/s4-project-mooc | refs/heads/master | common/lib/calc/calc/__init__.py | 270 | """
Ideally, we wouldn't need to pull in all the calc symbols here,
but courses were using 'import calc', so we need this for
backwards compatibility
"""
from calc import *
|
blackzw/openwrt_sdk_dev1 | refs/heads/master | staging_dir/target-mips_r2_uClibc-0.9.33.2/usr/lib/python2.7/distutils/command/sdist.py | 68 | """distutils.command.sdist
Implements the Distutils 'sdist' command (create a source distribution)."""
__revision__ = "$Id$"
import os
import string
import sys
from glob import glob
from warnings import warn
from distutils.core import Command
from distutils import dir_util, dep_util, file_util, archive_util
from distutils.text_file import TextFile
from distutils.errors import (DistutilsPlatformError, DistutilsOptionError,
DistutilsTemplateError)
from distutils.filelist import FileList
from distutils import log
from distutils.util import convert_path
def show_formats():
    """Print all possible values for the 'formats' option (used by
    the "--help-formats" command-line option).
    """
    from distutils.fancy_getopt import FancyGetopt
    from distutils.archive_util import ARCHIVE_FORMATS
    # Build one (long-option, short-option, help) triple per archive format,
    # sorted by option name for stable help output.
    entries = sorted(
        ("formats=" + name, None, ARCHIVE_FORMATS[name][2])
        for name in ARCHIVE_FORMATS
    )
    FancyGetopt(entries).print_help(
        "List of available source distribution formats:")
class sdist(Command):
description = "create a source distribution (tarball, zip file, etc.)"
    def checking_metadata(self):
        """Callable used for the check sub-command.
        Placed here so user_options can view it"""
        # Predicate consulted by `sub_commands` below to decide whether the
        # 'check' command runs before building the sdist.
        return self.metadata_check
user_options = [
('template=', 't',
"name of manifest template file [default: MANIFEST.in]"),
('manifest=', 'm',
"name of manifest file [default: MANIFEST]"),
('use-defaults', None,
"include the default file set in the manifest "
"[default; disable with --no-defaults]"),
('no-defaults', None,
"don't include the default file set"),
('prune', None,
"specifically exclude files/directories that should not be "
"distributed (build tree, RCS/CVS dirs, etc.) "
"[default; disable with --no-prune]"),
('no-prune', None,
"don't automatically exclude anything"),
('manifest-only', 'o',
"just regenerate the manifest and then stop "
"(implies --force-manifest)"),
('force-manifest', 'f',
"forcibly regenerate the manifest and carry on as usual. "
"Deprecated: now the manifest is always regenerated."),
('formats=', None,
"formats for source distribution (comma-separated list)"),
('keep-temp', 'k',
"keep the distribution tree around after creating " +
"archive file(s)"),
('dist-dir=', 'd',
"directory to put the source distribution archive(s) in "
"[default: dist]"),
('metadata-check', None,
"Ensure that all required elements of meta-data "
"are supplied. Warn if any missing. [default]"),
('owner=', 'u',
"Owner name used when creating a tar file [default: current user]"),
('group=', 'g',
"Group name used when creating a tar file [default: current group]"),
]
boolean_options = ['use-defaults', 'prune',
'manifest-only', 'force-manifest',
'keep-temp', 'metadata-check']
help_options = [
('help-formats', None,
"list available distribution formats", show_formats),
]
negative_opt = {'no-defaults': 'use-defaults',
'no-prune': 'prune' }
default_format = {'posix': 'gztar',
'nt': 'zip' }
sub_commands = [('check', checking_metadata)]
    def initialize_options(self):
        """Set all options to their pre-parse defaults."""
        # 'template' and 'manifest' are, respectively, the names of
        # the manifest template and manifest file.
        self.template = None
        self.manifest = None
        # 'use_defaults': if true, we will include the default file set
        # in the manifest
        self.use_defaults = 1
        self.prune = 1
        self.manifest_only = 0
        self.force_manifest = 0
        self.formats = None
        self.keep_temp = 0
        self.dist_dir = None
        # Filled in by make_distribution() with the archive paths created.
        self.archive_files = None
        self.metadata_check = 1
        # tar ownership overrides; None means the current user/group.
        self.owner = None
        self.group = None
def finalize_options(self):
if self.manifest is None:
self.manifest = "MANIFEST"
if self.template is None:
self.template = "MANIFEST.in"
self.ensure_string_list('formats')
if self.formats is None:
try:
self.formats = [self.default_format[os.name]]
except KeyError:
raise DistutilsPlatformError, \
"don't know how to create source distributions " + \
"on platform %s" % os.name
bad_format = archive_util.check_archive_formats(self.formats)
if bad_format:
raise DistutilsOptionError, \
"unknown archive format '%s'" % bad_format
if self.dist_dir is None:
self.dist_dir = "dist"
    def run(self):
        """Build the file manifest and create the distribution archive(s)."""
        # 'filelist' contains the list of files that will make up the
        # manifest
        self.filelist = FileList()
        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)
        # Do whatever it takes to get the list of files to process
        # (process the manifest template, read an existing manifest,
        # whatever).  File list is accumulated in 'self.filelist'.
        self.get_file_list()
        # If user just wanted us to regenerate the manifest, stop now.
        if self.manifest_only:
            return
        # Otherwise, go ahead and create the source distribution tarball,
        # or zipfile, or whatever.
        self.make_distribution()
    def check_metadata(self):
        """Deprecated API."""
        # Kept only for backward compatibility; delegates to the 'check'
        # command, which performs the actual metadata validation.
        warn("distutils.command.sdist.check_metadata is deprecated, \
              use the check command instead", PendingDeprecationWarning)
        check = self.distribution.get_command_obj('check')
        check.ensure_finalized()
        check.run()
    def get_file_list(self):
        """Figure out the list of files to include in the source
        distribution, and put it in 'self.filelist'.  This might involve
        reading the manifest template (and writing the manifest), or just
        reading the manifest, or just using the default file set -- it all
        depends on the user's options.
        """
        # new behavior when using a template:
        # the file list is recalculated everytime because
        # even if MANIFEST.in or setup.py are not changed
        # the user might have added some files in the tree that
        # need to be included.
        #
        # This makes --force the default and only behavior with templates.
        template_exists = os.path.isfile(self.template)
        if not template_exists and self._manifest_is_not_generated():
            # Hand-written manifest, no template: trust it as-is.
            self.read_manifest()
            self.filelist.sort()
            self.filelist.remove_duplicates()
            return
        if not template_exists:
            self.warn(("manifest template '%s' does not exist " +
                       "(using default file list)") %
                      self.template)
        # Recompute the file list from scratch.
        self.filelist.findall()
        if self.use_defaults:
            self.add_defaults()
        if template_exists:
            self.read_template()
        if self.prune:
            self.prune_file_list()
        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()
def add_defaults(self):
    """Add all the default files to self.filelist:
      - README or README.txt
      - setup.py
      - test/test*.py
      - all pure Python modules mentioned in setup script
      - all files pointed by package_data (build_py)
      - all files defined in data_files.
      - all files defined as scripts.
      - all C sources listed as part of extensions or C libraries
        in the setup script (doesn't catch C headers!)
    Warns if (README or README.txt) or setup.py are missing; everything
    else is optional.
    """
    standards = [('README', 'README.txt'), self.distribution.script_name]
    for fn in standards:
        if isinstance(fn, tuple):
            # A tuple means "any one of these will do"; first hit wins.
            # Use a dedicated loop variable: the original reused 'fn',
            # clobbering the outer loop variable.
            alts = fn
            got_it = False
            for alt in alts:
                if os.path.exists(alt):
                    got_it = True
                    self.filelist.append(alt)
                    break

            if not got_it:
                # ', '.join() works on both Python 2 and 3;
                # string.join() no longer exists in Python 3.
                self.warn("standard file not found: should have one of " +
                          ', '.join(alts))
        else:
            if os.path.exists(fn):
                self.filelist.append(fn)
            else:
                self.warn("standard file '%s' not found" % fn)

    optional = ['test/test*.py', 'setup.cfg']
    for pattern in optional:
        # List comprehension instead of filter(): on Python 3, filter()
        # returns a lazy iterator, which is always truthy even when empty.
        files = [f for f in glob(pattern) if os.path.isfile(f)]
        if files:
            self.filelist.extend(files)

    # build_py is used to get:
    #  - python modules
    #  - files defined in package_data
    build_py = self.get_finalized_command('build_py')

    # getting python files
    if self.distribution.has_pure_modules():
        self.filelist.extend(build_py.get_source_files())

    # getting package_data files
    # (computed in build_py.data_files by build_py.finalize_options)
    for pkg, src_dir, build_dir, filenames in build_py.data_files:
        for filename in filenames:
            self.filelist.append(os.path.join(src_dir, filename))

    # getting distribution.data_files
    if self.distribution.has_data_files():
        for item in self.distribution.data_files:
            if isinstance(item, str):   # plain file
                item = convert_path(item)
                if os.path.isfile(item):
                    self.filelist.append(item)
            else:   # a (dirname, filenames) tuple
                dirname, filenames = item
                for f in filenames:
                    f = convert_path(f)
                    if os.path.isfile(f):
                        self.filelist.append(f)

    if self.distribution.has_ext_modules():
        build_ext = self.get_finalized_command('build_ext')
        self.filelist.extend(build_ext.get_source_files())

    if self.distribution.has_c_libraries():
        build_clib = self.get_finalized_command('build_clib')
        self.filelist.extend(build_clib.get_source_files())

    if self.distribution.has_scripts():
        build_scripts = self.get_finalized_command('build_scripts')
        self.filelist.extend(build_scripts.get_source_files())
def read_template(self):
    """Read and parse manifest template file named by self.template.

    (usually "MANIFEST.in") The parsing and processing is done by
    'self.filelist', which updates itself accordingly.
    """
    log.info("reading manifest template '%s'", self.template)
    template = TextFile(self.template,
                        strip_comments=1,
                        skip_blanks=1,
                        join_lines=1,
                        lstrip_ws=1,
                        rstrip_ws=1,
                        collapse_join=1)

    try:
        while 1:
            line = template.readline()
            if line is None:            # end of file
                break

            try:
                self.filelist.process_template_line(line)
            # the call above can raise a DistutilsTemplateError for
            # malformed lines, or a ValueError from the lower-level
            # convert_path function
            except (DistutilsTemplateError, ValueError) as msg:
                # A bad template line is a warning, not a fatal error;
                # keep processing the remaining lines.
                self.warn("%s, line %d: %s" % (template.filename,
                                               template.current_line,
                                               msg))
    finally:
        # TextFile holds an open handle on the template; always close it.
        template.close()
def prune_file_list(self):
    """Prune off branches that might slip into the file list as created
    by 'read_template()', but really don't belong there:
      * the build tree (typically "build")
      * the release tree itself (only an issue if we ran "sdist"
        previously with --keep-temp, or it aborted)
      * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
    """
    build = self.get_finalized_command('build')
    base_dir = self.distribution.get_fullname()

    # prefix matches remove whole subtrees
    self.filelist.exclude_pattern(None, prefix=build.build_base)
    self.filelist.exclude_pattern(None, prefix=base_dir)

    # pruning out vcs directories
    # both separators are used under win32
    if sys.platform == 'win32':
        seps = r'/|\\'
    else:
        seps = '/'

    vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
                '_darcs']
    # regex: a VCS directory name bounded by path separators (or start)
    vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
    self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
def write_manifest(self):
    """Write the file list in 'self.filelist' (presumably as filled in
    by 'add_defaults()' and 'read_template()') to the manifest file
    named by 'self.manifest'.
    """
    if self._manifest_is_not_generated():
        # Never overwrite a manifest the user maintains by hand.
        log.info("not writing to manually maintained "
                 "manifest file '%s'" % self.manifest)
        return

    # The marker comment lets future runs recognize a generated manifest
    # (see _manifest_is_not_generated()).
    content = self.filelist.files[:]
    content.insert(0, '# file GENERATED by distutils, do NOT edit')
    self.execute(file_util.write_file, (self.manifest, content),
                 "writing manifest file '%s'" % self.manifest)
def _manifest_is_not_generated(self):
# check for special comment used in 2.7.1 and higher
if not os.path.isfile(self.manifest):
return False
fp = open(self.manifest, 'rU')
try:
first_line = fp.readline()
finally:
fp.close()
return first_line != '# file GENERATED by distutils, do NOT edit\n'
def read_manifest(self):
    """Read the manifest file (named by 'self.manifest') and use it to
    fill in 'self.filelist', the list of files to include in the source
    distribution.
    """
    log.info("reading manifest file '%s'", self.manifest)
    # 'with' ensures the manifest is closed even if an append() raises;
    # the original leaked the handle on any exception in the loop.
    with open(self.manifest) as manifest:
        for line in manifest:
            # ignore comments and blank lines
            line = line.strip()
            if line.startswith('#') or not line:
                continue
            self.filelist.append(line)
def make_release_tree(self, base_dir, files):
    """Create the directory tree that will become the source
    distribution archive.  All directories implied by the filenames in
    'files' are created under 'base_dir', and then we hard link or copy
    (if hard linking is unavailable) those files into place.
    Essentially, this duplicates the developer's source tree, but in a
    directory named after the distribution, containing only the files
    to be distributed.
    """
    # Create all the directories under 'base_dir' necessary to
    # put 'files' there; the 'mkpath()' is just so we don't die
    # if the manifest happens to be empty.
    self.mkpath(base_dir)
    dir_util.create_tree(base_dir, files, dry_run=self.dry_run)

    # And walk over the list of files, either making a hard link (if
    # os.link exists) to each one that doesn't already exist in its
    # corresponding location under 'base_dir', or copying each file
    # that's out-of-date in 'base_dir'.  (Usually, all files will be
    # out-of-date, because by default we blow away 'base_dir' when
    # we're done making the distribution archives.)
    if hasattr(os, 'link'):        # can make hard links on this system
        link = 'hard'
        msg = "making hard links in %s..." % base_dir
    else:                          # nope, have to copy
        link = None
        msg = "copying files to %s..." % base_dir

    if not files:
        log.warn("no files to distribute -- empty manifest?")
    else:
        log.info(msg)
    for file in files:
        if not os.path.isfile(file):
            log.warn("'%s' not a regular file -- skipping" % file)
        else:
            dest = os.path.join(base_dir, file)
            self.copy_file(file, dest, link=link)

    # Drop the PKG-INFO metadata file into the release tree as well.
    self.distribution.metadata.write_pkg_info(base_dir)
def make_distribution(self):
    """Create the source distribution(s).  First, we create the release
    tree with 'make_release_tree()'; then, we create all required
    archive files (according to 'self.formats') from the release tree.
    Finally, we clean up by blowing away the release tree (unless
    'self.keep_temp' is true).  The list of archive files created is
    stored so it can be retrieved later by 'get_archive_files()'.
    """
    # Don't warn about missing meta-data here -- should be (and is!)
    # done elsewhere.
    base_dir = self.distribution.get_fullname()
    base_name = os.path.join(self.dist_dir, base_dir)

    self.make_release_tree(base_dir, self.filelist.files)
    archive_files = []              # remember names of files we create
    # tar archive must be created last to avoid overwrite and remove
    if 'tar' in self.formats:
        self.formats.append(self.formats.pop(self.formats.index('tar')))

    for fmt in self.formats:
        file = self.make_archive(base_name, fmt, base_dir=base_dir,
                                 owner=self.owner, group=self.group)
        archive_files.append(file)
        # record the archive so 'upload'/'register' can find it later
        self.distribution.dist_files.append(('sdist', '', file))

    self.archive_files = archive_files

    if not self.keep_temp:
        dir_util.remove_tree(base_dir, dry_run=self.dry_run)
def get_archive_files(self):
    """Return the list of archive files created when the command
    was run, or None if the command hasn't run yet.
    """
    # set by make_distribution(); NOTE(review): presumably initialized
    # to None in initialize_options() -- not visible in this chunk.
    return self.archive_files
|
cristian69/KernotekV3 | refs/heads/master | venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/langthaimodel.py | 2929 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# The following result for thai was collected from a limited sample (1M).
# Character Mapping Table:
TIS620CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40
188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50
253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60
96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70
209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57,
49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54,
45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63,
22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247,
68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 92.6386%
# first 1024 sequences:7.3177%
# rest sequences: 1.0230%
# negative sequences: 0.0436%
ThaiLangModel = (
0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)
# Aggregate language model handed to chardet's single-byte charset prober
# for Thai text encoded as TIS-620.
TIS620ThaiModel = {
    'charToOrderMap': TIS620CharToOrderMap,     # byte value -> frequency-order index
    'precedenceMatrix': ThaiLangModel,          # order-pair -> frequency class (0-3)
    'mTypicalPositiveRatio': 0.926386,          # share covered by the top 512 sequences
    'keepEnglishLetter': False,                 # ASCII letters not kept during analysis
    'charsetName': "TIS-620"
}
# flake8: noqa
|
grangier/django-11599 | refs/heads/master | django/contrib/localflavor/us/__init__.py | 12133432 | |
drxos/python-social-auth | refs/heads/master | social/apps/cherrypy_app/__init__.py | 12133432 | |
inonit/wagtail | refs/heads/master | wagtail/contrib/__init__.py | 12133432 | |
zhenzhai/edx-platform | refs/heads/master | openedx/core/djangoapps/user_api/__init__.py | 12133432 | |
opencache-project/opencache-controller | refs/heads/master | opencache/controller/request/__init__.py | 12133432 | |
bprodoehl/phantomjs | refs/heads/master | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/workspace_mock.py | 191 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class MockWorkspace(object):
    """Test double for Workspace: fabricates filenames and records the
    arguments of create_zip() so tests can assert on them."""

    def find_unused_filename(self, directory, name, extension, search_limit=10):
        # The mock never probes the filesystem; the first candidate name
        # is always reported as unused.
        candidate = "%s/%s.%s" % (directory, name, extension)
        return candidate

    def create_zip(self, zip_path, source_path):
        # Remember what was requested for later inspection by the test.
        self.zip_path = zip_path
        self.source_path = source_path
        # Any non-None value stands in for a real archive object.
        return object()
|
chiefy/kubernetes | refs/heads/master | examples/cluster-dns/images/frontend/client.py | 468 | #!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import requests
import socket
from urlparse import urlparse
def CheckServiceAddress(address):
    """Resolve the hostname of 'address' (a URL) via DNS and print the IP.

    Raises socket.gaierror if the cluster DNS cannot resolve the name.
    """
    hostname = urlparse(address).hostname
    service_address = socket.gethostbyname(hostname)
    print service_address
def GetServerResponse(address):
    """Issue an HTTP GET against 'address' and print the response and body."""
    print 'Send request to:', address
    response = requests.get(address)
    print response
    print response.content
def Main():
    """Parse the target service address from argv, then resolve and query it."""
    parser = argparse.ArgumentParser()
    parser.add_argument('address')
    args = parser.parse_args()
    CheckServiceAddress(args.address)
    GetServerResponse(args.address)


if __name__ == "__main__":
    Main()
|
srm912/servo | refs/heads/master | tests/wpt/web-platform-tests/websockets/handlers/empty-message_wsh.py | 284 | #!/usr/bin/python
from mod_pywebsocket import msgutil
def web_socket_do_extra_handshake(request):
    """Accept every incoming handshake unconditionally."""
    # This test handler performs no origin/protocol validation.
    pass
def web_socket_transfer_data(request):
    """Empty-message echo test: reply 'pass' iff the client sent ""."""
    line = msgutil.receive_message(request)
    if line == "":
        msgutil.send_message(request, 'pass')
    else:
        msgutil.send_message(request, 'fail')
|
gfreed/android_external_chromium-org | refs/heads/android-4.4 | native_client_sdk/src/build_tools/tests/__init__.py | 171 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""tests package."""
|
meletakis/collato | refs/heads/master | lib/python2.7/site-packages/PIL/IptcImagePlugin.py | 42 | #
# The Python Imaging Library.
# $Id$
#
# IPTC/NAA file handling
#
# history:
# 1995-10-01 fl Created
# 1998-03-09 fl Cleaned up and added to PIL
# 2002-06-18 fl Added getiptcinfo helper
#
# Copyright (c) Secret Labs AB 1997-2002.
# Copyright (c) Fredrik Lundh 1995.
#
# See the README file for information on usage and redistribution.
#
__version__ = "0.3"
import Image, ImageFile
import os, tempfile
COMPRESSION = {
1: "raw",
5: "jpeg"
}
PAD = chr(0) * 4
#
# Helpers
def i16(c):
    """Decode the first two characters of 'c' as a big-endian 16-bit int."""
    hi = ord(c[0])
    lo = ord(c[1])
    return lo + (hi << 8)
def i32(c):
    """Decode the first four characters of 'c' as a big-endian 32-bit int."""
    b0, b1, b2, b3 = ord(c[0]), ord(c[1]), ord(c[2]), ord(c[3])
    return b3 + (b2 << 8) + (b1 << 16) + (b0 << 24)
def i(c):
    """Decode 'c' (up to 4 bytes, shorter inputs zero-padded on the left)
    as a big-endian integer."""
    padded = (PAD + c)[-4:]
    return i32(padded)
def dump(c):
    # Debugging helper: print each byte of 'c' as two hex digits on one line.
    # NOTE(review): the loop variable shadows the module-level i() helper
    # inside this function body.
    for i in c:
        print "%02x" % ord(i),
    print
##
# Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields
# from TIFF and JPEG files, use the <b>getiptcinfo</b> function.
class IptcImageFile(ImageFile.ImageFile):
    """PIL image plugin for raw IPTC/NAA datastreams.

    Fields are stored in self.info keyed by (record, dataset) tuples.
    To read IPTC fields embedded in TIFF or JPEG files, use the
    module-level getiptcinfo() helper instead.
    """

    format = "IPTC"
    format_description = "IPTC/NAA"

    def getint(self, key):
        # Decode the raw field stored under 'key' as a big-endian integer.
        return i(self.info[key])

    def field(self):
        """Read the next IPTC field header from self.fp.

        Returns ((record, dataset), size), or (None, 0) at end of file.
        """
        #
        # get a IPTC field header
        s = self.fp.read(5)
        if not len(s):
            return None, 0

        tag = ord(s[1]), ord(s[2])

        # syntax: every field starts with 0x1C; record number is 1-9
        if ord(s[0]) != 0x1C or tag[0] < 1 or tag[0] > 9:
            raise SyntaxError, "invalid IPTC/NAA file"

        # field size: values > 128 introduce an extended (long) length
        size = ord(s[3])
        if size > 132:
            raise IOError, "illegal field length in IPTC/NAA file"
        elif size == 128:
            size = 0
        elif size > 128:
            size = i(self.fp.read(size-128))
        else:
            size = i16(s[3:])

        return tag, size

    def _is_raw(self, offset, size):
        # Heuristic: check whether the 8:10 data blocks form one
        # contiguous raster that could be memory-mapped.
        #
        # check if the file can be mapped

        # DISABLED: the following only slows things down...
        # (everything after this unconditional return is dead code)
        return 0

        self.fp.seek(offset)
        t, sz = self.field()
        if sz != size[0]:
            return 0
        y = 1
        while 1:
            self.fp.seek(sz, 1)
            t, s = self.field()
            if t != (8, 10):
                break
            if s != sz:
                return 0
            y = y + 1
        return y == size[1]

    def _open(self):
        # Parse all descriptive fields up to the first image-data field
        # (record 8, dataset 10), accumulating them in self.info.

        # load descriptive fields
        while 1:
            offset = self.fp.tell()
            tag, size = self.field()
            if not tag or tag == (8,10):
                break
            if size:
                tagdata = self.fp.read(size)
            else:
                tagdata = None
            # repeated tags collect their values into a list
            if tag in self.info.keys():
                if isinstance(self.info[tag], list):
                    self.info[tag].append(tagdata)
                else:
                    self.info[tag] = [self.info[tag], tagdata]
            else:
                self.info[tag] = tagdata

            # print tag, self.info[tag]

        # mode: derived from (3,60) = number of layers / component flag,
        # and (3,65) = 1-based band id for single-band files
        layers = ord(self.info[(3,60)][0])
        component = ord(self.info[(3,60)][1])
        if self.info.has_key((3,65)):
            id = ord(self.info[(3,65)][0])-1
        else:
            id = 0
        if layers == 1 and not component:
            self.mode = "L"
        elif layers == 3 and component:
            self.mode = "RGB"[id]
        elif layers == 4 and component:
            self.mode = "CMYK"[id]

        # size: (3,20) = width, (3,30) = height
        self.size = self.getint((3,20)), self.getint((3,30))

        # compression: (3,120) selects raw or jpeg (see COMPRESSION map)
        try:
            compression = COMPRESSION[self.getint((3,120))]
        except KeyError:
            raise IOError, "Unknown IPTC image compression"

        # tile: 'tag'/'offset'/'size' still describe the 8:10 field that
        # terminated the loop above, i.e. the start of the pixel data
        if tag == (8,10):
            if compression == "raw" and self._is_raw(offset, self.size):
                self.tile = [(compression, (offset, size + 5, -1),
                             (0, 0, self.size[0], self.size[1]))]
            else:
                self.tile = [("iptc", (compression, offset),
                             (0, 0, self.size[0], self.size[1]))]

    def load(self):
        # Custom loader for the "iptc" tile type: spool the 8:10 data
        # blocks into a temporary file and let PIL decode that file.
        if len(self.tile) != 1 or self.tile[0][0] != "iptc":
            return ImageFile.ImageFile.load(self)

        type, tile, box = self.tile[0]

        encoding, offset = tile

        self.fp.seek(offset)

        # Copy image data to temporary file
        outfile = tempfile.mktemp()
        o = open(outfile, "wb")
        if encoding == "raw":
            # To simplify access to the extracted file,
            # prepend a PPM header
            o.write("P5\n%d %d\n255\n" % self.size)
        while 1:
            type, size = self.field()
            if type != (8, 10):
                break
            while size > 0:
                s = self.fp.read(min(size, 8192))
                if not s:
                    break
                o.write(s)
                size = size - len(s)
        o.close()

        try:
            try:
                # fast
                self.im = Image.core.open_ppm(outfile)
            except:
                # slightly slower
                im = Image.open(outfile)
                im.load()
                self.im = im.im
        finally:
            # always remove the temporary spool file
            try: os.unlink(outfile)
            except: pass
# Hook the IPTC reader into PIL's plugin registry.
Image.register_open("IPTC", IptcImageFile)
Image.register_extension("IPTC", ".iim")
##
# Get IPTC information from TIFF, JPEG, or IPTC file.
#
# @param im An image containing IPTC data.
# @return A dictionary containing IPTC information, or None if
# no IPTC information block was found.
def getiptcinfo(im):
    """Extract IPTC/NAA properties from a TIFF, JPEG, or IPTC image.

    Returns a dictionary mapping (record, dataset) tags to raw values,
    or None if no IPTC information block was found.
    """

    import TiffImagePlugin, JpegImagePlugin
    import StringIO

    data = None

    if isinstance(im, IptcImageFile):
        # return info dictionary right away
        return im.info

    elif isinstance(im, JpegImagePlugin.JpegImageFile):
        # extract the IPTC/NAA resource from the Photoshop APP13 marker
        try:
            app = im.app["APP13"]
            if app[:14] == "Photoshop 3.0\x00":
                app = app[14:]
                # parse the image resource block
                offset = 0
                while app[offset:offset+4] == "8BIM":
                    offset = offset + 4
                    # resource code
                    code = JpegImagePlugin.i16(app, offset)
                    offset = offset + 2
                    # resource name (usually empty)
                    name_len = ord(app[offset])
                    name = app[offset+1:offset+1+name_len]
                    offset = 1 + offset + name_len
                    # resource names are padded to even length
                    if offset & 1:
                        offset = offset + 1
                    # resource data block
                    size = JpegImagePlugin.i32(app, offset)
                    offset = offset + 4
                    if code == 0x0404:
                        # 0x0404 contains IPTC/NAA data
                        data = app[offset:offset+size]
                        break
                    offset = offset + size
                    # data blocks are padded to even length too
                    if offset & 1:
                        offset = offset + 1
        except (AttributeError, KeyError):
            pass

    elif isinstance(im, TiffImagePlugin.TiffImageFile):
        # get raw data from the IPTC/NAA tag (PhotoShop tags the data
        # as 4-byte integers, so we cannot use the get method...)
        try:
            type, data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK]
        except (AttributeError, KeyError):
            pass

    if data is None:
        return None # no properties

    # create an IptcImagePlugin object without initializing it
    class FakeImage:
        pass
    im = FakeImage()
    im.__class__ = IptcImageFile

    # parse the IPTC information chunk
    im.info = {}
    im.fp = StringIO.StringIO(data)

    try:
        im._open()
    except (IndexError, KeyError):
        pass # expected failure

    return im.info
|
EnTeQuAk/dotfiles | refs/heads/master | sublime-text-3/Packages/isort/natsort/py23compat.py | 1 | # -*- coding: utf-8 -*-
from __future__ import print_function, division, unicode_literals, absolute_import
import functools
import sys
# These functions are used to make the doctests compatible between
# python2 and python3. This code is pretty much lifted from the iPython
# project's py3compat.py file. Credit to the iPython devs.

# NOTE(review): sys.version[0] compares the first *character* of the
# version string; it distinguishes 2.x from 3.x but would misread a
# hypothetical "10.x".  sys.version_info[0] would be the robust spelling.

# Assume all strings are Unicode in Python 2
py23_str = str if sys.version[0] == '3' else unicode

# Use the range iterator always
py23_range = range if sys.version[0] == '3' else xrange

# Uniform base string type
py23_basestring = str if sys.version[0] == '3' else basestring

# zip as an iterator
if sys.version[0] == '3':
    py23_zip = zip
else:
    import itertools
    py23_zip = itertools.izip
# This function is intended to decorate other functions that will modify
# either a string directly, or a function's docstring.
def _modify_str_or_docstring(str_change_func):
    """Lift a string-transforming function so it also works as a decorator.

    The returned wrapper accepts either a plain string (the transformed
    string is returned) or a function (its __doc__ is rewritten in place
    and the function itself is returned).
    """
    @functools.wraps(str_change_func)
    def wrapper(func_or_str):
        # Decide whether we were handed text or a callable.
        if isinstance(func_or_str, py23_basestring):
            target, doc = None, func_or_str
        else:
            target, doc = func_or_str, func_or_str.__doc__
        new_doc = str_change_func(doc)
        if not target:
            return new_doc
        target.__doc__ = new_doc
        return target
    return wrapper
# Properly modify a docstring to either have the unicode literal or not.
if sys.version[0] == '3':
    # Abstract u'abc' syntax:
    @_modify_str_or_docstring
    def u_format(s):
        """"{u}'abc'" --> "'abc'" (Python 3)

        Accepts a string or a function, so it can be used as a decorator."""
        return s.format(u='')
else:
    # Abstract u'abc' syntax:
    @_modify_str_or_docstring
    def u_format(s):
        """"{u}'abc'" --> "u'abc'" (Python 2)

        Accepts a string or a function, so it can be used as a decorator."""
        return s.format(u='u')
|
astrofrog/glue-vispy-viewers | refs/heads/master | glue_vispy_viewers/extern/vispy/glsl/collections/__init__.py | 12133432 | |
ZhangBohan/KoalaAPI | refs/heads/master | KoalaAPI/views/__init__.py | 1 | from flask import Blueprint
from leancloud import Object
main_view = Blueprint('main', __name__)
__all__ = ['tuchuang', 'auth']
class GitHubUser(Object):
    # LeanCloud-backed model; the class name doubles as the storage
    # class (table) name on the backend.
    pass


# 'Files' is the LeanCloud class (table) name backing File objects.
File = Object.extend('Files')
def leanobject_to_dict(lo):
    """Serialize a LeanCloud Object into a plain dict.

    Copies the object's stored attributes and adds the metadata fields
    'id', 'created_at' and 'updated_at'.

    Note: the previous implementation assigned straight into
    lo.attributes, silently injecting the metadata keys into the
    object's own attribute store; copying first avoids that mutation
    while returning the same dictionary contents.
    """
    data = dict(lo.attributes)
    data['id'] = lo.id
    data['created_at'] = lo.created_at
    data['updated_at'] = lo.updated_at
    return data
from . import *
|
atarun/web | refs/heads/master | public/dev/Cocos2dJS/HelloWorld/tools/bindings-generator/generator.py | 17 | #!/usr/bin/env python
# generator.py
# simple C++ generator, originally targetted for Spidermonkey bindings
#
# Copyright (c) 2011 - Zynga Inc.
from clang import cindex
import sys
import pdb
import ConfigParser
import yaml
import re
import os
import inspect
import traceback
from Cheetah.Template import Template
# Maps libclang primitive TypeKind values to the C/C++ type name emitted
# into the generated binding code.
type_map = {
    cindex.TypeKind.VOID        : "void",
    cindex.TypeKind.BOOL        : "bool",
    cindex.TypeKind.CHAR_U      : "unsigned char",
    cindex.TypeKind.UCHAR       : "unsigned char",
    cindex.TypeKind.CHAR16      : "char",
    cindex.TypeKind.CHAR32      : "char",
    cindex.TypeKind.USHORT      : "unsigned short",
    cindex.TypeKind.UINT        : "unsigned int",
    cindex.TypeKind.ULONG       : "unsigned long",
    cindex.TypeKind.ULONGLONG   : "unsigned long long",
    cindex.TypeKind.CHAR_S      : "char",
    cindex.TypeKind.SCHAR       : "char",
    cindex.TypeKind.WCHAR       : "wchar_t",
    cindex.TypeKind.SHORT       : "short",
    cindex.TypeKind.INT         : "int",
    cindex.TypeKind.LONG        : "long",
    cindex.TypeKind.LONGLONG    : "long long",
    cindex.TypeKind.FLOAT       : "float",
    cindex.TypeKind.DOUBLE      : "double",
    cindex.TypeKind.LONGDOUBLE  : "long double",
    cindex.TypeKind.NULLPTR     : "NULL",
    cindex.TypeKind.OBJCID      : "id",
    cindex.TypeKind.OBJCCLASS   : "class",
    cindex.TypeKind.OBJCSEL     : "SEL",
    # cindex.TypeKind.ENUM        : "int"
}

# Placeholder spelling for a type that cannot be mapped; any function using
# such a type is later marked "not supported" and skipped.
INVALID_NATIVE_TYPE = "??"

# Cursor kinds that signal a default-argument expression inside a parameter
# declaration (used to compute a method's minimum argument count).
default_arg_type_arr = [

    # An integer literal.
    cindex.CursorKind.INTEGER_LITERAL,

    # A floating point number literal.
    cindex.CursorKind.FLOATING_LITERAL,

    # An imaginary number literal.
    cindex.CursorKind.IMAGINARY_LITERAL,

    # A string literal.
    cindex.CursorKind.STRING_LITERAL,

    # A character literal.
    cindex.CursorKind.CHARACTER_LITERAL,

    # [C++ 2.13.5] C++ Boolean Literal.
    cindex.CursorKind.CXX_BOOL_LITERAL_EXPR,

    # [C++0x 2.14.7] C++ Pointer Literal.
    cindex.CursorKind.CXX_NULL_PTR_LITERAL_EXPR,

    # An expression that refers to some value declaration, such as a function,
    # variable, or enumerator.
    cindex.CursorKind.DECL_REF_EXPR
]
def native_name_from_type(ntype, underlying=False):
    """Return the C/C++ spelling for a libclang type.

    Enums resolve to their namespaced declaration name (unless *underlying*
    is True), primitives go through ``type_map``, records are special-cased
    for std::string / std::function, and anything unmappable yields
    ``INVALID_NATIVE_TYPE``.
    """
    kind = ntype.kind  # get_canonical().kind
    const_prefix = ""  # "const " if ntype.is_const_qualified() else ""

    if not underlying and kind == cindex.TypeKind.ENUM:
        return get_namespaced_name(ntype.get_declaration())

    if kind in type_map:
        return const_prefix + type_map[kind]

    if kind == cindex.TypeKind.RECORD:
        # might be an std::string
        decl = ntype.get_declaration()
        parent = decl.semantic_parent
        cdecl = ntype.get_canonical().get_declaration()
        cparent = cdecl.semantic_parent
        if decl.spelling == "string" and parent and parent.spelling == "std":
            return "std::string"
        if cdecl.spelling == "function" and cparent and cparent.spelling == "std":
            return "std::function"
        # print >> sys.stderr, "probably a function pointer: " + str(decl.spelling)
        return const_prefix + decl.spelling

    # name = ntype.get_declaration().spelling
    # print >> sys.stderr, "Unknown type: " + str(kind) + " " + str(name)
    return INVALID_NATIVE_TYPE
    # pdb.set_trace()
def build_namespace(cursor, namespaces=None):
    '''
    Build the list of enclosing namespace/class names for *cursor*, walking
    up through its semantic parents (innermost scope first).

    The accumulator now defaults to None instead of a mutable ``[]``: with
    a shared list default, successive top-level calls would silently keep
    appending to the same list object.
    '''
    if namespaces is None:
        namespaces = []
    if cursor:
        parent = cursor.semantic_parent
        if parent:
            if parent.kind == cindex.CursorKind.NAMESPACE or parent.kind == cindex.CursorKind.CLASS_DECL:
                namespaces.append(parent.displayname)
                build_namespace(parent, namespaces)

    return namespaces
def get_namespaced_name(declaration_cursor):
    """Return the cursor's display name with its full scope prefix
    (``ns1::ns2::Name``), or just the display name at global scope."""
    enclosing = build_namespace(declaration_cursor, [])
    prefix = "::".join(reversed(enclosing))
    if prefix:
        return prefix + "::" + declaration_cursor.displayname
    return declaration_cursor.displayname
def generate_namespace_list(cursor, namespaces=None):
    '''
    Build the list of enclosing pure-namespace names for *cursor*; class
    scopes are traversed but (unlike build_namespace) not recorded.

    The accumulator now defaults to None instead of a mutable ``[]``,
    which would otherwise be shared across top-level calls.
    '''
    if namespaces is None:
        namespaces = []
    if cursor:
        parent = cursor.semantic_parent
        if parent:
            if parent.kind == cindex.CursorKind.NAMESPACE or parent.kind == cindex.CursorKind.CLASS_DECL:
                if parent.kind == cindex.CursorKind.NAMESPACE:
                    namespaces.append(parent.displayname)
                generate_namespace_list(parent, namespaces)
    return namespaces
def get_namespace_name(declaration_cursor):
    """Return the namespace prefix of the cursor, e.g. ``ns1::ns2::``.

    NOTE(review): at global scope this falls back to returning the
    cursor's displayname instead of an empty string — looks suspicious,
    but existing callers may rely on it, so the behavior is preserved.
    """
    parts = generate_namespace_list(declaration_cursor, [])
    prefix = "::".join(reversed(parts))
    if prefix:
        return prefix + "::"
    return declaration_cursor.displayname
class NativeType(object):
    """Wraps a libclang type with everything the generator needs: C++
    spellings, namespace info, const/pointer/enum flags, and the logic to
    pick the script<->native conversion snippet from conversions.yaml."""

    def __init__(self):
        self.is_object = False      # True for class types (bound objects)
        self.is_function = False    # True for std::function types
        self.is_enum = False
        self.not_supported = False  # set when the type cannot be converted
        self.param_types = []       # std::function parameter NativeTypes
        self.ret_type = None        # std::function return NativeType
        self.namespaced_name = ""   # e.g. "cocos2d::Sprite"
        self.namespace_name = ""    # e.g. "cocos2d::"
        self.name = ""              # e.g. "Sprite"
        self.whole_name = None      # full spelling incl. const / * / &
        self.is_const = False
        self.is_pointer = False
        self.canonical_type = None  # underlying NativeType for typedefs

    @staticmethod
    def from_type(ntype):
        """Build a NativeType from a clang.cindex.Type.

        Pointers and lvalue references recurse on the pointee and then
        decorate the spellings; typedefs may resolve to their canonical
        type (keeping the typedef as ``canonical_type``)."""
        if ntype.kind == cindex.TypeKind.POINTER:
            nt = NativeType.from_type(ntype.get_pointee())

            if None != nt.canonical_type:
                nt.canonical_type.name += "*"
                nt.canonical_type.namespaced_name += "*"
                nt.canonical_type.whole_name += "*"

            nt.name += "*"
            nt.namespaced_name += "*"
            nt.whole_name = nt.namespaced_name
            nt.is_enum = False
            nt.is_const = ntype.get_pointee().is_const_qualified()
            nt.is_pointer = True
            if nt.is_const:
                nt.whole_name = "const " + nt.whole_name
        elif ntype.kind == cindex.TypeKind.LVALUEREFERENCE:
            nt = NativeType.from_type(ntype.get_pointee())
            nt.is_const = ntype.get_pointee().is_const_qualified()
            nt.whole_name = nt.namespaced_name + "&"

            if nt.is_const:
                nt.whole_name = "const " + nt.whole_name

            if None != nt.canonical_type:
                nt.canonical_type.whole_name += "&"
        else:
            nt = NativeType()
            decl = ntype.get_declaration()

            if ntype.kind == cindex.TypeKind.RECORD:
                if decl.kind == cindex.CursorKind.CLASS_DECL:
                    nt.is_object = True
                nt.name = decl.displayname
                nt.namespaced_name = get_namespaced_name(decl)
                nt.namespace_name = get_namespace_name(decl)
                nt.whole_name = nt.namespaced_name
            else:
                if decl.kind == cindex.CursorKind.NO_DECL_FOUND:
                    nt.name = native_name_from_type(ntype)
                else:
                    nt.name = decl.spelling
                nt.namespaced_name = get_namespaced_name(decl)
                nt.namespace_name = get_namespace_name(decl)

                # Normalize std::string / std::function spellings.
                if nt.namespaced_name == "std::string":
                    nt.name = nt.namespaced_name

                if nt.namespaced_name.startswith("std::function"):
                    nt.name = "std::function"

                # Fall back to the bare name when there is no namespace.
                if len(nt.namespaced_name) == 0 or nt.namespaced_name.find("::") == -1:
                    nt.namespaced_name = nt.name

                nt.whole_name = nt.namespaced_name
                nt.is_const = ntype.is_const_qualified()
                if nt.is_const:
                    nt.whole_name = "const " + nt.whole_name

                # Check whether it's a std::function typedef
                # (cmp is Python 2 only; this file targets Python 2).
                cdecl = ntype.get_canonical().get_declaration()
                if None != cdecl.spelling and 0 == cmp(cdecl.spelling, "function"):
                    nt.name = "std::function"

                # Resolve typedefs / unexposed types through the canonical
                # type, remembering the typedef itself in canonical_type.
                if nt.name != INVALID_NATIVE_TYPE and nt.name != "std::string" and nt.name != "std::function":
                    if ntype.kind == cindex.TypeKind.UNEXPOSED or ntype.kind == cindex.TypeKind.TYPEDEF:
                        ret = NativeType.from_type(ntype.get_canonical())
                        if ret.name != "":
                            if decl.kind == cindex.CursorKind.TYPEDEF_DECL:
                                ret.canonical_type = nt
                            return ret

                nt.is_enum = ntype.get_canonical().kind == cindex.TypeKind.ENUM

                # Parse "function<ret (args)>" to recover the signature.
                if nt.name == "std::function":
                    nt.namespaced_name = get_namespaced_name(cdecl)
                    r = re.compile('function<(.+) .*\((.*)\)>').search(cdecl.displayname)
                    (ret_type, params) = r.groups()
                    params = filter(None, params.split(", "))

                    nt.is_function = True
                    nt.ret_type = NativeType.from_string(ret_type)
                    nt.param_types = [NativeType.from_string(string) for string in params]

        # mark argument as not supported
        if nt.name == INVALID_NATIVE_TYPE:
            nt.not_supported = True

        return nt

    @staticmethod
    def from_string(displayname):
        """Build a NativeType directly from a C++ type spelling (used for
        the pieces of a parsed std::function signature)."""
        displayname = displayname.replace(" *", "*")

        nt = NativeType()
        nt.name = displayname.split("::")[-1]
        nt.namespaced_name = displayname
        nt.whole_name = nt.namespaced_name
        nt.is_object = True
        return nt

    @property
    def lambda_parameters(self):
        # Parameter list for the generated lambda wrapper, e.g.
        # "int larg0, float larg1".
        params = ["%s larg%d" % (str(nt), i) for i, nt in enumerate(self.param_types)]
        return ", ".join(params)

    @staticmethod
    def dict_has_key_re(dict, real_key_list):
        """True if any key in *real_key_list* matches a dict key; keys
        starting with '@' are treated as anchored regex patterns."""
        for real_key in real_key_list:
            for (k, v) in dict.items():
                if k.startswith('@'):
                    k = k[1:]
                    match = re.match("^" + k + "$", real_key)
                    if match:
                        return True
                else:
                    if k == real_key:
                        return True
        return False

    @staticmethod
    def dict_get_value_re(dict, real_key_list):
        """Like dict_has_key_re, but return the matched value (or None)."""
        for real_key in real_key_list:
            for (k, v) in dict.items():
                if k.startswith('@'):
                    k = k[1:]
                    match = re.match("^" + k + "$", real_key)
                    if match:
                        return v
                else:
                    if k == real_key:
                        return v
        return None

    @staticmethod
    def dict_replace_value_re(dict, real_key_list):
        """For '@'-prefixed keys, substitute the pattern inside the real key
        (unanchored match); for plain keys, return the mapped value."""
        for real_key in real_key_list:
            for (k, v) in dict.items():
                if k.startswith('@'):
                    k = k[1:]
                    match = re.match('.*' + k, real_key)
                    if match:
                        return re.sub(k, v, real_key)
                else:
                    if k == real_key:
                        return v
        return None

    def from_native(self, convert_opts):
        """Render the native->script conversion snippet for this type from
        the generator's conversions.yaml rules."""
        assert(convert_opts.has_key('generator'))
        generator = convert_opts['generator']
        keys = []

        # Prefer the typedef's own conversion rule when one exists.
        if self.canonical_type != None:
            keys.append(self.canonical_type.name)
        keys.append(self.name)

        from_native_dict = generator.config['conversions']['from_native']

        if self.is_object:
            if not NativeType.dict_has_key_re(from_native_dict, keys):
                keys.append("object")
        elif self.is_enum:
            keys.append("int")

        if NativeType.dict_has_key_re(from_native_dict, keys):
            tpl = NativeType.dict_get_value_re(from_native_dict, keys)
            tpl = Template(tpl, searchList=[convert_opts])
            return str(tpl).rstrip()

    # Emitted verbatim so the generated C++ fails loudly at compile time.
        return "#pragma warning NO CONVERSION FROM NATIVE FOR " + self.name

    def to_native(self, convert_opts):
        """Render the script->native conversion snippet for this type;
        std::function arguments get a generated lambda wrapper instead."""
        assert('generator' in convert_opts)
        generator = convert_opts['generator']
        keys = []

        if self.canonical_type != None:
            keys.append(self.canonical_type.name)
        keys.append(self.name)

        to_native_dict = generator.config['conversions']['to_native']
        if self.is_object:
            if not NativeType.dict_has_key_re(to_native_dict, keys):
                keys.append("object")
        elif self.is_enum:
            keys.append("int")

        if self.is_function:
            # std::function: generate a lambda bridging script callbacks.
            tpl = Template(file=os.path.join(generator.target, "templates", "lambda.c"),
                           searchList=[convert_opts, self])
            indent = convert_opts['level'] * "\t"
            return str(tpl).replace("\n", "\n" + indent)

        if NativeType.dict_has_key_re(to_native_dict, keys):
            tpl = NativeType.dict_get_value_re(to_native_dict, keys)
            tpl = Template(tpl, searchList=[convert_opts])
            return str(tpl).rstrip()
        return "#pragma warning NO CONVERSION TO NATIVE FOR " + self.name + "\n" + convert_opts['level'] * "\t" + "ok = false"

    def to_string(self, generator):
        """Return the type spelling to use in generated code, applying any
        'native_types' renames and typedef preferences from the config."""
        conversions = generator.config['conversions']
        if conversions.has_key('native_types'):
            native_types_dict = conversions['native_types']
            if NativeType.dict_has_key_re(native_types_dict, [self.namespaced_name]):
                return NativeType.dict_get_value_re(native_types_dict, [self.namespaced_name])

        name = self.namespaced_name

        to_native_dict = generator.config['conversions']['to_native']
        from_native_dict = generator.config['conversions']['from_native']
        use_typedef = False

        typedef_name = self.canonical_type.name if None != self.canonical_type else None

        # Use the typedef spelling when a conversion rule targets it.
        if None != typedef_name:
            if NativeType.dict_has_key_re(to_native_dict, [typedef_name]) or NativeType.dict_has_key_re(from_native_dict, [typedef_name]):
                use_typedef = True

        if use_typedef and self.canonical_type:
            name = self.canonical_type.namespaced_name
        return "const " + name if (self.is_pointer and self.is_const) else name

    def get_whole_name(self, generator):
        """Like to_string, but based on the full spelling (const / * / &)
        and also applying 'native_types' regex replacements."""
        conversions = generator.config['conversions']
        to_native_dict = conversions['to_native']
        from_native_dict = conversions['from_native']
        use_typedef = False
        name = self.whole_name
        typedef_name = self.canonical_type.name if None != self.canonical_type else None
        if None != typedef_name:
            if NativeType.dict_has_key_re(to_native_dict, [typedef_name]) or NativeType.dict_has_key_re(from_native_dict, [typedef_name]):
                use_typedef = True

        if use_typedef and self.canonical_type:
            name = self.canonical_type.whole_name

        to_replace = None
        if conversions.has_key('native_types'):
            native_types_dict = conversions['native_types']
            to_replace = NativeType.dict_replace_value_re(native_types_dict, [name])

        if to_replace:
            name = to_replace

        return name

    def __str__(self):
        return self.canonical_type.whole_name if None != self.canonical_type else self.whole_name
class NativeField(object):
    """Wraps a libclang field-declaration cursor.

    Exposes the raw ``name`` plus a ``pretty_name`` with any ``m_``
    member-prefix stripped (e.g. ``m_width`` -> ``width``).
    """

    # Matches the conventional "m_<field>" member naming prefix.
    _MEMBER_PREFIX_RE = re.compile(r'm_(\w+)')

    def __init__(self, cursor):
        canonical = cursor.canonical
        self.cursor = canonical
        self.name = canonical.displayname
        self.kind = canonical.type.kind
        self.location = canonical.location
        prefix_match = NativeField._MEMBER_PREFIX_RE.match(self.name)
        self.pretty_name = prefix_match.group(1) if prefix_match else self.name
# Return True if this parameter node (or any of its descendants) carries a
# default-argument expression (one of the kinds in default_arg_type_arr).
def iterate_param_node(param_node, depth=1):
    return any(child.kind in default_arg_type_arr
               or iterate_param_node(child, depth + 1)
               for child in param_node.get_children())
class NativeFunction(object):
    """Wraps a libclang method/function cursor: parses its signature,
    default arguments and comments, and renders the binding glue through
    the target's Cheetah templates."""

    def __init__(self, cursor):
        self.cursor = cursor
        self.func_name = cursor.spelling
        self.signature_name = self.func_name  # may be replaced by a template
        self.arguments = []
        # (sic) typo kept: presumably referenced by the Cheetah templates
        # under this exact name — TODO confirm before renaming.
        self.argumtntTips = []
        self.static = cursor.kind == cindex.CursorKind.CXX_METHOD and cursor.is_static_method()
        self.implementations = []
        self.is_constructor = False
        self.not_supported = False
        self.is_override = False
        self.ret_type = NativeType.from_type(cursor.result_type)
        self.comment = self.get_comment(cursor.getRawComment())

        # parse the arguments
        # if self.func_name == "spriteWithFile":
        #     pdb.set_trace()
        for arg in cursor.get_arguments():
            self.argumtntTips.append(arg.spelling)

        for arg in cursor.type.argument_types():
            nt = NativeType.from_type(arg)
            self.arguments.append(nt)
            # mark the function as not supported if at least one argument is not supported
            if nt.not_supported:
                self.not_supported = True

        # Walk the parameter declarations to detect the first default
        # argument; min_args is the number of parameters before it.
        found_default_arg = False
        index = -1

        for arg_node in self.cursor.get_children():
            if arg_node.kind == cindex.CursorKind.CXX_OVERRIDE_ATTR:
                self.is_override = True
            if arg_node.kind == cindex.CursorKind.PARM_DECL:
                index += 1
                if iterate_param_node(arg_node):
                    found_default_arg = True
                    break

        self.min_args = index if found_default_arg else len(self.arguments)

    def get_comment(self, comment):
        """Strip C/C++ comment markers from a raw doxygen comment and
        reformat it for the generated API docs (regex pipeline)."""
        replaceStr = comment

        if comment is None:
            return ""

        regular_replace_list = [
            ("(\s)*//!",""),
            ("(\s)*//",""),
            ("(\s)*/\*\*",""),
            ("(\s)*/\*",""),
            ("\*/",""),
            ("\r\n", "\n"),
            ("\n(\s)*\*", "\n"),
            ("\n(\s)*@","\n"),
            ("\n(\s)*","\n"),
            ("\n(\s)*\n", "\n"),
            ("^(\s)*\n",""),
            ("\n(\s)*$", ""),
            ("\n","<br>\n"),
            ("\n", "\n-- ")
        ]

        for item in regular_replace_list:
            replaceStr = re.sub(item[0], item[1], replaceStr)

        return replaceStr

    def generate_code(self, current_class=None, generator=None, is_override=False):
        """Emit header/implementation/doc snippets for this function via
        the target templates; overrides only contribute to the lua docs."""
        gen = current_class.generator if current_class else generator
        config = gen.config

        tpl = Template(file=os.path.join(gen.target, "templates", "function.h"),
                       searchList=[current_class, self])
        if not is_override:
            gen.head_file.write(str(tpl))
        if self.static:
            # Static methods may get a renamed signature from the config.
            if config['definitions'].has_key('sfunction'):
                tpl = Template(config['definitions']['sfunction'],
                               searchList=[current_class, self])
                self.signature_name = str(tpl)
            tpl = Template(file=os.path.join(gen.target, "templates", "sfunction.c"),
                           searchList=[current_class, self])
        else:
            if not self.is_constructor:
                if config['definitions'].has_key('ifunction'):
                    tpl = Template(config['definitions']['ifunction'],
                                   searchList=[current_class, self])
                    self.signature_name = str(tpl)
            else:
                if config['definitions'].has_key('constructor'):
                    tpl = Template(config['definitions']['constructor'],
                                   searchList=[current_class, self])
                    self.signature_name = str(tpl)
            # Spidermonkey gets a dedicated constructor template.
            if self.is_constructor and gen.script_type == "spidermonkey" :
                tpl = Template(file=os.path.join(gen.target, "templates", "constructor.c"),
                               searchList=[current_class, self])
            else :
                tpl = Template(file=os.path.join(gen.target, "templates", "ifunction.c"),
                               searchList=[current_class, self])
        if not is_override:
            gen.impl_file.write(str(tpl))

        apidoc_function_script = Template(file=os.path.join(gen.target,
                                                            "templates",
                                                            "apidoc_function.script"),
                                          searchList=[current_class, self])
        if gen.script_type == "spidermonkey":
            gen.doc_file.write(str(apidoc_function_script))
        else:
            if gen.script_type == "lua" and current_class != None :
                current_class.doc_func_file.write(str(apidoc_function_script))
class NativeOverloadedFunction(object):
    """Groups several NativeFunction overloads that share one script-side
    name; generation dispatches on argument count at runtime."""

    def __init__(self, func_array):
        self.implementations = func_array
        self.func_name = func_array[0].func_name
        self.signature_name = self.func_name
        self.min_args = 100  # sentinel; reduced by the loop below
        self.is_constructor = False
        for m in func_array:
            self.min_args = min(self.min_args, m.min_args)

        # Docs come from the first overload's raw comment only.
        self.comment = self.get_comment(func_array[0].cursor.getRawComment())

    def get_comment(self, comment):
        """Strip C/C++ comment markers and reformat for the API docs
        (same regex pipeline as NativeFunction.get_comment)."""
        replaceStr = comment

        if comment is None:
            return ""

        regular_replace_list = [
            ("(\s)*//!",""),
            ("(\s)*//",""),
            ("(\s)*/\*\*",""),
            ("(\s)*/\*",""),
            ("\*/",""),
            ("\r\n", "\n"),
            ("\n(\s)*\*", "\n"),
            ("\n(\s)*@","\n"),
            ("\n(\s)*","\n"),
            ("\n(\s)*\n", "\n"),
            ("^(\s)*\n",""),
            ("\n(\s)*$", ""),
            ("\n","<br>\n"),
            ("\n", "\n-- ")
        ]

        for item in regular_replace_list:
            replaceStr = re.sub(item[0], item[1], replaceStr)

        return replaceStr

    def append(self, func):
        # Adding an overload may lower the minimum argument count.
        self.min_args = min(self.min_args, func.min_args)
        self.implementations.append(func)

    def generate_code(self, current_class=None, is_override=False):
        """Emit the overload-dispatching glue via the *_overloaded.c
        templates; overrides only contribute to the docs."""
        gen = current_class.generator
        config = gen.config
        static = self.implementations[0].static
        tpl = Template(file=os.path.join(gen.target, "templates", "function.h"),
                       searchList=[current_class, self])
        if not is_override:
            gen.head_file.write(str(tpl))
        if static:
            if config['definitions'].has_key('sfunction'):
                tpl = Template(config['definitions']['sfunction'],
                               searchList=[current_class, self])
                self.signature_name = str(tpl)
            tpl = Template(file=os.path.join(gen.target, "templates", "sfunction_overloaded.c"),
                           searchList=[current_class, self])
        else:
            if not self.is_constructor:
                if config['definitions'].has_key('ifunction'):
                    tpl = Template(config['definitions']['ifunction'],
                                   searchList=[current_class, self])
                    self.signature_name = str(tpl)
            else:
                if config['definitions'].has_key('constructor'):
                    tpl = Template(config['definitions']['constructor'],
                                   searchList=[current_class, self])
                    self.signature_name = str(tpl)
            tpl = Template(file=os.path.join(gen.target, "templates", "ifunction_overloaded.c"),
                           searchList=[current_class, self])
        if not is_override:
            gen.impl_file.write(str(tpl))

        if current_class != None:
            if gen.script_type == "lua":
                apidoc_function_overload_script = Template(file=os.path.join(gen.target,
                                                                             "templates",
                                                                             "apidoc_function_overload.script"),
                                                           searchList=[current_class, self])
                current_class.doc_func_file.write(str(apidoc_function_overload_script))
            else:
                if gen.script_type == "spidermonkey":
                    apidoc_function_overload_script = Template(file=os.path.join(gen.target,
                                                                                 "templates",
                                                                                 "apidoc_function_overload.script"),
                                                               searchList=[current_class, self])
                    gen.doc_file.write(str(apidoc_function_overload_script))
class NativeClass(object):
    """Parses one C++ class (via its libclang cursor) into fields, methods,
    static methods and override methods, then drives code generation for
    the class through the target templates."""

    def __init__(self, cursor, generator):
        # the cursor to the implementation
        self.cursor = cursor
        self.class_name = cursor.displayname
        self.is_ref_class = self.class_name == "Ref"
        self.namespaced_class_name = self.class_name
        self.parents = []              # NativeClass instances (base classes)
        self.fields = []               # NativeField instances
        self.methods = {}              # registration name -> Native(Overloaded)Function
        self.static_methods = {}
        self.generator = generator
        self.is_abstract = self.class_name in generator.abstract_classes
        self._current_visibility = cindex.AccessSpecifierKind.PRIVATE
        #for generate lua api doc
        self.override_methods = {}
        self.has_constructor = False
        self.namespace_name = ""

        # Apply class renaming and prefix removal from the config.
        registration_name = generator.get_class_or_rename_class(self.class_name)
        if generator.remove_prefix:
            self.target_class_name = re.sub('^' + generator.remove_prefix, '', registration_name)
        else:
            self.target_class_name = registration_name
        self.namespaced_class_name = get_namespaced_name(cursor)
        self.namespace_name = get_namespace_name(cursor)
        self.parse()

    @property
    def underlined_class_name(self):
        # "cocos2d::Sprite" -> "cocos2d_Sprite", used in generated symbols.
        return self.namespaced_class_name.replace("::", "_")

    def parse(self):
        '''
        parse the current cursor, getting all the necessary information
        '''
        self._deep_iterate(self.cursor)

    def methods_clean(self):
        '''
        clean list of methods (without the ones that should be skipped)
        '''
        ret = []
        for name, impl in self.methods.iteritems():
            should_skip = False
            # The constructor is emitted separately, never as a method.
            if name == 'constructor':
                should_skip = True
            else:
                if self.generator.should_skip(self.class_name, name):
                    should_skip = True
            if not should_skip:
                ret.append({"name": name, "impl": impl})
        return ret

    def static_methods_clean(self):
        '''
        clean list of static methods (without the ones that should be skipped)
        '''
        ret = []
        for name, impl in self.static_methods.iteritems():
            should_skip = self.generator.should_skip(self.class_name, name)
            if not should_skip:
                ret.append({"name": name, "impl": impl})
        return ret

    def override_methods_clean(self):
        '''
        clean list of override methods (without the ones that should be skipped)
        '''
        ret = []
        for name, impl in self.override_methods.iteritems():
            should_skip = self.generator.should_skip(self.class_name, name)
            if not should_skip:
                ret.append({"name": name, "impl": impl})
        return ret

    def generate_code(self):
        '''
        actually generate the code. it uses the current target templates/rules in order to
        generate the right code
        '''

        if not self.is_ref_class:
            self.is_ref_class = self._is_ref_class()

        config = self.generator.config
        prelude_h = Template(file=os.path.join(self.generator.target, "templates", "prelude.h"),
                             searchList=[{"current_class": self}])
        prelude_c = Template(file=os.path.join(self.generator.target, "templates", "prelude.c"),
                             searchList=[{"current_class": self}])
        apidoc_classhead_script = Template(file=os.path.join(self.generator.target,
                                                             "templates",
                                                             "apidoc_classhead.script"),
                                           searchList=[{"current_class": self}])
        # Lua targets additionally get a per-class api doc file.
        if self.generator.script_type == "lua":
            docfuncfilepath = os.path.join(self.generator.outdir + "/api", self.class_name + ".lua")
            self.doc_func_file = open(docfuncfilepath, "w+")
            apidoc_fun_head_script = Template(file=os.path.join(self.generator.target,
                                                                "templates",
                                                                "apidoc_function_head.script"),
                                              searchList=[{"current_class": self}])
            self.doc_func_file.write(str(apidoc_fun_head_script))
        self.generator.head_file.write(str(prelude_h))
        self.generator.impl_file.write(str(prelude_c))
        self.generator.doc_file.write(str(apidoc_classhead_script))
        for m in self.methods_clean():
            m['impl'].generate_code(self)
        for m in self.static_methods_clean():
            m['impl'].generate_code(self)
        if self.generator.script_type == "lua":
            for m in self.override_methods_clean():
                m['impl'].generate_code(self, is_override = True)
        # generate register section
        register = Template(file=os.path.join(self.generator.target, "templates", "register.c"),
                            searchList=[{"current_class": self}])
        apidoc_classfoot_script = Template(file=os.path.join(self.generator.target,
                                                             "templates",
                                                             "apidoc_classfoot.script"),
                                           searchList=[{"current_class": self}])
        self.generator.impl_file.write(str(register))
        self.generator.doc_file.write(str(apidoc_classfoot_script))
        if self.generator.script_type == "lua":
            apidoc_fun_foot_script = Template(file=os.path.join(self.generator.target,
                                                                "templates",
                                                                "apidoc_function_foot.script"),
                                              searchList=[{"current_class": self}])
            self.doc_func_file.write(str(apidoc_fun_foot_script))
            self.doc_func_file.close()

    def _deep_iterate(self, cursor=None, depth=0):
        # Recurse into children only when _process_node asks for it.
        for node in cursor.get_children():
            # print("%s%s - %s" % ("> " * depth, node.displayname, node.kind))
            if self._process_node(node):
                self._deep_iterate(node, depth + 1)

    @staticmethod
    def _is_method_in_parents(current_class, method_name):
        # Only the first (primary) base class chain is consulted.
        if len(current_class.parents) > 0:
            if method_name in current_class.parents[0].methods:
                return True
            return NativeClass._is_method_in_parents(current_class.parents[0], method_name)
        return False

    def _is_ref_class(self, depth = 0):
        """
        Mark the class as 'cocos2d::Ref' or its subclass.

        NOTE(review): when parents exist, only the primary base's answer is
        returned and this class's own is_ref_class flag is ignored — looks
        intentional (the flag is set while parsing base specifiers), but
        worth confirming.
        """
        # print ">" * (depth + 1) + " " + self.class_name

        if len(self.parents) > 0:
            return self.parents[0]._is_ref_class(depth + 1)

        if self.is_ref_class:
            return True

        return False

    def _process_node(self, cursor):
        '''
        process the node, depending on the type. If returns true, then it will perform a deep
        iteration on its children. Otherwise it will continue with its siblings (if any)

        @param: cursor the cursor to analyze
        '''
        if cursor.kind == cindex.CursorKind.CXX_BASE_SPECIFIER:
            parent = cursor.get_definition()
            parent_name = parent.displayname

            if not self.class_name in self.generator.classes_have_no_parents:
                if parent_name and parent_name not in self.generator.base_classes_to_skip:
                    #if parent and self.generator.in_listed_classes(parent.displayname):
                    # Parse each base class at most once, cached globally.
                    if not self.generator.generated_classes.has_key(parent.displayname):
                        parent = NativeClass(parent, self.generator)
                        self.generator.generated_classes[parent.class_name] = parent
                    else:
                        parent = self.generator.generated_classes[parent.displayname]

                    self.parents.append(parent)

            if parent_name == "Ref":
                self.is_ref_class = True

        elif cursor.kind == cindex.CursorKind.FIELD_DECL:
            self.fields.append(NativeField(cursor))
        elif cursor.kind == cindex.CursorKind.CXX_ACCESS_SPEC_DECL:
            self._current_visibility = cursor.get_access_specifier()
        elif cursor.kind == cindex.CursorKind.CXX_METHOD and cursor.get_availability() != cindex.AvailabilityKind.DEPRECATED:
            # skip if variadic
            if self._current_visibility == cindex.AccessSpecifierKind.PUBLIC and not cursor.type.is_function_variadic():
                m = NativeFunction(cursor)
                registration_name = self.generator.should_rename_function(self.class_name, m.func_name) or m.func_name
                # bail if the function is not supported (at least one arg not supported)
                if m.not_supported:
                    return False
                if m.is_override:
                    if NativeClass._is_method_in_parents(self, registration_name):
                        # Only lua records overrides (for its api docs).
                        if self.generator.script_type == "lua":
                            if not self.override_methods.has_key(registration_name):
                                self.override_methods[registration_name] = m
                            else:
                                previous_m = self.override_methods[registration_name]
                                if isinstance(previous_m, NativeOverloadedFunction):
                                    previous_m.append(m)
                                else:
                                    self.override_methods[registration_name] = NativeOverloadedFunction([m, previous_m])
                        return False

                # A second method under the same registration name turns the
                # entry into a NativeOverloadedFunction.
                if m.static:
                    if not self.static_methods.has_key(registration_name):
                        self.static_methods[registration_name] = m
                    else:
                        previous_m = self.static_methods[registration_name]
                        if isinstance(previous_m, NativeOverloadedFunction):
                            previous_m.append(m)
                        else:
                            self.static_methods[registration_name] = NativeOverloadedFunction([m, previous_m])
                else:
                    if not self.methods.has_key(registration_name):
                        self.methods[registration_name] = m
                    else:
                        previous_m = self.methods[registration_name]
                        if isinstance(previous_m, NativeOverloadedFunction):
                            previous_m.append(m)
                        else:
                            self.methods[registration_name] = NativeOverloadedFunction([m, previous_m])
            return True

        elif self._current_visibility == cindex.AccessSpecifierKind.PUBLIC and cursor.kind == cindex.CursorKind.CONSTRUCTOR and not self.is_abstract:
            # Skip copy constructor
            if cursor.displayname == self.class_name + "(const " + self.namespaced_class_name + " &)":
                # print "Skip copy constructor: " + cursor.displayname
                return True

            m = NativeFunction(cursor)
            m.is_constructor = True
            self.has_constructor = True
            if not self.methods.has_key('constructor'):
                self.methods['constructor'] = m
            else:
                previous_m = self.methods['constructor']
                if isinstance(previous_m, NativeOverloadedFunction):
                    previous_m.append(m)
                else:
                    m = NativeOverloadedFunction([m, previous_m])
                    m.is_constructor = True
                    self.methods['constructor'] = m
            return True

        # else:
            # print >> sys.stderr, "unknown cursor: %s - %s" % (cursor.kind, cursor.displayname)
        return False
class Generator(object):
    """Top-level driver: reads the per-target options (from the ini file),
    parses the requested headers with libclang and emits the binding glue
    plus api docs for the configured script engine (lua / spidermonkey)."""

    def __init__(self, opts):
        # opts is a flat dict built from the ini config section.
        self.index = cindex.Index.create()
        self.outdir = opts['outdir']
        self.prefix = opts['prefix']
        self.headers = opts['headers'].split(' ')
        self.classes = opts['classes']                 # regex patterns of classes to bind
        self.classes_need_extend = opts['classes_need_extend']
        self.classes_have_no_parents = opts['classes_have_no_parents'].split(' ')
        self.base_classes_to_skip = opts['base_classes_to_skip'].split(' ')
        self.abstract_classes = opts['abstract_classes'].split(' ')
        self.clang_args = opts['clang_args']
        self.target = opts['target']                   # template directory for the script engine
        self.remove_prefix = opts['remove_prefix']
        self.target_ns = opts['target_ns']
        self.cpp_ns = opts['cpp_ns']
        self.impl_file = None
        self.head_file = None
        self.skip_classes = {}       # class pattern -> [method patterns] to skip
        self.generated_classes = {}  # class name -> NativeClass (parse cache)
        self.rename_functions = {}   # class -> {method: new name}
        self.rename_classes = {}     # class -> new name
        self.out_file = opts['out_file']
        self.script_control_cpp = opts['script_control_cpp'] == "yes"
        self.script_type = opts['script_type']
        self.macro_judgement = opts['macro_judgement']

        # If a clang 3.3 include path does not exist, also try the 3.4
        # equivalent (compatibility shim for newer clang installs).
        extend_clang_args = []

        for clang_arg in self.clang_args:
            if not os.path.exists(clang_arg.replace("-I","")):
                pos = clang_arg.find("lib/clang/3.3/include")
                if -1 != pos:
                    extend_clang_arg = clang_arg.replace("3.3", "3.4")
                    if os.path.exists(extend_clang_arg.replace("-I","")):
                        extend_clang_args.append(extend_clang_arg)

        if len(extend_clang_args) > 0:
            self.clang_args.extend(extend_clang_args)

        # Parse "Class::[m1 m2],\nOther::[*]" style skip rules.
        if opts['skip']:
            list_of_skips = re.split(",\n?", opts['skip'])
            for skip in list_of_skips:
                class_name, methods = skip.split("::")
                self.skip_classes[class_name] = []
                match = re.match("\[([^]]+)\]", methods)
                if match:
                    self.skip_classes[class_name] = match.group(1).split(" ")
                else:
                    raise Exception("invalid list of skip methods")
        # Parse "Class::[old=new ...]" style function rename rules.
        if opts['rename_functions']:
            list_of_function_renames = re.split(",\n?", opts['rename_functions'])
            for rename in list_of_function_renames:
                class_name, methods = rename.split("::")
                self.rename_functions[class_name] = {}
                match = re.match("\[([^]]+)\]", methods)
                if match:
                    list_of_methods = match.group(1).split(" ")
                    for pair in list_of_methods:
                        k, v = pair.split("=")
                        self.rename_functions[class_name][k] = v
                else:
                    raise Exception("invalid list of rename methods")

        # Parse "OldName::NewName" style class rename rules.
        if opts['rename_classes']:
            list_of_class_renames = re.split(",\n?", opts['rename_classes'])
            for rename in list_of_class_renames:
                class_name, renamed_class_name = rename.split("::")
                self.rename_classes[class_name] = renamed_class_name
def should_rename_function(self, class_name, method_name):
if self.rename_functions.has_key(class_name) and self.rename_functions[class_name].has_key(method_name):
# print >> sys.stderr, "will rename %s to %s" % (method_name, self.rename_functions[class_name][method_name])
return self.rename_functions[class_name][method_name]
return None
def get_class_or_rename_class(self, class_name):
if self.rename_classes.has_key(class_name):
# print >> sys.stderr, "will rename %s to %s" % (method_name, self.rename_functions[class_name][method_name])
return self.rename_classes[class_name]
return class_name
    def should_skip(self, class_name, method_name, verbose=False):
        """Return True when class_name / method_name matches a skip rule.

        Keys in skip_classes are anchored regex patterns; a single "*"
        entry in a class's method list skips the whole class.
        """
        if class_name == "*" and self.skip_classes.has_key("*"):
            for func in self.skip_classes["*"]:
                if re.match(func, method_name):
                    return True
        else:
            for key in self.skip_classes.iterkeys():
                if key == "*" or re.match("^" + key + "$", class_name):
                    if verbose:
                        print "%s in skip_classes" % (class_name)
                    if len(self.skip_classes[key]) == 1 and self.skip_classes[key][0] == "*":
                        if verbose:
                            print "%s will be skipped completely" % (class_name)
                        return True
                    if method_name != None:
                        for func in self.skip_classes[key]:
                            if re.match(func, method_name):
                                if verbose:
                                    print "%s will skip method %s" % (class_name, method_name)
                                return True
        # NOTE(review): `key` below may be unbound (NameError) when
        # skip_classes is empty or the first branch was taken — only
        # reachable with verbose=True, but worth fixing upstream.
        if verbose:
            print "%s will be accepted (%s, %s)" % (class_name, key, self.skip_classes[key])
        return False
def in_listed_classes(self, class_name):
"""
returns True if the class is in the list of required classes and it's not in the skip list
"""
for key in self.classes:
md = re.match("^" + key + "$", class_name)
if md and not self.should_skip(class_name, None):
return True
return False
def in_listed_extend_classed(self, class_name):
"""
returns True if the class is in the list of required classes that need to extend
"""
for key in self.classes_need_extend:
md = re.match("^" + key + "$", class_name)
if md:
return True
return False
def sorted_classes(self):
'''
sorted classes in order of inheritance
'''
sorted_list = []
for class_name in self.generated_classes.iterkeys():
nclass = self.generated_classes[class_name]
sorted_list += self._sorted_parents(nclass)
# remove dupes from the list
no_dupes = []
[no_dupes.append(i) for i in sorted_list if not no_dupes.count(i)]
return no_dupes
def _sorted_parents(self, nclass):
'''
returns the sorted list of parents for a native class
'''
sorted_parents = []
for p in nclass.parents:
if p.class_name in self.generated_classes.keys():
sorted_parents += self._sorted_parents(p)
if nclass.class_name in self.generated_classes.keys():
sorted_parents.append(nclass.class_name)
return sorted_parents
    def generate_code(self):
        """Drive the whole generation pass for this section.

        Loads the type-conversion table from conversions.yaml, opens the
        output .cpp/.hpp and API-doc files, writes the head templates,
        parses the configured headers (which emits the per-class code),
        then writes the foot templates and closes everything.  (Python 2
        only: uses the file() builtin.)
        """
        # must read the yaml file first
        stream = file(os.path.join(self.target, "conversions.yaml"), "r")
        data = yaml.load(stream)
        self.config = data
        implfilepath = os.path.join(self.outdir, self.out_file + ".cpp")
        headfilepath = os.path.join(self.outdir, self.out_file + ".hpp")
        docfiledir = self.outdir + "/api"
        if not os.path.exists(docfiledir):
            os.makedirs(docfiledir)
        # API docs get a .lua or .js extension depending on the target VM.
        if self.script_type == "lua":
            docfilepath = os.path.join(docfiledir, self.out_file + "_api.lua")
        else:
            docfilepath = os.path.join(docfiledir, self.out_file + "_api.js")
        self.impl_file = open(implfilepath, "w+")
        self.head_file = open(headfilepath, "w+")
        self.doc_file = open(docfilepath, "w+")
        # Cheetah templates; searchList=[self] exposes this generator's
        # attributes inside the template bodies.
        layout_h = Template(file=os.path.join(self.target, "templates", "layout_head.h"),
                            searchList=[self])
        layout_c = Template(file=os.path.join(self.target, "templates", "layout_head.c"),
                            searchList=[self])
        apidoc_ns_script = Template(file=os.path.join(self.target, "templates", "apidoc_ns.script"),
                                    searchList=[self])
        self.head_file.write(str(layout_h))
        self.impl_file.write(str(layout_c))
        self.doc_file.write(str(apidoc_ns_script))
        # Parsing the headers triggers per-class code generation into the
        # files opened above.
        self._parse_headers()
        layout_h = Template(file=os.path.join(self.target, "templates", "layout_foot.h"),
                            searchList=[self])
        layout_c = Template(file=os.path.join(self.target, "templates", "layout_foot.c"),
                            searchList=[self])
        self.head_file.write(str(layout_h))
        self.impl_file.write(str(layout_c))
        if self.script_type == "lua":
            apidoc_ns_foot_script = Template(file=os.path.join(self.target, "templates", "apidoc_ns_foot.script"),
                                             searchList=[self])
            self.doc_file.write(str(apidoc_ns_foot_script))
        self.impl_file.close()
        self.head_file.close()
        self.doc_file.close()
    def _pretty_print(self, diagnostics):
        """Print libclang diagnostics in a human-readable, numbered form.
        (Python 2 print statements.)"""
        print("====\nErrors in parsing headers:")
        # Indexed by cindex.Diagnostic.severity (0..4).
        severities=['Ignored', 'Note', 'Warning', 'Error', 'Fatal']
        for idx, d in enumerate(diagnostics):
            print "%s. <severity = %s,\n location = %r,\n details = %r>" % (
                idx+1, severities[d.severity], d.location, d.spelling)
        print("====\n")
def _parse_headers(self):
for header in self.headers:
tu = self.index.parse(header, self.clang_args)
if len(tu.diagnostics) > 0:
self._pretty_print(tu.diagnostics)
is_fatal = False
for d in tu.diagnostics:
if d.severity >= cindex.Diagnostic.Error:
is_fatal = True
if is_fatal:
print("*** Found errors - can not continue")
raise Exception("Fatal error in parsing headers")
self._deep_iterate(tu.cursor)
    def _deep_iterate(self, cursor, depth=0):
        """Recursively walk the clang AST, generating a NativeClass for
        every class declaration that is in a targeted namespace and in the
        requested class list.  *depth* only tracks recursion for the
        (commented-out) debug print.  (Python 2: uses dict.has_key.)
        """
        # get the canonical type
        if cursor.kind == cindex.CursorKind.CLASS_DECL:
            # Only act on the defining declaration, and skip empty bodies.
            if cursor == cursor.type.get_declaration() and len(cursor.get_children_array()) > 0:
                is_targeted_class = True
                if self.cpp_ns:
                    # When cpp_ns is configured, the class must live under
                    # one of the listed C++ namespaces.
                    is_targeted_class = False
                    namespaced_name = get_namespaced_name(cursor)
                    for ns in self.cpp_ns:
                        if namespaced_name.startswith(ns):
                            is_targeted_class = True
                            break
                if is_targeted_class and self.in_listed_classes(cursor.displayname):
                    if not self.generated_classes.has_key(cursor.displayname):
                        nclass = NativeClass(cursor, self)
                        nclass.generate_code()
                        self.generated_classes[cursor.displayname] = nclass
                    # Do not descend into a handled class declaration.
                    return
        for node in cursor.get_children():
            # print("%s %s - %s" % (">" * depth, node.displayname, node.kind))
            self._deep_iterate(node, depth + 1)
def scriptname_from_native(self, namespace_class_name, namespace_name):
script_ns_dict = self.config['conversions']['ns_map']
for (k, v) in script_ns_dict.items():
if k == namespace_name:
return namespace_class_name.replace("*","").replace("const ", "").replace(k, v)
if namespace_class_name.find("::") >= 0:
if namespace_class_name.find("std::") == 0:
return namespace_class_name
else:
raise Exception("The namespace (%s) conversion wasn't set in 'ns_map' section of the conversions.yaml" % namespace_class_name)
else:
return namespace_class_name.replace("*","").replace("const ", "")
def is_cocos_class(self, namespace_class_name):
script_ns_dict = self.config['conversions']['ns_map']
for (k, v) in script_ns_dict.items():
if namespace_class_name.find("std::") == 0:
return False
if namespace_class_name.find(k) >= 0:
return True
return False
def scriptname_cocos_class(self, namespace_class_name):
script_ns_dict = self.config['conversions']['ns_map']
for (k, v) in script_ns_dict.items():
if namespace_class_name.find(k) >= 0:
return namespace_class_name.replace("*","").replace("const ", "").replace(k,v)
raise Exception("The namespace (%s) conversion wasn't set in 'ns_map' section of the conversions.yaml" % namespace_class_name)
def js_typename_from_natve(self, namespace_class_name):
script_ns_dict = self.config['conversions']['ns_map']
if namespace_class_name.find("std::") == 0:
if namespace_class_name.find("std::string") == 0:
return "String"
if namespace_class_name.find("std::vector") == 0:
return "Array"
if namespace_class_name.find("std::map") == 0 or namespace_class_name.find("std::unordered_map") == 0:
return "map_object"
if namespace_class_name.find("std::function") == 0:
return "function"
for (k, v) in script_ns_dict.items():
if namespace_class_name.find(k) >= 0:
if namespace_class_name.find("cocos2d::Vec2") == 0:
return "vec2_object"
if namespace_class_name.find("cocos2d::Vec3") == 0:
return "vec3_object"
if namespace_class_name.find("cocos2d::Vec4") == 0:
return "vec4_object"
if namespace_class_name.find("cocos2d::Mat4") == 0:
return "mat4_object"
if namespace_class_name.find("cocos2d::Vector") == 0:
return "Array"
if namespace_class_name.find("cocos2d::Map") == 0:
return "map_object"
if namespace_class_name.find("cocos2d::Point") == 0:
return "point_object"
if namespace_class_name.find("cocos2d::Size") == 0:
return "size_object"
if namespace_class_name.find("cocos2d::Rect") == 0:
return "rect_object"
if namespace_class_name.find("cocos2d::Color3B") == 0:
return "color3b_object"
if namespace_class_name.find("cocos2d::Color4B") == 0:
return "color4b_object"
if namespace_class_name.find("cocos2d::Color4F") == 0:
return "color4f_object"
else:
return namespace_class_name.replace("*","").replace("const ", "").replace(k,v)
return namespace_class_name.replace("*","").replace("const ", "")
def lua_typename_from_natve(self, namespace_class_name, is_ret = False):
script_ns_dict = self.config['conversions']['ns_map']
if namespace_class_name.find("std::") == 0:
if namespace_class_name.find("std::string") == 0:
return "string"
if namespace_class_name.find("std::vector") == 0:
return "array_table"
if namespace_class_name.find("std::map") == 0 or namespace_class_name.find("std::unordered_map") == 0:
return "map_table"
if namespace_class_name.find("std::function") == 0:
return "function"
for (k, v) in script_ns_dict.items():
if namespace_class_name.find(k) >= 0:
if namespace_class_name.find("cocos2d::Vec2") == 0:
return "vec2_table"
if namespace_class_name.find("cocos2d::Vec3") == 0:
return "vec3_table"
if namespace_class_name.find("cocos2d::Vec4") == 0:
return "vec4_table"
if namespace_class_name.find("cocos2d::Vector") == 0:
return "array_table"
if namespace_class_name.find("cocos2d::Mat4") == 0:
return "mat4_table"
if namespace_class_name.find("cocos2d::Map") == 0:
return "map_table"
if namespace_class_name.find("cocos2d::Point") == 0:
return "point_table"
if namespace_class_name.find("cocos2d::Size") == 0:
return "size_table"
if namespace_class_name.find("cocos2d::Rect") == 0:
return "rect_table"
if namespace_class_name.find("cocos2d::Color3B") == 0:
return "color3b_table"
if namespace_class_name.find("cocos2d::Color4B") == 0:
return "color4b_table"
if namespace_class_name.find("cocos2d::Color4F") == 0:
return "color4f_table"
if is_ret == 1:
return namespace_class_name.replace("*","").replace("const ", "").replace(k,"")
else:
return namespace_class_name.replace("*","").replace("const ", "").replace(k,v)
return namespace_class_name.replace("*","").replace("const ","")
def api_param_name_from_native(self,native_name):
lower_name = native_name.lower()
if lower_name == "std::string":
return "str"
if lower_name.find("unsigned ") >= 0 :
return native_name.replace("unsigned ","")
if lower_name.find("unordered_map") >= 0 or lower_name.find("map") >= 0:
return "map"
if lower_name.find("vector") >= 0 :
return "array"
if lower_name == "std::function":
return "func"
else:
return lower_name
def js_ret_name_from_native(self, namespace_class_name, is_enum) :
if self.is_cocos_class(namespace_class_name):
if namespace_class_name.find("cocos2d::Vector") >=0:
return "new Array()"
if namespace_class_name.find("cocos2d::Map") >=0:
return "map_object"
if is_enum:
return 0
else:
return self.scriptname_cocos_class(namespace_class_name)
lower_name = namespace_class_name.lower()
if lower_name.find("unsigned ") >= 0:
lower_name = lower_name.replace("unsigned ","")
if lower_name == "std::string":
return ""
if lower_name == "char" or lower_name == "short" or lower_name == "int" or lower_name == "float" or lower_name == "double" or lower_name == "long":
return 0
if lower_name == "bool":
return "false"
if lower_name.find("std::vector") >= 0 or lower_name.find("vector") >= 0:
return "new Array()"
if lower_name.find("std::map") >= 0 or lower_name.find("std::unordered_map") >= 0 or lower_name.find("unordered_map") >= 0 or lower_name.find("map") >= 0:
return "map_object"
if lower_name == "std::function":
return "func"
else:
return namespace_class_name
def main():
    """Command-line driver: read the section .ini file (plus userconf.ini
    overrides), discover the available target VMs under targets/, and run
    a Generator for every requested target/section pair.  (Python 2 only:
    print statements, ConfigParser, optparse.)
    """
    from optparse import OptionParser
    parser = OptionParser("usage: %prog [options] {configfile}")
    parser.add_option("-s", action="store", type="string", dest="section",
                      help="sets a specific section to be converted")
    parser.add_option("-t", action="store", type="string", dest="target",
                      help="specifies the target vm. Will search for TARGET.yaml")
    parser.add_option("-o", action="store", type="string", dest="outdir",
                      help="specifies the output directory for generated C++ code")
    parser.add_option("-n", action="store", type="string", dest="out_file",
                      help="specifcies the name of the output file, defaults to the prefix in the .ini file")
    (opts, args) = parser.parse_args()
    # script directory
    workingdir = os.path.dirname(inspect.getfile(inspect.currentframe()))
    if len(args) == 0:
        parser.error('invalid number of arguments')
    # userconf.ini supplies machine-local values (paths etc.) that are
    # interpolated into the section config via the vars= argument below.
    userconfig = ConfigParser.SafeConfigParser()
    userconfig.read('userconf.ini')
    print 'Using userconfig \n ', userconfig.items('DEFAULT')
    config = ConfigParser.SafeConfigParser()
    config.read(args[0])
    if (0 == len(config.sections())):
        raise Exception("No sections defined in config file")
    # -s restricts generation to a single section; default is all of them.
    sections = []
    if opts.section:
        if (opts.section in config.sections()):
            sections = []
            sections.append(opts.section)
        else:
            raise Exception("Section not found in config file")
    else:
        print("processing all sections")
        sections = config.sections()
    # find available targets
    targetdir = os.path.join(workingdir, "targets")
    targets = []
    if (os.path.isdir(targetdir)):
        targets = [entry for entry in os.listdir(targetdir)
                   if (os.path.isdir(os.path.join(targetdir, entry)))]
    if 0 == len(targets):
        raise Exception("No targets defined")
    # -t restricts generation to a single target VM.
    if opts.target:
        if (opts.target in targets):
            targets = []
            targets.append(opts.target)
    if opts.outdir:
        outdir = opts.outdir
    else:
        outdir = os.path.join(workingdir, "gen")
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    for t in targets:
        # Fix for hidden '.svn', '.cvs' and '.git' etc. folders - these must be ignored or otherwise they will be interpreted as a target.
        if t == ".svn" or t == ".cvs" or t == ".git" or t == ".gitignore":
            continue
        print "\n.... Generating bindings for target", t
        for s in sections:
            print "\n.... .... Processing section", s, "\n"
            # Collect every per-section option the Generator understands;
            # optional keys fall back to sensible defaults.
            gen_opts = {
                'prefix': config.get(s, 'prefix'),
                'headers': (config.get(s, 'headers' , 0, dict(userconfig.items('DEFAULT')))),
                'classes': config.get(s, 'classes').split(' '),
                'classes_need_extend': config.get(s, 'classes_need_extend').split(' ') if config.has_option(s, 'classes_need_extend') else [],
                'clang_args': (config.get(s, 'extra_arguments', 0, dict(userconfig.items('DEFAULT'))) or "").split(" "),
                'target': os.path.join(workingdir, "targets", t),
                'outdir': outdir,
                'remove_prefix': config.get(s, 'remove_prefix'),
                'target_ns': config.get(s, 'target_namespace'),
                'cpp_ns': config.get(s, 'cpp_namespace').split(' ') if config.has_option(s, 'cpp_namespace') else None,
                'classes_have_no_parents': config.get(s, 'classes_have_no_parents'),
                'base_classes_to_skip': config.get(s, 'base_classes_to_skip'),
                'abstract_classes': config.get(s, 'abstract_classes'),
                'skip': config.get(s, 'skip'),
                'rename_functions': config.get(s, 'rename_functions'),
                'rename_classes': config.get(s, 'rename_classes'),
                'out_file': opts.out_file or config.get(s, 'prefix'),
                'script_control_cpp': config.get(s, 'script_control_cpp') if config.has_option(s, 'script_control_cpp') else 'no',
                'script_type': t,
                'macro_judgement': config.get(s, 'macro_judgement') if config.has_option(s, 'macro_judgement') else None
                }
            generator = Generator(gen_opts)
            generator.generate_code()
# Script entry point: run the generator and convert any failure into a
# non-zero exit status so calling build scripts can detect errors.
if __name__ == '__main__':
    try:
        main()
    except Exception as e:
        traceback.print_exc()
        sys.exit(1)
|
proversity-org/edx-platform | refs/heads/master | lms/djangoapps/branding/tests/test_views.py | 1 | #-*- coding: utf-8 -*-
"""Tests of Branding API views. """
import json
import urllib
import ddt
import mock
from config_models.models import cache
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test import TestCase
from branding.models import BrandingApiConfig
from openedx.core.djangoapps.dark_lang.models import DarkLangConfig
from openedx.core.djangoapps.lang_pref.api import released_languages
from openedx.core.djangoapps.site_configuration.tests.mixins import SiteMixin
from openedx.core.djangoapps.theming.tests.test_util import with_comprehensive_theme_context
from student.tests.factories import UserFactory
@ddt.ddt
class TestFooter(TestCase):
    """Test API end-point for retrieving the footer. """
    def setUp(self):
        """Clear the configuration cache. """
        super(TestFooter, self).setUp()
        cache.clear()
    @ddt.data("*/*", "text/html", "application/json")
    def test_feature_flag(self, accepts):
        """The endpoint 404s for every accept type while the API flag is off."""
        self._set_feature_flag(False)
        resp = self._get_footer(accepts=accepts)
        self.assertEqual(resp.status_code, 404)
    @ddt.data(
        # Open source version
        (None, "application/json", "application/json; charset=utf-8", "Open edX"),
        (None, "text/html", "text/html; charset=utf-8", "lms-footer.css"),
        (None, "text/html", "text/html; charset=utf-8", "Open edX"),
        # EdX.org version
        ("edx.org", "application/json", "application/json; charset=utf-8", "edX Inc"),
        ("edx.org", "text/html", "text/html; charset=utf-8", "lms-footer-edx.css"),
        ("edx.org", "text/html", "text/html; charset=utf-8", "edX Inc"),
    )
    @ddt.unpack
    def test_footer_content_types(self, theme, accepts, content_type, content):
        """Each theme/Accept combination yields the right Content-Type and body."""
        self._set_feature_flag(True)
        with with_comprehensive_theme_context(theme):
            resp = self._get_footer(accepts=accepts)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp["Content-Type"], content_type)
        self.assertIn(content, resp.content)
    @mock.patch.dict(settings.FEATURES, {'ENABLE_FOOTER_MOBILE_APP_LINKS': True})
    @ddt.data("edx.org", None)
    def test_footer_json(self, theme):
        """The JSON payload exposes every documented footer section."""
        self._set_feature_flag(True)
        with with_comprehensive_theme_context(theme):
            resp = self._get_footer()
        self.assertEqual(resp.status_code, 200)
        json_data = json.loads(resp.content)
        self.assertTrue(isinstance(json_data, dict))
        # Logo
        self.assertIn("logo_image", json_data)
        # Links
        self.assertIn("navigation_links", json_data)
        for link in json_data["navigation_links"]:
            self.assertIn("name", link)
            self.assertIn("title", link)
            self.assertIn("url", link)
        # Social links
        self.assertIn("social_links", json_data)
        for link in json_data["social_links"]:
            self.assertIn("name", link)
            self.assertIn("title", link)
            self.assertIn("url", link)
            self.assertIn("icon-class", link)
            self.assertIn("action", link)
        # Mobile links
        self.assertIn("mobile_links", json_data)
        for link in json_data["mobile_links"]:
            self.assertIn("name", link)
            self.assertIn("title", link)
            self.assertIn("url", link)
            self.assertIn("image", link)
        # Legal links
        self.assertIn("legal_links", json_data)
        for link in json_data["legal_links"]:
            self.assertIn("name", link)
            self.assertIn("title", link)
            self.assertIn("url", link)
        # OpenEdX
        self.assertIn("openedx_link", json_data)
        self.assertIn("url", json_data["openedx_link"])
        self.assertIn("title", json_data["openedx_link"])
        self.assertIn("image", json_data["openedx_link"])
        # Copyright
        self.assertIn("copyright", json_data)
    def test_absolute_urls_with_cdn(self):
        """Asset URLs in the JSON footer reflect the CDN-backed storage URL."""
        self._set_feature_flag(True)
        # Ordinarily, we'd use `override_settings()` to override STATIC_URL,
        # which is what the staticfiles storage backend is using to construct the URL.
        # Unfortunately, other parts of the system are caching this value on module
        # load, which can cause other tests to fail. To ensure that this change
        # doesn't affect other tests, we patch the `url()` method directly instead.
        cdn_url = "http://cdn.example.com/static/image.png"
        with mock.patch('branding.api.staticfiles_storage.url', return_value=cdn_url):
            resp = self._get_footer()
        self.assertEqual(resp.status_code, 200)
        json_data = json.loads(resp.content)
        self.assertEqual(json_data["logo_image"], cdn_url)
        for link in json_data["mobile_links"]:
            self.assertEqual(link["url"], cdn_url)
    @ddt.data(
        ("en", "registered trademarks"),
        ("eo", u"régïstéréd trädémärks"), # Dummy language string
        ("unknown", "registered trademarks"), # default to English
    )
    @ddt.unpack
    def test_language_override_translation(self, language, expected_copyright):
        """The ?language= query parameter selects the copyright translation."""
        self._set_feature_flag(True)
        # Load the footer with the specified language
        resp = self._get_footer(params={'language': language})
        self.assertEqual(resp.status_code, 200)
        json_data = json.loads(resp.content)
        # Verify that the translation occurred
        self.assertIn(expected_copyright, json_data['copyright'])
    @ddt.data(
        # OpenEdX
        (None, "en", "lms-footer.css"),
        (None, "ar", "lms-footer-rtl.css"),
        # EdX.org
        ("edx.org", "en", "lms-footer-edx.css"),
        ("edx.org", "ar", "lms-footer-edx-rtl.css"),
    )
    @ddt.unpack
    def test_language_rtl(self, theme, language, static_path):
        """Right-to-left languages load the matching RTL stylesheet."""
        self._set_feature_flag(True)
        with with_comprehensive_theme_context(theme):
            resp = self._get_footer(accepts="text/html", params={'language': language})
        self.assertEqual(resp.status_code, 200)
        self.assertIn(static_path, resp.content)
    @ddt.data(
        # OpenEdX
        (None, True),
        (None, False),
        # EdX.org
        ("edx.org", True),
        ("edx.org", False),
    )
    @ddt.unpack
    def test_show_openedx_logo(self, theme, show_logo):
        """The 'show-openedx-logo' query flag toggles the OpenEdX logo link."""
        self._set_feature_flag(True)
        with with_comprehensive_theme_context(theme):
            params = {'show-openedx-logo': 1} if show_logo else {}
            resp = self._get_footer(accepts="text/html", params=params)
        self.assertEqual(resp.status_code, 200)
        if show_logo:
            self.assertIn(settings.FOOTER_OPENEDX_URL, resp.content)
        else:
            self.assertNotIn(settings.FOOTER_OPENEDX_URL, resp.content)
    @ddt.data(
        # OpenEdX
        (None, False),
        (None, True),
        # EdX.org
        ("edx.org", False),
        ("edx.org", True),
    )
    @ddt.unpack
    def test_include_dependencies(self, theme, include_dependencies):
        """The 'include-dependencies' query flag pulls in vendor scripts."""
        self._set_feature_flag(True)
        with with_comprehensive_theme_context(theme):
            params = {'include-dependencies': 1} if include_dependencies else {}
            resp = self._get_footer(accepts="text/html", params=params)
        self.assertEqual(resp.status_code, 200)
        if include_dependencies:
            self.assertIn("vendor", resp.content)
        else:
            self.assertNotIn("vendor", resp.content)
    @ddt.data(
        # OpenEdX
        (None, None, '1'),
        (None, 'eo', '1'),
        (None, None, ''),
        # EdX.org
        ('edx.org', None, '1'),
        ('edx.org', 'eo', '1'),
        ('edx.org', None, '')
    )
    @ddt.unpack
    def test_include_language_selector(self, theme, language, include_language_selector):
        """The language selector appears only when explicitly requested."""
        self._set_feature_flag(True)
        DarkLangConfig(released_languages='en,eo,es-419,fr', enabled=True, changed_by=User().save()).save()
        with with_comprehensive_theme_context(theme):
            # Only pass the query parameters that actually have a value.
            params = {
                key: val for key, val in [
                    ('language', language), ('include-language-selector', include_language_selector)
                ] if val
            }
            resp = self._get_footer(accepts="text/html", params=params)
        self.assertEqual(resp.status_code, 200)
        if include_language_selector:
            selected_language = language if language else 'en'
            self._verify_language_selector(resp.content, selected_language)
        else:
            self.assertNotIn('footer-language-selector', resp.content)
    def test_no_supported_accept_type(self):
        """An unsupported Accept header gets a 406 Not Acceptable."""
        self._set_feature_flag(True)
        resp = self._get_footer(accepts="application/x-shockwave-flash")
        self.assertEqual(resp.status_code, 406)
    def _set_feature_flag(self, enabled):
        """Enable or disable the feature flag for the branding API end-points. """
        config = BrandingApiConfig(enabled=enabled)
        config.save()
    def _get_footer(self, accepts="application/json", params=None):
        """Retrieve the footer. """
        url = reverse("branding_footer")
        if params is not None:
            url = u"{url}?{params}".format(
                url=url,
                params=urllib.urlencode(params)
            )
        return self.client.get(url, HTTP_ACCEPT=accepts)
    def _verify_language_selector(self, content, selected_language):
        """ Verify that the language selector is present and correctly configured."""
        # Verify the selector is included
        self.assertIn('footer-language-selector', content)
        # Verify the correct language is selected
        self.assertIn('<option value="{}" selected="selected">'.format(selected_language), content)
        # Verify the language choices
        for language in released_languages():
            if language.code == selected_language:
                continue
            self.assertIn('<option value="{}">'.format(language.code), content)
class TestIndex(SiteMixin, TestCase):
    """ Test the index view """
    def setUp(self):
        """ Set up a user """
        super(TestIndex, self).setUp()
        # NOTE(review): the patcher is started but never stopped (no
        # addCleanup), so the tracker stays mocked for the remainder of the
        # test run -- confirm this is intended.
        patcher = mock.patch("student.models.tracker")
        self.mock_tracker = patcher.start()
        self.user = UserFactory.create()
        self.user.set_password("password")
        self.user.save()
    def test_index_does_not_redirect_without_site_override(self):
        """ Test index view does not redirect if MKTG_URLS['ROOT'] is not set """
        response = self.client.get(reverse("root"))
        self.assertEqual(response.status_code, 200)
    def test_index_redirects_to_marketing_site_with_site_override(self):
        """ Test index view redirects if MKTG_URLS['ROOT'] is set in SiteConfiguration """
        self.use_site(self.site_other)
        response = self.client.get(reverse("root"))
        self.assertRedirects(
            response,
            self.site_configuration_other.values["MKTG_URLS"]["ROOT"],
            fetch_redirect_response=False
        )
    def test_header_logo_links_to_marketing_site_with_site_override(self):
        """
        Test marketing site root link is included on dashboard page
        if MKTG_URLS['ROOT'] is set in SiteConfiguration
        """
        self.use_site(self.site_other)
        self.client.login(username=self.user.username, password="password")
        response = self.client.get(reverse("dashboard"))
        self.assertIn(self.site_configuration_other.values["MKTG_URLS"]["ROOT"], response.content)
|
galtay/cosmolabe | refs/heads/master | cosmolabe/__init__.py | 1 | import units
import constants
# Module-level singletons shared by the rest of cosmolabe:
# `u` holds the unit definitions, `pc` the physical constants.
u = units.Units()
pc = constants.PhysicalConstants()
|
huyang1532/learn-python | refs/heads/master | xlrd-0.6.1/xlrd/timemachine.py | 1 | # -*- coding: cp1252 -*-
##
# <p>Copyright © 2006 Stephen John Machin, Lingfo Pty Ltd</p>
# <p>This module is part of the xlrd package, which is released under a BSD-style licence.</p>
##
# timemachine.py -- adaptation for earlier Pythons e.g. 2.1
# usage: from timemachine import *
import sys
python_version = sys.version_info[:2] # e.g. version 2.4 -> (2, 4)
# Capability flags consulted elsewhere in xlrd to work around old
# interpreter limitations.
CAN_PICKLE_ARRAY = python_version >= (2, 5)
CAN_SUBCLASS_BUILTIN = python_version >= (2, 2)
# IronPython (at the time) had no usable array module; expose None so
# callers can test for its absence.
if sys.version.startswith("IronPython"):
    array_array = None
else:
    from array import array as array_array
# Python < 2.2 compatibility: new-style classes and the bool constants do
# not exist yet, so provide minimal stand-ins.  (Assigning to False/True
# is a syntax error on Python 3; this module targets legacy Python 2.)
if python_version < (2, 2):
    class object:
        pass
    False = 0
    True = 1
def int_floor_div(x, y):
    # Floor-division helper predating the // operator.
    return divmod(x, y)[0]
def intbool(x):
    # Normalise any truthy/falsy value to the integers 1/0.
    if x:
        return 1
    return 0
# Python < 2.3 compatibility: the sum() builtin first appeared in 2.3.
if python_version < (2, 3):
    def sum(sequence, start=0):
        """Minimal backport of the builtin sum() for Python < 2.3."""
        # BUGFIX: the loop previously iterated over an undefined name
        # ("aseq") instead of the `sequence` parameter, so this fallback
        # raised NameError whenever it was actually used.
        tot = start
        for item in sequence:
            tot += item
        return tot
|
carvalhodj/qunews | refs/heads/master | raspberry/mac_amqp.py | 1 | import pcap, dpkt, binascii
import pika
import sys
import threading
from get_serial import get_serial
# MAC address -> timestamp of the most recent probe request seen from it.
MACS = {}
# Seconds of silence after which a MAC is dropped from the dictionary.
TIME_OUT = 10
# Comma-separated MAC list published periodically by enviaAMQP below.
mensagem = ""
SERIAL = get_serial()
end_ip = sys.argv[1:]
if not end_ip:
    # NOTE(review): this only prints the usage message; execution continues
    # and the pika connection below will fail on the missing sys.argv[1].
    print >> sys.stderr, "Fomato: %s [SERVER IP ADDRESS]" % (sys.argv[0])
# AMQP (RabbitMQ) connection to the server given on the command line.
credentials = pika.PlainCredentials('qunews', 'qunews')
connection = pika.BlockingConnection(pika.ConnectionParameters(sys.argv[1], 5672, 'qunews_host', credentials))
channel = connection.channel()
channel.exchange_declare(exchange='qunews.data', type='topic', durable=True)
routing_key = 'qunews.coletor.ceagri'
historico_routing_key = 'qunews.historico.coletor.ceagri'
def enviaAMQP(rk, hrk):
    """Publish the current MAC list on routing key *rk* and, suffixed with
    this device's serial, on the history key *hrk*; then re-arm a
    60-second threading.Timer so the publication repeats once a minute."""
    global mensagem
    global SERIAL
    channel.basic_publish(exchange='qunews.data', routing_key=rk, body=mensagem)
    channel.basic_publish(exchange='qunews.data', routing_key=hrk, body=mensagem+':'+SERIAL)
    threading.Timer(60, enviaAMQP, [rk,hrk]).start()
# Kick off the periodic publisher before entering the capture loop.
enviaAMQP(routing_key, historico_routing_key)
# Sniff 802.11 frames on wlan0 and keep a rolling set of client MACs seen
# (via probe requests) within the last TIME_OUT seconds.
for ts, pkt in pcap.pcap(name='wlan0'):
    try:
        rtap = dpkt.radiotap.Radiotap(pkt)
    except:
        # NOTE(review): a parse failure leaves `rtap` bound to the previous
        # packet (or unbound on the very first one); a `continue` was
        # probably intended here -- confirm.
        pass
    wifi = rtap.data
    if wifi.type == 0 and wifi.subtype == 4:
        # Management frame (type 0), probe request (subtype 4).
        mac = binascii.hexlify(wifi.mgmt.src)
        ssid = wifi.ies[0].info
        MACS[mac] = ts
    # Expire MACs not seen for TIME_OUT seconds (iterate over a copy
    # because entries are deleted during the loop).
    for mac in list(MACS):
        if(ts - MACS[mac] >= TIME_OUT):
            del MACS[mac]
    print("### TAMANHO DICIONARIO = %d" % len(MACS))
    mensagem = str(list(MACS.keys())).replace('[', '').replace(']', '').replace('\'', '')
|
gitlabhq/pygments.rb | refs/heads/master | vendor/pygments-main/pygments/styles/fruity.py | 364 | # -*- coding: utf-8 -*-
"""
pygments.styles.fruity
~~~~~~~~~~~~~~~~~~~~~~
pygments version of my "fruity" vim theme.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Token, Comment, Name, Keyword, \
Generic, Number, String, Whitespace
class FruityStyle(Style):
    """
    Pygments version of the "native" vim theme.
    """
    # Editor chrome: dark page background with a slightly lighter
    # highlight-line colour.
    background_color = '#111111'
    highlight_color = '#333333'
    # Token -> Pygments style-string map; entries use the standard
    # "#rrggbb [bold|italic] [bg:#rrggbb]" format.
    styles = {
        Whitespace:         '#888888',
        Token:              '#ffffff',
        Generic.Output:     '#444444 bg:#222222',
        Keyword:            '#fb660a bold',
        Keyword.Pseudo:     'nobold',
        Number:             '#0086f7 bold',
        Name.Tag:           '#fb660a bold',
        Name.Variable:      '#fb660a',
        Comment:            '#008800 bg:#0f140f italic',
        Name.Attribute:     '#ff0086 bold',
        String:             '#0086d2',
        Name.Function:      '#ff0086 bold',
        Generic.Heading:    '#ffffff bold',
        Keyword.Type:       '#cdcaa9 bold',
        Generic.Subheading: '#ffffff bold',
        Name.Constant:      '#0086d2',
        Comment.Preproc:    '#ff0007 bold'
    }
|
LividInstruments/LiveRemoteScripts | refs/heads/master | Livid_Base/__init__.py | 1 | # by amounra 0413 : http://www.aumhaa.com
from Base import Base
from _Framework.Capabilities import controller_id, inport, outport, CONTROLLER_ID_KEY, PORTS_KEY, HIDDEN, NOTES_CC, SCRIPT, REMOTE, SYNC, TYPE_KEY, FIRMWARE_KEY, AUTO_LOAD_KEY
def get_capabilities():
    """Describe the controller to Live's auto-detection machinery.

    Identifies the Base by USB vendor/product id, declares its MIDI
    in/out ports (one script/remote port pair plus one plain pair), and
    sets AUTO_LOAD_KEY to False so the script is not loaded automatically.
    """
    return {CONTROLLER_ID_KEY: controller_id(vendor_id=2536, product_ids=[115], model_name='Livid Instruments Base'),
     PORTS_KEY: [inport(props=[HIDDEN, NOTES_CC, SCRIPT, REMOTE]),
     inport(props = []),
     outport(props=[HIDDEN, NOTES_CC, SCRIPT, REMOTE]),
     outport(props=[])],
     TYPE_KEY: 'push',
     AUTO_LOAD_KEY: False}
def create_instance(c_instance):
    """ Creates and returns the Base script """
    # c_instance is the ControlSurface host interface supplied by Live.
    return Base(c_instance)
|
hiisi13/django-management-audit | refs/heads/master | setup.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from django.conf import settings
# Django settings must be configured before importing the package because
# management_audit touches Django machinery at import time.
settings.configure()
import management_audit
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
version = management_audit.__version__
# "python setup.py publish" shortcut: upload an sdist to PyPI and remind
# the maintainer to tag the release.
if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    print("You probably want to also tag the version now:")
    print(" git tag -a %s -m 'version %s'" % (version, version))
    print(" git push --tags")
    sys.exit()
# The long description is the README plus the changelog, with the
# ".. :changelog:" marker line stripped out.
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
setup(
    name='django-management-audit',
    version=version,
    description="""Easily audit Django management commands""",
    long_description=readme + '\n\n' + history,
    author='Dmitry Kozhedubov',
    author_email='dmitry-kozhedubov@yandex.ru',
    url='https://github.com/hiisi13/django-management-audit',
    packages=[
        'management_audit',
    ],
    include_package_data=True,
    install_requires=[
    ],
    license="BSD",
    zip_safe=False,
    keywords='django-management-audit',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
    ],
) |
bjzhang/xen_arm_pv | refs/heads/master | tools/python/xen/xend/server/mtdif.py | 3 | #============================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (C) 2004, 2005 Mike Wray <mike.wray@hp.com>
# Copyright (C) 2005 XenSource Ltd
#============================================================================
import re
import string
import sys, traceback
#from xen.util import blkif
from xen.xend import sxp
from xen.xend.XendError import VmError
from xen.xend.XendLogging import log
from xen.xend.server.DevController import DevController
class MtdifController(DevController):
    """MTD device interface controller. Handles all MTD devices
    for a domain.
    """
    def __init__(self, vm):
        """Create a block device controller.
        """
        DevController.__init__(self, vm)
    def getDeviceDetails(self, config):
        """@see DevController.getDeviceDetails

        Builds the backend/frontend xenstore records from the s-expression
        device config.  The 'uname' value has the form "<type>:<params>".
        """
        # dev = sxp.child_value(config, 'dev')
        # (typ, params) = string.split(sxp.child_value(config, 'uname'), ':', 1)
        uname = sxp.child_value(config, 'uname')
        (typ, params) = string.split(uname, ':')
        dev = sxp.child_value(config, 'dev')
        # NOTE(review): this `mode` local is unused -- the backend record
        # below re-reads 'mode' with a default of 'r' instead.
        mode = sxp.child_value(config, 'mode')
        back = { 'dev'    : dev,
                 'type'   : typ,
                 'params' : params,
                 'mode'   : sxp.child_value(config, 'mode', 'r')
               }
        # if 'ioemu:' in dev:
        # (dummy, dev1) = string.split(dev, ':', 1)
        # devid = blkif.blkdev_name_to_number(dev1)
        # front = {}
        # else:
        # devid = blkif.blkdev_name_to_number(dev)
        # The blkif-based device-id lookup above is disabled; a fixed
        # placeholder id is used for all MTD devices.
        devid = 777
        front = { 'virtual-device' : "%i" % devid }
        return (devid, back, front)
    def configuration(self, devid):
        """@see DevController.configuration

        Reconstructs the s-expression config (dev, uname, mode) from the
        values stored in the backend.
        """
        result = DevController.configuration(self, devid)
        (dev, typ, params, mode) = self.readBackend(devid,
                                                    'dev', 'type', 'params',
                                                    'mode')
        if dev:
            result.append(['dev', dev])
        if typ and params:
            result.append(['uname', typ + ":" + params])
        if mode:
            result.append(['mode', mode])
        return result
    def destroyDevice(self, devid):
        """@see DevController.destroyDevice"""
        # If we are given a device name, then look up the device ID from it,
        # and destroy that ID instead. If what we are given is an integer,
        # then assume it's a device ID and pass it straight through to our
        # superclass's method.
        try:
            DevController.destroyDevice(self, int(devid))
        except ValueError:
            # Accept either the full device path or just its last segment.
            devid_end = type(devid) is str and devid.split('/')[-1] or None
            for i in self.deviceIDs():
                d = self.readBackend(i, 'dev')
                if d == devid or (devid_end and d == devid_end):
                    DevController.destroyDevice(self, i)
                    return
            raise VmError("Device %s not connected" % devid)
|
cecep-edu/refactory | refs/heads/staging | requirements/PyChart-1.39/pychart/afm/Utopia_Italic.py | 12 | # AFM font Utopia-Italic (path: /usr/share/fonts/afms/adobe/putri8a.afm).
# Derived from Ghostscript distribution.
# Go to www.cs.wisc.edu/~ghost to get the Ghostcript source code.
import dir

# Per-glyph advance widths for the Utopia-Italic AFM font, indexed by
# character code 0-255, registered in the shared AFM lookup table.
dir.afm["Utopia-Italic"] = (500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 225, 240, 402, 530, 530, 826, 725, 216, 350, 350, 412, 570, 265, 392, 265, 270, 530, 530, 530, 530, 530, 530, 530, 530, 530, 530, 265, 265, 570, 570, 570, 425, 794, 624, 632, 661, 763, 596, 571, 709, 775, 345, 352, 650, 565, 920, 763, 753, 614, 753, 640, 533, 606, 794, 637, 946, 632, 591, 622, 330, 390, 330, 570, 500, 216, 561, 559, 441, 587, 453, 315, 499, 607, 317, 309, 545, 306, 912, 618, 537, 590, 559, 402, 389, 341, 618, 510, 785, 516, 468, 468, 340, 270, 340, 570, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 240, 530, 530, 100, 530, 530, 530, 530, 216, 402, 462, 277, 277, 607, 603, 500, 500, 500, 490, 265, 500, 560, 500, 216, 402, 402, 462, 1000, 1200, 500, 425, 500, 400, 400, 400, 400, 400, 400, 402, 400, 500, 400, 400, 500, 400, 350, 400, 1000, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 880, 500, 425, 500, 500, 500, 500, 571, 753, 1020, 389, 500, 500, 500, 500, 500, 779, 500, 500, 500, 317, 500, 500, 318, 537, 806, 577, )
|
manoj24rana/MobileIPv6 | refs/heads/master | src/fd-net-device/test/examples-to-run.py | 97 | #! /usr/bin/env python
## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
# A list of C++ examples to run in order to ensure that they remain
# buildable and runnable over time. Each tuple in the list contains
#
# (example_name, do_run, do_valgrind_run).
#
# See test.py for more information.
cpp_examples = [
    # NOTE(review): the do_run/do_valgrind flags are the strings
    # "True"/"False", not booleans -- presumably evaluated by the test
    # harness (test.py); confirm before changing them to bool literals.
    ("dummy-network", "True", "True"),
    ("fd-emu-ping", "False", "True"),
    ("fd-emu-onoff", "False", "True"),
    ("fd-emu-udp-echo", "False", "True"),
    ("realtime-dummy-network", "False", "True"),
    ("fd2fd-onoff", "True", "True"),
    ("fd-tap-ping", "False", "True"),
    ("realtime-fd2fd-onoff", "False", "True"),
]
# A list of Python examples to run in order to ensure that they remain
# runnable over time. Each tuple in the list contains
#
# (example_name, do_run).
#
# See test.py for more information.
|
tyrannosaurus/python-libmagic | refs/heads/master | magic/tests/__init__.py | 12133432 | |
ehashman/oh-mainline | refs/heads/master | vendor/packages/Django/tests/modeltests/or_lookups/__init__.py | 12133432 | |
krzysztofwos/BitcoinUnlimited | refs/heads/dev | src/test/bitcoin-util-test.py | 257 | #!/usr/bin/python
# Copyright 2014 BitPay, Inc.
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import os
import bctest
import buildenv
# Script entry point: run the JSON-driven bitcoin-util test cases found
# in the source tree's test/data directory ($srcdir must be exported).
if __name__ == '__main__':
    bctest.bctester(os.environ["srcdir"] + "/test/data",
                    "bitcoin-util-test.json",buildenv)
|
iulian787/spack | refs/heads/develop | var/spack/repos/builtin/packages/yoda/package.py | 2 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Yoda(AutotoolsPackage):
    """YODA - Yet more Objects for Data Analysis"""

    homepage = "https://yoda.hepforge.org/"
    url = "https://yoda.hepforge.org/downloads/?f=YODA-1.8.3.tar.bz2"

    tags = ['hep']

    version('1.8.3', sha256='d9dd0ea5e0f630cdf4893c09a40c78bd44455777c2125385ecc26fa9a2acba8a')
    version('1.8.2', sha256='89558c11cf9b88b0899713e5b4bf8781fdcecc480ff155985ebbf148c6d80bdb')
    version('1.8.1', sha256='51472e12065b9469f13906f0dc609e036d0c1dbd2a8e445e7d654aba73660112')
    version('1.8.0', sha256='82c62bbaedb4b6b7d50cd42ce5409d453d46c1cc6724047db5efa74d34dd6dc5')
    version('1.7.7', sha256='cfb64b099a79ec4d138792f0b464a8fbb04c4345143f77bbdca07acb744628ce')
    version('1.7.6', sha256='864a1459c82676c991fcaed931263a415e815e3c9dc2cad2f94bda6fa4d112e5')
    version('1.7.5', sha256='7b1dc7bb380d0fbadce12072f5cc21912c115e826182a3922d864e7edea131db')
    version('1.7.4', sha256='3df316b89e9c0052104f8956e4f7d26c0b0b05cdace7d908be35c383361e3a71')
    version('1.7.3', sha256='ebf6094733823e9cc2d1586aff06db2d8999c74a47e666baf305322f62c48058')
    version('1.7.2', sha256='7f093cf947824ec118767c7c1999a50ea9343c173cf8c5062e3800ba54c2943e')
    version('1.7.1', sha256='edd7971ecd272314309c800395200b07cf68547cbac3378a02d0b8c9ac03027b')
    version('1.7.0', sha256='b3d6bfb0c52ed87cd240cee5e93e09102832d9ef32505d7275f4d3191a35ce3b')
    version('1.6.7', sha256='2abf378573832c201bc6a9fecfff5b2006fc98c7a272540326cda8eb5bd95e16')
    version('1.6.6', sha256='cf172a496d9108b93420530ea91055d07ecd514d2894d78db46b806530e91d21')
    version('1.6.5', sha256='1477fe754cfe2e4e06aa363a773accf18aab960a8b899968b77834368cac14c5')
    version('1.6.4', sha256='4c01f43c18b7b2e71f61dea0bb8c6fdc099c8e1a66256c510652884c4ffffbca')
    version('1.6.3', sha256='1dd7e334fe54a05ff911d9e227d395abc5efd29e29d60187a036b2201f97da19')
    version('1.6.2', sha256='5793cd1320694118423888801ca520f2719565fde04699ee69e1751f47cb57a8')
    version('1.6.1', sha256='ec3f4cc4eb57f94fb431cc37db10eb831f025df95ffd9e516b8009199253c62b')
    version('1.6.0', sha256='2920ef2588268484b650dc08438664a3539b79c65a9e80d58e3771bb699e2a6b')
    version('1.5.9', sha256='1a19cc8c34c08f1797a93d355250e682eb85d62d4ab277b6714d7873b4bdde75')
    version('1.5.8', sha256='011c5be5cc565f8baf02e7ebbe57a57b4d70dc6a528d5b0102700020bbf5a973')
    version('1.5.7', sha256='f775df11b034154b8f5d43f12007692c3314672e60d3e554b3928fe5b0f00c29')
    version('1.5.6', sha256='050e17b1b80658213281a2e4112dfecc0096f01f269cd739d601b2fd0e790a0c')
    version('1.5.5', sha256='ce45df6248c6c50633953048240513dc52ca5c9144ef69ea72ada2df23bc4918')
    version('1.5.4', sha256='c41853a1f3aa0794875ae09c1ba4348942eb890e798ac7cee6e3505a9b68b678')
    version('1.5.3', sha256='1220ac0ae204c3ed6b22a6a35c30d9b5c1ded35a1054cff131861b4a919d4904')
    version('1.5.2', sha256='ec113c53a6174b174aaea8f45802cc419184ce056123b93ab8d3f80fc1bd4986')
    version('1.5.1', sha256='a8b088b3ede67d560e40f91f4f99be313f21841c46ce2f657af7692a7bbe3276')
    version('1.5.0', sha256='2c2b77344854fac937a8ef07c0928c50829ff4c69bcad6e0afb92da611b7dd18')
    version('1.4.0', sha256='e76a129f7c2b72b53525fe0b712606eeeab0dc145daa070ebf0728f0384eaf48')
    version('1.3.1', sha256='274e196d009e3aac6dd1f2db876de9613ca1a3c21ec3364bc3662f5493bc9747')
    version('1.3.0', sha256='d63197d5940b481ecb06cf4703d9c0b49388f32cad61ccae580d1b80312bd215')
    version('1.2.1', sha256='e86964e91e4fbbba443d2848f55c028001de4713dcc64c40339389de053e7d8b')
    version('1.2.0', sha256='143fa86cd7965d26d3897a5752307bfe08f4866c2f9a9f226a393127d19ee353')
    version('1.1.0', sha256='5d2e8f3c1cddfb59fe651931c7c605fe0ed067864fa86047aed312c6a7938e01')
    version('1.0.7', sha256='145c27d922c27a4e1d6d50030f4ddece5f03d6c309a5e392a5fcbb5e83e747ab')
    version('1.0.6', sha256='357732448d67a593e5ff004418f2a2a263a1401ffe84e021f8a714aa183eaa21')
    version('1.0.5', sha256='ba72bc3943a1b39fa63900570948199cf5ed5c7523f2c4af4740e51b098f1794')
    version('1.0.4', sha256='697fe397c69689feecb2a731e19b2ff85e19343b8198c4f18a7064c4f7123950')
    version('1.0.3', sha256='6a1d1d75d9d74da457726ea9463c1b0b6ba38d4b43ef54e1c33f885e70fdae4b')

    variant("root", default=False, description="Enable ROOT interface")

    depends_on('python', type=('build', 'run'))
    depends_on('py-future', type=('build', 'run'))
    depends_on('boost', when='@:1.6.0', type=('build', 'run'))
    depends_on('py-cython', type='build')
    depends_on('py-matplotlib', when='@1.3.0:', type=('build', 'run'))
    depends_on('root', type=('build', 'run'), when='+root')

    patch('yoda-1.5.5.patch', level=0, when='@1.5.5')
    patch('yoda-1.5.9.patch', level=0, when='@1.5.9')
    patch('yoda-1.6.1.patch', level=0, when='@1.6.1')
    patch('yoda-1.6.2.patch', level=0, when='@1.6.2')
    patch('yoda-1.6.3.patch', level=0, when='@1.6.3')
    patch('yoda-1.6.4.patch', level=0, when='@1.6.4')
    patch('yoda-1.6.5.patch', level=0, when='@1.6.5')
    patch('yoda-1.6.6.patch', level=0, when='@1.6.6')
    patch('yoda-1.6.7.patch', level=0, when='@1.6.7')

    def configure_args(self):
        """Extra arguments passed to ./configure."""
        args = []
        # Fixed: the original used `args += '<string>'`, which extends the
        # list with the string's individual characters ('-', '-', 'w', ...)
        # instead of adding one argument.  Use append so each option is a
        # single element.
        if self.spec.satisfies('@:1.6.0'):
            args.append('--with-boost=' + self.spec['boost'].prefix)
        if '+root' in self.spec:
            args.append('--enable-root')
        return args
|
chiviak/headphones | refs/heads/master | lib/cherrypy/lib/jsontools.py | 66 | import cherrypy
from cherrypy._cpcompat import basestring, ntou, json_encode, json_decode
def json_processor(entity):
    """Deserialize the JSON request entity into request.json."""
    content_length = entity.headers.get(ntou("Content-Length"), ntou(""))
    if not content_length:
        # 411 Length Required: refuse to read a body of unknown size.
        raise cherrypy.HTTPError(411)
    raw = entity.fp.read()
    try:
        cherrypy.serving.request.json = json_decode(raw.decode('utf-8'))
    except ValueError:
        raise cherrypy.HTTPError(400, 'Invalid JSON document')
def json_in(content_type=[ntou('application/json'), ntou('text/javascript')],
            force=True, debug=False, processor=json_processor):
    """Install a JSON deserializer for matching request entities.

    Request bodies whose Content-Type matches 'content_type' (a single
    string or a list of strings) are decoded from JSON, with the result
    stored at cherrypy.request.json.  With force=True (the default),
    entities of any other content type are rejected with "415
    Unsupported Media Type".

    A custom 'processor' may be supplied (via tools.json_in.processor or
    the decorator) to decode differently or store the result elsewhere.

    Clients must send a Content-Length request header, otherwise "411
    Length Required" is raised; a body that cannot be deserialized from
    JSON yields "400 Bad Request: Invalid JSON document".

    You must be using Python 2.6 or greater, or have the 'simplejson'
    package importable; otherwise, ValueError is raised during
    processing.
    """
    request = cherrypy.serving.request
    # Normalize to a list; the shared mutable default is never mutated.
    accepted = [content_type] if isinstance(content_type, basestring) \
        else content_type
    if force:
        if debug:
            cherrypy.log('Removing body processors %s' %
                         repr(request.body.processors.keys()), 'TOOLS.JSON_IN')
        request.body.processors.clear()
        request.body.default_proc = cherrypy.HTTPError(
            415, 'Expected an entity of content type %s' %
            ', '.join(accepted))
    for mime in accepted:
        if debug:
            cherrypy.log('Adding body processor for %s' % mime,
                         'TOOLS.JSON_IN')
        request.body.processors[mime] = processor
def json_handler(*args, **kwargs):
    # Run the wrapped page handler, then JSON-encode whatever it returned.
    inner = cherrypy.serving.request._json_inner_handler
    return json_encode(inner(*args, **kwargs))
def json_out(content_type='application/json', debug=False,
             handler=json_handler):
    """Serialize request.handler's output to JSON and set Content-Type.

    When 'content_type' is None the Content-Type response header is left
    untouched.  A custom encoder can be plugged in through
    cherrypy.config['tools.json_out.handler'] or @json_out(handler=...).

    You must be using Python 2.6 or greater, or have the 'simplejson'
    package importable; otherwise, ValueError is raised during
    processing.
    """
    request = cherrypy.serving.request
    if request.handler is None:
        # Another component (e.g. the caching tool) has already attached
        # a response body; there is nothing to wrap.
        return
    if debug:
        cherrypy.log('Replacing %s with JSON handler' % request.handler,
                     'TOOLS.JSON_OUT')
    request._json_inner_handler = request.handler
    request.handler = handler
    if content_type is None:
        return
    if debug:
        cherrypy.log('Setting Content-Type to %s' %
                     content_type, 'TOOLS.JSON_OUT')
    cherrypy.serving.response.headers['Content-Type'] = content_type
|
schleichdi2/openpli-e2 | refs/heads/master | lib/python/Components/Renderer/NextEpgInfo.py | 17 | from Components.VariableText import VariableText
from Renderer import Renderer
from enigma import eLabel, eEPGCache, eServiceReference
from time import localtime, strftime
class NextEpgInfo(Renderer, VariableText):
	"""Renderer showing the next EPG event(s) for the current service."""

	def __init__(self):
		Renderer.__init__(self)
		VariableText.__init__(self)
		self.epgcache = eEPGCache.getInstance()
		# How many upcoming events to render; overridden by the skin's
		# "NumberOfItems" attribute (see applySkin).
		self.numberOfItems = 1

	GUI_WIDGET = eLabel

	def changed(self, what):
		self.text = ""
		reference = self.source.service
		info = reference and self.source.info
		if info:
			currentEvent = self.source.getCurrentEvent()
			if currentEvent:
				# Query the EPG for events starting once the current one ends.
				if not self.epgcache.startTimeQuery(eServiceReference(reference.toString()), currentEvent.getBeginTime() + currentEvent.getDuration()):
					if self.numberOfItems == 1:
						event = self.epgcache.getNextTimeEntry()
						if event:
							self.text = "%s: %s" % (pgettext("now/next: 'next' event label", "Next"), event.getEventName())
					else:
						for _ in range(self.numberOfItems):
							event = self.epgcache.getNextTimeEntry()
							if event:
								self.text = "%s\n%s %s" % (self.text, strftime("%H:%M", localtime(event.getBeginTime())), event.getEventName())
						self.text = self.text and "%s%s" % (pgettext("now/next: 'next' event label", "Next"), self.text) or ""

	def applySkin(self, desktop, parent):
		# Consume our private "NumberOfItems" attribute.  Fixed: the
		# original removed entries from self.skinAttributes while
		# iterating over it, which skips the element after each removal;
		# build a filtered list instead.
		attribs = []
		for (attrib, value) in self.skinAttributes:
			if attrib == "NumberOfItems":
				self.numberOfItems = int(value)
			else:
				attribs.append((attrib, value))
		self.skinAttributes = attribs
		return Renderer.applySkin(self, desktop, parent)
|
sunxfancy/ArduCopter | refs/heads/master | Tools/autotest/apm_unit_tests/mustpass/arducopter_arm_disarm.py | 250 | import arducopter
def unit_test(mavproxy, mav):
    '''A scripted flight plan'''
    # Run each phase in order; abort on the first failure.
    steps = (arducopter.calibrate_level,
             arducopter.arm_motors,
             arducopter.disarm_motors)
    for step in steps:
        if not step(mavproxy, mav):
            return False
    return True
|
sebdelsol/pyload | refs/heads/stable | module/plugins/hoster/GamefrontCom.py | 1 | # -*- coding: utf-8 -*-
import re
from module.network.RequestFactory import getURL
from module.plugins.Hoster import Hoster
from module.utils import parseFileSize
class GamefrontCom(Hoster):
    __name__ = "GamefrontCom"
    __type__ = "hoster"
    __version__ = "0.04"

    __pattern__ = r'http://(?:www\.)?gamefront\.com/files/\w+'

    __description__ = """Gamefront.com hoster plugin"""
    __license__ = "GPLv3"
    __authors__ = [("fwannmacher", "felipe@warhammerproject.com")]

    PATTERN_FILENAME = r'<title>(.*?) | Game Front'
    PATTERN_FILESIZE = r'<dt>File Size:</dt>[\n\s]*<dd>(.*?)</dd>'
    PATTERN_OFFLINE = r'This file doesn\'t exist, or has been removed.'

    def setup(self):
        self.resumeDownload = self.multiDL = True
        self.chunkLimit = -1

    def process(self, pyfile):
        """Resolve the download link for pyfile.url and start the download."""
        self.pyfile = pyfile
        self.html = self.load(pyfile.url, decode=True)

        if not self._checkOnline():
            self.offline()

        pyfile.name = self._getName()

        link = self._getLink()
        if not link.startswith('http://'):
            link = "http://www.gamefront.com/" + link

        self.download(link)

    def _checkOnline(self):
        # True when the page does not show the "file removed" notice.
        if re.search(self.PATTERN_OFFLINE, self.html):
            return False
        else:
            return True

    def _getName(self):
        name = re.search(self.PATTERN_FILENAME, self.html)
        if name is None:
            # Fixed: the original line was missing its closing
            # parenthesis, which made this whole module a SyntaxError.
            self.fail(_("Plugin broken"))
        return name.group(1)

    def _getLink(self):
        self.html2 = self.load("http://www.gamefront.com/" + re.search("(files/service/thankyou\\?id=\w+)",
                                                                       self.html).group(1))
        # Fixed: the original had the no-op replace("&", "&"), an
        # HTML-entity-decoding corruption of the intended unescape.
        return re.search("<a href=\"(http://media\d+\.gamefront.com/.*)\">click here</a>", self.html2).group(1).replace("&amp;", "&")
def getInfo(urls):
    # Batch status lookup: yields a single list of
    # (name, size, status, url) tuples, one per input url.
    infos = []
    for url in urls:
        page = getURL(url)
        if re.search(GamefrontCom.PATTERN_OFFLINE, page):
            infos.append((url, 0, 1, url))
            continue
        name_m = re.search(GamefrontCom.PATTERN_FILENAME, page)
        if name_m is None:
            infos.append((url, 0, 1, url))
            continue
        size_m = re.search(GamefrontCom.PATTERN_FILESIZE, page)
        infos.append((name_m.group(1), parseFileSize(size_m.group(1)), 3, url))
    yield infos
|
s20121035/rk3288_android5.1_repo | refs/heads/master | cts/apps/CameraITS/tests/scene1/test_latching.py | 2 | # Copyright 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import its.image
import its.caps
import its.device
import its.objects
import its.target
import pylab
import os.path
import matplotlib
import matplotlib.pyplot
def main():
    """Test that settings latch on the right frame.

    Takes a bunch of shots using back-to-back requests, varying the capture
    request parameters between shots. Checks that the images that come back
    have the expected properties.
    """
    NAME = os.path.basename(__file__).split(".")[0]

    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()
        # Per-frame control is required for latching to be testable.
        its.caps.skip_unless(its.caps.full(props) and
                             its.caps.per_frame_control(props))

        _,fmt = its.objects.get_fastest_manual_capture_settings(props)
        e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
        # Halve the exposure so that the doubled-exposure shots below do
        # not saturate.
        e /= 2.0

        r_means = []
        g_means = []
        b_means = []

        # Back-to-back requests alternating sensitivity (s vs s*2) and
        # exposure (e vs e*2); the brightness pattern of the returned
        # frames is asserted at the end.
        reqs = [
            its.objects.manual_capture_request(s, e, True),
            its.objects.manual_capture_request(s, e, True),
            its.objects.manual_capture_request(s*2,e, True),
            its.objects.manual_capture_request(s*2,e, True),
            its.objects.manual_capture_request(s, e, True),
            its.objects.manual_capture_request(s, e, True),
            its.objects.manual_capture_request(s, e*2, True),
            its.objects.manual_capture_request(s, e, True),
            its.objects.manual_capture_request(s*2,e, True),
            its.objects.manual_capture_request(s, e, True),
            its.objects.manual_capture_request(s, e*2, True),
            its.objects.manual_capture_request(s, e, True),
            its.objects.manual_capture_request(s, e*2, True),
            its.objects.manual_capture_request(s, e*2, True),
            ]
        caps = cam.do_capture(reqs, fmt)
        # Mean RGB of a small center patch of every returned frame.
        for i,cap in enumerate(caps):
            img = its.image.convert_capture_to_rgb_image(cap)
            its.image.write_image(img, "%s_i=%02d.jpg" % (NAME, i))
            tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
            rgb_means = its.image.compute_image_means(tile)
            r_means.append(rgb_means[0])
            g_means.append(rgb_means[1])
            b_means.append(rgb_means[2])

        # Draw a plot.
        idxs = range(len(r_means))
        pylab.plot(idxs, r_means, 'r')
        pylab.plot(idxs, g_means, 'g')
        pylab.plot(idxs, b_means, 'b')
        pylab.ylim([0,1])
        matplotlib.pyplot.savefig("%s_plot_means.png" % (NAME))

        # Frames with doubled gain or exposure must be brighter than the
        # average, in exactly the order the requests were issued.
        g_avg = sum(g_means) / len(g_means)
        g_ratios = [g / g_avg for g in g_means]
        g_hilo = [g>1.0 for g in g_ratios]
        assert(g_hilo == [False, False, True, True, False, False, True,
                          False, True, False, True, False, True, True])
# Script entry point.
if __name__ == '__main__':
    main()
|
seanchen/taiga-back | refs/heads/master | taiga/export_import/dump_service.py | 17 | # Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils.translation import ugettext as _
from taiga.projects.models import Membership
from . import serializers
from . import service
class TaigaImportError(Exception):
    """Raised when importing a project dump fails at any stage."""

    def __init__(self, message):
        # Pass the message to Exception so that str(exc) and exc.args
        # work as expected (the original left them empty); keep the
        # .message attribute for existing callers.
        super().__init__(message)
        self.message = message
def store_milestones(project, data):
    """Store every milestone from the dump; returns the stored objects."""
    return [service.store_milestone(project, entry)
            for entry in data.get("milestones", [])]
def store_tasks(project, data):
    """Store every task from the dump; returns the stored objects."""
    return [service.store_task(project, entry)
            for entry in data.get("tasks", [])]
def store_wiki_pages(project, data):
    """Store every wiki page from the dump; returns the stored objects."""
    return [service.store_wiki_page(project, page)
            for page in data.get("wiki_pages", [])]
def store_wiki_links(project, data):
    """Store every wiki link from the dump; returns the stored objects."""
    return [service.store_wiki_link(project, link)
            for link in data.get("wiki_links", [])]
def store_user_stories(project, data):
    """Store every user story from the dump; returns the stored objects."""
    return [service.store_user_story(project, entry)
            for entry in data.get("user_stories", [])]
def store_timeline_entries(project, data):
    """Store every timeline entry from the dump; returns the stored objects."""
    return [service.store_timeline_entry(project, entry)
            for entry in data.get("timeline", [])]
def store_issues(project, data):
    """Store every issue from the dump; returns the stored objects."""
    return [service.store_issue(project, entry)
            for entry in data.get("issues", [])]
def store_tags_colors(project, data):
    """Copy the dumped tag->color mapping onto the project and save it."""
    colors = data.get("tags_colors", [])
    project.tags_colors = colors
    project.save()
    return None
def dict_to_project(data, owner=None):
    """Create a full project (with all related objects) from a dump dict.

    Import runs in phases; after each phase the shared service error
    accumulator is checked and the import aborts with TaigaImportError
    on the first recorded failure.  Returns the created project.
    """
    if owner:
        data["owner"] = owner

    project_serialized = service.store_project(data)

    if not project_serialized:
        raise TaigaImportError(_("error importing project data"))

    proj = project_serialized.object

    # Phase: per-project choice lists (points, types, statuses, ...).
    service.store_choices(proj, data, "points", serializers.PointsExportSerializer)
    service.store_choices(proj, data, "issue_types", serializers.IssueTypeExportSerializer)
    service.store_choices(proj, data, "issue_statuses", serializers.IssueStatusExportSerializer)
    service.store_choices(proj, data, "us_statuses", serializers.UserStoryStatusExportSerializer)
    service.store_choices(proj, data, "task_statuses", serializers.TaskStatusExportSerializer)
    service.store_choices(proj, data, "priorities", serializers.PriorityExportSerializer)
    service.store_choices(proj, data, "severities", serializers.SeverityExportSerializer)

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing lists of project attributes"))

    # Phase: default values for those choices.
    service.store_default_choices(proj, data)

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing default project attributes values"))

    # Phase: custom attribute definitions.
    service.store_custom_attributes(proj, data, "userstorycustomattributes",
                                    serializers.UserStoryCustomAttributeExportSerializer)
    service.store_custom_attributes(proj, data, "taskcustomattributes",
                                    serializers.TaskCustomAttributeExportSerializer)
    service.store_custom_attributes(proj, data, "issuecustomattributes",
                                    serializers.IssueCustomAttributeExportSerializer)

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing custom attributes"))

    # Phase: roles and memberships.
    service.store_roles(proj, data)

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing roles"))

    service.store_memberships(proj, data)

    # Ensure the owner is always a member (as project owner) when the
    # dump did not include a membership for them.
    if proj.memberships.filter(user=proj.owner).count() == 0:
        if proj.roles.all().count() > 0:
            Membership.objects.create(
                project=proj,
                email=proj.owner.email,
                user=proj.owner,
                role=proj.roles.all().first(),
                is_owner=True
            )

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing memberships"))

    # Phase: project content, ordered so referenced objects exist first.
    store_milestones(proj, data)

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing sprints"))

    store_wiki_pages(proj, data)

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing wiki pages"))

    store_wiki_links(proj, data)

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing wiki links"))

    store_issues(proj, data)

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing issues"))

    store_user_stories(proj, data)

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing user stories"))

    store_tasks(proj, data)

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing tasks"))

    store_tags_colors(proj, data)

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing tags"))

    store_timeline_entries(proj, data)

    if service.get_errors(clear=False):
        raise TaigaImportError(_("error importing timelines"))

    return proj
|
ProfessionalIT/professionalit-webiste | refs/heads/master | sdk/google_appengine/lib/django-1.2/django/db/backends/postgresql_psycopg2/base.py | 45 | """
PostgreSQL database backend for Django.
Requires psycopg 2: http://initd.org/projects/psycopg2
"""
import sys
from django.db import utils
from django.db.backends import *
from django.db.backends.signals import connection_created
from django.db.backends.postgresql.operations import DatabaseOperations as PostgresqlDatabaseOperations
from django.db.backends.postgresql.client import DatabaseClient
from django.db.backends.postgresql.creation import DatabaseCreation
from django.db.backends.postgresql.version import get_version
from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection
from django.utils.safestring import SafeUnicode, SafeString
try:
import psycopg2 as Database
import psycopg2.extensions
except ImportError, e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e)
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_adapter(SafeString, psycopg2.extensions.QuotedString)
psycopg2.extensions.register_adapter(SafeUnicode, psycopg2.extensions.QuotedString)
class CursorWrapper(object):
    """
    A thin wrapper around psycopg2's normal cursor class so that we can catch
    particular exception instances and reraise them with the right types.
    """
    # NOTE: this module uses Python 2 `except X, e` / three-argument
    # `raise` syntax throughout; it is not Python 3 compatible.

    def __init__(self, cursor):
        self.cursor = cursor

    def execute(self, query, args=None):
        try:
            return self.cursor.execute(query, args)
        except Database.IntegrityError, e:
            # Re-raise as Django's exception type, preserving the
            # original traceback.
            raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
        except Database.DatabaseError, e:
            raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]

    def executemany(self, query, args):
        try:
            return self.cursor.executemany(query, args)
        except Database.IntegrityError, e:
            raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
        except Database.DatabaseError, e:
            raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]

    def __getattr__(self, attr):
        # Delegate everything else to the underlying psycopg2 cursor.
        if attr in self.__dict__:
            return self.__dict__[attr]
        else:
            return getattr(self.cursor, attr)

    def __iter__(self):
        return iter(self.cursor)
class DatabaseFeatures(BaseDatabaseFeatures):
    # psycopg2 returns datetime objects natively; no string cast needed.
    needs_datetime_string_cast = False
    # Flipped to True at connection time for PostgreSQL >= 8.2 when
    # autocommit is enabled (see DatabaseWrapper._cursor).
    can_return_id_from_insert = False
class DatabaseOperations(PostgresqlDatabaseOperations):
    def last_executed_query(self, cursor, sql, params):
        # With psycopg2, cursor objects have a "query" attribute that is the
        # exact query sent to the database. See docs here:
        # http://www.initd.org/tracker/psycopg/wiki/psycopg2_documentation#postgresql-status-message-and-executed-query
        return cursor.query

    def return_insert_id(self):
        # SQL suffix (and params) appended to INSERT statements to fetch
        # the new primary key via PostgreSQL's RETURNING clause.
        return "RETURNING %s", ()
class DatabaseWrapper(BaseDatabaseWrapper):
    """psycopg2 connection wrapper with optional autocommit support."""

    # SQL fragments for the ORM's field lookups.
    operators = {
        'exact': '= %s',
        'iexact': '= UPPER(%s)',
        'contains': 'LIKE %s',
        'icontains': 'LIKE UPPER(%s)',
        'regex': '~ %s',
        'iregex': '~* %s',
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': 'LIKE %s',
        'endswith': 'LIKE %s',
        'istartswith': 'LIKE UPPER(%s)',
        'iendswith': 'LIKE UPPER(%s)',
    }

    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)

        self.features = DatabaseFeatures()
        # OPTIONS['autocommit'] selects isolation level 0 (autocommit)
        # instead of the default 1 (read committed).
        autocommit = self.settings_dict["OPTIONS"].get('autocommit', False)
        self.features.uses_autocommit = autocommit
        self._set_isolation_level(int(not autocommit))
        self.ops = DatabaseOperations(self)
        self.client = DatabaseClient(self)
        self.creation = DatabaseCreation(self)
        self.introspection = DatabaseIntrospection(self)
        self.validation = BaseDatabaseValidation(self)

    def _cursor(self):
        """Return a CursorWrapper, connecting lazily on first use."""
        new_connection = False
        set_tz = False
        settings_dict = self.settings_dict
        if self.connection is None:
            new_connection = True
            set_tz = settings_dict.get('TIME_ZONE')
            if settings_dict['NAME'] == '':
                from django.core.exceptions import ImproperlyConfigured
                raise ImproperlyConfigured("You need to specify NAME in your Django settings file.")
            conn_params = {
                'database': settings_dict['NAME'],
            }
            conn_params.update(settings_dict['OPTIONS'])
            # 'autocommit' is our own option, not a psycopg2 connect arg.
            if 'autocommit' in conn_params:
                del conn_params['autocommit']
            if settings_dict['USER']:
                conn_params['user'] = settings_dict['USER']
            if settings_dict['PASSWORD']:
                conn_params['password'] = settings_dict['PASSWORD']
            if settings_dict['HOST']:
                conn_params['host'] = settings_dict['HOST']
            if settings_dict['PORT']:
                conn_params['port'] = settings_dict['PORT']
            self.connection = Database.connect(**conn_params)
            self.connection.set_client_encoding('UTF8')
            self.connection.set_isolation_level(self.isolation_level)
            connection_created.send(sender=self.__class__, connection=self)
        cursor = self.connection.cursor()
        cursor.tzinfo_factory = None
        if new_connection:
            if set_tz:
                cursor.execute("SET TIME ZONE %s", [settings_dict['TIME_ZONE']])
            # The server version is probed once and cached on the class.
            if not hasattr(self, '_version'):
                self.__class__._version = get_version(cursor)
            if self._version[0:2] < (8, 0):
                # No savepoint support for earlier version of PostgreSQL.
                self.features.uses_savepoints = False
            if self.features.uses_autocommit:
                if self._version[0:2] < (8, 2):
                    # FIXME: Needs extra code to do reliable model insert
                    # handling, so we forbid it for now.
                    from django.core.exceptions import ImproperlyConfigured
                    raise ImproperlyConfigured("You cannot use autocommit=True with PostgreSQL prior to 8.2 at the moment.")
                else:
                    # FIXME: Eventually we'll enable this by default for
                    # versions that support it, but, right now, that's hard to
                    # do without breaking other things (#10509).
                    self.features.can_return_id_from_insert = True
        return CursorWrapper(cursor)

    def _enter_transaction_management(self, managed):
        """
        Switch the isolation level when needing transaction support, so that
        the same transaction is visible across all the queries.
        """
        if self.features.uses_autocommit and managed and not self.isolation_level:
            self._set_isolation_level(1)

    def _leave_transaction_management(self, managed):
        """
        If the normal operating mode is "autocommit", switch back to that when
        leaving transaction management.
        """
        if self.features.uses_autocommit and not managed and self.isolation_level:
            self._set_isolation_level(0)

    def _set_isolation_level(self, level):
        """
        Do all the related feature configurations for changing isolation
        levels. This doesn't touch the uses_autocommit feature, since that
        controls the movement *between* isolation levels.
        """
        assert level in (0, 1)
        try:
            if self.connection is not None:
                self.connection.set_isolation_level(level)
        finally:
            # Keep our bookkeeping in sync even if the server call fails;
            # savepoints only make sense inside a transaction (level 1).
            self.isolation_level = level
            self.features.uses_savepoints = bool(level)

    def _commit(self):
        if self.connection is not None:
            try:
                return self.connection.commit()
            except Database.IntegrityError, e:
                # Re-raise as Django's IntegrityError with the original
                # traceback (Python 2 three-argument raise).
                raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
|
kisoku/ansible | refs/heads/devel | lib/ansible/template/__init__.py | 4 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import contextlib
import os
import re
from ansible.compat.six import string_types, text_type, binary_type, StringIO
from jinja2 import Environment
from jinja2.loaders import FileSystemLoader
from jinja2.exceptions import TemplateSyntaxError, UndefinedError
from jinja2.utils import concat as j2_concat
from jinja2.runtime import StrictUndefined
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleUndefinedVariable
from ansible.plugins import filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
from ansible.utils.debug import debug
try:
from hashlib import sha1
except ImportError:
from sha import sha as sha1
from numbers import Number
__all__ = ['Templar']
# Primitive types that should be returned as-is rather than having Jinja2
# stringify them (see the single-variable fast path in Templar.template()).
NON_TEMPLATED_TYPES = ( bool, Number )
# Template header prefix that lets a template override jinja2 environment
# options, e.g. "#jinja2: trim_blocks: False" -- parsed in _do_template().
JINJA2_OVERRIDE = '#jinja2:'
def _escape_backslashes(data, jinja_env):
"""Double backslashes within jinja2 expressions
A user may enter something like this in a playbook::
debug:
msg: "Test Case 1\\3; {{ test1_name | regex_replace('^(.*)_name$', '\\1')}}"
The string inside of the {{ gets interpreted multiple times First by yaml.
Then by python. And finally by jinja2 as part of it's variable. Because
it is processed by both python and jinja2, the backslash escaped
characters get unescaped twice. This means that we'd normally have to use
four backslashes to escape that. This is painful for playbook authors as
they have to remember different rules for inside vs outside of a jinja2
expression (The backslashes outside of the "{{ }}" only get processed by
yaml and python. So they only need to be escaped once). The following
code fixes this by automatically performing the extra quoting of
backslashes inside of a jinja2 expression.
"""
if '\\' in data and '{{' in data:
new_data = []
d2 = jinja_env.preprocess(data)
in_var = False
for token in jinja_env.lex(d2):
if token[1] == 'variable_begin':
in_var = True
new_data.append(token[2])
elif token[1] == 'variable_end':
in_var = False
new_data.append(token[2])
elif in_var and token[1] == 'string':
# Double backslashes only if we're inside of a jinja2 variable
new_data.append(token[2].replace('\\','\\\\'))
else:
new_data.append(token[2])
data = ''.join(new_data)
return data
def _count_newlines_from_end(in_str):
'''
Counts the number of newlines at the end of a string. This is used during
the jinja2 templating to ensure the count matches the input, since some newlines
may be thrown away during the templating.
'''
try:
i = len(in_str)
j = i -1
while in_str[j] == '\n':
j -= 1
return i - 1 - j
except IndexError:
# Uncommon cases: zero length string and string containing only newlines
return i
class Templar:
    '''
    The main class for templating, with the main entry-point of template().

    A Templar wraps a jinja2 Environment configured with Ansible's filter,
    test and lookup plugins, and recursively templates strings, lists and
    dicts against a variable mapping supplied at construction time or via
    set_available_variables().
    '''
    # NOTE(review): ``variables=dict()`` is a shared mutable default; it is
    # only ever read or replaced here, never mutated in place, so it is safe
    # today -- but new code should not mutate it.
    def __init__(self, loader, shared_loader_obj=None, variables=dict()):
        '''
        :arg loader: object providing ``get_basedir()`` (used as the jinja2
            template search path) and passed through to lookup plugins.
        :arg shared_loader_obj: optional object exposing ``filter_loader``,
            ``test_loader`` and ``lookup_loader`` attributes; when absent
            the module-level plugin loaders are used.
        :arg variables: initial mapping of variables available for templating.
        '''
        self._loader = loader
        # Filter/test plugin caches; populated lazily by _get_filters/_get_tests.
        self._filters = None
        self._tests = None
        self._available_variables = variables
        # Cache of rendered results keyed by a sha1 of (input, options).
        self._cached_result = {}
        if loader:
            self._basedir = loader.get_basedir()
        else:
            self._basedir = './'
        if shared_loader_obj:
            self._filter_loader = getattr(shared_loader_obj, 'filter_loader')
            self._test_loader = getattr(shared_loader_obj, 'test_loader')
            self._lookup_loader = getattr(shared_loader_obj, 'lookup_loader')
        else:
            self._filter_loader = filter_loader
            self._test_loader = test_loader
            self._lookup_loader = lookup_loader
        # flags to determine whether certain failures during templating
        # should result in fatal errors being raised
        self._fail_on_lookup_errors = True
        self._fail_on_filter_errors = True
        self._fail_on_undefined_errors = C.DEFAULT_UNDEFINED_VAR_BEHAVIOR
        self.environment = Environment(
            trim_blocks=True,
            undefined=StrictUndefined,
            extensions=self._get_extensions(),
            finalize=self._finalize,
            loader=FileSystemLoader(self._basedir),
        )
        self.environment.template_class = AnsibleJ2Template
        # Matches a string consisting of exactly one bare variable
        # expression, e.g. "{{ foo }}" -- used by template() to preserve the
        # native type of single-variable lookups.
        self.SINGLE_VAR = re.compile(r"^%s\s*(\w*)\s*%s$" % (self.environment.variable_start_string, self.environment.variable_end_string))
        self.block_start = self.environment.block_start_string
        self.block_end = self.environment.block_end_string
        self.variable_start = self.environment.variable_start_string
        self.variable_end = self.environment.variable_end_string
        # Matches the two-character opening/closing delimiters ("{{", "{%",
        # "}}", "%}") so _clean_data() can comment them out pairwise.
        self._clean_regex = re.compile(r'(?:%s[%s%s]|[%s%s]%s)' % (self.variable_start[0], self.variable_start[1], self.block_start[1], self.block_end[0], self.variable_end[0], self.variable_end[1]))
    def _get_filters(self):
        '''
        Returns filter plugins, after loading and caching them if need be.
        The returned dict also includes the test plugins, so tests are
        callable in filter position.
        '''
        if self._filters is not None:
            return self._filters.copy()
        plugins = [x for x in self._filter_loader.all()]
        self._filters = dict()
        for fp in plugins:
            self._filters.update(fp.filters())
        self._filters.update(self._get_tests())
        return self._filters.copy()
    def _get_tests(self):
        '''
        Returns tests plugins, after loading and caching them if need be
        '''
        if self._tests is not None:
            return self._tests.copy()
        plugins = [x for x in self._test_loader.all()]
        self._tests = dict()
        for fp in plugins:
            self._tests.update(fp.tests())
        return self._tests.copy()
    def _get_extensions(self):
        '''
        Return jinja2 extensions to load.
        If some extensions are set via jinja_extensions in ansible.cfg, we try
        to load them with the jinja environment.
        '''
        jinja_exts = []
        if C.DEFAULT_JINJA2_EXTENSIONS:
            # make sure the configuration directive doesn't contain spaces
            # and split extensions in an array
            jinja_exts = C.DEFAULT_JINJA2_EXTENSIONS.replace(" ", "").split(',')
        return jinja_exts
    def _clean_data(self, orig_data):
        ''' remove jinja2 template tags from a string

        Matched pairs of "{{ }}" / "{% %}" delimiters are overwritten in
        place with jinja2 comment delimiters so the result can never be
        re-templated.  Non-string input is returned unchanged.
        '''
        if not isinstance(orig_data, string_types):
            return orig_data
        with contextlib.closing(StringIO(orig_data)) as data:
            # these variables keep track of opening block locations, as we only
            # want to replace matched pairs of print/block tags
            print_openings = []
            block_openings = []
            for mo in self._clean_regex.finditer(orig_data):
                token = mo.group(0)
                token_start = mo.start(0)
                if token[0] == self.variable_start[0]:
                    if token == self.block_start:
                        block_openings.append(token_start)
                    elif token == self.variable_start:
                        print_openings.append(token_start)
                elif token[1] == self.variable_end[1]:
                    prev_idx = None
                    if token == self.block_end and block_openings:
                        prev_idx = block_openings.pop()
                    elif token == self.variable_end and print_openings:
                        prev_idx = print_openings.pop()
                    if prev_idx is not None:
                        # replace the opening
                        data.seek(prev_idx, os.SEEK_SET)
                        data.write(self.environment.comment_start_string)
                        # replace the closing
                        data.seek(token_start, os.SEEK_SET)
                        data.write(self.environment.comment_end_string)
                else:
                    raise AnsibleError("Error while cleaning data for safety: unhandled regex match")
            return data.getvalue()
    def set_available_variables(self, variables):
        '''
        Sets the list of template variables this Templar instance will use
        to template things, so we don't have to pass them around between
        internal methods. We also clear the template cache here, as the variables
        are being changed.
        '''
        assert isinstance(variables, dict)
        self._available_variables = variables
        self._cached_result = {}
    # NOTE(review): ``static_vars=['']`` is a mutable default argument; it is
    # only used for membership tests below, never mutated, so it is safe.
    def template(self, variable, convert_bare=False, preserve_trailing_newlines=True, escape_backslashes=True, fail_on_undefined=None, overrides=None, convert_data=True, static_vars = [''], cache = True):
        '''
        Templates (possibly recursively) any given data as input. If convert_bare is
        set to True, the given data will be wrapped as a jinja2 variable ('{{foo}}')
        before being sent through the template engine.

        Strings are rendered via _do_template(); lists, tuples and dicts are
        templated element-by-element (dict keys listed in ``static_vars`` are
        copied untouched).  Any other type is returned unchanged.
        '''
        if fail_on_undefined is None:
            fail_on_undefined = self._fail_on_undefined_errors
        # Don't template unsafe variables, instead drop them back down to
        # their constituent type.
        if hasattr(variable, '__UNSAFE__'):
            if isinstance(variable, text_type):
                return self._clean_data(text_type(variable))
            elif isinstance(variable, binary_type):
                return self._clean_data(bytes(variable))
            else:
                return self._clean_data(variable._obj)
        try:
            if convert_bare:
                variable = self._convert_bare_variable(variable)
            if isinstance(variable, string_types):
                result = variable
                if self._contains_vars(variable):
                    # Check to see if the string we are trying to render is just referencing a single
                    # var. In this case we don't want to accidentally change the type of the variable
                    # to a string by using the jinja template renderer. We just want to pass it.
                    only_one = self.SINGLE_VAR.match(variable)
                    if only_one:
                        var_name = only_one.group(1)
                        if var_name in self._available_variables:
                            resolved_val = self._available_variables[var_name]
                            if isinstance(resolved_val, NON_TEMPLATED_TYPES):
                                return resolved_val
                            elif resolved_val is None:
                                return C.DEFAULT_NULL_REPRESENTATION
                    # Using a cache in order to prevent template calls with already templated variables
                    sha1_hash = None
                    if cache:
                        # Key on both the input text and every option that can
                        # change the rendered output.
                        variable_hash = sha1(text_type(variable).encode('utf-8'))
                        options_hash = sha1((text_type(preserve_trailing_newlines) + text_type(escape_backslashes) + text_type(fail_on_undefined) + text_type(overrides)).encode('utf-8'))
                        sha1_hash = variable_hash.hexdigest() + options_hash.hexdigest()
                    if cache and sha1_hash in self._cached_result:
                        result = self._cached_result[sha1_hash]
                    else:
                        result = self._do_template(variable, preserve_trailing_newlines=preserve_trailing_newlines, escape_backslashes=escape_backslashes, fail_on_undefined=fail_on_undefined, overrides=overrides)
                        if convert_data:
                            # if this looks like a dictionary or list, convert it to such using the safe_eval method
                            if (result.startswith("{") and not result.startswith(self.environment.variable_start_string)) or \
                                result.startswith("[") or result in ("True", "False"):
                                eval_results = safe_eval(result, locals=self._available_variables, include_exceptions=True)
                                if eval_results[1] is None:
                                    result = eval_results[0]
                                else:
                                    # FIXME: if the safe_eval raised an error, should we do something with it?
                                    pass
                        # NOTE(review): every templated string result is cached
                        # here whenever ``cache`` is True -- not only single
                        # variable names, despite a comment in earlier
                        # revisions claiming otherwise.  Dynamic constructs
                        # (lookups etc.) therefore get cached too; confirm this
                        # is intended before relying on it.
                        if cache:
                            self._cached_result[sha1_hash] = result
                return result
            elif isinstance(variable, (list, tuple)):
                return [self.template(v, preserve_trailing_newlines=preserve_trailing_newlines, fail_on_undefined=fail_on_undefined, overrides=overrides) for v in variable]
            elif isinstance(variable, dict):
                d = {}
                # we don't use iteritems() here to avoid problems if the underlying dict
                # changes sizes due to the templating, which can happen with hostvars
                for k in variable.keys():
                    if k not in static_vars:
                        d[k] = self.template(variable[k], preserve_trailing_newlines=preserve_trailing_newlines, fail_on_undefined=fail_on_undefined, overrides=overrides)
                    else:
                        d[k] = variable[k]
                return d
            else:
                return variable
        except AnsibleFilterError:
            if self._fail_on_filter_errors:
                raise
            else:
                return variable
    def _contains_vars(self, data):
        '''
        returns True if the data contains a variable pattern
        '''
        return self.environment.block_start_string in data or self.environment.variable_start_string in data
    def _convert_bare_variable(self, variable):
        '''
        Wraps a bare string, which may have an attribute portion (ie. foo.bar)
        in jinja2 variable braces so that it is evaluated properly.
        '''
        if isinstance(variable, string_types):
            contains_filters = "|" in variable
            first_part = variable.split("|")[0].split(".")[0].split("[")[0]
            if (contains_filters or first_part in self._available_variables) and self.environment.variable_start_string not in variable:
                return "%s%s%s" % (self.environment.variable_start_string, variable, self.environment.variable_end_string)
        # the variable didn't meet the conditions to be converted,
        # so just return it as-is
        return variable
    def _finalize(self, thing):
        '''
        A custom finalize method for jinja2, which prevents None from being returned
        '''
        return thing if thing is not None else ''
    def _lookup(self, name, *args, **kwargs):
        '''
        Implementation of the jinja2 ``lookup(...)`` global: run the named
        lookup plugin and return its results joined with "," and wrapped as
        unsafe (so the result is never re-templated).
        '''
        instance = self._lookup_loader.get(name.lower(), loader=self._loader, templar=self)
        if instance is not None:
            from ansible.utils.listify import listify_lookup_plugin_terms
            loop_terms = listify_lookup_plugin_terms(terms=args, templar=self, loader=self._loader, fail_on_undefined=True, convert_bare=False)
            # safely catch run failures per #5059
            try:
                ran = instance.run(loop_terms, variables=self._available_variables, **kwargs)
            except (AnsibleUndefinedVariable, UndefinedError) as e:
                raise AnsibleUndefinedVariable(e)
            except Exception as e:
                if self._fail_on_lookup_errors:
                    raise
                ran = None
            if ran:
                from ansible.vars.unsafe_proxy import UnsafeProxy
                ran = UnsafeProxy(",".join(ran))
            return ran
        else:
            raise AnsibleError("lookup plugin (%s) not found" % name)
    def _do_template(self, data, preserve_trailing_newlines=True, escape_backslashes=True, fail_on_undefined=None, overrides=None):
        '''
        Render a single string through jinja2, honoring "#jinja2:" header
        overrides and restoring trailing newlines that jinja2 strips.
        '''
        # For preserving the number of input newlines in the output (used
        # later in this method)
        data_newlines = _count_newlines_from_end(data)
        if fail_on_undefined is None:
            fail_on_undefined = self._fail_on_undefined_errors
        try:
            # allows template header overrides to change jinja2 options.
            if overrides is None:
                myenv = self.environment.overlay()
            else:
                myenv = self.environment.overlay(overrides)
            # Get jinja env overrides from template
            if data.startswith(JINJA2_OVERRIDE):
                eol = data.find('\n')
                line = data[len(JINJA2_OVERRIDE):eol]
                data = data[eol+1:]
                for pair in line.split(','):
                    (key,val) = pair.split(':')
                    key = key.strip()
                    setattr(myenv, key, ast.literal_eval(val.strip()))
            #FIXME: add tests
            myenv.filters.update(self._get_filters())
            myenv.tests.update(self._get_tests())
            if escape_backslashes:
                # Allow users to specify backslashes in playbooks as "\\"
                # instead of as "\\\\".
                data = _escape_backslashes(data, myenv)
            try:
                t = myenv.from_string(data)
            except TemplateSyntaxError as e:
                raise AnsibleError("template error while templating string: %s" % str(e))
            except Exception as e:
                if 'recursion' in str(e):
                    raise AnsibleError("recursive loop detected in template string: %s" % data)
                else:
                    return data
            # Expose the lookup()/finalize() helpers inside the template.
            t.globals['lookup'] = self._lookup
            t.globals['finalize'] = self._finalize
            jvars = AnsibleJ2Vars(self, t.globals)
            new_context = t.new_context(jvars, shared=True)
            rf = t.root_render_func(new_context)
            try:
                res = j2_concat(rf)
            except TypeError as te:
                if 'StrictUndefined' in str(te):
                    raise AnsibleUndefinedVariable(
                        "Unable to look up a name or access an attribute in template string. " + \
                        "Make sure your variable name does not contain invalid characters like '-'."
                    )
                else:
                    debug("failing because of a type error, template data is: %s" % data)
                    raise AnsibleError("an unexpected type error occurred. Error was %s" % te)
            if preserve_trailing_newlines:
                # The low level calls above do not preserve the newline
                # characters at the end of the input data, so we use the
                # calculate the difference in newlines and append them
                # to the resulting output for parity
                #
                # jinja2 added a keep_trailing_newline option in 2.7 when
                # creating an Environment.  That would let us make this code
                # better (remove a single newline if
                # preserve_trailing_newlines is False).  Once we can depend on
                # that version being present, modify our code to set that when
                # initializing self.environment and remove a single trailing
                # newline here if preserve_newlines is False.
                res_newlines = _count_newlines_from_end(res)
                if data_newlines > res_newlines:
                    res += '\n' * (data_newlines - res_newlines)
            return res
        except (UndefinedError, AnsibleUndefinedVariable) as e:
            if fail_on_undefined:
                raise AnsibleUndefinedVariable(e)
            else:
                #TODO: return warning about undefined var
                return data
|
Jorge-Rodriguez/ansible | refs/heads/devel | lib/ansible/modules/monitoring/newrelic_deployment.py | 95 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2013 Matt Coddington <coddington@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: newrelic_deployment
version_added: "1.2"
author: "Matt Coddington (@mcodd)"
short_description: Notify newrelic about app deployments
description:
- Notify newrelic about app deployments (see https://docs.newrelic.com/docs/apm/new-relic-apm/maintenance/deployment-notifications#api)
options:
token:
description:
- API token, to place in the x-api-key header.
required: true
app_name:
description:
- (one of app_name or application_id are required) The value of app_name in the newrelic.yml file used by the application
required: false
application_id:
description:
- (one of app_name or application_id are required) The application id, found in the URL when viewing the application in RPM
required: false
changelog:
description:
- A list of changes for this deployment
required: false
description:
description:
- Text annotation for the deployment - notes for you
required: false
revision:
description:
- A revision number (e.g., git commit SHA)
required: false
user:
description:
- The name of the user/process that triggered this deployment
required: false
appname:
description:
- Name of the application
required: false
environment:
description:
- The environment for this deployment
required: false
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
type: bool
version_added: 1.5.1
requirements: []
'''
EXAMPLES = '''
- newrelic_deployment:
token: AAAAAA
app_name: myapp
user: ansible deployment
revision: '1.0'
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url
from ansible.module_utils.six.moves.urllib.parse import urlencode
# ===========================================
# Module execution.
#
def main():
    """Module entry point: validate parameters, build the deployment
    payload, and POST it to the NewRelic deployment-notification API."""
    module = AnsibleModule(
        argument_spec=dict(
            token=dict(required=True, no_log=True),
            app_name=dict(required=False),
            application_id=dict(required=False),
            changelog=dict(required=False),
            description=dict(required=False),
            revision=dict(required=False),
            user=dict(required=False),
            appname=dict(required=False),
            environment=dict(required=False),
            validate_certs=dict(default='yes', type='bool'),
        ),
        required_one_of=[['app_name', 'application_id']],
        supports_check_mode=True
    )
    # Assemble the request payload from the module parameters.
    params = {}
    app_name = module.params["app_name"]
    application_id = module.params["application_id"]
    if app_name and application_id:
        module.fail_json(msg="only one of 'app_name' or 'application_id' can be set")
    if app_name:
        params["app_name"] = app_name
    elif application_id:
        params["application_id"] = application_id
    else:
        module.fail_json(msg="you must set one of 'app_name' or 'application_id'")
    for key in ("changelog", "description", "revision", "user", "appname", "environment"):
        value = module.params[key]
        if value:
            params[key] = value
    # In check mode, report success without contacting NewRelic.
    if module.check_mode:
        module.exit_json(changed=True)
    # Send the data to NewRelic
    response, info = fetch_url(
        module,
        "https://rpm.newrelic.com/deployments.xml",
        data=urlencode(params),
        headers={'x-api-key': module.params["token"]},
    )
    if info['status'] in (200, 201):
        module.exit_json(changed=True)
    else:
        module.fail_json(msg="unable to update newrelic: %s" % info['msg'])
# Ansible executes modules as scripts, so this is the normal entry path.
if __name__ == '__main__':
    main()
|
hostviralnetworks/nampy | refs/heads/master | nampy/core/DictList.py | 1 | # This script is developed from DictList.py
# in COBRApy, which was distributed
# under GNU GENERAL PUBLIC LICENSE Version 3
# Ebrahim, A., Lerman, J. a, Palsson, B. O., & Hyduke, D. R. (2013).
# COBRApy: COnstraints-Based Reconstruction and Analysis for Python.
# BMC systems biology, 7(1), 74. doi:10.1186/1752-0509-7-74
# Could replace this with collections.OrderedDict
# as ebrahim et al. suggest
# but there are some nice default methods defined here
# like _check
from copy import copy, deepcopy
import re
def get_id(object):
    """Return the ``id`` attribute of *object*.

    Routing attribute access through this helper generalizes DictList to
    non-nampy.core objects, at the cost of an extra function call.
    """
    return getattr(object, 'id')
class DictList(list):
    """A combined dict and list that feels like a list, but has
    the speed benefits of a dict. This may be eventually
    replaced by collections.OrderedDict.
    This was written to address the performance issues associated
    with searching, accessing, or iterating over a list in python
    that resulted in notable performance decays with COBRA for
    python.

    Invariant: ``self._dict`` maps each element's ``id`` to its list
    position and ``self._object_dict`` maps the ``id`` to the element
    itself; both are rebuilt by ``_generate_index()`` after any mutation.

    NOTE(review): this file uses Python 2-only constructs
    (``raise ValueError, msg`` and ``dict.has_key``); it will not run
    unmodified on Python 3.
    """
    def __init__(self, *args, **kwargs):
        list.__init__(self, *args, **kwargs)
        self._dict = {}
        self._object_dict = {}
        self._generate_index()
    def _check(self, id):
        """make sure duplicate id's are not added.
        This function is called before adding in elements.
        """
        if id in self._dict:
            # Python 2 raise syntax; kept byte-identical.
            raise ValueError, "id %s is already present in list" % str(id)
    def _generate_index(self):
        """rebuild the _dict index
        """
        self._dict = {}
        self._object_dict = {}
        # Side-effect list comprehension: fills both indices in one pass.
        [(self._dict.update({v.id: k}),
            self._object_dict.update({v.id: v}))
            for k, v in enumerate(self)]
    def get_by_id(self, id):
        """return the element with a matching id
        """
        return self._object_dict[id]
    def list_attr(self, attribute):
        """return a list of the given attribute for every object
        """
        return [getattr(i, attribute)
                for i in self]
    def query(self, search_function, attribute="id"):
        """query the list
        search_function: this will be used to select which objects to return
        This can be:
            - a string, in which case any object.attribute containing
              the string will be returned
            - a compiled regular expression
            - a boolean function which takes one argument and returns True
              for desired values
        attribute: the attribute to be searched for (default is 'id').
                   If this is None, the object itself is used.
        returns: a list of objects which match the query
        """
        if attribute == None:
            select_attribute = lambda x : x
        else:
            # NOTE(review): this lambda ignores its argument ``x`` and reads
            # ``the_object`` from the enclosing scope via late binding.  It
            # works only because it is always called as
            # ``select_attribute(the_object)`` inside the loops below.
            select_attribute = lambda x: getattr(the_object, attribute)
        # if the search_function is a regular expression
        match_list = DictList()
        if isinstance(search_function, str):
            search_function = re.compile(search_function)
        if hasattr(search_function, "findall"):
            for the_object in self:
                if search_function.findall(select_attribute(the_object)) != []:
                    match_list.append(the_object)
        else:
            for the_object in self:
                if search_function(select_attribute(the_object)):
                    match_list.append(the_object)
        return match_list
    # overriding default list functions with new ones
    def __setitem__(self, i, y):
        the_id = get_id(y)
        self._check(the_id)
        super(DictList, self).__setitem__(i, y)
        self._dict[the_id] = i
        self._object_dict[the_id] = y
    def union(self, iterable):
        """adds elements with id's not already in the model"""
        [self.append(i)
         for i in iterable
         if get_id(i) not in self._dict]
    def __add__(self, other, should_deepcopy=True):
        """
        other: an DictList
        should_deepcopy: Boolean.  Allow for shallow copying, however,
        this can cause problems if one doesn't know that the
        items are referenceable from different id
        """
        if should_deepcopy:
            sum = deepcopy(self) # should this be deepcopy or shallow?
        else:
            sum = self
        sum.extend(other)
        sum._generate_index()
        return sum
    def __iadd__(self, other):
        self.extend(other)
        return self
    def index(self, id):
        """
        id: A string or a :class:`~nampy.core.Object`

        NOTE(review): despite the docstring, passing a plain string appears
        to always hit the ``is not id`` check below and raise, because the
        stored element is never the string itself -- only passing the object
        (which falls through to the ``id.id`` lookup) succeeds.  Confirm
        against callers before changing.
        """
        # because values are unique, start and stop are not relevant
        try:
            the_object = self._dict[id]
        except:
            the_object = self._dict[id.id]
        if self[the_object] is not id:
            raise Exception("The id for the nampy.object (%s) provided "%repr(id) +\
                            "is in this dictionary but the_id is not the nampy.object")
        return the_object
    def __contains__(self, object):
        """DictList.__contains__(object) <==> object in DictList
        object can either be the object to search for itself, or
        simply the id
        """
        if hasattr(object, "id"):
            the_id = get_id(object)
        # allow to check with the object itself in addition to the id
        else:
            the_id = object
        # Python 2-only dict.has_key; kept byte-identical.
        return self._dict.has_key(the_id)
    def __copy__(self):
        # NOTE(review): the indices are cleared before the shallow copy and
        # rebuilt on both objects afterwards so the copy does not share the
        # original's index dicts.
        self._dict.clear()
        self._object_dict.clear()
        the_copy = copy(super(DictList, self))
        self._generate_index()
        the_copy._generate_index()
        return the_copy
    def __deepcopy__(self, *args, **kwargs):
        return DictList((deepcopy(i) for i in self))
    # these functions are slower because they rebuild the _dict every time
    # TODO: speed up
    def extend(self, iterable):
        # Want something like
        # [self.append(i) for i in iterable]
        # But make a new function to avoid
        # regenerating indices until the end
        for i in iterable:
            the_id = get_id(i)
            self._check(the_id)
            self._dict[the_id] = len(self)
            super(DictList, self).append(i)
            self._object_dict[the_id] = i
        # This was not in the version from Ebrahim
        # but this call makes sense here
        self._generate_index()
    def append(self, object):
        the_id = get_id(object)
        self._check(the_id)
        self._dict[the_id] = len(self)
        super(DictList, self).append(object)
        self._object_dict[the_id] = object
        # This was not in the version from Ebrahim
        # but this call could make sense here
        self._generate_index()
    def remove_subset(self, subset):
        # Keep only the elements not in ``subset``, truncate in place, and
        # rebuild the indices once at the end.
        the_list = [x for x in self if x not in subset]
        self[0: len(the_list)] = the_list
        del self[len(the_list):]
        self._generate_index()
    def insert(self, index, object):
        self._check(get_id(object))
        super(DictList, self).insert(index, object)
        self._generate_index()
    def pop(self, *args, **kwargs):
        value = super(DictList, self).pop(*args, **kwargs)
        self._generate_index()
        return value
    def remove(self, *args, **kwargs):
        super(DictList, self).remove(*args, **kwargs)
        self._generate_index()
    def reverse(self, *args, **kwargs):
        super(DictList, self).reverse(*args, **kwargs)
        self._generate_index()
    def sort(self, *args, **kwargs):
        super(DictList, self).sort(*args, **kwargs)
        self._generate_index()
    def __setslice__(self, *args, **kwargs):
        super(DictList, self).__setslice__(*args, **kwargs)
        self._generate_index()
    def __delslice__(self, *args, **kwargs):
        super(DictList, self).__delslice__(*args, **kwargs)
        self._generate_index()
    def __delitem__(self, *args, **kwargs):
        super(DictList, self).__delitem__(*args, **kwargs)
        self._generate_index()
    def __getattr__(self, attr):
        # Fall back to id-based element lookup, so ``dictlist.some_id``
        # returns the element whose id is "some_id".
        try:
            return super(DictList, self).__getattribute__(attr)
        except:
            try:
                func = super(DictList, self).__getattribute__("get_by_id")
                return func(attr)
            except:
                raise AttributeError("DictList has no attribute or entry %s" % \
                    (attr))
    def __dir__(self):
        # Advertise element ids alongside normal attributes for tab
        # completion.
        attributes = self.__class__.__dict__.keys()
        attributes.extend(self._dict.keys())
        return attributes
|
jaruba/chromium.src | refs/heads/nw12 | build/util/lib/common/__init__.py | 12133432 | |
LIMXTEC/BitSend | refs/heads/master | qa/rpc-tests/python-bitcoinrpc/bitcoinrpc/__init__.py | 12133432 | |
liwangdong/augmented-traffic-control | refs/heads/master | atc/atc_thrift/__init__.py | 12133432 | |
chinmaygarde/mojo | refs/heads/ios | examples/python/__mojo__.py | 5 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Example python application implementing the Echo service."""
import logging
import example_service_mojom
from mojo_application import application_delegate
from mojo_application import service_provider_impl
from mojo_application import application_runner
import mojo_system
class ExampleApp(application_delegate.ApplicationDelegate):
  """Application delegate that exposes ExampleServiceImpl to connecting apps."""
  def OnAcceptConnection(self,
                         requestor_url,
                         resolved_url,
                         service_provider,
                         exposed_services):
    """Register ExampleServiceImpl on the incoming connection and accept it."""
    service_provider.AddService(ExampleServiceImpl)
    return True
class ExampleServiceImpl(example_service_mojom.ExampleService):
  """Implementation of the generated ExampleService mojom interface."""
  def Ping(self, ping_value):
    """Echo the supplied value back to the caller."""
    return ping_value
def MojoMain(app_request_handle):
  """Entry point invoked by the Mojo shell with the application request handle."""
  application_runner.RunMojoApplication(ExampleApp(), app_request_handle)
|
habnabit/pip | refs/heads/develop | pip/_vendor/requests/certs.py | 1218 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
certs.py
~~~~~~~~
This module returns the preferred default CA certificate bundle.
If you are packaging Requests, e.g., for a Linux distribution or a managed
environment, you can change the definition of where() to return a separately
packaged CA bundle.
"""
import os.path
# Prefer the certifi package's CA bundle when it is installed; otherwise
# fall back to the copy vendored alongside this module.
try:
    from certifi import where
except ImportError:
    def where():
        """Return the preferred certificate bundle."""
        # vendored bundle inside Requests
        return os.path.join(os.path.dirname(__file__), 'cacert.pem')
# Running "python certs.py" prints the bundle path (useful for packagers).
if __name__ == '__main__':
    print(where())
|
matrogers/pylearn2 | refs/heads/master | pylearn2/gui/graph_2D.py | 28 | """
Classes for making simple 2D visualizations.
"""
import numpy as N
from theano.compat.six.moves import xrange
from theano import config
class Graph2D(object):
    """
    Render simple two-dimensional graphs as pixel arrays.

    Parameters
    ----------
    shape : tuple
        Display shape as (rows, cols), in pixels.
    xlim : tuple
        (xmin, xmax) -- the horizontal slice of the real line shown;
        both values refer to pixel centers.
    ycenter : float
        The y coordinate of the display's central pixel.
    """
    def __init__(self, shape, xlim, ycenter):
        self.xmin = 0.
        self.xmax = 0.
        # Order matters: set_xlim needs cols, set_ycenter needs delta_x.
        self.set_shape(shape)
        self.set_xlim(xlim)
        self.set_ycenter(ycenter)
        self.components = []
    def set_shape(self, shape):
        """
        Set the pixel dimensions of the display.

        Parameters
        ----------
        shape : tuple
            (rows, columns) of the display.
        """
        self.rows = shape[0]
        self.cols = shape[1]
    def set_xlim(self, xlim):
        """
        Set the range of x coordinates covered by the graph.

        Parameters
        ----------
        xlim : tuple
            (xmin, xmax), coordinates of the centers of the leftmost and
            rightmost pixels.
        """
        self.xmin, self.xmax = xlim[0], xlim[1]
        # Horizontal distance between adjacent pixel centers.
        self.delta_x = (self.xmax - self.xmin) / float(self.cols - 1)
    def set_ycenter(self, ycenter):
        """
        Set the y coordinate of the display's central pixel.

        Parameters
        ----------
        ycenter : float
            The desired coordinate.
        """
        # Pixels are square: vertical spacing equals horizontal spacing.
        self.delta_y = self.delta_x
        self.ymin = ycenter - (self.rows / 2) * self.delta_y
        self.ymax = self.ymin + (self.rows - 1) * self.delta_y
    def render(self):
        """
        Draw every registered component onto a fresh black image.

        Returns
        -------
        output : ndarray
            The rendered graph in (rows, cols, RGB) format.
        """
        image = N.zeros((self.rows, self.cols, 3))
        for layer in self.components:
            image = layer.render(prev_layer=image, parent=self)
        assert image is not None
        return image
    def get_coords_for_col(self, i):
        """
        Compute the real-number (x, y) coordinate of every pixel in column i.

        Parameters
        ----------
        i : int
            Column index.

        Returns
        -------
        coords : ndarray
            A (rows, 2) array of coordinates, ordered from the top row
            (largest y) downwards.
        """
        coords = N.zeros((self.rows, 2), dtype=config.floatX)
        coords[:, 0] = self.xmin + float(i) * self.delta_x
        coords[:, 1] = self.ymin + N.cast[config.floatX](N.asarray(range(self.rows - 1, -1, -1))) * self.delta_y
        return coords
class HeatMap(object):
    """
    Render a function of 2-D coordinates as a heatmap layer.

    Parameters
    ----------
    f : callable
        Maps a design matrix of 2-D coordinates to a vector (one value per
        pixel, broadcast to all channels) or a matrix (per-channel values)
        of function values at those coordinates.
    normalizer : callable, optional
        None, or a callable mapping the rendered numpy array to a
        normalized numpy array.
    render_mode : str
        * 'o' : opaque -- the heatmap fully replaces the previous layer.
        * 'r' : draw only into the (r)ed channel.
    """
    def __init__(self, f, normalizer=None, render_mode = 'o'):
        self.f = f
        self.normalizer = normalizer
        self.render_mode = render_mode
    def render(self, prev_layer, parent):
        """
        Render this heatmap on top of a previously rendered layer.

        Parameters
        ----------
        prev_layer : numpy ndarray
            The image rendered so far; it remains visible through this
            layer only when render_mode is not fully opaque.
        parent : Graph2D
            Supplies the coordinate system via get_coords_for_col().

        Returns
        -------
        img : ndarray
            The rendered heatmap.
        """
        out = prev_layer * 0.0
        # Evaluate f column by column over the parent's coordinate grid.
        for col in xrange(prev_layer.shape[1]):
            values = self.f(parent.get_coords_for_col(col))
            if len(values.shape) == 1:
                # Grayscale: copy the same column vector into each channel.
                for channel in xrange(3):
                    out[:, col, channel] = values
            else:
                out[:, col, :] = values
        if self.normalizer is not None:
            out = self.normalizer(out)
            assert out is not None
        if self.render_mode == 'r':
            # Keep the previous layer's green and blue channels intact.
            out[:, :, 1:] = prev_layer[:, :, 1:]
        elif self.render_mode == 'o':
            pass
        else:
            raise NotImplementedError()
        return out
|
saurabhjn76/sympy | refs/heads/master | sympy/assumptions/sathandlers.py | 42 | from __future__ import print_function, division
from collections import MutableMapping, defaultdict
from sympy.core import (Add, Mul, Pow, Integer, Number, NumberSymbol,)
from sympy.core.numbers import ImaginaryUnit
from sympy.core.sympify import _sympify
from sympy.core.rules import Transform
from sympy.core.logic import fuzzy_or, fuzzy_and
from sympy.matrices.expressions import MatMul
from sympy.functions.elementary.complexes import Abs
from sympy.assumptions.ask import Q
from sympy.assumptions.assume import Predicate, AppliedPredicate
from sympy.logic.boolalg import (Equivalent, Implies, And, Or,
BooleanFunction, Not)
# APIs here may be subject to change
# XXX: Better name?
class UnevaluatedOnFree(BooleanFunction):
    """
    Represents a Boolean function that remains unevaluated on free predicates.

    This is intended to be a superclass of other classes, which define the
    behavior on singly applied predicates.

    A free predicate is a predicate that is not applied, or a combination
    thereof. For example, Q.zero or Or(Q.positive, Q.negative).

    A singly applied predicate is a free predicate applied everywhere to a
    single expression. For instance, Q.zero(x) and Or(Q.positive(x*y),
    Q.negative(x*y)) are singly applied, but Or(Q.positive(x), Q.negative(y))
    and Or(Q.positive, Q.negative(y)) are not.

    The boolean literals True and False are considered to be both free and
    singly applied.

    This class raises ValueError unless the input is a free predicate or a
    singly applied predicate.

    On a free predicate, this class remains unevaluated. On a singly applied
    predicate, the method apply() is called and returned, or the original
    expression returned if apply() returns None. When apply() is called,
    self.expr is set to the unique expression that the predicates are applied
    at. self.pred is set to the free form of the predicate.

    The typical usage is to create this class with free predicates and
    evaluate it using .rcall().
    """
    def __new__(cls, arg):
        # Mostly type checking here
        arg = _sympify(arg)
        free = arg.atoms(Predicate)
        applied = arg.atoms(AppliedPredicate)
        # A mix of applied and unapplied predicates is ambiguous.
        if free and applied:
            raise ValueError("arg must be either completely free or singly applied")
        if not applied:
            # Completely free: stay unevaluated and remember the predicate.
            obj = BooleanFunction.__new__(cls, arg)
            obj.pred = arg
            obj.expr = None
            return obj
        # Every applied predicate must target the same expression.
        targets = {pred.args[0] for pred in applied}
        if len(targets) > 1:
            raise ValueError("The AppliedPredicates in arg must be applied to a single expression.")
        obj = BooleanFunction.__new__(cls, arg)
        obj.expr = targets.pop()
        # Strip the applications to recover the free form of the predicate.
        obj.pred = arg.xreplace(Transform(lambda e: e.func,
                                          lambda e: isinstance(e, AppliedPredicate)))
        result = obj.apply()
        return obj if result is None else result

    def apply(self):
        # Subclasses override this; returning None means "stay unevaluated".
        return
class AllArgs(UnevaluatedOnFree):
    """
    Class representing vectorizing a predicate over all the .args of an
    expression.

    See the docstring of UnevaluatedOnFree for more information on this
    class.

    The typical usage is to evaluate predicates with expressions using .rcall().

    Example
    =======

    >>> from sympy.assumptions.sathandlers import AllArgs
    >>> from sympy import symbols, Q
    >>> x, y = symbols('x y')
    >>> a = AllArgs(Q.positive | Q.negative)
    >>> a
    AllArgs(Or(Q.negative, Q.positive))
    >>> a.rcall(x*y)
    And(Or(Q.negative(x), Q.positive(x)), Or(Q.negative(y), Q.positive(y)))
    """
    def apply(self):
        # The predicate must hold for every argument of the expression.
        return And(*(self.pred.rcall(a) for a in self.expr.args))
class AnyArgs(UnevaluatedOnFree):
    """
    Class representing vectorizing a predicate over any of the .args of an
    expression.

    See the docstring of UnevaluatedOnFree for more information on this
    class.

    The typical usage is to evaluate predicates with expressions using .rcall().

    Example
    =======

    >>> from sympy.assumptions.sathandlers import AnyArgs
    >>> from sympy import symbols, Q
    >>> x, y = symbols('x y')
    >>> a = AnyArgs(Q.positive & Q.negative)
    >>> a
    AnyArgs(And(Q.negative, Q.positive))
    >>> a.rcall(x*y)
    Or(And(Q.negative(x), Q.positive(x)), And(Q.negative(y), Q.positive(y)))
    """
    def apply(self):
        # The predicate must hold for at least one argument of the expression.
        return Or(*(self.pred.rcall(a) for a in self.expr.args))
class ExactlyOneArg(UnevaluatedOnFree):
    """
    Class representing a predicate holding on exactly one of the .args of an
    expression.

    See the docstring of UnevaluatedOnFree for more information on this
    class.

    The typical usage is to evaluate predicate with expressions using
    .rcall().

    Example
    =======

    >>> from sympy.assumptions.sathandlers import ExactlyOneArg
    >>> from sympy import symbols, Q
    >>> x, y = symbols('x y')
    >>> a = ExactlyOneArg(Q.positive)
    >>> a
    ExactlyOneArg(Q.positive)
    >>> a.rcall(x*y)
    Or(And(Not(Q.positive(x)), Q.positive(y)), And(Not(Q.positive(y)), Q.positive(x)))
    """
    def apply(self):
        applied = [self.pred.rcall(arg) for arg in self.expr.args]
        # Build "this one holds and every other one fails" for each argument.
        # Technically this is xor, but if one term in the disjunction is true,
        # it is not possible for the remainder to be true, so regular or is
        # fine in this case.
        clauses = []
        for i, this in enumerate(applied):
            others = applied[:i] + applied[i + 1:]
            clauses.append(And(this, *[Not(other) for other in others]))
        return Or(*clauses)
        # Note: the equivalent cnf form would be more efficient as the first
        # argument of an implication, since p >> q is the same as q | ~p, so
        # the ~ will convert the Or to and, and one just needs to distribute
        # the q across it to get to cnf:
        # And(*[Or(*map(Not, c)) for c in combinations(applied, 2)]) & Or(*applied)
def _old_assump_replacer(obj):
    # Evaluate an applied predicate against the old assumptions system,
    # returning ``obj`` unchanged when nothing can be concluded.
    # Things to be careful of:
    # - real means real or infinite in the old assumptions.
    # - nonzero does not imply real in the old assumptions.
    # - finite means finite and not zero in the old assumptions.
    if not isinstance(obj, AppliedPredicate):
        return obj
    expr = obj.args[0]
    func = obj.func
    # Each applied predicate matches at most one Q.* below, so an
    # if/elif chain is equivalent to checking each predicate in turn.
    if func == Q.positive:
        result = fuzzy_and([expr.is_finite, expr.is_positive])
    elif func == Q.zero:
        result = expr.is_zero
    elif func == Q.negative:
        result = fuzzy_and([expr.is_finite, expr.is_negative])
    elif func == Q.nonpositive:
        result = fuzzy_and([expr.is_finite, expr.is_nonpositive])
    elif func == Q.nonzero:
        result = fuzzy_and([expr.is_nonzero, expr.is_finite])
    elif func == Q.nonnegative:
        result = fuzzy_and([fuzzy_or([expr.is_zero, expr.is_finite]),
                            expr.is_nonnegative])
    elif func == Q.rational:
        result = expr.is_rational
    elif func == Q.irrational:
        result = expr.is_irrational
    elif func == Q.even:
        result = expr.is_even
    elif func == Q.odd:
        result = expr.is_odd
    elif func == Q.integer:
        result = expr.is_integer
    elif func == Q.imaginary:
        result = expr.is_imaginary
    elif func == Q.commutative:
        result = expr.is_commutative
    else:
        result = None
    # None means the old assumptions were inconclusive (or the predicate is
    # not handled): keep the original applied predicate.
    if result is None:
        return obj
    return result
def evaluate_old_assump(pred):
    """
    Replace assumptions of expressions replaced with their values in the old
    assumptions (like Q.negative(-1) => True). Useful because some direct
    computations for numeric objects is defined most conveniently in the old
    assumptions.
    """
    replacer = Transform(_old_assump_replacer)
    return pred.xreplace(replacer)
class CheckOldAssump(UnevaluatedOnFree):
    # Ties a free predicate to its value under the old assumptions system.
    def apply(self):
        pred = self.args[0]
        return Equivalent(pred, evaluate_old_assump(pred))
class CheckIsPrime(UnevaluatedOnFree):
    # Evaluates primality of the concrete target expression via isprime.
    def apply(self):
        from sympy import isprime
        pred = self.args[0]
        return Equivalent(pred, isprime(self.expr))
class CustomLambda(object):
    """
    Interface to lambda with rcall

    Workaround until we get a better way to represent certain facts.
    """

    def __init__(self, lamda):
        # Store the callable to be invoked through rcall().
        self.lamda = lamda

    def rcall(self, *args):
        # Delegate directly to the wrapped callable.
        result = self.lamda(*args)
        return result
class ClassFactRegistry(MutableMapping):
    """
    Register handlers against classes.

    ``registry[C] = handler`` registers ``handler`` for class
    ``C``. ``registry[C]`` returns a set of handlers for class ``C``, or any
    of its superclasses.
    """
    def __init__(self, d=None):
        # Missing keys default to an empty frozenset of handlers.
        self.d = defaultdict(frozenset, d or {})
        super(ClassFactRegistry, self).__init__()

    def __setitem__(self, key, item):
        self.d[key] = frozenset(item)

    def __getitem__(self, key):
        # Indexing the defaultdict inserts `key` with an empty frozenset
        # when absent, so the superclass scan below always sees it too.
        handlers = self.d[key]
        for registered in self.d:
            if issubclass(key, registered):
                handlers |= self.d[registered]
        return handlers

    def __delitem__(self, key):
        del self.d[key]

    def __iter__(self):
        return iter(self.d)

    def __len__(self):
        return len(self.d)

    def __repr__(self):
        return repr(self.d)
# Global registry consulted by the SAT-based handlers.
fact_registry = ClassFactRegistry()


def register_fact(klass, fact, registry=fact_registry):
    # Add `fact` to the handler set registered for `klass`.
    registry[klass] = registry[klass] | {fact}
# Populate the global fact registry: each entry pairs a core class with a
# logical fact (in terms of free predicates) that holds for expressions
# headed by that class.
for klass, fact in [
    (Mul, Equivalent(Q.zero, AnyArgs(Q.zero))),
    (MatMul, Implies(AllArgs(Q.square), Equivalent(Q.invertible, AllArgs(Q.invertible)))),
    # Sign facts for sums and products.
    (Add, Implies(AllArgs(Q.positive), Q.positive)),
    (Add, Implies(AllArgs(Q.negative), Q.negative)),
    (Mul, Implies(AllArgs(Q.positive), Q.positive)),
    (Mul, Implies(AllArgs(Q.commutative), Q.commutative)),
    (Mul, Implies(AllArgs(Q.real), Q.commutative)),
    # This one can still be made easier to read. I think we need basic pattern
    # matching, so that we can just write Equivalent(Q.zero(x**y), Q.zero(x) & Q.positive(y))
    (Pow, CustomLambda(lambda power: Equivalent(Q.zero(power), Q.zero(power.base) & Q.positive(power.exp)))),
    (Integer, CheckIsPrime(Q.prime)),
    # Implicitly assumes Mul has more than one arg
    # Would be AllArgs(Q.prime | Q.composite) except 1 is composite
    (Mul, Implies(AllArgs(Q.prime), ~Q.prime)),
    # More advanced prime assumptions will require inequalities, as 1 provides
    # a corner case.
    (Mul, Implies(AllArgs(Q.imaginary | Q.real), Implies(ExactlyOneArg(Q.imaginary), Q.imaginary))),
    (Mul, Implies(AllArgs(Q.real), Q.real)),
    (Add, Implies(AllArgs(Q.real), Q.real)),
    #General Case: Odd number of imaginary args implies mul is imaginary(To be implemented)
    (Mul, Implies(AllArgs(Q.real), Implies(ExactlyOneArg(Q.irrational),
        Q.irrational))),
    (Add, Implies(AllArgs(Q.real), Implies(ExactlyOneArg(Q.irrational),
        Q.irrational))),
    (Mul, Implies(AllArgs(Q.rational), Q.rational)),
    (Add, Implies(AllArgs(Q.rational), Q.rational)),
    # Absolute value facts.
    (Abs, Q.nonnegative),
    (Abs, Equivalent(AllArgs(~Q.zero), ~Q.zero)),
    # Including the integer qualification means we don't need to add any facts
    # for odd, since the assumptions already know that every integer is
    # exactly one of even or odd.
    (Mul, Implies(AllArgs(Q.integer), Equivalent(AnyArgs(Q.even), Q.even))),
    (Abs, Implies(AllArgs(Q.even), Q.even)),
    (Abs, Implies(AllArgs(Q.odd), Q.odd)),
    (Add, Implies(AllArgs(Q.integer), Q.integer)),
    (Add, Implies(ExactlyOneArg(~Q.integer), ~Q.integer)),
    (Mul, Implies(AllArgs(Q.integer), Q.integer)),
    (Mul, Implies(ExactlyOneArg(~Q.rational), ~Q.integer)),
    (Abs, Implies(AllArgs(Q.integer), Q.integer)),
    # Defer concrete Number queries to the old assumptions system.
    (Number, CheckOldAssump(Q.negative)),
    (Number, CheckOldAssump(Q.zero)),
    (Number, CheckOldAssump(Q.positive)),
    (Number, CheckOldAssump(Q.nonnegative)),
    (Number, CheckOldAssump(Q.nonzero)),
    (Number, CheckOldAssump(Q.nonpositive)),
    (Number, CheckOldAssump(Q.rational)),
    (Number, CheckOldAssump(Q.irrational)),
    (Number, CheckOldAssump(Q.even)),
    (Number, CheckOldAssump(Q.odd)),
    (Number, CheckOldAssump(Q.integer)),
    (Number, CheckOldAssump(Q.imaginary)),
    # For some reason NumberSymbol does not subclass Number
    (NumberSymbol, CheckOldAssump(Q.negative)),
    (NumberSymbol, CheckOldAssump(Q.zero)),
    (NumberSymbol, CheckOldAssump(Q.positive)),
    (NumberSymbol, CheckOldAssump(Q.nonnegative)),
    (NumberSymbol, CheckOldAssump(Q.nonzero)),
    (NumberSymbol, CheckOldAssump(Q.nonpositive)),
    (NumberSymbol, CheckOldAssump(Q.rational)),
    (NumberSymbol, CheckOldAssump(Q.irrational)),
    (NumberSymbol, CheckOldAssump(Q.imaginary)),
    # I (the imaginary unit) likewise goes through the old assumptions.
    (ImaginaryUnit, CheckOldAssump(Q.negative)),
    (ImaginaryUnit, CheckOldAssump(Q.zero)),
    (ImaginaryUnit, CheckOldAssump(Q.positive)),
    (ImaginaryUnit, CheckOldAssump(Q.nonnegative)),
    (ImaginaryUnit, CheckOldAssump(Q.nonzero)),
    (ImaginaryUnit, CheckOldAssump(Q.nonpositive)),
    (ImaginaryUnit, CheckOldAssump(Q.rational)),
    (ImaginaryUnit, CheckOldAssump(Q.irrational)),
    (ImaginaryUnit, CheckOldAssump(Q.imaginary))
    ]:
    register_fact(klass, fact)
|
goodwinnk/intellij-community | refs/heads/master | python/testData/resolve/multiFile/nestedPackage/foo/__init__.py | 12133432 | |
CRImier/WCS | refs/heads/master | apps/update/__init__.py | 12133432 | |
rue89-tech/edx-analytics-pipeline | refs/heads/master | edx/analytics/tasks/tests/acceptance/services/__init__.py | 12133432 | |
jtwaleson/python-oauth2 | refs/heads/master | oauth2/clients/__init__.py | 12133432 | |
siouka/dmind | refs/heads/master | plugin.video.replaypt/resources/__init__.py | 12133432 | |
jonashaag/django-nonrel-nohistory | refs/heads/master | tests/regressiontests/i18n/other/locale/__init__.py | 12133432 | |
yoelk/instrumentino | refs/heads/master | instrumentino/resources/old/__init__.py | 12133432 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.