code stringlengths 2 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int32 2 1.05M |
|---|---|---|---|---|---|
"""
homeassistant.components.media_player.plex
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides an interface to the Plex API.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.plex/
"""
import os
import json
import logging
from datetime import timedelta
from urllib.parse import urlparse
from homeassistant.loader import get_component
import homeassistant.util as util
from homeassistant.components.media_player import (
MediaPlayerDevice, SUPPORT_PAUSE, SUPPORT_PREVIOUS_TRACK,
SUPPORT_NEXT_TRACK, MEDIA_TYPE_TVSHOW, MEDIA_TYPE_VIDEO)
from homeassistant.const import (
DEVICE_DEFAULT_NAME, STATE_IDLE, STATE_PLAYING,
STATE_PAUSED, STATE_OFF, STATE_UNKNOWN)
REQUIREMENTS = ['plexapi==1.1.0']
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(seconds=1)
PLEX_CONFIG_FILE = 'plex.conf'
# Map ip to request id for configuring
_CONFIGURING = {}
_LOGGER = logging.getLogger(__name__)
SUPPORT_PLEX = SUPPORT_PAUSE | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK
def config_from_file(filename, config=None):
    """Read or write the small JSON configuration file.

    When *config* is given, serialize it to *filename* and return True
    on success (False on IOError).  Without *config*, load and return
    the parsed dict; a missing file yields {} and a read error yields
    False.
    """
    if config:
        # Write mode: persist the supplied mapping.
        try:
            with open(filename, 'w') as handle:
                handle.write(json.dumps(config))
        except IOError as error:
            _LOGGER.error('Saving config file failed: %s', error)
            return False
        return True

    # Read mode: a missing file is simply an empty configuration.
    if not os.path.isfile(filename):
        return {}
    try:
        with open(filename, 'r') as handle:
            return json.loads(handle.read())
    except IOError as error:
        _LOGGER.error('Reading config file failed: %s', error)
        # This won't work yet
        return False
# pylint: disable=abstract-method, unused-argument
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
    """ Sets up the plex platform. """
    # A previously saved server entry wins over discovery data.
    stored = config_from_file(hass.config.path(PLEX_CONFIG_FILE))
    if len(stored):
        # Setup a configured PlexServer
        host, entry = stored.popitem()
        token = entry['token']
    elif discovery_info is not None:
        # Parse discovery data
        host = urlparse(discovery_info[1]).netloc
        _LOGGER.info('Discovered PLEX server: %s', host)
        if host in _CONFIGURING:
            # A configuration request for this host is already pending.
            return
        token = None
    else:
        # Neither saved config nor discovery info: nothing to do.
        return
    setup_plexserver(host, token, hass, add_devices_callback)
# pylint: disable=too-many-branches
def setup_plexserver(host, token, hass, add_devices_callback):
    """Set up a PlexServer for *host* and register its client devices.

    On auth/connection failure a configurator request is opened instead.
    On success the working token is persisted to the config file and
    throttled device/session updaters are installed and run once.
    """
    import plexapi.server
    import plexapi.exceptions

    try:
        plexserver = plexapi.server.PlexServer('http://%s' % host, token)
    except (plexapi.exceptions.BadRequest,
            plexapi.exceptions.Unauthorized,
            plexapi.exceptions.NotFound) as error:
        _LOGGER.info(error)
        # No token or wrong token
        request_configuration(host, hass, add_devices_callback)
        return

    # If we came here and configuring this host, mark as done
    if host in _CONFIGURING:
        request_id = _CONFIGURING.pop(host)
        configurator = get_component('configurator')
        configurator.request_done(request_id)
        _LOGGER.info('Discovery configuration done!')

    # Save config
    if not config_from_file(
            hass.config.path(PLEX_CONFIG_FILE),
            {host: {'token': token}}):
        _LOGGER.error('failed to save config file')

    # BUG FIX: the log message said 'htts://' while the connection above
    # is made over plain 'http://'.
    _LOGGER.info('Connected to: http://%s', host)

    plex_clients = {}
    plex_sessions = {}

    @util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
    def update_devices():
        """ Updates the devices objects. """
        try:
            devices = plexserver.clients()
        except plexapi.exceptions.BadRequest:
            _LOGGER.exception("Error listing plex devices")
            return

        new_plex_clients = []
        for device in devices:
            # For now, let's allow all deviceClass types
            if device.deviceClass in ['badClient']:
                continue

            if device.machineIdentifier not in plex_clients:
                new_client = PlexClient(device, plex_sessions, update_devices,
                                        update_sessions)
                plex_clients[device.machineIdentifier] = new_client
                new_plex_clients.append(new_client)
            else:
                plex_clients[device.machineIdentifier].set_device(device)

        if new_plex_clients:
            add_devices_callback(new_plex_clients)

    @util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
    def update_sessions():
        """ Updates the sessions objects. """
        try:
            sessions = plexserver.sessions()
        except plexapi.exceptions.BadRequest:
            _LOGGER.exception("Error listing plex sessions")
            return

        # Rebuild the machineIdentifier -> session map from scratch.
        plex_sessions.clear()
        for session in sessions:
            plex_sessions[session.player.machineIdentifier] = session

    update_devices()
    update_sessions()
def request_configuration(host, hass, add_devices_callback):
    """ Request configuration steps from the user. """
    configurator = get_component('configurator')

    # A request for this host is already open: report an error on it
    # rather than creating a duplicate.
    if host in _CONFIGURING:
        configurator.notify_errors(
            _CONFIGURING[host], "Failed to register, please try again.")
        return

    def _token_submitted(data):
        """Retry server setup once the user supplies a token."""
        setup_plexserver(host, data.get('token'), hass, add_devices_callback)

    _CONFIGURING[host] = configurator.request_config(
        hass,
        "Plex Media Server",
        _token_submitted,
        description=('Enter the X-Plex-Token'),
        description_image="/static/images/config_plex_mediaserver.png",
        submit_caption="Confirm",
        fields=[{'id': 'token', 'name': 'X-Plex-Token', 'type': ''}])
class PlexClient(MediaPlayerDevice):
    """ Represents a Plex device. """

    # pylint: disable=too-many-public-methods, attribute-defined-outside-init
    def __init__(self, device, plex_sessions, update_devices, update_sessions):
        # plex_sessions: shared dict maintained by setup_plexserver,
        # keyed by the player's machineIdentifier.
        self.plex_sessions = plex_sessions
        # Throttled updater callables injected by setup_plexserver.
        self.update_devices = update_devices
        self.update_sessions = update_sessions
        self.set_device(device)

    def set_device(self, device):
        """ Sets the device property. """
        self.device = device

    @property
    def unique_id(self):
        """ Returns the id of this plex client """
        return "{}.{}".format(
            self.__class__, self.device.machineIdentifier or self.device.name)

    @property
    def name(self):
        """ Returns the name of the device. """
        return self.device.name or DEVICE_DEFAULT_NAME

    @property
    def session(self):
        """ Returns the session, if any. """
        # Session lookup uses the same machineIdentifier key that
        # update_sessions() writes.
        if self.device.machineIdentifier not in self.plex_sessions:
            return None

        return self.plex_sessions[self.device.machineIdentifier]

    @property
    def state(self):
        """ Returns the state of the device. """
        if self.session:
            state = self.session.player.state
            if state == 'playing':
                return STATE_PLAYING
            elif state == 'paused':
                return STATE_PAUSED
        # This is nasty. Need to find a way to determine alive
        elif self.device:
            # No session but a device object exists: treat as idle.
            return STATE_IDLE
        else:
            return STATE_OFF

        # Session exists but its player state is neither playing nor
        # paused.
        return STATE_UNKNOWN

    def update(self):
        """Force a refresh of devices and sessions, bypassing throttling."""
        self.update_devices(no_throttle=True)
        self.update_sessions(no_throttle=True)

    @property
    def media_content_id(self):
        """ Content ID of current playing media. """
        # Implicitly returns None when there is no active session.
        if self.session is not None:
            return self.session.ratingKey

    @property
    def media_content_type(self):
        """ Content type of current playing media. """
        if self.session is None:
            return None
        media_type = self.session.type
        if media_type == 'episode':
            return MEDIA_TYPE_TVSHOW
        elif media_type == 'movie':
            return MEDIA_TYPE_VIDEO
        return None

    @property
    def media_duration(self):
        """ Duration of current playing media in seconds. """
        # NOTE(review): plexapi durations are typically milliseconds --
        # confirm the unit expected by the frontend.
        if self.session is not None:
            return self.session.duration

    @property
    def media_image_url(self):
        """ Image url of current playing media. """
        if self.session is not None:
            return self.session.thumbUrl

    @property
    def media_title(self):
        """ Title of current playing media. """
        # find a string we can use as a title
        if self.session is not None:
            return self.session.title

    @property
    def media_season(self):
        """ Season of curent playing media (TV Show only). """
        from plexapi.video import Show
        # NOTE(review): active sessions are usually Episode/Movie
        # objects, so this isinstance(Show) check may never match --
        # verify against the plexapi version in use.
        if isinstance(self.session, Show):
            return self.session.seasons()[0].index

    @property
    def media_series_title(self):
        """ Series title of current playing media (TV Show only). """
        from plexapi.video import Show
        if isinstance(self.session, Show):
            return self.session.grandparentTitle

    @property
    def media_episode(self):
        """ Episode of current playing media (TV Show only). """
        from plexapi.video import Show
        if isinstance(self.session, Show):
            return self.session.index

    @property
    def supported_media_commands(self):
        """ Flags of media commands that are supported. """
        return SUPPORT_PLEX

    def media_play(self):
        """ media_play media player. """
        self.device.play()

    def media_pause(self):
        """ media_pause media player. """
        self.device.pause()

    def media_next_track(self):
        """ Send next track command. """
        self.device.skipNext()

    def media_previous_track(self):
        """ Send previous track command. """
        self.device.skipPrevious()
| caiuspb/home-assistant | homeassistant/components/media_player/plex.py | Python | mit | 10,331 |
# Copyright (c) 2012 Santosh Philip
# =======================================================================
# Distributed under the MIT License.
# (See accompanying file LICENSE or copy at
# http://opensource.org/licenses/MIT)
# =======================================================================
"""pytest for iddgaps.py"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import eppy.iddgaps as iddgaps
def test_cleaniddfield():
    """pytest for cleaniddfield"""
    # cleaniddfield should drop the duplicate capitalized 'Field' key
    # while leaving the rest of the mapping intact.
    original = {
        'field': ['Water Supply Storage Tank Name'],
        'Field': ['Water Supply Storage Tank Name'],
        'object-list': ['WaterStorageTankNames'],
        'type': ['object-list'],
    }
    expected = {
        'field': ['Water Supply Storage Tank Name'],
        'object-list': ['WaterStorageTankNames'],
        'type': ['object-list'],
    }
    assert iddgaps.cleaniddfield(original) == expected
| jamiebull1/eppy | eppy/tests/test_iddgaps.py | Python | mit | 1,138 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the corvette ship-control Tangible template."""
    obj = Tangible()
    obj.template = "object/tangible/shipcontrol/shared_shipcontrol_corvette.iff"
    obj.attribute_template_id = 8
    obj.stfName("space/space_item", "shipcontrol_corvette")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return obj
import os
import string
import codecs
import ast
import math
from vector3 import Vector3
filename_out = "../../Assets/cosine_table"
table_size = 512
fixed_point_precision = 512
def dumpCosine(_cosine_func, display_name, f, size=None, precision=None):
    """Write a fixed-point lookup table for *_cosine_func* as a C array.

    Samples the function at *size* evenly spaced angles over a full turn
    (angle * pi / (size / 2)) and scales each sample by *precision*
    before truncating to int.  *size* and *precision* default to the
    module-level table_size / fixed_point_precision, so existing callers
    are unaffected; passing them explicitly makes the function reusable
    for other table shapes.
    """
    if size is None:
        size = table_size
    if precision is None:
        precision = fixed_point_precision

    f.write('const int ' + display_name + '[] =' + '\n')
    f.write('{' + '\n')
    for angle in range(0, size):
        value = int(_cosine_func(angle * math.pi / (size / 2.0)) * precision)
        f.write(str(value) + ',' + '\n')
    f.write('};' + '\n')
def main():
    """Emit cosine_table.h and cosine_table.c containing sin/cos tables."""
    # Header file: table length constant plus extern declarations.
    header = codecs.open(filename_out + '.h', 'w')
    header.write('#define COSINE_TABLE_LEN ' + str(table_size) + '\n')
    header.write('\n')
    header.write('extern const int tcos[COSINE_TABLE_LEN];' + '\n')
    header.write('extern const int tsin[COSINE_TABLE_LEN];' + '\n')
    header.close()

    # C file: the actual table data.
    impl = codecs.open(filename_out + '.c', 'w')
    dumpCosine(_cosine_func=math.cos, display_name='tcos', f=impl)
    impl.write('\n')
    dumpCosine(_cosine_func=math.sin, display_name='tsin', f=impl)
    impl.close()
# Guard the entry point so importing this module does not regenerate
# the tables as a side effect.
if __name__ == "__main__":
    main()
def d(n):
    """Print [n, tp] where tp = (n>>4) + (n>>3) clamped to [64, 2048].

    tp approximates 3n/16 -- an over-allocation estimate bounded below
    by 64 and above by 2048.
    """
    tp = (n >> 4) + (n >> 3)
    # Clamp to the allowed range (replaces the original if-chains).
    tp = max(64, min(tp, 2048))
    # Original Python 2 'print [n, tp]' statement made portable; the
    # printed text is identical under both interpreters.
    print([n, tp])
# Guard the sample loop so importing the module stays side-effect free.
if __name__ == "__main__":
    # Sample the growth policy at a few sizes.
    for x in range(0, 9999, 512):
        d(x)
| dw/scratch | overalloc.py | Python | mit | 161 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# fig08_10.py
#
# Author: Billy Wilson Arante
# Created: 2016/10/10
from rational import Rational
def main():
"""Main"""
# Objects of class Rational
rational1 = Rational() # 1/1
rational2 = Rational(10, 30) # 10/30 reduces to 1/3
rational3 = Rational(-7, 14) # -7/14 reduces to -1/2
# Printing objects of class Rational
print "Rational 1:", rational1
print "Rational 2:", rational2
print "Rational 3:", rational3
print
# Testing mathematical operators
print rational1, "/", rational2, "=", rational1 / rational2
print rational3, "-", rational2, "=", rational3 - rational2
print rational2, "*", rational3, "-", rational1, "=", \
rational2 * rational3 - rational1
# Overloading + implicitly overloads +=
rational1 += rational2 * rational3
print "\nrational1 after adding rational2 * rational3:", rational1
print
# Test comparison operators
print rational1, "<=", rational2, ":", rational1 <= rational2
print rational1, ">", rational3, ":", rational1 > rational3
print
# Test built-in function abs
print "The absolute value of", rational3, "is:", abs(rational3)
print
# Test coercion
print rational2, "as an integer is:", int(rational2)
print rational2, "as a float is:", float(rational2)
print rational2, "+ 1 =", rational2 + 1
if __name__ == "__main__":
main()
| arantebillywilson/python-snippets | py2/htp/ch08/fig08_10.py | Python | mit | 1,448 |
import uuid
import asyncio
from appdaemon.appdaemon import AppDaemon
class Sequences:
    """Manages AppDaemon 'sequence.<name>' entities: registration,
    execution (including sub-sequences and looping) and removal."""

    def __init__(self, ad: AppDaemon):
        self.AD = ad
        self.logger = ad.logging.get_child("_sequences")

    async def run_sequence_service(self, namespace, domain, service, kwargs):
        """Service handler: run the sequence named by kwargs['entity_id']."""
        if "entity_id" not in kwargs:
            self.logger.warning("entity_id not given in service call, so will not be executing %s", service)
            return

        # Scheduled fire-and-forget on the async worker rather than
        # awaited inline (the awaited form is kept below for reference).
        # await self.run_sequence("_services", namespace, kwargs["entity_id"])
        self.AD.thread_async.call_async_no_wait(self.run_sequence, "_services", namespace, kwargs["entity_id"])

    async def add_sequences(self, sequences):
        """Create or refresh 'sequence.*' entities in the rules namespace.

        *sequences* maps sequence names to configs with optional 'name'
        and 'loop' keys and a required 'steps' list.
        """
        for sequence in sequences:
            entity = "sequence.{}".format(sequence)
            attributes = {
                "friendly_name": sequences[sequence].get("name", sequence),
                "loop": sequences[sequence].get("loop", False),
                "steps": sequences[sequence]["steps"],
            }
            if not await self.AD.state.entity_exists("rules", entity):
                # it doesn't exist so add it
                await self.AD.state.add_entity(
                    "rules", entity, "idle", attributes=attributes,
                )
            else:
                # Entity exists: overwrite its state and attributes.
                await self.AD.state.set_state(
                    "_sequences", "rules", entity, state="idle", attributes=attributes, replace=True
                )

    async def remove_sequences(self, sequences):
        """Remove the given sequence entities (accepts one name or a list)."""
        if not isinstance(sequences, list):
            sequences = [sequences]
        for sequence in sequences:
            await self.AD.state.remove_entity("rules", "sequence.{}".format(sequence))

    async def run_sequence(self, _name, namespace, sequence):
        """Start *sequence* as a tracked asyncio future and return it."""
        coro = self.prep_sequence(_name, namespace, sequence)

        #
        # OK, lets run it
        #
        future = asyncio.ensure_future(coro)

        self.AD.futures.add_future(_name, future)

        return future

    async def prep_sequence(self, _name, namespace, sequence):
        """Resolve *sequence* (entity name or literal step list) and run it.

        A literal list gets a throwaway 'ephemeral' entity that is
        removed once the steps complete.
        """
        ephemeral_entity = False
        loop = False

        if isinstance(sequence, str):
            entity_id = sequence
            if await self.AD.state.entity_exists("rules", entity_id) is False:
                self.logger.warning('Unknown sequence "%s" in run_sequence()', sequence)
                return None

            # NOTE(review): reads with name "_services" while the entity
            # was written via namespace "rules" above -- presumably the
            # first argument is only a caller label; confirm against
            # AD.state.get_state()'s signature.
            entity = await self.AD.state.get_state("_services", "rules", sequence, attribute="all")
            seq = entity["attributes"]["steps"]
            loop = entity["attributes"]["loop"]
        else:
            #
            # Assume it's a list with the actual commands in it
            #
            entity_id = "sequence.{}".format(uuid.uuid4().hex)
            # Create an ephemeral entity for it
            ephemeral_entity = True

            await self.AD.state.add_entity("rules", entity_id, "idle", attributes={"steps": sequence})

            seq = sequence

        coro = await self.do_steps(namespace, entity_id, seq, ephemeral_entity, loop)
        return coro

    @staticmethod
    async def cancel_sequence(_name, future):
        """Cancel a running sequence future."""
        future.cancel()

    async def do_steps(self, namespace, entity_id, seq, ephemeral_entity, loop):
        """Execute the steps of a sequence, marking it active/idle.

        Supported step commands: 'sleep' (seconds), 'sequence' (nested
        sub-sequence) and '<domain>/<service>' service calls.  With
        loop=True the whole step list repeats until cancelled.
        """
        await self.AD.state.set_state("_sequences", "rules", entity_id, state="active")
        try:
            while True:
                for step in seq:
                    for command, parameters in step.items():
                        if command == "sleep":
                            await asyncio.sleep(float(parameters))
                        elif command == "sequence":
                            # Running a sub-sequence so just recurse
                            await self.prep_sequence("_sequence", namespace, parameters)
                            pass
                        else:
                            domain, service = str.split(command, "/")
                            # Per-step namespace override, if present.
                            if "namespace" in parameters:
                                ns = parameters["namespace"]
                                del parameters["namespace"]
                            else:
                                ns = namespace

                            parameters["__name"] = entity_id
                            await self.AD.services.call_service(ns, domain, service, parameters)

                if loop is not True:
                    break
        finally:
            # Always reset state, and drop ephemeral entities, even on
            # cancellation.
            await self.AD.state.set_state("_sequences", "rules", entity_id, state="idle")

            if ephemeral_entity is True:
                await self.AD.state.remove_entity("rules", entity_id)
| acockburn/appdaemon | appdaemon/sequences.py | Python | mit | 4,617 |
#!/usr/bin/env python
import os
import pkgutil
import logging
import sys
import tornado.web
from pyjojo.config import config
from pyjojo.scripts import create_collection
log = logging.getLogger(__name__)
class route(object):
    """
    Decorates RequestHandlers and builds up a list of routable handlers.

    From: https://gist.github.com/616347
    """

    # Shared accumulator: every decorated handler appends here.
    _routes = []

    def __init__(self, uri, name=None):
        self._uri = uri
        self.name = name

    def __call__(self, _handler):
        """gets called when we class decorate"""
        log.info("Binding {0} to route {1}".format(_handler.__name__, self._uri))
        # 'self.name and self.name or x' was the legacy conditional
        # idiom; 'self.name or x' is equivalent and clearer.
        name = self.name or _handler.__name__
        self._routes.append(tornado.web.url(self._uri, _handler, name=name))
        return _handler

    @classmethod
    def get_routes(cls):
        """Return the accumulated list of tornado URLSpec routes."""
        return cls._routes
def setup_logging():
    """ setup the logging system """
    # Attach a stdout handler to the root logger and open it up to
    # DEBUG; the handler is returned so callers can tweak it later.
    root = logging.getLogger()
    stream = logging.StreamHandler(sys.stdout)
    stream.setFormatter(logging.Formatter("%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] %(message)s"))
    root.addHandler(stream)
    root.setLevel(logging.DEBUG)
    return stream
def create_application(debug):
    """Build the tornado Application with all registered routes."""
    # Importing the handlers module populates route.get_routes() as a
    # side effect of its @route decorators.
    import pyjojo.handlers

    return tornado.web.Application(
        route.get_routes(),
        scripts=create_collection(config['directory']),
        debug=debug)
| atarola/pyjojo | pyjojo/util.py | Python | mit | 1,563 |
'''
Created on Apr 3, 2012
@author: Dan
'''
from Common.Converter import *
from Common.Module import *
from Common.Symbol import DevicePart
from Common.Device import Deviceset
class Library(object):
    """An Eagle library parsed from XML, writable as KiCad .mod/.lib files.

    Collects footprints (modules) from <packages> and schematic device
    parts from <devicesets>/<symbols>.
    """

    __slots__ = ("name", "modules", "symbols", "converter", "deviceparts")

    def __init__(self, node, name, converter=None):
        """Parse *node* (an Eagle <library> XML element) named *name*.

        *converter* handles board-unit conversion; a schematic converter
        is always created for symbols.
        """
        self.name = name
        if converter is None:
            converter = Converter()
        symConverter = SchemConverter()

        self.modules = []
        self.deviceparts = []
        devicesetsLst = []
        symbolsHash = {}

        # Footprints.
        packages = node.find("packages").findall("package")
        if packages != None:
            for package in packages:
                self.modules.append(Module(package, converter))

        # Device sets (logical parts grouping gates + package variants).
        devicesets = node.find("devicesets").findall("deviceset")
        if devicesets != None:
            for deviceset in devicesets:
                ds = Deviceset(deviceset, symConverter)
                devicesetsLst.append(ds)

        # Symbols, indexed by name for lookup from device gates.
        symbols = node.find("symbols").findall("symbol")
        if symbols != None and len(devicesetsLst) != 0:  #strange if not?
            for symbol in symbols:
                sn = symbol.get("name")
                if sn in symbolsHash:
                    print("The symbol with the same name %s already exists!" % sn)
                else:
                    symbolsHash[sn] = symbol

        for deviceset in devicesetsLst:  #strange if not?
            #just iterater over all posible device packages
            for device in deviceset.getDevices():
                #we have to create a number of symbols to match diffrent pin configurations
                #the real name of device is <deviceset> name plus name of <device>
                #symlink is just a scheme representation of the set of devices or devicessts
                device.setFullName(deviceset.name)
                dp = DevicePart(device, symbolsHash, deviceset.getGates(), symConverter)
                self.deviceparts.append(dp)

    def writeLibrary(self, modFile=None, symFile=None, docFile=None):
        """Write any of the three KiCad output files that were supplied."""
        if modFile != None:
            self.writeModFile(modFile)
        if symFile != None:
            self.writeSymFile(symFile)
        if docFile != None:  #not used at the moment
            self.writeDocFile(docFile)

    def writeModFile(self, modFile):
        """Write the legacy KiCad footprint library (index + modules)."""
        modFile.write("PCBNEW-LibModule-V1 00/00/0000-00:00:00\n")
        modFile.write("$INDEX\n")
        for module in self.modules:
            modFile.write(module.package + "\n")
        modFile.write("$EndINDEX\n")
        for module in self.modules:
            module.write(modFile)
        modFile.write("$EndLIBRARY")
        modFile.close()

    def writeSymFile(self, symFile):
        """Write the legacy KiCad schematic symbol library."""
        symFile.write("EESchema-LIBRARY Version 0.0 00/00/0000-00:00:00\n")
        for devicepart in self.deviceparts:
            devicepart.write(symFile)
        symFile.write("# End Library")

    def writeDocFile(self, docFile):
        """Write the (currently header-only) KiCad doc library file."""
        docFile.write("EESchema-DOCLIB Version 0.0 Date: 00/00/0000 00:00:00\n")
| Pecacheu/Eagle2Kicad | Library/Library.py | Python | mit | 3,113 |
import os
from dotenv import load_dotenv
dotenv_path = os.path.join(os.path.dirname(__file__), '../.env')
load_dotenv(dotenv_path)
# pylama:ignore=E402
from flask import Flask
from flask.ext.cors import CORS
from flask.ext.session import Session
from werkzeug.contrib.fixers import ProxyFix
from raven.contrib.flask import Sentry
from .blueprints import datastore, package, user, search
from .blueprints.logger import logger
def create():
    """Create application.
    """
    # Create application
    application = Flask('service', static_folder=None)
    application.config['DEBUG'] = True

    # Respect X-Forwarding-* headers
    application.wsgi_app = ProxyFix(application.wsgi_app)

    # CORS support
    CORS(application, supports_credentials=True)

    # Exception logging
    Sentry(application, dsn=os.environ.get('SENTRY_DSN', ''))

    # Session
    session = Session()
    application.config['SESSION_TYPE'] = 'filesystem'
    application.config['SECRET_KEY'] = 'openspending rocks'
    session.init_app(application)

    # Register blueprints; factories are invoked inside the loop so the
    # log line for each blueprint precedes its creation, as before.
    registrations = (
        ("Creating Datastore Blueprint", datastore.create, '/datastore/'),
        ("Creating Package Blueprint", package.create, '/package/'),
        ("Creating Authentication Blueprint", user.oauth_create, '/oauth/'),
        ("Creating Users Blueprint", user.create, '/user/'),
        ("Creating Search Blueprint", search.create, '/search/'),
    )
    for message, factory, prefix in registrations:
        logger.info(message)
        application.register_blueprint(factory(), url_prefix=prefix)

    # Return application
    return application
| openspending/os-conductor | conductor/app.py | Python | mit | 1,606 |
# This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from wtforms.fields import StringField
from wtforms.validators import DataRequired
from indico.core.db.sqlalchemy.descriptions import RenderMode
from indico.modules.events.sessions.models.sessions import Session
from indico.modules.events.tracks.models.groups import TrackGroup
from indico.util.i18n import _
from indico.web.forms.base import IndicoForm, generated_data
from indico.web.forms.fields import IndicoMarkdownField
class TrackForm(IndicoForm):
    """Form for creating/editing a conference track."""

    title = StringField(_('Title'), [DataRequired()])
    code = StringField(_('Code'))
    track_group = QuerySelectField(_('Track group'), default='', allow_blank=True, get_label='title',
                                   description=_('Select a track group to which this track should belong'))
    default_session = QuerySelectField(_('Default session'), default='', allow_blank=True, get_label='title',
                                       description=_('Indico will preselect this session whenever an abstract is '
                                                     'accepted for the track'))
    description = IndicoMarkdownField(_('Description'), editor=True)

    def __init__(self, *args, **kwargs):
        # Restrict both QuerySelect fields to the event being edited.
        event = kwargs.pop('event')
        super(TrackForm, self).__init__(*args, **kwargs)
        self.default_session.query = Session.query.with_parent(event)
        self.track_group.query = TrackGroup.query.with_parent(event)
class ProgramForm(IndicoForm):
    """Form for editing the event programme text (markdown)."""

    program = IndicoMarkdownField(_('Programme'), editor=True, mathjax=True)

    @generated_data
    def program_render_mode(self):
        # The programme is always stored as markdown.
        return RenderMode.markdown
class TrackGroupForm(IndicoForm):
    """Form for creating/editing a track group."""

    title = StringField(_('Title'), [DataRequired()])
    description = IndicoMarkdownField(_('Description'), editor=True)
| mvidalgarcia/indico | indico/modules/events/tracks/forms.py | Python | mit | 2,078 |
try:
# Python 3
from http.client import HTTPResponse, IncompleteRead
str_cls = str
except (ImportError):
# Python 2
from httplib import HTTPResponse, IncompleteRead
str_cls = unicode
from ..console_write import console_write
class DebuggableHTTPResponse(HTTPResponse):
    """
    A custom HTTPResponse that formats debugging info for Sublime Text
    """

    # Label used in debug output; subclasses may override (e.g. for HTTPS).
    _debug_protocol = 'HTTP'

    def __init__(self, sock, debuglevel=0, method=None, **kwargs):
        # We have to use a positive debuglevel to get it passed to here,
        # however we don't want to use it because by default debugging prints
        # to the stdout and we can't capture it, so we use a special -1 value
        if debuglevel == 5:
            debuglevel = -1
        HTTPResponse.__init__(self, sock, debuglevel=debuglevel, method=method)

    def begin(self):
        """Parse the response headers, echoing them via console_write
        when the sentinel debuglevel (-1) is active."""
        return_value = HTTPResponse.begin(self)
        if self.debuglevel == -1:
            # Python 2
            if hasattr(self.msg, 'headers'):
                headers = [line.rstrip() for line in self.msg.headers]
            # Python 3
            else:
                headers = []
                for header in self.msg:
                    headers.append("%s: %s" % (header, self.msg[header]))
            versions = {
                9: u'HTTP/0.9',
                10: u'HTTP/1.0',
                11: u'HTTP/1.1'
            }
            # Reconstruct the status line; self.version is 9/10/11 per
            # http.client.
            status_line = u'%s %s %s' % (versions[self.version], str_cls(self.status), self.reason)
            headers.insert(0, status_line)
            indented_headers = u'\n '.join(headers)
            console_write(
                u'''
                Urllib %s Debug Read
                %s
                ''',
                (self._debug_protocol, indented_headers)
            )
        return return_value

    def is_keep_alive(self):
        """Return True if the response's Connection header is keep-alive."""
        # Python 2
        if hasattr(self.msg, 'headers'):
            connection = self.msg.getheader('connection')
        # Python 3
        else:
            connection = self.msg['connection']
        if connection and connection.lower() == 'keep-alive':
            return True
        return False

    def read(self, *args):
        """Like HTTPResponse.read() but returns the partial body instead
        of raising when the server closes the connection early."""
        try:
            return HTTPResponse.read(self, *args)
        except (IncompleteRead) as e:
            return e.partial
| herove/dotfiles | sublime/Packages/Package Control/package_control/http/debuggable_http_response.py | Python | mit | 2,329 |
from copy import deepcopy
from manager_utils import upsert
class BaseSmartManager(object):
    """Base class for smart managers that build (upsert) model objects
    from a template and track which built objects they may later delete.
    """

    def __init__(self, template):
        # Deep-copy so that mutation during build never leaks back into
        # the caller's template.
        self._template = deepcopy(template)
        self._built_objs = set()

    @property
    def built_objs(self):
        """The set of deletable objects this manager has built."""
        return self._built_objs

    def build_obj(self, model_class, is_deletable=True, updates=None, defaults=None, **kwargs):
        """
        Builds an object using the upsert function in manager utils. If the object can be deleted
        by the smart manager, it is added to the internal _built_objs list and returned.
        """
        built_obj = upsert(model_class.objects, updates=updates, defaults=defaults, **kwargs)[0]
        if is_deletable:
            # set.add is clearer than the previous '|= set([obj])'.
            self._built_objs.add(built_obj)
        return built_obj

    def build_using(self, smart_manager_class, template):
        """
        Builds objects using another builder and a template. Adds the resulting built objects
        from that builder to the built objects of this builder.
        """
        smart_manager = smart_manager_class(template)
        built_objs = smart_manager.build()
        self._built_objs |= smart_manager.built_objs

        # Normalize to a sequence; isinstance (instead of the former
        # type() membership test) also accepts list/tuple subclasses.
        if not isinstance(built_objs, (list, tuple)):
            built_objs = [built_objs]
        return built_objs

    def build(self):
        """
        All builders must implement the build function, which returns the built object. All build
        functions must also maintain an interal list of built objects, which are accessed by
        self.built_objs.
        """
        raise NotImplementedError
| ambitioninc/django-smart-manager | smart_manager/base.py | Python | mit | 1,637 |
#!/usr/bin/python3
#
# progress.py
#
# Author: Billy Wilson Arante
# Created: 2016/02/05 PHT
# Modified: 2016/08/19 PHT
#
def progress():
    """Progress icon

    Spin forever, redrawing the spinner in place via a carriage return.
    """
    frames = ("/", "-", "|", "\\", "|")
    while True:
        for frame in frames:
            print("%s\r" % frame, end="")
def main():
    """Entry point: run the spinner until interrupted."""
    progress()


if __name__ == "__main__":
    main()
| arantebillywilson/python-snippets | py3/abw-things/progress.py | Python | mit | 399 |
from jsonrpc import ServiceProxy
# Python 2 script (raw_input): connect to the local coin daemon's
# JSON-RPC interface.
# NOTE(review): 5332 is presumably the daemon's RPC port -- confirm
# against the node configuration.
access = ServiceProxy("http://127.0.0.1:5332")
pwd = raw_input("Enter wallet passphrase: ")
# Unlock the wallet for 60 seconds.
access.walletpassphrase(pwd, 60)
| jarno83/digigems | contrib/wallettools/walletunlock.py | Python | mit | 158 |
# -*- coding: utf-8 -*-
from PIL import Image, ImageFont, ImageDraw
def drawPoint(imagedraw, x, y, width, colour):
    """Draw a filled circular dot of diameter *width* centred on (x, y)."""
    radius = width / 2
    bounding_box = (x - radius, y - radius, x + radius, y + radius)
    imagedraw.ellipse(bounding_box, colour)
# Script body: render a 300x300 RGBA test frame with text, a line and a
# dot, then save it as a GIF.
# NOTE(review): the font and output paths are hard-coded absolute paths
# (Raspberry Pi layout) -- this only runs where they exist.
frame = Image.new("RGBA", (300,300))
frameDraw = ImageDraw.Draw(frame)
#load font
fontpath = "/usr/share/fonts/truetype/freefont/FreeMono.ttf"
# use a truetype font
font = ImageFont.truetype(fontpath, 15)
#draw some text
frameDraw.text((0,0),"Hello World", font=font)
#draw a line
frameDraw.line((0, 20, 150, 20), fill="white", width=3)
#draw a point
#frameDraw.ellipse((147, 17, 153, 23), "red")
drawPoint(frameDraw, 150, 20, 10, "red")
frame.save("/home/pi/dev/cbb/vidGPSOverlay/testcode/pil.gif", "GIF")
| tomn46037/vidGPSOverlay | testcode/PILTest.py | Python | mit | 707 |
# Written by Nanbo Sun and CBIG under MIT license: https://github.com/ThomasYeoLab/CBIG/blob/master/LICENSE.md
# NOTE: this file intentionally violates flake8 formatting rules; it is
# a negative fixture for the pre-commit flake8 hook test. Do not
# "clean up" the code below.
x = { 'a':37,'b':42,
'c':927}
y = 'hello ''world'
z = 'hello '+'world'
a = 'hello {}'.format('world')
class foo ( object ):
    def f (self ):
        return 37*-+2
    def g(self, x,y=42):
        return y
def f ( a ) :
    return 37+-+a[42-x : y**3]
| ThomasYeoLab/CBIG | setup/tests/hooks_tests/pre_commit_tests/G_check_flake8_format/notfollow_flake8.py | Python | mit | 380 |
import logging
from contextlib import contextmanager
from ..errors import CompilerError
from .state import State
from .core import Context, Object, BoundObject, SoftScope, Scope, Type
from .module import Module
from .closure import Closure, ClosedLink, ClosedTarget
from .function import Function, FunctionType, FunctionInstance, Return
from .external_function import ExternalFunction
from .call import Call
from .method import Method, MethodType, MethodInstance
from .links import Link, BoundLink, ContextLink, Attribute
from .identifier import Identifier
from .modifiers import Constant, Reference
from .variable import Variable
from .assignment import Assignment
from .class_ import Class, Constructor
from .forward import ForwardObject, ForwardTarget
from .literal import Literal
from .branches import Loop, Break, Branch
from .void_type import VoidType
from .size_of import SizeOf
from . import stats
from . import util
from . import forward
def _verify(source, frontend, logger=logging.getLogger()):
    """Parse *source* with *frontend*, verify the result and return it."""
    logger.info("Parsing")
    parsed_module = frontend.parse(source, logger)

    logger.info("Verifying")
    verify(parsed_module, logger)
    return parsed_module
def compile(source, frontend, backend, logger=logging.getLogger(), opt_level=0):
    """Parse and verify *source*, then emit code through *backend*."""
    verified = _verify(source, frontend, logger)

    logger.info("Generating Code")
    return backend.emit(verified, logger, opt_level)
def run(source, frontend, backend, logger = logging.getLogger(), opt_level = 0):
    """Parse and verify *source*, then execute it on *backend*.

    NOTE(review): opt_level is accepted but never forwarded to
    backend.run -- confirm whether that is intentional.
    """
    module = _verify(source, frontend, logger)

    logger.info("Running")
    return backend.run(module)
def verify(module: Module, logger=logging.getLogger()):
    """Verify *module* after initialising the global State.

    CompilerErrors are formatted for display before being re-raised.
    """
    # Set up the initial state before verifying
    State.init(logger.getChild("lekvar"))
    State.logger.info(module.context)

    try:
        module.verify()
    except CompilerError as error:
        error.format()
        raise error
@contextmanager
def use(frontend, backend, logger = logging.getLogger()):
    """Context manager activating both a frontend and a backend."""
    with useFrontend(frontend, logger):
        with useBackend(backend, logger):
            yield
@contextmanager
def useFrontend(frontend, logger = logging.getLogger()):
    """Temporarily install *frontend*'s builtins as State.builtins.

    The previous builtins are restored on exit, even on error.
    """
    builtins = frontend.builtins(logger)
    # Hack backend into frontend builtins
    builtins.context.addChild(ForwardObject(builtins, "_builtins"))

    try:
        old_builtins = State.builtins
        State.builtins = builtins

        verify(builtins)

        yield
    finally:
        # Restore whatever was active before this context.
        State.builtins = old_builtins
@contextmanager
def useBackend(backend, logger = logging.getLogger()):
    """Context manager targeting the forward '_builtins' object at the
    backend's builtins."""
    made_builtins = backend.builtins(logger)
    forward_obj = State.builtins.context["_builtins"]

    with forward.target([(forward_obj, made_builtins)], False):
        yield
| CameronLonsdale/jam | compiler/lekvar/__init__.py | Python | mit | 2,659 |
import math
from datetime import datetime
from conch import analyze_segments
from conch.analysis.segments import SegmentMapping
from .helper import generate_pitch_function
from ..segments import generate_utterance_segments
from ...exceptions import SpeakerAttributeError
from ..classes import Track, TimePoint
from ..utils import PADDING
def analyze_utterance_pitch(corpus_context, utterance, source='praat', min_pitch=50, max_pitch=500,
                            **kwargs):
    """Compute and return the pitch (F0) track for a single utterance.

    Parameters
    ----------
    corpus_context : CorpusContext
        Corpus the utterance belongs to.
    utterance : str or object
        Utterance id, or an object with an ``id`` attribute.
    source : str
        Pitch program to use, ``'praat'`` or ``'reaper'``.
    min_pitch : int
        Pitch floor in Hz.
    max_pitch : int
        Pitch ceiling in Hz.
    kwargs
        ``padding`` (seconds around the utterance) is honored; defaults to
        the module-level ``PADDING``.

    Returns
    -------
    Track
        Time points carrying only positive (voiced) F0 values.
    """
    # Accept either an utterance object or its id directly
    if isinstance(utterance, str):
        utterance_id = utterance
    else:
        utterance_id = utterance.id
    padding = kwargs.pop('padding', None)
    if padding is None:
        padding = PADDING
    utt_type = corpus_context.hierarchy.highest
    # Locate the utterance's sound file and audio channel
    statement = '''MATCH (s:Speaker:{corpus_name})-[r:speaks_in]->(d:Discourse:{corpus_name}),
                (u:{utt_type}:{corpus_name})-[:spoken_by]->(s),
                (u)-[:spoken_in]->(d)
                WHERE u.id = $utterance_id
                RETURN u, d, r.channel as channel'''.format(corpus_name=corpus_context.cypher_safe_name,
                                                            utt_type=utt_type)
    results = corpus_context.execute_cypher(statement, utterance_id=utterance_id)
    segment_mapping = SegmentMapping()
    # A single row is expected; build the analysis segment from it
    for r in results:
        channel = r['channel']
        file_path = r['d']['vowel_file_path']
        u = r['u']
        segment_mapping.add_file_segment(file_path, u['begin'], u['end'], channel, padding=padding)
    path = None
    if source == 'praat':
        path = corpus_context.config.praat_path
    elif source == 'reaper':
        path = corpus_context.config.reaper_path
    pitch_function = generate_pitch_function(source, min_pitch, max_pitch, path=path)
    track = Track()
    for seg in segment_mapping:
        output = pitch_function(seg)
        for k, v in output.items():
            # Skip unvoiced or invalid frames
            if v['F0'] is None or v['F0'] <= 0:
                continue
            p = TimePoint(k)
            p.add_value('F0', v['F0'])
            track.add(p)
    # Make sure the hierarchy knows about the pitch acoustic property
    if 'pitch' not in corpus_context.hierarchy.acoustics:
        corpus_context.hierarchy.add_acoustic_properties(corpus_context, 'pitch', [('F0', float)])
        corpus_context.encode_hierarchy()
    return track
def update_utterance_pitch_track(corpus_context, utterance, new_track):
    """Replace the stored pitch (F0) track of one utterance with *new_track*.

    Deletes the existing pitch points over the utterance's time span from
    the acoustic database, writes the new points (tagging each with the
    phone label it falls in), and stamps the utterance with the edit time.

    Parameters
    ----------
    corpus_context : CorpusContext
        Corpus the utterance belongs to.
    utterance : str or object
        Utterance id, or an object with an ``id`` attribute.
    new_track : iterable
        Data points exposing ``'time'`` and ``'F0'`` entries.

    Returns
    -------
    float
        The timestamp stored as ``pitch_last_edited``.
    """
    from ...corpus.audio import s_to_ms, s_to_nano
    if isinstance(utterance, str):
        utterance_id = utterance
    else:
        utterance_id = utterance.id
    today = datetime.utcnow()
    utt_type = corpus_context.hierarchy.highest
    phone_type = corpus_context.hierarchy.lowest
    time_stamp = today.timestamp()
    # Fetch the utterance plus its discourse, speaker, channel and phones,
    # and record the edit time on the utterance node in the same query.
    statement = '''MATCH (s:Speaker:{corpus_name})-[r:speaks_in]->(d:Discourse:{corpus_name}),
                (u:{utt_type}:{corpus_name})-[:spoken_by]->(s),
                (u)-[:spoken_in]->(d),
                (p:{phone_type}:{corpus_name})-[:contained_by*]->(u)
                WHERE u.id = $utterance_id
                SET u.pitch_last_edited = $date
                RETURN u, d, r.channel as channel, s, collect(p) as p'''.format(
        corpus_name=corpus_context.cypher_safe_name,
        utt_type=utt_type, phone_type=phone_type)
    results = corpus_context.execute_cypher(statement, utterance_id=utterance_id, date=time_stamp)
    # A single row is expected; the loop just unpacks it
    for r in results:
        channel = r['channel']
        discourse = r['d']['name']
        speaker = r['s']['name']
        u = r['u']
        phones = r['p']
    client = corpus_context.acoustic_client()
    # Clear the previous track over this utterance's span.
    # NOTE(review): discourse/speaker names are interpolated directly into
    # the InfluxQL string; they come from the database but are not escaped.
    query = '''DELETE from "pitch"
               where "discourse" = '{}'
               and "speaker" = '{}'
               and "time" >= {}
               and "time" <= {};'''.format(discourse, speaker, s_to_nano(u['begin']), s_to_nano(u['end']))
    client.query(query)
    data = []
    for data_point in new_track:
        time_point, value = data_point['time'], data_point['F0']
        t_dict = {'speaker': speaker, 'discourse': discourse, 'channel': channel}
        # Find the label of the phone containing this time point
        label = None
        for i, p in enumerate(sorted(phones, key=lambda x: x['begin'])):
            if p['begin'] > time_point:
                break
            label = p['label']
            if i == len(phones) - 1:
                break
        else:
            # Only reached when there are no phones at all
            label = None
        if label is None:
            continue
        fields = {'phone': label, 'utterance_id': u['id']}
        try:
            if value is None:
                continue
            value = float(value)
        except TypeError:
            continue
        if value <= 0:
            # Only store voiced frames
            continue
        fields['F0'] = value
        d = {'measurement': 'pitch',
             'tags': t_dict,
             'time': s_to_ms(time_point),
             'fields': fields
             }
        data.append(d)
    client.write_points(data, batch_size=1000, time_precision='ms')
    if 'pitch' not in corpus_context.hierarchy.acoustics:
        # NOTE(review): other code paths use add_acoustic_properties() here;
        # confirm whether plain .add() keeps the hierarchy consistent.
        corpus_context.hierarchy.acoustics.add('pitch')
        corpus_context.encode_hierarchy()
    return time_stamp
def analyze_pitch(corpus_context,
                  source='praat',
                  algorithm='base',
                  call_back=None,
                  absolute_min_pitch=50,
                  absolute_max_pitch=500,
                  adjusted_octaves=1,
                  stop_check=None, multiprocessing=True):
    """
    Analyze pitch (F0) for every utterance in the corpus, speaker by speaker.

    Parameters
    ----------
    corpus_context : :class:`~polyglotdb.corpus.audio.AudioContext`
    source : str
        Program to use for analyzing pitch, either ``praat`` or ``reaper``
    algorithm : str
        Algorithm to use, ``base``, ``gendered``, or ``speaker_adjusted``
    absolute_min_pitch : int
        Absolute pitch floor
    absolute_max_pitch : int
        Absolute pitch ceiling
    adjusted_octaves : int
        How many octaves around the speaker's mean pitch to set the speaker adjusted pitch floor and ceiling
    stop_check : callable
        Function to check whether processing should stop early
    call_back : callable
        Function to report progress
    multiprocessing : bool
        Flag whether to use multiprocessing or threading
    """
    if 'utterance' not in corpus_context.hierarchy:
        raise Exception('Must encode utterances before pitch can be analyzed')
    segment_mapping = generate_utterance_segments(corpus_context, padding=PADDING).grouped_mapping('speaker')
    num_speakers = len(segment_mapping)
    path = None
    if source == 'praat':
        path = corpus_context.config.praat_path
    elif source == 'reaper':
        path = corpus_context.config.reaper_path
    # Default function uses the absolute bounds; per-speaker variants may
    # replace it below.
    pitch_function = generate_pitch_function(source, absolute_min_pitch, absolute_max_pitch,
                                             path=path)
    if 'pitch' not in corpus_context.hierarchy.acoustics:
        corpus_context.hierarchy.add_acoustic_properties(corpus_context, 'pitch', [('F0', float)])
        corpus_context.encode_hierarchy()
    if algorithm == 'speaker_adjusted':
        # First pass: estimate each speaker's mean F0 with the absolute
        # bounds, then derive per-speaker floors/ceilings for the second pass.
        speaker_data = {}
        if call_back is not None:
            call_back('Getting original speaker means and SDs...')
        for i, ((k,), v) in enumerate(segment_mapping.items()):
            if call_back is not None:
                call_back('Analyzing speaker {} ({} of {})'.format(k, i, num_speakers))
            output = analyze_segments(v, pitch_function, stop_check=stop_check,
                                      multiprocessing=multiprocessing)
            sum_pitch = 0
            n = 0
            for seg, track in output.items():
                for t, val in track.items():
                    f0 = val['F0']
                    if f0 is not None and f0 > 0:  # only voiced frames
                        n += 1
                        sum_pitch += f0
            if n:
                mean_pitch = sum_pitch / n
                speaker_data[k] = (int(mean_pitch / math.pow(2, adjusted_octaves)),
                                   int(mean_pitch * math.pow(2, adjusted_octaves)))
            else:
                # No voiced frames for this speaker: fall back to the absolute
                # bounds instead of dividing by zero.
                speaker_data[k] = (absolute_min_pitch, absolute_max_pitch)
    for i, ((speaker,), v) in enumerate(segment_mapping.items()):
        if call_back is not None:
            call_back('Analyzing speaker {} ({} of {})'.format(speaker, i, num_speakers))
        if algorithm == 'gendered':
            # Narrow the range based on the speaker's gender, when known
            min_pitch = absolute_min_pitch
            max_pitch = absolute_max_pitch
            try:
                q = corpus_context.query_speakers().filter(corpus_context.speaker.name == speaker)
                q = q.columns(corpus_context.speaker.gender.column_name('Gender'))
                gender = q.all()[0]['Gender']
                if gender is not None:
                    if gender.lower()[0] == 'f':
                        min_pitch = 100
                    else:
                        max_pitch = 400
            except SpeakerAttributeError:
                pass
            pitch_function = generate_pitch_function(source, min_pitch, max_pitch,
                                                     path=path)
        elif algorithm == 'speaker_adjusted':
            min_pitch, max_pitch = speaker_data[speaker]
            # Clamp the adjusted range to the absolute bounds
            if min_pitch < absolute_min_pitch:
                min_pitch = absolute_min_pitch
            if max_pitch > absolute_max_pitch:
                max_pitch = absolute_max_pitch
            pitch_function = generate_pitch_function(source, min_pitch, max_pitch,
                                                     path=path)
        output = analyze_segments(v, pitch_function, stop_check=stop_check,
                                  multiprocessing=multiprocessing)
        corpus_context.save_acoustic_tracks('pitch', output, speaker)
    # Record when pitch was last computed on every utterance
    today = datetime.utcnow()
    corpus_context.query_graph(corpus_context.utterance).set_properties(pitch_last_edited=today.timestamp())
    corpus_context.encode_hierarchy()
| PhonologicalCorpusTools/PolyglotDB | polyglotdb/acoustics/pitch/base.py | Python | mit | 10,009 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Template factory: build the shared chemical-recycler-schematic Tangible."""
    result = Tangible()
    result.template = "object/tangible/loot/loot_schematic/shared_chemical_recycler_schematic.iff"
    result.attribute_template_id = -1
    result.stfName("craft_item_ingredients_n","chemical_recycler")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return result
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from .v2016_09_01.models import *
| Azure/azure-sdk-for-python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/locks/models.py | Python | mit | 360 |
#
# SFA XML-RPC and SOAP interfaces
#
import string
import xmlrpclib
# SOAP support is optional
try:
import SOAPpy
from SOAPpy.Parser import parseSOAPRPC
from SOAPpy.Types import faultType
from SOAPpy.NS import NS
from SOAPpy.SOAPBuilder import buildSOAP
except ImportError:
SOAPpy = None
####################
#from sfa.util.faults import SfaNotImplemented, SfaAPIError, SfaInvalidAPIMethod, SfaFault
from sfa.util.faults import SfaInvalidAPIMethod, SfaAPIError, SfaFault
from sfa.util.sfalogging import logger
####################
# See "2.2 Characters" in the XML specification:
#
# #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD]
# avoiding
# [#x7F-#x84], [#x86-#x9F], [#xFDD0-#xFDDF]
# Build a translation table mapping every XML-invalid 7-bit control
# character (see "2.2 Characters" above) to '?'.  Python 2 only: map()
# returns a list and string.maketrans works on byte strings.
# NOTE(review): range(0xE, 0x1F) excludes chr(0x1F), which is also
# invalid in XML -- likely an off-by-one (upper bound should be 0x20).
invalid_xml_ascii = map(chr, range(0x0, 0x8) + [0xB, 0xC] + range(0xE, 0x1F))
xml_escape_table = string.maketrans("".join(invalid_xml_ascii), "?" * len(invalid_xml_ascii))
def xmlrpclib_escape(s, replace = string.replace):
    """
    xmlrpclib does not handle invalid 7-bit control characters. This
    function augments xmlrpclib.escape, which by default only replaces
    '&', '<', and '>' with entities.
    """

    # This is the standard xmlrpclib.escape function: the markup-significant
    # characters must become XML entities (the previous version replaced each
    # character with itself, a no-op that produced ill-formed XML).
    s = replace(s, "&", "&amp;")
    s = replace(s, "<", "&lt;")
    s = replace(s, ">", "&gt;")

    # Replace invalid 7-bit control characters with '?'
    return s.translate(xml_escape_table)
def xmlrpclib_dump(self, value, write):
    """
    xmlrpclib cannot marshal instances of subclasses of built-in
    types. This function overrides xmlrpclib.Marshaller.__dump so that
    any value that is an instance of one of its acceptable types is
    marshalled as that type.

    xmlrpclib also cannot handle invalid 7-bit control characters. See
    above.
    """

    # Use our escape function
    args = [self, value, write]
    if isinstance(value, (str, unicode)):
        args.append(xmlrpclib_escape)

    try:
        # Try for an exact match first
        f = self.dispatch[type(value)]
    except KeyError:
        # NOTE(review): this bare raise makes the isinstance() fallback
        # below unreachable -- subclass instances re-raise KeyError instead
        # of being marshalled as their base type, contradicting the
        # docstring. Confirm whether disabling the fallback was intentional.
        raise
        # Try for an isinstance() match
        for Type, f in self.dispatch.iteritems():
            if isinstance(value, Type):
                f(*args)
                return
        raise TypeError, "cannot marshal %s objects" % type(value)
    else:
        f(*args)

# You can't hide from me!
# Monkey-patch xmlrpclib's Marshaller to route through the dump above.
xmlrpclib.Marshaller._Marshaller__dump = xmlrpclib_dump
class XmlrpcApi:
"""
The XmlrpcApi class implements a basic xmlrpc (or soap) service
"""
protocol = None
def __init__ (self, encoding="utf-8", methods='sfa.methods'):
self.encoding = encoding
self.source = None
# flat list of method names
self.methods_module = methods_module = __import__(methods, fromlist=[methods])
self.methods = methods_module.all
self.logger = logger
def callable(self, method):
"""
Return a new instance of the specified method.
"""
# Look up method
if method not in self.methods:
raise SfaInvalidAPIMethod, method
# Get new instance of method
try:
classname = method.split(".")[-1]
module = __import__(self.methods_module.__name__ + "." + method, globals(), locals(), [classname])
callablemethod = getattr(module, classname)(self)
return getattr(module, classname)(self)
except (ImportError, AttributeError):
self.logger.log_exc("Error importing method: %s" % method)
raise SfaInvalidAPIMethod, method
def call(self, source, method, *args):
"""
Call the named method from the specified source with the
specified arguments.
"""
function = self.callable(method)
function.source = source
self.source = source
return function(*args)
def handle(self, source, data, method_map):
"""
Handle an XML-RPC or SOAP request from the specified source.
"""
# Parse request into method name and arguments
try:
interface = xmlrpclib
self.protocol = 'xmlrpclib'
(args, method) = xmlrpclib.loads(data)
if method_map.has_key(method):
method = method_map[method]
methodresponse = True
except Exception, e:
if SOAPpy is not None:
self.protocol = 'soap'
interface = SOAPpy
(r, header, body, attrs) = parseSOAPRPC(data, header = 1, body = 1, attrs = 1)
method = r._name
args = r._aslist()
# XXX Support named arguments
else:
raise e
try:
result = self.call(source, method, *args)
except SfaFault, fault:
result = fault
self.logger.log_exc("XmlrpcApi.handle has caught Exception")
except Exception, fault:
self.logger.log_exc("XmlrpcApi.handle has caught Exception")
result = SfaAPIError(fault)
# Return result
response = self.prepare_response(result, method)
return response
def prepare_response(self, result, method=""):
"""
convert result to a valid xmlrpc or soap response
"""
if self.protocol == 'xmlrpclib':
if not isinstance(result, SfaFault):
result = (result,)
response = xmlrpclib.dumps(result, methodresponse = True, encoding = self.encoding, allow_none = 1)
elif self.protocol == 'soap':
if isinstance(result, Exception):
result = faultParameter(NS.ENV_T + ":Server", "Method Failed", method)
result._setDetail("Fault %d: %s" % (result.faultCode, result.faultString))
else:
response = buildSOAP(kw = {'%sResponse' % method: {'Result': result}}, encoding = self.encoding)
else:
if isinstance(result, Exception):
raise result
return response
| yippeecw/sfa | sfa/server/xmlrpcapi.py | Python | mit | 6,069 |
from .base import *

import dj_database_url

# Debug is disabled only when the DEBUG env var is exactly the string 'False'.
# NOTE(review): any other value -- including an unset variable -- leaves
# DEBUG = True in this *production* settings module; confirm this is intended
# (a safer default would be False).
if os.environ.get('DEBUG') == 'False':
    DEBUG = False
else:
    DEBUG = True

# Optional local overrides; silently skipped when settings/local.py is absent
try:
    from .local import *
except ImportError:
    pass

# NOTE(review): accepts any Host header; relies on upstream routing checks
ALLOWED_HOSTS = ['*']

# Database configuration read from the DATABASE_URL environment variable
DATABASES = {'default': dj_database_url.config()}

# Yammer OAuth credentials (python-social-auth)
SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET')

# Static files served from S3 through django-pipeline; bucket name is
# required (KeyError at import time when missing).
AWS_STORAGE_BUCKET_NAME = os.environ['AWS_STORAGE_BUCKET_NAME']
STATICFILES_STORAGE = 'core.storage.S3PipelineManifestStorage'
STATIC_URL = 'http://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
AWS_QUERYSTRING_AUTH = False
AWS_S3_FILE_OVERWRITE = True

# Asset compressors (yuglify binary path is Heroku-specific)
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yuglify.YuglifyCompressor'
PIPELINE_YUGLIFY_BINARY = '/app/.heroku/python/bin/yuglify'
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2005-2012 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
##
""" Purchase wizard definition """
import datetime
import gtk
from kiwi.component import get_utility
from kiwi.currency import currency
from kiwi.datatypes import ValidationError
from kiwi.ui.objectlist import Column
from stoqlib.api import api
from stoqlib.domain.inventory import Inventory
from stoqlib.domain.payment.group import PaymentGroup
from stoqlib.domain.payment.payment import Payment
from stoqlib.domain.person import Branch, Supplier, Transporter
from stoqlib.domain.product import ProductSupplierInfo
from stoqlib.domain.purchase import PurchaseOrder, PurchaseItem
from stoqlib.domain.receiving import ReceivingOrder
from stoqlib.domain.sellable import Sellable
from stoqlib.domain.views import ProductFullStockItemSupplierView
from stoqlib.gui.base.dialogs import run_dialog
from stoqlib.gui.base.wizards import WizardEditorStep, BaseWizard
from stoqlib.gui.editors.purchaseeditor import PurchaseItemEditor
from stoqlib.gui.editors.personeditor import SupplierEditor, TransporterEditor
from stoqlib.gui.interfaces import IDomainSlaveMapper
from stoqlib.gui.wizards.personwizard import run_person_role_dialog
from stoqlib.gui.wizards.receivingwizard import ReceivingInvoiceStep
from stoqlib.gui.wizards.abstractwizard import SellableItemStep
from stoqlib.gui.search.sellablesearch import PurchaseSellableSearch
from stoqlib.gui.slaves.paymentmethodslave import SelectPaymentMethodSlave
from stoqlib.gui.slaves.paymentslave import register_payment_slaves
from stoqlib.gui.utils.printing import print_report
from stoqlib.lib.defaults import MAX_INT
from stoqlib.lib.dateutils import localtoday
from stoqlib.lib.translation import stoqlib_gettext
from stoqlib.lib.parameters import sysparam
from stoqlib.lib.permissions import PermissionManager
from stoqlib.lib.formatters import format_quantity, get_formatted_cost
from stoqlib.reporting.purchase import PurchaseOrderReport
_ = stoqlib_gettext
#
# Wizard Steps
#
class StartPurchaseStep(WizardEditorStep):
    """First purchase-wizard step: open date, supplier, branch and freight."""
    gladefile = 'StartPurchaseStep'
    model_type = PurchaseOrder
    proxy_widgets = ['open_date',
                     'identifier',
                     'supplier',
                     'branch',
                     'expected_freight',
                     ]

    def __init__(self, wizard, store, model):
        WizardEditorStep.__init__(self, store, wizard, model)
        # Hide supplier creation/edition buttons the user lacks permission for
        pm = PermissionManager.get_permission_manager()
        if not pm.can_create('Supplier'):
            self.add_supplier.hide()
        if not pm.can_edit('Supplier'):
            self.edit_supplier.hide()

    def _fill_supplier_combo(self):
        # Only active suppliers may be selected for a new order
        suppliers = Supplier.get_active_suppliers(self.store)
        self.edit_supplier.set_sensitive(any(suppliers))
        self.supplier.prefill(api.for_person_combo(suppliers))

    def _fill_branch_combo(self):
        branches = Branch.get_active_branches(self.store)
        self.branch.prefill(api.for_person_combo(branches))
        # In synchronized mode the branch cannot be changed
        sync_mode = api.sysparam.get_bool('SYNCHRONIZED_MODE')
        self.branch.set_sensitive(not sync_mode)

    def _setup_widgets(self):
        allow_outdated = sysparam.get_bool('ALLOW_OUTDATED_OPERATIONS')
        self.open_date.set_property('mandatory', True)
        self.open_date.set_sensitive(allow_outdated)
        self._fill_supplier_combo()
        self._fill_branch_combo()
        if self.model.freight_type == self.model_type.FREIGHT_FOB:
            self.fob_radio.set_active(True)
        else:
            self.cif_radio.set_active(True)
        self._update_widgets()

    def _update_widgets(self):
        # The expected freight value only applies to FOB orders
        has_freight = self.fob_radio.get_active()
        self.expected_freight.set_sensitive(has_freight)
        if self.cif_radio.get_active():
            self.model.freight_type = self.model_type.FREIGHT_CIF
        else:
            self.model.freight_type = self.model_type.FREIGHT_FOB

    def _run_supplier_dialog(self, supplier):
        # The supplier editor runs in its own transaction
        store = api.new_store()
        if supplier is not None:
            # NOTE(review): fetches self.model.supplier rather than the
            # *supplier* argument; they normally match because the combo is
            # proxied to the model, but confirm the intent.
            supplier = store.fetch(self.model.supplier)
        model = run_person_role_dialog(SupplierEditor, self.wizard, store,
                                       supplier)
        retval = store.confirm(model)
        if retval:
            # Bring the edited supplier into this step's store and select it
            model = self.store.fetch(model)
            self._fill_supplier_combo()
            self.supplier.select(model)
        store.close()

    def _add_supplier(self):
        self._run_supplier_dialog(supplier=None)

    def _edit_supplier(self):
        supplier = self.supplier.get_selected()
        self._run_supplier_dialog(supplier)

    #
    # WizardStep hooks
    #

    def post_init(self):
        self.open_date.grab_focus()
        self.register_validate_function(self.wizard.refresh_next)
        self.force_validation()

    def next_step(self):
        # Remember whether to offer all products or only the supplier's
        self.wizard.all_products = self.all_products.get_active()
        return PurchaseItemStep(self.wizard, self, self.store, self.model)

    def has_previous_step(self):
        # First step of the wizard
        return False

    def setup_proxies(self):
        self._setup_widgets()
        self.proxy = self.add_proxy(self.model,
                                    StartPurchaseStep.proxy_widgets)

    #
    # Kiwi callbacks
    #

    def on_fob_radio__toggled(self, *args):
        self._update_widgets()

    def on_add_supplier__clicked(self, button):
        self._add_supplier()

    def on_supplier__content_changed(self, supplier):
        self.edit_supplier.set_sensitive(bool(self.supplier.get_selected()))

    def on_edit_supplier__clicked(self, button):
        self._edit_supplier()

    def on_open_date__validate(self, widget, date):
        if sysparam.get_bool('ALLOW_OUTDATED_OPERATIONS'):
            return
        if date < localtoday().date():
            return ValidationError(
                _("Open date must be set to today or "
                  "a future date"))

    def on_expected_freight__validate(self, widget, value):
        if value < 0:
            return ValidationError(_(u'The expected freight value must be a '
                                     'positive number.'))
class PurchaseItemStep(SellableItemStep):
    """ Wizard step for purchase order's items selection """
    model_type = PurchaseOrder
    item_table = PurchaseItem
    summary_label_text = "<b>%s</b>" % api.escape(_('Total Ordered:'))
    sellable_editable = True
    item_editor = PurchaseItemEditor
    sellable_search = PurchaseSellableSearch

    def _set_expected_receival_date(self, item):
        # Suggest a receival date from the supplier's lead time, when known
        supplier = self.model.supplier
        product = item.sellable.product
        supplier_info = self.store.find(ProductSupplierInfo, product=product,
                                        supplier=supplier).one()
        if supplier_info is not None:
            delta = datetime.timedelta(days=supplier_info.lead_time)
            expected_receival = self.model.open_date + delta
            item.expected_receival_date = expected_receival

    #
    # Helper methods
    #

    def get_sellable_view_query(self):
        supplier = self.model.supplier
        if self.wizard.all_products:
            supplier = None
        # If we our query includes the supplier, we must use another viewable,
        # that actually joins with that table
        if supplier:
            viewable = ProductFullStockItemSupplierView
        else:
            viewable = self.sellable_view
        query = Sellable.get_unblocked_sellables_query(self.store, supplier=supplier,
                                                       consigned=self.model.consigned)
        return viewable, query

    def setup_slaves(self):
        SellableItemStep.setup_slaves(self)
        self.hide_add_button()
        self.cost.set_editable(True)
        self.quantity.connect('validate', self._on_quantity__validate)
        self.slave.klist.connect('selection-changed',
                                 self._on_klist_selection_changed)

    #
    # SellableItemStep virtual methods
    #

    def validate(self, value):
        SellableItemStep.validate(self, value)
        # Only allow advancing when the order has a positive total
        can_purchase = self.model.purchase_total > 0
        self.wizard.refresh_next(value and can_purchase)

    def get_order_item(self, sellable, cost, quantity, batch=None):
        assert batch is None
        # Associate the product with the supplier if they are not yet. This
        # happens when the user checked the option to show all products on the
        # first step
        supplier_info = self._get_supplier_info()
        if not supplier_info:
            supplier_info = ProductSupplierInfo(product=sellable.product,
                                                supplier=self.model.supplier,
                                                store=self.store)
            supplier_info.base_cost = cost
        item = self.model.add_item(sellable, quantity)
        self._set_expected_receival_date(item)
        item.cost = cost
        return item

    def get_saved_items(self):
        return list(self.model.get_items())

    def sellable_selected(self, sellable, batch=None):
        super(PurchaseItemStep, self).sellable_selected(sellable, batch=batch)
        supplier_info = self._get_supplier_info()
        if not supplier_info:
            return
        # Enforce the supplier's minimum purchase quantity as the lower bound
        minimum = supplier_info.minimum_purchase
        self.quantity.set_adjustment(gtk.Adjustment(lower=minimum,
                                                    upper=MAX_INT,
                                                    step_incr=1))
        self.quantity.set_value(minimum)
        self.cost.set_value(supplier_info.base_cost)

    def get_sellable_search_extra_kwargs(self):
        return dict(supplier=self.model.supplier)

    def get_columns(self):
        return [
            Column('sellable.code', title=_('Code'), width=100, data_type=str),
            Column('sellable.description', title=_('Description'),
                   data_type=str, width=250, searchable=True, expand=True),
            Column('sellable.category_description', title=_('Category'),
                   data_type=str, searchable=True, visible=False),
            Column('quantity', title=_('Quantity'), data_type=float, width=90,
                   format_func=format_quantity),
            Column('expected_receival_date', title=_('Expected Receival'),
                   data_type=datetime.date, visible=False),
            Column('sellable.unit_description', title=_('Unit'), data_type=str,
                   width=70),
            Column('cost', title=_('Cost'), data_type=currency,
                   format_func=get_formatted_cost, width=90),
            Column('total', title=_('Total'), data_type=currency, width=100),
        ]

    #
    # WizardStep hooks
    #

    def next_step(self):
        # Consigned orders skip the payment step entirely
        if self.model.consigned:
            return FinishPurchaseStep(self.store, self.wizard, self.model, self)
        return PurchasePaymentStep(self.wizard, self, self.store, self.model)

    #
    # Private API
    #

    def _get_supplier_info(self):
        sellable = self.proxy.model.sellable
        if not sellable:
            # FIXME: We should not be accessing a private method here
            sellable, batch = self._get_sellable_and_batch()
            if not sellable:
                return
        product = sellable.product
        supplier = self.model.supplier
        return self.store.find(ProductSupplierInfo, product=product,
                               supplier=supplier).one()

    #
    # Callbacks
    #

    def _on_quantity__validate(self, widget, value):
        if not self.proxy.model.sellable:
            return
        supplier_info = self._get_supplier_info()
        if supplier_info and value < supplier_info.minimum_purchase:
            return ValidationError(_(u'Quantity below the minimum required '
                                     'by the supplier'))
        return super(PurchaseItemStep,
                     self).on_quantity__validate(widget, value)

    def _on_klist_selection_changed(self, klist, data):
        # Items that were already (even partially) received cannot be removed
        can_delete = all(product.quantity_received == 0 for product in data)
        self.slave.delete_button.set_sensitive(can_delete)
class PurchasePaymentStep(WizardEditorStep):
    """Wizard step to configure how the purchase will be paid.

    The step edits the order's PaymentGroup; the concrete payment slave is
    swapped in whenever the selected payment method changes.
    """
    gladefile = 'PurchasePaymentStep'
    model_type = PaymentGroup

    def __init__(self, wizard, previous, store, model,
                 outstanding_value=currency(0)):
        # *model* is the PurchaseOrder; the step's model is its PaymentGroup
        self.order = model
        self.slave = None
        self.discount_surcharge_slave = None
        self.outstanding_value = outstanding_value
        if not model.payments.count():
            # Default values
            self._installments_number = None
            self._first_duedate = None
            self._method = 'bill'
        else:
            # Re-editing: seed defaults from the existing payments
            # FIXME: SqlObject returns count as long, but we need it as int.
            self._installments_number = int(model.payments.count())
            self._method = model.payments[0].method.method_name
            # due_date is datetime.datetime. Converting it to datetime.date
            due_date = model.payments[0].due_date.date()
            self._first_duedate = (due_date >= localtoday().date() and
                                   due_date or None)
        WizardEditorStep.__init__(self, store, wizard, model.group, previous)

    def _setup_widgets(self):
        register_payment_slaves()
        self._ms = SelectPaymentMethodSlave(store=self.store,
                                            payment_type=Payment.TYPE_OUT,
                                            default_method=self._method,
                                            no_payments=True)
        self._ms.connect_after('method-changed',
                               self._after_method_select__method_changed)
        self.attach_slave('method_select_holder', self._ms)
        self._update_payment_method_slave()

    def _set_method_slave(self):
        """Sets the payment method slave"""
        method = self._ms.get_selected_method()
        if not method:
            return
        # The concrete slave class is registered per payment method
        domain_mapper = get_utility(IDomainSlaveMapper)
        slave_class = domain_mapper.get_slave_class(method)
        if slave_class:
            self.wizard.payment_group = self.model
            self.slave = slave_class(self.wizard, self,
                                     self.store, self.order, method,
                                     outstanding_value=self.outstanding_value,
                                     first_duedate=self._first_duedate,
                                     installments_number=self._installments_number)
            self.attach_slave('method_slave_holder', self.slave)

    def _update_payment_method_slave(self):
        """Updates the payment method slave """
        holder_name = 'method_slave_holder'
        if self.get_slave(holder_name):
            self.slave.get_toplevel().hide()
            self.detach_slave(holder_name)
            self.slave = None

        # remove all payments created last time, if any
        self.model.clear_unused()
        if not self.slave:
            self._set_method_slave()

    #
    # WizardStep hooks
    #

    def validate_step(self):
        # Delegate validation to the active method slave, when there is one
        if self.slave:
            return self.slave.finish()
        return True

    def next_step(self):
        return FinishPurchaseStep(self.store, self.wizard, self.order, self)

    def post_init(self):
        self.model.clear_unused()
        self.main_box.set_focus_chain([self.method_select_holder,
                                       self.method_slave_holder])
        self.register_validate_function(self.wizard.refresh_next)
        self.force_validation()

    def setup_proxies(self):
        self._setup_widgets()

    #
    # callbacks
    #

    def _after_method_select__method_changed(self, slave, method):
        self._update_payment_method_slave()
class FinishPurchaseStep(WizardEditorStep):
    """Final step: salesperson, receival date, transporter and notes.

    Optionally creates a ReceivingOrder when the user chooses to receive
    the products immediately.
    """
    gladefile = 'FinishPurchaseStep'
    model_type = PurchaseOrder
    proxy_widgets = ('salesperson_name',
                     'expected_receival_date',
                     'transporter',
                     'notes')

    def _setup_transporter_entry(self):
        self.add_transporter.set_tooltip_text(_("Add a new transporter"))
        self.edit_transporter.set_tooltip_text(_("Edit the selected transporter"))
        items = Transporter.get_active_transporters(self.store)
        self.transporter.prefill(api.for_person_combo(items))
        self.transporter.set_sensitive(not items.is_empty())
        self.edit_transporter.set_sensitive(not items.is_empty())

    def _set_receival_date_suggestion(self):
        # Suggest the latest expected receival date among the order's items
        receival_date = self.model.get_items().max(PurchaseItem.expected_receival_date)
        if receival_date:
            self.expected_receival_date.update(receival_date)

    def _setup_focus(self):
        self.salesperson_name.grab_focus()
        self.notes.set_accepts_tab(False)

    def _create_receiving_order(self):
        # since we will create a new receiving order, we should confirm the
        # purchase first. Note that the purchase may already be confirmed
        if self.model.status in [PurchaseOrder.ORDER_PENDING,
                                 PurchaseOrder.ORDER_CONSIGNED]:
            self.model.confirm()
        receiving_model = ReceivingOrder(
            responsible=api.get_current_user(self.store),
            supplier=self.model.supplier,
            branch=self.model.branch,
            transporter=self.model.transporter,
            invoice_number=None,
            store=self.store)
        receiving_model.add_purchase(self.model)
        # Creates ReceivingOrderItem's
        for item in self.model.get_pending_items():
            receiving_model.add_purchase_item(item)
        self.wizard.receiving_model = receiving_model

    #
    # WizardStep hooks
    #

    def has_next_step(self):
        # Only the "receive now" path continues into the receiving step
        return self.receive_now.get_active()

    def next_step(self):
        # In case the check box for receiving the products now is not active,
        # This is the last step.
        if not self.receive_now.get_active():
            return
        self._create_receiving_order()
        return ReceivingInvoiceStep(self.store, self.wizard,
                                    self.wizard.receiving_model)

    def post_init(self):
        # A receiving model was created. We should remove it (and its items),
        # since after this step we can either receive the products now or
        # later, on the stock application.
        receiving_model = self.wizard.receiving_model
        if receiving_model:
            for item in receiving_model.get_items():
                self.store.remove(item)
            self.store.remove(receiving_model)
            self.wizard.receiving_model = None
        self.salesperson_name.grab_focus()
        self._set_receival_date_suggestion()
        if self.model.has_batch_item():
            # Receiving now is unavailable for orders with batch-controlled
            # items (the option is hidden)
            self.receive_now.hide()
        self.register_validate_function(self.wizard.refresh_next)
        self.force_validation()

    def setup_proxies(self):
        # Avoid changing widget states in __init__, so that plugins have a
        # chance to override the default settings
        has_open_inventory = Inventory.has_open(self.store,
                                                api.get_current_branch(self.store))
        self.receive_now.set_sensitive(not bool(has_open_inventory))
        self._setup_focus()
        self._setup_transporter_entry()
        self.proxy = self.add_proxy(self.model, self.proxy_widgets)

    def _run_transporter_editor(self, transporter=None):
        # The transporter editor runs in its own transaction
        store = api.new_store()
        transporter = store.fetch(transporter)
        model = run_person_role_dialog(TransporterEditor, self.wizard, store,
                                       transporter)
        rv = store.confirm(model)
        store.close()
        if rv:
            self._setup_transporter_entry()
            model = self.store.fetch(model)
            self.transporter.select(model)

    def on_expected_receival_date__validate(self, widget, date):
        if sysparam.get_bool('ALLOW_OUTDATED_OPERATIONS'):
            return
        if date < localtoday().date():
            return ValidationError(_("Expected receival date must be set to a future date"))

    def on_add_transporter__clicked(self, button):
        self._run_transporter_editor()

    def on_edit_transporter__clicked(self, button):
        self._run_transporter_editor(self.transporter.get_selected())

    def on_transporter__content_changed(self, category):
        self.edit_transporter.set_sensitive(bool(self.transporter.get_selected()))

    def on_receive_now__toggled(self, widget):
        # Receiving now continues into the invoice step, so the wizard cannot
        # finish directly from this step while the box is checked
        if self.receive_now.get_active():
            self.wizard.disable_finish()
        else:
            self.wizard.enable_finish()

    def on_print_button__clicked(self, button):
        print_report(PurchaseOrderReport, self.model)
#
# Main wizard
#
class PurchaseWizard(BaseWizard):
    """Wizard to create or edit a pending/confirmed purchase order."""
    size = (775, 400)
    help_section = 'purchase-new'
    need_cancel_confirmation = True

    def __init__(self, store, model=None, edit_mode=False):
        title = self._get_title(model)
        model = model or self._create_model(store)
        # Should we show all products or only the ones associated with the
        # selected supplier?
        self.all_products = False
        # If we receive the order right after the purchase.
        self.receiving_model = None
        purchase_edit = [PurchaseOrder.ORDER_CONFIRMED,
                         PurchaseOrder.ORDER_PENDING]
        if not model.status in purchase_edit:
            raise ValueError('Invalid order status. It should '
                             'be ORDER_PENDING or ORDER_CONFIRMED')
        first_step = StartPurchaseStep(self, store, model)
        BaseWizard.__init__(self, store, first_step, model, title=title,
                            edit_mode=edit_mode)

    def _get_title(self, model=None):
        if not model:
            return _('New Order')
        return _('Edit Order')

    def _create_model(self, store):
        # Build a fresh pending order owned by the current user/branch
        supplier_id = sysparam.get_object_id('SUGGESTED_SUPPLIER')
        branch = api.get_current_branch(store)
        group = PaymentGroup(store=store)
        status = PurchaseOrder.ORDER_PENDING
        return PurchaseOrder(supplier_id=supplier_id,
                             responsible=api.get_current_user(store),
                             branch=branch,
                             status=status,
                             group=group,
                             store=store)

    #
    # WizardStep hooks
    #

    def finish(self):
        self.retval = self.model
        if self.receiving_model:
            # Confirming the receiving will close the purchase
            self.receiving_model.confirm()
        self.close()
        if sysparam.get_bool('UPDATE_PRODUCTS_COST_ON_PURCHASE'):
            self.model.update_products_cost()
def test(): # pragma nocover
    """Interactive smoke test: run the wizard against a test database."""
    creator = api.prepare_test()
    retval = run_dialog(PurchaseWizard, None, creator.store)
    creator.store.confirm(retval)


if __name__ == '__main__': # pragma nocover
    test()
| andrebellafronte/stoq | stoqlib/gui/wizards/purchasewizard.py | Python | gpl-2.0 | 23,998 |
from optparse import OptionParser
from uuid import uuid4
import re
def main():
    """Convert MediaWiki markup from the files given on the command line
    into Asciidoc markup written to stdout (Python 2 script).

    Handles <pre> and <graphviz> tag listings, whitespace-indented block
    listings, quote conversion, category removal and wiki-link rewriting.
    """
    parser = OptionParser()
    #parser.add_option("--skip-rows", type="int", dest="skip_rows", default="0", help="Number of lines/rows to skip when using --lines or --csv. Default: %default")
    (options, arguments) = parser.parse_args()

    # Asciidoc delimiter line emitted around listing blocks.
    listing_markup = "------------------------------------------------------------------\n"
    for path in arguments:
        # State flags: inside a <pre> listing, inside an indented block
        # listing, inside a <graphviz> listing.
        tag_listing = False
        block_listing = False
        graphviz_listing = False
        for markup in open(path, "r"):
            if re.search("<pre>", markup):
                tag_listing = True
                markup = listing_markup
            elif re.search("</pre>", markup):
                tag_listing = False
                markup = listing_markup
            elif re.search("<graphviz>", markup):
                graphviz_listing = True
                # Emit a placeholder directive with a unique image filename.
                print "[\"graphviz\", \"%s.png\"]" % str(uuid4())
                markup = listing_markup
            elif re.search("</graphviz>", markup):
                graphviz_listing = False
                markup = listing_markup
            elif markup.startswith(" ") and not block_listing and not tag_listing:
                # In MediaWiki a leading space starts a block listing.
                print listing_markup
                block_listing = True
                # Drop the single leading space that marked the listing.
                markup = markup[1:]
            elif not markup.startswith(" ") and block_listing:
                print listing_markup
                block_listing = False
            if not tag_listing and not block_listing and not graphviz_listing:
                # Convert varying numbers of single-quotes to Asciidoc "emphasis"
                markup = re.sub("('{2,5})([^']+('[^']+)*)\\1", "'\\2'", markup)
                # Markup double-quotes
                markup = re.sub("\"([^\"]*)\"", "``\\1''", markup)
                # Eliminate article categories
                markup = re.sub("\\[\\[Category:[^\]]*\\]\\]", "", markup)
                # Convert wiki links to Asciidoc links
                markup = re.sub("\\[\\[([^\]]*)\\]\\]", "<<\\1,\\1>>", markup)
            # Trailing comma: the line already carries its own newline.
            print markup,

if __name__ == "__main__":
    main()
| barche/k3d | share/k3d/guide/content/wiki2asciidoc.py | Python | gpl-2.0 | 1,764 |
# -*- coding: utf-8 -*-
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""mod_python->WSGI Framework"""
import sys
import os
import re
import cgi
import inspect
from fnmatch import fnmatch
from urlparse import urlparse, urlunparse
from wsgiref.validate import validator
from wsgiref.util import FileWrapper, guess_scheme
if __name__ != "__main__":
# Chances are that we are inside mod_wsgi.
## You can't write to stdout in mod_wsgi, but some of our
## dependecies do this! (e.g. 4Suite)
sys.stdout = sys.stderr
from invenio.session import get_session
from invenio.webinterface_handler import CFG_HAS_HTTPS_SUPPORT, CFG_FULL_HTTPS
from invenio.webinterface_layout import invenio_handler
from invenio.webinterface_handler_wsgi_utils import table, FieldStorage
from invenio.webinterface_handler_config import \
HTTP_STATUS_MAP, SERVER_RETURN, OK, DONE, \
HTTP_NOT_FOUND, HTTP_INTERNAL_SERVER_ERROR
from invenio.config import CFG_WEBDIR, CFG_SITE_LANG, \
CFG_WEBSTYLE_HTTP_STATUS_ALERT_LIST, CFG_DEVEL_SITE, CFG_SITE_URL, \
CFG_SITE_SECURE_URL, CFG_WEBSTYLE_REVERSE_PROXY_IPS
from invenio.errorlib import register_exception, get_pretty_traceback
## Static files are usually handled directly by the webserver (e.g. Apache)
## However in case WSGI is required to handle static files too (such
## as when running wsgiref simple server), then this flag can be
## turned on (it is done automatically by wsgi_handler_test).
CFG_WSGI_SERVE_STATIC_FILES = False
## Magic regexp to search for usage of CFG_SITE_URL within src/href or
## any src usage of an external website
_RE_HTTPS_REPLACES = re.compile(r"\b((?:src\s*=|url\s*\()\s*[\"']?)http\://", re.I)
## Regexp to verify that the IP starts with a number (filter cases where 'unknown')
## It is faster to verify only the start (585 ns) compared with verifying
## the whole ip address - re.compile('^\d+\.\d+\.\d+\.\d+$') (1.01 µs)
_RE_IPADDRESS_START = re.compile("^\d+\.")
def _http_replace_func(match):
    """re.sub callback: route an http:// src/url through the sslredirect."""
    ## src external_site -> CFG_SITE_SECURE_URL/sslredirect/external_site
    return match.group(1) + CFG_SITE_SECURE_URL + '/sslredirect/'
_ESCAPED_CFG_SITE_URL = cgi.escape(CFG_SITE_URL, True)
_ESCAPED_CFG_SITE_SECURE_URL = cgi.escape(CFG_SITE_SECURE_URL, True)
def https_replace(html):
    """Rewrite plain-http references in *html* so the page stays on HTTPS."""
    # First replace direct occurrences of the plain site URL, then route
    # remaining external http:// src/url references through sslredirect.
    html = html.replace(_ESCAPED_CFG_SITE_URL, _ESCAPED_CFG_SITE_SECURE_URL)
    return _RE_HTTPS_REPLACES.sub(_http_replace_func, html)
class InputProcessed(object):
    """
    Stand-in for a WSGI input stream whose content has already been read.

    Any further attempt to read from it fails loudly with EOFError rather
    than silently blocking or returning empty data.
    @see: <http://www.wsgi.org/wsgi/Specifications/handling_post_forms>.
    """

    def _already_consumed(self, *args):
        raise EOFError('The wsgi.input stream has already been consumed')

    read = _already_consumed
    readline = _already_consumed
    readlines = _already_consumed
    __iter__ = _already_consumed
class SimulatedModPythonRequest(object):
    """
    mod_python like request object.
    Minimum and cleaned implementation to make moving out of mod_python
    easy.
    @see: <http://www.modpython.org/live/current/doc-html/pyapi-mprequest.html>
    """
    def __init__(self, environ, start_response):
        """Wrap a WSGI (environ, start_response) pair in a mod_python-like API."""
        self.__environ = environ
        self.__start_response = start_response
        self.__response_sent_p = False
        self.__buffer = ''
        self.__low_level_headers = []
        self.__headers = table(self.__low_level_headers)
        # mod_python exposes headers_out.add(); map it to add_header().
        self.__headers.add = self.__headers.add_header
        self.__status = "200 OK"
        self.__filename = None
        self.__disposition_type = None
        self.__bytes_sent = 0
        self.__allowed_methods = []
        self.__cleanups = []
        self.headers_out = self.__headers
        ## See: <http://www.python.org/dev/peps/pep-0333/#the-write-callable>
        self.__write = None
        self.__write_error = False
        self.__errors = environ['wsgi.errors']
        self.__headers_in = table([])
        # __tainted becomes True once the environ can no longer be reused
        # as-is (input consumed or output started).
        self.__tainted = False
        self.__is_https = int(guess_scheme(self.__environ) == 'https')
        self.__replace_https = False
        self.track_writings = False
        self.__what_was_written = ""
        # Rebuild incoming HTTP headers from the CGI-style environ keys.
        for key, value in environ.iteritems():
            if key.startswith('HTTP_'):
                self.__headers_in[key[len('HTTP_'):].replace('_', '-')] = value
        if environ.get('CONTENT_LENGTH'):
            self.__headers_in['content-length'] = environ['CONTENT_LENGTH']
        if environ.get('CONTENT_TYPE'):
            self.__headers_in['content-type'] = environ['CONTENT_TYPE']

    def get_wsgi_environ(self):
        return self.__environ

    def get_post_form(self):
        """Parse (once) and return the POSTed form as a FieldStorage.

        The parsed form is cached in the environ so a second call returns
        the same object; the raw input stream is replaced by InputProcessed
        when it has been consumed.
        """
        self.__tainted = True
        post_form = self.__environ.get('wsgi.post_form')
        input = self.__environ['wsgi.input']
        if (post_form is not None
            and post_form[0] is input):
            return post_form[2]
        # This must be done to avoid a bug in cgi.FieldStorage
        self.__environ.setdefault('QUERY_STRING', '')
        ## Video handler hack:
        uri = self.__environ['PATH_INFO']
        if uri.endswith("upload_video"):
            tmp_shared = True
        else:
            tmp_shared = False
        fs = FieldStorage(self, keep_blank_values=1, to_tmp_shared=tmp_shared)
        if fs.wsgi_input_consumed:
            new_input = InputProcessed()
            post_form = (new_input, input, fs)
            self.__environ['wsgi.post_form'] = post_form
            self.__environ['wsgi.input'] = new_input
        else:
            post_form = (input, None, fs)
            self.__environ['wsgi.post_form'] = post_form
        return fs

    def get_response_sent_p(self):
        return self.__response_sent_p

    def get_low_level_headers(self):
        return self.__low_level_headers

    def get_buffer(self):
        return self.__buffer

    def write(self, string, flush=1):
        """Buffer *string* (encoding unicode as UTF-8); flush by default."""
        if isinstance(string, unicode):
            self.__buffer += string.encode('utf8')
        else:
            self.__buffer += string
        if flush:
            self.flush()

    def flush(self):
        """Send the HTTP headers (if not yet sent) and the buffered body."""
        self.send_http_header()
        if self.__buffer:
            self.__bytes_sent += len(self.__buffer)
            try:
                if not self.__write_error:
                    if self.__replace_https:
                        self.__write(https_replace(self.__buffer))
                    else:
                        self.__write(self.__buffer)
                    if self.track_writings:
                        if self.__replace_https:
                            self.__what_was_written += https_replace(self.__buffer)
                        else:
                            self.__what_was_written += self.__buffer
            except IOError, err:
                if "failed to write data" in str(err) or "client connection closed" in str(err):
                    ## Let's just log this exception without alerting the admin:
                    register_exception(req=self)
                    self.__write_error = True ## This flag is there just
                        ## to not report later other errors to the admin.
                else:
                    raise
            self.__buffer = ''

    def set_content_type(self, content_type):
        self.__headers['content-type'] = content_type
        # On HTTPS pages, rewrite plain-http links in HTML/RSS output.
        if self.__is_https:
            if content_type.startswith("text/html") or content_type.startswith("application/rss+xml"):
                self.__replace_https = True

    def get_content_type(self):
        return self.__headers['content-type']

    def send_http_header(self):
        """Call start_response once with the accumulated status and headers."""
        if not self.__response_sent_p:
            self.__tainted = True
            if self.__allowed_methods and self.__status.startswith('405 ') or self.__status.startswith('501 '):
                self.__headers['Allow'] = ', '.join(self.__allowed_methods)
            ## See: <http://www.python.org/dev/peps/pep-0333/#the-write-callable>
            #print self.__low_level_headers
            self.__write = self.__start_response(self.__status, self.__low_level_headers)
            self.__response_sent_p = True
            #print "Response sent: %s" % self.__headers

    def get_unparsed_uri(self):
        return '?'.join([self.__environ['PATH_INFO'], self.__environ['QUERY_STRING']])

    def get_uri(self):
        return self.__environ['PATH_INFO']

    def get_headers_in(self):
        return self.__headers_in

    def get_subprocess_env(self):
        return self.__environ

    def add_common_vars(self):
        # mod_python API compatibility stub: the WSGI environ already
        # contains the common CGI variables.
        pass

    def get_args(self):
        return self.__environ['QUERY_STRING']

    def get_remote_ip(self):
        """Return the client IP, unwrapping X-Forwarded-For behind a
        trusted reverse proxy; fall back to REMOTE_ADDR otherwise."""
        if 'X-FORWARDED-FOR' in self.__headers_in and \
               self.__headers_in.get('X-FORWARDED-SERVER', '') == \
               self.__headers_in.get('X-FORWARDED-HOST', '') == \
               urlparse(CFG_SITE_URL)[1]:
            # we are using proxy setup
            if self.__environ.get('REMOTE_ADDR') in CFG_WEBSTYLE_REVERSE_PROXY_IPS:
                # we trust this proxy
                ip_list = self.__headers_in['X-FORWARDED-FOR'].split(',')
                for ip in ip_list:
                    if _RE_IPADDRESS_START.match(ip):
                        return ip
                # no IP has the correct format, return a default IP
                return '10.0.0.10'
            else:
                # we don't trust this proxy
                register_exception(prefix="You are running in a proxy configuration, but the " + \
                                   "CFG_WEBSTYLE_REVERSE_PROXY_IPS variable does not contain " + \
                                   "the IP of your proxy, thus the remote IP addresses of your " + \
                                   "clients are not trusted. Please configure this variable.",
                                   alert_admin=True)
                return '10.0.0.11'
        return self.__environ.get('REMOTE_ADDR')

    def get_remote_host(self):
        return self.__environ.get('REMOTE_HOST')

    def get_header_only(self):
        return self.__environ['REQUEST_METHOD'] == 'HEAD'

    def set_status(self, status):
        self.__status = '%s %s' % (status, HTTP_STATUS_MAP.get(int(status), 'Explanation not available'))

    def get_status(self):
        return int(self.__status.split(' ')[0])

    def get_wsgi_status(self):
        return self.__status

    def sendfile(self, path, offset=0, the_len=-1):
        """Stream the file at *path* to the client, starting at *offset*
        and sending at most *the_len* bytes (all of it when negative).
        Returns the total number of bytes sent on this request."""
        try:
            self.send_http_header()
            file_to_send = open(path)
            file_to_send.seek(offset)
            file_wrapper = FileWrapper(file_to_send)
            count = 0
            if the_len < 0:
                for chunk in file_wrapper:
                    count += len(chunk)
                    self.__bytes_sent += len(chunk)
                    self.__write(chunk)
            else:
                for chunk in file_wrapper:
                    if the_len >= len(chunk):
                        the_len -= len(chunk)
                        count += len(chunk)
                        self.__bytes_sent += len(chunk)
                        self.__write(chunk)
                    else:
                        count += the_len
                        self.__bytes_sent += the_len
                        self.__write(chunk[:the_len])
                        break
        except IOError, err:
            if "failed to write data" in str(err) or "client connection closed" in str(err):
                ## Let's just log this exception without alerting the admin:
                register_exception(req=self)
            else:
                raise
        return self.__bytes_sent

    def set_content_length(self, content_length):
        if content_length is not None:
            self.__headers['content-length'] = str(content_length)
        else:
            del self.__headers['content-length']

    def is_https(self):
        return self.__is_https

    def get_method(self):
        return self.__environ['REQUEST_METHOD']

    def get_hostname(self):
        return self.__environ.get('HTTP_HOST', '')

    def set_filename(self, filename):
        """Set the download filename; defaults the disposition to inline."""
        self.__filename = filename
        if self.__disposition_type is None:
            self.__disposition_type = 'inline'
        self.__headers['content-disposition'] = '%s; filename=%s' % (self.__disposition_type, self.__filename)

    def set_encoding(self, encoding):
        if encoding:
            self.__headers['content-encoding'] = str(encoding)
        else:
            del self.__headers['content-encoding']

    def get_bytes_sent(self):
        return self.__bytes_sent

    def log_error(self, message):
        self.__errors.write(message.strip() + '\n')

    def get_content_type_set_p(self):
        return bool(self.__headers['content-type'])

    def allow_methods(self, methods, reset=0):
        """Extend (or, with reset=1, replace) the allowed-method list used
        for the Allow header on 405/501 responses."""
        if reset:
            self.__allowed_methods = []
        self.__allowed_methods += [method.upper().strip() for method in methods]

    def get_allowed_methods(self):
        return self.__allowed_methods

    def readline(self, hint=None):
        try:
            return self.__environ['wsgi.input'].readline(hint)
        except TypeError:
            ## the hint param is not part of wsgi pep, although
            ## it's great to exploit it in when reading FORM
            ## with large files, in order to avoid filling up the memory
            ## Too bad it's not there :-(
            return self.__environ['wsgi.input'].readline()

    def readlines(self, hint=None):
        return self.__environ['wsgi.input'].readlines(hint)

    def read(self, hint=None):
        return self.__environ['wsgi.input'].read(hint)

    def register_cleanup(self, callback, data=None):
        # Cleanups are run by application() in a finally block.
        self.__cleanups.append((callback, data))

    def get_cleanups(self):
        return self.__cleanups

    def get_referer(self):
        return self.headers_in.get('referer')

    def get_what_was_written(self):
        # Only populated when track_writings is True.
        return self.__what_was_written

    def __str__(self):
        from pprint import pformat
        out = ""
        for key in dir(self):
            try:
                if not callable(getattr(self, key)) and not key.startswith("_SimulatedModPythonRequest") and not key.startswith('__'):
                    out += 'req.%s: %s\n' % (key, pformat(getattr(self, key)))
            except:
                pass
        return out

    def get_original_wsgi_environment(self):
        """
        Return the original WSGI environment used to initialize this request
        object.
        @return: environ, start_response
        @raise AssertionError: in case the environment has been altered, i.e.
            either the input has been consumed or something has already been
            written to the output.
        """
        assert not self.__tainted, "The original WSGI environment is tainted since at least req.write or req.form has been used."
        return self.__environ, self.__start_response

    content_type = property(get_content_type, set_content_type)
    unparsed_uri = property(get_unparsed_uri)
    uri = property(get_uri)
    headers_in = property(get_headers_in)
    subprocess_env = property(get_subprocess_env)
    args = property(get_args)
    header_only = property(get_header_only)
    status = property(get_status, set_status)
    method = property(get_method)
    hostname = property(get_hostname)
    filename = property(fset=set_filename)
    encoding = property(fset=set_encoding)
    bytes_sent = property(get_bytes_sent)
    content_type_set_p = property(get_content_type_set_p)
    allowed_methods = property(get_allowed_methods)
    response_sent_p = property(get_response_sent_p)
    form = property(get_post_form)
    remote_ip = property(get_remote_ip)
    remote_host = property(get_remote_host)
    referer = property(get_referer)
    what_was_written = property(get_what_was_written)
def alert_admin_for_server_status_p(status, referer):
    """
    Decide whether the given HTTP status should alert the administrator.

    Each entry of CFG_WEBSTYLE_HTTP_STATUS_ALERT_LIST is an fnmatch
    pattern such as "404" or "5*"; a trailing "r" (e.g. "404 r") means
    the alert only fires when a referer is present.
    """
    status_text = str(status)
    for raw_pattern in CFG_WEBSTYLE_HTTP_STATUS_ALERT_LIST:
        pattern = raw_pattern.lower()
        needs_referer = pattern.endswith('r')
        if needs_referer:
            ## e.g. "404 r" -> "404"
            pattern = pattern[:-1].strip()
        if not fnmatch(status_text, pattern):
            continue
        if referer or not needs_referer:
            return True
    return False
def application(environ, start_response):
    """
    Entry point for wsgi.

    Dispatches to the legacy mod_python publisher, the static file
    streamer, or the main Invenio handler; translates SERVER_RETURN and
    unexpected exceptions into error pages; always runs registered
    request cleanups.
    """
    ## Needed for mod_wsgi, see: <http://code.google.com/p/modwsgi/wiki/ApplicationIssues>
    req = SimulatedModPythonRequest(environ, start_response)
    #print 'Starting mod_python simulation'
    try:
        try:
            possible_module, possible_handler = is_mp_legacy_publisher_path(environ['PATH_INFO'])
            if possible_module is not None:
                mp_legacy_publisher(req, possible_module, possible_handler)
            elif CFG_WSGI_SERVE_STATIC_FILES:
                possible_static_path = is_static_path(environ['PATH_INFO'])
                if possible_static_path is not None:
                    from invenio.bibdocfile import stream_file
                    stream_file(req, possible_static_path)
                else:
                    ret = invenio_handler(req)
            else:
                ret = invenio_handler(req)
            req.flush()
        except SERVER_RETURN, status:
            # Handlers raise SERVER_RETURN with an HTTP status code.
            status = int(str(status))
            if status not in (OK, DONE):
                req.status = status
                req.headers_out['content-type'] = 'text/html'
                admin_to_be_alerted = alert_admin_for_server_status_p(status,
                                                  req.headers_in.get('referer'))
                if admin_to_be_alerted:
                    register_exception(req=req, alert_admin=True)
                if not req.response_sent_p:
                    start_response(req.get_wsgi_status(), req.get_low_level_headers(), sys.exc_info())
                return generate_error_page(req, admin_to_be_alerted)
            else:
                req.flush()
    except:
        # Unexpected exception: log it and show a 500 page (with the
        # traceback when CFG_DEVEL_SITE is enabled).
        register_exception(req=req, alert_admin=True)
        if not req.response_sent_p:
            req.status = HTTP_INTERNAL_SERVER_ERROR
            req.headers_out['content-type'] = 'text/html'
            start_response(req.get_wsgi_status(), req.get_low_level_headers(), sys.exc_info())
            if CFG_DEVEL_SITE:
                return ["<pre>%s</pre>" % cgi.escape(get_pretty_traceback(req=req, exc_info=sys.exc_info()))]
                from cgitb import html
                return [html(sys.exc_info())]
            return generate_error_page(req)
        else:
            return generate_error_page(req, page_already_started=True)
    finally:
        for (callback, data) in req.get_cleanups():
            callback(data)
    return []
def generate_error_page(req, admin_was_alerted=True, page_already_started=False):
    """
    Returns an iterable with the error page to be sent to the user browser.

    @param req: the current request
    @param admin_was_alerted: whether the page should say the administrator
        has been informed about the problem
    @param page_already_started: if True, return only the error body (the
        page preamble was already written), otherwise a full page
    """
    from invenio.webpage import page
    from invenio import template
    webstyle_templates = template.load('webstyle')
    ln = req.form.get('ln', CFG_SITE_LANG)
    if page_already_started:
        return [webstyle_templates.tmpl_error_page(status=req.get_wsgi_status(), ln=ln, admin_was_alerted=admin_was_alerted)]
    else:
        return [page(title=req.get_wsgi_status(), body=webstyle_templates.tmpl_error_page(status=req.get_wsgi_status(), ln=ln, admin_was_alerted=admin_was_alerted), language=ln, req=req)]
def is_static_path(path):
    """
    Check whether *path* maps to an existing file under CFG_WEBDIR.

    @param path: the requested URL path.
    @type path: string
    @return: the absolute filesystem path when it corresponds to an
        existing file under CFG_WEBDIR, None otherwise.
    """
    absolute_path = os.path.abspath(CFG_WEBDIR + path)
    # Require the directory separator after CFG_WEBDIR so that sibling
    # directories sharing the prefix (e.g. "/var/wwwx" vs "/var/www")
    # cannot be reached via "..", which a bare startswith() would allow.
    if absolute_path.startswith(CFG_WEBDIR + os.sep) and \
           os.path.isfile(absolute_path):
        return absolute_path
    return None
def is_mp_legacy_publisher_path(path):
    """
    Checks path corresponds to an exsting Python file under CFG_WEBDIR.
    @param path: the path.
    @type path: string
    @return: the path of the module to load and the function to call there,
        or (None, None) when the path is not a legacy publisher URL.
    @rtype: tuple
    """
    path = path.split('/')
    for index, component in enumerate(path):
        if component.endswith('.py'):
            possible_module = os.path.abspath(CFG_WEBDIR + os.path.sep + os.path.sep.join(path[:index + 1]))
            possible_handler = '/'.join(path[index + 1:]).strip()
            # Handlers whose name starts with '_' are private: refuse them.
            if possible_handler.startswith('_'):
                return None, None
            if not possible_handler:
                possible_handler = 'index'
            if os.path.exists(possible_module) and possible_module.startswith(CFG_WEBDIR):
                return (possible_module, possible_handler)
    else:
        # NOTE(review): this `else` binds to the `for` loop (for/else); the
        # loop never breaks, so it runs whenever no component above
        # returned, i.e. no legacy publisher module matched — confirm.
        return None, None
def mp_legacy_publisher(req, possible_module, possible_handler):
    """
    mod_python legacy publisher minimum implementation.

    Loads *possible_module* from disk, finds the callable named
    *possible_handler* in it, and invokes it with the request plus the
    submitted form fields as keyword arguments; redirects to HTTPS first
    when the configuration or session requires it.
    """
    the_module = open(possible_module).read()
    module_globals = {}
    exec(the_module, module_globals)
    if possible_handler in module_globals and callable(module_globals[possible_handler]):
        from invenio.webinterface_handler import _check_result
        ## req is the required first parameter of any handler
        expected_args = list(inspect.getargspec(module_globals[possible_handler])[0])
        if not expected_args or 'req' != expected_args[0]:
            ## req was not the first argument. Too bad!
            raise SERVER_RETURN, HTTP_NOT_FOUND
        ## the req.form must be casted to dict because of Python 2.4 and earlier
        ## otherwise any object exposing the mapping interface can be
        ## used with the magic **
        form = dict(req.form)
        for key, value in form.items():
            ## FIXME: this is a backward compatibility workaround
            ## because most of the old administration web handler
            ## expect parameters to be of type str.
            ## When legacy publisher will be removed all this
            ## pain will go away anyway :-)
            if isinstance(value, str):
                # NOTE(review): str(value) on a str is an identity copy;
                # this branch looks like a no-op — confirm intent.
                form[key] = str(value)
            else:
                ## NOTE: this is a workaround for e.g. legacy webupload
                ## that is still using legacy publisher and expect to
                ## have a file (Field) instance instead of a string.
                form[key] = value
        if (CFG_FULL_HTTPS or CFG_HAS_HTTPS_SUPPORT and get_session(req).need_https) and not req.is_https():
            from invenio.urlutils import redirect_to_url
            # We need to isolate the part of the URI that is after
            # CFG_SITE_URL, and append that to our CFG_SITE_SECURE_URL.
            original_parts = urlparse(req.unparsed_uri)
            plain_prefix_parts = urlparse(CFG_SITE_URL)
            secure_prefix_parts = urlparse(CFG_SITE_SECURE_URL)
            # Compute the new path
            plain_path = original_parts[2]
            plain_path = secure_prefix_parts[2] + \
                         plain_path[len(plain_prefix_parts[2]):]
            # ...and recompose the complete URL
            final_parts = list(secure_prefix_parts)
            final_parts[2] = plain_path
            final_parts[-3:] = original_parts[-3:]
            target = urlunparse(final_parts)
            redirect_to_url(req, target)
        try:
            return _check_result(req, module_globals[possible_handler](req, **form))
        except TypeError, err:
            # The handler rejected our keyword arguments: retry keeping
            # only the parameters the handler actually declares.
            if ("%s() got an unexpected keyword argument" % possible_handler) in str(err) or ('%s() takes at least' % possible_handler) in str(err):
                inspected_args = inspect.getargspec(module_globals[possible_handler])
                expected_args = list(inspected_args[0])
                expected_defaults = list(inspected_args[3])
                # Reversed so positional args line up with their defaults.
                expected_args.reverse()
                expected_defaults.reverse()
                register_exception(req=req, prefix="Wrong GET parameter set in calling a legacy publisher handler for %s: expected_args=%s, found_args=%s" % (possible_handler, repr(expected_args), repr(req.form.keys())), alert_admin=CFG_DEVEL_SITE)
                cleaned_form = {}
                for index, arg in enumerate(expected_args):
                    if arg == 'req':
                        continue
                    if index < len(expected_defaults):
                        cleaned_form[arg] = form.get(arg, expected_defaults[index])
                    else:
                        cleaned_form[arg] = form.get(arg, None)
                return _check_result(req, module_globals[possible_handler](req, **cleaned_form))
            else:
                raise
    else:
        raise SERVER_RETURN, HTTP_NOT_FOUND
def check_wsgiref_testing_feasability():
    """
    In order to use wsgiref for running Invenio, CFG_SITE_URL and
    CFG_SITE_SECURE_URL must not use HTTPS because SSL is not supported.

    Exits the process with an explanatory message otherwise.
    """
    if CFG_SITE_URL.lower().startswith('https'):
        print >> sys.stderr, """
ERROR: SSL is not supported by the wsgiref simple server implementation.
Please set CFG_SITE_URL not to start with "https".
Currently CFG_SITE_URL is set to: "%s".""" % CFG_SITE_URL
        sys.exit(1)
    if CFG_SITE_SECURE_URL.lower().startswith('https'):
        print >> sys.stderr, """
ERROR: SSL is not supported by the wsgiref simple server implementation.
Please set CFG_SITE_SECURE_URL not to start with "https".
Currently CFG_SITE_SECURE_URL is set to: "%s".""" % CFG_SITE_SECURE_URL
        sys.exit(1)
def wsgi_handler_test(port=80):
    """
    Simple WSGI testing environment based on wsgiref.

    @param port: TCP port the test server listens on (blocks forever).
    """
    from wsgiref.simple_server import make_server
    global CFG_WSGI_SERVE_STATIC_FILES
    # No Apache in front of us here, so serve static files ourselves.
    CFG_WSGI_SERVE_STATIC_FILES = True
    check_wsgiref_testing_feasability()
    # Wrap the application in the WSGI reference validator to catch
    # protocol violations early during testing.
    validator_app = validator(application)
    httpd = make_server('', port, validator_app)
    print "Serving on port %s..." % port
    httpd.serve_forever()
def main():
    """Command-line entry point: run the wsgiref test server or print help."""
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option('-t', '--test', action='store_true',
                      dest='test', default=False,
                      help="Run a WSGI test server via wsgiref (not using Apache).")
    parser.add_option('-p', '--port', type='int', dest='port', default='80',
                      help="The port where the WSGI test server will listen. [80]")
    (options, args) = parser.parse_args()
    if options.test:
        wsgi_handler_test(options.port)
    else:
        parser.print_help()

if __name__ == "__main__":
    main()
| cul-it/Invenio | modules/webstyle/lib/webinterface_handler_wsgi.py | Python | gpl-2.0 | 27,426 |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/duration.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/duration.proto',
package='google.protobuf',
syntax='proto3',
serialized_pb=_b('\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42|\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z*github.com/golang/protobuf/ptypes/duration\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
)
_DURATION = _descriptor.Descriptor(
name='Duration',
full_name='google.protobuf.Duration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='seconds', full_name='google.protobuf.Duration.seconds', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nanos', full_name='google.protobuf.Duration.nanos', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=51,
serialized_end=93,
)
DESCRIPTOR.message_types_by_name['Duration'] = _DURATION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Duration = _reflection.GeneratedProtocolMessageType('Duration', (_message.Message,), dict(
DESCRIPTOR = _DURATION,
__module__ = 'google.protobuf.duration_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.Duration)
))
_sym_db.RegisterMessage(Duration)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\rDurationProtoP\001Z*github.com/golang/protobuf/ptypes/duration\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
# @@protoc_insertion_point(module_scope)
| Haynie-Research-and-Development/jarvis | deps/lib/python3.4/site-packages/google/protobuf/duration_pb2.py | Python | gpl-2.0 | 2,780 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-06-16 19:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the FormulaColumnMapping model (auto-generated by Django).

    FormulaColumnMapping stores, per silo, how a computed column is
    derived: the source-column mapping, the operation applied, and the
    name of the resulting column.
    """

    dependencies = [
        ('silo', '0013_deletedsilos'),
    ]

    operations = [
        migrations.CreateModel(
            name='FormulaColumnMapping',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('mapping', models.TextField()),
                ('operation', models.TextField()),
                ('column_name', models.TextField()),
                ('silo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='silo.Silo')),
            ],
        ),
    ]
| mercycorps/TolaTables | silo/migrations/0014_formulacolumnmapping.py | Python | gpl-2.0 | 815 |
# -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2008 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.
# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.
# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.
# Headers in this file shall remain intact.
"""GnomeVFS backend for Virtual File System.
"""
import os
from twisted.internet.defer import succeed
from twisted.spread.flavors import Copyable, RemoteCopy
from twisted.spread.jelly import setUnjellyableForClass
from zope.interface import implements
from flumotion.common import log
from flumotion.common.errors import AccessDeniedError
from flumotion.common.interfaces import IDirectory, IFile
# gnomevfs is only imported inside nested scopes so that
# pychecker can ignore them, If pychecker ever gets fixed,
# move it back where it belongs
__pychecker__ = 'keepgoing'
class GnomeVFSFile(Copyable, RemoteCopy):
    """I am object implementing L{IFile} on top of GnomeVFS,
    see L{IFile} for more information.
    """
    implements(IFile)

    def __init__(self, parent, fileInfo):
        # NOTE: these attribute names are part of the serialized (jelly)
        # state shared with the remote side; do not rename them.
        self.parent = parent
        self.filename = fileInfo.name
        self.iconNames = ['gnome-fs-regular']

    # IFile
    def getPath(self):
        """Return the full path of the file inside its parent directory."""
        return os.path.join(self.parent, self.filename)
class GnomeVFSDirectory(Copyable, RemoteCopy):
    """I am object implementing L{IDirectory} on top of GnomeVFS,
    see L{IDirectory} for more information.
    """
    implements(IDirectory)

    def __init__(self, path, name=None):
        import gnomevfs
        # Fall back to the filesystem root when the path does not exist.
        if not os.path.exists(path):
            self.path = '/'
        else:
            self.path = os.path.abspath(path)
        if name is None:
            fileInfo = gnomevfs.get_file_info(self.path)
            name = fileInfo.name
        self.filename = name
        self.iconNames = ['gnome-fs-directory']
        # Filled in by cacheFiles(); must run on the worker side.
        self._cachedFiles = None

    # IFile
    def getPath(self):
        return self.path

    # IDirectory
    def getFiles(self):
        # Returns an already-fired deferred with the files previously
        # collected by cacheFiles().
        return succeed(self._cachedFiles)

    def cacheFiles(self):
        """
        Fetches the files contained on the directory for posterior usage of
        them. This should be called on the worker side to work or the files
        wouldn't be the expected ones.
        """
        import gnomevfs
        log.debug('vfsgnome', 'getting files for %s' % (self.path, ))
        retval = []
        try:
            fileInfos = gnomevfs.open_directory(self.path)
        except gnomevfs.AccessDeniedError:
            raise AccessDeniedError
        # Offer a '..' entry everywhere except the root.
        if self.path != '/':
            retval.append(GnomeVFSDirectory(os.path.dirname(self.path),
                                            name='..'))
        for fileInfo in fileInfos:
            filename = fileInfo.name
            # Hidden files are not listed.
            if filename.startswith('.'):
                continue
            if fileInfo.type == gnomevfs.FILE_TYPE_DIRECTORY:
                obj = GnomeVFSDirectory(os.path.join(self.path,
                                                     fileInfo.name))
            else:
                obj = GnomeVFSFile(self.path, fileInfo)
            retval.append(obj)
        log.log('vfsgnome', 'returning %r' % (retval, ))
        self._cachedFiles = retval
def registerGnomeVFSJelly():
    """Register the jelly used by the GnomeVFS VFS backend.

    Makes GnomeVFSFile/GnomeVFSDirectory unjellyable so they can be
    received over twisted.spread connections.
    """
    setUnjellyableForClass(GnomeVFSFile, GnomeVFSFile)
    setUnjellyableForClass(GnomeVFSDirectory, GnomeVFSDirectory)
    log.info('jelly', 'GnomeVFS registered')
| flyapen/UgFlu | flumotion/common/vfsgnome.py | Python | gpl-2.0 | 4,077 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Integration test which checks the various parameters for makemovie.py
Copyright 2010-2013 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import test.integration.library as lib
import os, sys
import omero.processor
class TestMakeMovie(lib.ITest):
    """
    Integration tests for the Make_Movie.py export script.

    Requires Pillow being installed
    """
    def setup_method(self, method):
        # base class creates the client/session; cache the script service
        lib.ITest.setup_method(self, method)
        self.svc = self.client.sf.getScriptService()
    def testNoParams(self):
        # run Make_Movie with only the mandatory IDs input
        makeMovieID = self.svc.getScriptID("/omero/export_scripts/Make_Movie.py")
        imported_pix = ",".join(self.import_image())
        imported_img = self.query.findByQuery("select i from Image i join fetch i.pixels pixels where pixels.id in (%s)" % imported_pix, None)
        inputs = {"IDs": omero.rtypes.rlist([imported_img.id])}
        # a user-mode processor must be alive for the script to execute
        impl = omero.processor.usermode_processor(self.root)
        try:
            process = self.svc.runScript(makeMovieID, inputs, None)
        finally:
            # always tear the processor down, even if runScript throws
            impl.cleanup()
| jballanc/openmicroscopy | components/tools/OmeroPy/test/integration/scriptstest/test_make_movie.py | Python | gpl-2.0 | 1,111 |
import sphivedbcli
import time
import sys
def printResultSet( rs ):
    """Pretty-print one result set: the row count, a tab-separated
    header of column names/types, then one line per row with every
    value rendered as a string."""
    print "row.count %d" % ( rs.getRowCount() )
    columnCount = rs.getColumnCount()
    hdrs = ""
    for i in range( columnCount ):
        hdrs = hdrs + ( "\t%s(%s)" % ( rs.getName( i ), rs.getType( i ) ) )
    print hdrs
    for i in range( rs.getRowCount() ):
        # cursor-style API: position on the row, then read columns
        rs.moveTo( i )
        row = ""
        for j in range( columnCount ):
            row = row + ( "\t[%s]" % ( rs.getString( j ) ) )
        print row
if __name__ == "__main__":
    # exactly one argument is required: the client config file
    if len( sys.argv ) != 2:
        print "Usage: %s <config file>" % ( sys.argv[0] )
        print "\tpython %s ../../sphivedb/sphivedbcli.ini" % ( sys.argv[0] )
        sys.exit( -1 )
    configFile = sys.argv[1]
    cli = sphivedbcli.SPHiveDBClient()
    cli.init( configFile )
    try:
        # smoke test: insert a timestamped row, then read the table back
        resp = cli.execute( 0, "foobar", "addrbook", \
                [ "insert into addrbook ( addr ) values ( \"%d\" )" % ( time.time() ), \
                "select * from addrbook" ] )
        if 0 == resp.getErrorCode():
            # one result set per executed statement
            rsCount = resp.getResultCount()
            for i in range( rsCount ):
                rs = resp.getResultSet( i )
                printResultSet( rs )
        else:
            print "%d: %s" % ( resp.getErrdataCode(), resp.getErrdataMsg() )
    except Exception, e:
        print e
| codegooglecom/sphivedb | client/python/testcli.py | Python | gpl-2.0 | 1,157 |
# -*- coding: utf-8 -*-
import json
def json_pre_process_hook(action, request, *args, **kwargs):
    """Parse the request body as JSON and copy every top-level key onto
    ``action`` as an attribute.

    Returns True on success.  On failure an error code/message is set
    on ``action`` ('002' for an empty body, '003' for malformed JSON)
    and False is returned.
    """
    raw = request.body
    if not raw:
        action.ret('002').msg('json_params_required')
        return False
    try:
        payload = json.loads(raw)
    except ValueError:
        action.ret('003').msg('json_params_invalid')
        return False
    for name in payload:
        setattr(action, name, payload[name])
    return True
def query_pre_process_hook(action, request, *args, **kwargs):
    """Copy every query-string parameter from ``request.GET`` onto
    ``action`` as an attribute.

    Always returns True; an empty query string is not an error.
    """
    params = request.GET
    if params:
        for name in params:
            setattr(action, name, params[name])
    return True
def form_pre_process_hook(action, request, *args, **kwargs):
    """Copy every POST form field from ``request.POST`` onto ``action``
    as an attribute.

    An empty form is an error: code '004' / message
    'form_params_required' are set on ``action`` and False is returned;
    otherwise True.
    """
    form = request.POST
    if not form:
        action.ret('004').msg('form_params_required')
        return False
    for field in form:
        setattr(action, field, form[field])
    return True
def jsonp_post_render_hook(action):
    """Post-render hook wrapping the rendered JSON payload in a JSONP
    callback: ``callback(<json>)``.

    NOTE(review): ``action.resp_data_json`` is used both as a callable
    (setter) and as a string (current payload) below -- presumably a
    fluent accessor on the action object; confirm against the action
    class before changing this.
    """
    if action.jsonp_callback:
        # wrap the existing JSON payload in the callback invocation
        action.resp_data_json(
            action.jsonp_callback + '('
            + action.resp_data_json + ')',
        )
    else:
        # no callback supplied: report the error and re-render
        action.ret('005').msg('jsonp_callback_required')
        if action._data:
            del action._data
        action.render()
        return False
    return True
| Alexoner/health-care-demo | careHealth/earth/action/hooks.py | Python | gpl-2.0 | 1,374 |
##### CONTROL AND NAVIGATION ###################################################

# update the applications' title bar
def UpdateCaption(page=0, force=False):
    """Refresh the window title and the OSD caption/page/status/comment
    strings for `page`; no-op if the caption is already current unless
    `force` is set.  `page` < 1 clears all OSD strings."""
    global CurrentCaption, CurrentOSDCaption, CurrentOSDPage, CurrentOSDStatus
    global CurrentOSDComment
    if (page == CurrentCaption) and not(force):
        return
    CurrentCaption = page
    caption = __title__
    if DocumentTitle:
        caption += " - " + DocumentTitle
    if page < 1:
        # no current page: blank the OSD and show only the base title
        CurrentOSDCaption = ""
        CurrentOSDPage = ""
        CurrentOSDStatus = ""
        CurrentOSDComment = ""
        Platform.SetWindowTitle(caption)
        return
    CurrentOSDPage = "%d/%d" % (page, PageCount)
    caption = "%s (%s)" % (caption, CurrentOSDPage)
    # user-set title takes precedence over the auto-detected one
    title = GetPageProp(page, 'title') or GetPageProp(page, '_title')
    if title:
        caption += ": %s" % title
        CurrentOSDCaption = title
    else:
        CurrentOSDCaption = ""
    status = []
    if GetPageProp(page, 'skip', False):
        status.append("skipped: yes")
    if not GetPageProp(page, ('overview', '_overview'), True):
        status.append("on overview page: no")
    CurrentOSDStatus = ", ".join(status)
    CurrentOSDComment = GetPageProp(page, 'comment')
    Platform.SetWindowTitle(caption)
# get next/previous page
def GetNextPage(page, direction):
    """Return the next non-skipped page starting from `page` in the
    given direction (honoring per-page 'prev'/'next' overrides and
    wrap-around), or 0 if no such page exists."""
    key = 'prev' if direction < 0 else 'next'
    visited = set()
    while True:
        visited.add(page)
        page = GetPageProp(page, key, page + direction)
        if page in visited:
            # walked a full cycle without finding a usable page
            return 0
        if Wrap:
            if page < 1:
                page = PageCount
            elif page > PageCount:
                page = 1
        elif (page < 1) or (page > PageCount):
            # fell off the start or end of the presentation
            return 0
        if not GetPageProp(page, 'skip', False):
            return page
# pre-load the following page into Pnext/Tnext
def PreloadNextPage(page):
    """Render `page` into the back texture (Tnext) so the next
    transition starts instantly; returns 1 on success, 0 when the page
    number is out of range."""
    global Pnext, Tnext
    if (page < 1) or (page > PageCount):
        Pnext = 0
        return 0
    if page == Pnext:
        # already preloaded, nothing to do
        return 1
    RenderPage(page, Tnext)
    Pnext = page
    return 1
# perform box fading; the fade animation time is mapped through func()
def BoxFade(func):
    """Animate the highlight-box fade over BoxFadeDuration ms; `func`
    maps linear time [0..1] to the value passed to DrawCurrentPage()
    (use 1-t for fading out).  Always returns 0; callers use the value
    as a 'skip remaining animations' flag."""
    t0 = Platform.GetTicks()
    while BoxFadeDuration > 0:
        if Platform.CheckAnimationCancelEvent(): break
        t = (Platform.GetTicks() - t0) * 1.0 / BoxFadeDuration
        if t >= 1.0: break
        DrawCurrentPage(func(t))
    # always snap to the final state, even when skipped or cancelled
    DrawCurrentPage(func(1.0))
    return 0
# reset the timer
def ResetTimer():
    """Restart the presentation clock from 'now'; prints a marker line
    when time-tracking output is active (except before the first page
    has been left)."""
    global StartTime, PageEnterTime
    if TimeTracking and not(FirstPage):
        print("--- timer was reset here ---")
    StartTime = Platform.GetTicks()
    PageEnterTime = 0
# start video playback
def PlayVideo(video):
    """Spawn an external player (omxplayer or MPlayer) for `video`
    (a path or list of paths); sets VideoPlaying/MPlayerProcess
    accordingly.  No-op when `video` is empty."""
    global MPlayerProcess, VideoPlaying, NextPageAfterVideo
    if not video: return
    # make sure no previous player instance is still running
    StopMPlayer()
    if Platform.use_omxplayer:
        opts = ["omxplayer"]
    else:
        opts = [MPlayerPath, "-quiet", "-slave", \
                "-monitorpixelaspect", "1:1", \
                "-vo", "gl", \
                "-autosync", "100"]
    try:
        # embed the player into our window if the platform supports it
        opts += ["-wid", str(Platform.GetWindowID())]
    except KeyError:
        # no window ID available: fall back to the player's own
        # fullscreen mode, or refuse in windowed mode
        if Fullscreen:
            opts.append("-fs")
        else:
            print("Sorry, but Impressive only supports video on your operating system if fullscreen", file=sys.stderr)
            print("mode is used.", file=sys.stderr)
            VideoPlaying = False
            MPlayerProcess = None
            return
    if not isinstance(video, list):
        video = [video]
    NextPageAfterVideo = False
    try:
        MPlayerProcess = Popen(opts + video, stdin=subprocess.PIPE)
        if Platform.use_omxplayer:
            # clear our frame so the overlaid video isn't mixed with
            # stale page contents
            gl.Clear(gl.COLOR_BUFFER_BIT)
            Platform.SwapBuffers()
        if Fullscreen and (os.name == 'nt'):
            # very ugly Win32-specific hack: in -wid embedding mode,
            # video display only works if we briefly minimize and restore
            # the window ... and that's the good case: in -fs, keyboard
            # focus is messed up and we don't get any input!
            if Win32FullscreenVideoHackTiming[0] > 0:
                time.sleep(Win32FullscreenVideoHackTiming[0])
            win32gui.ShowWindow(Platform.GetWindowID(), 6)  # SW_MINIMIZE
            if Win32FullscreenVideoHackTiming[1] > 0:
                time.sleep(Win32FullscreenVideoHackTiming[1])
            win32gui.ShowWindow(Platform.GetWindowID(), 9)  # SW_RESTORE
        VideoPlaying = True
    except OSError:
        # player binary not found / not executable
        MPlayerProcess = None
# called each time a page is entered, AFTER the transition, BEFORE entering box-fade mode
def PreparePage():
    """Apply the current page's per-page overrides (spot radius,
    box-fade darkness, box-zoom darkness) to the global settings."""
    global SpotRadius, SpotRadiusBase
    global BoxFadeDarkness, BoxFadeDarknessBase
    global BoxZoomDarkness, BoxZoomDarknessBase
    override = GetPageProp(Pcurrent, 'radius')
    if override:
        SpotRadius = override
        SpotRadiusBase = override
        # radius changed -> the spotlight mesh must be rebuilt
        GenerateSpotMesh()
    override = GetPageProp(Pcurrent, 'darkness')
    if override is not None:
        # page property is in percent, internal value is 0..1
        BoxFadeDarkness = override * 0.01
        BoxFadeDarknessBase = override * 0.01
    override = GetPageProp(Pcurrent, 'zoomdarkness')
    if override is not None:
        BoxZoomDarkness = override * 0.01
        BoxZoomDarknessBase = override * 0.01
# called each time a page is entered, AFTER the transition, AFTER entering box-fade mode
def PageEntered(update_time=True):
    """Run the per-page 'enter' actions: reset zoom/status state, start
    the page's video/sound, fire OnEnter(Once) callbacks, schedule the
    auto-advance timeout and bump the page's '_shown' counter."""
    global PageEnterTime, PageTimeout, MPlayerProcess, IsZoomed, WantStatus
    if update_time:
        PageEnterTime = Platform.GetTicks() - StartTime
    IsZoomed = 0  # no, we don't have a pre-zoomed image right now
    WantStatus = False  # don't show status unless it's changed interactively
    PageTimeout = AutoAdvanceTime if AutoAdvanceEnabled else 0
    shown = GetPageProp(Pcurrent, '_shown', 0)
    try:
        # chdir next to the page's file so relative paths in page
        # properties resolve as expected
        os.chdir(os.path.dirname(GetPageProp(Pcurrent, '_file')))
    except OSError:
        pass
    if not(shown) or Wrap:
        # per-page timeout overrides the global auto-advance value
        PageTimeout = GetPageProp(Pcurrent, 'timeout', PageTimeout)
    if GetPageProp(Pcurrent, '_video'):
        # the page itself is a video file -> play it
        PlayVideo(GetPageProp(Pcurrent, '_file'))
    if not(shown) or GetPageProp(Pcurrent, 'always', False):
        # first visit (or 'always' property set): start media, run
        # the once-only callback
        if not GetPageProp(Pcurrent, '_video'):
            video = GetPageProp(Pcurrent, 'video')
            sound = GetPageProp(Pcurrent, 'sound')
            PlayVideo(video)
            if sound and not(video):
                StopMPlayer()
                try:
                    MPlayerProcess = Popen(
                        [MPlayerPath, "-quiet", "-really-quiet", "-novideo", sound],
                        stdin=subprocess.PIPE)
                except OSError:
                    MPlayerProcess = None
        SafeCall(GetPageProp(Pcurrent, 'OnEnterOnce'))
    SafeCall(GetPageProp(Pcurrent, 'OnEnter'))
    if PageTimeout:
        Platform.ScheduleEvent("$page-timeout", PageTimeout)
    SetPageProp(Pcurrent, '_shown', shown + 1)
# called each time a page is left
def PageLeft(overview=False):
    """Run the per-page 'leave' actions (timer reset, OnLeave(Once)
    callbacks) and, when time tracking is active, print the per-page
    timing line.  `overview` marks leaving the overview screen rather
    than a regular page."""
    global FirstPage, LastPage, WantStatus, PageLeaveTime
    PageLeaveTime = Platform.GetTicks() - StartTime
    WantStatus = False
    if not overview:
        if GetTristatePageProp(Pcurrent, 'reset'):
            ResetTimer()
        FirstPage = False
        LastPage = Pcurrent
        if GetPageProp(Pcurrent, '_shown', 0) == 1:
            # fires only after the very first visit of this page
            SafeCall(GetPageProp(Pcurrent, 'OnLeaveOnce'))
        SafeCall(GetPageProp(Pcurrent, 'OnLeave'))
    if TimeTracking:
        t1 = Platform.GetTicks() - StartTime
        # round the time spent on the page to whole seconds
        dt = (t1 - PageEnterTime + 500) // 1000
        if overview:
            p = "over"
        else:
            p = "%4d" % Pcurrent
        print("%s%9s%9s%9s" % (p, FormatTime(dt),
                               FormatTime(PageEnterTime // 1000),
                               FormatTime(t1 // 1000)))
# create an instance of a transition class
def InstantiateTransition(trans_class):
    """Turn a transition class (or its name as a string) into an
    instance; returns None (logging to stderr) when the name/class is
    invalid or its shader fails to compile.

    NOTE(review): `basestring` is Python 2 -- presumably a py2/py3
    compatibility alias is defined elsewhere in this file; confirm.
    """
    if isinstance(trans_class, basestring):
        # case-insensitive lookup of the class by name
        index = dict((c.__name__.lower(), c) for c in AllTransitions)
        try:
            trans_class = index[trans_class.lower()]
        except KeyError:
            print("Error: invalid transition '{}', ignoring".format(trans_class), file=sys.stderr)
            return None
    elif not(isinstance(trans_class, type) and issubclass(trans_class, Transition)):
        print("Error: invalid transition '{!r}', ignoring".format(trans_class), file=sys.stderr)
        return None
    try:
        return trans_class()
    except GLInvalidShaderError:
        # this transition is not available on the current GPU; skip silently
        return None
    except GLShaderCompileError:
        print("Note: all %s transitions will be disabled" % trans_class.__name__, file=sys.stderr)
        return None
# perform a transition to a specified page
def TransitionTo(page, allow_transition=True, notify_page_left=True):
    """Navigate from the current page to `page`, running the page's
    transition animation (unless `allow_transition` is False) plus all
    leave/enter hooks.  Returns 0 when the target page is invalid
    (start/end of the presentation reached), 1 otherwise."""
    global Pcurrent, Pnext, Tcurrent, Tnext
    global PageCount, Marking, Tracing, Panning
    global TransitionRunning, TransitionPhase
    global TransitionDone
    TransitionDone = False
    # first, stop video and kill the auto-timer
    if VideoPlaying:
        StopMPlayer()
    Platform.ScheduleEvent("$page-timeout", 0)
    # invalid page? go away
    if not PreloadNextPage(page):
        if QuitAtEnd:
            LeaveZoomMode(allow_transition)
            if FadeInOut:
                EnterFadeMode()
            PageLeft()
            Quit()
        return 0
    # leave zoom mode now, if enabled
    LeaveZoomMode(allow_transition)
    # notify that the page has been left
    if notify_page_left:
        PageLeft()
    if TransitionDone:
        return 1  # nested call to TransitionTo() detected -> abort here
    # box fade-out
    if GetPageProp(Pcurrent, 'boxes') or Tracing:
        skip = BoxFade(lambda t: 1.0 - t)
    else:
        skip = 0
    # some housekeeping
    Marking = False
    Tracing = False
    UpdateCaption(page)
    # check if the transition is valid; remember under which key it was
    # found so a lazily instantiated transition can be cached back
    tpage = max(Pcurrent, Pnext)
    trans = None
    if allow_transition:
        for tkey in ('transition', '_transition'):
            trans = GetPageProp(tpage, tkey)
            if trans is not None:
                break
    if trans is not None:
        transtime = GetPageProp(tpage, 'transtime', TransitionDuration)
        try:
            dummy = trans.__class__
        except AttributeError:
            # ah, gotcha! the transition is not yet instantiated!
            trans = InstantiateTransition(trans)
            # BUGFIX: cache the instance under the key it came from
            # (`tkey` was previously undefined here -> NameError)
            PageProps[tpage][tkey] = trans
    if trans is None:
        transtime = 0
    # backward motion? then swap page buffers now
    backward = (Pnext < Pcurrent)
    if Wrap and (min(Pcurrent, Pnext) == 1) and (max(Pcurrent, Pnext) == PageCount):
        backward = not(backward)  # special case: last<->first in wrap mode
    if backward:
        Pcurrent, Pnext = (Pnext, Pcurrent)
        Tcurrent, Tnext = (Tnext, Tcurrent)
    # transition animation
    if not(skip) and transtime:
        transtime = 1.0 / transtime
        TransitionRunning = True
        trans.start()
        t0 = Platform.GetTicks()
        while not(VideoPlaying):
            if Platform.CheckAnimationCancelEvent():
                skip = 1
                break
            t = (Platform.GetTicks() - t0) * transtime
            if t >= 1.0: break
            TransitionPhase = t
            if backward: t = 1.0 - t
            gl.Clear(gl.COLOR_BUFFER_BIT)
            trans.render(t)
            DrawOverlays(t)
            Platform.SwapBuffers()
        TransitionRunning = False
    # forward motion => swap page buffers now
    if not backward:
        Pcurrent, Pnext = (Pnext, Pcurrent)
        Tcurrent, Tnext = (Tnext, Tcurrent)
    # prepare the page's changeable metadata
    PreparePage()
    # box fade-in
    if not(skip) and GetPageProp(Pcurrent, 'boxes'): BoxFade(lambda t: t)
    # finally update the screen and preload the next page
    DrawCurrentPage()
    PageEntered()
    if TransitionDone:
        return 1
    if not PreloadNextPage(GetNextPage(Pcurrent, 1)):
        PreloadNextPage(GetNextPage(Pcurrent, -1))
    TransitionDone = True
    return 1
# zoom mode animation
def ZoomAnimation(targetx, targety, func, duration_override=None):
    """Animate the view zooming toward (targetx, targety); `func` maps
    linear animation time [0..1] to zoom progress (pass 1-t to zoom
    back out).  `duration_override`, when given, replaces the global
    ZoomDuration (0 = instant)."""
    global ZoomX0, ZoomY0, ZoomArea
    t0 = Platform.GetTicks()
    if duration_override is None:
        duration = ZoomDuration
    else:
        duration = duration_override
    while duration > 0:
        if Platform.CheckAnimationCancelEvent(): break
        t = (Platform.GetTicks() - t0) * 1.0 / duration
        if t >= 1.0: break
        t = func(t)
        dark = (t if BoxZoom else 1.0)
        # ease-out: remap linear progress for a smoother feel
        t = (2.0 - t) * t
        ZoomX0 = targetx * t
        ZoomY0 = targety * t
        ZoomArea = 1.0 - (1.0 - 1.0 / ViewZoomFactor) * t
        DrawCurrentPage(dark=dark)
    # snap to the final state even if the loop was skipped or cancelled
    t = func(1.0)
    ZoomX0 = targetx * t
    ZoomY0 = targety * t
    ZoomArea = 1.0 - (1.0 - 1.0 / ViewZoomFactor) * t
    GenerateSpotMesh()
    DrawCurrentPage(dark=(t if BoxZoom else 1.0))
# re-render zoomed page image
def ReRenderZoom(factor):
    """Re-render the current page at `factor`x resolution (clamped to
    MaxZoomFactor) into its texture for a sharper zoomed display; bails
    out when already rendered at this factor, when not worthwhile
    (< 1.1x), or after a previous failure."""
    global ResZoomFactor, IsZoomed, HighResZoomFailed
    ResZoomFactor = min(factor, MaxZoomFactor)
    if (IsZoomed >= ResZoomFactor) or (ResZoomFactor < 1.1) or HighResZoomFailed:
        return
    gl.BindTexture(gl.TEXTURE_2D, Tcurrent)
    while gl.GetError():
        pass  # clear all OpenGL errors
    gl.TexImage2D(gl.TEXTURE_2D, 0, gl.RGB, int(ResZoomFactor * TexWidth), int(ResZoomFactor * TexHeight), 0, gl.RGB, gl.UNSIGNED_BYTE, PageImage(Pcurrent, True))
    if gl.GetError():
        # texture upload failed -> warn once and never retry
        print("I'm sorry, but your graphics card is not capable of rendering presentations", file=sys.stderr)
        print("in this resolution. Either the texture memory is exhausted, or there is no", file=sys.stderr)
        print("support for large textures (%dx%d). Please try to run Impressive in a" % (TexWidth, TexHeight), file=sys.stderr)
        print("smaller resolution using the -g command-line option.", file=sys.stderr)
        HighResZoomFailed = True
        return
    DrawCurrentPage()
    IsZoomed = ResZoomFactor
# enter zoom mode
def EnterZoomMode(factor, targetx, targety):
    """Zoom in to `factor`x centered on the document point
    (targetx, targety), then re-render the page at high resolution."""
    global ZoomMode, ViewZoomFactor
    ViewZoomFactor = factor
    ZoomAnimation(targetx, targety, lambda t: t)
    ZoomMode = True
    ReRenderZoom(factor)
# leave zoom mode (if enabled)
def LeaveZoomMode(allow_transition=True):
    """Animate back out of zoom mode (instantly when `allow_transition`
    is False) and reset all zoom/pan state; no-op when not zoomed."""
    global ZoomMode, BoxZoom, Panning, ViewZoomFactor, ResZoomFactor
    global ZoomArea, ZoomX0, ZoomY0
    if not ZoomMode: return
    # duration override 0 makes the zoom-out instantaneous
    ZoomAnimation(ZoomX0, ZoomY0, lambda t: 1.0 - t, (None if allow_transition else 0))
    ZoomMode = False
    BoxZoom = False
    Panning = False
    ViewZoomFactor = 1
    ResZoomFactor = 1
    ZoomArea = 1.0
    ZoomX0 = 0.0
    ZoomY0 = 0.0
# change zoom factor in zoom mode
def ChangeZoom(target_factor, mousepos):
    """Animate the zoom factor toward `target_factor` over
    WheelZoomDuration ms, keeping the document point under the mouse
    cursor fixed on screen."""
    global ZoomMode, ViewZoomFactor, ZoomArea, ZoomX0, ZoomY0
    px, py = MouseToScreen(mousepos)
    # interpolate in log space for a perceptually uniform zoom speed
    log_zf = log(ViewZoomFactor)
    dlog = log(target_factor) - log_zf
    t0 = Platform.GetTicks()
    dt = -1
    while dt < WheelZoomDuration:
        dt = Platform.GetTicks() - t0
        rel = min(1.0, float(dt) / WheelZoomDuration) if WheelZoomDuration else 1.0
        factor = exp(log_zf + rel * dlog)
        # snap to exactly 1.0 near the unzoomed state
        if factor < 1.001: factor = 1.0
        ZoomArea = 1.0 / factor
        # clamp the visible area so it stays within the page
        ZoomX0 = max(0.0, min(1.0 - ZoomArea, px - mousepos[0] * ZoomArea / ScreenWidth))
        ZoomY0 = max(0.0, min(1.0 - ZoomArea, py - mousepos[1] * ZoomArea / ScreenHeight))
        DrawCurrentPage()
    ViewZoomFactor = factor
    ZoomMode = (factor > 1.0)
# check whether a box mark is too small
def BoxTooSmall():
    """Return True when the currently marked box is narrower or shorter
    than MinBoxSize pixels on screen."""
    width_px = abs(MarkUL[0] - MarkLR[0]) * ScreenWidth
    height_px = abs(MarkUL[1] - MarkLR[1]) * ScreenHeight
    return (width_px < MinBoxSize) or (height_px < MinBoxSize)
# increment/decrement spot radius
def IncrementSpotSize(delta):
    """Grow/shrink the spotlight radius by `delta` pixels (never below
    8); only effective while tracing mode is active."""
    global SpotRadius
    if not Tracing:
        return
    SpotRadius = max(SpotRadius + delta, 8)
    GenerateSpotMesh()
    DrawCurrentPage()
# post-initialize the page transitions
def PrepareTransitions():
    """Assign a random transition to every page that has none specified
    and turn all transition classes/names into instances."""
    # sentinel default so an explicit `transition=None` is distinguished
    # from 'not set at all'
    Unspecified = 0xAFFED00F
    # STEP 1: randomly assign transitions where the user didn't specify them
    cnt = sum([1 for page in range(1, PageCount + 1) \
        if GetPageProp(page, 'transition', Unspecified) == Unspecified])
    # repeat the available transitions often enough, then trim and shuffle
    newtrans = ((cnt // len(AvailableTransitions) + 1) * AvailableTransitions)[:cnt]
    random.shuffle(newtrans)
    for page in range(1, PageCount + 1):
        if GetPageProp(page, 'transition', Unspecified) == Unspecified:
            SetPageProp(page, '_transition', newtrans.pop())
    # STEP 2: instantiate transitions
    for page in PageProps:
        for key in ('transition', '_transition'):
            if not key in PageProps[page]:
                continue
            trans = PageProps[page][key]
            if trans is not None:
                PageProps[page][key] = InstantiateTransition(trans)
# update timer values and screen timer
def TimerTick():
    """Advance the on-screen clock and progress-bar state; returns True
    when something changed and the screen needs a redraw."""
    global CurrentTime, ProgressBarPos
    redraw = False
    newtime = (Platform.GetTicks() - StartTime) * 0.001
    if EstimatedDuration:
        # progress bar position in pixels along the screen width
        newpos = int(ScreenWidth * newtime / EstimatedDuration)
        if newpos != ProgressBarPos:
            redraw = True
            ProgressBarPos = newpos
    newtime = int(newtime)
    if TimeDisplay and (CurrentTime != newtime):
        redraw = True
    if PageTimeout and AutoAdvanceProgress:
        # auto-advance indicator animates continuously
        redraw = True
    CurrentTime = newtime
    return redraw
# enables time tracking mode (if not already done so)
def EnableTimeTracking(force=False):
    """Switch on per-page timing output and print the table header.
    Without `force`, only activates when the time display is enabled,
    the clock is not shown, and we are still on the first page."""
    global TimeTracking
    if force or (TimeDisplay and not(TimeTracking) and not(ShowClock) and FirstPage):
        print("Time tracking mode enabled.", file=sys.stderr)
        TimeTracking = True
        print("page duration enter leave")
        print("---- -------- -------- --------")
# set cursor visibility
def SetCursor(visible):
    """Record the desired cursor visibility and apply it via the
    platform layer, unless a custom cursor image is in use or
    MouseHideDelay is 1."""
    global CursorVisible
    CursorVisible = visible
    if not(CursorImage) and (MouseHideDelay != 1):
        Platform.SetMouseVisible(visible)
# handle a shortcut key event: store it (if shifted) or return the
# page number to navigate to (if not)
def HandleShortcutKey(key, current=0):
    """Process a '*'-prefixed shortcut key name.

    A shifted event ('*shift+<key>') (re)assigns the shortcut to the
    `current` page; a plain event returns the page bound to that
    shortcut.  Returns the target page number, or None when nothing
    should happen.
    """
    if (not key) or not key.startswith('*'):
        return None
    shift = key.startswith('*shift+')
    key = key[7:] if shift else key[1:]
    # acceptable shortcuts: single characters and f1..f9
    # (F10..F12 are implicitly included due to lexicographic sorting)
    if (len(key) != 1) and not ("f1" <= key <= "f9"):
        return None
    # find the page (if any) that currently owns this shortcut
    bound_page = next((p for p, props in PageProps.items()
                       if props.get('shortcut') == key), None)
    if shift:
        # reassign: drop the old binding, attach it to the current page
        if bound_page:
            DelPageProp(bound_page, 'shortcut')
        SetPageProp(current, 'shortcut', key)
        return None
    if bound_page and (bound_page != current):
        return bound_page
    return None
| mjg/Impressive-svn | src/control.py | Python | gpl-2.0 | 19,439 |
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2017 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""Elemental masses (most common isotope), symbols, and atomic numbers from psi4.
"""
# Element names indexed by atomic number Z (index 0 is the ghost atom).
# BUGFIX: a missing comma between "LAWRENCIUM" and "RUTHERFORDIUM"
# previously fused them into one string via implicit concatenation,
# shifting the later entries and leaving the list one element short of
# the 108 entries in _temp_z.
_temp_element = ["GHOST", "HYDROGEN", "HELIUM", "LITHIUM", "BERYLLIUM",
                 "BORON", "CARBON", "NITROGEN", "OXYGEN", "FLUORINE",
                 "NEON", "SODIUM", "MAGNESIUM", "ALUMINUM", "SILICON",
                 "PHOSPHORUS", "SULFUR", "CHLORINE", "ARGON", "POTASSIUM",
                 "CALCIUM", "SCANDIUM", "TITANIUM", "VANADIUM", "CHROMIUM",
                 "MANGANESE", "IRON", "COBALT", "NICKEL", "COPPER",
                 "ZINC", "GALLIUM", "GERMANIUM", "ARSENIC", "SELENIUM",
                 "BROMINE", "KRYPTON", "RUBIDIUM", "STRONTIUM", "YTTRIUM",
                 "ZIRCONIUM", "NIOBIUM", "MOLYBDENUM", "TECHNETIUM", "RUTHENIUM",
                 "RHODIUM", "PALLADIUM", "SILVER", "CADMIUM", "INDIUM",
                 "TIN", "ANTIMONY", "TELLURIUM", "IODINE", "XENON",
                 "CESIUM", "BARIUM", "LANTHANUM", "CERIUM", "PRASEODYMIUM",
                 "NEODYMIUM", "PROMETHIUM", "SAMARIUM", "EUROPIUM", "GADOLINIUM",
                 "TERBIUM", "DYSPROSIUM", "HOLMIUM", "ERBIUM", "THULIUM",
                 "YTTERBIUM", "LUTETIUM", "HAFNIUM", "TANTALUM", "TUNGSTEN",
                 "RHENIUM", "OSMIUM", "IRIDIUM", "PLATINUM", "GOLD",
                 "MERCURY", "THALLIUM", "LEAD", "BISMUTH", "POLONIUM",
                 "ASTATINE", "RADON", "FRANCIUM", "RADIUM", "ACTINIUM",
                 "THORIUM", "PROTACTINIUM", "URANIUM", "NEPTUNIUM", "PLUTONIUM",
                 "AMERICIUM", "CURIUM", "BERKELIUM", "CALIFORNIUM", "EINSTEINIUM",
                 "FERMIUM", "MENDELEVIUM", "NOBELIUM", "LAWRENCIUM", "RUTHERFORDIUM",
                 "DUBNIUM", "SEABORGIUM", "BOHRIUM"]
# Atomic symbols indexed by atomic number Z; index 0 ("X") is a ghost
# atom.  Note: this list continues past the element-name list up to
# "UUO", using IUPAC systematic placeholder symbols at the high end.
_temp_symbol = ["X", "H", "HE", "LI", "BE", "B", "C", "N", "O", "F", "NE", "NA", "MG",
                "AL", "SI", "P", "S", "CL", "AR", "K", "CA", "SC", "TI", "V", "CR", "MN", "FE", "CO",
                "NI", "CU", "ZN", "GA", "GE", "AS", "SE", "BR", "KR", "RB", "SR", "Y", "ZR", "NB",
                "MO", "TC", "RU", "RH", "PD", "AG", "CD", "IN", "SN", "SB", "TE", "I", "XE", "CS",
                "BA", "LA", "CE", "PR", "ND", "PM", "SM", "EU", "GD", "TB", "DY", "HO", "ER", "TM",
                "YB", "LU", "HF", "TA", "W", "RE", "OS", "IR", "PT", "AU", "HG", "TL", "PB", "BI",
                "PO", "AT", "RN", "FR", "RA", "AC", "TH", "PA", "U", "NP", "PU", "AM", "CM", "BK",
                "CF", "ES", "FM", "MD", "NO", "LR", "RF", "DB", "SG", "BH", "HS", "MT", "DS", "RG",
                "UUB", "UUT", "UUQ", "UUP", "UUH", "UUS", "UUO"]
# Atomic numbers 0..107, parallel to the element-name list above.
_temp_z = list(range(0, 108))
# Mass (amu) of the most common isotope, indexed by atomic number Z;
# index 0 (ghost atom) has mass 0.
_temp_mass = [
    0., 1.00782503207, 4.00260325415, 7.016004548, 9.012182201, 11.009305406,
    12, 14.00307400478, 15.99491461956, 18.998403224, 19.99244017542,
    22.98976928087, 23.985041699, 26.981538627, 27.97692653246, 30.973761629,
    31.972070999, 34.968852682, 39.96238312251, 38.963706679, 39.962590983,
    44.955911909, 47.947946281, 50.943959507, 51.940507472, 54.938045141,
    55.934937475, 58.933195048, 57.935342907, 62.929597474, 63.929142222,
    68.925573587, 73.921177767, 74.921596478, 79.916521271, 78.918337087,
    85.910610729, 84.911789737, 87.905612124, 88.905848295, 89.904704416,
    92.906378058, 97.905408169, 98.906254747, 101.904349312, 102.905504292,
    105.903485715, 106.90509682, 113.90335854, 114.903878484, 119.902194676,
    120.903815686, 129.906224399, 126.904472681, 131.904153457, 132.905451932,
    137.905247237, 138.906353267, 139.905438706, 140.907652769, 141.907723297,
    144.912749023, 151.919732425, 152.921230339, 157.924103912, 158.925346757,
    163.929174751, 164.93032207, 165.930293061, 168.93421325, 173.938862089,
    174.940771819, 179.946549953, 180.947995763, 183.950931188, 186.955753109,
    191.96148069, 192.96292643, 194.964791134, 196.966568662, 201.970643011,
    204.974427541, 207.976652071, 208.980398734, 208.982430435, 210.987496271,
    222.017577738, 222.01755173, 228.031070292, 227.027752127, 232.038055325,
    231.03588399, 238.050788247, 237.048173444, 242.058742611, 243.06138108,
    247.07035354, 247.07030708, 251.079586788, 252.082978512, 257.095104724,
    258.098431319, 255.093241131, 260.105504, 263.112547, 255.107398, 259.114500,
    262.122892, 263.128558, 265.136151, 281.162061, 272.153615, 283.171792, 283.176451,
    285.183698, 287.191186, 292.199786, 291.206564, 293.214670]
_temp_iso_symbol = [
"H", "H1", "H2", "D", "H3", "T", "H4", "H5", "H6", "H7", "HE", "HE3", "HE4",
"HE5", "HE6", "HE7", "HE8", "HE9", "HE10", "LI", "LI3", "LI4", "LI5", "LI6",
"LI7", "LI8", "LI9", "LI10", "LI11", "LI12", "BE", "BE5", "BE6", "BE7", "BE8",
"BE9", "BE10", "BE11", "BE12", "BE13", "BE14", "BE15", "BE16", "B", "B6", "B7",
"B8", "B9", "B10", "B11", "B12", "B13", "B14", "B15", "B16", "B17", "B18", "B19",
"C", "C8", "C9", "C10", "C11", "C12", "C13", "C14", "C15", "C16", "C17", "C18",
"C19", "C20", "C21", "C22", "N", "N10", "N11", "N12", "N13", "N14", "N15", "N16",
"N17", "N18", "N19", "N20", "N21", "N22", "N23", "N24", "N25", "O", "O12", "O13",
"O14", "O15", "O16", "O17", "O18", "O19", "O20", "O21", "O22", "O23", "O24",
"O25", "O26", "O27", "O28", "F", "F14", "F15", "F16", "F17", "F18", "F19", "F20",
"F21", "F22", "F23", "F24", "F25", "F26", "F27", "F28", "F29", "F30", "F31",
"NE", "NE16", "NE17", "NE18", "NE19", "NE20", "NE21", "NE22", "NE23", "NE24",
"NE25", "NE26", "NE27", "NE28", "NE29", "NE30", "NE31", "NE32", "NE33", "NE34",
"NA", "NA18", "NA19", "NA20", "NA21", "NA22", "NA23", "NA24", "NA25", "NA26",
"NA27", "NA28", "NA29", "NA30", "NA31", "NA32", "NA33", "NA34", "NA35", "NA36",
"NA37", "MG", "MG19", "MG20", "MG21", "MG22", "MG23", "MG24", "MG25", "MG26",
"MG27", "MG28", "MG29", "MG30", "MG31", "MG32", "MG33", "MG34", "MG35", "MG36",
"MG37", "MG38", "MG39", "MG40", "AL", "AL21", "AL22", "AL23", "AL24", "AL25",
"AL26", "AL27", "AL28", "AL29", "AL30", "AL31", "AL32", "AL33", "AL34", "AL35",
"AL36", "AL37", "AL38", "AL39", "AL40", "AL41", "AL42", "SI", "SI22", "SI23",
"SI24", "SI25", "SI26", "SI27", "SI28", "SI29", "SI30", "SI31", "SI32", "SI33",
"SI34", "SI35", "SI36", "SI37", "SI38", "SI39", "SI40", "SI41", "SI42", "SI43",
"SI44", "P", "P24", "P25", "P26", "P27", "P28", "P29", "P30", "P31", "P32",
"P33", "P34", "P35", "P36", "P37", "P38", "P39", "P40", "P41", "P42", "P43",
"P44", "P45", "P46", "S", "S26", "S27", "S28", "S29", "S30", "S31", "S32", "S33",
"S34", "S35", "S36", "S37", "S38", "S39", "S40", "S41", "S42", "S43", "S44",
"S45", "S46", "S47", "S48", "S49", "CL", "CL28", "CL29", "CL30", "CL31", "CL32",
"CL33", "CL34", "CL35", "CL36", "CL37", "CL38", "CL39", "CL40", "CL41", "CL42",
"CL43", "CL44", "CL45", "CL46", "CL47", "CL48", "CL49", "CL50", "CL51", "AR",
"AR30", "AR31", "AR32", "AR33", "AR34", "AR35", "AR36", "AR37", "AR38", "AR39",
"AR40", "AR41", "AR42", "AR43", "AR44", "AR45", "AR46", "AR47", "AR48", "AR49",
"AR50", "AR51", "AR52", "AR53", "K", "K32", "K33", "K34", "K35", "K36", "K37",
"K38", "K39", "K40", "K41", "K42", "K43", "K44", "K45", "K46", "K47", "K48",
"K49", "K50", "K51", "K52", "K53", "K54", "K55", "CA", "CA34", "CA35", "CA36",
"CA37", "CA38", "CA39", "CA40", "CA41", "CA42", "CA43", "CA44", "CA45", "CA46",
"CA47", "CA48", "CA49", "CA50", "CA51", "CA52", "CA53", "CA54", "CA55", "CA56",
"CA57", "SC", "SC36", "SC37", "SC38", "SC39", "SC40", "SC41", "SC42", "SC43",
"SC44", "SC45", "SC46", "SC47", "SC48", "SC49", "SC50", "SC51", "SC52", "SC53",
"SC54", "SC55", "SC56", "SC57", "SC58", "SC59", "SC60", "TI", "TI38", "TI39",
"TI40", "TI41", "TI42", "TI43", "TI44", "TI45", "TI46", "TI47", "TI48", "TI49",
"TI50", "TI51", "TI52", "TI53", "TI54", "TI55", "TI56", "TI57", "TI58", "TI59",
"TI60", "TI61", "TI62", "TI63", "V", "V40", "V41", "V42", "V43", "V44", "V45",
"V46", "V47", "V48", "V49", "V50", "V51", "V52", "V53", "V54", "V55", "V56",
"V57", "V58", "V59", "V60", "V61", "V62", "V63", "V64", "V65", "CR", "CR42",
"CR43", "CR44", "CR45", "CR46", "CR47", "CR48", "CR49", "CR50", "CR51", "CR52",
"CR53", "CR54", "CR55", "CR56", "CR57", "CR58", "CR59", "CR60", "CR61", "CR62",
"CR63", "CR64", "CR65", "CR66", "CR67", "MN", "MN44", "MN45", "MN46", "MN47",
"MN48", "MN49", "MN50", "MN51", "MN52", "MN53", "MN54", "MN55", "MN56", "MN57",
"MN58", "MN59", "MN60", "MN61", "MN62", "MN63", "MN64", "MN65", "MN66", "MN67",
"MN68", "MN69", "FE", "FE45", "FE46", "FE47", "FE48", "FE49", "FE50", "FE51",
"FE52", "FE53", "FE54", "FE55", "FE56", "FE57", "FE58", "FE59", "FE60", "FE61",
"FE62", "FE63", "FE64", "FE65", "FE66", "FE67", "FE68", "FE69", "FE70", "FE71",
"FE72", "CO", "CO47", "CO48", "CO49", "CO50", "CO51", "CO52", "CO53", "CO54",
"CO55", "CO56", "CO57", "CO58", "CO59", "CO60", "CO61", "CO62", "CO63", "CO64",
"CO65", "CO66", "CO67", "CO68", "CO69", "CO70", "CO71", "CO72", "CO73", "CO74",
"CO75", "NI", "NI48", "NI49", "NI50", "NI51", "NI52", "NI53", "NI54", "NI55",
"NI56", "NI57", "NI58", "NI59", "NI60", "NI61", "NI62", "NI63", "NI64", "NI65",
"NI66", "NI67", "NI68", "NI69", "NI70", "NI71", "NI72", "NI73", "NI74", "NI75",
"NI76", "NI77", "NI78", "CU", "CU52", "CU53", "CU54", "CU55", "CU56", "CU57",
"CU58", "CU59", "CU60", "CU61", "CU62", "CU63", "CU64", "CU65", "CU66", "CU67",
"CU68", "CU69", "CU70", "CU71", "CU72", "CU73", "CU74", "CU75", "CU76", "CU77",
"CU78", "CU79", "CU80", "ZN", "ZN54", "ZN55", "ZN56", "ZN57", "ZN58", "ZN59",
"ZN60", "ZN61", "ZN62", "ZN63", "ZN64", "ZN65", "ZN66", "ZN67", "ZN68", "ZN69",
"ZN70", "ZN71", "ZN72", "ZN73", "ZN74", "ZN75", "ZN76", "ZN77", "ZN78", "ZN79",
"ZN80", "ZN81", "ZN82", "ZN83", "GA", "GA56", "GA57", "GA58", "GA59", "GA60",
"GA61", "GA62", "GA63", "GA64", "GA65", "GA66", "GA67", "GA68", "GA69", "GA70",
"GA71", "GA72", "GA73", "GA74", "GA75", "GA76", "GA77", "GA78", "GA79", "GA80",
"GA81", "GA82", "GA83", "GA84", "GA85", "GA86", "GE", "GE58", "GE59", "GE60",
"GE61", "GE62", "GE63", "GE64", "GE65", "GE66", "GE67", "GE68", "GE69", "GE70",
"GE71", "GE72", "GE73", "GE74", "GE75", "GE76", "GE77", "GE78", "GE79", "GE80",
"GE81", "GE82", "GE83", "GE84", "GE85", "GE86", "GE87", "GE88", "GE89", "AS",
"AS60", "AS61", "AS62", "AS63", "AS64", "AS65", "AS66", "AS67", "AS68", "AS69",
"AS70", "AS71", "AS72", "AS73", "AS74", "AS75", "AS76", "AS77", "AS78", "AS79",
"AS80", "AS81", "AS82", "AS83", "AS84", "AS85", "AS86", "AS87", "AS88", "AS89",
"AS90", "AS91", "AS92", "SE", "SE65", "SE66", "SE67", "SE68", "SE69", "SE70",
"SE71", "SE72", "SE73", "SE74", "SE75", "SE76", "SE77", "SE78", "SE79", "SE80",
"SE81", "SE82", "SE83", "SE84", "SE85", "SE86", "SE87", "SE88", "SE89", "SE90",
"SE91", "SE92", "SE93", "SE94", "BR", "BR67", "BR68", "BR69", "BR70", "BR71",
"BR72", "BR73", "BR74", "BR75", "BR76", "BR77", "BR78", "BR79", "BR80", "BR81",
"BR82", "BR83", "BR84", "BR85", "BR86", "BR87", "BR88", "BR89", "BR90", "BR91",
"BR92", "BR93", "BR94", "BR95", "BR96", "BR97", "KR", "KR69", "KR70", "KR71",
"KR72", "KR73", "KR74", "KR75", "KR76", "KR77", "KR78", "KR79", "KR80", "KR81",
"KR82", "KR83", "KR84", "KR85", "KR86", "KR87", "KR88", "KR89", "KR90", "KR91",
"KR92", "KR93", "KR94", "KR95", "KR96", "KR97", "KR98", "KR99", "KR100", "RB",
"RB71", "RB72", "RB73", "RB74", "RB75", "RB76", "RB77", "RB78", "RB79", "RB80",
"RB81", "RB82", "RB83", "RB84", "RB85", "RB86", "RB87", "RB88", "RB89", "RB90",
"RB91", "RB92", "RB93", "RB94", "RB95", "RB96", "RB97", "RB98", "RB99",
"RB100", "RB101", "RB102", "SR", "SR73", "SR74", "SR75", "SR76", "SR77",
"SR78", "SR79", "SR80", "SR81", "SR82", "SR83", "SR84", "SR85", "SR86", "SR87",
"SR88", "SR89", "SR90", "SR91", "SR92", "SR93", "SR94", "SR95", "SR96", "SR97",
"SR98", "SR99", "SR100", "SR101", "SR102", "SR103", "SR104", "SR105", "Y",
"Y76", "Y77", "Y78", "Y79", "Y80", "Y81", "Y82", "Y83", "Y84", "Y85", "Y86",
"Y87", "Y88", "Y89", "Y90", "Y91", "Y92", "Y93", "Y94", "Y95", "Y96", "Y97",
"Y98", "Y99", "Y100", "Y101", "Y102", "Y103", "Y104", "Y105", "Y106", "Y107",
"Y108", "ZR", "ZR78", "ZR79", "ZR80", "ZR81", "ZR82", "ZR83", "ZR84", "ZR85",
"ZR86", "ZR87", "ZR88", "ZR89", "ZR90", "ZR91", "ZR92", "ZR93", "ZR94", "ZR95",
"ZR96", "ZR97", "ZR98", "ZR99", "ZR100", "ZR101", "ZR102", "ZR103", "ZR104",
"ZR105", "ZR106", "ZR107", "ZR108", "ZR109", "ZR110", "NB", "NB81", "NB82",
"NB83", "NB84", "NB85", "NB86", "NB87", "NB88", "NB89", "NB90", "NB91", "NB92",
"NB93", "NB94", "NB95", "NB96", "NB97", "NB98", "NB99", "NB100", "NB101",
"NB102", "NB103", "NB104", "NB105", "NB106", "NB107", "NB108", "NB109",
"NB110", "NB111", "NB112", "NB113", "MO", "MO83", "MO84", "MO85", "MO86",
"MO87", "MO88", "MO89", "MO90", "MO91", "MO92", "MO93", "MO94", "MO95", "MO96",
"MO97", "MO98", "MO99", "MO100", "MO101", "MO102", "MO103", "MO104", "MO105",
"MO106", "MO107", "MO108", "MO109", "MO110", "MO111", "MO112", "MO113",
"MO114", "MO115", "TC", "TC85", "TC86", "TC87", "TC88", "TC89", "TC90", "TC91",
"TC92", "TC93", "TC94", "TC95", "TC96", "TC97", "TC98", "TC99", "TC100",
"TC101", "TC102", "TC103", "TC104", "TC105", "TC106", "TC107", "TC108",
"TC109", "TC110", "TC111", "TC112", "TC113", "TC114", "TC115", "TC116",
"TC117", "TC118", "RU", "RU87", "RU88", "RU89", "RU90", "RU91", "RU92", "RU93",
"RU94", "RU95", "RU96", "RU97", "RU98", "RU99", "RU100", "RU101", "RU102",
"RU103", "RU104", "RU105", "RU106", "RU107", "RU108", "RU109", "RU110",
"RU111", "RU112", "RU113", "RU114", "RU115", "RU116", "RU117", "RU118",
"RU119", "RU120", "RH", "RH89", "RH90", "RH91", "RH92", "RH93", "RH94", "RH95",
"RH96", "RH97", "RH98", "RH99", "RH100", "RH101", "RH102", "RH103", "RH104",
"RH105", "RH106", "RH107", "RH108", "RH109", "RH110", "RH111", "RH112",
"RH113", "RH114", "RH115", "RH116", "RH117", "RH118", "RH119", "RH120",
"RH121", "RH122", "PD", "PD91", "PD92", "PD93", "PD94", "PD95", "PD96", "PD97",
"PD98", "PD99", "PD100", "PD101", "PD102", "PD103", "PD104", "PD105", "PD106",
"PD107", "PD108", "PD109", "PD110", "PD111", "PD112", "PD113", "PD114",
"PD115", "PD116", "PD117", "PD118", "PD119", "PD120", "PD121", "PD122",
"PD123", "PD124", "AG", "AG93", "AG94", "AG95", "AG96", "AG97", "AG98", "AG99",
"AG100", "AG101", "AG102", "AG103", "AG104", "AG105", "AG106", "AG107",
"AG108", "AG109", "AG110", "AG111", "AG112", "AG113", "AG114", "AG115",
"AG116", "AG117", "AG118", "AG119", "AG120", "AG121", "AG122", "AG123",
"AG124", "AG125", "AG126", "AG127", "AG128", "AG129", "AG130", "CD", "CD95",
"CD96", "CD97", "CD98", "CD99", "CD100", "CD101", "CD102", "CD103", "CD104",
"CD105", "CD106", "CD107", "CD108", "CD109", "CD110", "CD111", "CD112",
"CD113", "CD114", "CD115", "CD116", "CD117", "CD118", "CD119", "CD120",
"CD121", "CD122", "CD123", "CD124", "CD125", "CD126", "CD127", "CD128",
"CD129", "CD130", "CD131", "CD132", "IN", "IN97", "IN98", "IN99", "IN100",
"IN101", "IN102", "IN103", "IN104", "IN105", "IN106", "IN107", "IN108",
"IN109", "IN110", "IN111", "IN112", "IN113", "IN114", "IN115", "IN116",
"IN117", "IN118", "IN119", "IN120", "IN121", "IN122", "IN123", "IN124",
"IN125", "IN126", "IN127", "IN128", "IN129", "IN130", "IN131", "IN132",
"IN133", "IN134", "IN135", "SN", "SN99", "SN100", "SN101", "SN102", "SN103",
"SN104", "SN105", "SN106", "SN107", "SN108", "SN109", "SN110", "SN111",
"SN112", "SN113", "SN114", "SN115", "SN116", "SN117", "SN118", "SN119",
"SN120", "SN121", "SN122", "SN123", "SN124", "SN125", "SN126", "SN127",
"SN128", "SN129", "SN130", "SN131", "SN132", "SN133", "SN134", "SN135",
"SN136", "SN137", "SB", "SB103", "SB104", "SB105", "SB106", "SB107", "SB108",
"SB109", "SB110", "SB111", "SB112", "SB113", "SB114", "SB115", "SB116",
"SB117", "SB118", "SB119", "SB120", "SB121", "SB122", "SB123", "SB124",
"SB125", "SB126", "SB127", "SB128", "SB129", "SB130", "SB131", "SB132",
"SB133", "SB134", "SB135", "SB136", "SB137", "SB138", "SB139", "TE", "TE105",
"TE106", "TE107", "TE108", "TE109", "TE110", "TE111", "TE112", "TE113",
"TE114", "TE115", "TE116", "TE117", "TE118", "TE119", "TE120", "TE121",
"TE122", "TE123", "TE124", "TE125", "TE126", "TE127", "TE128", "TE129",
"TE130", "TE131", "TE132", "TE133", "TE134", "TE135", "TE136", "TE137",
"TE138", "TE139", "TE140", "TE141", "TE142", "I", "I108", "I109", "I110",
"I111", "I112", "I113", "I114", "I115", "I116", "I117", "I118", "I119", "I120",
"I121", "I122", "I123", "I124", "I125", "I126", "I127", "I128", "I129", "I130",
"I131", "I132", "I133", "I134", "I135", "I136", "I137", "I138", "I139", "I140",
"I141", "I142", "I143", "I144", "XE", "XE110", "XE111", "XE112", "XE113",
"XE114", "XE115", "XE116", "XE117", "XE118", "XE119", "XE120", "XE121",
"XE122", "XE123", "XE124", "XE125", "XE126", "XE127", "XE128", "XE129",
"XE130", "XE131", "XE132", "XE133", "XE134", "XE135", "XE136", "XE137",
"XE138", "XE139", "XE140", "XE141", "XE142", "XE143", "XE144", "XE145",
"XE146", "XE147", "CS", "CS112", "CS113", "CS114", "CS115", "CS116", "CS117",
"CS118", "CS119", "CS120", "CS121", "CS122", "CS123", "CS124", "CS125",
"CS126", "CS127", "CS128", "CS129", "CS130", "CS131", "CS132", "CS133",
"CS134", "CS135", "CS136", "CS137", "CS138", "CS139", "CS140", "CS141",
"CS142", "CS143", "CS144", "CS145", "CS146", "CS147", "CS148", "CS149",
"CS150", "CS151", "BA", "BA114", "BA115", "BA116", "BA117", "BA118", "BA119",
"BA120", "BA121", "BA122", "BA123", "BA124", "BA125", "BA126", "BA127",
"BA128", "BA129", "BA130", "BA131", "BA132", "BA133", "BA134", "BA135",
"BA136", "BA137", "BA138", "BA139", "BA140", "BA141", "BA142", "BA143",
"BA144", "BA145", "BA146", "BA147", "BA148", "BA149", "BA150", "BA151",
"BA152", "BA153", "LA", "LA117", "LA118", "LA119", "LA120", "LA121", "LA122",
"LA123", "LA124", "LA125", "LA126", "LA127", "LA128", "LA129", "LA130",
"LA131", "LA132", "LA133", "LA134", "LA135", "LA136", "LA137", "LA138",
"LA139", "LA140", "LA141", "LA142", "LA143", "LA144", "LA145", "LA146",
"LA147", "LA148", "LA149", "LA150", "LA151", "LA152", "LA153", "LA154",
"LA155", "CE", "CE119", "CE120", "CE121", "CE122", "CE123", "CE124", "CE125",
"CE126", "CE127", "CE128", "CE129", "CE130", "CE131", "CE132", "CE133",
"CE134", "CE135", "CE136", "CE137", "CE138", "CE139", "CE140", "CE141",
"CE142", "CE143", "CE144", "CE145", "CE146", "CE147", "CE148", "CE149",
"CE150", "CE151", "CE152", "CE153", "CE154", "CE155", "CE156", "CE157", "PR",
"PR121", "PR122", "PR123", "PR124", "PR125", "PR126", "PR127", "PR128",
"PR129", "PR130", "PR131", "PR132", "PR133", "PR134", "PR135", "PR136",
"PR137", "PR138", "PR139", "PR140", "PR141", "PR142", "PR143", "PR144",
"PR145", "PR146", "PR147", "PR148", "PR149", "PR150", "PR151", "PR152",
"PR153", "PR154", "PR155", "PR156", "PR157", "PR158", "PR159", "ND", "ND124",
"ND125", "ND126", "ND127", "ND128", "ND129", "ND130", "ND131", "ND132",
"ND133", "ND134", "ND135", "ND136", "ND137", "ND138", "ND139", "ND140",
"ND141", "ND142", "ND143", "ND144", "ND145", "ND146", "ND147", "ND148",
"ND149", "ND150", "ND151", "ND152", "ND153", "ND154", "ND155", "ND156",
"ND157", "ND158", "ND159", "ND160", "ND161", "PM", "PM126", "PM127", "PM128",
"PM129", "PM130", "PM131", "PM132", "PM133", "PM134", "PM135", "PM136",
"PM137", "PM138", "PM139", "PM140", "PM141", "PM142", "PM143", "PM144",
"PM145", "PM146", "PM147", "PM148", "PM149", "PM150", "PM151", "PM152",
"PM153", "PM154", "PM155", "PM156", "PM157", "PM158", "PM159", "PM160",
"PM161", "PM162", "PM163", "SM", "SM128", "SM129", "SM130", "SM131", "SM132",
"SM133", "SM134", "SM135", "SM136", "SM137", "SM138", "SM139", "SM140",
"SM141", "SM142", "SM143", "SM144", "SM145", "SM146", "SM147", "SM148",
"SM149", "SM150", "SM151", "SM152", "SM153", "SM154", "SM155", "SM156",
"SM157", "SM158", "SM159", "SM160", "SM161", "SM162", "SM163", "SM164",
"SM165", "EU", "EU130", "EU131", "EU132", "EU133", "EU134", "EU135", "EU136",
"EU137", "EU138", "EU139", "EU140", "EU141", "EU142", "EU143", "EU144",
"EU145", "EU146", "EU147", "EU148", "EU149", "EU150", "EU151", "EU152",
"EU153", "EU154", "EU155", "EU156", "EU157", "EU158", "EU159", "EU160",
"EU161", "EU162", "EU163", "EU164", "EU165", "EU166", "EU167", "GD", "GD134",
"GD135", "GD136", "GD137", "GD138", "GD139", "GD140", "GD141", "GD142",
"GD143", "GD144", "GD145", "GD146", "GD147", "GD148", "GD149", "GD150",
"GD151", "GD152", "GD153", "GD154", "GD155", "GD156", "GD157", "GD158",
"GD159", "GD160", "GD161", "GD162", "GD163", "GD164", "GD165", "GD166",
"GD167", "GD168", "GD169", "TB", "TB136", "TB137", "TB138", "TB139", "TB140",
"TB141", "TB142", "TB143", "TB144", "TB145", "TB146", "TB147", "TB148",
"TB149", "TB150", "TB151", "TB152", "TB153", "TB154", "TB155", "TB156",
"TB157", "TB158", "TB159", "TB160", "TB161", "TB162", "TB163", "TB164",
"TB165", "TB166", "TB167", "TB168", "TB169", "TB170", "TB171", "DY", "DY138",
"DY139", "DY140", "DY141", "DY142", "DY143", "DY144", "DY145", "DY146",
"DY147", "DY148", "DY149", "DY150", "DY151", "DY152", "DY153", "DY154",
"DY155", "DY156", "DY157", "DY158", "DY159", "DY160", "DY161", "DY162",
"DY163", "DY164", "DY165", "DY166", "DY167", "DY168", "DY169", "DY170",
"DY171", "DY172", "DY173", "HO", "HO140", "HO141", "HO142", "HO143", "HO144",
"HO145", "HO146", "HO147", "HO148", "HO149", "HO150", "HO151", "HO152",
"HO153", "HO154", "HO155", "HO156", "HO157", "HO158", "HO159", "HO160",
"HO161", "HO162", "HO163", "HO164", "HO165", "HO166", "HO167", "HO168",
"HO169", "HO170", "HO171", "HO172", "HO173", "HO174", "HO175", "ER", "ER143",
"ER144", "ER145", "ER146", "ER147", "ER148", "ER149", "ER150", "ER151",
"ER152", "ER153", "ER154", "ER155", "ER156", "ER157", "ER158", "ER159",
"ER160", "ER161", "ER162", "ER163", "ER164", "ER165", "ER166", "ER167",
"ER168", "ER169", "ER170", "ER171", "ER172", "ER173", "ER174", "ER175",
"ER176", "ER177", "TM", "TM145", "TM146", "TM147", "TM148", "TM149", "TM150",
"TM151", "TM152", "TM153", "TM154", "TM155", "TM156", "TM157", "TM158",
"TM159", "TM160", "TM161", "TM162", "TM163", "TM164", "TM165", "TM166",
"TM167", "TM168", "TM169", "TM170", "TM171", "TM172", "TM173", "TM174",
"TM175", "TM176", "TM177", "TM178", "TM179", "YB", "YB148", "YB149", "YB150",
"YB151", "YB152", "YB153", "YB154", "YB155", "YB156", "YB157", "YB158",
"YB159", "YB160", "YB161", "YB162", "YB163", "YB164", "YB165", "YB166",
"YB167", "YB168", "YB169", "YB170", "YB171", "YB172", "YB173", "YB174",
"YB175", "YB176", "YB177", "YB178", "YB179", "YB180", "YB181", "LU", "LU150",
"LU151", "LU152", "LU153", "LU154", "LU155", "LU156", "LU157", "LU158",
"LU159", "LU160", "LU161", "LU162", "LU163", "LU164", "LU165", "LU166",
"LU167", "LU168", "LU169", "LU170", "LU171", "LU172", "LU173", "LU174",
"LU175", "LU176", "LU177", "LU178", "LU179", "LU180", "LU181", "LU182",
"LU183", "LU184", "HF", "HF153", "HF154", "HF155", "HF156", "HF157", "HF158",
"HF159", "HF160", "HF161", "HF162", "HF163", "HF164", "HF165", "HF166",
"HF167", "HF168", "HF169", "HF170", "HF171", "HF172", "HF173", "HF174",
"HF175", "HF176", "HF177", "HF178", "HF179", "HF180", "HF181", "HF182",
"HF183", "HF184", "HF185", "HF186", "HF187", "HF188", "TA", "TA155", "TA156",
"TA157", "TA158", "TA159", "TA160", "TA161", "TA162", "TA163", "TA164",
"TA165", "TA166", "TA167", "TA168", "TA169", "TA170", "TA171", "TA172",
"TA173", "TA174", "TA175", "TA176", "TA177", "TA178", "TA179", "TA180",
"TA181", "TA182", "TA183", "TA184", "TA185", "TA186", "TA187", "TA188",
"TA189", "TA190", "W", "W158", "W159", "W160", "W161", "W162", "W163", "W164",
"W165", "W166", "W167", "W168", "W169", "W170", "W171", "W172", "W173", "W174",
"W175", "W176", "W177", "W178", "W179", "W180", "W181", "W182", "W183", "W184",
"W185", "W186", "W187", "W188", "W189", "W190", "W191", "W192", "RE", "RE160",
"RE161", "RE162", "RE163", "RE164", "RE165", "RE166", "RE167", "RE168",
"RE169", "RE170", "RE171", "RE172", "RE173", "RE174", "RE175", "RE176",
"RE177", "RE178", "RE179", "RE180", "RE181", "RE182", "RE183", "RE184",
"RE185", "RE186", "RE187", "RE188", "RE189", "RE190", "RE191", "RE192",
"RE193", "RE194", "OS", "OS162", "OS163", "OS164", "OS165", "OS166", "OS167",
"OS168", "OS169", "OS170", "OS171", "OS172", "OS173", "OS174", "OS175",
"OS176", "OS177", "OS178", "OS179", "OS180", "OS181", "OS182", "OS183",
"OS184", "OS185", "OS186", "OS187", "OS188", "OS189", "OS190", "OS191",
"OS192", "OS193", "OS194", "OS195", "OS196", "IR", "IR164", "IR165", "IR166",
"IR167", "IR168", "IR169", "IR170", "IR171", "IR172", "IR173", "IR174",
"IR175", "IR176", "IR177", "IR178", "IR179", "IR180", "IR181", "IR182",
"IR183", "IR184", "IR185", "IR186", "IR187", "IR188", "IR189", "IR190",
"IR191", "IR192", "IR193", "IR194", "IR195", "IR196", "IR197", "IR198",
"IR199", "PT", "PT166", "PT167", "PT168", "PT169", "PT170", "PT171", "PT172",
"PT173", "PT174", "PT175", "PT176", "PT177", "PT178", "PT179", "PT180",
"PT181", "PT182", "PT183", "PT184", "PT185", "PT186", "PT187", "PT188",
"PT189", "PT190", "PT191", "PT192", "PT193", "PT194", "PT195", "PT196",
"PT197", "PT198", "PT199", "PT200", "PT201", "PT202", "AU", "AU169", "AU170",
"AU171", "AU172", "AU173", "AU174", "AU175", "AU176", "AU177", "AU178",
"AU179", "AU180", "AU181", "AU182", "AU183", "AU184", "AU185", "AU186",
"AU187", "AU188", "AU189", "AU190", "AU191", "AU192", "AU193", "AU194",
"AU195", "AU196", "AU197", "AU198", "AU199", "AU200", "AU201", "AU202",
"AU203", "AU204", "AU205", "HG", "HG171", "HG172", "HG173", "HG174", "HG175",
"HG176", "HG177", "HG178", "HG179", "HG180", "HG181", "HG182", "HG183",
"HG184", "HG185", "HG186", "HG187", "HG188", "HG189", "HG190", "HG191",
"HG192", "HG193", "HG194", "HG195", "HG196", "HG197", "HG198", "HG199",
"HG200", "HG201", "HG202", "HG203", "HG204", "HG205", "HG206", "HG207",
"HG208", "HG209", "HG210", "TL", "TL176", "TL177", "TL178", "TL179", "TL180",
"TL181", "TL182", "TL183", "TL184", "TL185", "TL186", "TL187", "TL188",
"TL189", "TL190", "TL191", "TL192", "TL193", "TL194", "TL195", "TL196",
"TL197", "TL198", "TL199", "TL200", "TL201", "TL202", "TL203", "TL204",
"TL205", "TL206", "TL207", "TL208", "TL209", "TL210", "TL211", "TL212", "PB",
"PB178", "PB179", "PB180", "PB181", "PB182", "PB183", "PB184", "PB185",
"PB186", "PB187", "PB188", "PB189", "PB190", "PB191", "PB192", "PB193",
"PB194", "PB195", "PB196", "PB197", "PB198", "PB199", "PB200", "PB201",
"PB202", "PB203", "PB204", "PB205", "PB206", "PB207", "PB208", "PB209",
"PB210", "PB211", "PB212", "PB213", "PB214", "PB215", "BI", "BI184", "BI185",
"BI186", "BI187", "BI188", "BI189", "BI190", "BI191", "BI192", "BI193",
"BI194", "BI195", "BI196", "BI197", "BI198", "BI199", "BI200", "BI201",
"BI202", "BI203", "BI204", "BI205", "BI206", "BI207", "BI208", "BI209",
"BI210", "BI211", "BI212", "BI213", "BI214", "BI215", "BI216", "BI217",
"BI218", "PO", "PO188", "PO189", "PO190", "PO191", "PO192", "PO193", "PO194",
"PO195", "PO196", "PO197", "PO198", "PO199", "PO200", "PO201", "PO202",
"PO203", "PO204", "PO205", "PO206", "PO207", "PO208", "PO209", "PO210",
"PO211", "PO212", "PO213", "PO214", "PO215", "PO216", "PO217", "PO218",
"PO219", "PO220", "AT", "AT193", "AT194", "AT195", "AT196", "AT197", "AT198",
"AT199", "AT200", "AT201", "AT202", "AT203", "AT204", "AT205", "AT206",
"AT207", "AT208", "AT209", "AT210", "AT211", "AT212", "AT213", "AT214",
"AT215", "AT216", "AT217", "AT218", "AT219", "AT220", "AT221", "AT222",
"AT223", "RN", "RN195", "RN196", "RN197", "RN198", "RN199", "RN200", "RN201",
"RN202", "RN203", "RN204", "RN205", "RN206", "RN207", "RN208", "RN209",
"RN210", "RN211", "RN212", "RN213", "RN214", "RN215", "RN216", "RN217",
"RN218", "RN219", "RN220", "RN221", "RN222", "RN223", "RN224", "RN225",
"RN226", "RN227", "RN228", "FR", "FR199", "FR200", "FR201", "FR202", "FR203",
"FR204", "FR205", "FR206", "FR207", "FR208", "FR209", "FR210", "FR211",
"FR212", "FR213", "FR214", "FR215", "FR216", "FR217", "FR218", "FR219",
"FR220", "FR221", "FR222", "FR223", "FR224", "FR225", "FR226", "FR227",
"FR228", "FR229", "FR230", "FR231", "FR232", "RA", "RA202", "RA203", "RA204",
"RA205", "RA206", "RA207", "RA208", "RA209", "RA210", "RA211", "RA212",
"RA213", "RA214", "RA215", "RA216", "RA217", "RA218", "RA219", "RA220",
"RA221", "RA222", "RA223", "RA224", "RA225", "RA226", "RA227", "RA228",
"RA229", "RA230", "RA231", "RA232", "RA233", "RA234", "AC", "AC206", "AC207",
"AC208", "AC209", "AC210", "AC211", "AC212", "AC213", "AC214", "AC215",
"AC216", "AC217", "AC218", "AC219", "AC220", "AC221", "AC222", "AC223",
"AC224", "AC225", "AC226", "AC227", "AC228", "AC229", "AC230", "AC231",
"AC232", "AC233", "AC234", "AC235", "AC236", "TH", "TH209", "TH210", "TH211",
"TH212", "TH213", "TH214", "TH215", "TH216", "TH217", "TH218", "TH219",
"TH220", "TH221", "TH222", "TH223", "TH224", "TH225", "TH226", "TH227",
"TH228", "TH229", "TH230", "TH231", "TH232", "TH233", "TH234", "TH235",
"TH236", "TH237", "TH238", "PA", "PA212", "PA213", "PA214", "PA215", "PA216",
"PA217", "PA218", "PA219", "PA220", "PA221", "PA222", "PA223", "PA224",
"PA225", "PA226", "PA227", "PA228", "PA229", "PA230", "PA231", "PA232",
"PA233", "PA234", "PA235", "PA236", "PA237", "PA238", "PA239", "PA240", "U",
"U217", "U218", "U219", "U220", "U221", "U222", "U223", "U224", "U225", "U226",
"U227", "U228", "U229", "U230", "U231", "U232", "U233", "U234", "U235", "U236",
"U237", "U238", "U239", "U240", "U241", "U242", "NP", "NP225", "NP226",
"NP227", "NP228", "NP229", "NP230", "NP231", "NP232", "NP233", "NP234",
"NP235", "NP236", "NP237", "NP238", "NP239", "NP240", "NP241", "NP242",
"NP243", "NP244", "PU", "PU228", "PU229", "PU230", "PU231", "PU232", "PU233",
"PU234", "PU235", "PU236", "PU237", "PU238", "PU239", "PU240", "PU241",
"PU242", "PU243", "PU244", "PU245", "PU246", "PU247", "AM", "AM231", "AM232",
"AM233", "AM234", "AM235", "AM236", "AM237", "AM238", "AM239", "AM240",
"AM241", "AM242", "AM243", "AM244", "AM245", "AM246", "AM247", "AM248",
"AM249", "CM", "CM233", "CM234", "CM235", "CM236", "CM237", "CM238", "CM239",
"CM240", "CM241", "CM242", "CM243", "CM244", "CM245", "CM246", "CM247",
"CM248", "CM249", "CM250", "CM251", "CM252", "BK", "BK235", "BK236", "BK237",
"BK238", "BK239", "BK240", "BK241", "BK242", "BK243", "BK244", "BK245",
"BK246", "BK247", "BK248", "BK249", "BK250", "BK251", "BK252", "BK253",
"BK254", "CF", "CF237", "CF238", "CF239", "CF240", "CF241", "CF242", "CF243",
"CF244", "CF245", "CF246", "CF247", "CF248", "CF249", "CF250", "CF251",
"CF252", "CF253", "CF254", "CF255", "CF256", "ES", "ES240", "ES241", "ES242",
"ES243", "ES244", "ES245", "ES246", "ES247", "ES248", "ES249", "ES250",
"ES251", "ES252", "ES253", "ES254", "ES255", "ES256", "ES257", "ES258", "FM",
"FM242", "FM243", "FM244", "FM245", "FM246", "FM247", "FM248", "FM249",
"FM250", "FM251", "FM252", "FM253", "FM254", "FM255", "FM256", "FM257",
"FM258", "FM259", "FM260", "MD", "MD245", "MD246", "MD247", "MD248", "MD249",
"MD250", "MD251", "MD252", "MD253", "MD254", "MD255", "MD256", "MD257",
"MD258", "MD259", "MD260", "MD261", "MD262", "NO", "NO248", "NO249", "NO250",
"NO251", "NO252", "NO253", "NO254", "NO255", "NO256", "NO257", "NO258",
"NO259", "NO260", "NO261", "NO262", "NO263", "NO264", "LR", "LR251", "LR252",
"LR253", "LR254", "LR255", "LR256", "LR257", "LR258", "LR259", "LR260",
"LR261", "LR262", "LR263", "LR264", "LR265", "LR266", "RF", "RF253", "RF254",
"RF255", "RF256", "RF257", "RF258", "RF259", "RF260", "RF261", "RF262",
"RF263", "RF264", "RF265", "RF266", "RF267", "RF268", "DB", "DB255", "DB256",
"DB257", "DB258", "DB259", "DB260", "DB261", "DB262", "DB263", "DB264",
"DB265", "DB266", "DB267", "DB268", "DB269", "DB270", "SG", "SG258", "SG259",
"SG260", "SG261", "SG262", "SG263", "SG264", "SG265", "SG266", "SG267",
"SG268", "SG269", "SG270", "SG271", "SG272", "SG273", "BH", "BH260", "BH261",
"BH262", "BH263", "BH264", "BH265", "BH266", "BH267", "BH268", "BH269",
"BH270", "BH271", "BH272", "BH273", "BH274", "BH275", "HS", "HS263", "HS264",
"HS265", "HS266", "HS267", "HS268", "HS269", "HS270", "HS271", "HS272",
"HS273", "HS274", "HS275", "HS276", "HS277", "MT", "MT265", "MT266", "MT267",
"MT268", "MT269", "MT270", "MT271", "MT272", "MT273", "MT274", "MT275",
"MT276", "MT277", "MT278", "MT279", "DS", "DS267", "DS268", "DS269", "DS270",
"DS271", "DS272", "DS273", "DS274", "DS275", "DS276", "DS277", "DS278",
"DS279", "DS280", "DS281", "RG", "RG272", "RG273", "RG274", "RG275", "RG276",
"RG277", "RG278", "RG279", "RG280", "RG281", "RG282", "RG283", "UUB",
"UUB277", "UUB278", "UUB279", "UUB280", "UUB281", "UUB282", "UUB283",
"UUB284", "UUB285", "UUT", "UUT283", "UUT284", "UUT285", "UUT286", "UUT287",
"UUQ", "UUQ285", "UUQ286", "UUQ287", "UUQ288", "UUQ289", "UUP", "UUP287",
"UUP288", "UUP289", "UUP290", "UUP291", "UUH", "UUH289", "UUH290", "UUH291",
"UUH292", "UUS", "UUS291", "UUS292", "UUO", "UUO293"]
_temp_iso_mass = [
1.00782503207, 1.00782503207, 2.01410177785, 2.01410177785, 3.01604927767,
3.01604927767, 4.027806424, 5.035311488, 6.044942594, 7.052749,
4.00260325415, 3.01602931914, 4.00260325415, 5.012223624, 6.018889124,
7.028020618, 8.033921897, 9.043950286, 10.052398837, 7.016004548, 3.030775,
4.027185558, 5.0125378, 6.015122794, 7.016004548, 8.022487362, 9.026789505,
10.035481259, 11.043797715, 12.053780, 9.012182201, 5.040790, 6.019726317,
7.016929828, 8.005305103, 9.012182201, 10.013533818, 11.021657749,
12.026920737, 13.035693007, 14.04289292, 15.053460, 16.061920, 11.009305406,
6.046810, 7.029917901, 8.024607233, 9.013328782, 10.012936992, 11.009305406,
12.014352104, 13.017780217, 14.025404009, 15.031103021, 16.039808829,
17.046989906, 18.056170, 19.063730, 12, 8.037675025, 9.031036689,
10.016853228, 11.011433613, 12, 13.00335483778, 14.0032419887, 15.010599256,
16.014701252, 17.022586116, 18.026759354, 19.034805018, 20.040319754,
21.049340, 22.057200, 14.00307400478, 10.041653674, 11.026090956,
12.018613197, 13.005738609, 14.00307400478, 15.00010889823, 16.006101658,
17.008450261, 18.014078959, 19.017028697, 20.023365807, 21.02710824,
22.034394934, 23.041220, 24.051040, 25.060660, 15.99491461956,
12.034404895, 13.024812213, 14.00859625, 15.003065617, 15.99491461956,
16.999131703, 17.999161001, 19.00358013, 20.004076742, 21.008655886,
22.009966947, 23.015687659, 24.020472917, 25.029460, 26.038340, 27.048260,
28.057810, 18.998403224, 14.035060, 15.018009103, 16.011465724,
17.002095237, 18.000937956, 18.998403224, 19.999981315, 20.999948951,
22.002998815, 23.003574631, 24.008115485, 25.012101747, 26.019615555,
27.026760086, 28.035670, 29.043260, 30.052500, 31.060429, 19.99244017542,
16.025761262, 17.017671504, 18.005708213, 19.001880248, 19.99244017542,
20.993846684, 21.991385113, 22.994466904, 23.993610779, 24.997736888,
26.000461206, 27.007589903, 28.012071575, 29.019385933, 30.024801045,
31.033110, 32.040020, 33.049380, 34.057028, 22.98976928087, 18.025969,
19.013877499, 20.007351328, 20.997655206, 21.994436425, 22.98976928087,
23.990962782, 24.989953968, 25.992633, 26.994076788, 27.998938, 29.002861,
30.008976, 31.013585452, 32.02046656, 33.026719756, 34.035170, 35.042493,
36.051480, 37.059340, 23.985041699, 19.03547, 20.018862545, 21.01171291,
21.999573843, 22.994123669, 23.985041699, 24.985836917, 25.982592929,
26.984340585, 27.983876825, 28.9886, 29.990434, 30.996546, 31.998975,
33.005254, 34.009456424, 35.017340, 36.023000, 37.031400, 38.037570,
39.046772, 40.053930, 26.981538627, 21.028040, 22.019520, 23.007267432,
23.999938865, 24.990428095, 25.986891692, 26.981538627, 27.981910306,
28.980445046, 29.982960256, 30.983946619, 31.988124489, 32.990843336,
33.996851837, 34.999860235, 36.006207204, 37.01067782, 38.017231021,
39.02297, 40.031450, 41.038330, 42.046890, 27.97692653246, 22.034530,
23.025520, 24.011545616, 25.004105574, 25.992329921, 26.986704905,
27.97692653246, 28.9764947, 29.973770171, 30.975363226999998,
31.974148082, 32.97800022, 33.978575524, 34.984583575, 35.986599477,
36.99293608, 37.995633601, 39.002070013, 40.005869121, 41.01456,
42.019790, 43.028660, 44.035260, 30.973761629, 24.034350, 25.020260,
26.011780, 26.999230236, 27.992314761, 28.981800606, 29.978313789,
30.973761629, 31.973907274, 32.971725543, 33.973636257, 34.973314117,
35.97825968, 36.979608946, 37.984156827, 38.986179475, 39.991296951,
40.994335435, 42.001007913, 43.00619, 44.012990, 45.019220, 46.027380,
31.972070999, 26.027880, 27.018833, 28.004372763, 28.996608049,
29.984903249, 30.979554728, 31.972070999, 32.971458759, 33.967866902,
34.969032161, 35.96708076, 36.971125567, 37.971163317, 38.975134306,
39.975451728, 40.979582149, 41.981022419, 42.98715479, 43.99021339,
44.996508112, 46.000750, 47.008590, 48.014170, 49.023619, 34.968852682,
28.028510, 29.014110, 30.004770, 30.992413086, 31.985689901, 32.977451887,
33.973762819, 34.968852682, 35.968306981, 36.965902591, 37.968010425,
38.968008164, 39.970415472, 40.970684525, 41.973254804, 42.974054403,
43.978281071, 44.980286886, 45.98421004, 46.988710, 47.994950, 49.000320,
50.007840, 51.014490, 39.96238312251, 30.021560, 31.012123, 31.997637984,
32.989925709, 33.980271244, 34.975257585, 35.967545105, 36.96677632,
37.962732394, 38.964313231, 39.96238312251, 40.964500611, 41.963045736,
42.965636056, 43.964924033, 44.968039956, 45.968094129, 46.972186792,
47.974540, 48.980520, 49.984430, 50.991630, 51.996780, 53.004940,
38.963706679, 32.021920, 33.007260, 33.998410, 34.988009692, 35.981292235,
36.973375889, 37.969081184, 38.963706679, 39.963998475, 40.961825762,
41.96240281, 42.96071554, 43.961556804, 44.960699493, 45.961976864,
46.961678473, 47.965513535, 48.967450928, 49.972783355, 50.976380,
51.982610, 52.987120, 53.994200, 54.999710, 39.962590983, 34.014120,
35.004940, 35.993087063, 36.985870269, 37.976318452, 38.970719725,
39.962590983, 40.962278062, 41.958618014, 42.958766628, 43.955481754,
44.956186566, 45.953692587, 46.954546006, 47.952534177, 48.955674148,
49.957518962, 50.961499214, 51.9651, 52.970050, 53.974350, 54.980550,
55.985570, 56.992356, 44.955911909, 36.014920, 37.003050, 37.994700,
38.984790002, 39.977967407, 40.969251125, 41.965516429, 42.961150658,
43.959402752, 44.955911909, 45.95517189, 46.952407508, 47.952231468,
48.950023975, 49.952187685, 50.953603368, 51.956675468, 52.959610,
53.963264561, 54.968243949, 55.972870, 56.977790, 57.983710, 58.989220,
59.995710, 47.947946281, 38.009770, 39.001610, 39.990498838, 40.983145,
41.973030902, 42.968522499, 43.959690069, 44.958125616, 45.952631555,
46.951763088, 47.947946281, 48.947869982, 49.944791194, 50.946614955,
51.946897311, 52.949727171, 53.951052401, 54.955265056, 55.958199639,
56.963989137, 57.966970, 58.972930, 59.976760, 60.983200, 61.987490,
62.994420, 50.943959507, 40.011090, 40.999780, 41.991230, 42.980650,
43.97411, 44.965775808, 45.960200481, 46.95490894, 47.952253707,
48.948516101, 49.947158485, 50.943959507, 51.944775479, 52.944337979,
53.946439854, 54.947233701, 55.950530966, 56.952561432, 57.956834136,
58.960207407, 59.965026862, 60.968480, 61.973780, 62.977550, 63.983470,
64.987920, 51.940507472, 42.006430, 42.997710, 43.985549, 44.97964,
45.968358635, 46.962900046, 47.954031716, 48.951335721, 49.946044205,
50.944767431, 51.940507472, 52.940649386, 53.938880395, 54.940839672,
55.940653139, 56.943613013, 57.944353129, 58.948586367, 59.950076033,
60.954717204, 61.95661319, 62.961860, 63.964410, 64.970160, 65.973380,
66.979550, 54.938045141, 44.006870, 44.994510, 45.986720, 46.976100,
47.96852, 48.959618005, 49.95423823, 50.948210787, 51.945565464,
52.941290117, 53.940358854, 54.938045141, 55.93890491, 56.938285378,
57.939981549, 58.940440237, 59.942911246, 60.944652638, 61.94842822,
62.95023999, 63.95424909, 64.956336065, 65.961080, 66.964140, 67.969300,
68.972840, 55.934937475, 45.014578, 46.000810, 46.992890, 47.980504,
48.973610, 49.962988982, 50.956819538, 51.948113875, 52.945307942,
53.939610501, 54.938293357, 55.934937475, 56.935393969, 57.933275558,
58.934875464, 59.934071683, 60.936745281, 61.936767442, 62.940369091,
63.941201265, 64.94538027, 65.946780638, 66.950947244, 67.9537, 68.958780,
69.961460, 70.966720, 71.969620, 58.933195048, 47.011490, 48.001760,
48.989720, 49.981540, 50.970720, 51.963590, 52.954218896, 53.948459635,
54.941999029, 55.939839278, 56.936291373, 57.935752814, 58.933195048,
59.933817059, 60.932475763, 61.934050563, 62.933611611, 63.935809908,
64.93647846, 65.939762004, 66.940889529, 67.944873058, 68.94632, 69.951,
70.9529, 71.957810, 72.960240, 73.965380, 74.968330, 57.935342907,
48.019750, 49.009660, 49.995930, 50.987720, 51.975680, 52.968470,
53.957905495, 54.951330251, 55.942132022, 56.939793526, 57.935342907,
58.934346705, 59.930786372, 60.931056033, 61.928345115, 62.929669374,
63.927965959, 64.930084304, 65.929139334, 66.931569414, 67.931868789,
68.935610269, 69.9365, 70.940736283, 71.942092682, 72.946470, 73.948070,
74.952870, 75.955330, 76.960550, 77.963180, 62.929597474, 51.997180,
52.985550, 53.976710, 54.966050, 55.958560, 56.949211078, 57.944538499,
58.939498028, 59.93736503, 60.933457821, 61.932583745, 62.929597474,
63.929764183, 64.927789485, 65.928868813, 66.927730314, 67.929610889,
68.929429269, 69.932392343, 70.932676833, 71.935820307, 72.936675282,
73.939874862, 74.9419, 75.945275026, 76.947850, 77.951960, 78.954560,
79.960870, 63.929142222, 53.992950, 54.983980, 55.972380, 56.964788,
57.954591555, 58.949263764, 59.941827035, 60.939510635, 61.934329764,
62.933211566, 63.929142222, 64.929240984, 65.926033419, 66.927127345,
67.924844154, 68.926550281, 69.925319274, 70.927721599, 71.926857951,
72.929779104, 73.929458609, 74.932936741, 75.93329357, 76.936958967,
77.938440216, 78.942652, 79.944342348, 80.950480, 81.954420, 82.961030,
68.925573587, 55.994910, 56.982930, 57.974250, 58.963370, 59.957060,
60.949446287, 61.944175238, 62.939294196, 63.936838747, 64.932734754,
65.93158901, 66.928201703, 67.927980084, 68.925573587, 69.926021972,
70.924701349, 71.926366268, 72.925174682, 73.926945762, 74.926500246,
75.928827626, 76.9291543, 77.93160818, 78.93289326, 79.936515781,
80.937752355, 81.942990, 82.946980, 83.952650, 84.957000, 85.963120,
73.921177767, 57.991010, 58.981750, 59.970190, 60.963790, 61.954650,
62.949640, 63.941653, 64.939436406, 65.933843453, 66.93273407,
67.92809424, 68.927964533, 69.924247381, 70.924950954, 71.922075815,
72.923458945, 73.921177767, 74.922858948, 75.921402557, 76.923548591,
77.922852739, 78.925400995, 79.925372392, 80.928820467, 81.929549725,
82.934620, 83.937470, 84.943030, 85.946490, 86.952510, 87.956910,
88.963830, 74.921596478, 59.993130, 60.980620, 61.973200, 62.963690,
63.957572, 64.949564, 65.94471, 66.939186071, 67.936769069, 68.932273675,
69.930924826, 70.927112428, 71.926752283, 72.923824844, 73.923928692,
74.921596478, 75.922394021, 76.920647286, 77.921827281, 78.920947934,
79.922533816, 80.922132287, 81.924504067, 82.924980024, 83.929058,
84.932020, 85.936500, 86.939900, 87.944940, 88.949390, 89.955500,
90.960430, 91.966800, 79.916521271, 64.964660, 65.955210, 66.950090,
67.941798, 68.939557817, 69.933390644, 70.932241822, 71.927112352,
72.926765345, 73.922476436, 74.922523368, 75.919213597, 76.919914038,
77.91730909, 78.918499098, 79.916521271, 80.917992474, 81.916699401,
82.919118473, 83.918462354, 84.922245053, 85.924271579, 86.928521358,
87.931423998, 88.936450, 89.939960, 90.945960, 91.949920, 92.956290,
93.960490, 78.918337087, 66.964790, 67.958516, 68.950106, 69.944792,
70.93874, 71.936644572, 72.931691524, 73.929891034, 74.925776207,
75.924541469, 76.921379082, 77.921145706, 78.918337087, 79.918529296,
80.916290563, 81.916804119, 82.915180421, 83.916478974, 84.915608403,
85.918797577, 86.920711324, 87.924065926, 88.926385334, 89.930627737,
90.933968095, 91.939258714, 92.943050, 93.948680, 94.952870, 95.958530,
96.962800, 85.910610729, 68.965180, 69.955259, 70.949625738, 71.942092038,
72.939289195, 73.933084369, 74.930945746, 75.925910078, 76.92467,
77.920364783, 78.920082431, 79.916378965, 80.916592015, 81.9134836,
82.914136099, 83.911506687, 84.912527331, 85.910610729, 86.913354862,
87.914446969, 88.917630581, 89.919516555, 90.923445215, 91.92615621,
92.931274357, 93.934360, 94.939840, 95.943070, 96.948560, 97.951910,
98.957600, 99.961140, 84.911789737, 70.965320, 71.959080, 72.950561,
73.944264751, 74.93857, 75.935072226, 76.930408, 77.928141, 78.92398946,
79.92251925, 80.918995913, 81.918208598, 82.915109701, 83.914384821,
84.911789737, 85.911167419, 86.909180526, 87.911315588, 88.912278016,
89.914801694, 90.916536958, 91.9197289, 92.922041876, 93.926404946,
94.929302889, 95.934272637, 96.937351916, 97.941790668, 98.945379283,
99.949870, 100.953196445, 101.958870, 87.905612124, 72.965970,
73.956310, 74.949949568, 75.941766782, 76.937944782, 77.93218,
78.929708, 79.924521013, 80.923211846, 81.918401639, 82.917556701,
83.913425275, 84.912932803, 85.909260204, 86.908877124, 87.905612124,
88.907450675, 89.907737888, 90.910203095, 91.911037858, 92.914025634,
93.915361312, 94.919358766, 95.921696802, 96.926152923, 97.928452934,
98.933240926, 99.935351911, 100.940517888, 101.943018987, 102.948950,
103.952330, 104.958580, 88.905848295, 75.958450, 76.949645, 77.943610,
78.937351634, 79.93428, 80.929127468, 81.926792451, 82.922354243,
83.920388264, 84.916433039, 85.914885576, 86.91087573, 87.909501146,
88.905848295, 89.907151886, 90.907304791, 91.908949143, 92.909582713,
93.911595245, 94.912820621, 95.915891343, 96.918133995, 97.92220302,
98.924636204, 99.927756586, 100.93031385, 101.933555695, 102.936730,
103.941050, 104.944870, 105.949790, 106.954140, 107.959480,
89.904704416, 77.955230, 78.949160, 79.9404, 80.937210026, 81.931087,
82.928653801, 83.923250, 84.921471182, 85.916473591, 86.914816252,
87.910226904, 88.9088895, 89.904704416, 90.905645767, 91.905040847,
92.906476006, 93.906315192, 94.9080426, 95.908273386, 96.910953109,
97.912734892, 98.916512106, 99.917761889, 100.921140415, 101.922981285,
102.926599606, 103.928780, 104.933050, 105.935910, 106.940750,
107.943960, 108.949240, 109.952870, 92.906378058, 80.949030,
81.943130, 82.936705382, 83.933570, 84.927912447, 85.925038326,
86.920361108, 87.918332163, 88.913418245, 89.911264845,
90.906996243, 91.907193888, 92.906378058, 93.907283888, 94.906835792,
95.908100647, 96.908098556, 97.910328412, 98.911618375, 99.914181619,
100.915252025, 101.918037614, 102.919143842, 103.922464701,
104.923936545, 105.927970, 106.930310, 107.934840, 108.937630,
109.942440, 110.945650, 111.950830, 112.954700, 97.905408169, 82.948740,
83.940090, 84.936550, 85.930695904, 86.927326502, 87.921953241,
88.919480009, 89.913936896, 90.911750194, 91.906810991, 92.90681261,
93.905088269, 94.905842129, 95.904679477, 96.906021465, 97.905408169,
98.90771187, 99.907477336, 100.910347001, 101.91029736, 102.913207142,
103.913763625, 104.91697461, 105.918136802, 106.921692604, 107.923453,
108.927810, 109.929730, 110.934410, 111.936840, 112.941880, 113.944920,
114.950290, 98.906254747, 84.948830, 85.942880, 86.936530, 87.932678,
88.927167, 89.923556564, 90.918427639, 91.915260166, 92.910248984,
93.909657002, 94.907657084, 95.907871383, 96.906365358, 97.907215966,
98.906254747, 99.90765778, 100.907314659, 101.909215019, 102.909181351,
103.911447454, 104.911660566, 105.914357927, 106.915079572, 107.918461226,
108.919982665, 109.923820483, 110.92569283, 111.929146493, 112.931590,
113.935880, 114.938690, 115.943370, 116.946480, 117.951480, 101.904349312,
86.949180, 87.940260, 88.936110, 89.929890, 90.926292, 91.920120,
92.917052034, 93.911359711, 94.910412929, 95.907597835, 96.9075547,
97.905287132, 98.905939302, 99.904219476, 100.905582087, 101.904349312,
102.906323847, 103.905432701, 104.907752866, 105.907329433,
106.909905089, 107.910173465, 108.913203233, 109.914136041, 110.917696,
111.918965, 112.922487194, 113.924281, 114.928686173, 115.930810,
116.935580, 117.937820, 118.942840, 119.945310, 102.905504292,
88.948837, 89.942870, 90.936550, 91.931980, 92.925740, 93.921698,
94.91589874, 95.914460631, 96.911336797, 97.910708158, 98.908132104,
99.90812155, 100.906163625, 101.906843196, 102.905504292, 103.906655518,
104.905693821, 105.907287135, 106.906748423, 107.908728018, 108.908737289,
109.911136411, 110.911585913, 111.914394159, 112.915530627, 113.918806,
114.920334, 115.924062, 116.925980, 117.930070, 118.932110, 119.936410,
120.938720, 121.943210, 105.903485715, 90.949110, 91.940420, 92.935910,
93.928770, 94.924690, 95.918164359, 96.916479073, 97.912720902,
98.911767833, 99.908505886, 100.908289242, 101.905608544, 102.906087307,
103.904035834, 104.90508492, 105.903485715, 106.905133481, 107.903891701,
108.905950451, 109.905153254, 110.907670734, 111.907314058, 112.910152908,
113.910362638, 114.913683824, 115.914158662, 116.917841338, 117.9189843,
118.923110, 119.924691878, 120.928870, 121.930550, 122.934930, 123.936880,
106.90509682, 92.949780, 93.942780, 94.935480, 95.930680, 96.923972412,
97.921566201, 98.917597178, 99.916104255, 100.912802233, 101.911685,
102.90897272, 103.908629157, 104.906528661, 105.906668921, 106.90509682,
107.905955556, 108.904752292, 109.906107231, 110.905291157, 111.907004814,
112.906566579, 113.908803704, 114.908762698, 115.911359933, 116.911684562,
117.914582768, 118.915665059, 119.918787384, 120.919848046, 121.923530,
122.924900, 123.928640, 124.930430, 125.934500, 126.936770, 127.941170,
128.943690, 129.950448, 113.90335854, 94.949870, 95.939770, 96.934940,
97.927395546, 98.925010, 99.920289525, 100.918681538, 101.914462258,
102.913419246, 103.909849475, 104.909467905, 105.90645941, 106.906617928,
107.904183683, 108.904982293, 109.90300207, 110.904178107, 111.902757809,
112.904401662, 113.90335854, 114.905430969, 115.904755809, 116.907218618,
117.90691453, 118.909921597, 119.909850129, 120.912977363, 121.913332432,
122.917002999, 123.917647616, 124.92124637, 125.922353321, 126.926443864,
127.927762285, 128.932150, 129.933901937, 130.940670, 131.945550,
114.903878484, 96.949540, 97.942140, 98.934220, 99.931110851,
100.926340, 101.924090238, 102.919914188, 103.918296171, 104.91467354,
105.913465411, 106.9102951, 107.90969818, 108.907150507, 109.907165274,
110.905103278, 111.905532331, 112.904057761, 113.904913876,
114.903878484, 115.905259703, 116.904513564, 117.906354367, 118.90584535,
119.907959608, 120.907845822, 121.91027601, 122.910438276, 123.913175231,
124.913600588, 125.916463857, 126.917353091, 127.920172328, 128.92169698,
129.924970049, 130.926851767, 131.93299026, 132.937810, 133.944150,
134.949330, 119.902194676, 98.949330, 99.939044343, 100.936060,
101.930295324, 102.928100, 103.923143223, 104.921349437, 105.91688062,
106.915644329, 107.911925378, 108.911283214, 109.907842791, 110.90773446,
111.904818207, 112.905170577, 113.902778869, 114.903342397, 115.90174053,
116.902951656, 117.901603167, 118.90330763, 119.902194676, 120.90423548,
121.903439046, 122.905720838, 123.905273946, 124.907784125, 125.90765328,
126.910360024, 127.910536624, 128.913479, 129.913967295, 130.916999769,
131.917815713, 132.923829249, 133.928291765, 134.934730, 135.939340,
136.945990, 120.903815686, 102.939690, 103.936472, 104.931486348,
105.928791, 106.924150, 107.922160, 108.918132426, 109.916753, 110.913163,
111.912398009, 112.909371672, 113.909269, 114.906598, 115.906793629,
116.904835941, 117.905528731, 118.903942009, 119.905072427, 120.903815686,
121.905173651, 122.90421397, 123.905935743, 124.905253818, 125.90724748,
126.906923609, 127.909169001, 128.909148442, 129.911656324, 130.911982275,
131.914466896, 132.91525163, 133.920379744, 134.925165771, 135.930350,
136.935310, 137.940790, 138.945980, 129.906224399, 104.943640,
105.937504237, 106.935006, 107.929444597, 108.927415515, 109.922407316,
110.921110692, 111.917013672, 112.915891, 113.912089, 114.911902,
115.90846, 116.908644719, 117.905827581, 118.906403645, 119.904020222,
120.904936424, 121.903043898, 122.904270029, 123.902817896, 124.904430731,
125.903311696, 126.905226336, 127.904463056, 128.906598238, 129.906224399,
130.908523864, 131.90855316, 132.910955306, 133.911368737, 134.916448592,
135.920101246, 136.925322954, 137.929220, 138.934730, 139.938850,
140.944650, 141.949080, 126.904472681, 107.943475, 108.938149417,
109.935242, 110.930276, 111.927970, 112.923640583, 113.921850, 114.918048,
115.916808633, 116.91365, 117.913074, 118.910074, 119.910048173,
120.907366811, 121.907589284, 122.905588965, 123.906209852, 124.904630164,
125.905624153, 126.904472681, 127.905809443, 128.904987722, 129.906674247,
130.906124609, 131.907997381, 132.907796939, 133.909744465, 134.910048121,
135.914653993, 136.91787084, 137.922349591, 138.926099478, 139.931000,
140.935030, 141.940180, 142.944560, 143.949990, 131.904153457, 109.944278068,
110.941602, 111.935623112, 112.933341174, 113.927980306, 114.92629392,
115.921581087, 116.920358735, 117.916178655, 118.915410688, 119.911784244,
120.911461829, 121.908367632, 122.90848191, 123.905893003, 124.906395464,
125.904273634, 126.905183723, 127.903531275, 128.904779435, 129.903508007,
130.905082362, 131.904153457, 132.905910722, 133.905394464, 134.907227495,
135.907218794, 136.911562125, 137.913954475, 138.918792936, 139.921640943,
140.926648049, 141.92970959, 142.935110, 143.938510, 144.944070, 145.947750,
146.953560, 132.905451932, 111.950301, 112.944493274, 113.941450, 114.935910,
115.933367, 116.928670701, 117.926559494, 118.922377304, 119.920677253,
120.917229209, 121.916113434, 122.912996036, 123.912257798, 124.90972827,
125.909451977, 126.907417525, 127.907748866, 128.906064426, 129.906708552,
130.905463926, 131.90643426, 132.905451932, 133.906718475, 134.905977008,
135.907311576, 136.907089473, 137.911016704, 138.913363999, 139.917282354,
140.920045752, 141.924298927, 142.92735175, 143.932076914, 144.93552617,
145.940289423, 146.944155008, 147.949218153, 148.952930, 149.958170,
150.962190, 137.905247237, 113.950675405, 114.947370, 115.941380,
116.938499, 117.933040, 118.930659661, 119.926044974, 120.924054499,
121.919904, 122.918781036, 123.915093603, 124.914472912, 125.911250177,
126.911093797, 127.908317698, 128.908679439, 129.906320811, 130.906941118,
131.905061288, 132.90600749, 133.904508383, 134.905688591, 135.904575945,
136.905827384, 137.905247237, 138.908841341, 139.910604505, 140.914411009,
141.91645341, 142.920626719, 143.922952853, 144.927627032, 145.930219572,
146.934945, 147.937720047, 148.942580, 149.945680, 150.950810, 151.954270,
152.959610, 138.906353267, 116.950068, 117.946730, 118.940990, 119.938070,
120.933010, 121.930710, 122.926240, 123.924574275, 124.920816034,
125.919512667, 126.916375448, 127.915585177, 128.912692815, 129.912368724,
130.91007, 131.910101145, 132.908218, 133.908514011, 134.906976844,
135.907635536, 136.906493598, 137.90711193, 138.906353267, 139.909477645,
140.910962152, 141.91407913, 142.91606272, 143.919599647, 144.921645401,
145.92579346, 146.928235284, 147.932228868, 148.934734, 149.938770,
150.941720, 151.946250, 152.949620, 153.954500, 154.958350, 139.905438706,
118.952760, 119.946640, 120.943420, 121.937910, 122.935400, 123.930410,
124.928440, 125.923971, 126.922731, 127.918911, 128.918102, 129.914736,
130.914422, 131.911460487, 132.91151502, 133.908924821, 134.909151396,
135.907172422, 136.907805577, 137.905991321, 138.906652651, 139.905438706,
140.90827627, 141.909244205, 142.91238591, 143.913647336, 144.917233135,
145.918759009, 146.922673954, 147.92443241, 148.928399883, 149.930408931,
150.933976196, 151.936540, 152.940580, 153.943420, 154.948040, 155.951260,
156.956340, 140.907652769, 120.955364, 121.951810, 122.945960, 123.942960,
124.937830, 125.935310, 126.930830, 127.928791, 128.925095, 129.92359,
130.920259, 131.919255, 132.916330532, 133.915711737, 134.913111745,
135.912691611, 136.910705455, 137.910754636, 138.908938399, 139.909075874,
140.907652769, 141.910044806, 142.910816926, 143.913305245, 144.9145117,
145.917644336, 146.918995992, 147.922135026, 148.923717651, 149.926672997,
150.928318618, 151.931499225, 152.933838905, 153.937518153, 154.940120,
155.944270, 156.947430, 157.951980, 158.955500, 141.907723297, 123.952230,
124.948880, 125.943220, 126.940500, 127.935390, 128.933188, 129.928506,
130.927247, 131.923321237, 132.922348, 133.918790181, 134.91818116,
135.914976035, 136.914567137, 137.911949961, 138.911978288, 139.909552,
140.909609854, 141.907723297, 142.90981429, 143.910087274, 144.912573636,
145.913116939, 146.916100441, 147.916893288, 148.920148842, 149.920890888,
150.923828929, 151.924682219, 152.927698232, 153.929477307, 154.932932,
155.935018114, 156.939030, 157.941600, 158.946090, 159.949090, 160.953880,
144.912749023, 125.957520, 126.951630, 127.948420, 128.943160, 129.940450,
130.935870, 131.933750, 132.929782, 133.928353, 134.924876, 135.923565829,
136.920479493, 137.919548281, 138.916804082, 139.916041789, 140.913555054,
141.912874471, 142.910932616, 143.912590843, 144.912749023, 145.914696305,
146.915138545, 147.917474618, 148.918334155, 149.920983561, 150.921206973,
151.923496795, 152.924116889, 153.926463943, 154.928101267, 155.931056736,
156.933039369, 157.936561407, 158.938970, 159.942990, 160.945860,
161.950290, 162.953680, 151.919732425, 127.958080, 128.954640, 129.948920,
130.946110, 131.940690, 132.938670, 133.933970, 134.93252, 135.928275527,
136.926971746, 137.923243961, 138.922296605, 139.918994687, 140.918476488,
141.915197641, 142.914628338, 143.911999478, 144.913410353, 145.9130409,
146.914897923, 147.914822674, 148.917184735, 149.917275539, 150.919932409,
151.919732425, 152.922097356, 153.922209273, 154.924640161, 155.925527887,
156.928358717, 157.929991317, 158.933211271, 159.935140, 160.938830,
161.941220, 162.945360, 163.948280, 164.952980, 152.921230339, 129.963569,
130.957753, 131.954370, 132.949240, 133.946510, 134.941820, 135.939600,
136.935570, 137.933709, 138.92979228, 139.928087607, 140.92493072,
141.923434945, 142.920297509, 143.918816823, 144.916265237, 145.917205817,
146.916746111, 147.918085895, 148.917931238, 149.919701819, 150.919850161,
151.921744534, 152.921230339, 153.922979237, 154.92289326, 155.924752249,
156.925423647, 157.927845302, 158.929088861, 159.931971, 160.933680,
161.937040, 162.939210, 163.942990, 164.945720, 165.949970, 166.953210,
157.924103912, 133.955370, 134.952570, 135.947340, 136.945020, 137.940120,
138.938240, 139.933674, 140.932126, 141.928116, 142.92674951, 143.922963,
144.921709252, 145.918310608, 146.91909442, 147.918114524, 148.919340915,
149.918658876, 150.920348482, 151.919790996, 152.921749543, 153.920865598,
154.922622022, 155.922122743, 156.923960135, 157.924103912, 158.926388658,
159.927054146, 160.929669211, 161.930984751, 162.933990, 163.935860,
164.939380, 165.941600, 166.945570, 167.948360, 168.952870, 158.925346757,
135.961380, 136.955980, 137.953160, 138.948290, 139.945805049, 140.941448,
141.938744, 142.935121, 143.933045, 144.929274, 145.927246584, 146.924044585,
147.924271701, 148.923245909, 149.923659686, 150.923102543, 151.924074438,
152.923434588, 153.924678019, 154.923505236, 155.924747213, 156.924024604,
157.925413137, 158.925346757, 159.927167606, 160.927569919, 161.929488234,
162.930647536, 163.933350838, 164.934880, 165.937991959, 166.940050,
167.943640, 168.946220, 169.950250, 170.953300, 163.929174751, 137.962490,
138.959540, 139.954010, 140.951350, 141.946366, 142.943830, 143.939254,
144.937425, 145.932845369, 146.9310915, 147.927149831, 148.927304787,
149.925585184, 150.926184601, 151.9247183, 152.92576467, 153.924424457,
154.925753775, 155.92428311, 156.925466095, 157.924409487, 158.925739214,
159.925197517, 160.926933364, 161.926798447, 162.928731159, 163.929174751,
164.931703333, 165.932806741, 166.935655462, 167.937128769, 168.940307614,
169.942390, 170.946200, 171.948760, 172.953000, 164.93032207, 139.968539,
140.963098, 141.959770, 142.954610, 143.951480, 144.947200, 145.944640,
146.940056, 147.937718, 148.933774771, 149.933496182, 150.931688142,
151.931713714, 152.930198789, 153.930601579, 154.929103491, 155.929839,
156.928256188, 157.928941007, 158.927711959, 159.928729478, 160.927854776,
161.929095504, 162.928733903, 163.930233507, 164.93032207, 165.932284162,
166.933132633, 167.935515708, 168.936872273, 169.939618929, 170.94146515,
171.944820, 172.947290, 173.951150, 174.954050, 165.930293061, 142.966340,
143.960380, 144.957390, 145.952000, 146.949490, 147.944550, 148.942306,
149.937913839, 150.937448903, 151.935050389, 152.935063492, 153.932783081,
154.933208949, 155.931064698, 156.931916, 157.929893474, 158.930684066,
159.929083292, 160.929995309, 161.928778264, 162.930032749, 163.929200229,
164.930726003, 165.930293061, 166.932048159, 167.932370224, 168.934590364,
169.935464312, 170.938029808, 171.939356113, 172.942400, 173.944230,
174.947770, 175.950080, 176.954050, 168.93421325, 144.970073, 145.966425,
146.960961, 147.957840, 148.952720, 149.949960, 150.94548349, 151.944422,
152.942012112, 153.941567808, 154.939199459, 155.938979933, 156.936973,
157.936979525, 158.934975, 159.935262801, 160.933549, 161.933994682,
162.932651124, 163.93356, 164.932435492, 165.933554131, 166.932851622,
167.934172776, 168.93421325, 169.935801397, 170.93642944, 171.938400044,
172.939603607, 173.942168605, 174.943836853, 175.946994685, 176.949040,
177.952640, 178.955340, 173.938862089, 147.967420, 148.964040, 149.958420,
150.955400769, 151.950288919, 152.949480, 153.946393928, 154.945782332,
155.942818215, 156.942627848, 157.939865617, 158.940050099, 159.937552344,
160.937901678, 161.93576821, 162.936334305, 163.934489416, 164.935279,
165.933882042, 166.934949605, 167.933896895, 168.935189802, 169.934761837,
170.936325799, 171.936381469, 172.938210787, 173.938862089, 174.94127645,
175.942571683, 176.945260822, 177.94664668, 178.950170, 179.952330,
180.956150, 174.940771819, 149.973228, 150.967577, 151.964120,
152.958767331, 153.957522, 154.954316216, 155.953032523, 156.9500983,
157.949313283, 158.946628776, 159.946033, 160.943572, 161.943277288,
162.941179, 163.941339, 164.939406724, 165.939859, 166.93827,
167.938739111, 168.937651439, 169.938474968, 170.937913136, 171.939085669,
172.938930602, 173.94033748, 174.940771819, 175.94268631, 176.943758055,
177.945954559, 178.947327443, 179.94988116, 180.951970, 181.955040,
182.957570, 183.960910, 179.946549953, 152.970690, 153.964860, 154.963390,
155.959364025, 156.958396, 157.954799366, 158.95399487, 159.950684379,
160.950274844, 161.947210498, 162.947089, 163.944367284, 164.944567,
165.94218, 166.9426, 167.940568, 168.941259, 169.939609, 170.940492,
171.939448301, 172.940513, 173.940046178, 174.941509181, 175.941408631,
176.943220651, 177.943698766, 178.945816145, 179.946549953, 180.949101246,
181.950554096, 182.953530439, 183.955446515, 184.958820, 185.960890,
186.964590, 187.966850, 180.947995763, 154.974592, 155.972303,
156.968192445, 157.966699, 158.963018173, 159.961486056, 160.958417,
161.957291859, 162.954330271, 163.953534, 164.950772514, 165.950512,
166.948093, 167.948047, 168.946011, 169.946175, 170.944476, 171.944895,
172.94375, 173.944454, 174.943737, 175.944857, 176.944472403,
177.945778221, 178.945929535, 179.947464831, 180.947995763, 181.950151849,
182.951372616, 183.954007966, 184.955559375, 185.958552023, 186.960530,
187.963700, 188.965830, 189.969230, 183.950931188, 157.974562, 158.972918,
159.968478805, 160.967357, 161.963497417, 162.962523542, 163.958954382,
164.958279949, 165.955027253, 166.954816014, 167.951808394, 168.95177879,
169.949228482, 170.949451, 171.947292, 172.947689, 173.946079, 174.946717,
175.945634, 176.946643, 177.945876236, 178.947070447, 179.946704459,
180.948197248, 181.948204156, 182.950222951, 183.950931188, 184.953419264,
185.954364127, 186.957160466, 187.958489105, 188.961912868, 189.963181378,
190.966600, 191.968170, 186.955753109, 159.982115, 160.977589119,
161.976002, 162.972080535, 163.970323, 164.967088557, 165.965808,
166.962601, 167.961572608, 168.958791096, 169.958220071, 170.955716,
171.955422961, 172.953243, 173.953115, 174.951381, 175.951623, 176.950328,
177.950989, 178.949987641, 179.950789084, 180.950067916, 181.95121008,
182.950819841, 183.952520756, 184.952954982, 185.954986084, 186.955753109,
187.958114438, 188.959229007, 189.961817977, 190.963125242, 191.965960,
192.967470, 193.970420, 191.96148069, 161.984431, 162.982690,
163.978035649, 164.976762, 165.972690753, 166.971547969, 167.967803678,
168.96701927, 169.963577028, 170.963184819, 171.960023303, 172.959808409,
173.957062202, 174.956945835, 175.954806, 176.954965324, 177.953251241,
178.953816017, 179.952378803, 180.953244, 181.952110186, 182.953126102,
183.952489071, 184.954042265, 185.953838158, 186.955750458, 187.955838228,
188.95814747, 189.958447048, 190.960929718, 191.96148069, 192.964151563,
193.965182083, 194.968126661, 195.969639333, 192.96292643, 163.992201,
164.987520, 165.985824, 166.981665156, 167.979881, 168.976294942, 169.974965,
170.971626042, 171.970456, 172.967501739, 173.966861045, 174.964112895,
175.963648688, 176.9613015, 177.961082, 178.959122266, 179.959229446,
180.957625297, 181.958076296, 182.956846458, 183.957476, 184.956698,
185.957946104, 186.957363361, 187.958853121, 188.958718935, 189.960545968,
190.960594046, 191.962605012, 192.96292643, 193.965078378, 194.965979573,
195.968396542, 196.969653285, 197.972280, 198.973804583, 194.964791134,
165.994855, 166.992979, 167.988150742, 168.986715, 169.982495289,
170.981244542, 171.977347128, 172.976444754, 173.972818767, 174.972420552,
175.968944622, 176.968469481, 177.965648724, 178.965363404, 179.963031477,
180.963097285, 181.961170656, 182.961596703, 183.959922251, 184.960619,
185.959350813, 186.960587, 187.959395391, 188.960833686, 189.959931655,
190.961676661, 191.961038005, 192.962987401, 193.962680253, 194.964791134,
195.964951521, 196.967340182, 197.96789279, 198.970593094, 199.971440677,
200.974512868, 201.975740, 196.966568662, 168.998080, 169.996122,
170.991878881, 171.990035, 172.98623738, 173.984761, 174.981274107,
175.980099, 176.976864908, 177.97603192, 178.973212812, 179.972521124,
180.970079048, 181.969617874, 182.967593034, 183.967451524, 184.965789411,
185.965952703, 186.964567541, 187.965323661, 188.963948286, 189.964700339,
190.963704225, 191.964812953, 192.964149715, 193.96536525, 194.96503464,
195.966569813, 196.966568662, 197.968242303, 198.968765193, 199.970725647,
200.97165724, 201.973805838, 202.975154542, 203.977724, 204.979870,
201.970643011, 171.003760, 171.998832686, 172.997242, 173.992863695,
174.99142327, 175.98735458, 176.986279158, 177.982483143, 178.981833861,
179.978266394, 180.977819311, 181.974689964, 182.974449841, 183.971713051,
184.971899086, 185.96936179, 186.969814236, 187.967577049, 188.968190034,
189.966322449, 190.967157105, 191.965634327, 192.966665421, 193.965439409,
194.966720113, 195.965832649, 196.967212908, 197.966769032, 198.968279932,
199.968326004, 200.970302268, 201.970643011, 202.972872484, 203.973493933,
204.976073386, 205.977514066, 206.982588545, 207.985940, 208.991040,
209.994510, 204.974427541, 176.000590, 176.996427286, 177.994897,
178.991089082, 179.989906, 180.986257447, 181.985667104, 182.982192802,
183.981873122, 184.978791305, 185.978325, 186.975905897, 187.976009782,
188.973588428, 189.973877149, 190.971786154, 191.972225, 192.970672,
193.9712, 194.969774335, 195.970481151, 196.969574511, 197.970483495,
198.969877, 199.970962672, 200.970818891, 201.972105808, 202.97234422,
203.973863522, 204.974427541, 205.97611032, 206.977419429, 207.9820187,
208.985358952, 209.990073689, 210.993477, 211.998228, 207.976652071,
178.003830191, 179.002150, 179.997918173, 180.996623958, 181.992671842,
182.991874629, 183.988142339, 184.987609944, 185.984238945, 186.98391837,
187.980874338, 188.980807, 189.978081517, 190.978265, 191.975785171,
192.976173234, 193.97401207, 194.97454205, 195.972774109, 196.973431124,
197.972033959, 198.97291665, 199.971826675, 200.972884511, 201.972159133,
202.973390521, 203.973043589, 204.974481755, 205.974465278, 206.975896887,
207.976652071, 208.98109012, 209.984188527, 210.988736964, 211.991897543,
212.996581499, 213.999805408, 215.004807, 208.980398734, 184.001124,
184.997625, 185.996597625, 186.993157835, 187.992265154, 188.989199012,
189.988295129, 190.985786119, 191.985457954, 192.982959771, 193.98283396,
194.980650737, 195.980666509, 196.978864454, 197.979206, 198.977671961,
199.978131829, 200.977009036, 201.977742324, 202.976876001, 203.977812736,
204.977389366, 205.97849913, 206.978470679, 207.979742196, 208.980398734,
209.984120371, 210.98726946, 211.991285724, 212.994384666, 213.998711539,
215.001769776, 216.006305943, 217.009470, 218.014316, 208.982430435,
187.999422048, 188.998480562, 189.995101185, 190.994574485, 191.991335149,
192.991025275, 193.988185606, 194.988110728, 195.98553458, 196.98565963,
197.983388616, 198.983666063, 199.981798604, 200.982259764, 201.980757541,
202.981420103, 203.980318121, 204.981203322, 205.980481099, 206.981593173,
207.981245702, 208.982430435, 209.982873673, 210.986653154, 211.988867969,
212.99285728, 213.99520135, 214.999419988, 216.001915035, 217.006334796,
218.008973037, 219.013744, 220.016602, 210.987496271, 192.999843112,
193.998725085, 194.996268098, 195.995788077, 196.993189215, 197.992837202,
198.990532254, 199.990351264, 200.988416999, 201.988630236, 202.986941984,
203.987251326, 204.986074483, 205.986667036, 206.985783502, 207.986589977,
208.986173143, 209.98714771, 210.987496271, 211.990744771, 212.992936646,
213.996371733, 214.99865257, 216.002423257, 217.004718822, 218.008694336,
219.011161691, 220.015407682, 221.018050, 222.022330, 223.025190,
222.017577738, 195.005437696, 196.002115223, 197.001584351, 197.998678663,
198.998370297, 199.9956993, 200.995628335, 201.993263492, 202.993386687,
203.99142874, 204.991718799, 205.990214104, 206.990734225, 207.98964247,
208.990414742, 209.989696216, 210.990600523, 211.990703529, 212.993882668,
213.995362554, 214.998745483, 216.00027437, 217.003927675, 218.005601256,
219.009480204, 220.011393981, 221.015536782, 222.017577738, 223.021790,
224.024090, 225.028440, 226.030890, 227.035407, 228.037986, 222.01755173,
199.007258147, 200.00657249, 201.003860867, 202.003372847, 203.000924647,
204.000653204, 204.99859396, 205.998666066, 206.996949414, 207.997138783,
208.995953555, 209.996407738, 210.995536544, 211.996202244, 212.996189081,
213.998971145, 215.000341497, 216.00319799, 217.004631951, 218.007578322,
219.009252149, 220.012327405, 221.014254762, 222.01755173, 223.019735857,
224.023249951, 225.025565414, 226.029386231, 227.031835938, 228.035729,
229.038450228, 230.042510, 231.045440, 232.049772, 228.031070292,
202.009890686, 203.009271619, 204.006499668, 205.00626857, 206.00382727,
207.003798105, 208.00183994, 209.001991373, 210.000494978, 211.000897987,
211.999794499, 213.000383959, 214.000107894, 215.002719834, 216.003533035,
217.006320327, 218.00714023, 219.010085078, 220.011028384, 221.013917338,
222.01537453, 223.018502171, 224.020211821, 225.023611564, 226.025409823,
227.029177842, 228.031070292, 229.034957577, 230.037056394, 231.041220,
232.043638, 233.048060, 234.050704, 227.027752127, 206.01450498,
207.011949748, 208.011551551, 209.009494863, 210.009435986, 211.007734835,
212.007813822, 213.006607643, 214.006901798, 215.006453625, 216.008720075,
217.009346914, 218.011641453, 219.012420389, 220.014762979, 221.015591248,
222.017843851, 223.019137468, 224.021722866, 225.023229585, 226.026098089,
227.027752127, 228.031021112, 229.033015243, 230.036294178, 231.038558786,
232.042027438, 233.044550, 234.048420, 235.051232, 236.055296,
232.038055325, 209.017715682, 210.015075342, 211.014928413, 212.012980288,
213.01301014, 214.01149977, 215.01173033, 216.011062115, 217.013114328,
218.013284499, 219.015536895, 220.015747762, 221.018183674, 222.018468121,
223.020811448, 224.021466895, 225.023951021, 226.024903069, 227.02770407,
228.028741127, 229.03176243, 230.033133843, 231.036304343, 232.038055325,
233.041581843, 234.04360123, 235.047510074, 236.049870, 237.053894,
238.056496, 231.03588399, 212.023204138, 213.02110934, 214.020918417,
215.019185865, 216.019109564, 217.018323986, 218.020041889, 219.019883143,
220.021875303, 221.021877983, 222.023742, 223.023962273, 224.025625738,
225.026130678, 226.027947753, 227.028805072, 228.031051376, 229.032096793,
230.034540754, 231.03588399, 232.038591592, 233.040247277, 234.043308058,
235.045443615, 236.048681284, 237.051145659, 238.05450271, 239.057260,
240.060980, 238.050788247, 217.024368791, 218.023535671, 219.02491916,
220.024723, 221.026399, 222.026086, 223.0277386, 224.027604778,
225.029390717, 226.029338702, 227.031156367, 228.031374006, 229.033505939,
230.033939784, 231.036293704, 232.037156152, 233.039635207, 234.040952088,
235.043929918, 236.045568006, 237.048730184, 238.050788247, 239.054293299,
240.056591988, 241.060330, 242.062931, 237.048173444, 225.033913933,
226.035145, 227.034956789, 228.036180, 229.036263808, 230.037827597,
231.038245085, 232.040108, 233.040740546, 234.042895038, 235.044063267,
236.0465696, 237.048173444, 238.050946405, 239.052939025, 240.056162182,
241.058252431, 242.06164118, 243.064279, 244.067850, 242.058742611,
228.038742328, 229.040150212, 230.039649886, 231.041101107, 232.041187097,
233.042997375, 234.043317076, 235.04528605, 236.046057964, 237.048409658,
238.049559894, 239.052163381, 240.053813545, 241.056851456, 242.058742611,
243.062003092, 244.064203907, 245.067747154, 246.070204627, 247.074070,
243.06138108, 231.045560, 232.046590, 233.046348, 234.047809, 235.047946,
236.049579, 237.049996, 238.051984324, 239.053024479, 240.055300179,
241.056829144, 242.059549159, 243.06138108, 244.064284847, 245.066452114,
246.069774619, 247.072093, 248.075752, 249.078480, 247.07035354,
233.050771232, 234.050159841, 235.051434, 236.051413, 237.052901,
238.053028697, 239.054957, 240.055529539, 241.057653001, 242.058835824,
243.061389114, 244.062752578, 245.065491249, 246.067223662, 247.07035354,
248.072348508, 249.075953413, 250.078356959, 251.082284605, 252.084870,
247.07030708, 235.056580, 236.057330, 237.057003, 238.058281, 239.058279,
240.059759, 241.060230, 242.061981, 243.063007572, 244.065180774,
245.066361616, 246.068672947, 247.07030708, 248.073086, 249.074986657,
250.07831652, 251.080760172, 252.084310, 253.086880, 254.090600,
251.079586788, 237.062070, 238.061410, 239.062422, 240.062302, 241.063726,
242.063701552, 243.065427, 244.066000689, 245.068048612, 246.068805309,
247.071000589, 248.072184861, 249.074853537, 250.076406066, 251.079586788,
252.081625846, 253.085133145, 254.087322909, 255.091046, 256.093440,
252.082978512, 240.068920, 241.068538, 242.069745, 243.069548, 244.070883,
245.071324, 246.072896, 247.073656, 248.075471, 249.076411, 250.078612,
251.079992142, 252.082978512, 253.084824697, 254.088022021, 255.090273122,
256.093598, 257.095979, 258.099520, 257.095104724, 242.073430, 243.074353,
244.074084, 245.075385, 246.075299023, 247.076847, 248.077194714,
249.079034, 250.079521264, 251.081575017, 252.082466855, 253.085185236,
254.08685422, 255.089962202, 256.091773117, 257.095104724, 258.097076,
259.100595, 260.102678, 258.098431319, 245.080829, 246.081886, 247.081635,
248.082823, 249.083013, 250.084420, 251.084839, 252.086560, 253.087280,
254.089656, 255.091082705, 256.094059025, 257.095541368, 258.098431319,
259.100509, 260.103652, 261.105721, 262.108865, 255.093241131, 248.086596,
249.087833, 250.087510, 251.089012, 252.088976521, 253.090678,
254.090955253, 255.093241131, 256.094282666, 257.09687719, 258.098207,
259.101031, 260.102643, 261.105749, 262.107301, 263.110552, 264.112345,
260.105504, 251.094360, 252.095371, 253.095210, 254.096454, 255.096681,
256.098629, 257.099555, 258.101814, 259.102901, 260.105504, 261.106883,
262.109634, 263.111293, 264.114038, 265.115839, 266.119305, 263.112547,
253.100689, 254.100184, 255.101340, 256.101166194, 257.102990,
258.103489, 259.105637, 260.106440, 261.108766556, 262.109925, 263.112547,
264.113985, 265.116704, 266.117956, 267.121529, 268.123644, 255.107398,
255.107398, 256.108127, 257.107722, 258.109231, 259.109610, 260.111300,
261.112056, 262.114084, 263.114988, 264.117404, 265.118601, 266.121029,
267.122377, 268.125445, 269.127460, 270.130712, 259.114500, 258.113168,
259.114500, 260.114422071, 261.116117, 262.116398, 263.118322, 264.118931,
265.121114693, 266.122065, 267.124425, 268.125606, 269.128755, 270.130329,
271.133472, 272.135158, 273.138220, 262.122892, 260.121970, 261.121664,
262.122892, 263.123035, 264.124604, 265.125147, 266.126942, 267.127650,
268.129755, 269.130694, 270.133616, 271.135179, 272.138032, 273.139618,
274.142440, 275.144250, 263.128558, 263.128558, 264.128394885, 265.130085,
266.130097, 267.131789, 268.132162, 269.134056, 270.134650, 271.137657,
272.139052, 273.141986, 274.143131, 275.145952, 276.147208, 277.149841,
265.136151, 265.136151, 266.137299, 267.137307, 268.138728, 269.139055,
270.140657, 271.141139, 272.143738, 273.144913, 274.147492, 275.148647,
276.151156, 277.152420, 278.154812, 279.156193, 281.162061, 267.144341,
268.143795, 269.145124, 270.144720, 271.146062, 272.146317, 273.148863,
274.149492, 275.152176, 276.153034, 277.155647, 278.156469, 279.158861,
280.159795, 281.162061, 272.153615, 272.153615, 273.153682, 274.155713,
275.156142, 276.158493, 277.159519, 278.161604, 279.162468, 280.164473,
281.165372, 282.167486, 283.168415, 283.171792, 277.163943, 278.164312,
279.166546, 280.167039, 281.169286, 282.169765, 283.171792, 284.172384,
285.174105, 283.176451, 283.176451, 284.178080, 285.178732, 286.180481,
287.181045, 285.183698, 285.183698, 286.183855, 287.185599, 288.185689,
289.187279, 287.191186, 287.191186, 288.192492, 289.192715, 290.194141,
291.194384, 292.199786, 289.198862, 290.198590, 291.200011, 292.199786,
291.206564, 291.206564, 292.207549, 293.214670, 293.214670]
# Lookup tables built from the parallel _temp_* arrays defined above.
# Ghost atoms ("GH") get an explicit zero mass/charge here; note that
# Cfour itself assigns ghost atoms a mass of 100.
el2mass = {sym: mass for sym, mass in zip(_temp_symbol, _temp_mass)}
el2mass["GH"] = 0.
# Isotope-labelled symbols; this table is a superset of el2mass.
eliso2mass = {sym: mass for sym, mass in zip(_temp_iso_symbol, _temp_iso_mass)}
eliso2mass["GH"] = 0.
#eliso2mass["X0"] = 0.  # probably needed, just checking
el2z = {sym: z for sym, z in zip(_temp_symbol, _temp_z)}
el2z["GH"] = 0
z2mass = {z: mass for z, mass in zip(_temp_z, _temp_mass)}
z2el = {z: sym for z, sym in zip(_temp_z, _temp_symbol)}
z2element = {z: name for z, name in zip(_temp_z, _temp_element)}
| kratman/psi4public | psi4/driver/qcdb/periodictable.py | Python | gpl-2.0 | 78,237 |
#
# Copyright (C) 2004 SIPfoundry Inc.
# Licensed by SIPfoundry under the GPL license.
#
# Copyright (C) 2004 SIP Forum
# Licensed to SIPfoundry under a Contributor Agreement.
#
#
# This file is part of SIP Forum User Agent Basic Test Suite which
# belongs to the SIP Forum Test Framework.
#
# SIP Forum User Agent Basic Test Suite is free software; you can
# redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# SIP Forum User Agent Basic Test Suite is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SIP Forum User Agent Basic Test Suite; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
#
# $Id: case207.py,v 1.2 2004/05/02 18:57:35 lando Exp $
#
from TestCase import TestCase
import NetworkEventHandler as NEH
import Log
class case207 (TestCase):
    """SFTF test case 207: send an INVITE whose Content-Length header claims
    far more octets than the message actually carries, and expect the UA
    under test to reject it with a 400 (Bad Request)."""
    def config(self):
        # Static metadata consumed by the test framework.
        self.name = "Case 207"
        self.description = "Content length larger than message"
        self.isClient = True
        self.transport = "UDP"
    def run(self):
        self.neh = NEH.NetworkEventHandler(self.transport)
        # Build an INVITE and inflate its Content-Length far beyond the body.
        inv = self.createRequest("INVITE")
        cl = inv.getParsedHeaderValue("Content-Length")
        cl.length = 9999
        inv.setHeaderValue("Content-Length", cl.create())
        self.writeMessageToNetwork(self.neh, inv)
        # Collect replies until a final (>200) response arrives, or a read
        # timeout (repl is None) forces the loop to stop via the 999 sentinel.
        self.code = 0
        while (self.code <= 200):
            repl = self.readReplyFromNetwork(self.neh)
            if (repl is not None) and (repl.code > self.code):
                self.code = repl.code
            elif repl is None:
                self.code = 999
        if repl is None:
            self.addResult(TestCase.TC_FAILED, "missing reply on request")
        self.neh.closeSock()
    def onDefaultCode(self, message):
        # Track the highest reply code seen so run()'s loop can terminate.
        if message.code > self.code:
            self.code = message.code
        if message.code >= 200:
            if message.getParsedHeaderValue("CSeq").method == "INVITE":
                # Final responses to an INVITE must be ACKed.
                Log.logDebug("case207: sending ACK for >= 200 reply", 3)
                ack = self.createRequest("ACK", trans=message.transaction)
                self.writeMessageToNetwork(self.neh, ack)
            if message.code == 400:
                self.addResult(TestCase.TC_PASSED, "INVITE rejected with 400")
            elif message.code == 200:
                if message.transaction.canceled:
                    Log.logDebug("case207: received 200 for CANCEL", 3)
                else:
                    # The call was accepted; tear it down again with a BYE.
                    Log.logDebug("case207: sending BYE for accepted INVITE", 3)
                    bye = self.createRequest("BYE", dia=message.transaction.dialog)
                    self.writeMessageToNetwork(self.neh, bye)
                    rep = self.readReplyFromNetwork(self.neh)
                    if rep is None:
                        self.addResult(TestCase.TC_ERROR, "missing response on BYE")
            elif message.code != 487:
                # Any other final code except 487 (which follows our own
                # CANCEL below) is the wrong rejection.
                self.addResult(TestCase.TC_FAILED, "INVITE rejected, but not with 400")
        else:
            # Provisional response: the UA is proceeding with the call
            # instead of rejecting it, so fail and CANCEL the transaction.
            self.addResult(TestCase.TC_FAILED, "INVITE accepted, not rejected with 400")
            can = self.createRequest("CANCEL", trans=message.transaction)
            message.transaction.canceled = True
            self.writeMessageToNetwork(self.neh, can)
            canrepl = self.readReplyFromNetwork(self.neh)
            if canrepl is None:
                self.addResult(TestCase.TC_ERROR, "missing 200 on CANCEL")
| ezigman/sftf | UserAgentBasicTestSuite/case207.py | Python | gpl-2.0 | 3,360 |
# Rekall Memory Forensics
# Copyright (C) 2007-2013 Volatility Foundation
# Copyright 2013 Google Inc. All Rights Reserved.
#
# This file is part of Rekall Memory Forensics.
#
# Rekall Memory Forensics is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License Version 2 as
# published by the Free Software Foundation. You may not use, modify or
# distribute this program under any other version of the GNU General Public
# License.
#
# Rekall Memory Forensics is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# Rekall Memory Forensics. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: atcuno@gmail.com
@organization:
"""
from rekall.plugins.linux import common
from rekall.plugins.tools import dynamic_profiles
class CheckSyscall(common.LinuxPlugin):
    """Checks if the system call table has been altered."""
    __name = "check_syscall"
    # Column layout for the rendered output rows yielded by collect().
    table_header = [
        dict(name="divider", type="Divider"),
        dict(name="table", hidden=True),
        dict(name="index", style="address"),
        dict(name="address", style="address"),
        dict(name="symbol", width=80)
    ]
    def Find_sys_call_tables(self):
        """Calculates the size of the syscall table.
        Here we need the symbol __NR_syscall_max. We derive it from
        disassembling the following system calls:
        - system_call_fastpath function:
        http://lxr.linux.no/linux+v3.12/arch/x86/kernel/entry_64.S#L620
        system_call_fastpath:
        #if __SYSCALL_MASK == ~0
        cmpq $__NR_syscall_max,%rax
        #else
        andl $__SYSCALL_MASK,%eax
        cmpl $__NR_syscall_max,%eax
        #endif
        - ret_from_sys_call function (with a small rewind):
        http://lxr.linux.no/linux+v2.6.26/arch/x86/kernel/entry_64.S#L249
        249 cmpq $__NR_syscall_max,%rax
        250 ja badsys
        251 movq %r10,%rcx
        252 call *sys_call_table(,%rax,8) # XXX: rip relative
        253 movq %rax,RAX-ARGOFFSET(%rsp)
        254 /*
        255 * Syscall return path ending with SYSRET (fast path)
        256 * Has incomplete stack frame and undefined top of stack.
        257 */
        258 ret_from_sys_call:
        259 movl $_TIF_ALLWORK_MASK,%edi
        260 /* edi: flagmask */
        - sysenter_do_call
        Linux> dis "linux!sysenter_do_call"
        Address Rel Op Codes Instruction Comment
        ------- ---------- -------------------- ------------------ -------
        ------ linux!sysenter_do_call ------: 0xc12c834d
        0xc12c834d 0x0 3d5d010000 CMP EAX, 0x15d
        0xc12c8352 0x5 0f8397baffff JAE 0xc12c3def linux!syscall_badsys
        """
        rules = [
            # Look for a comparison of the register (EAX) with a fixed value.
            {'mnemonic': 'CMP', 'operands': [
                {'type': 'REG'}, {'type': 'IMM', 'target': "$value"}]},
            # Immediately followed by a branch to linux!badsys,
            # linux!ia32_badsys etc.
            {'comment': '~.+badsys'}
        ]
        func = None
        tables = set()
        for func_name, table_name in [
                # http://lxr.free-electrons.com/source/arch/x86_64/kernel/entry.S?v=2.4.37
                ("system_call", "sys_call_table"),
                # http://lxr.free-electrons.com/source/arch/x86/kernel/entry_64.S?v=3.16
                ("system_call_fastpath", "sys_call_table"),
                # http://lxr.free-electrons.com/source/arch/x86/ia32/ia32entry.S?v=3.14
                ("ia32_sysenter_target", "ia32_sys_call_table"),
                ("sysenter_auditsys", "ia32_sys_call_table"),
                # http://lxr.free-electrons.com/source/arch/x86/kernel/entry_32.S?v=3.3
                ("sysenter_do_call", "sys_call_table")]:
            if table_name in tables:
                continue
            # This table does not exist in this profile dont bother looking for
            # its size.
            # NOTE(review): "== None" (rather than "is None") looks deliberate
            # here: rekall lookups can return NoneObject proxies that compare
            # equal to None but are not the None singleton -- confirm before
            # changing to "is None".
            if self.profile.get_constant(table_name) == None:
                continue
            func = self.profile.get_constant_object(
                func_name, target="Function")
            if func == None:
                continue
            matcher = dynamic_profiles.DisassembleMatcher(
                name="sys_call_table_size",
                mode=func.mode, rules=rules, session=self.session)
            result = matcher.MatchFunction(func)
            if result:
                tables.add(table_name)
                # The matched immediate is __NR_syscall_max; size is max + 1.
                yield table_name, result["$value"] + 1
        # Fallback. Note this underestimates the size quite a bit.
        if func == None:
            table_size = len([x for x in self.profile.constants
                              if x.startswith("__syscall_meta__")]) or 0x300
            yield "ia32_sys_call_table", table_size
            yield "sys_call_table", table_size
    def collect(self):
        """
        This works by walking the system call table
        and verifies that each is a symbol in the kernel
        """
        for table_name, table_size in self.Find_sys_call_tables():
            # The syscall table is simply an array of pointers to functions.
            table = self.profile.get_constant_object(
                table_name,
                target="Array",
                target_args=dict(
                    count=table_size,
                    target="Pointer",
                    target_args=dict(
                        target="Function"
                    )
                )
            )
            yield dict(divider="Table %s" % table_name)
            resolver = self.session.address_resolver
            for i, entry in enumerate(table):
                # format_address(...) sliced to its first two elements --
                # presumably a list of candidate symbol names; verify against
                # the address resolver's contract.
                sym_name = resolver.format_address(entry.deref())[:2]
                yield dict(
                    table=table_name, index=i,
                    address=entry,
                    symbol=sym_name or "Unknown",
                    highlight=None if sym_name else "important")
| dsweet04/rekall | rekall-core/rekall/plugins/linux/check_syscall.py | Python | gpl-2.0 | 6,461 |
#!/usr/bin/env python
#
# Copyright (C) 2010 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import with_statement
import sys
import webkit.messages
def main(argv=None):
    """Generate the messages header for the receiver file named on the
    command line, write it to stdout, and return the process exit code."""
    args = argv if argv else sys.argv
    input_path = args[1]
    with open(input_path) as input_file:
        # Python 3, change to: print(webkit.messages.generate_messages_header(input_file), end='')
        sys.stdout.write(webkit.messages.generate_messages_header(input_file))
    return 0
# Script entry point: exit with main()'s return code.
if __name__ == '__main__':
    sys.exit(main(sys.argv))
| qtproject/qtwebkit | Source/WebKit2/Scripts/generate-messages-header.py | Python | gpl-2.0 | 1,806 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Renames Commission.estimated_price to estimated_display_price and
    redeclares the Commission.customer FK with related_name='commissions'."""
    dependencies = [
        ('public', '0017_auto_20141218_1813'),
    ]
    operations = [
        # Pure rename: column data is preserved, only the field name changes.
        migrations.RenameField(
            model_name='commission',
            old_name='estimated_price',
            new_name='estimated_display_price',
        ),
        migrations.AlterField(
            model_name='commission',
            name='customer',
            field=models.ForeignKey(related_name='commissions', to='public.Customer'),
            preserve_default=True,
        ),
    ]
| tomcounsell/Cobra | apps/public/migrations/0018_auto_20141219_1711.py | Python | gpl-2.0 | 646 |
"""
WSGI config for ldstext project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
# Must be set before get_wsgi_application() runs so Django can locate settings.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ldstext.settings")
from django.core.wsgi import get_wsgi_application
# Module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) look up.
application = get_wsgi_application()
| zowper/ldstext | ldstext/wsgi.py | Python | gpl-2.0 | 389 |
# ===========================================================================
# eXe config
# Copyright 2004-2006, University of Auckland
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
Config settings loaded from exe.conf
Is responsible for the system-wide settings we use
O/S specific config classes are derieved from here
"""
from exe.engine.configparser import ConfigParser
from exe.engine.path import Path
from exe.engine.locales import chooseDefaultLocale
from exe.engine import version
import logging
from logging.handlers import RotatingFileHandler
import sys
import os
import gettext
import tempfile
import twisted
import shutil
from exe import globals as G
from exe.engine.stylestore import StyleStore
from exe.webui import common
# Deferred-translation marker: tags strings for gettext extraction without
# translating them at import time (actual translation happens later).
x_ = lambda s: s
class Config(object):
    """
    The Config class contains the configuration information for eXe.
    """
    # To build link to git revision
    baseGitWebURL = 'https://forja.cenatic.es/plugins/scmgit/cgi-bin/gitweb.cgi?p=iteexe/iteexe.git'
    # Class attributes
    # Map of config-file section name -> option names persisted in it.
    optionNames = {
        'system': ('webDir', 'jsDir', 'port', 'dataDir',
                   'configDir', 'localeDir', 'browser', 'mediaProfilePath',
                   'videoMediaConverter_ogv', 'videoMediaConverter_3gp',
                   'videoMediaConverter_mpg',
                   'videoMediaConverter_avi', 'audioMediaConverter_ogg',
                   'audioMediaConverter_au', 'audioMediaConverter_mp3',
                   'audioMediaConverter_wav', 'ffmpegPath'),
        'user': ('locale', 'lastDir', 'showPreferencesOnStart','defaultStyle', 'showIdevicesGrouped','docType','editorMode'),
    }
    # Canonical iDevice name -> UI categories it is listed under.
    # BUGFIX: 'sort items' appeared twice; in a dict literal the later
    # duplicate key silently wins, so the earlier [x_('Experimental')] entry
    # was dead code and has been removed.
    idevicesCategories = {
        'activity': [x_('Non-Interactive Activities')],
        'reading activity': [x_('Non-Interactive Activities')],
        'dropdown activity': [x_('Interactive Activities')],
        'java applet': [x_('Non-Textual Information')],
        'wiki article': [x_('Non-Textual Information')],
        'case study': [x_('Non-Interactive Activities')],
        'preknowledge': [x_('Textual Information')],
        'scorm quiz': [x_('Interactive Activities')],
        'fpd - multi choice activity': [x_('FPD')],
        'fpd - cloze activity': [x_('FPD')],
        'fpd - cloze activity (modified)': [x_('FPD')],
        'fpd - multi select activity': [x_('FPD')],
        'fpd - true/false activity': [x_('FPD')],
        'fpd - situation': [x_('FPD')],
        'fpd - quotation': [x_('FPD')],
        'fpd - you should know': [x_('FPD')],
        'fpd - highlighted': [x_('FPD')],
        'fpd - translation': [x_('FPD')],
        'fpd - guidelines students': [x_('FPD')],
        'fpd - guidelines teacher': [x_('FPD')],
        'fpd - a step ahead': [x_('FPD')],
        'fpd - a piece of advice': [x_('FPD')],
        'fpd - think about it (with feedback)': [x_('FPD')],
        'fpd - think about it (without feedback)': [x_('FPD')],
        'fpd - free text': [x_('FPD')],
        'image gallery': [x_('Non-Textual Information')],
        'image magnifier': [x_('Non-Textual Information')],
        'note': [x_('Textual Information')],
        'objectives': [x_('Textual Information')],
        'multi-choice': [x_('Interactive Activities')],
        'multi-select': [x_('Interactive Activities')],
        'true-false question': [x_('Interactive Activities')],
        'reflection': [x_('Non-Interactive Activities')],
        'cloze activity': [x_('Interactive Activities')],
        'rss': [x_('Non-Textual Information')],
        'external web site': [x_('Non-Textual Information')],
        'free text': [x_('Textual Information')],
        'click in order game': [x_('Experimental')],
        'hangman game': [x_('Experimental')],
        'place the objects': [x_('Interactive Activities')],
        'memory match game': [x_('Experimental')],
        'file attachments': [x_('Non-Textual Information')],
        'sort items': [x_('Interactive Activities')],
        'scorm test cloze': [x_('Interactive Activities')],
        'scorm test cloze (multiple options)': [x_('Interactive Activities')],
        'scorm test dropdown': [x_('Interactive Activities')],
        'scorm test multiple choice': [x_('Interactive Activities')]
    }
@classmethod
def getConfigPath(cls):
obj = cls.__new__(cls)
obj.configParser = ConfigParser()
obj._overrideDefaultVals()
obj.__setConfigPath()
return obj.configPath
    def __init__(self):
        """
        Set every option to its built-in default, let the OS-specific
        subclass override them, locate/create the config file, then load
        settings, logging, locales and styles.  Order matters here.
        """
        self.configPath = None
        self.configParser = ConfigParser(self.onWrite)
        # Set default values
        # exePath is the whole path and filename of the exe executable
        self.exePath = Path(sys.argv[0]).abspath()
        # webDir is the parent directory for styles,scripts and templates
        self.webDir = self.exePath.dirname()
        self.jsDir = self.exePath.dirname()
        # localeDir is the base directory where all the locales are stored
        self.localeDir = self.exePath.dirname()/"locale"
        # port is the port the exe webserver will listen on
        # (previous default, which earlier users might still use, was 8081)
        self.port = 51235
        # dataDir is the default directory that is shown to the user
        # to save packages and exports in
        self.dataDir = Path(".")
        # configDir is the dir for storing user profiles
        # and user made idevices and the config file
        self.configDir = Path(".")
        #FM: New Styles Directory path
        self.stylesDir =Path(self.configDir/'style').abspath()
        #FM: Default Style name
        self.defaultStyle= u"KIC-IE"
        # browser is the name of a predefined browser specified at http://docs.python.org/library/webbrowser.html.
        # None for system default
        self.browser = None
        # docType is the HTML export format
        self.docType = 'XHTML'
        # locale is the language of the user
        self.locale = chooseDefaultLocale(self.localeDir)
        # internalAnchors indicate which exe_tmp_anchor tags to generate for each tinyMCE field
        # available values = "enable_all", "disable_autotop", or "disable_all"
        self.internalAnchors = "enable_all"
        self.lastDir = None
        self.showPreferencesOnStart = "1"
        self.showIdevicesGrouped = "1"
        # tinymce option
        self.editorMode = 'permissive'
        # styleSecureMode : if this [user] key is = 0 , exelearning can run python files in styles
        # as websitepage.py , ... ( deactivate secure mode )
        self.styleSecureMode="1"
        # styles is the list of style names available for loading
        self.styles = []
        # The documents that we've recently looked at
        self.recentProjects = []
        # canonical (English) names of iDevices not to show in the iDevice pane
        self.hiddeniDevices = []
        #Media conversion programs used for XML export system
        self.videoMediaConverter_ogv = ""
        self.videoMediaConverter_3gp = ""
        self.videoMediaConverter_avi = ""
        self.videoMediaConverter_mpg = ""
        self.audioMediaConverter_ogg = ""
        self.audioMediaConverter_au = ""
        self.audioMediaConverter_mp3 = ""
        self.audioMediaConverter_wav = ""
        self.ffmpegPath = ""
        self.mediaProfilePath = self.exePath.dirname()/'mediaprofiles'
        # likewise, a canonical (English) names of iDevices not to show in the
        # iDevice pane but, contrary to the hiddens, these are ones that the
        # configuration can specify to turn ON:
        self.deprecatediDevices = [ "flash with text", "flash movie", "mp3", \
                                    "attachment"]
        # by default, only allow embedding of media types for which a
        # browser plugin is found:
        self.assumeMediaPlugins = False;
        # Let our children override our defaults depending
        # on the OS that we're running on
        self._overrideDefaultVals()
        # Try to make the defaults a little intelligent
        # Under devel trees, webui is the default webdir
        self.webDir = Path(self.webDir)
        if not (self.webDir/'scripts').isdir() \
           and (self.webDir/'webui').isdir():
            self.webDir /= 'webui'
        self.jsDir = Path(self.jsDir)
        if not (self.jsDir/'scripts').isdir() \
           and (self.jsDir/'jsui').isdir():
            self.jsDir /= 'jsui'
        # Find where the config file will be saved
        self.__setConfigPath()
        # Fill in any undefined config options with our defaults
        self._writeDefaultConfigFile()
        # Now we are ready to serve the application
        self.loadSettings()
        self.setupLogging()
        self.loadLocales()
        self.loadStyles()
    def _overrideDefaultVals(self):
        """
        Hook for the OS-specific subclasses: override this to replace the
        default config values set in __init__.  Base implementation is a no-op.
        """
def _getConfigPathOptions(self):
"""
Override this to give a list of
possible config filenames
in order of preference
"""
return ['exe.conf']
def _writeDefaultConfigFile(self):
"""
[Over]writes 'self.configPath' with a default config file
(auto write is on so we don't need to write the file at the end)
"""
if not G.application.portable:
for sectionName, optionNames in self.optionNames.items():
for optionName in optionNames:
defaultVal = getattr(self, optionName)
self.configParser.setdefault(sectionName,
optionName,
defaultVal)
# Logging can't really be changed from inside the program at the moment...
self.configParser.setdefault('logging', 'root', 'INFO')
    def __setConfigPath(self):
        """
        sets self.configPath to the filename of the config file that we'll
        use.
        In descendant classes set self.configFileOptions to a list
        of directories where the configDir should be in order of preference.
        If no config files can be found in these dirs, it will
        force creation of the config file in the top dir
        """
        # If there's an EXECONF environment variable, use it
        self.configPath = None
        configFileOptions = map(Path, self._getConfigPathOptions())
        if "EXECONF" in os.environ:
            envconf = Path(os.environ["EXECONF"])
            if envconf.isfile():
                self.configPath = os.environ["EXECONF"]
        # Otherwise find the most appropriate existing file
        if self.configPath is None:
            for confPath in configFileOptions:
                if confPath.isfile():
                    self.configPath = confPath
                    break
            else:
                # for/else: runs only when the loop completed without break.
                # If no config files exist, create and use the
                # first one on the list
                self.configPath = configFileOptions[0]
                folder = self.configPath.abspath().dirname()
                if not folder.exists():
                    folder.makedirs()
                self.configPath.touch()
        # Now make our configParser
        self.configParser.read(self.configPath)
        # From here on every change is persisted immediately.
        self.configParser.autoWrite = True
    def upgradeFile(self):
        """
        Called before loading the config file,
        removes or upgrades any old settings.
        """
        if self.configParser.has_section('system'):
            system = self.configParser.system
            if system.has_option('appDataDir'):
                # Older config files had configDir stored as appDataDir
                self.configDir = Path(system.appDataDir)
                self.stylesDir =Path(self.configDir)/'style'
                # We'll just upgrade their config file for them for now...
                system.configDir = self.configDir
                system.stylesDir =Path(self.configDir)/'style'
                del system.appDataDir
                # Carry over the media-converter paths from the old file.
                self.audioMediaConverter_au = system.audioMediaConverter_au
                self.audioMediaConverter_wav = system.audioMediaConverter_wav
                self.videoMediaConverter_ogv = system.videoMediaConverter_ogv
                self.videoMediaConverter_3gp = system.videoMediaConverter_3gp
                self.videoMediaConverter_avi = system.videoMediaConverter_avi
                self.videoMediaConverter_mpg = system.videoMediaConverter_mpg
                self.audioMediaConverter_ogg = system.audioMediaConverter_ogg
                self.audioMediaConverter_mp3 = system.audioMediaConverter_mp3
                self.ffmpegPath = system.ffmpegPath
                self.mediaProfilePath = system.mediaProfilePath
            if system.has_option('greDir'):
                # No longer used, system should automatically support
                del system.greDir
    def loadSettings(self):
        """
        Loads the settings from the exe.conf file.
        Overrides the defaults set in __init__
        """
        # Set up the parser so that if a certain value is not in the config
        # file, it will use the value from our default values
        def defVal(dummy, option):
            """If something is not in the config file, just use the default in
            'self'"""
            return getattr(self, option)
        self.configParser.defaultValue = defVal
        self.upgradeFile()
        # System Section
        if self.configParser.has_section('system'):
            system = self.configParser.system
            self.port = int(system.port)
            self.browser = None if system.browser == u"None" else system.browser
            if not G.application.portable:
                self.dataDir = Path(system.dataDir)
                self.configDir = Path(system.configDir)
                self.webDir = Path(system.webDir)
                self.stylesDir = Path(self.configDir)/'style'
                self.jsDir = Path(system.jsDir)
            else:
                self.stylesDir = Path(self.webDir/'style').abspath()
            self.assumeMediaPlugins = False;
            if self.configParser.has_option('system', \
                    'assumeMediaPlugins'):
                value = system.assumeMediaPlugins.strip().lower()
                if value == "1" or value == "yes" or value == "true" or \
                        value == "on":
                    self.assumeMediaPlugins = True;
        # If the dataDir points to some other dir, fix it
        if not self.dataDir.isdir():
            self.dataDir = tempfile.gettempdir()
        # make the webDir absolute, to hide path joins of relative paths
        self.webDir = self.webDir.expand().abspath()
        # If the configDir doesn't exist (as it may be a default setting with a
        # new installation) create it
        if not self.configDir.exists():
            self.configDir.mkdir()
        if not G.application.standalone:
            #FM: Copy styles
            if not os.path.exists(self.stylesDir) or not os.listdir(self.stylesDir):
                self.copyStyles()
            else:
                self.updateStyles()
        else:
            if G.application.portable:
                if os.name == 'posix':
                    self.stylesDir = Path(self.webDir/'..'/'..'/'..'/'style')
                else:
                    self.stylesDir = Path(self.webDir/'..'/'style')
                if not os.path.exists(self.stylesDir) or not os.listdir(self.stylesDir):
                    self.copyStyles()
            else:
                self.stylesDir = Path(self.webDir/'style').abspath()
        # Get the list of recently opened projects
        self.recentProjects = []
        if self.configParser.has_section('recent_projects'):
            recentProjectsSection = self.configParser.recent_projects
            # recentProjectsSection.items() is in the wrong order, keys are alright.
            # Sorting list by key before adding to self.recentProjects, to avoid wrong ordering
            # in Recent Projects menu list
            recentProjectsItems = recentProjectsSection.items();
            recentProjectsItems.sort()
            for key, path in recentProjectsItems:
                self.recentProjects.append(path)
        # Load the list of "hidden" iDevices
        self.hiddeniDevices = []
        if self.configParser.has_section('idevices'):
            idevicesSection = self.configParser.idevices
            for key,value in idevicesSection.items():
                # emulate standard library's getboolean()
                value = value.strip().lower()
                if value == "0" or value == "no" or value == "false" or \
                        value == "off":
                    self.hiddeniDevices.append(key.lower())
        #self.deprecatediDevices = [ "flash with text", "flash movie", ...]
        # and UN-Load from the list of "deprecated" iDevices
        if self.configParser.has_section('deprecated'):
            deprecatedSection = self.configParser.deprecated
            for key,value in deprecatedSection.items():
                # emulate standard library's getboolean()
                value = value.strip().lower()
                if value == "1" or value == "yes" or value == "true" or \
                        value == "on":
                    if key.lower() in self.deprecatediDevices:
                        self.deprecatediDevices.remove(key.lower())
        # Load the "user" section
        if self.configParser.has_section('user'):
            if self.configParser.user.has_option('editorMode'):
                self.editorMode = self.configParser.user.editorMode
            if self.configParser.user.has_option('docType'):
                self.docType = self.configParser.user.docType
                common.setExportDocType(self.configParser.user.docType)
            if self.configParser.user.has_option('defaultStyle'):
                self.defaultStyle= self.configParser.user.defaultStyle
            if self.configParser.user.has_option('styleSecureMode'):
                self.styleSecureMode= self.configParser.user.styleSecureMode
            if self.configParser.user.has_option('internalAnchors'):
                self.internalAnchors = self.configParser.user.internalAnchors
            if self.configParser.user.has_option('lastDir'):
                self.lastDir = self.configParser.user.lastDir
            if self.configParser.user.has_option('showPreferencesOnStart'):
                self.showPreferencesOnStart = self.configParser.user.showPreferencesOnStart
            if self.configParser.user.has_option('showIdevicesGrouped'):
                self.showIdevicesGrouped = self.configParser.user.showIdevicesGrouped
            if self.configParser.user.has_option('locale'):
                self.locale = self.configParser.user.locale
                # A configured locale wins; skip the fallback below.
                return
        self.locale = chooseDefaultLocale(self.localeDir)
def onWrite(self, configParser):
"""
Called just before the config file is written.
We use it to fill out any settings that are stored here and
not in the config parser itself
"""
# Recent projects
self.configParser.delete('recent_projects')
recentProjectsSection = self.configParser.addSection('recent_projects')
for num, path in enumerate(self.recentProjects):
recentProjectsSection[str(num)] = path
    def setupLogging(self):
        """
        Attach a rotating file handler for exe.log under configDir, apply
        the levels from the [logging] config section, and log a startup
        banner with the effective settings.
        """
        try:
            hdlr = RotatingFileHandler(self.configDir/'exe.log', 'a',
                                       500000, 10)
            hdlr.doRollover()
        except OSError:
            # ignore the error we get if the log file is logged
            hdlr = logging.FileHandler(self.configDir/'exe.log')
        format = "%(asctime)s %(name)s %(levelname)s %(message)s"
        log = logging.getLogger()
        hdlr.setFormatter(logging.Formatter(format))
        log.addHandler(hdlr)
        loggingLevels = {"DEBUG"    : logging.DEBUG,
                         "INFO"     : logging.INFO,
                         "WARNING"  : logging.WARNING,
                         "ERROR"    : logging.ERROR,
                         "CRITICAL" : logging.CRITICAL }
        if self.configParser.has_section('logging'):
            # "root" configures the root logger; any other key is treated as
            # a named logger.
            for logger, level in self.configParser._sections["logging"].items():
                if logger == "root":
                    logging.getLogger().setLevel(loggingLevels[level])
                else:
                    logging.getLogger(logger).setLevel(loggingLevels[level])
        if not G.application.portable:
            log.info("************** eXe logging started **************")
            log.info("version     = %s" % version.version)
            log.info("configPath  = %s" % self.configPath)
            log.info("exePath     = %s" % self.exePath)
            log.info("libPath     = %s" % Path(twisted.__path__[0]).splitpath()[0])
            log.info("browser     = %s" % self.browser)
            log.info("webDir      = %s" % self.webDir)
            log.info("jsDir       = %s" % self.jsDir)
            log.info("localeDir   = %s" % self.localeDir)
            log.info("port        = %d" % self.port)
            log.info("dataDir     = %s" % self.dataDir)
            log.info("configDir   = %s" % self.configDir)
            log.info("locale      = %s" % self.locale)
            log.info("internalAnchors = %s" % self.internalAnchors)
def loadStyles(self):
"""
Scans the eXe style directory and builds a list of styles
"""
self.styleStore = StyleStore(self)
listStyles = self.styleStore.getStyles()
for style in listStyles:
self.styles.append(style)
#print style
    def copyStyles(self):
        # Seed the user's style directory from the styles bundled with eXe.
        bkstyle=self.webDir/'style'
        dststyle=self.stylesDir
        if os.path.exists(bkstyle):
            # copytree requires the target not to exist, so remove the
            # destination first -- but only when it is empty (callers invoke
            # this method when the dir is missing or empty).
            if os.path.exists(dststyle) and not os.listdir(self.stylesDir): shutil.rmtree(dststyle)
            shutil.copytree(bkstyle,dststyle )
    def updateStyles(self):
        # Refresh the user's style directory from the bundled styles when the
        # bundled copy is newer (mtime difference of more than one second).
        bkstyle=self.webDir/'style'
        dststyle=self.stylesDir
        if os.stat(bkstyle).st_mtime - os.stat(dststyle).st_mtime > 1:
            for name in os.listdir(bkstyle):
                bksdirstyle=os.path.join(bkstyle, name)
                dstdirstyle=os.path.join(dststyle, name)
                if os.path.isdir(bksdirstyle):
                    # copytree needs a clean target; wipe any stale copy.
                    if os.path.exists(dstdirstyle):shutil.rmtree(dstdirstyle)
                    shutil.copytree(bksdirstyle, dstdirstyle)
                else:
                    shutil.copy(bksdirstyle, dstdirstyle)
    def loadLocales(self):
        """
        Scans the eXe locale directory and builds a list of locales
        """
        log = logging.getLogger()
        log.debug("loadLocales")
        # Installs _() into builtins as a fallback (unicode=True).
        gettext.install('exe', self.localeDir, True)
        self.locales = {}
        for subDir in self.localeDir.dirs():
            # Only directories with a compiled catalog count as locales.
            if (subDir/'LC_MESSAGES'/'exe.mo').exists():
                self.locales[subDir.basename()] = \
                    gettext.translation('exe',
                                        self.localeDir,
                                        languages=[str(subDir.basename())])
                if subDir.basename() == self.locale:
                    # This is the configured locale: make it the active
                    # translation and expose c_() globally.
                    locale = subDir.basename()
                    log.debug(" loading locale %s" % locale)
                    self.locales[locale].install(unicode=True)
                    __builtins__['c_'] = lambda s: self.locales[locale].ugettext(s) if s else s
# ===========================================================================
| kohnle-lernmodule/KITexe201based | exe/engine/config.py | Python | gpl-2.0 | 25,533 |
# -*- coding: utf-8 -*-
#
# This file is part of CERN Analysis Preservation Framework.
# Copyright (C) 2017 CERN.
#
# CERN Analysis Preservation Framework is free software; you can redistribute
# it and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# CERN Analysis Preservation Framework is distributed in the hope that it will
# be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CERN Analysis Preservation Framework; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
# or submit itself to any jurisdiction.
"""Integration tests for deleting deposits."""
import json
# #######################################
# # api/deposits/{pid} [DELETE]
# #######################################
def test_delete_deposit_with_non_existing_pid_returns_404(app,
                                                          auth_headers_for_superuser):
    """A DELETE on a pid that does not exist must respond with 404."""
    with app.test_client() as client:
        response = client.delete('/deposits/non-existing-pid',
                                 headers=auth_headers_for_superuser)

        assert response.status_code == 404
def test_delete_deposit_when_user_has_no_permission_returns_403(app,
                                                                users,
                                                                create_deposit,
                                                                auth_headers_for_user):
    """A user without any permission on a deposit cannot delete it."""
    deposit = create_deposit(users['lhcb_user'], 'lhcb-v0.0.1')
    pid = deposit['_deposit']['id']
    headers_of_other_user = auth_headers_for_user(users['lhcb_user2'])

    with app.test_client() as client:
        response = client.delete(f'/deposits/{pid}',
                                 headers=headers_of_other_user)

        assert response.status_code == 403
def test_delete_deposit_when_user_is_owner_can_delete_his_deposit(app,
                                                                  users,
                                                                  create_deposit,
                                                                  json_headers,
                                                                  auth_headers_for_user):
    """The owner can delete his deposit; afterwards it is gone (410)."""
    owner = users['lhcb_user']
    pid = create_deposit(owner, 'lhcb-v0.0.1')['_deposit']['id']
    headers = auth_headers_for_user(owner) + json_headers

    with app.test_client() as client:
        delete_response = client.delete(f'/deposits/{pid}', headers=headers)

        assert delete_response.status_code == 204

        # the deposit must not be retrievable any longer
        get_response = client.get(f'/deposits/{pid}', headers=headers)

        assert get_response.status_code == 410
def test_delete_deposit_when_deposit_published_already_cant_be_deleted(app,
                                                                       users,
                                                                       create_deposit,
                                                                       json_headers,
                                                                       auth_headers_for_user):
    """Once a deposit has been published it can no longer be deleted."""
    user = users['lhcb_user']
    deposit = create_deposit(user, 'lhcb-v0.0.1')
    headers = auth_headers_for_user(user) + json_headers
    pid = deposit['_deposit']['id']

    with app.test_client() as client:
        client.post(f'/deposits/{pid}/actions/publish', headers=headers)

        response = client.delete(f'/deposits/{pid}', headers=headers)

        assert response.status_code == 403

        # the deposit is still there
        response = client.get(f'/deposits/{pid}', headers=headers)

        assert response.status_code == 200
def test_delete_deposit_when_superuser_can_delete_others_deposit(app,
                                                                 users,
                                                                 create_deposit,
                                                                 auth_headers_for_superuser):
    """A superuser may delete a deposit owned by somebody else."""
    pid = create_deposit(users['lhcb_user'], 'lhcb-v0.0.1')['_deposit']['id']

    with app.test_client() as client:
        response = client.delete(f'/deposits/{pid}',
                                 headers=auth_headers_for_superuser)

        assert response.status_code == 204
def test_delete_deposit_when_user_with_admin_access_can_delete(app,
                                                               users,
                                                               create_deposit,
                                                               auth_headers_for_user,
                                                               json_headers):
    """A user granted deposit-admin access can delete the deposit."""
    owner, other_user = users['lhcb_user'], users['cms_user']
    pid = create_deposit(owner, 'lhcb-v0.0.1')['_deposit']['id']
    admin_grant = [{
        'email': other_user.email,
        'type': 'user',
        'op': 'add',
        'action': 'deposit-admin'
    }]

    with app.test_client() as client:
        # give other user admin access
        client.post(f'/deposits/{pid}/actions/permissions',
                    headers=auth_headers_for_user(owner) + json_headers,
                    data=json.dumps(admin_grant))

        response = client.delete(f'/deposits/{pid}',
                                 headers=auth_headers_for_user(other_user))

        assert response.status_code == 204
def test_delete_deposit_when_user_only_with_read_write_access_returns_403(app,
                                                                          users,
                                                                          create_deposit,
                                                                          auth_headers_for_user,
                                                                          json_headers):
    """Read and update access alone is not sufficient to delete a deposit."""
    owner, other_user = users['lhcb_user'], users['cms_user']
    pid = create_deposit(owner, 'lhcb-v0.0.1')['_deposit']['id']
    read_write_grant = [
        {
            'email': other_user.email,
            'type': 'user',
            'op': 'add',
            'action': action
        }
        for action in ('deposit-read', 'deposit-update')
    ]

    with app.test_client() as client:
        # give other user read/write (but not admin) access
        client.post(f'/deposits/{pid}/actions/permissions',
                    headers=auth_headers_for_user(owner) + json_headers,
                    data=json.dumps(read_write_grant))

        response = client.delete(f'/deposits/{pid}',
                                 headers=auth_headers_for_user(other_user))

        assert response.status_code == 403
| tiborsimko/analysis-preservation.cern.ch | tests/integration/test_delete_deposit.py | Python | gpl-2.0 | 7,407 |
# The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot
from numpy.random import random
from bokeh.plotting import *
def mscatter(p, x, y, typestr):
    """Add a scatter glyph of marker type ``typestr`` to figure ``p``."""
    p.scatter(
        x, y,
        marker=typestr,
        size=12,
        fill_color="#ee6666",
        fill_alpha=0.5,
        line_color="#6666ee",
    )
def mtext(p, x, y, textstr):
    """Add a centred, green, 10pt text label to figure ``p``."""
    p.text(
        x, y,
        text=textstr,
        text_align="center",
        text_font_size="10pt",
        text_color="#449944",
    )
# Send the rendered document to the running plot server, session "markers".
output_server("markers")
p = figure(title="markers.py example")
# Number of random points per marker cluster.
N = 10
# First row of clusters (y around 1..2).
mscatter(p, random(N)+2, random(N)+1, "circle")
mscatter(p, random(N)+4, random(N)+1, "square")
mscatter(p, random(N)+6, random(N)+1, "triangle")
mscatter(p, random(N)+8, random(N)+1, "asterisk")
# Second row of clusters (y around 4..5).
mscatter(p, random(N)+2, random(N)+4, "circle_x")
mscatter(p, random(N)+4, random(N)+4, "square_x")
mscatter(p, random(N)+6, random(N)+4, "inverted_triangle")
mscatter(p, random(N)+8, random(N)+4, "x")
# Third row of clusters (y around 7..8).
mscatter(p, random(N)+2, random(N)+7, "circle_cross")
mscatter(p, random(N)+4, random(N)+7, "square_cross")
mscatter(p, random(N)+6, random(N)+7, "diamond")
mscatter(p, random(N)+8, random(N)+7, "cross")
# Caption each cluster just below it (x at cluster centre, y below the row).
mtext(p, [2.5], [0.5], "circle / o")
mtext(p, [4.5], [0.5], "square")
mtext(p, [6.5], [0.5], "triangle")
mtext(p, [8.5], [0.5], "asterisk / *")
mtext(p, [2.5], [3.5], "circle_x / ox")
mtext(p, [4.5], [3.5], "square_x")
mtext(p, [6.5], [3.5], "inverted_triangle")
mtext(p, [8.5], [3.5], "x")
mtext(p, [2.5], [6.5], "circle_cross / o+")
mtext(p, [4.5], [6.5], "square_cross")
mtext(p, [6.5], [6.5], "diamond")
mtext(p, [8.5], [6.5], "cross / +")
show(p)  # open a browser
| zrhans/python | exemplos/Examples.lnk/bokeh/plotting/server/markers.py | Python | gpl-2.0 | 1,590 |
#########################################################################
## This program is part of 'MOOSE', the
## Messaging Object Oriented Simulation Environment.
## Copyright (C) 2013 Upinder S. Bhalla. and NCBS
## It is made available under the terms of the
## GNU Lesser General Public License version 2.1
## See the file COPYING.LIB for the full notice.
#########################################################################
import math
import pylab
import numpy
import moose
runtime = 50.0
def makeModel():
    """Build the Lotka-Volterra model tree under /model.

    Two pools (x, y) feed two reactions (x->z, y->z) whose forward rates
    are recomputed on the fly by Function objects; this is how the
    variable rate terms of the Lotka-Volterra equations are expressed.
    """
    # create container for model
    model = moose.Neutral( 'model' )
    harmonic = moose.CubeMesh( '/model/harmonic' )
    harmonic.volume = 1e-15
    lotka = moose.CubeMesh( '/model/lotka' )
    lotka.volume = 1e-15
    # create molecules and reactions
    x = moose.Pool( '/model/lotka/x' )
    y = moose.Pool( '/model/lotka/y' )
    z = moose.BufPool( '/model/lotka/z' ) # Dummy molecule.
    xreac = moose.Reac( '/model/lotka/xreac' )
    yreac = moose.Reac( '/model/lotka/yreac' )
    xrate = moose.Function( '/model/lotka/xreac/func' )
    yrate = moose.Function( '/model/lotka/yreac/func' )
    # Parameters
    alpha = 1.0
    beta = 1.0
    gamma = 1.0
    delta = 1.0
    k = 1.0
    x.nInit = 2.0
    y.nInit = 1.0
    z.nInit = 0.0
    # Each rate Function takes one input (x0): the other species' number.
    xrate.x.num = 1
    yrate.x.num = 1
    # Rate expressions: x->z runs at (beta*y - alpha), y->z at (gamma - delta*x).
    xrate.expr = "x0 * " + str( beta ) + " - " + str( alpha )
    yrate.expr = str( gamma ) + " - x0 * " + str( delta )
    xreac.Kf = k
    yreac.Kf = k
    xreac.Kb = 0
    yreac.Kb = 0
    # connect them up for reactions
    moose.connect( y, 'nOut', xrate.x[0], 'input' )
    moose.connect( x, 'nOut', yrate.x[0], 'input' )
    moose.connect( xrate, 'valueOut', xreac, 'setNumKf' )
    moose.connect( yrate, 'valueOut', yreac, 'setNumKf' )
    moose.connect( xreac, 'sub', x, 'reac' )
    moose.connect( xreac, 'prd', z, 'reac' )
    moose.connect( yreac, 'sub', y, 'reac' )
    moose.connect( yreac, 'prd', z, 'reac' )
    # Create the output tables
    graphs = moose.Neutral( '/model/graphs' )
    xplot = moose.Table2 ( '/model/graphs/x' )
    yplot = moose.Table2 ( '/model/graphs/y' )
    # connect up the tables
    moose.connect( xplot, 'requestOut', x, 'getN' );
    moose.connect( yplot, 'requestOut', y, 'getN' );
def main():
    """
    The funcReacLotkaVolterra example shows how to use function objects
    as part of differential equation systems in the framework of the MOOSE
    kinetic solvers. Here the system is set up explicitly using the
    scripting, in normal use one would expect to use SBML.
    In this example we set up a Lotka-Volterra system. The equations
    are readily expressed as a pair of reactions each of whose rate is
    governed by a function::
            x' = x( alpha - beta.y )
            y' = -y( gamma - delta.x )
    This translates into two reactions::
            x ---> z        Kf = beta.y - alpha
            y ---> z        Kf = gamma - delta.x
    Here z is a dummy molecule whose concentration is buffered to zero.
    The model first runs using default Exponential Euler integration.
    This is not particularly accurate even with a small timestep.
    The model is then converted to use the deterministic Kinetic solver
    Ksolve. This is accurate and faster.
    Note that we cannot use the stochastic GSSA solver for this system, it
    cannot handle a reaction term whose rate keeps changing.
    """
    makeModel()
    # Fine timestep for the first (Exponential Euler) run.
    for i in range( 11, 18 ):
        moose.setClock( i, 0.001 )
    moose.setClock( 18, 0.1 )
    moose.reinit()
    moose.start( runtime ) # Run the model
    # Iterate through all plots, dump their contents to data.plot.
    for x in moose.wildcardFind( '/model/graphs/#' ):
        #x.xplot( 'scriptKineticModel.plot', x.name )
        t = numpy.arange( 0, x.vector.size, 1 ) * x.dt # sec
        pylab.plot( t, x.vector, label=x.name )
    pylab.ylim( 0, 2.5 )
    pylab.title( "Exponential Euler solution. Note slight error buildup" )
    pylab.legend()
    pylab.figure()
    # Switch the lotka compartment over to the deterministic Ksolve solver.
    compt = moose.element( '/model/lotka' )
    ksolve = moose.Ksolve( '/model/lotka/ksolve' )
    stoich = moose.Stoich( '/model/lotka/stoich' )
    stoich.compartment = compt
    stoich.ksolve = ksolve
    stoich.path = '/model/lotka/##'
    moose.reinit()
    moose.start( runtime ) # Run the model
    # NOTE(review): these clock changes come after the solver run has
    # already finished, so they cannot affect the results above -- confirm
    # whether they were meant to precede moose.start().
    for i in range( 11, 18 ):
        moose.setClock( i, 0.1 )
    for x in moose.wildcardFind( '/model/graphs/#' ):
        t = numpy.arange( 0, x.vector.size, 1 ) * x.dt # sec
        pylab.plot( t, x.vector, label=x.name )
    pylab.ylim( 0, 2.5 )
    pylab.title( "Runge-Kutta solution." )
    pylab.legend()
    pylab.show()
    # Terminates the interpreter once the plot windows are closed.
    quit()
# Run the 'main' if this script is executed standalone (not imported).
if __name__ == '__main__':
    main()
| dilawar/moose-full | moose-examples/snippets/funcReacLotkaVolterra.py | Python | gpl-2.0 | 5,043 |
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2010 Nathanael C. Fritz
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
from sleekxmpp.plugins.base import PluginManager, PluginNotFound, BasePlugin
from sleekxmpp.plugins.base import register_plugin, load_plugin
# Public plugin registry: each entry is a module name under
# sleekxmpp.plugins that can be enabled by name.  Commented-out entries
# exist but are not loaded automatically.
__all__ = [
    # XEPS
    'xep_0004', # Data Forms
    'xep_0009', # Jabber-RPC
    'xep_0012', # Last Activity
    'xep_0013', # Flexible Offline Message Retrieval
    'xep_0016', # Privacy Lists
    'xep_0020', # Feature Negotiation
    'xep_0027', # Current Jabber OpenPGP Usage
    'xep_0030', # Service Discovery
    'xep_0033', # Extended Stanza Addresses
    'xep_0045', # Multi-User Chat (Client)
    'xep_0047', # In-Band Bytestreams
    'xep_0048', # Bookmarks
    'xep_0049', # Private XML Storage
    'xep_0050', # Ad-hoc Commands
    'xep_0054', # vcard-temp
    'xep_0059', # Result Set Management
    'xep_0060', # Pubsub (Client)
    'xep_0065', # SOCKS5 Bytestreams
    'xep_0066', # Out of Band Data
    'xep_0071', # XHTML-IM
    'xep_0077', # In-Band Registration
    # 'xep_0078', # Non-SASL auth. Don't automatically load
    'xep_0079', # Advanced Message Processing
    'xep_0080', # User Location
    'xep_0082', # XMPP Date and Time Profiles
    'xep_0084', # User Avatar
    'xep_0085', # Chat State Notifications
    'xep_0086', # Legacy Error Codes
    'xep_0091', # Legacy Delayed Delivery
    'xep_0092', # Software Version
    'xep_0106', # JID Escaping
    'xep_0107', # User Mood
    'xep_0108', # User Activity
    'xep_0115', # Entity Capabilities
    'xep_0118', # User Tune
    'xep_0122', # Data Forms Validation
    'xep_0128', # Extended Service Discovery
    'xep_0131', # Standard Headers and Internet Metadata
    'xep_0133', # Service Administration
    'xep_0152', # Reachability Addresses
    'xep_0153', # vCard-Based Avatars
    'xep_0163', # Personal Eventing Protocol
    'xep_0172', # User Nickname
    'xep_0184', # Message Receipts
    'xep_0186', # Invisible Command
    'xep_0191', # Blocking Command
    'xep_0196', # User Gaming
    'xep_0198', # Stream Management
    'xep_0199', # Ping
    'xep_0202', # Entity Time
    'xep_0203', # Delayed Delivery
    'xep_0221', # Data Forms Media Element
    'xep_0222', # Persistent Storage of Public Data via Pubsub
    'xep_0223', # Persistent Storage of Private Data via Pubsub
    'xep_0224', # Attention
    'xep_0231', # Bits of Binary
    'xep_0235', # OAuth Over XMPP
    'xep_0242', # XMPP Client Compliance 2009
    'xep_0249', # Direct MUC Invitations
    'xep_0256', # Last Activity in Presence
    'xep_0257', # Client Certificate Management for SASL EXTERNAL
    'xep_0258', # Security Labels in XMPP
    'xep_0270', # XMPP Compliance Suites 2010
    'xep_0279', # Server IP Check
    'xep_0280', # Message Carbons
    'xep_0297', # Stanza Forwarding
    'xep_0302', # XMPP Compliance Suites 2012
    'xep_0308', # Last Message Correction
    'xep_0313', # Message Archive Management
    'xep_0319', # Last User Interaction in Presence
    'xep_0323', # IoT Systems Sensor Data
    'xep_0325', # IoT Systems Control
    'xep_0332', # HTTP Over XMPP Transport
]
| Haynie-Research-and-Development/jarvis | deps/lib/python3.4/site-packages/sleekxmpp/plugins/__init__.py | Python | gpl-2.0 | 3,265 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Prefer setuptools; fall back to distutils on minimal installations.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
# The long description shown on PyPI is the README plus the changelog,
# with the ':changelog:' marker stripped from the history file.
with open('README.rst') as readme_file:
    readme = readme_file.read()
with open('HISTORY.rst') as history_file:
    history = history_file.read().replace('.. :changelog:', '')
requirements = [
    # TODO: put package requirements here
]
test_requirements = [
    # TODO: put package test requirements here
]
setup(
    name='kubestack',
    version='0.1.0',
    description="Python app to manage dynamic Jenkins slaves with Kubernetes",
    long_description=readme + '\n\n' + history,
    author="Yolanda Robla",
    author_email='info@ysoft.biz',
    url='https://github.com/yrobla/kubestack',
    packages=[
        'kubestack',
    ],
    package_dir={'kubestack':
                 'kubestack'},
    include_package_data=True,
    install_requires=requirements,
    license="BSD",
    zip_safe=False,
    keywords='kubestack',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],
    test_suite='tests',
    tests_require=test_requirements
)
from .transport import TransportEvents
from . import connection_info
import gui
import speech
import ui
import braille
import versionInfo
from logHandler import log
from . import configuration
from . import nvda_patcher
from . import RelayTransport
from collections import defaultdict
from . import connection_info
from . import cues
import hashlib
import addonHandler
addonHandler.initTranslation()
# Compatibility shim: NVDA older than 2020.2 has no _CancellableSpeechCommand,
# so install a harmless stand-in class there; real commands never match it.
if not (
    versionInfo.version_year >= 2021 or
    (versionInfo.version_year == 2020 and versionInfo.version_major >= 2)
):
    # NVDA versions newer than 2020.2 have a _CancellableSpeechCommand which should be ignored by NVDA remote
    # For older versions, we create a dummy command that won't cause existing commands to be ignored.
    class _DummyCommand(speech.commands.SpeechCommand): pass
    speech.commands._CancellableSpeechCommand = _DummyCommand
# Speech commands that must be stripped before a sequence is sent remotely.
EXCLUDED_SPEECH_COMMANDS = (
    speech.commands.BaseCallbackCommand,
    # _CancellableSpeechCommands are not designed to be reported and are used internally by NVDA. (#230)
    speech.commands._CancellableSpeechCommand,
)
class RemoteSession:
    """Base class for one end of a remote connection.

    Registers the handlers shared by master and slave sessions:
    version-mismatch notification and message-of-the-day display.
    """
    def __init__(self, local_machine, transport: RelayTransport):
        self.local_machine = local_machine
        self.patcher = None
        self.transport = transport
        self.transport.callback_manager.register_callback('msg_version_mismatch', self.handle_version_mismatch)
        self.transport.callback_manager.register_callback('msg_motd', self.handle_motd)
    def handle_version_mismatch(self, **kwargs):
        # Inform the user and drop the connection; the protocol versions
        # of client and relay server are incompatible.
        #translators: Message for version mismatch
        message = _("""The version of the relay server which you have connected to is not compatible with this version of the Remote Client.
Please either use a different server or upgrade your version of the addon.""")
        ui.message(message)
        self.transport.close()
    def handle_motd(self, motd: str, force_display=False, **kwargs):
        # Show the server's message of the day unless it was already seen.
        if force_display or self.should_display_motd(motd):
            gui.messageBox(parent=gui.mainFrame, caption=_("Message of the Day"), message=motd)
    def should_display_motd(self, motd: str):
        # A SHA-1 digest of the MOTD is remembered per "host:port"; the
        # message is shown only when the digest differs from the stored one.
        conf = configuration.get_config()
        host, port = self.transport.address
        host = host.lower()
        address = '{host}:{port}'.format(host=host, port=port)
        motdBytes = motd.encode('utf-8', errors='surrogatepass')
        hashed = hashlib.sha1(motdBytes).hexdigest()
        current = conf['seen_motds'].get(address, "")
        if current == hashed:
            return False
        conf['seen_motds'][address] = hashed
        conf.write()
        return True
class SlaveSession(RemoteSession):
    """Session that runs on the slave and manages state."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.transport.callback_manager.register_callback('msg_client_joined', self.handle_client_connected)
        self.transport.callback_manager.register_callback('msg_client_left', self.handle_client_disconnected)
        self.transport.callback_manager.register_callback('msg_key', self.local_machine.send_key)
        # Connected master clients, keyed by client id; values hold per-master
        # state such as the braille display name/size reported by that master.
        self.masters = defaultdict(dict)
        self.master_display_sizes = []
        self.transport.callback_manager.register_callback('msg_index', self.recv_index)
        self.transport.callback_manager.register_callback(TransportEvents.CLOSING, self.handle_transport_closing)
        self.patcher = nvda_patcher.NVDASlavePatcher()
        self.patch_callbacks_added = False
        self.transport.callback_manager.register_callback('msg_channel_joined', self.handle_channel_joined)
        self.transport.callback_manager.register_callback('msg_set_clipboard_text', self.local_machine.set_clipboard_text)
        self.transport.callback_manager.register_callback('msg_set_braille_info', self.handle_braille_info)
        self.transport.callback_manager.register_callback('msg_set_display_size', self.set_display_size)
        self.transport.callback_manager.register_callback('msg_braille_input', self.local_machine.braille_input)
        self.transport.callback_manager.register_callback('msg_send_SAS', self.local_machine.send_SAS)
    def get_connection_info(self):
        # Describe this end of the connection for reconnect/URL purposes.
        hostname, port = self.transport.address
        key = self.transport.channel
        return connection_info.ConnectionInfo(hostname=hostname, port=port, key=key, mode='slave')
    def handle_client_connected(self, client=None, **kwargs):
        # Start forwarding local output (speech/braille/tones) once a
        # controlling client is present.
        self.patcher.patch()
        if not self.patch_callbacks_added:
            self.add_patch_callbacks()
            self.patch_callbacks_added = True
        cues.client_connected()
        if client['connection_type'] == 'master':
            self.masters[client['id']]['active'] = True
    def handle_channel_joined(self, channel=None, clients=None, origin=None, **kwargs):
        # Treat every client already present in the channel as newly connected.
        if clients is None:
            clients = []
        for client in clients:
            self.handle_client_connected(client)
    def handle_transport_closing(self):
        self.patcher.unpatch()
        if self.patch_callbacks_added:
            self.remove_patch_callbacks()
            self.patch_callbacks_added = False
    def handle_transport_disconnected(self):
        # NOTE(review): this plays the *connected* cue on disconnect;
        # cues.client_disconnected() may have been intended -- confirm.
        cues.client_connected()
        self.patcher.unpatch()
    def handle_client_disconnected(self, client=None, **kwargs):
        cues.client_disconnected()
        if client['connection_type'] == 'master':
            del self.masters[client['id']]
        # Stop forwarding output once no master is left controlling us.
        if not self.masters:
            self.patcher.unpatch()
    def set_display_size(self, sizes=None, **kwargs):
        # Use explicit sizes when given, otherwise collect the sizes that
        # each connected master reported via msg_set_braille_info.
        self.master_display_sizes = sizes if sizes else [info.get("braille_numCells", 0) for info in self.masters.values()]
        self.local_machine.set_braille_display_size(self.master_display_sizes)
    def handle_braille_info(self, name=None, numCells=0, origin=None, **kwargs):
        if not self.masters.get(origin):
            return
        self.masters[origin]['braille_name'] = name
        self.masters[origin]['braille_numCells'] = numCells
        self.set_display_size()
    def _get_patcher_callbacks(self):
        # (event name, handler) pairs used by add/remove_patch_callbacks.
        return (
            ('speak', self.speak),
            ('beep', self.beep),
            ('wave', self.playWaveFile),
            ('cancel_speech', self.cancel_speech),
            ('pause_speech', self.pause_speech),
            ('display', self.display),
            ('set_display', self.set_display_size)
        )
    def add_patch_callbacks(self):
        patcher_callbacks = self._get_patcher_callbacks()
        for event, callback in patcher_callbacks:
            self.patcher.register_callback(event, callback)
    def remove_patch_callbacks(self):
        patcher_callbacks = self._get_patcher_callbacks()
        for event, callback in patcher_callbacks:
            self.patcher.unregister_callback(event, callback)
    def _filterUnsupportedSpeechCommands(self, speechSequence):
        # Strip callback/cancellable commands that cannot be serialized
        # or are NVDA-internal (see EXCLUDED_SPEECH_COMMANDS).
        return list([
            item for item in speechSequence
            if not isinstance(item, EXCLUDED_SPEECH_COMMANDS)
        ])
    def speak(self, speechSequence, priority):
        self.transport.send(
            type="speak",
            sequence=self._filterUnsupportedSpeechCommands(speechSequence),
            priority=priority
        )
    def cancel_speech(self):
        self.transport.send(type="cancel")
    def pause_speech(self, switch):
        self.transport.send(type="pause_speech", switch=switch)
    def beep(self, hz, length, left=50, right=50):
        self.transport.send(type='tone', hz=hz, length=length, left=left, right=right)
    def playWaveFile(self, **kwargs):
        """This machine played a sound, send it to Master machine"""
        kwargs.update({
            # nvWave.playWaveFile should always be asynchronous when called from NVDA remote, so always send 'True'
            # Version 2.2 requires 'async' keyword.
            'async': True,
            # Version 2.3 onwards. Not currently used, but matches arguments for nvWave.playWaveFile.
            # Including it allows for forward compatibility if requirements change.
            'asynchronous': True,
        })
        self.transport.send(type='wave', **kwargs)
    def display(self, cells):
        # Only send braille data when there are controlling machines with a braille display
        if self.has_braille_masters():
            self.transport.send(type="display", cells=cells)
    def has_braille_masters(self):
        return bool([i for i in self.master_display_sizes if i>0])
    def recv_index(self, index=None, **kwargs):
        pass # speech index approach changed in 2019.3
class MasterSession(RemoteSession):
    """Session that runs on the controlling (master) machine.

    Receives speech, tones, waves and braille output from the slave and
    replays them locally; forwards local braille input and display info.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Connected slave clients, keyed by client id.
        self.slaves = defaultdict(dict)
        self.patcher = nvda_patcher.NVDAMasterPatcher()
        self.patch_callbacks_added = False
        self.transport.callback_manager.register_callback('msg_speak', self.local_machine.speak)
        self.transport.callback_manager.register_callback('msg_cancel', self.local_machine.cancel_speech)
        self.transport.callback_manager.register_callback('msg_pause_speech', self.local_machine.pause_speech)
        self.transport.callback_manager.register_callback('msg_tone', self.local_machine.beep)
        self.transport.callback_manager.register_callback('msg_wave', self.handle_play_wave)
        self.transport.callback_manager.register_callback('msg_display', self.local_machine.display)
        self.transport.callback_manager.register_callback('msg_nvda_not_connected', self.handle_nvda_not_connected)
        self.transport.callback_manager.register_callback('msg_client_joined', self.handle_client_connected)
        self.transport.callback_manager.register_callback('msg_client_left', self.handle_client_disconnected)
        self.transport.callback_manager.register_callback('msg_channel_joined', self.handle_channel_joined)
        self.transport.callback_manager.register_callback('msg_set_clipboard_text', self.local_machine.set_clipboard_text)
        self.transport.callback_manager.register_callback('msg_send_braille_info', self.send_braille_info)
        self.transport.callback_manager.register_callback(TransportEvents.CONNECTED, self.handle_connected)
        self.transport.callback_manager.register_callback(TransportEvents.DISCONNECTED, self.handle_disconnected)
    def handle_play_wave(self, **kwargs):
        """Receive instruction to play a 'wave' from the slave machine
        This method handles translation (between versions of NVDA Remote) of arguments required for 'msg_wave'
        """
        # Note:
        # Version 2.2 will send only 'async' in kwargs
        # Version 2.3 will send 'asynchronous' and 'async' in kwargs
        if "fileName" not in kwargs:
            log.error("'fileName' missing from kwargs.")
            return
        fileName = kwargs.pop("fileName")
        self.local_machine.play_wave(fileName=fileName)
    def get_connection_info(self):
        # Describe this end of the connection for reconnect/URL purposes.
        hostname, port = self.transport.address
        key = self.transport.channel
        return connection_info.ConnectionInfo(hostname=hostname, port=port, key=key, mode='master')
    def handle_nvda_not_connected(self):
        speech.cancelSpeech()
        ui.message(_("Remote NVDA not connected."))
    def handle_connected(self):
        # speech index approach changed in 2019.3
        pass # nothing to do
    def handle_disconnected(self):
        # speech index approach changed in 2019.3
        pass # nothing to do
    def handle_channel_joined(self, channel=None, clients=None, origin=None, **kwargs):
        # Treat every client already present in the channel as newly connected.
        if clients is None:
            clients = []
        for client in clients:
            self.handle_client_connected(client)
    def handle_client_connected(self, client=None, **kwargs):
        self.patcher.patch()
        if not self.patch_callbacks_added:
            self.add_patch_callbacks()
            self.patch_callbacks_added = True
        # Tell the slave about our braille display so it can mirror output.
        self.send_braille_info()
        cues.client_connected()
    def handle_client_disconnected(self, client=None, **kwargs):
        self.patcher.unpatch()
        if self.patch_callbacks_added:
            self.remove_patch_callbacks()
            self.patch_callbacks_added = False
        cues.client_disconnected()
    def send_braille_info(self, **kwargs):
        display = braille.handler.display
        self.transport.send(type="set_braille_info", name=display.name, numCells=display.numCells or braille.handler.displaySize)
    def braille_input(self,**kwargs):
        self.transport.send(type="braille_input", **kwargs)
    def add_patch_callbacks(self):
        patcher_callbacks = (('braille_input', self.braille_input), ('set_display', self.send_braille_info))
        for event, callback in patcher_callbacks:
            self.patcher.register_callback(event, callback)
    def remove_patch_callbacks(self):
        patcher_callbacks = (('braille_input', self.braille_input), ('set_display', self.send_braille_info))
        for event, callback in patcher_callbacks:
            self.patcher.unregister_callback(event, callback)
| NVDARemote/NVDARemote | addon/globalPlugins/remoteClient/session.py | Python | gpl-2.0 | 12,126 |
#!/usr/bin/env python3
import os
import traceback
import glob
from multiprocessing import Pool, Process, Lock
import subprocess
import requests
import portage
from terminaltables import AsciiTable
# Verbose behaviour can be requested by exporting DEBUG in the environment.
DEBUG=0
if 'DEBUG' in os.environ:
    DEBUG = os.environ['DEBUG']
# The overlay root is the parent of the directory containing this script;
# portage is pointed at it through the PORTDIR_OVERLAY env variable.
PORTDIR_OVERLAY=os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + "/../")
os.chdir(PORTDIR_OVERLAY)
os.environ["PORTDIR_OVERLAY"] = PORTDIR_OVERLAY
def get_version(codes):
    """Query the JetBrains releases feed for the given product codes.

    Returns a dict mapping each product code to a dict of
    major-version slot -> newest version in that slot, plus a
    'latest_slot' entry naming the most recent slot.
    """
    params = {
        'code': ','.join(codes),
        'latest': 'false',
        'type': 'release'
    }
    response = requests.get('https://data.services.jetbrains.com/products/releases', params=params)
    data = response.json()
    versions = {}
    for code in codes:
        releases = data[code]
        # The feed is sorted newest-first, so the first entry names the
        # latest slot and the first entry seen per slot is its newest build.
        slots = {'latest_slot': releases[0]['majorVersion']}
        for release in releases:
            slots.setdefault(release['majorVersion'], release['version'])
        versions[code] = slots
    return versions
# format: `package_name: product_code`
codes={
    'clion': 'CL',
    'datagrip': 'DG',
    'idea': 'IIU',
    'idea-community': 'IIC',
    'phpstorm': 'PS',
    'pycharm': 'PCP',
    'pycharm-community': 'PCC',
    'rider': 'RD',
    'rubymine': 'RM',
    'webstorm': 'WS',
}
remote_versions = get_version(codes.values())
# Pending updates; each entry describes one local->remote version bump.
update_table = [dict() for x in range(0)]
pdb = portage.db[portage.root]["porttree"].dbapi
for pn, code in sorted(codes.items()):
    new_updates = [dict() for x in range(0)]
    # find category by globbing in this repo
    cat = glob.glob(f"*/{pn}/{pn}*.ebuild")[0].split("/")[0]
    # find the newest version for each slot
    loc_slots = {}
    local_versions = pdb.xmatch('match-visible', f"{cat}/{pn}::rindeal")
    for v in local_versions:
        slot = pdb.aux_get(v, ["SLOT"])[0]
        # add if not yet present
        if not slot in loc_slots:
            loc_slots[slot] = v
            continue
        # update slot if newer version was found
        if portage.vercmp(loc_slots[slot], v) < 0:
            loc_slots[slot] = v
    # now compare current and server versions for each slot
    for slot in loc_slots:
        pkg = loc_slots[slot]
        loc_ver = portage.pkgsplit(pkg)[1]
        rem_ver = remote_versions[code][slot]
        if portage.vercmp(loc_ver, rem_ver) < 0:
            new_updates.append({
                'cat': cat,
                'pn': pn,
                'loc_slot': slot,
                'loc_ver': loc_ver,
                'rem_slot': slot,
                'rem_ver': rem_ver
            })
    # now look for the newest version outside of any known slots
    latest_loc_pkg = pdb.xmatch('bestmatch-visible', f"{cat}/{pn}::rindeal")
    latest_loc_ver = portage.pkgsplit(latest_loc_pkg)[1]
    latest_loc_slot = pdb.aux_get(latest_loc_pkg, ["SLOT"])[0]
    latest_rem_slot = remote_versions[code]['latest_slot']
    latest_rem_ver = remote_versions[code][latest_rem_slot]
    if portage.vercmp(latest_loc_ver, latest_rem_ver) < 0:
        # check for duplicates
        is_dup = 0
        for update in new_updates:
            if update['loc_slot'] == latest_rem_slot:
                is_dup = 1
                break
        if not is_dup:
            new_updates.append({
                'cat': cat,
                'pn': pn,
                'loc_slot': latest_loc_slot,
                'loc_ver': latest_loc_ver,
                'rem_slot': latest_rem_slot,
                'rem_ver': latest_rem_ver
            })
    update_table += new_updates
# create a pretty table
pretty_table = [ [ 'Category', 'Package', 'Slot', 'Version' ] ]
for u in update_table:
    slot = u['loc_slot']
    if slot != u['rem_slot']:
        slot += ' -> ' + u['rem_slot']
    pretty_table.append([ u['cat'], u['pn'], slot, u['loc_ver'] + ' -> ' + u['rem_ver'] ])
# now print the table
print(AsciiTable(pretty_table).table)
# and prompt the user for an action
y = input("Press 'y' to proceed with the update\n")
if y != "y":
    print(f"You pressed '{y}', bailing...")
    exit(0)
def run_cmd(cmd):
    """Echo *cmd*, run it in a shell, and return its `os.system()` status.

    The status is always returned (0 on success).  Previously the status
    was returned only on failure, so a successful command returned None
    and callers comparing the result with 0 (e.g. the `repoman manifest`
    check in `update_pkg()`) treated every success as a failure.
    """
    pn = os.path.basename(os.getcwd())
    # Colourized prompt: blue package name, yellow command.
    print(f"> \033[94m{pn}\033[0m: `\033[93m{cmd}\033[0m`")
    err = os.system(cmd)
    if err:
        print(f"{pn}: command '{cmd}' failed with code {err}")
    # Bug fix: return the status unconditionally instead of only on failure.
    return err
def update_pkg(cat, pn, loc_slot, loc_ver, rem_slot, rem_ver):
    """Bump one package: copy/rename the ebuild, regenerate the Manifest
    and commit, rolling the tree back if Manifest generation fails."""
    global GIT_LOCK, PKG_LOCKS, PORTDIR_OVERLAY
    cat_pn = f"{cat}/{pn}"
    os.chdir(f"{PORTDIR_OVERLAY}/{cat_pn}")
    # Serialize concurrent bumps of the same package.
    PKG_LOCKS[cat_pn].acquire()
    new_slot = False if loc_slot == rem_slot else True
    if new_slot: # bump into a new slot
        run_cmd(f"cp -v {pn}-{loc_slot}*.ebuild {pn}-{rem_ver}.ebuild")
    else: # bump inside a slot
        GIT_LOCK.acquire()
        run_cmd(f"git mv -v {pn}-{loc_ver}*.ebuild {pn}-{rem_ver}.ebuild")
        GIT_LOCK.release()
    if run_cmd(f"repoman manifest") != 0:
        # Manifest generation did not succeed: undo staged and
        # working-tree changes before giving up on this package.
        GIT_LOCK.acquire()
        run_cmd('git reset -- .')
        run_cmd('git checkout -- .')
        GIT_LOCK.release()
        PKG_LOCKS[cat_pn].release()
        return 1
    GIT_LOCK.acquire()
    run_cmd(f"git add {pn}-{rem_ver}.ebuild")
    if new_slot:
        run_cmd(f"git commit -m '{cat}/{pn}: new version v{rem_ver}' .")
    else: # bump inside a slot
        run_cmd(f"git commit -m '{cat}/{pn}: bump to v{rem_ver}' .")
    GIT_LOCK.release()
    PKG_LOCKS[cat_pn].release()
# only one git command may run concurrently
GIT_LOCK = Lock()
# one lock per package so the same package is never bumped twice at once
PKG_LOCKS = {}
for update in update_table:
    cat_pn = update['cat'] + "/" + update['pn']
    if not cat_pn in PKG_LOCKS:
        PKG_LOCKS[cat_pn] = Lock()
# DEBUG
#update_pkg(update_table[0])
# https://stackoverflow.com/a/25558333/2566213
def pool_init(l):
    # Re-install the shared lock table in each worker process.
    global PKG_LOCKS
    PKG_LOCKS = l
pool = Pool(processes=8, initializer=pool_init, initargs=(PKG_LOCKS, ))
for update in update_table:
    pool.apply_async(func=update_pkg, kwds=update)
pool.close()
pool.join()
| rindeal/gentoo-overlay | _tools/bump-jetbrains-pkgs.py | Python | gpl-2.0 | 6,258 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add an optional ``background_color`` field to the deploy settings."""
    dependencies = [
        ('labman_setup', '0003_googlesearchscript'),
    ]
    operations = [
        migrations.AddField(
            model_name='labmandeploygeneralsettings',
            name='background_color',
            # Optional short string (e.g. a CSS colour value).
            field=models.CharField(max_length=25, null=True, blank=True),
            preserve_default=True,
        ),
    ]
| morelab/labman_ud | labman_ud/labman_setup/migrations/0004_labmandeploygeneralsettings_background_color.py | Python | gpl-3.0 | 494 |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2016, Zato Source s.r.o. https://zato.io
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
| alirizakeles/zato | code/zato-server/src/zato/server/service/internal/security/vault/policy.py | Python | gpl-3.0 | 154 |
# -*- coding: utf-8 -*-
#
## This file is part of Zenodo.
## Copyright (C) 2012, 2013, 2014 CERN.
##
## Zenodo is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## Zenodo is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
##
## In applying this licence, CERN does not waive the privileges and immunities
## granted to it by virtue of its status as an Intergovernmental Organization
## or submit itself to any jurisdiction.
import os
import shutil
from flask import current_app
from invenio.base.factory import with_app_context
@with_app_context(new_context=True)
def post_handler_database_create(sender, default_data='', *args, **kwargs):
    """Load data after demosite creation."""
    from invenio.modules.communities.models import Community
    print(">>> Creating collections for communities...")
    # Materialise the collection tree for each bundled community.
    for community_id in ('zenodo', 'ecfunded'):
        community = Community.query.filter_by(id=community_id).first()
        community.save_collections()
    print(">>> Fixing dbquery for root collection.")
    from invenio.modules.search.models import Collection
    from invenio.ext.sqlalchemy import db
    root = Collection.query.filter_by(id=1).first()
    # Exclude provisional/pending/spam/rejected/dark records from the root.
    root.dbquery = ('980__a:0->Z AND NOT 980__a:PROVISIONAL AND NOT '
                    '980__a:PENDING AND NOT 980__a:SPAM AND NOT 980__a:REJECTED '
                    'AND NOT 980__a:DARK')
    db.session.commit()
@with_app_context(new_context=True)
def clean_data_files(sender, *args, **kwargs):
    """Clean data in directories."""
    config_keys = (
        'DEPOSIT_STORAGEDIR',
        'CFG_TMPDIR',
        'CFG_TMPSHAREDDIR',
        'CFG_LOGDIR',
        'CFG_CACHEDIR',
        'CFG_RUNDIR',
        'CFG_BIBDOCFILE_FILEDIR',
    )
    # Recreate each configured directory from scratch so it exists and is empty.
    for key in config_keys:
        directory = current_app.config[key]
        print(">>> Cleaning {0}".format(directory))
        if os.path.exists(directory):
            shutil.rmtree(directory)
        os.makedirs(directory)
@with_app_context(new_context=True)
def post_handler_demosite_populate(sender, default_data='', *args, **kwargs):
    """Load data after records are created."""
    # Intentionally empty: the demosite currently needs no post-populate
    # fixups, but the signal hook is kept so callers can stay wired up.
| otron/zenodo | zenodo/demosite/receivers.py | Python | gpl-3.0 | 2,660 |
# -*- coding: utf-8 -*-
'''
Created on Oct 2, 2010
@author: dimitry (zavlab1)
'''
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GdkPixbuf

from foobnix.gui.service.path_service import get_foobnix_resourse_path_by_name
from foobnix.util.const import ICON_FOOBNIX
from foobnix.version import FOOBNIX_VERSION
class AboutWindow(Gtk.AboutDialog):
    """Modal "About" dialog with Foobnix version, credits and logo."""

    def __init__(self):
        Gtk.AboutDialog.__init__(self)
        self.set_program_name("Foobnix")
        self.set_version(FOOBNIX_VERSION)
        self.set_copyright("(c) Ivan Ivanenko <ivan.ivanenko@gmail.com>")
        # _() is the gettext hook installed globally at application startup.
        self.set_comments(_("Simple and Powerful player"))
        self.set_website("http://www.foobnix.com")
        self.set_authors(["Dmitry Kozhura (zavlab1) <zavlab1@gmail.com>", "Pietro Campagnano <fain182@gmailcom>", "Viktor Suprun <popsul1993@gmail.com>"])
        self.set_translator_credits("""Bernardo Miguel Savone
Sérgio Marques
XsLiDian
KamilSPL
north
Alex Serada
Ivan Ivanenko
Dmitry-Kogura
Fitoschido
zeugma
Schaffino
Oleg «Eleidan» Kulik
Sergey Zigachev
Martino Barbon
Florian Heissenberger
Aldo Mann""")
        # BUG FIX: under PyGObject (gi.repository) pixbuf loading lives at
        # GdkPixbuf.Pixbuf.new_from_file(); Gdk.pixbuf_new_from_file was the
        # old PyGTK spelling and raises AttributeError at runtime.
        self.set_logo(GdkPixbuf.Pixbuf.new_from_file(get_foobnix_resourse_path_by_name(ICON_FOOBNIX)))

    def show(self):
        """Run the dialog modally, then destroy it."""
        self.run()
        self.destroy()
| kagel/foobnix | foobnix/gui/about/about.py | Python | gpl-3.0 | 1,313 |
"""
Demonstrates how to use the labjack.ljm.eAddresses (LJM_eAddresses) function.
"""
from labjack import ljm
# Open first found LabJack
handle = ljm.open(ljm.constants.dtANY, ljm.constants.ctANY, "ANY")
#handle = ljm.openS("ANY", "ANY", "ANY")
info = ljm.getHandleInfo(handle)
print("Opened a LabJack with Device type: %i, Connection type: %i,\n" \
"Serial number: %i, IP address: %s, Port: %i,\nMax bytes per MB: %i" % \
(info[0], info[1], info[2], ljm.numberToIP(info[3]), info[4], info[5]))
# Setup and call eAddresses to write/read values to/from the LabJack.
numFrames = 3
aAddresses = [1000, 55110, 55110] # [DAC0, TEST_UINT16, TEST_UINT16]
aDataTypes = [ljm.constants.FLOAT32, ljm.constants.UINT16, ljm.constants.UINT16]
aWrites = [ljm.constants.WRITE, ljm.constants.WRITE, ljm.constants.READ]
aNumValues = [1, 1, 1]
aValues = [2.5, 12345, 0] # [write 2.5 V, write 12345, read]
results = ljm.eAddresses(handle, numFrames, aAddresses, aDataTypes, aWrites, aNumValues, aValues)
print("\neAddresses results: ")
start = 0
for i in range(numFrames):
end = start + aNumValues[i]
print(" Address - %i, data type - %i, write - %i, values: %s" % \
(aAddresses[i], aDataTypes[i], aWrites[i], str(results[start:end])))
start = end
# Close handle
# Close handle
ljm.close(handle)
import bpy
# Blender camera preset: Nikon D7000 APS-C sensor (23.6 x 15.6 mm),
# fitted to the horizontal sensor dimension.
bpy.context.object.data.sensor_width = 23.6
bpy.context.object.data.sensor_height = 15.6
bpy.context.object.data.sensor_fit = 'HORIZONTAL'
| cschenck/blender_sim | fluid_sim_deps/blender-2.69/2.69/scripts/presets/camera/Nikon_D7000.py | Python | gpl-3.0 | 150 |
# encoding: utf-8
def _unicode_truncate(ustr, length, encoding="UTF-8"):
"Truncate @ustr to specific encoded byte length"
bstr = ustr.encode(encoding)[:length]
return bstr.decode(encoding, 'ignore')
def extract_title_body(text, maxtitlelen=60):
    """Prepare @text: Return a (title, body) tuple

    @text: A user-submitted paragraph or otherwise snippet of text. We
    try to detect an obvious title and then return the title and the
    following body. Otherwise we extract a title from the first words,
    and return the full text as body.

    @maxtitlelen: A unitless measure of approximate length of title.
    The default value yields a resulting title of approximately 60 ascii
    characters, or 20 asian characters.

    >>> extract_title_body("Short Text")
    ('Short Text', '')

    >>> title, body = extract_title_body(u"執筆方針については、項目名の付け方、"
    ... "フォーマットや表記上の諸問題に関して多くの方針が存在している。")
    >>> print(title)
    執筆方針については、項目名の付け方、フォ
    >>> print(body) # doctest: +ELLIPSIS
    執筆方針については、項目名の付け方、フォ...して多くの方針が存在している。
    """
    # Whitespace-only input is returned unchanged with an empty body.
    if not text.strip():
        return text, ""

    def first_nonblank_line(blob):
        """Return (first non-empty stripped line, joined remainder)."""
        lines = iter(blob.splitlines())
        for candidate in lines:
            candidate = candidate.strip()
            if not candidate:
                continue
            return candidate, "\n".join(lines)

    def leading_words(blob, limit):
        """Take roughly @limit encoded bytes of whole leading words.

        Truncating by bytes (UTF-8) deliberately keeps fewer characters
        for wide/asian scripts, which is the desired behaviour.
        """
        blob = blob.lstrip()
        prefix = _unicode_truncate(blob, limit)
        pieces = prefix.split()
        if len(pieces) > 3:
            pieces = pieces[:-1]
        shortened = " ".join(pieces[:-1])
        if blob.startswith(shortened):
            prefix = shortened
        return prefix, blob[len(prefix):]

    title, remainder = first_nonblank_line(text)
    if len(title.encode("UTF-8")) <= maxtitlelen:
        return title, remainder
    title, remainder = leading_words(text, maxtitlelen)
    # When anything follows the extracted title, keep the full text as body.
    return (title, text) if remainder.strip() else (text, "")
if __name__ == '__main__':
    # Run the doctests embedded in extract_title_body when executed directly.
    import doctest
    doctest.testmod()
| engla/kupfer | kupfer/textutils.py | Python | gpl-3.0 | 2,681 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration creating the ``Layer`` model and its ``mantainers`` M2M table."""

    def forwards(self, orm):
        """Create layers_layer and the layers_layer_mantainers join table."""
        # Adding model 'Layer'
        db.create_table('layers_layer', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('added', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 10, 2, 0, 0))),
            ('updated', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 10, 2, 0, 0))),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=50)),
            ('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50)),
            ('description', self.gf('django.db.models.fields.CharField')(max_length=250, null=True, blank=True)),
            ('text', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('is_published', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('is_external', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('center', self.gf('django.contrib.gis.db.models.fields.PointField')(null=True, blank=True)),
            ('area', self.gf('django.contrib.gis.db.models.fields.PolygonField')(null=True, blank=True)),
            ('zoom', self.gf('django.db.models.fields.SmallIntegerField')(default=12)),
            ('organization', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('website', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
            ('minimum_distance', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('new_nodes_allowed', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('data', self.gf(u'django_hstore.fields.DictionaryField')(null=True, blank=True)),
        ))
        db.send_create_signal('layers', ['Layer'])
        # Adding M2M table for field mantainers on 'Layer'
        m2m_table_name = db.shorten_name('layers_layer_mantainers')
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('layer', models.ForeignKey(orm['layers.layer'], null=False)),
            ('profile', models.ForeignKey(orm['profiles.profile'], null=False))
        ))
        db.create_unique(m2m_table_name, ['layer_id', 'profile_id'])

    def backwards(self, orm):
        """Reverse of forwards(): drop both tables."""
        # Deleting model 'Layer'
        db.delete_table('layers_layer')
        # Removing M2M table for field mantainers on 'Layer'
        db.delete_table(db.shorten_name('layers_layer_mantainers'))

    # South "frozen" ORM: snapshot of every model this migration touches,
    # as generated at the time the migration was written.  Do not edit.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'layers.layer': {
            'Meta': {'object_name': 'Layer'},
            'added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 10, 2, 0, 0)'}),
            'area': ('django.contrib.gis.db.models.fields.PolygonField', [], {'null': 'True', 'blank': 'True'}),
            'center': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
            'data': (u'django_hstore.fields.DictionaryField', [], {'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_external': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'mantainers': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['profiles.Profile']", 'symmetrical': 'False', 'blank': 'True'}),
            'minimum_distance': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
            'new_nodes_allowed': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'organization': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
            'text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 10, 2, 0, 0)'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'zoom': ('django.db.models.fields.SmallIntegerField', [], {'default': '12'})
        },
        'profiles.profile': {
            'Meta': {'object_name': 'Profile'},
            'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'address': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'}),
            'birth_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 10, 2, 0, 0)'}),
            'email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'unique': 'True', 'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'})
        }
    }
    complete_apps = ['layers']
"""Handles downloading and importing OSM Data"""
import os
import subprocess
import tempfile
import requests
from celery.utils.log import get_task_logger
from django.conf import settings
from django.db import connection
from datasources.models import OSMData, OSMDataProblem
from datasources.tasks.shapefile import ErrorFactory
# Note: The download is done using the overpass API
# (see:http://wiki.openstreetmap.org/wiki/Overpass_API) because
# we may be downloading large files and these endpoints are optimized
# for downloads/reads unlike the main openstreetmap API endpoint
OSM_API_URL = 'http://www.overpass-api.de/api/xapi?way[bbox=%s,%s,%s,%s][highway=*]'
# set up shared task logger
logger = get_task_logger(__name__)
def run_osm_import(osmdata_id):
    """Download and run import step for OSM data

    Downloads and stores raw OSM data within a bounding box defined
    by imported GTFS data. Uses the SRID defined on the gtfs_stops
    table to determine correct UTM projection to import data as.

    Uses Raw SQL to
    - get extent from GTFS data since we
      do not have models that keeps track of GTFS Data
    - get UTM projection to import OSM data as correct projection

    :param osmdata_id: primary key of the OSMData row to process
    """
    logger.debug('Starting OSM import')
    osm_data = OSMData.objects.get(pk=osmdata_id)
    osm_data.status = OSMData.Statuses.PROCESSING
    error_factory = ErrorFactory(OSMDataProblem, osm_data, 'osmdata')

    def handle_error(title, description):
        """Record a problem and flag the OSMData row as errored.

        Note: this only records state -- callers must abort (return)
        themselves after invoking it.
        """
        error_factory.error(title, description)
        osm_data.status = OSMData.Statuses.ERROR
        osm_data.save()

    with connection.cursor() as c:
        try:
            # Get the bounding box for gtfs data;
            # split components to make it easier to parse the sql response.
            bbox_query = """
            SELECT MIN(ST_Xmin(the_geom)),
                MIN(ST_Ymin(the_geom)),
                MAX(ST_Xmax(the_geom)),
                MAX(ST_Ymax(the_geom))
            FROM gtfs_stops;"""
            logger.debug('Making query for bounding box from gtfs stops')
            c.execute(bbox_query)
            bbox = c.fetchone()
        except Exception as e:
            err_msg = 'Error obtaining bounding box from gtfs_stops table'
            logger.exception(err_msg)
            # BUG FIX: previously only the helper returned, so execution fell
            # through with `bbox` undefined; abort the whole task here.
            handle_error(err_msg, e.message)
            return
        try:
            logger.debug('Making query for UTM projection srid from gtfs_stops table (geom field)')
            utm_projection_query = "SELECT FIND_SRID('', 'gtfs_stops', 'geom');"
            c.execute(utm_projection_query)
            utm_projection = c.fetchone()[0]
        except Exception as e:
            err_msg = 'Error obtaining SRID from gtfs_stops table'
            logger.exception(err_msg)
            handle_error(err_msg, e.message)
            return

    _, temp_filename = tempfile.mkstemp()
    logger.debug('Generated tempfile %s to download osm data into', temp_filename)
    osm_data.source_file = temp_filename
    osm_data.status = OSMData.Statuses.DOWNLOADING
    osm_data.save()
    try:
        response = requests.get(OSM_API_URL % bbox, stream=True)
        logger.debug('Downloading OSM data from overpass/OSM api')
        # Stream the download in chunks so large extracts do not need to
        # fit in memory.
        with open(temp_filename, 'wb') as fh:
            for chunk in response.iter_content(chunk_size=1024):
                if chunk:
                    fh.write(chunk)
                    fh.flush()
        logger.debug('Finished downloading OSM data')
        osm_data.status = OSMData.Statuses.IMPORTING
        osm_data.save()
    except Exception as e:
        err_msg = 'Error downloading data'
        logger.exception('Error downloading data')
        handle_error(err_msg, e.message)
        # BUG FIX: do not fall through and run osm2pgsql on a partial file;
        # clean up the temp file and stop.
        os.remove(temp_filename)
        return

    # Get Database settings so osm2pgsql can authenticate non-interactively.
    db_host = settings.DATABASES['default']['HOST']
    db_password = settings.DATABASES['default']['PASSWORD']
    db_user = settings.DATABASES['default']['USER']
    db_name = settings.DATABASES['default']['NAME']
    env = os.environ.copy()
    env['PGPASSWORD'] = db_password
    # Insert OSM Data into Database with osm2pgsql command
    osm2pgsql_command = ['osm2pgsql',
                         '-U', db_user,
                         '-H', db_host,
                         '-d', db_name,
                         '-s',  # use slim mode to cache to DB rather than in-memory
                         '-E', str(utm_projection),
                         temp_filename]
    try:
        logger.debug('Running OSM import command %s', ' '.join(osm2pgsql_command))
        subprocess.check_call(osm2pgsql_command, env=env)
        osm_data.status = OSMData.Statuses.COMPLETE
    except subprocess.CalledProcessError as e:
        osm_data.status = OSMData.Statuses.ERROR
        err_msg = 'Error running osm2pgsql command'
        logger.exception('Error running osm2pgsql command')
        error_factory.error(err_msg, e.message)
    finally:
        osm_data.save()
        os.remove(temp_filename)
| WorldBank-Transport/open-transit-indicators | python/django/datasources/tasks/osm.py | Python | gpl-3.0 | 4,966 |
#! /usr/bin/env python
##############################################################################
#
# Copyright (C) Zenoss, Inc. 2008, 2009, all rights reserved.
#
# This content is made available according to terms specified in
# License.zenoss under the directory where your Zenoss product is installed.
#
##############################################################################
__doc__ = """Monitor Java Management eXtension (JMX) mbeans
Dispatches calls to a java server process to collect JMX values for a device.
"""
import logging
import sys
import os
import socket
import Globals
import zope
from twisted.internet.defer import Deferred
from twisted.web import xmlrpc
from twisted.internet.protocol import ProcessProtocol
from twisted.internet import defer, reactor, error
from Products.ZenCollector.daemon import CollectorDaemon
from Products.ZenCollector.interfaces import ICollectorPreferences,\
IDataService,\
IEventService,\
IScheduledTask
from Products.ZenCollector.tasks import SimpleTaskFactory,\
SimpleTaskSplitter,\
TaskStates
from Products.ZenEvents import Event
from Products.ZenHub.XmlRpcService import XmlRpcService
from Products.ZenUtils.NJobs import NJobs
from Products.ZenUtils.Utils import unused
from Products.ZenUtils.observable import ObservableMixin
import ZenPacks.zenoss.ZenJMX
from ZenPacks.zenoss.ZenJMX.services.ZenJMXConfigService import JMXDataSourceConfig
unused(JMXDataSourceConfig)
log = logging.getLogger( "zen.zenjmx" )
DEFAULT_HEARTBEAT_TIME = 5 * 60
WARNING_EVENT = dict(eventClass='/Status/JMX', component='JMX',
device=socket.getfqdn(), severity=Event.Warning)
class ZenJMXPreferences(object):
    """
    Configuration values for the zenjmx daemon.
    """
    zope.interface.implements(ICollectorPreferences)

    def __init__(self):
        """
        Construct a new ZenJMXPreferences instance and provide default
        values for needed attributes.
        """
        self.collectorName = "zenjmx"
        self.defaultRRDCreateCommand = None
        self.cycleInterval = 5 * 60  # seconds
        self.configCycleInterval = 20  # minutes
        self.options = None
        # the configurationService attribute is the fully qualified class-name
        # of our configuration service that runs within ZenHub
        self.configurationService = 'ZenPacks.zenoss.ZenJMX.services.ZenJMXConfigService'

    def buildOptions(self, parser):
        """Register zenjmx-specific command line options on @parser."""
        parser.add_option('-j','--zenjmxjavaport',
                          dest='zenjmxjavaport',
                          default=9988,
                          type='int',
                          help='Port for zenjmxjava process; default 9988. '+\
                            'Tries 5 consecutive ports if there is a conflict',
                          )
        parser.add_option('--concurrentJMXCalls',
                          dest='concurrentJMXCalls',
                          action='store_true', default=False,
                          help='Enable concurrent calls to a JMX server'
                          )
        parser.add_option('--parallel', dest='parallel',
                          default=200, type='int',
                          help='Number of devices to collect from at one time'
                          )
        parser.add_option('--cycleInterval', dest='cycleInterval',
                          default=300, type='int',
                          help='Cycle time, in seconds, to run collection'
                          )
        parser.add_option('--portRange', dest='portRange',
                          default=5, type='int',
                          help='Number of ports to attempt when starting' +
                          'Java jmx client')
        parser.add_option('--javaheap',
                          dest="maxHeap",type="int", default=512,
                          help="Max heap, in MB, to use for java process")

    def postStartup(self):
        """Post-startup hook; zenjmx needs no extra work here."""
        pass

    def getJavaClientArgs(self):
        """
        Build the extra CLI argument tuple for the Java JMX client,
        or None when no arguments apply.

        BUG FIX: the original started from args = None and concatenated
        tuples onto it, raising TypeError whenever --logseverity or
        --concurrentJMXCalls was set without a config file.
        """
        args = ()
        if self.options.configfile:
            args += ('--configfile', self.options.configfile)
        if self.options.logseverity:
            args += ('-v', str(self.options.logseverity))
        if self.options.concurrentJMXCalls:
            args += ('-concurrentJMXCalls', )
        # Preserve the historical contract of returning None for "no args".
        return args or None

    def getStartingPort(self):
        """First TCP port to try for the Java XML-RPC server."""
        return self.options.zenjmxjavaport

    def getAttemptedPortRange(self):
        """Number of consecutive ports to try after the starting port."""
        return self.options.portRange
class IZenJMXJavaClient(zope.interface.Interface):
    """Interface for the registered Java JMX client utility."""
    # TCP port the Java XML-RPC server listens on.
    listenPort = zope.interface.Attribute("listenPort")
class ZenJMXJavaClientImpl(ProcessProtocol):
    """
    Protocol to control the zenjmxjava process
    """
    zope.interface.implements(IZenJMXJavaClient)

    def __init__(
        self,
        args,
        cycle=True,
        zenjmxjavaport=9988,
        maxHeap=512
        ):
        """
        Initializer

        @param args: argument list for zenjmx
        @type args: list of strings
        @param cycle: whether to run once or repeat
        @type cycle: boolean
        @param zenjmxjavaport: port on which java process
        will listen for queries
        @type zenjmxjavaport: int
        """
        # Fired once startup succeeds or fails; see run()/processEnded().
        self.deferred = Deferred()
        self.stopCalled = False
        self.process = None
        # Relay the child's stdout/stderr straight through to ours.
        self.outReceived = sys.stdout.write
        self.errReceived = sys.stderr.write
        self.log = logging.getLogger('zen.ZenJMXJavaClient')
        self.args = args
        self.cycle = cycle
        self.listenPort = zenjmxjavaport
        self._maxHeap = maxHeap
        # Only enabled after a successful startup, so a crash during boot
        # is surfaced via self.deferred instead of a silent restart loop.
        self.restartEnabled = False
        self._eventService = zope.component.queryUtility(IEventService)
        self._preferences = zope.component.queryUtility(ICollectorPreferences,
                'zenjmx')

    def processEnded(self, reason):
        """
        Twisted reactor function called when the process ends.

        @param reason: message from the process
        @type reason: string
        """
        self.process = None
        if not self.stopCalled:
            procEndEvent = {
                'eventClass': '/Status/JMX',
                'summary': 'zenjmxjava ended unexpectedly: %s'\
                    % reason.getErrorMessage(),
                'severity': Event.Warning,
                'component': 'zenjmx',
                'device': self._preferences.options.monitor,
                }
            self._eventService.sendEvent(procEndEvent)
            self.log.warn('processEnded():zenjmxjava process ended %s'
                          % reason)
            if self.deferred:
                # Still starting up: report the failure to whoever is
                # waiting on the startup deferred.
                msg = reason.getErrorMessage()
                exitCode = reason.value.exitCode
                if exitCode == 10:
                    msg = 'Could not start up Java web server, '+\
                          'possible port conflict'
                self.deferred.callback((exitCode,msg))
                self.deferred = None
            elif self.restartEnabled:
                # Crash after a successful start: respawn shortly.
                self.log.info('processEnded():restarting zenjmxjava')
                reactor.callLater(1, self.run)

    def stop(self):
        """
        Twisted reactor function called when we are shutting down.
        """
        import signal
        self.log.info('stop():stopping zenjmxjava')
        self.stopCalled = True
        if not self.process:
            self.log.debug('stop():no zenjmxjava process to stop')
            return
        try:
            self.process.signalProcess(signal.SIGKILL)
        except error.ProcessExitedAlready:
            self.log.info('stop():zenjmxjava process already exited')
            pass
        try:
            self.process.loseConnection()
        except Exception:
            pass
        self.process = None

    def connectionMade(self):
        """
        Called when the Twisted reactor starts up
        """
        self.log.debug('connectionMade():zenjmxjava started')

        def doCallback():
            """
            doCallback
            """
            msg = \
                'doCallback(): callback on deferred zenjmxjava proc is up'
            self.log.debug(msg)
            if self.deferred:
                self.deferred.callback((True,'zenjmx java started'))
            if self.process:
                procStartEvent = {
                    'eventClass': '/Status/JMX',
                    'summary': 'zenjmxjava started',
                    'severity': Event.Clear,
                    'component': 'zenjmx',
                    'device': self._preferences.options.monitor,
                    }
                self._eventService.sendEvent(procStartEvent)
            self.deferred = None

        if self.deferred:
            self.log.debug('connectionMade():scheduling callback')
            # give the java service a chance to startup
            reactor.callLater(3, doCallback)
        self.log.debug('connectionMade(): done')

    def run(self):
        """
        Twisted function called when started
        """
        if self.stopCalled:
            return
        self.log.info('run():starting zenjmxjava')
        zenjmxjavacmd = os.path.join(ZenPacks.zenoss.ZenJMX.binDir,
                'zenjmxjava')
        if self.cycle:
            args = ('runjmxenabled', )
        else:
            # don't want to start up with jmx server to avoid port conflicts
            args = ('run', )
        args = args + ('-zenjmxjavaport',
                       str(self.listenPort))
        if self.args:
            args = args + self.args
        cmd = (zenjmxjavacmd, ) + args
        self.log.debug('run():spawn process %s' % (cmd, ))
        self.deferred = Deferred()
        env = dict(os.environ)
        env['JVM_MAX_HEAP'] = '-Xmx%sm'%self._maxHeap
        self.process = reactor.spawnProcess(self, zenjmxjavacmd, cmd,
                env=env)
        return self.deferred
DEFAULT_JMX_JAVA_CLIENT_NAME = 'zenjmxjavaclient'
class ZenJMXJavaClientInitialization(object):
    """
    Wrapper that continues to start the Java jmx client until
    successful.
    """

    def __init__(self,
                 registeredName=DEFAULT_JMX_JAVA_CLIENT_NAME):
        """
        @param registeredName: the name with which this client
                               will be registered as a utility
        """
        self._jmxClient = None
        self._clientName = registeredName

    def initialize(self):
        """
        Begin the first attempt to start the Java jmx client.  Note that
        this method returns a Deferred that relies on the ZenJMXPreferences
        being present when it is finally executed.  This is meant to be
        the Deferred that is given to the CollectorDaemon for
        initialization before the first JMX task is scheduled.

        @return the deferred that represents the loading of preferences
                and the initial attempt to start the Java jmx client
        @rtype defer.Deferred
        """
        def loadPrefs():
            """Copy startup settings out of the registered preferences."""
            log.debug( "Retrieving java client startup args")
            preferences = zope.component.queryUtility(ICollectorPreferences,
                                                      'zenjmx')
            self._args = preferences.getJavaClientArgs()
            self._cycle = preferences.options.cycle
            self._maxHeap = preferences.options.maxHeap
            self._startingPort = preferences.getStartingPort()
            self._rpcPort = self._startingPort
            self._attemptedPortRange = preferences.getAttemptedPortRange()

        def printProblem(result):
            """Fatal-path errback: log and terminate the daemon."""
            log.error( str(result) )
            sys.exit(1)

        d = defer.maybeDeferred( loadPrefs )
        d.addCallback( self._startJavaProc )
        d.addErrback( printProblem )
        return d

    def _tryClientOnCurrentPort( self ):
        """
        Returns the Deferred for executing an attempt
        to start the java jmx client on the current port.
        """
        log.debug( 'Attempting java client startup on port %s',
                   self._rpcPort )
        self._jmxClient = ZenJMXJavaClientImpl( self._args, self._cycle, self._rpcPort, self._maxHeap )
        # Register (and re-register on each retry) so consumers always see
        # the client instance that is actually running.
        zope.component.provideUtility(
            self._jmxClient,
            IZenJMXJavaClient,
            self._clientName
            )
        return self._jmxClient.run()

    def _startJavaProc( self, result=None ):
        """
        Checks whether startup of the java jmx client was successful.  If
        it was unsuccessful due to port conflict, increments the port and
        tries to start the client again.
        """
        # If the result is not None, that means this was called as a callback
        # after an attempt to start the client
        if result is not None:

            # If result[0] is True, the client process started
            if result[0] is True:
                log.debug( 'Java jmx client started' )
                self._jmxClient.restartEnabled = True
                deferred = defer.succeed( True )

            # If the result[0] is 10, there was a port conflict
            elif result[0] == 10:
                log.debug( 'Java client didn\'t start; port %s occupied',
                           self._rpcPort )
                if self._rpcPort < ( self._startingPort +
                                     self._attemptedPortRange ):
                    self._rpcPort += 1
                    deferred = self._tryClientOnCurrentPort()
                    deferred.addCallback( self._startJavaProc )
                else:
                    raise RuntimeError(
                        "ZenJMXJavaClient could not be started, check ports")
            else:
                #unknown error
                raise RuntimeError('ZenJMXJavaClient could not be started, '+\
                                   'check JVM type and version: %s' % result[1])

        # If there was no result passed in, then this is the first attempt
        # to start the client
        else:
            deferred = self._tryClientOnCurrentPort()
            deferred.addCallback( self._startJavaProc )

        return deferred
class ZenJMXTask(ObservableMixin):
"""
The scheduled task for all the jmx datasources on an individual device.
"""
zope.interface.implements(IScheduledTask)
    def __init__(self,
                 deviceId,
                 taskName,
                 scheduleIntervalSeconds,
                 taskConfig,
                 clientName=DEFAULT_JMX_JAVA_CLIENT_NAME ):
        """
        @param deviceId: id of the device this task collects from
        @param taskName: unique scheduler name for this task
        @param scheduleIntervalSeconds: requested interval; currently ignored
            in favour of the daemon-wide cycleInterval (see below)
        @param taskConfig: JMX datasource configuration for the device
        @param clientName: utility name of the registered Java JMX client
        """
        super( ZenJMXTask, self ).__init__()
        self.name = taskName
        self.configId = deviceId
        self.state = TaskStates.STATE_IDLE
        self._taskConfig = taskConfig
        self._manageIp = self._taskConfig.manageIp
        self._dataService = zope.component.queryUtility( IDataService )
        self._eventService = zope.component.queryUtility( IEventService )
        self._preferences = zope.component.queryUtility( ICollectorPreferences,
                                                        'zenjmx' )
        self._client = zope.component.queryUtility( IZenJMXJavaClient,
                                                    clientName )

        # At this time, do not use the interval passed from the device
        # configuration. Use the value pulled from the daemon
        # configuration.
        unused( scheduleIntervalSeconds )
        self.interval = self._preferences.options.cycleInterval
def createEvent(self, errorMap, component=None):
"""
Given an event dictionary, copy it and return the event
@param errorMap: errorMap
@type errorMap: s dictionarytring
@param component: component name
@type component: string
@return: updated event
@rtype: dictionary
"""
event = errorMap.copy()
if component:
event['component'] = component
if event.get('datasourceId') and not event.get('eventKey'):
event['eventKey'] = event.get('datasourceId')
return event
    def sendEvent(self, event, **kw):
        """Forward @event (plus keyword-argument overrides) to the event service."""
        self._eventService.sendEvent(event, **kw)
def _collectJMX(self, dsConfigList):
"""
Call Java JMX process to collect JMX values
@param dsConfigList: DataSource configuration
@type dsConfigList: list of JMXDataSourceConfig
@return: Twisted deferred object
@rtype: Twisted deferred object
"""
def toDict(config):
"""
Marshall the fields from the datasource into a dictionary and
ignore everything that is not a primitive
@param config: dictionary of results
@type config: string
@return: results from remote device
@rtype: dictionary
"""
vals = {}
for (key, val) in config.__dict__.items():
if key != 'rrdConfig' and type(val)\
in XmlRpcService.PRIMITIVES:
vals[key] = val
rrdConfigs = config.rrdConfig.values()
rrdConfigs.sort(lambda x, y: cmp(x.dataPointId,
y.dataPointId))
vals['dps'] = []
vals['dptypes'] = []
for rrdConfig in rrdConfigs:
vals['dps'].append(rrdConfig.dataPointId)
vals['dptypes'].append(rrdConfig.rrdType)
vals['connectionKey'] = config.getConnectionPropsKey()
return vals
def rpcCall():
"""
Communicate with our local JMX process to collect results.
This is a generator function
@param driver: generator
@type driver: string
"""
port = self._client.listenPort
xmlRpcProxy = xmlrpc.Proxy('http://localhost:%s/' % port)
d = xmlRpcProxy.callRemote('zenjmx.collect', configMaps)
d.addCallbacks( processResults , processRpcError)
return d
def processRpcError(error):
log.debug("Could not make XML RPC call for device %s; content of call: %s", self._taskConfig, configMaps)
self.sendEvent({}, severity=Event.Error,
eventClass='/Status/JMX',
summary='unexpected error: %s' % error.getErrorMessage(),
eventKey='unexpected_xmlrpc_error',
device=self.configId)
return error
def processResults(jmxResults):
"""
Given the results from JMX, store them or send events.
@param jmxResults: jmxResults
@type jmxResults: string
"""
#Send clear for RPC error
self.sendEvent({}, severity=Event.Clear,
eventClass='/Status/JMX',
summary='unexpected error cleared',
eventKey='unexpected_xmlrpc_error',
device=self.configId)
result = {}
hasConnectionError = False
hasUnexpectedError = False
for result in jmxResults:
log.debug("JMX result -> %s", result)
evtSummary = result.get('summary')
deviceId = result.get('device')
evt = self.createEvent(result)
if not evtSummary:
rrdPath = result.get('rrdPath')
dsId = result.get('datasourceId')
dpId = result.get('dpId')
value = result.get('value')
try:
self.storeRRD(deviceId, rrdPath, dsId, dpId, value)
except ValueError:
pass
self.sendEvent(evt,summary="Clear",severity=Event.Clear)
else:
# send event
log.debug('processResults(): '
+ 'jmx error, sending event for %s'
% result)
if evt.get("eventClass", "") == '/Status/JMX/Connection':
hasConnectionError = True
if evt.get("eventKey", "") == 'unexpected_error':
hasUnexpectedError = True
self.sendEvent(evt, severity=Event.Error)
if not hasConnectionError:
self.sendEvent({}, severity=Event.Clear,
eventClass='/Status/JMX/Connection',
summary='Connection is up',
eventKey=connectionComponentKey,
device=self.configId)
if not hasUnexpectedError:
self.sendEvent({}, severity=Event.Clear,
eventClass='/Status/JMX',
summary='Unexpected error cleared',
eventKey='unexpected_error',
device=self.configId)
return jmxResults
connectionComponentKey = ''
configMaps = []
for config in dsConfigList:
connectionComponentKey = config.getConnectionPropsKey()
configMaps.append(toDict(config))
log.info('collectJMX(): for %s %s' % (config.device,
connectionComponentKey))
return rpcCall()
def storeRRD(
self,
deviceId,
rrdPath,
dataSourceId,
dataPointId,
dpValue,
):
"""
Store a value into an RRD file
@param deviceId: name of the remote device
@type deviceId: string
@param dataSourceId: name of the data source
@type dataSourceId: string
@param dataPointId: name of the data point
@type dataPointId: string
@param dpValue: dpValue
@type dpValue: number
"""
deviceConfig = self._taskConfig
dsConfig = deviceConfig.findDataSource(dataSourceId)
if not dsConfig:
log.info(
'No data source config found for device %s datasource %s' \
% (deviceId, dataSourceId))
return
rrdConf = dsConfig.rrdConfig.get(dataPointId)
type = rrdConf.rrdType
if(type in ('COUNTER', 'DERIVE')):
try:
# cast to float first because long('100.0') will fail with a
# ValueError
dpValue = long(float(dpValue))
except (TypeError, ValueError):
log.warning("value %s not valid for derive or counter data points", dpValue)
else:
try:
dpValue = float(dpValue)
except (TypeError, ValueError):
log.warning("value %s not valid for data point", dpValue)
if not rrdConf:
log.info(
'No RRD config found for device %s datasource %s datapoint %s' \
% (deviceId, dataSourceId, dataPointId))
return
dpPath = '/'.join((rrdPath, rrdConf.dpName))
min = rrdConf.min
max = rrdConf.max
self._dataService.writeRRD(dpPath, dpValue, rrdConf.rrdType,
rrdConf.command, min=min, max=max)
def _finished(self, results):
for result in results:
log.debug("Finished with result %s" % str( result ) )
return results
def doTask(self):
log.debug("Scanning device %s [%s]", self.configId, self._manageIp)
d = self._collectCallback()
d.addBoth(self._finished)
# returning a Deferred will keep the framework from assuming the task
# is done until the Deferred actually completes
return d
def _collectCallback(self):
jobs = NJobs(self._preferences.options.parallel,
self._collectJMX,
self._taskConfig.jmxDataSourceConfigs.values())
deferred = jobs.start()
return deferred
    def cleanup(self):
        # Required by the collector task interface; this task holds no
        # per-task resources, so there is nothing to release.
        pass
def stopJavaJmxClients():
    """Shut down the registered JMX Java client utility, if any."""
    # Currently only starting/stopping one.
    clientName = DEFAULT_JMX_JAVA_CLIENT_NAME
    # queryUtility (unlike getUtility) returns None when nothing is
    # registered, so shutdown is a safe no-op in that case.
    client = zope.component.queryUtility( IZenJMXJavaClient,
                                          clientName )
    if client is not None:
        log.debug( 'Shutting down JMX Java client %s' % clientName )
        client.stop()
# Entry point: assemble preferences, the Java-client initializer and the
# task factory/splitter, then run the collector daemon loop.
if __name__ == '__main__':
    myPreferences = ZenJMXPreferences()
    initialization = ZenJMXJavaClientInitialization()
    myTaskFactory = SimpleTaskFactory(ZenJMXTask)
    myTaskSplitter = SimpleTaskSplitter(myTaskFactory)
    daemon = CollectorDaemon(myPreferences, myTaskSplitter,
                             initializationCallback=initialization.initialize,
                             stoppingCallback=stopJavaJmxClients)
    daemon.run()
| krull/docker-zenoss4 | init_fs/usr/local/zenoss/ZenPacks/ZenPacks.zenoss.ZenJMX-3.12.1.egg/ZenPacks/zenoss/ZenJMX/zenjmx.py | Python | gpl-3.0 | 25,284 |
import io
import json
import os
import subprocess
import sys
import unittest
from os.path import join, abspath, dirname
sys.path.append('..')
from python_driver import __version__, get_processor_instance
from python_driver.requestprocessor import (
Request, Response, RequestProcessorJSON, InBuffer, EmptyCodeException)
from typing import Dict, Any, List, AnyStr, Optional, Iterator, cast
CURDIR = abspath(dirname(__file__))
# Disabled until I update the new module with typing
# class TestTypeCheck(unittest.TestCase):
# def test_10_check(self) -> None:
# prevdir = os.getcwd()
# try:
# os.chdir(dirname(CURDIR))
# srcdir = abspath(join(dirname(CURDIR), 'python_driver', '*'))
# self.assertEqual(subprocess.call(['test/typecheck.sh', srcdir], shell=True), 0)
# finally:
# os.chdir(prevdir)
class TestPythonDriverBase(unittest.TestCase):
    """Shared fixture helpers for the python_driver tests: builds a
    sample Request and mock stdin/stdout buffers."""
    def _restart_data(self, format_: str='json') -> None:
        """Reset self.data and the send/recv buffers for a fresh run."""
        assert format_ == 'json'
        with open(join(CURDIR, 'data', 'helloworld.py')) as f:
            testcode = f.read()
        self.data = Request({
            'filepath': 'test.py',
            'action': 'ParseAST',
            'content': testcode,
            'language': 'python',
        })
        bufferclass = io.StringIO if format_ == 'json' else io.BytesIO
        # This will mock the python_driver stdin
        self.sendbuffer = bufferclass()
        # This will mock the python_driver stdout
        self.recvbuffer = bufferclass()
    @staticmethod
    def _extract_docs(inbuffer: InBuffer) -> Iterator[Response]:
        """
        This generator will read the inbuffer yielding the JSON
        docs when it finds the ending mark
        """
        line: str
        for line in inbuffer.readlines():
            yield json.loads(line)
    def _loadResults(self, format_: str) -> List[Response]:
        """Read all msgs from the recvbuffer"""
        self.recvbuffer.seek(0)
        # Fix: removed a dead ``res: List[Response] = []`` assignment that
        # was immediately overwritten by the comprehension.
        return [doc for doc in self._extract_docs(self.recvbuffer)]
class Test10ProcessRequestFunc(TestPythonDriverBase):
    """End-to-end tests: feed requests through process_requests via the
    mock buffers and validate the replies."""
    def _add_to_buffer(self, count: int, format_: str) -> None:
        """Add count test msgs to the sendbuffer"""
        for i in range(count):
            # Fix: removed a dead ``msg = ''`` that was immediately
            # overwritten.
            msg = json.dumps(self.data, ensure_ascii=False) + '\n'
            self.sendbuffer.write(msg)
        self.sendbuffer.flush()
    def _send_receive(self, nummsgs: int, outformat: str='json',
                      dataupdate: Optional[Dict[AnyStr, Any]]=None,
                      restart_data: bool=True) -> List[Response]:
        """Send nummsgs copies of self.data (optionally updated with
        dataupdate) through the processor and return the decoded replies."""
        if restart_data:
            self._restart_data(outformat)
        if dataupdate:
            self.data.update(dataupdate)
        self._add_to_buffer(nummsgs, outformat)
        self.sendbuffer.seek(0)
        processor, _ = get_processor_instance(
            outformat,
            custom_outbuffer=self.recvbuffer,
            custom_inbuffer=self.sendbuffer
        )
        processor.process_requests(self.sendbuffer)
        return self._loadResults(outformat)
    def _check_reply_dict(self, response: Response, has_errors: bool=False) -> None:
        """Validate the common shape of a single driver reply."""
        self.assertIsInstance(response, dict)
        status = response.get('status')
        if has_errors:
            assert status in ('error', 'fatal')
            # Bug fix: the default used to be the *type* ``list`` (not an
            # empty list), so a reply missing 'errors' failed the
            # isinstance check below with a misleading message.
            errors = response.get('errors', [])
            self.assertIsInstance(errors, list)
            self.assertGreater(len(errors), 0)
        else:
            self.assertEqual(status, 'ok')
            self._check_AST_dict(response)
            language_version = response['metadata'].get('language_version', -1)
            assert str(language_version) in ('2', '3')
    def _check_AST_dict(self, response: Response) -> None:
        """Validate the nested AST structure of a successful reply."""
        self.assertIsNotNone(response)
        assert 'ast' in response
        self.assertIsInstance(response['ast'], dict)
        root_key = list(response['ast'].keys())[0]
        assert root_key
        for key in ('ast_type', 'body'):
            assert key in response['ast'][root_key]
        self.assertIsInstance(response['ast'][root_key]['body'], list)
        for item in response['ast'][root_key]['body']:
            for key in ('ast_type', 'lineno', 'col_offset'):
                assert key in item
    def test_010_normal_json(self) -> None:
        replies = self._send_receive(1, 'json')
        self.assertEqual(len(replies), 1)
        self._check_reply_dict(replies[0])
    def test_020_normal_json_many(self) -> None:
        replies = self._send_receive(100, 'json')
        self.assertEqual(len(replies), 100)
        for reply in replies:
            self._check_reply_dict(reply)
    def test_030_error_print(self) -> None:
        wrongcode = 'wtf lol'
        replies = self._send_receive(1, 'json', {'content': wrongcode})
        self.assertEqual(len(replies), 1)
        ast = replies[0].get('ast')
        self.assertIsNone(ast)
        self._check_reply_dict(replies[0], has_errors=True)
        # Check that it still alive
        self._restart_data()
        replies = self._send_receive(1, 'json')
        self.assertEqual(len(replies), 1)
    def test_040_broken_json(self) -> None:
        self._restart_data('json')
        # Truncate the serialized request to simulate corrupt input.
        brokendata = json.dumps(self.data, ensure_ascii=False)[:-30]
        self.sendbuffer.write(brokendata)
        self.sendbuffer.flush()
        reply = self._send_receive(1, 'json', restart_data=False)[0]
        self.assertEqual(reply['status'], 'fatal')
        self.assertEqual(len(reply['errors']), 1)
class Test20ReqProcMethods(TestPythonDriverBase):
    """Unit tests for individual RequestProcessorJSON methods."""
    def test_10_send_response_json(self) -> None:
        """_send_response must emit exactly one JSON doc that round-trips."""
        self._restart_data('json')
        processor = RequestProcessorJSON(self.recvbuffer)
        processor._send_response(cast(Response, self.data))
        replies = self._loadResults('json')
        self.assertEqual(len(replies), 1)
        self.assertDictEqual(self.data, replies[0])
    # process request already tested with TestPythonDriverBase
    def test_20_return_error(self) -> None:
        """_return_error must emit one fatal reply carrying the errors."""
        self._restart_data('json')
        processor = RequestProcessorJSON(self.recvbuffer)
        processor.errors = ['test error']
        processor._return_error('test.py', 'fatal')
        replies = self._loadResults('json')
        self.assertEqual(len(replies), 1)
        expected = {'driver': 'python23:%s' % __version__,
                    'errors': ['test error'],
                    'filepath': 'test.py',
                    'ast': None,
                    'status': 'fatal'}
        self.assertDictEqual(replies[0], expected)
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
| juanjux/python-driver | native/python_package/test/test_python_driver.py | Python | gpl-3.0 | 6,751 |
# coding=utf-8
"""g[ravity] class."""
from foamfile import FoamFile, foam_file_from_file
from collections import OrderedDict
class G(FoamFile):
    """G (gravity) class.

    OpenFOAM ``constant/g`` dictionary: a uniform dimensioned vector
    field holding the gravitational acceleration.
    """
    # set default values for this class
    __default_values = OrderedDict()
    __default_values['dimensions'] = '[0 1 -2 0 0 0 0]'
    __default_values['#include'] = None
    __default_values['value'] = '(0 0 -9.81)'
    def __init__(self, values=None):
        """Init class."""
        FoamFile.__init__(self, name='g',
                          cls='uniformDimensionedVectorField',
                          location='constant',
                          default_values=self.__default_values,
                          values=values)
    @classmethod
    def from_file(cls, filepath):
        """Create a FoamFile from a file.
        Args:
            filepath: Full file path to dictionary.
        """
        return cls(values=foam_file_from_file(filepath, cls.__name__))
    @property
    def dimensions(self):
        # OpenFOAM dimension string (m/s^2 for gravity).
        return self.values['dimensions']
    @property
    def value(self):
        """Gravity vector as a tuple of three floats."""
        # Security/robustness fix: parse the stored '(x y z)' string
        # directly instead of eval()-ing a transformed copy, so file
        # content is never executed as Python.
        return tuple(float(v)
                     for v in self.values['value'].strip('()').split())
    @value.setter
    def value(self, vec):
        """Set gravity vector."""
        # Fix: raise ValueError directly -- the old
        # ``assert cond, ValueError(...)`` only wrapped the exception as
        # an assert message and vanished entirely under ``python -O``.
        if len(vec) != 3:
            raise ValueError('Gravity vector must be a tuple with 3 values.')
        self.values['value'] = '({})'.format(' '.join(str(v) for v in vec))
| ladybug-analysis-tools/butterfly | butterfly/g.py | Python | gpl-3.0 | 1,428 |
"""
$Id: Base.py,v 1.12.2.10 2008/08/01 03:58:03 customdesigned Exp $
This file is part of the pydns project.
Homepage: http://pydns.sourceforge.net
This code is covered by the standard Python License.
Base functionality. Request and Response classes, that sort of thing.
"""
import socket, string, types, time, select
import Type,Class,Opcode
import asyncore
#
# This random generator is used for transaction ids and port selection. This
# is important to prevent spurious results from lost packets, and malicious
# cache poisoning. This doesn't matter if you are behind a caching nameserver
# or your app is a primary DNS server only. To install your own generator,
# replace DNS.Base.random. SystemRandom uses /dev/urandom or similar source.
#
# Prefer a cryptographically strong RNG (SystemRandom draws from
# /dev/urandom or similar) for transaction ids and source ports; fall
# back to the plain 'random' module where SystemRandom is unavailable.
try:
    from random import SystemRandom
    random = SystemRandom()
except:
    import random
# Base exception for every error raised by this package.
class DNSError(Exception): pass
# Lib uses DNSError, so import after defining.
import Lib
# Module-wide request defaults; 'server' is populated later by
# ParseResolvConf() / DiscoverNameServers().
defaults= { 'protocol':'udp', 'port':53, 'opcode':Opcode.QUERY,
            'qtype':Type.A, 'rd':1, 'timing':1, 'timeout': 30 }
defaults['server']=[]
def ParseResolvConf(resolv_path="/etc/resolv.conf"):
    "parses the /etc/resolv.conf file and sets defaults for name servers"
    global defaults
    # Fix: try/finally guarantees the handle is closed; the previous
    # open(...).readlines() leaked the file object until GC.
    fdesc = open(resolv_path)
    try:
        lines = fdesc.readlines()
    finally:
        fdesc.close()
    for line in lines:
        # str methods replace the long-deprecated string-module helpers
        # (string.strip / string.split), which do not exist on Python 3.
        line = line.strip()
        if not line or line[0]==';' or line[0]=='#':
            continue
        fields = line.split()
        if len(fields) < 2:
            continue
        if fields[0]=='domain' and len(fields) > 1:
            defaults['domain']=fields[1]
        if fields[0]=='search':
            pass
        if fields[0]=='options':
            pass
        if fields[0]=='sortlist':
            pass
        if fields[0]=='nameserver':
            if fields[1].count(':'):
                # Ignore IPv6 nameservers as we currently do not support
                # querying them.
                pass
            else:
                defaults['server'].append(fields[1])
def DiscoverNameServers():
    """Populate defaults['server'] from the platform resolver
    configuration: the Windows registry on win32/nt, otherwise
    /etc/resolv.conf."""
    import sys
    if sys.platform in ('win32', 'nt'):
        import win32dns
        defaults['server']=win32dns.RegistryResolve()
    else:
        return ParseResolvConf()
class DnsRequest:
    """High level Request object.

    Builds, sends and parses one DNS query over UDP or TCP.  Keyword
    arguments given here (or to req()) override the module-level
    ``defaults`` dict: server, port, protocol, qtype, rd, timeout, ...
    NOTE(review): Python 2 only -- uses old raise/except syntax, print
    statements, has_key(), and the attribute name ``async`` (a reserved
    word on Python 3.7+).
    """
    def __init__(self,*name,**args):
        self.donefunc=None
        self.async=None
        self.defaults = {}
        self.argparse(name,args)
        # Remember the merged args as per-instance defaults for reuse.
        self.defaults = self.args
        self.tid = 0
    def argparse(self,name,args):
        # Merge, in priority order: positional name, explicit keyword
        # args, per-instance defaults, then module-level defaults.
        if not name and self.defaults.has_key('name'):
            args['name'] = self.defaults['name']
        if type(name) is types.StringType:
            args['name']=name
        else:
            if len(name) == 1:
                if name[0]:
                    args['name']=name[0]
        for i in defaults.keys():
            if not args.has_key(i):
                if self.defaults.has_key(i):
                    args[i]=self.defaults[i]
                else:
                    args[i]=defaults[i]
        # Allow 'server' to be given as a single address string.
        if type(args['server']) == types.StringType:
            args['server'] = [args['server']]
        self.args=args
    def socketInit(self,a,b):
        # Create the socket used for this request (family, type).
        self.s = socket.socket(a,b)
    def processUDPReply(self):
        # Wait (honoring the timeout) for a reply datagram, then parse it.
        if self.timeout > 0:
            r,w,e = select.select([self.s],[],[],self.timeout)
            if not len(r):
                raise DNSError, 'Timeout'
        (self.reply, self.from_address) = self.s.recvfrom(65535)
        self.time_finish=time.time()
        self.args['server']=self.ns
        return self.processReply()
    def processTCPReply(self):
        # TCP replies are prefixed with a 2-byte big-endian length; read
        # the prefix, then exactly that many bytes of reply body.
        if self.timeout > 0:
            r,w,e = select.select([self.s],[],[],self.timeout)
            if not len(r):
                raise DNSError, 'Timeout'
        f = self.s.makefile('r')
        header = f.read(2)
        if len(header) < 2:
            raise DNSError,'EOF'
        count = Lib.unpack16bit(header)
        self.reply = f.read(count)
        if len(self.reply) != count:
            # FIXME: Since we are non-blocking, it could just be a large reply
            # that we need to loop and wait for.
            raise DNSError,'incomplete reply'
        self.time_finish=time.time()
        self.args['server']=self.ns
        return self.processReply()
    def processReply(self):
        # Unpack the raw wire-format reply into a DnsResult object and
        # record the elapsed time in milliseconds.
        self.args['elapsed']=(self.time_finish-self.time_start)*1000
        u = Lib.Munpacker(self.reply)
        r=Lib.DnsResult(u,self.args)
        r.args=self.args
        #self.args=None # mark this DnsRequest object as used.
        return r
    #### TODO TODO TODO ####
#    if protocol == 'tcp' and qtype == Type.AXFR:
#        while 1:
#            header = f.read(2)
#            if len(header) < 2:
#                print '========== EOF =========='
#                break
#            count = Lib.unpack16bit(header)
#            if not count:
#                print '========== ZERO COUNT =========='
#                break
#            print '========== NEXT =========='
#            reply = f.read(count)
#            if len(reply) != count:
#                print '*** Incomplete reply ***'
#                break
#            u = Lib.Munpacker(reply)
#            Lib.dumpM(u)
    def getSource(self):
        "Pick random source port to avoid DNS cache poisoning attack."
        while True:
            try:
                source_port = random.randint(1024,65535)
                self.s.bind(('', source_port))
                break
            except socket.error, msg:
                # Error 98, 'Address already in use'
                if msg[0] != 98: raise
    def conn(self):
        # Bind a random source port, then connect to the chosen server
        # so replies from other hosts/ports are rejected by the kernel.
        self.getSource()
        self.s.connect((self.ns,self.port))
    def req(self,*name,**args):
        " needs a refactoring "
        self.argparse(name,args)
        #if not self.args:
        #    raise DNSError,'reinitialize request before reuse'
        protocol = self.args['protocol']
        self.port = self.args['port']
        # Random transaction id; replies are matched against it later.
        self.tid = random.randint(0,65535)
        self.timeout = self.args['timeout'];
        opcode = self.args['opcode']
        rd = self.args['rd']
        server=self.args['server']
        if type(self.args['qtype']) == types.StringType:
            try:
                qtype = getattr(Type, string.upper(self.args['qtype']))
            except AttributeError:
                raise DNSError,'unknown query type'
        else:
            qtype=self.args['qtype']
        if not self.args.has_key('name'):
            print self.args
            raise DNSError,'nothing to lookup'
        qname = self.args['name']
        if qtype == Type.AXFR:
            print 'Query type AXFR, protocol forced to TCP'
            protocol = 'tcp'
        #print 'QTYPE %d(%s)' % (qtype, Type.typestr(qtype))
        m = Lib.Mpacker()
        # jesus. keywords and default args would be good. TODO.
        m.addHeader(self.tid,
              0, opcode, 0, 0, rd, 0, 0, 0,
              1, 0, 0, 0)
        m.addQuestion(qname, qtype, Class.IN)
        self.request = m.getbuf()
        try:
            if protocol == 'udp':
                self.sendUDPRequest(server)
            else:
                self.sendTCPRequest(server)
        except socket.error, reason:
            raise DNSError, reason
        if self.async:
            # Async mode: the reply arrives via the event loop callback.
            return None
        else:
            if not self.response:
                raise DNSError,'no working nameservers found'
            return self.response
    def sendUDPRequest(self, server):
        "refactor me"
        self.response=None
        # Try each configured nameserver in turn until one answers.
        for self.ns in server:
            #print "trying udp",self.ns
            try:
                if self.ns.count(':'):
                    if hasattr(socket,'has_ipv6') and socket.has_ipv6:
                        self.socketInit(socket.AF_INET6, socket.SOCK_DGRAM)
                    else: continue
                else:
                    self.socketInit(socket.AF_INET, socket.SOCK_DGRAM)
                try:
                    # TODO. Handle timeouts &c correctly (RFC)
                    self.time_start=time.time()
                    self.conn()
                    if not self.async:
                        self.s.send(self.request)
                        r=self.processUDPReply()
                        # Since we bind to the source port and connect to the
                        # destination port, we don't need to check that here,
                        # but do make sure it's actually a DNS request that the
                        # packet is in reply to.
                        while r.header['id'] != self.tid        \
                                or self.from_address[1] != self.port:
                            r=self.processUDPReply()
                        self.response = r
                    # FIXME: check waiting async queries
                finally:
                    if not self.async:
                        self.s.close()
            except socket.error:
                continue
            break
    def sendTCPRequest(self, server):
        " do the work of sending a TCP request "
        self.response=None
        # Try each configured nameserver in turn until one answers.
        for self.ns in server:
            #print "trying tcp",self.ns
            try:
                if self.ns.count(':'):
                    if hasattr(socket,'has_ipv6') and socket.has_ipv6:
                        self.socketInit(socket.AF_INET6, socket.SOCK_STREAM)
                    else: continue
                else:
                    self.socketInit(socket.AF_INET, socket.SOCK_STREAM)
                try:
                    # TODO. Handle timeouts &c correctly (RFC)
                    self.time_start=time.time()
                    self.conn()
                    # Length-prefix the request per RFC 1035 TCP framing.
                    buf = Lib.pack16bit(len(self.request))+self.request
                    # Keep server from making sendall hang
                    self.s.setblocking(0)
                    # FIXME: throws WOULDBLOCK if request too large to fit in
                    # system buffer
                    self.s.sendall(buf)
                    self.s.shutdown(socket.SHUT_WR)
                    r=self.processTCPReply()
                    if r.header['id'] == self.tid:
                        self.response = r
                        break
                finally:
                    self.s.close()
            except socket.error:
                continue
#class DnsAsyncRequest(DnsRequest):
class DnsAsyncRequest(DnsRequest,asyncore.dispatcher_with_send):
    " an asynchronous request object. out of date, probably broken "
    def __init__(self,*name,**args):
        DnsRequest.__init__(self, *name, **args)
        # XXX todo
        # 'done' callback fires when the reply has been processed;
        # defaults to printing the result.
        if args.has_key('done') and args['done']:
            self.donefunc=args['done']
        else:
            self.donefunc=self.showResult
        #self.realinit(name,args) # XXX todo
        self.async=1
    def conn(self):
        # Non-blocking variant: use asyncore's connect and optionally
        # start the event loop immediately ('start' argument).
        self.getSource()
        self.connect((self.ns,self.port))
        self.time_start=time.time()
        if self.args.has_key('start') and self.args['start']:
            asyncore.dispatcher.go(self)
    def socketInit(self,a,b):
        self.create_socket(a,b)
        asyncore.dispatcher.__init__(self)
        # The base class reads/writes through self.s; here that is the
        # dispatcher itself.
        self.s=self
    def handle_read(self):
        # asyncore callback: a reply datagram is ready to be parsed.
        if self.args['protocol'] == 'udp':
            self.response=self.processUDPReply()
            if self.donefunc:
                apply(self.donefunc,(self,))
    def handle_connect(self):
        # asyncore callback: connection established, send the query.
        self.send(self.request)
    def handle_write(self):
        pass
    def showResult(self,*s):
        # Default completion callback: dump the reply to stdout.
        self.response.show()
#
# $Log: Base.py,v $
# Revision 1.12.2.10 2008/08/01 03:58:03 customdesigned
# Don't try to close socket when never opened.
#
# Revision 1.12.2.9 2008/08/01 03:48:31 customdesigned
# Fix more breakage from port randomization patch. Support Ipv6 queries.
#
# Revision 1.12.2.8 2008/07/31 18:22:59 customdesigned
# Wait until tcp response at least starts coming in.
#
# Revision 1.12.2.7 2008/07/28 01:27:00 customdesigned
# Check configured port.
#
# Revision 1.12.2.6 2008/07/28 00:17:10 customdesigned
# Randomize source ports.
#
# Revision 1.12.2.5 2008/07/24 20:10:55 customdesigned
# Randomize tid in requests, and check in response.
#
# Revision 1.12.2.4 2007/05/22 20:28:31 customdesigned
# Missing import Lib
#
# Revision 1.12.2.3 2007/05/22 20:25:52 customdesigned
# Use socket.inetntoa,inetaton.
#
# Revision 1.12.2.2 2007/05/22 20:21:46 customdesigned
# Trap socket error
#
# Revision 1.12.2.1 2007/05/22 20:19:35 customdesigned
# Skip bogus but non-empty lines in resolv.conf
#
# Revision 1.12 2002/04/23 06:04:27 anthonybaxter
# attempt to refactor the DNSRequest.req method a little. after doing a bit
# of this, I've decided to bite the bullet and just rewrite the puppy. will
# be checkin in some design notes, then unit tests and then writing the sod.
#
# Revision 1.11 2002/03/19 13:05:02 anthonybaxter
# converted to class based exceptions (there goes the python1.4 compatibility :)
#
# removed a quite gross use of 'eval()'.
#
# Revision 1.10 2002/03/19 12:41:33 anthonybaxter
# tabnannied and reindented everything. 4 space indent, no tabs.
# yay.
#
# Revision 1.9 2002/03/19 12:26:13 anthonybaxter
# death to leading tabs.
#
# Revision 1.8 2002/03/19 10:30:33 anthonybaxter
# first round of major bits and pieces. The major stuff here (summarised
# from my local, off-net CVS server :/ this will cause some oddities with
# the
#
# tests/testPackers.py:
# a large slab of unit tests for the packer and unpacker code in DNS.Lib
#
# DNS/Lib.py:
# placeholder for addSRV.
# added 'klass' to addA, make it the same as the other A* records.
# made addTXT check for being passed a string, turn it into a length 1 list.
# explicitly check for adding a string of length > 255 (prohibited).
# a bunch of cleanups from a first pass with pychecker
# new code for pack/unpack. the bitwise stuff uses struct, for a smallish
# (disappointly small, actually) improvement, while addr2bin is much
# much faster now.
#
# DNS/Base.py:
# added DiscoverNameServers. This automatically does the right thing
# on unix/ win32. No idea how MacOS handles this. *sigh*
# Incompatible change: Don't use ParseResolvConf on non-unix, use this
# function, instead!
# a bunch of cleanups from a first pass with pychecker
#
# Revision 1.5 2001/08/09 09:22:28 anthonybaxter
# added what I hope is win32 resolver lookup support. I'll need to try
# and figure out how to get the CVS checkout onto my windows machine to
# make sure it works (wow, doing something other than games on the
# windows machine :)
#
# Code from Wolfgang.Strobl@gmd.de
# win32dns.py from
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66260
#
# Really, ParseResolvConf() should be renamed "FindNameServers" or
# some such.
#
# Revision 1.4 2001/08/09 09:08:55 anthonybaxter
# added identifying header to top of each file
#
# Revision 1.3 2001/07/19 07:20:12 anthony
# Handle blank resolv.conf lines.
# Patch from Bastian Kleineidam
#
# Revision 1.2 2001/07/19 06:57:07 anthony
# cvs keywords added
#
#
| disabler/isida3 | lib/DNS/Base.py | Python | gpl-3.0 | 12,716 |
import IMP
import IMP.test
import IMP.atom
import IMP.core
class Tests(IMP.test.TestCase):

    def _produce_point_sets(self, tr):
        """Return (originals, images): 20 random unit-box points and
        their images under the transformation *tr*."""
        originals = []
        images = []
        for _ in range(20):
            point = IMP.algebra.get_random_vector_in(
                IMP.algebra.get_unit_bounding_box_3d())
            originals.append(point)
            images.append(tr.get_transformed(point))
        return (originals, images)

    def _build_hierarchy(self, model, vectors):
        """Build an atom hierarchy holding one unit-radius, unit-mass
        XYZR particle per input vector."""
        root = IMP.atom.Hierarchy(IMP.Particle(model))
        for vec in vectors:
            particle = IMP.Particle(model)
            xyzr = IMP.core.XYZR.setup_particle(particle)
            xyzr.set_coordinates(vec)
            xyzr.set_radius(1.0)
            IMP.atom.Mass.setup_particle(particle, 1.0)
            root.add_child(particle)
        return root

    def test_alignment_selection(self):
        """Testing rigid alignment of point sets"""
        model = IMP.Model()
        rotation = IMP.algebra.get_random_rotation_3d()
        translation = IMP.algebra.get_random_vector_in(
            IMP.algebra.get_unit_bounding_box_3d())
        transform = IMP.algebra.Transformation3D(rotation, translation)
        originals, images = self._produce_point_sets(transform)
        sel1 = IMP.atom.Selection(self._build_hierarchy(model, originals))
        sel2 = IMP.atom.Selection(self._build_hierarchy(model, images))
        recovered = IMP.atom.get_transformation_aligning_first_to_second(
            sel1, sel2)
        # The recovered transformation should match the generating one
        # within numerical tolerance.
        self.assertAlmostEqual(
            IMP.algebra.get_distance(recovered.get_rotation(), rotation),
            0, delta=.1)
        self.assertAlmostEqual(
            IMP.algebra.get_distance(recovered.get_translation(),
                                     translation),
            0, delta=.1)
# Allow running this test module directly via IMP's test runner.
if __name__ == '__main__':
    IMP.test.main()
| shanot/imp | modules/atom/test/test_alignment.py | Python | gpl-3.0 | 1,846 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015-2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import shutil
import fixtures
from unittest import mock
from testtools.matchers import FileExists, MatchesRegex, Not
from snapcraft.main import main
from snapcraft.internal import (
pluginhandler,
project_loader,
states,
)
from snapcraft import tests
class CleanCommandTestCase(tests.TestCase):
    """Tests for `snapcraft clean`: full, partial and per-step cleaning."""
    yaml_template = """name: clean-test
version: 1.0
summary: test clean
description: if the clean is succesful the state file will be updated
icon: icon.png
confinement: strict
grade: stable
parts:
{parts}"""
    yaml_part = """  clean{:d}:
    plugin: nil"""
    def make_snapcraft_yaml(self, n=1, create=True):
        """Write a snapcraft.yaml with *n* nil parts; when *create* is
        True also build each part through the prime step so there is
        something to clean.  Returns a list of per-part metadata dicts."""
        parts = '\n'.join([self.yaml_part.format(i) for i in range(n)])
        super().make_snapcraft_yaml(self.yaml_template.format(parts=parts))
        open('icon.png', 'w').close()
        parts = []
        validator = project_loader.Validator()
        for i in range(n):
            part_name = 'clean{}'.format(i)
            handler = pluginhandler.load_plugin(
                part_name, plugin_name='nil',
                part_properties={'plugin': 'nil'},
                part_schema=validator.part_schema,
                definitions_schema=validator.definitions_schema)
            parts.append({
                'part_dir': handler.code.partdir,
            })
            if create:
                # Run the part through every lifecycle step so state
                # files and install artifacts exist for clean to remove.
                handler.makedirs()
                open(os.path.join(
                    handler.code.installdir, part_name), 'w').close()
                handler.mark_done('pull')
                handler.mark_done('build')
                handler.stage()
                handler.prime()
        return parts
    def test_clean_all(self):
        # Bare `clean` wipes parts/, stage/ and prime/ entirely.
        self.make_snapcraft_yaml(n=3)
        main(['clean'])
        self.assertFalse(os.path.exists(self.parts_dir))
        self.assertFalse(os.path.exists(self.stage_dir))
        self.assertFalse(os.path.exists(self.prime_dir))
    def test_local_plugin_not_removed(self):
        # A local plugin under parts/plugins must survive a full clean.
        self.make_snapcraft_yaml(n=3)
        local_plugin = os.path.join(self.local_plugins_dir, 'foo.py')
        os.makedirs(os.path.dirname(local_plugin))
        open(local_plugin, 'w').close()
        main(['clean'])
        self.assertThat(self.stage_dir, Not(FileExists()))
        self.assertThat(self.prime_dir, Not(FileExists()))
        self.assertThat(self.parts_dir, Not(FileExists()))
        self.assertThat(local_plugin, FileExists())
    def test_clean_all_when_all_parts_specified(self):
        # Naming every part explicitly is equivalent to a full clean.
        self.make_snapcraft_yaml(n=3)
        main(['clean', 'clean0', 'clean1', 'clean2'])
        self.assertFalse(os.path.exists(self.parts_dir))
        self.assertFalse(os.path.exists(self.stage_dir))
        self.assertFalse(os.path.exists(self.prime_dir))
    def test_partial_clean(self):
        # Cleaning a subset leaves the other parts (and the shared
        # parts/stage/prime dirs) in place until everything is cleaned.
        parts = self.make_snapcraft_yaml(n=3)
        main(['clean', 'clean0', 'clean2'])
        for i in [0, 2]:
            self.assertFalse(
                os.path.exists(parts[i]['part_dir']),
                'Expected for {!r} to be wiped'.format(parts[i]['part_dir']))
        self.assertTrue(os.path.exists(parts[1]['part_dir']),
                        'Expected a part directory for the clean1 part')
        self.assertTrue(os.path.exists(self.parts_dir))
        self.assertTrue(os.path.exists(self.stage_dir))
        self.assertTrue(os.path.exists(self.prime_dir))
        # Now clean it the rest of the way
        main(['clean', 'clean1'])
        for i in range(0, 3):
            self.assertFalse(
                os.path.exists(parts[i]['part_dir']),
                'Expected for {!r} to be wiped'.format(parts[i]['part_dir']))
        self.assertFalse(os.path.exists(self.parts_dir))
        self.assertFalse(os.path.exists(self.stage_dir))
        self.assertFalse(os.path.exists(self.prime_dir))
    def test_everything_is_clean(self):
        """Don't crash if everything is already clean."""
        self.make_snapcraft_yaml(n=3, create=False)
        main(['clean'])
    def test_part_to_remove_not_defined_exits_with_error(self):
        # Cleaning an unknown part must exit(1) with a clear message.
        fake_logger = fixtures.FakeLogger(level=logging.ERROR)
        self.useFixture(fake_logger)
        self.make_snapcraft_yaml(n=3)
        raised = self.assertRaises(
            SystemExit,
            main, ['clean', 'no-clean'])
        self.assertEqual(1, raised.code)
        self.assertEqual(
            fake_logger.output,
            "The part named 'no-clean' is not defined in "
            "'snap/snapcraft.yaml'\n")
    @mock.patch.object(pluginhandler.PluginHandler, 'clean')
    def test_per_step_cleaning(self, mock_clean):
        # `clean --step=X` must forward the stage/prime state maps and
        # the step name to each part handler's clean().
        self.make_snapcraft_yaml(n=3)
        main(['clean', '--step=foo'])
        expected_staged_state = {
            'clean0': states.StageState({'clean0'}, set()),
            'clean1': states.StageState({'clean1'}, set()),
            'clean2': states.StageState({'clean2'}, set()),
        }
        expected_primed_state = {
            'clean0': states.PrimeState({'clean0'}, set()),
            'clean1': states.PrimeState({'clean1'}, set()),
            'clean2': states.PrimeState({'clean2'}, set()),
        }
        mock_clean.assert_called_with(
            expected_staged_state, expected_primed_state, 'foo')
    def test_cleaning_with_strip_does_prime_and_warns(self):
        # Legacy `--step=strip` still works but warns and maps to prime.
        fake_logger = fixtures.FakeLogger(level=logging.WARNING)
        self.useFixture(fake_logger)
        self.make_snapcraft_yaml(n=3)
        main(['clean', '--step=strip'])
        self.assertThat(
            fake_logger.output, MatchesRegex(
                'DEPRECATED: Use `prime` instead of `strip` as the step to '
                'clean'))
        self.assertFalse(os.path.exists(self.prime_dir))
class CleanCommandReverseDependenciesTestCase(tests.TestCase):
    """Tests that `clean` enforces the dependency chain
    main <- dependent <- nested-dependent."""
    def setUp(self):
        super().setUp()
        self.make_snapcraft_yaml("""name: clean-test
version: 1.0
summary: test clean
description: test clean
confinement: strict
grade: stable
parts:
  main:
    plugin: nil
  dependent:
    plugin: nil
    after: [main]
  nested-dependent:
    plugin: nil
    after: [dependent]""")
        # Fabricate pulled state plus stage/prime dirs for every part so
        # each test starts from a "built" layout.
        self.part_dirs = {}
        for part in ['main', 'dependent', 'nested-dependent']:
            self.part_dirs[part] = os.path.join(self.parts_dir, part)
            os.makedirs(os.path.join(self.part_dirs[part], 'state'))
            open(os.path.join(self.part_dirs[part], 'state', 'pull'),
                 'w').close()
        os.makedirs(self.stage_dir)
        os.makedirs(self.prime_dir)
    def assert_clean(self, parts):
        # Helper: assert each named part's directory has been removed.
        for part in parts:
            self.assertFalse(
                os.path.exists(self.part_dirs[part]),
                'Expected part directory for {!r} to be cleaned'.format(part))
    def test_clean_dependent_parts(self):
        main(['clean', 'dependent', 'nested-dependent'])
        self.assert_clean(['dependent', 'nested-dependent'])
        self.assertTrue(
            os.path.exists(self.part_dirs['main']),
            'Expected part directory for main to be untouched by the clean')
    def test_clean_part_with_clean_dependent(self):
        main(['clean', 'nested-dependent'])
        self.assert_clean(['nested-dependent'])
        # Not specifying nested-dependent here should be okay since it's
        # already clean.
        main(['clean', 'dependent'])
        self.assert_clean(['dependent', 'nested-dependent'])
    def test_clean_part_unspecified_uncleaned_dependent_raises(self):
        # Not specifying nested-dependent here should result in clean raising
        # an exception, saying that it has dependents. Note the use of '-d',
        # so we get a RuntimeError instead of SystemExit.
        raised = self.assertRaises(
            RuntimeError,
            main, ['-d', 'clean', 'dependent'])
        self.assertEqual(
            str(raised),
            "Requested clean of 'dependent' but 'nested-dependent' depends "
            "upon it. Please add each to the clean command if that's what you "
            "intended.")
    def test_clean_nested_dependent_parts(self):
        main(['clean', 'main', 'dependent', 'nested-dependent'])
        self.assert_clean(['main', 'dependent', 'nested-dependent'])
    def test_clean_part_with_clean_dependent_uncleaned_nested_dependent(self):
        shutil.rmtree(self.part_dirs['dependent'])
        self.assert_clean(['dependent'])
        # Not specifying dependent here should be okay since it's already
        # clean.
        main(['clean', 'main', 'nested-dependent'])
        self.assert_clean(['main', 'dependent', 'nested-dependent'])
    def test_clean_part_with_clean_nested_dependent(self):
        shutil.rmtree(self.part_dirs['nested-dependent'])
        self.assert_clean(['nested-dependent'])
        # Not specifying nested-dependent here should be okay since it's
        # already clean.
        main(['clean', 'main', 'dependent'])
        self.assert_clean(['main', 'dependent', 'nested-dependent'])
    def test_clean_part_unspecified_uncleaned_dependent_with_nest_raises(self):
        # Not specifying dependent here should result in clean raising
        # an exception, saying that it has dependents. Note the use of '-d',
        # so we get a RuntimeError instead of SystemExit.
        raised = self.assertRaises(
            RuntimeError,
            main, ['-d', 'clean', 'main'])
        self.assertEqual(
            str(raised),
            "Requested clean of 'main' but 'dependent' depends upon it. "
            "Please add each to the clean command if that's what you "
            "intended.")
    def test_clean_part_unspecified_uncleaned_nested_dependent_raises(self):
        # Not specifying nested-dependent here should result in clean raising
        # an exception, saying that it has dependents. Note the use of '-d',
        # so we get a RuntimeError instead of SystemExit.
        raised = self.assertRaises(
            RuntimeError,
            main, ['-d', 'clean', 'main', 'dependent'])
        self.assertEqual(
            str(raised),
            "Requested clean of 'dependent' but 'nested-dependent' depends "
            "upon it. Please add each to the clean command if that's what you "
            "intended.")
| josepht/snapcraft | snapcraft/tests/commands/test_clean.py | Python | gpl-3.0 | 10,914 |
# CUPS Cloudprint - Print via Google Cloud Print
# Copyright (C) 2014 Simon Cadman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import subprocess
import os
import logging
import sys
import grp
import base64
import fcntl
import termios
import struct
class Utils(object):
    """Static helpers shared by the CUPS Cloud Print scripts.

    Groups file-permission fixups, logging setup, CUPS group discovery,
    MIME/base64 conversion and small process/terminal utilities.  All
    members are @staticmethod; the class is never instantiated.
    (Python 2 code: note the print statements below.)
    """
    # Default log destination; SetupLogging() falls back to console
    # logging if this path is not writable.
    logpath = '/var/log/cups/cloudprint_log'
    # Countries where letter sized paper is used, according to:
    # http://en.wikipedia.org/wiki/Letter_(paper_size)
    _LETTER_COUNTRIES = set(('US', 'CA', 'MX', 'BO', 'CO', 'VE', 'PH', 'CL'))
    # URI scheme used for printer device URIs (e.g. "gcp://...").
    PROTOCOL_NAME = 'gcp'
    # Set to True by GUI front-ends; gates openBrowserWithUrl().
    GUI = False
    PROTOCOL = PROTOCOL_NAME + '://'
    # Legacy scheme kept so queues created by old versions still match.
    OLD_PROTOCOL_NAME = 'cloudprint'
    OLD_PROTOCOL = OLD_PROTOCOL_NAME + '://'
    # Job type name -> MIME type, used when base64-encoding job data.
    _MIMETYPES_JOBTYPES = {'pdf': 'application/pdf',
                           'other': 'application/octet-stream',
                           'jpg': 'image/jpeg',
                           'png': 'image/png'}
    @staticmethod
    def FixFilePermissions(filename):
        """Ensure *filename* is mode rw-rw---- and owned by the CUPS group.

        Returns:
          (permissionsOk, ownershipOk): booleans, each False when the
          corresponding change was needed but could not be applied.
        """
        filePermissions = True
        fileOwnerships = True
        currentStat = None
        if os.path.exists(filename):
            currentStat = os.stat(filename)
        # 0o100660 == S_IFREG | 0660: regular file, rw for owner+group.
        if currentStat is None or currentStat.st_mode != 0o100660:
            try:
                os.chmod(filename, 0o100660)
            except Exception:
                filePermissions = False
                sys.stderr.write(
                    "DEBUG: Cannot alter " +
                    filename +
                    " file permissions\n")
        if currentStat is None or currentStat.st_gid != Utils.GetLPID():
            try:
                # uid -1 keeps the current owner; only the group changes.
                os.chown(filename, -1, Utils.GetLPID())
            except Exception:
                fileOwnerships = False
                sys.stderr.write(
                    "DEBUG: Cannot alter " +
                    filename +
                    " file ownership\n")
        return filePermissions, fileOwnerships
    @staticmethod
    def SetupLogging(logpath=None):
        """Configure the root logger to write to *logpath* (default:
        Utils.logpath), fixing the log file's permissions.

        Returns:
          boolean: False when the file was unwritable and console
          logging was used instead.
        """
        returnValue = True
        logformat = "%(asctime)s|%(levelname)s|%(message)s"
        dateformat = "%Y-%m-%d %H:%M:%S"
        if logpath is None:
            logpath = Utils.logpath
        try:
            logging.basicConfig(
                filename=logpath,
                level=logging.INFO,
                format=logformat,
                datefmt=dateformat)
            Utils.FixFilePermissions(logpath)
        except Exception:
            # Fall back to stderr logging so messages are not lost.
            logging.basicConfig(
                level=logging.INFO,
                format=logformat,
                datefmt=dateformat)
            logging.error("Unable to write to log file " + logpath)
            returnValue = False
        return returnValue
    @staticmethod
    def fileIsPDF(filedata):
        """Check whether *filedata* is the content of a PDF file.

        Args:
          filedata: str, raw file contents (piped to file(1) on stdin).
        Returns:
          boolean: True = is a PDF, False = not a PDF.
        """
        # Let file(1) sniff the magic bytes instead of parsing ourselves.
        p = subprocess.Popen(["file", '-'], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
        output = p.communicate(filedata)[0]
        logging.debug("File output was: " + output)
        return "PDF document" in output
    @staticmethod
    def is_exe(fpath):
        """Return True when *fpath* exists and is executable."""
        return os.path.exists(fpath) and os.access(fpath, os.X_OK)
    @staticmethod
    def which(program):
        """Search $PATH for an executable named *program*.

        Returns the full path, or None when not found.
        """
        for path in os.environ["PATH"].split(os.pathsep):
            exe_file = os.path.join(path, program)
            if Utils.is_exe(exe_file):
                return exe_file
        return None
    @staticmethod
    def GetLPID(default='lp', alternative='cups', useFiles=True,
                blacklistedGroups=None,
                useFilesOnly=False):
        """Determine the gid of the CUPS printing group.

        First (when *useFiles*) takes the group of well-known CUPS files,
        skipping blacklisted admin groups; otherwise falls back to looking
        up *default* then *alternative* by name.

        Returns:
          int gid, or None when nothing suitable was found.
        """
        if blacklistedGroups is None:
            blacklistedGroups = ['adm', 'wheel', 'root']
        blacklistedGroupIds = []
        for group in blacklistedGroups:
            try:
                blacklistedGroupIds.append(grp.getgrnam(group).gr_gid)
            except Exception:
                logging.debug("Group " + group + " not found")
        if useFiles:
            # check files in order
            for cupsConfigFile in ['/var/log/cups/access_log',
                                   '/etc/cups/ppd',
                                   '/usr/local/etc/cups/ppd']:
                if os.path.exists(cupsConfigFile):
                    configGid = os.stat(cupsConfigFile).st_gid
                    if configGid not in blacklistedGroupIds:
                        return configGid
                    else:
                        logging.debug(
                            "Group " +
                            str(configGid) +
                            " excluded as blacklisted")
        if useFilesOnly:
            return None
        # try lp first, then cups
        lpgrp = None
        try:
            lpgrp = grp.getgrnam(default)
        except Exception:
            try:
                lpgrp = grp.getgrnam(alternative)
            except Exception:
                pass
        if lpgrp is None:
            return None
        else:
            return lpgrp.gr_gid
    @staticmethod
    def ShowVersion(CCPVersion):
        """Print the version and exit(0) when invoked as '<prog> version'.

        Returns False when the argument was not 'version' (it never
        returns True - sys.exit() is called instead).
        """
        if len(sys.argv) == 2 and sys.argv[1] == 'version':
            print "CUPS Cloud Print Version " + CCPVersion
            sys.exit(0)
        return False
    @staticmethod
    def ReadFile(pathname):
        """Read contents of a file and return content.

        Args:
          pathname: string, (path)name of file.
        Returns:
          string: contents of file, or None on IOError.
        """
        # NOTE(review): the file handle is never closed on the success
        # path; relies on CPython refcounting to release it.
        try:
            f = open(pathname, 'rb')
            s = f.read()
            return s
        except IOError as e:
            print 'ERROR: Error opening %s\n%s', pathname, e
            return None
    @staticmethod
    def WriteFile(file_name, data):
        """Write contents of data to a file_name.

        Args:
          file_name: string, (path)name of file.
          data: string, contents to write to file.
        Returns:
          boolean: True = success, False = errors.
        """
        status = True
        try:
            f = open(file_name, 'wb')
            f.write(data)
            f.close()
        except IOError:
            status = False
        return status
    @staticmethod
    def Base64Encode(data, jobtype):
        """Convert raw job data to a base64 data URL.

        Args:
          data: string, raw bytes to base64 encode
          jobtype: string, job type being encoded - pdf, jpg etc
        Returns:
          string, data URL with base64 encoded payload.

        For more info on data urls, see:
          http://en.wikipedia.org/wiki/Data_URI_scheme
        """
        # Convert binary data to base64 encoded data.
        mimetype = Utils._MIMETYPES_JOBTYPES['other']
        if jobtype in Utils._MIMETYPES_JOBTYPES:
            mimetype = Utils._MIMETYPES_JOBTYPES[jobtype]
        header = 'data:%s;base64,' % mimetype
        return header + base64.b64encode(data)
    @staticmethod
    def GetLanguage(locale, cupshelper=None):
        """Derive (language, full locale) from a locale tuple, preferring
        the CUPS server's DefaultLanguage setting when available.

        Args:
          locale: tuple as returned by locale.getdefaultlocale().
          cupshelper: optional helper exposing getServerSetting().
        Returns:
          ('en', 'en') when nothing usable is found.
        """
        newlocale = None
        if cupshelper is not None:
            newlocale = cupshelper.getServerSetting('DefaultLanguage')
        if newlocale is None:
            if len(locale) < 1 or locale[0] is None:
                return ('en', 'en')
            defaultlocale = locale[0]
            newlocale = defaultlocale
        language = newlocale
        if '_' in newlocale:
            # e.g. 'en_GB' -> language 'en'.
            language = newlocale.split("_")[0]
        return (language, newlocale)
    @staticmethod
    def GetDefaultPaperType(locale):
        """Return 'Letter' or 'A4' depending on the locale's country."""
        defaultpapertype = "Letter"
        if len(locale.split('_')) > 1 and \
                locale.split('_')[1].upper() not in Utils._LETTER_COUNTRIES:
            defaultpapertype = "A4"
        return defaultpapertype
    @staticmethod
    def GetWindowSize(winsize=None):
        """Gets window height and width.

        Gets window (aka terminal, console) height and width using the
        IOCtl Get WINdow SiZe method.

        Args:
          winsize: optional packed struct (for tests); queried from
            stdout via TIOCGWINSZ when None.
        Returns:
          The tuple (height, width) of the window as integers, or None if
          the windows size isn't available.
        """
        try:
            structbytes = struct.pack('HHHH', 0, 0, 0, 0)
            if winsize is None:
                winsize = fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ, structbytes)
            height, width = struct.unpack('HHHH', winsize)[:2]
        except Exception:
            return None
        if height > 0 and width > 0:
            return height, width
        return None
    @staticmethod
    def StdInToTempFile(jobID, userName, stdin=None):
        """Spool *stdin* to a uniquely named file under $TMPDIR.

        Returns the temp file path; exits the process on write failure.
        """
        if stdin is None:
            stdin = sys.stdin
        tmpDir = os.getenv('TMPDIR')
        if not tmpDir:
            tmpDir = "/tmp"
        tempFile = '%s/%s-%s-cupsjob-%s' % \
            (tmpDir, jobID, userName, str(os.getpid()))
        OUT = open(tempFile, 'w')
        # NOTE(review): open() raises IOError rather than returning a
        # falsy value, so this error branch appears unreachable.
        if not OUT:
            logging.error("Cannot write temp file: %s", tempFile)
            print "ERROR: Cannot write " + tempFile
            sys.exit(1)
        for line in stdin:
            OUT.write(line)
        OUT.close()
        return tempFile
    @staticmethod
    def hasGUI():
        """Return True when a GUI front-end set Utils.GUI."""
        return Utils.GUI
    @staticmethod
    def openBrowserWithUrl(url):
        """Open *url* in the user's browser (GUI sessions only).

        Returns False when no GUI is available or launching fails.
        """
        if not Utils.hasGUI():
            return False
        try:
            if sys.platform == "darwin":
                subprocess.Popen(['open', url])
            else:
                subprocess.Popen(['xdg-open', url])
        except Exception:
            return False
        return True
| jjscarafia/CUPS-Cloud-Print | ccputils.py | Python | gpl-3.0 | 10,118 |
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 20 12:28:32 2015
@author: boland
"""
import sys
sys.path.append('/home/boland/Anaconda/lib/python2.7/site-packages')
import pickle
import numpy as np
import matplotlib.pyplot as plt
from scipy.cluster.vq import kmeans
import multiprocessing as mp
import pyproj
import os
import itertools
import datetime
import pointshape as ps
from math import sqrt, atan2, radians,degrees, cos, tan, sin, asin
import random
import uuid
# Polygon (shapefile) describing the survey region; every generated
# station or path point must fall inside it.
shape_path = "/home/boland/Dropbox/University/UniMelb/AGOS/PROGRAMS/ANT/Versions/26.04.2015/shapefiles/aus.shp"
# Number of station locations (kmeans centroids) to optimise.
N = 130
#enter km spacing between path density points
km_points = 20.0
# reference elipsoid to calculate distance
wgs84 = pyproj.Geod(ellps='WGS84')
# Bins per axis for the 2-D path-density histograms.
nbins = 200
def haversine(coordinates):
    """Great-circle distance in kilometres between two points.

    *coordinates* is a flat sequence ``[lon1, lat1, lon2, lat2]`` in
    decimal degrees.
    """
    lon_a, lat_a = radians(coordinates[0]), radians(coordinates[1])
    lon_b, lat_b = radians(coordinates[2]), radians(coordinates[3])
    # Haversine formula on a sphere of radius 6367 km.
    half_dlon = (lon_b - lon_a) / 2
    half_dlat = (lat_b - lat_a) / 2
    chord = sin(half_dlat) ** 2 + cos(lat_a) * cos(lat_b) * sin(half_dlon) ** 2
    return 6367 * 2 * asin(sqrt(chord))
def haversine2(lon1, lat1, lon2, lat2):
    """Great-circle distance in kilometres between (*lon1*, *lat1*) and
    (*lon2*, *lat2*), all given in decimal degrees."""
    lam1, phi1, lam2, phi2 = map(radians, (lon1, lat1, lon2, lat2))
    # Haversine formula on a sphere of radius 6367 km.
    chord = sin((phi2 - phi1) / 2) ** 2 + \
        cos(phi1) * cos(phi2) * sin((lam2 - lam1) / 2) ** 2
    return 6367 * 2 * asin(sqrt(chord))
def geodesic(coord1, coord2, npts):
    """
    Returns a list of *npts* points along the geodesic between
    (and including) *coord1* and *coord2*, in an array of
    shape (*npts*, 2).
    @rtype: L{ndarray}
    """
    if npts < 2:
        raise Exception('nb of points must be at least 2')
    # pyproj returns only the interior samples, so request npts - 2 and
    # glue the two end points back on.
    interior = wgs84.npts(lon1=coord1[0], lat1=coord1[1],
                          lon2=coord2[0], lat2=coord2[1],
                          npts=npts - 2)
    return np.array([coord1] + interior + [coord2])
def new_geodesic(lon1, lat1, lon2, lat2, npts):
    """
    Like :func:`geodesic` but takes the two end points as four scalars.
    Returns an array of shape (*npts*, 2) of lon/lat pairs including
    both end points.
    @rtype: L{ndarray}
    """
    if npts < 2:
        raise Exception('nb of points must be at least 2')
    # Interior samples only; the end points are appended manually.
    interior = wgs84.npts(lon1=lon1, lat1=lat1,
                          lon2=lon2, lat2=lat2,
                          npts=npts - 2)
    return np.array([[lon1, lat1]] + interior + [[lon2, lat2]])
def cluster_points(coord_points, N):
    """
    Reduce *coord_points* (an (M, 2) lon-lat array) to *N* kmeans
    cluster centroids, returned as an (N, 2) array.  These centroids are
    the optimal spacings within a large set of random points, e.g. those
    produced by the many_points() function.
    """
    centroids, _distortion = kmeans(coord_points, N)
    return centroids
def paths_func(path_info, km=km_points):
    """Sample the geodesic described by *path_info* and tag each sample
    with the path's first end point.

    *path_info* is ``[lon1, lat1, lon2, lat2]`` (extra trailing entries
    are ignored).  Returns an (npts, 4) array whose columns are the
    sample lon, sample lat and the constant origin lon/lat.
    """
    start_lon, start_lat = path_info[0], path_info[1]
    end_lon, end_lat = path_info[2], path_info[3]
    distance = haversine2(start_lon, start_lat, end_lon, end_lat)
    # Keep the interpoint spacing <= km, with a floor of 100 samples.
    sample_count = max(int((np.ceil(distance) + 1) / km), 100)
    track = new_geodesic(start_lon, start_lat, end_lon, end_lat, sample_count)
    origin_lons = [start_lon] * len(track)
    origin_lats = [start_lat] * len(track)
    return np.column_stack((track, origin_lons, origin_lats))
def HIST2D(nbins, paths, grad=False):
    """Masked 2-D histogram of the lon/lat samples in *paths*.

    *paths* is an (M, >=2) array; columns 0 and 1 are binned on an
    *nbins* x *nbins* grid.  With *grad* the counts are replaced by the
    magnitude of their gradient along the first axis.  The result is
    rotated/flipped for pcolormesh and empty bins are masked out.
    """
    counts, _xedges, _yedges = np.histogram2d(paths[:, 0], paths[:, 1],
                                              bins=nbins)
    if grad:
        counts = np.abs(np.asarray(np.gradient(counts)[0]))
    # Orient for plotting: rotate 90 degrees then flip vertically.
    counts = np.flipud(np.rot90(counts))
    # Mask pixels with a value of zero so they are not drawn.
    return np.ma.masked_where(counts == 0, counts)
def latitude(dist, sigma01, alpha0, lon0):
    """Latitude in degrees of the point *dist* radians along a great
    circle with node parameters *sigma01*/*alpha0*.  *lon0* is unused
    but kept so the signature mirrors :func:`longitude` (both are fed to
    np.vectorize with identical argument lists)."""
    arc = sigma01 + dist
    return degrees(asin(cos(alpha0) * sin(arc)))
def longitude(dist, sigma01, alpha0, lon0):
    """Longitude in degrees of the point *dist* radians along a great
    circle with node parameters *sigma01*/*alpha0*, offset by the node
    longitude *lon0* (radians)."""
    arc = sigma01 + dist
    relative = atan2(sin(alpha0) * sin(arc), cos(arc))
    return degrees(relative) + degrees(lon0)
# NumPy-vectorised wrappers so whole arrays of along-path distances can
# be converted to latitudes/longitudes in one call (see waypoint_init).
vlat_func = np.vectorize(latitude)
vlon_func = np.vectorize(longitude)
def waypoint_init(path_info, km=km_points):
    """Sample points along the great circle defined by *path_info*
    (= [lon1, lat1, lon2, lat2, dist]) using the direct great-circle
    node equations instead of pyproj.

    Returns an (npts, 2) array of lon/lat pairs in decimal degrees.
    """
    R = 6371
    # NOTE(review): `dist` is converted with radians() together with the
    # four angles, and then divided by R again below - confirm the
    # units callers are expected to pass before reusing this function.
    lon1, lat1, lon2, lat2, dist = radians(path_info[0]), \
    radians(path_info[1]), radians(path_info[2]), \
    radians(path_info[3]), radians(path_info[4])
    lon_diff = lon2-lon1
    # Azimuth at the first point, then the node parameters of the great
    # circle: sigma01 (arc from node to point 1) and alpha0 (azimuth at
    # the node).
    alpha1 = atan2(sin(lon_diff),(cos(lat1)*tan(lat2)-sin(lat1)*cos(lon_diff)))
    sigma01, alpha0 = atan2(tan(lat1), cos(alpha1)), asin(sin(alpha1)*cos(lat1))
    lon01 = atan2(sin(alpha0)*sin(sigma01), cos(sigma01))
    lon0 = lon1 - lon01
    # Interpoint spacing <= km with a floor of 100 samples per path.
    npts = max(int((np.ceil(dist) + 1)/km), 100)
    all_d = np.linspace(0,dist,npts)/R
    lons, lats = vlon_func(all_d, sigma01, alpha0, lon0), vlat_func(all_d, sigma01, alpha0, lon0)
    return np.column_stack((lons, lats))
# ---------------------------------------------------------------------------
# Script body: load (or create) the initial station configuration.
# ---------------------------------------------------------------------------
t_total0 = datetime.datetime.now()
t0 = datetime.datetime.now()
ideal_path = 'ideal_coordinates.pickle'
#if no paths have been done before, start afresh!
if not os.path.exists(ideal_path):
    # Seed the optimiser: scatter M random points inside the shapefile
    # and reduce them to N kmeans cluster centroids.
    M = 1e5
    many_points = ps.points_in_shape(shape_path, M)
    coords = cluster_points(many_points,N)
#else import already processed coordinates if the program has already done so.
else:
    f = open(name=ideal_path, mode='rb')
    coords = pickle.load(f)
    f.close()
# Bounding box of the station set; used later to map histogram bin
# indices back to lon/lat.
lonmin = np.floor(min(coords[:,0]))
lonmax = np.ceil(max(coords[:,0]))
latmin = np.floor(min(coords[:,1]))
latmax = np.ceil(max(coords[:,1]))
print lonmin,lonmax,latmin,latmax
# kappa[i] stacks every (station_i -> station_j) end-point pairing.
kappa = [np.vstack([[coord1[0],coord1[1],coord2[0],coord2[1]]\
for coord2 in coords]) for coord1 in coords]
def spread_paths(nets):
    """Compute the geodesic point set of every station pairing in *nets*
    by mapping :func:`paths_func` over the rows; returns one array per
    pairing."""
    return map(paths_func, nets)
t0 = datetime.datetime.now()
# Build the initial inter-station path sets in parallel (one kappa entry,
# i.e. one station's pairings, per worker task).
pool = mp.Pool()
paths = pool.map(spread_paths, kappa)
pool.close()
pool.join()
t1 = datetime.datetime.now()
print t1-t0
counter = 0
# Book-keeping for the accept/reject loop below.
grad_ideal, grad_check1, grad_check2, H_avg1, H_avg2 = 0, 0, 0, 0, 0
SHAPE = (1,1)
counter2 = 0
# Fraction of the average density below which a bin counts as "low".
perc_high = 0.01
#counter of how many times the points
#have been chosen from the lowest path density spots
low_counter = 0
#counter of how many times the points
#have been chosen from the random spots.
random_counter = 0
new_coord = 0
infinite_counter = 0
# Main optimisation loop: repeatedly relocate one station, rebuild its
# paths, and keep the move only when it improves the density criterion.
while infinite_counter <= 1:
    t0 = datetime.datetime.now()
    #the following while loop is a work around fix to a:
    #new paths shape: (130, 100, 4) rather than being (130,) like it should be!
    while SHAPE != (130,):
        #remove a random set of paths associated with a single one of the N coordinates
        rand_int = random.randint(0,len(paths)-1)
        old_path = paths[rand_int]
        #figure out which old coordinate to remove from the coordinates list
        old_coord = [old_path[0][0][0],old_path[0][0][1]]
        #find index of array in nested array to remove!
        itemindex = np.where(coords==old_coord)[0][0]
        coords = list(coords)
        del coords[itemindex]
        coords = np.asarray(coords)
        new_coord_first = new_coord
        # Generate a replacement coordinate: random on the very first
        # pass, otherwise the candidate chosen at the end of the
        # previous iteration.
        if not counter >= 1:
            new_coord = ps.points_in_shape(shape_path, 1)[0]
        else:
            new_coord = new_coord
        #place new coordinate in old set of coordinates
        coords = np.append(coords, [new_coord], axis=0)
        #generate new array of points in conjunction with the new randomly generated point!
        new_coord_set = np.vstack([[new_coord[0],new_coord[1],coord1[0],\
        coord1[1]] for coord1 in coords])
        #generate new random point in place of all 'popped' points!
        new_paths = map(paths_func, new_coord_set)
        SHAPE = np.asarray(new_paths).shape
        if not SHAPE == (130,):
            # Malformed result: substitute the old coordinate back in
            # and retry with a fresh random station.
            coords = list(coords)
            del coords[-1]
            coords = np.asarray(coords)
            #place new coordinate in old set of coordinates
            coords = np.append(coords, [old_coord], axis=0)
    # The replacement succeeded: swap the old station's paths for the
    # new ones and reset SHAPE for the next outer iteration.
    del paths[rand_int]
    SHAPE = (1,1)
    #place new_paths in original path set!
    paths = np.append(paths, [new_paths], axis=0)
    #create a flattened numpy array of size 2xN from the paths created!
    paths_density_check = list(itertools.chain(*paths))
    paths_density_check = np.asarray(list(itertools.chain(*paths_density_check)))
    #keep all but the repeated coordinates by keeping only unique whole rows!
    #method is slowed without the b contiguous array
    b = np.ascontiguousarray(paths_density_check).view(np.dtype\
    ((np.void, paths_density_check.dtype.itemsize * \
    paths_density_check.shape[1])))
    _, idx = np.unique(b, return_index=True)
    paths_density_check = np.unique(b).view(paths_density_check.dtype)\
    .reshape(-1, paths_density_check.shape[1])
    paths = list(paths)
    # Estimate the 2D histogram
    H, xedges, yedges = np.histogram2d(paths_density_check[:,0],
                                       paths_density_check[:,1],
                                       bins=nbins)
    GRAD = np.abs(np.asarray(np.gradient(H)[0]))
    # H needs to be rotated and flipped
    H = np.rot90(H)
    GRAD = np.rot90(GRAD)
    H = np.flipud(H)
    GRAD = np.flipud(GRAD)
    # Mask zeros
    H = np.ma.masked_where(H==0,H) # Mask pixels with a value of zero
    GRAD = np.ma.masked_where(GRAD==0,GRAD) # Mask pixels with a value of zero
    # Acceptance metrics: average density and gradient spread.
    H_avg1 = np.average(H)
    grad_check1 = np.std(GRAD)
    # Choose the strategy for the NEXT candidate coordinate at random.
    rand_indicator = random.randint(1,10)
    if 0 < rand_indicator <= 5:
        #half the time move the coordinates to low density locations.
        WHERE = np.where(H < perc_high*H_avg1)
        #scale these points with respect to the lat-lon limits!
        Hminx, Hminy = WHERE[1], WHERE[0]
        Hminx = (lonmax-lonmin)/(nbins) * Hminx + lonmin
        Hminy = (latmax-latmin)/(nbins) * Hminy + latmin
        #make sure all low density coordinates ARE within shapefile!
        low_density_coords = ps.paths_in_shape(np.column_stack((Hminx, Hminy)))
        if len(low_density_coords) == 0:
            new_coord = ps.points_in_shape(shape_path, 1)[0]
            #increase percentage of search if no new low density points are created!
            perc_high +=0.05
        elif len(low_density_coords) == 1:
            new_coord = low_density_coords[0]
            perc_high +=0.05
        else:
            new_coord = low_density_coords[random.randint(0,len(low_density_coords)-1)]
    elif 5 < rand_indicator <= 10:
        #half the time move coordinates to random locations.
        new_coord = ps.points_in_shape(shape_path, 1)[0]
    # Initialise the acceptance thresholds on the first iteration.
    if counter == 0:
        grad_ideal = 1e6
        avg_ideal = 0
    if grad_check1 < grad_ideal and avg_ideal < H_avg1:
        # Improvement: persist the station set and tighten thresholds.
        print "Exporting new ideal coordinates."
        with open(u'ideal_coordinates.pickle', 'wb') as f:
            print "\nExporting new ideal coordinates."
            pickle.dump(coords, f, protocol=2)
        grad_ideal = grad_check1
        avg_ideal = H_avg1
        # Snapshot the current density map for inspection.
        fig = plt.figure()
        plt.pcolormesh(xedges,yedges,H)
        plt.xlabel('longitude (degrees)')
        plt.ylabel('latitude (degrees)')
        cbar = plt.colorbar()
        cbar.ax.set_ylabel('Counts')
        # NOTE(review): "min_density.png".format(counter) has no {}
        # placeholder, so every snapshot overwrites the same file.
        fig.savefig("min_density.png".format(counter))
    else:
        #RESET!
        #remove new coordinate and replace with old coordinate
        coords = list(coords)
        del coords[-1]
        coords = np.asarray(coords)
        #place new coordinate in old set of coordinates
        coords = np.append(coords, [old_coord], axis=0)
        #remove new path and replace it with the old set!
        paths = list(paths)
        del paths[-1]
        paths = list(np.append(paths, [old_path], axis=0))
    counter+=1
    counter2+=1
    t1 = datetime.datetime.now()
    print t1-t0
print t1-t0
# encoding: utf-8
#
# Copyright (C) 2016 YouCompleteMe contributors
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import * # noqa
from future.utils import PY2
from mock import patch, call
from nose.tools import eq_
from hamcrest import contains_string
from ycm.tests.test_utils import ExpectedFailure, ExtendedMock, MockVimModule
MockVimModule()
from ycm.tests import YouCompleteMeInstance
from ycmd.utils import ToBytes
from ycmd.request_wrap import RequestWrap
def ToBytesOnPY2( data ):
  # To test the omnifunc, etc. returning strings, which can be of different
  # types depending on python version, we use ToBytes on PY2 and just the
  # native str on python3. This roughly matches what happens between py2
  # and py3 versions within Vim.
  if not PY2:
    return data
  return ToBytes( data )
def BuildRequest( line_num, column_num, contents ):
  """Return a minimal ycmd-style request dict for *contents* at the given
  1-based line/column, using a fixed '/test' filepath."""
  # Note: it would be nice to use ycmd.tests.test_utils.BuildRequest directly
  # here, but we can't import ycmd.tests.test_utils because that in turn
  # imports ycm_core, which would cause our "ycm_core not imported" test to
  # fail.
  file_data = {
    '/test': {
      'contents': contents,
      # We need a filetype with a trigger, so we just use java.
      'filetypes': [ 'java' ]
    }
  }
  return {
    'line_num': line_num,
    'column_num': column_num,
    'filepath': '/test',
    'file_data': file_data
  }
def BuildRequestWrap( line_num, column_num, contents ):
  """Like BuildRequest, but wrapped in ycmd's RequestWrap so computed
  properties such as 'query' are available to the omnicompleter."""
  return RequestWrap( BuildRequest( line_num, column_num, contents ) )
@YouCompleteMeInstance( { 'cache_omnifunc': 1 } )
def OmniCompleter_GetCompletions_Cache_List_test( ycm ):
  """Caching on, no query after the trigger: a plain list returned by the
  omnifunc is passed through unchanged."""
  contents = 'test.'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 6,
                                   contents = contents )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = [ ToBytesOnPY2( 'a' ),
                      ToBytesOnPY2( 'b' ),
                      ToBytesOnPY2( 'cdef' ) ]
  # And get the completions.  vim.eval is hit twice: once for findstart
  # (side effect 6 = completion column) and once for the candidate list.
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'')" ),
    ] )
    eq_( results, omnifunc_result )
@YouCompleteMeInstance( { 'cache_omnifunc': 1 } )
def OmniCompleter_GetCompletions_Cache_ListFilter_test( ycm ):
  """Caching on with query 't': the cached list is filtered by YCM, and
  nothing in the omnifunc result matches."""
  contents = 'test.t'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 7,
                                   contents = contents )
  eq_( request_data[ 'query' ], 't' )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = [ ToBytesOnPY2( 'a' ),
                      ToBytesOnPY2( 'b' ),
                      ToBytesOnPY2( 'cdef' ) ]
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'t')" ),
    ] )
    # No candidate contains 't', so the cache-side filter removes all.
    eq_( results, [] )
@YouCompleteMeInstance( { 'cache_omnifunc': 0 } )
def OmniCompleter_GetCompletions_NoCache_List_test( ycm ):
  """Caching off, no query: the omnifunc list is returned verbatim."""
  contents = 'test.'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 6,
                                   contents = contents )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = [ ToBytesOnPY2( 'a' ),
                      ToBytesOnPY2( 'b' ),
                      ToBytesOnPY2( 'cdef' ) ]
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'')" ),
    ] )
    eq_( results, omnifunc_result )
@YouCompleteMeInstance( { 'cache_omnifunc': 0 } )
def OmniCompleter_GetCompletions_NoCache_ListFilter_test( ycm ):
  """Caching off with query 't': YCM does not filter; filtering is left
  to the omnifunc (or Vim), so the raw list comes back untouched."""
  contents = 'test.t'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 7,
                                   contents = contents )
  eq_( request_data[ 'query' ], 't' )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = [ ToBytesOnPY2( 'a' ),
                      ToBytesOnPY2( 'b' ),
                      ToBytesOnPY2( 'cdef' ) ]
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'t')" ),
    ] )
    # actual result is that the results are not filtered, as we expect the
    # omniufunc or vim itself to do this filtering
    eq_( results, omnifunc_result )
@ExpectedFailure( 'We ignore the result of the call to findstart and use our '
                  'own interpretation of where the identifier should be',
                  contains_string( "test_omnifunc(0,'t')" ) )
@YouCompleteMeInstance( { 'cache_omnifunc': 1 } )
def OmniCompleter_GetCompletsions_UseFindStart_test( ycm ):
  """Documents (as an expected failure) that YCM disregards the column
  returned by the omnifunc's findstart phase and computes the query
  itself."""
  contents = 'test.t'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 7,
                                   contents = contents )
  eq_( request_data[ 'query' ], 't' )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = [ ToBytesOnPY2( 'a' ),
                      ToBytesOnPY2( 'b' ),
                      ToBytesOnPY2( 'cdef' ) ]
  # And get the completions; findstart deliberately returns column 1.
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 1, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      # Fails here: actual result is that the findstart result (1) is ignored
      # and we use the 't' query as we normally would on the server side
      call( "test_omnifunc(0,'test.t')" ),
    ] )
    eq_( results, omnifunc_result )
@YouCompleteMeInstance( { 'cache_omnifunc': 1 } )
def OmniCompleter_GetCompletions_Cache_Object_test( ycm ):
  """Caching on: a dict-style omnifunc result ({'words': [...]}) is
  unpacked and the words are filtered against the query."""
  contents = 'test.t'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 7,
                                   contents = contents )
  eq_( request_data[ 'query' ], 't' )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = {
    'words': [
      ToBytesOnPY2( 'a' ),
      ToBytesOnPY2( 'b' ),
      ToBytesOnPY2( 'CDtEF' )
    ]
  }
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'t')" ),
    ] )
    # Only 'CDtEF' contains the query character 't'.
    eq_( results, [ ToBytesOnPY2( 'CDtEF' ) ] )
@YouCompleteMeInstance( { 'cache_omnifunc': 1 } )
def OmniCompleter_GetCompletions_Cache_ObjectList_test( ycm ):
  """Caching on: a list of completion dicts is filtered on the 'word'
  field; only the entry whose word matches the query survives."""
  contents = 'test.tt'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 8,
                                   contents = contents )
  eq_( request_data[ 'query' ], 'tt' )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = [
    {
      'word': ToBytesOnPY2( 'a' ),
      'abbr': ToBytesOnPY2( 'ABBR'),
      'menu': ToBytesOnPY2( 'MENU' ),
      'info': ToBytesOnPY2( 'INFO' ),
      'kind': ToBytesOnPY2( 'K' )
    },
    {
      'word': ToBytesOnPY2( 'test' ),
      'abbr': ToBytesOnPY2( 'ABBRTEST'),
      'menu': ToBytesOnPY2( 'MENUTEST' ),
      'info': ToBytesOnPY2( 'INFOTEST' ),
      'kind': ToBytesOnPY2( 'T' )
    }
  ]
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'tt')" ),
    ] )
    # Only the 'test' entry subsequence-matches the query 'tt'.
    eq_( results, [ omnifunc_result[ 1 ] ] )
@YouCompleteMeInstance( { 'cache_omnifunc': 0 } )
def OmniCompleter_GetCompletions_NoCache_ObjectList_test( ycm ):
  """With the cache disabled the object-list result is returned verbatim: the
  omnifunc itself is responsible for filtering against the query."""
  contents = 'test.tt'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 8,
                                   contents = contents )
  eq_( request_data[ 'query' ], 'tt' )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = [
    {
      'word': ToBytesOnPY2( 'a' ),
      'abbr': ToBytesOnPY2( 'ABBR'),
      'menu': ToBytesOnPY2( 'MENU' ),
      'info': ToBytesOnPY2( 'INFO' ),
      'kind': ToBytesOnPY2( 'K' )
    },
    {
      'word': ToBytesOnPY2( 'test' ),
      'abbr': ToBytesOnPY2( 'ABBRTEST'),
      'menu': ToBytesOnPY2( 'MENUTEST' ),
      'info': ToBytesOnPY2( 'INFOTEST' ),
      'kind': ToBytesOnPY2( 'T' )
    }
  ]
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'tt')" ),
    ] )
    # We don't filter the result - we expect the omnifunc to do that
    # based on the query we supplied (Note: that means no fuzzy matching!)
    eq_( results, omnifunc_result )
@YouCompleteMeInstance( { 'cache_omnifunc': 1 } )
def OmniCompleter_GetCompletions_Cache_ObjectListObject_test( ycm ):
  """Omnifunc returns a dict whose 'words' value is a list of completion
  dicts; with the cache enabled they are filtered against the query."""
  contents = 'test.tt'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 8,
                                   contents = contents )
  eq_( request_data[ 'query' ], 'tt' )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = {
    'words': [
      {
        'word': ToBytesOnPY2( 'a' ),
        'abbr': ToBytesOnPY2( 'ABBR'),
        'menu': ToBytesOnPY2( 'MENU' ),
        'info': ToBytesOnPY2( 'INFO' ),
        'kind': ToBytesOnPY2( 'K' )
      },
      {
        'word': ToBytesOnPY2( 'test' ),
        'abbr': ToBytesOnPY2( 'ABBRTEST'),
        'menu': ToBytesOnPY2( 'MENUTEST' ),
        'info': ToBytesOnPY2( 'INFOTEST' ),
        'kind': ToBytesOnPY2( 'T' )
      }
    ]
  }
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'tt')" ),
    ] )
    # Only the 'test' entry matches the 'tt' query.
    eq_( results, [ omnifunc_result[ 'words' ][ 1 ] ] )
@YouCompleteMeInstance( { 'cache_omnifunc': 0 } )
def OmniCompleter_GetCompletions_NoCache_ObjectListObject_test( ycm ):
  """With the cache disabled the 'words' list is returned unfiltered; the
  omnifunc is expected to have filtered on the supplied query itself."""
  contents = 'test.tt'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 8,
                                   contents = contents )
  eq_( request_data[ 'query' ], 'tt' )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = {
    'words': [
      {
        'word': ToBytesOnPY2( 'a' ),
        'abbr': ToBytesOnPY2( 'ABBR'),
        'menu': ToBytesOnPY2( 'MENU' ),
        'info': ToBytesOnPY2( 'INFO' ),
        'kind': ToBytesOnPY2( 'K' )
      },
      {
        'word': ToBytesOnPY2( 'test' ),
        'abbr': ToBytesOnPY2( 'ABBRTEST'),
        'menu': ToBytesOnPY2( 'MENUTEST' ),
        'info': ToBytesOnPY2( 'INFOTEST' ),
        'kind': ToBytesOnPY2( 'T' )
      }
    ]
  }
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'tt')" ),
    ] )
    # No FilterAndSortCandidates for cache_omnifunc=0 (we expect the omnifunc
    # to do the filtering?)
    eq_( results, omnifunc_result[ 'words' ] )
@YouCompleteMeInstance( { 'cache_omnifunc': 1 } )
def OmniCompleter_GetCompletions_Cache_List_Unicode_test( ycm ):
  """Unicode identifiers with an empty query: all candidates pass through even
  with the cache enabled (there is nothing to filter on)."""
  contents = '†åsty_π.'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 13,
                                   contents = contents )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = [ ToBytesOnPY2( '†est' ),
                      ToBytesOnPY2( 'å_unicode_identifier' ),
                      ToBytesOnPY2( 'πππππππ yummy πie' ) ]
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'')" ),
    ] )
    eq_( results, omnifunc_result )
@YouCompleteMeInstance( { 'cache_omnifunc': 0 } )
def OmniCompleter_GetCompletions_NoCache_List_Unicode_test( ycm ):
  """Same scenario as the Cache variant above but with the cache disabled
  (cache_omnifunc: 0, matching the test's name and the other NoCache tests;
  it previously duplicated the Cache test with cache_omnifunc: 1). The empty
  query means the raw omnifunc result is returned either way."""
  contents = '†åsty_π.'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 13,
                                   contents = contents )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = [ ToBytesOnPY2( '†est' ),
                      ToBytesOnPY2( 'å_unicode_identifier' ),
                      ToBytesOnPY2( 'πππππππ yummy πie' ) ]
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'')" ),
    ] )
    eq_( results, omnifunc_result )
@ExpectedFailure( 'Filtering on unicode is not supported by the server' )
@YouCompleteMeInstance( { 'cache_omnifunc': 1 } )
def OmniCompleter_GetCompletions_Cache_List_Filter_Unicode_test( ycm ):
  """Documents (as an expected failure) that server-side filtering of cached
  candidates does not work for a unicode query ('ππ')."""
  contents = '†åsty_π.ππ'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 17,
                                   contents = contents )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = [ ToBytesOnPY2( '†est' ),
                      ToBytesOnPY2( 'å_unicode_identifier' ),
                      ToBytesOnPY2( 'πππππππ yummy πie' ) ]
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'ππ')" ),
    ] )
    # Fails here: Filtering on unicode is not supported
    eq_( results, [ omnifunc_result[ 2 ] ] )
@YouCompleteMeInstance( { 'cache_omnifunc': 0 } )
def OmniCompleter_GetCompletions_NoCache_List_Filter_Unicode_test( ycm ):
  """With the cache disabled, a unicode query is passed to the omnifunc and
  its (already filtered) result is returned unchanged."""
  contents = '†åsty_π.ππ'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 17,
                                   contents = contents )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = [ ToBytesOnPY2( 'πππππππ yummy πie' ) ]
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'ππ')" ),
    ] )
    eq_( results, omnifunc_result )
@ExpectedFailure( 'Filtering on unicode is not supported by the server' )
@YouCompleteMeInstance( { 'cache_omnifunc': 1 } )
def OmniCompleter_GetCompletions_Cache_ObjectList_Unicode_test( ycm ):
  """Documents (as an expected failure) that cached object-list candidates
  cannot be filtered by a unicode query ('ππ')."""
  contents = '†åsty_π.ππ'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 17,
                                   contents = contents )
  eq_( request_data[ 'query' ], 'ππ' )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = [
    {
      'word': ToBytesOnPY2( 'ålpha∫et' ),
      'abbr': ToBytesOnPY2( 'å∫∫®'),
      'menu': ToBytesOnPY2( 'µ´~¨á' ),
      'info': ToBytesOnPY2( '^~fo' ),
      'kind': ToBytesOnPY2( '˚' )
    },
    {
      'word': ToBytesOnPY2( 'π†´ß†π' ),
      'abbr': ToBytesOnPY2( 'ÅııÂʉÍÊ'),
      'menu': ToBytesOnPY2( '˜‰ˆËʉÍÊ' ),
      'info': ToBytesOnPY2( 'ÈˆÏØÊ‰ÍÊ' ),
      'kind': ToBytesOnPY2( 'Ê' )
    }
  ]
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'ππ')" ),
    ] )
    # Fails here: Filtering on unicode is not supported
    eq_( results, [ omnifunc_result[ 1 ] ] )
@YouCompleteMeInstance( { 'cache_omnifunc': 1 } )
def OmniCompleter_GetCompletions_Cache_ObjectListObject_Unicode_test( ycm ):
  """Unicode candidates with an ASCII query ('t'): filtering works, and the
  surviving entries come back as unicode strings (not bytes) because they
  round-trip through the server's FilterAndSortCandidates."""
  contents = '†åsty_π.t'
  request_data = BuildRequestWrap( line_num = 1,
                                   column_num = 14,
                                   contents = contents )
  eq_( request_data[ 'query' ], 't' )
  # Make sure there is an omnifunc set up.
  with patch( 'vim.eval', return_value = ToBytesOnPY2( 'test_omnifunc' ) ):
    ycm._omnicomp.OnFileReadyToParse( request_data )
  omnifunc_result = {
    'words': [
      {
        'word': ToBytesOnPY2( 'ålpha∫et' ),
        'abbr': ToBytesOnPY2( 'å∫∫®'),
        'menu': ToBytesOnPY2( 'µ´~¨á' ),
        'info': ToBytesOnPY2( '^~fo' ),
        'kind': ToBytesOnPY2( '˚' )
      },
      {
        'word': ToBytesOnPY2( 'π†´ß†π' ),
        'abbr': ToBytesOnPY2( 'ÅııÂʉÍÊ'),
        'menu': ToBytesOnPY2( '˜‰ˆËʉÍÊ' ),
        'info': ToBytesOnPY2( 'ÈˆÏØÊ‰ÍÊ' ),
        'kind': ToBytesOnPY2( 'Ê' )
      },
      {
        'word': ToBytesOnPY2( 'test' ),
        'abbr': ToBytesOnPY2( 'ÅııÂʉÍÊ'),
        'menu': ToBytesOnPY2( '˜‰ˆËʉÍÊ' ),
        'info': ToBytesOnPY2( 'ÈˆÏØÊ‰ÍÊ' ),
        'kind': ToBytesOnPY2( 'Ê' )
      }
    ]
  }
  # And get the completions
  with patch( 'vim.eval',
              new_callable = ExtendedMock,
              side_effect = [ 6, omnifunc_result ] ) as vim_eval:
    results = ycm._omnicomp.ComputeCandidates( request_data )
    vim_eval.assert_has_exact_calls( [
      call( 'test_omnifunc(1,"")' ),
      call( "test_omnifunc(0,'t')" ),
    ] )
    # Note: the filtered results are all unicode objects (not bytes) because
    # they are passed through the FilterAndSortCandidates machinery
    # (via the server)
    eq_( results, [ {
      'word': 'test',
      'abbr': 'ÅııÂʉÍÊ',
      'menu': '˜‰ˆËʉÍÊ',
      'info': 'ÈˆÏØÊ‰ÍÊ',
      'kind': 'Ê'
    } ] )
| snakeleon/YouCompleteMe-x86 | python/ycm/tests/omni_completer_test.py | Python | gpl-3.0 | 22,455 |
import urllib,urllib2,re,string,sys,os
import xbmc, xbmcgui, xbmcaddon, xbmcplugin
from resources.libs import main
#Mash Up - by Mash2k3 2012.
# Module-level plugin configuration shared by every function below.
addon_id = 'plugin.video.movie25'
selfAddon = xbmcaddon.Addon(id=addon_id)
# Base path for the addon's artwork (icons appended per menu entry).
art = main.art
# Display name used when listing results from this scraper (e.g. watch history).
prettyName = 'iWatchOnline'
def AtoZiWATCHtv():
    """Build the alphabetic TV-show index: a '0-9' bucket followed by A-Z."""
    base = 'http://www.iwatchonline.to/tv-show?startwith=%s&p=0'
    main.addDir('0-9', base % '09', 589, art + '/09.png')
    for letter in string.ascii_uppercase:
        key = letter.lower()
        main.addDir(letter, base % key, 589, art + '/' + key + '.png')
    main.GA("Tvshows", "A-ZTV")
    main.VIEWSB()
def AtoZiWATCHm():
    """Build the alphabetic movie index: a '0-9' bucket followed by A-Z."""
    base = 'http://www.iwatchonline.to/movies?startwith=%s&p=0'
    main.addDir('0-9', base % '09', 587, art + '/09.png')
    for letter in string.ascii_uppercase:
        key = letter.lower()
        main.addDir(letter, base % key, 587, art + '/' + key + '.png')
    main.GA("Movies", "A-ZM")
    main.VIEWSB()
def iWatchMAIN():
    """Top-level iWatchOnline menu: Movies, TV Shows and today's schedule."""
    entries = (
        ('Movies', 'http://www.iwatchonline.org/', 586, '/iwatchm.png'),
        ('Tv Shows', 'http://www.iwatchonline.org/', 585, '/iwatcht.png'),
        ('Todays Episodes', 'http://www.iwatchonline.to/tv-schedule', 592, '/iwatcht.png'),
    )
    for title, url, mode, icon in entries:
        main.addDir(title, url, mode, art + icon)
    main.GA("Plugin", "iWatchonline")
    main.VIEWSB2()
def iWatchMOVIES():
    """Movie section menu: search, A-Z, sorted listings and genres."""
    entries = (
        ('Search Movies', 'http://www.iwatchonline.to', 644, '/search.png'),
        ('A-Z', 'http://www.iwatchonline.to', 595, '/az.png'),
        ('Popular', 'http://www.iwatchonline.to/movies?sort=popular&p=0', 587, '/iwatchm.png'),
        ('Latest Added', 'http://www.iwatchonline.to/movies?sort=latest&p=0', 587, '/iwatchm.png'),
        ('Featured Movies', 'http://www.iwatchonline.to/movies?sort=featured&p=0', 587, '/iwatchm.png'),
        ('Latest HD Movies', 'http://www.iwatchonline.to/movies?quality=hd&sort=latest&p=0', 587, '/iwatchm.png'),
        ('Upcoming', 'http://www.iwatchonline.to/movies?sort=upcoming&p=0', 587, '/iwatchm.png'),
        ('Genre', 'http://www.iwatchonline.to', 596, '/genre.png'),
    )
    for title, url, mode, icon in entries:
        main.addDir(title, url, mode, art + icon)
    main.GA("iWatchonline", "Movies")
    main.VIEWSB2()
def iWatchTV():
    """TV section menu: search, A-Z, schedule, sorted listings and genres."""
    entries = (
        ('Search TV Shows', 'http://www.iwatchonline.to', 642, '/search.png'),
        ('A-Z', 'http://www.iwatchonline.to', 593, '/az.png'),
        ('Todays Episodes', 'http://www.iwatchonline.to/tv-schedule', 592, '/iwatcht.png'),
        ('Featured Shows', 'http://www.iwatchonline.to/tv-show?sort=featured&p=0', 589, '/iwatcht.png'),
        ('Popular Shows', 'http://www.iwatchonline.to/tv-show?sort=popular&p=0', 589, '/iwatcht.png'),
        ('Latest Additions', 'http://www.iwatchonline.to/tv-show?sort=latest&p=0', 589, '/iwatcht.png'),
        ('Genre', 'http://www.iwatchonline.to', 594, '/genre.png'),
    )
    for title, url, mode, icon in entries:
        main.addDir(title, url, mode, art + icon)
    main.GA("iWatchonline", "Tvshows")
    main.VIEWSB2()
def SearchhistoryTV():
    """Show the saved TV search history, or prompt for a search if none exists.

    History entries are read from the 'SearchHistoryTv' file in the addon's
    data path and listed newest-first.
    """
    seapath=os.path.join(main.datapath,'Search')
    SeaFile=os.path.join(seapath,'SearchHistoryTv')
    if not os.path.exists(SeaFile):
        # No history yet: go straight to the search prompt.
        SEARCHTV()
    else:
        main.addDir('Search TV Shows','###',643,art+'/search.png')
        main.addDir('Clear History',SeaFile,128,art+'/cleahis.png')
        thumb=art+'/link.png'
        # Each saved entry is stored as: search="<term>",
        searchis=re.compile('search="(.+?)",').findall(open(SeaFile,'r').read())
        for seahis in reversed(searchis):
            seahis=seahis.replace('%20',' ')
            url=seahis
            main.addDir(seahis,url,643,thumb)
def superSearch(encode,type):
    """Query the site's search endpoint for Mash Up's global 'super search'.

    encode -- the search term; type -- 'Movies' or anything else for TV.
    Returns a list of (name, source, url, thumb, mode, is_dir) tuples, or an
    empty list on any failure (best-effort by design).
    """
    try:
        # The site's POST API uses 'm' for movies and 't' for TV shows.
        if type=='Movies': type='m'
        else: type='t'
        returnList=[]
        search_url = 'http://www.iwatchonline.to/search'
        from t0mm0.common.net import Net as net
        search_content = net().http_POST(search_url, { 'searchquery' : encode, 'searchin' : type} ).content.encode('utf-8')
        # Results live in the first <table> of the response.
        r = re.findall('(?s)<table(.+?)</table>',search_content)
        r=main.unescapes(r[0])
        match=re.compile('<img.+?src=\"(.+?)\".+?<a.+?href=\"(.+?)\">(.+?)</a>').findall(r)
        for thumb,url,name in match:
            # Mode 588 resolves a movie page, 590 lists a show's seasons.
            if type=='m':
                returnList.append((name,prettyName,url,thumb,588,True))
            else:
                returnList.append((name,prettyName,url,thumb,590,True))
        return returnList
    except: return []
def SEARCHTV(murl = ''):
    """Prompt for / replay a TV search term and list the matching shows.

    murl -- a previous search term from the history file, or '' to prompt.
    Returns False if the user cancelled the prompt.
    """
    encode = main.updateSearchFile(murl,'TV')
    if not encode: return False
    search_url = 'http://www.iwatchonline.to/search'
    from t0mm0.common.net import Net as net
    # 'searchin': 't' restricts the site search to TV shows.
    search_content = net().http_POST(search_url, { 'searchquery' : encode, 'searchin' : 't'} ).content.encode('utf-8')
    r = re.findall('(?s)<table(.+?)</table>',search_content)
    r=main.unescapes(r[0])
    match=re.compile('<img[^>]+?src="([^"]+?)\".+?<a[^>]+?href="([^"]+?)">([^<]+?)</a>').findall(r)
    for thumb,url,name in match:
        main.addDirT(name,url,590,thumb,'','','','','')
    main.GA("iWatchonline","Search")
def SearchhistoryM():
    """Show the saved movie search history, or prompt for a search if none.

    Mirrors SearchhistoryTV but reads 'SearchHistory25' and uses the movie
    search mode (645).
    """
    seapath=os.path.join(main.datapath,'Search')
    SeaFile=os.path.join(seapath,'SearchHistory25')
    if not os.path.exists(SeaFile):
        SEARCHM('')
    else:
        main.addDir('Search Movies','###',645,art+'/search.png')
        main.addDir('Clear History',SeaFile,128,art+'/cleahis.png')
        thumb=art+'/link.png'
        # Each saved entry is stored as: search="<term>",
        searchis=re.compile('search="(.+?)",').findall(open(SeaFile,'r').read())
        for seahis in reversed(searchis):
            seahis=seahis.replace('%20',' ')
            url=seahis
            main.addDir(seahis,url,645,thumb)
def SEARCHM(murl):
    """Prompt for / replay a movie search term and list the matches with a
    progress dialog while metadata is cached.

    murl -- a previous search term from the history file, or '' to prompt.
    Returns False if the user cancelled the prompt or the progress dialog.
    """
    encode = main.updateSearchFile(murl,'Movies')
    if not encode: return False
    search_url = 'http://www.iwatchonline.to/search'
    from t0mm0.common.net import Net as net
    # 'searchin': 'm' restricts the site search to movies.
    search_content = net().http_POST(search_url, { 'searchquery' : encode, 'searchin' : 'm'} ).content.encode('utf-8')
    r = re.findall('(?s)<table(.+?)</table>',search_content)
    r=main.unescapes(r[0])
    match=re.compile('<img.+?src=\"(.+?)\".+?<a.+?href=\"(.+?)\">(.+?)</a>').findall(r)
    dialogWait = xbmcgui.DialogProgress()
    ret = dialogWait.create('Please wait until Movie list is cached.')
    totalLinks = len(match)
    loadedLinks = 0
    remaining_display = 'Movies loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
    dialogWait.update(0,'[B]Will load instantly from now on[/B]',remaining_display)
    xbmc.executebuiltin("XBMC.Dialog.Close(busydialog,true)")
    for thumb,url,name in match:
        # addDirM fetches per-title metadata, hence the progress feedback.
        main.addDirM(name,url,588,thumb,'','','','','')
        loadedLinks = loadedLinks + 1
        percent = (loadedLinks * 100)/totalLinks
        remaining_display = 'Movies loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
        dialogWait.update(percent,'[B]Will load instantly from now on[/B]',remaining_display)
        if dialogWait.iscanceled(): return False
    dialogWait.close()
    del dialogWait
    main.GA("iWatchonline","Search")
def ENTYEAR():
    """Prompt the user for a four-digit year and list that year's movies.

    Fixes the original validation, which compared the entered digits as
    *strings* against a stale hard-coded '2014': the year is now compared
    numerically and the upper bound tracks the current calendar year.
    """
    import datetime
    dialog = xbmcgui.Dialog()
    d = dialog.numeric(0, 'Enter Year')
    if d:
        try:
            year = int(d)
        except ValueError:
            # dialog.numeric should only return digits; treat anything else
            # as invalid input.
            year = 0
        this_year = datetime.date.today().year
        if 1900 < year <= this_year:
            encode = urllib.quote(d)
            surl = 'http://www.iwatchonline.to/main/content_more/movies/?year=' + encode + '&start=0'
            iWatchLISTMOVIES(surl)
        else:
            dialog = xbmcgui.Dialog()
            ret = dialog.ok('Wrong Entry',
                            'Must enter year in four digit format like 1999',
                            'Entry must be between 1900 and ' + str(this_year))
def GotoPage(url):
    """Jump to an arbitrary page of the current movie listing.

    url -- the current listing URL (contains a '&p=' offset). Prompts for a
    page number, converts it to the site's 25-items-per-page offset and
    re-lists. Returns False if the user cancels.
    """
    dialog = xbmcgui.Dialog()
    # Keep everything before the '&p=' offset so filters/sort are preserved.
    r=re.findall('http://www.iwatchonline.to/movies(.+?)&p=.+?',url)
    d = dialog.numeric(0, 'Please Enter Page number.')
    if d:
        # Page N starts at item offset (N-1)*25.
        temp=int(d)-1
        page= int(temp)*25
        encode=str(page)
        url='http://www.iwatchonline.to/movies'+r[0]
        surl=url+'&p='+encode
        iWatchLISTMOVIES(surl)
    else:
        dialog = xbmcgui.Dialog()
        xbmcplugin.endOfDirectory(int(sys.argv[1]), False, False)
        return False
def iWatchGenreTV():
    """Scrape the TV genre list from the site and add one folder per genre
    (skipping the Adult genre)."""
    link=main.OPENURL('http://www.iwatchonline.to/tv-show')
    link=link.replace('\r','').replace('\n','').replace('\t','').replace('&nbsp;','')
    match=re.compile('<li.+?a href=".?gener=([^<]+)">(.+?)</a>.+?/li>').findall(link)
    for url,genre in match:
        genre=genre.replace(' ','')
        if not 'Adult' in genre:
            main.addDir(genre,'http://www.iwatchonline.to/tv-show?sort=popular&gener='+url+'',589,art+'/folder.png')
    main.GA("Tvshows","GenreT")
def iWatchGenreM():
    """Scrape the movie genre list from the site and add one folder per genre
    (skipping the Adult genre)."""
    link=main.OPENURL('http://www.iwatchonline.to/movies')
    link=link.replace('\r','').replace('\n','').replace('\t','').replace('&nbsp;','')
    match=re.compile('<li.+?a href=".?gener=([^<]+)">(.+?)</a>.+?/li>').findall(link)
    for url,genre in match:
        genre=genre.replace(' ','')
        if not 'Adult' in genre:
            main.addDir(genre,'http://www.iwatchonline.to/movies?sort=popular&gener='+url+'&p=0',587,art+'/folder.png')
    main.GA("Movies","GenreM")
def iWatchYearM():
    """List movie folders by release year, 2013 down to 2003, plus a manual
    'Enter Year' option for anything else."""
    base = 'http://www.iwatchonline.to/main/content_more/movies/?year=%d&start=0'
    for year in range(2013, 2002, -1):
        # 2013 uses the generic 'year' icon; older years have dedicated icons.
        icon = '/year.png' if year == 2013 else '/%d.png' % year
        main.addDir(str(year), base % year, 587, art + icon)
    main.addDir('Enter Year', 'iwatchonline', 653, art + '/enteryear.png')
def iWatchLISTMOVIES(murl):
    """Scrape one page of a movie listing and add a directory entry per title,
    with a progress dialog while metadata is cached.

    murl -- the listing URL including the '&p=<offset>' pagination parameter.
    Appends 'Enter Page #' and 'Next Page' entries when a full page (25 items)
    was returned. Returns False if the user cancels the progress dialog.
    """
    main.GA("Movies","List")
    link=main.OPENURL(murl)
    link=link.replace('\r','').replace('\n','').replace('\t','').replace('&nbsp;','')
    videos = re.search('<ul class="thumbnails">(.+?)</ul>', link)
    if videos:
        videos = videos.group(1)
        match=re.compile('<li.+?<a.+?href=\"(.+?)\".+?<img.+?src=\"(.+?)\".+?<div class=\"title.+?>(.+?)<div').findall(videos)
        dialogWait = xbmcgui.DialogProgress()
        ret = dialogWait.create('Please wait until Movie list is cached.')
        totalLinks = len(match)
        loadedLinks = 0
        remaining_display = 'Movies loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
        dialogWait.update(0,'[B]Will load instantly from now on[/B]',remaining_display)
        xbmc.executebuiltin("XBMC.Dialog.Close(busydialog,true)")
        for url,thumb,name in match:
            main.addDirIWO(name,url,588,thumb,'','','','','')
            loadedLinks = loadedLinks + 1
            percent = (loadedLinks * 100)/totalLinks
            remaining_display = 'Movies loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
            dialogWait.update(percent,'[B]Will load instantly from now on[/B]',remaining_display)
            if (dialogWait.iscanceled()):
                return False
        dialogWait.close()
        del dialogWait
        # A full page of 25 results implies more pages exist.
        if len(match)==25:
            paginate=re.compile('([^<]+)&p=([^<]+)').findall(murl)
            for purl,page in paginate:
                i=int(page)+25
                pg=(int(page)/25)+2
                # if pg >2:
                # main.addDir('[COLOR red]Home[/COLOR]','',2000,art+'/home.png')
                main.addDir('[COLOR red]Enter Page #[/COLOR]',murl,654,art+'/gotopage.png')
                main.addDir('[COLOR blue]Page '+ str(pg)+'[/COLOR]',purl+'&p='+str(i),587,art+'/next2.png')
    xbmcplugin.setContent(int(sys.argv[1]), 'Movies')
    main.VIEWS()
def iWatchToday(murl):
    """List episodes from the site's TV schedule for today plus extra days.

    murl -- the schedule URL. Fetches up to `daysback` additional schedule
    pages linked from the first one, then lists every episode that has at
    least one link available. Returns False if the user cancels the dialog.
    """
    main.GA("Tvshows","TodaysList")
    link=main.OPENURL(murl)
    daysback = 2
    for x in range(0, daysback):
        # Follow the previous/next-day links embedded in the schedule page
        # and concatenate their HTML so one regex pass covers all days.
        match = re.findall(r"</i></a> <a href='(.*?)'" , link)
        if(match):
            link = link + main.OPENURL("http://www.iwatchonline.to/tv-schedule" + match[x])
    link=link.replace('\r','').replace('\n','').replace('\t','').replace('&nbsp;','')
    link = re.sub('>\s*','>',link)
    link = re.sub('\s*<','<',link)
    match=re.compile('<img src="([^"]+?)"[^<]+?<br /><a href="([^"]+?)">(.+?)</a></td><td.+?>([^<]+?)</td><td.+?>([^<]+?)</td>.*?>(\d{,2}) Link\(s\)', re.M).findall(link)
    dialogWait = xbmcgui.DialogProgress()
    ret = dialogWait.create('Please wait until Show list is cached.')
    totalLinks = len(match)
    loadedLinks = 0
    remaining_display = 'Episodes loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
    dialogWait.update(0,'[B]Will load instantly from now on[/B]',remaining_display)
    xbmc.executebuiltin("XBMC.Dialog.Close(busydialog,true)")
    for thumb,url,name,episea,epiname,active in match:
        # 'active' is the link count: skip episodes with no links yet.
        if(active == '0'):
            totalLinks -= 1
            continue
        name=name.strip()
        thumb=thumb.strip()
        url=url.strip()
        episea=episea.strip()
        epiname=epiname.strip()
        name=name.replace('(','').replace(')','')
        name=name.replace('(\d{4})','')
        main.addDirTE(name+' '+episea+' [COLOR blue]'+epiname+'[/COLOR]',url,588,thumb,'','','','','')
        loadedLinks = loadedLinks + 1
        percent = (loadedLinks * 100)/totalLinks
        remaining_display = 'Episodes loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
        dialogWait.update(percent,'[B]Will load instantly from now on[/B]',remaining_display)
        if (dialogWait.iscanceled()):
            return False
    dialogWait.close()
    del dialogWait
def iWatchLISTSHOWS(murl):
    """Scrape one page of a TV-show listing and add a directory entry per show.

    murl -- the listing URL including the '&p=<offset>' pagination parameter.
    Appends a 'Next' entry when a full page (25 items) was returned.
    """
    main.GA("Tvshows","List")
    link=main.OPENURL(murl)
    link=link.replace('\r','').replace('\n','').replace('\t','').replace('&nbsp;','')
    videos = re.search('<ul class="thumbnails">(.+?)</ul>', link)
    if videos:
        videos = videos.group(1)
        match=re.compile('<li.+?<a[^>]+?href=\"([^"]+?)\".+?<img[^>]+?src=\"([^"]+?)\".+?<div class=\"title[^>]+?>([^>]+?)<div').findall(videos)
        for url,thumb,name in match:
            main.addDirT(name,url,590,thumb,'','','','','')
        # A full page of 25 results implies more pages exist.
        if len(match)==25:
            paginate=re.compile('([^<]+)&p=([^<]+)').findall(murl)
            for purl,page in paginate:
                i=int(page)+25
                main.addDir('[COLOR blue]Next[/COLOR]',purl+'&p='+str(i),589,art+'/next2.png')
    xbmcplugin.setContent(int(sys.argv[1]), 'Movies')
    main.VIEWS()
def iWatchSeason(name,murl,thumb):
    """Scrape a show's page and add one folder per season heading.

    name/murl/thumb -- the show title, its page URL and artwork; each season
    entry re-uses the show URL (mode 591 lists that season's episodes).
    """
    link=main.OPENURL(murl)
    link=link.replace('\r','').replace('\n','').replace('\t','').replace('&nbsp;','')
    match=re.compile('<h5><i.+?</i>.*?(.+?)</h5>').findall(link)
    for season in match:
        main.addDir(name.strip()+' '+season.strip(),murl,591,thumb,'')
def GET_HTML(url):
    """Fetch *url* with a mobile Safari User-Agent and return the body with
    backslashes stripped.

    The response handle is now closed in a ``finally`` block so it is not
    leaked when ``read()`` raises (the original only closed on success).
    """
    req = urllib2.Request(url)
    # Mobile UA: the site serves simpler, easier-to-scrape markup to phones.
    req.add_header('User-Agent', 'Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420+ (KHTML, like Gecko) Version/3.0 Mobile/1A543 Safari/419.3')
    response = urllib2.urlopen(req)
    try:
        link = response.read()
    finally:
        response.close()
    return link.replace('\\','')
def _decode_callback(matches):
    """re.sub callback: convert a decimal character reference to its unicode
    character, or return the digits unchanged if conversion fails."""
    id = matches.group(1)
    try: return unichr(int(id))
    except: return id
def decode(data):
    """Expand decimal HTML character references (&#NNN;) in *data* and strip
    surrounding whitespace from the result."""
    entity_pattern = r"&#(\d+)(;|(?=\s))"
    return re.sub(entity_pattern, _decode_callback, data).strip()
def PANEL_REPLACER(content):
    """Inline the site's JSON 'panel-id.N' payloads into the page HTML.

    The site hides link tables in JSON blobs keyed "panel-id.0", "panel-id.1",
    ... and marks their insertion points with 'begin panel-id.N' comments.
    Each blob is spliced into its marker (un-commenting the surrounding HTML),
    then the content is unescaped and entity-decoded for scraping.
    """
    panel_exists = True
    panel_id = 0
    while panel_exists == True:
        panel_name = "panel-id." + str(panel_id)
        panel_search_pattern = "(?s)\"" + panel_name + "\"\:\[\{(.+?)\}\]"
        panel_data = re.search(panel_search_pattern, content)
        if panel_data:
            panel_data = panel_data.group(1)
            # Close the comment before the payload and reopen it after, so the
            # surrounding commented-out HTML becomes live markup.
            content = re.sub("begin " + panel_name, "-->" + panel_data + "<!--", content)
            content = re.sub(panel_search_pattern, "panel used", content)
            panel_id = panel_id + 1
        else:
            # Panels are numbered consecutively; the first miss ends the scan.
            panel_exists = False
    content = main.unescapes(content)
    content = re.sub("\\\"", "\"", content)
    from resources.universal import _common as univ_common
    content = univ_common.str_conv(decode(content))
    return content
def iWatchEpisode(mname,murl):
    """List the episodes of one season of a show.

    mname -- the display name ending in 'Season <N>' (as built by
    iWatchSeason); murl -- the show's page URL. Scrapes the season's episode
    table and adds one entry per episode whose URL matches the season number.
    Returns False if the user cancels the progress dialog.
    """
    seanum = mname.split('Season ')[1]
    tv_content=main.OPENURL(murl)
    link = PANEL_REPLACER(tv_content)
    descs=re.compile('<meta name="description" content="(.+?)">').findall(link)
    if len(descs)>0: desc=descs[0]
    else: desc=''
    thumbs=re.compile('<div class="movie-cover span2"><img src="(.+?)" alt=".+?" class=".+?" />').findall(link)
    if len(thumbs)>0: thumb=thumbs[0]
    else: thumb=''
    # The episode table for season N follows a 'seasonN' anchor in the page.
    episodes = re.search('(?sim)season'+seanum+'(.+?)</table>', link)
    if episodes:
        episodes = episodes.group(1)
        match=re.compile('<a[^>]+?href=\"([^"]+?)\".+?</i>([^<]+?)</a>.+?<td>([^<]+?)</td>').findall(episodes)
        dialogWait = xbmcgui.DialogProgress()
        ret = dialogWait.create('Please wait until Show list is cached.')
        totalLinks = len(match)
        loadedLinks = 0
        remaining_display = 'Episodes loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
        dialogWait.update(0,'[B]Will load instantly from now on[/B]',remaining_display)
        for url,epi,name in match:
            mname=mname.replace('(','').replace(')','')
            mname = re.sub(" \d{4}", "", mname)
            # Keep only episodes whose URL carries this season's 's<N>' tag.
            sea=re.compile('s'+str(seanum)).findall(url)
            if len(sea)>0:
                main.addDirTE(mname.strip()+' '+epi.strip()+' [COLOR blue]'+name.strip()+'[/COLOR]',url,588,thumb,desc,'','','','')
                loadedLinks = loadedLinks + 1
                percent = (loadedLinks * 100)/totalLinks
                remaining_display = 'Episodes loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
                dialogWait.update(percent,'[B]Will load instantly from now on[/B]',remaining_display)
                if (dialogWait.iscanceled()):
                    return False
        dialogWait.close()
        del dialogWait
    if selfAddon.getSetting('auto-view') == 'true':
        xbmc.executebuiltin("Container.SetViewMode(%s)" % selfAddon.getSetting('episodes-view'))
def GetUrl(url):
    """Fetch *url* and return the src of the first <iframe> found.

    NOTE(review): raises IndexError if the page contains no iframe — callers
    appear to rely on a match always existing; confirm before hardening.
    """
    link=main.OPENURL(url)
    link=link.replace('\r','').replace('\n','').replace('\t','').replace('&nbsp;','')
    match=re.compile('<iframe.+?src=\"(.+?)\"').findall(link)
    link=match[0]
    return link
def iWatchLINK(mname,url):
    """List the hoster links available for a movie/episode page.

    mname -- the display name; url -- the title's page URL. Each row in the
    page's link table names a file host; only hosts that urlresolver can
    resolve are listed (mode 649 plays/downloads the chosen link).
    """
    link=main.OPENURL(url)
    movie_content = main.unescapes(link)
    movie_content = re.sub("\\\"", "\"", movie_content)
    movie_content=movie_content.replace('\'','')
    from resources.universal import _common as univ_common
    link2 = univ_common.str_conv(decode(movie_content))
    if selfAddon.getSetting("hide-download-instructions") != "true":
        main.addLink("[COLOR red]For Download Options, Bring up Context Menu Over Selected Link.[/COLOR]",'','')
    links = re.search('<tbody>(.+?)</tbody>', link2)
    if links:
        links = links.group(1)
        match=re.compile('<a href="([^"]+?)".+?<img.+?> ([^<]+?)</a>.+?<td>.+?<td>.+?<td>([^<]+?)</td>', re.DOTALL).findall(links)
        import urlresolver
        for url, name, qua in match:
            # Normalise the host label (e.g. '.putlocker.com' -> 'putlocker')
            # so it can be matched against urlresolver's host names and the
            # bundled host icons.
            name=name.replace(' ','')
            if name[0:1]=='.':
                name=name[1:]
            name=name.split('.')[0]
            hosted_media = urlresolver.HostedMediaFile(host=name.lower(), media_id=name.lower())
            if hosted_media:
                main.addDown2(mname+' [COLOR red]('+qua+')[/COLOR]'+' [COLOR blue]'+name.upper()+'[/COLOR]',url,649,art+'/hosts/'+name.lower()+'.png',art+'/hosts/'+name.lower()+'.png')
def iWatchLINKB(mname,url):
    """Resolve a chosen hoster link and play it with metadata and bookmarks.

    mname -- the display name (episode names contain 'Season N Episode M',
    which selects episode metadata; otherwise movie metadata is used);
    url -- the hoster page URL whose iframe holds the actual video.
    Returns True (ok) even when resolution fails inside the try block.
    """
    main.GA("iWatchonline","Watched")
    ok=True
    hname=mname
    xbmc.executebuiltin("XBMC.Notification(Please Wait!,Opening Link,3000)")
    mname=mname.split(' [COLOR red]')[0]
    # Episode names embed 'Season <N> Episode <M>'; use that to pick the
    # right metadata lookup.
    r = re.findall('Season(.+?)Episode([^<]+)',mname)
    if r:
        infoLabels =main.GETMETAEpiT(mname,'','')
        video_type='episode'
        season=infoLabels['season']
        episode=infoLabels['episode']
    else:
        infoLabels =main.GETMETAT(mname,'','','')
        video_type='movie'
        season=''
        episode=''
    img=infoLabels['cover_url']
    fanart =infoLabels['backdrop_url']
    imdb_id=infoLabels['imdb_id']
    infolabels = { 'supports_meta' : 'true', 'video_type':video_type, 'name':str(infoLabels['title']), 'imdb_id':str(infoLabels['imdb_id']), 'season':str(season), 'episode':str(episode), 'year':str(infoLabels['year']) }
    link=main.OPENURL(url)
    link=main.unescapes(link)
    # The playable stream is wrapped in an iframe (sometimes rewritten to
    # 'pagespeed_iframe' by the site's optimiser).
    match=re.compile('<(?:iframe|pagespeed_iframe).+?src=\"(.+?)\"').findall(link)
    try :
        xbmc.executebuiltin("XBMC.Notification(Please Wait!,Resolving Link,3000)")
        stream_url = main.resolve_url(match[0])
        if stream_url == False: return
        infoL={'Title': infoLabels['title'], 'Plot': infoLabels['plot'], 'Genre': infoLabels['genre'],'originalTitle': main.removeColoredText(infoLabels['title'])}
        # play with bookmark
        from resources.universal import playbackengine
        player = playbackengine.PlayWithoutQueueSupport(resolved_url=stream_url, addon_id=addon_id, video_type=video_type, title=str(infoLabels['title']),season=str(season), episode=str(episode), year=str(infoLabels['year']),img=img,infolabels=infoL, watchedCallbackwithParams=main.WatchedCallbackwithParams,imdb_id=imdb_id)
        #WatchHistory
        if selfAddon.getSetting("whistory") == "true":
            from resources.universal import watchhistory
            wh = watchhistory.WatchHistory('plugin.video.movie25')
            wh.add_item(hname+' '+'[COLOR green]'+prettyName+'[/COLOR]', sys.argv[0]+sys.argv[2], infolabels=infolabels, img=str(img), fanart=str(fanart), is_folder=False)
        player.KeepAlive()
        return ok
    except Exception, e:
        # NOTE(review): if resolve_url itself raised, stream_url is unbound
        # here and this handler raises NameError — confirm intended behavior.
        if stream_url != False:
            main.ErrorReport(e)
        return ok
| marduk191/plugin.video.movie25 | resources/libs/plugins/iwatchonline.py | Python | gpl-3.0 | 22,479 |
import markdown
import re
from django.core.urlresolvers import reverse
from django.template.context import Context
from django.template.loader import render_to_string
from wiki.core.permissions import can_read
# Matches a whole line containing an [attachment:<id>] reference, capturing
# the text before it, the numeric id, and the text after it.
ATTACHMENT_RE = re.compile(r'(?P<before>.*)(\[attachment\:(?P<id>\d+)\])(?P<after>.*)', re.IGNORECASE)
from wiki.plugins.attachments import models
class AttachmentExtension(markdown.Extension):
    """ Attachment extension for Python-Markdown: renders django-wiki
    [attachment:id] references as download links. """
    def extendMarkdown(self, md, md_globals):
        """ Register the attachment preprocessor after the html_block
        preprocessor. """
        md.preprocessors.add('dw-attachments', AttachmentPreprocessor(md), '>html_block')
class AttachmentPreprocessor(markdown.preprocessors.Preprocessor):
    """django-wiki attachment preprocessor - parse text for [attachment:id] references. """
    def run(self, lines):
        """For each input line containing an [attachment:id] reference, render
        a download link for the attachment (stashed as raw HTML) or, if the
        attachment is deleted/unknown, an inline deletion notice."""
        new_text = []
        for line in lines:
            m = ATTACHMENT_RE.match(line)
            if m:
                attachment_id = m.group('id').strip()
                before = m.group('before')
                after = m.group('after')
                try:
                    attachment = models.Attachment.objects.get(
                        articles__current_revision__deleted=False,
                        id=attachment_id, current_revision__deleted=False
                    )
                    url = reverse('wiki:attachments_download', kwargs={'article_id': self.markdown.article.id,
                                                                       'attachment_id':attachment.id,})
                    # The readability of the attachment is decided relative
                    # to the owner of the original article.
                    # I.e. do not insert attachments in other articles that
                    # the original uploader cannot read, that would be out
                    # of scope!
                    attachment_can_read = can_read( self.markdown.article,
                                                    attachment.article.owner)
                    html = render_to_string(
                        "wiki/plugins/attachments/render.html",
                        Context({
                            'url': url,
                            'filename': attachment.original_filename,
                            'attachment_can_read': attachment_can_read,
                        }))
                    # Stash the rendered HTML so later markdown passes leave
                    # it untouched; the placeholder replaces the whole match.
                    line = self.markdown.htmlStash.store(html, safe=True)
                except models.Attachment.DoesNotExist:
                    # NOTE(review): m.group(1) is the 'before' capture, not
                    # the '[attachment:id]' token (group 2). When 'before' is
                    # empty, str.replace('') inserts the message between
                    # every character, and 'before'/'after' are then added
                    # again below. This looks like it should replace
                    # m.group(2) — confirm upstream intent before changing.
                    line = line.replace(m.group(1), u"""<span class="attachment attachment-deleted">Attachment with ID #%s is deleted.</span>""" % attachment_id)
                line = before + line + after
            new_text.append(line)
        return new_text
| habibmasuro/django-wiki | wiki/plugins/attachments/markdown_extensions.py | Python | gpl-3.0 | 2,806 |
#!/usr/bin/python
#
# Copyright 2012 Lee Verberne <lee@blarg.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Command line front end to all these hats
"This is the command line interface to a collection of platform control scripts"
import logging
import optparse
import os
import sys
import ubik.config
import ubik.defaults
import ubik.hats
config = ubik.config.UbikConfig()
options = None
log = logging.getLogger('rug.cli')
def init_cli(args=None):
    """Parse global command-line options and load configuration.

    Populates the module-level ``options`` namespace and ``config`` object,
    adjusts log verbosity, and returns the remaining positional arguments
    (defaulting to ``['help']`` when no sub-command was given).
    """
    global config, options
    p = optparse.OptionParser(usage='%prog [global_options] COMMAND [ARG ...]',
                              version='%prog ' + ubik.defaults.VERSION,
                              description=__doc__,
                              epilog='Use the help sub-command for more '
                                     'details.')
    p.add_option('--conf', '-c', metavar='FILE',
                 default=ubik.defaults.CONFIG_FILE,
                 help='Use config FILE instead of %default')
    p.add_option('--debug', '-d', action='store_true',
                 help='Enable debug logging')
    p.add_option('--workdir', metavar='DIR',
                 help="Use DIR as working directory, creating if necessary")
    p.add_option('--verbose', '-v', action='store_true',
                 help='Enable verbose logging')
    # Stop option parsing at the first positional argument so that options
    # belonging to the sub-command are passed through untouched.
    p.disable_interspersed_args()
    (options, args) = p.parse_args(args=args)

    # A DEBUG environment variable forces debug logging on.
    if 'DEBUG' in os.environ:
        options.debug = True
    if options.debug:
        log.setLevel(logging.DEBUG)
    elif options.verbose:
        log.setLevel(logging.INFO)

    # The global config file can be overridden from the environment.
    if 'RUG_GLOBAL_CONFIG' in os.environ:
        global_cf = os.environ['RUG_GLOBAL_CONFIG']
    else:
        global_cf = ubik.defaults.GLOBAL_CONFIG_FILE
    config.read(options.conf, global_cf)

    if len(args) == 0:
        args = ['help',]

    return args
def main(args=None):
    """Dispatch to the requested sub-command ("hat").

    Returns 1 on a HatException, 2 for an unknown command; falls through
    to None (success) when the hat runs cleanly.  (Python 2 source: uses
    print-statement redirection.)
    """
    args = init_cli(args)

    # Try to figure out what hat we're using here
    hat = ubik.hats.hatter(args, config, options)
    if hat:
        try:
            hat.run()
        except ubik.hats.HatException as e:
            print >>sys.stderr, "ERROR:", str(e)
            # In debug mode surface the full traceback instead of exiting.
            if options.debug:
                raise e
            return 1
    else:
        print >>sys.stderr, "ERROR: No such command"
        return 2

if __name__ == '__main__':
    sys.exit(main())
| kafana/ubik | lib/ubik/rug/cli.py | Python | gpl-3.0 | 2,912 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration: creates the AccessionNumberCount table
    # with a single integer field defaulting to 0 (presumably a running
    # counter for handing out accession numbers -- confirm against callers).
    # NOTE: applied migrations are immutable; do not edit operations in place.

    dependencies = [
        ('settings', '0003_delete_organizationsettings'),
    ]

    operations = [
        migrations.CreateModel(
            name='AccessionNumberCount',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('accession_number', models.IntegerField(default=0)),
            ],
        ),
    ]
| ayys/siya | settings/migrations/0004_accessionnumbercount.py | Python | gpl-3.0 | 564 |
#!/usr/bin/env python
#
# (c) 2018, Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import json
import argparse
from ansible.parsing.dataloader import DataLoader
from ansible.module_utils.six import iteritems
from ansible.module_utils._text import to_text
from ansible.module_utils.net_tools.nios.api import get_connector
from ansible.module_utils.net_tools.nios.api import normalize_extattrs, flatten_extattrs
try:
# disable urllib3 warnings so as to not interfere with printing to stdout
# which is read by ansible
import urllib3
urllib3.disable_warnings()
except ImportError:
sys.stdout.write('missing required library: urllib3\n')
sys.exit(-1)
CONFIG_FILES = [
'/etc/ansible/infoblox.yaml',
'/etc/ansible/infoblox.yml'
]
def parse_args(argv=None):
    """Parse command-line arguments for the inventory script.

    Args:
        argv: optional list of argument strings.  Defaults to None, in which
            case argparse reads sys.argv[1:] -- so existing callers that
            invoke parse_args() with no arguments are unaffected.  The
            parameter generalizes the function so the parser can be driven
            programmatically (e.g. from tests).

    Returns:
        argparse.Namespace with ``list`` (bool) and ``host`` (str or None).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--list', action='store_true',
                        help='List host records from NIOS for use in Ansible')
    parser.add_argument('--host',
                        help='List meta data about single host (not used)')
    return parser.parse_args(argv)
def main():
    """Entry point: print the Ansible dynamic inventory built from NIOS host records.

    Reads the first existing config file from CONFIG_FILES, connects to
    Infoblox via the configured provider, and writes the inventory (grouped
    by DNS view, with per-host extattrs) as JSON to stdout.  Exits -1 on
    configuration/connection errors, 0 on success.
    """
    args = parse_args()

    # Use the first config file that exists on disk.
    for config_file in CONFIG_FILES:
        if os.path.exists(config_file):
            break
    else:
        sys.stdout.write('unable to locate config file at /etc/ansible/infoblox.yaml\n')
        sys.exit(-1)

    try:
        loader = DataLoader()
        config = loader.load_from_file(config_file)
        provider = config.get('provider') or {}
        connector = get_connector(**provider)
    except Exception as exc:
        sys.stdout.write(to_text(exc))
        sys.exit(-1)

    if args.host:
        host_filter = {'name': args.host}
    else:
        host_filter = {}

    # BUG FIX: config.get('filters') returns None when the 'filters' key is
    # absent from the config file, which crashed on the .get() calls below.
    # Default to an empty dict, matching how 'provider' is handled above.
    config_filters = config.get('filters') or {}
    if config_filters.get('view') is not None:
        host_filter['view'] = config_filters['view']

    if config_filters.get('extattrs'):
        extattrs = normalize_extattrs(config_filters['extattrs'])
    else:
        extattrs = {}

    hostvars = {}
    inventory = {
        '_meta': {
            'hostvars': hostvars
        }
    }

    return_fields = ['name', 'view', 'extattrs', 'ipv4addrs']
    hosts = connector.get_object('record:host',
                                 host_filter,
                                 extattrs=extattrs,
                                 return_fields=return_fields)
    if hosts:
        for item in hosts:
            view = item['view']
            name = item['name']

            # Group hosts by DNS view.
            if view not in inventory:
                inventory[view] = {'hosts': []}
            inventory[view]['hosts'].append(name)

            hostvars[name] = {
                'view': view
            }
            if item.get('extattrs'):
                for key, value in iteritems(flatten_extattrs(item['extattrs'])):
                    if key.startswith('ansible_'):
                        hostvars[name][key] = value
                    else:
                        # BUG FIX: the original tested "'extattrs' not in
                        # hostvars" (the top-level dict keyed by host name),
                        # which is always true, so the per-host extattrs dict
                        # was reset on every key and only the last extattr
                        # survived.  Check the per-host dict instead.
                        if 'extattrs' not in hostvars[name]:
                            hostvars[name]['extattrs'] = {}
                        hostvars[name]['extattrs'][key] = value

    sys.stdout.write(json.dumps(inventory, indent=4))
    sys.exit(0)

if __name__ == '__main__':
    main()
| wilvk/ansible | contrib/inventory/infoblox.py | Python | gpl-3.0 | 3,883 |
# -*- coding: utf-8 -*-
# Taboot - Client utility for performing deployments with Func.
# Copyright © 2009, Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""TODO: Decide what to do with this file"""
| tbielawa/Taboot | taboot/tasks/httpd.py | Python | gpl-3.0 | 805 |
# pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
from lettuce import step, world
from cms.djangoapps.contentstore.features.common import press_the_notification_button, type_in_codemirror
KEY_CSS = '.key h3.title'
ADVANCED_MODULES_KEY = "Advanced Module List"
def get_index_of(expected_key):
    """Return the position of `expected_key` among the advanced-settings keys, or -1."""
    for i, element in enumerate(world.css_find(KEY_CSS)):
        # Sometimes get stale reference if I hold on to the array of elements,
        # so re-query the element text by index instead of using `element`.
        key = world.css_value(KEY_CSS, index=i)
        if key == expected_key:
            return i

    return -1
def change_value(step, key, new_value):
    """Type `new_value` into the setting named `key`, save, and wait for the AJAX save."""
    index = get_index_of(key)
    type_in_codemirror(index, new_value)
    press_the_notification_button(step, "Save")
    world.wait_for_ajax_complete()
| ahmedaljazzar/edx-platform | cms/djangoapps/contentstore/features/advanced_settings.py | Python | agpl-3.0 | 769 |
from dependencies.dependency import getToolByName
from lims.browser import BrowserView
from dependencies.dependency import ViewPageTemplateFile
from lims import bikaMessageFactory as _
from lims.utils import t
from lims.utils import formatDateQuery, formatDateParms
from dependencies.dependency import IViewView
from dependencies.dependency import implements
class Report(BrowserView):
    """Browser view rendering the "Analyses retested" quality-control report."""
    implements(IViewView)
    template = ViewPageTemplateFile("templates/report_out.pt")

    def __init__(self, context, request, report=None):
        # `report` is an optional pre-built report object; kept on self.
        self.report = report
        BrowserView.__init__(self, context, request)

    def __call__(self):
        """Build the report and return its title plus the rendered template data.

        Queries the analysis catalog for retested analyses, applying the
        date-received and workflow-state filters supplied in the request
        form, and assembles headings, filter parameters, data rows and a
        count footer into ``self.report_content``.
        """
        bac = getToolByName(self.context, 'bika_analysis_catalog')
        self.report_content = {}
        parm_lines = {}
        parms = []
        headings = {}
        headings['header'] = _("Analyses retested")
        headings['subheader'] = _("Analyses which have been retested")

        count_all = 0

        # Base catalog query: retested analyses, newest first.
        query = {'portal_type': 'Analysis',
                 'getRetested': True,
                 'sort_order': 'reverse'}

        # Optional date-received range filter from the request form.
        date_query = formatDateQuery(self.context, 'Received')
        if date_query:
            query['getDateReceived'] = date_query
            received = formatDateParms(self.context, 'Received')
        else:
            received = 'Undefined'
        parms.append(
            {'title': _('Received'),
             'value': received,
             'type': 'text'})

        wf_tool = getToolByName(self.context, 'portal_workflow')
        # Optional review-state filter (Python 2 has_key kept as-is).
        if self.request.form.has_key('bika_analysis_workflow'):
            query['review_state'] = self.request.form['bika_analysis_workflow']
            review_state = wf_tool.getTitleForStateOnType(
                self.request.form['bika_analysis_workflow'], 'Analysis')
        else:
            review_state = 'Undefined'
        parms.append(
            {'title': _('Status'),
             'value': review_state,
             'type': 'text'})

        # Optional cancellation-state filter.
        if self.request.form.has_key('bika_cancellation_workflow'):
            query['cancellation_state'] = self.request.form[
                'bika_cancellation_workflow']
            cancellation_state = wf_tool.getTitleForStateOnType(
                self.request.form['bika_cancellation_workflow'], 'Analysis')
        else:
            cancellation_state = 'Undefined'
        parms.append(
            {'title': _('Active'),
             'value': cancellation_state,
             'type': 'text'})

        # Optional worksheet-assignment filter.
        if self.request.form.has_key('bika_worksheetanalysis_workflow'):
            query['worksheetanalysis_review_state'] = self.request.form[
                'bika_worksheetanalysis_workflow']
            ws_review_state = wf_tool.getTitleForStateOnType(
                self.request.form['bika_worksheetanalysis_workflow'], 'Analysis')
        else:
            ws_review_state = 'Undefined'
        parms.append(
            {'title': _('Assigned to worksheet'),
             'value': ws_review_state,
             'type': 'text'})

        # and now lets do the actual report lines
        formats = {'columns': 8,
                   'col_heads': [_('Client'),
                                 _('Request'),
                                 _('Sample type'),
                                 _('Sample point'),
                                 _('Category'),
                                 _('Analysis'),
                                 _('Received'),
                                 _('Status'),
                                 ],
                   'class': '',
                   }

        datalines = []
        clients = {}
        sampletypes = {}
        samplepoints = {}
        categories = {}
        services = {}

        # One data row per matching analysis; each cell is a {'value': ...} dict.
        for a_proxy in bac(query):
            analysis = a_proxy.getObject()
            dataline = []
            dataitem = {'value': analysis.getClientTitle()}
            dataline.append(dataitem)
            dataitem = {'value': analysis.getRequestID()}
            dataline.append(dataitem)
            dataitem = {'value': analysis.aq_parent.getSampleTypeTitle()}
            dataline.append(dataitem)
            dataitem = {'value': analysis.aq_parent.getSamplePointTitle()}
            dataline.append(dataitem)
            dataitem = {'value': analysis.getCategoryTitle()}
            dataline.append(dataitem)
            dataitem = {'value': analysis.getServiceTitle()}
            dataline.append(dataitem)
            dataitem = {'value': self.ulocalized_time(analysis.getDateReceived())}
            dataline.append(dataitem)
            state = wf_tool.getInfoFor(analysis, 'review_state', '')
            review_state = wf_tool.getTitleForStateOnType(
                state, 'Analysis')
            dataitem = {'value': review_state}
            dataline.append(dataitem)

            datalines.append(dataline)

            count_all += 1

        # table footer data
        footlines = []
        footline = []
        footitem = {'value': _('Number of analyses retested for period'),
                    'colspan': 7,
                    'class': 'total_label'}
        footline.append(footitem)
        footitem = {'value': count_all}
        footline.append(footitem)
        footlines.append(footline)

        self.report_content = {
            'headings': headings,
            'parms': parms,
            'formats': formats,
            'datalines': datalines,
            'footings': footlines}

        title = t(headings['header'])

        return {'report_title': title,
                'report_data': self.template()}
| yasir1brahim/OLiMS | lims/browser/reports/qualitycontrol_analysesrepeated.py | Python | agpl-3.0 | 5,554 |
from django.conf.urls import patterns, url
from quenta.storybook import views
urlpatterns = patterns('',
    # Most specific pattern first: the '\d+/' pattern below has no trailing
    # '$', so it also prefix-matched '5/html/' and the story_html view was
    # unreachable in the original ordering.
    url(r'^(?P<story_id>\d+/html/)', views.story_html, name='story_html'),
    url(r'^(?P<story_id>\d+/)', views.story, name='story'),
    # NOTE(review): the capture groups include the trailing slash (and
    # 'html/' for story_html) in story_id -- confirm the views expect that.
    url(r'^$', views.stories_list, name='stories_list'),
)
| metakermit/quenta | quenta/storybook/urls.py | Python | agpl-3.0 | 325 |
from __future__ import unicode_literals
from frappe import _
def get_data():
	"""Return the HR module's desk configuration.

	Each entry is a labelled section (Documents, Tools, Setup, Standard
	Reports) whose items link to doctypes or reports shown on the module
	page.
	"""
	return [
		{
			"label": _("Documents"),
			"icon": "icon-star",
			"items": [
				{
					"type": "doctype",
					"name": "Employee",
					"description": _("Employee records."),
				},
				{
					"type": "doctype",
					"name": "Leave Application",
					"description": _("Applications for leave."),
				},
				{
					"type": "doctype",
					"name": "Expense Claim",
					"description": _("Claims for company expense."),
				},
				{
					"type": "doctype",
					"name": "Attendance",
					"description": _("Attendance record."),
				},
				{
					"type": "doctype",
					"name": "Salary Slip",
					"description": _("Monthly salary statement."),
				},
				{
					"type": "doctype",
					"name": "Appraisal",
					"description": _("Performance appraisal."),
				},
				{
					"type": "doctype",
					"name": "Job Applicant",
					"description": _("Applicant for a Job."),
				},
				{
					"type": "doctype",
					"name": "Job Opening",
					"description": _("Opening for a Job."),
				},
				{
					"type": "doctype",
					"name": "Offer Letter",
					"description": _("Offer candidate a Job."),
				},
			]
		},
		{
			"label": _("Tools"),
			"icon": "icon-wrench",
			"items": [
				{
					"type": "doctype",
					"name": "Salary Manager",
					"label": _("Process Payroll"),
					"description":_("Generate Salary Slips"),
					"hide_count": True
				},
				{
					"type": "doctype",
					"name": "Timesheet Report",
					"label": _("Timesheet Report"),
					"description":_("Attendance Report"),
					"hide_count": True
				},
				{
					"type": "doctype",
					"name": "Upload Attendance",
					"description":_("Upload attendance from a .csv file"),
					"hide_count": True
				},
				{
					"type": "doctype",
					"name": "Leave Control Panel",
					"label": _("Leave Allocation Tool"),
					"description":_("Allocate leaves for the year."),
					"hide_count": True
				},
			]
		},
		{
			"label": _("Setup"),
			"icon": "icon-cog",
			"items": [
				{
					"type": "doctype",
					"name": "HR Settings",
					"description": _("Settings for HR Module")
				},
				{
					"type": "doctype",
					"name": "Employment Type",
					"description": _("Types of employment (permanent, contract, intern etc.).")
				},
				{
					"type": "doctype",
					"name": "Branch",
					"description": _("Organization branch master.")
				},
				{
					"type": "doctype",
					"name": "Department",
					"description": _("Organization unit (department) master.")
				},
				{
					"type": "doctype",
					"name": "Designation",
					"description": _("Employee designation (e.g. CEO, Director etc.).")
				},
				{
					"type": "doctype",
					"name": "Salary Structure",
					"description": _("Salary template master.")
				},
				{
					"type": "doctype",
					"name": "Earning Type",
					"description": _("Salary components.")
				},
				{
					"type": "doctype",
					"name": "Deduction Type",
					"description": _("Tax and other salary deductions.")
				},
				{
					"type": "doctype",
					"name": "Leave Allocation",
					"description": _("Allocate leaves for a period.")
				},
				{
					"type": "doctype",
					"name":"Leave Type",
					"description": _("Type of leaves like casual, sick etc."),
				},
				{
					"type": "doctype",
					"name": "Holiday List",
					"description": _("Holiday master.")
				},
				{
					"type": "doctype",
					"name": "Leave Block List",
					"description": _("Block leave applications by department.")
				},
				{
					"type": "doctype",
					"name": "Appraisal Template",
					"description": _("Template for performance appraisals.")
				},
				{
					"type": "doctype",
					"name": "Expense Claim Type",
					"description": _("Types of Expense Claim.")
				},
				{
					"type": "doctype",
					"name": "Email Account",
					"description": _("Setup incoming server for jobs email id. (e.g. jobs@example.com)")
				},
			]
		},
		{
			"label": _("Standard Reports"),
			"icon": "icon-list",
			"items": [
				{
					"type": "report",
					"is_query_report": True,
					"name": "Employee Leave Balance",
					"doctype": "Leave Application"
				},
				{
					"type": "report",
					"is_query_report": True,
					"name": "Employee Birthday",
					"doctype": "Employee"
				},
				{
					"type": "report",
					"name": "Employee Information",
					"doctype": "Employee"
				},
				{
					"type": "report",
					"is_query_report": True,
					"name": "Monthly Salary Register",
					"doctype": "Salary Slip"
				},
				{
					"type": "report",
					"is_query_report": True,
					"name": "Monthly Attendance Sheet",
					"doctype": "Attendance"
				},
			]
		},
	]
| indictranstech/tele-erpnext | erpnext/config/hr.py | Python | agpl-3.0 | 4,677 |
"""
LMS edxnotes page
"""
from __future__ import absolute_import
from bok_choy.page_object import PageLoadError, PageObject, unguarded
from bok_choy.promise import BrokenPromise, EmptyPromise
from selenium.webdriver.common.action_chains import ActionChains
from common.test.acceptance.pages.common.paging import PaginatedUIMixin
from common.test.acceptance.pages.lms.course_page import CoursePage
from common.test.acceptance.tests.helpers import disable_animations
class NoteChild(PageObject):
    """Base page object for a single note-related element identified by its DOM id."""
    url = None
    BODY_SELECTOR = None

    def __init__(self, browser, item_id):
        super(NoteChild, self).__init__(browser)
        self.item_id = item_id

    def is_browser_on_page(self):
        selector = "{}#{}".format(self.BODY_SELECTOR, self.item_id)
        return self.q(css=selector).present

    def _bounded_selector(self, selector):
        """
        Return `selector`, but limited to this particular `NoteChild` context
        """
        return u"{}#{} {}".format(self.BODY_SELECTOR, self.item_id, selector)

    def _get_element_text(self, selector):
        """Text of the first element matching `selector`, or None when absent."""
        match = self.q(css=self._bounded_selector(selector)).first
        return match.text[0] if match else None
class EdxNotesChapterGroup(NoteChild):
    """
    Wraps a chapter (section) grouping of notes in the Course Structure view on the Notes page.
    """
    BODY_SELECTOR = ".note-group"

    @property
    def title(self):
        """Heading text of the chapter group."""
        return self._get_element_text(".course-title")

    @property
    def subtitles(self):
        """Titles of every subsection group inside this chapter."""
        return [subsection.title for subsection in self.children]

    @property
    def children(self):
        """Subsection groups nested within this chapter."""
        elements = self.q(css=self._bounded_selector('.note-section'))
        return [EdxNotesSubsectionGroup(self.browser, element.get_attribute("id"))
                for element in elements]
class EdxNotesGroupMixin(object):
    """
    Shared behavior for note groupings (used by subsection and tag groups).
    """
    @property
    def title(self):
        """Heading text of the group (selector supplied by the concrete class)."""
        return self._get_element_text(self.TITLE_SELECTOR)

    @property
    def children(self):
        """Note page items contained in this group."""
        elements = self.q(css=self._bounded_selector('.note'))
        return [EdxNotesPageItem(self.browser, element.get_attribute("id"))
                for element in elements]

    @property
    def notes(self):
        """Text of each contained note."""
        return [item.text for item in self.children]
class EdxNotesSubsectionGroup(NoteChild, EdxNotesGroupMixin):
    """
    Helper class that works with subsection grouping of notes in the Course Structure view on the Note page.
    """
    # Selectors consumed by the NoteChild / EdxNotesGroupMixin helpers.
    BODY_SELECTOR = ".note-section"
    TITLE_SELECTOR = ".course-subtitle"
class EdxNotesTagsGroup(NoteChild, EdxNotesGroupMixin):
    """
    Helper class that works with tags grouping of notes in the Tags view on the Note page.
    """
    BODY_SELECTOR = ".note-group"
    TITLE_SELECTOR = ".tags-title"

    def scrolled_to_top(self, group_index):
        """
        Returns True if the group with supplied group)index is scrolled near the top of the page
        (expects 10 px padding).

        The group_index must be supplied because JQuery must be used to get this information, and it
        does not have access to the bounded selector.
        """
        # Build a jQuery lookup of the group title by absolute index, then
        # poll its viewport position until it sits ~10px from the top.
        title_selector = "$('" + self.TITLE_SELECTOR + "')[" + str(group_index) + "]"
        top_script = "return " + title_selector + ".getBoundingClientRect().top;"
        EmptyPromise(
            lambda: 8 < self.browser.execute_script(top_script) < 12,
            u"Expected tag title '{}' to scroll to top, but was at location {}".format(
                self.title, self.browser.execute_script(top_script)
            )
        ).fulfill()
        # Now also verify that focus has moved to this title (for screen readers):
        active_script = "return " + title_selector + " === document.activeElement;"
        return self.browser.execute_script(active_script)
class EdxNotesPageItem(NoteChild):
    """
    A single note item shown on the Notes page of the course.
    """
    BODY_SELECTOR = ".note"
    UNIT_LINK_SELECTOR = "a.reference-unit-link"
    TAG_SELECTOR = "span.reference-tags"

    def go_to_unit(self, unit_page=None):
        """Follow the note's unit link; wait for `unit_page` to load when given."""
        self.q(css=self._bounded_selector(self.UNIT_LINK_SELECTOR)).click()
        if unit_page is not None:
            unit_page.wait_for_page()

    @property
    def unit_name(self):
        """Display name of the unit the note belongs to."""
        return self._get_element_text(self.UNIT_LINK_SELECTOR)

    @property
    def text(self):
        """Body text of the note's comment."""
        return self._get_element_text(".note-comment-p")

    @property
    def quote(self):
        """Highlighted excerpt the note was attached to."""
        return self._get_element_text(".note-excerpt")

    @property
    def time_updated(self):
        """Human-readable last-updated timestamp."""
        return self._get_element_text(".reference-updated-date")

    @property
    def tags(self):
        """The tags associated with this note, or None when there are none."""
        tag_elements = self.q(css=self._bounded_selector(self.TAG_SELECTOR))
        if len(tag_elements) == 0:
            return None
        return [tag_element.text for tag_element in tag_elements]

    def go_to_tag(self, tag_name):
        """Click the tag matching `tag_name` to switch to the tags view (scrolled to that group)."""
        matching = self.q(css=self._bounded_selector(self.TAG_SELECTOR)).filter(
            lambda el: tag_name in el.text)
        matching.click()
class EdxNotesPageView(PageObject):
    """
    Base class for EdxNotes views: Recent Activity, Location in Course, Search Results.
    """
    url = None
    BODY_SELECTOR = ".tab-panel"
    TAB_SELECTOR = ".tab"
    CHILD_SELECTOR = ".note"
    CHILD_CLASS = EdxNotesPageItem

    @unguarded
    def visit(self):
        """
        Open the page containing this page object in the browser.

        Raises:
            PageLoadError: The page did not load successfully.

        Returns:
            PageObject
        """
        self.q(css=self.TAB_SELECTOR).first.click()
        try:
            return self.wait_for_page()
        except BrokenPromise:
            raise PageLoadError(u"Timed out waiting to load page '{!r}'".format(self))

    def is_browser_on_page(self):
        # Evaluate every condition eagerly (same as the original all([...])):
        # the panel body exists, the tab is active, and the spinner is gone.
        checks = [
            self.q(css="{}".format(self.BODY_SELECTOR)).present,
            self.q(css="{}.is-active".format(self.TAB_SELECTOR)).present,
            not self.q(css=".ui-loading").visible,
        ]
        return all(checks)

    @property
    def is_closable(self):
        """
        Indicates if tab is closable or not.
        """
        close_button = self.q(css=u"{} .action-close".format(self.TAB_SELECTOR))
        return close_button.present

    def close(self):
        """
        Closes the tab.
        """
        close_button = self.q(css=u"{} .action-close".format(self.TAB_SELECTOR))
        close_button.first.click()

    @property
    def children(self):
        """
        Returns all notes on the page.
        """
        elements = self.q(css=self.CHILD_SELECTOR)
        return [self.CHILD_CLASS(self.browser, element.get_attribute("id"))
                for element in elements]
class RecentActivityView(EdxNotesPageView):
    """
    Helper class for Recent Activity view.
    """
    # Only the selectors differ from the base view.
    BODY_SELECTOR = "#recent-panel"
    TAB_SELECTOR = ".tab#view-recent-activity"
class CourseStructureView(EdxNotesPageView):
    """
    Helper class for Location in Course view.
    """
    BODY_SELECTOR = "#structure-panel"
    TAB_SELECTOR = ".tab#view-course-structure"
    # Children are chapter groups rather than individual notes.
    CHILD_SELECTOR = ".note-group"
    CHILD_CLASS = EdxNotesChapterGroup
class TagsView(EdxNotesPageView):
    """
    Helper class for Tags view.
    """
    BODY_SELECTOR = "#tags-panel"
    TAB_SELECTOR = ".tab#view-tags"
    # Children are tag groups rather than individual notes.
    CHILD_SELECTOR = ".note-group"
    CHILD_CLASS = EdxNotesTagsGroup
class SearchResultsView(EdxNotesPageView):
    """
    Helper class for Search Results view.
    """
    BODY_SELECTOR = "#search-results-panel"
    TAB_SELECTOR = ".tab#view-search-results"
class EdxNotesPage(CoursePage, PaginatedUIMixin):
    """
    EdxNotes page.

    Holds a `current_view` (one of the MAPPING views) that tracks which tab
    is active; tab switching and search both update it.
    """
    url_path = "edxnotes/"
    MAPPING = {
        "recent": RecentActivityView,
        "structure": CourseStructureView,
        "tags": TagsView,
        "search": SearchResultsView,
    }

    def __init__(self, *args, **kwargs):
        """Start on the Recent Activity view, the page's default tab."""
        super(EdxNotesPage, self).__init__(*args, **kwargs)
        self.current_view = self.MAPPING["recent"](self.browser)

    def is_browser_on_page(self):
        """The page is loaded once at least one note group is visible."""
        return self.q(css=".wrapper-student-notes .note-group").visible

    def switch_to_tab(self, tab_name):
        """
        Switches to the appropriate tab `tab_name(str)`.
        """
        self.current_view = self.MAPPING[tab_name](self.browser)
        self.current_view.visit()

    def close_tab(self):
        """
        Closes the current view.

        Closing a tab returns the page to the Recent Activity view.
        """
        self.current_view.close()
        self.current_view = self.MAPPING["recent"](self.browser)

    def search(self, text):
        """
        Runs search with `text(str)` query.
        """
        self.q(css="#search-notes-form #search-notes-input").first.fill(text)
        self.q(css='#search-notes-form .search-notes-submit').first.click()
        # Frontend will automatically switch to Search results tab when search
        # is running, so the view also needs to be changed.
        self.current_view = self.MAPPING["search"](self.browser)
        # An all-whitespace query triggers an error instead of a page load.
        if text.strip():
            self.current_view.wait_for_page()

    @property
    def tabs(self):
        """
        Returns all tabs on the page.
        """
        tabs = self.q(css=".tabs .tab-label")
        if tabs:
            # Strip the screen-reader-only "Current tab" prefix from labels.
            return [x.replace("Current tab\n", "") for x in tabs.text]
        else:
            return None

    @property
    def is_error_visible(self):
        """
        Indicates whether error message is visible or not.
        """
        return self.q(css=".inline-error").visible

    @property
    def error_text(self):
        """
        Returns error message.
        """
        element = self.q(css=".inline-error").first
        if element and self.is_error_visible:
            return element.text[0]
        else:
            return None

    @property
    def notes(self):
        """
        Returns all notes on the page.
        """
        children = self.q(css='.note')
        return [EdxNotesPageItem(self.browser, child.get_attribute("id")) for child in children]

    @property
    def chapter_groups(self):
        """
        Returns all chapter groups on the page.
        """
        children = self.q(css='.note-group')
        return [EdxNotesChapterGroup(self.browser, child.get_attribute("id")) for child in children]

    @property
    def subsection_groups(self):
        """
        Returns all subsection groups on the page.
        """
        children = self.q(css='.note-section')
        return [EdxNotesSubsectionGroup(self.browser, child.get_attribute("id")) for child in children]

    @property
    def tag_groups(self):
        """
        Returns all tag groups on the page.
        """
        children = self.q(css='.note-group')
        return [EdxNotesTagsGroup(self.browser, child.get_attribute("id")) for child in children]

    def count(self):
        """ Returns the total number of notes in the list """
        return len(self.q(css='div.wrapper-note-excerpts').results)
class EdxNotesPageNoContent(CoursePage):
    """
    EdxNotes page -- when no notes have been added.
    """
    url_path = "edxnotes/"

    def is_browser_on_page(self):
        return self.q(css=".wrapper-student-notes .is-empty").visible

    @property
    def no_content_text(self):
        """
        Text of the empty-state message, or None when it is absent.
        """
        empty_message = self.q(css=".is-empty").first
        return empty_message.text[0] if empty_message else None
class EdxNotesUnitPage(CoursePage):
    """
    Courseware unit page with EdxNotes enabled.
    """
    url_path = "courseware/"

    def is_browser_on_page(self):
        return self.q(css="body.courseware .edx-notes-wrapper").present

    def move_mouse_to(self, selector):
        """
        Moves mouse to the element that matches `selector(str)`.
        """
        target = self.q(css=selector)[0]
        ActionChains(self.browser).move_to_element(target).perform()
        return self

    def click(self, selector):
        """
        Clicks on the element that matches `selector(str)`.
        """
        self.q(css=selector).first.click()
        return self

    def toggle_visibility(self):
        """
        Clicks on the "Show notes" checkbox.
        """
        self.q(css=".action-toggle-notes").first.click()
        return self

    @property
    def components(self):
        """
        Returns a list of annotatable components.
        """
        wrappers = self.q(css=".edx-notes-wrapper")
        return [AnnotatableComponent(self.browser, wrapper.get_attribute("id"))
                for wrapper in wrappers]

    @property
    def notes(self):
        """
        Returns a list of notes for the page.
        """
        collected = []
        for component in self.components:
            collected.extend(component.notes)
        return collected

    def refresh(self):
        """
        Refreshes the page and returns a list of annotatable components.
        """
        self.browser.refresh()
        return self.components
class AnnotatableComponent(NoteChild):
    """
    Helper class that works with annotatable components.

    create_note/edit_note are generators: each yields an open note popup so
    the caller can mutate it, then saves it when control returns here.
    """
    BODY_SELECTOR = ".edx-notes-wrapper"

    @property
    def notes(self):
        """
        Returns a list of notes for the component.
        """
        notes = self.q(css=self._bounded_selector(".annotator-hl"))
        return [EdxNoteHighlight(self.browser, note, self.item_id) for note in notes]

    def create_note(self, selector=".annotate-id"):
        """
        Create the note by the selector, return a context manager that will
        show and save the note popup.
        """
        for element in self.q(css=self._bounded_selector(selector)):
            note = EdxNoteHighlight(self.browser, element, self.item_id)
            note.select_and_click_adder()
            # Caller fills in the popup here; save once control returns.
            yield note
            note.save()

    def edit_note(self, selector=".annotator-hl"):
        """
        Edit the note by the selector, return a context manager that will
        show and save the note popup.
        """
        for element in self.q(css=self._bounded_selector(selector)):
            note = EdxNoteHighlight(self.browser, element, self.item_id)
            note.show().edit()
            # Caller edits the popup here; save once control returns.
            yield note
            note.save()

    def remove_note(self, selector=".annotator-hl"):
        """
        Removes the note by the selector.
        """
        for element in self.q(css=self._bounded_selector(selector)):
            note = EdxNoteHighlight(self.browser, element, self.item_id)
            note.show().remove()
class EdxNoteHighlight(NoteChild):
"""
Helper class that works with notes.
"""
BODY_SELECTOR = ""
ADDER_SELECTOR = ".annotator-adder"
VIEWER_SELECTOR = ".annotator-viewer"
EDITOR_SELECTOR = ".annotator-editor"
NOTE_SELECTOR = ".annotator-note"
def __init__(self, browser, element, parent_id):
super(EdxNoteHighlight, self).__init__(browser, parent_id)
self.element = element
self.item_id = parent_id
disable_animations(self)
@property
def is_visible(self):
"""
Returns True if the note is visible.
"""
viewer_is_visible = self.q(css=self._bounded_selector(self.VIEWER_SELECTOR)).visible
editor_is_visible = self.q(css=self._bounded_selector(self.EDITOR_SELECTOR)).visible
return viewer_is_visible or editor_is_visible
def wait_for_adder_visibility(self):
"""
Waiting for visibility of note adder button.
"""
self.wait_for_element_visibility(
self._bounded_selector(self.ADDER_SELECTOR), "Adder is visible."
)
def wait_for_viewer_visibility(self):
"""
Waiting for visibility of note viewer.
"""
self.wait_for_element_visibility(
self._bounded_selector(self.VIEWER_SELECTOR), "Note Viewer is visible."
)
def wait_for_editor_visibility(self):
"""
Waiting for visibility of note editor.
"""
self.wait_for_element_visibility(
self._bounded_selector(self.EDITOR_SELECTOR), "Note Editor is visible."
)
def wait_for_notes_invisibility(self, text="Notes are hidden"):
"""
Waiting for invisibility of all notes.
"""
selector = self._bounded_selector(".annotator-outer")
self.wait_for_element_invisibility(selector, text)
def select_and_click_adder(self):
"""
Creates selection for the element and clicks `add note` button.
"""
ActionChains(self.browser).double_click(self.element).perform()
self.wait_for_adder_visibility()
self.q(css=self._bounded_selector(self.ADDER_SELECTOR)).first.click()
self.wait_for_editor_visibility()
return self
def click_on_highlight(self):
"""
Clicks on the highlighted text.
"""
ActionChains(self.browser).move_to_element(self.element).click().perform()
return self
def click_on_viewer(self):
"""
Clicks on the note viewer.
"""
self.q(css=self.NOTE_SELECTOR).first.click()
return self
def show(self):
"""
Hover over highlighted text -> shows note.
"""
ActionChains(self.browser).move_to_element(self.element).perform()
self.wait_for_viewer_visibility()
return self
def cancel(self):
"""
Clicks cancel button.
"""
self.q(css=self._bounded_selector(".annotator-close")).first.click()
self.wait_for_notes_invisibility("Note is canceled.")
return self
def save(self):
"""
Clicks save button.
"""
self.q(css=self._bounded_selector(".annotator-save")).first.click()
self.wait_for_notes_invisibility("Note is saved.")
self.wait_for_ajax()
return self
def remove(self):
"""
Clicks delete button.
"""
self.q(css=self._bounded_selector(".annotator-delete")).first.click()
self.wait_for_notes_invisibility("Note is removed.")
self.wait_for_ajax()
return self
def edit(self):
"""
Clicks edit button.
"""
self.q(css=self._bounded_selector(".annotator-edit")).first.click()
self.wait_for_editor_visibility()
return self
@property
def text(self):
"""
Returns text of the note.
"""
self.show()
element = self.q(css=self._bounded_selector(".annotator-annotation > div.annotator-note"))
if element:
text = element.text[0].strip()
else:
text = None
self.cancel()
return text
@text.setter
def text(self, value):
"""
Sets text for the note.
"""
self.q(css=self._bounded_selector(".annotator-item textarea")).first.fill(value)
@property
def tags(self):
"""
Returns the tags associated with the note.
Tags are returned as a list of strings, with each tag as an individual string.
"""
tag_text = []
self.show()
tags = self.q(css=self._bounded_selector(".annotator-annotation > div.annotator-tags > span.annotator-tag"))
if tags:
for tag in tags:
tag_text.append(tag.text)
self.cancel()
return tag_text
@tags.setter
def tags(self, tags):
"""
Sets tags for the note. Tags should be supplied as a list of strings, with each tag as an individual string.
"""
self.q(css=self._bounded_selector(".annotator-item input")).first.fill(" ".join(tags))
def has_sr_label(self, sr_index, field_index, expected_text):
"""
Returns true iff a screen reader label (of index sr_index) exists for the annotator field with
the specified field_index and text.
"""
label_exists = False
EmptyPromise(
lambda: len(self.q(css=self._bounded_selector("li.annotator-item > label.sr"))) > sr_index,
u"Expected more than '{}' sr labels".format(sr_index)
).fulfill()
annotator_field_label = self.q(css=self._bounded_selector("li.annotator-item > label.sr"))[sr_index]
for_attrib_correct = annotator_field_label.get_attribute("for") == "annotator-field-" + str(field_index)
if for_attrib_correct and (annotator_field_label.text == expected_text):
label_exists = True
self.q(css="body").first.click()
self.wait_for_notes_invisibility()
return label_exists
| ESOedX/edx-platform | common/test/acceptance/pages/lms/edxnotes.py | Python | agpl-3.0 | 20,755 |
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014, 2015 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import pytz
from datetime import datetime, timedelta
from apps.rules.routing_rules import Weekdays
from apps.templates.content_templates import get_next_run, get_item_from_template, render_content_template
from superdesk.metadata.item import ITEM_STATE, CONTENT_STATE
from superdesk.tests import TestCase
from superdesk.utc import utcnow
class TemplatesTestCase(TestCase):
    """Tests for the content-template scheduling helpers
    (get_next_run / get_item_from_template)."""
    def setUp(self):
        # now is today at 09:05:03
        self.now = datetime.utcnow().replace(hour=9, minute=5, second=3)
        self.weekdays = [day.name for day in Weekdays]
    def get_delta(self, create_at, weekdays, time_zone=None, now=None, cron_list=None):
        # Helper: build a schedule dict and return (next_run - now), with the
        # seconds on `now` zeroed to match the scheduler's minute resolution.
        schedule = {
            'day_of_week': weekdays,
            'create_at': create_at,
            'is_active': True,
            'time_zone': time_zone or 'UTC'
        }
        if cron_list:
            # a cron_list replaces the single create_at time
            schedule['cron_list'] = cron_list
            schedule.pop('create_at', None)
        next_run = get_next_run(schedule, now or self.now)
        return next_run - (now or self.now).replace(second=0)
    def test_inactive_schedule(self):
        # an inactive schedule never produces a next run
        self.assertEqual(None,
                         get_next_run({'is_active': False, 'day_of_week': self.weekdays, 'create_at': '09:15:00'}))
    def test_next_run_same_day_later(self):
        delta = self.get_delta('09:08:00', self.weekdays)
        self.assertEqual(delta.days, 0)
        self.assertEqual(delta.seconds, 179)
    def test_next_run_same_day_later_cron_list(self):
        cron_list = ['30 07 * * *', '08 09 * * *']
        delta = self.get_delta('09:08:00', self.weekdays, cron_list=cron_list)
        self.assertEqual(delta.days, 0)
        self.assertEqual(delta.seconds, 179)
    def test_next_run_next_day(self):
        delta = self.get_delta('09:03:00', self.weekdays)
        self.assertEqual(delta.days, 0)
        self.assertEqual(delta.seconds, 3600 * 24 - 121)
    def test_next_run_next_week(self):
        # only scheduled on today's weekday and the time already passed
        delta = self.get_delta('09:03:00', [self.now.strftime('%a').upper()])
        self.assertEqual(delta.days, 6)
    def test_next_run_now(self):
        # the current minute does not count; the run moves to tomorrow
        delta = self.get_delta('09:05:00', self.weekdays)
        self.assertEqual(delta.seconds, 24 * 60 * 60 - 1)
    def test_get_item_from_template(self):
        template = {'_id': 'foo', 'name': 'test',
                    'schedule_desk': 'sports', 'schedule_stage': 'schedule',
                    'data': {
                        'headline': 'Foo',
                        'dateline': {
                            'located': {
                                'city': 'Sydney',
                                'city_code': 'Sydney',
                                'tz': 'Australia/Sydney'
                            },
                            'date': '2015-10-10T10:10:10',
                        }
                    }}
        now = utcnow()
        with self.app.app_context():
            item = get_item_from_template(template)
            self.assertNotIn('_id', item)
            self.assertEqual('foo', item.get('template'))
            self.assertEqual('Foo', item.get('headline'))
            self.assertEqual(CONTENT_STATE.SUBMITTED, item.get(ITEM_STATE))
            self.assertEqual({'desk': 'sports', 'stage': 'schedule'}, item.get('task'))
            dateline = item.get('dateline')
            self.assertEqual('Sydney', dateline['located']['city'])
            # the template's stale date is replaced with the current time
            self.assertEqual(now, dateline.get('date'))
            self.assertIn('SYDNEY', dateline.get('text'))
    def test_next_run_for_timezone(self):
        # UTC time Zero hours
        now = datetime(2018, 6, 30, 19, 0, 0, 0, tzinfo=pytz.utc)
        current_now = now + timedelta(seconds=5)
        # schedule at 06:00 AM
        delta = self.get_delta('06:00:00',
                               self.weekdays,
                               time_zone='Australia/Sydney',
                               now=current_now
                               )
        self.assertEqual(delta.days, 0)
        self.assertEqual(delta.seconds, 3600)
        # 30 minutes before schedule
        current_now = now + timedelta(minutes=30)
        delta = self.get_delta('06:00:00',
                               self.weekdays,
                               time_zone='Australia/Sydney',
                               now=current_now
                               )
        self.assertEqual(delta.days, 0)
        self.assertEqual(delta.seconds, 1800)
        # hour after schedule
        current_now = now + timedelta(hours=1, seconds=5)
        delta = self.get_delta('06:00:00',
                               self.weekdays,
                               time_zone='Australia/Sydney',
                               now=current_now
                               )
        self.assertEqual(delta.days, 1)
class RenderTemplateTestCase(TestCase):
    """Tests for rendering a content template against an existing item."""
    def test_render_content_template(self):
        template = {
            '_id': 'foo',
            'template_name': 'test',
            'template_desks': ['sports'],
            'data': {
                'headline': 'Foo Template: {{item.headline}}',
                'body_html': 'This article has slugline: {{item.slugline}} and dateline: {{item.dateline["text"]}} '
                             'at {{item.versioncreated | format_datetime("Australia/Sydney", "%d %b %Y %H:%S %Z")}}',
                'urgency': 1, 'priority': 3,
                'dateline': {},
                'anpa_take_key': 'this is test',
                'place': ['Australia']
            }
        }
        item = {
            '_id': '123', 'headline': 'Test Template',
            'slugline': 'Testing', 'body_html': 'This is test story',
            'dateline': {
                'text': 'hello world'
            },
            'urgency': 4, 'priority': 6,
            'versioncreated': '2015-06-01T22:54:53+0000',
            'place': ['NSW']
        }
        updates = render_content_template(item, template)
        # template values win over item values for the rendered fields
        self.assertEqual(updates['headline'], 'Foo Template: Test Template')
        self.assertEqual(updates['urgency'], 1)
        self.assertEqual(updates['priority'], 3)
        self.assertEqual(updates['body_html'], 'This article has slugline: Testing and dateline: '
                                               'hello world at 02 Jun 2015 08:53 AEST')
        self.assertListEqual(updates['place'], ['Australia'])
    def test_headline_strip_tags(self):
        # markup and surrounding whitespace are stripped from templated headlines
        template = {'data': {'headline': ' test\nit<br>'}}
        updates = render_content_template({}, template)
        self.assertEqual('test it', updates['headline'])
        item = get_item_from_template(template)
        self.assertEqual('test it', item['headline'])
| mdhaman/superdesk-core | apps/templates/content_templates_test.py | Python | agpl-3.0 | 6,949 |
# Generated by Django 3.2.6 on 2021-10-19 09:14
from django.db import migrations
from ureport.sql import InstallSQL
class Migration(migrations.Migration):
    # NOTE(review): InstallSQL presumably loads and installs the "polls_0070"
    # SQL bundle (triggers/functions) at migrate time — confirm against
    # ureport.sql.InstallSQL.
    dependencies = [
        ("polls", "0069_pollquestion_color_choice"),
    ]
    operations = [InstallSQL("polls_0070")]
| rapidpro/ureport | ureport/polls/migrations/0070_install_triggers.py | Python | agpl-3.0 | 285 |
"""
Test OLPC MUC properties.
"""
import dbus
from twisted.words.xish import domish, xpath
from gabbletest import exec_test, acknowledge_iq, make_muc_presence
from servicetest import call_async, EventPattern, wrap_channel
import constants as cs
import ns
from mucutil import echo_muc_presence
def test(q, bus, conn, stream):
    """
    Exercise OLPC activity properties on a MUC: an invitation pre-seeds the
    properties, the inviter updates them, we join, both sides publish changes
    (including PEP updates), and closing the channel cleans everything up.
    """
    iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
        query_name='vCard')
    acknowledge_iq(stream, iq_event.stanza)
    buddy_iface = dbus.Interface(conn, 'org.laptop.Telepathy.BuddyInfo')
    act_prop_iface = dbus.Interface(conn, 'org.laptop.Telepathy.ActivityProperties')
    bob_handle = conn.get_contact_handle_sync('bob@localhost')
    # Bob invites us to a chatroom, pre-seeding properties
    message = domish.Element(('jabber:client', 'message'))
    message['from'] = 'bob@localhost'
    message['to'] = 'test@localhost'
    properties = message.addElement(
        (ns.OLPC_ACTIVITY_PROPS, 'properties'))
    properties['room'] = 'chat@conf.localhost'
    properties['activity'] = 'foo_id'
    property = properties.addElement((None, 'property'))
    property['type'] = 'str'
    property['name'] = 'title'
    property.addContent('From the invitation')
    property = properties.addElement((None, 'property'))
    property['type'] = 'bool'
    property['name'] = 'private'
    property.addContent('1')
    stream.send(message)
    # the actual MUC invitation follows the property message
    message = domish.Element((None, 'message'))
    message['from'] = 'chat@conf.localhost'
    message['to'] = 'test@localhost'
    x = message.addElement((ns.MUC_USER, 'x'))
    invite = x.addElement((None, 'invite'))
    invite['from'] = 'bob@localhost'
    reason = invite.addElement((None, 'reason'))
    reason.addContent('No good reason')
    stream.send(message)
    event = q.expect('dbus-signal', signal='NewChannel')
    assert event.args[1] == cs.CHANNEL_TYPE_TEXT
    assert event.args[2] == 2   # handle type
    assert event.args[3] == 1   # handle
    room_handle = 1
    text_chan = wrap_channel(bus.get_object(conn.bus_name, event.args[0]),
            'Text')
    group_iface = text_chan.Group
    members = group_iface.GetAllMembers()[0]
    local_pending = group_iface.GetAllMembers()[1]
    remote_pending = group_iface.GetAllMembers()[2]
    assert len(members) == 1
    assert conn.inspect_contact_sync(members[0]) == 'bob@localhost'
    bob_handle = members[0]
    assert len(local_pending) == 1
    # FIXME: the username-part-is-nickname assumption
    assert conn.inspect_contact_sync(local_pending[0]) == \
            'chat@conf.localhost/test'
    assert len(remote_pending) == 0
    room_self_handle = text_chan.Properties.Get(cs.CHANNEL_IFACE_GROUP,
            "SelfHandle")
    assert room_self_handle == local_pending[0]
    # by now, we should have picked up the extra activity properties
    buddy_iface = dbus.Interface(conn, 'org.laptop.Telepathy.BuddyInfo')
    call_async(q, buddy_iface, 'GetActivities', bob_handle)
    event = q.expect('stream-iq', iq_type='get', to='bob@localhost')
    # Bob still has no (public) activities
    event.stanza['type'] = 'result'
    event.stanza['to'] = 'test@localhost'
    event.stanza['from'] = 'bob@localhost'
    stream.send(event.stanza)
    event = q.expect('dbus-return', method='GetActivities')
    assert event.value == ([('foo_id', room_handle)],)
    props = act_prop_iface.GetProperties(room_handle)
    assert len(props) == 2
    assert props['title'] == 'From the invitation'
    assert props['private'] == True
    # Now Bob changes the properties
    message = domish.Element(('jabber:client', 'message'))
    message['from'] = 'bob@localhost'
    message['to'] = 'test@localhost'
    properties = message.addElement(
        (ns.OLPC_ACTIVITY_PROPS, 'properties'))
    properties['room'] = 'chat@conf.localhost'
    properties['activity'] = 'foo_id'
    property = properties.addElement((None, 'property'))
    property['type'] = 'str'
    property['name'] = 'title'
    property.addContent('Mushroom, mushroom')
    property = properties.addElement((None, 'property'))
    property['type'] = 'bool'
    property['name'] = 'private'
    property.addContent('0')
    stream.send(message)
    event = q.expect('dbus-signal', signal='ActivityPropertiesChanged')
    assert event.args == [room_handle, {'title': 'Mushroom, mushroom',
        'private': False }]
    assert act_prop_iface.GetProperties(room_handle) == \
            event.args[1]
    # OK, now accept the invitation
    call_async(q, group_iface, 'AddMembers', [room_self_handle], 'Oh, OK then')
    q.expect_many(
            EventPattern('stream-presence', to='chat@conf.localhost/test'),
            EventPattern('dbus-signal', signal='MembersChanged',
                args=['', [], [bob_handle], [], [room_self_handle],
                    0, cs.GC_REASON_INVITED]),
            EventPattern('dbus-return', method='AddMembers'),
            )
    # Send presence for own membership of room.
    stream.send(make_muc_presence('owner', 'moderator', 'chat@conf.localhost', 'test'))
    event = q.expect('dbus-signal', signal='MembersChanged')
    assert event.args == ['', [room_self_handle], [], [], [], 0, 0]
    call_async(q, buddy_iface, 'SetActivities', [('foo_id', room_handle)])
    event = q.expect('stream-iq', iq_type='set')
    # Now that it's not private, it'll go in my PEP
    event.stanza['type'] = 'result'
    event.stanza['to'] = 'test@localhost'
    event.stanza['from'] = 'test@localhost'
    stream.send(event.stanza)
    q.expect('dbus-return', method='SetActivities')
    # Bob changes the properties and tells the room he's done so
    message = domish.Element(('jabber:client', 'message'))
    message['from'] = 'chat@conf.localhost/bob'
    message['to'] = 'chat@conf.localhost'
    properties = message.addElement(
        (ns.OLPC_ACTIVITY_PROPS, 'properties'))
    properties['activity'] = 'foo_id'
    property = properties.addElement((None, 'property'))
    property['type'] = 'str'
    property['name'] = 'title'
    property.addContent('Badger badger badger')
    property = properties.addElement((None, 'property'))
    property['type'] = 'bool'
    property['name'] = 'private'
    property.addContent('0')
    stream.send(message)
    # our PEP node gets republished with Bob's new property values
    event = q.expect('stream-iq', iq_type='set')
    message = event.stanza
    activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
            message)
    assert (activities is not None and len(activities) == 1), repr(activities)
    assert activities[0].uri == ns.OLPC_ACTIVITY_PROPS
    properties = xpath.queryForNodes('/activities/properties', activities[0])
    assert (properties is not None and len(properties) == 1), repr(properties)
    assert properties[0].uri == ns.OLPC_ACTIVITY_PROPS
    assert properties[0]['room'] == 'chat@conf.localhost'
    assert properties[0]['activity'] == 'foo_id'
    property = xpath.queryForNodes('/properties/property', properties[0])
    assert (property is not None and len(property) == 2), repr(property)
    seen = set()
    for p in property:
        seen.add(p['name'])
        if p['name'] == 'title':
            assert p['type'] == 'str'
            assert str(p) == 'Badger badger badger'
        elif p['name'] == 'private':
            assert p['type'] == 'bool'
            assert str(p) == '0'
        else:
            assert False, 'Unexpected property %s' % p['name']
    assert 'title' in seen, seen
    assert 'private' in seen, seen
    event.stanza['type'] = 'result'
    event.stanza['to'] = 'test@localhost'
    event.stanza['from'] = 'test@localhost'
    stream.send(event.stanza)
    act_prop_iface = dbus.Interface(conn, 'org.laptop.Telepathy.ActivityProperties')
    # test sets the title and sets private back to True
    call_async(q, act_prop_iface, 'SetProperties',
            room_handle, {'title': 'I can set the properties too', 'private': True})
    event = q.expect('stream-message', to='chat@conf.localhost')
    message = event.stanza
    properties = xpath.queryForNodes('/message/properties', message)
    assert (properties is not None and len(properties) == 1), repr(properties)
    assert properties[0].uri == ns.OLPC_ACTIVITY_PROPS
    assert properties[0]['room'] == 'chat@conf.localhost'
    assert properties[0]['activity'] == 'foo_id'
    property = xpath.queryForNodes('/properties/property', properties[0])
    assert (property is not None and len(property) == 2), repr(property)
    seen = set()
    for p in property:
        seen.add(p['name'])
        if p['name'] == 'title':
            assert p['type'] == 'str'
            assert str(p) == 'I can set the properties too'
        elif p['name'] == 'private':
            assert p['type'] == 'bool'
            assert str(p) == '1'
        else:
            assert False, 'Unexpected property %s' % p['name']
    assert 'title' in seen, seen
    assert 'private' in seen, seen
    # the activity is now private again, so its properties leave our PEP node
    event = q.expect('stream-iq', iq_type='set')
    event.stanza['type'] = 'result'
    event.stanza['to'] = 'test@localhost'
    event.stanza['from'] = 'test@localhost'
    stream.send(event.stanza)
    message = event.stanza
    activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
            message)
    assert (activities is not None and len(activities) == 1), repr(activities)
    assert activities[0].uri == ns.OLPC_ACTIVITY_PROPS
    properties = xpath.queryForNodes('/activities/properties', activities[0])
    assert properties is None, repr(properties)
    event = q.expect('stream-iq', iq_type='set')
    event.stanza['type'] = 'result'
    event.stanza['to'] = 'test@localhost'
    event.stanza['from'] = 'test@localhost'
    stream.send(event.stanza)
    message = event.stanza
    activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
            message)
    assert (activities is not None and len(activities) == 1), repr(activities)
    assert activities[0].uri == ns.OLPC_ACTIVITIES
    activity = xpath.queryForNodes('/activities/activity', activities[0])
    assert activity is None, repr(activity)
    q.expect('dbus-return', method='SetProperties')
    # test sets the title and sets private back to True
    call_async(q, act_prop_iface, 'SetProperties',
            room_handle, {'title': 'I can set the properties too',
                'private': False})
    event = q.expect('stream-message', to='chat@conf.localhost')
    message = event.stanza
    properties = xpath.queryForNodes('/message/properties', message)
    assert (properties is not None and len(properties) == 1), repr(properties)
    assert properties[0].uri == ns.OLPC_ACTIVITY_PROPS
    assert properties[0]['room'] == 'chat@conf.localhost'
    assert properties[0]['activity'] == 'foo_id'
    property = xpath.queryForNodes('/properties/property', properties[0])
    assert (property is not None and len(property) == 2), repr(property)
    seen = set()
    for p in property:
        seen.add(p['name'])
        if p['name'] == 'title':
            assert p['type'] == 'str'
            assert str(p) == 'I can set the properties too'
        elif p['name'] == 'private':
            assert p['type'] == 'bool'
            assert str(p) == '0'
        else:
            assert False, 'Unexpected property %s' % p['name']
    assert 'title' in seen, seen
    assert 'private' in seen, seen
    # public again: properties are re-published to PEP
    event = q.expect('stream-iq', iq_type='set')
    event.stanza['type'] = 'result'
    event.stanza['to'] = 'test@localhost'
    event.stanza['from'] = 'test@localhost'
    stream.send(event.stanza)
    message = event.stanza
    activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
            message)
    assert (activities is not None and len(activities) == 1), repr(activities)
    assert activities[0].uri == ns.OLPC_ACTIVITY_PROPS
    properties = xpath.queryForNodes('/activities/properties', activities[0])
    assert (properties is not None and len(properties) == 1), repr(properties)
    assert properties[0].uri == ns.OLPC_ACTIVITY_PROPS
    assert properties[0]['room'] == 'chat@conf.localhost'
    assert properties[0]['activity'] == 'foo_id'
    property = xpath.queryForNodes('/properties/property', properties[0])
    assert (property is not None and len(property) == 2), repr(property)
    seen = set()
    for p in property:
        seen.add(p['name'])
        if p['name'] == 'title':
            assert p['type'] == 'str'
            assert str(p) == 'I can set the properties too'
        elif p['name'] == 'private':
            assert p['type'] == 'bool'
            assert str(p) == '0'
        else:
            assert False, 'Unexpected property %s' % p['name']
    assert 'title' in seen, seen
    assert 'private' in seen, seen
    event = q.expect('stream-iq', iq_type='set')
    event.stanza['type'] = 'result'
    event.stanza['to'] = 'test@localhost'
    event.stanza['from'] = 'test@localhost'
    stream.send(event.stanza)
    message = event.stanza
    activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
            message)
    assert (activities is not None and len(activities) == 1), repr(activities)
    assert activities[0].uri == ns.OLPC_ACTIVITIES
    activity = xpath.queryForNodes('/activities/activity', activities[0])
    assert (activity is not None and len(activity) == 1), repr(activity)
    assert activity[0]['room'] == 'chat@conf.localhost'
    assert activity[0]['type'] == 'foo_id' # sic
    q.expect('dbus-return', method='SetProperties')
    text_chan.Close()
    # we must echo the MUC presence so the room will actually close
    event = q.expect('stream-presence', to='chat@conf.localhost/test',
            presence_type='unavailable')
    echo_muc_presence(q, stream, event.stanza, 'none', 'participant')
    # leaving the room removes the activity and its properties from PEP
    event = q.expect('stream-iq', iq_type='set')
    event.stanza['type'] = 'result'
    event.stanza['to'] = 'test@localhost'
    event.stanza['from'] = 'test@localhost'
    stream.send(event.stanza)
    message = event.stanza
    activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
            message)
    assert (activities is not None and len(activities) == 1), repr(activities)
    assert activities[0].uri == ns.OLPC_ACTIVITIES
    activity = xpath.queryForNodes('/activities/activity', activities[0])
    assert activity is None, repr(activity)
    event = q.expect('stream-iq', iq_type='set')
    event.stanza['type'] = 'result'
    event.stanza['to'] = 'test@localhost'
    event.stanza['from'] = 'test@localhost'
    stream.send(event.stanza)
    message = event.stanza
    activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
            message)
    assert (activities is not None and len(activities) == 1), repr(activities)
    assert activities[0].uri == ns.OLPC_ACTIVITY_PROPS
    properties = xpath.queryForNodes('/activities/properties', activities[0])
    assert properties is None, repr(properties)
# Run the scenario under the gabbletest harness when invoked directly.
if __name__ == '__main__':
    exec_test(test)
| Ziemin/telepathy-gabble | tests/twisted/olpc/olpc-muc-prop-change.py | Python | lgpl-2.1 | 14,987 |
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
    """Register the 'razorback_felspur' mobile template with the spawn service."""
    template = MobileTemplate()

    # Identity, level and spawn behaviour
    template.setCreatureName('desert_razorback_felspur')
    template.setLevel(24)
    template.setDifficulty(Difficulty.NORMAL)
    template.setMinSpawnDistance(4)
    template.setMaxSpawnDistance(8)
    template.setDeathblow(False)
    template.setScale(1)

    # Harvestable resources
    template.setMeatType("Herbivore Meat")
    template.setMeatAmount(65)
    template.setHideType("Leathery Hide")
    template.setHideAmount(25)
    template.setBoneType("Animal Bone")
    template.setBoneAmount(40)

    # Social / aggression settings
    template.setSocialGroup("zucca Boar")
    template.setAssistRange(6)
    template.setStalker(False)
    template.setOptionsBitmask(Options.ATTACKABLE)

    # Appearance template(s)
    appearances = Vector()
    appearances.add('object/mobile/shared_zucca_boar.iff')
    template.setTemplates(appearances)

    # Natural weapon
    weapons = Vector()
    weapons.add(WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic'))
    template.setWeaponTemplateVector(weapons)

    # Special attacks
    attacks = Vector()
    attacks.add('bm_charge_2')
    attacks.add('bm_dampen_pain_2')
    attacks.add('bm_slash_2')
    template.setDefaultAttack('creatureMeleeAttack')
    template.setAttacks(attacks)

    core.spawnService.addMobileTemplate('razorback_felspur', template)
    return
#!/usr/bin/env python
'''
mavlink python utility functions
Copyright Andrew Tridgell 2011
Released under GNU GPL version 3 or later
'''
import socket, math, struct, time, os, fnmatch, array, sys, errno
from math import *
from mavextra import *
if os.getenv('MAVLINK09') or 'MAVLINK09' in os.environ:
import mavlinkv09 as mavlink
else:
import mavlinkv10 as mavlink
def mavlink10():
    '''return True if using MAVLink 1.0 (i.e. MAVLINK09 is not set in the environment)'''
    # idiomatic membership negation: `x not in y` rather than `not x in y`
    return 'MAVLINK09' not in os.environ
def evaluate_expression(expression, vars):
    '''Evaluate a Python expression against `vars`; return None when the
    expression references an unknown name or divides by zero.'''
    try:
        return eval(expression, globals(), vars)
    except (NameError, ZeroDivisionError):
        return None
def evaluate_condition(condition, vars):
    '''Evaluate a boolean condition string against `vars`.
    A None condition is vacuously True; evaluation errors count as False.'''
    if condition is None:
        return True
    result = evaluate_expression(condition, vars)
    return False if result is None else result
mavfile_global = None
class location(object):
    '''A GPS coordinate: latitude/longitude in degrees, plus altitude and heading.'''
    def __init__(self, lat, lng, alt=0, heading=0):
        self.lat = lat
        self.lng = lng
        self.alt = alt
        self.heading = heading

    def __str__(self):
        # heading is deliberately omitted from the string form
        return "lat=%.6f,lon=%.6f,alt=%.1f" % (self.lat, self.lng, self.alt)
class mavfile(object):
'''a generic mavlink port'''
    def __init__(self, fd, address, source_system=255, notimestamps=False, input=True):
        """
        Base constructor for a MAVLink connection.

        fd: file descriptor (or None) usable for select()-style multiplexing
        address: remote address; exact format depends on the transport subclass
        source_system: MAVLink system id used on outgoing packets
        notimestamps: if True, stamp messages with vehicle uptime instead
        input: if True, register this connection as the global default
        """
        global mavfile_global
        if input:
            mavfile_global = self
        self.fd = fd
        self.address = address
        # map of message-type name -> most recent message of that type
        self.messages = { 'MAV' : self }
        if mavlink.WIRE_PROTOCOL_VERSION == "1.0":
            self.messages['HOME'] = mavlink.MAVLink_gps_raw_int_message(0,0,0,0,0,0,0,0,0,0)
            # alias the 0.9 name onto the 1.0 message class for compatibility
            mavlink.MAVLink_waypoint_message = mavlink.MAVLink_mission_item_message
        else:
            self.messages['HOME'] = mavlink.MAVLink_gps_raw_message(0,0,0,0,0,0,0,0,0)
        self.params = {}
        self.target_system = 0
        self.target_component = 0
        self.source_system = source_system
        self.first_byte = True
        self.robust_parsing = True
        self.mav = mavlink.MAVLink(self, srcSystem=self.source_system)
        self.mav.robust_parsing = self.robust_parsing
        self.logfile = None
        self.logfile_raw = None
        self.param_fetch_in_progress = False
        self.param_fetch_complete = False
        self.start_time = time.time()
        self.flightmode = "UNKNOWN"
        self.base_mode = 0
        self.timestamp = 0
        # hooks: message_hooks run on every received message,
        # idle_hooks run while recv_match() is blocked waiting
        self.message_hooks = []
        self.idle_hooks = []
        self.uptime = 0.0
        self.notimestamps = notimestamps
        self._timestamp = None
        self.ground_pressure = None
        self.ground_temperature = None
        self.altitude = 0
        self.WIRE_PROTOCOL_VERSION = mavlink.WIRE_PROTOCOL_VERSION
        # per-source-system sequence numbers for packet-loss accounting
        self.last_seq = {}
        self.mav_loss = 0
        self.mav_count = 0
        self.stop_on_EOF = False
    def auto_mavlink_version(self, buf):
        '''auto-switch mavlink protocol version based on the first magic byte'''
        global mavlink
        if len(buf) == 0:
            return
        # 85 (0x55) is the MAVLink 0.9 magic byte, 254 (0xFE) is 1.0
        # NOTE(review): ord(buf[0]) assumes buf is a Python 2 str — confirm.
        if not ord(buf[0]) in [ 85, 254 ]:
            return
        self.first_byte = False
        if self.WIRE_PROTOCOL_VERSION == "0.9" and ord(buf[0]) == 254:
            import mavlinkv10 as mavlink
        elif self.WIRE_PROTOCOL_VERSION == "1.0" and ord(buf[0]) == 85:
            import mavlinkv09 as mavlink
            os.environ['MAVLINK09'] = '1'
        else:
            return
        # switch protocol: rebuild the parser but preserve the user callback
        (callback, callback_args, callback_kwargs) = (self.mav.callback,
                                                      self.mav.callback_args,
                                                      self.mav.callback_kwargs)
        self.mav = mavlink.MAVLink(self, srcSystem=self.source_system)
        self.mav.robust_parsing = self.robust_parsing
        self.WIRE_PROTOCOL_VERSION = mavlink.WIRE_PROTOCOL_VERSION
        (self.mav.callback, self.mav.callback_args, self.mav.callback_kwargs) = (callback,
                                                                                 callback_args,
                                                                                 callback_kwargs)
def recv(self, n=None):
'''default recv method'''
raise RuntimeError('no recv() method supplied')
def close(self, n=None):
'''default close method'''
raise RuntimeError('no close() method supplied')
def write(self, buf):
'''default write method'''
raise RuntimeError('no write() method supplied')
def pre_message(self):
'''default pre message call'''
return
def post_message(self, msg):
'''default post message call'''
if '_posted' in msg.__dict__:
return
msg._posted = True
msg._timestamp = time.time()
type = msg.get_type()
self.messages[type] = msg
if 'usec' in msg.__dict__:
self.uptime = msg.usec * 1.0e-6
if 'time_boot_ms' in msg.__dict__:
self.uptime = msg.time_boot_ms * 1.0e-3
if self._timestamp is not None:
if self.notimestamps:
msg._timestamp = self.uptime
else:
msg._timestamp = self._timestamp
src_system = msg.get_srcSystem()
if not (
# its the radio or planner
(src_system == ord('3') and msg.get_srcComponent() == ord('D')) or
msg.get_type() == 'BAD_DATA'):
if not src_system in self.last_seq:
last_seq = -1
else:
last_seq = self.last_seq[src_system]
seq = (last_seq+1) % 256
seq2 = msg.get_seq()
if seq != seq2 and last_seq != -1:
diff = (seq2 - seq) % 256
self.mav_loss += diff
#print("lost %u seq=%u seq2=%u src_system=%u" % (diff, seq, seq2, src_system))
self.last_seq[src_system] = seq2
self.mav_count += 1
self.timestamp = msg._timestamp
if type == 'HEARTBEAT':
self.target_system = msg.get_srcSystem()
self.target_component = msg.get_srcComponent()
if mavlink.WIRE_PROTOCOL_VERSION == '1.0' and msg.type != mavlink.MAV_TYPE_GCS:
self.flightmode = mode_string_v10(msg)
self.base_mode = msg.base_mode
elif type == 'PARAM_VALUE':
s = str(msg.param_id)
self.params[str(msg.param_id)] = msg.param_value
if msg.param_index+1 == msg.param_count:
self.param_fetch_in_progress = False
self.param_fetch_complete = True
elif type == 'SYS_STATUS' and mavlink.WIRE_PROTOCOL_VERSION == '0.9':
self.flightmode = mode_string_v09(msg)
elif type == 'GPS_RAW':
if self.messages['HOME'].fix_type < 2:
self.messages['HOME'] = msg
elif type == 'GPS_RAW_INT':
if self.messages['HOME'].fix_type < 3:
self.messages['HOME'] = msg
for hook in self.message_hooks:
hook(self, msg)
def packet_loss(self):
'''packet loss as a percentage'''
if self.mav_count == 0:
return 0
return (100.0*self.mav_loss)/(self.mav_count+self.mav_loss)
    def recv_msg(self):
        '''message receive routine: read bytes until one full message parses,
        returning the message, or None on EOF'''
        self.pre_message()
        while True:
            n = self.mav.bytes_needed()
            s = self.recv(n)
            # EOF: give up, unless the parser still has buffered bytes to drain
            if len(s) == 0 and (len(self.mav.buf) == 0 or self.stop_on_EOF):
                return None
            if self.logfile_raw:
                self.logfile_raw.write(str(s))
            if self.first_byte:
                # sniff the first magic byte to choose MAVLink 0.9 vs 1.0
                self.auto_mavlink_version(s)
            msg = self.mav.parse_char(s)
            if msg:
                self.post_message(msg)
                return msg
    def recv_match(self, condition=None, type=None, blocking=False, timeout=None):
        '''recv the next MAVLink message that matches the given condition
        type can be a string or a list of strings'''
        if type is not None and not isinstance(type, list):
            type = [type]
        start_time = time.time()
        while True:
            if timeout is not None:
                # NOTE(review): the timeout is only checked between receive
                # attempts; a blocking recv() can overrun it.
                if start_time + timeout < time.time():
                    return None
            m = self.recv_msg()
            if m is None:
                if blocking:
                    # let the caller do housekeeping while we poll
                    for hook in self.idle_hooks:
                        hook(self)
                    time.sleep(0.01)
                    continue
                return None
            # non-matching messages are consumed (and cached) but not returned
            if type is not None and not m.get_type() in type:
                continue
            if not evaluate_condition(condition, self.messages):
                continue
            return m
def mavlink10(self):
'''return True if using MAVLink 1.0'''
return self.WIRE_PROTOCOL_VERSION == "1.0"
def setup_logfile(self, logfile, mode='w'):
'''start logging to the given logfile, with timestamps'''
self.logfile = open(logfile, mode=mode)
def setup_logfile_raw(self, logfile, mode='w'):
'''start logging raw bytes to the given logfile, without timestamps'''
self.logfile_raw = open(logfile, mode=mode)
def wait_heartbeat(self, blocking=True):
'''wait for a heartbeat so we know the target system IDs'''
return self.recv_match(type='HEARTBEAT', blocking=blocking)
def param_fetch_all(self):
'''initiate fetch of all parameters'''
if time.time() - getattr(self, 'param_fetch_start', 0) < 2.0:
# don't fetch too often
return
self.param_fetch_start = time.time()
self.param_fetch_in_progress = True
self.mav.param_request_list_send(self.target_system, self.target_component)
    def param_fetch_one(self, name):
        '''initiate fetch of one parameter, by numeric index or by name'''
        try:
            idx = int(name)
            self.mav.param_request_read_send(self.target_system, self.target_component, "", idx)
        except Exception:
            # NOTE(review): this broad except also swallows send errors from
            # the index branch and retries by name — confirm that is intended.
            self.mav.param_request_read_send(self.target_system, self.target_component, name, -1)
def time_since(self, mtype):
'''return the time since the last message of type mtype was received'''
if not mtype in self.messages:
return time.time() - self.start_time
return time.time() - self.messages[mtype]._timestamp
def param_set_send(self, parm_name, parm_value, parm_type=None):
'''wrapper for parameter set'''
if self.mavlink10():
if parm_type == None:
parm_type = mavlink.MAVLINK_TYPE_FLOAT
self.mav.param_set_send(self.target_system, self.target_component,
parm_name, parm_value, parm_type)
else:
self.mav.param_set_send(self.target_system, self.target_component,
parm_name, parm_value)
def waypoint_request_list_send(self):
'''wrapper for waypoint_request_list_send'''
if self.mavlink10():
self.mav.mission_request_list_send(self.target_system, self.target_component)
else:
self.mav.waypoint_request_list_send(self.target_system, self.target_component)
def waypoint_clear_all_send(self):
'''wrapper for waypoint_clear_all_send'''
if self.mavlink10():
self.mav.mission_clear_all_send(self.target_system, self.target_component)
else:
self.mav.waypoint_clear_all_send(self.target_system, self.target_component)
def waypoint_request_send(self, seq):
'''wrapper for waypoint_request_send'''
if self.mavlink10():
self.mav.mission_request_send(self.target_system, self.target_component, seq)
else:
self.mav.waypoint_request_send(self.target_system, self.target_component, seq)
def waypoint_set_current_send(self, seq):
'''wrapper for waypoint_set_current_send'''
if self.mavlink10():
self.mav.mission_set_current_send(self.target_system, self.target_component, seq)
else:
self.mav.waypoint_set_current_send(self.target_system, self.target_component, seq)
def waypoint_current(self):
'''return current waypoint'''
if self.mavlink10():
m = self.recv_match(type='MISSION_CURRENT', blocking=True)
else:
m = self.recv_match(type='WAYPOINT_CURRENT', blocking=True)
return m.seq
def waypoint_count_send(self, seq):
'''wrapper for waypoint_count_send'''
if self.mavlink10():
self.mav.mission_count_send(self.target_system, self.target_component, seq)
else:
self.mav.waypoint_count_send(self.target_system, self.target_component, seq)
def set_mode_flag(self, flag, enable):
'''
Enables/ disables MAV_MODE_FLAG
@param flag The mode flag,
see MAV_MODE_FLAG enum
@param enable Enable the flag, (True/False)
'''
if self.mavlink10():
mode = self.base_mode
if (enable == True):
mode = mode | flag
elif (enable == False):
mode = mode & ~flag
self.mav.command_long_send(self.target_system, self.target_component,
mavlink.MAV_CMD_DO_SET_MODE, 0,
mode,
0, 0, 0, 0, 0, 0)
else:
print("Set mode flag not supported")
def set_mode_auto(self):
'''enter auto mode'''
if self.mavlink10():
self.mav.command_long_send(self.target_system, self.target_component,
mavlink.MAV_CMD_MISSION_START, 0, 0, 0, 0, 0, 0, 0, 0)
else:
MAV_ACTION_SET_AUTO = 13
self.mav.action_send(self.target_system, self.target_component, MAV_ACTION_SET_AUTO)
def set_mode_rtl(self):
'''enter RTL mode'''
if self.mavlink10():
self.mav.command_long_send(self.target_system, self.target_component,
mavlink.MAV_CMD_NAV_RETURN_TO_LAUNCH, 0, 0, 0, 0, 0, 0, 0, 0)
else:
MAV_ACTION_RETURN = 3
self.mav.action_send(self.target_system, self.target_component, MAV_ACTION_RETURN)
def set_mode_manual(self):
'''enter MANUAL mode'''
if self.mavlink10():
self.mav.command_long_send(self.target_system, self.target_component,
mavlink.MAV_CMD_DO_SET_MODE, 0,
mavlink.MAV_MODE_MANUAL_ARMED,
0, 0, 0, 0, 0, 0)
else:
MAV_ACTION_SET_MANUAL = 12
self.mav.action_send(self.target_system, self.target_component, MAV_ACTION_SET_MANUAL)
def set_mode_fbwa(self):
'''enter FBWA mode'''
if self.mavlink10():
self.mav.command_long_send(self.target_system, self.target_component,
mavlink.MAV_CMD_DO_SET_MODE, 0,
mavlink.MAV_MODE_STABILIZE_ARMED,
0, 0, 0, 0, 0, 0)
else:
print("Forcing FBWA not supported")
def set_mode_loiter(self):
'''enter LOITER mode'''
if self.mavlink10():
self.mav.command_long_send(self.target_system, self.target_component,
mavlink.MAV_CMD_NAV_LOITER_UNLIM, 0, 0, 0, 0, 0, 0, 0, 0)
else:
MAV_ACTION_LOITER = 27
self.mav.action_send(self.target_system, self.target_component, MAV_ACTION_LOITER)
    def set_servo(self, channel, pwm):
        '''set a servo value (pwm is presumably in microseconds -- confirm
        against the autopilot's MAV_CMD_DO_SET_SERVO handling)'''
        # MAV_CMD_DO_SET_SERVO: param1 = output channel, param2 = PWM value
        self.mav.command_long_send(self.target_system, self.target_component,
                                   mavlink.MAV_CMD_DO_SET_SERVO, 0,
                                   channel, pwm,
                                   0, 0, 0, 0, 0)
def calibrate_imu(self):
'''calibrate IMU'''
if self.mavlink10():
self.mav.command_long_send(self.target_system, self.target_component,
mavlink.MAV_CMD_PREFLIGHT_CALIBRATION, 0,
1, 1, 1, 1, 0, 0, 0)
else:
MAV_ACTION_CALIBRATE_GYRO = 17
self.mav.action_send(self.target_system, self.target_component, MAV_ACTION_CALIBRATE_GYRO)
    def calibrate_level(self):
        '''calibrate accels'''
        if self.mavlink10():
            # NOTE(review): this sends exactly the same PREFLIGHT_CALIBRATION
            # parameter set (1,1,1,1) as calibrate_imu() -- confirm this is the
            # intended accelerometer-calibration parameter combination
            self.mav.command_long_send(self.target_system, self.target_component,
                                           mavlink.MAV_CMD_PREFLIGHT_CALIBRATION, 0,
                                           1, 1, 1, 1, 0, 0, 0)
        else:
            # legacy MAVLink 0.9 action interface
            MAV_ACTION_CALIBRATE_ACC = 19
            self.mav.action_send(self.target_system, self.target_component, MAV_ACTION_CALIBRATE_ACC)
def calibrate_pressure(self):
'''calibrate pressure'''
if self.mavlink10():
self.mav.command_long_send(self.target_system, self.target_component,
mavlink.MAV_CMD_PREFLIGHT_CALIBRATION, 0,
0, 0, 1, 0, 0, 0, 0)
else:
MAV_ACTION_CALIBRATE_PRESSURE = 20
self.mav.action_send(self.target_system, self.target_component, MAV_ACTION_CALIBRATE_PRESSURE)
def reboot_autopilot(self):
'''reboot the autopilot'''
if self.mavlink10():
self.mav.command_long_send(self.target_system, self.target_component,
mavlink.MAV_CMD_PREFLIGHT_REBOOT_SHUTDOWN, 0,
1, 0, 0, 0, 0, 0, 0)
    def wait_gps_fix(self):
        '''block until a GPS fix with non-zero latitude and altitude arrives'''
        # a VFR_HUD first, so altitude-related state is populated
        self.recv_match(type='VFR_HUD', blocking=True)
        if self.mavlink10():
            # MAVLink 1.0 message; fix_type==3 is treated as a usable fix here
            self.recv_match(type='GPS_RAW_INT', blocking=True,
                            condition='GPS_RAW_INT.fix_type==3 and GPS_RAW_INT.lat != 0 and GPS_RAW_INT.alt != 0')
        else:
            # MAVLink 0.9 message; fix_type==2 is treated as a usable fix here
            self.recv_match(type='GPS_RAW', blocking=True,
                            condition='GPS_RAW.fix_type==2 and GPS_RAW.lat != 0 and GPS_RAW.alt != 0')
    def location(self, relative_alt=False):
        '''return current location as a location(lat, lng, alt, heading);
        lat/lng in degrees, heading from VFR_HUD'''
        self.wait_gps_fix()
        # wait for another VFR_HUD, to ensure we have correct altitude
        self.recv_match(type='VFR_HUD', blocking=True)
        self.recv_match(type='GLOBAL_POSITION_INT', blocking=True)
        if relative_alt:
            # scaled by 0.001 -- presumably relative_alt is in millimetres
            alt = self.messages['GLOBAL_POSITION_INT'].relative_alt*0.001
        else:
            alt = self.messages['VFR_HUD'].alt
        # NOTE(review): lat/lon come from GPS_RAW_INT even on 0.9 links, where
        # wait_gps_fix() waited on GPS_RAW instead -- confirm this is intended
        return location(self.messages['GPS_RAW_INT'].lat*1.0e-7,
                        self.messages['GPS_RAW_INT'].lon*1.0e-7,
                        alt,
                        self.messages['VFR_HUD'].heading)
def arducopter_arm(self):
'''arm motors (arducopter only)'''
if self.mavlink10():
self.mav.command_long_send(
self.target_system, # target_system
mavlink.MAV_COMP_ID_SYSTEM_CONTROL, # target_component
mavlink.MAV_CMD_COMPONENT_ARM_DISARM, # command
0, # confirmation
1, # param1 (1 to indicate arm)
0, # param2 (all other params meaningless)
0, # param3
0, # param4
0, # param5
0, # param6
0) # param7
    def arducopter_disarm(self):
        '''disarm motors (arducopter only)'''
        # (original docstring said 'calibrate pressure' -- copy-paste error)
        if self.mavlink10():
            self.mav.command_long_send(
                self.target_system,  # target_system
                mavlink.MAV_COMP_ID_SYSTEM_CONTROL, # target_component
                mavlink.MAV_CMD_COMPONENT_ARM_DISARM, # command
                0, # confirmation
                0, # param1 (0 to indicate disarm)
                0, # param2 (all other params meaningless)
                0, # param3
                0, # param4
                0, # param5
                0, # param6
                0) # param7
def field(self, type, field, default=None):
'''convenient function for returning an arbitrary MAVLink
field with a default'''
if not type in self.messages:
return default
return getattr(self.messages[type], field, default)
def param(self, name, default=None):
'''convenient function for returning an arbitrary MAVLink
parameter with a default'''
if not name in self.params:
return default
return self.params[name]
def set_close_on_exec(fd):
    '''set the close-on-exec flag on a file descriptor so it is not leaked
    to child processes. Ignores all exceptions (e.g. platforms without
    fcntl, such as Windows).

    (original docstring said "clone on exec" -- typo fixed)
    '''
    try:
        import fcntl
        flags = fcntl.fcntl(fd, fcntl.F_GETFD)
        fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)
    except Exception:
        pass
class mavserial(mavfile):
    '''a serial mavlink port'''
    def __init__(self, device, baud=115200, autoreconnect=False, source_system=255):
        import serial
        self.baud = baud
        self.device = device
        # when True, a failed write() triggers a blocking reconnect via reset()
        self.autoreconnect = autoreconnect
        # timeout=0 gives non-blocking reads; all flow control disabled
        self.port = serial.Serial(self.device, self.baud, timeout=0,
                                  dsrdtr=False, rtscts=False, xonxoff=False)
        try:
            fd = self.port.fileno()
            set_close_on_exec(fd)
        except Exception:
            # no usable file descriptor on this platform (e.g. Windows)
            fd = None
        mavfile.__init__(self, fd, device, source_system=source_system)
    def close(self):
        self.port.close()
    def recv(self,n=None):
        '''read up to n bytes (defaults to what the parser currently needs)'''
        if n is None:
            n = self.mav.bytes_needed()
        if self.fd is None:
            # no fd to select() on, so clamp the read to the buffered amount
            waiting = self.port.inWaiting()
            if waiting < n:
                n = waiting
        return self.port.read(n)
    def write(self, buf):
        '''write buf to the port; returns -1 on failure (after an optional
        reconnect attempt when autoreconnect is set)'''
        try:
            return self.port.write(buf)
        except Exception:
            if self.autoreconnect:
                self.reset()
            return -1
    def reset(self):
        '''block until the serial device can be reopened, retrying forever'''
        import serial
        self.port.close()
        while True:
            try:
                # NOTE(review): reopened with timeout=1, unlike the
                # constructor's timeout=0 -- confirm this is intended
                self.port = serial.Serial(self.device, self.baud, timeout=1,
                                          dsrdtr=False, rtscts=False, xonxoff=False)
                try:
                    self.fd = self.port.fileno()
                except Exception:
                    self.fd = None
                return
            except Exception:
                print("Failed to reopen %s" % self.device)
                time.sleep(0.5)
class mavudp(mavfile):
    '''a UDP mavlink socket'''
    def __init__(self, device, input=True, source_system=255):
        # device is "host:port"; input=True binds and listens (server mode),
        # input=False sends to the given destination (client mode)
        a = device.split(':')
        if len(a) != 2:
            print("UDP ports must be specified as host:port")
            sys.exit(1)
        self.port = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.udp_server = input
        if input:
            self.port.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            self.port.bind((a[0], int(a[1])))
        else:
            self.destination_addr = (a[0], int(a[1]))
        set_close_on_exec(self.port.fileno())
        self.port.setblocking(0)
        # address of the last peer a datagram was received from; used as the
        # reply target in server mode
        self.last_address = None
        mavfile.__init__(self, self.port.fileno(), device, source_system=source_system, input=input)
    def close(self):
        self.port.close()
    def recv(self,n=None):
        '''receive one datagram (n is ignored; reads up to 300 bytes);
        returns "" when nothing is pending or the peer refused'''
        try:
            data, self.last_address = self.port.recvfrom(300)
        except socket.error as e:
            if e.errno in [ errno.EAGAIN, errno.EWOULDBLOCK, errno.ECONNREFUSED ]:
                return ""
            raise
        return data
    def write(self, buf):
        '''send buf; in server mode the reply goes to the last sender (dropped
        silently if nobody has contacted us yet); send errors are ignored'''
        try:
            if self.udp_server:
                if self.last_address:
                    self.port.sendto(buf, self.last_address)
            else:
                self.port.sendto(buf, self.destination_addr)
        except socket.error:
            pass
    def recv_msg(self):
        '''message receive routine for UDP link'''
        self.pre_message()
        s = self.recv()
        if len(s) == 0:
            return None
        if self.first_byte:
            self.auto_mavlink_version(s)
        # one datagram may contain several messages: every parsed message is
        # delivered via post_message, but only the first is returned
        msg = self.mav.parse_buffer(s)
        if msg is not None:
            for m in msg:
                self.post_message(m)
            return msg[0]
        return None
class mavtcp(mavfile):
    '''a TCP mavlink socket'''
    def __init__(self, device, source_system=255):
        # device is "host:port"
        parts = device.split(':')
        if len(parts) != 2:
            print("TCP ports must be specified as host:port")
            sys.exit(1)
        self.port = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.destination_addr = (parts[0], int(parts[1]))
        self.port.connect(self.destination_addr)
        self.port.setblocking(0)
        set_close_on_exec(self.port.fileno())
        # disable Nagle so small MAVLink packets go out immediately
        self.port.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
        mavfile.__init__(self, self.port.fileno(), device, source_system=source_system)
    def close(self):
        self.port.close()
    def recv(self,n=None):
        '''read up to n bytes; returns "" when no data is pending'''
        if n is None:
            n = self.mav.bytes_needed()
        try:
            return self.port.recv(n)
        except socket.error as e:
            if e.errno not in (errno.EAGAIN, errno.EWOULDBLOCK):
                raise
            return ""
    def write(self, buf):
        '''send buf; socket errors are silently ignored'''
        try:
            self.port.send(buf)
        except socket.error:
            pass
class mavlogfile(mavfile):
    '''a MAVLink logfile reader/writer'''
    def __init__(self, filename, planner_format=None,
                 write=False, append=False,
                 robust_parsing=True, notimestamps=False, source_system=255):
        self.filename = filename
        self.writeable = write
        self.robust_parsing = robust_parsing
        # when set, timestamps are 21-byte planner-style text records rather
        # than 8-byte big-endian microsecond values
        self.planner_format = planner_format
        # 2^63, offset used to decode planner-format 100ns timestamps
        self._two64 = math.pow(2.0, 63)
        mode = 'rb'
        if self.writeable:
            if append:
                mode = 'ab'
            else:
                mode = 'wb'
        self.f = open(filename, mode)
        self.filesize = os.path.getsize(filename)
        # read progress through the file, 0..100
        self.percent = 0
        mavfile.__init__(self, None, filename, source_system=source_system, notimestamps=notimestamps)
        if self.notimestamps:
            self._timestamp = 0
        else:
            self._timestamp = time.time()
        self.stop_on_EOF = True
    def close(self):
        self.f.close()
    def recv(self,n=None):
        if n is None:
            n = self.mav.bytes_needed()
        return self.f.read(n)
    def write(self, buf):
        self.f.write(buf)
    def pre_message(self):
        '''read timestamp if needed'''
        # read the timestamp
        if self.filesize != 0:
            self.percent = (100.0 * self.f.tell()) / self.filesize
        if self.notimestamps:
            return
        if self.planner_format:
            # 21-byte record: '-' then 20 digits of 100ns units, then ':'
            tbuf = self.f.read(21)
            if len(tbuf) != 21 or tbuf[0] != '-' or tbuf[20] != ':':
                raise RuntimeError('bad planner timestamp %s' % tbuf)
            hnsec = self._two64 + float(tbuf[0:20])
            t = hnsec * 1.0e-7         # convert to seconds
            t -= 719163 * 24 * 60 * 60 # convert to 1970 base
            self._link = 0
        else:
            # 8-byte big-endian microseconds; the low two bits carry a link ID
            tbuf = self.f.read(8)
            if len(tbuf) != 8:
                return
            (tusec,) = struct.unpack('>Q', tbuf)
            t = tusec * 1.0e-6
            self._link = tusec & 0x3
        self._timestamp = t
    def post_message(self, msg):
        '''add timestamp to message'''
        # read the timestamp
        super(mavlogfile, self).post_message(msg)
        if self.planner_format:
            self.f.read(1) # trailing newline
        self.timestamp = msg._timestamp
class mavchildexec(mavfile):
    '''a MAVLink child processes reader/writer'''
    def __init__(self, filename, source_system=255):
        from subprocess import Popen, PIPE
        import fcntl
        self.filename = filename
        self.child = Popen(filename, shell=True, stdout=PIPE, stdin=PIPE)
        self.fd = self.child.stdout.fileno()
        # put the child's stdout into non-blocking mode so recv() never stalls.
        # (the original set O_NONBLOCK twice on the same fd -- self.fd IS
        # child.stdout.fileno() -- so the duplicate has been removed)
        fl = fcntl.fcntl(self.fd, fcntl.F_GETFL)
        fcntl.fcntl(self.fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
        mavfile.__init__(self, self.fd, filename, source_system=source_system)
    def close(self):
        # subprocess.Popen has no close() method (the original called
        # self.child.close(), which raised AttributeError); terminate the
        # child and reap it instead
        self.child.terminate()
        self.child.wait()
    def recv(self,n=None):
        '''read a single byte from the child's stdout; returns '' on error'''
        try:
            x = self.child.stdout.read(1)
        except Exception:
            return ''
        return x
    def write(self, buf):
        self.child.stdin.write(buf)
def mavlink_connection(device, baud=115200, source_system=255,
                       planner_format=None, write=False, append=False,
                       robust_parsing=True, notimestamps=False, input=True):
    '''make a serial or UDP mavlink connection

    device selects the transport:
      "tcp:host:port" -> mavtcp
      "udp:host:port" -> mavudp
      "host:port"     -> mavudp (unless the suffix looks like a log file)
      existing file   -> mavchildexec for .elf, otherwise mavlogfile
      anything else   -> mavserial at the given baud rate
    '''
    if device.startswith('tcp:'):
        return mavtcp(device[4:], source_system=source_system)
    if device.startswith('udp:'):
        return mavudp(device[4:], input=input, source_system=source_system)
    # list of suffixes to prevent setting DOS paths as UDP sockets
    logsuffixes = [ 'log', 'raw', 'tlog' ]
    suffix = device.split('.')[-1].lower()
    if device.find(':') != -1 and not suffix in logsuffixes:
        return mavudp(device, source_system=source_system, input=input)
    if os.path.isfile(device):
        if device.endswith(".elf"):
            # run an ELF executable as a child process speaking MAVLink
            return mavchildexec(device, source_system=source_system)
        else:
            return mavlogfile(device, planner_format=planner_format, write=write,
                              append=append, robust_parsing=robust_parsing, notimestamps=notimestamps,
                              source_system=source_system)
    return mavserial(device, baud=baud, source_system=source_system)
class periodic_event(object):
    '''a class for fixed frequency events (frequency in Hz)'''
    def __init__(self, frequency):
        self.frequency = float(frequency)
        self.last_time = time.time()
    def force(self):
        '''make the next trigger() call return True immediately'''
        self.last_time = 0
    def trigger(self):
        '''return True (and restart the interval) if at least 1/frequency
        seconds have elapsed since the last trigger'''
        now = time.time()
        if now < self.last_time + 1.0 / self.frequency:
            return False
        self.last_time = now
        return True
# use curses.ascii for printability checks when it is available; fall back
# to a plain ASCII range test otherwise (see is_printable below)
try:
    from curses import ascii
    have_ascii = True
except:
    have_ascii = False
def is_printable(c):
    '''return True if c -- a single character or an int code point -- is
    printable. Uses curses.ascii when available, otherwise a plain
    ASCII 32..126 range test.

    (the original declared "global have_ascii", which is unnecessary for a
    read-only access and has been removed)
    '''
    if have_ascii:
        return ascii.isprint(c)
    ic = c if isinstance(c, int) else ord(c)
    return 32 <= ic <= 126
def all_printable(buf):
    '''return True if every character in buf is printable or one of the
    common whitespace characters CR, LF, TAB'''
    return all(is_printable(c) or c in ('\r', '\n', '\t') for c in buf)
class SerialPort(object):
    '''auto-detected serial port'''
    def __init__(self, device, description=None, hwid=None):
        # device path/name plus optional human-readable description and
        # hardware ID, both of which may be absent
        self.device = device
        self.description = description
        self.hwid = hwid
    def __str__(self):
        # "device : description : hwid", omitting the parts that are None
        pieces = [self.device]
        if self.description is not None:
            pieces.append(self.description)
        if self.hwid is not None:
            pieces.append(self.hwid)
        return " : ".join(pieces)
def auto_detect_serial_win32(preferred_list=['*']):
    '''try to auto-detect serial ports on win32'''
    try:
        import scanwin32
        # (local renamed from 'list', which shadowed the builtin)
        ports = sorted(scanwin32.comports())
    except:
        return []
    matches = []
    # ports whose description or hwid matches a preferred pattern win outright
    for order, port, desc, hwid in ports:
        for pattern in preferred_list:
            if fnmatch.fnmatch(desc, pattern) or fnmatch.fnmatch(hwid, pattern):
                matches.append(SerialPort(port, description=desc, hwid=hwid))
    if matches:
        return matches
    # otherwise return everything that was found
    return [SerialPort(port, description=desc, hwid=hwid)
            for order, port, desc, hwid in ports]
def auto_detect_serial_unix(preferred_list=['*']):
    '''try to auto-detect serial ports on unix'''
    # (original docstring said "win32" -- copy-paste error from the
    # function above)
    import glob
    # candidates: classic serial, USB serial, ACM, and stable by-id names
    glist = glob.glob('/dev/ttyS*') + glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyACM*') + glob.glob('/dev/serial/by-id/*')
    ret = []
    # try preferred ones first
    for d in glist:
        for preferred in preferred_list:
            if fnmatch.fnmatch(d, preferred):
                ret.append(SerialPort(d))
    if len(ret) > 0:
        return ret
    # now the rest
    for d in glist:
        ret.append(SerialPort(d))
    return ret
def auto_detect_serial(preferred_list=['*']):
    '''try to auto-detect serial ports, dispatching on the host OS'''
    # os.name == 'nt' covers Windows; everything else uses the unix scanner
    if os.name == 'nt':
        return auto_detect_serial_win32(preferred_list=preferred_list)
    return auto_detect_serial_unix(preferred_list=preferred_list)
def mode_string_v09(msg):
    '''mode string for 0.9 protocol; msg is a heartbeat-style message with
    .mode and .nav_mode attributes. Returns "Mode(mode,nav_mode)" for
    unknown combinations.'''
    mode = msg.mode
    nav_mode = msg.nav_mode
    MAV_MODE_UNINIT = 0
    MAV_MODE_MANUAL = 2
    MAV_MODE_GUIDED = 3
    MAV_MODE_AUTO = 4
    MAV_MODE_TEST1 = 5
    MAV_MODE_TEST2 = 6
    MAV_MODE_TEST3 = 7
    MAV_NAV_GROUNDED = 0
    MAV_NAV_LIFTOFF = 1
    MAV_NAV_HOLD = 2
    MAV_NAV_WAYPOINT = 3
    MAV_NAV_VECTOR = 4
    MAV_NAV_RETURNING = 5
    MAV_NAV_LANDING = 6
    MAV_NAV_LOST = 7
    MAV_NAV_LOITER = 8
    cmode = (mode, nav_mode)
    mapping = {
        (MAV_MODE_UNINIT, MAV_NAV_GROUNDED)  : "INITIALISING",
        (MAV_MODE_MANUAL, MAV_NAV_VECTOR)    : "MANUAL",
        (MAV_MODE_TEST3, MAV_NAV_VECTOR)     : "CIRCLE",
        (MAV_MODE_TEST1, MAV_NAV_VECTOR)     : "STABILIZE",
        (MAV_MODE_TEST2, MAV_NAV_LIFTOFF)    : "FBWA",
        (MAV_MODE_AUTO, MAV_NAV_WAYPOINT)    : "AUTO",
        (MAV_MODE_AUTO, MAV_NAV_RETURNING)   : "RTL",
        (MAV_MODE_AUTO, MAV_NAV_LOITER)      : "LOITER",
        (MAV_MODE_AUTO, MAV_NAV_LIFTOFF)     : "TAKEOFF",
        (MAV_MODE_AUTO, MAV_NAV_LANDING)     : "LANDING",
        (MAV_MODE_AUTO, MAV_NAV_HOLD)        : "LOITER",
        # the (GUIDED, VECTOR) entry appeared twice in the original table
        # (with the same value); listed once here
        (MAV_MODE_GUIDED, MAV_NAV_VECTOR)    : "GUIDED",
        (MAV_MODE_GUIDED, MAV_NAV_WAYPOINT)  : "GUIDED",
        # raw numeric modes -- presumably custom 0.9-era ArduCopter values
        (100, MAV_NAV_VECTOR) : "STABILIZE",
        (101, MAV_NAV_VECTOR) : "ACRO",
        (102, MAV_NAV_VECTOR) : "ALT_HOLD",
        (107, MAV_NAV_VECTOR) : "CIRCLE",
        (109, MAV_NAV_VECTOR) : "LAND",
        }
    if cmode in mapping:
        return mapping[cmode]
    return "Mode(%s,%s)" % cmode
def mode_string_v10(msg):
    '''mode string for 1.0 protocol, from heartbeat'''
    if not msg.base_mode & mavlink.MAV_MODE_FLAG_CUSTOM_MODE_ENABLED:
        # no custom mode advertised: show the raw base_mode bits
        return "Mode(0x%08x)" % msg.base_mode
    # ArduPlane custom modes
    mapping_apm = {
        0: 'MANUAL',
        1: 'CIRCLE',
        2: 'STABILIZE',
        3: 'TRAINING',
        5: 'FBWA',
        6: 'FBWB',
        7: 'FBWC',
        10: 'AUTO',
        11: 'RTL',
        12: 'LOITER',
        13: 'TAKEOFF',
        14: 'LAND',
        15: 'GUIDED',
        16: 'INITIALISING',
    }
    # ArduCopter custom modes
    mapping_acm = {
        0: 'STABILIZE',
        1: 'ACRO',
        2: 'ALT_HOLD',
        3: 'AUTO',
        4: 'GUIDED',
        5: 'LOITER',
        6: 'RTL',
        7: 'CIRCLE',
        8: 'POSITION',
        9: 'LAND',
        10: 'OF_LOITER',
        11: 'APPROACH',
    }
    if msg.type == mavlink.MAV_TYPE_QUADROTOR and msg.custom_mode in mapping_acm:
        return mapping_acm[msg.custom_mode]
    if msg.type == mavlink.MAV_TYPE_FIXED_WING and msg.custom_mode in mapping_apm:
        return mapping_apm[msg.custom_mode]
    return "Mode(%u)" % msg.custom_mode
class x25crc(object):
    '''x25 CRC accumulator - based on checksum.h from mavlink library.
    Accepts str, bytes or array.array('B') input.'''
    def __init__(self, buf=''):
        # X.25 CRC seed value
        self.crc = 0xffff
        self.accumulate(buf)
    def accumulate(self, buf):
        '''add in some more bytes'''
        # local renamed from 'bytes', which shadowed the builtin
        byte_buf = array.array('B')
        if isinstance(buf, array.array):
            byte_buf.extend(buf)
        else:
            if isinstance(buf, str):
                # accept text input for backwards compatibility
                buf = buf.encode()
            # frombytes() replaces fromstring(), which was removed in
            # Python 3.9
            byte_buf.frombytes(buf)
        accum = self.crc
        for b in byte_buf:
            tmp = b ^ (accum & 0xff)
            tmp = (tmp ^ (tmp << 4)) & 0xFF
            accum = (accum >> 8) ^ (tmp << 8) ^ (tmp << 3) ^ (tmp >> 4)
            accum = accum & 0xFFFF
        self.crc = accum
| x75/mavlink | pymavlink/mavutil.py | Python | lgpl-3.0 | 36,824 |
# This library is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see
# <http://www.gnu.org/licenses/>.
"""
unit tests for javatools/distinfo.py
author: Konstantin Shemyak <konstantin@shemyak.com>
license: LGPL v.3
"""
import os
from unittest import TestCase
from . import get_data_fn
from javatools.distinfo import main
class DistinfoTest(TestCase):

    # sample distribution used by every test
    dist = get_data_fn(os.path.join("test_distinfo", "dist1"))

    def _check_option(self, option):
        # run distinfo's main() over the sample dist with one option and
        # verify it exits successfully (returns 0)
        self.assertEqual(0, main(["argv0", option, self.dist]))

    def test_classinfo_options(self):
        # classinfo-specific option is accepted:
        self._check_option("-p")

    def test_jarinfo_options(self):
        # jarinfo-specific option is accepted:
        self._check_option("--jar-classes")

    def test_distinfo_options(self):
        # distinfo-specific option is accepted:
        self._check_option("--dist-provides")
| obriencj/python-javatools | tests/distinfo.py | Python | lgpl-3.0 | 1,430 |
from __future__ import unicode_literals
from .abc import ABCIE
from .abc7news import Abc7NewsIE
from .academicearth import AcademicEarthCourseIE
from .addanime import AddAnimeIE
from .adobetv import (
AdobeTVIE,
AdobeTVVideoIE,
)
from .adultswim import AdultSwimIE
from .aftenposten import AftenpostenIE
from .aftonbladet import AftonbladetIE
from .airmozilla import AirMozillaIE
from .aljazeera import AlJazeeraIE
from .alphaporno import AlphaPornoIE
from .anitube import AnitubeIE
from .anysex import AnySexIE
from .aol import AolIE
from .allocine import AllocineIE
from .aparat import AparatIE
from .appletrailers import AppleTrailersIE
from .archiveorg import ArchiveOrgIE
from .ard import ARDIE, ARDMediathekIE
from .arte import (
ArteTvIE,
ArteTVPlus7IE,
ArteTVCreativeIE,
ArteTVConcertIE,
ArteTVFutureIE,
ArteTVDDCIE,
ArteTVEmbedIE,
)
from .atresplayer import AtresPlayerIE
from .atttechchannel import ATTTechChannelIE
from .audiomack import AudiomackIE, AudiomackAlbumIE
from .azubu import AzubuIE
from .baidu import BaiduVideoIE
from .bambuser import BambuserIE, BambuserChannelIE
from .bandcamp import BandcampIE, BandcampAlbumIE
from .bbccouk import BBCCoUkIE
from .beeg import BeegIE
from .behindkink import BehindKinkIE
from .beatportpro import BeatportProIE
from .bet import BetIE
from .bild import BildIE
from .bilibili import BiliBiliIE
from .blinkx import BlinkxIE
from .bliptv import BlipTVIE, BlipTVUserIE
from .bloomberg import BloombergIE
from .bpb import BpbIE
from .br import BRIE
from .breakcom import BreakIE
from .brightcove import BrightcoveIE
from .buzzfeed import BuzzFeedIE
from .byutv import BYUtvIE
from .c56 import C56IE
from .camdemy import (
CamdemyIE,
CamdemyFolderIE
)
from .canal13cl import Canal13clIE
from .canalplus import CanalplusIE
from .canalc2 import Canalc2IE
from .cbs import CBSIE
from .cbsnews import CBSNewsIE
from .cbssports import CBSSportsIE
from .ccc import CCCIE
from .ceskatelevize import CeskaTelevizeIE
from .channel9 import Channel9IE
from .chilloutzone import ChilloutzoneIE
from .chirbit import (
ChirbitIE,
ChirbitProfileIE,
)
from .cinchcast import CinchcastIE
from .cinemassacre import CinemassacreIE
from .clipfish import ClipfishIE
from .cliphunter import CliphunterIE
from .clipsyndicate import ClipsyndicateIE
from .cloudy import CloudyIE
from .clubic import ClubicIE
from .cmt import CMTIE
from .cnet import CNETIE
from .cnn import (
CNNIE,
CNNBlogsIE,
CNNArticleIE,
)
from .collegehumor import CollegeHumorIE
from .collegerama import CollegeRamaIE
from .comedycentral import ComedyCentralIE, ComedyCentralShowsIE
from .comcarcoff import ComCarCoffIE
from .commonmistakes import CommonMistakesIE, UnicodeBOMIE
from .condenast import CondeNastIE
from .cracked import CrackedIE
from .criterion import CriterionIE
from .crooksandliars import CrooksAndLiarsIE
from .crunchyroll import (
CrunchyrollIE,
CrunchyrollShowPlaylistIE
)
from .cspan import CSpanIE
from .ctsnews import CtsNewsIE
from .dailymotion import (
DailymotionIE,
DailymotionPlaylistIE,
DailymotionUserIE,
DailymotionCloudIE,
)
from .daum import DaumIE
from .dbtv import DBTVIE
from .dctp import DctpTvIE
from .deezer import DeezerPlaylistIE
from .dfb import DFBIE
from .dhm import DHMIE
from .dotsub import DotsubIE
from .douyutv import DouyuTVIE
from .dramafever import (
DramaFeverIE,
DramaFeverSeriesIE,
)
from .dreisat import DreiSatIE
from .drbonanza import DRBonanzaIE
from .drtuber import DrTuberIE
from .drtv import DRTVIE
from .dvtv import DVTVIE
from .dump import DumpIE
from .dumpert import DumpertIE
from .defense import DefenseGouvFrIE
from .discovery import DiscoveryIE
from .divxstage import DivxStageIE
from .dropbox import DropboxIE
from .eagleplatform import EaglePlatformIE
from .ebaumsworld import EbaumsWorldIE
from .echomsk import EchoMskIE
from .ehow import EHowIE
from .eighttracks import EightTracksIE
from .einthusan import EinthusanIE
from .eitb import EitbIE
from .ellentv import (
EllenTVIE,
EllenTVClipsIE,
)
from .elpais import ElPaisIE
from .embedly import EmbedlyIE
from .engadget import EngadgetIE
from .eporner import EpornerIE
from .eroprofile import EroProfileIE
from .escapist import EscapistIE
from .espn import ESPNIE
from .everyonesmixtape import EveryonesMixtapeIE
from .exfm import ExfmIE
from .expotv import ExpoTVIE
from .extremetube import ExtremeTubeIE
from .facebook import FacebookIE
from .faz import FazIE
from .fc2 import FC2IE
from .firstpost import FirstpostIE
from .firsttv import FirstTVIE
from .fivemin import FiveMinIE
from .fivetv import FiveTVIE
from .fktv import (
FKTVIE,
FKTVPosteckeIE,
)
from .flickr import FlickrIE
from .folketinget import FolketingetIE
from .footyroom import FootyRoomIE
from .fourtube import FourTubeIE
from .foxgay import FoxgayIE
from .foxnews import FoxNewsIE
from .foxsports import FoxSportsIE
from .franceculture import FranceCultureIE
from .franceinter import FranceInterIE
from .francetv import (
PluzzIE,
FranceTvInfoIE,
FranceTVIE,
GenerationQuoiIE,
CultureboxIE,
)
from .freesound import FreesoundIE
from .freespeech import FreespeechIE
from .freevideo import FreeVideoIE
from .funnyordie import FunnyOrDieIE
from .gamekings import GamekingsIE
from .gameone import (
GameOneIE,
GameOnePlaylistIE,
)
from .gamersyde import GamersydeIE
from .gamespot import GameSpotIE
from .gamestar import GameStarIE
from .gametrailers import GametrailersIE
from .gazeta import GazetaIE
from .gdcvault import GDCVaultIE
from .generic import GenericIE
from .gfycat import GfycatIE
from .giantbomb import GiantBombIE
from .giga import GigaIE
from .glide import GlideIE
from .globo import GloboIE
from .godtube import GodTubeIE
from .goldenmoustache import GoldenMoustacheIE
from .golem import GolemIE
from .googleplus import GooglePlusIE
from .googlesearch import GoogleSearchIE
from .gorillavid import GorillaVidIE
from .goshgay import GoshgayIE
from .groupon import GrouponIE
from .hark import HarkIE
from .hearthisat import HearThisAtIE
from .heise import HeiseIE
from .hellporno import HellPornoIE
from .helsinki import HelsinkiIE
from .hentaistigma import HentaiStigmaIE
from .historicfilms import HistoricFilmsIE
from .history import HistoryIE
from .hitbox import HitboxIE, HitboxLiveIE
from .hornbunny import HornBunnyIE
from .hostingbulk import HostingBulkIE
from .hotnewhiphop import HotNewHipHopIE
from .howcast import HowcastIE
from .howstuffworks import HowStuffWorksIE
from .huffpost import HuffPostIE
from .hypem import HypemIE
from .iconosquare import IconosquareIE
from .ign import IGNIE, OneUPIE
from .imdb import (
ImdbIE,
ImdbListIE
)
from .imgur import ImgurIE
from .ina import InaIE
from .infoq import InfoQIE
from .instagram import InstagramIE, InstagramUserIE
from .internetvideoarchive import InternetVideoArchiveIE
from .iprima import IPrimaIE
from .iqiyi import IqiyiIE
from .ivi import (
IviIE,
IviCompilationIE
)
from .izlesene import IzleseneIE
from .jadorecettepub import JadoreCettePubIE
from .jeuxvideo import JeuxVideoIE
from .jove import JoveIE
from .jukebox import JukeboxIE
from .jpopsukitv import JpopsukiIE
from .kaltura import KalturaIE
from .kanalplay import KanalPlayIE
from .kankan import KankanIE
from .karaoketv import KaraoketvIE
from .karrierevideos import KarriereVideosIE
from .keezmovies import KeezMoviesIE
from .khanacademy import KhanAcademyIE
from .kickstarter import KickStarterIE
from .keek import KeekIE
from .kontrtube import KontrTubeIE
from .krasview import KrasViewIE
from .ku6 import Ku6IE
from .la7 import LA7IE
from .laola1tv import Laola1TvIE
from .letv import (
LetvIE,
LetvTvIE,
LetvPlaylistIE
)
from .libsyn import LibsynIE
from .lifenews import (
LifeNewsIE,
LifeEmbedIE,
)
from .liveleak import LiveLeakIE
from .livestream import (
LivestreamIE,
LivestreamOriginalIE,
LivestreamShortenerIE,
)
from .lnkgo import LnkGoIE
from .lrt import LRTIE
from .lynda import (
LyndaIE,
LyndaCourseIE
)
from .m6 import M6IE
from .macgamestore import MacGameStoreIE
from .mailru import MailRuIE
from .malemotion import MalemotionIE
from .mdr import MDRIE
from .megavideoz import MegaVideozIE
from .metacafe import MetacafeIE
from .metacritic import MetacriticIE
from .mgoon import MgoonIE
from .minhateca import MinhatecaIE
from .ministrygrid import MinistryGridIE
from .miomio import MioMioIE
from .mit import TechTVMITIE, MITIE, OCWMITIE
from .mitele import MiTeleIE
from .mixcloud import MixcloudIE
from .mlb import MLBIE
from .mpora import MporaIE
from .moevideo import MoeVideoIE
from .mofosex import MofosexIE
from .mojvideo import MojvideoIE
from .moniker import MonikerIE
from .mooshare import MooshareIE
from .morningstar import MorningstarIE
from .motherless import MotherlessIE
from .motorsport import MotorsportIE
from .movieclips import MovieClipsIE
from .moviezine import MoviezineIE
from .movshare import MovShareIE
from .mtv import (
MTVIE,
MTVServicesEmbeddedIE,
MTVIggyIE,
)
from .muenchentv import MuenchenTVIE
from .musicplayon import MusicPlayOnIE
from .musicvault import MusicVaultIE
from .muzu import MuzuTVIE
from .myspace import MySpaceIE, MySpaceAlbumIE
from .myspass import MySpassIE
from .myvideo import MyVideoIE
from .myvidster import MyVidsterIE
from .nationalgeographic import NationalGeographicIE
from .naver import NaverIE
from .nba import NBAIE
from .nbc import (
NBCIE,
NBCNewsIE,
NBCSportsIE,
NBCSportsVPlayerIE,
)
from .ndr import (
NDRIE,
NJoyIE,
)
from .ndtv import NDTVIE
from .netzkino import NetzkinoIE
from .nerdcubed import NerdCubedFeedIE
from .nerdist import NerdistIE
from .newgrounds import NewgroundsIE
from .newstube import NewstubeIE
from .nextmedia import (
NextMediaIE,
NextMediaActionNewsIE,
AppleDailyIE,
)
from .nfb import NFBIE
from .nfl import NFLIE
from .nhl import (
NHLIE,
NHLNewsIE,
NHLVideocenterIE,
)
from .niconico import NiconicoIE, NiconicoPlaylistIE
from .ninegag import NineGagIE
from .noco import NocoIE
from .normalboots import NormalbootsIE
from .nosvideo import NosVideoIE
from .nova import NovaIE
from .novamov import NovaMovIE
from .nowness import NownessIE
from .nowtv import NowTVIE
from .nowvideo import NowVideoIE
from .npo import (
NPOIE,
NPOLiveIE,
NPORadioIE,
NPORadioFragmentIE,
TegenlichtVproIE,
)
from .nrk import (
NRKIE,
NRKPlaylistIE,
NRKTVIE,
)
from .ntvde import NTVDeIE
from .ntvru import NTVRuIE
from .nytimes import (
NYTimesIE,
NYTimesArticleIE,
)
from .nuvid import NuvidIE
from .odnoklassniki import OdnoklassnikiIE
from .oktoberfesttv import OktoberfestTVIE
from .onionstudios import OnionStudiosIE
from .ooyala import (
OoyalaIE,
OoyalaExternalIE,
)
from .openfilm import OpenFilmIE
from .orf import (
ORFTVthekIE,
ORFOE1IE,
ORFFM4IE,
ORFIPTVIE,
)
from .parliamentliveuk import ParliamentLiveUKIE
from .patreon import PatreonIE
from .pbs import PBSIE
from .philharmoniedeparis import PhilharmonieDeParisIE
from .phoenix import PhoenixIE
from .photobucket import PhotobucketIE
from .pinkbike import PinkbikeIE
from .planetaplay import PlanetaPlayIE
from .pladform import PladformIE
from .played import PlayedIE
from .playfm import PlayFMIE
from .playvid import PlayvidIE
from .playwire import PlaywireIE
from .podomatic import PodomaticIE
from .porn91 import Porn91IE
from .pornhd import PornHdIE
from .pornhub import (
PornHubIE,
PornHubPlaylistIE,
)
from .pornotube import PornotubeIE
from .pornovoisines import PornoVoisinesIE
from .pornoxo import PornoXOIE
from .primesharetv import PrimeShareTVIE
from .promptfile import PromptFileIE
from .prosiebensat1 import ProSiebenSat1IE
from .puls4 import Puls4IE
from .pyvideo import PyvideoIE
from .qqmusic import (
QQMusicIE,
QQMusicSingerIE,
QQMusicAlbumIE,
QQMusicToplistIE,
)
from .quickvid import QuickVidIE
from .r7 import R7IE
from .radiode import RadioDeIE
from .radiojavan import RadioJavanIE
from .radiobremen import RadioBremenIE
from .radiofrance import RadioFranceIE
from .rai import RaiIE
from .rbmaradio import RBMARadioIE
from .redtube import RedTubeIE
from .restudy import RestudyIE
from .reverbnation import ReverbNationIE
from .ringtv import RingTVIE
from .ro220 import Ro220IE
from .rottentomatoes import RottenTomatoesIE
from .roxwel import RoxwelIE
from .rtbf import RTBFIE
from .rte import RteIE
from .rtlnl import RtlNlIE
from .rtl2 import RTL2IE
from .rtp import RTPIE
from .rts import RTSIE
from .rtve import RTVEALaCartaIE, RTVELiveIE, RTVEInfantilIE
from .ruhd import RUHDIE
from .rutube import (
RutubeIE,
RutubeChannelIE,
RutubeEmbedIE,
RutubeMovieIE,
RutubePersonIE,
)
from .rutv import RUTVIE
from .ruutu import RuutuIE
from .sandia import SandiaIE
from .safari import (
SafariIE,
SafariCourseIE,
)
from .sapo import SapoIE
from .savefrom import SaveFromIE
from .sbs import SBSIE
from .scivee import SciVeeIE
from .screencast import ScreencastIE
from .screencastomatic import ScreencastOMaticIE
from .screenwavemedia import ScreenwaveMediaIE, TeamFourIE
from .senateisvp import SenateISVPIE
from .servingsys import ServingSysIE
from .sexu import SexuIE
from .sexykarma import SexyKarmaIE
from .shared import SharedIE
from .sharesix import ShareSixIE
from .sina import SinaIE
from .slideshare import SlideshareIE
from .slutload import SlutloadIE
from .smotri import (
SmotriIE,
SmotriCommunityIE,
SmotriUserIE,
SmotriBroadcastIE,
)
from .snagfilms import (
SnagFilmsIE,
SnagFilmsEmbedIE,
)
from .snotr import SnotrIE
from .sohu import SohuIE
from .soompi import (
SoompiIE,
SoompiShowIE,
)
from .soundcloud import (
SoundcloudIE,
SoundcloudSetIE,
SoundcloudUserIE,
SoundcloudPlaylistIE
)
from .soundgasm import (
SoundgasmIE,
SoundgasmProfileIE
)
from .southpark import (
SouthParkIE,
SouthParkDeIE,
SouthParkDkIE,
SouthParkEsIE,
SouthParkNlIE
)
from .space import SpaceIE
from .spankbang import SpankBangIE
from .spankwire import SpankwireIE
from .spiegel import SpiegelIE, SpiegelArticleIE
from .spiegeltv import SpiegeltvIE
from .spike import SpikeIE
from .sport5 import Sport5IE
from .sportbox import (
SportBoxIE,
SportBoxEmbedIE,
)
from .sportdeutschland import SportDeutschlandIE
from .srf import SrfIE
from .srmediathek import SRMediathekIE
from .ssa import SSAIE
from .stanfordoc import StanfordOpenClassroomIE
from .steam import SteamIE
from .streamcloud import StreamcloudIE
from .streamcz import StreamCZIE
from .streetvoice import StreetVoiceIE
from .sunporno import SunPornoIE
from .svt import (
SVTIE,
SVTPlayIE,
)
from .swrmediathek import SWRMediathekIE
from .syfy import SyfyIE
from .sztvhu import SztvHuIE
from .tagesschau import TagesschauIE
from .tapely import TapelyIE
from .tass import TassIE
from .teachertube import (
TeacherTubeIE,
TeacherTubeUserIE,
)
from .teachingchannel import TeachingChannelIE
from .teamcoco import TeamcocoIE
from .techtalks import TechTalksIE
from .ted import TEDIE
from .telebruxelles import TeleBruxellesIE
from .telecinco import TelecincoIE
from .telemb import TeleMBIE
from .teletask import TeleTaskIE
from .tenplay import TenPlayIE
from .testurl import TestURLIE
from .testtube import TestTubeIE
from .tf1 import TF1IE
from .theonion import TheOnionIE
from .theplatform import ThePlatformIE
from .thesixtyone import TheSixtyOneIE
from .thisamericanlife import ThisAmericanLifeIE
from .thisav import ThisAVIE
from .tinypic import TinyPicIE
from .tlc import TlcIE, TlcDeIE
from .tmz import (
TMZIE,
TMZArticleIE,
)
from .tnaflix import (
TNAFlixIE,
EMPFlixIE,
MovieFapIE,
)
from .thvideo import (
THVideoIE,
THVideoPlaylistIE
)
from .toutv import TouTvIE
from .toypics import ToypicsUserIE, ToypicsIE
from .traileraddict import TrailerAddictIE
from .trilulilu import TriluliluIE
from .trutube import TruTubeIE
from .tube8 import Tube8IE
from .tubitv import TubiTvIE
from .tudou import TudouIE
from .tumblr import TumblrIE
from .tunein import TuneInIE
from .turbo import TurboIE
from .tutv import TutvIE
from .tv2 import (
TV2IE,
TV2ArticleIE,
)
from .tv4 import TV4IE
from .tvc import (
TVCIE,
TVCArticleIE,
)
from .tvigle import TvigleIE
from .tvp import TvpIE, TvpSeriesIE
from .tvplay import TVPlayIE
from .tweakers import TweakersIE
from .twentyfourvideo import TwentyFourVideoIE
from .twentytwotracks import (
TwentyTwoTracksIE,
TwentyTwoTracksGenreIE
)
from .twitch import (
TwitchVideoIE,
TwitchChapterIE,
TwitchVodIE,
TwitchProfileIE,
TwitchPastBroadcastsIE,
TwitchBookmarksIE,
TwitchStreamIE,
)
from .twitter import TwitterCardIE
from .ubu import UbuIE
from .udemy import (
UdemyIE,
UdemyCourseIE
)
from .udn import UDNEmbedIE
from .ultimedia import UltimediaIE
from .unistra import UnistraIE
from .urort import UrortIE
from .ustream import UstreamIE, UstreamChannelIE
from .varzesh3 import Varzesh3IE
from .vbox7 import Vbox7IE
from .veehd import VeeHDIE
from .veoh import VeohIE
from .vessel import VesselIE
from .vesti import VestiIE
from .vevo import VevoIE
from .vgtv import (
BTArticleIE,
BTVestlendingenIE,
VGTVIE,
)
from .vh1 import VH1IE
from .vice import ViceIE
from .viddler import ViddlerIE
from .videobam import VideoBamIE
from .videodetective import VideoDetectiveIE
from .videolecturesnet import VideoLecturesNetIE
from .videofyme import VideofyMeIE
from .videomega import VideoMegaIE
from .videopremium import VideoPremiumIE
from .videott import VideoTtIE
from .videoweed import VideoWeedIE
from .vidme import VidmeIE
from .vidzi import VidziIE
from .vier import VierIE, VierVideosIE
from .viewster import ViewsterIE
from .vimeo import (
VimeoIE,
VimeoAlbumIE,
VimeoChannelIE,
VimeoGroupsIE,
VimeoLikesIE,
VimeoReviewIE,
VimeoUserIE,
VimeoWatchLaterIE,
)
from .vimple import VimpleIE
from .vine import (
VineIE,
VineUserIE,
)
from .viki import (
VikiIE,
VikiChannelIE,
)
from .vk import (
VKIE,
VKUserVideosIE,
)
from .vodlocker import VodlockerIE
from .voicerepublic import VoiceRepublicIE
from .vporn import VpornIE
from .vrt import VRTIE
from .vube import VubeIE
from .vuclip import VuClipIE
from .vulture import VultureIE
from .walla import WallaIE
from .washingtonpost import WashingtonPostIE
from .wat import WatIE
from .wayofthemaster import WayOfTheMasterIE
from .wdr import (
WDRIE,
WDRMobileIE,
WDRMausIE,
)
from .webofstories import WebOfStoriesIE
from .weibo import WeiboIE
from .wimp import WimpIE
from .wistia import WistiaIE
from .worldstarhiphop import WorldStarHipHopIE
from .wrzuta import WrzutaIE
from .wsj import WSJIE
from .xbef import XBefIE
from .xboxclips import XboxClipsIE
from .xhamster import (
XHamsterIE,
XHamsterEmbedIE,
)
from .xminus import XMinusIE
from .xnxx import XNXXIE
from .xstream import XstreamIE
from .xtube import XTubeUserIE, XTubeIE
from .xuite import XuiteIE
from .xvideos import XVideosIE
from .xxxymovies import XXXYMoviesIE
from .yahoo import (
YahooIE,
YahooSearchIE,
)
from .yam import YamIE
from .yandexmusic import (
YandexMusicTrackIE,
YandexMusicAlbumIE,
YandexMusicPlaylistIE,
)
from .yesjapan import YesJapanIE
from .ynet import YnetIE
from .youjizz import YouJizzIE
from .youku import YoukuIE
from .youporn import YouPornIE
from .yourupload import YourUploadIE
from .youtube import (
YoutubeIE,
YoutubeChannelIE,
YoutubeFavouritesIE,
YoutubeHistoryIE,
YoutubePlaylistIE,
YoutubeRecommendedIE,
YoutubeSearchDateIE,
YoutubeSearchIE,
YoutubeSearchURLIE,
YoutubeShowIE,
YoutubeSubscriptionsIE,
YoutubeTruncatedIDIE,
YoutubeTruncatedURLIE,
YoutubeUserIE,
YoutubeWatchLaterIE,
)
from .zapiks import ZapiksIE
from .zdf import ZDFIE, ZDFChannelIE
from .zingmp3 import (
ZingMp3SongIE,
ZingMp3AlbumIE,
)
# Collect every extractor class brought into this namespace by the imports
# above. By convention each extractor class name ends with 'IE'.
# GenericIE is excluded from the comprehension and appended last so that it
# is always tried only after every more specific extractor failed to match.
_ALL_CLASSES = [
    klass
    for name, klass in globals().items()
    if name.endswith('IE') and name != 'GenericIE'
]
_ALL_CLASSES.append(GenericIE)
def gen_extractors():
    """ Return a list of an instance of every supported extractor.
    The order does matter; the first extractor matched is the one handling the URL.
    """
    return [extractor_class() for extractor_class in _ALL_CLASSES]
def list_extractors(age_limit):
    """
    Return a list of extractors that are suitable for the given age,
    sorted by extractor ID.
    """
    suitable = (ie for ie in gen_extractors() if ie.is_suitable(age_limit))
    return sorted(suitable, key=lambda ie: ie.IE_NAME.lower())
def get_info_extractor(ie_name):
    """Returns the info extractor class with the given ie_name"""
    # Extractor classes follow the '<name>IE' naming convention.
    return globals()['%sIE' % ie_name]
| apllicationCOM/youtube-dl-api-server | youtube_dl_server/youtube_dl/extractor/__init__.py | Python | unlicense | 20,811 |
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
from datetime import date
from ggrc import db
from ggrc import builder
from ggrc_workflows.models import (Workflow, TaskGroup, TaskGroupTask,
TaskGroupObject, Cycle)
from tests.ggrc.generator import Generator
import random
import copy
class WorkflowsGenerator(Generator):
  """Test-data generator for GGRC workflow models.

  Creates Workflow, TaskGroup, TaskGroupTask, TaskGroupObject and Cycle
  objects through the application's generator API so that the regular
  request handlers and validation run for every generated object.

  All ``data`` parameters previously used mutable default arguments
  (``data={}``); they now default to ``None`` (backward compatible) to
  follow the standard Python idiom.
  """

  def generate_workflow(self, data=None):
    """ create a workflow with dict data

    Args:
      data: optional dict of workflow attributes; a "task_groups" entry
        is popped and used to generate nested task groups.

    return: wf if it was created, or response otherwise
    """
    obj_name = "workflow"
    # Deep-copy so the pops below never mutate the caller's dict.
    data = copy.deepcopy(data) if data else {}
    tgs = data.pop("task_groups", [])
    wf = Workflow(title="wf " + self.random_str())
    obj_dict = self.obj_to_dict(wf, obj_name)
    obj_dict[obj_name].update(data)
    response, workflow = self.generate(Workflow, obj_name, obj_dict)
    for tg in tgs:
      self.generate_task_group(workflow, tg)
    return response, workflow

  def generate_task_group(self, workflow=None, data=None):
    """Create a task group under `workflow`, generating one if needed.

    Nested "task_group_tasks" and "task_group_objects" entries in `data`
    are popped and generated as children of the new task group.
    """
    if not workflow:
      _, workflow = self.generate_workflow()
    data = copy.deepcopy(data) if data else {}
    tgts = data.pop("task_group_tasks", [])
    tgos = data.pop("task_group_objects", [])
    obj_name = "task_group"
    workflow = self._session_add(workflow)
    tg = TaskGroup(
        title="tg " + self.random_str(),
        workflow_id=workflow.id,
        context_id=workflow.context.id,
        contact_id=1
    )
    obj_dict = self.obj_to_dict(tg, obj_name)
    obj_dict[obj_name].update(data)
    response, task_group = self.generate(TaskGroup, obj_name, obj_dict)
    for tgt in tgts:
      self.generate_task_group_task(task_group, tgt)
    for tgo in tgos:
      self.generate_task_group_object(task_group, tgo)
    return response, task_group

  def generate_task_group_task(self, task_group=None, data=None):
    """Create a task in `task_group` with randomized default dates."""
    if not task_group:
      _, task_group = self.generate_task_group()
    task_group = self._session_add(task_group)
    data = data or {}
    default_start = self.random_date()
    default_end = self.random_date(default_start, date.today())
    # randrange excludes its upper bound, so relative days fall in
    # [1, day_range) and months in [1, 12).
    # NOTE(review): this never produces the last day/month; confirm
    # whether excluding day 31 / month 12 is intended.
    day_range = 5 if task_group.workflow.frequency == "weekly" else 31
    obj_name = "task_group_task"
    tgt = TaskGroupTask(
        task_group_id=task_group.id,
        context_id=task_group.context.id,
        title="tgt " + self.random_str(),
        start_date=default_start,
        end_date=default_end,
        relative_start_day=random.randrange(1, day_range),
        relative_start_month=random.randrange(1, 12),
        relative_end_day=random.randrange(1, day_range),
        relative_end_month=random.randrange(1, 12),
        contact_id=1
    )
    obj_dict = self.obj_to_dict(tgt, obj_name)
    obj_dict[obj_name].update(data)
    return self.generate(TaskGroupTask, obj_name, obj_dict)

  def generate_task_group_object(self, task_group=None, obj=None):
    """Map `obj` to `task_group` through a TaskGroupObject."""
    if not task_group:
      _, task_group = self.generate_task_group()
    task_group = self._session_add(task_group)
    obj = self._session_add(obj)
    obj_name = "task_group_object"
    tgo = TaskGroupObject(
        object_id=obj.id,
        object=obj,
        task_group_id=task_group.id,
        context_id=task_group.context.id
    )
    obj_dict = self.obj_to_dict(tgo, obj_name)
    return self.generate(TaskGroupObject, obj_name, obj_dict)

  def generate_cycle(self, workflow=None):
    """Autogenerate a cycle for `workflow`, generating one if needed."""
    if not workflow:
      _, workflow = self.generate_workflow()
    workflow = self._session_add(workflow)  # this should be nicer
    obj_name = "cycle"
    obj_dict = {
        obj_name: {
            "workflow": {
                "id": workflow.id,
                "type": workflow.__class__.__name__,
                "href": "/api/workflows/%d" % workflow.id
            },
            "context": {
                "id": workflow.context.id,
                "type": workflow.context.__class__.__name__,
                # NOTE(review): the context href reuses the /api/workflows
                # path with the context id -- confirm this is intentional.
                "href": "/api/workflows/%d" % workflow.context.id
            },
            "autogenerate": "true"
        }
    }
    return self.generate(Cycle, obj_name, obj_dict)

  def activate_workflow(self, workflow):
    """Set `workflow` to Active; recurring workflows get recurrences."""
    workflow = self._session_add(workflow)
    return self.modify_workflow(workflow, {
        "status": "Active",
        "recurrences": workflow.frequency != "one_time"
    })

  def modify_workflow(self, wf=None, data=None):
    """PUT the published representation of `wf` merged with `data`."""
    if not wf:
      _, wf = self.generate_workflow()
    wf = self._session_add(wf)
    data = data or {}
    obj_name = "workflow"
    obj_dict = builder.json.publish(wf)
    builder.json.publish_representation(obj_dict)
    obj_dict.update(data)
    default = {obj_name: obj_dict}
    response, workflow = self.modify(wf, obj_name, default)
    return response, workflow

  def modify_object(self, obj, data=None):
    """PUT the published representation of any object merged with `data`."""
    obj = self._session_add(obj)
    data = data or {}
    obj_name = obj._inflector.table_singular
    obj_dict = builder.json.publish(obj)
    builder.json.publish_representation(obj_dict)
    obj_dict.update(data)
    obj_data = {obj_name: obj_dict}
    response, generated_object = self.modify(obj, obj_name, obj_data)
    return response, generated_object

  def _session_add(self, obj):
    """ Sometimes tests throw conflicting state present error.

    Re-attach `obj` to the session; if that fails, reload it by id.
    """
    try:
      db.session.add(obj)
      return obj
    except Exception:  # was a bare except; keep the best-effort fallback
      return obj.__class__.query.get(obj.id)
| hyperNURb/ggrc-core | src/tests/ggrc_workflows/generator.py | Python | apache-2.0 | 5,290 |
import sqlalchemy
from pypi_org.data.modelbase import SqlAlchemyBase
class Maintainer(SqlAlchemyBase):
    """Association-table row linking a user to a package they maintain.

    The composite primary key (user_id, package_id) ensures each
    (user, package) pair is stored at most once.
    """
    __tablename__ = 'maintainers'
    # Both columns together form the composite primary key.
    user_id: int = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    package_id: str = sqlalchemy.Column(sqlalchemy.String, primary_key=True)
from enum import Enum
import time
from fysom import Fysom, FysomError
# WIP(joey): Fysom is not thread safe -- multiple threads could theoretically traverse
# WIP(joey): the same state transition simultaneously.
from app.util import log
class BuildState(str, Enum):
    """Possible states for the FSM.

    Subclasses str so members compare equal to (and serialize as) their
    raw string values.
    """
    QUEUED = 'QUEUED'
    PREPARING = 'PREPARING'
    PREPARED = 'PREPARED'
    BUILDING = 'BUILDING'
    FINISHED = 'FINISHED'
    ERROR = 'ERROR'
    CANCELED = 'CANCELED'
class BuildEvent(str, Enum):
    """Events that correspond to FSM state transitions.

    Subclasses str so members compare equal to their raw string values;
    they are passed directly to Fysom as event names.
    """
    START_PREPARE = 'START_PREPARE'
    FINISH_PREPARE = 'FINISH_PREPARE'
    START_BUILDING = 'START_BUILDING'
    POSTBUILD_TASKS_COMPLETE = 'POSTBUILD_TASKS_COMPLETE'
    FAIL = 'FAIL'
    CANCEL = 'CANCEL'
class BuildFsm(object):
    """Finite-state machine tracking one build's lifecycle (Fysom-backed).

                                  +--------+
           (initial state) >>>    | QUEUED |-----+
                                  +--------+     |
                                      |          |
                        START_PREPARE |          | CANCEL
                                      v          | START_PREPARE
                                +-----------+    | FINISH_PREPARE
                                | PREPARING |----|              +-------+
                                +-----------+    |              |       |
                                      |          | CANCEL   +----------+ |
                       FINISH_PREPARE |          |--------->| CANCELED |<--+
                                      v          |          +----------+
                                +----------+     |               |
                                | PREPARED |----|                |
                                +----------+    |                | FAIL
                                      |         |                v
                       START_BUILDING |         |   FAIL    +---------+
                                      v         |-----+--->|  ERROR   |<--+
                                +----------+    |     |    +---------+   |
                                | BUILDING |-(--+     |         |        |
                                +----------+    |     |         +-------+
                                      |         |     |           FAIL
             POSTBUILD_TASKS_COMPLETE |         |     |           CANCEL
                                      v         v     |
                                   +----------+       |
                               +-->| FINISHED |-------+
                               |   +----------+
                               |        |
                               +--------+
                                 CANCEL
    """
    def __init__(self, build_id, enter_state_callbacks):
        """
        :param build_id: id of the build this FSM tracks (used for logging)
        :type build_id: int
        :param enter_state_callbacks: map from state to a callable invoked
            when the FSM enters that state
        :type enter_state_callbacks: dict[BuildState, callable]
        """
        self._logger = log.get_logger(__name__)
        self._build_id = build_id
        self._transition_timestamps = {state: None for state in BuildState} # initialize all timestamps to None
        self._fsm = self._create_state_machine()
        for build_state, callback in enter_state_callbacks.items():
            self._register_enter_state_callback(build_state, callback)
    def _create_state_machine(self):
        """
        Create the Fysom object and set up transitions and states. Note that the first transition
        (none ==> initial) is triggered immediately on instantiation.
        :rtype: Fysom
        """
        return Fysom({
            'initial': BuildState.QUEUED,
            'events': [
                {'name': BuildEvent.START_PREPARE,
                 'src': BuildState.QUEUED,
                 'dst': BuildState.PREPARING},
                {'name': BuildEvent.FINISH_PREPARE,
                 'src': BuildState.PREPARING,
                 'dst': BuildState.PREPARED},
                {'name': BuildEvent.START_BUILDING,
                 'src': BuildState.PREPARED,
                 'dst': BuildState.BUILDING},
                {'name': BuildEvent.POSTBUILD_TASKS_COMPLETE,
                 'src': [
                     BuildState.PREPARED,
                     BuildState.BUILDING,
                 ],
                 'dst': BuildState.FINISHED},
                {'name': BuildEvent.CANCEL,
                 'src': [
                     BuildState.QUEUED,
                     BuildState.PREPARING,
                     BuildState.PREPARED,
                     BuildState.BUILDING,
                 ],
                 'dst': BuildState.CANCELED},
                {'name': BuildEvent.FAIL,
                 'src': '*',  # '*' means this transition can happen from any state.
                 'dst': BuildState.ERROR},
                # Cancellation immediately after request might cause this transition.
                {'name': BuildEvent.START_PREPARE,
                 'src': BuildState.CANCELED,
                 'dst': '='},  # '=' means the destination state is the same as the source state (no-op).
                # Cancellation during PREPARING will cause this transition.
                {'name': BuildEvent.FINISH_PREPARE,
                 'src': BuildState.CANCELED,
                 'dst': '='},
                # CANCEL is a no-op for a few states.
                {'name': BuildEvent.CANCEL,
                 'src': [
                     BuildState.CANCELED,
                     BuildState.ERROR,
                     BuildState.FINISHED,
                 ],
                 'dst': '='},
            ],
            'callbacks': {
                # Fysom invokes this on every state change; we use it to
                # record per-state entry timestamps.
                'onchangestate': self._record_state_timestamp,
            }
        })
    @property
    def state(self):
        """
        The current state of the state machine.
        :rtype: BuildState
        """
        return self._fsm.current
    @property
    def transition_timestamps(self):
        """
        Return a dict of BuildState to the timestamp that the state machine entered that state.
        States not yet entered map to None.
        :rtype: dict[BuildState, float|None]
        """
        return self._transition_timestamps.copy()  # return a copy to prevent external modification
    def trigger(self, build_event, __trigger_fail_on_error=True, **kwargs):
        """
        Trigger the specified event to make the state machine transition to a new state.

        Note: `__trigger_fail_on_error` is name-mangled inside this class
        body, so both the parameter and the keyword in the recursive call
        below consistently refer to `_BuildFsm__trigger_fail_on_error`.

        :param build_event: the event to trigger on the state machine
        :type build_event: BuildEvent
        :param __trigger_fail_on_error: Whether to make a recursive call in the case of failure -- this
            exists only for this method's internal use to prevent infinite recursion.
        :type __trigger_fail_on_error: bool
        :param kwargs: Parameters that will be attached to the event which is passed to callbacks
        :type kwargs: dict
        """
        try:
            self._fsm.trigger(build_event, **kwargs)
        except FysomError as ex:
            # Don't raise transition errors; just fail the build.
            self._logger.exception('Error during build state transition.')
            if __trigger_fail_on_error:
                error_msg = 'Error during build state transition. ({}: {})'.format(type(ex).__name__, ex)
                self.trigger(BuildEvent.FAIL, error_msg=error_msg, __trigger_fail_on_error=False)
            else:
                self._logger.critical('Build attempted to move to ERROR state but the transition itself failed!')
    def _register_enter_state_callback(self, build_state, callback):
        """
        Register a callback that will be executed by Fysom when the specified state is entered. This
        leverages Fysom magic which calls methods by name using a convention ("onenter<state_name>").
        :type build_state: BuildState
        :type callback: callable
        """
        # build_state is a str subclass, so concatenation yields e.g. "onenterBUILDING".
        setattr(self._fsm, 'onenter' + build_state, callback)
    def _record_state_timestamp(self, event):
        """
        Record a timestamp for a given build status. This is used to record the timing of the various
        build phases and is exposed via the Build object's API representation.

        NOTE(review): the debug call below uses '{}' placeholders with lazy
        args -- stdlib logging uses %-style, so this relies on the custom
        logger from app.util.log supporting brace formatting; confirm.
        """
        self._logger.debug('Build {} transitioned from {} to {}', self._build_id, event.src, event.dst)
        build_state = event.dst
        if self._transition_timestamps.get(build_state) is not None:
            # Re-entering a state (e.g. via a '=' no-op or repeated FAIL) overwrites its timestamp.
            self._logger.warning(
                'Overwriting timestamp for build {}, state {}'.format(self._build_id, build_state))
        self._transition_timestamps[build_state] = time.time()
| josephharrington/ClusterRunner | app/master/build_fsm.py | Python | apache-2.0 | 8,309 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""## Control Flow Operations
TensorFlow provides several operations and classes that you can use to control
the execution of operations and add conditional dependencies to your graph.
@@identity
@@tuple
@@group
@@no_op
@@count_up_to
@@cond
## Logical Operators
TensorFlow provides several operations that you can use to add logical operators
to your graph.
@@logical_and
@@logical_not
@@logical_or
@@logical_xor
## Comparison Operators
TensorFlow provides several operations that you can use to add comparison
operators to your graph.
@@equal
@@not_equal
@@less
@@less_equal
@@greater
@@greater_equal
@@select
@@where
## Debugging Operations
TensorFlow provides several operations that you can use to validate values and
debug your graph.
@@is_finite
@@is_inf
@@is_nan
@@verify_tensor_all_finite
@@check_numerics
@@add_check_numerics_ops
@@Assert
@@Print
"""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import six
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import common_shapes
from tensorflow.python.ops import constant_op
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_control_flow_ops
from tensorflow.python.ops import gen_data_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import tensor_array_ops
# pylint: disable=wildcard-import,undefined-variable
from tensorflow.python.ops.gen_control_flow_ops import *
from tensorflow.python.platform import logging
# We override the 'tuple' for a control flow op, so we keep python's
# existing 'tuple' for later use in this module (the builtin would
# otherwise be shadowed by the module-level `tuple` op).
_basetuple = tuple
# pylint: disable=protected-access
def _Identity(data, name=None):
  """Return a tensor with the same shape and contents as the input tensor.

  Args:
    data: A Tensor.
    name: A name for this operation (optional).

  Returns:
    A Tensor with the same type and value as the input Tensor.
  """
  # Ref-typed tensors need the ref-preserving identity op.
  if data.dtype.is_ref_dtype:
    return gen_array_ops._ref_identity(data, name=name)
  return array_ops.identity(data, name=name)
def _NextIteration(data, name=None):
  # Select the ref-aware variant of the op when `data` is ref-typed.
  iteration_op = ref_next_iteration if data.dtype.is_ref_dtype else next_iteration
  return iteration_op(data, name=name)
def _Merge(values, name=None):
  # The ref variant is only valid when every input is ref-typed.
  if any(not v.dtype.is_ref_dtype for v in values):
    return gen_control_flow_ops._merge(values, name)
  return gen_control_flow_ops._ref_merge(values, name)
def _Enter(data, frame_name, is_constant=False, parallel_iterations=10,
           use_ref=True, name=None):
  """Creates or finds a child frame, and makes `data` available to it.

  The unique `frame_name` is used by the `Executor` to identify frames. If
  `is_constant` is true, `output` is a constant in the child frame; otherwise
  it may be changed in the child frame. At most `parallel_iterations`
  iterations are run in parallel in the child frame.

  Args:
    data: The tensor to be made available to the child frame.
    frame_name: The name of the child frame.
    is_constant: If true, the output is constant within the child frame.
    parallel_iterations: The number of iterations allowed to run in parallel.
    use_ref: If true, use ref_enter if data is of ref type.
    name: A name for this operation (optional).

  Returns:
    The same tensor as `data`.
  """
  # Pick the ref-preserving op only when both requested and applicable.
  enter_fn = ref_enter if (data.dtype.is_ref_dtype and use_ref) else enter
  return enter_fn(data, frame_name, is_constant, parallel_iterations,
                  name=name)
def exit(data, name=None):
  """Exits the current frame to its parent frame.

  Exit makes its input `data` available to the parent frame.

  Args:
    data: The tensor to be made available to the parent frame.
    name: A name for this operation (optional).

  Returns:
    The same tensor as `data`.
  """
  exit_fn = (gen_control_flow_ops._ref_exit if data.dtype.is_ref_dtype
             else gen_control_flow_ops._exit)
  return exit_fn(data, name)
def switch(data, pred, dtype=None, name=None):
  """Forwards `data` to an output determined by `pred`.

  If `pred` is true, the `data` input is forwarded to the first output.
  Otherwise, the data goes to the second output.
  This op handles `Tensor`s and `IndexedSlices`.

  Args:
    data: The tensor to be forwarded to the appropriate output.
    pred: A scalar that specifies which output port will receive data.
    dtype: Optional element type for the returned tensor. If missing,
      the type is inferred from the type of `value`.
    name: A name for this operation (optional).

  Returns:
    `(output_false, output_true)`: If `pred` is true, data will be forwarded
    to `output_true`, otherwise it goes to `output_false`.
  """
  with ops.op_scope([data, pred], name, "Switch") as name:
    data = ops.convert_to_tensor_or_indexed_slices(data, dtype=dtype,
                                                   name="data")
    pred = ops.convert_to_tensor(pred, name="pred")
    # Plain tensors switch in a single op.
    if isinstance(data, ops.Tensor):
      return gen_control_flow_ops._switch(data, pred, name=name)
    # IndexedSlices: switch values, indices and (optionally) dense_shape
    # component-wise, then reassemble two IndexedSlices results.
    val_f, val_t = gen_control_flow_ops._switch(data.values, pred, name=name)
    ind_f, ind_t = gen_control_flow_ops._switch(data.indices, pred,
                                                name="indices")
    dense_shape_f = dense_shape_t = None
    if data.dense_shape:
      dense_shape_f, dense_shape_t = gen_control_flow_ops._switch(
          data.dense_shape, pred, name="dense_shape")
    return (ops.IndexedSlices(val_f, ind_f, dense_shape_f),
            ops.IndexedSlices(val_t, ind_t, dense_shape_t))
def merge(inputs, name=None):
  """Returns the value of an available element of `inputs`.

  This op tests each of the tensors in `inputs` in turn to determine if any of
  them is available. If it finds an available tensor, it returns it and its
  index in `inputs`.

  It is an error if more than one tensor in `inputs` is available. If no tensor
  in `inputs` is available, the returned tensor and index are not set.

  This op handles both `Tensor`s and `IndexedSlices`. If inputs has a mix of
  `Tensor`s and `IndexedSlices`, all inputs are converted to IndexedSlices
  before merging.

  Args:
    inputs: The input tensors, at most one of which is available.
    name: A name for this operation (optional).

  Returns:
    A tuple containing the chosen input tensor and its index in `inputs`.

  Raises:
    ValueError: If inputs are IndexedSlices and some but not all have a
      dense_shape property.
  """
  with ops.op_scope(inputs, name, "Merge") as name:
    inputs = [ops.convert_to_tensor_or_indexed_slices(inp)
              for inp in inputs]
    # Fast path: homogeneous Tensor inputs merge in a single op.
    if all([isinstance(inp, ops.Tensor) for inp in inputs]):
      return _Merge(inputs, name=name)
    else:
      # Mixed or IndexedSlices inputs: coerce everything to IndexedSlices
      # and merge each component (values / indices / dense_shape) separately.
      inputs = math_ops._as_indexed_slices_list(inputs)
      values, _ = _Merge([inp.values for inp in inputs], name=name)
      indices, chosen_index = _Merge(
          [inp.indices for inp in inputs], name="indices")
      # NOTE(review): truthiness of `inp.dense_shape` (a Tensor or None)
      # distinguishes set/unset here; exact expressions kept as-is.
      if any(inp.dense_shape for inp in inputs):
        if not all(inp.dense_shape for inp in inputs):
          raise ValueError("Either all merged IndexedSlices must have a "
                           "dense_shape, or none must have a dense_shape.")
        dense_shape, _ = _Merge(
            [inp.dense_shape for inp in inputs], name="dense_shape")
      else:
        dense_shape = None
      return ops.IndexedSlices(values, indices, dense_shape), chosen_index
def _SwitchRefOrTensor(data, pred, name="Switch"):
  """Forwards `data` to an output determined by `pred`.

  If `pred` is true, the `data` input is forwarded to the first output.
  Otherwise, the data goes to the second output.
  This op handles `Tensor`s and `IndexedSlices`.

  Args:
    data: The tensor to be forwarded to the appropriate output.
    pred: A scalar that specifies which output port will receive data.
    name: A name for this operation (optional).

  Returns:
    `(output_false, output_true)`: If `pred` is true, data will be forwarded
    to `output_true`, otherwise it goes to `output_false`.

  Raises:
    TypeError: if data is not a Tensor or IndexedSlices
  """
  data = ops.convert_to_tensor_or_indexed_slices(data, name="data")
  with ops.device(data.device):
    # Only plain ref-typed Tensors need the ref variant; IndexedSlices and
    # non-ref Tensors both go through the regular switch.
    if isinstance(data, ops.Tensor) and data.dtype.is_ref_dtype:
      return ref_switch(data, pred, name=name)
    return switch(data, pred, name=name)
def _convert_tensorarrays_to_flows(tensors_or_tensor_arrays):
  # Replace every TensorArray with its flow tensor; pass all other
  # values through unchanged.
  flows = []
  for item in tensors_or_tensor_arrays:
    if isinstance(item, tensor_array_ops.TensorArray):
      flows.append(item.flow)
    else:
      flows.append(item)
  return flows
def _convert_flows_to_tensorarrays(tensors_or_tensorarrays, tensors_or_flows):
  # For each (template, new value) pair: if the template was a TensorArray,
  # rebuild a TensorArray around the new flow; otherwise keep the new value.
  if len(tensors_or_tensorarrays) != len(tensors_or_flows):
    raise ValueError(
        "Lengths of original Tensor list and new list do not match: %d vs. %d"
        % (len(tensors_or_tensorarrays), len(tensors_or_flows)))
  converted = []
  for template, new_value in zip(tensors_or_tensorarrays, tensors_or_flows):
    if isinstance(template, tensor_array_ops.TensorArray):
      converted.append(tensor_array_ops.TensorArray(
          dtype=template.dtype, handle=template.handle, flow=new_value))
    else:
      converted.append(new_value)
  return converted
class ControlFlowOpWrapper(object):
"""A wrapper class for Operation.
A wrapped op allows us to capture the uses of its inputs and outputs. In
gradients(), right before calling the gradient function of an op, we wrap
the op by calling MakeWrapper. So during the exection of the gradient
function of an op , any time when one of its inputs/outputs is used, we
generate code to remember its values for all iterations.
"""
class _ControlFlowOpInputs(object):
"""An indirection to capture the input tensors needed in backprop."""
def __init__(self, op, grad_state):
self._op = op
self._grad_state = grad_state
self._inputs = None
def __len__(self):
return len(self._op._inputs)
def __getitem__(self, index):
if self._inputs is None:
self._inputs = [None for _ in self._op.inputs]
if isinstance(index, int):
val = self._inputs[index]
if val is None:
f_val = self._op.inputs[index]
val = self._grad_state.GetRealValue(f_val)
self._inputs[index] = val
return val
elif isinstance(index, slice):
start, stop, step = index.indices(len(self))
vals = [self[i] for i in xrange(start, stop, step)]
return vals
else:
raise TypeError("index must be an integer or slice")
class _ControlFlowOpOutputs(object):
"""An indirection to capture the output tensors needed in backprop."""
def __init__(self, op, grad_state):
self._op = op
self._grad_state = grad_state
self._outputs = None
def __len__(self):
return len(self._op._outputs)
def __getitem__(self, index):
if self._outputs is None:
self._outputs = [None for _ in self._op.outputs]
if isinstance(index, int):
val = self._outputs[index]
if val is None:
f_val = self._op.outputs[index]
val = self._grad_state.GetRealValue(f_val)
self._outputs[index] = val
return val
elif isinstance(index, slice):
start, stop, step = index.indices(len(self))
vals = [self[i] for i in xrange(start, stop, step)]
return vals
else:
raise TypeError("index must be an integer or slice")
def __init__(self, op, grad_state):
self._grad_state = grad_state # The GradLoopState this op belongs to.
self._op = op
self._inputs = None
self._outputs = None
@property
def grad_state(self):
return self._grad_state
@property
def inputs(self):
if self._inputs is None:
self._inputs = self._ControlFlowOpInputs(self._op, self._grad_state)
return self._inputs
@property
def outputs(self):
if self._outputs is None:
self._outputs = self._ControlFlowOpOutputs(self._op, self._grad_state)
return self._outputs
@property
def op(self):
return self._op
@property
def name(self):
"""Returns the name of this instance of op."""
return self._op.name
@property
def _id(self):
"""Returns the unique id of this operation."""
return self._op._id
@property
def device(self):
"""Returns the device of this operation.
Returns:
a string or None if the device was not set.
"""
return self._op.device
@property
def type(self):
"""Returns the type of the op."""
return self._op.type
@property
def graph(self):
"""The `Graph` that contains this operation."""
return self._op.graph
  def get_attr(self, name):
    """Returns the value of the attr of the underlying op with the given `name`."""
    return self._op.get_attr(name)
  def _get_control_flow_context(self):
    """Returns the control flow context of the underlying op."""
    return self._op._get_control_flow_context()
def _IsLoopConstantEnter(op):
"""Returns true iff op is a loop invariant."""
is_enter = (op.type == "Enter" or op.type == "RefEnter")
return is_enter and op.get_attr("is_constant")
def _IsLoopExit(op):
return op.type == "Exit" or op.type == "RefExit"
class GradLoopState(object):
  """The state used for constructing the gradient graph for a while loop.
  We create a GradLoopState for each while loop in forward and its
  corresponding while loop in backprop. This gives us access to both
  the forward and the backprop WhileContexts.
  During the construction of gradient graph, any time when we detect
  a forward value that is needed for backprop, we create a history
  accumulator and add it to `history_map`. Any time when we backprop
  a loop switch op (in _SwitchGrad), we add the grad merge op in
  `switch_map`.
  """
  def __init__(self, forward_ctxt, outer_grad_state):
    """Creates the grad state for one while loop.

    Args:
      forward_ctxt: The WhileContext of the forward while loop.
      outer_grad_state: The GradLoopState of the immediately enclosing
        loop, or None if `forward_ctxt` is an outermost loop.
    """
    # The grad loop state for the outer while loop.
    self._outer_grad_state = None
    # The while loop context for forward.
    self._forward_context = None
    # The loop counter added by AddForwardCounter. It is the value
    # of the loop counter for the next iteration.
    self._forward_index = None
    # A sync op for forward.
    self._forward_sync = None
    # The while loop context for backprop.
    self._grad_context = None
    # The loop counter added by AddBackPropCounter. It is the value
    # of the loop counter for the current iteration.
    self._grad_index = None
    # A sync op for backprop.
    self._grad_sync = None
    # Information needed by backprop.
    self._history_map = {}
    self._switch_map = {}
    self._outer_grad_state = outer_grad_state
    if outer_grad_state:
      outer_forward_ctxt = outer_grad_state.forward_context
    else:
      outer_forward_ctxt = forward_ctxt.outer_context
    # Add the forward loop counter. The counter ops must be created in the
    # context that encloses the forward loop, hence the Enter/Exit pair.
    if outer_forward_ctxt: outer_forward_ctxt.Enter()
    cnt, forward_index = forward_ctxt.AddForwardCounter()
    if outer_forward_ctxt: outer_forward_ctxt.Exit()
    self._forward_context = forward_ctxt
    self._forward_index = forward_index
    # Add the backprop WhileContext, and the backprop loop counter.
    if outer_grad_state:
      # This is a nested loop. Remember the iteration counts for each
      # execution of this inner loop.
      outer_forward_ctxt.AddName(cnt.name)
      history_cnt = outer_grad_state.AddForwardAccumulator(cnt)
      outer_grad_ctxt = outer_grad_state.grad_context
      outer_grad_ctxt.Enter()
      self._grad_context = WhileContext(forward_ctxt.parallel_iterations,
                                        forward_ctxt.back_prop,
                                        forward_ctxt.name)
      real_cnt = outer_grad_state.AddBackPropAccumulatedValue(history_cnt, cnt)
      self._grad_index = self._grad_context.AddBackPropCounter(real_cnt)
      outer_grad_ctxt.Exit()
    else:
      if outer_forward_ctxt: outer_forward_ctxt.Enter()
      self._grad_context = WhileContext(forward_ctxt.parallel_iterations,
                                        forward_ctxt.back_prop,
                                        forward_ctxt.name)
      self._grad_index = self._grad_context.AddBackPropCounter(cnt)
      if outer_forward_ctxt: outer_forward_ctxt.Exit()
  @property
  def outer_grad_state(self):
    """The grad loop state for outer loop."""
    return self._outer_grad_state
  @property
  def forward_context(self):
    """The while loop context for forward."""
    return self._forward_context
  @property
  def forward_index(self):
    """The loop index of forward loop."""
    return self._forward_index
  @property
  def forward_sync(self):
    """A control trigger node for synchronization in the forward loop.
    One main use is to keep the push ops of a stack executed in the
    iteration order.
    """
    # Created lazily, outside any control-dependency scope, and attached
    # to the forward context so it executes once per forward iteration.
    if self._forward_sync is None:
      with ops.control_dependencies(None):
        self._forward_sync = control_trigger(name="f_sync")
      self._forward_sync._set_control_flow_context(self._forward_context)
      self._forward_index.op._add_control_input(self._forward_sync)
    return self._forward_sync
  @property
  def grad_context(self):
    """The corresponding WhileContext for gradient."""
    return self._grad_context
  @property
  def grad_index(self):
    """The loop index of backprop loop."""
    return self._grad_index
  @property
  def grad_sync(self):
    """A control trigger node for synchronization in the grad loop.
    One main use is to keep the pop ops of a stack executed in the
    iteration order.
    """
    # Created lazily, mirroring forward_sync but for the backprop loop.
    if self._grad_sync is None:
      with ops.control_dependencies(None):
        self._grad_sync = control_trigger(name="b_sync")
      self._grad_sync._set_control_flow_context(self._grad_context)
      self._grad_index.op._add_control_input(self._grad_sync)
    return self._grad_sync
  @property
  def history_map(self):
    """The map that records all the tensors needed for backprop."""
    return self._history_map
  @property
  def switch_map(self):
    """The map that records all the Switch ops for the While loop."""
    return self._switch_map
  def AddForwardAccumulator(self, value, dead_branch=False):
    """Add an accumulator for each forward tensor that is needed in backprop.
    This is added to the forward loop at the first time when a tensor
    in the forward loop is used by backprop gradient computation loop.
    We create an accumulator that accumulates the value of tensor at each
    iteration. Called in the control flow context where gradients() is called.
    The pseudocode is:
    ```
    acc = stack();
    while (_pivot) {
      acc = stack_push(acc, value);
    }
    ```
    We make sure that the stack push op in one iteration is executed before
    next iteration. This is achieved by adding a control edge from
    `forward_index.op.inputs[0].op` to the push op, and another control
    edge from the push op to either `forward_index.op` or `forward_sync`.
    Args:
      value: The tensor that is to be accumulated.
      dead_branch: True iff the tensor is on a dead branch of a cond.
    Returns:
      The stack that contains the accumulated history of the tensor.
    """
    # TODO(yuanbyu): Make sure the colocation of stack ops and value.
    # pylint: disable=protected-access
    acc = gen_data_flow_ops._stack(value.dtype.base_dtype, name="f_acc")
    # pylint: enable=protected-access
    # Make acc available in the forward context.
    enter_acc = self.forward_context.AddValue(acc)
    # Add the stack_push op in the context of value.op.
    value_ctxt = value.op._get_control_flow_context()
    if _IsLoopExit(value.op):
      # For an Exit, push in the context enclosing the loop being exited.
      value_ctxt = value_ctxt.outer_context
    if value_ctxt == self.forward_context:
      # value is not nested in the forward context.
      self.forward_context.Enter()
      push = gen_data_flow_ops._stack_push(enter_acc, value)
      # Protect stack push and order it before forward_index.
      self.forward_index.op._add_control_input(push.op)
      self.forward_context.Exit()
    else:
      # value is in a cond context within the forward context.
      assert isinstance(value_ctxt, CondContext)
      if dead_branch:
        # The special case for creating a zero tensor for a dead
        # branch of a switch. See ControlFlowState.ZerosLike().
        value_ctxt.outer_context.Enter()
        push = gen_data_flow_ops._stack_push(enter_acc, value)
        value_ctxt.outer_context.Exit()
        # Guard with a switch but take the other branch.
        pred = self.history_map.get(value_ctxt.pred.name)
        branch = value_ctxt.branch
        value_ctxt.AddName(push.name)
        value_ctxt.Enter()
        push = _SwitchRefOrTensor(push, pred)[1 - branch]
        value_ctxt.Exit()
      else:
        value_ctxt.Enter()
        push = gen_data_flow_ops._stack_push(enter_acc, value)
        value_ctxt.Exit()
      # Protect stack push and order it before forward_sync.
      self.forward_sync._add_control_input(push.op)
    # Order stack push after the successor of forward_index
    add_op = self.forward_index.op.inputs[0].op
    push.op._add_control_input(add_op)
    return acc
  def AddBackPropAccumulatedValue(self, history_value, value,
                                  dead_branch=False):
    """Add the getter for an accumulated value in the grad context.
    This is added to the backprop loop. Called in the grad context to
    get the value of an accumulated value. The stack pop op must be guarded
    by the pred of the controlling cond.
    Args:
      history_value: The history (a stack) of a value.
      value: The value that is pushed onto the stack.
      dead_branch: True iff the tensor is on a dead branch of a cond.
    Returns:
      The current value (the top of the stack).
    """
    history_ctxt = history_value.op._get_control_flow_context()
    # Find the cond context that controls history_value.
    cond_ctxt = None
    value_ctxt = value.op._get_control_flow_context()
    while value_ctxt and value_ctxt != history_ctxt:
      if isinstance(value_ctxt, CondContext):
        cond_ctxt = value_ctxt
        break
      value_ctxt = value_ctxt.outer_context
    if cond_ctxt:
      # Guard stack pop with a switch if it is controlled by a cond.
      # The pred may come from an enclosing loop's history, so walk up
      # the chain of grad states until it is found.
      grad_state = self
      pred = None
      while not pred and grad_state:
        pred = grad_state.history_map.get(cond_ctxt.pred.name)
        grad_state = grad_state.outer_grad_state
      branch = (1 - cond_ctxt.branch) if dead_branch else cond_ctxt.branch
      history_value = _SwitchRefOrTensor(history_value, pred)[branch]
    pop = gen_data_flow_ops._stack_pop(history_value, value.dtype.base_dtype)
    if self.grad_context.parallel_iterations > 1:
      # All pops are ordered after pivot_for_body and before grad_sync.
      self.grad_sync._add_control_input(pop.op)
    return pop
  def GetRealValue(self, value):
    """Get the real value.
    If backprop "uses" a value produced by forward inference, an
    accumulator is added in the forward loop to accumulate its values.
    We use the accumulated value.
    Args:
      value: A tensor to be captured.
    Returns:
      The same tensor value from the saved history.
    """
    assert value.op.type != "Variable"
    # Results are memoized per value name so each forward tensor gets
    # at most one accumulator.
    real_value = self._history_map.get(value.name)
    if real_value is None:
      if _IsLoopConstantEnter(value.op):
        # Special case for loop invariant.
        if self._outer_grad_state:
          # This is a nested loop so we record the history of this
          # value in outer_forward_ctxt.
          self._grad_context.Exit()
          outer_value = value.op.inputs[0]
          history_value = self._outer_grad_state.AddForwardAccumulator(
              outer_value)
          self._grad_context.Enter()
        else:
          # Just use the input value of this Enter node.
          real_value = GetRealOp(value.op).inputs[0]
      else:
        # Record the history of this value in forward_ctxt.
        # NOTE(yuanbyu): Don't record for constants.
        self._grad_context.Exit()
        history_value = self.AddForwardAccumulator(value)
        self._grad_context.Enter()
      if real_value is None:
        # Add the stack pop op in the grad context.
        real_value = self.AddBackPropAccumulatedValue(history_value, value)
      self._history_map[value.name] = real_value
    return real_value
def _GetWhileContext(op):
"""Get the WhileContext to which this op belongs."""
ctxt = op._get_control_flow_context()
if ctxt:
ctxt = ctxt.GetWhileContext()
return ctxt
class ControlFlowState(object):
  """Maintain the mapping from the loops to their grad states."""

  def __init__(self):
    # Maps forward loop WhileContext -> GradLoopState.
    self._map = {}

  def _GetGradState(self, op):
    """Return the GradLoopState for the while loop `op` belongs to, or None."""
    forward_ctxt = _GetWhileContext(op)
    if forward_ctxt is None:
      return None
    return self._map.get(forward_ctxt)

  def MakeWrapper(self, op):
    """Make a wrapper for op if it is in a WhileContext."""
    grad_state = self._GetGradState(op)
    if grad_state:
      return ControlFlowOpWrapper(op, grad_state)
    return op

  def GetAllLoopExits(self):
    """Return a list containing the exits of all the loops."""
    loop_exits = []
    for forward_ctxt in self._map:
      for loop_exit in forward_ctxt.loop_exits:
        loop_exits.append(loop_exit)
    return loop_exits

  def EnterGradWhileContext(self, op):
    """Enter the WhileContext for gradient computation."""
    grad_state = self._GetGradState(op)
    if grad_state:
      grad_state.grad_context.Enter()

  def ExitGradWhileContext(self, op):
    """Exit the WhileContext for gradient computation."""
    grad_state = self._GetGradState(op)
    if grad_state:
      grad_state.grad_context.Exit()

  def AddWhileContext(self, op, between_op_list, between_ops):
    """Add the grad state for the while loop that op belongs to.
    Note that op is an Exit, and this method must be called in
    the control flow context where gradients() is called.
    Note that this method modifies `between_op_list` and `between_ops`.
    """
    forward_ctxt = _GetWhileContext(op)
    grad_state = self._map.get(forward_ctxt)
    if grad_state is None:
      # This is a new while loop so create a grad state for it.
      outer_forward_ctxt = forward_ctxt.outer_context
      if outer_forward_ctxt:
        outer_forward_ctxt = outer_forward_ctxt.GetWhileContext()
      outer_grad_state = None
      if outer_forward_ctxt:
        outer_grad_state = self._map.get(outer_forward_ctxt)
      grad_state = GradLoopState(forward_ctxt, outer_grad_state)
      self._map[forward_ctxt] = grad_state
      # We need to include all exits of a loop for backprop.
      for loop_exit in forward_ctxt.loop_exits:
        if not between_ops[loop_exit.op._id]:
          between_ops[loop_exit.op._id] = True
          between_op_list.append(loop_exit.op)

  def ZerosLikeForExit(self, val):
    """Create zeros_like gradient for a loop exit.
    If the result of a loop variable is not used but is involved in
    computing the result of some needed loop variable, we create a
    zero-valued tensor that is fed as gradient for the Exit node of that
    loop variable. Note that val.op is an Exit, and this method must be
    called in the control flow context where gradients() is called.
    Args:
      val: The output tensor of an Exit op.
    Returns:
      A zero tensor of the same shape of val.
    """
    val_shape = val.get_shape()
    forward_ctxt = val.op._get_control_flow_context()
    outer_forward_ctxt = forward_ctxt.outer_context
    if outer_forward_ctxt:
      outer_forward_ctxt = outer_forward_ctxt.GetWhileContext()
    outer_grad_state = None
    if outer_forward_ctxt:
      outer_grad_state = self._map.get(outer_forward_ctxt)
    if outer_grad_state:
      # This is a nested loop.
      if val_shape.is_fully_defined():
        # If the shape is known statically, just create a zero tensor
        # with the right shape in the right context.
        outer_grad_state.grad_context.Enter()
        result = array_ops.zeros(val_shape.dims, val.dtype)
        outer_grad_state.grad_context.Exit()
      else:
        # Shape is only known at runtime: accumulate the value in the
        # outer forward loop and create the zeros from its real value.
        history_val = outer_grad_state.AddForwardAccumulator(val)
        outer_grad_ctxt = outer_grad_state.grad_context
        outer_grad_ctxt.Enter()
        real_val = outer_grad_state.AddBackPropAccumulatedValue(
            history_val, val)
        result = array_ops.zeros_like(real_val)
        outer_grad_ctxt.Exit()
    else:
      # This is not a nested loop.
      if val_shape.is_fully_defined():
        # If the shape is known statically, just create a zero tensor
        # with the right shape.
        result = array_ops.zeros(val_shape.dims, val.dtype)
      else:
        result = array_ops.zeros_like(val)
    return result

  def ZerosLike(self, op, index):
    """Create zeros_like for the specified output of an op.
    This method must be called in the grad loop context.
    Args:
      op: A tensorflow operation.
      index: the index for a specific output of the op.
    Returns:
      A zero tensor of the same shape of op.outputs[index].
    """
    if IsLoopSwitch(op): return None
    dead_branch = op.type in {"Switch", "RefSwitch"}
    forward_ctxt = _GetWhileContext(op)
    if forward_ctxt is None:
      # Not in a while loop; a plain zeros_like suffices.
      return array_ops.zeros_like(op.outputs[index])
    op_ctxt = op._get_control_flow_context()
    grad_state = self._map.get(forward_ctxt)
    val = ops.convert_to_tensor(op.outputs[index], name="tensor")
    static_shape = val.get_shape()
    if static_shape.is_fully_defined():
      # If the shape is known statically, just create a zero tensor with
      # the right shape in the grad loop context.
      result = constant_op.constant(0, shape=static_shape.dims, dtype=val.dtype)
      if dead_branch:
        # op is a cond switch. Guard the zero tensor with a switch.
        pred = grad_state.history_map.get(op_ctxt.pred.name)
        branch = op_ctxt.branch
        result = _SwitchRefOrTensor(result, pred)[1 - branch]
    else:
      # Unknown shape so keep a history of the shape at runtime.
      op_ctxt.Enter()
      # Bug fix: the runtime shape must come from array_ops.shape(val);
      # the previous code called the static TensorShape object (the local
      # `shape`), which is not callable and raised a TypeError here.
      zeros_shape = array_ops.shape(val)
      op_ctxt.Exit()
      # Add forward accumulator for shape.
      grad_state.grad_context.Exit()
      history_shape = grad_state.AddForwardAccumulator(zeros_shape, dead_branch)
      grad_state.grad_context.Enter()
      # Create a zero tensor with the right shape.
      # Bug fix: was `zero_shape`, an undefined name (NameError); the value
      # pushed onto the stack above is `zeros_shape`.
      real_shape = grad_state.AddBackPropAccumulatedValue(
          history_shape, zeros_shape, dead_branch)
      result = array_ops.zeros(real_shape, val.dtype)
    return result
def GetRealOp(op):
  """Get the real op by removing the wrapper."""
  real_op = op
  while isinstance(real_op, ControlFlowOpWrapper):
    real_op = real_op.op
  return real_op
def MaybeCreateControlFlowState(between_op_list, between_ops):
  """Create the state for all the while loops involved in one gradients().
  We create a ControlFlowState when there are while loops involved in
  gradients(). In gradients(), control flow logic is only invoked when
  the ControlFlowState is not None.
  Note that this method modifies `between_op_list` and `between_ops`.
  """
  loop_state = None
  for op in between_op_list:
    if not _IsLoopExit(op):
      continue
    if loop_state is None:
      loop_state = ControlFlowState()
    loop_state.AddWhileContext(op, between_op_list, between_ops)
  return loop_state
def IsLoopSwitch(op):
  """Return true if `op` is the Switch for a While loop."""
  if op.type not in ("Switch", "RefSwitch"):
    return False
  ctxt = op._get_control_flow_context()
  return ctxt and isinstance(ctxt, WhileContext)
class ControlFlowContext(object):
  """The base class for control flow context.
  The usage pattern is a sequence of (Enter, Exit) followed by a final
  ExitResult.
  We maintain the following state for control flow contexts during graph
  construction:
  1. graph has _control_flow_context: the current context used to
     construct new nodes. Changed by ctxt.Enter() and ctxt.Exit()
  2. op has _control_flow_context: the context to which the op belongs.
     Set at the time the op is created. Immutable.
  3. A ControlFlowContext has _outer_context: the context in which this
     context is created. Set at the time a context is created. Immutable.
  4. A ControlFlowContext has _context_stack.
     Pushed and popped by ctxt.Enter() and ctxt.Exit()
  """
  def __init__(self):
    # The enclosing context is whatever the default graph is in when the
    # context object is constructed.
    self._outer_context = ops.get_default_graph()._get_control_flow_context()
    self._context_stack = []
    # Values that have been already seen in this context.
    self._values = set()
    # Values referenced by but external to this context.
    self._external_values = {}
  @property
  def outer_context(self):
    """Return the context containing this context."""
    return self._outer_context
  def AddName(self, name):
    """Mark the value with `name` as seen in this context."""
    self._values.add(name)
  # pylint: disable=protected-access
  def Enter(self):
    """Enter this control flow context."""
    # Save the graph's current context so Exit() can restore it; Enter/Exit
    # pairs may nest, hence the stack.
    graph = ops.get_default_graph()
    self._context_stack.append(graph._get_control_flow_context())
    graph._set_control_flow_context(self)
  def Exit(self):
    """Exit this control flow context."""
    graph = ops.get_default_graph()
    last_context = self._context_stack.pop()
    graph._set_control_flow_context(last_context)
  def ExitResult(self, result):
    """Make a list of tensors available in the outer context."""
    if self._outer_context:
      for x in result:
        self._outer_context.AddName(x.name)
  def GetWhileContext(self):
    """Return the while context containing this context."""
    if self._outer_context:
      return self._outer_context.GetWhileContext()
    return None
  def MaybeAddToWhileContext(self, op):
    """Add a control dependency to the containing WhileContext.
    The added control dependency ensures that the outputs of this op
    belong to the WhileContext. Do nothing if the op is not contained
    in a WhileContext.
    Args:
      op: An operation.
    """
    while_ctxt = self.GetWhileContext()
    if while_ctxt is not None:
      # pylint: disable=protected-access
      op._add_control_input(while_ctxt.GetControlPivot().op)
      # pylint: enable=protected-access
class CondContext(ControlFlowContext):
  """The context for the conditional construct."""
  def __init__(self, pred, pivot, branch):
    """Creates a CondContext for one branch of a cond.

    Args:
      pred: The boolean tensor for the cond predicate.
      pivot: The predicate tensor in this branch (a switch output).
      branch: 0 or 1, identifying which branch this context represents.
    """
    ControlFlowContext.__init__(self)
    self._pred = pred  # The boolean tensor for the cond predicate
    self._pivot = pivot  # The predicate tensor in this branch
    self._branch = branch  # 0 or 1 representing this branch
    # Values considered to have been already seen in this context.
    self._values.add(pred.name)
    self._values.add(pivot.name)
  @property
  def pred(self):
    """The boolean tensor for the cond predicate."""
    return self._pred
  @property
  def pivot(self):
    """The predicate tensor in this branch."""
    return self._pivot
  @property
  def branch(self):
    """0 or 1 representing this branch."""
    return self._branch
  def AddValue(self, val):
    """Add `val` to the current context and its outer context recursively."""
    result = val
    if val.name not in self._values:
      self._values.add(val.name)
      if self._outer_context:
        result = self._outer_context.AddValue(val)
        self._values.add(result.name)
      # Route the external value through a Switch guarded by the predicate
      # so it is only consumed on this branch.
      with ops.control_dependencies(None):
        result = _SwitchRefOrTensor(result, self._pred)[self._branch]
      # pylint: disable=protected-access
      result.op._set_control_flow_context(self)
      # pylint: enable=protected-access
      self._values.add(result.name)
      self._external_values[val.name] = result
    return result
  def AddOp(self, op):
    """Add `op` to the current context."""
    if not op.inputs:
      # Add this op to the enclosing while context
      self.MaybeAddToWhileContext(op)
      # pylint: disable=protected-access
      op._add_control_input(self._pivot.op)
      # pylint: enable=protected-access
      for x in op.outputs:
        self._values.add(x.name)
    else:
      for index in range(len(op.inputs)):
        x = op.inputs[index]
        if x.name not in self._values:
          self._values.add(x.name)
          # Add this value to the parent contexts up to the context that
          # creates this value.
          real_x = x
          if self._outer_context:
            real_x = self._outer_context.AddValue(x)
            self._values.add(real_x.name)
          real_x = _SwitchRefOrTensor(real_x, self._pred)[self._branch]
          self._external_values[x.name] = real_x
        # Rewire the op's input to the guarded (switched) version, if any.
        x = self._external_values.get(x.name)
        if x is not None:
          op._update_input(index, x)
      for x in op.outputs:
        self._values.add(x.name)
  def BuildCondBranch(self, fn):
    """Add the subgraph defined by fn() to the graph."""
    r = fn()
    result = []
    if r is not None:
      if not isinstance(r, list) and not isinstance(r, _basetuple):
        r = [r]
      for v in r:
        real_v = v
        if isinstance(v, ops.Operation):
          # Use pivot as the proxy for this op.
          real_v = with_dependencies([v], self._pivot)
        elif v.name not in self._values:
          # Handle the special case of lambda: x
          self._values.add(v.name)
          if self._outer_context:
            real_v = self._outer_context.AddValue(v)
            self._values.add(real_v.name)
          real_v = _SwitchRefOrTensor(real_v, self._pred)[self._branch]
          self._external_values[v.name] = real_v
        else:
          external_v = self._external_values.get(v.name)
          if external_v is not None:
            real_v = external_v
        result.append(real_v)
    return result
def cond(pred, fn1, fn2, name=None):
  """Return either fn1() or fn2() based on the boolean predicate `pred`.
  `fn1` and `fn2` both return lists of output tensors. `fn1` and `fn2` must have
  the same non-zero number and type of outputs.
  Args:
    pred: A scalar determining whether to return the result of `fn1` or `fn2`.
    fn1: The function to be performed if pred is true.
    fn2: The function to be performed if pred is false.
    name: Optional name prefix for the returned tensors.
  Returns:
    Tensors returned by the call to either `fn1` or `fn2`. If the functions
    return a singleton list, the element is extracted from the list.
  Raises:
    TypeError: if `fn1` or `fn2` is not callable.
    ValueError: if `fn1` and `fn2` do not return the same number of tensors, or
      return tensors of different types.
  Example:
  ```python
    x = constant(2)
    y = constant(5)
    def f1(): return constant(17)
    def f2(): return constant(23)
    r = cond(math_ops.less(x, y), f1, f2)
    # r is set to f1()
  ```
  """
  with ops.op_scope([pred], name, "cond") as name:
    if not callable(fn1):
      raise TypeError("fn1 must be callable.")
    if not callable(fn2):
      raise TypeError("fn2 must be callable.")
    # Add the Switch to the graph.
    if isinstance(pred, bool):
      raise TypeError("pred must not be a Python bool")
    # switch() returns (output_false, output_true).
    p_2, p_1 = switch(pred, pred)
    pivot_1 = array_ops.identity(p_1, name="switch_t")
    pivot_2 = array_ops.identity(p_2, name="switch_f")
    pred = array_ops.identity(pred, name="pred_id")
    # Build the graph for the true branch in a new context.
    context_t = CondContext(pred, pivot_1, 1)
    context_t.Enter()
    res_t = context_t.BuildCondBranch(fn1)
    context_t.ExitResult(res_t)
    context_t.Exit()
    # Build the graph for the false branch in a new context.
    context_f = CondContext(pred, pivot_2, 0)
    context_f.Enter()
    res_f = context_f.BuildCondBranch(fn2)
    context_f.ExitResult(res_f)
    context_f.Exit()
    # Add the final merge to the graph.
    if len(res_t) != len(res_f):
      raise ValueError("fn1 and fn2 must return the same number of results.")
    if not res_t:
      raise ValueError("fn1 and fn2 must return at least one result.")
    for x, y in zip(res_f, res_t):
      assert ((isinstance(x, ops.IndexedSlices) and
               isinstance(y, ops.IndexedSlices)) or
              (isinstance(x, ops.Tensor) and isinstance(y, ops.Tensor)))
      val_x = x if isinstance(x, ops.Tensor) else x.values
      val_y = y if isinstance(y, ops.Tensor) else y.values
      if val_x.dtype.base_dtype != val_y.dtype.base_dtype:
        raise ValueError("Outputs of fn1 and fn2 must have the same type: "
                         "%s, %s" % (val_x.dtype.name, val_y.dtype.name))
    merges = [merge([x[0], x[1]])[0] for x in zip(res_f, res_t)]
    return merges[0] if len(merges) == 1 else merges
# TODO(yuanbyu): Consider having a unified notion of context for
# not only conditionals and loops but also control dependency and
# subgraphs.
class WhileContext(ControlFlowContext):
"""The context for the loop construct."""
  def __init__(self, parallel_iterations, back_prop, name):
    """Creates a WhileContext.

    Args:
      parallel_iterations: The number of iterations allowed to run in
        parallel.
      back_prop: Whether backprop is enabled for this while loop.
      name: Name prefix for this context; uniquified against the default
        graph.
    """
    ControlFlowContext.__init__(self)
    self._name = ops.get_default_graph().unique_name(name)
    self._parallel_iterations = parallel_iterations
    self._back_prop = back_prop
    # We use this node to control constants created by the pred lambda.
    self._pivot_for_pred = None
    # We use this node to control constants created by the body lambda.
    self._pivot_for_body = None
    # The boolean tensor for loop termination condition. Used in code
    # generation for gradient computation
    self._pivot = None
    # The list of exit tensors for loop variables.
    self._loop_exits = None
  @property
  def name(self):
    """The (graph-unique) name of this while context."""
    return self._name
  @property
  def parallel_iterations(self):
    """The number of iterations allowed to run in parallel."""
    return self._parallel_iterations
  @property
  def back_prop(self):
    """True iff backprop is enabled for this While loop."""
    return self._back_prop
  @property
  def pivot(self):
    """The boolean tensor representing the loop termination condition."""
    return self._pivot
  @property
  def loop_exits(self):
    """The list of exit tensors for loop variables."""
    return self._loop_exits
  def GetWhileContext(self):
    """Return this context: a WhileContext is its own while context."""
    return self
  def GetControlPivot(self):
    """Return the pivot used to anchor ops: body pivot once built, else pred pivot."""
    if self._pivot_for_body:
      return self._pivot_for_body
    return self._pivot_for_pred
  def AddValue(self, val):
    """Add `val` to the current context and its outer context recursively."""
    result = val
    if val.name not in self._values:
      self._values.add(val.name)
      if self._outer_context is not None:
        result = self._outer_context.AddValue(val)
      # Create an Enter to make `result` known to this loop context.
      with ops.control_dependencies(None):
        enter = _Enter(result, self._name, is_constant=True,
                       parallel_iterations=self._parallel_iterations)
      # pylint: disable=protected-access
      enter.op._set_control_flow_context(self)
      # pylint: enable=protected-access
      # Add `enter` in this context.
      self._values.add(enter.name)
      self._external_values[val.name] = enter
      result = enter
    else:
      # Already captured: reuse the previously-created Enter, if any.
      actual_val = self._external_values.get(val.name)
      if actual_val is not None:
        result = actual_val
    return result
  def AddOp(self, op):
    """Adds `op` to the current context."""
    if not op.inputs:
      if not op.control_inputs:
        # Add a control edge from the control pivot to this op.
        # pylint: disable=protected-access
        op._add_control_input(self.GetControlPivot().op)
        # pylint: enable=protected-access
      else:
        # Control edges must be in the same context.
        for x in op.control_inputs:
          assert x._get_control_flow_context() == self, (
              "Control inputs must come from Operations in the same while "
              "loop context (not an outer context).")
      for x in op.outputs:
        self._values.add(x.name)
    else:
      for index in range(len(op.inputs)):
        x = op.inputs[index]
        self.AddValue(x)
        # Rewire the input to the Enter created by AddValue, if any.
        real_x = self._external_values.get(x.name)
        if real_x is not None:
          op._update_input(index, real_x)
          # Add a control dependency to prevent loop invariants from
          # enabling ops that should not be executed.
          if real_x.op.type == "RefEnter" and real_x.op.get_attr("is_constant"):
            # pylint: disable=protected-access
            op._add_control_input(self.GetControlPivot().op)
            # pylint: enable=protected-access
      for x in op.outputs:
        self._values.add(x.name)
  def AddForwardCounter(self):
    """Adds a loop that counts the number of iterations.
    This is added to the forward loop at the time when we start to
    create the loop for backprop gradient computation. Called in
    the outer context of this forward context.
    The pseudocode is:
      `n = 0; while (_pivot) { n++; }`
    Returns:
      The number of iterations taken by the forward loop and the loop index.
    """
    n = constant_op.constant(0, name="f_count")
    assert n.op._get_control_flow_context() == self.outer_context
    self.Enter()
    self.AddName(n.name)
    # Standard while-loop plumbing: Enter -> Merge -> Switch -> body ->
    # NextIteration feeding back into the Merge.
    enter_n = _Enter(n, self._name, is_constant=False,
                     parallel_iterations=self._parallel_iterations,
                     name="f_count")
    merge_n = merge([enter_n, enter_n])[0]
    switch_n = switch(merge_n, self._pivot)
    index = math_ops.add(switch_n[1], 1)
    next_n = _NextIteration(index)
    merge_n.op._update_input(1, next_n)
    total_iterations = exit(switch_n[0], name="f_count")
    self.ExitResult([total_iterations])
    self.Exit()
    return total_iterations, next_n
  def AddBackPropCounter(self, count):
    """Add the backprop loop that controls the iterations.
    This is added to the backprop loop. It is used to control the loop
    termination of the backprop loop. Called in the outer context of
    this grad context.
    The pseudocode is:
      `n = count; while (n >= 1) { n--; }`
    Args:
      count: The number of iterations for backprop.
    Returns:
      The loop index.
    """
    one = constant_op.constant(1, name="b_count")
    self.Enter()
    self.AddName(count.name)
    enter_count = _Enter(count, self._name, is_constant=False,
                         parallel_iterations=self._parallel_iterations,
                         name="b_count")
    merge_count = merge([enter_count, enter_count])[0]
    # The counter's merge/decrement double as the context pivots.
    self._pivot_for_pred = merge_count
    cond = math_ops.greater_equal(merge_count, one)
    self._pivot = loop_cond(cond, name="b_count")
    switch_count = switch(merge_count, self._pivot)
    index = math_ops.sub(switch_count[1], one)
    self._pivot_for_body = index
    next_count = _NextIteration(index)
    merge_count.op._update_input(1, next_count)
    self.Exit()
    return next_count
  def AddBackPropAccumulator(self, value):
    """Add an accumulation loop for every loop invariant.
    This is added to the backprop loop. It is used to accumulate
    partial gradients within each loop iteration. Called when in the
    gradient while context.
    The pseudocode is:
    ```
    acc = 0.0;
    while (_pivot) {
      acc += value;
    }
    ```
    Args:
      value: The partial gradient of an iteration for a loop invariant.
    Returns:
      The gradient for a loop invariant.
    """
    # The initial zero must live outside this loop context.
    self.Exit()
    if self.outer_context: self.outer_context.Enter()
    acc = constant_op.constant(0, value.dtype, name="b_acc")
    if self.outer_context: self.outer_context.Exit()
    self.Enter()
    self.AddName(acc.name)
    enter_acc = _Enter(acc, self._name, is_constant=False,
                       parallel_iterations=self._parallel_iterations,
                       name="b_acc")
    merge_acc = merge([enter_acc, enter_acc], name="b_acc")[0]
    switch_acc = switch(merge_acc, self._pivot)
    add_acc = math_ops.add(switch_acc[1], value)
    next_acc = _NextIteration(add_acc)
    merge_acc.op._update_input(1, next_acc)
    acc_result = exit(switch_acc[0], name="b_acc")
    self.ExitResult([acc_result])
    return acc_result
def BuildLoop(self, pred, body, loop_vars):
    """Add the loop termination condition and body to the graph.

    Builds the full dataflow loop: Enter -> Merge -> (pred) -> Switch ->
    (body) -> NextIteration back edge, plus Exit ops for the results.
    TensorArray loop variables are converted to/from their flow tensors
    at each boundary.
    """
    # Keep original_loop_vars to identify which are TensorArrays.
    original_loop_vars = loop_vars
    # Convert TensorArrays to their flow variables.
    loop_vars = _convert_tensorarrays_to_flows(loop_vars)
    loop_vars = ops.convert_n_to_tensor_or_indexed_slices(loop_vars)
    # Let the context know the loop variables so the loop variables
    # would be added in the outer contexts properly.
    self._values = set([x.name for x in loop_vars])
    real_vars = loop_vars
    if self._outer_context:
        real_vars = [self._outer_context.AddValue(x) for x in loop_vars]
    # The Enter ops must not inherit any external control dependencies.
    with ops.control_dependencies(None):
        enter_vars = [_Enter(x, self._name, is_constant=False,
                             parallel_iterations=self._parallel_iterations)
                      for x in real_vars]
    for x in enter_vars:
        x.op._set_control_flow_context(self)  # pylint: disable=protected-access
    self._values = set([x.name for x in enter_vars])
    # Merge each Enter with itself for now; the duplicated second input is
    # replaced below with the NextIteration back edge.
    merge_vars = [merge([x, x])[0] for x in enter_vars]
    self._pivot_for_pred = merge_vars[0]
    # Build the graph for pred.
    merge_vars_with_tensor_arrays = (
        _convert_flows_to_tensorarrays(original_loop_vars, merge_vars))
    c = ops.convert_to_tensor(pred(*merge_vars_with_tensor_arrays))
    self._pivot = loop_cond(c, name="LoopCond")
    switch_vars = [_SwitchRefOrTensor(x, self._pivot) for x in merge_vars]
    # Build the graph for body.
    vars_for_body = [_Identity(x[1]) for x in switch_vars]
    self._pivot_for_body = vars_for_body[0]
    # Convert TensorArray flow variables inside the context back into
    # their associated TensorArrays for calling the body.
    vars_for_body_with_tensor_arrays = (
        _convert_flows_to_tensorarrays(original_loop_vars, vars_for_body))
    body_result = body(*vars_for_body_with_tensor_arrays)
    if not isinstance(body_result, collections.Sequence):
        body_result = [body_result]
    # Store body_result to keep track of TensorArrays returned by body.
    original_body_result = body_result
    # Convert TensorArrays returned by body into their flow variables.
    result = _convert_tensorarrays_to_flows(body_result)
    result = ops.convert_n_to_tensor_or_indexed_slices(result)
    next_vars = [_NextIteration(x) for x in result]
    # Add the back edges to complete the loop.
    assert len(merge_vars) == len(next_vars)
    for x in zip(merge_vars, next_vars):
        x[0].op._update_input(1, x[1])
    # Add the exit ops (`exit` is this module's Exit op, not the builtin).
    exit_vars = [exit(x[0]) for x in switch_vars]
    self._loop_exits = exit_vars
    # Propagate a static shape onto each Exit output when the merge and
    # next-iteration shapes agree.
    for m_var, n_var, e_var in zip(merge_vars, next_vars, exit_vars):
        if m_var.get_shape().is_compatible_with(n_var.get_shape()):
            e_var.set_shape(m_var.get_shape().merge_with(n_var.get_shape()))
    # Exit the loop.
    self.ExitResult(exit_vars)
    # Convert TensorArray flow variables outside the context back into
    # their associated TensorArrays for returning to caller.
    exit_vars_with_tensor_arrays = (
        _convert_flows_to_tensorarrays(original_body_result, exit_vars))
    return (exit_vars_with_tensor_arrays[0]
            if len(exit_vars) == 1
            else exit_vars_with_tensor_arrays)
def While(cond, body, loop_vars, parallel_iterations=10, back_prop=True,
          name=None):
    """Repeat `body` while the condition `cond` is true.

    `cond` is a callable taking the loop variables and returning a boolean
    scalar tensor; `body` is a callable taking the loop variables and
    returning a list of the same length and types. `loop_vars` may contain
    regular Tensors, IndexedSlices, or TensorArray objects (whose flow
    tensors are forwarded between iterations and through gradients).

    Args:
      cond: The termination condition of the loop.
      body: A function that represents the loop body.
      loop_vars: The list of variable input tensors.
      parallel_iterations: The number of iterations allowed to run in
        parallel.
      back_prop: Whether backprop is enabled for this while loop.
      name: Optional name prefix for the returned tensors.

    Returns:
      The output tensors for the loop variables after the loop.

    Raises:
      TypeError: if `cond` or `body` is not callable.
      ValueError: if `loop_vars` is empty.

    Example:
      ```python
      i = Constant(0)
      c = lambda i: math_ops.less(i, 10)
      b = lambda i: math_ops.add(i, 1)
      r = While(c, b, [i])
      ```
    """
    with ops.op_scope(loop_vars, name, "While") as name:
        # Validate inputs before touching the graph.
        if not loop_vars:
            raise ValueError("No loop variables provided")
        for label, fn in (("cond", cond), ("body", body)):
            if not callable(fn):
                raise TypeError("%s must be callable." % label)
        loop_context = WhileContext(parallel_iterations, back_prop, name)
        loop_context.Enter()
        outputs = loop_context.BuildLoop(cond, body, loop_vars)
        loop_context.Exit()
        return outputs
def _AsTensorList(x, p):
    """Return `x` as a list of Tensors or IndexedSlices.

    Entries of `x` that are Operations are replaced by an Identity of `p`
    that carries a control dependency on the operation.

    Args:
      x: A Tensor/IndexedSlices/Operation or a list or tuple of them.
      p: A Tensor to return for entries in `x` that are Operations.

    Returns:
      A list of Tensors or IndexedSlices.
    """
    items = x if isinstance(x, (list, _basetuple)) else [x]

    def _as_tensor(v):
        """Convert one entry to an Identity-wrapped Tensor/IndexedSlices."""
        if isinstance(v, ops.Operation):
            v = with_dependencies([v], p)
        v = ops.convert_to_tensor_or_indexed_slices(v)
        if isinstance(v, ops.Tensor):
            return array_ops.identity(v)
        return ops.IndexedSlices(array_ops.identity(v.values),
                                 array_ops.identity(v.indices))

    return [_as_tensor(v) for v in items]
def _CheckResults(a, b):
assert len(a) == len(b), (
"Values returned by a() and b() must have the same length.")
for x, y in zip(a, b):
assert x.dtype == y.dtype, (
"Values returned by a() [%s] and b() [%s] must have "
"the same type: %s, %s." %
(x.name, y.name, x.dtype.name, y.dtype.name))
def with_dependencies(dependencies, output_tensor, name=None):
    """Produces the content of `output_tensor` only after `dependencies`.

    In some cases, a user may want the output of an operation to be
    consumed externally only after some other dependencies have run
    first. This function returns `output_tensor`, but only after all
    operations in `dependencies` have run. Note that this means that there
    is no guarantee that `output_tensor` will be evaluated after any
    `dependencies` have run.

    See also `tuple` and `group`.

    Args:
      dependencies: A list of operations to run before this op finishes.
      output_tensor: A `Tensor` or `IndexedSlices` that will be returned.
      name: (Optional) A name for this operation.

    Returns:
      Same as `output_tensor`.

    Raises:
      TypeError: if `output_tensor` is not a `Tensor` or `IndexedSlices`.
    """
    with ops.op_scope(dependencies + [output_tensor], name,
                      "control_dependency") as name:
        # Pin the Identity to the same device as the output it forwards.
        with ops.device(output_tensor.device
                        or ops.get_default_graph().get_default_device()):
            with ops.control_dependencies(dependencies):
                output_tensor = ops.convert_to_tensor_or_indexed_slices(output_tensor)
                if isinstance(output_tensor, ops.Tensor):
                    return _Identity(output_tensor, name=name)
                else:
                    # IndexedSlices: only the values carry the dependency;
                    # indices and dense_shape are passed through unchanged.
                    return ops.IndexedSlices(_Identity(output_tensor.values, name=name),
                                             output_tensor.indices,
                                             output_tensor.dense_shape)
def _GroupControlDeps(dev, deps, name=None):
    """Return a NoOp gated on `deps`, placed on `dev` when one is given."""
    with ops.control_dependencies(deps):
        if dev is None:
            return no_op(name=name)
        with ops.device(dev):
            return no_op(name=name)
# TODO(touts): Accept "inputs" as a list.
def group(*inputs, **kwargs):
    """Create an op that groups multiple operations.

    When this op finishes, all ops in `inputs` have finished. This op has
    no output.

    See also `tuple` and `with_dependencies`.

    Args:
      *inputs: One or more tensors to group.
      **kwargs: Optional parameters to pass when constructing the NodeDef.
        name: A name for this operation (optional).

    Returns:
      An Operation that executes all its inputs.

    Raises:
      ValueError: If an unknown keyword argument is provided, or if there
        are no inputs.
    """
    name = kwargs.pop("name", None)
    if kwargs:
        raise ValueError("Unknown keyword arguments: " + ", ".join(kwargs.keys()))
    if not inputs:
        # TODO(touts): Would make sense to return a NoOp.
        raise ValueError("No inputs provided")
    with ops.op_scope(inputs, name, "group_deps") as name:
        # Sorts *inputs according to their devices.
        ops_on_device = {}  # device -> operations specified on the device.
        for inp in inputs:
            dev = inp.device
            if dev in ops_on_device:
                ops_on_device[dev].append(inp)
            else:
                ops_on_device[dev] = [inp]
        if len(ops_on_device) == 1:
            # 1-level tree. The root node is the returned NoOp node.
            (dev, deps), = ops_on_device.items()
            return _GroupControlDeps(dev, deps, name=name)
        # 2-level tree. The root node is the returned NoOp node.
        # deps contains 1 NoOp node for each device.
        deps = []
        def device_key(dev):
            """A sort key that allows None to be compared to strings."""
            return "" if dev is None else dev
        # Sorting keeps the generated graph (and pbtxt) deterministic.
        for dev in sorted(six.iterkeys(ops_on_device), key=device_key):
            deps.append(_GroupControlDeps(dev, ops_on_device[dev]))
        return _GroupControlDeps(None, deps, name=name)
# NOTE: this deliberately shadows the builtin `tuple` within this module.
def tuple(tensors, name=None, control_inputs=None):
    """Group tensors together.

    This creates a tuple of tensors with the same values as the `tensors`
    argument, except that the value of each tensor is only returned after
    the values of all tensors have been computed.

    `control_inputs` contains additional ops that have to finish before
    this op finishes, but whose outputs are not returned.

    This can be used as a "join" mechanism for parallel computations: all
    the argument tensors can be computed in parallel, but the values of
    any tensor returned by `tuple` are only available after all the
    parallel computations are done.

    See also `group` and `with_dependencies`.

    Args:
      tensors: A list of `Tensor`s or `IndexedSlices`, some entries can be
        `None`.
      name: (optional) A name to use as a `name_scope` for the operation.
      control_inputs: List of additional ops to finish before returning.

    Returns:
      Same as `tensors`.

    Raises:
      ValueError: If `tensors` does not contain any `Tensor` or
        `IndexedSlices`.
      TypeError: If `control_inputs` is not a list of `Operation` or
        `Tensor` objects.
    """
    with ops.op_scope(tensors, name, "tuple") as name:
        # `if t` skips None entries (they are passed through unmodified).
        gating_ops = [t.op for t in tensors if t]
        if control_inputs:
            for c in control_inputs:
                if isinstance(c, ops.Tensor):
                    c = c.op
                elif not isinstance(c, ops.Operation):
                    raise TypeError("Control input must be Operation or Tensor: %s" % c)
                gating_ops.append(c)
        # Note that in order to ensure ordering in the pbtxt, we must take
        # care to ensure the order here.
        gating_ops = sorted(set(gating_ops), key=lambda op: op._id)  # Uniquify ops.
        if not gating_ops:
            raise ValueError("Must have at least one Tensor: %s" % tensors)
        # Every returned tensor gets a dependency on this single gate op.
        gate = group(*gating_ops)
        tpl = []
        for t in tensors:
            if t:
                tpl.append(with_dependencies([gate], t))
            else:
                tpl.append(None)
        return tpl
# TODO(yuanbyu): It would be nicer if we could have the distributed list
# support that Derek has been proposing.
# TODO(yuanbyu, mrry): Handle stride to support sliding windows.
def fold(fn, elems, elem_shape, name=None):
    """The fold operator on slices of a tensor.

    This fold operator applies the function `fn` to slices of `elems` on
    dimension 0. The shape of the slices is specified by `elem_shape`.
    `elems` must contain at least one slice
    (`shape(elems)[0] / elem_shape[0] > 0`).

    Args:
      fn: The function to be performed on each slice of the tensor.
      elems: The tensor to whose slices we want to apply `fn`.
      elem_shape: The shape definition for the slices.
      name: Optional name prefix for the returned tensors.

    Returns:
      A tensor resulting from applying `fn` consecutively on each slice of
      `elems`.

    Raises:
      TypeError: if `fn` is not callable.
    """
    with ops.op_scope([elems], name, "fold") as name:
        if not callable(fn):
            raise TypeError("fn must be callable.")
        # Number of slices: shape(elems)[0] / elem_shape[0].
        s0 = array_ops.shape(elems)[0]
        d0 = elem_shape[0]
        n = math_ops.div(s0, d0)
        # Zero offsets for every dimension after the first.
        b1 = array_ops.zeros(array_ops.expand_dims(array_ops.rank(elems) - 1, 0),
                             dtype=dtypes.int32)
        # Initialize the output with slice 0.
        b = array_ops.concat(0, [[0], b1])
        o = array_ops.slice(elems, b, elem_shape)
        i = ops.convert_to_tensor(d0)

        def Compute(i, o):
            # Fold the slice starting at row i into the accumulator o.
            b = array_ops.concat(0, [array_ops.expand_dims(i, 0), b1])
            x = array_ops.slice(elems, b, elem_shape)
            o = fn(o, x)
            i = math_ops.add(i, d0)
            return [i, o]

        r = While(lambda i, o: math_ops.less(i, n), Compute, [i, o])
        return r[1]
def case(pred_fn_pairs, default, exclusive=False, name="case"):
    """Create a case operation.

    The `pred_fn_pairs` parameter is a dict or list of pairs of size N.
    Each pair contains a boolean scalar tensor and a python callable that
    creates the tensors to be returned if the boolean evaluates to True.
    `default` is a callable generating a list of tensors. All the
    callables in `pred_fn_pairs` as well as `default` should return the
    same number and types of tensors.

    If `exclusive==True`, all predicates are evaluated, and a logging
    operation with an error is returned if more than one of the predicates
    evaluates to True. If `exclusive==False`, execution stops at the first
    predicate which evaluates to True, and the tensors generated by the
    corresponding function are returned immediately. If none of the
    predicates evaluate to True, this operation returns the tensors
    generated by `default`.

    Example 1:
      Pseudocode:  `if (x < y) return 17; else return 23;`
      ```
      f1 = lambda: tf.constant(17)
      f2 = lambda: tf.constant(23)
      r = case([(tf.less(x, y), f1)], default=f2)
      ```

    Example 2:
      Pseudocode:
      ```
      if (x < y && x > z) raise OpError("Only one predicate may evaluate true");
      if (x < y) return 17;
      else if (x > z) return 23;
      else return -1;
      ```
      Expressions:
      ```
      def f1(): return tf.constant(17)
      def f2(): return tf.constant(23)
      def f3(): return tf.constant(-1)
      r = case({tf.less(x, y): f1, tf.greater(x, z): f2},
               default=f3, exclusive=True)
      ```

    Args:
      pred_fn_pairs: Dict or list of pairs of a boolean scalar tensor and a
        callable which returns a list of tensors.
      default: A callable that returns a list of tensors.
      exclusive: True iff more than one predicate is allowed to evaluate
        to True.
      name: A name for this operation (optional).

    Returns:
      The tensors returned by the first pair whose predicate evaluated to
      True, or those returned by `default` if none does.

    Raises:
      TypeError: If `pred_fn_pairs` is not a list/dictionary.
      TypeError: If `pred_fn_pairs` is a list but does not contain 2-tuples.
      TypeError: If `fns[i]` is not callable for any i, or `default` is not
        callable.
    """
    pfp = pred_fn_pairs  # For readability
    if not (isinstance(pfp, list) or isinstance(pfp, _basetuple)
            or isinstance(pfp, dict)):
        raise TypeError("fns must be a list, tuple, or dict")
    if isinstance(pfp, dict):
        pfp = pfp.items()
        if not exclusive:
            logging.warn("%s: Provided dictionary of predicate/fn pairs, but "
                         "exclusive=False. Order of conditional tests is "
                         "not guaranteed.", name)
    # Validate every (pred, fn) pair up front.
    for tup in pfp:
        if not isinstance(tup, _basetuple) or len(tup) != 2:
            raise TypeError("Each entry in pred_fn_pairs must be a 2-tuple")
        pred, fn = tup
        if pred.dtype != dtypes.bool:
            # Bug fix: the original passed pred.name as a second TypeError
            # argument instead of %-formatting it into the message.
            raise TypeError("pred must be of type bool: %s" % pred.name)
        if not callable(fn):
            raise TypeError("fn for pred %s must be callable." % pred.name)
    if not callable(default):
        raise TypeError("default must be callable.")

    # Bug fix: unpacking `map(list, zip(*pfp))` raises on an empty input
    # before the `if not preds` guard below could run; handle empty
    # explicitly so the documented fallback to `default()` is reachable.
    if pfp:
        preds, fns = map(list, zip(*pfp))
    else:
        preds, fns = [], []
    with ops.op_scope([preds], name, "case"):
        if not preds:
            return default()
        not_preds = []
        for i, p in enumerate(preds):
            with ops.name_scope("not_%d" % i):
                not_preds.append(math_ops.logical_not(p))
        # and_not_preds[i] = ~p0 & ... & ~p(i-1), seeded with True.
        and_not_preds = [constant_op.constant(True, name="and_not_true")]
        for i, notp in enumerate(not_preds[:-1]):
            with ops.name_scope("and_not_%d" % i):
                and_not_preds.append(math_ops.logical_and(and_not_preds[-1], notp))
        # case_preds[i] = preds[i] & ~preds[0] & ... & ~preds[i-1], so only
        # the first true predicate "wins" when exclusive=False.
        case_preds = []
        for i, (p, and_not_p_prev) in enumerate(zip(preds, and_not_preds)):
            with ops.name_scope("case_%d" % i):
                case_preds.append(math_ops.logical_and(p, and_not_p_prev))

        def _build_case_sequence():
            """Chain cond ops from the last predicate back to the first.

            Builds cond(p_n, f_n, default) innermost, then wraps each
            earlier predicate around it; the lambda is invoked by cond()
            within the same loop iteration, so it sees the previous value.
            """
            prev_case_seq = None
            # Bug fix: zip() returns an iterator on Python 3 and cannot be
            # sliced; materialize it before reversing.
            for i, (cp, fn) in enumerate(list(zip(case_preds, fns))[::-1]):
                prev_case_seq = cond(
                    cp, fn,
                    default if i == 0 else lambda: prev_case_seq,
                    name="If_%d" % i)
            return prev_case_seq

        if exclusive:
            # TODO(ebrevdo): Add Where() for DT_BOOL, replace with
            # Size(Where(preds)).
            preds_c = array_ops.concat(0, preds, name="preds_c")
            num_true_conditions = math_ops.reduce_sum(
                math_ops.cast(preds_c, dtypes.int32), name="num_true_conds")
            at_most_one_true_condition = math_ops.less(
                num_true_conditions, constant_op.constant(2, name="two_true_conds"))
            error_msg = [
                ("More than one condition evaluated as True but "
                 "exclusive=True. Conditions: (%s), Values:"
                 % ", ".join([p.name for p in preds])),
                preds_c]
            # Gate the whole chain on the at-most-one-true assertion.
            with ops.control_dependencies([
                    logging_ops.Assert(condition=at_most_one_true_condition,
                                       data=error_msg, summarize=len(preds))]):
                prev_case_seq = _build_case_sequence()
        else:
            prev_case_seq = _build_case_sequence()
        return prev_case_seq
# Shape registrations for the plumbing control-flow ops: Enter and
# NextIteration (and their Ref variants) pass their input shape through
# unchanged; the static shape of an Exit output is unknown; ControlTrigger
# and NoOp produce no outputs.
ops.RegisterShape("Enter")(common_shapes.unchanged_shape)
ops.RegisterShape("Exit")(common_shapes.unknown_shape)
ops.RegisterShape("NextIteration")(common_shapes.unchanged_shape)
ops.RegisterShape("RefEnter")(common_shapes.unchanged_shape)
ops.RegisterShape("RefExit")(common_shapes.unknown_shape)
ops.RegisterShape("RefNextIteration")(common_shapes.unchanged_shape)
ops.RegisterShape("ControlTrigger")(common_shapes.no_outputs)
ops.RegisterShape("NoOp")(common_shapes.no_outputs)
@ops.RegisterShape("LoopCond")
def _LoopCondShape(op):
    """Shape function for the LoopCond op."""
    # The loop condition must be a scalar; merging with scalar() both
    # validates that and yields the scalar output shape.
    return [op.inputs[0].get_shape().merge_with(tensor_shape.scalar())]
@ops.RegisterShape("Merge")
def _MergeShape(op):
    """Shape function for the Merge op.

    The Merge op takes many inputs of arbitrary shapes, and produces a
    first output that is one of those inputs, and a second scalar
    output.

    If all input shapes are known and have the same rank, the output
    shape must have that rank, otherwise the output shape is unknown.
    Each output dimension is specified only if that dimension in all
    inputs are the same.

    Args:
      op: A Merge Operation.

    Returns:
      A single-element list containing the Shape of the Merge op.
    """
    output_shape = op.inputs[0].get_shape()
    if output_shape.dims is None:
        return [tensor_shape.unknown_shape(), tensor_shape.scalar()]
    else:
        for input_ in op.inputs[1:]:
            input_shape = input_.get_shape()
            # Any unknown or rank-mismatched input makes the output unknown.
            if input_shape.dims is None or input_shape.ndims != output_shape.ndims:
                return [tensor_shape.unknown_shape(), tensor_shape.scalar()]
            else:
                # Keep a dimension only where every input agrees on it.
                output_shape = tensor_shape.TensorShape(
                    [input_dim.value if input_dim.value == output_dim.value else None
                     for input_dim, output_dim in zip(input_shape.dims,
                                                      output_shape.dims)])
        return [output_shape, tensor_shape.scalar()]

# RefMerge shares Merge's shape function.
ops.RegisterShape("RefMerge")(_MergeShape)
@ops.RegisterShape("RefSelect")
def _RefSelectShape(op):
    """Shape function for the RefSelect op.

    RefSelect takes one scalar index input and N inputs of arbitrary
    shapes, and produces one output that is one of those N inputs. The
    output shape is known only when all N inputs share the exact same
    fully-defined shape; otherwise it is conservatively unknown.

    Args:
      op: A RefSelect Operation.

    Returns:
      A single-element list containing the Shape of the RefSelect op.
    """
    # Validates that the index input is a scalar.
    unused_shape = op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
    candidate = op.inputs[1].get_shape()
    if candidate.is_fully_defined() and all(
            inp.get_shape().is_fully_defined() and
            inp.get_shape().is_compatible_with(candidate)
            for inp in op.inputs[2:]):
        return [candidate]
    return [tensor_shape.unknown_shape()]
@ops.RegisterShape("RefSwitch")
@ops.RegisterShape("Switch")
def _SwitchShape(op):
    """Shape function for the Switch and RefSwitch ops.

    Both outputs (the false and true branches) carry the shape of the data
    input; the predicate input must be a scalar.
    """
    input_shape = op.inputs[0].get_shape()
    unused_pred_shape = op.inputs[1].get_shape().merge_with(tensor_shape.scalar())
    return [input_shape] * 2
| lukas-krecan/tensorflow | tensorflow/python/ops/control_flow_ops.py | Python | apache-2.0 | 71,570 |
#!/usr/bin/env python
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create e2e test definitions.
Usage example:
In $GOPATH/src/k8s.io/test-infra,
$ bazel run //experiment:generate_tests -- \
--yaml-config-path=experiment/test_config.yaml \
"""
import argparse
import hashlib
import os
import ruamel.yaml as yaml
# TODO(yguo0905): Generate Prow and testgrid configurations.
PROW_CONFIG_TEMPLATE = """
tags:
- generated # AUTO-GENERATED by experiment/generate_tests.py - DO NOT EDIT!
interval:
agent: kubernetes
labels:
preset-service-account: "true"
preset-k8s-ssh: "true"
name:
spec:
containers:
- args:
env:
image: gcr.io/k8s-testimages/kubekins-e2e:v20180730-8b7ab3104-master
"""
COMMENT = 'AUTO-GENERATED by experiment/generate_tests.py - DO NOT EDIT.'
def get_sha1_hash(data):
    """Returns the hex SHA1 digest of the specified data.

    Accepts bytes or text; text is UTF-8 encoded first, since
    hashlib.sha1().update() requires bytes on Python 3.
    """
    if isinstance(data, str):
        data = data.encode('utf-8')
    sha1_hash = hashlib.sha1()
    sha1_hash.update(data)
    return sha1_hash.hexdigest()
def substitute(job_name, lines):
    """Expand the '${job_name_hash}' placeholder in every line.

    The placeholder is replaced by the first 10 hex digits of the SHA1
    hash of job_name.
    """
    job_hash = get_sha1_hash(job_name)[:10]
    return [line.replace('${job_name_hash}', job_hash) for line in lines]
def get_args(job_name, field):
    """Return the substituted 'args' list from `field` (empty if falsy)."""
    if field:
        return substitute(job_name, field.get('args', []))
    return []
def write_prow_configs_file(output_file, job_defs):
    """Writes the Prow configurations into output_file as YAML.

    Uses ruamel's round-trip dumper so ordering in the config structures is
    preserved, and width=inf so long argument lines are not wrapped.
    """
    with open(output_file, 'w') as fp:
        yaml.dump(
            job_defs, fp, Dumper=yaml.RoundTripDumper, width=float("inf"))
        fp.write('\n')
def apply_job_overrides(envs_or_args, job_envs_or_args):
    '''Applies the envs or args overrides defined in the job level.

    Each override of the form NAME=VALUE (or bare NAME) replaces an
    existing entry with the same NAME, and is appended to the list either
    way. Mutates envs_or_args in place.
    '''
    for override in job_envs_or_args:
        name = override.split('=', 1)[0]
        existing = None
        for candidate in envs_or_args:
            stripped = candidate.strip()
            if stripped == name or stripped.startswith('%s=' % name):
                existing = candidate
                break
        if existing:
            envs_or_args.remove(existing)
        envs_or_args.append(override)
class E2ENodeTest(object):
    """Generates the job and Prow configurations for a node e2e test job.

    Job names are expected to have 6 dash-separated fields; the last three
    index into the node sections of the yaml config (image, k8s version,
    test suite).
    """

    def __init__(self, job_name, job, config):
        self.job_name = job_name
        self.job = job
        # Lookup tables from the yaml config, consumed by generate().
        self.common = config['nodeCommon']
        self.images = config['nodeImages']
        self.k8s_versions = config['nodeK8sVersions']
        self.test_suites = config['nodeTestSuites']

    def __get_job_def(self, args):
        """Returns the job definition from the given args."""
        return {
            'scenario': 'kubernetes_e2e',
            'args': args,
            # NOTE(review): 'UNNOWN' looks like a typo for 'UNKNOWN', kept
            # as-is since it is written into generated config files.
            'sigOwners': self.job.get('sigOwners') or ['UNNOWN'],
            # Indicates that this job definition is auto-generated.
            'tags': ['generated'],
            '_comment': COMMENT,
        }

    def __get_prow_config(self, test_suite, k8s_version):
        """Returns the Prow config for the job from the given fields."""
        prow_config = yaml.round_trip_load(PROW_CONFIG_TEMPLATE)
        prow_config['name'] = self.job_name
        prow_config['interval'] = self.job['interval']
        # Assumes that the value in --timeout is of minutes; x[10:-1]
        # drops the '--timeout=' prefix and the trailing unit character.
        timeout = int(next(
            x[10:-1] for x in test_suite['args'] if (
                x.startswith('--timeout='))))
        container = prow_config['spec']['containers'][0]
        if not container['args']:
            container['args'] = []
        if not container['env']:
            container['env'] = []
        # Prow timeout = job timeout + 20min
        container['args'].append('--timeout=%d' % (timeout + 20))
        container['args'].extend(k8s_version.get('args', []))
        container['args'].append('--root=/go/src')
        container['env'].extend([{'name':'GOPATH', 'value': '/go'}])
        # Specify the appropriate kubekins-e2e image. This allows us to use a
        # specific image (containing a particular Go version) to build and
        # trigger the node e2e test to avoid issues like
        # https://github.com/kubernetes/kubernetes/issues/43534.
        if k8s_version.get('prowImage', None):
            container['image'] = k8s_version['prowImage']
        return prow_config

    def generate(self):
        '''Returns the job and the Prow configurations for this test.'''
        fields = self.job_name.split('-')
        if len(fields) != 6:
            raise ValueError('Expected 6 fields in job name', self.job_name)
        image = self.images[fields[3]]
        # fields[4] carries a 3-character prefix before the version key —
        # TODO confirm the expected field format against the yaml config.
        k8s_version = self.k8s_versions[fields[4][3:]]
        test_suite = self.test_suites[fields[5]]
        # envs are disallowed in node e2e tests.
        if 'envs' in self.common or 'envs' in image or 'envs' in test_suite:
            raise ValueError(
                'envs are disallowed in node e2e test', self.job_name)
        # Generates args.
        args = []
        args.extend(get_args(self.job_name, self.common))
        args.extend(get_args(self.job_name, image))
        args.extend(get_args(self.job_name, test_suite))
        # Generates job config.
        job_config = self.__get_job_def(args)
        # Generates prow config.
        prow_config = self.__get_prow_config(test_suite, k8s_version)
        # Combine --node-args: collapse every '--node-args=...' value into
        # a single space-separated flag at the end of the arg list.
        node_args = []
        job_args = []
        for arg in job_config['args']:
            if '--node-args=' in arg:
                node_args.append(arg.split('=', 1)[1])
            else:
                job_args.append(arg)
        if node_args:
            flag = '--node-args='
            for node_arg in node_args:
                flag += '%s ' % node_arg
            job_args.append(flag.strip())
        job_config['args'] = job_args
        return job_config, prow_config
class E2ETest(object):
    """Generates the job and Prow configurations for a cluster e2e test job.

    Job names are expected to have 7 dash-separated fields; fields 3-6
    select the cloud provider, image, k8s version and test suite from the
    yaml config.
    """

    def __init__(self, output_dir, job_name, job, config):
        # Bug fix: the original line ended with a stray trailing comma,
        # which made env_filename a 1-tuple instead of a path string.
        self.env_filename = os.path.join(output_dir, '%s.env' % job_name)
        self.job_name = job_name
        self.job = job
        # Lookup tables from the yaml config, consumed by generate().
        self.common = config['common']
        self.cloud_providers = config['cloudProviders']
        self.images = config['images']
        self.k8s_versions = config['k8sVersions']
        self.test_suites = config['testSuites']

    def __get_job_def(self, args):
        """Returns the job definition from the given args."""
        return {
            'scenario': 'kubernetes_e2e',
            'args': args,
            # NOTE(review): 'UNNOWN' looks like a typo for 'UNKNOWN', kept
            # as-is since it is written into generated config files.
            'sigOwners': self.job.get('sigOwners') or ['UNNOWN'],
            # Indicates that this job definition is auto-generated.
            'tags': ['generated'],
            '_comment': COMMENT,
        }

    def __get_prow_config(self, test_suite):
        """Returns the Prow config for the e2e job from the given fields."""
        prow_config = yaml.round_trip_load(PROW_CONFIG_TEMPLATE)
        prow_config['name'] = self.job_name
        prow_config['interval'] = self.job['interval']
        # Assumes that the value in --timeout is of minutes; x[10:-1]
        # drops the '--timeout=' prefix and the trailing unit character.
        timeout = int(next(
            x[10:-1] for x in test_suite['args'] if (
                x.startswith('--timeout='))))
        container = prow_config['spec']['containers'][0]
        if not container['args']:
            container['args'] = []
        container['args'].append('--bare')
        # Prow timeout = job timeout + 20min
        container['args'].append('--timeout=%d' % (timeout + 20))
        return prow_config

    def generate(self):
        '''Returns the job and the Prow configurations for this test.'''
        fields = self.job_name.split('-')
        if len(fields) != 7:
            raise ValueError('Expected 7 fields in job name', self.job_name)
        cloud_provider = self.cloud_providers[fields[3]]
        image = self.images[fields[4]]
        # fields[5] carries a 3-character prefix before the version key —
        # TODO confirm the expected field format against the yaml config.
        k8s_version = self.k8s_versions[fields[5][3:]]
        test_suite = self.test_suites[fields[6]]
        # Generates args from each config section.
        args = []
        args.extend(get_args(self.job_name, self.common))
        args.extend(get_args(self.job_name, cloud_provider))
        args.extend(get_args(self.job_name, image))
        args.extend(get_args(self.job_name, k8s_version))
        args.extend(get_args(self.job_name, test_suite))
        # Generates job config.
        job_config = self.__get_job_def(args)
        # Generates Prow config.
        prow_config = self.__get_prow_config(test_suite)
        return job_config, prow_config
def for_each_job(output_dir, job_name, job, yaml_config):
    """Returns the merged Prow config for one test job.

    Dispatches on the third dash-separated field of the job name ('e2e' or
    'e2enode'), generates the scenario job config and Prow config, then
    folds the scenario args into the Prow container args.
    """
    fields = job_name.split('-')
    if len(fields) < 3:
        raise ValueError('Expected at least 3 fields in job name', job_name)
    job_type = fields[2]
    # Generates configurations.
    if job_type == 'e2e':
        generator = E2ETest(output_dir, job_name, job, yaml_config)
    elif job_type == 'e2enode':
        generator = E2ENodeTest(job_name, job, yaml_config)
    else:
        raise ValueError('Unexpected job type ', job_type)
    job_config, prow_config = generator.generate()
    # Applies job-level overrides.
    apply_job_overrides(job_config['args'], get_args(job_name, job))
    # merge job_config into prow_config: everything after '--' is passed
    # through to the scenario.
    args = prow_config['spec']['containers'][0]['args']
    args.append('--scenario=' + job_config['scenario'])
    args.append('--')
    args.extend(job_config['args'])
    return prow_config
def main(yaml_config_path, output_dir):
    """Creates test job definitions.

    Converts the test configurations in yaml_config_path to the job
    definitions in output_dir/generated.yaml.
    """
    # TODO(yguo0905): Validate the configurations from yaml_config_path.
    with open(yaml_config_path) as fp:
        yaml_config = yaml.safe_load(fp)

    output_config = {}
    output_config['periodics'] = []
    for job_name, _ in yaml_config['jobs'].items():
        # Get the envs and args for each job defined under "jobs".
        prow = for_each_job(
            output_dir, job_name, yaml_config['jobs'][job_name], yaml_config)
        output_config['periodics'].append(prow)

    # Write the job definitions to --output-dir/generated.yaml.
    # Bug fix: use os.path.join instead of string concatenation, so a
    # --output-dir without a trailing slash still yields a correct path.
    write_prow_configs_file(os.path.join(output_dir, 'generated.yaml'),
                            output_config)
if __name__ == '__main__':
    # Command-line entry point; see the module docstring for a usage
    # example.
    PARSER = argparse.ArgumentParser(
        description='Create test definitions from the given yaml config')
    PARSER.add_argument('--yaml-config-path', help='Path to config.yaml')
    PARSER.add_argument(
        '--output-dir',
        help='Prowjob config output dir',
        default='config/jobs/kubernetes/generated/')
    ARGS = PARSER.parse_args()
    main(
        ARGS.yaml_config_path,
        ARGS.output_dir)
| mindprince/test-infra | experiment/generate_tests.py | Python | apache-2.0 | 11,228 |
# -*- encoding: utf-8 -*-
#
# Copyright © 2013 Intel
#
# Author: Shuangtai Tian <shuangtai.tian@intel.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
import oslo.messaging
from ceilometer import plugin
# Configuration options contributed by this package: the AMQP exchange
# Nova publishes its notifications on.
OPTS = [
    cfg.StrOpt('nova_control_exchange',
               default='nova',
               help="Exchange name for Nova notifications."),
]

cfg.CONF.register_opts(OPTS)
class ComputeNotificationBase(plugin.NotificationBase):
    """Base class for compute (Nova) notification plugins."""

    @staticmethod
    def get_targets(conf):
        """Return the oslo.messaging targets this plugin listens on.

        One target is created per configured notification topic, all on
        the Nova control exchange.
        """
        exchange = conf.nova_control_exchange
        targets = []
        for topic in conf.notification_topics:
            targets.append(oslo.messaging.Target(topic=topic,
                                                 exchange=exchange))
        return targets
| tanglei528/ceilometer | ceilometer/compute/notifications/__init__.py | Python | apache-2.0 | 1,338 |
"""Support for Wireless Sensor Tags."""
import logging
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol
from wirelesstagpy import NotificationConfig as NC
from homeassistant import util
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
ATTR_VOLTAGE,
CONF_PASSWORD,
CONF_USERNAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
# Strength of signal in dBm
ATTR_TAG_SIGNAL_STRENGTH = "signal_strength"
# Indicates if tag is out of range or not
ATTR_TAG_OUT_OF_RANGE = "out_of_range"
# Number in percents from max power of tag receiver
ATTR_TAG_POWER_CONSUMPTION = "power_consumption"

# Identifiers for the persistent notification shown on setup problems.
NOTIFICATION_ID = "wirelesstag_notification"
NOTIFICATION_TITLE = "Wireless Sensor Tag Setup"

DOMAIN = "wirelesstag"
DEFAULT_ENTITY_NAMESPACE = "wirelesstag"

# Template for signal - first parameter is tag_id,
# second, tag manager mac address
SIGNAL_TAG_UPDATE = "wirelesstag.tag_info_updated_{}_{}"

# Template for signal - tag_id, sensor type and
# tag manager mac address
SIGNAL_BINARY_EVENT_UPDATE = "wirelesstag.binary_event_updated_{}_{}_{}"

# Platform configuration: cloud account credentials are required.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_USERNAME): cv.string,
                vol.Required(CONF_PASSWORD): cv.string,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
class WirelessTagPlatform:
    """Principal object to manage all registered in HA tags.

    Wraps a wirelesstagpy client, caches loaded tags, and exposes the
    local callback URLs / event handlers used for push notifications.
    """

    def __init__(self, hass, api):
        """Designated initializer for wirelesstags platform.

        hass: Home Assistant core object.
        api: authenticated wirelesstagpy.WirelessTags client.
        """
        self.hass = hass
        self.api = api
        self.tags = {}
        # Computed lazily by the local_base_url property.
        self._local_base_url = None

    @property
    def tag_manager_macs(self):
        """Return list of tag managers mac addresses in user account."""
        return self.api.mac_addresses

    def load_tags(self):
        """Load tags from remote server, cache them on self.tags, return them."""
        self.tags = self.api.load_tags()
        return self.tags

    def arm(self, switch):
        """Arm entity sensor monitoring."""
        func_name = f"arm_{switch.sensor_type}"
        arm_func = getattr(self.api, func_name)
        # NOTE(review): getattr without a default raises AttributeError for
        # unknown sensor types, so arm_func can only be None if the API
        # explicitly sets the attribute to None -- confirm intended.
        if arm_func is not None:
            arm_func(switch.tag_id, switch.tag_manager_mac)

    def disarm(self, switch):
        """Disarm entity sensor monitoring."""
        func_name = f"disarm_{switch.sensor_type}"
        disarm_func = getattr(self.api, func_name)
        # Same getattr/None caveat as in arm().
        if disarm_func is not None:
            disarm_func(switch.tag_id, switch.tag_manager_mac)

    def make_notifications(self, binary_sensors, mac):
        """Create configurations for push notifications.

        Builds one config per binary-sensor event plus one config for the
        generic tag-update event, all pointing back at this instance's
        local callback URLs.
        """
        _LOGGER.info("Creating configurations for push notifications.")
        configs = []
        bi_url = self.binary_event_callback_url
        for bi_sensor in binary_sensors:
            configs.extend(bi_sensor.event.build_notifications(bi_url, mac))
        update_url = self.update_callback_url
        update_config = NC.make_config_for_update_event(update_url, mac)
        configs.append(update_config)
        return configs

    def install_push_notifications(self, binary_sensors):
        """Register local push notification from tag manager.

        Raises a persistent notification in the UI on failure instead of
        aborting setup.
        """
        _LOGGER.info("Registering local push notifications.")
        for mac in self.tag_manager_macs:
            configs = self.make_notifications(binary_sensors, mac)
            # install notifications for all tags in tag manager
            # specified by mac
            result = self.api.install_push_notification(0, configs, True, mac)
            if not result:
                self.hass.components.persistent_notification.create(
                    "Error: failed to install local push notifications <br />",
                    title="Wireless Sensor Tag Setup Local Push Notifications",
                    notification_id="wirelesstag_failed_push_notification",
                )
            else:
                _LOGGER.info(
                    "Installed push notifications for all\
                    tags in %s.",
                    mac,
                )

    @property
    def local_base_url(self):
        """Define base url of hass in local network."""
        if self._local_base_url is None:
            self._local_base_url = "http://{}".format(util.get_local_ip())
            port = self.hass.config.api.port
            if port is not None:
                self._local_base_url += f":{port}"
        return self._local_base_url

    @property
    def update_callback_url(self):
        """Return url for local push notifications(update event)."""
        return f"{self.local_base_url}/api/events/wirelesstag_update_tags"

    @property
    def binary_event_callback_url(self):
        """Return url for local push notifications(binary event)."""
        return f"{self.local_base_url}/api/events/wirelesstag_binary_event"

    def handle_update_tags_event(self, event):
        """Handle push event from wireless tag manager.

        Re-broadcasts the event on the per-tag dispatcher signal.
        """
        _LOGGER.info("push notification for update arrived: %s", event)
        # Broad except: payloads come from an external device and may be
        # malformed; a bad event must not take down the event loop.
        try:
            tag_id = event.data.get("id")
            mac = event.data.get("mac")
            dispatcher_send(self.hass, SIGNAL_TAG_UPDATE.format(tag_id, mac), event)
        except Exception as ex:  # pylint: disable=broad-except
            _LOGGER.error(
                "Unable to handle tag update event:\
                %s error: %s",
                str(event),
                str(ex),
            )

    def handle_binary_event(self, event):
        """Handle push notifications for binary (on/off) events.

        Re-broadcasts on the per-tag, per-event-type dispatcher signal.
        """
        _LOGGER.info("Push notification for binary event arrived: %s", event)
        try:
            tag_id = event.data.get("id")
            event_type = event.data.get("type")
            mac = event.data.get("mac")
            dispatcher_send(
                self.hass,
                SIGNAL_BINARY_EVENT_UPDATE.format(tag_id, event_type, mac),
                event,
            )
        except Exception as ex:  # pylint: disable=broad-except
            _LOGGER.error(
                "Unable to handle tag binary event:\
                %s error: %s",
                str(event),
                str(ex),
            )
def setup(hass, config):
    """Set up the Wireless Sensor Tag component.

    Logs in to wirelesstag.net, caches the platform object under
    hass.data[DOMAIN], and wires the two custom push-callback events.
    Returns False (and raises a persistent notification) on any
    connection/authentication failure.
    """
    conf = config[DOMAIN]
    username = conf.get(CONF_USERNAME)
    password = conf.get(CONF_PASSWORD)
    try:
        # Imported lazily so a missing/broken dependency surfaces as a
        # setup failure instead of breaking module import.
        from wirelesstagpy import WirelessTags, WirelessTagsException

        wirelesstags = WirelessTags(username=username, password=password)
        platform = WirelessTagPlatform(hass, wirelesstags)
        platform.load_tags()
        hass.data[DOMAIN] = platform
    except (ConnectTimeout, HTTPError, WirelessTagsException) as ex:
        _LOGGER.error("Unable to connect to wirelesstag.net service: %s", str(ex))
        hass.components.persistent_notification.create(
            "Error: {}<br />" "Please restart hass after fixing this." "".format(ex),
            title=NOTIFICATION_TITLE,
            notification_id=NOTIFICATION_ID,
        )
        return False
    # listen to custom events
    hass.bus.listen(
        "wirelesstag_update_tags", hass.data[DOMAIN].handle_update_tags_event
    )
    hass.bus.listen("wirelesstag_binary_event", hass.data[DOMAIN].handle_binary_event)
    return True
class WirelessTagBaseSensor(Entity):
    """Base class for HA implementation for Wireless Sensor Tag.

    Holds the shared tag bookkeeping (ids, name, cached state) and the
    polling update cycle; subclasses override principal_value and,
    optionally, decorate_value.
    """

    def __init__(self, api, tag):
        """Initialize a base sensor for Wireless Sensor Tag platform."""
        self._api = api
        self._tag = tag
        self._uuid = self._tag.uuid
        self.tag_id = self._tag.tag_id
        self.tag_manager_mac = self._tag.tag_manager_mac
        self._name = self._tag.name
        self._state = None

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def should_poll(self):
        """Return the polling state."""
        return True

    @property
    def available(self):
        """Return True if entity is available."""
        return self._tag.is_alive

    @property
    def principal_value(self):
        """Return base value.

        Subclasses need override based on type of sensor.
        """
        return 0

    def updated_state_value(self):
        """Return formatted value.

        The default implementation formats principal value.
        """
        return self.decorate_value(self.principal_value)

    # pylint: disable=no-self-use
    def decorate_value(self, value):
        """Decorate input value to be well presented for end user."""
        return f"{value:.1f}"

    def update(self):
        """Update state."""
        if not self.should_poll:
            return

        refreshed = self._api.load_tags()
        fresh_tag = refreshed[self._uuid]
        if fresh_tag is None:
            _LOGGER.error('Unable to update tag: "%s"', self.name)
            return

        self._tag = fresh_tag
        self._state = self.updated_state_value()

    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        tag = self._tag
        return {
            ATTR_BATTERY_LEVEL: int(tag.battery_remaining * 100),
            ATTR_VOLTAGE: f"{tag.battery_volts:.2f}V",
            ATTR_TAG_SIGNAL_STRENGTH: f"{tag.signal_strength}dBm",
            ATTR_TAG_OUT_OF_RANGE: not tag.is_in_range,
            ATTR_TAG_POWER_CONSUMPTION: f"{tag.power_consumption:.2f}%",
        }
| leppa/home-assistant | homeassistant/components/wirelesstag/__init__.py | Python | apache-2.0 | 9,650 |
# -*- coding: utf-8 -*-
"""
Density Filter Tool
Created on Thu May 11 11:03:05 2017
@author: cheny
"""
from arcpy import Parameter
import arcpy
from section_cpu import dens_filter_cpu
from multiprocessing import cpu_count
class DensFilterTool(object):
    """ArcGIS Python Toolbox tool: density-based filtering of classified points."""

    def __init__(self):
        """Density Filter Tool: toolbox label, description and run settings."""
        self.label = "4 Density Filtering Tool"
        self.description = "Post Processing - Density Filter"
        self.canRunInBackground = True

    def getParameterInfo(self):
        """Define parameter definitions"""
        #1 Classified input point feature class.
        paramclsinput = Parameter(
            displayName="Input Classified Points",
            name="in_cls_points",
            datatype="DEFeatureClass",
            parameterType="Required",
            direction="Input")
        paramclsinput.filter.list = ["Point"]
        #2 Cluster-center input point feature class.
        paramcntrinput = Parameter(
            displayName="Input Centers Points",
            name="in_cntr_points",
            datatype="DEFeatureClass",
            parameterType="Required",
            direction="Input")
        paramcntrinput.filter.list = ["Point"]
        #3 Integer identifier field on the classified input.
        paramidfield = Parameter(
            displayName="Identifier Field",
            name="id_field",
            datatype="Field",
            parameterType="Required",
            direction="Input")
        paramidfield.parameterDependencies = [paramclsinput.name]
        paramidfield.filter.list = ['Short','Long']
        #4 Field linking each point to its cluster center.
        paramcntridfield = Parameter(
            displayName="Center ID Field",
            name="cntr_id_field",
            datatype="Field",
            parameterType="Required",
            direction="Input")
        # NOTE(review): the dependency points at the *classified* input, not
        # the centers input. Correct if CNTR_ID lives on the classified
        # layer; otherwise this should be paramcntrinput.name -- confirm.
        paramcntridfield.parameterDependencies = [paramclsinput.name]
        paramcntridfield.filter.list = ['Short','Long']
        paramcntridfield.value='CNTR_ID'
        #5 Per-point density field (numeric).
        paramdens = Parameter(
            displayName="Density Field",
            name="density_field",
            datatype="Field",
            parameterType="Required",
            direction="Input")
        # Set the filter to accept only numeric field types.
        paramdens.filter.list = ['Short','Long','Float','Single','Double']
        paramdens.parameterDependencies = [paramclsinput.name]
        paramdens.value='DENSITY'
        #6 Output feature class (default derived in updateParameters).
        paramclsoutput = Parameter(
            displayName="Output Classified Points",
            name="out_cls_points",
            datatype="DEFeatureClass",
            parameterType="Required",
            direction="Output")
        #7 Maximum distance for two points to be density-connected.
        paramdistthrs = Parameter(
            displayName="Distance for Density Connection",
            name="distthrs",
            datatype="GPDouble",
            parameterType="Required",
            direction="Input"
            )
        paramdistthrs.value=100.0
        #8 Minimum density for a point to participate in a connection.
        paramdensthrs= Parameter(
            displayName="Density Threshold for Density Connection",
            name="densthrs",
            datatype="GPDouble",
            parameterType="Required",
            direction="Input"
            )
        paramdensthrs.value=1.2
        #9 Compute device; only the CPU implementation exists today.
        paramdevice = Parameter(
            displayName="Device for Calculation",
            name="calc_device",
            datatype="GPString",
            parameterType="Required",
            direction="Input"
            )
        paramdevice.filter.list=['CPU']
        paramdevice.value='CPU'
        #10 Number of worker processes; defaults to all cores.
        paramcpuc = Parameter(
            displayName="CPU Parallel Cores",
            name="cpu_cores",
            datatype="GPLong",
            parameterType="Required",
            direction="Input"
            )
        paramcpuc.value=cpu_count()
        params = [paramclsinput,paramcntrinput,paramidfield,
                  paramcntridfield,paramdens,paramclsoutput,
                  paramdistthrs,paramdensthrs,paramdevice,
                  paramcpuc]
        return params

    def updateParameters(self, parameters):
        """Derive a default output path ('<input>_filter') from the input."""
#        if parameters[0].altered and not parameters[2].altered:
#            parameters[2].value=arcpy.Describe(parameters[0].valueAsText).OIDFieldName
        if parameters[0].altered and not parameters[5].altered:
            in_fe=parameters[0].valueAsText
            # Shapefiles: insert '_filter' before the '.shp' extension;
            # geodatabase feature classes: simply append '_filter'.
            parameters[5].value=in_fe[:len(in_fe)-4]+'_filter'+in_fe[-4:] if in_fe[-3:]=='shp' else in_fe+'_filter'
        return

    def execute(self, parameters, messages):
        """Run the CPU density filter with the collected parameter values."""
        cls_input=parameters[0].valueAsText
        cntr_input=parameters[1].valueAsText
        id_field=parameters[2].valueAsText
        cntr_id_field=parameters[3].valueAsText
        dens_field=parameters[4].valueAsText
        cls_output=parameters[5].valueAsText
        dist_thrs=parameters[6].value
        dens_thrs=parameters[7].value
        # parameters[8] (device) is deliberately not read: only the CPU
        # implementation exists, so execution always uses dens_filter_cpu.
        cpu_core=parameters[9].value
        dens_filter_cpu(cls_input,cntr_input,id_field,
                        cntr_id_field,dens_field,cls_output,
                        dist_thrs,dens_thrs,cpu_core)
        return
| lopp2005/spatial_cluster_fs | tool_densfilter.py | Python | apache-2.0 | 5,412 |
# Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
"""VTGateCursor, and StreamVTGateCursor."""
import itertools
import operator
import re
from vtdb import base_cursor
from vtdb import dbexceptions
write_sql_pattern = re.compile(r'\s*(insert|update|delete)', re.IGNORECASE)
def ascii_lower(string):
  """Lower-case only ASCII letters of string, leaving other characters as-is."""
  # bytes.lower() maps only b'A'-b'Z', so round-tripping through UTF-8
  # lower-cases exactly the ASCII range of the text.
  encoded = string.encode('utf8')
  return encoded.lower().decode('utf8')
class VTGateCursorMixin(object):
  """Shared helpers for VTGate cursor classes.

  Expects the host class to set self._conn and self._writable.
  """

  def connection_list(self):
    """Return the underlying connection wrapped in a one-element list."""
    conn = self._conn
    return [conn]

  def is_writable(self):
    """Return True if this cursor may execute DML statements."""
    return self._writable
class VTGateCursor(base_cursor.BaseListCursor, VTGateCursorMixin):
  """A cursor for execute statements to VTGate.

  Results are stored as a list.
  """

  def __init__(
      self, connection, tablet_type, keyspace=None,
      shards=None, keyspace_ids=None, keyranges=None,
      writable=False, as_transaction=False, single_db=False,
      twopc=False):
    """Init VTGateCursor.

    Args:
      connection: A PEP0249 connection object.
      tablet_type: Str tablet_type.
      keyspace: Str keyspace or None if batch API will be used.
      shards: List of strings.
      keyspace_ids: Struct('!Q').packed keyspace IDs.
      keyranges: Str keyranges.
      writable: True if writable.
      as_transaction: True if an executemany call is its own transaction.
      single_db: True if single db transaction is needed.
      twopc: True if 2-phase commit is needed.
    """
    super(VTGateCursor, self).__init__(single_db=single_db, twopc=twopc)
    self._conn = connection
    self._writable = writable
    self.description = None
    self.index = None
    self.keyspace = keyspace
    self.shards = shards
    self.keyspace_ids = keyspace_ids
    self.keyranges = keyranges
    self.lastrowid = None
    self.results = None
    self.routing = None
    self.rowcount = 0
    self.tablet_type = tablet_type
    self.as_transaction = as_transaction
    self._clear_batch_state()

  # pass kwargs here in case higher level APIs need to push more data through
  # for instance, a key value for shard mapping
  def execute(self, sql, bind_variables, **kwargs):
    """Perform a query, return the number of rows affected."""
    # Reset both single-result and batch state so stale rows from a
    # previous execute/executemany cannot leak into this call.
    self._clear_list_state()
    self._clear_batch_state()
    if self._handle_transaction_sql(sql):
      return
    entity_keyspace_id_map = kwargs.pop('entity_keyspace_id_map', None)
    entity_column_name = kwargs.pop('entity_column_name', None)
    write_query = bool(write_sql_pattern.match(sql))
    # NOTE: This check may also be done at higher layers but adding it
    # here for completion.
    if write_query:
      if not self.is_writable():
        raise dbexceptions.ProgrammingError('DML on a non-writable cursor', sql)
      if entity_keyspace_id_map:
        raise dbexceptions.ProgrammingError(
            'entity_keyspace_id_map is not allowed for write queries')

    # FIXME(alainjobart): the entity_keyspace_id_map should be in the
    # cursor, same as keyspace_ids, shards, keyranges, to avoid this hack.
    if entity_keyspace_id_map:
      shards = None
      keyspace_ids = None
      keyranges = None
    else:
      shards = self.shards
      keyspace_ids = self.keyspace_ids
      keyranges = self.keyranges
    self.results, self.rowcount, self.lastrowid, self.description = (
        self.connection._execute(  # pylint: disable=protected-access
            sql,
            bind_variables,
            tablet_type=self.tablet_type,
            keyspace_name=self.keyspace,
            shards=shards,
            keyspace_ids=keyspace_ids,
            keyranges=keyranges,
            entity_keyspace_id_map=entity_keyspace_id_map,
            entity_column_name=entity_column_name,
            not_in_transaction=not self.is_writable(),
            effective_caller_id=self.effective_caller_id,
            **kwargs))
    return self.rowcount

  def fetch_aggregate_function(self, func):
    """Apply func (e.g. min/max/sum) to the first column of all rows."""
    return func(row[0] for row in self.fetchall())

  def fetch_aggregate(self, order_by_columns, limit):
    """Fetch from many shards, sort, then remove sort columns.

    A scatter query may return up to limit rows. Sort all results
    manually order them, and return the first rows.

    This is a special-use function.

    Args:
      order_by_columns: The ORDER BY clause. Each element is either a
        column, [column, 'ASC'], or [column, 'DESC'].
      limit: Int limit.

    Returns:
      Smallest rows, with up to limit items. First len(order_by_columns)
      columns are stripped.
    """
    sort_columns = []
    desc_columns = []
    for order_clause in order_by_columns:
      if isinstance(order_clause, (tuple, list)):
        sort_columns.append(order_clause[0])
        if ascii_lower(order_clause[1]) == 'desc':
          desc_columns.append(order_clause[0])
      else:
        sort_columns.append(order_clause)
    # sort the rows and then trim off the prepended sort columns
    if sort_columns:
      sorted_rows = list(sort_row_list_by_columns(
          self.fetchall(), sort_columns, desc_columns))[:limit]
    else:
      sorted_rows = itertools.islice(self.fetchall(), limit)
    neutered_rows = [row[len(order_by_columns):] for row in sorted_rows]
    return neutered_rows

  def _clear_batch_state(self):
    """Clear state that allows traversal to next query's results."""
    self.result_sets = []
    self.result_set_index = None

  def close(self):
    super(VTGateCursor, self).close()
    self._clear_batch_state()

  def executemany(self, sql, params_list, **kwargs):
    """Execute multiple statements in one batch.

    This adds len(params_list) result_sets to self.result_sets. Each
    result_set is a (results, rowcount, lastrowid, fields) tuple.

    Each call overwrites the old result_sets. After execution, nextset()
    is called to move the fetch state to the start of the first
    result set.

    Args:
      sql: The sql text, with %(format)s-style tokens. May be None.
      params_list: A list of the keyword params that are normally sent
        to execute. Either the sql arg or params['sql'] must be defined.
      **kwargs: passed as is to connection._execute_batch.
    """
    if sql:
      sql_list = [sql] * len(params_list)
    else:
      sql_list = [params.get('sql') for params in params_list]
    bind_variables_list = [params['bind_variables'] for params in params_list]
    keyspace_list = [params['keyspace'] for params in params_list]
    keyspace_ids_list = [params.get('keyspace_ids') for params in params_list]
    shards_list = [params.get('shards') for params in params_list]
    self._clear_batch_state()

    # Find other _execute_batch calls in test code.
    self.result_sets = self.connection._execute_batch(  # pylint: disable=protected-access
        sql_list, bind_variables_list, keyspace_list, keyspace_ids_list,
        shards_list,
        self.tablet_type, self.as_transaction, self.effective_caller_id,
        **kwargs)
    self.nextset()

  def nextset(self):
    """Move the fetch state to the start of the next result set.

    self.(results, rowcount, lastrowid, description) will be set to
    the next result_set, and the fetch-commands will work on this
    result set.

    Returns:
      True if another result set exists, None if not (per PEP 249).
    """
    if self.result_set_index is None:
      self.result_set_index = 0
    else:
      self.result_set_index += 1

    self._clear_list_state()
    if self.result_set_index < len(self.result_sets):
      self.results, self.rowcount, self.lastrowid, self.description = (
          self.result_sets[self.result_set_index])
      return True
    else:
      # Exhausted: drop the batch state so stale results cannot be re-read.
      self._clear_batch_state()
      return None
class StreamVTGateCursor(base_cursor.BaseStreamCursor, VTGateCursorMixin):
  """A cursor for streaming statements to VTGate.

  Results are returned as a generator.
  """

  def __init__(
      self, connection, tablet_type, keyspace=None,
      shards=None, keyspace_ids=None,
      keyranges=None, writable=False):
    """Init StreamVTGateCursor.

    Args mirror VTGateCursor; writable is accepted for interface parity
    but streaming queries must be read-only (see execute()).
    """
    super(StreamVTGateCursor, self).__init__()
    self._conn = connection
    self._writable = writable
    self.keyspace = keyspace
    self.shards = shards
    self.keyspace_ids = keyspace_ids
    self.keyranges = keyranges
    self.routing = None
    self.tablet_type = tablet_type

  def is_writable(self):
    return self._writable

  # pass kwargs here in case higher level APIs need to push more data through
  # for instance, a key value for shard mapping
  def execute(self, sql, bind_variables, **kwargs):
    """Start a streaming query.

    Returns 0: rowcount is unknown until the stream is consumed.
    """
    if self._writable:
      raise dbexceptions.ProgrammingError('Streaming query cannot be writable')
    self._clear_stream_state()
    self.generator, self.description = self.connection._stream_execute(  # pylint: disable=protected-access
        sql,
        bind_variables,
        tablet_type=self.tablet_type,
        keyspace_name=self.keyspace,
        shards=self.shards,
        keyspace_ids=self.keyspace_ids,
        keyranges=self.keyranges,
        not_in_transaction=not self.is_writable(),
        effective_caller_id=self.effective_caller_id,
        **kwargs)
    return 0
def sort_row_list_by_columns(row_list, sort_columns=(), desc_columns=()):
  """Sort rows by their leading sort columns.

  Relies on sort stability: one stable sort per sort column, applied from
  the last sort column back to the first, orders rows by the full
  (column_0, column_1, ...) key.

  Args:
    row_list: List (or iterable) of row tuples whose leading entries
      correspond to sort_columns. A list is sorted in place.
    sort_columns: Sequence of column names, major-to-minor order.
    desc_columns: Collection of column names to sort descending.

  Returns:
    The sorted list of rows.
  """
  # reversed() needs a sequence, so materialize enumerate() with list()
  # (was an unnecessary identity comprehension).
  for column_index, column_name in reversed(list(enumerate(sort_columns))):
    key = operator.itemgetter(column_index)
    descending = column_name in desc_columns
    if isinstance(row_list, list):
      row_list.sort(key=key, reverse=descending)
    else:
      # First pass over a non-list iterable: sorted() materializes it.
      row_list = sorted(row_list, key=key, reverse=descending)
  return row_list
| theskyinflames/bpulse-go-client | vendor/github.com/youtube/vitess/py/vtdb/vtgate_cursor.py | Python | apache-2.0 | 9,742 |
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
from __future__ import absolute_import
from __future__ import unicode_literals
import imp
import mock
from oauth2client.client import AccessTokenCredentials
import unittest
# import Python so we can mock the parts we need to here.
import IPython
import IPython.core.magic
import datalab.bigquery
import datalab.context
import datalab.data
import datalab.data.commands
import datalab.utils.commands
def noop_decorator(func):
  """Identity decorator: hand the wrapped callable back unchanged."""
  return func
# Neutralize IPython's magic-registration decorators and get_ipython() so
# the datalab command modules can be imported in a plain test process
# (no live IPython kernel/shell required).
IPython.core.magic.register_line_cell_magic = noop_decorator
IPython.core.magic.register_line_magic = noop_decorator
IPython.core.magic.register_cell_magic = noop_decorator
IPython.get_ipython = mock.Mock()
class TestCases(unittest.TestCase):
  """Tests for datalab.data's %%sql cell parsing and argument expansion."""

  _SQL_MODULE_MAIN = datalab.data._utils._SQL_MODULE_MAIN
  _SQL_MODULE_LAST = datalab.data._utils._SQL_MODULE_LAST

  def test_split_cell(self):
    # TODO(gram): add tests for argument parser.

    # An empty cell produces no query and defines no module members.
    m = imp.new_module('m')
    query = datalab.data.commands._sql._split_cell('', m)
    self.assertIsNone(query)
    self.assertNotIn(TestCases._SQL_MODULE_LAST, m.__dict__)
    self.assertNotIn(TestCases._SQL_MODULE_MAIN, m.__dict__)

    # Whitespace-only cells behave like empty cells.
    m = imp.new_module('m')
    query = datalab.data.commands._sql._split_cell('\n\n', m)
    self.assertIsNone(query)
    self.assertNotIn(TestCases._SQL_MODULE_LAST, m.__dict__)
    self.assertNotIn(TestCases._SQL_MODULE_MAIN, m.__dict__)

    # A plain query defines both the main and the last-query members.
    m = imp.new_module('m')
    query = datalab.data.commands._sql._split_cell('# This is a comment\n\nSELECT 3 AS x', m)
    self.assertEqual(query, m.__dict__[TestCases._SQL_MODULE_MAIN])
    self.assertEqual(query, m.__dict__[TestCases._SQL_MODULE_LAST])
    self.assertEqual('SELECT 3 AS x', m.__dict__[TestCases._SQL_MODULE_MAIN].sql)
    self.assertEqual('SELECT 3 AS x', m.__dict__[TestCases._SQL_MODULE_LAST].sql)

    # Leading variable assignments are stripped from the SQL body.
    m = imp.new_module('m')
    query = datalab.data.commands._sql._split_cell(
        '# This is a comment\n\nfoo="bar"\nSELECT 3 AS x', m)
    self.assertEqual(query, m.__dict__[TestCases._SQL_MODULE_MAIN])
    self.assertEqual(query, m.__dict__[TestCases._SQL_MODULE_LAST])
    self.assertEqual('SELECT 3 AS x', m.__dict__[TestCases._SQL_MODULE_MAIN].sql)
    self.assertEqual('SELECT 3 AS x', m.__dict__[TestCases._SQL_MODULE_LAST].sql)

    # BUG FIX: the original list was missing three commas, so Python's
    # implicit string concatenation silently merged adjacent statements
    # into single elements. Each statement is now its own list entry.
    sql_string_list = ['SELECT 3 AS x',
                       'WITH q1 as (SELECT "1")\nSELECT * FROM q1',
                       'INSERT DataSet.Table (Id, Description)\nVALUES(100,"TestDesc")',
                       'INSERT DataSet.Table (Id, Description)\n'
                       'SELECT * FROM UNNEST([(200,"TestDesc2"),(300,"TestDesc3")])',
                       'INSERT DataSet.Table (Id, Description)\n' +
                       'WITH w as (SELECT ARRAY<STRUCT<Id int64, Description string>>\n' +
                       '[(400, "TestDesc4"),(500, "TestDesc5")] col)\n' +
                       'SELECT Id, Description FROM w, UNNEST(w.col)',
                       'INSERT DataSet.Table (Id, Description)\n' +
                       'VALUES (600,\n' +
                       '(SELECT Description FROM DataSet.Table WHERE Id = 400))',
                       'DELETE FROM DataSet.Table WHERE DESCRIPTION IS NULL',
                       'DELETE FROM DataSet.Table\n' +
                       'WHERE Id NOT IN (100, 200, 300)'
                       ]
    for i in range(0, len(sql_string_list)):
      m = imp.new_module('m')
      query = datalab.data.commands._sql._split_cell(sql_string_list[i], m)
      self.assertEqual(query, m.__dict__[TestCases._SQL_MODULE_MAIN])
      self.assertEqual(query, m.__dict__[TestCases._SQL_MODULE_LAST])
      self.assertEqual(sql_string_list[i], m.__dict__[TestCases._SQL_MODULE_MAIN].sql)
      self.assertEqual(sql_string_list[i], m.__dict__[TestCases._SQL_MODULE_LAST].sql)

    # DEFINE QUERY creates a named query member but no main query.
    # (A duplicated identical assertion from the original was removed.)
    m = imp.new_module('m')
    query = datalab.data.commands._sql._split_cell('DEFINE QUERY q1\nSELECT 3 AS x', m)
    self.assertEqual(query, m.__dict__[TestCases._SQL_MODULE_LAST])
    self.assertEqual('SELECT 3 AS x', m.q1.sql)
    self.assertNotIn(TestCases._SQL_MODULE_MAIN, m.__dict__)
    self.assertEqual('SELECT 3 AS x', m.__dict__[TestCases._SQL_MODULE_LAST].sql)

    # A trailing query after DEFINE QUERY becomes the main query and may
    # reference the named query via $q1.
    m = imp.new_module('m')
    query = datalab.data.commands._sql._split_cell(
        'DEFINE QUERY q1\nSELECT 3 AS x\nSELECT * FROM $q1', m)
    self.assertEqual(query, m.__dict__[TestCases._SQL_MODULE_MAIN])
    self.assertEqual(query, m.__dict__[TestCases._SQL_MODULE_LAST])
    self.assertEqual('SELECT 3 AS x', m.q1.sql)
    self.assertEqual('SELECT * FROM $q1', m.__dict__[TestCases._SQL_MODULE_MAIN].sql)
    self.assertEqual('SELECT * FROM $q1', m.__dict__[TestCases._SQL_MODULE_LAST].sql)

  @mock.patch('datalab.context._context.Context.default')
  def test_arguments(self, mock_default_context):
    mock_default_context.return_value = TestCases._create_context()
    m = imp.new_module('m')
    query = datalab.data.commands._sql._split_cell("""
words = ('thus', 'forsooth')
limit = 10

SELECT * FROM [publicdata:samples.shakespeare]
WHERE word IN $words
LIMIT $limit
""", m)
    sql = datalab.bigquery.Query(query, values={}).sql
    self.assertEqual('SELECT * FROM [publicdata:samples.shakespeare]\n' +
                     'WHERE word IN ("thus", "forsooth")\nLIMIT 10', sql)

    # As above but with overrides, using list
    sql = datalab.bigquery.Query(query, words=['eyeball'], limit=5).sql
    self.assertEqual('SELECT * FROM [publicdata:samples.shakespeare]\n' +
                     'WHERE word IN ("eyeball")\nLIMIT 5', sql)

    # As above but with overrides, using tuple and values dict
    sql = datalab.bigquery.Query(query, values={'limit': 3, 'words': ('thus',)}).sql
    self.assertEqual('SELECT * FROM [publicdata:samples.shakespeare]\n' +
                     'WHERE word IN ("thus")\nLIMIT 3', sql)

    # As above but with list argument
    m = imp.new_module('m')
    query = datalab.data.commands._sql._split_cell("""
words = ['thus', 'forsooth']
limit = 10

SELECT * FROM [publicdata:samples.shakespeare]
WHERE word IN $words
LIMIT $limit
""", m)
    sql = datalab.bigquery.Query(query, values={}).sql
    self.assertEqual('SELECT * FROM [publicdata:samples.shakespeare]\n' +
                     'WHERE word IN ("thus", "forsooth")\nLIMIT 10', sql)

    # As above but with overrides, using list
    sql = datalab.bigquery.Query(query, values={'limit': 2, 'words': ['forsooth']}).sql
    self.assertEqual('SELECT * FROM [publicdata:samples.shakespeare]\n' +
                     'WHERE word IN ("forsooth")\nLIMIT 2', sql)

    # As above but with overrides, using tuple
    sql = datalab.bigquery.Query(query, words=('eyeball',)).sql
    self.assertEqual('SELECT * FROM [publicdata:samples.shakespeare]\n' +
                     'WHERE word IN ("eyeball")\nLIMIT 10', sql)

  # TODO(gram): add some tests for source and datestring variables
  def test_date(self):
    # TODO(gram): complete this test
    pass

  def test_sql_cell(self):
    # TODO(gram): complete this test
    pass

  @staticmethod
  def _create_context():
    """Build a datalab Context with dummy credentials for mocking."""
    project_id = 'test'
    creds = AccessTokenCredentials('test_token', 'test_ua')
    return datalab.context.Context(project_id, creds)
| jdanbrown/pydatalab | legacy_tests/kernel/sql_tests.py | Python | apache-2.0 | 7,834 |
from datetime import timedelta
from unittest import TestCase
import pandas as pd
import pandas.testing
from fireant.dataset.modifiers import Rollup
from fireant.dataset.totals import scrub_totals_from_share_results
from fireant.tests.dataset.mocks import (
dimx0_metricx2_df,
dimx1_str_df,
dimx1_str_totals_df,
dimx2_date_str_df,
dimx2_date_str_totals_df,
dimx2_date_str_totalsx2_df,
mock_dataset,
)
TIMESTAMP_UPPERBOUND = pd.Timestamp.max - timedelta(seconds=1)
class ScrubTotalsTests(TestCase):
    """Tests for scrub_totals_from_share_results.

    Totals rows must be removed from share results for every dimension
    that was queried *without* a Rollup modifier, and kept for every
    dimension queried with one.
    """

    def ignore_dimensionless_result_sets(self):
        # NOTE(review): missing the 'test_' prefix, so the test runner never
        # executes this case; presumably parked deliberately -- confirm
        # before renaming to test_*.
        result = scrub_totals_from_share_results(dimx0_metricx2_df, [])

        expected = dimx0_metricx2_df

        pandas.testing.assert_frame_equal(result, expected)

    def test_remove_totals_for_non_rollup_dimensions(self):
        result = scrub_totals_from_share_results(dimx1_str_totals_df, [mock_dataset.fields.political_party])

        expected = dimx1_str_df

        pandas.testing.assert_frame_equal(result, expected)

    def test_remove_totals_for_non_rollup_dimensions_with_multiindex(self):
        result = scrub_totals_from_share_results(
            dimx2_date_str_totals_df, [mock_dataset.fields.timestamp, mock_dataset.fields.political_party]
        )

        expected = dimx2_date_str_df

        pandas.testing.assert_frame_equal(result, expected)

    def test_remove_totals_for_non_rollup_dimensions_with_multiindex_and_multiple_totals(self):
        result = scrub_totals_from_share_results(
            dimx2_date_str_totalsx2_df, [mock_dataset.fields.timestamp, mock_dataset.fields.political_party]
        )

        expected = dimx2_date_str_df

        pandas.testing.assert_frame_equal(result, expected)

    def test_do_not_remove_totals_for_rollup_dimensions(self):
        result = scrub_totals_from_share_results(dimx1_str_totals_df, [Rollup(mock_dataset.fields.political_party)])

        expected = dimx1_str_totals_df

        pandas.testing.assert_frame_equal(result, expected)

    def test_do_not_remove_totals_for_rollup_dimensions_with_multiindex(self):
        result = scrub_totals_from_share_results(
            dimx2_date_str_totals_df, [mock_dataset.fields.timestamp, Rollup(mock_dataset.fields.political_party)]
        )

        expected = dimx2_date_str_totals_df

        pandas.testing.assert_frame_equal(result, expected)

    def test_do_not_remove_totals_for_rollup_dimensions_with_multiindex_and_lower_dimension_totals(self):
        result = scrub_totals_from_share_results(
            dimx2_date_str_totalsx2_df, [mock_dataset.fields.timestamp, Rollup(mock_dataset.fields.political_party)]
        )

        expected = dimx2_date_str_totalsx2_df.loc[:TIMESTAMP_UPPERBOUND]

        pandas.testing.assert_frame_equal(result, expected)

    def test_do_not_remove_totals_for_rollup_dimensions_with_multiindex_and_higher_dimension_totals(self):
        result = scrub_totals_from_share_results(
            dimx2_date_str_totalsx2_df, [Rollup(mock_dataset.fields.timestamp), mock_dataset.fields.political_party]
        )

        # BUG FIX: DataFrame.append was deprecated in pandas 1.4 and removed
        # in 2.0; build the same expected frame (all real party rows plus
        # the final grand-total row) with pd.concat instead.
        expected = pd.concat(
            [
                dimx2_date_str_totalsx2_df.loc[(slice(None), slice('Democrat', 'Republican')), :],
                dimx2_date_str_totalsx2_df.iloc[[-1]],
            ]
        )

        pandas.testing.assert_frame_equal(result, expected)

    def test_do_not_remove_totals_for_rollup_dimensions_with_multiindex_and_all_rolled_up(self):
        result = scrub_totals_from_share_results(
            dimx2_date_str_totalsx2_df,
            [Rollup(mock_dataset.fields.timestamp), Rollup(mock_dataset.fields.political_party)],
        )

        expected = dimx2_date_str_totalsx2_df

        pandas.testing.assert_frame_equal(result, expected)
| mikeengland/fireant | fireant/tests/dataset/test_filter_totals_from_share_results.py | Python | apache-2.0 | 3,701 |
#!/usr/bin/env python3
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script translates invalid authors in the contributors list generated
# by generate-contributors.py. When the script encounters an author name that
# is considered invalid, it searches Github and JIRA in an attempt to search
# for replacements. This tool runs in two modes:
#
# (1) Interactive mode: For each invalid author name, this script presents
# all candidate replacements to the user and awaits user response. In this
# mode, the user may also input a custom name. This is the default.
#
# (2) Non-interactive mode: For each invalid author name, this script replaces
# the name with the first valid candidate it can find. If there is none, it
# uses the original name. This can be enabled through the --non-interactive flag.
import os
import sys
from releaseutils import *
# You must set the following before use!
JIRA_API_BASE = os.environ.get("JIRA_API_BASE", "https://issues.apache.org/jira")
JIRA_USERNAME = os.environ.get("JIRA_USERNAME", None)
JIRA_PASSWORD = os.environ.get("JIRA_PASSWORD", None)
GITHUB_API_TOKEN = os.environ.get("GITHUB_API_TOKEN", None)

# Refuse to run without credentials for both services.
if not JIRA_USERNAME or not JIRA_PASSWORD:
    sys.exit("Both JIRA_USERNAME and JIRA_PASSWORD must be set")
if not GITHUB_API_TOKEN:
    sys.exit("GITHUB_API_TOKEN must be set")

# Write new contributors list to <old_file_name>.final
if not os.path.isfile(contributors_file_name):
    print("Contributors file %s does not exist!" % contributors_file_name)
    print("Have you run ./generate-contributors.py yet?")
    sys.exit(1)
contributors_file = open(contributors_file_name, "r")
warnings = []

# In non-interactive mode, this script will choose the first replacement that is valid
INTERACTIVE_MODE = "--non-interactive" not in set(sys.argv[1:])
if INTERACTIVE_MODE:
    print("Running in interactive mode. To disable this, provide the --non-interactive flag.")

# Setup Github and JIRA clients
jira_client = JIRA(options={"server": JIRA_API_BASE},
                   basic_auth=(JIRA_USERNAME, JIRA_PASSWORD))
github_client = Github(GITHUB_API_TOKEN)

# Load known author translations that are cached locally.
# Lines look like "<old name> - <new name>"; lines starting with "#" are comments.
known_translations = {}
known_translations_file_name = "known_translations"
with open(known_translations_file_name, "r") as known_translations_file:
    for mapping in known_translations_file:
        if mapping.startswith("#"):
            continue
        old_name, new_name = mapping.strip("\n").split(" - ")
        known_translations[old_name] = new_name
# Open again in append mode in case the user adds new mappings during this run.
known_translations_file = open(known_translations_file_name, "a")

# Generate candidates for the given author. This should only be called if the given author
# name does not represent a full name, as this operation is somewhat expensive. Under the
# hood, it makes several calls to the Github and JIRA API servers to find the candidates.
#
# This returns a list of (candidate name, source) 2-tuples. E.g.
# [
#   (NOT_FOUND, "No full name found for Github user andrewor14"),
#   ("Andrew Or", "Full name of JIRA user andrewor14"),
#   ("Andrew Orso", "Full name of SPARK-1444 assignee andrewor14"),
#   ("Andrew Ordall", "Full name of SPARK-1663 assignee andrewor14"),
#   (NOT_FOUND, "No assignee found for SPARK-1763")
# ]
NOT_FOUND = "Not found"
def generate_candidates(author, issues):
    """Return a list of (candidate name, source) 2-tuples for the given author.

    Queries the Github and JIRA APIs for full names associated with ``author``,
    plus the full names of the assignees of each JIRA issue in ``issues``.
    Candidates that cannot be resolved are reported as (NOT_FOUND, reason).
    Missing JIRA issues are recorded in the module-level ``warnings`` list
    rather than aborting the run.
    """
    candidates = []
    # First check for full name of Github user
    github_name = get_github_name(author, github_client)
    if github_name:
        candidates.append((github_name, "Full name of Github user %s" % author))
    else:
        candidates.append((NOT_FOUND, "No full name found for Github user %s" % author))
    # Then do the same for JIRA user
    jira_name = get_jira_name(author, jira_client)
    if jira_name:
        candidates.append((jira_name, "Full name of JIRA user %s" % author))
    else:
        candidates.append((NOT_FOUND, "No full name found for JIRA user %s" % author))
    # Then do the same for the assignee of each of the associated JIRAs.
    # Note that a given issue may not have an assignee, or the assignee may not
    # have a full name.
    for issue in issues:
        try:
            jira_issue = jira_client.issue(issue)
        except JIRAError as e:
            # Do not exit just because an issue is not found!
            if e.status_code == 404:
                warnings.append("Issue %s not found!" % issue)
                continue
            # Bare raise preserves the original traceback ("raise e" would not).
            raise
        jira_assignee = jira_issue.fields.assignee
        if jira_assignee:
            user_name = jira_assignee.name
            display_name = jira_assignee.displayName
            if display_name:
                candidates.append(
                    (display_name, "Full name of %s assignee %s" % (issue, user_name)))
            else:
                candidates.append(
                    (NOT_FOUND, "No full name found for %s assignee %s" % (issue, user_name)))
        else:
            candidates.append((NOT_FOUND, "No assignee found for %s" % issue))
    # Guard against special characters in candidate names.
    # BUG FIX: the original called the Python 2 builtin ``unicode``, which does
    # not exist under this script's python3 shebang and raised NameError (the
    # surrounding except only caught TypeError). Decode bytes explicitly instead;
    # JIRA may already return text, which is passed through unchanged.
    for i, (candidate, source) in enumerate(candidates):
        if isinstance(candidate, bytes):
            candidate = candidate.decode("utf-8")
        candidate = unidecode.unidecode(candidate).strip()
        candidates[i] = (candidate, source)
    return candidates
# Translate each invalid author by searching for possible candidates from Github and JIRA.
# In interactive mode, this script presents the user with a list of choices and has the user
# select from this list. Additionally, the user may also choose to enter a custom name.
# In non-interactive mode, this script picks the first valid author name from the candidates.
# If no such name exists, the original name is used (without the JIRA numbers).
print("\n========================== Translating contributor list ==========================")
lines = contributors_file.readlines()
contributions = []
for i, line in enumerate(lines):
    # It is possible that a line in the contributor file only has the github name, e.g. yhuai.
    # So, we need a strip() to remove the newline.
    temp_author = line.strip(" * ").split(" -- ")[0].strip()
    print("Processing author %s (%d/%d)" % (temp_author, i + 1, len(lines)))
    if not temp_author:
        error_msg = "  ERROR: Expected the following format \" * <author> -- <contributions>\"\n"
        error_msg += "  ERROR: Actual = %s" % line
        print(error_msg)
        warnings.append(error_msg)
        contributions.append(line)
        continue
    # temp_author is "<name>/<issue>/<issue>/..."; the leading segment is the author.
    author = temp_author.split("/")[0]
    # Use the local copy of known translations where possible
    if author in known_translations:
        line = line.replace(temp_author, known_translations[author])
    elif not is_valid_author(author):
        new_author = author
        issues = temp_author.split("/")[1:]
        candidates = generate_candidates(author, issues)
        # Print out potential replacement candidates along with the sources, e.g.
        #   [X] No full name found for Github user andrewor14
        #   [X] No assignee found for SPARK-1763
        #   [0] Andrew Or - Full name of JIRA user andrewor14
        #   [1] Andrew Orso - Full name of SPARK-1444 assignee andrewor14
        #   [2] Andrew Ordall - Full name of SPARK-1663 assignee andrewor14
        #   [3] andrewor14 - Raw Github username
        #   [4] Custom
        candidate_names = []
        bad_prompts = []  # Prompts that can't actually be selected; print these first.
        good_prompts = []  # Prompts that contain valid choices
        for candidate, source in candidates:
            if candidate == NOT_FOUND:
                bad_prompts.append("    [X] %s" % source)
            else:
                index = len(candidate_names)
                candidate_names.append(candidate)
                good_prompts.append("    [%d] %s - %s" % (index, candidate, source))
        raw_index = len(candidate_names)
        custom_index = len(candidate_names) + 1
        for p in bad_prompts:
            print(p)
        if bad_prompts:
            print("    ---")
        for p in good_prompts:
            print(p)
        # In interactive mode, additionally provide "custom" option and await user response
        if INTERACTIVE_MODE:
            print("    [%d] %s - Raw Github username" % (raw_index, author))
            print("    [%d] Custom" % custom_index)
            # BUG FIX: the original used the Python 2 builtin raw_input, which
            # raises NameError under this script's python3 shebang; use input().
            response = input("    Your choice: ")
            last_index = custom_index
            while not response.isdigit() or int(response) > last_index:
                response = input("    Please enter an integer between 0 and %d: " % last_index)
            response = int(response)
            if response == custom_index:
                new_author = input("    Please type a custom name for this author: ")
            elif response != raw_index:
                new_author = candidate_names[response]
        # In non-interactive mode, just pick the first candidate
        else:
            valid_candidate_names = [name for name, _ in candidates
                                     if is_valid_author(name) and name != NOT_FOUND]
            if valid_candidate_names:
                new_author = valid_candidate_names[0]
        # Finally, capitalize the author and replace the original one with it.
        # If the final replacement is still invalid, log a warning.
        if is_valid_author(new_author):
            new_author = capitalize_author(new_author)
        else:
            warnings.append(
                "Unable to find a valid name %s for author %s" % (author, temp_author))
        print("    * Replacing %s with %s" % (author, new_author))
        # If we are in interactive mode, prompt the user whether we want to remember this new
        # mapping
        if (INTERACTIVE_MODE and
                author not in known_translations and
                yesOrNoPrompt(
                    "    Add mapping %s -> %s to known translations file?" % (author, new_author))):
            known_translations_file.write("%s - %s\n" % (author, new_author))
            known_translations_file.flush()
        # BUG FIX: the original replaced temp_author with ``author`` (itself,
        # minus issue suffixes), silently discarding the translation computed
        # above; the replacement must use ``new_author``, as the log line says.
        line = line.replace(temp_author, new_author)
    contributions.append(line)
print("==================================================================================\n")
contributors_file.close()
known_translations_file.close()
# Sort the contributions before writing them to the new file.
# Additionally, check if there are any duplicate author rows.
# This could happen if the same user has both a valid full
# name (e.g. Andrew Or) and an invalid one (andrewor14).
# If so, warn the user about this at the end.
contributions.sort()
all_authors = set()
new_contributors_file_name = contributors_file_name + ".final"
new_contributors_file = open(new_contributors_file_name, "w")
for line in contributions:
author = line.strip(" * ").split(" -- ")[0]
if author in all_authors:
warnings.append("Detected duplicate author name %s. Please merge these manually." % author)
all_authors.add(author)
new_contributors_file.write(line)
new_contributors_file.close()
print("Translated contributors list successfully written to %s!" % new_contributors_file_name)
# Log any warnings encountered in the process
if warnings:
print("\n========== Warnings encountered while translating the contributor list ===========")
for w in warnings:
print(w)
print("Please manually correct these in the final contributors list at %s." %
new_contributors_file_name)
print("==================================================================================\n")
| dbtsai/spark | dev/create-release/translate-contributors.py | Python | apache-2.0 | 12,628 |
import ceilometerclient.client as clclient
import logging
log = logging.getLogger(__name__)
class Metering:
    """Wrapper for the OpenStack Metering service (Ceilometer)."""

    def __init__(self, conf):
        """Build a v2 Ceilometer client from credentials found in ``conf``.

        :param conf: configparser-like object with an "environment" section
                     holding OS_USERNAME / OS_PASSWORD / OS_AUTH_URL /
                     OS_TENANT_NAME.
        """
        creds = self._get_creds(conf)
        self.ceilo = clclient.get_client(2, **creds)

    def _get_creds(self, conf):
        """Extract OpenStack credentials as the kwargs get_client() expects."""
        d = {}
        d['os_username'] = conf.get("environment", "OS_USERNAME")
        d['os_password'] = conf.get("environment", "OS_PASSWORD")
        d['os_auth_url'] = conf.get("environment", "OS_AUTH_URL")
        d['os_tenant_name'] = conf.get("environment", "OS_TENANT_NAME")
        return d

    def meter_list(self, query=None):
        """Return the list of meters, optionally filtered by ``query``.

        :param query: optional list of ceilometer query filter dicts,
                      forwarded as the ``q`` argument of meters.list().
                      BUG FIX: the original accepted this parameter but
                      silently ignored it.
        """
        return self.ceilo.meters.list(q=query)
| bigfootproject/pyostack | pyostack/metering.py | Python | apache-2.0 | 712 |
#
# This file is part of pySMT.
#
# Copyright 2014 Andrea Micheli and Marco Gario
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import picosat
import pysmt.logics
from pysmt import typing as types
from pysmt.solvers.solver import Solver
from pysmt.solvers.eager import EagerModel
from pysmt.rewritings import CNFizer
from pysmt.decorators import clear_pending_pop, catch_conversion_error
from six.moves import xrange
from six import iteritems
class PicosatSolver(Solver):
    """PicoSAT solver.

    Pure SAT backend: only quantifier-free boolean logic (QF_BOOL) is
    supported. Formulas are CNF-ized on the fly and fed to the picosat
    C library through its low-level binding.
    """

    LOGICS = [pysmt.logics.QF_BOOL]

    def __init__(self, environment, logic, user_options):
        Solver.__init__(self,
                        environment=environment,
                        logic=logic,
                        user_options=user_options)
        self.mgr = environment.formula_manager
        self.pico = picosat.picosat_init()
        self.converter = None
        self.cnfizer = CNFizer(environment=environment)
        # Model produced by the last satisfiable solve() call, or None.
        self.latest_model = None
        # Maps BOOL symbols to their picosat variable ids.
        self._var_ids = {}

    def _get_var_id(self, symbol):
        """Return (allocating on demand) the picosat id of a BOOL symbol."""
        if not symbol.is_symbol(types.BOOL):
            raise NotImplementedError("No theory terms are supported in PicoSAT")
        if symbol in self._var_ids:
            return self._var_ids[symbol]
        else:
            vid = picosat.picosat_inc_max_var(self.pico)
            self._var_ids[symbol] = vid
            return vid

    @clear_pending_pop
    def reset_assertions(self):
        # picosat has no "clear" primitive: destroy and re-create the context.
        picosat.picosat_reset(self.pico)
        self.pico = picosat.picosat_init()

    @clear_pending_pop
    def declare_variable(self, var):
        # No need to declare variables; ids are allocated lazily in _get_var_id.
        pass

    def _get_pico_lit(self, lit):
        """Convert a (possibly negated) BOOL symbol into a signed picosat literal."""
        mult = 1
        var = lit
        if lit.is_not():
            mult = -1
            var = lit.arg(0)
        vid = self._get_var_id(var)
        return vid * mult

    @clear_pending_pop
    @catch_conversion_error
    def add_assertion(self, formula, named=None):
        # First, we get rid of True/False constants
        formula = formula.simplify()
        if formula.is_false():
            # An empty clause makes the problem trivially unsatisfiable.
            picosat.picosat_add(self.pico, 0)
        elif not formula.is_true():
            cnf = self.cnfizer.convert(formula)
            self._add_cnf_assertion(cnf)

    def _add_cnf_assertion(self, cnf):
        """Push a CNF (iterable of clauses) into picosat; 0 terminates a clause."""
        for clause in cnf:
            for lit in clause:
                v = self._get_pico_lit(lit)
                picosat.picosat_add(self.pico, v)
            picosat.picosat_add(self.pico, 0)

    @clear_pending_pop
    @catch_conversion_error
    def solve(self, assumptions=None):
        """Check satisfiability; unit assumptions use picosat_assume, the rest
        are pushed in a temporary frame that is popped on the next operation."""
        if assumptions is not None:
            cnf = []
            for a in assumptions:
                cnf += self.cnfizer.convert(a)

            missing = []
            for clause in cnf:
                if len(clause) == 1:
                    v = self._get_pico_lit(next(iter(clause)))
                    picosat.picosat_assume(self.pico, v)
                else:
                    missing.append(clause)

            if len(missing) > 0:
                self.push()
                self._add_cnf_assertion(missing)
                self.pending_pop = True

        res = picosat.picosat_sat(self.pico, -1)
        if res == picosat.PICOSAT_SATISFIABLE:
            self.latest_model = self.get_model()
            return True
        else:
            self.latest_model = None
            return False

    def get_value(self, item):
        """Return the value of ``item`` in the latest model.

        BUG FIX: the original called ``self.get_model()`` without storing the
        result (get_model does not cache), so ``self.latest_model`` remained
        None and the next line raised AttributeError.
        """
        if self.latest_model is None:
            self.latest_model = self.get_model()
        return self.latest_model.get_value(item)

    def get_model(self):
        """Build an EagerModel from picosat's current assignment."""
        assignment = {}
        for var, vid in iteritems(self._var_ids):
            # picosat_deref returns 1/-1 for true/false; 0 means unassigned,
            # which should never happen after a SAT answer.
            v = picosat.picosat_deref(self.pico, vid)
            if v == 0:
                assert False
            value = self.mgr.Bool(v == 1)
            assignment[var] = value
        return EagerModel(assignment=assignment,
                          environment=self.environment)

    @clear_pending_pop
    def push(self, levels=1):
        for _ in xrange(levels):
            picosat.picosat_push(self.pico)

    @clear_pending_pop
    def pop(self, levels=1):
        for _ in xrange(levels):
            picosat.picosat_pop(self.pico)

    def exit(self):
        """Release the underlying picosat context (idempotent)."""
        if not self._destroyed:
            self._destroyed = True
            picosat.picosat_reset(self.pico)
| idkwim/pysmt | pysmt/solvers/pico.py | Python | apache-2.0 | 4,835 |
# coding: utf-8
#
# Copyright 2021 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for core.domain.beam_job_domain."""
from __future__ import annotations
import datetime
from core import utils
from core.domain import beam_job_domain
from core.jobs.batch_jobs import model_validation_jobs
from core.platform import models
from core.tests import test_utils
MYPY = False
if MYPY: # pragma: no cover
from mypy_imports import beam_job_models
(beam_job_models,) = models.Registry.import_models([models.NAMES.beam_job])
class BeamJobTests(test_utils.TestBase):

    NOW = datetime.datetime.utcnow()

    def _make_run_in_state(
        self, state: beam_job_models.BeamJobState
    ) -> beam_job_domain.BeamJobRun:
        """Returns a BeamJobRun whose only distinguishing property is its state."""
        return beam_job_domain.BeamJobRun(
            '123', 'FooJob', state.value, self.NOW, self.NOW, True)

    def test_usage(self) -> None:
        job = beam_job_domain.BeamJob(
            model_validation_jobs.AuditAllStorageModelsJob)
        self.assertEqual(job.name, 'AuditAllStorageModelsJob')

    def test_in_terminal_state(self) -> None:
        terminal_states = [
            beam_job_models.BeamJobState.CANCELLED,
            beam_job_models.BeamJobState.DRAINED,
            beam_job_models.BeamJobState.UPDATED,
            beam_job_models.BeamJobState.DONE,
            beam_job_models.BeamJobState.FAILED,
        ]
        non_terminal_states = [
            beam_job_models.BeamJobState.CANCELLING,
            beam_job_models.BeamJobState.DRAINING,
            beam_job_models.BeamJobState.PENDING,
            beam_job_models.BeamJobState.RUNNING,
            beam_job_models.BeamJobState.STOPPED,
            beam_job_models.BeamJobState.UNKNOWN,
        ]
        for state in terminal_states:
            self.assertTrue(self._make_run_in_state(state).in_terminal_state)
        for state in non_terminal_states:
            self.assertFalse(self._make_run_in_state(state).in_terminal_state)

    def test_to_dict(self) -> None:
        job = beam_job_domain.BeamJob(
            model_validation_jobs.AuditAllStorageModelsJob)
        self.assertEqual(job.to_dict(), {'name': 'AuditAllStorageModelsJob'})
class BeamJobRunTests(test_utils.TestBase):

    NOW = datetime.datetime.utcnow()

    def _make_run(self) -> beam_job_domain.BeamJobRun:
        """Returns the canonical BeamJobRun used by these tests."""
        return beam_job_domain.BeamJobRun(
            '123', 'FooJob', 'RUNNING', self.NOW, self.NOW, True)

    def test_usage(self) -> None:
        run = self._make_run()
        self.assertEqual(run.job_id, '123')
        self.assertEqual(run.job_name, 'FooJob')
        self.assertEqual(run.job_state, 'RUNNING')
        self.assertEqual(run.job_started_on, self.NOW)
        self.assertEqual(run.job_updated_on, self.NOW)
        self.assertTrue(run.job_is_synchronous)

    def test_to_dict(self) -> None:
        expected_msecs = utils.get_time_in_millisecs(self.NOW)
        self.assertEqual(self._make_run().to_dict(), {
            'job_id': '123',
            'job_name': 'FooJob',
            'job_state': 'RUNNING',
            'job_started_on_msecs': expected_msecs,
            'job_updated_on_msecs': expected_msecs,
            'job_is_synchronous': True,
        })
class AggregateBeamJobRunResultTests(test_utils.TestBase):

    def test_usage(self) -> None:
        # stdout and stderr are stored verbatim on the domain object.
        result = beam_job_domain.AggregateBeamJobRunResult('abc', '123')
        self.assertEqual(result.stdout, 'abc')
        self.assertEqual(result.stderr, '123')

    def test_to_dict(self) -> None:
        result_dict = beam_job_domain.AggregateBeamJobRunResult(
            'abc', '123').to_dict()
        self.assertEqual(result_dict, {'stdout': 'abc', 'stderr': '123'})
| brianrodri/oppia | core/domain/beam_job_domain_test.py | Python | apache-2.0 | 5,710 |
from six import string_types
from pypif.obj.common.display_item import DisplayItem
from pypif.obj.common.name import Name
from pypif.obj.common.pages import Pages
from pypif.obj.common.pio import Pio
class Reference(Pio):
    """
    Information about a referenced publication.

    Scalar fields (doi, isbn, issn, ...) are validated as strings on
    assignment; object fields accept either dictionaries or the
    corresponding pypif objects and are normalized via _get_object.
    Each property is backed by a private ``_name`` attribute.
    """

    def __init__(self, doi=None, isbn=None, issn=None, url=None, title=None, publisher=None, journal=None, volume=None,
                 issue=None, year=None, figure=None, table=None, pages=None, authors=None, editors=None,
                 affiliations=None, acknowledgements=None, references=None, tags=None, **kwargs):
        """
        Constructor.

        :param doi: String with DOI of the published work
        :param isbn: String with ISBN of the published work
        :param issn: String with ISSN of the published work
        :param url: String with URL to the published work
        :param title: String with title of the published work.
        :param publisher: String with publisher of the work.
        :param journal: String with the journal in which the work was published.
        :param volume: String with the volume in which the work was published.
        :param issue: String with the issue in which the work was published.
        :param year: String with the year in which the work was published.
        :param figure: Dictionary or :class:`.DisplayItem` object with the figure to reference.
        :param table: Dictionary or :class:`.DisplayItem` object with the table to reference.
        :param pages: String, integer, dictionary, or :class:`.Pages` object with the starting and ending pages for
                the published work.
        :param authors: List of strings, dictionaries, or :class:`.Name` objects with information about the authors.
        :param editors: List of strings, dictionaries, or :class:`.Name` objects with information about the editors.
        :param affiliations: List of strings with affiliations.
        :param acknowledgements: List of strings with acknowledgements.
        :param references: List of dictionaries or :class:`.Reference` objects with works cited by this published work.
        :param tags: List of strings or numbers that are tags for this object.
        :param kwargs: Dictionary of fields that are not supported.
        """
        super(Reference, self).__init__(tags=tags, **kwargs)
        self._doi = None
        self.doi = doi
        self._isbn = None
        self.isbn = isbn
        self._issn = None
        self.issn = issn
        self._url = None
        self.url = url
        self._title = None
        self.title = title
        self._publisher = None
        self.publisher = publisher
        self._journal = None
        self.journal = journal
        self._volume = None
        self.volume = volume
        self._issue = None
        self.issue = issue
        self._year = None
        self.year = year
        self._figure = None
        self.figure = figure
        self._table = None
        self.table = table
        self._pages = None
        self.pages = pages
        self._authors = None
        self.authors = authors
        self._editors = None
        self.editors = editors
        self._affiliations = None
        self.affiliations = affiliations
        self._acknowledgements = None
        self.acknowledgements = acknowledgements
        self._references = None
        self.references = references

    @property
    def doi(self):
        return self._doi

    @doi.setter
    def doi(self, doi):
        self._validate_type('doi', doi, string_types)
        self._doi = doi

    @doi.deleter
    def doi(self):
        self._doi = None

    @property
    def isbn(self):
        return self._isbn

    @isbn.setter
    def isbn(self, isbn):
        self._validate_type('isbn', isbn, string_types)
        self._isbn = isbn

    @isbn.deleter
    def isbn(self):
        self._isbn = None

    @property
    def issn(self):
        # BUG FIX: the original getter returned ``self.issn`` (the property
        # itself), causing infinite recursion on read; return the backing
        # field like every other property in this class.
        return self._issn

    @issn.setter
    def issn(self, issn):
        self._validate_type('issn', issn, string_types)
        self._issn = issn

    @issn.deleter
    def issn(self):
        self._issn = None

    @property
    def url(self):
        return self._url

    @url.setter
    def url(self, url):
        self._validate_type('url', url, string_types)
        self._url = url

    @url.deleter
    def url(self):
        self._url = None

    @property
    def title(self):
        return self._title

    @title.setter
    def title(self, title):
        self._validate_type('title', title, string_types)
        self._title = title

    @title.deleter
    def title(self):
        self._title = None

    @property
    def publisher(self):
        return self._publisher

    @publisher.setter
    def publisher(self, publisher):
        self._validate_type('publisher', publisher, string_types)
        self._publisher = publisher

    @publisher.deleter
    def publisher(self):
        self._publisher = None

    @property
    def journal(self):
        return self._journal

    @journal.setter
    def journal(self, journal):
        self._validate_type('journal', journal, string_types)
        self._journal = journal

    @journal.deleter
    def journal(self):
        self._journal = None

    @property
    def volume(self):
        return self._volume

    @volume.setter
    def volume(self, volume):
        self._validate_type('volume', volume, string_types)
        self._volume = volume

    @volume.deleter
    def volume(self):
        self._volume = None

    @property
    def issue(self):
        return self._issue

    @issue.setter
    def issue(self, issue):
        self._validate_type('issue', issue, string_types)
        self._issue = issue

    @issue.deleter
    def issue(self):
        self._issue = None

    @property
    def year(self):
        return self._year

    @year.setter
    def year(self, year):
        self._validate_type('year', year, string_types)
        self._year = year

    @year.deleter
    def year(self):
        self._year = None

    @property
    def figure(self):
        return self._figure

    @figure.setter
    def figure(self, figure):
        self._validate_type('figure', figure, dict, DisplayItem)
        self._figure = self._get_object(DisplayItem, figure)

    @figure.deleter
    def figure(self):
        self._figure = None

    @property
    def table(self):
        return self._table

    @table.setter
    def table(self, table):
        self._validate_type('table', table, dict, DisplayItem)
        self._table = self._get_object(DisplayItem, table)

    @table.deleter
    def table(self):
        self._table = None

    @property
    def pages(self):
        return self._pages

    @pages.setter
    def pages(self, pages):
        self._validate_type('pages', pages, string_types, int, dict, Pages)
        self._pages = self._get_object(Pages, pages)

    @pages.deleter
    def pages(self):
        self._pages = None

    @property
    def authors(self):
        return self._authors

    @authors.setter
    def authors(self, authors):
        self._validate_list_type('authors', authors, string_types, dict, Name)
        self._authors = self._get_object(Name, authors)

    @authors.deleter
    def authors(self):
        self._authors = None

    @property
    def editors(self):
        return self._editors

    @editors.setter
    def editors(self, editors):
        self._validate_list_type('editors', editors, string_types, dict, Name)
        self._editors = self._get_object(Name, editors)

    @editors.deleter
    def editors(self):
        self._editors = None

    @property
    def affiliations(self):
        return self._affiliations

    @affiliations.setter
    def affiliations(self, affiliations):
        self._validate_list_type('affiliations', affiliations, string_types)
        self._affiliations = affiliations

    @affiliations.deleter
    def affiliations(self):
        self._affiliations = None

    @property
    def acknowledgements(self):
        return self._acknowledgements

    @acknowledgements.setter
    def acknowledgements(self, acknowledgements):
        self._validate_list_type('acknowledgements', acknowledgements, string_types)
        self._acknowledgements = acknowledgements

    @acknowledgements.deleter
    def acknowledgements(self):
        self._acknowledgements = None

    @property
    def references(self):
        return self._references

    @references.setter
    def references(self, references):
        self._validate_list_type('references', references, dict, Reference)
        self._references = self._get_object(Reference, references)

    @references.deleter
    def references(self):
        self._references = None
| kjaym/pypif | pypif/obj/common/reference.py | Python | apache-2.0 | 8,674 |
"""Client Utilities
Factor out code shared by both the resync and resync-explorer
clients.
Copyright 2012,2013 Simeon Warner
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License
"""
import logging
import logging.config
import optparse
import sys
from resync.client import ClientFatalError
from resync.explorer import Explorer
from resync.utils import UTCFormatter
def init_logging(to_file=False, logfile=None, default_logfile='/tmp/resync.log',
                 human=True, verbose=False, eval_mode=False,
                 default_logger='client', extra_loggers=None):
    """Initialize logging

    Use of log levels:
      DEBUG   - very verbose, for evaluation of output (-e)
      INFO    - verbose, only seen by users if they ask for it (-v)
      WARNING - messages output messages to console

    Logging to a file: if to_file is True then output will be written to
    a file. This will be logfile if set, else default_logfile (which may
    also be overridden).
    """
    fmt = '%(asctime)s | %(name)s | %(levelname)s | %(message)s'
    formatter = UTCFormatter(fmt)

    console_handler = None
    if human:
        # A handler designed just for human readable output: bare message
        # format, threshold driven by the -v flag.
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.INFO if verbose else logging.WARNING)
        console_handler.setFormatter(logging.Formatter(fmt='%(message)s'))

    file_handler = None
    if to_file:
        if logfile is None:
            logfile = default_logfile
        file_handler = logging.FileHandler(filename=logfile, mode='a')
        file_handler.setFormatter(formatter)
        file_handler.setLevel(logging.DEBUG if eval_mode else logging.INFO)

    # Attach the handlers to the default logger, the 'resync' library logger,
    # and any extras the caller asked for.
    logger_names = [default_logger, 'resync']
    if extra_loggers is not None:
        logger_names.extend(extra_loggers)
    for name in logger_names:
        log = logging.getLogger(name)
        log.setLevel(logging.DEBUG)  # control at handler instead
        if console_handler is not None:
            log.addHandler(console_handler)
        if file_handler is not None:
            log.addHandler(file_handler)

    log = logging.getLogger(default_logger)
    if to_file:
        log.info("Writing detailed log to %s" % (logfile))
def count_true_args(*args):
    """Return the number of arguments that evaluate to True."""
    return sum(1 for arg in args if arg)
def parse_links(args_link):
    """Parse the repeatable --link options into a list of attribute dicts.

    A None value (option never given) yields an empty list. Any malformed
    entry aborts with ClientFatalError.
    """
    if args_link is None:
        return []
    links = []
    for link_str in args_link:
        try:
            links.append(parse_link(link_str))
        except ValueError as e:
            raise ClientFatalError("Bad --link option '%s' (%s)" % (link_str, str(e)))
    return links
def parse_link(link_str):
    """Parse one --link option value into a dict of <rs:ln> attributes.

    Input string of the form: rel,href,att1=val1,att2=val2
    Raises ClientFatalError for any malformed component.
    """
    atts = {}
    help_str = "--link option '%s' (format rel,href,att1=val1...)" % (link_str)
    try:
        parts = link_str.split(',')
        # First two segments are the relation and the target URI
        atts['rel'] = parts.pop(0)
        atts['href'] = parts.pop(0)
        if atts['href'] == '':
            raise ClientFatalError("Missing uri in " + help_str)
        # Remaining segments are name=value attribute pairs
        for part in parts:
            (k, v) = part.split('=')
            if k == '' or v == '':
                raise ClientFatalError("Bad attribute (%s) in " % (part) + help_str)
            atts[k] = v
    except ValueError:
        raise ClientFatalError("Bad component of " + help_str)
    except IndexError:
        raise ClientFatalError("Incomplete component of " + help_str)
    return atts
def parse_capabilities(caps_str):
    """Parse list of capabilities in --capabilitylist option

    Input string of the form: cap_name=uri,cap_name=uri
    Raises ClientFatalError when any component is not a name=uri pair.
    """
    capabilities = {}
    try:
        for term in caps_str.split(','):
            (name, uri) = term.split('=')
            capabilities[name] = uri
    except ValueError as e:
        raise ClientFatalError("Bad component of --capabilitylist option '%s' (%s)" % (caps_str, str(e)))
    return capabilities
def parse_capability_lists(cls_str):
    """Parse list of capability lists in --capabilitylistindex option

    Input string of the form: uri,uri
    """
    return cls_str.split(',')
| dans-er/resync | resync/client_utils.py | Python | apache-2.0 | 4,699 |