| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 3-1.05M | stringlengths 5-104 | stringlengths 4-251 | stringclasses 1 value | stringclasses 15 values | int64 3-1.05M |
# TestSwiftPartiallyGenericFuncStruct.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
from lldbsuite.test.decorators import *
lldbinline.MakeInlineTest(__file__, globals(), decorators=[swiftTest])
| apple/swift-lldb | packages/Python/lldbsuite/test/lang/swift/partially_generic_func/struct/TestSwiftPartiallyGenericFuncStruct.py | Python | apache-2.0 | 629 |
numbers = range(0, 15)
print map(lambda n: n ** n, numbers)
for i in map(lambda n: n ** n, numbers):
print i
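# Editor's addition (hedged sketch, not part of the original lesson): the code above
# targets Python 2, where map() returns a list and print is a statement. In Python 3,
# map() returns a lazy iterator and print is a function, so the equivalent would be:
#
#     numbers = range(0, 15)
#     print(list(map(lambda n: n ** n, numbers)))
#     for i in map(lambda n: n ** n, numbers):
#         print(i)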
| regnart-tech-club/programming-concepts | course-2:combining-building-blocks/subject-4:all together now/topic-2:ETL/lesson-1:`map` function.py | Python | apache-2.0 | 111 |
from __future__ import print_function
import sys
import traceback
from game_state import GameState
def warning(*objs):
print("WARNING: ", *objs, file=sys.stderr)
traceback.print_exc()
class Player:
VERSION = "Default Python folding player"
def preFlopBet(self):
stack = self.state.get_stack()
if self.state.have_pair_in_hand():
return int(round(stack * 0.5))
elif self.state.get_highest_rank() == "A":
return int(round(stack * 0.4))
elif self.state.get_highest_rank() == "K":
return int(round(stack * 0.3))
return 0
def calcBet(self):
        if self.state.get_round() == 0:
return self.preFlopBet()
elif self.state.get_round() > 1:
if self.preFlopBet() != 0:
return self.state.keep()
return 0
else:
return self.state.keep()
def betRequest(self, game_state):
self.state = GameState(game_state)
try:
return self.calcBet()
except Exception as x:
warning("Exception during betRequest", x)
return 501
def showdown(self, game_state):
pass
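# Editor's addition (hedged sketch, not part of the original player): betRequest() is the
# entry point the poker engine calls once per betting turn with a game-state dict, and
# showdown() is called when a hand ends. A minimal local exercise (the game_state keys
# expected by GameState are an assumption here and depend on that class):
#
#     player = Player()
#     print(player.betRequest({'round': 0, 'players': []}))
#     # If GameState cannot interpret the dict, the except branch returns 501.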
| szepnapot/poker-player-pypoker | player.py | Python | mit | 1,189 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the WinVer Windows Registry plugin."""
import unittest
# pylint: disable=unused-import
from plaso.formatters import winreg as winreg_formatter
from plaso.lib import timelib_test
from plaso.parsers.winreg_plugins import test_lib
from plaso.parsers.winreg_plugins import winver
from plaso.winreg import test_lib as winreg_test_lib
class WinVerPluginTest(test_lib.RegistryPluginTestCase):
"""Tests for the WinVer Windows Registry plugin."""
def setUp(self):
"""Sets up the needed objects used throughout the test."""
self._plugin = winver.WinVerPlugin()
def testWinVer(self):
"""Test the WinVer plugin."""
key_path = u'\\Microsoft\\Windows NT\\CurrentVersion'
values = []
values.append(winreg_test_lib.TestRegValue(
'ProductName', 'MyTestOS'.encode('utf_16_le'), 1, 123))
values.append(winreg_test_lib.TestRegValue(
'CSDBuildNumber', '5'.encode('utf_16_le'), 1, 1892))
values.append(winreg_test_lib.TestRegValue(
'RegisteredOwner', 'A Concerned Citizen'.encode('utf_16_le'), 1, 612))
values.append(winreg_test_lib.TestRegValue(
'InstallDate', '\x13\x1aAP', 3, 1001))
winreg_key = winreg_test_lib.TestRegKey(
key_path, 1346445929000000, values, 153)
event_queue_consumer = self._ParseKeyWithPlugin(self._plugin, winreg_key)
event_objects = self._GetEventObjectsFromQueue(event_queue_consumer)
self.assertEquals(len(event_objects), 1)
event_object = event_objects[0]
expected_timestamp = timelib_test.CopyStringToTimestamp(
'2012-08-31 20:09:55')
self.assertEquals(event_object.timestamp, expected_timestamp)
# Note that the double spaces here are intentional.
expected_msg = (
u'[{0:s}] '
u'Windows Version Information: '
u'Owner: A Concerned Citizen '
u'Product name: MyTestOS sp: 5').format(key_path)
expected_msg_short = (
u'[{0:s}] '
u'Windows Version Information: '
u'Owner: ...').format(key_path)
self._TestGetMessageStrings(event_object, expected_msg, expected_msg_short)
if __name__ == '__main__':
unittest.main()
| cvandeplas/plaso | plaso/parsers/winreg_plugins/winver_test.py | Python | apache-2.0 | 2,842 |
# Copyright 2011 Antoine Bertin <diaoulael@gmail.com>
#
# This file is part of Dobby.
#
# Dobby is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Dobby is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Dobby. If not, see <http://www.gnu.org/licenses/>.
from . import Speaker, SPEAKING, IDLE
import logging
import speechd
import time
logger = logging.getLogger(__name__)
class SpeechDispatcher(Speaker):
"""Speaker using speech-dispatcher (speechd)
:param string name: speechd's name of the SSIPClient
:param string engine: speechd's output module
:param string voice: speechd's voice
:param string language: speechd's language
    :param integer volume: speechd's volume
    :param integer rate: speechd's rate
:param integer pitch: speechd's pitch
"""
def __init__(self, tts_queue, name, engine, voice, language, volume, rate, pitch):
super(SpeechDispatcher, self).__init__(tts_queue)
self.client = speechd.SSIPClient(name)
self.client.set_output_module(engine)
self.client.set_voice(voice)
self.client.set_language(language)
self.client.set_volume(volume)
self.client.set_rate(rate)
self.client.set_pitch(pitch)
self.client.set_punctuation(speechd.PunctuationMode.SOME)
def speak(self, text):
logger.debug(u'Speaking "%s"' % text)
        self.client.speak(text, callback=self._callback, event_types=(speechd.CallbackType.END,))
self.state = SPEAKING
self._wait()
def _callback(self, event_type):
"""Callback for speechd end of speech
:param speechd.CallbackType event_type: type of the event raised by speechd
"""
if event_type == speechd.CallbackType.END:
self.state = IDLE
def _wait(self, timeout=60, poll=0.1):
"""Block until :attr:`state` changes back to :data:`IDLE`
:param integer timeout: maximum time to wait
:param double poll: polling interval for checking :attr:`state`
"""
i = 0
while self.state == SPEAKING and i <= timeout / poll:
time.sleep(poll)
i += 1
def terminate(self):
logger.debug(u'Terminating...')
self.client.close()
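# Editor's addition (hedged sketch, not part of Dobby): a minimal usage example,
# assuming a speech-dispatcher daemon is running and a Queue is used as tts_queue:
#
#     from Queue import Queue
#     speaker = SpeechDispatcher(Queue(), 'dobby', 'espeak', 'MALE1', 'en', 100, 0, 0)
#     speaker.speak(u'Hello from Dobby')  # blocks until speechd reports END
#     speaker.terminate()
#
# The engine/voice names above are illustrative; valid values depend on the local
# speech-dispatcher configuration.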
| Diaoul/Dobby | dobby/speakers/speechdispatcher.py | Python | lgpl-3.0 | 2,638 |
"""Tabulate sun and moon ephemerides during the survey.
"""
from __future__ import print_function, division
import warnings
import math
import os.path
import datetime
import numpy as np
import scipy.interpolate
import astropy.time
import astropy.table
import astropy.utils.exceptions
import astropy.units as u
import ephem
import desiutil.log
import desisurvey.config
import desisurvey.utils
import desisurvey.tiles
# Date range 2019-2027 for tabulated ephemerides.
# This range is chosen large enough to cover commissioning,
# survey validation and the 5-year main survey, so should
# not normally need to be changed, except for testing.
START_DATE = datetime.date(2019, 1, 1)
STOP_DATE = datetime.date(2027, 12, 31)
_ephem = None
def get_ephem(use_cache=True, write_cache=True):
"""Return tabulated ephemerides for (START_DATE,STOP_DATE).
The pyephem module must be installed to calculate ephemerides,
    but is not necessary when a FITS file of precalculated data is
available.
Parameters
----------
use_cache : bool
Use cached ephemerides from memory or disk if possible
when True. Otherwise, always calculate from scratch.
write_cache : bool
When True, write a generated table so it is available for
future invocations. Writing only takes place when a
cached object is not available or ``use_cache`` is False.
Returns
-------
Ephemerides
Object with tabulated ephemerides for (START_DATE,STOP_DATE).
"""
global _ephem
# Freeze IERS table for consistent results.
desisurvey.utils.freeze_iers()
# Use standardized string representation of dates.
start_iso = START_DATE.isoformat()
stop_iso = STOP_DATE.isoformat()
range_iso = '({},{})'.format(start_iso, stop_iso)
log = desiutil.log.get_logger()
# First check for a cached object in memory.
if use_cache and _ephem is not None:
if _ephem.start_date != START_DATE or _ephem.stop_date != STOP_DATE:
raise RuntimeError('START_DATE, STOP_DATE have changed.')
log.debug('Returning cached ephemerides for {}.'.format(range_iso))
return _ephem
# Next check for a FITS file on disk.
config = desisurvey.config.Configuration()
filename = config.get_path('ephem_{}_{}.fits'.format(start_iso, stop_iso))
if use_cache and os.path.exists(filename):
# Save restored object in memory.
_ephem = Ephemerides(START_DATE, STOP_DATE, restore=filename)
log.info('Restored ephemerides for {} from {}.'
.format(range_iso, filename))
return _ephem
# Finally, create new ephemerides and save in the memory cache.
log.info('Building ephemerides for {}...'.format(range_iso))
_ephem = Ephemerides(START_DATE, STOP_DATE)
if write_cache:
# Save the tabulated ephemerides to disk.
_ephem._table.write(filename, overwrite=True)
log.info('Saved ephemerides for {} to {}'.format(range_iso, filename))
return _ephem
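# Editor's addition (hedged sketch, not part of the original module): typical use of the
# caching behaviour above, assuming a valid desisurvey configuration is set up:
#
#     ephem = get_ephem()            # builds (or restores) the full 2019-2027 table
#     ephem = get_ephem()            # second call returns the in-memory cached object
#     night = ephem.get_night('2021-03-15')
#     print(night['dusk'], night['dawn'])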
class Ephemerides(object):
"""Tabulate ephemerides.
:func:`get_ephem` should normally be used rather than calling this
constructor directly.
Parameters
----------
start_date : datetime.date
Calculated ephemerides start on the evening of this date.
stop_date : datetime.date
Calculated ephemerides stop on the morning of this date.
num_obj_steps : int
Number of steps for tabulating object (ra, dec) during each 24-hour
period from local noon to local noon. Ignored when restore is set.
restore : str or None
Name of a file to restore ephemerides from. Construct ephemerides
from scratch when None. A restored file must have start and stop
dates that match our args.
Attributes
----------
start : astropy.time.Time
Local noon before the first night for which ephemerides are calculated.
stop : astropy.time.Time
Local noon after the last night for which ephemerides are calculated.
num_nights : int
Number of consecutive nights for which ephemerides are calculated.
"""
def __init__(self, start_date, stop_date, num_obj_steps=25, restore=None):
self.log = desiutil.log.get_logger()
config = desisurvey.config.Configuration()
# Validate date range.
num_nights = (stop_date - start_date).days
if num_nights <= 0:
raise ValueError('Expected start_date < stop_date.')
self.num_nights = num_nights
self.start_date = start_date
self.stop_date = stop_date
# Convert to astropy times at local noon.
self.start = desisurvey.utils.local_noon_on_date(start_date)
self.stop = desisurvey.utils.local_noon_on_date(stop_date)
# Moon illumination fraction interpolator will be initialized the
# first time it is used.
self._moon_illum_frac_interpolator = None
# Restore ephemerides from a FITS table if requested.
if restore is not None:
self._table = astropy.table.Table.read(restore)
assert self._table.meta['START'] == str(start_date)
assert self._table.meta['STOP'] == str(stop_date)
assert len(self._table) == num_nights
return
# Initialize an empty table to fill.
meta = dict(NAME='Survey Ephemerides', EXTNAME='EPHEM',
START=str(start_date), STOP=str(stop_date))
self._table = astropy.table.Table(meta=meta)
mjd_format = '%.5f'
self._table['noon'] = astropy.table.Column(
length=num_nights, format=mjd_format,
description='MJD of local noon before night')
self._table['dusk'] = astropy.table.Column(
length=num_nights, format=mjd_format,
description='MJD of dark/gray sunset')
self._table['dawn'] = astropy.table.Column(
length=num_nights, format=mjd_format,
description='MJD of dark/gray sunrise')
self._table['brightdusk'] = astropy.table.Column(
length=num_nights, format=mjd_format,
description='MJD of bright sunset')
self._table['brightdawn'] = astropy.table.Column(
length=num_nights, format=mjd_format,
description='MJD of bright sunrise')
self._table['brightdusk_LST'] = astropy.table.Column(
length=num_nights, format='%.5f',
            description='Apparent LST at brightdusk in degrees')
self._table['brightdawn_LST'] = astropy.table.Column(
length=num_nights, format='%.5f',
            description='Apparent LST at brightdawn in degrees')
self._table['moonrise'] = astropy.table.Column(
length=num_nights, format=mjd_format,
description='MJD of moonrise before/during night')
self._table['moonset'] = astropy.table.Column(
length=num_nights, format=mjd_format,
description='MJD of moonset after/during night')
self._table['moon_illum_frac'] = astropy.table.Column(
length=num_nights, format='%.3f',
description='Illuminated fraction of moon surface')
self._table['nearest_full_moon'] = astropy.table.Column(
length=num_nights, format='%.5f',
description='Nearest full moon - local midnight in days')
self._table['programs'] = astropy.table.Column(
length=num_nights, shape=(4,), dtype=np.int16,
description='Program sequence between dusk and dawn')
self._table['changes'] = astropy.table.Column(
length=num_nights, shape=(3,),
description='MJD of program changes between dusk and dawn')
# Add (ra,dec) arrays for each object that we need to avoid and
# check that ephem has a model for it.
models = {}
for name in config.avoid_bodies.keys:
models[name] = getattr(ephem, name.capitalize())()
self._table[name + '_ra'] = astropy.table.Column(
length=num_nights, shape=(num_obj_steps,), format='%.2f',
description='RA of {0} during night in degrees'.format(name))
self._table[name + '_dec'] = astropy.table.Column(
length=num_nights, shape=(num_obj_steps,), format='%.2f',
description='DEC of {0} during night in degrees'.format(name))
# The moon is required.
if 'moon' not in models:
raise ValueError('Missing required avoid_bodies entry for "moon".')
# Initialize the observer.
mayall = ephem.Observer()
mayall.lat = config.location.latitude().to(u.rad).value
mayall.lon = config.location.longitude().to(u.rad).value
mayall.elevation = config.location.elevation().to(u.m).value
# Configure atmospheric refraction model for rise/set calculations.
mayall.pressure = 1e3 * config.location.pressure().to(u.bar).value
mayall.temp = config.location.temperature().to(u.C).value
# Do not use atmospheric refraction corrections for other calculations.
mayall_no_ar = mayall.copy()
mayall_no_ar.pressure = 0.
# Calculate the MJD corresponding to date=0. in ephem.
# This throws a warning because of the early year, but it is harmless.
with warnings.catch_warnings():
warnings.simplefilter(
'ignore', astropy.utils.exceptions.AstropyUserWarning)
mjd0 = astropy.time.Time(
datetime.datetime(1899, 12, 31, 12, 0, 0)).mjd
# Initialize a grid covering each 24-hour period for
# tabulating the (ra,dec) of objects to avoid.
t_obj = np.linspace(0., 1., num_obj_steps)
        # Calculate ephemerides for each night.
for day_offset in range(num_nights):
day = self.start + day_offset * u.day
mayall.date = day.datetime
row = self._table[day_offset]
# Store local noon for this day.
row['noon'] = day.mjd
# Calculate bright twilight.
mayall.horizon = (
config.conditions.BRIGHT.max_sun_altitude().to(u.rad).value)
row['brightdusk'] = mayall.next_setting(
ephem.Sun(), use_center=True) + mjd0
row['brightdawn'] = mayall.next_rising(
ephem.Sun(), use_center=True) + mjd0
# Calculate dark / gray twilight.
mayall.horizon = (
config.conditions.DARK.max_sun_altitude().to(u.rad).value)
row['dusk'] = mayall.next_setting(
ephem.Sun(), use_center=True) + mjd0
row['dawn'] = mayall.next_rising(
ephem.Sun(), use_center=True) + mjd0
# Calculate the moonrise/set for any moon visible tonight.
m0 = ephem.Moon()
# Use the USNO standard for defining moonrise/set, which means that
# it will not exactly correspond to DARK <-> ? program transitions
# at an altitude of 0deg.
mayall.horizon = '-0:34'
row['moonrise'] = mayall.next_rising(m0) + mjd0
if row['moonrise'] > row['brightdawn']:
# Any moon visible tonight is from the previous moon rise.
row['moonrise'] = mayall.previous_rising(m0) + mjd0
mayall.date = row['moonrise'] - mjd0
row['moonset'] = mayall.next_setting(ephem.Moon()) + mjd0
# Calculate the fraction of the moon's surface that is illuminated
# at local midnight.
m0.compute(row['noon'] + 0.5 - mjd0)
row['moon_illum_frac'] = m0.moon_phase
# Loop over objects to avoid.
for i, t in enumerate(t_obj):
# Set the date of the no-refraction model.
mayall_no_ar.date = row['noon'] + t - mjd0
for name, model in models.items():
model.compute(mayall_no_ar)
row[name + '_ra'][i] = math.degrees(float(model.ra))
row[name + '_dec'][i] = math.degrees(float(model.dec))
# Build a 1s grid covering the night.
step_size_sec = 1
step_size_day = step_size_sec / 86400.
dmjd_grid = desisurvey.ephem.get_grid(step_size=step_size_sec * u.s)
# Loop over nights to calculate the program sequence.
self._table['programs'][:] = -1
self._table['changes'][:] = 0.
for row in self._table:
mjd_grid = dmjd_grid + row['noon'] + 0.5
pindex = self.tabulate_program(
mjd_grid, include_twilight=False, as_tuple=False)
assert pindex[0] == -1 and pindex[-1] == -1
            # Calculate index-1 where a new program starts (-1 because of np.diff)
changes = np.where(np.diff(pindex) != 0)[0]
# Must have at least DAY -> NIGHT -> DAY changes.
assert len(changes) >= 2 and pindex[changes[0]] == -1 and pindex[changes[-1] + 1] == -1
# Max possible changes is 5.
assert len(changes) <= 6
# Check that first change is at dusk.
assert np.abs(mjd_grid[changes[0]] + 0.5 * step_size_day - row['dusk']) <= step_size_day
            # Check that the last change is at dawn.
assert np.abs(mjd_grid[changes[-1]] + 0.5 * step_size_day - row['dawn']) <= step_size_day
row['programs'][0] = pindex[changes[0] + 1]
for k, idx in enumerate(changes[1:-1]):
row['programs'][k + 1] = pindex[idx + 1]
row['changes'][k] = mjd_grid[idx] + 0.5 * step_size_day
# Tabulate all full moons covering (start, stop) with a 30-day pad.
full_moons = []
lo, hi = self._table[0]['noon'] - 30 - mjd0, self._table[-1]['noon'] + 30 - mjd0
when = lo
while when < hi:
when = ephem.next_full_moon(when)
full_moons.append(when)
full_moons = np.array(full_moons) + mjd0
# Find the first full moon after each midnight.
midnight = self._table['noon'] + 0.5
idx = np.searchsorted(full_moons, midnight, side='left')
assert np.all(midnight <= full_moons[idx])
assert np.all(midnight > full_moons[idx - 1])
# Calculate time until next full moon and after previous full moon.
next_full_moon = full_moons[idx] - midnight
prev_full_moon = midnight - full_moons[idx - 1]
# Record the nearest full moon to each midnight.
next_is_nearest = next_full_moon <= prev_full_moon
self._table['nearest_full_moon'][next_is_nearest] = next_full_moon[next_is_nearest]
self._table['nearest_full_moon'][~next_is_nearest] = -prev_full_moon[~next_is_nearest]
# Calculate apparent LST at each brightdusk/dawn in degrees.
dusk_t = astropy.time.Time(self._table['brightdusk'].data, format='mjd')
dawn_t = astropy.time.Time(self._table['brightdawn'].data, format='mjd')
dusk_t.location = desisurvey.utils.get_location()
dawn_t.location = desisurvey.utils.get_location()
self._table['brightdusk_LST'] = dusk_t.sidereal_time('apparent').to(u.deg).value
self._table['brightdawn_LST'] = dawn_t.sidereal_time('apparent').to(u.deg).value
# Subtract 360 deg if LST wraps around during this night, so that the
# [dusk, dawn] values can be used for linear interpolation.
wrap = self._table['brightdusk_LST'] > self._table['brightdawn_LST']
self._table['brightdusk_LST'][wrap] -= 360
assert np.all(self._table['brightdawn_LST'] > self._table['brightdusk_LST'])
def get_row(self, row_index):
"""Return the specified row of our table.
Parameters
----------
row_index : int
Index starting from zero of the requested row. Negative values
are allowed and specify offsets from the end of the table in
the usual way.
        Returns
        -------
        astropy.table.Row
            Row of ephemeris data for the requested night.
"""
if row_index < -self.num_nights or row_index >= self.num_nights:
raise ValueError('Requested row index outside table: {0}'
.format(row_index))
return self._table[row_index]
@property
def table(self):
"""Read-only access to our internal table."""
return self._table
def get_night(self, night, as_index=False):
"""Return the row of ephemerides for a single night.
Parameters
----------
night : date
Converted to a date using :func:`desisurvey.utils.get_date`.
as_index : bool
Return the row index of the specified night in our per-night table
if True. Otherwise return the row itself.
Returns
-------
astropy.table.Row or int
Row of ephemeris data for the requested night or the index
of this row (selected via ``as_index``).
"""
date = desisurvey.utils.get_date(night)
row_index = (date - self.start_date).days
if row_index < 0 or row_index >= self.num_nights:
raise ValueError('Requested night outside ephemerides: {0}'
.format(night))
return row_index if as_index else self._table[row_index]
def get_moon_illuminated_fraction(self, mjd):
"""Return the illuminated fraction of the moon.
Uses linear interpolation on the tabulated fractions at midnight and
should be accurate to about 0.01. For reference, the fraction changes
by up to 0.004 per hour.
Parameters
----------
mjd : float or array
MJD values during a single night where the program should be
tabulated.
Returns
-------
float or array
Illuminated fraction at each input time.
"""
mjd = np.asarray(mjd)
if (np.min(mjd) < self._table['noon'][0] or
np.max(mjd) >= self._table['noon'][-1] + 1):
raise ValueError('Requested MJD is outside ephemerides range.')
if self._moon_illum_frac_interpolator is None:
            # Lazy initialization of a linear interpolator.
midnight = self._table['noon'] + 0.5
self._moon_illum_frac_interpolator = scipy.interpolate.interp1d(
midnight, self._table['moon_illum_frac'], copy=True,
kind='linear', fill_value='extrapolate', assume_sorted=True)
return self._moon_illum_frac_interpolator(mjd)
def get_night_program(self, night, include_twilight=False, program_as_int=False):
"""Return the program sequence for one night.
The program definitions are taken from
:class:`desisurvey.config.Configuration` and depend only on
sun and moon ephemerides for the night.
Parameters
----------
night : date
Converted to a date using :func:`desisurvey.utils.get_date`.
include_twilight : bool
Include twilight time at the start and end of each night in
the BRIGHT program.
program_as_int : bool
Return program encoded as a small integer instead of a string
when True.
Returns
-------
tuple
Tuple (programs, changes) where programs is a list of N program
names and changes is a 1D numpy array of N+1 MJD values that
bracket each program during the night.
"""
night_ephem = self.get_night(night)
programs = night_ephem['programs']
changes = night_ephem['changes']
# Unused slots are -1.
num_programs = np.count_nonzero(programs >= 0)
programs = programs[:num_programs]
changes = changes[:num_programs - 1]
if include_twilight:
start = night_ephem['brightdusk']
stop = night_ephem['brightdawn']
BRIGHT = desisurvey.tiles.Tiles.CONDITION_INDEX['BRIGHT']
if programs[0] != BRIGHT:
# Twilight adds a BRIGHT program at the start of the night.
programs = np.insert(programs, 0, BRIGHT)
changes = np.insert(changes, 0, night_ephem['dusk'])
if programs[-1] != BRIGHT:
# Twilight adds a BRIGHT program at the end of the night.
programs = np.append(programs, BRIGHT)
changes = np.append(changes, night_ephem['dawn'])
else:
start = night_ephem['dusk']
stop = night_ephem['dawn']
# Add start, stop to the change times.
changes = np.concatenate(([start], changes, [stop]))
if not program_as_int:
# Replace program indices with names.
programs = [desisurvey.tiles.Tiles.CONDITIONS[pidx] for pidx in programs]
return programs, changes
def get_program_hours(self, start_date=None, stop_date=None,
include_monsoon=False, include_full_moon=False,
include_twilight=True):
"""Tabulate hours in each program during each night of the survey.
Use :func:`desisurvey.plots.plot_program` to visualize program hours.
This method calculates scheduled hours with no correction for weather.
Use 1 - :func:`desimodel.weather.dome_closed_fractions` to lookup
nightly corrections based on historical weather data.
Parameters
----------
start_date : date or None
First night to include or use the first date of the survey. Must
be convertible to a date using :func:`desisurvey.utils.get_date`.
stop_date : date or None
            Last night to include or use the last date of the survey. Must
be convertible to a date using :func:`desisurvey.utils.get_date`.
include_monsoon : bool
Include nights during the annual monsoon shutdowns.
        include_full_moon : bool
Include nights during the monthly full-moon breaks.
include_twilight : bool
Include twilight time at the start and end of each night in
the BRIGHT program.
Returns
-------
array
Numpy array of shape (3, num_nights) containing the number of
hours in each program (0=DARK, 1=GRAY, 2=BRIGHT) during each
night.
"""
# Determine date range to use.
config = desisurvey.config.Configuration()
if start_date is None:
start_date = config.first_day()
else:
start_date = desisurvey.utils.get_date(start_date)
if stop_date is None:
stop_date = config.last_day()
else:
stop_date = desisurvey.utils.get_date(stop_date)
if start_date >= stop_date:
raise ValueError('Expected start_date < stop_date.')
num_nights = (stop_date - start_date).days
hours = np.zeros((3, num_nights))
for i in range(num_nights):
tonight = start_date + datetime.timedelta(days=i)
if not include_monsoon and desisurvey.utils.is_monsoon(tonight):
continue
if not include_full_moon and self.is_full_moon(tonight):
continue
programs, changes = self.get_night_program(
tonight, include_twilight=include_twilight, program_as_int=True)
for p, dt in zip(programs, np.diff(changes)):
hours[p, i] += dt
hours *= 24
return hours
def get_available_lst(self, start_date=None, stop_date=None, nbins=192, origin=-60,
weather=None, include_monsoon=False, include_full_moon=False,
include_twilight=False):
"""Calculate histograms of available LST for each program.
Parameters
----------
start_date : date or None
First night to include or use the first date of the survey. Must
be convertible to a date using :func:`desisurvey.utils.get_date`.
stop_date : date or None
            Last night to include or use the last date of the survey. Must
be convertible to a date using :func:`desisurvey.utils.get_date`.
nbins : int
Number of LST bins to use.
origin : float
            Rotate LST values so that the left edge of the histogram is at this
            value in degrees.
weather : array or None
1D array of nightly weather factors (0-1) to use, or None to calculate
available LST assuming perfect weather. Length must equal the number
of nights between start and stop. Values are fraction of the night
with the dome open (0=never, 1=always). Use
1 - :func:`desimodel.weather.dome_closed_fractions` to lookup
suitable corrections based on historical weather data.
include_monsoon : bool
Include nights during the annual monsoon shutdowns.
        include_full_moon : bool
Include nights during the monthly full-moon breaks.
include_twilight : bool
Include twilight in the BRIGHT program when True.
Returns
-------
tuple
Tuple (lst_hist, lst_bins) with lst_hist having shape (3,nbins) and
lst_bins having shape (nbins+1,).
"""
config = desisurvey.config.Configuration()
if start_date is None:
start_date = config.first_day()
else:
start_date = desisurvey.utils.get_date(start_date)
if stop_date is None:
stop_date = config.last_day()
else:
stop_date = desisurvey.utils.get_date(stop_date)
num_nights = (stop_date - start_date).days
if num_nights <= 0:
raise ValueError('Expected start_date < stop_date.')
if weather is not None:
weather = np.asarray(weather)
if len(weather) != num_nights:
raise ValueError('Expected weather array of length {}.'.format(num_nights))
# Initialize LST histograms for each program.
lst_bins = np.linspace(origin, origin + 360, nbins + 1)
lst_hist = np.zeros((len(desisurvey.tiles.Tiles.CONDITIONS), nbins))
dlst = 360. / nbins
# Loop over nights.
for n in range(num_nights):
night = start_date + datetime.timedelta(n)
if not include_monsoon and desisurvey.utils.is_monsoon(night):
continue
if not include_full_moon and self.is_full_moon(night):
continue
# Look up the program changes during this night.
programs, changes = self.get_night_program(
night, include_twilight, program_as_int=True)
# Convert each change MJD to a corresponding LST in degrees.
night_ephem = self.get_night(night)
MJD0, MJD1 = night_ephem['brightdusk'], night_ephem['brightdawn']
LST0, LST1 = [night_ephem['brightdusk_LST'], night_ephem['brightdawn_LST']]
lst_changes = LST0 + (changes - MJD0) * (LST1 - LST0) / (MJD1 - MJD0)
assert np.all(np.diff(lst_changes) > 0)
lst_bin = (lst_changes - origin) / 360 * nbins
# Loop over programs during the night.
for i, prog_index in enumerate(programs):
phist = lst_hist[prog_index]
lo, hi = lst_bin[i:i + 2]
# Ensure that 0 <= lo < nbins
left_edge = np.floor(lo / nbins) * nbins
lo -= left_edge
hi -= left_edge
assert 0 <= lo and lo < nbins
ilo = int(np.ceil(lo))
assert ilo > 0
# Calculate the weight of this night in sidereal hours.
wgt = 24 / nbins
if weather is not None:
wgt *= weather[n]
# Divide this program's LST window among the LST bins.
if hi < nbins:
# [lo,hi) falls completely within [0,nbins)
ihi = int(np.floor(hi))
if ilo == ihi + 1:
# LST window is contained within a single LST bin.
phist[ihi] += (hi - lo) * wgt
else:
# Accumulate to bins that fall completely within the window.
phist[ilo:ihi] += wgt
# Accumulate to partial bins at each end of the program window.
phist[ilo - 1] += (ilo - lo) * wgt
phist[ihi] += (hi - ihi) * wgt
else:
# [lo,hi) wraps around on the right edge.
hi -= nbins
assert hi >= 0 and hi < nbins
ihi = int(np.floor(hi))
# Accumulate to bins that fall completely within the window.
phist[ilo:nbins] += wgt
phist[0:ihi] += wgt
# Accumulate partial bins at each end of the program window.
phist[ilo - 1] += (ilo - lo) * wgt
phist[ihi] += (hi - ihi) * wgt
return lst_hist, lst_bins
def tabulate_program(self, mjd, include_twilight=False, as_tuple=True):
"""Tabulate the program during one night.
The program definitions are taken from
:class:`desisurvey.config.Configuration` and depend only on
sun and moon ephemerides for the night.
Parameters
----------
mjd : float or array
MJD values during a single night where the program should be
tabulated.
include_twilight : bool
Include twilight time at the start and end of each night in
the BRIGHT program.
as_tuple : bool
Return a tuple (dark, gray, bright) or else a vector of int16
values.
Returns
-------
tuple or array
Tuple (dark, gray, bright) of boolean arrays that tabulates the
program at each input MJD or an array of small integer indices
into :attr:`desisurvey.tiles.Tiles.CONDITIONS`, with the special
value -1 indicating DAYTIME. All output arrays have the same shape
as the input ``mjd`` array.
"""
# Get the night of the earliest time.
mjd = np.asarray(mjd)
night = self.get_night(astropy.time.Time(np.min(mjd), format='mjd'))
# Check that all input MJDs are valid for this night.
mjd0 = night['noon']
if np.any((mjd < mjd0) | (mjd >= mjd0 + 1)):
raise ValueError('MJD values span more than one night.')
        # Calculate the moon altitude in degrees at each grid time.
interpolator = get_object_interpolator(night, 'moon', altaz=True)
moon_alt, _ = interpolator(mjd)
# Calculate the moon illuminated fraction at each time.
moon_frac = self.get_moon_illuminated_fraction(mjd)
# Select bright and dark night conditions.
dark_night = (mjd >= night['dusk']) & (mjd <= night['dawn'])
if include_twilight:
bright_night = (
mjd >= night['brightdusk']) & (mjd <= night['brightdawn'])
else:
bright_night = dark_night
# Identify program during each MJD.
GRAY = desisurvey.config.Configuration().conditions.GRAY
max_prod = GRAY.max_moon_illumination_altitude_product().to(u.deg).value
max_frac = GRAY.max_moon_illumination()
gray = dark_night & (moon_alt >= 0) & (
(moon_frac <= max_frac) &
(moon_frac * moon_alt <= max_prod))
dark = dark_night & (moon_alt < 0)
bright = bright_night & ~(dark | gray)
assert not np.any(dark & gray | dark & bright | gray & bright)
if as_tuple:
return dark, gray, bright
else:
# Default value -1=DAYTIME.
program = np.full(mjd.shape, -1, np.int16)
program[dark] = desisurvey.tiles.Tiles.CONDITION_INDEX['DARK']
program[gray] = desisurvey.tiles.Tiles.CONDITION_INDEX['GRAY']
program[bright] = desisurvey.tiles.Tiles.CONDITION_INDEX['BRIGHT']
return program
def is_full_moon(self, night, num_nights=None):
"""Test if a night occurs during a full-moon break.
The full moon break is defined as the ``num_nights`` nights where
the moon is most fully illuminated at local midnight. This method
should normally be called with ``num_nights`` equal to None, in which
case the value is taken from our
        :class:`desisurvey.config.Configuration`.
Parameters
----------
night : date
Converted to a date using :func:`desisurvey.utils.get_date`.
num_nights : int or None
Number of nights to block out around each full-moon.
Returns
-------
bool
True if the specified night falls during a full-moon break.
"""
# Check the requested length of the full moon break.
if num_nights is None:
num_nights = desisurvey.config.Configuration().full_moon_nights()
# Look up the index of this night in our table.
index = self.get_night(night, as_index=True)
# When is the nearest full moon?
nearest = self._table['nearest_full_moon'][index]
if np.abs(nearest) < 0.5 * num_nights:
return True
elif nearest == 0.5 * num_nights:
# Tie breaker if two nights are equally close.
return True
else:
return False
def get_object_interpolator(row, object_name, altaz=False):
"""Build an interpolator for object location during one night.
Wrap around in RA is handled correctly and we assume that the object never
wraps around in DEC. The interpolated unit vectors should be within
0.3 degrees of the true unit vectors in both (dec,ra) and (alt,az).
Parameters
----------
row : astropy.table.Row
A single row from the ephemerides astropy Table corresponding to the
night in question.
object_name : string
Name of the object to build an interpolator for. Must be listed under
        avoid_bodies in :class:`our configuration
<desisurvey.config.Configuration>`.
altaz : bool
Interpolate in (alt,az) if True, else interpolate in (dec,ra).
Returns
-------
callable
A callable object that takes a single MJD value or an array of MJD
values and returns the corresponding (dec,ra) or (alt,az) values in
degrees, with -90 <= dec,alt <= +90 and 0 <= ra,az < 360.
"""
# Find the tabulated (ra, dec) values for the requested object.
try:
ra = row[object_name + '_ra']
dec = row[object_name + '_dec']
    except KeyError:
raise ValueError('Invalid object_name {0}.'.format(object_name))
# Calculate the grid of MJD time steps where (ra,dec) are tabulated.
t_obj = row['noon'] + np.linspace(0., 1., len(ra))
# Interpolate in (theta,phi) = (dec,ra) or (alt,az)?
if altaz:
# Convert each (ra,dec) to (alt,az) at the appropriate time.
times = astropy.time.Time(t_obj, format='mjd')
frame = desisurvey.utils.get_observer(times)
sky = astropy.coordinates.ICRS(ra=ra * u.deg, dec=dec * u.deg)
altaz = sky.transform_to(frame)
theta = altaz.alt.to(u.deg).value
phi = altaz.az.to(u.deg).value
else:
theta = dec
phi = ra
# Construct arrays of (theta, cos(phi), sin(phi)) values for this night.
# Use cos(phi), sin(phi) instead of phi directly to avoid wrap-around
# discontinuities. Leave theta in degrees.
data = np.empty((3, len(ra)))
data[0] = theta
phi = np.radians(phi)
data[1] = np.cos(phi)
data[2] = np.sin(phi)
# Build a cubic interpolator in (alt, az) during this interval.
# Return (0, 0, 0) outside the interval.
interpolator = scipy.interpolate.interp1d(
t_obj, data, axis=1, kind='cubic', copy=True,
bounds_error=False, fill_value=0., assume_sorted=True)
# Wrap the interpolator to convert (cos(phi), sin(phi)) back to an angle
# in degrees.
def wrapper(mjd):
theta, cos_phi, sin_phi = interpolator(mjd)
# Map arctan2 range [-180, +180] into [0, 360] with fmod().
phi = np.fmod(360 + np.degrees(np.arctan2(sin_phi, cos_phi)), 360)
return theta, phi
return wrapper
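# Editor's addition (hedged sketch, not part of the original module): the cos/sin
# representation used above avoids interpolating across the 360 -> 0 wrap in RA/az.
# A self-contained illustration of the same recovery step with plain numpy:
def _example_angle_wrap_roundtrip():
    """Hypothetical helper, for illustration only; not used by desisurvey."""
    phi = np.array([1., 179., 181., 359.])
    cos_phi, sin_phi = np.cos(np.radians(phi)), np.sin(np.radians(phi))
    # Map the arctan2 range [-180, +180] back into [0, 360) with fmod().
    recovered = np.fmod(360 + np.degrees(np.arctan2(sin_phi, cos_phi)), 360)
    assert np.allclose(recovered, phi)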
def get_grid(step_size=1, night_start=-6, night_stop=7):
"""Calculate a grid of equally spaced times covering one night.
In case the requested step size does not evenly divide the requested
range, the last grid point will be rounded up.
The default range covers all possible observing times at KPNO.
Parameters
----------
step_size : :class:`astropy.units.Quantity`, optional
Size of each grid step with time units, default 1 min.
night_start : :class:`astropy.units.Quantity`, optional
First grid point relative to local midnight with time units, default -6 h.
night_stop : :class:`astropy.units.Quantity`, optional
Last grid point relative to local midnight with time units, default 7 h.
Returns
-------
array
Numpy array of dimensionless offsets relative to local midnight
in units of days.
"""
if not isinstance(step_size, u.Quantity):
step_size = step_size * u.min
if not isinstance(night_start, u.Quantity):
night_start = night_start * u.hour
if not isinstance(night_stop, u.Quantity):
night_stop = night_stop * u.hour
num_points = int(round(((night_stop - night_start) / step_size).to(1).value))
night_stop = night_start + num_points * step_size
return (night_start.to(u.day).value +
step_size.to(u.day).value * np.arange(num_points + 1))
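# Editor's addition (hedged sketch, not part of the original module): with the default
# arguments, get_grid() covers -6 h to +7 h around local midnight in 1 min steps.
def _example_get_grid_defaults():
    """Hypothetical helper, for illustration only; not used by desisurvey."""
    grid = get_grid()
    # 13 hours at 1 minute per step gives 780 steps, i.e. 781 grid points.
    assert len(grid) == 13 * 60 + 1
    # Offsets are returned in days: -6 h is -0.25 d and +7 h is about +0.2917 d.
    assert abs(grid[0] + 0.25) < 1e-9 and abs(grid[-1] - 7. / 24.) < 1e-9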
| desihub/desisurvey | py/desisurvey/ephem.py | Python | bsd-3-clause | 38,246 |
# -*- python -*-
# you must invoke this with an explicit python, from the tree root
"""Run an arbitrary command with a PYTHONPATH that will include the Tahoe
code, including dependent libraries. Run this like:
python misc/build_helpers/run-with-pythonpath.py python foo.py
"""
import os, sys
# figure out where support/lib/pythonX.X/site-packages is
# add it to os.environ["PYTHONPATH"]
# spawn the child process
def pylibdir(prefixdir):
pyver = "python%d.%d" % (sys.version_info[:2])
if sys.platform == "win32":
return os.path.join(prefixdir, "Lib", "site-packages")
else:
return os.path.join(prefixdir, "lib", pyver, "site-packages")
basedir = os.path.dirname(os.path.abspath(__file__))
supportlib = pylibdir(os.path.abspath("support"))
oldpp = os.environ.get("PYTHONPATH", "").split(os.pathsep)
if oldpp == [""]:
# grr silly split() behavior
oldpp = []
newpp = os.pathsep.join(oldpp + [supportlib,])
os.environ['PYTHONPATH'] = newpp
from twisted.python.procutils import which
cmd = sys.argv[1]
if cmd and cmd[0] not in "/~.":
cmds = which(cmd)
if not cmds:
print >>sys.stderr, "'%s' not found on PATH" % (cmd,)
sys.exit(-1)
cmd = cmds[0]
os.execve(cmd, sys.argv[1:], os.environ)
| drewp/tahoe-lafs | misc/build_helpers/run-with-pythonpath.py | Python | gpl-2.0 | 1,257 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test suite for VMWareAPI.
"""
from nova import context
from nova import db
from nova import exception
from nova import flags
from nova import test
from nova.compute import power_state
from nova.tests.glance import stubs as glance_stubs
from nova.tests.vmwareapi import db_fakes
from nova.tests.vmwareapi import stubs
from nova.virt import vmwareapi_conn
from nova.virt.vmwareapi import fake as vmwareapi_fake
FLAGS = flags.FLAGS
class VMWareAPIVMTestCase(test.TestCase):
"""Unit tests for Vmware API connection calls."""
def setUp(self):
super(VMWareAPIVMTestCase, self).setUp()
self.context = context.RequestContext('fake', 'fake', is_admin=False)
self.flags(vmwareapi_host_ip='test_url',
vmwareapi_host_username='test_username',
vmwareapi_host_password='test_pass')
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
vmwareapi_fake.reset()
db_fakes.stub_out_db_instance_api(self.stubs)
stubs.set_stubs(self.stubs)
glance_stubs.stubout_glance_client(self.stubs)
self.conn = vmwareapi_conn.get_connection(False)
# NOTE(vish): none of the network plugging code is actually
# being tested
self.network_info = [({'bridge': 'fa0',
'id': 0,
'vlan': None,
'bridge_interface': None,
'injected': True},
{'broadcast': '192.168.0.255',
'dns': ['192.168.0.1'],
'gateway': '192.168.0.1',
'gateway_v6': 'dead:beef::1',
'ip6s': [{'enabled': '1',
'ip': 'dead:beef::dcad:beff:feef:0',
'netmask': '64'}],
'ips': [{'enabled': '1',
'ip': '192.168.0.100',
'netmask': '255.255.255.0'}],
'label': 'fake',
'mac': 'DE:AD:BE:EF:00:00',
'rxtx_cap': 3})]
self.image = {
'id': 'c1c8ce3d-c2e0-4247-890c-ccf5cc1c004c',
'disk_format': 'vhd',
'size': 512,
}
def tearDown(self):
super(VMWareAPIVMTestCase, self).tearDown()
vmwareapi_fake.cleanup()
def _create_instance_in_the_db(self):
values = {'name': 1,
'id': 1,
'project_id': self.project_id,
'user_id': self.user_id,
'image_ref': "1",
'kernel_id': "1",
'ramdisk_id': "1",
'mac_address': "de:ad:be:ef:be:ef",
'instance_type': 'm1.large',
}
self.instance = db.instance_create(None, values)
def _create_vm(self):
"""Create and spawn the VM."""
self._create_instance_in_the_db()
self.type_data = db.instance_type_get_by_name(None, 'm1.large')
self.conn.spawn(self.context, self.instance, self.image,
self.network_info)
self._check_vm_record()
def _check_vm_record(self):
"""
Check if the spawned VM's properties correspond to the instance in
the db.
"""
instances = self.conn.list_instances()
self.assertEquals(len(instances), 1)
# Get Nova record for VM
vm_info = self.conn.get_info({'name': 1})
# Get record for VM
vms = vmwareapi_fake._get_objects("VirtualMachine")
vm = vms[0]
# Check that m1.large above turned into the right thing.
mem_kib = long(self.type_data['memory_mb']) << 10
vcpus = self.type_data['vcpus']
self.assertEquals(vm_info['max_mem'], mem_kib)
self.assertEquals(vm_info['mem'], mem_kib)
self.assertEquals(vm.get("summary.config.numCpu"), vcpus)
self.assertEquals(vm.get("summary.config.memorySizeMB"),
self.type_data['memory_mb'])
# Check that the VM is running according to Nova
self.assertEquals(vm_info['state'], power_state.RUNNING)
# Check that the VM is running according to vSphere API.
self.assertEquals(vm.get("runtime.powerState"), 'poweredOn')
def _check_vm_info(self, info, pwr_state=power_state.RUNNING):
"""
Check if the get_info returned values correspond to the instance
object in the db.
"""
mem_kib = long(self.type_data['memory_mb']) << 10
self.assertEquals(info["state"], pwr_state)
self.assertEquals(info["max_mem"], mem_kib)
self.assertEquals(info["mem"], mem_kib)
self.assertEquals(info["num_cpu"], self.type_data['vcpus'])
def test_list_instances(self):
instances = self.conn.list_instances()
self.assertEquals(len(instances), 0)
def test_list_instances_1(self):
self._create_vm()
instances = self.conn.list_instances()
self.assertEquals(len(instances), 1)
def test_spawn(self):
self._create_vm()
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.RUNNING)
def test_snapshot(self):
self._create_vm()
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.RUNNING)
self.conn.snapshot(self.context, self.instance, "Test-Snapshot")
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.RUNNING)
def test_snapshot_non_existent(self):
self._create_instance_in_the_db()
self.assertRaises(exception.InstanceNotFound, self.conn.snapshot,
self.context, self.instance, "Test-Snapshot")
def test_reboot(self):
self._create_vm()
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.RUNNING)
reboot_type = "SOFT"
self.conn.reboot(self.instance, self.network_info, reboot_type)
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.RUNNING)
def test_reboot_non_existent(self):
self._create_instance_in_the_db()
self.assertRaises(exception.InstanceNotFound, self.conn.reboot,
self.instance, self.network_info, 'SOFT')
def test_reboot_not_poweredon(self):
self._create_vm()
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.RUNNING)
self.conn.suspend(self.instance)
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.PAUSED)
self.assertRaises(exception.InstanceRebootFailure, self.conn.reboot,
self.instance, self.network_info, 'SOFT')
def test_suspend(self):
self._create_vm()
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.RUNNING)
self.conn.suspend(self.instance)
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.PAUSED)
def test_suspend_non_existent(self):
self._create_instance_in_the_db()
self.assertRaises(exception.InstanceNotFound, self.conn.suspend,
self.instance)
def test_resume(self):
self._create_vm()
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.RUNNING)
self.conn.suspend(self.instance)
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.PAUSED)
self.conn.resume(self.instance)
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.RUNNING)
def test_resume_non_existent(self):
self._create_instance_in_the_db()
self.assertRaises(exception.InstanceNotFound, self.conn.resume,
self.instance)
def test_resume_not_suspended(self):
self._create_vm()
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.RUNNING)
self.assertRaises(exception.InstanceResumeFailure, self.conn.resume,
self.instance)
def test_get_info(self):
self._create_vm()
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.RUNNING)
def test_destroy(self):
self._create_vm()
info = self.conn.get_info({'name': 1})
self._check_vm_info(info, power_state.RUNNING)
instances = self.conn.list_instances()
self.assertEquals(len(instances), 1)
self.conn.destroy(self.instance, self.network_info)
instances = self.conn.list_instances()
self.assertEquals(len(instances), 0)
def test_destroy_non_existent(self):
self._create_instance_in_the_db()
self.assertEquals(self.conn.destroy(self.instance, self.network_info),
None)
def test_pause(self):
pass
def test_unpause(self):
pass
def test_diagnostics(self):
pass
def test_get_console_output(self):
pass
| usc-isi/essex-baremetal-support | nova/tests/test_vmwareapi.py | Python | apache-2.0 | 10,058 |
#
# Widgets.py -- wrapped Qt widgets and convenience functions
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import os.path
from functools import reduce
from ginga.qtw.QtHelp import QtGui, QtCore, QTextCursor, \
QIcon, QPixmap, QImage, have_pyqt4
from ginga.qtw import QtHelp
from ginga.misc import Callback, Bunch
import ginga.icons
# path to our icons
icondir = os.path.split(ginga.icons.__file__)[0]
class WidgetError(Exception):
"""For errors thrown in this module."""
pass
_app = None
# BASE
class WidgetBase(Callback.Callbacks):
def __init__(self):
super(WidgetBase, self).__init__()
self.widget = None
self.changed = False
# external data can be attached here
self.extdata = Bunch.Bunch()
def get_widget(self):
return self.widget
def set_tooltip(self, text):
self.widget.setToolTip(text)
def set_enabled(self, tf):
self.widget.setEnabled(tf)
def get_size(self):
wd, ht = self.widget.width(), self.widget.height()
return (wd, ht)
def get_app(self):
return _app
def delete(self):
self.widget.deleteLater()
def focus(self):
self.widget.activateWindow()
self.widget.setFocus()
#self.widget.raise_()
def resize(self, width, height):
self.widget.resize(width, height)
def show(self):
self.widget.show()
def hide(self):
self.widget.hide()
def get_font(self, font_family, point_size):
font = QtHelp.get_font(font_family, point_size)
return font
def cfg_expand(self, horizontal=0, vertical=0):
h_policy = QtGui.QSizePolicy.Policy(horizontal)
v_policy = QtGui.QSizePolicy.Policy(vertical)
self.widget.setSizePolicy(QtGui.QSizePolicy(h_policy, v_policy))
# BASIC WIDGETS
class TextEntry(WidgetBase):
def __init__(self, text='', editable=True):
super(TextEntry, self).__init__()
self.widget = QtGui.QLineEdit()
self.widget.setText(text)
self.widget.setReadOnly(not editable)
self.widget.returnPressed.connect(self._cb_redirect)
self.enable_callback('activated')
def _cb_redirect(self, *args):
self.make_callback('activated')
def get_text(self):
return self.widget.text()
def set_text(self, text):
self.widget.setText(text)
def set_editable(self, tf):
self.widget.setReadOnly(not tf)
def set_font(self, font):
self.widget.setFont(font)
def set_length(self, numchars):
# this is only supposed to set the visible length (but Qt doesn't
# really have a good way to do that)
#self.widget.setMaxLength(numchars)
pass
class TextEntrySet(WidgetBase):
def __init__(self, text='', editable=True):
super(TextEntrySet, self).__init__()
self.widget = QtHelp.HBox()
self.entry = QtGui.QLineEdit()
self.entry.setText(text)
self.entry.setReadOnly(not editable)
layout = self.widget.layout()
layout.addWidget(self.entry, stretch=1)
self.btn = QtGui.QPushButton('Set')
self.entry.returnPressed.connect(self._cb_redirect)
self.btn.clicked.connect(self._cb_redirect)
layout.addWidget(self.btn, stretch=0)
self.enable_callback('activated')
def _cb_redirect(self, *args):
self.make_callback('activated')
def get_text(self):
return self.entry.text()
def set_text(self, text):
self.entry.setText(text)
def set_editable(self, tf):
self.entry.setReadOnly(not tf)
def set_font(self, font):
self.widget.setFont(font)
def set_length(self, numchars):
# this is only supposed to set the visible length (but Qt doesn't
# really have a good way to do that)
#self.widget.setMaxLength(numchars)
pass
def set_enabled(self, tf):
super(TextEntrySet, self).set_enabled(tf)
self.entry.setEnabled(tf)
class GrowingTextEdit(QtGui.QTextEdit):
def __init__(self, *args, **kwargs):
super(GrowingTextEdit, self).__init__(*args, **kwargs)
self.document().documentLayout().documentSizeChanged.connect(
self.sizeChange)
self.heightMin = 0
self.heightMax = 65000
def sizeChange(self):
docHeight = self.document().size().height()
# add some margin to prevent auto scrollbars
docHeight += 20
if self.heightMin <= docHeight <= self.heightMax:
self.setMaximumHeight(docHeight)
class TextArea(WidgetBase):
def __init__(self, wrap=False, editable=False):
super(TextArea, self).__init__()
#tw = QtGui.QTextEdit()
tw = GrowingTextEdit()
tw.setReadOnly(not editable)
if wrap:
tw.setLineWrapMode(QtGui.QTextEdit.WidgetWidth)
else:
tw.setLineWrapMode(QtGui.QTextEdit.NoWrap)
self.widget = tw
def append_text(self, text, autoscroll=True):
if text.endswith('\n'):
text = text[:-1]
self.widget.append(text)
if not autoscroll:
return
self.widget.moveCursor(QTextCursor.End)
self.widget.moveCursor(QTextCursor.StartOfLine)
self.widget.ensureCursorVisible()
def get_text(self):
return self.widget.document().toPlainText()
def clear(self):
self.widget.clear()
def set_text(self, text):
self.clear()
self.append_text(text)
def set_editable(self, tf):
self.widget.setReadOnly(not tf)
def set_limit(self, numlines):
#self.widget.setMaximumBlockCount(numlines)
pass
def set_font(self, font):
self.widget.setCurrentFont(font)
def set_wrap(self, tf):
if tf:
self.widget.setLineWrapMode(QtGui.QTextEdit.WidgetWidth)
else:
self.widget.setLineWrapMode(QtGui.QTextEdit.NoWrap)
class Label(WidgetBase):
def __init__(self, text='', halign='left', style='normal', menu=None):
super(Label, self).__init__()
lbl = QtGui.QLabel(text)
if halign == 'left':
lbl.setAlignment(QtCore.Qt.AlignLeft)
elif halign == 'center':
lbl.setAlignment(QtCore.Qt.AlignHCenter)
        elif halign == 'right':
lbl.setAlignment(QtCore.Qt.AlignRight)
self.widget = lbl
lbl.mousePressEvent = self._cb_redirect
if style == 'clickable':
lbl.setSizePolicy(QtGui.QSizePolicy.Minimum,
QtGui.QSizePolicy.Minimum)
lbl.setFrameStyle(QtGui.QFrame.Box | QtGui.QFrame.Raised)
if menu is not None:
lbl.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
menu_w = menu.get_widget()
def on_context_menu(point):
menu_w.exec_(lbl.mapToGlobal(point))
lbl.customContextMenuRequested.connect(on_context_menu)
# Enable highlighting for copying
#lbl.setTextInteractionFlags(QtCore.Qt.TextSelectableByMouse)
self.enable_callback('activated')
def _cb_redirect(self, event):
buttons = event.buttons()
if buttons & QtCore.Qt.LeftButton:
self.make_callback('activated')
def get_text(self):
return self.widget.text()
def set_text(self, text):
self.widget.setText(text)
def set_font(self, font):
self.widget.setFont(font)
def set_color(self, fg=None, bg=None):
        self.widget.setStyleSheet("QLabel { background-color: %s; color: %s; }" % (bg, fg))
class Button(WidgetBase):
def __init__(self, text=''):
super(Button, self).__init__()
self.widget = QtGui.QPushButton(text)
self.widget.clicked.connect(self._cb_redirect)
self.enable_callback('activated')
def _cb_redirect(self, *args):
self.make_callback('activated')
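# Editor's addition (hedged sketch, not part of ginga): every wrapper above follows the
# same pattern -- the Qt signal is connected to _cb_redirect(), which re-emits it as a
# ginga callback. Typical use, assuming a QApplication already exists:
#
#     btn = Button('Go')
#     btn.add_callback('activated', lambda w: print('clicked'))
#     container_layout.addWidget(btn.get_widget())
#
# add_callback() is inherited from ginga.misc.Callback.Callbacks via WidgetBase.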
class ComboBox(WidgetBase):
def __init__(self, editable=False):
super(ComboBox, self).__init__()
self.widget = QtHelp.ComboBox()
self.widget.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToContents)
self.widget.setEditable(editable)
self.widget.activated.connect(self._cb_redirect)
self.enable_callback('activated')
def _cb_redirect(self):
idx = self.widget.currentIndex()
self.make_callback('activated', idx)
def insert_alpha(self, text):
index = 0
while True:
itemText = self.widget.itemText(index)
if len(itemText) == 0:
break
if itemText > text:
self.widget.insertItem(index, text)
return
index += 1
self.widget.addItem(text)
def delete_alpha(self, text):
index = self.widget.findText(text)
self.widget.removeItem(index)
def get_alpha(self, idx):
return self.widget.itemText(idx)
def clear(self):
self.widget.clear()
def show_text(self, text):
index = self.widget.findText(text)
self.set_index(index)
def append_text(self, text):
self.widget.addItem(text)
def set_index(self, index):
self.widget.setCurrentIndex(index)
def get_index(self):
return self.widget.currentIndex()
class SpinBox(WidgetBase):
def __init__(self, dtype=int):
super(SpinBox, self).__init__()
if dtype == float:
w = QtGui.QDoubleSpinBox()
else:
w = QtGui.QSpinBox()
w.valueChanged.connect(self._cb_redirect)
# should values wrap around
w.setWrapping(False)
self.widget = w
self.enable_callback('value-changed')
def _cb_redirect(self, val):
if self.changed:
self.changed = False
return
self.make_callback('value-changed', val)
def get_value(self):
return self.widget.value()
def set_value(self, val):
self.changed = True
self.widget.setValue(val)
def set_decimals(self, num):
self.widget.setDecimals(num)
def set_limits(self, minval, maxval, incr_value=1):
adj = self.widget
adj.setRange(minval, maxval)
adj.setSingleStep(incr_value)
class Slider(WidgetBase):
def __init__(self, orientation='horizontal', track=False):
super(Slider, self).__init__()
if orientation == 'horizontal':
w = QtGui.QSlider(QtCore.Qt.Horizontal)
w.setTickPosition(QtGui.QSlider.TicksBelow)
else:
w = QtGui.QSlider(QtCore.Qt.Vertical)
w.setTickPosition(QtGui.QSlider.TicksRight)
#w.setTickPosition(QtGui.QSlider.NoTicks)
# this controls whether the callbacks are made *as the user
# moves the slider* or afterwards
w.setTracking(track)
self.widget = w
w.valueChanged.connect(self._cb_redirect)
self.enable_callback('value-changed')
def _cb_redirect(self, val):
# It appears that Qt uses set_value() to set the value of the
# slider when it is dragged, so we cannot use the usual method
# of setting a hidden "changed" variable to suppress the callback
# when setting the value programmatically.
## if self.changed:
## self.changed = False
## return
self.make_callback('value-changed', val)
def get_value(self):
return self.widget.value()
def set_value(self, val):
self.changed = True
self.widget.setValue(val)
def set_tracking(self, tf):
self.widget.setTracking(tf)
def set_limits(self, minval, maxval, incr_value=1):
adj = self.widget
adj.setRange(minval, maxval)
adj.setSingleStep(incr_value)
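# A minimal usage sketch (not part of the original module): it shows the
# effect of the `track` flag documented above -- with track=False the
# 'value-changed' callback fires only when the user releases the slider,
# with track=True it fires continuously while dragging.  It assumes the
# add_callback() interface of the Callbacks base class and a container
# (e.g. a VBox) created from this module.
def _example_slider(container):
    slider = Slider(orientation='horizontal', track=False)
    slider.set_limits(0, 100, incr_value=5)
    slider.set_value(50)
    seen_values = []
    # callbacks are invoked with (widget, value)
    slider.add_callback('value-changed', lambda w, val: seen_values.append(val))
    container.add_widget(slider, stretch=1)
    return slider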
class ScrollBar(WidgetBase):
def __init__(self, orientation='horizontal'):
super(ScrollBar, self).__init__()
if orientation == 'horizontal':
self.widget = QtGui.QScrollBar(QtCore.Qt.Horizontal)
else:
self.widget = QtGui.QScrollBar(QtCore.Qt.Vertical)
self.widget.valueChanged.connect(self._cb_redirect)
self.enable_callback('activated')
def _cb_redirect(self):
val = self.widget.value()
self.make_callback('activated', val)
class CheckBox(WidgetBase):
def __init__(self, text=''):
super(CheckBox, self).__init__()
self.widget = QtGui.QCheckBox(text)
self.widget.stateChanged.connect(self._cb_redirect)
self.enable_callback('activated')
def _cb_redirect(self, *args):
val = self.get_state()
self.make_callback('activated', val)
def set_state(self, tf):
self.widget.setChecked(tf)
def get_state(self):
val = self.widget.checkState()
# returns 0 (unchecked) or 2 (checked)
return (val != 0)
class ToggleButton(WidgetBase):
def __init__(self, text=''):
super(ToggleButton, self).__init__()
self.widget = QtGui.QPushButton(text)
self.widget.setCheckable(True)
self.widget.clicked.connect(self._cb_redirect)
self.enable_callback('activated')
def _cb_redirect(self, val):
self.make_callback('activated', val)
def set_state(self, tf):
self.widget.setChecked(tf)
def get_state(self):
return self.widget.isChecked()
class RadioButton(WidgetBase):
def __init__(self, text='', group=None):
super(RadioButton, self).__init__()
self.widget = QtGui.QRadioButton(text)
self.widget.toggled.connect(self._cb_redirect)
self.enable_callback('activated')
def _cb_redirect(self, val):
if self.changed:
self.changed = False
return
self.make_callback('activated', val)
def set_state(self, tf):
if self.widget.isChecked() != tf:
# toggled only fires when the value is toggled
self.changed = True
self.widget.setChecked(tf)
def get_state(self):
return self.widget.isChecked()
class Image(WidgetBase):
def __init__(self, native_image=None, style='normal', menu=None):
super(Image, self).__init__()
lbl = QtGui.QLabel()
self.widget = lbl
if native_image is not None:
self._set_image(native_image)
lbl.mousePressEvent = self._cb_redirect
if style == 'clickable':
lbl.setSizePolicy(QtGui.QSizePolicy.Minimum,
QtGui.QSizePolicy.Minimum)
#lbl.setFrameStyle(QtGui.QFrame.Box | QtGui.QFrame.Raised)
if menu is not None:
lbl.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
menu_w = menu.get_widget()
def on_context_menu(point):
menu_w.exec_(lbl.mapToGlobal(point))
lbl.customContextMenuRequested.connect(on_context_menu)
self.enable_callback('activated')
def _cb_redirect(self, event):
buttons = event.buttons()
if buttons & QtCore.Qt.LeftButton:
self.make_callback('activated')
def _set_image(self, native_image):
pixmap = QPixmap.fromImage(native_image)
self.widget.setPixmap(pixmap)
class ProgressBar(WidgetBase):
def __init__(self):
super(ProgressBar, self).__init__()
w = QtGui.QProgressBar()
w.setRange(0, 100)
w.setTextVisible(True)
self.widget = w
def set_value(self, pct):
self.widget.setValue(int(pct * 100.0))
class StatusBar(WidgetBase):
def __init__(self):
super(StatusBar, self).__init__()
sbar = QtGui.QStatusBar()
self.widget = sbar
def set_message(self, msg_str):
# remove message in about 10 seconds
self.widget.showMessage(msg_str, 10000)
class TreeView(WidgetBase):
def __init__(self, auto_expand=False, sortable=False,
selection='single', use_alt_row_color=False,
dragable=False):
super(TreeView, self).__init__()
self.auto_expand = auto_expand
self.sortable = sortable
self.dragable = dragable
self.selection = selection
self.levels = 1
self.leaf_key = None
self.leaf_idx = 0
self.columns = []
self.datakeys = []
# shadow index
self.shadow = {}
tv = QtGui.QTreeWidget()
self.widget = tv
tv.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
if selection == 'multiple':
tv.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)
tv.setAlternatingRowColors(use_alt_row_color)
tv.itemDoubleClicked.connect(self._cb_redirect)
tv.itemSelectionChanged.connect(self._selection_cb)
if self.dragable:
tv.setDragEnabled(True)
tv.startDrag = self._start_drag
for cbname in ('selected', 'activated', 'drag-start'):
self.enable_callback(cbname)
def setup_table(self, columns, levels, leaf_key):
self.clear()
self.columns = columns
self.levels = levels
self.leaf_key = leaf_key
treeview = self.widget
treeview.setColumnCount(len(columns))
treeview.setSortingEnabled(self.sortable)
# speeds things up a bit
treeview.setUniformRowHeights(True)
# create the column headers
if not isinstance(columns[0], str):
# columns specifies a mapping
headers = [ col[0] for col in columns ]
datakeys = [ col[1] for col in columns ]
else:
headers = datakeys = columns
self.datakeys = datakeys
self.leaf_idx = datakeys.index(self.leaf_key)
if self.sortable:
# Sort increasing by default
treeview.sortByColumn(self.leaf_idx, QtCore.Qt.AscendingOrder)
treeview.setHeaderLabels(headers)
def set_tree(self, tree_dict):
self.clear()
self.add_tree(tree_dict)
def add_tree(self, tree_dict):
if self.sortable:
self.widget.setSortingEnabled(False)
for key in tree_dict:
self._add_subtree(1, self.shadow,
self.widget, key, tree_dict[key])
if self.sortable:
self.widget.setSortingEnabled(True)
# User wants auto expand?
if self.auto_expand:
self.widget.expandAll()
def _add_subtree(self, level, shadow, parent_item, key, node):
if level >= self.levels:
# leaf node
values = [ '' if _key == 'icon' else str(node[_key])
for _key in self.datakeys ]
try:
bnch = shadow[key]
item = bnch.item
# TODO: update leaf item
except KeyError:
# new item
item = QtGui.QTreeWidgetItem(parent_item, values)
if level == 1:
parent_item.addTopLevelItem(item)
else:
parent_item.addChild(item)
shadow[key] = Bunch.Bunch(node=node, item=item, terminal=True)
# hack for adding an image to a table
# TODO: add types for columns
if 'icon' in node:
i = self.datakeys.index('icon')
item.setIcon(i, node['icon'])
# mark cell as non-editable
item.setFlags(item.flags() & ~QtCore.Qt.ItemIsEditable)
else:
try:
# node already exists
bnch = shadow[key]
item = bnch.item
d = bnch.node
except KeyError:
# new node
item = QtGui.QTreeWidgetItem(parent_item, [str(key)])
if level == 1:
parent_item.addTopLevelItem(item)
else:
parent_item.addChild(item)
d = {}
shadow[key] = Bunch.Bunch(node=d, item=item, terminal=False)
# recurse for non-leaf interior node
for key in node:
self._add_subtree(level+1, d, item, key, node[key])
def _selection_cb(self):
res_dict = self.get_selected()
self.make_callback('selected', res_dict)
def _cb_redirect(self, item):
res_dict = {}
self._get_item(res_dict, item)
self.make_callback('activated', res_dict)
def _get_path(self, item):
if item is None:
return []
if item.childCount() == 0:
path_rest = self._get_path(item.parent())
myname = item.text(self.leaf_idx)
path_rest.append(myname)
return path_rest
myname = item.text(0)
path_rest = self._get_path(item.parent())
path_rest.append(myname)
return path_rest
def _get_item(self, res_dict, item):
        # from the QTreeWidgetItem `item`, insert the corresponding node
        # into the result dictionary `res_dict`, keyed by its path
path = self._get_path(item)
d, s = res_dict, self.shadow
for name in path[:-1]:
d = d.setdefault(name, {})
s = s[name].node
dst_key = path[-1]
d[dst_key] = s[dst_key].node
def get_selected(self):
items = list(self.widget.selectedItems())
res_dict = {}
for item in items:
if item.childCount() > 0:
# only leaf nodes can be selected
continue
self._get_item(res_dict, item)
return res_dict
def clear(self):
self.widget.clear()
self.shadow = {}
def clear_selection(self):
self.widget.clearSelection()
def _path_to_item(self, path):
s = self.shadow
for name in path[:-1]:
s = s[name].node
item = s[path[-1]].item
return item
def select_path(self, path):
item = self._path_to_item(path)
self.widget.setItemSelected(item, True)
def highlight_path(self, path, onoff, font_color='green'):
item = self._path_to_item(path)
# A little painfully inefficient, can we do better than this?
font = QtHelp.QFont()
if not onoff:
color = QtHelp.QColor('black')
else:
font.setBold(True)
color = QtHelp.QColor(font_color)
brush = QtHelp.QBrush(color)
for i in range(item.columnCount()):
item.setForeground(i, brush)
item.setFont(i, font)
def scroll_to_path(self, path):
# TODO: this doesn't give an error, but does not seem to be
# working as the API indicates
item = self._path_to_item(path)
row = self.widget.indexOfTopLevelItem(item)
midx = self.widget.indexAt(QtCore.QPoint(row, 0))
self.widget.scrollTo(midx, QtGui.QAbstractItemView.PositionAtCenter)
def sort_on_column(self, i):
self.widget.sortByColumn(i, QtCore.Qt.AscendingOrder)
def set_column_width(self, i, width):
self.widget.setColumnWidth(i, width)
def set_column_widths(self, lwidths):
for i, width in enumerate(lwidths):
if width is not None:
self.set_column_width(i, width)
def set_optimal_column_widths(self):
for i in range(len(self.columns)):
self.widget.resizeColumnToContents(i)
def _start_drag(self, event):
res_dict = self.get_selected()
drag_pkg = DragPackage(self.widget)
self.make_callback('drag-start', drag_pkg, res_dict)
drag_pkg.start_drag()
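# A usage sketch for the TreeView wrapper (not part of the original module).
# It illustrates the (header, datakey) column spec and the nested dict
# expected by set_tree(): `levels` deep, with each leaf dict providing one
# value per datakey.  All names and values below are illustrative only.
def _example_treeview():
    tv = TreeView(auto_expand=True, sortable=True, selection='single')
    columns = [('Name', 'name'), ('Size', 'size')]
    tv.setup_table(columns, 2, 'name')
    tree = {
        'dir1': {
            'file_a': dict(name='file_a', size='10K'),
            'file_b': dict(name='file_b', size='2K'),
        },
    }
    tv.set_tree(tree)
    # 'selected' is called with a dict of the selected leaf nodes,
    # keyed by their path in the tree
    tv.add_callback('selected', lambda w, res_dict: None)
    return tv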
# CONTAINERS
class ContainerBase(WidgetBase):
def __init__(self):
super(ContainerBase, self).__init__()
self.children = []
def add_ref(self, ref):
# TODO: should this be a weakref?
self.children.append(ref)
def _remove(self, childw, delete=False):
layout = self.widget.layout()
if layout is not None:
layout.removeWidget(childw)
childw.setParent(None)
if delete:
childw.deleteLater()
def remove(self, w, delete=False):
        if w not in self.children:
raise ValueError("Widget is not a child of this container")
self.children.remove(w)
self._remove(w.get_widget(), delete=delete)
def remove_all(self, delete=False):
for w in list(self.children):
self.remove(w, delete=delete)
def get_children(self):
return self.children
def num_children(self):
return len(self.children)
def _get_native_children(self):
return [child.get_widget() for child in self.children]
def _get_native_index(self, nchild):
l = self._get_native_children()
try:
return l.index(nchild)
except (IndexError, ValueError) as e:
return -1
def _native_to_child(self, nchild):
idx = self._get_native_index(nchild)
if idx < 0:
return None
return self.children[idx]
def set_margins(self, left, right, top, bottom):
        layout = self.widget.layout()
        # Qt's setContentsMargins() expects (left, top, right, bottom)
        layout.setContentsMargins(left, top, right, bottom)
def set_border_width(self, pix):
layout = self.widget.layout()
layout.setContentsMargins(pix, pix, pix, pix)
class Box(ContainerBase):
def __init__(self, orientation='horizontal'):
super(Box, self).__init__()
self.widget = QtGui.QWidget()
self.orientation = orientation
if orientation == 'horizontal':
self.layout = QtGui.QHBoxLayout()
else:
self.layout = QtGui.QVBoxLayout()
# because of ridiculous defaults
self.layout.setContentsMargins(0, 0, 0, 0)
self.widget.setLayout(self.layout)
def add_widget(self, child, stretch=0.0):
self.add_ref(child)
child_w = child.get_widget()
if self.orientation == 'horizontal':
self.layout.addWidget(child_w, stretch=stretch,
alignment=QtCore.Qt.AlignLeft)
else:
self.layout.addWidget(child_w, stretch=stretch)
def set_spacing(self, val):
self.layout.setSpacing(val)
class HBox(Box):
def __init__(self):
super(HBox, self).__init__(orientation='horizontal')
class VBox(Box):
def __init__(self):
super(VBox, self).__init__(orientation='vertical')
class Frame(ContainerBase):
def __init__(self, title=None):
super(Frame, self).__init__()
self.widget = QtGui.QFrame()
self.widget.setFrameStyle(QtGui.QFrame.Box | QtGui.QFrame.Raised)
vbox = QtGui.QVBoxLayout()
self.layout = vbox
# because of ridiculous defaults
vbox.setContentsMargins(2, 2, 2, 2)
self.widget.setLayout(vbox)
if title:
lbl = QtGui.QLabel(title)
lbl.setAlignment(QtCore.Qt.AlignHCenter)
#lbl.setAlignment(QtCore.Qt.AlignLeft)
vbox.addWidget(lbl, stretch=0)
self.label = lbl
else:
self.label = None
def set_widget(self, child, stretch=1):
self.remove_all()
self.add_ref(child)
self.widget.layout().addWidget(child.get_widget(), stretch=stretch)
# Qt custom expander widget
# See http://stackoverflow.com/questions/10364589/equivalent-of-gtks-expander-in-pyqt4
#
class Expander(ContainerBase):
r_arrow = None
d_arrow = None
# Note: add 'text-align: left;' if you want left adjusted labels
widget_style = """
QPushButton { margin: 1px,1px,1px,1px; padding: 0px;
border-width: 0px; border-style: solid; }
"""
def __init__(self, title=''):
super(Expander, self).__init__()
# Qt doesn't seem to like it (segfault) if we actually construct
# these icons in the class variable declarations
if Expander.r_arrow is None:
Expander.r_arrow = QtHelp.get_icon(os.path.join(icondir,
'triangle-right-48.png'),
size=(12, 12))
if Expander.d_arrow is None:
Expander.d_arrow = QtHelp.get_icon(os.path.join(icondir,
'triangle-down-48.png'),
size=(12, 12))
self.widget = QtGui.QWidget()
vbox = QtGui.QVBoxLayout()
vbox.setContentsMargins(0, 0, 0, 0)
vbox.setSpacing(0)
self.layout = vbox
self.toggle = QtGui.QPushButton(Expander.r_arrow, title)
self.toggle.setStyleSheet(Expander.widget_style)
#self.toggle.setCheckable(True)
self.toggle.clicked.connect(self._toggle_widget)
vbox.addWidget(self.toggle, stretch=0)
self.widget.setLayout(vbox)
def set_widget(self, child, stretch=1):
self.remove_all()
self.add_ref(child)
child_w = child.get_widget()
self.widget.layout().addWidget(child_w, stretch=stretch)
child_w.setVisible(False)
def _toggle_widget(self):
child = self.get_children()[0]
child_w = child.get_widget()
#if self.toggle.isChecked():
if child_w.isVisible():
self.toggle.setIcon(Expander.r_arrow)
child_w.setVisible(False)
else:
self.toggle.setIcon(Expander.d_arrow)
child_w.setVisible(True)
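# A usage sketch for the Expander above (not part of the original module):
# the child starts hidden and is toggled by clicking the title button.
def _example_expander():
    exp = Expander(title='Details')
    content = VBox()
    content.add_widget(Label('Some detail text'), stretch=0)
    exp.set_widget(content)
    return exp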
class TabWidget(ContainerBase):
def __init__(self, tabpos='top', reorderable=False, detachable=False,
group=0):
super(TabWidget, self).__init__()
self.reorderable = reorderable
self.detachable = detachable
w = QtGui.QTabWidget()
w.currentChanged.connect(self._cb_redirect)
w.tabCloseRequested.connect(self._tab_close)
w.setUsesScrollButtons(True)
#w.setTabsClosable(True)
if self.reorderable:
w.setMovable(True)
## w.tabInserted = self._tab_insert_cb
## w.tabRemoved = self._tab_remove_cb
self.widget = w
self.set_tab_position(tabpos)
for name in ('page-switch', 'page-close', 'page-move', 'page-detach'):
self.enable_callback(name)
def set_tab_position(self, tabpos):
w = self.widget
if tabpos == 'top':
w.setTabPosition(QtGui.QTabWidget.North)
elif tabpos == 'bottom':
w.setTabPosition(QtGui.QTabWidget.South)
elif tabpos == 'left':
w.setTabPosition(QtGui.QTabWidget.West)
elif tabpos == 'right':
w.setTabPosition(QtGui.QTabWidget.East)
def _cb_redirect(self, index):
# get new index, because passed index can be out of date
index = self.get_index()
child = self.index_to_widget(index)
if child is not None:
self.make_callback('page-switch', child)
def _tab_close(self, index):
child = self.index_to_widget(index)
self.make_callback('page-close', child)
def add_widget(self, child, title=''):
self.add_ref(child)
child_w = child.get_widget()
self.widget.addTab(child_w, title)
# attach title to child
child.extdata.tab_title = title
def _remove(self, nchild, delete=False):
idx = self.widget.indexOf(nchild)
self.widget.removeTab(idx)
nchild.setParent(None)
if delete:
nchild.deleteLater()
def get_index(self):
return self.widget.currentIndex()
def set_index(self, idx):
self.widget.setCurrentIndex(idx)
child = self.index_to_widget(idx)
#child.focus()
def index_of(self, child):
return self.widget.indexOf(child.get_widget())
def index_to_widget(self, idx):
"""Returns child corresponding to `idx`"""
nchild = self.widget.widget(idx)
if nchild is None:
return nchild
return self._native_to_child(nchild)
def highlight_tab(self, idx, tf):
tabbar = self.widget.tabBar()
if not tf:
color = QtHelp.QColor('black')
else:
color = QtHelp.QColor('green')
tabbar.setTabTextColor(idx, color)
class StackWidget(ContainerBase):
def __init__(self):
super(StackWidget, self).__init__()
self.widget = QtGui.QStackedWidget()
# TODO: currently only provided for compatibility with other
# like widgets
self.enable_callback('page-switch')
def add_widget(self, child, title=''):
self.add_ref(child)
child_w = child.get_widget()
self.widget.addWidget(child_w)
# attach title to child
child.extdata.tab_title = title
def get_index(self):
return self.widget.currentIndex()
def set_index(self, idx):
self.widget.setCurrentIndex(idx)
#child = self.index_to_widget(idx)
#child.focus()
def index_of(self, child):
return self.widget.indexOf(child.get_widget())
def index_to_widget(self, idx):
nchild = self.widget.widget(idx)
return self._native_to_child(nchild)
class MDIWidget(ContainerBase):
def __init__(self, tabpos='top', mode='mdi'):
super(MDIWidget, self).__init__()
w = QtGui.QMdiArea()
w.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
w.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
w.subWindowActivated.connect(self._cb_redirect)
## w.setSizePolicy(QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding,
## QtGui.QSizePolicy.Expanding))
w.setTabsClosable(True)
w.setTabsMovable(False)
self.widget = w
self.true_mdi = True
self.cur_index = -1
for name in ('page-switch', 'page-close'):
self.enable_callback(name)
self.set_tab_position(tabpos)
self.set_mode(mode)
def set_tab_position(self, tabpos):
w = self.widget
if tabpos == 'top':
w.setTabPosition(QtGui.QTabWidget.North)
elif tabpos == 'bottom':
w.setTabPosition(QtGui.QTabWidget.South)
elif tabpos == 'left':
w.setTabPosition(QtGui.QTabWidget.West)
elif tabpos == 'right':
w.setTabPosition(QtGui.QTabWidget.East)
def get_mode(self):
if self.widget.viewMode() == QtGui.QMdiArea.TabbedView:
return 'tabs'
return 'mdi'
def set_mode(self, mode):
mode = mode.lower()
if mode == 'tabs':
self.widget.setViewMode(QtGui.QMdiArea.TabbedView)
elif mode == 'mdi':
self.widget.setViewMode(QtGui.QMdiArea.SubWindowView)
else:
raise ValueError("Don't understand mode='%s'" % (mode))
def _cb_redirect(self, subwin):
if subwin is not None:
nchild = subwin.widget()
child = self._native_to_child(nchild)
self.cur_index = self.children.index(child)
self.make_callback('page-switch', child)
def _window_resized(self, event, subwin, widget):
qsize = event.size()
wd, ht = qsize.width(), qsize.height()
# save size
widget.extdata.mdi_size = (wd, ht)
subwin._resizeEvent(event)
def _window_moved(self, event, subwin, widget):
qpos = event.pos()
x, y = qpos.x(), qpos.y()
# save position
widget.extdata.mdi_pos = (x, y)
subwin._moveEvent(event)
def _window_closed(self, event, subwin, widget):
nchild = subwin.widget()
child = self._native_to_child(nchild)
# let the application deal with this if desired in page-close
# callback
event.ignore()
#self.widget.removeSubWindow(subwin)
self.make_callback('page-close', child)
def add_widget(self, child, title=''):
self.add_ref(child)
child_w = child.get_widget()
subwin = QtGui.QMdiSubWindow(self.widget)
subwin.setWidget(child_w)
# attach title to child
child.extdata.tab_title = title
w = self.widget.addSubWindow(subwin)
w._closeEvent = w.closeEvent
w.closeEvent = lambda event: self._window_closed(event, w, child)
# does child have a previously saved size
size = child.extdata.get('mdi_size', None)
if size is not None:
wd, ht = size
w.resize(wd, ht)
# does child have a previously saved position
pos = child.extdata.get('mdi_pos', None)
if pos is not None:
x, y = pos
w.move(x, y)
w._resizeEvent = w.resizeEvent
w.resizeEvent = lambda event: self._window_resized(event, w, child)
w._moveEvent = w.moveEvent
w.moveEvent = lambda event: self._window_moved(event, w, child)
w.setWindowTitle(title)
child_w.show()
w.show()
def _remove(self, nchild, delete=False):
subwins = list(self.widget.subWindowList())
l = [ sw.widget() for sw in subwins ]
try:
idx = l.index(nchild)
subwin = subwins[idx]
except (IndexError, ValueError) as e:
subwin = None
if subwin is not None:
self.widget.removeSubWindow(subwin)
subwin.deleteLater()
nchild.setParent(None)
if delete:
nchild.deleteLater()
def get_index(self):
subwin = self.widget.activeSubWindow()
if subwin is not None:
return self._get_native_index(subwin.widget())
return self.cur_index
def _get_subwin(self, widget):
for subwin in list(self.widget.subWindowList()):
if subwin.widget() == widget:
return subwin
return None
def set_index(self, idx):
if 0 <= idx < len(self.children):
child = self.children[idx]
subwin = self._get_subwin(child.widget)
if subwin is not None:
self.widget.setActiveSubWindow(subwin)
def index_of(self, child):
nchild = child.get_widget()
return self._get_native_index(nchild)
def index_to_widget(self, idx):
if 0 <= idx < len(self.children):
return self.children[idx]
return None
def tile_panes(self):
self.widget.tileSubWindows()
def cascade_panes(self):
self.widget.cascadeSubWindows()
def use_tabs(self, tf):
if tf:
self.widget.setViewMode(QtGui.QMdiArea.TabbedView)
else:
self.widget.setViewMode(QtGui.QMdiArea.SubWindowView)
class ScrollArea(ContainerBase):
def __init__(self):
super(ScrollArea, self).__init__()
self.widget = QtGui.QScrollArea()
self.widget.setWidgetResizable(True)
self.widget._resizeEvent = self.widget.resizeEvent
self.widget.resizeEvent = self._resize_cb
self.enable_callback('configure')
def _resize_cb(self, event):
self.widget._resizeEvent(event)
rect = self.widget.geometry()
x1, y1, x2, y2 = rect.getCoords()
width = x2 - x1
height = y2 - y1
self.make_callback('configure', width, height)
def set_widget(self, child):
self.add_ref(child)
self.widget.setWidget(child.get_widget())
def scroll_to_end(self, vertical=True, horizontal=False):
area = self.widget
if vertical:
area.verticalScrollBar().setValue(area.verticalScrollBar().maximum())
if horizontal:
area.horizontalScrollBar().setValue(area.horizontalScrollBar().maximum())
class Splitter(ContainerBase):
def __init__(self, orientation='horizontal'):
super(Splitter, self).__init__()
w = QtGui.QSplitter()
self.orientation = orientation
if orientation == 'horizontal':
w.setOrientation(QtCore.Qt.Horizontal)
else:
w.setOrientation(QtCore.Qt.Vertical)
self.widget = w
        # stretch factors must be integers; give the first two panes equal weight
        w.setStretchFactor(0, 1)
        w.setStretchFactor(1, 1)
def add_widget(self, child):
self.add_ref(child)
child_w = child.get_widget()
self.widget.addWidget(child_w)
def get_sizes(self):
return list(self.widget.sizes())
def set_sizes(self, sizes):
return self.widget.setSizes(sizes)
class GridBox(ContainerBase):
def __init__(self, rows=1, columns=1):
super(GridBox, self).__init__()
w = QtGui.QWidget()
layout = QtGui.QGridLayout()
w.setLayout(layout)
self.widget = w
def resize_grid(self, rows, columns):
pass
def set_row_spacing(self, val):
self.widget.layout().setVerticalSpacing(val)
def set_column_spacing(self, val):
self.widget.layout().setHorizontalSpacing(val)
def set_spacing(self, val):
self.set_row_spacing(val)
self.set_column_spacing(val)
def add_widget(self, child, row, col, stretch=0):
self.add_ref(child)
w = child.get_widget()
self.widget.layout().addWidget(w, row, col)
class ToolbarAction(WidgetBase):
def __init__(self):
super(ToolbarAction, self).__init__()
self.widget = None
self.enable_callback('activated')
def _cb_redirect(self, *args):
if self.widget.isCheckable():
tf = self.widget.isChecked()
self.make_callback('activated', tf)
else:
self.make_callback('activated')
def set_state(self, tf):
self.widget.setChecked(tf)
def get_state(self):
return self.widget.isChecked()
class Toolbar(ContainerBase):
def __init__(self, orientation='horizontal'):
super(Toolbar, self).__init__()
w = QtGui.QToolBar()
if orientation == 'horizontal':
w.setOrientation(QtCore.Qt.Horizontal)
else:
w.setOrientation(QtCore.Qt.Vertical)
self.widget = w
def add_action(self, text, toggle=False, iconpath=None):
child = ToolbarAction()
if iconpath:
image = QImage(iconpath)
qsize = QtCore.QSize(24, 24)
image = image.scaled(qsize)
pixmap = QPixmap.fromImage(image)
iconw = QIcon(pixmap)
action = self.widget.addAction(iconw, text,
child._cb_redirect)
else:
action = self.widget.addAction(text, child._cb_redirect)
action.setCheckable(toggle)
child.widget = action
self.add_ref(child)
return child
def add_widget(self, child):
self.add_ref(child)
w = child.get_widget()
self.widget.addWidget(w)
def add_menu(self, text, menu=None):
if menu is None:
menu = Menu()
child = self.add_action(text)
child.add_callback('activated', lambda w: menu.popup())
return menu
def add_separator(self):
self.widget.addSeparator()
class MenuAction(WidgetBase):
def __init__(self, text=None, checkable=False):
super(MenuAction, self).__init__()
self.widget = None
self.text = text
self.checkable = checkable
self.enable_callback('activated')
def set_state(self, tf):
if not self.checkable:
raise ValueError("Not a checkable menu item")
self.widget.setChecked(tf)
def get_state(self):
return self.widget.isChecked()
def _cb_redirect(self, *args):
if self.widget.isCheckable():
tf = self.widget.isChecked()
self.make_callback('activated', tf)
else:
self.make_callback('activated')
class Menu(ContainerBase):
def __init__(self):
super(Menu, self).__init__()
        # this gets overwritten if created from Menubar
self.widget = QtGui.QMenu()
def add_widget(self, child):
w = self.widget.addAction(child.text, lambda: child._cb_redirect())
if child.checkable:
w.setCheckable(True)
child.widget = w
self.add_ref(child)
def add_name(self, name, checkable=False):
child = MenuAction(text=name, checkable=checkable)
self.add_widget(child)
return child
def add_separator(self):
self.widget.addSeparator()
def popup(self, widget=None):
if widget is not None:
w = widget.get_widget()
#self.widget.popup(w.mapToGlobal(QtCore.QPoint(0, 0)))
self.widget.exec_(w.mapToGlobal(QtCore.QPoint(0, 0)))
else:
self.widget.exec_(QtGui.QCursor.pos())
class Menubar(ContainerBase):
def __init__(self):
super(Menubar, self).__init__()
self.widget = QtGui.QMenuBar()
def add_widget(self, child):
menu_w = child.get_widget()
self.widget.addMenu(menu_w)
self.add_ref(child)
def add_name(self, name):
menu_w = self.widget.addMenu(name)
child = Menu()
child.widget = menu_w
self.add_ref(child)
return child
class TopLevel(ContainerBase):
def __init__(self, title=None):
super(TopLevel, self).__init__()
widget = QtHelp.TopLevel()
self.widget = widget
box = QtGui.QVBoxLayout()
box.setContentsMargins(0, 0, 0, 0)
box.setSpacing(0)
widget.setLayout(box)
widget.closeEvent = lambda event: self._quit(event)
widget.destroyed = self._destroyed_cb
        if title is not None:
widget.setWindowTitle(title)
self.enable_callback('close')
def set_widget(self, child):
self.add_ref(child)
child_w = child.get_widget()
self.widget.layout().addWidget(child_w)
def _quit(self, event):
#event.accept()
# let application decide how to handle this
event.ignore()
self.close()
    def _closeEvent(self, *args):
        self.close()
def close(self):
#self.widget.deleteLater()
#self.widget = None
self.make_callback('close')
    def _destroyed_cb(self, *args):
        # the underlying Qt widget has been destroyed; nothing more to do here
        pass
def raise_(self):
self.widget.raise_()
self.widget.activateWindow()
def lower(self):
self.widget.lower()
def focus(self):
self.widget.raise_()
self.widget.activateWindow()
def move(self, x, y):
self.widget.move(x, y)
def maximize(self):
self.widget.showMaximized()
def unmaximize(self):
self.widget.showNormal()
def fullscreen(self):
self.widget.showFullScreen()
def unfullscreen(self):
self.widget.showNormal()
def is_fullscreen(self):
return self.widget.isFullScreen()
def iconify(self):
self.hide()
def uniconify(self):
self.widget.showNormal()
def set_title(self, title):
self.widget.setWindowTitle(title)
class Application(Callback.Callbacks):
def __init__(self, logger=None):
global _app
super(Application, self).__init__()
self.logger = logger
self.window_list = []
self.window_dict = {}
self.wincnt = 0
if have_pyqt4:
QtGui.QApplication.setGraphicsSystem('raster')
app = QtGui.QApplication([])
#app.lastWindowClosed.connect(lambda *args: self._quit())
self._qtapp = app
_app = self
# Get screen size
desktop = self._qtapp.desktop()
#rect = desktop.screenGeometry()
rect = desktop.availableGeometry()
size = rect.size()
self.screen_wd = size.width()
self.screen_ht = size.height()
for name in ('shutdown', ):
self.enable_callback(name)
def get_screen_size(self):
return (self.screen_wd, self.screen_ht)
def process_events(self):
self._qtapp.processEvents()
def process_end(self):
self._qtapp.quit()
def add_window(self, window, wid=None):
if wid is None:
wid = 'win%d' % (self.wincnt)
self.wincnt += 1
window.wid = wid
window.url = ''
window.app = self
self.window_dict[wid] = window
def get_window(self, wid):
return self.window_dict[wid]
def has_window(self, wid):
return wid in self.window_dict
def get_wids(self):
return list(self.window_dict.keys())
def make_window(self, title=None):
w = TopLevel(title=title)
self.add_window(w)
return w
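# A minimal end-to-end sketch (not part of the original module) tying
# Application, TopLevel and a few widgets together.  The underlying Qt
# widget is shown via get_widget() to avoid assuming extra helper methods;
# the Quit button simply asks the QApplication to exit via process_end().
def _example_application():
    app = Application()
    win = app.make_window(title='Example')
    vbox = VBox()
    vbox.add_widget(Label('Hello'), stretch=0)
    btn = Button('Quit')
    btn.add_callback('activated', lambda w: app.process_end())
    vbox.add_widget(btn, stretch=0)
    win.set_widget(vbox)
    win.get_widget().show()
    return app, win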
class Dialog(WidgetBase):
def __init__(self, title=None, flags=None, buttons=None,
parent=None):
super(Dialog, self).__init__()
self.widget = QtGui.QDialog(parent.get_widget())
self.widget.setModal(True)
vbox = QtGui.QVBoxLayout()
self.widget.setLayout(vbox)
self.content = VBox()
vbox.addWidget(self.content.get_widget(), stretch=1)
hbox_w = QtGui.QWidget()
hbox = QtGui.QHBoxLayout()
hbox_w.setLayout(hbox)
for name, val in buttons:
btn = QtGui.QPushButton(name)
def cb(val):
return lambda: self._cb_redirect(val)
btn.clicked.connect(cb(val))
hbox.addWidget(btn, stretch=0)
vbox.addWidget(hbox_w, stretch=0)
## self.widget.closeEvent = lambda event: self.delete()
self.enable_callback('activated')
def _cb_redirect(self, val):
self.make_callback('activated', val)
def get_content_area(self):
return self.content
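# A usage sketch for Dialog (not part of the original module).  `buttons` is
# a sequence of (label, value) pairs; the value of the pressed button is
# passed to the 'activated' callback.  `parent` must be another wrapper from
# this module, e.g. a TopLevel.  The underlying QDialog is shown directly;
# one could equally call exec_() on it for a blocking dialog.
def _example_dialog(parent_w):
    dlg = Dialog(title='Confirm', buttons=[('Cancel', 0), ('OK', 1)],
                 parent=parent_w)
    dlg.get_content_area().add_widget(Label('Proceed?'), stretch=0)
    # val is 0 for Cancel, 1 for OK in this example
    dlg.add_callback('activated', lambda w, val: w.get_widget().hide())
    dlg.get_widget().show()
    return dlg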
class SaveDialog(QtGui.QFileDialog):
def __init__(self, title=None, selectedfilter=None):
super(SaveDialog, self).__init__()
self.selectedfilter = selectedfilter
self.widget = self.getSaveFileName(self, title, '', selectedfilter)
def get_path(self):
        if (self.widget and self.selectedfilter is not None and
                not self.widget.endswith(self.selectedfilter[1:])):
            self.widget += self.selectedfilter[1:]
return self.widget
class DragPackage(object):
def __init__(self, src_widget):
self.src_widget = src_widget
self._drag = QtHelp.QDrag(self.src_widget)
def set_urls(self, urls):
mimeData = QtCore.QMimeData()
_urls = [ QtCore.QUrl(url) for url in urls ]
mimeData.setUrls(_urls)
self._drag.setMimeData(mimeData)
def start_drag(self):
if QtHelp.have_pyqt5:
result = self._drag.exec_(QtCore.Qt.MoveAction)
else:
result = self._drag.start(QtCore.Qt.MoveAction)
# MODULE FUNCTIONS
def name_mangle(name, pfx=''):
newname = []
for c in name.lower():
if not (c.isalpha() or c.isdigit() or (c == '_')):
newname.append('_')
else:
newname.append(c)
return pfx + ''.join(newname)
def make_widget(title, wtype):
if wtype == 'label':
w = Label(title)
w.widget.setAlignment(QtCore.Qt.AlignRight)
elif wtype == 'llabel':
w = Label(title)
w.widget.setAlignment(QtCore.Qt.AlignLeft)
elif wtype == 'entry':
w = TextEntry()
#w.widget.setMaxLength(12)
elif wtype == 'entryset':
w = TextEntrySet()
#w.widget.setMaxLength(12)
elif wtype == 'combobox':
w = ComboBox()
elif wtype == 'spinbutton':
w = SpinBox(dtype=int)
elif wtype == 'spinfloat':
w = SpinBox(dtype=float)
elif wtype == 'vbox':
w = VBox()
elif wtype == 'hbox':
w = HBox()
elif wtype == 'hscale':
w = Slider(orientation='horizontal')
elif wtype == 'vscale':
w = Slider(orientation='vertical')
elif wtype == 'checkbutton':
w = CheckBox(title)
elif wtype == 'radiobutton':
w = RadioButton(title)
elif wtype == 'togglebutton':
w = ToggleButton(title)
elif wtype == 'button':
w = Button(title)
elif wtype == 'spacer':
w = Label('')
elif wtype == 'textarea':
w = TextArea(editable=True)
elif wtype == 'toolbar':
w = Toolbar()
elif wtype == 'progress':
w = ProgressBar()
elif wtype == 'menubar':
w = Menubar()
else:
raise ValueError("Bad wtype=%s" % wtype)
return w
def hadjust(w, orientation):
if orientation != 'horizontal':
return w
vbox = VBox()
vbox.add_widget(w)
vbox.add_widget(Label(''), stretch=1)
return vbox
def build_info(captions, orientation='vertical'):
numrows = len(captions)
numcols = reduce(lambda acc, tup: max(acc, len(tup)), captions, 0)
if (numcols % 2) != 0:
raise ValueError("Column spec is not an even number")
numcols = int(numcols // 2)
widget = QtGui.QWidget()
table = QtGui.QGridLayout()
widget.setLayout(table)
table.setVerticalSpacing(2)
table.setHorizontalSpacing(4)
table.setContentsMargins(2, 2, 2, 2)
wb = Bunch.Bunch()
row = 0
for tup in captions:
col = 0
while col < numcols:
idx = col * 2
if idx < len(tup):
title, wtype = tup[idx:idx+2]
if not title.endswith(':'):
name = name_mangle(title)
else:
name = name_mangle('lbl_'+title[:-1])
w = make_widget(title, wtype)
table.addWidget(w.widget, row, col)
wb[name] = w
col += 1
row += 1
w = wrap(widget)
w = hadjust(w, orientation=orientation)
return w, wb
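# Illustrative example of the `captions` format accepted by build_info()
# (names are made up): each tuple holds an even number of entries, grouped
# as (title, widget_type) pairs, and the returned Bunch is keyed by the
# mangled titles (a trailing ':' marks a label and gets an 'lbl_' prefix).
#
#     captions = (('Cut Low:', 'label', 'Cut Low', 'entry'),
#                 ('Cut High:', 'label', 'Cut High', 'entry'),
#                 ('Apply', 'button', 'Reset', 'button'))
#     w, b = build_info(captions, orientation='vertical')
#     b.cut_low.set_text('0.0')           # the 'entry' widget
#     b.lbl_cut_low.set_text('Cut Low:')  # the 'label' widget
#     b.apply.add_callback('activated', lambda w: None)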
def wrap(native_widget):
wrapper = WidgetBase()
wrapper.widget = native_widget
return wrapper
def get_orientation(container):
if not hasattr(container, 'size'):
return 'vertical'
(wd, ht) = container.size
## wd, ht = container.get_size()
#print('container size is %dx%d' % (wd, ht))
if wd < ht:
return 'vertical'
else:
return 'horizontal'
def get_oriented_box(container, scrolled=True, fill=False):
orientation = get_orientation(container)
if orientation == 'vertical':
box1 = VBox()
box2 = VBox()
else:
box1 = HBox()
box2 = VBox()
box2.add_widget(box1, stretch=0)
if not fill:
box2.add_widget(Label(''), stretch=1)
if scrolled:
sw = ScrollArea()
sw.set_widget(box2)
else:
sw = box2
return box1, sw, orientation
#END
|
Cadair/ginga
|
ginga/qtw/Widgets.py
|
Python
|
bsd-3-clause
| 54,587
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import six
from senlin.common import consts
from senlin.common import exception as exc
from senlin.common.i18n import _
from senlin.common import schema
from senlin.common import utils
from senlin.profiles import base
LOG = logging.getLogger(__name__)
class StackProfile(base.Profile):
"""Profile for an OpenStack Heat stack."""
VERSIONS = {
'1.0': [
{'status': consts.SUPPORTED, 'since': '2016.04'}
]
}
KEYS = (
CONTEXT, TEMPLATE, TEMPLATE_URL, PARAMETERS,
FILES, TIMEOUT, DISABLE_ROLLBACK, ENVIRONMENT,
) = (
'context', 'template', 'template_url', 'parameters',
'files', 'timeout', 'disable_rollback', 'environment',
)
properties_schema = {
CONTEXT: schema.Map(
_('A dictionary for specifying the customized context for '
'stack operations'),
default={},
),
TEMPLATE: schema.Map(
_('Heat stack template.'),
default={},
updatable=True,
),
TEMPLATE_URL: schema.String(
_('Heat stack template url.'),
default='',
updatable=True,
),
PARAMETERS: schema.Map(
_('Parameters to be passed to Heat for stack operations.'),
default={},
updatable=True,
),
FILES: schema.Map(
_('Contents of files referenced by the template, if any.'),
default={},
updatable=True,
),
TIMEOUT: schema.Integer(
            _('An integer that specifies the number of minutes before a '
              'stack operation times out.'),
updatable=True,
),
DISABLE_ROLLBACK: schema.Boolean(
_('A boolean specifying whether a stack operation can be '
'rolled back.'),
default=True,
updatable=True,
),
ENVIRONMENT: schema.Map(
_('A map that specifies the environment used for stack '
'operations.'),
default={},
updatable=True,
)
}
OP_NAMES = (
OP_ABANDON,
) = (
'abandon',
)
OPERATIONS = {
OP_ABANDON: schema.Map(
_('Abandon a heat stack node.'),
)
}
def __init__(self, type_name, name, **kwargs):
super(StackProfile, self).__init__(type_name, name, **kwargs)
self.stack_id = None
def validate(self, validate_props=False):
"""Validate the schema and the data provided."""
# general validation
self.spec_data.validate()
self.properties.validate()
# validate template
template = self.properties[self.TEMPLATE]
template_url = self.properties[self.TEMPLATE_URL]
if not template and not template_url:
msg = _("Both template and template_url are not specified "
"for profile '%s'.") % self.name
raise exc.InvalidSpec(message=msg)
if validate_props:
self.do_validate(obj=self)
def do_validate(self, obj):
"""Validate the stack template used by a node.
:param obj: Node object to operate.
:returns: True if validation succeeds.
        :raises: `InvalidSpec` if the template is invalid.
"""
kwargs = {
'stack_name': utils.random_name(),
'template': self.properties[self.TEMPLATE],
'template_url': self.properties[self.TEMPLATE_URL],
'parameters': self.properties[self.PARAMETERS],
'files': self.properties[self.FILES],
'environment': self.properties[self.ENVIRONMENT],
'preview': True,
}
try:
self.orchestration(obj).stack_create(**kwargs)
except exc.InternalError as ex:
msg = _('Failed in validating template: %s') % six.text_type(ex)
raise exc.InvalidSpec(message=msg)
return True
def do_create(self, obj):
"""Create a heat stack using the given node object.
:param obj: The node object to operate on.
:returns: The UUID of the heat stack created.
"""
tags = ["cluster_node_id=%s" % obj.id]
if obj.cluster_id:
tags.append('cluster_id=%s' % obj.cluster_id)
tags.append('cluster_node_index=%s' % obj.index)
kwargs = {
'stack_name': obj.name + '-' + utils.random_name(8),
'template': self.properties[self.TEMPLATE],
'template_url': self.properties[self.TEMPLATE_URL],
'timeout_mins': self.properties[self.TIMEOUT],
'disable_rollback': self.properties[self.DISABLE_ROLLBACK],
'parameters': self.properties[self.PARAMETERS],
'files': self.properties[self.FILES],
'environment': self.properties[self.ENVIRONMENT],
'tags': ",".join(tags)
}
try:
stack = self.orchestration(obj).stack_create(**kwargs)
# Timeout = None means we will use the 'default_action_timeout'
# It can be overridden by the TIMEOUT profile properties
timeout = None
if self.properties[self.TIMEOUT]:
timeout = self.properties[self.TIMEOUT] * 60
self.orchestration(obj).wait_for_stack(stack.id, 'CREATE_COMPLETE',
timeout=timeout)
return stack.id
except exc.InternalError as ex:
raise exc.EResourceCreation(type='stack',
message=six.text_type(ex))
def do_delete(self, obj, **params):
"""Delete the physical stack behind the node object.
:param obj: The node object to operate on.
        :param dict params: Optional keyword arguments for the delete
                            operation.
:returns: This operation always returns True unless exception is
caught.
:raises: `EResourceDeletion` if interaction with heat fails.
"""
stack_id = obj.physical_id
if not stack_id:
return True
ignore_missing = params.get('ignore_missing', True)
try:
self.orchestration(obj).stack_delete(stack_id, ignore_missing)
self.orchestration(obj).wait_for_stack_delete(stack_id)
except exc.InternalError as ex:
raise exc.EResourceDeletion(type='stack', id=stack_id,
message=six.text_type(ex))
return True
def do_update(self, obj, new_profile, **params):
"""Perform update on object.
:param obj: the node object to operate on
:param new_profile: the new profile used for updating
:param params: other parameters for the update request.
:returns: A boolean indicating whether the operation is successful.
"""
self.stack_id = obj.physical_id
if not self.stack_id:
return False
if not self.validate_for_update(new_profile):
return False
fields = {}
new_template = new_profile.properties[new_profile.TEMPLATE]
if new_template != self.properties[self.TEMPLATE]:
fields['template'] = new_template
new_params = new_profile.properties[new_profile.PARAMETERS]
if new_params != self.properties[self.PARAMETERS]:
fields['parameters'] = new_params
new_timeout = new_profile.properties[new_profile.TIMEOUT]
if new_timeout != self.properties[self.TIMEOUT]:
fields['timeout_mins'] = new_timeout
new_dr = new_profile.properties[new_profile.DISABLE_ROLLBACK]
if new_dr != self.properties[self.DISABLE_ROLLBACK]:
fields['disable_rollback'] = new_dr
new_files = new_profile.properties[new_profile.FILES]
if new_files != self.properties[self.FILES]:
fields['files'] = new_files
new_environment = new_profile.properties[new_profile.ENVIRONMENT]
if new_environment != self.properties[self.ENVIRONMENT]:
fields['environment'] = new_environment
if not fields:
return True
try:
hc = self.orchestration(obj)
# Timeout = None means we will use the 'default_action_timeout'
# It can be overridden by the TIMEOUT profile properties
timeout = None
if self.properties[self.TIMEOUT]:
timeout = self.properties[self.TIMEOUT] * 60
hc.stack_update(self.stack_id, **fields)
hc.wait_for_stack(self.stack_id, 'UPDATE_COMPLETE',
timeout=timeout)
except exc.InternalError as ex:
raise exc.EResourceUpdate(type='stack', id=self.stack_id,
message=six.text_type(ex))
return True
def do_check(self, obj):
"""Check stack status.
:param obj: Node object to operate.
:returns: True if check succeeded, or False otherwise.
"""
stack_id = obj.physical_id
if stack_id is None:
return False
hc = self.orchestration(obj)
try:
# Timeout = None means we will use the 'default_action_timeout'
# It can be overridden by the TIMEOUT profile properties
timeout = None
if self.properties[self.TIMEOUT]:
timeout = self.properties[self.TIMEOUT] * 60
hc.stack_check(stack_id)
hc.wait_for_stack(stack_id, 'CHECK_COMPLETE', timeout=timeout)
except exc.InternalError as ex:
raise exc.EResourceOperation(op='checking', type='stack',
id=stack_id,
message=six.text_type(ex))
return True
def do_get_details(self, obj):
if not obj.physical_id:
return {}
try:
stack = self.orchestration(obj).stack_get(obj.physical_id)
return stack.to_dict()
except exc.InternalError as ex:
return {
'Error': {
'code': ex.code,
'message': six.text_type(ex)
}
}
def do_adopt(self, obj, overrides=None, snapshot=False):
"""Adopt an existing stack node for management.
:param obj: A node object for this operation. It could be a puppet
node that provides only 'user', 'project' and 'physical_id'
properties when doing a preview. It can be a real Node object for
node adoption.
:param overrides: A dict containing the properties that will be
overridden when generating a profile for the stack.
:param snapshot: A boolean flag indicating whether the profile should
attempt a snapshot operation before adopting the stack. If set to
True, the ID of the snapshot will be used as the image ID.
:returns: A dict containing the spec created from the stack object or
a dict containing error information if failure occurred.
"""
driver = self.orchestration(obj)
# TODO(Qiming): Add snapshot support
# snapshot = driver.snapshot_create(...)
try:
stack = driver.stack_get(obj.physical_id)
tmpl = driver.stack_get_template(obj.physical_id)
env = driver.stack_get_environment(obj.physical_id)
files = driver.stack_get_files(obj.physical_id)
except exc.InternalError as ex:
return {'Error': {'code': ex.code, 'message': six.text_type(ex)}}
spec = {
self.ENVIRONMENT: env.to_dict(),
self.FILES: files,
self.TEMPLATE: tmpl.to_dict(),
self.PARAMETERS: dict((k, v) for k, v in stack.parameters.items()
if k.find('OS::', 0) < 0),
self.TIMEOUT: stack.timeout_mins,
self.DISABLE_ROLLBACK: stack.is_rollback_disabled
}
if overrides:
spec.update(overrides)
return spec
def _refresh_tags(self, current, node, add=False):
"""Refresh tag list.
:param current: Current list of tags.
:param node: The node object.
:param add: Flag indicating whether new tags are added.
:returns: (tags, updated) where tags contains a new list of tags and
updated indicates whether new tag list differs from the old
one.
"""
tags = []
for tag in current:
if tag.find('cluster_id=') == 0:
continue
elif tag.find('cluster_node_id=') == 0:
continue
elif tag.find('cluster_node_index=') == 0:
continue
if tag.strip() != "":
tags.append(tag.strip())
if add:
tags.append('cluster_id=' + node.cluster_id)
tags.append('cluster_node_id=' + node.id)
tags.append('cluster_node_index=%s' % node.index)
tag_str = ",".join(tags)
return (tag_str, tags != current)
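    # Illustrative example (all values made up): for a node with
    # cluster_id='c1', id='n1' and index=2,
    #   _refresh_tags(['cluster_id=old', 'custom=x'], node, add=True)
    # drops the stale cluster_* tags, keeps 'custom=x' and returns
    # ('custom=x,cluster_id=c1,cluster_node_id=n1,cluster_node_index=2', True).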
def do_join(self, obj, cluster_id):
if not obj.physical_id:
return False
hc = self.orchestration(obj)
try:
stack = hc.stack_get(obj.physical_id)
tags, updated = self._refresh_tags(stack.tags, obj, True)
field = {'tags': tags}
if updated:
hc.stack_update(obj.physical_id, **field)
except exc.InternalError as ex:
LOG.error('Failed in updating stack tags: %s.', ex)
return False
return True
def do_leave(self, obj):
if not obj.physical_id:
return False
hc = self.orchestration(obj)
try:
stack = hc.stack_get(obj.physical_id)
tags, updated = self._refresh_tags(stack.tags, obj, False)
field = {'tags': tags}
if updated:
hc.stack_update(obj.physical_id, **field)
except exc.InternalError as ex:
LOG.error('Failed in updating stack tags: %s.', ex)
return False
return True
def handle_abandon(self, obj, **options):
"""Handler for abandoning a heat stack node."""
pass
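# Illustrative spec for this profile type (values are made up and only meant
# to show the property names defined above; this block is not used by the
# code):
#
#   type: os.heat.stack
#   version: 1.0
#   properties:
#     template:
#       heat_template_version: 2015-04-30
#       resources:
#         random:
#           type: OS::Heat::RandomString
#     parameters: {}
#     timeout: 60
#     disable_rollback: true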
|
stackforge/senlin
|
senlin/profiles/os/heat/stack.py
|
Python
|
apache-2.0
| 14,984
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Timing benchmark for AlexNet inference.
To run, use:
bazel run -c opt --config=cuda \
third_party/tensorflow/models/image/alexnet:alexnet_benchmark
Across 100 steps on batch size = 128.
Forward pass:
Run on Tesla K40c: 145 +/- 1.5 ms / batch
Run on Titan X: 70 +/- 0.1 ms / batch
Forward-backward pass:
Run on Tesla K40c: 480 +/- 48 ms / batch
Run on Titan X: 244 +/- 30 ms / batch
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
from datetime import datetime
import math
import time
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
FLAGS = None
def print_activations(t):
print(t.op.name, ' ', t.get_shape().as_list())
def inference(images):
"""Build the AlexNet model.
Args:
images: Images Tensor
Returns:
pool5: the last Tensor in the convolutional component of AlexNet.
parameters: a list of Tensors corresponding to the weights and biases of the
AlexNet model.
"""
parameters = []
# conv1
with tf.name_scope('conv1') as scope:
kernel = tf.Variable(tf.truncated_normal([11, 11, 3, 64], dtype=tf.float32,
stddev=1e-1), name='weights')
conv = tf.nn.conv2d(images, kernel, [1, 4, 4, 1], padding='SAME')
biases = tf.Variable(tf.constant(0.0, shape=[64], dtype=tf.float32),
trainable=True, name='biases')
bias = tf.nn.bias_add(conv, biases)
conv1 = tf.nn.relu(bias, name=scope)
print_activations(conv1)
parameters += [kernel, biases]
# lrn1
# TODO(shlens, jiayq): Add a GPU version of local response normalization.
# pool1
pool1 = tf.nn.max_pool(conv1,
ksize=[1, 3, 3, 1],
strides=[1, 2, 2, 1],
padding='VALID',
name='pool1')
print_activations(pool1)
# conv2
with tf.name_scope('conv2') as scope:
kernel = tf.Variable(tf.truncated_normal([5, 5, 64, 192], dtype=tf.float32,
stddev=1e-1), name='weights')
conv = tf.nn.conv2d(pool1, kernel, [1, 1, 1, 1], padding='SAME')
biases = tf.Variable(tf.constant(0.0, shape=[192], dtype=tf.float32),
trainable=True, name='biases')
bias = tf.nn.bias_add(conv, biases)
conv2 = tf.nn.relu(bias, name=scope)
parameters += [kernel, biases]
print_activations(conv2)
# pool2
pool2 = tf.nn.max_pool(conv2,
ksize=[1, 3, 3, 1],
strides=[1, 2, 2, 1],
padding='VALID',
name='pool2')
print_activations(pool2)
# conv3
with tf.name_scope('conv3') as scope:
kernel = tf.Variable(tf.truncated_normal([3, 3, 192, 384],
dtype=tf.float32,
stddev=1e-1), name='weights')
conv = tf.nn.conv2d(pool2, kernel, [1, 1, 1, 1], padding='SAME')
biases = tf.Variable(tf.constant(0.0, shape=[384], dtype=tf.float32),
trainable=True, name='biases')
bias = tf.nn.bias_add(conv, biases)
conv3 = tf.nn.relu(bias, name=scope)
parameters += [kernel, biases]
print_activations(conv3)
# conv4
with tf.name_scope('conv4') as scope:
kernel = tf.Variable(tf.truncated_normal([3, 3, 384, 256],
dtype=tf.float32,
stddev=1e-1), name='weights')
conv = tf.nn.conv2d(conv3, kernel, [1, 1, 1, 1], padding='SAME')
biases = tf.Variable(tf.constant(0.0, shape=[256], dtype=tf.float32),
trainable=True, name='biases')
bias = tf.nn.bias_add(conv, biases)
conv4 = tf.nn.relu(bias, name=scope)
parameters += [kernel, biases]
print_activations(conv4)
# conv5
with tf.name_scope('conv5') as scope:
kernel = tf.Variable(tf.truncated_normal([3, 3, 256, 256],
dtype=tf.float32,
stddev=1e-1), name='weights')
conv = tf.nn.conv2d(conv4, kernel, [1, 1, 1, 1], padding='SAME')
biases = tf.Variable(tf.constant(0.0, shape=[256], dtype=tf.float32),
trainable=True, name='biases')
bias = tf.nn.bias_add(conv, biases)
conv5 = tf.nn.relu(bias, name=scope)
parameters += [kernel, biases]
print_activations(conv5)
# pool5
pool5 = tf.nn.max_pool(conv5,
ksize=[1, 3, 3, 1],
strides=[1, 2, 2, 1],
padding='VALID',
name='pool5')
print_activations(pool5)
return pool5, parameters
def time_tensorflow_run(session, target, info_string):
"""Run the computation to obtain the target tensor and print timing stats.
Args:
session: the TensorFlow session to run the computation under.
target: the target Tensor that is passed to the session's run() function.
info_string: a string summarizing this run, to be printed with the stats.
Returns:
None
"""
num_steps_burn_in = 10
total_duration = 0.0
total_duration_squared = 0.0
for i in xrange(FLAGS.num_batches + num_steps_burn_in):
start_time = time.time()
_ = session.run(target)
duration = time.time() - start_time
if i >= num_steps_burn_in:
if not i % 10:
print ('%s: step %d, duration = %.3f' %
(datetime.now(), i - num_steps_burn_in, duration))
total_duration += duration
total_duration_squared += duration * duration
mn = total_duration / FLAGS.num_batches
vr = total_duration_squared / FLAGS.num_batches - mn * mn
sd = math.sqrt(vr)
print ('%s: %s across %d steps, %.3f +/- %.3f sec / batch' %
(datetime.now(), info_string, FLAGS.num_batches, mn, sd))
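# For example (made-up numbers), with num_batches=3 and per-batch durations
# of 0.10, 0.12 and 0.11 seconds after burn-in, the line above would report
# roughly 0.110 +/- 0.008 sec / batch.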
def run_benchmark():
"""Run the benchmark on AlexNet."""
with tf.Graph().as_default():
# Generate some dummy images.
image_size = 224
    # Note that our padding definition is slightly different from cuda-convnet.
# In order to force the model to start with the same activations sizes,
# we add 3 to the image_size and employ VALID padding above.
images = tf.Variable(tf.random_normal([FLAGS.batch_size,
image_size,
image_size, 3],
dtype=tf.float32,
stddev=1e-1))
# Build a Graph that computes the logits predictions from the
# inference model.
pool5, parameters = inference(images)
# Build an initialization operation.
init = tf.initialize_all_variables()
# Start running operations on the Graph.
config = tf.ConfigProto()
config.gpu_options.allocator_type = 'BFC'
sess = tf.Session(config=config)
sess.run(init)
# Run the forward benchmark.
time_tensorflow_run(sess, pool5, "Forward")
# Add a simple objective so we can calculate the backward pass.
objective = tf.nn.l2_loss(pool5)
# Compute the gradient with respect to all the parameters.
grad = tf.gradients(objective, parameters)
# Run the backward benchmark.
time_tensorflow_run(sess, grad, "Forward-backward")
def main(_):
run_benchmark()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--batch_size',
type=int,
default=128,
help='Batch size.'
)
parser.add_argument(
'--num_batches',
type=int,
default=100,
help='Number of batches to run.'
)
FLAGS = parser.parse_args()
tf.app.run()
|
cg31/tensorflow
|
tensorflow/models/image/alexnet/alexnet_benchmark.py
|
Python
|
apache-2.0
| 8,437
|
# -*- coding: utf-8 -*-
###############################################################################
#
# Tech-Receptives Solutions Pvt. Ltd.
# Copyright (C) 2009-TODAY Tech-Receptives(<http://www.techreceptives.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from openerp import models, fields, api
class OpPlacementOffer(models.Model):
_name = 'op.placement.offer'
_description = 'Placement Offer'
name = fields.Char('Company Name', required=True)
student_id = fields.Many2one('op.student', 'Student Name', required=True)
join_date = fields.Date('Join Date')
offer_package = fields.Char('Offered Package', size=256)
training_period = fields.Char('Training Period', size=256)
state = fields.Selection(
[('d', 'Draft'), ('o', 'Offer'), ('j', 'Join'), ('r', 'Rejected'),
('c', 'Cancel')], 'State', default='d')
@api.one
def placement_offer(self):
self.state = 'o'
@api.one
def placement_join(self):
self.state = 'j'
@api.one
def confirm_rejected(self):
self.state = 'r'
@api.one
def confirm_to_draft(self):
self.state = 'd'
@api.one
def confirm_cancel(self):
self.state = 'c'
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
mohamedhagag/community-addons
|
openeducat_erp/op_placement_offer/op_placement_offer.py
|
Python
|
agpl-3.0
| 2,017
|
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
import traceback
from swift import gettext_ as _
from xml.etree.cElementTree import Element, SubElement, tostring
from eventlet import Timeout
import swift.common.db
from swift.container.backend import ContainerBroker, DATADIR
from swift.container.replicator import ContainerReplicatorRpc
from swift.common.db import DatabaseAlreadyExists
from swift.common.container_sync_realms import ContainerSyncRealms
from swift.common.request_helpers import get_param, get_listing_content_type, \
split_and_validate_path, is_sys_or_user_meta
from swift.common.utils import get_logger, hash_path, public, \
Timestamp, storage_directory, validate_sync_to, \
config_true_value, json, timing_stats, replication, \
override_bytes_from_content_type, get_log_line
from swift.common.constraints import check_mount, valid_timestamp, check_utf8
from swift.common import constraints
from swift.common.bufferedhttp import http_connect
from swift.common.exceptions import ConnectionTimeout
from swift.common.http import HTTP_NOT_FOUND, is_success
from swift.common.storage_policy import POLICIES
from swift.common.base_storage_server import BaseStorageServer
from swift.common.swob import HTTPAccepted, HTTPBadRequest, HTTPConflict, \
HTTPCreated, HTTPInternalServerError, HTTPNoContent, HTTPNotFound, \
HTTPPreconditionFailed, HTTPMethodNotAllowed, Request, Response, \
HTTPInsufficientStorage, HTTPException, HeaderKeyDict
def gen_resp_headers(info, is_deleted=False):
"""
Convert container info dict to headers.
"""
# backend headers are always included
headers = {
'X-Backend-Timestamp': Timestamp(info.get('created_at', 0)).internal,
'X-Backend-PUT-Timestamp': Timestamp(info.get(
'put_timestamp', 0)).internal,
'X-Backend-DELETE-Timestamp': Timestamp(
info.get('delete_timestamp', 0)).internal,
'X-Backend-Status-Changed-At': Timestamp(
info.get('status_changed_at', 0)).internal,
'X-Backend-Storage-Policy-Index': info.get('storage_policy_index', 0),
}
if not is_deleted:
# base container info on deleted containers is not exposed to client
headers.update({
'X-Container-Object-Count': info.get('object_count', 0),
'X-Container-Bytes-Used': info.get('bytes_used', 0),
'X-Timestamp': Timestamp(info.get('created_at', 0)).normal,
'X-PUT-Timestamp': Timestamp(
info.get('put_timestamp', 0)).normal,
})
return headers
class ContainerController(BaseStorageServer):
"""WSGI Controller for the container server."""
# Ensure these are all lowercase
save_headers = ['x-container-read', 'x-container-write',
'x-container-sync-key', 'x-container-sync-to']
server_type = 'container-server'
def __init__(self, conf, logger=None):
super(ContainerController, self).__init__(conf)
self.logger = logger or get_logger(conf, log_route='container-server')
self.log_requests = config_true_value(conf.get('log_requests', 'true'))
self.root = conf.get('devices', '/srv/node')
self.mount_check = config_true_value(conf.get('mount_check', 'true'))
self.node_timeout = int(conf.get('node_timeout', 3))
self.conn_timeout = float(conf.get('conn_timeout', 0.5))
#: ContainerSyncCluster instance for validating sync-to values.
self.realms_conf = ContainerSyncRealms(
os.path.join(
conf.get('swift_dir', '/etc/swift'),
'container-sync-realms.conf'),
self.logger)
#: The list of hosts we're allowed to send syncs to. This can be
#: overridden by data in self.realms_conf
self.allowed_sync_hosts = [
h.strip()
for h in conf.get('allowed_sync_hosts', '127.0.0.1').split(',')
if h.strip()]
self.replicator_rpc = ContainerReplicatorRpc(
self.root, DATADIR, ContainerBroker, self.mount_check,
logger=self.logger)
self.auto_create_account_prefix = \
conf.get('auto_create_account_prefix') or '.'
if config_true_value(conf.get('allow_versions', 'f')):
self.save_headers.append('x-versions-location')
swift.common.db.DB_PREALLOCATION = \
config_true_value(conf.get('db_preallocation', 'f'))
def _get_container_broker(self, drive, part, account, container, **kwargs):
"""
Get a DB broker for the container.
:param drive: drive that holds the container
:param part: partition the container is in
:param account: account name
:param container: container name
:returns: ContainerBroker object
"""
hsh = hash_path(account, container)
db_dir = storage_directory(DATADIR, part, hsh)
db_path = os.path.join(self.root, drive, db_dir, hsh + '.db')
kwargs.setdefault('account', account)
kwargs.setdefault('container', container)
kwargs.setdefault('logger', self.logger)
return ContainerBroker(db_path, **kwargs)
def get_and_validate_policy_index(self, req):
"""
Validate that the index supplied maps to a policy.
:returns: policy index from request, or None if not present
:raises: HTTPBadRequest if the supplied index is bogus
"""
policy_index = req.headers.get('X-Backend-Storage-Policy-Index', None)
if policy_index is None:
return None
try:
policy_index = int(policy_index)
except ValueError:
raise HTTPBadRequest(
request=req, content_type="text/plain",
body=("Invalid X-Storage-Policy-Index %r" % policy_index))
policy = POLICIES.get_by_index(policy_index)
if policy is None:
raise HTTPBadRequest(
request=req, content_type="text/plain",
body=("Invalid X-Storage-Policy-Index %r" % policy_index))
return int(policy)
def account_update(self, req, account, container, broker):
"""
Update the account server(s) with latest container info.
:param req: swob.Request object
:param account: account name
:param container: container name
:param broker: container DB broker object
:returns: if all the account requests return a 404 error code,
HTTPNotFound response object,
if the account cannot be updated due to a malformed header,
an HTTPBadRequest response object,
otherwise None.
"""
account_hosts = [h.strip() for h in
req.headers.get('X-Account-Host', '').split(',')]
account_devices = [d.strip() for d in
req.headers.get('X-Account-Device', '').split(',')]
account_partition = req.headers.get('X-Account-Partition', '')
if len(account_hosts) != len(account_devices):
# This shouldn't happen unless there's a bug in the proxy,
# but if there is, we want to know about it.
self.logger.error(_('ERROR Account update failed: different '
'numbers of hosts and devices in request: '
'"%s" vs "%s"') %
(req.headers.get('X-Account-Host', ''),
req.headers.get('X-Account-Device', '')))
return HTTPBadRequest(req=req)
if account_partition:
# zip is lazy on py3, but we need a list, so force evaluation.
# On py2 it's an extra list copy, but the list is so small
# (one element per replica in account ring, usually 3) that it
# doesn't matter.
updates = list(zip(account_hosts, account_devices))
else:
updates = []
account_404s = 0
for account_host, account_device in updates:
account_ip, account_port = account_host.rsplit(':', 1)
new_path = '/' + '/'.join([account, container])
info = broker.get_info()
account_headers = HeaderKeyDict({
'x-put-timestamp': info['put_timestamp'],
'x-delete-timestamp': info['delete_timestamp'],
'x-object-count': info['object_count'],
'x-bytes-used': info['bytes_used'],
'x-trans-id': req.headers.get('x-trans-id', '-'),
'X-Backend-Storage-Policy-Index': info['storage_policy_index'],
'user-agent': 'container-server %s' % os.getpid(),
'referer': req.as_referer()})
if req.headers.get('x-account-override-deleted', 'no').lower() == \
'yes':
account_headers['x-account-override-deleted'] = 'yes'
try:
with ConnectionTimeout(self.conn_timeout):
conn = http_connect(
account_ip, account_port, account_device,
account_partition, 'PUT', new_path, account_headers)
with Timeout(self.node_timeout):
account_response = conn.getresponse()
account_response.read()
if account_response.status == HTTP_NOT_FOUND:
account_404s += 1
elif not is_success(account_response.status):
self.logger.error(_(
'ERROR Account update failed '
'with %(ip)s:%(port)s/%(device)s (will retry '
'later): Response %(status)s %(reason)s'),
{'ip': account_ip, 'port': account_port,
'device': account_device,
'status': account_response.status,
'reason': account_response.reason})
except (Exception, Timeout):
self.logger.exception(_(
'ERROR account update failed with '
'%(ip)s:%(port)s/%(device)s (will retry later)'),
{'ip': account_ip, 'port': account_port,
'device': account_device})
if updates and account_404s == len(updates):
return HTTPNotFound(req=req)
else:
return None
@public
@timing_stats()
def DELETE(self, req):
"""Handle HTTP DELETE request."""
drive, part, account, container, obj = split_and_validate_path(
req, 4, 5, True)
req_timestamp = valid_timestamp(req)
if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req)
# policy index is only relevant for delete_obj (and transitively for
# auto create accounts)
obj_policy_index = self.get_and_validate_policy_index(req) or 0
broker = self._get_container_broker(drive, part, account, container)
if account.startswith(self.auto_create_account_prefix) and obj and \
not os.path.exists(broker.db_file):
try:
broker.initialize(req_timestamp.internal, obj_policy_index)
except DatabaseAlreadyExists:
pass
if not os.path.exists(broker.db_file):
return HTTPNotFound()
if obj: # delete object
broker.delete_object(obj, req.headers.get('x-timestamp'),
obj_policy_index)
return HTTPNoContent(request=req)
else:
# delete container
if not broker.empty():
return HTTPConflict(request=req)
existed = Timestamp(broker.get_info()['put_timestamp']) and \
not broker.is_deleted()
broker.delete_db(req_timestamp.internal)
if not broker.is_deleted():
return HTTPConflict(request=req)
resp = self.account_update(req, account, container, broker)
if resp:
return resp
if existed:
return HTTPNoContent(request=req)
return HTTPNotFound()
def _update_or_create(self, req, broker, timestamp, new_container_policy,
requested_policy_index):
"""
Create new database broker or update timestamps for existing database.
:param req: the swob request object
:param broker: the broker instance for the container
:param timestamp: internalized timestamp
:param new_container_policy: the storage policy index to use
when creating the container
:param requested_policy_index: the storage policy index sent in the
request, may be None
:returns: created, a bool, if database did not previously exist
"""
if not os.path.exists(broker.db_file):
try:
broker.initialize(timestamp, new_container_policy)
except DatabaseAlreadyExists:
pass
else:
return True # created
recreated = broker.is_deleted()
if recreated:
# only set storage policy on deleted containers
broker.set_storage_policy_index(new_container_policy,
timestamp=timestamp)
elif requested_policy_index is not None:
# validate requested policy with existing container
if requested_policy_index != broker.storage_policy_index:
raise HTTPConflict(request=req,
headers={'x-backend-storage-policy-index':
broker.storage_policy_index})
broker.update_put_timestamp(timestamp)
if broker.is_deleted():
raise HTTPConflict(request=req)
if recreated:
broker.update_status_changed_at(timestamp)
return recreated
@public
@timing_stats()
def PUT(self, req):
"""Handle HTTP PUT request."""
drive, part, account, container, obj = split_and_validate_path(
req, 4, 5, True)
req_timestamp = valid_timestamp(req)
if 'x-container-sync-to' in req.headers:
err, sync_to, realm, realm_key = validate_sync_to(
req.headers['x-container-sync-to'], self.allowed_sync_hosts,
self.realms_conf)
if err:
return HTTPBadRequest(err)
if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req)
requested_policy_index = self.get_and_validate_policy_index(req)
broker = self._get_container_broker(drive, part, account, container)
if obj: # put container object
# obj put expects the policy_index header, default is for
# legacy support during upgrade.
obj_policy_index = requested_policy_index or 0
if account.startswith(self.auto_create_account_prefix) and \
not os.path.exists(broker.db_file):
try:
broker.initialize(req_timestamp.internal, obj_policy_index)
except DatabaseAlreadyExists:
pass
if not os.path.exists(broker.db_file):
return HTTPNotFound()
broker.put_object(obj, req_timestamp.internal,
int(req.headers['x-size']),
req.headers['x-content-type'],
req.headers['x-etag'], 0,
obj_policy_index)
return HTTPCreated(request=req)
else: # put container
if requested_policy_index is None:
# use the default index sent by the proxy if available
new_container_policy = req.headers.get(
'X-Backend-Storage-Policy-Default', int(POLICIES.default))
else:
new_container_policy = requested_policy_index
created = self._update_or_create(req, broker,
req_timestamp.internal,
new_container_policy,
requested_policy_index)
metadata = {}
metadata.update(
(key, (value, req_timestamp.internal))
for key, value in req.headers.items()
if key.lower() in self.save_headers or
is_sys_or_user_meta('container', key))
if 'X-Container-Sync-To' in metadata:
if 'X-Container-Sync-To' not in broker.metadata or \
metadata['X-Container-Sync-To'][0] != \
broker.metadata['X-Container-Sync-To'][0]:
broker.set_x_container_sync_points(-1, -1)
broker.update_metadata(metadata, validate_metadata=True)
resp = self.account_update(req, account, container, broker)
if resp:
return resp
if created:
return HTTPCreated(request=req,
headers={'x-backend-storage-policy-index':
broker.storage_policy_index})
else:
return HTTPAccepted(request=req,
headers={'x-backend-storage-policy-index':
broker.storage_policy_index})
@public
@timing_stats(sample_rate=0.1)
def HEAD(self, req):
"""Handle HTTP HEAD request."""
drive, part, account, container, obj = split_and_validate_path(
req, 4, 5, True)
out_content_type = get_listing_content_type(req)
if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req)
broker = self._get_container_broker(drive, part, account, container,
pending_timeout=0.1,
stale_reads_ok=True)
info, is_deleted = broker.get_info_is_deleted()
headers = gen_resp_headers(info, is_deleted=is_deleted)
if is_deleted:
return HTTPNotFound(request=req, headers=headers)
headers.update(
(key, value)
for key, (value, timestamp) in broker.metadata.items()
if value != '' and (key.lower() in self.save_headers or
is_sys_or_user_meta('container', key)))
headers['Content-Type'] = out_content_type
return HTTPNoContent(request=req, headers=headers, charset='utf-8')
def update_data_record(self, record):
"""
Perform any mutations to container listing records that are common to
all serialization formats, and returns it as a dict.
Converts created time to iso timestamp.
Replaces size with 'swift_bytes' content type parameter.
:params record: object entry record
:returns: modified record
"""
(name, created, size, content_type, etag) = record[:5]
if content_type is None:
return {'subdir': name}
response = {'bytes': size, 'hash': etag, 'name': name,
'content_type': content_type}
response['last_modified'] = Timestamp(created).isoformat
override_bytes_from_content_type(response, logger=self.logger)
return response
@public
@timing_stats()
def GET(self, req):
"""Handle HTTP GET request."""
drive, part, account, container, obj = split_and_validate_path(
req, 4, 5, True)
path = get_param(req, 'path')
prefix = get_param(req, 'prefix')
delimiter = get_param(req, 'delimiter')
if delimiter and (len(delimiter) > 1 or ord(delimiter) > 254):
# delimiters can be made more flexible later
return HTTPPreconditionFailed(body='Bad delimiter')
marker = get_param(req, 'marker', '')
end_marker = get_param(req, 'end_marker')
limit = constraints.CONTAINER_LISTING_LIMIT
given_limit = get_param(req, 'limit')
if given_limit and given_limit.isdigit():
limit = int(given_limit)
if limit > constraints.CONTAINER_LISTING_LIMIT:
return HTTPPreconditionFailed(
request=req,
body='Maximum limit is %d'
% constraints.CONTAINER_LISTING_LIMIT)
out_content_type = get_listing_content_type(req)
if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req)
broker = self._get_container_broker(drive, part, account, container,
pending_timeout=0.1,
stale_reads_ok=True)
info, is_deleted = broker.get_info_is_deleted()
resp_headers = gen_resp_headers(info, is_deleted=is_deleted)
if is_deleted:
return HTTPNotFound(request=req, headers=resp_headers)
container_list = broker.list_objects_iter(
limit, marker, end_marker, prefix, delimiter, path,
storage_policy_index=info['storage_policy_index'])
return self.create_listing(req, out_content_type, info, resp_headers,
broker.metadata, container_list, container)
def create_listing(self, req, out_content_type, info, resp_headers,
metadata, container_list, container):
for key, (value, timestamp) in metadata.items():
if value and (key.lower() in self.save_headers or
is_sys_or_user_meta('container', key)):
resp_headers[key] = value
ret = Response(request=req, headers=resp_headers,
content_type=out_content_type, charset='utf-8')
if out_content_type == 'application/json':
ret.body = json.dumps([self.update_data_record(record)
for record in container_list])
elif out_content_type.endswith('/xml'):
doc = Element('container', name=container.decode('utf-8'))
for obj in container_list:
record = self.update_data_record(obj)
if 'subdir' in record:
name = record['subdir'].decode('utf-8')
sub = SubElement(doc, 'subdir', name=name)
SubElement(sub, 'name').text = name
else:
obj_element = SubElement(doc, 'object')
for field in ["name", "hash", "bytes", "content_type",
"last_modified"]:
SubElement(obj_element, field).text = str(
record.pop(field)).decode('utf-8')
for field in sorted(record):
SubElement(obj_element, field).text = str(
record[field]).decode('utf-8')
ret.body = tostring(doc, encoding='UTF-8').replace(
"<?xml version='1.0' encoding='UTF-8'?>",
'<?xml version="1.0" encoding="UTF-8"?>', 1)
else:
if not container_list:
return HTTPNoContent(request=req, headers=resp_headers)
ret.body = '\n'.join(rec[0] for rec in container_list) + '\n'
return ret
@public
@replication
@timing_stats(sample_rate=0.01)
def REPLICATE(self, req):
"""
        Handle HTTP REPLICATE request (json-encoded RPC calls for replication).
"""
post_args = split_and_validate_path(req, 3)
drive, partition, hash = post_args
if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req)
try:
args = json.load(req.environ['wsgi.input'])
except ValueError as err:
return HTTPBadRequest(body=str(err), content_type='text/plain')
ret = self.replicator_rpc.dispatch(post_args, args)
ret.request = req
return ret
@public
@timing_stats()
def POST(self, req):
"""Handle HTTP POST request."""
drive, part, account, container = split_and_validate_path(req, 4)
req_timestamp = valid_timestamp(req)
if 'x-container-sync-to' in req.headers:
err, sync_to, realm, realm_key = validate_sync_to(
req.headers['x-container-sync-to'], self.allowed_sync_hosts,
self.realms_conf)
if err:
return HTTPBadRequest(err)
if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req)
broker = self._get_container_broker(drive, part, account, container)
if broker.is_deleted():
return HTTPNotFound(request=req)
broker.update_put_timestamp(req_timestamp.internal)
metadata = {}
metadata.update(
(key, (value, req_timestamp.internal))
for key, value in req.headers.items()
if key.lower() in self.save_headers or
is_sys_or_user_meta('container', key))
if metadata:
if 'X-Container-Sync-To' in metadata:
if 'X-Container-Sync-To' not in broker.metadata or \
metadata['X-Container-Sync-To'][0] != \
broker.metadata['X-Container-Sync-To'][0]:
broker.set_x_container_sync_points(-1, -1)
broker.update_metadata(metadata, validate_metadata=True)
return HTTPNoContent(request=req)
def __call__(self, env, start_response):
start_time = time.time()
req = Request(env)
self.logger.txn_id = req.headers.get('x-trans-id', None)
if not check_utf8(req.path_info):
res = HTTPPreconditionFailed(body='Invalid UTF8 or contains NULL')
else:
try:
# disallow methods which have not been marked 'public'
try:
if req.method not in self.allowed_methods:
raise AttributeError('Not allowed method.')
except AttributeError:
res = HTTPMethodNotAllowed()
else:
method = getattr(self, req.method)
res = method(req)
except HTTPException as error_response:
res = error_response
except (Exception, Timeout):
self.logger.exception(_(
'ERROR __call__ error with %(method)s %(path)s '),
{'method': req.method, 'path': req.path})
res = HTTPInternalServerError(body=traceback.format_exc())
if self.log_requests:
trans_time = time.time() - start_time
log_message = get_log_line(req, res, trans_time, '')
if req.method.upper() == 'REPLICATE':
self.logger.debug(log_message)
else:
self.logger.info(log_message)
return res(env, start_response)
def app_factory(global_conf, **local_conf):
"""paste.deploy app factory for creating WSGI container server apps"""
conf = global_conf.copy()
conf.update(local_conf)
return ContainerController(conf)
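# Illustrative wiring sketch (configuration values are assumptions, not part
# of this module): a paste.deploy config typically points at app_factory via
# the swift egg entry point, e.g.
#   [app:container-server]
#   use = egg:swift#container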
|
hbhdytf/mac
|
swift/container/server.py
|
Python
|
apache-2.0
| 28,271
|
from getTerminalSize import getTerminalSize
def pictureShower(url, cookies = None):
import urllib2
try:
import PIL.Image as Image
from StringIO import StringIO
print('Downloading image ...')
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies))
urllib2.install_opener(opener)
im = Image.open(StringIO(urllib2.urlopen(url).read()))
w = getTerminalSize()[0]
im = im.resize((w,w/5),Image.ANTIALIAS)
#im.show()
red = ""
for y in range(im.size[1]):
for x in range(im.size[0]):
red += " " if im.getpixel((x,y))[0] > 150 else "#"
if len(red.split()): print (red)
red = ""
except:
        # TODO: decide what to do if PIL is not installed
raise
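# Example usage sketch (the URL below is hypothetical):
#   pictureShower('http://example.com/picture.png')
# prints an ASCII rendering of the downloaded image, scaled to the terminal
# width, using '#' for dark pixels and spaces for light ones.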
|
charlieamer/Euler-Task
|
EulerTask/pictureShower.py
|
Python
|
gpl-2.0
| 678
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
def register_types(module):
root_module = module.get_root()
## average.h: ns3::Average<double> [class]
module.add_class('Average', template_parameters=['double'])
## delay-jitter-estimation.h: ns3::DelayJitterEstimation [class]
module.add_class('DelayJitterEstimation')
## event-garbage-collector.h: ns3::EventGarbageCollector [class]
module.add_class('EventGarbageCollector')
## gnuplot.h: ns3::Gnuplot [class]
module.add_class('Gnuplot')
## gnuplot.h: ns3::GnuplotCollection [class]
module.add_class('GnuplotCollection')
## gnuplot.h: ns3::GnuplotDataset [class]
module.add_class('GnuplotDataset')
## gnuplot.h: ns3::Gnuplot2dDataset [class]
module.add_class('Gnuplot2dDataset', parent=root_module['ns3::GnuplotDataset'])
## gnuplot.h: ns3::Gnuplot2dDataset::Style [enumeration]
module.add_enum('Style', ['LINES', 'POINTS', 'LINES_POINTS', 'DOTS', 'IMPULSES', 'STEPS', 'FSTEPS', 'HISTEPS'], outer_class=root_module['ns3::Gnuplot2dDataset'])
## gnuplot.h: ns3::Gnuplot2dDataset::ErrorBars [enumeration]
module.add_enum('ErrorBars', ['NONE', 'X', 'Y', 'XY'], outer_class=root_module['ns3::Gnuplot2dDataset'])
## gnuplot.h: ns3::Gnuplot2dFunction [class]
module.add_class('Gnuplot2dFunction', parent=root_module['ns3::GnuplotDataset'])
## gnuplot.h: ns3::Gnuplot3dDataset [class]
module.add_class('Gnuplot3dDataset', parent=root_module['ns3::GnuplotDataset'])
## gnuplot.h: ns3::Gnuplot3dFunction [class]
module.add_class('Gnuplot3dFunction', parent=root_module['ns3::GnuplotDataset'])
## Register a nested module for the namespace Config
nested_module = module.add_cpp_namespace('Config')
register_types_ns3_Config(nested_module)
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace addressUtils
nested_module = module.add_cpp_namespace('addressUtils')
register_types_ns3_addressUtils(nested_module)
## Register a nested module for the namespace aodv
nested_module = module.add_cpp_namespace('aodv')
register_types_ns3_aodv(nested_module)
## Register a nested module for the namespace dot11s
nested_module = module.add_cpp_namespace('dot11s')
register_types_ns3_dot11s(nested_module)
## Register a nested module for the namespace dsdv
nested_module = module.add_cpp_namespace('dsdv')
register_types_ns3_dsdv(nested_module)
## Register a nested module for the namespace flame
nested_module = module.add_cpp_namespace('flame')
register_types_ns3_flame(nested_module)
## Register a nested module for the namespace internal
nested_module = module.add_cpp_namespace('internal')
register_types_ns3_internal(nested_module)
## Register a nested module for the namespace olsr
nested_module = module.add_cpp_namespace('olsr')
register_types_ns3_olsr(nested_module)
def register_types_ns3_Config(module):
root_module = module.get_root()
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_types_ns3_addressUtils(module):
root_module = module.get_root()
def register_types_ns3_aodv(module):
root_module = module.get_root()
def register_types_ns3_dot11s(module):
root_module = module.get_root()
def register_types_ns3_dsdv(module):
root_module = module.get_root()
def register_types_ns3_flame(module):
root_module = module.get_root()
def register_types_ns3_internal(module):
root_module = module.get_root()
def register_types_ns3_olsr(module):
root_module = module.get_root()
def register_methods(root_module):
register_Ns3Average__Double_methods(root_module, root_module['ns3::Average< double >'])
register_Ns3DelayJitterEstimation_methods(root_module, root_module['ns3::DelayJitterEstimation'])
register_Ns3EventGarbageCollector_methods(root_module, root_module['ns3::EventGarbageCollector'])
register_Ns3Gnuplot_methods(root_module, root_module['ns3::Gnuplot'])
register_Ns3GnuplotCollection_methods(root_module, root_module['ns3::GnuplotCollection'])
register_Ns3GnuplotDataset_methods(root_module, root_module['ns3::GnuplotDataset'])
register_Ns3Gnuplot2dDataset_methods(root_module, root_module['ns3::Gnuplot2dDataset'])
register_Ns3Gnuplot2dFunction_methods(root_module, root_module['ns3::Gnuplot2dFunction'])
register_Ns3Gnuplot3dDataset_methods(root_module, root_module['ns3::Gnuplot3dDataset'])
register_Ns3Gnuplot3dFunction_methods(root_module, root_module['ns3::Gnuplot3dFunction'])
return
def register_Ns3Average__Double_methods(root_module, cls):
## average.h: ns3::Average<double>::Average(ns3::Average<double> const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Average< double > const &', 'arg0')])
## average.h: ns3::Average<double>::Average() [constructor]
cls.add_constructor([])
## average.h: double ns3::Average<double>::Avg() const [member function]
cls.add_method('Avg',
'double',
[],
is_const=True)
## average.h: uint32_t ns3::Average<double>::Count() const [member function]
cls.add_method('Count',
'uint32_t',
[],
is_const=True)
## average.h: double ns3::Average<double>::Error90() const [member function]
cls.add_method('Error90',
'double',
[],
is_const=True)
## average.h: double ns3::Average<double>::Error95() const [member function]
cls.add_method('Error95',
'double',
[],
is_const=True)
## average.h: double ns3::Average<double>::Error99() const [member function]
cls.add_method('Error99',
'double',
[],
is_const=True)
## average.h: double ns3::Average<double>::Max() const [member function]
cls.add_method('Max',
'double',
[],
is_const=True)
## average.h: double ns3::Average<double>::Mean() const [member function]
cls.add_method('Mean',
'double',
[],
is_const=True)
## average.h: double ns3::Average<double>::Min() const [member function]
cls.add_method('Min',
'double',
[],
is_const=True)
## average.h: void ns3::Average<double>::Reset() [member function]
cls.add_method('Reset',
'void',
[])
## average.h: double ns3::Average<double>::Stddev() const [member function]
cls.add_method('Stddev',
'double',
[],
is_const=True)
## average.h: void ns3::Average<double>::Update(double const & x) [member function]
cls.add_method('Update',
'void',
[param('double const &', 'x')])
## average.h: double ns3::Average<double>::Var() const [member function]
cls.add_method('Var',
'double',
[],
is_const=True)
return
def register_Ns3DelayJitterEstimation_methods(root_module, cls):
## delay-jitter-estimation.h: ns3::DelayJitterEstimation::DelayJitterEstimation(ns3::DelayJitterEstimation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DelayJitterEstimation const &', 'arg0')])
## delay-jitter-estimation.h: ns3::DelayJitterEstimation::DelayJitterEstimation() [constructor]
cls.add_constructor([])
## delay-jitter-estimation.h: ns3::Time ns3::DelayJitterEstimation::GetLastDelay() const [member function]
cls.add_method('GetLastDelay',
'ns3::Time',
[],
is_const=True)
## delay-jitter-estimation.h: ns3::Time ns3::DelayJitterEstimation::GetLastJitter() const [member function]
cls.add_method('GetLastJitter',
'ns3::Time',
[],
is_const=True)
## delay-jitter-estimation.h: static void ns3::DelayJitterEstimation::PrepareTx(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('PrepareTx',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')],
is_static=True)
## delay-jitter-estimation.h: void ns3::DelayJitterEstimation::RecordRx(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('RecordRx',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
return
def register_Ns3EventGarbageCollector_methods(root_module, cls):
## event-garbage-collector.h: ns3::EventGarbageCollector::EventGarbageCollector(ns3::EventGarbageCollector const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventGarbageCollector const &', 'arg0')])
## event-garbage-collector.h: ns3::EventGarbageCollector::EventGarbageCollector() [constructor]
cls.add_constructor([])
## event-garbage-collector.h: void ns3::EventGarbageCollector::Track(ns3::EventId event) [member function]
cls.add_method('Track',
'void',
[param('ns3::EventId', 'event')])
return
def register_Ns3Gnuplot_methods(root_module, cls):
## gnuplot.h: ns3::Gnuplot::Gnuplot(ns3::Gnuplot const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Gnuplot const &', 'arg0')])
## gnuplot.h: ns3::Gnuplot::Gnuplot(std::string const & outputFilename="", std::string const & title="") [constructor]
cls.add_constructor([param('std::string const &', 'outputFilename', default_value='""'), param('std::string const &', 'title', default_value='""')])
## gnuplot.h: void ns3::Gnuplot::AddDataset(ns3::GnuplotDataset const & dataset) [member function]
cls.add_method('AddDataset',
'void',
[param('ns3::GnuplotDataset const &', 'dataset')])
## gnuplot.h: void ns3::Gnuplot::AppendExtra(std::string const & extra) [member function]
cls.add_method('AppendExtra',
'void',
[param('std::string const &', 'extra')])
## gnuplot.h: static std::string ns3::Gnuplot::DetectTerminal(std::string const & filename) [member function]
cls.add_method('DetectTerminal',
'std::string',
[param('std::string const &', 'filename')],
is_static=True)
## gnuplot.h: void ns3::Gnuplot::GenerateOutput(std::ostream & os) const [member function]
cls.add_method('GenerateOutput',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## gnuplot.h: void ns3::Gnuplot::SetExtra(std::string const & extra) [member function]
cls.add_method('SetExtra',
'void',
[param('std::string const &', 'extra')])
## gnuplot.h: void ns3::Gnuplot::SetLegend(std::string const & xLegend, std::string const & yLegend) [member function]
cls.add_method('SetLegend',
'void',
[param('std::string const &', 'xLegend'), param('std::string const &', 'yLegend')])
## gnuplot.h: void ns3::Gnuplot::SetTerminal(std::string const & terminal) [member function]
cls.add_method('SetTerminal',
'void',
[param('std::string const &', 'terminal')])
## gnuplot.h: void ns3::Gnuplot::SetTitle(std::string const & title) [member function]
cls.add_method('SetTitle',
'void',
[param('std::string const &', 'title')])
return
def register_Ns3GnuplotCollection_methods(root_module, cls):
## gnuplot.h: ns3::GnuplotCollection::GnuplotCollection(ns3::GnuplotCollection const & arg0) [copy constructor]
cls.add_constructor([param('ns3::GnuplotCollection const &', 'arg0')])
## gnuplot.h: ns3::GnuplotCollection::GnuplotCollection(std::string const & outputFilename) [constructor]
cls.add_constructor([param('std::string const &', 'outputFilename')])
## gnuplot.h: void ns3::GnuplotCollection::AddPlot(ns3::Gnuplot const & plot) [member function]
cls.add_method('AddPlot',
'void',
[param('ns3::Gnuplot const &', 'plot')])
## gnuplot.h: void ns3::GnuplotCollection::GenerateOutput(std::ostream & os) const [member function]
cls.add_method('GenerateOutput',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## gnuplot.h: ns3::Gnuplot & ns3::GnuplotCollection::GetPlot(unsigned int id) [member function]
cls.add_method('GetPlot',
'ns3::Gnuplot &',
[param('unsigned int', 'id')])
## gnuplot.h: void ns3::GnuplotCollection::SetTerminal(std::string const & terminal) [member function]
cls.add_method('SetTerminal',
'void',
[param('std::string const &', 'terminal')])
return
def register_Ns3GnuplotDataset_methods(root_module, cls):
## gnuplot.h: ns3::GnuplotDataset::GnuplotDataset(ns3::GnuplotDataset const & original) [copy constructor]
cls.add_constructor([param('ns3::GnuplotDataset const &', 'original')])
## gnuplot.h: static void ns3::GnuplotDataset::SetDefaultExtra(std::string const & extra) [member function]
cls.add_method('SetDefaultExtra',
'void',
[param('std::string const &', 'extra')],
is_static=True)
## gnuplot.h: void ns3::GnuplotDataset::SetExtra(std::string const & extra) [member function]
cls.add_method('SetExtra',
'void',
[param('std::string const &', 'extra')])
## gnuplot.h: void ns3::GnuplotDataset::SetTitle(std::string const & title) [member function]
cls.add_method('SetTitle',
'void',
[param('std::string const &', 'title')])
## gnuplot.h: ns3::GnuplotDataset::GnuplotDataset(ns3::GnuplotDataset::Data * data) [constructor]
cls.add_constructor([param('ns3::GnuplotDataset::Data *', 'data')],
visibility='protected')
return
def register_Ns3Gnuplot2dDataset_methods(root_module, cls):
## gnuplot.h: ns3::Gnuplot2dDataset::Gnuplot2dDataset(ns3::Gnuplot2dDataset const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Gnuplot2dDataset const &', 'arg0')])
## gnuplot.h: ns3::Gnuplot2dDataset::Gnuplot2dDataset(std::string const & title="Untitled") [constructor]
cls.add_constructor([param('std::string const &', 'title', default_value='"Untitled"')])
## gnuplot.h: void ns3::Gnuplot2dDataset::Add(double x, double y) [member function]
cls.add_method('Add',
'void',
[param('double', 'x'), param('double', 'y')])
## gnuplot.h: void ns3::Gnuplot2dDataset::Add(double x, double y, double errorDelta) [member function]
cls.add_method('Add',
'void',
[param('double', 'x'), param('double', 'y'), param('double', 'errorDelta')])
## gnuplot.h: void ns3::Gnuplot2dDataset::Add(double x, double y, double minY, double maxY) [member function]
cls.add_method('Add',
'void',
[param('double', 'x'), param('double', 'y'), param('double', 'minY'), param('double', 'maxY')])
## gnuplot.h: void ns3::Gnuplot2dDataset::AddEmptyLine() [member function]
cls.add_method('AddEmptyLine',
'void',
[])
## gnuplot.h: static void ns3::Gnuplot2dDataset::SetDefaultErrorBars(ns3::Gnuplot2dDataset::ErrorBars errorBars) [member function]
cls.add_method('SetDefaultErrorBars',
'void',
[param('ns3::Gnuplot2dDataset::ErrorBars', 'errorBars')],
is_static=True)
## gnuplot.h: static void ns3::Gnuplot2dDataset::SetDefaultStyle(ns3::Gnuplot2dDataset::Style style) [member function]
cls.add_method('SetDefaultStyle',
'void',
[param('ns3::Gnuplot2dDataset::Style', 'style')],
is_static=True)
## gnuplot.h: void ns3::Gnuplot2dDataset::SetErrorBars(ns3::Gnuplot2dDataset::ErrorBars errorBars) [member function]
cls.add_method('SetErrorBars',
'void',
[param('ns3::Gnuplot2dDataset::ErrorBars', 'errorBars')])
## gnuplot.h: void ns3::Gnuplot2dDataset::SetStyle(ns3::Gnuplot2dDataset::Style style) [member function]
cls.add_method('SetStyle',
'void',
[param('ns3::Gnuplot2dDataset::Style', 'style')])
return
def register_Ns3Gnuplot2dFunction_methods(root_module, cls):
## gnuplot.h: ns3::Gnuplot2dFunction::Gnuplot2dFunction(ns3::Gnuplot2dFunction const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Gnuplot2dFunction const &', 'arg0')])
## gnuplot.h: ns3::Gnuplot2dFunction::Gnuplot2dFunction(std::string const & title="Untitled", std::string const & function="") [constructor]
cls.add_constructor([param('std::string const &', 'title', default_value='"Untitled"'), param('std::string const &', 'function', default_value='""')])
## gnuplot.h: void ns3::Gnuplot2dFunction::SetFunction(std::string const & function) [member function]
cls.add_method('SetFunction',
'void',
[param('std::string const &', 'function')])
return
def register_Ns3Gnuplot3dDataset_methods(root_module, cls):
## gnuplot.h: ns3::Gnuplot3dDataset::Gnuplot3dDataset(ns3::Gnuplot3dDataset const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Gnuplot3dDataset const &', 'arg0')])
## gnuplot.h: ns3::Gnuplot3dDataset::Gnuplot3dDataset(std::string const & title="Untitled") [constructor]
cls.add_constructor([param('std::string const &', 'title', default_value='"Untitled"')])
## gnuplot.h: void ns3::Gnuplot3dDataset::Add(double x, double y, double z) [member function]
cls.add_method('Add',
'void',
[param('double', 'x'), param('double', 'y'), param('double', 'z')])
## gnuplot.h: void ns3::Gnuplot3dDataset::AddEmptyLine() [member function]
cls.add_method('AddEmptyLine',
'void',
[])
## gnuplot.h: static void ns3::Gnuplot3dDataset::SetDefaultStyle(std::string const & style) [member function]
cls.add_method('SetDefaultStyle',
'void',
[param('std::string const &', 'style')],
is_static=True)
## gnuplot.h: void ns3::Gnuplot3dDataset::SetStyle(std::string const & style) [member function]
cls.add_method('SetStyle',
'void',
[param('std::string const &', 'style')])
return
def register_Ns3Gnuplot3dFunction_methods(root_module, cls):
## gnuplot.h: ns3::Gnuplot3dFunction::Gnuplot3dFunction(ns3::Gnuplot3dFunction const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Gnuplot3dFunction const &', 'arg0')])
## gnuplot.h: ns3::Gnuplot3dFunction::Gnuplot3dFunction(std::string const & title="Untitled", std::string const & function="") [constructor]
cls.add_constructor([param('std::string const &', 'title', default_value='"Untitled"'), param('std::string const &', 'function', default_value='""')])
## gnuplot.h: void ns3::Gnuplot3dFunction::SetFunction(std::string const & function) [member function]
cls.add_method('SetFunction',
'void',
[param('std::string const &', 'function')])
return
def register_functions(root_module):
module = root_module
register_functions_ns3_Config(module.get_submodule('Config'), root_module)
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_addressUtils(module.get_submodule('addressUtils'), root_module)
register_functions_ns3_aodv(module.get_submodule('aodv'), root_module)
register_functions_ns3_dot11s(module.get_submodule('dot11s'), root_module)
register_functions_ns3_dsdv(module.get_submodule('dsdv'), root_module)
register_functions_ns3_flame(module.get_submodule('flame'), root_module)
register_functions_ns3_internal(module.get_submodule('internal'), root_module)
register_functions_ns3_olsr(module.get_submodule('olsr'), root_module)
return
def register_functions_ns3_Config(module, root_module):
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_addressUtils(module, root_module):
return
def register_functions_ns3_aodv(module, root_module):
return
def register_functions_ns3_dot11s(module, root_module):
return
def register_functions_ns3_dsdv(module, root_module):
return
def register_functions_ns3_flame(module, root_module):
return
def register_functions_ns3_internal(module, root_module):
return
def register_functions_ns3_olsr(module, root_module):
return
|
joelagnel/ns-3
|
bindings/python/apidefs/gcc-LP64/ns3_module_tools.py
|
Python
|
gpl-2.0
| 21,544
|
# This script serves two purposes:
#
# - to demonstrate that an AWT Listener can be written in Jython, and
#
# - to find the width of an image that you know is uncompressed but whose
#   dimensions you do not know.
#
# To use it, open the raw image with File>Import>Raw... choosing a width and
# height that are roughly correct. Then start this script,
# which will open a dialog box with a slider, with which you can interactively
# test new widths -- the pixels in the image window will be updated accordingly.
from ij.gui import GenericDialog
from java.awt.event import AdjustmentListener
from java.lang import Math, System
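# WindowManager is an ImageJ class; Fiji's script interpreter is assumed to
# make it available to Jython scripts without an explicit import.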
image = WindowManager.getCurrentImage()
ip = image.getProcessor()
pixelsCopy = ip.getPixelsCopy()
pixels = ip.getPixels()
width = ip.getWidth()
height = ip.getHeight()
minWidth = int(Math.sqrt(len(pixels) / 16))
maxWidth = minWidth * 16
class Listener(AdjustmentListener):
def adjustmentValueChanged(self, event):
value = event.getSource().getValue()
rowstride = min(width, value)
for j in range(0, min(height, int(width * height / value))):
System.arraycopy(pixelsCopy, j * value,
pixels, j * width, rowstride)
image.updateAndDraw()
gd = GenericDialog("Width")
gd.addSlider("width", minWidth, maxWidth, ip.getHeight())
gd.getSliders().get(0).addAdjustmentListener(Listener())
gd.showDialog()
if gd.wasCanceled():
pixels[0:width * height] = pixelsCopy
image.updateAndDraw()
|
ferlandlab/BranchAnalysis2D-3D
|
Fiji.app/plugins/Examples/Find_Dimension_of_Raw_Image.py
|
Python
|
gpl-3.0
| 1,431
|
from __future__ import unicode_literals
from django.dispatch import Signal
local_site_user_added = Signal(providing_args=['user', 'localsite'])
|
chipx86/reviewboard
|
reviewboard/site/signals.py
|
Python
|
mit
| 147
|
# Generated by Django 3.0.5 on 2020-05-29 23:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('adventure', '0064_auto_20200529_0136'),
]
operations = [
migrations.AddField(
model_name='artifact',
name='data',
field=models.TextField(blank=True, help_text='Adventure-specific data for this artifact, e.g., elemental weapon, etc.Enter as a JSON object.', max_length=1000, null=True),
),
migrations.AddField(
model_name='monster',
name='data',
field=models.TextField(blank=True, help_text='Adventure-specific data for this monster, e.g., type of monster like vampire, undead, soldier, frost, etc. Data can be used in custom code. Enter as a JSON object.', max_length=1000, null=True),
),
migrations.AddField(
model_name='room',
name='data',
field=models.TextField(blank=True, help_text='Adventure-specific data for this room, e.g., room type or environment (road, cave, snow, etc.). Data can be used in custom code. Enter as a JSON object.', max_length=1000, null=True),
),
]
|
kdechant/eamon
|
adventure/migrations/0065_auto_20200529_1607.py
|
Python
|
mit
| 1,209
|
import numpy as np
from random import uniform
# set numpy error
np.seterr( over='raise' )
# read full file
data = open( 'pg_essays.txt', 'rt' ).read()
# build the character vocabulary: every unique character in the input gets an integer index (one-hot encoded later)
dict_chars = list( set( data ) )
data_size = len( data )
dict_size = len( dict_chars )
# character encoding
char_to_x = { c:i for i, c in enumerate( dict_chars ) }
x_to_char = { i:c for i, c in enumerate( dict_chars ) }
print( 'data size: {0}, dictionary size: {1}'.format( data_size, dict_size ) )
# net structure
hidden_nodes = 200
seq_len = 25
learning_rate = 1e-1
# model weights - drawn initially from a Gaussian distribution
Whh = np.random.normal( 0., 0.01, (hidden_nodes, hidden_nodes) )
Wxh = np.random.normal( 0., 0.01, (hidden_nodes, dict_size ) )
Why = np.random.normal( 0., 0.01, (dict_size, hidden_nodes ) )
bh = np.zeros( (hidden_nodes, 1) )
by = np.zeros( (dict_size, 1 ) )
# calculate loss, model weights gradients and return hidden state for propagation
def rnn( inputs, targets, hprev ):
'''
input and target have len of seq_len
'''
xs, hraw, hs, ys, ps = {}, {}, {}, {}, {}
hs[ -1 ] = hprev
loss = 0.
# forward pass
for i in range( len( inputs ) ):
# encode input character
xs[i] = np.zeros( ( dict_size, 1 ) )
xs[i][inputs[i]] = 1.
hraw[i] = np.dot( Wxh, xs[i] ) + np.dot( Whh, hs[i-1] ) + bh
hs[i] = np.maximum( hraw[i], 0. )
ys[i] = np.dot( Why, hs[i] ) + by
        # clip ys to avoid overflows. tanh does clipping via its natural range
np.clip( ys[i], -100., 100., out=ys[i] )
# normalise probabilities
ps[i] = np.exp( ys[i] ) / np.sum( np.exp( ys[i] ) )
# softmax (cross-entropy loss)
loss += -np.log( ps[i][targets[i],0] )
dWxh, dWhh, dWhy = np.zeros_like( Wxh ), np.zeros_like( Whh ), np.zeros_like( Why )
dbh, dby = np.zeros_like( bh ), np.zeros_like( by )
dhnext = np.zeros_like( hs[0] )
# backward pass: start from the end
for i in reversed( range( len( inputs ) ) ):
dy = np.copy( ps[i] )
        dy[targets[i]] -= 1.0 # backprop into y: softmax cross-entropy gradient is p - 1 at the target index
        # accumulate gradients for the output layer
dWhy += np.dot( dy, hs[i].T )
dby += dy
dh = np.dot( Why.T, dy) + dhnext # backprop into h
dhtemp = np.zeros_like( dhnext )
dhtemp[ hraw[i] > 0. ] = 1.
dhraw = dhtemp * dh
dbh += dhraw
dWxh += np.dot( dhraw, xs[i].T )
dWhh += np.dot( dhraw, hs[i-1].T )
dhnext = np.dot( Whh.T, dhraw )
# clip to mitigate exploding gradients
for dparam in [ dWxh, dWhh, dWhy, dbh, dby ]:
np.clip( dparam, -5, 5, out=dparam )
return loss, dWxh, dWhh, dWhy, dbh, dby, hs[ len(inputs)-1 ]
# gradient validation
def gradCheck(inputs, targets, hprev):
global Wxh, Whh, Why, bh, by
num_checks, delta = 10, 1e-5
# calculate gradients using backprop
_, dWxh, dWhh, dWhy, dbh, dby, _ = rnn( inputs, targets, hprev )
for param, dparam, name in zip([Wxh, Whh, Why, bh, by], [dWxh, dWhh, dWhy, dbh, dby], ['Wxh', 'Whh', 'Why', 'bh', 'by']):
s0 = dparam.shape
s1 = param.shape
        assert s0 == s1, 'Error dims do not match: %r and %r.' % (s0, s1)
print( name )
for i in xrange(num_checks):
ri = int( uniform(0,param.size) )
# evaluate cost at [x + delta] and [x - delta]
old_val = param.flat[ri]
param.flat[ri] = old_val + delta
cg0, _, _, _, _, _, _ = rnn( inputs, targets, hprev )
param.flat[ri] = old_val - delta
cg1, _, _, _, _, _, _ = rnn( inputs, targets, hprev )
param.flat[ri] = old_val # reset old value for this parameter
# fetch both numerical and analytic gradient
grad_analytic = dparam.flat[ri]
grad_numerical = (cg0 - cg1) / ( 2 * delta )
if grad_numerical + grad_analytic == 0.0:
rel_error = 0.0
else:
rel_error = abs( grad_analytic - grad_numerical ) / abs( grad_numerical + grad_analytic )
print( '%f, %f => %e ' % ( grad_numerical, grad_analytic, rel_error) )
# rel_error should be on order of 1e-7 or less
def sample( h, seed_ix, n ):
"""
sample a sequence of integers from the model
h is memory state, seed_ix is seed letter for first time step
"""
x = np.zeros( (dict_size, 1) )
x[ seed_ix ] = 1
ixes = []
for t in range( n ):
h = np.maximum( np.dot( Wxh, x ) + np.dot( Whh, h ) + bh, 0. )
y = np.dot( Why, h ) + by
p = np.exp( y ) / np.sum( np.exp( y ) )
ix = np.random.choice( range( dict_size ), p=p.ravel() )
x = np.zeros( (dict_size, 1) )
x[ix] = 1
ixes.append(ix)
return ixes
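# Example usage sketch (the seed character is an assumption; any character
# present in the training data works):
#   h0 = np.zeros( (hidden_nodes, 1) )
#   ixes = sample( h0, char_to_x['t'], 200 )
#   print( ''.join( x_to_char[ix] for ix in ixes ) )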
# run gradient validation
if False:
p = 0
inputs = [ char_to_x[ c ] for c in data[ p:p+seq_len ] ]
targets = [ char_to_x[ c ] for c in data[ p + 1:p+seq_len + 1 ] ]
hprev = np.zeros_like( bh )
print( data[ p:p+seq_len ], inputs )
print( data[ p + 1:p+seq_len + 1 ], targets )
gradCheck( inputs, targets, hprev )
# main program
n, p = 0, 0
# memory for Adagrad
mWxh = np.zeros_like( Wxh )
mWhh = np.zeros_like( Whh )
mWhy = np.zeros_like( Why )
mbh, mby = np.zeros_like(bh), np.zeros_like(by)
# loss at iteration 0
smooth_loss = -np.log(1.0/dict_size)*seq_len
while True:
# prepare inputs (we're sweeping from left to right in steps seq_length long)
if p+seq_len+1 >= len(data) or n == 0:
hprev = np.zeros( (hidden_nodes,1) ) # reset RNN memory
p = 0 # go from start of data
inputs = [char_to_x[ch] for ch in data[p:p+seq_len]]
targets = [char_to_x[ch] for ch in data[p+1:p+seq_len + 1]]
# sample from the model now and then
if n % 100 == 0:
sample_ix = sample(hprev, inputs[0], 200)
txt = ''.join(x_to_char[ix] for ix in sample_ix)
print( '----\n %s \n----' % txt )
# forward seq_length characters through the net and fetch gradient
loss, dWxh, dWhh, dWhy, dbh, dby, hprev = rnn( inputs, targets, hprev )
smooth_loss = smooth_loss * 0.999 + loss * 0.001
    if n % 100 == 0: print( 'iter %d, loss: %f' % (n, smooth_loss) ) # print progress
# perform parameter update with Adagrad
for param, dparam, mem in zip([Wxh, Whh, Why, bh, by],
[dWxh, dWhh, dWhy, dbh, dby],
[mWxh, mWhh, mWhy, mbh, mby]):
mem += dparam * dparam
param += -learning_rate * dparam / np.sqrt(mem + 1e-8) # adagrad update
p += seq_len # move data pointer
n += 1 # iteration counter
|
marcino239/min_rnn
|
min_rnn.py
|
Python
|
gpl-2.0
| 6,607
|
import json
import time
def parseSearches(searchesFile, begintimeframe=0, endtimeframe=None):
    # Default the end of the time frame to "now"; evaluating time.time() in
    # the signature would freeze it at module import time.
    if endtimeframe is None:
        endtimeframe = int(time.time())
    searches = json.load(open(searchesFile, 'r'))
    listOfsearches = []
for search in searches["event"]:
        # a query can contain several timestamps, so each one needs to be handled separately
        for timeStampDic in search["query"]['id']:
timeStamp = int(timeStampDic["timestamp_usec"]) // 1000000
# time filtering
if timeStamp < endtimeframe and timeStamp > begintimeframe:
#timeStamp = timeStamp / 1000
queryText = str(search["query"]["query_text"])
searchInfo = timeStamp, 'Searches', \
"Query Text: " + queryText
listOfsearches.append(searchInfo)
return listOfsearches
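# Example usage sketch (the path is hypothetical; pass the searches JSON file
# from a Google Takeout export):
#   events = parseSearches('Takeout/Searches/Searches.json')
# returns a list of (timestamp, 'Searches', 'Query Text: ...') tuples,
# optionally bounded by begintimeframe/endtimeframe (Unix seconds).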
|
LeanVel/TakeoutsTimelining
|
SearchesParser.py
|
Python
|
gpl-3.0
| 857
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from webob import exc
from neutron.openstack.common import uuidutils
from neutron.tests.unit import test_api_v2
from neutron.tests.unit import test_api_v2_extension
from midonet.neutron.extensions import license
_uuid = uuidutils.generate_uuid
_get_path = test_api_v2._get_path
class LicenseExtensionTestCase(test_api_v2_extension.ExtensionTestCase):
"""Test the endpoints for the license."""
fmt = "json"
def setUp(self):
super(LicenseExtensionTestCase, self).setUp()
plural_mappings = {'license': 'licenses'}
self._setUpExtension(
'midonet.neutron.extensions.license.LicensePluginBase',
None, license.RESOURCE_ATTRIBUTE_MAP,
license.License, '', plural_mappings=plural_mappings)
def test_license_list(self):
return_value = [{'id': _uuid(),
'description': 'whatevs'}]
instance = self.plugin.return_value
instance.get_licenses.return_value = return_value
res = self.api.get(_get_path('licenses', fmt=self.fmt))
self.assertEqual(exc.HTTPOk.code, res.status_int)
instance.get_licenses.assert_called_once_with(
mock.ANY, fields=mock.ANY, filters=mock.ANY)
res = self.deserialize(res)
self.assertIn('licenses', res)
self.assertEqual(1, len(res['licenses']))
def test_license_show(self):
license_id = _uuid()
return_value = {'id': _uuid(),
'description': 'whatevs'}
instance = self.plugin.return_value
instance.get_license.return_value = return_value
res = self.api.get(_get_path('licenses/%s' % license_id, fmt=self.fmt))
self.assertEqual(exc.HTTPOk.code, res.status_int)
instance.get_license.assert_called_once_with(
mock.ANY, unicode(license_id), fields=mock.ANY)
res = self.deserialize(res)
self.assertIn('license', res)
def test_license_delete(self):
license_id = _uuid()
instance = self.plugin.return_value
res = self.api.delete(_get_path('licenses', id=license_id))
instance.delete_license.assert_called_once_with(mock.ANY, license_id)
self.assertEqual(exc.HTTPNoContent.code, res.status_int)
class LicenseExtensionTestCaseXml(LicenseExtensionTestCase):
fmt = "xml"
|
midonet/python-neutron-plugin-midonet
|
midonet/neutron/tests/unit/test_extension_license.py
|
Python
|
apache-2.0
| 2,932
|
from django import forms
from django.contrib import admin
from . import models
from .models import Template, TemplateTranslation
@admin.register(models.Message)
class MessageAdmin(admin.ModelAdmin):
search_fields = ('subject', 'body', 'mail_to')
date_hierarchy = 'created_at'
list_display = (
'pk', 'mail_to', 'subject',
'created_at', 'sent_at',
'template',
)
readonly_fields = (
'pk', 'mail_to',
'created_at', 'sent_at',
'template',
'subject',
'body',
)
class TemplateAdminForm(forms.ModelForm):
class Meta:
model = Template
fields = '__all__'
widgets = {
'description': forms.Textarea(),
}
class TemplateTranslationAdminForm(forms.ModelForm):
class Meta:
model = TemplateTranslation
fields = '__all__'
widgets = {
'message_body': forms.Textarea(),
}
class TemplateTranslationInline(admin.StackedInline):
model = TemplateTranslation
form = TemplateTranslationAdminForm
extra = 1
@admin.register(Template)
class TemplateAdmin(admin.ModelAdmin):
inlines = [TemplateTranslationInline]
form = TemplateAdminForm
search_fields = ['name', 'description']
list_display = ['name', 'description', 'updated_at']
list_filter = ['updated_at']
ordering = ['name']
@admin.register(TemplateTranslation)
class TemplateTranslationAdmin(admin.ModelAdmin):
form = TemplateTranslationAdminForm
search_fields = ['name', 'description']
list_display = ['template', 'language', 'lang_code', 'message_subject', 'updated_at']
list_filter = ['template', 'lang_code', 'updated_at']
ordering = ['template']
|
dvhbru/dvhb-hybrid
|
dvhb_hybrid/mailer/admin.py
|
Python
|
mit
| 1,731
|
import os
import sys
from setuptools import setup, find_packages
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
os.system('python setup.py bdist_wheel upload')
print("Now tag me :)")
print(" git tag -a {0} -m 'version {0}'".format(__import__('pynamodb').__version__))
print(" git push --tags")
sys.exit()
setup(
name='pynamodb',
version=__import__('pynamodb').__version__,
packages=find_packages(),
url='http://jlafon.io/pynamodb.html',
author='Jharrod LaFon',
author_email='jlafon@eyesopen.com',
description='A Pythonic Interface to DynamoDB',
long_description=open('README.rst').read(),
zip_safe=False,
license='MIT',
keywords='python dynamodb amazon',
install_requires=[
'Delorean',
'six',
'botocore',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'License :: OSI Approved :: MIT License',
],
)
|
mtsgrd/PynamoDB2
|
setup.py
|
Python
|
mit
| 1,186
|
from __future__ import unicode_literals
import mimetypes
import unittest
from os import path
from django.conf.urls.static import static
from django.http import FileResponse, HttpResponseNotModified
from django.test import SimpleTestCase, override_settings
from django.utils.http import http_date
from django.views.static import was_modified_since
from .. import urls
from ..urls import media_dir
@override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls')
class StaticTests(SimpleTestCase):
"""Tests django views in django/views/static.py"""
prefix = 'site_media'
def test_serve(self):
"The static view can serve static media"
media_files = ['file.txt', 'file.txt.gz']
for filename in media_files:
response = self.client.get('/%s/%s' % (self.prefix, filename))
response_content = b''.join(response)
file_path = path.join(media_dir, filename)
with open(file_path, 'rb') as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(len(response_content), int(response['Content-Length']))
self.assertEqual(mimetypes.guess_type(file_path)[1], response.get('Content-Encoding', None))
def test_chunked(self):
"The static view should stream files in chunks to avoid large memory usage"
response = self.client.get('/%s/%s' % (self.prefix, 'long-line.txt'))
first_chunk = next(response.streaming_content)
self.assertEqual(len(first_chunk), FileResponse.block_size)
second_chunk = next(response.streaming_content)
response.close()
# strip() to prevent OS line endings from causing differences
self.assertEqual(len(second_chunk.strip()), 1449)
def test_unknown_mime_type(self):
response = self.client.get('/%s/file.unknown' % self.prefix)
self.assertEqual('application/octet-stream', response['Content-Type'])
response.close()
def test_copes_with_empty_path_component(self):
file_name = 'file.txt'
response = self.client.get('/%s//%s' % (self.prefix, file_name))
response_content = b''.join(response)
with open(path.join(media_dir, file_name), 'rb') as fp:
self.assertEqual(fp.read(), response_content)
def test_is_modified_since(self):
file_name = 'file.txt'
response = self.client.get(
'/%s/%s' % (self.prefix, file_name),
HTTP_IF_MODIFIED_SINCE='Thu, 1 Jan 1970 00:00:00 GMT'
)
response_content = b''.join(response)
with open(path.join(media_dir, file_name), 'rb') as fp:
self.assertEqual(fp.read(), response_content)
def test_not_modified_since(self):
file_name = 'file.txt'
response = self.client.get(
'/%s/%s' % (self.prefix, file_name),
HTTP_IF_MODIFIED_SINCE='Mon, 18 Jan 2038 05:14:07 GMT'
# This is 24h before max Unix time. Remember to fix Django and
# update this test well before 2038 :)
)
self.assertIsInstance(response, HttpResponseNotModified)
def test_invalid_if_modified_since(self):
"""Handle bogus If-Modified-Since values gracefully
Assume that a file is modified since an invalid timestamp as per RFC
2616, section 14.25.
"""
file_name = 'file.txt'
invalid_date = 'Mon, 28 May 999999999999 28:25:26 GMT'
response = self.client.get('/%s/%s' % (self.prefix, file_name),
HTTP_IF_MODIFIED_SINCE=invalid_date)
response_content = b''.join(response)
with open(path.join(media_dir, file_name), 'rb') as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(len(response_content), int(response['Content-Length']))
def test_invalid_if_modified_since2(self):
"""Handle even more bogus If-Modified-Since values gracefully
Assume that a file is modified since an invalid timestamp as per RFC
2616, section 14.25.
"""
file_name = 'file.txt'
invalid_date = ': 1291108438, Wed, 20 Oct 2010 14:05:00 GMT'
response = self.client.get('/%s/%s' % (self.prefix, file_name),
HTTP_IF_MODIFIED_SINCE=invalid_date)
response_content = b''.join(response)
with open(path.join(media_dir, file_name), 'rb') as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(len(response_content), int(response['Content-Length']))
def test_404(self):
response = self.client.get('/%s/non_existing_resource' % self.prefix)
self.assertEqual(404, response.status_code)
def test_index(self):
response = self.client.get('/%s/' % self.prefix)
self.assertContains(response, 'Index of /')
class StaticHelperTest(StaticTests):
"""
Test case to make sure the static URL pattern helper works as expected
"""
def setUp(self):
super(StaticHelperTest, self).setUp()
self._old_views_urlpatterns = urls.urlpatterns[:]
urls.urlpatterns += static('/media/', document_root=media_dir)
def tearDown(self):
super(StaticHelperTest, self).tearDown()
urls.urlpatterns = self._old_views_urlpatterns
class StaticUtilsTests(unittest.TestCase):
def test_was_modified_since_fp(self):
"""
A floating point mtime does not disturb was_modified_since (#18675).
"""
mtime = 1343416141.107817
header = http_date(mtime)
self.assertFalse(was_modified_since(header, mtime))
|
frishberg/django
|
tests/view_tests/tests/test_static.py
|
Python
|
bsd-3-clause
| 5,610
|
"""Check new
Revision ID: 92235b77ea53
Revises: 381fdb66ec27
Create Date: 2017-10-14 02:38:51.007307
"""
# revision identifiers, used by Alembic.
revision = '92235b77ea53'
down_revision = '381fdb66ec27'
from alembic import op
import sqlalchemy as sa
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index('ix_ActiveTranslationMessages_category', table_name='ActiveTranslationMessages')
op.drop_index('ix_ActiveTranslationMessages_datetime', table_name='ActiveTranslationMessages')
op.drop_index('ix_ActiveTranslationMessages_fmt', table_name='ActiveTranslationMessages')
op.drop_index('ix_ActiveTranslationMessages_from_developer', table_name='ActiveTranslationMessages')
op.drop_index('ix_ActiveTranslationMessages_key', table_name='ActiveTranslationMessages')
op.drop_index('ix_ActiveTranslationMessages_namespace', table_name='ActiveTranslationMessages')
op.drop_index('ix_ActiveTranslationMessages_position', table_name='ActiveTranslationMessages')
op.drop_index('ix_ActiveTranslationMessages_same_tool', table_name='ActiveTranslationMessages')
op.drop_index('ix_ActiveTranslationMessages_taken_from_default', table_name='ActiveTranslationMessages')
op.drop_index('ix_ActiveTranslationMessages_tool_id', table_name='ActiveTranslationMessages')
op.drop_index('ix_Apps_composer', table_name='Apps')
op.drop_index('ix_Apps_creation_date', table_name='Apps')
op.drop_index('ix_Apps_last_access_date', table_name='Apps')
op.drop_index('ix_Apps_modification_date', table_name='Apps')
op.drop_index('ix_Apps_name', table_name='Apps')
op.drop_index('ix_Apps_owner_id', table_name='Apps')
op.drop_index('ix_Apps_unique_id', table_name='Apps')
op.drop_index('ix_GoLabOAuthUsers_display_name', table_name='GoLabOAuthUsers')
op.drop_index('ix_GoLabOAuthUsers_email', table_name='GoLabOAuthUsers')
op.drop_index('ix_Languages_language', table_name='Languages')
op.drop_index('ix_RepositoryApps_adaptable', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_contents_hash', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_downloaded_hash', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_external_id', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_failing', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_failing_since', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_last_change', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_last_check', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_last_download_change', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_last_processed_contents_hash', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_last_processed_downloaded_hash', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_last_processed_time', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_name', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_repository', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_translatable', table_name='RepositoryApps')
op.drop_index('ix_RepositoryApps_url', table_name='RepositoryApps')
op.drop_index('ix_TranslatedApps_url', table_name='TranslatedApps')
op.drop_index('ix_TranslationBundles_from_developer', table_name='TranslationBundles')
op.drop_index('ix_TranslationBundles_language', table_name='TranslationBundles')
op.drop_index('ix_TranslationBundles_target', table_name='TranslationBundles')
op.drop_index('ix_TranslationCurrentActiveUsers_last_check', table_name='TranslationCurrentActiveUsers')
op.drop_index('ix_TranslationExternalSuggestions_engine', table_name='TranslationExternalSuggestions')
op.drop_index('ix_TranslationExternalSuggestions_human_key', table_name='TranslationExternalSuggestions')
op.drop_index('ix_TranslationExternalSuggestions_human_key_hash', table_name='TranslationExternalSuggestions')
op.drop_index('ix_TranslationExternalSuggestions_language', table_name='TranslationExternalSuggestions')
op.drop_index('ix_TranslationExternalSuggestions_origin_language', table_name='TranslationExternalSuggestions')
op.drop_index('ix_TranslationKeySuggestions_key', table_name='TranslationKeySuggestions')
op.drop_index('ix_TranslationKeySuggestions_language', table_name='TranslationKeySuggestions')
op.drop_index('ix_TranslationKeySuggestions_target', table_name='TranslationKeySuggestions')
op.drop_index('ix_TranslationMessageHistory_category', table_name='TranslationMessageHistory')
op.drop_index('ix_TranslationMessageHistory_datetime', table_name='TranslationMessageHistory')
op.drop_index('ix_TranslationMessageHistory_fmt', table_name='TranslationMessageHistory')
op.drop_index('ix_TranslationMessageHistory_from_developer', table_name='TranslationMessageHistory')
op.drop_index('ix_TranslationMessageHistory_key', table_name='TranslationMessageHistory')
op.drop_index('ix_TranslationMessageHistory_namespace', table_name='TranslationMessageHistory')
op.drop_index('ix_TranslationMessageHistory_parent_translation_id', table_name='TranslationMessageHistory')
op.drop_index('ix_TranslationMessageHistory_position', table_name='TranslationMessageHistory')
op.drop_index('ix_TranslationMessageHistory_same_tool', table_name='TranslationMessageHistory')
op.drop_index('ix_TranslationMessageHistory_taken_from_default', table_name='TranslationMessageHistory')
op.drop_index('ix_TranslationMessageHistory_tool_id', table_name='TranslationMessageHistory')
op.drop_index('ix_TranslationNotificationRecipients_created', table_name='TranslationNotificationRecipients')
op.drop_index('ix_TranslationNotificationRecipients_email', table_name='TranslationNotificationRecipients')
op.drop_index('ix_TranslationSubscriptions_last_check', table_name='TranslationSubscriptions')
op.drop_index('ix_TranslationSubscriptions_mechanism', table_name='TranslationSubscriptions')
op.drop_index('ix_TranslationSyncLogs_end_datetime', table_name='TranslationSyncLogs')
op.drop_index('ix_TranslationSyncLogs_start_datetime', table_name='TranslationSyncLogs')
op.drop_index('ix_TranslationUrls_automatic', table_name='TranslationUrls')
op.drop_index('ix_TranslationUrls_url', table_name='TranslationUrls')
op.drop_index('ix_TranslationValueSuggestions_human_key', table_name='TranslationValueSuggestions')
op.drop_index('ix_TranslationValueSuggestions_language', table_name='TranslationValueSuggestions')
op.drop_index('ix_TranslationValueSuggestions_target', table_name='TranslationValueSuggestions')
op.drop_index('ix_Users_creation_date', table_name='Users')
op.drop_index('ix_Users_last_access_date', table_name='Users')
op.create_index(op.f('ix_ActiveTranslationMessages_category'), 'ActiveTranslationMessages', ['category'], unique=False)
op.create_index(op.f('ix_ActiveTranslationMessages_datetime'), 'ActiveTranslationMessages', ['datetime'], unique=False)
op.create_index(op.f('ix_ActiveTranslationMessages_fmt'), 'ActiveTranslationMessages', ['fmt'], unique=False)
op.create_index(op.f('ix_ActiveTranslationMessages_from_developer'), 'ActiveTranslationMessages', ['from_developer'], unique=False)
op.create_index(op.f('ix_ActiveTranslationMessages_key'), 'ActiveTranslationMessages', ['key'], unique=False)
op.create_index(op.f('ix_ActiveTranslationMessages_namespace'), 'ActiveTranslationMessages', ['namespace'], unique=False)
op.create_index(op.f('ix_ActiveTranslationMessages_position'), 'ActiveTranslationMessages', ['position'], unique=False)
op.create_index(op.f('ix_ActiveTranslationMessages_same_tool'), 'ActiveTranslationMessages', ['same_tool'], unique=False)
op.create_index(op.f('ix_ActiveTranslationMessages_taken_from_default'), 'ActiveTranslationMessages', ['taken_from_default'], unique=False)
op.create_index(op.f('ix_ActiveTranslationMessages_tool_id'), 'ActiveTranslationMessages', ['tool_id'], unique=False)
op.create_index(op.f('ix_Apps_composer'), 'Apps', ['composer'], unique=False)
op.create_index(op.f('ix_Apps_creation_date'), 'Apps', ['creation_date'], unique=False)
op.create_index(op.f('ix_Apps_last_access_date'), 'Apps', ['last_access_date'], unique=False)
op.create_index(op.f('ix_Apps_modification_date'), 'Apps', ['modification_date'], unique=False)
op.create_index(op.f('ix_Apps_name'), 'Apps', ['name'], unique=False)
op.create_index(op.f('ix_Apps_owner_id'), 'Apps', ['owner_id'], unique=False)
op.create_index(op.f('ix_Apps_unique_id'), 'Apps', ['unique_id'], unique=True)
op.create_index(op.f('ix_GoLabOAuthUsers_display_name'), 'GoLabOAuthUsers', ['display_name'], unique=False)
op.create_index(op.f('ix_GoLabOAuthUsers_email'), 'GoLabOAuthUsers', ['email'], unique=True)
op.create_index(op.f('ix_Languages_language'), 'Languages', ['language'], unique=True)
op.create_index(op.f('ix_RepositoryApps_adaptable'), 'RepositoryApps', ['adaptable'], unique=False)
op.create_index(op.f('ix_RepositoryApps_contents_hash'), 'RepositoryApps', ['contents_hash'], unique=False)
op.create_index(op.f('ix_RepositoryApps_downloaded_hash'), 'RepositoryApps', ['downloaded_hash'], unique=False)
op.create_index(op.f('ix_RepositoryApps_external_id'), 'RepositoryApps', ['external_id'], unique=False)
op.create_index(op.f('ix_RepositoryApps_failing_since'), 'RepositoryApps', ['failing_since'], unique=False)
op.create_index(op.f('ix_RepositoryApps_failing'), 'RepositoryApps', ['failing'], unique=False)
op.create_index(op.f('ix_RepositoryApps_last_change'), 'RepositoryApps', ['last_change'], unique=False)
op.create_index(op.f('ix_RepositoryApps_last_check'), 'RepositoryApps', ['last_check'], unique=False)
op.create_index(op.f('ix_RepositoryApps_last_download_change'), 'RepositoryApps', ['last_download_change'], unique=False)
op.create_index(op.f('ix_RepositoryApps_last_processed_contents_hash'), 'RepositoryApps', ['last_processed_contents_hash'], unique=False)
op.create_index(op.f('ix_RepositoryApps_last_processed_downloaded_hash'), 'RepositoryApps', ['last_processed_downloaded_hash'], unique=False)
op.create_index(op.f('ix_RepositoryApps_last_processed_time'), 'RepositoryApps', ['last_processed_time'], unique=False)
op.create_index(op.f('ix_RepositoryApps_name'), 'RepositoryApps', ['name'], unique=False)
op.create_index(op.f('ix_RepositoryApps_repository'), 'RepositoryApps', ['repository'], unique=False)
op.create_index(op.f('ix_RepositoryApps_translatable'), 'RepositoryApps', ['translatable'], unique=False)
op.create_index(op.f('ix_RepositoryApps_url'), 'RepositoryApps', ['url'], unique=False)
op.create_index(op.f('ix_TranslatedApps_url'), 'TranslatedApps', ['url'], unique=True)
op.create_index(op.f('ix_TranslationBundles_from_developer'), 'TranslationBundles', ['from_developer'], unique=False)
op.create_index(op.f('ix_TranslationBundles_language'), 'TranslationBundles', ['language'], unique=False)
op.create_index(op.f('ix_TranslationBundles_target'), 'TranslationBundles', ['target'], unique=False)
op.create_index(op.f('ix_TranslationCurrentActiveUsers_last_check'), 'TranslationCurrentActiveUsers', ['last_check'], unique=False)
op.create_index(op.f('ix_TranslationExternalSuggestions_engine'), 'TranslationExternalSuggestions', ['engine'], unique=False)
op.create_index(op.f('ix_TranslationExternalSuggestions_human_key_hash'), 'TranslationExternalSuggestions', ['human_key_hash'], unique=False)
op.create_index(op.f('ix_TranslationExternalSuggestions_human_key'), 'TranslationExternalSuggestions', ['human_key'], unique=False)
op.create_index(op.f('ix_TranslationExternalSuggestions_language'), 'TranslationExternalSuggestions', ['language'], unique=False)
op.create_index(op.f('ix_TranslationExternalSuggestions_origin_language'), 'TranslationExternalSuggestions', ['origin_language'], unique=False)
op.create_index(op.f('ix_TranslationKeySuggestions_key'), 'TranslationKeySuggestions', ['key'], unique=False)
op.create_index(op.f('ix_TranslationKeySuggestions_language'), 'TranslationKeySuggestions', ['language'], unique=False)
op.create_index(op.f('ix_TranslationKeySuggestions_target'), 'TranslationKeySuggestions', ['target'], unique=False)
op.create_index(op.f('ix_TranslationMessageHistory_category'), 'TranslationMessageHistory', ['category'], unique=False)
op.create_index(op.f('ix_TranslationMessageHistory_datetime'), 'TranslationMessageHistory', ['datetime'], unique=False)
op.create_index(op.f('ix_TranslationMessageHistory_fmt'), 'TranslationMessageHistory', ['fmt'], unique=False)
op.create_index(op.f('ix_TranslationMessageHistory_from_developer'), 'TranslationMessageHistory', ['from_developer'], unique=False)
op.create_index(op.f('ix_TranslationMessageHistory_key'), 'TranslationMessageHistory', ['key'], unique=False)
op.create_index(op.f('ix_TranslationMessageHistory_namespace'), 'TranslationMessageHistory', ['namespace'], unique=False)
op.create_index(op.f('ix_TranslationMessageHistory_parent_translation_id'), 'TranslationMessageHistory', ['parent_translation_id'], unique=False)
op.create_index(op.f('ix_TranslationMessageHistory_position'), 'TranslationMessageHistory', ['position'], unique=False)
op.create_index(op.f('ix_TranslationMessageHistory_same_tool'), 'TranslationMessageHistory', ['same_tool'], unique=False)
op.create_index(op.f('ix_TranslationMessageHistory_taken_from_default'), 'TranslationMessageHistory', ['taken_from_default'], unique=False)
op.create_index(op.f('ix_TranslationMessageHistory_tool_id'), 'TranslationMessageHistory', ['tool_id'], unique=False)
op.create_index(op.f('ix_TranslationNotificationRecipients_created'), 'TranslationNotificationRecipients', ['created'], unique=False)
op.create_index(op.f('ix_TranslationNotificationRecipients_email'), 'TranslationNotificationRecipients', ['email'], unique=True)
op.create_index(op.f('ix_TranslationSubscriptions_last_check'), 'TranslationSubscriptions', ['last_check'], unique=False)
op.create_index(op.f('ix_TranslationSubscriptions_mechanism'), 'TranslationSubscriptions', ['mechanism'], unique=False)
op.create_index(op.f('ix_TranslationSyncLogs_end_datetime'), 'TranslationSyncLogs', ['end_datetime'], unique=False)
op.create_index(op.f('ix_TranslationSyncLogs_start_datetime'), 'TranslationSyncLogs', ['start_datetime'], unique=False)
op.create_index(op.f('ix_TranslationUrls_automatic'), 'TranslationUrls', ['automatic'], unique=False)
op.create_index(op.f('ix_TranslationUrls_url'), 'TranslationUrls', ['url'], unique=True)
op.create_index(op.f('ix_TranslationValueSuggestions_human_key'), 'TranslationValueSuggestions', ['human_key'], unique=False)
op.create_index(op.f('ix_TranslationValueSuggestions_language'), 'TranslationValueSuggestions', ['language'], unique=False)
op.create_index(op.f('ix_TranslationValueSuggestions_target'), 'TranslationValueSuggestions', ['target'], unique=False)
op.create_index(op.f('ix_Users_creation_date'), 'Users', ['creation_date'], unique=False)
op.create_index(op.f('ix_Users_last_access_date'), 'Users', ['last_access_date'], unique=False)
# op.create_unique_constraint(None, 'ActiveTranslationMessages', ['bundle_id', 'key'])
# op.create_unique_constraint(None, 'RepositoryApp2languages', ['repository_app_id', 'language_id'])
# op.create_unique_constraint(None, 'TranslationBundles', ['translation_url_id', 'language', 'target'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_Users_last_access_date'), table_name='Users')
op.drop_index(op.f('ix_Users_creation_date'), table_name='Users')
op.drop_index(op.f('ix_TranslationValueSuggestions_target'), table_name='TranslationValueSuggestions')
op.drop_index(op.f('ix_TranslationValueSuggestions_language'), table_name='TranslationValueSuggestions')
op.drop_index(op.f('ix_TranslationValueSuggestions_human_key'), table_name='TranslationValueSuggestions')
op.drop_index(op.f('ix_TranslationUrls_url'), table_name='TranslationUrls')
op.drop_index(op.f('ix_TranslationUrls_automatic'), table_name='TranslationUrls')
op.drop_index(op.f('ix_TranslationSyncLogs_start_datetime'), table_name='TranslationSyncLogs')
op.drop_index(op.f('ix_TranslationSyncLogs_end_datetime'), table_name='TranslationSyncLogs')
op.drop_index(op.f('ix_TranslationSubscriptions_mechanism'), table_name='TranslationSubscriptions')
op.drop_index(op.f('ix_TranslationSubscriptions_last_check'), table_name='TranslationSubscriptions')
op.drop_index(op.f('ix_TranslationNotificationRecipients_email'), table_name='TranslationNotificationRecipients')
op.drop_index(op.f('ix_TranslationNotificationRecipients_created'), table_name='TranslationNotificationRecipients')
op.drop_index(op.f('ix_TranslationMessageHistory_tool_id'), table_name='TranslationMessageHistory')
op.drop_index(op.f('ix_TranslationMessageHistory_taken_from_default'), table_name='TranslationMessageHistory')
op.drop_index(op.f('ix_TranslationMessageHistory_same_tool'), table_name='TranslationMessageHistory')
op.drop_index(op.f('ix_TranslationMessageHistory_position'), table_name='TranslationMessageHistory')
op.drop_index(op.f('ix_TranslationMessageHistory_parent_translation_id'), table_name='TranslationMessageHistory')
op.drop_index(op.f('ix_TranslationMessageHistory_namespace'), table_name='TranslationMessageHistory')
op.drop_index(op.f('ix_TranslationMessageHistory_key'), table_name='TranslationMessageHistory')
op.drop_index(op.f('ix_TranslationMessageHistory_from_developer'), table_name='TranslationMessageHistory')
op.drop_index(op.f('ix_TranslationMessageHistory_fmt'), table_name='TranslationMessageHistory')
op.drop_index(op.f('ix_TranslationMessageHistory_datetime'), table_name='TranslationMessageHistory')
op.drop_index(op.f('ix_TranslationMessageHistory_category'), table_name='TranslationMessageHistory')
op.drop_index(op.f('ix_TranslationKeySuggestions_target'), table_name='TranslationKeySuggestions')
op.drop_index(op.f('ix_TranslationKeySuggestions_language'), table_name='TranslationKeySuggestions')
op.drop_index(op.f('ix_TranslationKeySuggestions_key'), table_name='TranslationKeySuggestions')
op.drop_index(op.f('ix_TranslationExternalSuggestions_origin_language'), table_name='TranslationExternalSuggestions')
op.drop_index(op.f('ix_TranslationExternalSuggestions_language'), table_name='TranslationExternalSuggestions')
op.drop_index(op.f('ix_TranslationExternalSuggestions_human_key'), table_name='TranslationExternalSuggestions')
op.drop_index(op.f('ix_TranslationExternalSuggestions_human_key_hash'), table_name='TranslationExternalSuggestions')
op.drop_index(op.f('ix_TranslationExternalSuggestions_engine'), table_name='TranslationExternalSuggestions')
op.drop_index(op.f('ix_TranslationBundles_target'), table_name='TranslationBundles')
op.drop_index(op.f('ix_TranslationBundles_language'), table_name='TranslationBundles')
op.drop_index(op.f('ix_TranslationBundles_from_developer'), table_name='TranslationBundles')
op.drop_index(op.f('ix_TranslationCurrentActiveUsers_last_check'), table_name='TranslationCurrentActiveUsers')
# op.drop_constraint(None, 'TranslationBundles', type_='unique')
op.drop_index(op.f('ix_RepositoryApps_url'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_translatable'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_repository'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_name'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_last_processed_time'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_last_processed_downloaded_hash'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_last_processed_contents_hash'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_last_download_change'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_last_check'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_last_change'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_failing'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_failing_since'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_external_id'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_downloaded_hash'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_contents_hash'), table_name='RepositoryApps')
op.drop_index(op.f('ix_RepositoryApps_adaptable'), table_name='RepositoryApps')
# op.drop_constraint(None, 'RepositoryApp2languages', type_='unique')
op.drop_index(op.f('ix_TranslatedApps_url'), table_name='TranslatedApps')
op.drop_index(op.f('ix_Languages_language'), table_name='Languages')
op.drop_index(op.f('ix_GoLabOAuthUsers_email'), table_name='GoLabOAuthUsers')
op.drop_index(op.f('ix_GoLabOAuthUsers_display_name'), table_name='GoLabOAuthUsers')
op.drop_index(op.f('ix_Apps_unique_id'), table_name='Apps')
op.drop_index(op.f('ix_Apps_owner_id'), table_name='Apps')
op.drop_index(op.f('ix_Apps_name'), table_name='Apps')
op.drop_index(op.f('ix_Apps_modification_date'), table_name='Apps')
op.drop_index(op.f('ix_Apps_last_access_date'), table_name='Apps')
op.drop_index(op.f('ix_Apps_creation_date'), table_name='Apps')
op.drop_index(op.f('ix_Apps_composer'), table_name='Apps')
# op.drop_constraint(None, 'ActiveTranslationMessages', type_='unique')
op.drop_index(op.f('ix_ActiveTranslationMessages_tool_id'), table_name='ActiveTranslationMessages')
op.drop_index(op.f('ix_ActiveTranslationMessages_taken_from_default'), table_name='ActiveTranslationMessages')
op.drop_index(op.f('ix_ActiveTranslationMessages_same_tool'), table_name='ActiveTranslationMessages')
op.drop_index(op.f('ix_ActiveTranslationMessages_position'), table_name='ActiveTranslationMessages')
op.drop_index(op.f('ix_ActiveTranslationMessages_namespace'), table_name='ActiveTranslationMessages')
op.drop_index(op.f('ix_ActiveTranslationMessages_key'), table_name='ActiveTranslationMessages')
op.drop_index(op.f('ix_ActiveTranslationMessages_from_developer'), table_name='ActiveTranslationMessages')
op.drop_index(op.f('ix_ActiveTranslationMessages_fmt'), table_name='ActiveTranslationMessages')
op.drop_index(op.f('ix_ActiveTranslationMessages_datetime'), table_name='ActiveTranslationMessages')
op.drop_index(op.f('ix_ActiveTranslationMessages_category'), table_name='ActiveTranslationMessages')
op.create_index('ix_Users_last_access_date', 'Users', ['last_access_date'], unique=False)
op.create_index('ix_Users_creation_date', 'Users', ['creation_date'], unique=False)
op.create_index('ix_TranslationValueSuggestions_target', 'TranslationValueSuggestions', ['target'], unique=False)
op.create_index('ix_TranslationValueSuggestions_language', 'TranslationValueSuggestions', ['language'], unique=False)
op.create_index('ix_TranslationValueSuggestions_human_key', 'TranslationValueSuggestions', ['human_key'], unique=False)
op.create_index('ix_TranslationUrls_url', 'TranslationUrls', ['url'], unique=True)
op.create_index('ix_TranslationUrls_automatic', 'TranslationUrls', ['automatic'], unique=False)
op.create_index('ix_TranslationSyncLogs_start_datetime', 'TranslationSyncLogs', ['start_datetime'], unique=False)
op.create_index('ix_TranslationSyncLogs_end_datetime', 'TranslationSyncLogs', ['end_datetime'], unique=False)
op.create_index('ix_TranslationSubscriptions_mechanism', 'TranslationSubscriptions', ['mechanism'], unique=False)
op.create_index('ix_TranslationSubscriptions_last_check', 'TranslationSubscriptions', ['last_check'], unique=False)
op.create_index('ix_TranslationNotificationRecipients_email', 'TranslationNotificationRecipients', ['email'], unique=True)
op.create_index('ix_TranslationNotificationRecipients_created', 'TranslationNotificationRecipients', ['created'], unique=False)
op.create_index('ix_TranslationMessageHistory_tool_id', 'TranslationMessageHistory', ['tool_id'], unique=False)
op.create_index('ix_TranslationMessageHistory_taken_from_default', 'TranslationMessageHistory', ['taken_from_default'], unique=False)
op.create_index('ix_TranslationMessageHistory_same_tool', 'TranslationMessageHistory', ['same_tool'], unique=False)
op.create_index('ix_TranslationMessageHistory_position', 'TranslationMessageHistory', ['position'], unique=False)
op.create_index('ix_TranslationMessageHistory_parent_translation_id', 'TranslationMessageHistory', ['parent_translation_id'], unique=False)
op.create_index('ix_TranslationMessageHistory_namespace', 'TranslationMessageHistory', ['namespace'], unique=False)
op.create_index('ix_TranslationMessageHistory_key', 'TranslationMessageHistory', ['key'], unique=False)
op.create_index('ix_TranslationMessageHistory_from_developer', 'TranslationMessageHistory', ['from_developer'], unique=False)
op.create_index('ix_TranslationMessageHistory_fmt', 'TranslationMessageHistory', ['fmt'], unique=False)
op.create_index('ix_TranslationMessageHistory_datetime', 'TranslationMessageHistory', ['datetime'], unique=False)
op.create_index('ix_TranslationMessageHistory_category', 'TranslationMessageHistory', ['category'], unique=False)
op.create_index('ix_TranslationKeySuggestions_target', 'TranslationKeySuggestions', ['target'], unique=False)
op.create_index('ix_TranslationKeySuggestions_language', 'TranslationKeySuggestions', ['language'], unique=False)
op.create_index('ix_TranslationKeySuggestions_key', 'TranslationKeySuggestions', ['key'], unique=False)
op.create_index('ix_TranslationExternalSuggestions_origin_language', 'TranslationExternalSuggestions', ['origin_language'], unique=False)
op.create_index('ix_TranslationExternalSuggestions_language', 'TranslationExternalSuggestions', ['language'], unique=False)
op.create_index('ix_TranslationExternalSuggestions_human_key_hash', 'TranslationExternalSuggestions', ['human_key_hash'], unique=False)
op.create_index('ix_TranslationExternalSuggestions_human_key', 'TranslationExternalSuggestions', ['human_key'], unique=False)
op.create_index('ix_TranslationExternalSuggestions_engine', 'TranslationExternalSuggestions', ['engine'], unique=False)
op.create_index('ix_TranslationCurrentActiveUsers_last_check', 'TranslationCurrentActiveUsers', ['last_check'], unique=False)
op.create_index('ix_TranslationBundles_target', 'TranslationBundles', ['target'], unique=False)
op.create_index('ix_TranslationBundles_language', 'TranslationBundles', ['language'], unique=False)
op.create_index('ix_TranslationBundles_from_developer', 'TranslationBundles', ['from_developer'], unique=False)
op.create_index('ix_TranslatedApps_url', 'TranslatedApps', ['url'], unique=True)
op.create_index('ix_RepositoryApps_url', 'RepositoryApps', ['url'], unique=False)
op.create_index('ix_RepositoryApps_translatable', 'RepositoryApps', ['translatable'], unique=False)
op.create_index('ix_RepositoryApps_repository', 'RepositoryApps', ['repository'], unique=False)
op.create_index('ix_RepositoryApps_name', 'RepositoryApps', ['name'], unique=False)
op.create_index('ix_RepositoryApps_last_processed_time', 'RepositoryApps', ['last_processed_time'], unique=False)
op.create_index('ix_RepositoryApps_last_processed_downloaded_hash', 'RepositoryApps', ['last_processed_downloaded_hash'], unique=False)
op.create_index('ix_RepositoryApps_last_processed_contents_hash', 'RepositoryApps', ['last_processed_contents_hash'], unique=False)
op.create_index('ix_RepositoryApps_last_download_change', 'RepositoryApps', ['last_download_change'], unique=False)
op.create_index('ix_RepositoryApps_last_check', 'RepositoryApps', ['last_check'], unique=False)
op.create_index('ix_RepositoryApps_last_change', 'RepositoryApps', ['last_change'], unique=False)
op.create_index('ix_RepositoryApps_failing_since', 'RepositoryApps', ['failing_since'], unique=False)
op.create_index('ix_RepositoryApps_failing', 'RepositoryApps', ['failing'], unique=False)
op.create_index('ix_RepositoryApps_external_id', 'RepositoryApps', ['external_id'], unique=False)
op.create_index('ix_RepositoryApps_downloaded_hash', 'RepositoryApps', ['downloaded_hash'], unique=False)
op.create_index('ix_RepositoryApps_contents_hash', 'RepositoryApps', ['contents_hash'], unique=False)
op.create_index('ix_RepositoryApps_adaptable', 'RepositoryApps', ['adaptable'], unique=False)
op.create_index('ix_Languages_language', 'Languages', ['language'], unique=True)
op.create_index('ix_GoLabOAuthUsers_email', 'GoLabOAuthUsers', ['email'], unique=True)
op.create_index('ix_GoLabOAuthUsers_display_name', 'GoLabOAuthUsers', ['display_name'], unique=False)
op.create_index('ix_Apps_unique_id', 'Apps', ['unique_id'], unique=True)
op.create_index('ix_Apps_owner_id', 'Apps', ['owner_id'], unique=False)
op.create_index('ix_Apps_name', 'Apps', ['name'], unique=False)
op.create_index('ix_Apps_modification_date', 'Apps', ['modification_date'], unique=False)
op.create_index('ix_Apps_last_access_date', 'Apps', ['last_access_date'], unique=False)
op.create_index('ix_Apps_creation_date', 'Apps', ['creation_date'], unique=False)
op.create_index('ix_Apps_composer', 'Apps', ['composer'], unique=False)
op.create_index('ix_ActiveTranslationMessages_tool_id', 'ActiveTranslationMessages', ['tool_id'], unique=False)
op.create_index('ix_ActiveTranslationMessages_taken_from_default', 'ActiveTranslationMessages', ['taken_from_default'], unique=False)
op.create_index('ix_ActiveTranslationMessages_same_tool', 'ActiveTranslationMessages', ['same_tool'], unique=False)
op.create_index('ix_ActiveTranslationMessages_position', 'ActiveTranslationMessages', ['position'], unique=False)
op.create_index('ix_ActiveTranslationMessages_namespace', 'ActiveTranslationMessages', ['namespace'], unique=False)
op.create_index('ix_ActiveTranslationMessages_key', 'ActiveTranslationMessages', ['key'], unique=False)
op.create_index('ix_ActiveTranslationMessages_from_developer', 'ActiveTranslationMessages', ['from_developer'], unique=False)
op.create_index('ix_ActiveTranslationMessages_fmt', 'ActiveTranslationMessages', ['fmt'], unique=False)
op.create_index('ix_ActiveTranslationMessages_datetime', 'ActiveTranslationMessages', ['datetime'], unique=False)
op.create_index('ix_ActiveTranslationMessages_category', 'ActiveTranslationMessages', ['category'], unique=False)
# ### end Alembic commands ###
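# Hedged note (not part of the generated migration): revisions like this one are
# normally applied or rolled back with the standard Alembic CLI, e.g.
#   alembic upgrade 92235b77ea53     # runs upgrade() up to this revision
#   alembic downgrade 381fdb66ec27   # runs downgrade() back to the parent revision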
|
porduna/appcomposer
|
alembic/versions/92235b77ea53_check_new.py
|
Python
|
bsd-2-clause
| 31,211
|
#!/opt/epd/bin/python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
long_description = '''
This Python module is a set of routines designed to ease setting up SIESTA calculations,
adding them to a PBS queue and interpreting the results.
'''
setup(name='SHS',
version='0.4',
description='Siesta help scripts',
long_description=long_description,
author='Andrey Sobolev',
author_email='andrey@physics.susu.ac.ru',
url='http://asobolev.ddns.info',
packages=find_packages('.'),
package_data={'shs': ['PBS/*.pbs', 'slurm/*'],
'shs.gui': ['data-export-icon.png', ],
},
scripts=['bin/plotmde',
'bin/plotrdf',
'bin/plotmsd',
'bin/gui.py',
'bin/setup_input.py',
],
install_requires=["numpy>=1.8.1",
"scipy>=0.14.0",
"matplotlib>=1.3.1",
"wxPython>=2.8.10.0"],
license='MIT'
)
|
ansobolev/shs
|
setup.py
|
Python
|
mit
| 1,055
|
from time import sleep
import sys
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
BgGreen = '\033[42m\033[30m'
sys.stdout.write(bcolors.BgGreen+' '+bcolors.ENDC)
print " "
#sys.stdout.write(REVERSE + CYAN)
#print "From now on change to cyan, in reverse mode"
#print "NOTE: 'CYAN + REVERSE' wouldn't work"
#
#blank = bcolors.WARNING + " " + bcolors.ENDC
#
#for i in range(21):
# sys.stdout.write('\r')
# sys.stdout.write("[%-20s] %d%%" % (blank*i, 5*i))
# sys.stdout.flush()
# sleep(0.25)
#
#print
|
cpausmit/Kraken
|
bin/progress.py
|
Python
|
mit
| 672
|
#!/usr/bin/env python2.7
# Copyright 2013 Virantha Ekanayake All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Run Tesseract to generate hocr file
"""
import os, sys
import logging
import subprocess
import glob
from subprocess import CalledProcessError
from multiprocessing import Pool
from pypdfocr_interrupts import init_worker
def error(text):
print("ERROR: %s" % text)
sys.exit(-1)
# Ugly hack to pass in object method to the multiprocessing library
# From http://www.rueckstiess.net/research/snippets/show/ca1d7d90
# Basically gets passed in a pair of (self, arg), and calls the method
def unwrap_self(arg, **kwarg):
return PyTesseract.make_hocr_from_pnm(*arg, **kwarg)
class PyTesseract(object):
"""Class to wrap all the tesseract calls"""
def __init__(self, config):
"""
Detect windows tesseract location.
"""
self.lang = 'eng'
self.required = "3.02.02"
self.threads = config.get('threads',4)
if "binary" in config: # Override location of binary
binary = config['binary']
if os.name == 'nt':
binary = '"%s"' % binary
binary = binary.replace("\\", "\\\\")
logging.info("Setting location for tesseracdt executable to %s" % (binary))
else:
if str(os.name) == 'nt':
# Explicit str here to get around some MagicMock stuff for testing that I don't quite understand
binary = '"c:\\Program Files (x86)\\Tesseract-OCR\\tesseract.exe"'
else:
binary = "tesseract"
self.binary = binary
self.msgs = {
'TS_MISSING': """
Could not execute %s
Please make sure you have Tesseract installed correctly
""" % self.binary,
'TS_VERSION':'Tesseract version is too old',
'TS_img_MISSING':'Cannot find specified tiff file',
'TS_FAILED': 'Tesseract-OCR execution failed!',
}
def _is_version_uptodate(self):
"""
Make sure the version is current
"""
logging.info("Checking tesseract version")
cmd = '%s -v' % (self.binary)
logging.info(cmd)
try:
ret_output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
except CalledProcessError:
# Could not run tesseract
error(self.msgs['TS_MISSING'])
ver_str = '0.0.0'
for line in ret_output.splitlines():
if 'tesseract' in line:
ver_str = line.split(' ')[1]
if ver_str.endswith('dev'): # Fix for version strings that end in 'dev'
ver_str = ver_str[:-3]
# Iterate through the version dots
ver = [int(x) for x in ver_str.split('.')]
req = [int(x) for x in self.required.split('.')]
# Aargh, in windows 3.02.02 is reported as version 3.02
# SFKM
if str(os.name) == 'nt':
req = req[:2]
version_good = False
for i,num in enumerate(req):
if len(ver) < i+1:
# This minor version number is not present in tesseract, so it must be
# lower than required. (3.02 < 3.02.01)
break
if ver[i]==num and len(ver) == i+1 and len(ver)==len(req):
# 3.02.02 == 3.02.02
version_good = True
continue
if ver[i]>num:
# 4.0 > 3.02.02
# 3.03.02 > 3.02.02
version_good = True
break
if ver[i]<num:
# 3.01.02 < 3.02.02
break
return version_good, ver_str
def _warn(self, msg): # pragma: no cover
print("WARNING: %s" % msg)
def make_hocr_from_pnms(self, fns):
uptodate,ver = self._is_version_uptodate()
if not uptodate:
error(self.msgs['TS_VERSION']+ " (found %s, required %s)" % (ver, self.required))
# Glob it
#fns = glob.glob(img_filename)
logging.debug("Making pool for tesseract")
pool = Pool(processes=self.threads, initializer=init_worker)
try:
hocr_filenames = pool.map(unwrap_self, zip([self]*len(fns), fns))
pool.close()
        except (KeyboardInterrupt, Exception):
print("Caught keyboard interrupt... terminating")
pool.terminate()
raise
finally:
pool.join()
return zip(fns,hocr_filenames)
def make_hocr_from_pnm(self, img_filename):
basename,filext = os.path.splitext(img_filename)
hocr_filename = "%s.html" % basename
if not os.path.exists(img_filename):
error(self.msgs['TS_img_MISSING'] + " %s" % (img_filename))
logging.info("Running OCR on %s to create %s.html" % (img_filename, basename))
cmd = '%s "%s" "%s" -psm 1 -c hocr_font_info=1 -l %s hocr' % (self.binary, img_filename, basename, self.lang)
logging.info(cmd)
try:
ret_output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
# Could not run tesseract
print e.output
self._warn (self.msgs['TS_FAILED'])
if os.path.isfile(hocr_filename):
# Output format is html for old versions of tesseract
logging.info("Created %s.html" % basename)
return hocr_filename
else:
# Try changing extension to .hocr for tesseract 3.03 and higher
hocr_filename = "%s.hocr" % basename
if os.path.isfile(hocr_filename):
logging.info("Created %s.hocr" % basename)
return hocr_filename
else:
error(self.msgs['TS_FAILED'])
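# Hedged usage sketch (not part of the original module; the file names are
# hypothetical). The config dict read in __init__ only looks at the optional
# 'threads' and 'binary' keys:
#
#   ocr = PyTesseract({'threads': 2})
#   for pnm, hocr in ocr.make_hocr_from_pnms(['page_0001.pnm', 'page_0002.pnm']):
#       print pnm, '->', hocr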
|
virantha/pypdfocr
|
pypdfocr/pypdfocr_tesseract.py
|
Python
|
apache-2.0
| 6,463
|
s = 'hello world'
print s[0] # print first element, h
print s[1] # print e
print s[-1] # will print the last character
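# A couple of extra illustrative lines (not in the original snippet):
# negative indices count from the end, so s[-2] is 'l', and slices work too
print s[0:5] # prints 'hello'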
|
wavicles/pycode-browser
|
Code/PythonBook/chap2/string.py
|
Python
|
gpl-3.0
| 153
|
# Originally from:
# github.com/pytorch/tutorials/blob/60d6ef365e36f3ba82c2b61bf32cc40ac4e86c7b/custom_directives.py # noqa
from docutils.parsers.rst import Directive, directives
from docutils.statemachine import StringList
from docutils import nodes
import os
import sphinx_gallery
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
GALLERY_TEMPLATE = """
.. raw:: html
<div class="sphx-glr-thumbcontainer" tooltip="{tooltip}">
.. only:: html
.. figure:: {thumbnail}
{description}
.. raw:: html
</div>
"""
class CustomGalleryItemDirective(Directive):
"""Create a sphinx gallery style thumbnail.
    tooltip and figure are self-explanatory. Description can be a link to
    a document, as in the example below.
Example usage:
.. customgalleryitem::
:tooltip: I am writing this tutorial to focus specifically on NLP.
:figure: /_static/img/thumbnails/babel.jpg
:description: :doc:`/beginner/deep_learning_nlp_tutorial`
If figure is specified, a thumbnail will be made out of it and stored in
_static/thumbs. Therefore, consider _static/thumbs as a "built" directory.
"""
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = True
option_spec = {
"tooltip": directives.unchanged,
"figure": directives.unchanged,
"description": directives.unchanged
}
has_content = False
add_index = False
def run(self):
# Cutoff the `tooltip` after 195 chars.
if "tooltip" in self.options:
tooltip = self.options["tooltip"]
if len(self.options["tooltip"]) > 195:
tooltip = tooltip[:195] + "..."
else:
raise ValueError("Need to provide :tooltip: under "
"`.. customgalleryitem::`.")
# Generate `thumbnail` used in the gallery.
if "figure" in self.options:
env = self.state.document.settings.env
rel_figname, figname = env.relfn2path(self.options["figure"])
thumbnail = os.path.join("_static/thumbs/",
os.path.basename(figname))
os.makedirs("_static/thumbs", exist_ok=True)
sphinx_gallery.gen_rst.scale_image(figname, thumbnail, 400, 280)
else:
thumbnail = "/_static/img/thumbnails/default.png"
if "description" in self.options:
description = self.options["description"]
else:
raise ValueError("Need to provide :description: under "
"`customgalleryitem::`.")
thumbnail_rst = GALLERY_TEMPLATE.format(
tooltip=tooltip, thumbnail=thumbnail, description=description)
thumbnail = StringList(thumbnail_rst.split("\n"))
thumb = nodes.paragraph()
self.state.nested_parse(thumbnail, self.content_offset, thumb)
return [thumb]
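# Hedged usage note (not part of the original file): a project's Sphinx conf.py
# would typically register the directive with something like
#
#   def setup(app):
#       app.add_directive('customgalleryitem', CustomGalleryItemDirective)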
|
ujvl/ray-ng
|
doc/source/custom_directives.py
|
Python
|
apache-2.0
| 2,945
|
from httplib import HTTPConnection, _CS_IDLE
import urlparse
def pipeline(domain,pages,max_out_bound=4,debuglevel=0):
pagecount = len(pages)
conn = HTTPConnection(domain)
conn.set_debuglevel(debuglevel)
respobjs = [None]*pagecount
finished = [False]*pagecount
data = [None]*pagecount
headers = {'Host':domain,'Content-Length':0,'Connection':'Keep-Alive'}
while not all(finished):
# Send
out_bound = 0
for i,page in enumerate(pages):
if out_bound >= max_out_bound:
break
elif page and not finished[i] and respobjs[i] is None:
if debuglevel > 0:
print 'Sending request for %r...' % (page,)
conn._HTTPConnection__state = _CS_IDLE # FU private variable!
conn.request("GET", page, None, headers)
respobjs[i] = conn.response_class(conn.sock, strict=conn.strict, method=conn._method)
out_bound += 1
# Try to read a response
for i,resp in enumerate(respobjs):
if resp is None:
continue
if debuglevel > 0:
print 'Retrieving %r...' % (pages[i],)
out_bound -= 1
skip_read = False
resp.begin()
if debuglevel > 0:
print ' %d %s' % (resp.status, resp.reason)
if 200 <= resp.status < 300:
# Ok
data[i] = resp.read()
cookie = resp.getheader('Set-Cookie')
if cookie is not None:
headers['Cookie'] = cookie
skip_read = True
finished[i] = True
respobjs[i] = None
elif 300 <= resp.status < 400:
# Redirect
loc = resp.getheader('Location')
respobjs[i] = None
parsed = loc and urlparse.urlparse(loc)
if not parsed:
# Missing or empty location header
data[i] = (resp.status, resp.reason)
finished[i] = True
                elif parsed.netloc != '' and parsed.netloc != domain:
# Redirect to another host
data[i] = (resp.status, resp.reason, loc)
finished[i] = True
else:
path = urlparse.urlunparse(parsed._replace(scheme='',netloc='',fragment=''))
if debuglevel > 0:
print ' Updated %r to %r' % (pages[i],path)
pages[i] = path
elif resp.status >= 400:
# Failed
data[i] = (resp.status, resp.reason)
finished[i] = True
respobjs[i] = None
if resp.will_close:
# Connection (will be) closed, need to resend
conn.close()
if debuglevel > 0:
print ' Connection closed'
for j,f in enumerate(finished):
                    if not f and respobjs[j] is not None:
if debuglevel > 0:
print ' Discarding out-bound request for %r' % (pages[j],)
                        respobjs[j] = None
break
elif not skip_read:
resp.read() # read any data
if any(not f and respobjs[j] is None for j,f in enumerate(finished)):
# Send another pending request
break
else:
break # All respobjs are None?
return data
if __name__ == '__main__':
domain = 'en.wikipedia.org'
pages = ('/wiki/HTTP_pipelining', '/wiki/HTTP', '/wiki/HTTP_persistent_connection')
data = pipeline(domain,pages,max_out_bound=2,debuglevel=1)
for i,page in enumerate(data):
print
print '==== Page %r ====' % (pages[i],)
print page[:512]
|
ActiveState/code
|
recipes/Python/576673_Python_HTTP_Pipelining/recipe-576673.py
|
Python
|
mit
| 3,921
|
# This module implements the 1-D Kalman filter formulas: `update` combines two
# Gaussians (measurement update) and `predict` adds them (motion update),
# returning the new mean and variance.
def update(mean1, var1, mean2, var2):
new_mean = ((mean1 * var2) + (mean2*var1))/(var1 + var2)
new_var = 1/(1/var1 + 1/var2)
return [new_mean, new_var]
def predict(mean1, var1, mean2, var2):
new_mean = mean1 + mean2
new_var = var1 + var2
return [new_mean, new_var]
print 'update : ', update(10.,4., 12.,4.)
print predict(10.,4., 12.,4.)
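# Hedged worked example (not in the original file): with the calls above,
# update(10., 4., 12., 4.) returns [11.0, 2.0] because
#   new_mean = (10*4 + 12*4) / (4 + 4) = 11.0 and new_var = 1 / (1/4 + 1/4) = 2.0,
# while predict(10., 4., 12., 4.) simply adds the Gaussians: [22.0, 8.0].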
|
napjon/moocs_solution
|
robotics-udacity/2.4.py
|
Python
|
mit
| 465
|
# coding=utf-8
# Copyright 2022 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines reset that are compatible with learnable stridding."""
import functools
from typing import Optional, Sequence, Tuple, Union
import gin
import tensorflow as tf
Number = Union[float, int]
Stride = Union[Number, Tuple[Number, Number]]
def data_format(channels_first: bool = True) -> str:
return 'channels_first' if channels_first else 'channels_last'
def conv2d(
*args, channels_first: bool = True, weight_decay: float = 0.0, **kwargs):
return tf.keras.layers.Conv2D(
*args,
kernel_initializer='he_normal',
kernel_regularizer=tf.keras.regularizers.L2(weight_decay),
data_format=data_format(channels_first),
use_bias=False,
**kwargs)
@gin.configurable
def batch_norm(channels_first: bool = True, **kwargs):
axis = 1 if channels_first else 3
return tf.keras.layers.BatchNormalization(axis=axis, **kwargs)
@gin.configurable
class ResidualLayer(tf.keras.layers.Layer):
"""A generic residual layer for Resnet, using the pre-act formulation.
This resnet can represent an `IdBlock` or a `ProjBlock` by setting the
`project` parameter and can be compatible with Spectral or Learnable poolings
by setting the `pooling_cls` parameter.
The pooling_cls and strides will be overwritten automatically in case of an
ID block.
The pre-act formulation applies batch norm and non-linearity before the first
conv.
"""
def __init__(self,
filters: int,
kernel_size: int = gin.REQUIRED,
strides: Stride = (1, 1),
pooling_cls=None,
project: bool = False,
channels_first: bool = True,
weight_decay: float = 5e-3,
**kwargs):
super().__init__(**kwargs)
# If we are in an Id Layer there is no striding of any kind.
pooling_cls = None if not project else pooling_cls
strides = (1, 1) if not project else strides
# DiffStride compatibility: the strides go into the pooling layer.
if pooling_cls is not None:
conv_strides = (1, 1)
self._pooling = pooling_cls(
strides=strides, data_format=data_format(channels_first))
else:
self._pooling = tf.identity
conv_strides = strides
self._strided_conv = conv2d(
filters, kernel_size, strides=conv_strides, padding='same',
channels_first=channels_first, weight_decay=weight_decay)
# The second convolution is a regular one with no strides, no matter what.
self._unstrided_conv = conv2d(
filters, kernel_size, strides=(1, 1), padding='same',
channels_first=channels_first, weight_decay=weight_decay)
self._bns = tuple(batch_norm(channels_first) for _ in range(2))
self._shortcut_conv = None
if project:
self._shortcut_conv = conv2d(
filters, kernel_size=1, strides=conv_strides, padding='same',
channels_first=channels_first, weight_decay=weight_decay)
def call(self, inputs: tf.Tensor, training: bool = True) -> tf.Tensor:
shortcut_x = inputs
x = self._bns[0](inputs, training=training)
x = tf.nn.relu(x)
x = self._strided_conv(x)
x = self._pooling(x)
x = self._bns[1](x, training=training)
x = tf.nn.relu(x)
x = self._unstrided_conv(x)
if self._shortcut_conv is not None:
shortcut_x = self._shortcut_conv(shortcut_x)
shortcut_x = self._pooling(shortcut_x)
return x + shortcut_x
@gin.configurable
class ResnetBlock(tf.keras.Sequential):
"""A block of residual layers sharing the same number of filters.
  Only the first residual layer of the block may be strided; this is
  controlled by the `project_first` parameter.
The kwargs are passed down to the ResidualLayer.
"""
def __init__(self,
filters: int = gin.REQUIRED,
strides: Stride = gin.REQUIRED,
num_layers: int = 2,
project_first: bool = True,
**kwargs):
residual_fn = functools.partial(
ResidualLayer, filters=filters, strides=strides, **kwargs)
blocks = [residual_fn(project=True)] if project_first else []
num_left_layers = num_layers - int(project_first)
blocks.extend([residual_fn(project=False) for i in range(num_left_layers)])
super().__init__(blocks)
@gin.configurable
class Resnet(tf.keras.Sequential):
"""A generic Resnet class, using the pre-activation implementation.
  Depending on the number of blocks and the filters used, it can instantiate
  e.g. a Resnet18 or a Resnet56.
The kwargs are passed down to the ResnetBlock layer.
"""
def __init__(self,
filters: Sequence[int],
strides: Sequence[Stride],
num_output_classes: int = gin.REQUIRED,
output_activation: Optional[str] = None,
id_only: Sequence[int] = (),
channels_first: bool = True,
pooling_cls=None,
weight_decay: float = 5e-3,
**kwargs):
if len(filters) != len(strides):
raise ValueError(f'The number of `filters` ({len(filters)}) should match'
f' the number of strides ({len(strides)})')
df = data_format(channels_first)
layers = [
tf.keras.layers.Permute((3, 1, 2)) if channels_first else None,
conv2d(filters[0], 3, padding='same',
strides=(1, 1) if pooling_cls is not None else strides[0],
channels_first=channels_first, weight_decay=weight_decay),
pooling_cls(
strides=strides[0], data_format=df) if pooling_cls else None,
]
for i, (num_filters, stride) in enumerate(zip(filters[1:], strides[1:])):
layers.append(ResnetBlock(filters=num_filters,
strides=stride,
project_first=(i not in id_only),
channels_first=channels_first,
weight_decay=weight_decay,
pooling_cls=pooling_cls,
**kwargs))
layers.extend([
batch_norm(channels_first),
tf.keras.layers.ReLU(),
tf.keras.layers.GlobalAveragePooling2D(data_format=df),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(
num_output_classes,
activation=output_activation,
kernel_initializer='he_normal',
kernel_regularizer=tf.keras.regularizers.L2(weight_decay),
bias_regularizer=tf.keras.regularizers.L2(weight_decay),
),
])
super().__init__(list(filter(None, layers)))
@gin.configurable
def resnet18(strides=None, **kwargs):
strides = [1, 1, 2, 2, 2] if strides is None else strides
filters = [64, 64, 128, 256, 512]
return Resnet(
filters, strides, id_only=[0], num_layers=2, kernel_size=3, **kwargs)
@gin.configurable
def resnet56(strides=None, **kwargs):
filters = [16, 16, 32, 64]
strides = [1, 1, 2, 2] if strides is None else strides
return Resnet(filters, strides, num_layers=9, kernel_size=3, **kwargs)
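# Hedged usage sketch (not in the original file; shapes and arguments are
# illustrative): with channels_first=False no Permute layer is inserted, so an
# NHWC batch can be fed directly.
#
#   model = resnet18(num_output_classes=10, channels_first=False)
#   logits = model(tf.zeros((1, 32, 32, 3)), training=False)  # shape (1, 10)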
|
google-research/diffstride
|
diffstride/resnet.py
|
Python
|
apache-2.0
| 7,666
|
# -*- coding: utf-8 -*-
#
# This file is part of INGInious. See the LICENSE and the COPYRIGHTS files for
# more information about the licensing of this file.
""" An algorithm contest plugin for INGInious. Based on the same principles than contests like ACM-ICPC. """
import copy
from collections import OrderedDict
from datetime import datetime, timedelta
import pymongo
import flask
from werkzeug.exceptions import NotFound
from inginious.frontend.accessible_time import AccessibleTime
from inginious.frontend.pages.course_admin.utils import INGIniousAdminPage
from inginious.frontend.pages.utils import INGIniousAuthPage
def add_admin_menu(course): # pylint: disable=unused-argument
""" Add a menu for the contest settings in the administration """
return ('contest', '<i class="fa fa-trophy fa-fw"></i> Contest')
def task_accessibility(course, task, default): # pylint: disable=unused-argument
contest_data = get_contest_data(course)
if contest_data['enabled']:
return AccessibleTime(contest_data['start'] + '/')
else:
return default
def additional_headers():
""" Additional HTML headers """
return '<link href="' + flask.request.url_root \
+ '/static/plugins/contests/scoreboard.css" rel="stylesheet">' \
'<script src="' + flask.request.url_root + '/static/plugins/contests/jquery.countdown.min.js"></script>' \
'<script src="' + flask.request.url_root + '/static/plugins/contests/contests.js"></script>'
def get_contest_data(course):
""" Returns the settings of the contest for this course """
return course.get_descriptor().get('contest_settings', {"enabled": False,
"start": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
"end": (datetime.now() + timedelta(hours=1)).strftime(
"%Y-%m-%d %H:%M:%S"),
"blackout": 0,
"penalty": 20})
def course_menu(course, template_helper):
""" Displays some informations about the contest on the course page"""
contest_data = get_contest_data(course)
if contest_data['enabled']:
start = datetime.strptime(contest_data['start'], "%Y-%m-%d %H:%M:%S")
end = datetime.strptime(contest_data['end'], "%Y-%m-%d %H:%M:%S")
blackout = end - timedelta(hours=contest_data['blackout'])
return template_helper.render("course_menu.html", template_folder="frontend/plugins/contests",
course=course, start=start, end=end, blackout=blackout)
else:
return None
class ContestScoreboard(INGIniousAuthPage):
""" Displays the scoreboard of the contest """
def GET_AUTH(self, courseid): # pylint: disable=arguments-differ
course = self.course_factory.get_course(courseid)
contest_data = get_contest_data(course)
if not contest_data['enabled']:
raise NotFound()
start = datetime.strptime(contest_data['start'], "%Y-%m-%d %H:%M:%S")
end = datetime.strptime(contest_data['end'], "%Y-%m-%d %H:%M:%S")
blackout = end - timedelta(hours=contest_data['blackout'])
users = self.user_manager.get_course_registered_users(course)
tasks = list(course.get_tasks().keys())
db_results = self.database.submissions.find({
"username": {"$in": users},
"courseid": courseid,
"submitted_on": {"$gte": start, "$lt": blackout},
"status": "done"},
{"username": True, "_id": False, "taskid": True, "result": True, "submitted_on": True}).sort([("submitted_on", pymongo.ASCENDING)])
task_status = {taskid: {"status": "NA", "tries": 0} for taskid in tasks}
results = {username: {"name": self.user_manager.get_user_realname(username), "tasks": copy.deepcopy(task_status)} for username in users}
activity = []
# Compute stats for each submission
task_succeeded = {taskid: False for taskid in tasks}
for submission in db_results:
for username in submission["username"]:
if submission['taskid'] not in tasks:
continue
if username not in users:
continue
status = results[username]["tasks"][submission['taskid']]
if status["status"] == "AC" or status["status"] == "ACF":
continue
else:
if submission['result'] == "success":
if not task_succeeded[submission['taskid']]:
status["status"] = "ACF"
task_succeeded[submission['taskid']] = True
else:
status["status"] = "AC"
status["tries"] += 1
status["time"] = submission['submitted_on']
status["score"] = (submission['submitted_on']
+ timedelta(minutes=contest_data["penalty"]*(status["tries"] - 1))
- start).total_seconds() / 60
elif submission['result'] == "failed" or submission['result'] == "killed":
status["status"] = "WA"
status["tries"] += 1
elif submission['result'] == "timeout":
status["status"] = "TLE"
status["tries"] += 1
else: # other internal error
continue
activity.append({"user": results[username]["name"],
"when": submission['submitted_on'],
"result": (status["status"] == 'AC' or status["status"] == 'ACF'),
"taskid": submission['taskid']})
activity.reverse()
# Compute current score
for user in results:
score = [0, 0]
for data in list(results[user]["tasks"].values()):
if "score" in data:
score[0] += 1
score[1] += data["score"]
results[user]["score"] = tuple(score)
# Sort everybody
results = OrderedDict(sorted(list(results.items()), key=lambda t: (-t[1]["score"][0], t[1]["score"][1])))
# Compute ranking
old = None
current_rank = 0
for cid, user in enumerate(results.keys()):
if results[user]["score"] != old:
old = results[user]["score"]
current_rank = cid + 1
results[user]["rank"] = current_rank
results[user]["displayed_rank"] = str(current_rank)
else:
results[user]["rank"] = current_rank
results[user]["displayed_rank"] = ""
return self.template_helper.render("scoreboard.html", template_folder="frontend/plugins/contests",
course=course, start=start, end=end, blackout=blackout, tasks=tasks,
results=results, activity=activity)
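# Worked example of the scoring above (hedged, the numbers are illustrative):
# with the default penalty of 20 minutes, a task first accepted 90 minutes
# after the contest start on the third attempt contributes
# 90 + 20 * (3 - 1) = 130 penalty minutes; the scoreboard then ranks by tasks
# solved (descending) and total penalty minutes (ascending).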
class ContestAdmin(INGIniousAdminPage):
""" Contest settings for a course """
def save_contest_data(self, course, contest_data):
""" Saves updated contest data for the course """
course_content = self.course_factory.get_course_descriptor_content(course.get_id())
course_content["contest_settings"] = contest_data
self.course_factory.update_course_descriptor_content(course.get_id(), course_content)
def GET_AUTH(self, courseid): # pylint: disable=arguments-differ
""" GET request: simply display the form """
course, __ = self.get_course_and_check_rights(courseid, allow_all_staff=False)
contest_data = get_contest_data(course)
return self.template_helper.render("admin.html", template_folder="frontend/plugins/contests", course=course,
data=contest_data, errors=None, saved=False)
def POST_AUTH(self, courseid): # pylint: disable=arguments-differ
""" POST request: update the settings """
course, __ = self.get_course_and_check_rights(courseid, allow_all_staff=False)
contest_data = get_contest_data(course)
new_data = flask.request.form
errors = []
try:
contest_data['enabled'] = new_data.get('enabled', '0') == '1'
contest_data['start'] = new_data["start"]
contest_data['end'] = new_data["end"]
try:
start = datetime.strptime(contest_data['start'], "%Y-%m-%d %H:%M:%S")
except:
errors.append('Invalid start date')
try:
end = datetime.strptime(contest_data['end'], "%Y-%m-%d %H:%M:%S")
except:
errors.append('Invalid end date')
if len(errors) == 0:
if start >= end:
errors.append('Start date should be before end date')
try:
contest_data['blackout'] = int(new_data["blackout"])
if contest_data['blackout'] < 0:
errors.append('Invalid number of hours for the blackout: should be greater than 0')
except:
errors.append('Invalid number of hours for the blackout')
try:
contest_data['penalty'] = int(new_data["penalty"])
if contest_data['penalty'] < 0:
                    errors.append('Invalid number of minutes for the penalty: it should not be negative')
except:
errors.append('Invalid number of minutes for the penalty')
except:
errors.append('User returned an invalid form')
if len(errors) == 0:
self.save_contest_data(course, contest_data)
return self.template_helper.render("admin.html", template_folder="frontend/plugins/contests", course=course,
data=contest_data, errors=None, saved=True)
else:
return self.template_helper.render("admin.html", template_folder="frontend/plugins/contests", course=course,
data=contest_data, errors=errors, saved=False)
def init(plugin_manager, course_factory, client, config): # pylint: disable=unused-argument
"""
Init the contest plugin.
Available configuration:
::
{
"plugin_module": "inginious.frontend.plugins.contests"
}
"""
plugin_manager.add_page('/contest/<courseid>', ContestScoreboard.as_view('contestscoreboard'))
plugin_manager.add_page('/admin/<courseid>/contest', ContestAdmin.as_view('contestadmin'))
plugin_manager.add_hook('course_admin_menu', add_admin_menu)
plugin_manager.add_hook('task_accessibility', task_accessibility)
plugin_manager.add_hook('header_html', additional_headers)
plugin_manager.add_hook('course_menu', course_menu)
|
UCL-INGI/INGInious
|
inginious/frontend/plugins/contests/__init__.py
|
Python
|
agpl-3.0
| 11,265
|
# Copyright (C) 2014, CERN
# This software is distributed under the terms of the GNU General Public
# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING".
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as Intergovernmental Organization
# or submit itself to any jurisdiction.
import os
import logging
import jens.git as git
from jens.errors import JensError, JensGitError
from jens.git import GIT_CLONE_TIMEOUT, GIT_FETCH_TIMEOUT
def refresh_metadata(settings, lock):
lock.renew(2 * GIT_FETCH_TIMEOUT)
_refresh_environments(settings)
_refresh_repositories(settings)
def validate_directories(settings):
directories = [settings.BAREDIR,
settings.CLONEDIR,
settings.CACHEDIR,
settings.CACHEDIR + "/environments",
settings.REPO_METADATADIR,
settings.ENV_METADATADIR]
for partition in ("modules", "hostgroups", "common"):
directories.append(settings.BAREDIR + "/%s" % partition)
directories.append(settings.CLONEDIR + "/%s" % partition)
for directory in directories:
_validate_directory(directory)
if settings.LOCK_TYPE == 'FILE':
_validate_directory(settings.FILELOCK_LOCKDIR)
if not os.path.exists(settings.ENV_METADATADIR + "/.git"):
raise JensError("%s not initialized (no Git repository found)" % \
settings.ENV_METADATADIR)
if not os.path.exists(settings.REPO_METADATA):
raise JensError("Couldn't find metadata of repositories (%s not initialized)" % \
settings.REPO_METADATADIR)
def _validate_directory(directory):
try:
os.stat(directory)
except OSError:
raise JensError("Directory '%s' does not exist" % directory)
if not os.access(directory, os.W_OK):
raise JensError("Cannot read or write on directory '%s'" % directory)
def _refresh_environments(settings):
logging.debug("Refreshing environment metadata...")
path = settings.ENV_METADATADIR
try:
git.fetch(path)
git.reset(path, "origin/master", hard=True)
except JensGitError, error:
raise JensError("Couldn't refresh environments metadata (%s)" % error)
def _refresh_repositories(settings):
logging.debug("Refreshing repositories metadata...")
path = settings.REPO_METADATADIR
try:
git.fetch(path)
git.reset(path, "origin/master", hard=True)
except JensGitError, error:
raise JensError("Couldn't refresh repositories metadata (%s)" % error)
|
AlbertoPeon/jens
|
src/jens/maintenance.py
|
Python
|
gpl-3.0
| 2,566
|
# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Defines some base class related to managing green threads.
"""
from __future__ import absolute_import
import abc
from collections import OrderedDict
import logging
import socket
import time
import traceback
import weakref
import netaddr
import six
from ryu.lib import hub
from ryu.lib import sockopt
from ryu.lib import ip
from ryu.lib.hub import Timeout
from ryu.lib.packet.bgp import RF_IPv4_UC
from ryu.lib.packet.bgp import RF_IPv6_UC
from ryu.lib.packet.bgp import RF_IPv4_VPN
from ryu.lib.packet.bgp import RF_IPv6_VPN
from ryu.lib.packet.bgp import RF_L2_EVPN
from ryu.lib.packet.bgp import RF_IPv4_FLOWSPEC
from ryu.lib.packet.bgp import RF_IPv6_FLOWSPEC
from ryu.lib.packet.bgp import RF_VPNv4_FLOWSPEC
from ryu.lib.packet.bgp import RF_VPNv6_FLOWSPEC
from ryu.lib.packet.bgp import RF_L2VPN_FLOWSPEC
from ryu.lib.packet.bgp import RF_RTC_UC
from ryu.services.protocols.bgp.utils.circlist import CircularListType
from ryu.services.protocols.bgp.utils.evtlet import LoopingCall
# Logger instance for this module.
LOG = logging.getLogger('bgpspeaker.base')
# Pointer to active/available OrderedDict.
OrderedDict = OrderedDict
# Currently supported address families.
SUPPORTED_GLOBAL_RF = {
RF_IPv4_UC,
RF_IPv6_UC,
RF_IPv4_VPN,
RF_RTC_UC,
RF_IPv6_VPN,
RF_L2_EVPN,
RF_IPv4_FLOWSPEC,
RF_IPv6_FLOWSPEC,
RF_VPNv4_FLOWSPEC,
RF_VPNv6_FLOWSPEC,
RF_L2VPN_FLOWSPEC,
}
# Various error codes
ACTIVITY_ERROR_CODE = 100
RUNTIME_CONF_ERROR_CODE = 200
BIN_ERROR = 300
NET_CTRL_ERROR_CODE = 400
API_ERROR_CODE = 500
PREFIX_ERROR_CODE = 600
BGP_PROCESSOR_ERROR_CODE = 700
CORE_ERROR_CODE = 800
# Registry of custom exceptions
# Key: code:sub-code
# Value: exception class
_EXCEPTION_REGISTRY = {}
class BGPSException(Exception):
"""Base exception class for all BGPS related exceptions.
"""
CODE = 1
SUB_CODE = 1
DEF_DESC = 'Unknown exception.'
def __init__(self, desc=None):
super(BGPSException, self).__init__()
if not desc:
desc = self.__class__.DEF_DESC
kls = self.__class__
self.message = '%d.%d - %s' % (kls.CODE, kls.SUB_CODE, desc)
def __repr__(self):
kls = self.__class__
return '<%s(desc=%s)>' % (kls, self.message)
def __str__(self, *args, **kwargs):
return self.message
def add_bgp_error_metadata(code, sub_code, def_desc='unknown'):
"""Decorator for all exceptions that want to set exception class meta-data.
"""
# Check registry if we already have an exception with same code/sub-code
if _EXCEPTION_REGISTRY.get((code, sub_code)) is not None:
raise ValueError('BGPSException with code %d and sub-code %d '
'already defined.' % (code, sub_code))
def decorator(subclass):
"""Sets class constants for exception code and sub-code.
        If the given class is a sub-class of BGPSException we set its class constants.
"""
if issubclass(subclass, BGPSException):
_EXCEPTION_REGISTRY[(code, sub_code)] = subclass
subclass.CODE = code
subclass.SUB_CODE = sub_code
subclass.DEF_DESC = def_desc
return subclass
return decorator
@add_bgp_error_metadata(code=ACTIVITY_ERROR_CODE,
sub_code=1,
def_desc='Unknown activity exception.')
class ActivityException(BGPSException):
"""Base class for exceptions related to Activity.
"""
pass
@six.add_metaclass(abc.ABCMeta)
class Activity(object):
"""Base class for a thread of execution that provides some custom settings.
Activity is also a container of other activities or threads that it has
    started. Inside an Activity you should always use one of the spawn methods
    to start another activity or greenthread. Activity also holds pointers
    to the sockets that it or its child activities or threads have created.
"""
def __init__(self, name=None):
self._name = name
if self._name is None:
self._name = 'UnknownActivity: ' + str(time.time())
self._child_thread_map = weakref.WeakValueDictionary()
self._child_activity_map = weakref.WeakValueDictionary()
self._asso_socket_map = weakref.WeakValueDictionary()
self._timers = weakref.WeakValueDictionary()
self._started = False
@property
def name(self):
return self._name
@property
def started(self):
return self._started
def _validate_activity(self, activity):
"""Checks the validity of the given activity before it can be started.
"""
if not self._started:
raise ActivityException(desc='Tried to spawn a child activity'
' before Activity was started.')
if activity.started:
raise ActivityException(desc='Tried to start an Activity that was '
'already started.')
def _spawn_activity(self, activity, *args, **kwargs):
"""Starts *activity* in a new thread and passes *args* and *kwargs*.
Maintains pointer to this activity and stops *activity* when this
activity is stopped.
"""
self._validate_activity(activity)
# Spawn a new greenthread for given activity
greenthread = hub.spawn(activity.start, *args, **kwargs)
self._child_thread_map[activity.name] = greenthread
self._child_activity_map[activity.name] = activity
return greenthread
def _spawn_activity_after(self, seconds, activity, *args, **kwargs):
self._validate_activity(activity)
# Schedule to spawn a new greenthread after requested delay
greenthread = hub.spawn_after(seconds, activity.start, *args,
**kwargs)
self._child_thread_map[activity.name] = greenthread
self._child_activity_map[activity.name] = activity
return greenthread
def _validate_callable(self, callable_):
if callable_ is None:
raise ActivityException(desc='Callable cannot be None')
if not hasattr(callable_, '__call__'):
raise ActivityException(desc='Currently only supports instances'
' that have __call__ as callable which'
' is missing in given arg.')
if not self._started:
raise ActivityException(desc='Tried to spawn a child thread '
'before this Activity was started.')
def _spawn(self, name, callable_, *args, **kwargs):
self._validate_callable(callable_)
greenthread = hub.spawn(callable_, *args, **kwargs)
self._child_thread_map[name] = greenthread
return greenthread
def _spawn_after(self, name, seconds, callable_, *args, **kwargs):
self._validate_callable(callable_)
greenthread = hub.spawn_after(seconds, callable_, *args, **kwargs)
self._child_thread_map[name] = greenthread
return greenthread
def _create_timer(self, name, func, *arg, **kwarg):
timer = LoopingCall(func, *arg, **kwarg)
self._timers[name] = timer
return timer
@abc.abstractmethod
def _run(self, *args, **kwargs):
"""Main activity of this class.
Can launch other activity/callables here.
Sub-classes should override this method.
"""
raise NotImplementedError()
def start(self, *args, **kwargs):
"""Starts the main activity of this class.
Calls *_run* and calls *stop* when *_run* is finished.
This method should be run in a new greenthread as it may not return
immediately.
"""
if self.started:
raise ActivityException(desc='Activity already started')
self._started = True
try:
self._run(*args, **kwargs)
except BGPSException:
LOG.error(traceback.format_exc())
finally:
if self.started: # could have been stopped somewhere else
self.stop()
def pause(self, seconds=0):
"""Relinquishes hub for given number of seconds.
        In other words it puts this greenthread to sleep to give other
        greenthreads a chance to run.
"""
hub.sleep(seconds)
def _stop_child_activities(self, name=None):
"""Stop all child activities spawn by this activity.
"""
# Makes a list copy of items() to avoid dictionary size changed
# during iteration
for child_name, child in list(self._child_activity_map.items()):
if name is not None and name != child_name:
continue
LOG.debug('%s: Stopping child activity %s ', self.name, child_name)
if child.started:
child.stop()
self._child_activity_map.pop(child_name, None)
def _stop_child_threads(self, name=None):
"""Stops all threads spawn by this activity.
"""
for thread_name, thread in list(self._child_thread_map.items()):
            if name is None or thread_name == name:
LOG.debug('%s: Stopping child thread %s',
self.name, thread_name)
thread.kill()
self._child_thread_map.pop(thread_name, None)
def _close_asso_sockets(self):
"""Closes all the sockets linked to this activity.
"""
for sock_name, sock in list(self._asso_socket_map.items()):
LOG.debug('%s: Closing socket %s - %s', self.name, sock_name, sock)
sock.close()
def _stop_timers(self):
for timer_name, timer in list(self._timers.items()):
LOG.debug('%s: Stopping timer %s', self.name, timer_name)
timer.stop()
def stop(self):
"""Stops all child threads and activities and closes associated
sockets.
Re-initializes this activity to be able to start again.
Raise `ActivityException` if activity is not currently started.
"""
if not self.started:
raise ActivityException(desc='Cannot call stop when activity is '
'not started or has been stopped already.')
LOG.debug('Stopping activity %s.', self.name)
self._stop_timers()
self._stop_child_activities()
self._stop_child_threads()
self._close_asso_sockets()
# Setup activity for start again.
self._started = False
self._asso_socket_map = weakref.WeakValueDictionary()
self._child_activity_map = weakref.WeakValueDictionary()
self._child_thread_map = weakref.WeakValueDictionary()
self._timers = weakref.WeakValueDictionary()
LOG.debug('Stopping activity %s finished.', self.name)
def _canonicalize_ip(self, ip):
addr = netaddr.IPAddress(ip)
if addr.is_ipv4_mapped():
ip = str(addr.ipv4())
return ip
def get_remotename(self, sock):
addr, port = sock.getpeername()[:2]
return self._canonicalize_ip(addr), str(port)
def get_localname(self, sock):
addr, port = sock.getsockname()[:2]
return self._canonicalize_ip(addr), str(port)
def _create_listen_socket(self, family, loc_addr):
s = socket.socket(family)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(loc_addr)
s.listen(1)
return s
def _listen_socket_loop(self, s, conn_handle):
while True:
sock, client_address = s.accept()
client_address, port = self.get_remotename(sock)
LOG.debug('Connect request received from client for port'
' %s:%s', client_address, port)
client_name = self.name + '_client@' + client_address
self._asso_socket_map[client_name] = sock
self._spawn(client_name, conn_handle, sock)
def _listen_tcp(self, loc_addr, conn_handle):
"""Creates a TCP server socket which listens on `port` number.
For each connection `server_factory` starts a new protocol.
"""
info = socket.getaddrinfo(loc_addr[0], loc_addr[1], socket.AF_UNSPEC,
socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
listen_sockets = {}
for res in info:
af, socktype, proto, _, sa = res
sock = None
try:
sock = socket.socket(af, socktype, proto)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if af == socket.AF_INET6:
sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
sock.bind(sa)
sock.listen(50)
listen_sockets[sa] = sock
except socket.error as e:
LOG.error('Error creating socket: %s', e)
if sock:
sock.close()
count = 0
server = None
for sa in listen_sockets:
name = self.name + '_server@' + str(sa[0])
self._asso_socket_map[name] = listen_sockets[sa]
if count == 0:
import eventlet
server = eventlet.spawn(self._listen_socket_loop,
listen_sockets[sa], conn_handle)
self._child_thread_map[name] = server
count += 1
else:
server = self._spawn(name, self._listen_socket_loop,
listen_sockets[sa], conn_handle)
return server, listen_sockets
def _connect_tcp(self, peer_addr, conn_handler, time_out=None,
bind_address=None, password=None):
"""Creates a TCP connection to given peer address.
        Tries to create a socket for `time_out` number of seconds. If
        successful, uses the socket instance to start `conn_handler`.
The socket is bound to `bind_address` if specified.
"""
LOG.debug('Connect TCP called for %s:%s', peer_addr[0], peer_addr[1])
if ip.valid_ipv4(peer_addr[0]):
family = socket.AF_INET
else:
family = socket.AF_INET6
with Timeout(time_out, socket.error):
sock = socket.socket(family)
if bind_address:
sock.bind(bind_address)
if password:
sockopt.set_tcp_md5sig(sock, peer_addr[0], password)
sock.connect(peer_addr)
# socket.error exception is raised in case of timeout and
# the following code is executed only when the connection
# is established.
# Connection name for pro-active connection is made up of
# local end address + remote end address
local = self.get_localname(sock)[0]
remote = self.get_remotename(sock)[0]
conn_name = ('L: ' + local + ', R: ' + remote)
self._asso_socket_map[conn_name] = sock
# If connection is established, we call connection handler
# in a new thread.
self._spawn(conn_name, conn_handler, sock)
return sock
#
# Sink
#
class Sink(object):
"""An entity to which we send out messages (eg. BGP routes)."""
#
# OutgoingMsgList
#
# A circular list type in which objects are linked to each
# other using the 'next_sink_out_route' and 'prev_sink_out_route'
# attributes.
#
OutgoingMsgList = CircularListType(next_attr_name='next_sink_out_route',
prev_attr_name='prev_sink_out_route')
# Next available index that can identify an instance uniquely.
idx = 0
@staticmethod
def next_index():
"""Increments the sink index and returns the value."""
Sink.idx += 1
return Sink.idx
def __init__(self):
# A small integer that represents this sink.
self.index = Sink.next_index()
# Create an event for signal enqueuing.
from .utils.evtlet import EventletIOFactory
self.outgoing_msg_event = EventletIOFactory.create_custom_event()
self.messages_queued = 0
# List of msgs. that are to be sent to this peer. Each item
# in the list is an instance of OutgoingRoute.
self.outgoing_msg_list = Sink.OutgoingMsgList()
def clear_outgoing_msg_list(self):
self.outgoing_msg_list = Sink.OutgoingMsgList()
def enque_outgoing_msg(self, msg):
self.outgoing_msg_list.append(msg)
self.outgoing_msg_event.set()
self.messages_queued += 1
def enque_first_outgoing_msg(self, msg):
self.outgoing_msg_list.prepend(msg)
self.outgoing_msg_event.set()
def __iter__(self):
return self
def next(self):
"""Pops and returns the first outgoing message from the list.
If message list currently has no messages, the calling thread will
be put to sleep until we have at-least one message in the list that
can be popped and returned.
"""
# We pick the first outgoing available and send it.
outgoing_msg = self.outgoing_msg_list.pop_first()
# If we do not have any outgoing msg., we wait.
if outgoing_msg is None:
self.outgoing_msg_event.clear()
self.outgoing_msg_event.wait()
outgoing_msg = self.outgoing_msg_list.pop_first()
return outgoing_msg
# For Python 3 compatibility
__next__ = next
#
# Source
#
class Source(object):
"""An entity that gives us BGP routes. A BGP peer, for example."""
def __init__(self, version_num):
# Number that is currently being used to stamp information
# received from this source. We will bump this number up when
# the information that is now expected from the source belongs
# to a different logical batch. This mechanism can be used to
# identify stale information.
self.version_num = version_num
class FlexinetPeer(Source, Sink):
def __init__(self):
# Initialize source and sink
Source.__init__(self, 1)
Sink.__init__(self)
# Registry of validators for configuration/settings.
_VALIDATORS = {}
def validate(**kwargs):
"""Defines a decorator to register a validator with a name for look-up.
If name is not provided we use function name as name of the validator.
"""
def decorator(func):
_VALIDATORS[kwargs.pop('name', func.__name__)] = func
return func
return decorator
def get_validator(name):
"""Returns a validator registered for given name.
"""
return _VALIDATORS.get(name)
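# Usage sketch for the validator registry above (illustration only; the validator
# name and rule below are hypothetical, not taken from this module):
#
# @validate(name='local_pref')
# def validate_local_pref(value):
#     return isinstance(value, int) and 0 <= value <= 0xffffffff
#
# get_validator('local_pref')  # -> validate_local_pref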
|
iwaseyusuke/ryu
|
ryu/services/protocols/bgp/base.py
|
Python
|
apache-2.0
| 19,278
|
#!/usr/bin/python
import argparse
import sys
from itertools import *
import math
import operator
def euler15(n, m):
"""Number of distinct Manhattan paths from one side to the other of an n x m grid
http://projecteuler.net/problem=15"""
# You have to walk N + M blocks, of which N have to be horiz and M vertical.
# The # of paths is the number of ways to arrange the N horiz blocks within
# the N+M blocks walked: c(N+M, N) = (N+M)!/(N! * M!)
return math.factorial(n+m)/(math.factorial(n) * math.factorial(m))
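# Quick sanity check of the formula above (illustrative, not part of the original
# script): a 2 x 2 grid gives c(4, 2) = 4!/(2! * 2!) = 6 paths, and the 20 x 20
# case from the problem statement gives c(40, 20) = 137846528820.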
def debug_out(s, *args, **kwargs):
print s.format(*args, **kwargs)
def debug_noop(s, *args, **kwargs):
pass
debug = debug_noop
class Usage(Exception):
def __init__(self, msg):
self.msg = msg
def main(argv=None):
if argv is None:
argv = sys.argv
try:
try:
parser = argparse.ArgumentParser(description='Euler problem 15.')
parser.add_argument('--debug', action='store_true')
parser.add_argument('--n', default=20, type=long)
args = parser.parse_args()
        except argparse.ArgumentError, msg:
raise Usage(msg)
global debug
if args.debug:
debug = debug_out
total = euler15(args.n, args.n)
print total
except Usage, err:
print >>sys.stderr, err.msg
print >>sys.stderr, "for help use --help"
return 2
if __name__ == "__main__":
sys.exit(main())
|
tdierks/project-euler
|
e15.py
|
Python
|
mit
| 1,370
|
from matplotlib.pyplot import *
from math import sqrt
m = 1/3.
xs = [+1, +1, -1, -1]
ys = [-1, +1, -1, +1]
figure(figsize=(4, 4))
ax = gca()
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.spines['left'].set_position(('data', 0))
ax.spines['bottom'].set_position(('data', 0))
ax.xaxis.set_ticks_position('none')
ax.yaxis.set_ticks_position('none')
ax.set_aspect('equal')
ax.set_xlim(-1.4, +1.6)
ax.set_ylim(-1.4, +1.6)
ax.text(1.8, 0., r'$\xi$', transform=ax.transData, va='center')
ax.text(0., 1.8, r'$\eta$', rotation='horizontal', transform=ax.transData,
ha='center')
ax.text(+1.1, +1.1, '$n_1$\n' + r'$(+1, +1)$', ha='center', va='bottom',
fontsize=10)
ax.text(-1.1, +1.1, '$n_2$\n' + r'$(-1, +1)$', ha='center', va='bottom',
fontsize=10)
ax.text(-1.1, -1.1, '$n_3$\n' + r'$(-1, -1)$', ha='center', va='top' ,
fontsize=10)
ax.text(+1.1, -1.1, '$n_4$\n' + r'$(+1, -1)$', ha='center', va='top' ,
fontsize=10)
# radius
ax.annotate('$r_1$', xy=(-1, 0.5), xytext=(-0.5, 0.2),
arrowprops=dict(arrowstyle='->'), va='center', ha='center')
ax.annotate('$r_2$', xy=(+1, 0.5), xytext=(+0.5, 0.2),
arrowprops=dict(arrowstyle='->'), va='center', ha='center')
ax.set_xticks([])
ax.set_yticks([])
#ax.set_xticklabels(['-1', '+1'])
#ax.set_yticklabels(['-1', '+1'])
plot([1, -1, -1, 1, 1], [1, 1, -1, -1, 1], '-k')
plot(xs, ys, 'ok', mfc='k')
tight_layout()
savefig('test.png')
#show()
|
saullocastro/compmech
|
doc/pyplots/theory/fem/fsdt_donnell_kquad4.py
|
Python
|
bsd-3-clause
| 1,473
|
# UrbanFootprint v1.5
# Copyright (C) 2017 Calthorpe Analytics
#
# This file is part of UrbanFootprint version 1.5
#
# UrbanFootprint is distributed under the terms of the GNU General
# Public License version 3, as published by the Free Software Foundation. This
# code is distributed WITHOUT ANY WARRANTY, without implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License v3 for more details; see <http://www.gnu.org/licenses/>.
from urlparse import urlparse
from django.test.client import FakePayload, Client, MULTIPART_CONTENT
__author__ = 'calthorpe_analytics'
class Client2(Client):
"""
Construct a second test client which can do PATCH requests.
"""
def patch(self, path, data={}, content_type=MULTIPART_CONTENT, **extra):
"""
Construct a PATCH request."
:param path:
:param data:
:param content_type:
:param extra:
:return:
"""
patch_data = self._encode_data(data, content_type)
parsed = urlparse(path)
r = {
'CONTENT_LENGTH': len(patch_data),
'CONTENT_TYPE': content_type,
'PATH_INFO': self._get_path(parsed),
'QUERY_STRING': parsed[4],
'REQUEST_METHOD': 'PATCH',
'wsgi.input': FakePayload(patch_data),
}
r.update(extra)
return self.request(**r)
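# Usage sketch (illustration; the URL and payload are hypothetical): with Client2
# as the test client, a PATCH request can be issued much like the built-in verbs:
#
# client = Client2()
# response = client.patch('/api/v0/project/1/', data='{"name": "new name"}',
#                         content_type='application/json')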
|
CalthorpeAnalytics/urbanfootprint
|
footprint/main/tests/test_api/patch_patch.py
|
Python
|
gpl-3.0
| 1,431
|
from difflib import ndiff, restore
class DiffContent:
@classmethod
def get_diff(cls, c_o, c_n):
if c_o is not None and c_n is not None:
diff = ndiff(c_o, c_n)
else:
diff = []
return diff
@classmethod
def get_original(cls, diff):
return ''.join(restore(diff, 1))
@classmethod
def get_revised(cls, diff):
return ''.join(restore(diff, 2))
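# Usage sketch (illustration with made-up strings): materialise the diff once so
# that both sides can be restored from the same delta.
#
# delta = list(DiffContent.get_diff(['a\n', 'b\n'], ['a\n', 'c\n']))
# DiffContent.get_original(delta)  # -> 'a\nb\n'
# DiffContent.get_revised(delta)   # -> 'a\nc\n'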
|
vaizguy/snaps
|
src/snaps/differ.py
|
Python
|
gpl-3.0
| 435
|
#!/usr/bin/env python
# coding=utf-8
"""
ola channel mapper.
read a configuration file and map channels from one universe to a second.
history:
see git commits
todo:
~ all fine :-)
"""
import sys
import time
import os
import array
import json
from configdict import ConfigDict
from olathreaded import OLAThread, OLAThread_States
version = """08.03.2016 12:30 stefan"""
##########################################
# globals
##########################################
# functions
##########################################
# classes
class OLAMapper(OLAThread):
"""Class that extends on OLAThread and implements the Mapper functions."""
def __init__(self, config):
"""init mapper things."""
# super(OLAThread, self).__init__()
OLAThread.__init__(self)
self.config = config
# print("config: {}".format(self.config))
self.universe = self.config['universe']['output']
# self.channel_count = 512
# self.channel_count = 50
self.channel_count = self.config['universe']['channel_count']
self.channels_out = array.array('B')
# internal map
self.map = []
self.map_create()
# print("full map: {}".format(map_tostring_pretty()))
# self.channels = []
for channel_index in range(0, self.channel_count):
self.channels_out.append(0)
# timing things
self.duration = 0
self.calls = 0
def print_measurements(self):
"""print duration statistics on exit."""
        # print duration measurements:
if self.calls > 0:
print(
(
"map_channels:\n" +
" sum duration: {:>10f}s\n" +
" sum calls: {:>10}\n" +
" duration/call: {:>10.2f}ms/call\n"
).format(
self.duration,
self.calls,
((self.duration / self.calls)*1000)
)
)
def ola_connected(self):
"""register receive callback and switch to running mode."""
self.client.RegisterUniverse(
self.config['universe']['input'],
self.client.REGISTER,
self.dmx_receive_frame
)
# python3 syntax
# super().ola_connected()
# python2 syntax
# super(OLAThread, self).ola_connected()
# explicit call
OLAThread.ola_connected(self)
def dmx_receive_frame(self, data):
"""receive one dmx frame."""
# print(data)
        # measure duration:
start = time.time()
self.map_channels(data)
stop = time.time()
duration = stop - start
self.duration += duration
self.calls += 1
# temp_array = array.array('B')
# for channel_index in range(0, self.channel_count):
# temp_array.append(self.channels[channel_index])
def map_create(self):
"""create map based on configuration."""
# self.map
map_config = self.config['map']
data_output = array.array('B')
# for channel_index in range(0, data_input_length):
# data_output.append(data_input[channel_index])
channel_output_count_temp = len(map_config['channels'])
if map_config['repeat'] is True:
channel_output_count_temp = self.channel_count
elif isinstance(map_config['repeat'], int):
channel_output_count_temp = (
len(map_config['channels']) * map_config['repeat']
)
for channel_output_index in range(0, channel_output_count_temp):
# calculate map_index
map_index = channel_output_index % len(map_config['channels'])
# print("map_index: {}".format(map_index))
# get map_config channel
map_value = map_config['channels'][map_index]
            if map_value != -1:
if map_config['repeat'] and map_config['offset']:
loop_index = (
channel_output_index // len(map_config['channels'])
)
if (
isinstance(map_config['repeat'], int) and
map_config['repeat_reverse']
):
map_value = map_value + (
((map_config['repeat']-1) - loop_index) *
map_config['offset_count']
)
else:
map_value = (
map_value +
(loop_index * map_config['offset_count'])
)
# print("map_value: {}".format(map_value))
# add channel to map
self.map.append(map_value)
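    # Worked example of the offset arithmetic above (illustration; the values are
    # hypothetical, not the shipped default config): with channels=[3, 0],
    # repeat=2, repeat_reverse=False, offset=True and offset_count=4 the map
    # becomes [3, 0, 3+4, 0+4] = [3, 0, 7, 4]; with repeat_reverse=True the shift
    # is applied in reverse order, giving [7, 4, 3, 0].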
def map_tostring_pretty(self):
"""print map content in pretty way."""
output = ""
map_config = self.config['map']
array = ""
separator_line = "\n "
array += separator_line
for index, value in enumerate(self.map):
            if index == len(self.map)-1:
array += "{: >3}".format(value)
else:
array += "{: >3}, ".format(value)
# after every repeat break line
if (
(((index+1) % len(map_config['channels'])) == 0) and
                not (index+1 == len(self.map))
):
# print("index: {}".format(index))
array += separator_line
output = "[{}\n]".format(array)
# else:
# # print(output)
# output = json.dumps(
# self.map,
# sort_keys=True,
# indent=4,
# separators=(',', ': ')
# )
# [ array, content ]
# print("map: {}".format(output))
return output
def map_channels(self, data_input):
"""remap channels according to map tabel."""
# print("map channels:")
# print("data_input: {}".format(data_input))
data_input_length = data_input.buffer_info()[1]
# print("data_input_length: {}".format(data_input_length))
# print("map: {}".format(self.config['map']))
for channel_output_index, map_value in enumerate(self.map):
# check if map_value is in range of input channels
if (
# (map_value < data_input_length) and
                (0 <= map_value < data_input_length)
):
try:
self.channels_out[channel_output_index] = (
data_input[map_value]
)
except Exception as e:
print(
(
"additional info:\n" +
" channel_output_index: {}\n" +
" len(self.channels_out): {}\n" +
" map_value: {}\n"
).format(
channel_output_index,
len(self.channels_out),
map_value
)
)
raise
else:
# don't alter data
pass
self.dmx_send_frame(
self.config['universe']['output'],
self.channels_out
)
##########################################
if __name__ == '__main__':
print(42*'*')
print('Python Version: ' + sys.version)
print(42*'*')
print(__doc__)
print(42*'*')
# parse arguments
filename = "map.json"
# only use args after script name
arg = sys.argv[1:]
if not arg:
print("using standard values.")
print(" Allowed parameters:")
print(" filename for config file (default='map.json')")
print("")
else:
filename = arg[0]
# if len(arg) > 1:
# pixel_count = int(arg[1])
# print parsed argument values
print('''values:
filename :{}
'''.format(filename))
default_config = {
'universe': {
'input': 1,
'output': 2,
'channel_count': 240,
},
'map': {
'channels': [
-1,
-1,
-1,
-1,
30,
31,
-1,
-1,
-1,
-1,
22,
23,
-1,
-1,
-1,
-1,
14,
15,
-1,
-1,
-1,
-1,
6,
7,
28,
29,
26,
27,
24,
25,
20,
21,
18,
19,
16,
17,
12,
13,
10,
11,
8,
9,
4,
5,
2,
3,
0,
1,
],
'repeat': 5,
'repeat_reverse': True,
'offset': True,
'offset_count': 32,
},
}
my_config = ConfigDict(default_config, filename)
print("my_config.config: {}".format(my_config.config))
my_mapper = OLAMapper(my_config.config)
print("full map:\n{}".format(my_mapper.map_tostring_pretty()))
my_mapper.start_ola()
# wait for user to hit key.
try:
raw_input(
"\n\n" +
42*'*' +
"\nhit a key to stop the mapper\n" +
42*'*' +
"\n\n"
)
except KeyboardInterrupt:
print("\nstop.")
except:
print("\nstop.")
# blocks untill thread has joined.
my_mapper.stop_ola()
my_mapper.print_measurements()
# as last thing we save the current configuration.
print("\nwrite config.")
my_config.write_to_file()
# ###########################################
|
s-light/OLA_channel_mapper
|
olamapper.py
|
Python
|
mit
| 10,276
|
#!/usr/bin/env python2.7
import sys
import connection as CN
import serial
import os
import time
import argparse
import logging
serial_device_basename = "/dev/ttyACM"
serial_device_init = 0
def set_log(logname):
logging.basicConfig(filename=logname,
filemode='a',
format='%(asctime)s,%(msecs)d %(message)s',
datefmt='%d/%m/%y %H:%M:%S',
level=logging.DEBUG)
global logger
logger = logging.getLogger("monitor")
def parse_args():
parser = argparse.ArgumentParser(
description='Trastos monitor')
parser.add_argument(
'-d',
dest='serial_device',
type=str,
default=None,
help='Serial device')
parser.add_argument(
'-i',
dest='recolector_id',
type=str,
default=0,
help='Recolector identifier')
parser.add_argument(
'-l',
dest='logname',
type=str,
default='/tmp/monitor.log',
help='Log file')
parser.add_argument(
'-s',
dest='host',
default='127.0.0.1',
type=str,
help='Server host or IP')
parser.add_argument(
'-p',
dest='port',
type=int,
default=7645,
help='Server port')
parser.add_argument(
'-b',
dest='baudrate',
type=int,
default=9600,
help='Baudrate for the serial device')
return parser.parse_args()
def open_serial(device):
#serial.tools.list_ports
if device == None:
serial_device = False
for i in range(serial_device_init,8):
try:
os.stat(serial_device_basename+str(i))
serial_device = serial_device_basename+str(i)
break
except Exception as e:
print(e)
pass
if not serial_device:
print("Error, cannot find a valid serial device.")
logger.error("Cannot find a valid serial device")
sys.exit(2)
else: serial_device = device
print("Opening serial device " + serial_device)
logger.info("Opening serial device " + serial_device)
return serial.Serial(serial_device, baudrate, timeout=0)
def read(s):
while not s.inWaiting(): pass
line = s.readline().decode('utf-8')
return line
def connect():
socket = CN.Client(host,port)
socket.connect()
return socket
def send_id_info():
data = "RECOLECTOR_ID:%s\n" %(recolector_id)
client.send(data.encode())
time.sleep(0.2)
args = parse_args()
set_log(args.logname)
recolector_id = args.recolector_id
baudrate = args.baudrate
port = args.port
host = args.host
serial_con = open_serial(args.serial_device)
try:
client = connect()
except ConnectionRefusedError:
print("Cannot connect to server")
logger.error("Cannot connect to server")
sys.exit(2)
send_id_info()
while True:
data = read(serial_con)
logger.info("Send: " + data)
client.send(data.encode())
time.sleep(0.2)
client.close()
|
p4u/projecte_frigos
|
client.py
|
Python
|
agpl-3.0
| 2,992
|
from __future__ import division
from drivepy.base.powermeter import BasePowerMeter, CommError, PowerMeterLibraryError
import drivepy.visaconnection as visaconnection
import math
DEFAULT_AVERAGING_TIME = 100 # ms
AVERAGING_TIME_MAX_MODE = 20 # ms
class PowerMeter(BasePowerMeter):
""" Creates a power meter object for the Agilent 8163A/B power meter via GPIB """
def __init__(self, addr = "GPIB::20"):
self._conn=VisaConnection(addr)
self._conn.write("*RST")
# make sure that the refernece is not used
self._conn.write("SENS1:CHAN1:POW:REF:STATE 0")
# clear the error queue
self._conn.write("*CLS")
# turn auto range on
self._conn.write("SENS1:CHAN1:POW:RANGE:AUTO 1")
# change the power unit to Watts
self._conn.write("SENS1:CHAN1:POW:UNIT W")
# set the default averaging time
self.tau = None
self._setTau(DEFAULT_AVERAGING_TIME)
# turn continuous measuring on
self._conn.write("INIT1:CHAN1:CONT 1")
def readPower(self, tau=DEFAULT_AVERAGING_TIME, mode="mean"):
""" Read the power using specified averaging time and either max or averaging mode """
if mode == 'mean' or tau <= AVERAGING_TIME_MAX_MODE:
self._setTau(tau)
return self._readPower()
elif mode == "max":
n = int(math.ceil(tau/AVERAGING_TIME_MAX_MODE))
self._setTau(AVERAGING_TIME_MAX_MODE)
return max([self._readPower() for i in range(n)])
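    # Worked example of the two modes above (numbers are illustrative):
    # readPower(tau=100, mode="mean") takes one reading averaged over 100 ms,
    # while readPower(tau=100, mode="max") performs ceil(100 / 20) = 5 readings
    # of 20 ms each and returns the largest value.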
def _setTau(self, tau):
if not tau == self.tau:
self._conn.write("SENS1:CHAN1:POW:ATIME %f"%(tau/1000))
self.tau = tau
def _readPower(self):
readStr=self._conn.readQuery("READ1:CHAN1:POW?")
return float(readStr)
class VisaConnection(visaconnection.VisaConnection):
""" Abstraction of the VISA connection for consistency between implementation of instrument classes """
def __init__(self,addr):
super(VisaConnection,self).__init__(addr)
def __del__(self):
self.write(":INIT:CONT 1")
|
timrae/drivepy
|
agilent/powermeter.py
|
Python
|
gpl-3.0
| 2,114
|
# -*- coding: utf-8 -*-
# This technical data was produced for the U. S. Government under Contract No. W15P7T-13-C-F600, and
# is subject to the Rights in Technical Data-Noncommercial Items clause at DFARS 252.227-7013 (FEB 2012)
import subprocess
# from django.contrib.gis.geos import *
from geojson import MultiPolygon, Polygon, Feature, FeatureCollection
from exceptions import ProgramException
import json
import logging
logger = logging.getLogger(__name__)
class Grid:
LETTERS = ['A','B','C','D','E','F','G','H','J','K','L','M','N','P','Q','R','S','T','U','V','W','X','Y','Z','A','B','C']
def __init__(self, sw_lat, sw_lon, ne_lat, ne_lon):
self.sw_mgrs = self.get_mgrs(sw_lat,sw_lon)
self.ne_mgrs = self.get_mgrs(ne_lat,ne_lon)
if self.sw_mgrs[0:2] != self.ne_mgrs[0:2]:
raise GridException("Can't create grids across longitudinal boundaries.")
try:
sw_mgrs_east = self.sw_mgrs[3:4]
ne_mgrs_east = self.ne_mgrs[3:4]
sw_mgrs_north = self.sw_mgrs[4:5]
ne_mgrs_north = self.ne_mgrs[4:5]
self.start_100k_easting_index = Grid.LETTERS.index(sw_mgrs_east)
self.end_100k_easting_index = Grid.LETTERS.index(ne_mgrs_east)
self.start_100k_northing_index = Grid.LETTERS.index(sw_mgrs_north)
self.end_100k_northing_index = Grid.LETTERS.index(ne_mgrs_north)
except:
error = dict(sw_mgrs=self.sw_mgrs, ne_mgrs=self.ne_mgrs)
raise GeoConvertException(json.dumps(error))
# need to check for a maximum size limit...
# specify a grid point with a 1m designation (add zeros to easting and northing)
def expand(self,original):
return original[:7] + '000' + original[7:] + '000'
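    # Worked example (illustration; the grid reference is hypothetical): for the
    # 1 km reference "18TWL8040" (3-char GZD, 2-char 100 km square, 2-digit
    # easting, 2-digit northing), expand() returns "18TWL8000040000", i.e. a 1 m
    # designator with three zeros appended to both easting and northing.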
# given a lat/lon combination, determine its 1km MGRS grid
def get_mgrs(self,lat,lon):
try:
#input = "%s %s" % (float(lon), float(lat))
# process = subprocess.Popen(["GeoConvert","-w","-m","-p","-3","--input-string",input],stdout=subprocess.PIPE)
# return process.communicate()[0].rstrip()
#Note: This gives shell access, be careful to screen your inputs!
shell_command = "echo " + str(float(lat)) + " " + str(float(lon)) + " | GeoConvert -m -p -3"
try:
process = subprocess.check_output(shell_command, shell=True)
output = process.rstrip()
except subprocess.CalledProcessError, e:
output = e.output
return output
except Exception:
import traceback
errorCode = 'Program Error: ' + traceback.format_exc()
raise ProgramException('Unable to execute GeoConvert program. errorCode = '+errorCode)
def get_polygon(self,mgrs_list):
try:
# m_string = ';'.join(mgrs_list)
# process = subprocess.Popen(["GeoConvert","-w","-g","-p","0","--input-string",m_string],stdout=subprocess.PIPE)
# result = process.communicate()[0].rstrip().split('\n')
#Note: This gives shell access, be careful to screen your inputs!
m_string = '\n'.join(mgrs_list)
shell_command = "printf '" + m_string + "' | GeoConvert -g -p -0"
try:
process = subprocess.check_output(shell_command, shell=True)
result = process.rstrip().split('\n')
except subprocess.CalledProcessError, e:
result = e.output
except Exception:
import traceback
errorCode = traceback.format_exc()
raise ProgramException('Error executing GeoConvert program. errorCode='+errorCode+'. m_string='+m_string)
for i, val in enumerate(result):
result[i] = tuple(float(x) for x in val.split())
# Flip the lat/lngs
for i, (lat, lon) in enumerate(result):
result[i] = (lon,lat)
return MultiPolygon([[result]])
def create_geojson_polygon_fc(self,coords):
feature = Feature(geometry=Polygon([coords]))
return FeatureCollection([feature])
def get_northing_list(self,count,northing):
if count:
return [northing+1,northing]
else:
return [northing,northing+1]
def get_grid_coords(self,mgrs):
easting = int(mgrs[5:7])
northing = int(mgrs[7:9])
heading = mgrs[0:3]
e_index = Grid.LETTERS.index(mgrs[3:4])
n_index = Grid.LETTERS.index(mgrs[4:5])
coords = []
for x_index in [easting,easting+1]:
for y_index in self.get_northing_list(x_index-easting,northing):
e = e_index
n = n_index
x = x_index
y = y_index
if x == 100:
x = 0
e = e_index+1
if y == 100:
y = 0
n = n_index+1
corner = "%s%s%s%02d%02d" % (heading, Grid.LETTERS[e], Grid.LETTERS[n], x, y)
coords.append(self.expand(corner))
coords.append(coords[0])
return coords
def get_array_for_block(self,northing_start,northing_end,easting_start,easting_end,prefix):
m_array = []
for n in range(northing_start,northing_end+1):
for e in range(easting_start,easting_end+1):
m_array.append("%s%02d%02d" % (prefix,e,n))
return m_array
def determine_mgrs_array(self):
easting_start = int(self.sw_mgrs[5:7])
easting_end = int(self.ne_mgrs[5:7])
northing_start = int(self.sw_mgrs[7:9])
northing_end = int(self.ne_mgrs[7:9])
gzd_prefix = self.sw_mgrs[0:3]
mgrs_array = []
for e in range(self.start_100k_easting_index,self.end_100k_easting_index+1):
for n in range(self.start_100k_northing_index,self.end_100k_northing_index+1):
e_start = easting_start if (e == self.start_100k_easting_index) else 0
e_end = easting_end if (e == self.end_100k_easting_index) else 99
n_start = northing_start if (n == self.start_100k_northing_index) else 0
n_end = northing_end if (n == self.end_100k_northing_index) else 99
prefix = "%s%s%s" % (gzd_prefix,Grid.LETTERS[e],Grid.LETTERS[n])
mgrs_array.extend(self.get_array_for_block(n_start,n_end,e_start,e_end,prefix))
return mgrs_array
def build_grid_fc(self):
# can probably check for a maximum grid size...
# and check that bounding box specified correctly
# if we're not in the same 100,000km grid, will have to do something with this boundary condition
# probably break each grid down into their components and get the relevant boxes within each
m_array = self.determine_mgrs_array()
for i,val in enumerate(m_array):
gc = self.get_grid_coords(val)
polygon = self.get_polygon(gc)
m_array[i] = Feature(geometry=polygon,properties={"mgrs":val},id="mgrs."+val,geometry_name="the_geom")
return FeatureCollection(m_array)
class GridException(Exception):
pass
class GeoConvertException(Exception):
pass
|
stephenrjones/geoq
|
geoq/mgrs/utils.py
|
Python
|
mit
| 7,246
|
#!/usr/bin/env python
import random
from os.path import abspath, dirname, join as pjoin
def get_words(filename, min_length=3, max_length=9, disallowed=[' ', '-']):
words = []
filename = abspath(pjoin(dirname(__file__), filename))
with open(filename, 'r') as data_in:
for line in data_in:
word = line.strip().lower()
if not word:
continue
if len(word) >= min_length and len(word) <= max_length:
if not any([c in word for c in disallowed]):
words.append(word)
return words
_dictionaries = ['adverbs', 'verbs', 'nouns']
words = [get_words(dictionary+'.txt') for dictionary in _dictionaries]
def create_name(add_number=99, hashable=None):
"""
Create a random, semi-realistic, name. If add_number is != 0 (default 99) a
number in range 1-add_number is added to the end of the name.
If you include a hashable in the call to create_name the returned name is
consistent across calls.
"""
if hashable:
random.seed(hashable)
name = [random.choice(wlist) for wlist in words]
if add_number:
name.append(str(random.randint(1, add_number)))
return '-'.join(name)
def name_possibilities():
'''Get the number of possible names'''
return reduce(lambda pos, lst: pos*len(lst), words, 1)
def word_len():
'''Number of words being used'''
return reduce(lambda l, wlist: l + len(wlist), words, 0)
if __name__ == '__main__':
print create_name(add_number=0)
print name_possibilities()
print word_len()
|
stengaard/moniker
|
moniker/moniker.py
|
Python
|
mit
| 1,589
|
from . import object
from .function import FunctionReturnType, native_function
@native_function
def object_constructor(scope, this_object, params):
data = params[0].call(
scope,
this_object,
[],
return_type=FunctionReturnType.RETURN_NAME_MAP
)
return object.PvlObject(
cls=object_class,
data=data
)
@native_function
def object_get(scope, this_object, params, **kwargs):
key = params[0]
return this_object.instance.data[key]
@native_function
def object_set(scope, this_object, params, **kwargs):
key = params[0]
value = params[1]
this_object.instance.data[key] = value
return value
object_class = object.PvlClass(
name="Object",
attrs=(),
delegators=(
('get', object_get),
('set', object_set),
),
parents=()
)
# class _Object(PavelObject):
# def __init__(self, data=None):
# if data:
# self.__data = data
# def get_attr(self, scope, attr_name):
# return self.__data[attr_name]
# def __setitem__(self, attr_name, value):
# self.__data[attr_name] = value
# return value
@native_function
def range(scope, this_object, params):
return _RangeGenerator(params[0])
class _RangeGenerator:
def __init__(self, n):
self.__n = n
self.__i = -1
def next(self):
self.__i += 1
goon = self.__i < self.__n
return self.__i, goon
@native_function
def delegator_constructor(scope, this_object, params):
block = params[0]
data = params[0].call(
scope,
this_object,
[],
return_type=FunctionReturnType.RETURN_NAME_MAP
)
# delegated_class = object.PvlClass(
# name=None,
# attrs=(),
# delegators=(
# (key, value)
# for key, value in data.items()
# ),
# parents=(object_class,),
# )
# return delegator
return object.PvlObject(
cls=delegator_class,
data=dict(
delegators=(
(key, value)
for key, value in data.items()
),
)
)
@native_function
def delegator_call(scope, this_object, params, **kwargs):
inner_object = params[0]
delegated_class = object.PvlClass(
name=None,
attrs=(),
delegators=this_object.instance.data['delegators'],
parents=(inner_object.cls,),
)
return object.PvlObject(
cls=delegated_class,
data=inner_object.data,
)
delegator_class = object.PvlClass(
name="Delegator",
attrs=(),
delegators=(
('call', delegator_call),
),
parents=(),
)
@native_function
def pvl_super(scope, this_object, params, **kwargs):
if scope.this_object.in_delegator:
result = object._ThisObject(
scope.this_object.instance,
scope.this_object.chain_point,
False,
)
else:
result = object._ThisObject(
scope.this_object.instance,
scope.this_object.chain_point.get_super(),
True,
)
return result
buildins = dict(
lang=dict(
object=object_constructor,
range=range,
delegator=delegator_constructor,
super=pvl_super,
)
)
|
lexdene/pavel
|
pavel/runtime/buildins.py
|
Python
|
gpl-3.0
| 3,305
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Article.issue'
db.add_column('journalmanager_article', 'issue',
self.gf('django.db.models.fields.related.ForeignKey')(default=0, related_name='issue_article', to=orm['journalmanager.Issue']),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Article.issue'
db.delete_column('journalmanager_article', 'issue_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'journalmanager.aheadpressrelease': {
'Meta': {'object_name': 'AheadPressRelease', '_ormbases': ['journalmanager.PressRelease']},
'journal': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'press_releases'", 'to': "orm['journalmanager.Journal']"}),
'pressrelease_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['journalmanager.PressRelease']", 'unique': 'True', 'primary_key': 'True'})
},
'journalmanager.article': {
'Meta': {'object_name': 'Article'},
'front': ('jsonfield.fields.JSONField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images_url': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'issue': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'issue_article'", 'to': "orm['journalmanager.Issue']"}),
'pdf_url': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'xml_url': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'journalmanager.collection': {
'Meta': {'ordering': "['name']", 'object_name': 'Collection'},
'acronym': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '16', 'blank': 'True'}),
'address': ('django.db.models.fields.TextField', [], {}),
'address_complement': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'address_number': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'collection': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'user_collection'", 'to': "orm['auth.User']", 'through': "orm['journalmanager.UserCollections']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'name_slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'})
},
'journalmanager.datachangeevent': {
'Meta': {'object_name': 'DataChangeEvent'},
'changed_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'collection': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.Collection']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'event_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'journalmanager.institution': {
'Meta': {'ordering': "['name']", 'object_name': 'Institution'},
'acronym': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '16', 'blank': 'True'}),
'address': ('django.db.models.fields.TextField', [], {}),
'address_complement': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'address_number': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'cel': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'complement': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_trashed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'db_index': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'zip_code': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'})
},
'journalmanager.issue': {
'Meta': {'object_name': 'Issue'},
'cover': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'ctrl_vocabulary': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'editorial_standard': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_marked_up': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_trashed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'journal': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.Journal']"}),
'label': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'blank': 'True'}),
'publication_end_month': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'publication_start_month': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'publication_year': ('django.db.models.fields.IntegerField', [], {}),
'section': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['journalmanager.Section']", 'symmetrical': 'False', 'blank': 'True'}),
'suppl_number': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'suppl_volume': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'total_documents': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'use_license': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.UseLicense']", 'null': 'True'}),
'volume': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'})
},
'journalmanager.issuetitle': {
'Meta': {'object_name': 'IssueTitle'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.Issue']"}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.Language']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'journalmanager.journal': {
'Meta': {'ordering': "['title']", 'object_name': 'Journal'},
'abstract_keyword_languages': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'abstract_keyword_languages'", 'symmetrical': 'False', 'to': "orm['journalmanager.Language']"}),
'acronym': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'collection': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'journals'", 'to': "orm['journalmanager.Collection']"}),
'copyrighter': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
'cover': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'enjoy_creator'", 'to': "orm['auth.User']"}),
'ctrl_vocabulary': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'current_ahead_documents': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True', 'blank': 'True'}),
'editor_address': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'editor_address_city': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'editor_address_country': ('scielo_extensions.modelfields.CountryField', [], {'max_length': '2'}),
'editor_address_state': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'editor_address_zip': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'editor_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'editor_name': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'editor_phone1': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'editor_phone2': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'editorial_standard': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'editors': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'user_editors'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'eletronic_issn': ('django.db.models.fields.CharField', [], {'max_length': '9', 'db_index': 'True'}),
'final_num': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'final_vol': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'final_year': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'frequency': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index_coverage': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'init_num': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'init_vol': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'init_year': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'is_indexed_aehci': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_indexed_scie': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_indexed_ssci': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_trashed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'languages': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['journalmanager.Language']", 'symmetrical': 'False'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'medline_code': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'medline_title': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'national_code': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'max_length': '254', 'null': 'True', 'blank': 'True'}),
'other_previous_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'previous_ahead_documents': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '3', 'null': 'True', 'blank': 'True'}),
'previous_title': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'prev_title'", 'null': 'True', 'to': "orm['journalmanager.Journal']"}),
'print_issn': ('django.db.models.fields.CharField', [], {'max_length': '9', 'db_index': 'True'}),
'pub_level': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'pub_status': ('django.db.models.fields.CharField', [], {'default': "'inprogress'", 'max_length': '16', 'null': 'True', 'blank': 'True'}),
'pub_status_changed_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pub_status_changed_by'", 'to': "orm['auth.User']"}),
'pub_status_reason': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'publication_city': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'publisher_country': ('scielo_extensions.modelfields.CountryField', [], {'max_length': '2'}),
'publisher_name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'publisher_state': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'scielo_issn': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'secs_code': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'short_title': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'db_index': 'True'}),
'sponsor': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'journal_sponsor'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['journalmanager.Sponsor']"}),
'study_areas': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'journals_migration_tmp'", 'null': 'True', 'to': "orm['journalmanager.StudyArea']"}),
'subject_categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'journals'", 'null': 'True', 'to': "orm['journalmanager.SubjectCategory']"}),
'subject_descriptors': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256', 'db_index': 'True'}),
'title_iso': ('django.db.models.fields.CharField', [], {'max_length': '256', 'db_index': 'True'}),
'twitter_user': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'url_journal': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'url_online_submission': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'use_license': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.UseLicense']"})
},
'journalmanager.journalmission': {
'Meta': {'object_name': 'JournalMission'},
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'journal': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'missions'", 'to': "orm['journalmanager.Journal']"}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.Language']", 'null': 'True'})
},
'journalmanager.journalpublicationevents': {
'Meta': {'ordering': "['created_at']", 'object_name': 'JournalPublicationEvents'},
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'journal': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'status_history'", 'to': "orm['journalmanager.Journal']"}),
'reason': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '16'})
},
'journalmanager.journaltitle': {
'Meta': {'object_name': 'JournalTitle'},
'category': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'journal': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'other_titles'", 'to': "orm['journalmanager.Journal']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'journalmanager.language': {
'Meta': {'ordering': "['name']", 'object_name': 'Language'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'iso_code': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'journalmanager.pendedform': {
'Meta': {'object_name': 'PendedForm'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'form_hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pending_forms'", 'to': "orm['auth.User']"}),
'view_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'journalmanager.pendedvalue': {
'Meta': {'object_name': 'PendedValue'},
'form': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'data'", 'to': "orm['journalmanager.PendedForm']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'journalmanager.pressrelease': {
'Meta': {'object_name': 'PressRelease'},
'doi': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'journalmanager.pressreleasearticle': {
'Meta': {'object_name': 'PressReleaseArticle'},
'article_pid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'press_release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'articles'", 'to': "orm['journalmanager.PressRelease']"})
},
'journalmanager.pressreleasetranslation': {
'Meta': {'object_name': 'PressReleaseTranslation'},
'content': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.Language']"}),
'press_release': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'to': "orm['journalmanager.PressRelease']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'journalmanager.regularpressrelease': {
'Meta': {'object_name': 'RegularPressRelease', '_ormbases': ['journalmanager.PressRelease']},
'issue': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'press_releases'", 'to': "orm['journalmanager.Issue']"}),
'pressrelease_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['journalmanager.PressRelease']", 'unique': 'True', 'primary_key': 'True'})
},
'journalmanager.section': {
'Meta': {'object_name': 'Section'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '21', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_trashed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'journal': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.Journal']"}),
'legacy_code': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'journalmanager.sectiontitle': {
'Meta': {'ordering': "['title']", 'object_name': 'SectionTitle'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.Language']"}),
'section': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'titles'", 'to': "orm['journalmanager.Section']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'journalmanager.sponsor': {
'Meta': {'ordering': "['name']", 'object_name': 'Sponsor', '_ormbases': ['journalmanager.Institution']},
'collections': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['journalmanager.Collection']", 'symmetrical': 'False'}),
'institution_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['journalmanager.Institution']", 'unique': 'True', 'primary_key': 'True'})
},
'journalmanager.studyarea': {
'Meta': {'object_name': 'StudyArea'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'study_area': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'journalmanager.subjectcategory': {
'Meta': {'object_name': 'SubjectCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '256', 'db_index': 'True'})
},
'journalmanager.translateddata': {
'Meta': {'object_name': 'TranslatedData'},
'field': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'translation': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'})
},
'journalmanager.uselicense': {
'Meta': {'ordering': "['license_code']", 'object_name': 'UseLicense'},
'disclaimer': ('django.db.models.fields.TextField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'license_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'reference_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'journalmanager.usercollections': {
'Meta': {'unique_together': "(('user', 'collection'),)", 'object_name': 'UserCollections'},
'collection': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['journalmanager.Collection']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_manager': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'journalmanager.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['journalmanager']
|
jfunez/scielo-manager
|
scielomanager/journalmanager/migrations/0044_auto__add_field_article_issue.py
|
Python
|
bsd-2-clause
| 30,833
|
import numpy as np
import pylab as pl
import scipy.special as ss
def beta(a, b, mew):
e1 = ss.gamma(a + b)
e2 = ss.gamma(a)
e3 = ss.gamma(b)
e4 = mew ** (a - 1)
e5 = (1 - mew) ** (b - 1)
return (e1/(e2*e3)) * e4 * e5
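# A quick sanity check of the density computed above: for a = b = 1 the Beta
# PDF reduces to the uniform density on (0, 1), so beta(1, 1, mew) should
# return 1.0 for any mew in that interval, e.g.
#   >>> beta(1, 1, 0.5)
#   1.0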
def plot_beta(a, b):
Ly = []
Lx = []
mews = np.mgrid[0:1:100j]
for mew in mews:
Lx.append(mew)
Ly.append(beta(a, b, mew))
pl.plot(Lx, Ly, label="a=%f, b=%f" %(a,b))
def main():
plot_beta(0.1, 0.1)
plot_beta(1, 1)
plot_beta(2, 3)
plot_beta(8, 4)
pl.xlim(0.0, 1.0)
pl.ylim(0.0, 3.0)
pl.legend()
pl.show()
if __name__ == "__main__":
main()
|
nicholasmalaya/paleologos
|
combustion/final/beta.py
|
Python
|
mit
| 678
|
#!/usr/bin/env python
from netmiko import ConnectHandler
from getpass import getpass
ip_address = raw_input("Enter IP address: ")
device = {
'device_type': 'cisco_ios',
'ip': ip_address,
'username': 'pyclass',
'password': getpass(),
'port': 22,
}
net_connect = ConnectHandler(**device)
output = net_connect.send_command_expect('show version')
print output
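# Optionally, close the SSH session once the output has been collected;
# netmiko's ConnectHandler exposes a disconnect() method for this:
# net_connect.disconnect()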
|
Collisio-Adolebitque/pfne-2017
|
pynet/interop_2016/ex1_router_output/test_cisco.py
|
Python
|
gpl-3.0
| 381
|
'''
Confounder Learning and Correction Module
-----------------------------------------
@author: Max Zwiessele
'''
import numpy
from pygp.covar.linear import LinearCFISO
from pygp.covar.combinators import SumCF, ProductCF
from pygp.covar.se import SqexpCFARD
from pygp.gp.gplvm import GPLVM
from pygp.optimize.optimize_base import opt_hyper
from pygp.covar.fixed import FixedCF
from pygp.likelihood.likelihood_base import GaussLikISO
from pygp.covar.bias import BiasCF
from pygp.util.pca import PCA
from gptwosample.twosample.twosample import TwoSample
class TwoSampleConfounder(TwoSample):
"""Run GPTwoSample on given Data
**Parameters**:
- T : TimePoints [n x r x t] [Samples x Replicates x Timepoints]
- Y : ExpressionMatrix [n x r x t x d] [Samples x Replicates x Timepoints x Genes]
- q : Number of Confounders to use
- lvm_covariance : optional - set covariance to use in confounder learning
- init : [random, pca]
**Fields**:
* T: Time Points [n x r x t] [Samples x Replicates x Timepoints]
* Y: Expression [n x r x t x d] [Samples x Replicates x Timepoints x Genes]
* X: Confounders [nrt x 1+q] [SamplesReplicatesTimepoints x T+q]
    * lvm_covariance: GPLVM covariance function used for confounder learning
* n: Samples
* r: Replicates
* t: Timepoints
* d: Genes
* q: Confounder Components
"""
def __init__(self, T, Y, q=4,
lvm_covariance=None,
init="random",
covar_common=None,
covar_individual_1=None,
covar_individual_2=None):
"""
**Parameters**:
T : TimePoints [n x r x t] [Samples x Replicates x Timepoints]
Y : ExpressionMatrix [n x r x t x d] [Samples x Replicates x Timepoints x Genes]
q : Number of Confounders to use
lvm_covariance : optional - set covariance to use in confounder learning
init : [random, pca]
"""
super(TwoSampleConfounder, self).__init__(T, Y, covar_common=covar_common,
covar_individual_1=covar_individual_1,
covar_individual_2=covar_individual_2)
self.q = q
self.init = init
self.init_X(Y, init)
if lvm_covariance is not None:
self._lvm_covariance = lvm_covariance
else:
rt = self.r * self.t
# self.X_r = numpy.zeros((self.n * rt, self.n * self.r))
# for i in xrange(self.n * self.r):self.X_r[i * self.t:(i + 1) * self.t, i] = 1
# rep = LinearCFISO(dimension_indices=numpy.arange(1 + q, 1 + q + (self.n * self.r)))
self.X_s = numpy.zeros((self.n * rt, self.n))
for i in xrange(self.n):self.X_s[i * rt:(i + 1) * rt, i] = 1
sam = LinearCFISO(dimension_indices=numpy.arange(1 + q,
# + (self.n * self.r),
1 + q + self.n)) # + (self.n * self.r)
self._lvm_covariance = SumCF([LinearCFISO(dimension_indices=numpy.arange(1, 2)),
# rep,
sam,
ProductCF([sam, SqexpCFARD(dimension_indices=numpy.array([0]))]),
BiasCF()])
# if initial_hyperparameters is None:
# initial_hyperparameters = numpy.zeros(self._lvm_covariance.get_number_of_parameters()))
self._initialized = False
def learn_confounder_matrix(self,
ard_indices=None,
x=None,
messages=True,
gradient_tolerance=1E-12,
lvm_dimension_indices=None,
gradcheck=False,
maxiter=10000,
):
"""
Learn confounder matrix with this model.
**Parameters**:
x : array-like
If you provided an own lvm_covariance you have to specify
the X to use within GPLVM
lvm_dimension_indices : [int]
If you specified an own lvm_covariance you have to specify
the dimension indices for GPLVM
ard_indices : [indices]
If you provided an own lvm_covariance, give the ard indices of the covariance here,
to be able to use the correct hyperparameters for calculating the confounder covariance matrix.
"""
if lvm_dimension_indices is None:
lvm_dimension_indices = xrange(1, 1 + self.q)
if ard_indices is None:
ard_indices = slice(0, self.q)
self._check_data()
self.init_X(self.Y, self.init)
# p = PCA(Y)
# try:
# self.X = p.project(Y, self.q)
# except IndexError:
# raise IndexError("More confounder components then genes (q > d)")
if x is None:
x = self._x()
self._Xlvm = x
self.gplvm = self._gplvm(lvm_dimension_indices)
hyper = {
'lik':numpy.log([.15]),
'x':self.X,
'covar':numpy.zeros(self._lvm_covariance.get_number_of_parameters())
}
lvm_hyperparams, _ = opt_hyper(self.gplvm, hyper,
Ifilter=None, maxiter=maxiter,
gradcheck=gradcheck, bounds=None,
messages=messages,
gradient_tolerance=gradient_tolerance)
self._Xlvm = self.gplvm.getData()[0]
self._init_conf_matrix(lvm_hyperparams, ard_indices, lvm_dimension_indices)
self.initialize_twosample_covariance()
def predict_lvm(self):
return self.gplvm.predict(self._lvm_hyperparams, self._Xlvm, numpy.arange(self.d))
def initialize_twosample_covariance(self, covar_common=lambda x: SumCF([SqexpCFARD(1), x, BiasCF()]),
covar_individual_1=lambda x: SumCF([SqexpCFARD(1), x, BiasCF()]),
covar_individual_2=lambda x: SumCF([SqexpCFARD(1), x, BiasCF()]),
):
"""
initialize twosample covariance with function covariance(XX), where XX
is a FixedCF with the learned confounder matrix.
default is SumCF([SqexpCFARD(1), FixedCF(self.K_conf.copy()), BiasCF()])
"""
self.covar_comm = covar_common(FixedCF(self.K_conf.copy()))
self.covar_ind1 = covar_individual_1(FixedCF(self.K_conf.copy()))
self.covar_ind2 = covar_individual_2(FixedCF(self.K_conf.copy()))
def init_X(self, Y, init):
if init == 'pca':
y = Y.reshape(-1, self.d)
p = PCA(y)
self.X = p.project(y, self.q)
self.X += .1 * numpy.random.randn(*self.X.shape)
elif init == 'random':
self.X = numpy.random.randn(numpy.prod(self.n * self.r * self.t), self.q)
else:
print "init model {0!s} not known".format(init)
def _init_conf_matrix(self, lvm_hyperparams, conf_covar_name, lvm_dimension_indices):
self._initialized = True
self.X = lvm_hyperparams['x']
self._lvm_hyperparams = lvm_hyperparams
# if ard_indices is None:
# ard_indices = numpy.arange(1)
# ard = self._lvm_covariance.get_reparametrized_theta(lvm_hyperparams['covar'])[ard_indices]
# self.K_conf = numpy.dot(self.X*ard, self.X.T)
try:
self.gplvm
except:
self.gplvm = self._gplvm(lvm_dimension_indices)
self.K_conf = self._lvm_covariance.K(self._lvm_hyperparams['covar'], self._Xlvm, self._Xlvm, names=['XX'])
def _gplvm(self, lvm_dimension_indices):
self._Xlvm[:, lvm_dimension_indices] = self.X
Y = self.Y.reshape(numpy.prod(self.n * self.r * self.t), self.Y.shape[3])
return GPLVM(gplvm_dimensions=lvm_dimension_indices, covar_func=self._lvm_covariance,
likelihood=GaussLikISO(),
x=self._Xlvm,
y=Y)
def _x(self):
return numpy.concatenate((self.T.reshape(-1, 1), self.X, # self.X_r,
self.X_s),
axis=1)
if __name__ == '__main__':
Tt = numpy.arange(0, 16, 2)[:, None]
Tr = numpy.tile(Tt, 3).T
Ts = numpy.array([Tr, Tr])
n, r, t, d = nrtd = Ts.shape + (12,)
covar = SqexpCFARD(1)
K = covar.K(covar.get_de_reparametrized_theta([1, 13]), Tt)
m = numpy.zeros(t)
try:
from scikits.learn.mixture import sample_gaussian
except:
raise "scikits needed for this example"
# raise r
y1 = sample_gaussian(m, K, cvtype='full', n_samples=d)
y2 = sample_gaussian(m, K, cvtype='full', n_samples=d)
Y1 = numpy.zeros((t, d + d / 2))
Y2 = numpy.zeros((t, d + d / 2))
Y1[:, :d] = y1
Y2[:, :d] = y2
sames = numpy.random.randint(0, d, size=d / 2)
Y1[:, d:] = y2[:, sames]
Y2[:, d:] = y1[:, sames]
Y = numpy.zeros((n, r, t, d + d / 2))
sigma = .5
Y[0, :, :, :] = Y1 + sigma * numpy.random.randn(r, t, d + d / 2)
Y[1, :, :, :] = Y2 + sigma * numpy.random.randn(r, t, d + d / 2)
c = TwoSampleConfounder(Ts, Y)
# c.__verbose = True
# lvm_hyperparams_file_name = 'lvm_hyperparams.pickle'
c.learn_confounder_matrix()
# lvm_hyperparams_file = open(lvm_hyperparams_file_name, 'w')
# pickle.dump(c._lvm_hyperparams, lvm_hyperparams_file)
# lvm_hyperparams_file.close()
# lvm_hyperparams_file = open(lvm_hyperparams_file_name, 'r')
# c._init_conf_matrix(pickle.load(lvm_hyperparams_file), None)
# lvm_hyperparams_file.close()
c.predict_likelihoods(Ts, Y)
c.predict_means_variances(numpy.linspace(0, 24, 100))
import pylab
pylab.ion()
pylab.figure()
for _ in c.plot():
raw_input("enter to continue")
|
PMBio/gptwosample
|
gptwosample/confounder/confounder.py
|
Python
|
apache-2.0
| 10,272
|
###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = [
'InputException', 'BaseInputWdg', 'TextWdg', 'FilterTextWdg', 'TextAreaWdg',
#'TextAreaWithSelectWdg',
'RadioWdg', 'CheckboxWdg', 'FilterCheckboxWdg', 'SelectWdg', 'FilterSelectWdg',
'MultiSelectWdg', 'ItemsNavigatorWdg', 'ButtonWdg',
'SubmitWdg', 'ActionSelectWdg', 'DownloadWdg',
'ResetWdg', 'PasswordWdg', 'HiddenWdg', 'NoneWdg', 'ThumbInputWdg',
'SimpleUploadWdg', 'UploadWdg', 'MultiUploadWdg',
'CalendarWdg', 'CalendarInputWdg',
"PopupWdg", "PopupMenuWdg"
]
import os, shutil, string, types
from pyasm.common import Common, Marshaller, Date, TacticException
from pyasm.biz import File, Snapshot, Pipeline, NamingUtil, ExpressionParser
from pyasm.web import *
from pyasm.search import Search, SearchKey, SearchException
from icon_wdg import IconButtonWdg, IconWdg
from operator import itemgetter
class InputException(Exception):
pass
class BaseInputWdg(HtmlElement):
ARGS_KEYS = {}
def get_args_keys(cls):
'''external settings which populate the widget'''
return cls.ARGS_KEYS
get_args_keys = classmethod(get_args_keys)
#def __init__(my,name=None, type=None, label=None):
def __init__(my, name=None, type=None, label=None, **kwargs):
super(BaseInputWdg,my).__init__(type)
# the name of the input element
my.name = name
my.input_prefix = None
my.value = ""
my.options = {}
my.options['default'] = ""
my.options['persist'] = "false"
my.persistence = False
my.persistence_obj = None
my.cached_values = None
my.label = label
my.disabled_look = True
my.prefix = ''
my.change_cbjs_action = ''
# deprecated
my.element = None
my.parent_wdg = None
my.state = {}
my.title = ''
my.related_type = None
# FIXME: need to make this more elegant: these are only put here
# to conform to the interface of BaseTableElementWdg so that these
# elements can be put into a TableWdg. This should be more formal
# because the relationship here is quite tenuous
def get_style(my):
return ""
def get_bottom(my):
return ""
def copy(my, input):
'''copies the parameters of one widget to the other. This is useful
        for transferring the parameters specified in a config file to a contained
widget.'''
my.name = input.name
my.input_prefix = input.input_prefix
my.options = input.options
my.sobjects = input.sobjects
my.current_index = input.current_index
my.set_sobject = input.get_current_sobject()
def set_state(my, state):
'''Set the state for this table element'''
my.state = state
def get_state(my):
'''get the state for this table element'''
return my.state
def get_related_type(my):
'''Some input widgets will be related to a search type to define
a list or range of parameters. This will allow an external
widget to discover this relationship and provide a means to add
to this list'''
return my.related_type
def set_title(my, title):
my.title = title
def get_display_title(my):
        '''Function that gives a title representation of this widget'''
if my.title:
return my.title
name = my.get_name()
name = name.replace("_", " ")
return name.title()
def get_title(my):
        '''Function that gives a title representation of this widget'''
if my.title:
return my.title
name = my.get_name()
title = string.replace(my.name, "_", " ")
title = title.capitalize()
span = SpanWdg(title)
required = my.get_option("required")
if required == "true":
my._add_required(span)
return span
def _add_required(my, span):
required_span = SpanWdg(" *")
required_span.add_style("color: #f44")
required_span.add_style("font-size: 1.0em")
span.add_tip("Required Field")
span.add(required_span)
def set_parent_wdg(my, parent_wdg):
        '''method to set the parent widget. This is typically the EditWdg'''
my.parent_wdg = parent_wdg
def get_parent_wdg(my):
return my.parent_wdg
def set_layout_wdg(my, layout_wdg):
my.parent_wdg = layout_wdg
def get_prefs(my):
'''Function that that gives a preference widget for this input'''
return ""
def set_input_prefix(my, input_prefix):
my.input_prefix = input_prefix
def get_input_name(my, name=''):
input_name = my.name
if name:
input_name = name
if my.input_prefix:
return "%s|%s" % (my.input_prefix, input_name)
else:
return input_name
def set_name(my, name):
'''set the name externally'''
my.name = name
def get_name(my):
return my.name
def get_label(my):
if my.label:
return my.label
else:
return my.name
def set_options(my, options):
my.options = options
if my.has_option('search_key'):
search_key = options.get('search_key')
if search_key:
sobj = SearchKey.get_by_search_key(search_key)
my.set_sobjects([sobj])
def has_option(my, key):
return my.options.has_key(key)
def set_option(my, key, value):
my.options[key] = value
def get_option(my, key):
'''gets the value of the specified option'''
if my.options.has_key(key):
return my.options[key]
else:
return ""
def set_disabled_look(my, disable):
my.disabled_look = disable
def is_read_only(my):
''' if the read_only option is true, either set disabled or readonly'''
if my.get_option('read_only') in ['true', True]:
return True
return False
def is_edit_only(my):
return my.get_option('edit_only') == 'true'
def is_simple_viewable(my):
return True
def is_editable(my):
return True
def check_persistent_values(my, cgi_values):
web = WebContainer.get_web()
if my.is_form_submitted() and web.has_form_key(my.get_input_name()):
# if the form is submitted, then always use the submitted value
my._set_persistent_values(cgi_values)
my.cached_values = cgi_values
return cgi_values
else:
return False
def check_persistent_display(my, cgi_values):
# no longer checking for web.get_form_keys()
web = WebContainer.get_web()
if my.get_option("persist") == "true":
# old web implementation
if web.has_form_key(my.get_input_name()):
values = cgi_values
#my._set_persistent_values(values)
return values
else:
# try the json implementation if it has been set
from tactic.ui.filter import FilterData
filter_data = FilterData.get()
values = filter_data.get_values_by_prefix(my.prefix)
if values:
values = values[0]
value = values.get(my.get_input_name())
if value:
cgi_values = [value]
#my._set_persistent_values(cgi_values)
return cgi_values
return False
else:
return False
def get_values(my, for_display=False):
'''gets the current value of this input element. The order of
importance is as follows. If the form was submitted, this value
will always take precedence. Then externally set values through
code.'''
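        # Rough precedence for the non-display path below: an explicitly set
        # value wins, then persisted/submitted form values, then the current
        # sobject's column value, then stored widget settings, then the
        # "default" option.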
values = []
web = WebContainer.get_web()
# getting the value from CGI depends on whether this is for display
# of the widget or for getting the current value of this widget.
cgi_values = web.get_form_values( my.get_input_name() )
if for_display:
# get it from the sobject: this grabs the values from the
# sobject in the db for editing
column = my.get_option('column')
if not column:
column = my.name
if my.get_current_sobject() and \
my.get_current_sobject().has_value(column):
sobject = my.get_current_sobject()
values = [sobject.get_value(column)]
if not values:
values = []
return values
# if set explicitly, then this is the value
if my.value != '':
values = [my.value]
my._set_persistent_values(values)
return values
# the value is taken from CGI only if the input is persistent
values = my.check_persistent_display(cgi_values)
if values != False:
return values
else:
values = []
# This option will read the webstate if no explicit value is
# present
if my.get_option("web_state") == "true":
# this will eventually use the WebState: for now, use cgi
values = cgi_values
if values and values[0] != "":
my._set_persistent_values(values)
return values
# if this has been called before, get the previous value
elif my.cached_values != None:
return my.cached_values
# check for key existence only in for_display=False
#elif my.is_form_submitted() and web.has_form_key(my.get_input_name()):
# # if the form is submitted, then always use the submitted value
# my._set_persistent_values(cgi_values)
# my.cached_values = cgi_values
# return cgi_values
else:
temp_values = my.check_persistent_values(cgi_values)
if temp_values != False:
return temp_values
# if there are values in CGI, use these
if not for_display and cgi_values:
values = cgi_values
# if the value has been explicitly set, then use that one
elif my.value != '':
values = [my.value]
# otherwise, get it from the sobject: this grabs the values from the
# sobject in the db for editing
elif my.get_current_sobject() and \
my.get_current_sobject().has_value(my.name):
sobject = my.get_current_sobject()
values = [sobject.get_value(my.name)]
if not values:
values = []
# This option will read the webstate if no explicit value is
# present
elif my.get_option("web_state") == "true":
# this will eventually use the WebState: for now, use cgi
values = cgi_values
my._set_persistent_values(values)
my.cached_values = values
return values
# otherwise, get it from the persistence (database)
elif my.persistence:
class_path = Common.get_full_class_name(my.persistence_obj)
key = "%s|%s" % (class_path, my.name)
#values = WidgetSettings.get_key_values(key, auto_create=False)
values = WidgetSettings.get_key_values(key)
# if all of the above overrides fail, then set to the default
# the rules for persistent input is slightly different
if (values == None and my.persistence) or (values == [] and not my.persistence):
default = my.get_option("default")
if default != "":
# default can be a list
if isinstance(default, list):
values = default
else:
values = [default]
# evaluate an sobject expression
new_values = []
for value in values:
new_value = NamingUtil.eval_template(value)
new_values.append(new_value)
values = new_values
else:
values = []
if values:
#web.set_form_value(my.name, values[0])
web.set_form_value(my.get_input_name(), values)
my._set_persistent_values(values)
# only cache if it is not for display: otherwise we have to separate
# the for display cache and the non for display cache
if not for_display:
my.cached_values = values
return values
def _set_persistent_values(my, values):
if my.persistence:
class_path = Common.get_full_class_name(my.persistence_obj)
key = "%s|%s" % (class_path, my.name)
# make sure the value is not empty
if not values:
values = []
# if the current value is different from stored value, then update
# this check is done in set_key_values()
WidgetSettings.set_key_values(key, values)
def get_value(my, for_display=False):
values = my.get_values(for_display)
if not values:
return ""
else:
return values[0]
def set_value(my, value, set_form_value=True):
my.value = value
# some widgets do not have names (occasionally)
name = my.get_input_name()
if not name:
return
# when the value is explicitly set, the set then form value as such
if set_form_value:
web = WebContainer.get_web()
web.set_form_value(name, value)
def set_persistence(my, object=None):
my.persistence = True
if object == None:
object = my
my.persistence_obj = object
# this implies persist on submit (it is also faster)
my.set_persist_on_submit()
def set_persist_on_submit(my, prefix=''):
my.set_option("persist", "true")
my.prefix = prefix
def set_submit_onchange(my, set=True):
if set:
my.change_cbjs_action = 'spt.panel.refresh( bvr.src_el.getParent(".spt_panel") );'
#my.add_behavior(behavior)
else:
print("DEPRECATED: set_submit_onchange, arg set=False")
my.remove_event('onchange')
def is_form_submitted(my):
web = WebContainer.get_web()
if web.get_form_value("is_from_login") == "yes":
return False
# all ajax interactions are considered submitted as well
if web.get_form_value("ajax"):
return True
return web.get_form_value("is_form_submitted") == "yes"
def set_form_submitted(my, event='onchange'):
'''TODO: deprecated this: to declare if a form is submitted, used primarily for FilterCheckboxWdg'''
my.add_event(event, "document.form.elements['is_form_submitted'].value='yes'", idx=0)
def set_style(my, style):
'''Sets the style of the top widget contained in the input widget'''
my.element.set_style(style)
def get_key(my):
if not my.persistence_obj:
my.persistence_obj = my
key = "%s|%s"%(Common.get_full_class_name(my.persistence_obj), my.name)
return key
def get_save_script(my):
'''get the js script to save the value to widget settings for persistence'''
key = my.get_key()
return "spt.api.Utility.save_widget_setting('%s', bvr.src_el.value)" %key;
def get_refresh_script(my):
'''get a general refresh script. use this as a template if you need to pass in
bvr.src_el.value to values'''
return "var top=spt.get_parent_panel(bvr.src_el); spt.panel.refresh(top, {}, true)"
class BaseTextWdg(BaseInputWdg):
def handle_mode(my):
return
'''
# DISABLED for now
mode = my.options.get("mode")
if mode == "string":
behavior = {
'type': 'keyboard',
'kbd_handler_name': 'DgTableMultiLineTextEdit'
}
my.add_behavior(behavior)
elif mode in ["float", "integer"]:
behavior = {
'type': 'keyboard',
'kbd_handler_name': 'FloatTextEdit'
}
my.add_behavior(behavior)
'''
class TextWdg(BaseTextWdg):
ARGS_KEYS = {
'size': {
'description': 'width of the text field in pixels',
'type': 'TextWdg',
'order': 0,
'category': 'Options'
},
'read_only': {
'description': 'whether to set this text field to read-only',
'type': 'SelectWdg',
'values' : 'true|false',
'order': 1,
'category': 'Options'
}
}
def __init__(my,name=None, label=None):
super(TextWdg,my).__init__(name,"input", label=label)
my.css = "inputfield"
#my.add_class(my.css)
my.add_class("spt_input")
#my.add_class("form-control")
#my.add_color("background", "background", 10)
#my.add_color("color", "color")
#my.add_border()
def get_display(my):
my.set_attr("type", "text")
my.set_attr("name", my.get_input_name())
if my.is_read_only():
# do not set disabled attr to disabled cuz usually we want the data to
# get read and passed to callbacks
my.set_attr('readonly', 'readonly')
if my.disabled_look == True:
#my.add_class('disabled')
my.add_color("background", "background", -10)
value = my.get_value(for_display=True)
        # make sure the displayed value does not break the HTML value attribute
        if isinstance(value, basestring):
            value = value.replace('"', "&quot;")
my.set_attr("value", value)
size = my.get_option("size")
if size:
my.set_attr("size", size)
my.handle_mode()
return super(TextWdg,my).get_display()
class FilterTextWdg(TextWdg):
    '''This composite text acts as a filter and can be used, for instance,
    in the prefs area of TableWdg'''
def __init__(my,name=None, label=None, css=None , is_number=False, has_persistence=True):
super(FilterTextWdg,my).__init__(name, label=label)
if is_number:
my.add_event('onchange',\
"val=document.form.elements['%s'].value; if (Common.validate_int(val))\
document.form.submit(); else \
{alert('[' + val + '] is not a valid integer.')}" %name)
else:
my.set_submit_onchange()
if has_persistence:
my.set_persistence()
else:
my.set_persist_on_submit()
my.css = css
my.unit = ''
def set_unit(my, unit):
my.unit = unit
def get_display(my):
my.handle_behavior()
if not my.label:
return super(FilterTextWdg, my).get_display()
else:
text = TextWdg.get_class_display(my)
span = SpanWdg(my.label, css=my.css)
span.add(text)
span.add(my.unit)
return span
def handle_behavior(my):
if my.persistence:
key = my.get_key()
value = WidgetSettings.get_value_by_key(key)
if value:
my.set_value(value)
behavior = {"type" : "change",
"cbjs_preaction":\
"spt.api.Utility.save_widget_setting('%s',bvr.src_el.value)"%key}
if my.change_cbjs_action:
behavior['cbjs_action'] = my.change_cbjs_action
my.add_behavior(behavior)
class TextAreaWdg(BaseTextWdg):
ARGS_KEYS = {
'rows': 'The number of rows to show',
'cols': 'The number of columns to show',
}
def __init__(my,name=None, **kwargs):
super(TextAreaWdg,my).__init__(name,"textarea")
my.kwargs = kwargs
        # on OSX the rows and cols flags are not respected
width = kwargs.get("width")
if width:
my.add_style("width", width)
height = kwargs.get("height")
if height:
my.add_style("height", height)
web = WebContainer.get_web()
browser = web.get_browser()
if browser == "Qt":
rows = None
cols = None
else:
rows = kwargs.get("rows")
cols = kwargs.get("cols")
if rows:
my.set_attr("rows", rows)
if cols:
my.set_attr("cols", cols)
browser = web.get_browser()
if not width and not cols:
width = 300
my.add_style("width", width)
my.add_class("spt_input")
my.add_border()
def get_display(my):
my.set_attr("name", my.get_input_name())
#my.add_style("font-family: Courier New")
my.add_color("background", "background", 10)
my.add_color("color", "color")
#my.add_border()
rows = my.get_option("rows")
cols = my.get_option("cols")
if not rows:
rows = 3
my.set_attr("rows", rows)
if not cols:
cols = 50
my.set_attr("cols", cols)
if my.is_read_only():
my.set_attr('readonly', 'readonly')
if my.disabled_look == True:
#my.add_class('disabled')
my.add_color("background", "background", -10)
# value always overrides
value = my.kwargs.get("value")
if not value:
value = my.get_value(for_display=True)
my.add(value)
#my.handle_mode()
return super(TextAreaWdg,my).get_display()
class RadioWdg(BaseInputWdg):
def __init__(my,name=None, label=None):
super(RadioWdg,my).__init__(name,"input")
my.set_attr("type", "radio")
my.label = label
def set_checked(my):
my.set_attr("checked", "1")
def get_display(my):
my.set_attr("name", my.get_input_name())
my.add_class("spt_input")
# This is a little confusing. the option value is mapped to the
# html attribute value, however, the value from get_value() is the
# state of the element (on or off)
values = my.get_values(for_display=True)
# determine if this is checked
if my.name != None and len(values) != 0 \
and my.get_option("value") in values:
my.set_checked()
# convert all of the options to attributes
for name, option in my.options.items():
my.set_attr(name,option)
if my.label:
span = SpanWdg()
span.add(" %s" % my.label)
my.add(span)
span.add_style("top: 3px")
span.add_style("position: relative")
return super(RadioWdg,my).get_display()
class CheckboxWdg(BaseInputWdg):
def __init__(my,name=None, label=None, css=None):
super(CheckboxWdg,my).__init__(name,"input", label)
my.set_attr("type", "checkbox")
my.label = label
my.css = css
my.add_class("spt_input")
def set_default_checked(my):
        '''this is used for checkboxes that have no value set'''
my.set_option("default", "on")
def set_checked(my):
my.set_option("checked", "1")
def is_checked(my, for_display=False):
# Checkbox needs special treatment when comes to getting values
values = my.get_values(for_display=for_display)
value_option = my._get_value_option()
# FIXME if values is boolean, it will raise exception
if value_option in values:
return True
else:
return False
#return my.get_value() == my._get_value_option()
def _get_value_option(my):
value_option = my.get_option("value")
if value_option == "":
value_option = 'on'
return value_option
def get_key(my):
class_path = Common.get_full_class_name(my)
key = "%s|%s" % (class_path, my.name)
return key
def check_persistent_values(my, cgi_values):
web = WebContainer.get_web()
if my.is_form_submitted():# and web.has_form_key(my.get_input_name):
# if the form is submitted, then always use the submitted value
if not my.persistence_obj:
return False
class_path = Common.get_full_class_name(my.persistence_obj)
key = "%s|%s" % (class_path, my.name)
setting = WidgetSettings.get_by_key(key, auto_create=False)
if setting == None:
return False
if not my.is_ajax(check_name=False):
my._set_persistent_values(cgi_values)
my.cached_values = cgi_values
return cgi_values
else:
return False
def get_display(my):
my.set_attr("name", my.get_input_name())
# This is a little confusing. the option value is mapped to the
# html attribute value, however, the value from get_value() is the
# state of the element (on or off) or the "value" option
values = my.get_values(for_display=True)
# for multiple checkboxes using the same name
if len(values) == 1:
# skip boolean
value = values[0]
if value and not isinstance(value, bool) and '||' in value:
values = value.split('||')
# determine if this is checked
value_option = my._get_value_option()
if values and len(values) != 0:
if value_option in values:
my.set_checked()
elif True in values: # for boolean columns
my.set_checked()
# convert all of the options to attributes
for name, option in my.options.items():
my.set_attr(name,option)
my.handle_behavior()
if not my.label:
return super(CheckboxWdg, my).get_display()
else:
cb = BaseInputWdg.get_class_display(my)
span = SpanWdg(cb, css=my.css)
span.add(my.label)
return span
def handle_behavior(my):
if my.persistence:
key = "%s|%s"%(Common.get_full_class_name(my.persistence_obj), my.name)
value = WidgetSettings.get_value_by_key(key)
if value:
my.set_value(value)
behavior = {"type" : "click_up",
'propagate_evt': True,
"cbjs_preaction":
"spt.input.save_selected(bvr, '%s','%s')"%(my.name, key)}
#"spt.api.Utility.save_widget_setting('%s',bvr.src_el.value)"%key}
#if my.change_cbjs_action:
# behavior['cbjs_action'] = my.change_cbjs_action
my.add_behavior(behavior)
class FilterCheckboxWdg(CheckboxWdg):
    '''This composite checkbox acts as a filter and can be used, for instance,
    in the prefs area of TableWdg'''
def __init__(my,name=None, label=None, css=None ):
super(FilterCheckboxWdg,my).__init__(name, label=label, css=css)
#my.set_submit_onchange()
my.set_persistence()
def get_display(my):
# order matters here
return super(FilterCheckboxWdg, my).get_display()
class SelectWdg(BaseInputWdg):
SELECT_LABEL = "- Select -"
ALL_MODE = "all"
NONE_MODE = "NONE"
MAX_DEFAULT_SIZE = 20
# FIXME: this should not be here!!!
# dict for default project settings that will be auto-created if encountered.
# If not listed here, user will be prompted to add it himself
DEFAULT_SETTING = {'bin_type': 'client|dailies', 'bin_label': 'anim|tech', \
'shot_status': 'online|offline', 'note_dailies_context': 'dailies|review',\
'timecard_item': 'meeting|training|research'}
ARGS_KEYS = {
'values': {
'description': 'A list of values separated by | that determine the actual values of the selection',
'order': 0,
'category': 'Options'
},
'labels': {
'description': 'A list of values separated by | that determine the label of the selection',
'order': 1,
'category': 'Options'
},
'values_expr': {
'description': 'A list of values retrieved through an expression. e.g. @GET(prod/shot.code)',
'type': 'TextAreaWdg',
'order': 2
},
'labels_expr': {
'description': 'A list of labels retrieved through an expression. e.g. @GET(prod/shot.name)',
'type': 'TextAreaWdg',
'order': 3
},
'mode_expr': {
'description': 'Specify if it uses the current sObject as a starting point',
'type': 'SelectWdg',
'values': 'relative',
'empty': 'true',
'order': 4,
},
'empty': {
'description': 'The label for an empty selection',
#'default': '-- Select --',
'type': 'SelectWdg',
'values': 'true|false',
'order': 3,
'category': 'Options'
},
'default': {
'description': 'The default selection value in an edit form. Can be a TEL variable.',
'type': 'TextWdg',
'category': 'Options',
'order': 2,
},
'query': {
'description': 'Query shorthand in the form of <search_type>|<value_column>|<label_column>"'
}
}
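    # For illustration, a hypothetical "query" value following the shorthand
    # documented above could be "prod/shot|code|name", i.e.
    # <search_type>|<value_column>|<label_column>.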
def __init__(my, name=None, **kwargs):
my.kwargs = kwargs
css = kwargs.get('css')
label = kwargs.get('label')
my.sobjects_for_options = None
my.empty_option_flag = False
my.empty_option_label, my.empty_option_value = (my.SELECT_LABEL, "")
my.append_list = []
my.values = []
my.labels = []
my.has_set_options = False
my.css = css
my.append_widget = None
super(SelectWdg,my).__init__(name, type="select", label=label)
# add the standard style class
my.add_class("inputfield")
my.add_class("spt_input")
# BOOTSTRAP
my.add_class("form-control")
my.add_class("input-sm")
def get_related_type(my):
# In order to get the related type, the dom options need to have
# been processed
if not my.has_set_options:
my.set_dom_options(is_run=False)
return my.related_type
def add_empty_option(my, label='---', value= ''):
'''convenience function to an option with no value'''
my.empty_option_flag = True
my.empty_option_label, my.empty_option_value = label, value
def add_none_option(my):
my.append_option("-- %s --" %SelectWdg.NONE_MODE,\
SelectWdg.NONE_MODE)
def remove_empty_option(my):
my.empty_option_flag = False
def append_option(my, label, value):
my.append_list.append((label, value))
def set_search_for_options(my, search, value_column=None, label_column=None):
assert value_column != ""
assert label_column != ""
sobjects = search.do_search()
my.set_sobjects_for_options(sobjects,value_column,label_column)
def set_sobjects_for_options(my,sobjects,value_column=None,label_column=None):
if value_column == None:
my.value_column = my.name
else:
my.value_column = value_column
if label_column == None:
my.label_column = my.value_column
else:
my.label_column = label_column
assert my.value_column
assert my.label_column
my.sobjects_for_options = sobjects
def _get_setting(my):
        '''this checks the setting and adds warnings if it's empty'''
values_option = []
labels_option = []
setting = my.get_option("setting")
if setting:
from pyasm.prod.biz import ProdSetting
values_option = ProdSetting.get_seq_by_key(setting)
if not values_option:
data_dict = {'key': setting}
prod_setting = ProdSetting.get_by_key(setting)
search_id = -1
setting_value = my.DEFAULT_SETTING.get(setting)
if prod_setting:
if setting_value:
# use the default if available
prod_setting.set_value('value', setting_value)
prod_setting.commit()
values_option = ProdSetting.get_seq_by_key(setting)
labels_option = values_option
else:
# prompt the user to do it instead
my._set_append_widget(prod_setting.get_id(), data_dict)
# if it is a new insert
else:
if setting_value:
data_dict['value'] = setting_value
type = 'sequence'
ProdSetting.create(setting, setting_value, type)
values_option = ProdSetting.get_seq_by_key(setting)
labels_option = values_option
else:
my._set_append_widget(search_id, data_dict)
else:
# check if it is map
prod_setting = ProdSetting.get_by_key(setting)
if prod_setting.get_value('type') =='map':
map_option = ProdSetting.get_map_by_key(setting)
labels_option = [ x[1] for x in map_option ]
values_option = [ x[0] for x in map_option ]
else:
labels_option = values_option
return values_option, labels_option
def _set_append_widget(my, search_id, data_dict):
from web_wdg import ProdSettingLinkWdg
prod_setting_link = ProdSettingLinkWdg(search_id)
prod_setting_link.set_value_dict(data_dict)
# HACK: usually when there is an iframe, there is a widget value
#if WebContainer.get_web().get_form_value('widget'):
# prod_setting_link.set_layout('plain')
my.append_widget = prod_setting_link
def set_dom_options(my, is_run=True):
''' set the dom options for the Select. It should only be called once
or there will be some unexpected behaviour'''
# get the values
my.values = []
labels_option = my.get_option("labels")
values_option = my.get_option("values")
# if there are no values, check if there is a project setting
# which will provide both values_option and labels_option
if not values_option:
values_option, labels_option = my._get_setting()
if type(values_option) == types.ListType:
my.values.extend(values_option)
elif my.values != "":
my.values = string.split( my.get_option("values"), "|" )
else:
my.values = ["None"]
# get the labels for the select options
my.labels = []
if type(labels_option) == types.ListType:
my.labels = labels_option[:]
elif labels_option != "":
my.labels = string.split( labels_option, "|" )
if len(my.values) != len(my.labels):
raise InputException("values [%s] does not have the same number of elements as [%s]" % (`my.values`, `my.labels`))
else:
my.labels = my.values[:]
query = my.get_option("query")
if query and query != "" and query.find("|") != -1:
search_type, value, label = query.split("|")
project_code = None
search = None
current_sobj = my.get_current_sobject()
if current_sobj:
project_code = current_sobj.get_project_code()
try:
search = Search(search_type, project_code=project_code)
except SearchException, e:
# skip if there is an unregistered sType or the table does not exist in the db
                if e.__str__().find('does not exist for database') != -1 or e.__str__().find('not registered') != -1:
my.values = ['ERROR in query option. Remove it in Edit Mode > Other Options']
my.labels = my.values[:]
return
query_filter = my.get_option("query_filter")
if query_filter:
search.add_where(query_filter)
query_limit = my.get_option("query_limit")
if query_limit:
search.add_limit(int(query_limit))
if '()' not in label:
search.add_order_by(label)
elif '()' not in value:
search.add_order_by(value)
if not value or not label:
raise InputException("Query string for SelectWdg is malformed [%s]" % query)
# store the related type
my.related_type = search_type
my.set_search_for_options(search,value,label)
values_expr = my.get_option("values_expr")
if not values_expr:
values_expr = my.kwargs.get("values_expr")
labels_expr = my.get_option("labels_expr")
if not labels_expr:
labels_expr = my.kwargs.get("labels_expr")
mode_expr = my.get_option("mode_expr")
if not mode_expr:
mode_expr = my.kwargs.get("mode_expr")
if values_expr:
if mode_expr == 'relative':
sobjects = my.sobjects
if not sobjects:
parent_wdg = my.get_parent_wdg()
if parent_wdg:
# use the search_key as a starting point if applicable
sk = parent_wdg.kwargs.get('search_key')
if sk:
sobjects = [Search.get_by_search_key(sk)]
else:
sk = my.kwargs.get('search_key')
if sk:
sobjects = [Search.get_by_search_key(sk)]
else:
sobjects = []
try:
parser = ExpressionParser()
my.values = parser.eval(values_expr, sobjects=sobjects)
except Exception, e:
print "Expression error: ", str(e)
my.values = ['Error in values expression']
my.labels = my.values[:]
# don't raise anything yet until things are properly drawn
#raise InputException(e)
if labels_expr:
try:
my.labels = parser.eval(labels_expr, sobjects=sobjects)
# expression may return it as a string when doing concatenation is done on a 1-item list
if isinstance(my.labels, basestring):
my.labels = [my.labels]
except Exception, e:
print "Expression error: ", str(e)
my.labels = ['Error in labels expression']
else:
my.labels = my.values[:]
# create a tuple for sorting by label if it's a list
if my.values:
zipped = zip(my.values, my.labels)
zipped = sorted(zipped, key=itemgetter(1))
unzipped = zip(*zipped)
my.values = list(unzipped[0])
my.labels = list(unzipped[1])
# if there is a search for options stored, then use these
if my.sobjects_for_options != None:
my.values = []
my.labels = []
for sobject in my.sobjects_for_options:
# if there was a function call, use it
if my.value_column.find("()") != -1:
my.values.append( eval("sobject.%s" % my.value_column ) )
else:
my.values.append(sobject.get_value(my.value_column, no_exception=True))
if my.label_column.find("()") != -1:
my.labels.append( eval("sobject.%s" % my.label_column ) )
else:
my.labels.append(sobject.get_value(my.label_column, no_exception=True))
        # manually add extra values and labels
extra_values = my.get_option("extra_values")
if extra_values:
extra_values = extra_values.split("|")
my.values.extend(extra_values)
extra_labels = my.get_option("extra_labels")
if extra_labels:
extra_labels = "|".split(extra_labels)
my.labels.extend(extra_labels)
else:
my.labels.extend(extra_values)
# add empty option
if my.empty_option_flag or my.get_option("empty") not in ['','false']:
my.values.insert(0, my.empty_option_value)
my.labels.insert(0, my.empty_option_label)
# append any custom ones
if my.append_list:
for label, value in my.append_list:
my.values.append(value)
my.labels.append(label)
if is_run:
my.has_set_options = True
def get_select_values(my):
if not my.has_set_options:
my.set_dom_options()
return my.labels, my.values
def init(my):
my.add_color("background", "background", 10)
my.add_color("color", "color")
def get_display(my):
class_name = my.kwargs.get('class')
if class_name:
my.add_class(class_name)
if my.is_read_only():
# don't disable it, just have to look disabled
my.set_attr('disabled', 'disabled')
my.add_class('disabled')
assert my.get_input_name() != None
my.set_attr("name", my.get_input_name())
width = my.get_option("width")
if width:
my.add_style("width: %s" % width)
my.add_border()
my.add_style("margin: 0px 5px")
# default select element size to max of 20 ...
sz = '20'
# look for a site-wide configuration for SELECT element size ...
from pyasm.common import Config
select_element_size = Config.get_value('web_ui','select_element_size')
if select_element_size:
sz = select_element_size
# see if the configuration of this widget specified a SELECT size (local config overrides site-wide) ...
wdg_config_select_size = my.get_option("select_size")
if wdg_config_select_size:
sz = wdg_config_select_size
# store configured size of SELECT to be used later on the client side to set the
# SELECT drop down size ...
my.set_attr('spt_select_size',sz)
# assign all the labels and values
if not my.has_set_options:
my.set_dom_options()
# get the current value for this element
current_values = my.get_values(for_display=True)
#if not current_value and my.has_option("default"):
#current_value = my.get_option("default")
# go through each value and set the select options
selection_found = False
for i in range(0, len(my.values)):
if i >= len(my.labels): break
value = my.values[i]
label = my.labels[i]
option = HtmlElement("option")
# always compare string values. Not sure if this is a good
# idea, but it should work for most cases
if my._is_selected(value, current_values):
option.set_attr("selected", "selected")
selection_found = True
option.set_attr("value", value)
option.add(label)
my.add(option)
# if no valid values are found, then show the current value in red
show_missing = my.get_option("show_missing")
if show_missing in ['false', False]:
show_missing = False
else:
show_missing = True
if show_missing and not selection_found: #and (my.empty_option_flag or my.get_option("empty") != ""):
option = HtmlElement("option")
value = my.get_value()
# this converts potential int to string
my.values = [Common.process_unicode_string(x) for x in my.values]
if value and value not in my.values:
option.add("%s" % value)
option.set_attr("value", value)
option.add_style("color", "red")
option.set_attr("selected", "selected")
my.add_style("color", "#f44")
my.add(option)
my.handle_behavior()
if not my.label and not my.append_widget:
return super(SelectWdg, my).get_display()
else:
sel = BaseInputWdg.get_class_display(my)
span = SpanWdg(my.label, css=my.css)
span.add(sel)
span.add(my.append_widget)
return span
def _is_selected(my, value, current_values):
if current_values:
if not isinstance(value, basestring):
value = str(value)
cur_value = current_values[0]
if not isinstance(cur_value, basestring):
cur_value = str(cur_value)
return value == cur_value
else:
return False
def handle_behavior(my):
        # if this interferes with something else, please leave a comment so it can be fixed. Similar logic is found in FilterCheckboxWdg and FilterTextWdg
if my.persistence:
key = my.get_key()
value = WidgetSettings.get_value_by_key(key)
if value:
my.set_value(value)
behavior = {"type" : "change",
"cbjs_preaction":\
"spt.api.Utility.save_widget_setting('%s',bvr.src_el.value)"%key}
if my.change_cbjs_action:
behavior['cbjs_action'] = my.change_cbjs_action
my.add_behavior(behavior)
class FilterSelectWdg(SelectWdg):
def __init__(my, name=None, label='', css=None):
super(FilterSelectWdg,my).__init__(name, label=label, css=css)
my.set_submit_onchange()
my.set_persistence()
def get_display(my):
return super(FilterSelectWdg, my).get_display()
class ActionSelectWdg(SelectWdg):
def __init__(my,name=None):
super(ActionSelectWdg,my).__init__(name)
my.add_class("action")
class MultiSelectWdg(SelectWdg):
def __init__(my,name=None, label='', css=None):
super(MultiSelectWdg,my).__init__(name, label=label, css=css)
my.set_attr("multiple", "1")
my.set_attr("size", "6")
def _is_selected(my, value, current_values):
if not current_values:
return False
# if there is only one value, then try and make the assumption that
# this may be a single string array
if len(current_values) == 1:
current_value = current_values[0]
if current_value.startswith("||") and current_value.endswith("||"):
current_value = current_value.strip("||")
current_values = current_value.split("||")
return value in current_values
class ItemsNavigatorWdg(HtmlElement):
''' a navigator that breaks down a long list of items into chunks
selected by a drop-down menu '''
DETAIL = "detail_style"
LESS_DETAIL = "less_detail_style"
def __init__(my, label, max_length, step, refresh=True, max_items=100):
assert isinstance(max_length, int) and step > 0
if max_length < 0:
max_length = 0
my.max_length = max_length
my.step = step
my.label = label
my.show_label = True
my.style = my.DETAIL
my.refresh = refresh
my.refresh_script = ''
my.select = SelectWdg(my.label)
my.select.add_color("background-color", "background", -8)
my.select.add_style("font-size: 0.9em")
my.select.add_style("margin-top: 3px")
my.select.set_persist_on_submit()
my.max_items = max_items
super(ItemsNavigatorWdg, my).__init__('span')
def set_style(my, style):
my.style = style
def set_refresh_script(my, script):
my.refresh_script = script
def get_display(my):
if not my.refresh:
my.select.add_event('onchange', my.refresh_script)
list_num = int(my.max_length / my.step)
value_list = []
label_list = []
# set limit
if list_num > my.max_items:
past_max = list_num - my.max_items
list_num = my.max_items
else:
past_max = 0
for x in xrange(list_num):
value_list.append("%s - %s" %(x* my.step + 1, (x+1) * my.step))
# handle the last item
if not past_max:
if list_num * my.step + 1 <= my.max_length:
value_list.append("%s - %s" %(list_num * my.step + 1,\
my.max_length ))
else:
value_list.append( "+ %s more" % past_max)
if my.style == my.DETAIL:
label_list = value_list
else:
for x in xrange(list_num):
label_list.append("Page %s" %(x+1) )
if list_num * my.step + 1 <= my.max_length:
label_list.append("Page %s" % (list_num+1))
my.select.add_empty_option(my.select.SELECT_LABEL, '')
my.select.set_option("values", value_list)
my.select.set_option("labels", label_list)
if my.max_length < my.step:
my.step = my.max_length
my.select.set_option("default", "%s - %s" %(1, my.step))
if my.show_label:
my.add("%s:" %my.label)
my.add(my.select)
return super(ItemsNavigatorWdg, my).get_display()
def set_display_label(my, visible=True):
my.show_label = visible
def set_value(my, value):
my.select.set_value(value)
def get_value(my):
return my.select.get_value()
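# --- Illustrative usage sketch (not part of the original file) ---------------
# How ItemsNavigatorWdg might be driven, assuming the surrounding page buffers
# get_display() as usual. The label, range, step and selected chunk below are
# made up for illustration only.
#
#     nav = ItemsNavigatorWdg(label='frames', max_length=240, step=50)
#     nav.set_style(ItemsNavigatorWdg.LESS_DETAIL)   # show "Page N" labels
#     nav.set_value("51 - 100")                      # preselect the second chunk
#     html = nav.get_display()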
class ButtonWdg(BaseInputWdg):
def __init__(my,name=None):
super(ButtonWdg,my).__init__(name,"input")
#my.add_style("background-color: #f0f0f0")
def get_display(my):
my.set_attr("type", "button")
my.set_attr("name", my.get_input_name())
value = my.name
my.set_attr("value",value)
return super(ButtonWdg,my).get_display()
class SubmitWdg(BaseInputWdg):
def __init__(my,name=None,value=None):
super(SubmitWdg,my).__init__(name, "input")
my.add_style("background-color: #f0f0f0")
my.value = value
def get_display(my):
my.set_attr("type", "submit")
my.set_attr("name", my.get_input_name())
if my.value == None:
my.value = my.name
my.set_attr("value",my.value)
return super(SubmitWdg,my).get_display()
class ResetWdg(BaseInputWdg):
def __init__(my,name=None):
super(ResetWdg,my).__init__(name, "input")
def get_display(my):
my.set_attr("type", "reset")
my.set_attr("name", my.get_input_name())
return super(ResetWdg,my).get_display()
class PasswordWdg(BaseInputWdg):
def __init__(my,name=None):
super(PasswordWdg,my).__init__(name,"input")
my.css = "inputfield"
my.add_class(my.css)
my.add_class("spt_input")
my.add_color("background", "background", 10)
my.add_color("color", "color")
my.add_style("border: solid 1px %s" % my.get_color("border_color") )
#my.add_style("width: 200px")
def get_display(my):
my.set_attr("type", "password")
my.set_attr("name", my.get_input_name())
my.add_class(my.css)
return super(PasswordWdg,my).get_display()
class HiddenWdg(BaseInputWdg):
def __init__(my,name=None, value=''):
super(HiddenWdg,my).__init__(name,"input")
my.value = value
def get_title(my):
return None
def get_display(my):
if my.options.get("value"):
my.value = my.options.get("value")
my.set_attr("type", "hidden")
my.set_attr("name", my.get_input_name())
my.set_attr("value", my.get_value(for_display=True))
my.add_class("spt_input")
return super(HiddenWdg,my).get_display()
class NoneWdg(BaseInputWdg):
'''An empty widget'''
def __init__(my,name=None):
super(NoneWdg,my).__init__(name)
def get_title(my):
if my.is_read_only():
return super(NoneWdg, my).get_title()
else:
return ''
def get_display(my):
if my.is_read_only():
my.set_attr('readonly', 'readonly')
my.add(my.get_value())
return super(NoneWdg, my).get_display()
else:
return ''
class ThumbInputWdg(BaseInputWdg):
    '''Wrapper around the thumb widget, so that it can be displayed in the
input form'''
def __init__(my,name=None):
super(ThumbInputWdg,my).__init__(name)
def get_title(my):
return ' '
def get_display(my):
sobject = my.get_current_sobject()
if sobject.is_insert():
icon_path = IconWdg.get_icon_path("NO_IMAGE")
img= "<img src="+icon_path+"></img>"
return img
if sobject.has_value("files"):
column = "files"
elif sobject.has_value("images"):
column = "images"
elif sobject.has_value("snapshot"):
column = "snapshot"
else:
column = "snapshot"
#return 'No icon'
from file_wdg import ThumbWdg
icon = ThumbWdg()
icon.set_name(column)
icon.set_show_orig_icon(True)
icon.set_show_filename(True)
if my.get_option('latest_icon') == 'true':
icon.set_show_latest_icon(True)
icon.set_sobject( my.get_current_sobject() )
return icon.get_display()
class SimpleUploadWdg(BaseInputWdg):
def __init__(my,name=None):
super(SimpleUploadWdg,my).__init__(name)
def get_display(my):
input = HtmlElement.input()
input.set_attr("type","file")
input.set_attr("name",my.get_input_name())
input.add_class("inputfield")
my.add(input)
context = my.get_option("context")
if context == "":
context = Snapshot.get_default_context()
context_input = HiddenWdg("%s|context" % my.get_input_name(), context)
my.add(context_input)
# override the column
column = my.get_option("column")
if column != "":
column_input = HiddenWdg("%s|column" % my.get_input_name(), column)
my.add(column_input)
# create an event that will trigger a copy to handoff
"""
web = WebContainer.get_web()
handoff_dir = web.get_client_handoff_dir()
path_hidden = HiddenWdg("%s|path" % my.get_input_name(), "")
my.add(path_hidden)
script = HtmlElement.script('''
function foo() {
var handoff_dir = '%s'
var el = document.form.elements['%s']
var path = el.value
if (path == "") {
return false
}
var parts = path.split(/\\\\|\//)
var filename = parts[parts.length-1]
var to_path = handoff_dir + "/" + filename
alert(to_path)
var hidden = document.form.elements['%s|path']
hidden.value = path
el.value = null
// copy the file
Applet.copy_file(path, to_path)
//alert('move: ' + to_path)
//Applet.move_file(path, to_path)
}
''' % (handoff_dir, my.get_input_name(), my.get_input_name() ) )
my.add(script)
from pyasm.widget import GeneralAppletWdg
my.add(GeneralAppletWdg())
event_container = WebContainer.get_event_container()
event_container.add_listener('sthpw:submit', 'foo()')
"""
return super(SimpleUploadWdg,my).get_display()
class UploadWdg(BaseInputWdg):
def __init__(my,name=None):
super(UploadWdg,my).__init__(name)
def add_upload(my, table, name, is_required=False):
span = SpanWdg()
if is_required:
my._add_required(span)
table.add_row()
span.add("File (%s)" %name)
table.add_cell(span)
input = HtmlElement.input()
input.set_attr("type","file")
input.set_attr("name", my.get_input_name(name))
input.set_attr("size", "40")
table.add_cell(input)
def get_display(my):
icon_id = 'upload_div'
div = DivWdg()
if my.get_option('upload_type') == 'arbitrary':
counter = HiddenWdg('upload_counter','0')
div.add(counter)
icon = IconButtonWdg('add upload', icon=IconWdg.ADD)
icon.set_id(icon_id)
icon.add_event('onclick', "Common.add_upload_input('%s','%s','upload_counter')" \
%(icon_id, my.get_input_name()))
div.add(icon)
table = Table()
table.set_class("minimal")
table.add_style("font-size: 0.8em")
names = my.get_option('names')
required = my.get_option('required')
if not names:
my.add_upload(table, my.name)
else:
names = names.split('|')
if required:
required = required.split('|')
if len(required) != len(names):
raise TacticException('required needs to match the number of names if defined in the config file.')
# check for uniqueness in upload_names
if len(set(names)) != len(names):
raise TacticException('[names] in the config file must be unique')
for idx, name in enumerate(names):
if required:
is_required = required[idx] == 'true'
else:
is_required = False
my.add_upload(table, name, is_required)
table.add_row()
context_option = my.get_option('context')
pipeline_option = my.get_option('pipeline')
setting_option = my.get_option('setting')
context_name = "%s|context" % my.get_input_name()
text = None
span1 = SpanWdg("Context", id='context_mode')
span2 = SpanWdg("Context<br/>/Subcontext", id='subcontext_mode')
span2.add_style('display','none')
table.add_cell(span1)
table.add_data(span2)
if context_option or setting_option:
# add swap display for subcontext only if there is setting or context option
from web_wdg import SwapDisplayWdg
swap = SwapDisplayWdg()
table.add_data(SpanWdg(swap, css='small'))
swap.set_display_widgets(StringWdg('[+]'), StringWdg('[-]'))
subcontext_name = "%s|subcontext" % my.get_input_name()
subcontext = SpanWdg('/ ', css='small')
subcontext.add(TextWdg(subcontext_name))
subcontext.add_style('display','none')
subcontext.set_id(subcontext_name)
on_script = "set_display_on('%s');swap_display('subcontext_mode','context_mode')"%subcontext_name
off_script = "set_display_off('%s');get_elements('%s').set_value(''); "\
"swap_display('context_mode','subcontext_mode')"%(subcontext_name, subcontext_name)
swap.add_action_script(on_script, off_script)
text = SelectWdg(context_name)
if context_option:
text.set_option('values', context_option)
elif setting_option:
text.set_option('setting', setting_option)
td = table.add_cell(text)
table.add_data(subcontext)
elif pipeline_option:
sobject = my.sobjects[0]
pipeline = Pipeline.get_by_sobject(sobject)
context_names = []
process_names = pipeline.get_process_names(recurse=True)
for process in process_names:
context_names.append(pipeline.get_output_contexts(process))
text = SelectWdg(context_name)
text.set_option('values', process_names)
table.add_cell(text)
else:
text = TextWdg(context_name)
table.add_cell(text)
from web_wdg import HintWdg
hint = HintWdg('If not specified, the default is [publish]')
table.add_data(hint)
revision_cb = CheckboxWdg('%s|is_revision' %my.get_input_name(),\
label='is revision', css='med')
table.add_data(revision_cb)
table.add_row()
table.add_cell("Comment")
textarea = TextAreaWdg("%s|description"% my.get_input_name())
table.add_cell(textarea)
div.add(table)
return div
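# --- Illustrative usage sketch (not part of the original file) ---------------
# Configuring UploadWdg with several named upload rows. Per the checks above,
# 'required' must have the same number of pipe-separated entries as 'names'
# and the names must be unique. The option values are hypothetical.
#
#     upload = UploadWdg('publish_files')
#     upload.set_option('names', 'model|texture')
#     upload.set_option('required', 'true|false')
#     upload.set_option('context', 'model|texture')   # optional SELECT of contexts
#     html = upload.get_display()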
class MultiUploadWdg(BaseInputWdg):
UPLOAD_ID = "upload"
def __init__(my,name=None):
super(MultiUploadWdg,my).__init__(name)
def get_display(my):
# put in a default name
if my.name == None:
my.name = "upload_files"
widget = Widget()
# javascript function that polls the java applet for the files
# that were uploaded
widget.add(HtmlElement.script('''
function do_upload()
{
upload = document.getElementById("%s")
upload.do_upload()
files = upload.get_uploaded_files()
input = document.getElementById("%s")
input.value = files
return true
}
''' % (my.UPLOAD_ID,my.name) ))
# bind this to the edit button
event = WebContainer.get_event("sthpw:submit")
event.add_listener("do_upload()")
context_url = WebContainer.get_web().get_context_url()
# add the applet
applet = HtmlElement("applet")
applet.set_attr("code", "upload.UploadApplet")
applet.set_attr("codebase", "%s/java" % context_url.get_url() )
applet.set_attr("archive", "Upload-latest.jar")
applet.set_attr("width", "450")
applet.set_attr("height", "120")
applet.set_attr("id", my.UPLOAD_ID)
# create param for applet
param = HtmlElement("param")
param.set_attr("name","scriptable")
param.set_attr("value","true")
applet.add(param)
widget.add(applet)
# hidden element which fills in the file names that were
# uploaded
hidden = HiddenWdg(my.name)
hidden.set_attr('id', my.name)
widget.add(hidden)
return widget
class DownloadWdg(BaseInputWdg):
def __init__(my,name=None):
super(DownloadWdg,my).__init__(name)
def get_display(my):
context_url = WebContainer.get_web().get_context_url()
download_id = "download"
# create applet
applet = HtmlElement("applet")
applet.set_attr("code", "upload.DownloadApplet")
applet.set_attr("codebase", "%s/java" % context_url.get_url() )
applet.set_attr("archive", "Upload-latest.jar")
applet.set_attr("width", "1")
applet.set_attr("height", "1")
applet.set_attr("id", download_id)
# create param for applet
param = HtmlElement("param")
param.set_attr("name","scriptable")
param.set_attr("value","true")
applet.add(param)
my.add(applet)
my.do_download()
return super(DownloadWdg,my).get_display()
def do_download(my):
# get all of the files to download
web = WebContainer.get_web()
download_files = web.get_form_values("download_files")
for download_file in download_files:
search_type, search_id = download_file.split("|")
search = Search(search_type)
search.add_id_filter(search_id)
sobject = search.get_sobject()
# TODO: this code is highly flash dependent
if sobject.has_value("episode_code"):
sub_dir = sobject.get_value("episode_code")
else:
sub_dir = ""
my._download_sobject(sobject,sub_dir)
# for each shot download all of the dependent files
if search_type.startswith("flash/shot"):
instances = sobject.get_all_instances()
for instance in instances:
from pyasm.flash import FlashAsset
asset = FlashAsset.get_by_code(instance.get_value("asset_code"))
asset_sub_dir = "%s/design" % sub_dir
my._download_sobject(asset, asset_sub_dir)
def _download_sobject(my, sobject, sub_dir):
web = WebContainer.get_web()
to_dir = web.get_local_dir()
snapshot = Snapshot.get_latest_by_sobject(sobject)
if not snapshot:
return
web_paths = snapshot.get_all_web_paths()
for web_path in web_paths:
basename = os.path.basename(web_path)
to_path = "%s/download/%s/%s" \
% (to_dir, sub_dir, File.remove_file_code(basename))
script = HtmlElement.script("download.do_download('%s','%s')"%\
(web_path,to_path))
my.add(script)
# DEPRECATED
class CalendarWdg(BaseInputWdg):
''' this can be instantiated multiple times in a page'''
def __init__(my,name=None,id=None):
my.id = id
my.cal_options = {}
my.on_wdg = None
my.trigger = my.generate_unique_id("f_trigger_c")
super(CalendarWdg,my).__init__(name,"div")
def set_cal_option(my, name, value):
my.cal_options[name] = value
def set_on_wdg(my, widget):
my.on_wdg = widget
my.on_wdg.set_id(my.trigger)
my.on_wdg.add_class("hand")
def class_init(my):
if WebContainer.get_web().is_IE():
my.add( '''
<!-- main calendar program -->
<script src="/context/javascript/jscalendar/calendar.js"></script>
<!-- language for the calendar -->
<script src="/context/javascript/jscalendar/lang/calendar-en.js"></script>
<!-- the following script defines the Calendar.setup helper
function, which makes adding a calendar a matter of 1 or 2
lines of code. -->
<script src="/context/javascript/jscalendar/calendar-setup.js"></script>
''' )
return
my.add("<!-- main script for calendar -->")
my.add("<!-- language for the calendar -->")
my.add("<!-- the following script defines the Calendar.setup helper function -->")
script = []
script.append("var js=new Script()")
script.append("js.include_once('/context/javascript/jscalendar/calendar.js')")
script.append("js.include_once('/context/javascript/jscalendar/lang/calendar-en.js')")
script.append("js.include_once('/context/javascript/jscalendar/calendar-setup.js')")
init_script = HtmlElement.script(";".join(script))
init_script.set_attr('mode','dynamic')
my.add(init_script)
def get_id(my):
return my.id
def get_display(my):
value = my.get_value(for_display=True)
if value == "":
display_date = ""
#hidden_value = "__NONE__"
hidden_value = ""
else:
date = Date( value )
display_date = date.get_display_date()
hidden_value = value
input_field = None
if my.id:
input_field = my.id
else:
input_field = my.get_input_name()
hidden = HiddenWdg(input_field, hidden_value)
hidden.set_id(input_field)
my.add(hidden)
display_area = "%s|display_area" % input_field
text = SpanWdg()
text.add( display_date )
text.add_style("padding: 3px")
text.set_id(display_area)
my.add(text)
#shows_time = "true"
shows_time = "false"
da_format = "%b %e, %Y"
if_format = "%Y-%m-%e %H:%M"
cal_options_str = ", ".join( [ "%s\t: %s" % (x,my.cal_options[x]) for x in my.cal_options.keys() ] )
if cal_options_str != "":
comma = ","
else:
comma = ""
# set a default widget if it hasn't been defined
if not my.on_wdg:
img = HtmlElement.img("/context/javascript/jscalendar/img.gif")
my.set_on_wdg(img)
my.add(my.on_wdg)
script = HtmlElement.script('''Calendar.setup({
%s%s
inputField : '%s', /* id of the input field */
displayArea : '%s',
ifFormat : '%s', /* format of the input field */
daFormat : '%s', /* format of the display field */
button : '%s', /* trigger for the calendar (button ID) */
align : 'Br', /* alignment (defaults to 'Bl') */
singleClick : true,
showsTime : %s
})'''% (cal_options_str, comma, input_field, display_area, if_format, da_format, my.trigger, shows_time))
script.set_attr('mode', 'dynamic')
my.add(script)
return super(CalendarWdg,my).get_display()
class CalendarInputWdg(BaseInputWdg):
''' this one is the newer version with or without a TextWdg'''
def __init__(my, name=None, label=None, css=None, show_week=False):
my.show_on_wdg = True
my.show_value = True
#my.cal_name = my.generate_unique_id()
my.show_warning = True
my.onchange_script = ''
my.hidden = HiddenWdg(name)
my.show_week = show_week
my.css = css
super(CalendarInputWdg,my).__init__(name, "span", label=label)
def class_init(my):
if WebContainer.get_web().is_IE():
my.add('''
<!-- main calendar program -->
<script type="text/javascript" src="/context/javascript/jscalendar/calendar.js"></script>
<!-- language for the calendar -->
<script type="text/javascript" src="/context/javascript/jscalendar/lang/calendar-en.js"></script>
<script type="text/javascript" src="/context/javascript/TacticCalendar.js"></script>
''')
show_week = "false"
if my.show_week :#or my.get_option('show_week') == 'true':
show_week = "true"
script = HtmlElement.script('''
var calendar_tactic = new TacticCalendar(%s)
''' % (show_week) )
my.add(script)
return
my.add('''
<!-- main calendar program -->
<script type="text/javascript" src="/context/javascript/jscalendar/calendar.js"></script>
<!-- language for the calendar -->
<script type="text/javascript" src="/context/javascript/jscalendar/lang/calendar-en.js"></script>
<!-- the following script defines the Calendar.setup helper
function, which makes adding a calendar a matter of 1 or 2
lines of code. -->
<script type="text/javascript" src="/context/javascript/jscalendar/calendar-setup.js"></script>
<script type="text/javascript" src="/context/javascript/TacticCalendar.js"></script>
''')
show_week = "false"
if my.show_week :#or my.get_option('show_week') == 'true':
show_week = "true"
script = HtmlElement.script('''
var calendar_tactic = new TacticCalendar(%s)
''' % (show_week) )
my.add(script)
def get_hidden_wdg(my):
if my.get_option('show_warning') =='false':
my.show_warning = False
value = super(CalendarInputWdg, my).get_value(for_display=True)
if value == "":
display_date = ""
hidden_value = ""
else:
# In some cases the user is allowed to append chars after it
date = Date( db_date=value, show_warning=my.show_warning )
# display date format is not used for now
# but date is instantiated to issue warning where applicable
# display_date = date.get_display_date()
hidden_value = value
hidden_name = my.get_input_name()
if my.show_value:
hidden = TextWdg(hidden_name)
hidden.set_persist_on_submit()
hidden.set_attr("size", "15")
hidden.set_value(hidden_value)
else:
hidden = HiddenWdg(hidden_name, hidden_value)
return hidden
def set_onchange_script(my, script):
''' script that runs when the user clicks on a date '''
my.onchange_script = script
'''
def get_value(my):
return my.hidden.get_value()
def get_js_name(my):
name = my.cal_name
return "calendar_%s" % name
'''
def get_on_script(my, date=None):
if not date:
date = ''
name = my.get_input_name()
script = "calendar_tactic.show_calendar('%s', null,'%s')" % (name, date)
return script
def set_show_on_wdg(my, flag):
my.show_on_wdg = flag
def set_show_value(my, flag):
my.show_value = flag
def set_show_warning(my, show):
my.show_warning = show
def get_on_wdg(my):
widget = HtmlElement.img("/context/javascript/jscalendar/img.gif")
widget.add_event("onclick", my.get_on_script() )
widget.add_class("hand")
return widget
def get_display(my):
widget = Widget()
name = my.get_input_name()
# set a default widget if it hasn't been defined
if my.show_on_wdg:
widget.add(my.get_on_wdg() )
my.hidden = my.get_hidden_wdg()
widget.add(my.hidden)
show_week = "false"
if my.show_week or my.get_option('show_week') == 'true':
show_week = "true"
# on choosing a date, it executes this js
if my.onchange_script:
script = "calendar_tactic.init('%s');\
calendar_tactic.cal.onClose = function() { if (!calendar_tactic.check('%s')) return; %s }"\
%(name, name, my.onchange_script)
from pyasm.web import AppServer
AppServer.add_onload_script(script)
my.add(widget)
if not my.label:
return super(CalendarInputWdg, my).get_display()
else:
sel = BaseInputWdg.get_class_display(my)
span = SpanWdg(my.label, css=my.css)
span.add(sel)
return span
class PopupWdg(BaseInputWdg):
def __init__(my, name=None, type=None, label=None):
super(PopupWdg,my).__init__(name,type,label)
my.title = ''
my.offset_x = 10
my.offset_y = 0
my.is_auto_hide = True
def set_auto_hide(my, hide):
my.is_auto_hide = hide
def get_display(my):
div = DivWdg(css="popup_wdg")
div.set_id(my.name)
hidden_name = '%s_hidden' % my.name
div.add(HiddenWdg(hidden_name))
div.add_style("display: none")
div.add_style("margin", "15px 0 0 0px")
div.add_style("position", "absolute")
from web_wdg import CloseWdg
div.add(CloseWdg(my.get_off_script()))
div.add( HtmlElement.br(clear="all") )
for widget in my.widgets:
div.add(widget)
div.add( HtmlElement.br(clear="all") )
return div
def get_on_script(my):
script = "Common.follow_click(event, '%s', %d, %d); Effects.fade_in('%s', 200);"\
%(my.get_name(),my.offset_x, my.offset_y, my.get_name())
if my.is_auto_hide:
script += "Common.overlay_setup('mouseup',function(){%s})" %my.get_off_script()
return script
def get_off_script(my):
return "Effects.fade_out('%s', 200); document.removeEvents('mouseup')" % my.get_name()
class PopupMenuWdg(BaseInputWdg):
def __init__(my,name=None, type=None, label=None, action_name=None, \
multi=False, height='', width=''):
'''
Creates a popup widget
@params
name: inherited (optional)
type: inherited (optional)
label: inherited (optional)
action_name: name of hidden widget that performs the action. If not
specified, defaults to name
multi: adds checkbox (optional)
'''
super(PopupMenuWdg,my).__init__(name,type,label)
if action_name:
my.action_name = action_name
else:
my.action_name = name
my.multi = multi
my.title = ''
my.offset_x = 10
my.offset_y = 0
my.height = height
my.menu_width = width
my.monitor = None
my.is_submit = True
my.is_auto_hide = True
# this is the group name
my.item_name = '%s_item' %my.get_input_name()
def set_auto_hide(my, hide):
my.is_auto_hide = hide
def add_monitor(my, widget):
''' add a monitor div on the right side'''
my.monitor = widget
def add_separator(my):
widget = "<hr/>"
super(PopupMenuWdg,my).add(widget)
def add_title(my, title):
if isinstance(title, basestring):
widget = FloatDivWdg("<b>%s</b>" % title)
else:
widget = "<b>%s</b>" % title.get_buffer_display()
my.title = widget
my.title.add_style('border-bottom','1px dotted #ccc')
def add(my, widget, name=None):
if type(widget) in types.StringTypes:
tmp = widget
widget = StringWdg(widget)
if not name:
name = tmp
widget.set_name(name)
else:
if name:
widget.set_name(name)
super(PopupMenuWdg,my).add(widget,name)
def set_offset(my, x, y):
my.offset_x = x
my.offset_y = y
def set_submit(my, submit):
my.is_submit = submit
def get_display(my):
div = DivWdg(css="popup_wdg")
div.set_id(my.name)
select_name = '%s_hidden' % my.action_name
if not my.multi:
hidden = HiddenWdg(select_name)
hidden.set_id(select_name)
div.add(hidden)
"""
hidden_name = '%s_hidden' % my.name
div.add(HiddenWdg(hidden_name))
"""
div.add_style("display: none")
div.add_style("margin", "5px 0 0 0px")
div.add_style("position", "absolute")
from web_wdg import CloseWdg
div.add(my.title)
div.add(CloseWdg(my.get_off_script()))
div.add(HtmlElement.br())
content_div = FloatDivWdg()
if my.height:
content_div.add_style('height', my.height)
content_div.add_style('clear', 'left')
content_div.add_style('padding-top','8px')
div.add(content_div)
for widget in my.widgets:
if not widget.get_name():
item = DivWdg(css='hand')
item.add(widget)
if my.menu_width:
item.add_style('width', my.menu_width)
content_div.add(item)
continue
id='%s_%s' %(my.get_input_name(), widget.get_name())
item = DivWdg(css="hand")
item.set_attr('name', my.item_name)
item.set_attr('tab', id)
if my.menu_width:
item.add_style('width', my.menu_width)
item.add_style('padding-left','3px')
# checkbox and extra logic is added for named widgets only
if my.multi:
span = SpanWdg(widget, css='small')
#checkbox = CheckboxWdg("%s_select" % my.name)
checkbox = CheckboxWdg(select_name)
cb_id = '%s|%s' %(select_name, widget.get_name())
checkbox.set_id(cb_id)
checkbox.set_option( "value", widget.get_name())
item.add(checkbox)
span.add_event("onclick", "var a=get_elements('%s');a.toggle_me('%s')" %(checkbox.get_name(), cb_id))
item.add(span)
else:
item.add(widget)
# FIXME: these colors should reflect the skin
#item.add_event("onmouseover", "this.style.backgroundColor='#333'")
#item.add_event("onmouseout", "this.style.backgroundColor='#555'")
item.add_event("onmouseover", "this.style.fontWeight='600'")
item.add_event("onmouseout", "this.style.fontWeight='100'")
"""
if not my.multi:
item.add_event("onclick", "el=document.form.elements['%s'];el.value='%s';document.form.submit()" % (select_name,widget.get_name()) )
else:
pass
"""
if not my.multi:
item.add_event("onclick", "get_elements('%s').tab_me('%s','active_menu_item',\
'inactive_menu_item'); get_elements('%s').set_value('%s')" \
% ( my.item_name, id, select_name,widget.get_name()) )
if my.is_submit:
item.add_event("onclick", "document.form.submit()")
content_div.add(item)
if my.monitor:
mon_div = FloatDivWdg(my.monitor, id='%s_monitor' %my.get_input_name(),float='left')
mon_div.add_style('height', my.height)
mon_div.add_style('display', 'none')
mon_div.add_class('monitor')
div.add(mon_div)
return div
def get_on_script(my):
#script = "Common.follow_click(event, '%s', %d, %d); set_display_on('%s');"\
# %(my.get_name(),my.offset_x, my.offset_y, my.get_name())
script = "Effects.fade_in('%s', 30);"%my.get_name()
if my.is_auto_hide:
script += "Common.overlay_setup('mouseup',function(){%s})" %my.get_off_script()
return script
def get_off_script(my):
return "Effects.fade_out('%s', 200); document.removeEvents('mouseup')" % my.get_name()
def get_monitor_on_script(my):
return "Effects.fade_in('%s_monitor', 50)" % my.get_input_name()
def get_monitor_off_script(my):
return "set_display_off('%s_monitor')" % my.get_input_name()
def get_clear_css_script(my):
''' clears the css of the menu buttons, make them inactive'''
return "$$('div[name=%s]').each(function(elem) {elem.className='inactive_menu_item';})" %my.item_name
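# --- Illustrative usage sketch (not part of the original file) ---------------
# Wiring up a PopupMenuWdg, assuming some trigger element ('button' below) and
# a parent widget ('page') exist elsewhere; names and labels are illustrative.
#
#     menu = PopupMenuWdg('asset_menu', multi=False, width='120px')
#     menu.add_title('Actions')
#     menu.add('Check in', name='checkin')
#     menu.add('Check out', name='checkout')
#     button.add_event('onclick', menu.get_on_script())
#     page.add(menu)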
|
talha81/TACTIC-DEV
|
src/pyasm/widget/input_wdg.py
|
Python
|
epl-1.0
| 86,425
|
'''
Validation of received probe dispatch requests.
Validation is performed in two parts:
1. Requests are handled as messages and are required to carry an HMAC digest
allowing the server to verify the message and validate that the client is a
trusted AMPT manager using the same shared key.
2. Request messages contain the core packet dispatch parameters as well as a
per-request counter in the form of a timestamp, included to allow for a basic
level of replay protection. After a request is validated, the counter (a
floating point timestamp value) is stored in the counter database. Future
requests ensure that the counter in the validated message is greater than the
stored counter from the previous message.
'''
import os
import hmac
import json
import os.path
from shutil import chown
from . import app
class RequestValidationError(Exception):
'Failure validating HMAC or replay counter in request'
pass
def prep_counter_db(db_path, user=None, group=None):
'''
Initialize new counter DB file.
If user and group are specified, caller is a privileged process specifying
ownership of file by less privileged user/group.
'''
try:
persist_counter(db_path)
except FileNotFoundError as e:
os.makedirs(os.path.dirname(db_path))
persist_counter(db_path)
if user is not None and group is not None:
chown(db_path, user, group)
def persist_counter(db_path, ctr=app.config['DB_INIT_VAL']):
'Store counter into DB file'
with open(db_path, 'w') as f:
f.write(str(ctr))
if ctr == app.config['DB_INIT_VAL']:
app.logger.debug('initialized counter database with base '
'value of %d', app.config['DB_INIT_VAL'])
def validate_request(args):
'Validate HMAC and timestamp counter on request'
# Extract HMAC hash from request, grab timestamp
req_digest = args.pop('h')
req_ts = args['ts']
# Construct message from request and compute digest
j = json.dumps(args, sort_keys=True)
computed_digest = (hmac.new(bytes(app.config['HMAC_KEY'].encode('utf-8')),
j.encode('utf-8'), app.config['HMAC_DIGEST'])
.hexdigest())
# Fail out if HMAC comparison unsuccessful
if not hmac.compare_digest(req_digest, computed_digest):
raise RequestValidationError('HMAC digest failed verification')
# Compare stored counter to request counter. The counter is valid if it is
# greater than the previously stored one.
with open(app.config['DB_PATH'], 'r') as f:
if not req_ts > float(f.read()):
raise RequestValidationError('Replay counter comparison '
'failed verification')
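# --- Illustrative client-side sketch (not part of the original module) -------
# Shows how a trusted manager could sign a dispatch request so that it passes
# validate_request() above: the timestamp counter is included in the signed
# JSON (sorted keys) and the digest is sent alongside as 'h'. The helper name,
# the 'sha256' default and the example parameters are assumptions; the digest
# and key must match app.config['HMAC_DIGEST'] and app.config['HMAC_KEY'].
def _example_sign_request(params, key, digest='sha256'):
    'Hypothetical helper illustrating the signing scheme expected above'
    import time
    signed = dict(params, ts=time.time())       # per-request replay counter
    msg = json.dumps(signed, sort_keys=True)    # same canonical form as server
    signed['h'] = hmac.new(key.encode('utf-8'), msg.encode('utf-8'),
                           digest).hexdigest()
    return signed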
|
nids-io/ampt-generator
|
ampt_generator/validator.py
|
Python
|
bsd-2-clause
| 2,785
|
# Copyright (c) 2012-2021, NVIDIA CORPORATION.
# SPDX-License-Identifier: Apache-2.0
import elasticsearch
import json
import logging
import os
import random
import string
import swiftclient
import unittest
import utils
class MetadataSyncTest(unittest.TestCase):
ES_HOST = 'https://localhost:9200'
ES_VERSION = os.environ['ES_VERSION']
def _get_container(self, container=None):
if not container:
container = u'\062a' + ''.join([
random.choice(string.ascii_lowercase) for _ in range(8)])
self.client.put_container(container)
self.containers.append(container)
return container
def _get_index(self, index=None):
if not index:
index = ''.join([
random.choice(string.ascii_lowercase) for _ in range(8)])
if self.es_conn.indices.exists(index):
self.es_conn.indices.delete(index)
self.es_conn.indices.create(index, include_type_name=False)
self.indices.append(index)
return index
def setUp(self):
self.logger = logging.getLogger('test-metadata-sync')
self.logger.addHandler(logging.StreamHandler())
self.client = swiftclient.client.Connection(
'http://localhost:8080/auth/v1.0',
u'\u062aacct:\u062auser',
u'\u062apass')
self.es_conn = utils.get_es_connection(
self.ES_HOST, True, utils.get_ca_cert(self.ES_VERSION))
self.containers = []
self.indices = []
self.index = self._get_index()
self.container = self._get_container()
self.config = {
'containers': [
{'account': u'AUTH_\u062aacct',
'container': self.container,
'index': self.index,
'es_hosts': self.ES_HOST,
'verify_certs': True,
'ca_certs': utils.get_ca_cert(self.ES_VERSION)}
],
}
self.indexer = utils.get_metadata_sync_instance(
self.config, self.logger)
def tearDown(self):
for container in self.containers:
_, listing = self.client.get_container(container)
for entry in listing:
self.client.delete_object(container, entry['name'])
self.client.delete_container(container)
self.containers = []
for index in self.indices:
self.es_conn.indices.delete(index)
def test_index_regular_objects(self):
object_name = u'\u062a-object'
self.client.put_object(
self.container, object_name, 'stuff',
headers={'x-object-meta-foo': 'sample meta',
u'x-object-meta-\u062a-bar': u'unicode h\u00e9ader'})
self.indexer.run_once()
doc_id = utils.get_doc_id(self.config['containers'][0]['account'],
self.container, object_name)
es_doc = self.es_conn.get(self.index, doc_id)
self.assertEqual('sample meta', es_doc['_source']['foo'])
self.assertEqual(u'unicode h\u00e9ader',
es_doc['_source'][u'\u062a-bar'])
def test_removes_documents(self):
object_name = u'\u062a-object'
self.client.put_object(
self.container, object_name, 'stuff',
headers={'x-object-meta-foo': 'sample meta',
u'x-object-meta-\u062a-bar': u'unicode h\u00e9ader'})
self.indexer.run_once()
# Elasticsearch client will raise an exception if the document ID is
# not found
doc_id = utils.get_doc_id(self.config['containers'][0]['account'],
self.container, object_name)
self.es_conn.get(self.index, doc_id)
self.client.delete_object(self.container, object_name)
self.indexer.run_once()
with self.assertRaises(elasticsearch.TransportError) as ctx:
self.es_conn.get(self.index, doc_id)
self.assertEqual(404, ctx.exception.status_code)
def test_indexes_slos(self):
segments_container = self._get_container()
manifest = []
for i in range(2):
self.client.put_object(segments_container, 'part-%d' % i,
chr((ord('A') + i)) * 1024)
manifest.append(
{'path': '/'.join((segments_container, 'part-%d' % i))})
slo_key = u'SLO-\u062a'
self.client.put_object(
self.container, slo_key, json.dumps(manifest),
query_string='multipart-manifest=put',
headers={u'x-object-meta-sl\u00f6': u'valu\ue009'})
self.indexer.run_once()
doc_id = utils.get_doc_id(self.config['containers'][0]['account'],
self.container, slo_key)
resp = self.es_conn.get(self.index, doc_id)
self.assertEqual('true', resp['_source']['x-static-large-object'])
self.assertEqual(u'valu\ue009', resp['_source'][u'sl\u00f6'])
class MetadataSync6xTest(MetadataSyncTest):
ES_HOST = 'https://localhost:9201'
ES_VERSION = os.environ['OLD_ES_VERSION']
|
swiftstack/swift-metadata-sync
|
test/integration/test_metadata_sync.py
|
Python
|
apache-2.0
| 5,118
|
# The Hazard Library
# Copyright (C) 2012 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import numpy
from openquake.hazardlib.const import TRT
from openquake.hazardlib.source.simple_fault import SimpleFaultSource
from openquake.hazardlib.source.rupture import ProbabilisticRupture
from openquake.hazardlib.mfd import TruncatedGRMFD, EvenlyDiscretizedMFD
from openquake.hazardlib.scalerel import PeerMSR, WC1994
from openquake.hazardlib.geo import Point, Line
from openquake.hazardlib.tom import PoissonTOM
from openquake.hazardlib.tests import assert_angles_equal, assert_pickleable
from openquake.hazardlib.tests.geo.surface._utils import assert_mesh_is
from openquake.hazardlib.tests.source import _simple_fault_test_data as test_data
class _BaseFaultSourceTestCase(unittest.TestCase):
TRT = TRT.ACTIVE_SHALLOW_CRUST
RAKE = 0
def _make_source(self, mfd, aspect_ratio, fault_trace=None, dip=45):
source_id = name = 'test-source'
trt = self.TRT
rake = self.RAKE
rupture_mesh_spacing = 1
upper_seismogenic_depth = 0
lower_seismogenic_depth = 4.2426406871192848
magnitude_scaling_relationship = PeerMSR()
rupture_aspect_ratio = aspect_ratio
if fault_trace is None:
fault_trace = Line([Point(0.0, 0.0),
Point(0.0, 0.0359728811758),
Point(0.0190775080917, 0.0550503815181),
Point(0.03974514139, 0.0723925718855)])
sfs = SimpleFaultSource(
source_id, name, trt, mfd, rupture_mesh_spacing,
magnitude_scaling_relationship, rupture_aspect_ratio,
upper_seismogenic_depth, lower_seismogenic_depth,
fault_trace, dip, rake
)
assert_pickleable(sfs)
return sfs
def _test_ruptures(self, expected_ruptures, source):
tom = PoissonTOM(time_span=50)
ruptures = list(source.iter_ruptures(tom))
for rupture in ruptures:
self.assertIsInstance(rupture, ProbabilisticRupture)
self.assertIs(rupture.temporal_occurrence_model, tom)
self.assertIs(rupture.tectonic_region_type, self.TRT)
self.assertEqual(rupture.rake, self.RAKE)
self.assertEqual(len(expected_ruptures), len(ruptures))
for i in xrange(len(expected_ruptures)):
expected_rupture, rupture = expected_ruptures[i], ruptures[i]
self.assertAlmostEqual(rupture.mag, expected_rupture['mag'])
self.assertAlmostEqual(rupture.rake, expected_rupture['rake'])
self.assertAlmostEqual(rupture.occurrence_rate,
expected_rupture['occurrence_rate'])
assert_mesh_is(self, rupture.surface,
expected_rupture['surface'])
self.assertEqual(rupture.hypocenter,
Point(*expected_rupture['hypocenter']))
assert_angles_equal(self, rupture.surface.get_strike(),
expected_rupture['strike'], delta=0.5)
assert_angles_equal(self, rupture.surface.get_dip(),
expected_rupture['dip'], delta=3)
class SimpleFaultIterRupturesTestCase(_BaseFaultSourceTestCase):
def test_2(self):
        # rupture dimensions are larger than mesh_spacing, number of nodes
# along strike and dip is even
mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=3.0, max_mag=4.0,
bin_width=1.0)
self._test_ruptures(test_data.TEST2_RUPTURES,
self._make_source(mfd=mfd, aspect_ratio=1.0))
def test_3(self):
# rupture length greater than fault length, number of nodes along
# length is odd and along width is even
mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=5.0, max_mag=6.0,
bin_width=1.0)
self._test_ruptures(test_data.TEST3_RUPTURES,
self._make_source(mfd=mfd, aspect_ratio=4.0))
def test_4(self):
# rupture width greater than fault width, number of nodes along
# length is even, along width is odd
mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=5.4, max_mag=5.5,
bin_width=0.1)
self._test_ruptures(test_data.TEST4_RUPTURES,
self._make_source(mfd=mfd, aspect_ratio=0.5))
def test_5(self):
# rupture length and width greater than fault length and width
# respectively
mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=6.0, max_mag=7.0,
bin_width=1.0)
self._test_ruptures(test_data.TEST5_RUPTURES,
self._make_source(mfd=mfd, aspect_ratio=1.0))
def test_Pago_VeianoMontaguto(self):
# regression test
fault_trace = Line([Point(15.2368, 41.1594), Point(15.1848, 41.1644),
Point(15.1327, 41.1694), Point(15.0807, 41.1745),
Point(15.0286, 41.1795), Point(14.9765, 41.1846),
Point(14.9245, 41.1896), Point(14.8724, 41.1946),
Point(14.8204, 41.1997)])
mfd = EvenlyDiscretizedMFD(min_mag=6.9, bin_width=0.2,
occurrence_rates=[1.0])
dip = 70.0
upper_seismogenic_depth = 11.0
lower_seismogenic_depth = 25.0
rake = -130
scalerel = WC1994()
rupture_mesh_spacing = 5
rupture_aspect_ratio = 1
tom = PoissonTOM(10)
fault = SimpleFaultSource(
'ITCS057', 'Pago Veiano-Montaguto', TRT.ACTIVE_SHALLOW_CRUST, mfd,
rupture_mesh_spacing, scalerel, rupture_aspect_ratio,
upper_seismogenic_depth, lower_seismogenic_depth,
fault_trace, dip, rake
)
self.assertEqual(len(list(fault.iter_ruptures(tom))), 1)
class SimpleFaultParametersChecksTestCase(_BaseFaultSourceTestCase):
def test_mesh_spacing_too_small(self):
mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=0.5, max_mag=1.5,
bin_width=1.0)
with self.assertRaises(ValueError) as ar:
self._make_source(mfd=mfd, aspect_ratio=1.0)
self.assertEqual(str(ar.exception),
'mesh spacing 1 is too low to represent '
'ruptures of magnitude 1.5')
def test_fault_trace_intersects_itself(self):
mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=10, max_mag=20,
bin_width=1.0)
fault_trace = Line([Point(0, 0), Point(0, 1),
Point(1, 1), Point(0, 0.5)])
with self.assertRaises(ValueError) as ar:
self._make_source(mfd=mfd, aspect_ratio=1, fault_trace=fault_trace)
self.assertEqual(str(ar.exception), 'fault trace intersects itself')
class SimpleFaultRupEncPolyTestCase(_BaseFaultSourceTestCase):
mfd = TruncatedGRMFD(a_val=0.5, b_val=1.0, min_mag=10, max_mag=20,
bin_width=1.0)
def test_dip_90_no_dilation(self):
trace = Line([Point(0.0, 0.0), Point(0.0, 0.04),
Point(0.03, 0.05), Point(0.04, 0.06)])
source = self._make_source(self.mfd, 1, dip=90, fault_trace=trace)
polygon = source.get_rupture_enclosing_polygon()
elons = [0, 0, 0.04]
elats = [0, 0.04, 0.06]
numpy.testing.assert_allclose(polygon.lons, elons, rtol=0, atol=1e-5)
numpy.testing.assert_allclose(polygon.lats, elats)
def test_dip_90_dilated(self):
trace = Line([Point(-1.0, 2.0), Point(-1.0, 2.04)])
source = self._make_source(self.mfd, 1, dip=90, fault_trace=trace)
polygon = source.get_rupture_enclosing_polygon(dilation=4.5)
elons = [
-1.0405401, -1.0403452, -1.0397622, -1.0387967, -1.0374580,
-1.0357589, -1.0337159, -1.0313487, -1.0286799, -1.0286349,
-1.0256903, -1.0224984, -1.0190897, -1.0154972, -1.0117554,
-1.0079004, -1.0039693, -1.0000000, -0.9960307, -0.9920996,
-0.9882446, -0.9845028, -0.9809103, -0.9775016, -0.9743097,
-0.9713651, -0.9713201, -0.9686513, -0.9662841, -0.9642411,
-0.9625420, -0.9612033, -0.9602378, -0.9596548, -0.9594599,
-0.9594609, -0.9596560, -0.9602391, -0.9612048, -0.9625436,
-0.9642428, -0.9662858, -0.9686531, -0.9713218, -0.9713668,
-0.9743113, -0.9775031, -0.9809116, -0.9845039, -0.9882454,
-0.9921002, -0.9960310, -1.0000000, -1.0039690, -1.0078998,
-1.0117546, -1.0154961, -1.0190884, -1.0224969, -1.0256887,
-1.0286332, -1.0286782, -1.0313469, -1.0337142, -1.0357572,
-1.0374564, -1.0387952, -1.0397609, -1.0403440, -1.0405391
]
elats = [
2.0399995, 2.0439662, 2.0478947, 2.0517472, 2.0554866, 2.0590768,
2.0624833, 2.0656733, 2.0686160, 2.0686610, 2.0713281, 2.0736940,
2.0757358, 2.0774338, 2.0787718, 2.0797368, 2.0803196, 2.0805144,
2.0803196, 2.0797368, 2.0787718, 2.0774338, 2.0757358, 2.0736940,
2.0713281, 2.0686610, 2.0686160, 2.0656733, 2.0624833, 2.0590768,
2.0554866, 2.0517472, 2.0478947, 2.0439662, 2.0399995, 1.9999995,
1.9960328, 1.9921043, 1.9882519, 1.9845126, 1.9809224, 1.9775160,
1.9743261, 1.9713835, 1.9713385, 1.9686715, 1.9663057, 1.9642640,
1.9625660, 1.9612281, 1.9602631, 1.9596804, 1.9594856, 1.9596804,
1.9602631, 1.9612281, 1.9625660, 1.9642640, 1.9663057, 1.9686715,
1.9713385, 1.9713835, 1.9743261, 1.9775160, 1.9809224, 1.9845126,
1.9882519, 1.9921043, 1.9960328, 1.9999999
]
numpy.testing.assert_allclose(polygon.lons, elons)
numpy.testing.assert_allclose(polygon.lats, elats, rtol=0, atol=1e-6)
def test_dip_30_no_dilation(self):
trace = Line([Point(0.0, 0.0), Point(0.0, 0.04),
Point(0.03, 0.05), Point(0.04, 0.06)])
source = self._make_source(self.mfd, 1, dip=30, fault_trace=trace)
polygon = source.get_rupture_enclosing_polygon()
elons = [0.0549872, 0., 0., 0.04, 0.09498719]
elats = [-0.0366581, 0, 0.04, 0.06, 0.02334187]
numpy.testing.assert_allclose(polygon.lons, elons, rtol=0, atol=1e-5)
numpy.testing.assert_allclose(polygon.lats, elats, rtol=0, atol=1e-5)
def test_dip_30_dilated(self):
trace = Line([Point(0.0, 0.0), Point(0.0, 0.04),
Point(0.03, 0.05), Point(0.04, 0.06)])
source = self._make_source(self.mfd, 1, dip=30, fault_trace=trace)
polygon = source.get_rupture_enclosing_polygon(dilation=10)
elons = [
0.1298154, 0.1245655, 0.1186454, 0.1121124, 0.1050291,
0.0974640, 0.0894897, 0.0811832, 0.0726244, 0.0638958,
0.0550813, 0.0462659, 0.0375346, 0.0289713, 0.0206585,
0.0126764, 0.0051017, -0.0498855, -0.0569870, -0.0635385,
-0.0694768, -0.0747446, -0.0792910, -0.0830722, -0.0860516,
-0.0882006, -0.0894983, -0.0899323, -0.0899323, -0.0894772,
-0.0881164, -0.0858637, -0.0827419, -0.0787826, -0.0740259,
-0.0685199, -0.0623203, -0.0554900, -0.0480979, -0.0402191,
-0.0002190, 0.0076432, 0.0158009, 0.0241796, 0.0327029,
0.0412927, 0.0498708, 0.0583587, 0.0666790, 0.0747555,
0.0825147, 0.0898855, 0.1448728, 0.1519670, 0.1585125,
0.1644462, 0.1697109, 0.1742561, 0.1780378, 0.1810197,
0.1831731, 0.1844772, 0.1849194, 0.1844956, 0.1832098,
0.1810743, 0.1781097, 0.1743447, 0.1698154
]
elats = [
-0.0865436, -0.0936378, -0.1001833, -0.1061170, -0.1113818,
-0.1159269, -0.1197087, -0.1226906, -0.1248440, -0.1261481,
-0.1265903, -0.1261665, -0.1248807, -0.1227452, -0.1197807,
-0.1160156, -0.1114863, -0.0748281, -0.0695722, -0.0636449,
-0.0571033, -0.0500106, -0.0424352, -0.0344503, -0.0261330,
-0.0175634, -0.0088243, -0.0000000, 0.0400000, 0.0490364,
0.0579813, 0.0667442, 0.0752364, 0.0833720, 0.0910686,
0.0982482, 0.1048383, 0.1107721, 0.1159895, 0.1204378,
0.1404379, 0.1439098, 0.1466154, 0.1485298, 0.1496358,
0.1499230, 0.1493889, 0.1480385, 0.1458839, 0.1429450,
0.1392485, 0.1348282, 0.0981700, 0.0929200, 0.0870000,
0.0804669, 0.0733837, 0.0658185, 0.0578443, 0.0495378,
0.0409790, 0.0322503, 0.0234359, 0.0146206, 0.0058892,
-0.0026741, -0.0109868, -0.0189689, -0.0265436
]
numpy.testing.assert_allclose(polygon.lons, elons, rtol=0, atol=1e-5)
numpy.testing.assert_allclose(polygon.lats, elats, rtol=0, atol=1e-5)
|
ROB-Seismology/oq-hazardlib
|
openquake/hazardlib/tests/source/simple_fault_test.py
|
Python
|
agpl-3.0
| 13,597
|
import ljson.base.mem
import ljson.base.generic
import ljson.convert.csv
from .data import data, header_descriptor
def test_read_write():
from io import StringIO
header = ljson.base.generic.Header(header_descriptor)
table = ljson.base.mem.Table(header, data)
fio = StringIO()
ljson.convert.csv.table2csv(table, fio)
fio.seek(0)
table_in = ljson.convert.csv.csv2table(fio, types = {k: v["type"] for k,v in header_descriptor.items()})
assert list(table_in) == data
f = StringIO()
fio.seek(0)
disk_table = ljson.convert.csv.csv2file(fio, f, types = {k: v["type"] for k,v in header_descriptor.items()})
assert list(disk_table) == data
|
daknuett/ljson
|
test/test_ljson_convert_csv.py
|
Python
|
agpl-3.0
| 655
|
"""Test the cross_validation module"""
from __future__ import division
import warnings
import numpy as np
from scipy.sparse import coo_matrix
from scipy import stats
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_not_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_warns
from sklearn.utils.testing import assert_warns_message
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.mocking import CheckingClassifier, MockDataFrame
from sklearn import cross_validation as cval
from sklearn.base import BaseEstimator
from sklearn.datasets import make_regression
from sklearn.datasets import load_boston
from sklearn.datasets import load_digits
from sklearn.datasets import load_iris
from sklearn.metrics import explained_variance_score
from sklearn.metrics import make_scorer
from sklearn.metrics import precision_score
from sklearn.externals import six
from sklearn.externals.six.moves import zip
from sklearn.linear_model import Ridge
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.cluster import KMeans
from sklearn.preprocessing import Imputer, LabelBinarizer
from sklearn.pipeline import Pipeline
class MockClassifier(BaseEstimator):
"""Dummy classifier to test the cross-validation"""
def __init__(self, a=0, allow_nd=False):
self.a = a
self.allow_nd = allow_nd
def fit(self, X, Y=None, sample_weight=None, class_prior=None,
sparse_sample_weight=None, sparse_param=None, dummy_int=None,
dummy_str=None, dummy_obj=None, callback=None):
"""The dummy arguments are to test that this fit function can
accept non-array arguments through cross-validation, such as:
- int
- str (this is actually array-like)
- object
- function
"""
self.dummy_int = dummy_int
self.dummy_str = dummy_str
self.dummy_obj = dummy_obj
if callback is not None:
callback(self)
if self.allow_nd:
X = X.reshape(len(X), -1)
if X.ndim >= 3 and not self.allow_nd:
raise ValueError('X cannot be d')
if sample_weight is not None:
assert_true(sample_weight.shape[0] == X.shape[0],
'MockClassifier extra fit_param sample_weight.shape[0]'
' is {0}, should be {1}'.format(sample_weight.shape[0],
X.shape[0]))
if class_prior is not None:
assert_true(class_prior.shape[0] == len(np.unique(y)),
'MockClassifier extra fit_param class_prior.shape[0]'
' is {0}, should be {1}'.format(class_prior.shape[0],
len(np.unique(y))))
if sparse_sample_weight is not None:
fmt = ('MockClassifier extra fit_param sparse_sample_weight'
'.shape[0] is {0}, should be {1}')
assert_true(sparse_sample_weight.shape[0] == X.shape[0],
fmt.format(sparse_sample_weight.shape[0], X.shape[0]))
if sparse_param is not None:
fmt = ('MockClassifier extra fit_param sparse_param.shape '
'is ({0}, {1}), should be ({2}, {3})')
assert_true(sparse_param.shape == P_sparse.shape,
fmt.format(sparse_param.shape[0],
sparse_param.shape[1],
P_sparse.shape[0], P_sparse.shape[1]))
return self
def predict(self, T):
if self.allow_nd:
T = T.reshape(len(T), -1)
return T.shape[0]
def score(self, X=None, Y=None):
return 1. / (1 + np.abs(self.a))
X = np.ones((10, 2))
X_sparse = coo_matrix(X)
W_sparse = coo_matrix((np.array([1]), (np.array([1]), np.array([0]))),
shape=(10, 1))
P_sparse = coo_matrix(np.eye(5))
y = np.arange(10) // 2
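# --- Illustrative usage sketch (not part of the original test module) --------
# MockClassifier.fit() accepts arbitrary extra keyword arguments, which
# cross_val_score forwards through its fit_params argument. The value below is
# illustrative only.
#
#     clf = MockClassifier()
#     scores = cval.cross_val_score(clf, X, y, fit_params={'dummy_int': 42})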
##############################################################################
# Tests
def check_valid_split(train, test, n_samples=None):
# Use python sets to get more informative assertion failure messages
train, test = set(train), set(test)
# Train and test split should not overlap
assert_equal(train.intersection(test), set())
if n_samples is not None:
        # Check that the union of train and test split covers all the indices
assert_equal(train.union(test), set(range(n_samples)))
def check_cv_coverage(cv, expected_n_iter=None, n_samples=None):
    # Check that all the samples appear at least once in a test fold
if expected_n_iter is not None:
assert_equal(len(cv), expected_n_iter)
else:
expected_n_iter = len(cv)
collected_test_samples = set()
iterations = 0
for train, test in cv:
check_valid_split(train, test, n_samples=n_samples)
iterations += 1
collected_test_samples.update(test)
# Check that the accumulated test samples cover the whole dataset
assert_equal(iterations, expected_n_iter)
if n_samples is not None:
assert_equal(collected_test_samples, set(range(n_samples)))
def test_kfold_valueerrors():
# Check that errors are raised if there is not enough samples
assert_raises(ValueError, cval.KFold, 3, 4)
# Check that a warning is raised if the least populated class has too few
# members.
y = [3, 3, -1, -1, 2]
cv = assert_warns_message(Warning, "The least populated class",
cval.StratifiedKFold, y, 3)
# Check that despite the warning the folds are still computed even
    # though all the classes are not necessarily represented on each
# side of the split at each split
check_cv_coverage(cv, expected_n_iter=3, n_samples=len(y))
# Error when number of folds is <= 1
assert_raises(ValueError, cval.KFold, 2, 0)
assert_raises(ValueError, cval.KFold, 2, 1)
assert_raises(ValueError, cval.StratifiedKFold, y, 0)
assert_raises(ValueError, cval.StratifiedKFold, y, 1)
# When n is not integer:
assert_raises(ValueError, cval.KFold, 2.5, 2)
# When n_folds is not integer:
assert_raises(ValueError, cval.KFold, 5, 1.5)
assert_raises(ValueError, cval.StratifiedKFold, y, 1.5)
def test_kfold_indices():
# Check all indices are returned in the test folds
kf = cval.KFold(300, 3)
check_cv_coverage(kf, expected_n_iter=3, n_samples=300)
# Check all indices are returned in the test folds even when equal-sized
# folds are not possible
kf = cval.KFold(17, 3)
check_cv_coverage(kf, expected_n_iter=3, n_samples=17)
def test_kfold_no_shuffle():
# Manually check that KFold preserves the data ordering on toy datasets
splits = iter(cval.KFold(4, 2))
train, test = next(splits)
assert_array_equal(test, [0, 1])
assert_array_equal(train, [2, 3])
train, test = next(splits)
assert_array_equal(test, [2, 3])
assert_array_equal(train, [0, 1])
splits = iter(cval.KFold(5, 2))
train, test = next(splits)
assert_array_equal(test, [0, 1, 2])
assert_array_equal(train, [3, 4])
train, test = next(splits)
assert_array_equal(test, [3, 4])
assert_array_equal(train, [0, 1, 2])
def test_stratified_kfold_no_shuffle():
# Manually check that StratifiedKFold preserves the data ordering as much
# as possible on toy datasets in order to avoid hiding sample dependencies
# when possible
splits = iter(cval.StratifiedKFold([1, 1, 0, 0], 2))
train, test = next(splits)
assert_array_equal(test, [0, 2])
assert_array_equal(train, [1, 3])
train, test = next(splits)
assert_array_equal(test, [1, 3])
assert_array_equal(train, [0, 2])
splits = iter(cval.StratifiedKFold([1, 1, 1, 0, 0, 0, 0], 2))
train, test = next(splits)
assert_array_equal(test, [0, 1, 3, 4])
assert_array_equal(train, [2, 5, 6])
train, test = next(splits)
assert_array_equal(test, [2, 5, 6])
assert_array_equal(train, [0, 1, 3, 4])
def test_stratified_kfold_ratios():
# Check that stratified kfold preserves label ratios in individual splits
# Repeat with shuffling turned off and on
n_samples = 1000
labels = np.array([4] * int(0.10 * n_samples) +
[0] * int(0.89 * n_samples) +
[1] * int(0.01 * n_samples))
for shuffle in [False, True]:
for train, test in cval.StratifiedKFold(labels, 5, shuffle=shuffle):
assert_almost_equal(np.sum(labels[train] == 4) / len(train), 0.10,
2)
assert_almost_equal(np.sum(labels[train] == 0) / len(train), 0.89,
2)
assert_almost_equal(np.sum(labels[train] == 1) / len(train), 0.01,
2)
assert_almost_equal(np.sum(labels[test] == 4) / len(test), 0.10, 2)
assert_almost_equal(np.sum(labels[test] == 0) / len(test), 0.89, 2)
assert_almost_equal(np.sum(labels[test] == 1) / len(test), 0.01, 2)
def test_kfold_balance():
# Check that KFold returns folds with balanced sizes
for kf in [cval.KFold(i, 5) for i in range(11, 17)]:
sizes = []
for _, test in kf:
sizes.append(len(test))
assert_true((np.max(sizes) - np.min(sizes)) <= 1)
assert_equal(np.sum(sizes), kf.n)
def test_stratifiedkfold_balance():
    # Check that StratifiedKFold returns folds with balanced sizes (only when
# stratification is possible)
# Repeat with shuffling turned off and on
labels = [0] * 3 + [1] * 14
for shuffle in [False, True]:
for skf in [cval.StratifiedKFold(labels[:i], 3, shuffle=shuffle)
for i in range(11, 17)]:
sizes = []
for _, test in skf:
sizes.append(len(test))
assert_true((np.max(sizes) - np.min(sizes)) <= 1)
assert_equal(np.sum(sizes), skf.n)
def test_shuffle_kfold():
# Check the indices are shuffled properly, and that all indices are
# returned in the different test folds
kf = cval.KFold(300, 3, shuffle=True, random_state=0)
ind = np.arange(300)
all_folds = None
for train, test in kf:
sorted_array = np.arange(100)
assert_true(np.any(sorted_array != ind[train]))
sorted_array = np.arange(101, 200)
assert_true(np.any(sorted_array != ind[train]))
sorted_array = np.arange(201, 300)
assert_true(np.any(sorted_array != ind[train]))
if all_folds is None:
all_folds = ind[test].copy()
else:
all_folds = np.concatenate((all_folds, ind[test]))
all_folds.sort()
assert_array_equal(all_folds, ind)
def test_shuffle_stratifiedkfold():
# Check that shuffling is happening when requested, and for proper
# sample coverage
labels = [0] * 20 + [1] * 20
kf0 = list(cval.StratifiedKFold(labels, 5, shuffle=True, random_state=0))
kf1 = list(cval.StratifiedKFold(labels, 5, shuffle=True, random_state=1))
for (_, test0), (_, test1) in zip(kf0, kf1):
assert_true(set(test0) != set(test1))
check_cv_coverage(kf0, expected_n_iter=5, n_samples=40)
def test_kfold_can_detect_dependent_samples_on_digits(): # see #2372
# The digits samples are dependent: they are apparently grouped by authors
# although we don't have any information on the groups segment locations
    # for this data. We can highlight this fact by computing k-fold cross-
    # validation with and without shuffling: we observe that the shuffling case
    # wrongly makes the IID assumption and is therefore too optimistic: it
    # estimates a much higher accuracy (around 0.96) than the non-shuffling
    # variant (around 0.86).
digits = load_digits()
X, y = digits.data[:800], digits.target[:800]
model = SVC(C=10, gamma=0.005)
n = len(y)
cv = cval.KFold(n, 5, shuffle=False)
mean_score = cval.cross_val_score(model, X, y, cv=cv).mean()
assert_greater(0.88, mean_score)
assert_greater(mean_score, 0.85)
# Shuffling the data artificially breaks the dependency and hides the
# overfitting of the model with regards to the writing style of the authors
# by yielding a seriously overestimated score:
cv = cval.KFold(n, 5, shuffle=True, random_state=0)
mean_score = cval.cross_val_score(model, X, y, cv=cv).mean()
assert_greater(mean_score, 0.95)
cv = cval.KFold(n, 5, shuffle=True, random_state=1)
mean_score = cval.cross_val_score(model, X, y, cv=cv).mean()
assert_greater(mean_score, 0.95)
# Similarly, StratifiedKFold should try to shuffle the data as little
# as possible (while respecting the balanced class constraints)
# and thus be able to detect the dependency by not overestimating
# the CV score either. As the digits dataset is approximately balanced
# the estimated mean score is close to the score measured with
# non-shuffled KFold
cv = cval.StratifiedKFold(y, 5)
mean_score = cval.cross_val_score(model, X, y, cv=cv).mean()
assert_greater(0.88, mean_score)
assert_greater(mean_score, 0.85)
def test_shuffle_split():
ss1 = cval.ShuffleSplit(10, test_size=0.2, random_state=0)
ss2 = cval.ShuffleSplit(10, test_size=2, random_state=0)
ss3 = cval.ShuffleSplit(10, test_size=np.int32(2), random_state=0)
for typ in six.integer_types:
ss4 = cval.ShuffleSplit(10, test_size=typ(2), random_state=0)
for t1, t2, t3, t4 in zip(ss1, ss2, ss3, ss4):
assert_array_equal(t1[0], t2[0])
assert_array_equal(t2[0], t3[0])
assert_array_equal(t3[0], t4[0])
assert_array_equal(t1[1], t2[1])
assert_array_equal(t2[1], t3[1])
assert_array_equal(t3[1], t4[1])
def test_stratified_shuffle_split_init():
y = np.asarray([0, 1, 1, 1, 2, 2, 2])
# Check that error is raised if there is a class with only one sample
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 0.2)
# Check that error is raised if the test set size is smaller than n_classes
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 2)
# Check that error is raised if the train set size is smaller than
# n_classes
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 3, 2)
y = np.asarray([0, 0, 0, 1, 1, 1, 2, 2, 2])
    # Check that errors are raised if there are not enough samples
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 0.5, 0.6)
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 8, 0.6)
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, 3, 0.6, 8)
# Train size or test size too small
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, train_size=2)
assert_raises(ValueError, cval.StratifiedShuffleSplit, y, test_size=2)
def test_stratified_shuffle_split_iter():
ys = [np.array([1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 3, 3]),
np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]),
np.array([0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2]),
np.array([1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4]),
np.array([-1] * 800 + [1] * 50)
]
for y in ys:
sss = cval.StratifiedShuffleSplit(y, 6, test_size=0.33,
random_state=0)
for train, test in sss:
assert_array_equal(np.unique(y[train]), np.unique(y[test]))
# Checks if folds keep classes proportions
p_train = (np.bincount(np.unique(y[train], return_inverse=True)[1])
/ float(len(y[train])))
p_test = (np.bincount(np.unique(y[test], return_inverse=True)[1])
/ float(len(y[test])))
assert_array_almost_equal(p_train, p_test, 1)
assert_equal(y[train].size + y[test].size, y.size)
assert_array_equal(np.lib.arraysetops.intersect1d(train, test), [])
@ignore_warnings
def test_stratified_shuffle_split_iter_no_indices():
y = np.asarray([0, 1, 2] * 10)
sss1 = cval.StratifiedShuffleSplit(y, indices=False, random_state=0)
train_mask, test_mask = next(iter(sss1))
sss2 = cval.StratifiedShuffleSplit(y, indices=True, random_state=0)
train_indices, test_indices = next(iter(sss2))
assert_array_equal(sorted(test_indices), np.where(test_mask)[0])
def test_stratified_shuffle_split_even():
    # Test the StratifiedShuffleSplit, indices are drawn with an
    # equal chance
n_folds = 5
n_iter = 1000
def assert_counts_are_ok(idx_counts, p):
# Here we test that the distribution of the counts
# per index is close enough to a binomial
threshold = 0.05 / n_splits
bf = stats.binom(n_splits, p)
for count in idx_counts:
p = bf.pmf(count)
assert_true(p > threshold,
"An index is not drawn with chance corresponding "
"to even draws")
for n_samples in (6, 22):
labels = np.array((n_samples // 2) * [0, 1])
splits = cval.StratifiedShuffleSplit(labels, n_iter=n_iter,
test_size=1./n_folds,
random_state=0)
train_counts = [0] * n_samples
test_counts = [0] * n_samples
n_splits = 0
for train, test in splits:
n_splits += 1
for counter, ids in [(train_counts, train), (test_counts, test)]:
for id in ids:
counter[id] += 1
assert_equal(n_splits, n_iter)
assert_equal(len(train), splits.n_train)
assert_equal(len(test), splits.n_test)
assert_equal(len(set(train).intersection(test)), 0)
label_counts = np.unique(labels)
assert_equal(splits.test_size, 1.0 / n_folds)
assert_equal(splits.n_train + splits.n_test, len(labels))
assert_equal(len(label_counts), 2)
ex_test_p = float(splits.n_test) / n_samples
ex_train_p = float(splits.n_train) / n_samples
assert_counts_are_ok(train_counts, ex_train_p)
assert_counts_are_ok(test_counts, ex_test_p)
def test_predefinedsplit_with_kfold_split():
    '''Check that PredefinedSplit can reproduce a split generated by KFold.'''
folds = -1 * np.ones(10)
kf_train = []
kf_test = []
for i, (train_ind, test_ind) in enumerate(cval.KFold(10, 5, shuffle=True)):
kf_train.append(train_ind)
kf_test.append(test_ind)
folds[test_ind] = i
ps_train = []
ps_test = []
ps = cval.PredefinedSplit(folds)
for train_ind, test_ind in ps:
ps_train.append(train_ind)
ps_test.append(test_ind)
assert_array_equal(ps_train, kf_train)
assert_array_equal(ps_test, kf_test)
def test_leave_label_out_changing_labels():
"""Check that LeaveOneLabelOut and LeavePLabelOut work normally if
the labels variable is changed before calling __iter__"""
labels = np.array([0, 1, 2, 1, 1, 2, 0, 0])
labels_changing = np.array(labels, copy=True)
lolo = cval.LeaveOneLabelOut(labels)
lolo_changing = cval.LeaveOneLabelOut(labels_changing)
lplo = cval.LeavePLabelOut(labels, p=2)
lplo_changing = cval.LeavePLabelOut(labels_changing, p=2)
labels_changing[:] = 0
for llo, llo_changing in [(lolo, lolo_changing), (lplo, lplo_changing)]:
for (train, test), (train_chan, test_chan) in zip(llo, llo_changing):
assert_array_equal(train, train_chan)
assert_array_equal(test, test_chan)
def test_cross_val_score():
clf = MockClassifier()
for a in range(-10, 10):
clf.a = a
# Smoke test
scores = cval.cross_val_score(clf, X, y)
assert_array_equal(scores, clf.score(X, y))
# test with multioutput y
scores = cval.cross_val_score(clf, X_sparse, X)
assert_array_equal(scores, clf.score(X_sparse, X))
scores = cval.cross_val_score(clf, X_sparse, y)
assert_array_equal(scores, clf.score(X_sparse, y))
# test with multioutput y
scores = cval.cross_val_score(clf, X_sparse, X)
assert_array_equal(scores, clf.score(X_sparse, X))
# test with X and y as list
list_check = lambda x: isinstance(x, list)
clf = CheckingClassifier(check_X=list_check)
scores = cval.cross_val_score(clf, X.tolist(), y.tolist())
clf = CheckingClassifier(check_y=list_check)
scores = cval.cross_val_score(clf, X, y.tolist())
assert_raises(ValueError, cval.cross_val_score, clf, X, y,
scoring="sklearn")
    # test with 3d X
X_3d = X[:, :, np.newaxis]
clf = MockClassifier(allow_nd=True)
scores = cval.cross_val_score(clf, X_3d, y)
clf = MockClassifier(allow_nd=False)
assert_raises(ValueError, cval.cross_val_score, clf, X_3d, y)
def test_cross_val_score_pandas():
# check cross_val_score doesn't destroy pandas dataframe
types = [(MockDataFrame, MockDataFrame)]
try:
from pandas import Series, DataFrame
types.append((Series, DataFrame))
except ImportError:
pass
for TargetType, InputFeatureType in types:
# X dataframe, y series
X_df, y_ser = InputFeatureType(X), TargetType(y)
check_df = lambda x: isinstance(x, InputFeatureType)
check_series = lambda x: isinstance(x, TargetType)
clf = CheckingClassifier(check_X=check_df, check_y=check_series)
cval.cross_val_score(clf, X_df, y_ser)
def test_cross_val_score_mask():
# test that cross_val_score works with boolean masks
svm = SVC(kernel="linear")
iris = load_iris()
X, y = iris.data, iris.target
with warnings.catch_warnings(record=True):
cv_indices = cval.KFold(len(y), 5, indices=True)
scores_indices = cval.cross_val_score(svm, X, y, cv=cv_indices)
with warnings.catch_warnings(record=True):
cv_masks = cval.KFold(len(y), 5, indices=False)
scores_masks = cval.cross_val_score(svm, X, y, cv=cv_masks)
assert_array_equal(scores_indices, scores_masks)
def test_cross_val_score_precomputed():
# test for svm with precomputed kernel
svm = SVC(kernel="precomputed")
iris = load_iris()
X, y = iris.data, iris.target
linear_kernel = np.dot(X, X.T)
score_precomputed = cval.cross_val_score(svm, linear_kernel, y)
svm = SVC(kernel="linear")
score_linear = cval.cross_val_score(svm, X, y)
assert_array_equal(score_precomputed, score_linear)
# Error raised for non-square X
svm = SVC(kernel="precomputed")
assert_raises(ValueError, cval.cross_val_score, svm, X, y)
# test error is raised when the precomputed kernel is not array-like
# or sparse
assert_raises(ValueError, cval.cross_val_score, svm,
linear_kernel.tolist(), y)
def test_cross_val_score_fit_params():
clf = MockClassifier()
n_samples = X.shape[0]
n_classes = len(np.unique(y))
DUMMY_INT = 42
DUMMY_STR = '42'
DUMMY_OBJ = object()
def assert_fit_params(clf):
"""Function to test that the values are passed correctly to the
classifier arguments for non-array type
"""
assert_equal(clf.dummy_int, DUMMY_INT)
assert_equal(clf.dummy_str, DUMMY_STR)
assert_equal(clf.dummy_obj, DUMMY_OBJ)
fit_params = {'sample_weight': np.ones(n_samples),
'class_prior': np.ones(n_classes) / n_classes,
'sparse_sample_weight': W_sparse,
'sparse_param': P_sparse,
'dummy_int': DUMMY_INT,
'dummy_str': DUMMY_STR,
'dummy_obj': DUMMY_OBJ,
'callback': assert_fit_params}
cval.cross_val_score(clf, X, y, fit_params=fit_params)
def test_cross_val_score_score_func():
clf = MockClassifier()
_score_func_args = []
def score_func(y_test, y_predict):
_score_func_args.append((y_test, y_predict))
return 1.0
with warnings.catch_warnings(record=True):
scoring = make_scorer(score_func)
score = cval.cross_val_score(clf, X, y, scoring=scoring)
assert_array_equal(score, [1.0, 1.0, 1.0])
assert len(_score_func_args) == 3
def test_cross_val_score_errors():
class BrokenEstimator:
pass
assert_raises(TypeError, cval.cross_val_score, BrokenEstimator(), X)
def test_train_test_split_errors():
assert_raises(ValueError, cval.train_test_split)
assert_raises(ValueError, cval.train_test_split, range(3), train_size=1.1)
assert_raises(ValueError, cval.train_test_split, range(3), test_size=0.6,
train_size=0.6)
assert_raises(ValueError, cval.train_test_split, range(3),
test_size=np.float32(0.6), train_size=np.float32(0.6))
assert_raises(ValueError, cval.train_test_split, range(3),
test_size="wrong_type")
assert_raises(ValueError, cval.train_test_split, range(3), test_size=2,
train_size=4)
assert_raises(TypeError, cval.train_test_split, range(3),
some_argument=1.1)
assert_raises(ValueError, cval.train_test_split, range(3), range(42))
def test_train_test_split():
X = np.arange(100).reshape((10, 10))
X_s = coo_matrix(X)
y = np.arange(10)
# simple test
split = cval.train_test_split(X, y, test_size=None, train_size=.5)
X_train, X_test, y_train, y_test = split
assert_equal(len(y_test), len(y_train))
# test correspondence of X and y
assert_array_equal(X_train[:, 0], y_train * 10)
assert_array_equal(X_test[:, 0], y_test * 10)
# conversion of lists to arrays (deprecated?)
with warnings.catch_warnings(record=True):
split = cval.train_test_split(X, X_s, y.tolist(), allow_lists=False)
X_train, X_test, X_s_train, X_s_test, y_train, y_test = split
assert_array_equal(X_train, X_s_train.toarray())
assert_array_equal(X_test, X_s_test.toarray())
# don't convert lists to anything else by default
split = cval.train_test_split(X, X_s, y.tolist())
X_train, X_test, X_s_train, X_s_test, y_train, y_test = split
assert_true(isinstance(y_train, list))
assert_true(isinstance(y_test, list))
# allow nd-arrays
X_4d = np.arange(10 * 5 * 3 * 2).reshape(10, 5, 3, 2)
y_3d = np.arange(10 * 7 * 11).reshape(10, 7, 11)
split = cval.train_test_split(X_4d, y_3d)
assert_equal(split[0].shape, (7, 5, 3, 2))
assert_equal(split[1].shape, (3, 5, 3, 2))
assert_equal(split[2].shape, (7, 7, 11))
assert_equal(split[3].shape, (3, 7, 11))
def train_test_split_pandas():
    # check train_test_split doesn't destroy pandas dataframe
types = [MockDataFrame]
try:
from pandas import DataFrame
types.append(DataFrame)
except ImportError:
pass
for InputFeatureType in types:
# X dataframe
X_df = InputFeatureType(X)
X_train, X_test = cval.train_test_split(X_df)
assert_true(isinstance(X_train, InputFeatureType))
assert_true(isinstance(X_test, InputFeatureType))
def train_test_split_mock_pandas():
# X mock dataframe
X_df = MockDataFrame(X)
X_train, X_test = cval.train_test_split(X_df)
assert_true(isinstance(X_train, MockDataFrame))
assert_true(isinstance(X_test, MockDataFrame))
X_train_arr, X_test_arr = cval.train_test_split(X_df, allow_lists=False)
assert_true(isinstance(X_train_arr, np.ndarray))
assert_true(isinstance(X_test_arr, np.ndarray))
def test_cross_val_score_with_score_func_classification():
iris = load_iris()
clf = SVC(kernel='linear')
# Default score (should be the accuracy score)
scores = cval.cross_val_score(clf, iris.data, iris.target, cv=5)
assert_array_almost_equal(scores, [0.97, 1., 0.97, 0.97, 1.], 2)
# Correct classification score (aka. zero / one score) - should be the
# same as the default estimator score
zo_scores = cval.cross_val_score(clf, iris.data, iris.target,
scoring="accuracy", cv=5)
assert_array_almost_equal(zo_scores, [0.97, 1., 0.97, 0.97, 1.], 2)
    # F1 score (classes are balanced, so f1_score should be equal to the
    # zero/one score)
f1_scores = cval.cross_val_score(clf, iris.data, iris.target,
scoring="f1_weighted", cv=5)
assert_array_almost_equal(f1_scores, [0.97, 1., 0.97, 0.97, 1.], 2)
def test_cross_val_score_with_score_func_regression():
X, y = make_regression(n_samples=30, n_features=20, n_informative=5,
random_state=0)
reg = Ridge()
# Default score of the Ridge regression estimator
scores = cval.cross_val_score(reg, X, y, cv=5)
assert_array_almost_equal(scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)
# R2 score (aka. determination coefficient) - should be the
# same as the default estimator score
r2_scores = cval.cross_val_score(reg, X, y, scoring="r2", cv=5)
assert_array_almost_equal(r2_scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)
# Mean squared error; this is a loss function, so "scores" are negative
mse_scores = cval.cross_val_score(reg, X, y, cv=5,
scoring="mean_squared_error")
expected_mse = np.array([-763.07, -553.16, -274.38, -273.26, -1681.99])
assert_array_almost_equal(mse_scores, expected_mse, 2)
# Explained variance
scoring = make_scorer(explained_variance_score)
ev_scores = cval.cross_val_score(reg, X, y, cv=5, scoring=scoring)
assert_array_almost_equal(ev_scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)
def test_permutation_score():
iris = load_iris()
X = iris.data
X_sparse = coo_matrix(X)
y = iris.target
svm = SVC(kernel='linear')
cv = cval.StratifiedKFold(y, 2)
score, scores, pvalue = cval.permutation_test_score(
svm, X, y, n_permutations=30, cv=cv, scoring="accuracy")
assert_greater(score, 0.9)
assert_almost_equal(pvalue, 0.0, 1)
score_label, _, pvalue_label = cval.permutation_test_score(
svm, X, y, n_permutations=30, cv=cv, scoring="accuracy",
labels=np.ones(y.size), random_state=0)
assert_true(score_label == score)
assert_true(pvalue_label == pvalue)
# check that we obtain the same results with a sparse representation
svm_sparse = SVC(kernel='linear')
cv_sparse = cval.StratifiedKFold(y, 2)
score_label, _, pvalue_label = cval.permutation_test_score(
svm_sparse, X_sparse, y, n_permutations=30, cv=cv_sparse,
scoring="accuracy", labels=np.ones(y.size), random_state=0)
assert_true(score_label == score)
assert_true(pvalue_label == pvalue)
# test with custom scoring object
def custom_score(y_true, y_pred):
return (((y_true == y_pred).sum() - (y_true != y_pred).sum())
/ y_true.shape[0])
scorer = make_scorer(custom_score)
score, _, pvalue = cval.permutation_test_score(
svm, X, y, n_permutations=100, scoring=scorer, cv=cv, random_state=0)
assert_almost_equal(score, .93, 2)
assert_almost_equal(pvalue, 0.01, 3)
# set random y
y = np.mod(np.arange(len(y)), 3)
score, scores, pvalue = cval.permutation_test_score(
svm, X, y, n_permutations=30, cv=cv, scoring="accuracy")
assert_less(score, 0.5)
assert_greater(pvalue, 0.2)
def test_cross_val_generator_with_mask():
X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
y = np.array([1, 1, 2, 2])
labels = np.array([1, 2, 3, 4])
loo = assert_warns(DeprecationWarning, cval.LeaveOneOut,
4, indices=False)
lpo = assert_warns(DeprecationWarning, cval.LeavePOut,
4, 2, indices=False)
kf = assert_warns(DeprecationWarning, cval.KFold,
4, 2, indices=False)
skf = assert_warns(DeprecationWarning, cval.StratifiedKFold,
y, 2, indices=False)
lolo = assert_warns(DeprecationWarning, cval.LeaveOneLabelOut,
labels, indices=False)
lopo = assert_warns(DeprecationWarning, cval.LeavePLabelOut,
labels, 2, indices=False)
ss = assert_warns(DeprecationWarning, cval.ShuffleSplit,
4, indices=False)
ps = assert_warns(DeprecationWarning, cval.PredefinedSplit,
[1, 1, 2, 2], indices=False)
for cv in [loo, lpo, kf, skf, lolo, lopo, ss, ps]:
for train, test in cv:
assert_equal(np.asarray(train).dtype.kind, 'b')
            assert_equal(np.asarray(test).dtype.kind, 'b')
X_train, X_test = X[train], X[test]
y_train, y_test = y[train], y[test]
def test_cross_val_generator_with_indices():
X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
y = np.array([1, 1, 2, 2])
labels = np.array([1, 2, 3, 4])
# explicitly passing indices value is deprecated
loo = assert_warns(DeprecationWarning, cval.LeaveOneOut,
4, indices=True)
lpo = assert_warns(DeprecationWarning, cval.LeavePOut,
4, 2, indices=True)
kf = assert_warns(DeprecationWarning, cval.KFold,
4, 2, indices=True)
skf = assert_warns(DeprecationWarning, cval.StratifiedKFold,
y, 2, indices=True)
lolo = assert_warns(DeprecationWarning, cval.LeaveOneLabelOut,
labels, indices=True)
lopo = assert_warns(DeprecationWarning, cval.LeavePLabelOut,
labels, 2, indices=True)
ps = assert_warns(DeprecationWarning, cval.PredefinedSplit,
[1, 1, 2, 2], indices=True)
# Bootstrap as a cross-validation is deprecated
b = assert_warns(DeprecationWarning, cval.Bootstrap, 2)
ss = assert_warns(DeprecationWarning, cval.ShuffleSplit,
2, indices=True)
for cv in [loo, lpo, kf, skf, lolo, lopo, b, ss, ps]:
for train, test in cv:
assert_not_equal(np.asarray(train).dtype.kind, 'b')
            assert_not_equal(np.asarray(test).dtype.kind, 'b')
X_train, X_test = X[train], X[test]
y_train, y_test = y[train], y[test]
@ignore_warnings
def test_cross_val_generator_with_default_indices():
X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
y = np.array([1, 1, 2, 2])
labels = np.array([1, 2, 3, 4])
loo = cval.LeaveOneOut(4)
lpo = cval.LeavePOut(4, 2)
kf = cval.KFold(4, 2)
skf = cval.StratifiedKFold(y, 2)
lolo = cval.LeaveOneLabelOut(labels)
lopo = cval.LeavePLabelOut(labels, 2)
b = cval.Bootstrap(2) # only in index mode
ss = cval.ShuffleSplit(2)
ps = cval.PredefinedSplit([1, 1, 2, 2])
for cv in [loo, lpo, kf, skf, lolo, lopo, b, ss, ps]:
for train, test in cv:
assert_not_equal(np.asarray(train).dtype.kind, 'b')
            assert_not_equal(np.asarray(test).dtype.kind, 'b')
X_train, X_test = X[train], X[test]
y_train, y_test = y[train], y[test]
@ignore_warnings
def test_cross_val_generator_mask_indices_same():
# Test that the cross validation generators return the same results when
# indices=True and when indices=False
y = np.array([0, 0, 0, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2])
labels = np.array([1, 1, 2, 3, 3, 3, 4])
loo_mask = cval.LeaveOneOut(5, indices=False)
loo_ind = cval.LeaveOneOut(5, indices=True)
lpo_mask = cval.LeavePOut(10, 2, indices=False)
lpo_ind = cval.LeavePOut(10, 2, indices=True)
kf_mask = cval.KFold(10, 5, indices=False, shuffle=True, random_state=1)
kf_ind = cval.KFold(10, 5, indices=True, shuffle=True, random_state=1)
skf_mask = cval.StratifiedKFold(y, 3, indices=False)
skf_ind = cval.StratifiedKFold(y, 3, indices=True)
lolo_mask = cval.LeaveOneLabelOut(labels, indices=False)
lolo_ind = cval.LeaveOneLabelOut(labels, indices=True)
lopo_mask = cval.LeavePLabelOut(labels, 2, indices=False)
lopo_ind = cval.LeavePLabelOut(labels, 2, indices=True)
ps_mask = cval.PredefinedSplit([1, 1, 2, 2], indices=False)
ps_ind = cval.PredefinedSplit([1, 1, 2, 2], indices=True)
for cv_mask, cv_ind in [(loo_mask, loo_ind), (lpo_mask, lpo_ind),
(kf_mask, kf_ind), (skf_mask, skf_ind),
(lolo_mask, lolo_ind), (lopo_mask, lopo_ind),
(ps_mask, ps_ind)]:
for (train_mask, test_mask), (train_ind, test_ind) in \
zip(cv_mask, cv_ind):
assert_array_equal(np.where(train_mask)[0], train_ind)
assert_array_equal(np.where(test_mask)[0], test_ind)
@ignore_warnings
def test_bootstrap_errors():
assert_raises(ValueError, cval.Bootstrap, 10, train_size=100)
assert_raises(ValueError, cval.Bootstrap, 10, test_size=100)
assert_raises(ValueError, cval.Bootstrap, 10, train_size=1.1)
assert_raises(ValueError, cval.Bootstrap, 10, test_size=1.1)
assert_raises(ValueError, cval.Bootstrap, 10, train_size=0.6,
test_size=0.5)
@ignore_warnings
def test_bootstrap_test_sizes():
assert_equal(cval.Bootstrap(10, test_size=0.2).test_size, 2)
assert_equal(cval.Bootstrap(10, test_size=1).test_size, 1)
assert_equal(cval.Bootstrap(10, train_size=1.).train_size, 10)
assert_equal(cval.Bootstrap(10, test_size=2).test_size, 2)
assert_equal(cval.Bootstrap(10, test_size=None).test_size, 5)
def test_shufflesplit_errors():
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=2.0)
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=1.0)
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=0.1,
train_size=0.95)
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=11)
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=10)
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=8, train_size=3)
assert_raises(ValueError, cval.ShuffleSplit, 10, train_size=1j)
assert_raises(ValueError, cval.ShuffleSplit, 10, test_size=None,
train_size=None)
def test_shufflesplit_reproducible():
# Check that iterating twice on the ShuffleSplit gives the same
# sequence of train-test when the random_state is given
ss = cval.ShuffleSplit(10, random_state=21)
assert_array_equal(list(a for a, b in ss), list(a for a, b in ss))
@ignore_warnings
def test_cross_indices_exception():
X = coo_matrix(np.array([[1, 2], [3, 4], [5, 6], [7, 8]]))
y = np.array([1, 1, 2, 2])
labels = np.array([1, 2, 3, 4])
loo = cval.LeaveOneOut(4, indices=False)
lpo = cval.LeavePOut(4, 2, indices=False)
kf = cval.KFold(4, 2, indices=False)
skf = cval.StratifiedKFold(y, 2, indices=False)
lolo = cval.LeaveOneLabelOut(labels, indices=False)
lopo = cval.LeavePLabelOut(labels, 2, indices=False)
assert_raises(ValueError, cval.check_cv, loo, X, y)
assert_raises(ValueError, cval.check_cv, lpo, X, y)
assert_raises(ValueError, cval.check_cv, kf, X, y)
assert_raises(ValueError, cval.check_cv, skf, X, y)
assert_raises(ValueError, cval.check_cv, lolo, X, y)
assert_raises(ValueError, cval.check_cv, lopo, X, y)
def test_safe_split_with_precomputed_kernel():
clf = SVC()
clfp = SVC(kernel="precomputed")
iris = load_iris()
X, y = iris.data, iris.target
K = np.dot(X, X.T)
cv = cval.ShuffleSplit(X.shape[0], test_size=0.25, random_state=0)
tr, te = list(cv)[0]
X_tr, y_tr = cval._safe_split(clf, X, y, tr)
K_tr, y_tr2 = cval._safe_split(clfp, K, y, tr)
assert_array_almost_equal(K_tr, np.dot(X_tr, X_tr.T))
X_te, y_te = cval._safe_split(clf, X, y, te, tr)
K_te, y_te2 = cval._safe_split(clfp, K, y, te, tr)
assert_array_almost_equal(K_te, np.dot(X_te, X_tr.T))
def test_cross_val_score_allow_nans():
# Check that cross_val_score allows input data with NaNs
X = np.arange(200, dtype=np.float64).reshape(10, -1)
X[2, :] = np.nan
y = np.repeat([0, 1], X.shape[0]/2)
p = Pipeline([
('imputer', Imputer(strategy='mean', missing_values='NaN')),
('classifier', MockClassifier()),
])
cval.cross_val_score(p, X, y, cv=5)
def test_train_test_split_allow_nans():
# Check that train_test_split allows input data with NaNs
X = np.arange(200, dtype=np.float64).reshape(10, -1)
X[2, :] = np.nan
y = np.repeat([0, 1], X.shape[0]/2)
cval.train_test_split(X, y, test_size=0.2, random_state=42)
def test_permutation_test_score_allow_nans():
# Check that permutation_test_score allows input data with NaNs
X = np.arange(200, dtype=np.float64).reshape(10, -1)
X[2, :] = np.nan
y = np.repeat([0, 1], X.shape[0]/2)
p = Pipeline([
('imputer', Imputer(strategy='mean', missing_values='NaN')),
('classifier', MockClassifier()),
])
cval.permutation_test_score(p, X, y, cv=5)
def test_check_cv_return_types():
X = np.ones((9, 2))
cv = cval._check_cv(3, X, classifier=False)
assert_true(isinstance(cv, cval.KFold))
y_binary = np.array([0, 1, 0, 1, 0, 0, 1, 1, 1])
cv = cval._check_cv(3, X, y_binary, classifier=True)
assert_true(isinstance(cv, cval.StratifiedKFold))
y_multiclass = np.array([0, 1, 0, 1, 2, 1, 2, 0, 2])
cv = cval._check_cv(3, X, y_multiclass, classifier=True)
assert_true(isinstance(cv, cval.StratifiedKFold))
X = np.ones((5, 2))
y_seq_of_seqs = [[], [1, 2], [3], [0, 1, 3], [2]]
cv = cval._check_cv(3, X, y_seq_of_seqs, classifier=True)
assert_true(isinstance(cv, cval.KFold))
y_indicator_matrix = LabelBinarizer().fit_transform(y_seq_of_seqs)
cv = cval._check_cv(3, X, y_indicator_matrix, classifier=True)
assert_true(isinstance(cv, cval.KFold))
y_multioutput = np.array([[1, 2], [0, 3], [0, 0], [3, 1], [2, 0]])
cv = cval._check_cv(3, X, y_multioutput, classifier=True)
assert_true(isinstance(cv, cval.KFold))
def test_cross_val_score_multilabel():
X = np.array([[-3, 4], [2, 4], [3, 3], [0, 2], [-3, 1],
[-2, 1], [0, 0], [-2, -1], [-1, -2], [1, -2]])
y = np.array([[1, 1], [0, 1], [0, 1], [0, 1], [1, 1],
[0, 1], [1, 0], [1, 1], [1, 0], [0, 0]])
clf = KNeighborsClassifier(n_neighbors=1)
scoring_micro = make_scorer(precision_score, average='micro')
scoring_macro = make_scorer(precision_score, average='macro')
scoring_samples = make_scorer(precision_score, average='samples')
score_micro = cval.cross_val_score(clf, X, y, scoring=scoring_micro, cv=5)
score_macro = cval.cross_val_score(clf, X, y, scoring=scoring_macro, cv=5)
score_samples = cval.cross_val_score(clf, X, y,
scoring=scoring_samples, cv=5)
assert_almost_equal(score_micro, [1, 1/2, 3/4, 1/2, 1/3])
assert_almost_equal(score_macro, [1, 1/2, 3/4, 1/2, 1/4])
assert_almost_equal(score_samples, [1, 1/2, 3/4, 1/2, 1/4])
def test_cross_val_predict():
boston = load_boston()
X, y = boston.data, boston.target
cv = cval.KFold(len(boston.target))
est = Ridge()
# Naive loop (should be same as cross_val_predict):
preds2 = np.zeros_like(y)
for train, test in cv:
est.fit(X[train], y[train])
preds2[test] = est.predict(X[test])
preds = cval.cross_val_predict(est, X, y, cv=cv)
assert_array_almost_equal(preds, preds2)
preds = cval.cross_val_predict(est, X, y)
assert_equal(len(preds), len(y))
cv = cval.LeaveOneOut(len(y))
preds = cval.cross_val_predict(est, X, y, cv=cv)
assert_equal(len(preds), len(y))
Xsp = X.copy()
Xsp *= (Xsp > np.median(Xsp))
Xsp = coo_matrix(Xsp)
preds = cval.cross_val_predict(est, Xsp, y)
assert_array_almost_equal(len(preds), len(y))
preds = cval.cross_val_predict(KMeans(), X)
assert_equal(len(preds), len(y))
def bad_cv():
for i in range(4):
yield np.array([0, 1, 2, 3]), np.array([4, 5, 6, 7, 8])
assert_raises(ValueError, cval.cross_val_predict, est, X, y, cv=bad_cv())
def test_sparse_fit_params():
iris = load_iris()
X, y = iris.data, iris.target
clf = MockClassifier()
fit_params = {'sparse_sample_weight': coo_matrix(np.eye(X.shape[0]))}
a = cval.cross_val_score(clf, X, y, fit_params=fit_params)
assert_array_equal(a, np.ones(3))
def test_check_is_partition():
p = np.arange(100)
assert_true(cval._check_is_partition(p, 100))
assert_false(cval._check_is_partition(np.delete(p, 23), 100))
p[0] = 23
assert_false(cval._check_is_partition(p, 100))
|
ycaihua/scikit-learn
|
sklearn/tests/test_cross_validation.py
|
Python
|
bsd-3-clause
| 45,051
|
#!/usr/bin/env python
"""
Generate the table of all terms for the sphinx documentation.
"""
from __future__ import absolute_import
import os
from sfepy.base.base import dict_from_keys_init
from sfepy.discrete.equations import parse_definition
from sfepy.base.conf import ProblemConf, get_standard_keywords
from sfepy.base.ioutils import locate_files
from sfepy import get_paths
import sys
from argparse import ArgumentParser
import pyparsing as pp
import numpy as nm
import six
sys.path.append('.')
import sfepy.discrete.fem # Hack: fix circular dependency, as terms.pyx imports
# from sfepy.discrete.fem
from sfepy.terms import term_table
def set_section(sec):
def action(str, loc, toks):
if toks:
sec[0] = toks[0][1:-1]
return toks
return action
def to_list(slist, sec):
def action(str, loc, toks):
if toks:
slist.append((sec[0], toks[0]))
return toks
return action
def create_parser(slist, current_section):
colon = pp.Literal(':')
section = pp.Combine(colon
+ pp.Word(pp.alphas, pp.alphanums + '_ ')
+ colon)
section.setParseAction(set_section(current_section))
section.setName('section')
text = pp.SkipTo(section | pp.StringEnd())
text.setParseAction(to_list(slist, current_section))
text.setName('text')
doc = pp.StringStart()\
+ pp.Optional(text) + pp.ZeroOrMore(section + text)\
+ pp.StringEnd()
return doc
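# Illustrative sketch (hypothetical, not from the original script): create_parser()
# splits a term docstring into (section, text) pairs, where section markers look
# like ":Definition:". The sample docstring below is made up.
def _example_parse_sections():
    slist = []
    current_section = ['']
    parser = create_parser(slist, current_section)
    parser.parseString('intro text\n:Definition:\nsome definition text')
    # slist now holds (section, text) pairs, roughly
    # [('', 'intro text'), ('Definition', 'some definition text')],
    # modulo surrounding whitespace.
    return slist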
newpage = r"""
.. raw:: latex
\newpage
"""
header = """
.. tabularcolumns:: |p{0.15\linewidth}|p{0.10\linewidth}|p{0.6\linewidth}|p{0.15\linewidth}|
.. list-table:: %s terms
:widths: 15 10 60 15
:header-rows: 1
:class: longtable
* - name/class
- arguments
- definition
- examples
"""
table_row = """ * - %s
:class:`%s <%s.%s>`
- %s
- %s
- %s
"""
def format_next(text, new_text, pos, can_newline, width, ispaces):
new_len = len(new_text)
if (pos + new_len > width) and can_newline:
text += '\n' + ispaces + new_text
pos = new_len
can_newline = False
else:
if pos > 0:
text += ' ' + new_text
pos += new_len + 1
else:
text += new_text
pos += new_len
can_newline = True
return text, pos, can_newline
def typeset_to_indent(txt, indent0, indent, width):
if not len(txt): return txt
txt_lines = txt.strip().split('\n')
ispaces = ' ' * indent
text = (' ' * indent0) + txt_lines[0] + '\n' + ispaces
can_newline = False
pos = indent0
for line in txt_lines[1:]:
for word in line.split():
text, pos, can_newline = format_next(text, word, pos, can_newline,
width, ispaces)
return text
def typeset_term_syntax(term_class):
if ((len(term_class.arg_types) > 1) and not
isinstance(term_class.arg_types[0], str)):
arg_types = [', '.join(['``<%s>``' % arg for arg in arg_type])
for arg_type in term_class.arg_types]
text = '\n\n '.join(arg_types)
else:
text = ', '.join(['``<%s>``' % arg for arg in term_class.arg_types])
return text
link_example = ':ref:`%s <%s>`'
omits = [
'vibro_acoustic3d_mid.py',
'its2D_5.py',
'linear_elastic_probes.py',
'__init__.py',
]
def typeset_examples(term_class, term_use):
# e.g. fem-time_advection_diffusion -> tim.adv.dif.
to_shorter_name = lambda st: '.'.join(
[s[:3] for s in st.split('-')[-1].split('_')])
link_list = [(link_example % (to_shorter_name(exmpl), exmpl))
for exmpl in term_use[term_class.name]]
return ', '.join(link_list)
def get_examples(table):
term_use = dict_from_keys_init(table.keys(), set)
required, other = get_standard_keywords()
for filename in locate_files('*py', get_paths('examples/')[0]):
try:
conf = ProblemConf.from_file(filename, required, other,
verbose=False)
except:
continue
ebase = filename.split('examples/')[1]
lbase = os.path.splitext(ebase)[0]
label = lbase.replace('/', '-')
pyfile_name = ebase.split('/')[1]
if pyfile_name in omits:
continue
use = conf.options.get('use_equations', 'equations')
eqs_conf = getattr(conf, use)
for key, eq_conf in six.iteritems(eqs_conf):
term_descs = parse_definition(eq_conf)
for td in term_descs:
term_use[td.name].add(label)
return term_use
def typeset_term_table(fd, keys, table, title):
"""Terms are sorted by name without the d*_ prefix."""
sec_list = []
current_section = ['']
parser = create_parser(sec_list, current_section)
fd.write('.. _term_table_%s:\n' % title)
label = 'Table of %s terms' % title
fd.write(''.join([newpage, label, '\n', '"' * len(label), '\n']))
fd.write(header % (title[0].upper() + title[1:]))
term_use = get_examples(table)
sort_keys = [key[key.find('_'):] for key in keys]
iis = nm.argsort(sort_keys)
for ii in iis:
key = keys[ii]
item_class = table[key]
doc = item_class.__doc__
if doc is not None:
sec_list[:] = []
current_section[0] = ''
parser.parseString(doc)
dd = [x[1] for x in sec_list if x[0].lower() == 'definition']
if len(dd):
dd = dd[0]
else:
dd = ''
dds = dd.strip().split('\n\n')
definition = '\n\n'.join(typeset_to_indent(dd, 7, 11, 65)
for dd in dds)[7:]
fd.write(table_row % (item_class.name,
item_class.__name__,
item_class.__module__,
item_class.__name__,
typeset_term_syntax(item_class),
definition,
typeset_examples(item_class, term_use)))
fd.write('\n')
def typeset_term_tables(fd, table):
    """Generate tables: basic, sensitivity, large deformation, special, multi-linear."""
scattab = [
('_st_', 2),
('_sd_', 0),
('_adj_', 0),
('_tl_', 1),
('_ul_', 1),
('_th', 2),
('_eth', 2),
('_of_', 2),
('de_', 3)]
new_tabs = [[],[],[],[]]
for term_name in six.iterkeys(table):
for term_tag, tab_id in scattab:
if term_tag in term_name:
new_tabs[tab_id].append(term_name)
break
basic_keys = list(set(table.keys())
- set(new_tabs[0]) - set(new_tabs[1])
- set(new_tabs[2]) - set(new_tabs[3]))
typeset_term_table(fd, basic_keys, table, 'basic')
typeset_term_table(fd, new_tabs[0], table, 'sensitivity')
typeset_term_table(fd, new_tabs[1], table, 'large deformation')
typeset_term_table(fd, new_tabs[2], table, 'special')
typeset_term_table(fd, new_tabs[3], table, 'multi-linear')
fd.write(newpage)
def typeset(filename):
"""Utility function called by sphinx. """
fd = open(filename, 'w')
typeset_term_tables(fd, term_table)
fd.close()
def gen_term_table(app):
typeset(os.path.join(app.builder.srcdir, 'term_table.rst'))
def setup(app):
app.connect('builder-inited', gen_term_table)
helps = {
'output_filename' :
'output file name',
}
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument("--version", action="version", version="%(prog)s")
parser.add_argument("-o", "--output", metavar='output_filename',
action="store", dest="output_filename",
default="term_table.rst",
help=helps['output_filename'])
options = parser.parse_args()
typeset(options.output_filename)
if __name__ == '__main__':
main()
|
vlukes/sfepy
|
script/gen_term_table.py
|
Python
|
bsd-3-clause
| 8,129
|
#
# Copyright 2013 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A source to be used in testing.
"""
import pytz
from six.moves import filter
from datetime import datetime, timedelta
import itertools
from six.moves import range
from zipline.protocol import (
Event,
DATASOURCE_TYPE
)
from zipline.gens.utils import hash_args
def create_trade(sid, price, amount, datetime, source_id="test_factory"):
trade = Event()
trade.source_id = source_id
trade.type = DATASOURCE_TYPE.TRADE
trade.sid = sid
trade.dt = datetime
trade.price = price
trade.close_price = price
trade.open_price = price
trade.low = price * .95
trade.high = price * 1.05
trade.volume = amount
return trade
def date_gen(start,
end,
env,
delta=timedelta(minutes=1),
repeats=None):
"""
Utility to generate a stream of dates.
"""
daily_delta = not (delta.total_seconds()
% timedelta(days=1).total_seconds())
cur = start
if daily_delta:
# if we are producing daily timestamps, we
# use midnight
cur = cur.replace(hour=0, minute=0, second=0,
microsecond=0)
def advance_current(cur):
"""
Advances the current dt skipping non market days and minutes.
"""
cur = cur + delta
if not (env.is_trading_day
if daily_delta
else env.is_market_hours)(cur):
if daily_delta:
return env.next_trading_day(cur)
else:
return env.next_open_and_close(cur)[0]
else:
return cur
# yield count trade events, all on trading days, and
# during trading hours.
while cur < end:
if repeats:
for j in range(repeats):
yield cur
else:
yield cur
cur = advance_current(cur)
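# Illustrative sketch (hypothetical, not from the original module): date_gen only
# touches env.is_trading_day / env.is_market_hours / env.next_trading_day /
# env.next_open_and_close, so a permissive stand-in environment is enough to see
# the stream of timestamps it yields.
def _example_date_gen():
    class _AlwaysOpenEnv(object):
        def is_trading_day(self, dt):
            return True
        def is_market_hours(self, dt):
            return True
        def next_trading_day(self, dt):
            return dt
        def next_open_and_close(self, dt):
            return dt, dt
    start = datetime(2008, 6, 6, 15, tzinfo=pytz.utc)
    # Three minute-spaced timestamps: 15:00, 15:01 and 15:02.
    return list(date_gen(start, start + timedelta(minutes=3), _AlwaysOpenEnv()))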
class SpecificEquityTrades(object):
"""
Yields all events in event_list that match the given sid_filter.
If no event_list is specified, generates an internal stream of events
to filter. Returns all events if filter is None.
Configuration options:
count : integer representing number of trades
sids : list of values representing simulated internal sids
start : start date
delta : timedelta between internal events
filter : filter to remove the sids
"""
def __init__(self, env, *args, **kwargs):
# We shouldn't get any positional arguments.
assert len(args) == 0
self.env = env
# Default to None for event_list and filter.
self.event_list = kwargs.get('event_list')
self.filter = kwargs.get('filter')
if self.event_list is not None:
# If event_list is provided, extract parameters from there
# This isn't really clean and ultimately I think this
# class should serve a single purpose (either take an
# event_list or autocreate events).
self.count = kwargs.get('count', len(self.event_list))
self.start = kwargs.get('start', self.event_list[0].dt)
self.end = kwargs.get('end', self.event_list[-1].dt)
self.delta = kwargs.get(
'delta',
self.event_list[1].dt - self.event_list[0].dt)
self.concurrent = kwargs.get('concurrent', False)
self.identifiers = kwargs.get(
'sids',
set(event.sid for event in self.event_list)
)
assets_by_identifier = {}
for identifier in self.identifiers:
assets_by_identifier[identifier] = env.asset_finder.\
lookup_generic(identifier, datetime.now())[0]
self.sids = [asset.sid for asset in assets_by_identifier.values()]
for event in self.event_list:
event.sid = assets_by_identifier[event.sid].sid
else:
# Unpack config dictionary with default values.
self.count = kwargs.get('count', 500)
self.start = kwargs.get(
'start',
datetime(2008, 6, 6, 15, tzinfo=pytz.utc))
self.end = kwargs.get(
'end',
datetime(2008, 6, 6, 15, tzinfo=pytz.utc))
self.delta = kwargs.get(
'delta',
timedelta(minutes=1))
self.concurrent = kwargs.get('concurrent', False)
self.identifiers = kwargs.get('sids', [1, 2])
assets_by_identifier = {}
for identifier in self.identifiers:
assets_by_identifier[identifier] = env.asset_finder.\
lookup_generic(identifier, datetime.now())[0]
self.sids = [asset.sid for asset in assets_by_identifier.values()]
# Hash_value for downstream sorting.
self.arg_string = hash_args(*args, **kwargs)
self.generator = self.create_fresh_generator()
def __iter__(self):
return self
def next(self):
return self.generator.next()
def __next__(self):
return next(self.generator)
def rewind(self):
self.generator = self.create_fresh_generator()
def get_hash(self):
return self.__class__.__name__ + "-" + self.arg_string
    def update_source_id(self, gen):
for event in gen:
event.source_id = self.get_hash()
yield event
def create_fresh_generator(self):
if self.event_list:
event_gen = (event for event in self.event_list)
unfiltered = self.update_source_id(event_gen)
# Set up iterators for each expected field.
else:
if self.concurrent:
# in this context the count is the number of
# trades per sid, not the total.
date_generator = date_gen(
start=self.start,
end=self.end,
delta=self.delta,
repeats=len(self.sids),
env=self.env,
)
else:
date_generator = date_gen(
start=self.start,
end=self.end,
delta=self.delta,
env=self.env,
)
source_id = self.get_hash()
unfiltered = (
create_trade(
sid=sid,
price=float(i % 10) + 1.0,
amount=(i * 50) % 900 + 100,
datetime=date,
source_id=source_id,
) for (i, date), sid in itertools.product(
enumerate(date_generator), self.sids
)
)
# If we specified a sid filter, filter out elements that don't
# match the filter.
if self.filter:
filtered = filter(
lambda event: event.sid in self.filter, unfiltered)
# Otherwise just use all events.
else:
filtered = unfiltered
# Return the filtered event stream.
return filtered
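# Illustrative sketch (hypothetical, not from the original module): building a
# small synthetic trade source from an explicit event list, the configuration
# path described in the class docstring. The env argument is assumed to provide
# an asset_finder with lookup_generic(), as the constructor requires; the sids,
# prices and amounts below are made-up placeholders.
def _example_specific_equity_trades(env):
    events = [
        create_trade(sid=1, price=10.0, amount=100,
                     datetime=datetime(2008, 6, 6, 15, tzinfo=pytz.utc)),
        create_trade(sid=1, price=10.5, amount=200,
                     datetime=datetime(2008, 6, 6, 15, 1, tzinfo=pytz.utc)),
    ]
    source = SpecificEquityTrades(env, event_list=events)
    # Iterating the source yields the same events with source_id set to the
    # source's hash.
    return list(source)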
|
jimgoo/zipline-fork
|
zipline/sources/test_source.py
|
Python
|
apache-2.0
| 7,702
|
import logging
import pathlib
import time
import discord
from discord.ext import commands, tasks
import Data
from NossiInterface.Tools import discordname
logger = logging.getLogger(__name__)
class NossiCog(commands.Cog, name="NossiBot"):
def __init__(self, client):
self.client: discord.client = client
self.storage = Data.read("NossiBot.storage") or dict()
self.storage_age = time.time()
self.shutdownflag = pathlib.Path("~/shutdown_nossibot")
self.tasks.start()
@property
def allowed_channels(self):
return self.storage.get("allowed_rooms", [])
def persist(self):
Data.write("NossiBot.storage", self.storage)
@tasks.loop(seconds=5)
async def tasks(self):
for c in self.client.voice_clients:
if not c.is_playing():
await c.disconnect()
if self.shutdownflag.exists():
self.shutdownflag.unlink()
await self.client.owner.send("I got Killed")
            await self.client.close()
if time.time() - self.storage_age > 30:
            self.storage = Data.read("NossiBot.storage") or dict()
            self.storage_age = time.time()
@commands.group(name="NossiBot")
async def nossi(self, ctx: commands.Context):
pass
@commands.is_owner()
@nossi.command("DIE")
async def die(self, ctx):
await ctx.message.add_reaction("\U0001f480")
await ctx.send("I shall die.")
await self.client.close()
@commands.is_owner()
@nossi.command("JOIN")
async def joinme(self, ctx):
vc = ctx.author.voice.channel
connection: discord.VoiceClient = await vc.connect()
logger.info(f"Voice Connection: { connection.is_connected()}")
connection.play(
# discord.FFmpegOpusAudio("default", before_options="-f pulse"),
# contents of pacatffmpeg
# #!/bin/bash
# pacat -r -d alsa_output.pci-0000_00_1b.0.analog-stereo.monitor \
# --format=s32le --rate=48000 > ~/soundpipe
discord.FFmpegPCMAudio(
pathlib.Path("~/soundpipe").expanduser(),
before_options="-f s32le -ac 2 -ar 48000",
),
after=lambda e: logger.info(
"disconnected with " + (f"{e}" if e else "no errors.")
),
)
@nossi.command("SYNC")
async def sync(self, ctx):
vc = ctx.author.voice.channel
for c in self.client.voice_clients:
if vc == c.channel:
connection = c
break
else:
return
r = discord.FFmpegPCMAudio(
pathlib.Path("~/soundpipe").expanduser(),
before_options="-f s32le -ac 2 -ar 48000",
)
connection.stop()
connection.play(
r,
after=lambda e: logger.info(
"disconnected with " + (f"{e}" if e else "no errors.")
),
)
connection.resume()
@commands.is_owner()
@nossi.command("LEAVE")
async def leave(self, ctx):
vc = ctx.author.voice.channel
for c in self.client.voice_clients:
if vc == c.channel:
connection = c
break
else:
return
await connection.disconnect()
await ctx.message.add_reaction("🔇")
@nossi.command("i")
async def iam(self, ctx, am: str, *msg):
if not am == "am":
raise commands.CommandNotFound(f'Command "i {am}" is not found')
if self.storage.get(discordname(ctx.message.author), None) is None:
self.storage[discordname(ctx.message.author)] = {"defines": {}}
self.storage[discordname(ctx.message.author)]["NossiAccount"] = (
" ".join(msg).strip().upper()
)
self.storage[discordname(ctx.message.author)]["DiscordAccount"] = discordname(
ctx.message.author
)
await ctx.message.add_reaction("\N{THUMBS UP SIGN}")
self.persist()
await self.whoami(ctx, "am", "i")
@nossi.command("who")
async def whoami(self, ctx, am, i):
if not ((am == "am") and (i == "i")):
raise commands.CommandNotFound(f'Command "who {am} {i}" is not found')
try:
await ctx.send(
"You are "
+ self.storage[discordname(ctx.message.author)]["NossiAccount"]
)
except KeyError:
await ctx.send("I have no recollection of you.")
@commands.guild_only()
@nossi.command("BANISH")
async def banish(self, ctx):
self.storage["allowed_rooms"].remove(ctx.message.channel.id)
self.persist()
await ctx.send("I will no longer listen here.")
@commands.guild_only()
@nossi.command("INVOKE")
async def invoke(self, ctx):
self.storage["allowed_rooms"] = {ctx.message.channel.id} | self.storage.get(
"allowed_rooms", set()
)
self.persist()
await ctx.send(
"I have been invoked and shall do my duties here until BANISHed."
)
def setup(client: commands.Bot):
client.add_cog(NossiCog(client))
|
x4dr/NossiNet
|
NossiInterface/Cogs/NossiCog.py
|
Python
|
gpl-2.0
| 5,131
|
# encoding: utf-8
"""
cache.py
Created by David Farrar on 2012-12-27.
Copyright (c) 2009-2013 Exa Networks. All rights reserved.
"""
import time
class Cache (dict):
def __init__ (self, min_items=10, max_items=2000, cache_life=3600):
dict.__init__(self)
self.ordered = []
self.min_items = min_items
self.max_items = max_items
self.cache_life = cache_life
self.last_accessed = int(time.time())
def cache (self, key, value):
now = int(time.time())
if now - self.last_accessed >= self.cache_life:
self.truncate(self.min_items)
elif len(self) >= self.max_items:
self.truncate(self.max_items/2)
if key not in self:
self.ordered.append(key)
self.last_accessed = now
self[key] = value
return value
def retrieve (self, key):
now = int(time.time())
res = self[key]
if now - self.last_accessed >= self.cache_life:
self.truncate(self.min_items)
# only update the access time if we modified the cache
self.last_accessed = now
return res
def truncate (self, pos):
pos = len(self.ordered) - pos
expiring = self.ordered[:pos]
self.ordered = self.ordered[pos:]
for _key in expiring:
self.pop(_key)
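# Illustrative usage sketch (hypothetical, not from the original module): values
# are written with cache() and read back with retrieve(); the oldest entries are
# dropped once max_items is reached or cache_life seconds have passed since the
# last access.
def _example_cache_usage():
    c = Cache(min_items=2, max_items=4, cache_life=60)
    c.cache('a', 1)
    c.cache('b', 2)
    return c.retrieve('a')  # -> 1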
if __name__ == '__main__':
class klass1:
def __init__ (self, data):
pass
class klass2 (object):
def __init__ (self, data):
pass
class klass3:
def __init__ (self, data):
self.a = data[0]
self.b = data[1]
self.c = data[2]
self.d = data[3]
self.e = data[4]
class klass4:
def __init__ (self, data):
self.a = data[0]
self.b = data[1]
self.c = data[2]
self.d = data[3]
self.e = data[4]
class _kparent1:
def __init__ (self, data):
self.a = data[0]
self.b = data[1]
class _kparent2 (object):
def __init__ (self, data):
self.a = data[0]
self.b = data[1]
class klass5 (_kparent1):
def __init__ (self, data):
_kparent1.__init__(self,data)
self.c = data[2]
self.d = data[3]
self.e = data[4]
class klass6 (_kparent2):
def __init__ (self, data):
_kparent2.__init__(self,data)
self.c = data[2]
self.d = data[3]
self.e = data[4]
class klass7 (klass6):
pass
class klass8 (klass6):
def __init__ (self, data):
klass6.__init__(self,data)
self.s = self.a + self.b + self.c + self.d + self.e
class klass9 (klass6):
def __init__ (self, data):
klass6.__init__(self,data)
self.s1 = self.a + self.b + self.c + self.d + self.e
self.s2 = self.b + self.c + self.d + self.e
self.s3 = self.c + self.d + self.e
self.s4 = self.d + self.e
self.s5 = self.a + self.b + self.c + self.d
self.s6 = self.a + self.b + self.c
self.s7 = self.a + self.b
COUNT = 100000
UNIQUE = 5000
samples = set()
chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789:"|;<>?,./[]{}-=_+!@£$%^&*()'
from random import choice
while len(samples) != UNIQUE:
samples.add(choice(chars)+choice(chars)+choice(chars)+choice(chars)+choice(chars))
samples = list(samples)
for klass in [klass1,klass2,klass3,klass4,klass5,klass6,klass7,klass8,klass9]:
cache = {}
start = time.time()
for val in xrange(COUNT):
val = val % UNIQUE
_ = klass(samples[val])
end = time.time()
time1 = end-start
print COUNT,'iterations of',klass.__name__,'with',UNIQUE,'uniques classes'
print "time instance %d" % time1
cache = Cache()
start = time.time()
for val in xrange(COUNT):
val = val % UNIQUE
if val in cache:
_ = cache.retrieve(val)
else:
_ = cache.cache(val, klass(samples[val]))
end = time.time()
time2 = end-start
print "time cached %d" % time2
print "speedup %.3f" % (time1/time2)
print
|
jbfavre/exabgp
|
lib/exabgp/util/cache.py
|
Python
|
bsd-3-clause
| 3,608
|
"""Created By: Andrew Ryan DeFilippis"""
print('Lambda cold-start...')
import random
import re
import string
import boto3
import os
from botocore.config import Config
from botocore.exceptions import ClientError
from json import dumps, loads
# Disable 'testing_locally' when deploying to AWS Lambda.
testing_locally = True
verbose = True
debug = False
# Set the Lambda Function environment variable named "ddbTable" with
# the table name or statically set the DDB Table name below.
DDB_TABLE = ""
# Define length of unique IDs.
ID_LENGTH = 7
# Debug level logging.
if debug:
boto3.set_stream_logger(name='botocore')
verbose = True
# Initiate boto3 DynamoDB client.
ddb_conf = Config(connect_timeout=0.5, read_timeout=1)
ddbc = boto3.client('dynamodb', config=ddb_conf)
class CWLogs(object):
"""Define the structure of log events to match all other CloudWatch Log Events logged by AWS Lambda.
"""
def __init__(self, context):
"""Define the instance of the context object.
:param context: The Lambda context object.
"""
self.context = context
def event(self, message, event_prefix='LOG'):
# type: (any, str) -> None
"""Print an event into the CloudWatch Logs stream for the Function's invocation.
:param message: The information to be logged (required).
:param event_prefix: The prefix that appears before the 'RequestId' (default 'LOG').
:return:
"""
print('{} RequestId: {}\t{}'.format(
event_prefix,
self.context.aws_request_id,
message
))
return None
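# Illustrative sketch (hypothetical, not from the original module): CWLogs.event()
# prints lines shaped like "LOG RequestId: <aws_request_id>\t<message>", matching
# Lambda's own START/END/REPORT records. The context stub below is a made-up
# stand-in for the real Lambda context object.
def _example_cwlogs_usage():
    class _FakeContext(object):
        aws_request_id = '00000000-0000-0000-0000-000000000000'
    CWLogs(_FakeContext()).event('hello', event_prefix='INFO')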
class APIGWProxy(object):
"""Define the Lambda Proxy interaction with AWS API Gateway.
"""
    def __init__(self, log):
        """Define the instance of the log object.
        :param log: CloudWatch Logs context object.
"""
self.log = log
def response(self, status_code, body_is_base64_encoded=False, headers=None, body=None):
# type: (int, bool, dict, str) -> dict
"""Return an API Gateway Lambda Proxy response object.
:param status_code: The response status code.
:param body_is_base64_encoded: Is the body a base64 encoded string?
:param headers: The response headers.
:param body: The response body.
:return: An API Gateway Lambda Proxy response object.
"""
response_object = {'statusCode': int(status_code)}
if headers is not None:
response_object['headers'] = dict(headers)
if body is not None:
response_object['body'] = str(body)
response_object['isBase64Encoded'] = bool(body_is_base64_encoded)
# Log the API Gateway Lambda Proxy response object.
if verbose:
self.log.event('Response: {}'.format(dumps(response_object)))
return response_object
def status_301(self, location):
# type: (str) -> dict
"""Return a response with a 301 status code.
:param location: URL to be redirected to.
:return: An API Gateway Lambda Proxy response object.
"""
return self.response(
status_code=301,
headers={
'Content-Type': 'text/html',
'Location': location
}
)
def status_400(self):
# type: () -> dict
"""Return a response with a 400 status code.
:return: An API Gateway Lambda Proxy response object.
"""
return self.response(
status_code=400,
headers={
'Content-Type': 'text/html'
},
            body='<html><head><title>400 - Bad Request</title></head><body><center><h1>400 - Bad Request</h1></center></body></html>'
)
def status_404(self):
# type: () -> dict
"""Return a response with a 404 status code.
:return: An API Gateway Lambda Proxy response object.
"""
return self.response(
status_code=404,
headers={
'Content-Type': 'text/html'
},
            body='<html><head><title>404 - Page Not Found</title></head><body><center><h1>404 - Page Not Found</h1></center></body></html>'
)
def status_405(self):
# type: () -> dict
"""Return a response with a 405 status code.
:return: An API Gateway Lambda Proxy response object.
"""
return self.response(
status_code=405,
headers={
'Content-Type': 'text/html'
},
            body='<html><head><title>405 - Method Not Allowed</title></head><body><center><h1>405 - Method Not Allowed</h1></center></body></html>'
)
def status_500(self):
# type: () -> dict
"""Return a response with a 500 status code.
:return: An API Gateway Lambda Proxy response object.
"""
return self.response(
status_code=500,
headers={
'Content-Type': 'text/html'
},
            body='<html><head><title>500 - Internal Server Error</title></head><body><center><h1>500 - Internal Server Error</h1></center></body></html>'
)
class DynamoDBLogic(object):
"""Define the get_item and put_item request structure for shortened IDs.
"""
def __init__(self, event, log, id_length):
"""Define the instance of the log object and id_length value.
:param event: Ingested JSON event object provided at invocation.
:param log: CloudWatch Logs context object.
:param id_length: Length of the resource IDs being generated.
"""
self.event = event
self.log = log
self.id_length = id_length
def get_url(self):
        # type: () -> str
"""Retrieve a stored URL from DynamoDB based on the resource ID.
:return: URL stored in DynamoDB associated with the specified resource ID.
"""
try:
path = self.event['resource']
url_id = path.split("/")[1]
try:
# The ID must match the expected length.
if len(url_id) != ID_LENGTH:
raise ValueError('Invalid ID Length')
# Only "/a1b2c3d" and "/a1b2c3d/" are valid paths.
elif len(path.split("/")[2]) > 0:
raise ValueError('Invalid Path')
except IndexError:
# The path is a match to what we expect to receive.
pass
# Return the URL stored in DynamoDB.
ddb_response = ddbc.get_item(
TableName=os.getenv('ddbTable', DDB_TABLE),
Key={
'ID': {'S': url_id}
}
)
# Log the DynamoDB response object.
if verbose:
self.log.event('DDB: {}'.format(ddb_response))
try:
url = ddb_response['Item']['Endpoint']['S']
except KeyError:
raise KeyError('Item does not exist')
return url
except Exception:
raise
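    # Illustrative example (not part of the original class): for a request to
    # '/a1b2c3d', event['resource'] is '/a1b2c3d', url_id becomes 'a1b2c3d', and
    # the DynamoDB lookup key is {'ID': {'S': 'a1b2c3d'}}.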
def set_url(self, url):
        # type: (str) -> str
"""Store a URL and unique ID in DynamoDB.
:param url: URL to be stored in DynamoDB.
:return: Unique resource ID associated with the URL.
"""
try:
url_id = id_gen(self.id_length)
location = url
# Store the URL and unique resource ID in DynamoDB if the resource ID does not already exist.
ddb_response = ddbc.put_item(
TableName=os.getenv('ddbTable', DDB_TABLE),
Item={
'ID': {'S': url_id},
'Endpoint': {'S': location}
},
ConditionExpression='attribute_not_exists(ID)'
)
# Log the DynamoDB response object.
if verbose:
self.log.event('DDB: {}'.format(ddb_response))
return url_id
except ClientError as e:
# If an item with the same unique resource ID already exists, then generate a new one.
if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
                return self.set_url(url)
else:
raise
except Exception:
raise
def id_gen(id_length):
# type: (int) -> str
"""Generate a unique alpha-numeric ID of a specified length.
:param id_length: Length of unique string.
:return: Unique string.
"""
selection = string.ascii_letters + string.digits
return ''.join(random.choice(selection) for _ in range(id_length))
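# Illustrative example (not part of the original module): id_gen(7) returns a
# random 7-character string drawn from ASCII letters and digits, e.g. 'a1B2c3D'
# (the exact value varies per call).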
def lambda_handler(event, context):
"""AWS Lambda executes the 'lambda_handler' function on invocation.
:param event: Ingested JSON event object provided at invocation.
:param context: Lambda context object, containing information specific to the invocation and Function.
:return: Final response to AWS Lambda, and passed to the invoker if the invocation type is RequestResponse.
"""
# Instantiate our CloudWatch logging class.
log = CWLogs(context)
# Instantiate our API Gateway Lambda Proxy class.
apigw = APIGWProxy(log)
# Instantiate our DynamoDB URL logic class.
dynamodb = DynamoDBLogic(event, log, ID_LENGTH)
# Log the event object provided to the Lambda Function at invocation.
if verbose:
log.event('Event: {}'.format(dumps(event)))
# Request processing logic.
try:
if event['httpMethod'] == 'GET':
try:
# Request the URL from DynamoDB.
location = dynamodb.get_url()
except ValueError as e:
log.event('Error: {}'.format(e))
return apigw.status_404()
except KeyError as e:
log.event('Error: {}'.format(e))
return apigw.status_404()
except Exception:
raise
# Return "301: Permanent Redirect" with the location stored in DynamoDB.
return apigw.status_301(location)
elif event['httpMethod'] == 'POST':
try:
url = event['headers']['URL']
# Check for a valid URL.
if not re.match('^[a-z0-9]+://.*', url):
raise KeyError('Missing protocol prefix in URL')
# Store the URL in DynamoDB.
url_id = dynamodb.set_url(url=url)
except KeyError as e:
log.event('Error: {}'.format(e))
return apigw.status_400()
except Exception:
raise
# Return "200: Ok" with the shortened URL.
return apigw.response(
status_code=200,
headers={'Content-Type': 'application/json'},
body=dumps({'UrlId': url_id})
)
else:
# Return "405: Method Not Allowed".
return apigw.status_405()
except Exception as e:
log.event('Error: {}'.format(e))
# Return "500: Internal Server Error".
return apigw.status_500()
def local_test():
"""Testing on a local development machine (outside of AWS Lambda) is made possible by...
"""
import context
with open('event.json', 'r') as f:
event = loads(f.read())
print('\nFunction Log:\n')
lambda_handler(event, context)
if testing_locally:
local_test()
|
andrewdefilippis/aws-lambda
|
Functions/Python/url_shortening_service/lambda_function.py
|
Python
|
apache-2.0
| 11,567
|
#-*- coding: utf-8 -*-
__author__ = 'rdk'
from .importer import import_to_db
|
renaud-dk/invoice_generator
|
app/utils/__init__.py
|
Python
|
gpl-3.0
| 77
|
import _plotly_utils.basevalidators
class IdsValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(self, plotly_name="ids", parent_name="contour", **kwargs):
super(IdsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "data"),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/contour/_ids.py
|
Python
|
mit
| 432
|
from datetime import date
from unittest import TestCase
import warnings
from . import GenericCalendarTest
from ..europe import (
Scotland, Aberdeen, Angus, Arbroath, Ayr, CarnoustieMonifieth, Clydebank,
DumfriesGalloway, Dundee, EastDunbartonshire, Edinburgh, Elgin, Falkirk,
Fife, Galashiels, Glasgow, Hawick, Inverclyde, Inverness, Kilmarnock,
Lanark, Linlithgow, Lochaber, NorthLanarkshire, Paisley, Perth,
ScottishBorders, SouthLanarkshire, Stirling, WestDunbartonshire,
)
class GoodFridayTestMixin:
def test_good_friday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 3, 30), holidays)
class EasterMondayTestMixin:
def test_easter_monday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 4, 2), holidays)
class SpringHolidayFirstMondayAprilTestMixin:
def test_spring_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 4, 2), holidays)
class SpringHolidaySecondMondayAprilTestMixin:
def test_spring_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 4, 9), holidays)
class SpringHolidayLastMondayMayTestMixin:
def test_spring_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 5, 28), holidays)
class FairHolidayLastMondayJuneTestMixin:
def test_fair_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 6, 25), holidays)
class FairHolidayFirstMondayJulyTestMixin:
def test_fair_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 7, 2), holidays)
class FairHolidaySecondMondayJulyTestMixin:
def test_fair_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 7, 9), holidays)
class FairHolidayThirdMondayJulyTestMixin:
def test_fair_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 7, 16), holidays)
class FairHolidayLastMondayJulyTestMixin:
def test_fair_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 7, 30), holidays)
class FairHolidayFourthFridayJulyTestMixin:
def test_fair_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 7, 27), holidays)
class FairHolidayFirstMondayAugustTestMixin:
def test_fair_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 8, 6), holidays)
class LateSummerTestMixin:
def test_late_summer(self):
# First monday of september
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 9, 3), holidays)
class BattleStirlingBridgeTestMixin:
def test_stirling(self):
# Second monday of september
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 9, 10), holidays)
class AutumnHolidayLastMondaySeptemberTestMixin:
def test_autumn_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 9, 24), holidays)
class AutumnHolidayFirstMondayOctoberTestMixin:
def test_autumn_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 10, 1), holidays)
class AutumnHolidaySecondMondayOctoberTestMixin:
def test_autumn_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 10, 8), holidays)
class AutumnHolidayThirdMondayOctoberTestMixin:
def test_autumn_holiday(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 10, 15), holidays)
class SaintAndrewTestMixin:
def test_saint_andrew(self):
# St. Andrew's day happens on November 30th
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 11, 30), holidays)
class VictoriaDayLastMondayMayTestMixin:
def test_victoria_day(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 5, 28), holidays)
class SpringHolidayTuesdayAfterFirstMondayMayTestMixin:
def test_spring_holiday_2018(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 5, 8), holidays)
def test_spring_holiday_2017(self):
holidays = self.cal.holidays_set(2017)
self.assertIn(date(2017, 5, 2), holidays)
class VictoriaDayFirstMondayJuneTestMixin:
def test_victoria_day(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 6, 4), holidays)
class VictoriaDayFourthMondayMayTestMixin:
def test_victoria_day(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 5, 28), holidays)
class AyrGoldCupTestMixin:
"""
Ayr Gold cup - two holidays for Ayr and Kilmarnock
"""
def test_ayr_gold_cup(self):
# Specific holidays in Ayr:
# * 3rd Friday in September
# * + the following Monday
holidays = self.cal.holidays_set(2018)
gold_cup_friday = date(2018, 9, 21)
gold_cup_monday = date(2018, 9, 24)
self.assertIn(gold_cup_friday, holidays)
self.assertIn(gold_cup_monday, holidays)
# Testing labels
holidays = self.cal.holidays(2018)
holidays_dict = dict(holidays)
self.assertEqual(holidays_dict[gold_cup_friday], "Ayr Gold Cup Friday")
self.assertEqual(holidays_dict[gold_cup_monday], "Ayr Gold Cup Monday")
# -----------------------------------------------------------------------------
class SpringHolidayTestCase(TestCase):
def test_not_implemented_error(self):
class FakeCalendar(Scotland):
include_spring_holiday = True
cal = FakeCalendar()
with self.assertRaises(NotImplementedError):
cal.holidays_set(2018)
def test_correct_implementation(self):
class FakeCalendar(Scotland):
include_spring_holiday = True
def get_spring_holiday(self, year):
return date(year, 1, 1), "Spring Holiday"
cal = FakeCalendar()
self.assertTrue(cal.holidays_set(2018))
class VictoriaDayTestCase(TestCase):
def test_not_implemented_error(self):
class FakeCalendar(Scotland):
include_victoria_day = True
cal = FakeCalendar()
with self.assertRaises(NotImplementedError):
cal.holidays_set(2018)
def test_correct_implementation(self):
class FakeCalendar(Scotland):
include_victoria_day = True
def get_victoria_day(self, year):
return date(year, 1, 1), "Victoria Day"
cal = FakeCalendar()
self.assertTrue(cal.holidays_set(2018))
class FairHolidayTestCase(TestCase):
def test_not_implemented_error(self):
class FakeCalendar(Scotland):
include_fair_holiday = True
cal = FakeCalendar()
with self.assertRaises(NotImplementedError):
cal.holidays_set(2018)
def test_correct_implementation(self):
class FakeCalendar(Scotland):
include_fair_holiday = True
def get_fair_holiday(self, year):
return date(year, 1, 1), "Fair Holiday"
cal = FakeCalendar()
self.assertTrue(cal.holidays_set(2018))
class AutumnHolidayTestCase(TestCase):
def test_not_implemented_error(self):
class FakeCalendar(Scotland):
include_autumn_holiday = True
cal = FakeCalendar()
with self.assertRaises(NotImplementedError):
cal.holidays_set(2018)
def test_correct_implementation(self):
class FakeCalendar(Scotland):
include_autumn_holiday = True
def get_autumn_holiday(self, year):
return date(year, 1, 1), "Autumn Holiday"
cal = FakeCalendar()
self.assertTrue(cal.holidays_set(2018))
class ScotlandTest(GenericCalendarTest):
"""
Generic Scotland test calendar.
Scotland calendar includes the generic holidays + GoodFriday
Some towns or cities don't necessarily observe it.
"""
cal_class = Scotland
def test_init_warning(self):
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
# Cause all warnings to always be triggered.
# Trigger a warning.
self.cal_class()
# Verify some things
assert len(w) == 1
assert issubclass(w[-1].category, UserWarning)
assert "experimental" in str(w[-1].message)
# Back to normal filtering
warnings.simplefilter("ignore")
def test_year_2018(self):
holidays = self.cal.holidays_set(2018)
self.assertIn(date(2018, 1, 1), holidays) # New year's day
self.assertIn(date(2018, 1, 2), holidays) # New year holiday
self.assertIn(date(2018, 5, 7), holidays) # May day
self.assertIn(date(2018, 12, 25), holidays) # XMas
self.assertIn(date(2018, 12, 26), holidays) # Boxing day
def test_good_friday(self):
# By default, Good Friday is not a holiday
holidays = self.cal.holidays_set(2018)
self.assertNotIn(date(2018, 3, 30), holidays)
class ScotlandAberdeenTest(
GoodFridayTestMixin,
FairHolidaySecondMondayJulyTestMixin,
AutumnHolidayLastMondaySeptemberTestMixin,
ScotlandTest):
cal_class = Aberdeen
class ScotlandAngusTest(
SpringHolidaySecondMondayAprilTestMixin,
AutumnHolidayLastMondaySeptemberTestMixin,
SaintAndrewTestMixin,
ScotlandTest):
cal_class = Angus
class ScotlandArbroathTest(
FairHolidayThirdMondayJulyTestMixin, ScotlandTest):
cal_class = Arbroath
class ScotlandAyrTest(
GoodFridayTestMixin,
EasterMondayTestMixin,
SpringHolidayLastMondayMayTestMixin,
AyrGoldCupTestMixin,
ScotlandTest):
cal_class = Ayr
class ScotlandCarnoustieMonifiethTest(
SpringHolidayFirstMondayAprilTestMixin,
AutumnHolidayFirstMondayOctoberTestMixin,
ScotlandTest):
cal_class = CarnoustieMonifieth
class ScotlandClydebankTest(
SpringHolidayTuesdayAfterFirstMondayMayTestMixin,
ScotlandTest):
cal_class = Clydebank
class ScotlandDumfriesGallowayTest(GoodFridayTestMixin, ScotlandTest):
cal_class = DumfriesGalloway
class ScotlandDundeeTest(
SpringHolidayFirstMondayAprilTestMixin,
VictoriaDayLastMondayMayTestMixin,
FairHolidayLastMondayJulyTestMixin,
AutumnHolidayFirstMondayOctoberTestMixin,
ScotlandTest):
cal_class = Dundee
class ScotlandEastDunbartonshireTest(
GoodFridayTestMixin, EasterMondayTestMixin,
SpringHolidayLastMondayMayTestMixin,
FairHolidayThirdMondayJulyTestMixin,
AutumnHolidayLastMondaySeptemberTestMixin,
ScotlandTest):
cal_class = EastDunbartonshire
class ScotlandEdinburghTest(
GoodFridayTestMixin, EasterMondayTestMixin,
ScotlandTest):
cal_class = Edinburgh
def test_edinburgh_spring_holiday(self):
# Stated as the 3rd Monday in April...
holidays = self.cal.holidays_set(2018)
spring_holiday = date(2018, 4, 16)
self.assertIn(spring_holiday, holidays)
# ... except if it falls on Easter Monday
# Then it's shifted to the previous week.
# That was the case in 2017
holidays = self.cal.holidays(2017)
holidays_dict = dict(holidays)
easter_monday = date(2017, 4, 17)
spring_holiday = date(2017, 4, 10)
self.assertIn(easter_monday, holidays_dict)
self.assertIn(spring_holiday, holidays_dict)
self.assertEqual(holidays_dict[easter_monday], "Easter Monday")
self.assertEqual(holidays_dict[spring_holiday], "Spring Holiday")
def test_edinburgh_victoria_day(self):
# The Monday strictly before May 24th
holidays = self.cal.holidays_set(2018)
victoria_day = date(2018, 5, 21)
self.assertIn(victoria_day, holidays)
# In 2010, May 24th was a monday, so Victoria Day is on 17th.
holidays = self.cal.holidays_set(2010)
victoria_day = date(2010, 5, 17)
self.assertIn(victoria_day, holidays)
    def test_edinburgh_autumn_holiday(self):
# Third Monday in September
holidays = self.cal.holidays_set(2018)
autumn_holiday = date(2018, 9, 17)
self.assertIn(autumn_holiday, holidays)
class ScotlandElginTest(
SpringHolidaySecondMondayAprilTestMixin,
FairHolidayLastMondayJuneTestMixin,
LateSummerTestMixin,
AutumnHolidayThirdMondayOctoberTestMixin,
ScotlandTest):
cal_class = Elgin
class ScotlandFalkirkTest(
GoodFridayTestMixin,
EasterMondayTestMixin,
FairHolidayFirstMondayJulyTestMixin,
BattleStirlingBridgeTestMixin,
ScotlandTest):
cal_class = Falkirk
class ScotlandFifeTest(
VictoriaDayFirstMondayJuneTestMixin,
FairHolidayThirdMondayJulyTestMixin,
AutumnHolidayThirdMondayOctoberTestMixin,
SaintAndrewTestMixin,
ScotlandTest):
cal_class = Fife
def test_spring_holiday(self):
# Special computation rule, Fife has TWO spring holidays
holidays = self.cal.holidays_set(2018)
# First MON in April
self.assertIn(date(2018, 4, 2), holidays)
# First MON in June
self.assertIn(date(2018, 6, 4), holidays)
class ScotlandGalashiels(VictoriaDayFirstMondayJuneTestMixin, ScotlandTest):
cal_class = Galashiels
def test_braw_lads_gathering(self):
# First friday in July
holidays = self.cal.holidays_set(2018)
braw_lads_gathering = date(2018, 7, 6)
self.assertIn(braw_lads_gathering, holidays)
class ScotlandGlasgowTest(
EasterMondayTestMixin,
SpringHolidayLastMondayMayTestMixin,
FairHolidayThirdMondayJulyTestMixin,
AutumnHolidayLastMondaySeptemberTestMixin,
ScotlandTest):
cal_class = Glasgow
class ScotlandHawickTest(ScotlandTest):
cal_class = Hawick
def test_common_riding(self):
# Friday after first monday in june & saturday
holidays = self.cal.holidays_set(2018)
common_riding_day1 = date(2018, 6, 8)
common_riding_day2 = date(2018, 6, 9)
self.assertIn(common_riding_day1, holidays)
self.assertIn(common_riding_day2, holidays)
# https://www.inverclyde.gov.uk/council-and-government/council-public-holidays
# These documents say that Spring Holidays happened:
# * on April 11th 2016 (second monday of April)
# * on April 24th 2017 (last monday April)
# * on April 16th 2018 (3rd monday April)
# * on April 29th 2019 (last monday April)
# ...
# I think I'm becoming crazy
class ScotlandInverclydeTest(
GoodFridayTestMixin, EasterMondayTestMixin,
LateSummerTestMixin,
ScotlandTest):
cal_class = Inverclyde
def test_spring_holiday(self):
        # Special computation rule, Inverclyde has TWO spring holidays
holidays = self.cal.holidays_set(2018)
# Last MON in April
self.assertIn(date(2018, 4, 30), holidays)
# First MON in June
self.assertIn(date(2018, 6, 4), holidays)
class ScotlandInvernessTest(
SpringHolidayFirstMondayAprilTestMixin,
FairHolidayFirstMondayJulyTestMixin,
AutumnHolidayFirstMondayOctoberTestMixin,
ScotlandTest):
cal_class = Inverness
def test_winter_february(self):
# First MON of February
holidays = self.cal.holidays_set(2018)
winter_february = date(2018, 2, 5)
self.assertIn(winter_february, holidays)
def test_winter_march(self):
# First MON of March
holidays = self.cal.holidays_set(2018)
winter_march = date(2018, 3, 5)
self.assertIn(winter_march, holidays)
def test_samhain_holiday(self):
# First MON of November
holidays = self.cal.holidays_set(2018)
samhain_holiday = date(2018, 11, 5)
self.assertIn(samhain_holiday, holidays)
class ScotlandKilmarnockTest(
GoodFridayTestMixin, EasterMondayTestMixin,
AyrGoldCupTestMixin,
ScotlandTest):
cal_class = Kilmarnock
class ScotlandLanarkTest(ScotlandTest):
cal_class = Lanark
def test_lanimer_day(self):
# Second THU in June
holidays = self.cal.holidays_set(2018)
lanimer_day = date(2018, 6, 14)
self.assertIn(lanimer_day, holidays)
class ScotlandLinlithgowTest(ScotlandTest):
cal_class = Linlithgow
def test_linlithgow_marches(self):
# Linlithgow marches is on TUE after the 2nd THU in June
holidays = self.cal.holidays_set(2018)
linlithgow_marches = date(2018, 6, 19)
self.assertIn(linlithgow_marches, holidays)
class ScotlandLochaberTest(ScotlandTest):
cal_class = Lochaber
def test_winter_holiday(self):
# Winter holiday is on last MON in March.
holidays = self.cal.holidays_set(2018)
winter_holiday = date(2018, 3, 26)
self.assertIn(winter_holiday, holidays)
# Not the 4th, the *last*
holidays = self.cal.holidays_set(2015)
winter_holiday = date(2015, 3, 30)
self.assertIn(winter_holiday, holidays)
class ScotlandNorthLanarkshireTest(
EasterMondayTestMixin,
SpringHolidayLastMondayMayTestMixin,
FairHolidayThirdMondayJulyTestMixin,
AutumnHolidayLastMondaySeptemberTestMixin,
ScotlandTest):
cal_class = NorthLanarkshire
class ScotlandPaisleyTest(
GoodFridayTestMixin, EasterMondayTestMixin,
VictoriaDayLastMondayMayTestMixin,
FairHolidayFirstMondayAugustTestMixin,
AutumnHolidayLastMondaySeptemberTestMixin,
ScotlandTest):
cal_class = Paisley
class ScotlandPerthTest(
SpringHolidayFirstMondayAprilTestMixin,
VictoriaDayFourthMondayMayTestMixin,
BattleStirlingBridgeTestMixin,
AutumnHolidayFirstMondayOctoberTestMixin,
ScotlandTest):
cal_class = Perth
class ScotlandScottishBordersTest(
SpringHolidayFirstMondayAprilTestMixin,
FairHolidayFourthFridayJulyTestMixin,
AutumnHolidaySecondMondayOctoberTestMixin,
SaintAndrewTestMixin,
ScotlandTest):
cal_class = ScottishBorders
class ScotlandSouthLanarkshireTest(
GoodFridayTestMixin,
EasterMondayTestMixin,
SpringHolidayLastMondayMayTestMixin,
FairHolidayThirdMondayJulyTestMixin,
AutumnHolidayLastMondaySeptemberTestMixin,
ScotlandTest):
cal_class = SouthLanarkshire
class ScotlandStirlingTest(
GoodFridayTestMixin,
EasterMondayTestMixin,
SpringHolidayTuesdayAfterFirstMondayMayTestMixin,
BattleStirlingBridgeTestMixin,
ScotlandTest):
cal_class = Stirling
class ScotlandWestDunbartonshireTest(
GoodFridayTestMixin,
EasterMondayTestMixin,
AutumnHolidayLastMondaySeptemberTestMixin,
ScotlandTest):
cal_class = WestDunbartonshire
|
novapost/workalendar
|
workalendar/tests/test_scotland.py
|
Python
|
mit
| 19,206
|
import errno
import subprocess
import fnmatch
import os
import yaml
import os.path as path
import yapp
import logging
def makeDir(path):
"""
Make a dir but ignore if it exists.
"""
try:
os.mkdir(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
class YappProcessor:
def __init__(self,root_dir, verbosity=logging.ERROR):
self.root_dir = root_dir
self.verbosity = verbosity
self.log = logging.getLogger("Yapp")
self.log.setLevel(verbosity)
ch = logging.StreamHandler()
ch.setLevel(verbosity)
#formatter = logging.Formatter('%(levelname)s - %(message)s')
#ch.setFormatter(formatter)
self.log.addHandler(ch)
def process(self):
"""
Process the dir
"""
yapp_files = [path.join(root, file) for (root, dirs, files) in os.walk(self.root_dir) for file in files if file[-len(yapp.CONFIG_EXTENSION):] == yapp.CONFIG_EXTENSION]
if len(yapp_files) == 0:
self.log.warn("No config (*{ext}) files found".format(ext=yapp.CONFIG_EXTENSION))
for yapp_file in yapp_files:
if self.verbosity:
self.log.info("Processing {file}".format(file=yapp_file))
self.process_config(yapp_file)
def process_config(self, yapp_file):
"""
Process a single yapp file
"""
#noinspection PyBroadException
try:
with open(yapp_file,'r') as file:
config = yaml.load(file)
except:
self.log.exception("\tError loading config from {file}".format(file=yapp_file))
return
#Find all the input matching files
dir = path.dirname(yapp_file)
files = [path.join(dir,file) for file in os.listdir(dir) if fnmatch.fnmatch(file, config['input_file_pattern'])]
self.log.info("\tFound {num} files".format(num=len(files)))
outdir = yapp_file[:-len(yapp.CONFIG_EXTENSION)]
#Make the output dir
makeDir(outdir)
for file in files:
outfile = path.join(outdir, path.basename(file))
#Check if an old outfile exists and check if it needs to be updated
if path.exists(outfile) and path.getmtime(outfile) > path.getmtime(file):
self.log.info("\t\t{file} already has up-to-date output".format(file=file))
continue
tempfile = outfile+'.working'
errfile = outfile+'.err'
if path.exists(tempfile):
self.log.info("\t\t{file} is being worked on".format(file=file))
continue
self.log.info("\t\t{file} processing....".format(file=file))
with open(tempfile, 'w') as tempfile_f, open(errfile, 'w') as errfile_f:
command = config['command'].format(input_file=file)
ret_code = subprocess.Popen(command, bufsize=-1, stdout=tempfile_f, stderr=errfile_f, cwd=outdir, shell=True,).wait()
#Delete the errfile if empty
if ret_code == 0:
os.rename(tempfile, outfile)
if path.getsize(errfile) == 0:
os.remove(errfile)
self.log.info("\t\t{file} success".format(file=file))
else:
os.rename(tempfile, outfile+'.output_before_err')
self.log.warn("\t\t{file} failed with code: {code}".format(file=file, code=ret_code))
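# Illustrative note (not part of the original module): a yapp config file is a
# YAML document providing 'input_file_pattern' and 'command', where '{input_file}'
# is substituted for each matched file. An assumed example config could look like:
#
#   input_file_pattern: "*.txt"
#   command: "wc -l {input_file}"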
|
benjeffery/yapp
|
yapp/core.py
|
Python
|
mit
| 3,481
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2005,2006,2007,2008,2009 Brett Adams <brett@belizebotanic.org>
# Copyright (c) 2012-2015 Mario Frasca <mario@anche.no>
#
# This file is part of bauble.classic.
#
# bauble.classic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# bauble.classic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with bauble.classic. If not, see <http://www.gnu.org/licenses/>.
#
# meta.py
#
from sqlalchemy import Unicode, UnicodeText, Column
import bauble.db as db
import bauble.utils as utils
VERSION_KEY = u'version'
CREATED_KEY = u'created'
REGISTRY_KEY = u'registry'
# date format strings:
# yy - short year
# yyyy - long year
# dd - number day, always two digits
# d - number day, two digits when necessary
# mm -number month, always two digits
# m - number month, two digits when necessary
DATE_FORMAT_KEY = u'date_format'
def get_default(name, default=None, session=None):
"""
Get a BaubleMeta object with name. If the default value is not
None then a BaubleMeta object is returned with name and the
default value given.
If a session instance is passed (session != None) then we
don't commit the session.
"""
commit = False
if not session:
session = db.Session()
commit = True
query = session.query(BaubleMeta)
meta = query.filter_by(name=name).first()
if not meta and default is not None:
meta = BaubleMeta(name=utils.utf8(name), value=default)
session.add(meta)
if commit:
session.commit()
# load the properties so that we can close the session and
# avoid getting errors when accessing the properties on the
# returned meta
meta.value
meta.name
if commit:
# close the session whether we added anything or not
session.close()
return meta
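# Illustrative usage (not part of the original module; the default value below is
# an assumption for the example only): fetch a stored value, creating it with a
# default on first access:
#
#   date_format = get_default(DATE_FORMAT_KEY, u'dd-mm-yyyy').value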
class BaubleMeta(db.Base):
"""
The BaubleMeta class is used to set and retrieve meta information
based on key/name values from the bauble meta table.
:Table name: bauble
:Columns:
*name*:
The name of the data.
*value*:
The value.
"""
__tablename__ = 'bauble'
name = Column(Unicode(64), unique=True)
value = Column(UnicodeText)
|
mfrasca/bauble.classic
|
bauble/meta.py
|
Python
|
gpl-2.0
| 2,702
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
GeoAlgorithm.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from builtins import str
from builtins import object
import os.path
import traceback
import subprocess
import copy
from qgis.PyQt.QtCore import QCoreApplication
from qgis.core import (QgsProcessingFeedback,
                       QgsSettings,
                       QgsFeatureSink,
QgsProcessingAlgorithm,
QgsProject,
QgsProcessingUtils,
QgsProcessingException,
QgsProcessingParameterDefinition,
QgsMessageLog)
from qgis.gui import QgsHelp
from processing.core.ProcessingConfig import ProcessingConfig
from processing.core.parameters import ParameterRaster, ParameterVector, ParameterMultipleInput, ParameterTable, Parameter
from processing.core.outputs import OutputVector, OutputRaster, OutputTable, OutputHTML, Output
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools import dataobjects
class GeoAlgorithm(QgsProcessingAlgorithm):
def __init__(self):
super().__init__()
# Outputs generated by the algorithm
self.outputs = list()
# The crs taken from input layers (if possible), and used when
# loading output layers
self.crs = None
# If the algorithm is run as part of a model, the parent model
# can be set in this variable, to allow for customized
# behavior, in case some operations should be run differently
# when running as part of a model
self.model = None
# methods to overwrite when creating a custom geoalgorithm
def processAlgorithm(self, parameters, context, feedback):
"""Here goes the algorithm itself.
There is no return value from this method.
A QgsProcessingException should be raised in case
something goes wrong.
:param parameters:
:param context:
"""
pass
def getCustomModelerParametersDialog(self, modelAlg, algName=None):
"""If the algorithm has a custom parameters dialog when called
from the modeler, it should be returned here, ready to be
executed.
"""
return None
def processBeforeAddingToModeler(self, alg, model):
"""Add here any task that has to be performed before adding an algorithm
to a model, such as changing the value of a parameter depending on value
of another one"""
pass
# =========================================================
def execute(self, parameters, context=None, feedback=None, model=None):
"""The method to use to call a processing algorithm.
Although the body of the algorithm is in processAlgorithm(),
it should be called using this method, since it performs
some additional operations.
Raises a QgsProcessingException in case anything goes
wrong.
:param parameters:
"""
if feedback is None:
feedback = QgsProcessingFeedback()
if context is None:
context = dataobjects.createContext(feedback)
self.model = model
try:
self.setOutputCRS()
self.resolveOutputs()
self.runPreExecutionScript(feedback)
self.processAlgorithm(parameters, context, feedback)
feedback.setProgress(100)
self.convertUnsupportedFormats(context, feedback)
self.runPostExecutionScript(feedback)
except QgsProcessingException as gaee:
lines = [self.tr('Error while executing algorithm')]
lines.append(traceback.format_exc())
QgsMessageLog.logMessage(gaee.msg, self.tr('Processing'), QgsMessageLog.CRITICAL)
raise QgsProcessingException(gaee.msg, lines, gaee)
except Exception as e:
# If something goes wrong and is not caught in the
# algorithm, we catch it here and wrap it
lines = [self.tr('Uncaught error while executing algorithm')]
lines.append(traceback.format_exc())
QgsMessageLog.logMessage('\n'.join(lines), self.tr('Processing'), QgsMessageLog.CRITICAL)
raise QgsProcessingException(str(e) + self.tr('\nSee log for more details'), lines, e)
def runPostExecutionScript(self, feedback):
scriptFile = ProcessingConfig.getSetting(
ProcessingConfig.POST_EXECUTION_SCRIPT)
self.runHookScript(scriptFile, feedback)
def runPreExecutionScript(self, feedback):
scriptFile = ProcessingConfig.getSetting(
ProcessingConfig.PRE_EXECUTION_SCRIPT)
self.runHookScript(scriptFile, feedback)
def runHookScript(self, filename, feedback):
if filename is None or not os.path.exists(filename):
return
try:
script = 'import processing\n'
ns = {}
ns['feedback'] = feedback
ns['alg'] = self
with open(filename) as f:
lines = f.readlines()
for line in lines:
script += line
exec(script, ns)
except Exception as e:
QgsMessageLog.logMessage("Error in hook script: " + str(e), self.tr('Processing'), QgsMessageLog.WARNING)
# A wrong script should not cause problems, so we swallow
# all exceptions
pass
def convertUnsupportedFormats(self, context, feedback):
i = 0
feedback.setProgressText(self.tr('Converting outputs'))
for out in self.outputs:
if isinstance(out, OutputVector):
if out.compatible is not None:
layer = QgsProcessingUtils.mapLayerFromString(out.compatible, context)
if layer is None:
# For the case of memory layer, if the
# getCompatible method has been called
continue
writer = out.getVectorWriter(layer.fields(), layer.wkbType(), layer.crs(), context)
features = QgsProcessingUtils.getFeatures(layer, context)
for feature in features:
writer.addFeature(feature, QgsFeatureSink.FastInsert)
elif isinstance(out, OutputRaster):
if out.compatible is not None:
layer = QgsProcessingUtils.mapLayerFromString(out.compatible, context)
format = self.getFormatShortNameFromFilename(out.value)
orgFile = out.compatible
destFile = out.value
crsid = layer.crs().authid()
settings = QgsSettings()
path = str(settings.value('/GdalTools/gdalPath', ''))
envval = str(os.getenv('PATH'))
if not path.lower() in envval.lower().split(os.pathsep):
envval += '%s%s' % (os.pathsep, path)
os.putenv('PATH', envval)
command = 'gdal_translate -of %s -a_srs %s %s %s' % (format, crsid, orgFile, destFile)
if os.name == 'nt':
command = command.split(" ")
else:
command = [command]
proc = subprocess.Popen(
command,
shell=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=False,
)
proc.communicate()
elif isinstance(out, OutputTable):
if out.compatible is not None:
layer = QgsProcessingUtils.mapLayerFromString(out.compatible, context)
writer = out.getTableWriter(layer.fields())
features = QgsProcessingUtils.getFeatures(layer, context)
for feature in features:
writer.addRecord(feature)
            i += 1
            feedback.setProgress(100 * i / float(len(self.outputs)))
def getFormatShortNameFromFilename(self, filename):
ext = filename[filename.rfind('.') + 1:]
supported = GdalUtils.getSupportedRasters()
for name in list(supported.keys()):
exts = supported[name]
if ext in exts:
return name
return 'GTiff'
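    # Illustrative example (not part of the original class): for a filename such
    # as 'output.tif' this returns the GDAL short name registered for the 'tif'
    # extension (typically 'GTiff'); unknown extensions fall back to 'GTiff'.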
def resolveOutputs(self):
"""Sets temporary outputs (output.value = None) with a
temporary file instead. Resolves expressions as well.
"""
try:
for out in self.outputs:
out.resolveValue(self)
except ValueError as e:
raise QgsProcessingException(str(e))
def setOutputCRS(self):
context = dataobjects.createContext()
layers = QgsProcessingUtils.compatibleLayers(QgsProject.instance())
for param in self.parameterDefinitions():
if isinstance(param, (ParameterRaster, ParameterVector, ParameterMultipleInput)):
if param.value:
if isinstance(param, ParameterMultipleInput):
inputlayers = param.value.split(';')
else:
inputlayers = [param.value]
for inputlayer in inputlayers:
for layer in layers:
if layer.source() == inputlayer:
self.crs = layer.crs()
return
p = QgsProcessingUtils.mapLayerFromString(inputlayer, context)
if p is not None:
self.crs = p.crs()
p = None
return
try:
from qgis.utils import iface
if iface is not None:
self.crs = iface.mapCanvas().mapSettings().destinationCrs()
except:
pass
def addOutput(self, output):
# TODO: check that name does not exist
if isinstance(output, Output):
self.outputs.append(output)
def addParameter(self, param):
# TODO: check that name does not exist
if isinstance(param, Parameter):
self.parameters.append(param)
def setOutputValue(self, outputName, value):
for out in self.outputs:
if out.name == outputName:
out.setValue(value)
def removeOutputFromName(self, name):
for out in self.outputs:
if out.name == name:
self.outputs.remove(out)
def getOutputFromName(self, name):
for out in self.outputs:
if out.name == name:
return out
def getParameterValue(self, name):
for param in self.parameters:
if param.name == name:
return param.value
return None
def getOutputValue(self, name):
for out in self.outputs:
if out.name == name:
return out.value
return None
def tr(self, string, context=''):
if context == '':
context = self.__class__.__name__
return QCoreApplication.translate(context, string)
def trAlgorithm(self, string, context=''):
if context == '':
context = self.__class__.__name__
return string, QCoreApplication.translate(context, string)
def executeAlgorithm(alg, parameters, context=None, feedback=None, model=None):
"""The method to use to call a processing algorithm.
Although the body of the algorithm is in processAlgorithm(),
it should be called using this method, since it performs
some additional operations.
Raises a QgsProcessingException in case anything goes
wrong.
:param parameters:
"""
if feedback is None:
feedback = QgsProcessingFeedback()
if context is None:
context = dataobjects.createContext(feedback)
#self.model = model
#self.setOutputCRS()
#self.resolveOutputs()
#self.evaluateParameterValues()
#self.runPreExecutionScript(feedback)
result, ok = alg.run(parameters, context, feedback)
#self.processAlgorithm(parameters, context, feedback)
feedback.setProgress(100)
return result, ok
#self.convertUnsupportedFormats(context, feedback)
#self.runPostExecutionScript(feedback)
|
nirvn/QGIS
|
python/plugins/processing/core/GeoAlgorithm.py
|
Python
|
gpl-2.0
| 13,468
|
# Copyright (c) 2014 VMware, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import re
import mock
from oslo.vmware import exceptions as vexc
from testtools import matchers
from nova import exception
from nova.i18n import _
from nova.openstack.common import units
from nova import test
from nova.tests.virt.vmwareapi import fake
from nova.virt.vmwareapi import ds_util
class DsUtilTestCase(test.NoDBTestCase):
def setUp(self):
super(DsUtilTestCase, self).setUp()
self.session = fake.FakeSession()
self.flags(api_retry_count=1, group='vmware')
fake.reset()
def tearDown(self):
super(DsUtilTestCase, self).tearDown()
fake.reset()
def test_file_delete(self):
def fake_call_method(module, method, *args, **kwargs):
self.assertEqual('DeleteDatastoreFile_Task', method)
name = kwargs.get('name')
self.assertEqual('[ds] fake/path', name)
datacenter = kwargs.get('datacenter')
self.assertEqual('fake-dc-ref', datacenter)
return 'fake_delete_task'
with contextlib.nested(
mock.patch.object(self.session, '_wait_for_task'),
mock.patch.object(self.session, '_call_method',
fake_call_method)
) as (_wait_for_task, _call_method):
ds_path = ds_util.DatastorePath('ds', 'fake/path')
ds_util.file_delete(self.session,
ds_path, 'fake-dc-ref')
_wait_for_task.assert_has_calls([
mock.call('fake_delete_task')])
def test_file_move(self):
def fake_call_method(module, method, *args, **kwargs):
self.assertEqual('MoveDatastoreFile_Task', method)
sourceName = kwargs.get('sourceName')
self.assertEqual('[ds] tmp/src', sourceName)
destinationName = kwargs.get('destinationName')
self.assertEqual('[ds] base/dst', destinationName)
sourceDatacenter = kwargs.get('sourceDatacenter')
self.assertEqual('fake-dc-ref', sourceDatacenter)
destinationDatacenter = kwargs.get('destinationDatacenter')
self.assertEqual('fake-dc-ref', destinationDatacenter)
return 'fake_move_task'
with contextlib.nested(
mock.patch.object(self.session, '_wait_for_task'),
mock.patch.object(self.session, '_call_method',
fake_call_method)
) as (_wait_for_task, _call_method):
src_ds_path = ds_util.DatastorePath('ds', 'tmp/src')
dst_ds_path = ds_util.DatastorePath('ds', 'base/dst')
ds_util.file_move(self.session,
'fake-dc-ref', src_ds_path, dst_ds_path)
_wait_for_task.assert_has_calls([
mock.call('fake_move_task')])
def test_mkdir(self):
def fake_call_method(module, method, *args, **kwargs):
self.assertEqual('MakeDirectory', method)
name = kwargs.get('name')
self.assertEqual('[ds] fake/path', name)
datacenter = kwargs.get('datacenter')
self.assertEqual('fake-dc-ref', datacenter)
createParentDirectories = kwargs.get('createParentDirectories')
self.assertTrue(createParentDirectories)
with mock.patch.object(self.session, '_call_method',
fake_call_method):
ds_path = ds_util.DatastorePath('ds', 'fake/path')
ds_util.mkdir(self.session, ds_path, 'fake-dc-ref')
def test_file_exists(self):
def fake_call_method(module, method, *args, **kwargs):
if method == 'SearchDatastore_Task':
ds_browser = args[0]
self.assertEqual('fake-browser', ds_browser)
datastorePath = kwargs.get('datastorePath')
self.assertEqual('[ds] fake/path', datastorePath)
return 'fake_exists_task'
# Should never get here
self.fail()
def fake_wait_for_task(task_ref):
if task_ref == 'fake_exists_task':
result_file = fake.DataObject()
result_file.path = 'fake-file'
result = fake.DataObject()
result.file = [result_file]
result.path = '[ds] fake/path'
task_info = fake.DataObject()
task_info.result = result
return task_info
# Should never get here
self.fail()
with contextlib.nested(
mock.patch.object(self.session, '_call_method',
fake_call_method),
mock.patch.object(self.session, '_wait_for_task',
fake_wait_for_task)):
ds_path = ds_util.DatastorePath('ds', 'fake/path')
file_exists = ds_util.file_exists(self.session,
'fake-browser', ds_path, 'fake-file')
self.assertTrue(file_exists)
def test_file_exists_fails(self):
def fake_call_method(module, method, *args, **kwargs):
if method == 'SearchDatastore_Task':
return 'fake_exists_task'
# Should never get here
self.fail()
def fake_wait_for_task(task_ref):
if task_ref == 'fake_exists_task':
raise vexc.FileNotFoundException()
# Should never get here
self.fail()
with contextlib.nested(
mock.patch.object(self.session, '_call_method',
fake_call_method),
mock.patch.object(self.session, '_wait_for_task',
fake_wait_for_task)):
ds_path = ds_util.DatastorePath('ds', 'fake/path')
file_exists = ds_util.file_exists(self.session,
'fake-browser', ds_path, 'fake-file')
self.assertFalse(file_exists)
def _mock_get_datastore_calls(self, *datastores):
"""Mock vim_util calls made by get_datastore."""
datastores_i = [None]
# For the moment, at least, this list of datastores is simply passed to
# get_properties_for_a_collection_of_objects, which we mock below. We
# don't need to over-complicate the fake function by worrying about its
# contents.
fake_ds_list = ['fake-ds']
def fake_call_method(module, method, *args, **kwargs):
# Mock the call which returns a list of datastores for the cluster
if (module == ds_util.vim_util and
method == 'get_dynamic_property' and
args == ('fake-cluster', 'ClusterComputeResource',
'datastore')):
fake_ds_mor = fake.DataObject()
fake_ds_mor.ManagedObjectReference = fake_ds_list
return fake_ds_mor
# Return the datastore result sets we were passed in, in the order
# given
if (module == ds_util.vim_util and
method == 'get_properties_for_a_collection_of_objects' and
args[0] == 'Datastore' and
args[1] == fake_ds_list):
# Start a new iterator over given datastores
datastores_i[0] = iter(datastores)
return datastores_i[0].next()
# Continue returning results from the current iterator.
if (module == ds_util.vim_util and
method == 'continue_to_get_objects'):
try:
return datastores_i[0].next()
except StopIteration:
return None
# Sentinel that get_datastore's use of vim has changed
self.fail('Unexpected vim call in get_datastore: %s' % method)
return mock.patch.object(self.session, '_call_method',
side_effect=fake_call_method)
def test_get_datastore(self):
fake_objects = fake.FakeRetrieveResult()
fake_objects.add_object(fake.Datastore())
fake_objects.add_object(fake.Datastore("fake-ds-2", 2048, 1000,
False, "normal"))
fake_objects.add_object(fake.Datastore("fake-ds-3", 4096, 2000,
True, "inMaintenance"))
with self._mock_get_datastore_calls(fake_objects):
result = ds_util.get_datastore(self.session, 'fake-cluster')
self.assertEqual("fake-ds", result.name)
self.assertEqual(units.Ti, result.capacity)
self.assertEqual(500 * units.Gi, result.freespace)
def test_get_datastore_with_regex(self):
# Test with a regex that matches with a datastore
datastore_valid_regex = re.compile("^openstack.*\d$")
fake_objects = fake.FakeRetrieveResult()
fake_objects.add_object(fake.Datastore("openstack-ds0"))
fake_objects.add_object(fake.Datastore("fake-ds0"))
fake_objects.add_object(fake.Datastore("fake-ds1"))
with self._mock_get_datastore_calls(fake_objects):
result = ds_util.get_datastore(self.session, 'fake-cluster',
datastore_valid_regex)
self.assertEqual("openstack-ds0", result.name)
def test_get_datastore_with_token(self):
regex = re.compile("^ds.*\d$")
fake0 = fake.FakeRetrieveResult()
fake0.add_object(fake.Datastore("ds0", 10 * units.Gi, 5 * units.Gi))
fake0.add_object(fake.Datastore("foo", 10 * units.Gi, 9 * units.Gi))
setattr(fake0, 'token', 'token-0')
fake1 = fake.FakeRetrieveResult()
fake1.add_object(fake.Datastore("ds2", 10 * units.Gi, 8 * units.Gi))
fake1.add_object(fake.Datastore("ds3", 10 * units.Gi, 1 * units.Gi))
with self._mock_get_datastore_calls(fake0, fake1):
result = ds_util.get_datastore(self.session, 'fake-cluster', regex)
self.assertEqual("ds2", result.name)
def test_get_datastore_with_list(self):
# Test with a regex containing whitelist of datastores
datastore_valid_regex = re.compile("(openstack-ds0|openstack-ds2)")
fake_objects = fake.FakeRetrieveResult()
fake_objects.add_object(fake.Datastore("openstack-ds0"))
fake_objects.add_object(fake.Datastore("openstack-ds1"))
fake_objects.add_object(fake.Datastore("openstack-ds2"))
with self._mock_get_datastore_calls(fake_objects):
result = ds_util.get_datastore(self.session, 'fake-cluster',
datastore_valid_regex)
self.assertNotEqual("openstack-ds1", result.name)
def test_get_datastore_with_regex_error(self):
# Test with a regex that has no match
# Checks if code raises DatastoreNotFound with a specific message
datastore_invalid_regex = re.compile("unknown-ds")
exp_message = (_("Datastore regex %s did not match any datastores")
% datastore_invalid_regex.pattern)
fake_objects = fake.FakeRetrieveResult()
fake_objects.add_object(fake.Datastore("fake-ds0"))
fake_objects.add_object(fake.Datastore("fake-ds1"))
# assertRaisesRegExp would have been a good choice instead of
# try/catch block, but it's available only from Py 2.7.
try:
with self._mock_get_datastore_calls(fake_objects):
ds_util.get_datastore(self.session, 'fake-cluster',
datastore_invalid_regex)
except exception.DatastoreNotFound as e:
self.assertEqual(exp_message, e.args[0])
else:
self.fail("DatastoreNotFound Exception was not raised with "
"message: %s" % exp_message)
def test_get_datastore_without_datastore(self):
self.assertRaises(exception.DatastoreNotFound,
ds_util.get_datastore,
fake.FakeObjectRetrievalSession(None), cluster="fake-cluster")
def test_get_datastore_inaccessible_ds(self):
data_store = fake.Datastore()
data_store.set("summary.accessible", False)
fake_objects = fake.FakeRetrieveResult()
fake_objects.add_object(data_store)
with self._mock_get_datastore_calls(fake_objects):
self.assertRaises(exception.DatastoreNotFound,
ds_util.get_datastore,
self.session, 'fake-cluster')
def test_get_datastore_ds_in_maintenance(self):
data_store = fake.Datastore()
data_store.set("summary.maintenanceMode", "inMaintenance")
fake_objects = fake.FakeRetrieveResult()
fake_objects.add_object(data_store)
with self._mock_get_datastore_calls(fake_objects):
self.assertRaises(exception.DatastoreNotFound,
ds_util.get_datastore,
self.session, 'fake-cluster')
def test_get_datastore_no_host_in_cluster(self):
def fake_call_method(module, method, *args, **kwargs):
return ''
with mock.patch.object(self.session, '_call_method',
fake_call_method):
self.assertRaises(exception.DatastoreNotFound,
ds_util.get_datastore,
self.session, 'fake-cluster')
def _test_is_datastore_valid(self, accessible=True,
maintenance_mode="normal",
type="VMFS",
datastore_regex=None):
propdict = {}
propdict["summary.accessible"] = accessible
propdict["summary.maintenanceMode"] = maintenance_mode
propdict["summary.type"] = type
propdict["summary.name"] = "ds-1"
return ds_util._is_datastore_valid(propdict, datastore_regex)
def test_is_datastore_valid(self):
for ds_type in ds_util.ALLOWED_DATASTORE_TYPES:
self.assertTrue(self._test_is_datastore_valid(True,
"normal",
ds_type))
def test_is_datastore_valid_inaccessible_ds(self):
self.assertFalse(self._test_is_datastore_valid(False,
"normal",
"VMFS"))
def test_is_datastore_valid_ds_in_maintenance(self):
self.assertFalse(self._test_is_datastore_valid(True,
"inMaintenance",
"VMFS"))
def test_is_datastore_valid_ds_type_invalid(self):
self.assertFalse(self._test_is_datastore_valid(True,
"normal",
"vfat"))
def test_is_datastore_valid_not_matching_regex(self):
datastore_regex = re.compile("ds-2")
self.assertFalse(self._test_is_datastore_valid(True,
"normal",
"VMFS",
datastore_regex))
def test_is_datastore_valid_matching_regex(self):
datastore_regex = re.compile("ds-1")
self.assertTrue(self._test_is_datastore_valid(True,
"normal",
"VMFS",
datastore_regex))
class DatastoreTestCase(test.NoDBTestCase):
def test_ds(self):
ds = ds_util.Datastore(
"fake_ref", "ds_name", 2 * units.Gi, 1 * units.Gi)
self.assertEqual('ds_name', ds.name)
self.assertEqual('fake_ref', ds.ref)
self.assertEqual(2 * units.Gi, ds.capacity)
self.assertEqual(1 * units.Gi, ds.freespace)
def test_ds_invalid_space(self):
self.assertRaises(ValueError, ds_util.Datastore,
"fake_ref", "ds_name", 1 * units.Gi, 2 * units.Gi)
self.assertRaises(ValueError, ds_util.Datastore,
"fake_ref", "ds_name", None, 2 * units.Gi)
def test_ds_no_capacity_no_freespace(self):
ds = ds_util.Datastore("fake_ref", "ds_name")
self.assertIsNone(ds.capacity)
self.assertIsNone(ds.freespace)
def test_ds_invalid(self):
self.assertRaises(ValueError, ds_util.Datastore, None, "ds_name")
self.assertRaises(ValueError, ds_util.Datastore, "fake_ref", None)
def test_build_path(self):
ds = ds_util.Datastore("fake_ref", "ds_name")
ds_path = ds.build_path("some_dir", "foo.vmdk")
self.assertEqual('[ds_name] some_dir/foo.vmdk', str(ds_path))
class DatastorePathTestCase(test.NoDBTestCase):
def test_ds_path(self):
p = ds_util.DatastorePath('dsname', 'a/b/c', 'file.iso')
self.assertEqual('[dsname] a/b/c/file.iso', str(p))
self.assertEqual('a/b/c/file.iso', p.rel_path)
self.assertEqual('a/b/c', p.parent.rel_path)
self.assertEqual('[dsname] a/b/c', str(p.parent))
self.assertEqual('dsname', p.datastore)
self.assertEqual('file.iso', p.basename)
self.assertEqual('a/b/c', p.dirname)
def test_ds_path_no_ds_name(self):
bad_args = [
('', ['a/b/c', 'file.iso']),
(None, ['a/b/c', 'file.iso'])]
for t in bad_args:
self.assertRaises(
ValueError, ds_util.DatastorePath,
t[0], *t[1])
def test_ds_path_invalid_path_components(self):
bad_args = [
('dsname', [None]),
('dsname', ['', None]),
('dsname', ['a', None]),
('dsname', ['a', None, 'b']),
('dsname', [None, '']),
('dsname', [None, 'b'])]
for t in bad_args:
self.assertRaises(
ValueError, ds_util.DatastorePath,
t[0], *t[1])
def test_ds_path_no_subdir(self):
args = [
('dsname', ['', 'x.vmdk']),
('dsname', ['x.vmdk'])]
canonical_p = ds_util.DatastorePath('dsname', 'x.vmdk')
self.assertEqual('[dsname] x.vmdk', str(canonical_p))
self.assertEqual('', canonical_p.dirname)
self.assertEqual('x.vmdk', canonical_p.basename)
self.assertEqual('x.vmdk', canonical_p.rel_path)
for t in args:
p = ds_util.DatastorePath(t[0], *t[1])
self.assertEqual(str(canonical_p), str(p))
def test_ds_path_ds_only(self):
args = [
('dsname', []),
('dsname', ['']),
('dsname', ['', ''])]
canonical_p = ds_util.DatastorePath('dsname')
self.assertEqual('[dsname]', str(canonical_p))
self.assertEqual('', canonical_p.rel_path)
self.assertEqual('', canonical_p.basename)
self.assertEqual('', canonical_p.dirname)
for t in args:
p = ds_util.DatastorePath(t[0], *t[1])
self.assertEqual(str(canonical_p), str(p))
self.assertEqual(canonical_p.rel_path, p.rel_path)
def test_ds_path_equivalence(self):
args = [
('dsname', ['a/b/c/', 'x.vmdk']),
('dsname', ['a/', 'b/c/', 'x.vmdk']),
('dsname', ['a', 'b', 'c', 'x.vmdk']),
('dsname', ['a/b/c', 'x.vmdk'])]
canonical_p = ds_util.DatastorePath('dsname', 'a/b/c', 'x.vmdk')
for t in args:
p = ds_util.DatastorePath(t[0], *t[1])
self.assertEqual(str(canonical_p), str(p))
self.assertEqual(canonical_p.datastore, p.datastore)
self.assertEqual(canonical_p.rel_path, p.rel_path)
self.assertEqual(str(canonical_p.parent), str(p.parent))
def test_ds_path_non_equivalence(self):
args = [
# leading slash
('dsname', ['/a', 'b', 'c', 'x.vmdk']),
('dsname', ['/a/b/c/', 'x.vmdk']),
('dsname', ['a/b/c', '/x.vmdk']),
# leading space
('dsname', ['a/b/c/', ' x.vmdk']),
('dsname', ['a/', ' b/c/', 'x.vmdk']),
('dsname', [' a', 'b', 'c', 'x.vmdk']),
# trailing space
('dsname', ['/a/b/c/', 'x.vmdk ']),
('dsname', ['a/b/c/ ', 'x.vmdk'])]
canonical_p = ds_util.DatastorePath('dsname', 'a/b/c', 'x.vmdk')
for t in args:
p = ds_util.DatastorePath(t[0], *t[1])
self.assertNotEqual(str(canonical_p), str(p))
def test_ds_path_hashable(self):
ds1 = ds_util.DatastorePath('dsname', 'path')
ds2 = ds_util.DatastorePath('dsname', 'path')
# If the above objects have the same hash, they will only be added to
# the set once
self.assertThat(set([ds1, ds2]), matchers.HasLength(1))
def test_equal(self):
a = ds_util.DatastorePath('ds_name', 'a')
b = ds_util.DatastorePath('ds_name', 'a')
self.assertEqual(a, b)
def test_join(self):
p = ds_util.DatastorePath('ds_name', 'a')
ds_path = p.join('b')
self.assertEqual('[ds_name] a/b', str(ds_path))
p = ds_util.DatastorePath('ds_name', 'a')
ds_path = p.join()
self.assertEqual('[ds_name] a', str(ds_path))
bad_args = [
[None],
['', None],
['a', None],
['a', None, 'b']]
for arg in bad_args:
self.assertRaises(ValueError, p.join, *arg)
def test_ds_path_parse(self):
p = ds_util.DatastorePath.parse('[dsname]')
self.assertEqual('dsname', p.datastore)
self.assertEqual('', p.rel_path)
p = ds_util.DatastorePath.parse('[dsname] folder')
self.assertEqual('dsname', p.datastore)
self.assertEqual('folder', p.rel_path)
p = ds_util.DatastorePath.parse('[dsname] folder/file')
self.assertEqual('dsname', p.datastore)
self.assertEqual('folder/file', p.rel_path)
for p in [None, '']:
self.assertRaises(ValueError, ds_util.DatastorePath.parse, p)
for p in ['bad path', '/a/b/c', 'a/b/c']:
self.assertRaises(IndexError, ds_util.DatastorePath.parse, p)
|
redhat-openstack/nova
|
nova/tests/virt/vmwareapi/test_ds_util.py
|
Python
|
apache-2.0
| 22,988
|
# -*- coding: utf-8 -*-
import os
import json
import platform
import unittest
from httmock import urlmatch, HTTMock, response
from wechatpy import WeChatClient
_TESTS_PATH = os.path.abspath(os.path.dirname(__file__))
_FIXTURE_PATH = os.path.join(_TESTS_PATH, "fixtures")
@urlmatch(netloc=r"(.*\.)?api\.weixin\.qq\.com$")
def wechat_api_mock(url, request):
path = url.path.replace("/cgi-bin/", "").replace("/", "_")
if path.startswith("_"):
path = path[1:]
res_file = os.path.join(_FIXTURE_PATH, f"{path}.json")
content = {
"errcode": 99999,
"errmsg": f"can not find fixture {res_file}",
}
headers = {"Content-Type": "application/json"}
try:
with open(res_file, "rb") as f:
content = json.loads(f.read().decode("utf-8"))
except (IOError, ValueError) as e:
print(e)
return response(200, content, headers, request=request)
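# Illustrative note (not part of the original tests): the mock above maps the API
# path to a fixture file, e.g. a request to /cgi-bin/token is served from
# tests/fixtures/token.json. A minimal fixture matching the assertions below
# could look like (assumed content): {"access_token": "1234567890", "expires_in": 7200}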
class WeChatSessionTestCase(unittest.TestCase):
app_id = "123456"
secret = "123456"
def test_memory_session_storage_init(self):
from wechatpy.session.memorystorage import MemoryStorage
client = WeChatClient(self.app_id, self.secret)
self.assertTrue(isinstance(client.session, MemoryStorage))
def test_memory_session_storage_access_token(self):
client = WeChatClient(self.app_id, self.secret)
with HTTMock(wechat_api_mock):
token = client.fetch_access_token()
self.assertEqual("1234567890", token["access_token"])
self.assertEqual(7200, token["expires_in"])
self.assertEqual("1234567890", client.access_token)
def test_redis_session_storage_init(self):
from redis import Redis
from wechatpy.session.redisstorage import RedisStorage
redis = Redis()
session = RedisStorage(redis)
client = WeChatClient(self.app_id, self.secret, session=session)
self.assertTrue(isinstance(client.session, RedisStorage))
def test_redis_session_storage_access_token(self):
from redis import Redis
from wechatpy.session.redisstorage import RedisStorage
redis = Redis()
session = RedisStorage(redis)
client = WeChatClient(self.app_id, self.secret, session=session)
with HTTMock(wechat_api_mock):
token = client.fetch_access_token()
self.assertEqual("1234567890", token["access_token"])
self.assertEqual(7200, token["expires_in"])
self.assertEqual("1234567890", client.access_token)
def test_memcached_storage_init(self):
if platform.system() == "Windows":
return
from pymemcache.client import Client
from wechatpy.session.memcachedstorage import MemcachedStorage
servers = ("127.0.0.1", 11211)
memcached = Client(servers)
session = MemcachedStorage(memcached)
client = WeChatClient(self.app_id, self.secret, session=session)
self.assertTrue(isinstance(client.session, MemcachedStorage))
def test_memcached_storage_access_token(self):
if platform.system() == "Windows":
return
from pymemcache.client import Client
from wechatpy.session.memcachedstorage import MemcachedStorage
servers = ("127.0.0.1", 11211)
memcached = Client(servers)
session = MemcachedStorage(memcached)
client = WeChatClient(self.app_id, self.secret, session=session)
with HTTMock(wechat_api_mock):
token = client.fetch_access_token()
self.assertEqual("1234567890", token["access_token"])
self.assertEqual(7200, token["expires_in"])
self.assertEqual("1234567890", client.access_token)
|
jxtech/wechatpy
|
tests/test_session.py
|
Python
|
mit
| 3,725
|
import contextlib
import importlib.abc
import importlib.machinery
import os
import sys
import types
import unittest
from test.test_importlib import util
from test.support import run_unittest
# needed tests:
#
# need to test when nested, so that the top-level path isn't sys.path
# need to test dynamic path detection, both at top-level and nested
# with dynamic path, check when a loader is returned on path reload (that is,
# trying to switch from a namespace package to a regular package)
@contextlib.contextmanager
def sys_modules_context():
"""
Make sure sys.modules is the same object and has the same content
when exiting the context as when entering.
Similar to importlib.test.util.uncache, but doesn't require explicit
names.
"""
sys_modules_saved = sys.modules
sys_modules_copy = sys.modules.copy()
try:
yield
finally:
sys.modules = sys_modules_saved
sys.modules.clear()
sys.modules.update(sys_modules_copy)
@contextlib.contextmanager
def namespace_tree_context(**kwargs):
"""
Save import state and sys.modules cache and restore it on exit.
Typical usage:
>>> with namespace_tree_context(path=['/tmp/xxyy/portion1',
... '/tmp/xxyy/portion2']):
... pass
"""
# use default meta_path and path_hooks unless specified otherwise
kwargs.setdefault('meta_path', sys.meta_path)
kwargs.setdefault('path_hooks', sys.path_hooks)
import_context = util.import_state(**kwargs)
with import_context, sys_modules_context():
yield
class NamespacePackageTest(unittest.TestCase):
"""
Subclasses should define self.root and self.paths (under that root)
to be added to sys.path.
"""
root = os.path.join(os.path.dirname(__file__), 'namespace_pkgs')
def setUp(self):
self.resolved_paths = [
os.path.join(self.root, path) for path in self.paths
]
self.ctx = namespace_tree_context(path=self.resolved_paths)
self.ctx.__enter__()
def tearDown(self):
# TODO: will we ever want to pass exc_info to __exit__?
self.ctx.__exit__(None, None, None)
class SingleNamespacePackage(NamespacePackageTest):
paths = ['portion1']
def test_simple_package(self):
import foo.one
self.assertEqual(foo.one.attr, 'portion1 foo one')
def test_cant_import_other(self):
with self.assertRaises(ImportError):
import foo.two
def test_module_repr(self):
import foo.one
self.assertEqual(repr(foo), "<module 'foo' (namespace)>")
class DynamicPatheNamespacePackage(NamespacePackageTest):
paths = ['portion1']
def test_dynamic_path(self):
# Make sure only 'foo.one' can be imported
import foo.one
self.assertEqual(foo.one.attr, 'portion1 foo one')
with self.assertRaises(ImportError):
import foo.two
# Now modify sys.path
sys.path.append(os.path.join(self.root, 'portion2'))
# And make sure foo.two is now importable
import foo.two
self.assertEqual(foo.two.attr, 'portion2 foo two')
class CombinedNamespacePackages(NamespacePackageTest):
paths = ['both_portions']
def test_imports(self):
import foo.one
import foo.two
self.assertEqual(foo.one.attr, 'both_portions foo one')
self.assertEqual(foo.two.attr, 'both_portions foo two')
class SeparatedNamespacePackages(NamespacePackageTest):
paths = ['portion1', 'portion2']
def test_imports(self):
import foo.one
import foo.two
self.assertEqual(foo.one.attr, 'portion1 foo one')
self.assertEqual(foo.two.attr, 'portion2 foo two')
class SeparatedOverlappingNamespacePackages(NamespacePackageTest):
paths = ['portion1', 'both_portions']
def test_first_path_wins(self):
import foo.one
import foo.two
self.assertEqual(foo.one.attr, 'portion1 foo one')
self.assertEqual(foo.two.attr, 'both_portions foo two')
def test_first_path_wins_again(self):
sys.path.reverse()
import foo.one
import foo.two
self.assertEqual(foo.one.attr, 'both_portions foo one')
self.assertEqual(foo.two.attr, 'both_portions foo two')
def test_first_path_wins_importing_second_first(self):
import foo.two
import foo.one
self.assertEqual(foo.one.attr, 'portion1 foo one')
self.assertEqual(foo.two.attr, 'both_portions foo two')
class SingleZipNamespacePackage(NamespacePackageTest):
paths = ['top_level_portion1.zip']
def test_simple_package(self):
import foo.one
self.assertEqual(foo.one.attr, 'portion1 foo one')
def test_cant_import_other(self):
with self.assertRaises(ImportError):
import foo.two
class SeparatedZipNamespacePackages(NamespacePackageTest):
paths = ['top_level_portion1.zip', 'portion2']
def test_imports(self):
import foo.one
import foo.two
self.assertEqual(foo.one.attr, 'portion1 foo one')
self.assertEqual(foo.two.attr, 'portion2 foo two')
self.assertIn('top_level_portion1.zip', foo.one.__file__)
self.assertNotIn('.zip', foo.two.__file__)
class SingleNestedZipNamespacePackage(NamespacePackageTest):
paths = ['nested_portion1.zip/nested_portion1']
def test_simple_package(self):
import foo.one
self.assertEqual(foo.one.attr, 'portion1 foo one')
def test_cant_import_other(self):
with self.assertRaises(ImportError):
import foo.two
class SeparatedNestedZipNamespacePackages(NamespacePackageTest):
paths = ['nested_portion1.zip/nested_portion1', 'portion2']
def test_imports(self):
import foo.one
import foo.two
self.assertEqual(foo.one.attr, 'portion1 foo one')
self.assertEqual(foo.two.attr, 'portion2 foo two')
fn = os.path.join('nested_portion1.zip', 'nested_portion1')
self.assertIn(fn, foo.one.__file__)
self.assertNotIn('.zip', foo.two.__file__)
class LegacySupport(NamespacePackageTest):
paths = ['not_a_namespace_pkg', 'portion1', 'portion2', 'both_portions']
def test_non_namespace_package_takes_precedence(self):
import foo.one
with self.assertRaises(ImportError):
import foo.two
self.assertIn('__init__', foo.__file__)
self.assertNotIn('namespace', str(foo.__loader__).lower())
class DynamicPathCalculation(NamespacePackageTest):
paths = ['project1', 'project2']
def test_project3_fails(self):
import parent.child.one
self.assertEqual(len(parent.__path__), 2)
self.assertEqual(len(parent.child.__path__), 2)
import parent.child.two
self.assertEqual(len(parent.__path__), 2)
self.assertEqual(len(parent.child.__path__), 2)
self.assertEqual(parent.child.one.attr, 'parent child one')
self.assertEqual(parent.child.two.attr, 'parent child two')
with self.assertRaises(ImportError):
import parent.child.three
self.assertEqual(len(parent.__path__), 2)
self.assertEqual(len(parent.child.__path__), 2)
def test_project3_succeeds(self):
import parent.child.one
self.assertEqual(len(parent.__path__), 2)
self.assertEqual(len(parent.child.__path__), 2)
import parent.child.two
self.assertEqual(len(parent.__path__), 2)
self.assertEqual(len(parent.child.__path__), 2)
self.assertEqual(parent.child.one.attr, 'parent child one')
self.assertEqual(parent.child.two.attr, 'parent child two')
with self.assertRaises(ImportError):
import parent.child.three
# now add project3
sys.path.append(os.path.join(self.root, 'project3'))
import parent.child.three
# the paths dynamically get longer, to include the new directories
self.assertEqual(len(parent.__path__), 3)
self.assertEqual(len(parent.child.__path__), 3)
self.assertEqual(parent.child.three.attr, 'parent child three')
class ZipWithMissingDirectory(NamespacePackageTest):
paths = ['missing_directory.zip']
@unittest.expectedFailure
def test_missing_directory(self):
# This will fail because missing_directory.zip contains:
# Length Date Time Name
# --------- ---------- ----- ----
# 29 2012-05-03 18:13 foo/one.py
# 0 2012-05-03 20:57 bar/
# 38 2012-05-03 20:57 bar/two.py
# --------- -------
# 67 3 files
# Because there is no 'foo/', the zipimporter currently doesn't
# know that foo is a namespace package
import foo.one
def test_present_directory(self):
# This succeeds because there is a "bar/" in the zip file
import bar.two
self.assertEqual(bar.two.attr, 'missing_directory foo two')
class ModuleAndNamespacePackageInSameDir(NamespacePackageTest):
paths = ['module_and_namespace_package']
def test_module_before_namespace_package(self):
# Make sure we find the module in preference to the
# namespace package.
import a_test
self.assertEqual(a_test.attr, 'in module')
if __name__ == "__main__":
unittest.main()
|
ArcherSys/ArcherSys
|
Lib/test/test_importlib/test_namespace_pkgs.py
|
Python
|
mit
| 28,346
|
# coding=utf-8
"""Radiance rfluxmtx parameters"""
from gridbased import GridBasedParameters
from ._frozen import frozen
@frozen
class RfluxmtxParameters(GridBasedParameters):
def __init__(self, sender=None, receiver=None, octree=None, systemFiles=None):
"""Init parameters."""
GridBasedParameters.__init__(self)
|
antonszilasi/honeybeex
|
honeybeex/honeybee/radiance/parameters/rfluxmtx.py
|
Python
|
gpl-3.0
| 336
|
# -*- coding: utf-8 -*-
from geolucidate.functions import _cleanup, _convert
from geolucidate.parser import parser_re
from nose.tools import eq_
def test_parser():
values = [
("N424400 W800557", ['N', '42', '44', '00', 'W', '80', '05', '57']),
("N 5930 W 12330", ['N', '59', '30', '00', 'W', '123', '30', '00']),
("4745N/6440W", ['N', '47', '45', '00', 'W', '64', '40', '00']),
("4523N/07319W", ['N', '45', '23', '00', 'W', '073', '19', '00']),
("5335N / 12155W ", ['N', '53', '35', '00', 'W', '121', '55', '00']),
("58147N/07720W", ['N', '58', '14', '7', 'W', '077', '20', '00']),
("462716N/0721147W", ['N', '46', '27', '16', 'W', '072', '11', '47']),
("491500N 1230720W", ['N', '49', '15', '00', 'W', '123', '07', '20']),
("5046.6N / 06829.2W", ['N', '50', '46.6', '00', 'W', '068', '29.2', '00']),
("5734.8 N / 10006.2 W", ['N', '57', '34.8', '00', 'W', '100', '06.2', '00']),
("(4952.013N / 09548.474W)", ['N', '49', '52.013', '00', 'W', '095', '48.474', '00']),
("N4909.44 W12210.13", ['N', '49', '09.44', '00', 'W', '122', '10.13', '00']),
("6535.26N/08801.25W", ['N', '65', '35.26', '00', 'W', '088', '01.25', '00']),
("5033.15N 11544.09W", ['N', '50', '33.15', '00', 'W', '115', '44.09', '00']),
("N53 35.48 W112 02.60", ['N', '53', '35.48', '00', 'W', '112', '02.60', '00']),
("52 degrees, 42 minutes north, 124 degrees, 50 minutes west",
['N', '52', '42', '00', 'W', '124', '50', '00']),
("5115N8940W", ['N', '51', '15', '00', 'W', '89', '40', '00']),
("4630 NORTH 5705 WEST", ['N', '46', '30', '00', 'W', '57', '05', '00']),
("6146 north 5328 west", ['N', '61', '46', '00', 'W', '53', '28', '00']),
("52 North 50 West", ['N', '52', '00', '00', 'W', '50', '00', '00']),
(u"70 ° 57N 070 ° 05W", ['N', '70', '57', '00', 'W', '070', '05', '00']),
(u"""(45º10'17"N 076º23'46"W)""", ['N', '45', '10', '17', 'W', '076', '23', '46']),
# Note that the degree and minute punctuation are actually backwards; we support it anyway.
(u"""(45º10"17'N 076º23"46'W" """, ['N', '45', '10', '17', 'W', '076', '23', '46']),
(u"43º55'N 078º18'W", ['N', '43', '55', '00', 'W', '078', '18', '00']),
(u"43º01N 081º46W", ['N', '43', '01', '00', 'W', '081', '46', '00']),
(u"""49º41'34"N 093º37'54"W""", ['N', '49', '41', '34', 'W', '093', '37', '54']),
# See note below on confusion created by using periods both as a decimal separator
# and to delimit parts of coordinates.
("(N51.33.9 W119.02.30)", ['N', '51', '33', '9', 'W', '119', '02', '30']),
("N50.26.008 W121.41.470", ['N', '50', '26.008', '00', 'W', '121', '41.470', '00']),
("49-21.834N 126-15.923W", ['N', '49', '21.834', '00', 'W', '126', '15.923', '00']),
(u"(40º02.247'N 111º44.383'W)", ['N', '40', '02.247', '00', 'W', '111', '44.383', '00']),
("N495342 / W0742553", ['N', '49', '53', '42', 'W', '074', '25', '53']),
("502661N 1214161W", ['N', '50', '26', '61', 'W', '121', '41', '61']),
# The 'seconds' may in fact be a decimal fraction of minutes.
("50 27 55 N 127 27 65 W", ['N', '50', '27', '55', 'W', '127', '27', '65']),
# Longitude seconds (95) may be a decimal fraction of minutes.
("484819N 1231195W", ['N', '48', '48', '19', 'W', '123', '11', '95']),
# The minutes may be a single digit.
(u"N45° 28' W77° 1'", ['N', '45', '28', '00', 'W', '77', '1', '00']),
# No direction given for latitude and longitude;
# are we to assume north and west?
#(u"""(43º52'43"/079º48'13")""", ['', '43', '52', '43', '', '079', '48', '13']),
# Possibly missing something; 7º W isn't anywhere near Canada.
#("5617N/0721W", ['N', '56', '17', '00', 'W', '07', '21', '00']),
# Latitude and longitude reversed.
#("10626W / 5156N", ['N', '', '', '', 'W', '', '', '']),
# Can't have 71 minutes.
#(u"""(46º71'56"N 081º13'08"W)""", ['N', '46', '71', '56', 'W', '081', '13', '08']),
# Can't figure out how to parse this one. The latitude seems to have seconds with a decimal
# fraction, but if that's the case, then there aren't enough digits for the longitude.
#("464525.9N04622.4W", ['N', '46', '45', '25.9', 'W', '046', '22.4', '00']),
# Where a period is used to separate the degrees and minutes, and the minutes and seconds,
# it's hard to tell if the 'seconds' are meant to be seconds or a decimal fraction of minutes
# (given that the period is also a decimal separator)
("493616N 1221258W", ['N', '49', '36', '16', 'W', '122', '12', '58']),
# If a period is used to separate the degrees and minutes, _and_ the 'seconds' value
# is only two digits, we now treat it as a proper seconds value rather than a decimal fraction.
("49.36.16N 122.12.58W", ['N', '49', '36', '16', 'W', '122', '12', '58'])
]
for test in values:
(coord_string, expected) = test
yield check_parser, coord_string, expected
def check_parser(coord_string, expected):
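"""Helper for the generated tests: the parser must match and yield the expected coordinate parts."""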
match = parser_re.search(coord_string)
assert match
result = _cleanup(match.groupdict())
eq_(result, expected)
def test_false_positive():
values = ["GGN7383 was", "6830N 70W"]
for test in values:
yield check_false_positive, test
def check_false_positive(test):
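"""Helper for the generated tests: strings that merely resemble coordinates must not match."""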
match = parser_re.search(test)
eq_(match, None)
|
kurtraschke/geolucidate
|
geolucidate/tests/tests.py
|
Python
|
mit
| 5,593
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import logging
import os
from tempfile import TemporaryFile
from psycopg2 import ProgrammingError
from contextlib import closing
from odoo import api, fields, models, tools, sql_db, _
from odoo.exceptions import UserError
_logger = logging.getLogger(__name__)
class BaseLanguageImport(models.TransientModel):
_name = "base.language.import"
_description = "Language Import"
name = fields.Char('Language Name', required=True)
code = fields.Char('ISO Code', size=6, required=True,
help="ISO Language and Country code, e.g. en_US")
data = fields.Binary('File', required=True, attachment=False)
filename = fields.Char('File Name', required=True)
overwrite = fields.Boolean('Overwrite Existing Terms',
default=True,
help="If you enable this option, existing translations (including custom ones) "
"will be overwritten and replaced by those in this file")
def import_lang(self):
this = self[0]
with TemporaryFile('wb+') as buf:
try:
buf.write(base64.decodebytes(this.data))
# now we determine the file format
buf.seek(0)
fileformat = os.path.splitext(this.filename)[-1][1:].lower()
Lang = self.env["res.lang"]
lang = Lang._activate_lang(self.code) or Lang._create_lang(
self.code, lang_name=self.name
)
tools.trans_load_data(
this._cr, buf, fileformat, this.code, overwrite=self.overwrite
)
except ProgrammingError as e:
_logger.exception('File unsuccessfully imported, due to a malformed file.')
with closing(sql_db.db_connect(self._cr.dbname).cursor()) as cr:
raise UserError(_('File %r not imported due to a malformed file.\n\n'
'This issue can be caused by duplicates entries who are referring to the same field. '
'Please check the content of the file you are trying to import.\n\n'
'Technical Details:\n%s') % (self.filename, tools.ustr(e)))
except Exception as e:
_logger.exception('File unsuccessfully imported, due to format mismatch.')
raise UserError(
_('File %r not imported due to format mismatch or a malformed file.'
' (Valid formats are .csv, .po, .pot)\n\nTechnical Details:\n%s') % \
(this.filename, tools.ustr(e))
)
return True
|
ygol/odoo
|
odoo/addons/base/wizard/base_import_language.py
|
Python
|
agpl-3.0
| 2,837
|
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.fixtures import fixture_session
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
class ABCTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global a, b, c
a = Table(
"a",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("adata", String(30)),
Column("type", String(30)),
)
b = Table(
"b",
metadata,
Column("id", Integer, ForeignKey("a.id"), primary_key=True),
Column("bdata", String(30)),
)
c = Table(
"c",
metadata,
Column("id", Integer, ForeignKey("b.id"), primary_key=True),
Column("cdata", String(30)),
)
@testing.combinations(("union",), ("none",))
def test_abc_poly_roundtrip(self, fetchtype):
class A(fixtures.ComparableEntity):
pass
class B(A):
pass
class C(B):
pass
if fetchtype == "union":
abc = a.outerjoin(b).outerjoin(c)
bc = a.join(b).outerjoin(c)
else:
abc = bc = None
self.mapper_registry.map_imperatively(
A,
a,
with_polymorphic=("*", abc),
polymorphic_on=a.c.type,
polymorphic_identity="a",
)
self.mapper_registry.map_imperatively(
B,
b,
with_polymorphic=("*", bc),
inherits=A,
polymorphic_identity="b",
)
self.mapper_registry.map_imperatively(
C, c, inherits=B, polymorphic_identity="c"
)
a1 = A(adata="a1")
b1 = B(bdata="b1", adata="b1")
b2 = B(bdata="b2", adata="b2")
b3 = B(bdata="b3", adata="b3")
c1 = C(cdata="c1", bdata="c1", adata="c1")
c2 = C(cdata="c2", bdata="c2", adata="c2")
c3 = C(cdata="c2", bdata="c2", adata="c2")
sess = fixture_session()
for x in (a1, b1, b2, b3, c1, c2, c3):
sess.add(x)
sess.flush()
sess.expunge_all()
# for obj in sess.query(A).all():
# print obj
eq_(
[
A(adata="a1"),
B(bdata="b1", adata="b1"),
B(bdata="b2", adata="b2"),
B(bdata="b3", adata="b3"),
C(cdata="c1", bdata="c1", adata="c1"),
C(cdata="c2", bdata="c2", adata="c2"),
C(cdata="c2", bdata="c2", adata="c2"),
],
sess.query(A).order_by(A.id).all(),
)
eq_(
[
B(bdata="b1", adata="b1"),
B(bdata="b2", adata="b2"),
B(bdata="b3", adata="b3"),
C(cdata="c1", bdata="c1", adata="c1"),
C(cdata="c2", bdata="c2", adata="c2"),
C(cdata="c2", bdata="c2", adata="c2"),
],
sess.query(B).order_by(A.id).all(),
)
eq_(
[
C(cdata="c1", bdata="c1", adata="c1"),
C(cdata="c2", bdata="c2", adata="c2"),
C(cdata="c2", bdata="c2", adata="c2"),
],
sess.query(C).order_by(A.id).all(),
)
|
monetate/sqlalchemy
|
test/orm/inheritance/test_abc_polymorphic.py
|
Python
|
mit
| 3,596
|
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Documentation on PRESUBMIT.py can be found at:
# http://www.chromium.org/developers/how-tos/depottools/presubmit-scripts
import os
import sys
# List of directories to not apply presubmit project checks, relative
# to the NaCl top directory
EXCLUDE_PROJECT_CHECKS_DIRS = [
# The following contain test data (including automatically generated),
# and do not follow our conventions.
'src/trusted/validator_ragel/testdata/32',
'src/trusted/validator_ragel/testdata/64',
'src/trusted/validator_x86/testdata/32',
'src/trusted/validator_x86/testdata/64',
'src/trusted/validator/x86/decoder/generator/testdata/32',
'src/trusted/validator/x86/decoder/generator/testdata/64',
# The following directories contains automatically generated source,
# which may not follow our conventions.
'src/trusted/validator_x86/gen',
'src/trusted/validator/x86/decoder/gen',
'src/trusted/validator/x86/decoder/generator/gen',
'src/trusted/validator/x86/ncval_seg_sfi/gen',
'src/trusted/validator_arm/gen',
'src/trusted/validator_ragel/gen',
]
NACL_TOP_DIR = os.getcwd()
while not os.path.isfile(os.path.join(NACL_TOP_DIR, 'PRESUBMIT.py')):
NACL_TOP_DIR = os.path.dirname(NACL_TOP_DIR)
assert len(NACL_TOP_DIR) >= 3, "Could not find NaClTopDir"
def _CommonChecks(input_api, output_api):
"""Checks for both upload and commit."""
results = []
results.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api, project_name='Native Client',
excluded_paths=tuple(EXCLUDE_PROJECT_CHECKS_DIRS)))
return results
def IsFileInDirectories(f, dirs):
""" Returns true if f is in list of directories"""
for d in dirs:
if d == os.path.commonprefix([f, d]):
return True
return False
def CheckChangeOnUpload(input_api, output_api):
"""Verifies all changes in all files.
Args:
input_api: the limited set of input modules allowed in presubmit.
output_api: the limited set of output modules allowed in presubmit.
"""
report = []
report.extend(_CommonChecks(input_api, output_api))
# The commit queue assumes PRESUBMIT.py is standalone.
# TODO(bradnelson): Migrate code_hygiene to a common location so that
# it can be used by the commit queue.
old_sys_path = list(sys.path)
try:
sys.path.append(os.path.join(NACL_TOP_DIR, 'tools'))
sys.path.append(os.path.join(NACL_TOP_DIR, 'build'))
import code_hygiene
finally:
sys.path = old_sys_path
del old_sys_path
affected_files = input_api.AffectedFiles(include_deletes=False)
exclude_dirs = [ NACL_TOP_DIR + '/' + x + '/'
for x in EXCLUDE_PROJECT_CHECKS_DIRS ]
for filename in affected_files:
filename = filename.AbsoluteLocalPath()
if not IsFileInDirectories(filename, exclude_dirs):
errors, warnings = code_hygiene.CheckFile(filename, False)
for e in errors:
report.append(output_api.PresubmitError(e, items=errors[e]))
for w in warnings:
report.append(output_api.PresubmitPromptWarning(w, items=warnings[w]))
return report
def CheckChangeOnCommit(input_api, output_api):
"""Verifies all changes in all files and verifies that the
tree is open and can accept a commit.
Args:
input_api: the limited set of input modules allowed in presubmit.
output_api: the limited set of output modules allowed in presubmit.
"""
report = []
report.extend(CheckChangeOnUpload(input_api, output_api))
report.extend(input_api.canned_checks.CheckTreeIsOpen(
input_api, output_api,
json_url='http://nativeclient-status.appspot.com/current?format=json'))
return report
# Note that this list is duplicated in the Commit Queue. If you
# change this list, you should also update the CQ's list here:
# https://chrome-internal.googlesource.com/infra/infra_internal/+/master/commit_queue/projects.py
# (see https://crbug.com/399059).
DEFAULT_TRYBOTS = [
'nacl-precise32_newlib_dbg',
'nacl-precise32_newlib_opt',
'nacl-precise32_glibc_opt',
'nacl-precise64_newlib_dbg',
'nacl-precise64_newlib_opt',
'nacl-precise64_glibc_opt',
'nacl-mac10.6_newlib_opt',
'nacl-mac10.6_glibc_opt',
'nacl-mac10.6_64_newlib_dbg',
'nacl-mac10.6_64_glibc_opt',
'nacl-mac10.7_newlib_opt',
'nacl-mac10.7_glibc_opt',
'nacl-mac10.7_64_newlib_dbg',
'nacl-mac10.7_64_glibc_opt',
'nacl-mac10.8_32_newlib_dbg',
'nacl-mac10.8_32_glibc_opt',
'nacl-mac10.8_64_newlib_dbg',
'nacl-mac10.8_64_glibc_opt',
'nacl-win32_newlib_opt',
'nacl-win32_glibc_opt',
'nacl-win64_newlib_dbg',
'nacl-win64_newlib_opt',
'nacl-win64_glibc_opt',
'nacl-win8-64_newlib_dbg',
'nacl-win8-64_newlib_opt',
'nacl-arm_opt_panda',
# arm-nacl-gcc bots
'nacl-win7_64_arm_newlib_opt',
'nacl-mac10.7_arm_newlib_opt',
'nacl-precise64_arm_newlib_opt',
# Clang bots
'nacl-precise_64-newlib-dbg-clang',
'nacl-mac10.6-newlib-dbg-clang',
# pnacl scons bots
'nacl-precise_64-newlib-arm_qemu-pnacl',
'nacl-precise_64-newlib-x86_32-pnacl',
'nacl-precise_64-newlib-x86_64-pnacl',
'nacl-mac10.8_newlib_opt_pnacl',
'nacl-win7_64_newlib_opt_pnacl',
# pnacl spec2k bots
'nacl-arm_perf_panda',
'nacl-precise_64-newlib-x86_32-pnacl-spec',
'nacl-precise_64-newlib-x86_64-pnacl-spec',
]
PNACL_TOOLCHAIN_TRYBOTS = [
'nacl-toolchain-linux-pnacl-x86_64',
'nacl-toolchain-linux-pnacl-x86_32',
'nacl-toolchain-mac-pnacl-x86_32',
'nacl-toolchain-win7-pnacl-x86_64',
]
TOOLCHAIN_BUILD_TRYBOTS = [
'nacl-toolchain-precise64-newlib-arm',
'nacl-toolchain-mac-newlib-arm',
]
def GetPreferredTryMasters(_, change):
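"""Pick try bots based on which areas of the tree the change touches (PNaCl, toolchain_build, or everything else)."""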
has_pnacl = False
has_toolchain_build = False
has_others = False
for file in change.AffectedFiles(include_dirs=True):
if IsFileInDirectories(file.AbsoluteLocalPath(),
[os.path.join(NACL_TOP_DIR, 'build'),
os.path.join(NACL_TOP_DIR, 'buildbot'),
os.path.join(NACL_TOP_DIR, 'pynacl')]):
# Buildbot and infrastructure changes should trigger all the try bots.
has_pnacl = True
has_toolchain_build = True
has_others = True
break
elif IsFileInDirectories(file.AbsoluteLocalPath(),
[os.path.join(NACL_TOP_DIR, 'pnacl')]):
has_pnacl = True
elif IsFileInDirectories(file.AbsoluteLocalPath(),
[os.path.join(NACL_TOP_DIR, 'toolchain_build')]):
has_toolchain_build = True
else:
has_others = True
trybots = []
if has_pnacl:
trybots += PNACL_TOOLCHAIN_TRYBOTS
if has_toolchain_build:
trybots += TOOLCHAIN_BUILD_TRYBOTS
if has_others:
trybots += DEFAULT_TRYBOTS
return {
'tryserver.nacl': { t: set(['defaulttests']) for t in trybots },
}
|
mxOBS/deb-pkg_trusty_chromium-browser
|
native_client/PRESUBMIT.py
|
Python
|
bsd-3-clause
| 7,031
|
"""
Django settings for aiplay project.
Generated by 'django-admin startproject' using Django 1.10.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
import datetime
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '&d5z3&%+yvk2%!0pm848!u&-&mznp2k-q7m^xwi*uk#y_i4e8('
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# installed app
'account',
'problem',
'submission',
'utils',
# third party package
'corsheaders',
'rest_framework',
'rest_framework_docs',
'tagging'
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'aiplay.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'aiplay.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
# django cors headers settings
CORS_ORIGIN_ALLOW_ALL = True # only for debug
# django rest frame work settings
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.AllowAny',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
# 'rest_framework.authentication.SessionAuthentication',
# 'rest_framework.authentication.BasicAuthentication',
'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
),
}
# jwt settings
JWT_AUTH = {
'JWT_ENCODE_HANDLER':
'rest_framework_jwt.utils.jwt_encode_handler',
'JWT_DECODE_HANDLER':
'rest_framework_jwt.utils.jwt_decode_handler',
'JWT_PAYLOAD_HANDLER':
'rest_framework_jwt.utils.jwt_payload_handler',
'JWT_PAYLOAD_GET_USER_ID_HANDLER':
'rest_framework_jwt.utils.jwt_get_user_id_from_payload_handler',
'JWT_RESPONSE_PAYLOAD_HANDLER':
'rest_framework_jwt.utils.jwt_response_payload_handler',
'JWT_SECRET_KEY': SECRET_KEY,
'JWT_PUBLIC_KEY': None,
'JWT_PRIVATE_KEY': None,
'JWT_ALGORITHM': 'HS256',
'JWT_VERIFY': True,
'JWT_VERIFY_EXPIRATION': True,
'JWT_LEEWAY': 0,
'JWT_EXPIRATION_DELTA': datetime.timedelta(seconds=300),
'JWT_AUDIENCE': None,
'JWT_ISSUER': None,
'JWT_ALLOW_REFRESH': False,
'JWT_REFRESH_EXPIRATION_DELTA': datetime.timedelta(days=7),
'JWT_AUTH_HEADER_PREFIX': 'JWT',
}
# django custom settings
# LOGGING_CONFIG = None
AUTH_USER_MODEL = 'account.User'
|
zerolfx/aiplay-api
|
aiplay/settings.py
|
Python
|
mit
| 4,928
|
###########################################################
#
# Copyright (c) 2005-2009, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
'''Command line script to get user info.'''
import tacticenv
import os, sys, getopt
try:
import active_directory
except Exception as e:
print("WARNING: cannot import active_directory")
print()
#class active_directory:
# pass
raise
def get_user_info(user_name, domain=None):
# TEST
"""
return '''
dn: whatver
l: Fort Worth
displayName: cow
mail: remko@southpawtech.com
'''
"""
user = active_directory.find_user(user_name, domain)
if not user:
print("WARNING: user [%s] cannot be found" % user_name)
return ''
import types
if isinstance(user, types.GeneratorType):
try:
user = next(user)
except StopIteration:
user = None
return ''
else:
users = list(user)
if users:
user = users[0]
else:
user = None
return ''
# TODO: need to find a way to get all properties
#print "properties: ", user.properties
#print "-"*20
#print "xxx: ", user.dump()
attrs_map = {
'dn': 'dn',
'displayName': 'display_name',
'name': 'name',
'sn': 'last_name',
'givenName': 'first_name',
'mail': 'email',
'telephoneNumber': 'phone_number',
'department': 'department',
'employeeID': 'employee_id',
'sAMAccountName': 'sAMAccountName',
# some extras
'l': 'location',
'title': 'title',
}
data = []
for key in attrs_map.keys():
try:
value = eval("user.%s" % key)
data.append("%s: %s" % (key, value))
except AttributeError:
#print "Attribute [%s] does not exist" % key
pass
if hasattr(user,'memberOf'):
for memberOf in user.memberOf:
memberOf = str(memberOf).replace("LDAP://", "")
data.append("memberOf: %s" % (memberOf))
else:
for memberOf in user:
memberOf = str(memberOf).replace("LDAP://", "")
data.append("memberOf: %s" % (memberOf))
return "\n".join(data)
def get_group_info(group_name):
group = active_directory.find_group(group_name)
if not group:
return {}
group_attrs_map = {
'dn': 'dn',
'name': 'name',
}
data = []
for key in group_attrs_map.keys():
value = eval("group.%s" % key)
data.append("%s: %s" % (key, value))
return "\n".join(data)
def main(argv):
try:
opts, args = getopt.getopt(argv, "d:u:g:", ["help"])
except getopt.GetoptError:
usage()
sys.exit(2)
#try:
domain = None
if len(opts) > 0:
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
elif opt == '-d':
domain = arg
elif opt == '-u':
try:
print(get_user_info(arg, domain))
except Exception as e:
print("ERROR: ", str(e))
raise
elif opt == '-g':
print(get_group_info(arg))
else:
usage()
sys.exit()
else:
print("Try 'python ad_get_user_info.py -h' for more information.")
def usage():
print("Usage: python ad_get_user_info.py [Option]")
print("")
print("-d <name> Set domain to use")
print("-u <name> Look up the user 'name'")
print("-g <name> Look up the group 'name'")
print("")
if __name__ == '__main__':
main(sys.argv[1:])
|
Southpaw-TACTIC/TACTIC
|
src/tactic/active_directory/ad_get_user_info.py
|
Python
|
epl-1.0
| 4,097
|
"""Tornado app settings."""
import os
from tornado.options import define, options, parse_command_line
define("host", default='0.0.0.0', help="run on the given host", type=str)
define("port", default=5000, help="run on the given port", type=int)
define("debug", default=True, help="run in debug mode")
parse_command_line()
settings = dict(cookie_secret="86648e5df6a3974c8352e7ffaf7b68c7",
template_path=os.path.join(
os.path.dirname(__file__), "templates"),
static_path=os.path.join(
os.path.dirname(__file__), "static"),
xsrf_cookies=False,
debug=options.debug)
|
yoziru-desu/locomo-pebble
|
mock-server/settings.py
|
Python
|
mit
| 668
|
from topia.termextract import extract
class KeywordExtractor:
def __init__(self):
self.extractor = extract.TermExtractor()
self.extractor.filter = extract.permissiveFilter
def extract(self, text):
return self.extractor(text.lower())
|
antoan-angelov/videogame-oracle
|
extractor/keyword_extractor.py
|
Python
|
gpl-3.0
| 269
|
input = """
% No auxiliary atoms at all.
ouch :- #max{V:a(V)} = 0.
"""
output = """
{}
"""
|
Yarrick13/hwasp
|
tests/wasp1/AllAnswerSets/aggregates_max_bug_1.test.py
|
Python
|
apache-2.0
| 92
|
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 30 18:09:16 2014
@author: jfelipe
"""
# import multiprocessing as mp
import sys
import os
import time
import multiprocessing as mp
import subprocess
from .processors import Producer, Processor, Consumer
from .dump import DumpFile, process_xml
from .page import pages_to_file, pages_file_to_db
from .revision import revs_to_file, revs_file_to_db
from .logitem import logitem_to_file, logitem_file_to_db
from wikidat.utils.dbutils import MySQLDB
class ETL(mp.Process):
"""
Abstract class defining common behaviour for all ETL (Extraction,
Tranformation and Load) workflows with Wikipedia data
"""
def __init__(self, group=None, target=None, name=None, args=None,
kwargs=None, lang=None, db_name=None, db_user=None,
db_passw=None):
"""
Initialize new worfklow
"""
super(ETL, self).__init__(name=name)
self.target = target
self.args = args if args is not None else []
self.kwargs = kwargs if kwargs is not None else {}
self.lang = lang
self.db_name = db_name
self.db_user = db_user
self.db_passw = db_passw
class RevisionHistoryETL(ETL):
"""
Models workflow to import page and revision history data from Wikipedia
database dump files
"""
def __init__(self, group=None, target=None, name=None, args=None,
kwargs=None, paths_queue=None, lang=None, page_fan=1,
rev_fan=3, page_cache_size=1000000, rev_cache_size=1000000,
db_name=None, db_user=None, db_passw=None,
base_port=None, control_port=None):
"""
Initialize new PageRevision workflow
"""
super(RevisionHistoryETL,
self).__init__(group=None, target=None, name=name, args=None,
kwargs=None, lang=lang, db_name=db_name,
db_user=db_user, db_passw=db_passw)
self.page_fan = page_fan
self.rev_fan = rev_fan
self.paths_queue = paths_queue
self.page_cache_size = page_cache_size
self.rev_cache_size = rev_cache_size
self.base_port = base_port
self.control_port = control_port
def run(self):
"""
Execute workflow to import revision history data from dump files
The data loading workflow is composed of a number of processor
elements, which can be:
- Producer (P): raw input data --> input element queue
- ConsumerProducer (CP): input element queue --> insert db queue
- Consumer (C): insert db queue --> database (MySQL/MariaDB)
In this case, the logical combination is usually N:N:1 (P, CP, C)
"""
start = time.time()
print(self.name, "Starting PageRevisionETL workflow at %s" % (
time.strftime("%Y-%m-%d %H:%M:%S %Z",
time.localtime())))
db_ns = MySQLDB(host='localhost', port=3306, user=self.db_user,
passwd=self.db_passw, db=self.db_name)
db_ns.connect()
db_pages = MySQLDB(host='localhost', port=3306,
user=self.db_user, passwd=self.db_passw,
db=self.db_name)
db_pages.connect()
db_revs = MySQLDB(host='localhost', port=3306, user=self.db_user,
passwd=self.db_passw, db=self.db_name)
db_revs.connect()
# DATA EXTRACTION
# Use consistent naming for all child processes
xml_reader_name = '-'.join([self.name, 'xml_reader'])
page_proc_name = '-'.join([self.name, 'process_page'])
rev_proc_name = '-'.join([self.name, 'process_revision'])
page_insert_name = '-'.join([self.name, 'insert_page'])
rev_insert_name = '-'.join([self.name, 'insert_revision'])
for path in iter(self.paths_queue.get, 'STOP'):
# Start subprocess to extract elements from revision dump file
dump_file = DumpFile(path)
xml_reader = Producer(name=xml_reader_name,
target=process_xml,
kwargs=dict(
dump_file=dump_file),
consumers=self.page_fan + self.rev_fan,
push_pages_port=self.base_port,
push_revs_port=self.base_port+1,
control_port=self.control_port)
xml_reader.start()
print(xml_reader_name, "started")
print(self.name, "Extracting data from XML revision history file:")
print(path)
# List to keep tracking of page and revision workers
workers = []
db_workers_revs = []
# Create and start page processes
for worker in range(self.page_fan):
page_worker_name = '-'.join([page_proc_name, str(worker)])
process_page = Processor(name=page_worker_name,
target=pages_to_file,
producers=1, consumers=1,
pull_port=self.base_port,
push_port=self.base_port+2,
control_port=self.control_port)
process_page.start()
workers.append(process_page)
print(page_worker_name, "started")
# Create and start revision processes
for worker in range(self.rev_fan):
rev_worker_name = '-'.join([rev_proc_name, str(worker)])
db_wrev = MySQLDB(host='localhost', port=3306,
user=self.db_user,
passwd=self.db_passw, db=self.db_name)
db_wrev.connect()
process_revision = Processor(name=rev_worker_name,
target=revs_to_file,
kwargs=dict(
lang=self.lang),
producers=1, consumers=1,
pull_port=self.base_port+1,
push_port=self.base_port+3,
control_port=self.control_port)
process_revision.start()
workers.append(process_revision)
db_workers_revs.append(db_wrev)
print(rev_worker_name, "started")
# Create directory for logging files if it does not exist
log_dir = os.path.join(os.path.split(path)[0], 'logs')
tmp_dir = os.path.join(os.getcwd(), os.path.split(path)[0], 'tmp')
file_name = os.path.split(path)[1]
if not os.path.exists(log_dir):
os.makedirs(log_dir)
if not os.path.exists(tmp_dir):
os.makedirs(tmp_dir)
log_file = os.path.join(log_dir, file_name + '.log')
page_insert_db = Consumer(name=page_insert_name,
target=pages_file_to_db,
kwargs=dict(con=db_pages,
log_file=log_file,
tmp_dir=tmp_dir,
file_rows=self.page_cache_size,
etl_prefix=self.name),
producers=self.page_fan,
pull_port=self.base_port+2)
rev_insert_db = Consumer(name=rev_insert_name,
target=revs_file_to_db,
kwargs=dict(con=db_revs,
log_file=log_file,
tmp_dir=tmp_dir,
file_rows=self.rev_cache_size,
etl_prefix=self.name),
producers=self.rev_fan,
pull_port=self.base_port+3)
page_insert_db.start()
print(page_insert_name, "started")
rev_insert_db.start()
print(rev_insert_name, "started")
print(self.name, "Waiting for all processes to finish...")
print()
xml_reader.join()
for w in workers:
w.join()
page_insert_db.join()
rev_insert_db.join()
# Mark this path as done
self.paths_queue.task_done()
# Mark STOP message as processed and finish
self.paths_queue.task_done()
end = time.time()
print(self.name, ": All tasks done in %.4f sec." % ((end-start)/1.))
print()
db_ns.close()
db_pages.close()
db_revs.close()
for dbcon in db_workers_revs:
dbcon.close()
class RevisionMetaETL(ETL):
"""
Implements workflow to extract and store metadata for pages and
revisions (stub-meta-history.xml files)
"""
pass
class LoggingETL(ETL):
"""
Implements workflow to extract and store information from logged
actions in MediaWiki. For instance, user blocks, page protections,
new users, flagged revisions reviews, etc.
"""
def __init__(self, group=None, target=None, name=None, args=None,
kwargs=None, path=None, lang=None, log_fan=1,
log_cache_size=1000000,
db_name=None, db_user=None, db_passw=None,
base_port=None, control_port=None):
"""
Initialize new PageRevision workflow
"""
super(LoggingETL,
self).__init__(group=None, target=None, name=name, args=None,
kwargs=None, lang=lang, db_name=db_name,
db_user=db_user, db_passw=db_passw)
self.path = path
self.log_fan = log_fan
self.log_cache_size = log_cache_size
self.base_port = base_port
self.control_port = control_port
def run(self):
"""
Execute workflow to import logging records of actions on pages and
users from dump file
The data loading workflow is composed of a number of processor
elements, which can be:
- Producer (P): raw input data --> input element queue
- ConsumerProducer (CP): input element queue --> insert db queue
- Consumer (C): insert db queue --> database (MySQL/MariaDB)
In this case, the usual combination is 1:N:1 (P, CP, C)
"""
start = time.time()
print("Starting LoggingETL workflow at %s" % (
time.strftime("%Y-%m-%d %H:%M:%S %Z",
time.localtime())))
# DATA EXTRACTION
xml_reader_name = '-'.join([self.name, 'xml_reader'])
logitem_proc_name = '-'.join([self.name, 'process_logitem'])
logitem_insert_name = '-'.join([self.name, 'insert_logitem'])
# Start subprocess to extract elements from logging dump file
file_path = self.path[0]
dump_file = DumpFile(file_path)
xml_reader = Producer(name=xml_reader_name,
target=process_xml,
kwargs=dict(
dump_file=dump_file),
consumers=self.log_fan,
push_logs_port=self.base_port,
control_port=self.control_port)
xml_reader.start()
print(xml_reader_name, "started")
print(self.name, "Extracting data from XML revision history file:")
print(str(self.path[0]))
# List to keep tracking of logitem workers
workers = []
# Create and start page processes
for worker in range(self.log_fan):
worker_name = '-'.join([logitem_proc_name, str(worker)])
process_logitems = Processor(name=worker_name,
target=logitem_to_file,
producers=1, consumers=1,
pull_port=self.base_port,
push_port=self.base_port+2,
control_port=self.control_port)
process_logitems.start()
workers.append(process_logitems)
print(worker_name, "started")
# Create directory for logging files if it does not exist
log_dir = os.path.join(os.path.split(file_path)[0], 'logs')
tmp_dir = os.path.join(os.getcwd(), os.path.split(file_path)[0], 'tmp')
file_name = os.path.split(file_path)[1]
if not os.path.exists(log_dir):
os.makedirs(log_dir)
if not os.path.exists(tmp_dir):
os.makedirs(tmp_dir)
log_file = os.path.join(log_dir, file_name + '.log')
db_log = MySQLDB(host='localhost', port=3306, user=self.db_user,
passwd=self.db_passw, db=self.db_name)
db_log.connect()
logitem_insert_db = Consumer(name=logitem_insert_name,
target=logitem_file_to_db,
kwargs=dict(con=db_log,
log_file=log_file,
tmp_dir=tmp_dir,
file_rows=self.log_cache_size,
etl_prefix=self.name),
producers=self.log_fan,
pull_port=self.base_port+2)
print(logitem_insert_name, "started")
logitem_insert_db.start()
print("Waiting for all processes to finish...")
print()
xml_reader.join()
for w in workers:
w.join()
logitem_insert_db.join()
# All operations finished
end = time.time()
print("All tasks done in %.4f sec." % ((end-start)/1.))
print()
db_log.close()
class SQLDumpsETL(ETL):
"""
Implements workflow to load native SQL dump files, created with
mysqldump and published in compressed format (gzip file)
"""
def __init__(self, group=None, target=None, name=None, args=None,
kwargs=None, path=None, lang=None,
db_name=None, db_user=None, db_passw=None):
"""
        Initialize new SQLDumpsETL workflow
"""
super(SQLDumpsETL,
self).__init__(group=None, target=None, name=name, args=None,
kwargs=None, lang=lang, db_name=db_name,
db_user=db_user, db_passw=db_passw)
self.path = path
def run(self):
"""
        Load the native SQL dump files into the database by piping them
        (decompressed on the fly when gzipped) to the mysql client
"""
        # TODO: Create Popen or similar subprocessing strategy w/ shell
# gzip -cd file | mysql [params]
# or in case the file is already uncompressed
# cat sql | mysql [params]
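        # For example (hypothetical file name and credentials, shown only to
        # illustrate the rendered command):
        #   gzip -cd eswiki-page.sql.gz | mysql -u wiki_user -pwiki_pass wikidb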
for path in self.path:
if '.gz' in path:
command = "gzip -cd {0} | mysql -u {1} -p{2} {3}"
else:
command = "cat {0} | mysql -u {1} -p{2} {3}"
print("Processing file ", os.path.split(path)[1])
p = subprocess.Popen(command.format(path, self.db_user,
self.db_passw, self.db_name),
shell=True,
stdout=subprocess.PIPE,
stderr=open(os.devnull, "w")
)
# sys.stderr.write(p.stdout.read(1000))
# return False
return p.stdout
if __name__ == '__main__':
path = sys.argv[1]
page_fan = int(sys.argv[2])
rev_fan = int(sys.argv[3])
lang = sys.argv[4]
db_name = sys.argv[5]
db_user = sys.argv[6]
db_passw = sys.argv[7]
workflow = RevisionHistoryETL(path, page_fan, rev_fan, lang, db_name,
db_user, db_passw)
workflow.run()
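# Example invocation (hypothetical dump path and credentials, for illustration only):
#   python etl.py dumps/eswiki-pages-meta-history.xml.7z 2 4 eswiki wikidb wiki_user wiki_pass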
|
glimmerphoenix/WikiDAT
|
wikidat/retrieval/etl.py
|
Python
|
gpl-3.0
| 16,498
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns('lrs.views',
url(r'^$', 'home'),
url(r'^statements/more/(?P<more_id>.{32})$', 'statements_more'),
url(r'^statements', 'statements'),
url(r'^activities/state', 'activity_state'),
url(r'^activities/profile', 'activity_profile'),
url(r'^activities', 'activities'),
url(r'^agents/profile', 'agent_profile'),
url(r'^agents', 'agents'),
url(r'^actexample/$', 'actexample'),
url(r'^actexample2/$', 'actexample2'),
url(r'^actexample3/$', 'actexample3'),
url(r'^actexample4/$', 'actexample4'),
url(r'^register/$', 'register'),
url(r'^regclient/$', 'reg_client'),
url(r'^regclient2/$', 'reg_client2'),
url(r'^OAuth/', include('oauth_provider.urls', namespace='oauth')),
# just urls for some user interface and oauth2... not part of xapi
url(r'^oauth2/', include('oauth2_provider.provider.oauth2.urls', namespace='oauth2')),
url(r'^me/statements/', 'my_statements'),
url(r'^me/jono/', 'jono'),
url(r'^me/download/statements/', 'download_statements'),
url(r'^me/activities/profiles', 'my_activity_profiles'),
url(r'^me/activities/single_profile', 'my_activity_profile'),
url(r'^me/activities/states', 'my_activity_states'),
url(r'^me/activities/single_state', 'my_activity_state'),
url(r'^me/activities/', 'my_activities'),
url(r'^me/apps/', 'my_app_status'),
url(r'^me/tokens/', 'delete_token'),
url(r'^me/tokens2/', 'delete_token2'),
url(r'^me/clients/', 'delete_client'),
url(r'^me/', 'me'),
url(r'^about', 'about'),
url(r'^statementvalidator', 'stmt_validator')
)
|
daafgo/Server_LRS
|
lrs/urls.py
|
Python
|
apache-2.0
| 1,672
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# brewpi_firmware documentation build configuration file, created by
# sphinx-quickstart on Wed Feb 24 21:47:52 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import distutils.spawn
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'src'))
extensions = [
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
'breathe',
'sphinxcontrib.doxylink',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
source_suffix = ['.rst', '.md']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'brewpi_firmware'
copyright = '2016, BrewPi'
author = 'BrewPi'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.4'
# The full version, including alpha/beta/rc tags.
release = '0.4.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'src']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'brewpi_firmware_doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'brewpi_firmware.tex', 'BrewPi Firmware Documentation',
'BrewPi', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'brewpi_firmware', 'BrewPi Firmware Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'brewpi_firmware', 'BrewPi Firmware Documentation',
author, 'brewpi_firmware', 'Developer documentation for the BrewPi Firmware.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
# add modules directory to path for local imports
sys.path.append(os.path.join(os.path.dirname(__file__), "modules"))
# add support for markdown
import recommonmark
from recommonmark.parser import CommonMarkParser
from recommonmark.transform import AutoStructify
source_parsers = {
'.md': CommonMarkParser,
}
# on_rtd is whether we are on readthedocs.org
import platform, subprocess, os
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
doxygen_output_dir = "_build/doxygen/"
if not os.path.exists(doxygen_output_dir):
os.makedirs(doxygen_output_dir)
# Breathe extension variables
breathe_projects = { "BrewPi Firmware": '_build/html/doxygen/xml'}
breathe_default_project = "BrewPi Firmware"
# add mapping for doxylink. doxylink.tag is generated by doxygen
# doxylink allows easy linking to html generated by doxygen
doxy_tag_file = '_build/doxygen/doxylink.tag'
doxy_html_dir = 'doxygen'
doxylink = {
'doxylink' : (doxy_tag_file, doxy_html_dir),
}
def run_doxygen():
"""Run the doxygen command in the current folder"""
if distutils.spawn.find_executable('doxygen') is not None:
try:
retcode = subprocess.call("doxygen", shell=True)
if retcode < 0:
sys.stderr.write("doxygen terminated by signal %s\n" % (-retcode))
retcode = subprocess.call("cp -rf ./_build/doxygen/html ./_build/html/doxygen", shell=True)
retcode = subprocess.call("cp -rf ./_build/doxygen/xml ./_build/html/doxygen/xml", shell=True)
except OSError as e:
sys.stderr.write("doxygen execution failed: %s\n" % e)
else:
sys.stderr.write("could not find doxygen executable on the path\n")
github_doc_root = 'https://github.com/BrewPi/firmware/tree/develop/docs'
def setup(app):
app.add_config_value('recommonmark_config', {
'url_resolver': lambda url: github_doc_root + url,
'auto_toc_tree_section': 'Contents',
}, True)
app.add_transform(AutoStructify)
run_doxygen()
|
BrewPi/firmware
|
docs/conf.py
|
Python
|
agpl-3.0
| 11,969
|
"""ASCII-ART 2D pretty-printer"""
from .pretty import (pretty, pretty_print, pprint, pprint_use_unicode,
pprint_try_use_unicode, pager_print)
# if unicode output is available -- let's use it
pprint_try_use_unicode()
|
wxgeo/geophar
|
wxgeometrie/sympy/printing/pretty/__init__.py
|
Python
|
gpl-2.0
| 222
|
#!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.common import extractRegexResult
from lib.core.common import getFilteredPageContent
from lib.core.common import listToStrValue
from lib.core.common import removeDynamicContent
from lib.core.common import wasLastResponseDBMSError
from lib.core.common import wasLastResponseHTTPError
from lib.core.data import conf
from lib.core.data import kb
from lib.core.data import logger
from lib.core.exception import SqlmapNoneDataException
from lib.core.settings import DEFAULT_PAGE_ENCODING
from lib.core.settings import DIFF_TOLERANCE
from lib.core.settings import HTML_TITLE_REGEX
from lib.core.settings import MIN_RATIO
from lib.core.settings import MAX_RATIO
from lib.core.settings import REFLECTED_VALUE_MARKER
from lib.core.settings import LOWER_RATIO_BOUND
from lib.core.settings import UPPER_RATIO_BOUND
from lib.core.threads import getCurrentThreadData
def comparison(page, headers, code=None, getRatioValue=False, pageLength=None):
_ = _adjust(_comparison(page, headers, code, getRatioValue, pageLength), getRatioValue)
return _
def _adjust(condition, getRatioValue):
if not any((conf.string, conf.notString, conf.regexp, conf.code)):
        # In the raw page comparison scheme a negative logic approach is used: whatever is "different" from the
        # original PAYLOAD.WHERE.NEGATIVE response is considered True. In the switch based approach negative
        # logic is not applied, because what the user considers as True is exactly what the comparison
        # mechanism itself returns
retVal = not condition if kb.negativeLogic and condition is not None and not getRatioValue else condition
else:
retVal = condition if not getRatioValue else (MAX_RATIO if condition else MIN_RATIO)
return retVal
def _comparison(page, headers, code, getRatioValue, pageLength):
threadData = getCurrentThreadData()
if kb.testMode:
threadData.lastComparisonHeaders = listToStrValue(headers.headers) if headers else ""
threadData.lastComparisonPage = page
if page is None and pageLength is None:
return None
seqMatcher = threadData.seqMatcher
seqMatcher.set_seq1(kb.pageTemplate)
if any((conf.string, conf.notString, conf.regexp)):
rawResponse = "%s%s" % (listToStrValue(headers.headers) if headers else "", page)
# String to match in page when the query is True and/or valid
if conf.string:
return conf.string in rawResponse
# String to match in page when the query is False and/or invalid
if conf.notString:
return conf.notString not in rawResponse
# Regular expression to match in page when the query is True and/or valid
if conf.regexp:
return re.search(conf.regexp, rawResponse, re.I | re.M) is not None
# HTTP code to match when the query is valid
if conf.code:
return conf.code == code
if page:
        # In case of a DBMS error page return None
if kb.errorIsNone and (wasLastResponseDBMSError() or wasLastResponseHTTPError()):
return None
# Dynamic content lines to be excluded before comparison
if not kb.nullConnection:
page = removeDynamicContent(page)
seqMatcher.set_seq1(removeDynamicContent(kb.pageTemplate))
if not pageLength:
pageLength = len(page)
if kb.nullConnection and pageLength:
if not seqMatcher.a:
errMsg = "problem occurred while retrieving original page content "
errMsg += "which prevents sqlmap from continuation. Please rerun, "
errMsg += "and if the problem persists turn off any optimization switches"
raise SqlmapNoneDataException(errMsg)
ratio = 1. * pageLength / len(seqMatcher.a)
if ratio > 1.:
ratio = 1. / ratio
else:
# Preventing "Unicode equal comparison failed to convert both arguments to Unicode"
# (e.g. if one page is PDF and the other is HTML)
if isinstance(seqMatcher.a, str) and isinstance(page, unicode):
page = page.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, 'ignore')
elif isinstance(seqMatcher.a, unicode) and isinstance(page, str):
seqMatcher.a = seqMatcher.a.encode(kb.pageEncoding or DEFAULT_PAGE_ENCODING, 'ignore')
seq1, seq2 = None, None
if conf.titles:
seq1 = extractRegexResult(HTML_TITLE_REGEX, seqMatcher.a)
seq2 = extractRegexResult(HTML_TITLE_REGEX, page)
else:
seq1 = getFilteredPageContent(seqMatcher.a, True) if conf.textOnly else seqMatcher.a
seq2 = getFilteredPageContent(page, True) if conf.textOnly else page
if seq1 is None or seq2 is None:
return None
seq1 = seq1.replace(REFLECTED_VALUE_MARKER, "")
seq2 = seq2.replace(REFLECTED_VALUE_MARKER, "")
count = 0
while count < min(len(seq1), len(seq2)):
if seq1[count] == seq2[count]:
count += 1
else:
break
if count:
seq1 = seq1[count:]
seq2 = seq2[count:]
seqMatcher.set_seq1(seq1)
seqMatcher.set_seq2(seq2)
ratio = round(seqMatcher.quick_ratio(), 3)
# If the url is stable and we did not set yet the match ratio and the
# current injected value changes the url page content
if kb.matchRatio is None:
if ratio >= LOWER_RATIO_BOUND and ratio <= UPPER_RATIO_BOUND:
kb.matchRatio = ratio
logger.debug("setting match ratio for current parameter to %.3f" % kb.matchRatio)
# If it has been requested to return the ratio and not a comparison
# response
if getRatioValue:
return ratio
elif ratio > UPPER_RATIO_BOUND:
return True
elif kb.matchRatio is None:
return None
else:
return (ratio - kb.matchRatio) > DIFF_TOLERANCE
|
golismero/golismero
|
tools/sqlmap/lib/request/comparison.py
|
Python
|
gpl-2.0
| 6,090
|
#!/usr/bin/env python
# **********************************************************************
#
# Copyright (c) 2003-2013 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import os, sys, shutil, glob, socket
def usage():
print("usage: " + sys.argv[0] + " [ip address]")
sys.exit(1)
debug = True
ipAddress = None
if len(sys.argv) == 2:
ipAddress = sys.argv[1]
if not ipAddress:
try:
ipAddress = socket.gethostbyname(socket.gethostname())
except:
ipAddress = "127.0.0.1"
cwd = os.getcwd()
if not os.path.exists("makewinrtcerts.py") or os.path.basename(cwd) != "certs":
print("You must run this script from the certs demo directory")
sys.exit(1)
if os.path.exists("winrt"):
shutil.rmtree("winrt")
os.mkdir("winrt")
os.environ["ICE_CA_HOME"] = os.path.abspath("winrt")
os.chdir("winrt")
while True:
print("The IP address used for the server certificate will be: " + ipAddress)
sys.stdout.write("Do you want to keep this IP address? (y/n) [y]")
sys.stdout.flush()
input = sys.stdin.readline().strip()
if input == 'n':
sys.stdout.write("IP : ")
sys.stdout.flush()
ipAddress = sys.stdin.readline().strip()
else:
break
certs = "."
caHome = os.path.join(certs, "ca")
caKey = os.path.join(certs, "cakey.pem")
caCert = os.path.join(certs, "cacert.pem")
print("Generating new CA certificate and key...")
os.mkdir(caHome)
f = open(os.path.join(caHome, "serial"), "w")
f.write("01")
f.close()
f = open(os.path.join(caHome, "index.txt"), "w")
f.truncate(0)
f.close()
#
# Static configuration file data.
#
config = {\
"ca.cnf":"\
# **********************************************************************\n\
#\n\
# Copyright (c) 2003-2013 ZeroC, Inc. All rights reserved.\n\
#\n\
# This copy of Ice is licensed to you under the terms described in the\n\
# ICE_LICENSE file included in this distribution.\n\
#\n\
# **********************************************************************\n\
\n\
# Configuration file for the CA. This file is generated by iceca init.\n\
# DO NOT EDIT!\n\
\n\
###############################################################################\n\
### Self Signed Root Certificate\n\
###############################################################################\n\
\n\
[ ca ]\n\
default_ca = ice\n\
\n\
[ ice ]\n\
default_days = 1825 # How long certs are valid.\n\
default_md = md5 # The Message Digest type.\n\
preserve = no # Keep passed DN ordering?\n\
\n\
[ req ]\n\
default_bits = 2048\n\
default_keyfile = $ENV::ICE_CA_HOME/ca/cakey.pem\n\
default_md = md5\n\
prompt = no\n\
distinguished_name = dn\n\
x509_extensions = extensions\n\
\n\
[ extensions ]\n\
basicConstraints = CA:true\n\
\n\
# PKIX recommendation.\n\
subjectKeyIdentifier = hash\n\
authorityKeyIdentifier = keyid:always,issuer:always\n\
\n\
[dn]\n\
countryName = US\n\
stateOrProvinceName = Florida\n\
localityName = Palm Beach Gardens\n\
organizationName = ZeroC, Inc.\n\
organizationalUnitName = Ice\n\
commonName = ZeroC WinRT Test CA\n\
emailAddress = info@zeroc.com\n\
",\
"server.cnf":"\
# **********************************************************************\n\
#\n\
# Copyright (c) 2003-2013 ZeroC, Inc. All rights reserved.\n\
#\n\
# This copy of Ice is licensed to you under the terms described in the\n\
# ICE_LICENSE file included in this distribution.\n\
#\n\
# **********************************************************************\n\
\n\
# Configuration file to sign a certificate. This file is generated by iceca init.\n\
# DO NOT EDIT!!\n\
\n\
[ ca ]\n\
default_ca = ice\n\
\n\
[ ice ]\n\
dir = $ENV::ICE_CA_HOME/ca # Where everything is kept.\n\
private_key = $dir/cakey.pem # The CA Private Key.\n\
certificate = $dir/cacert.pem # The CA Certificate.\n\
database = $dir/index.txt # Database index file.\n\
new_certs_dir = $dir # Default loc for new certs.\n\
serial = $dir/serial # The current serial number.\n\
certs = $dir # Where issued certs are kept.\n\
RANDFILE = $dir/.rand # Private random number file.\n\
default_days = 1825 # How long certs are valid.\n\
default_md = md5 # The Message Digest type.\n\
preserve = yes # Keep passed DN ordering?\n\
\n\
policy = ca_policy\n\
x509_extensions = certificate_extensions\n\
\n\
[ certificate_extensions ]\n\
basicConstraints = CA:false\n\
\n\
# PKIX recommendation.\n\
subjectKeyIdentifier = hash\n\
authorityKeyIdentifier = keyid:always,issuer:always\n\
subjectAltName = DNS:%s, IP:%s\n\
\n\
[ ca_policy ]\n\
countryName = match\n\
stateOrProvinceName = match\n\
organizationName = match\n\
organizationalUnitName = optional\n\
emailAddress = optional\n\
commonName = supplied\n\
\n\
[ req ]\n\
default_bits = 1024\n\
default_md = md5\n\
prompt = no\n\
distinguished_name = dn\n\
x509_extensions = extensions\n\
\n\
[ extensions ]\n\
basicConstraints = CA:false\n\
\n\
# PKIX recommendation.\n\
subjectKeyIdentifier = hash\n\
authorityKeyIdentifier = keyid:always,issuer:always\n\
keyUsage = nonRepudiation, digitalSignature, keyEncipherment\n\
\n\
[dn]\n\
countryName = US\n\
stateOrProvinceName = Florida\n\
localityName = Palm Beach Gardens\n\
organizationName = ZeroC, Inc.\n\
organizationalUnitName = Ice\n\
commonName = %s\n\
emailAddress = info@zeroc.com\n\
" % (ipAddress, ipAddress, ipAddress) }
sys.stdout.write("Generating configuration files... ")
for file in ["ca.cnf", "server.cnf"]:
sys.stdout.write(" " + file)
sys.stdout.flush()
cnf = open(os.path.join(caHome, file), "w")
cnf.write(config[file])
cnf.close()
print("")
config = os.path.join(caHome, "ca.cnf")
cmd = "openssl req -config " + config + " -x509 -days 1825 -newkey rsa:1024 -out " + \
os.path.join(caHome, "cacert.pem") + " -outform PEM -nodes"
if debug:
print("[debug]", cmd)
os.system(cmd)
shutil.copyfile(os.path.join(caHome, "cakey.pem"), caKey)
shutil.copyfile(os.path.join(caHome, "cacert.pem"), caCert)
#
# C++ server RSA certificate and key.
#
cppServerCert = os.path.join(certs, "s_rsa1024_pub.pem")
cppServerKey = os.path.join(certs, "s_rsa1024_priv.pem")
print("Generating new C++ server RSA certificate and key...")
if os.path.exists(cppServerCert):
os.remove(cppServerCert)
if os.path.exists(cppServerKey):
os.remove(cppServerKey)
serial = os.path.join(caHome, "serial")
f = open(serial, "r")
serialNum = f.read().strip()
f.close()
tmpKey = os.path.join(caHome, serialNum + "_key.pem")
tmpCert = os.path.join(caHome, serialNum + "_cert.pem")
req = os.path.join(caHome, "req.pem")
config = os.path.join(caHome, "server.cnf")
cmd = "openssl req -config " + config + " -newkey rsa:1024 -nodes -keyout " + tmpKey + " -keyform PEM" + \
" -out " + req
if debug:
print("[debug]", cmd)
os.system(cmd)
cmd = "openssl ca -config " + config + " -batch -in " + req
if debug:
print("[debug]", cmd)
os.system(cmd)
shutil.move(os.path.join(caHome, serialNum + ".pem"), tmpCert)
shutil.copyfile(tmpKey, cppServerKey)
shutil.copyfile(tmpCert, cppServerCert)
os.remove(req)
#
# .NET server RSA certificate and key.
#
csServer = os.path.join(certs, "s_rsa1024.pfx")
print("Generating new .NET server RSA certificate and key...")
cmd = "openssl pkcs12 -in " + cppServerCert + " -inkey " + cppServerKey + " -export -out " + csServer + \
" -certpbe PBE-SHA1-RC4-40 -keypbe PBE-SHA1-RC4-40 -passout pass:password"
if debug:
print("[debug]", cmd)
os.system(cmd)
#
# Done.
#
print("Done.")
os.chdir("..")
|
sbesson/zeroc-ice
|
certs/makewinrtcerts.py
|
Python
|
gpl-2.0
| 8,091
|
"""
Type Inference
"""
from .typevar import TypeVar
from .ast import Def, Var
from copy import copy
from itertools import product
try:
from typing import Dict, TYPE_CHECKING, Union, Tuple, Optional, Set # noqa
from typing import Iterable, List, Any, TypeVar as MTypeVar # noqa
from typing import cast
from .xform import Rtl, XForm # noqa
from .ast import Expr # noqa
from .typevar import TypeSet # noqa
if TYPE_CHECKING:
T = MTypeVar('T')
TypeMap = Dict[TypeVar, TypeVar]
VarTyping = Dict[Var, TypeVar]
except ImportError:
TYPE_CHECKING = False
pass
class TypeConstraint(object):
"""
Base class for all runtime-emittable type constraints.
"""
def __init__(self, tv, tc):
# type: (TypeVar, Union[TypeVar, TypeSet]) -> None
"""
Abstract "constructor" for linters
"""
assert False, "Abstract"
def translate(self, m):
# type: (Union[TypeEnv, TypeMap]) -> TypeConstraint
"""
Translate any TypeVars in the constraint according to the map or
TypeEnv m
"""
def translate_one(a):
# type: (Any) -> Any
if (isinstance(a, TypeVar)):
return m[a] if isinstance(m, TypeEnv) else subst(a, m)
return a
res = None # type: TypeConstraint
res = self.__class__(*tuple(map(translate_one, self._args())))
return res
def __eq__(self, other):
# type: (object) -> bool
if (not isinstance(other, self.__class__)):
return False
assert isinstance(other, TypeConstraint) # help MyPy figure out other
return self._args() == other._args()
def is_concrete(self):
# type: () -> bool
"""
Return true iff all typevars in the constraint are singletons.
"""
return [] == list(filter(lambda x: x.singleton_type() is None,
self.tvs()))
def __hash__(self):
# type: () -> int
return hash(self._args())
def _args(self):
# type: () -> Tuple[Any,...]
"""
Return a tuple with the exact arguments passed to __init__ to create
this object.
"""
assert False, "Abstract"
def tvs(self):
# type: () -> Iterable[TypeVar]
"""
Return the typevars contained in this constraint.
"""
return list(filter(lambda x: isinstance(x, TypeVar), self._args()))
def is_trivial(self):
# type: () -> bool
"""
Return true if this constrain is statically decidable.
"""
assert False, "Abstract"
def eval(self):
# type: () -> bool
"""
Evaluate this constraint. Should only be called when the constraint has
been translated to concrete types.
"""
assert False, "Abstract"
def __repr__(self):
# type: () -> str
return (self.__class__.__name__ + '(' +
', '.join(map(str, self._args())) + ')')
class TypesEqual(TypeConstraint):
"""
Constraint specifying that two derived type vars must have the same runtime
type.
"""
def __init__(self, tv1, tv2):
# type: (TypeVar, TypeVar) -> None
(self.tv1, self.tv2) = sorted([tv1, tv2], key=repr)
def _args(self):
# type: () -> Tuple[Any,...]
""" See TypeConstraint._args() """
return (self.tv1, self.tv2)
def is_trivial(self):
# type: () -> bool
""" See TypeConstraint.is_trivial() """
return self.tv1 == self.tv2 or self.is_concrete()
def eval(self):
# type: () -> bool
""" See TypeConstraint.eval() """
assert self.is_concrete()
return self.tv1.singleton_type() == self.tv2.singleton_type()
class InTypeset(TypeConstraint):
"""
Constraint specifying that a type var must belong to some typeset.
"""
def __init__(self, tv, ts):
# type: (TypeVar, TypeSet) -> None
assert not tv.is_derived and tv.name.startswith("typeof_")
self.tv = tv
self.ts = ts
def _args(self):
# type: () -> Tuple[Any,...]
""" See TypeConstraint._args() """
return (self.tv, self.ts)
def is_trivial(self):
# type: () -> bool
""" See TypeConstraint.is_trivial() """
tv_ts = self.tv.get_typeset().copy()
# Trivially True
if (tv_ts.issubset(self.ts)):
return True
# Trivially false
tv_ts &= self.ts
if (tv_ts.size() == 0):
return True
return self.is_concrete()
def eval(self):
# type: () -> bool
""" See TypeConstraint.eval() """
assert self.is_concrete()
return self.tv.get_typeset().issubset(self.ts)
class WiderOrEq(TypeConstraint):
"""
Constraint specifying that a type var tv1 must be wider than or equal to
type var tv2 at runtime. This requires that:
1) They have the same number of lanes
2) In a lane tv1 has at least as many bits as tv2.
"""
def __init__(self, tv1, tv2):
# type: (TypeVar, TypeVar) -> None
self.tv1 = tv1
self.tv2 = tv2
def _args(self):
# type: () -> Tuple[Any,...]
""" See TypeConstraint._args() """
return (self.tv1, self.tv2)
def is_trivial(self):
# type: () -> bool
""" See TypeConstraint.is_trivial() """
# Trivially true
if (self.tv1 == self.tv2):
return True
ts1 = self.tv1.get_typeset()
ts2 = self.tv2.get_typeset()
def set_wider_or_equal(s1, s2):
# type: (Set[int], Set[int]) -> bool
return len(s1) > 0 and len(s2) > 0 and min(s1) >= max(s2)
# Trivially True
if set_wider_or_equal(ts1.ints, ts2.ints) and\
set_wider_or_equal(ts1.floats, ts2.floats) and\
set_wider_or_equal(ts1.bools, ts2.bools):
return True
def set_narrower(s1, s2):
# type: (Set[int], Set[int]) -> bool
return len(s1) > 0 and len(s2) > 0 and min(s1) < max(s2)
# Trivially False
if set_narrower(ts1.ints, ts2.ints) and\
set_narrower(ts1.floats, ts2.floats) and\
set_narrower(ts1.bools, ts2.bools):
return True
# Trivially False
if len(ts1.lanes.intersection(ts2.lanes)) == 0:
return True
return self.is_concrete()
def eval(self):
# type: () -> bool
""" See TypeConstraint.eval() """
assert self.is_concrete()
typ1 = self.tv1.singleton_type()
typ2 = self.tv2.singleton_type()
return typ1.wider_or_equal(typ2)
class SameWidth(TypeConstraint):
"""
Constraint specifying that two types have the same width. E.g. i32x2 has
the same width as i64x1, i16x4, f32x2, f64, b1x64 etc.
"""
def __init__(self, tv1, tv2):
# type: (TypeVar, TypeVar) -> None
self.tv1 = tv1
self.tv2 = tv2
def _args(self):
# type: () -> Tuple[Any,...]
""" See TypeConstraint._args() """
return (self.tv1, self.tv2)
def is_trivial(self):
# type: () -> bool
""" See TypeConstraint.is_trivial() """
# Trivially true
if (self.tv1 == self.tv2):
return True
ts1 = self.tv1.get_typeset()
ts2 = self.tv2.get_typeset()
# Trivially False
if len(ts1.widths().intersection(ts2.widths())) == 0:
return True
return self.is_concrete()
def eval(self):
# type: () -> bool
""" See TypeConstraint.eval() """
assert self.is_concrete()
typ1 = self.tv1.singleton_type()
typ2 = self.tv2.singleton_type()
return (typ1.width() == typ2.width())
class TypeEnv(object):
"""
Class encapsulating the necessary book keeping for type inference.
:attribute type_map: dict holding the equivalence relations between tvs
    :attribute constraints: a list of accumulated constraints - tuples
                            (tv1, tv2) where tv1 and tv2 are equal
:attribute ranks: dictionary recording the (optional) ranks for tvs.
'rank' is a partial ordering on TVs based on their
origin. See comments in rank() and register().
:attribute vars: a set containing all known Vars
:attribute idx: counter used to get fresh ids
"""
RANK_SINGLETON = 5
RANK_INPUT = 4
RANK_INTERMEDIATE = 3
RANK_OUTPUT = 2
RANK_TEMP = 1
RANK_INTERNAL = 0
def __init__(self, arg=None):
# type: (Optional[Tuple[TypeMap, List[TypeConstraint]]]) -> None
self.ranks = {} # type: Dict[TypeVar, int]
self.vars = set() # type: Set[Var]
if arg is None:
self.type_map = {} # type: TypeMap
self.constraints = [] # type: List[TypeConstraint]
else:
self.type_map, self.constraints = arg
self.idx = 0
def __getitem__(self, arg):
# type: (Union[TypeVar, Var]) -> TypeVar
"""
Lookup the canonical representative for a Var/TypeVar.
"""
if (isinstance(arg, Var)):
assert arg in self.vars
tv = arg.get_typevar()
else:
assert (isinstance(arg, TypeVar))
tv = arg
while tv in self.type_map:
tv = self.type_map[tv]
if tv.is_derived:
tv = TypeVar.derived(self[tv.base], tv.derived_func)
return tv
def equivalent(self, tv1, tv2):
# type: (TypeVar, TypeVar) -> None
"""
Record a that the free tv1 is part of the same equivalence class as
tv2. The canonical representative of the merged class is tv2's
canonical representative.
"""
assert not tv1.is_derived
assert self[tv1] == tv1
# Make sure we don't create cycles
if tv2.is_derived:
assert self[tv2.base] != tv1
self.type_map[tv1] = tv2
def add_constraint(self, constr):
# type: (TypeConstraint) -> None
"""
Add a new constraint
"""
if (constr in self.constraints):
return
# InTypeset constraints can be expressed by constraining the typeset of
# a variable. No need to add them to self.constraints
if (isinstance(constr, InTypeset)):
self[constr.tv].constrain_types_by_ts(constr.ts)
return
self.constraints.append(constr)
def get_uid(self):
# type: () -> str
r = str(self.idx)
self.idx += 1
return r
def __repr__(self):
# type: () -> str
return self.dot()
def rank(self, tv):
# type: (TypeVar) -> int
"""
Get the rank of tv in the partial order. TVs directly associated with a
Var get their rank from the Var (see register()). Internally generated
non-derived TVs implicitly get the lowest rank (0). Derived variables
get their rank from their free typevar. Singletons have the highest
rank. TVs associated with vars in a source pattern have a higher rank
than TVs associated with temporary vars.
"""
default_rank = TypeEnv.RANK_INTERNAL if tv.singleton_type() is None \
else TypeEnv.RANK_SINGLETON
if tv.is_derived:
tv = tv.free_typevar()
return self.ranks.get(tv, default_rank)
def register(self, v):
# type: (Var) -> None
"""
Register a new Var v. This computes a rank for the associated TypeVar
for v, which is used to impose a partial order on type variables.
"""
self.vars.add(v)
if v.is_input():
r = TypeEnv.RANK_INPUT
elif v.is_intermediate():
r = TypeEnv.RANK_INTERMEDIATE
elif v.is_output():
r = TypeEnv.RANK_OUTPUT
else:
assert(v.is_temp())
r = TypeEnv.RANK_TEMP
self.ranks[v.get_typevar()] = r
def free_typevars(self):
# type: () -> List[TypeVar]
"""
Get the free typevars in the current type env.
"""
tvs = set([self[tv].free_typevar() for tv in self.type_map.keys()])
tvs = tvs.union(set([self[v].free_typevar() for v in self.vars]))
# Filter out None here due to singleton type vars
return sorted(filter(lambda x: x is not None, tvs),
key=lambda x: x.name)
def normalize(self):
# type: () -> None
"""
Normalize by:
- collapsing any roots that don't correspond to a concrete TV AND
have a single TV derived from them or equivalent to them
E.g. if we have a root of the tree that looks like:
typeof_a typeof_b
\\ /
typeof_x
|
half_width(1)
|
1
we want to collapse the linear path between 1 and typeof_x. The
resulting graph is:
typeof_a typeof_b
\\ /
typeof_x
"""
source_tvs = set([v.get_typevar() for v in self.vars])
children = {} # type: Dict[TypeVar, Set[TypeVar]]
for v in self.type_map.values():
if not v.is_derived:
continue
t = v.free_typevar()
s = children.get(t, set())
s.add(v)
children[t] = s
for (a, b) in self.type_map.items():
s = children.get(b, set())
s.add(a)
children[b] = s
for r in self.free_typevars():
while (r not in source_tvs and r in children and
len(children[r]) == 1):
child = list(children[r])[0]
if child in self.type_map:
assert self.type_map[child] == r
del self.type_map[child]
r = child
def extract(self):
# type: () -> TypeEnv
"""
Extract a clean type environment from self, that only mentions
TVs associated with real variables
"""
vars_tvs = set([v.get_typevar() for v in self.vars])
new_type_map = {tv: self[tv] for tv in vars_tvs if tv != self[tv]}
new_constraints = [] # type: List[TypeConstraint]
for constr in self.constraints:
constr = constr.translate(self)
if constr.is_trivial() or constr in new_constraints:
continue
# Sanity: translated constraints should refer to only real vars
for arg in constr._args():
if (not isinstance(arg, TypeVar)):
continue
arg_free_tv = arg.free_typevar()
assert arg_free_tv is None or arg_free_tv in vars_tvs
new_constraints.append(constr)
# Sanity: translated typemap should refer to only real vars
for (k, v) in new_type_map.items():
assert k in vars_tvs
assert v.free_typevar() is None or v.free_typevar() in vars_tvs
t = TypeEnv()
t.type_map = new_type_map
t.constraints = new_constraints
# ranks and vars contain only TVs associated with real vars
t.ranks = copy(self.ranks)
t.vars = copy(self.vars)
return t
def concrete_typings(self):
# type: () -> Iterable[VarTyping]
"""
Return an iterable over all possible concrete typings permitted by this
TypeEnv.
"""
free_tvs = self.free_typevars()
free_tv_iters = [tv.get_typeset().concrete_types() for tv in free_tvs]
for concrete_types in product(*free_tv_iters):
# Build type substitutions for all free vars
m = {tv: TypeVar.singleton(typ)
for (tv, typ) in zip(free_tvs, concrete_types)}
concrete_var_map = {v: subst(self[v.get_typevar()], m)
for v in self.vars}
# Check if constraints are satisfied for this typing
failed = None
for constr in self.constraints:
concrete_constr = constr.translate(m)
if not concrete_constr.eval():
failed = concrete_constr
break
if (failed is not None):
continue
yield concrete_var_map
def permits(self, concrete_typing):
# type: (VarTyping) -> bool
"""
Return true iff this TypeEnv permits the (possibly partial) concrete
variable type mapping concrete_typing.
"""
# Each variable has a concrete type, that is a subset of its inferred
# typeset.
for (v, typ) in concrete_typing.items():
assert typ.singleton_type() is not None
if not typ.get_typeset().issubset(self[v].get_typeset()):
return False
m = {self[v]: typ for (v, typ) in concrete_typing.items()}
# Constraints involving vars in concrete_typing are satisfied
for constr in self.constraints:
try:
# If the constraint includes only vars in concrete_typing, we
# can translate it using m. Otherwise we encounter a KeyError
# and ignore it
constr = constr.translate(m)
if not constr.eval():
return False
except KeyError:
pass
return True
def dot(self):
# type: () -> str
"""
Return a representation of self as a graph in dot format.
Nodes correspond to TypeVariables.
Dotted edges correspond to equivalences between TVS
Solid edges correspond to derivation relations between TVs.
Dashed edges correspond to equivalence constraints.
"""
def label(s):
# type: (TypeVar) -> str
return "\"" + str(s) + "\""
# Add all registered TVs (as some of them may be singleton nodes not
        # appearing in the graph)
nodes = set() # type: Set[TypeVar]
edges = set() # type: Set[Tuple[TypeVar, TypeVar, str, str, Optional[str]]] # noqa
def add_nodes(*args):
# type: (*TypeVar) -> None
for tv in args:
nodes.add(tv)
while (tv.is_derived):
nodes.add(tv.base)
edges.add((tv, tv.base, "solid", "forward",
tv.derived_func))
tv = tv.base
for v in self.vars:
add_nodes(v.get_typevar())
for (tv1, tv2) in self.type_map.items():
# Add all intermediate TVs appearing in edges
add_nodes(tv1, tv2)
edges.add((tv1, tv2, "dotted", "forward", None))
for constr in self.constraints:
if isinstance(constr, TypesEqual):
add_nodes(constr.tv1, constr.tv2)
edges.add((constr.tv1, constr.tv2, "dashed", "none", "equal"))
elif isinstance(constr, WiderOrEq):
add_nodes(constr.tv1, constr.tv2)
edges.add((constr.tv1, constr.tv2, "dashed", "forward", ">="))
elif isinstance(constr, SameWidth):
add_nodes(constr.tv1, constr.tv2)
edges.add((constr.tv1, constr.tv2, "dashed", "none",
"same_width"))
else:
assert False, "Can't display constraint {}".format(constr)
root_nodes = set([x for x in nodes
if x not in self.type_map and not x.is_derived])
r = "digraph {\n"
for n in nodes:
r += label(n)
if n in root_nodes:
r += "[xlabel=\"{}\"]".format(self[n].get_typeset())
r += ";\n"
for (n1, n2, style, direction, elabel) in edges:
e = label(n1) + "->" + label(n2)
e += "[style={},dir={}".format(style, direction)
if elabel is not None:
e += ",label=\"{}\"".format(elabel)
e += "];\n"
r += e
r += "}"
return r
if TYPE_CHECKING:
TypingError = str
TypingOrError = Union[TypeEnv, TypingError]
def get_error(typing_or_err):
# type: (TypingOrError) -> Optional[TypingError]
"""
Helper function to appease mypy when checking the result of typing.
"""
if isinstance(typing_or_err, str):
if (TYPE_CHECKING):
return cast(TypingError, typing_or_err)
else:
return typing_or_err
else:
return None
def get_type_env(typing_or_err):
# type: (TypingOrError) -> TypeEnv
"""
Helper function to appease mypy when checking the result of typing.
"""
assert isinstance(typing_or_err, TypeEnv), \
"Unexpected error: {}".format(typing_or_err)
if (TYPE_CHECKING):
return cast(TypeEnv, typing_or_err)
else:
return typing_or_err
def subst(tv, tv_map):
# type: (TypeVar, TypeMap) -> TypeVar
"""
    Perform substitution on the input tv using the TypeMap tv_map.
"""
if tv in tv_map:
return tv_map[tv]
if tv.is_derived:
return TypeVar.derived(subst(tv.base, tv_map), tv.derived_func)
return tv
def normalize_tv(tv):
# type: (TypeVar) -> TypeVar
"""
Normalize a (potentially derived) TV using the following rules:
- vector and width derived functions commute
{HALF,DOUBLE}VECTOR({HALF,DOUBLE}WIDTH(base)) ->
{HALF,DOUBLE}WIDTH({HALF,DOUBLE}VECTOR(base))
- half/double pairs collapse
{HALF,DOUBLE}WIDTH({DOUBLE,HALF}WIDTH(base)) -> base
{HALF,DOUBLE}VECTOR({DOUBLE,HALF}VECTOR(base)) -> base
"""
vector_derives = [TypeVar.HALFVECTOR, TypeVar.DOUBLEVECTOR]
width_derives = [TypeVar.HALFWIDTH, TypeVar.DOUBLEWIDTH]
if not tv.is_derived:
return tv
df = tv.derived_func
if (tv.base.is_derived):
base_df = tv.base.derived_func
# Reordering: {HALFWIDTH, DOUBLEWIDTH} commute with {HALFVECTOR,
# DOUBLEVECTOR}. Arbitrarily pick WIDTH < VECTOR
if df in vector_derives and base_df in width_derives:
return normalize_tv(
TypeVar.derived(
TypeVar.derived(tv.base.base, df), base_df))
# Cancelling: HALFWIDTH, DOUBLEWIDTH and HALFVECTOR, DOUBLEVECTOR
# cancel each other. Note: This doesn't hide any over/underflows,
# since we 1) assert the safety of each TV in the chain upon its
# creation, and 2) the base typeset is only allowed to shrink.
if (df, base_df) in \
[(TypeVar.HALFVECTOR, TypeVar.DOUBLEVECTOR),
(TypeVar.DOUBLEVECTOR, TypeVar.HALFVECTOR),
(TypeVar.HALFWIDTH, TypeVar.DOUBLEWIDTH),
(TypeVar.DOUBLEWIDTH, TypeVar.HALFWIDTH)]:
return normalize_tv(tv.base.base)
return TypeVar.derived(normalize_tv(tv.base), df)
def constrain_fixpoint(tv1, tv2):
# type: (TypeVar, TypeVar) -> None
"""
Given typevars tv1 and tv2 (which could be derived from one another)
constrain their typesets to be the same. When one is derived from the
other, repeat the constrain process until fixpoint.
"""
# Constrain tv2's typeset as long as tv1's typeset is changing.
while True:
old_tv1_ts = tv1.get_typeset().copy()
tv2.constrain_types(tv1)
if tv1.get_typeset() == old_tv1_ts:
break
old_tv2_ts = tv2.get_typeset().copy()
tv1.constrain_types(tv2)
assert old_tv2_ts == tv2.get_typeset()
def unify(tv1, tv2, typ):
# type: (TypeVar, TypeVar, TypeEnv) -> TypingOrError
"""
Unify tv1 and tv2 in the current type environment typ, and return an
updated type environment or error.
"""
tv1 = normalize_tv(typ[tv1])
tv2 = normalize_tv(typ[tv2])
# Already unified
if tv1 == tv2:
return typ
if typ.rank(tv2) < typ.rank(tv1):
return unify(tv2, tv1, typ)
constrain_fixpoint(tv1, tv2)
if (tv1.get_typeset().size() == 0 or tv2.get_typeset().size() == 0):
return "Error: empty type created when unifying {} and {}"\
.format(tv1, tv2)
# Free -> Derived(Free)
if not tv1.is_derived:
typ.equivalent(tv1, tv2)
return typ
if (tv1.is_derived and TypeVar.is_bijection(tv1.derived_func)):
inv_f = TypeVar.inverse_func(tv1.derived_func)
return unify(tv1.base, normalize_tv(TypeVar.derived(tv2, inv_f)), typ)
typ.add_constraint(TypesEqual(tv1, tv2))
return typ
def move_first(l, i):
# type: (List[T], int) -> List[T]
return [l[i]] + l[:i] + l[i+1:]
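# e.g. (illustrative) move_first(['a', 'b', 'c', 'd'], 2) == ['c', 'a', 'b', 'd']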
def ti_def(definition, typ):
# type: (Def, TypeEnv) -> TypingOrError
"""
Perform type inference on one Def in the current type environment typ and
return an updated type environment or error.
At a high level this works by creating fresh copies of each formal type var
in the Def's instruction's signature, and unifying the formal tv with the
corresponding actual tv.
"""
expr = definition.expr
inst = expr.inst
# Create a dict m mapping each free typevar in the signature of definition
# to a fresh copy of itself.
free_formal_tvs = inst.all_typevars()
m = {tv: tv.get_fresh_copy(str(typ.get_uid())) for tv in free_formal_tvs}
# Update m with any explicitly bound type vars
for (idx, bound_typ) in enumerate(expr.typevars):
m[free_formal_tvs[idx]] = TypeVar.singleton(bound_typ)
# Get fresh copies for each typevar in the signature (both free and
# derived)
fresh_formal_tvs = \
[subst(inst.outs[i].typevar, m) for i in inst.value_results] +\
[subst(inst.ins[i].typevar, m) for i in inst.value_opnums]
# Get the list of actual Vars
actual_vars = [] # type: List[Expr]
actual_vars += [definition.defs[i] for i in inst.value_results]
actual_vars += [expr.args[i] for i in inst.value_opnums]
# Get the list of the actual TypeVars
actual_tvs = []
for v in actual_vars:
assert(isinstance(v, Var))
        # Register with TypeEnv that this typevar corresponds to variable v,
# and thus has a given rank
typ.register(v)
actual_tvs.append(v.get_typevar())
# Make sure we unify the control typevar first.
if inst.is_polymorphic:
idx = fresh_formal_tvs.index(m[inst.ctrl_typevar])
fresh_formal_tvs = move_first(fresh_formal_tvs, idx)
actual_tvs = move_first(actual_tvs, idx)
# Unify each actual typevar with the corresponding fresh formal tv
for (actual_tv, formal_tv) in zip(actual_tvs, fresh_formal_tvs):
typ_or_err = unify(actual_tv, formal_tv, typ)
err = get_error(typ_or_err)
if (err):
return "fail ti on {} <: {}: ".format(actual_tv, formal_tv) + err
typ = get_type_env(typ_or_err)
# Add any instruction specific constraints
for constr in inst.constraints:
typ.add_constraint(constr.translate(m))
return typ
def ti_rtl(rtl, typ):
# type: (Rtl, TypeEnv) -> TypingOrError
"""
Perform type inference on an Rtl in a starting type env typ. Return an
updated type environment or error.
"""
for (i, d) in enumerate(rtl.rtl):
assert (isinstance(d, Def))
typ_or_err = ti_def(d, typ)
err = get_error(typ_or_err) # type: Optional[TypingError]
if (err):
return "On line {}: ".format(i) + err
typ = get_type_env(typ_or_err)
return typ
def ti_xform(xform, typ):
# type: (XForm, TypeEnv) -> TypingOrError
"""
Perform type inference on an Rtl in a starting type env typ. Return an
updated type environment or error.
"""
typ_or_err = ti_rtl(xform.src, typ)
err = get_error(typ_or_err) # type: Optional[TypingError]
if (err):
return "In src pattern: " + err
typ = get_type_env(typ_or_err)
typ_or_err = ti_rtl(xform.dst, typ)
err = get_error(typ_or_err)
if (err):
return "In dst pattern: " + err
typ = get_type_env(typ_or_err)
return get_type_env(typ_or_err)
|
nrc/rustc-perf
|
collector/benchmarks/cranelift-codegen/cranelift-codegen/meta-python/cdsl/ti.py
|
Python
|
mit
| 28,415
|
#*********************************************************************************
#
# Inviwo - Interactive Visualization Workshop
#
# Copyright (c) 2013-2016 Inviwo Foundation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#*********************************************************************************
import os
import sys
import itertools
import datetime
import math
import subprocess
def subDirs(path):
if os.path.isdir(path):
return next(os.walk(path))[1]
else:
return []
def toPath(*list):
return "/".join(list)
def useForwardSlash(path):
return "/".join(path.split(os.sep))
def addPostfix(file, postfix):
parts = file.split(os.path.extsep)
parts[0]+= postfix
return os.path.extsep.join(parts)
def in_directory(file, directory):
#make both absolute
directory = os.path.join(os.path.realpath(directory), '')
file = os.path.realpath(file)
#return true, if the common prefix of both is equal to directory
#e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
return os.path.commonprefix([file, directory]) == directory
def getScriptFolder():
    """ Get the directory of the script that is calling this function """
    import inspect
    return os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe().f_back)))
def mkdir(*path):
res = toPath(*path)
if not os.path.isdir(res):
os.mkdir(res)
return res
def partition(l, n):
"""Yield successive n-sized chunks from l."""
for i in range(0, len(l), n):
yield l[i:i+n]
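# e.g. (illustrative) list(partition([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]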
def pad_infinite(iterable, padding=None):
return itertools.chain(iterable, itertools.repeat(padding))
def pad(iterable, size, padding=None):
return itertools.islice(pad_infinite(iterable, padding), size)
def addMidSteps(func, iterable, transform = lambda x: x):
''' s -> s1, func(s1,s2), s2, func(s2,s3), s3'''
tmp = next(iterable)
yield transform(tmp)
for n in iterable:
res = func(tmp, n)
try:
for r in res: yield r
except TypeError:
yield res
tmp = n
yield transform(n)
def makeSlice(string):
def toInt(s):
try:
return int(s)
except ValueError:
return None
return slice(*list(pad(map(toInt, string.split(":")), 3)))
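# e.g. (illustrative) makeSlice("1:5") == slice(1, 5, None) and makeSlice("::2") == slice(None, None, 2)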
def dateToString(date):
return date.strftime("%Y-%m-%dT%H:%M:%S.%f")
def stringToDate(string):
return datetime.datetime.strptime(string, "%Y-%m-%dT%H:%M:%S.%f" )
def safeget(dct, *keys, failure = None):
for key in keys:
if key in dct.keys():
dct = dct[key]
else:
return failure
return dct
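# e.g. (illustrative) safeget({'a': {'b': 1}}, 'a', 'b') == 1; a missing key returns `failure`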
def find_pyconfig(path):
while path != "":
if os.path.exists(toPath(path, "pyconfig.ini")):
return toPath(path, "pyconfig.ini")
else:
path = os.path.split(path)[0];
return None
def stats(l):
mean = sum(l)/len(l)
std = math.sqrt(sum([pow(mean-x,2) for x in l])/len(l))
return mean, std
def openWithDefaultApp(file):
print(file)
if sys.platform.startswith('linux'):
subprocess.call(["xdg-open", file])
elif sys.platform == "darwin":
subprocess.call(["open", file])
elif sys.platform == "win32":
os.startfile(file)
|
cgloger/inviwo
|
tools/ivwpy/util.py
|
Python
|
bsd-2-clause
| 4,314
|
#!/usr/bin/env python
from flask import Flask
from flask import render_template
import pandas as pd
import numpy as np
import datetime as datetime
app = Flask(__name__)
if not app.debug:
import logging
file_handler = logging.FileHandler('error.log')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
def int_to_dow(dayno):
""" convert an integer into a day of week string """
days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday',
'Sunday']
return days[int(dayno)]
def create_graph_strings(input_list):
return None
def get_raw_data():
file_name = "static/files/all_data.csv"
dataframe = pd.read_csv(file_name, header=0)
dataframe['date'] = pd.to_datetime(dataframe['date'])
return dataframe
def get_max_speed(df):
return float(max(df['peak_speed']))
def get_vehicle_count(df):
return float(sum(df['vehicle_count']))
def get_violator_count(df):
return float(sum(df['violator_count']))
def get_avg_speed(df):
theavg = np.mean(df['peak_speed'])
return round(theavg, 2)
def get_over_limit(df):
theavg = get_avg_speed(df)
return (30-theavg)*-1
def get_timeseries_by_year(df):
''' group by keys, then return strings suitable for graphing '''
df['year'] = df.date.map(lambda x: '{year}'.format(year=x.year))
grouped = df.sort(['year'], ascending=1).groupby(['year'])
vehicle_count_by_month = grouped.aggregate(np.sum)['vehicle_count']
violator_count_by_month = grouped.aggregate(np.sum)['violator_count']
keys = vehicle_count_by_month.index.get_values()
# convert to specially formatted strings
vehicle_count_by_month_l = [str(i) for i in list(vehicle_count_by_month.get_values())]
violator_count_by_month_l = [str(i) for i in list(violator_count_by_month.get_values())]
keys_l = [str(i) for i in list(keys)]
vehicle_count_by_month_str = ','.join(vehicle_count_by_month_l)
violator_count_by_month_str = ','.join(violator_count_by_month_l)
keys_str = ",".join(keys_l)
return [keys_str, vehicle_count_by_month_str, violator_count_by_month_str]
def get_speed_by_hour(df):
grouped = df.sort(['hour_of_day'], ascending=1).groupby(['hour_of_day'])
mean_speed = grouped.aggregate(np.mean)['peak_speed']
max_speed = grouped.aggregate(np.max)['peak_speed']
keys = mean_speed.index.get_values()
mean_speed_l = [str(i) for i in list(mean_speed.get_values())]
max_speed_l = [str(i) for i in list(max_speed.get_values())]
keys_l = [str(i) for i in list(keys)]
mean_speed_str = ','.join(mean_speed_l)
max_speed_str = ','.join(max_speed_l)
keys_str = ",".join(keys_l)
return [keys_str, mean_speed_str, max_speed_str]
def get_speed_by_day(df):
grouped = df.sort(['weekday'], ascending=0).groupby(['weekday'])
mean_speed = grouped.aggregate(np.mean)['peak_speed']
max_speed = grouped.aggregate(np.max)['peak_speed']
keys = mean_speed.index.get_values()
mean_dow_l = [str(i) for i in list(mean_speed.get_values())]
max_dow_l = [str(i) for i in list(max_speed.get_values())]
dow_keys_l = [int_to_dow(i) for i in list(keys)]
mean_speed_str = ','.join(mean_dow_l)
max_speed_str = ','.join(max_dow_l)
keys_str = "','".join(dow_keys_l)
keys_str = "'"+keys_str+"'"
return [keys_str, mean_speed_str, max_speed_str]
def car_count_by_hour(df):
grouped = df.sort(['date'], ascending=0).groupby(['hour_of_day'])
car_count = grouped.aggregate(np.mean)['vehicle_count']
violator_count = grouped.aggregate(np.max)['violator_count']
keys = car_count.index.get_values()
car_count_l = [str(i) for i in list(car_count.get_values())]
violator_count_l = [str(i) for i in list(violator_count.get_values())]
keys_l = [str(i) for i in list(keys)]
car_count_str = ','.join(car_count_l)
violator_count_str = ','.join(violator_count_l)
keys_str = ",".join(keys_l)
return [keys_str, car_count_str, violator_count_str]
@app.route("/")
def dashboard():
df = get_raw_data()
violator_pct = round((get_violator_count(df)/get_vehicle_count(df)*100), 2)
violator_graph = get_timeseries_by_year(df)
speed_graph = get_speed_by_hour(df)
dow_graph = get_speed_by_day(df)
car_count_graph = car_count_by_hour(df)
return render_template('index.html',
car_count=get_vehicle_count(df),
violator_count=get_violator_count(df),
violator_pct=violator_pct,
max_speed=get_max_speed(df),
avg_speed=get_avg_speed(df),
over_limit=get_over_limit(df),
ts_labels=violator_graph[0],
ts_vehicle=violator_graph[1],
ts_violator=violator_graph[2],
ts_speed_labels=speed_graph[0],
ts_mean_speed_data=speed_graph[1],
ts_max_speed_data=speed_graph[2],
ts_dow_labels=dow_graph[0],
ts_dow_mean=dow_graph[1],
ts_dow_max=dow_graph[2],
ts_car_count_labels=car_count_graph[0],
ts_car_count_count=car_count_graph[1],
ts_car_count_violators=car_count_graph[2]
)
@app.route("/about")
def about():
return render_template('about.html')
@app.route("/contact")
def contact():
return render_template('contact.html')
if __name__ == "__main__":
app.run(host='0.0.0.0')
|
kgorman/WMG_speed
|
app/app.py
|
Python
|
mit
| 5,559
|
"""
WSGI config for config project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
application = get_wsgi_application()
|
Feverup/workshop-django-react
|
src/config/wsgi.py
|
Python
|
bsd-3-clause
| 389
|
import string
import datetime
import numpy as np
import pylab as pl
from matplotlib.patches import Rectangle
import saltefficiency.util.blockvisitstats as bvs
import saltefficiency.util.sdb_utils as su
def create_night_table(obsdate, sdb, els):
"""Create a table that shows a break down for the night and what happened in each block
Parameters
----------
nid: int
NightInfo_Id
sdb: ~mysql.mysql
A connection to the sdb database
els: ~mysql.mysql
A connection to the els database
"""
# create a dictionary to break down the events of the night
night_dict = {}
nid = su.getnightinfo(sdb, obsdate)
#get the times for the night
record=sdb.select('EveningTwilightEnd, MorningTwilightStart', 'NightInfo', 'NightInfo_Id=%i' % nid)
stime=record[0][0]
etime=record[0][1]
totaltime=(etime-stime).seconds
night = Night(nid, stime, etime)
#get the SO event log
record=sdb.select('EventType_Id, EventTime', 'SoLogEvent', 'NightInfo_Id=%i' % nid)
    #set it up with the correct time
event_list=[]
for i in range(len(record)):
t=record[i][1].seconds
#convert to times from the observing day
if t < 12*3600:
t+=12*3600
else:
t-=12*3600
event_list.append([record[i][0],t])
# add weather down time to night_dict
time_list,wea_arr = create_weather(els, stime, etime)
night.add_weather(time_list, wea_arr)
# add the accepted blocks to night_dict
block_list=bvs.blockvisitstats(sdb, obsdate, update=False)
for b in block_list:
print b
night_dict[b[1]] = ['Science', b]
night.add_blocks(block_list)
# add fault down time to the night_dict
faults = create_faults(sdb, nid)
problem_list=[]
for f in faults:
t1 = (f[1]-night.day_start).seconds
t2 = (f[2]-night.day_start).seconds
problem_list.append([t1, t2])
print f
night_dict[f[1]] = ['Fault', f]
night.add_problems(problem_list)
# add mirror alignment to the night_dict
mirror_alignment=create_mirror_alignment(event_list)
night.add_mirroralignment(mirror_alignment)
for m in mirror_alignment:
t1 = convert_decimal_hours(obsdate, m[0]/3600.0)
night_dict[t1] = ['Mirror', m]
# use the dict to populate the table to display what did happen
night.calc_engineering()
night.calc_weather()
#night.plot()
info_txt="""
Total Time: {5:0.2f} <br>
Science Time: {0:0.2f} <br>
    Engineering Time: {4:0.2f} <br>
Weather Time: {1:0.2f} <br>
Time Lost to Problems: {3:0.2f} <br>\n
<br>
Mirror Alignment Time: {2:0.2f} <br>\n
""".format(night.sciencetime, night.weathertime, night.mirroralignmenttime, night.problemtime, night.engineertime, night.totaltime/3600.0)
table_txt ='<p><table>'
table_txt +='<tr><th>Time</th><th>Type</th><th>Length</th><th>Comment</th></tr>\n'
status = 0
start = None
for i in range(len(night.status_arr)):
if start is not None and night.status_arr[i]!=status:
table_txt += create_row(sdb, start, i, night_dict, night, obsdate)
start=None
if night.status_arr[i]>0 and start is None:
status = night.status_arr[i]
start = i
table_txt +='</table></p>\n'
return info_txt + table_txt
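# Illustrative usage sketch (not from the original module); sdb and els are
# assumed to be already-open mysql connection objects as used by
# saltefficiency.util.sdb_utils, and the obsdate string follows the YYYYMMDD
# form expected by convert_decimal_hours(), e.g.
#   html = create_night_table('20150220', sdb, els)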
def create_row(sdb, sid, eid, night_dict, night, obsdate):
"""Create a row with all the information for that block
"""
status = night.status_arr[sid]
t1 = convert_decimal_hours(obsdate, night.time_arr[sid])
t2 = convert_decimal_hours(obsdate, night.time_arr[eid])
l=(t2-t1).seconds/3600.0
length='{0}:{1}'.format(int(l), string.zfill(int(60.0*(l-int(l))),2))
#find the key
min_time = 600
block_time = None
t0 = convert_decimal_hours(obsdate, 1.0)
for k in night_dict.keys():
t = (t1 - t0).seconds - (k-t0).seconds
if abs(t) < min_time:
block_time = k
min_time = abs(t)
keys = night_dict.keys()
#create the row
fgcolor='#000000'
if status==1: fgcolor='#FFFFFF'
row_str='<tr height={5}><td>{0}<br>{1}</td><td bgcolor="{2}"><font color="{4}">{3}</font></td>'.format(t1, t2, night.color_list[status], night.statusname_list[status], fgcolor, 50*l)
row_str+='<td>{0}</td>'.format(length)
if status==1:
b = night_dict[block_time][1]
print b
row_str+='<td>{0}</td>'.format(b[4])
row_str+='</tr>\n'
return row_str
def convert_decimal_hours(obsdate, t1):
"""Convert decimal hours to a date time"""
if t1 < 12:
t1 = '{} {}:{}'.format(obsdate, int(t1), int(60*(t1-int(t1))))
t1 = datetime.datetime.strptime(t1, '%Y%m%d %H:%M')
t1 = t1+datetime.timedelta(seconds=12*3600)
else:
t1 = '{} {}:{}'.format(obsdate, int(t1-12), int(60*(t1-int(t1))))
t1 = datetime.datetime.strptime(t1, '%Y%m%d %H:%M')
t1 = t1+datetime.timedelta(seconds=24*3600)
return t1
def create_faults(sdb, nid):
"""Return a list of information about the faults
"""
select = 'Fault_id, FaultStart, FaultEnd, TimeLost, SaltSubsystem'
tables = 'Fault join SaltSubsystem using (SaltSubsystem_Id)'
logic = 'NightInfo_Id={} and TimeLost > 0'.format(nid)
return sdb.select(select, tables, logic)
def create_weather(els, stime, etime):
"""Return an array of times that the weather was bad
"""
weather_info = su.get_weather_info(els, stime, etime)
time_list, air_arr, dew_arr, hum_arr, w30_arr, w30d_arr, w10_arr, \
w10d_arr, rain_list, t02_arr, t05_arr, t10_arr, t15_arr, t20_arr, \
t25_arr, t30_arr = weather_info
#need to include other/better limits
wea_arr = (hum_arr>85.0)
return time_list, wea_arr
def create_mirror_alignment(event_list):
"""Determine the mirror alignment time
"""
mirror_list=[]
    mirror_start=False
    old_event=None
    for r in event_list:
        if r[0]==10 and mirror_start==False:
            t=r[1]
            #use the time from the previous event if it was one of types 4, 6, 13 or 14
            if old_event is not None and old_event[0] in [4, 6, 13, 14]: t=old_event[1]
mirror_start=[t]
if mirror_start:
if r[0] in [3,5]:
t=r[1]
mirror_start.append(t)
mirror_list.append(mirror_start)
mirror_start=False
old_event = r
return mirror_list
class Night:
def __init__(self, nid, night_start, night_end):
self.nid=nid
self.night_start = night_start
self.night_end = night_end
self.totaltime=(self.night_end-self.night_start).seconds
self.sciencetime=0
self.engineertime=0
self.weathertime=0
self.problemtime=0
self.shuttertime=0
self.mirroralignmenttime=0
self.dt=0.1
#set up arrays to represent different events
self.time_arr = np.arange(0,24,self.dt) #array representing time since noon that day
self.day_start = datetime.datetime(self.night_start.year, self.night_start.month, self.night_start.day, 12,0,0)
self.night_time = (self.time_arr > (self.night_start-self.day_start).seconds/3600.0) * ( self.time_arr < (self.night_end-self.day_start).seconds/3600.0)
self.status_arr = np.zeros(len(self.time_arr), dtype=int)
self.stime = (self.night_start-self.day_start).seconds/3600.0
self.etime = (self.night_end-self.day_start).seconds/3600.0
#set color and name list
self.statusname_list=['none', 'Science', 'Engineer', 'Weather', 'Problem', 'Rejected']
self.color_list=['none', 'blue', 'green', 'purple', 'red','yellow'] #none, science, engineer, weather, problem, rejected
def add_weather(self, time_list, wea_arr):
"""Add the weather to the status array and weather
the telescope is closed for weather or not
time_list is in seconds since the start of the night
"""
nstart = (self.night_start-self.day_start).seconds
for i in range(len(time_list)):
if wea_arr[i]:
t = int((nstart + time_list[i])/3600.0/self.dt)
self.status_arr[t]=3
return
def add_mirroralignment(self, mirror_list):
"""Add the mirror alignment to the status array
"""
for t1,t2 in mirror_list:
t1=t1/3600.0
t2=t2/3600.0
if t1 > self.stime and t1 < self.etime:
self.mirroralignmenttime += t2-t1
print t1, t2, self.stime, self.etime, self.mirroralignmenttime
mask = (self.time_arr>t1)*(self.time_arr<t2)
mid = np.where(mask)[0]
self.status_arr[mid] = 2
def add_problems(self, problems_list):
"""Add the problems to the status array
"""
for t1,t2 in problems_list:
t1=t1/3600.0
t2=t2/3600.0
if t1 > self.stime and t1 < self.etime:
et2 = min(t2, self.etime)
self.problemtime += et2-t1
mask = (self.time_arr>t1)*(self.time_arr<t2)
mid = np.where(mask)[0]
self.status_arr[mid] = 4
def add_blocks(self, block_list):
"""Add science time blocks to the status array
"""
for bvid, t1,t2,stat, propcode in block_list:
t1 = (t1- self.day_start).seconds/3600.0
t2 = (t2- self.day_start).seconds/3600.0
et1 = max(self.stime, t1)
if t1 < self.etime:
et2 = min(t2, self.etime)
mask = (self.time_arr>t1)*(self.time_arr<t2)
mid = np.where(mask)[0]
if stat==0:
self.sciencetime += et2-et1
self.status_arr[mid] = 1
else:
self.status_arr[mid] = 5
if stat==3: self.weathertime += et2-et1
if stat<3:
pass #self.problemtime += et2-et1
#print 'reject', self.problemtime, et2,et1
def calc_engineering(self):
for i in range(len(self.time_arr)):
if self.time_arr[i]>self.stime and self.time_arr[i]<self.etime:
if self.status_arr[i]==2 or self.status_arr[i]==0:
self.engineertime += self.dt
def calc_weather(self):
for i in range(len(self.time_arr)):
if self.time_arr[i]>self.stime and self.time_arr[i]<self.etime:
if self.status_arr[i]==3:
self.weathertime += self.dt
def plot(self):
color_list=['none', 'blue', 'green', 'purple', 'red','yellow'] #none, science, engineer, weather, problem, rejected
pl.figure()
ax=pl.axes([0.1,0.1,0.8,0.8])
        #add nighttime patch
ax.add_patch(Rectangle((self.stime,0),self.totaltime/3600.0,4, alpha=0.3))
#add status patches
for i in range(len(self.status_arr)):
if self.status_arr[i]>0:
color=color_list[self.status_arr[i]]
ax.add_patch(Rectangle((self.time_arr[i],1),self.dt,0.5, alpha=1.0, facecolor=color, edgecolor=color)) #color_list[self.status_arr[i]]))
ax.axis([7,17,1,1.5])
pl.show()
|
hettlage/saltefficiency
|
saltefficiency/nightly/create_night_table.py
|
Python
|
bsd-3-clause
| 11,116
|
# -*- coding: utf-8 -*-
from django.db import models
import django.template.defaultfilters
from django.db.models import Max
from django.utils.functional import cached_property
# Create your models here.
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey,GenericRelation
#from django.contrib.contenttypes import generic
from django.dispatch import receiver
from django.db.models.signals import post_save,post_delete,pre_save,pre_delete
from django.db.models.signals import m2m_changed
import django.dispatch
position_changed = django.dispatch.Signal(providing_args=["instance"])
valid_changed = django.dispatch.Signal(providing_args=["instance"])
#from santaclara_base.models import PositionAbstract
import re
import heapq
def custom_model_list(model_list):
sections=["Language",
"Place",
"Time span",
"Person",
"Category",
"Author",
"Publisher",
"Book",
"Publication",
"Migr",
"Repository",]
ret={}
for sec in sections:
ret[sec]=[]
for model_dict in model_list:
if model_dict["model_label"] in ["repositorycachebook","repositorycacheauthor","repositoryfailedisbn",]:
ret["Repository"].append(model_dict)
continue
if model_dict["model_label"] in [ "timepoint","timespan","datemodifier" ]:
ret["Time span"].append(model_dict)
continue
if model_dict["model_label"] in [ "language","languagefamily","languagefamilyrelation",
"languagefamilyfamilyrelation","languagevarietytype","languagevariety" ]:
ret["Language"].append(model_dict)
continue
if model_dict["model_label"] in [ "placetype","place","alternateplacename","placerelation" ]:
ret["Place"].append(model_dict)
continue
if model_dict["model_label"] in [ "article","articleauthorrelation","issuetype",
"issue","publication","volumetype","volume" ]:
ret["Publication"].append(model_dict)
continue
if model_dict["model_label"] in [ "nameformat","nametype","nameformatcollection","personcache",
"person","personnamerelation" ]:
ret["Person"].append(model_dict)
continue
if model_dict["model_label"] in [ "categorytreenode","category","categoryrelation",
"categorytimespanrelation", "categoryplacerelation",
"categorypersonrelation",
"categorylanguagerelation" ]:
ret["Category"].append(model_dict)
continue
if model_dict["model_label"] in [ "author","authorrole","authorrelation" ]:
ret["Author"].append(model_dict)
continue
if model_dict["model_label"] in [ "migrauthor","migrpublisherriviste" ]:
ret["Migr"].append(model_dict)
continue
if model_dict["model_label"] in [ "publisherstate","publisheraddress","publisherisbn","publisher",
"publisheraddresspublisherrelation" ]:
ret["Publisher"].append(model_dict)
continue
ret["Book"].append(model_dict)
xret=[]
for sec in sections:
xret.append( (sec,ret[sec]))
return xret
class PositionAbstract(models.Model):
""" Classe astratta per gestire oggetti posizionabili all'interno di un elenco.
Definisce il campo *pos* (posizione) come intero positivo.
Emette il segnale :any:`santaclara_base.signals.position_changed`
quando la posizione viene modificata.
Un modello che estende la classe PositionAbstract e ridefinisce
__init__() o save() deve ricordarsi di richiamare rispettivamente
:any:`PositionAbstract.my_action_post_init
<santaclara_base.models.PositionAbstract.my_action_post_init>` e
:any:`PositionAbstract.my_action_post_save
<santaclara_base.models.PositionAbstract.my_action_post_save>`.
Un modello che estende la classe PositionAbstract con eredità
multipla e in modo che save() e __init__() siano ereditati da
un'altra classe (quindi con PositionAbstract non primo modello tra
i padri), deve ridefinirli in modo o da richiamare
PositionAbstract.save() e PositionAbstract.__init__() oppure da
utilizzare esplicitamente
:any:`PositionAbstract.my_action_post_init
<santaclara_base.models.PositionAbstract.my_action_post_init>` e
:any:`PositionAbstract.my_action_post_save
<santaclara_base.models.PositionAbstract.my_action_post_save>`.
"""
#: Posizione.
pos = models.PositiveIntegerField()
class Meta:
abstract = True
def __init__(self,*args,**kwargs):
super(PositionAbstract, self).__init__(*args, **kwargs)
self.my_action_post_init(*args,**kwargs)
def save(self,*args,**kwargs):
super(PositionAbstract,self).save(*args,**kwargs)
self.my_action_post_save(*args,**kwargs)
def my_action_post_save(self,*args,**kwargs):
""" Se un modello che estende PositionAbstract sovrascrive
save() e non richiama esplicitamente PositionAbstract.save(),
oppure se in caso di eredità multipla il save() del modello
non è PositionAbstract.save(), nel nuovo save() dev'essere
richiamata questa funzione, passandole gli stessi parametri di
save(). """
if self.__original_pos!=self.pos:
position_changed.send(self.__class__,instance=self)
self.__original_pos = self.pos
def my_action_post_init(self,*args,**kwargs):
""" Se un modello che estende PositionAbstract sovrascrive
__init__() e non richiama esplicitamente PositionAbstract.__init__(),
oppure se in caso di eredità multipla il __init__() del modello
non è PositionAbstract.__init__(), nel nuovo __init__() dev'essere
richiamata questa funzione, passandole gli stessi parametri di
__init__(). """
self.__original_pos = self.pos
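# Illustrative sketch (not from the original module) of a model that extends
# PositionAbstract and overrides save() without calling PositionAbstract.save(),
# so it calls my_action_post_save() itself as described in the docstring above;
# the model name and extra field are hypothetical:
#
#   class OrderedItem(PositionAbstract):
#       title = models.CharField(max_length=256)
#       def save(self, *args, **kwargs):
#           models.Model.save(self, *args, **kwargs)
#           self.my_action_post_save(*args, **kwargs)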
class LabeledAbstract(models.Model):
label = models.SlugField(unique=True)
description = models.CharField(max_length=1024)
class Meta:
abstract = True
def __str__(self):
return str(self.label)
def clean(self,*args,**kwargs):
self.label = self.label.lower()
super(LabeledAbstract, self).clean(*args, **kwargs)
### time span
class DateModifier(PositionAbstract):
name = models.CharField(max_length=1024)
reverse = models.BooleanField(default=False)
class Meta:
ordering = [ 'pos' ]
def __str__(self):
if self.id==0: return ""
if not self.name: return "-"
return str(self.name)
def save(self,*args,**kwargs):
super(DateModifier, self).save(*args, **kwargs)
for obj in self.timepoint_set.all():
obj.save()
class TimePoint(models.Model):
date = models.IntegerField()
modifier = models.ForeignKey(DateModifier,blank=True,default=0,on_delete=models.PROTECT)
class Meta:
ordering = [ 'modifier','date' ]
unique_together= [ 'modifier','date' ]
def __str__(self):
U=str(abs(self.date))
if self.modifier.id!=0:
U+=" "+str(self.modifier)
return U
def save(self,*args,**kwargs):
if not self.modifier:
self.modifier=DateModifier.objects.get(id=0)
if self.modifier.reverse:
self.date=-abs(self.date)
else:
self.date=abs(self.date)
super(TimePoint, self).save(*args, **kwargs)
def begins(self):
return "; ".join([str(x) for x in self.begin_set.all()])
def ends(self):
return "; ".join([str(x) for x in self.end_set.all()])
def time_spans(self):
L=[str(x) for x in self.begin_set.all()]
L+=[str(x) for x in self.end_set.all()]
L=list(set(L))
return "; ".join(L)
class TimeSpan(models.Model):
begin = models.ForeignKey(TimePoint,related_name="begin_set",on_delete=models.PROTECT)
end = models.ForeignKey(TimePoint,related_name="end_set",on_delete=models.PROTECT)
name = models.CharField(max_length=4096,blank=True)
def __str__(self):
if self.name:
return str(self.name)
return str(self.begin)+"-"+str(self.end)
class Meta:
ordering = [ 'begin','end' ]
def categories(self):
return "; ".join([str(x.category) for x in self.categorytimespanrelation_set.all()])
### language
class Language(models.Model):
name = models.CharField(max_length=4096)
def __str__(self): return self.name
def families(self):
return "; ".join([str(x.family) for x in self.languagefamilyrelation_set.all()])
def varieties(self):
return "; ".join([str(x) for x in self.languagevariety_set.all()])
class LanguageFamily(models.Model):
name = models.CharField(max_length=4096)
def __str__(self): return self.name
def parents(self):
return "; ".join([str(x.parent) for x in self.parent_set.all()])
def children(self):
return "; ".join([str(x.child) for x in self.child_set.all()])
def languages(self):
return "; ".join([str(x.language) for x in self.languagefamilyrelation_set.all()])
class LanguageFamilyRelation(models.Model):
language = models.ForeignKey(Language,on_delete=models.PROTECT)
family = models.ForeignKey(LanguageFamily,on_delete=models.PROTECT)
def __str__(self):
return str(self.family)+"/"+str(self.language)
class LanguageFamilyFamilyRelation(models.Model):
parent = models.ForeignKey(LanguageFamily,related_name="child_set",on_delete=models.PROTECT)
child = models.ForeignKey(LanguageFamily,related_name="parent_set",on_delete=models.PROTECT)
def __str__(self):
return str(self.parent)+"/"+str(self.child)
class Meta:
ordering = ["parent","child"]
class LanguageVarietyType(models.Model):
name = models.CharField(max_length=4096)
def __str__(self): return self.name
class LanguageVariety(models.Model):
name = models.CharField(max_length=4096,blank=True)
language = models.ForeignKey(Language,on_delete=models.PROTECT)
type = models.ForeignKey(LanguageVarietyType,default=1,on_delete=models.PROTECT)
def __str__(self):
if self.type.id==1:
return str(self.language)
if not self.name:
return str(self.language)
return str(self.language)+" ("+str(self.name)+")"
### place
class PlaceType(models.Model):
name = models.CharField(max_length=4096)
def __str__(self): return self.name
class Place(models.Model):
name = models.CharField(max_length=4096,unique=True)
type = models.ForeignKey(PlaceType,on_delete=models.PROTECT)
def __str__(self):
return self.name
def alternate_names(self):
return "; ".join([str(x.name) for x in self.alternateplacename_set.all()])
def areas(self):
return "; ".join([str(x.area) for x in self.area_set.all()])
def places(self):
return "; ".join([str(x.place) for x in self.place_set.all()])
class Meta:
ordering = [ "name" ]
class AlternatePlaceName(models.Model):
place = models.ForeignKey(Place,on_delete=models.PROTECT)
name = models.CharField(max_length=4096)
note = models.CharField(max_length=65536,blank=True)
def __str__(self):
return self.name
class PlaceRelation(models.Model):
place = models.ForeignKey(Place,related_name="area_set",on_delete=models.PROTECT)
area = models.ForeignKey(Place,related_name="place_set",on_delete=models.PROTECT)
def __str__(self):
return str(self.area)+"/"+str(self.place)
class Meta:
ordering = ["area","place"]
### person
class NameFormat(LabeledAbstract):
pattern = models.CharField(max_length=1024)
class Meta:
ordering = ["label"]
def save(self, *args, **kwargs):
super(NameFormat, self).save(*args, **kwargs)
for coll in self.long_format_set.all():
coll.save()
for coll in self.short_format_set.all():
coll.save()
for coll in self.ordering_format_set.all():
coll.save()
for coll in self.list_format_set.all():
coll.save()
class NameType(LabeledAbstract): pass
RE_NAME_SEP=re.compile("('| |-)")
VONS=["von","di","da","del","della","dell","dello","dei","degli","delle","de","d","la","lo",
"dal","dalla","dall","dallo","dai","dagli","dalle","al","ibn"]
ROMANS=["I","II","III","IV","V","VI","VII","VIII","IX","X",
"XI","XII","XIII","XIV","XV","XVI","XVII","XVIII","XIX","XX",
"XXI","XXII","XXIII","XXIV","XXV","XXVI","XXVII","XXVIII","XXIX","XXX",
"XXXI","XXXII","XXXIII","XXXIV","XXXV","XXXVI","XXXVII","XXXVIII","XXXIX","XL",
"XLI","XLII","XLIII","XLIV","XLV","XLVI","XLVII","XLVIII","XLIX","L"]
class NameFormatCollectionManager(models.Manager):
def get_preferred(self,num_fields):
preferred_list=self.all().filter(preferred=True)
for format_c in preferred_list:
fields=format_c.fields
if len(fields)==num_fields:
return format_c
format_max_num=-1
format_max=None
for format_c in self.all():
fields=format_c.fields
if len(fields)==num_fields:
return format_c
if len(fields)>format_max_num:
format_max_num=len(fields)
format_max=format_c
return format_max
def get_format_for_name(self,search):
if not search:
return self.get_preferred(0),[]
if search.lower().replace(".","") in [ "av","aavv" ]:
return self.get_preferred(0),[]
t=RE_NAME_SEP.split(search)
names=[]
t_vons=""
for n in range(0,len(t)):
if not t[n]: continue
if t[n] in [ " ","'" ]:
if t_vons:
t_vons+=t[n]
continue
if t[n]=="-":
if t_vons:
t_vons+="-"
else:
names[-1]+="-"
continue
if t[n].lower() not in VONS:
if names and names[-1].endswith("-"):
names[-1]+=t[n].capitalize()
else:
names.append(t_vons+t[n].capitalize())
t_vons=""
continue
t_vons+=t[n]
return self.get_preferred(len(names)),names
class NameFormatCollection(LabeledAbstract):
long_format = models.ForeignKey(NameFormat,related_name='long_format_set',on_delete=models.PROTECT)
short_format = models.ForeignKey(NameFormat,related_name='short_format_set',on_delete=models.PROTECT)
list_format = models.ForeignKey(NameFormat,related_name='list_format_set',on_delete=models.PROTECT)
ordering_format = models.ForeignKey(NameFormat,related_name='ordering_format_set',on_delete=models.PROTECT)
preferred = models.BooleanField(default=False)
objects = NameFormatCollectionManager()
def save(self, *args, **kwargs):
super(NameFormatCollection, self).save(*args, **kwargs)
for person in self.person_set.all():
person.update_cache()
@cached_property
def fields(self):
L=["name","surname"]
long_name=str(self.long_format.pattern)
short_name=str(self.short_format.pattern)
list_name=str(self.list_format.pattern)
ordering_name=str(self.ordering_format.pattern)
for s in "VALURNIC":
long_name=long_name.replace("{{"+s+"|","{{")
short_name=short_name.replace("{{"+s+"|","{{")
list_name=list_name.replace("{{"+s+"|","{{")
ordering_name=ordering_name.replace("{{"+s+"|","{{")
names=[]
for f in [long_name,short_name,list_name,ordering_name]:
L=[x.replace("{{","").replace("}}","") for x in re.findall(r'{{.*?}}',f)]
for name in L:
if name in names: continue
names.append(name)
return names
    ### Format syntax
    # {{<name_type>}}: <name_type>
    # {{C|<name_type>}}: <name_type> (capitalized)
    # {{V|<name_type>}}: <name_type> (capitalized except von, de, etc.)
    # {{L|<name_type>}}: <name_type> (lowercased)
    # {{U|<name_type>}}: <name_type> (uppercased)
    # {{A|<name_type>}}: <name_type> as an integer in arabic numerals
    # {{R|<name_type>}}: <name_type> as an integer in upper-case roman numerals
    # {{N|<name_type>}}: <name_type> (lowercased and with spaces => _)
    # {{I|<name_type>}}: initials (Gian Uberto => G. U.)
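    # Illustrative example (not from the original code), assuming hypothetical
    # name types "name" and "surname" with values name="gian uberto" and
    # surname="rossi": a pattern such as "{{C|surname}}, {{I|name}}" would be
    # rendered by apply_formats() below as "Rossi, G. U.".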
def apply_formats(self,names):
long_name=str(self.long_format.pattern)
short_name=str(self.short_format.pattern)
list_name=str(self.list_format.pattern)
ordering_name=str(self.ordering_format.pattern)
list_upper=str(self.list_format.pattern)
list_lower=str(self.list_format.pattern)
names_list=list(names.items())
if not names_list:
return long_name,short_name,list_name,ordering_name,"-","-"
for key,rel in names_list:
val_f=rel.formatted()
long_name=long_name.replace("{{"+key+"}}",val_f["norm"])
short_name=short_name.replace("{{"+key+"}}",val_f["norm"])
list_name=list_name.replace("{{"+key+"}}",val_f["norm"])
ordering_name=ordering_name.replace("{{"+key+"}}",val_f["norm"])
list_upper=list_upper.replace("{{"+key+"}}",val_f["norm_upper"])
list_lower=list_lower.replace("{{"+key+"}}",val_f["norm_lower"])
for k in "VALURNIC":
long_name=long_name.replace("{{"+k+"|"+key+"}}",val_f[k])
short_name=short_name.replace("{{"+k+"|"+key+"}}",val_f[k])
list_name=list_name.replace("{{"+k+"|"+key+"}}",val_f[k])
ordering_name=ordering_name.replace("{{"+k+"|"+key+"}}",val_f[k])
if k in "AR":
list_upper=list_upper.replace("{{"+k+"|"+key+"}}",val_f[k])
list_lower=list_lower.replace("{{"+k+"|"+key+"}}",val_f[k])
else:
list_upper=list_upper.replace("{{"+k+"|"+key+"}}",val_f["norm_upper"])
list_lower=list_lower.replace("{{"+k+"|"+key+"}}",val_f["norm_lower"])
return long_name,short_name,list_name,ordering_name,list_upper[0],list_lower[0]
class PersonCache(models.Model):
long_name = models.CharField(max_length=4096,default="-")
short_name = models.CharField(max_length=4096,default="-")
list_name = models.CharField(max_length=4096,default="-")
ordering_name = models.CharField(max_length=4096,default="-")
upper_initial = models.CharField(max_length=4,default="-")
lower_initial = models.CharField(max_length=4,default="-")
class Meta:
ordering = ["ordering_name"]
db_table = 'bibliography_personcache'
def __str__(self): return self.list_name
class PersonManager(models.Manager):
def search_names(self,names):
qset=self.all()
if len(names)==0: return qset
#D=[]
for name in names:
if name.endswith("."):
name=name[:-1]
qset=qset.filter(personnamerelation__value__istartswith=name)
elif len(name)==1:
qset=qset.filter(personnamerelation__value__istartswith=name)
else:
qset=qset.filter(personnamerelation__value__iexact=name)
# if qset.count()>0: return qset.select_related("cache")
# if len(names)==1: return qset.select_related("cache")
# if len(names)==2:
# newnames=[ " ".join(names) ]
# return self.search_names(newnames)
# L=len(names)
# for n in range(0,L-1):
# newnames=names[0:n] + [ " ".join(names[n:n+2])] + names[n+2:L]
# qset=self.search_names(newnames)
# if qset.count()>0: return qset.select_related("cache")
return qset.select_related("cache")
def filter_by_name(self,search):
search=search.replace(" , "," ")
search=search.replace(", "," ")
search=search.replace(" ,"," ")
search=search.replace(","," ")
if search.lower() in [ "--","","- -","-","aavv","aa.vv.","aa. vv."]:
format_c=NameFormatCollection.objects.get(label="aavv")
qset=self.all().filter(format_collection=format_c)
return qset
t_name=search.lower().split(" ")
return self.search_names(t_name)
def look_for(self,name_list):
old={}
new=[]
for name in name_list:
qset=self.filter_by_name(name)
if qset.count():
old[name]=(qset.first())
else:
new.append(name)
return old,new
def create_by_names(self,format_collection,**kwargs):
obj=self.create(format_collection=format_collection)
for key,val in list(kwargs.items()):
name_type,created=NameType.objects.get_or_create(label=key)
rel,created=PersonNameRelation.objects.get_or_create(person=obj,name_type=name_type,
defaults={"value": val})
if not created:
rel.value=val
rel.save()
return obj
class Person(models.Model):
format_collection = models.ForeignKey(NameFormatCollection,on_delete=models.PROTECT)
cache = models.OneToOneField(PersonCache,editable=False,null=True,on_delete=models.PROTECT)
names = models.ManyToManyField(NameType,through='PersonNameRelation',blank=True)
objects = PersonManager()
class Meta:
ordering = ["cache"]
db_table = 'bibliography_person'
def __str__(self):
return self.list_name()
def long_name(self): return str(self.cache.long_name)
def short_name(self): return str(self.cache.short_name)
def ordering_name(self): return str(self.cache.ordering_name)
def list_name(self): return str(self.cache.list_name)
def upper_initial(self): return str(self.cache.upper_initial)
def lower_initial(self): return str(self.cache.lower_initial)
def save(self, *args, **kwargs):
if not self.cache:
self.cache = PersonCache.objects.create()
super(Person, self).save(*args, **kwargs)
self.update_cache()
def update_cache(self):
names={}
for rel in self.personnamerelation_set.all():
names[str(rel.name_type.label)]=rel
long_name,short_name,list_name,ordering_name,upper_initial,lower_initial=self.format_collection.apply_formats(names)
self.cache.long_name = long_name
self.cache.short_name = short_name
self.cache.list_name = list_name
self.cache.ordering_name = ordering_name
self.cache.upper_initial = upper_initial
self.cache.lower_initial = lower_initial
self.cache.save()
class PersonNameRelation(models.Model):
person = models.ForeignKey(Person,on_delete=models.PROTECT)
name_type = models.ForeignKey(NameType,on_delete=models.PROTECT)
value = models.CharField(max_length=4096,default="-",db_index=True)
case_rule = models.CharField(max_length=128,choices=[ ("latin","latin"),
("turkic","turkic") ],
default="latin")
def __str__(self): return str(self.value)
def save(self, *args, **kwargs):
super(PersonNameRelation, self).save(*args, **kwargs)
self.person.update_cache()
def _upper(self,x):
if self.case_rule=="latin":
return x.upper()
x=x.replace("ı","I")
x=x.replace("i","İ")
return x.upper()
def _lower(self,x):
if self.case_rule=="latin":
return x.lower()
x=x.replace("I","ı")
x=x.replace("İ","i")
return x.lower()
def _capitalize(self,x):
if self.case_rule=="latin":
return x.capitalize()
return self._upper(x[0])+self._lower(x[1:])
    ### Format syntax
    # {{<name_type>}}: <name_type>
    # {{C|<name_type>}}: <name_type> (capitalized)
    # {{V|<name_type>}}: <name_type> (capitalized except von, de, etc.)
    # {{L|<name_type>}}: <name_type> (lowercased)
    # {{U|<name_type>}}: <name_type> (uppercased)
    # {{A|<name_type>}}: <name_type> as an integer in arabic numerals
    # {{R|<name_type>}}: <name_type> as an integer in upper-case roman numerals
    # {{N|<name_type>}}: <name_type> (lowercased and with spaces => _)
    # {{I|<name_type>}}: initials (Gian Uberto => G. U.)
def formatted(self):
val=str(self.value)
val_f={}
t=RE_NAME_SEP.split(val)
#t=map(lambda x: self._capitalize(x),RE_NAME_SEP.split(val))
vons_t=[]
norm_t=[]
for x in t:
if self._lower(x) in VONS:
vons_t.append(self._lower(x))
else:
if len(x)==1 and x.isalpha():
vons_t.append(self._upper(x)+".")
else:
vons_t.append(self._capitalize(x))
if len(x)==1 and x.isalpha():
norm_t.append(x+".")
else:
norm_t.append(x)
cap_t=[self._capitalize(x) for x in norm_t]
val_norm="".join(norm_t)
val_f["L"]=self._lower(val)
val_f["U"]=self._upper(val)
val_f["N"]=self._lower(val).replace(" ","_")
val_f["I"]=". ".join([x[0].upper() for x in list(filter(bool,val.split(" ")))])+"."
val_f["C"]="".join(cap_t)
val_f["V"]="".join(vons_t)
if val.isdigit():
val_f["R"]=ROMANS[int(val)-1]
val_f["A"]="%3.3d" % int(val)
else:
val_f["R"]=""
val_f["A"]=""
val_f["norm"]=val_norm
val_f["norm_upper"]=self._upper(val_norm)
val_f["norm_lower"]=self._lower(val_norm)
return val_f
# long_name=long_name.replace("{{"+key+"}}",val_norm)
# short_name=short_name.replace("{{"+key+"}}",val_norm)
# list_name=list_name.replace("{{"+key+"}}",val_norm)
# ordering_name=ordering_name.replace("{{"+key+"}}",val_norm)
# for k in "VALURNIC":
# long_name=long_name.replace("{{"+k+"|"+key+"}}",val_f[k])
# short_name=short_name.replace("{{"+k+"|"+key+"}}",val_f[k])
# list_name=list_name.replace("{{"+k+"|"+key+"}}",val_f[k])
# ordering_name=ordering_name.replace("{{"+k+"|"+key+"}}",val_f[k])
# return long_name,short_name,list_name,ordering_name
### category
class CategoryTreeNodeManager(models.Manager):
def roots(self):
return self.filter(level=0)
def until_level(self,level,only_category=True):
if not only_category:
return self.filter(level__lte=level)
return self.filter(level__lte=level,is_category=True)
def branch_nodes(self,base_node,level,only_category=True):
if not only_category:
return self.filter(level=level,node_id__istartswith=base_node.node_id+":")
return self.filter(level=level,node_id__istartswith=base_node.node_id+":",is_category=True)
def update_category(self,cat):
ctype = ContentType.objects.get_for_model(Category)
for cat_node in self.filter(content_type=ctype,object_id=cat.id):
level=int(cat_node.level)
old_node_id=str(cat_node.node_id)
parent_node_id=":".join(old_node_id.split(":")[:-1])
if parent_node_id:
new_node_id=parent_node_id+":"+cat.label
else:
new_node_id=cat.label
cat_node.node_id=new_node_id
cat_node.save()
if not cat_node.has_children: return
cat_children=list(self.filter(node_id__istartswith=old_node_id+":",level=level+1))
for child in cat_children:
self.reparent(new_node_id,level,child)
def remove_category(self,cat):
ctype = ContentType.objects.get_for_model(Category)
node_ids=[]
for cat_node in self.filter(content_type=ctype,object_id=cat.id):
node_ids.append(cat_node.node_id)
self.filter(node_id__istartswith=cat_node.node_id+':').delete()
cat_node.delete()
def create_category(self,cat):
newobj=self.create(content_object=cat,node_id=cat.label,has_children=False,level=0)
newobj.save()
return newobj
def reparent(self,parent_node_id,parent_level,cat_node):
ret=[]
old_node_id=str(cat_node.node_id)
old_level=int(cat_node.level)
rel_node_id=old_node_id.split(":")[-1]
if parent_node_id:
new_node_id=parent_node_id+":"+rel_node_id
else:
new_node_id=rel_node_id
if parent_level>=0:
new_level=parent_level+1
else:
new_level=0
cat_node.node_id=new_node_id
cat_node.level=new_level
cat_node.save()
ret.append(("R",cat_node))
if not cat_node.has_children: return ret
cat_children=list(self.filter(node_id__istartswith=old_node_id+":"))
for cch_node in cat_children:
new_cch_node_id=str(cch_node.node_id).replace(old_node_id+":",new_node_id+":",1)
new_cch_level=int(cch_node.level)-old_level+new_level
cch_node.node_id=new_cch_node_id
cch_node.level=new_cch_level
cch_node.save()
ret.append(("R",cch_node))
return ret
def clone(self,parent_node_id,parent_level,cat_node):
ret=[]
old_node_id=str(cat_node.node_id)
old_level=int(cat_node.level)
rel_node_id=old_node_id.split(":")[-1]
if parent_node_id:
new_node_id=parent_node_id+":"+rel_node_id
else:
new_node_id=rel_node_id
if parent_level>=0:
new_level=parent_level+1
else:
new_level=0
newobj=self.create(content_object=cat_node.content_object,
node_id=new_node_id,
has_children=cat_node.has_children,
level=new_level)
newobj.save()
ret.append(("C",newobj))
if not cat_node.has_children: return ret
cat_children=list(self.filter(node_id__istartswith=old_node_id+":"))
for cch_node in cat_children:
new_cch_node_id=str(cch_node.node_id).replace(old_node_id+":",new_node_id+":",1)
new_cch_level=int(cch_node.level)-old_level+new_level
newobj=self.create(content_object=cch_node.content_object,
node_id=new_cch_node_id,
has_children=cch_node.has_children,
level=new_cch_level)
newobj.save()
ret.append(("C",newobj))
return ret
def add_child_category(self,parent,child):
parent_nodes=list(parent.tree_nodes.all())
child_nodes=list(child.tree_nodes.all())
cn=child_nodes[0]
startind=0
new_objects=[]
if len(child_nodes)==1 and child_nodes[0].level==0:
            ## the only child is a root node
fn=parent_nodes[0]
new_objects=self.reparent(str(fn.node_id),int(fn.level),cn)
startind=1
fn.has_children=True
fn.save()
for fn in parent_nodes[startind:]:
new_objects+=self.clone(str(fn.node_id),int(fn.level),cn)
fn.has_children=True
fn.save()
return new_objects
def remove_child_category(self,parent,child):
parent_nodes=list(parent.tree_nodes.all())
child_nodes=list(child.tree_nodes.all())
del_list=[]
for fn in parent_nodes:
fn_node_id=str(fn.node_id)
for cn in child_nodes:
cn_node_id=str(cn.node_id)
cn_rel_node_id=cn_node_id.split(":")[-1]
if cn_node_id==fn_node_id+":"+cn_rel_node_id:
del_list.append((fn,cn))
break
if len(del_list)==len(child_nodes):
objs=self.clone("",-1,child_nodes[0])
for action,obj in objs:
obj.save()
for parent,node in del_list:
self.remove_branch(node)
parent.has_children=bool(self.filter(node_id__istartswith=str(parent.node_id)+":").exists())
parent.save()
def update_child_category(self,old_parent,old_child,new_parent,new_child):
if not old_parent and not old_child: return
if (old_parent==new_parent) and (old_child==new_child): return
self.remove_child_category(old_parent,old_child)
self.add_child_category(new_parent,new_child)
def remove_branch(self,basenode):
base_node_id=str(basenode.node_id)
self.filter(node_id__istartswith=base_node_id+":").delete()
self.filter(node_id=base_node_id).delete()
def add_category_relation(self,cat,child):
parent_nodes=list(cat.tree_nodes.all())
ret=[]
for fn in parent_nodes:
new_node_id=str(fn.node_id)+":"+str(child.id)
new_level=int(fn.level)+1
newobj=self.create(content_object=child,
node_id=new_node_id,
has_children=False,
level=new_level)
ret.append(("C",newobj))
fn.has_children=True
fn.save()
return ret
def remove_category_relation(self,cat,child):
parent_nodes=list(cat.tree_nodes.all())
node_ids=[]
for fn in parent_nodes:
node_ids.append(str(fn.node_id)+":"+str(child.id))
self.filter(node_id__in=node_ids).delete()
for fn in parent_nodes:
fn.has_children=bool(self.filter(node_id__istartswith=str(fn.node_id)+":").exists())
fn.save()
def update_category_relation(self,old_cat,old_child,new_cat,new_child):
if not old_cat and not old_child: return
if (old_cat==new_cat) and (old_child==new_child): return
self.remove_category_relation(old_cat,old_child)
self.add_category_relation(new_cat,new_child)
def get_num_objects(self,catnode):
if not catnode.is_category: return 1
N=self.filter(node_id__istartswith=catnode.node_id+":",is_category=False).values("content_type","object_id").distinct().count()
return N
def max_level(self,only_cat=True):
if not only_cat:
return self.all().aggregate(Max('level'))["level__max"]
return self.filter(is_category=True).aggregate(Max('level'))["level__max"]
class CategoryTreeNode(models.Model):
content_type = models.ForeignKey(ContentType,on_delete=models.PROTECT)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type','object_id')
node_id = models.CharField(max_length=4096,unique=True)
has_children = models.BooleanField()
level = models.PositiveIntegerField()
objects = CategoryTreeNodeManager()
label = models.CharField(max_length=4096,editable=False)
label_children = models.CharField(max_length=4096,editable=False)
is_category = models.BooleanField(editable=False)
num_objects = models.PositiveIntegerField(editable=False)
def branch_depth(self,only_cat=True):
if only_cat:
ret=CategoryTreeNode.objects.filter(node_id__istartswith=self.node_id+":",is_category=True).aggregate(Max('level'))["level__max"]
else:
ret=CategoryTreeNode.objects.filter(node_id__istartswith=self.node_id+":").aggregate(Max('level'))["level__max"]
if not ret: return 0
return ret
def branch_level_size(self,level,only_cat=True):
if only_cat:
return CategoryTreeNode.objects.filter(node_id__istartswith=self.node_id+":",
level=level,is_category=True).count()
return CategoryTreeNode.objects.filter(node_id__istartswith=self.node_id+":",level=level).count()
def branch(self,only_cat=True):
if only_cat:
return CategoryTreeNode.objects.filter(node_id__istartswith=self.node_id+":",is_category=True)
return CategoryTreeNode.objects.filter(node_id__istartswith=self.node_id+":")
def __str__(self):
U= "%3d %s" % (int(self.level),str(self.node_id))
return U
def direct_size(self):
if not self.is_category: return 0
return self.content_object.child_set.count()
class Meta:
ordering = [ "node_id" ]
def save(self, *args, **kwargs):
self.label_children="_"+str(self.node_id).replace(":","_")
t=str(self.node_id).split(":")
if len(t)==1:
self.label=""
else:
self.label="_"+"_".join(t[:-1])
self.is_category=( self.content_type.model_class() == Category )
self.num_objects = CategoryTreeNode.objects.get_num_objects(self)
super(CategoryTreeNode, self).save(*args, **kwargs)
class CategoryManager(models.Manager):
use_for_related_fields = True
def get_query_set(self):
class CategoryQueryset(models.query.QuerySet):
def all_in_branch(self,parent_id):
parent=Category.objects.get(id=int(parent_id))
children_ids=[parent.id]
for catnode in parent.tree_nodes.all():
L=catnode.branch()
children_ids+=[x.object_id for x in list(L)]
children_ids=list(set(children_ids))
return self.filter(id__in=children_ids)
return CategoryQueryset(Category)
def query_set_branch(self,queryset,parent_id):
parent=Category.objects.get(id=int(parent_id))
children_ids=[parent.id]
for catnode in parent.tree_nodes.all():
L=catnode.branch()
children_ids+=[x.object_id for x in list(L)]
children_ids=list(set(children_ids))
return queryset.filter(id__in=children_ids)
def all_in_branch(self,parent_id):
return self.get_query_set().all_in_branch(parent_id)
def merge(self,cat_queryset):
new_name="[merge]"
old_cats=list(cat_queryset.all())
for cat in old_cats:
new_name+=" "+cat.name
new_cat=self.create(name=new_name)
children=[]
for catrel in CategoryRelation.objects.filter(parent__in=old_cats):
if catrel.child in children:
catrel.delete()
continue
catrel.parent=new_cat
children.append(catrel.child)
catrel.save()
parents=[]
for catrel in CategoryRelation.objects.filter(child__in=old_cats):
if new_cat==catrel.parent:
catrel.delete()
continue
if catrel.parent in parents:
catrel.delete()
continue
catrel.child=new_cat
parents.append(catrel.parent)
catrel.save()
L=[]
for catrel in CategoryTimeSpanRelation.objects.filter(category__in=old_cats):
if catrel.time_span in L:
catrel.delete()
continue
catrel.category=new_cat
catrel.save()
L.append(catrel.time_span)
L=[]
for catrel in CategoryPlaceRelation.objects.filter(category__in=old_cats):
if catrel.place in L:
catrel.delete()
continue
catrel.category=new_cat
catrel.save()
L.append(catrel.place)
L=[]
for catrel in CategoryPersonRelation.objects.filter(category__in=old_cats):
if catrel.person in L:
catrel.delete()
continue
catrel.category=new_cat
catrel.save()
L.append(catrel.person)
L=[]
for catrel in CategoryLanguageRelation.objects.filter(category__in=old_cats):
if catrel.language in L:
catrel.delete()
continue
catrel.category=new_cat
catrel.save()
L.append(catrel.language)
for cat in old_cats:
for book in cat.book_set.all():
book.categories.add(new_cat)
book.categories.remove(cat)
cat.delete()
class Category(models.Model):
name = models.CharField(max_length=4096,unique=True)
label = models.SlugField(max_length=4096,editable=False,unique=True)
tree_nodes = GenericRelation(CategoryTreeNode)
objects = CategoryManager()
def __str__(self): return str(self.name)
class Meta:
ordering = ["name"]
def slugify(self):
S=str(self.name)
S=S.replace("#","sharp")
S=S.replace("++","plusplus")
return django.template.defaultfilters.slugify(S)
def save(self, *args, **kwargs):
self.label = self.slugify()
super(Category, self).save(*args, **kwargs)
def parents(self):
return "; ".join([str(x.parent) for x in self.parent_set.all()])
def children(self):
return "; ".join([str(x.child) for x in self.child_set.all()])
def time_span(self):
return "; ".join([str(x.time_span) for x in self.categorytimespanrelation_set.all()])
def place(self):
return "; ".join([str(x.place) for x in self.categoryplacerelation_set.all()])
def person(self):
return "; ".join([str(x.person) for x in self.categorypersonrelation_set.all()])
def language(self):
return "; ".join([str(x.language) for x in self.categorylanguagerelation_set.all()])
def num_books(self):
return self.book_set.count()
def min_level(self):
level=-1
for node in self.tree_nodes.all():
if level<0:
level=node.level
continue
level=min(level,node.level)
return level
def num_objects(self):
node=self.tree_nodes.all().first()
return node.num_objects
def my_branch_depth(self):
node=self.tree_nodes.all().first()
return node.branch_depth()
def my_branch_id(self):
level=-1
elected=None
for node in self.tree_nodes.all():
if level<0:
elected=node
level=node.level
continue
if level<=node.level: continue
elected=node
level=node.level
node_id=elected.node_id
big_parent_id=node_id.split(":")[0]
#big_parent_node=CategoryTreeNode.objects.get(node_id=big_parent_id)
return big_parent_id
class CategoryRelation(models.Model):
child = models.ForeignKey(Category,related_name="parent_set",on_delete=models.PROTECT)
parent = models.ForeignKey(Category,related_name="child_set",on_delete=models.PROTECT)
def __str__(self):
return str(self.parent)+"/"+str(self.child)
class Meta:
ordering = ["parent","child"]
class CategoryTimeSpanRelation(models.Model):
time_span=models.ForeignKey(TimeSpan,on_delete=models.PROTECT)
category=models.ForeignKey(Category,on_delete=models.PROTECT)
def __str__(self):
return str(self.time_span)+"/"+str(self.category)
class CategoryPlaceRelation(models.Model):
place=models.ForeignKey(Place,on_delete=models.PROTECT)
category=models.ForeignKey(Category,on_delete=models.PROTECT)
def __str__(self):
return str(self.place)+"/"+str(self.category)
class CategoryPersonRelation(models.Model):
person=models.ForeignKey(Person,on_delete=models.PROTECT)
category=models.ForeignKey(Category,on_delete=models.PROTECT)
def __str__(self):
return str(self.person)+"/"+str(self.category)
class CategoryLanguageRelation(models.Model):
language=models.ForeignKey(LanguageVariety,on_delete=models.PROTECT)
category=models.ForeignKey(Category,on_delete=models.PROTECT)
def __str__(self):
return str(self.language)+"/"+str(self.category)
class CategorizedObject(models.Model):
categories = models.ManyToManyField(Category,blank=True)
class Meta:
abstract = True
def get_categories(self):
return "; ".join([str(x) for x in self.categories.all()])
### authors
def print_result(label):
def g(func):
def f(*args):
res=func(*args)
print(label,res,*args)
return res
return f
return g
class AuthorManager(PersonManager):
def catalog(self):
class PubTuple(tuple):
def __new__ (cls, year,role,obj):
x=super(PubTuple, cls).__new__(cls, tuple( (year,role,obj) ))
return x
def __str__(self):
return "(%s,%s,%s)" % (str(self._year),str(self._role),str(self._obj))
def __init__(self,year,role,obj):
self._year=year
self._role=role
self._obj=obj
#@print_result("EQ")
def __eq__(self,other):
if self._year!=other._year: return False
if type(self._obj) is not type(other._obj): return False
return self._obj.id == other._obj.id
#@print_result("LT")
def __lt__(self,other):
if self._year < other._year: return True
if self._year > other._year: return False
if type(self._obj) is type(other._obj):
return self._obj.id < other._obj.id
if type(self._obj) is Book: return True
if type(other._obj) is Book: return False
return type(self._obj) is Issue
# if isinstance(self._obj,Book):
# if isinstance(other._obj,Book):
# if self._obj.title == other._obj.title:
# return self._obj.id < other._obj.id
# return self._obj.title < other._obj.title
# return True
# if isinstance(other._obj,Book): return False
# if isinstance(self._obj,Issue):
# s_date=self._obj.date
# else:
# s_date=self._obj.issue.date
# if isinstance(other._obj,Issue):
# o_date=other._obj.date
# else:
# o_date=other._obj.issue.date
# if s_date<o_date: return True
# if s_date>o_date: return False
# if type(self._obj) is not type(other._obj):
# return type(self._obj) is Issue
# if self._obj.title == other._obj.title:
# return self._obj.id < other._obj.id
# return self._obj.title < other._obj.title
            def __gt__(self,other): return other.__lt__(self)
            def __le__(self,other): return self.__eq__(other) or self.__lt__(other)
            def __ge__(self,other): return self.__eq__(other) or self.__gt__(other)
            def __ne__(self,other): return not self.__eq__(other)
class CatAuthor(object):
def __init__(self,db_author):
self._db_author=db_author
self.id=db_author.id
self.list_name=db_author.list_name()
self.long_name=db_author.long_name()
self.ordering_name=db_author.ordering_name()
self._publications=[]
def add(self,pub):
heapq.heappush(self._publications, pub)
@property
def publications(self):
return heapq.nsmallest(len(self._publications), self._publications)
issues=[ (rel.author,rel.author_role,rel.issue)
for rel in IssueAuthorRelation.objects.all().select_related() ]
books=[ (rel.author,rel.author_role,rel.book)
for rel in BookAuthorRelation.objects.all().select_related() ]
articles=[ (rel.author,rel.author_role,rel.article)
for rel in ArticleAuthorRelation.objects.all().select_related() ]
authors=[ CatAuthor(aut) for aut in self.all().select_related().prefetch_related("cache") ]
dict_aut={ aut.id: aut for aut in authors }
for aut,role,obj in issues:
dict_aut[aut.id].add( PubTuple(obj.year(),role,obj) )
for aut,role,obj in books:
dict_aut[aut.id].add( PubTuple(obj.year,role,obj) )
for aut,role,obj in articles:
dict_aut[aut.id].add( PubTuple(obj.year(),role,obj) )
return authors
#return self.all().select_related().prefetch_related("cache","authorrelation_set")
class Author(Person):
objects=AuthorManager()
class Meta:
proxy = True
def publications(self):
L=[]
for rel in self.authorrelation_set.all().select_related():
L.append( (rel.year,rel.author_role,rel.actual()) )
return L
def get_absolute_url(self):
return "/bibliography/author/%d" % self.pk
def save(self,*args,**kwargs):
Person.save(self,*args,**kwargs)
class AuthorRole(LabeledAbstract):
cover_name = models.BooleanField(default=False)
action = models.CharField(default="",max_length=1024,blank=True)
pos = models.IntegerField(unique=True)
class AuthorRelation(models.Model):
author = models.ForeignKey(Author,on_delete=models.PROTECT)
author_role = models.ForeignKey(AuthorRole,on_delete=models.PROTECT)
content_type = models.ForeignKey(ContentType,editable=False,null=True,on_delete=models.PROTECT)
year = models.IntegerField(editable=False,db_index=True)
#year_label = models.CharField(max_length=10,editable=False)
#title = models.CharField(max_length=4096)
class Meta:
ordering = [ "year" ]
def _year(self): return 0
def _title(self): return ""
def html(self): return ""
def update_year(self):
try:
self.year=self.actual()._year()
except:
self.year=self._year()
self.save()
def actual(self):
model = self.content_type.model
return self.__getattribute__(model)
def save(self,*args, **kwargs):
if (not self.content_type):
self.content_type = ContentType.objects.get_for_model(self.__class__)
try:
self.year=self.actual()._year()
except:
self.year=self._year()
super(AuthorRelation, self).save(*args, **kwargs)
def clean(self,*args,**kwargs):
self.year=self._year()
super(AuthorRelation, self).clean(*args, **kwargs)
class MigrAuthor(models.Model):
cod = models.CharField(max_length=1,default="-",db_index=True)
ind = models.IntegerField(db_index=True)
author = models.ForeignKey(Author,on_delete=models.PROTECT)
def __str__(self): return str(self.cod)+str(self.ind)+" "+str(self.author)
### publishers
class PublisherState(models.Model):
name = models.CharField(max_length=4096)
class Meta:
ordering = ["name"]
def __str__(self): return str(self.name)
class PublisherAddress(models.Model):
city = models.CharField(max_length=4096)
state = models.ForeignKey(PublisherState,on_delete=models.PROTECT)
def __str__(self): return str(self.city)+" - "+str(self.state)
class Meta:
ordering = ["city"]
class PublisherIsbnManager(models.Manager):
def isbn_alpha(self):
return self.all().filter(isbn__iregex=r'^[a-z].*')
def split_isbn(self,unseparated):
if not unseparated: return [],[]
isbn_list=[]
for isbn in unseparated:
for n in range(1,9):
isbn_list.append(isbn[:n])
L=[ v.isbn for v in self.filter(isbn__in=isbn_list) ]
if not L:
return [],unseparated
uns=[]
sep=[]
for isbn in unseparated:
trovato=False
for db_isbn in L:
if isbn.startswith(db_isbn):
trovato=True
isbn_book=isbn[len(db_isbn):]
sep.append( (db_isbn,isbn_book) )
break
if not trovato:
uns.append(isbn)
return sep,uns
class PublisherIsbn(models.Model):
isbn = models.CharField(max_length=4096,unique=True,db_index=True)
preferred = models.ForeignKey("Publisher",editable=False,blank=True,on_delete=models.PROTECT)
objects = PublisherIsbnManager()
class Meta:
ordering = ["isbn"]
def update_preferred(self):
self.preferred=self.get_preferred()
self.save()
def get_preferred(self):
if self._state.adding:
return Publisher.objects.get(pk=0)
pubs=list(self.publisher_set.all())
if len(pubs)==0: return Publisher.objects.get(pk=0)
if len(pubs)!=1:
for p in pubs:
if not p.alias:
return p
return pubs[0]
def clean(self,*args,**kwargs):
self.preferred=self.get_preferred()
super(PublisherIsbn, self).clean(*args, **kwargs)
def save(self,*args,**kwargs):
self.preferred=self.get_preferred()
super(PublisherIsbn, self).save(*args, **kwargs)
def __str__(self): return str(self.isbn)
def publishers(self):
return "; ".join(map(str, self.publisher_set.all()))
class PublisherManager(models.Manager):
def add_prefetch(self,obj_list):
qset=self.filter(id__in=[obj.id for obj in obj_list])
qset=qset.prefetch_related("addresses")
return qset
def look_for(self,isbn_list):
qset=PublisherIsbn.objects.filter(isbn__in=isbn_list)
for pub in qset:
isbn_list.remove( pub.isbn )
isbn_ids=[ obj.id for obj in qset ]
p_qset=self.filter(isbns__id__in=isbn_ids).prefetch_related("isbns","addresses")
return p_qset,isbn_list
class Publisher(models.Model):
name = models.CharField(max_length=4096)
full_name = models.CharField(max_length=4096,blank=True)
url = models.CharField(max_length=4096,default="--")
note = models.TextField(blank=True,default="")
addresses = models.ManyToManyField(PublisherAddress,through='PublisherAddressPublisherRelation',blank=True)
alias = models.BooleanField(default=False)
isbns = models.ManyToManyField(PublisherIsbn,blank=True)
objects=PublisherManager()
class Meta:
ordering = ["name"]
def short_name(self):
name=self.show_name().lower()
tname=name.replace(".","").replace(",","").split()
for s in [ "srl", "spa","editore","editrice","edizioni","verlag","publisher","inc",
"éditions","editions","edition","editorial","editori","editoriale","ltd",
"gruppo","publishing","yayın","yayınları","co","publications","press","editoriali"]:
if s in tname:
tname.remove(s)
tname=[ s.capitalize() for s in tname ]
return " ".join(tname)
def clean(self,*args,**kwargs):
if not self.full_name:
self.full_name=self.name
super(Publisher, self).clean(*args, **kwargs)
def __str__(self): return str(self.name)
def address(self):
return " - ".join([str(x.address.city) for x in self.publisheraddresspublisherrelation_set.order_by("pos")])
def show_name(self):
if self.full_name: return self.full_name
return self.name
def html(self):
H=self.name
adrs=self.address()
if adrs:
H+=", "+adrs
return H
@cached_property
def isbn_prefix(self):
return ", ".join([str(x.isbn) for x in self.isbns.all()])
@cached_property
def isbn_list(self):
return [str(x.isbn) for x in self.isbns.all()]
class PublisherAddressPublisherRelation(PositionAbstract):
address = models.ForeignKey(PublisherAddress,on_delete=models.PROTECT)
publisher = models.ForeignKey(Publisher,on_delete=models.PROTECT)
def __str__(self): return str(self.publisher)+" ["+str(self.pos)+"] "+str(self.address)
class MigrPublisherRiviste(models.Model):
registro = models.CharField(max_length=4096)
publisher = models.ForeignKey(Publisher,on_delete=models.PROTECT)
def __str__(self): return str(self.registro)
### publications
class VolumeType(LabeledAbstract):
read_as = models.CharField(max_length=1024,default="")
class PublicationManager(models.Manager):
def issn_alpha(self):
return self.all().filter(issn_crc='Y')
class Publication(models.Model):
issn = models.CharField(max_length=128) #7
issn_crc = models.CharField(max_length=1,editable=False,default="Y")
publisher = models.ForeignKey(Publisher,on_delete=models.PROTECT)
title = models.CharField(max_length=4096)
volume_type = models.ForeignKey(VolumeType,on_delete=models.PROTECT)
date_format = models.CharField(max_length=4096,default="%Y-%m-%d")
objects=PublicationManager()
#periodicity=models.CharField(max_length=128,choices=[ ("monthly","monthly"),("unknown","unknown") ],default="unknown")
#first_day=models.IntegerField(default=1)
class Meta:
ordering = ['title']
def html(self):
tit=str(self.title)
if not tit: return ""
return "<i>"+tit+"</i>"
def __str__(self): return str(self.title)
def get_absolute_url(self):
return "/bibliography/publication/%d" % self.pk
def update_crc(self):
self.issn_crc = self.crc()
self.save()
def crc(self):
if not str(self.issn).isdigit(): return('Y')
pesi=[8,7,6,5,4,3,2]
cod_lista=list(map(int,list(self.issn)))
if len(cod_lista)<7:
L=len(cod_lista)
cod_lista+=[0 for x in range(L,7)]
crc=11-(sum(map(lambda x,y: x*y,cod_lista,pesi))%11)
if (crc==10): return('X')
if (crc==11): return(0)
return(crc)
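# Worked example of the ISSN check digit computed by crc(): for ISSN 0378-5955
# the stored digits are 0,3,7,8,5,9,5, so the weighted sum is
# 0*8 + 3*7 + 7*6 + 8*5 + 5*4 + 9*3 + 5*2 = 160, 160 % 11 = 6 and the
# check digit is 11 - 6 = 5.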
def clean(self,*args,**kwargs):
self.issn_crc = self.crc()
super(Publication, self).clean(*args, **kwargs)
def issue_set(self):
return Issue.objects.filter(volume__publication__id=self.id).order_by("date")
class Volume(models.Model):
label = models.CharField(max_length=256,db_index=True)
publication = models.ForeignKey(Publication,on_delete=models.PROTECT)
def __str__(self): return str(self.publication)+" - "+str(self.label)
def html(self):
H=self.publication.html()
if H:
H+=", "
H+=str(self.publication.volume_type.read_as)
if H:
H+=" "
H+=str(self.label)
return H
### publication issues
class IssueType(LabeledAbstract): pass
class IssueManager(models.Manager):
def by_publication(self,publication):
return self.all().filter(volume__publication__id=publication.id).order_by("date")
class Issue(models.Model):
volume = models.ForeignKey(Volume,on_delete=models.PROTECT)
issue_type = models.ForeignKey(IssueType,on_delete=models.PROTECT)
issn_num = models.CharField(max_length=8)
number = models.CharField(max_length=256)
title = models.CharField(max_length=4096,blank=True,default="")
date = models.DateField()
date_ipotetic = models.BooleanField(default=False)
html_cache = models.TextField(blank=True,null=True,default="",editable=False)
authors = models.ManyToManyField(Author,through='IssueAuthorRelation',blank=True)
objects=IssueManager()
class Meta:
ordering = ['date']
def issn(self):
return self.volume.publication.issn
def show_date(self):
D=self.date.strftime(self.volume.publication.date_format)
if self.date_ipotetic:
return D+"?"
return D
def save(self,*args,**kwargs):
self.html_cache=self._html()
return models.Model.save(self,*args,**kwargs)
def html(self): return self.html_cache
def _html(self):
H=self.volume.html()
if H:
H+=", "
H+="n. "+str(self.number)
tit=str(self.title)
if tit:
H+=", <i>"+tit+"</i>"
H+=", "
H+=self.date.strftime("%B %Y")
if self.date_ipotetic:
H+="?"
return H
def __str__(self):
U=str(self.volume)
U+="/"+str(self.number)
if str(self.title):
U+=". "+str(self.title)
return U
def year(self):
return self.date.year
class IssueAuthorRelation(AuthorRelation,PositionAbstract):
issue = models.ForeignKey(Issue,on_delete=models.PROTECT)
def __str__(self): return str(self.author)+", "+str(self.issue)
def _year(self): return int(self.issue.year())
def _title(self): return str(self.issue.title)
def html(self): return self.issue.html()
class Meta:
ordering=["pos"]
#unique_together= [ 'author','author_role','issue' ]
def save(self,*args,**kwargs):
if not self.pos:
self.pos=1
return super(IssueAuthorRelation,self).save(*args,**kwargs)
class Article(models.Model):
title = models.CharField(max_length=4096)
issue = models.ForeignKey(Issue,on_delete=models.PROTECT)
page_begin = models.CharField(max_length=10,blank=True,default="x")
page_end = models.CharField(max_length=10,blank=True,default="x")
authors = models.ManyToManyField(Author,through='ArticleAuthorRelation',blank=True)
html_cache = models.TextField(blank=True,null=True,default="",editable=False)
def get_authors(self):
return ", ".join([str(x.author.long_name()) for x in self.articleauthorrelation_set.filter(author_role__cover_name=True).order_by("pos")])
def get_secondary_authors(self):
L=list(self.articleauthorrelation_set.filter(author_role__cover_name=False).order_by("author_role__pos","pos"))
ret=""
curr_pos=-1
comma=True
for rel in L:
if curr_pos!=int(rel.author_role.pos):
action=str(rel.author_role.action).strip()
if action:
if ret:
ret+=", "
ret+=action+" "
comma=False
curr_pos=int(rel.author_role.pos)
if ret and comma: ret+=", "
ret+=rel.author.long_name()
comma=True
return ret
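# Example of the grouping above (role names and actions are illustrative):
# with an "editor" role whose action is "edited by" and a "translator" role
# whose action is "translated by", the result reads
# "edited by John Smith, translated by Jane Doe"; authors sharing a role are
# simply comma-separated after that role's action.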
def __str__(self): return str(self.title) #+" ("+unicode(self.year)+")"
def issn(self): return self.issue.issn()
def issn_num(self): return self.issue.issn_num
def year(self): return self.issue.year()
def save(self,*args,**kwargs):
self.html_cache=self._html()
return models.Model.save(self,*args,**kwargs)
def html(self): return self.html_cache
def _html(self):
H=""
H+=self.get_authors()
if H:
H+=", "
H+="“"+str(self.title)+"”, "
sec_authors=self.get_secondary_authors()
if sec_authors:
H+=sec_authors+", "
issue=self.issue.html()
if issue:
H+=issue+", "
if str(self.page_begin)==str(self.page_end):
H+="p. "+str(self.page_begin)
else:
H+="pp. "+str(self.page_begin)+"-"+str(self.page_end)
return H
class ArticleAuthorRelation(AuthorRelation,PositionAbstract):
article = models.ForeignKey(Article,on_delete=models.PROTECT)
def __str__(self): return str(self.author)+", "+str(self.article)
def _year(self): return int(self.article.year())
def _title(self): return str(self.article.title)
def html(self): return self.article.html()
class Meta:
ordering=["pos"]
### books
class BookManager(models.Manager):
def isbn_alpha(self):
return self.all().filter(isbn_crc10='Y').order_by("isbn_ced","isbn_book","year","title")
def by_isbn_pub(self,isbn):
return self.all().filter(isbn_ced__iexact=isbn).order_by("isbn_ced","isbn_book","year","title")
def add_prefetch(self,obj_list):
qset=self.filter(id__in=[book.id for book in obj_list])
qset=qset.select_related("publisher").prefetch_related("authors")
return qset
def look_for(self,isbn_list):
if not isbn_list: return None,[]
q=models.Q()
for isbn_ced,isbn_book in isbn_list:
q=q|models.Q(isbn_ced=isbn_ced,isbn_book=isbn_book)
qset=self.filter(q).select_related("publisher").prefetch_related("authors")
new_isbn_list=[]
for book in qset:
isbn_list.remove( (book.isbn_ced,book.isbn_book) )
return qset,isbn_list
class Book(CategorizedObject):
isbn_ced = models.CharField(max_length=9,db_index=True)
isbn_book = models.CharField(max_length=9,db_index=True)
isbn_crc10 = models.CharField(max_length=1,editable=False,default="Y")
isbn_crc13 = models.CharField(max_length=1,editable=False,default="Y")
isbn_cache10 = models.CharField(max_length=20,editable=False,default="")
isbn_cache13 = models.CharField(max_length=20,editable=False,default="")
title = models.CharField(max_length=4096)
year = models.IntegerField()
year_ipotetic = models.BooleanField(default=False)
publisher = models.ForeignKey(Publisher,on_delete=models.PROTECT)
authors = models.ManyToManyField(Author,through='BookAuthorRelation',blank=True)
html_cache = models.TextField(blank=True,default="",editable=False)
objects=BookManager()
class Meta:
ordering=["title","year","publisher"]
index_together=[ ["isbn_ced","isbn_book"] ]
def get_authors(self):
return ", ".join([str(x.author.long_name()) for x in self.bookauthorrelation_set.filter(author_role__cover_name=True).order_by("pos")])
def get_absolute_url(self):
U="/bibliography/book/%d" % self.pk
return U
def get_secondary_authors(self):
L=list(self.bookauthorrelation_set.filter(author_role__cover_name=False).order_by("author_role__pos","pos"))
ret=""
curr_pos=-1
comma=True
for rel in L:
if curr_pos!=int(rel.author_role.pos):
action=str(rel.author_role.action).strip()
if action:
if ret:
ret+=", "
ret+=action+" "
comma=False
curr_pos=int(rel.author_role.pos)
if ret and comma: ret+=", "
ret+=rel.author.long_name()
comma=True
return ret
def __str__(self):
if not self.year_ipotetic:
return str(self.title)+" ("+str(self.year)+")"
return str(self.title)+" ("+str(self.year)+"?)"
@cached_property
def html(self): return self.html_cache
def _html(self):
H=""
H+=self.get_authors()
if H:
H+=", "
H+="<i>"+str(self.title)+"</i>, "
sec_authors=self.get_secondary_authors()
if sec_authors:
H+=sec_authors+", "
pub=self.publisher.html()
if pub:
H+=pub+", "
H+=str(self.year)
if self.year_ipotetic: H+="?"
return H
def clean(self,*args,**kwargs):
self.isbn_crc10 = self.crc10()
self.isbn_crc13 = self.crc13()
self.isbn_cache10=self.isbn_ced+self.isbn_book+str(self.crc10())
self.isbn_cache13='978'+self.isbn_ced+self.isbn_book+str(self.crc13())
super(Book, self).clean(*args, **kwargs)
def save(self,*args,**kwargs):
self.isbn_crc10 = self.crc10()
self.isbn_crc13 = self.crc13()
self.isbn_cache10=self.isbn_ced+self.isbn_book+str(self.crc10())
self.isbn_cache13='978'+self.isbn_ced+self.isbn_book+str(self.crc13())
self.html_cache=self._html()
super(Book, self).save(*args, **kwargs)
def update_crc(self):
self.isbn_crc10 = self.crc10()
self.isbn_crc13 = self.crc13()
self.isbn_cache10=self.isbn_ced+self.isbn_book+str(self.crc10())
self.isbn_cache13='978'+self.isbn_ced+self.isbn_book+str(self.crc13())
self.save()
def isbn10(self):
return str(self.isbn_ced)+"-"+str(self.isbn_book)+"-"+str(self.isbn_crc10)
def isbn13(self):
return "978-"+str(self.isbn_ced)+"-"+str(self.isbn_book)+"-"+str(self.isbn_crc13)
def crc10(self):
if not str(self.isbn_book).isdigit(): return('Y')
if not str(self.isbn_ced).isdigit(): return('Y')
isbn=str(self.isbn_ced)+str(self.isbn_book)
pesi=[10,9,8,7,6,5,4,3,2]
cod_lista=list(map(int,list(isbn)))
if len(cod_lista)<9:
L=len(cod_lista)
cod_lista+=[0 for x in range(L,9)]
crc=11-(sum(map(lambda x,y: x*y,cod_lista,pesi))%11)
if (crc==10): return('X')
if (crc==11): return(0)
return(crc)
def crc13(self):
if not str(self.isbn_book).isdigit(): return('Y')
if not str(self.isbn_ced).isdigit(): return('Y')
isbn=str(self.isbn_ced)+str(self.isbn_book)
pesi=[1,3,1,3,1,3,1,3,1,3,1,3]
cod_lista=[9,7,8]+list(map(int,list(isbn)))
if len(cod_lista)<12:
L=len(cod_lista)
cod_lista+=[0 for x in range(L,12)]
crc=10-(sum(map(lambda x,y: x*y,cod_lista,pesi))%10)
if (crc==10): return(0)
return(crc)
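# Worked example for crc10()/crc13(); only the publisher prefix and book number
# are stored, the leading 978 and the check digits are derived:
#   isbn_ced + isbn_book = "030640615"
#   ISBN-10: 0*10+3*9+0*8+6*7+4*6+0*5+6*4+1*3+5*2 = 130, check = 11 - (130 % 11) = 2
#   ISBN-13: weights 1,3,1,3,... over 9,7,8,0,3,0,6,4,0,6,1,5 give 93, check = 10 - (93 % 10) = 7
#   so isbn_cache10 = "0306406152" and isbn_cache13 = "9780306406157".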
class BookAuthorRelation(AuthorRelation,PositionAbstract):
book = models.ForeignKey(Book,on_delete=models.PROTECT)
def __str__(self): return str(self.author)+", "+str(self.book)
def _year(self): return int(self.book.year)
def _title(self): return str(self.book.title)
def html(self): return self.book.html()
def get_absolute_url(self): return self.book.get_absolute_url()
class Meta:
ordering=["pos"]
class TextsCdrom(LabeledAbstract):
books = models.ManyToManyField(Book,blank=True)
# class BookTimeSpanRelation(models.Model):
# time_span=models.ForeignKey(TimeSpan)
# book=models.OneToOneField(Book)
# def __str__(self):
# return unicode(self.time_span)+u"/"+unicode(self.book)
### repository cache
class RepositoryCacheBook(models.Model):
isbn = models.CharField(max_length=13,unique=True)
publisher = models.CharField(max_length=4096,default=" ")
year = models.CharField(max_length=4096,default=" ",blank=True)
title = models.CharField(max_length=4096,default=" ")
city = models.CharField(max_length=4096,default=" ")
indb = models.BooleanField(default=False)
def clean(self,*args,**kwargs):
if not self.year:
self.year=" "
super(RepositoryCacheBook, self).clean(*args, **kwargs)
def __str__(self):
return str(self.isbn)+" "+str(self.title)
class Meta:
ordering = [ "isbn" ]
class RepositoryCacheAuthor(PositionAbstract):
book = models.ForeignKey(RepositoryCacheBook,on_delete=models.PROTECT)
name = models.CharField(max_length=4096)
role = models.CharField(max_length=4096)
def __str__(self):
return self.name
class Meta:
ordering = [ "name" ]
class RepositoryFailedIsbn(models.Model):
isbn10 = models.CharField(max_length=4096)
isbn13 = models.CharField(max_length=4096)
def __str__(self):
return self.isbn10+"/"+self.isbn13
class Meta:
ordering = [ "isbn10" ]
### others
class BookSerieWithoutIsbn(models.Model):
isbn_ced = models.CharField(max_length=9,db_index=True)
isbn_book_prefix = models.CharField(max_length=9,db_index=True)
title = models.CharField(max_length=4096)
title_prefix = models.CharField(max_length=4096,default='',blank=True)
publisher = models.ForeignKey(Publisher,on_delete=models.PROTECT)
def __str__(self): return str(self.title)
### signals
def category_post_save_handler(sender,instance,created,raw,using,update_fields,**kwargs):
if raw: return
if created:
CategoryTreeNode.objects.create_category(instance)
else:
CategoryTreeNode.objects.update_category(instance)
post_save.connect(category_post_save_handler,sender=Category)
def category_pre_delete_handler(sender,instance,using,**kwargs):
CategoryTreeNode.objects.remove_category(instance)
pre_delete.connect(category_pre_delete_handler,sender=Category)
class CategoryRelationChangeHandler(object):
def __init__(self):
self.old_parents={}
self.old_children={}
def pre_save(self,sender,instance,raw,using,update_fields,**kwargs):
if raw: return
if not instance.id: return
old_obj=CategoryRelation.objects.get(id=instance.id)
self.old_parents[instance.id]=old_obj.parent
self.old_children[instance.id]=old_obj.child
def post_save(self,sender,instance,created,raw,using,update_fields,**kwargs):
if raw: return
if created:
CategoryTreeNode.objects.add_child_category(instance.parent,instance.child)
return
old_parent=None
old_child=None
if instance.id in self.old_parents:
old_parent=self.old_parents[instance.id]
del(self.old_parents[instance.id])
if instance.id in self.old_children:
old_child=self.old_children[instance.id]
del(self.old_children[instance.id])
CategoryTreeNode.objects.update_child_category(old_parent,old_child,instance.parent,instance.child)
categoryrelation_save_handler=CategoryRelationChangeHandler()
post_save.connect(categoryrelation_save_handler.post_save,sender=CategoryRelation)
pre_save.connect(categoryrelation_save_handler.pre_save,sender=CategoryRelation)
def categoryrelation_pre_delete_handler(sender,instance,using,**kwargs):
CategoryTreeNode.objects.remove_child_category(instance.parent,instance.child)
pre_delete.connect(categoryrelation_pre_delete_handler,sender=CategoryRelation)
def categorizedobjectcategoryrelation_m2m_changed_handler(sender, instance, action, reverse,model,pk_set,using,**kwargs):
if action=="post_add":
function=CategoryTreeNode.objects.add_category_relation
elif action=="pre_remove":
function=CategoryTreeNode.objects.remove_category_relation
else:
return
if model==Category:
cat_list=Category.objects.filter(pk__in=list(pk_set))
for cat in cat_list:
function(cat,instance)
return
target_list=model.objects.filter(pk__in=list(pk_set))
for target in target_list:
function(instance,target)
m2m_changed.connect(categorizedobjectcategoryrelation_m2m_changed_handler,sender=Book.categories.through)
# @receiver(django.db.models.signals.m2m_changed, sender=Article.categories.through)
# def modify_articlecategoryrelation_handler(sender, **kwargs):
# print "Modify",kwargs["instance"],"with action:",kwargs["action"],kwargs["model"],kwargs["pk_set"]
|
chiara-paci/baskerville
|
baskervilleweb/bibliography/models.py
|
Python
|
gpl-3.0
| 75,809
|
# -*- coding: utf-8 -*-
from functools import partial
from openprocurement.edge.utils import (
context_unpack,
decrypt,
encrypt,
APIResource,
json_view
)
from openprocurement.edge.utils import planningresource
from openprocurement.edge.design import (
by_dateModified_view_ViewDefinition,
real_by_dateModified_view_ViewDefinition,
test_by_dateModified_view_ViewDefinition,
by_local_seq_view_ViewDefinition,
real_by_local_seq_view_ViewDefinition,
test_by_local_seq_view_ViewDefinition,
)
from openprocurement.edge.design import PLAN_FIELDS as FIELDS
VIEW_MAP = {
u'': real_by_dateModified_view_ViewDefinition('plans'),
u'test': test_by_dateModified_view_ViewDefinition('plans'),
u'_all_': by_dateModified_view_ViewDefinition('plans'),
}
CHANGES_VIEW_MAP = {
u'': real_by_local_seq_view_ViewDefinition('plans'),
u'test': test_by_local_seq_view_ViewDefinition('plans'),
u'_all_': by_local_seq_view_ViewDefinition('plans'),
}
FEED = {
u'dateModified': VIEW_MAP,
u'changes': CHANGES_VIEW_MAP,
}
@planningresource(name='Plans',
path='/plans',
description="Open Planing compatible data exchange format. See http://ocds.open-planing.org/standard/r/master/#plan for more info")
class PlansResource(APIResource):
def __init__(self, request, context):
super(PlansResource, self).__init__(request, context)
self.server = request.registry.couchdb_server
self.update_after = request.registry.update_after
@json_view()
def get(self):
"""Plans List
Get Plans List
----------------
Example request to get plans list:
.. sourcecode:: http
GET /plans HTTP/1.1
Host: example.com
Accept: application/json
This is what one should expect in response:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"data": [
{
"id": "64e93250be76435397e8c992ed4214d1",
"dateModified": "2014-10-27T08:06:58.158Z"
}
]
}
"""
# http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options
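# Query string parameters honoured below: opt_fields, limit (1..1000,
# default 100), descending, offset, feed ('dateModified' or 'changes')
# and mode ('', 'test' or '_all_').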
params = {}
pparams = {}
fields = self.request.params.get('opt_fields', '')
if fields:
params['opt_fields'] = fields
pparams['opt_fields'] = fields
fields = fields.split(',')
view_fields = fields + ['dateModified', 'id']
limit = self.request.params.get('limit', '')
if limit:
params['limit'] = limit
pparams['limit'] = limit
limit = int(limit) if limit.isdigit() and 1000 >= int(limit) > 0 else 100
descending = bool(self.request.params.get('descending'))
offset = self.request.params.get('offset', '')
if descending:
params['descending'] = 1
else:
pparams['descending'] = 1
feed = self.request.params.get('feed', '')
view_map = FEED.get(feed, VIEW_MAP)
changes = view_map is CHANGES_VIEW_MAP
if feed and feed in FEED:
params['feed'] = feed
pparams['feed'] = feed
mode = self.request.params.get('mode', '')
if mode and mode in view_map:
params['mode'] = mode
pparams['mode'] = mode
view_limit = limit + 1 if offset else limit
if changes:
if offset:
view_offset = decrypt(self.server.uuid, self.db.name, offset)
if view_offset and view_offset.isdigit():
view_offset = int(view_offset)
else:
self.request.errors.add('params', 'offset', 'Offset expired/invalid')
self.request.errors.status = 404
return
if not offset:
view_offset = 'now' if descending else 0
else:
if offset:
view_offset = offset
else:
view_offset = '9' if descending else ''
list_view = view_map.get(mode, view_map[u''])
if self.update_after:
view = partial(list_view, self.db, limit=view_limit, startkey=view_offset, descending=descending, stale='update_after')
else:
view = partial(list_view, self.db, limit=view_limit, startkey=view_offset, descending=descending)
if fields:
if not changes and set(fields).issubset(set(FIELDS)):
results = [
(dict([(i, j) for i, j in x.value.items() + [('id', x.id), ('dateModified', x.key)] if i in view_fields]), x.key)
for x in view()
]
elif changes and set(fields).issubset(set(FIELDS)):
results = [
(dict([(i, j) for i, j in x.value.items() + [('id', x.id)] if i in view_fields]), x.key)
for x in view()
]
elif fields:
self.LOGGER.info('Used custom fields for plans list: {}'.format(','.join(sorted(fields))),
extra=context_unpack(self.request, {'MESSAGE_ID': 'plan_list_custom'}))
results = [
(dict([(k, j) for k, j in i[u'doc'].items() if k in view_fields]), i.key)
for i in view(include_docs=True)
]
else:
results = [
({'id': i.id, 'dateModified': i.value['dateModified']} if changes else {'id': i.id, 'dateModified': i.key}, i.key)
for i in view()
]
if results:
params['offset'], pparams['offset'] = results[-1][1], results[0][1]
if offset and view_offset == results[0][1]:
results = results[1:]
elif offset and view_offset != results[0][1]:
results = results[:limit]
params['offset'], pparams['offset'] = results[-1][1], view_offset
results = [i[0] for i in results]
if changes:
params['offset'] = encrypt(self.server.uuid, self.db.name, params['offset'])
pparams['offset'] = encrypt(self.server.uuid, self.db.name, pparams['offset'])
else:
params['offset'] = offset
pparams['offset'] = offset
data = {
'data': results,
'next_page': {
"offset": params['offset'],
"path": self.request.route_path('Plans', _query=params),
"uri": self.request.route_url('Plans', _query=params)
}
}
if descending or offset:
data['prev_page'] = {
"offset": pparams['offset'],
"path": self.request.route_path('Plans', _query=pparams),
"uri": self.request.route_url('Plans', _query=pparams)
}
return data
|
openprocurement/openprocurement.edge
|
openprocurement/edge/views/plans.py
|
Python
|
apache-2.0
| 7,002
|
from http import HTTPStatus
from flask import url_for
def test_route_home(client):
response = client.get(url_for('home'))
assert response.status_code == HTTPStatus.OK
def test_route_project(client):
with client.session_transaction() as session:
session['token'] = 'foo'
response = client.get(url_for('project', project_id=123))
assert response.status_code == HTTPStatus.OK
def test_route_project_redirect(client):
response = client.get(url_for('project', project_id=123))
assert response.status_code == HTTPStatus.FOUND
|
textbook/flask-forecaster
|
tests/test_routes.py
|
Python
|
isc
| 563
|
# -*- coding: utf-8 -*-
import datetime
import re
import scrapy
from locations.items import GeojsonPointItem
from locations.hours import OpeningHours
DAY_MAPPING = {
'Sun': 'Su',
'Mon': 'Mo',
'Tue': 'Tu',
'Wed': 'We',
'Thu': 'Th',
'Fri': 'Fr',
'Sat': 'Sa'
}
class PenskeSpider(scrapy.Spider):
download_delay = 0.5
name = "penske"
allowed_domains = ["pensketruckrental.com"]
start_urls = (
'https://www.pensketruckrental.com/locations',
)
def parse_hours(self, elem):
opening_hours = OpeningHours()
day = elem.xpath('.//dt/text()').extract()
times = elem.xpath('.//dd/text()').extract()
for day, times in zip(day, times):
if times == "Closed":
continue
start_time, end_time = times.split(' - ')
if start_time == 'Noon':
start_time = '12:00 PM'
if end_time == 'Noon':
end_time = '12:00 PM'
if '-' in day:
days = list(DAY_MAPPING.keys())
start_day, end_day = day.split(' - ')
for i in days[days.index(start_day):days.index(end_day) + 1]:
opening_hours.add_range(day=DAY_MAPPING[i],
open_time=start_time,
close_time=end_time,
time_format='%I:%M %p')
elif ',' in day:
days = list(DAY_MAPPING.keys())
start_day, end_day = day.split(', ')
for i in days[days.index(start_day):days.index(end_day) + 1]:
opening_hours.add_range(day=DAY_MAPPING[i],
open_time=start_time,
close_time=end_time,
time_format='%I:%M %p')
else:
opening_hours.add_range(day=DAY_MAPPING[day],
open_time=start_time,
close_time=end_time,
time_format='%I:%M %p')
return opening_hours.as_opening_hours()
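# Illustrative example: a <dt>Mon - Fri</dt><dd>7:00 AM - 6:00 PM</dd> pair
# expands into add_range() calls for Mo, Tu, We, Th and Fr with the same
# opening and closing times; "Noon" is normalised to 12:00 PM beforehand.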
def parse_store(self, response):
ref = re.search(r'.+/(.+)$', response.url).group(1)
properties = {
'addr_full': response.xpath('//div[@id="location-left"]/p/text()').extract_first(),
'phone': response.xpath('//span[@itemprop="telephone"]/text()').extract_first(),
'city': response.xpath('//span[@itemprop="addressLocality"]/text()').extract_first(),
'state': response.xpath('//span[@itemprop="addressRegion"]/text()').extract_first(),
'postcode': response.xpath('//span[@itemprop="postalCode"]/text()').extract_first(),
'ref': ref,
'website': response.url,
'lat': float(response.xpath('//dt[@itemprop="latitude"]/text()').extract_first()),
'lon': float(response.xpath('//dt[@itemprop="longitude"]/text()').extract_first()),
'name': response.xpath('//h1[@itemprop="name"]/text()').extract_first()
}
hours = self.parse_hours(response.xpath('//dl[@class="hours"]'))
if hours:
properties['opening_hours'] = hours
yield GeojsonPointItem(**properties)
def parse(self, response):
urls = response.xpath('//section[@class="locations-by-state"]/ul/li/a/@href').extract()
if urls:
urls.extend(response.xpath('//section[@class="locations-by-province"]/ul/li/a/@href').extract())
is_store = False
if not urls:
urls = response.xpath('//section[@class="locations-by-city"]/ul/li/a/@href').extract()
if not urls:
urls = response.xpath('//a[contains(@class,"location-link")]/@href').extract()
is_store = True
for url in urls:
if is_store:
yield scrapy.Request(response.urljoin(url), callback=self.parse_store)
else:
yield scrapy.Request(response.urljoin(url))
|
iandees/all-the-places
|
locations/spiders/penske.py
|
Python
|
mit
| 4,137
|
#!/usr/bin/python
#
# Copyright (c) 2018 Yuwei Zhou, <yuwzho@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_servicebustopicsubscription
version_added: "2.8"
short_description: Manage Azure Service Bus subscription
description:
- Create, update or delete an Azure Service Bus subscriptions.
options:
resource_group:
description:
- Name of resource group.
required: true
name:
description:
- Name of the servicebus subscription.
required: true
state:
description:
- Assert the state of the servicebus subscription. Use C(present) to create or update and use C(absent) to delete.
default: present
choices:
- absent
- present
namespace:
description:
- Servicebus namespace name.
- A namespace is a scoping container for all messaging components.
- Multiple subscriptions and topics can reside within a single namespace, and namespaces often serve as application containers.
required: true
topic:
description:
- Topic name which the subscription subscribe to.
required: true
auto_delete_on_idle_in_seconds:
description:
- Idle time interval after which a subscription is automatically deleted.
- The minimum duration is 5 minutes.
type: int
dead_lettering_on_message_expiration:
description:
- A value that indicates whether a subscription has dead letter support when a message expires.
type: bool
dead_lettering_on_filter_evaluation_exceptions:
description:
- Value that indicates whether a subscription has dead letter support on filter evaluation exceptions.
type: bool
default_message_time_to_live_seconds:
description:
- Default message timespan to live value.
- This is the duration after which the message expires, starting from when the message is sent to Service Bus.
- This is the default value used when TimeToLive is not set on a message itself.
type: int
enable_batched_operations:
description:
- Value that indicates whether server-side batched operations are enabled.
type: bool
forward_dead_lettered_messages_to:
description:
- Queue or topic name to forward the Dead Letter message for a subscription.
forward_to:
description:
- Queue or topic name to forward the messages for a subscription.
lock_duration_in_seconds:
description:
- Timespan duration of a peek-lock.
- The amount of time that the message is locked for other receivers.
- The maximum value for LockDuration is 5 minutes.
type: int
max_delivery_count:
description:
- The maximum delivery count.
- A message is automatically deadlettered after this number of deliveries.
type: int
requires_session:
description:
- A value that indicates whether the subscription supports the concept of sessions.
type: bool
duplicate_detection_time_in_seconds:
description:
- TimeSpan structure that defines the duration of the duplicate detection history.
type: int
status:
description:
- Status of the entity.
choices:
- active
- disabled
- send_disabled
- receive_disabled
extends_documentation_fragment:
- azure
- azure_tags
author:
- Yuwei Zhou (@yuwzho)
'''
EXAMPLES = '''
- name: Create a subscription
azure_rm_servicebustopicsubscription:
name: sbsub
resource_group: myResourceGroup
namespace: bar
topic: subtopic
'''
RETURN = '''
id:
description:
- Current state of the subscription.
returned: success
type: str
sample: "/subscriptions/xxx...xxx/resourceGroups/myResourceGroup/providers/Microsoft.ServiceBus/
namespaces/nsb57dc95979/topics/topicb57dc95979/subscriptions/subsb57dc95979"
'''
try:
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
from ansible.module_utils.common.dict_transformations import _snake_to_camel, _camel_to_snake
from ansible.module_utils._text import to_native
from datetime import datetime, timedelta
duration_spec_map = dict(
default_message_time_to_live='default_message_time_to_live_seconds',
duplicate_detection_history_time_window='duplicate_detection_time_in_seconds',
auto_delete_on_idle='auto_delete_on_idle_in_seconds',
lock_duration='lock_duration_in_seconds'
)
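# Maps the SDK's duration (timedelta) attributes on SBSubscription to the
# module parameters that express the same values in whole seconds.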
class AzureRMServiceSubscription(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
auto_delete_on_idle_in_seconds=dict(type='int'),
dead_lettering_on_filter_evaluation_exceptions=dict(type='bool'),
dead_lettering_on_message_expiration=dict(type='bool'),
default_message_time_to_live_seconds=dict(type='int'),
duplicate_detection_time_in_seconds=dict(type='int'),
enable_batched_operations=dict(type='bool'),
forward_dead_lettered_messages_to=dict(type='str'),
forward_to=dict(type='str'),
lock_duration_in_seconds=dict(type='int'),
max_delivery_count=dict(type='int'),
name=dict(type='str', required=True),
namespace=dict(type='str', required=True),
requires_session=dict(type='bool'),
resource_group=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['present', 'absent']),
status=dict(type='str',
choices=['active', 'disabled', 'send_disabled', 'receive_disabled']),
topic=dict(type='str', required=True)
)
self.auto_delete_on_idle_in_seconds = None
self.dead_lettering_on_filter_evaluation_exceptions = None
self.dead_lettering_on_message_expiration = None
self.default_message_time_to_live_seconds = None
self.duplicate_detection_time_in_seconds = None
self.enable_batched_operations = None
self.forward_dead_lettered_messages_to = None
self.forward_to = None
self.lock_duration_in_seconds = None
self.max_delivery_count = None
self.name = None
self.namespace = None
self.requires_session = None
self.resource_group = None
self.state = None
self.status = None
self.topic = None
self.results = dict(
changed=False,
id=None
)
super(AzureRMServiceSubscription, self).__init__(self.module_arg_spec,
supports_check_mode=True)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()):
setattr(self, key, kwargs[key])
changed = False
original = self.get()
if self.state == 'present':
# Create the resource instance
params = dict(
dead_lettering_on_filter_evaluation_exceptions=self.dead_lettering_on_filter_evaluation_exceptions,
dead_lettering_on_message_expiration=self.dead_lettering_on_message_expiration,
enable_batched_operations=self.enable_batched_operations,
forward_dead_lettered_messages_to=self.forward_dead_lettered_messages_to,
forward_to=self.forward_to,
max_delivery_count=self.max_delivery_count,
requires_session=self.requires_session
)
if self.status:
params['status'] = self.servicebus_models.EntityStatus(str.capitalize(_snake_to_camel(self.status)))
for k, v in duration_spec_map.items():
seconds = getattr(self, v)
if seconds:
params[k] = timedelta(seconds=seconds)
instance = self.servicebus_models.SBSubscription(**params)
result = original
if not original:
changed = True
result = instance
else:
result = original
attribute_map_keys = set(self.servicebus_models.SBSubscription._attribute_map.keys())
validation_keys = set(self.servicebus_models.SBSubscription._validation.keys())
attribute_map = attribute_map_keys - validation_keys
for attribute in attribute_map:
value = getattr(instance, attribute)
if value and value != getattr(original, attribute):
changed = True
if changed and not self.check_mode:
result = self.create_or_update(instance)
self.results = self.to_dict(result)
elif original:
changed = True
if not self.check_mode:
self.delete()
self.results['deleted'] = True
self.results['changed'] = changed
return self.results
def create_or_update(self, param):
try:
client = self._get_client()
return client.create_or_update(self.resource_group, self.namespace, self.topic, self.name, param)
except Exception as exc:
self.fail("Error creating or updating servicebus subscription {0} - {1}".format(self.name, str(exc)))
def delete(self):
try:
client = self._get_client()
client.delete(self.resource_group, self.namespace, self.topic, self.name)
return True
except Exception as exc:
self.fail("Error deleting servicebus subscription {0} - {1}".format(self.name, str(exc)))
def _get_client(self):
return self.servicebus_client.subscriptions
def get(self):
try:
client = self._get_client()
return client.get(self.resource_group, self.namespace, self.topic, self.name)
except Exception:
return None
def to_dict(self, instance):
result = dict()
attribute_map = self.servicebus_models.SBSubscription._attribute_map
for attribute in attribute_map.keys():
value = getattr(instance, attribute)
if not value:
continue
if attribute_map[attribute]['type'] == 'duration':
if is_valid_timedelta(value):
key = duration_spec_map.get(attribute) or attribute
result[key] = int(value.total_seconds())
elif attribute == 'status':
result['status'] = _camel_to_snake(value)
elif isinstance(value, self.servicebus_models.MessageCountDetails):
result[attribute] = value.as_dict()
elif isinstance(value, self.servicebus_models.SBSku):
result[attribute] = value.name.lower()
elif isinstance(value, datetime):
result[attribute] = str(value)
elif isinstance(value, str):
result[attribute] = to_native(value)
elif attribute == 'max_size_in_megabytes':
result['max_size_in_mb'] = value
else:
result[attribute] = value
return result
def is_valid_timedelta(value):
if value == timedelta(10675199, 10085, 477581):
return None
return value
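# The magic value above (10675199 days, 10085 s, 477581 us) appears to be
# .NET's TimeSpan.MaxValue, which the service reports when a duration was
# never set; mapping it to None keeps such placeholders out of the results.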
def main():
AzureRMServiceSubscription()
if __name__ == '__main__':
main()
|
kvar/ansible
|
lib/ansible/modules/cloud/azure/azure_rm_servicebustopicsubscription.py
|
Python
|
gpl-3.0
| 12,038
|
import time
import tornado.ioloop
import tornado.httpserver
import tornado.web
from tornado import httpclient
from tornado import gen
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r"/", IndexHandler),
(r"/now", BitNowHandler),
(r"/chartapi", ChartApiHandler),
(r"/chart", ChartHandler),
]
settings = dict(
template_path="templates",
static_path="static",
)
tornado.web.Application.__init__(self, handlers, **settings)
class IndexHandler(tornado.web.RequestHandler):
def get(self):
self.render('index.html')
class ChartHandler(tornado.web.RequestHandler):
def get(self):
self.render('chart.html')
class BitNowHandler(tornado.web.RequestHandler):
count = 0
last_time = int(time.time())
data = ''
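# count / last_time / data are class attributes, so they act as a single
# process-wide cache: the upstream ticker is fetched at most once every
# five seconds and requests in between get the cached response.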
@tornado.gen.coroutine
def get(self):
"""Re-fetch the data every 5 seconds."""
now_time = int(time.time())
if now_time - BitNowHandler.last_time >= 5:
BitNowHandler.last_time = now_time
BitNowHandler.count += 1
print(BitNowHandler.count)
client = httpclient.AsyncHTTPClient()
url = 'http://blockchain.info/ticker'
response = yield gen.Task(client.fetch, url)
BitNowHandler.data = response.body.decode()
print(str(BitNowHandler.data))
self.write(str(BitNowHandler.data))
self.finish()
class ChartApiHandler(tornado.web.RequestHandler):
@tornado.gen.coroutine
def get(self):
url = 'http://blockchain.info/charts/market-price?format=json'
client = httpclient.AsyncHTTPClient()
response = yield gen.Task(client.fetch, url)
data = response.body.decode()
self.write(data)
self.finish()
if __name__ == "__main__":
server = tornado.httpserver.HTTPServer(Application())
server.listen(8088)
tornado.ioloop.IOLoop.instance().start()
|
guke1991/hellopy
|
Bitcoin/now/now.py
|
Python
|
mit
| 2,020
|
"""Support for HomematicIP Cloud cover devices."""
import logging
from typing import Optional
from homematicip.aio.device import AsyncFullFlushBlind, AsyncFullFlushShutter
from homeassistant.components.cover import (
ATTR_POSITION,
ATTR_TILT_POSITION,
CoverDevice,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from . import DOMAIN as HMIPC_DOMAIN, HMIPC_HAPID, HomematicipGenericDevice
_LOGGER = logging.getLogger(__name__)
HMIP_COVER_OPEN = 0
HMIP_COVER_CLOSED = 1
HMIP_SLATS_OPEN = 0
HMIP_SLATS_CLOSED = 1
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the HomematicIP Cloud cover devices."""
pass
async def async_setup_entry(
hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the HomematicIP cover from a config entry."""
home = hass.data[HMIPC_DOMAIN][config_entry.data[HMIPC_HAPID]].home
devices = []
for device in home.devices:
if isinstance(device, AsyncFullFlushBlind):
devices.append(HomematicipCoverSlats(home, device))
elif isinstance(device, AsyncFullFlushShutter):
devices.append(HomematicipCoverShutter(home, device))
if devices:
async_add_entities(devices)
class HomematicipCoverShutter(HomematicipGenericDevice, CoverDevice):
"""Representation of a HomematicIP Cloud cover shutter device."""
@property
def current_cover_position(self) -> int:
"""Return current position of cover."""
return int((1 - self._device.shutterLevel) * 100)
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
position = kwargs[ATTR_POSITION]
# HmIP cover is closed:1 -> open:0
level = 1 - position / 100.0
await self._device.set_shutter_level(level)
@property
def is_closed(self) -> Optional[bool]:
"""Return if the cover is closed."""
if self._device.shutterLevel is not None:
return self._device.shutterLevel == HMIP_COVER_CLOSED
return None
async def async_open_cover(self, **kwargs):
"""Open the cover."""
await self._device.set_shutter_level(HMIP_COVER_OPEN)
async def async_close_cover(self, **kwargs):
"""Close the cover."""
await self._device.set_shutter_level(HMIP_COVER_CLOSED)
async def async_stop_cover(self, **kwargs):
"""Stop the device if in motion."""
await self._device.set_shutter_stop()
class HomematicipCoverSlats(HomematicipCoverShutter, CoverDevice):
"""Representation of a HomematicIP Cloud cover slats device."""
@property
def current_cover_tilt_position(self) -> int:
"""Return current tilt position of cover."""
return int((1 - self._device.slatsLevel) * 100)
async def async_set_cover_tilt_position(self, **kwargs):
"""Move the cover to a specific tilt position."""
position = kwargs[ATTR_TILT_POSITION]
# HmIP slats is closed:1 -> open:0
level = 1 - position / 100.0
await self._device.set_slats_level(level)
async def async_open_cover_tilt(self, **kwargs):
"""Open the slats."""
await self._device.set_slats_level(HMIP_SLATS_OPEN)
async def async_close_cover_tilt(self, **kwargs):
"""Close the slats."""
await self._device.set_slats_level(HMIP_SLATS_CLOSED)
async def async_stop_cover_tilt(self, **kwargs):
"""Stop the device if in motion."""
await self._device.set_shutter_stop()
|
Cinntax/home-assistant
|
homeassistant/components/homematicip_cloud/cover.py
|
Python
|
apache-2.0
| 3,615
|
from flask import Flask
from flask import request
from flask import jsonify
from flask import abort
import time
app = Flask(__name__)
@app.route('/api/1', defaults={'path': ''}, methods=['GET', 'POST'])
@app.route('/api/1/<path:path>', methods=['GET', 'POST'])
def api1(path):
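# The 20 second delay is presumably meant to simulate a slow upstream
# service (e.g. to exercise gateway timeout handling).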
time.sleep(20)
return jsonify({
'userinfo': {
'username': 'zhouyang',
'pk': 10,
'birthday': '2010101'
}
})
@app.route('/api/2', defaults={'path': ''}, methods=['GET', 'POST'])
@app.route('/api/2/<path:path>', methods=['GET', 'POST'])
def api2(path):
return abort(400, 'you did a bad request')
@app.route('/api/3', defaults={'path': ''}, methods=['GET', 'POST'])
@app.route('/api/3/<path:path>', methods=['GET', 'POST'])
def api3(path):
userId = request.args.get('userId')
return jsonify({
'userinfo': {
'userId': userId
}
})
@app.route('/usercenter/userinfo', methods=['GET', 'POST'])
def api4():
return jsonify({
'userinfo': {
'username': 'zhouyang'
}
})
if __name__ == '__main__':
app.run(port=1330, host='0.0.0.0')
|
jie/microgate
|
test_server.py
|
Python
|
mit
| 1,151
|
"""Define a function is_palindrome() that recognizes
palindromes (i.e. words that look the same written backwards).
For example, is_palindrome("radar") should return True."""
def is_palindrome(word):
    # a palindrome reads the same forwards and backwards
    return word == word[::-1]
#test
print(is_palindrome("radar"))
print(is_palindrome("IzitizI"))
print(is_palindrome("Definitely not a palindrome :)"))
|
m3rik/nn
|
PythonCourse/exercises/ex8.py
|
Python
|
apache-2.0
| 332
|