| repo_name | ref | path | copies | content |
|---|---|---|---|---|
Gandi/pyramid_kvs | refs/heads/master | pyramid_kvs/__init__.py | 1 | """
pyramid_kvs provides Key/Value Store helpers for Pyramid.
See the README.rst file for more information.
"""
__version__ = '0.4.1'
from pyramid.events import NewRequest
from .session import SessionFactory
from .perlsess import PerlSession
from .cache import ApplicationCache
from .ratelimit import Ratelimit
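# NewRequest subscribers: attach the KVS helpers to each incoming request
# (perlsess and cache as reified properties, the rate limiter applied directly).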
def subscribe_perlsess(event):
request = event.request
request.set_property(PerlSession(request), 'perlsess', reify=True)
def subscribe_cache(event):
request = event.request
request.set_property(ApplicationCache(request), 'cache', reify=True)
def subscribe_ratelimit(event):
Ratelimit(event.request)
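# Pyramid entry point for config.include('pyramid_kvs'): every kvs.* setting is
# optional, and each helper is connected and subscribed only when its key is present.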
def includeme(config):
settings = config.registry.settings
if 'kvs.perlsess' in settings:
PerlSession.connect(settings)
config.add_subscriber(subscribe_perlsess, NewRequest)
if 'kvs.cache' in settings:
ApplicationCache.connect(settings)
config.add_subscriber(subscribe_cache, NewRequest)
if 'kvs.session' in settings:
config.set_session_factory(SessionFactory(settings))
if 'kvs.ratelimit' in settings:
Ratelimit.configure(settings)
config.add_subscriber(subscribe_ratelimit, NewRequest)
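# A minimal sketch of matching INI settings (the values below are illustrative
# only; see README.rst for the keys and formats actually supported):
#
#   kvs.session = {"kvs": "redis", "key_prefix": "session::"}
#   kvs.cache = {"kvs": "memcache", "key_prefix": "cache::"}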
|
plotly/python-api | refs/heads/master | packages/python/plotly/plotly/validators/heatmapgl/hoverlabel/_bordercolor.py | 1 | import _plotly_utils.basevalidators
class BordercolorValidator(_plotly_utils.basevalidators.ColorValidator):
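    # Auto-generated Plotly validator: a thin ColorValidator subclass that pins
    # the defaults (array_ok, edit_type, role) for heatmapgl.hoverlabel.bordercolor.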
def __init__(
self, plotly_name="bordercolor", parent_name="heatmapgl.hoverlabel", **kwargs
):
super(BordercolorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "style"),
**kwargs
)
|
Vagab0nd/SiCKRAGE | refs/heads/master | sickchill/views/home.py | 1 | import ast
import base64
import datetime
import json
import os
import time
import urllib.parse
from operator import attrgetter
from urllib.parse import unquote_plus
from github.GithubException import GithubException
from tornado.escape import xhtml_unescape
import sickchill.oldbeard
from sickchill import adba, logger, settings
from sickchill.helper import try_int
from sickchill.helper.common import episode_num, pretty_file_size
from sickchill.helper.exceptions import CantRefreshShowException, CantUpdateShowException, NoNFOException, ShowDirectoryNotFoundException
from sickchill.oldbeard.blackandwhitelist import BlackAndWhiteList, short_group_names
from sickchill.oldbeard.common import cpu_presets, FAILED, IGNORED, Overview, Quality, SKIPPED, SNATCHED_BEST, statusStrings, UNAIRED, WANTED
from sickchill.oldbeard.scene_numbering import (get_scene_absolute_numbering, get_scene_absolute_numbering_for_show, get_scene_numbering,
get_scene_numbering_for_show, get_xem_absolute_numbering_for_show, get_xem_numbering_for_show,
set_scene_numbering)
from sickchill.oldbeard.trakt_api import TraktAPI
from sickchill.show.Show import Show
from sickchill.system.Restart import Restart
from sickchill.system.Shutdown import Shutdown
from sickchill.tv import TVShow
from sickchill.update_manager import UpdateManager
from ..oldbeard import clients, config, db, filters, helpers, notifiers, sab, search_queue, subtitles as subtitle_module, ui
from ..providers.metadata.generic import GenericMetadata
from ..providers.metadata.helpers import getShowImage
from .common import PageTemplate
from .index import WebRoot
from .routes import Route
@Route('/home(/?.*)', name='home')
class Home(WebRoot):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def _genericMessage(self, subject=None, message=None):
t = PageTemplate(rh=self, filename="genericMessage.mako")
return t.render(message=message, subject=subject, topmenu="home", title="")
@staticmethod
def _getEpisode(show, season=None, episode=None, absolute=None):
if not show:
return None, _("Invalid show parameters")
show_obj = Show.find(settings.showList, int(show))
if not show_obj:
return None, _("Invalid show parameters")
if absolute:
ep_obj = show_obj.getEpisode(absolute_number=absolute)
elif season and episode:
ep_obj = show_obj.getEpisode(season, episode)
else:
return None, _("Invalid parameters")
if not ep_obj:
return None, _("Episode couldn't be retrieved")
return ep_obj, ''
def index(self):
t = PageTemplate(rh=self, filename="home.mako")
selected_root = self.get_body_argument('root', None)
if selected_root and settings.ROOT_DIRS:
backend_pieces = settings.ROOT_DIRS.split('|')
backend_dirs = backend_pieces[1:]
try:
assert selected_root != '-1'
selected_root_dir = backend_dirs[int(selected_root)]
if selected_root_dir[-1] not in ('/', '\\'):
selected_root_dir += os.sep
except (IndexError, ValueError, TypeError, AssertionError):
selected_root_dir = ''
else:
selected_root_dir = ''
if settings.ANIME_SPLIT_HOME:
shows = []
anime = []
for show in settings.showList:
# noinspection PyProtectedMember
if selected_root_dir in show._location:
if show.is_anime:
anime.append(show)
else:
shows.append(show)
            sortedShowLists = [
                ["Shows", sorted(shows, key=attrgetter('sort_name'))],
                ["Anime", sorted(anime, key=attrgetter('sort_name'))]
            ]
else:
shows = []
for show in settings.showList:
# noinspection PyProtectedMember
if selected_root_dir in show._location:
shows.append(show)
            sortedShowLists = [
                ["Shows", sorted(shows, key=attrgetter('sort_name'))]
            ]
stats = self.show_statistics()
return t.render(title=_("Home"), header=_("Show List"), topmenu="home", sortedShowLists=sortedShowLists, show_stat=stats[
0], max_download_count=stats[1], controller="home", action="index", selected_root=selected_root or '-1')
@staticmethod
def show_statistics():
""" Loads show and episode statistics from db """
main_db_con = db.DBConnection()
today = str(datetime.date.today().toordinal())
status_quality = '(' + ','.join([str(x) for x in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST]) + ')'
status_download = '(' + ','.join([str(x) for x in Quality.DOWNLOADED + Quality.ARCHIVED]) + ')'
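        # Build one statement with correlated subqueries so the snatched, downloaded,
        # and total counts plus next/previous air dates come back in a single pass per show.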
sql_statement = 'SELECT showid,'
# noinspection PyPep8
sql_statement += ' (SELECT COUNT(*) FROM tv_episodes WHERE showid=tv_eps.showid AND season > 0 AND episode > 0 AND airdate > 1 AND status IN ' + status_quality + ') AS ep_snatched,'
# noinspection PyPep8
sql_statement += ' (SELECT COUNT(*) FROM tv_episodes WHERE showid=tv_eps.showid AND season > 0 AND episode > 0 AND airdate > 1 AND status IN ' + status_download + ') AS ep_downloaded,'
sql_statement += ' (SELECT COUNT(*) FROM tv_episodes WHERE showid=tv_eps.showid AND season > 0 AND episode > 0 AND airdate > 1'
sql_statement += ' AND ((airdate <= ' + today + ' AND status IN (' + ','.join([str(SKIPPED), str(WANTED), str(FAILED)]) + '))'
sql_statement += ' OR (status IN ' + status_quality + ') OR (status IN ' + status_download + '))) AS ep_total,'
sql_statement += ' (SELECT airdate FROM tv_episodes WHERE showid=tv_eps.showid AND airdate >= ' + today
sql_statement += (' AND season > 0', '')[settings.DISPLAY_SHOW_SPECIALS] + ' AND status IN (' + ','.join([str(UNAIRED), str(WANTED)]) + ')'
sql_statement += ' ORDER BY airdate ASC LIMIT 1) AS ep_airs_next,'
sql_statement += ' (SELECT airdate FROM tv_episodes WHERE showid=tv_eps.showid AND airdate > 1'
sql_statement += (' AND season > 0', '')[settings.DISPLAY_SHOW_SPECIALS] + ' AND status <> ' + str(UNAIRED)
sql_statement += ' ORDER BY airdate DESC LIMIT 1) AS ep_airs_prev,'
# @TODO: Store each show_size in tv_shows. also change in displayShow.mako:250, where we use helpers.get_size()
sql_statement += ' (SELECT SUM(file_size) FROM tv_episodes WHERE showid=tv_eps.showid) AS show_size'
sql_statement += ' FROM tv_episodes tv_eps GROUP BY showid'
sql_result = main_db_con.select(sql_statement)
show_stat = {}
max_download_count = 1000
for cur_result in sql_result:
show_stat[cur_result['showid']] = cur_result
if cur_result['ep_total'] > max_download_count:
max_download_count = cur_result['ep_total']
max_download_count *= 100
return show_stat, max_download_count
def is_alive(self):
callback = self.get_query_argument('callback')
self.set_header('Cache-Control', 'max-age=0,no-cache,no-store')
self.set_header('Content-Type', 'text/javascript')
self.set_header('Access-Control-Allow-Origin', '*')
self.set_header('Access-Control-Allow-Headers', 'x-requested-with')
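        # JSONP-style endpoint: the JSON payload is wrapped in the caller-supplied callback.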
if settings.started:
return callback + '(' + json.dumps({"msg": str(settings.PID)}) + ');'
else:
return callback + '(' + json.dumps({"msg": "nope"}) + ');'
@staticmethod
def haveKODI():
return settings.USE_KODI and settings.KODI_UPDATE_LIBRARY
@staticmethod
def havePLEX():
return settings.USE_PLEX_SERVER and settings.PLEX_UPDATE_LIBRARY
@staticmethod
def haveEMBY():
return settings.USE_EMBY
@staticmethod
def haveTORRENT():
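        # TORRENT_HOST's scheme must agree with ENABLE_HTTPS ('http:' when off, 'https' when on).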
host_good = (settings.TORRENT_HOST[:5] == 'http:', settings.TORRENT_HOST[:5] == 'https')[settings.ENABLE_HTTPS]
if settings.USE_TORRENTS and settings.TORRENT_METHOD != 'blackhole' and host_good:
return True
else:
return False
def testSABnzbd(self):
# self.set_header('Cache-Control', 'max-age=0,no-cache,no-store')
username = self.get_body_argument('username')
password = filters.unhide(settings.SAB_PASSWORD, self.get_body_argument('password'))
apikey = filters.unhide(settings.SAB_APIKEY, self.get_body_argument('apikey'))
host = config.clean_url(self.get_body_argument('host'))
connection, accesMsg = sab.getSabAccesMethod(host)
if connection:
authed, authMsg = sab.testAuthentication(host, username, password, apikey)
if authed:
return _("Success. Connected and authenticated")
else:
return _("Authentication failed. SABnzbd expects") + " '" + accesMsg + "' " + _("as authentication method") + ", '" + authMsg + "'"
else:
return _("Unable to connect to host")
@staticmethod
def __torrent_test(host, username, password, method):
client = clients.getClientInstance(method)
result, message = client(host, username, password).testAuthentication()
return message
def testDSM(self):
host = config.clean_url(self.get_body_argument('host'))
username = self.get_body_argument('username')
password = filters.unhide(settings.SYNOLOGY_DSM_PASSWORD, self.get_body_argument('password'))
return self.__torrent_test(host, username, password, 'download_station')
def testTorrent(self):
torrent_method = self.get_body_argument('torrent_method')
host = config.clean_url(self.get_body_argument('host'))
username = self.get_body_argument('username')
password = filters.unhide(settings.TORRENT_PASSWORD, self.get_body_argument('password'))
return self.__torrent_test(host, username, password, torrent_method)
def testFreeMobile(self):
freemobile_id = self.get_body_argument('freemobile_id')
freemobile_apikey = filters.unhide(settings.FREEMOBILE_APIKEY, self.get_body_argument('freemobile_apikey'))
result, message = notifiers.freemobile_notifier.test_notify(freemobile_id, freemobile_apikey)
if result:
return _("SMS sent successfully")
else:
return _("Problem sending SMS: {message}".format(message=message))
def testTelegram(self):
telegram_id = self.get_body_argument('telegram_id')
telegram_apikey = filters.unhide(settings.TELEGRAM_APIKEY, self.get_body_argument('telegram_apikey'))
result, message = notifiers.telegram_notifier.test_notify(telegram_id, telegram_apikey)
if result:
return _("Telegram notification succeeded. Check your Telegram clients to make sure it worked")
else:
return _("Error sending Telegram notification: {message}".format(message=message))
def testJoin(self):
join_id = self.get_body_argument('join_id')
join_apikey = filters.unhide(settings.JOIN_APIKEY, self.get_body_argument('join_apikey'))
result, message = notifiers.join_notifier.test_notify(join_id, join_apikey)
        if result:
            return _("Join notification succeeded. Check your Join clients to make sure it worked")
        else:
            return _("Error sending Join notification: {message}".format(message=message))
def testGrowl(self):
host = self.get_query_argument('host')
password = filters.unhide(settings.GROWL_PASSWORD, self.get_query_argument('password'))
# self.set_header('Cache-Control', 'max-age=0,no-cache,no-store')
host = config.clean_host(host, default_port=23053)
result = notifiers.growl_notifier.test_notify(host, password)
pw_append = _(" with password") + ": " + password if password else ''
        if result:
            return _("Registered and tested Growl successfully {growl_host}").format(growl_host=unquote_plus(host)) + pw_append
        else:
            return _("Registration and testing of Growl failed {growl_host}").format(growl_host=unquote_plus(host)) + pw_append
def testProwl(self):
prowl_api = self.get_query_argument('prowl_api')
prowl_priority = self.get_query_argument('prowl_priority')
result = notifiers.prowl_notifier.test_notify(prowl_api, prowl_priority)
        if result:
            return _("Test Prowl notice sent successfully")
        else:
            return _("Test Prowl notice failed")
def testBoxcar2(self):
accesstoken = self.get_query_argument('accesstoken')
result = notifiers.boxcar2_notifier.test_notify(accesstoken)
if result:
return _("Boxcar2 notification succeeded. Check your Boxcar2 clients to make sure it worked")
else:
return _("Error sending Boxcar2 notification")
def testPushover(self):
userKey = self.get_query_argument('userKey')
apiKey = self.get_query_argument('apiKey')
result = notifiers.pushover_notifier.test_notify(userKey, apiKey)
if result:
return _("Pushover notification succeeded. Check your Pushover clients to make sure it worked")
else:
return _("Error sending Pushover notification")
@staticmethod
def twitterStep1():
# noinspection PyProtectedMember
return notifiers.twitter_notifier._get_authorization()
def twitterStep2(self):
key = self.get_query_argument('key')
# noinspection PyProtectedMember
result = notifiers.twitter_notifier._get_credentials(key)
logger.info("result: " + str(result))
if result:
return _("Key verification successful")
else:
return _("Unable to verify key")
@staticmethod
def testTwitter():
result = notifiers.twitter_notifier.test_notify()
if result:
return _("Tweet successful, check your twitter to make sure it worked")
else:
return _("Error sending tweet")
@staticmethod
def testTwilio():
if not notifiers.twilio_notifier.account_regex.match(settings.TWILIO_ACCOUNT_SID):
return _('Please enter a valid account sid')
if not notifiers.twilio_notifier.auth_regex.match(settings.TWILIO_AUTH_TOKEN):
return _('Please enter a valid auth token')
if not notifiers.twilio_notifier.phone_regex.match(settings.TWILIO_PHONE_SID):
return _('Please enter a valid phone sid')
if not notifiers.twilio_notifier.number_regex.match(settings.TWILIO_TO_NUMBER):
return _('Please format the phone number as "+1-###-###-####"')
result = notifiers.twilio_notifier.test_notify()
if result:
return _('Authorization successful and number ownership verified')
else:
return _('Error sending sms')
@staticmethod
def testSlack():
result = notifiers.slack_notifier.test_notify()
if result:
return _("Slack message successful")
else:
return _("Slack message failed")
@staticmethod
def testRocketChat():
result = notifiers.rocketchat_notifier.test_notify()
if result:
return _("Rocket.Chat message successful")
else:
return _("Rocket.Chat message failed")
@staticmethod
def testMatrix():
result = notifiers.matrix_notifier.test_notify()
if result:
return _("Matrix message successful")
else:
return _("Matrix message failed")
@staticmethod
def testDiscord():
result = notifiers.discord_notifier.test_notify()
if result:
return _("Discord message successful")
else:
return _("Discord message failed")
def testKODI(self):
username = self.get_query_argument('username')
host = config.clean_hosts(self.get_query_argument('host'))
password = filters.unhide(settings.KODI_PASSWORD, self.get_query_argument('password'))
results = notifiers.kodi_notifier.test_notify(unquote_plus(host), username, password)
final_results = [
_("Test KODI notice {result} to {kodi_host}").format(kodi_host=host, result=('failed', 'sent successfully')[result]) for host, result in results.items()]
return "<br>\n".join(final_results)
def testPHT(self):
self.set_header('Cache-Control', 'max-age=0,no-cache,no-store')
username = self.get_query_argument('username')
host = config.clean_hosts(self.get_query_argument('host'))
password = filters.unhide(settings.PLEX_CLIENT_PASSWORD, self.get_query_argument('password'))
finalResult = ''
for curHost in [x.strip() for x in host.split(',')]:
curResult = notifiers.plex_notifier.test_notify_pht(unquote_plus(curHost), username, password)
if len(curResult.split(':')) > 2 and 'OK' in curResult.split(':')[2]:
finalResult += _('Successful test notice sent to Plex Home Theater ... {plex_clients}').format(plex_clients=unquote_plus(curHost))
else:
finalResult += _('Test failed for Plex Home Theater ... {plex_clients}').format(plex_clients=unquote_plus(curHost))
finalResult += '<br>' + '\n'
ui.notifications.message(_('Tested Plex Home Theater(s)') + ':', unquote_plus(host.replace(',', ', ')))
return finalResult
def testPMS(self):
self.set_header('Cache-Control', 'max-age=0,no-cache,no-store')
username = self.get_query_argument('username')
host = config.clean_hosts(self.get_query_argument('host'))
password = filters.unhide(settings.PLEX_SERVER_PASSWORD, self.get_query_argument('password'))
plex_server_token = self.get_query_argument('plex_server_token')
finalResult = ''
curResult = notifiers.plex_notifier.test_notify_pms(unquote_plus(host), username, password, plex_server_token)
if curResult is None:
finalResult += _('Successful test of Plex Media Server(s) ... {plex_servers}').format(plex_servers=unquote_plus(host.replace(',', ', ')))
elif curResult is False:
            finalResult += _('Test failed: no Plex Media Server host specified')
else:
finalResult += _('Test failed for Plex Media Server(s) ... {plex_servers}').format(plex_servers=unquote_plus(str(curResult).replace(',', ', ')))
finalResult += '<br>' + '\n'
ui.notifications.message(_('Tested Plex Media Server host(s)') + ':', unquote_plus(host.replace(',', ', ')))
return finalResult
@staticmethod
def testLibnotify():
if notifiers.libnotify_notifier.test_notify():
return _("Tried sending desktop notification via libnotify")
return notifiers.libnotify_notifier.diagnose()
def testEMBY(self):
host = config.clean_url(self.get_query_argument('host'))
emby_apikey = filters.unhide(settings.EMBY_APIKEY, self.get_query_argument('emby_apikey'))
result = notifiers.emby_notifier.test_notify(host, emby_apikey)
if result:
return _("Test notice sent successfully to {emby_host}").format(emby_host=unquote_plus(host))
else:
return _("Test notice failed to {emby_host}").format(emby_host=unquote_plus(host))
def testNMJ(self):
host = config.clean_host(self.get_body_argument('host'))
database = self.get_body_argument('database')
mount = self.get_body_argument('mount')
result = notifiers.nmj_notifier.test_notify(unquote_plus(host), database, mount)
if result:
return _("Successfully started the scan update")
else:
return _("Test failed to start the scan update")
def settingsNMJ(self):
host = config.clean_host(self.get_body_argument('host'))
result = notifiers.nmj_notifier.notify_settings(unquote_plus(host))
if result:
return '{{"message": _("Got settings from {host}"), "database": "{database}", "mount": "{mount}"}}'.format(**{
"host": host, "database": settings.NMJ_DATABASE, "mount": settings.NMJ_MOUNT
})
else:
# noinspection PyPep8
return '{"message": _("Failed! Make sure your Popcorn is on and NMJ is running. (see Log & Errors -> Debug for detailed info)"), "database": "", "mount": ""}'
def testNMJv2(self):
host = config.clean_host(self.get_body_argument('host'))
result = notifiers.nmjv2_notifier.test_notify(unquote_plus(host))
if result:
return _("Test notice sent successfully to {nmj2_host}").format(nmj2_host=unquote_plus(host))
else:
return _("Test notice failed to {nmj2_host}").format(nmj2_host=unquote_plus(host))
def settingsNMJv2(self):
host = config.clean_host(self.get_body_argument('host'))
dbloc = self.get_body_argument('dbloc')
instance = self.get_body_argument('instance')
result = notifiers.nmjv2_notifier.notify_settings(unquote_plus(host), dbloc, instance)
if result:
return '{{"message": _("NMJ Database found at: {host}"), "database": "{database}"}}'.format(
**{"host": host, "database": settings.NMJv2_DATABASE})
else:
# noinspection PyPep8
return '{{"message": _("Unable to find NMJ Database at location: {dbloc}. Is the right location selected and PCH running?"), "database": ""}}'.format(
**{
"dbloc": dbloc
})
def getTraktToken(self):
trakt_pin = self.get_body_argument('trakt_pin')
trakt_api = TraktAPI(settings.SSL_VERIFY, settings.TRAKT_TIMEOUT)
response = trakt_api.traktToken(trakt_pin)
if response:
return _("Trakt Authorized")
return _("Trakt Not Authorized!")
def testTrakt(self):
username = self.get_body_argument('username')
blacklist_name = self.get_body_argument('blacklist_name')
return notifiers.trakt_notifier.test_notify(username, blacklist_name)
@staticmethod
def loadShowNotifyLists():
main_db_con = db.DBConnection()
rows = main_db_con.select("SELECT show_id, show_name, notify_list FROM tv_shows ORDER BY show_name ASC")
data = {}
size = 0
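        # Build a dict keyed by show_id; '_size' is a synthetic entry holding the row count.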
for r in rows:
NotifyList = {'emails': '', 'prowlAPIs': ''}
if r['notify_list'] and len(r['notify_list']) > 0:
# First, handle legacy format (emails only)
if not r['notify_list'][0] == '{':
NotifyList['emails'] = r['notify_list']
else:
NotifyList = dict(ast.literal_eval(r['notify_list']))
data[r['show_id']] = {
'id': r['show_id'],
'name': r['show_name'],
'list': NotifyList['emails'],
'prowl_notify_list': NotifyList['prowlAPIs']
}
size += 1
data['_size'] = size
return json.dumps(data)
def saveShowNotifyList(self):
show = self.get_body_argument('show')
emails = self.get_body_argument('emails', '')
prowlAPIs = self.get_body_argument('prowlAPIs', '')
entries = {'emails': emails or '', 'prowlAPIs': prowlAPIs or ''}
main_db_con = db.DBConnection()
# Get current data
for subs in main_db_con.select("SELECT notify_list FROM tv_shows WHERE show_id = ?", [show]):
if subs['notify_list'] and len(subs['notify_list']) > 0:
# First, handle legacy format (emails only)
if not subs['notify_list'][0] == '{':
entries['emails'] = subs['notify_list']
else:
entries = dict(ast.literal_eval(subs['notify_list']))
if emails or prowlAPIs:
if not main_db_con.action("UPDATE tv_shows SET notify_list = ? WHERE show_id = ?", [str(entries), show]):
return 'ERROR'
return 'OK'
def testEmail(self):
port = self.get_body_argument('port')
smtp_from = self.get_body_argument('smtp_from')
use_tls = self.get_body_argument('use_tls')
user = self.get_body_argument('user')
pwd = filters.unhide(settings.EMAIL_PASSWORD, self.get_body_argument('pwd'))
to = self.get_body_argument('to')
host = config.clean_host(self.get_body_argument('host'))
if notifiers.email_notifier.test_notify(host, port, smtp_from, use_tls, user, pwd, to):
return _('Test email sent successfully! Check inbox.')
else:
return _('ERROR: {last_error}').format(last_error=notifiers.email_notifier.last_err)
def testPushalot(self):
authorizationToken = self.get_body_argument('authorizationToken')
result = notifiers.pushalot_notifier.test_notify(authorizationToken)
if result:
return _("Pushalot notification succeeded. Check your Pushalot clients to make sure it worked")
else:
return _("Error sending Pushalot notification")
def testPushbullet(self):
api = self.get_body_argument('api')
result = notifiers.pushbullet_notifier.test_notify(api)
if result:
return _("Pushbullet notification succeeded. Check your device to make sure it worked")
else:
return _("Error sending Pushbullet notification")
def getPushbulletDevices(self):
api = self.get_body_argument('api')
# self.set_header('Cache-Control', 'max-age=0,no-cache,no-store')
result = notifiers.pushbullet_notifier.get_devices(api)
if result:
return result
else:
return _("Error sending Pushbullet notification")
def getPushbulletChannels(self):
api = self.get_body_argument('api')
result = notifiers.pushbullet_notifier.get_channels(api)
if result:
return result
else:
return _("Error sending Pushbullet notification")
def status(self):
tvdirFree = helpers.disk_usage_hr(settings.TV_DOWNLOAD_DIR)
rootDir = {}
if settings.ROOT_DIRS:
backend_pieces = settings.ROOT_DIRS.split('|')
backend_dirs = backend_pieces[1:]
else:
backend_dirs = []
        if backend_dirs:
for subject in backend_dirs:
rootDir[subject] = helpers.disk_usage_hr(subject)
t = PageTemplate(rh=self, filename="status.mako")
return t.render(title=_('Status'), header=_('Status'), topmenu='system',
tvdirFree=tvdirFree, rootDir=rootDir,
controller="home", action="status")
def shutdown(self):
pid = self.get_query_argument('pid')
if not Shutdown.stop(pid):
return self.redirect('/' + settings.DEFAULT_PAGE + '/')
title = "Shutting down"
message = "SickChill is shutting down..."
return self._genericMessage(title, message)
def restart(self):
pid = self.get_query_argument('pid')
if not Restart.restart(pid):
return self.redirect('/' + settings.DEFAULT_PAGE + '/')
t = PageTemplate(rh=self, filename="restart.mako")
return t.render(title=_("Home"), header=_("Restarting SickChill"), topmenu="system",
controller="home", action="restart")
def updateCheck(self):
pid = self.get_query_argument('pid')
if str(pid) != str(settings.PID):
return self.redirect('/home/')
settings.versionCheckScheduler.action.check_for_new_version(force=True)
settings.versionCheckScheduler.action.check_for_new_news()
return self.redirect('/' + settings.DEFAULT_PAGE + '/')
def update(self, pid, branch=None):
if str(pid) != str(settings.PID):
return self.redirect('/home/')
updater = UpdateManager()
if updater.backup():
if branch:
settings.BRANCH = branch
if updater.update():
# do a hard restart
settings.events.put(settings.events.SystemEvent.RESTART)
t = PageTemplate(rh=self, filename="restart.mako")
return t.render(title=_("Home"), header=_("Restarting SickChill"), topmenu="home",
controller="home", action="restart")
else:
return self._genericMessage(_("Update Failed"),
_("Update wasn't successful, not restarting. Check your log for more information."))
else:
return self.redirect('/' + settings.DEFAULT_PAGE + '/')
@staticmethod
def fetchRemoteBranches():
response = []
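        # Only 'master' is listed for everyone; other branches require a GIT_TOKEN plus
        # either developer mode or the branch being 'develop' (see the filter below).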
try:
gh_branches = settings.versionCheckScheduler.action.list_remote_branches()
except GithubException:
gh_branches = None
if gh_branches:
for cur_branch in gh_branches:
branch_obj = {'name': cur_branch}
if cur_branch == settings.BRANCH:
branch_obj['current'] = True
if cur_branch == 'master' or (settings.GIT_TOKEN and (settings.DEVELOPER == 1 or cur_branch == 'develop')):
response.append(branch_obj)
return json.dumps(response)
def branchCheckout(self):
branch = self.get_query_argument('branch')
if settings.BRANCH != branch:
settings.BRANCH = branch
ui.notifications.message(_('Checking out branch') + ': ', branch)
return self.redirect('/update/?pid={}&branch={}'.format(settings.PID, branch))
else:
ui.notifications.message(_('Already on branch') + ': ', branch)
return self.redirect('/' + settings.DEFAULT_PAGE + '/')
@staticmethod
def compare_db_version():
update_manager = UpdateManager()
db_status = update_manager.compare_db_version()
if db_status == 'upgrade':
logger.debug("Checkout branch has a new DB version - Upgrade")
return json.dumps({"status": "success", 'message': 'upgrade'})
elif db_status == 'equal':
logger.debug("Checkout branch has the same DB version - Equal")
return json.dumps({"status": "success", 'message': 'equal'})
elif db_status == 'downgrade':
logger.debug("Checkout branch has an old DB version - Downgrade")
return json.dumps({"status": "success", 'message': 'downgrade'})
else:
logger.exception("Checkout branch couldn't compare DB version.")
return json.dumps({"status": "error", 'message': 'General exception'})
def displayShow(self):
show = self.get_query_argument('show')
# todo: add more comprehensive show validation
try:
show_obj = Show.find(settings.showList, int(show))
except (ValueError, TypeError):
return self._genericMessage(_("Error"), _("Invalid show ID: {show}").format(show=str(show)))
if not show_obj:
return self._genericMessage(_("Error"), _("Show not in show list"))
main_db_con = db.DBConnection()
seasonResults = main_db_con.select(
"SELECT DISTINCT season FROM tv_episodes WHERE showid = ? AND season IS NOT NULL ORDER BY season DESC",
[show_obj.indexerid]
)
min_season = (1, 0)[settings.DISPLAY_SHOW_SPECIALS]
sql_results = main_db_con.select(
"SELECT * FROM tv_episodes WHERE showid = ? AND season >= ? ORDER BY season DESC, episode DESC",
[show_obj.indexerid, min_season]
)
t = PageTemplate(rh=self, filename="displayShow.mako")
submenu = [{'title': _('Edit'), 'path': 'home/editShow?show={0:d}'.format(show_obj.indexerid), 'icon': 'fa fa-pencil'}]
try:
showLoc = (show_obj.location, True)
except ShowDirectoryNotFoundException:
# noinspection PyProtectedMember
showLoc = (show_obj._location, False)
show_message = ''
if settings.showQueueScheduler.action.is_being_added(show_obj):
show_message = _('This show is in the process of being downloaded - the info below is incomplete.')
elif settings.showQueueScheduler.action.is_being_updated(show_obj):
show_message = _('The information on this page is in the process of being updated.')
elif settings.showQueueScheduler.action.is_being_refreshed(show_obj):
show_message = _('The episodes below are currently being refreshed from disk')
elif settings.showQueueScheduler.action.is_being_subtitled(show_obj):
show_message = _('Currently downloading subtitles for this show')
elif settings.showQueueScheduler.action.is_in_refresh_queue(show_obj):
show_message = _('This show is queued to be refreshed.')
elif settings.showQueueScheduler.action.is_in_update_queue(show_obj):
show_message = _('This show is queued and awaiting an update.')
elif settings.showQueueScheduler.action.is_in_subtitle_queue(show_obj):
show_message = _('This show is queued and awaiting subtitles download.')
if not settings.showQueueScheduler.action.is_being_added(show_obj):
if not settings.showQueueScheduler.action.is_being_updated(show_obj):
if show_obj.paused:
submenu.append({'title': _('Resume'), 'path': 'home/togglePause?show={0:d}'.format(show_obj.indexerid), 'icon': 'fa fa-play'})
else:
submenu.append({'title': _('Pause'), 'path': 'home/togglePause?show={0:d}'.format(show_obj.indexerid), 'icon': 'fa fa-pause'})
# noinspection PyPep8
submenu.append({
'title': _('Remove'),
'path': 'home/deleteShow?show={0:d}'.format(show_obj.indexerid),
'class': 'removeshow',
'confirm': True,
'icon': 'fa fa-trash'
})
submenu.append({'title': _('Re-scan files'), 'path': 'home/refreshShow?show={0:d}&force=1'.format(show_obj.indexerid), 'icon': 'fa fa-refresh'})
# noinspection PyPep8
submenu.append(
{'title': _('Force Full Update'), 'path': 'home/updateShow?show={0:d}&force=1'.format(show_obj.indexerid), 'icon': 'fa fa-exchange'})
# noinspection PyPep8
submenu.append({
'title': _('Update show in KODI'),
'path': 'home/updateKODI?show={0:d}'.format(show_obj.indexerid),
'requires': self.haveKODI(),
'icon': 'menu-icon-kodi'
})
# noinspection PyPep8
submenu.append({
'title': _('Update show in Emby'),
'path': 'home/updateEMBY?show={0:d}'.format(show_obj.indexerid),
'requires': self.haveEMBY(),
'icon': 'menu-icon-emby'
})
if seasonResults and int(seasonResults[-1]["season"]) == 0:
if settings.DISPLAY_SHOW_SPECIALS:
# noinspection PyPep8
submenu.append({
'title': _('Hide specials'),
'path': 'home/toggleDisplayShowSpecials/?show={0:d}'.format(show_obj.indexerid),
'confirm': True,
'icon': 'fa fa-times'
})
else:
# noinspection PyPep8
submenu.append({
'title': _('Show specials'),
'path': 'home/toggleDisplayShowSpecials/?show={0:d}'.format(show_obj.indexerid),
'confirm': True,
'icon': 'fa fa-check'
})
submenu.append({'title': _('Preview Rename'), 'path': 'home/testRename?show={0:d}'.format(show_obj.indexerid), 'icon': 'fa fa-tag'})
if settings.USE_SUBTITLES and show_obj.subtitles and not settings.showQueueScheduler.action.is_being_subtitled(show_obj):
# noinspection PyPep8
submenu.append(
{'title': _('Download Subtitles'), 'path': 'home/subtitleShow?show={0:d}'.format(show_obj.indexerid), 'icon': 'fa fa-language'})
epCounts = {
Overview.SKIPPED: 0,
Overview.WANTED: 0,
Overview.QUAL: 0,
Overview.GOOD: 0,
Overview.UNAIRED: 0,
Overview.SNATCHED: 0,
Overview.SNATCHED_PROPER: 0,
Overview.SNATCHED_BEST: 0
}
epCats = {}
for curResult in sql_results:
curEpCat = show_obj.getOverview(curResult["status"])
if curEpCat:
epCats[str(curResult["season"]) + "x" + str(curResult["episode"])] = curEpCat
epCounts[curEpCat] += 1
if settings.ANIME_SPLIT_HOME:
shows = []
anime = []
for show in settings.showList:
if show.is_anime:
anime.append(show)
else:
shows.append(show)
            sortedShowLists = [
                ["Shows", sorted(shows, key=attrgetter('sort_name'))],
                ["Anime", sorted(anime, key=attrgetter('sort_name'))]
            ]
else:
            sortedShowLists = [
                ["Shows", sorted(settings.showList, key=attrgetter('sort_name'))]
            ]
bwl = None
if show_obj.is_anime:
bwl = show_obj.release_groups
show_obj.exceptions = sickchill.oldbeard.scene_exceptions.get_scene_exceptions(show_obj.indexerid)
indexerid = int(show_obj.indexerid)
indexer = int(show_obj.indexer)
# Delete any previous occurrences
for index, recentShow in enumerate(settings.SHOWS_RECENT):
if recentShow['indexerid'] == indexerid:
del settings.SHOWS_RECENT[index]
# Only track 5 most recent shows
del settings.SHOWS_RECENT[4:]
# Insert most recent show
settings.SHOWS_RECENT.insert(0, {
'indexerid': indexerid,
'name': show_obj.name,
})
return t.render(
submenu=submenu, showLoc=showLoc, show_message=show_message,
show=show_obj, sql_results=sql_results, seasonResults=seasonResults,
sortedShowLists=sortedShowLists, bwl=bwl, epCounts=epCounts,
epCats=epCats, all_scene_exceptions=show_obj.exceptions,
scene_numbering=get_scene_numbering_for_show(indexerid, indexer),
xem_numbering=get_xem_numbering_for_show(indexerid, indexer),
scene_absolute_numbering=get_scene_absolute_numbering_for_show(indexerid, indexer),
xem_absolute_numbering=get_xem_absolute_numbering_for_show(indexerid, indexer),
title=show_obj.name,
controller="home",
action="displayShow"
)
def plotDetails(self):
show = self.get_query_argument('show')
season = self.get_query_argument('season')
episode = self.get_query_argument('episode')
main_db_con = db.DBConnection()
result = main_db_con.select_one(
"SELECT description FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", (int(show), int(season), int(episode)))
return result['description'] if result else 'Episode not found.'
def sceneExceptions(self):
show = self.get_query_argument('show')
exeptions_list = sickchill.oldbeard.scene_exceptions.get_all_scene_exceptions(show)
if not exeptions_list:
return _("No scene exceptions")
out = []
for season, exceptions in iter(sorted(exeptions_list.items())):
if season == -1:
season = "*"
out.append("S" + str(season) + ": " + ", ".join(exceptions.names))
return "<br>".join(out)
def editShow(self, show=None, location=None, anyQualities=None, bestQualities=None,
exceptions_list=None, season_folders=None, paused=None, directCall=False,
air_by_date=None, sports=None, dvdorder=None, indexerLang=None,
subtitles=None, subtitles_sr_metadata=None, rls_ignore_words=None, rls_require_words=None, rls_prefer_words=None,
anime=None, blacklist=None, whitelist=None, scene=None,
defaultEpStatus=None, quality_preset=None,
custom_name='',
poster=None, banner=None, fanart=None):
anidb_failed = False
try:
show_obj = Show.find(settings.showList, int(show))
except (ValueError, TypeError):
errString = _("Invalid show ID") + ": {show}".format(show=str(show))
if directCall:
return [errString]
else:
return self._genericMessage(_("Error"), errString)
if not show_obj:
errString = _("Unable to find the specified show") + ": {show}".format(show=str(show))
if directCall:
return [errString]
else:
return self._genericMessage(_("Error"), errString)
show_obj.exceptions = sickchill.oldbeard.scene_exceptions.get_all_scene_exceptions(show_obj.indexerid)
main_db_con = db.DBConnection()
seasonResults = main_db_con.select(
"SELECT DISTINCT season FROM tv_episodes WHERE showid = ? AND season IS NOT NULL ORDER BY season DESC",
[show_obj.indexerid]
)
if try_int(quality_preset, None):
bestQualities = []
if not (location or anyQualities or bestQualities or season_folders):
t = PageTemplate(rh=self, filename="editShow.mako")
groups = []
if show_obj.is_anime:
whitelist = show_obj.release_groups.whitelist
blacklist = show_obj.release_groups.blacklist
if helpers.set_up_anidb_connection() and not anidb_failed:
try:
anime = adba.Anime(settings.ADBA_CONNECTION, name=show_obj.name)
groups = anime.get_groups()
except Exception as e:
                        ui.notifications.error(_('Unable to retrieve Fansub Groups from AniDB.'))
                        logger.debug('Unable to retrieve Fansub Groups from AniDB. Error is {0}'.format(e))
with show_obj.lock:
show = show_obj
if show_obj.is_anime:
return t.render(show=show, scene_exceptions=show_obj.exceptions, seasonResults=seasonResults,
groups=groups, whitelist=whitelist, blacklist=blacklist,
title=_('Edit Show'), header=_('Edit Show'), controller="home", action="editShow")
else:
return t.render(show=show, scene_exceptions=show_obj.exceptions, seasonResults=seasonResults,
title=_('Edit Show'), header=_('Edit Show'), controller="home", action="editShow")
season_folders = config.checkbox_to_value(season_folders)
dvdorder = config.checkbox_to_value(dvdorder)
paused = config.checkbox_to_value(paused)
air_by_date = config.checkbox_to_value(air_by_date)
scene = config.checkbox_to_value(scene)
sports = config.checkbox_to_value(sports)
anime = config.checkbox_to_value(anime)
subtitles = config.checkbox_to_value(subtitles)
subtitles_sr_metadata = config.checkbox_to_value(subtitles_sr_metadata)
if indexerLang and indexerLang in show_obj.idxr.languages:
indexer_lang = indexerLang
else:
indexer_lang = show_obj.lang
# if we changed the language then kick off an update
do_update = indexer_lang != show_obj.lang
do_update_scene_numbering = scene != show_obj.scene or anime != show_obj.anime
if not anyQualities:
anyQualities = []
if not bestQualities:
bestQualities = []
if not exceptions_list:
exceptions_list = []
if not isinstance(anyQualities, list):
anyQualities = [anyQualities]
if not isinstance(bestQualities, list):
bestQualities = [bestQualities]
if isinstance(exceptions_list, list):
if len(exceptions_list) > 0:
exceptions_list = exceptions_list[0]
else:
exceptions_list = None
# Map custom exceptions
exceptions = {}
if exceptions_list:
# noinspection PyUnresolvedReferences
for season in exceptions_list.split(','):
season, shows = season.split(':')
show_list = []
for cur_show in shows.split('|'):
show_list.append({'show_name': unquote_plus(cur_show), 'custom': True})
exceptions[int(season)] = show_list
show_obj.custom_name = custom_name
metadata_generator = GenericMetadata()
def get_images(image):
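            # Accepts either a base64 data URI or an 'image_url|thumb_url' pair and
            # returns a (full image bytes, thumbnail bytes) tuple.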
if image.startswith('data:image'):
start = image.index('base64,') + 7
img_data = base64.b64decode(image[start:])
return img_data, img_data
else:
image_parts = image.split('|')
img_url = image_parts[0]
img_data = getShowImage(img_url)
if len(image_parts) > 1:
img_thumb_url = image_parts[1]
img_thumb_data = getShowImage(img_thumb_url)
return img_data, img_thumb_data
else:
return img_data, img_data
if poster:
img_data, img_thumb_data = get_images(poster)
dest_path = settings.IMAGE_CACHE.poster_path(show_obj.indexerid)
dest_thumb_path = settings.IMAGE_CACHE.poster_thumb_path(show_obj.indexerid)
metadata_generator._write_image(img_data, dest_path, overwrite=True)
metadata_generator._write_image(img_thumb_data, dest_thumb_path, overwrite=True)
if banner:
img_data, img_thumb_data = get_images(banner)
dest_path = settings.IMAGE_CACHE.banner_path(show_obj.indexerid)
dest_thumb_path = settings.IMAGE_CACHE.banner_thumb_path(show_obj.indexerid)
metadata_generator._write_image(img_data, dest_path, overwrite=True)
metadata_generator._write_image(img_thumb_data, dest_thumb_path, overwrite=True)
if fanart:
img_data, img_thumb_data = get_images(fanart)
dest_path = settings.IMAGE_CACHE.fanart_path(show_obj.indexerid)
metadata_generator._write_image(img_data, dest_path, overwrite=True)
# If directCall from mass_edit_update no scene exceptions handling or blackandwhite list handling
if not directCall:
with show_obj.lock:
if anime:
if not show_obj.release_groups:
show_obj.release_groups = BlackAndWhiteList(show_obj.indexerid)
if whitelist:
shortwhitelist = short_group_names(whitelist)
show_obj.release_groups.set_white_keywords(shortwhitelist)
else:
show_obj.release_groups.set_white_keywords([])
if blacklist:
shortblacklist = short_group_names(blacklist)
show_obj.release_groups.set_black_keywords(shortblacklist)
else:
show_obj.release_groups.set_black_keywords([])
errors = []
with show_obj.lock:
newQuality = Quality.combineQualities([int(q) for q in anyQualities], [int(q) for q in bestQualities])
show_obj.quality = newQuality
if bool(show_obj.season_folders) != season_folders:
show_obj.season_folders = season_folders
try:
settings.showQueueScheduler.action.refresh_show(show_obj)
except CantRefreshShowException as e:
errors.append(_("Unable to refresh this show: {error}").format(error=e))
show_obj.paused = paused
show_obj.scene = scene
show_obj.anime = anime
show_obj.sports = sports
show_obj.subtitles = subtitles
show_obj.subtitles_sr_metadata = subtitles_sr_metadata
show_obj.air_by_date = air_by_date
show_obj.default_ep_status = int(defaultEpStatus)
if not directCall:
show_obj.lang = indexer_lang
show_obj.dvdorder = dvdorder
show_obj.rls_ignore_words = rls_ignore_words.strip()
show_obj.rls_require_words = rls_require_words.strip()
show_obj.rls_prefer_words = rls_prefer_words.strip()
location = os.path.normpath(xhtml_unescape(location))
# noinspection PyProtectedMember
old_location = os.path.normpath(show_obj._location)
# if we change location clear the db of episodes, change it, write to db, and rescan
if old_location != location:
logger.debug(old_location + " != " + location)
if not (os.path.isdir(location) or settings.CREATE_MISSING_SHOW_DIRS or settings.ADD_SHOWS_WO_DIR):
errors.append(_("New location <tt>{location}</tt> does not exist").format(location=location))
else:
# change it
try:
show_obj.location = location
try:
settings.showQueueScheduler.action.refresh_show(show_obj, True)
except CantRefreshShowException as e:
errors.append(_("Unable to refresh this show: {error}").format(error=e))
# grab updated info from TVDB
# show_obj.loadEpisodesFromIndexer()
# rescan the episodes in the new folder
except NoNFOException:
# noinspection PyPep8
errors.append(
"The folder at <tt>{0}</tt> doesn't contain a tvshow.nfo - copy your files to that folder before you change the directory in SickChill.".format(
location))
# save it to the DB
show_obj.saveToDB()
# force the update
if do_update:
try:
settings.showQueueScheduler.action.update_show(show_obj, True)
time.sleep(cpu_presets[settings.CPU_PRESET])
except CantUpdateShowException as e:
errors.append(_("Unable to update show: {error}").format(error=e))
import traceback
logger.debug('Updating show exceptions')
try:
sickchill.oldbeard.scene_exceptions.update_scene_exceptions(show_obj.indexerid, exceptions)
time.sleep(cpu_presets[settings.CPU_PRESET])
except CantUpdateShowException:
logger.debug(traceback.format_exc())
errors.append(_("Unable to force an update on scene exceptions of the show."))
if do_update_scene_numbering:
try:
sickchill.oldbeard.scene_numbering.xem_refresh(show_obj.indexerid, show_obj.indexer)
time.sleep(cpu_presets[settings.CPU_PRESET])
except CantUpdateShowException:
errors.append(_("Unable to force an update on scene numbering of the show."))
if directCall:
return errors
if errors:
ui.notifications.error(
_('{num_errors:d} error{plural} while saving changes:').format(num_errors=len(errors), plural="" if len(errors) == 1 else "s"),
'<ul>' + '\n'.join(['<li>{0}</li>'.format(error) for error in errors]) + "</ul>"
)
return self.redirect("/home/displayShow?show=" + show)
def togglePause(self, show=None):
error, show = Show.pause(show)
if error:
return self._genericMessage(_('Error'), error)
ui.notifications.message(
_('{show_name} has been {paused_resumed}').format(show_name=show.name, paused_resumed=(_('resumed'), _('paused'))[show.paused])
)
return self.redirect("/home/displayShow?show={0:d}".format(show.indexerid))
def deleteShow(self, show=None, full=0):
if show:
error, show = Show.delete(show, full)
if error:
return self._genericMessage(_('Error'), error)
ui.notifications.message(
_('{show_name} has been {deleted_trashed} {was_deleted}').format(
show_name=show.name,
deleted_trashed=(_('deleted'), _('trashed'))[settings.TRASH_REMOVE_SHOW],
was_deleted=(_('(media untouched)'), _('(with all related media)'))[bool(full)]
)
)
time.sleep(cpu_presets[settings.CPU_PRESET])
# Remove show from 'RECENT SHOWS' in 'Shows' menu
settings.SHOWS_RECENT = [x for x in settings.SHOWS_RECENT if x['indexerid'] != show.indexerid]
# Don't redirect to the default page, so the user can confirm that the show was deleted
return self.redirect('/home/')
def refreshShow(self, show=None, force=False):
error, show = Show.refresh(show, force)
# This is a show validation error
if error and not show:
return self._genericMessage(_('Error'), error)
# This is a refresh error
if error:
ui.notifications.error(_('Unable to refresh this show.'), error)
time.sleep(cpu_presets[settings.CPU_PRESET])
return self.redirect("/home/displayShow?show=" + str(show.indexerid))
def updateShow(self, show=None, force=0):
if not show:
return self._genericMessage(_("Error"), _("Invalid show ID"))
show_obj = Show.find(settings.showList, int(show))
if not show_obj:
return self._genericMessage(_("Error"), _("Unable to find the specified show"))
# force the update
try:
settings.showQueueScheduler.action.update_show(show_obj, bool(force))
except CantUpdateShowException as e:
ui.notifications.error(_("Unable to update this show."), str(e))
# just give it some time
time.sleep(cpu_presets[settings.CPU_PRESET])
return self.redirect("/home/displayShow?show=" + str(show_obj.indexerid))
def subtitleShow(self, show=None, force=0):
if not show:
return self._genericMessage(_("Error"), _("Invalid show ID"))
show_obj = Show.find(settings.showList, int(show))
if not show_obj:
return self._genericMessage(_("Error"), _("Unable to find the specified show"))
# search and download subtitles
settings.showQueueScheduler.action.download_subtitles(show_obj, bool(force))
time.sleep(cpu_presets[settings.CPU_PRESET])
return self.redirect("/home/displayShow?show=" + str(show_obj.indexerid))
def updateKODI(self, show=None):
showName = None
show_obj = None
if show:
show_obj = Show.find(settings.showList, int(show))
if show_obj:
showName = urllib.parse.quote_plus(show_obj.name)
if settings.KODI_UPDATE_ONLYFIRST:
host = settings.KODI_HOST.split(",")[0].strip()
else:
host = settings.KODI_HOST
if notifiers.kodi_notifier.update_library(show_name=showName):
ui.notifications.message(_("Library update command sent to KODI host(s)): {kodi_hosts}").format(kodi_hosts=host))
else:
ui.notifications.error(_("Unable to contact one or more KODI host(s)): {kodi_hosts}").format(kodi_hosts=host))
if show_obj:
return self.redirect('/home/displayShow?show=' + str(show_obj.indexerid))
else:
return self.redirect('/home/')
def updatePLEX(self):
if notifiers.plex_notifier.update_library() is None:
ui.notifications.message(_("Library update command sent to Plex Media Server host: {plex_server}").format
(plex_server=settings.PLEX_SERVER_HOST))
else:
ui.notifications.error(_("Unable to contact Plex Media Server host: {plex_server}").format
(plex_server=settings.PLEX_SERVER_HOST))
return self.redirect('/home/')
def updateEMBY(self, show=None):
show_obj = None
if show:
show_obj = Show.find(settings.showList, int(show))
if notifiers.emby_notifier.update_library(show_obj):
ui.notifications.message(
_("Library update command sent to Emby host: {emby_host}").format(emby_host=settings.EMBY_HOST))
else:
ui.notifications.error(_("Unable to contact Emby host: {emby_host}").format(emby_host=settings.EMBY_HOST))
if show_obj:
return self.redirect('/home/displayShow?show=' + str(show_obj.indexerid))
else:
return self.redirect('/home/')
def setStatus(self, show=None, eps=None, status=None, direct=False):
if not all([show, eps, status]):
errMsg = _("You must specify a show and at least one episode")
if direct:
ui.notifications.error(_('Error'), errMsg)
return json.dumps({'result': 'error'})
else:
return self._genericMessage(_("Error"), errMsg)
        # statusStrings customizes its membership test in common.py, so a plain `in` check works here
if status not in statusStrings:
errMsg = _("Invalid status")
if direct:
ui.notifications.error(_('Error'), errMsg)
return json.dumps({'result': 'error'})
else:
return self._genericMessage(_("Error"), errMsg)
show_obj = Show.find(settings.showList, int(show))
if not show_obj:
errMsg = _("Show not in show list")
if direct:
ui.notifications.error(_('Error'), errMsg)
return json.dumps({'result': 'error'})
else:
return self._genericMessage(_("Error"), errMsg)
segments = {}
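        # Map of season -> [episode objects], used below to queue backlog or failed
        # searches one season at a time.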
if eps:
trakt_data = []
sql_l = []
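            # eps arrives as pipe-separated 'SxE' tokens, e.g. '1x1|1x2'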
for cur_ep in eps.split('|'):
                if not cur_ep:
                    logger.debug("cur_ep was empty when trying to setStatus")
                    continue
                logger.debug("Attempting to set status on episode " + cur_ep + " to " + status)
epInfo = cur_ep.split('x')
if not all(epInfo):
logger.debug("Something went wrong when trying to setStatus, epInfo[0]: {0}, epInfo[1]: {1}".format(epInfo[0], epInfo[1]))
continue
ep_obj = show_obj.getEpisode(epInfo[0], epInfo[1])
if not ep_obj:
return self._genericMessage(_("Error"), _("Episode couldn't be retrieved"))
if int(status) in [WANTED, FAILED]:
# figure out what episodes are wanted so we can backlog them
if ep_obj.season in segments:
segments[ep_obj.season].append(ep_obj)
else:
segments[ep_obj.season] = [ep_obj]
with ep_obj.lock:
# don't let them mess up UNAIRED episodes
if ep_obj.status == UNAIRED:
logger.warning("Refusing to change status of " + cur_ep + " because it is UNAIRED")
continue
if int(status) in Quality.DOWNLOADED and ep_obj.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + \
Quality.DOWNLOADED + [IGNORED] and not os.path.isfile(ep_obj.location):
logger.warning("Refusing to change status of " + cur_ep + " to DOWNLOADED because it's not SNATCHED/DOWNLOADED")
continue
if int(status) == FAILED and ep_obj.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + \
Quality.DOWNLOADED + Quality.ARCHIVED:
logger.warning("Refusing to change status of " + cur_ep + " to FAILED because it's not SNATCHED/DOWNLOADED")
continue
if ep_obj.status in Quality.DOWNLOADED + Quality.ARCHIVED and int(status) == WANTED:
logger.info(
"Removing release_name for episode as you want to set a downloaded episode back to wanted, so obviously you want it replaced")
ep_obj.release_name = ""
ep_obj.status = int(status)
# mass add to database
sql_l.append(ep_obj.get_sql())
if settings.USE_TRAKT and settings.TRAKT_SYNC_WATCHLIST:
trakt_data.append((ep_obj.season, ep_obj.episode))
if settings.USE_TRAKT and settings.TRAKT_SYNC_WATCHLIST:
data = notifiers.trakt_notifier.trakt_episode_data_generate(trakt_data)
if data['seasons']:
upd = ""
if int(status) in [WANTED, FAILED]:
logger.debug(
"Add episodes, showid: indexerid " + str(show_obj.indexerid) + ", Title " + str(show_obj.name) + " to Watchlist"
)
upd = "add"
elif int(status) in [IGNORED, SKIPPED] + Quality.DOWNLOADED + Quality.ARCHIVED:
# noinspection PyPep8
logger.debug(
"Remove episodes, showid: indexerid " + str(show_obj.indexerid) + ", Title " + str(show_obj.name) + " from Watchlist"
)
upd = "remove"
if upd:
notifiers.trakt_notifier.update_watchlist(show_obj, data_episode=data, update=upd)
if sql_l:
main_db_con = db.DBConnection()
main_db_con.mass_action(sql_l)
if int(status) == WANTED and not show_obj.paused:
msg = _("Backlog was automatically started for the following seasons of <b>{show_name}</b>").format(show_name=show_obj.name)
msg += ':<br><ul>'
for season, segment in segments.items():
cur_backlog_queue_item = search_queue.BacklogQueueItem(show_obj, segment)
settings.searchQueueScheduler.action.add_item(cur_backlog_queue_item)
msg += "<li>" + _("Season") + " " + str(season) + "</li>"
logger.info("Sending backlog for " + show_obj.name + " season " + str(
season) + " because some eps were set to wanted")
msg += "</ul>"
if segments:
ui.notifications.message(_("Backlog started"), msg)
elif int(status) == WANTED and show_obj.paused:
logger.info("Some episodes were set to wanted, but " + show_obj.name + " is paused. Not adding to Backlog until show is unpaused")
if int(status) == FAILED:
msg = _("Retrying Search was automatically started for the following season of <b>{show_name}</b>").format(show_name=show_obj.name)
msg += ':<br><ul>'
for season, segment in segments.items():
cur_failed_queue_item = search_queue.FailedQueueItem(show_obj, segment)
settings.searchQueueScheduler.action.add_item(cur_failed_queue_item)
msg += "<li>" + _("Season") + " " + str(season) + "</li>"
logger.info("Retrying Search for " + show_obj.name + " season " + str(
season) + " because some eps were set to failed")
msg += "</ul>"
if segments:
ui.notifications.message(_("Retry Search started"), msg)
if direct:
return json.dumps({'result': 'success'})
else:
return self.redirect("/home/displayShow?show=" + show)
def testRename(self, show=None):
if not show:
return self._genericMessage(_("Error"), _("You must specify a show"))
show_obj = Show.find(settings.showList, int(show))
if not show_obj:
return self._genericMessage(_("Error"), _("Show not in show list"))
try:
show_obj.location
except ShowDirectoryNotFoundException:
return self._genericMessage(_("Error"), _("Can't rename episodes when the show dir is missing."))
show_obj.getAllEpisodes(has_location=True)
t = PageTemplate(rh=self, filename="testRename.mako")
submenu = [{'title': _('Edit'), 'path': 'home/editShow?show={0:d}'.format(show_obj.indexerid), 'icon': 'ui-icon ui-icon-pencil'}]
return t.render(submenu=submenu,
show=show_obj, title=_('Preview Rename'),
header=_('Preview Rename'),
controller="home", action="previewRename")
def doRename(self, show=None, eps=None):
if not (show and eps):
return self._genericMessage(_("Error"), _("You must specify a show and at least one episode"))
show_obj = Show.find(settings.showList, int(show))
if not show_obj:
return self._genericMessage(_("Error"), _("Show not in show list"))
try:
show_obj.location
except ShowDirectoryNotFoundException:
return self._genericMessage(_("Error"), _("Can't rename episodes when the show dir is missing."))
if not eps:
return self.redirect("/home/displayShow?show=" + show)
main_db_con = db.DBConnection()
for cur_ep in eps.split('|'):
epInfo = cur_ep.split('x')
# this is probably the worst possible way to deal with double eps but I've kinda painted myself into a corner here with this stupid database
ep_result = main_db_con.select(
"SELECT location FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND 5=5",
[show, epInfo[0], epInfo[1]])
if not ep_result:
logger.warning("Unable to find an episode for " + cur_ep + ", skipping")
continue
related_eps_result = main_db_con.select(
"SELECT season, episode FROM tv_episodes WHERE location = ? AND episode != ?",
[ep_result[0]["location"], epInfo[1]]
)
root_ep_obj = show_obj.getEpisode(epInfo[0], epInfo[1])
root_ep_obj.relatedEps = []
for cur_related_ep in related_eps_result:
related_ep_obj = show_obj.getEpisode(cur_related_ep["season"], cur_related_ep["episode"])
if related_ep_obj not in root_ep_obj.relatedEps:
root_ep_obj.relatedEps.append(related_ep_obj)
root_ep_obj.rename()
return self.redirect("/home/displayShow?show=" + show)
def manual_search_show_releases(self):
show = self.get_query_argument('show')
season = self.get_query_argument('season')
episode = self.get_query_argument('episode', None)
self.set_header('Cache-Control', 'max-age=0,no-cache,no-store')
cache_db_con = db.DBConnection('cache.db', row_type="dict")
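        # Manual-search results are served from the provider cache; row_type="dict" yields dict rows.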
# show_object: TVShow = Show.find(settings.showList, show)
# sickchill.oldbeard.search.searchProviders(
# show_object,
# show_object.getEpisode(season=season, episode=episode or 1),
# downCurQuality=True,
# manualSearch=True,
# manual_snatch=('sponly', 'eponly')[episode is not None]
# )
if episode:
results = cache_db_con.select(
'SELECT * FROM results WHERE indexerid = ? AND season = ? AND episodes LIKE ? AND status != ? ORDER BY seeders DESC',
[show, season, f'%|{episode}|%', FAILED])
else:
show_object: TVShow = Show.find(settings.showList, show)
            episodes_sql = '|'.join([str(ep.episode) for ep in show_object.getAllEpisodes(season=season) if ep.season > 0])
results = cache_db_con.select(
'SELECT * FROM results WHERE indexerid = ? AND season = ? AND episodes LIKE ? AND status != ? ORDER BY seeders DESC',
[show, season, f'%{episodes_sql}%', FAILED])
for result in results:
episodes_list = [int(ep) for ep in result['episodes'].split('|') if ep]
if len(episodes_list) > 1:
result['ep_string'] = 'S{:02}E{}-{}'.format(result['season'], min(episodes_list), max(episodes_list))
else:
result['ep_string'] = episode_num(result['season'], episodes_list[0])
# TODO: If no cache results do a search on indexers and post back to this method.
t = PageTemplate(rh=self, filename="manual_search_show_releases.mako")
submenu = [{'title': _('Edit'), 'path': 'home/editShow?show={0}'.format(show), 'icon': 'fa fa-pencil'}]
return t.render(submenu=submenu, title=_('Manual Snatch'), header=_('Manual Snatch'), controller="home", action="manual_search_show_releases", results=results)
def manual_snatch_show_release(self, *args, **kwargs):
url = self.get_body_argument('url')
show = self.get_body_argument('show')
self.set_header('Cache-Control', 'max-age=0,no-cache,no-store')
self.set_header('Content-Type', 'application/json')
cache_db_con = db.DBConnection('cache.db', row_type="dict")
result = cache_db_con.select_one('SELECT * FROM results WHERE url = ?', [url])
if not result:
            return json.dumps({'result': 'failure', 'message': _('Result not found in the cache')})
result = sickchill.oldbeard.classes.TorrentSearchResult.make_result(result)
if isinstance(result, str):
sickchill.oldbeard.logger.info(_('Could not snatch manually selected result: {}').format(result))
elif result:
sickchill.oldbeard.search.snatchEpisode(result, SNATCHED_BEST)
return self.redirect("/home/displayShow?show=" + show)
def searchEpisode(self, show=None, season=None, episode=None, downCurQuality=0):
# retrieve the episode object and fail if we can't get one
ep_obj, error_msg = self._getEpisode(show, season, episode)
if error_msg or not ep_obj:
return json.dumps({'result': 'failure', 'errorMessage': error_msg})
# make a queue item for it and put it on the queue
ep_queue_item = search_queue.ManualSearchQueueItem(ep_obj.show, ep_obj, bool(int(downCurQuality)))
settings.searchQueueScheduler.action.add_item(ep_queue_item)
if not ep_queue_item.started and ep_queue_item.success is None:
            return json.dumps(
                {'result': 'success'})  # really "queued": the search has not actually started yet
if ep_queue_item.started and ep_queue_item.success is None:
return json.dumps({'result': 'success'})
else:
return json.dumps({'result': 'failure'})
    # ## Returns the current ep_queue_item status for the currently viewed show.
    # Possible statuses: Downloaded, Snatched, etc...
    # Returns {'show': 279530, 'episodes': [{'episode': 6, 'season': 1, 'searchstatus': 'queued', 'status': 'running', 'quality': '4013'}]}
def getManualSearchStatus(self, show=None):
def getEpisodes(search_thread, search_status):
results = []
show_obj = Show.find(settings.showList, int(search_thread.show.indexerid))
if not show_obj:
logger.warning('No Show Object found for show with indexerID: ' + str(search_thread.show.indexerid))
return results
# noinspection PyProtectedMember
def relative_ep_location(ep_loc, show_loc):
""" Returns the relative location compared to the show's location """
if ep_loc and show_loc and ep_loc.lower().startswith(show_loc.lower()):
return ep_loc[len(show_loc) + 1:]
else:
return ep_loc
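            # e.g. relative_ep_location('/tv/Show/Season 1/ep.mkv', '/tv/Show')
            # returns 'Season 1/ep.mkv'; unrelated paths come back unchanged.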
if isinstance(search_thread, sickchill.oldbeard.search_queue.ManualSearchQueueItem):
# noinspection PyProtectedMember
results.append({
'show': search_thread.show.indexerid,
'episode': search_thread.segment.episode,
'episodeindexid': search_thread.segment.indexerid,
'season': search_thread.segment.season,
'searchstatus': search_status,
'status': statusStrings[search_thread.segment.status],
'quality': self.getQualityClass(search_thread.segment),
'overview': Overview.overviewStrings[show_obj.getOverview(search_thread.segment.status)],
'location': relative_ep_location(search_thread.segment._location, show_obj._location),
'size': pretty_file_size(search_thread.segment.file_size) if search_thread.segment.file_size else ''
})
else:
for ep_obj in search_thread.segment:
# noinspection PyProtectedMember
results.append({
'show': ep_obj.show.indexerid,
'episode': ep_obj.episode,
'episodeindexid': ep_obj.indexerid,
'season': ep_obj.season,
'searchstatus': search_status,
'status': statusStrings[ep_obj.status],
'quality': self.getQualityClass(ep_obj),
'overview': Overview.overviewStrings[show_obj.getOverview(ep_obj.status)],
'location': relative_ep_location(ep_obj._location, show_obj._location),
'size': pretty_file_size(ep_obj.file_size) if ep_obj.file_size else ''
})
return results
episodes = []
# Queued Searches
searchstatus = 'Queued'
for searchThread in settings.searchQueueScheduler.action.get_all_ep_from_queue(show):
episodes += getEpisodes(searchThread, searchstatus)
# Running Searches
searchstatus = 'Searching'
if settings.searchQueueScheduler.action.is_manualsearch_in_progress():
searchThread = settings.searchQueueScheduler.action.currentItem
if searchThread.success:
searchstatus = 'Finished'
episodes += getEpisodes(searchThread, searchstatus)
# Finished Searches
searchstatus = 'Finished'
for searchThread in sickchill.oldbeard.search_queue.MANUAL_SEARCH_HISTORY:
if show and str(searchThread.show.indexerid) != show:
continue
if isinstance(searchThread, sickchill.oldbeard.search_queue.ManualSearchQueueItem):
# noinspection PyTypeChecker
if not [x for x in episodes if x['episodeindexid'] == searchThread.segment.indexerid]:
episodes += getEpisodes(searchThread, searchstatus)
else:
                # ## These are only Failed Downloads/Retry SearchThreadItems; loop through the segment/episodes
                # TODO: this zip-based membership check is confusing and potentially expensive; consider comparing sets of indexerids
if not [i for i, j in zip(searchThread.segment, episodes) if i.indexerid == j['episodeindexid']]:
episodes += getEpisodes(searchThread, searchstatus)
self.set_header('Cache-Control', 'max-age=0,no-cache,no-store')
self.set_header('Content-Type', 'application/json')
return json.dumps({'episodes': episodes})
@staticmethod
def getQualityClass(ep_obj):
# return the correct json value
# Find the quality class for the episode
ep_status_, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
if ep_quality in Quality.cssClassStrings:
quality_class = Quality.cssClassStrings[ep_quality]
else:
quality_class = Quality.cssClassStrings[Quality.UNKNOWN]
return quality_class
def searchEpisodeSubtitles(self, show=None, season=None, episode=None):
# retrieve the episode object and fail if we can't get one
ep_obj, error_msg = self._getEpisode(show, season, episode)
if error_msg or not ep_obj:
return json.dumps({'result': 'failure', 'errorMessage': error_msg})
# noinspection PyBroadException
try:
new_subtitles = ep_obj.download_subtitles()
except Exception:
return json.dumps({'result': 'failure'})
if new_subtitles:
new_languages = [subtitle_module.name_from_code(code) for code in new_subtitles]
status = _('New subtitles downloaded: {new_subtitle_languages}').format(
new_subtitle_languages=', '.join(new_languages))
else:
status = _('No subtitles downloaded')
ui.notifications.message(ep_obj.show.name, status)
return json.dumps({'result': status, 'subtitles': ','.join(ep_obj.subtitles)})
def playOnKodi(self, show, season, episode, host):
ep_obj, error_msg = self._getEpisode(show, season, episode)
if error_msg or not ep_obj:
return json.dumps({'result': 'failure', 'errorMessage': error_msg})
sickchill.oldbeard.notifiers.kodi_notifier.play_episode(ep_obj, host)
return json.dumps({'result': 'success'})
def retrySearchSubtitles(self, show, season, episode, lang):
# retrieve the episode object and fail if we can't get one
ep_obj, error_msg = self._getEpisode(show, season, episode)
if error_msg or not ep_obj:
return json.dumps({'result': 'failure', 'errorMessage': error_msg})
try:
new_subtitles = ep_obj.download_subtitles(force_lang=lang)
except Exception as error:
            return json.dumps({'result': 'failure', 'errorMessage': str(error)})
if new_subtitles:
new_languages = [subtitle_module.name_from_code(code) for code in new_subtitles]
status = _('New subtitles downloaded: {new_subtitle_languages}').format(
new_subtitle_languages=', '.join(new_languages))
else:
status = _('No subtitles downloaded')
ui.notifications.message(ep_obj.show.name, status)
return json.dumps({'result': status, 'subtitles': ','.join(ep_obj.subtitles)})
def setSceneNumbering(self, show, indexer, forSeason=None, forEpisode=None, forAbsolute=None, sceneSeason=None,
sceneEpisode=None, sceneAbsolute=None):
# sanitize:
if forSeason in ('null', ''):
forSeason = None
if forEpisode in ('null', ''):
forEpisode = None
if forAbsolute in ('null', ''):
forAbsolute = None
if sceneSeason in ('null', ''):
sceneSeason = None
if sceneEpisode in ('null', ''):
sceneEpisode = None
if sceneAbsolute in ('null', ''):
sceneAbsolute = None
show_obj = Show.find(settings.showList, int(show))
if show_obj.is_anime:
result = {
'success': True,
'forAbsolute': forAbsolute,
}
else:
result = {
'success': True,
'forSeason': forSeason,
'forEpisode': forEpisode,
}
# retrieve the episode object and fail if we can't get one
if show_obj.is_anime:
ep_obj, error_msg = self._getEpisode(show, absolute=forAbsolute)
else:
ep_obj, error_msg = self._getEpisode(show, forSeason, forEpisode)
if error_msg or not ep_obj:
result['success'] = False
result['errorMessage'] = error_msg
elif show_obj.is_anime:
logger.debug("setAbsoluteSceneNumbering for {0} from {1} to {2}".format(show, forAbsolute, sceneAbsolute))
show = int(show)
indexer = int(indexer)
forAbsolute = int(forAbsolute)
if sceneAbsolute is not None:
sceneAbsolute = int(sceneAbsolute)
set_scene_numbering(show, indexer, absolute_number=forAbsolute, sceneAbsolute=sceneAbsolute)
else:
logger.debug("setEpisodeSceneNumbering for {0} from {1}x{2} to {3}x{4}".format(show, forSeason, forEpisode, sceneSeason, sceneEpisode))
show = int(show)
indexer = int(indexer)
forSeason = int(forSeason)
forEpisode = int(forEpisode)
if sceneSeason is not None:
sceneSeason = int(sceneSeason)
if sceneEpisode is not None:
sceneEpisode = int(sceneEpisode)
set_scene_numbering(show, indexer, season=forSeason, episode=forEpisode, sceneSeason=sceneSeason,
sceneEpisode=sceneEpisode)
if show_obj.is_anime:
sn = get_scene_absolute_numbering(show, indexer, forAbsolute)
if sn:
result['sceneAbsolute'] = sn
else:
result['sceneAbsolute'] = None
else:
sn = get_scene_numbering(show, indexer, forSeason, forEpisode)
if sn:
(result['sceneSeason'], result['sceneEpisode']) = sn
else:
(result['sceneSeason'], result['sceneEpisode']) = (None, None)
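        # result is e.g. {'success': True, 'forSeason': '5', 'forEpisode': '10',
        # 'sceneSeason': 5, 'sceneEpisode': 10}, or the forAbsolute/sceneAbsolute
        # variant for anime shows.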
return json.dumps(result)
def retryEpisode(self, show, season, episode, downCurQuality=0):
# retrieve the episode object and fail if we can't get one
ep_obj, error_msg = self._getEpisode(show, season, episode)
if error_msg or not ep_obj:
return json.dumps({'result': 'failure', 'errorMessage': error_msg})
# make a queue item for it and put it on the queue
ep_queue_item = search_queue.FailedQueueItem(ep_obj.show, [ep_obj], bool(int(downCurQuality)))
settings.searchQueueScheduler.action.add_item(ep_queue_item)
if not ep_queue_item.started and ep_queue_item.success is None:
            return json.dumps(
                {'result': 'success'})  # really "queued": the search has not actually started yet
if ep_queue_item.started and ep_queue_item.success is None:
return json.dumps({'result': 'success'})
else:
return json.dumps({'result': 'failure'})
@staticmethod
def fetch_releasegroups(show_name):
logger.info('ReleaseGroups: {0}'.format(show_name))
if helpers.set_up_anidb_connection():
try:
anime = adba.Anime(settings.ADBA_CONNECTION, name=show_name)
groups = anime.get_groups()
logger.info('ReleaseGroups: {0}'.format(groups))
return json.dumps({'result': 'success', 'groups': groups})
except AttributeError as error:
logger.debug('Unable to get ReleaseGroups: {0}'.format(error))
return json.dumps({'result': 'failure'})
|
elpaso/QGIS | refs/heads/master | python/plugins/processing/algs/gdal/proximity.py | 15 | # -*- coding: utf-8 -*-
"""
***************************************************************************
proximity.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsRasterFileWriter,
QgsProcessingException,
QgsProcessingParameterDefinition,
QgsProcessingParameterRasterLayer,
QgsProcessingParameterBand,
QgsProcessingParameterEnum,
QgsProcessingParameterString,
QgsProcessingParameterNumber,
QgsProcessingParameterRasterDestination)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools.system import isWindows
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class proximity(GdalAlgorithm):
INPUT = 'INPUT'
BAND = 'BAND'
VALUES = 'VALUES'
MAX_DISTANCE = 'MAX_DISTANCE'
REPLACE = 'REPLACE'
UNITS = 'UNITS'
NODATA = 'NODATA'
OPTIONS = 'OPTIONS'
EXTRA = 'EXTRA'
DATA_TYPE = 'DATA_TYPE'
OUTPUT = 'OUTPUT'
TYPES = ['Byte', 'Int16', 'UInt16', 'UInt32', 'Int32', 'Float32', 'Float64', 'CInt16', 'CInt32', 'CFloat32', 'CFloat64']
def icon(self):
return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'proximity.png'))
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.distanceUnits = ((self.tr('Georeferenced coordinates'), 'GEO'),
(self.tr('Pixel coordinates'), 'PIXEL'))
self.addParameter(QgsProcessingParameterRasterLayer(self.INPUT,
self.tr('Input layer')))
self.addParameter(QgsProcessingParameterBand(self.BAND,
self.tr('Band number'),
1,
parentLayerParameterName=self.INPUT))
self.addParameter(QgsProcessingParameterString(self.VALUES,
self.tr('A list of pixel values in the source image to be considered target pixels'),
optional=True))
self.addParameter(QgsProcessingParameterEnum(self.UNITS,
self.tr('Distance units'),
options=[i[0] for i in self.distanceUnits],
allowMultiple=False,
defaultValue=1))
self.addParameter(QgsProcessingParameterNumber(self.MAX_DISTANCE,
self.tr('The maximum distance to be generated'),
type=QgsProcessingParameterNumber.Double,
minValue=0.0,
defaultValue=0.0,
optional=True))
self.addParameter(QgsProcessingParameterNumber(self.REPLACE,
self.tr('Value to be applied to all pixels that are within the -maxdist of target pixels'),
type=QgsProcessingParameterNumber.Double,
defaultValue=0.0,
optional=True))
self.addParameter(QgsProcessingParameterNumber(self.NODATA,
self.tr('Nodata value to use for the destination proximity raster'),
type=QgsProcessingParameterNumber.Double,
defaultValue=0.0,
optional=True))
options_param = QgsProcessingParameterString(self.OPTIONS,
self.tr('Additional creation options'),
defaultValue='',
optional=True)
options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
options_param.setMetadata({
'widget_wrapper': {
'class': 'processing.algs.gdal.ui.RasterOptionsWidget.RasterOptionsWidgetWrapper'}})
self.addParameter(options_param)
extra_param = QgsProcessingParameterString(self.EXTRA,
self.tr('Additional command-line parameters'),
defaultValue=None,
optional=True)
extra_param.setFlags(extra_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(extra_param)
dataType_param = QgsProcessingParameterEnum(self.DATA_TYPE,
self.tr('Output data type'),
self.TYPES,
allowMultiple=False,
defaultValue=5)
dataType_param.setFlags(dataType_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
self.addParameter(dataType_param)
self.addParameter(QgsProcessingParameterRasterDestination(self.OUTPUT,
self.tr('Proximity map')))
def name(self):
return 'proximity'
def displayName(self):
return self.tr('Proximity (raster distance)')
def group(self):
return self.tr('Raster analysis')
def groupId(self):
return 'rasteranalysis'
def commandName(self):
return 'gdal_proximity'
def getConsoleCommands(self, parameters, context, feedback, executing=True):
inLayer = self.parameterAsRasterLayer(parameters, self.INPUT, context)
if inLayer is None:
raise QgsProcessingException(self.invalidRasterError(parameters, self.INPUT))
distance = self.parameterAsDouble(parameters, self.MAX_DISTANCE, context)
replaceValue = self.parameterAsDouble(parameters, self.REPLACE, context)
if self.NODATA in parameters and parameters[self.NODATA] is not None:
nodata = self.parameterAsDouble(parameters, self.NODATA, context)
else:
nodata = None
options = self.parameterAsString(parameters, self.OPTIONS, context)
out = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
self.setOutputValue(self.OUTPUT, out)
arguments = []
arguments.append('-srcband')
arguments.append(str(self.parameterAsInt(parameters, self.BAND, context)))
arguments.append('-distunits')
arguments.append(self.distanceUnits[self.parameterAsEnum(parameters, self.UNITS, context)][1])
values = self.parameterAsString(parameters, self.VALUES, context)
if values:
arguments.append('-values')
arguments.append(values)
if distance:
arguments.append('-maxdist')
arguments.append(str(distance))
if nodata is not None:
arguments.append('-nodata')
arguments.append(str(nodata))
if replaceValue:
arguments.append('-fixed-buf-val')
arguments.append(str(replaceValue))
arguments.append('-ot')
arguments.append(self.TYPES[self.parameterAsEnum(parameters, self.DATA_TYPE, context)])
arguments.append('-of')
arguments.append(QgsRasterFileWriter.driverForExtension(os.path.splitext(out)[1]))
if options:
arguments.extend(GdalUtils.parseCreationOptions(options))
if self.EXTRA in parameters and parameters[self.EXTRA] not in (None, ''):
extra = self.parameterAsString(parameters, self.EXTRA, context)
arguments.append(extra)
arguments.append(inLayer.source())
arguments.append(out)
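        # The final command is the tool name plus one escaped argument string,
        # e.g. (illustrative): ['gdal_proximity.py', '-srcband 1 -distunits GEO ... in.tif out.tif']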
if isWindows():
commands = ["python3", "-m", self.commandName()]
else:
commands = [self.commandName() + '.py']
commands.append(GdalUtils.escapeAndJoin(arguments))
return commands
|
abhattad4/Digi-Menu | refs/heads/master | digimenu2/django/contrib/gis/db/models/sql/__init__.py | 127 | from django.contrib.gis.db.models.sql.conversion import AreaField, DistanceField, GeomField, GMLField
__all__ = [
'AreaField', 'DistanceField', 'GeomField', 'GMLField'
]
|
jhg/django | refs/heads/master | django/contrib/gis/db/models/sql/__init__.py | 127 | from django.contrib.gis.db.models.sql.conversion import AreaField, DistanceField, GeomField, GMLField
__all__ = [
'AreaField', 'DistanceField', 'GeomField', 'GMLField'
]
|
bitmonk/fabtools | refs/heads/python3 | fabtools/tests/test_files.py | 9 | import hashlib
import unittest
from mock import patch
import pytest
@patch('fabtools.require.files._mode')
@patch('fabtools.require.files._owner')
@patch('fabtools.require.files.umask')
@patch('fabtools.require.files.put')
@patch('fabtools.require.files.md5sum')
@patch('fabtools.require.files.is_file')
class FilesTestCase(unittest.TestCase):
def _file(self, *args, **kwargs):
""" Proxy to ensure ImportErrors actually cause test failures rather
than trashing the test run entirely """
from fabtools import require
require.files.file(*args, **kwargs)
def test_verify_remote_false(self, is_file, md5sum, put, umask, owner, mode):
""" If verify_remote is set to False, then we should find that
only is_file is used to check for the file's existence. Hashlib's
md5 should not have been called.
"""
is_file.return_value = True
self._file(contents='This is a test', verify_remote=False)
self.assertTrue(is_file.called)
self.assertFalse(md5sum.called)
def test_verify_remote_true(self, is_file, md5sum, put, umask, owner, mode):
""" If verify_remote is True, then we should find that an MD5 hash is
used to work out whether the file is different.
"""
is_file.return_value = True
        md5sum.return_value = hashlib.md5(b'This is a test').hexdigest()
self._file(contents='This is a test', verify_remote=True)
self.assertTrue(is_file.called)
self.assertTrue(md5sum.called)
def test_temp_dir(self, is_file, md5sum, put, umask, owner, mode):
owner.return_value = 'root'
umask.return_value = '0002'
mode.return_value = '0664'
from fabtools import require
require.file('/var/tmp/foo', source=__file__, use_sudo=True, temp_dir='/somewhere')
put.assert_called_with(__file__, '/var/tmp/foo', use_sudo=True, temp_dir='/somewhere')
def test_home_as_temp_dir(self, is_file, md5sum, put, umask, owner, mode):
owner.return_value = 'root'
umask.return_value = '0002'
mode.return_value = '0664'
from fabtools import require
require.file('/var/tmp/foo', source=__file__, use_sudo=True, temp_dir='')
put.assert_called_with(__file__, '/var/tmp/foo', use_sudo=True, temp_dir='')
def test_default_temp_dir(self, is_file, md5sum, put, umask, owner, mode):
owner.return_value = 'root'
umask.return_value = '0002'
mode.return_value = '0664'
from fabtools import require
require.file('/var/tmp/foo', source=__file__, use_sudo=True)
put.assert_called_with(__file__, '/var/tmp/foo', use_sudo=True, temp_dir='/tmp')
class TestUploadTemplate(unittest.TestCase):
@patch('fabtools.files.run')
@patch('fabtools.files._upload_template')
def test_mkdir(self, mock_upload_template, mock_run):
from fabtools.files import upload_template
upload_template('filename', '/path/to/destination', mkdir=True)
args, kwargs = mock_run.call_args
self.assertEqual(args[0], 'mkdir -p /path/to')
@patch('fabtools.files.sudo')
@patch('fabtools.files._upload_template')
def test_mkdir_sudo(self, mock_upload_template, mock_sudo):
from fabtools.files import upload_template
upload_template('filename', '/path/to/destination', mkdir=True, use_sudo=True)
args, kwargs = mock_sudo.call_args
self.assertEqual(args[0], 'mkdir -p /path/to')
self.assertEqual(kwargs['user'], None)
@patch('fabtools.files.sudo')
@patch('fabtools.files._upload_template')
def test_mkdir_sudo_user(self, mock_upload_template, mock_sudo):
from fabtools.files import upload_template
upload_template('filename', '/path/to/destination', mkdir=True, use_sudo=True, user='alice')
args, kwargs = mock_sudo.call_args
self.assertEqual(args[0], 'mkdir -p /path/to')
self.assertEqual(kwargs['user'], 'alice')
@patch('fabtools.files.run_as_root')
@patch('fabtools.files._upload_template')
def test_chown(self, mock_upload_template, mock_run_as_root):
from fabric.api import env
from fabtools.files import upload_template
upload_template('filename', 'destination', chown=True)
args, kwargs = mock_run_as_root.call_args
self.assertEqual(args[0], 'chown %s: destination' % env.user)
@patch('fabtools.files.run_as_root')
@patch('fabtools.files._upload_template')
def test_chown_user(self, mock_upload_template, mock_run_as_root):
from fabtools.files import upload_template
upload_template('filename', 'destination', chown=True, user='alice')
args, kwargs = mock_run_as_root.call_args
self.assertEqual(args[0], 'chown alice: destination')
@patch('fabtools.files._upload_template')
def test_use_jinja_true(self, mock_upload_template):
from fabtools.files import upload_template
upload_template('filename', 'destination', use_jinja=True)
args, kwargs = mock_upload_template.call_args
self.assertEqual(kwargs['use_jinja'], True)
@patch('fabtools.files._upload_template')
def test_use_jinja_false(self, mock_upload_template):
from fabtools.files import upload_template
upload_template('filename', 'destination', use_jinja=False)
args, kwargs = mock_upload_template.call_args
self.assertEqual(kwargs['use_jinja'], False)
@pytest.yield_fixture(scope='module')
def mock_run():
with patch('fabtools.files.run') as mock:
yield mock
def test_copy(mock_run):
from fabtools.files import copy
copy('/tmp/src', '/tmp/dst')
mock_run.assert_called_with('/bin/cp /tmp/src /tmp/dst')
def test_copy_recursive(mock_run):
from fabtools.files import copy
copy('/tmp/src', '/tmp/dst', recursive=True)
mock_run.assert_called_with('/bin/cp -r /tmp/src /tmp/dst')
def test_move(mock_run):
from fabtools.files import move
move('/tmp/src', '/tmp/dst')
mock_run.assert_called_with('/bin/mv /tmp/src /tmp/dst')
def test_symlink(mock_run):
from fabtools.files import symlink
symlink('/tmp/src', '/tmp/dst')
mock_run.assert_called_with('/bin/ln -s /tmp/src /tmp/dst')
def test_remove(mock_run):
from fabtools.files import remove
remove('/tmp/src')
mock_run.assert_called_with('/bin/rm /tmp/src')
def test_remove_recursive(mock_run):
from fabtools.files import remove
remove('/tmp/src', recursive=True)
mock_run.assert_called_with('/bin/rm -r /tmp/src')
|
ravindrapanda/tensorflow | refs/heads/master | tensorflow/python/ops/manip_grad.py | 3 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradients for operators defined in manip_ops.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import manip_ops
@ops.RegisterGradient("Roll")
def _RollGrad(op, grad):
# The gradient is just the roll reversed
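  # e.g. for y = roll(x, shift=2, axis=0) the gradient w.r.t. x is
  # roll(grad, shift=-2, axis=0); the shift and axis inputs get no gradient.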
shift = op.inputs[1]
axis = op.inputs[2]
roll_grad = manip_ops.roll(grad, -shift, axis)
return roll_grad, None, None
|
astrorafael/twisted-mqtt | refs/heads/master | mqtt/client/publisher.py | 1 | # ----------------------------------------------------------------------
# Copyright (C) 2015 by Rafael Gonzalez
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ----------------------------------------------------------------------
# ----------------
# Standard modules
# ----------------
# ----------------
# Twisted modules
# ----------------
from zope.interface import implementer
from twisted.logger import Logger
# -----------
# Own modules
# -----------
from .interfaces import IMQTTPublisher
from .base import IdleState, ConnectingState as BaseConnectingState, ConnectedState as BaseConnectedState
from .pubsubs import MQTTProtocol as PubSubsMQTTProtocol
log = Logger(namespace='mqtt')
# --------------------------------------------------
# MQTT Client Connecting State Class (for publisher)
# --------------------------------------------------
class ConnectingState(BaseConnectingState):
def handleCONNACK(self, response):
self.protocol.handleCONNACK(response)
# The standard allows publishing data without waiting for CONNACK
def publish(self, request):
return self.protocol.doPublish(request)
# ---------------------------------
# MQTT Client Connected State Class
# ---------------------------------
class ConnectedState(BaseConnectedState):
def publish(self, request):
return self.protocol.doPublish(request)
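    # QoS acknowledgement handlers: PUBACK completes a QoS 1 publish, while
    # PUBREC/PUBCOMP are the broker's half of the QoS 2 four-way handshake.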
def handlePUBACK(self, response):
self.protocol.handlePUBACK(response)
def handlePUBREC(self, response):
self.protocol.handlePUBREC(response)
def handlePUBCOMP(self, response):
self.protocol.handlePUBCOMP(response)
# ------------------------
# MQTT Client Protocol Class
# ------------------------
@implementer(IMQTTPublisher)
class MQTTProtocol(PubSubsMQTTProtocol):
'''
Publisher role MQTT Client Protocol
'''
def __init__(self, factory, addr):
PubSubsMQTTProtocol.__init__(self, factory, addr)
# patches the state machine
self.IDLE = IdleState(self)
self.CONNECTING = ConnectingState(self)
self.CONNECTED = ConnectedState(self)
self.state = self.IDLE
__all__ = [ "MQTTProtocol" ]
|
jjmiranda/edx-platform | refs/heads/master | common/lib/xmodule/xmodule/library_content_module.py | 15 | # -*- coding: utf-8 -*-
"""
LibraryContent: The XBlock used to include blocks from a library in a course.
"""
import json
from lxml import etree
from copy import copy
from capa.responsetypes import registry
from gettext import ngettext
from lazy import lazy
from .mako_module import MakoModuleDescriptor
from opaque_keys.edx.locator import LibraryLocator
import random
from webob import Response
from xblock.core import XBlock
from xblock.fields import Scope, String, List, Integer, Boolean
from xblock.fragment import Fragment
from xmodule.validation import StudioValidationMessage, StudioValidation
from xmodule.x_module import XModule, STUDENT_VIEW
from xmodule.studio_editable import StudioEditableModule, StudioEditableDescriptor
from .xml_module import XmlDescriptor
from pkg_resources import resource_string
# Make '_' a no-op so we can scrape strings. Using lambda instead of
# `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
ANY_CAPA_TYPE_VALUE = 'any'
def _get_human_name(problem_class):
"""
Get the human-friendly name for a problem type.
"""
return getattr(problem_class, 'human_name', problem_class.__name__)
def _get_capa_types():
"""
Gets capa types tags and labels
"""
capa_types = {tag: _get_human_name(registry.get_class_for_tag(tag)) for tag in registry.registered_tags()}
return [{'value': ANY_CAPA_TYPE_VALUE, 'display_name': _('Any Type')}] + sorted([
{'value': capa_type, 'display_name': caption}
for capa_type, caption in capa_types.items()
], key=lambda item: item.get('display_name'))
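# For illustration, _get_capa_types() yields the 'Any Type' entry first, followed
# by entries shaped like {'value': '<capa tag>', 'display_name': '<human name>'}
# sorted by display name.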
class LibraryContentFields(object):
"""
Fields for the LibraryContentModule.
Separated out for now because they need to be added to the module and the
descriptor.
"""
# Please note the display_name of each field below is used in
# common/test/acceptance/pages/studio/library.py:StudioLibraryContentXBlockEditModal
# to locate input elements - keep synchronized
display_name = String(
display_name=_("Display Name"),
help=_("Display name for this module"),
default="Randomized Content Block",
scope=Scope.settings,
)
source_library_id = String(
display_name=_("Library"),
help=_("Select the library from which you want to draw content."),
scope=Scope.settings,
values_provider=lambda instance: instance.source_library_values(),
)
source_library_version = String(
# This is a hidden field that stores the version of source_library when we last pulled content from it
display_name=_("Library Version"),
scope=Scope.settings,
)
mode = String(
display_name=_("Mode"),
help=_("Determines how content is drawn from the library"),
default="random",
values=[
{"display_name": _("Choose n at random"), "value": "random"}
# Future addition: Choose a new random set of n every time the student refreshes the block, for self tests
# Future addition: manually selected blocks
],
scope=Scope.settings,
)
max_count = Integer(
display_name=_("Count"),
help=_("Enter the number of components to display to each student."),
default=1,
scope=Scope.settings,
)
capa_type = String(
display_name=_("Problem Type"),
help=_('Choose a problem type to fetch from the library. If "Any Type" is selected no filtering is applied.'),
default=ANY_CAPA_TYPE_VALUE,
values=_get_capa_types(),
scope=Scope.settings,
)
has_score = Boolean(
display_name=_("Scored"),
help=_("Set this value to True if this module is either a graded assignment or a practice problem."),
default=False,
scope=Scope.settings,
)
selected = List(
# This is a list of (block_type, block_id) tuples used to record
# which random/first set of matching blocks was selected per user
default=[],
scope=Scope.user_state,
)
has_children = True
@property
def source_library_key(self):
"""
Convenience method to get the library ID as a LibraryLocator and not just a string
"""
return LibraryLocator.from_string(self.source_library_id)
#pylint: disable=abstract-method
@XBlock.wants('library_tools') # Only needed in studio
class LibraryContentModule(LibraryContentFields, XModule, StudioEditableModule):
"""
An XBlock whose children are chosen dynamically from a content library.
Can be used to create randomized assessments among other things.
Note: technically, all matching blocks from the content library are added
as children of this block, but only a subset of those children are shown to
any particular student.
"""
@classmethod
def make_selection(cls, selected, children, max_count, mode):
"""
Dynamically selects block_ids indicating which of the possible children are displayed to the current user.
Arguments:
selected - list of (block_type, block_id) tuples assigned to this student
children - children of this block
max_count - number of components to display to each student
mode - how content is drawn from the library
Returns:
A dict containing the following keys:
'selected' (set) of (block_type, block_id) tuples assigned to this student
'invalid' (set) of dropped (block_type, block_id) tuples that are no longer valid
'overlimit' (set) of dropped (block_type, block_id) tuples that were previously selected
'added' (set) of newly added (block_type, block_id) tuples
"""
selected = set(tuple(k) for k in selected) # set of (block_type, block_id) tuples assigned to this student
# Determine which of our children we will show:
valid_block_keys = set([(c.block_type, c.block_id) for c in children])
# Remove any selected blocks that are no longer valid:
invalid_block_keys = (selected - valid_block_keys)
if invalid_block_keys:
selected -= invalid_block_keys
# If max_count has been decreased, we may have to drop some previously selected blocks:
overlimit_block_keys = set()
while len(selected) > max_count:
overlimit_block_keys.add(selected.pop())
# Do we have enough blocks now?
num_to_add = max_count - len(selected)
added_block_keys = None
if num_to_add > 0:
# We need to select [more] blocks to display to this user:
pool = valid_block_keys - selected
if mode == "random":
num_to_add = min(len(pool), num_to_add)
added_block_keys = set(random.sample(pool, num_to_add))
# We now have the correct n random children to show for this user.
else:
raise NotImplementedError("Unsupported mode.")
selected |= added_block_keys
return {
'selected': selected,
'invalid': invalid_block_keys,
'overlimit': overlimit_block_keys,
'added': added_block_keys,
}
def _publish_event(self, event_name, result, **kwargs):
"""
Helper method to publish an event for analytics purposes
"""
event_data = {
"location": unicode(self.location),
"result": result,
"previous_count": getattr(self, "_last_event_result_count", len(self.selected)),
"max_count": self.max_count,
}
event_data.update(kwargs)
self.runtime.publish(self, "edx.librarycontentblock.content.{}".format(event_name), event_data)
self._last_event_result_count = len(result) # pylint: disable=attribute-defined-outside-init
@classmethod
def publish_selected_children_events(cls, block_keys, format_block_keys, publish_event):
"""
Helper method for publishing events when children blocks are
selected/updated for a user. This helper is also used by
the ContentLibraryTransformer.
Arguments:
block_keys -
A dict describing which events to publish (add or
remove), see `make_selection` above for format details.
format_block_keys -
A function to convert block keys to the format expected
by publish_event. Must have the signature:
[(block_type, block_id)] -> T
Where T is a collection of block keys as accepted by
`publish_event`.
publish_event -
Function that handles the actual publishing. Must have
the signature:
<'removed'|'assigned'> -> result:T -> removed:T -> reason:basestring -> None
Where T is a collection of block_keys as returned by
`format_block_keys`.
"""
if block_keys['invalid']:
# reason "invalid" means deleted from library or a different library is now being used.
publish_event(
"removed",
result=format_block_keys(block_keys['selected']),
removed=format_block_keys(block_keys['invalid']),
reason="invalid"
)
if block_keys['overlimit']:
publish_event(
"removed",
result=format_block_keys(block_keys['selected']),
removed=format_block_keys(block_keys['overlimit']),
reason="overlimit"
)
if block_keys['added']:
publish_event(
"assigned",
result=format_block_keys(block_keys['selected']),
added=format_block_keys(block_keys['added'])
)
def selected_children(self):
"""
Returns a set() of block_ids indicating which of the possible children
have been selected to display to the current user.
This reads and updates the "selected" field, which has user_state scope.
Note: self.selected and the return value contain block_ids. To get
actual BlockUsageLocators, it is necessary to use self.children,
because the block_ids alone do not specify the block type.
"""
if hasattr(self, "_selected_set"):
# Already done:
return self._selected_set # pylint: disable=access-member-before-definition
block_keys = self.make_selection(self.selected, self.children, self.max_count, "random") # pylint: disable=no-member
# Publish events for analytics purposes:
lib_tools = self.runtime.service(self, 'library_tools')
format_block_keys = lambda keys: lib_tools.create_block_analytics_summary(self.location.course_key, keys)
self.publish_selected_children_events(
block_keys,
format_block_keys,
self._publish_event,
)
# Save our selections to the user state, to ensure consistency:
selected = block_keys['selected']
self.selected = list(selected) # TODO: this doesn't save from the LMS "Progress" page.
# Cache the results
self._selected_set = selected # pylint: disable=attribute-defined-outside-init
return selected
def _get_selected_child_blocks(self):
"""
Generator returning XBlock instances of the children selected for the
current user.
"""
for block_type, block_id in self.selected_children():
yield self.runtime.get_block(self.location.course_key.make_usage_key(block_type, block_id))
def student_view(self, context):
fragment = Fragment()
contents = []
child_context = {} if not context else copy(context)
for child in self._get_selected_child_blocks():
for displayable in child.displayable_items():
rendered_child = displayable.render(STUDENT_VIEW, child_context)
fragment.add_frag_resources(rendered_child)
contents.append({
'id': displayable.location.to_deprecated_string(),
'content': rendered_child.content,
})
fragment.add_content(self.system.render_template('vert_module.html', {
'items': contents,
'xblock_context': context,
'show_bookmark_button': False,
}))
return fragment
def validate(self):
"""
Validates the state of this Library Content Module Instance.
"""
return self.descriptor.validate()
def author_view(self, context):
"""
Renders the Studio views.
Normal studio view: If block is properly configured, displays library status summary
Studio container view: displays a preview of all possible children.
"""
fragment = Fragment()
root_xblock = context.get('root_xblock')
is_root = root_xblock and root_xblock.location == self.location
if is_root:
# User has clicked the "View" link. Show a preview of all possible children:
if self.children: # pylint: disable=no-member
fragment.add_content(self.system.render_template("library-block-author-preview-header.html", {
'max_count': self.max_count,
'display_name': self.display_name or self.url_name,
}))
context['can_edit_visibility'] = False
self.render_children(context, fragment, can_reorder=False, can_add=False)
# else: When shown on a unit page, don't show any sort of preview -
# just the status of this block in the validation area.
# The following JS is used to make the "Update now" button work on the unit page and the container view:
fragment.add_javascript_url(self.runtime.local_resource_url(self, 'public/js/library_content_edit.js'))
fragment.initialize_js('LibraryContentAuthorView')
return fragment
def get_child_descriptors(self):
"""
Return only the subset of our children relevant to the current student.
"""
return list(self._get_selected_child_blocks())
@XBlock.wants('user')
@XBlock.wants('library_tools') # Only needed in studio
@XBlock.wants('studio_user_permissions') # Only available in studio
class LibraryContentDescriptor(LibraryContentFields, MakoModuleDescriptor, XmlDescriptor, StudioEditableDescriptor):
"""
Descriptor class for LibraryContentModule XBlock.
"""
resources_dir = 'assets/library_content'
module_class = LibraryContentModule
mako_template = 'widgets/metadata-edit.html'
js = {'coffee': [resource_string(__name__, 'js/src/vertical/edit.coffee')]}
js_module_name = "VerticalDescriptor"
show_in_read_only_mode = True
@property
def non_editable_metadata_fields(self):
non_editable_fields = super(LibraryContentDescriptor, self).non_editable_metadata_fields
# The only supported mode is currently 'random'.
# Add the mode field to non_editable_metadata_fields so that it doesn't
# render in the edit form.
non_editable_fields.extend([LibraryContentFields.mode, LibraryContentFields.source_library_version])
return non_editable_fields
@lazy
def tools(self):
"""
Grab the library tools service or raise an error.
"""
return self.runtime.service(self, 'library_tools')
def get_user_id(self):
"""
Get the ID of the current user.
"""
user_service = self.runtime.service(self, 'user')
if user_service:
# May be None when creating bok choy test fixtures
user_id = user_service.get_current_user().opt_attrs.get('edx-platform.user_id', None)
else:
user_id = None
return user_id
@XBlock.handler
def refresh_children(self, request=None, suffix=None): # pylint: disable=unused-argument
"""
Refresh children:
This method is to be used when any of the libraries that this block
references have been updated. It will re-fetch all matching blocks from
the libraries, and copy them as children of this block. The children
will be given new block_ids, but the definition ID used should be the
exact same definition ID used in the library.
This method will update this block's 'source_library_id' field to store
the version number of the libraries used, so we easily determine if
this block is up to date or not.
"""
user_perms = self.runtime.service(self, 'studio_user_permissions')
user_id = self.get_user_id()
if not self.tools:
return Response("Library Tools unavailable in current runtime.", status=400)
self.tools.update_children(self, user_id, user_perms)
return Response()
# Copy over any overridden settings the course author may have applied to the blocks.
def _copy_overrides(self, store, user_id, source, dest):
"""
Copy any overrides the user has made on blocks in this library.
"""
for field in source.fields.itervalues():
if field.scope == Scope.settings and field.is_set_on(source):
setattr(dest, field.name, field.read_from(source))
if source.has_children:
source_children = [self.runtime.get_block(source_key) for source_key in source.children]
dest_children = [self.runtime.get_block(dest_key) for dest_key in dest.children]
for source_child, dest_child in zip(source_children, dest_children):
self._copy_overrides(store, user_id, source_child, dest_child)
store.update_item(dest, user_id)
def studio_post_duplicate(self, store, source_block):
"""
Used by the studio after basic duplication of a source block. We handle the children
ourselves, because we have to properly reference the library upstream and set the overrides.
Otherwise we'll end up losing data on the next refresh.
"""
# The first task will be to refresh our copy of the library to generate the children.
# We must do this at the currently set version of the library block. Otherwise we may not have
# exactly the same children-- someone may be duplicating an out of date block, after all.
user_id = self.get_user_id()
user_perms = self.runtime.service(self, 'studio_user_permissions')
if not self.tools:
raise RuntimeError("Library tools unavailable, duplication will not be sane!")
self.tools.update_children(self, user_id, user_perms, version=self.source_library_version)
self._copy_overrides(store, user_id, source_block, self)
# Children have been handled.
return True
def _validate_library_version(self, validation, lib_tools, version, library_key):
"""
Validates library version
"""
latest_version = lib_tools.get_library_version(library_key)
if latest_version is not None:
if version is None or version != unicode(latest_version):
validation.set_summary(
StudioValidationMessage(
StudioValidationMessage.WARNING,
_(u'This component is out of date. The library has new content.'),
# TODO: change this to action_runtime_event='...' once the unit page supports that feature.
# See https://openedx.atlassian.net/browse/TNL-993
action_class='library-update-btn',
# Translators: {refresh_icon} placeholder is substituted to "↻" (without double quotes)
action_label=_(u"{refresh_icon} Update now.").format(refresh_icon=u"↻")
)
)
return False
else:
validation.set_summary(
StudioValidationMessage(
StudioValidationMessage.ERROR,
_(u'Library is invalid, corrupt, or has been deleted.'),
action_class='edit-button',
action_label=_(u"Edit Library List.")
)
)
return False
return True
def _set_validation_error_if_empty(self, validation, summary):
""" Helper method to only set validation summary if it's empty """
if validation.empty:
validation.set_summary(summary)
def validate(self):
"""
Validates the state of this Library Content Module Instance. This
is the override of the general XBlock method, and it will also ask
its superclass to validate.
"""
validation = super(LibraryContentDescriptor, self).validate()
if not isinstance(validation, StudioValidation):
validation = StudioValidation.copy(validation)
library_tools = self.runtime.service(self, "library_tools")
if not (library_tools and library_tools.can_use_library_content(self)):
validation.set_summary(
StudioValidationMessage(
StudioValidationMessage.ERROR,
_(
u"This course does not support content libraries. "
u"Contact your system administrator for more information."
)
)
)
return validation
if not self.source_library_id:
validation.set_summary(
StudioValidationMessage(
StudioValidationMessage.NOT_CONFIGURED,
_(u"A library has not yet been selected."),
action_class='edit-button',
action_label=_(u"Select a Library.")
)
)
return validation
lib_tools = self.runtime.service(self, 'library_tools')
self._validate_library_version(validation, lib_tools, self.source_library_version, self.source_library_key)
# Note: we assume refresh_children() has been called
# since the last time fields like source_library_id or capa_types were changed.
matching_children_count = len(self.children) # pylint: disable=no-member
if matching_children_count == 0:
self._set_validation_error_if_empty(
validation,
StudioValidationMessage(
StudioValidationMessage.WARNING,
_(u'There are no matching problem types in the specified libraries.'),
action_class='edit-button',
action_label=_(u"Select another problem type.")
)
)
if matching_children_count < self.max_count:
self._set_validation_error_if_empty(
validation,
StudioValidationMessage(
StudioValidationMessage.WARNING,
(
ngettext(
u'The specified library is configured to fetch {count} problem, ',
u'The specified library is configured to fetch {count} problems, ',
self.max_count
) +
ngettext(
u'but there is only {actual} matching problem.',
u'but there are only {actual} matching problems.',
matching_children_count
)
).format(count=self.max_count, actual=matching_children_count),
action_class='edit-button',
action_label=_(u"Edit the library configuration.")
)
)
return validation
def source_library_values(self):
"""
Return a list of possible values for self.source_library_id
"""
lib_tools = self.runtime.service(self, 'library_tools')
user_perms = self.runtime.service(self, 'studio_user_permissions')
all_libraries = [
(key, name) for key, name in lib_tools.list_available_libraries()
if user_perms.can_read(key) or self.source_library_id == unicode(key)
]
all_libraries.sort(key=lambda entry: entry[1]) # Sort by name
if self.source_library_id and self.source_library_key not in [entry[0] for entry in all_libraries]:
all_libraries.append((self.source_library_id, _(u"Invalid Library")))
all_libraries = [(u"", _("No Library Selected"))] + all_libraries
values = [{"display_name": name, "value": unicode(key)} for key, name in all_libraries]
return values
def editor_saved(self, user, old_metadata, old_content):
"""
If source_library_id or capa_type has been edited, refresh_children automatically.
"""
old_source_library_id = old_metadata.get('source_library_id', [])
if (old_source_library_id != self.source_library_id or
old_metadata.get('capa_type', ANY_CAPA_TYPE_VALUE) != self.capa_type):
try:
self.refresh_children()
except ValueError:
pass # The validation area will display an error message, no need to do anything now.
def has_dynamic_children(self):
"""
Inform the runtime that our children vary per-user.
See get_child_descriptors() above
"""
return True
def get_content_titles(self):
"""
        Returns list of friendly titles for our selected children only; without
        this, all possible children's titles would be seen in the sequence bar in
        the LMS.
This overwrites the get_content_titles method included in x_module by default.
"""
titles = []
for child in self._xmodule.get_child_descriptors():
titles.extend(child.get_content_titles())
return titles
@classmethod
def definition_from_xml(cls, xml_object, system):
children = [
system.process_xml(etree.tostring(child)).scope_ids.usage_id
for child in xml_object.getchildren()
]
        definition = {
            attr_name: json.loads(attr_value)
            for attr_name, attr_value in xml_object.attrib.items()
        }
return definition, children
def definition_to_xml(self, resource_fs):
""" Exports Library Content Module to XML """
xml_object = etree.Element('library_content')
for child in self.get_children():
self.runtime.add_block_as_child_node(child, xml_object)
# Set node attributes based on our fields.
for field_name, field in self.fields.iteritems():
if field_name in ('children', 'parent', 'content'):
continue
if field.is_set_on(self):
xml_object.set(field_name, unicode(field.read_from(self)))
return xml_object
|
yongshengwang/hue | refs/heads/master | desktop/core/ext-py/lxml/src/lxml/html/tests/test_xhtml.py | 48 | import unittest, sys
from lxml.tests.common_imports import make_doctest
import lxml.html
def test_suite():
suite = unittest.TestSuite()
suite.addTests([make_doctest('test_xhtml.txt')])
return suite
if __name__ == '__main__':
unittest.main()
|
TifaSky/zenoss_midrest | refs/heads/master | app/utils/db_manager.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
File: db_manager.py
Author: fangfei
Email: fangfei@youku.com
Description:
"""
import os
import sys
import logging
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, parentdir)
import MySQLdb
import memcache
from utils.switch_env import config
import setting
logger = logging.getLogger('app')
class DataManager(object):
"""connect to mysql
"""
def __init__(self):
self._loadMysqlConfig()
self._loadMemcacheConfig()
def _loadMysqlConfig(self):
self.dbhost = config.DB_HOST
self.dbport = config.DB_PORT
self.dbuser = config.DB_USER
self.dbname = config.DB_NAME
self.passwd = 'YW20080707yw'
def _loadMemcacheConfig(self):
self.memhost = '127.0.0.1'
self.memport = 11211
def _connect(self):
try:
self.conn = MySQLdb.connect(db=self.dbname, user=self.dbuser, passwd=self.passwd,
host=self.dbhost, port=int(self.dbport))
except Exception, e:
print e
logger.warn(e)
def _disconnect(self):
self.conn.close()
def queryDB(self, sql, sqlPara=None):
dataRtn = {'flag': 0, 'message': []}
try:
self._connect()
self.cur = self.conn.cursor()
self.cur.execute(sql, sqlPara)
data = self.cur.fetchall()
dataRtn = {'flag': 0, 'message': data}
except Exception, e:
logger.warn(e)
return {'flag': 1, 'message': str(e)}
finally:
self.cur.close()
self._disconnect()
return dataRtn
def queryCache(self, key_str):
cache_uri = "%s:%s" % (self.memhost, self.memport)
dataRtn = None
try:
self.mc = memcache.Client([cache_uri], debug=0)
dataRtn = self.mc.get(key_str)
except Exception, e:
print e
logger.warn(e)
return dataRtn
def getPathFromUuid(self, uuid_str):
dataRtn = None
cache_rs = self.queryCache(uuid_str)
if cache_rs is None:
sql = """ select path, name, key_error
from uuid_to_pathname
where uuid = %s
"""
rs_sql = self.queryDB(sql, (uuid_str,))
if rs_sql['flag'] == 0 and len(rs_sql['message']) > 0:
rs_path = rs_sql['message'][0][0]
rs_name = rs_sql['message'][0][1]
self.mc.set(uuid_str, rs_path)
if str(rs_name).strip() != '':
self.mc.set(rs_path, rs_name)
dataRtn = rs_path
return dataRtn
def getNameFromPath(self, path_str):
dataRtn = None
cache_rs = self.queryCache(path_str)
if cache_rs is None:
sql = """ select path, name, key_error, uuid
from uuid_to_pathname
where path = %s
"""
rs_sql = self.queryDB(sql, (path_str,))
if rs_sql['flag'] == 0 and len(rs_sql['message']) > 0:
rs_path = rs_sql['message'][0][0]
rs_name = rs_sql['message'][0][1]
uuid_str = rs_sql['message'][0][3]
self.mc.set(uuid_str, rs_path)
if str(rs_name).strip() != '':
self.mc.set(rs_path, rs_name)
dataRtn = rs_name
return dataRtn
def getNameFromUuid(self, uuid_str):
rs_path = self.getPathFromUuid(uuid_str)
if rs_path is not None:
cache_rs = self.queryCache(rs_path)
if cache_rs is not None:
return cache_rs
return None
|
40223220/cd0504 | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/tokenize.py | 728 | """Tokenization help for Python programs.
tokenize(readline) is a generator that breaks a stream of bytes into
Python tokens. It decodes the bytes according to PEP-0263 for
determining source file encoding.
It accepts a readline-like method which is called repeatedly to get the
next line of input (or b"" for EOF). It generates 5-tuples with these
members:
the token type (see token.py)
the token (a string)
the starting (row, column) indices of the token (a 2-tuple of ints)
the ending (row, column) indices of the token (a 2-tuple of ints)
the original line (string)
It is designed to match the working of the Python tokenizer exactly, except
that it produces COMMENT tokens for comments and gives type OP for all
operators. Additionally, all token lists start with an ENCODING token
which tells you which encoding was used to decode the bytes stream.
"""
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
'Skip Montanaro, Raymond Hettinger, Trent Nelson, '
'Michael Foord')
import builtins
import re
import sys
from token import *
from codecs import lookup, BOM_UTF8
import collections
from io import TextIOWrapper
cookie_re = re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)', re.ASCII)
import token
__all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
"NL", "untokenize", "ENCODING", "TokenInfo"]
del token
COMMENT = N_TOKENS
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1
tok_name[NL] = 'NL'
ENCODING = N_TOKENS + 2
tok_name[ENCODING] = 'ENCODING'
N_TOKENS += 3
EXACT_TOKEN_TYPES = {
'(': LPAR,
')': RPAR,
'[': LSQB,
']': RSQB,
':': COLON,
',': COMMA,
';': SEMI,
'+': PLUS,
'-': MINUS,
'*': STAR,
'/': SLASH,
'|': VBAR,
'&': AMPER,
'<': LESS,
'>': GREATER,
'=': EQUAL,
'.': DOT,
'%': PERCENT,
'{': LBRACE,
'}': RBRACE,
'==': EQEQUAL,
'!=': NOTEQUAL,
'<=': LESSEQUAL,
'>=': GREATEREQUAL,
'~': TILDE,
'^': CIRCUMFLEX,
'<<': LEFTSHIFT,
'>>': RIGHTSHIFT,
'**': DOUBLESTAR,
'+=': PLUSEQUAL,
'-=': MINEQUAL,
'*=': STAREQUAL,
'/=': SLASHEQUAL,
'%=': PERCENTEQUAL,
'&=': AMPEREQUAL,
'|=': VBAREQUAL,
'^=': CIRCUMFLEXEQUAL,
'<<=': LEFTSHIFTEQUAL,
'>>=': RIGHTSHIFTEQUAL,
'**=': DOUBLESTAREQUAL,
'//': DOUBLESLASH,
'//=': DOUBLESLASHEQUAL,
'@': AT
}
class TokenInfo(collections.namedtuple('TokenInfo', 'type string start end line')):
def __repr__(self):
annotated_type = '%d (%s)' % (self.type, tok_name[self.type])
return ('TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)' %
self._replace(type=annotated_type))
@property
def exact_type(self):
if self.type == OP and self.string in EXACT_TOKEN_TYPES:
return EXACT_TOKEN_TYPES[self.string]
else:
return self.type
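    # e.g. an OP token whose string is '+' reports exact_type PLUS, while NAME
    # and NUMBER tokens simply report their own type.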
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'
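# e.g. group('a', 'b') == '(a|b)', any('x') == '(x)*', maybe('x') == '(x)?'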
# Note: we use unicode matching for names ("\w") but ascii matching for
# number literals.
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'\w+'
Hexnumber = r'0[xX][0-9a-fA-F]+'
Binnumber = r'0[bB][01]+'
Octnumber = r'0[oO][0-7]+'
Decnumber = r'(?:0+|[1-9][0-9]*)'
Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?[0-9]+'
Pointfloat = group(r'[0-9]+\.[0-9]*', r'\.[0-9]+') + maybe(Exponent)
Expfloat = r'[0-9]+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'[0-9]+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)
StringPrefix = r'(?:[bB][rR]?|[rR][bB]?|[uU])?'
# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group(StringPrefix + "'''", StringPrefix + '"""')
# Single-line ' or " string.
String = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=",
r"//=?", r"->",
r"[+\-*/%&|^=<>]=?",
r"~")
Bracket = '[][(){}]'
Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken
# First (or only) line of ' or " string.
ContStr = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
group("'", r'\\\r?\n'),
StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n|\Z', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
def _compile(expr):
return re.compile(expr, re.UNICODE)
endpats = {"'": Single, '"': Double,
"'''": Single3, '"""': Double3,
"r'''": Single3, 'r"""': Double3,
"b'''": Single3, 'b"""': Double3,
"R'''": Single3, 'R"""': Double3,
"B'''": Single3, 'B"""': Double3,
"br'''": Single3, 'br"""': Double3,
"bR'''": Single3, 'bR"""': Double3,
"Br'''": Single3, 'Br"""': Double3,
"BR'''": Single3, 'BR"""': Double3,
"rb'''": Single3, 'rb"""': Double3,
"Rb'''": Single3, 'Rb"""': Double3,
"rB'''": Single3, 'rB"""': Double3,
"RB'''": Single3, 'RB"""': Double3,
"u'''": Single3, 'u"""': Double3,
"R'''": Single3, 'R"""': Double3,
"U'''": Single3, 'U"""': Double3,
'r': None, 'R': None, 'b': None, 'B': None,
'u': None, 'U': None}
triple_quoted = {}
for t in ("'''", '"""',
"r'''", 'r"""', "R'''", 'R"""',
"b'''", 'b"""', "B'''", 'B"""',
"br'''", 'br"""', "Br'''", 'Br"""',
"bR'''", 'bR"""', "BR'''", 'BR"""',
"rb'''", 'rb"""', "rB'''", 'rB"""',
"Rb'''", 'Rb"""', "RB'''", 'RB"""',
"u'''", 'u"""', "U'''", 'U"""',
):
triple_quoted[t] = t
single_quoted = {}
for t in ("'", '"',
"r'", 'r"', "R'", 'R"',
"b'", 'b"', "B'", 'B"',
"br'", 'br"', "Br'", 'Br"',
"bR'", 'bR"', "BR'", 'BR"' ,
"rb'", 'rb"', "rB'", 'rB"',
"Rb'", 'Rb"', "RB'", 'RB"' ,
"u'", 'u"', "U'", 'U"',
):
single_quoted[t] = t
tabsize = 8
class TokenError(Exception): pass
class StopTokenizing(Exception): pass
class Untokenizer:
def __init__(self):
self.tokens = []
self.prev_row = 1
self.prev_col = 0
self.encoding = None
def add_whitespace(self, start):
row, col = start
assert row <= self.prev_row
col_offset = col - self.prev_col
if col_offset:
self.tokens.append(" " * col_offset)
def untokenize(self, iterable):
for t in iterable:
if len(t) == 2:
self.compat(t, iterable)
break
tok_type, token, start, end, line = t
if tok_type == ENCODING:
self.encoding = token
continue
self.add_whitespace(start)
self.tokens.append(token)
self.prev_row, self.prev_col = end
if tok_type in (NEWLINE, NL):
self.prev_row += 1
self.prev_col = 0
return "".join(self.tokens)
def compat(self, token, iterable):
startline = False
indents = []
toks_append = self.tokens.append
toknum, tokval = token
if toknum in (NAME, NUMBER):
tokval += ' '
if toknum in (NEWLINE, NL):
startline = True
prevstring = False
for tok in iterable:
toknum, tokval = tok[:2]
if toknum == ENCODING:
self.encoding = tokval
continue
if toknum in (NAME, NUMBER):
tokval += ' '
# Insert a space between two consecutive strings
if toknum == STRING:
if prevstring:
tokval = ' ' + tokval
prevstring = True
else:
prevstring = False
if toknum == INDENT:
indents.append(tokval)
continue
elif toknum == DEDENT:
indents.pop()
continue
elif toknum in (NEWLINE, NL):
startline = True
elif startline and indents:
toks_append(indents[-1])
startline = False
toks_append(tokval)
def untokenize(iterable):
"""Transform tokens back into Python source code.
It returns a bytes object, encoded using the ENCODING
token, which is the first token sequence output by tokenize.
Each element returned by the iterable must be a token sequence
with at least two elements, a token number and token value. If
only two tokens are passed, the resulting output is poor.
Round-trip invariant for full input:
Untokenized source will match input source exactly
    Round-trip invariant for limited input:
        # Output bytes will tokenize back to the input
t1 = [tok[:2] for tok in tokenize(f.readline)]
newcode = untokenize(t1)
readline = BytesIO(newcode).readline
t2 = [tok[:2] for tok in tokenize(readline)]
assert t1 == t2
"""
ut = Untokenizer()
out = ut.untokenize(iterable)
if ut.encoding is not None:
out = out.encode(ut.encoding)
return out
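# Illustrative round-trip, assuming `code` is a bytes object holding source:
#
#     from io import BytesIO
#     toks = list(tokenize(BytesIO(code).readline))
#     assert untokenize(toks) == code        # full 5-tuples: exact match
#     pairs = [t[:2] for t in toks]
#     out2 = untokenize(pairs)               # 2-tuples: only guaranteed to
#                                            # re-tokenize to the same pairs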
def _get_normal_name(orig_enc):
"""Imitates get_normal_name in tokenizer.c."""
# Only care about the first 12 characters.
enc = orig_enc[:12].lower().replace("_", "-")
if enc == "utf-8" or enc.startswith("utf-8-"):
return "utf-8"
if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
return "iso-8859-1"
return orig_enc
def detect_encoding(readline):
"""
The detect_encoding() function is used to detect the encoding that should
    be used to decode a Python source file. It requires one argument, readline,
in the same way as the tokenize() generator.
It will call readline a maximum of twice, and return the encoding used
(as a string) and a list of any lines (left as bytes) it has read in.
It detects the encoding from the presence of a utf-8 bom or an encoding
cookie as specified in pep-0263. If both a bom and a cookie are present,
but disagree, a SyntaxError will be raised. If the encoding cookie is an
invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
'utf-8-sig' is returned.
If no encoding is specified, then the default of 'utf-8' will be returned.
"""
try:
filename = readline.__self__.name
except AttributeError:
filename = None
bom_found = False
encoding = None
default = 'utf-8'
def read_or_stop():
try:
return readline()
except StopIteration:
return b''
def find_cookie(line):
try:
# Decode as UTF-8. Either the line is an encoding declaration,
# in which case it should be pure ASCII, or it must be UTF-8
# per default encoding.
line_string = line.decode('utf-8')
except UnicodeDecodeError:
msg = "invalid or missing encoding declaration"
if filename is not None:
msg = '{} for {!r}'.format(msg, filename)
raise SyntaxError(msg)
match = cookie_re.match(line_string)
if not match:
return None
encoding = _get_normal_name(match.group(1))
try:
codec = lookup(encoding)
except LookupError:
# This behaviour mimics the Python interpreter
if filename is None:
msg = "unknown encoding: " + encoding
else:
msg = "unknown encoding for {!r}: {}".format(filename,
encoding)
raise SyntaxError(msg)
if bom_found:
if encoding != 'utf-8':
# This behaviour mimics the Python interpreter
if filename is None:
msg = 'encoding problem: utf-8'
else:
msg = 'encoding problem for {!r}: utf-8'.format(filename)
raise SyntaxError(msg)
encoding += '-sig'
return encoding
first = read_or_stop()
if first.startswith(BOM_UTF8):
bom_found = True
first = first[3:]
default = 'utf-8-sig'
if not first:
return default, []
encoding = find_cookie(first)
if encoding:
return encoding, [first]
second = read_or_stop()
if not second:
return default, [first]
encoding = find_cookie(second)
if encoding:
return encoding, [first, second]
return default, [first, second]
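# Illustrative use (this mirrors what tokenize() and open() do below; the
# filename is assumed for the example):
#
#     with builtins.open('example.py', 'rb') as f:
#         enc, consumed = detect_encoding(f.readline)
#     # enc is e.g. 'utf-8', 'utf-8-sig' or 'iso-8859-1'; `consumed` holds
#     # the raw byte lines (at most two) read while sniffing the cookie.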
def open(filename):
"""Open a file in read only mode using the encoding detected by
detect_encoding().
"""
buffer = builtins.open(filename, 'rb')
encoding, lines = detect_encoding(buffer.readline)
buffer.seek(0)
text = TextIOWrapper(buffer, encoding, line_buffering=True)
text.mode = 'r'
return text
def tokenize(readline):
"""
    The tokenize() generator requires one argument, readline, which
must be a callable object which provides the same interface as the
readline() method of built-in file objects. Each call to the function
should return one line of input as bytes. Alternately, readline
can be a callable function terminating with StopIteration:
readline = open(myfile, 'rb').__next__ # Example of alternate readline
The generator produces 5-tuples with these members: the token type; the
token string; a 2-tuple (srow, scol) of ints specifying the row and
column where the token begins in the source; a 2-tuple (erow, ecol) of
ints specifying the row and column where the token ends in the source;
and the line on which the token was found. The line passed is the
logical line; continuation lines are included.
The first token sequence will always be an ENCODING token
which tells you which encoding was used to decode the bytes stream.
"""
# This import is here to avoid problems when the itertools module is not
# built yet and tokenize is imported.
from itertools import chain, repeat
encoding, consumed = detect_encoding(readline)
rl_gen = iter(readline, b"")
empty = repeat(b"")
return _tokenize(chain(consumed, rl_gen, empty).__next__, encoding)
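# Illustrative example of consuming the generator:
#
#     from io import BytesIO
#     for tok in tokenize(BytesIO(b'x = 1\n').readline):
#         print(tok.type, tok.string)
#
# This prints an ENCODING token ('utf-8') first, then NAME 'x', OP '=',
# NUMBER '1', NEWLINE, and finally ENDMARKER.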
def _tokenize(readline, encoding):
lnum = parenlev = continued = 0
numchars = '0123456789'
contstr, needcont = '', 0
contline = None
indents = [0]
if encoding is not None:
if encoding == "utf-8-sig":
# BOM will already have been stripped.
encoding = "utf-8"
yield TokenInfo(ENCODING, encoding, (0, 0), (0, 0), '')
while True: # loop over lines in stream
try:
line = readline()
except StopIteration:
line = b''
if encoding is not None:
line = line.decode(encoding)
lnum += 1
pos, max = 0, len(line)
if contstr: # continued string
if not line:
raise TokenError("EOF in multi-line string", strstart)
endmatch = endprog.match(line)
if endmatch:
pos = end = endmatch.end(0)
yield TokenInfo(STRING, contstr + line[:end],
strstart, (lnum, end), contline + line)
contstr, needcont = '', 0
contline = None
elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
yield TokenInfo(ERRORTOKEN, contstr + line,
strstart, (lnum, len(line)), contline)
contstr = ''
contline = None
continue
else:
contstr = contstr + line
contline = contline + line
continue
elif parenlev == 0 and not continued: # new statement
if not line: break
column = 0
while pos < max: # measure leading whitespace
if line[pos] == ' ':
column += 1
elif line[pos] == '\t':
column = (column//tabsize + 1)*tabsize
elif line[pos] == '\f':
column = 0
else:
break
pos += 1
if pos == max:
break
if line[pos] in '#\r\n': # skip comments or blank lines
if line[pos] == '#':
comment_token = line[pos:].rstrip('\r\n')
nl_pos = pos + len(comment_token)
yield TokenInfo(COMMENT, comment_token,
(lnum, pos), (lnum, pos + len(comment_token)), line)
yield TokenInfo(NL, line[nl_pos:],
(lnum, nl_pos), (lnum, len(line)), line)
else:
yield TokenInfo((NL, COMMENT)[line[pos] == '#'], line[pos:],
(lnum, pos), (lnum, len(line)), line)
continue
if column > indents[-1]: # count indents or dedents
indents.append(column)
yield TokenInfo(INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
while column < indents[-1]:
if column not in indents:
raise IndentationError(
"unindent does not match any outer indentation level",
("<tokenize>", lnum, pos, line))
indents = indents[:-1]
yield TokenInfo(DEDENT, '', (lnum, pos), (lnum, pos), line)
else: # continued statement
if not line:
raise TokenError("EOF in multi-line statement", (lnum, 0))
continued = 0
while pos < max:
pseudomatch = _compile(PseudoToken).match(line, pos)
if pseudomatch: # scan for tokens
start, end = pseudomatch.span(1)
spos, epos, pos = (lnum, start), (lnum, end), end
if start == end:
continue
token, initial = line[start:end], line[start]
if (initial in numchars or # ordinary number
(initial == '.' and token != '.' and token != '...')):
yield TokenInfo(NUMBER, token, spos, epos, line)
elif initial in '\r\n':
yield TokenInfo(NL if parenlev > 0 else NEWLINE,
token, spos, epos, line)
elif initial == '#':
assert not token.endswith("\n")
yield TokenInfo(COMMENT, token, spos, epos, line)
elif token in triple_quoted:
endprog = _compile(endpats[token])
endmatch = endprog.match(line, pos)
if endmatch: # all on one line
pos = endmatch.end(0)
token = line[start:pos]
yield TokenInfo(STRING, token, spos, (lnum, pos), line)
else:
strstart = (lnum, start) # multiple lines
contstr = line[start:]
contline = line
break
elif initial in single_quoted or \
token[:2] in single_quoted or \
token[:3] in single_quoted:
if token[-1] == '\n': # continued string
strstart = (lnum, start)
endprog = _compile(endpats[initial] or
endpats[token[1]] or
endpats[token[2]])
contstr, needcont = line[start:], 1
contline = line
break
else: # ordinary string
yield TokenInfo(STRING, token, spos, epos, line)
elif initial.isidentifier(): # ordinary name
yield TokenInfo(NAME, token, spos, epos, line)
elif initial == '\\': # continued stmt
continued = 1
else:
if initial in '([{':
parenlev += 1
elif initial in ')]}':
parenlev -= 1
yield TokenInfo(OP, token, spos, epos, line)
else:
yield TokenInfo(ERRORTOKEN, line[pos],
(lnum, pos), (lnum, pos+1), line)
pos += 1
for indent in indents[1:]: # pop remaining indent levels
yield TokenInfo(DEDENT, '', (lnum, 0), (lnum, 0), '')
yield TokenInfo(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
# An undocumented, backwards compatible, API for all the places in the standard
# library that expect to be able to use tokenize with strings
def generate_tokens(readline):
return _tokenize(readline, None)
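# Illustrative: unlike tokenize(), generate_tokens() consumes str lines and
# emits no ENCODING token, which suits already-decoded input:
#
#     import io
#     toks = list(generate_tokens(io.StringIO('a + b\n').readline))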
def main():
import argparse
# Helper error handling routines
def perror(message):
print(message, file=sys.stderr)
def error(message, filename=None, location=None):
if location:
args = (filename,) + location + (message,)
perror("%s:%d:%d: error: %s" % args)
elif filename:
perror("%s: error: %s" % (filename, message))
else:
perror("error: %s" % message)
sys.exit(1)
# Parse the arguments and options
parser = argparse.ArgumentParser(prog='python -m tokenize')
parser.add_argument(dest='filename', nargs='?',
metavar='filename.py',
help='the file to tokenize; defaults to stdin')
parser.add_argument('-e', '--exact', dest='exact', action='store_true',
help='display token names using the exact type')
args = parser.parse_args()
try:
# Tokenize the input
if args.filename:
filename = args.filename
with builtins.open(filename, 'rb') as f:
tokens = list(tokenize(f.readline))
else:
filename = "<stdin>"
tokens = _tokenize(sys.stdin.readline, None)
# Output the tokenization
for token in tokens:
token_type = token.type
if args.exact:
token_type = token.exact_type
token_range = "%d,%d-%d,%d:" % (token.start + token.end)
print("%-20s%-15s%-15r" %
(token_range, tok_name[token_type], token.string))
except IndentationError as err:
line, column = err.args[1][1:3]
error(err.args[0], filename, (line, column))
except TokenError as err:
line, column = err.args[1]
error(err.args[0], filename, (line, column))
except SyntaxError as err:
error(err, filename)
except IOError as err:
error(err)
except KeyboardInterrupt:
print("interrupted\n")
except Exception as err:
perror("unexpected error: %s" % err)
raise
if __name__ == "__main__":
main()
|
Tejeshwarabm/Westwood | refs/heads/master | src/antenna/bindings/modulegen__gcc_ILP32.py | 38 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.antenna', cpp_namespace='::ns3')
return root_module
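# Illustrative driver (the real one sits at the bottom of generated
# modulegen files; shown here only to make the flow visible):
#
#     out = FileCodeSink(sys.stdout)
#     root_module = module_init()
#     register_types(root_module)
#     register_methods(root_module)
#     root_module.generate(out)   # writes the C++ binding code to stdout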
def register_types(module):
root_module = module.get_root()
## angles.h (module 'antenna'): ns3::Angles [struct]
module.add_class('Angles')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::SupportLevel [enumeration]
module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## vector.h (module 'core'): ns3::Vector2D [class]
module.add_class('Vector2D', import_from_module='ns.core')
## vector.h (module 'core'): ns3::Vector3D [class]
module.add_class('Vector3D', import_from_module='ns.core')
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## antenna-model.h (module 'antenna'): ns3::AntennaModel [class]
module.add_class('AntennaModel', parent=root_module['ns3::Object'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## cosine-antenna-model.h (module 'antenna'): ns3::CosineAntennaModel [class]
module.add_class('CosineAntennaModel', parent=root_module['ns3::AntennaModel'])
## attribute.h (module 'core'): ns3::EmptyAttributeAccessor [class]
module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor'])
## attribute.h (module 'core'): ns3::EmptyAttributeChecker [class]
module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## isotropic-antenna-model.h (module 'antenna'): ns3::IsotropicAntennaModel [class]
module.add_class('IsotropicAntennaModel', parent=root_module['ns3::AntennaModel'])
## parabolic-antenna-model.h (module 'antenna'): ns3::ParabolicAntennaModel [class]
module.add_class('ParabolicAntennaModel', parent=root_module['ns3::AntennaModel'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## vector.h (module 'core'): ns3::Vector2DChecker [class]
module.add_class('Vector2DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## vector.h (module 'core'): ns3::Vector2DValue [class]
module.add_class('Vector2DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## vector.h (module 'core'): ns3::Vector3DChecker [class]
module.add_class('Vector3DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## vector.h (module 'core'): ns3::Vector3DValue [class]
module.add_class('Vector3DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
typehandlers.add_type_alias(u'ns3::Vector3D', u'ns3::Vector')
typehandlers.add_type_alias(u'ns3::Vector3D*', u'ns3::Vector*')
typehandlers.add_type_alias(u'ns3::Vector3D&', u'ns3::Vector&')
module.add_typedef(root_module['ns3::Vector3D'], 'Vector')
typehandlers.add_type_alias(u'ns3::Vector3DValue', u'ns3::VectorValue')
typehandlers.add_type_alias(u'ns3::Vector3DValue*', u'ns3::VectorValue*')
typehandlers.add_type_alias(u'ns3::Vector3DValue&', u'ns3::VectorValue&')
module.add_typedef(root_module['ns3::Vector3DValue'], 'VectorValue')
typehandlers.add_type_alias(u'ns3::Vector3DChecker', u'ns3::VectorChecker')
typehandlers.add_type_alias(u'ns3::Vector3DChecker*', u'ns3::VectorChecker*')
typehandlers.add_type_alias(u'ns3::Vector3DChecker&', u'ns3::VectorChecker&')
module.add_typedef(root_module['ns3::Vector3DChecker'], 'VectorChecker')
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace Hash
nested_module = module.add_cpp_namespace('Hash')
register_types_ns3_Hash(nested_module)
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_types_ns3_Hash(module):
root_module = module.get_root()
## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash32Function_ptr')
typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash32Function_ptr*')
typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash32Function_ptr&')
typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash64Function_ptr')
typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash64Function_ptr*')
typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash64Function_ptr&')
## Register a nested module for the namespace Function
nested_module = module.add_cpp_namespace('Function')
register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
root_module = module.get_root()
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
def register_methods(root_module):
register_Ns3Angles_methods(root_module, root_module['ns3::Angles'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3Vector2D_methods(root_module, root_module['ns3::Vector2D'])
register_Ns3Vector3D_methods(root_module, root_module['ns3::Vector3D'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3AntennaModel_methods(root_module, root_module['ns3::AntennaModel'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3CosineAntennaModel_methods(root_module, root_module['ns3::CosineAntennaModel'])
register_Ns3EmptyAttributeAccessor_methods(root_module, root_module['ns3::EmptyAttributeAccessor'])
register_Ns3EmptyAttributeChecker_methods(root_module, root_module['ns3::EmptyAttributeChecker'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3IsotropicAntennaModel_methods(root_module, root_module['ns3::IsotropicAntennaModel'])
register_Ns3ParabolicAntennaModel_methods(root_module, root_module['ns3::ParabolicAntennaModel'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3Vector2DChecker_methods(root_module, root_module['ns3::Vector2DChecker'])
register_Ns3Vector2DValue_methods(root_module, root_module['ns3::Vector2DValue'])
register_Ns3Vector3DChecker_methods(root_module, root_module['ns3::Vector3DChecker'])
register_Ns3Vector3DValue_methods(root_module, root_module['ns3::Vector3DValue'])
register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
return
def register_Ns3Angles_methods(root_module, cls):
cls.add_output_stream_operator()
## angles.h (module 'antenna'): ns3::Angles::Angles(ns3::Angles const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Angles const &', 'arg0')])
## angles.h (module 'antenna'): ns3::Angles::Angles() [constructor]
cls.add_constructor([])
## angles.h (module 'antenna'): ns3::Angles::Angles(double phi, double theta) [constructor]
cls.add_constructor([param('double', 'phi'), param('double', 'theta')])
## angles.h (module 'antenna'): ns3::Angles::Angles(ns3::Vector v) [constructor]
cls.add_constructor([param('ns3::Vector', 'v')])
## angles.h (module 'antenna'): ns3::Angles::Angles(ns3::Vector v, ns3::Vector o) [constructor]
cls.add_constructor([param('ns3::Vector', 'v'), param('ns3::Vector', 'o')])
## angles.h (module 'antenna'): ns3::Angles::phi [variable]
cls.add_instance_attribute('phi', 'double', is_const=False)
## angles.h (module 'antenna'): ns3::Angles::theta [variable]
cls.add_instance_attribute('theta', 'double', is_const=False)
return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('Find',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True)
return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
return
def register_Ns3CallbackBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
cls.add_method('GetImpl',
'ns3::Ptr< ns3::CallbackImplBase >',
[],
is_const=True)
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
visibility='protected')
return
def register_Ns3Hasher_methods(root_module, cls):
## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
cls.add_constructor([])
## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')])
## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('std::string const', 's')])
## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')])
## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('std::string const', 's')])
## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
cls.add_method('clear',
'ns3::Hasher &',
[])
return
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')],
deprecated=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor, std::string callback, ns3::TypeId::SupportLevel supportLevel=::ns3::TypeId::SUPPORTED, std::string const & supportMsg="") [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
cls.add_method('GetHash',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function]
cls.add_method('GetSize',
'std::size_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
cls.add_method('LookupByHash',
'ns3::TypeId',
[param('uint32_t', 'hash')],
is_static=True)
## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
cls.add_method('LookupByHashFailSafe',
'bool',
[param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')],
is_static=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name, ns3::TypeId::TraceSourceInformation * info) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name'), param('ns3::TypeId::TraceSourceInformation *', 'info')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function]
cls.add_method('SetSize',
'ns3::TypeId',
[param('std::size_t', 'size')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t uid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'uid')])
return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportLevel [variable]
cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportMsg [variable]
cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::callback [variable]
cls.add_instance_attribute('callback', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportLevel [variable]
cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::supportMsg [variable]
cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
return
def register_Ns3Vector2D_methods(root_module, cls):
cls.add_output_stream_operator()
## vector.h (module 'core'): ns3::Vector2D::Vector2D(ns3::Vector2D const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector2D const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector2D::Vector2D(double _x, double _y) [constructor]
cls.add_constructor([param('double', '_x'), param('double', '_y')])
## vector.h (module 'core'): ns3::Vector2D::Vector2D() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector2D::x [variable]
cls.add_instance_attribute('x', 'double', is_const=False)
## vector.h (module 'core'): ns3::Vector2D::y [variable]
cls.add_instance_attribute('y', 'double', is_const=False)
return
def register_Ns3Vector3D_methods(root_module, cls):
cls.add_output_stream_operator()
## vector.h (module 'core'): ns3::Vector3D::Vector3D(ns3::Vector3D const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector3D const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector3D::Vector3D(double _x, double _y, double _z) [constructor]
cls.add_constructor([param('double', '_x'), param('double', '_y'), param('double', '_z')])
## vector.h (module 'core'): ns3::Vector3D::Vector3D() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector3D::x [variable]
cls.add_instance_attribute('x', 'double', is_const=False)
## vector.h (module 'core'): ns3::Vector3D::y [variable]
cls.add_instance_attribute('y', 'double', is_const=False)
## vector.h (module 'core'): ns3::Vector3D::z [variable]
cls.add_instance_attribute('z', 'double', is_const=False)
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
def register_Ns3Object_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::Object() [constructor]
cls.add_constructor([])
## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
cls.add_method('AggregateObject',
'void',
[param('ns3::Ptr< ns3::Object >', 'other')])
## object.h (module 'core'): void ns3::Object::Dispose() [member function]
cls.add_method('Dispose',
'void',
[])
## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
cls.add_method('GetAggregateIterator',
'ns3::Object::AggregateIterator',
[],
is_const=True)
## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object.h (module 'core'): void ns3::Object::Initialize() [member function]
cls.add_method('Initialize',
'void',
[])
## object.h (module 'core'): bool ns3::Object::IsInitialized() const [member function]
cls.add_method('IsInitialized',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
cls.add_constructor([param('ns3::Object const &', 'o')],
visibility='protected')
## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
cls.add_method('DoInitialize',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
cls.add_constructor([])
## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
cls.add_method('Next',
'ns3::Ptr< ns3::Object const >',
[])
return
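# Each register_Ns3SimpleRefCount__* helper below follows the same pattern:
# the SimpleRefCount<T, PARENT, DELETER> template instantiation gets a
# default constructor, a copy constructor and the static Cleanup() method.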
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
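    # All four Connect/Disconnect variants are pure virtual on the C++ base
    # class, hence is_pure_virtual=True on every registration below.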
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
cls.add_constructor([])
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Connect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
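# From here on, registrations for the 'antenna' module's own classes
# (AntennaModel and its cosine, isotropic and parabolic variants) are
# interleaved with the core attribute/checker/value support classes.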
def register_Ns3AntennaModel_methods(root_module, cls):
## antenna-model.h (module 'antenna'): ns3::AntennaModel::AntennaModel(ns3::AntennaModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AntennaModel const &', 'arg0')])
## antenna-model.h (module 'antenna'): ns3::AntennaModel::AntennaModel() [constructor]
cls.add_constructor([])
## antenna-model.h (module 'antenna'): double ns3::AntennaModel::GetGainDb(ns3::Angles a) [member function]
cls.add_method('GetGainDb',
'double',
[param('ns3::Angles', 'a')],
is_pure_virtual=True, is_virtual=True)
## antenna-model.h (module 'antenna'): static ns3::TypeId ns3::AntennaModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
cls.add_method('CreateValidValue',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::AttributeValue const &', 'value')],
is_const=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackChecker_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
return
def register_Ns3CallbackImplBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
## callback.h (module 'core'): std::string ns3::CallbackImplBase::GetTypeid() const [member function]
cls.add_method('GetTypeid',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## callback.h (module 'core'): static std::string ns3::CallbackImplBase::Demangle(std::string const & mangled) [member function]
cls.add_method('Demangle',
'std::string',
[param('std::string const &', 'mangled')],
is_static=True, visibility='protected')
return
def register_Ns3CallbackValue_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
cls.add_method('Set',
'void',
[param('ns3::CallbackBase', 'base')])
return
def register_Ns3CosineAntennaModel_methods(root_module, cls):
## cosine-antenna-model.h (module 'antenna'): ns3::CosineAntennaModel::CosineAntennaModel() [constructor]
cls.add_constructor([])
## cosine-antenna-model.h (module 'antenna'): ns3::CosineAntennaModel::CosineAntennaModel(ns3::CosineAntennaModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CosineAntennaModel const &', 'arg0')])
## cosine-antenna-model.h (module 'antenna'): double ns3::CosineAntennaModel::GetBeamwidth() const [member function]
cls.add_method('GetBeamwidth',
'double',
[],
is_const=True)
## cosine-antenna-model.h (module 'antenna'): double ns3::CosineAntennaModel::GetGainDb(ns3::Angles a) [member function]
cls.add_method('GetGainDb',
'double',
[param('ns3::Angles', 'a')],
is_virtual=True)
## cosine-antenna-model.h (module 'antenna'): double ns3::CosineAntennaModel::GetOrientation() const [member function]
cls.add_method('GetOrientation',
'double',
[],
is_const=True)
## cosine-antenna-model.h (module 'antenna'): static ns3::TypeId ns3::CosineAntennaModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## cosine-antenna-model.h (module 'antenna'): void ns3::CosineAntennaModel::SetBeamwidth(double beamwidthDegrees) [member function]
cls.add_method('SetBeamwidth',
'void',
[param('double', 'beamwidthDegrees')])
## cosine-antenna-model.h (module 'antenna'): void ns3::CosineAntennaModel::SetOrientation(double orientationDegrees) [member function]
cls.add_method('SetOrientation',
'void',
[param('double', 'orientationDegrees')])
return
def register_Ns3EmptyAttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor(ns3::EmptyAttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object'), param('ns3::AttributeValue const &', 'value')],
is_const=True, is_virtual=True)
return
def register_Ns3EmptyAttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker(ns3::EmptyAttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_const=True, is_virtual=True)
return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, visibility='private', is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
visibility='private', is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3IsotropicAntennaModel_methods(root_module, cls):
## isotropic-antenna-model.h (module 'antenna'): ns3::IsotropicAntennaModel::IsotropicAntennaModel(ns3::IsotropicAntennaModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IsotropicAntennaModel const &', 'arg0')])
## isotropic-antenna-model.h (module 'antenna'): ns3::IsotropicAntennaModel::IsotropicAntennaModel() [constructor]
cls.add_constructor([])
## isotropic-antenna-model.h (module 'antenna'): double ns3::IsotropicAntennaModel::GetGainDb(ns3::Angles a) [member function]
cls.add_method('GetGainDb',
'double',
[param('ns3::Angles', 'a')],
is_virtual=True)
## isotropic-antenna-model.h (module 'antenna'): static ns3::TypeId ns3::IsotropicAntennaModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3ParabolicAntennaModel_methods(root_module, cls):
## parabolic-antenna-model.h (module 'antenna'): ns3::ParabolicAntennaModel::ParabolicAntennaModel() [constructor]
cls.add_constructor([])
## parabolic-antenna-model.h (module 'antenna'): ns3::ParabolicAntennaModel::ParabolicAntennaModel(ns3::ParabolicAntennaModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ParabolicAntennaModel const &', 'arg0')])
## parabolic-antenna-model.h (module 'antenna'): double ns3::ParabolicAntennaModel::GetBeamwidth() const [member function]
cls.add_method('GetBeamwidth',
'double',
[],
is_const=True)
## parabolic-antenna-model.h (module 'antenna'): double ns3::ParabolicAntennaModel::GetGainDb(ns3::Angles a) [member function]
cls.add_method('GetGainDb',
'double',
[param('ns3::Angles', 'a')],
is_virtual=True)
## parabolic-antenna-model.h (module 'antenna'): double ns3::ParabolicAntennaModel::GetOrientation() const [member function]
cls.add_method('GetOrientation',
'double',
[],
is_const=True)
## parabolic-antenna-model.h (module 'antenna'): static ns3::TypeId ns3::ParabolicAntennaModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## parabolic-antenna-model.h (module 'antenna'): void ns3::ParabolicAntennaModel::SetBeamwidth(double beamwidthDegrees) [member function]
cls.add_method('SetBeamwidth',
'void',
[param('double', 'beamwidthDegrees')])
## parabolic-antenna-model.h (module 'antenna'): void ns3::ParabolicAntennaModel::SetOrientation(double orientationDegrees) [member function]
cls.add_method('SetOrientation',
'void',
[param('double', 'orientationDegrees')])
return
def register_Ns3TypeIdChecker_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
return
def register_Ns3TypeIdValue_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
cls.add_constructor([param('ns3::TypeId const &', 'value')])
## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::TypeId const &', 'value')])
return
def register_Ns3Vector2DChecker_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker(ns3::Vector2DChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector2DChecker const &', 'arg0')])
return
def register_Ns3Vector2DValue_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2DValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector2DValue const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2D const & value) [constructor]
cls.add_constructor([param('ns3::Vector2D const &', 'value')])
## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector2DValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## vector.h (module 'core'): bool ns3::Vector2DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## vector.h (module 'core'): ns3::Vector2D ns3::Vector2DValue::Get() const [member function]
cls.add_method('Get',
'ns3::Vector2D',
[],
is_const=True)
## vector.h (module 'core'): std::string ns3::Vector2DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## vector.h (module 'core'): void ns3::Vector2DValue::Set(ns3::Vector2D const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Vector2D const &', 'value')])
return
def register_Ns3Vector3DChecker_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker(ns3::Vector3DChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector3DChecker const &', 'arg0')])
return
def register_Ns3Vector3DValue_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3DValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector3DValue const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3D const & value) [constructor]
cls.add_constructor([param('ns3::Vector3D const &', 'value')])
## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector3DValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## vector.h (module 'core'): bool ns3::Vector3DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## vector.h (module 'core'): ns3::Vector3D ns3::Vector3DValue::Get() const [member function]
cls.add_method('Get',
'ns3::Vector3D',
[],
is_const=True)
## vector.h (module 'core'): std::string ns3::Vector3DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## vector.h (module 'core'): void ns3::Vector3DValue::Set(ns3::Vector3D const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Vector3D const &', 'value')])
return
def register_Ns3HashImplementation_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor]
cls.add_constructor([])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_pure_virtual=True, is_virtual=True)
## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function]
cls.add_method('clear',
'void',
[],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor]
cls.add_constructor([])
## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor]
cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor]
cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor]
cls.add_constructor([])
## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_functions(root_module):
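    # Register the module-level free functions, then recurse into the
    # FatalImpl and Hash submodules.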
module = root_module
## angles.h (module 'antenna'): extern double ns3::DegreesToRadians(double degrees) [free function]
module.add_function('DegreesToRadians',
'double',
[param('double', 'degrees')])
## angles.h (module 'antenna'): extern double ns3::RadiansToDegrees(double radians) [free function]
module.add_function('RadiansToDegrees',
'double',
[param('double', 'radians')])
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_Hash(module.get_submodule('Hash'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_Hash(module, root_module):
register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module)
return
def register_functions_ns3_Hash_Function(module, root_module):
return
def main():
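    # Drive the full pybindgen pipeline: build the root module, register all
    # types, methods and free functions, then emit the generated binding code
    # to stdout.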
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
|
flakerimi/ajenti | refs/heads/master | plugins/pkgman/pm_portage.py | 17 | import os
import subprocess
import lxml.etree
from ajenti.com import *
from ajenti.utils import shell, shell_bg
from ajenti import apis
class PortagePackageManager(Plugin):
implements(apis.pkgman.IPackageManager)
platform = ['gentoo']
_pending = {}
def refresh(self, st):
st.full = self.eix_parse(shell('eix \'-I*\' --xml'))
st.upgradeable = self.eix_parse(shell('eix -u --xml'))
st.pending = self._pending
def get_lists(self):
shell_bg('emerge --sync', output='/tmp/ajenti-portage-output', deleteout=True)
def search(self, q, st):
return self.eix_parse(shell('eix --xml \'%s\''%q))
def mark_install(self, st, name):
st.pending[name] = 'install'
def mark_remove(self, st, name):
st.pending[name] = 'remove'
def mark_cancel(self, st, name):
del st.pending[name]
def mark_cancel_all(self, st):
st.pending = {}
def apply(self, st):
cmd = 'emerge '
cmd2 = 'emerge --unmerge'
for x in st.pending:
if st.pending[x] == 'install':
cmd += ' ' + x
else:
cmd2 += ' ' + x
        shell_bg('%s; %s' % (cmd, cmd2), output='/tmp/ajenti-portage-output', deleteout=True)
def is_busy(self):
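        # The background-output file doubles as a busy flag: shell_bg()
        # creates it and, with deleteout=True, is expected to remove it once
        # the job finishes.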
return os.path.exists('/tmp/ajenti-portage-output')
    def get_busy_status(self):
        try:
            return open('/tmp/ajenti-portage-output', 'r').read().splitlines()[-1]
        except (IOError, IndexError):
            # No output file yet, or nothing has been written to it.
            return ''
def get_expected_result(self, st):
return st.pending
def abort(self):
shell('pkill emerge')
shell('rm /tmp/ajenti-portage-output')
def get_info(self, pkg):
return self.eix_parse(shell('eix \'-I*\' --xml'))[pkg]
def get_info_ui(self, pkg):
return None
    def eix_parse(self, data):
        # Parse eix's XML output into a {package_name: Package} mapping.
        xml = lxml.etree.fromstring(data)
        r = {}
        for pkg in xml.findall('*/package'):
            try:
                p = apis.pkgman.Package()
                p.name = pkg.get('name')
                # The last <version> element is the newest available version.
                p.available = pkg.findall('version')[-1].get('id')
                if len(pkg.findall('version[@installed]')) == 0:
                    p.state = 'removed'
                else:
                    p.installed = pkg.findall('version[@installed]')[0].get('id')
                    p.version = p.installed
                p.description = pkg.find('description').text
                r[p.name] = p
                # Cap the result set so huge queries stay manageable in the UI.
                if len(r) > 250:
                    break
            except Exception:
                # Skip entries with missing or malformed fields.
                pass
        return r
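# A minimal usage sketch (hypothetical, outside Ajenti's normal UI flow):
# marks are queued on a status object's 'pending' dict, then applied in a
# single background emerge invocation.
#
#   pm = PortagePackageManager(...)         # normally created by Ajenti
#   pm.mark_install(st, 'app-editors/vim')  # st: any object with a 'pending' dict
#   pm.mark_remove(st, 'app-misc/foo')
#   pm.apply(st)   # runs 'emerge app-editors/vim' and 'emerge --unmerge app-misc/foo'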
|
georgtroska/root | refs/heads/master | interpreter/llvm/src/utils/lit/tests/shtest-timeout.py | 24 | # REQUIRES: python-psutil
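# This driver runs the same timeout tests four ways: with lit's external
# shell, with the internal shell, with the timeout set via a config
# parameter, and with a command-line --timeout overriding the config value.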
# Test per test timeout using external shell
# RUN: not %{lit} \
# RUN: %{inputs}/shtest-timeout/infinite_loop.py \
# RUN: %{inputs}/shtest-timeout/quick_then_slow.py \
# RUN: %{inputs}/shtest-timeout/short.py \
# RUN: %{inputs}/shtest-timeout/slow.py \
# RUN: -j 1 -v --debug --timeout 1 --param external=1 > %t.extsh.out 2> %t.extsh.err
# RUN: FileCheck --check-prefix=CHECK-OUT-COMMON < %t.extsh.out %s
# RUN: FileCheck --check-prefix=CHECK-EXTSH-ERR < %t.extsh.err %s
#
# CHECK-EXTSH-ERR: Using external shell
# Test per test timeout using internal shell
# RUN: not %{lit} \
# RUN: %{inputs}/shtest-timeout/infinite_loop.py \
# RUN: %{inputs}/shtest-timeout/quick_then_slow.py \
# RUN: %{inputs}/shtest-timeout/short.py \
# RUN: %{inputs}/shtest-timeout/slow.py \
# RUN: -j 1 -v --debug --timeout 1 --param external=0 > %t.intsh.out 2> %t.intsh.err
# RUN: FileCheck --check-prefix=CHECK-OUT-COMMON < %t.intsh.out %s
# RUN: FileCheck --check-prefix=CHECK-INTSH-OUT < %t.intsh.out %s
# RUN: FileCheck --check-prefix=CHECK-INTSH-ERR < %t.intsh.err %s
#
# CHECK-INTSH-OUT: TIMEOUT: per_test_timeout :: infinite_loop.py
# CHECK-INTSH-OUT: Command 0 Reached Timeout: True
# CHECK-INTSH-OUT: Command 0 Output:
# CHECK-INTSH-OUT-NEXT: Running infinite loop
# CHECK-INTSH-OUT: TIMEOUT: per_test_timeout :: quick_then_slow.py
# CHECK-INTSH-OUT: Timeout: Reached timeout of 1 seconds
# CHECK-INTSH-OUT: Command Output
# CHECK-INTSH-OUT: Command 0 Reached Timeout: False
# CHECK-INTSH-OUT: Command 0 Output:
# CHECK-INTSH-OUT-NEXT: Running in quick mode
# CHECK-INTSH-OUT: Command 1 Reached Timeout: True
# CHECK-INTSH-OUT: Command 1 Output:
# CHECK-INTSH-OUT-NEXT: Running in slow mode
# CHECK-INTSH-OUT: TIMEOUT: per_test_timeout :: slow.py
# CHECK-INTSH-OUT: Command 0 Reached Timeout: True
# CHECK-INTSH-OUT: Command 0 Output:
# CHECK-INTSH-OUT-NEXT: Running slow program
# CHECK-INTSH-ERR: Using internal shell
# Test per test timeout set via a config file rather than on the command line
# RUN: not %{lit} \
# RUN: %{inputs}/shtest-timeout/infinite_loop.py \
# RUN: %{inputs}/shtest-timeout/quick_then_slow.py \
# RUN: %{inputs}/shtest-timeout/short.py \
# RUN: %{inputs}/shtest-timeout/slow.py \
# RUN: -j 1 -v --debug --param external=0 \
# RUN: --param set_timeout=1 > %t.cfgset.out 2> %t.cfgset.err
# RUN: FileCheck --check-prefix=CHECK-OUT-COMMON < %t.cfgset.out %s
# RUN: FileCheck --check-prefix=CHECK-CFGSET-ERR < %t.cfgset.err %s
#
# CHECK-CFGSET-ERR: Using internal shell
# CHECK-OUT-COMMON: TIMEOUT: per_test_timeout :: infinite_loop.py
# CHECK-OUT-COMMON: Timeout: Reached timeout of 1 seconds
# CHECK-OUT-COMMON: Command {{([0-9]+ )?}}Output
# CHECK-OUT-COMMON: Running infinite loop
# CHECK-OUT-COMMON: TIMEOUT: per_test_timeout :: quick_then_slow.py
# CHECK-OUT-COMMON: Timeout: Reached timeout of 1 seconds
# CHECK-OUT-COMMON: Command {{([0-9]+ )?}}Output
# CHECK-OUT-COMMON: Running in quick mode
# CHECK-OUT-COMMON: Running in slow mode
# CHECK-OUT-COMMON: PASS: per_test_timeout :: short.py
# CHECK-OUT-COMMON: TIMEOUT: per_test_timeout :: slow.py
# CHECK-OUT-COMMON: Timeout: Reached timeout of 1 seconds
# CHECK-OUT-COMMON: Command {{([0-9]+ )?}}Output
# CHECK-OUT-COMMON: Running slow program
# CHECK-OUT-COMMON: Expected Passes{{ *}}: 1
# CHECK-OUT-COMMON: Individual Timeouts{{ *}}: 3
# Test per test timeout via a config file and on the command line.
# The value set on the command line should override the config file.
# RUN: not %{lit} \
# RUN: %{inputs}/shtest-timeout/infinite_loop.py \
# RUN: %{inputs}/shtest-timeout/quick_then_slow.py \
# RUN: %{inputs}/shtest-timeout/short.py \
# RUN: %{inputs}/shtest-timeout/slow.py \
# RUN: -j 1 -v --debug --param external=0 \
# RUN: --param set_timeout=1 --timeout=2 > %t.cmdover.out 2> %t.cmdover.err
# RUN: FileCheck --check-prefix=CHECK-CMDLINE-OVERRIDE-OUT < %t.cmdover.out %s
# RUN: FileCheck --check-prefix=CHECK-CMDLINE-OVERRIDE-ERR < %t.cmdover.err %s
# CHECK-CMDLINE-OVERRIDE-ERR: Forcing timeout to be 2 seconds
# CHECK-CMDLINE-OVERRIDE-OUT: TIMEOUT: per_test_timeout :: infinite_loop.py
# CHECK-CMDLINE-OVERRIDE-OUT: Timeout: Reached timeout of 2 seconds
# CHECK-CMDLINE-OVERRIDE-OUT: Command {{([0-9]+ )?}}Output
# CHECK-CMDLINE-OVERRIDE-OUT: Running infinite loop
# CHECK-CMDLINE-OVERRIDE-OUT: TIMEOUT: per_test_timeout :: quick_then_slow.py
# CHECK-CMDLINE-OVERRIDE-OUT: Timeout: Reached timeout of 2 seconds
# CHECK-CMDLINE-OVERRIDE-OUT: Command {{([0-9]+ )?}}Output
# CHECK-CMDLINE-OVERRIDE-OUT: Running in quick mode
# CHECK-CMDLINE-OVERRIDE-OUT: Running in slow mode
# CHECK-CMDLINE-OVERRIDE-OUT: PASS: per_test_timeout :: short.py
# CHECK-CMDLINE-OVERRIDE-OUT: TIMEOUT: per_test_timeout :: slow.py
# CHECK-CMDLINE-OVERRIDE-OUT: Timeout: Reached timeout of 2 seconds
# CHECK-CMDLINE-OVERRIDE-OUT: Command {{([0-9]+ )?}}Output
# CHECK-CMDLINE-OVERRIDE-OUT: Running slow program
# CHECK-CMDLINE-OVERRIDE-OUT: Expected Passes{{ *}}: 1
# CHECK-CMDLINE-OVERRIDE-OUT: Individual Timeouts{{ *}}: 3
|
JohnDenker/brython | refs/heads/master | www/src/Lib/test/test_call.py | 182 | import unittest
from test import support
# The test cases here cover several paths through the function calling
# code. They depend on the METH_XXX flag that is used to define a C
# function, which can't be verified from Python. If the METH_XXX decl
# for a C function changes, these tests may not cover the right paths.
class CFunctionCalls(unittest.TestCase):
def test_varargs0(self):
self.assertRaises(TypeError, {}.__contains__)
def test_varargs1(self):
{}.__contains__(0)
def test_varargs2(self):
self.assertRaises(TypeError, {}.__contains__, 0, 1)
def test_varargs0_ext(self):
try:
{}.__contains__(*())
except TypeError:
pass
def test_varargs1_ext(self):
{}.__contains__(*(0,))
def test_varargs2_ext(self):
try:
{}.__contains__(*(1, 2))
except TypeError:
pass
else:
raise RuntimeError
def test_varargs0_kw(self):
self.assertRaises(TypeError, {}.__contains__, x=2)
def test_varargs1_kw(self):
self.assertRaises(TypeError, {}.__contains__, x=2)
def test_varargs2_kw(self):
self.assertRaises(TypeError, {}.__contains__, x=2, y=2)
def test_oldargs0_0(self):
{}.keys()
def test_oldargs0_1(self):
self.assertRaises(TypeError, {}.keys, 0)
def test_oldargs0_2(self):
self.assertRaises(TypeError, {}.keys, 0, 1)
def test_oldargs0_0_ext(self):
{}.keys(*())
def test_oldargs0_1_ext(self):
try:
{}.keys(*(0,))
except TypeError:
pass
else:
raise RuntimeError
def test_oldargs0_2_ext(self):
try:
{}.keys(*(1, 2))
except TypeError:
pass
else:
raise RuntimeError
def test_oldargs0_0_kw(self):
try:
{}.keys(x=2)
except TypeError:
pass
else:
raise RuntimeError
def test_oldargs0_1_kw(self):
self.assertRaises(TypeError, {}.keys, x=2)
def test_oldargs0_2_kw(self):
self.assertRaises(TypeError, {}.keys, x=2, y=2)
def test_oldargs1_0(self):
self.assertRaises(TypeError, [].count)
def test_oldargs1_1(self):
[].count(1)
def test_oldargs1_2(self):
self.assertRaises(TypeError, [].count, 1, 2)
def test_oldargs1_0_ext(self):
try:
[].count(*())
except TypeError:
pass
else:
raise RuntimeError
def test_oldargs1_1_ext(self):
[].count(*(1,))
def test_oldargs1_2_ext(self):
try:
[].count(*(1, 2))
except TypeError:
pass
else:
raise RuntimeError
def test_oldargs1_0_kw(self):
self.assertRaises(TypeError, [].count, x=2)
def test_oldargs1_1_kw(self):
self.assertRaises(TypeError, [].count, {}, x=2)
def test_oldargs1_2_kw(self):
self.assertRaises(TypeError, [].count, x=2, y=2)
def test_main():
support.run_unittest(CFunctionCalls)
if __name__ == "__main__":
test_main()
|
ryannathans/micropython | refs/heads/master | tests/basics/dict_iterator.py | 118 | d = {1: 2, 3: 4}
els = []
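# Iterating a dict yields its keys; collect (key, value) pairs and sort them
# for deterministic output.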
for i in d:
els.append((i, d[i]))
print(sorted(els))
|
malconis/deproxy | refs/heads/master | deproxy/request.py | 1 |
class Request:
"""A simple HTTP Request, with method, path, headers, and body."""
def __init__(self, method, path, headers, body):
self.method = method
self.path = path
self.headers = dict(headers)
self.body = body
def __repr__(self):
return ('Request(method=%r, path=%r, headers=%r, body=%r)' %
(self.method, self.path, self.headers, self.body))
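# Example (hypothetical values):
#   req = Request('GET', '/status', {'Host': 'example.com'}, '')
#   repr(req) -> "Request(method='GET', path='/status',
#                 headers={'Host': 'example.com'}, body='')"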
|
itziakos/haas | refs/heads/master | haas/plugins/i_result_handler_plugin.py | 3 | # -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
from abc import ABCMeta, abstractmethod
from six import add_metaclass
from haas.utils import abstractclassmethod
@add_metaclass(ABCMeta)
class IResultHandlerPlugin(object):
@abstractclassmethod
def from_args(cls, args, name, dest_prefix, test_count):
"""Construct the result handler from parsed command line arguments.
Parameters
----------
args : argparse.Namespace
The ``argparse.Namespace`` containing parsed arguments.
name : str
The name of the plugin.
dest_prefix : str
The prefix that ``dest`` strings for options added by this
plugin should use.
test_count : int
            The total number of tests discovered.
"""
@abstractclassmethod
def add_parser_arguments(cls, parser, name, option_prefix, dest_prefix):
"""Add options for the plugin to the main argument parser.
Parameters
----------
parser : argparse.ArgumentParser
The parser to extend
name : str
The name of the plugin.
option_prefix : str
The prefix that option strings added by this plugin should use.
dest_prefix : str
The prefix that ``dest`` strings for options added by this
plugin should use.
"""
@abstractmethod
def start_test(self, test):
"""Perform tasks at the start of a single test.
"""
@abstractmethod
def stop_test(self, test):
"""Perform tasks at the end of a single test.
"""
@abstractmethod
def start_test_run(self):
"""Perform tasks at the very start of the test run.
"""
@abstractmethod
def stop_test_run(self):
"""Perform tasks at the very end of the test run.
"""
@abstractmethod
def __call__(self, result):
"""Handle the completed test result ``result``.
"""
|
hoangminhitvn/flask | refs/heads/master | flask/lib/python2.7/site-packages/flask/testsuite/test_apps/flaskext/oldext_simple.py | 629 | ext_id = 'oldext_simple'
|
NationalGenomicsInfrastructure/charon | refs/heads/master | charon/test_seqrun.py | 3 | """ Charon: nosetests /api/v1/seqrun
Requires env vars CHARON_API_TOKEN and CHARON_BASE_URL.
"""
import os
import json
import requests
import nose
def url(*segments):
"Synthesize absolute URL from path segments."
return "{0}api/v1/{1}".format(BASE_URL,'/'.join([str(s) for s in segments]))
API_TOKEN = os.getenv('CHARON_API_TOKEN')
if not API_TOKEN: raise ValueError('no API token')
BASE_URL = os.getenv('CHARON_BASE_URL')
if not BASE_URL: raise ValueError('no base URL')
PROJECTID = 'P0'
SAMPLEID = 'S1'
LIBPREPID = 'A'
SEQRUNID='1337_WORLD'
api_token = {'X-Charon-API-token': API_TOKEN}
session = requests.Session()
def my_setup():
"Create the project, sample and libprep to work with seqrun."
data = dict(projectid=PROJECTID)
session.post(url('project'), data=json.dumps(data), headers=api_token)
data = dict(sampleid=SAMPLEID)
session.post(url('sample', PROJECTID),
data=json.dumps(data),
headers=api_token)
data = dict(libprepid=LIBPREPID)
session.post(url('libprep', PROJECTID, SAMPLEID),
data=json.dumps(data),
headers=api_token)
def my_teardown():
"Delete the project and all its dependents."
session.delete(url('project', PROJECTID), headers=api_token)
@nose.with_setup(my_setup, my_teardown)
def test_create_seqrun():
"Create a seqrun in a libprep and manipulate it."
data = dict(sequencing_status='NEW', alignment_status="RUNNING", mean_autosomal_coverage=0.0, seqrunid=SEQRUNID)
response = session.post(url('seqrun', PROJECTID, SAMPLEID, LIBPREPID),
data=json.dumps(data),
headers=api_token)
assert response.status_code == 201, response.reason
data = response.json()
assert data['projectid'] == PROJECTID
assert data['sampleid'] == SAMPLEID
assert data['libprepid'] == LIBPREPID
assert data['seqrunid'] == SEQRUNID, repr(data['seqrunid'])
response = session.get(url('seqrun', PROJECTID, SAMPLEID, LIBPREPID, SEQRUNID),
headers=api_token)
assert response.status_code == 200, response
assert data == response.json()
data = dict(alignment_status='RUNNING',
seqrunid=SEQRUNID,
runid=SEQRUNID)
response = session.put(url('seqrun', PROJECTID, SAMPLEID, LIBPREPID, SEQRUNID),
data=json.dumps(data),
headers=api_token)
assert response.status_code == 204, response
response = session.get(url('seqrun', PROJECTID, SAMPLEID, LIBPREPID, SEQRUNID),
headers=api_token)
newdata = response.json()
assert data['seqrunid'] == newdata['seqrunid']
assert data['alignment_status'] == newdata['alignment_status']
data = dict(status='DONE',
alignment_status='DONE',
mean_autosomal_coverage=1.0)
response = session.put(url('seqrun', PROJECTID, SAMPLEID, LIBPREPID, SEQRUNID),
data=json.dumps(data),
headers=api_token)
assert response.status_code == 204, response
response = session.get(url('seqrun', PROJECTID, SAMPLEID, LIBPREPID, SEQRUNID),
headers=api_token)
newdata = response.json()
assert data['mean_autosomal_coverage'] == newdata['mean_autosomal_coverage']
data = dict(sequencing_status='NEW', mean_autosomal_coverage=0.0, seqrunid=SEQRUNID)
response = session.post(url('seqrun', PROJECTID, SAMPLEID, LIBPREPID),
data=json.dumps(data),
headers=api_token)
assert response.status_code == 400, response.reason
@nose.with_setup(my_setup, my_teardown)
def test_delete_seqrun():
data = dict(sequencing_status='NEW', alignment_status="RUNNING", mean_autosomal_coverage=0.0, seqrunid=SEQRUNID)
response = session.post(url('seqrun', PROJECTID, SAMPLEID, LIBPREPID),
data=json.dumps(data),
headers=api_token)
assert response.status_code == 201, response.reason
seqrun_url = BASE_URL.rstrip('/') + response.headers['location']
response = session.delete(seqrun_url, headers=api_token)
assert response.status_code == 204, response.reason
|
JohnFrazier/pacman | refs/heads/master | test/pacman/tests/sync009.py | 56 | self.description = "Install a package from a sync db"
sp = pmpkg("dummy")
sp.files = ["bin/dummy",
"usr/man/man1/dummy.1"]
self.addpkg2db("sync", sp)
self.args = "-S %s" % sp.name
self.addrule("PACMAN_RETCODE=0")
self.addrule("PKG_EXIST=dummy")
for f in sp.files:
self.addrule("FILE_EXIST=%s" % f)
|
yeraydiazdiaz/nonrel-blog | refs/heads/master | search/core.py | 9 | from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import signals
from djangotoolbox.fields import ListField
from djangotoolbox.utils import getattr_by_path
from copy import deepcopy
import re
import string
_PUNCTUATION_REGEX = re.compile(
'[' + re.escape(string.punctuation.replace('-', '').replace(
'_', '').replace('#', '')) + ']')
_PUNCTUATION_SEARCH_REGEX = re.compile(
'[' + re.escape(string.punctuation.replace('_', '').replace(
'#', '')) + ']')
# Various base indexers
def startswith(words, indexing, **kwargs):
"""Allows for word prefix search."""
if not indexing:
# In search mode we simply match search terms exactly
return words
# In indexing mode we add all prefixes ('h', 'he', ..., 'hello')
result = []
for word in words:
result.extend([word[:count].strip(u'-')
for count in range(1, len(word)+1)])
return result
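# Illustrative example: indexing adds every prefix of each word, so a
# search term such as 'hel' can match a document containing 'hello':
#   startswith(['hello'], indexing=True)
#   # -> ['h', 'he', 'hel', 'hell', 'hello']
#   startswith(['hel'], indexing=False)
#   # -> ['hel']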
def porter_stemmer(words, language, **kwargs):
"""Porter-stemmer in various languages."""
languages = [language,]
if '-' in language:
languages.append(language.split('-')[0])
# Fall back to English
languages.append('en')
# Find a stemmer for this language
stem = None
for language in languages:
    try:
        stem = __import__('search.porter_stemmers.%s' % language,
                          {}, {}, ['']).stem
    except ImportError:
        continue
    break
if stem is None:
    # No stemmer module exists for any candidate language; return the
    # words unchanged rather than failing with a NameError below.
    return words
result = []
for word in words:
result.append(stem(word))
return result
stop_words = {
'en': set(('a', 'an', 'and', 'or', 'the', 'these', 'those', 'whose', 'to')),
'de': set(('ein', 'eine', 'eines', 'einer', 'einem', 'einen', 'den',
'der', 'die', 'das', 'dieser', 'dieses', 'diese', 'diesen',
'deren', 'und', 'oder'))
}
def get_stop_words(language):
if language not in stop_words and '-' in language:
language = language.split('-', 1)[0]
return stop_words.get(language, set())
def non_stop(words, indexing, language, **kwargs):
"""Removes stop words from search query."""
if indexing:
return words
return list(set(words) - get_stop_words(language))
def porter_stemmer_non_stop(words, **kwargs):
"""Combines porter_stemmer with non_stop."""
return porter_stemmer(non_stop(words, **kwargs), **kwargs)
# Language handler
def site_language(instance, **kwargs):
"""The default language handler tries to determine the language from
fields in the model instance."""
# Check if there's a language attribute
if hasattr(instance, 'language'):
return instance.language
if hasattr(instance, 'lang'):
return instance.lang
# Fall back to default language
return settings.LANGUAGE_CODE
def default_splitter(text, indexing=False, **kwargs):
"""
Returns an array of keywords, that are included
in query. All character besides of letters, numbers
and '_' are split characters. The character '-' is a special
case: two words separated by '-' create an additional keyword
consisting of both words without separation (see example).
Examples:
- text='word1/word2 word3'
returns ['word1', 'word2', 'word3']
- text='word1/word2-word3'
returns ['word1', 'word2', 'word3', 'word2word3']
"""
if not text:
return []
if not indexing:
return _PUNCTUATION_SEARCH_REGEX.sub(u' ', text.lower()).split()
keywords = []
for word in set(_PUNCTUATION_REGEX.sub(u' ', text.lower()).split()):
if not word:
continue
if '-' not in word:
keywords.append(word)
else:
keywords.extend(get_word_combinations(word))
return keywords
def get_word_combinations(word):
"""
'one-two-three'
=>
['one', 'two', 'three', 'onetwo', 'twothree', 'onetwothree']
"""
permutations = []
parts = [part for part in word.split(u'-') if part]
for count in range(1, len(parts) + 1):
for index in range(len(parts) - count + 1):
permutations.append(u''.join(parts[index:index+count]))
return permutations
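# Illustrative example combining the two helpers above: at indexing time a
# hyphenated word contributes both its parts and their concatenation, so a
# query for either form can match (result order may vary; built from a set):
#   default_splitter('e-mail client', indexing=True)
#   # -> ['e', 'mail', 'email', 'client']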
class DictEmu(object):
def __init__(self, data):
self.data = data
def __getitem__(self, key):
return getattr(self.data, key)
# IndexField is a (String)ListField storing indexed fields of a model_instance
class IndexField(ListField):
def __init__(self, search_manager, *args, **kwargs):
self.search_manager = search_manager
kwargs['item_field'] = models.CharField(max_length=500)
kwargs['editable'] = False
super(IndexField, self).__init__(*args, **kwargs)
def pre_save(self, model_instance, add):
if self.search_manager.filters and not \
self.search_manager.should_index(DictEmu(model_instance)):
return []
language = self.search_manager.language
if callable(language):
language = language(model_instance, property=self)
index = []
for field_name in self.search_manager.fields_to_index:
values = getattr_by_path(model_instance, field_name, None)
if not values:
values = ()
elif not isinstance(values, (list, tuple)):
values = (values,)
for value in values:
index.extend(self.search_manager.splitter(value, indexing=True,
language=language))
if self.search_manager.indexer:
index = self.search_manager.indexer(index, indexing=True,
language=language)
# Sort index to make debugging easier
setattr(model_instance, self.search_manager.search_list_field_name,
sorted(set(index)))
return index
class SearchManager(models.Manager):
"""
Simple full-text manager adding a search function.
If "relation_index" is True the index will be stored in a separate entity.
With "integrate" you can add fields to your relation index,
so they can be searched, too.
With "filters" you can specify when a values index should be created.
"""
def __init__(self, fields_to_index, indexer=None, splitter=default_splitter,
relation_index=True, integrate='*', filters={},
language=site_language, **kwargs):
# integrate should be specified when using the relation index; otherwise
# we duplicate the amount of data in the datastore and the relation
# index no longer makes sense
# TODO: filters has to be extended (maybe a function) to allow Django's
# QuerySet methods like exclude
if integrate is None:
integrate = ()
if integrate == '*' and not relation_index:
integrate = ()
if isinstance(fields_to_index, basestring):
fields_to_index = (fields_to_index,)
self.fields_to_index = fields_to_index
if isinstance(integrate, basestring):
integrate = (integrate,)
self.filters = filters
self.integrate = integrate
self.splitter = splitter
self.indexer = indexer
self.language = language
self.relation_index = relation_index
if len(fields_to_index) == 0:
raise ValueError('No fields specified for index!')
# search_list_field_name will only be set if no relation index is used,
# i.e. for relation_index=False or for the relation_index_model itself
super(SearchManager, self).__init__(**kwargs)
def contribute_to_class(self, model, name):
super(SearchManager, self).contribute_to_class(model, name)
# set default_manager to None such that the default_manager will be set
# to 'objects' via the class-prepared signal calling
# ensure_default_manager
# setattr(model, '_default_manager', None)
self.name = name
# add IndexField to the model if we do not use the relation_index
if not self.relation_index:
self.search_list_field_name = "%s_search_list_field" %name
# Add field to class dynamically
setattr(model, self.search_list_field_name, IndexField(self))
getattr(model, self.search_list_field_name).contribute_to_class(
model, self.search_list_field_name)
def filter(self, values):
"""
Returns a query for the given values (creates '=' filters for the
IndexField). Additional filters can be applied afterwards via chaining.
"""
if not isinstance(values, (tuple, list)):
values = (values,)
filtered = self.model.objects.all()
for value in set(values):
filter = {self.search_list_field_name:value}
filtered = filtered.filter(**filter)
return filtered
def _search(self, query, indexer=None, splitter=None,
language=settings.LANGUAGE_CODE):
if not splitter:
splitter = default_splitter
words = splitter(query, indexing=False, language=language)
if indexer:
words = indexer(words, indexing=False, language=language)
# Optimize query
words = set(words)
if len(words) >= 4:
words -= get_stop_words(language)
# Don't allow empty queries
if not words and query:
# This query will never find anything
return self.filter(()).filter(**{self.search_list_field_name: ' '})
return self.filter(sorted(words))
def should_index(self, values):
# Check if filter doesn't match
if not values:
return False
for filter, value in self.filters.items():
attr, op = filter, 'exact'
if '__' in filter:
attr, op = filter.rsplit('__', 1)
op = op.lower()
if (op == 'exact' and values[attr] != value or
# op == '!=' and values[attr] == value or
op == 'in' and values[attr] not in value or
op == 'lt' and values[attr] >= value or
op == 'lte' and values[attr] > value or
op == 'gt' and values[attr] <= value or
op == 'gte' and values[attr] < value):
return False
elif op not in ('exact', 'in', 'lt', 'lte', 'gte', 'gt'):
raise ValueError('Invalid search index filter: %s %s' % (filter, value))
return True
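# Illustrative example (hypothetical field names): with
#   filters={'published': True, 'rating__gte': 3}
# should_index() returns True only for instances whose 'published' value
# equals True and whose 'rating' value is at least 3; everything else is
# kept out of the search index.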
# @commit_locked
def update_relation_index(self, parent_pk, delete=False):
relation_index_model = self._relation_index_model
try:
index = relation_index_model.objects.get(pk=parent_pk)
except ObjectDoesNotExist:
index = None
if not delete:
try:
parent = self.model.objects.get(pk=parent_pk)
except ObjectDoesNotExist:
parent = None
values = None
if parent:
values = self.get_index_values(parent)
# Remove index if it's not needed, anymore
if delete or not self.should_index(values):
if index:
index.delete()
return
# Update/create index
if not index:
index = relation_index_model(pk=parent_pk, **values)
# This guarantees that we also set virtual @properties
for key, value in values.items():
setattr(index, key, value)
index.save()
def create_index_model(self):
attrs = dict(__module__=self.__module__)
# By default we integrate everything when using the relation index;
# the manager will add the IndexField to the relation index automatically
if self.integrate == ('*',):
self.integrate = tuple(field.name
for field in self.model._meta.fields
if not isinstance(field, IndexField))
for field_name in self.integrate:
field = self.model._meta.get_field_by_name(field_name)[0]
field = deepcopy(field)
attrs[field_name] = field
if isinstance(field, models.ForeignKey):
attrs[field_name].rel.related_name = '_sidx_%s_%s_%s_set_' % (
self.model._meta.object_name.lower(),
self.name, field_name,
)
owner = self
def __init__(self, *args, **kwargs):
# Save some space: don't copy the whole indexed text into the
# relation index field unless the field gets integrated.
field_names = [field.name for field in self._meta.fields]
owner_field_names = [field.name
for field in owner.model._meta.fields]
for key, value in kwargs.items():
if key in field_names or key not in owner_field_names:
continue
setattr(self, key, value)
del kwargs[key]
models.Model.__init__(self, *args, **kwargs)
attrs['__init__'] = __init__
self._relation_index_model = type(
'RelationIndex_%s_%s_%s' % (self.model._meta.app_label,
self.model._meta.object_name,
self.name),
(models.Model,), attrs)
self._relation_index_model.add_to_class(self.name, SearchManager(
self.fields_to_index, splitter=self.splitter, indexer=self.indexer,
language=self.language, relation_index=False))
def get_index_values(self, parent):
filters = []
for filter in self.filters.keys():
if '__' in filter:
filters.append(filter.rsplit('__')[0])
else:
filters.append(filter)
filters = tuple(filters)
values = {}
for field_name in set(self.fields_to_index + self.integrate + filters):
field = self.model._meta.get_field_by_name(field_name)[0]
if isinstance(field, models.ForeignKey):
value = field.pre_save(parent, False)
else:
value = getattr(parent, field_name)
if field_name == self.fields_to_index[0] and \
isinstance(value, (list, tuple)):
value = sorted(value)
if isinstance(field, models.ForeignKey):
values[field.column] = value
else:
values[field_name] = value
return values
def search(self, query, language=settings.LANGUAGE_CODE):
if self.relation_index:
items = getattr(self._relation_index_model, self.name).search(query,
language=language).values('pk')
return RelationIndexQuery(self.model, items)
return self._search(query, splitter=self.splitter,
indexer=self.indexer, language=language)
def load_backend():
backend = getattr(settings, 'SEARCH_BACKEND', 'search.backends.immediate_update')
import_list = []
if '.' in backend:
import_list = [backend.rsplit('.', 1)[1]]
return __import__(backend, globals(), locals(), import_list)
def post(delete, sender, instance, **kwargs):
for counter, manager_name, manager in sender._meta.concrete_managers:
if isinstance(manager, SearchManager):
if manager.relation_index:
backend = load_backend()
backend.update_relation_index(manager, instance.pk, delete)
def post_save(sender, instance, **kwargs):
post(False, sender, instance, **kwargs)
def post_delete(sender, instance, **kwargs):
post(True, sender, instance, **kwargs)
def install_index_model(sender, **kwargs):
needs_relation_index = False
# what to do for abstract_managers?
for counter, manager_name, manager in sender._meta.concrete_managers:
if isinstance(manager, SearchManager) and manager.relation_index:
manager.create_index_model()
needs_relation_index = True
if needs_relation_index:
signals.post_save.connect(post_save, sender=sender)
signals.post_delete.connect(post_delete, sender=sender)
#signals.class_prepared.connect(install_index_model)
class QueryTraits(object):
def __iter__(self):
return iter(self[:301])
def __len__(self):
return self.count()
def get(self, *args, **kwargs):
result = self[:1]
if result:
return result[0]
raise ObjectDoesNotExist
class RelationIndexQuery(QueryTraits):
"""Combines the results of multiple queries by appending the queries in the
given order."""
def __init__(self, model, query):
self.model = model
self.query = query
def order_by(self, *args, **kwargs):
self.query = self.query.order_by(*args, **kwargs)
return self
def filter(self, *args, **kwargs):
self.query = self.query.filter(*args, **kwargs)
return self
def __getitem__(self, index):
pks_slice = index
if not isinstance(index, slice):
pks_slice = slice(None, index + 1, None)
pks = [instance.pk if isinstance(instance, models.Model) else instance['pk']
for instance in self.query[pks_slice]]
if not isinstance(index, slice):
return self.model.objects.filter(pk__in=pks)[index]
return self.model.objects.filter(pk__in=pks)[pks_slice]
# return [item for item in self.model.objects.filter(
# pk__in=pks) if item]
def count(self):
return self.query.count()
# TODO: add keys_only query
# def values(self, fields):
# pass
def search(model, query, language=settings.LANGUAGE_CODE,
search_index='search_index'):
return getattr(model, search_index).search(query, language) |
ankurjimmy/catawampus | refs/heads/master | tr/vendor/curtain/digest.py | 5 | from tornado.web import *
from hashlib import md5
import time  # used by _create_nonce(); make the dependency explicit
class DigestAuthMixin(object):
def apply_checksum(self, data):
return md5(data).hexdigest()
def apply_digest(self, secret, data):
return self.apply_checksum(secret + ":" + data)
def A1(self, algorithm, auth_pass):
"""
If 'algorithm' is "MD5" or unset, A1 is:
A1 = unq(username-value) ":" unq(realm-value) ":" passwd
If 'algorithm' is 'MD5-Sess', A1 is:
A1 = H( unq(username-value) ":" unq(realm-value) ":" passwd )
":" unq(nonce-value) ":" unq(cnonce-value)
"""
username = self.params["username"]
if algorithm == 'MD5' or not algorithm:
return "%s:%s:%s" % (username, self.realm, auth_pass)
elif algorithm == 'MD5-Sess':
return self.apply_checksum('%s:%s:%s:%s:%s' % \
(username,
self.realm,
auth_pass,
self.params['nonce'],
self.params['cnonce']))
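# Illustrative A1 computation for the common MD5 case (the credentials
# are the well-known RFC 2617 example values, shown only for clarity):
#   A1 = 'Mufasa:testrealm@host.com:Circle Of Life'
#   HA1 = md5(A1).hexdigest()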
def A2(self):
"""
If the "qop" directive's value is "auth" or is unspecified, then A2 is:
A2 = Method ":" digest-uri-value
Else,
A2 = Method ":" digest-uri-value ":" H(entity-body)
"""
if self.params['qop'] == 'auth' or not self.params['qop']:
return self.request.method + ":" + self.request.uri
elif self.params['qop'] == 'auth-int':
#print "UNSUPPORTED 'qop' METHOD\n"
return ":".join([self.request.method,
self.request.uri,
self.apply_checksum(self.request.body)])
else:
print "A2 GOT BAD VALUE FOR 'qop': %s\n" % self.params['qop']
def response(self, auth_pass):
if 'qop' in self.params:
auth_comps = [self.params['nonce'],
self.params['nc'],
self.params['cnonce'],
self.params['qop'],
self.apply_checksum(self.A2())]
return self.apply_digest(self.apply_checksum( \
self.A1(self.params.get('algorithm'),
auth_pass)),
':'.join(auth_comps))
else:
return self.apply_digest(self.apply_checksum( \
self.A1(self.params.get('algorithm'),
auth_pass)),
':'.join([self.params["nonce"],
self.apply_checksum(self.A2())]))
def _parse_header(self, authheader):
try:
n = len("Digest ")
authheader = authheader[n:].strip()
items = authheader.split(", ")
keyvalues = [i.split("=", 1) for i in items]
keyvalues = [(k.strip(), v.strip().replace('"', '')) for k, v in keyvalues]
self.params = dict(keyvalues)
except:
self.params = {}  # keep the type consistent with the successful parse
def _create_nonce(self):
return md5("%d:%s" % (time.time(), self.realm)).hexdigest()
def createAuthHeader(self):
self.set_status(401)
nonce = self._create_nonce()
self.set_header('WWW-Authenticate', 'Digest algorithm=MD5, realm="%s", qop=auth, nonce=%s' % (self.realm, nonce))
self.write('please authenticate\n')
self.finish()
return False
def get_authenticated_user(self, get_creds_callback, realm):
creds = None
expected_response = None
actual_response = None
auth = None
if not hasattr(self,'realm'):
self.realm = realm
try:
auth = self.request.headers.get('Authorization')
if not auth or not auth.startswith('Digest '):
return self.createAuthHeader()
else:
self._parse_header(auth)
required_params = ['username', 'realm', 'nonce', 'uri', 'response', 'qop', 'nc', 'cnonce']
for k in required_params:
if k not in self.params:
print "REQUIRED PARAM %s MISSING\n" % k
return self.createAuthHeader()
elif not self.params[k]:
print "REQUIRED PARAM %s IS NONE OR EMPTY\n" % k
return self.createAuthHeader()
else:
continue
creds = get_creds_callback(self.params['username'])
if not creds:
# the username passed to get_creds_callback didn't
# match any valid users.
self.createAuthHeader()
else:
expected_response = self.response(creds['auth_password'])
actual_response = self.params['response']
print "Expected: %s" % expected_response
print "Actual: %s" % actual_response
if expected_response and actual_response:
if expected_response == actual_response:
self._current_user = self.params['username']
print "Digest Auth user '%s' successful for realm '%s'. URI: '%s', IP: '%s'" % (self.params['username'], self.realm, self.request.uri, self.request.remote_ip)
return True
else:
self.createAuthHeader()
except Exception as out:
print "FELL THROUGH: %s\n" % out
print "AUTH HEADERS: %s" % auth
print "SELF.PARAMS: ",self.params,"\n"
print "CREDS: ", creds
print "EXPECTED RESPONSE: %s" % expected_response
print "ACTUAL RESPONSE: %s" % actual_response
return self.createAuthHeader()
def digest_auth(realm, auth_func):
"""A decorator used to protect methods with HTTP Digest authentication.
"""
def digest_auth_decorator(func):
def func_replacement(self, *args, **kwargs):
# 'self' here is the RequestHandler object, which is inheriting
# from DigestAuthMixin to get 'get_authenticated_user'
if self.get_authenticated_user(auth_func, realm):
return func(self, *args, **kwargs)
return func_replacement
return digest_auth_decorator
|
csrocha/OpenUpgrade | refs/heads/8.0 | addons/pos_discount/__init__.py | 315 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import discount
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
chipaca/snapcraft | refs/heads/master | tests/unit/test_mountinfo.py | 2 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
from textwrap import dedent
import fixtures
from testtools.matchers import Equals, HasLength
from snapcraft.internal import errors, mountinfo
from tests import unit
class MountInfoTestCase(unit.TestCase):
def _write_mountinfo(self, contents):
path = "mountinfo"
with open(path, "w") as f:
f.write(contents)
return path
def test_mountinfo_by_root(self):
mounts = mountinfo.MountInfo(
mountinfo_file=self._write_mountinfo(
dedent(
"""\
23 28 0:4 / /proc rw,nosuid,nodev,noexec,relatime shared:14 - proc proc rw
1341 28 7:6 / /snap/snapcraft/1 ro,nodev,relatime shared:39 - squashfs /dev/loop6 ro
1455 28 253:0 /test-snap/prime /snap/test-snap/x1 ro,relatime shared:1 - ext4 /dev/mapper/foo rw,errors=remount-ro,data=ordered
"""
)
)
) # noqa
root_mounts = mounts.for_root("/")
for mount_point in ("/proc", "/snap/snapcraft/1"):
self.assertTrue(
any(m for m in root_mounts if m.mount_point == mount_point),
"Expected {!r} to be included in root mounts".format(mount_point),
)
test_snap_mounts = mounts.for_root("/test-snap/prime")
self.assertThat(test_snap_mounts, HasLength(1))
self.expectThat(test_snap_mounts[0].mount_point, Equals("/snap/test-snap/x1"))
def test_mountinfo_by_mount_point(self):
mounts = mountinfo.MountInfo(
mountinfo_file=self._write_mountinfo(
dedent(
"""\
23 28 0:4 / /proc rw,nosuid,nodev,noexec,relatime shared:14 - proc proc rw
1341 28 7:6 / /snap/snapcraft/1 ro,nodev,relatime shared:39 - squashfs /dev/loop6 ro
1455 28 253:0 /test-snap/prime /snap/test-snap/x1 ro,relatime shared:1 - ext4 /dev/mapper/foo rw,errors=remount-ro,data=ordered
"""
)
)
) # noqa
mount = mounts.for_mount_point("/proc")
self.assertThat(mount.mount_id, Equals("23"))
self.assertThat(mount.parent_id, Equals("28"))
self.assertThat(mount.st_dev, Equals("0:4"))
self.assertThat(mount.root, Equals("/"))
self.assertThat(mount.mount_point, Equals("/proc"))
self.assertThat(mount.mount_options, Equals("rw,nosuid,nodev,noexec,relatime"))
self.assertThat(mount.optional_fields, Equals(["shared:14"]))
self.assertThat(mount.filesystem_type, Equals("proc"))
self.assertThat(mount.mount_source, Equals("proc"))
self.assertThat(mount.super_options, Equals("rw"))
mount = mounts.for_mount_point("/snap/snapcraft/1")
self.assertThat(mount.mount_id, Equals("1341"))
self.assertThat(mount.parent_id, Equals("28"))
self.assertThat(mount.st_dev, Equals("7:6"))
self.assertThat(mount.root, Equals("/"))
self.assertThat(mount.mount_point, Equals("/snap/snapcraft/1"))
self.assertThat(mount.mount_options, Equals("ro,nodev,relatime"))
self.assertThat(mount.optional_fields, Equals(["shared:39"]))
self.assertThat(mount.filesystem_type, Equals("squashfs"))
self.assertThat(mount.mount_source, Equals("/dev/loop6"))
self.assertThat(mount.super_options, Equals("ro"))
mount = mounts.for_mount_point("/snap/test-snap/x1")
self.assertThat(mount.mount_id, Equals("1455"))
self.assertThat(mount.parent_id, Equals("28"))
self.assertThat(mount.st_dev, Equals("253:0"))
self.assertThat(mount.root, Equals("/test-snap/prime"))
self.assertThat(mount.mount_point, Equals("/snap/test-snap/x1"))
self.assertThat(mount.mount_options, Equals("ro,relatime"))
self.assertThat(mount.optional_fields, Equals(["shared:1"]))
self.assertThat(mount.filesystem_type, Equals("ext4"))
self.assertThat(mount.mount_source, Equals("/dev/mapper/foo"))
self.assertThat(
mount.super_options, Equals("rw,errors=remount-ro,data=ordered")
)
def test_mountinfo_missing_root(self):
mounts = mountinfo.MountInfo(mountinfo_file=self._write_mountinfo(""))
raised = self.assertRaises(
errors.RootNotMountedError, mounts.for_root, "test-root"
)
self.assertThat(raised.root, Equals("test-root"))
def test_mountinfo_missing_mount_point(self):
mounts = mountinfo.MountInfo(mountinfo_file=self._write_mountinfo(""))
raised = self.assertRaises(
errors.MountPointNotFoundError, mounts.for_mount_point, "test-root"
)
self.assertThat(raised.mount_point, Equals("test-root"))
def test_invalid_mountinfo(self):
self.fake_logger = fixtures.FakeLogger(level=logging.WARN)
self.useFixture(self.fake_logger)
mountinfo.MountInfo(mountinfo_file=self._write_mountinfo(dedent("I'm invalid")))
# Assert that a warning was logged
self.assertThat(
self.fake_logger.output,
Equals("Unable to parse mountinfo row: I'm invalid\n"),
)
|
FreekingDean/home-assistant | refs/heads/dev | homeassistant/helpers/service.py | 19 | """Service calling related helpers."""
import asyncio
import functools
import logging
# pylint: disable=unused-import
from typing import Optional # NOQA
import voluptuous as vol
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.core import HomeAssistant # NOQA
from homeassistant.exceptions import TemplateError
from homeassistant.loader import get_component
import homeassistant.helpers.config_validation as cv
from homeassistant.util.async import run_coroutine_threadsafe
HASS = None # type: Optional[HomeAssistant]
CONF_SERVICE = 'service'
CONF_SERVICE_TEMPLATE = 'service_template'
CONF_SERVICE_ENTITY_ID = 'entity_id'
CONF_SERVICE_DATA = 'data'
CONF_SERVICE_DATA_TEMPLATE = 'data_template'
_LOGGER = logging.getLogger(__name__)
def service(domain, service_name):
"""Decorator factory to register a service."""
def register_service_decorator(action):
"""Decorator to register a service."""
HASS.services.register(domain, service_name,
functools.partial(action, HASS))
return action
return register_service_decorator
def call_from_config(hass, config, blocking=False, variables=None,
validate_config=True):
"""Call a service based on a config hash."""
run_coroutine_threadsafe(
async_call_from_config(hass, config, blocking, variables,
validate_config), hass.loop).result()
@asyncio.coroutine
def async_call_from_config(hass, config, blocking=False, variables=None,
validate_config=True):
"""Call a service based on a config hash."""
if validate_config:
try:
config = cv.SERVICE_SCHEMA(config)
except vol.Invalid as ex:
_LOGGER.error("Invalid config for calling service: %s", ex)
return
if CONF_SERVICE in config:
domain_service = config[CONF_SERVICE]
else:
try:
config[CONF_SERVICE_TEMPLATE].hass = hass
domain_service = config[CONF_SERVICE_TEMPLATE].async_render(
variables)
domain_service = cv.service(domain_service)
except TemplateError as ex:
_LOGGER.error('Error rendering service name template: %s', ex)
return
except vol.Invalid as ex:
_LOGGER.error('Template rendered invalid service: %s',
domain_service)
return
domain, service_name = domain_service.split('.', 1)
service_data = dict(config.get(CONF_SERVICE_DATA, {}))
if CONF_SERVICE_DATA_TEMPLATE in config:
def _data_template_creator(value):
"""Recursive template creator helper function."""
if isinstance(value, list):
return [_data_template_creator(item) for item in value]
elif isinstance(value, dict):
return {key: _data_template_creator(item)
for key, item in value.items()}
value.hass = hass
return value.async_render(variables)
service_data.update(_data_template_creator(
config[CONF_SERVICE_DATA_TEMPLATE]))
if CONF_SERVICE_ENTITY_ID in config:
service_data[ATTR_ENTITY_ID] = config[CONF_SERVICE_ENTITY_ID]
yield from hass.services.async_call(
domain, service_name, service_data, blocking)
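# Illustrative config (hypothetical entity and variable names) that this
# coroutine can execute; with validate_config=True the schema coerces the
# template strings, which are then rendered with the passed-in variables:
#   config = {
#       'service_template': 'light.turn_{{ state }}',
#       'data_template': {'brightness': '{{ level }}'},
#       'entity_id': 'light.kitchen',
#   }
#   # with variables={'state': 'on', 'level': 150} this dispatches
#   # light.turn_on with {'brightness': '150', 'entity_id': 'light.kitchen'}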
def extract_entity_ids(hass, service_call, expand_group=True):
"""Helper method to extract a list of entity ids from a service call.
Will convert group entity ids to the entity ids they represent.
Async friendly.
"""
if not (service_call.data and ATTR_ENTITY_ID in service_call.data):
return []
group = get_component('group')
# Entity ID attr can be a list or a string
service_ent_id = service_call.data[ATTR_ENTITY_ID]
if expand_group:
if isinstance(service_ent_id, str):
return group.expand_entity_ids(hass, [service_ent_id])
return [ent_id for ent_id in
group.expand_entity_ids(hass, service_ent_id)]
else:
if isinstance(service_ent_id, str):
return [service_ent_id]
return service_ent_id
|
ColumbiaCMB/kid_readout | refs/heads/master | apps/data_taking_scripts/2017-05-jpl-lf-n1-optical/single-horn/sweep_stream_mmw_compressor_off.py | 1 | import time
import numpy as np
from equipment.custom import mmwave_source
from equipment.hittite import signal_generator
from equipment.srs import lockin
from xystage import stepper
from kid_readout.interactive import *
from kid_readout.equipment import hardware
from kid_readout.measurement import mmw_source_sweep, core, acquire
logger.setLevel(logging.DEBUG)
# fg = FunctionGenerator()
#hittite = signal_generator.Hittite(ipaddr='192.168.0.200')
#hittite.set_power(0)
#hittite.on()
lockin = lockin.Lockin(LOCKIN_SERIAL_PORT)
tic = time.time()
# lockin.sensitivity = 17
print lockin.identification
print lockin.identification
# print time.time()-tic
# tic = time.time()
# print lockin.state(measurement_only=True)
# print time.time()-tic
source = mmwave_source.MMWaveSource()
source.set_attenuator_turns(6.0,6.0)
source.multiplier_input = 'thermal'
source.waveguide_twist_angle = 0
source.ttl_modulation_source = 'roach'
hwp_motor = stepper.SimpleStepper(port='/dev/ttyACM2')
setup = hardware.Hardware(hwp_motor, source, lockin)
ri = Roach2Baseband()
ri.set_modulation_output(7)
initial_f0s = np.load('/data/readout/resonances/2017-06-JPL-8x8-LF-N1_single_horn_4.npy')/1e6
#initial_f0s = all_f0s[:96][::6]
nf = len(initial_f0s)
atonce = 4
if nf % atonce > 0:
print "extending list of resonators to make a multiple of ", atonce
initial_f0s = np.concatenate((initial_f0s, np.arange(1, 1 + atonce - (nf % atonce)) + initial_f0s.max()))
print len(initial_f0s)
nsamp = 2**20 #going above 2**18 with 128 simultaneous tones doesn't quite work yet
offsets = np.arange(-16,16)*512./nsamp
last_f0s = initial_f0s
mmw_freqs = np.linspace(140e9, 165e9, 128)
ri.set_dac_atten(35)
tic = time.time()
f0s = initial_f0s
#setup.hittite.off()
#high is off for initial state
ri.set_modulation_output('high')
ncf_source_off = new_nc_file(suffix= 'mmw_broadband_source_off')
swpa = acquire.run_sweep(ri,tone_banks=f0s[None,:]+offsets[:,None],num_tone_samples=nsamp,
length_seconds=0.2,
verbose=True, state=setup.state())
print "resonance sweep done", (time.time()-tic)/60.
print "sweep written", (time.time()-tic)/60.
current_f0s = []
for sidx in range(swpa.num_channels):
swp = swpa.sweep(sidx)
res = swp.resonator
print res.f_0, res.Q, res.delay*1e6, res.current_result.redchi, (f0s[sidx]*1e6-res.f_0)
if np.abs(f0s[sidx]*1e6-res.f_0) > 100e3:
current_f0s.append(f0s[sidx]*1e6)
logger.info("Resonator index %d moved more than 100 kHz, keeping original value %.1f MHz" % (sidx,
f0s[sidx]))
else:
current_f0s.append(res.f_0)
print "fits complete", (time.time()-tic)/60.
current_f0s = np.array(current_f0s)/1e6
current_f0s.sort()
bad_deltas = np.diff(current_f0s) < (256./2**14)*8
if bad_deltas.sum():
print "found bad deltas", bad_deltas.sum()
current_f0s[np.nonzero(bad_deltas)] -= 0.1
bad_deltas = np.diff(current_f0s) < (256./2**14)*8
if bad_deltas.sum():
print "found bad deltas", bad_deltas.sum()
current_f0s[np.nonzero(bad_deltas)] -= 0.1
ri.set_tone_freqs(current_f0s,nsamp=nsamp)
ri.select_fft_bins(range(len(current_f0s)))
print ri.fpga_fft_readout_indexes
print np.diff(ri.fpga_fft_readout_indexes.astype('float')).min()
raw_input("turn off compressor")
meas = ri.get_measurement(num_seconds=30)
raw_input("turn on compressor")
meas.state = setup.state(fast=True)
sweep_stream_array = basic.SweepStreamArray(sweep_array = swpa, stream_array = meas, state = meas.state, description= 'source off')
ncf_source_off.write(sweep_stream_array)
ncf_source_off.close()
turnlist = np.arange(9,-0.1,-0.5)
for turn_num in turnlist:
ri.set_modulation_output(7)
raw_input('set attenuator knobs to %f turns & check lock-in range' %turn_num)
source.set_attenuator_turns(turn_num, turn_num)
#turn on source
ri.set_modulation_output('low')
ncf = new_nc_file(suffix='mmw_broadband_source_on_%.2f_turns' %turn_num)
swpa = acquire.run_sweep(ri, tone_banks=f0s[None, :] + offsets[:, None], num_tone_samples=nsamp,
length_seconds=0.2,
verbose=True, state=setup.state())
print "resonance sweep done", (time.time() - tic) / 60.
print "sweep written", (time.time() - tic) / 60.
current_f0s = []
for sidx in range(swpa.num_channels):
swp = swpa.sweep(sidx)
res = swp.resonator
print res.f_0, res.Q, res.delay * 1e6, res.current_result.redchi, (f0s[sidx] * 1e6 - res.f_0)
if np.abs(f0s[sidx] * 1e6 - res.f_0) > 100e3:
current_f0s.append(f0s[sidx] * 1e6)
logger.info("Resonator index %d moved more than 100 kHz, keeping original value %.1f MHz" % (sidx,
f0s[sidx]))
else:
current_f0s.append(res.f_0)
print "fits complete", (time.time() - tic) / 60.
current_f0s = np.array(current_f0s) / 1e6
current_f0s.sort()
bad_deltas = np.diff(current_f0s) < (256. / 2 ** 14) * 8
if bad_deltas.sum():
print "found bad deltas", bad_deltas.sum()
current_f0s[np.nonzero(bad_deltas)] -= 0.1
bad_deltas = np.diff(current_f0s) < (256. / 2 ** 14) * 8
if bad_deltas.sum():
print "found bad deltas", bad_deltas.sum()
current_f0s[np.nonzero(bad_deltas)] -= 0.1
ri.set_tone_freqs(current_f0s, nsamp=nsamp)
ri.select_fft_bins(range(len(current_f0s)))
print ri.fpga_fft_readout_indexes
print np.diff(ri.fpga_fft_readout_indexes.astype('float')).min()
raw_input("turn off compressor")
meas = ri.get_measurement(num_seconds=30)
raw_input("turn on compressor")
#turn on modulation to get zbd voltage
ri.set_modulation_output(7)
time.sleep(2)
sweep_stream_array = basic.SweepStreamArray(sweep_array=swpa, stream_array=meas, state=setup.state(fast=True),
description='source on')
ncf.write(sweep_stream_array)
meas = ri.get_measurement(num_seconds=30)
meas.state = setup.state(fast=True)
sweep_stream_array = basic.SweepStreamArray(sweep_array=swpa, stream_array=meas, state=meas.state,
description='chopped')
ncf.write(sweep_stream_array)
ncf.close() |
nagadomi/keras | refs/heads/master | activations.py | 2 | import theano
import theano.tensor as T
import types
def softmax(x):
return T.nnet.softmax(x)
def softplus(x):
return T.nnet.softplus(x)
def relu(x):
return (x + abs(x)) / 2.0
def tanh(x):
return T.tanh(x)
def sigmoid(x):
return T.nnet.sigmoid(x)
def hard_sigmoid(x):
return T.nnet.hard_sigmoid(x)
def linear(x):
return x
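# The get() helper below resolves an activation by name. Illustrative
# behaviour (assumed from get_from_module's usual conventions):
#   get('relu') returns the relu function defined above, while an
#   unknown identifier raises an exception naming 'activation function'.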
from utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'activation function') |
andydandy74/ClockworkForDynamo | refs/heads/master | nodes/0.7.x/python/Wall.CreationMethodIsStandard.py | 4 | import clr
clr.AddReference('RevitAPI')
from Autodesk.Revit.DB import *
clr.AddReference("RevitNodes")
import Revit
clr.ImportExtensions(Revit.Elements)
wallinstances = UnwrapElement(IN[0])
booleans = list()
for item in wallinstances:
try:
if item.GetType().Name == 'Wall':
booleans.append(True)
else:
booleans.append(False)
except:
booleans.append(False)
OUT = booleans |
wglas85/math | refs/heads/master | src/mathx/solver.py | 1 | '''
Created on 24.09.2015
@author: michi
'''
from mathx import formula
from mathx import ast
from copy import deepcopy
import os
import logging
log = logging.getLogger('mathx.gleichungslöser')
#handler = logging.StreamHandler(open('/dev/stderr', 'w'))
#formatter = logging.Formatter( '%(asctime)s %(levelname)s %(message)s')
#handler.setFormatter(formatter)
#root_logger = logging.getLogger()
#root_logger.addHandler(handler)
#root_logger.setLevel(logging.DEBUG)
class Solver:
def __init__(self,g):
self.gleichung = g
filename = os.path.dirname(__file__)
filename = os.path.join(filename,"gleichungslöser_history.txt")
fp = open(filename,"a")
fp.write(self.gleichung+"\n")
fp.close()
l = [formula.Parser(i).parseAst() for i in g.split("=")]
self.lhs = l[0]
self.rhs = l[-1]
def evaluate(self)->dict:
rhs = ast.AstBinaryOperator(self.lhs,"-",self.rhs)
varsv = rhs.findVars()
for i in varsv.keys():
path = rhs.searchPath(ast.AstVariable(i))
work = deepcopy(rhs)
lhs = ast.AstConstant(0.0)
while path != []:
try:
work,path,lhs = work.reducePath(path,lhs)
except ValueError:
if not(type(work)==ast.AstVariable) or lhs.count(ast.AstVariable)!=0:
raise
#elif type(work)==ast.AstVariable and lhs.count(ast.AstVariable)!=0:
else:
log.debug("1. %s = %s"%(work,lhs))
lhs = lhs.simplify()
log.debug("2. %s = %s"%(work,lhs))
break
log.debug("1. %s = %s"%(work,lhs))
lhs = lhs.simplify()
log.debug("2. %s = %s"%(work,lhs))
log.debug("%s = %s"%(i,lhs))
varsv[i] = lhs
return varsv
class AstSolver(Solver):
def __init__(self,lhs,rhs):
filename = os.path.dirname(__file__)
filename = os.path.join(filename,"gleichungslöser_history.txt")
fp = open(filename,"a")
fp.write("%s=%s\n"%(lhs,rhs))
fp.close()
self.lhs = lhs
self.rhs = rhs
if __name__ == '__main__':
g = input("Please enter the equation: ")
'''
assert len(g.split("="))==2
filename = os.path.dirname(__file__)
filename = os.path.join(filename,"gleichungslöser_history.txt")
fp = open(filename,"a")
fp.write(g+"\n")
fp.close()
l = [formula.Parser(i).parseAst() for i in g.split("=")]
print("=".join([str(i) for i in l]))
rhs = ast.AstBinaryOperator(l[0],"-",l[1])
varsv = {}
rhs.findVars(varsv)
for i in varsv.keys():
path = rhs.searchPath(ast.AstVariable(i))
work = deepcopy(rhs)
lhs = ast.AstConstant(0.0)
while path != []:
work,path,lhs= work.reducePath(path,lhs)
print("1. %s = %s"%(work,lhs))
lhs = lhs.simplify()
print("2. %s = %s"%(work,lhs))
print("%s = %s"%(i,lhs))
'''
handler = logging.StreamHandler(open('/dev/stderr', 'w'))
formatter = logging.Formatter( '%(asctime)s %(levelname)s %(message)s')
handler.setFormatter(formatter)
root_logger = logging.getLogger()
root_logger.addHandler(handler)
root_logger.setLevel(logging.DEBUG)
evg = Solver(g).evaluate()
print(evg)
print("\n".join(["%s = %s"%(key,value) for key,value in evg.items()]))
|
GenericStudent/home-assistant | refs/heads/dev | tests/components/notion/test_config_flow.py | 5 | """Define tests for the Notion config flow."""
import aionotion
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.notion import DOMAIN, config_flow
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from tests.async_mock import AsyncMock, patch
from tests.common import MockConfigEntry
@pytest.fixture
def mock_client():
"""Define a fixture for a client creation coroutine."""
return AsyncMock(return_value=None)
@pytest.fixture
def mock_aionotion(mock_client):
"""Mock the aionotion library."""
with patch("homeassistant.components.notion.config_flow.async_get_client") as mock_:
mock_.side_effect = mock_client
yield mock_
async def test_duplicate_error(hass):
"""Test that errors are shown when duplicates are added."""
conf = {CONF_USERNAME: "user@host.com", CONF_PASSWORD: "password123"}
MockConfigEntry(domain=DOMAIN, unique_id="user@host.com", data=conf).add_to_hass(
hass
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
@pytest.mark.parametrize(
"mock_client", [AsyncMock(side_effect=aionotion.errors.NotionError)]
)
async def test_invalid_credentials(hass, mock_aionotion):
"""Test that an invalid API/App Key throws an error."""
conf = {CONF_USERNAME: "user@host.com", CONF_PASSWORD: "password123"}
flow = config_flow.NotionFlowHandler()
flow.hass = hass
flow.context = {"source": SOURCE_USER}
result = await flow.async_step_user(user_input=conf)
assert result["errors"] == {"base": "invalid_auth"}
async def test_show_form(hass):
"""Test that the form is served with no input."""
flow = config_flow.NotionFlowHandler()
flow.hass = hass
flow.context = {"source": SOURCE_USER}
result = await flow.async_step_user(user_input=None)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_step_import(hass, mock_aionotion):
"""Test that the import step works."""
conf = {CONF_USERNAME: "user@host.com", CONF_PASSWORD: "password123"}
flow = config_flow.NotionFlowHandler()
flow.hass = hass
flow.context = {"source": SOURCE_USER}
result = await flow.async_step_import(import_config=conf)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "user@host.com"
assert result["data"] == {
CONF_USERNAME: "user@host.com",
CONF_PASSWORD: "password123",
}
async def test_step_user(hass, mock_aionotion):
"""Test that the user step works."""
conf = {CONF_USERNAME: "user@host.com", CONF_PASSWORD: "password123"}
flow = config_flow.NotionFlowHandler()
flow.hass = hass
flow.context = {"source": SOURCE_USER}
result = await flow.async_step_user(user_input=conf)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "user@host.com"
assert result["data"] == {
CONF_USERNAME: "user@host.com",
CONF_PASSWORD: "password123",
}
|
Letractively/userinfuser | refs/heads/master | serverside/main.py | 12 | # Copyright (C) 2011, CloudCaptive
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import wsgiref.handlers
import cgi
from google.appengine.ext.webapp import template
from serverside.console import Console
from serverside.entities.users import Users
from serverside.account import Accounts
from serverside.signup import NewsLetterSignUp
from serverside.signup import SignUp
from serverside.signin import SignIn
from serverside.logout import LogOut
from serverside.badge import UploadBadge
from serverside.badge import DownloadBadge
from serverside.badge import SeeTheme
from serverside.analytics import GetAnalytics
from serverside.analytics import RunAnalytics
from serverside import constants
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.api import users
import logging
import os
from serverside.analytics import *
class IndexPage(webapp.RequestHandler):
def get(self):
self.redirect('/html/signup.html')
class HelloWorld(webapp.RequestHandler):
def get(self):
self.response.out.write("hi!")
application = webapp.WSGIApplication([
('/', IndexPage),
('/account', Accounts),
('/login', SignIn),
('/signup', SignUp),
('/logout', LogOut),
('/badge/u', UploadBadge),
('/badge/d', DownloadBadge),
('/badge/t', SeeTheme),
('/newslettersignup',NewsLetterSignUp),
('/hello', HelloWorld),
('/runanalytics', RunAnalytics),
('/getanalytics', GetAnalytics),
], debug=constants.DEBUG)
def main():
run_wsgi_app(application)
if __name__ == '__main__':
main()
|
ohinckel/smarthome | refs/heads/master | lib/config.py | 1 | #!/usr/bin/env python3
# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab
#########################################################################
# Copyright 2013 Marcus Popp marcus@popp.mx
# Copyright 2016 The SmartHomeNG team
#########################################################################
# This file is part of SmartHomeNG.
#
# SmartHomeNG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SmartHomeNG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SmartHomeNG. If not, see <http://www.gnu.org/licenses/>.
#########################################################################
import logging
import collections
logger = logging.getLogger(__name__)
def strip_quotes(string):
string = string.strip()
if len(string) > 0:
if string[0] in ['"', "'"]: # check if string starts with ' or "
if string[0] == string[-1]: # and end with it
if string.count(string[0]) == 2: # if they are the only one
string = string[1:-1] # remove them
return string
def parse(filename, config=None):
"""
This function parses the file with the given filename for config entries
:param filename: the filename of the file that provides the configuration entries
:param config: already existing config information, should be an ordered dict
The config file should stick to the following setup:
[firstlevel]
attribute1 = xyz
attribute2 = foo
attribute3 = bar
[[secondlevel]]
attribute1 = abc
attribute2 = bar
attribute3 = foo
[[[thirdlevel]]]
attribute1 = def
attribute2 = barfoo
attribute3 = foobar
[[anothersecondlevel]]
attribute1 = andsoon
where firstlevel, secondlevel, thirdlevel and anothersecondlevel are defined as items, and the attribute entries are their respective attribute-value pairs
Valid characters for the items are a-z and A-Z plus any digit and underscore as second or further characters.
Valid characters for the attributes are the same as for an item plus @ and *
"""
valid_item_chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
valid_attr_chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_@*'
digits = '0123456789'
valid_set = set(valid_attr_chars)
if config is None:
config = collections.OrderedDict()
item = config
with open(filename, 'r', encoding='UTF-8') as f:
linenu = 0
parents = collections.OrderedDict()  # maps nesting level -> item dict
for raw in f.readlines():
linenu += 1
line = raw.lstrip('\ufeff') # remove BOM
line = line.partition('#')[0].strip()
if line == '':
continue
if line[0] == '[': # item
brackets = 0
level = 0
closing = False
for index in range(len(line)):
if line[index] == '[' and not closing:
brackets += 1
level += 1
elif line[index] == ']':
closing = True
brackets -= 1
else:
closing = True
if line[index] not in valid_item_chars + "'":
logger.error("Problem parsing '{}' invalid character in line {}: {}. Valid characters are: {}".format(filename, linenu, line, valid_item_chars))
return config
if brackets != 0:
logger.error("Problem parsing '{}' unbalanced brackets in line {}: {}".format(filename, linenu, line))
return config
name = line.strip("[]")
name = strip_quotes(name)
if len(name) > 0:
if name[0] in digits:
logger.error("Problem parsing '{}': item starts with digit '{}' in line {}: {}".format(filename, name[0], linenu, line))
return config
else:
logger.error("Problem parsing '{}' tried to use an empty item name in line {}: {}".format(filename, linenu, line))
return config
if level == 1:
if name not in config:
config[name] = collections.OrderedDict()
item = config[name]
parents = collections.OrderedDict()
parents[level] = item
else:
if level - 1 not in parents:
logger.error("Problem parsing '{}' no parent item defined for item in line {}: {}".format(filename, linenu, line))
return config
parent = parents[level - 1]
if name not in parent:
parent[name] = collections.OrderedDict()
item = parent[name]
parents[level] = item
else: # attribute
attr, __, value = line.partition('=')
if not value:
continue
attr = attr.strip()
if not set(attr).issubset(valid_set):
logger.error("Problem parsing '{}' invalid character in line {}: {}. Valid characters are: {}".format(filename, linenu, attr, valid_attr_chars))
continue
if len(attr) > 0:
if attr[0] in digits:
logger.error("Problem parsing '{}' attrib starts with a digit '{}' in line {}: {}.".format(filename, attr[0], linenu, attr ))
if '|' in value:
item[attr] = [strip_quotes(x) for x in value.split('|')]
else:
item[attr] = strip_quotes(value)
return config
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
conf = parse('dev.conf')
print(conf)
|
mdaniel/intellij-community | refs/heads/master | python/testData/refactoring/inlineFunction/usesArgumentUnpacking.py | 12 | def foo(a, b, c, d):
print(a, b, c, d)
arg = (1, 2, 3, 4)
fo<caret>o(*arg)
|
godLoveLucifer/viewfinder | refs/heads/master | backend/db/notification.py | 13 | # Copyright 2012 Viewfinder Inc. All Rights Reserved.
"""Viewfinder notification.
Notifications are the mechanism by which the client is notified of incremental modifications
to server assets. Notifications enable the client to incrementally keep its state in sync
with the server. Furthermore, pushed alerts notify end users that their client does not
have the latest data.
*Every* change that is visible to a particular user results in the creation of a notification.
Certain kinds of operations, such as share, also result in the push of an alert to all the
user's devices. Operations which modify viewpoint assets in ways that are visible to other
followers also result in the creation of an activity.
Although at first glance notifications and activities are similar, they are different in
important ways:
1. Notifications are created *per-user*, and apply to any change that may have occurred in
assets viewable by that user. In contrast, activities are associated with viewpoints,
not users, and are only created for changes that are visible to all followers. As an
example, a user might override the title of a viewpoint. A notification is created, but
an activity is not, since that change is user-specific and not visible to other followers.
2. Notifications contain coarse-granularity invalidation lists, which instruct the client as
to which assets need to be re-queried. In contrast, activities contain the exhaustive list
of operation-specific asset identifiers which were changed. As an example, a notification
generated by a share might contain just a single episode invalidation, whereas the
corresponding activity would contain the identifier of that episode *plus* the identifiers
of all photos shared as part of that episode.
3. The notification table will be truncated periodically, whereas the activity table lives
indefinitely. The notification table exists in order to allow the client to incrementally
update its cache of server state. The activity table exists in order to keep a record of
structural changes to the viewpoint.
Notification: Notify client of changes to asset tree.
"""
__authors__ = ['spencer@emailscrubbed.com (Spencer Kimball)',
'andy@emailscrubbed.com (Andy Kimball)']
import json
import logging
from tornado import gen
from viewfinder.backend.base import util
from viewfinder.backend.db import vf_schema
from viewfinder.backend.db.base import DBObject
from viewfinder.backend.db.range_base import DBRangeObject
@DBObject.map_table_attributes
class Notification(DBRangeObject):
"""Viewfinder notification data object."""
__slots__ = []
_table = DBObject._schema.GetTable(vf_schema.NOTIFICATION)
def __init__(self, user_id=None, notification_id=None):
super(Notification, self).__init__()
self.user_id = user_id
self.notification_id = notification_id
def GetInvalidate(self):
"""Parses and returns the JSON invalidate attribute as a python dict."""
return json.loads(self.invalidate) if self.invalidate is not None else None
def SetInvalidate(self, invalidate_dict):
"""Sets invalidation python dict as JSON invalidate attribute."""
self.invalidate = json.dumps(invalidate_dict)
@classmethod
@gen.coroutine
def TryClearBadge(cls, client, user_id, device_id, notification_id):
"""Tries to create a "clear_badges" notification with the given id. Returns False if another
notification with this id has already been created, else returns True.
"""
notification = Notification(user_id, notification_id)
notification.name = 'clear_badges'
notification.timestamp = util.GetCurrentTimestamp()
notification.sender_id = user_id
notification.sender_device_id = device_id
notification.badge = 0
# If _TryUpdate returns false, then new notifications showed up while the query was running, and so
# retry creation of the notification.
success = yield notification._TryUpdate(client)
raise gen.Return(success)
@classmethod
@gen.coroutine
def QueryLast(cls, client, user_id, consistent_read=False):
"""Returns the notification with the highest notification_id, or None if the notification
table is empty.
"""
notification_list = yield gen.Task(Notification.RangeQuery,
client,
user_id,
range_desc=None,
limit=1,
col_names=None,
scan_forward=False,
consistent_read=consistent_read)
raise gen.Return(notification_list[0] if len(notification_list) > 0 else None)
@classmethod
@gen.coroutine
def CreateForUser(cls, client, operation, user_id, name, invalidate=None,
activity_id=None, viewpoint_id=None, seq_num_pair=None,
inc_badge=False, consistent_read=False):
"""Creates a notification database record for the specified user, based upon the
notification record that was last created and the current operation. If "inc_badge" is
true, then increment the user's pending notification badge count. Returns the newly
created notification.
"""
while True:
last_notification = yield Notification.QueryLast(client, user_id, consistent_read=consistent_read)
if last_notification is None:
notification_id = 1
badge = 0
else:
notification_id = last_notification.notification_id + 1
badge = last_notification.badge
notification = Notification(user_id, notification_id)
notification.name = name
if invalidate is not None:
notification.SetInvalidate(invalidate)
notification.activity_id = activity_id
notification.viewpoint_id = viewpoint_id
# Store update_seq and/or viewed_seq on notification if they were specified.
if seq_num_pair is not None:
update_seq, viewed_seq = seq_num_pair
notification.update_seq = update_seq
# viewed_seq applies only to the user that submitted the operation.
if viewed_seq is not None and operation.user_id == user_id:
notification.viewed_seq = viewed_seq
# Increment badge if requested to do so.
if inc_badge:
badge += 1
notification.badge = badge
notification.timestamp = operation.timestamp
notification.sender_id = operation.user_id
notification.sender_device_id = operation.device_id
notification.op_id = operation.operation_id
success = yield notification._TryUpdate(client)
# If creation of the notification succeeded, we're done. Otherwise, retry from the
# start, since another notification allocated the same id.
if success:
raise gen.Return(notification)
# If update failed, may have been because we couldn't read the "real" last notification.
consistent_read = True
@gen.coroutine
def _TryUpdate(self, client):
"""Creates a new notification database record using the next available notification_id.
Avoids race conditions by using the "expected" argument to Update in order to ensure that
a unique notification_id is used. If another notification allocates a particular
notification_id first, this method will return False. The caller can then retry with a new
notification_id.
"""
try:
yield gen.Task(self.Update, client, expected={'notification_id': False})
except Exception as e:
# Notification creation failed, so return False so caller can retry.
logging.info('notification id %d is already in use: %s' % (self.notification_id, e))
raise gen.Return(False)
raise gen.Return(True)
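# Hedged sketch (not from the original module) of the same optimistic
# allocate-with-retry pattern in plain Python: dict.setdefault stands in
# for the conditional write that Update(expected={'notification_id': False})
# performs against the datastore.
def allocate_next_id(table, make_record):
    """table maps notification_id -> record; retries until a fresh id wins."""
    while True:
        next_id = max(table) + 1 if table else 1
        record = make_record(next_id)
        if table.setdefault(next_id, record) is record:
            return record  # our conditional "write" won the race
        # Another writer claimed next_id first; re-read the tail and retry.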
|
jmgilbert2/energi | refs/heads/energi_v0_egihash_integration | qa/rpc-tests/bipdersig-p2p.py | 40 | #!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block
from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript
from io import BytesIO
import time
# A canonical signature consists of:
# <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype>
def unDERify(tx):
'''
Make the signature in vin 0 of a tx non-DER-compliant,
by adding padding after the S-value.
'''
scriptSig = CScript(tx.vin[0].scriptSig)
newscript = []
for i in scriptSig:
if (len(newscript) == 0):
newscript.append(i[0:-1] + b'\0' + i[-1:])
else:
newscript.append(i)
tx.vin[0].scriptSig = CScript(newscript)
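# Hedged illustration with made-up bytes (not a real signature): the loop
# above rewrites only the first push in scriptSig, wedging a zero byte
# between the DER blob and the trailing hashtype byte.
sig = b'\x30\x06\x02\x01\x01\x02\x01\x01' + b'\x01'  # <DER sig> <SIGHASH_ALL>
assert sig[0:-1] + b'\0' + sig[-1:] == b'\x30\x06\x02\x01\x01\x02\x01\x01\x00\x01'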
'''
This test is meant to exercise BIP66 (DER SIG).
Connect to a single node.
Mine 2 (version 2) blocks (save the coinbases for later).
Generate 98 more version 2 blocks, verify the node accepts.
Mine 749 version 3 blocks, verify the node accepts.
Check that the new DERSIG rules are not enforced on the 750th version 3 block.
Check that the new DERSIG rules are enforced on the 751st version 3 block.
Mine 199 new version blocks.
Mine 1 old-version block.
Mine 1 new version block.
Mine 1 old version block, see that the node rejects.
'''
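# Hedged sketch (not part of the original test) of the BIP34-style
# supermajority rule the block counts above are built around, assuming the
# mainnet thresholds of 750/1000 to enforce and 950/1000 to reject:
def dersig_state(last_1000_versions):
    v3_count = sum(1 for v in last_1000_versions if v >= 3)
    if v3_count >= 950:
        return 'enforced; version-2 blocks rejected outright'
    if v3_count >= 750:
        return 'enforced on version-3 blocks'
    return 'not yet enforced'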
class BIP66Test(ComparisonTestFramework):
def __init__(self):
self.num_nodes = 1
def setup_network(self):
# Must set the blockversion for this test
self.nodes = start_nodes(1, self.options.tmpdir,
extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=2']],
binary=[self.options.testbinary])
def run_test(self):
test = TestManager(self, self.options.tmpdir)
test.add_all_connections(self.nodes)
NetworkThread().start() # Start up network handling in another thread
test.run()
def create_transaction(self, node, coinbase, to_address, amount):
from_txid = node.getblock(coinbase)['tx'][0]
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
tx = CTransaction()
f = BytesIO(hex_str_to_bytes(signresult['hex']))
tx.deserialize(f)
return tx
def get_tests(self):
self.coinbase_blocks = self.nodes[0].generate(2)
height = 3 # height of the next block to build
self.tip = int("0x" + self.nodes[0].getbestblockhash() + "L", 0)
self.nodeaddress = self.nodes[0].getnewaddress()
self.last_block_time = int(time.time())
''' 98 more version 2 blocks '''
test_blocks = []
for i in xrange(98):
block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 2
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
height += 1
yield TestInstance(test_blocks, sync_every_block=False)
''' Mine 749 version 3 blocks '''
test_blocks = []
for i in xrange(749):
block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
height += 1
yield TestInstance(test_blocks, sync_every_block=False)
'''
Check that the new DERSIG rules are not enforced in the 750th
version 3 block.
'''
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[0], self.nodeaddress, 1.0)
unDERify(spendtx)
spendtx.rehash()
block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
height += 1
yield TestInstance([[block, True]])
'''
Check that the new DERSIG rules are enforced in the 751st version 3
block.
'''
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[1], self.nodeaddress, 1.0)
unDERify(spendtx)
spendtx.rehash()
block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.last_block_time += 1
yield TestInstance([[block, False]])
''' Mine 199 new version blocks on last valid tip '''
test_blocks = []
for i in xrange(199):
block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
height += 1
yield TestInstance(test_blocks, sync_every_block=False)
''' Mine 1 old version block '''
block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 2
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
height += 1
yield TestInstance([[block, True]])
''' Mine 1 new version block '''
block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
height += 1
yield TestInstance([[block, True]])
''' Mine 1 old version block, should be invalid '''
block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 2
block.rehash()
block.solve()
self.last_block_time += 1
yield TestInstance([[block, False]])
if __name__ == '__main__':
BIP66Test().main()
|
gauribhoite/personfinder | refs/heads/master | env/google_appengine/lib/django-1.2/django/contrib/flatpages/tests/views.py | 47 | import os
from django.conf import settings
from django.test import TestCase
class FlatpageViewTests(TestCase):
fixtures = ['sample_flatpages']
urls = 'django.contrib.flatpages.tests.urls'
def setUp(self):
self.old_MIDDLEWARE_CLASSES = settings.MIDDLEWARE_CLASSES
flatpage_middleware_class = 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware'
if flatpage_middleware_class in settings.MIDDLEWARE_CLASSES:
settings.MIDDLEWARE_CLASSES = tuple(m for m in settings.MIDDLEWARE_CLASSES if m != flatpage_middleware_class)
self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
settings.TEMPLATE_DIRS = (
os.path.join(
os.path.dirname(__file__),
'templates'
),
)
self.old_LOGIN_URL = settings.LOGIN_URL
settings.LOGIN_URL = '/accounts/login/'
def tearDown(self):
settings.MIDDLEWARE_CLASSES = self.old_MIDDLEWARE_CLASSES
settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS
settings.LOGIN_URL = self.old_LOGIN_URL
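# Hedged aside (not part of the original tests): the save/mutate/restore
# dance above can be packaged as a context manager; later Django versions
# ship override_settings for exactly this, but Django 1.2 predates it.
from contextlib import contextmanager
@contextmanager
def patched_setting(name, value):
    old = getattr(settings, name)
    setattr(settings, name, value)
    try:
        yield
    finally:
        setattr(settings, name, old)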
def test_view_flatpage(self):
"A flatpage can be served through a view"
response = self.client.get('/flatpage_root/flatpage/')
self.assertEquals(response.status_code, 200)
self.assertContains(response, "<p>Isn't it flat!</p>")
def test_view_non_existent_flatpage(self):
"A non-existent flatpage raises 404 when served through a view"
response = self.client.get('/flatpage_root/no_such_flatpage/')
self.assertEquals(response.status_code, 404)
def test_view_authenticated_flatpage(self):
"A flatpage served through a view can require authentication"
response = self.client.get('/flatpage_root/sekrit/')
self.assertRedirects(response, '/accounts/login/?next=/flatpage_root/sekrit/')
def test_fallback_flatpage(self):
"A fallback flatpage won't be served if the middleware is disabled"
response = self.client.get('/flatpage/')
self.assertEquals(response.status_code, 404)
def test_fallback_non_existent_flatpage(self):
"A non-existent flatpage won't be served if the fallback middlware is disabled"
response = self.client.get('/no_such_flatpage/')
self.assertEquals(response.status_code, 404)
|
dkarakats/edx-platform | refs/heads/master | cms/djangoapps/contentstore/management/commands/populate_creators.py | 182 | """
Script for granting existing course instructors course creator privileges.
This script is only intended to be run once on a given environment.
"""
from course_creators.views import add_user_with_status_granted, add_user_with_status_unrequested
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from django.db.utils import IntegrityError
from student.roles import CourseInstructorRole, CourseStaffRole
#------------ to run: ./manage.py cms populate_creators --settings=dev
class Command(BaseCommand):
"""
Script for granting existing course instructors course creator privileges.
"""
help = 'Grants all users with INSTRUCTOR role permission to create courses'
def handle(self, *args, **options):
"""
The logic of the command.
"""
username = 'populate_creators_command'
email = 'grant+creator+access@edx.org'
try:
admin = User.objects.create_user(username, email, 'foo')
admin.is_staff = True
admin.save()
except IntegrityError:
# If the script did not complete the last time it was run,
# the admin user will already exist.
admin = User.objects.get(username=username, email=email)
for user in get_users_with_role(CourseInstructorRole.ROLE):
add_user_with_status_granted(admin, user)
# Some users will be both staff and instructors. Those folks have been
# added with status granted above, and add_user_with_status_unrequested
# will not try to add them again if they already exist in the course creator database.
for user in get_users_with_role(CourseStaffRole.ROLE):
add_user_with_status_unrequested(user)
# There could be users who are not in either staff or instructor (they've
# never actually done anything in Studio). I plan to add those as unrequested
# when they first go to their dashboard.
admin.delete()
#=============================================================================================================
# Because these are expensive and far-reaching, I moved them here
def get_users_with_role(role_prefix):
"""
An expensive operation which finds all users in the db with the given role prefix
"""
return User.objects.filter(groups__name__startswith=role_prefix)
|
Azulinho/ansible | refs/heads/devel | lib/ansible/modules/source_control/github_issue.py | 44 | #!/usr/bin/python
# (c) 2017, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: github_issue
short_description: View GitHub issue.
description:
- View GitHub issue for a given repository.
version_added: "2.4"
options:
repo:
description:
- Name of repository from which issue needs to be retrieved.
required: true
default: none
organization:
description:
- Name of the GitHub organization in which the repository is hosted.
required: true
default: none
issue:
description:
- Issue number for which information is required.
default: none
required: true
action:
description:
- Get various details about issue depending upon action specified.
default: 'get_status'
required: false
choices:
- 'get_status'
author:
- Abhijeet Kasurde (@akasurde)
requirements:
- "github3.py >= 1.0.0a4"
'''
RETURN = '''
get_status:
description: State of the GitHub issue
type: string
returned: success
sample: open, closed
'''
EXAMPLES = '''
- name: Check if GitHub issue is closed or not
github_issue:
organization: ansible
repo: ansible
issue: 23642
action: get_status
register: r
- name: Take action depending upon issue status
debug:
msg: Do something when issue 23642 is open
when: r.issue_status == 'open'
'''
try:
import github3
HAS_GITHUB_PACKAGE = True
except ImportError:
HAS_GITHUB_PACKAGE = False
from ansible.module_utils.basic import AnsibleModule
def main():
module = AnsibleModule(
argument_spec=dict(
organization=dict(required=True),
repo=dict(required=True),
issue=dict(required=True),
action=dict(required=False, choices=['get_status']),
),
supports_check_mode=True,
)
if not HAS_GITHUB_PACKAGE:
module.fail_json(msg="Missing required github3 module. (check docs or "
"install with: pip install github3.py==1.0.0a4)")
organization = module.params['organization']
repo = module.params['repo']
issue = module.params['issue']
action = module.params['action']
result = dict()
gh_obj = github3.issue(organization, repo, issue)
if gh_obj is None:
module.fail_json(msg="Failed to get details about issue specified. "
"Please check organization, repo and issue "
"details and try again.")
if action == 'get_status' or action is None:
if module.check_mode:
result.update(changed=True)
else:
result.update(changed=True, issue_status=gh_obj.state)
module.exit_json(**result)
if __name__ == '__main__':
main()
|
yjmade/odoo | refs/heads/8.0 | addons/sale/sales_team.py | 61 | # -*- coding: utf-8 -*-
import calendar
from datetime import date
from dateutil import relativedelta
import json
from openerp import tools
from openerp.osv import fields, osv
class crm_case_section(osv.osv):
_inherit = 'crm.case.section'
def _get_sale_orders_data(self, cr, uid, ids, field_name, arg, context=None):
obj = self.pool['sale.order']
month_begin = date.today().replace(day=1)
date_begin = (month_begin - relativedelta.relativedelta(months=self._period_number - 1)).strftime(tools.DEFAULT_SERVER_DATE_FORMAT)
date_end = month_begin.replace(day=calendar.monthrange(month_begin.year, month_begin.month)[1]).strftime(tools.DEFAULT_SERVER_DATE_FORMAT)
res = {}
for id in ids:
res[id] = {}
created_domain = [('section_id', '=', id), ('state', '=', 'draft'), ('date_order', '>=', date_begin), ('date_order', '<=', date_end)]
validated_domain = [('section_id', '=', id), ('state', 'not in', ['draft', 'sent', 'cancel']), ('date_order', '>=', date_begin), ('date_order', '<=', date_end)]
res[id]['monthly_quoted'] = json.dumps(self.__get_bar_values(cr, uid, obj, created_domain, ['amount_total', 'date_order'], 'amount_total', 'date_order', context=context))
res[id]['monthly_confirmed'] = json.dumps(self.__get_bar_values(cr, uid, obj, validated_domain, ['amount_total', 'date_order'], 'amount_total', 'date_order', context=context))
return res
def _get_invoices_data(self, cr, uid, ids, field_name, arg, context=None):
obj = self.pool['account.invoice.report']
month_begin = date.today().replace(day=1)
date_begin = (month_begin - relativedelta.relativedelta(months=self._period_number - 1)).strftime(tools.DEFAULT_SERVER_DATE_FORMAT)
date_end = month_begin.replace(day=calendar.monthrange(month_begin.year, month_begin.month)[1]).strftime(tools.DEFAULT_SERVER_DATE_FORMAT)
res = {}
for id in ids:
created_domain = [('section_id', '=', id), ('state', 'not in', ['draft', 'cancel']), ('date', '>=', date_begin), ('date', '<=', date_end)]
res[id] = json.dumps(self.__get_bar_values(cr, uid, obj, created_domain, ['price_total', 'date'], 'price_total', 'date', context=context))
return res
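# Hedged, standalone illustration of the reporting window both methods
# above compute, assuming _period_number == 3 and today == 2014-07-15:
today = date(2014, 7, 15)
month_begin = today.replace(day=1)  # 2014-07-01
date_begin = month_begin - relativedelta.relativedelta(months=3 - 1)  # 2014-05-01
last_day = calendar.monthrange(month_begin.year, month_begin.month)[1]  # 31
date_end = month_begin.replace(day=last_day)  # 2014-07-31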
_columns = {
'use_quotations': fields.boolean('Quotations', help="Check this box to manage quotations in this sales team."),
'invoiced_forecast': fields.integer(string='Invoice Forecast',
help="Forecast of the invoice revenue for the current month. This is the amount the sales \n"
"team should invoice this month. It is used to compute the progression ratio \n"
" of the current and forecast revenue on the kanban view."),
'invoiced_target': fields.integer(string='Invoice Target',
help="Target of invoice revenue for the current month. This is the amount the sales \n"
"team estimates to be able to invoice this month."),
'monthly_quoted': fields.function(_get_sale_orders_data,
type='char', readonly=True, multi='_get_sale_orders_data',
string='Rate of created quotation per duration'),
'monthly_confirmed': fields.function(_get_sale_orders_data,
type='char', readonly=True, multi='_get_sale_orders_data',
string='Rate of validated sales orders per duration'),
'monthly_invoiced': fields.function(_get_invoices_data,
type='char', readonly=True,
string='Rate of sent invoices per duration'),
}
_defaults = {
'use_quotations': True,
}
def action_forecast(self, cr, uid, id, value, context=None):
return self.write(cr, uid, [id], {'invoiced_forecast': round(float(value))}, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ammarkhann/FinalSeniorCode | refs/heads/master | lib/python2.7/site-packages/IPython/core/prefilter.py | 8 | # encoding: utf-8
"""
Prefiltering components.
Prefilters transform user input before it is exec'd by Python. These
transforms are used to implement additional syntax such as !ls and %magic.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from keyword import iskeyword
import re
from IPython.core.autocall import IPyAutocall
from traitlets.config.configurable import Configurable
from IPython.core.inputsplitter import (
ESC_MAGIC,
ESC_QUOTE,
ESC_QUOTE2,
ESC_PAREN,
)
from IPython.core.macro import Macro
from IPython.core.splitinput import LineInfo
from traitlets import (
List, Integer, Unicode, Bool, Instance, CRegExp
)
#-----------------------------------------------------------------------------
# Global utilities, errors and constants
#-----------------------------------------------------------------------------
class PrefilterError(Exception):
pass
# RegExp to identify potential function names
re_fun_name = re.compile(r'[a-zA-Z_]([a-zA-Z0-9_.]*) *$')
# RegExp to exclude strings with this start from autocalling. In
# particular, all binary operators should be excluded, so that if foo is
# callable, foo OP bar doesn't become foo(OP bar), which is invalid. The
# characters '!=()' don't need to be checked for, as the checkPythonChars
# routine explicitly does so, to catch direct calls and rebindings of
# existing names.
# Warning: the '-' HAS TO BE AT THE END of the first group, otherwise
# it affects the rest of the group in square brackets.
re_exclude_auto = re.compile(r'^[,&^\|\*/\+-]'
r'|^is |^not |^in |^and |^or ')
# try to catch also methods for stuff in lists/tuples/dicts: off
# (experimental). For this to work, the line_split regexp would need
# to be modified so it wouldn't break things at '['. That line is
# nasty enough that I shouldn't change it until I can test it _well_.
#self.re_fun_name = re.compile (r'[a-zA-Z_]([a-zA-Z0-9_.\[\]]*) ?$')
# Handler Check Utilities
def is_shadowed(identifier, ip):
"""Is the given identifier defined in one of the namespaces which shadow
the alias and magic namespaces? Note that an identifier is different
from ifun, because it cannot contain a '.' character."""
# This is much safer than calling ofind, which can change state
return (identifier in ip.user_ns \
or identifier in ip.user_global_ns \
or identifier in ip.ns_table['builtin']\
or iskeyword(identifier))
#-----------------------------------------------------------------------------
# Main Prefilter manager
#-----------------------------------------------------------------------------
class PrefilterManager(Configurable):
"""Main prefilter component.
The IPython prefilter is run on all user input before it is run. The
prefilter consumes lines of input and produces transformed lines of
input.
The implementation consists of two phases:
1. Transformers
2. Checkers and handlers
Over time, we plan on deprecating the checkers and handlers and doing
everything in the transformers.
The transformers are instances of :class:`PrefilterTransformer` and have
a single method :meth:`transform` that takes a line and returns a
transformed line. The transformation can be accomplished using any
tool, but our current ones use regular expressions for speed.
After all the transformers have been run, the line is fed to the checkers,
which are instances of :class:`PrefilterChecker`. The line is passed to
the :meth:`check` method, which either returns `None` or a
:class:`PrefilterHandler` instance. If `None` is returned, the other
checkers are tried. If an :class:`PrefilterHandler` instance is returned,
the line is passed to the :meth:`handle` method of the returned
handler and no further checkers are tried.
Both transformers and checkers have a `priority` attribute, that determines
the order in which they are called. Smaller priorities are tried first.
Both transformers and checkers also have `enabled` attribute, which is
a boolean that determines if the instance is used.
Users or developers can change the priority or enabled attribute of
transformers or checkers, but they must call the :meth:`sort_checkers`
or :meth:`sort_transformers` method after changing the priority.
"""
multi_line_specials = Bool(True).tag(config=True)
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
def __init__(self, shell=None, **kwargs):
super(PrefilterManager, self).__init__(shell=shell, **kwargs)
self.shell = shell
self.init_transformers()
self.init_handlers()
self.init_checkers()
#-------------------------------------------------------------------------
# API for managing transformers
#-------------------------------------------------------------------------
def init_transformers(self):
"""Create the default transformers."""
self._transformers = []
for transformer_cls in _default_transformers:
transformer_cls(
shell=self.shell, prefilter_manager=self, parent=self
)
def sort_transformers(self):
"""Sort the transformers by priority.
This must be called after the priority of a transformer is changed.
The :meth:`register_transformer` method calls this automatically.
"""
self._transformers.sort(key=lambda x: x.priority)
@property
def transformers(self):
"""Return a list of checkers, sorted by priority."""
return self._transformers
def register_transformer(self, transformer):
"""Register a transformer instance."""
if transformer not in self._transformers:
self._transformers.append(transformer)
self.sort_transformers()
def unregister_transformer(self, transformer):
"""Unregister a transformer instance."""
if transformer in self._transformers:
self._transformers.remove(transformer)
#-------------------------------------------------------------------------
# API for managing checkers
#-------------------------------------------------------------------------
def init_checkers(self):
"""Create the default checkers."""
self._checkers = []
for checker in _default_checkers:
checker(
shell=self.shell, prefilter_manager=self, parent=self
)
def sort_checkers(self):
"""Sort the checkers by priority.
This must be called after the priority of a checker is changed.
The :meth:`register_checker` method calls this automatically.
"""
self._checkers.sort(key=lambda x: x.priority)
@property
def checkers(self):
"""Return a list of checkers, sorted by priority."""
return self._checkers
def register_checker(self, checker):
"""Register a checker instance."""
if checker not in self._checkers:
self._checkers.append(checker)
self.sort_checkers()
def unregister_checker(self, checker):
"""Unregister a checker instance."""
if checker in self._checkers:
self._checkers.remove(checker)
#-------------------------------------------------------------------------
# API for managing handlers
#-------------------------------------------------------------------------
def init_handlers(self):
"""Create the default handlers."""
self._handlers = {}
self._esc_handlers = {}
for handler in _default_handlers:
handler(
shell=self.shell, prefilter_manager=self, parent=self
)
@property
def handlers(self):
"""Return a dict of all the handlers."""
return self._handlers
def register_handler(self, name, handler, esc_strings):
"""Register a handler instance by name with esc_strings."""
self._handlers[name] = handler
for esc_str in esc_strings:
self._esc_handlers[esc_str] = handler
def unregister_handler(self, name, handler, esc_strings):
"""Unregister a handler instance by name with esc_strings."""
try:
del self._handlers[name]
except KeyError:
pass
for esc_str in esc_strings:
h = self._esc_handlers.get(esc_str)
if h is handler:
del self._esc_handlers[esc_str]
def get_handler_by_name(self, name):
"""Get a handler by its name."""
return self._handlers.get(name)
def get_handler_by_esc(self, esc_str):
"""Get a handler by its escape string."""
return self._esc_handlers.get(esc_str)
#-------------------------------------------------------------------------
# Main prefiltering API
#-------------------------------------------------------------------------
def prefilter_line_info(self, line_info):
"""Prefilter a line that has been converted to a LineInfo object.
This implements the checker/handler part of the prefilter pipe.
"""
# print "prefilter_line_info: ", line_info
handler = self.find_handler(line_info)
return handler.handle(line_info)
def find_handler(self, line_info):
"""Find a handler for the line_info by trying checkers."""
for checker in self.checkers:
if checker.enabled:
handler = checker.check(line_info)
if handler:
return handler
return self.get_handler_by_name('normal')
def transform_line(self, line, continue_prompt):
"""Calls the enabled transformers in order of increasing priority."""
for transformer in self.transformers:
if transformer.enabled:
line = transformer.transform(line, continue_prompt)
return line
def prefilter_line(self, line, continue_prompt=False):
"""Prefilter a single input line as text.
This method prefilters a single line of text by calling the
transformers and then the checkers/handlers.
"""
# print "prefilter_line: ", line, continue_prompt
# All handlers *must* return a value, even if it's blank ('').
# save the line away in case we crash, so the post-mortem handler can
# record it
self.shell._last_input_line = line
if not line:
# Return immediately on purely empty lines, so that if the user
# previously typed some whitespace that started a continuation
# prompt, he can break out of that loop with just an empty line.
# This is how the default python prompt works.
return ''
# At this point, we invoke our transformers.
if not continue_prompt or (continue_prompt and self.multi_line_specials):
line = self.transform_line(line, continue_prompt)
# Now we compute line_info for the checkers and handlers
line_info = LineInfo(line, continue_prompt)
# the input history needs to track even empty lines
stripped = line.strip()
normal_handler = self.get_handler_by_name('normal')
if not stripped:
return normal_handler.handle(line_info)
# special handlers are only allowed for single line statements
if continue_prompt and not self.multi_line_specials:
return normal_handler.handle(line_info)
prefiltered = self.prefilter_line_info(line_info)
# print "prefiltered line: %r" % prefiltered
return prefiltered
def prefilter_lines(self, lines, continue_prompt=False):
"""Prefilter multiple input lines of text.
This is the main entry point for prefiltering multiple lines of
input. This simply calls :meth:`prefilter_line` for each line of
input.
This covers cases where there are multiple lines in the user entry,
which is the case when the user goes back to a multiline history
entry and presses enter.
"""
llines = lines.rstrip('\n').split('\n')
# We can get multiple lines in one shot, where multiline input 'blends'
# into one line, in cases like recalling from the readline history
# buffer. We need to make sure that in such cases, we correctly
# communicate downstream which line is first and which are continuation
# ones.
if len(llines) > 1:
out = '\n'.join([self.prefilter_line(line, lnum>0)
for lnum, line in enumerate(llines) ])
else:
out = self.prefilter_line(llines[0], continue_prompt)
return out
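# Hedged walkthrough (not part of the original module) of the two-phase
# pipe for one line, assuming an automagic-enabled shell where `ls`
# resolves to a magic and is not shadowed in the user namespace:
#
#   1. transform_line('ls -l')            -> 'ls -l' (no default transformers)
#   2. AutoMagicChecker.check(line_info)  -> MagicHandler
#   3. MagicHandler.handle(line_info)     -> "get_ipython().magic('ls -l')"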
#-----------------------------------------------------------------------------
# Prefilter transformers
#-----------------------------------------------------------------------------
class PrefilterTransformer(Configurable):
"""Transform a line of user input."""
priority = Integer(100).tag(config=True)
# Transformers don't currently use shell or prefilter_manager, but as we
# move away from checkers and handlers, they will need them.
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
enabled = Bool(True).tag(config=True)
def __init__(self, shell=None, prefilter_manager=None, **kwargs):
super(PrefilterTransformer, self).__init__(
shell=shell, prefilter_manager=prefilter_manager, **kwargs
)
self.prefilter_manager.register_transformer(self)
def transform(self, line, continue_prompt):
"""Transform a line, returning the new one."""
return None
def __repr__(self):
return "<%s(priority=%r, enabled=%r)>" % (
self.__class__.__name__, self.priority, self.enabled)
#-----------------------------------------------------------------------------
# Prefilter checkers
#-----------------------------------------------------------------------------
class PrefilterChecker(Configurable):
"""Inspect an input line and return a handler for that line."""
priority = Integer(100).tag(config=True)
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
enabled = Bool(True).tag(config=True)
def __init__(self, shell=None, prefilter_manager=None, **kwargs):
super(PrefilterChecker, self).__init__(
shell=shell, prefilter_manager=prefilter_manager, **kwargs
)
self.prefilter_manager.register_checker(self)
def check(self, line_info):
"""Inspect line_info and return a handler instance or None."""
return None
def __repr__(self):
return "<%s(priority=%r, enabled=%r)>" % (
self.__class__.__name__, self.priority, self.enabled)
class EmacsChecker(PrefilterChecker):
priority = Integer(100).tag(config=True)
enabled = Bool(False).tag(config=True)
def check(self, line_info):
"Emacs ipython-mode tags certain input lines."
if line_info.line.endswith('# PYTHON-MODE'):
return self.prefilter_manager.get_handler_by_name('emacs')
else:
return None
class MacroChecker(PrefilterChecker):
priority = Integer(250).tag(config=True)
def check(self, line_info):
obj = self.shell.user_ns.get(line_info.ifun)
if isinstance(obj, Macro):
return self.prefilter_manager.get_handler_by_name('macro')
else:
return None
class IPyAutocallChecker(PrefilterChecker):
priority = Integer(300).tag(config=True)
def check(self, line_info):
"Instances of IPyAutocall in user_ns get autocalled immediately"
obj = self.shell.user_ns.get(line_info.ifun, None)
if isinstance(obj, IPyAutocall):
obj.set_ip(self.shell)
return self.prefilter_manager.get_handler_by_name('auto')
else:
return None
class AssignmentChecker(PrefilterChecker):
priority = Integer(600).tag(config=True)
def check(self, line_info):
"""Check to see if user is assigning to a var for the first time, in
which case we want to avoid any sort of automagic / autocall games.
This allows users to rebind alias or magic names as true python
variables (the magic/alias systems always take second seat to true
python code). E.g. ls='hi', or ls,that=1,2"""
if line_info.the_rest:
if line_info.the_rest[0] in '=,':
return self.prefilter_manager.get_handler_by_name('normal')
else:
return None
class AutoMagicChecker(PrefilterChecker):
priority = Integer(700).tag(config=True)
def check(self, line_info):
"""If the ifun is magic, and automagic is on, run it. Note: normal,
non-auto magic would already have been triggered via '%' in
check_esc_chars. This just checks for automagic. Also, before
triggering the magic handler, make sure that there is nothing in the
user namespace which could shadow it."""
if not self.shell.automagic or not self.shell.find_magic(line_info.ifun):
return None
# We have a likely magic method. Make sure we should actually call it.
if line_info.continue_prompt and not self.prefilter_manager.multi_line_specials:
return None
head = line_info.ifun.split('.',1)[0]
if is_shadowed(head, self.shell):
return None
return self.prefilter_manager.get_handler_by_name('magic')
class PythonOpsChecker(PrefilterChecker):
priority = Integer(900).tag(config=True)
def check(self, line_info):
"""If the 'rest' of the line begins with a function call or pretty much
any python operator, we should simply execute the line (regardless of
whether or not there's a possible autocall expansion). This avoids
spurious (and very confusing) getattr() accesses."""
if line_info.the_rest and line_info.the_rest[0] in '!=()<>,+*/%^&|':
return self.prefilter_manager.get_handler_by_name('normal')
else:
return None
class AutocallChecker(PrefilterChecker):
priority = Integer(1000).tag(config=True)
function_name_regexp = CRegExp(re_fun_name,
help="RegExp to identify potential function names."
).tag(config=True)
exclude_regexp = CRegExp(re_exclude_auto,
help="RegExp to exclude strings with this start from autocalling."
).tag(config=True)
def check(self, line_info):
"Check if the initial word/function is callable and autocall is on."
if not self.shell.autocall:
return None
oinfo = line_info.ofind(self.shell) # This can mutate state via getattr
if not oinfo['found']:
return None
if callable(oinfo['obj']) \
and (not self.exclude_regexp.match(line_info.the_rest)) \
and self.function_name_regexp.match(line_info.ifun):
return self.prefilter_manager.get_handler_by_name('auto')
else:
return None
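class CommentTagChecker(PrefilterChecker):
    """Hedged sketch of a custom checker (hypothetical; not shipped with
    IPython): instantiating it against a manager is enough to register it,
    since PrefilterChecker.__init__ calls register_checker, and the low
    priority slots it ahead of the default checkers."""
    priority = Integer(50).tag(config=True)
    def check(self, line_info):
        if line_info.line.rstrip().endswith('# plain'):
            return self.prefilter_manager.get_handler_by_name('normal')
        return None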
#-----------------------------------------------------------------------------
# Prefilter handlers
#-----------------------------------------------------------------------------
class PrefilterHandler(Configurable):
handler_name = Unicode('normal')
esc_strings = List([])
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC', allow_none=True)
prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager', allow_none=True)
def __init__(self, shell=None, prefilter_manager=None, **kwargs):
super(PrefilterHandler, self).__init__(
shell=shell, prefilter_manager=prefilter_manager, **kwargs
)
self.prefilter_manager.register_handler(
self.handler_name,
self,
self.esc_strings
)
def handle(self, line_info):
# print "normal: ", line_info
"""Handle normal input lines. Use as a template for handlers."""
# With autoindent on, we need some way to exit the input loop, and I
# don't want to force the user to have to backspace all the way to
# clear the line. The rule will be in this case, that either two
# lines of pure whitespace in a row, or a line of pure whitespace but
# of a size different to the indent level, will exit the input loop.
line = line_info.line
continue_prompt = line_info.continue_prompt
if (continue_prompt and
self.shell.autoindent and
line.isspace() and
0 < abs(len(line) - self.shell.indent_current_nsp) <= 2):
line = ''
return line
def __str__(self):
return "<%s(name=%s)>" % (self.__class__.__name__, self.handler_name)
class MacroHandler(PrefilterHandler):
handler_name = Unicode("macro")
def handle(self, line_info):
obj = self.shell.user_ns.get(line_info.ifun)
pre_space = line_info.pre_whitespace
line_sep = "\n" + pre_space
return pre_space + line_sep.join(obj.value.splitlines())
class MagicHandler(PrefilterHandler):
handler_name = Unicode('magic')
esc_strings = List([ESC_MAGIC])
def handle(self, line_info):
"""Execute magic functions."""
ifun = line_info.ifun
the_rest = line_info.the_rest
cmd = '%sget_ipython().magic(%r)' % (line_info.pre_whitespace,
(ifun + " " + the_rest))
return cmd
class AutoHandler(PrefilterHandler):
handler_name = Unicode('auto')
esc_strings = List([ESC_PAREN, ESC_QUOTE, ESC_QUOTE2])
def handle(self, line_info):
"""Handle lines which can be auto-executed, quoting if requested."""
line = line_info.line
ifun = line_info.ifun
the_rest = line_info.the_rest
esc = line_info.esc
continue_prompt = line_info.continue_prompt
obj = line_info.ofind(self.shell)['obj']
# This should only be active for single-line input!
if continue_prompt:
return line
force_auto = isinstance(obj, IPyAutocall)
# User objects sometimes raise exceptions on attribute access other
# than AttributeError (we've seen it in the past), so it's safest to be
# ultra-conservative here and catch all.
try:
auto_rewrite = obj.rewrite
except Exception:
auto_rewrite = True
if esc == ESC_QUOTE:
# Auto-quote splitting on whitespace
newcmd = '%s("%s")' % (ifun,'", "'.join(the_rest.split()) )
elif esc == ESC_QUOTE2:
# Auto-quote whole string
newcmd = '%s("%s")' % (ifun,the_rest)
elif esc == ESC_PAREN:
newcmd = '%s(%s)' % (ifun,",".join(the_rest.split()))
else:
# Auto-paren.
if force_auto:
# Don't rewrite if it is already a call.
do_rewrite = not the_rest.startswith('(')
else:
if not the_rest:
# We only apply it to argument-less calls if the autocall
# parameter is set to 2.
do_rewrite = (self.shell.autocall >= 2)
elif the_rest.startswith('[') and hasattr(obj, '__getitem__'):
# Don't autocall in this case: item access for an object
# which is BOTH callable and implements __getitem__.
do_rewrite = False
else:
do_rewrite = True
# Figure out the rewritten command
if do_rewrite:
if the_rest.endswith(';'):
newcmd = '%s(%s);' % (ifun.rstrip(),the_rest[:-1])
else:
newcmd = '%s(%s)' % (ifun.rstrip(), the_rest)
else:
normal_handler = self.prefilter_manager.get_handler_by_name('normal')
return normal_handler.handle(line_info)
# Display the rewritten call
if auto_rewrite:
self.shell.auto_rewrite_input(newcmd)
return newcmd
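# Hedged examples of the rewrites above, assuming `foo` is callable and
# autocall is enabled (escapes per ESC_QUOTE ',', ESC_QUOTE2 ';' and
# ESC_PAREN '/'):
#   ,foo a b  ->  foo("a", "b")   # auto-quote, split on whitespace
#   ;foo a b  ->  foo("a b")      # auto-quote the whole remainder
#   /foo a b  ->  foo(a,b)        # auto-paren
#   foo 1, 2  ->  foo(1, 2)       # bare auto-paren, when do_rewrite holds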
class EmacsHandler(PrefilterHandler):
handler_name = Unicode('emacs')
esc_strings = List([])
def handle(self, line_info):
"""Handle input lines marked by python-mode."""
# Currently, nothing is done. Later more functionality can be added
# here if needed.
# The input cache shouldn't be updated
return line_info.line
#-----------------------------------------------------------------------------
# Defaults
#-----------------------------------------------------------------------------
_default_transformers = [
]
_default_checkers = [
EmacsChecker,
MacroChecker,
IPyAutocallChecker,
AssignmentChecker,
AutoMagicChecker,
PythonOpsChecker,
AutocallChecker
]
_default_handlers = [
PrefilterHandler,
MacroHandler,
MagicHandler,
AutoHandler,
EmacsHandler
]
|
axelkennedal/dissen | refs/heads/master | dissenEnv/lib/python3.5/site-packages/pip/_vendor/requests/structures.py | 149 | # -*- coding: utf-8 -*-
"""
requests.structures
~~~~~~~~~~~~~~~~~~~
Data structures that power Requests.
"""
import collections
from .compat import OrderedDict
class CaseInsensitiveDict(collections.MutableMapping):
"""
A case-insensitive ``dict``-like object.
Implements all methods and operations of
``collections.MutableMapping`` as well as dict's ``copy``. Also
provides ``lower_items``.
All keys are expected to be strings. The structure remembers the
case of the last key to be set, and ``iter(instance)``,
``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
will contain case-sensitive keys. However, querying and contains
testing is case insensitive::
cid = CaseInsensitiveDict()
cid['Accept'] = 'application/json'
cid['aCCEPT'] == 'application/json' # True
list(cid) == ['Accept'] # True
For example, ``headers['content-encoding']`` will return the
value of a ``'Content-Encoding'`` response header, regardless
of how the header name was originally stored.
If the constructor, ``.update``, or equality comparison
operations are given keys that have equal ``.lower()``s, the
behavior is undefined.
"""
def __init__(self, data=None, **kwargs):
self._store = OrderedDict()
if data is None:
data = {}
self.update(data, **kwargs)
def __setitem__(self, key, value):
# Use the lowercased key for lookups, but store the actual
# key alongside the value.
self._store[key.lower()] = (key, value)
def __getitem__(self, key):
return self._store[key.lower()][1]
def __delitem__(self, key):
del self._store[key.lower()]
def __iter__(self):
return (casedkey for casedkey, mappedvalue in self._store.values())
def __len__(self):
return len(self._store)
def lower_items(self):
"""Like iteritems(), but with all lowercase keys."""
return (
(lowerkey, keyval[1])
for (lowerkey, keyval)
in self._store.items()
)
def __eq__(self, other):
if isinstance(other, collections.Mapping):
other = CaseInsensitiveDict(other)
else:
return NotImplemented
# Compare insensitively
return dict(self.lower_items()) == dict(other.lower_items())
# Copy is required
def copy(self):
return CaseInsensitiveDict(self._store.values())
def __repr__(self):
return str(dict(self.items()))
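# A short, hedged usage sketch of the semantics documented above:
#
#   cid = CaseInsensitiveDict()
#   cid['Content-Encoding'] = 'gzip'
#   cid['content-encoding'] == 'gzip'     # True: lookups ignore case
#   list(cid) == ['Content-Encoding']     # True: iteration keeps stored case
#   cid['CONTENT-ENCODING'] = 'br'
#   list(cid) == ['CONTENT-ENCODING']     # True: last-set key wins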
class LookupDict(dict):
"""Dictionary lookup object."""
def __init__(self, name=None):
self.name = name
super(LookupDict, self).__init__()
def __repr__(self):
return '<lookup \'%s\'>' % (self.name)
def __getitem__(self, key):
# We allow fall-through here, so values default to None
return self.__dict__.get(key, None)
def get(self, key, default=None):
return self.__dict__.get(key, default)
|
WaltHP/os-brick | refs/heads/master | os_brick/tests/initiator/test_connector.py | 1 | # (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os.path
import socket
import string
import tempfile
import time
import mock
from oslo_concurrency import processutils as putils
import testtools
from os_brick import exception
from os_brick.i18n import _LE
from os_brick.initiator import connector
from os_brick.initiator import host_driver
from os_brick.initiator import linuxfc
from os_brick.openstack.common import log as logging
from os_brick.openstack.common import loopingcall
from os_brick.tests import base
LOG = logging.getLogger(__name__)
MY_IP = '10.0.0.1'
class ConnectorUtilsTestCase(base.TestCase):
@mock.patch.object(socket, 'gethostname', return_value='fakehost')
@mock.patch.object(connector.ISCSIConnector, 'get_initiator',
return_value='fakeinitiator')
@mock.patch.object(linuxfc.LinuxFibreChannel, 'get_fc_wwpns',
return_value=None)
@mock.patch.object(linuxfc.LinuxFibreChannel, 'get_fc_wwnns',
return_value=None)
def _test_brick_get_connector_properties(self, multipath,
enforce_multipath,
multipath_result,
mock_wwnns, mock_wwpns,
mock_initiator, mock_gethostname):
props_actual = connector.get_connector_properties('sudo',
MY_IP,
multipath,
enforce_multipath)
props = {'initiator': 'fakeinitiator',
'host': 'fakehost',
'ip': MY_IP,
'multipath': multipath_result}
self.assertEqual(props, props_actual)
def test_brick_get_connector_properties(self):
self._test_brick_get_connector_properties(False, False, False)
@mock.patch.object(putils, 'execute')
def test_brick_get_connector_properties_multipath(self, mock_execute):
self._test_brick_get_connector_properties(True, True, True)
mock_execute.assert_called_once_with('multipathd', 'show', 'status',
run_as_root=True,
root_helper='sudo')
@mock.patch.object(putils, 'execute',
side_effect=putils.ProcessExecutionError)
def test_brick_get_connector_properties_fallback(self, mock_execute):
self._test_brick_get_connector_properties(True, False, False)
mock_execute.assert_called_once_with('multipathd', 'show', 'status',
run_as_root=True,
root_helper='sudo')
@mock.patch.object(putils, 'execute',
side_effect=putils.ProcessExecutionError)
def test_brick_get_connector_properties_raise(self, mock_execute):
self.assertRaises(putils.ProcessExecutionError,
self._test_brick_get_connector_properties,
True, True, None)
class ConnectorTestCase(base.TestCase):
def setUp(self):
super(ConnectorTestCase, self).setUp()
self.cmds = []
def fake_execute(self, *cmd, **kwargs):
self.cmds.append(string.join(cmd))
return "", None
def test_connect_volume(self):
self.connector = connector.InitiatorConnector(None)
self.assertRaises(NotImplementedError,
self.connector.connect_volume, None)
def test_disconnect_volume(self):
self.connector = connector.InitiatorConnector(None)
self.assertRaises(NotImplementedError,
self.connector.disconnect_volume, None, None)
def test_factory(self):
obj = connector.InitiatorConnector.factory('iscsi', None)
self.assertEqual(obj.__class__.__name__, "ISCSIConnector")
obj = connector.InitiatorConnector.factory('fibre_channel', None)
self.assertEqual(obj.__class__.__name__, "FibreChannelConnector")
obj = connector.InitiatorConnector.factory('aoe', None)
self.assertEqual(obj.__class__.__name__, "AoEConnector")
obj = connector.InitiatorConnector.factory(
'nfs', None, nfs_mount_point_base='/mnt/test')
self.assertEqual(obj.__class__.__name__, "RemoteFsConnector")
obj = connector.InitiatorConnector.factory(
'glusterfs', None, glusterfs_mount_point_base='/mnt/test')
self.assertEqual(obj.__class__.__name__, "RemoteFsConnector")
obj = connector.InitiatorConnector.factory('local', None)
self.assertEqual(obj.__class__.__name__, "LocalConnector")
self.assertRaises(ValueError,
connector.InitiatorConnector.factory,
"bogus", None)
def test_check_valid_device_with_wrong_path(self):
self.connector = connector.InitiatorConnector(None)
self.connector._execute = \
lambda *args, **kwargs: ("", None)
self.assertFalse(self.connector.check_valid_device('/d0v'))
def test_check_valid_device(self):
self.connector = connector.InitiatorConnector(None)
self.connector._execute = \
lambda *args, **kwargs: ("", "")
self.assertTrue(self.connector.check_valid_device('/dev'))
def test_check_valid_device_with_cmd_error(self):
def raise_except(*args, **kwargs):
raise putils.ProcessExecutionError
self.connector = connector.InitiatorConnector(None)
self.connector._execute = mock.Mock()
self.connector._execute.side_effect = raise_except
self.assertFalse(self.connector.check_valid_device('/dev'))
class HostDriverTestCase(base.TestCase):
def setUp(self):
super(HostDriverTestCase, self).setUp()
isdir_mock = mock.Mock()
isdir_mock.return_value = True
os.path.isdir = isdir_mock
self.devlist = ['device1', 'device2']
listdir_mock = mock.Mock()
listdir_mock.return_value = self.devlist
os.listdir = listdir_mock
def test_host_driver(self):
expected = ['/dev/disk/by-path/' + dev for dev in self.devlist]
driver = host_driver.HostDriver()
actual = driver.get_all_block_devices()
self.assertEqual(expected, actual)
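# Hedged aside (not in the original file): assigning mocks straight onto
# os / os.path, as setUp does above, leaks the replacement into every
# later test in the process. mock.patch scopes it to a single test:
#
#   @mock.patch.object(os, 'listdir', return_value=['device1', 'device2'])
#   @mock.patch.object(os.path, 'isdir', return_value=True)
#   def test_host_driver(self, isdir_mock, listdir_mock):
#       ...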
class ISCSIConnectorTestCase(ConnectorTestCase):
def setUp(self):
super(ISCSIConnectorTestCase, self).setUp()
self.connector = connector.ISCSIConnector(
None, execute=self.fake_execute, use_multipath=False)
self.connector_with_multipath = connector.ISCSIConnector(
None, execute=self.fake_execute, use_multipath=True)
get_name_mock = mock.Mock()
get_name_mock.return_value = "/dev/sdb"
self.connector._linuxscsi.get_name_from_path = get_name_mock
def iscsi_connection(self, volume, location, iqn):
return {
'driver_volume_type': 'iscsi',
'data': {
'volume_id': volume['id'],
'target_portal': location,
'target_iqn': iqn,
'target_lun': 1,
}
}
def iscsi_connection_multipath(self, volume, locations, iqns, luns):
return {
'driver_volume_type': 'iscsi',
'data': {
'volume_id': volume['id'],
'target_portals': locations,
'target_iqns': iqns,
'target_luns': luns,
}
}
def test_get_initiator(self):
def initiator_no_file(*args, **kwargs):
raise putils.ProcessExecutionError('No file')
def initiator_get_text(*arg, **kwargs):
text = ('## DO NOT EDIT OR REMOVE THIS FILE!\n'
'## If you remove this file, the iSCSI daemon '
'will not start.\n'
'## If you change the InitiatorName, existing '
'access control lists\n'
'## may reject this initiator. The InitiatorName must '
'be unique\n'
'## for each iSCSI initiator. Do NOT duplicate iSCSI '
'InitiatorNames.\n'
'InitiatorName=iqn.1234-56.foo.bar:01:23456789abc')
return text, None
self.connector._execute = initiator_no_file
initiator = self.connector.get_initiator()
self.assertIsNone(initiator)
self.connector._execute = initiator_get_text
initiator = self.connector.get_initiator()
self.assertEqual(initiator, 'iqn.1234-56.foo.bar:01:23456789abc')
@testtools.skipUnless(os.path.exists('/dev/disk/by-path'),
'Test requires /dev/disk/by-path')
def test_connect_volume(self):
exists_mock = mock.Mock()
exists_mock.return_value = True
os.path.exists = exists_mock
location = '10.0.2.15:3260'
name = 'volume-00000001'
iqn = 'iqn.2010-10.org.openstack:%s' % name
vol = {'id': 1, 'name': name}
connection_info = self.iscsi_connection(vol, location, iqn)
device = self.connector.connect_volume(connection_info['data'])
dev_str = '/dev/disk/by-path/ip-%s-iscsi-%s-lun-1' % (location, iqn)
self.assertEqual(device['type'], 'block')
self.assertEqual(device['path'], dev_str)
self.connector.disconnect_volume(connection_info['data'], device)
expected_commands = [('iscsiadm -m node -T %s -p %s' %
(iqn, location)),
('iscsiadm -m session'),
('iscsiadm -m node -T %s -p %s --login' %
(iqn, location)),
('iscsiadm -m node -T %s -p %s --op update'
' -n node.startup -v automatic'
% (iqn, location)),
('iscsiadm -m node --rescan'),
('iscsiadm -m session --rescan'),
('blockdev --flushbufs /dev/sdb'),
('tee -a /sys/block/sdb/device/delete'),
('iscsiadm -m node -T %s -p %s --op update'
' -n node.startup -v manual' % (iqn, location)),
('iscsiadm -m node -T %s -p %s --logout' %
(iqn, location)),
('iscsiadm -m node -T %s -p %s --op delete' %
(iqn, location)), ]
LOG.debug("self.cmds = %s" % self.cmds)
LOG.debug("expected = %s" % expected_commands)
self.assertEqual(expected_commands, self.cmds)
def test_connect_volume_with_multipath(self):
location = '10.0.2.15:3260'
name = 'volume-00000001'
iqn = 'iqn.2010-10.org.openstack:%s' % name
vol = {'id': 1, 'name': name}
connection_properties = self.iscsi_connection(vol, location, iqn)
self.connector_with_multipath = \
connector.ISCSIConnector(None, use_multipath=True)
self.connector_with_multipath._run_iscsiadm_bare = \
lambda *args, **kwargs: "%s %s" % (location, iqn)
portals_mock = mock.Mock()
portals_mock.return_value = [[location, iqn]]
self.connector_with_multipath.\
_get_target_portals_from_iscsiadm_output = portals_mock
connect_to_mock = mock.Mock()
connect_to_mock.return_value = None
self.connector_with_multipath._connect_to_iscsi_portal = \
connect_to_mock
rescan_iscsi_mock = mock.Mock()
rescan_iscsi_mock.return_value = None
self.connector_with_multipath._rescan_iscsi = rescan_iscsi_mock
rescan_multipath_mock = mock.Mock()
rescan_multipath_mock.return_value = None
self.connector_with_multipath._rescan_multipath = \
rescan_multipath_mock
get_device_mock = mock.Mock()
get_device_mock.return_value = 'iqn.2010-10.org.openstack:%s' % name
self.connector_with_multipath._get_multipath_device_name = \
get_device_mock
exists_mock = mock.Mock()
exists_mock.return_value = True
os.path.exists = exists_mock
result = self.connector_with_multipath.connect_volume(
connection_properties['data'])
expected_result = {'path': 'iqn.2010-10.org.openstack:volume-00000001',
'type': 'block'}
self.assertEqual(result, expected_result)
@mock.patch.object(os.path, 'exists', return_value=True)
@mock.patch.object(host_driver.HostDriver, 'get_all_block_devices')
@mock.patch.object(connector.ISCSIConnector, '_rescan_multipath')
@mock.patch.object(connector.ISCSIConnector, '_run_multipath')
@mock.patch.object(connector.ISCSIConnector, '_get_multipath_device_name')
@mock.patch.object(connector.ISCSIConnector, '_get_multipath_iqn')
def test_connect_volume_with_multiple_portals(
self, mock_get_iqn, mock_device_name, mock_run_multipath,
mock_rescan_multipath, mock_devices, mock_exists):
location1 = '10.0.2.15:3260'
location2 = '10.0.3.15:3260'
name1 = 'volume-00000001-1'
name2 = 'volume-00000001-2'
iqn1 = 'iqn.2010-10.org.openstack:%s' % name1
iqn2 = 'iqn.2010-10.org.openstack:%s' % name2
fake_multipath_dev = '/dev/mapper/fake-multipath-dev'
vol = {'id': 1, 'name': name1}
connection_properties = self.iscsi_connection_multipath(
vol, [location1, location2], [iqn1, iqn2], [1, 2])
devs = ['/dev/disk/by-path/ip-%s-iscsi-%s-lun-1' % (location1, iqn1),
'/dev/disk/by-path/ip-%s-iscsi-%s-lun-2' % (location2, iqn2)]
mock_devices.return_value = devs
mock_device_name.return_value = fake_multipath_dev
mock_get_iqn.return_value = [iqn1, iqn2]
result = self.connector_with_multipath.connect_volume(
connection_properties['data'])
expected_result = {'path': fake_multipath_dev, 'type': 'block'}
cmd_format = 'iscsiadm -m node -T %s -p %s --%s'
expected_commands = [cmd_format % (iqn1, location1, 'login'),
cmd_format % (iqn2, location2, 'login')]
self.assertEqual(expected_result, result)
for command in expected_commands:
self.assertIn(command, self.cmds)
mock_device_name.assert_called_once_with(devs[0])
self.cmds = []
self.connector_with_multipath.disconnect_volume(
connection_properties['data'], result)
expected_commands = [cmd_format % (iqn1, location1, 'logout'),
cmd_format % (iqn2, location2, 'logout')]
for command in expected_commands:
self.assertIn(command, self.cmds)
@mock.patch.object(os.path, 'exists')
@mock.patch.object(host_driver.HostDriver, 'get_all_block_devices')
@mock.patch.object(connector.ISCSIConnector, '_rescan_multipath')
@mock.patch.object(connector.ISCSIConnector, '_run_multipath')
@mock.patch.object(connector.ISCSIConnector, '_get_multipath_device_name')
@mock.patch.object(connector.ISCSIConnector, '_get_multipath_iqn')
@mock.patch.object(connector.ISCSIConnector, '_run_iscsiadm')
def test_connect_volume_with_multiple_portals_primary_error(
self, mock_iscsiadm, mock_get_iqn, mock_device_name,
mock_run_multipath, mock_rescan_multipath, mock_devices,
mock_exists):
location1 = '10.0.2.15:3260'
location2 = '10.0.3.15:3260'
name1 = 'volume-00000001-1'
name2 = 'volume-00000001-2'
iqn1 = 'iqn.2010-10.org.openstack:%s' % name1
iqn2 = 'iqn.2010-10.org.openstack:%s' % name2
fake_multipath_dev = '/dev/mapper/fake-multipath-dev'
vol = {'id': 1, 'name': name1}
connection_properties = self.iscsi_connection_multipath(
vol, [location1, location2], [iqn1, iqn2], [1, 2])
dev1 = '/dev/disk/by-path/ip-%s-iscsi-%s-lun-1' % (location1, iqn1)
dev2 = '/dev/disk/by-path/ip-%s-iscsi-%s-lun-2' % (location2, iqn2)
def fake_run_iscsiadm(iscsi_properties, iscsi_command, **kwargs):
if iscsi_properties['target_portal'] == location1:
if iscsi_command == ('--login',):
raise putils.ProcessExecutionError(None, None, 21)
return mock.DEFAULT
mock_exists.side_effect = lambda x: x != dev1
mock_devices.return_value = [dev2]
mock_device_name.return_value = fake_multipath_dev
mock_get_iqn.return_value = [iqn2]
mock_iscsiadm.side_effect = fake_run_iscsiadm
props = connection_properties['data'].copy()
result = self.connector_with_multipath.connect_volume(
connection_properties['data'])
expected_result = {'path': fake_multipath_dev, 'type': 'block'}
self.assertEqual(expected_result, result)
mock_device_name.assert_called_once_with(dev2)
props['target_portal'] = location1
props['target_iqn'] = iqn1
mock_iscsiadm.assert_any_call(props, ('--login',),
check_exit_code=[0, 255])
props['target_portal'] = location2
props['target_iqn'] = iqn2
mock_iscsiadm.assert_any_call(props, ('--login',),
check_exit_code=[0, 255])
mock_iscsiadm.reset_mock()
self.connector_with_multipath.disconnect_volume(
connection_properties['data'], result)
props = connection_properties['data'].copy()
props['target_portal'] = location1
props['target_iqn'] = iqn1
mock_iscsiadm.assert_any_call(props, ('--logout',),
check_exit_code=[0, 21, 255])
props['target_portal'] = location2
props['target_iqn'] = iqn2
mock_iscsiadm.assert_any_call(props, ('--logout',),
check_exit_code=[0, 21, 255])
def test_connect_volume_with_not_found_device(self):
exists_mock = mock.Mock()
exists_mock.return_value = False
os.path.exists = exists_mock
sleep_mock = mock.Mock()
sleep_mock.return_value = False
time.sleep = sleep_mock
location = '10.0.2.15:3260'
name = 'volume-00000001'
iqn = 'iqn.2010-10.org.openstack:%s' % name
vol = {'id': 1, 'name': name}
connection_info = self.iscsi_connection(vol, location, iqn)
self.assertRaises(exception.VolumeDeviceNotFound,
self.connector.connect_volume,
connection_info['data'])
def test_get_target_portals_from_iscsiadm_output(self):
connector = self.connector
test_output = '''10.15.84.19:3260 iqn.1992-08.com.netapp:sn.33615311
10.15.85.19:3260 iqn.1992-08.com.netapp:sn.33615311'''
res = connector._get_target_portals_from_iscsiadm_output(test_output)
ip_iqn1 = ['10.15.84.19:3260', 'iqn.1992-08.com.netapp:sn.33615311']
ip_iqn2 = ['10.15.85.19:3260', 'iqn.1992-08.com.netapp:sn.33615311']
expected = [ip_iqn1, ip_iqn2]
self.assertEqual(expected, res)
def test_get_multipath_device_name(self):
realpath = mock.Mock()
realpath.return_value = None
os.path.realpath = realpath
multipath_return_string = [('mpath2 (20017380006c00036)'
'dm-7 IBM,2810XIV')]
self.connector._run_multipath = \
lambda *args, **kwargs: multipath_return_string
expected = '/dev/mapper/mpath2'
self.assertEqual(expected,
self.connector.
_get_multipath_device_name('/dev/md-1'))
def test_get_iscsi_devices(self):
paths = [('ip-10.0.0.1:3260-iscsi-iqn.2013-01.ro.'
'com.netapp:node.netapp02-lun-0')]
walk_mock = lambda x: [(['.'], ['by-path'], paths)]
os.walk = walk_mock
self.assertEqual(self.connector._get_iscsi_devices(), paths)
def test_get_iscsi_devices_with_empty_dir(self):
walk_mock = mock.Mock()
walk_mock.return_value = []
os.walk = walk_mock
self.assertEqual(self.connector._get_iscsi_devices(), [])
def test_get_multipath_iqn(self):
paths = [('ip-10.0.0.1:3260-iscsi-iqn.2013-01.ro.'
'com.netapp:node.netapp02-lun-0')]
realpath = lambda x: '/dev/disk/by-path/%s' % paths[0]
os.path.realpath = realpath
get_iscsi_mock = mock.Mock()
get_iscsi_mock.return_value = paths
self.connector._get_iscsi_devices = get_iscsi_mock
get_multipath_device_mock = mock.Mock()
get_multipath_device_mock.return_value = paths[0]
self.connector._get_multipath_device_name = get_multipath_device_mock
self.assertEqual(self.connector._get_multipath_iqn(paths[0]),
'iqn.2013-01.ro.com.netapp:node.netapp02')
def test_disconnect_volume_multipath_iscsi(self):
result = []
def fake_disconnect_from_iscsi_portal(properties):
result.append(properties)
iqn1 = 'iqn.2013-01.ro.com.netapp:node.netapp01'
iqn2 = 'iqn.2013-01.ro.com.netapp:node.netapp02'
iqns = [iqn1, iqn2]
portal = '10.0.0.1:3260'
dev = ('ip-%s-iscsi-%s-lun-0' % (portal, iqn1))
get_portals_mock = mock.Mock()
get_portals_mock.return_value = [[portal, iqn1]]
rescan_iscsi_mock = mock.Mock()
rescan_iscsi_mock.return_value = None
rescan_multipath = mock.Mock()
rescan_multipath.return_value = None
get_block_devices_mock = mock.Mock()
get_block_devices_mock.return_value = [dev, '/dev/mapper/md-1']
get_multipath_name_mock = mock.Mock()
get_multipath_name_mock.return_value = '/dev/mapper/md-3'
self.connector._get_multipath_iqn = lambda x: iqns.pop()
disconnect_mock = fake_disconnect_from_iscsi_portal
self.connector._disconnect_from_iscsi_portal = disconnect_mock
fake_property = {'target_portal': portal,
'target_iqn': iqn1}
self.connector._disconnect_volume_multipath_iscsi(fake_property,
'fake/multipath')
# Target in use by other mp devices, don't disconnect
self.assertEqual([], result)
def test_disconnect_volume_multipath_iscsi_without_other_mp_devices(self):
result = []
def fake_disconnect_from_iscsi_portal(properties):
result.append(properties)
portal = '10.0.2.15:3260'
name = 'volume-00000001'
iqn = 'iqn.2010-10.org.openstack:%s' % name
get_portals_mock = mock.Mock()
get_portals_mock.return_value = [[portal, iqn]]
self.connector._get_target_portals_from_iscsiadm_output = \
get_portals_mock
rescan_iscsi_mock = mock.Mock()
rescan_iscsi_mock.return_value = None
self.connector._rescan_iscsi = rescan_iscsi_mock
rescan_multipath_mock = mock.Mock()
rescan_multipath_mock.return_value = None
self.connector._rescan_multipath = rescan_multipath_mock
get_all_devices_mock = mock.Mock()
get_all_devices_mock.return_value = []
self.connector.driver.get_all_block_devices = get_all_devices_mock
self.connector._disconnect_from_iscsi_portal = \
fake_disconnect_from_iscsi_portal
fake_property = {'target_portal': portal,
'target_iqn': iqn}
self.connector._disconnect_volume_multipath_iscsi(fake_property,
'fake/multipath')
# Target not in use by other mp devices, disconnect
self.assertEqual([fake_property], result)
def test_disconnect_volume_multipath_iscsi_with_invalid_symlink(self):
result = []
def fake_disconnect_from_iscsi_portal(properties):
result.append(properties)
portal = '10.0.0.1:3260'
name = 'volume-00000001'
iqn = 'iqn.2010-10.org.openstack:%s' % name
dev = ('ip-%s-iscsi-%s-lun-0' % (portal, iqn))
get_portals_mock = mock.Mock()
get_portals_mock.return_value = [[portal, iqn]]
self.connector._get_target_portals_from_iscsiadm_output = \
get_portals_mock
rescan_iscsi_mock = mock.Mock()
rescan_iscsi_mock.return_value = None
self.connector._rescan_iscsi = rescan_iscsi_mock
rescan_multipath_mock = mock.Mock()
rescan_multipath_mock.return_value = None
self.connector._rescan_multipath = rescan_multipath_mock
get_all_devices_mock = mock.Mock()
get_all_devices_mock.return_value = [dev, '/dev/mapper/md-1']
self.connector.driver.get_all_block_devices = get_all_devices_mock
self.connector._disconnect_from_iscsi_portal = \
fake_disconnect_from_iscsi_portal
# Simulate a broken symlink by returning False for os.path.exists(dev)
mock_exists = mock.Mock()
mock_exists.return_value = False
os.path.exists = mock_exists
fake_property = {'target_portal': portal,
'target_iqn': iqn}
self.connector._disconnect_volume_multipath_iscsi(fake_property,
'fake/multipath')
# Target not in use by other mp devices, disconnect
self.assertEqual([fake_property], result)
class FibreChannelConnectorTestCase(ConnectorTestCase):
def setUp(self):
super(FibreChannelConnectorTestCase, self).setUp()
self.connector = connector.FibreChannelConnector(
None, execute=self.fake_execute, use_multipath=False)
self.assertIsNotNone(self.connector)
self.assertIsNotNone(self.connector._linuxfc)
self.assertIsNotNone(self.connector._linuxscsi)
def fake_get_fc_hbas(self):
return [{'ClassDevice': 'host1',
'ClassDevicePath': '/sys/devices/pci0000:00/0000:00:03.0'
'/0000:05:00.2/host1/fc_host/host1',
'dev_loss_tmo': '30',
'fabric_name': '0x1000000533f55566',
'issue_lip': '<store method only>',
'max_npiv_vports': '255',
'maxframe_size': '2048 bytes',
'node_name': '0x200010604b019419',
'npiv_vports_inuse': '0',
'port_id': '0x680409',
'port_name': '0x100010604b019419',
'port_state': 'Online',
'port_type': 'NPort (fabric via point-to-point)',
'speed': '10 Gbit',
'supported_classes': 'Class 3',
'supported_speeds': '10 Gbit',
'symbolic_name': 'Emulex 554M FV4.0.493.0 DV8.3.27',
'tgtid_bind_type': 'wwpn (World Wide Port Name)',
'uevent': None,
'vport_create': '<store method only>',
'vport_delete': '<store method only>'}]
def fake_get_fc_hbas_info(self):
hbas = self.fake_get_fc_hbas()
info = [{'port_name': hbas[0]['port_name'].replace('0x', ''),
'node_name': hbas[0]['node_name'].replace('0x', ''),
'host_device': hbas[0]['ClassDevice'],
'device_path': hbas[0]['ClassDevicePath']}]
return info
def fibrechan_connection(self, volume, location, wwn):
return {'driver_volume_type': 'fibrechan',
'data': {
'volume_id': volume['id'],
'target_portal': location,
'target_wwn': wwn,
'target_lun': 1,
}}
def test_connect_volume(self):
self.connector._linuxfc.get_fc_hbas = self.fake_get_fc_hbas
self.connector._linuxfc.get_fc_hbas_info = \
self.fake_get_fc_hbas_info
exists_mock = mock.Mock()
exists_mock.return_value = True
os.path.exists = exists_mock
realpath_mock = mock.Mock()
realpath_mock.return_value = '/dev/sdb'
os.path.realpath = realpath_mock
multipath_devname = '/dev/md-1'
devices = {"device": multipath_devname,
"id": "1234567890",
"devices": [{'device': '/dev/sdb',
'address': '1:0:0:1',
'host': 1, 'channel': 0,
'id': 0, 'lun': 1}]}
find_device_mock = mock.Mock()
find_device_mock.return_value = devices
self.connector._linuxscsi.find_multipath_device = find_device_mock
remove_device_mock = mock.Mock()
remove_device_mock.return_value = None
self.connector._linuxscsi.remove_scsi_device = remove_device_mock
get_device_info_mock = mock.Mock()
get_device_info_mock.return_value = devices['devices'][0]
self.connector._linuxscsi.get_device_info = get_device_info_mock
location = '10.0.2.15:3260'
name = 'volume-00000001'
vol = {'id': 1, 'name': name}
# Should work for string, unicode, and list
wwns = ['1234567890123456', unicode('1234567890123456'),
['1234567890123456', '1234567890123457']]
for wwn in wwns:
connection_info = self.fibrechan_connection(vol, location, wwn)
dev_info = self.connector.connect_volume(connection_info['data'])
exp_wwn = wwn[0] if isinstance(wwn, list) else wwn
dev_str = ('/dev/disk/by-path/pci-0000:05:00.2-fc-0x%s-lun-1' %
exp_wwn)
self.assertEqual(dev_info['type'], 'block')
self.assertEqual(dev_info['path'], dev_str)
self.connector.disconnect_volume(connection_info['data'], dev_info)
expected_commands = []
self.assertEqual(expected_commands, self.cmds)
# Should not work for anything other than string, unicode, and list
connection_info = self.fibrechan_connection(vol, location, 123)
self.assertRaises(exception.NoFibreChannelHostsFound,
self.connector.connect_volume,
connection_info['data'])
get_fc_hbas_mock = mock.Mock()
get_fc_hbas_mock.return_value = []
self.connector._linuxfc.get_fc_hbas = get_fc_hbas_mock
get_fc_hbas_info_mock = mock.Mock()
get_fc_hbas_info_mock.return_value = []
self.connector._linuxfc.get_fc_hbas_info = get_fc_hbas_info_mock
self.assertRaises(exception.NoFibreChannelHostsFound,
self.connector.connect_volume,
connection_info['data'])
class FakeFixedIntervalLoopingCall(object):
def __init__(self, f=None, *args, **kw):
self.args = args
self.kw = kw
self.f = f
self._stop = False
def stop(self):
self._stop = True
def wait(self):
return self
def start(self, interval, initial_delay=None):
while not self._stop:
try:
self.f(*self.args, **self.kw)
except loopingcall.LoopingCallDone:
return self
except Exception:
LOG.exception(_LE('in fixed duration looping call'))
raise
class AoEConnectorTestCase(ConnectorTestCase):
"""Test cases for AoE initiator class."""
def setUp(self):
super(AoEConnectorTestCase, self).setUp()
self.connector = connector.AoEConnector('sudo')
self.connection_properties = {'target_shelf': 'fake_shelf',
'target_lun': 'fake_lun'}
loopingcall.FixedIntervalLoopingCall = FakeFixedIntervalLoopingCall
def _mock_path_exists(self, aoe_path, mock_values=None):
exists_mock = mock.Mock()
exists_mock.return_value = mock_values
os.path.exists = exists_mock
def test_connect_volume(self):
"""Ensure that if path exist aoe-revaliadte was called."""
aoe_device, aoe_path = self.connector._get_aoe_info(
self.connection_properties)
self._mock_path_exists(aoe_path, [True, True])
exec_mock = mock.Mock()
exec_mock.return_value = ["", ""]
self.connector._execute = exec_mock
self.connector.connect_volume(self.connection_properties)
def test_connect_volume_without_path(self):
"""Ensure that if path doesn't exist aoe-discovery was called."""
aoe_device, aoe_path = self.connector._get_aoe_info(
self.connection_properties)
expected_info = {
'type': 'block',
'device': aoe_device,
'path': aoe_path,
}
self._mock_path_exists(aoe_path, [False, True])
exec_mock = mock.Mock()
exec_mock.return_value = ["", ""]
self.connector._execute = exec_mock
volume_info = self.connector.connect_volume(
self.connection_properties)
self.assertDictMatch(volume_info, expected_info)
def test_connect_volume_could_not_discover_path(self):
_aoe_device, aoe_path = self.connector._get_aoe_info(
self.connection_properties)
exists_mock = mock.Mock()
exists_mock.return_value = False
os.path.exists = exists_mock
exec_mock = mock.Mock()
exec_mock.return_value = ["", ""]
self.connector._execute = exec_mock
self.assertRaises(exception.VolumeDeviceNotFound,
self.connector.connect_volume,
self.connection_properties)
def test_disconnect_volume(self):
"""Ensure that if path exist aoe-revaliadte was called."""
aoe_device, aoe_path = self.connector._get_aoe_info(
self.connection_properties)
self._mock_path_exists(aoe_path, [True])
exec_mock = mock.Mock()
exec_mock.return_value = ["", ""]
self.connector._execute = exec_mock
self.connector.disconnect_volume(self.connection_properties, {})
class RemoteFsConnectorTestCase(ConnectorTestCase):
"""Test cases for Remote FS initiator class."""
TEST_DEV = '172.18.194.100:/var/nfs'
TEST_PATH = '/mnt/test/df0808229363aad55c27da50c38d6328'
def setUp(self):
super(RemoteFsConnectorTestCase, self).setUp()
self.connection_properties = {
'export': self.TEST_DEV,
'name': '9c592d52-ce47-4263-8c21-4ecf3c029cdb'}
self.connector = connector.RemoteFsConnector(
'nfs', root_helper='sudo', nfs_mount_point_base='/mnt/test',
nfs_mount_options='vers=3')
def test_connect_volume(self):
"""Test the basic connect volume case."""
client = self.connector._remotefsclient
client.mount = mock.Mock()
client.get_mount_point = mock.Mock()
client.get_mount_point.return_value = "something"
self.connector.connect_volume(self.connection_properties)
def test_disconnect_volume(self):
"""Nothing should happen here -- make sure it doesn't blow up."""
self.connector.disconnect_volume(self.connection_properties, {})
class LocalConnectorTestCase(base.TestCase):
def setUp(self):
super(LocalConnectorTestCase, self).setUp()
self.connection_properties = {'name': 'foo',
'device_path': '/tmp/bar'}
def test_connect_volume(self):
self.connector = connector.LocalConnector(None)
cprops = self.connection_properties
dev_info = self.connector.connect_volume(cprops)
self.assertEqual(dev_info['type'], 'local')
self.assertEqual(dev_info['path'], cprops['device_path'])
def test_connect_volume_with_invalid_connection_data(self):
self.connector = connector.LocalConnector(None)
cprops = {}
self.assertRaises(ValueError,
self.connector.connect_volume, cprops)
class HuaweiStorHyperConnectorTestCase(ConnectorTestCase):
"""Test cases for StorHyper initiator class."""
attached = False
def setUp(self):
super(HuaweiStorHyperConnectorTestCase, self).setUp()
self.fake_sdscli_file = tempfile.mktemp()
self.addCleanup(os.remove, self.fake_sdscli_file)
newefile = open(self.fake_sdscli_file, 'w')
newefile.write('test')
newefile.close()
self.connector = connector.HuaweiStorHyperConnector(
None, execute=self.fake_execute)
self.connector.cli_path = self.fake_sdscli_file
self.connector.iscliexist = True
self.connector_fail = connector.HuaweiStorHyperConnector(
None, execute=self.fake_execute_fail)
self.connector_fail.cli_path = self.fake_sdscli_file
self.connector_fail.iscliexist = True
self.connector_nocli = connector.HuaweiStorHyperConnector(
None, execute=self.fake_execute_fail)
self.connector_nocli.cli_path = self.fake_sdscli_file
self.connector_nocli.iscliexist = False
self.connection_properties = {
'access_mode': 'rw',
'qos_specs': None,
'volume_id': 'volume-b2911673-863c-4380-a5f2-e1729eecfe3f'
}
self.device_info = {'type': 'block',
'path': '/dev/vdxxx'}
HuaweiStorHyperConnectorTestCase.attached = False
def fake_execute(self, *cmd, **kwargs):
method = cmd[2]
self.cmds.append(string.join(cmd))
if 'attach' == method:
HuaweiStorHyperConnectorTestCase.attached = True
return 'ret_code=0', None
if 'querydev' == method:
if HuaweiStorHyperConnectorTestCase.attached:
return 'ret_code=0\ndev_addr=/dev/vdxxx', None
else:
return 'ret_code=1\ndev_addr=/dev/vdxxx', None
if 'detach' == method:
HuaweiStorHyperConnectorTestCase.attached = False
return 'ret_code=0', None
def fake_execute_fail(self, *cmd, **kwargs):
method = cmd[2]
self.cmds.append(string.join(cmd))
if 'attach' == method:
HuaweiStorHyperConnectorTestCase.attached = False
return 'ret_code=330151401', None
if 'querydev' == method:
if HuaweiStorHyperConnectorTestCase.attached:
return 'ret_code=0\ndev_addr=/dev/vdxxx', None
else:
return 'ret_code=1\ndev_addr=/dev/vdxxx', None
if 'detach' == method:
HuaweiStorHyperConnectorTestCase.attached = True
return 'ret_code=330155007', None
def test_connect_volume(self):
"""Test the basic connect volume case."""
retval = self.connector.connect_volume(self.connection_properties)
self.assertEqual(self.device_info, retval)
expected_commands = [self.fake_sdscli_file + ' -c attach'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f',
self.fake_sdscli_file + ' -c querydev'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f']
LOG.debug("self.cmds = %s." % self.cmds)
LOG.debug("expected = %s." % expected_commands)
self.assertEqual(expected_commands, self.cmds)
def test_disconnect_volume(self):
"""Test the basic disconnect volume case."""
self.connector.connect_volume(self.connection_properties)
self.assertEqual(True, HuaweiStorHyperConnectorTestCase.attached)
self.connector.disconnect_volume(self.connection_properties,
self.device_info)
self.assertEqual(False, HuaweiStorHyperConnectorTestCase.attached)
expected_commands = [self.fake_sdscli_file + ' -c attach'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f',
self.fake_sdscli_file + ' -c querydev'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f',
self.fake_sdscli_file + ' -c detach'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f']
LOG.debug("self.cmds = %s." % self.cmds)
LOG.debug("expected = %s." % expected_commands)
self.assertEqual(expected_commands, self.cmds)
def test_is_volume_connected(self):
"""Test if volume connected to host case."""
self.connector.connect_volume(self.connection_properties)
self.assertEqual(True, HuaweiStorHyperConnectorTestCase.attached)
is_connected = self.connector.is_volume_connected(
'volume-b2911673-863c-4380-a5f2-e1729eecfe3f')
self.assertEqual(HuaweiStorHyperConnectorTestCase.attached,
is_connected)
self.connector.disconnect_volume(self.connection_properties,
self.device_info)
self.assertEqual(False, HuaweiStorHyperConnectorTestCase.attached)
is_connected = self.connector.is_volume_connected(
'volume-b2911673-863c-4380-a5f2-e1729eecfe3f')
self.assertEqual(HuaweiStorHyperConnectorTestCase.attached,
is_connected)
expected_commands = [self.fake_sdscli_file + ' -c attach'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f',
self.fake_sdscli_file + ' -c querydev'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f',
self.fake_sdscli_file + ' -c querydev'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f',
self.fake_sdscli_file + ' -c detach'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f',
self.fake_sdscli_file + ' -c querydev'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f']
LOG.debug("self.cmds = %s." % self.cmds)
LOG.debug("expected = %s." % expected_commands)
self.assertEqual(expected_commands, self.cmds)
def test__analyze_output(self):
cliout = 'ret_code=0\ndev_addr=/dev/vdxxx\nret_desc="success"'
analyze_result = {'dev_addr': '/dev/vdxxx',
'ret_desc': '"success"',
'ret_code': '0'}
result = self.connector._analyze_output(cliout)
self.assertEqual(analyze_result, result)
def test_connect_volume_fail(self):
"""Test the fail connect volume case."""
self.assertRaises(exception.BrickException,
self.connector_fail.connect_volume,
self.connection_properties)
expected_commands = [self.fake_sdscli_file + ' -c attach'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f']
LOG.debug("self.cmds = %s." % self.cmds)
LOG.debug("expected = %s." % expected_commands)
self.assertEqual(expected_commands, self.cmds)
def test_disconnect_volume_fail(self):
"""Test the fail disconnect volume case."""
self.connector.connect_volume(self.connection_properties)
self.assertEqual(True, HuaweiStorHyperConnectorTestCase.attached)
self.assertRaises(exception.BrickException,
self.connector_fail.disconnect_volume,
self.connection_properties,
self.device_info)
expected_commands = [self.fake_sdscli_file + ' -c attach'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f',
self.fake_sdscli_file + ' -c querydev'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f',
self.fake_sdscli_file + ' -c detach'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f']
LOG.debug("self.cmds = %s." % self.cmds)
LOG.debug("expected = %s." % expected_commands)
self.assertEqual(expected_commands, self.cmds)
def test_connect_volume_nocli(self):
"""Test the fail connect volume case."""
self.assertRaises(exception.BrickException,
self.connector_nocli.connect_volume,
self.connection_properties)
def test_disconnect_volume_nocli(self):
"""Test the fail disconnect volume case."""
self.connector.connect_volume(self.connection_properties)
self.assertEqual(True, HuaweiStorHyperConnectorTestCase.attached)
self.assertRaises(exception.BrickException,
self.connector_nocli.disconnect_volume,
self.connection_properties,
self.device_info)
expected_commands = [self.fake_sdscli_file + ' -c attach'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f',
self.fake_sdscli_file + ' -c querydev'
' -v volume-b2911673-863c-4380-a5f2-e1729eecfe3f']
LOG.debug("self.cmds = %s." % self.cmds)
LOG.debug("expected = %s." % expected_commands)
|
mosser/ArduinoML-kernel | refs/heads/master | externals/xtext/vscode-extension-self-contained/.gradle/nodejs/node-v10.16.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py | 1789 | # Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Xcode-ninja wrapper project file generator.
This updates the data structures passed to the Xcode gyp generator to build
with ninja instead. The Xcode project itself is transformed into a list of
executable targets, each with a build step to build with ninja, and a target
with every source and resource file. This appears to sidestep some of the
major performance headaches experienced using complex projects and large numbers
of targets within Xcode.
"""
import errno
import gyp.generator.ninja
import os
import re
import xml.sax.saxutils
def _WriteWorkspace(main_gyp, sources_gyp, params):
""" Create a workspace to wrap main and sources gyp paths. """
(build_file_root, build_file_ext) = os.path.splitext(main_gyp)
workspace_path = build_file_root + '.xcworkspace'
options = params['options']
if options.generator_output:
workspace_path = os.path.join(options.generator_output, workspace_path)
try:
os.makedirs(workspace_path)
except OSError, e:
if e.errno != errno.EEXIST:
raise
output_string = '<?xml version="1.0" encoding="UTF-8"?>\n' + \
'<Workspace version = "1.0">\n'
for gyp_name in [main_gyp, sources_gyp]:
name = os.path.splitext(os.path.basename(gyp_name))[0] + '.xcodeproj'
name = xml.sax.saxutils.quoteattr("group:" + name)
output_string += ' <FileRef location = %s></FileRef>\n' % name
output_string += '</Workspace>\n'
workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
try:
with open(workspace_file, 'r') as input_file:
input_string = input_file.read()
if input_string == output_string:
return
except IOError:
# Ignore errors if the file doesn't exist.
pass
with open(workspace_file, 'w') as output_file:
output_file.write(output_string)
def _TargetFromSpec(old_spec, params):
""" Create fake target for xcode-ninja wrapper. """
# Determine ninja top level build dir (e.g. /path/to/out).
ninja_toplevel = None
jobs = 0
if params:
options = params['options']
ninja_toplevel = \
os.path.join(options.toplevel_dir,
gyp.generator.ninja.ComputeOutputDir(params))
jobs = params.get('generator_flags', {}).get('xcode_ninja_jobs', 0)
target_name = old_spec.get('target_name')
product_name = old_spec.get('product_name', target_name)
product_extension = old_spec.get('product_extension')
ninja_target = {}
ninja_target['target_name'] = target_name
ninja_target['product_name'] = product_name
if product_extension:
ninja_target['product_extension'] = product_extension
ninja_target['toolset'] = old_spec.get('toolset')
ninja_target['default_configuration'] = old_spec.get('default_configuration')
ninja_target['configurations'] = {}
# Tell Xcode to look in |ninja_toplevel| for build products.
new_xcode_settings = {}
if ninja_toplevel:
new_xcode_settings['CONFIGURATION_BUILD_DIR'] = \
"%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
if 'configurations' in old_spec:
for config in old_spec['configurations'].iterkeys():
old_xcode_settings = \
old_spec['configurations'][config].get('xcode_settings', {})
if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings:
new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO"
new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \
old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET']
ninja_target['configurations'][config] = {}
ninja_target['configurations'][config]['xcode_settings'] = \
new_xcode_settings
ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0)
ninja_target['ios_app_extension'] = old_spec.get('ios_app_extension', 0)
ninja_target['ios_watchkit_extension'] = \
old_spec.get('ios_watchkit_extension', 0)
ninja_target['ios_watchkit_app'] = old_spec.get('ios_watchkit_app', 0)
ninja_target['type'] = old_spec['type']
if ninja_toplevel:
ninja_target['actions'] = [
{
'action_name': 'Compile and copy %s via ninja' % target_name,
'inputs': [],
'outputs': [],
'action': [
'env',
'PATH=%s' % os.environ['PATH'],
'ninja',
'-C',
new_xcode_settings['CONFIGURATION_BUILD_DIR'],
target_name,
],
'message': 'Compile and copy %s via ninja' % target_name,
},
]
if jobs > 0:
ninja_target['actions'][0]['action'].extend(('-j', jobs))
return ninja_target
def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
"""Limit targets for Xcode wrapper.
Xcode sometimes performs poorly with too many targets, so only include
proper executable targets, with filters to customize.
Arguments:
target_extras: Regular expression to always add, matching any target.
executable_target_pattern: Regular expression limiting executable targets.
spec: Specifications for target.
"""
target_name = spec.get('target_name')
# Always include targets matching target_extras.
if target_extras is not None and re.search(target_extras, target_name):
return True
# Otherwise just show executable targets.
if spec.get('type', '') == 'executable' and \
spec.get('product_extension', '') != 'bundle':
# If there is a filter and the target does not match, exclude the target.
if executable_target_pattern is not None:
if not re.search(executable_target_pattern, target_name):
return False
return True
return False
def CreateWrapper(target_list, target_dicts, data, params):
"""Initialize targets for the ninja wrapper.
This sets up the necessary variables in the targets to generate Xcode projects
that use ninja as an external builder.
Arguments:
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
data: Dict of flattened build files keyed on gyp path.
params: Dict of global options for gyp.
"""
orig_gyp = params['build_files'][0]
for gyp_name, gyp_dict in data.iteritems():
if gyp_name == orig_gyp:
depth = gyp_dict['_DEPTH']
# Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE
# and prepend .ninja before the .gyp extension.
generator_flags = params.get('generator_flags', {})
main_gyp = generator_flags.get('xcode_ninja_main_gyp', None)
if main_gyp is None:
(build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
main_gyp = build_file_root + ".ninja" + build_file_ext
# Create new |target_list|, |target_dicts| and |data| data structures.
new_target_list = []
new_target_dicts = {}
new_data = {}
# Set base keys needed for |data|.
new_data[main_gyp] = {}
new_data[main_gyp]['included_files'] = []
new_data[main_gyp]['targets'] = []
new_data[main_gyp]['xcode_settings'] = \
data[orig_gyp].get('xcode_settings', {})
# Normally the xcode-ninja generator includes only valid executable targets.
# If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
# executable targets that match the pattern. (Default all)
executable_target_pattern = \
generator_flags.get('xcode_ninja_executable_target_pattern', None)
# For including other non-executable targets, add the matching target name
# to the |xcode_ninja_target_pattern| regular expression. (Default none)
target_extras = generator_flags.get('xcode_ninja_target_pattern', None)
for old_qualified_target in target_list:
spec = target_dicts[old_qualified_target]
if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
# Add to new_target_list.
target_name = spec.get('target_name')
new_target_name = '%s:%s#target' % (main_gyp, target_name)
new_target_list.append(new_target_name)
# Add to new_target_dicts.
new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
# Add to new_data.
for old_target in data[old_qualified_target.split(':')[0]]['targets']:
if old_target['target_name'] == target_name:
new_data_target = {}
new_data_target['target_name'] = old_target['target_name']
new_data_target['toolset'] = old_target['toolset']
new_data[main_gyp]['targets'].append(new_data_target)
# Create sources target.
sources_target_name = 'sources_for_indexing'
sources_target = _TargetFromSpec(
{ 'target_name' : sources_target_name,
'toolset': 'target',
'default_configuration': 'Default',
'mac_bundle': '0',
'type': 'executable'
}, None)
# Tell Xcode to look everywhere for headers.
sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }
sources = []
for target, target_dict in target_dicts.iteritems():
base = os.path.dirname(target)
files = target_dict.get('sources', []) + \
target_dict.get('mac_bundle_resources', [])
for action in target_dict.get('actions', []):
files.extend(action.get('inputs', []))
# Remove files starting with $. These are mostly intermediate files for the
# build system.
files = [ file for file in files if not file.startswith('$')]
# Make sources relative to root build file.
relative_path = os.path.dirname(main_gyp)
sources += [ os.path.relpath(os.path.join(base, file), relative_path)
for file in files ]
sources_target['sources'] = sorted(set(sources))
  # Put sources_to_index in its own gyp.
sources_gyp = \
os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
fully_qualified_target_name = \
'%s:%s#target' % (sources_gyp, sources_target_name)
# Add to new_target_list, new_target_dicts and new_data.
new_target_list.append(fully_qualified_target_name)
new_target_dicts[fully_qualified_target_name] = sources_target
new_data_target = {}
new_data_target['target_name'] = sources_target['target_name']
new_data_target['_DEPTH'] = depth
new_data_target['toolset'] = "target"
new_data[sources_gyp] = {}
new_data[sources_gyp]['targets'] = []
new_data[sources_gyp]['included_files'] = []
new_data[sources_gyp]['xcode_settings'] = \
data[orig_gyp].get('xcode_settings', {})
new_data[sources_gyp]['targets'].append(new_data_target)
# Write workspace to file.
_WriteWorkspace(main_gyp, sources_gyp, params)
return (new_target_list, new_target_dicts, new_data)
|
miniconfig/home-assistant | refs/heads/dev | homeassistant/components/sensor/command_line.py | 13 | """
Allows you to configure custom shell commands to retrieve a value for a sensor.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.command_line/
"""
from datetime import timedelta
import logging
import subprocess
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_NAME, CONF_VALUE_TEMPLATE, CONF_UNIT_OF_MEASUREMENT, CONF_COMMAND,
STATE_UNKNOWN)
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = 'Command Sensor'
SCAN_INTERVAL = timedelta(seconds=60)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_COMMAND): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Command Sensor."""
name = config.get(CONF_NAME)
command = config.get(CONF_COMMAND)
unit = config.get(CONF_UNIT_OF_MEASUREMENT)
value_template = config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = hass
data = CommandSensorData(command)
add_devices([CommandSensor(hass, data, name, unit, value_template)])
class CommandSensor(Entity):
"""Representation of a sensor that is using shell commands."""
def __init__(self, hass, data, name, unit_of_measurement, value_template):
"""Initialize the sensor."""
self._hass = hass
self.data = data
self._name = name
self._state = STATE_UNKNOWN
self._unit_of_measurement = unit_of_measurement
self._value_template = value_template
self.update()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def state(self):
"""Return the state of the device."""
return self._state
def update(self):
"""Get the latest data and updates the state."""
self.data.update()
value = self.data.value
if value is None:
value = STATE_UNKNOWN
elif self._value_template is not None:
self._state = self._value_template.render_with_possible_json_value(
value, STATE_UNKNOWN)
else:
self._state = value
class CommandSensorData(object):
"""The class for handling the data retrieval."""
def __init__(self, command):
"""Initialize the data object."""
self.command = command
self.value = None
def update(self):
"""Get the latest data with a shell command."""
_LOGGER.info('Running command: %s', self.command)
try:
return_value = subprocess.check_output(self.command, shell=True,
timeout=15)
self.value = return_value.strip().decode('utf-8')
except subprocess.CalledProcessError:
_LOGGER.error('Command failed: %s', self.command)
except subprocess.TimeoutExpired:
_LOGGER.error('Timeout for command: %s', self.command)
|
hwu25/AppPkg | refs/heads/trunk | Applications/Python/Python-2.7.2/Tools/gdb/libpython.py | 6 | #!/usr/bin/python
'''
From gdb 7 onwards, gdb's build can be configured --with-python, allowing gdb
to be extended with Python code e.g. for library-specific data visualizations,
such as for the C++ STL types. Documentation on this API can be seen at:
http://sourceware.org/gdb/current/onlinedocs/gdb/Python-API.html
This python module deals with the case when the process being debugged (the
"inferior process" in gdb parlance) is itself python, or more specifically,
linked against libpython. In this situation, almost every item of data is a
(PyObject*), and having the debugger merely print their addresses is not very
enlightening.
This module embeds knowledge about the implementation details of libpython so
that we can emit useful visualizations e.g. a string, a list, a dict, a frame
giving file/line information and the state of local variables
In particular, given a gdb.Value corresponding to a PyObject* in the inferior
process, we can generate a "proxy value" within the gdb process. For example,
given a PyObject* in the inferior process that is in fact a PyListObject*
holding three PyObject* that turn out to be PyStringObject* instances, we can
generate a proxy value within the gdb process that is a list of strings:
["foo", "bar", "baz"]
Doing so can be expensive for complicated graphs of objects, and could take
some time, so we also have a "write_repr" method that writes a representation
of the data to a file-like object. This allows us to stop the traversal by
having the file-like object raise an exception if it gets too much data.
With both "proxyval" and "write_repr" we keep track of the set of all addresses
visited so far in the traversal, to avoid infinite recursion due to cycles in
the graph of object references.
We try to defer gdb.lookup_type() invocations for python types until as late as
possible: for a dynamically linked python binary, when the process starts in
the debugger, the libpython.so hasn't been dynamically loaded yet, so none of
the type names are known to the debugger
The module also extends gdb with some python-specific commands.
'''
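# Illustrative (assumed) interactive use once this module is loaded into gdb:
#
#   (gdb) python
#   >from libpython import PyObjectPtr
#   >pyop = PyObjectPtr.from_pyobject_ptr(gdb.parse_and_eval('op'))
#   >print pyop.get_truncated_repr(MAX_OUTPUT_LEN)
#
# where 'op' stands for any (PyObject*) expression visible in the inferior.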
from __future__ import with_statement
import gdb
import sys
# Look up the gdb.Type for some standard types:
_type_char_ptr = gdb.lookup_type('char').pointer() # char*
_type_unsigned_char_ptr = gdb.lookup_type('unsigned char').pointer() # unsigned char*
_type_void_ptr = gdb.lookup_type('void').pointer() # void*
_type_size_t = gdb.lookup_type('size_t')
SIZEOF_VOID_P = _type_void_ptr.sizeof
Py_TPFLAGS_HEAPTYPE = (1L << 9)
Py_TPFLAGS_INT_SUBCLASS = (1L << 23)
Py_TPFLAGS_LONG_SUBCLASS = (1L << 24)
Py_TPFLAGS_LIST_SUBCLASS = (1L << 25)
Py_TPFLAGS_TUPLE_SUBCLASS = (1L << 26)
Py_TPFLAGS_STRING_SUBCLASS = (1L << 27)
Py_TPFLAGS_UNICODE_SUBCLASS = (1L << 28)
Py_TPFLAGS_DICT_SUBCLASS = (1L << 29)
Py_TPFLAGS_BASE_EXC_SUBCLASS = (1L << 30)
Py_TPFLAGS_TYPE_SUBCLASS = (1L << 31)
MAX_OUTPUT_LEN=1024
class NullPyObjectPtr(RuntimeError):
pass
def safety_limit(val):
    # Given an integer value from the process being debugged, limit it to some
# safety threshold so that arbitrary breakage within said process doesn't
# break the gdb process too much (e.g. sizes of iterations, sizes of lists)
return min(val, 1000)
def safe_range(val):
# As per range, but don't trust the value too much: cap it to a safety
# threshold in case the data was corrupted
return xrange(safety_limit(val))
class StringTruncated(RuntimeError):
pass
class TruncatedStringIO(object):
'''Similar to cStringIO, but can truncate the output by raising a
StringTruncated exception'''
def __init__(self, maxlen=None):
self._val = ''
self.maxlen = maxlen
def write(self, data):
if self.maxlen:
if len(data) + len(self._val) > self.maxlen:
# Truncation:
self._val += data[0:self.maxlen - len(self._val)]
raise StringTruncated()
self._val += data
def getvalue(self):
return self._val
class PyObjectPtr(object):
"""
Class wrapping a gdb.Value that's a either a (PyObject*) within the
inferior process, or some subclass pointer e.g. (PyStringObject*)
There will be a subclass for every refined PyObject type that we care
about.
Note that at every stage the underlying pointer could be NULL, point
to corrupt data, etc; this is the debugger, after all.
"""
_typename = 'PyObject'
def __init__(self, gdbval, cast_to=None):
if cast_to:
self._gdbval = gdbval.cast(cast_to)
else:
self._gdbval = gdbval
def field(self, name):
'''
Get the gdb.Value for the given field within the PyObject, coping with
some python 2 versus python 3 differences.
Various libpython types are defined using the "PyObject_HEAD" and
"PyObject_VAR_HEAD" macros.
        In Python 2, these are defined so that "ob_type" and (for a var
object) "ob_size" are fields of the type in question.
In Python 3, this is defined as an embedded PyVarObject type thus:
PyVarObject ob_base;
so that the "ob_size" field is located insize the "ob_base" field, and
the "ob_type" is most easily accessed by casting back to a (PyObject*).
'''
if self.is_null():
raise NullPyObjectPtr(self)
if name == 'ob_type':
pyo_ptr = self._gdbval.cast(PyObjectPtr.get_gdb_type())
return pyo_ptr.dereference()[name]
if name == 'ob_size':
try:
# Python 2:
return self._gdbval.dereference()[name]
except RuntimeError:
# Python 3:
return self._gdbval.dereference()['ob_base'][name]
# General case: look it up inside the object:
return self._gdbval.dereference()[name]
def pyop_field(self, name):
'''
Get a PyObjectPtr for the given PyObject* field within this PyObject,
coping with some python 2 versus python 3 differences.
'''
return PyObjectPtr.from_pyobject_ptr(self.field(name))
def write_field_repr(self, name, out, visited):
'''
Extract the PyObject* field named "name", and write its representation
to file-like object "out"
'''
field_obj = self.pyop_field(name)
field_obj.write_repr(out, visited)
def get_truncated_repr(self, maxlen):
'''
Get a repr-like string for the data, but truncate it at "maxlen" bytes
(ending the object graph traversal as soon as you do)
'''
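        # Illustrative (assumed) behaviour: with maxlen=8, a long list such
        # as [1, 2, 3, 4] would come back as "[1, 2, 3...(truncated)".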
out = TruncatedStringIO(maxlen)
try:
self.write_repr(out, set())
except StringTruncated:
# Truncation occurred:
return out.getvalue() + '...(truncated)'
# No truncation occurred:
return out.getvalue()
def type(self):
return PyTypeObjectPtr(self.field('ob_type'))
def is_null(self):
return 0 == long(self._gdbval)
def is_optimized_out(self):
'''
Is the value of the underlying PyObject* visible to the debugger?
This can vary with the precise version of the compiler used to build
Python, and the precise version of gdb.
See e.g. https://bugzilla.redhat.com/show_bug.cgi?id=556975 with
PyEval_EvalFrameEx's "f"
'''
return self._gdbval.is_optimized_out
def safe_tp_name(self):
try:
return self.type().field('tp_name').string()
except NullPyObjectPtr:
# NULL tp_name?
return 'unknown'
except RuntimeError:
# Can't even read the object at all?
return 'unknown'
def proxyval(self, visited):
'''
Scrape a value from the inferior process, and try to represent it
within the gdb process, whilst (hopefully) avoiding crashes when
the remote data is corrupt.
Derived classes will override this.
For example, a PyIntObject* with ob_ival 42 in the inferior process
should result in an int(42) in this process.
visited: a set of all gdb.Value pyobject pointers already visited
whilst generating this value (to guard against infinite recursion when
visiting object graphs with loops). Analogous to Py_ReprEnter and
Py_ReprLeave
'''
class FakeRepr(object):
"""
Class representing a non-descript PyObject* value in the inferior
process for when we don't have a custom scraper, intended to have
a sane repr().
"""
def __init__(self, tp_name, address):
self.tp_name = tp_name
self.address = address
def __repr__(self):
# For the NULL pointer, we have no way of knowing a type, so
# special-case it as per
# http://bugs.python.org/issue8032#msg100882
if self.address == 0:
return '0x0'
return '<%s at remote 0x%x>' % (self.tp_name, self.address)
return FakeRepr(self.safe_tp_name(),
long(self._gdbval))
def write_repr(self, out, visited):
'''
Write a string representation of the value scraped from the inferior
process to "out", a file-like object.
'''
# Default implementation: generate a proxy value and write its repr
# However, this could involve a lot of work for complicated objects,
# so for derived classes we specialize this
return out.write(repr(self.proxyval(visited)))
@classmethod
def subclass_from_type(cls, t):
'''
Given a PyTypeObjectPtr instance wrapping a gdb.Value that's a
(PyTypeObject*), determine the corresponding subclass of PyObjectPtr
to use
Ideally, we would look up the symbols for the global types, but that
isn't working yet:
(gdb) python print gdb.lookup_symbol('PyList_Type')[0].value
Traceback (most recent call last):
File "<string>", line 1, in <module>
NotImplementedError: Symbol type not yet supported in Python scripts.
Error while executing Python code.
For now, we use tp_flags, after doing some string comparisons on the
tp_name for some special-cases that don't seem to be visible through
flags
'''
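        # Illustrative example (not from the original): a value whose type
        # has Py_TPFLAGS_DICT_SUBCLASS set in tp_flags is mapped to
        # PyDictObjectPtr by the flag checks below.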
try:
tp_name = t.field('tp_name').string()
tp_flags = int(t.field('tp_flags'))
except RuntimeError:
# Handle any kind of error e.g. NULL ptrs by simply using the base
# class
return cls
#print 'tp_flags = 0x%08x' % tp_flags
#print 'tp_name = %r' % tp_name
name_map = {'bool': PyBoolObjectPtr,
'classobj': PyClassObjectPtr,
'instance': PyInstanceObjectPtr,
'NoneType': PyNoneStructPtr,
'frame': PyFrameObjectPtr,
'set' : PySetObjectPtr,
'frozenset' : PySetObjectPtr,
'builtin_function_or_method' : PyCFunctionObjectPtr,
}
if tp_name in name_map:
return name_map[tp_name]
if tp_flags & Py_TPFLAGS_HEAPTYPE:
return HeapTypeObjectPtr
if tp_flags & Py_TPFLAGS_INT_SUBCLASS:
return PyIntObjectPtr
if tp_flags & Py_TPFLAGS_LONG_SUBCLASS:
return PyLongObjectPtr
if tp_flags & Py_TPFLAGS_LIST_SUBCLASS:
return PyListObjectPtr
if tp_flags & Py_TPFLAGS_TUPLE_SUBCLASS:
return PyTupleObjectPtr
if tp_flags & Py_TPFLAGS_STRING_SUBCLASS:
return PyStringObjectPtr
if tp_flags & Py_TPFLAGS_UNICODE_SUBCLASS:
return PyUnicodeObjectPtr
if tp_flags & Py_TPFLAGS_DICT_SUBCLASS:
return PyDictObjectPtr
if tp_flags & Py_TPFLAGS_BASE_EXC_SUBCLASS:
return PyBaseExceptionObjectPtr
#if tp_flags & Py_TPFLAGS_TYPE_SUBCLASS:
# return PyTypeObjectPtr
# Use the base class:
return cls
@classmethod
def from_pyobject_ptr(cls, gdbval):
'''
Try to locate the appropriate derived class dynamically, and cast
the pointer accordingly.
'''
try:
p = PyObjectPtr(gdbval)
cls = cls.subclass_from_type(p.type())
return cls(gdbval, cast_to=cls.get_gdb_type())
except RuntimeError:
# Handle any kind of error e.g. NULL ptrs by simply using the base
# class
pass
return cls(gdbval)
@classmethod
def get_gdb_type(cls):
return gdb.lookup_type(cls._typename).pointer()
def as_address(self):
return long(self._gdbval)
class ProxyAlreadyVisited(object):
'''
Placeholder proxy to use when protecting against infinite recursion due to
loops in the object graph.
Analogous to the values emitted by the users of Py_ReprEnter and Py_ReprLeave
'''
def __init__(self, rep):
self._rep = rep
def __repr__(self):
return self._rep
def _write_instance_repr(out, visited, name, pyop_attrdict, address):
'''Shared code for use by old-style and new-style classes:
write a representation to file-like object "out"'''
out.write('<')
out.write(name)
# Write dictionary of instance attributes:
if isinstance(pyop_attrdict, PyDictObjectPtr):
out.write('(')
first = True
for pyop_arg, pyop_val in pyop_attrdict.iteritems():
if not first:
out.write(', ')
first = False
out.write(pyop_arg.proxyval(visited))
out.write('=')
pyop_val.write_repr(out, visited)
out.write(')')
out.write(' at remote 0x%x>' % address)
class InstanceProxy(object):
def __init__(self, cl_name, attrdict, address):
self.cl_name = cl_name
self.attrdict = attrdict
self.address = address
def __repr__(self):
if isinstance(self.attrdict, dict):
kwargs = ', '.join(["%s=%r" % (arg, val)
for arg, val in self.attrdict.iteritems()])
return '<%s(%s) at remote 0x%x>' % (self.cl_name,
kwargs, self.address)
else:
return '<%s at remote 0x%x>' % (self.cl_name,
self.address)
def _PyObject_VAR_SIZE(typeobj, nitems):
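    # Rounds the object size up to a multiple of SIZEOF_VOID_P, mirroring the
    # C macro of the same name. Illustrative arithmetic (assumed values):
    # tp_basicsize=16, tp_itemsize=4, nitems=3, SIZEOF_VOID_P=8 gives
    # (16 + 12 + 7) & ~7 == 32.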
return ( ( typeobj.field('tp_basicsize') +
nitems * typeobj.field('tp_itemsize') +
(SIZEOF_VOID_P - 1)
) & ~(SIZEOF_VOID_P - 1)
).cast(_type_size_t)
class HeapTypeObjectPtr(PyObjectPtr):
_typename = 'PyObject'
def get_attr_dict(self):
'''
Get the PyDictObject ptr representing the attribute dictionary
(or None if there's a problem)
'''
try:
typeobj = self.type()
dictoffset = int_from_int(typeobj.field('tp_dictoffset'))
if dictoffset != 0:
if dictoffset < 0:
type_PyVarObject_ptr = gdb.lookup_type('PyVarObject').pointer()
tsize = int_from_int(self._gdbval.cast(type_PyVarObject_ptr)['ob_size'])
if tsize < 0:
tsize = -tsize
size = _PyObject_VAR_SIZE(typeobj, tsize)
dictoffset += size
assert dictoffset > 0
assert dictoffset % SIZEOF_VOID_P == 0
dictptr = self._gdbval.cast(_type_char_ptr) + dictoffset
PyObjectPtrPtr = PyObjectPtr.get_gdb_type().pointer()
dictptr = dictptr.cast(PyObjectPtrPtr)
return PyObjectPtr.from_pyobject_ptr(dictptr.dereference())
except RuntimeError:
# Corrupt data somewhere; fail safe
pass
# Not found, or some kind of error:
return None
def proxyval(self, visited):
'''
Support for new-style classes.
Currently we just locate the dictionary using a transliteration to
python of _PyObject_GetDictPtr, ignoring descriptors
'''
# Guard against infinite loops:
if self.as_address() in visited:
return ProxyAlreadyVisited('<...>')
visited.add(self.as_address())
pyop_attr_dict = self.get_attr_dict()
if pyop_attr_dict:
attr_dict = pyop_attr_dict.proxyval(visited)
else:
attr_dict = {}
tp_name = self.safe_tp_name()
# New-style class:
return InstanceProxy(tp_name, attr_dict, long(self._gdbval))
def write_repr(self, out, visited):
# Guard against infinite loops:
if self.as_address() in visited:
out.write('<...>')
return
visited.add(self.as_address())
pyop_attrdict = self.get_attr_dict()
_write_instance_repr(out, visited,
self.safe_tp_name(), pyop_attrdict, self.as_address())
class ProxyException(Exception):
def __init__(self, tp_name, args):
self.tp_name = tp_name
self.args = args
def __repr__(self):
return '%s%r' % (self.tp_name, self.args)
class PyBaseExceptionObjectPtr(PyObjectPtr):
"""
Class wrapping a gdb.Value that's a PyBaseExceptionObject* i.e. an exception
within the process being debugged.
"""
_typename = 'PyBaseExceptionObject'
def proxyval(self, visited):
# Guard against infinite loops:
if self.as_address() in visited:
return ProxyAlreadyVisited('(...)')
visited.add(self.as_address())
arg_proxy = self.pyop_field('args').proxyval(visited)
return ProxyException(self.safe_tp_name(),
arg_proxy)
def write_repr(self, out, visited):
# Guard against infinite loops:
if self.as_address() in visited:
out.write('(...)')
return
visited.add(self.as_address())
out.write(self.safe_tp_name())
self.write_field_repr('args', out, visited)
class PyBoolObjectPtr(PyObjectPtr):
"""
Class wrapping a gdb.Value that's a PyBoolObject* i.e. one of the two
<bool> instances (Py_True/Py_False) within the process being debugged.
"""
_typename = 'PyBoolObject'
def proxyval(self, visited):
if int_from_int(self.field('ob_ival')):
return True
else:
return False
class PyClassObjectPtr(PyObjectPtr):
"""
Class wrapping a gdb.Value that's a PyClassObject* i.e. a <classobj>
instance within the process being debugged.
"""
_typename = 'PyClassObject'
class BuiltInFunctionProxy(object):
def __init__(self, ml_name):
self.ml_name = ml_name
def __repr__(self):
return "<built-in function %s>" % self.ml_name
class BuiltInMethodProxy(object):
def __init__(self, ml_name, pyop_m_self):
self.ml_name = ml_name
self.pyop_m_self = pyop_m_self
def __repr__(self):
return ('<built-in method %s of %s object at remote 0x%x>'
% (self.ml_name,
self.pyop_m_self.safe_tp_name(),
self.pyop_m_self.as_address())
)
class PyCFunctionObjectPtr(PyObjectPtr):
"""
Class wrapping a gdb.Value that's a PyCFunctionObject*
(see Include/methodobject.h and Objects/methodobject.c)
"""
_typename = 'PyCFunctionObject'
def proxyval(self, visited):
m_ml = self.field('m_ml') # m_ml is a (PyMethodDef*)
ml_name = m_ml['ml_name'].string()
pyop_m_self = self.pyop_field('m_self')
if pyop_m_self.is_null():
return BuiltInFunctionProxy(ml_name)
else:
return BuiltInMethodProxy(ml_name, pyop_m_self)
class PyCodeObjectPtr(PyObjectPtr):
"""
Class wrapping a gdb.Value that's a PyCodeObject* i.e. a <code> instance
within the process being debugged.
"""
_typename = 'PyCodeObject'
def addr2line(self, addrq):
'''
Get the line number for a given bytecode offset
Analogous to PyCode_Addr2Line; translated from pseudocode in
Objects/lnotab_notes.txt
'''
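        # Illustrative walk-through (assumed values): with co_firstlineno=10
        # and co_lnotab='\x06\x01\x08\x02', addr2line(7) first advances addr
        # to 6 (lineno becomes 11); the next pair would push addr to 14 > 7,
        # so 11 is returned.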
co_lnotab = self.pyop_field('co_lnotab').proxyval(set())
# Initialize lineno to co_firstlineno as per PyCode_Addr2Line
# not 0, as lnotab_notes.txt has it:
lineno = int_from_int(self.field('co_firstlineno'))
addr = 0
for addr_incr, line_incr in zip(co_lnotab[::2], co_lnotab[1::2]):
addr += ord(addr_incr)
if addr > addrq:
return lineno
lineno += ord(line_incr)
return lineno
class PyDictObjectPtr(PyObjectPtr):
"""
Class wrapping a gdb.Value that's a PyDictObject* i.e. a dict instance
within the process being debugged.
"""
_typename = 'PyDictObject'
def iteritems(self):
'''
Yields a sequence of (PyObjectPtr key, PyObjectPtr value) pairs,
        analogous to dict.iteritems()
'''
for i in safe_range(self.field('ma_mask') + 1):
ep = self.field('ma_table') + i
pyop_value = PyObjectPtr.from_pyobject_ptr(ep['me_value'])
if not pyop_value.is_null():
pyop_key = PyObjectPtr.from_pyobject_ptr(ep['me_key'])
yield (pyop_key, pyop_value)
def proxyval(self, visited):
# Guard against infinite loops:
if self.as_address() in visited:
return ProxyAlreadyVisited('{...}')
visited.add(self.as_address())
result = {}
for pyop_key, pyop_value in self.iteritems():
proxy_key = pyop_key.proxyval(visited)
proxy_value = pyop_value.proxyval(visited)
result[proxy_key] = proxy_value
return result
def write_repr(self, out, visited):
# Guard against infinite loops:
if self.as_address() in visited:
out.write('{...}')
return
visited.add(self.as_address())
out.write('{')
first = True
for pyop_key, pyop_value in self.iteritems():
if not first:
out.write(', ')
first = False
pyop_key.write_repr(out, visited)
out.write(': ')
pyop_value.write_repr(out, visited)
out.write('}')
class PyInstanceObjectPtr(PyObjectPtr):
_typename = 'PyInstanceObject'
def proxyval(self, visited):
# Guard against infinite loops:
if self.as_address() in visited:
return ProxyAlreadyVisited('<...>')
visited.add(self.as_address())
# Get name of class:
in_class = self.pyop_field('in_class')
cl_name = in_class.pyop_field('cl_name').proxyval(visited)
# Get dictionary of instance attributes:
in_dict = self.pyop_field('in_dict').proxyval(visited)
# Old-style class:
return InstanceProxy(cl_name, in_dict, long(self._gdbval))
def write_repr(self, out, visited):
# Guard against infinite loops:
if self.as_address() in visited:
out.write('<...>')
return
visited.add(self.as_address())
# Old-style class:
# Get name of class:
in_class = self.pyop_field('in_class')
cl_name = in_class.pyop_field('cl_name').proxyval(visited)
# Get dictionary of instance attributes:
pyop_in_dict = self.pyop_field('in_dict')
_write_instance_repr(out, visited,
cl_name, pyop_in_dict, self.as_address())
class PyIntObjectPtr(PyObjectPtr):
_typename = 'PyIntObject'
def proxyval(self, visited):
result = int_from_int(self.field('ob_ival'))
return result
class PyListObjectPtr(PyObjectPtr):
_typename = 'PyListObject'
def __getitem__(self, i):
# Get the gdb.Value for the (PyObject*) with the given index:
field_ob_item = self.field('ob_item')
return field_ob_item[i]
def proxyval(self, visited):
# Guard against infinite loops:
if self.as_address() in visited:
return ProxyAlreadyVisited('[...]')
visited.add(self.as_address())
result = [PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited)
for i in safe_range(int_from_int(self.field('ob_size')))]
return result
def write_repr(self, out, visited):
# Guard against infinite loops:
if self.as_address() in visited:
out.write('[...]')
return
visited.add(self.as_address())
out.write('[')
for i in safe_range(int_from_int(self.field('ob_size'))):
if i > 0:
out.write(', ')
element = PyObjectPtr.from_pyobject_ptr(self[i])
element.write_repr(out, visited)
out.write(']')
class PyLongObjectPtr(PyObjectPtr):
_typename = 'PyLongObject'
def proxyval(self, visited):
'''
        Python's Include/longintrepr.h has this declaration:
struct _longobject {
PyObject_VAR_HEAD
digit ob_digit[1];
};
with this description:
The absolute value of a number is equal to
SUM(for i=0 through abs(ob_size)-1) ob_digit[i] * 2**(SHIFT*i)
Negative numbers are represented with ob_size < 0;
zero is represented by ob_size == 0.
where SHIFT can be either:
#define PyLong_SHIFT 30
#define PyLong_SHIFT 15
'''
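        # Illustrative example (assumed values, SHIFT=15): ob_size=2 with
        # ob_digit=[1, 3] decodes to 1*2**0 + 3*2**15 == 98305; with
        # ob_size=-2 the result would be -98305.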
ob_size = long(self.field('ob_size'))
if ob_size == 0:
return 0L
ob_digit = self.field('ob_digit')
if gdb.lookup_type('digit').sizeof == 2:
SHIFT = 15L
else:
SHIFT = 30L
digits = [long(ob_digit[i]) * 2**(SHIFT*i)
for i in safe_range(abs(ob_size))]
result = sum(digits)
if ob_size < 0:
result = -result
return result
class PyNoneStructPtr(PyObjectPtr):
"""
Class wrapping a gdb.Value that's a PyObject* pointing to the
singleton (we hope) _Py_NoneStruct with ob_type PyNone_Type
"""
_typename = 'PyObject'
def proxyval(self, visited):
return None
class PyFrameObjectPtr(PyObjectPtr):
_typename = 'PyFrameObject'
def __init__(self, gdbval, cast_to):
PyObjectPtr.__init__(self, gdbval, cast_to)
if not self.is_optimized_out():
self.co = PyCodeObjectPtr.from_pyobject_ptr(self.field('f_code'))
self.co_name = self.co.pyop_field('co_name')
self.co_filename = self.co.pyop_field('co_filename')
self.f_lineno = int_from_int(self.field('f_lineno'))
self.f_lasti = int_from_int(self.field('f_lasti'))
self.co_nlocals = int_from_int(self.co.field('co_nlocals'))
self.co_varnames = PyTupleObjectPtr.from_pyobject_ptr(self.co.field('co_varnames'))
def iter_locals(self):
'''
Yield a sequence of (name,value) pairs of PyObjectPtr instances, for
the local variables of this frame
'''
if self.is_optimized_out():
return
f_localsplus = self.field('f_localsplus')
for i in safe_range(self.co_nlocals):
pyop_value = PyObjectPtr.from_pyobject_ptr(f_localsplus[i])
if not pyop_value.is_null():
pyop_name = PyObjectPtr.from_pyobject_ptr(self.co_varnames[i])
yield (pyop_name, pyop_value)
def iter_globals(self):
'''
Yield a sequence of (name,value) pairs of PyObjectPtr instances, for
the global variables of this frame
'''
if self.is_optimized_out():
return
pyop_globals = self.pyop_field('f_globals')
return pyop_globals.iteritems()
def iter_builtins(self):
'''
Yield a sequence of (name,value) pairs of PyObjectPtr instances, for
the builtin variables
'''
if self.is_optimized_out():
return
pyop_builtins = self.pyop_field('f_builtins')
return pyop_builtins.iteritems()
def get_var_by_name(self, name):
'''
Look for the named local variable, returning a (PyObjectPtr, scope) pair
where scope is a string 'local', 'global', 'builtin'
If not found, return (None, None)
'''
for pyop_name, pyop_value in self.iter_locals():
if name == pyop_name.proxyval(set()):
return pyop_value, 'local'
for pyop_name, pyop_value in self.iter_globals():
if name == pyop_name.proxyval(set()):
return pyop_value, 'global'
for pyop_name, pyop_value in self.iter_builtins():
if name == pyop_name.proxyval(set()):
return pyop_value, 'builtin'
return None, None
def filename(self):
'''Get the path of the current Python source file, as a string'''
if self.is_optimized_out():
return '(frame information optimized out)'
return self.co_filename.proxyval(set())
def current_line_num(self):
'''Get current line number as an integer (1-based)
Translated from PyFrame_GetLineNumber and PyCode_Addr2Line
See Objects/lnotab_notes.txt
'''
if self.is_optimized_out():
return None
f_trace = self.field('f_trace')
if long(f_trace) != 0:
# we have a non-NULL f_trace:
return self.f_lineno
else:
#try:
return self.co.addr2line(self.f_lasti)
#except ValueError:
# return self.f_lineno
def current_line(self):
'''Get the text of the current source line as a string, with a trailing
newline character'''
if self.is_optimized_out():
return '(frame information optimized out)'
with open(self.filename(), 'r') as f:
all_lines = f.readlines()
# Convert from 1-based current_line_num to 0-based list offset:
return all_lines[self.current_line_num()-1]
def write_repr(self, out, visited):
if self.is_optimized_out():
out.write('(frame information optimized out)')
return
out.write('Frame 0x%x, for file %s, line %i, in %s ('
% (self.as_address(),
self.co_filename,
self.current_line_num(),
self.co_name))
first = True
for pyop_name, pyop_value in self.iter_locals():
if not first:
out.write(', ')
first = False
out.write(pyop_name.proxyval(visited))
out.write('=')
pyop_value.write_repr(out, visited)
out.write(')')
class PySetObjectPtr(PyObjectPtr):
_typename = 'PySetObject'
def proxyval(self, visited):
# Guard against infinite loops:
if self.as_address() in visited:
return ProxyAlreadyVisited('%s(...)' % self.safe_tp_name())
visited.add(self.as_address())
members = []
table = self.field('table')
for i in safe_range(self.field('mask')+1):
setentry = table[i]
key = setentry['key']
if key != 0:
key_proxy = PyObjectPtr.from_pyobject_ptr(key).proxyval(visited)
if key_proxy != '<dummy key>':
members.append(key_proxy)
if self.safe_tp_name() == 'frozenset':
return frozenset(members)
else:
return set(members)
def write_repr(self, out, visited):
out.write(self.safe_tp_name())
# Guard against infinite loops:
if self.as_address() in visited:
out.write('(...)')
return
visited.add(self.as_address())
out.write('([')
first = True
table = self.field('table')
for i in safe_range(self.field('mask')+1):
setentry = table[i]
key = setentry['key']
if key != 0:
pyop_key = PyObjectPtr.from_pyobject_ptr(key)
key_proxy = pyop_key.proxyval(visited) # FIXME!
if key_proxy != '<dummy key>':
if not first:
out.write(', ')
first = False
pyop_key.write_repr(out, visited)
out.write('])')
class PyStringObjectPtr(PyObjectPtr):
_typename = 'PyStringObject'
def __str__(self):
field_ob_size = self.field('ob_size')
field_ob_sval = self.field('ob_sval')
char_ptr = field_ob_sval.address.cast(_type_unsigned_char_ptr)
return ''.join([chr(char_ptr[i]) for i in safe_range(field_ob_size)])
def proxyval(self, visited):
return str(self)
class PyTupleObjectPtr(PyObjectPtr):
_typename = 'PyTupleObject'
def __getitem__(self, i):
# Get the gdb.Value for the (PyObject*) with the given index:
field_ob_item = self.field('ob_item')
return field_ob_item[i]
def proxyval(self, visited):
# Guard against infinite loops:
if self.as_address() in visited:
return ProxyAlreadyVisited('(...)')
visited.add(self.as_address())
result = tuple([PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited)
for i in safe_range(int_from_int(self.field('ob_size')))])
return result
def write_repr(self, out, visited):
# Guard against infinite loops:
if self.as_address() in visited:
out.write('(...)')
return
visited.add(self.as_address())
out.write('(')
for i in safe_range(int_from_int(self.field('ob_size'))):
if i > 0:
out.write(', ')
element = PyObjectPtr.from_pyobject_ptr(self[i])
element.write_repr(out, visited)
if self.field('ob_size') == 1:
out.write(',)')
else:
out.write(')')
class PyTypeObjectPtr(PyObjectPtr):
_typename = 'PyTypeObject'
if sys.maxunicode >= 0x10000:
_unichr = unichr
else:
# Needed for proper surrogate support if sizeof(Py_UNICODE) is 2 in gdb
def _unichr(x):
if x < 0x10000:
return unichr(x)
x -= 0x10000
ch1 = 0xD800 | (x >> 10)
ch2 = 0xDC00 | (x & 0x3FF)
return unichr(ch1) + unichr(ch2)
class PyUnicodeObjectPtr(PyObjectPtr):
_typename = 'PyUnicodeObject'
def char_width(self):
_type_Py_UNICODE = gdb.lookup_type('Py_UNICODE')
return _type_Py_UNICODE.sizeof
def proxyval(self, visited):
# From unicodeobject.h:
# Py_ssize_t length; /* Length of raw Unicode data in buffer */
# Py_UNICODE *str; /* Raw Unicode buffer */
field_length = long(self.field('length'))
field_str = self.field('str')
# Gather a list of ints from the Py_UNICODE array; these are either
# UCS-2 or UCS-4 code points:
if self.char_width() > 2:
Py_UNICODEs = [int(field_str[i]) for i in safe_range(field_length)]
else:
# A more elaborate routine if sizeof(Py_UNICODE) is 2 in the
# inferior process: we must join surrogate pairs.
Py_UNICODEs = []
i = 0
limit = safety_limit(field_length)
while i < limit:
ucs = int(field_str[i])
i += 1
if ucs < 0xD800 or ucs >= 0xDC00 or i == field_length:
Py_UNICODEs.append(ucs)
continue
# This could be a surrogate pair.
ucs2 = int(field_str[i])
if ucs2 < 0xDC00 or ucs2 > 0xDFFF:
continue
code = (ucs & 0x03FF) << 10
code |= ucs2 & 0x03FF
code += 0x00010000
Py_UNICODEs.append(code)
i += 1
# Convert the int code points to unicode characters, and generate a
# local unicode instance.
# This splits surrogate pairs if sizeof(Py_UNICODE) is 2 here (in gdb).
result = u''.join([_unichr(ucs) for ucs in Py_UNICODEs])
return result
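# Illustrative sketch (not part of the original file): the surrogate-pair
# arithmetic above, applied to U+10400 as stored by a build where
# sizeof(Py_UNICODE) == 2 (the two 16-bit units 0xD801, 0xDC00):
#
#   ucs, ucs2 = 0xD801, 0xDC00
#   code = (ucs & 0x03FF) << 10    # high ten bits  -> 0x00400
#   code |= ucs2 & 0x03FF          # low ten bits   -> still 0x00400
#   code += 0x00010000             # 0x10400, the original code point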
def int_from_int(gdbval):
return int(str(gdbval))
def stringify(val):
# TODO: repr() puts everything on one line; pformat can be nicer, but
# can lead to v.long results; this function isolates the choice
if True:
return repr(val)
else:
from pprint import pformat
return pformat(val)
class PyObjectPtrPrinter:
"Prints a (PyObject*)"
def __init__ (self, gdbval):
self.gdbval = gdbval
def to_string (self):
pyop = PyObjectPtr.from_pyobject_ptr(self.gdbval)
if True:
return pyop.get_truncated_repr(MAX_OUTPUT_LEN)
else:
# Generate full proxy value then stringify it.
# Doing so could be expensive
proxyval = pyop.proxyval(set())
return stringify(proxyval)
def pretty_printer_lookup(gdbval):
type = gdbval.type.unqualified()
if type.code == gdb.TYPE_CODE_PTR:
type = type.target().unqualified()
t = str(type)
if t in ("PyObject", "PyFrameObject"):
return PyObjectPtrPrinter(gdbval)
"""
During development, I've been manually invoking the code in this way:
(gdb) python
import sys
sys.path.append('/home/david/coding/python-gdb')
import libpython
end
then reloading it after each edit like this:
(gdb) python reload(libpython)
The following code should ensure that the prettyprinter is registered
if the code is autoloaded by gdb when visiting libpython.so, provided
that this python file is installed to the same path as the library (or its
.debug file) plus a "-gdb.py" suffix, e.g:
/usr/lib/libpython2.6.so.1.0-gdb.py
/usr/lib/debug/usr/lib/libpython2.6.so.1.0.debug-gdb.py
"""
def register (obj):
if obj is None:
obj = gdb
# Wire up the pretty-printer
obj.pretty_printers.append(pretty_printer_lookup)
register (gdb.current_objfile ())
# Unfortunately, the exact API exposed by the gdb module varies somewhat
# from build to build
# See http://bugs.python.org/issue8279?#msg102276
class Frame(object):
'''
Wrapper for gdb.Frame, adding various methods
'''
def __init__(self, gdbframe):
self._gdbframe = gdbframe
def older(self):
older = self._gdbframe.older()
if older:
return Frame(older)
else:
return None
def newer(self):
newer = self._gdbframe.newer()
if newer:
return Frame(newer)
else:
return None
def select(self):
'''If supported, select this frame and return True; return False if unsupported
Not all builds have a gdb.Frame.select method; seems to be present on Fedora 12
onwards, but absent on Ubuntu buildbot'''
if not hasattr(self._gdbframe, 'select'):
print ('Unable to select frame: '
'this build of gdb does not expose a gdb.Frame.select method')
return False
self._gdbframe.select()
return True
def get_index(self):
'''Calculate index of frame, starting at 0 for the newest frame within
this thread'''
index = 0
# Go down until you reach the newest frame:
iter_frame = self
while iter_frame.newer():
index += 1
iter_frame = iter_frame.newer()
return index
def is_evalframeex(self):
'''Is this a PyEval_EvalFrameEx frame?'''
if self._gdbframe.name() == 'PyEval_EvalFrameEx':
'''
I believe we also need to filter on the inline
struct frame_id.inline_depth, only regarding frames with
an inline depth of 0 as actually being this function
So we reject those with type gdb.INLINE_FRAME
'''
if self._gdbframe.type() == gdb.NORMAL_FRAME:
# We have a PyEval_EvalFrameEx frame:
return True
return False
def get_pyop(self):
try:
f = self._gdbframe.read_var('f')
return PyFrameObjectPtr.from_pyobject_ptr(f)
except ValueError:
return None
@classmethod
def get_selected_frame(cls):
_gdbframe = gdb.selected_frame()
if _gdbframe:
return Frame(_gdbframe)
return None
@classmethod
def get_selected_python_frame(cls):
'''Try to obtain the Frame for the python code in the selected frame,
or None'''
frame = cls.get_selected_frame()
while frame:
if frame.is_evalframeex():
return frame
frame = frame.older()
# Not found:
return None
def print_summary(self):
if self.is_evalframeex():
pyop = self.get_pyop()
if pyop:
sys.stdout.write('#%i %s\n' % (self.get_index(), pyop.get_truncated_repr(MAX_OUTPUT_LEN)))
sys.stdout.write(pyop.current_line())
else:
sys.stdout.write('#%i (unable to read python frame information)\n' % self.get_index())
else:
sys.stdout.write('#%i\n' % self.get_index())
class PyList(gdb.Command):
'''List the current Python source code, if any
Use
py-list START
to list at a different line number within the python source.
Use
py-list START, END
to list a specific range of lines within the python source.
'''
def __init__(self):
gdb.Command.__init__ (self,
"py-list",
gdb.COMMAND_FILES,
gdb.COMPLETE_NONE)
def invoke(self, args, from_tty):
import re
start = None
end = None
m = re.match(r'\s*(\d+)\s*', args)
if m:
start = int(m.group(0))
end = start + 10
m = re.match(r'\s*(\d+)\s*,\s*(\d+)\s*', args)
if m:
start, end = map(int, m.groups())
frame = Frame.get_selected_python_frame()
if not frame:
print 'Unable to locate python frame'
return
pyop = frame.get_pyop()
if not pyop:
print 'Unable to read information on python frame'
return
filename = pyop.filename()
lineno = pyop.current_line_num()
if start is None:
start = lineno - 5
end = lineno + 5
if start < 1:
start = 1
with open(filename, 'r') as f:
all_lines = f.readlines()
# start and end are 1-based, all_lines is 0-based;
# so [start-1:end] as a python slice gives us [start, end] as a
# closed interval
for i, line in enumerate(all_lines[start-1:end]):
linestr = str(i+start)
# Highlight current line:
if i + start == lineno:
linestr = '>' + linestr
sys.stdout.write('%4s %s' % (linestr, line))
# ...and register the command:
PyList()
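# Illustrative gdb session (hypothetical file and line numbers, shown only
# to demonstrate the "%4s %s" formatting and the '>' current-line marker
# produced by invoke() above):
#
#   (gdb) py-list
#      3  import sys
#     >4  def main():
#      5      sys.exit(0)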
def move_in_stack(move_up):
'''Move up or down the stack (for the py-up/py-down command)'''
frame = Frame.get_selected_python_frame()
while frame:
if move_up:
iter_frame = frame.older()
else:
iter_frame = frame.newer()
if not iter_frame:
break
if iter_frame.is_evalframeex():
# Result:
if iter_frame.select():
iter_frame.print_summary()
return
frame = iter_frame
if move_up:
print 'Unable to find an older python frame'
else:
print 'Unable to find a newer python frame'
class PyUp(gdb.Command):
'Select and print the python stack frame that called this one (if any)'
def __init__(self):
gdb.Command.__init__ (self,
"py-up",
gdb.COMMAND_STACK,
gdb.COMPLETE_NONE)
def invoke(self, args, from_tty):
move_in_stack(move_up=True)
class PyDown(gdb.Command):
'Select and print the python stack frame called by this one (if any)'
def __init__(self):
gdb.Command.__init__ (self,
"py-down",
gdb.COMMAND_STACK,
gdb.COMPLETE_NONE)
def invoke(self, args, from_tty):
move_in_stack(move_up=False)
# Not all builds of gdb have gdb.Frame.select
if hasattr(gdb.Frame, 'select'):
PyUp()
PyDown()
class PyBacktrace(gdb.Command):
'Display the current python frame and all the frames within its call stack (if any)'
def __init__(self):
gdb.Command.__init__ (self,
"py-bt",
gdb.COMMAND_STACK,
gdb.COMPLETE_NONE)
def invoke(self, args, from_tty):
frame = Frame.get_selected_python_frame()
while frame:
if frame.is_evalframeex():
frame.print_summary()
frame = frame.older()
PyBacktrace()
class PyPrint(gdb.Command):
'Look up the given python variable name, and print it'
def __init__(self):
gdb.Command.__init__ (self,
"py-print",
gdb.COMMAND_DATA,
gdb.COMPLETE_NONE)
def invoke(self, args, from_tty):
name = str(args)
frame = Frame.get_selected_python_frame()
if not frame:
print 'Unable to locate python frame'
return
pyop_frame = frame.get_pyop()
if not pyop_frame:
print 'Unable to read information on python frame'
return
pyop_var, scope = pyop_frame.get_var_by_name(name)
if pyop_var:
print ('%s %r = %s'
% (scope,
name,
pyop_var.get_truncated_repr(MAX_OUTPUT_LEN)))
else:
print '%r not found' % name
PyPrint()
class PyLocals(gdb.Command):
'Print the local variables of the currently selected python frame'
def __init__(self):
gdb.Command.__init__ (self,
"py-locals",
gdb.COMMAND_DATA,
gdb.COMPLETE_NONE)
def invoke(self, args, from_tty):
name = str(args)
frame = Frame.get_selected_python_frame()
if not frame:
print 'Unable to locate python frame'
return
pyop_frame = frame.get_pyop()
if not pyop_frame:
print 'Unable to read information on python frame'
return
for pyop_name, pyop_value in pyop_frame.iter_locals():
print ('%s = %s'
% (pyop_name.proxyval(set()),
pyop_value.get_truncated_repr(MAX_OUTPUT_LEN)))
PyLocals()
|
yanheven/glance | refs/heads/master | glance/tests/functional/v2/test_metadef_objects.py | 5 | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from oslo.serialization import jsonutils
import requests
from glance.tests import functional
TENANT1 = str(uuid.uuid4())
class TestMetadefObjects(functional.FunctionalTest):
def setUp(self):
super(TestMetadefObjects, self).setUp()
self.cleanup()
self.api_server.deployment_flavor = 'noauth'
self.start_servers(**self.__dict__.copy())
def _url(self, path):
return 'http://127.0.0.1:%d%s' % (self.api_port, path)
def _headers(self, custom_headers=None):
base_headers = {
'X-Identity-Status': 'Confirmed',
'X-Auth-Token': '932c5c84-02ac-4fe5-a9ba-620af0e2bb96',
'X-User-Id': 'f9a41d13-0c13-47e9-bee2-ce4e8bfe958e',
'X-Tenant-Id': TENANT1,
'X-Roles': 'admin',
}
base_headers.update(custom_headers or {})
return base_headers
def test_metadata_objects_lifecycle(self):
# Namespace should not exist
path = self._url('/v2/metadefs/namespaces/MyNamespace')
response = requests.get(path, headers=self._headers())
self.assertEqual(404, response.status_code)
# Create a namespace
path = self._url('/v2/metadefs/namespaces')
headers = self._headers({'content-type': 'application/json'})
namespace_name = 'MyNamespace'
data = jsonutils.dumps({
"namespace": namespace_name,
"display_name": "My User Friendly Namespace",
"description": "My description",
"visibility": "public",
"protected": False,
"owner": "The Test Owner"
}
)
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Metadata objects should not exist
path = self._url('/v2/metadefs/namespaces/MyNamespace/objects/object1')
response = requests.get(path, headers=self._headers())
self.assertEqual(404, response.status_code)
# Create a object
path = self._url('/v2/metadefs/namespaces/MyNamespace/objects')
headers = self._headers({'content-type': 'application/json'})
metadata_object_name = "object1"
data = jsonutils.dumps(
{
"name": metadata_object_name,
"description": "object1 description.",
"required": [
"property1"
],
"properties": {
"property1": {
"type": "integer",
"title": "property1",
"description": "property1 description",
"operators": ["<all-in>"],
"default": 100,
"minimum": 100,
"maximum": 30000369
},
"property2": {
"type": "string",
"title": "property2",
"description": "property2 description ",
"default": "value2",
"minLength": 2,
"maxLength": 50
}
}
}
)
response = requests.post(path, headers=headers, data=data)
self.assertEqual(201, response.status_code)
# Get the metadata object created above
path = self._url('/v2/metadefs/namespaces/%s/objects/%s' %
(namespace_name, metadata_object_name))
response = requests.get(path,
headers=self._headers())
self.assertEqual(200, response.status_code)
metadata_object = jsonutils.loads(response.text)
self.assertEqual("object1", metadata_object['name'])
# Returned object should match the created object
metadata_object = jsonutils.loads(response.text)
checked_keys = set([
u'name',
u'description',
u'properties',
u'required',
u'self',
u'schema',
u'created_at',
u'updated_at'
])
self.assertEqual(set(metadata_object.keys()), checked_keys)
expected_metadata_object = {
"name": metadata_object_name,
"description": "object1 description.",
"required": [
"property1"
],
"properties": {
'property1': {
'type': 'integer',
"title": "property1",
'description': 'property1 description',
'operators': ['<all-in>'],
'default': 100,
'minimum': 100,
'maximum': 30000369
},
"property2": {
"type": "string",
"title": "property2",
"description": "property2 description ",
"default": "value2",
"minLength": 2,
"maxLength": 50
}
},
"self": "/v2/metadefs/namespaces/%("
"namespace)s/objects/%(object)s" %
{'namespace': namespace_name,
'object': metadata_object_name},
"schema": "v2/schemas/metadefs/object"
}
# Simple key values
checked_values = set([
u'name',
u'description',
])
for key, value in expected_metadata_object.items():
if key in checked_values:
self.assertEqual(metadata_object[key], value, key)
# Complex key values - properties
for key, value in \
expected_metadata_object["properties"]['property2'].items():
self.assertEqual(
metadata_object["properties"]["property2"][key],
value, key
)
# The metadata_object should be mutable
path = self._url('/v2/metadefs/namespaces/%s/objects/%s' %
(namespace_name, metadata_object_name))
media_type = 'application/json'
headers = self._headers({'content-type': media_type})
metadata_object_name = "object1-UPDATED"
data = jsonutils.dumps(
{
"name": metadata_object_name,
"description": "desc-UPDATED",
"required": [
"property2"
],
"properties": {
'property1': {
'type': 'integer',
"title": "property1",
'description': 'p1 desc-UPDATED',
'default': 500,
'minimum': 500,
'maximum': 1369
},
"property2": {
"type": "string",
"title": "property2",
"description": "p2 desc-UPDATED",
'operators': ['<or>'],
"default": "value2-UPDATED",
"minLength": 5,
"maxLength": 150
}
}
}
)
response = requests.put(path, headers=headers, data=data)
self.assertEqual(200, response.status_code, response.text)
# Returned metadata_object should reflect the changes
metadata_object = jsonutils.loads(response.text)
self.assertEqual('object1-UPDATED', metadata_object['name'])
self.assertEqual('desc-UPDATED', metadata_object['description'])
self.assertEqual('property2', metadata_object['required'][0])
updated_property1 = metadata_object['properties']['property1']
updated_property2 = metadata_object['properties']['property2']
self.assertEqual('integer', updated_property1['type'])
self.assertEqual('p1 desc-UPDATED', updated_property1['description'])
self.assertEqual('500', updated_property1['default'])
self.assertEqual(500, updated_property1['minimum'])
self.assertEqual(1369, updated_property1['maximum'])
self.assertEqual(['<or>'], updated_property2['operators'])
self.assertEqual('string', updated_property2['type'])
self.assertEqual('p2 desc-UPDATED', updated_property2['description'])
self.assertEqual('value2-UPDATED', updated_property2['default'])
self.assertEqual(5, updated_property2['minLength'])
self.assertEqual(150, updated_property2['maxLength'])
# Updates should persist across requests
path = self._url('/v2/metadefs/namespaces/%s/objects/%s' %
(namespace_name, metadata_object_name))
response = requests.get(path, headers=self._headers())
self.assertEqual(200, response.status_code)
# Re-read the object from the fresh response instead of re-checking the
# stale dict, so these assertions actually verify persistence
metadata_object = jsonutils.loads(response.text)
self.assertEqual('object1-UPDATED', metadata_object['name'])
self.assertEqual('desc-UPDATED', metadata_object['description'])
self.assertEqual('property2', metadata_object['required'][0])
updated_property1 = metadata_object['properties']['property1']
updated_property2 = metadata_object['properties']['property2']
self.assertEqual('integer', updated_property1['type'])
self.assertEqual('p1 desc-UPDATED', updated_property1['description'])
self.assertEqual('500', updated_property1['default'])
self.assertEqual(500, updated_property1['minimum'])
self.assertEqual(1369, updated_property1['maximum'])
self.assertEqual(['<or>'], updated_property2['operators'])
self.assertEqual('string', updated_property2['type'])
self.assertEqual('p2 desc-UPDATED', updated_property2['description'])
self.assertEqual('value2-UPDATED', updated_property2['default'])
self.assertEqual(5, updated_property2['minLength'])
self.assertEqual(150, updated_property2['maxLength'])
# Deletion of metadata_object object1
path = self._url('/v2/metadefs/namespaces/%s/objects/%s' %
(namespace_name, metadata_object_name))
response = requests.delete(path, headers=self._headers())
self.assertEqual(204, response.status_code)
# metadata_object object1 should not exist
path = self._url('/v2/metadefs/namespaces/%s/objects/%s' %
(namespace_name, metadata_object_name))
response = requests.get(path, headers=self._headers())
self.assertEqual(404, response.status_code)
|
MounirMesselmeni/django | refs/heads/master | tests/template_tests/filter_tests/test_capfirst.py | 521 | from django.template.defaultfilters import capfirst
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe
from ..utils import setup
class CapfirstTests(SimpleTestCase):
@setup({'capfirst01': '{% autoescape off %}{{ a|capfirst }} {{ b|capfirst }}{% endautoescape %}'})
def test_capfirst01(self):
output = self.engine.render_to_string('capfirst01', {'a': 'fred>', 'b': mark_safe('fred>')})
self.assertEqual(output, 'Fred> Fred>')
@setup({'capfirst02': '{{ a|capfirst }} {{ b|capfirst }}'})
def test_capfirst02(self):
output = self.engine.render_to_string('capfirst02', {'a': 'fred>', 'b': mark_safe('fred>')})
self.assertEqual(output, 'Fred> Fred>')
class FunctionTests(SimpleTestCase):
def test_capfirst(self):
self.assertEqual(capfirst('hello world'), 'Hello world')
|
andybak/hendrix | refs/heads/master | examples/django_hx_chatserver/example_app/example_app/settings.py | 4 | """
Django settings for example_app project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'do not use this app straight out of the box!!!!'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
INSTALLED_APPS += (
'chat', # your app goes here.
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'example_app.urls'
WSGI_APPLICATION = 'example_app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'US/Pacific'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
# THIS IS THE ONLY THING THAT NEEDS TO BE ADDED TO SETTINGS
STATIC_ROOT = BASE_DIR + '/servedstatics'
HENDRIX_CHILD_RESOURCES = (
'hendrix.contrib.async.resources.MessageResource',
'hendrix.contrib.resources.static.DjangoStaticsFinder'
)
|
romain-li/edx-platform | refs/heads/master | openedx/core/djangoapps/embargo/views.py | 10 | """Views served by the embargo app. """
from django.http import Http404
from django.views.generic.base import View
from edxmako.shortcuts import render_to_response
from . import messages
class CourseAccessMessageView(View):
"""Show a message explaining that the user was blocked from a course. """
ENROLLMENT_ACCESS_POINT = 'enrollment'
COURSEWARE_ACCESS_POINT = 'courseware'
def get(self, request, access_point=None, message_key=None):
"""Show a message explaining that the user was blocked.
Arguments:
request (HttpRequest)
Keyword Arguments:
access_point (str): Either 'enrollment' or 'courseware',
indicating how the user is trying to access the restricted
content.
message_key (str): An identifier for which message to show.
See `embargo.messages` for more information.
Returns:
HttpResponse
Raises:
Http404: If no message is configured for the specified message key.
"""
blocked_message = self._message(access_point, message_key)
if blocked_message is None:
raise Http404
return render_to_response(blocked_message.template, {})
def _message(self, access_point, message_key):
"""Retrieve message information.
Arguments:
access_point (str): Either 'enrollment' or 'courseware'
message_key (str): The identifier for which message to show.
Returns:
embargo.messages.BlockedMessage or None
"""
message_dict = dict()
# The access point determines which set of messages to use.
# This allows us to show different messages to students who
# are enrolling in a course than we show to students
# who are enrolled and accessing courseware.
if access_point == self.ENROLLMENT_ACCESS_POINT:
message_dict = messages.ENROLL_MESSAGES
elif access_point == self.COURSEWARE_ACCESS_POINT:
message_dict = messages.COURSEWARE_MESSAGES
return message_dict.get(message_key)
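# Illustrative sketch (hypothetical values, not part of this module): a GET
# dispatched with access_point='enrollment' and message_key='embargo' is
# answered by looking up messages.ENROLL_MESSAGES['embargo'] and rendering
# its template; an unknown message_key yields None and therefore Http404.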
|
timkrentz/SunTracker | refs/heads/master | IMU/VTK-6.2.0/ThirdParty/Twisted/twisted/protocols/memcache.py | 2 | # -*- test-case-name: twisted.test.test_memcache -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Memcache client protocol. Memcached is a caching server, storing data in the
form of pairs key/value, and memcache is the protocol to talk with it.
To connect to a server, create a factory for L{MemCacheProtocol}::
from twisted.internet import reactor, protocol
from twisted.protocols.memcache import MemCacheProtocol, DEFAULT_PORT
d = protocol.ClientCreator(reactor, MemCacheProtocol
).connectTCP("localhost", DEFAULT_PORT)
def doSomething(proto):
# Here you call the memcache operations
return proto.set("mykey", "a lot of data")
d.addCallback(doSomething)
reactor.run()
All the operations of the memcache protocol are present, but
L{MemCacheProtocol.set} and L{MemCacheProtocol.get} are the more important.
See U{http://code.sixapart.com/svn/memcached/trunk/server/doc/protocol.txt} for
more information about the protocol.
"""
from collections import deque
from twisted.protocols.basic import LineReceiver
from twisted.protocols.policies import TimeoutMixin
from twisted.internet.defer import Deferred, fail, TimeoutError
from twisted.python import log
DEFAULT_PORT = 11211
class NoSuchCommand(Exception):
"""
Exception raised when a non existent command is called.
"""
class ClientError(Exception):
"""
Error caused by an invalid client call.
"""
class ServerError(Exception):
"""
Problem happening on the server.
"""
class Command(object):
"""
Wrap a client action into an object, that holds the values used in the
protocol.
@ivar _deferred: the L{Deferred} object that will be fired when the result
arrives.
@type _deferred: L{Deferred}
@ivar command: name of the command sent to the server.
@type command: C{str}
"""
def __init__(self, command, **kwargs):
"""
Create a command.
@param command: the name of the command.
@type command: C{str}
@param kwargs: this values will be stored as attributes of the object
for future use
"""
self.command = command
self._deferred = Deferred()
for k, v in kwargs.items():
setattr(self, k, v)
def success(self, value):
"""
Shortcut method to fire the underlying deferred.
"""
self._deferred.callback(value)
def fail(self, error):
"""
Make the underlying deferred fails.
"""
self._deferred.errback(error)
class MemCacheProtocol(LineReceiver, TimeoutMixin):
"""
MemCache protocol: connect to a memcached server to store/retrieve values.
@ivar persistentTimeOut: the timeout period used to wait for a response.
@type persistentTimeOut: C{int}
@ivar _current: current list of requests waiting for an answer from the
server.
@type _current: C{deque} of L{Command}
@ivar _lenExpected: amount of data expected in raw mode, when reading for
a value.
@type _lenExpected: C{int}
@ivar _getBuffer: current buffer of data, used to store temporary data
when reading in raw mode.
@type _getBuffer: C{list}
@ivar _bufferLength: the total amount of bytes in C{_getBuffer}.
@type _bufferLength: C{int}
@ivar _disconnected: indicate if the connectionLost has been called or not.
@type _disconnected: C{bool}
"""
MAX_KEY_LENGTH = 250
_disconnected = False
def __init__(self, timeOut=60):
"""
Create the protocol.
@param timeOut: the timeout to wait before detecting that the
connection is dead and close it. It's expressed in seconds.
@type timeOut: C{int}
"""
self._current = deque()
self._lenExpected = None
self._getBuffer = None
self._bufferLength = None
self.persistentTimeOut = self.timeOut = timeOut
def _cancelCommands(self, reason):
"""
Cancel all the outstanding commands, making them fail with C{reason}.
"""
while self._current:
cmd = self._current.popleft()
cmd.fail(reason)
def timeoutConnection(self):
"""
Close the connection in case of timeout.
"""
self._cancelCommands(TimeoutError("Connection timeout"))
self.transport.loseConnection()
def connectionLost(self, reason):
"""
Cause any outstanding commands to fail.
"""
self._disconnected = True
self._cancelCommands(reason)
LineReceiver.connectionLost(self, reason)
def sendLine(self, line):
"""
Override sendLine to add a timeout to response.
"""
if not self._current:
self.setTimeout(self.persistentTimeOut)
LineReceiver.sendLine(self, line)
def rawDataReceived(self, data):
"""
Collect data for a get.
"""
self.resetTimeout()
self._getBuffer.append(data)
self._bufferLength += len(data)
if self._bufferLength >= self._lenExpected + 2:
data = "".join(self._getBuffer)
buf = data[:self._lenExpected]
rem = data[self._lenExpected + 2:]
val = buf
self._lenExpected = None
self._getBuffer = None
self._bufferLength = None
cmd = self._current[0]
if cmd.multiple:
flags, cas = cmd.values[cmd.currentKey]
cmd.values[cmd.currentKey] = (flags, cas, val)
else:
cmd.value = val
self.setLineMode(rem)
def cmd_STORED(self):
"""
Manage a success response to a set operation.
"""
self._current.popleft().success(True)
def cmd_NOT_STORED(self):
"""
Manage a specific 'not stored' response to a set operation: this is not
an error, but some condition wasn't met.
"""
self._current.popleft().success(False)
def cmd_END(self):
"""
This is the end token to a get or a stat operation.
"""
cmd = self._current.popleft()
if cmd.command == "get":
if cmd.multiple:
values = dict([(key, val[::2]) for key, val in
cmd.values.iteritems()])
cmd.success(values)
else:
cmd.success((cmd.flags, cmd.value))
elif cmd.command == "gets":
if cmd.multiple:
cmd.success(cmd.values)
else:
cmd.success((cmd.flags, cmd.cas, cmd.value))
elif cmd.command == "stats":
cmd.success(cmd.values)
def cmd_NOT_FOUND(self):
"""
Manage error response for incr/decr/delete.
"""
self._current.popleft().success(False)
def cmd_VALUE(self, line):
"""
Prepare the reading a value after a get.
"""
cmd = self._current[0]
if cmd.command == "get":
key, flags, length = line.split()
cas = ""
else:
key, flags, length, cas = line.split()
self._lenExpected = int(length)
self._getBuffer = []
self._bufferLength = 0
if cmd.multiple:
if key not in cmd.keys:
raise RuntimeError("Unexpected commands answer.")
cmd.currentKey = key
cmd.values[key] = [int(flags), cas]
else:
if cmd.key != key:
raise RuntimeError("Unexpected commands answer.")
cmd.flags = int(flags)
cmd.cas = cas
self.setRawMode()
def cmd_STAT(self, line):
"""
Reception of one stat line.
"""
cmd = self._current[0]
key, val = line.split(" ", 1)
cmd.values[key] = val
def cmd_VERSION(self, versionData):
"""
Read version token.
"""
self._current.popleft().success(versionData)
def cmd_ERROR(self):
"""
A non-existent command has been sent.
"""
log.err("Non-existent command sent.")
cmd = self._current.popleft()
cmd.fail(NoSuchCommand())
def cmd_CLIENT_ERROR(self, errText):
"""
An invalid input has been sent.
"""
log.err("Invalid input: %s" % (errText,))
cmd = self._current.popleft()
cmd.fail(ClientError(errText))
def cmd_SERVER_ERROR(self, errText):
"""
An error has happened server-side.
"""
log.err("Server error: %s" % (errText,))
cmd = self._current.popleft()
cmd.fail(ServerError(errText))
def cmd_DELETED(self):
"""
A delete command has completed successfully.
"""
self._current.popleft().success(True)
def cmd_OK(self):
"""
The last command has been completed.
"""
self._current.popleft().success(True)
def cmd_EXISTS(self):
"""
A C{checkAndSet} update has failed.
"""
self._current.popleft().success(False)
def lineReceived(self, line):
"""
Receive line commands from the server.
"""
self.resetTimeout()
token = line.split(" ", 1)[0]
# First manage standard commands without space
cmd = getattr(self, "cmd_%s" % (token,), None)
if cmd is not None:
args = line.split(" ", 1)[1:]
if args:
cmd(args[0])
else:
cmd()
else:
# Then manage commands with space in it
line = line.replace(" ", "_")
cmd = getattr(self, "cmd_%s" % (line,), None)
if cmd is not None:
cmd()
else:
# Increment/Decrement response
cmd = self._current.popleft()
val = int(line)
cmd.success(val)
if not self._current:
# No pending request, remove timeout
self.setTimeout(None)
def increment(self, key, val=1):
"""
Increment the value of C{key} by given value (default to 1).
C{key} must be consistent with an int. Return the new value.
@param key: the key to modify.
@type key: C{str}
@param val: the value to increment.
@type val: C{int}
@return: a deferred that will be called back with the new value
associated with the key (after the increment).
@rtype: L{Deferred}
"""
return self._incrdecr("incr", key, val)
def decrement(self, key, val=1):
"""
Decrement the value of C{key} by given value (default to 1).
C{key} must be consistent with an int. Return the new value, coerced to
0 if negative.
@param key: the key to modify.
@type key: C{str}
@param val: the value to decrement.
@type val: C{int}
@return: a deferred that will be called back with the new value
associated with the key (after the decrement).
@rtype: L{Deferred}
"""
return self._incrdecr("decr", key, val)
def _incrdecr(self, cmd, key, val):
"""
Internal wrapper for incr/decr.
"""
if self._disconnected:
return fail(RuntimeError("not connected"))
if not isinstance(key, str):
return fail(ClientError(
"Invalid type for key: %s, expecting a string" % (type(key),)))
if len(key) > self.MAX_KEY_LENGTH:
return fail(ClientError("Key too long"))
fullcmd = "%s %s %d" % (cmd, key, int(val))
self.sendLine(fullcmd)
cmdObj = Command(cmd, key=key)
self._current.append(cmdObj)
return cmdObj._deferred
def replace(self, key, val, flags=0, expireTime=0):
"""
Replace the given C{key}. It must already exist in the server.
@param key: the key to replace.
@type key: C{str}
@param val: the new value associated with the key.
@type val: C{str}
@param flags: the flags to store with the key.
@type flags: C{int}
@param expireTime: if different from 0, the relative time in seconds
when the key will be deleted from the store.
@type expireTime: C{int}
@return: a deferred that will fire with C{True} if the operation has
succeeded, and C{False} if the key didn't previously exist.
@rtype: L{Deferred}
"""
return self._set("replace", key, val, flags, expireTime, "")
def add(self, key, val, flags=0, expireTime=0):
"""
Add the given C{key}. It must not exist in the server.
@param key: the key to add.
@type key: C{str}
@param val: the value associated with the key.
@type val: C{str}
@param flags: the flags to store with the key.
@type flags: C{int}
@param expireTime: if different from 0, the relative time in seconds
when the key will be deleted from the store.
@type expireTime: C{int}
@return: a deferred that will fire with C{True} if the operation has
succeeded, and C{False} if the key already exists.
@rtype: L{Deferred}
"""
return self._set("add", key, val, flags, expireTime, "")
def set(self, key, val, flags=0, expireTime=0):
"""
Set the given C{key}.
@param key: the key to set.
@type key: C{str}
@param val: the value associated with the key.
@type val: C{str}
@param flags: the flags to store with the key.
@type flags: C{int}
@param expireTime: if different from 0, the relative time in seconds
when the key will be deleted from the store.
@type expireTime: C{int}
@return: a deferred that will fire with C{True} if the operation has
succeeded.
@rtype: L{Deferred}
"""
return self._set("set", key, val, flags, expireTime, "")
def checkAndSet(self, key, val, cas, flags=0, expireTime=0):
"""
Change the content of C{key} only if the C{cas} value matches the
current one associated with the key. Use this to store a value which
hasn't been modified since last time you fetched it.
@param key: The key to set.
@type key: C{str}
@param val: The value associated with the key.
@type val: C{str}
@param cas: Unique 64-bit value returned by previous call of C{get}.
@type cas: C{str}
@param flags: The flags to store with the key.
@type flags: C{int}
@param expireTime: If different from 0, the relative time in seconds
when the key will be deleted from the store.
@type expireTime: C{int}
@return: A deferred that will fire with C{True} if the operation has
succeeded, C{False} otherwise.
@rtype: L{Deferred}
"""
return self._set("cas", key, val, flags, expireTime, cas)
def _set(self, cmd, key, val, flags, expireTime, cas):
"""
Internal wrapper for setting values.
"""
if self._disconnected:
return fail(RuntimeError("not connected"))
if not isinstance(key, str):
return fail(ClientError(
"Invalid type for key: %s, expecting a string" % (type(key),)))
if len(key) > self.MAX_KEY_LENGTH:
return fail(ClientError("Key too long"))
if not isinstance(val, str):
return fail(ClientError(
"Invalid type for value: %s, expecting a string" %
(type(val),)))
if cas:
cas = " " + cas
length = len(val)
fullcmd = "%s %s %d %d %d%s" % (
cmd, key, flags, expireTime, length, cas)
self.sendLine(fullcmd)
self.sendLine(val)
cmdObj = Command(cmd, key=key, flags=flags, length=length)
self._current.append(cmdObj)
return cmdObj._deferred
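# For reference, fullcmd above follows the memcached text protocol's
# storage-command line, "<cmd> <key> <flags> <exptime> <bytes>[ <cas>]",
# followed by the data block on its own line; e.g. set("foo", "bar")
# sends "set foo 0 0 3" and then "bar".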
def append(self, key, val):
"""
Append given data to the value of an existing key.
@param key: The key to modify.
@type key: C{str}
@param val: The value to append to the current value associated with
the key.
@type val: C{str}
@return: A deferred that will fire with C{True} if the operation has
succeeded, C{False} otherwise.
@rtype: L{Deferred}
"""
# Even if flags and expTime values are ignored, we have to pass them
return self._set("append", key, val, 0, 0, "")
def prepend(self, key, val):
"""
Prepend given data to the value of an existing key.
@param key: The key to modify.
@type key: C{str}
@param val: The value to prepend to the current value associated with
the key.
@type val: C{str}
@return: A deferred that will fire with C{True} if the operation has
succeeded, C{False} otherwise.
@rtype: L{Deferred}
"""
# Even if flags and expTime values are ignored, we have to pass them
return self._set("prepend", key, val, 0, 0, "")
def get(self, key, withIdentifier=False):
"""
Get the given C{key}. It doesn't support multiple keys. If
C{withIdentifier} is set to C{True}, the command issued is a C{gets},
that will return the current identifier associated with the value. This
identifier has to be used when issuing C{checkAndSet} update later,
using the corresponding method.
@param key: The key to retrieve.
@type key: C{str}
@param withIdentifier: If set to C{True}, retrieve the current
identifier along with the value and the flags.
@type withIdentifier: C{bool}
@return: A deferred that will fire with the tuple (flags, value) if
C{withIdentifier} is C{False}, or (flags, cas identifier, value)
if C{True}. If the server indicates there is no value
associated with C{key}, the returned value will be C{None} and
the returned flags will be C{0}.
@rtype: L{Deferred}
"""
return self._get([key], withIdentifier, False)
def getMultiple(self, keys, withIdentifier=False):
"""
Get the given list of C{keys}. If C{withIdentifier} is set to C{True},
the command issued is a C{gets}, that will return the identifiers
associated with each values. This identifier has to be used when
issuing C{checkAndSet} update later, using the corresponding method.
@param keys: The keys to retrieve.
@type keys: C{list} of C{str}
@param withIdentifier: If set to C{True}, retrieve the identifiers
along with the values and the flags.
@type withIdentifier: C{bool}
@return: A deferred that will fire with a dictionary with the elements
of C{keys} as keys and the tuples (flags, value) as values if
C{withIdentifier} is C{False}, or (flags, cas identifier, value) if
C{True}. If the server indicates there is no value associated with
C{key}, the returned values will be C{None} and the returned flags
will be C{0}.
@rtype: L{Deferred}
@since: 9.0
"""
return self._get(keys, withIdentifier, True)
def _get(self, keys, withIdentifier, multiple):
"""
Helper method for C{get} and C{getMultiple}.
"""
if self._disconnected:
return fail(RuntimeError("not connected"))
for key in keys:
if not isinstance(key, str):
return fail(ClientError(
"Invalid type for key: %s, expecting a string" % (type(key),)))
if len(key) > self.MAX_KEY_LENGTH:
return fail(ClientError("Key too long"))
if withIdentifier:
cmd = "gets"
else:
cmd = "get"
fullcmd = "%s %s" % (cmd, " ".join(keys))
self.sendLine(fullcmd)
if multiple:
values = dict([(key, (0, "", None)) for key in keys])
cmdObj = Command(cmd, keys=keys, values=values, multiple=True)
else:
cmdObj = Command(cmd, key=keys[0], value=None, flags=0, cas="",
multiple=False)
self._current.append(cmdObj)
return cmdObj._deferred
def stats(self, arg=None):
"""
Get some stats from the server. It will be available as a dict.
@param arg: An optional additional string which will be sent along
with the I{stats} command. The interpretation of this value by
the server is left undefined by the memcache protocol
specification.
@type arg: L{NoneType} or L{str}
@return: a deferred that will fire with a C{dict} of the available
statistics.
@rtype: L{Deferred}
"""
if arg:
cmd = "stats " + arg
else:
cmd = "stats"
if self._disconnected:
return fail(RuntimeError("not connected"))
self.sendLine(cmd)
cmdObj = Command("stats", values={})
self._current.append(cmdObj)
return cmdObj._deferred
def version(self):
"""
Get the version of the server.
@return: a deferred that will fire with the string value of the
version.
@rtype: L{Deferred}
"""
if self._disconnected:
return fail(RuntimeError("not connected"))
self.sendLine("version")
cmdObj = Command("version")
self._current.append(cmdObj)
return cmdObj._deferred
def delete(self, key):
"""
Delete an existing C{key}.
@param key: the key to delete.
@type key: C{str}
@return: a deferred that will be called back with C{True} if the key
was successfully deleted, or C{False} if not.
@rtype: L{Deferred}
"""
if self._disconnected:
return fail(RuntimeError("not connected"))
if not isinstance(key, str):
return fail(ClientError(
"Invalid type for key: %s, expecting a string" % (type(key),)))
self.sendLine("delete %s" % key)
cmdObj = Command("delete", key=key)
self._current.append(cmdObj)
return cmdObj._deferred
def flushAll(self):
"""
Flush all cached values.
@return: a deferred that will be called back with C{True} when the
operation has succeeded.
@rtype: L{Deferred}
"""
if self._disconnected:
return fail(RuntimeError("not connected"))
self.sendLine("flush_all")
cmdObj = Command("flush_all")
self._current.append(cmdObj)
return cmdObj._deferred
__all__ = ["MemCacheProtocol", "DEFAULT_PORT", "NoSuchCommand", "ClientError",
"ServerError"]
|
EricMuller/mywebmarks-backend | refs/heads/master | requirements/twisted/Twisted-17.1.0/build/lib.linux-x86_64-3.5/twisted/application/runner/test/test_runner.py | 11 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.application.runner._runner}.
"""
from signal import SIGTERM
from io import BytesIO
import errno
from twisted.logger import (
LogLevel, LogPublisher, LogBeginner,
FileLogObserver, FilteringLogObserver, LogLevelFilterPredicate,
)
from twisted.test.proto_helpers import MemoryReactor
from ...runner import _runner
from .._exit import ExitStatus
from .._pidfile import PIDFile, NonePIDFile
from .._runner import Runner, RunnerOptions
from .test_pidfile import DummyFilePath
import twisted.trial.unittest
class RunnerTests(twisted.trial.unittest.TestCase):
"""
Tests for L{Runner}.
"""
def setUp(self):
# Patch exit and kill so we can capture usage and prevent actual exits
# and kills.
self.exit = DummyExit()
self.kill = DummyKill()
self.patch(_runner, "exit", self.exit)
self.patch(_runner, "kill", self.kill)
# Patch getpid so we get a known result
self.pid = 1337
self.pidFileContent = u"{}\n".format(self.pid).encode("utf-8")
# Patch globalLogBeginner so that we aren't trying to install multiple
# global log observers.
self.stdout = BytesIO()
self.stderr = BytesIO()
self.stdio = DummyStandardIO(self.stdout, self.stderr)
self.warnings = DummyWarningsModule()
self.globalLogPublisher = LogPublisher()
self.globalLogBeginner = LogBeginner(
self.globalLogPublisher,
self.stdio.stderr, self.stdio,
self.warnings,
)
self.patch(_runner, "stderr", self.stderr)
self.patch(_runner, "globalLogBeginner", self.globalLogBeginner)
def test_runInOrder(self):
"""
L{Runner.run} calls the expected methods in order.
"""
runner = DummyRunner({})
runner.run()
self.assertEqual(
runner.calledMethods,
[
"killIfRequested",
"startLogging",
"startReactor",
"reactorExited",
]
)
def test_runUsesPIDFile(self):
"""
L{Runner.run} uses the provided PID file.
"""
pidFile = DummyPIDFile()
runner = DummyRunner({RunnerOptions.pidFile: pidFile})
self.assertFalse(pidFile.entered)
self.assertFalse(pidFile.exited)
runner.run()
self.assertTrue(pidFile.entered)
self.assertTrue(pidFile.exited)
def test_runAlreadyRunning(self):
"""
L{Runner.run} exits with L{ExitStatus.EX_USAGE} and the expected
message if a process is already running that corresponds to the given
PID file.
"""
pidFile = PIDFile(DummyFilePath(self.pidFileContent))
pidFile.isRunning = lambda: True
runner = DummyRunner({RunnerOptions.pidFile: pidFile})
runner.run()
self.assertEqual(self.exit.status, ExitStatus.EX_CONFIG)
self.assertEqual(self.exit.message, "Already running.")
def test_killNotRequested(self):
"""
L{Runner.killIfRequested} without L{RunnerOptions.kill} doesn't exit
and doesn't indiscriminately murder anyone.
"""
runner = Runner({})
runner.killIfRequested()
self.assertEqual(self.kill.calls, [])
self.assertFalse(self.exit.exited)
def test_killRequestedWithoutPIDFile(self):
"""
L{Runner.killIfRequested} with L{RunnerOptions.kill} but without
L{RunnerOptions.pidFile}, exits with L{ExitStatus.EX_USAGE} and
the expected message, and also doesn't indiscriminately murder anyone.
"""
runner = Runner({RunnerOptions.kill: True})
runner.killIfRequested()
self.assertEqual(self.kill.calls, [])
self.assertEqual(self.exit.status, ExitStatus.EX_USAGE)
self.assertEqual(self.exit.message, "No PID file specified.")
def test_killRequestedWithPIDFile(self):
"""
L{Runner.killIfRequested} with both L{RunnerOptions.kill} and
L{RunnerOptions.pidFile} performs a targeted killing of the
appropriate process.
"""
pidFile = PIDFile(DummyFilePath(self.pidFileContent))
runner = Runner({
RunnerOptions.kill: True,
RunnerOptions.pidFile: pidFile,
})
runner.killIfRequested()
self.assertEqual(self.kill.calls, [(self.pid, SIGTERM)])
self.assertEqual(self.exit.status, ExitStatus.EX_OK)
self.assertIdentical(self.exit.message, None)
def test_killRequestedWithPIDFileCantRead(self):
"""
L{Runner.killIfRequested} with both L{RunnerOptions.kill} and a
L{RunnerOptions.pidFile} that it can't read exits with
L{ExitStatus.EX_IOERR}.
"""
pidFile = PIDFile(DummyFilePath(None))
def read():
raise OSError(errno.EACCES, "Permission denied")
pidFile.read = read
runner = Runner({
RunnerOptions.kill: True,
RunnerOptions.pidFile: pidFile,
})
runner.killIfRequested()
self.assertEqual(self.exit.status, ExitStatus.EX_IOERR)
self.assertEqual(self.exit.message, "Unable to read PID file.")
def test_killRequestedWithPIDFileEmpty(self):
"""
L{Runner.killIfRequested} with both L{RunnerOptions.kill} and a
L{RunnerOptions.pidFile} containing no value exits with
L{ExitStatus.EX_DATAERR}.
"""
pidFile = PIDFile(DummyFilePath(b""))
runner = Runner({
RunnerOptions.kill: True,
RunnerOptions.pidFile: pidFile,
})
runner.killIfRequested()
self.assertEqual(self.exit.status, ExitStatus.EX_DATAERR)
self.assertEqual(self.exit.message, "Invalid PID file.")
def test_killRequestedWithPIDFileNotAnInt(self):
"""
L{Runner.killIfRequested} with both L{RunnerOptions.kill} and a
L{RunnerOptions.pidFile} containing a non-integer value exits with
L{ExitStatus.EX_DATAERR}.
"""
pidFile = PIDFile(DummyFilePath(b"** totally not a number, dude **"))
runner = Runner({
RunnerOptions.kill: True,
RunnerOptions.pidFile: pidFile,
})
runner.killIfRequested()
self.assertEqual(self.exit.status, ExitStatus.EX_DATAERR)
self.assertEqual(self.exit.message, "Invalid PID file.")
def test_startLogging(self):
"""
L{Runner.startLogging} sets up a filtering observer with a log level
predicate set to the given log level that contains a file observer of
the given type which writes to the given file.
"""
logFile = object()
# Patch the log beginner so that we don't try to start the already
# running (started by trial) logging system.
class LogBeginner(object):
def beginLoggingTo(self, observers):
LogBeginner.observers = observers
self.patch(_runner, "globalLogBeginner", LogBeginner())
# Patch FilteringLogObserver so we can capture its arguments
class MockFilteringLogObserver(FilteringLogObserver):
def __init__(
self, observer, predicates,
negativeObserver=lambda event: None
):
MockFilteringLogObserver.observer = observer
MockFilteringLogObserver.predicates = predicates
FilteringLogObserver.__init__(
self, observer, predicates, negativeObserver
)
self.patch(_runner, "FilteringLogObserver", MockFilteringLogObserver)
# Patch FileLogObserver so we can capture its arguments
class MockFileLogObserver(FileLogObserver):
def __init__(self, outFile):
MockFileLogObserver.outFile = outFile
FileLogObserver.__init__(self, outFile, str)
# Start logging
runner = Runner({
RunnerOptions.logFile: logFile,
RunnerOptions.fileLogObserverFactory: MockFileLogObserver,
RunnerOptions.defaultLogLevel: LogLevel.critical,
})
runner.startLogging()
# Check for a filtering observer
self.assertEqual(len(LogBeginner.observers), 1)
self.assertIsInstance(LogBeginner.observers[0], FilteringLogObserver)
# Check log level predicate with the correct default log level
self.assertEqual(len(MockFilteringLogObserver.predicates), 1)
self.assertIsInstance(
MockFilteringLogObserver.predicates[0],
LogLevelFilterPredicate
)
self.assertIdentical(
MockFilteringLogObserver.predicates[0].defaultLogLevel,
LogLevel.critical
)
# Check for a file observer attached to the filtering observer
self.assertIsInstance(
MockFilteringLogObserver.observer, MockFileLogObserver
)
# Check for the file we gave it
self.assertIdentical(
MockFilteringLogObserver.observer.outFile, logFile
)
def test_startReactorWithoutReactor(self):
"""
L{Runner.startReactor} without L{RunnerOptions.reactor} runs the default
reactor.
"""
# Patch defaultReactor
reactor = MemoryReactor()
self.patch(_runner, "defaultReactor", reactor)
runner = Runner({})
runner.startReactor()
self.assertTrue(reactor.hasInstalled)
self.assertTrue(reactor.hasRun)
def test_startReactorWithReactor(self):
"""
L{Runner.startReactor} with L{RunnerOptions.reactor} runs that reactor.
"""
reactor = MemoryReactor()
runner = Runner({RunnerOptions.reactor: reactor})
runner.startReactor()
self.assertTrue(reactor.hasRun)
def test_startReactorWithWhenRunning(self):
"""
L{Runner.startReactor} with L{RunnerOptions.whenRunning} ensures that
the given callable is called with the runner's options when the reactor
is running.
"""
optionsSeen = []
def txmain(options):
optionsSeen.append(options)
options = {
RunnerOptions.reactor: MemoryReactor(),
RunnerOptions.whenRunning: txmain,
}
runner = Runner(options)
runner.startReactor()
self.assertEqual(len(optionsSeen), 1)
self.assertIdentical(optionsSeen[0], options)
def test_whenRunningWithWhenRunning(self):
"""
L{Runner.whenRunning} with L{RunnerOptions.whenRunning} calls the given
callable with the runner's options.
"""
optionsSeen = []
def txmain(options):
optionsSeen.append(options)
options = {RunnerOptions.whenRunning: txmain}
runner = Runner(options)
runner.whenRunning()
self.assertEqual(len(optionsSeen), 1)
self.assertIdentical(optionsSeen[0], options)
def test_reactorExitedWithReactorExited(self):
"""
L{Runner.reactorExited} with L{RunnerOptions.reactorExited} calls the
given callable with the runner's options.
"""
optionsSeen = []
def exited(options):
optionsSeen.append(options)
options = {RunnerOptions.reactorExited: exited}
runner = Runner(options)
runner.reactorExited()
self.assertEqual(len(optionsSeen), 1)
self.assertIdentical(optionsSeen[0], options)
class DummyRunner(Runner):
"""
Stub for L{Runner}.
Keep track of calls to some methods without actually doing anything.
"""
def __init__(self, *args, **kwargs):
Runner.__init__(self, *args, **kwargs)
self.calledMethods = []
def killIfRequested(self):
self.calledMethods.append("killIfRequested")
def startLogging(self):
self.calledMethods.append("startLogging")
def startReactor(self):
self.calledMethods.append("startReactor")
def reactorExited(self):
self.calledMethods.append("reactorExited")
class DummyPIDFile(NonePIDFile):
"""
Stub for L{PIDFile}.
Tracks context manager entry/exit without doing anything.
"""
def __init__(self):
NonePIDFile.__init__(self)
self.entered = False
self.exited = False
def __enter__(self):
self.entered = True
return self
def __exit__(self, excType, excValue, traceback):
self.exited = True
class DummyExit(object):
"""
Stub for L{exit} that remembers whether it's been called and, if it has,
what arguments it was given.
"""
def __init__(self):
self.exited = False
def __call__(self, status, message=None):
assert not self.exited
self.status = status
self.message = message
self.exited = True
class DummyKill(object):
"""
Stub for L{os.kill} that remembers whether it's been called and, if it has,
what arguments it was given.
"""
def __init__(self):
self.calls = []
def __call__(self, pid, sig):
self.calls.append((pid, sig))
class DummyStandardIO(object):
"""
Stub for L{sys} which provides L{BytesIO} streams as stdout and stderr.
"""
def __init__(self, stdout, stderr):
self.stdout = stdout
self.stderr = stderr
class DummyWarningsModule(object):
"""
Stub for L{warnings} which provides a C{showwarning} method that is a no-op.
"""
def showwarning(*args, **kwargs):
"""
Do nothing.
@param args: ignored.
@param kwargs: ignored.
"""
|
endlessm/chromium-browser | refs/heads/master | third_party/depot_tools/tests/auth_test.py | 2 | #!/usr/bin/env vpython3
# Copyright (c) 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit Tests for auth.py"""
import calendar
import datetime
import json
import logging
import os
import unittest
import sys
if sys.version_info.major == 2:
import mock
BUILTIN_OPEN = '__builtin__.open'
else:
from unittest import mock
BUILTIN_OPEN = 'builtins.open'
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import auth
import subprocess2
NOW = datetime.datetime(2019, 10, 17, 12, 30, 59, 0)
VALID_EXPIRY = NOW + datetime.timedelta(seconds=31)
class AuthenticatorTest(unittest.TestCase):
def setUp(self):
mock.patch('subprocess2.check_call').start()
mock.patch('subprocess2.check_call_out').start()
mock.patch('auth.datetime_now', return_value=NOW).start()
self.addCleanup(mock.patch.stopall)
def testHasCachedCredentials_NotLoggedIn(self):
subprocess2.check_call_out.side_effect = [
subprocess2.CalledProcessError(1, ['cmd'], 'cwd', 'stdout', 'stderr')]
self.assertFalse(auth.Authenticator().has_cached_credentials())
def testHasCachedCredentials_LoggedIn(self):
subprocess2.check_call_out.return_value = (
json.dumps({'token': 'token', 'expiry': 12345678}), '')
self.assertTrue(auth.Authenticator().has_cached_credentials())
def testGetAccessToken_NotLoggedIn(self):
subprocess2.check_call_out.side_effect = [
subprocess2.CalledProcessError(1, ['cmd'], 'cwd', 'stdout', 'stderr')]
self.assertRaises(
auth.LoginRequiredError, auth.Authenticator().get_access_token)
def testGetAccessToken_CachedToken(self):
authenticator = auth.Authenticator()
authenticator._access_token = auth.AccessToken('token', None)
self.assertEqual(
auth.AccessToken('token', None), authenticator.get_access_token())
subprocess2.check_call_out.assert_not_called()
  def testGetAccessToken_LoggedIn(self):
expiry = calendar.timegm(VALID_EXPIRY.timetuple())
subprocess2.check_call_out.return_value = (
json.dumps({'token': 'token', 'expiry': expiry}), '')
self.assertEqual(
auth.AccessToken('token', VALID_EXPIRY),
auth.Authenticator().get_access_token())
subprocess2.check_call_out.assert_called_with(
['luci-auth',
'token',
'-scopes', auth.OAUTH_SCOPE_EMAIL,
'-json-output', '-'],
stdout=subprocess2.PIPE, stderr=subprocess2.PIPE)
def testGetAccessToken_DifferentScope(self):
expiry = calendar.timegm(VALID_EXPIRY.timetuple())
subprocess2.check_call_out.return_value = (
json.dumps({'token': 'token', 'expiry': expiry}), '')
self.assertEqual(
auth.AccessToken('token', VALID_EXPIRY),
auth.Authenticator('custom scopes').get_access_token())
subprocess2.check_call_out.assert_called_with(
['luci-auth', 'token', '-scopes', 'custom scopes', '-json-output', '-'],
stdout=subprocess2.PIPE, stderr=subprocess2.PIPE)
def testAuthorize(self):
http = mock.Mock()
http_request = http.request
http_request.__name__ = '__name__'
authenticator = auth.Authenticator()
authenticator._access_token = auth.AccessToken('token', None)
authorized = authenticator.authorize(http)
authorized.request(
'https://example.com', method='POST', body='body',
headers={'header': 'value'})
http_request.assert_called_once_with(
'https://example.com', 'POST', 'body',
{'header': 'value', 'Authorization': 'Bearer token'}, mock.ANY,
mock.ANY)
class AccessTokenTest(unittest.TestCase):
def setUp(self):
mock.patch('auth.datetime_now', return_value=NOW).start()
self.addCleanup(mock.patch.stopall)
def testNeedsRefresh_NoExpiry(self):
self.assertFalse(auth.AccessToken('token', None).needs_refresh())
def testNeedsRefresh_Expired(self):
expired = NOW + datetime.timedelta(seconds=30)
self.assertTrue(auth.AccessToken('token', expired).needs_refresh())
def testNeedsRefresh_Valid(self):
self.assertFalse(auth.AccessToken('token', VALID_EXPIRY).needs_refresh())
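# Editor's note: taken together, the three cases above pin down the refresh
# policy under test: a token with no expiry never refreshes, and a dated token
# only counts as valid when its expiry is more than ~30 seconds away (hence
# VALID_EXPIRY = NOW + 31s, while NOW + 30s already needs a refresh).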
class HasLuciContextLocalAuthTest(unittest.TestCase):
def setUp(self):
mock.patch('os.environ').start()
mock.patch(BUILTIN_OPEN, mock.mock_open()).start()
self.addCleanup(mock.patch.stopall)
def testNoLuciContextEnvVar(self):
os.environ = {}
self.assertFalse(auth.has_luci_context_local_auth())
def testNonexistentPath(self):
os.environ = {'LUCI_CONTEXT': 'path'}
open.side_effect = OSError
self.assertFalse(auth.has_luci_context_local_auth())
open.assert_called_with('path')
def testInvalidJsonFile(self):
os.environ = {'LUCI_CONTEXT': 'path'}
open().read.return_value = 'not-a-json-file'
self.assertFalse(auth.has_luci_context_local_auth())
open.assert_called_with('path')
def testNoLocalAuth(self):
os.environ = {'LUCI_CONTEXT': 'path'}
open().read.return_value = '{}'
self.assertFalse(auth.has_luci_context_local_auth())
open.assert_called_with('path')
def testNoDefaultAccountId(self):
os.environ = {'LUCI_CONTEXT': 'path'}
open().read.return_value = json.dumps({
'local_auth': {
'secret': 'secret',
'accounts': [{
'email': 'bots@account.iam.gserviceaccount.com',
'id': 'system',
}],
'rpc_port': 1234,
}
})
self.assertFalse(auth.has_luci_context_local_auth())
open.assert_called_with('path')
def testHasLocalAuth(self):
os.environ = {'LUCI_CONTEXT': 'path'}
open().read.return_value = json.dumps({
'local_auth': {
'secret': 'secret',
'accounts': [
{
'email': 'bots@account.iam.gserviceaccount.com',
'id': 'system',
},
{
'email': 'builder@account.iam.gserviceaccount.com',
'id': 'task',
},
],
'rpc_port': 1234,
'default_account_id': 'task',
},
})
self.assertTrue(auth.has_luci_context_local_auth())
open.assert_called_with('path')
if __name__ == '__main__':
if '-v' in sys.argv:
logging.basicConfig(level=logging.DEBUG)
unittest.main()
|
grahamgilbert/Crypt-Server | refs/heads/master | server/migrations/0009_secret_rotation_required.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-04-30 21:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("server", "0008_auto_20150814_2140")]
operations = [
migrations.AddField(
model_name="secret",
name="rotation_required",
field=models.BooleanField(default=False),
)
]
|
noba3/KoTos | refs/heads/master | addons/plugin.video.mega/resources/lib/platform_libraries/Darwin/osx/Crypto/Random/Fortuna/SHAd256.py | 11 | # -*- coding: ascii -*-
#
# Random/Fortuna/SHAd256.py : SHA_d-256 hash function implementation
#
# Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""\
SHA_d-256 hash function implementation.
This module should comply with PEP 247.
"""
__revision__ = "$Id$"
__all__ = ['new', 'digest_size']
from Crypto.Util.python_compat import *
from binascii import b2a_hex
from Crypto.Hash import SHA256
assert SHA256.digest_size == 32
class _SHAd256(object):
"""SHA-256, doubled.
Returns SHA-256(SHA-256(data)).
"""
digest_size = SHA256.digest_size
_internal = object()
def __init__(self, internal_api_check, sha256_hash_obj):
if internal_api_check is not self._internal:
raise AssertionError("Do not instantiate this class directly. Use %s.new()" % (__name__,))
self._h = sha256_hash_obj
# PEP 247 "copy" method
def copy(self):
"""Return a copy of this hashing object"""
        return _SHAd256(_SHAd256._internal, self._h.copy())
# PEP 247 "digest" method
def digest(self):
"""Return the hash value of this object as a binary string"""
retval = SHA256.new(self._h.digest()).digest()
assert len(retval) == 32
return retval
# PEP 247 "hexdigest" method
def hexdigest(self):
"""Return the hash value of this object as a (lowercase) hexadecimal string"""
retval = b2a_hex(self.digest())
assert len(retval) == 64
return retval
# PEP 247 "update" method
def update(self, data):
self._h.update(data)
# PEP 247 module-level "digest_size" variable
digest_size = _SHAd256.digest_size
# PEP 247 module-level "new" function
def new(data=""):
"""Return a new SHAd256 hashing object"""
return _SHAd256(_SHAd256._internal, SHA256.new(data))
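# Usage sketch (editor's addition): since the module is PEP 247 compliant it
# drops in wherever a hashlib-style object is expected; the value below is
# the expected double SHA-256 of "abc":
#
#     >>> new("abc").hexdigest()
#     '4f8b42c22dd3729b519ba6f68d2da7cc5b2d606d05daed5ad5128cc03e6c6358'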
# vim:set ts=4 sw=4 sts=4 expandtab:
|
madarou/angular-django | refs/heads/master | app/__init__.py | 22 | __author__ = 'RaPoSpectre'
|
igorcompuff/ns-3.26 | refs/heads/master | src/nix-vector-routing/test/examples-to-run.py | 200 | #! /usr/bin/env python
## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
# A list of C++ examples to run in order to ensure that they remain
# buildable and runnable over time. Each tuple in the list contains
#
# (example_name, do_run, do_valgrind_run).
#
# See test.py for more information.
cpp_examples = [
("nix-simple", "True", "True"),
("nms-p2p-nix", "False", "True"), # Takes too long to run
]
# A list of Python examples to run in order to ensure that they remain
# runnable over time. Each tuple in the list contains
#
# (example_name, do_run).
#
# See test.py for more information.
python_examples = []
|
ZsTizy/heekscnc | refs/heads/master | area_funcs.py | 24 | import area
from nc.nc import *
import math
import kurve_funcs
# some globals, to save passing variables as parameters too much
area_for_feed_possible = None
tool_radius_for_pocket = None
def cut_curve(curve, need_rapid, p, rapid_safety_space, current_start_depth, final_depth):
prev_p = p
first = True
for vertex in curve.getVertices():
if need_rapid and first:
# rapid across
rapid(vertex.p.x, vertex.p.y)
##rapid down
rapid(z = current_start_depth + rapid_safety_space)
#feed down
feed(z = final_depth)
first = False
else:
if vertex.type == 1:
arc_ccw(vertex.p.x, vertex.p.y, i = vertex.c.x, j = vertex.c.y)
elif vertex.type == -1:
arc_cw(vertex.p.x, vertex.p.y, i = vertex.c.x, j = vertex.c.y)
else:
feed(vertex.p.x, vertex.p.y)
prev_p = vertex.p
return prev_p
def area_distance(a, old_area):
best_dist = None
for curve in a.getCurves():
for vertex in curve.getVertices():
c = old_area.NearestPoint(vertex.p)
d = c.dist(vertex.p)
if best_dist == None or d < best_dist:
best_dist = d
for curve in old_area.getCurves():
for vertex in curve.getVertices():
c = a.NearestPoint(vertex.p)
d = c.dist(vertex.p)
if best_dist == None or d < best_dist:
best_dist = d
return best_dist
def make_obround(p0, p1, radius):
dir = p1 - p0
d = dir.length()
dir.normalize()
right = area.Point(dir.y, -dir.x)
obround = area.Area()
c = area.Curve()
vt0 = p0 + right * radius
vt1 = p1 + right * radius
vt2 = p1 - right * radius
vt3 = p0 - right * radius
c.append(area.Vertex(0, vt0, area.Point(0, 0)))
c.append(area.Vertex(0, vt1, area.Point(0, 0)))
c.append(area.Vertex(1, vt2, p1))
c.append(area.Vertex(0, vt3, area.Point(0, 0)))
c.append(area.Vertex(1, vt0, p0))
obround.append(c)
return obround
def feed_possible(p0, p1):
if p0 == p1:
return True
obround = make_obround(p0, p1, tool_radius_for_pocket)
a = area.Area(area_for_feed_possible)
obround.Subtract(a)
if obround.num_curves() > 0:
return False
return True
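# Editor's note: the obround built above is exactly the footprint swept by a
# round cutter of radius tool_radius_for_pocket moving straight from p0 to
# p1; subtracting the already-machined region and checking that nothing
# remains is what guarantees a straight feed move cannot gouge uncut stock.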
def cut_curvelist1(curve_list, rapid_safety_space, current_start_depth, depth, clearance_height, keep_tool_down_if_poss):
p = area.Point(0, 0)
first = True
for curve in curve_list:
need_rapid = True
if first == False:
s = curve.FirstVertex().p
if keep_tool_down_if_poss == True:
# see if we can feed across
if feed_possible(p, s):
need_rapid = False
elif s.x == p.x and s.y == p.y:
need_rapid = False
if need_rapid:
rapid(z = clearance_height)
p = cut_curve(curve, need_rapid, p, rapid_safety_space, current_start_depth, depth)
first = False
rapid(z = clearance_height)
def cut_curvelist2(curve_list, rapid_safety_space, current_start_depth, depth, clearance_height, keep_tool_down_if_poss, start_point):
    p = area.Point(0, 0)
    start_x, start_y = start_point
first = True
for curve in curve_list:
need_rapid = True
if first == True:
direction = "on";radius = 0.0;offset_extra = 0.0; roll_radius = 0.0;roll_on = 0.0; roll_off = 0.0; rapid_safety_space; step_down = math.fabs(depth);extend_at_start = 0.0;extend_at_end = 0.0
kurve_funcs.make_smaller( curve, start = area.Point(start_x,start_y))
kurve_funcs.profile(curve, direction, radius , offset_extra, roll_radius, roll_on, roll_off, rapid_safety_space , clearance_height, current_start_depth, step_down , depth, extend_at_start, extend_at_end)
else:
s = curve.FirstVertex().p
if keep_tool_down_if_poss == True:
# see if we can feed across
if feed_possible(p, s):
need_rapid = False
elif s.x == p.x and s.y == p.y:
need_rapid = False
cut_curve(curve, need_rapid, p, rapid_safety_space, current_start_depth, depth)
first = False #change to True if you want to rapid back to start side before zigging again with unidirectional set
rapid(z = clearance_height)
def recur(arealist, a1, stepover, from_center):
# this makes arealist by recursively offsetting a1 inwards
if a1.num_curves() == 0:
return
if from_center:
arealist.insert(0, a1)
else:
arealist.append(a1)
a_offset = area.Area(a1)
a_offset.Offset(stepover)
# split curves into new areas
if area.holes_linked():
for curve in a_offset.getCurves():
a2 = area.Area()
a2.append(curve)
recur(arealist, a2, stepover, from_center)
else:
# split curves into new areas
a_offset.Reorder()
a2 = None
for curve in a_offset.getCurves():
if curve.IsClockwise():
if a2 != None:
a2.append(curve)
else:
if a2 != None:
recur(arealist, a2, stepover, from_center)
a2 = area.Area()
a2.append(curve)
if a2 != None:
recur(arealist, a2, stepover, from_center)
def get_curve_list(arealist, reverse_curves = False):
curve_list = list()
for a in arealist:
for curve in a.getCurves():
if reverse_curves == True:
curve.Reverse()
curve_list.append(curve)
return curve_list
curve_list_for_zigs = []
rightward_for_zigs = True
sin_angle_for_zigs = 0.0
cos_angle_for_zigs = 1.0
sin_minus_angle_for_zigs = 0.0
cos_minus_angle_for_zigs = 1.0
one_over_units = 1.0
def make_zig_curve(curve, y0, y, zig_unidirectional):
if rightward_for_zigs:
curve.Reverse()
# find a high point to start looking from
high_point = None
for vertex in curve.getVertices():
if high_point == None:
high_point = vertex.p
elif vertex.p.y > high_point.y:
# use this as the new high point
high_point = vertex.p
elif math.fabs(vertex.p.y - high_point.y) < 0.002 * one_over_units:
# equal high point
if rightward_for_zigs:
# use the furthest left point
if vertex.p.x < high_point.x:
high_point = vertex.p
else:
# use the furthest right point
if vertex.p.x > high_point.x:
high_point = vertex.p
zig = area.Curve()
high_point_found = False
zig_started = False
zag_found = False
for i in range(0, 2): # process the curve twice because we don't know where it will start
prev_p = None
for vertex in curve.getVertices():
if zag_found: break
if prev_p != None:
if zig_started:
zig.append(unrotated_vertex(vertex))
if math.fabs(vertex.p.y - y) < 0.002 * one_over_units:
zag_found = True
break
elif high_point_found:
if math.fabs(vertex.p.y - y0) < 0.002 * one_over_units:
if zig_started:
zig.append(unrotated_vertex(vertex))
elif math.fabs(prev_p.y - y0) < 0.002 * one_over_units and vertex.type == 0:
zig.append(area.Vertex(0, unrotated_point(prev_p), area.Point(0, 0)))
zig.append(unrotated_vertex(vertex))
zig_started = True
elif vertex.p.x == high_point.x and vertex.p.y == high_point.y:
high_point_found = True
prev_p = vertex.p
if zig_started:
if zig_unidirectional == True:
# remove the last bit of zig
if math.fabs(zig.LastVertex().p.y - y) < 0.002 * one_over_units:
vertices = zig.getVertices()
while len(vertices) > 0:
v = vertices[len(vertices)-1]
if math.fabs(v.p.y - y0) < 0.002 * one_over_units:
break
else:
vertices.pop()
zig = area.Curve()
for v in vertices:
zig.append(v)
curve_list_for_zigs.append(zig)
def make_zig(a, y0, y, zig_unidirectional):
for curve in a.getCurves():
make_zig_curve(curve, y0, y, zig_unidirectional)
reorder_zig_list_list = []
def add_reorder_zig(curve):
global reorder_zig_list_list
# look in existing lists
s = curve.FirstVertex().p
for curve_list in reorder_zig_list_list:
last_curve = curve_list[len(curve_list) - 1]
e = last_curve.LastVertex().p
if math.fabs(s.x - e.x) < 0.002 * one_over_units and math.fabs(s.y - e.y) < 0.002 * one_over_units:
curve_list.append(curve)
return
# else add a new list
curve_list = []
curve_list.append(curve)
reorder_zig_list_list.append(curve_list)
def reorder_zigs():
global curve_list_for_zigs
global reorder_zig_list_list
reorder_zig_list_list = []
for curve in curve_list_for_zigs:
add_reorder_zig(curve)
curve_list_for_zigs = []
for curve_list in reorder_zig_list_list:
for curve in curve_list:
curve_list_for_zigs.append(curve)
def rotated_point(p):
return area.Point(p.x * cos_angle_for_zigs - p.y * sin_angle_for_zigs, p.x * sin_angle_for_zigs + p.y * cos_angle_for_zigs)
def unrotated_point(p):
return area.Point(p.x * cos_minus_angle_for_zigs - p.y * sin_minus_angle_for_zigs, p.x * sin_minus_angle_for_zigs + p.y * cos_minus_angle_for_zigs)
def rotated_vertex(v):
if v.type:
return area.Vertex(v.type, rotated_point(v.p), rotated_point(v.c))
return area.Vertex(v.type, rotated_point(v.p), area.Point(0, 0))
def unrotated_vertex(v):
if v.type:
return area.Vertex(v.type, unrotated_point(v.p), unrotated_point(v.c))
return area.Vertex(v.type, unrotated_point(v.p), area.Point(0, 0))
def rotated_area(a):
an = area.Area()
for curve in a.getCurves():
curve_new = area.Curve()
for v in curve.getVertices():
curve_new.append(rotated_vertex(v))
an.append(curve_new)
return an
def zigzag(a, stepover, zig_unidirectional):
if a.num_curves() == 0:
return
global rightward_for_zigs
global curve_list_for_zigs
global sin_angle_for_zigs
global cos_angle_for_zigs
global sin_minus_angle_for_zigs
global cos_minus_angle_for_zigs
global one_over_units
one_over_units = 1 / area.get_units()
a = rotated_area(a)
b = area.Box()
a.GetBox(b)
x0 = b.MinX() - 1.0
x1 = b.MaxX() + 1.0
height = b.MaxY() - b.MinY()
num_steps = int(height / stepover + 1)
y = b.MinY() + 0.1 * one_over_units
null_point = area.Point(0, 0)
rightward_for_zigs = True
curve_list_for_zigs = []
for i in range(0, num_steps):
y0 = y
y = y + stepover
p0 = area.Point(x0, y0)
p1 = area.Point(x0, y)
p2 = area.Point(x1, y)
p3 = area.Point(x1, y0)
c = area.Curve()
c.append(area.Vertex(0, p0, null_point, 0))
c.append(area.Vertex(0, p1, null_point, 0))
c.append(area.Vertex(0, p2, null_point, 1))
c.append(area.Vertex(0, p3, null_point, 0))
c.append(area.Vertex(0, p0, null_point, 1))
a2 = area.Area()
a2.append(c)
a2.Intersect(a)
make_zig(a2, y0, y, zig_unidirectional)
if zig_unidirectional == False:
rightward_for_zigs = (rightward_for_zigs == False)
reorder_zigs()
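# Editor's note: zigzag() works in a rotated frame. pocket() primes the
# sin/cos globals so rotated_area() turns the pocket by -zig_angle, the loop
# above slices it into horizontal strips one stepover tall and intersects
# each strip with the pocket, and make_zig()/make_zig_curve() keep only the
# strip-crossing segments, un-rotating every point on output so the final
# toolpath runs at the requested angle.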
def pocket(a, tool_radius, extra_offset, stepover, depthparams, from_center, keep_tool_down_if_poss, use_zig_zag, zig_angle, zig_unidirectional=False, start_point=None, cut_mode='conventional'):
global tool_radius_for_pocket
global area_for_feed_possible
#if len(a.getCurves()) > 1:
# for crv in a.getCurves():
# ar = area.Area()
# ar.append(crv)
# pocket(ar, tool_radius, extra_offset, rapid_safety_space, start_depth, final_depth, stepover, stepdown, clearance_height, from_center, keep_tool_down_if_poss, use_zig_zag, zig_angle, zig_unidirectional)
# return
tool_radius_for_pocket = tool_radius
if keep_tool_down_if_poss:
area_for_feed_possible = area.Area(a)
area_for_feed_possible.Offset(extra_offset - 0.01)
use_internal_function = (area.holes_linked() == False) # use internal function, if area module is the Clipper library
if use_internal_function:
curve_list = a.MakePocketToolpath(tool_radius, extra_offset, stepover, from_center, use_zig_zag, zig_angle)
else:
global sin_angle_for_zigs
global cos_angle_for_zigs
global sin_minus_angle_for_zigs
global cos_minus_angle_for_zigs
radians_angle = zig_angle * math.pi / 180
sin_angle_for_zigs = math.sin(-radians_angle)
cos_angle_for_zigs = math.cos(-radians_angle)
sin_minus_angle_for_zigs = math.sin(radians_angle)
cos_minus_angle_for_zigs = math.cos(radians_angle)
arealist = list()
a_offset = area.Area(a)
current_offset = tool_radius + extra_offset
a_offset.Offset(current_offset)
do_recursive = True
if use_zig_zag:
zigzag(a_offset, stepover, zig_unidirectional)
curve_list = curve_list_for_zigs
else:
if do_recursive:
recur(arealist, a_offset, stepover, from_center)
else:
while(a_offset.num_curves() > 0):
if from_center:
arealist.insert(0, a_offset)
else:
arealist.append(a_offset)
current_offset = current_offset + stepover
a_offset = area.Area(a)
a_offset.Offset(current_offset)
curve_list = get_curve_list(arealist, cut_mode == 'climb')
depths = depthparams.get_depths()
current_start_depth = depthparams.start_depth
if start_point==None:
for depth in depths:
cut_curvelist1(curve_list, depthparams.rapid_safety_space, current_start_depth, depth, depthparams.clearance_height, keep_tool_down_if_poss)
current_start_depth = depth
else:
for depth in depths:
cut_curvelist2(curve_list, depthparams.rapid_safety_space, current_start_depth, depth, depthparams.clearance_height, keep_tool_down_if_poss, start_point)
current_start_depth = depth
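# Usage sketch (editor's addition, illustrative only): `depthparams` is
# expected to be HeeksCNC's depth_params helper, exposing start_depth,
# clearance_height, rapid_safety_space and get_depths(); how `dp` is built
# is assumed here, not shown by this module.
#
#     a = area.Area()
#     c = area.Curve()
#     for x, y in [(0, 0), (100, 0), (100, 60), (0, 60), (0, 0)]:
#         c.append(area.Point(x, y))
#     a.append(c)
#     pocket(a, tool_radius=3.0, extra_offset=0.0, stepover=2.0,
#            depthparams=dp, from_center=True, keep_tool_down_if_poss=True,
#            use_zig_zag=False, zig_angle=0.0)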
|
hanicker/odoo | refs/heads/8.0 | addons/account_bank_statement_extensions/account_bank_statement.py | 131 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
class account_bank_statement(osv.osv):
_inherit = 'account.bank.statement'
def write(self, cr, uid, ids, vals, context=None):
if context is None:
context = {}
# bypass obsolete statement line resequencing
if vals.get('line_ids', False) or context.get('ebanking_import', False):
res = super(osv.osv, self).write(cr, uid, ids, vals, context=context)
else:
res = super(account_bank_statement, self).write(cr, uid, ids, vals, context=context)
return res
def confirm_statement_lines(self, cr, uid, ids, context=None):
bank_statement_line_obj = self.pool.get('account.bank.statement.line')
for st in self.browse(cr, uid, ids, context=context):
if st.line_ids:
line_ids = [l.id for l in st.line_ids]
cr.execute("UPDATE account_bank_statement_line \
SET state='confirm' WHERE id in %s ",
(tuple(line_ids),))
bank_statement_line_obj.invalidate_cache(cr, uid, ['state'], line_ids, context=context)
return True
def button_confirm_bank(self, cr, uid, ids, context=None):
res = super(account_bank_statement, self).button_confirm_bank(cr, uid, ids, context=context)
self.confirm_statement_lines(cr, uid, ids, context=context)
return res
def button_confirm_cash(self, cr, uid, ids, context=None):
res = super(account_bank_statement, self).button_confirm_cash(cr, uid, ids, context=context)
self.confirm_statement_lines(cr, uid, ids, context=context)
return res
def button_cancel(self, cr, uid, ids, context=None):
bank_statement_line_obj = self.pool.get('account.bank.statement.line')
super(account_bank_statement, self).button_cancel(cr, uid, ids, context=context)
for st in self.browse(cr, uid, ids, context=context):
if st.line_ids:
line_ids = [l.id for l in st.line_ids]
cr.execute("UPDATE account_bank_statement_line \
SET state='draft' WHERE id in %s ",
(tuple(line_ids),))
bank_statement_line_obj.invalidate_cache(cr, uid, ['state'], line_ids, context=context)
return True
class account_bank_statement_line_global(osv.osv):
_name = 'account.bank.statement.line.global'
_description = 'Batch Payment Info'
_columns = {
'name': fields.char('OBI', required=True, help="Originator to Beneficiary Information"),
'code': fields.char('Code', size=64, required=True),
'parent_id': fields.many2one('account.bank.statement.line.global', 'Parent Code', ondelete='cascade'),
'child_ids': fields.one2many('account.bank.statement.line.global', 'parent_id', 'Child Codes', copy=True),
'type': fields.selection([
('iso20022', 'ISO 20022'),
('coda', 'CODA'),
('manual', 'Manual'),
], 'Type', required=True),
'amount': fields.float('Amount', digits_compute=dp.get_precision('Account')),
'bank_statement_line_ids': fields.one2many('account.bank.statement.line', 'globalisation_id', 'Bank Statement Lines'),
}
_rec_name = 'code'
_defaults = {
'code': lambda s,c,u,ctx={}: s.pool.get('ir.sequence').get(c, u, 'account.bank.statement.line.global'),
'name': '/',
}
_sql_constraints = [
('code_uniq', 'unique (code)', 'The code must be unique !'),
]
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
if not args:
args = []
ids = []
if name:
ids = self.search(cr, user, [('code', 'ilike', name)] + args, limit=limit)
if not ids:
ids = self.search(cr, user, [('name', operator, name)] + args, limit=limit)
if not ids and len(name.split()) >= 2:
#Separating code and name for searching
operand1, operand2 = name.split(' ', 1) #name can contain spaces
ids = self.search(cr, user, [('code', 'like', operand1), ('name', operator, operand2)] + args, limit=limit)
else:
ids = self.search(cr, user, args, context=context, limit=limit)
return self.name_get(cr, user, ids, context=context)
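# Editor's note on name_search above: a batch payment can be matched by code,
# by OBI name, or by both at once, e.g. "2011/004 Payroll" falls back to
# code like "2011/004" combined with name matching "Payroll".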
class account_bank_statement_line(osv.osv):
_inherit = 'account.bank.statement.line'
_columns = {
'val_date': fields.date('Value Date', states={'confirm': [('readonly', True)]}),
'globalisation_id': fields.many2one('account.bank.statement.line.global', 'Globalisation ID',
states={'confirm': [('readonly', True)]},
help="Code to identify transactions belonging to the same globalisation level within a batch payment"),
'globalisation_amount': fields.related('globalisation_id', 'amount', type='float',
relation='account.bank.statement.line.global', string='Glob. Amount', readonly=True),
'state': fields.selection([('draft', 'Draft'), ('confirm', 'Confirmed')],
'Status', required=True, readonly=True, copy=False),
'counterparty_name': fields.char('Counterparty Name', size=35),
'counterparty_bic': fields.char('Counterparty BIC', size=11),
'counterparty_number': fields.char('Counterparty Number', size=34),
'counterparty_currency': fields.char('Counterparty Currency', size=3),
}
_defaults = {
'state': 'draft',
}
def unlink(self, cr, uid, ids, context=None):
if context is None:
context = {}
if context.get('block_statement_line_delete', False):
raise osv.except_osv(_('Warning!'), _('Delete operation not allowed. \
Please go to the associated bank statement in order to delete and/or modify bank statement line.'))
return super(account_bank_statement_line, self).unlink(cr, uid, ids, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
CloudWareChile/OpenChile | refs/heads/master | openerp/addons/account_anglo_saxon/__init__.py | 9 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import product
import stock
import purchase
import invoice
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
dufferzafar/picard | refs/heads/master | picard/ui/options/cdlookup.py | 6 | # -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
# Copyright (c) 2004 Robert Kaye
# Copyright (C) 2006 Lukáš Lalinský
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from picard import config
from picard.ui.options import OptionsPage, register_options_page
from picard.util.cdrom import (
get_cdrom_drives,
AUTO_DETECT_DRIVES,
DEFAULT_DRIVES
)
if AUTO_DETECT_DRIVES:
from picard.ui.ui_options_cdlookup_select import Ui_CDLookupOptionsPage
else:
from picard.ui.ui_options_cdlookup import Ui_CDLookupOptionsPage
class CDLookupOptionsPage(OptionsPage):
NAME = "cdlookup"
TITLE = N_("CD Lookup")
PARENT = None
SORT_ORDER = 50
ACTIVE = True
options = [
config.TextOption("setting", "cd_lookup_device",
",".join(DEFAULT_DRIVES)),
]
def __init__(self, parent=None):
super(CDLookupOptionsPage, self).__init__(parent)
self.ui = Ui_CDLookupOptionsPage()
self.ui.setupUi(self)
if AUTO_DETECT_DRIVES:
self.drives = get_cdrom_drives()
self.ui.cd_lookup_device.addItems(self.drives)
def load(self):
if AUTO_DETECT_DRIVES:
try:
self.ui.cd_lookup_device.setCurrentIndex(self.drives.index(config.setting["cd_lookup_device"]))
except ValueError:
pass
else:
self.ui.cd_lookup_device.setText(config.setting["cd_lookup_device"])
def save(self):
if AUTO_DETECT_DRIVES:
config.setting["cd_lookup_device"] = unicode(self.ui.cd_lookup_device.currentText())
else:
config.setting["cd_lookup_device"] = unicode(self.ui.cd_lookup_device.text())
register_options_page(CDLookupOptionsPage)
|
coreos/autotest | refs/heads/master | client/shared/common.py | 12 | import os, sys
try:
import autotest.client.setup_modules as setup_modules
client_dir = os.path.dirname(setup_modules.__file__)
except ImportError:
dirname = os.path.dirname(sys.modules[__name__].__file__)
client_dir = os.path.abspath(os.path.join(dirname, ".."))
sys.path.insert(0, client_dir)
import setup_modules
sys.path.pop(0)
setup_modules.setup(base_path=client_dir,
root_module_name="autotest.client")
|
proxysh/Safejumper-for-Mac | refs/heads/master | buildlinux/env64/lib/python2.7/encodings/gbk.py | 816 | #
# gbk.py: Python Unicode Codec for GBK
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_cn, codecs
import _multibytecodec as mbc
codec = _codecs_cn.getcodec('gbk')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='gbk',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
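# Usage sketch (editor's addition): once the encodings package registers this
# codec it behaves like any built-in one, e.g.
#
#     >>> u'\u4f60\u597d'.encode('gbk')   # u"你好"
#     '\xc4\xe3\xba\xc3'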
|
sayoun/workalendar | refs/heads/master | workalendar/usa/mississippi.py | 1 | # -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from .core import UnitedStates
class Mississippi(UnitedStates):
"""Mississippi"""
include_thanksgiving_friday = True
include_confederation_day = True
include_columbus_day = False
martin_luther_king_label = ("Martin Luther King's"
" and Robert E. Lee's Birthdays")
veterans_day_label = "Armistice Day (Veterans Day)"
national_memorial_day_label = ("National Memorial Day / "
"Jefferson Davis Birthday")
|
srimai/odoo | refs/heads/8.0 | openerp/http.py | 66 | # -*- coding: utf-8 -*-
#----------------------------------------------------------
# OpenERP HTTP layer
#----------------------------------------------------------
import ast
import collections
import contextlib
import datetime
import errno
import functools
import getpass
import inspect
import logging
import mimetypes
import os
import pprint
import random
import re
import sys
import tempfile
import threading
import time
import traceback
import urlparse
import warnings
from zlib import adler32
import babel.core
import psycopg2
import simplejson
import werkzeug.contrib.sessions
import werkzeug.datastructures
import werkzeug.exceptions
import werkzeug.local
import werkzeug.routing
import werkzeug.wrappers
import werkzeug.wsgi
from werkzeug.wsgi import wrap_file
try:
import psutil
except ImportError:
psutil = None
import openerp
from openerp import SUPERUSER_ID
from openerp.service.server import memory_info
from openerp.service import security, model as service_model
from openerp.tools.func import lazy_property
from openerp.tools import ustr
_logger = logging.getLogger(__name__)
rpc_request = logging.getLogger(__name__ + '.rpc.request')
rpc_response = logging.getLogger(__name__ + '.rpc.response')
# 1 week cache for statics as advised by Google Page Speed
STATIC_CACHE = 60 * 60 * 24 * 7
#----------------------------------------------------------
# RequestHandler
#----------------------------------------------------------
# Thread local global request object
_request_stack = werkzeug.local.LocalStack()
request = _request_stack()
"""
A global proxy that always redirect to the current request object.
"""
def replace_request_password(args):
# password is always 3rd argument in a request, we replace it in RPC logs
# so it's easier to forward logs for diagnostics/debugging purposes...
if len(args) > 2:
args = list(args)
args[2] = '*'
return tuple(args)
# don't trigger debugger for those exceptions, they carry user-facing warnings
# and indications, they're not necessarily indicative of anything being
# *broken*
NO_POSTMORTEM = (openerp.osv.orm.except_orm,
openerp.exceptions.AccessError,
openerp.exceptions.AccessDenied,
openerp.exceptions.Warning,
openerp.exceptions.RedirectWarning)
def dispatch_rpc(service_name, method, params):
""" Handle a RPC call.
This is pure Python code, the actual marshalling (from/to XML-RPC) is done
in a upper layer.
"""
try:
rpc_request_flag = rpc_request.isEnabledFor(logging.DEBUG)
rpc_response_flag = rpc_response.isEnabledFor(logging.DEBUG)
if rpc_request_flag or rpc_response_flag:
start_time = time.time()
start_rss, start_vms = 0, 0
if psutil:
start_rss, start_vms = memory_info(psutil.Process(os.getpid()))
        if rpc_response_flag:
openerp.netsvc.log(rpc_request, logging.DEBUG, '%s.%s' % (service_name, method), replace_request_password(params))
threading.current_thread().uid = None
threading.current_thread().dbname = None
if service_name == 'common':
dispatch = openerp.service.common.dispatch
elif service_name == 'db':
dispatch = openerp.service.db.dispatch
elif service_name == 'object':
dispatch = openerp.service.model.dispatch
elif service_name == 'report':
dispatch = openerp.service.report.dispatch
else:
dispatch = openerp.service.wsgi_server.rpc_handlers.get(service_name)
result = dispatch(method, params)
if rpc_request_flag or rpc_response_flag:
end_time = time.time()
end_rss, end_vms = 0, 0
if psutil:
end_rss, end_vms = memory_info(psutil.Process(os.getpid()))
logline = '%s.%s time:%.3fs mem: %sk -> %sk (diff: %sk)' % (service_name, method, end_time - start_time, start_vms / 1024, end_vms / 1024, (end_vms - start_vms)/1024)
if rpc_response_flag:
openerp.netsvc.log(rpc_response, logging.DEBUG, logline, result)
else:
openerp.netsvc.log(rpc_request, logging.DEBUG, logline, replace_request_password(params), depth=1)
return result
except NO_POSTMORTEM:
raise
except openerp.exceptions.DeferredException, e:
_logger.exception(openerp.tools.exception_to_unicode(e))
openerp.tools.debugger.post_mortem(openerp.tools.config, e.traceback)
raise
except Exception, e:
_logger.exception(openerp.tools.exception_to_unicode(e))
openerp.tools.debugger.post_mortem(openerp.tools.config, sys.exc_info())
raise
def local_redirect(path, query=None, keep_hash=False, forward_debug=True, code=303):
url = path
if not query:
query = {}
if forward_debug and request and request.debug:
query['debug'] = None
if query:
url += '?' + werkzeug.url_encode(query)
if keep_hash:
return redirect_with_hash(url, code)
else:
return werkzeug.utils.redirect(url, code)
def redirect_with_hash(url, code=303):
# Most IE and Safari versions decided not to preserve location.hash upon
# redirect. And even if IE10 pretends to support it, it still fails
# inexplicably in case of multiple redirects (and we do have some).
# See extensive test page at http://greenbytes.de/tech/tc/httpredirects/
if request.httprequest.user_agent.browser in ('firefox',):
return werkzeug.utils.redirect(url, code)
return "<html><head><script>window.location = '%s' + location.hash;</script></head></html>" % url
class WebRequest(object):
""" Parent class for all Odoo Web request types, mostly deals with
initialization and setup of the request object (the dispatching itself has
to be handled by the subclasses)
:param httprequest: a wrapped werkzeug Request object
:type httprequest: :class:`werkzeug.wrappers.BaseRequest`
.. attribute:: httprequest
the original :class:`werkzeug.wrappers.Request` object provided to the
request
.. attribute:: params
:class:`~collections.Mapping` of request parameters, not generally
useful as they're provided directly to the handler method as keyword
arguments
"""
def __init__(self, httprequest):
self.httprequest = httprequest
self.httpresponse = None
self.httpsession = httprequest.session
self.disable_db = False
self.uid = None
self.endpoint = None
self.auth_method = None
self._cr = None
# prevents transaction commit, use when you catch an exception during handling
self._failed = None
# set db/uid trackers - they're cleaned up at the WSGI
# dispatching phase in openerp.service.wsgi_server.application
if self.db:
threading.current_thread().dbname = self.db
if self.session.uid:
threading.current_thread().uid = self.session.uid
@lazy_property
def env(self):
"""
The :class:`~openerp.api.Environment` bound to current request.
        Raises a :class:`RuntimeError` if the current request is not bound
        to a database.
        """
        if not self.db:
            raise RuntimeError('request not bound to a database')
return openerp.api.Environment(self.cr, self.uid, self.context)
@lazy_property
def context(self):
"""
:class:`~collections.Mapping` of context values for the current
request
"""
return dict(self.session.context)
@lazy_property
def lang(self):
self.session._fix_lang(self.context)
return self.context["lang"]
@lazy_property
def session(self):
"""
a :class:`OpenERPSession` holding the HTTP session data for the
current http session
"""
return self.httprequest.session
@property
def cr(self):
"""
:class:`~openerp.sql_db.Cursor` initialized for the current method
call.
Accessing the cursor when the current request uses the ``none``
authentication will raise an exception.
"""
# can not be a lazy_property because manual rollback in _call_function
# if already set (?)
if not self.db:
            raise RuntimeError('request not bound to a database')
if not self._cr:
self._cr = self.registry.cursor()
return self._cr
def __enter__(self):
_request_stack.push(self)
return self
def __exit__(self, exc_type, exc_value, traceback):
_request_stack.pop()
if self._cr:
if exc_type is None and not self._failed:
self._cr.commit()
self._cr.close()
# just to be sure no one tries to re-use the request
self.disable_db = True
self.uid = None
def set_handler(self, endpoint, arguments, auth):
# is this needed ?
arguments = dict((k, v) for k, v in arguments.iteritems()
if not k.startswith("_ignored_"))
endpoint.arguments = arguments
self.endpoint = endpoint
self.auth_method = auth
def _handle_exception(self, exception):
"""Called within an except block to allow converting exceptions
        to arbitrary responses. Anything returned (except None) will
be used as response."""
self._failed = exception # prevent tx commit
if not isinstance(exception, NO_POSTMORTEM) \
and not isinstance(exception, werkzeug.exceptions.HTTPException):
openerp.tools.debugger.post_mortem(
openerp.tools.config, sys.exc_info())
raise
def _call_function(self, *args, **kwargs):
request = self
if self.endpoint.routing['type'] != self._request_type:
msg = "%s, %s: Function declared as capable of handling request of type '%s' but called with a request of type '%s'"
params = (self.endpoint.original, self.httprequest.path, self.endpoint.routing['type'], self._request_type)
_logger.error(msg, *params)
raise werkzeug.exceptions.BadRequest(msg % params)
kwargs.update(self.endpoint.arguments)
# Backward for 7.0
if self.endpoint.first_arg_is_req:
args = (request,) + args
        # Correct exception handling and concurrency retry
@service_model.check
def checked_call(___dbname, *a, **kw):
            # The decorator can call us more than once if there is a database error. In this
# case, the request cursor is unusable. Rollback transaction to create a new one.
if self._cr:
self._cr.rollback()
return self.endpoint(*a, **kw)
if self.db:
return checked_call(self.db, *args, **kwargs)
return self.endpoint(*args, **kwargs)
@property
def debug(self):
""" Indicates whether the current request is in "debug" mode
"""
return 'debug' in self.httprequest.args
@contextlib.contextmanager
def registry_cr(self):
warnings.warn('please use request.registry and request.cr directly', DeprecationWarning)
yield (self.registry, self.cr)
@lazy_property
def session_id(self):
"""
opaque identifier for the :class:`OpenERPSession` instance of
the current request
.. deprecated:: 8.0
Use the ``sid`` attribute on :attr:`.session`
"""
return self.session.sid
@property
def registry(self):
"""
The registry to the database linked to this request. Can be ``None``
if the current request uses the ``none`` authentication.
.. deprecated:: 8.0
use :attr:`.env`
"""
return openerp.modules.registry.RegistryManager.get(self.db) if self.db else None
@property
def db(self):
"""
The database linked to this request. Can be ``None``
if the current request uses the ``none`` authentication.
"""
return self.session.db if not self.disable_db else None
@lazy_property
def httpsession(self):
""" HTTP session data
.. deprecated:: 8.0
Use :attr:`.session` instead.
"""
return self.session
def route(route=None, **kw):
"""
Decorator marking the decorated method as being a handler for
requests. The method must be part of a subclass of ``Controller``.
:param route: string or array. The route part that will determine which
http requests will match the decorated method. Can be a
single string or an array of strings. See werkzeug's routing
documentation for the format of route expression (
http://werkzeug.pocoo.org/docs/routing/ ).
:param type: The type of request, can be ``'http'`` or ``'json'``.
    :param auth: The type of authentication method, can be one of the following:
* ``user``: The user must be authenticated and the current request
will perform using the rights of the user.
* ``admin``: The user may not be authenticated and the current request
will perform using the admin user.
* ``none``: The method is always active, even if there is no
database. Mainly used by the framework and authentication
          modules. The request code will not have any facilities to access
the database nor have any configuration indicating the current
database nor the current user.
:param methods: A sequence of http methods this route applies to. If not
specified, all methods are allowed.
:param cors: The Access-Control-Allow-Origin cors directive value.
"""
routing = kw.copy()
assert not 'type' in routing or routing['type'] in ("http", "json")
def decorator(f):
if route:
if isinstance(route, list):
routes = route
else:
routes = [route]
routing['routes'] = routes
@functools.wraps(f)
def response_wrap(*args, **kw):
response = f(*args, **kw)
if isinstance(response, Response) or f.routing_type == 'json':
return response
if isinstance(response, basestring):
return Response(response)
if isinstance(response, werkzeug.exceptions.HTTPException):
response = response.get_response(request.httprequest.environ)
if isinstance(response, werkzeug.wrappers.BaseResponse):
response = Response.force_type(response)
response.set_default()
return response
_logger.warn("<function %s.%s> returns an invalid response type for an http request" % (f.__module__, f.__name__))
return response
response_wrap.routing = routing
response_wrap.original_func = f
return response_wrap
return decorator
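# Example (editor's addition, illustrative): a typical handler registered
# with this decorator; kept as a comment because Controller is only defined
# further down in this module:
#
#     class Greeting(Controller):
#         @route('/greeting/<name>', type='http', auth='user')
#         def greeting(self, name):
#             return "<h1>Hello %s</h1>" % name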
class JsonRequest(WebRequest):
""" Request handler for `JSON-RPC 2
<http://www.jsonrpc.org/specification>`_ over HTTP
* ``method`` is ignored
* ``params`` must be a JSON object (not an array) and is passed as keyword
arguments to the handler method
* the handler method's result is returned as JSON-RPC ``result`` and
wrapped in the `JSON-RPC Response
<http://www.jsonrpc.org/specification#response_object>`_
    Successful request::
--> {"jsonrpc": "2.0",
"method": "call",
"params": {"context": {},
"arg1": "val1" },
"id": null}
<-- {"jsonrpc": "2.0",
"result": { "res1": "val1" },
"id": null}
    Request producing an error::
--> {"jsonrpc": "2.0",
"method": "call",
"params": {"context": {},
"arg1": "val1" },
"id": null}
<-- {"jsonrpc": "2.0",
"error": {"code": 1,
"message": "End user error message.",
"data": {"code": "codestring",
"debug": "traceback" } },
"id": null}
"""
_request_type = "json"
def __init__(self, *args):
super(JsonRequest, self).__init__(*args)
self.jsonp_handler = None
args = self.httprequest.args
jsonp = args.get('jsonp')
self.jsonp = jsonp
request = None
request_id = args.get('id')
if jsonp and self.httprequest.method == 'POST':
# jsonp 2 steps step1 POST: save call
def handler():
self.session['jsonp_request_%s' % (request_id,)] = self.httprequest.form['r']
self.session.modified = True
headers=[('Content-Type', 'text/plain; charset=utf-8')]
r = werkzeug.wrappers.Response(request_id, headers=headers)
return r
self.jsonp_handler = handler
return
elif jsonp and args.get('r'):
# jsonp method GET
request = args.get('r')
elif jsonp and request_id:
# jsonp 2 steps step2 GET: run and return result
request = self.session.pop('jsonp_request_%s' % (request_id,), '{}')
else:
# regular jsonrpc2
request = self.httprequest.stream.read()
# Read POST content or POST Form Data named "request"
try:
self.jsonrequest = simplejson.loads(request)
except simplejson.JSONDecodeError:
msg = 'Invalid JSON data: %r' % (request,)
_logger.error('%s: %s', self.httprequest.path, msg)
raise werkzeug.exceptions.BadRequest(msg)
self.params = dict(self.jsonrequest.get("params", {}))
self.context = self.params.pop('context', dict(self.session.context))
def _json_response(self, result=None, error=None):
response = {
'jsonrpc': '2.0',
'id': self.jsonrequest.get('id')
}
if error is not None:
response['error'] = error
if result is not None:
response['result'] = result
if self.jsonp:
            # If we use jsonp, that means we are called from another host.
            # Some browsers (IE and Safari) do not allow third-party cookies,
            # so we then need to manage http sessions manually.
response['session_id'] = self.session_id
mime = 'application/javascript'
body = "%s(%s);" % (self.jsonp, simplejson.dumps(response),)
else:
mime = 'application/json'
body = simplejson.dumps(response)
return Response(
body, headers=[('Content-Type', mime),
('Content-Length', len(body))])
def _handle_exception(self, exception):
"""Called within an except block to allow converting exceptions
to arbitrary responses. Anything returned (except None) will
be used as response."""
try:
return super(JsonRequest, self)._handle_exception(exception)
except Exception:
if not isinstance(exception, (openerp.exceptions.Warning, SessionExpiredException)):
_logger.exception("Exception during JSON request handling.")
error = {
'code': 200,
'message': "Odoo Server Error",
'data': serialize_exception(exception)
}
if isinstance(exception, AuthenticationError):
error['code'] = 100
error['message'] = "Odoo Session Invalid"
if isinstance(exception, SessionExpiredException):
error['code'] = 100
error['message'] = "Odoo Session Expired"
return self._json_response(error=error)
def dispatch(self):
if self.jsonp_handler:
return self.jsonp_handler()
try:
rpc_request_flag = rpc_request.isEnabledFor(logging.DEBUG)
rpc_response_flag = rpc_response.isEnabledFor(logging.DEBUG)
if rpc_request_flag or rpc_response_flag:
endpoint = self.endpoint.method.__name__
model = self.params.get('model')
method = self.params.get('method')
args = self.params.get('args', [])
start_time = time.time()
_, start_vms = 0, 0
if psutil:
_, start_vms = memory_info(psutil.Process(os.getpid()))
            if rpc_response_flag:
rpc_request.debug('%s: %s %s, %s',
endpoint, model, method, pprint.pformat(args))
result = self._call_function(**self.params)
if rpc_request_flag or rpc_response_flag:
end_time = time.time()
_, end_vms = 0, 0
if psutil:
_, end_vms = memory_info(psutil.Process(os.getpid()))
logline = '%s: %s %s: time:%.3fs mem: %sk -> %sk (diff: %sk)' % (
endpoint, model, method, end_time - start_time, start_vms / 1024, end_vms / 1024, (end_vms - start_vms)/1024)
if rpc_response_flag:
rpc_response.debug('%s, %s', logline, pprint.pformat(result))
else:
rpc_request.debug(logline)
return self._json_response(result)
except Exception, e:
return self._handle_exception(e)
def serialize_exception(e):
tmp = {
"name": type(e).__module__ + "." + type(e).__name__ if type(e).__module__ else type(e).__name__,
"debug": traceback.format_exc(),
"message": ustr(e),
"arguments": to_jsonable(e.args),
}
if isinstance(e, openerp.osv.osv.except_osv):
tmp["exception_type"] = "except_osv"
elif isinstance(e, openerp.exceptions.Warning):
tmp["exception_type"] = "warning"
elif isinstance(e, openerp.exceptions.AccessError):
tmp["exception_type"] = "access_error"
elif isinstance(e, openerp.exceptions.AccessDenied):
tmp["exception_type"] = "access_denied"
return tmp
def to_jsonable(o):
if isinstance(o, str) or isinstance(o,unicode) or isinstance(o, int) or isinstance(o, long) \
or isinstance(o, bool) or o is None or isinstance(o, float):
return o
if isinstance(o, list) or isinstance(o, tuple):
return [to_jsonable(x) for x in o]
if isinstance(o, dict):
tmp = {}
for k, v in o.items():
tmp[u"%s" % k] = to_jsonable(v)
return tmp
return ustr(o)
def jsonrequest(f):
"""
.. deprecated:: 8.0
Use the :func:`~openerp.http.route` decorator instead.
"""
base = f.__name__.lstrip('/')
if f.__name__ == "index":
base = ""
return route([base, base + "/<path:_ignored_path>"], type="json", auth="user", combine=True)(f)
class HttpRequest(WebRequest):
""" Handler for the ``http`` request type.
matched routing parameters, query string parameters, form_ parameters
and files are passed to the handler method as keyword arguments.
In case of name conflict, routing parameters have priority.
The handler method's result can be:
* a falsy value, in which case the HTTP response will be an
`HTTP 204`_ (No Content)
* a werkzeug Response object, which is returned as-is
* a ``str`` or ``unicode``, will be wrapped in a Response object and
interpreted as HTML
.. _form: http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4.2
.. _HTTP 204: http://tools.ietf.org/html/rfc7231#section-6.3.5
"""
_request_type = "http"
def __init__(self, *args):
super(HttpRequest, self).__init__(*args)
params = self.httprequest.args.to_dict()
params.update(self.httprequest.form.to_dict())
params.update(self.httprequest.files.to_dict())
params.pop('session_id', None)
self.params = params
def _handle_exception(self, exception):
"""Called within an except block to allow converting exceptions
        to arbitrary responses. Anything returned (except None) will
be used as response."""
try:
return super(HttpRequest, self)._handle_exception(exception)
except SessionExpiredException:
if not request.params.get('noredirect'):
query = werkzeug.urls.url_encode({
'redirect': request.httprequest.url,
})
return werkzeug.utils.redirect('/web/login?%s' % query)
except werkzeug.exceptions.HTTPException, e:
return e
def dispatch(self):
if request.httprequest.method == 'OPTIONS' and request.endpoint and request.endpoint.routing.get('cors'):
headers = {
'Access-Control-Max-Age': 60 * 60 * 24,
'Access-Control-Allow-Headers': 'Origin, X-Requested-With, Content-Type, Accept'
}
return Response(status=200, headers=headers)
r = self._call_function(**self.params)
if not r:
r = Response(status=204) # no content
return r
def make_response(self, data, headers=None, cookies=None):
""" Helper for non-HTML responses, or HTML responses with custom
response headers or cookies.
        While handlers can just return the HTML markup of a page they want to
        send as a string, non-HTML data requires a complete response object,
        or the returned data will not be correctly interpreted by the
        clients.
:param basestring data: response body
:param headers: HTTP headers to set on the response
:type headers: ``[(name, value)]``
:param collections.Mapping cookies: cookies to set on the client
"""
response = Response(data, headers=headers)
if cookies:
for k, v in cookies.iteritems():
response.set_cookie(k, v)
return response
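    # Example (editor's addition, illustrative): returning JSON with explicit
    # headers and a cookie rather than a bare HTML string:
    #
    #     body = simplejson.dumps({'ok': True})
    #     return request.make_response(body,
    #         headers=[('Content-Type', 'application/json')],
    #         cookies={'last_seen': 'now'})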
def render(self, template, qcontext=None, lazy=True, **kw):
""" Lazy render of a QWeb template.
        The actual rendering of the given template will occur at the end of
the dispatching. Meanwhile, the template and/or qcontext can be
altered or even replaced by a static response.
:param basestring template: template to render
:param dict qcontext: Rendering context to use
:param bool lazy: whether the template rendering should be deferred
until the last possible moment
:param kw: forwarded to werkzeug's Response object
"""
response = Response(template=template, qcontext=qcontext, **kw)
if not lazy:
return response.render()
return response
def not_found(self, description=None):
""" Shortcut for a `HTTP 404
<http://tools.ietf.org/html/rfc7231#section-6.5.4>`_ (Not Found)
response
"""
return werkzeug.exceptions.NotFound(description)
def httprequest(f):
"""
.. deprecated:: 8.0
Use the :func:`~openerp.http.route` decorator instead.
"""
base = f.__name__.lstrip('/')
if f.__name__ == "index":
base = ""
return route([base, base + "/<path:_ignored_path>"], type="http", auth="user", combine=True)(f)
#----------------------------------------------------------
# Controller and route registration
#----------------------------------------------------------
addons_module = {}
addons_manifest = {}
controllers_per_module = collections.defaultdict(list)
class ControllerType(type):
def __init__(cls, name, bases, attrs):
super(ControllerType, cls).__init__(name, bases, attrs)
# flag old-style methods with req as first argument
for k, v in attrs.items():
if inspect.isfunction(v) and hasattr(v, 'original_func'):
# Set routing type on original functions
routing_type = v.routing.get('type')
parent = [claz for claz in bases if isinstance(claz, ControllerType) and hasattr(claz, k)]
parent_routing_type = getattr(parent[0], k).original_func.routing_type if parent else routing_type or 'http'
if routing_type is not None and routing_type != parent_routing_type:
routing_type = parent_routing_type
_logger.warn("Subclass re-defines <function %s.%s.%s> with different type than original."
" Will use original type: %r" % (cls.__module__, cls.__name__, k, parent_routing_type))
v.original_func.routing_type = routing_type or parent_routing_type
spec = inspect.getargspec(v.original_func)
first_arg = spec.args[1] if len(spec.args) >= 2 else None
if first_arg in ["req", "request"]:
v._first_arg_is_req = True
# store the controller in the controllers list
name_class = ("%s.%s" % (cls.__module__, cls.__name__), cls)
class_path = name_class[0].split(".")
if not class_path[:2] == ["openerp", "addons"]:
module = ""
else:
# we want to know all modules that have controllers
module = class_path[2]
# but we only store controllers directly inheriting from Controller
if not "Controller" in globals() or not Controller in bases:
return
controllers_per_module[module].append(name_class)
class Controller(object):
__metaclass__ = ControllerType
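# Example of declaring routes on a controller (hypothetical paths and names):
#
#     class Main(Controller):
#         @route('/hello', type='http', auth='none')
#         def hello(self):
#             return "<h1>Hello</h1>"
#
#         @route('/hello/json', type='json', auth='user')
#         def hello_json(self):
#             return {'message': 'hello'}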
class EndPoint(object):
def __init__(self, method, routing):
self.method = method
self.original = getattr(method, 'original_func', method)
self.routing = routing
self.arguments = {}
@property
def first_arg_is_req(self):
# Backward for 7.0
return getattr(self.method, '_first_arg_is_req', False)
def __call__(self, *args, **kw):
return self.method(*args, **kw)
def routing_map(modules, nodb_only, converters=None):
routing_map = werkzeug.routing.Map(strict_slashes=False, converters=converters)
def get_subclasses(klass):
def valid(c):
return c.__module__.startswith('openerp.addons.') and c.__module__.split(".")[2] in modules
subclasses = klass.__subclasses__()
result = []
for subclass in subclasses:
if valid(subclass):
result.extend(get_subclasses(subclass))
if not result and valid(klass):
result = [klass]
return result
uniq = lambda it: collections.OrderedDict((id(x), x) for x in it).values()
for module in modules:
if module not in controllers_per_module:
continue
for _, cls in controllers_per_module[module]:
subclasses = uniq(c for c in get_subclasses(cls) if c is not cls)
if subclasses:
name = "%s (extended by %s)" % (cls.__name__, ', '.join(sub.__name__ for sub in subclasses))
cls = type(name, tuple(reversed(subclasses)), {})
o = cls()
members = inspect.getmembers(o, inspect.ismethod)
for _, mv in members:
if hasattr(mv, 'routing'):
routing = dict(type='http', auth='user', methods=None, routes=None)
methods_done = list()
# update routing attributes from subclasses(auth, methods...)
for claz in reversed(mv.im_class.mro()):
fn = getattr(claz, mv.func_name, None)
if fn and hasattr(fn, 'routing') and fn not in methods_done:
methods_done.append(fn)
routing.update(fn.routing)
if not nodb_only or routing['auth'] == "none":
assert routing['routes'], "Method %r has no route defined" % mv
endpoint = EndPoint(mv, routing)
for url in routing['routes']:
if routing.get("combine", False):
# deprecated v7 declaration
url = o._cp_path.rstrip('/') + '/' + url.lstrip('/')
if url.endswith("/") and len(url) > 1:
url = url[: -1]
xtra_keys = 'defaults subdomain build_only strict_slashes redirect_to alias host'.split()
kw = {k: routing[k] for k in xtra_keys if k in routing}
routing_map.add(werkzeug.routing.Rule(url, endpoint=endpoint, methods=routing['methods'], **kw))
return routing_map
#----------------------------------------------------------
# HTTP Sessions
#----------------------------------------------------------
class AuthenticationError(Exception):
pass
class SessionExpiredException(Exception):
pass
class Service(object):
"""
.. deprecated:: 8.0
Use :func:`dispatch_rpc` instead.
"""
def __init__(self, session, service_name):
self.session = session
self.service_name = service_name
def __getattr__(self, method):
def proxy_method(*args):
result = dispatch_rpc(self.service_name, method, args)
return result
return proxy_method
class Model(object):
"""
.. deprecated:: 8.0
Use the registry and cursor in :data:`request` instead.
"""
def __init__(self, session, model):
self.session = session
self.model = model
self.proxy = self.session.proxy('object')
def __getattr__(self, method):
self.session.assert_valid()
def proxy(*args, **kw):
# Can't provide any retro-compatibility for this case, so we check it and raise an Exception
# to tell the programmer to adapt his code
if not request.db or not request.uid or self.session.db != request.db \
or self.session.uid != request.uid:
raise Exception("Trying to use Model with badly configured database or user.")
if method.startswith('_'):
raise Exception("Access denied")
mod = request.registry[self.model]
meth = getattr(mod, method)
# make sure to instantiate an environment
cr = request.env.cr
result = meth(cr, request.uid, *args, **kw)
# reorder read
if method == "read":
if isinstance(result, list) and len(result) > 0 and "id" in result[0]:
index = {}
for r in result:
index[r['id']] = r
result = [index[x] for x in args[0] if x in index]
return result
return proxy
class OpenERPSession(werkzeug.contrib.sessions.Session):
def __init__(self, *args, **kwargs):
self.inited = False
self.modified = False
self.rotate = False
super(OpenERPSession, self).__init__(*args, **kwargs)
self.inited = True
self._default_values()
self.modified = False
def __getattr__(self, attr):
return self.get(attr, None)
def __setattr__(self, k, v):
if getattr(self, "inited", False):
try:
object.__getattribute__(self, k)
except:
return self.__setitem__(k, v)
object.__setattr__(self, k, v)
def authenticate(self, db, login=None, password=None, uid=None):
"""
Authenticate the current user with the given db, login and
password. If successful, store the authentication parameters in the
current session and request.
:param uid: If not None, that user id will be used instead the login
to authenticate the user.
"""
if uid is None:
wsgienv = request.httprequest.environ
env = dict(
base_location=request.httprequest.url_root.rstrip('/'),
HTTP_HOST=wsgienv['HTTP_HOST'],
REMOTE_ADDR=wsgienv['REMOTE_ADDR'],
)
uid = dispatch_rpc('common', 'authenticate', [db, login, password, env])
else:
security.check(db, uid, password)
self.db = db
self.uid = uid
self.login = login
self.password = password
request.uid = uid
request.disable_db = False
if uid: self.get_context()
return uid
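# Example (hypothetical database name and credentials):
#
#     uid = request.session.authenticate('mydb', 'admin', 'admin')
#     if uid:
#         pass  # session now carries db/uid/login/password and a fresh context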
def check_security(self):
"""
Check the current authentication parameters to know if those are still
valid. This method should be called at each request. If the
authentication fails, a :exc:`SessionExpiredException` is raised.
"""
if not self.db or not self.uid:
raise SessionExpiredException("Session expired")
security.check(self.db, self.uid, self.password)
def logout(self, keep_db=False):
for k in self.keys():
if not (keep_db and k == 'db'):
del self[k]
self._default_values()
self.rotate = True
def _default_values(self):
self.setdefault("db", None)
self.setdefault("uid", None)
self.setdefault("login", None)
self.setdefault("password", None)
self.setdefault("context", {})
def get_context(self):
"""
Re-initializes the current user's session context (based on his
preferences) by calling res.users.get_context() with the old context.
:returns: the new context
"""
assert self.uid, "The user needs to be logged-in to initialize his context"
self.context = request.registry.get('res.users').context_get(request.cr, request.uid) or {}
self.context['uid'] = self.uid
self._fix_lang(self.context)
return self.context
def _fix_lang(self, context):
""" OpenERP provides languages which may not make sense and/or may not
be understood by the web client's libraries.
Fix those here.
:param dict context: context to fix
"""
lang = context.get('lang')
# inane OpenERP locale
if lang == 'ar_AR':
lang = 'ar'
# lang to lang_REGION (datejs only handles lang_REGION, no bare langs)
if lang in babel.core.LOCALE_ALIASES:
lang = babel.core.LOCALE_ALIASES[lang]
context['lang'] = lang or 'en_US'
# Deprecated to be removed in 9
"""
Damn properties for retro-compatibility. All of that is deprecated,
all of that.
"""
@property
def _db(self):
return self.db
@_db.setter
def _db(self, value):
self.db = value
@property
def _uid(self):
return self.uid
@_uid.setter
def _uid(self, value):
self.uid = value
@property
def _login(self):
return self.login
@_login.setter
def _login(self, value):
self.login = value
@property
def _password(self):
return self.password
@_password.setter
def _password(self, value):
self.password = value
def send(self, service_name, method, *args):
"""
.. deprecated:: 8.0
Use :func:`dispatch_rpc` instead.
"""
return dispatch_rpc(service_name, method, args)
def proxy(self, service):
"""
.. deprecated:: 8.0
Use :func:`dispatch_rpc` instead.
"""
return Service(self, service)
def assert_valid(self, force=False):
"""
.. deprecated:: 8.0
Use :meth:`check_security` instead.
Ensures this session is valid (logged into the openerp server)
"""
if self.uid and not force:
return
# TODO use authenticate instead of login
self.uid = self.proxy("common").login(self.db, self.login, self.password)
if not self.uid:
raise AuthenticationError("Authentication failure")
def ensure_valid(self):
"""
.. deprecated:: 8.0
Use :meth:`check_security` instead.
"""
if self.uid:
try:
self.assert_valid(True)
except Exception:
self.uid = None
def execute(self, model, func, *l, **d):
"""
.. deprecated:: 8.0
Use the registry and cursor in :data:`request` instead.
"""
model = self.model(model)
r = getattr(model, func)(*l, **d)
return r
def exec_workflow(self, model, id, signal):
"""
.. deprecated:: 8.0
Use the registry and cursor in :data:`request` instead.
"""
self.assert_valid()
r = self.proxy('object').exec_workflow(self.db, self.uid, self.password, model, signal, id)
return r
def model(self, model):
"""
.. deprecated:: 8.0
Use the registry and cursor in :data:`request` instead.
Get an RPC proxy for the object ``model``, bound to this session.
:param model: an OpenERP model name
:type model: str
:rtype: a model object
"""
if not self.db:
raise SessionExpiredException("Session expired")
return Model(self, model)
def save_action(self, action):
"""
This method stores an action object in the session and returns an integer
identifying that action. The method get_action() can be used to get
the action back.
:param the_action: The action to save in the session.
:type the_action: anything
:return: A key identifying the saved action.
:rtype: integer
"""
saved_actions = self.setdefault('saved_actions', {"next": 1, "actions": {}})
# we don't allow more than 10 stored actions
if len(saved_actions["actions"]) >= 10:
del saved_actions["actions"][min(saved_actions["actions"])]
key = saved_actions["next"]
saved_actions["actions"][key] = action
saved_actions["next"] = key + 1
self.modified = True
return key
def get_action(self, key):
"""
Gets back a previously saved action. This method can return None if the action
was saved too long ago (callers should handle that case gracefully).
:param key: The key given by save_action()
:type key: integer
:return: The saved action or None.
:rtype: anything
"""
saved_actions = self.get('saved_actions', {})
return saved_actions.get("actions", {}).get(key)
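# Example round-trip (hypothetical action dict):
#
#     key = request.session.save_action({'type': 'ir.actions.act_window'})
#     action = request.session.get_action(key)  # same dict, or None if evicted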
def session_gc(session_store):
if random.random() < 0.001:
# we keep sessions for one week
last_week = time.time() - 60*60*24*7
for fname in os.listdir(session_store.path):
path = os.path.join(session_store.path, fname)
try:
if os.path.getmtime(path) < last_week:
os.unlink(path)
except OSError:
pass
#----------------------------------------------------------
# WSGI Layer
#----------------------------------------------------------
# Add potentially missing (older ubuntu) font mime types
mimetypes.add_type('application/font-woff', '.woff')
mimetypes.add_type('application/vnd.ms-fontobject', '.eot')
mimetypes.add_type('application/x-font-ttf', '.ttf')
class Response(werkzeug.wrappers.Response):
""" Response object passed through controller route chain.
In addition to the :class:`werkzeug.wrappers.Response` parameters, this
class's constructor can take the following additional parameters
for QWeb Lazy Rendering.
:param basestring template: template to render
:param dict qcontext: Rendering context to use
:param int uid: User id to use for the ir.ui.view render call,
``None`` to use the request's user (the default)
these attributes are available as parameters on the Response object and
can be altered at any time before rendering
Also exposes all the attributes and methods of
:class:`werkzeug.wrappers.Response`.
"""
default_mimetype = 'text/html'
def __init__(self, *args, **kw):
template = kw.pop('template', None)
qcontext = kw.pop('qcontext', None)
uid = kw.pop('uid', None)
super(Response, self).__init__(*args, **kw)
self.set_default(template, qcontext, uid)
def set_default(self, template=None, qcontext=None, uid=None):
self.template = template
self.qcontext = qcontext or dict()
self.uid = uid
# Support for Cross-Origin Resource Sharing
if request.endpoint and 'cors' in request.endpoint.routing:
self.headers.set('Access-Control-Allow-Origin', request.endpoint.routing['cors'])
methods = 'GET, POST'
if request.endpoint.routing['type'] == 'json':
methods = 'POST'
elif request.endpoint.routing.get('methods'):
methods = ', '.join(request.endpoint.routing['methods'])
self.headers.set('Access-Control-Allow-Methods', methods)
@property
def is_qweb(self):
return self.template is not None
def render(self):
""" Renders the Response's template, returns the result
"""
view_obj = request.registry["ir.ui.view"]
uid = self.uid or request.uid or openerp.SUPERUSER_ID
return view_obj.render(
request.cr, uid, self.template, self.qcontext,
context=request.context)
def flatten(self):
""" Forces the rendering of the response's template, sets the result
as response body and unsets :attr:`.template`
"""
self.response.append(self.render())
self.template = None
class DisableCacheMiddleware(object):
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
def start_wrapped(status, headers):
referer = environ.get('HTTP_REFERER', '')
parsed = urlparse.urlparse(referer)
debug = parsed.query.count('debug') >= 1
new_headers = []
unwanted_keys = ['Last-Modified']
if debug:
new_headers = [('Cache-Control', 'no-cache')]
unwanted_keys += ['Expires', 'Etag', 'Cache-Control']
for k, v in headers:
if k not in unwanted_keys:
new_headers.append((k, v))
start_response(status, new_headers)
return self.app(environ, start_wrapped)
class Root(object):
"""Root WSGI application for the OpenERP Web Client.
"""
def __init__(self):
self._loaded = False
@lazy_property
def session_store(self):
# Setup http sessions
path = openerp.tools.config.session_dir
_logger.debug('HTTP sessions stored in: %s', path)
return werkzeug.contrib.sessions.FilesystemSessionStore(path, session_class=OpenERPSession)
@lazy_property
def nodb_routing_map(self):
_logger.info("Generating nondb routing")
return routing_map([''] + openerp.conf.server_wide_modules, True)
def __call__(self, environ, start_response):
""" Handle a WSGI request
"""
if not self._loaded:
self._loaded = True
self.load_addons()
return self.dispatch(environ, start_response)
def load_addons(self):
""" Load all addons from addons path containing static files and
controllers and configure them. """
# TODO should we move this to ir.http so that only configured modules are served ?
statics = {}
for addons_path in openerp.modules.module.ad_paths:
for module in sorted(os.listdir(str(addons_path))):
if module not in addons_module:
manifest_path = os.path.join(addons_path, module, '__openerp__.py')
path_static = os.path.join(addons_path, module, 'static')
if os.path.isfile(manifest_path) and os.path.isdir(path_static):
manifest = ast.literal_eval(open(manifest_path).read())
if not manifest.get('installable', True):
continue
manifest['addons_path'] = addons_path
_logger.debug("Loading %s", module)
if 'openerp.addons' in sys.modules:
m = __import__('openerp.addons.' + module)
else:
m = None
addons_module[module] = m
addons_manifest[module] = manifest
statics['/%s/static' % module] = path_static
if statics:
_logger.info("HTTP Configuring static files")
app = werkzeug.wsgi.SharedDataMiddleware(self.dispatch, statics, cache_timeout=STATIC_CACHE)
self.dispatch = DisableCacheMiddleware(app)
def setup_session(self, httprequest):
# recover or create session
session_gc(self.session_store)
sid = httprequest.args.get('session_id')
explicit_session = True
if not sid:
sid = httprequest.headers.get("X-Openerp-Session-Id")
if not sid:
sid = httprequest.cookies.get('session_id')
explicit_session = False
if sid is None:
httprequest.session = self.session_store.new()
else:
httprequest.session = self.session_store.get(sid)
return explicit_session
def setup_db(self, httprequest):
db = httprequest.session.db
# Check if session.db is legit
if db:
if db not in db_filter([db], httprequest=httprequest):
_logger.warn("Logged into database '%s', but dbfilter "
"rejects it; logging session out.", db)
httprequest.session.logout()
db = None
if not db:
httprequest.session.db = db_monodb(httprequest)
def setup_lang(self, httprequest):
if not "lang" in httprequest.session.context:
lang = httprequest.accept_languages.best or "en_US"
lang = babel.core.LOCALE_ALIASES.get(lang, lang).replace('-', '_')
httprequest.session.context["lang"] = lang
def get_request(self, httprequest):
# deduce type of request
if httprequest.args.get('jsonp'):
return JsonRequest(httprequest)
if httprequest.mimetype in ("application/json", "application/json-rpc"):
return JsonRequest(httprequest)
else:
return HttpRequest(httprequest)
def get_response(self, httprequest, result, explicit_session):
if isinstance(result, Response) and result.is_qweb:
try:
result.flatten()
except Exception, e:
if request.db:
result = request.registry['ir.http']._handle_exception(e)
else:
raise
if isinstance(result, basestring):
response = Response(result, mimetype='text/html')
else:
response = result
if httprequest.session.should_save:
if httprequest.session.rotate:
self.session_store.delete(httprequest.session)
httprequest.session.sid = self.session_store.generate_key()
httprequest.session.modified = True
self.session_store.save(httprequest.session)
# We must not set the cookie if the session id was specified using an HTTP header or a GET parameter.
# There are two reasons for this:
# - When using one of those two means we consider that we are overriding the cookie, which means creating a new
# session on top of an already existing session and we don't want to create a mess with the 'normal' session
# (the one using the cookie). That is a special feature of the Session Javascript class.
# - It could allow session fixation attacks.
if not explicit_session and hasattr(response, 'set_cookie'):
response.set_cookie('session_id', httprequest.session.sid, max_age=90 * 24 * 60 * 60)
return response
def dispatch(self, environ, start_response):
"""
Performs the actual WSGI dispatching for the application.
"""
try:
httprequest = werkzeug.wrappers.Request(environ)
httprequest.app = self
explicit_session = self.setup_session(httprequest)
self.setup_db(httprequest)
self.setup_lang(httprequest)
request = self.get_request(httprequest)
def _dispatch_nodb():
try:
func, arguments = self.nodb_routing_map.bind_to_environ(request.httprequest.environ).match()
except werkzeug.exceptions.HTTPException, e:
return request._handle_exception(e)
request.set_handler(func, arguments, "none")
result = request.dispatch()
return result
with request:
db = request.session.db
if db:
openerp.modules.registry.RegistryManager.check_registry_signaling(db)
try:
with openerp.tools.mute_logger('openerp.sql_db'):
ir_http = request.registry['ir.http']
except (AttributeError, psycopg2.OperationalError):
# psycopg2 error or attribute error while constructing
# the registry. That means the database probably does
# not exist anymore or the code doesn't match the db.
# Log the user out and fall back to nodb
request.session.logout()
result = _dispatch_nodb()
else:
result = ir_http._dispatch()
openerp.modules.registry.RegistryManager.signal_caches_change(db)
else:
result = _dispatch_nodb()
response = self.get_response(httprequest, result, explicit_session)
return response(environ, start_response)
except werkzeug.exceptions.HTTPException, e:
return e(environ, start_response)
def get_db_router(self, db):
if not db:
return self.nodb_routing_map
return request.registry['ir.http'].routing_map()
def db_list(force=False, httprequest=None):
dbs = dispatch_rpc("db", "list", [force])
return db_filter(dbs, httprequest=httprequest)
def db_filter(dbs, httprequest=None):
httprequest = httprequest or request.httprequest
h = httprequest.environ.get('HTTP_HOST', '').split(':')[0]
d, _, r = h.partition('.')
if d == "www" and r:
d = r.partition('.')[0]
r = openerp.tools.config['dbfilter'].replace('%h', h).replace('%d', d)
dbs = [i for i in dbs if re.match(r, i)]
return dbs
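# Example (hypothetical values): with dbfilter = '%d_.*' and a request for
# Host 'crm.example.com', '%h' becomes 'crm.example.com' and '%d' becomes
# 'crm', so only databases matching the regex 'crm_.*' are exposed.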
def db_monodb(httprequest=None):
"""
Magic function to find the current database.
Implementation details:
* Magic
* More magic
Returns ``None`` if the magic is not magic enough.
"""
httprequest = httprequest or request.httprequest
dbs = db_list(True, httprequest)
# try the db already in the session
db_session = httprequest.session.db
if db_session in dbs:
return db_session
# if there is only one possible db, we take that one
if len(dbs) == 1:
return dbs[0]
return None
def send_file(filepath_or_fp, mimetype=None, as_attachment=False, filename=None, mtime=None,
add_etags=True, cache_timeout=STATIC_CACHE, conditional=True):
"""This is a modified version of Flask's send_file()
Sends the contents of a file to the client. This will use the
most efficient method available and configured. By default it will
try to use the WSGI server's file_wrapper support.
By default it will try to guess the mimetype for you, but you can
also explicitly provide one. For extra security you probably want
to send certain files as attachment (HTML for instance). The mimetype
guessing requires a `filename` or an `attachment_filename` to be
provided.
Please never pass filenames to this function from user sources without
checking them first.
:param filepath_or_fp: the filename of the file to send.
Alternatively a file object might be provided
in which case `X-Sendfile` might not work and
fall back to the traditional method. Make sure
that the file pointer is positioned at the start
of data to send before calling :func:`send_file`.
:param mimetype: the mimetype of the file if provided, otherwise
auto detection happens.
:param as_attachment: set to `True` if you want to send this file with
a ``Content-Disposition: attachment`` header.
:param filename: the filename for the attachment if it differs from the file's filename or
if using a file object without a 'name' attribute (e.g. etags with StringIO).
:param mtime: last modification time to use for conditional responses.
:param add_etags: set to `False` to disable attaching of etags.
:param conditional: set to `False` to disable conditional responses.
:param cache_timeout: the timeout in seconds for the headers.
"""
if isinstance(filepath_or_fp, (str, unicode)):
if not filename:
filename = os.path.basename(filepath_or_fp)
file = open(filepath_or_fp, 'rb')
if not mtime:
mtime = os.path.getmtime(filepath_or_fp)
else:
file = filepath_or_fp
if not filename:
filename = getattr(file, 'name', None)
file.seek(0, 2)
size = file.tell()
file.seek(0)
if mimetype is None and filename:
mimetype = mimetypes.guess_type(filename)[0]
if mimetype is None:
mimetype = 'application/octet-stream'
headers = werkzeug.datastructures.Headers()
if as_attachment:
if filename is None:
raise TypeError('filename unavailable, required for sending as attachment')
headers.add('Content-Disposition', 'attachment', filename=filename)
headers['Content-Length'] = size
data = wrap_file(request.httprequest.environ, file)
rv = Response(data, mimetype=mimetype, headers=headers,
direct_passthrough=True)
if isinstance(mtime, str):
try:
server_format = openerp.tools.misc.DEFAULT_SERVER_DATETIME_FORMAT
mtime = datetime.datetime.strptime(mtime.split('.')[0], server_format)
except Exception:
mtime = None
if mtime is not None:
rv.last_modified = mtime
rv.cache_control.public = True
if cache_timeout:
rv.cache_control.max_age = cache_timeout
rv.expires = int(time.time() + cache_timeout)
if add_etags and filename and mtime:
rv.set_etag('odoo-%s-%s-%s' % (
mtime,
size,
adler32(
filename.encode('utf-8') if isinstance(filename, unicode)
else filename
) & 0xffffffff
))
if conditional:
rv = rv.make_conditional(request.httprequest)
# make sure we don't send x-sendfile for servers that
# ignore the 304 status code for x-sendfile.
if rv.status_code == 304:
rv.headers.pop('x-sendfile', None)
return rv
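# Example (hypothetical path; the mimetype is guessed from the filename):
#
#     @route('/download/report', type='http', auth='user')
#     def download(self):
#         return send_file('/tmp/report.pdf', as_attachment=True,
#                          filename='report.pdf')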
#----------------------------------------------------------
# RPC controller
#----------------------------------------------------------
class CommonController(Controller):
@route('/jsonrpc', type='json', auth="none")
def jsonrpc(self, service, method, args):
""" Method used by client APIs to contact OpenERP. """
return dispatch_rpc(service, method, args)
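# Example JSON-RPC request body for this endpoint (hypothetical values):
#
#     {"jsonrpc": "2.0", "method": "call", "id": 1,
#      "params": {"service": "common", "method": "version", "args": []}}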
@route('/gen_session_id', type='json', auth="none")
def gen_session_id(self):
nsession = root.session_store.new()
return nsession.sid
# register main wsgi handler
root = Root()
openerp.service.wsgi_server.register_wsgi_handler(root)
# vim:et:ts=4:sw=4:
|
DavidEGrayson/MSYS2-pacman | refs/heads/master | test/pacman/tests/sync200.py | 10 | self.description = "Synchronize the local database"
self.option['XferCommand'] = ['/usr/bin/curl %u > %o']
sp1 = pmpkg("spkg1", "1.0-1")
sp1.depends = ["spkg2"]
sp2 = pmpkg("spkg2", "2.0-1")
sp2.depends = ["spkg3"]
sp3 = pmpkg("spkg3", "3.0-1")
sp3.depends = ["spkg1"]
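# spkg1 -> spkg2 -> spkg3 -> spkg1 forms a dependency cycle; a plain -Sy only
# refreshes the sync database, so it must still succeed (retcode 0 below).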
for sp in sp1, sp2, sp3:
self.addpkg2db("sync", sp)
self.args = "-Sy"
self.addrule("PACMAN_RETCODE=0")
|
waddedMeat/asteroids-ish | refs/heads/master | Asteroids/Asteroid.py | 1 | __author__ = 'jmoran'
from Asteroids import MovingObject
from Asteroids.Colors import *
from random import getrandbits, randint
from math import sin, cos, radians
class Asteroid(MovingObject):
def __init__(self, window, game, init_point, size):
slope = (1 if getrandbits(1) else -1, 1 if getrandbits(1) else -1)
MovingObject.__init__(self, window, game, init_point, slope)
self.size = size
x, y = self.point
self.points = []
for d in range(0, 360, 30):
r = randint(int(size/2), size)
p = (x + (r * cos(radians(d))), y + (r * sin(radians(d))))
self.points.append(p)
def draw(self):
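# Step the asteroid one unit against its slope and wrap the centre point
# and outline around the opposite screen edge when it leaves the window.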
x, y = self.point
rx, ry = self.slope
x = int(x - rx)
y = int(y - ry)
if x > self.wWidth:
x -= self.wWidth
self.points = tuple((x1-self.wWidth, y1-ry) for (x1, y1) in self.points)
elif x < 0:
x += self.wWidth
self.points = tuple((x1+self.wWidth, y1-ry) for (x1, y1) in self.points)
elif y > self.wHeight:
y -= self.wHeight
self.points = tuple((x1, y1-self.wHeight) for (x1, y1) in self.points)
elif y < 0:
y += self.wHeight
self.points = tuple((x1, y1+self.wHeight) for (x1, y1) in self.points)
else:
self.points = tuple((x1-rx, y1-ry) for (x1, y1) in self.points)
self.point = (x, y)
self.gObj = self.game.draw.aalines(self.window, WHITE, True, self.points, 1)
def do_collision(self, obj):
MovingObject.do_collision(self, obj)
size = int(self.size / 1.5)
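# Each hit shrinks the asteroid by 1.5x; fragments above 15 px split into
# three slightly offset children, smaller ones simply disappear.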
if size > 15:
return [Asteroid(self.window, self.game, tuple(p + 2 for p in self.point), size),
Asteroid(self.window, self.game, self.point, size),
Asteroid(self.window, self.game, tuple(p - 2 for p in self.point), size)]
else:
return []
|
unnikrishnankgs/va | refs/heads/master | venv/lib/python3.5/site-packages/matplotlib/tests/test_labeled_data_unpacking.py | 5 | from __future__ import (absolute_import, division, print_function)
from nose.tools import (assert_raises, assert_equal)
from nose.plugins.skip import SkipTest
try:
# 3.2+ versions
from nose.tools import assert_regex, assert_not_regex
except ImportError:
# 2.7 versions
from nose.tools import assert_regexp_matches, assert_not_regexp_matches
assert_regex = assert_regexp_matches
assert_not_regex = assert_not_regexp_matches
from ..testing import assert_produces_warning
from .. import unpack_labeled_data
# Notes on testing the plotting functions itself
# * the individual decorated plotting functions are tested in 'test_axes.py'
# * that pyplot functions accept a data kwarg is only tested in
# test_axes.test_pie_linewidth_0
# these two get used in multiple tests, so define them here
@unpack_labeled_data(replace_names=["x", "y"], label_namer="y")
def plot_func(ax, x, y, ls="x", label=None, w="xyz"):
return ("x: %s, y: %s, ls: %s, w: %s, label: %s" % (
list(x), list(y), ls, w, label))
@unpack_labeled_data(replace_names=["x", "y"], label_namer="y",
positional_parameter_names=["x", "y", "ls", "label", "w"])
def plot_func_varags(ax, *args, **kwargs):
all_args = [None, None, "x", None, "xyz"]
for i, v in enumerate(args):
all_args[i] = v
for i, k in enumerate(["x", "y", "ls", "label", "w"]):
if k in kwargs:
all_args[i] = kwargs[k]
x, y, ls, label, w = all_args
return ("x: %s, y: %s, ls: %s, w: %s, label: %s" % (
list(x), list(y), ls, w, label))
all_funcs = [plot_func, plot_func_varags]
def test_compiletime_checks():
"""test decorator invocations -> no replacements"""
def func(ax, x, y): pass
def func_args(ax, x, y, *args): pass
def func_kwargs(ax, x, y, **kwargs): pass
def func_no_ax_args(*args, **kwargs): pass
# this is ok
unpack_labeled_data(replace_names=["x", "y"])(func)
unpack_labeled_data(replace_names=["x", "y"])(func_kwargs)
# this has "enough" information to do all the replaces
unpack_labeled_data(replace_names=["x", "y"])(func_args)
# no positional_parameter_names but needed due to replaces
def f():
# z is unknown
unpack_labeled_data(replace_names=["x", "y", "z"])(func_args)
assert_raises(AssertionError, f)
def f():
unpack_labeled_data(replace_names=["x", "y"])(func_no_ax_args)
assert_raises(AssertionError, f)
# no replacements at all -> all ok...
unpack_labeled_data(replace_names=[], label_namer=None)(func)
unpack_labeled_data(replace_names=[], label_namer=None)(func_args)
unpack_labeled_data(replace_names=[], label_namer=None)(func_kwargs)
unpack_labeled_data(replace_names=[], label_namer=None)(func_no_ax_args)
# label namer is unknown
def f():
unpack_labeled_data(label_namer="z")(func)
assert_raises(AssertionError, f)
def f():
unpack_labeled_data(label_namer="z")(func_args)
assert_raises(AssertionError, f)
# but "ok-ish", if func has kwargs -> will show up at runtime :-(
unpack_labeled_data(label_namer="z")(func_kwargs)
unpack_labeled_data(label_namer="z")(func_no_ax_args)
def test_label_problems_at_runtime():
"""Tests for behaviour which would actually be nice to get rid of."""
@unpack_labeled_data(label_namer="z")
def func(*args, **kwargs):
pass
def f():
func(None, x="a", y="b")
# This is a programming mistake: the parameter which should add the
# label is not present in the function call. Unfortunately this was masked
# due to the **kwargs usage.
# This would be nice to handle as a compiletime check (see above...)
with assert_produces_warning(RuntimeWarning):
f()
def real_func(x, y):
pass
@unpack_labeled_data(label_namer="x")
def func(*args, **kwargs):
real_func(**kwargs)
def f():
func(None, x="a", y="b")
# This sets a label although the function can't handle it.
assert_raises(TypeError, f)
def test_function_call_without_data():
"""test without data -> no replacements"""
for func in all_funcs:
assert_equal(func(None, "x", "y"),
"x: ['x'], y: ['y'], ls: x, w: xyz, label: None")
assert_equal(func(None, x="x", y="y"),
"x: ['x'], y: ['y'], ls: x, w: xyz, label: None")
assert_equal(func(None, "x", "y", label=""),
"x: ['x'], y: ['y'], ls: x, w: xyz, label: ")
assert_equal(func(None, "x", "y", label="text"),
"x: ['x'], y: ['y'], ls: x, w: xyz, label: text")
assert_equal(func(None, x="x", y="y", label=""),
"x: ['x'], y: ['y'], ls: x, w: xyz, label: ")
assert_equal(func(None, x="x", y="y", label="text"),
"x: ['x'], y: ['y'], ls: x, w: xyz, label: text")
def test_function_call_with_dict_data():
"""Test with dict data -> label comes from the value of 'x' parameter """
data = {"a": [1, 2], "b": [8, 9], "w": "NOT"}
for func in all_funcs:
assert_equal(func(None, "a", "b", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: b")
assert_equal(func(None, x="a", y="b", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: b")
assert_equal(func(None, "a", "b", label="", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: ")
assert_equal(func(None, "a", "b", label="text", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: text")
assert_equal(func(None, x="a", y="b", label="", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: ")
assert_equal(func(None, x="a", y="b", label="text", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: text")
def test_function_call_with_dict_data_not_in_data():
"test for the case that one var is not in data -> half replaces, half kept"
data = {"a": [1, 2], "w": "NOT"}
for func in all_funcs:
assert_equal(func(None, "a", "b", data=data),
"x: [1, 2], y: ['b'], ls: x, w: xyz, label: b")
assert_equal(func(None, x="a", y="b", data=data),
"x: [1, 2], y: ['b'], ls: x, w: xyz, label: b")
assert_equal(func(None, "a", "b", label="", data=data),
"x: [1, 2], y: ['b'], ls: x, w: xyz, label: ")
assert_equal(func(None, "a", "b", label="text", data=data),
"x: [1, 2], y: ['b'], ls: x, w: xyz, label: text")
assert_equal(func(None, x="a", y="b", label="", data=data),
"x: [1, 2], y: ['b'], ls: x, w: xyz, label: ")
assert_equal(func(None, x="a", y="b", label="text", data=data),
"x: [1, 2], y: ['b'], ls: x, w: xyz, label: text")
def test_function_call_with_pandas_data():
"""test with pandas dataframe -> label comes from data["col"].name """
try:
import pandas as pd
except ImportError:
raise SkipTest("Pandas not installed")
data = pd.DataFrame({"a": [1, 2], "b": [8, 9], "w": ["NOT", "NOT"]})
for func in all_funcs:
assert_equal(func(None, "a", "b", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: b")
assert_equal(func(None, x="a", y="b", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: b")
assert_equal(func(None, "a", "b", label="", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: ")
assert_equal(func(None, "a", "b", label="text", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: text")
assert_equal(func(None, x="a", y="b", label="", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: ")
assert_equal(func(None, x="a", y="b", label="text", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: text")
def test_function_call_replace_all():
"""Test without a "replace_names" argument, all vars should be replaced"""
data = {"a": [1, 2], "b": [8, 9], "x": "xyz"}
@unpack_labeled_data(label_namer="y")
def func_replace_all(ax, x, y, ls="x", label=None, w="NOT"):
return "x: %s, y: %s, ls: %s, w: %s, label: %s" % (
list(x), list(y), ls, w, label)
assert_equal(func_replace_all(None, "a", "b", w="x", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: b")
assert_equal(func_replace_all(None, x="a", y="b", w="x", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: b")
assert_equal(func_replace_all(None, "a", "b", w="x", label="", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: ")
assert_equal(
func_replace_all(None, "a", "b", w="x", label="text", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: text")
assert_equal(
func_replace_all(None, x="a", y="b", w="x", label="", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: ")
assert_equal(
func_replace_all(None, x="a", y="b", w="x", label="text", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: text")
@unpack_labeled_data(label_namer="y")
def func_varags_replace_all(ax, *args, **kwargs):
all_args = [None, None, "x", None, "xyz"]
for i, v in enumerate(args):
all_args[i] = v
for i, k in enumerate(["x", "y", "ls", "label", "w"]):
if k in kwargs:
all_args[i] = kwargs[k]
x, y, ls, label, w = all_args
return "x: %s, y: %s, ls: %s, w: %s, label: %s" % (
list(x), list(y), ls, w, label)
# in the first case, we can't get a "y" argument,
# as we don't know the names of the *args
assert_equal(func_varags_replace_all(None, x="a", y="b", w="x", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: b")
assert_equal(
func_varags_replace_all(None, "a", "b", w="x", label="", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: ")
assert_equal(
func_varags_replace_all(None, "a", "b", w="x", label="text",
data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: text")
assert_equal(
func_varags_replace_all(None, x="a", y="b", w="x", label="",
data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: ")
assert_equal(
func_varags_replace_all(None, x="a", y="b", w="x", label="text",
data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: text")
with assert_produces_warning():
assert_equal(func_varags_replace_all(None, "a", "b", w="x", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: None")
def test_no_label_replacements():
"""Test with "label_namer=None" -> no label replacement at all"""
@unpack_labeled_data(replace_names=["x", "y"], label_namer=None)
def func_no_label(ax, x, y, ls="x", label=None, w="xyz"):
return "x: %s, y: %s, ls: %s, w: %s, label: %s" % (
list(x), list(y), ls, w, label)
data = {"a": [1, 2], "b": [8, 9], "w": "NOT"}
assert_equal(func_no_label(None, "a", "b", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: None")
assert_equal(func_no_label(None, x="a", y="b", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: None")
assert_equal(func_no_label(None, "a", "b", label="", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: ")
assert_equal(func_no_label(None, "a", "b", label="text", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: text")
def test_more_args_than_pos_parameter():
@unpack_labeled_data(replace_names=["x", "y"], label_namer="y")
def func(ax, x, y, z=1):
pass
data = {"a": [1, 2], "b": [8, 9], "w": "NOT"}
def f():
func(None, "a", "b", "z", "z", data=data)
assert_raises(RuntimeError, f)
def test_function_call_with_replace_all_args():
"""Test with a "replace_all_args" argument, all *args should be replaced"""
data = {"a": [1, 2], "b": [8, 9], "x": "xyz"}
def funcy(ax, *args, **kwargs):
all_args = [None, None, "x", None, "NOT"]
for i, v in enumerate(args):
all_args[i] = v
for i, k in enumerate(["x", "y", "ls", "label", "w"]):
if k in kwargs:
all_args[i] = kwargs[k]
x, y, ls, label, w = all_args
return "x: %s, y: %s, ls: %s, w: %s, label: %s" % (
list(x), list(y), ls, w, label)
func = unpack_labeled_data(replace_all_args=True, replace_names=["w"],
label_namer="y")(funcy)
assert_equal(func(None, "a", "b", w="x", label="", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: ")
assert_equal(func(None, "a", "b", w="x", label="text", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: text")
func2 = unpack_labeled_data(replace_all_args=True, replace_names=["w"],
label_namer="y",
positional_parameter_names=["x", "y", "ls",
"label", "w"])(
funcy)
assert_equal(func2(None, "a", "b", w="x", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: b")
assert_equal(func2(None, "a", "b", w="x", label="", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: ")
assert_equal(func2(None, "a", "b", w="x", label="text", data=data),
"x: [1, 2], y: [8, 9], ls: x, w: xyz, label: text")
def test_docstring_addition():
@unpack_labeled_data()
def funcy(ax, *args, **kwargs):
"""Funcy does nothing"""
pass
assert_regex(funcy.__doc__,
r".*All positional and all keyword arguments\.")
assert_not_regex(funcy.__doc__, r".*All positional arguments\.")
assert_not_regex(funcy.__doc__,
r".*All arguments with the following names: .*")
@unpack_labeled_data(replace_all_args=True, replace_names=[])
def funcy(ax, x, y, z, bar=None):
"""Funcy does nothing"""
pass
assert_regex(funcy.__doc__, r".*All positional arguments\.")
assert_not_regex(funcy.__doc__,
r".*All positional and all keyword arguments\.")
assert_not_regex(funcy.__doc__,
r".*All arguments with the following names: .*")
@unpack_labeled_data(replace_all_args=True, replace_names=["bar"])
def funcy(ax, x, y, z, bar=None):
"""Funcy does nothing"""
pass
assert_regex(funcy.__doc__, r".*All positional arguments\.")
assert_regex(funcy.__doc__,
r".*All arguments with the following names: 'bar'\.")
assert_not_regex(funcy.__doc__,
r".*All positional and all keyword arguments\.")
@unpack_labeled_data(replace_names=["x", "bar"])
def funcy(ax, x, y, z, bar=None):
"""Funcy does nothing"""
pass
# lists can print in any order, so test for both x,bar and bar,x
assert_regex(funcy.__doc__,
r".*All arguments with the following names: '.*', '.*'\.")
assert_regex(funcy.__doc__, r".*'x'.*")
assert_regex(funcy.__doc__, r".*'bar'.*")
assert_not_regex(funcy.__doc__,
r".*All positional and all keyword arguments\.")
assert_not_regex(funcy.__doc__, r".*All positional arguments\.")
def test_positional_parameter_names_as_function():
# Also test the _plot_arg_replacer for plot...
from matplotlib.axes._axes import _plot_args_replacer
@unpack_labeled_data(replace_names=["x", "y"],
positional_parameter_names=_plot_args_replacer)
def funcy(ax, *args, **kwargs):
return "{args} | {kwargs}".format(args=args, kwargs=kwargs)
# the normal case...
data = {"x": "X", "y1": "Y"}
assert_equal(funcy(None, "x", "y1", data=data),
"('X', 'Y') | {}")
assert_equal(funcy(None, "x", "y1", "c", data=data),
"('X', 'Y', 'c') | {}")
# no arbitrary long args with data
def f():
assert_equal(funcy(None, "x", "y", "c", "x", "y", "x", "y", data=data),
"('X', 'Y', 'c', 'X', 'Y', 'X', 'Y') | {}")
assert_raises(ValueError, f)
# In the two arg case, if a valid color spec is in data, we warn but use
# it as data...
data = {"x": "X", "y": "Y", "ro": "!!"}
with assert_produces_warning(RuntimeWarning):
assert_equal(funcy(None, "y", "ro", data=data),
"('Y', '!!') | {}")
|
v-iam/azure-sdk-for-python | refs/heads/master | azure-mgmt-network/azure/mgmt/network/v2017_06_01/models/connectivity_issue.py | 2 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ConnectivityIssue(Model):
"""Information about an issue encountered in the process of checking for
connectivity.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar origin: The origin of the issue. Possible values include: 'Local',
'Inbound', 'Outbound'
:vartype origin: str or :class:`Origin
<azure.mgmt.network.v2017_06_01.models.Origin>`
:ivar severity: The severity of the issue. Possible values include:
'Error', 'Warning'
:vartype severity: str or :class:`Severity
<azure.mgmt.network.v2017_06_01.models.Severity>`
:ivar type: The type of issue. Possible values include: 'Unknown',
'AgentStopped', 'GuestFirewall', 'DnsResolution', 'SocketBind',
'NetworkSecurityRule', 'UserDefinedRoute', 'PortThrottled', 'Platform'
:vartype type: str or :class:`IssueType
<azure.mgmt.network.v2017_06_01.models.IssueType>`
:ivar context: Provides additional context on the issue.
:vartype context: list of dict
"""
_validation = {
'origin': {'readonly': True},
'severity': {'readonly': True},
'type': {'readonly': True},
'context': {'readonly': True},
}
_attribute_map = {
'origin': {'key': 'origin', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'context': {'key': 'context', 'type': '[{str}]'},
}
def __init__(self):
self.origin = None
self.severity = None
self.type = None
self.context = None
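# Instances are populated by deserializing service responses (e.g. the result
# of a connectivity check); all fields are read-only on the client side.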
|
DSLituiev/scikit-learn | refs/heads/master | sklearn/neural_network/_base.py | 29 | """Utilities for the neural network modules
"""
# Author: Issam H. Laradji <issam.laradji@gmail.com>
# Licence: BSD 3 clause
import numpy as np
from ..utils.fixes import expit as logistic_sigmoid
def identity(X):
"""Simply return the input array.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Data, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Same as the input data.
"""
return X
def logistic(X):
"""Compute the logistic function inplace.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data.
Returns
-------
X_new : {array-like, sparse matrix}, shape (n_samples, n_features)
The transformed data.
"""
return logistic_sigmoid(X, out=X)
def tanh(X):
"""Compute the hyperbolic tan function inplace.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data.
Returns
-------
X_new : {array-like, sparse matrix}, shape (n_samples, n_features)
The transformed data.
"""
return np.tanh(X, out=X)
def relu(X):
"""Compute the rectified linear unit function inplace.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data.
Returns
-------
X_new : {array-like, sparse matrix}, shape (n_samples, n_features)
The transformed data.
"""
np.clip(X, 0, np.finfo(X.dtype).max, out=X)
return X
def softmax(X):
"""Compute the K-way softmax function inplace.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data.
Returns
-------
X_new : {array-like, sparse matrix}, shape (n_samples, n_features)
The transformed data.
"""
tmp = X - X.max(axis=1)[:, np.newaxis]
np.exp(tmp, out=X)
X /= X.sum(axis=1)[:, np.newaxis]
return X
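# Example: softmax turns each row into a probability vector (in place), e.g.
#
#     >>> softmax(np.array([[1., 2., 3.]]))  # rows sum to 1
#     # ~ [[0.090, 0.245, 0.665]]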
ACTIVATIONS = {'identity': identity, 'tanh': tanh, 'logistic': logistic,
'relu': relu, 'softmax': softmax}
def inplace_logistic_derivative(Z):
"""Compute the derivative of the logistic function given output value
from logistic function
It exploits the fact that the derivative is a simple function of the output
value from logistic function
Parameters
----------
Z : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data which is output from logistic function
Returns
-------
Z_new : {array-like, sparse matrix}, shape (n_samples, n_features)
The transformed data.
"""
return Z * (1 - Z)
def inplace_tanh_derivative(Z):
"""Compute the derivative of the hyperbolic tan function given output value
from hyperbolic tan
It exploits the fact that the derivative is a simple function of the output
value from hyperbolic tan
Parameters
----------
Z : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data which is output from hyperbolic tan function
Returns
-------
Z_new : {array-like, sparse matrix}, shape (n_samples, n_features)
The transformed data.
"""
return 1 - (Z ** 2)
def inplace_relu_derivative(Z):
"""Compute the derivative of the rectified linear unit function given output
value from relu
Parameters
----------
Z : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data which is output from some relu
Returns
-------
Z_new : {array-like, sparse matrix}, shape (n_samples, n_features)
The transformed data.
"""
return (Z > 0).astype(Z.dtype)
DERIVATIVES = {'tanh': inplace_tanh_derivative,
'logistic': inplace_logistic_derivative,
'relu': inplace_relu_derivative}
def squared_loss(y_true, y_pred):
"""Compute the squared loss for regression.
Parameters
----------
y_true : array-like or label indicator matrix
Ground truth (correct) values.
y_pred : array-like or label indicator matrix
Predicted values, as returned by a regression estimator.
Returns
-------
loss : float
The degree to which the samples are correctly predicted.
"""
return ((y_true - y_pred) ** 2).mean() / 2
def log_loss(y_true, y_prob):
"""Compute Logistic loss for classification.
Parameters
----------
y_true : array-like or label indicator matrix
Ground truth (correct) labels.
y_prob : array-like of float, shape = (n_samples, n_classes)
Predicted probabilities, as returned by a classifier's
predict_proba method.
Returns
-------
loss : float
The degree to which the samples are correctly predicted.
"""
y_prob = np.clip(y_prob, 1e-10, 1 - 1e-10)
if y_prob.shape[1] == 1:
y_prob = np.append(1 - y_prob, y_prob, axis=1)
if y_true.shape[1] == 1:
y_true = np.append(1 - y_true, y_true, axis=1)
return -np.sum(y_true * np.log(y_prob)) / y_prob.shape[0]
def binary_log_loss(y_true, y_prob):
"""Compute binary logistic loss for classification.
This is identical to log_loss in binary classification case,
but is kept for its use in multilabel case.
Parameters
----------
y_true : array-like or label indicator matrix
Ground truth (correct) labels.
y_prob : array-like of float, shape = (n_samples, n_classes)
Predicted probabilities, as returned by a classifier's
predict_proba method.
Returns
-------
loss : float
The degree to which the samples are correctly predicted.
"""
y_prob = np.clip(y_prob, 1e-10, 1 - 1e-10)
return -np.sum(y_true * np.log(y_prob) +
(1 - y_true) * np.log(1 - y_prob)) / y_prob.shape[0]
LOSS_FUNCTIONS = {'squared_loss': squared_loss, 'log_loss': log_loss,
'binary_log_loss': binary_log_loss}
|
omni5cience/django-inlineformfield | refs/heads/master | .tox/py27/lib/python2.7/site-packages/django/test/simple.py | 51 | """
This module is pending deprecation as of Django 1.6 and will be removed in
version 1.8.
"""
from importlib import import_module
import json
import re
import unittest as real_unittest
import warnings
from django.apps import apps
from django.test import _doctest as doctest
from django.test import runner
from django.test.utils import compare_xml, strip_quotes
# django.utils.unittest is deprecated, but so is django.test.simple,
# and the latter will be removed before the former.
from django.utils import unittest
from django.utils.deprecation import RemovedInDjango18Warning
from django.utils.module_loading import module_has_submodule
__all__ = ('DjangoTestSuiteRunner',)
warnings.warn(
"The django.test.simple module and DjangoTestSuiteRunner are deprecated; "
"use django.test.runner.DiscoverRunner instead.",
RemovedInDjango18Warning)
# The module name for tests outside models.py
TEST_MODULE = 'tests'
normalize_long_ints = lambda s: re.sub(r'(?<![\w])(\d+)L(?![\w])', '\\1', s)
normalize_decimals = lambda s: re.sub(r"Decimal\('(\d+(\.\d*)?)'\)",
lambda m: "Decimal(\"%s\")" % m.groups()[0], s)
class OutputChecker(doctest.OutputChecker):
def check_output(self, want, got, optionflags):
"""
The entry method for doctest output checking. Defers to a sequence of
child checkers
"""
checks = (self.check_output_default,
self.check_output_numeric,
self.check_output_xml,
self.check_output_json)
for check in checks:
if check(want, got, optionflags):
return True
return False
def check_output_default(self, want, got, optionflags):
"""
The default comparator provided by doctest - not perfect, but good for
most purposes
"""
return doctest.OutputChecker.check_output(self, want, got, optionflags)
def check_output_numeric(self, want, got, optionflags):
"""Doctest does an exact string comparison of output, which means that
some numerically equivalent values aren't equal. This check normalizes
* long integers (22L) so that they equal normal integers (22).
* Decimals so that they are comparable, regardless of the change
made to __repr__ in Python 2.6.
"""
return doctest.OutputChecker.check_output(self,
normalize_decimals(normalize_long_ints(want)),
normalize_decimals(normalize_long_ints(got)),
optionflags)
def check_output_xml(self, want, got, optionsflags):
try:
return compare_xml(want, got)
except Exception:
return False
def check_output_json(self, want, got, optionsflags):
"""
Tries to compare want and got as if they were JSON-encoded data
"""
want, got = strip_quotes(want, got)
try:
want_json = json.loads(want)
got_json = json.loads(got)
except Exception:
return False
return want_json == got_json
class DocTestRunner(doctest.DocTestRunner):
def __init__(self, *args, **kwargs):
doctest.DocTestRunner.__init__(self, *args, **kwargs)
self.optionflags = doctest.ELLIPSIS
doctestOutputChecker = OutputChecker()
def get_tests(app_config):
try:
test_module = import_module('%s.%s' % (app_config.name, TEST_MODULE))
except ImportError:
# Couldn't import tests.py. Was it due to a missing file, or
# due to an import error in a tests.py that actually exists?
if not module_has_submodule(app_config.module, TEST_MODULE):
test_module = None
else:
# The module exists, so there must be an import error in the test
# module itself.
raise
return test_module
def make_doctest(module):
return doctest.DocTestSuite(module,
checker=doctestOutputChecker,
runner=DocTestRunner)
def build_suite(app_config):
"""
Create a complete Django test suite for the provided application module.
"""
suite = unittest.TestSuite()
# Load unit and doctests in the models.py module. If module has
# a suite() method, use it. Otherwise build the test suite ourselves.
models_module = app_config.models_module
if models_module:
if hasattr(models_module, 'suite'):
suite.addTest(models_module.suite())
else:
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(
models_module))
try:
suite.addTest(make_doctest(models_module))
except ValueError:
# No doc tests in models.py
pass
# Check to see if a separate 'tests' module exists parallel to the
# models module
tests_module = get_tests(app_config)
if tests_module:
# Load unit and doctests in the tests.py module. If module has
# a suite() method, use it. Otherwise build the test suite ourselves.
if hasattr(tests_module, 'suite'):
suite.addTest(tests_module.suite())
else:
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(
tests_module))
try:
suite.addTest(make_doctest(tests_module))
except ValueError:
# No doc tests in tests.py
pass
return suite
def build_test(label):
"""
Construct a test case with the specified label. Label should be of the
form app_label.TestClass or app_label.TestClass.test_method. Returns an
instantiated test or test suite corresponding to the label provided.
"""
parts = label.split('.')
if len(parts) < 2 or len(parts) > 3:
raise ValueError("Test label '%s' should be of the form app.TestCase "
"or app.TestCase.test_method" % label)
app_config = apps.get_app_config(parts[0])
models_module = app_config.models_module
tests_module = get_tests(app_config)
test_modules = []
if models_module:
test_modules.append(models_module)
if tests_module:
test_modules.append(tests_module)
TestClass = None
for module in test_modules:
TestClass = getattr(module, parts[1], None)
if TestClass is not None:
break
try:
if issubclass(TestClass, (unittest.TestCase, real_unittest.TestCase)):
if len(parts) == 2: # label is app.TestClass
try:
return unittest.TestLoader().loadTestsFromTestCase(
TestClass)
except TypeError:
raise ValueError(
"Test label '%s' does not refer to a test class"
% label)
else: # label is app.TestClass.test_method
return TestClass(parts[2])
except TypeError:
# TestClass isn't a TestClass - it must be a method or normal class
pass
#
# If there isn't a TestCase, look for a doctest that matches
#
tests = []
for module in test_modules:
try:
doctests = make_doctest(module)
# Now iterate over the suite, looking for doctests whose name
# matches the pattern that was given
for test in doctests:
if test._dt_test.name in (
'%s.%s' % (module.__name__, '.'.join(parts[1:])),
'%s.__test__.%s' % (
module.__name__, '.'.join(parts[1:]))):
tests.append(test)
except ValueError:
# No doctests found.
pass
# If no tests were found, then we were given a bad test label.
if not tests:
raise ValueError("Test label '%s' does not refer to a test" % label)
# Construct a suite out of the tests that matched.
return unittest.TestSuite(tests)
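# Example labels (hypothetical app and test names):
#
#     build_test('polls.PollTests')             # whole TestCase
#     build_test('polls.PollTests.test_index')  # single test method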
class DjangoTestSuiteRunner(runner.DiscoverRunner):
def build_suite(self, test_labels, extra_tests=None, **kwargs):
suite = unittest.TestSuite()
if test_labels:
for label in test_labels:
if '.' in label:
suite.addTest(build_test(label))
else:
app_config = apps.get_app_config(label)
suite.addTest(build_suite(app_config))
else:
for app_config in apps.get_app_configs():
suite.addTest(build_suite(app_config))
if extra_tests:
for test in extra_tests:
suite.addTest(test)
return runner.reorder_suite(suite, (unittest.TestCase,))
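# Minimal driver sketch (a sketch only: assumes a configured Django settings
# module, and the labels below are hypothetical):
def _example_run_labels():
    test_runner = DjangoTestSuiteRunner(verbosity=1)
    suite = test_runner.build_suite(['polls', 'polls.PollTests.test_save'])
    return test_runner.run_suite(suite)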
|
jnishi/chainer | refs/heads/master | tests/chainer_tests/distributions_tests/test_one_hot_categorical.py | 2 | import numpy
from chainer import cuda
from chainer import distributions
from chainer import testing
def _numpy_stack(xs, axis):
try:
return numpy.stack(xs, axis)
except AttributeError:
# in case numpy<1.10, which does not have numpy.stack
return numpy.concatenate(
[numpy.expand_dims(x, axis) for x in xs],
axis=axis)
def _numpy_random_multinomial(n, pvals, size):
pvals = pvals.astype(numpy.float64)
return numpy.random.multinomial(n, pvals, size)
@testing.parameterize(*testing.product({
'shape': [(2, 3), ()],
'is_variable': [True, False],
'sample_shape': [(3, 2), ()],
'k': [3],
}))
@testing.fix_random()
@testing.with_requires('scipy>=0.19')
class TestOneHotCategorical(testing.distribution_unittest):
scipy_onebyone = True
def setUp_configure(self):
from scipy import stats
self.dist = distributions.OneHotCategorical
self.scipy_dist = stats.multinomial
self.test_targets = set([
"batch_shape", "event_shape", "mean", "sample"])
n = numpy.ones(self.shape).astype(numpy.int32)
p = numpy.random.normal(
size=self.shape+(self.k,)).astype(numpy.float32)
p = numpy.exp(p)
p /= p.sum(axis=-1, keepdims=True)
self.n, self.p = n, p
self.params = {"p": p}
self.scipy_params = {"n": n, "p": p}
self.continuous = False
self.event_shape = (self.k,)
def sample_for_test(self):
obo_p = self.p.reshape(-1, self.k)
obo_n = self.n.reshape(-1)
smp = [_numpy_random_multinomial(one_n, one_p, size=self.sample_shape)
for one_n, one_p in zip(obo_n, obo_p)]
smp = _numpy_stack(smp, axis=-2)
smp = smp.reshape(self.sample_shape + self.shape + (self.k,))
return smp
def check_log_prob(self, is_gpu):
smp = self.sample_for_test()
if is_gpu:
log_prob1 = self.gpu_dist.log_prob(cuda.to_gpu(smp)).data
else:
log_prob1 = self.cpu_dist.log_prob(smp).data
onebyone_smp = smp.reshape(self.sample_shape + (-1,) + (self.k,))
onebyone_smp = numpy.rollaxis(onebyone_smp, -2, 0)
onebyone_smp = onebyone_smp.reshape(
(-1,) + self.sample_shape + (self.k,))
log_prob2 = []
for one_params, one_smp in zip(
self.scipy_onebyone_params_iter(), onebyone_smp):
log_prob2.append(self.scipy_dist.logpmf(one_smp, **one_params))
log_prob2 = _numpy_stack(log_prob2, axis=-1)
log_prob2 = log_prob2.reshape(self.sample_shape + self.shape)
testing.assert_allclose(log_prob1, log_prob2)
def test_log_prob_cpu(self):
self.check_log_prob(False)
@testing.attr.gpu
def test_log_prob_gpu(self):
self.check_log_prob(True)
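# Standalone usage sketch of the distribution under test (a sketch only; the
# class count and sample shape are illustrative):
def _example_one_hot_categorical():
    p = numpy.full((3,), 1.0 / 3.0, dtype=numpy.float32)  # uniform over 3 classes
    d = distributions.OneHotCategorical(p=p)
    smp = d.sample(sample_shape=(5,))  # five one-hot draws, shape (5, 3)
    return d.log_prob(smp)  # each entry should be log(1/3)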
testing.run_module(__name__, __file__)
|
samfpetersen/gnuradio | refs/heads/master | gr-digital/python/digital/ofdm_txrx.py | 27 | #
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
OFDM Transmitter / Receiver hier blocks.
For simple configurations, no need to connect all the relevant OFDM blocks
to form an OFDM Tx/Rx--simply use these.
"""
# Reminder: All frequency-domain stuff is in shifted form, i.e. DC carrier
# in the middle!
import numpy
from gnuradio import gr
import digital_swig as digital
from utils import tagged_streams
try:
# This will work when feature #505 is added.
from gnuradio import fft
from gnuradio import blocks
from gnuradio import analog
except ImportError:
# Until then this will work.
import fft_swig as fft
import blocks_swig as blocks
import analog_swig as analog
_def_fft_len = 64
_def_cp_len = 16
_def_frame_length_tag_key = "frame_length"
_def_packet_length_tag_key = "packet_length"
_def_packet_num_tag_key = "packet_num"
# Data and pilot carriers are same as in 802.11a
_def_occupied_carriers = (range(-26, -21) + range(-20, -7) + range(-6, 0) + range(1, 7) + range(8, 21) + range(22, 27),)
_def_pilot_carriers=((-21, -7, 7, 21,),)
_pilot_sym_scramble_seq = (
1,1,1,1, -1,-1,-1,1, -1,-1,-1,-1, 1,1,-1,1, -1,-1,1,1, -1,1,1,-1, 1,1,1,1, 1,1,-1,1,
1,1,-1,1, 1,-1,-1,1, 1,1,-1,1, -1,-1,-1,1, -1,1,-1,-1, 1,-1,-1,1, 1,1,1,1, -1,-1,1,1,
-1,-1,1,-1, 1,-1,1,1, -1,-1,-1,1, 1,-1,-1,-1, -1,1,-1,-1, 1,-1,1,1, 1,1,-1,1, -1,1,-1,1,
-1,-1,-1,-1, -1,1,-1,1, 1,-1,1,-1, 1,1,1,-1, -1,1,-1,-1, -1,1,1,1, -1,-1,-1,-1, -1,-1,-1
)
_def_pilot_symbols= tuple([(x, x, x, -x) for x in _pilot_sym_scramble_seq])
_seq_seed = 42
def _get_active_carriers(fft_len, occupied_carriers, pilot_carriers):
""" Returns a list of all carriers that at some point carry data or pilots. """
active_carriers = list()
for carrier in list(occupied_carriers[0]) + list(pilot_carriers[0]):
if carrier < 0:
carrier += fft_len
active_carriers.append(carrier)
return active_carriers
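# For example (illustrative numbers): with fft_len=8, occupied carrier -2
# wraps to FFT bin 6, so _get_active_carriers(8, ((-2, 1),), ((3,),))
# returns [6, 1, 3].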
def _make_sync_word1(fft_len, occupied_carriers, pilot_carriers):
""" Creates a random sync sequence for fine frequency offset and timing
estimation. This is the first of typically two sync preamble symbols
for the Schmidl & Cox sync algorithm.
    The relevant feature of this symbol is that every second sub-carrier
    is zero. In the time domain, this results in two identical halves of
    the OFDM symbol.
Symbols are always BPSK symbols. Carriers are scaled by sqrt(2) to keep
total energy constant.
Carrier 0 (DC carrier) is always zero. If used, carrier 1 is non-zero.
This means the sync algorithm has to check on odd carriers!
"""
active_carriers = _get_active_carriers(fft_len, occupied_carriers, pilot_carriers)
numpy.random.seed(_seq_seed)
bpsk = {0: numpy.sqrt(2), 1: -numpy.sqrt(2)}
sw1 = [bpsk[numpy.random.randint(2)] if x in active_carriers and x % 2 else 0 for x in range(fft_len)]
return numpy.fft.fftshift(sw1)
def _make_sync_word2(fft_len, occupied_carriers, pilot_carriers):
""" Creates a random sync sequence for coarse frequency offset and channel
estimation. This is the second of typically two sync preamble symbols
for the Schmidl & Cox sync algorithm.
Symbols are always BPSK symbols.
"""
active_carriers = _get_active_carriers(fft_len, occupied_carriers, pilot_carriers)
numpy.random.seed(_seq_seed)
bpsk = {0: 1, 1: -1}
sw2 = [bpsk[numpy.random.randint(2)] if x in active_carriers else 0 for x in range(fft_len)]
sw2[0] = 0j
return numpy.fft.fftshift(sw2)
def _get_constellation(bps):
""" Returns a modulator block for a given number of bits per symbol """
constellation = {
1: digital.constellation_bpsk(),
2: digital.constellation_qpsk(),
3: digital.constellation_8psk()
}
try:
return constellation[bps]
except KeyError:
print 'Modulation not supported.'
exit(1)
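# For example, _get_constellation(2) returns the QPSK constellation object,
# while an unsupported value such as 4 (there is no 16-QAM entry) prints an
# error and exits.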
class ofdm_tx(gr.hier_block2):
"""Hierarchical block for OFDM modulation.
The input is a byte stream (unsigned char) and the
output is the complex modulated signal at baseband.
Args:
fft_len: The length of FFT (integer).
cp_len: The length of cyclic prefix in total samples (integer).
packet_length_tag_key: The name of the tag giving packet length at the input.
occupied_carriers: A vector of vectors describing which OFDM carriers are occupied.
pilot_carriers: A vector of vectors describing which OFDM carriers are occupied with pilot symbols.
pilot_symbols: The pilot symbols.
bps_header: Bits per symbol (header).
bps_payload: Bits per symbol (payload).
sync_word1: The first sync preamble symbol. This has to be with
| zeros on alternating carriers. Used for fine and
| coarse frequency offset and timing estimation.
sync_word2: The second sync preamble symbol. This has to be filled
| entirely. Also used for coarse frequency offset and
| channel estimation.
rolloff: The rolloff length in samples. Must be smaller than the CP.
debug_log: Write output into log files (Warning: creates lots of data!)
scramble_bits: Activates the scramblers (set this to True unless debugging)
"""
def __init__(self, fft_len=_def_fft_len, cp_len=_def_cp_len,
packet_length_tag_key=_def_packet_length_tag_key,
occupied_carriers=_def_occupied_carriers,
pilot_carriers=_def_pilot_carriers,
pilot_symbols=_def_pilot_symbols,
bps_header=1,
bps_payload=1,
sync_word1=None,
sync_word2=None,
rolloff=0,
debug_log=False,
scramble_bits=False
):
gr.hier_block2.__init__(self, "ofdm_tx",
gr.io_signature(1, 1, gr.sizeof_char),
gr.io_signature(1, 1, gr.sizeof_gr_complex))
### Param init / sanity check ########################################
self.fft_len = fft_len
self.cp_len = cp_len
self.packet_length_tag_key = packet_length_tag_key
self.occupied_carriers = occupied_carriers
self.pilot_carriers = pilot_carriers
self.pilot_symbols = pilot_symbols
self.bps_header = bps_header
self.bps_payload = bps_payload
self.sync_word1 = sync_word1
if sync_word1 is None:
self.sync_word1 = _make_sync_word1(fft_len, occupied_carriers, pilot_carriers)
else:
if len(sync_word1) != self.fft_len:
raise ValueError("Length of sync sequence(s) must be FFT length.")
self.sync_words = [self.sync_word1,]
if sync_word2 is None:
self.sync_word2 = _make_sync_word2(fft_len, occupied_carriers, pilot_carriers)
else:
self.sync_word2 = sync_word2
if len(self.sync_word2):
if len(self.sync_word2) != fft_len:
raise ValueError("Length of sync sequence(s) must be FFT length.")
self.sync_word2 = list(self.sync_word2)
self.sync_words.append(self.sync_word2)
if scramble_bits:
self.scramble_seed = 0x7f
else:
self.scramble_seed = 0x00 # We deactivate the scrambler by init'ing it with zeros
### Header modulation ################################################
crc = digital.crc32_bb(False, self.packet_length_tag_key)
header_constellation = _get_constellation(bps_header)
header_mod = digital.chunks_to_symbols_bc(header_constellation.points())
formatter_object = digital.packet_header_ofdm(
occupied_carriers=occupied_carriers, n_syms=1,
bits_per_header_sym=self.bps_header,
bits_per_payload_sym=self.bps_payload,
scramble_header=scramble_bits
)
header_gen = digital.packet_headergenerator_bb(formatter_object.base(), self.packet_length_tag_key)
header_payload_mux = blocks.tagged_stream_mux(
itemsize=gr.sizeof_gr_complex*1,
lengthtagname=self.packet_length_tag_key,
tag_preserve_head_pos=1 # Head tags on the payload stream stay on the head
)
self.connect(
self,
crc,
header_gen,
header_mod,
(header_payload_mux, 0)
)
if debug_log:
self.connect(header_gen, blocks.file_sink(1, 'tx-hdr.dat'))
### Payload modulation ###############################################
payload_constellation = _get_constellation(bps_payload)
payload_mod = digital.chunks_to_symbols_bc(payload_constellation.points())
payload_scrambler = digital.additive_scrambler_bb(
0x8a,
self.scramble_seed,
7,
0, # Don't reset after fixed length (let the reset tag do that)
bits_per_byte=8, # This is before unpacking
reset_tag_key=self.packet_length_tag_key
)
payload_unpack = blocks.repack_bits_bb(
8, # Unpack 8 bits per byte
bps_payload,
self.packet_length_tag_key
)
self.connect(
crc,
payload_scrambler,
payload_unpack,
payload_mod,
(header_payload_mux, 1)
)
### Create OFDM frame ################################################
allocator = digital.ofdm_carrier_allocator_cvc(
self.fft_len,
occupied_carriers=self.occupied_carriers,
pilot_carriers=self.pilot_carriers,
pilot_symbols=self.pilot_symbols,
sync_words=self.sync_words,
len_tag_key=self.packet_length_tag_key
)
ffter = fft.fft_vcc(
self.fft_len,
False, # Inverse FFT
(), # No window
True # Shift
)
cyclic_prefixer = digital.ofdm_cyclic_prefixer(
self.fft_len,
self.fft_len+self.cp_len,
rolloff,
self.packet_length_tag_key
)
self.connect(header_payload_mux, allocator, ffter, cyclic_prefixer, self)
if debug_log:
self.connect(allocator, blocks.file_sink(gr.sizeof_gr_complex * fft_len, 'tx-post-allocator.dat'))
self.connect(cyclic_prefixer, blocks.file_sink(gr.sizeof_gr_complex, 'tx-signal.dat'))
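# Minimal transmit-side sketch (a sketch only, assuming GNU Radio 3.7 with
# gr-digital installed; the 64-byte packet size and ramp data are illustrative):
def _example_tx_flowgraph():
    tb = gr.top_block()
    src = blocks.vector_source_b(range(64), False)
    # Tag the byte stream so ofdm_tx sees one 64-byte packet.
    tagger = blocks.stream_to_tagged_stream(
        gr.sizeof_char, 1, 64, _def_packet_length_tag_key)
    tx = ofdm_tx(fft_len=64, cp_len=16,
                 packet_length_tag_key=_def_packet_length_tag_key)
    sink = blocks.vector_sink_c()
    tb.connect(src, tagger, tx, sink)
    tb.run()
    return sink.data()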
class ofdm_rx(gr.hier_block2):
"""Hierarchical block for OFDM demodulation.
The input is a complex baseband signal (e.g. from a UHD source).
The detected packets are output as a stream of packed bits on the output.
Args:
fft_len: The length of FFT (integer).
cp_len: The length of cyclic prefix in total samples (integer).
frame_length_tag_key: Used internally to tag the length of the OFDM frame.
packet_length_tag_key: The name of the tag giving packet length at the input.
occupied_carriers: A vector of vectors describing which OFDM carriers are occupied.
pilot_carriers: A vector of vectors describing which OFDM carriers are occupied with pilot symbols.
pilot_symbols: The pilot symbols.
bps_header: Bits per symbol (header).
bps_payload: Bits per symbol (payload).
sync_word1: The first sync preamble symbol. This has to be with
| zeros on alternating carriers. Used for fine and
| coarse frequency offset and timing estimation.
sync_word2: The second sync preamble symbol. This has to be filled
| entirely. Also used for coarse frequency offset and
| channel estimation.
"""
def __init__(self, fft_len=_def_fft_len, cp_len=_def_cp_len,
frame_length_tag_key=_def_frame_length_tag_key,
packet_length_tag_key=_def_packet_length_tag_key,
packet_num_tag_key=_def_packet_num_tag_key,
occupied_carriers=_def_occupied_carriers,
pilot_carriers=_def_pilot_carriers,
pilot_symbols=_def_pilot_symbols,
bps_header=1,
bps_payload=1,
sync_word1=None,
sync_word2=None,
debug_log=False,
scramble_bits=False
):
gr.hier_block2.__init__(self, "ofdm_rx",
gr.io_signature(1, 1, gr.sizeof_gr_complex),
gr.io_signature(1, 1, gr.sizeof_char))
### Param init / sanity check ########################################
self.fft_len = fft_len
self.cp_len = cp_len
self.frame_length_tag_key = frame_length_tag_key
self.packet_length_tag_key = packet_length_tag_key
self.occupied_carriers = occupied_carriers
self.bps_header = bps_header
self.bps_payload = bps_payload
n_sync_words = 1
if sync_word1 is None:
self.sync_word1 = _make_sync_word1(fft_len, occupied_carriers, pilot_carriers)
else:
if len(sync_word1) != self.fft_len:
raise ValueError("Length of sync sequence(s) must be FFT length.")
self.sync_word1 = sync_word1
self.sync_word2 = ()
if sync_word2 is None:
self.sync_word2 = _make_sync_word2(fft_len, occupied_carriers, pilot_carriers)
n_sync_words = 2
elif len(sync_word2):
if len(sync_word2) != fft_len:
raise ValueError("Length of sync sequence(s) must be FFT length.")
self.sync_word2 = sync_word2
n_sync_words = 2
if scramble_bits:
self.scramble_seed = 0x7f
else:
self.scramble_seed = 0x00 # We deactivate the scrambler by init'ing it with zeros
### Sync ############################################################
sync_detect = digital.ofdm_sync_sc_cfb(fft_len, cp_len)
delay = blocks.delay(gr.sizeof_gr_complex, fft_len+cp_len)
oscillator = analog.frequency_modulator_fc(-2.0 / fft_len)
mixer = blocks.multiply_cc()
hpd = digital.header_payload_demux(
n_sync_words+1, # Number of OFDM symbols before payload (sync + 1 sym header)
fft_len, cp_len, # FFT length, guard interval
frame_length_tag_key, # Frame length tag key
"", # We're not using trigger tags
True # One output item is one OFDM symbol (False would output complex scalars)
)
self.connect(self, sync_detect)
self.connect(self, delay, (mixer, 0), (hpd, 0))
self.connect((sync_detect, 0), oscillator, (mixer, 1))
self.connect((sync_detect, 1), (hpd, 1))
if debug_log:
self.connect((sync_detect, 0), blocks.file_sink(gr.sizeof_float, 'freq-offset.dat'))
self.connect((sync_detect, 1), blocks.file_sink(gr.sizeof_char, 'sync-detect.dat'))
### Header demodulation ##############################################
header_fft = fft.fft_vcc(self.fft_len, True, (), True)
chanest = digital.ofdm_chanest_vcvc(self.sync_word1, self.sync_word2, 1)
header_constellation = _get_constellation(bps_header)
header_equalizer = digital.ofdm_equalizer_simpledfe(
fft_len,
header_constellation.base(),
occupied_carriers,
pilot_carriers,
pilot_symbols,
symbols_skipped=0,
)
header_eq = digital.ofdm_frame_equalizer_vcvc(
header_equalizer.base(),
cp_len,
self.frame_length_tag_key,
True,
1 # Header is 1 symbol long
)
header_serializer = digital.ofdm_serializer_vcc(
fft_len, occupied_carriers,
self.frame_length_tag_key
)
header_demod = digital.constellation_decoder_cb(header_constellation.base())
header_formatter = digital.packet_header_ofdm(
occupied_carriers, 1,
packet_length_tag_key,
frame_length_tag_key,
packet_num_tag_key,
bps_header,
bps_payload,
scramble_header=scramble_bits
)
header_parser = digital.packet_headerparser_b(header_formatter.formatter())
self.connect(
(hpd, 0),
header_fft,
chanest,
header_eq,
header_serializer,
header_demod,
header_parser
)
self.msg_connect(header_parser, "header_data", hpd, "header_data")
if debug_log:
self.connect((chanest, 1), blocks.file_sink(gr.sizeof_gr_complex * fft_len, 'channel-estimate.dat'))
self.connect((chanest, 0), blocks.file_sink(gr.sizeof_gr_complex * fft_len, 'post-hdr-chanest.dat'))
self.connect((chanest, 0), blocks.tag_debug(gr.sizeof_gr_complex * fft_len, 'post-hdr-chanest'))
self.connect(header_eq, blocks.file_sink(gr.sizeof_gr_complex * fft_len, 'post-hdr-eq.dat'))
self.connect(header_serializer, blocks.file_sink(gr.sizeof_gr_complex, 'post-hdr-serializer.dat'))
            self.connect(header_demod, blocks.file_sink(1, 'post-hdr-demod.dat'))
### Payload demod ####################################################
payload_fft = fft.fft_vcc(self.fft_len, True, (), True)
payload_constellation = _get_constellation(bps_payload)
payload_equalizer = digital.ofdm_equalizer_simpledfe(
fft_len,
payload_constellation.base(),
occupied_carriers,
pilot_carriers,
pilot_symbols,
symbols_skipped=1, # (that was already in the header)
alpha=0.1
)
payload_eq = digital.ofdm_frame_equalizer_vcvc(
payload_equalizer.base(),
cp_len,
self.frame_length_tag_key
)
payload_serializer = digital.ofdm_serializer_vcc(
fft_len, occupied_carriers,
self.frame_length_tag_key,
self.packet_length_tag_key,
1 # Skip 1 symbol (that was already in the header)
)
payload_demod = digital.constellation_decoder_cb(payload_constellation.base())
self.payload_descrambler = digital.additive_scrambler_bb(
0x8a,
self.scramble_seed,
7,
0, # Don't reset after fixed length
bits_per_byte=8, # This is after packing
reset_tag_key=self.packet_length_tag_key
)
payload_pack = blocks.repack_bits_bb(bps_payload, 8, self.packet_length_tag_key, True)
self.crc = digital.crc32_bb(True, self.packet_length_tag_key)
self.connect(
(hpd, 1),
payload_fft,
payload_eq,
payload_serializer,
payload_demod,
payload_pack,
self.payload_descrambler,
self.crc,
self
)
if debug_log:
self.connect((hpd, 1), blocks.tag_debug(gr.sizeof_gr_complex*fft_len, 'post-hpd'))
self.connect(payload_fft, blocks.file_sink(gr.sizeof_gr_complex*fft_len, 'post-payload-fft.dat'))
self.connect(payload_eq, blocks.file_sink(gr.sizeof_gr_complex*fft_len, 'post-payload-eq.dat'))
self.connect(payload_serializer, blocks.file_sink(gr.sizeof_gr_complex, 'post-payload-serializer.dat'))
self.connect(payload_demod, blocks.file_sink(1, 'post-payload-demod.dat'))
self.connect(payload_pack, blocks.file_sink(1, 'post-payload-pack.dat'))
            self.connect(self.crc, blocks.file_sink(1, 'post-payload-crc.dat'))
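# Back-to-back loopback sketch (a sketch only; a real setup would put a
# channel model between tx and rx, and the payload below is illustrative):
def _example_loopback():
    tb = gr.top_block()
    payload = [x % 256 for x in range(96)]
    src = blocks.vector_source_b(payload, False)
    tagger = blocks.stream_to_tagged_stream(
        gr.sizeof_char, 1, len(payload), _def_packet_length_tag_key)
    tx = ofdm_tx()
    rx = ofdm_rx()
    sink = blocks.vector_sink_b()
    tb.connect(src, tagger, tx, rx, sink)
    tb.run()
    return list(sink.data()) == payload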
|
tarikkdiry/Flock | refs/heads/master | flask/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/utf8prober.py | 2918 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .mbcssm import UTF8SMModel
ONE_CHAR_PROB = 0.5
class UTF8Prober(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(UTF8SMModel)
self.reset()
def reset(self):
CharSetProber.reset(self)
self._mCodingSM.reset()
self._mNumOfMBChar = 0
def get_charset_name(self):
return "utf-8"
def feed(self, aBuf):
for c in aBuf:
codingState = self._mCodingSM.next_state(c)
if codingState == constants.eError:
self._mState = constants.eNotMe
break
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
break
elif codingState == constants.eStart:
if self._mCodingSM.get_current_charlen() >= 2:
self._mNumOfMBChar += 1
if self.get_state() == constants.eDetecting:
if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
self._mState = constants.eFoundIt
return self.get_state()
def get_confidence(self):
unlike = 0.99
if self._mNumOfMBChar < 6:
for i in range(0, self._mNumOfMBChar):
unlike = unlike * ONE_CHAR_PROB
return 1.0 - unlike
else:
return unlike
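# Usage sketch (feed() consumes raw bytes; the sample text is illustrative):
def _example_probe():
    prober = UTF8Prober()
    prober.feed(u'caf\xe9 na\xefve r\xe9sum\xe9'.encode('utf-8'))
    return prober.get_charset_name(), prober.get_confidence()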
|
ddki/my_study_project | refs/heads/master | language/python/frameworks/flask/venv/lib/python2.7/site-packages/click/exceptions.py | 199 | from ._compat import PY2, filename_to_ui, get_text_stderr
from .utils import echo
class ClickException(Exception):
"""An exception that Click can handle and show to the user."""
#: The exit code for this exception
exit_code = 1
def __init__(self, message):
if PY2:
if message is not None:
message = message.encode('utf-8')
Exception.__init__(self, message)
self.message = message
def format_message(self):
return self.message
def show(self, file=None):
if file is None:
file = get_text_stderr()
echo('Error: %s' % self.format_message(), file=file)
class UsageError(ClickException):
"""An internal exception that signals a usage error. This typically
aborts any further handling.
:param message: the error message to display.
:param ctx: optionally the context that caused this error. Click will
fill in the context automatically in some situations.
"""
exit_code = 2
def __init__(self, message, ctx=None):
ClickException.__init__(self, message)
self.ctx = ctx
def show(self, file=None):
if file is None:
file = get_text_stderr()
color = None
if self.ctx is not None:
color = self.ctx.color
echo(self.ctx.get_usage() + '\n', file=file, color=color)
echo('Error: %s' % self.format_message(), file=file, color=color)
class BadParameter(UsageError):
"""An exception that formats out a standardized error message for a
bad parameter. This is useful when thrown from a callback or type as
Click will attach contextual information to it (for instance, which
parameter it is).
.. versionadded:: 2.0
:param param: the parameter object that caused this error. This can
be left out, and Click will attach this info itself
if possible.
:param param_hint: a string that shows up as parameter name. This
can be used as alternative to `param` in cases
where custom validation should happen. If it is
a string it's used as such, if it's a list then
each item is quoted and separated.
"""
def __init__(self, message, ctx=None, param=None,
param_hint=None):
UsageError.__init__(self, message, ctx)
self.param = param
self.param_hint = param_hint
def format_message(self):
if self.param_hint is not None:
param_hint = self.param_hint
elif self.param is not None:
param_hint = self.param.opts or [self.param.human_readable_name]
else:
return 'Invalid value: %s' % self.message
if isinstance(param_hint, (tuple, list)):
param_hint = ' / '.join('"%s"' % x for x in param_hint)
return 'Invalid value for %s: %s' % (param_hint, self.message)
class MissingParameter(BadParameter):
"""Raised if click required an option or argument but it was not
provided when invoking the script.
.. versionadded:: 4.0
:param param_type: a string that indicates the type of the parameter.
The default is to inherit the parameter type from
the given `param`. Valid values are ``'parameter'``,
``'option'`` or ``'argument'``.
"""
def __init__(self, message=None, ctx=None, param=None,
param_hint=None, param_type=None):
BadParameter.__init__(self, message, ctx, param, param_hint)
self.param_type = param_type
def format_message(self):
if self.param_hint is not None:
param_hint = self.param_hint
elif self.param is not None:
param_hint = self.param.opts or [self.param.human_readable_name]
else:
param_hint = None
if isinstance(param_hint, (tuple, list)):
param_hint = ' / '.join('"%s"' % x for x in param_hint)
param_type = self.param_type
if param_type is None and self.param is not None:
param_type = self.param.param_type_name
msg = self.message
if self.param is not None:
msg_extra = self.param.type.get_missing_message(self.param)
if msg_extra:
if msg:
msg += '. ' + msg_extra
else:
msg = msg_extra
return 'Missing %s%s%s%s' % (
param_type,
param_hint and ' %s' % param_hint or '',
msg and '. ' or '.',
msg or '',
)
class NoSuchOption(UsageError):
"""Raised if click attempted to handle an option that does not
exist.
.. versionadded:: 4.0
"""
def __init__(self, option_name, message=None, possibilities=None,
ctx=None):
if message is None:
message = 'no such option: %s' % option_name
UsageError.__init__(self, message, ctx)
self.option_name = option_name
self.possibilities = possibilities
def format_message(self):
bits = [self.message]
if self.possibilities:
if len(self.possibilities) == 1:
bits.append('Did you mean %s?' % self.possibilities[0])
else:
possibilities = sorted(self.possibilities)
bits.append('(Possible options: %s)' % ', '.join(possibilities))
return ' '.join(bits)
class BadOptionUsage(UsageError):
"""Raised if an option is generally supplied but the use of the option
was incorrect. This is for instance raised if the number of arguments
for an option is not correct.
.. versionadded:: 4.0
"""
def __init__(self, message, ctx=None):
UsageError.__init__(self, message, ctx)
class BadArgumentUsage(UsageError):
"""Raised if an argument is generally supplied but the use of the argument
was incorrect. This is for instance raised if the number of values
for an argument is not correct.
.. versionadded:: 6.0
"""
def __init__(self, message, ctx=None):
UsageError.__init__(self, message, ctx)
class FileError(ClickException):
"""Raised if a file cannot be opened."""
def __init__(self, filename, hint=None):
ui_filename = filename_to_ui(filename)
if hint is None:
hint = 'unknown error'
ClickException.__init__(self, hint)
self.ui_filename = ui_filename
self.filename = filename
def format_message(self):
return 'Could not open file %s: %s' % (self.ui_filename, self.message)
class Abort(RuntimeError):
"""An internal signalling exception that signals Click to abort."""
|
allotria/intellij-community | refs/heads/master | python/testData/optimizeImports/moduleLevelDunderWithImportFromFutureAbove.after.py | 12 | from __future__ import print_function
__author__ = "akniazev"
from collections import OrderedDict
from datetime import date, time
from foo import bar
date(1, 1, 1)
time(1)
OrderedDict()
bar() |
utcompling/textgrounder | refs/heads/master | data/lists/python/fix-state-abbrevs.py | 1 | #!/usr/bin/python
# Fix up state abbreviations inside parentheses to the full state names.
# Meant to be run after a file has been converted to standard form, where the
# state is inside parens. Only acts inside parens to avoid munging
# Twitter handles containing state abbrevs.
import fileinput
import re
abbrevs = {
"AL":"Alabama",
"AK":"Alaska",
"AS":"American Samoa",
"AZ":"Arizona",
"AR":"Arkansas",
"CA":"California",
"CO":"Colorado",
"CT":"Connecticut",
"DE":"Delaware",
"DC":"District of Columbia",
"FM":"Fed. States of Micronesia",
"FL":"Florida",
"GA":"Georgia",
"GU":"Guam",
"HI":"Hawaii",
"ID":"Idaho",
"IL":"Illinois",
"IN":"Indiana",
"IA":"Iowa",
"KS":"Kansas",
"KY":"Kentucky",
"LA":"Louisiana",
"ME":"Maine",
"MH":"Marshall Islands",
"MD":"Maryland",
"MA":"Massachusetts",
"MI":"Michigan",
"MN":"Minnesota",
"MS":"Mississippi",
"MO":"Missouri",
"MT":"Montana",
"NE":"Nebraska",
"NV":"Nevada",
"NH":"New Hampshire",
"NJ":"New Jersey",
"NM":"New Mexico",
"NY":"New York",
"NC":"North Carolina",
"ND":"North Dakota",
"MP":"Northern Mariana Is.",
"OH":"Ohio",
"OK":"Oklahoma",
"OR":"Oregon",
"PW":"Palau",
"PA":"Pennsylvania",
"PR":"Puerto Rico",
"RI":"Rhode Island",
"SC":"South Carolina",
"SD":"South Dakota",
"TN":"Tennessee",
"TX":"Texas",
"UT":"Utah",
"VT":"Vermont",
"VA":"Virginia",
"VI":"Virgin Islands",
"WA":"Washington",
"WV":"West Virginia",
"WI":"Wisconsin",
"WY":"Wyoming",
}
for line in fileinput.input():
line = line.strip()
while True:
m = re.match("^(.*\([^()]*)([A-Z][A-Z])([^()]*\))$", line)
if not m:
print line
break
        elif m.group(2) not in abbrevs:
            print line
            print "Strange abbreviation %s not recognized" % m.group(2)
            break
else:
line = "%s%s%s" % (m.group(1), abbrevs[m.group(2)], m.group(3))
|
Xeralux/tensorflow | refs/heads/master | tensorflow/python/kernel_tests/random/random_crop_test.py | 133 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for random_crop."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test
class RandomCropTest(test.TestCase):
def testNoOp(self):
# No random cropping is performed since the size is value.shape.
for shape in (2, 1, 1), (2, 1, 3), (4, 5, 3):
value = np.arange(0, np.prod(shape), dtype=np.int32).reshape(shape)
with self.test_session():
crop = random_ops.random_crop(value, shape).eval()
self.assertAllEqual(crop, value)
def testContains(self):
with self.test_session():
shape = (3, 5, 7)
target = (2, 3, 4)
value = np.random.randint(1000000, size=shape)
value_set = set(
tuple(value[i:i + 2, j:j + 3, k:k + 4].ravel())
for i in range(2) for j in range(3) for k in range(4))
crop = random_ops.random_crop(value, size=target)
for _ in range(20):
y = crop.eval()
self.assertAllEqual(y.shape, target)
self.assertTrue(tuple(y.ravel()) in value_set)
def testRandomization(self):
# Run 1x1 crop num_samples times in an image and ensure that one finds each
# pixel 1/size of the time.
num_samples = 1000
shape = [5, 4, 1]
size = np.prod(shape)
single = [1, 1, 1]
value = np.arange(size).reshape(shape)
with self.test_session():
crop = random_ops.random_crop(value, single, seed=7)
counts = np.zeros(size, dtype=np.int32)
for _ in range(num_samples):
y = crop.eval()
self.assertAllEqual(y.shape, single)
counts[y] += 1
# Calculate the mean and 4 * standard deviation.
mean = np.repeat(num_samples / size, size)
four_stddev = 4.0 * np.sqrt(mean)
# Ensure that each entry is observed in 1/size of the samples
# within 4 standard deviations.
self.assertAllClose(counts, mean, atol=four_stddev)
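  # Sanity arithmetic for the tolerance above (illustrative): num_samples=1000
  # over size=20 pixels gives an expected count of 50 per pixel, and
  # 4*sqrt(50) ~= 28.3, so each observed count should land roughly in [22, 78].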
if __name__ == '__main__':
test.main()
|
wangjun/pythoner.net | refs/heads/dev | pythoner/accounts/templatetags/accounts_tags.py | 3 | # -*- coding: utf-8 -*-
# Date: 11-6-14 10:09 PM
# Author: T-y(master@t-y.me)
# File:accounts_tags
import urllib, hashlib
from django import template
from django.contrib.auth.models import User
from accounts.models import UserProfile
register = template.Library()
@register.inclusion_tag('account_latest.tag.html')
def get_latest_user(count=10):
"""
得到最新注册的用户
"""
try:
count = int(count)
except ValueError:
count = 10
users = User.objects.filter(is_active=True).order_by('-id')[0:count]
return {'users':users}
@register.inclusion_tag('account_alive_user.tag.html')
def get_alive_user(count=200):
"""
得到活跃用户
"""
ps = UserProfile.objects.filter().order_by('-score')[:count]
return {'users':[p.user for p in ps]}
@register.filter
def gravatar_url(email="somone@example.com",size=40):
default = "http://www.gravatar.com/avatar/00000000000000000000000000000000"
# construct the url
gravatar_url = "http://www.gravatar.com/avatar/" + hashlib.md5(email.lower()).hexdigest() + "?"
gravatar_url += urllib.urlencode({'d':default, 's':str(size)})
return gravatar_url
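# Template usage sketch (hypothetical template snippet):
#
#     {% load accounts_tags %}
#     <img src="{{ user.email|gravatar_url:40 }}" alt="avatar">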
|
GNOME/kupfer | refs/heads/master | kupfer/plugin/show_text.py | 4 | __kupfer_name__ = _("Show Text")
__kupfer_actions__ = (
"ShowText",
"LargeType",
"ShowNotification",
)
__description__ = _("Display text in a window")
__version__ = ""
__author__ = "US"
from kupfer.objects import Action, Leaf, TextLeaf
from kupfer import icons, uiutils
from kupfer import textutils
class ShowText (Action):
def __init__(self):
Action.__init__(self, _("Show Text"))
def wants_context(self):
return True
def activate(self, leaf, ctx):
uiutils.show_text_result(leaf.get_text_representation(),
title=_("Show Text"), ctx=ctx)
def item_types(self):
yield TextLeaf
def get_description(self):
return _("Display text in a window")
def get_icon_name(self):
return "format-text-bold"
class LargeType (Action):
def __init__(self):
Action.__init__(self, _("Large Type"))
def wants_context(self):
return True
def activate(self, leaf, ctx):
return self.activate_multiple((leaf, ), ctx)
def activate_multiple(self, objects, ctx):
all_texts = []
for obj in objects:
all_texts.append(obj.get_text_representation())
uiutils.show_large_type("\n".join(all_texts), ctx)
def item_types(self):
yield Leaf
def valid_for_item(self, obj):
return hasattr(obj, "get_text_representation")
def get_description(self):
return _("Display text in a window")
def get_gicon(self):
return icons.ComposedIcon("format-text-bold", "zoom-in")
def get_icon_name(self):
return "format-text-bold"
class ShowNotification (Action):
def __init__(self):
Action.__init__(self, _("Show Notification"))
def activate(self, leaf):
title, body = textutils.extract_title_body(leaf.object)
if body:
uiutils.show_notification(title, body,
icon_name=self.get_icon_name())
else:
uiutils.show_notification(title)
def item_types(self):
yield TextLeaf
def get_icon_name(self):
return "format-text-bold"
|
Andr3iC/courtlistener | refs/heads/master | cl/tests/test_visualizations.py | 1 | # coding=utf-8
"""
Functional tests for the Visualization feature of CourtListener
"""
from django.contrib.auth.models import User
from cl.users.models import UserProfile
from cl.tests.base import BaseSeleniumTest
class VisualizationCrudTests(BaseSeleniumTest):
"""
Test CRUD operations from the browser point of view
"""
fixtures = ['scotus_map_data.json', 'visualizations.json']
def setUp(self):
self.user = User.objects.create_user(
'user', 'user@cl.com', 'password'
)
self.user.save()
        self.profile = UserProfile.objects.create(
            user=self.user,
            email_confirmed=True
        )
super(VisualizationCrudTests, self).setUp()
def test_creating_new_visualization(self):
""" Test if a user can create a new Visualization """
# Beth Beta-User logs into CL
self.browser.get(self.server_url)
self.attempt_sign_in('user', 'password')
# She selects "New Visualization" from the new Visualization menu
menu = self.browser.find_element_by_link_text('Visualizations ')
menu.click()
menu_item = self.browser.find_element_by_link_text('New Network')
menu_item.click()
self.assertIn('Create a New Citation Network', self.browser.title)
# Once there, she notices inputs for a First and Second Case
self.assert_text_in_body('Create a New Citation Network')
self.assert_text_in_body('First Case')
self.assert_text_in_body('Second Case')
# For the First Case, she starts typing 'Marsh'
first_case = self.browser.find_element_by_id(
'starting-cluster-typeahead'
)
type_ahead = self.browser.find_element_by_css_selector('.tt-dataset-0')
first_case.send_keys('Marsh')
suggestion = type_ahead.find_element_by_css_selector('.tt-suggestion')
# She notices a drop down from the type-ahead search!
suggestion_text = suggestion.text
self.assertIn('Marsh v. Chambers', suggestion_text)
# She selects the case she was thinking of: 'Marsh v. Chambers'
suggestion.click()
# And the new case name is now in the input!
first_case = self.browser.find_element_by_id(
'starting-cluster-typeahead'
)
self.assertIn(suggestion_text, first_case.get_attribute('value'))
# For the Second Case, she starts typing 'Cutter'
second_case = self.browser.find_element_by_id(
'ending-cluster-typeahead-search'
)
type_ahead = self.browser.find_element_by_css_selector('.tt-dataset-1')
second_case.send_keys('Cutter')
suggestion = type_ahead.find_element_by_css_selector('.tt-suggestion')
# In the new type-ahead, selects the Jon B. Cutter case
suggestion_text = suggestion.text
self.assertIn('JON B. CUTTER', suggestion_text)
suggestion.click()
second_case = self.browser.find_element_by_id(
'ending-cluster-typeahead-search'
)
self.assertIn(suggestion_text, second_case.get_attribute('value'))
# She notices a "More Options" button and, why not, she clicks it
more = self.browser.find_element_by_id('more')
self.assertIn('More Options', more.text)
self.assert_text_not_in_body('Title')
self.assert_text_not_in_body('Description')
more.click()
# Wow, looks like she can enter a Title and Description
self.assert_text_in_body('Title')
title = self.browser.find_element_by_id('id_title')
title.send_keys('Selenium Test Visualization')
self.assert_text_in_body('Description')
description = self.browser.find_element_by_id('id_notes')
description.send_keys('Test description.\n#FreeKe$ha')
# She clicks Make this Network when she's done
self.browser.find_element_by_id('make-viz-button').click()
# And she's brought to the new Visualization she just created!
self.assertIn('Network Graph of Selenium', self.browser.title)
|
faun/django_test | refs/heads/master | mysite/urls.py | 1 | from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^mysite/', include('mysite.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
(r'^polls/$', 'polls.views.index'),
(r'^polls/(?P<poll_id>\d+)/$', 'polls.views.detail'),
(r'^polls/(?P<poll_id>\d+)/results/$', 'polls.views.results'),
(r'^polls/(?P<poll_id>\d+)/vote/$', 'polls.views.vote'),
(r'^admin/', include(admin.site.urls)),
) |
ranjinidas/Axelrod | refs/heads/master | axelrod/tests/unit/test_mathematicalconstants.py | 2 | """Test for the golden and other mathematical strategies."""
import axelrod
from .test_player import TestPlayer
C, D = axelrod.Actions.C, axelrod.Actions.D
class TestGolden(TestPlayer):
name = '$\phi$'
player = axelrod.Golden
expected_classifier = {
'memory_depth': float('inf'), # Long memory
'stochastic': False,
'makes_use_of': set(),
'inspects_source': False,
'manipulates_source': False,
'manipulates_state': False
}
def test_strategy(self):
"""test initial strategy co-operates"""
self.first_play_test(C)
def test_when_no_defection(self):
"""tests that if the opposing player does not defect initially then strategy defects"""
self.responses_test([C], [C], [D])
def test_when_greater_than_golden_ratio(self):
"""tests that if the ratio of Cs to Ds is greater than the golden ratio then strategy defects"""
self.responses_test([C] * 4, [C, C, D, D], [D])
    def test_when_less_than_golden_ratio(self):
"""tests that if the ratio of Cs to Ds is less than the golden ratio then strategy co-operates"""
self.responses_test([C] * 4, [D] * 4, [C])
class TestPi(TestPlayer):
name = '$\pi$'
player = axelrod.Pi
expected_classifier = {
'memory_depth': float('inf'), # Long memory
'stochastic': False,
'makes_use_of': set(),
'inspects_source': False,
'manipulates_source': False,
'manipulates_state': False
}
def test_strategy(self):
"""test initial strategy co-operates"""
self.first_play_test(C)
def test_when_no_defection(self):
"""tests that if the opposing player does not defect initially then strategy defects"""
self.responses_test([C], [C], [D])
def test_when_greater_than_pi(self):
"""tests that if the ratio of Cs to Ds is greater than pi then strategy defects"""
self.responses_test([C] * 4, [C, C, C, D], [D])
def test_when_less_than_pi(self):
"""tests that if the ratio of Cs to Ds is less than pi then strategy co-operates"""
self.responses_test([C] * 4, [C, C, D, D], [C])
class Teste(TestPlayer):
name = '$e$'
player = axelrod.e
expected_classifier = {
'memory_depth': float('inf'), # Long memory
'stochastic': False,
'makes_use_of': set(),
'inspects_source': False,
'manipulates_source': False,
'manipulates_state': False
}
def test_strategy(self):
"""test initial strategy co-operates"""
self.first_play_test(C)
def test_when_no_defection(self):
"""tests that if the opposing player does not defect initially then strategy defects"""
self.responses_test([C], [C], [D])
def test_when_greater_than_e(self):
"""tests that if the ratio of Cs to Ds is greater than e then strategy defects"""
self.responses_test([C] * 4, [C, C, D, D], [D])
def test_when_less_than_e(self):
"""tests that if the ratio of Cs to Ds is less than e then strategy co-operates"""
self.responses_test([C] * 4, [C, D, D, D], [C])
|
ivanhorvath/openshift-tools | refs/heads/prod | openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_openshift/src/test/unit/test_oc_adm_registry.py | 45 | #!/usr/bin/env python
'''
Unit tests for oc adm registry
'''
import os
import six
import sys
import unittest
import mock
# Removing invalid variable names for tests so that I can
# keep them brief
# pylint: disable=invalid-name,no-name-in-module
# Disable import-error b/c our libraries aren't loaded in jenkins
# pylint: disable=import-error
# place class in our python path
module_path = os.path.join('/'.join(os.path.realpath(__file__).split('/')[:-4]), 'library') # noqa: E501
sys.path.insert(0, module_path)
from oc_adm_registry import Registry, locate_oc_binary # noqa: E402
# pylint: disable=too-many-public-methods
class RegistryTest(unittest.TestCase):
'''
Test class for Registry
'''
dry_run = '''{
"kind": "List",
"apiVersion": "v1",
"metadata": {},
"items": [
{
"kind": "ServiceAccount",
"apiVersion": "v1",
"metadata": {
"name": "registry",
"creationTimestamp": null
}
},
{
"kind": "ClusterRoleBinding",
"apiVersion": "v1",
"metadata": {
"name": "registry-registry-role",
"creationTimestamp": null
},
"userNames": [
"system:serviceaccount:default:registry"
],
"groupNames": null,
"subjects": [
{
"kind": "ServiceAccount",
"namespace": "default",
"name": "registry"
}
],
"roleRef": {
"kind": "ClusterRole",
"name": "system:registry"
}
},
{
"kind": "DeploymentConfig",
"apiVersion": "v1",
"metadata": {
"name": "docker-registry",
"creationTimestamp": null,
"labels": {
"docker-registry": "default"
}
},
"spec": {
"strategy": {
"resources": {}
},
"triggers": [
{
"type": "ConfigChange"
}
],
"replicas": 1,
"test": false,
"selector": {
"docker-registry": "default"
},
"template": {
"metadata": {
"creationTimestamp": null,
"labels": {
"docker-registry": "default"
}
},
"spec": {
"volumes": [
{
"name": "registry-storage",
"emptyDir": {}
}
],
"containers": [
{
"name": "registry",
"image": "openshift3/ose-docker-registry:v3.5.0.39",
"ports": [
{
"containerPort": 5000
}
],
"env": [
{
"name": "REGISTRY_HTTP_ADDR",
"value": ":5000"
},
{
"name": "REGISTRY_HTTP_NET",
"value": "tcp"
},
{
"name": "REGISTRY_HTTP_SECRET",
"value": "WQjSGeUu5KFZRTwGeIXgwIjyraNDLmdJblsFbtzZdF8="
},
{
"name": "REGISTRY_MIDDLEWARE_REPOSITORY_OPENSHIFT_ENFORCEQUOTA",
"value": "false"
}
],
"resources": {
"requests": {
"cpu": "100m",
"memory": "256Mi"
}
},
"volumeMounts": [
{
"name": "registry-storage",
"mountPath": "/registry"
}
],
"livenessProbe": {
"httpGet": {
"path": "/healthz",
"port": 5000
},
"initialDelaySeconds": 10,
"timeoutSeconds": 5
},
"readinessProbe": {
"httpGet": {
"path": "/healthz",
"port": 5000
},
"timeoutSeconds": 5
},
"securityContext": {
"privileged": false
}
}
],
"nodeSelector": {
"type": "infra"
},
"serviceAccountName": "registry",
"serviceAccount": "registry"
}
}
},
"status": {
"latestVersion": 0,
"observedGeneration": 0,
"replicas": 0,
"updatedReplicas": 0,
"availableReplicas": 0,
"unavailableReplicas": 0
}
},
{
"kind": "Service",
"apiVersion": "v1",
"metadata": {
"name": "docker-registry",
"creationTimestamp": null,
"labels": {
"docker-registry": "default"
}
},
"spec": {
"ports": [
{
"name": "5000-tcp",
"port": 5000,
"targetPort": 5000
}
],
"selector": {
"docker-registry": "default"
},
"clusterIP": "172.30.119.110",
"sessionAffinity": "ClientIP"
},
"status": {
"loadBalancer": {}
}
}
]}'''
@mock.patch('oc_adm_registry.locate_oc_binary')
@mock.patch('oc_adm_registry.Utils._write')
@mock.patch('oc_adm_registry.Utils.create_tmpfile_copy')
@mock.patch('oc_adm_registry.Registry._run')
def test_state_present(self, mock_cmd, mock_tmpfile_copy, mock_write, mock_oc_binary):
''' Testing state present '''
params = {'state': 'present',
'debug': False,
'namespace': 'default',
'name': 'docker-registry',
'kubeconfig': '/etc/origin/master/admin.kubeconfig',
'images': None,
'latest_images': None,
'labels': {"docker-registry": "default", "another-label": "val"},
'ports': ['5000'],
'replicas': 1,
'selector': 'type=infra',
'service_account': 'registry',
'mount_host': None,
'volume_mounts': None,
'env_vars': {},
'enforce_quota': False,
'force': False,
'daemonset': False,
'tls_key': None,
'tls_certificate': None,
'edits': []}
mock_cmd.side_effect = [
(1, '', 'Error from server (NotFound): deploymentconfigs "docker-registry" not found'),
(1, '', 'Error from server (NotFound): service "docker-registry" not found'),
(0, RegistryTest.dry_run, ''),
(0, '', ''),
(0, '', ''),
]
mock_tmpfile_copy.return_value = '/tmp/mocked_kubeconfig'
mock_oc_binary.return_value = 'oc'
results = Registry.run_ansible(params, False)
self.assertTrue(results['changed'])
for result in results['results']['results']:
self.assertEqual(result['returncode'], 0)
mock_cmd.assert_has_calls([
mock.call(['oc', 'get', 'dc', 'docker-registry', '-o', 'json', '-n', 'default'], None),
mock.call(['oc', 'get', 'svc', 'docker-registry', '-o', 'json', '-n', 'default'], None),
mock.call(['oc', 'adm', 'registry',
"--labels=another-label=val,docker-registry=default",
'--ports=5000', '--replicas=1', '--selector=type=infra',
'--service-account=registry', '--dry-run=True', '-o', 'json', '-n', 'default'], None),
mock.call(['oc', 'create', '-f', mock.ANY, '-n', 'default'], None),
mock.call(['oc', 'create', '-f', mock.ANY, '-n', 'default'], None), ])
@unittest.skipIf(six.PY3, 'py2 test only')
@mock.patch('os.path.exists')
@mock.patch('os.environ.get')
def test_binary_lookup_fallback(self, mock_env_get, mock_path_exists):
''' Testing binary lookup fallback '''
mock_env_get.side_effect = lambda _v, _d: ''
mock_path_exists.side_effect = lambda _: False
self.assertEqual(locate_oc_binary(), 'oc')
@unittest.skipIf(six.PY3, 'py2 test only')
@mock.patch('os.path.exists')
@mock.patch('os.environ.get')
def test_binary_lookup_in_path(self, mock_env_get, mock_path_exists):
''' Testing binary lookup in path '''
oc_bin = '/usr/bin/oc'
mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
mock_path_exists.side_effect = lambda f: f == oc_bin
self.assertEqual(locate_oc_binary(), oc_bin)
@unittest.skipIf(six.PY3, 'py2 test only')
@mock.patch('os.path.exists')
@mock.patch('os.environ.get')
def test_binary_lookup_in_usr_local(self, mock_env_get, mock_path_exists):
''' Testing binary lookup in /usr/local/bin '''
oc_bin = '/usr/local/bin/oc'
mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
mock_path_exists.side_effect = lambda f: f == oc_bin
self.assertEqual(locate_oc_binary(), oc_bin)
@unittest.skipIf(six.PY3, 'py2 test only')
@mock.patch('os.path.exists')
@mock.patch('os.environ.get')
def test_binary_lookup_in_home(self, mock_env_get, mock_path_exists):
''' Testing binary lookup in ~/bin '''
oc_bin = os.path.expanduser('~/bin/oc')
mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
mock_path_exists.side_effect = lambda f: f == oc_bin
self.assertEqual(locate_oc_binary(), oc_bin)
@unittest.skipIf(six.PY2, 'py3 test only')
@mock.patch('shutil.which')
@mock.patch('os.environ.get')
def test_binary_lookup_fallback_py3(self, mock_env_get, mock_shutil_which):
''' Testing binary lookup fallback '''
mock_env_get.side_effect = lambda _v, _d: ''
mock_shutil_which.side_effect = lambda _f, path=None: None
self.assertEqual(locate_oc_binary(), 'oc')
@unittest.skipIf(six.PY2, 'py3 test only')
@mock.patch('shutil.which')
@mock.patch('os.environ.get')
def test_binary_lookup_in_path_py3(self, mock_env_get, mock_shutil_which):
''' Testing binary lookup in path '''
oc_bin = '/usr/bin/oc'
mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
mock_shutil_which.side_effect = lambda _f, path=None: oc_bin
self.assertEqual(locate_oc_binary(), oc_bin)
@unittest.skipIf(six.PY2, 'py3 test only')
@mock.patch('shutil.which')
@mock.patch('os.environ.get')
def test_binary_lookup_in_usr_local_py3(self, mock_env_get, mock_shutil_which):
''' Testing binary lookup in /usr/local/bin '''
oc_bin = '/usr/local/bin/oc'
mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
mock_shutil_which.side_effect = lambda _f, path=None: oc_bin
self.assertEqual(locate_oc_binary(), oc_bin)
@unittest.skipIf(six.PY2, 'py3 test only')
@mock.patch('shutil.which')
@mock.patch('os.environ.get')
def test_binary_lookup_in_home_py3(self, mock_env_get, mock_shutil_which):
''' Testing binary lookup in ~/bin '''
oc_bin = os.path.expanduser('~/bin/oc')
mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
mock_shutil_which.side_effect = lambda _f, path=None: oc_bin
self.assertEqual(locate_oc_binary(), oc_bin)
|
burzillibus/RobHome | refs/heads/master | venv/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py | 354 | from __future__ import absolute_import, division, unicode_literals
from . import sax
__all__ = ["sax"]
try:
from . import genshi # noqa
except ImportError:
pass
else:
__all__.append("genshi")
|
ltilve/ChromiumGStreamerBackend | refs/heads/master | tools/telemetry/telemetry/core/discover_unittest.py | 16 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from telemetry.core import discover
from telemetry.core import util
class DiscoverTest(unittest.TestCase):
def setUp(self):
self._base_dir = util.GetUnittestDataDir()
self._start_dir = os.path.join(self._base_dir, 'discoverable_classes')
self._base_class = Exception
def testDiscoverClassesWithIndexByModuleName(self):
classes = discover.DiscoverClasses(
self._start_dir, self._base_dir, self._base_class,
index_by_class_name=False)
actual_classes = dict(
(name, cls.__name__) for name, cls in classes.iteritems())
expected_classes = {
'another_discover_dummyclass': 'DummyExceptionWithParameterImpl1',
'discover_dummyclass': 'DummyException',
'parameter_discover_dummyclass': 'DummyExceptionWithParameterImpl2'
}
self.assertEqual(actual_classes, expected_classes)
def testDiscoverDirectlyConstructableClassesWithIndexByClassName(self):
classes = discover.DiscoverClasses(
self._start_dir, self._base_dir, self._base_class,
directly_constructable=True)
actual_classes = dict(
(name, cls.__name__) for name, cls in classes.iteritems())
expected_classes = {
'dummy_exception': 'DummyException',
'dummy_exception_impl1': 'DummyExceptionImpl1',
'dummy_exception_impl2': 'DummyExceptionImpl2',
}
self.assertEqual(actual_classes, expected_classes)
def testDiscoverClassesWithIndexByClassName(self):
classes = discover.DiscoverClasses(
self._start_dir, self._base_dir, self._base_class)
actual_classes = dict(
(name, cls.__name__) for name, cls in classes.iteritems())
expected_classes = {
'dummy_exception': 'DummyException',
'dummy_exception_impl1': 'DummyExceptionImpl1',
'dummy_exception_impl2': 'DummyExceptionImpl2',
'dummy_exception_with_parameter_impl1':
'DummyExceptionWithParameterImpl1',
'dummy_exception_with_parameter_impl2':
'DummyExceptionWithParameterImpl2'
}
self.assertEqual(actual_classes, expected_classes)
def testDiscoverClassesWithPatternAndIndexByModule(self):
classes = discover.DiscoverClasses(
self._start_dir, self._base_dir, self._base_class,
pattern='another*', index_by_class_name=False)
actual_classes = dict(
(name, cls.__name__) for name, cls in classes.iteritems())
expected_classes = {
'another_discover_dummyclass': 'DummyExceptionWithParameterImpl1'
}
self.assertEqual(actual_classes, expected_classes)
def testDiscoverDirectlyConstructableClassesWithPatternAndIndexByClassName(
self):
classes = discover.DiscoverClasses(
self._start_dir, self._base_dir, self._base_class,
pattern='another*', directly_constructable=True)
actual_classes = dict(
(name, cls.__name__) for name, cls in classes.iteritems())
expected_classes = {
'dummy_exception_impl1': 'DummyExceptionImpl1',
'dummy_exception_impl2': 'DummyExceptionImpl2',
}
self.assertEqual(actual_classes, expected_classes)
def testDiscoverClassesWithPatternAndIndexByClassName(self):
classes = discover.DiscoverClasses(
self._start_dir, self._base_dir, self._base_class,
pattern='another*')
actual_classes = dict(
(name, cls.__name__) for name, cls in classes.iteritems())
expected_classes = {
'dummy_exception_impl1': 'DummyExceptionImpl1',
'dummy_exception_impl2': 'DummyExceptionImpl2',
'dummy_exception_with_parameter_impl1':
'DummyExceptionWithParameterImpl1',
}
self.assertEqual(actual_classes, expected_classes)
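# Direct-call sketch mirroring these tests (relies on the same fixture
# directory the tests use):
def _example_discover():
    base_dir = util.GetUnittestDataDir()
    start_dir = os.path.join(base_dir, 'discoverable_classes')
    return discover.DiscoverClasses(start_dir, base_dir, Exception)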
|
chromium2014/src | refs/heads/master | tools/site_compare/scrapers/ie/ie7.py | 189 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Does scraping for all known versions of IE."""
import pywintypes
import sys
import time
import types
from drivers import keyboard
from drivers import mouse
from drivers import windowing
# Default version
version = "7.0.5730.1"
DEFAULT_PATH = r"c:\program files\internet explorer\iexplore.exe"
def GetBrowser(path):
"""Invoke the IE browser and return the process, frame, and content window.
Args:
path: full path to browser
Returns:
A tuple of (process handle, render pane)
"""
if not path: path = DEFAULT_PATH
(iewnd, ieproc, address_bar, render_pane, tab_window) = InvokeBrowser(path)
return (ieproc, iewnd, render_pane)
def InvokeBrowser(path):
"""Invoke the IE browser.
Args:
path: full path to browser
Returns:
A tuple of (main window, process handle, address bar,
render_pane, tab_window)
"""
# Invoke IE
(ieproc, iewnd) = windowing.InvokeAndWait(path)
# Get windows we'll need
for tries in xrange(10):
try:
address_bar = windowing.FindChildWindow(
iewnd, "WorkerW|Navigation Bar/ReBarWindow32/"
"Address Band Root/ComboBoxEx32/ComboBox/Edit")
render_pane = windowing.FindChildWindow(
iewnd, "TabWindowClass/Shell DocObject View")
tab_window = windowing.FindChildWindow(
iewnd, "CommandBarClass/ReBarWindow32/TabBandClass/DirectUIHWND")
except IndexError:
time.sleep(1)
continue
break
return (iewnd, ieproc, address_bar, render_pane, tab_window)
def Scrape(urls, outdir, size, pos, timeout=20, **kwargs):
"""Invoke a browser, send it to a series of URLs, and save its output.
Args:
urls: list of URLs to scrape
outdir: directory to place output
size: size of browser window to use
pos: position of browser window
timeout: amount of time to wait for page to load
kwargs: miscellaneous keyword args
Returns:
None if success, else an error string
"""
path = r"c:\program files\internet explorer\iexplore.exe"
if "path" in kwargs and kwargs["path"]: path = kwargs["path"]
(iewnd, ieproc, address_bar, render_pane, tab_window) = (
InvokeBrowser(path) )
# Resize and reposition the frame
windowing.MoveAndSizeWindow(iewnd, pos, size, render_pane)
# Visit each URL we're given
if type(urls) in types.StringTypes: urls = [urls]
timedout = False
for url in urls:
# Double-click in the address bar, type the name, and press Enter
mouse.DoubleClickInWindow(address_bar)
keyboard.TypeString(url)
keyboard.TypeString("\n")
# Wait for the page to finish loading
load_time = windowing.WaitForThrobber(
tab_window, (6, 8, 22, 24), timeout)
timedout = load_time < 0
if timedout:
break
# Scrape the page
image = windowing.ScrapeWindow(render_pane)
# Save to disk
if "filename" in kwargs:
if callable(kwargs["filename"]):
filename = kwargs["filename"](url)
else:
filename = kwargs["filename"]
else:
filename = windowing.URLtoFilename(url, outdir, ".bmp")
image.save(filename)
windowing.EndProcess(ieproc)
if timedout:
return "timeout"
def Time(urls, size, timeout, **kwargs):
"""Measure how long it takes to load each of a series of URLs
Args:
urls: list of URLs to time
size: size of browser window to use
timeout: amount of time to wait for page to load
kwargs: miscellaneous keyword args
Returns:
A list of tuples (url, time). "time" can be "crashed" or "timeout"
"""
if "path" in kwargs and kwargs["path"]: path = kwargs["path"]
else: path = DEFAULT_PATH
proc = None
# Visit each URL we're given
if type(urls) in types.StringTypes: urls = [urls]
ret = []
for url in urls:
try:
# Invoke the browser if necessary
if not proc:
(wnd, proc, address_bar, render_pane, tab_window) = InvokeBrowser(path)
# Resize and reposition the frame
windowing.MoveAndSizeWindow(wnd, (0,0), size, render_pane)
# Double-click in the address bar, type the name, and press Enter
mouse.DoubleClickInWindow(address_bar)
keyboard.TypeString(url)
keyboard.TypeString("\n")
# Wait for the page to finish loading
load_time = windowing.WaitForThrobber(
tab_window, (6, 8, 22, 24), timeout)
timedout = load_time < 0
if timedout:
load_time = "timeout"
# Send an alt-F4 to make the browser close; if this times out,
# we've probably got a crash
keyboard.TypeString(r"{\4}", use_modifiers=True)
if not windowing.WaitForProcessExit(proc, timeout):
windowing.EndProcess(proc)
load_time = "crashed"
proc = None
except pywintypes.error:
load_time = "crashed"
proc = None
ret.append( (url, load_time) )
# Send an alt-F4 to make the browser close; if this times out,
# we've probably got a crash
if proc:
keyboard.TypeString(r"{\4}", use_modifiers=True)
if not windowing.WaitForProcessExit(proc, timeout):
windowing.EndProcess(proc)
return ret
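# Time() follows the same calling pattern as Scrape(); a sketch, assuming
# the default IE install path as above:
#
#   results = Time(["http://www.google.com"], (1024, 768), timeout=30)
#   for url, load_time in results:
#     print url, load_time   # a float, or "timeout"/"crashed"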
def main():
# We're being invoked rather than imported, so run some tests
path = r"c:\sitecompare\scrapes\ie7\7.0.5380.11"
windowing.PreparePath(path)
# Scrape three sites and save the results
Scrape(
["http://www.microsoft.com",
"http://www.google.com",
"http://www.sun.com"],
path, (1024, 768), (0, 0))
return 0
if __name__ == "__main__":
sys.exit(main())
|
ActiveState/code | refs/heads/master | recipes/Python/576564_Walkers_alimethod_random_objects_different/recipe-576564.py | 1 | #!/usr/bin/env python
""" Walker's alias method for random objects with different probablities
walkerrandom.py
Examples
--------
# 0 1 2 or 3 with probabilities .1 .2 .3 .4 --
wrand = Walkerrandom( [10, 20, 30, 40] ) # builds the Walker tables
wrand.random() # each call -> 0 1 2 or 3
# for example, 1000 calls with random.seed(1) -> [96, 199, 334, 371]
# strings A B C or D with probabilities .1 .2 .3 .4 --
abcd = dict( A=1, D=4, C=3, B=2 )
# keys can be any immutables: 2d points, colors, atoms ...
wrand = Walkerrandom( abcd.values(), abcd.keys() )
wrand.random() # each call -> "A" "B" "C" or "D"
# fast: 1 randint(), 1 uniform(), table lookup
How it works
------------
For weights 10 20 30 40 as above, picture sticks A B C D of those lengths:
10 AAAAAAAAAA
20 BBBBBBBBBB BBBBBBBBBB
30 CCCCCCCCCC CCCCCCCCCC CCCCCCCCCC
40 DDDDDDDDDD DDDDDDDDDD DDDDDDDDDD DDDDDDDDDD
Split and rearrange them into equal-length rows, like this:
AAAAAAAAAA DDDDDDDDDDDDDDD -- 10 A + 15 D = 40% A + 60% D
BBBBBBBBBBBBBBBBBBBB DDDDD -- 20 B + 5 D = 80% B + 20% D
CCCCCCCCCCCCCCCCCCCCCCCCC -- 25 C = 100% C
DDDDDDDDDDDDDDDDDDDD CCCCC -- 20 D + 5 C = 80% D + 20% C
Clearly 10 % of the area is A, 20 % B, 30 % C and 40 % D --
we haven't changed areas, just rearranged.
Now to choose a random one of A or B or C or D,
throw a dart at a "dart board" of the sticks in these 4 rows:
    if it hits row 0, return A with probability 40 % / D 60 %
    if it hits row 1, return B with probability 80 % / D 20 %
...
This picture is in Devroye, p. 111 (rediscovered here).
Walker's algorithm essentially arranges a given lot of sticks
into equal-length rows: pick a row shorter than average
and a row longer than average, split the longer to fill the shorter,
iterate until they're all the same length.
Notes
To generate random colors similar to those in a given picture,
first collect color samples in a histogram:
for color in ...:
# cluster e.g. rrggbb -> rgb, 16^3 bins
# (many many methods, see Wikipedia Data_clustering)
colors[color] += 1
(cPickle to a file, write it, read it back in)
then use Walkerrandom to select colors with these frequencies:
colorrand = Walkerrandom( colors.values(), colors.keys() )
colorrand.random() # each call -> a color
References
L. Devroye, Non-Uniform Random Variate Generation, 1986, p. 107 ff.
http://cg.scs.carleton.ca/~luc/rnbookindex.html (800 pages)
Knuth, Stanford GraphBase, 1993, p. 392
C++ hat random container by AngleWyrm,
http://home.comcast.net/~anglewyrm/hat.html
"""
from __future__ import division
import random
__author__ = "Denis Bzowy"
__version__ = "16nov2008"
Test = 0
#...............................................................................
class Walkerrandom:
""" Walker's alias method for random objects with different probablities
"""
def __init__( self, weights, keys=None ):
""" builds the Walker tables prob and inx for calls to random().
The weights (a list or tuple or iterable) can be in any order;
they need not sum to 1.
"""
n = self.n = len(weights)
self.keys = keys
sumw = sum(weights)
prob = [w * n / sumw for w in weights] # av 1
inx = [-1] * n
short = [j for j, p in enumerate( prob ) if p < 1]
long = [j for j, p in enumerate( prob ) if p > 1]
while short and long:
j = short.pop()
k = long[-1]
# assert prob[j] <= 1 <= prob[k]
inx[j] = k
prob[k] -= (1 - prob[j]) # -= residual weight
if prob[k] < 1:
short.append( k )
long.pop()
if Test:
print "test Walkerrandom: j k pk: %d %d %.2g" % (j, k, prob[k])
self.prob = prob
self.inx = inx
if Test:
print "test", self
def __str__( self ):
""" e.g. "Walkerrandom prob: 0.4 0.8 1 0.8 inx: 3 3 -1 2" """
probstr = " ".join([ "%.2g" % x for x in self.prob ])
inxstr = " ".join([ "%.2g" % x for x in self.inx ])
return "Walkerrandom prob: %s inx: %s" % (probstr, inxstr)
#...............................................................................
def random( self ):
""" each call -> a random int or key with the given probability
fast: 1 randint(), 1 random.uniform(), table lookup
"""
u = random.uniform( 0, 1 )
j = random.randint( 0, self.n - 1 ) # or low bits of u
randint = j if u <= self.prob[j] \
else self.inx[j]
return self.keys[randint] if self.keys \
else randint
#...............................................................................
if __name__ == "__main__":
# little examples, self-contained --
N = 5
Nrand = 1000
randomseed = 1
try:
import bz.util
bz.util.scan_eq_args( globals(), __doc__ ) # N=5 ...
except ImportError:
pass
if randomseed:
random.seed( randomseed )
print Nrand, "Walkerrandom with weights .1 .2 .3 .4:"
w = range( 1, N )
wrand = Walkerrandom( w )
nrand = [0] * (N - 1)
for _ in range( Nrand ):
j = wrand.random()
nrand[j] += 1
s = str( nrand )
print s
if N==5 and Nrand==1000 and randomseed==1:
assert s == "[96, 199, 334, 371]"
print Nrand, "Walkerrandom strings with weights .1 .2 .3 .4:"
abcd = dict( A=1, D=4, C=3, B=2 )
wrand = Walkerrandom( abcd.values(), abcd.keys() )
from collections import defaultdict
nrand = defaultdict(int) # init 0
for _ in range( Nrand ):
j = wrand.random()
nrand[j] += 1
s = str( sorted( nrand.iteritems() ))
print s
if N==5 and Nrand==1000 and randomseed==1:
assert s == "[('A', 105), ('B', 181), ('C', 283), ('D', 431)]"
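    # A minimal sketch of the invariant behind those tables (it assumes only
    # the Walkerrandom class above): row j keeps prob[j] of item j and hands
    # the rest to its alias inx[j], so summing both contributions per item
    # recovers the normalized weights n * w / sum(w).
    w = [10, 20, 30, 40]
    wr = Walkerrandom( w )
    share = list( wr.prob )
    for j in range( wr.n ):
        if wr.inx[j] >= 0:
            share[ wr.inx[j] ] += 1 - wr.prob[j]
    print "reconstructed shares:", " ".join([ "%.2g" % x for x in share ])
    # -> 0.4 0.8 1.2 1.6, i.e. 4 * [10 20 30 40] / 100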
# end walkerrandom.py
|
plamut/ggrc-core | refs/heads/develop | src/ggrc/utils/rules.py | 4 | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Mapping rules for Relationship validation and map:model import columns."""
import copy
def get_mapping_rules():
""" Get mappings rules as defined in business_object.js
Special cases:
    Audit has a direct mapping to Program with program_id
    Section has a direct mapping to Standard/Regulation/Policy with directive_id
"""
from ggrc import snapshotter
all_rules = set(['AccessGroup', 'Clause', 'Contract', 'Control',
'CycleTaskGroupObjectTask', 'DataAsset', 'Facility',
'Market', 'Objective', 'OrgGroup', 'Person', 'Policy',
'Process', 'Product', 'Program', 'Project', 'Regulation',
'Risk', 'Section', 'Standard', 'System', 'Threat',
'Vendor'])
snapshots = snapshotter.rules.Types.all
business_object_rules = {
"AccessGroup": all_rules - set(['AccessGroup']),
"Clause": all_rules - set(['Clause']),
"Contract": all_rules - set(['Policy', 'Regulation',
'Contract', 'Standard']),
"Control": all_rules,
"CycleTaskGroupObjectTask": (all_rules -
set(['CycleTaskGroupObjectTask'])),
"DataAsset": all_rules,
"Facility": all_rules,
"Market": all_rules,
"Objective": all_rules,
"OrgGroup": all_rules,
"Person": all_rules - set(['Person']),
"Policy": all_rules - set(['Policy', 'Regulation',
'Contract', 'Standard']),
"Process": all_rules,
"Product": all_rules,
"Program": all_rules - set(['Program']),
"Project": all_rules,
"Regulation": all_rules - set(['Policy', 'Regulation',
'Contract', 'Standard']),
"Risk": all_rules - set(['Risk']),
"Section": all_rules,
"Standard": all_rules - set(['Policy', 'Regulation',
'Contract', 'Standard']),
"System": all_rules,
"Threat": all_rules - set(['Threat']),
"Vendor": all_rules,
}
# Audit and Audit-scope objects
# Assessment and Issue have a special Audit field instead of map:audit
business_object_rules.update({
"Audit": set(),
"Assessment": snapshots | {"Issue"},
"Issue": snapshots | {"Assessment"},
})
return business_object_rules
def get_unmapping_rules():
"""Get unmapping rules from mapping dict."""
unmapping_rules = copy.deepcopy(get_mapping_rules())
# Audit and Audit-scope objects
unmapping_rules["Audit"] = set()
unmapping_rules["Assessment"] = {"Issue"}
unmapping_rules["Issue"] = {"Assessment"}
return unmapping_rules
__all__ = [
"get_mapping_rules",
"get_unmapping_rules",
]
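# A quick sanity check of the rules above (a sketch; it assumes a GGRC
# environment where `ggrc.snapshotter` is importable, as the real callers,
# the import and Relationship validators, do):
if __name__ == "__main__":
  rules = get_mapping_rules()
  print("Program" in rules["Program"])    # False: Programs never map to Programs
  print(sorted(rules["Audit"]))           # []: Audit exposes no map: columns
  print("Issue" in rules["Assessment"])   # True: snapshottable types plus Issue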
|
richard-fisher/repository | refs/heads/master | xorg/fonts/font-adobe-100dpi/actions.py | 37 | #!/usr/bin/env python
from pisi.actionsapi import shelltools, get, autotools, pisitools
def setup():
autotools.configure ("--disable-static --prefix=/usr")
def build():
autotools.make ()
def install():
autotools.rawInstall ("DESTDIR=%s" % get.installDIR())
|
isabernardes/Heriga | refs/heads/master | Herigaenv/lib/python2.7/site-packages/django/utils/module_loading.py | 320 | import copy
import os
import sys
from importlib import import_module
from django.utils import six
def import_string(dotted_path):
"""
Import a dotted module path and return the attribute/class designated by the
last name in the path. Raise ImportError if the import failed.
"""
try:
module_path, class_name = dotted_path.rsplit('.', 1)
except ValueError:
msg = "%s doesn't look like a module path" % dotted_path
six.reraise(ImportError, ImportError(msg), sys.exc_info()[2])
module = import_module(module_path)
try:
return getattr(module, class_name)
except AttributeError:
msg = 'Module "%s" does not define a "%s" attribute/class' % (
module_path, class_name)
six.reraise(ImportError, ImportError(msg), sys.exc_info()[2])
def autodiscover_modules(*args, **kwargs):
"""
Auto-discover INSTALLED_APPS modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
You may provide a register_to keyword parameter as a way to access a
registry. This register_to object must have a _registry instance variable
to access it.
"""
from django.apps import apps
register_to = kwargs.get('register_to')
for app_config in apps.get_app_configs():
for module_to_search in args:
# Attempt to import the app's module.
try:
if register_to:
before_import_registry = copy.copy(register_to._registry)
import_module('%s.%s' % (app_config.name, module_to_search))
except:
# Reset the registry to the state before the last import
# as this import will have to reoccur on the next request and
# this could raise NotRegistered and AlreadyRegistered
# exceptions (see #8245).
if register_to:
register_to._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have the module in question, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(app_config.module, module_to_search):
raise
if six.PY3:
from importlib.util import find_spec as importlib_find
def module_has_submodule(package, module_name):
"""See if 'module' is in 'package'."""
try:
package_name = package.__name__
package_path = package.__path__
except AttributeError:
# package isn't a package.
return False
full_module_name = package_name + '.' + module_name
return importlib_find(full_module_name, package_path) is not None
else:
import imp
def module_has_submodule(package, module_name):
"""See if 'module' is in 'package'."""
name = ".".join([package.__name__, module_name])
try:
# None indicates a cached miss; see mark_miss() in Python/import.c.
return sys.modules[name] is not None
except KeyError:
pass
try:
package_path = package.__path__ # No __path__, then not a package.
except AttributeError:
            # The remainder of this function assumes that we're dealing with
            # a package (module with a __path__), so if it's not, bail here.
return False
for finder in sys.meta_path:
if finder.find_module(name, package_path):
return True
for entry in package_path:
try:
# Try the cached finder.
finder = sys.path_importer_cache[entry]
if finder is None:
# Implicit import machinery should be used.
try:
file_, _, _ = imp.find_module(module_name, [entry])
if file_:
file_.close()
return True
except ImportError:
continue
# Else see if the finder knows of a loader.
elif finder.find_module(name):
return True
else:
continue
except KeyError:
# No cached finder, so try and make one.
for hook in sys.path_hooks:
try:
finder = hook(entry)
# XXX Could cache in sys.path_importer_cache
if finder.find_module(name):
return True
else:
# Once a finder is found, stop the search.
break
except ImportError:
# Continue the search for a finder.
continue
else:
# No finder found.
# Try the implicit import machinery if searching a directory.
if os.path.isdir(entry):
try:
file_, _, _ = imp.find_module(module_name, [entry])
if file_:
file_.close()
return True
except ImportError:
pass
# XXX Could insert None or NullImporter
else:
# Exhausted the search, so the module cannot be found.
return False
def module_dir(module):
"""
Find the name of the directory that contains a module, if possible.
Raise ValueError otherwise, e.g. for namespace packages that are split
over several directories.
"""
# Convert to list because _NamespacePath does not support indexing on 3.3.
paths = list(getattr(module, '__path__', []))
if len(paths) == 1:
return paths[0]
else:
filename = getattr(module, '__file__', None)
if filename is not None:
return os.path.dirname(filename)
raise ValueError("Cannot determine directory containing %s" % module)
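if __name__ == '__main__':
    # A short sketch of import_string (it assumes only that Django itself is
    # importable; any dotted path to a module attribute works the same way):
    func = import_string('django.utils.module_loading.module_dir')
    print(func.__name__)  # 'module_dir'
    try:
        import_string('no_dots_here')
    except ImportError as exc:
        print(exc)  # "no_dots_here doesn't look like a module path"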
|
bertrand-l/numpy | refs/heads/master | numpy/core/tests/test_records.py | 3 | from __future__ import division, absolute_import, print_function
import sys
import collections
import pickle
import warnings
from os import path
import numpy as np
from numpy.compat import asbytes
from numpy.testing import (
TestCase, run_module_suite, assert_, assert_equal, assert_array_equal,
assert_array_almost_equal, assert_raises, assert_warns
)
class TestFromrecords(TestCase):
def test_fromrecords(self):
r = np.rec.fromrecords([[456, 'dbe', 1.2], [2, 'de', 1.3]],
names='col1,col2,col3')
assert_equal(r[0].item(), (456, 'dbe', 1.2))
assert_equal(r['col1'].dtype.kind, 'i')
if sys.version_info[0] >= 3:
assert_equal(r['col2'].dtype.kind, 'U')
assert_equal(r['col2'].dtype.itemsize, 12)
else:
assert_equal(r['col2'].dtype.kind, 'S')
assert_equal(r['col2'].dtype.itemsize, 3)
assert_equal(r['col3'].dtype.kind, 'f')
def test_fromrecords_0len(self):
""" Verify fromrecords works with a 0-length input """
dtype = [('a', np.float), ('b', np.float)]
r = np.rec.fromrecords([], dtype=dtype)
assert_equal(r.shape, (0,))
def test_fromrecords_2d(self):
data = [
[(1, 2), (3, 4), (5, 6)],
[(6, 5), (4, 3), (2, 1)]
]
expected_a = [[1, 3, 5], [6, 4, 2]]
expected_b = [[2, 4, 6], [5, 3, 1]]
# try with dtype
r1 = np.rec.fromrecords(data, dtype=[('a', int), ('b', int)])
assert_equal(r1['a'], expected_a)
assert_equal(r1['b'], expected_b)
# try with names
r2 = np.rec.fromrecords(data, names=['a', 'b'])
assert_equal(r2['a'], expected_a)
assert_equal(r2['b'], expected_b)
assert_equal(r1, r2)
def test_method_array(self):
r = np.rec.array(asbytes('abcdefg') * 100, formats='i2,a3,i4', shape=3, byteorder='big')
assert_equal(r[1].item(), (25444, asbytes('efg'), 1633837924))
def test_method_array2(self):
r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'), (4, 44, 'd'), (5, 55, 'ex'),
(6, 66, 'f'), (7, 77, 'g')], formats='u1,f4,a1')
assert_equal(r[1].item(), (2, 22.0, asbytes('b')))
def test_recarray_slices(self):
r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'), (4, 44, 'd'), (5, 55, 'ex'),
(6, 66, 'f'), (7, 77, 'g')], formats='u1,f4,a1')
assert_equal(r[1::2][1].item(), (4, 44.0, asbytes('d')))
def test_recarray_fromarrays(self):
x1 = np.array([1, 2, 3, 4])
x2 = np.array(['a', 'dd', 'xyz', '12'])
x3 = np.array([1.1, 2, 3, 4])
r = np.rec.fromarrays([x1, x2, x3], names='a,b,c')
assert_equal(r[1].item(), (2, 'dd', 2.0))
x1[1] = 34
assert_equal(r.a, np.array([1, 2, 3, 4]))
def test_recarray_fromfile(self):
data_dir = path.join(path.dirname(__file__), 'data')
filename = path.join(data_dir, 'recarray_from_file.fits')
fd = open(filename, 'rb')
fd.seek(2880 * 2)
r1 = np.rec.fromfile(fd, formats='f8,i4,a5', shape=3, byteorder='big')
fd.seek(2880 * 2)
r2 = np.rec.array(fd, formats='f8,i4,a5', shape=3, byteorder='big')
fd.close()
assert_equal(r1, r2)
def test_recarray_from_obj(self):
count = 10
a = np.zeros(count, dtype='O')
b = np.zeros(count, dtype='f8')
c = np.zeros(count, dtype='f8')
for i in range(len(a)):
a[i] = list(range(1, 10))
mine = np.rec.fromarrays([a, b, c], names='date,data1,data2')
for i in range(len(a)):
assert_((mine.date[i] == list(range(1, 10))))
assert_((mine.data1[i] == 0.0))
assert_((mine.data2[i] == 0.0))
def test_recarray_from_repr(self):
a = np.array([(1,'ABC'), (2, "DEF")],
dtype=[('foo', int), ('bar', 'S4')])
recordarr = np.rec.array(a)
recarr = a.view(np.recarray)
recordview = a.view(np.dtype((np.record, a.dtype)))
recordarr_r = eval("numpy." + repr(recordarr), {'numpy': np})
recarr_r = eval("numpy." + repr(recarr), {'numpy': np})
recordview_r = eval("numpy." + repr(recordview), {'numpy': np})
assert_equal(type(recordarr_r), np.recarray)
assert_equal(recordarr_r.dtype.type, np.record)
assert_equal(recordarr, recordarr_r)
assert_equal(type(recarr_r), np.recarray)
assert_equal(recarr_r.dtype.type, np.record)
assert_equal(recarr, recarr_r)
assert_equal(type(recordview_r), np.ndarray)
assert_equal(recordview.dtype.type, np.record)
assert_equal(recordview, recordview_r)
def test_recarray_views(self):
a = np.array([(1,'ABC'), (2, "DEF")],
dtype=[('foo', int), ('bar', 'S4')])
b = np.array([1,2,3,4,5], dtype=np.int64)
#check that np.rec.array gives right dtypes
assert_equal(np.rec.array(a).dtype.type, np.record)
assert_equal(type(np.rec.array(a)), np.recarray)
assert_equal(np.rec.array(b).dtype.type, np.int64)
assert_equal(type(np.rec.array(b)), np.recarray)
#check that viewing as recarray does the same
assert_equal(a.view(np.recarray).dtype.type, np.record)
assert_equal(type(a.view(np.recarray)), np.recarray)
assert_equal(b.view(np.recarray).dtype.type, np.int64)
assert_equal(type(b.view(np.recarray)), np.recarray)
#check that view to non-structured dtype preserves type=np.recarray
r = np.rec.array(np.ones(4, dtype="f4,i4"))
rv = r.view('f8').view('f4,i4')
assert_equal(type(rv), np.recarray)
assert_equal(rv.dtype.type, np.record)
#check that getitem also preserves np.recarray and np.record
r = np.rec.array(np.ones(4, dtype=[('a', 'i4'), ('b', 'i4'),
('c', 'i4,i4')]))
assert_equal(r['c'].dtype.type, np.record)
assert_equal(type(r['c']), np.recarray)
# suppress deprecation warning in 1.12 (remove in 1.13)
with assert_warns(FutureWarning):
assert_equal(r[['a', 'b']].dtype.type, np.record)
assert_equal(type(r[['a', 'b']]), np.recarray)
#and that it preserves subclasses (gh-6949)
class C(np.recarray):
pass
c = r.view(C)
assert_equal(type(c['c']), C)
# check that accessing nested structures keep record type, but
# not for subarrays, non-void structures, non-structured voids
test_dtype = [('a', 'f4,f4'), ('b', 'V8'), ('c', ('f4',2)),
('d', ('i8', 'i4,i4'))]
r = np.rec.array([((1,1), b'11111111', [1,1], 1),
((1,1), b'11111111', [1,1], 1)], dtype=test_dtype)
assert_equal(r.a.dtype.type, np.record)
assert_equal(r.b.dtype.type, np.void)
assert_equal(r.c.dtype.type, np.float32)
assert_equal(r.d.dtype.type, np.int64)
# check the same, but for views
r = np.rec.array(np.ones(4, dtype='i4,i4'))
assert_equal(r.view('f4,f4').dtype.type, np.record)
assert_equal(r.view(('i4',2)).dtype.type, np.int32)
assert_equal(r.view('V8').dtype.type, np.void)
assert_equal(r.view(('i8', 'i4,i4')).dtype.type, np.int64)
#check that we can undo the view
arrs = [np.ones(4, dtype='f4,i4'), np.ones(4, dtype='f8')]
for arr in arrs:
rec = np.rec.array(arr)
# recommended way to view as an ndarray:
arr2 = rec.view(rec.dtype.fields or rec.dtype, np.ndarray)
assert_equal(arr2.dtype.type, arr.dtype.type)
assert_equal(type(arr2), type(arr))
def test_recarray_repr(self):
# make sure non-structured dtypes also show up as rec.array
a = np.array(np.ones(4, dtype='f8'))
assert_(repr(np.rec.array(a)).startswith('rec.array'))
# check that the 'np.record' part of the dtype isn't shown
a = np.rec.array(np.ones(3, dtype='i4,i4'))
assert_equal(repr(a).find('numpy.record'), -1)
a = np.rec.array(np.ones(3, dtype='i4'))
assert_(repr(a).find('dtype=int32') != -1)
def test_recarray_from_names(self):
ra = np.rec.array([
(1, 'abc', 3.7000002861022949, 0),
(2, 'xy', 6.6999998092651367, 1),
(0, ' ', 0.40000000596046448, 0)],
names='c1, c2, c3, c4')
pa = np.rec.fromrecords([
(1, 'abc', 3.7000002861022949, 0),
(2, 'xy', 6.6999998092651367, 1),
(0, ' ', 0.40000000596046448, 0)],
names='c1, c2, c3, c4')
assert_(ra.dtype == pa.dtype)
assert_(ra.shape == pa.shape)
for k in range(len(ra)):
assert_(ra[k].item() == pa[k].item())
def test_recarray_conflict_fields(self):
ra = np.rec.array([(1, 'abc', 2.3), (2, 'xyz', 4.2),
(3, 'wrs', 1.3)],
names='field, shape, mean')
ra.mean = [1.1, 2.2, 3.3]
assert_array_almost_equal(ra['mean'], [1.1, 2.2, 3.3])
assert_(type(ra.mean) is type(ra.var))
ra.shape = (1, 3)
assert_(ra.shape == (1, 3))
ra.shape = ['A', 'B', 'C']
assert_array_equal(ra['shape'], [['A', 'B', 'C']])
ra.field = 5
assert_array_equal(ra['field'], [[5, 5, 5]])
assert_(isinstance(ra.field, collections.Callable))
def test_fromrecords_with_explicit_dtype(self):
a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')],
dtype=[('a', int), ('b', np.object)])
assert_equal(a.a, [1, 2])
assert_equal(a[0].a, 1)
assert_equal(a.b, ['a', 'bbb'])
assert_equal(a[-1].b, 'bbb')
#
ndtype = np.dtype([('a', int), ('b', np.object)])
a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')], dtype=ndtype)
assert_equal(a.a, [1, 2])
assert_equal(a[0].a, 1)
assert_equal(a.b, ['a', 'bbb'])
assert_equal(a[-1].b, 'bbb')
def test_recarray_stringtypes(self):
# Issue #3993
a = np.array([('abc ', 1), ('abc', 2)],
dtype=[('foo', 'S4'), ('bar', int)])
a = a.view(np.recarray)
assert_equal(a.foo[0] == a.foo[1], False)
def test_recarray_returntypes(self):
qux_fields = {'C': (np.dtype('S5'), 0), 'D': (np.dtype('S5'), 6)}
a = np.rec.array([('abc ', (1,1), 1, ('abcde', 'fgehi')),
('abc', (2,3), 1, ('abcde', 'jklmn'))],
dtype=[('foo', 'S4'),
('bar', [('A', int), ('B', int)]),
('baz', int), ('qux', qux_fields)])
assert_equal(type(a.foo), np.ndarray)
assert_equal(type(a['foo']), np.ndarray)
assert_equal(type(a.bar), np.recarray)
assert_equal(type(a['bar']), np.recarray)
assert_equal(a.bar.dtype.type, np.record)
assert_equal(type(a['qux']), np.recarray)
assert_equal(a.qux.dtype.type, np.record)
assert_equal(dict(a.qux.dtype.fields), qux_fields)
assert_equal(type(a.baz), np.ndarray)
assert_equal(type(a['baz']), np.ndarray)
assert_equal(type(a[0].bar), np.record)
assert_equal(type(a[0]['bar']), np.record)
assert_equal(a[0].bar.A, 1)
assert_equal(a[0].bar['A'], 1)
assert_equal(a[0]['bar'].A, 1)
assert_equal(a[0]['bar']['A'], 1)
assert_equal(a[0].qux.D, asbytes('fgehi'))
assert_equal(a[0].qux['D'], asbytes('fgehi'))
assert_equal(a[0]['qux'].D, asbytes('fgehi'))
assert_equal(a[0]['qux']['D'], asbytes('fgehi'))
def test_zero_width_strings(self):
# Test for #6430, based on the test case from #1901
cols = [['test'] * 3, [''] * 3]
rec = np.rec.fromarrays(cols)
assert_equal(rec['f0'], ['test', 'test', 'test'])
assert_equal(rec['f1'], ['', '', ''])
dt = np.dtype([('f0', '|S4'), ('f1', '|S')])
rec = np.rec.fromarrays(cols, dtype=dt)
assert_equal(rec.itemsize, 4)
assert_equal(rec['f0'], [b'test', b'test', b'test'])
assert_equal(rec['f1'], [b'', b'', b''])
class TestRecord(TestCase):
def setUp(self):
self.data = np.rec.fromrecords([(1, 2, 3), (4, 5, 6)],
dtype=[("col1", "<i4"),
("col2", "<i4"),
("col3", "<i4")])
def test_assignment1(self):
a = self.data
assert_equal(a.col1[0], 1)
a[0].col1 = 0
assert_equal(a.col1[0], 0)
def test_assignment2(self):
a = self.data
assert_equal(a.col1[0], 1)
a.col1[0] = 0
assert_equal(a.col1[0], 0)
def test_invalid_assignment(self):
a = self.data
def assign_invalid_column(x):
x[0].col5 = 1
self.assertRaises(AttributeError, assign_invalid_column, a)
def test_nonwriteable_setfield(self):
# gh-8171
r = np.rec.array([(0,), (1,)], dtype=[('f', 'i4')])
r.flags.writeable = False
with assert_raises(ValueError):
r.f = [2, 3]
with assert_raises(ValueError):
r.setfield([2,3], *r.dtype.fields['f'])
def test_out_of_order_fields(self):
"""Ticket #1431."""
# this test will be invalid in 1.13
# suppress deprecation warning in 1.12 (remove in 1.13)
with assert_warns(FutureWarning):
x = self.data[['col1', 'col2']]
y = self.data[['col2', 'col1']]
assert_equal(x[0][0], y[0][1])
def test_pickle_1(self):
# Issue #1529
a = np.array([(1, [])], dtype=[('a', np.int32), ('b', np.int32, 0)])
assert_equal(a, pickle.loads(pickle.dumps(a)))
assert_equal(a[0], pickle.loads(pickle.dumps(a[0])))
def test_pickle_2(self):
a = self.data
assert_equal(a, pickle.loads(pickle.dumps(a)))
assert_equal(a[0], pickle.loads(pickle.dumps(a[0])))
def test_pickle_3(self):
# Issue #7140
a = self.data
pa = pickle.loads(pickle.dumps(a[0]))
assert_(pa.flags.c_contiguous)
assert_(pa.flags.f_contiguous)
assert_(pa.flags.writeable)
assert_(pa.flags.aligned)
def test_objview_record(self):
# https://github.com/numpy/numpy/issues/2599
dt = np.dtype([('foo', 'i8'), ('bar', 'O')])
r = np.zeros((1,3), dtype=dt).view(np.recarray)
r.foo = np.array([1, 2, 3]) # TypeError?
# https://github.com/numpy/numpy/issues/3256
ra = np.recarray((2,), dtype=[('x', object), ('y', float), ('z', int)])
with assert_warns(FutureWarning):
ra[['x','y']] # TypeError?
def test_record_scalar_setitem(self):
# https://github.com/numpy/numpy/issues/3561
rec = np.recarray(1, dtype=[('x', float, 5)])
rec[0].x = 1
assert_equal(rec[0].x, np.ones(5))
def test_missing_field(self):
# https://github.com/numpy/numpy/issues/4806
arr = np.zeros((3,), dtype=[('x', int), ('y', int)])
assert_raises(ValueError, lambda: arr[['nofield']])
def test_find_duplicate():
l1 = [1, 2, 3, 4, 5, 6]
assert_(np.rec.find_duplicate(l1) == [])
l2 = [1, 2, 1, 4, 5, 6]
assert_(np.rec.find_duplicate(l2) == [1])
l3 = [1, 2, 1, 4, 1, 6, 2, 3]
assert_(np.rec.find_duplicate(l3) == [1, 2])
l3 = [2, 2, 1, 4, 1, 6, 2, 3]
assert_(np.rec.find_duplicate(l3) == [2, 1])
if __name__ == "__main__":
run_module_suite()
|
shuggiefisher/potato | refs/heads/master | django/contrib/gis/db/backend/__init__.py | 307 | from django.db import connection
if hasattr(connection.ops, 'spatial_version'):
from warnings import warn
warn('The `django.contrib.gis.db.backend` module was refactored and '
'renamed to `django.contrib.gis.db.backends` in 1.2. '
'All functionality of `SpatialBackend` '
'has been moved to the `ops` attribute of the spatial database '
'backend. A `SpatialBackend` alias is provided here for '
'backwards-compatibility, but will be removed in 1.3.')
SpatialBackend = connection.ops
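# A sketch of the alias in use (it assumes a configured spatial database
# backend; new code should go through `connection.ops` directly):
#
#   from django.contrib.gis.db.backend import SpatialBackend
#   print(SpatialBackend.spatial_version)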
|
nielsbuwen/ilastik | refs/heads/master | ilastik/applets/featureSelection/__init__.py | 4 | ###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <team@ilastik.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
from featureSelectionApplet import FeatureSelectionApplet |
devinbalkind/eden | refs/heads/master | private/templates/skeleton/controllers.py | 94 | # -*- coding: utf-8 -*-
from gluon import *
from s3 import S3CustomController
THEME = "skeleton"
# =============================================================================
class index(S3CustomController):
""" Custom Home Page """
def __call__(self):
output = {}
# Allow editing of page content from browser using CMS module
if current.deployment_settings.has_module("cms"):
system_roles = current.auth.get_system_roles()
ADMIN = system_roles.ADMIN in current.session.s3.roles
s3db = current.s3db
table = s3db.cms_post
ltable = s3db.cms_post_module
module = "default"
resource = "index"
query = (ltable.module == module) & \
((ltable.resource == None) | \
(ltable.resource == resource)) & \
(ltable.post_id == table.id) & \
(table.deleted != True)
item = current.db(query).select(table.body,
table.id,
limitby=(0, 1)).first()
if item:
if ADMIN:
item = DIV(XML(item.body),
BR(),
A(current.T("Edit"),
_href=URL(c="cms", f="post",
args=[item.id, "update"]),
_class="action-btn"))
else:
item = DIV(XML(item.body))
elif ADMIN:
if current.response.s3.crud.formstyle == "bootstrap":
_class = "btn"
else:
_class = "action-btn"
item = A(current.T("Edit"),
_href=URL(c="cms", f="post", args="create",
vars={"module": module,
"resource": resource
}),
_class="%s cms-edit" % _class)
else:
item = ""
else:
item = ""
output["item"] = item
self._view(THEME, "index.html")
return output
# END =========================================================================
|
Endika/edx-platform | refs/heads/master | lms/djangoapps/edxnotes/exceptions.py | 201 | """
Exceptions related to EdxNotes.
"""
class EdxNotesParseError(Exception):
"""
An exception that is raised whenever we have issues with data parsing.
"""
pass
class EdxNotesServiceUnavailable(Exception):
"""
An exception that is raised whenever EdxNotes service is unavailable.
"""
pass
|
zhebrak/raftos | refs/heads/master | raftos/server.py | 1 | import asyncio
import functools
from .network import UDPProtocol
from .state import State
async def register(*address_list, cluster=None, loop=None):
"""Start Raft node (server)
Args:
address_list — 127.0.0.1:8000 [, 127.0.0.1:8001 ...]
cluster — [127.0.0.1:8001, 127.0.0.1:8002, ...]
"""
loop = loop or asyncio.get_event_loop()
for address in address_list:
host, port = address.rsplit(':', 1)
node = Node(address=(host, int(port)), loop=loop)
await node.start()
        for address in (cluster or []):
host, port = address.rsplit(':', 1)
port = int(port)
if (host, port) != (node.host, node.port):
node.update_cluster((host, port))
def stop():
for node in Node.nodes:
node.stop()
class Node:
"""Raft Node (Server)"""
nodes = []
def __init__(self, address, loop):
self.host, self.port = address
self.cluster = set()
self.loop = loop
self.state = State(self)
self.requests = asyncio.Queue(loop=self.loop)
self.__class__.nodes.append(self)
async def start(self):
protocol = UDPProtocol(
queue=self.requests,
request_handler=self.request_handler,
loop=self.loop
)
address = self.host, self.port
self.transport, _ = await asyncio.Task(
self.loop.create_datagram_endpoint(protocol, local_addr=address),
loop=self.loop
)
self.state.start()
def stop(self):
self.state.stop()
self.transport.close()
def update_cluster(self, address_list):
self.cluster.update({address_list})
@property
def cluster_count(self):
return len(self.cluster)
def request_handler(self, data):
self.state.request_handler(data)
async def send(self, data, destination):
"""Sends data to destination Node
Args:
data — serializable object
destination — <str> '127.0.0.1:8000' or <tuple> (127.0.0.1, 8000)
"""
if isinstance(destination, str):
host, port = destination.split(':')
destination = host, int(port)
await self.requests.put({
'data': data,
'destination': destination
})
def broadcast(self, data):
"""Sends data to all Nodes in cluster (cluster list does not contain self Node)"""
for destination in self.cluster:
asyncio.ensure_future(self.send(data, destination), loop=self.loop)
|
sriprasanna/django-1.3.1 | refs/heads/master | tests/regressiontests/comment_tests/tests/moderation_view_tests.py | 50 | from django.contrib.auth.models import User, Permission
from django.contrib.comments import signals
from django.contrib.comments.models import Comment, CommentFlag
from django.contrib.contenttypes.models import ContentType
from regressiontests.comment_tests.tests import CommentTestCase
class FlagViewTests(CommentTestCase):
def testFlagGet(self):
"""GET the flag view: render a confirmation page."""
comments = self.createSomeComments()
pk = comments[0].pk
self.client.login(username="normaluser", password="normaluser")
response = self.client.get("/flag/%d/" % pk)
self.assertTemplateUsed(response, "comments/flag.html")
def testFlagPost(self):
"""POST the flag view: actually flag the view (nice for XHR)"""
comments = self.createSomeComments()
pk = comments[0].pk
self.client.login(username="normaluser", password="normaluser")
response = self.client.post("/flag/%d/" % pk)
self.assertEqual(response["Location"], "http://testserver/flagged/?c=%d" % pk)
c = Comment.objects.get(pk=pk)
self.assertEqual(c.flags.filter(flag=CommentFlag.SUGGEST_REMOVAL).count(), 1)
return c
def testFlagPostTwice(self):
"""Users don't get to flag comments more than once."""
c = self.testFlagPost()
self.client.post("/flag/%d/" % c.pk)
self.client.post("/flag/%d/" % c.pk)
self.assertEqual(c.flags.filter(flag=CommentFlag.SUGGEST_REMOVAL).count(), 1)
def testFlagAnon(self):
"""GET/POST the flag view while not logged in: redirect to log in."""
comments = self.createSomeComments()
pk = comments[0].pk
response = self.client.get("/flag/%d/" % pk)
self.assertEqual(response["Location"], "http://testserver/accounts/login/?next=/flag/%d/" % pk)
response = self.client.post("/flag/%d/" % pk)
self.assertEqual(response["Location"], "http://testserver/accounts/login/?next=/flag/%d/" % pk)
def testFlaggedView(self):
comments = self.createSomeComments()
pk = comments[0].pk
response = self.client.get("/flagged/", data={"c":pk})
self.assertTemplateUsed(response, "comments/flagged.html")
def testFlagSignals(self):
"""Test signals emitted by the comment flag view"""
# callback
def receive(sender, **kwargs):
self.assertEqual(kwargs['flag'].flag, CommentFlag.SUGGEST_REMOVAL)
self.assertEqual(kwargs['request'].user.username, "normaluser")
received_signals.append(kwargs.get('signal'))
# Connect signals and keep track of handled ones
received_signals = []
signals.comment_was_flagged.connect(receive)
# Post a comment and check the signals
self.testFlagPost()
self.assertEqual(received_signals, [signals.comment_was_flagged])
def makeModerator(username):
u = User.objects.get(username=username)
ct = ContentType.objects.get_for_model(Comment)
p = Permission.objects.get(content_type=ct, codename="can_moderate")
u.user_permissions.add(p)
class DeleteViewTests(CommentTestCase):
def testDeletePermissions(self):
"""The delete view should only be accessible to 'moderators'"""
comments = self.createSomeComments()
pk = comments[0].pk
self.client.login(username="normaluser", password="normaluser")
response = self.client.get("/delete/%d/" % pk)
self.assertEqual(response["Location"], "http://testserver/accounts/login/?next=/delete/%d/" % pk)
makeModerator("normaluser")
response = self.client.get("/delete/%d/" % pk)
self.assertEqual(response.status_code, 200)
def testDeletePost(self):
"""POSTing the delete view should mark the comment as removed"""
comments = self.createSomeComments()
pk = comments[0].pk
makeModerator("normaluser")
self.client.login(username="normaluser", password="normaluser")
response = self.client.post("/delete/%d/" % pk)
self.assertEqual(response["Location"], "http://testserver/deleted/?c=%d" % pk)
c = Comment.objects.get(pk=pk)
self.assertTrue(c.is_removed)
self.assertEqual(c.flags.filter(flag=CommentFlag.MODERATOR_DELETION, user__username="normaluser").count(), 1)
def testDeleteSignals(self):
def receive(sender, **kwargs):
received_signals.append(kwargs.get('signal'))
# Connect signals and keep track of handled ones
received_signals = []
signals.comment_was_flagged.connect(receive)
# Post a comment and check the signals
self.testDeletePost()
self.assertEqual(received_signals, [signals.comment_was_flagged])
def testDeletedView(self):
comments = self.createSomeComments()
pk = comments[0].pk
response = self.client.get("/deleted/", data={"c":pk})
self.assertTemplateUsed(response, "comments/deleted.html")
class ApproveViewTests(CommentTestCase):
def testApprovePermissions(self):
"""The delete view should only be accessible to 'moderators'"""
comments = self.createSomeComments()
pk = comments[0].pk
self.client.login(username="normaluser", password="normaluser")
response = self.client.get("/approve/%d/" % pk)
self.assertEqual(response["Location"], "http://testserver/accounts/login/?next=/approve/%d/" % pk)
makeModerator("normaluser")
response = self.client.get("/approve/%d/" % pk)
self.assertEqual(response.status_code, 200)
def testApprovePost(self):
"""POSTing the delete view should mark the comment as removed"""
c1, c2, c3, c4 = self.createSomeComments()
c1.is_public = False; c1.save()
makeModerator("normaluser")
self.client.login(username="normaluser", password="normaluser")
response = self.client.post("/approve/%d/" % c1.pk)
self.assertEqual(response["Location"], "http://testserver/approved/?c=%d" % c1.pk)
c = Comment.objects.get(pk=c1.pk)
self.assertTrue(c.is_public)
self.assertEqual(c.flags.filter(flag=CommentFlag.MODERATOR_APPROVAL, user__username="normaluser").count(), 1)
def testApproveSignals(self):
def receive(sender, **kwargs):
received_signals.append(kwargs.get('signal'))
# Connect signals and keep track of handled ones
received_signals = []
signals.comment_was_flagged.connect(receive)
# Post a comment and check the signals
self.testApprovePost()
self.assertEqual(received_signals, [signals.comment_was_flagged])
def testApprovedView(self):
comments = self.createSomeComments()
pk = comments[0].pk
response = self.client.get("/approved/", data={"c":pk})
self.assertTemplateUsed(response, "comments/approved.html")
class AdminActionsTests(CommentTestCase):
urls = "regressiontests.comment_tests.urls_admin"
def setUp(self):
super(AdminActionsTests, self).setUp()
# Make "normaluser" a moderator
u = User.objects.get(username="normaluser")
u.is_staff = True
perms = Permission.objects.filter(
content_type__app_label = 'comments',
codename__endswith = 'comment'
)
for perm in perms:
u.user_permissions.add(perm)
u.save()
def testActionsNonModerator(self):
comments = self.createSomeComments()
self.client.login(username="normaluser", password="normaluser")
response = self.client.get("/admin/comments/comment/")
self.assertEqual("approve_comments" in response.content, False)
def testActionsModerator(self):
comments = self.createSomeComments()
makeModerator("normaluser")
self.client.login(username="normaluser", password="normaluser")
response = self.client.get("/admin/comments/comment/")
self.assertEqual("approve_comments" in response.content, True)
def testActionsDisabledDelete(self):
"Tests a CommentAdmin where 'delete_selected' has been disabled."
comments = self.createSomeComments()
self.client.login(username="normaluser", password="normaluser")
response = self.client.get('/admin2/comments/comment/')
self.assertEqual(response.status_code, 200)
self.assertTrue(
'<option value="delete_selected">' not in response.content,
"Found an unexpected delete_selected in response"
)
|