repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
kl4us/kgb | parser/parser.py | 1 | 9468 | import os
import re
import time
import datetime
from kgb import settings as settings
from quake3 import rcon
from database import api
from random import choice
class Parser:
    """
    Incremental reader ("tail") for a game-server log file.

    Remembers how far the file has already been consumed
    (``self.file_dimension``, a byte offset) and, on each ``read()``,
    returns only the lines appended since the previous call.
    """

    def __init__(self, log_file):
        # Path of the log file to follow.
        self.full_file_name = log_file
        # Byte offset already consumed; start at the current end of file
        # so only lines written after construction are reported.
        self.file_dimension = os.path.getsize(self.full_file_name)
        # Last batch of lines handed out (kept for external inspection).
        self.text_to_process = []

    def read(self):
        """
        Return the list of new lines appended since the last call, or
        None when there is nothing new.

        If the file shrank (log rotation/truncation), resynchronise the
        offset to the new size and return None.
        """
        current_size = os.path.getsize(self.full_file_name)
        if current_size > self.file_dimension:
            # BUGFIX: use a context manager so the handle is closed even
            # if reading raises, and advance the offset with tell() so
            # bytes appended between read() and a second getsize() call
            # are not silently skipped on the next iteration.
            with open(self.full_file_name, "r") as logfile:
                logfile.seek(self.file_dimension)
                self.text_to_process = logfile.read()
                self.file_dimension = logfile.tell()
            self.text_to_process = self.text_to_process.split('\n')
            return self.text_to_process
        elif current_size < self.file_dimension:
            # File was truncated or rotated: follow from its new end.
            self.file_dimension = current_size
        return None
class Evaluator:
def __init__(self, host_address, host_port, rcon_passwd, api_url , api_user, api_key, geo_database):
self.host_address = host_address
self.host_port = host_port
self.rcon_passwd = rcon_passwd
self.geo_database = geo_database
self.rc = rcon.Rcon(self.host_address, self.host_port, self.rcon_passwd, api_url, api_user, api_key, geo_database)
self.api_url = api_url
self.api_user = api_user
self.api_key = api_key
self.api = api.Api(api_user, api_key, api_url)
def evaluate_config(self):
server_config_found, server_config_objs = self.api.get_server_configs()
if server_config_found:
for item in server_config_objs:
if item['code'] == 'SPAM_MESSAGE':
settings.SPAM_MESSAGES.append(item['value'])
elif item['code'] == 'SPAM_MESSAGES_TIMEOUT':
settings.SPAM_MESSAGES_TIMEOUT = item['value']
elif item['code'] == 'SERVER_CLOSED':
settings.SERVER_CLOSED = item['value']
elif item['code'] == 'SERVER_CLOSED_TIMEOUT':
settings.SERVER_CLOSED_TIMEOUT = item['value']
elif item['code'] == 'EXCLUDE_COMMAND':
settings.EXCLUDE_COMMANDS.append(item['value'])
def put_spam(self):
self.rc.putMessage(None, str(choice(settings.SPAM_MESSAGES)))
def start(self):
self.rc.putMessage(None, settings.BOT_MESSAGE_START)
def evaluate_player(self, data):
res = None
if data.find("ClientUserinfo:") != -1:
res = re.search(r"ClientUserinfo: (?P<id>\d+).*\\ip\\(?P<ip>\d*\.\d*\.\d*\.\d*).*\\name\\(?P<name>.*?)(\\|$)",data)
elif data.find("ClientUserinfoChanged:")!=-1:
res = re.search(r"ClientUserinfoChanged: (?P<id>\d+).*n\\(?P<name>.*?)\\t", data)
if res:
player = self.rc.getPlayer(res.group("id"))
if player and player.guid != '' and player.guid is not None:
player_found, player_obj = self.api.get_player(player.guid)
if not player_found:
# 'player non trovato, lo inserisco'
player_found, player_obj = self.api.insert_player(player)
if player_found:
# 'player inserito'
pass
else:
# 'errore: player non inserito'
pass
if player_found:
# 'player esiste, verifico se inserire alias'
alias_found, alias_obj = self.api.get_alias(player.guid, player.name)
if alias_found is not None and not alias_found:
# 'alias non trovato, lo inserisco'
alias_found, alias_obj = self.api.insert_alias(player, player_obj['resource_uri'])
if alias_found:
# 'alias inserito'
pass
else:
# 'alias esiste'
pass
# 'player esiste, verifico se inserire profile'
profile_found, profile_obj = self.api.get_profile(player.guid, player.address.split(":")[0])
if profile_found is not None and not profile_found:
# 'profile non trovato, lo inserisco'
profile_found, profile_obj = self.api.insert_profile(player, player_obj['resource_uri'])
if profile_found:
# 'profile inserito'
pass
else:
# 'profile esiste'
pass
if data.find("ClientBegin:") != -1: # *** CLIENTCONNECT
res = re.search(r"ClientBegin: (?P<id>\d+)", data)
if res:
player = self.rc.getPlayer(res.group("id"))
if player:
# 'player esiste, verifico se e' bannato
bans_found, bans_obj = self.api.get_bans(player.guid)
if bans_found is not None and bans_found:
# 'verifico se esiste un ban attivo'
for ban in bans_obj:
if ban['is_permanent']:
# ban permanente, lo kikko
print "perban per %s (%s). kick" % (player.name, player.guid)
self.rc.putMessage(player.slot, "You are ^1permbanned!")
self.rc.putMessage(player.slot, "Reason: ^1" + str(ban['ban_reason']))
time.sleep(1)
self.rc.putCommand('kick %d' % player.slot)
else:
c = time.strptime(str(ban['created']),"%Y-%m-%dT%H:%M:%S")
t = int(time.mktime(c))
t = t + (int(ban['ban_minute'])*60)
# t = time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(t))
# print "scadenza: %s - ora: %s" % (t,datetime.datetime.now().strftime("%s"))
if str(datetime.datetime.now().strftime("%s")) < str(t):
# print "suca nabbo"
print "tempban con scadenza %s per %s (%s). kick" % (time.strftime("%Y-%m-%d %H:%M:%S",time.localtime(t)), player.name, player.guid)
self.rc.putMessage(player.slot, "You are ^1tempbanned!")
self.rc.putMessage(player.slot, "Reason: ^1" + str(ban['ban_reason']))
time.sleep(1)
self.rc.putCommand("kick %d" % (player.slot))
else:
pass
# print "tempban scaduto il %s per %s" % (t, player.guid)
else:
pass
# print 'player isn\'t banned'
def evaluate_command(self, x):
for searchstring, replacestring in settings.REPLACE_STRINGS:
x = x.replace(searchstring, replacestring)
res = re.search( r"(?P<say>say(team)?): (?P<id>\d+) .*?: (?P<text>.*)",x)
if res or True:
slot= res.group("id")
message = res.group("text")
say = res.group("say")
# '\n'
# 'verifico se mi e\' stato passato un comando'
data = message.split()
if len(data)>0:
for command, command_prop in settings.COMMANDS.items():
if data[0] == command_prop['command'] or data[0] == command_prop['command_slug']:
# controllo se e' un comando escluso
for exclude_command in settings.EXCLUDE_COMMANDS:
if command_prop['command'] == str(exclude_command):
return True
# 'e\' un comando, verifico se il player ha i permessi'
player = self.rc.getPlayer(res.group("id"))
is_authorized = False
if player and player.guid != '':
player_found, player_obj = self.api.get_player(player.guid)
if player_found:
if player_obj['level'] >= command_prop['min_level']:
is_authorized = True
if is_authorized:
# 'e\' autorizzato ... perform command'
getattr(self.rc, command_prop['function'])(player, message, player_obj)
else:
self.rc.putMessage(player.slot, settings.MESSAGE_PERMISSION % (command_prop['min_level']))
else:
# 'no player'
pass
| mit |
S01780/python-social-auth | examples/pyramid_example/example/settings.py | 51 | 2415 | SOCIAL_AUTH_SETTINGS = {
'SOCIAL_AUTH_LOGIN_URL': '/',
'SOCIAL_AUTH_LOGIN_REDIRECT_URL': '/done',
'SOCIAL_AUTH_USER_MODEL': 'example.models.User',
'SOCIAL_AUTH_LOGIN_FUNCTION': 'example.auth.login_user',
'SOCIAL_AUTH_LOGGEDIN_FUNCTION': 'example.auth.login_required',
'SOCIAL_AUTH_AUTHENTICATION_BACKENDS': (
'social.backends.twitter.TwitterOAuth',
'social.backends.open_id.OpenIdAuth',
'social.backends.google.GoogleOpenId',
'social.backends.google.GoogleOAuth2',
'social.backends.google.GoogleOAuth',
'social.backends.yahoo.YahooOpenId',
'social.backends.stripe.StripeOAuth2',
'social.backends.persona.PersonaAuth',
'social.backends.facebook.FacebookOAuth2',
'social.backends.facebook.FacebookAppOAuth2',
'social.backends.yahoo.YahooOAuth',
'social.backends.angel.AngelOAuth2',
'social.backends.behance.BehanceOAuth2',
'social.backends.bitbucket.BitbucketOAuth',
'social.backends.box.BoxOAuth2',
'social.backends.linkedin.LinkedinOAuth',
'social.backends.github.GithubOAuth2',
'social.backends.foursquare.FoursquareOAuth2',
'social.backends.instagram.InstagramOAuth2',
'social.backends.live.LiveOAuth2',
'social.backends.vk.VKOAuth2',
'social.backends.dailymotion.DailymotionOAuth2',
'social.backends.disqus.DisqusOAuth2',
'social.backends.dropbox.DropboxOAuth',
'social.backends.eveonline.EVEOnlineOAuth2',
'social.backends.evernote.EvernoteSandboxOAuth',
'social.backends.fitbit.FitbitOAuth',
'social.backends.flickr.FlickrOAuth',
'social.backends.livejournal.LiveJournalOpenId',
'social.backends.soundcloud.SoundcloudOAuth2',
'social.backends.thisismyjam.ThisIsMyJamOAuth1',
'social.backends.stocktwits.StocktwitsOAuth2',
'social.backends.tripit.TripItOAuth',
'social.backends.twilio.TwilioAuth',
'social.backends.clef.ClefOAuth2',
'social.backends.xing.XingOAuth',
'social.backends.yandex.YandexOAuth2',
'social.backends.podio.PodioOAuth2',
'social.backends.reddit.RedditOAuth2',
'social.backends.mineid.MineIDOAuth2',
'social.backends.wunderlist.WunderlistOAuth2',
)
}
def includeme(config):
    """Pyramid include hook: merge SOCIAL_AUTH_SETTINGS into the app registry settings."""
    config.registry.settings.update(SOCIAL_AUTH_SETTINGS)
| bsd-3-clause |
windedge/odoo | addons/stock_account/product.py | 166 | 11361 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.fields import Many2one
class product_template(osv.osv):
    """
    Extends product.template with inventory-valuation settings (valuation
    mode, costing method, stock input/output counterpart accounts) and the
    accounting entries generated when the standard price is changed.
    """
    _name = 'product.template'
    _inherit = 'product.template'
    _columns = {
        # Valuation mode: 'manual_periodic' (no automatic entries) or
        # 'real_time' (journal entries posted on every stock move).
        'valuation': fields.property(type='selection', selection=[('manual_periodic', 'Periodical (manual)'),
                                        ('real_time', 'Real Time (automated)')], string='Inventory Valuation',
                                        help="If real-time valuation is enabled for a product, the system will automatically write journal entries corresponding to stock moves, with product price as specified by the 'Costing Method'" \
                                             "The inventory variation account set on the product category will represent the current inventory value, and the stock input and stock output account will hold the counterpart moves for incoming and outgoing products."
                                        , required=True, copy=True),
        'cost_method': fields.property(type='selection', selection=[('standard', 'Standard Price'), ('average', 'Average Price'), ('real', 'Real Price')],
            help="""Standard Price: The cost price is manually updated at the end of a specific period (usually every year).
Average Price: The cost price is recomputed at each incoming shipment and used for the product valuation.
Real Price: The cost price displayed is the price of the last outgoing product (will be use in case of inventory loss for example).""",
            string="Costing Method", required=True, copy=True),
        # Product-level counterpart accounts; when unset the category-level
        # accounts (product_category below) are used instead.
        'property_stock_account_input': fields.property(
            type='many2one',
            relation='account.account',
            string='Stock Input Account',
            help="When doing real-time inventory valuation, counterpart journal items for all incoming stock moves will be posted in this account, unless "
                 "there is a specific valuation account set on the source location. When not set on the product, the one from the product category is used."),
        'property_stock_account_output': fields.property(
            type='many2one',
            relation='account.account',
            string='Stock Output Account',
            help="When doing real-time inventory valuation, counterpart journal items for all outgoing stock moves will be posted in this account, unless "
                 "there is a specific valuation account set on the destination location. When not set on the product, the one from the product category is used."),
    }
    _defaults = {
        'valuation': 'manual_periodic',
    }

    def onchange_type(self, cr, uid, ids, type):
        # Consumables and services cannot be valued in real time: force the
        # periodic (manual) valuation when the product type changes.
        res = super(product_template, self).onchange_type(cr, uid, ids, type)
        if type in ('consu', 'service'):
            res = {'value': {'valuation': 'manual_periodic'}}
        return res

    def get_product_accounts(self, cr, uid, product_id, context=None):
        """ To get the stock input account, stock output account and stock journal related to product.
        @param product_id: product id
        @return: dictionary which contains information regarding stock input account, stock output account and stock journal
        """
        if context is None:
            context = {}
        product_obj = self.browse(cr, uid, product_id, context=context)
        # Product-level accounts win; fall back to the product category.
        stock_input_acc = product_obj.property_stock_account_input and product_obj.property_stock_account_input.id or False
        if not stock_input_acc:
            stock_input_acc = product_obj.categ_id.property_stock_account_input_categ and product_obj.categ_id.property_stock_account_input_categ.id or False
        stock_output_acc = product_obj.property_stock_account_output and product_obj.property_stock_account_output.id or False
        if not stock_output_acc:
            stock_output_acc = product_obj.categ_id.property_stock_account_output_categ and product_obj.categ_id.property_stock_account_output_categ.id or False
        # Journal and valuation account only exist at category level.
        journal_id = product_obj.categ_id.property_stock_journal and product_obj.categ_id.property_stock_journal.id or False
        account_valuation = product_obj.categ_id.property_stock_valuation_account_id and product_obj.categ_id.property_stock_valuation_account_id.id or False
        # All four pieces are required to post valuation entries.
        if not all([stock_input_acc, stock_output_acc, account_valuation, journal_id]):
            raise osv.except_osv(_('Error!'), _('''One of the following information is missing on the product or product category and prevents the accounting valuation entries to be created:
Product: %s
Stock Input Account: %s
Stock Output Account: %s
Stock Valuation Account: %s
Stock Journal: %s
''') % (product_obj.name, stock_input_acc, stock_output_acc, account_valuation, journal_id))
        return {
            'stock_account_input': stock_input_acc,
            'stock_account_output': stock_output_acc,
            'stock_journal': journal_id,
            'property_stock_valuation_account_id': account_valuation
        }

    def do_change_standard_price(self, cr, uid, ids, new_price, context=None):
        """ Changes the Standard Price of Product and creates an account move accordingly."""
        location_obj = self.pool.get('stock.location')
        move_obj = self.pool.get('account.move')
        move_line_obj = self.pool.get('account.move.line')
        if context is None:
            context = {}
        # Only internal locations of the current user's company are revalued.
        user_company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
        loc_ids = location_obj.search(cr, uid, [('usage', '=', 'internal'), ('company_id', '=', user_company_id)])
        for rec_id in ids:
            datas = self.get_product_accounts(cr, uid, rec_id, context=context)
            for location in location_obj.browse(cr, uid, loc_ids, context=context):
                # Browse the product in a location-scoped context so that
                # standard_price / qty_available are per-location values.
                c = context.copy()
                c.update({'location': location.id, 'compute_child': False})
                product = self.browse(cr, uid, rec_id, context=c)
                diff = product.standard_price - new_price
                if not diff:
                    raise osv.except_osv(_('Error!'), _("No difference between standard price and new price!"))
                for prod_variant in product.product_variant_ids:
                    qty = prod_variant.qty_available
                    if qty:
                        # Accounting Entries
                        move_vals = {
                            'journal_id': datas['stock_journal'],
                            'company_id': location.company_id.id,
                        }
                        move_id = move_obj.create(cr, uid, move_vals, context=context)
                        # Price decrease with stock on hand debits the input
                        # account; otherwise the valuation account is debited.
                        if diff*qty > 0:
                            amount_diff = qty * diff
                            debit_account_id = datas['stock_account_input']
                            credit_account_id = datas['property_stock_valuation_account_id']
                        else:
                            amount_diff = qty * -diff
                            debit_account_id = datas['property_stock_valuation_account_id']
                            credit_account_id = datas['stock_account_output']
                        move_line_obj.create(cr, uid, {
                            'name': _('Standard Price changed'),
                            'account_id': debit_account_id,
                            'debit': amount_diff,
                            'credit': 0,
                            'move_id': move_id,
                        }, context=context)
                        move_line_obj.create(cr, uid, {
                            'name': _('Standard Price changed'),
                            'account_id': credit_account_id,
                            'debit': 0,
                            'credit': amount_diff,
                            'move_id': move_id
                        }, context=context)
            self.write(cr, uid, rec_id, {'standard_price': new_price})
        return True
class product_product(osv.osv):
    """Mirrors product_template.onchange_type on the variant model."""
    _inherit = 'product.product'

    def onchange_type(self, cr, uid, ids, type):
        # Delegate to the parent, then override the valuation for product
        # types that cannot be valued in real time.
        result = super(product_product, self).onchange_type(cr, uid, ids, type)
        if type not in ('consu', 'service'):
            return result
        return {'value': {'valuation': 'manual_periodic'}}
class product_category(osv.osv):
    """
    Adds the category-level accounting defaults used by real-time inventory
    valuation: the stock journal, the input/output counterpart accounts and
    the valuation account. Products without their own accounts fall back to
    these (see product_template.get_product_accounts).
    """
    _inherit = 'product.category'
    _columns = {
        'property_stock_journal': fields.property(
            relation='account.journal',
            type='many2one',
            string='Stock Journal',
            help="When doing real-time inventory valuation, this is the Accounting Journal in which entries will be automatically posted when stock moves are processed."),
        'property_stock_account_input_categ': fields.property(
            type='many2one',
            relation='account.account',
            string='Stock Input Account',
            help="When doing real-time inventory valuation, counterpart journal items for all incoming stock moves will be posted in this account, unless "
                 "there is a specific valuation account set on the source location. This is the default value for all products in this category. It "
                 "can also directly be set on each product"),
        'property_stock_account_output_categ': fields.property(
            type='many2one',
            relation='account.account',
            string='Stock Output Account',
            help="When doing real-time inventory valuation, counterpart journal items for all outgoing stock moves will be posted in this account, unless "
                 "there is a specific valuation account set on the destination location. This is the default value for all products in this category. It "
                 "can also directly be set on each product"),
        'property_stock_valuation_account_id': fields.property(
            type='many2one',
            relation='account.account',
            string="Stock Valuation Account",
            help="When real-time inventory valuation is enabled on a product, this account will hold the current value of the products.",),
    }
| agpl-3.0 |
VoiDeD/Sick-Beard | cherrypy/_cpthreadinglocal.py | 36 | 6855 | # This is a backport of Python-2.4's threading.local() implementation
"""Thread-local objects
(Note that this module provides a Python version of the
threading.local class. Depending on the version of Python you're
using, there may be a faster one available. You should always import
the local class from threading.)
Thread-local objects support the management of thread-local data.
If you have data that you want to be local to a thread, simply create
a thread-local object and use its attributes:
>>> mydata = local()
>>> mydata.number = 42
>>> mydata.number
42
You can also access the local-object's dictionary:
>>> mydata.__dict__
{'number': 42}
>>> mydata.__dict__.setdefault('widgets', [])
[]
>>> mydata.widgets
[]
What's important about thread-local objects is that their data are
local to a thread. If we access the data in a different thread:
>>> log = []
>>> def f():
... items = mydata.__dict__.items()
... items.sort()
... log.append(items)
... mydata.number = 11
... log.append(mydata.number)
>>> import threading
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
>>> log
[[], 11]
we get different data. Furthermore, changes made in the other thread
don't affect data seen in this thread:
>>> mydata.number
42
Of course, values you get from a local object, including a __dict__
attribute, are for whatever thread was current at the time the
attribute was read. For that reason, you generally don't want to save
these values across threads, as they apply only to the thread they
came from.
You can create custom local objects by subclassing the local class:
>>> class MyLocal(local):
... number = 2
... initialized = False
... def __init__(self, **kw):
... if self.initialized:
... raise SystemError('__init__ called too many times')
... self.initialized = True
... self.__dict__.update(kw)
... def squared(self):
... return self.number ** 2
This can be useful to support default values, methods and
initialization. Note that if you define an __init__ method, it will be
called each time the local object is used in a separate thread. This
is necessary to initialize each thread's dictionary.
Now if we create a local object:
>>> mydata = MyLocal(color='red')
Now we have a default number:
>>> mydata.number
2
an initial color:
>>> mydata.color
'red'
>>> del mydata.color
And a method that operates on the data:
>>> mydata.squared()
4
As before, we can access the data in a separate thread:
>>> log = []
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
>>> log
[[('color', 'red'), ('initialized', True)], 11]
without affecting this thread's data:
>>> mydata.number
2
>>> mydata.color
Traceback (most recent call last):
...
AttributeError: 'MyLocal' object has no attribute 'color'
Note that subclasses can define slots, but they are not thread
local. They are shared across threads:
>>> class MyLocal(local):
... __slots__ = 'number'
>>> mydata = MyLocal()
>>> mydata.number = 42
>>> mydata.color = 'red'
So, the separate thread:
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
affects what we see:
>>> mydata.number
11
>>> del mydata
"""
# Threading import is at end
class _localbase(object):
    """
    Base for `local`: allocates the per-instance bookkeeping outside the
    instance __dict__ (which is swapped per thread by _patch): a unique
    per-thread dict key, the saved constructor arguments, and a lock.
    """
    __slots__ = '_local__key', '_local__args', '_local__lock'

    def __new__(cls, *args, **kw):
        self = object.__new__(cls)
        # Unique key under which each thread stores this instance's dict.
        key = 'thread.local.' + str(id(self))
        object.__setattr__(self, '_local__key', key)
        object.__setattr__(self, '_local__args', (args, kw))
        object.__setattr__(self, '_local__lock', RLock())

        # BUGFIX: the original read `args or kw and (...)`; because `and`
        # binds tighter than `or`, positional args were rejected even when
        # a subclass defined __init__. Constructor arguments are only an
        # error when the default object.__init__ would silently drop them
        # (this matches the fix applied upstream in _threading_local).
        if (args or kw) and cls.__init__ is object.__init__:
            raise TypeError("Initialization arguments are not supported")

        # We need to create the thread dict in anticipation of
        # __init__ being called, to make sure we don't call it
        # again ourselves.
        dict = object.__getattribute__(self, '__dict__')
        currentThread().__dict__[key] = dict

        return self
def _patch(self):
    """
    Bind the calling thread's private dict to self.__dict__, creating it
    (and running the subclass __init__, if any) on first use per thread.
    """
    key = object.__getattribute__(self, '_local__key')
    thread_dict = currentThread().__dict__
    d = thread_dict.get(key)
    if d is not None:
        # This thread has seen the instance before: just rebind.
        object.__setattr__(self, '__dict__', d)
        return
    # First access from this thread: fresh dict, then run __init__ with the
    # originally supplied arguments when the subclass defines one.
    d = {}
    thread_dict[key] = d
    object.__setattr__(self, '__dict__', d)
    cls = type(self)
    if cls.__init__ is not object.__init__:
        args, kw = object.__getattribute__(self, '_local__args')
        cls.__init__(self, *args, **kw)
class local(_localbase):
    # Thread-local object: every attribute access first rebinds __dict__ to
    # the calling thread's private dict (via the module-level _patch) while
    # holding the instance lock, then delegates to object's machinery.

    def __getattribute__(self, name):
        lock = object.__getattribute__(self, '_local__lock')
        lock.acquire()
        try:
            _patch(self)
            return object.__getattribute__(self, name)
        finally:
            lock.release()

    def __setattr__(self, name, value):
        lock = object.__getattribute__(self, '_local__lock')
        lock.acquire()
        try:
            _patch(self)
            return object.__setattr__(self, name, value)
        finally:
            lock.release()

    def __delattr__(self, name):
        lock = object.__getattribute__(self, '_local__lock')
        lock.acquire()
        try:
            _patch(self)
            return object.__delattr__(self, name)
        finally:
            lock.release()

    def __del__():
        # Build the real __del__ inside a factory so the names it needs are
        # captured in the closure and survive interpreter shutdown, when
        # module globals may already have been cleared.
        # NOTE(review): this runs at class-body time, before the
        # `from threading import ...` at the bottom of the module, so
        # `enumerate` here appears to resolve to the builtin rather than
        # threading.enumerate -- confirm against the upstream backport.
        threading_enumerate = enumerate
        __getattribute__ = object.__getattribute__

        def __del__(self):
            # Remove this instance's per-thread dict from every live thread.
            key = __getattribute__(self, '_local__key')
            try:
                threads = list(threading_enumerate())
            except:
                # if enumerate fails, as it seems to do during
                # shutdown, we'll skip cleanup under the assumption
                # that there is nothing to clean up
                return
            for thread in threads:
                try:
                    __dict__ = thread.__dict__
                except AttributeError:
                    # Thread is dying, rest in peace
                    continue
                if key in __dict__:
                    try:
                        del __dict__[key]
                    except KeyError:
                        pass # didn't have anything in this thread

        return __del__
    # Replace the factory with the __del__ it produced.
    __del__ = __del__()
from threading import currentThread, enumerate, RLock
| gpl-3.0 |
Haifen/android_kernel_google_msm | tools/perf/scripts/python/net_dropmonitor.py | 4235 | 1554 | # Monitor the system for dropped packets and produce a report of drop locations and counts
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
drop_log = {}
kallsyms = []
def get_kallsyms_table():
    # Load /proc/kallsyms into the global `kallsyms` list as
    # {'loc': address, 'name': symbol} entries sorted by address, printing
    # a progress counter every 100 lines. Silently does nothing when the
    # file cannot be opened (e.g. not on Linux or no permission).
    global kallsyms
    try:
        f = open("/proc/kallsyms", "r")
        # First pass just counts lines so progress can be shown as j/total.
        linecount = 0
        for line in f:
            linecount = linecount+1
        f.seek(0)
    except:
        return
    j = 0
    for line in f:
        # Format per line: "<hex-address> <type> <symbol> [module]".
        loc = int(line.split()[0], 16)
        name = line.split()[2]
        j = j +1
        if ((j % 100) == 0):
            print "\r" + str(j) + "/" + str(linecount),
        kallsyms.append({ 'loc': loc, 'name' : name})
    print "\r" + str(j) + "/" + str(linecount)
    # NOTE(review): sorting a list of dicts relies on Python 2 comparison
    # semantics; entries share the same keys so this orders by 'loc' in
    # practice -- confirm if porting to Python 3.
    kallsyms.sort()
    return
def get_sym(sloc):
    """
    Resolve a raw address string against the sorted kallsyms table.
    Returns (symbol_name, offset) for the first symbol whose address is at
    or above the target, or (None, 0) when nothing matches.
    """
    loc = int(sloc)
    for entry in kallsyms:
        if entry['loc'] >= loc:
            return (entry['name'], entry['loc'] - loc)
    return (None, 0)
def print_drop_table():
    # Emit a fixed-width table of drop sites: resolved symbol name (or the
    # raw address when unresolved), offset into the symbol, and hit count.
    print "%25s %25s %25s" % ("LOCATION", "OFFSET", "COUNT")
    for i in drop_log.keys():
        (sym, off) = get_sym(i)
        if sym == None:
            sym = i
        print "%25s %25s %25s" % (sym, off, drop_log[i])
def trace_begin():
    # perf callback: invoked once when tracing starts.
    print "Starting trace (Ctrl-C to dump results)"
def trace_end():
    # perf callback: invoked when the trace ends; resolve kernel symbols
    # and dump the aggregated drop table.
    print "Gathering kallsyms data"
    get_kallsyms_table()
    print_drop_table()
# called from perf, when it finds a corresponding event
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
                   skbaddr, protocol, location):
    # Count one dropped skb against its kernel drop location.
    # BUGFIX: replaced a bare try/except around the increment -- which would
    # have masked *any* error, not just a missing key -- with an explicit
    # counter update via dict.get().
    slocation = str(location)
    drop_log[slocation] = drop_log.get(slocation, 0) + 1
| gpl-2.0 |
hopkinsth/s3cmd | S3/ExitCodes.py | 8 | 1413 | # -*- coding: utf-8 -*-
# patterned on /usr/include/sysexits.h
EX_OK = 0
EX_GENERAL = 1
EX_PARTIAL = 2 # some parts of the command succeeded, while others failed
EX_SERVERMOVED = 10 # 301: Moved Permanently & 307: Temporary Redirect
EX_SERVERERROR = 11 # 400, 405, 411, 416, 501: Bad request, 504: Gateway Time-out
EX_NOTFOUND = 12 # 404: Not found
EX_CONFLICT = 13 # 409: Conflict (ex: bucket error)
EX_PRECONDITION = 14 # 412: Precondition failed
EX_SERVICE = 15 # 503: Service not available or slow down
EX_USAGE = 64 # The command was used incorrectly (e.g. bad command line syntax)
EX_DATAERR = 65 # Failed file transfer, upload or download
EX_SOFTWARE = 70 # internal software error (e.g. S3 error of unknown specificity)
EX_OSERR = 71 # system error (e.g. out of memory)
EX_OSFILE = 72 # OS error (e.g. invalid Python version)
EX_IOERR = 74 # An error occurred while doing I/O on some file.
EX_TEMPFAIL = 75 # temporary failure (S3DownloadError or similar, retry later)
EX_ACCESSDENIED = 77 # Insufficient permissions to perform the operation on S3
EX_CONFIG = 78 # Configuration file error
# Shell convention: a process killed by signal N exits with 128 + N.
_EX_SIGNAL = 128
_EX_SIGINT = 2
EX_BREAK = _EX_SIGNAL + _EX_SIGINT # Control-C (KeyboardInterrupt raised)
| gpl-2.0 |
otherness-space/myProject003 | my_project_003/lib/python2.7/site-packages/django/db/models/fields/subclassing.py | 227 | 1815 | """
Convenience routines for creating non-trivial Field subclasses, as well as
backwards compatibility utilities.
Add SubfieldBase as the metaclass for your Field subclass, implement
to_python() and the other necessary methods and everything will work
seamlessly.
"""
class SubfieldBase(type):
    """
    Metaclass for custom Field subclasses: wraps contribute_to_class via
    make_contrib() so the model attribute gets the descriptor protocol
    attached when the field is added to a model class.
    """

    def __new__(cls, name, bases, attrs):
        built = super(SubfieldBase, cls).__new__(cls, name, bases, attrs)
        original_contribute = attrs.get('contribute_to_class')
        built.contribute_to_class = make_contrib(built, original_contribute)
        return built
class Creator(object):
    """
    Descriptor placed on the model class so that every assignment to the
    field's attribute is passed through the field's to_python() first.
    """

    def __init__(self, field):
        self.field = field

    def __get__(self, instance, owner=None):
        if instance is None:
            raise AttributeError('Can only be accessed via an instance.')
        return instance.__dict__[self.field.name]

    def __set__(self, instance, value):
        converted = self.field.to_python(value)
        instance.__dict__[self.field.name] = converted
def make_contrib(superclass, func=None):
    """
    Build a contribute_to_class() method for a Field subclass.

    When *func* is given it is the subclass's own contribute_to_class and
    is invoked first (it is expected to call the superclass chain itself);
    otherwise the superclass implementation is called directly. Either way
    the model attribute is then replaced with a Creator descriptor.
    """
    def contribute_to_class(self, cls, name):
        if func is not None:
            func(self, cls, name)
        else:
            super(superclass, self).contribute_to_class(cls, name)
        setattr(cls, self.name, Creator(self))
    return contribute_to_class
| mit |
BehavioralInsightsTeam/edx-platform | lms/djangoapps/courseware/views/views.py | 2 | 70618 | """
Courseware views functions
"""
import json
import logging
import urllib
from collections import OrderedDict, namedtuple
from datetime import datetime
import analytics
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import AnonymousUser, User
from django.core.exceptions import PermissionDenied
from django.urls import reverse
from django.db import transaction
from django.db.models import Q
from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseForbidden
from django.shortcuts import redirect
from django.template.context_processors import csrf
from django.utils.decorators import method_decorator
from django.utils.http import urlquote_plus
from django.utils.text import slugify
from django.utils.translation import ugettext as _
from django.views.decorators.cache import cache_control
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_GET, require_http_methods, require_POST
from django.views.generic import View
from eventtracking import tracker
from ipware.ip import get_ip
from markupsafe import escape
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
from pytz import UTC
from rest_framework import status
from six import text_type
from web_fragments.fragment import Fragment
import shoppingcart
import survey.views
from lms.djangoapps.certificates import api as certs_api
from lms.djangoapps.certificates.models import CertificateStatuses
from course_modes.models import CourseMode, get_course_prices
from courseware.access import has_access, has_ccx_coach_role
from courseware.access_utils import check_course_open_for_learner
from courseware.courses import (
can_self_enroll_in_course,
course_open_for_self_enrollment,
get_course,
get_course_overview_with_access,
get_course_with_access,
get_courses,
get_current_child,
get_permission_for_course_about,
get_studio_url,
sort_by_announcement,
sort_by_start_date
)
from courseware.masquerade import setup_masquerade
from courseware.model_data import FieldDataCache
from courseware.models import BaseStudentModuleHistory, StudentModule
from courseware.url_helpers import get_redirect_url
from courseware.user_state_client import DjangoXBlockUserStateClient
from edxmako.shortcuts import marketing_link, render_to_response, render_to_string
from enrollment.api import add_enrollment
from lms.djangoapps.ccx.custom_exception import CCXLocatorValidationException
from lms.djangoapps.commerce.utils import EcommerceService
from lms.djangoapps.courseware.exceptions import CourseAccessRedirect, Redirect
from lms.djangoapps.experiments.utils import get_experiment_user_metadata_context
from lms.djangoapps.grades.course_grade_factory import CourseGradeFactory
from lms.djangoapps.instructor.enrollment import uses_shib
from lms.djangoapps.instructor.views.api import require_global_staff
from lms.djangoapps.verify_student.services import IDVerificationService
from openedx.core.djangoapps.catalog.utils import get_programs, get_programs_with_type
from openedx.core.djangoapps.certificates import api as auto_certs_api
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.credit.api import (
get_credit_requirement_status,
is_credit_course,
is_user_eligible_for_credit
)
from openedx.core.djangoapps.models.course_details import CourseDetails
from openedx.core.djangoapps.monitoring_utils import set_custom_metrics_for_course_key
from openedx.core.djangoapps.plugin_api.views import EdxFragmentView
from openedx.core.djangoapps.programs.utils import ProgramMarketingDataExtender
from openedx.core.djangoapps.self_paced.models import SelfPacedConfiguration
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.util.user_messages import PageLevelMessages
from openedx.core.djangolib.markup import HTML, Text
from openedx.features.course_experience import UNIFIED_COURSE_TAB_FLAG, course_home_url_name
from openedx.features.course_experience.course_tools import CourseToolsPluginManager
from openedx.features.course_experience.views.course_dates import CourseDatesFragmentView
from openedx.features.course_experience.waffle import waffle as course_experience_waffle
from openedx.features.course_experience.waffle import ENABLE_COURSE_ABOUT_SIDEBAR_HTML
from openedx.features.enterprise_support.api import data_sharing_consent_required
from shoppingcart.utils import is_shopping_cart_enabled
from student.models import CourseEnrollment, UserTestGroup
from util.cache import cache, cache_if_anonymous
from util.db import outer_atomic
from util.milestones_helpers import get_prerequisite_courses_display
from util.views import _record_feedback_in_zendesk, ensure_valid_course_key, ensure_valid_usage_key
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError, NoPathToItem
from xmodule.tabs import CourseTabList
from xmodule.x_module import STUDENT_VIEW
from ..entrance_exams import user_can_skip_entrance_exam
from ..module_render import get_module, get_module_by_usage_id, get_module_for_descriptor
log = logging.getLogger("edx.courseware")

# Only display the requirements on learner dashboard for
# credit and verified modes.
REQUIREMENTS_DISPLAY_MODES = CourseMode.CREDIT_MODES + [CourseMode.VERIFIED]

# Record describing the certificate message shown on the progress page:
# the certificate status code, a user-facing title and message, and optional
# download / web-view URLs (either may be None).
CertData = namedtuple(
    "CertData", ["cert_status", "title", "msg", "download_url", "cert_web_view_url"]
)

# Pre-built CertData values for the certificate states whose message is fixed.
# NOTE(review): the _() calls below run at import time, so these strings are
# translated with whatever locale is active when the module loads — confirm
# this is intended before changing (ugettext_lazy would defer translation).
AUDIT_PASSING_CERT_DATA = CertData(
    CertificateStatuses.audit_passing,
    _('Your enrollment: Audit track'),
    _('You are enrolled in the audit track for this course. The audit track does not include a certificate.'),
    download_url=None,
    cert_web_view_url=None
)
HONOR_PASSING_CERT_DATA = CertData(
    CertificateStatuses.honor_passing,
    _('Your enrollment: Honor track'),
    _('You are enrolled in the honor track for this course. The honor track does not include a certificate.'),
    download_url=None,
    cert_web_view_url=None
)
GENERATING_CERT_DATA = CertData(
    CertificateStatuses.generating,
    _("We're working on it..."),
    _(
        "We're creating your certificate. You can keep working in your courses and a link "
        "to it will appear here and on your Dashboard when it is ready."
    ),
    download_url=None,
    cert_web_view_url=None
)
INVALID_CERT_DATA = CertData(
    CertificateStatuses.invalidated,
    _('Your certificate has been invalidated'),
    _('Please contact your course team if you have any questions.'),
    download_url=None,
    cert_web_view_url=None
)
REQUESTING_CERT_DATA = CertData(
    CertificateStatuses.requesting,
    _('Congratulations, you qualified for a certificate!'),
    _("You've earned a certificate for this course."),
    download_url=None,
    cert_web_view_url=None
)
UNVERIFIED_CERT_DATA = CertData(
    CertificateStatuses.unverified,
    _('Certificate unavailable'),
    _(
        'You have not received a certificate because you do not have a current {platform_name} '
        'verified identity.'
    ).format(platform_name=configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)),
    download_url=None,
    cert_web_view_url=None
)
def _downloadable_cert_data(download_url=None, cert_web_view_url=None):
    """
    Build a ``CertData`` record for a certificate in the downloadable state.

    Arguments:
        download_url (str): URL of the downloadable certificate file, or None.
        cert_web_view_url (str): URL of the web-viewable certificate, or None.

    Returns:
        CertData: record with the downloadable status and the given URLs.
    """
    title = _('Your certificate is available')
    message = _("You've earned a certificate for this course.")
    return CertData(
        CertificateStatuses.downloadable,
        title,
        message,
        download_url=download_url,
        cert_web_view_url=cert_web_view_url,
    )
def user_groups(user):
    """
    Return the names of the ``UserTestGroup``s the given user belongs to.

    Results are cached for an hour per user; caching is disabled under
    ``settings.DEBUG`` because groups change frequently on dev machines.

    TODO (vshnayder): This is not used. When we have a new plan for groups, adjust appropriately.
    """
    if not user.is_authenticated:
        return []

    # TODO: Rewrite in Django
    cache_key = 'user_group_names_{user.id}'.format(user=user)
    one_hour = 60 * 60

    cached_names = cache.get(cache_key)  # pylint: disable=no-member
    # Kill caching on dev machines -- we switch groups a lot
    if settings.DEBUG:
        cached_names = None

    if cached_names is not None:
        return cached_names

    names = [u.name for u in UserTestGroup.objects.filter(users=user)]
    cache.set(cache_key, names, one_hour)  # pylint: disable=no-member
    return names
@ensure_csrf_cookie
@cache_if_anonymous()
def courses(request):
    """
    Render "find courses" page. The course selection work is done in courseware.courses.

    When course discovery is enabled the course list is left empty and the
    front end drives discovery; otherwise the full course list is rendered,
    sorted by start date or by announcement per site configuration.
    """
    course_discovery_meanings = getattr(settings, 'COURSE_DISCOVERY_MEANINGS', {})

    courses_list = []
    if not settings.FEATURES.get('ENABLE_COURSE_DISCOVERY'):
        courses_list = get_courses(request.user)
        sort_by_start = configuration_helpers.get_value(
            "ENABLE_COURSE_SORTING_BY_START_DATE",
            settings.FEATURES["ENABLE_COURSE_SORTING_BY_START_DATE"],
        )
        if sort_by_start:
            courses_list = sort_by_start_date(courses_list)
        else:
            courses_list = sort_by_announcement(courses_list)

    # Add marketable programs to the context.
    programs_list = get_programs_with_type(request.site, include_hidden=False)

    context = {
        'courses': courses_list,
        'course_discovery_meanings': course_discovery_meanings,
        'programs_list': programs_list,
    }
    return render_to_response("courseware/courses.html", context)
@ensure_csrf_cookie
@ensure_valid_course_key
def jump_to_id(request, course_id, module_id):
    """
    This entry point allows for a shorter version of a jump to where just the id of the element is
    passed in. This assumes that id is unique within the course_id namespace

    Raises Http404 when no item with that id exists in the course; if several
    match, the first is used and a warning is logged.
    """
    course_key = CourseKey.from_string(course_id)
    matches = modulestore().get_items(course_key, qualifiers={'name': module_id})

    if not matches:
        raise Http404(
            u"Could not find id: {0} in course_id: {1}. Referer: {2}".format(
                module_id, course_id, request.META.get("HTTP_REFERER", "")
            ))
    if len(matches) > 1:
        log.warning(
            u"Multiple items found with id: %s in course_id: %s. Referer: %s. Using first: %s",
            module_id,
            course_id,
            request.META.get("HTTP_REFERER", ""),
            text_type(matches[0].location)
        )

    return jump_to(request, course_id, text_type(matches[0].location))
@ensure_csrf_cookie
def jump_to(_request, course_id, location):
    """
    Show the page that contains a specific location.

    Parses the course and usage keys, resolves the location to a courseware
    URL, and redirects there. Raises Http404 when either key is malformed,
    when nothing exists at the location, or when the location is not reachable
    within any class. Access checking itself is delegated to the index view.
    """
    try:
        course_key = CourseKey.from_string(course_id)
        usage_key = UsageKey.from_string(location).replace(course_key=course_key)
    except InvalidKeyError:
        raise Http404(u"Invalid course_key or usage_key")

    try:
        destination = get_redirect_url(course_key, usage_key)
    except ItemNotFoundError:
        raise Http404(u"No data at this location: {0}".format(usage_key))
    except NoPathToItem:
        raise Http404(u"This location is not in any class: {0}".format(usage_key))

    return redirect(destination)
@ensure_csrf_cookie
@ensure_valid_course_key
@data_sharing_consent_required
def course_info(request, course_id):
    """
    Display the course's info.html, or 404 if there is no such course.
    Assumes the course_id is in a valid format.

    May redirect instead of rendering: to the unified Course Home tab, to the
    dashboard (not enrolled and cannot self-enroll), to the entrance exam, or
    straight to courseware (deprecated ``bypass_home`` when arriving from the
    dashboard).
    """
    # TODO: LEARNER-611: This can be deleted with Course Info removal. The new
    # Course Home is using its own processing of last accessed.
    def get_last_accessed_courseware(course, request, user):
        """
        Returns the courseware module URL that the user last accessed, or None if it cannot be found.
        """
        field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
            course.id, request.user, course, depth=2
        )
        course_module = get_module_for_descriptor(
            user, request, course, field_data_cache, course.id, course=course
        )
        # Walk course -> chapter -> section via each level's current child and
        # build the courseware URL for that section.
        chapter_module = get_current_child(course_module)
        if chapter_module is not None:
            section_module = get_current_child(chapter_module)
            if section_module is not None:
                url = reverse('courseware_section', kwargs={
                    'course_id': text_type(course.id),
                    'chapter': chapter_module.url_name,
                    'section': section_module.url_name
                })
                return url
        return None
    course_key = CourseKey.from_string(course_id)
    # If the unified course experience is enabled, redirect to the "Course" tab
    if UNIFIED_COURSE_TAB_FLAG.is_enabled(course_key):
        return redirect(reverse(course_home_url_name(course_key), args=[course_id]))
    with modulestore().bulk_operations(course_key):
        course = get_course_with_access(request.user, 'load', course_key)
        staff_access = has_access(request.user, 'staff', course)
        # `user` below is the effective (possibly masqueraded) user;
        # `request.user` remains the real requester.
        masquerade, user = setup_masquerade(request, course_key, staff_access, reset_masquerade_data=True)
        # LEARNER-612: CCX redirect handled by new Course Home (DONE)
        # LEARNER-1697: Transition banner messages to new Course Home (DONE)
        # if user is not enrolled in a course then app will show enroll/get register link inside course info page.
        user_is_enrolled = CourseEnrollment.is_enrolled(user, course.id)
        show_enroll_banner = request.user.is_authenticated and not user_is_enrolled
        # If the user is not enrolled but this is a course that does not support
        # direct enrollment then redirect them to the dashboard.
        if not user_is_enrolled and not can_self_enroll_in_course(course_key):
            return redirect(reverse('dashboard'))
        # LEARNER-170: Entrance exam is handled by new Course Outline. (DONE)
        # If the user needs to take an entrance exam to access this course, then we'll need
        # to send them to that specific course module before allowing them into other areas
        if not user_can_skip_entrance_exam(user, course):
            return redirect(reverse('courseware', args=[text_type(course.id)]))
        # TODO: LEARNER-611: Remove deprecated course.bypass_home.
        # If the user is coming from the dashboard and bypass_home setting is set,
        # redirect them straight to the courseware page.
        is_from_dashboard = reverse('dashboard') in request.META.get('HTTP_REFERER', [])
        if course.bypass_home and is_from_dashboard:
            return redirect(reverse('courseware', args=[course_id]))
        # Construct the dates fragment
        dates_fragment = None
        if request.user.is_authenticated:
            # TODO: LEARNER-611: Remove enable_course_home_improvements
            if SelfPacedConfiguration.current().enable_course_home_improvements:
                # Shared code with the new Course Home (DONE)
                dates_fragment = CourseDatesFragmentView().render_to_fragment(request, course_id=course_id)
        # This local import is due to the circularity of lms and openedx references.
        # This may be resolved by using stevedore to allow web fragments to be used
        # as plugins, and to avoid the direct import.
        from openedx.features.course_experience.views.course_reviews import CourseReviewsModuleFragmentView
        # Shared code with the new Course Home (DONE)
        # Get the course tools enabled for this user and course
        course_tools = CourseToolsPluginManager.get_enabled_course_tools(request, course_key)
        # Site-configurable presentation flags for the title/subtitle block.
        course_homepage_invert_title =\
            configuration_helpers.get_value(
                'COURSE_HOMEPAGE_INVERT_TITLE',
                False
            )
        course_homepage_show_subtitle =\
            configuration_helpers.get_value(
                'COURSE_HOMEPAGE_SHOW_SUBTITLE',
                True
            )
        course_homepage_show_org =\
            configuration_helpers.get_value('COURSE_HOMEPAGE_SHOW_ORG', True)
        # By default the course number is the title and the display name the
        # subtitle; the invert flag swaps them.
        course_title = course.display_number_with_default
        course_subtitle = course.display_name_with_default
        if course_homepage_invert_title:
            course_title = course.display_name_with_default
            course_subtitle = course.display_number_with_default
        context = {
            'request': request,
            'masquerade_user': user,
            'course_id': text_type(course_key),
            'url_to_enroll': CourseTabView.url_to_enroll(course_key),
            'cache': None,
            'course': course,
            'course_title': course_title,
            'course_subtitle': course_subtitle,
            'show_subtitle': course_homepage_show_subtitle,
            'show_org': course_homepage_show_org,
            'staff_access': staff_access,
            'masquerade': masquerade,
            'supports_preview_menu': True,
            'studio_url': get_studio_url(course, 'course_info'),
            'show_enroll_banner': show_enroll_banner,
            'user_is_enrolled': user_is_enrolled,
            'dates_fragment': dates_fragment,
            'course_tools': course_tools,
        }
        context.update(
            get_experiment_user_metadata_context(
                course,
                user,
            )
        )
        # Get the URL of the user's last position in order to display the 'where you were last' message
        context['resume_course_url'] = None
        # TODO: LEARNER-611: Remove enable_course_home_improvements
        if SelfPacedConfiguration.current().enable_course_home_improvements:
            context['resume_course_url'] = get_last_accessed_courseware(course, request, user)
        if not check_course_open_for_learner(user, course):
            # Disable student view button if user is staff and
            # course is not yet visible to students.
            context['disable_student_access'] = True
            context['supports_preview_menu'] = False
        return render_to_response('courseware/info.html', context)
class StaticCourseTabView(EdxFragmentView):
    """
    View that displays a static course tab with a given name.
    """
    @method_decorator(ensure_csrf_cookie)
    @method_decorator(ensure_valid_course_key)
    def get(self, request, course_id, tab_slug, **kwargs):
        """
        Display the static course tab identified by ``tab_slug``, or 404 if
        the course has no such tab.
        """
        course_key = CourseKey.from_string(course_id)
        course = get_course_with_access(request.user, 'load', course_key)

        tab = CourseTabList.get_tab_by_slug(course.tabs, tab_slug)
        if tab is None:
            raise Http404

        # Show warnings if the user has limited access
        CourseTabView.register_user_access_warning_messages(request, course_key)

        return super(StaticCourseTabView, self).get(request, course=course, tab=tab, **kwargs)

    def render_to_fragment(self, request, course=None, tab=None, **kwargs):
        """
        Render the static tab's content as a web fragment.
        """
        return get_static_tab_fragment(request, course, tab)

    def render_standalone_response(self, request, fragment, course=None, tab=None, **kwargs):
        """
        Render the static tab's fragment as a full standalone HTML page.
        """
        context = {
            'course': course,
            'active_page': 'static_tab_{0}'.format(tab['url_slug']),
            'tab': tab,
            'fragment': fragment,
            'uses_pattern_library': False,
            'disable_courseware_js': True,
        }
        return render_to_response('courseware/static_tab.html', context)
class CourseTabView(EdxFragmentView):
    """
    View that displays a course tab page.

    Renders the tab as a web fragment; on failure, delegates to
    ``handle_exceptions`` which renders a 500 error page for non-redirect,
    non-404 errors.
    """
    @method_decorator(ensure_csrf_cookie)
    @method_decorator(ensure_valid_course_key)
    @method_decorator(data_sharing_consent_required)
    def get(self, request, course_id, tab_type, **kwargs):
        """
        Displays a course tab page that contains a web fragment.
        """
        course_key = CourseKey.from_string(course_id)
        with modulestore().bulk_operations(course_key):
            course = get_course_with_access(request.user, 'load', course_key)
            try:
                # Render the page
                tab = CourseTabList.get_tab_by_type(course.tabs, tab_type)
                page_context = self.create_page_context(request, course=course, tab=tab, **kwargs)
                # Show warnings if the user has limited access
                # Must come after masquerading on creation of page context
                self.register_user_access_warning_messages(request, course_key)
                set_custom_metrics_for_course_key(course_key)
                return super(CourseTabView, self).get(request, course=course, page_context=page_context, **kwargs)
            except Exception as exception: # pylint: disable=broad-except
                return CourseTabView.handle_exceptions(request, course, exception)
    @staticmethod
    def url_to_enroll(course_key):
        """
        Returns the URL to use to enroll in the specified course.
        """
        # Default to the LMS about page; prefer the marketing site when enabled.
        url_to_enroll = reverse('about_course', args=[text_type(course_key)])
        if settings.FEATURES.get('ENABLE_MKTG_SITE'):
            url_to_enroll = marketing_link('COURSES')
        return url_to_enroll
    @staticmethod
    def register_user_access_warning_messages(request, course_key):
        """
        Register messages to be shown to the user if they have limited access.

        Anonymous users are prompted to sign in or register; authenticated but
        unenrolled users are prompted to enroll (with an enroll button only
        when the course is open for self-enrollment).
        """
        if request.user.is_anonymous:
            PageLevelMessages.register_warning_message(
                request,
                Text(_("To see course content, {sign_in_link} or {register_link}.")).format(
                    sign_in_link=HTML('<a href="/login?next={current_url}">{sign_in_label}</a>').format(
                        sign_in_label=_("sign in"),
                        current_url=urlquote_plus(request.path),
                    ),
                    register_link=HTML('<a href="/register?next={current_url}">{register_label}</a>').format(
                        register_label=_("register"),
                        current_url=urlquote_plus(request.path),
                    ),
                )
            )
        else:
            if not CourseEnrollment.is_enrolled(request.user, course_key):
                # Only show enroll button if course is open for enrollment.
                if course_open_for_self_enrollment(course_key):
                    enroll_message = _('You must be enrolled in the course to see course content. \
                            {enroll_link_start}Enroll now{enroll_link_end}.')
                    PageLevelMessages.register_warning_message(
                        request,
                        Text(enroll_message).format(
                            enroll_link_start=HTML('<button class="enroll-btn btn-link">'),
                            enroll_link_end=HTML('</button>')
                        )
                    )
                else:
                    PageLevelMessages.register_warning_message(
                        request,
                        Text(_('You must be enrolled in the course to see course content.'))
                    )
    @staticmethod
    def handle_exceptions(request, course, exception):
        """
        Handle exceptions raised when rendering a view.

        Re-raises redirects, 404s, and (under DEBUG) everything; otherwise
        logs the error and renders the courseware error page with status 500.
        """
        if isinstance(exception, Redirect) or isinstance(exception, Http404):
            raise
        if isinstance(exception, UnicodeEncodeError):
            raise Http404("URL contains Unicode characters")
        if settings.DEBUG:
            raise
        user = request.user
        log.exception(
            u"Error in %s: user=%s, effective_user=%s, course=%s",
            request.path,
            getattr(user, 'real_user', user),
            user,
            text_type(course.id),
        )
        try:
            return render_to_response(
                'courseware/courseware-error.html',
                {
                    'staff_access': has_access(user, 'staff', course),
                    'course': course,
                },
                status=500,
            )
        except:
            # Let the exception propagate, relying on global config to
            # at least return a nice error message
            log.exception("Error while rendering courseware-error page")
            raise
    def uses_bootstrap(self, request, course, tab):
        """
        Returns true if this view uses Bootstrap.
        """
        return tab.uses_bootstrap
    def create_page_context(self, request, course=None, tab=None, **kwargs):
        """
        Creates the context for the fragment's template.

        Side effect: when the tab supports the preview menu, this sets up
        masquerading and replaces ``request.user`` with the masqueraded user.
        """
        staff_access = has_access(request.user, 'staff', course)
        supports_preview_menu = tab.get('supports_preview_menu', False)
        uses_bootstrap = self.uses_bootstrap(request, course, tab=tab)
        if supports_preview_menu:
            masquerade, masquerade_user = setup_masquerade(request, course.id, staff_access, reset_masquerade_data=True)
            request.user = masquerade_user
        else:
            masquerade = None
        if course and not check_course_open_for_learner(request.user, course):
            # Disable student view button if user is staff and
            # course is not yet visible to students.
            supports_preview_menu = False
        context = {
            'course': course,
            'tab': tab,
            'active_page': tab.get('type', None),
            'staff_access': staff_access,
            'masquerade': masquerade,
            'supports_preview_menu': supports_preview_menu,
            'uses_bootstrap': uses_bootstrap,
            'uses_pattern_library': not uses_bootstrap,
            'disable_courseware_js': True,
        }
        context.update(
            get_experiment_user_metadata_context(
                course,
                request.user,
            )
        )
        return context
    def render_to_fragment(self, request, course=None, page_context=None, **kwargs):
        """
        Renders the course tab to a fragment.
        """
        tab = page_context['tab']
        return tab.render_to_fragment(request, course, **kwargs)
    def render_standalone_response(self, request, fragment, course=None, tab=None, page_context=None, **kwargs):
        """
        Renders this course tab's fragment to HTML for a standalone page.
        """
        if not page_context:
            page_context = self.create_page_context(request, course=course, tab=tab, **kwargs)
        tab = page_context['tab']
        page_context['fragment'] = fragment
        # Bootstrap tabs and pattern-library tabs use different templates.
        if self.uses_bootstrap(request, course, tab=tab):
            return render_to_response('courseware/tab-view.html', page_context)
        else:
            return render_to_response('courseware/tab-view-v2.html', page_context)
@ensure_csrf_cookie
@ensure_valid_course_key
def syllabus(request, course_id):
    """
    Render the course's syllabus page (syllabus.html), or 404 if there is no
    such course. Assumes the course_id is in a valid format.
    """
    course_key = CourseKey.from_string(course_id)
    course = get_course_with_access(request.user, 'load', course_key)

    context = {
        'course': course,
        'staff_access': bool(has_access(request.user, 'staff', course)),
    }
    return render_to_response('courseware/syllabus.html', context)
def registered_for_course(course, user):
    """
    Return True if user is registered for course, else False
    """
    # Anonymous or missing users are never registered.
    if user is None or not user.is_authenticated:
        return False
    return CourseEnrollment.is_enrolled(user, course.id)
class EnrollStaffView(View):
    """
    Displays view for registering in the course to a global staff user.

    User can either choose to 'Enroll' or 'Don't Enroll' in the course.
      Enroll: Enrolls user in course and redirects to the courseware.
      Don't Enroll: Redirects user to course about page.

    Arguments:
     - request : HTTP request
     - course_id : course id

    Returns:
     - RedirectResponse
    """
    template_name = 'enroll_staff.html'

    @method_decorator(require_global_staff)
    @method_decorator(ensure_valid_course_key)
    def get(self, request, course_id):
        """
        Display enroll staff view to global staff user with `Enroll` and `Don't Enroll` options.
        """
        user = request.user
        course_key = CourseKey.from_string(course_id)
        with modulestore().bulk_operations(course_key):
            course = get_course_with_access(user, 'load', course_key)
            if not registered_for_course(course, user):
                context = {
                    'course': course,
                    'csrftoken': csrf(request)["csrf_token"]
                }
                return render_to_response(self.template_name, context)
        # BUGFIX: previously this method fell through and implicitly returned
        # None when the staff user was already enrolled, which makes Django
        # raise "The view ... didn't return an HttpResponse object". Send
        # already-enrolled staff to the course about page instead.
        return redirect(reverse('about_course', args=[text_type(course_key)]))

    @method_decorator(require_global_staff)
    @method_decorator(ensure_valid_course_key)
    def post(self, request, course_id):
        """
        Either enrolls the user in course or redirects user to course about page
        depending upon the option (Enroll, Don't Enroll) chosen by the user.
        """
        # Sanitize the 'next' target so only path/query characters survive.
        _next = urllib.quote_plus(request.GET.get('next', 'info'), safe='/:?=')
        course_key = CourseKey.from_string(course_id)
        enroll = 'enroll' in request.POST
        if enroll:
            add_enrollment(request.user.username, course_id)
            log.info(
                u"User %s enrolled in %s via `enroll_staff` view",
                request.user.username,
                course_id
            )
            return redirect(_next)

        # In any other case redirect to the course about page.
        return redirect(reverse('about_course', args=[text_type(course_key)]))
@ensure_csrf_cookie
@ensure_valid_course_key
@cache_if_anonymous()
def course_about(request, course_id):
    """
    Display the course's about page.

    Redirects to the dashboard when the user cannot self-enroll, and to the
    course home when the marketing site is enabled for this request.
    """
    course_key = CourseKey.from_string(course_id)
    # If a user is not able to enroll in a course then redirect
    # them away from the about page to the dashboard.
    if not can_self_enroll_in_course(course_key):
        return redirect(reverse('dashboard'))
    with modulestore().bulk_operations(course_key):
        permission = get_permission_for_course_about()
        course = get_course_with_access(request.user, permission, course_key)
        course_details = CourseDetails.populate(course)
        modes = CourseMode.modes_for_course_dict(course_key)
        if configuration_helpers.get_value('ENABLE_MKTG_SITE', settings.FEATURES.get('ENABLE_MKTG_SITE', False)):
            return redirect(reverse(course_home_url_name(course.id), args=[text_type(course.id)]))
        registered = registered_for_course(course, request.user)
        staff_access = bool(has_access(request.user, 'staff', course))
        studio_url = get_studio_url(course, 'settings/details')
        # Users with 'load' access go straight to the course home; others get
        # the about page itself as the target.
        if has_access(request.user, 'load', course):
            course_target = reverse(course_home_url_name(course.id), args=[text_type(course.id)])
        else:
            course_target = reverse('about_course', args=[text_type(course.id)])
        show_courseware_link = bool(
            (
                has_access(request.user, 'load', course)
            ) or settings.FEATURES.get('ENABLE_LMS_MIGRATION')
        )
        # Note: this is a flow for payment for course registration, not the Verified Certificate flow.
        in_cart = False
        reg_then_add_to_cart_link = ""
        _is_shopping_cart_enabled = is_shopping_cart_enabled()
        if _is_shopping_cart_enabled:
            if request.user.is_authenticated:
                cart = shoppingcart.models.Order.get_cart_for_user(request.user)
                in_cart = shoppingcart.models.PaidCourseRegistration.contained_in_order(cart, course_key) or \
                    shoppingcart.models.CourseRegCodeItem.contained_in_order(cart, course_key)
            reg_then_add_to_cart_link = "{reg_url}?course_id={course_id}&enrollment_action=add_to_cart".format(
                reg_url=reverse('register_user'), course_id=urllib.quote(str(course_id))
            )
        # If the ecommerce checkout flow is enabled and the mode of the course is
        # professional or no id professional, we construct links for the enrollment
        # button to add the course to the ecommerce basket.
        ecomm_service = EcommerceService()
        ecommerce_checkout = ecomm_service.is_enabled(request.user)
        ecommerce_checkout_link = ''
        ecommerce_bulk_checkout_link = ''
        professional_mode = None
        is_professional_mode = CourseMode.PROFESSIONAL in modes or CourseMode.NO_ID_PROFESSIONAL_MODE in modes
        if ecommerce_checkout and is_professional_mode:
            professional_mode = modes.get(CourseMode.PROFESSIONAL, '') or \
                modes.get(CourseMode.NO_ID_PROFESSIONAL_MODE, '')
            if professional_mode.sku:
                ecommerce_checkout_link = ecomm_service.get_checkout_page_url(professional_mode.sku)
            if professional_mode.bulk_sku:
                ecommerce_bulk_checkout_link = ecomm_service.get_checkout_page_url(professional_mode.bulk_sku)
        registration_price, course_price = get_course_prices(course)
        # Determine which checkout workflow to use -- LMS shoppingcart or Otto basket
        can_add_course_to_cart = _is_shopping_cart_enabled and registration_price and not ecommerce_checkout_link
        # Used to provide context to message to student if enrollment not allowed
        can_enroll = bool(has_access(request.user, 'enroll', course))
        invitation_only = course.invitation_only
        is_course_full = CourseEnrollment.objects.is_course_full(course)
        # Register button should be disabled if one of the following is true:
        # - Student is already registered for course
        # - Course is already full
        # - Student cannot enroll in course
        active_reg_button = not (registered or is_course_full or not can_enroll)
        is_shib_course = uses_shib(course)
        # get prerequisite courses display names
        pre_requisite_courses = get_prerequisite_courses_display(course)
        # Overview
        overview = CourseOverview.get_from_id(course.id)
        sidebar_html_enabled = course_experience_waffle().is_enabled(ENABLE_COURSE_ABOUT_SIDEBAR_HTML)
        # This local import is due to the circularity of lms and openedx references.
        # This may be resolved by using stevedore to allow web fragments to be used
        # as plugins, and to avoid the direct import.
        from openedx.features.course_experience.views.course_reviews import CourseReviewsModuleFragmentView
        # Embed the course reviews tool
        reviews_fragment_view = CourseReviewsModuleFragmentView().render_to_fragment(request, course=course)
        context = {
            'course': course,
            'course_details': course_details,
            'staff_access': staff_access,
            'studio_url': studio_url,
            'registered': registered,
            'course_target': course_target,
            'is_cosmetic_price_enabled': settings.FEATURES.get('ENABLE_COSMETIC_DISPLAY_PRICE'),
            'course_price': course_price,
            'in_cart': in_cart,
            'ecommerce_checkout': ecommerce_checkout,
            'ecommerce_checkout_link': ecommerce_checkout_link,
            'ecommerce_bulk_checkout_link': ecommerce_bulk_checkout_link,
            'professional_mode': professional_mode,
            'reg_then_add_to_cart_link': reg_then_add_to_cart_link,
            'show_courseware_link': show_courseware_link,
            'is_course_full': is_course_full,
            'can_enroll': can_enroll,
            'invitation_only': invitation_only,
            'active_reg_button': active_reg_button,
            'is_shib_course': is_shib_course,
            # We do not want to display the internal courseware header, which is used when the course is found in the
            # context. This value is therefor explicitly set to render the appropriate header.
            'disable_courseware_header': True,
            'can_add_course_to_cart': can_add_course_to_cart,
            'cart_link': reverse('shoppingcart.views.show_cart'),
            'pre_requisite_courses': pre_requisite_courses,
            'course_image_urls': overview.image_urls,
            'reviews_fragment_view': reviews_fragment_view,
            'sidebar_html_enabled': sidebar_html_enabled,
        }
        return render_to_response('courseware/course_about.html', context)
@ensure_csrf_cookie
@cache_if_anonymous()
def program_marketing(request, program_uuid):
    """
    Display the program marketing page.

    Raises Http404 when no program with the given UUID is found. Adds a
    one-click purchase link when the learner is eligible and SKUs exist.
    """
    program_data = get_programs(request.site, uuid=program_uuid)
    if not program_data:
        raise Http404

    program = ProgramMarketingDataExtender(program_data, request.user).extend()
    program['type_slug'] = slugify(program['type'])

    ecommerce_service = EcommerceService()
    context = {
        'program': program,
        'uses_bootstrap': True,
    }

    skus = program.get('skus')
    if program.get('is_learner_eligible_for_one_click_purchase') and skus:
        context['buy_button_href'] = ecommerce_service.get_checkout_page_url(*skus, program_uuid=program_uuid)

    return render_to_response('courseware/program_marketing.html', context)
@transaction.non_atomic_requests
@login_required
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@ensure_valid_course_key
@data_sharing_consent_required
def progress(request, course_id, student_id=None):
    """
    Display the progress page.

    Thin wrapper around ``_progress`` that resolves the course key and opens
    a modulestore bulk-operations context around the work.
    """
    course_key = CourseKey.from_string(course_id)
    store = modulestore()
    with store.bulk_operations(course_key):
        return _progress(request, course_key, student_id)
def _progress(request, course_key, student_id):
    """
    Unwrapped version of "progress".
    User progress. We show the grade bar and every problem score.
    Course staff are allowed to see the progress of students in their class.

    Raises Http404 when ``student_id`` is malformed, when the requester lacks
    permission to view another student's progress, or when that student does
    not exist.
    """
    if student_id is not None:
        try:
            student_id = int(student_id)
        # Check for ValueError if 'student_id' cannot be converted to integer.
        except ValueError:
            raise Http404
    course = get_course_with_access(request.user, 'load', course_key)
    staff_access = bool(has_access(request.user, 'staff', course))
    masquerade = None
    if student_id is None or student_id == request.user.id:
        # This will be a no-op for non-staff users, returning request.user
        masquerade, student = setup_masquerade(request, course_key, staff_access, reset_masquerade_data=True)
    else:
        # Viewing someone else's progress requires staff or CCX coach access.
        try:
            coach_access = has_ccx_coach_role(request.user, course_key)
        except CCXLocatorValidationException:
            coach_access = False
        has_access_on_students_profiles = staff_access or coach_access
        # Requesting access to a different student's profile
        if not has_access_on_students_profiles:
            raise Http404
        try:
            student = User.objects.get(id=student_id)
        except User.DoesNotExist:
            raise Http404
    # NOTE: To make sure impersonation by instructor works, use
    # student instead of request.user in the rest of the function.
    # The pre-fetching of groups is done to make auth checks not require an
    # additional DB lookup (this kills the Progress page in particular).
    student = User.objects.prefetch_related("groups").get(id=student.id)
    if request.user.id != student.id:
        # refetch the course as the assumed student
        course = get_course_with_access(student, 'load', course_key, check_if_enrolled=True)
    # NOTE: To make sure impersonation by instructor works, use
    # student instead of request.user in the rest of the function.
    course_grade = CourseGradeFactory().read(student, course)
    courseware_summary = course_grade.chapter_grades.values()
    studio_url = get_studio_url(course, 'settings/grading')
    # checking certificate generation configuration
    enrollment_mode, _ = CourseEnrollment.enrollment_mode_for_user(student, course_key)
    context = {
        'course': course,
        'courseware_summary': courseware_summary,
        'studio_url': studio_url,
        'grade_summary': course_grade.summary,
        'staff_access': staff_access,
        'masquerade': masquerade,
        'supports_preview_menu': True,
        'student': student,
        'credit_course_requirements': _credit_course_requirements(course_key, student),
        'certificate_data': _get_cert_data(student, course, enrollment_mode, course_grade),
    }
    context.update(
        get_experiment_user_metadata_context(
            course,
            student,
        )
    )
    with outer_atomic():
        response = render_to_response('courseware/progress.html', context)
    return response
def _downloadable_certificate_message(course, cert_downloadable_status):
    """
    Build the certificate message for a downloadable certificate.

    When HTML (web-view) certificates are enabled and an active web
    certificate exists, return CertData pointing at the web-view URL.
    When HTML certificates are enabled but no download URL exists yet,
    report the certificate as still generating. Otherwise fall back to
    the plain download URL.
    """
    if certs_api.has_html_certificates_enabled(course):
        if certs_api.get_active_web_certificate(course) is not None:
            web_view_url = certs_api.get_certificate_url(
                course_id=course.id, uuid=cert_downloadable_status['uuid']
            )
            return _downloadable_cert_data(download_url=None, cert_web_view_url=web_view_url)
        if not cert_downloadable_status['download_url']:
            return GENERATING_CERT_DATA
    return _downloadable_cert_data(download_url=cert_downloadable_status['download_url'])
def _missing_required_verification(student, enrollment_mode):
    """
    Return True when the enrollment mode requires ID verification and the
    student has no current verified identity.
    """
    if enrollment_mode not in CourseMode.VERIFIED_MODES:
        return False
    return not IDVerificationService.user_is_verified(student)
def _certificate_message(student, course, enrollment_mode):
    """
    Choose the CertData message for the student's certificate state.

    Checks, in order: invalidated, generating, unverified (or missing
    required ID verification), downloadable; otherwise the certificate is
    still being requested.
    """
    if certs_api.is_certificate_invalid(student, course.id):
        return INVALID_CERT_DATA

    status = certs_api.certificate_downloadable_status(student, course.id)

    if status['is_generating']:
        return GENERATING_CERT_DATA
    if status['is_unverified'] or _missing_required_verification(student, enrollment_mode):
        return UNVERIFIED_CERT_DATA
    if status['is_downloadable']:
        return _downloadable_certificate_message(course, status)
    return REQUESTING_CERT_DATA
def _get_cert_data(student, course, enrollment_mode, course_grade=None):
    """Return the student's course-certificate data for the progress page.

    Arguments:
        student (User): Student for whom certificate data is retrieved.
        course (Course): Course the certificate belongs to.
        enrollment_mode (String): Course mode in which the student is enrolled.
        course_grade (CourseGrade): Student's grade record; computed on demand
            when not supplied.

    Returns:
        dict describing the certificate state, or None when no certificate
        message should be shown.
    """
    if not CourseMode.is_eligible_for_certificate(enrollment_mode):
        # Ineligible modes still get a mode-specific explanatory message.
        if enrollment_mode == CourseMode.AUDIT:
            return AUDIT_PASSING_CERT_DATA
        return HONOR_PASSING_CERT_DATA

    certs_enabled = certs_api.cert_generation_enabled(course.id)
    if course_grade is None:
        course_grade = CourseGradeFactory().read(student, course)
    if not auto_certs_api.can_show_certificate_message(course, student, course_grade, certs_enabled):
        return None
    return _certificate_message(student, course, enrollment_mode)
def _credit_course_requirements(course_key, student):
    """Return information about which credit requirements a user has satisfied.

    Arguments:
        course_key (CourseKey): Identifier for the course.
        student (User): Currently logged in user.

    Returns: dict if credit eligibility is enabled, this is a credit course,
        and the user is enrolled in either verified or credit mode; None
        otherwise (None means the progress page should not display credit
        requirements at all).
    """
    credit_enabled = settings.FEATURES.get("ENABLE_CREDIT_ELIGIBILITY", False)
    if not credit_enabled or not is_credit_course(course_key):
        return None

    enrollment = CourseEnrollment.get_enrollment(student, course_key)
    if enrollment and enrollment.mode not in REQUIREMENTS_DISPLAY_MODES:
        return None

    # Per-requirement status is "satisfied", "failed", or None. None means the
    # status is unknown: either the user hasn't acted yet (e.g. hasn't
    # submitted verification photos) or we're waiting on an external service
    # (e.g. the photo verification service) to respond.
    requirement_statuses = get_credit_requirement_status(course_key, student.username)

    # A user marked "eligible" stays eligible unless someone manually
    # intervenes (e.g. by deleting CreditEligibility records in the database),
    # even if new requirements were added post-launch -- in that case the user
    # may appear eligible while a requirement still shows as pending. We accept
    # that trade-off: training course teams mitigates it, and it is a better
    # experience for students who were once marked eligible.
    if is_user_eligible_for_credit(student.username, course_key):
        eligibility_status = "eligible"
    elif any(req['status'] in ('failed', 'declined') for req in requirement_statuses):
        # Any failed requirement (e.g. a denied photo verification) or a
        # declined one makes the user ineligible for credit.
        eligibility_status = "not_eligible"
    else:
        # Possibly eligible, but not all requirements are completed yet.
        eligibility_status = "partial_eligible"

    return {
        'eligibility_status': eligibility_status,
        'requirements': requirement_statuses,
    }
@login_required
@ensure_valid_course_key
def submission_history(request, course_id, student_username, location):
    """Render an HTML fragment (meant for inclusion elsewhere) that renders a
    history of all state changes made by this user for this problem location.
    Right now this only works for problems because that's all
    StudentModuleHistory records.
    """
    course_key = CourseKey.from_string(course_id)
    try:
        # NOTE(review): AssertionError is caught alongside InvalidKeyError --
        # presumably map_into_course can assert on keys that do not belong to
        # this course; confirm before narrowing.
        usage_key = UsageKey.from_string(location).map_into_course(course_key)
    except (InvalidKeyError, AssertionError):
        return HttpResponse(escape(_(u'Invalid location.')))
    course = get_course_overview_with_access(request.user, 'load', course_key)
    staff_access = bool(has_access(request.user, 'staff', course))
    # Permission Denied if they don't have staff access and are trying to see
    # somebody else's submission history.
    if (student_username != request.user.username) and (not staff_access):
        raise PermissionDenied
    user_state_client = DjangoXBlockUserStateClient()
    try:
        history_entries = list(user_state_client.get_history(student_username, usage_key))
    except DjangoXBlockUserStateClient.DoesNotExist:
        return HttpResponse(escape(_(u'User {username} has never accessed problem {location}').format(
            username=student_username,
            location=location
        )))
    # This is ugly, but until we have a proper submissions API that we can use to provide
    # the scores instead, it will have to do.
    csm = StudentModule.objects.filter(
        module_state_key=usage_key,
        student__username=student_username,
        course_id=course_key)
    scores = BaseStudentModuleHistory.get_history(csm)
    if len(scores) != len(history_entries):
        log.warning(
            "Mismatch when fetching scores for student "
            "history for course %s, user %s, xblock %s. "
            "%d scores were found, and %d history entries were found. "
            "Matching scores to history entries by date for display.",
            course_id,
            student_username,
            location,
            len(scores),
            len(history_entries),
        )
        # Realign scores with history entries by timestamp.
        # NOTE(review): this lookup raises KeyError for a history entry with
        # no score created at exactly the same instant -- assumed to hold in
        # practice; confirm.
        scores_by_date = {
            score.created: score
            for score in scores
        }
        scores = [
            scores_by_date[history.updated]
            for history in history_entries
        ]
    context = {
        'history_entries': history_entries,
        'scores': scores,
        'username': student_username,
        'location': location,
        'course_id': text_type(course_key)
    }
    return render_to_response('courseware/submission_history.html', context)
def get_static_tab_fragment(request, course, tab):
    """
    Returns the fragment for the given static tab.

    Loads the tab's module for the requesting user and renders its student
    view. Falls back to an empty fragment when the module cannot be loaded,
    and to an error-message fragment when rendering raises.
    """
    loc = course.id.make_usage_key(
        tab.type,
        tab.url_slug,
    )
    # depth=0: only the tab item itself is needed, not its descendants' data.
    field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
        course.id, request.user, modulestore().get_item(loc), depth=0
    )
    tab_module = get_module(
        request.user, request, loc, field_data_cache, static_asset_path=course.static_asset_path, course=course
    )
    logging.debug('course_module = %s', tab_module)
    fragment = Fragment()
    if tab_module is not None:
        try:
            fragment = tab_module.render(STUDENT_VIEW, {})
        except Exception:  # pylint: disable=broad-except
            # Broad catch is deliberate: a broken tab must not take down the
            # page -- show the generic error fragment and log the details.
            fragment.content = render_to_string('courseware/error-message.html', None)
            log.exception(
                u"Error rendering course=%s, tab=%s", course, tab['url_slug']
            )
    return fragment
@require_GET
@ensure_valid_course_key
def get_course_lti_endpoints(request, course_id):
    """
    View that, given a course_id, returns the a JSON object that enumerates all of the LTI endpoints for that course.

    The LTI 2.0 result service spec at
    http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html
    says "This specification document does not prescribe a method for discovering the endpoint URLs." This view
    function implements one way of discovering these endpoints, returning a JSON array when accessed.

    Arguments:
        request (django request object): the HTTP request object that triggered this view function
        course_id (unicode): id associated with the course

    Returns:
        (django response object): HTTP response. 404 if course is not found, otherwise 200 with JSON body.
    """
    course_key = CourseKey.from_string(course_id)
    try:
        course = get_course(course_key, depth=2)
    except ValueError:
        return HttpResponse(status=404)
    anonymous_user = AnonymousUser()
    anonymous_user.known = False  # make these "noauth" requests like module_render.handle_xblock_callback_noauth
    # Collect descriptors of both LTI implementations: the legacy 'lti'
    # XModule and the 'lti_consumer' XBlock.
    lti_descriptors = modulestore().get_items(course.id, qualifiers={'category': 'lti'})
    lti_descriptors.extend(modulestore().get_items(course.id, qualifiers={'category': 'lti_consumer'}))
    lti_noauth_modules = [
        get_module_for_descriptor(
            anonymous_user,
            request,
            descriptor,
            FieldDataCache.cache_for_descriptor_descendents(
                course_key,
                anonymous_user,
                descriptor
            ),
            course_key,
            course=course
        )
        for descriptor in lti_descriptors
    ]
    # One entry per LTI module, exposing both the LTI 2.0 (JSON) and the
    # LTI 1.1 (XML) outcome service URLs.
    endpoints = [
        {
            'display_name': module.display_name,
            'lti_2_0_result_service_json_endpoint': module.get_outcome_service_url(
                service_name='lti_2_0_result_rest_handler') + "/user/{anon_user_id}",
            'lti_1_1_result_service_xml_endpoint': module.get_outcome_service_url(
                service_name='grade_handler'),
        }
        for module in lti_noauth_modules
    ]
    return HttpResponse(json.dumps(endpoints), content_type='application/json')
@login_required
def course_survey(request, course_id):
    """Present the survey associated with a course, if any.

    The survey rendering itself is handled by the Survey djangoapp's views;
    this endpoint only resolves the course and delegates, or redirects back
    to the course when no survey is configured.
    """
    course_key = CourseKey.from_string(course_id)
    course = get_course_with_access(request.user, 'load', course_key, check_survey_complete=False)
    redirect_url = reverse(course_home_url_name(course.id), args=[course_id])

    survey_name = course.course_survey_name
    if not survey_name:
        # No survey attached to this course: send the learner to the course.
        return redirect(redirect_url)

    return survey.views.view_student_survey(
        request.user,
        survey_name,
        course=course,
        redirect_url=redirect_url,
        is_required=course.course_survey_required,
    )
def is_course_passed(student, course, course_grade=None):
    """Return True when the student's grade passes the course.

    Arguments:
        student: user object
        course: course object
        course_grade (CourseGrade): student's grade record; computed on
            demand when not supplied.

    Returns:
        bool
    """
    grade = course_grade if course_grade is not None else CourseGradeFactory().read(student, course)
    return grade.passed
# Grades can potentially be written - if so, let grading manage the transaction.
@transaction.non_atomic_requests
@require_POST
def generate_user_cert(request, course_id):
    """Start generating a new certificate for the user.

    Certificate generation is allowed if:
    * The user has passed the course, and
    * The user does not already have a pending/completed certificate.

    Note that if an error occurs during certificate generation
    (for example, if the queue is down), then we simply mark the
    certificate generation task status as "error" and re-run
    the task with a management command. To students, the certificate
    will appear to be "generating" until it is re-run.

    Args:
        request (HttpRequest): The POST request to this view.
        course_id (unicode): The identifier for the course.

    Returns:
        HttpResponse: 200 on success, 400 if a new certificate cannot be generated.
    """
    # Checked explicitly (no @login_required) so anonymous callers receive a
    # 400 with a message rather than a login redirect.
    if not request.user.is_authenticated:
        log.info(u"Anon user trying to generate certificate for %s", course_id)
        return HttpResponseBadRequest(
            _('You must be signed in to {platform_name} to create a certificate.').format(
                platform_name=configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)
            )
        )
    student = request.user
    course_key = CourseKey.from_string(course_id)
    # NOTE(review): depth=2 prefetches two levels of course children --
    # presumably needed by the grading/passing check below; confirm.
    course = modulestore().get_course(course_key, depth=2)
    if not course:
        return HttpResponseBadRequest(_("Course is not valid"))
    if not is_course_passed(student, course):
        log.info(u"User %s has not passed the course: %s", student.username, course_id)
        return HttpResponseBadRequest(_("Your certificate will be available when you pass the course."))
    certificate_status = certs_api.certificate_downloadable_status(student, course.id)
    log.info(
        u"User %s has requested for certificate in %s, current status: is_downloadable: %s, is_generating: %s",
        student.username,
        course_id,
        certificate_status["is_downloadable"],
        certificate_status["is_generating"],
    )
    if certificate_status["is_downloadable"]:
        return HttpResponseBadRequest(_("Certificate has already been created."))
    elif certificate_status["is_generating"]:
        return HttpResponseBadRequest(_("Certificate is being created."))
    else:
        # If the certificate is not already in-process or completed,
        # then create a new certificate generation task.
        # If the certificate cannot be added to the queue, this will
        # mark the certificate with "error" status, so it can be re-run
        # with a management command. From the user's perspective,
        # it will appear that the certificate task was submitted successfully.
        certs_api.generate_user_certificates(student, course.id, course=course, generation_mode='self')
        _track_successful_certificate_generation(student.id, course.id)
        return HttpResponse()
def _track_successful_certificate_generation(user_id, course_id):  # pylint: disable=invalid-name
    """Emit an analytics event for a successful certificate-generation request.

    Arguments:
        user_id (str): The ID of the user generating the certificate.
        course_id (CourseKey): Identifier for the course.

    Returns:
        None
    """
    if not settings.LMS_SEGMENT_KEY:
        # Segment analytics are not configured; nothing to report.
        return

    tracking_context = tracker.get_tracker().resolve_context()
    analytics.track(
        user_id,
        'edx.bi.user.certificate.generate',
        {
            'category': 'certificates',
            'label': text_type(course_id)
        },
        context={
            'ip': tracking_context.get('ip'),
            'Google Analytics': {
                'clientId': tracking_context.get('client_id')
            }
        }
    )
@require_http_methods(["GET", "POST"])
@ensure_valid_usage_key
def render_xblock(request, usage_key_string, check_if_enrolled=True):
    """
    Returns an HttpResponse with HTML content for the xBlock with the given usage_key.
    The returned HTML is a chromeless rendering of the xBlock (excluding content of the containing courseware).
    """
    usage_key = UsageKey.from_string(usage_key_string)
    # Normalize the key: fill in the course run if the caller omitted it.
    usage_key = usage_key.replace(course_key=modulestore().fill_in_run(usage_key.course_key))
    course_key = usage_key.course_key
    requested_view = request.GET.get('view', 'student_view')
    if requested_view != 'student_view':
        return HttpResponseBadRequest("Rendering of the xblock view '{}' is not supported.".format(requested_view))
    # Batch all modulestore reads for the duration of the render.
    with modulestore().bulk_operations(course_key):
        # verify the user has access to the course, including enrollment check
        try:
            course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=check_if_enrolled)
        except CourseAccessRedirect:
            # Surface an access redirect as not-found for this chromeless view.
            raise Http404("Course not found.")
        # get the block, which verifies whether the user has access to the block.
        block, _ = get_module_by_usage_id(
            request, text_type(course_key), text_type(usage_key), disable_staff_debug_info=True, course=course
        )
        # Pass through querystring parameters as student-view context.
        student_view_context = request.GET.dict()
        student_view_context['show_bookmark_button'] = False
        # When completion tracking is on and this block is marked complete on
        # view, have the frontend wrapper mark it complete after a delay.
        enable_completion_on_view_service = False
        completion_service = block.runtime.service(block, 'completion')
        if completion_service and completion_service.completion_tracking_enabled():
            if completion_service.blocks_to_mark_complete_on_view({block}):
                enable_completion_on_view_service = True
                student_view_context['wrap_xblock_data'] = {
                    'mark-completed-on-view-after-delay': completion_service.get_complete_on_view_delay_ms()
                }
        context = {
            'fragment': block.render('student_view', context=student_view_context),
            'course': course,
            'disable_accordion': True,
            'allow_iframing': True,
            'disable_header': True,
            'disable_footer': True,
            'disable_window_wrap': True,
            'enable_completion_on_view_service': enable_completion_on_view_service,
            'staff_access': bool(has_access(request.user, 'staff', course)),
            'xqa_server': settings.FEATURES.get('XQA_SERVER', 'http://your_xqa_server.com'),
        }
        return render_to_response('courseware/courseware-chromeless.html', context)
# Intro paragraphs for the financial assistance pages: translated first, then
# formatted, then split on '\n' so each paragraph renders separately.
# Translators: "percent_sign" is the symbol "%". "platform_name" is a
# string identifying the name of this installation, such as "edX".
FINANCIAL_ASSISTANCE_HEADER = _(
    '{platform_name} now offers financial assistance for learners who want to earn Verified Certificates but'
    ' who may not be able to pay the Verified Certificate fee. Eligible learners may receive up to 90{percent_sign} off'
    ' the Verified Certificate fee for a course.\nTo apply for financial assistance, enroll in the'
    ' audit track for a course that offers Verified Certificates, and then complete this application.'
    ' Note that you must complete a separate application for each course you take.\n We plan to use this'
    ' information to evaluate your application for financial assistance and to further develop our'
    ' financial assistance program.'
).format(
    percent_sign="%",
    platform_name=configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)
).split('\n')
# Form-field labels and instructions, shared by the application form view and
# the Zendesk ticket body built in financial_assistance_request().
FA_INCOME_LABEL = _('Annual Household Income')
FA_REASON_FOR_APPLYING_LABEL = _(
    'Tell us about your current financial situation. Why do you need assistance?'
)
FA_GOALS_LABEL = _(
    'Tell us about your learning or professional goals. How will a Verified Certificate in'
    ' this course help you achieve these goals?'
)
FA_EFFORT_LABEL = _(
    'Tell us about your plans for this course. What steps will you take to help you complete'
    ' the course work and receive a certificate?'
)
FA_SHORT_ANSWER_INSTRUCTIONS = _('Use between 250 and 500 words or so in your response.')
@login_required
def financial_assistance(_request):
    """Render the introductory financial assistance page."""
    context = {'header_text': FINANCIAL_ASSISTANCE_HEADER}
    return render_to_response('financial-assistance/financial-assistance.html', context)
@login_required
@require_POST
def financial_assistance_request(request):
    """Submit a request for financial assistance to Zendesk.

    Expects a JSON body with the applicant's details. Responds 204 on
    success, 400 on malformed input, 403 when the submitted username does
    not match the session user, and 500 when the Zendesk call fails.
    """
    try:
        data = json.loads(request.body)
        # Simple sanity check that the session belongs to the user
        # submitting an FA request
        username = data['username']
        if request.user.username != username:
            return HttpResponseForbidden()
        course_id = data['course']
        course = modulestore().get_course(CourseKey.from_string(course_id))
        legal_name = data['name']
        email = data['email']
        country = data['country']
        income = data['income']
        reason_for_applying = data['reason_for_applying']
        goals = data['goals']
        effort = data['effort']
        marketing_permission = data['mktg-permission']
        ip_address = get_ip(request)
    except ValueError:
        # Thrown if JSON parsing fails
        return HttpResponseBadRequest(u'Could not parse request JSON.')
    except InvalidKeyError:
        # Thrown if course key parsing fails
        return HttpResponseBadRequest(u'Could not parse request course key.')
    except KeyError as err:
        # Thrown if fields are missing
        return HttpResponseBadRequest(u'The field {} is required.'.format(text_type(err)))
    zendesk_submitted = _record_feedback_in_zendesk(
        legal_name,
        email,
        u'Financial assistance request for learner {username} in course {course_name}'.format(
            username=username,
            course_name=course.display_name
        ),
        u'Financial Assistance Request',
        {'course_id': course_id},
        # Send the application as additional info on the ticket so
        # that it is not shown when support replies. This uses
        # OrderedDict so that information is presented in the right
        # order.
        OrderedDict((
            ('Username', username),
            ('Full Name', legal_name),
            ('Course ID', course_id),
            ('Annual Household Income', income),
            ('Country', country),
            ('Allowed for marketing purposes', 'Yes' if marketing_permission else 'No'),
            (FA_REASON_FOR_APPLYING_LABEL, '\n' + reason_for_applying + '\n\n'),
            (FA_GOALS_LABEL, '\n' + goals + '\n\n'),
            (FA_EFFORT_LABEL, '\n' + effort + '\n\n'),
            ('Client IP', ip_address),
        )),
        group_name='Financial Assistance',
        require_update=True
    )
    if not zendesk_submitted:
        # The call to Zendesk failed. The frontend will display a
        # message to the user.
        return HttpResponse(status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    return HttpResponse(status=status.HTTP_204_NO_CONTENT)
@login_required
def financial_assistance_form(request):
    """Render the financial assistance application form page.

    Builds the form-field descriptors (course, income, three short-answer
    questions, marketing opt-in) consumed by the apply.html template.
    """
    user = request.user
    enrolled_courses = get_financial_aid_courses(user)
    incomes = ['Less than $5,000', '$5,000 - $10,000', '$10,000 - $15,000', '$15,000 - $20,000', '$20,000 - $25,000']
    # The income brackets are deliberately run through _() so translators can
    # localize them, even though they are not extracted literals.
    annual_incomes = [
        {'name': _(income), 'value': income} for income in incomes  # pylint: disable=translation-of-non-string
    ]
    return render_to_response('financial-assistance/apply.html', {
        'header_text': FINANCIAL_ASSISTANCE_HEADER,
        'student_faq_url': marketing_link('FAQ'),
        'dashboard_url': reverse('dashboard'),
        'account_settings_url': reverse('account_settings'),
        'platform_name': configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
        'user_details': {
            'email': user.email,
            'username': user.username,
            'name': user.profile.name,
            'country': text_type(user.profile.country.name),
        },
        'submit_url': reverse('submit_financial_assistance_request'),
        'fields': [
            {
                'name': 'course',
                'type': 'select',
                'label': _('Course'),
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'options': enrolled_courses,
                'instructions': _(
                    'Select the course for which you want to earn a verified certificate. If'
                    ' the course does not appear in the list, make sure that you have enrolled'
                    ' in the audit track for the course.'
                )
            },
            {
                'name': 'income',
                'type': 'select',
                'label': FA_INCOME_LABEL,
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'options': annual_incomes,
                'instructions': _('Specify your annual household income in US Dollars.')
            },
            {
                'name': 'reason_for_applying',
                'type': 'textarea',
                'label': FA_REASON_FOR_APPLYING_LABEL,
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'restrictions': {
                    'min_length': settings.FINANCIAL_ASSISTANCE_MIN_LENGTH,
                    'max_length': settings.FINANCIAL_ASSISTANCE_MAX_LENGTH
                },
                'instructions': FA_SHORT_ANSWER_INSTRUCTIONS
            },
            {
                'name': 'goals',
                'type': 'textarea',
                'label': FA_GOALS_LABEL,
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'restrictions': {
                    'min_length': settings.FINANCIAL_ASSISTANCE_MIN_LENGTH,
                    'max_length': settings.FINANCIAL_ASSISTANCE_MAX_LENGTH
                },
                'instructions': FA_SHORT_ANSWER_INSTRUCTIONS
            },
            {
                'name': 'effort',
                'type': 'textarea',
                'label': FA_EFFORT_LABEL,
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'restrictions': {
                    'min_length': settings.FINANCIAL_ASSISTANCE_MIN_LENGTH,
                    'max_length': settings.FINANCIAL_ASSISTANCE_MAX_LENGTH
                },
                'instructions': FA_SHORT_ANSWER_INSTRUCTIONS
            },
            {
                'placeholder': '',
                'name': 'mktg-permission',
                'label': _(
                    'I allow edX to use the information provided in this application '
                    '(except for financial information) for edX marketing purposes.'
                ),
                'defaultValue': '',
                'type': 'checkbox',
                'required': False,
                'instructions': '',
                'restrictions': {}
            }
        ],
    })
def get_financial_aid_courses(user):
    """Retrieve the user's enrollments eligible for financial assistance.

    Returns a list of {'name', 'value'} dicts (display name, course id
    string) for non-verified enrollments whose course allows financial aid
    and still has an open verified mode, newest enrollment first.
    """
    def _has_open_verified_mode(course_id):
        # A verified mode must exist that either never expires or has not
        # expired yet.
        not_expired = (
            Q(_expiration_datetime__isnull=True) |
            Q(_expiration_datetime__gt=datetime.now(UTC))
        )
        return CourseMode.objects.filter(
            not_expired,
            course_id=course_id,
            mode_slug=CourseMode.VERIFIED,
        ).exists()

    financial_aid_courses = []
    for enrollment in CourseEnrollment.enrollments_for_user(user).order_by('-created'):
        if enrollment.mode == CourseMode.VERIFIED:
            continue
        overview = enrollment.course_overview
        if not (overview and overview.eligible_for_financial_aid):
            continue
        if not _has_open_verified_mode(enrollment.course_id):
            continue
        financial_aid_courses.append({
            'name': overview.display_name,
            'value': text_type(enrollment.course_id),
        })
    return financial_aid_courses
| agpl-3.0 |
ClinicalGraphics/scikit-image | skimage/feature/tests/test_corner.py | 20 | 13002 | import numpy as np
from numpy.testing import (assert_array_equal, assert_raises,
assert_almost_equal)
from skimage import data
from skimage import img_as_float
from skimage.color import rgb2gray
from skimage.morphology import octagon
from skimage._shared.testing import test_parallel
from skimage.feature import (corner_moravec, corner_harris, corner_shi_tomasi,
corner_subpix, peak_local_max, corner_peaks,
corner_kitchen_rosenfeld, corner_foerstner,
corner_fast, corner_orientations,
structure_tensor, structure_tensor_eigvals,
hessian_matrix, hessian_matrix_eigvals,
hessian_matrix_det)
def test_structure_tensor():
    """Structure tensor of a single centred impulse matches the known response."""
    square = np.zeros((5, 5))
    square[2, 2] = 1
    Axx, Axy, Ayy = structure_tensor(square, sigma=0.1)
    assert_array_equal(Axx, np.array([[ 0, 0, 0, 0, 0],
                                      [ 0, 1, 0, 1, 0],
                                      [ 0, 4, 0, 4, 0],
                                      [ 0, 1, 0, 1, 0],
                                      [ 0, 0, 0, 0, 0]]))
    assert_array_equal(Axy, np.array([[ 0, 0, 0, 0, 0],
                                      [ 0, 1, 0, -1, 0],
                                      [ 0, 0, 0, -0, 0],
                                      [ 0, -1, -0, 1, 0],
                                      [ 0, 0, 0, 0, 0]]))
    assert_array_equal(Ayy, np.array([[ 0, 0, 0, 0, 0],
                                      [ 0, 1, 4, 1, 0],
                                      [ 0, 0, 0, 0, 0],
                                      [ 0, 1, 4, 1, 0],
                                      [ 0, 0, 0, 0, 0]]))
def test_hessian_matrix():
    """Hessian of a centred impulse: Hxx and Hyy respond at the centre, Hxy is zero."""
    square = np.zeros((5, 5))
    square[2, 2] = 1
    Hxx, Hxy, Hyy = hessian_matrix(square, sigma=0.1)
    assert_array_equal(Hxx, np.array([[0, 0, 0, 0, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 1, 0, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 0, 0, 0]]))
    assert_array_equal(Hxy, np.array([[0, 0, 0, 0, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 0, 0, 0]]))
    assert_array_equal(Hyy, np.array([[0, 0, 0, 0, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 1, 0, 0],
                                      [0, 0, 0, 0, 0],
                                      [0, 0, 0, 0, 0]]))
def test_structure_tensor_eigvals():
    """Structure-tensor eigenvalues of an impulse: l1 carries the response, l2 is zero."""
    square = np.zeros((5, 5))
    square[2, 2] = 1
    Axx, Axy, Ayy = structure_tensor(square, sigma=0.1)
    l1, l2 = structure_tensor_eigvals(Axx, Axy, Ayy)
    assert_array_equal(l1, np.array([[0, 0, 0, 0, 0],
                                     [0, 2, 4, 2, 0],
                                     [0, 4, 0, 4, 0],
                                     [0, 2, 4, 2, 0],
                                     [0, 0, 0, 0, 0]]))
    assert_array_equal(l2, np.array([[0, 0, 0, 0, 0],
                                     [0, 0, 0, 0, 0],
                                     [0, 0, 0, 0, 0],
                                     [0, 0, 0, 0, 0],
                                     [0, 0, 0, 0, 0]]))
def test_hessian_matrix_eigvals():
    """Hessian eigenvalues of an impulse: both are 1 at the centre, 0 elsewhere."""
    square = np.zeros((5, 5))
    square[2, 2] = 1
    Hxx, Hxy, Hyy = hessian_matrix(square, sigma=0.1)
    l1, l2 = hessian_matrix_eigvals(Hxx, Hxy, Hyy)
    assert_array_equal(l1, np.array([[0, 0, 0, 0, 0],
                                     [0, 0, 0, 0, 0],
                                     [0, 0, 1, 0, 0],
                                     [0, 0, 0, 0, 0],
                                     [0, 0, 0, 0, 0]]))
    assert_array_equal(l2, np.array([[0, 0, 0, 0, 0],
                                     [0, 0, 0, 0, 0],
                                     [0, 0, 1, 0, 0],
                                     [0, 0, 0, 0, 0],
                                     [0, 0, 0, 0, 0]]))
@test_parallel()
def test_hessian_matrix_det():
    """Hessian determinant of an isolated bright pixel is ~0 at a large scale."""
    image = np.zeros((5, 5))
    image[2, 2] = 1
    det = hessian_matrix_det(image, 5)
    assert_almost_equal(det, 0, decimal=3)
@test_parallel()
def test_square_image():
    """Detectors on a bright square: Moravec fires along the edges, the
    corner detectors localise the single interior corner."""
    image = np.zeros((50, 50)).astype(float)
    image[:25, :25] = 1.

    # Moravec: interest points along the square's edges.
    assert len(peak_local_max(corner_moravec(image))) == 57

    # Harris (both response methods) and Shi-Tomasi: a single corner.
    assert len(peak_local_max(corner_harris(image, method='k'))) == 1
    assert len(peak_local_max(corner_harris(image, method='eps'))) == 1
    assert len(peak_local_max(corner_shi_tomasi(image))) == 1
def test_noisy_square_image():
    """Corner detectors still find the square's corner under mild uniform noise."""
    image = np.zeros((50, 50)).astype(float)
    image[:25, :25] = 1.
    np.random.seed(seed=1234)
    image = image + np.random.uniform(size=image.shape) * .2

    # Moravec: edge responses make the exact count noise-dependent, so only
    # require that some interest points are found.
    assert peak_local_max(corner_moravec(image)).any()

    # Harris and Shi-Tomasi: smoothing (sigma=1.5) isolates the single corner.
    assert len(peak_local_max(corner_harris(image, sigma=1.5, method='k'))) == 1
    assert len(peak_local_max(corner_harris(image, sigma=1.5, method='eps'))) == 1
    assert len(peak_local_max(corner_shi_tomasi(image, sigma=1.5))) == 1
def test_squared_dot():
    """Harris and Shi-Tomasi place a small bright square's peak at its centre."""
    image = np.zeros((50, 50))
    image[4:8, 4:8] = 1
    image = img_as_float(image)
    expected = np.array([[6, 6]])

    # Moravec is known to fail on this pattern, so it is not checked here.
    assert (peak_local_max(corner_harris(image)) == expected).all()
    assert (peak_local_max(corner_shi_tomasi(image)) == expected).all()
def test_rotated_img():
    """Each detector yields matching (axis-swapped) results on an image and
    its transpose."""
    image = img_as_float(data.astronaut().mean(axis=2))
    transposed = image.T

    for detector in (corner_moravec, corner_harris, corner_shi_tomasi):
        peaks = peak_local_max(detector(image))
        peaks_rotated = peak_local_max(detector(transposed))
        # Transposing the image swaps the coordinate axes of every detection.
        assert (np.sort(peaks[:, 0]) == np.sort(peaks_rotated[:, 1])).all()
        assert (np.sort(peaks[:, 1]) == np.sort(peaks_rotated[:, 0])).all()
def test_subpix_edge():
    """Subpixel refinement lands on the saddle point between two squares."""
    image = np.zeros((50, 50))
    image[:25, :25] = 255
    image[25:, 25:] = 255
    peak = peak_local_max(corner_harris(image), num_peaks=1)
    assert_array_equal(corner_subpix(image, peak)[0], (24.5, 24.5))
def test_subpix_dot():
    """Subpixel refinement of an isolated bright pixel stays on that pixel."""
    image = np.zeros((50, 50))
    image[25, 25] = 255
    peak = peak_local_max(corner_harris(image), num_peaks=1)
    assert_array_equal(corner_subpix(image, peak)[0], (25, 25))
def test_subpix_no_class():
    """Subpixel refinement returns NaN where no corner class can be assigned."""
    image = np.zeros((50, 50))
    candidate = np.array([[25, 25]])
    assert_array_equal(corner_subpix(image, candidate)[0], (np.nan, np.nan))

    # A vanishingly small response still yields no classification.
    image[25, 25] = 1e-10
    peak_local_max(corner_harris(image), num_peaks=1)  # NOTE(review): result unused in the original test
    assert_array_equal(corner_subpix(image, candidate)[0], (np.nan, np.nan))
def test_subpix_border():
    """Subpixel positions of corners near the image border match a frozen reference."""
    img = np.zeros((50, 50))
    img[1:25,1:25] = 255
    img[25:-1,25:-1] = 255
    corner = corner_peaks(corner_harris(img), min_distance=1)
    subpix = corner_subpix(img, corner, window_size=11)
    ref = np.array([[ 0.52040816, 0.52040816],
                    [ 0.52040816, 24.47959184],
                    [24.47959184, 0.52040816],
                    [24.5 , 24.5 ],
                    [24.52040816, 48.47959184],
                    [48.47959184, 24.52040816],
                    [48.47959184, 48.47959184]])
    assert_almost_equal(subpix, ref)
def test_num_peaks():
    """For a bunch of different values of num_peaks, check that
    peak_local_max returns exactly the right amount of peaks. Test
    is run on the astronaut image in order to produce a sufficient number of corners"""
    img_corners = corner_harris(rgb2gray(data.astronaut()))

    for i in range(20):
        # np.random.random_integers is deprecated (removed in NumPy >= 1.25);
        # np.random.randint(1, 21) draws from the same inclusive range [1, 20].
        n = np.random.randint(1, 21)
        results = peak_local_max(img_corners, num_peaks=n)
        assert (results.shape[0] == n)
def test_corner_peaks():
    """min_distance and indices control how a 2x2 plateau of responses is reported."""
    response = np.zeros((5, 5))
    response[2:4, 2:4] = 1

    # With the default min_distance the plateau collapses to one peak.
    assert len(corner_peaks(response, exclude_border=False)) == 1

    # min_distance=0 keeps all four plateau pixels as separate peaks...
    assert len(corner_peaks(response, exclude_border=False, min_distance=0)) == 4

    # ...and indices=False reports the same four as a boolean mask.
    mask = corner_peaks(response, exclude_border=False, min_distance=0,
                        indices=False)
    assert np.sum(mask) == 4
def test_blank_image_nans():
    """Every detector must return finite responses on a constant-intensity
    image (regression test: some detectors used to return NaN there)."""
    constant_image = np.zeros((20, 20))
    for detector in (corner_moravec, corner_harris, corner_shi_tomasi,
                     corner_kitchen_rosenfeld, corner_foerstner):
        assert np.all(np.isfinite(detector(constant_image)))
def test_corner_fast_image_unsupported_error():
    """corner_fast rejects multichannel (3-D) input with ValueError."""
    rgb_image = np.zeros((20, 20, 3))
    assert_raises(ValueError, corner_fast, rgb_image)
@test_parallel()
def test_corner_fast_lena():
    """FAST corners on the astronaut image match a frozen list of locations.

    NOTE(review): despite the historical "lena" name, the fixture image used
    here is data.astronaut().
    """
    img = rgb2gray(data.astronaut())
    expected = np.array([[101, 198],
                         [140, 205],
                         [141, 242],
                         [177, 156],
                         [188, 113],
                         [197, 148],
                         [213, 117],
                         [223, 375],
                         [232, 266],
                         [245, 137],
                         [249, 171],
                         [300, 244],
                         [305, 57],
                         [325, 245],
                         [339, 242],
                         [346, 279],
                         [353, 172],
                         [358, 307],
                         [362, 252],
                         [362, 328],
                         [363, 192],
                         [364, 147],
                         [369, 159],
                         [374, 171],
                         [379, 183],
                         [387, 195],
                         [390, 149],
                         [401, 197],
                         [403, 162],
                         [413, 181],
                         [444, 310],
                         [464, 251],
                         [476, 250],
                         [489, 155],
                         [492, 139],
                         [494, 169],
                         [496, 266]])
    actual = corner_peaks(corner_fast(img, 12, 0.3))
    assert_array_equal(actual, expected)
def test_corner_orientations_image_unsupported_error():
    """corner_orientations rejects multichannel (3-D) input with ValueError."""
    rgb_image = np.zeros((20, 20, 3))
    assert_raises(ValueError, corner_orientations, rgb_image,
                  np.asarray([[7, 7]]), np.ones((3, 3)))
def test_corner_orientations_even_shape_error():
    """corner_orientations rejects an even-sized mask with ValueError."""
    gray_image = np.zeros((20, 20))
    assert_raises(ValueError, corner_orientations, gray_image,
                  np.asarray([[7, 7]]), np.ones((4, 4)))
@test_parallel()
def test_corner_orientations_lena():
    """FAST-corner orientations on the lena image match frozen reference values."""
    # NOTE(review): data.lena() was removed from recent scikit-image releases;
    # this test assumes a version that still ships it -- confirm.
    img = rgb2gray(data.lena())
    corners = corner_peaks(corner_fast(img, 11, 0.35))
    expected = np.array([-1.9195897 , -3.03159624, -1.05991162, -2.89573739,
                         -2.61607644, 2.98660159])
    actual = corner_orientations(img, corners, octagon(3, 2))
    assert_almost_equal(actual, expected)
def test_corner_orientations_square():
    # An axis-aligned bright square has four corners whose orientations
    # point along the image diagonals.
    square = np.zeros((12, 12))
    square[3:9, 3:9] = 1
    keypoints = corner_peaks(corner_fast(square, 9), min_distance=1)
    orientations_deg = np.rad2deg(
        corner_orientations(square, keypoints, octagon(3, 2)))
    assert_array_equal(orientations_deg,
                       np.array([45., 135., -45., -135.]))
if __name__ == '__main__':
    # Allow running this test module directly (nose-style discovery).
    from numpy import testing
    testing.run_module_suite()
| bsd-3-clause |
frederick-masterton/django | django/db/models/query.py | 4 | 77548 | """
The main QuerySet implementation. This provides the public API for the ORM.
"""
import copy
import itertools
import sys
from django.conf import settings
from django.core import exceptions
from django.db import connections, router, transaction, IntegrityError
from django.db.models.constants import LOOKUP_SEP
from django.db.models.fields import AutoField, Empty
from django.db.models.query_utils import (Q, select_related_descend,
deferred_class_factory, InvalidQuery)
from django.db.models.deletion import Collector
from django.db.models.sql.constants import CURSOR
from django.db.models import sql
from django.utils.functional import partition
from django.utils import six
from django.utils import timezone
# The maximum number (one less than the max to be precise) of results to fetch
# in a get() query
MAX_GET_RESULTS = 20
# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
# Pull into this namespace for backwards compatibility.
EmptyResultSet = sql.EmptyResultSet
def _pickle_queryset(class_bases, class_dict):
    """
    Used by `__reduce__` to rebuild, during unpickling, the dynamically
    created `QuerySet` subclass onto which the output of `__getstate__`
    is applied afterwards.
    See `__reduce__` for more details.
    """
    dynamic_class = type(class_bases[0].__name__, class_bases, class_dict)
    instance = Empty()
    instance.__class__ = dynamic_class
    return instance
class QuerySet(object):
"""
Represents a lazy database lookup for a set of objects.
"""
def __init__(self, model=None, query=None, using=None, hints=None):
    self.model = model
    self._db = using  # database alias forced via using(); None -> router decides
    self._hints = hints or {}  # hints passed to the database routers
    self.query = query or sql.Query(self.model)
    self._result_cache = None  # list of results once the queryset is evaluated
    self._sticky_filter = False  # see _next_is_sticky()
    self._for_write = False  # route to the write database when True
    self._prefetch_related_lookups = []  # lookups queued by prefetch_related()
    self._prefetch_done = False
    self._known_related_objects = {}  # {rel_field, {pk: rel_obj}}
def as_manager(cls):
    # Address the circular dependency between `Queryset` and `Manager`.
    from django.db.models.manager import Manager
    return Manager.from_queryset(cls)()
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
########################
# PYTHON MAGIC METHODS #
########################
def __deepcopy__(self, memo):
"""
Deep copy of a QuerySet doesn't populate the cache
"""
obj = self.__class__()
for k, v in self.__dict__.items():
if k == '_result_cache':
obj.__dict__[k] = None
else:
obj.__dict__[k] = copy.deepcopy(v, memo)
return obj
def __getstate__(self):
"""
Allows the QuerySet to be pickled.
"""
# Force the cache to be fully populated.
self._fetch_all()
obj_dict = self.__dict__.copy()
return obj_dict
def __reduce__(self):
"""
Used by pickle to deal with the types that we create dynamically when
specialized queryset such as `ValuesQuerySet` are used in conjunction
with querysets that are *subclasses* of `QuerySet`.
See `_clone` implementation for more details.
"""
if hasattr(self, '_specialized_queryset_class'):
class_bases = (
self._specialized_queryset_class,
self._base_queryset_class,
)
class_dict = {
'_specialized_queryset_class': self._specialized_queryset_class,
'_base_queryset_class': self._base_queryset_class,
}
return _pickle_queryset, (class_bases, class_dict), self.__getstate__()
return super(QuerySet, self).__reduce__()
def __repr__(self):
data = list(self[:REPR_OUTPUT_SIZE + 1])
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
return repr(data)
def __len__(self):
self._fetch_all()
return len(self._result_cache)
def __iter__(self):
"""
The queryset iterator protocol uses three nested iterators in the
default case:
1. sql.compiler:execute_sql()
- Returns 100 rows at time (constants.GET_ITERATOR_CHUNK_SIZE)
using cursor.fetchmany(). This part is responsible for
doing some column masking, and returning the rows in chunks.
2. sql/compiler.results_iter()
- Returns one row at time. At this point the rows are still just
tuples. In some cases the return values are converted to
Python values at this location (see resolve_columns(),
resolve_aggregate()).
3. self.iterator()
- Responsible for turning the rows into model objects.
"""
self._fetch_all()
return iter(self._result_cache)
def __nonzero__(self):
self._fetch_all()
return bool(self._result_cache)
def __getitem__(self, k):
    """
    Retrieves an item or slice from the set of results.

    Negative indices and bounds are rejected because they cannot be
    translated into SQL LIMIT/OFFSET.
    """
    if not isinstance(k, (slice,) + six.integer_types):
        raise TypeError
    assert ((not isinstance(k, slice) and (k >= 0)) or
            (isinstance(k, slice) and (k.start is None or k.start >= 0) and
             (k.stop is None or k.stop >= 0))), \
        "Negative indexing is not supported."
    # Already evaluated: serve straight from the cache.
    if self._result_cache is not None:
        return self._result_cache[k]
    if isinstance(k, slice):
        qs = self._clone()
        if k.start is not None:
            start = int(k.start)
        else:
            start = None
        if k.stop is not None:
            stop = int(k.stop)
        else:
            stop = None
        qs.query.set_limits(start, stop)
        # A step cannot be expressed in SQL, so a stepped slice forces
        # evaluation here and is stepped in Python; otherwise the sliced
        # queryset stays lazy.
        return list(qs)[::k.step] if k.step else qs
    # Single index: fetch exactly one row via LIMIT 1 at OFFSET k.
    qs = self._clone()
    qs.query.set_limits(k, k + 1)
    return list(qs)[0]
def __and__(self, other):
self._merge_sanity_check(other)
if isinstance(other, EmptyQuerySet):
return other
if isinstance(self, EmptyQuerySet):
return self
combined = self._clone()
combined._merge_known_related_objects(other)
combined.query.combine(other.query, sql.AND)
return combined
def __or__(self, other):
self._merge_sanity_check(other)
if isinstance(self, EmptyQuerySet):
return other
if isinstance(other, EmptyQuerySet):
return self
combined = self._clone()
combined._merge_known_related_objects(other)
combined.query.combine(other.query, sql.OR)
return combined
####################################
# METHODS THAT DO DATABASE QUERIES #
####################################
def iterator(self):
"""
An iterator over the results from applying this QuerySet to the
database.
"""
fill_cache = False
if connections[self.db].features.supports_select_related:
fill_cache = self.query.select_related
if isinstance(fill_cache, dict):
requested = fill_cache
else:
requested = None
max_depth = self.query.max_depth
extra_select = list(self.query.extra_select)
aggregate_select = list(self.query.aggregate_select)
only_load = self.query.get_loaded_field_names()
if not fill_cache:
fields = self.model._meta.concrete_fields
load_fields = []
# If only/defer clauses have been specified,
# build the list of fields that are to be loaded.
if only_load:
for field, model in self.model._meta.get_concrete_fields_with_model():
if model is None:
model = self.model
try:
if field.name in only_load[model]:
# Add a field that has been explicitly included
load_fields.append(field.name)
except KeyError:
# Model wasn't explicitly listed in the only_load table
# Therefore, we need to load all fields from this model
load_fields.append(field.name)
index_start = len(extra_select)
aggregate_start = index_start + len(load_fields or self.model._meta.concrete_fields)
skip = None
if load_fields and not fill_cache:
# Some fields have been deferred, so we have to initialize
# via keyword arguments.
skip = set()
init_list = []
for field in fields:
if field.name not in load_fields:
skip.add(field.attname)
else:
init_list.append(field.attname)
model_cls = deferred_class_factory(self.model, skip)
# Cache db and model outside the loop
db = self.db
model = self.model
compiler = self.query.get_compiler(using=db)
if fill_cache:
klass_info = get_klass_info(model, max_depth=max_depth,
requested=requested, only_load=only_load)
for row in compiler.results_iter():
if fill_cache:
obj, _ = get_cached_row(row, index_start, db, klass_info,
offset=len(aggregate_select))
else:
# Omit aggregates in object creation.
row_data = row[index_start:aggregate_start]
if skip:
obj = model_cls(**dict(zip(init_list, row_data)))
else:
obj = model(*row_data)
# Store the source database of the object
obj._state.db = db
# This object came from the database; it's not being added.
obj._state.adding = False
if extra_select:
for i, k in enumerate(extra_select):
setattr(obj, k, row[i])
# Add the aggregates to the model
if aggregate_select:
for i, aggregate in enumerate(aggregate_select):
setattr(obj, aggregate, row[i + aggregate_start])
# Add the known related objects to the model, if there are any
if self._known_related_objects:
for field, rel_objs in self._known_related_objects.items():
pk = getattr(obj, field.get_attname())
try:
rel_obj = rel_objs[pk]
except KeyError:
pass # may happen in qs1 | qs2 scenarios
else:
setattr(obj, field.name, rel_obj)
yield obj
def aggregate(self, *args, **kwargs):
"""
Returns a dictionary containing the calculations (aggregation)
over the current queryset
If args is present the expression is passed as a kwarg using
the Aggregate object's default alias.
"""
if self.query.distinct_fields:
raise NotImplementedError("aggregate() + distinct(fields) not implemented.")
for arg in args:
kwargs[arg.default_alias] = arg
query = self.query.clone()
force_subq = query.low_mark != 0 or query.high_mark is not None
for (alias, aggregate_expr) in kwargs.items():
query.add_aggregate(aggregate_expr, self.model, alias,
is_summary=True)
return query.get_aggregation(using=self.db, force_subq=force_subq)
def count(self):
"""
Performs a SELECT COUNT() and returns the number of records as an
integer.
If the QuerySet is already fully cached this simply returns the length
of the cached results set to avoid multiple SELECT COUNT(*) calls.
"""
if self._result_cache is not None:
return len(self._result_cache)
return self.query.get_count(using=self.db)
def get(self, *args, **kwargs):
"""
Performs the query and returns a single object matching the given
keyword arguments.
"""
clone = self.filter(*args, **kwargs)
if self.query.can_filter():
clone = clone.order_by()
clone = clone[:MAX_GET_RESULTS + 1]
num = len(clone)
if num == 1:
return clone._result_cache[0]
if not num:
raise self.model.DoesNotExist(
"%s matching query does not exist." %
self.model._meta.object_name)
raise self.model.MultipleObjectsReturned(
"get() returned more than one %s -- it returned %s!" % (
self.model._meta.object_name,
num if num <= MAX_GET_RESULTS else 'more than %s' % MAX_GET_RESULTS
)
)
def create(self, **kwargs):
"""
Creates a new object with the given kwargs, saving it to the database
and returning the created object.
"""
obj = self.model(**kwargs)
self._for_write = True
obj.save(force_insert=True, using=self.db)
return obj
def bulk_create(self, objs, batch_size=None):
"""
Inserts each of the instances into the database. This does *not* call
save() on each of the instances, does not send any pre/post save
signals, and does not set the primary key attribute if it is an
autoincrement field.
"""
# So this case is fun. When you bulk insert you don't get the primary
# keys back (if it's an autoincrement), so you can't insert into the
# child tables which references this. There are two workarounds, 1)
# this could be implemented if you didn't have an autoincrement pk,
# and 2) you could do it by doing O(n) normal inserts into the parent
# tables to get the primary keys back, and then doing a single bulk
# insert into the childmost table. Some databases might allow doing
# this by using RETURNING clause for the insert query. We're punting
# on these for now because they are relatively rare cases.
assert batch_size is None or batch_size > 0
if self.model._meta.parents:
raise ValueError("Can't bulk create an inherited model")
if not objs:
return objs
self._for_write = True
connection = connections[self.db]
fields = self.model._meta.local_concrete_fields
with transaction.atomic(using=self.db, savepoint=False):
if (connection.features.can_combine_inserts_with_and_without_auto_increment_pk
and self.model._meta.has_auto_field):
self._batched_insert(objs, fields, batch_size)
else:
objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
if objs_with_pk:
self._batched_insert(objs_with_pk, fields, batch_size)
if objs_without_pk:
fields = [f for f in fields if not isinstance(f, AutoField)]
self._batched_insert(objs_without_pk, fields, batch_size)
return objs
def get_or_create(self, defaults=None, **kwargs):
"""
Looks up an object with the given kwargs, creating one if necessary.
Returns a tuple of (object, created), where created is a boolean
specifying whether an object was created.
"""
lookup, params = self._extract_model_params(defaults, **kwargs)
self._for_write = True
try:
return self.get(**lookup), False
except self.model.DoesNotExist:
return self._create_object_from_params(lookup, params)
def update_or_create(self, defaults=None, **kwargs):
"""
Looks up an object with the given kwargs, updating one with defaults
if it exists, otherwise creates a new one.
Returns a tuple (object, created), where created is a boolean
specifying whether an object was created.
"""
defaults = defaults or {}
lookup, params = self._extract_model_params(defaults, **kwargs)
self._for_write = True
try:
obj = self.get(**lookup)
except self.model.DoesNotExist:
obj, created = self._create_object_from_params(lookup, params)
if created:
return obj, created
for k, v in six.iteritems(defaults):
setattr(obj, k, v)
with transaction.atomic(using=self.db, savepoint=False):
obj.save(using=self.db)
return obj, False
def _create_object_from_params(self, lookup, params):
"""
Tries to create an object using passed params.
Used by get_or_create and update_or_create
"""
obj = self.model(**params)
try:
with transaction.atomic(using=self.db):
obj.save(force_insert=True, using=self.db)
return obj, True
except IntegrityError:
exc_info = sys.exc_info()
try:
return self.get(**lookup), False
except self.model.DoesNotExist:
pass
six.reraise(*exc_info)
def _extract_model_params(self, defaults, **kwargs):
"""
Prepares `lookup` (kwargs that are valid model attributes), `params`
(for creating a model instance) based on given kwargs; for use by
get_or_create and update_or_create.
"""
defaults = defaults or {}
lookup = kwargs.copy()
for f in self.model._meta.fields:
if f.attname in lookup:
lookup[f.name] = lookup.pop(f.attname)
params = dict((k, v) for k, v in kwargs.items() if LOOKUP_SEP not in k)
params.update(defaults)
return lookup, params
def _earliest_or_latest(self, field_name=None, direction="-"):
"""
Returns the latest object, according to the model's
'get_latest_by' option or optional given field_name.
"""
order_by = field_name or getattr(self.model._meta, 'get_latest_by')
assert bool(order_by), "earliest() and latest() require either a "\
"field_name parameter or 'get_latest_by' in the model"
assert self.query.can_filter(), \
"Cannot change a query once a slice has been taken."
obj = self._clone()
obj.query.set_limits(high=1)
obj.query.clear_ordering(force_empty=True)
obj.query.add_ordering('%s%s' % (direction, order_by))
return obj.get()
def earliest(self, field_name=None):
return self._earliest_or_latest(field_name=field_name, direction="")
def latest(self, field_name=None):
return self._earliest_or_latest(field_name=field_name, direction="-")
def first(self):
"""
Returns the first object of a query, returns None if no match is found.
"""
qs = self if self.ordered else self.order_by('pk')
try:
return qs[0]
except IndexError:
return None
def last(self):
"""
Returns the last object of a query, returns None if no match is found.
"""
qs = self.reverse() if self.ordered else self.order_by('-pk')
try:
return qs[0]
except IndexError:
return None
def in_bulk(self, id_list):
"""
Returns a dictionary mapping each of the given IDs to the object with
that ID.
"""
assert self.query.can_filter(), \
"Cannot use 'limit' or 'offset' with in_bulk"
if not id_list:
return {}
qs = self.filter(pk__in=id_list).order_by()
return dict((obj._get_pk_val(), obj) for obj in qs)
def delete(self):
"""
Deletes the records in the current QuerySet.
"""
assert self.query.can_filter(), \
"Cannot use 'limit' or 'offset' with delete."
del_query = self._clone()
# The delete is actually 2 queries - one to find related objects,
# and one to delete. Make sure that the discovery of related
# objects is performed on the same database as the deletion.
del_query._for_write = True
# Disable non-supported fields.
del_query.query.select_for_update = False
del_query.query.select_related = False
del_query.query.clear_ordering(force_empty=True)
collector = Collector(using=del_query.db)
collector.collect(del_query)
collector.delete()
# Clear the result cache, in case this QuerySet gets reused.
self._result_cache = None
delete.alters_data = True
delete.queryset_only = True
def _raw_delete(self, using):
"""
Deletes objects found from the given queryset in single direct SQL
query. No signals are sent, and there is no protection for cascades.
"""
sql.DeleteQuery(self.model).delete_qs(self, using)
_raw_delete.alters_data = True
def update(self, **kwargs):
"""
Updates all elements in the current QuerySet, setting all the given
fields to the appropriate values.
"""
assert self.query.can_filter(), \
"Cannot update a query once a slice has been taken."
self._for_write = True
query = self.query.clone(sql.UpdateQuery)
query.add_update_values(kwargs)
with transaction.atomic(using=self.db, savepoint=False):
rows = query.get_compiler(self.db).execute_sql(CURSOR)
self._result_cache = None
return rows
update.alters_data = True
def _update(self, values):
"""
A version of update that accepts field objects instead of field names.
Used primarily for model saving and not intended for use by general
code (it requires too much poking around at model internals to be
useful at that level).
"""
assert self.query.can_filter(), \
"Cannot update a query once a slice has been taken."
query = self.query.clone(sql.UpdateQuery)
query.add_update_fields(values)
self._result_cache = None
return query.get_compiler(self.db).execute_sql(CURSOR)
_update.alters_data = True
_update.queryset_only = False
def exists(self):
if self._result_cache is None:
return self.query.has_results(using=self.db)
return bool(self._result_cache)
def _prefetch_related_objects(self):
# This method can only be called once the result cache has been filled.
prefetch_related_objects(self._result_cache, self._prefetch_related_lookups)
self._prefetch_done = True
##################################################
# PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
##################################################
def raw(self, raw_query, params=None, translations=None, using=None):
if using is None:
using = self.db
return RawQuerySet(raw_query, model=self.model,
params=params, translations=translations,
using=using)
def values(self, *fields):
return self._clone(klass=ValuesQuerySet, setup=True, _fields=fields)
def values_list(self, *fields, **kwargs):
flat = kwargs.pop('flat', False)
if kwargs:
raise TypeError('Unexpected keyword arguments to values_list: %s'
% (list(kwargs),))
if flat and len(fields) > 1:
raise TypeError("'flat' is not valid when values_list is called with more than one field.")
return self._clone(klass=ValuesListQuerySet, setup=True, flat=flat,
_fields=fields)
def dates(self, field_name, kind, order='ASC'):
"""
Returns a list of date objects representing all available dates for
the given field_name, scoped to 'kind'.
"""
assert kind in ("year", "month", "day"), \
"'kind' must be one of 'year', 'month' or 'day'."
assert order in ('ASC', 'DESC'), \
"'order' must be either 'ASC' or 'DESC'."
return self._clone(klass=DateQuerySet, setup=True,
_field_name=field_name, _kind=kind, _order=order)
def datetimes(self, field_name, kind, order='ASC', tzinfo=None):
"""
Returns a list of datetime objects representing all available
datetimes for the given field_name, scoped to 'kind'.
"""
assert kind in ("year", "month", "day", "hour", "minute", "second"), \
"'kind' must be one of 'year', 'month', 'day', 'hour', 'minute' or 'second'."
assert order in ('ASC', 'DESC'), \
"'order' must be either 'ASC' or 'DESC'."
if settings.USE_TZ:
if tzinfo is None:
tzinfo = timezone.get_current_timezone()
else:
tzinfo = None
return self._clone(klass=DateTimeQuerySet, setup=True,
_field_name=field_name, _kind=kind, _order=order, _tzinfo=tzinfo)
def none(self):
"""
Returns an empty QuerySet.
"""
clone = self._clone()
clone.query.set_empty()
return clone
##################################################################
# PUBLIC METHODS THAT ALTER ATTRIBUTES AND RETURN A NEW QUERYSET #
##################################################################
def all(self):
"""
Returns a new QuerySet that is a copy of the current one. This allows a
QuerySet to proxy for a model manager in some cases.
"""
return self._clone()
def filter(self, *args, **kwargs):
"""
Returns a new QuerySet instance with the args ANDed to the existing
set.
"""
return self._filter_or_exclude(False, *args, **kwargs)
def exclude(self, *args, **kwargs):
"""
Returns a new QuerySet instance with NOT (args) ANDed to the existing
set.
"""
return self._filter_or_exclude(True, *args, **kwargs)
def _filter_or_exclude(self, negate, *args, **kwargs):
if args or kwargs:
assert self.query.can_filter(), \
"Cannot filter a query once a slice has been taken."
clone = self._clone()
if negate:
clone.query.add_q(~Q(*args, **kwargs))
else:
clone.query.add_q(Q(*args, **kwargs))
return clone
def complex_filter(self, filter_obj):
    """
    Returns a new QuerySet instance with filter_obj added to the filters.
    filter_obj can be a Q object (or anything with an add_to_query()
    method) or a dictionary of keyword lookup arguments.
    This exists to support framework features such as 'limit_choices_to',
    and usually it will be more natural to use other methods.
    """
    if isinstance(filter_obj, Q) or hasattr(filter_obj, 'add_to_query'):
        clone = self._clone()
        clone.query.add_q(filter_obj)
        return clone
    else:
        # NOTE(review): None is passed for `negate` (falsy, so this acts
        # like filter() rather than exclude()); sibling callers pass
        # True/False explicitly -- presumably this should be False for
        # consistency.
        return self._filter_or_exclude(None, **filter_obj)
def select_for_update(self, nowait=False):
"""
Returns a new QuerySet instance that will select objects with a
FOR UPDATE lock.
"""
obj = self._clone()
obj._for_write = True
obj.query.select_for_update = True
obj.query.select_for_update_nowait = nowait
return obj
def select_related(self, *fields):
"""
Returns a new QuerySet instance that will select related objects.
If fields are specified, they must be ForeignKey fields and only those
related objects are included in the selection.
If select_related(None) is called, the list is cleared.
"""
obj = self._clone()
if fields == (None,):
obj.query.select_related = False
elif fields:
obj.query.add_select_related(fields)
else:
obj.query.select_related = True
return obj
def prefetch_related(self, *lookups):
    """
    Returns a new QuerySet instance that will prefetch the specified
    Many-To-One and Many-To-Many related objects when the QuerySet is
    evaluated.
    When prefetch_related() is called more than once, the list of lookups to
    prefetch is appended to. If prefetch_related(None) is called, the
    list is cleared.
    """
    clone = self._clone()
    if lookups == (None,):
        clone._prefetch_related_lookups = []
    else:
        clone._prefetch_related_lookups.extend(lookups)
    return clone
def annotate(self, *args, **kwargs):
"""
Return a query set in which the returned objects have been annotated
with data aggregated from related fields.
"""
for arg in args:
if arg.default_alias in kwargs:
raise ValueError("The named annotation '%s' conflicts with the "
"default name for another annotation."
% arg.default_alias)
kwargs[arg.default_alias] = arg
names = getattr(self, '_fields', None)
if names is None:
names = set(self.model._meta.get_all_field_names())
for aggregate in kwargs:
if aggregate in names:
raise ValueError("The annotation '%s' conflicts with a field on "
"the model." % aggregate)
obj = self._clone()
obj._setup_aggregate_query(list(kwargs))
# Add the aggregates to the query
for (alias, aggregate_expr) in kwargs.items():
obj.query.add_aggregate(aggregate_expr, self.model, alias,
is_summary=False)
return obj
def order_by(self, *field_names):
"""
Returns a new QuerySet instance with the ordering changed.
"""
assert self.query.can_filter(), \
"Cannot reorder a query once a slice has been taken."
obj = self._clone()
obj.query.clear_ordering(force_empty=False)
obj.query.add_ordering(*field_names)
return obj
def distinct(self, *field_names):
"""
Returns a new QuerySet instance that will select only distinct results.
"""
assert self.query.can_filter(), \
"Cannot create distinct fields once a slice has been taken."
obj = self._clone()
obj.query.add_distinct_fields(*field_names)
return obj
def extra(self, select=None, where=None, params=None, tables=None,
order_by=None, select_params=None):
"""
Adds extra SQL fragments to the query.
"""
assert self.query.can_filter(), \
"Cannot change a query once a slice has been taken"
clone = self._clone()
clone.query.add_extra(select, select_params, where, params, tables, order_by)
return clone
def reverse(self):
"""
Reverses the ordering of the QuerySet.
"""
clone = self._clone()
clone.query.standard_ordering = not clone.query.standard_ordering
return clone
def defer(self, *fields):
"""
Defers the loading of data for certain fields until they are accessed.
The set of fields to defer is added to any existing set of deferred
fields. The only exception to this is if None is passed in as the only
parameter, in which case all deferrals are removed (None acts as a
reset option).
"""
clone = self._clone()
if fields == (None,):
clone.query.clear_deferred_loading()
else:
clone.query.add_deferred_loading(fields)
return clone
def only(self, *fields):
"""
Essentially, the opposite of defer. Only the fields passed into this
method and that are not already specified as deferred are loaded
immediately when the queryset is evaluated.
"""
if fields == (None,):
# Can only pass None to defer(), not only(), as the rest option.
# That won't stop people trying to do this, so let's be explicit.
raise TypeError("Cannot pass None as an argument to only().")
clone = self._clone()
clone.query.add_immediate_loading(fields)
return clone
def using(self, alias):
    """
    Selects which database this QuerySet should execute its query against.
    """
    clone = self._clone()
    clone._db = alias
    return clone
###################################
# PUBLIC INTROSPECTION ATTRIBUTES #
###################################
def ordered(self):
"""
Returns True if the QuerySet is ordered -- i.e. has an order_by()
clause or a default ordering on the model.
"""
if self.query.extra_order_by or self.query.order_by:
return True
elif self.query.default_ordering and self.query.get_meta().ordering:
return True
else:
return False
ordered = property(ordered)
@property
def db(self):
"Return the database that will be used if this query is executed now"
if self._for_write:
return self._db or router.db_for_write(self.model, **self._hints)
return self._db or router.db_for_read(self.model, **self._hints)
###################
# PRIVATE METHODS #
###################
def _insert(self, objs, fields, return_id=False, raw=False, using=None):
"""
Inserts a new record for the given model. This provides an interface to
the InsertQuery class and is how Model.save() is implemented.
"""
self._for_write = True
if using is None:
using = self.db
query = sql.InsertQuery(self.model)
query.insert_values(fields, objs, raw=raw)
return query.get_compiler(using=using).execute_sql(return_id)
_insert.alters_data = True
_insert.queryset_only = False
def _batched_insert(self, objs, fields, batch_size):
"""
A little helper method for bulk_insert to insert the bulk one batch
at a time. Inserts recursively a batch from the front of the bulk and
then _batched_insert() the remaining objects again.
"""
if not objs:
return
ops = connections[self.db].ops
batch_size = (batch_size or max(ops.bulk_batch_size(fields, objs), 1))
for batch in [objs[i:i + batch_size]
for i in range(0, len(objs), batch_size)]:
self.model._base_manager._insert(batch, fields=fields,
using=self.db)
def _clone(self, klass=None, setup=False, **kwargs):
if klass is None:
klass = self.__class__
elif not issubclass(self.__class__, klass):
base_queryset_class = getattr(self, '_base_queryset_class', self.__class__)
class_bases = (klass, base_queryset_class)
class_dict = {
'_base_queryset_class': base_queryset_class,
'_specialized_queryset_class': klass,
}
klass = type(klass.__name__, class_bases, class_dict)
query = self.query.clone()
if self._sticky_filter:
query.filter_is_sticky = True
c = klass(model=self.model, query=query, using=self._db, hints=self._hints)
c._for_write = self._for_write
c._prefetch_related_lookups = self._prefetch_related_lookups[:]
c._known_related_objects = self._known_related_objects
c.__dict__.update(kwargs)
if setup and hasattr(c, '_setup_query'):
c._setup_query()
return c
def _fetch_all(self):
if self._result_cache is None:
self._result_cache = list(self.iterator())
if self._prefetch_related_lookups and not self._prefetch_done:
self._prefetch_related_objects()
def _next_is_sticky(self):
"""
Indicates that the next filter call and the one following that should
be treated as a single filter. This is only important when it comes to
determining when to reuse tables for many-to-many filters. Required so
that we can filter naturally on the results of related managers.
This doesn't return a clone of the current QuerySet (it returns
"self"). The method is only used internally and should be immediately
followed by a filter() that does create a clone.
"""
self._sticky_filter = True
return self
def _merge_sanity_check(self, other):
"""
Checks that we are merging two comparable QuerySet classes. By default
this does nothing, but see the ValuesQuerySet for an example of where
it's useful.
"""
pass
def _merge_known_related_objects(self, other):
"""
Keep track of all known related objects from either QuerySet instance.
"""
for field, objects in other._known_related_objects.items():
self._known_related_objects.setdefault(field, {}).update(objects)
def _setup_aggregate_query(self, aggregates):
"""
Prepare the query for computing a result that contains aggregate annotations.
"""
opts = self.model._meta
if self.query.group_by is None:
field_names = [f.attname for f in opts.concrete_fields]
self.query.add_fields(field_names, False)
self.query.set_group_by()
def _prepare(self):
return self
def _as_sql(self, connection):
"""
Returns the internal query's SQL and parameters (as a tuple).
"""
obj = self.values("pk")
if obj._db is None or connection == connections[obj._db]:
return obj.query.get_compiler(connection=connection).as_nested_sql()
raise ValueError("Can't do subqueries with queries on different DBs.")
# When used as part of a nested query, a queryset will never be an "always
# empty" result.
value_annotation = True
def _add_hints(self, **hints):
"""
Update hinting information for later use by Routers
"""
# If there is any hinting information, add it to what we already know.
# If we have a new hint for an existing key, overwrite with the new value.
self._hints.update(hints)
def _has_filters(self):
"""
Checks if this QuerySet has any filtering going on. Note that this
isn't equivalent for checking if all objects are present in results,
for example qs[1:]._has_filters() -> False.
"""
return self.query.has_filters()
class InstanceCheckMeta(type):
    def __instancecheck__(self, instance):
        # isinstance(qs, EmptyQuerySet) holds for any queryset whose
        # query is known to produce no results (e.g. after .none()),
        # regardless of the queryset's concrete class.
        return instance.query.is_empty()
class EmptyQuerySet(six.with_metaclass(InstanceCheckMeta)):
"""
Marker class usable for checking if a queryset is empty by .none():
isinstance(qs.none(), EmptyQuerySet) -> True
"""
def __init__(self, *args, **kwargs):
raise TypeError("EmptyQuerySet can't be instantiated")
class ValuesQuerySet(QuerySet):
    """
    QuerySet returned by .values(): iterating yields dictionaries mapping
    selected field/extra/aggregate names to their values instead of model
    instances.
    """

    def __init__(self, *args, **kwargs):
        super(ValuesQuerySet, self).__init__(*args, **kwargs)
        # select_related isn't supported in values(). (FIXME -#3358)
        self.query.select_related = False
        # QuerySet.clone() will also set up the _fields attribute with the
        # names of the model fields to select.

    def only(self, *fields):
        raise NotImplementedError("ValuesQuerySet does not implement only()")

    def defer(self, *fields):
        raise NotImplementedError("ValuesQuerySet does not implement defer()")

    def iterator(self):
        # Purge any extra columns that haven't been explicitly asked for
        extra_names = list(self.query.extra_select)
        field_names = self.field_names
        aggregate_names = list(self.query.aggregate_select)
        # Row column order: extras first, then fields, then aggregates.
        names = extra_names + field_names + aggregate_names
        for row in self.query.get_compiler(self.db).results_iter():
            yield dict(zip(names, row))

    def delete(self):
        # values().delete() doesn't work currently - make sure it raises an
        # user friendly error.
        raise TypeError("Queries with .values() or .values_list() applied "
                        "can't be deleted")

    def _setup_query(self):
        """
        Constructs the field_names list that the values query will be
        retrieving.

        Called by the _clone() method after initializing the rest of the
        instance.
        """
        self.query.clear_deferred_loading()
        self.query.clear_select_fields()
        if self._fields:
            self.extra_names = []
            self.aggregate_names = []
            if not self.query._extra and not self.query._aggregates:
                # Short cut - if there are no extra or aggregates, then
                # the values() clause must be just field names.
                self.field_names = list(self._fields)
            else:
                self.query.default_cols = False
                self.field_names = []
                for f in self._fields:
                    # we inspect the full extra_select list since we might
                    # be adding back an extra select item that we hadn't
                    # had selected previously.
                    if self.query._extra and f in self.query._extra:
                        self.extra_names.append(f)
                    elif f in self.query.aggregate_select:
                        self.aggregate_names.append(f)
                    else:
                        self.field_names.append(f)
        else:
            # Default to all fields.
            self.extra_names = None
            self.field_names = [f.attname for f in self.model._meta.concrete_fields]
            self.aggregate_names = None
        self.query.select = []
        if self.extra_names is not None:
            self.query.set_extra_mask(self.extra_names)
        self.query.add_fields(self.field_names, True)
        if self.aggregate_names is not None:
            self.query.set_aggregate_mask(self.aggregate_names)

    def _clone(self, klass=None, setup=False, **kwargs):
        """
        Cloning a ValuesQuerySet preserves the current fields.
        """
        c = super(ValuesQuerySet, self)._clone(klass, **kwargs)
        if not hasattr(c, '_fields'):
            # Only clone self._fields if _fields wasn't passed into the cloning
            # call directly.
            c._fields = self._fields[:]
        c.field_names = self.field_names
        c.extra_names = self.extra_names
        c.aggregate_names = self.aggregate_names
        if setup and hasattr(c, '_setup_query'):
            c._setup_query()
        return c

    def _merge_sanity_check(self, other):
        # Combining (e.g. with | or &) only makes sense when both sides
        # select the same values.
        super(ValuesQuerySet, self)._merge_sanity_check(other)
        if (set(self.extra_names) != set(other.extra_names) or
                set(self.field_names) != set(other.field_names) or
                self.aggregate_names != other.aggregate_names):
            raise TypeError("Merging '%s' classes must involve the same values in each case."
                            % self.__class__.__name__)

    def _setup_aggregate_query(self, aggregates):
        """
        Prepare the query for computing a result that contains aggregate annotations.
        """
        self.query.set_group_by()
        if self.aggregate_names is not None:
            self.aggregate_names.extend(aggregates)
            self.query.set_aggregate_mask(self.aggregate_names)
        super(ValuesQuerySet, self)._setup_aggregate_query(aggregates)

    def _as_sql(self, connection):
        """
        For ValuesQuerySet (and subclasses like ValuesListQuerySet), they can
        only be used as nested queries if they're already set up to select only
        a single field (in which case, that is the field column that is
        returned). This differs from QuerySet.as_sql(), where the column to
        select is set up by Django.
        """
        if ((self._fields and len(self._fields) > 1) or
                (not self._fields and len(self.model._meta.fields) > 1)):
            raise TypeError('Cannot use a multi-field %s as a filter value.'
                            % self.__class__.__name__)
        obj = self._clone()
        if obj._db is None or connection == connections[obj._db]:
            return obj.query.get_compiler(connection=connection).as_nested_sql()
        raise ValueError("Can't do subqueries with queries on different DBs.")

    def _prepare(self):
        """
        Validates that we aren't trying to do a query like
        value__in=qs.values('value1', 'value2'), which isn't valid.
        """
        if ((self._fields and len(self._fields) > 1) or
                (not self._fields and len(self.model._meta.fields) > 1)):
            raise TypeError('Cannot use a multi-field %s as a filter value.'
                            % self.__class__.__name__)
        return self
class ValuesListQuerySet(ValuesQuerySet):
    """
    QuerySet returned by .values_list(): yields tuples of values (or bare
    values when flat=True is used with a single field).
    """

    def iterator(self):
        if self.flat and len(self._fields) == 1:
            # flat=True with exactly one field: yield the values themselves.
            for row in self.query.get_compiler(self.db).results_iter():
                yield row[0]
        elif not self.query.extra_select and not self.query.aggregate_select:
            # No extras/aggregates: rows already come back in field order.
            for row in self.query.get_compiler(self.db).results_iter():
                yield tuple(row)
        else:
            # When extra(select=...) or an annotation is involved, the extra
            # cols are always at the start of the row, and we need to reorder
            # the fields to match the order in self._fields.
            extra_names = list(self.query.extra_select)
            field_names = self.field_names
            aggregate_names = list(self.query.aggregate_select)
            names = extra_names + field_names + aggregate_names
            # If a field list has been specified, use it. Otherwise, use the
            # full list of fields, including extras and aggregates.
            if self._fields:
                fields = list(self._fields) + [f for f in aggregate_names if f not in self._fields]
            else:
                fields = names
            for row in self.query.get_compiler(self.db).results_iter():
                data = dict(zip(names, row))
                yield tuple(data[f] for f in fields)

    def _clone(self, *args, **kwargs):
        clone = super(ValuesListQuerySet, self)._clone(*args, **kwargs)
        if not hasattr(clone, "flat"):
            # Only assign flat if the clone didn't already get it from kwargs
            clone.flat = self.flat
        return clone
class DateQuerySet(QuerySet):
    """QuerySet variant whose query is swapped for sql.DateQuery (see _setup_query)."""

    def iterator(self):
        return self.query.get_compiler(self.db).results_iter()

    def _setup_query(self):
        """
        Sets up any special features of the query attribute.

        Called by the _clone() method after initializing the rest of the
        instance.
        """
        self.query.clear_deferred_loading()
        # Replace the generic query with the specialized DateQuery class.
        self.query = self.query.clone(klass=sql.DateQuery, setup=True)
        self.query.select = []
        self.query.add_select(self._field_name, self._kind, self._order)

    def _clone(self, klass=None, setup=False, **kwargs):
        c = super(DateQuerySet, self)._clone(klass, False, **kwargs)
        c._field_name = self._field_name
        c._kind = self._kind
        # NOTE(review): _order is not copied here; presumably it always
        # arrives via **kwargs on the initial clone — confirm against the
        # dates() caller.
        if setup and hasattr(c, '_setup_query'):
            c._setup_query()
        return c
class DateTimeQuerySet(QuerySet):
    """QuerySet variant whose query is swapped for sql.DateTimeQuery (see _setup_query)."""

    def iterator(self):
        return self.query.get_compiler(self.db).results_iter()

    def _setup_query(self):
        """
        Sets up any special features of the query attribute.

        Called by the _clone() method after initializing the rest of the
        instance.
        """
        self.query.clear_deferred_loading()
        # Replace the generic query with DateTimeQuery, threading through
        # the timezone used for truncation.
        self.query = self.query.clone(klass=sql.DateTimeQuery, setup=True, tzinfo=self._tzinfo)
        self.query.select = []
        self.query.add_select(self._field_name, self._kind, self._order)

    def _clone(self, klass=None, setup=False, **kwargs):
        c = super(DateTimeQuerySet, self)._clone(klass, False, **kwargs)
        c._field_name = self._field_name
        c._kind = self._kind
        c._tzinfo = self._tzinfo
        # NOTE(review): as in DateQuerySet, _order is presumably supplied
        # via **kwargs on the initial clone — confirm.
        if setup and hasattr(c, '_setup_query'):
            c._setup_query()
        return c
def get_klass_info(klass, max_depth=0, cur_depth=0, requested=None,
                   only_load=None, from_parent=None):
    """
    Helper function that recursively returns an information for a klass, to be
    used in get_cached_row. It exists just to compute this information only
    once for entire queryset. Otherwise it would be computed for each row, which
    leads to poor performance on large querysets.

    Arguments:
     * klass - the class to retrieve (and instantiate)
     * max_depth - the maximum depth to which a select_related()
       relationship should be explored.
     * cur_depth - the current depth in the select_related() tree.
       Used in recursive calls to determine if we should dig deeper.
     * requested - A dictionary describing the select_related() tree
       that is to be retrieved. keys are field names; values are
       dictionaries describing the keys on that related object that
       are themselves to be select_related().
     * only_load - if the query has had only() or defer() applied,
       this is the list of field names that will be returned. If None,
       the full field list for `klass` can be assumed.
     * from_parent - the parent model used to get to this model

    Note that when travelling from parent to child, we will only load child
    fields which aren't in the parent.

    Returns a tuple:
    (klass, field_names, field_count, related_fields, reverse_related_fields, pk_idx)
    """
    if max_depth and requested is None and cur_depth > max_depth:
        # We've recursed deeply enough; stop now.
        return None

    if only_load:
        load_fields = only_load.get(klass) or set()
        # When we create the object, we will also be creating populating
        # all the parent classes, so traverse the parent classes looking
        # for fields that must be included on load.
        for parent in klass._meta.get_parent_list():
            fields = only_load.get(parent)
            if fields:
                load_fields.update(fields)
    else:
        load_fields = None

    if load_fields:
        # Handle deferred fields.
        skip = set()
        init_list = []
        # Build the list of fields that *haven't* been requested
        for field, model in klass._meta.get_concrete_fields_with_model():
            if field.name not in load_fields:
                skip.add(field.attname)
            elif from_parent and issubclass(from_parent, model.__class__):
                # Avoid loading fields already loaded for parent model for
                # child models.
                continue
            else:
                init_list.append(field.attname)
        # Retrieve all the requested fields
        field_count = len(init_list)
        if skip:
            # Some fields are deferred: build a dynamic subclass that lazily
            # loads the skipped attributes.
            klass = deferred_class_factory(klass, skip)
            field_names = init_list
        else:
            field_names = ()
    else:
        # Load all fields on klass
        field_count = len(klass._meta.concrete_fields)
        # Check if we need to skip some parent fields.
        if from_parent and len(klass._meta.local_concrete_fields) != len(klass._meta.concrete_fields):
            # Only load those fields which haven't been already loaded into
            # 'from_parent'.
            non_seen_models = [p for p in klass._meta.get_parent_list()
                               if not issubclass(from_parent, p)]
            # Load local fields, too...
            non_seen_models.append(klass)
            field_names = [f.attname for f in klass._meta.concrete_fields
                           if f.model in non_seen_models]
            field_count = len(field_names)
        # Try to avoid populating field_names variable for performance reasons.
        # If field_names variable is set, we use **kwargs based model init
        # which is slower than normal init.
        if field_count == len(klass._meta.concrete_fields):
            field_names = ()

    restricted = requested is not None

    related_fields = []
    for f in klass._meta.fields:
        if select_related_descend(f, restricted, requested, load_fields):
            if restricted:
                next = requested[f.name]  # NOTE: shadows the builtin next()
            else:
                next = None
            klass_info = get_klass_info(f.rel.to, max_depth=max_depth, cur_depth=cur_depth + 1,
                                        requested=next, only_load=only_load)
            related_fields.append((f, klass_info))

    reverse_related_fields = []
    if restricted:
        for o in klass._meta.get_all_related_objects():
            if o.field.unique and select_related_descend(o.field, restricted, requested,
                                                         only_load.get(o.model), reverse=True):
                next = requested[o.field.related_query_name()]
                # Pass klass as from_parent when descending into a child model
                # so already-loaded parent fields are skipped.
                parent = klass if issubclass(o.model, klass) else None
                klass_info = get_klass_info(o.model, max_depth=max_depth, cur_depth=cur_depth + 1,
                                            requested=next, only_load=only_load, from_parent=parent)
                reverse_related_fields.append((o.field, klass_info))
    if field_names:
        pk_idx = field_names.index(klass._meta.pk.attname)
    else:
        pk_idx = klass._meta.pk_index()

    return klass, field_names, field_count, related_fields, reverse_related_fields, pk_idx
def get_cached_row(row, index_start, using, klass_info, offset=0,
                   parent_data=()):
    """
    Helper function that recursively returns an object with the specified
    related attributes already populated.

    This method may be called recursively to populate deep select_related()
    clauses.

    Arguments:
     * row - the row of data returned by the database cursor
     * index_start - the index of the row at which data for this
       object is known to start
     * offset - the number of additional fields that are known to
       exist in row for `klass`. This usually means the number of
       annotated results on `klass`.
     * using - the database alias on which the query is being executed.
     * klass_info - result of the get_klass_info function
     * parent_data - parent model data in format (field, value). Used
       to populate the non-local fields of child models.

    Returns (obj, index_end) where index_end is the first row index past
    this object and everything it consumed, or None when klass_info is None.
    """
    if klass_info is None:
        return None
    klass, field_names, field_count, related_fields, reverse_related_fields, pk_idx = klass_info
    fields = row[index_start:index_start + field_count]
    # If the pk column is None (or the equivalent '' in the case the
    # connection interprets empty strings as nulls), then the related
    # object must be non-existent - set the relation to None.
    if (fields[pk_idx] is None or
        (connections[using].features.interprets_empty_strings_as_nulls and
         fields[pk_idx] == '')):
        obj = None
    elif field_names:
        # **kwargs-based init: some fields are deferred or reordered.
        fields = list(fields)
        for rel_field, value in parent_data:
            field_names.append(rel_field.attname)
            fields.append(value)
        obj = klass(**dict(zip(field_names, fields)))
    else:
        # Fast path: positional init with the full field list.
        obj = klass(*fields)
    # If an object was retrieved, set the database state.
    if obj:
        obj._state.db = using
        obj._state.adding = False

    # Instantiate related fields
    index_end = index_start + field_count + offset
    # Iterate over each related object, populating any
    # select_related() fields
    for f, klass_info in related_fields:
        # Recursively retrieve the data for the related object
        cached_row = get_cached_row(row, index_end, using, klass_info)
        # If the recursive descent found an object, populate the
        # descriptor caches relevant to the object
        if cached_row:
            rel_obj, index_end = cached_row
            if obj is not None:
                # If the base object exists, populate the
                # descriptor cache
                setattr(obj, f.get_cache_name(), rel_obj)
            if f.unique and rel_obj is not None:
                # If the field is unique, populate the
                # reverse descriptor cache on the related object
                setattr(rel_obj, f.related.get_cache_name(), obj)

    # Now do the same, but for reverse related objects.
    # Only handle the restricted case - i.e., don't do a depth
    # descent into reverse relations unless explicitly requested
    for f, klass_info in reverse_related_fields:
        # Transfer data from this object to childs.
        parent_data = []
        for rel_field, rel_model in klass_info[0]._meta.get_fields_with_model():
            if rel_model is not None and isinstance(obj, rel_model):
                parent_data.append((rel_field, getattr(obj, rel_field.attname)))
        # Recursively retrieve the data for the related object
        cached_row = get_cached_row(row, index_end, using, klass_info,
                                    parent_data=parent_data)
        # If the recursive descent found an object, populate the
        # descriptor caches relevant to the object
        if cached_row:
            rel_obj, index_end = cached_row
            if obj is not None:
                # populate the reverse descriptor cache
                setattr(obj, f.related.get_cache_name(), rel_obj)
            if rel_obj is not None:
                # If the related object exists, populate
                # the descriptor cache.
                setattr(rel_obj, f.get_cache_name(), obj)
                # Populate related object caches using parent data.
                for rel_field, _ in parent_data:
                    if rel_field.rel:
                        setattr(rel_obj, rel_field.attname, getattr(obj, rel_field.attname))
                        try:
                            cached_obj = getattr(obj, rel_field.get_cache_name())
                            setattr(rel_obj, rel_field.get_cache_name(), cached_obj)
                        except AttributeError:
                            # Related object hasn't been cached yet
                            pass
    return obj, index_end
class RawQuerySet(object):
    """
    Provides an iterator which converts the results of raw SQL queries into
    annotated model instances.
    """

    def __init__(self, raw_query, model=None, query=None, params=None,
                 translations=None, using=None, hints=None):
        self.raw_query = raw_query
        self.model = model
        self._db = using
        self._hints = hints or {}
        self.query = query or sql.RawQuery(sql=raw_query, using=self.db, params=params)
        self.params = params or ()
        # Maps query column names to model attribute names.
        self.translations = translations or {}

    def __iter__(self):
        # Mapping of attrnames to row column positions. Used for constructing
        # the model using kwargs, needed when not all model's fields are present
        # in the query.
        model_init_field_names = {}
        # A list of tuples of (column name, column position). Used for
        # annotation fields.
        annotation_fields = []

        # Cache some things for performance reasons outside the loop.
        db = self.db
        compiler = connections[db].ops.compiler('SQLCompiler')(
            self.query, connections[db], db
        )
        need_resolv_columns = hasattr(compiler, 'resolve_columns')

        query = iter(self.query)

        try:
            # Find out which columns are model's fields, and which ones should be
            # annotated to the model.
            for pos, column in enumerate(self.columns):
                if column in self.model_fields:
                    model_init_field_names[self.model_fields[column].attname] = pos
                else:
                    annotation_fields.append((column, pos))

            # Find out which model's fields are not present in the query.
            skip = set()
            for field in self.model._meta.fields:
                if field.attname not in model_init_field_names:
                    skip.add(field.attname)
            if skip:
                if self.model._meta.pk.attname in skip:
                    raise InvalidQuery('Raw query must include the primary key')
                # Some fields are missing: defer them on a dynamic subclass.
                model_cls = deferred_class_factory(self.model, skip)
            else:
                model_cls = self.model
                # All model's fields are present in the query. So, it is possible
                # to use *args based model instantiation. For each field of the model,
                # record the query column position matching that field.
                model_init_field_pos = []
                for field in self.model._meta.fields:
                    model_init_field_pos.append(model_init_field_names[field.attname])
            if need_resolv_columns:
                fields = [self.model_fields.get(c, None) for c in self.columns]
            # Begin looping through the query values.
            for values in query:
                if need_resolv_columns:
                    values = compiler.resolve_columns(values, fields)
                # Associate fields to values
                if skip:
                    model_init_kwargs = {}
                    for attname, pos in six.iteritems(model_init_field_names):
                        model_init_kwargs[attname] = values[pos]
                    instance = model_cls(**model_init_kwargs)
                else:
                    model_init_args = [values[pos] for pos in model_init_field_pos]
                    instance = model_cls(*model_init_args)
                if annotation_fields:
                    for column, pos in annotation_fields:
                        setattr(instance, column, values[pos])

                instance._state.db = db
                instance._state.adding = False

                yield instance
        finally:
            # Done iterating the Query. If it has its own cursor, close it.
            if hasattr(self.query, 'cursor') and self.query.cursor:
                self.query.cursor.close()

    def __repr__(self):
        text = self.raw_query
        if self.params:
            text = text % (self.params if hasattr(self.params, 'keys') else tuple(self.params))
        return "<RawQuerySet: %r>" % text

    def __getitem__(self, k):
        # NOTE: materializes the whole result set for every indexing operation.
        return list(self)[k]

    @property
    def db(self):
        "Return the database that will be used if this query is executed now"
        return self._db or router.db_for_read(self.model, **self._hints)

    def using(self, alias):
        """
        Selects which database this Raw QuerySet should execute its query against.
        """
        return RawQuerySet(self.raw_query, model=self.model,
                           query=self.query.clone(using=alias),
                           params=self.params, translations=self.translations,
                           using=alias)

    @property
    def columns(self):
        """
        A list of model field names in the order they'll appear in the
        query results.
        """
        if not hasattr(self, '_columns'):
            self._columns = self.query.get_columns()

            # Adjust any column names which don't match field names
            for (query_name, model_name) in self.translations.items():
                try:
                    index = self._columns.index(query_name)
                    self._columns[index] = model_name
                except ValueError:
                    # Ignore translations for non-existent column names
                    pass

        return self._columns

    @property
    def model_fields(self):
        """
        A dict mapping column names to model field names.
        """
        if not hasattr(self, '_model_fields'):
            converter = connections[self.db].introspection.table_name_converter
            self._model_fields = {}
            for field in self.model._meta.fields:
                name, column = field.get_attname_column()
                self._model_fields[converter(column)] = field
        return self._model_fields
class Prefetch(object):
    """
    Describes one prefetch_related() traversal, optionally with a custom
    queryset and/or a custom attribute (to_attr) to store the results on.
    """

    def __init__(self, lookup, queryset=None, to_attr=None):
        # `prefetch_through` is the path we traverse to perform the prefetch.
        self.prefetch_through = lookup
        # `prefetch_to` is the path to the attribute that stores the result.
        self.prefetch_to = lookup
        if to_attr:
            # Replace the final path component with the custom attribute.
            head = lookup.split(LOOKUP_SEP)[:-1]
            self.prefetch_to = LOOKUP_SEP.join(head + [to_attr])

        self.queryset = queryset
        self.to_attr = to_attr

    def add_prefix(self, prefix):
        self.prefetch_through = LOOKUP_SEP.join([prefix, self.prefetch_through])
        self.prefetch_to = LOOKUP_SEP.join([prefix, self.prefetch_to])

    def get_current_prefetch_through(self, level):
        parts = self.prefetch_through.split(LOOKUP_SEP)
        return LOOKUP_SEP.join(parts[:level + 1])

    def get_current_prefetch_to(self, level):
        parts = self.prefetch_to.split(LOOKUP_SEP)
        return LOOKUP_SEP.join(parts[:level + 1])

    def get_current_to_attr(self, level):
        parts = self.prefetch_to.split(LOOKUP_SEP)
        to_attr = parts[level]
        # as_attr is truthy only on the last level of a custom to_attr lookup.
        as_attr = self.to_attr and level == len(parts) - 1
        return to_attr, as_attr

    def get_current_queryset(self, level):
        # The custom queryset only applies at the final level of the lookup.
        if self.get_current_prefetch_to(level) == self.prefetch_to:
            return self.queryset
        return None

    def __eq__(self, other):
        if not isinstance(other, Prefetch):
            return False
        return self.prefetch_to == other.prefetch_to
def normalize_prefetch_lookups(lookups, prefix=None):
    """
    Helper function that normalize lookups into Prefetch objects.

    Plain strings are wrapped in Prefetch; existing Prefetch instances are
    kept (and mutated in place when a prefix is applied).
    """
    normalized = []
    for item in lookups:
        if not isinstance(item, Prefetch):
            item = Prefetch(item)
        if prefix:
            item.add_prefix(prefix)
        normalized.append(item)
    return normalized
def prefetch_related_objects(result_cache, related_lookups):
    """
    Helper function for prefetch_related functionality

    Populates prefetched objects caches for a list of results
    from a QuerySet
    """
    if len(result_cache) == 0:
        return  # nothing to do

    related_lookups = normalize_prefetch_lookups(related_lookups)

    # We need to be able to dynamically add to the list of prefetch_related
    # lookups that we look up (see below). So we need some book keeping to
    # ensure we don't do duplicate work.
    done_queries = {}  # dictionary of things like 'foo__bar': [results]

    auto_lookups = []  # we add to this as we go through.
    followed_descriptors = set()  # recursion protection

    # itertools.chain is lazy, so lookups appended to auto_lookups while
    # iterating are still picked up by this loop.
    all_lookups = itertools.chain(related_lookups, auto_lookups)
    for lookup in all_lookups:
        if lookup.prefetch_to in done_queries:
            if lookup.queryset:
                raise ValueError("'%s' lookup was already seen with a different queryset. "
                                 "You may need to adjust the ordering of your lookups." % lookup.prefetch_to)
            continue

        # Top level, the list of objects to decorate is the result cache
        # from the primary QuerySet. It won't be for deeper levels.
        obj_list = result_cache

        through_attrs = lookup.prefetch_through.split(LOOKUP_SEP)
        for level, through_attr in enumerate(through_attrs):
            # Prepare main instances
            if len(obj_list) == 0:
                break

            prefetch_to = lookup.get_current_prefetch_to(level)
            if prefetch_to in done_queries:
                # Skip any prefetching, and any object preparation
                obj_list = done_queries[prefetch_to]
                continue

            # Prepare objects:
            good_objects = True
            for obj in obj_list:
                # Since prefetching can re-use instances, it is possible to have
                # the same instance multiple times in obj_list, so obj might
                # already be prepared.
                if not hasattr(obj, '_prefetched_objects_cache'):
                    try:
                        obj._prefetched_objects_cache = {}
                    except AttributeError:
                        # Must be in a QuerySet subclass that is not returning
                        # Model instances, either in Django or 3rd
                        # party. prefetch_related() doesn't make sense, so quit
                        # now.
                        good_objects = False
                        break
            if not good_objects:
                break

            # Descend down tree

            # We assume that objects retrieved are homogeneous (which is the premise
            # of prefetch_related), so what applies to first object applies to all.
            first_obj = obj_list[0]
            prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(first_obj, through_attr)

            if not attr_found:
                raise AttributeError("Cannot find '%s' on %s object, '%s' is an invalid "
                                     "parameter to prefetch_related()" %
                                     (through_attr, first_obj.__class__.__name__, lookup.prefetch_through))

            if level == len(through_attrs) - 1 and prefetcher is None:
                # Last one, this *must* resolve to something that supports
                # prefetching, otherwise there is no point adding it and the
                # developer asking for it has made a mistake.
                raise ValueError("'%s' does not resolve to a item that supports "
                                 "prefetching - this is an invalid parameter to "
                                 "prefetch_related()." % lookup.prefetch_through)

            if prefetcher is not None and not is_fetched:
                obj_list, additional_lookups = prefetch_one_level(obj_list, prefetcher, lookup, level)
                # We need to ensure we don't keep adding lookups from the
                # same relationships to stop infinite recursion. So, if we
                # are already on an automatically added lookup, don't add
                # the new lookups from relationships we've seen already.
                if not (lookup in auto_lookups and descriptor in followed_descriptors):
                    done_queries[prefetch_to] = obj_list
                    auto_lookups.extend(normalize_prefetch_lookups(additional_lookups, prefetch_to))
                followed_descriptors.add(descriptor)
            elif isinstance(getattr(first_obj, through_attr), list):
                # The current part of the lookup relates to a custom Prefetch.
                # This means that obj.attr is a list of related objects, and
                # thus we must turn the obj.attr lists into a single related
                # object list.
                new_list = []
                for obj in obj_list:
                    new_list.extend(getattr(obj, through_attr))
                obj_list = new_list
            else:
                # Either a singly related object that has already been fetched
                # (e.g. via select_related), or hopefully some other property
                # that doesn't support prefetching but needs to be traversed.

                # We replace the current list of parent objects with the list
                # of related objects, filtering out empty or missing values so
                # that we can continue with nullable or reverse relations.
                new_obj_list = []
                for obj in obj_list:
                    try:
                        new_obj = getattr(obj, through_attr)
                    except exceptions.ObjectDoesNotExist:
                        continue
                    if new_obj is None:
                        continue
                    new_obj_list.append(new_obj)
                obj_list = new_obj_list
def get_prefetcher(instance, attr):
    """
    For the attribute 'attr' on the given instance, finds
    an object that has a get_prefetch_queryset().
    Returns a 4 tuple containing:
    (the object with get_prefetch_queryset (or None),
     the descriptor object representing this relationship (or None),
     a boolean that is False if the attribute was not found at all,
     a boolean that is True if the attribute has already been fetched)
    """
    prefetcher = None
    attr_found = False
    is_fetched = False

    # For singly related objects, we have to avoid getting the attribute
    # from the object, as this will trigger the query. So we first try
    # on the class, in order to get the descriptor object.
    rel_obj_descriptor = getattr(instance.__class__, attr, None)
    if rel_obj_descriptor is None:
        # Not a descriptor: fall back to instance attribute access (this
        # may trigger a query for related managers; see below).
        try:
            rel_obj = getattr(instance, attr)
            attr_found = True
            # If we are following a lookup path which leads us through a previous
            # fetch from a custom Prefetch then we might end up into a list
            # instead of related qs. This means the objects are already fetched.
            if isinstance(rel_obj, list):
                is_fetched = True
        except AttributeError:
            pass
    else:
        attr_found = True
        if rel_obj_descriptor:
            # singly related object, descriptor object has the
            # get_prefetch_queryset() method.
            if hasattr(rel_obj_descriptor, 'get_prefetch_queryset'):
                prefetcher = rel_obj_descriptor
                if rel_obj_descriptor.is_cached(instance):
                    is_fetched = True
            else:
                # descriptor doesn't support prefetching, so we go ahead and get
                # the attribute on the instance rather than the class to
                # support many related managers
                rel_obj = getattr(instance, attr)
                if hasattr(rel_obj, 'get_prefetch_queryset'):
                    prefetcher = rel_obj
    return prefetcher, rel_obj_descriptor, attr_found, is_fetched
def prefetch_one_level(instances, prefetcher, lookup, level):
    """
    Helper function for prefetch_related_objects

    Runs prefetches on all instances using the prefetcher object,
    assigning results to relevant caches in instance.

    The prefetched objects are returned, along with any additional
    prefetches that must be done due to prefetch_related lookups
    found from default managers.
    """
    # prefetcher must have a method get_prefetch_queryset() which takes a list
    # of instances, and returns a tuple:

    # (queryset of instances of self.model that are related to passed in instances,
    #  callable that gets value to be matched for returned instances,
    #  callable that gets value to be matched for passed in instances,
    #  boolean that is True for singly related objects,
    #  cache name to assign to).

    # The 'values to be matched' must be hashable as they will be used
    # in a dictionary.

    rel_qs, rel_obj_attr, instance_attr, single, cache_name = (
        prefetcher.get_prefetch_queryset(instances, lookup.get_current_queryset(level)))
    # We have to handle the possibility that the default manager itself added
    # prefetch_related lookups to the QuerySet we just got back. We don't want to
    # trigger the prefetch_related functionality by evaluating the query.
    # Rather, we need to merge in the prefetch_related lookups.
    additional_lookups = getattr(rel_qs, '_prefetch_related_lookups', [])
    if additional_lookups:
        # Don't need to clone because the manager should have given us a fresh
        # instance, so we access an internal instead of using public interface
        # for performance reasons.
        rel_qs._prefetch_related_lookups = []

    all_related_objects = list(rel_qs)

    # Bucket related objects by the value they are matched on.
    rel_obj_cache = {}
    for rel_obj in all_related_objects:
        rel_attr_val = rel_obj_attr(rel_obj)
        rel_obj_cache.setdefault(rel_attr_val, []).append(rel_obj)

    for obj in instances:
        instance_attr_val = instance_attr(obj)
        vals = rel_obj_cache.get(instance_attr_val, [])
        to_attr, as_attr = lookup.get_current_to_attr(level)
        if single:
            val = vals[0] if vals else None
            # Assign to the custom to_attr when requested, otherwise to the
            # descriptor's cache attribute.
            to_attr = to_attr if as_attr else cache_name
            setattr(obj, to_attr, val)
        else:
            if as_attr:
                setattr(obj, to_attr, vals)
            else:
                # Cache in the QuerySet.all().
                qs = getattr(obj, to_attr).all()
                qs._result_cache = vals
                # We don't want the individual qs doing prefetch_related now,
                # since we have merged this into the current work.
                qs._prefetch_done = True
                obj._prefetched_objects_cache[cache_name] = qs
    return all_related_objects, additional_lookups
| bsd-3-clause |
punchagan/zulip | zerver/webhooks/slack/tests.py | 4 | 2501 |
from zerver.lib.test_classes import WebhookTestCase
class SlackWebhookTests(WebhookTestCase):
    """Tests for the incoming Slack webhook integration."""

    STREAM_NAME = "slack"
    URL_TEMPLATE = "/api/v1/external/slack?stream={stream}&api_key={api_key}"
    FIXTURE_DIR_NAME = "slack"

    def test_slack_channel_to_topic(self) -> None:
        """Default mode: the Slack channel name becomes the topic."""
        expected_topic = "channel: general"
        expected_message = "**slack_user**: `test\n`"
        self.check_webhook(
            "message_info",
            expected_topic,
            expected_message,
            content_type="application/x-www-form-urlencoded",
        )

    def test_slack_channel_to_stream(self) -> None:
        """With channels_map_to_topics=0, messages land in a matching stream
        under a fixed topic."""
        self.STREAM_NAME = "general"
        self.url = "{}{}".format(self.url, "&channels_map_to_topics=0")
        expected_topic = "Message from Slack"
        expected_message = "**slack_user**: `test\n`"
        self.check_webhook(
            "message_info",
            expected_topic,
            expected_message,
            content_type="application/x-www-form-urlencoded",
        )

    def test_missing_data_user_name(self) -> None:
        """A payload without user_name is rejected with a clear error."""
        payload = self.get_body("message_info_missing_user_name")
        url = self.build_webhook_url()
        result = self.client_post(url, payload, content_type="application/x-www-form-urlencoded")
        self.assert_json_error(result, "Missing 'user_name' argument")

    def test_missing_data_channel_name(self) -> None:
        """A payload without channel_name is rejected with a clear error."""
        payload = self.get_body("message_info_missing_channel_name")
        url = self.build_webhook_url()
        result = self.client_post(url, payload, content_type="application/x-www-form-urlencoded")
        self.assert_json_error(result, "Missing 'channel_name' argument")

    def test_missing_data_text(self) -> None:
        """A payload without text is rejected with a clear error."""
        payload = self.get_body("message_info_missing_text")
        url = self.build_webhook_url()
        result = self.client_post(url, payload, content_type="application/x-www-form-urlencoded")
        self.assert_json_error(result, "Missing 'text' argument")

    def test_invalid_channels_map_to_topics(self) -> None:
        """channels_map_to_topics must be 0 or 1; anything else is an error."""
        payload = self.get_body("message_info")
        url = "{}{}".format(self.url, "&channels_map_to_topics=abc")
        result = self.client_post(url, payload, content_type="application/x-www-form-urlencoded")
        self.assert_json_error(result, "Error: channels_map_to_topics parameter other than 0 or 1")

    def get_body(self, fixture_name: str) -> str:
        # Fixtures for this integration are stored as plain text, not JSON.
        return self.webhook_fixture_data("slack", fixture_name, file_type="txt")
| apache-2.0 |
nangia/django-allauth | allauth/socialaccount/south_migrations/0006_auto__del_field_socialapp_site.py | 80 | 6229 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: drop the legacy single-site FK column
        (superseded by the SocialApp.sites M2M)."""
        # Deleting field 'SocialApp.site'
        db.delete_column('socialaccount_socialapp', 'site_id')
    def backwards(self, orm):
        """Deliberately irreversible: the dropped site_id values are gone."""
        # User chose to not deal with backwards NULL issues for 'SocialApp.site'
        raise RuntimeError("Cannot reverse this migration. 'SocialApp.site' and its values cannot be restored.")
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'socialaccount.socialaccount': {
'Meta': {'unique_together': "(('provider', 'uid'),)", 'object_name': 'SocialAccount'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'extra_data': ('allauth.socialaccount.fields.JSONField', [], {'default': "'{}'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'socialaccount.socialapp': {
'Meta': {'object_name': 'SocialApp'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sites.Site']", 'symmetrical': 'False'})
},
'socialaccount.socialtoken': {
'Meta': {'unique_together': "(('app', 'account'),)", 'object_name': 'SocialToken'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['socialaccount.SocialAccount']"}),
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['socialaccount.SocialApp']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'token_secret': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['socialaccount'] | mit |
abenzbiria/clients_odoo | addons/account_check_writing/account.py | 379 | 2032 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv,fields
class account_journal(osv.osv):
    """Extend account.journal with check-writing configuration flags."""
    _inherit = "account.journal"
    _columns = {
        # Marks the journal as usable for writing checks.
        'allow_check_writing': fields.boolean('Allow Check writing', help='Check this if the journal is to be used for writing checks.'),
        # True when printing onto pre-printed check stationery.
        'use_preprint_check': fields.boolean('Use Preprinted Check', help='Check if you use a preformated sheet for check'),
    }
class res_company(osv.osv):
    """Extend res.company with the check print-layout selection (defaults to 'top')."""
    _inherit = "res.company"
    _columns = {
        # Position of the check on the printed page; see help for software compatibility.
        'check_layout': fields.selection([
            ('top', 'Check on Top'),
            ('middle', 'Check in middle'),
            ('bottom', 'Check on bottom'),
            ],"Check Layout",
            help="Check on top is compatible with Quicken, QuickBooks and Microsoft Money. Check in middle is compatible with Peachtree, ACCPAC and DacEasy. Check on bottom is compatible with Peachtree, ACCPAC and DacEasy only" ),
    }
    _defaults = {
        'check_layout' : lambda *a: 'top',
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ga7g08/sympy | sympy/plotting/pygletplot/tests/test_plotting.py | 109 | 2653 | from sympy.external.importtools import import_module
# Skip flag read by the sympy test runner for this module.
disabled = False
# if pyglet.gl fails to import, e.g. opengl is missing, we disable the tests
pyglet_gl = import_module("pyglet.gl", catch=(OSError,))
pyglet_window = import_module("pyglet.window", catch=(OSError,))
if not pyglet_gl or not pyglet_window:
    disabled = True
from sympy import symbols, sin, cos
x, y, z = symbols('x, y, z')
def test_import():
    # Smoke test: the pyglet plotting entry point must import cleanly.
    from sympy.plotting.pygletplot import PygletPlot
def test_plot_2d():
    from sympy.plotting.pygletplot import PygletPlot
    # 2D line plot of x over [-5, 5] with 4 subdivisions; headless (visible=False).
    p = PygletPlot(x, [x, -5, 5, 4], visible=False)
    p.wait_for_calculations()
def test_plot_2d_discontinuous():
    from sympy.plotting.pygletplot import PygletPlot
    # 1/x has a pole at x=0 inside the interval; plotting must not crash.
    p = PygletPlot(1/x, [x, -1, 1, 2], visible=False)
    p.wait_for_calculations()
def test_plot_3d():
    from sympy.plotting.pygletplot import PygletPlot
    # 3D surface plot of x*y over a 5x5-subdivision grid.
    p = PygletPlot(x*y, [x, -5, 5, 5], [y, -5, 5, 5], visible=False)
    p.wait_for_calculations()
def test_plot_3d_discontinuous():
    from sympy.plotting.pygletplot import PygletPlot
    # Surface with a pole along x=0; plotting must not crash.
    p = PygletPlot(1/x, [x, -3, 3, 6], [y, -1, 1, 1], visible=False)
    p.wait_for_calculations()
def test_plot_2d_polar():
    from sympy.plotting.pygletplot import PygletPlot
    # Polar-mode 2D plot, selected via the 'mode=polar' option string.
    p = PygletPlot(1/x, [x, -1, 1, 4], 'mode=polar', visible=False)
    p.wait_for_calculations()
def test_plot_3d_cylinder():
    from sympy.plotting.pygletplot import PygletPlot
    # Cylindrical plot: polar mode with two ranges and a solid surface style.
    p = PygletPlot(
        1/y, [x, 0, 6.282, 4], [y, -1, 1, 4], 'mode=polar;style=solid',
        visible=False)
    p.wait_for_calculations()
def test_plot_3d_spherical():
    from sympy.plotting.pygletplot import PygletPlot
    # Unit sphere in spherical mode, rendered as a wireframe.
    p = PygletPlot(
        1, [x, 0, 6.282, 4], [y, 0, 3.141,
                              4], 'mode=spherical;style=wireframe',
        visible=False)
    p.wait_for_calculations()
def test_plot_2d_parametric():
    from sympy.plotting.pygletplot import PygletPlot
    # Parametric circle: (sin x, cos x) over one full period.
    p = PygletPlot(sin(x), cos(x), [x, 0, 6.282, 4], visible=False)
    p.wait_for_calculations()
def test_plot_3d_parametric():
    from sympy.plotting.pygletplot import PygletPlot
    # Parametric helix: (sin x, cos x, x/5).
    p = PygletPlot(sin(x), cos(x), x/5.0, [x, 0, 6.282, 4], visible=False)
    p.wait_for_calculations()
def _test_plot_log():
    # Disabled test (leading underscore keeps it out of collection); kept for
    # manual runs.
    from sympy.plotting.pygletplot import PygletPlot
    # Fix: `log` is not among the module-level imports (only symbols, sin,
    # cos), so this raised NameError whenever it was enabled.
    from sympy import log
    p = PygletPlot(log(x), [x, 0, 6.282, 4], 'mode=polar', visible=False)
    p.wait_for_calculations()
def test_plot_integral():
    # Make sure it doesn't treat x as an independent variable
    from sympy.plotting.pygletplot import PygletPlot
    from sympy import Integral
    # x and z are integration variables here; only y remains free.
    p = PygletPlot(Integral(z*x, (x, 1, z), (z, 1, y)), visible=False)
    p.wait_for_calculations()
| bsd-3-clause |
langholz/draw | draw/prob_layers.py | 5 | 8754 |
from __future__ import division, print_function
import logging
import numpy
import theano
from theano import tensor
from blocks.bricks.base import application, lazy
from blocks.roles import add_role, WEIGHT, BIAS
from blocks.bricks import Random, Initializable, Linear
from blocks.utils import shared_floatx_zeros
logger = logging.getLogger(__name__)
floatX = theano.config.floatX
N_STREAMS = 2048
#-----------------------------------------------------------------------------
def logsumexp(A, axis=None):
    """Return log(sum(exp(A))) along *axis*, computed in a numerically
    stable way (max-shift trick)."""
    # Shift by the per-axis maximum so the exponentials cannot overflow.
    shift = tensor.max(A, axis=axis, keepdims=True)
    stable_sum = tensor.sum(tensor.exp(A - shift), axis=axis, keepdims=True)
    result = tensor.log(stable_sum) + shift
    # Drop the kept dimension again, as the original implementation did.
    return tensor.sum(result, axis=axis)
def replicate_batch(A, repeat):
    """Extend the given 2d Tensor by repeating each line *repeat* times.
    With A.shape == (rows, cols), this function will return an array with
    shape (rows*repeat, cols).
    Parameters
    ----------
    A : T.tensor
        Each row of this 2d-Tensor will be replicated *repeat* times
    repeat : int
    Returns
    -------
    B : T.tensor
    """
    # Insert a broadcastable middle axis: (rows, 1, cols).
    A_ = A.dimshuffle((0, 'x', 1))
    # Adding a zeros tensor of the target shape broadcasts the middle axis
    # out to `repeat` copies: (rows, repeat, cols).
    A_ = A_ + tensor.zeros((A.shape[0], repeat, A.shape[1]), dtype=floatX)
    # Collapse the first two axes: (rows*repeat, cols).
    A_ = A_.reshape( [A_.shape[0]*repeat, A.shape[1]] )
    return A_
#-----------------------------------------------------------------------------
class ProbabilisticTopLayer(Random):
    """Abstract base for top-level (prior) probabilistic layers p(X)."""
    def __init__(self, **kwargs):
        super(ProbabilisticTopLayer, self).__init__(**kwargs)

    def sample_expected(self):
        """Return the expected value of samples; subclasses must override."""
        # Fix: the original `raise NotImplemented` raises a TypeError at call
        # time, because NotImplemented is a sentinel value, not an exception.
        raise NotImplementedError

    def sample(self):
        """Draw samples (and their log-probability); subclasses must override."""
        raise NotImplementedError

    def log_prob(self, X):
        """Return log p(X); subclasses must override."""
        raise NotImplementedError
class ProbabilisticLayer(Random):
    """Abstract base for conditional probabilistic layers p(X | Y)."""
    def __init__(self, **kwargs):
        super(ProbabilisticLayer, self).__init__(**kwargs)

    def sample_expected(self, Y):
        """Return E[X | Y]; subclasses must override."""
        # Fix: `raise NotImplemented` is a TypeError (NotImplemented is not an
        # exception class); NotImplementedError is the correct exception.
        raise NotImplementedError

    def sample(self, Y):
        """Draw X ~ p(X | Y) (and the log-probability); subclasses must override."""
        raise NotImplementedError

    def log_prob(self, X, Y):
        """Return log p(X | Y); subclasses must override."""
        raise NotImplementedError
#-----------------------------------------------------------------------------
class BernoulliTopLayer(Initializable, ProbabilisticTopLayer):
    """Factorized Bernoulli prior over binary visible units.

    The per-dimension bias `b` parameterizes p(X_i = 1) = sigmoid(b_i).
    """
    @lazy
    def __init__(self, dim_X, biases_init, **kwargs):
        super(BernoulliTopLayer, self).__init__(**kwargs)
        self.dim_X = dim_X
        self.biases_init = biases_init

    def _allocate(self):
        b = shared_floatx_zeros((self.dim_X,), name='b')
        # Fix: the module imports BIAS (see `from blocks.roles import ...`);
        # the original `add_role(b, BIASES)` raised NameError.
        add_role(b, BIAS)
        self.params.append(b)
        self.add_auxiliary_variable(b.norm(2), name='b_norm')

    def _initialize(self):
        b, = self.params
        self.biases_init.initialize(b, self.rng)

    @application(inputs=[], outputs=['X_expected'])
    def sample_expected(self):
        """Return sigmoid(b), the per-dimension Bernoulli means."""
        b = self.params[0]
        return tensor.nnet.sigmoid(b)

    @application(outputs=['X', 'log_prob'])
    def sample(self, n_samples):
        """Draw `n_samples` binary vectors and return them with their log-prob."""
        prob_X = self.sample_expected()
        U = self.theano_rng.uniform(size=(n_samples, prob_X.shape[0]), nstreams=N_STREAMS)
        # Threshold uniform noise against the Bernoulli means.
        X = tensor.cast(U <= prob_X, floatX)
        return X, self.log_prob(X)

    @application(inputs='X', outputs='log_prob')
    def log_prob(self, X):
        """Return per-example log p(X), summed over dimensions."""
        prob_X = self.sample_expected()
        log_prob = X*tensor.log(prob_X) + (1.-X)*tensor.log(1.-prob_X)
        return log_prob.sum(axis=1)
class BernoulliLayer(Initializable, ProbabilisticLayer):
    """Conditional Bernoulli layer: p(X | Y) = Bernoulli(sigmoid(W Y + b))."""
    @lazy
    def __init__(self, dim_X, dim_Y, **kwargs):
        super(BernoulliLayer, self).__init__(**kwargs)
        self.dim_X = dim_X
        self.dim_Y = dim_Y
        # Affine map from the conditioning variable Y to the logits of X.
        self.linear_transform = Linear(
            name=self.name + '_linear', input_dim=dim_Y,
            output_dim=dim_X, weights_init=self.weights_init,
            biases_init=self.biases_init, use_bias=self.use_bias)
        self.children = [self.linear_transform]

    @application(inputs=['Y'], outputs=['X_expected'])
    def sample_expected(self, Y):
        """Return the Bernoulli means sigmoid(W Y + b)."""
        return tensor.nnet.sigmoid(self.linear_transform.apply(Y))

    @application(inputs=['Y'], outputs=['X', 'log_prob'])
    def sample(self, Y):
        """Draw X ~ p(X | Y) by thresholding uniform noise against the means."""
        prob_X = self.sample_expected(Y)
        U = self.theano_rng.uniform(size=prob_X.shape, nstreams=N_STREAMS)
        X = tensor.cast(U <= prob_X, floatX)
        return X, self.log_prob(X, Y)

    @application(inputs=['X', 'Y'], outputs=['log_prob'])
    def log_prob(self, X, Y):
        """Return per-example log p(X | Y), summed over dimensions."""
        prob_X = self.sample_expected(Y)
        log_prob = X*tensor.log(prob_X) + (1.-X)*tensor.log(1-prob_X)
        return log_prob.sum(axis=1)
#-----------------------------------------------------------------------------
class GaussianTopLayer(Initializable, ProbabilisticTopLayer):
    """Diagonal Gaussian prior with zero mean and a learned log-sigma bias."""
    @lazy
    def __init__(self, dim_X, biases_init, **kwargs):
        super(GaussianTopLayer, self).__init__(**kwargs)
        self.dim_X = dim_X
        self.biases_init = biases_init

    def _allocate(self):
        # `b` holds the per-dimension log standard deviation.
        b = shared_floatx_zeros((self.dim_X,), name='b')
        # Fix: the module imports BIAS; the original `add_role(b, BIASES)`
        # raised NameError.
        add_role(b, BIAS)
        self.params = [b]

    def _initialize(self):
        b, = self.params
        self.biases_init.initialize(b, self.rng)

    @application(inputs=[], outputs=['mean', 'log_sigma'])
    def sample_expected(self, n_samples):
        """Return (mean, log_sigma) broadcast to (n_samples, dim_X)."""
        b, = self.params
        mean = tensor.zeros((n_samples, self.dim_X))
        log_sigma = tensor.zeros((n_samples, self.dim_X)) + b
        return mean, log_sigma

    @application(outputs=['X', 'log_prob'])
    def sample(self, n_samples):
        """Draw `n_samples` vectors via the reparameterization X = mu + sigma*U."""
        mean, log_sigma = self.sample_expected(n_samples)
        # Sample from mean-zeros std.-one Gaussian
        U = self.theano_rng.normal(
            size=(n_samples, self.dim_X),
            avg=0., std=1.)
        # ... and scale/translate samples
        X = mean + tensor.exp(log_sigma) * U
        return X, self.log_prob(X)

    @application(inputs='X', outputs='log_prob')
    def log_prob(self, X):
        """Return per-example log-density of the diagonal Gaussian."""
        mean, log_sigma = self.sample_expected(X.shape[0])
        # Calculate multivariate diagonal Gaussian
        log_prob = -0.5*tensor.log(2*numpy.pi) - log_sigma -0.5*(X-mean)**2 / tensor.exp(2*log_sigma)
        return log_prob.sum(axis=1)
class GaussianLayer(Initializable, ProbabilisticLayer):
    """Conditional diagonal Gaussian layer p(X | Y) with a tanh hidden layer.

    Y is mapped through a Linear brick plus tanh to a hidden representation,
    from which the mean is computed; the log-sigma is currently pinned to
    log(0.1) (see sample_expected).
    """
    @lazy
    def __init__(self, dim_X, dim_Y, **kwargs):
        super(GaussianLayer, self).__init__(**kwargs)
        self.dim_X = dim_X
        self.dim_Y = dim_Y
        # Hidden layer size: midpoint between input and output dims.
        self.dim_H = (dim_X+dim_Y) // 2
        self.linear_transform = Linear(
            name=self.name + '_linear', input_dim=dim_Y,
            output_dim=self.dim_H, weights_init=self.weights_init,
            biases_init=self.biases_init, use_bias=self.use_bias)
        self.children = [self.linear_transform]

    def _allocate(self):
        super(GaussianLayer, self)._allocate()
        dim_X, dim_Y, dim_H = self.dim_X, self.dim_Y, self.dim_H
        W_mean = shared_floatx_zeros((dim_H, dim_X), name='W_mean')
        W_ls = shared_floatx_zeros((dim_H, dim_X), name='W_ls')
        # Fix: the module imports WEIGHT and BIAS (singular); the original
        # WEIGHTS/BIASES names raised NameError.
        add_role(W_mean, WEIGHT)
        add_role(W_ls, WEIGHT)
        b_mean = shared_floatx_zeros((dim_X,), name='b_mean')
        b_ls = shared_floatx_zeros((dim_X,), name='b_ls')
        add_role(b_mean, BIAS)
        add_role(b_ls, BIAS)
        self.params = [W_mean, W_ls, b_mean, b_ls]

    def _initialize(self):
        super(GaussianLayer, self)._initialize()
        W_mean, W_ls, b_mean, b_ls = self.params
        self.weights_init.initialize(W_mean, self.rng)
        self.weights_init.initialize(W_ls, self.rng)
        self.biases_init.initialize(b_mean, self.rng)
        self.biases_init.initialize(b_ls, self.rng)

    @application(inputs=['Y'], outputs=['mean', 'log_sigma'])
    def sample_expected(self, Y):
        """Return (mean, log_sigma) of p(X | Y)."""
        W_mean, W_ls, b_mean, b_ls = self.params
        a = tensor.tanh(self.linear_transform.apply(Y))
        mean = tensor.dot(a, W_mean) + b_mean
        # NOTE: the learned log-sigma path is deliberately disabled; sigma is
        # fixed at 0.1 (W_ls/b_ls are allocated but unused here).
        #log_sigma = tensor.dot(a, W_ls) + b_ls
        log_sigma = tensor.log(0.1)
        return mean, log_sigma

    @application(inputs=['Y'], outputs=['X', 'log_prob'])
    def sample(self, Y):
        """Draw X ~ p(X | Y) via the reparameterization X = mu + sigma*U."""
        mean, log_sigma = self.sample_expected(Y)
        # Sample from mean-zeros std.-one Gaussian
        U = self.theano_rng.normal(
            size=mean.shape,
            avg=0., std=1.)
        # ... and scale/translate samples
        X = mean + tensor.exp(log_sigma) * U
        return X, self.log_prob(X, Y)

    @application(inputs=['X', 'Y'], outputs=['log_prob'])
    def log_prob(self, X, Y):
        """Return per-example log-density of the diagonal Gaussian p(X | Y)."""
        mean, log_sigma = self.sample_expected(Y)
        # Calculate multivariate diagonal Gaussian
        log_prob = -0.5*tensor.log(2*numpy.pi) - log_sigma -0.5*(X-mean)**2 / tensor.exp(2*log_sigma)
        return log_prob.sum(axis=1)
| mit |
smlng/RIOT | dist/tools/compile_test/compile_test.py | 21 | 8589 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2014 René Kijewski <rene.kijewski@fu-berlin.de>
# Copyright (C) 2015 Philipp Rosenkranz <philipp.rosenkranz@fu-berlin.de>
# Copyright (C) 2016 Eistec AB
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from __future__ import print_function
from collections import defaultdict
from itertools import groupby
from os import devnull, environ
from os.path import abspath, dirname, isfile, join
from subprocess import CalledProcessError, check_call, check_output, PIPE, Popen
from sys import argv, exit, stdout
try:
# Python 2.x
from StringIO import StringIO
except ImportError:
# Python 3.x
from io import StringIO
from itertools import tee
class Termcolor:
    """ANSI escape sequences for colored terminal output; `end` resets."""
    red = '\033[1;31m'
    green = '\033[1;32m'
    yellow = '\033[1;33m'
    blue = '\033[1;34m'
    purple = '\033[1;35m'
    end = '\033[0m'
def is_tracked(application_folder):
    """Return True if the folder has a Makefile that is tracked by git."""
    if not isfile(join(application_folder, 'Makefile')):
        return False
    try:
        # --error-unmatch makes git exit non-zero for untracked files.
        check_call(('git', 'ls-files', '--error-unmatch', 'Makefile'),
                   stdin=null, stdout=null, stderr=null, cwd=application_folder)
        return True
    except CalledProcessError:
        return False
def get_results_and_output_from(fd):
    """Parse `make buildtest` output read from *fd*.

    Generator yielding one (outcome_and_toolchain, board, output) triple per
    "Building for <board> .. <toolchain> .. <outcome>" line seen, where
    *output* is a StringIO with the build log captured since the previous
    result line. Also prints a progress dot per completed build.
    """
    read_more_output = True
    results_prefix = 'Building for '
    output_prefix = 'Building application '
    prev_results = False
    result = ['']
    output = StringIO()
    while 1:
        line = fd.readline().decode('utf-8', errors='replace')
        if not line:
            # EOF: flush the pending result, if any, before stopping.
            if prev_results:
                yield (' .. '.join(result[:-1]), result[-1], output)
            break
        elif line.startswith(results_prefix):
            read_more_output = False
            # A new result line closes out the previous one.
            if prev_results:
                yield (' .. '.join(result[:-1]), result[-1], output)
            prev_results = True
            output = StringIO()
            # Reversed so the board name ends up last in `result`.
            result = line[len(results_prefix):].rstrip().split(' .. ')[::-1]
            if (len(result) > 1) and ('success' in result[0] or 'failed' in result[0]):
                stdout.write('.')
                stdout.flush()
        elif line.startswith(output_prefix):
            # Start of a compiler-output section; capture until the next result.
            output.write(line)
            read_more_output = True
        elif read_more_output:
            output.write(line)
def get_app_dirs():
    """Ask the build system for the list of application directories."""
    raw = check_output(["make", "-f", "makefiles/app_dirs.inc.mk", "info-applications"])
    return raw.decode("utf-8", errors="ignore").split()
def split_apps_by_dir(app_dirs):
    """Group application paths by their top-level folder.

    Example: ["examples/hello_world", "tests/minimal"] becomes
    {"examples": ["hello_world"], "tests": ["minimal"]}.
    """
    grouped = defaultdict(list)
    for entry in app_dirs:
        # Split only on the first slash; nested app paths stay intact.
        top_folder, app_name = entry.split("/", 1)
        grouped[top_folder].append(app_name)
    return grouped
def build_all():
    """Run `make buildtest` for every tracked application and record results.

    Appends to the module-level lists `success`, `failed`, `exceptions`,
    `warnings` and `errors` (defined in the __main__ block).
    """
    # RIOTBASE defaults to three directories above this script.
    riotbase = environ.get('RIOTBASE') or abspath(join(dirname(abspath(__file__)), '../' * 3))
    app_folders = split_apps_by_dir(get_app_dirs())
    for folder in sorted(app_folders):
        print('Building all applications in: {}'.format(colorize_str(folder, Termcolor.blue)))
        applications = app_folders[folder]
        applications = filter(lambda app: is_tracked(join(riotbase, folder, app)), applications)
        applications = sorted(applications)
        subprocess_env = environ.copy()
        subprocess_env['RIOT_DO_RETRY'] = '1'
        subprocess_env['BUILDTEST_VERBOSE'] = '1'
        for nth, application in enumerate(applications, 1):
            stdout.write('\tBuilding application: {} ({}/{}) '.format(
                colorize_str(application, Termcolor.blue),
                nth, len(applications)))
            stdout.flush()
            try:
                app_dir = join(riotbase, folder, application)
                subprocess = Popen(('make', 'buildtest'),
                                   bufsize=1, stdin=null, stdout=PIPE, stderr=null,
                                   cwd=app_dir,
                                   env=subprocess_env)
                # tee: one stream for the grouped summary, one for log capture.
                results, results_with_output = tee(get_results_and_output_from(subprocess.stdout))
                results = groupby(sorted(results), lambda res: res[0])
                results_with_output = list(filter(lambda res: res[2].getvalue(), results_with_output))
                failed_with_output = list(filter(lambda res: 'failed' in res[0], results_with_output))
                success_with_output = list(filter(lambda res: 'success' in res[0], results_with_output))
                # check if bin-directory isn't in system's PATH to not accidentally
                # delete some valuable system executable ;-)
                if join(app_dir, "bin") not in environ.get("PATH", "/bin:/usr/bin:/usr/local/bin:"):
                    check_call(["rm", "-rf", join(app_dir, "bin")])
                print()
                for group, result in results:
                    print('\t\t{}: {}'.format(group, ', '.join(sorted(board for outcome, board, output in result))))
                returncode = subprocess.wait()
                # A successful build that still produced output counts as a warning.
                if success_with_output:
                    warnings.append((application, success_with_output))
                if returncode == 0:
                    success.append(application)
                else:
                    if not failed_with_output:
                        print(colorize_str('\t\tmake buildtest error!', Termcolor.red))
                    failed.append(application)
                    errors.append((application, failed_with_output))
            except Exception as e:
                print('\n\t\tException: {}'.format(e))
                exceptions.append(application)
            finally:
                # Best effort: the child may already have exited.
                try:
                    subprocess.kill()
                except Exception:
                    pass
def colorize_str(string, color):
    """Return *string* wrapped in the ANSI *color* code and a reset code."""
    return '{0}{1}{2}'.format(color, string, Termcolor.end)
def print_output_for(buf, name, color):
    """Print captured build logs from *buf*, a list of
    (application, [(outcome, board, output), ...]) pairs, under heading *name*."""
    if buf:
        print('%s:' % name)
        for application, details in buf:
            for _, board, output in details:
                print()
                print(colorize_str('%s:%s:' % (application, board), color))
                print('%s' % output.getvalue())
def print_outcome(outputListDescription):
    """Print a colored summary line per non-empty (color, applications, label) entry."""
    print()
    print('Outcome:')
    for color, group, name in outputListDescription:
        applications = group
        if applications:
            print('\t{}{}{}: {}'.format(color, name, Termcolor.end, ', '.join(applications)))
def print_num_of_errors_and_warnings():
    """Print colored totals of the module-level `errors` and `warnings` lists."""
    stdout.write('Errors: ')
    if errors:
        # Each entry is (application, [per-board details]); count the details.
        num_of_errors = sum(map(lambda x: len(x[1]), errors))
        stdout.write('%s' % colorize_str(str(num_of_errors), Termcolor.red))
    else:
        stdout.write('0')
    stdout.write(' Warnings: ')
    if warnings:
        num_of_warnings = sum(map(lambda x: len(x[1]), warnings))
        stdout.write('%s' % colorize_str(str(num_of_warnings), Termcolor.yellow))
    else:
        stdout.write('0')
    stdout.write('\n')
if __name__ == '__main__':
    # Global result accumulators, filled in by build_all().
    success = []
    failed = []
    exceptions = []
    warnings = []
    errors = []
    # Shared sink for silenced subprocess streams.
    null = open(devnull, 'wb', 0)
    if len(argv) > 1:
        # Optional base branch: collect the set of files changed since it.
        # NOTE(review): diff_files is computed but not used below — presumably
        # a leftover from an incremental-build feature; confirm before removal.
        base_branch = argv[1]
        diff_files = check_output(('git', 'diff', '--name-only', base_branch, 'HEAD'))
        diff_files = set(diff_files.split())
    else:
        base_branch = ''
    build_all()
    print_output_for(warnings, 'Warnings', Termcolor.yellow)
    print_output_for(errors, 'Errors', Termcolor.red)
    outputListDescription = [(Termcolor.green, success, 'success'),
                             (Termcolor.red, failed, 'failed'),
                             (Termcolor.blue, exceptions, 'exceptions')]
    print_outcome(outputListDescription)
    print_num_of_errors_and_warnings()
    # Exit code: 2 for internal exceptions, 1 for build failures, 0 otherwise.
    if exceptions:
        exit(2)
    elif failed:
        exit(1)
    else:
        exit(0)
| lgpl-2.1 |
SebastianMerz/calalert | Server/venv/lib/python2.7/site-packages/openid/server/trustroot.py | 143 | 14388 | # -*- test-case-name: openid.test.test_rpverify -*-
"""
This module contains the C{L{TrustRoot}} class, which helps handle
trust root checking. This module is used by the
C{L{openid.server.server}} module, but it is also available to server
implementers who wish to use it for additional trust root checking.
It also implements relying party return_to URL verification, based on
the realm.
"""
__all__ = [
'TrustRoot',
'RP_RETURN_TO_URL_TYPE',
'extractReturnToURLs',
'returnToMatches',
'verifyReturnTo',
]
from openid import oidutil
from openid import urinorm
from openid.yadis import services
from urlparse import urlparse, urlunparse
import re
############################################
_protocols = ['http', 'https']
_top_level_domains = [
'ac', 'ad', 'ae', 'aero', 'af', 'ag', 'ai', 'al', 'am', 'an',
'ao', 'aq', 'ar', 'arpa', 'as', 'asia', 'at', 'au', 'aw',
'ax', 'az', 'ba', 'bb', 'bd', 'be', 'bf', 'bg', 'bh', 'bi',
'biz', 'bj', 'bm', 'bn', 'bo', 'br', 'bs', 'bt', 'bv', 'bw',
'by', 'bz', 'ca', 'cat', 'cc', 'cd', 'cf', 'cg', 'ch', 'ci',
'ck', 'cl', 'cm', 'cn', 'co', 'com', 'coop', 'cr', 'cu', 'cv',
'cx', 'cy', 'cz', 'de', 'dj', 'dk', 'dm', 'do', 'dz', 'ec',
'edu', 'ee', 'eg', 'er', 'es', 'et', 'eu', 'fi', 'fj', 'fk',
'fm', 'fo', 'fr', 'ga', 'gb', 'gd', 'ge', 'gf', 'gg', 'gh',
'gi', 'gl', 'gm', 'gn', 'gov', 'gp', 'gq', 'gr', 'gs', 'gt',
'gu', 'gw', 'gy', 'hk', 'hm', 'hn', 'hr', 'ht', 'hu', 'id',
'ie', 'il', 'im', 'in', 'info', 'int', 'io', 'iq', 'ir', 'is',
'it', 'je', 'jm', 'jo', 'jobs', 'jp', 'ke', 'kg', 'kh', 'ki',
'km', 'kn', 'kp', 'kr', 'kw', 'ky', 'kz', 'la', 'lb', 'lc',
'li', 'lk', 'lr', 'ls', 'lt', 'lu', 'lv', 'ly', 'ma', 'mc',
'md', 'me', 'mg', 'mh', 'mil', 'mk', 'ml', 'mm', 'mn', 'mo',
'mobi', 'mp', 'mq', 'mr', 'ms', 'mt', 'mu', 'museum', 'mv',
'mw', 'mx', 'my', 'mz', 'na', 'name', 'nc', 'ne', 'net', 'nf',
'ng', 'ni', 'nl', 'no', 'np', 'nr', 'nu', 'nz', 'om', 'org',
'pa', 'pe', 'pf', 'pg', 'ph', 'pk', 'pl', 'pm', 'pn', 'pr',
'pro', 'ps', 'pt', 'pw', 'py', 'qa', 're', 'ro', 'rs', 'ru',
'rw', 'sa', 'sb', 'sc', 'sd', 'se', 'sg', 'sh', 'si', 'sj',
'sk', 'sl', 'sm', 'sn', 'so', 'sr', 'st', 'su', 'sv', 'sy',
'sz', 'tc', 'td', 'tel', 'tf', 'tg', 'th', 'tj', 'tk', 'tl',
'tm', 'tn', 'to', 'tp', 'tr', 'travel', 'tt', 'tv', 'tw',
'tz', 'ua', 'ug', 'uk', 'us', 'uy', 'uz', 'va', 'vc', 've',
'vg', 'vi', 'vn', 'vu', 'wf', 'ws', 'xn--0zwm56d',
'xn--11b5bs3a9aj6g', 'xn--80akhbyknj4f', 'xn--9t4b11yi5a',
'xn--deba0ad', 'xn--g6w251d', 'xn--hgbk6aj7f53bba',
'xn--hlcj6aya9esc7a', 'xn--jxalpdlp', 'xn--kgbechtv',
'xn--zckzah', 'ye', 'yt', 'yu', 'za', 'zm', 'zw']
# Build from RFC3986, section 3.2.2. Used to reject hosts with invalid
# characters.
host_segment_re = re.compile(
r"(?:[-a-zA-Z0-9!$&'\(\)\*+,;=._~]|%[a-zA-Z0-9]{2})+$")
class RealmVerificationRedirected(Exception):
    """Attempting to verify this realm resulted in a redirect.
    @since: 2.1.0
    """
    def __init__(self, relying_party_url, rp_url_after_redirects):
        self.relying_party_url = relying_party_url
        self.rp_url_after_redirects = rp_url_after_redirects

    def __str__(self):
        # Report both the original RP URL and where the redirect landed.
        template = "Attempting to verify %r resulted in redirect to %r"
        return template % (self.relying_party_url, self.rp_url_after_redirects)
def _parseURL(url):
    """Normalize *url* and split it into (proto, host, port, path).

    Returns None when the URL cannot be normalized, the port is non-numeric,
    or the host contains characters outside RFC 3986 reg-name. The returned
    path re-joins params, query and fragment; the host is lowercased.
    """
    try:
        url = urinorm.urinorm(url)
    except ValueError:
        return None
    proto, netloc, path, params, query, frag = urlparse(url)
    if not path:
        # Python <2.4 does not parse URLs with no path properly
        if not query and '?' in netloc:
            netloc, query = netloc.split('?', 1)
        path = '/'
    path = urlunparse(('', '', path, params, query, frag))
    if ':' in netloc:
        try:
            host, port = netloc.split(':')
        except ValueError:
            # More than one ':' (e.g. malformed netloc) is rejected.
            return None
        if not re.match(r'\d+$', port):
            return None
    else:
        host = netloc
        port = ''
    host = host.lower()
    # Reject hosts with characters invalid per RFC 3986, section 3.2.2.
    if not host_segment_re.match(host):
        return None
    return proto, host, port, path
class TrustRoot(object):
"""
This class represents an OpenID trust root. The C{L{parse}}
classmethod accepts a trust root string, producing a
C{L{TrustRoot}} object. The method OpenID server implementers
would be most likely to use is the C{L{isSane}} method, which
checks the trust root for given patterns that indicate that the
trust root is too broad or points to a local network resource.
@sort: parse, isSane
"""
    def __init__(self, unparsed, proto, wildcard, host, port, path):
        """Store the parsed trust-root components.

        @param unparsed: the original trust-root string
        @param proto: URL scheme ('http' or 'https')
        @param wildcard: True when the root begins with '*.'
        @param host: lowercased host (leading '.' retained for wildcards)
        @param port: port string, or '' when unspecified
        @param path: path plus params/query/fragment
        """
        self.unparsed = unparsed
        self.proto = proto
        self.wildcard = wildcard
        self.host = host
        self.port = port
        self.path = path
    def isSane(self):
        """
        This method checks the to see if a trust root represents a
        reasonable (sane) set of URLs. 'http://*.com/', for example
        is not a reasonable pattern, as it cannot meaningfully specify
        the site claiming it. This function attempts to find many
        related examples, but it can only work via heuristics.
        Negative responses from this method should be treated as
        advisory, used only to alert the user to examine the trust
        root carefully.
        @return: Whether the trust root is sane
        @rtype: C{bool}
        """
        if self.host == 'localhost':
            return True
        host_parts = self.host.split('.')
        if self.wildcard:
            # Wildcard hosts are stored with a leading '.', so the first
            # split segment is always empty.
            assert host_parts[0] == '', host_parts
            del host_parts[0]
        # If it's an absolute domain name, remove the empty string
        # from the end.
        if host_parts and not host_parts[-1]:
            del host_parts[-1]
        if not host_parts:
            return False
        # Do not allow adjacent dots
        if '' in host_parts:
            return False
        tld = host_parts[-1]
        if tld not in _top_level_domains:
            return False
        # A bare TLD (e.g. 'com') cannot identify a site.
        if len(host_parts) == 1:
            return False
        if self.wildcard:
            if len(tld) == 2 and len(host_parts[-2]) <= 3:
                # It's a 2-letter tld with a short second to last segment
                # so there needs to be more than two segments specified
                # (e.g. *.co.uk is insane)
                return len(host_parts) > 2
        # Passed all tests for insanity.
        return True
    def validateURL(self, url):
        """
        Validates a URL against this trust root.
        @param url: The URL to check
        @type url: C{str}
        @return: Whether the given URL is within this trust root.
        @rtype: C{bool}
        """
        url_parts = _parseURL(url)
        if url_parts is None:
            return False
        proto, host, port, path = url_parts
        # Scheme and port must match exactly.
        if proto != self.proto:
            return False
        if port != self.port:
            return False
        # A literal '*' in the URL's host is never acceptable.
        if '*' in host:
            return False
        if not self.wildcard:
            if host != self.host:
                return False
        elif ((not host.endswith(self.host)) and
              ('.' + host) != self.host):
            # Wildcard roots store the host with a leading '.'; accept any
            # subdomain suffix match, or the exact domain itself.
            return False
        if path != self.path:
            path_len = len(self.path)
            trust_prefix = self.path[:path_len]
            url_prefix = path[:path_len]
            # must be equal up to the length of the path, at least
            if trust_prefix != url_prefix:
                return False
            # These characters must be on the boundary between the end
            # of the trust root's path and the start of the URL's
            # path.
            if '?' in self.path:
                allowed = '&'
            else:
                allowed = '?/'
            return (self.path[-1] in allowed or
                    path[path_len] in allowed)
        return True
    def parse(cls, trust_root):
        """
        This method creates a C{L{TrustRoot}} instance from the given
        input, if possible.
        @param trust_root: This is the trust root to parse into a
            C{L{TrustRoot}} object.
        @type trust_root: C{str}
        @return: A C{L{TrustRoot}} instance if trust_root parses as a
            trust root, C{None} otherwise.
        @rtype: C{NoneType} or C{L{TrustRoot}}
        """
        url_parts = _parseURL(trust_root)
        if url_parts is None:
            return None
        proto, host, port, path = url_parts
        # check for valid protocol (http/https only)
        if proto not in _protocols:
            return None
        # check for URI fragment
        if path.find('#') != -1:
            return None
        # extract wildcard if it is there
        if host.find('*', 1) != -1:
            # wildcard must be at start of domain: *.foo.com, not foo.*.com
            return None
        if host.startswith('*'):
            # Starts with star, so must have a dot after it (if a
            # domain is specified)
            if len(host) > 1 and host[1] != '.':
                return None
            # Keep the leading '.' so wildcard matching can suffix-compare.
            host = host[1:]
            # NOTE(review): local name 'wilcard' is a typo for 'wildcard';
            # harmless since it is used consistently below.
            wilcard = True
        else:
            wilcard = False
        # we have a valid trust root
        tr = cls(trust_root, proto, wilcard, host, port, path)
        return tr
    parse = classmethod(parse)
def checkSanity(cls, trust_root_string):
    """str -> bool

    Convenience wrapper: parse the string and report whether the
    resulting trust root is sane.
    """
    parsed = cls.parse(trust_root_string)
    return parsed is not None and parsed.isSane()
checkSanity = classmethod(checkSanity)
def checkURL(cls, trust_root, url):
    """Quick helper for validating a url against a trust root.  Use the
    TrustRoot class directly if you need more control."""
    parsed = cls.parse(trust_root)
    if parsed is None:
        return False
    return parsed.validateURL(url)
checkURL = classmethod(checkURL)
def buildDiscoveryURL(self):
    """Return a discovery URL for this realm.

    No validity checking is performed here; behaviour on an invalid
    realm is undefined.

    @rtype: str
    @returns: The URL upon which relying party discovery should be run
        in order to verify the return_to URL
    @since: 2.1.0
    """
    # Non-wildcard realms are used verbatim.
    if not self.wildcard:
        return self.unparsed
    # Wildcard realm: substitute "www" for the star.  The parser
    # guarantees the stored host begins with a dot.
    assert self.host.startswith('.'), self.host
    www_domain = 'www' + self.host
    return '%s://%s%s' % (self.proto, www_domain, self.path)
def __repr__(self):
    # Constructor-style repr showing every stored field.
    fields = (self.unparsed, self.proto, self.wildcard,
              self.host, self.port, self.path)
    return "TrustRoot(%r, %r, %r, %r, %r, %r)" % fields
def __str__(self):
    # str() and repr() are intentionally identical for trust roots.
    return self.__repr__()
# The URI for relying party discovery, used in realm verification.
#
# XXX: This should probably live somewhere else (like in
# openid.consumer or openid.yadis somewhere)
RP_RETURN_TO_URL_TYPE = 'http://specs.openid.net/auth/2.0/return_to'
def _extractReturnURL(endpoint):
"""If the endpoint is a relying party OpenID return_to endpoint,
return the endpoint URL. Otherwise, return None.
This function is intended to be used as a filter for the Yadis
filtering interface.
@see: C{L{openid.yadis.services}}
@see: C{L{openid.yadis.filters}}
@param endpoint: An XRDS BasicServiceEndpoint, as returned by
performing Yadis dicovery.
@returns: The endpoint URL or None if the endpoint is not a
relying party endpoint.
@rtype: str or NoneType
"""
if endpoint.matchTypes([RP_RETURN_TO_URL_TYPE]):
return endpoint.uri
else:
return None
def returnToMatches(allowed_return_to_urls, return_to):
    """Is the return_to URL under one of the supplied allowed
    return_to URLs?

    @since: 2.1.0
    """
    for pattern in allowed_return_to_urls:
        # A return_to pattern works the same as a realm, except that a
        # wildcard is not allowed: model it by parsing as a realm and
        # rejecting any pattern that carries a wildcard.
        realm = TrustRoot.parse(pattern)
        if realm is None:
            continue  # does not parse as a trust root
        if realm.wildcard:
            continue  # wildcards are forbidden in return_to patterns
        if realm.validateURL(return_to):
            return True
    # Nothing in the list matched.
    return False
def getAllowedReturnURLs(relying_party_url):
    """Given a relying party discovery URL return a list of return_to URLs.

    @raises RealmVerificationRedirected: when fetching the discovery
        URL was redirected, which is not permitted during verification.
    @since: 2.1.0
    """
    rp_url_after_redirects, return_to_urls = services.getServiceEndpoints(
        relying_party_url, _extractReturnURL)
    if rp_url_after_redirects != relying_party_url:
        # Verification caused a redirect
        raise RealmVerificationRedirected(relying_party_url,
                                          rp_url_after_redirects)
    return return_to_urls
# _vrfy parameter is there to make testing easier
def verifyReturnTo(realm_str, return_to, _vrfy=getAllowedReturnURLs):
    """Verify that a return_to URL is valid for the given realm.

    This function builds a discovery URL, performs Yadis discovery on
    it, makes sure that the URL does not redirect, parses out the
    return_to URLs, and finally checks to see if the current return_to
    URL matches the return_to.

    @raises DiscoveryFailure: When Yadis discovery fails
    @returns: True if the return_to URL is valid for the realm
    @since: 2.1.0
    """
    realm = TrustRoot.parse(realm_str)
    if realm is None:
        # The realm does not parse as a URL pattern
        return False
    try:
        allowable_urls = _vrfy(realm.buildDiscoveryURL())
    # NOTE: Python 2 except syntax -- this module predates Python 3.
    except RealmVerificationRedirected, err:
        # A redirect during verification is treated as a failure.
        oidutil.log(str(err))
        return False
    if returnToMatches(allowable_urls, return_to):
        return True
    else:
        oidutil.log("Failed to validate return_to %r for realm %r, was not "
                    "in %s" % (return_to, realm_str, allowable_urls))
        return False
| gpl-2.0 |
sourcepole/qgis-wps-client | apicompat/sipv1/compat.py | 1 | 2442 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
ApiCompat
A QGIS plugin
API compatibility layer
-------------------
begin : 2013-07-02
copyright : (C) 2013 by Pirmin Kalberer, Sourcepole
email : pka@sourcepole.ch
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# Import the PyQt and QGIS libraries
from future import standard_library
standard_library.install_aliases()
from builtins import str
from qgis.PyQt.QtCore import *
from qgis.core import *
import builtins
def pystring(qvar):
    """Coerce a QVariant (SIP v1) or plain value to a Python string."""
    if hasattr(qvar, 'toString'):
        return str(qvar.toString())
    return str(qvar)
builtins.pystring = pystring
def pylist(qvar):
    """Coerce a QVariant (SIP v1) to a Python list; pass through otherwise."""
    if hasattr(qvar, 'toList'):
        return list(qvar.toList())
    return qvar
builtins.pylist = pylist
def pyint(qvar):
    """Coerce a QVariant (SIP v1) to int; pass plain values through.

    Raises ValueError when the QVariant reports a failed conversion.
    """
    if not hasattr(qvar, 'toInt'):
        return qvar
    val, ok = qvar.toInt()
    if not ok:
        raise ValueError('QVariant conversion error')
    return int(val)
builtins.pyint = pyint
def pyfloat(qvar):
    """Coerce a QVariant (SIP v1) to float; pass plain values through.

    Raises ValueError when the QVariant reports a failed conversion.
    """
    if not hasattr(qvar, 'toFloat'):
        return qvar
    val, ok = qvar.toFloat()
    if not ok:
        raise ValueError('QVariant conversion error')
    return float(val)
builtins.pyfloat = pyfloat
def pystringlist(qvar):
    """Coerce a QVariant (SIP v1) to a list of str; pass through otherwise."""
    if not hasattr(qvar, 'toStringList'):
        return qvar
    return [str(s) for s in qvar.toStringList()]
builtins.pystringlist = pystringlist
def pybytearray(qvar):
    """Coerce a QVariant (SIP v1) to a bytearray; pass through otherwise."""
    if not hasattr(qvar, 'toByteArray'):
        return qvar
    return bytearray(qvar.toByteArray())
builtins.pybytearray = pybytearray
def pyobject(qvar):
    """Unwrap a QVariant (SIP v1) to its Python object; pass through otherwise."""
    if not hasattr(qvar, 'toPyObject'):
        return qvar
    return qvar.toPyObject()
builtins.pyobject = pyobject
| gpl-2.0 |
xunmengfeng/engine | build/android/pylib/utils/reraiser_thread_unittest.py | 99 | 2368 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for reraiser_thread.py."""
import threading
import unittest
from pylib.utils import reraiser_thread
from pylib.utils import watchdog_timer
class TestException(Exception):
    """Exception type raised deliberately inside worker threads by the
    tests below, so re-raising behaviour can be asserted on."""
    pass
class TestReraiserThread(unittest.TestCase):
    """Tests for reraiser_thread.ReraiserThread."""

    def testNominal(self):
        # Positional and keyword arguments must be forwarded to the
        # wrapped callable.
        result = [None, None]

        def f(a, b=None):
            result[0] = a
            result[1] = b

        thread = reraiser_thread.ReraiserThread(f, [1], {'b': 2})
        thread.start()
        thread.join()
        self.assertEqual(result[0], 1)
        self.assertEqual(result[1], 2)

    def testRaise(self):
        # join() itself must not raise; the exception surfaces only via
        # an explicit ReraiseIfException() call.
        def f():
            raise TestException

        thread = reraiser_thread.ReraiserThread(f)
        thread.start()
        thread.join()
        with self.assertRaises(TestException):
            thread.ReraiseIfException()
class TestReraiserThreadGroup(unittest.TestCase):
    """Tests for reraiser_thread.ReraiserThreadGroup."""

    def testInit(self):
        # Threads supplied to the constructor are all started/joined.
        ran = [False] * 5

        def f(i):
            ran[i] = True

        group = reraiser_thread.ReraiserThreadGroup(
            [reraiser_thread.ReraiserThread(f, args=[i]) for i in range(5)])
        group.StartAll()
        group.JoinAll()
        for v in ran:
            self.assertTrue(v)

    def testAdd(self):
        # Threads added after construction behave the same.
        ran = [False] * 5

        def f(i):
            ran[i] = True

        group = reraiser_thread.ReraiserThreadGroup()
        # NOTE: xrange -- this file targets Python 2.
        for i in xrange(5):
            group.Add(reraiser_thread.ReraiserThread(f, args=[i]))
        group.StartAll()
        group.JoinAll()
        for v in ran:
            self.assertTrue(v)

    def testJoinRaise(self):
        # JoinAll() re-raises an exception from any member thread.
        def f():
            raise TestException
        group = reraiser_thread.ReraiserThreadGroup(
            [reraiser_thread.ReraiserThread(f) for _ in xrange(5)])
        group.StartAll()
        with self.assertRaises(TestException):
            group.JoinAll()

    def testJoinTimeout(self):
        # A thread blocked on the event outlives the watchdog, so
        # JoinAll() must raise TimeoutError.
        def f():
            pass
        event = threading.Event()

        def g():
            event.wait()
        group = reraiser_thread.ReraiserThreadGroup(
            [reraiser_thread.ReraiserThread(g),
             reraiser_thread.ReraiserThread(f)])
        group.StartAll()
        with self.assertRaises(reraiser_thread.TimeoutError):
            group.JoinAll(watchdog_timer.WatchdogTimer(0.01))
        # Unblock g() so the worker thread can exit.
        event.set()
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
goldeneye-source/ges-python | lib/contextlib.py | 83 | 11648 | """Utilities for with-statement contexts. See PEP 343."""
import sys
from collections import deque
from functools import wraps
__all__ = ["contextmanager", "closing", "ContextDecorator", "ExitStack",
"redirect_stdout", "suppress"]
class ContextDecorator(object):
    "A base class or mixin that enables context managers to work as decorators."

    def _recreate_cm(self):
        """Return a recreated instance of self.

        Allows an otherwise one-shot context manager like
        _GeneratorContextManager to support use as
        a decorator via implicit recreation.

        This is a private interface just for _GeneratorContextManager.
        See issue #11647 for details.
        """
        # Reusable context managers simply decorate with themselves.
        return self

    def __call__(self, func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Enter a (possibly fresh) CM around every invocation.
            with self._recreate_cm():
                return func(*args, **kwargs)
        return wrapper
class _GeneratorContextManager(ContextDecorator):
    """Helper for @contextmanager decorator.

    Wraps a generator function: the code before its single ``yield``
    runs on __enter__, the code after it on __exit__.
    """

    def __init__(self, func, *args, **kwds):
        # Start the generator immediately; it runs up to the yield on
        # __enter__.
        self.gen = func(*args, **kwds)
        # Kept so _recreate_cm() can build a fresh instance (decorator use).
        self.func, self.args, self.kwds = func, args, kwds
        # Issue 19330: ensure context manager instances have good docstrings
        doc = getattr(func, "__doc__", None)
        if doc is None:
            doc = type(self).__doc__
        self.__doc__ = doc
        # Unfortunately, this still doesn't provide good help output when
        # inspecting the created context manager instances, since pydoc
        # currently bypasses the instance docstring and shows the docstring
        # for the class instead.
        # See http://bugs.python.org/issue19404 for more details.

    def _recreate_cm(self):
        # _GCM instances are one-shot context managers, so the
        # CM must be recreated each time a decorated function is
        # called
        return self.__class__(self.func, *self.args, **self.kwds)

    def __enter__(self):
        try:
            # Run the generator to its yield; the yielded value is the
            # "as" target.
            return next(self.gen)
        except StopIteration:
            raise RuntimeError("generator didn't yield") from None

    def __exit__(self, type, value, traceback):
        if type is None:
            # Normal exit: resume the generator, which must now finish.
            try:
                next(self.gen)
            except StopIteration:
                return
            else:
                raise RuntimeError("generator didn't stop")
        else:
            if value is None:
                # Need to force instantiation so we can reliably
                # tell if we get the same exception back
                value = type()
            try:
                self.gen.throw(type, value, traceback)
                raise RuntimeError("generator didn't stop after throw()")
            except StopIteration as exc:
                # Suppress the exception *unless* it's the same exception that
                # was passed to throw().  This prevents a StopIteration
                # raised inside the "with" statement from being suppressed
                return exc is not value
            except:
                # only re-raise if it's *not* the exception that was
                # passed to throw(), because __exit__() must not raise
                # an exception unless __exit__() itself failed.  But throw()
                # has to raise the exception to signal propagation, so this
                # fixes the impedance mismatch between the throw() protocol
                # and the __exit__() protocol.
                #
                if sys.exc_info()[1] is not value:
                    raise
def contextmanager(func):
    """@contextmanager decorator.

    Turns a single-yield generator function into a factory of context
    managers.  Typical usage:

        @contextmanager
        def some_generator(<arguments>):
            <setup>
            try:
                yield <value>
            finally:
                <cleanup>

    so that

        with some_generator(<arguments>) as <variable>:
            <body>

    is equivalent to running <setup>, binding <variable> to <value>,
    executing <body>, then running <cleanup>.
    """
    @wraps(func)
    def factory(*args, **kwargs):
        return _GeneratorContextManager(func, *args, **kwargs)
    return factory
class closing(object):
    """Context to automatically close something at the end of a block.

    ``with closing(obj) as f`` yields *obj* unchanged and calls
    ``obj.close()`` on exit, whether or not the block raised.
    """

    def __init__(self, thing):
        # The wrapped object; exposed unchanged by __enter__.
        self.thing = thing

    def __enter__(self):
        return self.thing

    def __exit__(self, *exc_info):
        # Exceptions are never suppressed (implicit None return).
        self.thing.close()
class redirect_stdout:
    """Context manager for temporarily redirecting stdout to another file.

        # How to send help() to stderr
        with redirect_stdout(sys.stderr):
            help(dir)

        # How to write help() to a file
        with open('help.txt', 'w') as f:
            with redirect_stdout(f):
                help(pow)
    """

    def __init__(self, new_target):
        self._new_target = new_target
        # A stack of saved targets keeps this CM re-entrant.
        self._old_targets = []

    def __enter__(self):
        self._old_targets.append(sys.stdout)
        sys.stdout = self._new_target
        return self._new_target

    def __exit__(self, exctype, excinst, exctb):
        # Restore the most recently saved target.
        sys.stdout = self._old_targets.pop()
class suppress:
    """Context manager to suppress specified exceptions.

    After the exception is suppressed, execution proceeds with the next
    statement following the with statement.

        with suppress(FileNotFoundError):
            os.remove(somefile)
        # Execution still resumes here if the file was already removed
    """

    def __init__(self, *exceptions):
        self._exceptions = exceptions

    def __enter__(self):
        pass

    def __exit__(self, exctype, excinst, exctb):
        # Unlike isinstance and issubclass, CPython exception handling
        # currently only looks at the concrete type hierarchy (ignoring
        # the instance and subclass checking hooks).  While Guido considers
        # that a bug rather than a feature, it's a fairly hard one to fix
        # due to various internal implementation details.  suppress provides
        # the simpler issubclass based semantics, rather than trying to
        # exactly reproduce the limitations of the CPython interpreter.
        #
        # See http://bugs.python.org/issue12029 for more details
        return exctype is not None and issubclass(exctype, self._exceptions)
# Inspired by discussions on http://bugs.python.org/issue13585
class ExitStack(object):
    """Context manager for dynamic management of a stack of exit callbacks.

    For example:

        with ExitStack() as stack:
            files = [stack.enter_context(open(fname)) for fname in filenames]
            # All opened files will automatically be closed at the end of
            # the with statement, even if attempts to open files later
            # in the list raise an exception
    """

    def __init__(self):
        # Callbacks run LIFO on unwind, mirroring nested with-statements.
        self._exit_callbacks = deque()

    def pop_all(self):
        """Preserve the context stack by transferring it to a new instance"""
        new_stack = type(self)()
        new_stack._exit_callbacks = self._exit_callbacks
        self._exit_callbacks = deque()
        return new_stack

    def _push_cm_exit(self, cm, cm_exit):
        """Helper to correctly register callbacks to __exit__ methods"""
        def _exit_wrapper(*exc_details):
            return cm_exit(cm, *exc_details)
        # __self__ lets introspection find the underlying CM.
        _exit_wrapper.__self__ = cm
        self.push(_exit_wrapper)

    def push(self, exit):
        """Registers a callback with the standard __exit__ method signature.

        Can suppress exceptions the same way __exit__ methods can.
        Also accepts any object with an __exit__ method (registering a call
        to the method instead of the object itself)
        """
        # We use an unbound method rather than a bound method to follow
        # the standard lookup behaviour for special methods
        _cb_type = type(exit)
        try:
            exit_method = _cb_type.__exit__
        except AttributeError:
            # Not a context manager, so assume its a callable
            self._exit_callbacks.append(exit)
        else:
            self._push_cm_exit(exit, exit_method)
        return exit  # Allow use as a decorator

    def callback(self, callback, *args, **kwds):
        """Registers an arbitrary callback and arguments.

        Cannot suppress exceptions.
        """
        def _exit_wrapper(exc_type, exc, tb):
            callback(*args, **kwds)
        # We changed the signature, so using @wraps is not appropriate, but
        # setting __wrapped__ may still help with introspection
        _exit_wrapper.__wrapped__ = callback
        self.push(_exit_wrapper)
        return callback  # Allow use as a decorator

    def enter_context(self, cm):
        """Enters the supplied context manager.

        If successful, also pushes its __exit__ method as a callback and
        returns the result of the __enter__ method.
        """
        # We look up the special methods on the type to match the with
        # statement
        _cm_type = type(cm)
        _exit = _cm_type.__exit__
        result = _cm_type.__enter__(cm)
        self._push_cm_exit(cm, _exit)
        return result

    def close(self):
        """Immediately unwind the context stack"""
        self.__exit__(None, None, None)

    def __enter__(self):
        return self

    def __exit__(self, *exc_details):
        received_exc = exc_details[0] is not None

        # We manipulate the exception state so it behaves as though
        # we were actually nesting multiple with statements
        frame_exc = sys.exc_info()[1]

        def _fix_exception_context(new_exc, old_exc):
            # Context may not be correct, so find the end of the chain
            while 1:
                exc_context = new_exc.__context__
                if exc_context is old_exc:
                    # Context is already set correctly (see issue 20317)
                    return
                if exc_context is None or exc_context is frame_exc:
                    break
                new_exc = exc_context
            # Change the end of the chain to point to the exception
            # we expect it to reference
            new_exc.__context__ = old_exc

        # Callbacks are invoked in LIFO order to match the behaviour of
        # nested context managers
        suppressed_exc = False
        pending_raise = False
        while self._exit_callbacks:
            cb = self._exit_callbacks.pop()
            try:
                if cb(*exc_details):
                    # Callback suppressed the in-flight exception.
                    suppressed_exc = True
                    pending_raise = False
                    exc_details = (None, None, None)
            except:
                new_exc_details = sys.exc_info()
                # simulate the stack of exceptions by setting the context
                _fix_exception_context(new_exc_details[1], exc_details[1])
                pending_raise = True
                exc_details = new_exc_details
        if pending_raise:
            try:
                # bare "raise exc_details[1]" replaces our carefully
                # set-up context
                fixed_ctx = exc_details[1].__context__
                raise exc_details[1]
            except BaseException:
                exc_details[1].__context__ = fixed_ctx
                raise
        return received_exc and suppressed_exc
| gpl-3.0 |
pietroquaglio/elephant | elephant/current_source_density.py | 1 | 13443 | # -*- coding: utf-8 -*-
"""'Current Source Density analysis (CSD) is a class of methods of analysis of
extracellular electric potentials recorded at multiple sites leading to
estimates of current sources generating the measured potentials. It is usually
applied to low-frequency part of the potential (called the Local Field
Potential, LFP) and to simultaneous recordings or to recordings taken with
fixed time reference to the onset of specific stimulus (Evoked Potentials)'
(Definition by Prof.Daniel K. Wójcik for Encyclopedia of Computational
Neuroscience)
CSD is also called as Source Localization or Source Imaging in the EEG circles.
Here are CSD methods for different types of electrode configurations.
1D - laminar probe like electrodes.
2D - Microelectrode Array like
3D - UtahArray or multiple laminar probes.
The following methods have been implemented so far
1D - StandardCSD, DeltaiCSD, SplineiCSD, StepiCSD, KCSD1D
2D - KCSD2D, MoIKCSD (Saline layer on top of slice)
3D - KCSD3D
Each of these methods listed have some advantages. The KCSD methods for
instance can handle broken or irregular electrode configurations electrode
Keywords: LFP; CSD; Multielectrode; Laminar electrode; Barrel cortex
Citation Policy: See ./current_source_density_src/README.md
Contributors to this current source density estimation module are:
Chaitanya Chintaluri(CC), Espen Hagen(EH) and Michał Czerwinski(MC).
EH implemented the iCSD methods and StandardCSD
CC implemented the kCSD methods, kCSD1D(MC and CC)
CC and EH developed the interface to elephant.
"""
from __future__ import division
import neo
import quantities as pq
import numpy as np
from scipy import io
from scipy.integrate import simps
from elephant.current_source_density_src import KCSD
from elephant.current_source_density_src import icsd
import elephant.current_source_density_src.utility_functions as utils
utils.patch_quantities()
available_1d = ['StandardCSD', 'DeltaiCSD', 'StepiCSD', 'SplineiCSD', 'KCSD1D']
available_2d = ['KCSD2D', 'MoIKCSD']
available_3d = ['KCSD3D']
kernel_methods = ['KCSD1D', 'KCSD2D', 'KCSD3D', 'MoIKCSD']
icsd_methods = ['DeltaiCSD', 'StepiCSD', 'SplineiCSD']
py_iCSD_toolbox = ['StandardCSD'] + icsd_methods
def estimate_csd(lfp, coords=None, method=None,
                 process_estimate=True, **kwargs):
    """
    Compute the current source density (CSD) from extracellular
    potential recordings (local-field potentials - LFP) using laminar
    electrodes or multi-contact electrodes with 2D or 3D geometries.

    Parameters
    ----------
    lfp : neo.AnalogSignal
        positions of electrodes can be added as neo.RecordingChannel
        coordinate or sent externally as a func argument (See coords)
    coords : [Optional] corresponding spatial coordinates of the electrodes
        Defaults to None; otherwise looks for the RecordingChannel coordinate
    method : string
        Pick a method corresponding to the setup:
        Laminar probe (1D): 'KCSD1D', 'StandardCSD', 'DeltaiCSD',
        'StepiCSD' or 'SplineiCSD'
        MEA probe (2D): 'KCSD2D' or 'MoIKCSD'
        Array of laminar probes (3D): 'KCSD3D'
        Defaults to None
    process_estimate : bool
        For the py_iCSD_toolbox methods this toggles filter_csd
        (parameters f_type and f_order passed via kwargs).
        For the kCSD methods this toggles cross_validate
        (parameters lambdas and Rs passed via kwargs).
        Defaults to True
    kwargs : parameters forwarded to the chosen method
        See the documentation of the individual method.

    Returns
    -------
    neo.AnalogSignal
        Estimated CSD, annotated with the spatial coordinates.

    Raises
    ------
    AttributeError
        No units specified for electrode spatial coordinates.
    ValueError
        Invalid function arguments, wrong method name, or
        mismatching coordinates.
    TypeError
        Invalid cv_param argument passed.
    """
    if not isinstance(lfp, neo.AnalogSignal):
        raise TypeError('Parameter `lfp` must be a neo.AnalogSignal object')
    if coords is None:
        coords = lfp.channel_index.coordinates
    else:
        # Normalise all user-supplied coordinates to millimetres.
        scaled_coords = []
        for coord in coords:
            try:
                scaled_coords.append(coord.rescale(pq.mm))
            except AttributeError:
                raise AttributeError('No units given for electrode spatial '
                                     'coordinates')
        coords = scaled_coords
    if method is None:
        raise ValueError('Must specify a method of CSD implementation')
    if len(coords) != lfp.shape[1]:
        raise ValueError('Number of signals and coords is not same')
    for ii in coords:  # CHECK for dimensionality of electrodes
        if len(ii) > 3:
            raise ValueError('Invalid number of coordinate positions')
    dim = len(coords[0])  # TODO : Generic co-ordinates!
    if dim == 1 and (method not in available_1d):
        raise ValueError('Invalid method, Available options are:',
                         available_1d)
    if dim == 2 and (method not in available_2d):
        raise ValueError('Invalid method, Available options are:',
                         available_2d)
    if dim == 3 and (method not in available_3d):
        raise ValueError('Invalid method, Available options are:',
                         available_3d)
    if method in kernel_methods:
        # Collect the raw voltages (in mV) into a plain numpy array.
        input_array = np.zeros((len(lfp), lfp[0].magnitude.shape[0]))
        for ii, jj in enumerate(lfp):
            input_array[ii, :] = jj.rescale(pq.mV).magnitude
        kernel_method = getattr(KCSD, method)  # fetch the class, e.g. 'KCSD1D'
        lambdas = kwargs.pop('lambdas', None)
        Rs = kwargs.pop('Rs', None)
        k = kernel_method(np.array(coords), input_array.T, **kwargs)
        if process_estimate:
            k.cross_validate(lambdas, Rs)
        estm_csd = k.values()
        # Move the time axis to the front so it matches AnalogSignal layout.
        estm_csd = np.rollaxis(estm_csd, -1, 0)
        output = neo.AnalogSignal(estm_csd * pq.uA / pq.mm**3,
                                  t_start=lfp.t_start,
                                  sampling_rate=lfp.sampling_rate)
        if dim == 1:
            output.annotate(x_coords=k.estm_x)
        elif dim == 2:
            output.annotate(x_coords=k.estm_x, y_coords=k.estm_y)
        elif dim == 3:
            output.annotate(x_coords=k.estm_x, y_coords=k.estm_y,
                            z_coords=k.estm_z)
    elif method in py_iCSD_toolbox:
        coords = np.array(coords) * coords[0].units
        if method in icsd_methods:
            try:
                coords = coords.rescale(kwargs['diam'].units)
            except KeyError:  # Then why specify as a default in icsd?
                # All iCSD methods explicitly assume a source
                # diameter in contrast to the stdCSD that
                # implicitly assume infinite source radius
                raise ValueError(
                    "Parameter diam must be specified for iCSD "
                    "methods: {}".format(", ".join(icsd_methods)))
        if 'f_type' in kwargs:
            # BUGFIX: the original used "is not 'identity'" (identity
            # comparison with a str literal) and indexed kwargs['f_order']
            # directly, raising KeyError when f_order was simply omitted.
            if (kwargs['f_type'] != 'identity') and \
                    (kwargs.get('f_order') is None):
                raise ValueError(
                    "The order of {} filter must be "
                    "specified".format(kwargs['f_type']))
        lfp = neo.AnalogSignal(np.asarray(lfp).T, units=lfp.units,
                               sampling_rate=lfp.sampling_rate)
        csd_method = getattr(icsd, method)  # fetch class from icsd.py file
        csd_estimator = csd_method(lfp=lfp.magnitude * lfp.units,
                                   coord_electrode=coords.flatten(),
                                   **kwargs)
        csd_pqarr = csd_estimator.get_csd()
        if process_estimate:
            csd_pqarr_filtered = csd_estimator.filter_csd(csd_pqarr)
            output = neo.AnalogSignal(csd_pqarr_filtered.T,
                                      t_start=lfp.t_start,
                                      sampling_rate=lfp.sampling_rate)
        else:
            output = neo.AnalogSignal(csd_pqarr.T, t_start=lfp.t_start,
                                      sampling_rate=lfp.sampling_rate)
        output.annotate(x_coords=coords)
    return output
def generate_lfp(csd_profile, ele_xx, ele_yy=None, ele_zz=None,
                 xlims=(0., 1.), ylims=(0., 1.), zlims=(0., 1.), res=50):
    """Forward modelling to obtain potentials for testing CSD methods.

    Parameters
    ----------
    csd_profile : function that computes the true CSD profile
        Available options are (see ./csd/utility_functions.py)
        1D : gauss_1d_dipole
        2D : large_source_2D and small_source_2D
        3D : gauss_3d_dipole
    ele_xx : np.array
        Positions of the x coordinates of the electrodes
    ele_yy : np.array
        Positions of the y coordinates of the electrodes
        Defaults to None; use in 2D or 3D cases only
    ele_zz : np.array
        Positions of the z coordinates of the electrodes
        Defaults to None; use in 3D case only
    xlims : (start, end)
        Spatial integration bounds along x. Defaults to (0., 1.)
        (tuples instead of mutable list defaults).
    ylims : (start, end)
        Spatial integration bounds along y; 2D/3D only.
    zlims : (start, end)
        Spatial integration bounds along z; 3D only.
    res : int
        The resolution of the integration. Defaults to 50.

    Returns
    -------
    neo.AnalogSignal
        The potentials created by the csd profile at the electrode
        positions; positions attached as the channel coordinates.
    """
    def integrate_1D(x0, csd_x, csd, h):
        # Line-source forward model (Potworowski et al.).
        m = np.sqrt((csd_x - x0)**2 + h**2) - abs(csd_x - x0)
        y = csd * m
        I = simps(y, csd_x)
        return I

    def integrate_2D(x, y, xlin, ylin, csd, h, X, Y):
        Ny = ylin.shape[0]
        m = np.sqrt((x - X)**2 + (y - Y)**2)
        # Clip near-zero distances to avoid division blow-up.
        m[m < 0.0000001] = 0.0000001
        y = np.arcsinh(2 * h / m) * csd
        I = np.zeros(Ny)
        for i in range(Ny):
            I[i] = simps(y[:, i], ylin)
        F = simps(I, xlin)
        return F

    def integrate_3D(x, y, z, xlim, ylim, zlim, csd, xlin, ylin, zlin,
                     X, Y, Z):
        Nz = zlin.shape[0]
        Ny = ylin.shape[0]
        m = np.sqrt((x - X)**2 + (y - Y)**2 + (z - Z)**2)
        m[m < 0.0000001] = 0.0000001
        z = csd / m
        Iy = np.zeros(Ny)
        for j in range(Ny):
            Iz = np.zeros(Nz)
            for i in range(Nz):
                Iz[i] = simps(z[:, j, i], zlin)
            Iy[j] = simps(Iz, ylin)
        F = simps(Iy, xlin)
        return F

    # Infer the dimensionality from which coordinate arrays were given.
    dim = 1
    if ele_zz is not None:
        dim = 3
    elif ele_yy is not None:
        dim = 2
    x = np.linspace(xlims[0], xlims[1], res)
    if dim >= 2:
        y = np.linspace(ylims[0], ylims[1], res)
    if dim == 3:
        z = np.linspace(zlims[0], zlims[1], res)
    sigma = 1.0  # conductivity
    h = 50.      # source thickness parameter
    pots = np.zeros(len(ele_xx))
    if dim == 1:
        chrg_x = np.linspace(xlims[0], xlims[1], res)
        csd = csd_profile(chrg_x)
        for ii in range(len(ele_xx)):
            pots[ii] = integrate_1D(ele_xx[ii], chrg_x, csd, h)
        pots /= 2. * sigma  # eq.: 26 from Potworowski et al
        ele_pos = ele_xx
    elif dim == 2:
        # BUGFIX: np.complex was removed in NumPy >= 1.24; the builtin
        # complex() is the documented replacement for mgrid step counts.
        chrg_x, chrg_y = np.mgrid[xlims[0]:xlims[1]:complex(0, res),
                                  ylims[0]:ylims[1]:complex(0, res)]
        csd = csd_profile(chrg_x, chrg_y)
        for ii in range(len(ele_xx)):
            pots[ii] = integrate_2D(ele_xx[ii], ele_yy[ii],
                                    x, y, csd, h, chrg_x, chrg_y)
        pots /= 2 * np.pi * sigma
        ele_pos = np.vstack((ele_xx, ele_yy)).T
    elif dim == 3:
        chrg_x, chrg_y, chrg_z = np.mgrid[
            xlims[0]:xlims[1]:complex(0, res),
            ylims[0]:ylims[1]:complex(0, res),
            zlims[0]:zlims[1]:complex(0, res)]
        csd = csd_profile(chrg_x, chrg_y, chrg_z)
        xlin = chrg_x[:, 0, 0]
        ylin = chrg_y[0, :, 0]
        zlin = chrg_z[0, 0, :]
        for ii in range(len(ele_xx)):
            pots[ii] = integrate_3D(ele_xx[ii], ele_yy[ii], ele_zz[ii],
                                    xlims, ylims, zlims, csd,
                                    xlin, ylin, zlin,
                                    chrg_x, chrg_y, chrg_z)
        pots /= 4 * np.pi * sigma
        ele_pos = np.vstack((ele_xx, ele_yy, ele_zz)).T
    pots = np.reshape(pots, (-1, 1)) * pq.mV
    ele_pos = ele_pos * pq.mm
    # Package the potentials as a neo.AnalogSignal with coordinates.
    lfp = []
    ch = neo.ChannelIndex(index=range(len(pots)))
    for ii in range(len(pots)):
        lfp.append(pots[ii])
    asig = neo.AnalogSignal(np.array(lfp).T, sampling_rate=pq.kHz, units='mV')
    ch.coordinates = ele_pos
    ch.analogsignals.append(asig)
    ch.create_relationship()
    return asig
| bsd-3-clause |
py-geek/City-Air | venv/lib/python2.7/site-packages/allauth/socialaccount/providers/openid/utils.py | 73 | 5352 | import base64
try:
from UserDict import UserDict
except ImportError:
from collections import UserDict
import pickle
from openid.store.interface import OpenIDStore as OIDStore
from openid.association import Association as OIDAssociation
from openid.extensions.sreg import SRegResponse
from openid.extensions.ax import FetchResponse
from allauth.utils import valid_email_or_none
from .models import OpenIDStore, OpenIDNonce
class JSONSafeSession(UserDict):
    """Dict wrapper that stores only JSON-safe (str) values.

    openid puts e.g. class OpenIDServiceEndpoint in the session.
    Django 1.6 no longer pickles stuff, so we'll need to do some
    hacking here: values are pickled and base64-encoded to plain ASCII
    strings on write, and decoded back on read.
    """

    def __init__(self, session):
        # Explicit base call (not super()) keeps Python 2 old-style
        # UserDict working; operate directly on the session mapping.
        UserDict.__init__(self)
        self.data = session

    def __setitem__(self, key, value):
        # pickle -> base64 -> ascii str, which any session backend accepts.
        encoded = base64.b64encode(pickle.dumps(value)).decode('ascii')
        return UserDict.__setitem__(self, key, encoded)

    def __getitem__(self, key):
        # Reverse of __setitem__.  NOTE(review): pickle.loads on session
        # data is only safe because sessions are server-controlled.
        encoded = UserDict.__getitem__(self, key)
        return pickle.loads(base64.b64decode(encoded.encode('ascii')))
class OldAXAttribute:
    # Legacy (pre-axschema.org) attribute-exchange type URIs.
    PERSON_NAME = 'http://openid.net/schema/namePerson'
    PERSON_FIRST_NAME = 'http://openid.net/schema/namePerson/first'
    PERSON_LAST_NAME = 'http://openid.net/schema/namePerson/last'


class AXAttribute:
    # Current axschema.org attribute-exchange type URIs.
    CONTACT_EMAIL = 'http://axschema.org/contact/email'
    PERSON_NAME = 'http://axschema.org/namePerson'
    PERSON_FIRST_NAME = 'http://axschema.org/namePerson/first'
    PERSON_LAST_NAME = 'http://axschema.org/namePerson/last'


# AX attributes queried in preference order: current URIs first,
# then the legacy fallbacks.
AXAttributes = [
    AXAttribute.CONTACT_EMAIL,
    AXAttribute.PERSON_NAME,
    AXAttribute.PERSON_FIRST_NAME,
    AXAttribute.PERSON_LAST_NAME,
    OldAXAttribute.PERSON_NAME,
    OldAXAttribute.PERSON_FIRST_NAME,
    OldAXAttribute.PERSON_LAST_NAME,
]


class SRegField:
    # Simple Registration (SReg) extension field names.
    EMAIL = 'email'
    NAME = 'fullname'


# SReg fields requested from the provider.
SRegFields = [
    SRegField.EMAIL,
    SRegField.NAME,
]
class DBOpenIDStore(OIDStore):
    """python-openid store backed by the OpenIDStore/OpenIDNonce models.

    Associations are persisted with their secret base64-encoded so the
    binary blob fits in a text column.
    """

    # Maximum age of a stored nonce, in seconds (6 hours).
    max_nonce_age = 6 * 60 * 60

    def storeAssociation(self, server_url, assoc=None):
        """Persist a new association for *server_url*."""
        OpenIDStore.objects.create(
            server_url=server_url,
            handle=assoc.handle,
            # BUGFIX: base64.encodestring was removed in Python 3.9;
            # encodebytes is the direct, behaviour-identical replacement.
            secret=base64.encodebytes(assoc.secret),
            issued=assoc.issued,
            lifetime=assoc.lifetime,
            assoc_type=assoc.assoc_type
        )

    def getAssociation(self, server_url, handle=None):
        """Return the newest live association, expiring stale rows."""
        stored_assocs = OpenIDStore.objects.filter(
            server_url=server_url
        )
        if handle:
            stored_assocs = stored_assocs.filter(handle=handle)
        # BUGFIX: QuerySets are immutable -- order_by() returns a *new*
        # queryset; the original discarded the result, so rows came back
        # in arbitrary order.
        stored_assocs = stored_assocs.order_by('-issued')

        if stored_assocs.count() == 0:
            return None

        return_val = None
        for stored_assoc in stored_assocs:
            assoc = OIDAssociation(
                stored_assoc.handle,
                # BUGFIX: decodestring -> decodebytes (removed in 3.9).
                base64.decodebytes(stored_assoc.secret.encode('utf-8')),
                stored_assoc.issued, stored_assoc.lifetime,
                stored_assoc.assoc_type
            )
            # See:
            # necaris/python3-openid@1abb155c8fc7b508241cbe9d2cae24f18e4a379b
            if hasattr(assoc, 'getExpiresIn'):
                expires_in = assoc.getExpiresIn()
            else:
                expires_in = assoc.expiresIn
            if expires_in == 0:
                # Expired: drop the row as a side effect of the lookup.
                stored_assoc.delete()
            else:
                if return_val is None:
                    # Newest (first) live association wins.
                    return_val = assoc

        return return_val

    def removeAssociation(self, server_url, handle):
        """Delete stored association(s) for *server_url* (and handle)."""
        stored_assocs = OpenIDStore.objects.filter(
            server_url=server_url
        )
        if handle:
            stored_assocs = stored_assocs.filter(handle=handle)
        stored_assocs.delete()

    def useNonce(self, server_url, timestamp, salt):
        """Record a nonce; return False if it was already used."""
        try:
            OpenIDNonce.objects.get(
                server_url=server_url,
                timestamp=timestamp,
                salt=salt
            )
        except OpenIDNonce.DoesNotExist:
            OpenIDNonce.objects.create(
                server_url=server_url,
                timestamp=timestamp,
                salt=salt
            )
            return True
        return False
def get_email_from_response(response):
    """Extract a validated e-mail from an OpenID success response.

    Tries the Simple Registration extension first, then falls back to
    Attribute Exchange.  Returns None when no valid address is found.
    """
    sreg = SRegResponse.fromSuccessResponse(response)
    if sreg:
        email = valid_email_or_none(sreg.get(SRegField.EMAIL))
        if email:
            return email
    ax = FetchResponse.fromSuccessResponse(response)
    if ax:
        try:
            values = ax.get(AXAttribute.CONTACT_EMAIL)
        except KeyError:
            # Attribute not present in the AX response.
            return None
        if values:
            return valid_email_or_none(values[0])
    return None
def get_value_from_response(response, sreg_names=None, ax_names=None):
    """Return the first non-empty value for any of the given attribute names.

    *sreg_names* are tried against the Simple Registration response first;
    only if none yields a truthy value are *ax_names* tried against the
    Attribute Exchange response.  Missing AX attributes are ignored.
    """
    value = None
    if sreg_names:
        sreg = SRegResponse.fromSuccessResponse(response)
        if sreg:
            for candidate in sreg_names:
                value = sreg.get(candidate)
                if value:
                    return value
    if ax_names:
        ax = FetchResponse.fromSuccessResponse(response)
        if ax:
            for candidate in ax_names:
                try:
                    values = ax.get(candidate)
                except KeyError:
                    # Attribute absent from the AX response; try the next.
                    continue
                if values:
                    value = values[0]
                    if value:
                        return value
    return value
| mit |
leohmoraes/tablib | tablib/packages/xlwt3/Workbook.py | 46 | 21132 | '''
Record Order in BIFF8
Workbook Globals Substream
BOF Type = workbook globals
Interface Header
MMS
Interface End
WRITEACCESS
CODEPAGE
DSF
TABID
FNGROUPCOUNT
Workbook Protection Block
WINDOWPROTECT
PROTECT
PASSWORD
PROT4REV
PROT4REVPASS
BACKUP
HIDEOBJ
WINDOW1
DATEMODE
PRECISION
REFRESHALL
BOOKBOOL
FONT +
FORMAT *
XF +
STYLE +
? PALETTE
USESELFS
BOUNDSHEET +
COUNTRY
? Link Table
SST
ExtSST
EOF
'''
from . import BIFFRecords
from . import Style
class Workbook(object):
    """In-memory model of an XLS workbook.

    Collects worksheets, styles, shared strings and formula
    cross-references, then serialises everything to a BIFF8 byte stream
    via :meth:`get_biff_data` in the record order listed in the module
    docstring.  The record order is mandated by the file format — do not
    reorder the emission code.
    """

    #################################################################
    ## Constructor
    #################################################################

    def __init__(self, encoding='ascii', style_compression=0):
        self.encoding = encoding
        self.__owner = 'None'
        self.__country_code = None # 0x07 is Russia :-)
        self.__wnd_protect = 0
        self.__obj_protect = 0
        self.__protect = 0
        self.__backup_on_save = 0
        # for WINDOW1 record
        self.__hpos_twips = 0x01E0
        self.__vpos_twips = 0x005A
        self.__width_twips = 0x3FCF
        self.__height_twips = 0x2A4E
        self.__active_sheet = 0
        self.__first_tab_index = 0
        self.__selected_tabs = 0x01
        self.__tab_width_twips = 0x0258
        self.__wnd_hidden = 0
        self.__wnd_mini = 0
        self.__hscroll_visible = 1
        self.__vscroll_visible = 1
        self.__tabs_visible = 1
        self.__styles = Style.StyleCollection(style_compression)
        self.__dates_1904 = 0
        self.__use_cell_values = 1
        self.__sst = BIFFRecords.SharedStringTable(self.encoding)
        self.__worksheets = []
        self.__worksheet_idx_from_name = {}
        # Bookkeeping for SUPBOOK/EXTERNSHEET/EXTERNNAME records used by
        # inter-sheet formula references and external function calls.
        self.__sheet_refs = {}
        self._supbook_xref = {}
        self._xcall_xref = {}
        self._ownbook_supbookx = None
        self._ownbook_supbook_ref = None
        self._xcall_supbookx = None
        self._xcall_supbook_ref = None

    #################################################################
    ## Properties, "getters", "setters"
    #################################################################

    def get_style_stats(self):
        """Return a copy of the style-compression statistics."""
        return self.__styles.stats[:]

    def set_owner(self, value):
        self.__owner = value

    def get_owner(self):
        return self.__owner

    owner = property(get_owner, set_owner)

    #################################################################

    def set_country_code(self, value):
        self.__country_code = value

    def get_country_code(self):
        return self.__country_code

    country_code = property(get_country_code, set_country_code)

    #################################################################
    # Boolean flags below are stored as 0/1 ints (the on-disk form) and
    # exposed as bools through their properties.

    def set_wnd_protect(self, value):
        self.__wnd_protect = int(value)

    def get_wnd_protect(self):
        return bool(self.__wnd_protect)

    wnd_protect = property(get_wnd_protect, set_wnd_protect)

    #################################################################

    def set_obj_protect(self, value):
        self.__obj_protect = int(value)

    def get_obj_protect(self):
        return bool(self.__obj_protect)

    obj_protect = property(get_obj_protect, set_obj_protect)

    #################################################################

    def set_protect(self, value):
        self.__protect = int(value)

    def get_protect(self):
        return bool(self.__protect)

    protect = property(get_protect, set_protect)

    #################################################################

    def set_backup_on_save(self, value):
        self.__backup_on_save = int(value)

    def get_backup_on_save(self):
        return bool(self.__backup_on_save)

    backup_on_save = property(get_backup_on_save, set_backup_on_save)

    #################################################################
    # Window geometry is clamped to 16 bits, as required by WINDOW1.

    def set_hpos(self, value):
        self.__hpos_twips = value & 0xFFFF

    def get_hpos(self):
        return self.__hpos_twips

    hpos = property(get_hpos, set_hpos)

    #################################################################

    def set_vpos(self, value):
        self.__vpos_twips = value & 0xFFFF

    def get_vpos(self):
        return self.__vpos_twips

    vpos = property(get_vpos, set_vpos)

    #################################################################

    def set_width(self, value):
        self.__width_twips = value & 0xFFFF

    def get_width(self):
        return self.__width_twips

    width = property(get_width, set_width)

    #################################################################

    def set_height(self, value):
        self.__height_twips = value & 0xFFFF

    def get_height(self):
        return self.__height_twips

    height = property(get_height, set_height)

    #################################################################

    def set_active_sheet(self, value):
        # The first visible tab tracks the active sheet.
        self.__active_sheet = value & 0xFFFF
        self.__first_tab_index = self.__active_sheet

    def get_active_sheet(self):
        return self.__active_sheet

    active_sheet = property(get_active_sheet, set_active_sheet)

    #################################################################

    def set_tab_width(self, value):
        self.__tab_width_twips = value & 0xFFFF

    def get_tab_width(self):
        return self.__tab_width_twips

    tab_width = property(get_tab_width, set_tab_width)

    #################################################################
    # Note the inversion: stored as "hidden", exposed as "visible".

    def set_wnd_visible(self, value):
        self.__wnd_hidden = int(not value)

    def get_wnd_visible(self):
        return not bool(self.__wnd_hidden)

    wnd_visible = property(get_wnd_visible, set_wnd_visible)

    #################################################################

    def set_wnd_mini(self, value):
        self.__wnd_mini = int(value)

    def get_wnd_mini(self):
        return bool(self.__wnd_mini)

    wnd_mini = property(get_wnd_mini, set_wnd_mini)

    #################################################################

    def set_hscroll_visible(self, value):
        self.__hscroll_visible = int(value)

    def get_hscroll_visible(self):
        return bool(self.__hscroll_visible)

    hscroll_visible = property(get_hscroll_visible, set_hscroll_visible)

    #################################################################

    def set_vscroll_visible(self, value):
        self.__vscroll_visible = int(value)

    def get_vscroll_visible(self):
        return bool(self.__vscroll_visible)

    vscroll_visible = property(get_vscroll_visible, set_vscroll_visible)

    #################################################################

    def set_tabs_visible(self, value):
        self.__tabs_visible = int(value)

    def get_tabs_visible(self):
        return bool(self.__tabs_visible)

    tabs_visible = property(get_tabs_visible, set_tabs_visible)

    #################################################################

    def set_dates_1904(self, value):
        # Selects the 1904 (Mac) date epoch instead of 1900 (DATEMODE).
        self.__dates_1904 = int(value)

    def get_dates_1904(self):
        return bool(self.__dates_1904)

    dates_1904 = property(get_dates_1904, set_dates_1904)

    #################################################################

    def set_use_cell_values(self, value):
        self.__use_cell_values = int(value)

    def get_use_cell_values(self):
        return bool(self.__use_cell_values)

    use_cell_values = property(get_use_cell_values, set_use_cell_values)

    #################################################################

    def get_default_style(self):
        return self.__styles.default_style

    default_style = property(get_default_style)

    ##################################################################
    ## Methods
    ##################################################################

    def add_style(self, style):
        """Register *style* with the collection; returns its XF index."""
        return self.__styles.add(style)

    def add_str(self, s):
        """Add *s* to the shared string table; returns its SST index."""
        return self.__sst.add_str(s)

    def del_str(self, sst_idx):
        """Decrement the reference count of the SST entry at *sst_idx*."""
        self.__sst.del_str(sst_idx)

    def str_index(self, s):
        """Return the SST index of *s*."""
        return self.__sst.str_index(s)

    def add_sheet(self, sheetname, cell_overwrite_ok=False):
        """Create and append a new worksheet; returns the Worksheet object.

        Raises on an invalid name or on a case-insensitive duplicate
        (Excel treats sheet names case-insensitively).
        """
        from . import Worksheet, Utils
        if not isinstance(sheetname, str):
            sheetname = sheetname.decode(self.encoding)
        if not Utils.valid_sheet_name(sheetname):
            raise Exception("invalid worksheet name %r" % sheetname)
        lower_name = sheetname.lower()
        if lower_name in self.__worksheet_idx_from_name:
            raise Exception("duplicate worksheet name %r" % sheetname)
        self.__worksheet_idx_from_name[lower_name] = len(self.__worksheets)
        self.__worksheets.append(Worksheet(sheetname, self, cell_overwrite_ok))
        return self.__worksheets[-1]

    def get_sheet(self, sheetnum):
        """Return the worksheet at 0-based index *sheetnum*."""
        return self.__worksheets[sheetnum]

    def raise_bad_sheetname(self, sheetname):
        raise Exception("Formula: unknown sheet name %s" % sheetname)

    def convert_sheetindex(self, strg_ref, n_sheets):
        """Validate a numeric sheet reference string; return it as an int."""
        idx = int(strg_ref)
        if 0 <= idx < n_sheets:
            return idx
        msg = "Formula: sheet index (%s) >= number of sheets (%d)" % (strg_ref, n_sheets)
        raise Exception(msg)

    def _get_supbook_index(self, tag):
        """Return (allocating on first use) the SUPBOOK index for *tag*."""
        if tag in self._supbook_xref:
            return self._supbook_xref[tag]
        self._supbook_xref[tag] = idx = len(self._supbook_xref)
        return idx

    def setup_ownbook(self):
        """Allocate the SUPBOOK/EXTERNSHEET entries for same-book references."""
        self._ownbook_supbookx = self._get_supbook_index(('ownbook', 0))
        self._ownbook_supbook_ref = None
        # 0xFFFE sheet indexes mark a "self" (own workbook) reference.
        reference = (self._ownbook_supbookx, 0xFFFE, 0xFFFE)
        if reference in self.__sheet_refs:
            raise Exception("can't happen")
        self.__sheet_refs[reference] = self._ownbook_supbook_ref = len(self.__sheet_refs)

    def setup_xcall(self):
        """Allocate the SUPBOOK/EXTERNSHEET entries for external calls."""
        self._xcall_supbookx = self._get_supbook_index(('xcall', 0))
        self._xcall_supbook_ref = None
        reference = (self._xcall_supbookx, 0xFFFE, 0xFFFE)
        if reference in self.__sheet_refs:
            raise Exception("can't happen")
        self.__sheet_refs[reference] = self._xcall_supbook_ref = len(self.__sheet_refs)

    def add_sheet_reference(self, formula):
        """Resolve a formula's sheet/xcall references and patch its bytecode.

        Sheet names (or numeric indexes) are mapped to EXTERNSHEET entries
        and external function names to EXTERNNAME indexes; the resulting
        (offset, index) pairs are written back into the formula's RPN via
        ``formula.patch_references``.
        """
        patches = []
        n_sheets = len(self.__worksheets)
        sheet_refs, xcall_refs = formula.get_references()
        for ref0, ref1, offset in sheet_refs:
            if not ref0.isdigit():
                try:
                    ref0n = self.__worksheet_idx_from_name[ref0.lower()]
                except KeyError:
                    self.raise_bad_sheetname(ref0)
            else:
                ref0n = self.convert_sheetindex(ref0, n_sheets)
            if ref1 == ref0:
                ref1n = ref0n
            elif not ref1.isdigit():
                try:
                    ref1n = self.__worksheet_idx_from_name[ref1.lower()]
                except KeyError:
                    self.raise_bad_sheetname(ref1)
            else:
                ref1n = self.convert_sheetindex(ref1, n_sheets)
            if ref1n < ref0n:
                msg = "Formula: sheets out of order; %r:%r -> (%d, %d)" \
                    % (ref0, ref1, ref0n, ref1n)
                raise Exception(msg)
            if self._ownbook_supbookx is None:
                self.setup_ownbook()
            reference = (self._ownbook_supbookx, ref0n, ref1n)
            if reference in self.__sheet_refs:
                patches.append((offset, self.__sheet_refs[reference]))
            else:
                nrefs = len(self.__sheet_refs)
                if nrefs > 65535:
                    raise Exception('More than 65536 inter-sheet references')
                self.__sheet_refs[reference] = nrefs
                patches.append((offset, nrefs))
        for funcname, offset in xcall_refs:
            if self._ownbook_supbookx is None:
                self.setup_ownbook()
            if self._xcall_supbookx is None:
                self.setup_xcall()
            # print funcname, self._supbook_xref
            patches.append((offset, self._xcall_supbook_ref))
            if not isinstance(funcname, str):
                funcname = funcname.decode(self.encoding)
            if funcname in self._xcall_xref:
                idx = self._xcall_xref[funcname]
            else:
                self._xcall_xref[funcname] = idx = len(self._xcall_xref)
            # EXTERNNAME indexes are 1-based, hence idx + 1.
            patches.append((offset + 2, idx + 1))
        formula.patch_references(patches)

    ##################################################################
    ## BIFF records generation
    ##################################################################
    # Each __*_rec helper returns the raw bytes of one BIFF record.

    def __bof_rec(self):
        return BIFFRecords.Biff8BOFRecord(BIFFRecords.Biff8BOFRecord.BOOK_GLOBAL).get()

    def __eof_rec(self):
        return BIFFRecords.EOFRecord().get()

    def __intf_hdr_rec(self):
        return BIFFRecords.InteraceHdrRecord().get()

    def __intf_end_rec(self):
        return BIFFRecords.InteraceEndRecord().get()

    def __intf_mms_rec(self):
        return BIFFRecords.MMSRecord().get()

    def __write_access_rec(self):
        return BIFFRecords.WriteAccessRecord(self.__owner).get()

    def __wnd_protect_rec(self):
        return BIFFRecords.WindowProtectRecord(self.__wnd_protect).get()

    def __obj_protect_rec(self):
        return BIFFRecords.ObjectProtectRecord(self.__obj_protect).get()

    def __protect_rec(self):
        return BIFFRecords.ProtectRecord(self.__protect).get()

    def __password_rec(self):
        return BIFFRecords.PasswordRecord().get()

    def __prot4rev_rec(self):
        return BIFFRecords.Prot4RevRecord().get()

    def __prot4rev_pass_rec(self):
        return BIFFRecords.Prot4RevPassRecord().get()

    def __backup_rec(self):
        return BIFFRecords.BackupRecord(self.__backup_on_save).get()

    def __hide_obj_rec(self):
        return BIFFRecords.HideObjRecord().get()

    def __window1_rec(self):
        # Pack the window option flags into the WINDOW1 bitfield.
        flags = 0
        flags |= (self.__wnd_hidden) << 0
        flags |= (self.__wnd_mini) << 1
        flags |= (self.__hscroll_visible) << 3
        flags |= (self.__vscroll_visible) << 4
        flags |= (self.__tabs_visible) << 5
        return BIFFRecords.Window1Record(self.__hpos_twips, self.__vpos_twips,
            self.__width_twips, self.__height_twips,
            flags,
            self.__active_sheet, self.__first_tab_index,
            self.__selected_tabs, self.__tab_width_twips).get()

    def __codepage_rec(self):
        return BIFFRecords.CodepageBiff8Record().get()

    def __country_rec(self):
        # COUNTRY is optional; emit nothing unless a code was set.
        if not self.__country_code:
            return b''
        return BIFFRecords.CountryRecord(self.__country_code, self.__country_code).get()

    def __dsf_rec(self):
        return BIFFRecords.DSFRecord().get()

    def __tabid_rec(self):
        return BIFFRecords.TabIDRecord(len(self.__worksheets)).get()

    def __fngroupcount_rec(self):
        return BIFFRecords.FnGroupCountRecord().get()

    def __datemode_rec(self):
        return BIFFRecords.DateModeRecord(self.__dates_1904).get()

    def __precision_rec(self):
        return BIFFRecords.PrecisionRecord(self.__use_cell_values).get()

    def __refresh_all_rec(self):
        return BIFFRecords.RefreshAllRecord().get()

    def __bookbool_rec(self):
        return BIFFRecords.BookBoolRecord().get()

    def __all_fonts_num_formats_xf_styles_rec(self):
        return self.__styles.get_biff_data()

    def __palette_rec(self):
        # PALETTE record not implemented; an empty payload means Excel
        # uses its default colour palette.
        result = b''
        return result

    def __useselfs_rec(self):
        return BIFFRecords.UseSelfsRecord().get()

    def __boundsheets_rec(self, data_len_before, data_len_after, sheet_biff_lens):
        """Build the BOUNDSHEET records, which carry each sheet's absolute
        stream offset; offsets are computed from the byte lengths of the
        data emitted before/after this section and of each sheet substream.
        """
        # .................................
        # BOUNDSEHEET0
        # BOUNDSEHEET1
        # BOUNDSEHEET2
        # ..................................
        # WORKSHEET0
        # WORKSHEET1
        # WORKSHEET2
        # First pass: total size of the BOUNDSHEET section itself, since the
        # first worksheet starts right after it.
        boundsheets_len = 0
        for sheet in self.__worksheets:
            boundsheets_len += len(BIFFRecords.BoundSheetRecord(
                0x00, sheet.visibility, sheet.name, self.encoding
                ).get())
        start = data_len_before + boundsheets_len + data_len_after
        # Second pass: emit the records with the real offsets.
        result = b''
        for sheet_biff_len, sheet in zip(sheet_biff_lens, self.__worksheets):
            result += BIFFRecords.BoundSheetRecord(
                start, sheet.visibility, sheet.name, self.encoding
                ).get()
            start += sheet_biff_len
        return result

    def __all_links_rec(self):
        """Build the Link Table: SUPBOOK, EXTERNNAME and EXTERNSHEET records
        in SUPBOOK-index order."""
        pieces = []
        # Get SUPBOOKs in index order.
        temp = [(idx, tag) for tag, idx in list(self._supbook_xref.items())]
        temp.sort()
        for idx, tag in temp:
            stype, snum = tag
            if stype == 'ownbook':
                rec = BIFFRecords.InternalReferenceSupBookRecord(len(self.__worksheets)).get()
                pieces.append(rec)
            elif stype == 'xcall':
                rec = BIFFRecords.XcallSupBookRecord().get()
                pieces.append(rec)
                # EXTERNNAME records follow their SUPBOOK, in index order.
                temp = [(idx, name) for name, idx in list(self._xcall_xref.items())]
                temp.sort()
                for idx, name in temp:
                    rec = BIFFRecords.ExternnameRecord(
                        options=0, index=0, name=name, fmla='\x02\x00\x1c\x17').get()
                    pieces.append(rec)
            else:
                raise Exception('unknown supbook stype %r' % stype)
        if len(self.__sheet_refs) > 0:
            # get references in index order
            temp = [(idx, ref) for ref, idx in list(self.__sheet_refs.items())]
            temp.sort()
            temp = [ref for idx, ref in temp]
            externsheet_record = BIFFRecords.ExternSheetRecord(temp).get()
            pieces.append(externsheet_record)
        return b''.join(pieces)

    def __sst_rec(self):
        return self.__sst.get_biff_record()

    def __ext_sst_rec(self, abs_stream_pos):
        # EXTSST (SST index) not implemented; an empty payload is accepted.
        return b''
        #return BIFFRecords.ExtSSTRecord(abs_stream_pos, self.sst_record.str_placement,
        #self.sst_record.portions_len).get()

    def get_biff_data(self):
        """Serialise the whole workbook and return the BIFF8 byte stream.

        Record order follows the module docstring.  The boundsheet/EXTSST
        records depend on absolute stream offsets, so the sheet substreams
        are rendered first to learn their lengths.
        """
        before = b''
        before += self.__bof_rec()
        before += self.__intf_hdr_rec()
        before += self.__intf_mms_rec()
        before += self.__intf_end_rec()
        before += self.__write_access_rec()
        before += self.__codepage_rec()
        before += self.__dsf_rec()
        before += self.__tabid_rec()
        before += self.__fngroupcount_rec()
        before += self.__wnd_protect_rec()
        before += self.__protect_rec()
        before += self.__obj_protect_rec()
        before += self.__password_rec()
        before += self.__prot4rev_rec()
        before += self.__prot4rev_pass_rec()
        before += self.__backup_rec()
        before += self.__hide_obj_rec()
        before += self.__window1_rec()
        before += self.__datemode_rec()
        before += self.__precision_rec()
        before += self.__refresh_all_rec()
        before += self.__bookbool_rec()
        before += self.__all_fonts_num_formats_xf_styles_rec()
        before += self.__palette_rec()
        before += self.__useselfs_rec()

        country            = self.__country_rec()
        all_links          = self.__all_links_rec()

        shared_str_table   = self.__sst_rec()
        after = country + all_links + shared_str_table

        ext_sst = self.__ext_sst_rec(0) # need fake cause we need calc stream pos
        eof = self.__eof_rec()

        # Render each sheet substream to learn its length; BOUNDSHEET
        # records need those lengths to compute absolute offsets.
        self.__worksheets[self.__active_sheet].selected = True
        sheets = b''
        sheet_biff_lens = []
        for sheet in self.__worksheets:
            data = sheet.get_biff_data()
            sheets += data
            sheet_biff_lens.append(len(data))

        bundlesheets = self.__boundsheets_rec(len(before), len(after)+len(ext_sst)+len(eof), sheet_biff_lens)

        sst_stream_pos = len(before) + len(bundlesheets) + len(country) + len(all_links)
        ext_sst = self.__ext_sst_rec(sst_stream_pos)

        return before + bundlesheets + after + ext_sst + eof + sheets

    def save(self, filename):
        """Write the workbook to *filename* as an OLE2 compound document."""
        from . import CompoundDoc
        doc = CompoundDoc.XlsDoc()
        doc.save(filename, self.get_biff_data())
| mit |
twc-openstack/jenkins-job-builder | tests/cmd/subcommands/test_update.py | 6 | 5554 | # Joint copyright:
# - Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# The goal of these tests is to check that given a particular set of flags to
# Jenkins Job Builder's command line tools it will result in a particular set
# of actions by the JJB library, usually through interaction with the
# python-jenkins library.
import os
import six
from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
@mock.patch('jenkins_jobs.builder.JenkinsManager.get_plugins_info',
            mock.MagicMock)
class UpdateTests(CmdTestsBase):
    """Tests for the ``jenkins-jobs update`` subcommand.

    NOTE: with stacked ``@mock.patch`` decorators the mock objects are
    passed to the test method in bottom-up order, so the parameter order
    below is load-bearing and must match the decorator stack.
    """

    @mock.patch('jenkins_jobs.builder.jenkins.Jenkins.job_exists')
    @mock.patch('jenkins_jobs.builder.jenkins.Jenkins.get_jobs')
    @mock.patch('jenkins_jobs.builder.jenkins.Jenkins.reconfig_job')
    def test_update_jobs(self,
                         jenkins_reconfig_job,
                         jenkins_get_jobs,
                         jenkins_job_exists, ):
        """
        Test update_job is called
        """
        path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
        args = ['--conf', self.default_config_file, 'update', path]

        self.execute_jenkins_jobs_with_args(args)

        # Every job defined in the fixture must be reconfigured, in any order.
        jenkins_reconfig_job.assert_has_calls(
            [mock.call(job_name, mock.ANY)
             for job_name in ['bar001', 'bar002', 'baz001', 'bam001']],
            any_order=True
        )

    @mock.patch('jenkins_jobs.builder.JenkinsManager.is_job',
                return_value=True)
    @mock.patch('jenkins_jobs.builder.JenkinsManager.get_jobs')
    @mock.patch('jenkins_jobs.builder.JenkinsManager.get_job_md5')
    @mock.patch('jenkins_jobs.builder.JenkinsManager.update_job')
    def test_update_jobs_decode_job_output(self, update_job_mock,
                                           get_job_md5_mock, get_jobs_mock,
                                           is_job_mock):
        """
        Test that job xml output has been decoded before attempting to update
        """
        # don't care about the value returned here
        update_job_mock.return_value = ([], 0)

        path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
        args = ['--conf', self.default_config_file, 'update', path]

        self.execute_jenkins_jobs_with_args(args)
        # The XML passed to update_job must already be text, not bytes.
        self.assertTrue(isinstance(update_job_mock.call_args[0][1],
                                   six.text_type))

    @mock.patch('jenkins_jobs.builder.jenkins.Jenkins.job_exists')
    @mock.patch('jenkins_jobs.builder.jenkins.Jenkins.get_jobs')
    @mock.patch('jenkins_jobs.builder.jenkins.Jenkins.reconfig_job')
    @mock.patch('jenkins_jobs.builder.jenkins.Jenkins.delete_job')
    def test_update_jobs_and_delete_old(self,
                                        jenkins_delete_job,
                                        jenkins_reconfig_job,
                                        jenkins_get_jobs,
                                        jenkins_job_exists, ):
        """
        Test update behaviour with --delete-old option

        * mock out a call to jenkins.Jenkins.get_jobs() to return a known list
          of job names.
        * mock out a call to jenkins.Jenkins.reconfig_job() and
          jenkins.Jenkins.delete_job() to detect calls being made to determine
          that JJB does correctly delete the jobs it should delete when passed
          a specific set of inputs.
        * mock out a call to jenkins.Jenkins.job_exists() to always return
          True.
        """
        yaml_jobs = ['bar001', 'bar002', 'baz001', 'bam001']
        extra_jobs = ['old_job001', 'old_job002', 'unmanaged']

        path = os.path.join(self.fixtures_path, 'cmd-002.yaml')
        args = ['--conf', self.default_config_file, 'update', '--delete-old',
                path]

        jenkins_get_jobs.return_value = [{'name': name}
                                         for name in yaml_jobs + extra_jobs]

        # 'unmanaged' is reported as not managed by JJB and must survive.
        with mock.patch('jenkins_jobs.builder.JenkinsManager.is_managed',
                        side_effect=(lambda name: name != 'unmanaged')):
            self.execute_jenkins_jobs_with_args(args)

        jenkins_reconfig_job.assert_has_calls(
            [mock.call(job_name, mock.ANY) for job_name in yaml_jobs],
            any_order=True
        )
        calls = [mock.call(name) for name in extra_jobs if name != 'unmanaged']
        jenkins_delete_job.assert_has_calls(calls)

        # to ensure only the calls we expected were made, have to check
        # there were no others, as no API call for assert_has_only_calls
        # NOTE(review): assertEquals is a deprecated alias of assertEqual.
        self.assertEquals(jenkins_delete_job.call_count, len(calls))

    def test_update_timeout_not_set(self):
        """Validate update timeout behavior when timeout not explicitly configured.
        """
        self.skipTest("TODO: Develop actual update timeout test approach.")

    def test_update_timeout_set(self):
        """Validate update timeout behavior when timeout is explicitly configured.
        """
        self.skipTest("TODO: Develop actual update timeout test approach.")
| apache-2.0 |
from __future__ import print_function

# Long description used on PyPI / in package metadata.
DESCRIPTION = """\
ACQ4 is a python-based platform for experimental neurophysiology.

It includes support for standard electrophysiology, multiphoton imaging,
scanning laser photostimulation, and many other experimental techniques. ACQ4 is
highly modular and extensible, allowing support to be added for new types of
devices, techniques, user-interface modules, and analyses.
"""

import os, sys, re, shutil
from subprocess import check_output

# Prefer setuptools, but fall back to plain distutils when unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
import distutils.sysconfig as sysconfig

# Static metadata passed to setup() as keyword arguments.
setupOpts = dict(
    name='acq4',
    description='Neurophysiology acquisition and analysis platform',
    long_description=DESCRIPTION,
    license='MIT',
    url='http://www.acq4.org',
    author='Luke Campagnola',
    author_email='luke.campagnola@gmail.com',
)
## generate list of all sub-packages
# Every directory under acq4/ containing an __init__.py becomes a package
# entry like "acq4.devices.Camera".
path = os.path.abspath(os.path.dirname(__file__))
n = len(path.split(os.path.sep))
subdirs = [i[0].split(os.path.sep)[n:] for i in os.walk(os.path.join(path, 'acq4')) if '__init__.py' in i[2]]
allPackages = ['.'.join(p) for p in subdirs]

## Make sure build directory is clean before installing
buildPath = os.path.join(path, 'build')
if os.path.isdir(buildPath):
    shutil.rmtree(buildPath)

## Determine current version string from acq4/__init__.py
initfile = os.path.join(path, 'acq4', '__init__.py')
init = open(initfile).read()
m = re.search(r'__version__ = (\S+)\n', init)
if m is None or len(m.groups()) != 1:
    raise Exception("Cannot determine __version__ from init file: '%s'!" % initfile)
version = m.group(1).strip('\'\"')
initVersion = version

# If this is a git checkout, try to generate a more descriptive version string
try:
    if os.path.isdir(os.path.join(path, '.git')):
        def gitCommit(name):
            """Return the full commit hash that *name* resolves to."""
            commit = check_output(['git', 'show', name], universal_newlines=True).split('\n')[0]
            assert commit[:7] == 'commit '
            return commit[7:]

        # Find last tag matching "acq4-.*"
        tagNames = check_output(['git', 'tag'], universal_newlines=True).strip().split('\n')
        while True:
            if len(tagNames) == 0:
                raise Exception("Could not determine last tagged version.")
            lastTagName = tagNames.pop()
            if re.match(r'acq4-.*', lastTagName):
                break

        # is this commit an unchanged checkout of the last tagged version?
        lastTag = gitCommit(lastTagName)
        head = gitCommit('HEAD')
        if head != lastTag:
            # Not on the tag: append "<branch>-<short-hash>" to the version.
            branch = re.search(r'\* (.*)', check_output(['git', 'branch'], universal_newlines=True)).group(1)
            version = version + "-%s-%s" % (branch, head[:10])

        # any uncommitted modifications?  Mark them with a trailing '+'.
        modified = False
        status = check_output(['git', 'status', '-s'], universal_newlines=True).strip().split('\n')
        for line in status:
            if line.strip() != '' and line[:2] != '??':
                modified = True
                break

        if modified:
            version = version + '+'
    sys.stderr.write("Detected git commit; will use version string: '%s'\n" % version)
except:
    # Any git failure falls back to the plain version from __init__.py.
    version = initVersion
    sys.stderr.write("This appears to be a git checkout, but an error occurred "
                     "while attempting to determine a version string for the "
                     "current commit.\nUsing the unmodified version string "
                     "instead: '%s'\n" % version)
    sys.excepthook(*sys.exc_info())

print("__init__ version: %s  current version: %s" % (initVersion, version))
if 'upload' in sys.argv and version != initVersion:
    print("Base version does not match current; stubbornly refusing to upload.")
    exit()
import distutils.command.build


class Build(distutils.command.build.build):
    """Custom 'build' command that patches generated files after building.

    It rewrites __version__ in the built acq4/__init__.py with the
    git-derived version string, and on Windows fixes the launcher batch
    file to use the exact python executable running this setup.
    """

    def run(self):
        ret = distutils.command.build.build.run(self)

        # If the version in __init__ is different from the automatically-generated
        # version string, then we will update __init__ in the build directory
        global path, version, initVersion
        if initVersion == version:
            return ret

        initfile = os.path.join(path, self.build_lib, 'acq4', '__init__.py')
        if not os.path.isfile(initfile):
            sys.stderr.write("Warning: setup detected a git install and attempted "
                             "to generate a descriptive version string; however, "
                             "the expected build file at %s was not found. "
                             "Installation will use the original version string "
                             "%s instead.\n" % (initfile, initVersion)
                             )
        else:
            data = open(initfile, 'r').read()
            open(initfile, 'w').write(re.sub(r"__version__ = .*", "__version__ = '%s'" % version, data))

        # If this is windows, we need to update acq4.bat to reference the correct python executable.
        if sys.platform == 'win32':
            runner = os.path.join(path, self.build_scripts, 'acq4.bat')
            runcmd = "%s -m acq4" % sys.executable
            data = open(runner, 'r').read()
            open(runner, 'w').write(re.sub(r'python -m acq4', runcmd, data))

        return ret
# copy config tree to system location
# if sys.platform == 'win32':
#     dataRoot = os.path.join(os.environ['ProgramFiles'], 'acq4')
# elif sys.platform == 'darwin':
#     dataRoot = 'Library/Application Support/acq4'
# else:
#     dataRoot = '/etc/acq4'
# instead, just install config example to same path as package.
if sys.platform == 'win32':
    #dataRoot = distutils.sysconfig.get_python_lib().replace(sys.prefix, '')
    dataRoot = 'Lib/site-packages/acq4'
else:
    #dataRoot = 'python%d.%d/site-packages/acq4' % (sys.version_info.major, sys.version_info.minor)
    dataRoot = distutils.sysconfig.get_python_lib().replace(sys.prefix+'/', '') + '/acq4'

# Register every file under config/ for installation below dataRoot,
# preserving the directory layout.
dataFiles = []
configRoot = os.path.join(path, 'config')
for subpath, _, files in os.walk(configRoot):
    endPath = subpath[len(path):].lstrip(os.path.sep)
    files = [os.path.join(endPath, f) for f in files]
    dataFiles.append((os.path.join(dataRoot, endPath), files))

# Collect non-python data files that live inside the package tree
# (paths are stored relative to the acq4 package root).
packageData = []
pkgRoot = os.path.join(path, 'acq4')
for subpath, _, files in os.walk(pkgRoot):
    for f in files:
        # FIX: dropped the unused leftover local `addTo = None`, and replaced
        # the manual loop over extensions with str.endswith on a tuple --
        # behavior is identical because no extension is a suffix of another,
        # so at most one could ever match.
        if f.endswith(('.png', '.cache', '.h', '.hpp', '.dll')):
            packageData.append(os.path.join(subpath, f)[len(pkgRoot):].lstrip(os.path.sep))

# Platform-specific launcher script.
if sys.platform == 'win32':
    scripts = ['bin/acq4.bat']
else:
    scripts = ['bin/acq4']
# Hand everything to setuptools/distutils; setupOpts supplies the static
# metadata, while version/packages/data were computed above.
setup(
    version=version,
    cmdclass={'build': Build},
    packages=allPackages,
    package_dir={},
    package_data={'acq4': packageData},
    data_files=dataFiles,
    classifiers = [
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Development Status :: 4 - Beta",
        "Environment :: Other Environment",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Scientific/Engineering",
    ],
    install_requires = [
        'numpy',
        'scipy',
        'h5py',
        'pillow',
    ],
    scripts = scripts,
    **setupOpts
)
| mit |
Edraak/edx-platform | common/djangoapps/terrain/stubs/comments.py | 85 | 5565 | """
Stub implementation of cs_comments_service for acceptance tests
"""
import re
import urlparse
from .http import StubHttpRequestHandler, StubHttpService
class StubCommentsServiceHandler(StubHttpRequestHandler):
    """Request handler emulating the cs_comments_service REST API.

    Responses are driven by ``self.server.config``, which acceptance tests
    populate (via the stub's /set_config endpoint) with canned threads,
    comments and search results.
    """

    @property
    def _params(self):
        # Query-string parameters parsed as {name: [values]}.
        return urlparse.parse_qs(urlparse.urlparse(self.path).query)

    def do_GET(self):
        """Dispatch GET requests by URL regex; 404 when nothing matches."""
        pattern_handlers = {
            "/api/v1/users/(?P<user_id>\\d+)/active_threads$": self.do_user_profile,
            "/api/v1/users/(?P<user_id>\\d+)$": self.do_user,
            "/api/v1/search/threads$": self.do_search_threads,
            "/api/v1/threads$": self.do_threads,
            "/api/v1/threads/(?P<thread_id>\\w+)$": self.do_thread,
            "/api/v1/comments/(?P<comment_id>\\w+)$": self.do_comment,
            "/api/v1/(?P<commentable_id>\\w+)/threads$": self.do_commentable,
        }

        if self.match_pattern(pattern_handlers):
            return
        self.send_response(404, content="404 Not Found")

    def match_pattern(self, pattern_handlers):
        """Call the first handler whose regex matches the request path.

        Named groups from the regex are passed to the handler as keyword
        arguments.  Returns True when a handler ran, None otherwise.
        """
        path = urlparse.urlparse(self.path).path
        for pattern in pattern_handlers:
            match = re.match(pattern, path)
            if match:
                pattern_handlers[pattern](**match.groupdict())
                return True
        return None

    def do_PUT(self):
        """Handle PUT: stub reconfiguration or user update; 204 otherwise."""
        if self.path.startswith('/set_config'):
            return StubHttpRequestHandler.do_PUT(self)
        pattern_handlers = {
            "/api/v1/users/(?P<user_id>\\d+)$": self.do_put_user,
        }
        if self.match_pattern(pattern_handlers):
            return
        self.send_response(204, "")

    def do_put_user(self, user_id):
        """Store the user's default_sort_key and echo back identifiers."""
        self.server.config['default_sort_key'] = self.post_dict.get("default_sort_key", "date")
        self.send_json_response({'username': self.post_dict.get("username"), 'external_id': self.post_dict.get("external_id")})

    def do_DELETE(self):
        """Handle DELETE; only comment deletion is modelled."""
        pattern_handlers = {
            "/api/v1/comments/(?P<comment_id>\\w+)$": self.do_delete_comment
        }
        if self.match_pattern(pattern_handlers):
            return
        self.send_json_response({})

    def do_user(self, user_id):
        """Return a minimal user document; counts only when course_id given."""
        response = {
            "id": user_id,
            "default_sort_key": self.server.config.get("default_sort_key", "date"),
            "upvoted_ids": [],
            "downvoted_ids": [],
            "subscribed_thread_ids": [],
        }
        if 'course_id' in self._params:
            response.update({
                "threads_count": 1,
                "comments_count": 2
            })
        self.send_json_response(response)

    def do_user_profile(self, user_id):
        """Return a paginated slice of the configured active_threads."""
        if 'active_threads' in self.server.config:
            user_threads = self.server.config['active_threads'][:]
            params = self._params
            page = int(params.get("page", ["1"])[0])
            per_page = int(params.get("per_page", ["20"])[0])
            num_pages = max(len(user_threads) - 1, 1) / per_page + 1
            user_threads = user_threads[(page - 1) * per_page:page * per_page]
            self.send_json_response({
                "collection": user_threads,
                "page": page,
                "num_pages": num_pages
            })
        else:
            self.send_response(404, content="404 Not Found")

    def do_thread(self, thread_id):
        """Return one configured thread, honoring recursive/resp_skip/resp_limit."""
        if thread_id in self.server.config.get('threads', {}):
            thread = self.server.config['threads'][thread_id].copy()
            params = urlparse.parse_qs(urlparse.urlparse(self.path).query)
            if "recursive" in params and params["recursive"][0] == "True":
                thread.setdefault('children', [])
                resp_total = thread.setdefault('resp_total', len(thread['children']))
                resp_skip = int(params.get("resp_skip", ["0"])[0])
                resp_limit = int(params.get("resp_limit", ["10000"])[0])
                thread['children'] = thread['children'][resp_skip:(resp_skip + resp_limit)]
            self.send_json_response(thread)
        else:
            self.send_response(404, content="404 Not Found")

    def do_threads(self):
        """Return all configured threads as a single page."""
        threads = self.server.config.get('threads', {})
        threads_data = [val for key, val in threads.items()]
        self.send_json_response({"collection": threads_data, "page": 1, "num_pages": 1})

    def do_search_threads(self):
        """Return the pre-configured search result document."""
        self.send_json_response(self.server.config.get('search_result', {}))

    def do_comment(self, comment_id):
        # django_comment_client calls GET comment before doing a DELETE, so that's what this is here to support.
        # NOTE: unknown comment ids fall through without any response.
        if comment_id in self.server.config.get('comments', {}):
            comment = self.server.config['comments'][comment_id]
            self.send_json_response(comment)

    def do_delete_comment(self, comment_id):
        """Handle comment deletion. Returns a JSON representation of the
        deleted comment."""
        if comment_id in self.server.config.get('comments', {}):
            comment = self.server.config['comments'][comment_id]
            self.send_json_response(comment)

    def do_commentable(self, commentable_id):
        """Return all configured threads belonging to one commentable."""
        self.send_json_response({
            "collection": [
                thread
                for thread in self.server.config.get('threads', {}).values()
                if thread.get('commentable_id') == commentable_id
            ],
            "page": 1,
            "num_pages": 1,
        })
class StubCommentsService(StubHttpService):
    """Stub HTTP service that serves requests with StubCommentsServiceHandler."""
    HANDLER_CLASS = StubCommentsServiceHandler
| agpl-3.0 |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.3.0/Lib/email/quoprimime.py | 54 | 10726 | # Copyright (C) 2001-2006 Python Software Foundation
# Author: Ben Gertzfield
# Contact: email-sig@python.org
"""Quoted-printable content transfer encoding per RFCs 2045-2047.
This module handles the content transfer encoding method defined in RFC 2045
to encode US ASCII-like 8-bit data called `quoted-printable'. It is used to
safely encode text that is in a character set similar to the 7-bit US ASCII
character set, but that includes some 8-bit characters that are normally not
allowed in email bodies or headers.
Quoted-printable is very space-inefficient for encoding binary files; use the
email.base64mime module for that instead.
This module provides an interface to encode and decode both headers and bodies
with quoted-printable encoding.
RFC 2045 defines a method for including character set information in an
`encoded-word' in a header. This method is commonly used for 8-bit real names
in To:/From:/Cc: etc. fields, as well as Subject: lines.
This module does not do the line wrapping or end-of-line character
conversion necessary for proper internationalized headers; it only
does dumb encoding and decoding. To deal with the various line
wrapping issues, use the email.header module.
"""
__all__ = [
'body_decode',
'body_encode',
'body_length',
'decode',
'decodestring',
'header_decode',
'header_encode',
'header_length',
'quote',
'unquote',
]
import re
import io
from string import ascii_letters, digits, hexdigits
CRLF = '\r\n'
NL = '\n'
EMPTYSTRING = ''
# Build a mapping of octets to the expansion of that octet. Since we're only
# going to have 256 of these things, this isn't terribly inefficient
# space-wise. Remember that headers and bodies have different sets of safe
# characters. Initialize both maps with the full expansion, and then override
# the safe bytes with the more compact form.
_QUOPRI_HEADER_MAP = dict((c, '=%02X' % c) for c in range(256))
_QUOPRI_BODY_MAP = _QUOPRI_HEADER_MAP.copy()
# Safe header bytes which need no encoding.
for c in b'-!*+/' + ascii_letters.encode('ascii') + digits.encode('ascii'):
_QUOPRI_HEADER_MAP[c] = chr(c)
# Headers have one other special encoding; spaces become underscores.
_QUOPRI_HEADER_MAP[ord(' ')] = '_'
# Safe body bytes which need no encoding.
for c in (b' !"#$%&\'()*+,-./0123456789:;<>'
b'?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`'
b'abcdefghijklmnopqrstuvwxyz{|}~\t'):
_QUOPRI_BODY_MAP[c] = chr(c)
# Helpers
def header_check(octet):
    """Return True if the octet should be escaped with header quopri."""
    # Safe header bytes map to themselves; everything else expands to =XX.
    return _QUOPRI_HEADER_MAP[octet] != chr(octet)
def body_check(octet):
    """Return True if the octet should be escaped with body quopri."""
    # Safe body bytes map to themselves; everything else expands to =XX.
    return _QUOPRI_BODY_MAP[octet] != chr(octet)
def header_length(bytearray):
    """Return a header quoted-printable encoding length.

    Note that this does not include any RFC 2047 chrome added by
    `header_encode()`.

    :param bytearray: An array of bytes (a.k.a. octets).
    :return: The length in bytes of the byte array when it is encoded with
        quoted-printable for headers.
    """
    total = 0
    for octet in bytearray:
        total += len(_QUOPRI_HEADER_MAP[octet])
    return total
def body_length(bytearray):
    """Return a body quoted-printable encoding length.

    :param bytearray: An array of bytes (a.k.a. octets).
    :return: The length in bytes of the byte array when it is encoded with
        quoted-printable for bodies.
    """
    total = 0
    for octet in bytearray:
        total += len(_QUOPRI_BODY_MAP[octet])
    return total
def _max_append(L, s, maxlen, extra=''):
if not isinstance(s, str):
s = chr(s)
if not L:
L.append(s.lstrip())
elif len(L[-1]) + len(s) <= maxlen:
L[-1] += extra + s
else:
L.append(s.lstrip())
def unquote(s):
    """Turn a string in the form =AB to the ASCII character with value 0xab"""
    hex_digits = s[1:3]
    return chr(int(hex_digits, 16))
def quote(c):
    """Return the three-character quoted-printable escape (=XX) for c."""
    return '={:02X}'.format(ord(c))
def header_encode(header_bytes, charset='iso-8859-1'):
    """Encode a single header line with quoted-printable (like) encoding.

    Defined in RFC 2045, this `Q' encoding is similar to quoted-printable, but
    used specifically for email header fields to allow charsets with mostly 7
    bit characters (and some 8 bit) to remain more or less readable in non-RFC
    2045 aware mail clients.

    charset names the character set to use in the RFC 2046 header.  It
    defaults to iso-8859-1.
    """
    # Empty input encodes to the empty string.
    if not header_bytes:
        return ''
    # The map already holds the per-octet expansion (identity for safe
    # bytes, =XX otherwise); glue the expansions together and wrap the
    # result in the RFC 2047 encoded-word chrome.
    encoded = ''.join(_QUOPRI_HEADER_MAP[octet] for octet in header_bytes)
    return '=?%s?q?%s?=' % (charset, encoded)
class _body_accumulator(io.StringIO):
    """StringIO that accumulates an encoded body while tracking how much
    room remains on the current output line (for RFC 2045 line wrapping)."""
    def __init__(self, maxlinelen, eol, *args, **kw):
        super().__init__(*args, **kw)
        self.eol = eol
        # `room` counts the characters that still fit on the current line.
        self.maxlinelen = self.room = maxlinelen
    def write_str(self, s):
        """Add string s to the accumulated body."""
        self.write(s)
        self.room -= len(s)
    def newline(self):
        """Write eol, then start new line."""
        self.write_str(self.eol)
        self.room = self.maxlinelen
    def write_soft_break(self):
        """Write a soft break, then start a new line."""
        self.write_str('=')
        self.newline()
    def write_wrapped(self, s, extra_room=0):
        """Add a soft line break if needed, then write s."""
        if self.room < len(s) + extra_room:
            self.write_soft_break()
        self.write_str(s)
    def write_char(self, c, is_last_char):
        """Write one (possibly already-quoted) character, wrapping as needed."""
        if not is_last_char:
            # Another character follows on this line, so we must leave
            # extra room, either for it or a soft break, and whitespace
            # need not be quoted.
            self.write_wrapped(c, extra_room=1)
        elif c not in ' \t':
            # For this and remaining cases, no more characters follow,
            # so there is no need to reserve extra room (since a hard
            # break will immediately follow).
            self.write_wrapped(c)
        elif self.room >= 3:
            # It's a whitespace character at end-of-line, and we have room
            # for the three-character quoted encoding.
            self.write(quote(c))
        elif self.room == 2:
            # There's room for the whitespace character and a soft break.
            self.write(c)
            self.write_soft_break()
        else:
            # There's room only for a soft break. The quoted whitespace
            # will be the only content on the subsequent line.
            self.write_soft_break()
            self.write(quote(c))
def body_encode(body, maxlinelen=76, eol=NL):
    """Encode with quoted-printable, wrapping at maxlinelen characters.

    Each line of encoded text will end with eol, which defaults to "\\n".  Set
    this to "\\r\\n" if you will be using the result of this function directly
    in an email.

    Each line will be wrapped at, at most, maxlinelen characters before the
    eol string (maxlinelen defaults to 76 characters, the maximum value
    permitted by RFC 2045).  Long lines will have the 'soft line break'
    quoted-printable character "=" appended to them, so the decoded text will
    be identical to the original text.

    The minimum maxlinelen is 4 to have room for a quoted character ("=XX")
    followed by a soft line break.  Smaller values will generate a
    ValueError.
    """
    if maxlinelen < 4:
        raise ValueError("maxlinelen must be at least 4")
    if not body:
        return body
    # The last line may or may not end in eol, but all other lines do.
    last_has_eol = (body[-1] in '\r\n')
    # This accumulator will make it easier to build the encoded body.
    encoded_body = _body_accumulator(maxlinelen, eol)
    lines = body.splitlines()
    last_line_no = len(lines) - 1
    for line_no, line in enumerate(lines):
        last_char_index = len(line) - 1
        for i, c in enumerate(line):
            # Quote unsafe octets; safe ones pass through unchanged.
            if body_check(ord(c)):
                c = quote(c)
            encoded_body.write_char(c, i==last_char_index)
        # Add an eol if input line had eol. All input lines have eol except
        # possibly the last one.
        if line_no < last_line_no or last_has_eol:
            encoded_body.newline()
    return encoded_body.getvalue()
# BAW: I'm not sure if the intent was for the signature of this function to be
# the same as base64MIME.decode() or not...
def decode(encoded, eol=NL):
    """Decode a quoted-printable string.

    Lines are separated with eol, which defaults to \\n.

    Trailing whitespace on each input line is stripped before decoding,
    and a lone trailing "=" (a soft line break) joins the line to the
    next one without inserting eol.
    """
    if not encoded:
        return encoded
    # BAW: see comment in encode() above. Again, we're building up the
    # decoded string with string concatenation, which could be done much more
    # efficiently.
    decoded = ''
    for line in encoded.splitlines():
        line = line.rstrip()
        if not line:
            decoded += eol
            continue
        i = 0
        n = len(line)
        while i < n:
            c = line[i]
            if c != '=':
                decoded += c
                i += 1
            # Otherwise, c == "=". Are we at the end of the line? If so, add
            # a soft line break.
            elif i+1 == n:
                i += 1
                continue
            # Decode if in form =AB
            elif i+2 < n and line[i+1] in hexdigits and line[i+2] in hexdigits:
                decoded += unquote(line[i:i+3])
                i += 3
            # Otherwise, not in form =AB, pass literally
            else:
                decoded += c
                i += 1
            if i == n:
                decoded += eol
    # Special case if original string did not end with eol
    # NOTE(review): this strips exactly one character, which is only correct
    # when len(eol) == 1; with eol='\r\n' a stray '\r' would remain --
    # confirm whether multi-character eol is a supported input here.
    if encoded[-1] not in '\r\n' and decoded.endswith(eol):
        decoded = decoded[:-1]
    return decoded
# For convenience and backwards compatibility w/ standard base64 module
body_decode = decode
decodestring = decode
def _unquote_match(match):
    """re.sub() callback: decode an =AB match to its ASCII character."""
    quoted = match.group(0)
    return unquote(quoted)
# Header decoding is done a bit differently
def header_decode(s):
    """Decode a string encoded with RFC 2045 MIME header `Q' encoding.

    This function does not parse a full MIME header value encoded with
    quoted-printable (like =?iso-8895-1?q?Hello_World?=) -- please use
    the high level email.header class for that functionality.
    """
    s = s.replace('_', ' ')
    # BUG FIX: re.sub()'s fourth positional argument is *count*, not
    # *flags*.  The previous code passed re.ASCII (== 256) positionally,
    # which silently capped decoding at the first 256 escapes and never
    # applied the flag at all.  Pass it by keyword, and decode each =AB
    # escape to chr(0xAB) in place.
    return re.sub(r'=[a-fA-F0-9]{2}',
                  lambda match: chr(int(match.group(0)[1:3], 16)),
                  s, flags=re.ASCII)
| mit |
BeATz-UnKNoWN/python-for-android | python3-alpha/python3-src/Lib/xml/sax/expatreader.py | 870 | 14659 | """
SAX driver for the pyexpat C module. This driver works with
pyexpat.__version__ == '2.22'.
"""
version = "0.20"
from xml.sax._exceptions import *
from xml.sax.handler import feature_validation, feature_namespaces
from xml.sax.handler import feature_namespace_prefixes
from xml.sax.handler import feature_external_ges, feature_external_pes
from xml.sax.handler import feature_string_interning
from xml.sax.handler import property_xml_string, property_interning_dict
# xml.parsers.expat does not raise ImportError in Jython
import sys
if sys.platform[:4] == "java":
raise SAXReaderNotAvailable("expat not available in Java", None)
del sys
try:
from xml.parsers import expat
except ImportError:
raise SAXReaderNotAvailable("expat not supported", None)
else:
if not hasattr(expat, "ParserCreate"):
raise SAXReaderNotAvailable("expat not supported", None)
from xml.sax import xmlreader, saxutils, handler
AttributesImpl = xmlreader.AttributesImpl
AttributesNSImpl = xmlreader.AttributesNSImpl
# If we're using a sufficiently recent version of Python, we can use
# weak references to avoid cycles between the parser and content
# handler, otherwise we'll just have to pretend.
try:
import _weakref
except ImportError:
def _mkproxy(o):
return o
else:
import weakref
_mkproxy = weakref.proxy
del weakref, _weakref
# --- ExpatLocator
class ExpatLocator(xmlreader.Locator):
    """Locator for use with the ExpatParser class.

    This uses a weak reference to the parser object to avoid creating
    a circular reference between the parser and the content handler.
    """
    def __init__(self, parser):
        # _mkproxy is weakref.proxy when available, identity otherwise.
        self._ref = _mkproxy(parser)
    def getColumnNumber(self):
        parser = self._ref
        # No live expat parser means no position information.
        if parser._parser is None:
            return None
        return parser._parser.ErrorColumnNumber
    def getLineNumber(self):
        parser = self._ref
        # Default to line 1 when no parser is active.
        if parser._parser is None:
            return 1
        return parser._parser.ErrorLineNumber
    def getPublicId(self):
        parser = self._ref
        if parser is None:
            return None
        return parser._source.getPublicId()
    def getSystemId(self):
        parser = self._ref
        if parser is None:
            return None
        return parser._source.getSystemId()
# --- ExpatParser
class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator):
    """SAX driver for the pyexpat C module."""
    def __init__(self, namespaceHandling=0, bufsize=2**16-20):
        xmlreader.IncrementalParser.__init__(self, bufsize)
        self._source = xmlreader.InputSource()
        self._parser = None
        self._namespaces = namespaceHandling
        self._lex_handler_prop = None
        self._parsing = 0
        self._entity_stack = []
        # External general entities are resolved by default; toggled via
        # feature_external_ges in get/setFeature below.
        self._external_ges = 1
        self._interning = None
    # XMLReader methods
    def parse(self, source):
        "Parse an XML document from a URL or an InputSource."
        source = saxutils.prepare_input_source(source)
        self._source = source
        self.reset()
        self._cont_handler.setDocumentLocator(ExpatLocator(self))
        xmlreader.IncrementalParser.parse(self, source)
    def prepareParser(self, source):
        # Give expat a base URI so relative system ids resolve correctly.
        if source.getSystemId() is not None:
            self._parser.SetBase(source.getSystemId())
    # Redefined setContentHandler to allow changing handlers during parsing
    def setContentHandler(self, handler):
        xmlreader.IncrementalParser.setContentHandler(self, handler)
        if self._parsing:
            self._reset_cont_handler()
    def getFeature(self, name):
        if name == feature_namespaces:
            return self._namespaces
        elif name == feature_string_interning:
            return self._interning is not None
        elif name in (feature_validation, feature_external_pes,
                      feature_namespace_prefixes):
            # expat never supports these three, so they are always off.
            return 0
        elif name == feature_external_ges:
            return self._external_ges
        raise SAXNotRecognizedException("Feature '%s' not recognized" % name)
    def setFeature(self, name, state):
        if self._parsing:
            raise SAXNotSupportedException("Cannot set features while parsing")
        if name == feature_namespaces:
            self._namespaces = state
        elif name == feature_external_ges:
            self._external_ges = state
        elif name == feature_string_interning:
            if state:
                if self._interning is None:
                    self._interning = {}
            else:
                self._interning = None
        elif name == feature_validation:
            if state:
                raise SAXNotSupportedException(
                    "expat does not support validation")
        elif name == feature_external_pes:
            if state:
                raise SAXNotSupportedException(
                    "expat does not read external parameter entities")
        elif name == feature_namespace_prefixes:
            if state:
                raise SAXNotSupportedException(
                    "expat does not report namespace prefixes")
        else:
            raise SAXNotRecognizedException(
                "Feature '%s' not recognized" % name)
    def getProperty(self, name):
        if name == handler.property_lexical_handler:
            return self._lex_handler_prop
        elif name == property_interning_dict:
            return self._interning
        elif name == property_xml_string:
            if self._parser:
                if hasattr(self._parser, "GetInputContext"):
                    return self._parser.GetInputContext()
                else:
                    raise SAXNotRecognizedException(
                        "This version of expat does not support getting"
                        " the XML string")
            else:
                raise SAXNotSupportedException(
                    "XML string cannot be returned when not parsing")
        raise SAXNotRecognizedException("Property '%s' not recognized" % name)
    def setProperty(self, name, value):
        if name == handler.property_lexical_handler:
            self._lex_handler_prop = value
            if self._parsing:
                self._reset_lex_handler_prop()
        elif name == property_interning_dict:
            self._interning = value
        elif name == property_xml_string:
            raise SAXNotSupportedException("Property '%s' cannot be set" %
                                           name)
        else:
            raise SAXNotRecognizedException("Property '%s' not recognized" %
                                            name)
    # IncrementalParser methods
    def feed(self, data, isFinal = 0):
        if not self._parsing:
            self.reset()
            self._parsing = 1
            self._cont_handler.startDocument()
        try:
            # The isFinal parameter is internal to the expat reader.
            # If it is set to true, expat will check validity of the entire
            # document. When feeding chunks, they are not normally final -
            # except when invoked from close.
            self._parser.Parse(data, isFinal)
        except expat.error as e:
            exc = SAXParseException(expat.ErrorString(e.code), e, self)
            # FIXME: when to invoke error()?
            self._err_handler.fatalError(exc)
    def close(self):
        if self._entity_stack:
            # If we are completing an external entity, do nothing here
            return
        # An empty final feed lets expat validate the complete document.
        self.feed("", isFinal = 1)
        self._cont_handler.endDocument()
        self._parsing = 0
        # break cycle created by expat handlers pointing to our methods
        self._parser = None
        bs = self._source.getByteStream()
        if bs is not None:
            bs.close()
    def _reset_cont_handler(self):
        # Re-point expat's callbacks at the (possibly new) content handler.
        self._parser.ProcessingInstructionHandler = \
            self._cont_handler.processingInstruction
        self._parser.CharacterDataHandler = self._cont_handler.characters
    def _reset_lex_handler_prop(self):
        # Install or clear expat's lexical callbacks to match the current
        # lexical-handler property.
        lex = self._lex_handler_prop
        parser = self._parser
        if lex is None:
            parser.CommentHandler = None
            parser.StartCdataSectionHandler = None
            parser.EndCdataSectionHandler = None
            parser.StartDoctypeDeclHandler = None
            parser.EndDoctypeDeclHandler = None
        else:
            parser.CommentHandler = lex.comment
            parser.StartCdataSectionHandler = lex.startCDATA
            parser.EndCdataSectionHandler = lex.endCDATA
            parser.StartDoctypeDeclHandler = self.start_doctype_decl
            parser.EndDoctypeDeclHandler = lex.endDTD
    def reset(self):
        if self._namespaces:
            # The " " argument is expat's namespace separator: element and
            # attribute names then arrive as whitespace-joined
            # "uri localname prefix" strings, which start_element_ns and
            # end_element_ns split apart below.
            self._parser = expat.ParserCreate(self._source.getEncoding(), " ",
                                              intern=self._interning)
            self._parser.namespace_prefixes = 1
            self._parser.StartElementHandler = self.start_element_ns
            self._parser.EndElementHandler = self.end_element_ns
        else:
            self._parser = expat.ParserCreate(self._source.getEncoding(),
                                              intern = self._interning)
            self._parser.StartElementHandler = self.start_element
            self._parser.EndElementHandler = self.end_element
        self._reset_cont_handler()
        self._parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
        self._parser.NotationDeclHandler = self.notation_decl
        self._parser.StartNamespaceDeclHandler = self.start_namespace_decl
        self._parser.EndNamespaceDeclHandler = self.end_namespace_decl
        self._decl_handler_prop = None
        if self._lex_handler_prop:
            self._reset_lex_handler_prop()
        # self._parser.DefaultHandler =
        # self._parser.DefaultHandlerExpand =
        # self._parser.NotStandaloneHandler =
        self._parser.ExternalEntityRefHandler = self.external_entity_ref
        try:
            self._parser.SkippedEntityHandler = self.skipped_entity_handler
        except AttributeError:
            # This pyexpat does not support SkippedEntity
            pass
        self._parser.SetParamEntityParsing(
            expat.XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE)
        self._parsing = 0
        self._entity_stack = []
    # Locator methods
    def getColumnNumber(self):
        if self._parser is None:
            return None
        return self._parser.ErrorColumnNumber
    def getLineNumber(self):
        if self._parser is None:
            return 1
        return self._parser.ErrorLineNumber
    def getPublicId(self):
        return self._source.getPublicId()
    def getSystemId(self):
        return self._source.getSystemId()
    # event handlers
    def start_element(self, name, attrs):
        self._cont_handler.startElement(name, AttributesImpl(attrs))
    def end_element(self, name):
        self._cont_handler.endElement(name)
    def start_element_ns(self, name, attrs):
        # With the " " separator, names are "uri localname prefix"
        # (3 parts), "uri localname" (2 parts, default namespace), or a
        # bare "localname" (1 part, no namespace at all).
        pair = name.split()
        if len(pair) == 1:
            # no namespace
            pair = (None, name)
        elif len(pair) == 3:
            pair = pair[0], pair[1]
        else:
            # default namespace
            pair = tuple(pair)
        newattrs = {}
        qnames = {}
        for (aname, value) in attrs.items():
            parts = aname.split()
            length = len(parts)
            if length == 1:
                # no namespace
                qname = aname
                apair = (None, aname)
            elif length == 3:
                qname = "%s:%s" % (parts[2], parts[1])
                apair = parts[0], parts[1]
            else:
                # default namespace
                qname = parts[1]
                apair = tuple(parts)
            newattrs[apair] = value
            qnames[apair] = qname
        self._cont_handler.startElementNS(pair, None,
                                          AttributesNSImpl(newattrs, qnames))
    def end_element_ns(self, name):
        pair = name.split()
        if len(pair) == 1:
            pair = (None, name)
        elif len(pair) == 3:
            pair = pair[0], pair[1]
        else:
            pair = tuple(pair)
        self._cont_handler.endElementNS(pair, None)
    # this is not used (call directly to ContentHandler)
    def processing_instruction(self, target, data):
        self._cont_handler.processingInstruction(target, data)
    # this is not used (call directly to ContentHandler)
    def character_data(self, data):
        self._cont_handler.characters(data)
    def start_namespace_decl(self, prefix, uri):
        self._cont_handler.startPrefixMapping(prefix, uri)
    def end_namespace_decl(self, prefix):
        self._cont_handler.endPrefixMapping(prefix)
    def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
        self._lex_handler_prop.startDTD(name, pubid, sysid)
    def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
        self._dtd_handler.unparsedEntityDecl(name, pubid, sysid, notation_name)
    def notation_decl(self, name, base, sysid, pubid):
        self._dtd_handler.notationDecl(name, pubid, sysid)
    def external_entity_ref(self, context, base, sysid, pubid):
        if not self._external_ges:
            return 1
        source = self._ent_handler.resolveEntity(pubid, sysid)
        source = saxutils.prepare_input_source(source,
                                               self._source.getSystemId() or
                                               "")
        # Push the current parser/source, parse the entity with a child
        # parser, then restore the outer parsing state.
        self._entity_stack.append((self._parser, self._source))
        self._parser = self._parser.ExternalEntityParserCreate(context)
        self._source = source
        try:
            xmlreader.IncrementalParser.parse(self, source)
        except:
            return 0  # FIXME: save error info here?
        (self._parser, self._source) = self._entity_stack[-1]
        del self._entity_stack[-1]
        return 1
    def skipped_entity_handler(self, name, is_pe):
        if is_pe:
            # The SAX spec requires to report skipped PEs with a '%'
            name = '%'+name
        self._cont_handler.skippedEntity(name)
# ---
def create_parser(*args, **kwargs):
    """Factory used by xml.sax to instantiate this driver."""
    parser = ExpatParser(*args, **kwargs)
    return parser
# ---
if __name__ == "__main__":
import xml.sax.saxutils
p = create_parser()
p.setContentHandler(xml.sax.saxutils.XMLGenerator())
p.setErrorHandler(xml.sax.ErrorHandler())
p.parse("http://www.ibiblio.org/xml/examples/shakespeare/hamlet.xml")
| apache-2.0 |
mattseymour/django | tests/template_tests/syntax_tests/test_url.py | 71 | 11755 | from django.template import RequestContext, TemplateSyntaxError
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.urls import NoReverseMatch, resolve
from ..utils import setup
@override_settings(ROOT_URLCONF='template_tests.urls')
class UrlTagTests(SimpleTestCase):
    """Tests for the {% url %} template tag: reversing by name, quoting of
    special characters (with and without autoescape), failure modes, the
    `as var` form, and namespace resolution via the request's current app."""
    # Successes
    @setup({'url01': '{% url "client" client.id %}'})
    def test_url01(self):
        output = self.engine.render_to_string('url01', {'client': {'id': 1}})
        self.assertEqual(output, '/client/1/')
    @setup({'url02': '{% url "client_action" id=client.id action="update" %}'})
    def test_url02(self):
        output = self.engine.render_to_string('url02', {'client': {'id': 1}})
        self.assertEqual(output, '/client/1/update/')
    @setup({'url02a': '{% url "client_action" client.id "update" %}'})
    def test_url02a(self):
        output = self.engine.render_to_string('url02a', {'client': {'id': 1}})
        self.assertEqual(output, '/client/1/update/')
    @setup({'url02b': "{% url 'client_action' id=client.id action='update' %}"})
    def test_url02b(self):
        output = self.engine.render_to_string('url02b', {'client': {'id': 1}})
        self.assertEqual(output, '/client/1/update/')
    @setup({'url02c': "{% url 'client_action' client.id 'update' %}"})
    def test_url02c(self):
        output = self.engine.render_to_string('url02c', {'client': {'id': 1}})
        self.assertEqual(output, '/client/1/update/')
    @setup({'url03': '{% url "index" %}'})
    def test_url03(self):
        output = self.engine.render_to_string('url03')
        self.assertEqual(output, '/')
    @setup({'url04': '{% url "named.client" client.id %}'})
    def test_url04(self):
        output = self.engine.render_to_string('url04', {'client': {'id': 1}})
        self.assertEqual(output, '/named-client/1/')
    @setup({'url05': '{% url "метка_оператора" v %}'})
    def test_url05(self):
        output = self.engine.render_to_string('url05', {'v': 'Ω'})
        self.assertEqual(output, '/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/')
    @setup({'url06': '{% url "метка_оператора_2" tag=v %}'})
    def test_url06(self):
        output = self.engine.render_to_string('url06', {'v': 'Ω'})
        self.assertEqual(output, '/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/')
    @setup({'url08': '{% url "метка_оператора" v %}'})
    def test_url08(self):
        output = self.engine.render_to_string('url08', {'v': 'Ω'})
        self.assertEqual(output, '/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/')
    @setup({'url09': '{% url "метка_оператора_2" tag=v %}'})
    def test_url09(self):
        output = self.engine.render_to_string('url09', {'v': 'Ω'})
        self.assertEqual(output, '/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/')
    @setup({'url10': '{% url "client_action" id=client.id action="two words" %}'})
    def test_url10(self):
        output = self.engine.render_to_string('url10', {'client': {'id': 1}})
        self.assertEqual(output, '/client/1/two%20words/')
    @setup({'url11': '{% url "client_action" id=client.id action="==" %}'})
    def test_url11(self):
        output = self.engine.render_to_string('url11', {'client': {'id': 1}})
        self.assertEqual(output, '/client/1/==/')
    @setup({'url12': '{% url "client_action" id=client.id action="!$&\'()*+,;=~:@," %}'})
    def test_url12(self):
        output = self.engine.render_to_string('url12', {'client': {'id': 1}})
        # With autoescape on (the default), & and ' in the reversed URL are
        # HTML-escaped; compare with test_url21, which turns autoescape off.
        # (The previous expected string had the entities un-escaped, which
        # was not even valid Python syntax.)
        self.assertEqual(output, '/client/1/!$&amp;&#39;()*+,;=~:@,/')
    @setup({'url13': '{% url "client_action" id=client.id action=arg|join:"-" %}'})
    def test_url13(self):
        output = self.engine.render_to_string('url13', {'client': {'id': 1}, 'arg': ['a', 'b']})
        self.assertEqual(output, '/client/1/a-b/')
    @setup({'url14': '{% url "client_action" client.id arg|join:"-" %}'})
    def test_url14(self):
        output = self.engine.render_to_string('url14', {'client': {'id': 1}, 'arg': ['a', 'b']})
        self.assertEqual(output, '/client/1/a-b/')
    @setup({'url15': '{% url "client_action" 12 "test" %}'})
    def test_url15(self):
        output = self.engine.render_to_string('url15')
        self.assertEqual(output, '/client/12/test/')
    @setup({'url18': '{% url "client" "1,2" %}'})
    def test_url18(self):
        output = self.engine.render_to_string('url18')
        self.assertEqual(output, '/client/1,2/')
    @setup({'url19': '{% url named_url client.id %}'})
    def test_url19(self):
        output = self.engine.render_to_string(
            'url19', {'client': {'id': 1}, 'named_url': 'client'}
        )
        self.assertEqual(output, '/client/1/')
    @setup({'url20': '{% url url_name_in_var client.id %}'})
    def test_url20(self):
        output = self.engine.render_to_string('url20', {'client': {'id': 1}, 'url_name_in_var': 'named.client'})
        self.assertEqual(output, '/named-client/1/')
    @setup({'url21': '{% autoescape off %}'
                     '{% url "client_action" id=client.id action="!$&\'()*+,;=~:@," %}'
                     '{% endautoescape %}'})
    def test_url21(self):
        output = self.engine.render_to_string('url21', {'client': {'id': 1}})
        self.assertEqual(output, '/client/1/!$&\'()*+,;=~:@,/')
    # Failures
    @setup({'url-fail01': '{% url %}'})
    def test_url_fail01(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('url-fail01')
    @setup({'url-fail02': '{% url "no_such_view" %}'})
    def test_url_fail02(self):
        with self.assertRaises(NoReverseMatch):
            self.engine.render_to_string('url-fail02')
    @setup({'url-fail03': '{% url "client" %}'})
    def test_url_fail03(self):
        with self.assertRaises(NoReverseMatch):
            self.engine.render_to_string('url-fail03')
    @setup({'url-fail04': '{% url "view" id, %}'})
    def test_url_fail04(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('url-fail04')
    @setup({'url-fail05': '{% url "view" id= %}'})
    def test_url_fail05(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('url-fail05')
    @setup({'url-fail06': '{% url "view" a.id=id %}'})
    def test_url_fail06(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('url-fail06')
    @setup({'url-fail07': '{% url "view" a.id!id %}'})
    def test_url_fail07(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('url-fail07')
    @setup({'url-fail08': '{% url "view" id="unterminatedstring %}'})
    def test_url_fail08(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('url-fail08')
    @setup({'url-fail09': '{% url "view" id=", %}'})
    def test_url_fail09(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('url-fail09')
    @setup({'url-fail11': '{% url named_url %}'})
    def test_url_fail11(self):
        with self.assertRaises(NoReverseMatch):
            self.engine.render_to_string('url-fail11')
    @setup({'url-fail12': '{% url named_url %}'})
    def test_url_fail12(self):
        with self.assertRaises(NoReverseMatch):
            self.engine.render_to_string('url-fail12', {'named_url': 'no_such_view'})
    @setup({'url-fail13': '{% url named_url %}'})
    def test_url_fail13(self):
        with self.assertRaises(NoReverseMatch):
            self.engine.render_to_string('url-fail13', {'named_url': 'template_tests.views.client'})
    @setup({'url-fail14': '{% url named_url id, %}'})
    def test_url_fail14(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.render_to_string('url-fail14', {'named_url': 'view'})
    @setup({'url-fail15': '{% url named_url id= %}'})
    def test_url_fail15(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.render_to_string('url-fail15', {'named_url': 'view'})
    @setup({'url-fail16': '{% url named_url a.id=id %}'})
    def test_url_fail16(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.render_to_string('url-fail16', {'named_url': 'view'})
    @setup({'url-fail17': '{% url named_url a.id!id %}'})
    def test_url_fail17(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.render_to_string('url-fail17', {'named_url': 'view'})
    @setup({'url-fail18': '{% url named_url id="unterminatedstring %}'})
    def test_url_fail18(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.render_to_string('url-fail18', {'named_url': 'view'})
    @setup({'url-fail19': '{% url named_url id=", %}'})
    def test_url_fail19(self):
        with self.assertRaises(TemplateSyntaxError):
            self.engine.render_to_string('url-fail19', {'named_url': 'view'})
    # {% url ... as var %}
    @setup({'url-asvar01': '{% url "index" as url %}'})
    def test_url_asvar01(self):
        output = self.engine.render_to_string('url-asvar01')
        self.assertEqual(output, '')
    @setup({'url-asvar02': '{% url "index" as url %}{{ url }}'})
    def test_url_asvar02(self):
        output = self.engine.render_to_string('url-asvar02')
        self.assertEqual(output, '/')
    @setup({'url-asvar03': '{% url "no_such_view" as url %}{{ url }}'})
    def test_url_asvar03(self):
        # The `as var` form swallows NoReverseMatch and leaves var empty.
        output = self.engine.render_to_string('url-asvar03')
        self.assertEqual(output, '')
    @setup({'url-namespace01': '{% url "app:named.client" 42 %}'})
    def test_url_namespace01(self):
        request = RequestFactory().get('/')
        request.resolver_match = resolve('/ns1/')
        template = self.engine.get_template('url-namespace01')
        context = RequestContext(request)
        output = template.render(context)
        self.assertEqual(output, '/ns1/named-client/42/')
    @setup({'url-namespace02': '{% url "app:named.client" 42 %}'})
    def test_url_namespace02(self):
        request = RequestFactory().get('/')
        request.resolver_match = resolve('/ns2/')
        template = self.engine.get_template('url-namespace02')
        context = RequestContext(request)
        output = template.render(context)
        self.assertEqual(output, '/ns2/named-client/42/')
    @setup({'url-namespace03': '{% url "app:named.client" 42 %}'})
    def test_url_namespace03(self):
        request = RequestFactory().get('/')
        template = self.engine.get_template('url-namespace03')
        context = RequestContext(request)
        output = template.render(context)
        self.assertEqual(output, '/ns2/named-client/42/')
    @setup({'url-namespace-no-current-app': '{% url "app:named.client" 42 %}'})
    def test_url_namespace_no_current_app(self):
        request = RequestFactory().get('/')
        request.resolver_match = resolve('/ns1/')
        request.current_app = None
        template = self.engine.get_template('url-namespace-no-current-app')
        context = RequestContext(request)
        output = template.render(context)
        self.assertEqual(output, '/ns2/named-client/42/')
    @setup({'url-namespace-explicit-current-app': '{% url "app:named.client" 42 %}'})
    def test_url_namespace_explicit_current_app(self):
        request = RequestFactory().get('/')
        request.resolver_match = resolve('/ns1/')
        request.current_app = 'app'
        template = self.engine.get_template('url-namespace-explicit-current-app')
        context = RequestContext(request)
        output = template.render(context)
        self.assertEqual(output, '/ns2/named-client/42/')
| bsd-3-clause |
rootulp/exercism | python/palindrome-products/palindrome.py | 1 | 1285 | from operator import mul
from functools import reduce
class Palindromes:
    """Find palindromic products of factor pairs within a factor range."""

    @classmethod
    def smallest_palindrome(cls, max_factor, min_factor=0):
        """Return (product, (i, j)) for the smallest palindromic product."""
        return min(cls.palindromes(max_factor, min_factor),
                   key=lambda entry: entry[0])

    @classmethod
    def largest_palindrome(cls, max_factor, min_factor=0):
        """Return (product, (i, j)) for the largest palindromic product."""
        return max(cls.palindromes(max_factor, min_factor),
                   key=lambda entry: entry[0])

    @classmethod
    def palindromes(cls, max_factor, min_factor):
        """List (product, pair) for every candidate pair whose product is a
        palindrome, in candidate order."""
        results = []
        for pair in cls.candidates(max_factor, min_factor):
            prod = cls.product(pair)
            if cls.is_palindrome(prod):
                results.append((prod, pair))
        return results

    @staticmethod
    def candidates(max_factor, min_factor):
        """All ordered factor pairs (i, j) with min_factor <= i <= j <= max_factor."""
        return [(i, j)
                for i in range(min_factor, max_factor + 1)
                for j in range(i, max_factor + 1)]

    @staticmethod
    def product(s):
        """Multiply the members of s together (1 for an empty sequence)."""
        return reduce(mul, s, 1)

    @staticmethod
    def is_palindrome(num):
        """True when num's decimal digits read the same in both directions."""
        digits = str(num)
        return digits == digits[::-1]
def smallest_palindrome(max_factor, min_factor=0):
    """Module-level convenience wrapper for Palindromes.smallest_palindrome."""
    return Palindromes.smallest_palindrome(max_factor, min_factor)
def largest_palindrome(max_factor, min_factor=0):
    """Module-level convenience wrapper for Palindromes.largest_palindrome."""
    return Palindromes.largest_palindrome(max_factor, min_factor)
| mit |
blighj/django | django/utils/datetime_safe.py | 100 | 2837 | # Python's datetime strftime doesn't handle dates before 1900.
# These classes override date and datetime to support the formatting of a date
# through its full "proleptic Gregorian" date range.
#
# Based on code submitted to comp.lang.python by Andrew Dalke
#
# >>> datetime_safe.date(1850, 8, 2).strftime("%Y/%m/%d was a %A")
# '1850/08/02 was a Friday'
import re
import time as ttime
from datetime import (
date as real_date, datetime as real_datetime, time as real_time,
)
class date(real_date):
    """datetime.date subclass whose strftime() also works before 1900."""
    def strftime(self, fmt):
        # Delegate to the module-level strftime(), which emulates formatting
        # for years outside time.strftime's supported range.
        return strftime(self, fmt)
class datetime(real_datetime):
    """datetime.datetime subclass whose strftime() also works before 1900."""
    def strftime(self, fmt):
        return strftime(self, fmt)

    @classmethod
    def combine(cls, date, time):
        # Mirror datetime.datetime.combine() but construct the safe subclass.
        return cls(date.year, date.month, date.day,
                   time.hour, time.minute, time.second,
                   time.microsecond, time.tzinfo)

    def date(self):
        # Return the safe date subclass instead of a plain datetime.date.
        return date(self.year, self.month, self.day)
class time(real_time):
    # Provided only for API symmetry with date/datetime above; time values
    # carry no year, so they never hit the pre-1900 strftime limitation.
    pass
def new_date(d):
    """Generate a safe date from a datetime.date object."""
    return date(d.year, d.month, d.day)
def new_datetime(d):
    """
    Generate a safe datetime from a datetime.date or datetime.datetime object.
    """
    # A plain date contributes only (year, month, day); a datetime also
    # contributes its time fields and tzinfo.
    kw = [d.year, d.month, d.day]
    if isinstance(d, real_datetime):
        kw.extend([d.hour, d.minute, d.second, d.microsecond, d.tzinfo])
    return datetime(*kw)
# This library does not support strftime's "%s" or "%y" format strings.
# Allowed if there's an even number of "%"s because they are escaped.
_illegal_formatting = re.compile(r"((^|[^%])(%%)*%[sy])")
def _findall(text, substr):
# Also finds overlaps
sites = []
i = 0
while 1:
j = text.find(substr, i)
if j == -1:
break
sites.append(j)
i = j + 1
return sites
def strftime(dt, fmt):
    """strftime() replacement that also handles years before 1900.

    For years >= 1900 the real date/datetime implementation is used.  For
    earlier years the date is shifted into the 28-year Gregorian cycle
    around the year 2000 (which repeats weekdays and leap years), formatted
    there, and the true year substituted back into the result.

    Raises TypeError for "%s"/"%y" format codes, which cannot be emulated.
    """
    if dt.year >= 1900:
        return super(type(dt), dt).strftime(fmt)
    illegal_formatting = _illegal_formatting.search(fmt)
    if illegal_formatting:
        # Bug fix: the message used to be concatenated without a separating
        # space ("...does not handle%y").
        raise TypeError("strftime of dates before 1900 does not handle " +
                        illegal_formatting.group(0))

    year = dt.year
    # For every non-leap year century, advance by
    # 6 years to get into the 28-year repeat cycle
    delta = 2000 - year
    off = 6 * (delta // 100 + delta // 400)
    year = year + off

    # Move to around the year 2000
    year = year + ((2000 - year) // 28) * 28
    timetuple = dt.timetuple()
    # Format the same timetuple under two stand-in years 28 years apart and
    # keep only positions where the year appears in both renderings: those
    # are genuine year fields, safe to overwrite with the real year.
    s1 = ttime.strftime(fmt, (year,) + timetuple[1:])
    sites1 = _findall(s1, str(year))

    s2 = ttime.strftime(fmt, (year + 28,) + timetuple[1:])
    sites2 = _findall(s2, str(year + 28))

    sites = []
    for site in sites1:
        if site in sites2:
            sites.append(site)

    s = s1
    syear = "%04d" % (dt.year,)
    for site in sites:
        s = s[:site] + syear + s[site + 4:]
    return s
| bsd-3-clause |
mSenyor/sl4a | python/src/Demo/parser/unparse.py | 35 | 13878 | "Usage: unparse.py <path to source file>"
import sys
import _ast
import cStringIO
import os
def interleave(inter, f, seq):
    """Call f on each item in seq, calling inter() in between.
    """
    seq = iter(seq)
    try:
        # Python 2 iterator protocol (.next()); this module targets Python 2.
        f(seq.next())
    except StopIteration:
        # Empty sequence: nothing to emit.
        pass
    else:
        for x in seq:
            inter()
            f(x)
class Unparser:
"""Methods in this class recursively traverse an AST and
output source code for the abstract syntax; original formatting
is disregarged. """
def __init__(self, tree, file = sys.stdout):
"""Unparser(tree, file=sys.stdout) -> None.
Print the source for tree to file."""
self.f = file
self._indent = 0
self.dispatch(tree)
print >>self.f,""
self.f.flush()
def fill(self, text = ""):
"Indent a piece of text, according to the current indentation level"
self.f.write("\n"+" "*self._indent + text)
def write(self, text):
"Append a piece of text to the current line."
self.f.write(text)
def enter(self):
"Print ':', and increase the indentation."
self.write(":")
self._indent += 1
def leave(self):
"Decrease the indentation level."
self._indent -= 1
def dispatch(self, tree):
"Dispatcher function, dispatching tree type T to method _T."
if isinstance(tree, list):
for t in tree:
self.dispatch(t)
return
meth = getattr(self, "_"+tree.__class__.__name__)
meth(tree)
############### Unparsing methods ######################
# There should be one method per concrete grammar type #
# Constructors should be grouped by sum type. Ideally, #
# this would follow the order in the grammar, but #
# currently doesn't. #
########################################################
def _Module(self, tree):
for stmt in tree.body:
self.dispatch(stmt)
# stmt
def _Expr(self, tree):
self.fill()
self.dispatch(tree.value)
def _Import(self, t):
self.fill("import ")
interleave(lambda: self.write(", "), self.dispatch, t.names)
def _ImportFrom(self, t):
self.fill("from ")
self.write(t.module)
self.write(" import ")
interleave(lambda: self.write(", "), self.dispatch, t.names)
# XXX(jpe) what is level for?
def _Assign(self, t):
self.fill()
for target in t.targets:
self.dispatch(target)
self.write(" = ")
self.dispatch(t.value)
def _AugAssign(self, t):
self.fill()
self.dispatch(t.target)
self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
self.dispatch(t.value)
def _Return(self, t):
self.fill("return")
if t.value:
self.write(" ")
self.dispatch(t.value)
def _Pass(self, t):
self.fill("pass")
def _Break(self, t):
self.fill("break")
def _Continue(self, t):
self.fill("continue")
def _Delete(self, t):
self.fill("del ")
self.dispatch(t.targets)
def _Assert(self, t):
self.fill("assert ")
self.dispatch(t.test)
if t.msg:
self.write(", ")
self.dispatch(t.msg)
def _Exec(self, t):
self.fill("exec ")
self.dispatch(t.body)
if t.globals:
self.write(" in ")
self.dispatch(t.globals)
if t.locals:
self.write(", ")
self.dispatch(t.locals)
def _Print(self, t):
self.fill("print ")
do_comma = False
if t.dest:
self.write(">>")
self.dispatch(t.dest)
do_comma = True
for e in t.values:
if do_comma:self.write(", ")
else:do_comma=True
self.dispatch(e)
if not t.nl:
self.write(",")
def _Global(self, t):
self.fill("global ")
interleave(lambda: self.write(", "), self.write, t.names)
def _Yield(self, t):
self.write("(")
self.write("yield")
if t.value:
self.write(" ")
self.dispatch(t.value)
self.write(")")
def _Raise(self, t):
self.fill('raise ')
if t.type:
self.dispatch(t.type)
if t.inst:
self.write(", ")
self.dispatch(t.inst)
if t.tback:
self.write(", ")
self.dispatch(t.tback)
def _TryExcept(self, t):
self.fill("try")
self.enter()
self.dispatch(t.body)
self.leave()
for ex in t.handlers:
self.dispatch(ex)
if t.orelse:
self.fill("else")
self.enter()
self.dispatch(t.orelse)
self.leave()
def _TryFinally(self, t):
self.fill("try")
self.enter()
self.dispatch(t.body)
self.leave()
self.fill("finally")
self.enter()
self.dispatch(t.finalbody)
self.leave()
def _ExceptHandler(self, t):
self.fill("except")
if t.type:
self.write(" ")
self.dispatch(t.type)
if t.name:
self.write(", ")
self.dispatch(t.name)
self.enter()
self.dispatch(t.body)
self.leave()
def _ClassDef(self, t):
self.write("\n")
self.fill("class "+t.name)
if t.bases:
self.write("(")
for a in t.bases:
self.dispatch(a)
self.write(", ")
self.write(")")
self.enter()
self.dispatch(t.body)
self.leave()
def _FunctionDef(self, t):
self.write("\n")
for deco in t.decorator_list:
self.fill("@")
self.dispatch(deco)
self.fill("def "+t.name + "(")
self.dispatch(t.args)
self.write(")")
self.enter()
self.dispatch(t.body)
self.leave()
def _For(self, t):
self.fill("for ")
self.dispatch(t.target)
self.write(" in ")
self.dispatch(t.iter)
self.enter()
self.dispatch(t.body)
self.leave()
if t.orelse:
self.fill("else")
self.enter()
self.dispatch(t.orelse)
self.leave
def _If(self, t):
self.fill("if ")
self.dispatch(t.test)
self.enter()
# XXX elif?
self.dispatch(t.body)
self.leave()
if t.orelse:
self.fill("else")
self.enter()
self.dispatch(t.orelse)
self.leave()
def _While(self, t):
self.fill("while ")
self.dispatch(t.test)
self.enter()
self.dispatch(t.body)
self.leave()
if t.orelse:
self.fill("else")
self.enter()
self.dispatch(t.orelse)
self.leave
def _With(self, t):
self.fill("with ")
self.dispatch(t.context_expr)
if t.optional_vars:
self.write(" as ")
self.dispatch(t.optional_vars)
self.enter()
self.dispatch(t.body)
self.leave()
# expr
def _Str(self, tree):
self.write(repr(tree.s))
def _Name(self, t):
self.write(t.id)
def _Repr(self, t):
self.write("`")
self.dispatch(t.value)
self.write("`")
def _Num(self, t):
self.write(repr(t.n))
def _List(self, t):
self.write("[")
interleave(lambda: self.write(", "), self.dispatch, t.elts)
self.write("]")
def _ListComp(self, t):
self.write("[")
self.dispatch(t.elt)
for gen in t.generators:
self.dispatch(gen)
self.write("]")
def _GeneratorExp(self, t):
self.write("(")
self.dispatch(t.elt)
for gen in t.generators:
self.dispatch(gen)
self.write(")")
def _comprehension(self, t):
self.write(" for ")
self.dispatch(t.target)
self.write(" in ")
self.dispatch(t.iter)
for if_clause in t.ifs:
self.write(" if ")
self.dispatch(if_clause)
def _IfExp(self, t):
self.write("(")
self.dispatch(t.body)
self.write(" if ")
self.dispatch(t.test)
self.write(" else ")
self.dispatch(t.orelse)
self.write(")")
def _Dict(self, t):
self.write("{")
def writem((k, v)):
self.dispatch(k)
self.write(": ")
self.dispatch(v)
interleave(lambda: self.write(", "), writem, zip(t.keys, t.values))
self.write("}")
def _Tuple(self, t):
self.write("(")
if len(t.elts) == 1:
(elt,) = t.elts
self.dispatch(elt)
self.write(",")
else:
interleave(lambda: self.write(", "), self.dispatch, t.elts)
self.write(")")
unop = {"Invert":"~", "Not": "not", "UAdd":"+", "USub":"-"}
def _UnaryOp(self, t):
self.write(self.unop[t.op.__class__.__name__])
self.write("(")
self.dispatch(t.operand)
self.write(")")
binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%",
"LShift":">>", "RShift":"<<", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
"FloorDiv":"//", "Pow": "**"}
def _BinOp(self, t):
self.write("(")
self.dispatch(t.left)
self.write(" " + self.binop[t.op.__class__.__name__] + " ")
self.dispatch(t.right)
self.write(")")
cmpops = {"Eq":"==", "NotEq":"!=", "Lt":"<", "LtE":"<=", "Gt":">", "GtE":">=",
"Is":"is", "IsNot":"is not", "In":"in", "NotIn":"not in"}
def _Compare(self, t):
self.write("(")
self.dispatch(t.left)
for o, e in zip(t.ops, t.comparators):
self.write(" " + self.cmpops[o.__class__.__name__] + " ")
self.dispatch(e)
self.write(")")
boolops = {_ast.And: 'and', _ast.Or: 'or'}
def _BoolOp(self, t):
self.write("(")
s = " %s " % self.boolops[t.op.__class__]
interleave(lambda: self.write(s), self.dispatch, t.values)
self.write(")")
def _Attribute(self,t):
self.dispatch(t.value)
self.write(".")
self.write(t.attr)
def _Call(self, t):
self.dispatch(t.func)
self.write("(")
comma = False
for e in t.args:
if comma: self.write(", ")
else: comma = True
self.dispatch(e)
for e in t.keywords:
if comma: self.write(", ")
else: comma = True
self.dispatch(e)
if t.starargs:
if comma: self.write(", ")
else: comma = True
self.write("*")
self.dispatch(t.starargs)
if t.kwargs:
if comma: self.write(", ")
else: comma = True
self.write("**")
self.dispatch(t.kwargs)
self.write(")")
def _Subscript(self, t):
self.dispatch(t.value)
self.write("[")
self.dispatch(t.slice)
self.write("]")
# slice
def _Ellipsis(self, t):
self.write("...")
def _Index(self, t):
self.dispatch(t.value)
def _Slice(self, t):
if t.lower:
self.dispatch(t.lower)
self.write(":")
if t.upper:
self.dispatch(t.upper)
if t.step:
self.write(":")
self.dispatch(t.step)
def _ExtSlice(self, t):
interleave(lambda: self.write(', '), self.dispatch, t.dims)
# others
def _arguments(self, t):
first = True
nonDef = len(t.args)-len(t.defaults)
for a in t.args[0:nonDef]:
if first:first = False
else: self.write(", ")
self.dispatch(a)
for a,d in zip(t.args[nonDef:], t.defaults):
if first:first = False
else: self.write(", ")
self.dispatch(a),
self.write("=")
self.dispatch(d)
if t.vararg:
if first:first = False
else: self.write(", ")
self.write("*"+t.vararg)
if t.kwarg:
if first:first = False
else: self.write(", ")
self.write("**"+t.kwarg)
def _keyword(self, t):
self.write(t.arg)
self.write("=")
self.dispatch(t.value)
def _Lambda(self, t):
self.write("lambda ")
self.dispatch(t.args)
self.write(": ")
self.dispatch(t.body)
def _alias(self, t):
self.write(t.name)
if t.asname:
self.write(" as "+t.asname)
def roundtrip(filename, output=sys.stdout):
    """Parse *filename* and pretty-print the source it contains to *output*."""
    # Bug fix: the file handle was previously never closed
    # (open(filename).read()); use a context manager instead.
    with open(filename) as f:
        source = f.read()
    tree = compile(source, filename, "exec", _ast.PyCF_ONLY_AST)
    Unparser(tree, output)
def testdir(a):
    """Round-trip every *.py file under directory *a* (recursing into
    subdirectories), reporting files that fail."""
    try:
        names = [n for n in os.listdir(a) if n.endswith('.py')]
    except OSError:
        print >> sys.stderr, "Directory not readable: %s" % a
    else:
        for n in names:
            fullname = os.path.join(a, n)
            if os.path.isfile(fullname):
                # Unparse into a throwaway buffer; only failures matter here.
                output = cStringIO.StringIO()
                print 'Testing %s' % fullname
                try:
                    roundtrip(fullname, output)
                except Exception, e:
                    print ' Failed to compile, exception is %s' % repr(e)
            elif os.path.isdir(fullname):
                testdir(fullname)
def main(args):
    """CLI entry point: '--testdir DIR...' tests directories, otherwise each
    argument is a file to round-trip to stdout."""
    if args[0] == '--testdir':
        for a in args[1:]:
            testdir(a)
    else:
        for a in args:
            roundtrip(a)
if __name__=='__main__':
main(sys.argv[1:])
| apache-2.0 |
gomyhr/elveg2osm | elveg_all.py | 1 | 1670 | #! /usr/bin/env python2
'''elveg_all Elveg_archive.zip [XXXX [YYYY [...]]]'''
import sys
import os
filename = sys.argv[1]

# Unzip the top-level Elveg archive if a .zip path was given; otherwise the
# argument is assumed to be an already-extracted directory.
if filename[-4:] == '.zip':
    # Assume that it is a zip file
    dirname = filename[:-4]
    if not os.path.isdir(dirname):
        os.mkdir(dirname)
        os.system('unzip -d {0} {1}'.format(dirname, filename))
else:
    dirname = filename

# Decide which kommunes (municipalities) to work on: the 4-digit numbers
# given on the command line, or every NNNNElveg.zip found in the archive.
if len(sys.argv) > 2:
    kommune_numbers = sys.argv[2:]
else:
    allfiles = os.listdir(dirname)
    kommune_numbers = [fn[0:4] for fn in allfiles if fn[4:] == 'Elveg.zip']
    kommune_numbers.sort()

# Iterate over municipalities
for kn in kommune_numbers:
    sys.stdout.write("Processing municipality: {0}\n".format(kn))
    sys.stdout.flush()
    # Unzip municipality files (if directory does not exist)
    kommune_dir = os.path.join(dirname, kn)
    if not os.path.isdir(kommune_dir):
        #os.mkdir(kommune_dir)
        zipfile = os.path.join(dirname, kn + "Elveg.zip")
        os.system('unzip -o -d {0} {1} >/dev/null'.format(kommune_dir, zipfile))
    # Convert SOSI file to OSM using sosi2osm, then post-process with
    # elveg2osm.py (logging its output per municipality).
    sosifile = os.path.join(kommune_dir, kn + 'Elveg.SOS')
    osmfile = os.path.join(kommune_dir, kn + 'Elveg_default.osm')
    # fartfile/hoydefile are computed but not used below; presumably
    # consumed by elveg2osm.py via kommune_dir — TODO confirm.
    fartfile = os.path.join(kommune_dir, kn + 'Fart.txt')
    hoydefile = os.path.join(kommune_dir, kn + 'Hoyde.txt')
    osmoutput = os.path.join(kommune_dir, kn + 'Elveg.osm')
    logfile = os.path.join(kommune_dir, kn + 'elveg2osm.log')
    os.system('sosi2osm {0} default.lua >{1}'.format(sosifile, osmfile))
    os.system('./elveg2osm.py {0} {1} >{2} 2>&1'.format(kommune_dir, kn, logfile))
| mit |
thekingofkings/chicago-crime | python/multi_view_learning/nn_leaveOneOut.py | 2 | 2401 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Tensor flow NN model for leave-one-out evaluation.
Created on Fri Apr 7 14:25:01 2017
@author: hxw186
"""
import tensorflow as tf
import numpy as np
import sys
sys.path.append("../")
from feature_evaluation import build_features
def leaveOneOut_error(Y, D, P, Tf, Yt, Gd, Yg, features=None, gwr_gamma=None, taxi_norm="bydestination"):
    """
    Fit a TensorFlow linear regression model on the given feature groups.
    (The original docstring claimed a statsmodels GLM; the code below builds
    a TF linear model.)
    Evaluate with leave-one-out setting, return the average of n errors.

    Input:
        features - a list of features. ['all'] == ['demo', 'poi', 'geo', 'taxi']
                   (defaults to ['all'] when None is passed)
        gwr_gamma - the GWR weight matrix. TODO
        taxi_norm - normalization mode forwarded to build_features

    Output:
        (mae, mre) - mean absolute error over the k leave-one-out folds, and
                     that error relative to the mean of Y
    """
    # Bug fix: the default was the mutable list ['all']; use a None sentinel
    # so the default object is never shared between calls.
    if features is None:
        features = ['all']
    errors = []
    for k in range(len(Y)):
        # A fresh graph per fold so variables from previous folds don't leak.
        with tf.Graph().as_default():
            X_train, X_test, Y_train, Y_test = build_features(Y, D, P, Tf, Yt, Gd, Yg, k, features, taxi_norm)
            # Build the TF linear model: y_est = sum(x * W) + b
            F1 = X_train.shape[1]
            x1 = tf.placeholder(tf.float32, [None, F1], name="numeric_features_set1")
            y = tf.placeholder(tf.float32, [None, 1], name="label")

            W = tf.Variable(tf.random_normal([F1]), name="weight")
            b = tf.Variable(tf.random_normal([1]), name="bias")
            y_est = tf.add(tf.reduce_sum(tf.multiply(x1, W)), b)
#            h1 = tf.layers.dense(inputs=x1, units=F1/2, activation=tf.nn.relu, use_bias=True,
#                                 name="reduce_half", reuse=None)
#            y_est = tf.layers.dense(inputs=x1, units=1, activation=None, use_bias=True,
#                                    name="reg_pred", reuse=None)
            objective = tf.reduce_mean(tf.squared_difference(y, y_est))
            train_step = tf.train.GradientDescentOptimizer(0.1).minimize(objective)
            tf_mae = tf.reduce_mean(tf.abs(y - y_est))

            sess = tf.InteractiveSession()
            sess.run(tf.global_variables_initializer())
            # NOTE(review): only a single gradient step is taken per fold —
            # presumably intentional, but verify against the experiment setup.
            train_step.run(feed_dict={x1: X_train, y: Y_train[:,None]})
            yarray = np.array(Y_test).reshape((1,1))
            mae = tf_mae.eval(feed_dict={x1: X_test[None,:], y: yarray})
            errors.append(mae)

    return np.mean(errors), np.mean(errors) / np.mean(Y)
| mit |
SofiaReis/django-cms | cms/models/placeholdermodel.py | 29 | 11215 | # -*- coding: utf-8 -*-
from cms.utils.compat import DJANGO_1_7
from django.contrib import admin
from django.contrib.auth import get_permission_codename
from django.db import models
from django.template.defaultfilters import title
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from cms.exceptions import LanguageError
from cms.utils.helpers import reversion_register
from cms.utils.i18n import get_language_object
from cms.utils.placeholder import PlaceholderNoAction, get_placeholder_conf
from cms.utils.urlutils import admin_reverse
@python_2_unicode_compatible
class Placeholder(models.Model):
    """
    Attributes:
        is_static       Set to "True" for static placeholders by the template tag
        is_editable     If False the content of the placeholder is not editable in the frontend
    """
    slot = models.CharField(_("slot"), max_length=255, db_index=True, editable=False)
    default_width = models.PositiveSmallIntegerField(_("width"), null=True, editable=False)
    cache_placeholder = True
    is_static = False
    is_editable = True

    class Meta:
        app_label = 'cms'
        permissions = (
            (u"use_structure", u"Can use Structure mode"),
        )

    def __str__(self):
        return self.slot

    def clear(self, language=None):
        """Delete every plugin in this placeholder (optionally restricted to
        one language), deepest nodes first so tree integrity is preserved."""
        if language:
            qs = self.cmsplugin_set.filter(language=language)
        else:
            qs = self.cmsplugin_set.all()
        qs = qs.order_by('-depth').select_related()
        for plugin in qs:
            inst, cls = plugin.get_plugin_instance()
            if inst and getattr(inst, 'cmsplugin_ptr', False):
                inst.cmsplugin_ptr._no_reorder = True
                inst._no_reorder = True
                inst.delete(no_mp=True)
            else:
                plugin._no_reorder = True
                plugin.delete(no_mp=True)

    def get_label(self):
        """Human-readable (and translated) name for this placeholder slot."""
        name = get_placeholder_conf("name", self.slot, default=title(self.slot))
        name = _(name)
        return name

    def get_add_url(self):
        return self._get_url('add_plugin')

    def get_edit_url(self, plugin_pk):
        return self._get_url('edit_plugin', plugin_pk)

    def get_move_url(self):
        return self._get_url('move_plugin')

    def get_delete_url(self, plugin_pk):
        return self._get_url('delete_plugin', plugin_pk)

    def get_changelist_url(self):
        return self._get_url('changelist')

    def get_clear_url(self):
        return self._get_url('clear_placeholder', self.pk)

    def get_copy_url(self):
        return self._get_url('copy_plugins')

    def get_extra_menu_items(self):
        from cms.plugin_pool import plugin_pool
        return plugin_pool.get_extra_placeholder_menu_items(self)

    def _get_url(self, key, pk=None):
        """Reverse the admin URL named *key* for the model this placeholder
        is attached to (falling back to the cms Page admin)."""
        model = self._get_attached_model()
        args = []
        if pk:
            args.append(pk)
        if not model:
            return admin_reverse('cms_page_%s' % key, args=args)
        else:
            app_label = model._meta.app_label
            model_name = model.__name__.lower()
            return admin_reverse('%s_%s_%s' % (app_label, model_name, key), args=args)

    def _get_permission(self, request, key):
        """
        Generic method to check the permissions for a request for a given key,
        the key can be: 'add', 'change' or 'delete'. For each attached object
        permission has to be granted either on attached model or on attached object.

        * 'add' and 'change' permissions on placeholder need either add or change
          permission on the attached object to be granted.
        * 'delete' needs either add, change or delete.
        """
        if getattr(request, 'user', None) and request.user.is_superuser:
            return True
        perm_keys = {
            'add': ('add', 'change',),
            'change': ('add', 'change',),
            'delete': ('add', 'change', 'delete'),
        }
        if key not in perm_keys:
            raise Exception("%s is not a valid perm key. "
                            "'Only 'add', 'change' and 'delete' are allowed" % key)
        objects = [self.page] if self.page else self._get_attached_objects()
        obj_perm = None
        for obj in objects:
            obj_perm = False
            # Bug fix: the inner loop previously reused the name ``key``
            # (``for key in perm_keys[key]``), clobbering the lookup key and
            # checking the wrong permission set for every object after the
            # first one.
            for action in perm_keys[key]:
                if self._get_object_permission(obj, request, action):
                    obj_perm = True
                    break
            if not obj_perm:
                return False
        return obj_perm

    def _get_object_permission(self, obj, request, key):
        """True if request.user holds the Django permission *key* (add/change/
        delete) on *obj*'s model, or the per-object permission on *obj*."""
        if not getattr(request, 'user', None):
            return False
        opts = obj._meta
        perm_code = '%s.%s' % (opts.app_label, get_permission_codename(key, opts))
        return request.user.has_perm(perm_code) or request.user.has_perm(perm_code, obj)

    def has_change_permission(self, request):
        return self._get_permission(request, 'change')

    def has_add_permission(self, request):
        return self._get_permission(request, 'add')

    def has_delete_permission(self, request):
        return self._get_permission(request, 'delete')

    def render(self, context, width, lang=None, editable=True, use_cache=True):
        '''
        Set editable = False to disable front-end rendering for this render.
        '''
        from cms.plugin_rendering import render_placeholder
        if 'request' not in context:
            return '<!-- missing request -->'
        width = width or self.default_width
        if width:
            context['width'] = width
        return render_placeholder(self, context, lang=lang, editable=editable,
                                  use_cache=use_cache)

    def _get_attached_fields(self):
        """
        Returns a LIST of all non-cmsplugin reverse foreign key related fields
        whose model is registered with a PlaceholderAdminMixin admin.
        """
        from cms.models import CMSPlugin
        if not hasattr(self, '_attached_fields_cache'):
            self._attached_fields_cache = []
            for rel in self._meta.get_all_related_objects():
                if issubclass(rel.model, CMSPlugin):
                    continue
                from cms.admin.placeholderadmin import PlaceholderAdminMixin
                if DJANGO_1_7:
                    parent = rel.model
                else:
                    parent = rel.related_model
                if parent in admin.site._registry and isinstance(admin.site._registry[parent], PlaceholderAdminMixin):
                    field = getattr(self, rel.get_accessor_name())
                    try:
                        if field.count():
                            self._attached_fields_cache.append(rel.field)
                    except Exception:
                        # Narrowed from a bare except: the related manager may
                        # not support count() here; treat it as not attached.
                        pass
        return self._attached_fields_cache

    def _get_attached_field(self):
        """Return the single relation field this placeholder is attached
        through, preferring Page / StaticPlaceholder relations."""
        from cms.models import CMSPlugin, StaticPlaceholder, Page
        if not hasattr(self, '_attached_field_cache'):
            self._attached_field_cache = None
            relations = self._meta.get_all_related_objects()

            # NOTE(review): this reorders ``relations`` while iterating it.
            # It happens to be safe because pop+insert keep positions at or
            # beyond the current index stable, but it is fragile.
            for rel in relations:
                if DJANGO_1_7:
                    parent = rel.model
                else:
                    parent = rel.related_model
                if parent == Page or parent == StaticPlaceholder:
                    relations.insert(0, relations.pop(relations.index(rel)))
            for rel in relations:
                if issubclass(rel.model, CMSPlugin):
                    continue
                from cms.admin.placeholderadmin import PlaceholderAdminMixin
                if DJANGO_1_7:
                    parent = rel.model
                else:
                    parent = rel.related_model
                if parent in admin.site._registry and isinstance(admin.site._registry[parent], PlaceholderAdminMixin):
                    field = getattr(self, rel.get_accessor_name())
                    try:
                        if field.count():
                            self._attached_field_cache = rel.field
                            break
                    except Exception:
                        # Narrowed from a bare except: see _get_attached_fields.
                        pass
        return self._attached_field_cache

    def _get_attached_field_name(self):
        field = self._get_attached_field()
        if field:
            return field.name
        return None

    def _get_attached_model(self):
        if hasattr(self, '_attached_model_cache'):
            return self._attached_model_cache
        if self.page or self.page_set.all().count():
            from cms.models import Page
            self._attached_model_cache = Page
            return Page
        field = self._get_attached_field()
        if field:
            self._attached_model_cache = field.model
            return field.model
        self._attached_model_cache = None
        return None

    def _get_attached_models(self):
        """
        Returns a list of models of attached to this placeholder.
        """
        if hasattr(self, '_attached_models_cache'):
            return self._attached_models_cache
        self._attached_models_cache = [field.model for field in self._get_attached_fields()]
        return self._attached_models_cache

    def _get_attached_objects(self):
        """
        Returns a list of objects attached to this placeholder.
        """
        return [obj for field in self._get_attached_fields()
                for obj in getattr(self, field.related.get_accessor_name()).all()]

    def page_getter(self):
        # Lazily resolve (and cache) the Page owning this placeholder, or
        # None when there is no single owning page.
        if not hasattr(self, '_page'):
            from cms.models.pagemodel import Page
            try:
                self._page = Page.objects.get(placeholders=self)
            except (Page.DoesNotExist, Page.MultipleObjectsReturned,):
                self._page = None
        return self._page

    def page_setter(self, value):
        self._page = value

    page = property(page_getter, page_setter)

    def get_plugins_list(self, language=None):
        return list(self.get_plugins(language))

    def get_plugins(self, language=None):
        if language:
            return self.cmsplugin_set.filter(language=language).order_by('path')
        else:
            return self.cmsplugin_set.all().order_by('path')

    def get_filled_languages(self):
        """
        Returns language objects for every language for which the placeholder
        has plugins.

        This is not cached as it's meant to be used in the frontend editor.
        """
        languages = []
        for lang_code in set(self.get_plugins().values_list('language', flat=True)):
            try:
                languages.append(get_language_object(lang_code))
            except LanguageError:
                # Plugins may exist in languages no longer configured; skip.
                pass
        return languages

    def get_cached_plugins(self):
        return getattr(self, '_plugins_cache', [])

    @property
    def actions(self):
        if not hasattr(self, '_actions_cache'):
            field = self._get_attached_field()
            self._actions_cache = getattr(field, 'actions', PlaceholderNoAction())
        return self._actions_cache
reversion_register(Placeholder) # follow=["cmsplugin_set"] not following plugins since they are a spechial case
| bsd-3-clause |
ian-garrett/meetMe | env/lib/python3.4/site-packages/flask/testing.py | 783 | 5003 | # -*- coding: utf-8 -*-
"""
flask.testing
~~~~~~~~~~~~~
Implements test support helpers. This module is lazily imported
and usually not used in production environments.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from contextlib import contextmanager
from werkzeug.test import Client, EnvironBuilder
from flask import _request_ctx_stack
try:
from werkzeug.urls import url_parse
except ImportError:
from urlparse import urlsplit as url_parse
def make_test_environ_builder(app, path='/', base_url=None, *args, **kwargs):
    """Creates a new test builder with some application defaults thrown in."""
    server_name = app.config.get('SERVER_NAME')
    app_root = app.config.get('APPLICATION_ROOT')
    if base_url is None:
        # Derive the base URL from the path's netloc, the configured
        # SERVER_NAME, or fall back to localhost.
        parsed = url_parse(path)
        host = parsed.netloc or server_name or 'localhost'
        base_url = 'http://%s/' % host
        if app_root:
            base_url += app_root.lstrip('/')
        if parsed.netloc:
            # A fully qualified path was given; keep only its path part.
            path = parsed.path
    return EnvironBuilder(path, base_url, *args, **kwargs)
class FlaskClient(Client):
    """Works like a regular Werkzeug test client but has some knowledge about
    how Flask works to defer the cleanup of the request context stack to the
    end of a with body when used in a with statement.  For general information
    about how to use this class refer to :class:`werkzeug.test.Client`.

    Basic usage is outlined in the :ref:`testing` chapter.
    """
    # Set to True while inside ``with client:`` so request contexts are kept
    # alive until the block exits.
    preserve_context = False

    @contextmanager
    def session_transaction(self, *args, **kwargs):
        """When used in combination with a with statement this opens a
        session transaction.  This can be used to modify the session that
        the test client uses.  Once the with block is left the session is
        stored back.

            with client.session_transaction() as session:
                session['value'] = 42

        Internally this is implemented by going through a temporary test
        request context and since session handling could depend on
        request variables this function accepts the same arguments as
        :meth:`~flask.Flask.test_request_context` which are directly
        passed through.
        """
        if self.cookie_jar is None:
            raise RuntimeError('Session transactions only make sense '
                               'with cookies enabled.')
        app = self.application
        # Feed the client's current cookies into the temporary request so the
        # session backend can load the existing session.
        environ_overrides = kwargs.setdefault('environ_overrides', {})
        self.cookie_jar.inject_wsgi(environ_overrides)
        outer_reqctx = _request_ctx_stack.top
        with app.test_request_context(*args, **kwargs) as c:
            sess = app.open_session(c.request)
            if sess is None:
                raise RuntimeError('Session backend did not open a session. '
                                   'Check the configuration')

            # Since we have to open a new request context for the session
            # handling we want to make sure that we hide out own context
            # from the caller.  By pushing the original request context
            # (or None) on top of this and popping it we get exactly that
            # behavior.  It's important to not use the push and pop
            # methods of the actual request context object since that would
            # mean that cleanup handlers are called
            _request_ctx_stack.push(outer_reqctx)
            try:
                yield sess
            finally:
                _request_ctx_stack.pop()

            # Serialize the (possibly modified) session back into the
            # client's cookie jar so subsequent requests see it.
            resp = app.response_class()
            if not app.session_interface.is_null_session(sess):
                app.save_session(sess, resp)
            headers = resp.get_wsgi_headers(c.request.environ)
            self.cookie_jar.extract_wsgi(c.request.environ, headers)

    def open(self, *args, **kwargs):
        kwargs.setdefault('environ_overrides', {}) \
            ['flask._preserve_context'] = self.preserve_context

        # Pop the client-only keyword arguments before the rest are handed
        # to the environ builder.
        as_tuple = kwargs.pop('as_tuple', False)
        buffered = kwargs.pop('buffered', False)
        follow_redirects = kwargs.pop('follow_redirects', False)
        builder = make_test_environ_builder(self.application, *args, **kwargs)

        return Client.open(self, builder,
                           as_tuple=as_tuple,
                           buffered=buffered,
                           follow_redirects=follow_redirects)

    def __enter__(self):
        if self.preserve_context:
            raise RuntimeError('Cannot nest client invocations')
        self.preserve_context = True
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.preserve_context = False

        # on exit we want to clean up earlier.  Normally the request context
        # stays preserved until the next request in the same thread comes
        # in.  See RequestGlobals.push() for the general behavior.
        top = _request_ctx_stack.top
        if top is not None and top.preserved:
            top.pop()
| artistic-2.0 |
hujiajie/chromium-crosswalk | build/android/devil/android/device_utils_device_test.py | 10 | 7962 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Unit tests for the contents of device_utils.py (mostly DeviceUtils).
The test will invoke real devices
"""
import os
import tempfile
import unittest
from devil.android import device_utils
from devil.android.sdk import adb_wrapper
from devil.utils import cmd_helper
_OLD_CONTENTS = "foo"
_NEW_CONTENTS = "bar"
_DEVICE_DIR = "/data/local/tmp/device_utils_test"
_SUB_DIR = "sub"
_SUB_DIR1 = "sub1"
_SUB_DIR2 = "sub2"
class DeviceUtilsPushDeleteFilesTest(unittest.TestCase):
def setUp(self):
    # These tests require a real Android device attached via adb; fail fast
    # with an assertion otherwise.
    devices = adb_wrapper.AdbWrapper.Devices()
    assert devices, 'A device must be attached'
    self.adb = devices[0]
    self.adb.WaitForDevice()
    self.device = device_utils.DeviceUtils(
        self.adb, default_timeout=10, default_retries=0)
@staticmethod
def _MakeTempFile(contents):
    """Make a temporary file with the given contents.

    Args:
      contents: string to write to the temporary file.

    Returns:
      A (path, file_name) tuple: the absolute path to the file and its
      basename.
    """
    fi, path = tempfile.mkstemp(text=True)
    with os.fdopen(fi, 'w') as f:
        f.write(contents)
    file_name = os.path.basename(path)
    return (path, file_name)
@staticmethod
def _MakeTempFileGivenDir(directory, contents):
    """Make a temporary file under the given directory
    with the given contents.

    Args:
      directory: the temp directory to create the file in.
      contents: string to write to the temp file.

    Returns:
      A (path, file_name) tuple: the absolute path to the file and its
      basename.  (The original docstring said "list"; a tuple is returned.)
    """
    fi, path = tempfile.mkstemp(dir=directory, text=True)
    with os.fdopen(fi, 'w') as f:
        f.write(contents)
    file_name = os.path.basename(path)
    return (path, file_name)
@staticmethod
def _ChangeTempFile(path, contents):
with os.open(path, 'w') as f:
f.write(contents)
@staticmethod
def _DeleteTempFile(path):
os.remove(path)
def testPushChangedFiles_noFileChange(self):
(host_file_path, file_name) = self._MakeTempFile(_OLD_CONTENTS)
device_file_path = "%s/%s" % (_DEVICE_DIR, file_name)
self.adb.Push(host_file_path, device_file_path)
self.device.PushChangedFiles([(host_file_path, device_file_path)])
result = self.device.RunShellCommand(['cat', device_file_path],
single_line=True)
self.assertEqual(_OLD_CONTENTS, result)
cmd_helper.RunCmd(['rm', host_file_path])
self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
def testPushChangedFiles_singleFileChange(self):
(host_file_path, file_name) = self._MakeTempFile(_OLD_CONTENTS)
device_file_path = "%s/%s" % (_DEVICE_DIR, file_name)
self.adb.Push(host_file_path, device_file_path)
with open(host_file_path, 'w') as f:
f.write(_NEW_CONTENTS)
self.device.PushChangedFiles([(host_file_path, device_file_path)])
result = self.device.RunShellCommand(['cat', device_file_path],
single_line=True)
self.assertEqual(_NEW_CONTENTS, result)
cmd_helper.RunCmd(['rm', host_file_path])
self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
def testDeleteFiles(self):
host_tmp_dir = tempfile.mkdtemp()
(host_file_path, file_name) = self._MakeTempFileGivenDir(
host_tmp_dir, _OLD_CONTENTS)
device_file_path = "%s/%s" % (_DEVICE_DIR, file_name)
self.adb.Push(host_file_path, device_file_path)
cmd_helper.RunCmd(['rm', host_file_path])
self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
delete_device_stale=True)
result = self.device.RunShellCommand(['ls', _DEVICE_DIR], single_line=True)
self.assertEqual('', result)
cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
def testPushAndDeleteFiles_noSubDir(self):
host_tmp_dir = tempfile.mkdtemp()
(host_file_path1, file_name1) = self._MakeTempFileGivenDir(
host_tmp_dir, _OLD_CONTENTS)
(host_file_path2, file_name2) = self._MakeTempFileGivenDir(
host_tmp_dir, _OLD_CONTENTS)
device_file_path1 = "%s/%s" % (_DEVICE_DIR, file_name1)
device_file_path2 = "%s/%s" % (_DEVICE_DIR, file_name2)
self.adb.Push(host_file_path1, device_file_path1)
self.adb.Push(host_file_path2, device_file_path2)
with open(host_file_path1, 'w') as f:
f.write(_NEW_CONTENTS)
cmd_helper.RunCmd(['rm', host_file_path2])
self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
delete_device_stale=True)
result = self.device.RunShellCommand(['cat', device_file_path1],
single_line=True)
self.assertEqual(_NEW_CONTENTS, result)
result = self.device.RunShellCommand(['ls', _DEVICE_DIR], single_line=True)
self.assertEqual(file_name1, result)
self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
def testPushAndDeleteFiles_SubDir(self):
host_tmp_dir = tempfile.mkdtemp()
host_sub_dir1 = "%s/%s" % (host_tmp_dir, _SUB_DIR1)
host_sub_dir2 = "%s/%s/%s" % (host_tmp_dir, _SUB_DIR, _SUB_DIR2)
cmd_helper.RunCmd(['mkdir', '-p', host_sub_dir1])
cmd_helper.RunCmd(['mkdir', '-p', host_sub_dir2])
(host_file_path1, file_name1) = self._MakeTempFileGivenDir(
host_tmp_dir, _OLD_CONTENTS)
(host_file_path2, file_name2) = self._MakeTempFileGivenDir(
host_tmp_dir, _OLD_CONTENTS)
(host_file_path3, file_name3) = self._MakeTempFileGivenDir(
host_sub_dir1, _OLD_CONTENTS)
(host_file_path4, file_name4) = self._MakeTempFileGivenDir(
host_sub_dir2, _OLD_CONTENTS)
device_file_path1 = "%s/%s" % (_DEVICE_DIR, file_name1)
device_file_path2 = "%s/%s" % (_DEVICE_DIR, file_name2)
device_file_path3 = "%s/%s/%s" % (_DEVICE_DIR, _SUB_DIR1, file_name3)
device_file_path4 = "%s/%s/%s/%s" % (_DEVICE_DIR, _SUB_DIR,
_SUB_DIR2, file_name4)
self.adb.Push(host_file_path1, device_file_path1)
self.adb.Push(host_file_path2, device_file_path2)
self.adb.Push(host_file_path3, device_file_path3)
self.adb.Push(host_file_path4, device_file_path4)
with open(host_file_path1, 'w') as f:
f.write(_NEW_CONTENTS)
cmd_helper.RunCmd(['rm', host_file_path2])
cmd_helper.RunCmd(['rm', host_file_path4])
self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
delete_device_stale=True)
result = self.device.RunShellCommand(['cat', device_file_path1],
single_line=True)
self.assertEqual(_NEW_CONTENTS, result)
result = self.device.RunShellCommand(['ls', _DEVICE_DIR])
self.assertIn(file_name1, result)
self.assertIn(_SUB_DIR1, result)
self.assertIn(_SUB_DIR, result)
self.assertEqual(3, len(result))
result = self.device.RunShellCommand(['cat', device_file_path3],
single_line=True)
self.assertEqual(_OLD_CONTENTS, result)
result = self.device.RunShellCommand(["ls", "%s/%s/%s"
% (_DEVICE_DIR, _SUB_DIR, _SUB_DIR2)],
single_line=True)
self.assertEqual('', result)
self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
def testRestartAdbd(self):
old_adbd_pid = self.device.RunShellCommand(
['ps', '|', 'grep', 'adbd'])[1].split()[1]
self.device.RestartAdbd()
new_adbd_pid = self.device.RunShellCommand(
['ps', '|', 'grep', 'adbd'])[1].split()[1]
self.assertNotEqual(old_adbd_pid, new_adbd_pid)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
sonnyhu/scikit-learn | examples/ensemble/plot_gradient_boosting_quantile.py | 392 | 2114 | """
=====================================================
Prediction Intervals for Gradient Boosting Regression
=====================================================
This example shows how quantile regression can be used
to create prediction intervals.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.ensemble import GradientBoostingRegressor
# Fixed seed so the example is reproducible.
np.random.seed(1)


def f(x):
    """The function to predict."""
    return x * np.sin(x)

#----------------------------------------------------------------------
# First the noiseless case
X = np.atleast_2d(np.random.uniform(0, 10.0, size=100)).T
X = X.astype(np.float32)

# Observations
y = f(X).ravel()

# Heteroscedastic noise: per-sample standard deviation in [1.5, 2.5).
dy = 1.5 + 1.0 * np.random.random(y.shape)
noise = np.random.normal(0, dy)
y += noise
y = y.astype(np.float32)

# Mesh the input space for evaluations of the real function, the prediction and
# its MSE
xx = np.atleast_2d(np.linspace(0, 10, 1000)).T
xx = xx.astype(np.float32)

# Upper quantile; the lower bound uses 1 - alpha, giving a 90% interval.
alpha = 0.95

clf = GradientBoostingRegressor(loss='quantile', alpha=alpha,
                                n_estimators=250, max_depth=3,
                                learning_rate=.1, min_samples_leaf=9,
                                min_samples_split=9)

# Fit the 95th-percentile model.
clf.fit(X, y)

# Make the prediction on the meshed x-axis
y_upper = clf.predict(xx)

# Refit the same estimator for the 5th percentile.
clf.set_params(alpha=1.0 - alpha)
clf.fit(X, y)

# Make the prediction on the meshed x-axis
y_lower = clf.predict(xx)

# Least-squares loss gives the conditional-mean prediction.
clf.set_params(loss='ls')
clf.fit(X, y)

# Make the prediction on the meshed x-axis
y_pred = clf.predict(xx)

# Plot the function, the prediction and the 90% confidence interval based on
# the MSE
fig = plt.figure()
plt.plot(xx, f(xx), 'g:', label=u'$f(x) = x\,\sin(x)$')
plt.plot(X, y, 'b.', markersize=10, label=u'Observations')
plt.plot(xx, y_pred, 'r-', label=u'Prediction')
plt.plot(xx, y_upper, 'k-')
plt.plot(xx, y_lower, 'k-')
plt.fill(np.concatenate([xx, xx[::-1]]),
         np.concatenate([y_upper, y_lower[::-1]]),
         alpha=.5, fc='b', ec='None', label='90% prediction interval')
plt.xlabel('$x$')
plt.ylabel('$f(x)$')
plt.ylim(-10, 20)
plt.legend(loc='upper left')
plt.show()
| bsd-3-clause |
KoertJanssens/MasterBall.be | pogom/altitude.py | 20 | 2596 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import logging
import requests
import random
from .models import LocationAltitude
log = logging.getLogger(__name__)
# Altitude used when use_altitude_cache is enabled.
fallback_altitude = None
def get_gmaps_altitude(lat, lng, gmaps_key):
    """Look up the elevation of (lat, lng) via the Google Elevation API.

    Returns a (altitude, status) tuple; altitude is None and status is
    'UNKNOWN_ERROR' when the request or the response parsing fails.
    """
    try:
        session = requests.Session()
        url = ('https://maps.googleapis.com/maps/api/elevation/json?' +
               'locations={},{}&key={}').format(lat, lng, gmaps_key)
        payload = session.get(url, timeout=5).json()
        status = payload['status']
        results = payload.get('results', [])
        first_result = results[0] if results else {}
        altitude = first_result.get('elevation', None)
    except Exception as e:
        # Any network/parse failure is reported as an unknown error.
        log.exception('Unable to retrieve altitude from Google APIs: %s.', e)
        status = 'UNKNOWN_ERROR'
        altitude = None
    return (altitude, status)
def randomize_altitude(altitude, altitude_variance):
    """Return *altitude* with random jitter applied.

    With a positive variance the result is shifted by a whole number drawn
    from [-variance, variance) plus a small fractional part; otherwise only
    the fractional part is added.
    """
    if altitude_variance > 0:
        whole_offset = random.randrange(-1 * altitude_variance,
                                        altitude_variance)
        fractional = float(format(random.random(), '.13f'))
        return altitude + whole_offset + fractional
    return altitude + float(format(random.random(), '.13f'))
# Only once fetched altitude
def get_fallback_altitude(args, loc):
    """Return the module-wide fallback altitude, querying Google at most once.

    A failed lookup is recorded as -1 so it is never retried; callers treat
    -1 the same as "unknown".
    """
    global fallback_altitude

    # Only query if it's not set. A previous failure is stored as -1, so the
    # original extra check `fallback_altitude != -1` was redundant: when the
    # value is None it can never equal -1.
    if fallback_altitude is None:
        (fallback_altitude, status) = get_gmaps_altitude(loc[0], loc[1],
                                                         args.gmaps_key)

        # Failed, don't try again.
        if fallback_altitude is None:
            fallback_altitude = -1

    return fallback_altitude
# Get altitude from the db or try to fetch from gmaps api,
# otherwise, default altitude
def cached_get_altitude(args, loc):
    """Return an altitude for *loc*, preferring the local DB cache.

    On a cache miss the Google Maps Elevation API is queried; only a real
    value (not None/-1) is written back to the cache.
    """
    altitude = LocationAltitude.get_nearby_altitude(loc)
    if altitude is None:
        (altitude, status) = get_gmaps_altitude(loc[0], loc[1], args.gmaps_key)
        # -1 is the module's "lookup failed" marker; never cache failures.
        if altitude is not None and altitude != -1:
            LocationAltitude.save_altitude(loc, altitude)
    return altitude
# Get altitude main method
def get_altitude(args, loc):
    """Resolve an altitude for *loc* and apply random jitter.

    NOTE(review): this branch looks inverted relative to the module comment
    ("Altitude used when use_altitude_cache is enabled" describes
    fallback_altitude), yet the fallback path is taken when the flag is
    *off* -- confirm against the command-line flag semantics.
    """
    if not args.use_altitude_cache:
        altitude = get_fallback_altitude(args, loc)
    else:
        altitude = cached_get_altitude(args, loc)

    # None or -1 mean every lookup failed; fall back to the configured value.
    if altitude is None or altitude == -1:
        altitude = args.altitude

    return randomize_altitude(altitude, args.altitude_variance)
| agpl-3.0 |
Cojacfar/Maker | comm/lib/python2.7/site-packages/pip/_vendor/html5lib/trie/datrie.py | 1301 | 1178 | from __future__ import absolute_import, division, unicode_literals
from datrie import Trie as DATrie
from pip._vendor.six import text_type
from ._base import Trie as ABCTrie
class Trie(ABCTrie):
    """Adapter exposing the C `datrie` trie through the ABCTrie interface.

    All keys must be text strings; the backing trie's alphabet is derived
    from the characters present in the initial data.
    """

    def __init__(self, data):
        # datrie requires the full alphabet up front, so collect every
        # character used by the keys before constructing the trie.
        chars = set()
        for key in data.keys():
            if not isinstance(key, text_type):
                raise TypeError("All keys must be strings")
            for char in key:
                chars.add(char)

        self._data = DATrie("".join(chars))
        for key, value in data.items():
            self._data[key] = value

    def __contains__(self, key):
        return key in self._data

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        # Iteration is not supported by this backend.
        raise NotImplementedError()

    def __getitem__(self, key):
        return self._data[key]

    def keys(self, prefix=None):
        """Return all keys, or only those starting with *prefix*."""
        return self._data.keys(prefix)

    def has_keys_with_prefix(self, prefix):
        return self._data.has_keys_with_prefix(prefix)

    def longest_prefix(self, prefix):
        return self._data.longest_prefix(prefix)

    def longest_prefix_item(self, prefix):
        return self._data.longest_prefix_item(prefix)
| gpl-2.0 |
rdipietro/tensorflow | tensorflow/python/kernel_tests/scan_ops_test.py | 21 | 9061 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for scan ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
def numpy_reverse(x, axis):
  """Return *x* with the entries along *axis* reversed.

  Args:
    x: numpy array.
    axis: axis to flip; may be negative.

  Returns:
    A view of *x* with the given axis reversed.
  """
  length = len(x.shape)
  if axis < 0:
    axis = length + axis
  # Use a tuple for multi-dimensional indexing: indexing with a *list* of
  # slices was deprecated in NumPy 1.15 and is an error in modern NumPy.
  ix = tuple(slice(None, None, -1)
             if i == axis else slice(None) for i in range(length))
  return x[ix]
def handle_options(func, x, axis, exclusive, reverse):
  """Adds tf options to numpy scan ops.

  Args:
    func: np.cumsum or np.cumprod.
    x: numpy array to scan.
    axis: axis to scan along; may be negative.
    exclusive: if True, shift the scan so element i excludes x[i].
    reverse: if True, scan from the end of the axis.

  Returns:
    The scanned array with the requested options applied.

  Raises:
    ValueError: if func is not a supported scan function.
  """
  length = len(x.shape)
  if axis < 0:
    axis = length + axis

  if reverse:
    x = numpy_reverse(x, axis)

  if exclusive:
    # Use tuples for multi-dimensional indexing; indexing with a *list* of
    # slices is an error in modern NumPy.
    ix_head = tuple(slice(0, 1) if i == axis else slice(None)
                    for i in range(length))
    ix_init = tuple(slice(0, -1) if i == axis else slice(None)
                    for i in range(length))
    if func == np.cumsum:
      init = np.zeros_like(x[ix_head])
    elif func == np.cumprod:
      init = np.ones_like(x[ix_head])
    else:
      raise ValueError("Unknown scan function.")
    # Prepend the identity element and drop the last slice.
    x = np.concatenate([init, func(x[ix_init], axis)], axis=axis)
  else:
    x = func(x, axis=axis)

  if reverse:
    x = numpy_reverse(x, axis)
  return x
class CumsumTest(tf.test.TestCase):
  """Compares tf.cumsum against the numpy reference for many dtypes/axes."""

  # Dtypes the op is expected to support.
  valid_dtypes = [np.int32, np.int64, np.float16, np.float32,
                  np.float64, np.complex64, np.complex128]

  def _compare(self, x, axis, exclusive, reverse):
    # numpy reference result with the same exclusive/reverse options.
    np_out = handle_options(np.cumsum, x, axis, exclusive, reverse)
    with self.test_session(use_gpu=True):
      tf_out = tf.cumsum(x, axis, exclusive, reverse).eval()

    self.assertAllClose(np_out, tf_out)

  def _compareAll(self, x, axis):
    # Exercise all four exclusive/reverse combinations.
    for exclusive in [True, False]:
      for reverse in [True, False]:
        self._compare(x, axis, exclusive, reverse)

  def testEmpty(self):
    for dtype in self.valid_dtypes:
      x = np.zeros([0]).astype(dtype)
      for axis in (-1, 0):
        self._compareAll(x, axis)

  def test1D(self):
    for dtype in self.valid_dtypes:
      x = np.arange(1, 6).reshape([5]).astype(dtype)
      for axis in (-1, 0):
        self._compareAll(x, axis)

  def test2D(self):
    for dtype in self.valid_dtypes:
      x = np.arange(0, 10).reshape([2, 5]).astype(dtype)
      for axis in (-2, -1, 0, 1):
        self._compareAll(x, axis)

  def test3D(self):
    for dtype in self.valid_dtypes:
      x = np.arange(0, 20).reshape([2, 2, 5]).astype(dtype)
      for axis in (-3, -2, -1, 0, 1, 2):
        self._compareAll(x, axis)

  def test6D(self):
    for dtype in self.valid_dtypes:
      x = np.arange(1, 145).reshape([2, 2, 3, 3, 2, 2]).astype(dtype)
      for axis in range(-6, 6, 3):
        self._compareAll(x, axis)

  def testInvalidAxis(self):
    # Out-of-range and non-scalar axes must raise InvalidArgumentError.
    x = np.arange(0, 10).reshape([2, 5]).astype(np.float32)
    input_tensor = tf.convert_to_tensor(x)
    with self.test_session(use_gpu=True):
      with self.assertRaisesWithPredicateMatch(
          tf.errors.InvalidArgumentError,
          lambda e: "Expected scan axis in the range [-2, 2)" in str(e)):
        tf.cumsum(input_tensor, -3).eval()
      with self.assertRaisesWithPredicateMatch(
          tf.errors.InvalidArgumentError,
          lambda e: "Expected scan axis in the range [-2, 2)" in str(e)):
        tf.cumsum(input_tensor, 2).eval()
      with self.assertRaisesWithPredicateMatch(
          tf.errors.InvalidArgumentError,
          lambda e: "axis must be a scalar" in str(e)):
        tf.cumsum(input_tensor, [0]).eval()

  def _compareGradient(self, shape, axis, exclusive, reverse):
    # Symbolic vs. numeric Jacobian comparison for the given options.
    x = np.arange(0, 50).reshape(shape).astype(np.float64)
    with self.test_session(use_gpu=True):
      t = tf.convert_to_tensor(x)
      result = tf.cumsum(t, axis, exclusive, reverse)
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  shape,
                                                  result,
                                                  shape,
                                                  x_init_value=x,
                                                  delta=1)
    self.assertAllClose(jacob_t, jacob_n, rtol=1e-8, atol=1e-8)

  def testGradient(self):
    for axis in (-1, 0):
      self._compareGradient([50], axis, False, False)

  def testGradientReverse(self):
    for axis in (-1, 0):
      self._compareGradient([50], axis, False, True)

  def testGradientExclusive(self):
    for axis in (-1, 0):
      self._compareGradient([50], axis, True, False)

  def testGradientExclusiveReverse(self):
    for axis in (-1, 0):
      self._compareGradient([50], axis, True, True)

  def testGradient2D(self):
    for axis in (-1, 0, 1):
      for exclusive in [True, False]:
        for reverse in [True, False]:
          self._compareGradient([5, 10], axis, exclusive, reverse)
class CumprodTest(tf.test.TestCase):
  """Compares tf.cumprod against the numpy reference for many dtypes/axes."""

  # Dtypes the op is expected to support.
  valid_dtypes = [np.int32, np.int64, np.float16, np.float32,
                  np.float64, np.complex64, np.complex128]

  def _compare(self, x, axis, exclusive, reverse):
    # numpy reference result with the same exclusive/reverse options.
    np_out = handle_options(np.cumprod, x, axis, exclusive, reverse)
    with self.test_session(use_gpu=True):
      tf_out = tf.cumprod(x, axis, exclusive, reverse).eval()

    self.assertAllClose(np_out, tf_out)

  def _compareAll(self, x, axis):
    # Exercise all four exclusive/reverse combinations.
    for exclusive in [True, False]:
      for reverse in [True, False]:
        self._compare(x, axis, exclusive, reverse)

  def testEmpty(self):
    for dtype in self.valid_dtypes:
      x = np.zeros([0]).astype(dtype)
      for axis in (-1, 0):
        self._compareAll(x, axis)

  def test1D(self):
    for dtype in self.valid_dtypes:
      x = np.arange(1, 6).reshape([5]).astype(dtype)
      for axis in (-1, 0):
        self._compareAll(x, axis)

  def test2D(self):
    # Values start at 1 to keep products away from zero.
    for dtype in self.valid_dtypes:
      x = np.arange(1, 11).reshape([2, 5]).astype(dtype)
      for axis in (-2, -1, 0, 1):
        self._compareAll(x, axis)

  def test3D(self):
    for dtype in self.valid_dtypes:
      x = np.arange(1, 21).reshape([2, 2, 5]).astype(dtype)
      for axis in (-3, -2, -1, 0, 1, 2):
        self._compareAll(x, axis)

  def test6D(self):
    for dtype in self.valid_dtypes:
      x = np.arange(1, 145).reshape([2, 2, 3, 3, 2, 2]).astype(dtype)
      for axis in range(-6, 6, 3):
        self._compareAll(x, axis)

  def testInvalidAxis(self):
    # Out-of-range and non-scalar axes must raise InvalidArgumentError.
    x = np.arange(0, 10).reshape([2, 5]).astype(np.float32)
    input_tensor = tf.convert_to_tensor(x)
    with self.test_session(use_gpu=True):
      with self.assertRaisesWithPredicateMatch(
          tf.errors.InvalidArgumentError,
          lambda e: "Expected scan axis in the range [-2, 2)" in str(e)):
        tf.cumprod(input_tensor, -3).eval()
      with self.assertRaisesWithPredicateMatch(
          tf.errors.InvalidArgumentError,
          lambda e: "Expected scan axis in the range [-2, 2)" in str(e)):
        tf.cumprod(input_tensor, 2).eval()
      with self.assertRaisesWithPredicateMatch(
          tf.errors.InvalidArgumentError,
          lambda e: "axis must be a scalar" in str(e)):
        tf.cumprod(input_tensor, [0]).eval()

  def _compareGradient(self, shape, axis, exclusive, reverse):
    # Symbolic vs. numeric Jacobian comparison for the given options.
    x = np.arange(1, 9).reshape(shape).astype(np.float64)
    with self.test_session(use_gpu=True):
      t = tf.convert_to_tensor(x)
      result = tf.cumprod(t, axis, exclusive, reverse)
      jacob_t, jacob_n = tf.test.compute_gradient(t,
                                                  shape,
                                                  result,
                                                  shape,
                                                  x_init_value=x,
                                                  delta=1)
    self.assertAllClose(jacob_t, jacob_n, rtol=1e-8, atol=1e-8)

  def testGradient(self):
    for axis in (-1, 0):
      self._compareGradient([8], axis, False, False)

  def testGradientReverse(self):
    for axis in (-1, 0):
      self._compareGradient([8], axis, False, True)

  def testGradientExclusive(self):
    for axis in (-1, 0):
      self._compareGradient([8], axis, True, False)

  def testGradientExclusiveReverse(self):
    for axis in (-1, 0):
      self._compareGradient([8], axis, True, True)

  def testGradient2D(self):
    for axis in (-2, -1, 0, 1):
      for exclusive in [True, False]:
        for reverse in [True, False]:
          self._compareGradient([2, 4], axis, exclusive, reverse)


if __name__ == "__main__":
  tf.test.main()
| apache-2.0 |
erral/eitbapi | eitbapi/utils.py | 1 | 10106 | # -*- coding: utf8 -*-
from __future__ import unicode_literals
from bs4 import BeautifulSoup
import requests
import sys
import xml.etree.ElementTree as ET
import os
import json
import datetime
import pytz
if sys.version_info >= (3, 0, 0):
# for Python 3
PYTHON3 = True
else:
PYTHON3 = False
EITB_PLAYLIST_BASE_URL = (
"https://mam.eitb.eus/mam/REST/ServiceMultiweb/Playlist/MULTIWEBTV/{}"
)
EITB_VIDEO_BASE_URL = "https://www.eitb.tv/es/video/"
EITB_VIDEO_URL = "https://www.eitb.tv/es/video/{}/{}/{}/{}/"
EITB_BASE_URL = "https://www.eitb.tv/"
EITB_CACHED_TV_PROGRAM_LIST_XML_URL = (
"https://raw.githubusercontent.com/erral/eitbapi/master/cache.json"
)
EITB_TV_PROGRAM_LIST_XML_URL = "https://www.eitb.tv/eu/menu/getMenu/tv/"
EITB_CACHED_RADIO_PROGRAM_LIST_XML_URL = "https://raw.githubusercontent.com/erral/eitbapi/master/radio-cache.json"
EITB_RADIO_PROGRAM_LIST_XML_URL = "https://www.eitb.tv/es/menu/getMenu/radio/"
EITB_LAST_BROADCAST_URL = 'https://mam.eitb.eus/mam/REST/ServiceMultiweb/SmartPlaylistByDestination/MULTIWEBTV/12/BROADCST_DATE/DESC/{}/'
def safe_unicode(value, encoding="utf-8"):
    """Best-effort text/bytes normalisation, branching on Python version.

    NOTE(review): despite the name, on Python 3 this returns *bytes* for
    both bytes and str input. The str branch catches UnicodeDecodeError,
    but ``bytes(value, encoding)`` raises UnicodeEncodeError, and the
    fallback calls ``.decode`` on a str (which has no such method on
    py3) -- confirm intended behaviour before relying on the error path.
    """
    if PYTHON3:
        if isinstance(value, bytes):
            # Already encoded; passed through unchanged.
            return value
        elif isinstance(value, str):
            try:
                value = bytes(value, encoding)
            except (UnicodeDecodeError):
                value = value.decode("utf-8", "replace")
        return value
    else:
        # Python 2: normalise to a unicode object.
        if isinstance(value, unicode):
            return value
        elif isinstance(value, basestring):
            try:
                value = unicode(value, encoding)
            except (UnicodeDecodeError):
                value = value.decode("utf-8", "replace")
        return value


def safe_encode(value, charset="utf-8"):
    """Return *value* as a native ``str`` in *charset*.

    Round-trips through safe_unicode: decodes on Python 3 (where
    safe_unicode yields bytes) and encodes on Python 2.
    """
    if PYTHON3:
        return safe_unicode(value, charset).decode(charset)
    else:
        return safe_unicode(value, charset).encode(charset)
def xml_to_dict(data):
    """Parse an EITB menu XML blob into a dict keyed by item title.

    Each child of the root becomes one entry: its sub-elements are mapped
    to ``{'text': <element text>, **attributes}`` keyed by tag name, and
    the entry itself is keyed by the text of its ``title`` sub-element.
    Unparsable input yields an empty dict.
    """
    try:
        root = ET.fromstring(data)
    except ET.ParseError:
        # Treat malformed XML as an empty menu.
        root = []

    parsed = {}
    for item in root:
        entry = {}
        for child in item:
            node = {"text": child.text}
            node.update(child.attrib)
            entry[child.tag] = node
        parsed[entry["title"]["text"]] = entry
    return parsed
def build_program_list_by_hash(menu_hash, mode="tv", first=False):
    """Dispatch submenu fetching for the given *mode* ("tv" or "radio").

    BUG FIX: `results` was only assigned inside the two known-mode
    branches, so any other mode raised NameError at the return; an
    unknown mode now yields an empty list.
    """
    results = []
    if mode == "tv":
        results = get_tv_submenu_data(menu_hash, first=first)
    elif mode == "radio":
        results = get_radio_submenu_data(menu_hash, first=first)
    return results
def get_tv_program_data():
    """Fetch the pre-built (cached) TV program list from GitHub.

    Returns the decoded JSON payload, or an empty list when the response
    body is not valid JSON.
    """
    res = requests.get(EITB_CACHED_TV_PROGRAM_LIST_XML_URL)
    try:
        result = res.json()
    except ValueError:
        # .json() raises ValueError (json.JSONDecodeError) on bad payloads;
        # catching only that avoids masking unrelated errors the way the
        # original bare `except:` did.
        result = []
    return result
def _get_tv_program_data():
    """Build the TV A-Z program list live from eitb.tv (uncached variant)."""
    menudata = requests.get(EITB_TV_PROGRAM_LIST_XML_URL)
    menudict = xml_to_dict(menudata.content)
    # Hash identifying the "programas_az" (programs A-Z) submenu.
    menu_hash = (
        menudict.get("programas_az", {}).get("submenu", {}).get("hash", "")
    )  # noqa
    return build_program_list_by_hash(menu_hash, mode="tv", first=True)


def get_tv_program_data_per_type(menu_hash):
    """Return the TV programs listed under one category submenu hash."""
    return build_program_list_by_hash(menu_hash)


def get_tv_program_types():
    """Return the available TV program categories as a dict keyed by title."""
    menudata = requests.get(EITB_TV_PROGRAM_LIST_XML_URL)
    menudict = xml_to_dict(menudata.content)
    menu_hash = (
        menudict.get("por_categorias", {}).get("submenu", {}).get("hash", "")
    )  # noqa
    categorydata = requests.get(EITB_TV_PROGRAM_LIST_XML_URL + "/" + menu_hash)
    categorydict = xml_to_dict(categorydata.content)
    return categorydict
def get_tv_news_programs():
    """Return the TV news ("informativos") programs as a dict keyed by title."""
    menudata = requests.get(EITB_TV_PROGRAM_LIST_XML_URL)
    menudict = xml_to_dict(menudata.content)
    menu_hash = (
        menudict.get("informativos", {}).get("submenu", {}).get("hash", "")
    )  # noqa
    categorydata = requests.get(EITB_TV_PROGRAM_LIST_XML_URL + "/" + menu_hash)
    categorydict = xml_to_dict(categorydata.content)
    return categorydict


def get_last_broadcast_data(number_of_items):
    """Return the JSON playlist of the last *number_of_items* TV broadcasts."""
    listdata = requests.get(EITB_LAST_BROADCAST_URL.format(number_of_items))
    listjson = json.loads(listdata.content)
    return listjson
def get_radio_program_data():
    """Fetch the pre-built (cached) radio program list from GitHub.

    Returns the decoded JSON payload, or an empty list when the response
    body is not valid JSON.
    """
    res = requests.get(EITB_CACHED_RADIO_PROGRAM_LIST_XML_URL)
    try:
        result = res.json()
    except ValueError:
        # .json() raises ValueError (json.JSONDecodeError) on bad payloads;
        # catching only that avoids masking unrelated errors the way the
        # original bare `except:` did.
        result = []
    return result
def _get_radio_program_data():
    """Build the radio A-Z program list live from eitb.tv (uncached variant)."""
    results = []
    menudata = requests.get(EITB_RADIO_PROGRAM_LIST_XML_URL)
    menudict = xml_to_dict(menudata.content)
    # Hash identifying the "programas_az" (programs A-Z) submenu.
    menu_hash = (
        menudict.get("programas_az", {}).get("submenu", {}).get("hash", None)
    )  # noqa
    results = get_radio_submenu_data(menu_hash, first=True)
    return results


def get_radio_program_types():
    """Return the available radio program categories as a dict keyed by title."""
    menudata = requests.get(EITB_RADIO_PROGRAM_LIST_XML_URL)
    menudict = xml_to_dict(menudata.content)
    menu_hash = (
        menudict.get("por_categorias", {}).get("submenu", {}).get("hash", "")
    )  # noqa
    categorydata = requests.get(
        EITB_RADIO_PROGRAM_LIST_XML_URL + "/" + menu_hash
    )  # noqa
    categorydict = xml_to_dict(categorydata.content)
    return categorydict


def get_radio_stations():
    """Return the available radio stations ("por_emisoras") keyed by title."""
    menudata = requests.get(EITB_RADIO_PROGRAM_LIST_XML_URL)
    menudict = xml_to_dict(menudata.content)
    menu_hash = (
        menudict.get("por_emisoras", {}).get("submenu", {}).get("hash", "")
    )  # noqa
    categorydata = requests.get(
        EITB_RADIO_PROGRAM_LIST_XML_URL + "/" + menu_hash
    )  # noqa
    categorydict = xml_to_dict(categorydata.content)
    return categorydict
def get_tv_submenu_data(menu_hash, pretitle="", first=False):
    """Recursively flatten the TV submenu tree under *menu_hash*.

    Returns a list of ``{'title', 'id'}`` dicts. Nested submenus are
    followed; below the first level the parent title is prefixed to each
    child title.
    """
    submenudata = requests.get(EITB_TV_PROGRAM_LIST_XML_URL + "/" + menu_hash)
    submenudict = xml_to_dict(submenudata.content)
    results = []
    for item in submenudict.values():
        subhash = item.get("submenu", {}).get("hash", None)
        if subhash:
            # Recurse; the top level does not contribute a title prefix.
            if first:
                results += get_tv_submenu_data(subhash)
            else:
                results += get_tv_submenu_data(
                    subhash, pretitle=item.get("title").get("text")
                )  # noqa
        data = {}
        data["title"] = (
            pretitle + " " + item.get("title", {}).get("text", "")
        ).strip()  # noqa
        data["id"] = item.get("id", {}).get("text", "")
        # Items without an id are pure containers; skip them.
        if data["id"]:
            results.append(data)
    return results


def get_radio_submenu_data(menu_hash, pretitle="", first=False):
    """Recursively flatten the radio submenu tree under *menu_hash*.

    Mirrors get_tv_submenu_data but against the radio menu endpoint.
    """
    submenudata = requests.get(
        EITB_RADIO_PROGRAM_LIST_XML_URL + "/" + menu_hash
    )  # noqa
    submenudict = xml_to_dict(submenudata.content)
    results = []
    for item in submenudict.values():
        subhash = item.get("submenu", {}).get("hash", None)
        if subhash:
            # Recurse; the top level does not contribute a title prefix.
            if first:
                results += get_radio_submenu_data(subhash)
            else:
                results += get_radio_submenu_data(
                    subhash, pretitle=item.get("title").get("text")
                )  # noqa
        data = {}
        data["title"] = (
            pretitle + " " + item.get("title", {}).get("text", "")
        ).strip()  # noqa
        data["id"] = item.get("id", {}).get("text", "")
        # Items without an id are pure containers; skip them.
        if data["id"]:
            results.append(data)
    return results
def create_internal_video_url(
    playlist_title, playlist_id, video_title, video_id, request=None
):  # noqa
    """create an internal url to identify an episode inside this API."""
    playlist_title = clean_title(playlist_title)
    # BUG FIX: the video slug was built from the *playlist* title
    # (clean_title(playlist_title)), so every episode URL repeated the
    # playlist name instead of its own.
    video_title = clean_title(video_title)
    internal_url = "{}/{}/{}/{}".format(
        playlist_title, playlist_id, video_id, video_title
    )  # noqa
    return request.route_url("episode", episode_url=internal_url)
def clean_title(title):
    """Slugify *title* the same way the EITB website does.

    Mirrors the ``string2url`` JavaScript helper:
    - url: http://www.eitb.tv/resources/js/comun/comun.js
    - method: string2url
    """
    # Ordered replacement pairs, applied to the upper-cased title.
    replacements = (
        ("À", "A"),
        ("Á", "A"),
        ("Â", "A"),
        ("Ã", "A"),
        ("Ä", "A"),
        ("Å", "A"),
        ("Æ", "E"),
        ("È", "E"),
        ("É", "E"),
        ("Ê", "E"),
        ("Ë", "E"),
        ("Ì", "I"),
        ("Í", "I"),
        ("Î", "I"),
        ("Ï", "I"),
        ("Ò", "O"),
        ("Ó", "O"),
        ("Ô", "O"),
        ("Ö", "O"),
        ("Ù", "U"),
        ("Ú", "U"),
        ("Û", "U"),
        ("Ü", "U"),
        ("Ñ", "N"),
        ("?", ""),
        ("¿", ""),
        ("!", ""),
        ("¡", ""),
        (":", ""),
        ("_", "-"),
        ("º", ""),
        ("ª", "a"),
        (",", ""),
        (".", ""),
        ("(", ""),
        (")", ""),
        ("@", ""),
        (" ", "-"),
        ("&", ""),
        ("#", ""),
    )
    slug = title.upper()
    for needle, replacement in replacements:
        slug = slug.replace(needle, replacement)
    return slug.lower()
def get_radio_programs(playlist_id):
    """Scrape the audio episode list from a radio playlist page.

    Returns a list of ``{'title', 'date', 'url', 'duration'}`` dicts.
    NOTE(review): relies on the site markup -- each episode is an
    <li class="audio_uno"> with exactly three <p> children (title with
    an 'original-title' anchor attribute, "date (duration)", download
    link); verify this still holds against the live pages.
    """
    # Normalise the id so it can be appended to the base URL directly.
    while playlist_id.startswith("/"):
        playlist_id = playlist_id[1:]

    results = []

    data = requests.get(EITB_BASE_URL + playlist_id)
    soup = BeautifulSoup(data.text, "html.parser")
    for li in soup.find_all("li", class_="audio_uno"):
        title_p, date_p, download_p = li.find_all("p")
        title = title_p.find("a").get("original-title")
        date, duration = date_p.text.split()
        url = download_p.find("a").get("href")
        # Duration arrives wrapped in parentheses, e.g. "(54:03)".
        duration = duration.replace("(", "").replace(")", "")
        item = dict(title=title, date=date, url=url, duration=duration)
        results.append(item)

    return results
def get_radio_program_data_per_type(playlist_id):
    """Return radio programs under one category hash (A-Z style, recursive)."""
    return build_program_list_by_hash(playlist_id, mode="radio", first=True)


def get_radio_program_data_per_station(station_id):
    """Return radio programs for one station hash (non-recursive listing)."""
    return build_program_list_by_hash(station_id, mode="radio", first=False)
# PARSE DATE TO ISO FORMAT
def date_to_iso_format(date):
    """Convert a 'YYYY-MM-DD HH:MM:SS' string to an ISO-8601 string.

    The naive timestamp is localized to Europe/Madrid before formatting.
    Input that is not a parsable string (wrong format, None, non-str) is
    returned unchanged.
    """
    dateformat = '%Y-%m-%d %H:%M:%S'
    tz = pytz.timezone('Europe/Madrid')
    try:
        date_to_format = datetime.datetime.strptime(date, dateformat)
        date_to_format = tz.localize(date_to_format)
        dateiso = date_to_format.isoformat()
    except (TypeError, ValueError):
        # Not a parsable date string; pass the value through untouched.
        dateiso = date
    return dateiso
| gpl-2.0 |
loicsander/Robofont-scripts | ScaleFast/ScaleFast.roboFontExt/lib/mutatorScale/objects/scaler.py | 4 | 20742 | #coding=utf-8
from __future__ import division
from robofab.world import RGlyph
from mutatorMath.objects.location import Location
from mutatorMath.objects.mutator import buildMutator
from mutatorScale.objects.fonts import MutatorScaleFont
from mutatorScale.objects.errorGlyph import ErrorGlyph
from mutatorScale.utilities.fontUtils import makeListFontName, joinFontName
from mutatorScale.utilities.numbersUtils import mapValue
class MutatorScaleEngine:
"""
This object is built to handle the interpolated scaling of glyphs using MutatorMath.
It requires a list of fonts (at least two) from which it determines which kind of interpolation it can achieve.
Maybe I should state the obvious: the whole process is based on the assumption that the provided fonts are compatible for interpolation.
With existing masters, the object is then set to certain parameters that allow for specific glyph scaling operations,
while scaling, a MutatorScaleEngine attempts to obtain specified weight and contrast for the scaled glyph
by interpolating accordingly and to the best possible result with available masters.
Each master in a MutatorScaleEngine is an instance of a MutatorScaleFont for which stem values are defined.
If not specifically provided, these stem values are measured on capital letters I and H for vertical and horizontal stems respectively.
The stem values obtained are only meant to be reference value and do not reflect the stem values of all glyphs but only of I and H.
While scaling, if you ask for a scaled glyph with stem values (80, 60), you’re effectively asking for a scaledGlyph interpolated
as to have the vertical stem of a I equal to 80 and the horizontal stem of a H equal to 60. It is not akin to ask that these stem values
are applied to the exact glyph you asked for, that’s not how interpolation works.
When a MutatorScaleEngine is asked for a scaled glyph with specific horizontal and vertical stem values,
here’s what happens:
– it collects glyphs corresponding to the glyphName passed to .getScaledGlyph() in the available masters;
– it scales all the master glyphs to the proportions to which the MutatorScaleEngine is set;
– it then builds a MutatorMath space in which masters are placed according to their horizontal and vertical stem values scaled down;
– finally, it returns a scaled down (as all masters are) interpolated glyph with the asked for stem values.
#####
Here’s how it goes:
>>> scaler = MutatorScaleEngine(ListOfFonts)
>>> scaler.set({
'scale': (1.03, 0.85)
})
>>> scaler.getScaledGlyph('a', ())
"""
errorGlyph = ErrorGlyph()
def __init__(self, masterFonts=[], stemsWithSlantedSection=False):
self.masters = {}
self._currentScale = None
self._workingStems = None
self.stemsWithSlantedSection = stemsWithSlantedSection
self._availableGlyphs = []
for font in masterFonts:
self.addMaster(font)
self.mutatorErrors = []
def __repr__(self):
return 'MutatorScaleEngine w/ {0} masters\n- {1}\n'.format(len(self.masters), '\n- '.join([repr(master) for master in self.masters.values()]))
def __getitem__(self, key):
if key in self.masters.keys():
return self.masters[key]
else:
raise KeyError(key)
def __iter__(self):
for master in self.masters.values():
yield master
def __len__(self):
return len(self.masters)
def __contains__(self, fontName):
return fontName in self.masters
def getMaster(self, font):
"""Returning a master by parsing a fonts name and returning it if it’s among masters."""
name = makeListFontName(font)
if name in self.masters:
return self.masters[name]
return
def getMasterByName(self, familyName, styleName):
name = joinFontName(familyName, styleName)
if name in self:
return self[name]
def getCurrentStemBase(self):
return self._workingStems
def hasTwoAxes(self):
if self._workingStems == 'both':
return True
else:
return False
def hasGlyph(self, glyphName):
"""Checking for glyph availability in all masters."""
return glyphName in self._availableGlyphs
def getReferenceGlyphNames(self):
"""Returning a list of glyphNames for valid reference glyphs,
i.e., glyphs that are not empty so they can serve as height reference.
"""
masters = self.masters.values()
glyphNames = self._availableGlyphs
validGlyphs_names = reduce(lambda a, b: list(set(a) & set(b)), [[glyphName for glyphName in glyphNames if len(master.glyphSet[glyphName])] for master in masters])
return validGlyphs_names
def set(self, scalingParameters):
"""Define scaling parameters.
Collect relevant data in the various forms it can be input,
produce a scale definition relevant to a ScaleFont object.
"""
scale = (1, 1)
width = 1
if scalingParameters.has_key('width'):
width = scalingParameters['width']
scale = (width, 1)
if scalingParameters.has_key('scale'):
scale = scalingParameters['scale']
if isinstance(scale, (float, int)):
scale = (scale, scale)
elif scalingParameters.has_key('targetHeight') and scalingParameters.has_key('referenceHeight'):
targetHeight = scalingParameters['targetHeight']
referenceHeight = scalingParameters['referenceHeight']
scale = (width, targetHeight, referenceHeight)
for master in self.masters.values():
master.setScale(scale)
self._currentScale = scale
def update(self):
self._determineWorkingStems()
def _parseStemsInput(self, stems):
if stems is None:
vstem, hstem = None, None
else:
try: vstem, hstem = stems
except: vstem, hstem = stems, None
return vstem, hstem
def _makeMaster(self, font, vstem, hstem):
"""Return a MutatorScaleFont."""
name = makeListFontName(font)
master = MutatorScaleFont(font, vstem=vstem, hstem=hstem, stemsWithSlantedSection=self.stemsWithSlantedSection)
return name, master
def addMaster(self, font, stems=None):
"""Add a MutatorScaleFont to masters."""
vstem, hstem = self._parseStemsInput(stems)
if (vstem is None) and ('I' not in font):
vstem = len(self.masters) * 100
name, master = self._makeMaster(font, vstem, hstem)
if not len(self._availableGlyphs):
self._availableGlyphs = master.keys()
elif len(self._availableGlyphs):
self._availableGlyphs = list(set(self._availableGlyphs) & set(master.keys()))
if self._currentScale is not None:
master.setScale(self._currentScale)
self.masters[name] = master
self.update()
def removeMaster(self, font):
"""Remove a MutatorScaleFont from masters."""
name = makeListFontName(font)
if self.masters.has_key(name):
self.masters.pop(name, 0)
self.update()
def getScaledGlyph(self, glyphName, stemTarget, slantCorrection=True, attributes=None):
    """Return an interpolated & scaled glyph according to set parameters and given masters."""
    # NOTE(review): masters is dict.values(); indexing masters[0] below only
    # works on Python 2 where values() returns a list -- confirm target runtime.
    masters = self.masters.values()
    workingStems = self._workingStems
    mutatorMasters = []
    yScales = []
    angles = []
    """
    Gather master glyphs for interpolation:
    each master glyph is scaled down according to set parameter,
    it is then inserted in a mutator design space with scaled down stem values.
    Asking for the initial stem values of a scaled down glyphName
    will result in an scaled glyph which will retain specified stem widths.
    """
    if len(masters) > 1 and workingStems is not None:
        medianYscale = 1
        medianAngle = 0
        for master in masters:
            xScale, yScale = master.getScale()
            vstem, hstem = master.getStems()
            yScales.append(yScale)
            # Only masters that contain the glyph AND have both stems
            # measured participate in the interpolation.
            if glyphName in master and vstem is not None and hstem is not None:
                masterGlyph = master[glyphName]
                if workingStems == 'both':
                    # Two-axis design space: one axis per stem direction.
                    axis = {
                        'vstem': vstem * xScale,
                        'hstem': hstem * yScale
                    }
                else:
                    # Single-axis design space keyed on the working stem.
                    if workingStems == 'vstem':
                        stem = vstem
                    elif workingStems == 'hstem':
                        stem = hstem
                    if slantCorrection == True:
                        # if interpolation is an/isotropic
                        # skew master glyphs to upright angle to minimize deformations
                        angle = master.italicAngle
                        if angle:
                            masterGlyph.skewX(angle)
                            angles.append(angle)
                    axis = { 'stem': stem * xScale }
                mutatorMasters.append((Location(**axis), masterGlyph))
        if len(angles) and slantCorrection == True:
            # calculate a median slant angle
            # in case there are variations among masters
            # shouldn’t happen, most of the time
            medianAngle = sum(angles) / len(angles)
        medianYscale = sum(yScales) / len(yScales)
        # NOTE(review): xScale here is whatever the LAST loop iteration left
        # behind -- presumably all masters share one x scale; confirm, or a
        # median should be used as for yScale.
        targetLocation = self._getTargetLocation(stemTarget, masters, workingStems, (xScale, medianYscale))
        instanceGlyph = self._getInstanceGlyph(targetLocation, mutatorMasters)
        if instanceGlyph.name == '_error_':
            # Interpolation failed: enrich the last recorded error report.
            if self.hasGlyph(glyphName):
                instanceGlyph.unicodes = masters[0][glyphName].unicodes
            self.mutatorErrors[-1]['glyph'] = glyphName
            self.mutatorErrors[-1]['masters'] = mutatorMasters
        if medianAngle and slantCorrection == True:
            # if masters were skewed to upright position
            # skew instance back to probable slant angle
            instanceGlyph.skew(-medianAngle)
        instanceGlyph.round()
        if attributes is not None:
            # Copy requested attributes (e.g. width, unicodes) onto the result.
            for attributeName in attributes:
                value = attributes[attributeName]
                setattr(instanceGlyph, attributeName, value)
        return instanceGlyph
    return ErrorGlyph('None')
def _getInstanceGlyph(self, location, masters):
I = self._getInstance(location, masters)
if I is not None:
return I.extractGlyph(RGlyph())
else:
errorMessage = self.mutatorErrors[-1]['error']
return ErrorGlyph('Interpolation', errorMessage)
def _getInstance(self, location, masters):
try:
b, m = buildMutator(masters)
if m is not None:
instance = m.makeInstance(location)
return instance
except Exception as e:
self.mutatorErrors.append({'error':e.message})
return None
def _getTargetLocation(self, stemTarget, masters, workingStems, scales):
    """
    Return a proper Location object for a scaled glyph instance,
    the essential part lies in the conversion of stem values,
    so that in anisotropic mode, a MutatorScaleEngine can attempt to produce
    a glyph with proper stem widths without requiring two-axes interpolation.

    scales is an (xScale, yScale) pair. The former tuple parameter
    `(xScale, yScale)` in the signature was removed by PEP 3113 and is a
    SyntaxError on Python 3; callers already pass a single tuple, so the
    call sites are unchanged.
    """
    xScale, yScale = scales
    targetVstem, targetHstem = None, None
    # Narrowed from a bare except: only unpacking failures mean
    # "stemTarget is a single value, not a pair".
    try:
        targetVstem, targetHstem = stemTarget
    except (TypeError, ValueError):
        pass
    if targetVstem is not None and targetHstem is not None:
        if workingStems == 'both':
            return Location(vstem=targetVstem, hstem=targetHstem)
        elif workingStems == 'vstem':
            vStems = [master.vstem * xScale for master in masters]
            hStems = [master.hstem * yScale for master in masters]
            (minVStem, minStemIndex), (maxVStem, maxStemIndex) = self._getExtremes(vStems)
            vStemSpan = (minVStem, maxVStem)
            hStemSpan = hStems[minStemIndex], hStems[maxStemIndex]
            # Map the target hstem into vstem space so a single-axis design
            # space can still honor both stem targets (anisotropic location).
            newHstem = mapValue(targetHstem, hStemSpan, vStemSpan)
            return Location(stem=(targetVstem, newHstem))
        elif workingStems == 'hstem':
            return Location(stem=targetHstem)
    else:
        return Location(stem=stemTarget)
def _getExtremes(self, values):
"""
Return the minimum and maximum in a list of values with indices,
this implementation was necessary to distinguish indices when min and max value happen to be equal (without being the same value per se).
"""
if len(values) > 1:
baseValue = (values[0], 0)
smallest, largest = baseValue, baseValue
for i, value in enumerate(values[1:]):
if value >= largest[0]:
largest = (value, (i+1))
elif value < smallest[0]:
smallest = (value, (i+1))
return smallest, largest
return
def _determineWorkingStems(self):
"""
Check conditions are met for two-axis interpolation in MutatorMath:
1. At least two identical values (to bind a new axis to the first axis)
2. At least a third and different value (to be able to have a differential on second axis)
"""
masters = self.masters.values()
twoAxes = False
stemMode = None
stems = {
'vstem': [master.vstem for master in masters],
'hstem': [master.hstem for master in masters]
}
if len(masters) > 2:
twoAxes = self._checkForTwoAxes(stems)
if twoAxes == True:
stemMode = 'both'
elif twoAxes == False:
for stemName in stems:
stemValues = stems[stemName]
diff = self._numbersHaveDifferential(stemValues)
if diff == True:
stemMode = stemName
break
self._workingStems = stemMode
def _checkForTwoAxes(self, stemsList):
"""
Check conditions are met for two-axis interpolation in MutatorMath:
1. At least two identical values (to bind a new axis to the first axis)
2. At least a third and different value (to be able to have a differential on second axis)
"""
twoAxes = []
vstems = stemsList['vstem']
hstems = stemsList['hstem']
twoAxes.append(self._numbersHaveDifferential(vstems))
twoAxes.append(self._numbersHaveSplitDifferential(hstems))
return bool(reduce(lambda a,b: a*b, twoAxes))
def _numbersHaveSplitDifferential(self, values):
"""Looking for at least two similar values and one differing from the others."""
length = len(values)
values.sort()
if length > 1:
identicalValues = 0
differentValues = 0
for i, value in enumerate(values):
if i < length-1:
nextValue = values[i+1]
if value is not None:
if nextValue == value: identicalValues += 1
if nextValue != value: differentValues += 1
return bool(identicalValues) and bool(differentValues)
return False
def _numbersHaveDifferential(self, values):
"""Looking for at least two different values in a bunch."""
length = len(values)
values.sort()
differential = False
if length > 1:
differentValues = 0
for i, value in enumerate(values):
if i < length-1:
nextValue = values[i+1]
if nextValue != value and value is not None:
differential = True
break
return differential
def getMutatorReport(self):
    """Return the list of recorded MutatorMath interpolation error reports."""
    return self.mutatorErrors
if __name__ == '__main__':
    # Self-test suite: exercises MutatorScaleEngine against the UFO fonts in
    # the repository's testFonts/ folder (requires defcon to load them).
    import os
    import unittest
    import glob
    from defcon import Font

    class MutatorScaleEngineTest(unittest.TestCase):

        def setUp(self):
            # Build one scaler per test family ('two-axes' and
            # 'isotropic-anisotropic'), skipping italic masters.
            libFolder = os.path.dirname(os.path.dirname((os.path.dirname(os.path.abspath(__file__)))))
            libFolder = os.path.join(libFolder, 'testFonts/')
            self.scalers = []
            self.loadedFonts = []
            self.glyphNames = ['H','I']
            for fontsFolder in ['two-axes','isotropic-anisotropic']:
                fonts = []
                fontsPath = os.path.join(libFolder, fontsFolder)
                os.chdir(fontsPath)
                for singleFontPath in glob.glob('*.ufo'):
                    font = Font(singleFontPath)
                    if 'Italic' not in font.info.styleName:
                        fonts.append(font)
                        self.loadedFonts.append(font)
                scaler = MutatorScaleEngine(fonts)
                self.scalers.append(scaler)

        def test_if_scalingEngine_has_glyph(self):
            """Checking if glyph is present among all scaling masters."""
            for scaler in self.scalers:
                for glyphName in self.glyphNames:
                    hasGlyph = scaler.hasGlyph(glyphName)
                    self.assertTrue(hasGlyph)

        def test_get_list_of_non_empty_glyph(self):
            """Checking if glyph is present among all scaling masters."""
            # NOTE(review): only checks the call does not raise; no assertion.
            for scaler in self.scalers:
                scaler.getReferenceGlyphNames()

        def test_setting_up_simple_scale(self):
            """Test setting up simple scale on a MutatorScaleEngine."""
            for scaler in self.scalers:
                scaler.set({'scale':(0.5, 0.4)})
                for glyphName in self.glyphNames:
                    scaler.getScaledGlyph(glyphName, (100, 40))

        def test_setting_up_width(self):
            """Test setting up width scaling on a MutatorScaleEngine."""
            for scaler in self.scalers:
                scaler.set({'width':0.75})
                for glyphName in self.glyphNames:
                    scaler.getScaledGlyph(glyphName, (100, 40))

        def test_setting_up_scale_by_reference(self):
            """Test setting up scale on a MutatorScaleEngine."""
            # Heights may be numbers, glyph names, or font metric names.
            testScales = [
                { 'targetHeight': 'A', 'referenceHeight': 'H' },
                { 'targetHeight': 'A', 'referenceHeight': 'capHeight' },
                { 'targetHeight': 490, 'referenceHeight': 'capHeight' },
                { 'targetHeight': 500, 'referenceHeight': 750 },
            ]
            for scale in testScales:
                for scaler in self.scalers:
                    scaler.set(scale)
                    for glyphName in self.glyphNames:
                        scaler.getScaledGlyph(glyphName, (100, 40))

        def test_adding_master(self):
            # Adding a fifth master to the four loaded in setUp.
            libFolder = os.path.dirname(os.path.dirname((os.path.dirname(os.path.abspath(__file__)))))
            libFolder = os.path.join(libFolder, 'testFonts/')
            newFontPath = os.path.join(libFolder, 'isotropic-anisotropic/bold-mid-contrast.ufo')
            newFont = Font(newFontPath)
            scaler = self.scalers[0]
            scaler.addMaster(newFont)
            self.assertEqual(len(scaler), 5)

        def test_removing_master(self):
            scaler = self.scalers[0]
            fontToRemove = self.loadedFonts[0]
            scaler.removeMaster(fontToRemove)
            self.assertEqual(len(scaler), 3)

        def test_scaler_uses_hstem_as_main_value_from_single_values(self):
            # Single stem values are interpreted as the vertical stem.
            scaler = MutatorScaleEngine()
            font1 = self.loadedFonts[2]
            font2 = self.loadedFonts[3]
            scaler.addMaster(font1, 15)
            scaler.addMaster(font2, 45)
            g = scaler.getScaledGlyph('A', 45)
            self.assertNotEqual(g.name, '_error_')

        def test_scaler_uses_hstem_as_main_value_from_tuples(self):
            scaler = MutatorScaleEngine()
            font1 = self.loadedFonts[2]
            font2 = self.loadedFonts[3]
            scaler.addMaster(font1, (100, 15))
            scaler.addMaster(font2, (100, 45))
            g = scaler.getScaledGlyph('A', 45)
            self.assertNotEqual(g.name, '_error_')
    unittest.main()
adviti/melange | thirdparty/google_appengine/lib/django_1_2/django/contrib/gis/geos/geometry.py | 68 | 23445 | """
This module contains the 'base' GEOSGeometry object -- all GEOS Geometries
inherit from this object.
"""
# Python, ctypes and types dependencies.
import re
from ctypes import addressof, byref, c_double, c_size_t
# super-class for mutable list behavior
from django.contrib.gis.geos.mutable_list import ListMixin
# GEOS-related dependencies.
from django.contrib.gis.geos.base import GEOSBase, gdal
from django.contrib.gis.geos.coordseq import GEOSCoordSeq
from django.contrib.gis.geos.error import GEOSException, GEOSIndexError
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOS_PREPARE
from django.contrib.gis.geos.mutable_list import ListMixin
# All other functions in this module come from the ctypes
# prototypes module -- which handles all interaction with
# the underlying GEOS library.
from django.contrib.gis.geos import prototypes as capi
# These functions provide access to a thread-local instance
# of their corresponding GEOS I/O class.
from django.contrib.gis.geos.prototypes.io import wkt_r, wkt_w, wkb_r, wkb_w, ewkb_w, ewkb_w3d
# For recognizing geometry input.
from django.contrib.gis.geometry.regex import hex_regex, wkt_regex, json_regex
class GEOSGeometry(GEOSBase, ListMixin):
"A class that, generally, encapsulates a GEOS geometry."
# Raise GEOSIndexError instead of plain IndexError
# (see ticket #4740 and GEOSIndexError docstring)
_IndexError = GEOSIndexError
ptr_type = GEOM_PTR
#### Python 'magic' routines ####
def __init__(self, geo_input, srid=None):
    """
    The base constructor for GEOS geometry objects, and may take the
    following inputs:

     * strings:
        - WKT
        - HEXEWKB (a PostGIS-specific canonical form)
        - GeoJSON (requires GDAL)
     * buffer:
        - WKB

    The `srid` keyword is used to specify the Source Reference Identifier
    (SRID) number for this Geometry. If not set, the SRID will be None.
    """
    if isinstance(geo_input, basestring):
        if isinstance(geo_input, unicode):
            # Encoding to ASCII, WKT or HEXEWKB doesn't need any more.
            geo_input = geo_input.encode('ascii')

        wkt_m = wkt_regex.match(geo_input)
        if wkt_m:
            # Handling WKT input.
            if wkt_m.group('srid'): srid = int(wkt_m.group('srid'))
            g = wkt_r().read(wkt_m.group('wkt'))
        elif hex_regex.match(geo_input):
            # Handling HEXEWKB input.
            g = wkb_r().read(geo_input)
        elif gdal.GEOJSON and json_regex.match(geo_input):
            # Handling GeoJSON input.
            g = wkb_r().read(gdal.OGRGeometry(geo_input).wkb)
        else:
            # Fixed garbled error message ("WKT EWKT" -> "WKT, EWKT").
            raise ValueError('String or unicode input unrecognized as WKT, EWKT, and HEXEWKB.')
    elif isinstance(geo_input, GEOM_PTR):
        # When the input is a pointer to a geometry (GEOM_PTR).
        g = geo_input
    elif isinstance(geo_input, buffer):
        # When the input is a buffer (WKB).
        g = wkb_r().read(geo_input)
    elif isinstance(geo_input, GEOSGeometry):
        # Copy construction: clone the underlying GEOS geometry.
        g = capi.geom_clone(geo_input.ptr)
    else:
        # Invalid geometry type.
        raise TypeError('Improper geometry input type: %s' % str(type(geo_input)))

    if bool(g):
        # Setting the pointer object with a valid pointer.
        self.ptr = g
    else:
        raise GEOSException('Could not initialize GEOS Geometry with given input.')

    # Post-initialization setup.
    self._post_init(srid)
def _post_init(self, srid):
"Helper routine for performing post-initialization setup."
# Setting the SRID, if given.
if srid and isinstance(srid, int): self.srid = srid
# Setting the class type (e.g., Point, Polygon, etc.)
self.__class__ = GEOS_CLASSES[self.geom_typeid]
# Setting the coordinate sequence for the geometry (will be None on
# geometries that do not have coordinate sequences)
self._set_cs()
def __del__(self):
"""
Destroys this Geometry; in other words, frees the memory used by the
GEOS C++ object.
"""
if self._ptr: capi.destroy_geom(self._ptr)
def __copy__(self):
"""
Returns a clone because the copy of a GEOSGeometry may contain an
invalid pointer location if the original is garbage collected.
"""
return self.clone()
def __deepcopy__(self, memodict):
"""
The `deepcopy` routine is used by the `Node` class of django.utils.tree;
thus, the protocol routine needs to be implemented to return correct
copies (clones) of these GEOS objects, which use C pointers.
"""
return self.clone()
def __str__(self):
"WKT is used for the string representation."
return self.wkt
def __repr__(self):
"Short-hand representation because WKT may be very large."
return '<%s object at %s>' % (self.geom_type, hex(addressof(self.ptr)))
# Pickling support
def __getstate__(self):
# The pickled state is simply a tuple of the WKB (in string form)
# and the SRID.
return str(self.wkb), self.srid
def __setstate__(self, state):
# Instantiating from the tuple state that was pickled.
wkb, srid = state
ptr = wkb_r().read(buffer(wkb))
if not ptr: raise GEOSException('Invalid Geometry loaded from pickled state.')
self.ptr = ptr
self._post_init(srid)
# Comparison operators
def __eq__(self, other):
"""
Equivalence testing, a Geometry may be compared with another Geometry
or a WKT representation.
"""
if isinstance(other, basestring):
return self.wkt == other
elif isinstance(other, GEOSGeometry):
return self.equals_exact(other)
else:
return False
def __ne__(self, other):
"The not equals operator."
return not (self == other)
### Geometry set-like operations ###
# Thanks to Sean Gillies for inspiration:
# http://lists.gispython.org/pipermail/community/2007-July/001034.html
# g = g1 | g2
def __or__(self, other):
"Returns the union of this Geometry and the other."
return self.union(other)
# g = g1 & g2
def __and__(self, other):
"Returns the intersection of this Geometry and the other."
return self.intersection(other)
# g = g1 - g2
def __sub__(self, other):
"Return the difference this Geometry and the other."
return self.difference(other)
# g = g1 ^ g2
def __xor__(self, other):
"Return the symmetric difference of this Geometry and the other."
return self.sym_difference(other)
#### Coordinate Sequence Routines ####
@property
def has_cs(self):
"Returns True if this Geometry has a coordinate sequence, False if not."
# Only these geometries are allowed to have coordinate sequences.
if isinstance(self, (Point, LineString, LinearRing)):
return True
else:
return False
def _set_cs(self):
"Sets the coordinate sequence for this Geometry."
if self.has_cs:
self._cs = GEOSCoordSeq(capi.get_cs(self.ptr), self.hasz)
else:
self._cs = None
@property
def coord_seq(self):
"Returns a clone of the coordinate sequence for this Geometry."
if self.has_cs:
return self._cs.clone()
#### Geometry Info ####
@property
def geom_type(self):
"Returns a string representing the Geometry type, e.g. 'Polygon'"
return capi.geos_type(self.ptr)
@property
def geom_typeid(self):
"Returns an integer representing the Geometry type."
return capi.geos_typeid(self.ptr)
@property
def num_geom(self):
"Returns the number of geometries in the Geometry."
return capi.get_num_geoms(self.ptr)
@property
def num_coords(self):
"Returns the number of coordinates in the Geometry."
return capi.get_num_coords(self.ptr)
@property
def num_points(self):
"Returns the number points, or coordinates, in the Geometry."
return self.num_coords
@property
def dims(self):
"Returns the dimension of this Geometry (0=point, 1=line, 2=surface)."
return capi.get_dims(self.ptr)
def normalize(self):
"Converts this Geometry to normal form (or canonical form)."
return capi.geos_normalize(self.ptr)
#### Unary predicates ####
@property
def empty(self):
"""
Returns a boolean indicating whether the set of points in this Geometry
are empty.
"""
return capi.geos_isempty(self.ptr)
@property
def hasz(self):
"Returns whether the geometry has a 3D dimension."
return capi.geos_hasz(self.ptr)
@property
def ring(self):
"Returns whether or not the geometry is a ring."
return capi.geos_isring(self.ptr)
@property
def simple(self):
"Returns false if the Geometry not simple."
return capi.geos_issimple(self.ptr)
@property
def valid(self):
"This property tests the validity of this Geometry."
return capi.geos_isvalid(self.ptr)
#### Binary predicates. ####
def contains(self, other):
"Returns true if other.within(this) returns true."
return capi.geos_contains(self.ptr, other.ptr)
def crosses(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is T*T****** (for a point and a curve,a point and an area or a line and
an area) 0******** (for two curves).
"""
return capi.geos_crosses(self.ptr, other.ptr)
def disjoint(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is FF*FF****.
"""
return capi.geos_disjoint(self.ptr, other.ptr)
def equals(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is T*F**FFF*.
"""
return capi.geos_equals(self.ptr, other.ptr)
def equals_exact(self, other, tolerance=0):
"""
Returns true if the two Geometries are exactly equal, up to a
specified tolerance.
"""
return capi.geos_equalsexact(self.ptr, other.ptr, float(tolerance))
def intersects(self, other):
"Returns true if disjoint returns false."
return capi.geos_intersects(self.ptr, other.ptr)
def overlaps(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is T*T***T** (for two points or two surfaces) 1*T***T** (for two curves).
"""
return capi.geos_overlaps(self.ptr, other.ptr)
def relate_pattern(self, other, pattern):
"""
Returns true if the elements in the DE-9IM intersection matrix for the
two Geometries match the elements in pattern.
"""
if not isinstance(pattern, basestring) or len(pattern) > 9:
raise GEOSException('invalid intersection matrix pattern')
return capi.geos_relatepattern(self.ptr, other.ptr, pattern)
def touches(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is FT*******, F**T***** or F***T****.
"""
return capi.geos_touches(self.ptr, other.ptr)
def within(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is T*F**F***.
"""
return capi.geos_within(self.ptr, other.ptr)
#### SRID Routines ####
def get_srid(self):
"Gets the SRID for the geometry, returns None if no SRID is set."
s = capi.geos_get_srid(self.ptr)
if s == 0: return None
else: return s
def set_srid(self, srid):
"Sets the SRID for the geometry."
capi.geos_set_srid(self.ptr, srid)
srid = property(get_srid, set_srid)
#### Output Routines ####
@property
def ewkt(self):
"""
Returns the EWKT (WKT + SRID) of the Geometry. Note that Z values
are *not* included in this representation because GEOS does not yet
support serializing them.
"""
if self.get_srid(): return 'SRID=%s;%s' % (self.srid, self.wkt)
else: return self.wkt
@property
def wkt(self):
"Returns the WKT (Well-Known Text) representation of this Geometry."
return wkt_w().write(self)
@property
def hex(self):
"""
Returns the WKB of this Geometry in hexadecimal form. Please note
that the SRID and Z values are not included in this representation
because it is not a part of the OGC specification (use the `hexewkb`
property instead).
"""
# A possible faster, all-python, implementation:
# str(self.wkb).encode('hex')
return wkb_w().write_hex(self)
@property
def hexewkb(self):
"""
Returns the EWKB of this Geometry in hexadecimal form. This is an
extension of the WKB specification that includes SRID and Z values
that are a part of this geometry.
"""
if self.hasz:
if not GEOS_PREPARE:
# See: http://trac.osgeo.org/geos/ticket/216
raise GEOSException('Upgrade GEOS to 3.1 to get valid 3D HEXEWKB.')
return ewkb_w3d().write_hex(self)
else:
return ewkb_w().write_hex(self)
@property
def json(self):
"""
Returns GeoJSON representation of this Geometry if GDAL 1.5+
is installed.
"""
if gdal.GEOJSON:
return self.ogr.json
else:
raise GEOSException('GeoJSON output only supported on GDAL 1.5+.')
geojson = json
@property
def wkb(self):
"""
Returns the WKB (Well-Known Binary) representation of this Geometry
as a Python buffer. SRID and Z values are not included, use the
`ewkb` property instead.
"""
return wkb_w().write(self)
@property
def ewkb(self):
"""
Return the EWKB representation of this Geometry as a Python buffer.
This is an extension of the WKB specification that includes any SRID
and Z values that are a part of this geometry.
"""
if self.hasz:
if not GEOS_PREPARE:
# See: http://trac.osgeo.org/geos/ticket/216
raise GEOSException('Upgrade GEOS to 3.1 to get valid 3D EWKB.')
return ewkb_w3d().write(self)
else:
return ewkb_w().write(self)
@property
def kml(self):
"Returns the KML representation of this Geometry."
gtype = self.geom_type
return '<%s>%s</%s>' % (gtype, self.coord_seq.kml, gtype)
@property
def prepared(self):
"""
Returns a PreparedGeometry corresponding to this geometry -- it is
optimized for the contains, intersects, and covers operations.
"""
if GEOS_PREPARE:
return PreparedGeometry(self)
else:
raise GEOSException('GEOS 3.1+ required for prepared geometry support.')
#### GDAL-specific output routines ####
@property
def ogr(self):
"Returns the OGR Geometry for this Geometry."
if gdal.HAS_GDAL:
if self.srid:
return gdal.OGRGeometry(self.wkb, self.srid)
else:
return gdal.OGRGeometry(self.wkb)
else:
raise GEOSException('GDAL required to convert to an OGRGeometry.')
@property
def srs(self):
"Returns the OSR SpatialReference for SRID of this Geometry."
if gdal.HAS_GDAL:
if self.srid:
return gdal.SpatialReference(self.srid)
else:
return None
else:
raise GEOSException('GDAL required to return a SpatialReference object.')
@property
def crs(self):
"Alias for `srs` property."
return self.srs
def transform(self, ct, clone=False):
    """
    Requires GDAL. Transforms the geometry according to the given
    transformation object, which may be an integer SRID, and WKT or
    PROJ.4 string. By default, the geometry is transformed in-place and
    nothing is returned. However if the `clone` keyword is set, then this
    geometry will not be modified and a transformed clone will be returned
    instead.

    NOTE(review): when GDAL is unavailable or this geometry has no SRID,
    this method silently does nothing (and returns None even with
    clone=True) -- confirm callers expect that.
    """
    srid = self.srid
    if gdal.HAS_GDAL and srid:
        # Creating an OGR Geometry, which is then transformed.
        g = gdal.OGRGeometry(self.wkb, srid)
        g.transform(ct)
        # Getting a new GEOS pointer
        ptr = wkb_r().read(g.wkb)
        if clone:
            # User wants a cloned transformed geometry returned.
            return GEOSGeometry(ptr, srid=g.srid)
        if ptr:
            # Reassigning pointer, and performing post-initialization setup
            # again due to the reassignment.
            capi.destroy_geom(self.ptr)
            self.ptr = ptr
            self._post_init(g.srid)
        else:
            raise GEOSException('Transformed WKB was invalid.')
#### Topology Routines ####
def _topology(self, gptr):
"Helper routine to return Geometry from the given pointer."
return GEOSGeometry(gptr, srid=self.srid)
@property
def boundary(self):
"Returns the boundary as a newly allocated Geometry object."
return self._topology(capi.geos_boundary(self.ptr))
def buffer(self, width, quadsegs=8):
"""
Returns a geometry that represents all points whose distance from this
Geometry is less than or equal to distance. Calculations are in the
Spatial Reference System of this Geometry. The optional third parameter sets
the number of segment used to approximate a quarter circle (defaults to 8).
(Text from PostGIS documentation at ch. 6.1.3)
"""
return self._topology(capi.geos_buffer(self.ptr, width, quadsegs))
@property
def centroid(self):
"""
The centroid is equal to the centroid of the set of component Geometries
of highest dimension (since the lower-dimension geometries contribute zero
"weight" to the centroid).
"""
return self._topology(capi.geos_centroid(self.ptr))
@property
def convex_hull(self):
"""
Returns the smallest convex Polygon that contains all the points
in the Geometry.
"""
return self._topology(capi.geos_convexhull(self.ptr))
def difference(self, other):
"""
Returns a Geometry representing the points making up this Geometry
that do not make up other.
"""
return self._topology(capi.geos_difference(self.ptr, other.ptr))
@property
def envelope(self):
"Return the envelope for this geometry (a polygon)."
return self._topology(capi.geos_envelope(self.ptr))
def intersection(self, other):
"Returns a Geometry representing the points shared by this Geometry and other."
return self._topology(capi.geos_intersection(self.ptr, other.ptr))
@property
def point_on_surface(self):
"Computes an interior point of this Geometry."
return self._topology(capi.geos_pointonsurface(self.ptr))
def relate(self, other):
"Returns the DE-9IM intersection matrix for this Geometry and the other."
return capi.geos_relate(self.ptr, other.ptr)
def simplify(self, tolerance=0.0, preserve_topology=False):
    """
    Returns the Geometry, simplified using the Douglas-Peucker algorithm
    to the specified tolerance (higher tolerance => less points).  If no
    tolerance provided, defaults to 0.

    By default, this function does not preserve topology - e.g. polygons can
    be split, collapse to lines or disappear, holes can be created or
    disappear, and lines can cross. By specifying preserve_topology=True,
    the result will have the same dimension and number of components as the
    input. This is significantly slower.
    """
    if preserve_topology:
        return self._topology(capi.geos_preservesimplify(self.ptr, tolerance))
    else:
        return self._topology(capi.geos_simplify(self.ptr, tolerance))
def sym_difference(self, other):
"""
Returns a set combining the points in this Geometry not in other,
and the points in other not in this Geometry.
"""
return self._topology(capi.geos_symdifference(self.ptr, other.ptr))
def union(self, other):
"Returns a Geometry representing all the points in this Geometry and other."
return self._topology(capi.geos_union(self.ptr, other.ptr))
#### Other Routines ####
@property
def area(self):
"Returns the area of the Geometry."
return capi.geos_area(self.ptr, byref(c_double()))
def distance(self, other):
"""
Returns the distance between the closest points on this Geometry
and the other. Units will be in those of the coordinate system of
the Geometry.
"""
if not isinstance(other, GEOSGeometry):
raise TypeError('distance() works only on other GEOS Geometries.')
return capi.geos_distance(self.ptr, other.ptr, byref(c_double()))
@property
def extent(self):
    """
    Returns the extent of this geometry as a 4-tuple, consisting of
    (xmin, ymin, xmax, ymax).
    """
    env = self.envelope
    if isinstance(env, Point):
        # A point's envelope degenerates to the point itself.
        xmin, ymin = env.tuple
        xmax, ymax = xmin, ymin
    else:
        # Envelope is a rectangle polygon: exterior-ring corner 0 is
        # (xmin, ymin) and corner 2 is the opposite (xmax, ymax).
        xmin, ymin = env[0][0]
        xmax, ymax = env[0][2]
    return (xmin, ymin, xmax, ymax)
@property
def length(self):
"""
Returns the length of this Geometry (e.g., 0 for point, or the
circumfrence of a Polygon).
"""
return capi.geos_length(self.ptr, byref(c_double()))
def clone(self):
"Clones this Geometry."
return GEOSGeometry(capi.geom_clone(self.ptr), srid=self.srid)
# Class mapping dictionary. Has to be at the end to avoid import
# conflicts with GEOSGeometry.
from django.contrib.gis.geos.linestring import LineString, LinearRing
from django.contrib.gis.geos.point import Point
from django.contrib.gis.geos.polygon import Polygon
from django.contrib.gis.geos.collections import GeometryCollection, MultiPoint, MultiLineString, MultiPolygon
GEOS_CLASSES = {0 : Point,
1 : LineString,
2 : LinearRing,
3 : Polygon,
4 : MultiPoint,
5 : MultiLineString,
6 : MultiPolygon,
7 : GeometryCollection,
}
# If supported, import the PreparedGeometry class.
if GEOS_PREPARE:
from django.contrib.gis.geos.prepared import PreparedGeometry
| apache-2.0 |
patricklodder/dogecoin | qa/rpc-tests/test_framework/comptool.py | 49 | 18852 | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from .mininode import *
from .blockstore import BlockStore, TxStore
from .util import p2p_port
'''
This is a tool for comparing two or more bitcoinds to each other
using a script provided.
To use, create a class that implements get_tests(), and pass it in
as the test generator to TestManager. get_tests() should be a python
generator that returns TestInstance objects. See below for definition.
'''
# TestNode behaves as follows:
# Configure with a BlockStore and TxStore
# on_inv: log the message but don't request
# on_headers: log the chain tip
# on_pong: update ping response map (for synchronization)
# on_getheaders: provide headers via BlockStore
# on_getdata: provide blocks via BlockStore
global mininode_lock
class RejectResult(object):
    '''
    Outcome that expects rejection of a transaction or block.
    '''

    def __init__(self, code, reason=b''):
        self.code = code
        self.reason = reason

    def match(self, other):
        # Codes must agree exactly; our reason only needs to be a prefix of
        # the peer's (an empty reason therefore matches any reason).
        return self.code == other.code and other.reason.startswith(self.reason)

    def __repr__(self):
        return '%i:%s' % (self.code, self.reason or '*')
class TestNode(NodeConnCB):
    # P2P test peer for the comparison framework: serves getheaders/getdata
    # from local Block/Tx stores and records inv/reject/pong traffic so the
    # TestManager can synchronize and evaluate outcomes.

    def __init__(self, block_store, tx_store):
        NodeConnCB.__init__(self)
        self.conn = None
        # Last chain tip hash the peer announced via headers.
        self.bestblockhash = None
        self.block_store = block_store
        # hash -> True for every block/tx the peer has requested from us.
        self.block_request_map = {}
        self.tx_store = tx_store
        self.tx_request_map = {}
        # hash -> RejectResult for reject messages received from the peer.
        self.block_reject_map = {}
        self.tx_reject_map = {}

        # When the pingmap is non-empty we're waiting for
        # a response
        self.pingMap = {}
        self.lastInv = []
        self.closed = False

    def on_close(self, conn):
        self.closed = True

    def add_connection(self, conn):
        self.conn = conn

    def on_headers(self, conn, message):
        # Track the peer's tip from the last header announced.
        if len(message.headers) > 0:
            best_header = message.headers[-1]
            best_header.calc_sha256()
            self.bestblockhash = best_header.sha256

    def on_getheaders(self, conn, message):
        # Answer from the local block store (None means nothing to serve).
        response = self.block_store.headers_for(message.locator, message.hashstop)
        if response is not None:
            conn.send_message(response)

    def on_getdata(self, conn, message):
        [conn.send_message(r) for r in self.block_store.get_blocks(message.inv)]
        [conn.send_message(r) for r in self.tx_store.get_transactions(message.inv)]

        # Record what was requested (inv type 1 = tx, 2 = block).
        for i in message.inv:
            if i.type == 1:
                self.tx_request_map[i.hash] = True
            elif i.type == 2:
                self.block_request_map[i.hash] = True

    def on_inv(self, conn, message):
        self.lastInv = [x.hash for x in message.inv]

    def on_pong(self, conn, message):
        try:
            del self.pingMap[message.nonce]
        except KeyError:
            raise AssertionError("Got pong for unknown ping [%s]" % repr(message))

    def on_reject(self, conn, message):
        # Remember why the peer rejected a tx/block, keyed by its hash.
        if message.message == b'tx':
            self.tx_reject_map[message.data] = RejectResult(message.code, message.reason)
        if message.message == b'block':
            self.block_reject_map[message.data] = RejectResult(message.code, message.reason)

    def send_inv(self, obj):
        mtype = 2 if isinstance(obj, CBlock) else 1
        self.conn.send_message(msg_inv([CInv(mtype, obj.sha256)]))

    def send_getheaders(self):
        # We ask for headers from their last tip.
        m = msg_getheaders()
        m.locator = self.block_store.get_locator(self.bestblockhash)
        self.conn.send_message(m)

    def send_header(self, header):
        m = msg_headers()
        m.headers.append(header)
        self.conn.send_message(m)

    # This assumes BIP31
    def send_ping(self, nonce):
        self.pingMap[nonce] = True
        self.conn.send_message(msg_ping(nonce))

    def received_ping_response(self, nonce):
        return nonce not in self.pingMap

    def send_mempool(self):
        self.lastInv = []
        self.conn.send_message(msg_mempool())
# TestInstance:
#
# Instances of these are generated by the test generator, and fed into the
# comptool.
#
# "blocks_and_transactions" should be an array of
# [obj, True/False/None, hash/None]:
# - obj is either a CBlock, CBlockHeader, or a CTransaction, and
# - the second value indicates whether the object should be accepted
# into the blockchain or mempool (for tests where we expect a certain
# answer), or "None" if we don't expect a certain answer and are just
# comparing the behavior of the nodes being tested.
# - the third value is the hash to test the tip against (if None or omitted,
# use the hash of the block)
# - NOTE: if a block header, no test is performed; instead the header is
# just added to the block_store. This is to facilitate block delivery
# when communicating with headers-first clients (when withholding an
# intermediate block).
# sync_every_block: if True, then each block will be inv'ed, synced, and
# nodes will be tested based on the outcome for the block. If False,
# then inv's accumulate until all blocks are processed (or max inv size
# is reached) and then sent out in one inv message. Then the final block
# will be synced across all connections, and the outcome of the final
# block will be tested.
# sync_every_tx: analogous to behavior for sync_every_block, except if outcome
# on the final tx is None, then contents of entire mempool are compared
# across all connections. (If outcome of final tx is specified as true
# or false, then only the last tx is tested against outcome.)
class TestInstance(object):
    """One comparison-test step: a list of [obj, outcome, tip] entries plus
    flags controlling whether the nodes are synced and checked after every
    block / every transaction (see the module-level notes above).
    """
    def __init__(self, objects=None, sync_every_block=True, sync_every_tx=False):
        if not objects:
            objects = []
        self.blocks_and_transactions = objects
        self.sync_every_block = sync_every_block
        self.sync_every_tx = sync_every_tx
class TestManager(object):
    """Drives a comparison test across one or more nodes.

    Pulls TestInstance objects from a test generator, delivers the blocks
    and transactions within to every connected node, and checks that each
    node's chain tip / mempool matches the expected outcome (or that all
    nodes simply agree with each other when no outcome is given).
    """
    def __init__(self, testgen, datadir):
        self.test_generator = testgen
        self.connections = []
        self.test_nodes = []
        self.block_store = BlockStore(datadir)
        self.tx_store = TxStore(datadir)
        self.ping_counter = 1
    def add_all_connections(self, nodes):
        """Open one p2p connection (with a TestNode callback) per node."""
        for i in range(len(nodes)):
            # Create a p2p connection to each node
            test_node = TestNode(self.block_store, self.tx_store)
            self.test_nodes.append(test_node)
            self.connections.append(NodeConn('127.0.0.1', p2p_port(i), nodes[i], test_node))
            # Make sure the TestNode (callback class) has a reference to its
            # associated NodeConn
            test_node.add_connection(self.connections[-1])
    def clear_all_connections(self):
        self.connections = []
        self.test_nodes = []
    def wait_for_disconnections(self):
        def disconnected():
            return all(node.closed for node in self.test_nodes)
        return wait_until(disconnected, timeout=10)
    def wait_for_verack(self):
        def veracked():
            return all(node.verack_received for node in self.test_nodes)
        return wait_until(veracked, timeout=10)
    def wait_for_pings(self, counter):
        def received_pongs():
            return all(node.received_ping_response(counter) for node in self.test_nodes)
        return wait_until(received_pongs)
    # sync_blocks: Wait for all connections to request the blockhash given
    # then send get_headers to find out the tip of each node, and synchronize
    # the response by using a ping (and waiting for pong with same nonce).
    def sync_blocks(self, blockhash, num_blocks):
        def blocks_requested():
            return all(
                blockhash in node.block_request_map and node.block_request_map[blockhash]
                for node in self.test_nodes
            )
        # --> error if not requested
        if not wait_until(blocks_requested, attempts=20*num_blocks):
            # print [ c.cb.block_request_map for c in self.connections ]
            raise AssertionError("Not all nodes requested block")
        # Send getheaders message
        [ c.cb.send_getheaders() for c in self.connections ]
        # Send ping and wait for response -- synchronization hack
        [ c.cb.send_ping(self.ping_counter) for c in self.connections ]
        self.wait_for_pings(self.ping_counter)
        self.ping_counter += 1
    # Analogous to sync_block (see above)
    def sync_transaction(self, txhash, num_events):
        # Wait for nodes to request transaction (50ms sleep * 20 tries * num_events)
        def transaction_requested():
            return all(
                txhash in node.tx_request_map and node.tx_request_map[txhash]
                for node in self.test_nodes
            )
        # --> error if not requested
        if not wait_until(transaction_requested, attempts=20*num_events):
            # print [ c.cb.tx_request_map for c in self.connections ]
            raise AssertionError("Not all nodes requested transaction")
        # Get the mempool
        [ c.cb.send_mempool() for c in self.connections ]
        # Send ping and wait for response -- synchronization hack
        [ c.cb.send_ping(self.ping_counter) for c in self.connections ]
        self.wait_for_pings(self.ping_counter)
        self.ping_counter += 1
        # Sort inv responses from each node
        with mininode_lock:
            [ c.cb.lastInv.sort() for c in self.connections ]
    # Verify that the tip of each connection all agree with each other, and
    # with the expected outcome (if given)
    def check_results(self, blockhash, outcome):
        """Check each node's tip against *outcome* (True/False/RejectResult/None)."""
        with mininode_lock:
            for c in self.connections:
                if outcome is None:
                    # no expected outcome: all nodes must simply agree
                    if c.cb.bestblockhash != self.connections[0].cb.bestblockhash:
                        return False
                elif isinstance(outcome, RejectResult): # Check that block was rejected w/ code
                    if c.cb.bestblockhash == blockhash:
                        return False
                    if blockhash not in c.cb.block_reject_map:
                        print('Block not in reject map: %064x' % (blockhash))
                        return False
                    if not outcome.match(c.cb.block_reject_map[blockhash]):
                        print('Block rejected with %s instead of expected %s: %064x' % (c.cb.block_reject_map[blockhash], outcome, blockhash))
                        return False
                elif ((c.cb.bestblockhash == blockhash) != outcome):
                    # print c.cb.bestblockhash, blockhash, outcome
                    return False
            return True
    # Either check that the mempools all agree with each other, or that
    # txhash's presence in the mempool matches the outcome specified.
    # This is somewhat of a strange comparison, in that we're either comparing
    # a particular tx to an outcome, or the entire mempools altogether;
    # perhaps it would be useful to add the ability to check explicitly that
    # a particular tx's existence in the mempool is the same across all nodes.
    def check_mempool(self, txhash, outcome):
        """Check each node's mempool inv against *outcome* (see check_results)."""
        with mininode_lock:
            for c in self.connections:
                if outcome is None:
                    # Make sure the mempools agree with each other
                    if c.cb.lastInv != self.connections[0].cb.lastInv:
                        # print c.rpc.getrawmempool()
                        return False
                elif isinstance(outcome, RejectResult): # Check that tx was rejected w/ code
                    if txhash in c.cb.lastInv:
                        return False
                    if txhash not in c.cb.tx_reject_map:
                        print('Tx not in reject map: %064x' % (txhash))
                        return False
                    if not outcome.match(c.cb.tx_reject_map[txhash]):
                        print('Tx rejected with %s instead of expected %s: %064x' % (c.cb.tx_reject_map[txhash], outcome, txhash))
                        return False
                elif ((txhash in c.cb.lastInv) != outcome):
                    # print c.rpc.getrawmempool(), c.cb.lastInv
                    return False
            return True
    def run(self):
        """Main loop: deliver each TestInstance and verify the outcomes."""
        # Wait until verack is received
        self.wait_for_verack()
        test_number = 1
        for test_instance in self.test_generator.get_tests():
            # We use these variables to keep track of the last block
            # and last transaction in the tests, which are used
            # if we're not syncing on every block or every tx.
            [ block, block_outcome, tip ] = [ None, None, None ]
            [ tx, tx_outcome ] = [ None, None ]
            invqueue = []
            for test_obj in test_instance.blocks_and_transactions:
                b_or_t = test_obj[0]
                outcome = test_obj[1]
                # Determine if we're dealing with a block or tx
                if isinstance(b_or_t, CBlock):  # Block test runner
                    block = b_or_t
                    block_outcome = outcome
                    tip = block.sha256
                    # each test_obj can have an optional third argument
                    # to specify the tip we should compare with
                    # (default is to use the block being tested)
                    if len(test_obj) >= 3:
                        tip = test_obj[2]
                    # Add to shared block_store, set as current block
                    # If there was an open getdata request for the block
                    # previously, and we didn't have an entry in the
                    # block_store, then immediately deliver, because the
                    # node wouldn't send another getdata request while
                    # the earlier one is outstanding.
                    first_block_with_hash = True
                    if self.block_store.get(block.sha256) is not None:
                        first_block_with_hash = False
                    with mininode_lock:
                        self.block_store.add_block(block)
                        for c in self.connections:
                            if first_block_with_hash and block.sha256 in c.cb.block_request_map and c.cb.block_request_map[block.sha256] == True:
                                # There was a previous request for this block hash
                                # Most likely, we delivered a header for this block
                                # but never had the block to respond to the getdata
                                c.send_message(msg_block(block))
                            else:
                                c.cb.block_request_map[block.sha256] = False
                    # Either send inv's to each node and sync, or add
                    # to invqueue for later inv'ing.
                    if (test_instance.sync_every_block):
                        # if we expect success, send inv and sync every block
                        # if we expect failure, just push the block and see what happens.
                        if outcome == True:
                            [ c.cb.send_inv(block) for c in self.connections ]
                            self.sync_blocks(block.sha256, 1)
                        else:
                            [ c.send_message(msg_block(block)) for c in self.connections ]
                            [ c.cb.send_ping(self.ping_counter) for c in self.connections ]
                            self.wait_for_pings(self.ping_counter)
                            self.ping_counter += 1
                        if (not self.check_results(tip, outcome)):
                            raise AssertionError("Test failed at test %d" % test_number)
                    else:
                        invqueue.append(CInv(2, block.sha256))
                elif isinstance(b_or_t, CBlockHeader):
                    # headers are just stored/delivered, never tested directly
                    block_header = b_or_t
                    self.block_store.add_header(block_header)
                    [ c.cb.send_header(block_header) for c in self.connections ]
                else:  # Tx test runner
                    assert(isinstance(b_or_t, CTransaction))
                    tx = b_or_t
                    tx_outcome = outcome
                    # Add to shared tx store and clear map entry
                    with mininode_lock:
                        self.tx_store.add_transaction(tx)
                        for c in self.connections:
                            c.cb.tx_request_map[tx.sha256] = False
                    # Again, either inv to all nodes or save for later
                    if (test_instance.sync_every_tx):
                        [ c.cb.send_inv(tx) for c in self.connections ]
                        self.sync_transaction(tx.sha256, 1)
                        if (not self.check_mempool(tx.sha256, outcome)):
                            raise AssertionError("Test failed at test %d" % test_number)
                    else:
                        invqueue.append(CInv(1, tx.sha256))
                # Ensure we're not overflowing the inv queue
                if len(invqueue) == MAX_INV_SZ:
                    [ c.send_message(msg_inv(invqueue)) for c in self.connections ]
                    invqueue = []
            # Do final sync if we weren't syncing on every block or every tx.
            if (not test_instance.sync_every_block and block is not None):
                if len(invqueue) > 0:
                    [ c.send_message(msg_inv(invqueue)) for c in self.connections ]
                    invqueue = []
                self.sync_blocks(block.sha256, len(test_instance.blocks_and_transactions))
                if (not self.check_results(tip, block_outcome)):
                    raise AssertionError("Block test failed at test %d" % test_number)
            if (not test_instance.sync_every_tx and tx is not None):
                if len(invqueue) > 0:
                    [ c.send_message(msg_inv(invqueue)) for c in self.connections ]
                    invqueue = []
                self.sync_transaction(tx.sha256, len(test_instance.blocks_and_transactions))
                if (not self.check_mempool(tx.sha256, tx_outcome)):
                    raise AssertionError("Mempool test failed at test %d" % test_number)
            print("Test %d: PASS" % test_number, [ c.rpc.getblockcount() for c in self.connections ])
            test_number += 1
        [ c.disconnect_node() for c in self.connections ]
        self.wait_for_disconnections()
        self.block_store.close()
        self.tx_store.close()
| mit |
pllim/ginga | ginga/gtk3w/Widgets.py | 3 | 75769 | #
# Widgets.py -- wrapped Gtk widgets and convenience functions
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import os.path
from ginga.gtk3w import GtkHelp
import ginga.icons
from ginga.misc import Callback, Bunch, Settings, LineHistory
from functools import reduce
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GObject
from gi.repository import GdkPixbuf
import gi
has_webkit = False
try:
# this is necessary to prevent a warning message on import
gi.require_version('WebKit2', '4.0')
from gi.repository import WebKit2 as WebKit # noqa
has_webkit = True
except Exception:
try:
gi.require_version('WebKit', '3.0')
from gi.repository import WebKit # noqa
except Exception:
pass
__all__ = ['WidgetError', 'WidgetBase', 'TextEntry', 'TextEntrySet',
'TextArea', 'Label', 'Button', 'ComboBox',
'SpinBox', 'Slider', 'Dial', 'ScrollBar', 'CheckBox', 'ToggleButton',
'RadioButton', 'Image', 'ProgressBar', 'StatusBar', 'TreeView',
'WebView', 'ContainerBase', 'Box', 'HBox', 'VBox', 'Frame',
'Expander', 'TabWidget', 'StackWidget', 'MDIWidget', 'ScrollArea',
'Splitter', 'GridBox', 'Toolbar', 'MenuAction',
'Menu', 'Menubar', 'TopLevelMixin', 'TopLevel', 'Application',
'Dialog', 'SaveDialog', 'DragPackage', 'WidgetMoveEvent',
'name_mangle', 'make_widget', 'hadjust', 'build_info', 'wrap',
'has_webkit']
# path to our icons
icondir = os.path.split(ginga.icons.__file__)[0]
class WidgetError(Exception):
    """Raised for errors originating in this widget module."""
# (see TabWidget)
_widget_move_event = None
_app = None
# BASE
class WidgetBase(Callback.Callbacks):
    """Base class for all wrapped Gtk widgets.

    Subclasses construct a native Gtk widget and assign it to
    ``self.widget``; common operations (visibility, sizing, enabling,
    tooltips, fonts) are provided here.
    """

    def __init__(self):
        super(WidgetBase, self).__init__()
        self.widget = None
        # external data can be attached here
        self.extdata = Bunch.Bunch()

    def get_widget(self):
        return self.widget

    def set_tooltip(self, text):
        self.widget.set_tooltip_text(text)

    def get_enabled(self):
        # BUG FIX: the sensitivity state was queried but never returned,
        # so callers always received None
        return self.widget.get_sensitive()

    def set_enabled(self, tf):
        self.widget.set_sensitive(tf)

    def get_size(self):
        """Return (width, height) of the widget in pixels."""
        try:
            rect = self.widget.get_allocation()
            # x, y = rect.x, rect.y
            wd, ht = rect.width, rect.height
        except Exception as e:
            # window maybe isn't realized yet--try other ways
            min_req, nat_req = self.widget.get_preferred_size()
            wd, ht = nat_req.width, nat_req.height
            # req = self.widget.get_size_request()
            # wd, ht = req
            # wd, ht = max(1, wd), max(1, ht)
        return wd, ht

    def get_pos(self):
        """Return (x, y) position of the widget's allocation."""
        rect = self.widget.get_allocation()
        x, y = rect.x, rect.y
        return x, y

    def get_app(self):
        return _app

    def delete(self):
        self.widget.destroy()
        self.widget = None

    def show(self):
        # self.widget.show()
        self.widget.show_all()

    def hide(self):
        self.widget.hide()

    def focus(self):
        self.widget.grab_focus()

    def resize(self, width, height):
        self.widget.set_size_request(width, height)
        # hackish way to allow the widget to be resized down again later
        # NOTE: this may cause some problems for sizing certain widgets
        if width > 0 and height > 0:
            GObject.idle_add(self.widget.set_size_request, -1, -1)

    def get_font(self, font_family, point_size):
        font = GtkHelp.get_font(font_family, point_size)
        return font

    def cfg_expand(self, horizontal=0, vertical=0):
        # this is for compatibility with Qt widgets
        pass
# BASIC WIDGETS
class TextEntry(WidgetBase):
    """Single-line text entry with up/down-arrow history recall.

    Fires 'activated' when Enter is pressed; each activation appends the
    current text to the line history.
    """

    def __init__(self, text='', editable=True):
        super(TextEntry, self).__init__()
        w = Gtk.Entry()
        w.set_text(text)
        w.set_editable(editable)
        w.connect('key-press-event', self._key_press_event)
        w.connect('activate', self._cb_redirect)
        self.widget = w
        self.history = LineHistory.LineHistory()
        self.enable_callback('activated')

    def _cb_redirect(self, *args):
        self.history.append(self.get_text())
        self.make_callback('activated')

    def _key_press_event(self, widget, event):
        """Handle Up/Down keys to scroll through entry history."""
        keyname = Gdk.keyval_name(event.keyval)
        if keyname == 'Up':
            try:
                text = self.history.prev()
                self.set_text(text)
                # put the cursor at the end of the recalled text
                self.widget.set_position(len(text))
            except ValueError:
                # no earlier history entry; ignore
                pass
            return True
        elif keyname == 'Down':
            try:
                text = self.history.next()
                self.set_text(text)
                self.widget.set_position(len(text))
            except ValueError:
                # no later history entry; ignore
                pass
            return True
        return False

    def get_text(self):
        return self.widget.get_text()

    def set_text(self, text):
        self.widget.set_text(text)

    def set_editable(self, tf):
        self.widget.set_editable(tf)

    def set_font(self, font, size=10):
        if isinstance(font, str):
            font = self.get_font(font, size)
        self.widget.modify_font(font)

    def set_length(self, numchars):
        # this only sets the visible length of the widget
        # (removed a dead trailing `pass` statement here)
        self.widget.set_width_chars(numchars)
class TextEntrySet(WidgetBase):
    """A single-line text entry paired with a 'Set' button.

    The 'activated' callback fires when the user presses Enter in the
    entry or clicks the button.
    """

    def __init__(self, text='', editable=True):
        super(TextEntrySet, self).__init__()
        hbox = Gtk.HBox()
        hbox.set_spacing(4)
        w = Gtk.Entry()
        w.set_text(text)
        w.set_editable(editable)
        hbox.pack_start(w, True, True, 0)
        w.connect('activate', self._cb_redirect)
        self.entry = w
        w = Gtk.Button('Set')
        w.connect('clicked', self._cb_redirect)
        hbox.pack_start(w, False, False, 0)
        self.btn = w
        self.widget = hbox
        self.enable_callback('activated')

    def _cb_redirect(self, *args):
        self.make_callback('activated')

    def get_text(self):
        return self.entry.get_text()

    def set_text(self, text):
        self.entry.set_text(text)

    def set_editable(self, tf):
        self.entry.set_editable(tf)

    def set_font(self, font, size=10):
        if isinstance(font, str):
            font = self.get_font(font, size)
        # BUG FIX: apply the font to the text entry itself; previously it
        # was applied to the containing HBox, which renders no text
        # (compare TextEntry.set_font, which targets the entry widget).
        self.entry.modify_font(font)

    def set_length(self, numchars):
        # self.widget.set_width_chars(numchars)
        pass

    def set_enabled(self, tf):
        super(TextEntrySet, self).set_enabled(tf)
        self.entry.set_sensitive(tf)
class TextArea(WidgetBase):
    """Multi-line text display/edit widget wrapped in a scrolled window.

    Mirrors the Qt version by embedding the TextView in its own
    ScrolledWindow.  An optional history limit trims old lines.
    """

    def __init__(self, wrap=False, editable=False):
        super(TextArea, self).__init__()
        tw = Gtk.TextView()
        if wrap:
            tw.set_wrap_mode(Gtk.WrapMode.WORD)
        else:
            tw.set_wrap_mode(Gtk.WrapMode.NONE)
        tw.set_editable(editable)
        self.tw = tw
        # this widget has a built in ScrollArea to match Qt functionality
        sw = Gtk.ScrolledWindow()
        sw.set_border_width(2)
        sw.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        sw.add(self.tw)
        self.widget = sw
        # 0 means "no limit"; see set_limit()
        self.histlimit = 0

    def append_text(self, text, autoscroll=True):
        buf = self.tw.get_buffer()
        end = buf.get_end_iter()
        buf.insert(end, text)
        if self.histlimit > 0:
            self._history_housekeeping()
        if not autoscroll:
            return
        end = buf.get_end_iter()
        mark = buf.get_insert()
        # self.tw.scroll_to_iter(end, 0.5)
        # NOTE: this was causing a segfault if the text widget is
        # not mapped yet!  Seems to be fixed in recent versions of
        # gtk
        buf.move_mark(mark, end)
        res = self.tw.scroll_to_mark(mark, 0.2, False, 0.0, 0.0)  # noqa

    def get_text(self):
        buf = self.tw.get_buffer()
        # BUG FIX: Gtk3 TextBuffer.get_text() requires explicit start/end
        # iterators plus the include_hidden_chars flag; the zero-argument
        # call raised TypeError.
        start = buf.get_start_iter()
        end = buf.get_end_iter()
        return buf.get_text(start, end, False)

    def _history_housekeeping(self):
        # remove some lines to keep us within our history limit
        buf = self.tw.get_buffer()
        numlines = buf.get_line_count()
        if numlines > self.histlimit:
            rmcount = int(numlines - self.histlimit)
            start = buf.get_iter_at_line(0)
            end = buf.get_iter_at_line(rmcount)
            buf.delete(start, end)

    def clear(self):
        buf = self.tw.get_buffer()
        start = buf.get_start_iter()
        end = buf.get_end_iter()
        buf.delete(start, end)

    def set_text(self, text):
        self.clear()
        self.append_text(text)

    def set_limit(self, numlines):
        self.histlimit = numlines
        self._history_housekeeping()

    def set_editable(self, tf):
        self.tw.set_editable(tf)

    def set_font(self, font, size=10):
        if isinstance(font, str):
            font = self.get_font(font, size)
        self.tw.modify_font(font)

    def set_wrap(self, tf):
        if tf:
            self.tw.set_wrap_mode(Gtk.WrapMode.WORD)
        else:
            self.tw.set_wrap_mode(Gtk.WrapMode.NONE)
class Label(WidgetBase):
    """Text label, optionally clickable ('clickable' style) and with an
    optional right-click context *menu*.
    """
    def __init__(self, text='', halign='left', style='normal', menu=None):
        super(Label, self).__init__()
        label = Gtk.Label(text)
        # wrap in an event box so we can receive button press/release events
        evbox = Gtk.EventBox()
        evbox.set_border_width(0)
        evbox.props.visible_window = False
        evbox.add(label)
        if halign == 'left':
            label.set_justify(Gtk.Justification.LEFT)
        elif halign == 'center':
            label.set_justify(Gtk.Justification.CENTER)
        elif halign == 'right':
            label.set_justify(Gtk.Justification.RIGHT)
        evbox.connect("button_press_event", self._cb_redirect)
        self.enable_callback('activated')
        evbox.connect("button_release_event", self._cb_redirect2)
        self.enable_callback('released')
        self.label = label
        self.menu = menu
        self.evbox = evbox
        self.widget = evbox
        if style == 'clickable':
            # a raised frame makes the label look button-like
            fr = Gtk.Frame()
            fr.set_shadow_type(Gtk.ShadowType.OUT)
            evbox.props.visible_window = True
            fr.add(evbox)
            self.frame = fr
            self.widget = fr
    def _cb_redirect(self, widget, event):
        # event.button, event.x, event.y
        # left press activates; right press pops up the menu (if any)
        if event.button == 1:
            self.make_callback('activated')
            return True
        elif event.button == 3 and self.menu is not None:
            menu_w = self.menu.get_widget()
            if menu_w.get_sensitive():
                return menu_w.popup(None, None, None, None,
                                    event.button, event.time)
        return False
    def _cb_redirect2(self, widget, event):
        # left button release fires the 'released' callback
        if event.button == 1:
            self.make_callback('released')
            return True
        return False
    def get_text(self):
        return self.label.get_text()
    def set_text(self, text):
        self.label.set_text(text)
    def set_font(self, font, size=10):
        if isinstance(font, str):
            font = self.get_font(font, size)
        self.label.modify_font(font)
    def set_color(self, fg=None, bg=None):
        # colors are given as strings parseable by Gdk.color_parse
        if bg is not None:
            GtkHelp.modify_bg(self.evbox, bg)
        if fg is not None:
            self.label.modify_fg(Gtk.StateType.NORMAL, Gdk.color_parse(fg))
class Button(WidgetBase):
    """A simple push button; fires the 'activated' callback when clicked."""

    def __init__(self, text=''):
        super(Button, self).__init__()
        self.widget = Gtk.Button(text)
        self.widget.connect('clicked', self._cb_redirect)
        self.enable_callback('activated')

    def _cb_redirect(self, *args):
        # forward the native 'clicked' signal to our callback system
        self.make_callback('activated')
class ComboBox(WidgetBase):
    """Drop-down selection widget, optionally with an editable text entry.

    Items live in a single-column Gtk.ListStore of strings; emits
    'activated' with the index of the newly selected item.
    """
    def __init__(self, editable=False):
        super(ComboBox, self).__init__()
        cb = GtkHelp.ComboBox(has_entry=editable)
        liststore = Gtk.ListStore(GObject.TYPE_STRING)
        cb.set_model(liststore)
        cell = Gtk.CellRendererText()
        cb.pack_start(cell, True)
        cb.add_attribute(cell, 'text', 0)
        if editable:
            cb.set_entry_text_column(0)
        self.widget = cb
        self.widget.sconnect('changed', self._cb_redirect)
        self.enable_callback('activated')
    def _cb_redirect(self, widget):
        idx = widget.get_active()
        self.make_callback('activated', idx)
    def insert_alpha(self, text):
        # insert text keeping the model in ascending alphabetical order
        # NOTE(review): on an empty model this inserts at index 1 — Gtk
        # clamps out-of-range indices to the end, so it still appends; verify
        model = self.widget.get_model()
        tup = (text, )
        j = 0
        for i in range(len(model)):
            j = i
            if model[i][0] > text:
                model.insert(j, tup)
                return
        model.insert(j + 1, tup)
    def append_text(self, text):
        model = self.widget.get_model()
        tup = (text, )
        idx = len(model)
        model.insert(idx, tup)
    def insert_text(self, idx, text):
        model = self.widget.get_model()
        tup = (text, )
        model.insert(idx, tup)
    def delete_alpha(self, text):
        # remove the first item matching text, if present
        model = self.widget.get_model()
        for i in range(len(model)):
            if model[i][0] == text:
                del model[i]
                return
    def get_alpha(self, idx):
        # return the text of the item at index idx
        model = self.widget.get_model()
        text = model[idx][0]
        return text
    def clear(self):
        model = self.widget.get_model()
        model.clear()
        # NOTE(review): uses get_entry() here but get_child() elsewhere;
        # presumably GtkHelp.ComboBox provides get_entry() — confirm
        if self.widget.get_has_entry():
            entry = self.widget.get_entry()
            entry.set_text('')
    def set_text(self, text):
        # select the matching item, or place text in the entry if editable
        model = self.widget.get_model()
        for i in range(len(model)):
            if model[i][0] == text:
                self.widget.set_active(i)
                return
        if self.widget.get_has_entry():
            entry = self.widget.get_child()
            entry.set_text(text)
    # to be deprecated someday
    show_text = set_text
    def set_index(self, index):
        self.widget.set_active(index)
    def get_index(self):
        return self.widget.get_active()
    def get_text(self):
        # text of the entry (if editable) or of the selected item
        if self.widget.get_has_entry():
            entry = self.widget.get_child()
            return entry.get_text()
        idx = self.get_index()
        return self.get_alpha(idx)
class SpinBox(WidgetBase):
    """Numeric spin button; emits 'value-changed' with the value coerced
    to the configured *dtype*.
    """

    def __init__(self, dtype=int):
        super(SpinBox, self).__init__()
        self.dtype = dtype
        widget = GtkHelp.SpinButton()
        widget.sconnect('value-changed', self._cb_redirect)
        self.widget = widget
        self.enable_callback('value-changed')

    def _cb_redirect(self, w):
        # coerce the native float to dtype before forwarding
        self.make_callback('value-changed', self.dtype(w.get_value()))

    def get_value(self):
        return self.dtype(self.widget.get_value())

    def set_value(self, val):
        self.widget.set_value(val)

    def set_decimals(self, num):
        self.widget.set_digits(num)

    def set_limits(self, minval, maxval, incr_value=1):
        adjustment = self.widget.get_adjustment()
        adjustment.configure(minval, minval, maxval,
                             incr_value, incr_value, 0)
class Slider(WidgetBase):
    """Horizontal or vertical scale widget; emits 'value-changed'."""
    def __init__(self, orientation='horizontal', dtype=int, track=False):
        super(Slider, self).__init__()
        # NOTE: parameter dtype is ignored for now for gtk3
        if orientation == 'horizontal':
            w = GtkHelp.HScale()
            # TEMP: hack because scales don't seem to expand as expected
            w.set_size_request(200, -1)
        else:
            w = GtkHelp.VScale()
            w.set_size_request(-1, 200)
        self.widget = w
        w.set_draw_value(True)
        w.set_value_pos(Gtk.PositionType.BOTTOM)
        self.set_tracking(track)
        w.sconnect('value-changed', self._cb_redirect)
        self.enable_callback('value-changed')
    def _cb_redirect(self, range):
        val = range.get_value()
        self.make_callback('value-changed', val)
    def get_value(self):
        return self.widget.get_value()
    def set_value(self, val):
        self.widget.set_value(val)
    def set_tracking(self, tf):
        # currently a no-op: the gtk2 update-policy calls are commented out
        if tf:
            # self.widget.set_update_policy(Gtk.UPDATE_CONTINUOUS)
            pass
        else:
            # self.widget.set_update_policy(Gtk.UPDATE_DISCONTINUOUS)
            pass
    def set_limits(self, minval, maxval, incr_value=1):
        adj = self.widget.get_adjustment()
        adj.configure(minval, minval, maxval, incr_value, incr_value, 0)
class Dial(WidgetBase):
    """Rotary dial control (custom GtkHelp.ValueDial); emits 'value-changed'
    with the value coerced to *dtype*.
    """
    def __init__(self, dtype=float, wrap=False, track=False):
        super(Dial, self).__init__()
        w = GtkHelp.ValueDial()
        self.widget = w
        w.draw_value = False
        w.wrap = wrap
        w.set_tracking(track)
        w.connect('value-changed', self._cb_redirect)
        self.dtype = dtype
        self.enable_callback('value-changed')
    def _cb_redirect(self, dial, val):
        # coerce to the configured dtype before passing to callbacks
        ext_val = self.dtype(val)
        self.make_callback('value-changed', ext_val)
    def get_value(self):
        int_val = self.widget.get_value()
        return self.dtype(int_val)
    def set_value(self, val):
        self.widget.set_value(val)
    def set_tracking(self, tf):
        self.widget.set_tracking(tf)
    def set_limits(self, minval, maxval, incr_value=1):
        self.widget.set_limits(minval, maxval, incr_value)
class ScrollBar(WidgetBase):
    """Standalone scrollbar; emits 'activated' with the new value."""

    def __init__(self, orientation='horizontal'):
        super(ScrollBar, self).__init__()
        if orientation == 'horizontal':
            widget = Gtk.HScrollbar()
        else:
            widget = Gtk.VScrollbar()
        widget.connect('value-changed', self._cb_redirect)
        self.widget = widget
        self.enable_callback('activated')

    def _cb_redirect(self, rng):
        # forward the current scrollbar position to our callbacks
        self.make_callback('activated', rng.get_value())
class CheckBox(WidgetBase):
    """Check button; emits 'activated' with the boolean checked state."""

    def __init__(self, text=''):
        super(CheckBox, self).__init__()
        btn = GtkHelp.CheckButton(text)
        btn.sconnect('toggled', self._cb_redirect)
        self.widget = btn
        self.enable_callback('activated')

    def _cb_redirect(self, widget):
        self.make_callback('activated', widget.get_active())

    def set_state(self, tf):
        self.widget.set_active(tf)

    def get_state(self):
        return self.widget.get_active()
class ToggleButton(WidgetBase):
    """Two-state toggle button; emits 'activated' with the toggle state."""

    def __init__(self, text=''):
        super(ToggleButton, self).__init__()
        btn = GtkHelp.ToggleButton(text)
        btn.set_mode(True)
        btn.sconnect('toggled', self._cb_redirect)
        self.widget = btn
        self.enable_callback('activated')

    def _cb_redirect(self, widget):
        self.make_callback('activated', widget.get_active())

    def set_state(self, tf):
        self.widget.set_active(tf)

    def get_state(self):
        return self.widget.get_active()
class RadioButton(WidgetBase):
    """Radio button; pass another RadioButton as *group* to link selections."""
    def __init__(self, text='', group=None):
        super(RadioButton, self).__init__()
        if group is not None:
            # join the existing group anchored at the given button
            group = group.get_widget()
            self.widget = GtkHelp.RadioButton.new_with_label_from_widget(group,
                                                                         text)
        else:
            self.widget = GtkHelp.RadioButton.new_with_label(None, text)
        self.widget.connect('toggled', self._cb_redirect)
        self.enable_callback('activated')
    def _cb_redirect(self, widget):
        val = widget.get_active()
        self.make_callback('activated', val)
    def set_state(self, tf):
        self.widget.set_active(tf)
    def get_state(self):
        return self.widget.get_active()
class Image(WidgetBase):
    """Static image widget; emits 'activated' on a left click-and-release.

    Right click pops up the optional context *menu*.
    """
    def __init__(self, native_image=None, style='normal', menu=None):
        super(Image, self).__init__()
        if native_image is None:
            native_image = Gtk.Image()
        self.image = native_image
        self.image.set_property("has-tooltip", True)
        # event box gives us button press/release events
        evbox = Gtk.EventBox()
        evbox.add(self.image)
        evbox.connect("button-press-event", self._cb_redirect1)
        evbox.connect("button-release-event", self._cb_redirect2)
        # tracks a pending click between press and release
        self._action = None
        self.menu = menu
        self.widget = evbox
        self.enable_callback('activated')
    def _cb_redirect1(self, widget, event):
        # left press arms a click; right press pops up the menu (if any)
        if event.type == Gdk.EventType.BUTTON_PRESS:
            if event.button == 1:
                self._action = 'click'
            elif event.button == 3 and self.menu is not None:
                menu_w = self.menu.get_widget()
                if menu_w.get_sensitive():
                    return menu_w.popup(None, None, None, None,
                                        event.button, event.time)
    def _cb_redirect2(self, widget, event):
        # only complete the click if the press started on this widget
        if event.type == Gdk.EventType.BUTTON_RELEASE:
            if (event.button == 1) and (self._action == 'click'):
                self._action = None
                self.make_callback('activated')
    def _set_image(self, native_image):
        self.image.set_from_pixbuf(native_image.get_pixbuf())
    def load_file(self, img_path, format=None):
        # format ignored at present
        pixbuf = GtkHelp.pixbuf_new_from_file(img_path)
        self.image.set_from_pixbuf(pixbuf)
class ProgressBar(WidgetBase):
    """Horizontal progress bar showing the fraction as a percentage label."""

    def __init__(self):
        super(ProgressBar, self).__init__()
        w = Gtk.ProgressBar()
        # GTK3
        # w.set_orientation(Gtk.Orientation.HORIZONTAL)
        # w.set_inverted(False)
        # BUG FIX: in Gtk3 the text set via set_text() is not rendered
        # unless the show-text property is enabled
        w.set_show_text(True)
        self.widget = w

    def set_value(self, pct):
        """Set progress; *pct* is a fraction in [0.0, 1.0]."""
        pct = float(pct)
        self.widget.set_fraction(pct)
        self.widget.set_text("%.2f %%" % (pct * 100.0))
class StatusBar(WidgetBase):
    """Status bar showing transient messages that expire after a timeout."""
    def __init__(self):
        super(StatusBar, self).__init__()
        sbar = Gtk.Statusbar()
        self.ctx_id = None
        self.widget = sbar
        # handle of the pending GObject timeout that clears the message
        self.statustask = None
    def _clear_message(self):
        self.statustask = None
        self.widget.remove_all(self.ctx_id)
    def set_message(self, msg_str, duration=10.0):
        """Show *msg_str*, replacing any prior message; auto-clear after
        *duration* seconds."""
        try:
            self.widget.remove_all(self.ctx_id)
        except Exception:
            pass
        self.ctx_id = self.widget.get_context_id('status')
        self.widget.push(self.ctx_id, msg_str)
        # remove message in about 10 seconds
        if self.statustask is not None:
            # cancel the previous pending clear so timers don't stack
            GObject.source_remove(self.statustask)
        self.statustask = GObject.timeout_add(int(1000 * duration),
                                              self._clear_message)
class TreeView(WidgetBase):
    """Hierarchical tree/table widget wrapping a Gtk.TreeView.

    The tree is described by nested dicts; `levels` gives the nesting
    depth and `leaf_key` names the column that identifies a leaf row.
    A `shadow` dict mirrors the Gtk model so rows can be looked up by
    path without walking the native model.

    Callbacks: 'selected', 'activated', 'drag-start'.
    """

    def __init__(self, auto_expand=False, sortable=False, selection='single',
                 use_alt_row_color=False, dragable=False):
        super(TreeView, self).__init__()

        self.auto_expand = auto_expand
        self.sortable = sortable
        self.selection = selection
        self.dragable = dragable
        self.levels = 1
        self.leaf_key = None
        self.leaf_idx = 0
        self.columns = []
        self.datakeys = []
        # shadow index: mirrors the Gtk model, keyed like the user's tree dict
        self.shadow = {}

        # this widget has a built in ScrollArea to match Qt functionality
        sw = Gtk.ScrolledWindow()
        sw.set_border_width(2)
        sw.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        self.widget = sw

        if self.dragable:
            tv = GtkHelp.MultiDragDropTreeView()
            # enable drag from this widget
            targets = [("text/plain", 0, GtkHelp.DND_TARGET_TYPE_TEXT),
                       ("text/uri-list", 0, GtkHelp.DND_TARGET_TYPE_URIS)]
            tv.enable_model_drag_source(Gdk.ModifierType.BUTTON1_MASK,
                                        targets, Gdk.DragAction.COPY)
            tv.connect("drag-data-get", self._start_drag)
        else:
            tv = Gtk.TreeView()
        self.tv = tv
        sw.add(self.tv)

        tv.connect('cursor-changed', self._selection_cb)
        tv.connect('row-activated', self._cb_redirect)
        # needed to get alternating row colors
        if use_alt_row_color:
            tv.set_rules_hint(True)
        if self.selection == 'multiple':
            # enable multiple selection
            treeselection = tv.get_selection()
            treeselection.set_mode(Gtk.SelectionMode.MULTIPLE)

        for cbname in ('selected', 'activated', 'drag-start'):
            self.enable_callback(cbname)

    def setup_table(self, columns, levels, leaf_key):
        """Configure columns and depth; clears any existing contents.

        `columns` is either a list of header strings (headers double as
        data keys) or a list of (header, datakey) pairs.
        """
        self.clear()
        self.columns = columns
        self.levels = levels
        self.leaf_key = leaf_key

        # create the column headers
        if not isinstance(columns[0], str):
            # columns specifies a mapping of (header, datakey)
            headers = [col[0] for col in columns]
            datakeys = [col[1] for col in columns]
        else:
            headers = datakeys = columns
        self.datakeys = datakeys
        self.leaf_idx = datakeys.index(self.leaf_key)

        # make per-column sort functions
        self.cell_sort_funcs = []
        for kwd in self.datakeys:
            self.cell_sort_funcs.append(self._mksrtfnN(kwd))

        # Remove old columns, if any
        for col in list(self.tv.get_columns()):
            self.tv.remove_column(col)

        # Set up headers
        for n in range(0, len(self.columns)):
            kwd = self.datakeys[n]
            if kwd == 'icon':
                cell = Gtk.CellRendererPixbuf()
            else:
                cell = Gtk.CellRendererText()
            cell.set_padding(2, 0)
            header = headers[n]
            tvc = Gtk.TreeViewColumn(header, cell)
            tvc.set_resizable(True)
            if self.sortable:
                tvc.connect('clicked', self.sort_cb, n)
                tvc.set_clickable(True)
            if n == 0:
                fn_data = self._mkcolfn0(kwd)
                # cell.set_property('xalign', 1.0)
            else:
                fn_data = self._mkcolfnN(kwd)
            tvc.set_cell_data_func(cell, fn_data)
            self.tv.append_column(tvc)

        treemodel = Gtk.TreeStore(object)
        self.tv.set_fixed_height_mode(False)
        self.tv.set_model(treemodel)
        # This speeds up rendering of TreeViews
        self.tv.set_fixed_height_mode(True)

    def set_tree(self, tree_dict):
        """Replace the entire contents with `tree_dict`."""
        self.clear()
        model = Gtk.TreeStore(object)
        self._add_tree(model, tree_dict)

    def add_tree(self, tree_dict):
        """Merge `tree_dict` into the existing contents."""
        model = self.tv.get_model()
        self._add_tree(model, tree_dict)

    def _add_tree(self, model, tree_dict):
        # Hack to get around slow TreeView scrolling with large lists:
        # fixed-height mode is toggled off for the bulk insert
        self.tv.set_fixed_height_mode(False)

        for key in tree_dict:
            self._add_subtree(1, self.shadow,
                              model, None, key, tree_dict[key])

        self.tv.set_model(model)
        self.tv.set_fixed_height_mode(True)

        # User wants auto expand?
        if self.auto_expand:
            self.tv.expand_all()

    def _add_subtree(self, level, shadow, model, parent_item, key, node):
        """Recursively insert `node` under `parent_item`, updating `shadow`."""
        if level >= self.levels:
            # leaf node
            try:
                bnch = shadow[key]
                item_iter = bnch.item
                # TODO: update leaf item
            except KeyError:
                # new leaf item
                item_iter = model.append(parent_item, [node])
                shadow[key] = Bunch.Bunch(node=node, item=item_iter,
                                          terminal=True)
        else:
            try:
                # node already exists
                bnch = shadow[key]
                item = bnch.item
                d = bnch.node
            except KeyError:
                # new interior node
                # BUG FIX: was model.append(None, ...), which attached
                # nested interior nodes at the top level instead of
                # under their parent (broken for trees of 3+ levels)
                item = model.append(parent_item, [str(key)])
                d = {}
                shadow[key] = Bunch.Bunch(node=d, item=item, terminal=False)

            # recurse for non-leaf interior node
            for subkey in node:
                self._add_subtree(level + 1, d, model, item,
                                  subkey, node[subkey])

    def _selection_cb(self, treeview):
        """Gtk cursor-changed handler -> 'selected' callback."""
        path, column = treeview.get_cursor()
        if path is None:
            return
        model = treeview.get_model()
        item = model.get_iter(path)
        res_dict = {}
        self._get_item(res_dict, item)
        self.make_callback('selected', res_dict)

    def _cb_redirect(self, treeview, path, column):
        """Gtk row-activated handler -> 'activated' callback."""
        model = treeview.get_model()
        item = model.get_iter(path)
        res_dict = {}
        self._get_item(res_dict, item)
        self.make_callback('activated', res_dict)

    def _get_path(self, item):
        """Return the list of keys from the root down to model iter `item`."""
        if item is None:
            return []
        model = self.tv.get_model()
        if not model.iter_has_child(item):
            # child node, so append my name to parent's path
            path_rest = self._get_path(model.iter_parent(item))
            d = model.get_value(item, 0)
            if isinstance(d, str):
                myname = d
            else:
                myname = d[self.leaf_key]
            path_rest.append(myname)
            return path_rest

        # non-leaf node case
        myname = model.get_value(item, 0)
        path_rest = self._get_path(model.iter_parent(item))
        path_rest.append(myname)
        return path_rest

    def _get_item(self, res_dict, item):
        # from the model iter `item`, return the item via a path
        # in the dictionary `res_dict`
        path = self._get_path(item)
        d, s = res_dict, self.shadow
        for name in path[:-1]:
            d = d.setdefault(name, {})
            s = s[name].node

        dst_key = path[-1]
        d[dst_key] = s[dst_key].node

    def get_selected(self):
        """Return a nested dict of all currently selected items."""
        treeselection = self.tv.get_selection()
        model, pathlist = treeselection.get_selected_rows()
        res_dict = {}
        for path in pathlist:
            item = model.get_iter(path)
            self._get_item(res_dict, item)
        return res_dict

    def clear(self):
        """Remove all rows and reset the shadow index."""
        model = Gtk.TreeStore(object)
        self.tv.set_model(model)
        self.shadow = {}

    def clear_selection(self):
        treeselection = self.tv.get_selection()
        treeselection.unselect_all()

    def select_path(self, path, state=True):
        """Select (or deselect, if `state` is False) the row at `path`."""
        treeselection = self.tv.get_selection()
        item = self._path_to_item(path)
        if state:
            treeselection.select_iter(item)
        else:
            treeselection.unselect_iter(item)

    def highlight_path(self, path, onoff, font_color='green'):
        item = self._path_to_item(path)  # noqa
        # TODO: not yet implemented for the Gtk backend

    def _path_to_item(self, path):
        """Resolve a key path via the shadow index to a Gtk model iter."""
        s = self.shadow
        for name in path[:-1]:
            s = s[name].node
        item = s[path[-1]].item
        return item

    def scroll_to_path(self, path):
        """Scroll so the row at `path` is vertically centered."""
        item = self._path_to_item(path)
        model = self.tv.get_model()
        treepath = model.get_path(item)
        self.tv.scroll_to_cell(treepath, use_align=True, row_align=0.5)

    def sort_on_column(self, i):
        model = self.tv.get_model()
        model.set_sort_column_id(i, Gtk.SortType.ASCENDING)

    def set_column_width(self, i, width):
        col = self.tv.get_column(i)
        col.set_max_width(width)

    def set_column_widths(self, lwidths):
        """Set widths from a list; None entries leave a column unchanged."""
        for i, width in enumerate(lwidths):
            if width is not None:
                self.set_column_width(i, width)

    def set_optimal_column_widths(self):
        self.tv.columns_autosize()

    def sort_cb(self, column, idx):
        """Header-click handler: sort ascending on column `idx`."""
        treeview = column.get_tree_view()
        model = treeview.get_model()
        model.set_sort_column_id(idx, Gtk.SortType.ASCENDING)
        fn = self.cell_sort_funcs[idx]
        model.set_sort_func(idx, fn)
        return True

    def _mksrtfnN(self, idx):
        """Return a Gtk sort function comparing rows on data key `idx`."""
        def fn(*args):
            model, iter1, iter2 = args[:3]
            bnch1 = model.get_value(iter1, 0)
            bnch2 = model.get_value(iter2, 0)
            if isinstance(bnch1, str):
                if isinstance(bnch2, str):
                    # interior nodes: compare names case-insensitively
                    s1, s2 = bnch1.lower(), bnch2.lower()
                    if s1 < s2:
                        return -1
                    if s1 > s2:
                        return 1
                return 0
            val1, val2 = bnch1[idx], bnch2[idx]
            if isinstance(val1, str):
                val1, val2 = val1.lower(), val2.lower()
            if val1 < val2:
                return -1
            if val1 > val2:
                return 1
            return 0
        return fn

    def _mkcolfn0(self, idx):
        """Cell data func for column 0 (shows the node name or icon)."""
        def fn(*args):
            column, cell, model, iter = args[:4]
            bnch = model.get_value(iter, 0)
            if isinstance(bnch, str):
                cell.set_property('text', bnch)
            elif isinstance(bnch, GdkPixbuf.Pixbuf):
                cell.set_property('pixbuf', bnch)
            elif isinstance(bnch[idx], GdkPixbuf.Pixbuf):
                cell.set_property('pixbuf', bnch[idx])
            else:
                cell.set_property('text', bnch[idx])
        return fn

    def _mkcolfnN(self, idx):
        """Cell data func for columns > 0 (blank for interior nodes)."""
        def fn(*args):
            column, cell, model, iter = args[:4]
            bnch = model.get_value(iter, 0)
            if isinstance(bnch, str):
                cell.set_property('text', '')
            elif isinstance(bnch, GdkPixbuf.Pixbuf):
                cell.set_property('text', '')
            elif isinstance(bnch[idx], GdkPixbuf.Pixbuf):
                cell.set_property('pixbuf', bnch[idx])
            else:
                cell.set_property('text', str(bnch[idx]))
        return fn

    def _start_drag(self, treeview, context, selection,
                    info, timestamp):
        """Gtk drag-data-get handler -> 'drag-start' callback."""
        res_dict = self.get_selected()
        drag_pkg = DragPackage(self.tv, selection)
        self.make_callback('drag-start', drag_pkg, res_dict)
        drag_pkg.start_drag()
class WebView(WidgetBase):
    """Minimal embedded browser backed by WebKit.

    Raises NotImplementedError at construction if WebKit is unavailable.
    """

    def __init__(self):
        if not has_webkit:
            raise NotImplementedError("Missing webkit")

        super(WebView, self).__init__()
        self.widget = WebKit.WebView()

    def load_url(self, url):
        """Navigate to `url`."""
        self.widget.open(url)

    def load_html_string(self, html_string):
        """Render the given HTML string (UTF-8, file:// base URI)."""
        self.widget.load_string(html_string, 'text/html', 'utf-8', 'file://')

    def go_back(self):
        """Go back one page in history."""
        self.widget.go_back()

    def go_forward(self):
        """Go forward one page in history."""
        self.widget.go_forward()

    def reload_page(self):
        """Reload the current page."""
        self.widget.reload()

    def stop_loading(self):
        """Abort any in-progress page load."""
        self.widget.stop_loading()
# CONTAINERS
class ContainerBase(WidgetBase):
    """Base class for widgets that manage child widgets.

    Keeps a parallel list of wrapper children alongside the native
    Gtk container.  Callbacks: 'widget-added', 'widget-removed'.
    """

    def __init__(self):
        super(ContainerBase, self).__init__()
        self.children = []
        for cb_name in ('widget-added', 'widget-removed'):
            self.enable_callback(cb_name)

    def add_ref(self, ref):
        """Record `ref` as one of our children.

        TODO: should this be a weakref?
        """
        self.children.append(ref)

    def _remove(self, childw, delete=False):
        # detach the *native* widget; optionally destroy it
        self.widget.remove(childw)
        if delete:
            childw.destroy()

    def remove(self, child, delete=False):
        """Remove wrapper `child`; raises KeyError if not a child."""
        if child not in self.children:
            raise KeyError("Widget is not a child of this container")
        self.children.remove(child)

        self._remove(child.get_widget(), delete=delete)
        self.make_callback('widget-removed', child)

    def remove_all(self, delete=False):
        """Remove every child (iterates over a copy for safety)."""
        for child in list(self.children):
            self.remove(child, delete=delete)

    def get_children(self):
        return self.children

    def num_children(self):
        return len(self.children)

    def _get_native_children(self):
        # native Gtk widgets, in the same order as self.children
        return [child.get_widget() for child in self.children]

    def _get_native_index(self, nchild):
        return self._get_native_children().index(nchild)

    def _native_to_child(self, nchild):
        """Map a native Gtk widget back to its wrapper child."""
        return self.children[self._get_native_index(nchild)]

    def set_margins(self, left, right, top, bottom):
        # TODO: can this be made more accurate?  Gtk only offers a
        # single uniform border width here
        self.widget.set_border_width(left)

    def set_border_width(self, pix):
        self.widget.set_border_width(pix)
class Box(ContainerBase):
    """A horizontal or vertical box container."""

    def __init__(self, orientation='horizontal'):
        super(Box, self).__init__()
        if orientation == 'horizontal':
            self.widget = Gtk.HBox()
        else:
            self.widget = Gtk.VBox()

    def set_spacing(self, val):
        self.widget.set_spacing(val)

    def _pack(self, child_w, stretch):
        # Gtk has no fractional stretch; any positive stretch expands.
        # TODO: can this be made more accurate?
        do_expand = float(stretch) > 0.0
        self.widget.pack_start(child_w, do_expand, True, 0)

    def insert_widget(self, idx, child, stretch=0.0):
        """Insert `child` at position `idx`."""
        child_w = child.get_widget()
        self._pack(child_w, stretch)
        self.widget.reorder_child(child_w, idx)
        self.children.insert(idx, child)
        self.widget.show_all()
        self.make_callback('widget-added', child)

    def add_widget(self, child, stretch=0.0):
        """Append `child` to the end of the box."""
        self.add_ref(child)
        self._pack(child.get_widget(), stretch)
        self.widget.show_all()
        self.make_callback('widget-added', child)
class VBox(Box):
    """Convenience: a vertically-oriented Box."""

    def __init__(self):
        super(VBox, self).__init__(orientation='vertical')
class HBox(Box):
    """Convenience: a horizontally-oriented Box."""

    def __init__(self):
        super(HBox, self).__init__(orientation='horizontal')
class Frame(ContainerBase):
    """A titled frame holding a single child widget."""

    def __init__(self, title=None):
        super(Frame, self).__init__()

        frame_w = Gtk.Frame(label=title)
        frame_w.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
        frame_w.set_label_align(0.10, 0.5)
        self.widget = frame_w

    def set_widget(self, child):
        """Replace the frame's content with `child`."""
        self.remove_all()
        self.add_ref(child)
        self.widget.add(child.get_widget())
        self.widget.show_all()

    def set_text(self, text):
        """Change the frame's title text."""
        self.get_widget().get_label_widget().set_text(text)
class Expander(ContainerBase):
    """A collapsible container with an optional toggle-button header.

    Callbacks: 'opened', 'closed'.
    """

    # arrow pixbufs are loaded lazily and shared by all instances
    r_arrow = None
    d_arrow = None

    def __init__(self, title=None, notoggle=False):
        super(Expander, self).__init__()

        vbox = VBox()
        vbox.set_margins(0, 0, 0, 0)
        vbox.set_spacing(0)
        self.widget = vbox.get_widget()
        self._vbox = vbox

        if Expander.r_arrow is None:
            iconpath = os.path.join(icondir, 'triangle-right-48.png')
            Expander.r_arrow = GtkHelp.pixbuf_new_from_file_at_size(iconpath,
                                                                    12, 12)
        if Expander.d_arrow is None:
            iconpath = os.path.join(icondir, 'triangle-down-48.png')
            Expander.d_arrow = GtkHelp.pixbuf_new_from_file_at_size(iconpath,
                                                                    12, 12)
        self._d_arrow = Gtk.Image.new_from_pixbuf(Expander.d_arrow)
        self._r_arrow = Gtk.Image.new_from_pixbuf(Expander.r_arrow)

        self.toggle = None
        if not notoggle:
            toggle = ToggleButton(title)
            self.toggle = toggle
            toggle_w = toggle.get_widget()
            toggle_w.set_always_show_image(True)
            r_arrow = Gtk.Image.new_from_pixbuf(Expander.r_arrow)
            toggle_w.set_image(r_arrow)
            toggle.add_callback('activated', self._toggle_widget)
            vbox.add_widget(toggle, stretch=0)

        self.content = None

        for name in ('opened', 'closed'):
            self.enable_callback(name)

    def set_widget(self, child, stretch=1):
        """Set the collapsible content widget.

        BUG FIX: previously this called ``self.widget.remove(self.content)``,
        which passed a *wrapper* object to the native Gtk container and
        failed whenever the content was replaced.  Remove via the wrapper
        container instead, and only if the content is actually attached
        (it is detached whenever the expander is collapsed).
        """
        if (self.content is not None and
                self.content in self._vbox.get_children()):
            self._vbox.remove(self.content)
        self.content = child

    def expand(self, tf):
        """Expand (tf=True) or collapse (tf=False) the content area."""
        children = self._vbox.get_children()
        if tf:
            # already expanded, or nothing to show?
            if self.content is None or self.content in children:
                return
            if self.toggle is not None:
                self.toggle.get_widget().set_image(self._d_arrow)
            self._vbox.add_widget(self.content, stretch=1)
            self.make_callback('opened')
        else:
            # already collapsed, or nothing to hide?
            if self.content is None or self.content not in children:
                return
            if self.toggle is not None:
                self.toggle.get_widget().set_image(self._r_arrow)
            self._vbox.remove(self.content)
            self.make_callback('closed')

    def _toggle_widget(self, w, tf):
        # toggle button handler
        self.expand(tf)
class TabWidget(ContainerBase):
    """Notebook-style container: one child per tab.

    Tabs can optionally be reordered by dragging, and detached into
    their own windows.  Tab moves between notebooks are coordinated
    through the module-level `_widget_move_event` handshake (see
    `_tab_remove_cb` / `_tab_insert_cb`).

    Callbacks: 'page-switch', 'page-close', 'page-move', 'page-detach'.
    """

    def __init__(self, tabpos='top', reorderable=False, detachable=True,
                 group=0):
        super(TabWidget, self).__init__()

        self.reorderable = reorderable
        self.detachable = detachable

        nb = GtkHelp.Notebook()
        # nb = Gtk.Notebook()
        nb.set_show_border(False)
        nb.set_scrollable(True)
        # Allows drag-and-drop between notebooks
        # nb.set_group_id(group)   # in gtk3?
        if self.detachable:
            nb.connect("create-window", self._tab_detach_cb)
        nb.connect("page-added", self._tab_insert_cb)
        nb.connect("page-removed", self._tab_remove_cb)
        # contrary to some other widgets, we want the "tab changed" event
        # when the index is switched programmatically as well as by user
        ## nb.sconnect("switch-page", self._cb_redirect)
        nb.connect("switch-page", self._cb_redirect)
        self.widget = nb
        self.set_tab_position(tabpos)

        for name in ('page-switch', 'page-close', 'page-move', 'page-detach'):
            self.enable_callback(name)

    def set_tab_position(self, tabpos):
        """Place the tab strip at 'top', 'bottom', 'left' or 'right'."""
        nb = self.widget
        if tabpos == 'top':
            nb.set_tab_pos(Gtk.PositionType.TOP)
        elif tabpos == 'bottom':
            nb.set_tab_pos(Gtk.PositionType.BOTTOM)
        elif tabpos == 'left':
            nb.set_tab_pos(Gtk.PositionType.LEFT)
        elif tabpos == 'right':
            nb.set_tab_pos(Gtk.PositionType.RIGHT)

    def _tab_detach_cb(self, source, nchild_w, x, y):
        # Gtk "create-window" handler: a tab was dragged out
        child = self._native_to_child(nchild_w)
        # remove child
        # (native widget already has been removed by gtk)
        self.children.remove(child)
        # nchild_w.unparent()
        self.make_callback('page-detach', child)

    def _tab_insert_cb(self, nbw, nchild_w, page_num):
        # Gtk "page-added" handler.  If a move event is pending (set by
        # the source notebook's _tab_remove_cb), complete the transfer
        # of the wrapper child from the source widget to us.
        global _widget_move_event
        if _widget_move_event is not None:
            event, _widget_move_event = _widget_move_event, None
            already_here = nchild_w in self._get_native_children()
            if not already_here and event.child.get_widget() == nchild_w:
                child = event.child
                # remove child from src tab
                # (native widget already has been removed by gtk)
                event.src_widget.children.remove(child)
                # add child to us
                # (native widget already has been added by gtk)
                self.add_ref(child)
                self.make_callback('page-move', event.src_widget, child)

    def _tab_remove_cb(self, nbw, nchild_w, page_num):
        # Gtk "page-removed" handler: record a pending move so the
        # receiving notebook's _tab_insert_cb can finish the handoff
        global _widget_move_event
        try:
            child = self._native_to_child(nchild_w)
            _widget_move_event = WidgetMoveEvent(self, child)
        except ValueError:
            # we were triggered by a removal that is not a move
            pass

    def _cb_redirect(self, nbw, gptr, index):
        # Gtk "switch-page" handler -> 'page-switch' callback
        child = self.index_to_widget(index)
        self.make_callback('page-switch', child)

    def _cb_select(self, widget, event, child):
        # click on a tab label -> 'page-switch' callback
        self.make_callback('page-switch', child)

    def add_widget(self, child, title=''):
        """Append `child` as a new tab labeled `title`."""
        self.add_ref(child)
        child_w = child.get_widget()
        label = Gtk.Label(title)
        evbox = Gtk.EventBox()
        evbox.props.visible_window = True
        evbox.add(label)
        evbox.show_all()
        evbox.connect("button-press-event", self._cb_select, child)
        self.widget.append_page(child_w, evbox)
        if self.reorderable:
            self.widget.set_tab_reorderable(child_w, True)
        if self.detachable:
            self.widget.set_tab_detachable(child_w, True)
        self.widget.show_all()
        # attach title to child
        child.extdata.tab_title = title
        self.make_callback('widget-added', child)

    def get_index(self):
        """Return the index of the currently shown tab."""
        return self.widget.get_current_page()

    def set_index(self, idx):
        """Switch to the tab at index `idx`."""
        self.widget.set_current_page(idx)

    def index_of(self, child):
        """Return the tab index of `child`, or -1 if not found."""
        widget = child.get_widget()
        if widget is None:
            return -1
        return self.widget.page_num(widget)

    def index_to_widget(self, idx):
        """Returns child corresponding to `idx`"""
        nchild = self.widget.get_nth_page(idx)
        return self._native_to_child(nchild)

    def highlight_tab(self, idx, tf):
        """Color (tf=True) or uncolor (tf=False) the tab label at `idx`."""
        nchild = self.widget.get_nth_page(idx)
        evbox = self.widget.get_tab_label(nchild)
        if tf:
            GtkHelp.modify_bg(evbox, 'palegreen')
        else:
            GtkHelp.modify_bg(evbox, None)
class StackWidget(TabWidget):
    """A TabWidget that shows exactly one page with the tab strip hidden."""

    def __init__(self):
        super(StackWidget, self).__init__()

        nb = self.widget
        nb.set_show_tabs(False)
        nb.set_show_border(False)
class MDIWidget(ContainerBase):
    """Multiple-document-interface container: children live in movable,
    resizable subwindows inside a scrolled area.

    Callbacks: 'page-switch', 'page-close'.
    """

    def __init__(self, tabpos='top', mode='tabs'):
        super(MDIWidget, self).__init__()

        # NOTE: `tabpos`/`mode` parameters are currently ignored;
        # this implementation is always in true-MDI mode
        self.mode = 'mdi'
        self.true_mdi = True

        # TODO: currently scrollbars are only partially working
        sw = Gtk.ScrolledWindow()
        sw.set_border_width(2)
        sw.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        self.widget = sw

        w = GtkHelp.MDIWidget()
        self.mdi_w = w
        # Monkey patching the internal callbacks so that we can make
        # the correct callbacks (original methods kept under _-prefixed
        # names and invoked from our interceptors below)
        w._move_page = w.move_page
        w.move_page = self._window_moved
        w._resize_page = w.resize_page
        w.resize_page = self._window_resized
        w._set_current_page = w.set_current_page
        w.set_current_page = self._set_current_page

        sw.set_hadjustment(self.mdi_w.get_hadjustment())
        sw.set_vadjustment(self.mdi_w.get_vadjustment())
        sw.add(self.mdi_w)

        for name in ('page-switch', 'page-close'):
            self.enable_callback(name)

    def get_mode(self):
        """Return the current mode (always 'mdi' in this implementation)."""
        return self.mode

    def set_mode(self, mode):
        # mode switching not supported in this backend
        pass

    def add_widget(self, child, title=''):
        """Add `child` in a new MDI subwindow titled `title`."""
        self.add_ref(child)
        subwin = MDIWindow(self, child, title=title)
        subwin.add_callback('close', self._window_close, child)
        self.make_callback('widget-added', child)
        return subwin

    def _remove(self, childw, delete=False):
        # children are removed from the inner MDI widget, not the
        # scrolled window that ContainerBase would use
        self.mdi_w.remove(childw)
        if delete:
            childw.destroy()

    def _window_resized(self, subwin, wd, ht):
        # interceptor for mdi_w.resize_page (see __init__)
        self.mdi_w._resize_page(subwin, wd, ht)
        # save size so it can be restored if the child is re-added
        nchild = subwin.widget
        child = self._native_to_child(nchild)
        child.extdata.mdi_size = (wd, ht)
        return True

    def _window_moved(self, subwin, x, y):
        # interceptor for mdi_w.move_page (see __init__)
        self.mdi_w._move_page(subwin, x, y)
        # save position so it can be restored if the child is re-added
        nchild = subwin.widget
        child = self._native_to_child(nchild)
        child.extdata.mdi_pos = (x, y)
        return True

    def _window_close(self, subwin, child):
        # subwindow close button -> 'page-close' callback
        return self.make_callback('page-close', child)

    def _set_current_page(self, idx):
        # interceptor for mdi_w.set_current_page; fires 'page-switch'
        # only when the page actually changes
        _idx = self.mdi_w.get_current_page()
        self.mdi_w._set_current_page(idx)
        if _idx != idx:
            child = self.index_to_widget(idx)
            self.make_callback('page-switch', child)

    def get_index(self):
        """Return the index of the current subwindow."""
        return self.mdi_w.get_current_page()

    def set_index(self, idx):
        """Raise the subwindow at index `idx`."""
        self.mdi_w.set_current_page(idx)

    def index_of(self, child):
        """Return the index of `child`'s subwindow."""
        return self.mdi_w.page_num(child.get_widget())

    def index_to_widget(self, idx):
        """Returns child corresponding to `idx`"""
        nchild = self.mdi_w.get_nth_page(idx)
        return self._native_to_child(nchild)

    def tile_panes(self):
        """Arrange subwindows in a non-overlapping tiling."""
        self.mdi_w.tile_pages()

    def cascade_panes(self):
        """Arrange subwindows in an overlapping cascade."""
        self.mdi_w.cascade_pages()

    def use_tabs(self, tf):
        # tab mode not supported in this backend
        pass
class MDIWindow(WidgetBase):
    """Wrapper around one subwindow inside an MDIWidget.

    Callbacks: 'close'.
    """

    def __init__(self, parent, child, title=''):
        """NOTE: this widget is not meant to be instantiated except *inside*
        of MDIWidget implementation.
        """
        WidgetBase.__init__(self)

        self.parent = parent
        mdi_w = parent.mdi_w

        # does child have a previously saved size?
        size = child.extdata.get('mdi_size', None)
        if size is not None:
            wd, ht = size
            child.resize(wd, ht)

        child_w = child.get_widget()
        label = Gtk.Label(title)

        subwin = GtkHelp.MDISubWindow(child_w, label)
        self.widget = subwin
        # attach title to child
        child.extdata.tab_title = title

        self.enable_callback('close')
        subwin.add_callback('close', self._window_close)

        # does child have a previously saved position?
        pos = child.extdata.get('mdi_pos', None)
        if pos is not None:
            subwin.x, subwin.y = pos
        mdi_w.add_subwin(subwin)

    def get_pos(self):
        """Return the (x, y) position of the subwindow."""
        return self.widget.x, self.widget.y

    def raise_(self):
        """Raise the subwindow above its siblings."""
        self.widget.raise_()

    def lower(self):
        """Lower the subwindow below its siblings."""
        self.widget.lower()

    def focus(self):
        """Give the subwindow keyboard focus."""
        self.widget.focus()

    def move(self, x, y):
        """Move the subwindow to (x, y) within the MDI area."""
        self.parent.mdi_w.move_page(self.widget, x, y)

    def resize(self, wd, ht):
        """Resize the subwindow to `wd` x `ht` pixels."""
        self.parent.mdi_w.resize_page(self.widget, wd, ht)

    def maximize(self):
        """Expand the subwindow to fill the MDI area."""
        self.parent.mdi_w.maximize_page(self.widget)

    def unmaximize(self):
        raise WidgetError("this call not available for MDIWindow")

    def fullscreen(self):
        raise WidgetError("this call not available for MDIWindow")

    def unfullscreen(self):
        raise WidgetError("this call not available for MDIWindow")

    def is_fullscreen(self):
        raise WidgetError("this call not available for MDIWindow")

    def iconify(self):
        """Minimize the subwindow."""
        self.parent.mdi_w.minimize_page(self.widget)

    def uniconify(self):
        raise WidgetError("this call not available for MDIWindow")

    def set_title(self, title):
        """Change the subwindow title label."""
        self.widget.label.set_text(title)

    def _window_close(self, subwin):
        # native close -> 'close' callback
        return self.make_callback('close')
class ScrollArea(ContainerBase):
    """Container that wraps a single child widget in scrollbars.

    Callbacks: 'configure' (fired with the new width and height).
    """

    def __init__(self):
        super(ScrollArea, self).__init__()

        scr_w = Gtk.ScrolledWindow()
        scr_w.set_border_width(2)
        scr_w.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        self.widget = scr_w

        self.enable_callback('configure')
        scr_w.connect("size_allocate", self._resize_cb)

    def _resize_cb(self, widget, allocation):
        """Gtk size-allocate handler: report the new size via 'configure'."""
        rect = widget.get_allocation()
        self.make_callback('configure', rect.width, rect.height)
        return True

    def set_widget(self, child):
        """Install `child` as the sole scrolled content."""
        self.remove_all()
        self.add_ref(child)
        self.widget.add_with_viewport(child.get_widget())
        self.widget.show_all()

    def scroll_to_end(self, vertical=True, horizontal=False):
        """Scroll to the maximum extent along the requested axes."""
        adjustments = []
        if vertical:
            adjustments.append(self.widget.get_vadjustment())
        if horizontal:
            adjustments.append(self.widget.get_hadjustment())
        for adj in adjustments:
            adj.set_value(adj.get_upper())
class Splitter(ContainerBase):
    """Resizable multi-pane container built from chained Gtk.Paned widgets.

    Gtk.Paned only holds two children, so N children are supported by
    nesting: each new child after the second goes into a fresh Paned
    packed into the previous one's second slot.  `self.panes` records
    the chain, outermost first.
    """

    def __init__(self, orientation='horizontal', thumb_px=8):
        super(Splitter, self).__init__()
        # thumb_px ignored in this version
        self.orientation = orientation

        self.widget = self._get_pane()
        self.panes = [self.widget]

    def _get_pane(self):
        # create a new Paned matching our orientation
        if self.orientation == 'horizontal':
            w = Gtk.HPaned()
        else:
            w = Gtk.VPaned()
        w.set_wide_handle(True)
        return w

    def add_widget(self, child):
        """Append `child` as a new resizable pane."""
        self.add_ref(child)
        child_w = child.get_widget()
        # without a Frame it can be difficult to see the divider
        frame_w = Gtk.Frame()
        #frame_w.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
        frame_w.set_shadow_type(Gtk.ShadowType.NONE)
        frame_w.add(child_w)

        if len(self.children) == 1:
            # first child goes into slot 1 of the outermost pane
            self.widget.pack1(frame_w)
        else:
            # subsequent children: chain a new pane into the last one
            last = self.widget
            if len(self.panes) > 0:
                last = self.panes[-1]
            w = self._get_pane()
            self.panes.append(w)
            w.pack1(frame_w)
            last.pack2(w)

        self.widget.show_all()
        self.make_callback('widget-added', child)

    def _get_sizes(self, pane):
        # return (divider position, total extent) for one pane
        rect = pane.get_allocation()
        if self.orientation == 'horizontal':
            total = rect.width
        else:
            total = rect.height
        pos = pane.get_position()
        return (pos, total)

    def get_sizes(self):
        """Return the list of pane sizes along the split axis."""
        res = []
        if len(self.panes) > 0:
            # divider positions of all but the last pane...
            for pane in self.panes[:-1]:
                pos, total = self._get_sizes(pane)
                res.append(pos)
            # ...plus the full extent of the last one
            pane = self.panes[-1]
            pos, total = self._get_sizes(pane)
            res.append(total)
        return res

    def set_sizes(self, sizes):
        """Set divider positions from a list of sizes (see get_sizes)."""
        for i, pos in enumerate(sizes):
            pane = self.panes[i]
            pane.set_position(pos)
class Splitter2(ContainerBase):
    """Alternative Splitter implementation backed by GtkHelp.Splitter.

    Same interface as `Splitter`, but delegates pane management and
    size bookkeeping to the helper widget.
    """

    def __init__(self, orientation='horizontal', thumb_px=8):
        # BUG FIX: was ``super(Splitter, self).__init__()`` -- `Splitter`
        # is not in Splitter2's MRO, so instantiation raised TypeError
        super(Splitter2, self).__init__()
        self.orientation = orientation

        self.widget = GtkHelp.Splitter(orientation=self.orientation,
                                       thumb_px=thumb_px)

    def add_widget(self, child):
        """Append `child` as a new resizable pane."""
        self.add_ref(child)
        child_w = child.get_widget()
        # without a Frame it can be difficult to see the divider
        frame_w = Gtk.Frame()
        #frame_w.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
        frame_w.set_shadow_type(Gtk.ShadowType.NONE)
        frame_w.add(child_w)

        self.widget.add_widget(frame_w)
        self.widget.show_all()
        self.make_callback('widget-added', child)

    def get_sizes(self):
        """Return the list of pane sizes along the split axis."""
        return self.widget.get_sizes()

    def set_sizes(self, sizes):
        """Set pane sizes from a list (see get_sizes)."""
        self.widget.set_sizes(sizes)
class GridBox(ContainerBase):
    """Container laying out children in a row/column grid (Gtk.Table)."""

    def __init__(self, rows=1, columns=1):
        super(GridBox, self).__init__()
        w = Gtk.Table(rows=rows, columns=columns)
        self.widget = w
        self.num_rows = rows
        self.num_cols = columns

    def resize_grid(self, rows, columns):
        """Resize the grid to hold `rows` x `columns` cells."""
        self.num_rows = rows
        self.num_cols = columns
        self.widget.resize(rows, columns)

    def set_row_spacing(self, val):
        """Set vertical spacing between rows, in pixels."""
        self.widget.set_row_spacings(val)

    def set_column_spacing(self, val):
        """Set horizontal spacing between columns, in pixels."""
        self.widget.set_col_spacings(val)

    def set_spacing(self, val):
        """Set both row and column spacing to `val` pixels."""
        self.set_row_spacing(val)
        self.set_column_spacing(val)

    def add_widget(self, child, row, col, stretch=0):
        """Attach `child` at cell (`row`, `col`) (0-based indices).

        BUG FIX: a widget at 0-based index `row` needs `row + 1` rows,
        but the old test (``row > self.num_rows`` then
        ``self.num_rows = row``) under-counted by one; same for columns.
        """
        resize = False
        if row >= self.num_rows:
            resize = True
            self.num_rows = row + 1
        if col >= self.num_cols:
            resize = True
            self.num_cols = col + 1
        if resize:
            self.resize_grid(self.num_rows, self.num_cols)

        self.add_ref(child)
        w = child.get_widget()
        if stretch > 0:
            # stretched cells expand with the table in both directions
            xoptions = (Gtk.AttachOptions.EXPAND | Gtk.AttachOptions.SHRINK |
                        Gtk.AttachOptions.FILL)
            yoptions = (Gtk.AttachOptions.EXPAND | Gtk.AttachOptions.SHRINK |
                        Gtk.AttachOptions.FILL)
        else:
            xoptions = (Gtk.AttachOptions.FILL | Gtk.AttachOptions.SHRINK)
            yoptions = (Gtk.AttachOptions.FILL | Gtk.AttachOptions.SHRINK)
        self.widget.attach(w, col, col + 1, row, row + 1,
                           xoptions=xoptions, yoptions=yoptions,
                           xpadding=0, ypadding=0)
        self.widget.show_all()
        self.make_callback('widget-added', child)
class Toolbar(ContainerBase):
    """A horizontal or vertical toolbar of buttons, menus and widgets."""

    def __init__(self, orientation='horizontal'):
        super(Toolbar, self).__init__()

        w = Gtk.Toolbar()
        w.set_style(Gtk.ToolbarStyle.ICONS)
        if orientation == 'horizontal':
            w.set_orientation(Gtk.Orientation.HORIZONTAL)
        else:
            w.set_orientation(Gtk.Orientation.VERTICAL)
        self.widget = w

    def add_action(self, text, toggle=False, iconpath=None, iconsize=None):
        """Add a (toggle) button, optionally with an icon; returns it."""
        if toggle:
            child = ToggleButton(text)
        else:
            child = Button(text)

        if iconpath is not None:
            if iconsize is not None:
                wd, ht = iconsize
            else:
                # scale the default 24px icon by the screen DPI
                # (assumes the module-level Application `_app` exists)
                scale_f = _app.screen_res / 96.0
                px = int(scale_f * 24)
                wd, ht = px, px
            pixbuf = GtkHelp.pixbuf_new_from_file_at_size(iconpath, wd, ht)
            if pixbuf is not None:
                image = Gtk.Image.new_from_pixbuf(pixbuf)
                child.get_widget().set_image(image)

        self.add_widget(child)
        return child

    def add_widget(self, child):
        """Add an arbitrary widget to the toolbar; returns the tool wrapper."""
        # gtk3 says to add a generic widget using ToolItem.new()
        tool_w = Gtk.ToolItem.new()
        w = child.get_widget()
        tool_w.add(w)
        w.show()
        tool = ContainerBase()
        tool.widget = tool_w
        tool_w.show()
        tool.add_ref(child)
        self.add_ref(tool)
        self.widget.insert(tool_w, -1)
        self.make_callback('widget-added', child)
        return tool

    def add_menu(self, text, menu=None, mtype='tool'):
        """Add a drop-down menu button; returns the Menu (created if None)."""
        if menu is None:
            menu = Menu()
        if mtype == 'tool':
            # add_action() already inserts the child into the toolbar
            child = self.add_action(text)
        else:
            child = Label(text, style='clickable', menu=menu)
            self.add_widget(child)
        child.add_callback('released', lambda w: menu.hide())
        child.add_callback('activated', lambda w: menu.popup())
        return menu

    def add_separator(self):
        """Insert a visual separator at the end of the toolbar."""
        sep_w = Gtk.SeparatorToolItem()
        # wrap() turns the native separator into a generic wrapper widget
        sep = wrap(sep_w)
        self.widget.insert(sep_w, -1)
        self.add_ref(sep)
class MenuAction(WidgetBase):
    """A single (optionally checkable) menu entry.

    Callbacks: 'activated' (with the check state when checkable).
    """

    def __init__(self, text=None, checkable=False):
        super(MenuAction, self).__init__()
        self.text = text
        self.checkable = checkable

        if checkable:
            item_w = Gtk.CheckMenuItem(label=text)
            item_w.connect('toggled', self._cb_redirect)
        else:
            item_w = Gtk.MenuItem(label=text)
            item_w.connect('activate', self._cb_redirect)
        self.widget = item_w
        self.widget.show()

        self.enable_callback('activated')

    def set_state(self, tf):
        """Set the check state; only valid for checkable items."""
        if not self.checkable:
            raise ValueError("Not a checkable menu item")
        self.widget.set_active(tf)

    def get_state(self):
        """Return the current check state."""
        return self.widget.get_active()

    def _cb_redirect(self, *args):
        # native activate/toggled -> 'activated' callback
        if self.checkable:
            self.make_callback('activated', self.widget.get_active())
        else:
            self.make_callback('activated')
class Menu(ContainerBase):
    """A popup menu (or submenu) holding MenuAction items and submenus."""

    def __init__(self):
        super(Menu, self).__init__()
        self.widget = Gtk.Menu()
        # named submenus, looked up case-insensitively
        self.menus = Bunch.Bunch(caseless=True)
        self.widget.show()

    def add_widget(self, child):
        """Append an existing MenuAction (or similar) to the menu."""
        self.widget.append(child.get_widget())
        self.add_ref(child)
        self.make_callback('widget-added', child)

    def add_name(self, name, checkable=False):
        """Create a MenuAction labeled `name`, append it and return it."""
        child = MenuAction(text=name, checkable=checkable)
        self.add_widget(child)
        return child

    def add_menu(self, name):
        """Create a named submenu, append it and return it."""
        sub = Menu()
        self.add_ref(sub)
        self.menus[name] = sub
        item_w = Gtk.MenuItem(label=name)
        item_w.set_submenu(sub.get_widget())
        self.widget.append(item_w)
        item_w.show()
        return sub

    def get_menu(self, name):
        """Look up a previously added submenu by name."""
        return self.menus[name]

    def add_separator(self):
        """Append a visual separator."""
        sep_w = Gtk.SeparatorMenuItem()
        self.widget.append(sep_w)
        sep_w.show()

    def popup(self, widget=None):
        """Pop the menu up at the current pointer position."""
        menu_w = self.widget
        menu_w.show_all()
        if menu_w.get_sensitive():
            # args: parent shell, parent item, position func, data,
            #       mouse button (0 = none), activation time
            menu_w.popup(None, None, None, None, 0, int(0))
class Menubar(ContainerBase):
    """A horizontal menu bar holding named Menu objects."""

    def __init__(self):
        super(Menubar, self).__init__()
        self.widget = Gtk.MenuBar()
        # named menus, looked up case-insensitively
        self.menus = Bunch.Bunch(caseless=True)

    def _attach_menu(self, name, child):
        # common bookkeeping: hang `child` under a titled bar item
        item_w = Gtk.MenuItem(label=name)
        item_w.set_submenu(child.get_widget())
        self.add_ref(child)
        self.widget.append(item_w)
        self.menus[name] = child
        item_w.show()

    def add_widget(self, child, name):
        """Attach an existing Menu under title `name`; returns it."""
        if not isinstance(child, Menu):
            raise ValueError("child widget needs to be a Menu object")
        self._attach_menu(name, child)
        self.make_callback('widget-added', child)
        return child

    def add_name(self, name):
        """Create a new Menu under title `name`; returns it."""
        child = Menu()
        self._attach_menu(name, child)
        return child

    def get_menu(self, name):
        """Look up a previously added menu by name."""
        return self.menus[name]
class TopLevelMixin(object):
    """Mixin adding window-level behavior (show/hide, move, fullscreen,
    etc.) to widgets whose `self.widget` is a Gtk top-level window.

    Callbacks: 'close'.
    """

    def __init__(self, title=None):
        self._fullscreen = False

        self.widget.connect("destroy", self._quit)
        self.widget.connect("delete_event", self._close_event)
        self.widget.connect("window_state_event", self._window_event)
        self.widget.connect("configure-event", self._configure_event)

        if title is not None:
            self.widget.set_title(title)

        self.enable_callback('close')

    def show(self):
        """Show the window and all its children."""
        self.widget.show_all()

    def hide(self):
        """Hide the window."""
        self.widget.hide()

    def _quit(self, *args):
        # native destroy handler
        self.close()

    def _close_event(self, widget, event):
        # native delete-event handler (user clicked the close box)
        try:
            self.close()
        except Exception:
            # errors in 'close' callbacks must not stop event handling
            pass
        # returning True tells Gtk not to destroy the window automatically
        return True

    def _window_event(self, widget, event):
        """Track fullscreen/maximized state from window-state events.

        BUG FIX: the old code keyed only on `changed_mask`, so *leaving*
        fullscreen (which also sets the changed bit) wrongly recorded the
        window as fullscreen; test the resulting `new_window_state`.
        """
        state_bits = Gdk.WindowState.FULLSCREEN | Gdk.WindowState.MAXIMIZED
        self._fullscreen = bool(event.new_window_state & state_bits)

    def _configure_event(self, widget, event):
        # remember geometry so get_size()/get_pos() work even when the
        # native window cannot be queried
        x, y, width, height = event.x, event.y, event.width, event.height
        x, y = self.widget.translate_coordinates(self.widget, x, y)
        self.extdata.setvals(x=x, y=y, width=width, height=height)
        return False

    def close(self):
        """Fire the 'close' callback.

        The window is deliberately not destroyed here; the callback
        chain decides what actually happens.
        """
        self.make_callback('close')

    def get_size(self):
        """Return the current (width, height) of the window."""
        try:
            rect = self.widget.get_allocation()
            wd, ht = rect.width, rect.height
        except Exception:
            # window maybe isn't realized yet--fall back to preferred size
            min_req, nat_req = self.widget.get_preferred_size()
            wd, ht = nat_req.width, nat_req.height

        # prefer the size recorded by _configure_event, if any
        ed = self.extdata
        wd, ht = ed.get('width', wd), ed.get('height', ht)
        return wd, ht

    def get_pos(self):
        """Return the window's (x, y) origin, or recorded values."""
        window = self.widget.get_window()
        if window is not None:
            res = window.get_origin()
            # gtk3 may return a 3-tuple (ok, x, y); only accept a plain pair
            if isinstance(res, tuple) and len(res) == 2:
                return res

        ed = self.extdata
        return ed.get('x', None), ed.get('y', None)

    def raise_(self):
        """Raise the window above other windows."""
        window = self.widget.get_window()
        if window is not None:
            window.raise_()

    def lower(self):
        """Lower the window below other windows."""
        window = self.widget.get_window()
        if window is not None:
            window.lower()

    def focus(self):
        """Give the window keyboard focus."""
        window = self.widget.get_window()
        if window is not None:
            window.focus()

    def move(self, x, y):
        """Move the window to screen position (x, y)."""
        window = self.widget.get_window()
        if window is not None:
            window.move(x, y)

    def maximize(self):
        """Maximize the window."""
        window = self.widget.get_window()
        if window is not None:
            window.maximize()

    def unmaximize(self):
        """Restore the window from the maximized state."""
        window = self.widget.get_window()
        if window is not None:
            window.unmaximize()

    def fullscreen(self):
        """Make the window fullscreen."""
        window = self.widget.get_window()
        if window is not None:
            window.fullscreen()

    def unfullscreen(self):
        """Leave fullscreen mode."""
        window = self.widget.get_window()
        if window is not None:
            window.unfullscreen()

    def is_fullscreen(self):
        """Return True if the window is fullscreen or maximized."""
        return self._fullscreen

    def iconify(self):
        """Minimize the window."""
        window = self.widget.get_window()
        if window is not None:
            window.iconify()

    def uniconify(self):
        """Restore the window from the minimized state."""
        window = self.widget.get_window()
        if window is not None:
            window.deiconify()

    def set_title(self, title):
        """Set the window title."""
        self.widget.set_title(title)
class TopLevel(TopLevelMixin, ContainerBase):
    """A top-level application window holding a single content widget."""

    def __init__(self, title=None):
        ContainerBase.__init__(self)

        self._fullscreen = False

        win = GtkHelp.TopLevel()
        self.widget = win
        win.set_border_width(0)

        TopLevelMixin.__init__(self, title=title)

    def set_widget(self, child):
        """Install `child` as the window's content."""
        self.add_ref(child)
        self.widget.add(child.get_widget())
class Application(Callback.Callbacks):
    """Application object: owns the Gtk main loop, screen metrics and
    the registry of top-level windows.

    Also publishes itself as the module-level `_app` singleton.
    Callbacks: 'shutdown'.
    """

    def __init__(self, logger=None, settings=None):
        global _app
        super(Application, self).__init__()
        self.logger = logger
        if settings is None:
            settings = Settings.SettingGroup(logger=self.logger)
        self.settings = settings
        self.settings.add_defaults(font_scaling_factor=None)

        self.window_list = []
        self.window_dict = {}
        self.wincnt = 0

        try:
            # probe the monitor under the active window for size/DPI
            display = Gdk.Display.get_default()
            screen = display.get_default_screen()
            window = screen.get_active_window()
            monitor = screen.get_monitor_at_window(window)
            g = screen.get_monitor_geometry(monitor)
            self.screen_ht = g.height
            self.screen_wd = g.width
            self.screen_res = screen.get_resolution()

            scale = self.settings.get('font_scaling_factor', None)
            if scale is None:
                # hack for Gtk--scale fonts on HiDPI displays
                scale = self.screen_res / 72.0
                self.logger.debug("setting default font_scaling_factor={}".format(scale))
                from ginga.fonts import font_asst
                font_asst.default_scaling_factor = scale

        except Exception as e:
            # no display available (or probe failed): assume a
            # conventional screen
            self.screen_wd = 1600
            self.screen_ht = 1200
            self.screen_res = 96
        # self.logger.debug("screen dimensions %dx%d" % (
        #     self.screen_wd, self.screen_ht))

        # publish the singleton
        _app = self

        # supposedly needed for GObject < 3.10.2
        GObject.threads_init()

        # self._time_save = time.time()

        for name in ('shutdown', ):
            self.enable_callback(name)

        # Set up Gtk style
        GtkHelp.set_default_style()

    def get_screen_size(self):
        """Return (width, height) of the probed screen in pixels."""
        return (self.screen_wd, self.screen_ht)

    def process_events(self):
        """Drain all pending Gtk events (non-blocking)."""
        while Gtk.events_pending():
            try:
                Gtk.main_iteration()

                # TEMP: to help solve the issue of gtk3 events getting
                # lost--we want to know whether the process_event loop
                # is running, so ping periodically if events are showing
                # up
                # cur_time = time.time()
                # if cur_time - self._time_save > 10.0:
                #     self.logger.info("process_events ping!")
                #     self._time_save = cur_time

            except Exception as e:
                self.logger.error("Exception in main_iteration() loop: %s" %
                                  (str(e)))

    def process_end(self):
        # nothing to do for the Gtk backend
        pass

    def add_window(self, window, wid=None):
        """Register `window` under id `wid` (auto-generated if None)."""
        if wid is None:
            wid = 'win%d' % (self.wincnt)
            self.wincnt += 1
        window.wid = wid
        window.url = ''
        window.app = self

        self.window_dict[wid] = window

    def get_window(self, wid):
        """Look up a registered window by id; raises KeyError if absent."""
        return self.window_dict[wid]

    def has_window(self, wid):
        """Return True if a window with id `wid` is registered."""
        return wid in self.window_dict

    def get_wids(self):
        """Return the list of registered window ids."""
        return list(self.window_dict.keys())

    def make_window(self, title=None):
        """Create, register and return a new TopLevel window."""
        w = TopLevel(title=title)
        self.add_window(w)
        return w

    def make_timer(self):
        """Return a new backend timer object."""
        return GtkHelp.Timer()

    def mainloop(self):
        """Enter the Gtk main loop (blocks)."""
        Gtk.main()

    def quit(self):
        """Terminate the Gtk main loop."""
        Gtk.main_quit()
class Dialog(TopLevelMixin, WidgetBase):
    """Dialog window with a content area and a row of action buttons.

    `buttons` is a sequence of (label, response_value) pairs; when the user
    responds, the 'activated' callback is fired with the response value.
    """

    def __init__(self, title='', flags=0, buttons=[],
                 parent=None, modal=False):
        WidgetBase.__init__(self)
        self.parent = parent.get_widget() if parent is not None else None
        # Gtk.Dialog wants the (label, response) pairs as one flat tuple.
        flat = []
        for label, response in buttons:
            flat.append(label)
            flat.append(response)
        self.widget = Gtk.Dialog(title=title, flags=flags,
                                 buttons=tuple(flat))
        self.widget.set_modal(modal)
        TopLevelMixin.__init__(self, title=title)
        # Wrapped VBox that callers fill via get_content_area().
        self.content = VBox()
        self.content.set_border_width(0)
        area = self.widget.get_content_area()
        area.pack_start(self.content.get_widget(), True, True, 0)
        self.widget.connect("response", self._cb_redirect)
        self.enable_callback('activated')

    def _cb_redirect(self, w, val):
        """Forward the Gtk 'response' signal to the 'activated' callback."""
        self.make_callback('activated', val)

    def get_content_area(self):
        """Return the VBox into which dialog content should be packed."""
        return self.content
class SaveDialog(object):
    """Modal file-save chooser.

    If `selectedfilter` is given (a glob pattern such as ``*.png``), a
    matching Gtk file filter is installed and the corresponding extension
    is appended to the returned path when the user omits it.
    """

    def __init__(self, title='Save File', selectedfilter=None):
        action = Gtk.FileChooserAction.SAVE
        buttons = (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                   Gtk.STOCK_SAVE, Gtk.ResponseType.OK)
        self.widget = Gtk.FileChooserDialog(title=title, action=action,
                                            buttons=buttons)
        self.selectedfilter = selectedfilter
        if selectedfilter is not None:
            self._add_filter(selectedfilter)

    def _add_filter(self, selectedfilter):
        """Install a Gtk.FileFilter for the glob pattern and remember the
        extension that get_path() should append."""
        filtr = Gtk.FileFilter()
        filtr.add_pattern(selectedfilter)
        if 'png' in selectedfilter:
            filtr.set_name('Image (*.png)')
            self.selectedfilter = '.png'
        elif 'avi' in selectedfilter:
            filtr.set_name('Movie (*.avi)')
            self.selectedfilter = '.avi'
        elif 'npz' in selectedfilter:
            filtr.set_name('Numpy Compressed Archive (*.npz)')
            self.selectedfilter = '.npz'
        self.widget.add_filter(filtr)

    def get_path(self):
        """Run the dialog and return the chosen path, or None.

        Returns None for CANCEL and for any other response (e.g.
        DELETE_EVENT when the user closes the window via the window
        manager).  The dialog widget is destroyed in every case.
        """
        response = self.widget.run()
        path = None
        if response == Gtk.ResponseType.OK:
            path = self.widget.get_filename()
            if (self.selectedfilter is not None and
                    not path.endswith(self.selectedfilter)):
                path += self.selectedfilter
        # BUG FIX: previously the dialog was destroyed only on OK or
        # CANCEL, so closing the window (DELETE_EVENT) leaked the dialog
        # widget.  Destroy it unconditionally.
        self.widget.destroy()
        return path
class DragPackage(object):
    """Payload holder handed to drag-and-drop handlers.

    Wraps the toolkit selection object so handlers can attach URIs or
    plain text without touching Gtk directly.
    """

    def __init__(self, src_widget, selection):
        self.src_widget = src_widget
        self._selection = selection

    def set_urls(self, urls):
        """Attach a list of URIs to the drag selection."""
        self._selection.set_uris(urls)

    def set_text(self, text):
        """Attach plain text (with its length) to the drag selection."""
        self._selection.set_text(text, len(text))

    def start_drag(self):
        """Intentionally a no-op in this back end."""
        pass
class WidgetMoveEvent(object):
    """Event describing a request to move `child` out of `src_widget`.

    A handler records its decision by calling accept() or reject();
    the outcome is held in the private ``_result`` flag (default: rejected).
    """

    def __init__(self, src_widget, child):
        self.src_widget = src_widget
        self.child = child
        self._result = False

    def accept(self):
        """Mark the move as allowed."""
        self._result = True

    def reject(self):
        """Mark the move as disallowed (the initial state)."""
        self._result = False
# MODULE FUNCTIONS
def name_mangle(name, pfx=''):
    """Return `name` lower-cased with every character that is not a letter,
    digit or underscore replaced by '_', prefixed with `pfx`.
    """
    def _keep(ch):
        return ch.isalpha() or ch.isdigit() or ch == '_'

    mangled = ''.join(ch if _keep(ch) else '_' for ch in name.lower())
    return pfx + mangled
def make_widget(title, wtype):
    """Instantiate one of the wrapped widget classes by short type name.

    `title` is used only by the widget kinds whose constructor takes a
    label.  Raises ValueError for an unrecognized `wtype`.
    """
    if wtype == 'label':
        w = Label(title)
        w.label.set_alignment(0.95, 0.5)
        return w
    if wtype == 'llabel':
        w = Label(title)
        w.label.set_alignment(0.05, 0.95)
        return w
    # Kinds whose constructor takes the title as its argument.
    titled = {
        'checkbutton': CheckBox,
        'radiobutton': RadioButton,
        'togglebutton': ToggleButton,
        'button': Button,
    }
    if wtype in titled:
        return titled[wtype](title)
    # Kinds constructed with no arguments.
    plain = {
        'entry': TextEntry,
        'entryset': TextEntrySet,
        'combobox': ComboBox,
        'vbox': VBox,
        'hbox': HBox,
        'toolbar': Toolbar,
        'progress': ProgressBar,
        'menubar': Menubar,
    }
    if wtype in plain:
        return plain[wtype]()
    # Remaining kinds need specific keyword arguments.
    if wtype == 'spinbutton':
        return SpinBox(dtype=int)
    if wtype == 'spinfloat':
        return SpinBox(dtype=float)
    if wtype == 'hscale':
        return Slider(orientation='horizontal')
    if wtype == 'vscale':
        return Slider(orientation='vertical')
    if wtype == 'spacer':
        return Label('')
    if wtype == 'textarea':
        return TextArea(editable=True)
    raise ValueError("Bad wtype=%s" % wtype)
def hadjust(w, orientation):
    """Ostensibly, a function to reduce the vertical footprint of a widget
    that is normally used in a vertical stack (usually a Splitter), when it
    is instead used in a horizontal orientation.

    Currently a pass-through for every orientation: the extra Splitter
    wrapping turned out to be unnecessary for most plugins and reduced the
    visual aesthetic, so the widget is returned unchanged.
    """
    return w
def build_info(captions, orientation='vertical'):
    """Build a table of widgets from `captions`.

    `captions` is a sequence of tuples, each holding alternating
    (title, widget_type) pairs -- so every tuple must have an even length.
    Returns (wrapped_container, bunch) where `bunch` maps mangled widget
    names to the created widget wrappers.
    """
    vbox = Gtk.VBox(spacing=2)
    numrows = len(captions)
    # The widest row determines the column count; each logical column is a
    # (title, wtype) pair, hence the divide-by-two below.
    # NOTE(review): assumes `reduce` is in scope (functools/six import not
    # visible in this chunk) -- confirm.
    numcols = reduce(lambda acc, tup: max(acc, len(tup)), captions, 0)
    if (numcols % 2) != 0:
        raise ValueError("Column spec is not an even number")
    numcols = int(numcols // 2)
    table = Gtk.Table(rows=numrows, columns=numcols)
    table.set_row_spacings(2)
    table.set_col_spacings(4)
    vbox.pack_start(table, False, False, 0)
    wb = Bunch.Bunch()
    row = 0
    for tup in captions:
        col = 0
        while col < numcols:
            idx = col * 2
            if idx < len(tup):
                title, wtype = tup[idx:idx + 2]
                # Titles ending in ':' are labels; key them as 'lbl_<name>'.
                if not title.endswith(':'):
                    name = name_mangle(title)
                else:
                    name = name_mangle('lbl_' + title[:-1])
                w = make_widget(title, wtype)
                table.attach(w.get_widget(), col, col + 1, row, row + 1,
                             xoptions=Gtk.AttachOptions.FILL,
                             yoptions=Gtk.AttachOptions.FILL,
                             xpadding=1, ypadding=1)
                wb[name] = w
            col += 1
        row += 1
    vbox.show_all()
    w = wrap(vbox)
    w = hadjust(w, orientation=orientation)
    return w, wb
def wrap(native_widget):
    """Wrap a raw Gtk widget in a generic WidgetBase so it can be used
    anywhere the wrapped-widget API is expected.
    """
    shim = WidgetBase()
    shim.widget = native_widget
    return shim
# END
| bsd-3-clause |
dragorosson/heat | heat/tests/openstack/nova/test_keypair.py | 5 | 8720 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
import six
from heat.common import exception
from heat.engine.clients.os import nova
from heat.engine.resources.openstack.nova import keypair
from heat.engine import scheduler
from heat.tests import common
from heat.tests.openstack.nova import fakes as fakes_nova
from heat.tests import utils
class NovaKeyPairTest(common.HeatTestCase):
    """Tests for the OS::Nova::KeyPair resource.

    Uses mox-style record/replay mocks (self.m): expectations are recorded
    on fake objects, ReplayAll() switches to replay mode, and VerifyAll()
    asserts every recorded call happened.
    """

    # Minimal HOT template with a single keypair resource named 'kp'.
    kp_template = {
        "heat_template_version": "2013-05-23",
        "resources": {
            "kp": {
                "type": "OS::Nova::KeyPair",
                "properties": {
                    "name": "key_pair"
                }
            }
        }
    }

    def setUp(self):
        super(NovaKeyPairTest, self).setUp()
        # Fake nova client with a fake keypairs manager attached.
        self.fake_nova = self.m.CreateMockAnything()
        self.fake_keypairs = self.m.CreateMockAnything()
        self.fake_nova.keypairs = self.fake_keypairs
        self.patchobject(nova.NovaClientPlugin, 'has_extension',
                         return_value=True)

    def _mock_key(self, name, pub=None, priv=None):
        """Build a mock nova keypair with optional public/private keys."""
        mkey = self.m.CreateMockAnything()
        mkey.id = name
        mkey.name = name
        if pub:
            mkey.public_key = pub
        if priv:
            mkey.private_key = priv
        return mkey

    def _get_test_resource(self, template):
        """Parse `template` into a stack and return its 'kp' resource,
        with the nova client plugin stubbed to return the fake client."""
        self.stack = utils.parse_stack(template)
        definition = self.stack.t.resource_definitions(self.stack)['kp']
        kp_res = keypair.KeyPair('kp', definition, self.stack)
        self.m.StubOutWithMock(nova.NovaClientPlugin, '_create')
        nova.NovaClientPlugin._create().AndReturn(self.fake_nova)
        return kp_res

    def _get_mock_kp_for_create(self, key_name, public_key=None,
                                priv_saved=False):
        """Return (resource, mock_key) with a create() expectation recorded."""
        template = copy.deepcopy(self.kp_template)
        template['resources']['kp']['properties']['name'] = key_name
        props = template['resources']['kp']['properties']
        if public_key:
            props['public_key'] = public_key
        gen_pk = public_key or "generated test public key"
        nova_key = self._mock_key(key_name, gen_pk)
        if priv_saved:
            nova_key.private_key = "private key for %s" % key_name
            props['save_private_key'] = True
        kp_res = self._get_test_resource(template)
        self.fake_keypairs.create(key_name,
                                  public_key=public_key).AndReturn(nova_key)
        return kp_res, nova_key

    def test_create_key(self):
        """Test basic create."""
        key_name = "generate_no_save"
        tp_test, created_key = self._get_mock_kp_for_create(key_name)
        self.fake_keypairs.get(key_name).MultipleTimes().AndReturn(created_key)
        created_key.to_dict().AndReturn({'key_pair': 'info'})
        self.m.ReplayAll()
        scheduler.TaskRunner(tp_test.create)()
        # Private key was not saved, so the attribute resolves to "".
        self.assertEqual("", tp_test.FnGetAtt('private_key'))
        self.assertEqual("generated test public key",
                         tp_test.FnGetAtt('public_key'))
        self.assertEqual({'key_pair': 'info'}, tp_test.FnGetAtt('show'))
        self.assertEqual((tp_test.CREATE, tp_test.COMPLETE), tp_test.state)
        self.assertEqual(tp_test.resource_id, created_key.name)
        self.m.VerifyAll()

    def test_create_key_empty_name(self):
        """Test creation of a keypair whose name is of length zero."""
        key_name = ""
        template = copy.deepcopy(self.kp_template)
        template['resources']['kp']['properties']['name'] = key_name
        stack = utils.parse_stack(template)
        definition = stack.t.resource_definitions(stack)['kp']
        kp_res = keypair.KeyPair('kp', definition, stack)
        self.m.ReplayAll()
        error = self.assertRaises(exception.StackValidationFailed,
                                  kp_res.validate)
        self.assertIn("Property error", six.text_type(error))
        self.assertIn("kp.properties.name: length (0) is out of "
                      "range (min: 1, max: 255)", six.text_type(error))
        self.m.VerifyAll()

    def test_create_key_excess_name_length(self):
        """Test creation of a keypair whose name is of excess length."""
        key_name = 'k' * 256
        template = copy.deepcopy(self.kp_template)
        template['resources']['kp']['properties']['name'] = key_name
        stack = utils.parse_stack(template)
        definition = stack.t.resource_definitions(stack)['kp']
        kp_res = keypair.KeyPair('kp', definition, stack)
        self.m.ReplayAll()
        error = self.assertRaises(exception.StackValidationFailed,
                                  kp_res.validate)
        self.assertIn("Property error", six.text_type(error))
        self.assertIn("kp.properties.name: length (256) is out of "
                      "range (min: 1, max: 255)", six.text_type(error))
        self.m.VerifyAll()

    def test_check_key(self):
        # A successful keypairs.get() puts the resource in CHECK/COMPLETE.
        res = self._get_test_resource(self.kp_template)
        res.client = mock.Mock()
        scheduler.TaskRunner(res.check)()
        self.assertEqual((res.CHECK, res.COMPLETE), res.state)

    def test_check_key_fail(self):
        # A failing keypairs.get() surfaces as ResourceFailure and
        # leaves the resource in CHECK/FAILED.
        res = self._get_test_resource(self.kp_template)
        res.client = mock.Mock()
        res.client().keypairs.get.side_effect = Exception("boom")
        exc = self.assertRaises(exception.ResourceFailure,
                                scheduler.TaskRunner(res.check))
        self.assertIn("boom", six.text_type(exc))
        self.assertEqual((res.CHECK, res.FAILED), res.state)

    def test_delete_key_not_found(self):
        """Test delete non-existent key."""
        test_res = self._get_test_resource(self.kp_template)
        test_res.resource_id = "key_name"
        test_res.state_set(test_res.CREATE, test_res.COMPLETE)
        # NotFound from nova must be swallowed: delete still completes.
        (self.fake_keypairs.delete("key_name")
            .AndRaise(fakes_nova.fake_exception()))
        self.m.ReplayAll()
        scheduler.TaskRunner(test_res.delete)()
        self.assertEqual((test_res.DELETE, test_res.COMPLETE), test_res.state)
        self.m.VerifyAll()

    def test_create_pub(self):
        """Test create using existing pub key."""
        key_name = "existing_key"
        pk = "test_create_pub"
        tp_test, created_key = self._get_mock_kp_for_create(key_name,
                                                            public_key=pk)
        self.m.ReplayAll()
        scheduler.TaskRunner(tp_test.create)()
        self.assertEqual("", tp_test.FnGetAtt('private_key'))
        self.assertEqual("test_create_pub",
                         tp_test.FnGetAtt('public_key'))
        self.assertEqual((tp_test.CREATE, tp_test.COMPLETE), tp_test.state)
        self.assertEqual(tp_test.resource_id, created_key.name)
        self.m.VerifyAll()

    def test_save_priv_key(self):
        """Test a saved private key."""
        key_name = "save_private"
        tp_test, created_key = self._get_mock_kp_for_create(key_name,
                                                            priv_saved=True)
        self.fake_keypairs.get(key_name).AndReturn(created_key)
        self.m.ReplayAll()
        scheduler.TaskRunner(tp_test.create)()
        self.assertEqual("private key for save_private",
                         tp_test.FnGetAtt('private_key'))
        self.assertEqual("generated test public key",
                         tp_test.FnGetAtt('public_key'))
        self.assertEqual((tp_test.CREATE, tp_test.COMPLETE), tp_test.state)
        self.assertEqual(tp_test.resource_id, created_key.name)
        self.m.VerifyAll()

    def test_nova_keypair_refid(self):
        # FnGetRefId returns the resource id directly.
        stack = utils.parse_stack(self.kp_template)
        rsrc = stack['kp']
        rsrc.resource_id = 'xyz'
        self.assertEqual('xyz', rsrc.FnGetRefId())

    def test_nova_keypair_refid_convergence_cache_data(self):
        # With convergence cache data present, FnGetRefId returns the
        # cached reference_id instead of the live resource id.
        cache_data = {'kp': {
            'uuid': mock.ANY,
            'id': mock.ANY,
            'action': 'CREATE',
            'status': 'COMPLETE',
            'reference_id': 'convg_xyz'
        }}
        stack = utils.parse_stack(self.kp_template, cache_data=cache_data)
        rsrc = stack['kp']
        self.assertEqual('convg_xyz', rsrc.FnGetRefId())
| apache-2.0 |
nvoron23/avos | openstack_dashboard/management/commands/make_web_conf.py | 8 | 9452 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
from optparse import make_option # noqa
import os
import socket
import sys
import warnings
from django.conf import settings
from django.core.management.base import BaseCommand # noqa
from django.template import Context, Template # noqa
# Suppress DeprecationWarnings which clutter the output to the point of
# rendering it unreadable.
warnings.simplefilter('ignore')
# Short name of this management command (final component of the module path).
cmd_name = __name__.split('.')[-1]
CURDIR = os.path.realpath(os.path.dirname(__file__))
# Project root is two directory levels above this commands package.
PROJECT_PATH = os.path.realpath(os.path.join(CURDIR, '../..'))
STATIC_PATH = os.path.realpath(os.path.join(PROJECT_PATH, '../static'))
# Known apache log directory locations
APACHE_LOG_DIRS = (
    '/var/log/httpd',    # RHEL / Red Hat / CentOS / Fedora Linux
    '/var/log/apache2',  # Debian / Ubuntu Linux
)
# Default log directory
DEFAULT_LOG_DIR = '/var/log'
def _getattr(obj, name, default):
"""Like getattr but return `default` if None or False.
By default, getattr(obj, name, default) returns default only if
attr does not exist, here, we return `default` even if attr evaluates to
None or False.
"""
value = getattr(obj, name, default)
if value:
return value
else:
return default
# Template context seeded from the environment and Django settings;
# individual values may be overridden by command-line options in handle().
context = Context({
    'DJANGO_SETTINGS_MODULE': os.environ['DJANGO_SETTINGS_MODULE'],
    'HOSTNAME': socket.getfqdn(),
    'PROJECT_PATH': os.path.realpath(
        _getattr(settings, 'ROOT_PATH', PROJECT_PATH)),
    'STATIC_PATH': os.path.realpath(
        _getattr(settings, 'STATIC_ROOT', STATIC_PATH)),
    'SSLCERT': '/etc/pki/tls/certs/ca.crt',
    'SSLKEY': '/etc/pki/tls/private/ca.key',
    'CACERT': None,
})
context['PROJECT_ROOT'] = os.path.dirname(context['PROJECT_PATH'])
context['PROJECT_DIR_NAME'] = os.path.basename(
    context['PROJECT_PATH'].split(context['PROJECT_ROOT'])[1])
context['PROJECT_NAME'] = context['PROJECT_DIR_NAME']
context['WSGI_FILE'] = os.path.join(
    context['PROJECT_PATH'], 'wsgi/horizon.wsgi')
# Default vhost name: replace the first label of the FQDN with the project
# name, e.g. host.example.org -> openstack_dashboard.example.org.
VHOSTNAME = context['HOSTNAME'].split('.')
VHOSTNAME[0] = context['PROJECT_NAME']
context['VHOSTNAME'] = '.'.join(VHOSTNAME)
if len(VHOSTNAME) > 1:
    context['DOMAINNAME'] = '.'.join(VHOSTNAME[1:])
else:
    context['DOMAINNAME'] = 'openstack.org'
context['ADMIN'] = 'webmaster@%s' % context['DOMAINNAME']
# If running inside a virtualenv, point the templates at its activation
# script so the generated WSGI/apache config activates it.
context['ACTIVATE_THIS'] = None
virtualenv = os.environ.get('VIRTUAL_ENV')
if virtualenv:
    activate_this = os.path.join(
        virtualenv, 'bin/activate_this.py')
    if os.path.exists(activate_this):
        context['ACTIVATE_THIS'] = activate_this
def find_apache_log_dir():
    """Return the first known apache log directory present on this system,
    falling back to DEFAULT_LOG_DIR."""
    for candidate in APACHE_LOG_DIRS:
        # isdir() is False for nonexistent paths, so it also covers the
        # existence check.
        if os.path.isdir(candidate):
            return candidate
    return DEFAULT_LOG_DIR
context['LOGDIR'] = find_apache_log_dir()
class Command(BaseCommand):
    """Management command generating the horizon WSGI file and/or an
    apache vhost configuration (the latter on stdout, since its install
    location is distribution dependent).
    """

    args = ''
    # NOTE: the examples below are interpolated with the live context so
    # the help text shows real paths/hostnames for this deployment.
    # FIX: corrected user-facing typo "acpache" -> "apache".
    help = """Create %(wsgi_file)s
or the contents of an apache %(p_name)s.conf file (on stdout).
The apache configuration is generated on stdout because the place of this
file is distribution dependent.
examples::
    manage.py %(cmd_name)s --wsgi # creates %(wsgi_file)s
    manage.py %(cmd_name)s --apache # creates an apache vhost conf file (on \
stdout).
    manage.py %(cmd_name)s --apache --ssl --mail=%(admin)s \
--project=%(p_name)s --hostname=%(hostname)s
To create an apache configuration file, redirect the output towards the
location you desire, e.g.::
    manage.py %(cmd_name)s --apache > \
/etc/httpd/conf.d/openstack_dashboard.conf
""" % {
        'cmd_name': cmd_name,
        'p_name': context['PROJECT_NAME'],
        'wsgi_file': context['WSGI_FILE'],
        'admin': context['ADMIN'],
        'hostname': context['VHOSTNAME'], }

    option_list = BaseCommand.option_list + (
        # TODO(ygbo): Add an --nginx option.
        make_option("-a", "--apache",
                    default=False, action="store_true", dest="apache",
                    help="generate an apache vhost configuration"),
        make_option("--cacert",
                    dest="cacert",
                    help=("Use with the --apache and --ssl option to define "
                          "the path to the SSLCACertificateFile"
                          ),
                    metavar="CACERT"),
        make_option("-f", "--force",
                    default=False, action="store_true", dest="force",
                    help="force overwriting of an existing %s file" %
                    context['WSGI_FILE']),
        make_option("-H", "--hostname",
                    dest="hostname",
                    help=("Use with the --apache option to define the server's"
                          " hostname (default : %s)") % context['VHOSTNAME'],
                    metavar="HOSTNAME"),
        make_option("--logdir",
                    dest="logdir",
                    help=("Use with the --apache option to define the path to "
                          "the apache log directory(default : %s)"
                          % context['LOGDIR']),
                    metavar="CACERT"),
        make_option("-m", "--mail",
                    dest="mail",
                    help=("Use with the --apache option to define the web site"
                          " administrator's email (default : %s)") %
                    context['ADMIN'],
                    metavar="MAIL"),
        make_option("-n", "--namedhost",
                    default=False, action="store_true", dest="namedhost",
                    help=("Use with the --apache option. The apache vhost "
                          "configuration will work only when accessed with "
                          "the proper hostname (see --hostname).")),
        make_option("-p", "--project",
                    dest="project",
                    help=("Use with the --apache option to define the project "
                          "name (default : %s)") % context['PROJECT_NAME'],
                    metavar="PROJECT"),
        make_option("-s", "--ssl",
                    default=False, action="store_true", dest="ssl",
                    help=("Use with the --apache option. The apache vhost "
                          "configuration will use an SSL configuration")),
        make_option("--sslcert",
                    dest="sslcert",
                    help=("Use with the --apache and --ssl option to define "
                          "the path to the SSLCertificateFile (default : %s)"
                          ) % context['SSLCERT'],
                    metavar="SSLCERT"),
        make_option("--sslkey",
                    dest="sslkey",
                    help=("Use with the --apache and --ssl option to define "
                          "the path to the SSLCertificateKeyFile "
                          "(default : %s)") % context['SSLKEY'],
                    metavar="SSLKEY"),
        make_option("-w", "--wsgi",
                    default=False, action="store_true", dest="wsgi",
                    help="generate the horizon.wsgi file"),
    )

    def handle(self, *args, **options):
        """Apply option overrides to the module context, then emit either
        the WSGI file (--wsgi) or the apache vhost config (--apache)."""
        force = options.get('force')
        context['SSL'] = options.get('ssl')
        if options.get('mail'):
            context['ADMIN'] = options['mail']
        if options.get('cacert'):
            context['CACERT'] = options['cacert']
        if options.get('logdir'):
            context['LOGDIR'] = options['logdir'].rstrip('/')
        if options.get('project'):
            context['PROJECT_NAME'] = options['project']
        if options.get('hostname'):
            context['VHOSTNAME'] = options['hostname']
        if options.get('sslcert'):
            context['SSLCERT'] = options['sslcert']
        if options.get('sslkey'):
            context['SSLKEY'] = options['sslkey']
        if options.get('namedhost'):
            context['NAMEDHOST'] = context['VHOSTNAME']
        else:
            context['NAMEDHOST'] = '*'
        # Generate the WSGI.
        if options.get('wsgi'):
            with open(
                os.path.join(CURDIR, 'horizon.wsgi.template'), 'r'
            ) as fp:
                wsgi_template = Template(fp.read())
            # Never clobber an existing WSGI file unless --force was given.
            if not os.path.exists(context['WSGI_FILE']) or force:
                with open(context['WSGI_FILE'], 'w') as fp:
                    fp.write(wsgi_template.render(context))
                print('Generated "%s"' % context['WSGI_FILE'])
            else:
                sys.exit('"%s" already exists, use --force to overwrite' %
                         context['WSGI_FILE'])
        # Generate the apache configuration.
        elif options.get('apache'):
            with open(
                os.path.join(CURDIR, 'apache_vhost.conf.template'), 'r'
            ) as fp:
                wsgi_template = Template(fp.read())
            sys.stdout.write(wsgi_template.render(context))
        else:
            # No mode selected: show the command help.
            self.print_help('manage.py', cmd_name)
| apache-2.0 |
mozillazg/phrase-pinyin-data | merge.py | 1 | 1227 | # -*- coding: utf-8 -*-
import sys
import codecs
def parse(lines):
    """Yield (hanzi, pinyin) pairs from lines shaped ``hanzi: pinyin``.

    Blank lines and ``#`` comment lines are skipped; only the first ':'
    splits, so the pinyin part may itself contain colons.

    :yield: hanzi, others
    """
    for raw in lines:
        stripped = raw.strip()
        if not stripped or stripped.startswith('#'):
            continue
        head, tail = stripped.split(':', 1)
        yield head.strip(), tail.strip()
def merge(pinyin_d_list):
    """Merge the dicts left to right; later dicts win on duplicate keys.

    :rtype: dict
    """
    return {hanzi: pinyin
            for d in pinyin_d_list
            for hanzi, pinyin in d.items()}
def sort(pinyin_d):
    """Return the dict's (hanzi, pinyin) items sorted by hanzi.

    Keys are unique, so tuple comparison never reaches the values.

    :rtype: list
    """
    return sorted(pinyin_d.items())
def output(pinyin_s):
    """Print the header lines followed by one ``hanzi: pinyin`` line per
    pair; anything after '_' in a key is dropped on output."""
    print('# version: 0.10.5')
    print('# source: https://github.com/mozillazg/phrase-pinyin-data')
    for hanzi, pinyin in pinyin_s:
        print('{hanzi}: {pinyin}'.format(hanzi=hanzi.split('_')[0],
                                         pinyin=pinyin))
def main(files):
    """Parse every input file, merge, sort and print the result.

    Within one file the first occurrence of a phrase wins (setdefault);
    across files, later files override earlier ones (merge order).
    """
    per_file = []
    for path in files:
        table = {}
        with codecs.open(path, 'r', 'utf-8-sig') as fp:
            for hanzi, pinyin in parse(fp):
                table.setdefault(hanzi, pinyin)
        per_file.append(table)
    output(sort(merge(per_file)))
if __name__ == '__main__':
    main(sys.argv[1:])
| mit |
hchen1202/django-react | virtualenv/lib/python3.6/site-packages/whitenoise/media_types.py | 4 | 4800 | import os
class MediaTypes(object):
    """Map file paths to media (MIME) types.

    Lookup order: exact (lower-cased) file name, then extension, then the
    configured default.  `extra_types` entries override the built-ins.
    """

    def __init__(self, default='application/octet-stream', extra_types=None):
        self.types_map = default_types()
        self.default = default
        if extra_types:
            self.types_map.update(extra_types)

    def get_type(self, path):
        """Return the media type for `path`."""
        filename = os.path.basename(path).lower()
        extension = os.path.splitext(filename)[1]
        for key in (filename, extension):
            if key in self.types_map:
                return self.types_map[key]
        return self.default


def default_types():
    """
    We use our own set of default media types rather than the system-supplied
    ones. This ensures consistent media type behaviour across varied
    environments. The defaults are based on those shipped with nginx, with
    some custom additions.
    """
    return {
        '.3gp': 'video/3gpp',
        '.3gpp': 'video/3gpp',
        '.7z': 'application/x-7z-compressed',
        '.ai': 'application/postscript',
        '.asf': 'video/x-ms-asf',
        '.asx': 'video/x-ms-asf',
        '.atom': 'application/atom+xml',
        '.avi': 'video/x-msvideo',
        '.bmp': 'image/x-ms-bmp',
        '.cco': 'application/x-cocoa',
        '.crt': 'application/x-x509-ca-cert',
        '.css': 'text/css',
        '.der': 'application/x-x509-ca-cert',
        '.doc': 'application/msword',
        '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
        '.ear': 'application/java-archive',
        '.eot': 'application/vnd.ms-fontobject',
        '.eps': 'application/postscript',
        '.flv': 'video/x-flv',
        '.gif': 'image/gif',
        '.hqx': 'application/mac-binhex40',
        '.htc': 'text/x-component',
        '.htm': 'text/html',
        '.html': 'text/html',
        '.ico': 'image/x-icon',
        '.jad': 'text/vnd.sun.j2me.app-descriptor',
        '.jar': 'application/java-archive',
        '.jardiff': 'application/x-java-archive-diff',
        '.jng': 'image/x-jng',
        '.jnlp': 'application/x-java-jnlp-file',
        '.jpeg': 'image/jpeg',
        '.jpg': 'image/jpeg',
        '.js': 'application/javascript',
        '.json': 'application/json',
        '.kar': 'audio/midi',
        '.kml': 'application/vnd.google-earth.kml+xml',
        '.kmz': 'application/vnd.google-earth.kmz',
        '.m3u8': 'application/vnd.apple.mpegurl',
        '.m4a': 'audio/x-m4a',
        '.m4v': 'video/x-m4v',
        '.mid': 'audio/midi',
        '.midi': 'audio/midi',
        '.mml': 'text/mathml',
        '.mng': 'video/x-mng',
        '.mov': 'video/quicktime',
        '.mp3': 'audio/mpeg',
        '.mp4': 'video/mp4',
        '.mpeg': 'video/mpeg',
        '.mpg': 'video/mpeg',
        '.ogg': 'audio/ogg',
        '.pdb': 'application/x-pilot',
        '.pdf': 'application/pdf',
        '.pem': 'application/x-x509-ca-cert',
        '.pl': 'application/x-perl',
        '.pm': 'application/x-perl',
        '.png': 'image/png',
        '.ppt': 'application/vnd.ms-powerpoint',
        '.pptx': 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
        '.prc': 'application/x-pilot',
        '.ps': 'application/postscript',
        '.ra': 'audio/x-realaudio',
        '.rar': 'application/x-rar-compressed',
        '.rpm': 'application/x-redhat-package-manager',
        '.rss': 'application/rss+xml',
        '.rtf': 'application/rtf',
        '.run': 'application/x-makeself',
        '.sea': 'application/x-sea',
        '.shtml': 'text/html',
        '.sit': 'application/x-stuffit',
        '.svg': 'image/svg+xml',
        '.svgz': 'image/svg+xml',
        '.swf': 'application/x-shockwave-flash',
        '.tcl': 'application/x-tcl',
        '.tif': 'image/tiff',
        '.tiff': 'image/tiff',
        '.tk': 'application/x-tcl',
        '.ts': 'video/mp2t',
        '.txt': 'text/plain',
        '.war': 'application/java-archive',
        '.wbmp': 'image/vnd.wap.wbmp',
        '.webm': 'video/webm',
        '.webp': 'image/webp',
        '.wml': 'text/vnd.wap.wml',
        '.wmlc': 'application/vnd.wap.wmlc',
        '.wmv': 'video/x-ms-wmv',
        '.woff': 'application/font-woff',
        '.woff2': 'font/woff2',
        '.xhtml': 'application/xhtml+xml',
        '.xls': 'application/vnd.ms-excel',
        '.xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
        '.xml': 'text/xml',
        '.xpi': 'application/x-xpinstall',
        '.xspf': 'application/xspf+xml',
        '.zip': 'application/zip',
        'apple-app-site-association': 'application/pkc7-mime',
        # Adobe Products - see:
        # https://www.adobe.com/devnet-docs/acrobatetk/tools/AppSec/xdomain.html#policy-file-host-basics
        'crossdomain.xml': 'text/x-cross-domain-policy'
    }
simonmeister/UnFlow | src/e2eflow/test/test_losses.py | 1 | 6535 | import numpy as np
import tensorflow as tf
from ..core.losses import _smoothness_deltas, create_outgoing_mask, \
gradient_loss, compute_losses, ternary_loss
from ..core.input import read_png_image
class LossesTest(tf.test.TestCase):
    """Unit tests for the unsupervised flow loss terms (TF1 graph/session
    style: build ops, then evaluate them in a tf.Session)."""

    def test_smoothness_deltas(self):
        # 3x3 flow field with identical u and v channels; check the
        # per-direction deltas and validity masks at the image borders.
        flow = np.ones([1,3,3,2], np.float32)
        flow[0, :, :, 0] = [[0,0,0],
                            [0,8,3],
                            [0,1,0]]
        flow[0, :, :, 1] = [[0,0,0],
                            [0,8,3],
                            [0,1,0]]
        delta_u_, delta_v_, mask_ = _smoothness_deltas(flow)
        delta_u_ = tf.multiply(delta_u_, mask_)
        delta_v_ = tf.multiply(delta_v_, mask_)
        sess = tf.Session()
        delta_u, delta_v, mask = sess.run([delta_u_, delta_v_, mask_])
        self.assertAllEqual(mask[0,:,:,0], [[1,1,0],
                                            [1,1,0],
                                            [1,1,0]])
        self.assertAllEqual(mask[0,:,:,1], [[1,1,1],
                                            [1,1,1],
                                            [0,0,0]])
        self.assertAllEqual(delta_u[0,:,:,0], [[0,0,0],
                                               [-8,5,0],
                                               [-1,1,0]])
        self.assertAllEqual(delta_u[0,:,:,1], [[0,-8,-3],
                                               [0,7,3],
                                               [0,0,0]])
        self.assertAllEqual(delta_v[0,:,:,0], [[0,0,0],
                                               [-8,5,0],
                                               [-1,1,0]])
        self.assertAllEqual(delta_v[0,:,:,1], [[0,-8,-3],
                                               [0,7,3],
                                               [0,0,0]])

    def test_create_outgoing_mask_all_directions(self):
        # Pixels whose flow vector leaves the image should be masked out.
        flow = np.ones([1,3,3,2], np.float32)
        flow[0, :, :, 0] = [[0,0,1],
                            [-1,3,0],
                            [0,1,0]]
        flow[0, :, :, 1] = [[-1,0,0],
                            [0,0,0],
                            [1,-1,0]]
        sess = tf.Session()
        mask = sess.run(create_outgoing_mask(flow))
        self.assertAllEqual(mask[0,:,:,0], [[0,1,0],
                                            [0,0,1],
                                            [0,1,1]])

    def test_create_outgoing_mask_large_movement(self):
        # Horizontal-only flow with displacements larger than one pixel.
        flow = np.ones([1,3,3,2], np.float32)
        flow[0, :, :, 0] = [[3,2,1],
                            [2,1,0],
                            [0,-2,-1]]
        flow[0, :, :, 1] = [[0,0,0],
                            [0,0,0],
                            [0,0,0]]
        sess = tf.Session()
        mask = sess.run(create_outgoing_mask(flow))
        self.assertAllEqual(mask[0,:,:,0], [[0,0,0],
                                            [1,1,1],
                                            [1,0,1]])

    # def test_forward_backward_loss(self):
    #     im1 = np.ones([1,3,3,3], np.float32)
    #     im2 = np.ones([1,3,3,3], np.float32)
    #     mask = np.ones([1,3,3,1], np.float32)
    #     mask[0, :, :, 0] = [[1,1,0],
    #                         [1,1,0],
    #                         [0,0,0]]
    #
    #     flow_fw = np.ones([1,3,3,2], np.float32)
    #     flow_fw[0, :, :, 0] = [[1,1,1],
    #                            [1,1,1],
    #                            [1,1,1]]
    #     flow_fw[0, :, :, 1] = [[1,1,1],
    #                            [1,1,1],
    #                            [1,1,1]]
    #     flow_bw = np.ones([1,3,3,2], np.float32)
    #     flow_bw[0, :, :, 0] = [[-1,-1,-1],
    #                            [-1,-1,-1],
    #                            [-1,-1,-1]]
    #     flow_bw[0, :, :, 1] = [[-1,-1,-1],
    #                            [-1,-1,-1],
    #                            [-1,-1,-1]]
    #
    #     sess = tf.Session()
    #     losses = sess.run(compute_losses(im1, im2, flow_fw, flow_bw, mask))
    #     self.assertAllClose(losses['fb'], 0.0, atol=1e-2)

    def test_gradient_loss(self):
        # im2 is im1 shifted by a constant intensity offset, so the image
        # gradients are identical and the gradient loss should vanish.
        im1 = np.ones([1,3,3,3], np.float32)
        im2 = np.ones([1,3,3,3], np.float32)
        mask = np.ones([1,3,3,1], np.float32)
        im1[0, :, :, 0] = [[0,1,0],
                           [0,2,0],
                           [0,3,4]]
        im1[0, :, :, 1] = [[0,1,0],
                           [0,2,0],
                           [0,3,4]]
        im1[0, :, :, 2] = [[0,1,0],
                           [0,2,0],
                           [0,3,4]]
        im2[0, :, :, 0] = [[1,2,1],
                           [1,3,1],
                           [1,4,5]]
        im2[0, :, :, 1] = [[1,2,1],
                           [1,3,1],
                           [1,4,5]]
        im2[0, :, :, 2] = [[1,2,1],
                           [1,3,1],
                           [1,4,5]]
        sess = tf.Session()
        loss = sess.run(gradient_loss(im1, im2, mask))
        self.assertAllClose(loss, 0.0, atol=1e-2)

    def test_ternary_reference(self):
        # Compare the ternary census loss on real image pairs against
        # previously recorded reference values (assertion currently
        # disabled; the loss is only printed).
        def _ternary_reference_test(im1_name, im2_name, expected):
            with self.test_session(use_gpu=True) as sess:
                im1 = tf.expand_dims(read_png_image([im1_name]), 0)
                im2 = tf.expand_dims(read_png_image([im2_name]), 0)
                _, height, width, _ = tf.unstack(tf.shape(im1))
                mask = tf.ones([1, height, width, 1])
                sess.run(tf.global_variables_initializer())
                coord = tf.train.Coordinator()
                threads = tf.train.start_queue_runners(sess=sess, coord=coord)
                # Scale by the pixel count to match the recorded totals.
                scale = tf.cast(height * width, tf.float32)
                loss_ = ternary_loss(im1, im2, mask, max_distance=3, truncate=22) * scale
                loss = sess.run(loss_)
                print(loss)
                #self.assertAllClose(loss, expected)
        _ternary_reference_test('../test_data/frame_0011.png',
                                '../test_data/frame_0012.png',
                                8.86846e+06)
        _ternary_reference_test('../test_data/frame_0016.png',
                                '../test_data/frame_0017.png',
                                6.75537e+06)
        _ternary_reference_test('../test_data/frame_0018.png',
                                '../test_data/frame_0019.png',
                                8.22283e+06)
        _ternary_reference_test('../test_data/frame_0028.png',
                                '../test_data/frame_0029.png',
                                8.05619e+06)
brettminnie/closure-linter | closure_linter/errorrecord.py | 126 | 2039 | #!/usr/bin/env python
# Copyright 2012 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple, pickle-serializable class to represent a lint error."""
__author__ = 'nnaze@google.com (Nathan Naze)'
import gflags as flags
from closure_linter import errors
from closure_linter.common import erroroutput
# Global gflags flag values; MakeErrorRecord reads FLAGS.unix_mode.
FLAGS = flags.FLAGS
class ErrorRecord(object):
    """Record-keeping struct that can be serialized back from a process.

    Attributes:
      path: Path to the file.
      error_string: Error string for the user.
      new_error: Whether this is a "new error" (see errors.NEW_ERRORS).
    """

    def __init__(self, path, error_string, new_error):
        # Only plain attributes are stored, keeping instances picklable.
        self.path = path
        self.error_string = error_string
        self.new_error = new_error
def MakeErrorRecord(path, error):
  """Make an error record with correctly formatted error string.

  Errors are not able to be serialized (pickled) over processes because of
  their pointers to the complex token/context graph.  We use an intermediary
  serializable class to pass back just the relevant information.

  Args:
    path: Path of file the error was found in.
    error: An error.Error instance.

  Returns:
    ErrorRecord instance.
  """
  new_error = error.code in errors.NEW_ERRORS
  # Honor --unix_mode so parallel (multi-process) runs produce output
  # formatted identically to single-process runs.
  if FLAGS.unix_mode:
    error_string = erroroutput.GetUnixErrorOutput(
        path, error, new_error=new_error)
  else:
    error_string = erroroutput.GetErrorOutput(error, new_error=new_error)
  return ErrorRecord(path, error_string, new_error)
| apache-2.0 |
pymedusa/Medusa | ext/tornado/routing.py | 5 | 25082 | # Copyright 2015 The Tornado Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Flexible routing implementation.
Tornado routes HTTP requests to appropriate handlers using `Router`
class implementations. The `tornado.web.Application` class is a
`Router` implementation and may be used directly, or the classes in
this module may be used for additional flexibility. The `RuleRouter`
class can match on more criteria than `.Application`, or the `Router`
interface can be subclassed for maximum customization.
`Router` interface extends `~.httputil.HTTPServerConnectionDelegate`
to provide additional routing capabilities. This also means that any
`Router` implementation can be used directly as a ``request_callback``
for `~.httpserver.HTTPServer` constructor.
`Router` subclass must implement a ``find_handler`` method to provide
a suitable `~.httputil.HTTPMessageDelegate` instance to handle the
request:
.. code-block:: python
class CustomRouter(Router):
def find_handler(self, request, **kwargs):
# some routing logic providing a suitable HTTPMessageDelegate instance
return MessageDelegate(request.connection)
class MessageDelegate(HTTPMessageDelegate):
def __init__(self, connection):
self.connection = connection
def finish(self):
self.connection.write_headers(
ResponseStartLine("HTTP/1.1", 200, "OK"),
HTTPHeaders({"Content-Length": "2"}),
b"OK")
self.connection.finish()
router = CustomRouter()
server = HTTPServer(router)
The main responsibility of `Router` implementation is to provide a
mapping from a request to `~.httputil.HTTPMessageDelegate` instance
that will handle this request. In the example above we can see that
routing is possible even without instantiating an `~.web.Application`.
For routing to `~.web.RequestHandler` implementations we need an
`~.web.Application` instance. `~.web.Application.get_handler_delegate`
provides a convenient way to create `~.httputil.HTTPMessageDelegate`
for a given request and `~.web.RequestHandler`.
Here is a simple example of how we can we route to
`~.web.RequestHandler` subclasses by HTTP method:
.. code-block:: python
resources = {}
class GetResource(RequestHandler):
def get(self, path):
if path not in resources:
raise HTTPError(404)
self.finish(resources[path])
class PostResource(RequestHandler):
def post(self, path):
resources[path] = self.request.body
class HTTPMethodRouter(Router):
def __init__(self, app):
self.app = app
def find_handler(self, request, **kwargs):
handler = GetResource if request.method == "GET" else PostResource
return self.app.get_handler_delegate(request, handler, path_args=[request.path])
router = HTTPMethodRouter(Application())
server = HTTPServer(router)
`ReversibleRouter` interface adds the ability to distinguish between
the routes and reverse them to the original urls using route's name
and additional arguments. `~.web.Application` is itself an
implementation of `ReversibleRouter` class.
`RuleRouter` and `ReversibleRuleRouter` are implementations of
`Router` and `ReversibleRouter` interfaces and can be used for
creating rule-based routing configurations.
Rules are instances of `Rule` class. They contain a `Matcher`, which
provides the logic for determining whether the rule is a match for a
particular request and a target, which can be one of the following.
1) An instance of `~.httputil.HTTPServerConnectionDelegate`:
.. code-block:: python
router = RuleRouter([
Rule(PathMatches("/handler"), ConnectionDelegate()),
# ... more rules
])
class ConnectionDelegate(HTTPServerConnectionDelegate):
def start_request(self, server_conn, request_conn):
return MessageDelegate(request_conn)
2) A callable accepting a single argument of `~.httputil.HTTPServerRequest` type:
.. code-block:: python
router = RuleRouter([
Rule(PathMatches("/callable"), request_callable)
])
def request_callable(request):
request.write(b"HTTP/1.1 200 OK\\r\\nContent-Length: 2\\r\\n\\r\\nOK")
request.finish()
3) Another `Router` instance:
.. code-block:: python
router = RuleRouter([
Rule(PathMatches("/router.*"), CustomRouter())
])
Of course a nested `RuleRouter` or a `~.web.Application` is allowed:
.. code-block:: python
router = RuleRouter([
Rule(HostMatches("example.com"), RuleRouter([
Rule(PathMatches("/app1/.*"), Application([(r"/app1/handler", Handler)])),
]))
])
server = HTTPServer(router)
In the example below `RuleRouter` is used to route between applications:
.. code-block:: python
app1 = Application([
(r"/app1/handler", Handler1),
# other handlers ...
])
app2 = Application([
(r"/app2/handler", Handler2),
# other handlers ...
])
router = RuleRouter([
Rule(PathMatches("/app1.*"), app1),
Rule(PathMatches("/app2.*"), app2)
])
server = HTTPServer(router)
For more information on application-level routing see docs for `~.web.Application`.
.. versionadded:: 4.5
"""
import re
from functools import partial
from tornado import httputil
from tornado.httpserver import _CallableAdapter
from tornado.escape import url_escape, url_unescape, utf8
from tornado.log import app_log
from tornado.util import basestring_type, import_object, re_unescape, unicode_type
from typing import Any, Union, Optional, Awaitable, List, Dict, Pattern, Tuple, overload
class Router(httputil.HTTPServerConnectionDelegate):
    """Abstract router interface."""

    def find_handler(
        self, request: httputil.HTTPServerRequest, **kwargs: Any
    ) -> Optional[httputil.HTTPMessageDelegate]:
        """Must be implemented to return an appropriate instance of `~.httputil.HTTPMessageDelegate`
        that can serve the request.

        Routing implementations may pass additional kwargs to extend the routing logic.

        :arg httputil.HTTPServerRequest request: current HTTP request.
        :arg kwargs: additional keyword arguments passed by routing implementation.
        :returns: an instance of `~.httputil.HTTPMessageDelegate` that will be used to
            process the request.
        """
        raise NotImplementedError()

    def start_request(
        self, server_conn: object, request_conn: httputil.HTTPConnection
    ) -> httputil.HTTPMessageDelegate:
        # Entry point called by HTTPServer for each new request; actual
        # handler resolution is deferred until the headers arrive (see
        # _RoutingDelegate.headers_received).
        return _RoutingDelegate(self, server_conn, request_conn)
class ReversibleRouter(Router):
    """Abstract router interface for routers that can handle named routes
    and support reversing them to original urls.

    `~.web.Application` is the canonical implementation of this interface.
    """

    def reverse_url(self, name: str, *args: Any) -> Optional[str]:
        """Returns url string for a given route name and arguments
        or ``None`` if no match is found.

        :arg str name: route name.
        :arg args: url parameters.
        :returns: parametrized url string for a given route name (or ``None``).
        """
        raise NotImplementedError()
class _RoutingDelegate(httputil.HTTPMessageDelegate):
    """Adapter that defers handler selection until request headers arrive."""

    def __init__(
        self, router: Router, server_conn: object, request_conn: httputil.HTTPConnection
    ) -> None:
        self.server_conn = server_conn
        self.request_conn = request_conn
        # Resolved lazily in headers_received, once the start line and
        # headers needed for routing are available.
        self.delegate = None  # type: Optional[httputil.HTTPMessageDelegate]
        self.router = router  # type: Router

    def headers_received(
        self,
        start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
        headers: httputil.HTTPHeaders,
    ) -> Optional[Awaitable[None]]:
        # Server-side delegates only ever receive request start lines.
        assert isinstance(start_line, httputil.RequestStartLine)
        request = httputil.HTTPServerRequest(
            connection=self.request_conn,
            server_connection=self.server_conn,
            start_line=start_line,
            headers=headers,
        )
        self.delegate = self.router.find_handler(request)
        if self.delegate is None:
            # No route matched: fall back to a minimal 404 responder so the
            # connection is still answered and finished cleanly.
            app_log.debug(
                "Delegate for %s %s request not found",
                start_line.method,
                start_line.path,
            )
            self.delegate = _DefaultMessageDelegate(self.request_conn)
        return self.delegate.headers_received(start_line, headers)

    def data_received(self, chunk: bytes) -> Optional[Awaitable[None]]:
        assert self.delegate is not None
        return self.delegate.data_received(chunk)

    def finish(self) -> None:
        assert self.delegate is not None
        self.delegate.finish()

    def on_connection_close(self) -> None:
        assert self.delegate is not None
        self.delegate.on_connection_close()
class _DefaultMessageDelegate(httputil.HTTPMessageDelegate):
    """Fallback delegate that answers 404 Not Found when no route matched."""

    def __init__(self, connection: httputil.HTTPConnection) -> None:
        self.connection = connection

    def finish(self) -> None:
        # Body and headers are written only at finish() time, after the
        # (ignored) request body has been fully received.
        self.connection.write_headers(
            httputil.ResponseStartLine("HTTP/1.1", 404, "Not Found"),
            httputil.HTTPHeaders(),
        )
        self.connection.finish()
# _RuleList can either contain pre-constructed Rules or a sequence of
# arguments to be passed to the Rule constructor.
# The tuple shapes mirror Rule(matcher, target[, target_kwargs[, name]]);
# a bare str in the matcher position is shorthand for PathMatches(str)
# (see RuleRouter.add_rules).
_RuleList = List[
    Union[
        "Rule",
        List[Any],  # Can't do detailed typechecking of lists.
        Tuple[Union[str, "Matcher"], Any],
        Tuple[Union[str, "Matcher"], Any, Dict[str, Any]],
        Tuple[Union[str, "Matcher"], Any, Dict[str, Any], str],
    ]
]
class RuleRouter(Router):
    """Rule-based router implementation."""

    def __init__(self, rules: Optional[_RuleList] = None) -> None:
        """Constructs a router from an ordered list of rules::

            RuleRouter([
                Rule(PathMatches("/handler"), Target),
                # ... more rules
            ])

        You can also omit explicit `Rule` constructor and use tuples of arguments::

            RuleRouter([
                (PathMatches("/handler"), Target),
            ])

        `PathMatches` is a default matcher, so the example above can be simplified::

            RuleRouter([
                ("/handler", Target),
            ])

        In the examples above, ``Target`` can be a nested `Router` instance, an instance of
        `~.httputil.HTTPServerConnectionDelegate` or an old-style callable,
        accepting a request argument.

        :arg rules: a list of `Rule` instances or tuples of `Rule`
            constructor arguments.
        """
        self.rules = []  # type: List[Rule]
        if rules:
            self.add_rules(rules)

    def add_rules(self, rules: _RuleList) -> None:
        """Appends new rules to the router.

        :arg rules: a list of Rule instances (or tuples of arguments, which are
            passed to Rule constructor).
        """
        for rule in rules:
            if isinstance(rule, (tuple, list)):
                # Normalize tuple/list shorthand into a Rule instance; a
                # string first element is treated as a path regex.
                assert len(rule) in (2, 3, 4)
                if isinstance(rule[0], basestring_type):
                    rule = Rule(PathMatches(rule[0]), *rule[1:])
                else:
                    rule = Rule(*rule)
            self.rules.append(self.process_rule(rule))

    def process_rule(self, rule: "Rule") -> "Rule":
        """Override this method for additional preprocessing of each rule.

        :arg Rule rule: a rule to be processed.
        :returns: the same or modified Rule instance.
        """
        return rule

    def find_handler(
        self, request: httputil.HTTPServerRequest, **kwargs: Any
    ) -> Optional[httputil.HTTPMessageDelegate]:
        # First rule whose matcher matches AND whose target yields a
        # delegate wins; rules are consulted in registration order.
        for rule in self.rules:
            target_params = rule.matcher.match(request)
            if target_params is not None:
                if rule.target_kwargs:
                    target_params["target_kwargs"] = rule.target_kwargs
                delegate = self.get_target_delegate(
                    rule.target, request, **target_params
                )
                if delegate is not None:
                    return delegate
        return None

    def get_target_delegate(
        self, target: Any, request: httputil.HTTPServerRequest, **target_params: Any
    ) -> Optional[httputil.HTTPMessageDelegate]:
        """Returns an instance of `~.httputil.HTTPMessageDelegate` for a
        Rule's target. This method is called by `~.find_handler` and can be
        extended to provide additional target types.

        :arg target: a Rule's target.
        :arg httputil.HTTPServerRequest request: current request.
        :arg target_params: additional parameters that can be useful
            for `~.httputil.HTTPMessageDelegate` creation.
        """
        # Three supported target kinds, checked most-specific first:
        # nested router, connection delegate, then plain callable.
        if isinstance(target, Router):
            return target.find_handler(request, **target_params)
        elif isinstance(target, httputil.HTTPServerConnectionDelegate):
            assert request.connection is not None
            return target.start_request(request.server_connection, request.connection)
        elif callable(target):
            assert request.connection is not None
            return _CallableAdapter(
                partial(target, **target_params), request.connection
            )
        return None
class ReversibleRuleRouter(ReversibleRouter, RuleRouter):
    """A rule-based router that implements ``reverse_url`` method.

    Each rule added to this router may have a ``name`` attribute that can be
    used to reconstruct an original uri. The actual reconstruction takes place
    in a rule's matcher (see `Matcher.reverse`).
    """

    def __init__(self, rules: Optional[_RuleList] = None) -> None:
        # name -> Rule mapping, populated by process_rule as rules are added.
        self.named_rules = {}  # type: Dict[str, Any]
        super().__init__(rules)

    def process_rule(self, rule: "Rule") -> "Rule":
        rule = super().process_rule(rule)
        if rule.name:
            if rule.name in self.named_rules:
                # Later registrations win; warn so duplicates are noticed.
                app_log.warning(
                    "Multiple handlers named %s; replacing previous value", rule.name
                )
            self.named_rules[rule.name] = rule
        return rule

    def reverse_url(self, name: str, *args: Any) -> Optional[str]:
        if name in self.named_rules:
            return self.named_rules[name].matcher.reverse(*args)
        # Not known locally: search nested reversible routers depth-first.
        for rule in self.rules:
            if isinstance(rule.target, ReversibleRouter):
                reversed_url = rule.target.reverse_url(name, *args)
                if reversed_url is not None:
                    return reversed_url
        return None
class Rule(object):
    """A routing rule."""

    def __init__(
        self,
        matcher: "Matcher",
        target: Any,
        target_kwargs: Optional[Dict[str, Any]] = None,
        name: Optional[str] = None,
    ) -> None:
        """Constructs a Rule instance.

        :arg Matcher matcher: a `Matcher` instance used for determining
            whether the rule should be considered a match for a specific
            request.
        :arg target: a Rule's target (typically a ``RequestHandler`` or
            `~.httputil.HTTPServerConnectionDelegate` subclass or even a nested `Router`,
            depending on routing implementation).
        :arg dict target_kwargs: a dict of parameters that can be useful
            at the moment of target instantiation (for example, ``status_code``
            for a ``RequestHandler`` subclass). They end up in
            ``target_params['target_kwargs']`` of `RuleRouter.get_target_delegate`
            method.
        :arg str name: the name of the rule that can be used to find it
            in `ReversibleRouter.reverse_url` implementation.
        """
        if isinstance(target, str):
            # import the Module and instantiate the class
            # Must be a fully qualified name (module.ClassName)
            target = import_object(target)
        self.matcher = matcher  # type: Matcher
        self.target = target
        self.target_kwargs = target_kwargs if target_kwargs else {}
        self.name = name

    def reverse(self, *args: Any) -> Optional[str]:
        # URL reconstruction is delegated to the matcher (see Matcher.reverse).
        return self.matcher.reverse(*args)

    def __repr__(self) -> str:
        return "%s(%r, %s, kwargs=%r, name=%r)" % (
            self.__class__.__name__,
            self.matcher,
            self.target,
            self.target_kwargs,
            self.name,
        )
class Matcher(object):
    """Represents a matcher for request features."""

    def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]:
        """Matches current instance against the request.

        :arg httputil.HTTPServerRequest request: current HTTP request
        :returns: a dict of parameters to be passed to the target handler
            (for example, ``handler_kwargs``, ``path_args``, ``path_kwargs``
            can be passed for proper `~.web.RequestHandler` instantiation).
            An empty dict is a valid (and common) return value to indicate a match
            when the argument-passing features are not used.
            ``None`` must be returned to indicate that there is no match."""
        raise NotImplementedError()

    def reverse(self, *args: Any) -> Optional[str]:
        """Reconstructs full url from matcher instance and additional arguments."""
        # Base implementation: matchers are not reversible unless a
        # subclass overrides this.
        return None
class AnyMatches(Matcher):
    """Catch-all matcher: every request matches, extracting no parameters."""

    def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]:
        # An empty dict means "match, with no handler arguments";
        # only None would signal a non-match.
        return {}
class HostMatches(Matcher):
    """Matches requests whose host name matches the ``host_pattern`` regex."""

    def __init__(self, host_pattern: Union[str, Pattern]) -> None:
        if isinstance(host_pattern, basestring_type):
            # Anchor string patterns at the end so "example.com" does not
            # also match "example.com.evil.org".
            anchored = host_pattern if host_pattern.endswith("$") else host_pattern + "$"
            self.host_pattern = re.compile(anchored)
        else:
            # Pre-compiled patterns are used as given.
            self.host_pattern = host_pattern

    def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]:
        return {} if self.host_pattern.match(request.host_name) else None
class DefaultHostMatches(Matcher):
    """Matches requests from host that is equal to application's default_host.

    Always returns no match if ``X-Real-Ip`` header is present.
    """

    def __init__(self, application: Any, host_pattern: Pattern) -> None:
        self.application = application
        self.host_pattern = host_pattern

    def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]:
        # The default-host shortcut exists for direct (debug) access only;
        # behind a load balancer (X-Real-Ip set) it is disabled.
        if "X-Real-Ip" in request.headers:
            return None
        if self.host_pattern.match(self.application.default_host):
            return {}
        return None
class PathMatches(Matcher):
    """Matches requests with paths specified by ``path_pattern`` regex."""

    def __init__(self, path_pattern: Union[str, Pattern]) -> None:
        if isinstance(path_pattern, basestring_type):
            # Anchor string patterns at the end so prefixes do not match.
            if not path_pattern.endswith("$"):
                path_pattern += "$"
            self.regex = re.compile(path_pattern)
        else:
            self.regex = path_pattern
        assert len(self.regex.groupindex) in (0, self.regex.groups), (
            "groups in url regexes must either be all named or all "
            "positional: %r" % self.regex.pattern
        )
        # Precompute the reverse template ('/%s/' style) and group count;
        # both are None when the pattern is too complex to reverse.
        self._path, self._group_count = self._find_groups()

    def match(self, request: httputil.HTTPServerRequest) -> Optional[Dict[str, Any]]:
        match = self.regex.match(request.path)
        if match is None:
            return None
        if not self.regex.groups:
            return {}
        path_args = []  # type: List[bytes]
        path_kwargs = {}  # type: Dict[str, bytes]
        # Pass matched groups to the handler. Since
        # match.groups() includes both named and
        # unnamed groups, we want to use either groups
        # or groupdict but not both.
        if self.regex.groupindex:
            path_kwargs = dict(
                (str(k), _unquote_or_none(v)) for (k, v) in match.groupdict().items()
            )
        else:
            path_args = [_unquote_or_none(s) for s in match.groups()]
        return dict(path_args=path_args, path_kwargs=path_kwargs)

    def reverse(self, *args: Any) -> Optional[str]:
        if self._path is None:
            raise ValueError("Cannot reverse url regex " + self.regex.pattern)
        assert len(args) == self._group_count, (
            "required number of arguments " "not found"
        )
        if not len(args):
            return self._path
        converted_args = []
        # Each argument is stringified and percent-escaped before being
        # substituted into the '%s' slots of the reverse template.
        for a in args:
            if not isinstance(a, (unicode_type, bytes)):
                a = str(a)
            converted_args.append(url_escape(utf8(a), plus=False))
        return self._path % tuple(converted_args)

    def _find_groups(self) -> Tuple[Optional[str], Optional[int]]:
        """Returns a tuple (reverse string, group count) for a url.

        For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
        would return ('/%s/%s/', 2).
        """
        pattern = self.regex.pattern
        # Strip the anchors; they are not part of the reconstructed url.
        if pattern.startswith("^"):
            pattern = pattern[1:]
        if pattern.endswith("$"):
            pattern = pattern[:-1]
        if self.regex.groups != pattern.count("("):
            # The pattern is too complicated for our simplistic matching,
            # so we can't support reversing it.
            return None, None
        pieces = []
        # Each '(' opens a capture group: replace it with a '%s' slot and
        # keep the literal text that follows the matching ')'.
        for fragment in pattern.split("("):
            if ")" in fragment:
                paren_loc = fragment.index(")")
                if paren_loc >= 0:
                    try:
                        unescaped_fragment = re_unescape(fragment[paren_loc + 1 :])
                    except ValueError:
                        # If we can't unescape part of it, we can't
                        # reverse this url.
                        return (None, None)
                    pieces.append("%s" + unescaped_fragment)
            else:
                try:
                    unescaped_fragment = re_unescape(fragment)
                except ValueError:
                    # If we can't unescape part of it, we can't
                    # reverse this url.
                    return (None, None)
                pieces.append(unescaped_fragment)
        return "".join(pieces), self.regex.groups
class URLSpec(Rule):
    """Specifies mappings between URLs and handlers.

    .. versionchanged: 4.5
        `URLSpec` is now a subclass of a `Rule` with `PathMatches` matcher and is preserved for
        backwards compatibility.
    """

    def __init__(
        self,
        pattern: Union[str, Pattern],
        handler: Any,
        kwargs: Optional[Dict[str, Any]] = None,
        name: Optional[str] = None,
    ) -> None:
        """Parameters:

        * ``pattern``: Regular expression to be matched. Any capturing
          groups in the regex will be passed in to the handler's
          get/post/etc methods as arguments (by keyword if named, by
          position if unnamed. Named and unnamed capturing groups
          may not be mixed in the same rule).

        * ``handler``: `~.web.RequestHandler` subclass to be invoked.

        * ``kwargs`` (optional): A dictionary of additional arguments
          to be passed to the handler's constructor.

        * ``name`` (optional): A name for this handler. Used by
          `~.web.Application.reverse_url`.
        """
        matcher = PathMatches(pattern)
        super().__init__(matcher, handler, kwargs, name)
        # Legacy aliases kept for pre-4.5 code that accessed these directly.
        self.regex = matcher.regex
        self.handler_class = self.target
        self.kwargs = kwargs

    def __repr__(self) -> str:
        return "%s(%r, %s, kwargs=%r, name=%r)" % (
            self.__class__.__name__,
            self.regex.pattern,
            self.handler_class,
            self.kwargs,
            self.name,
        )
@overload
def _unquote_or_none(s: str) -> bytes:
    pass


@overload  # noqa: F811
def _unquote_or_none(s: None) -> None:
    pass


def _unquote_or_none(s: Optional[str]) -> Optional[bytes]:  # noqa: F811
    """None-safe wrapper around url_unescape to handle unmatched optional
    groups correctly.

    Note that args are passed as bytes so the handler can decide what
    encoding to use.
    """
    # Optional regex groups that did not participate in a match come back
    # as None; pass them through untouched.
    if s is None:
        return s
    return url_unescape(s, encoding=None, plus=False)
| gpl-3.0 |
shikhardb/scikit-learn | examples/linear_model/plot_sgd_iris.py | 286 | 2202 | """
========================================
Plot multi-class SGD on the iris dataset
========================================
Plot decision surface of multi-class SGD on iris dataset.
The hyperplanes corresponding to the three one-versus-all (OVA) classifiers
are represented by the dashed lines.
"""
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.linear_model import SGDClassifier

# import some data to play with
iris = datasets.load_iris()
X = iris.data[:, :2]  # we only take the first two features. We could
                      # avoid this ugly slicing by using a two-dim dataset
y = iris.target
colors = "bry"  # one color per class: blue, red, yellow

# shuffle (fixed seed keeps the example reproducible run to run)
idx = np.arange(X.shape[0])
np.random.seed(13)
np.random.shuffle(idx)
X = X[idx]
y = y[idx]

# standardize (zero mean, unit variance per feature)
mean = X.mean(axis=0)
std = X.std(axis=0)
X = (X - mean) / std

h = .02  # step size in the mesh

# NOTE(review): 'n_iter' was the parameter name in older scikit-learn
# (renamed 'max_iter' later) -- confirm against the pinned version.
clf = SGDClassifier(alpha=0.001, n_iter=100).fit(X, y)

# create a mesh to plot in
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                     np.arange(y_min, y_max, h))

# Plot the decision boundary. For that, we will assign a color to each
# point in the mesh [x_min, m_max]x[y_min, y_max].
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
cs = plt.contourf(xx, yy, Z, cmap=plt.cm.Paired)
plt.axis('tight')

# Plot also the training points
for i, color in zip(clf.classes_, colors):
    idx = np.where(y == i)
    plt.scatter(X[idx, 0], X[idx, 1], c=color, label=iris.target_names[i],
                cmap=plt.cm.Paired)
plt.title("Decision surface of multi-class SGD")
plt.axis('tight')

# Plot the three one-against-all classifiers
xmin, xmax = plt.xlim()
ymin, ymax = plt.ylim()
coef = clf.coef_
intercept = clf.intercept_


def plot_hyperplane(c, color):
    """Draw the OVA decision line for class ``c`` across the current x-range."""
    def line(x0):
        # Solve coef[c, 0]*x0 + coef[c, 1]*y + intercept[c] == 0 for y.
        return (-(x0 * coef[c, 0]) - intercept[c]) / coef[c, 1]
    plt.plot([xmin, xmax], [line(xmin), line(xmax)],
             ls="--", color=color)


for i, color in zip(clf.classes_, colors):
    plot_hyperplane(i, color)
plt.legend()
plt.show()
| bsd-3-clause |
sujithvm/red-alert | src/aminer.py | 3 | 1748 | import pymongo
client = pymongo.MongoClient("localhost", 27017)
# db name - aminer
db = client.aminer
# collection
db.publications
print "DB name: ", db.name
print "DB collection: ", db.publications
print "[INFO] Processing papers"
file = open("../data/aminer_publications.txt")
lines = file.readlines()
papers = {}
i = 0
while i < len(lines) :
paper = {}
paper['references'] = []
while lines[i] != ' \r\n' :
line = lines[i].strip()
'''
#index ---- index id of this paper
#* ---- paper title
#@ ---- authors (separated by semicolons)
#o ---- affiliations (separated by semicolons, and each affiliaiton corresponds to an author in order)
#t ---- year
#c ---- publication venue
#% ---- the id of references of this paper (there are multiple lines, with each indicating a reference)
#! ---- abstract
'''
if line.startswith('#index') : paper['index'] = line[len('#index'):]
if line.startswith('#*') : paper['title'] = line[len('#*'):]
if line.startswith('#@') : paper['authors'] = line[len('#@'):].split(',')
if line.startswith('#o') : paper['affiliations'] = line[len('#o'):]
if line.startswith('#t') : paper['year'] = line[len('#t'):]
if line.startswith('#c') : paper['publication'] = line[len('#c'):]
if line.startswith('#!') : paper['abstract'] = line[len('#!'):]
if line.startswith('#%') : paper['references'].append( line[len('#%'):] )
print "journal",i+1,"done"
i += 1
db.publications.insert_one(paper)
print "[INFO] inserted into db paper", paper['index']
i += 1
file.close()
| mit |
zhaochengw/android_kernel_ef51lsk | tools/perf/scripts/python/futex-contention.py | 11261 | 1486 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
# Long-lived bookkeeping shared by the enter/exit probes below.
# (The original assigned process_names = {} twice; the first binding was
# dead and has been removed.)
thread_thislock = {}   # tid -> futex address the thread is currently waiting on
thread_blocktime = {}  # tid -> timestamp (ns) when the thread started waiting
lock_waits = {}  # long-lived stats on (tid,lock) blockage elapsed time
process_names = {}  # long-lived pid-to-execname mapping
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
                              nr, uaddr, op, val, utime, uaddr2, val3):
    """Record the moment a thread starts waiting on a futex."""
    # Only FUTEX_WAIT marks the start of a potential blockage; callers of
    # WAKE (and other futex ops) are irrelevant to contention measurement.
    if (op & FUTEX_CMD_MASK) != FUTEX_WAIT:
        return
    process_names[tid] = comm
    thread_thislock[tid] = uaddr
    thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
                             nr, ret):
    """Accumulate elapsed wait time when a FUTEX_WAIT call returns."""
    # Only threads we observed entering FUTEX_WAIT have a recorded block
    # time. dict.has_key() is deprecated (and removed in Python 3); the
    # 'in' operator is equivalent and works on both Python 2 and 3.
    if tid in thread_blocktime:
        elapsed = nsecs(s, ns) - thread_blocktime[tid]
        add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
        del thread_blocktime[tid]
        del thread_thislock[tid]
def trace_begin():
    # Called once by perf when tracing starts.
    print "Press control+C to stop and show the summary"


def trace_end():
    # Called once by perf when tracing stops: dump per-(thread, lock)
    # contention statistics accumulated in lock_waits.
    for (tid, lock) in lock_waits:
        min, max, avg, count = lock_waits[tid, lock]
        print "%s[%d] lock %x contended %d times, %d avg ns" % \
            (process_names[tid], tid, lock, count, avg)
| gpl-2.0 |
malelew/UCLA_Dining_Web_App | ENV/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util/url.py | 553 | 5836 | from collections import namedtuple
from ..exceptions import LocationParseError
# All fields of the parsed-URL tuple, in positional order.
url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']


class Url(namedtuple('Url', url_attrs)):
    """
    Datastructure for representing an HTTP URL. Used as a return value for
    :func:`parse_url`.
    """
    # Empty __slots__ keeps instances as dict-less lightweight tuples.
    # (This was previously misspelled ``slots``, which silently created a
    # useless class attribute and gave every instance a __dict__.)
    __slots__ = ()

    def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
                query=None, fragment=None):
        # Normalize the path so that, when present, it is always absolute.
        if path and not path.startswith('/'):
            path = '/' + path
        return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
                                       query, fragment)

    @property
    def hostname(self):
        """For backwards-compatibility with urlparse. We're nice like that."""
        return self.host

    @property
    def request_uri(self):
        """Absolute path including the query string."""
        uri = self.path or '/'
        if self.query is not None:
            uri += '?' + self.query
        return uri

    @property
    def netloc(self):
        """Network location including host and port"""
        if self.port:
            return '%s:%d' % (self.host, self.port)
        return self.host

    @property
    def url(self):
        """
        Convert self into a url

        This function should more or less round-trip with :func:`.parse_url`. The
        returned url may not be exactly the same as the url inputted to
        :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
        with a blank port will have : removed).

        Example: ::

            >>> U = parse_url('http://google.com/mail/')
            >>> U.url
            'http://google.com/mail/'
            >>> Url('http', 'username:password', 'host.com', 80,
            ...     '/path', 'query', 'fragment').url
            'http://username:password@host.com:80/path?query#fragment'
        """
        scheme, auth, host, port, path, query, fragment = self
        url = ''

        # We use "is not None" because we want things to happen with empty
        # strings (or 0 port)
        if scheme is not None:
            url += scheme + '://'
        if auth is not None:
            url += auth + '@'
        if host is not None:
            url += host
        if port is not None:
            url += ':' + str(port)
        if path is not None:
            url += path
        if query is not None:
            url += '?' + query
        if fragment is not None:
            url += '#' + fragment

        return url

    def __str__(self):
        return self.url
def split_first(s, delims):
    """
    Given a string and an iterable of delimiters, split on the first found
    delimiter. Return two split parts and the matched delimiter.

    If not found, then the first part is the full input string.

    Example::

        >>> split_first('foo/bar?baz', '?/=')
        ('foo', 'bar?baz', '/')
        >>> split_first('foo/bar?baz', '123')
        ('foo/bar?baz', '', None)

    Scales linearly with number of delims. Not ideal for large number of delims.
    """
    best_idx = None
    best_delim = None

    # Track the delimiter that appears earliest in the string.
    for delim in delims:
        found = s.find(delim)
        if found < 0:
            continue
        if best_idx is None or found < best_idx:
            best_idx = found
            best_delim = delim

    if best_idx is None:
        # No delimiter present: whole string is the first part.
        return s, '', None

    return s[:best_idx], s[best_idx + 1:], best_delim
def parse_url(url):
    """
    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
    performed to parse incomplete urls. Fields not provided will be None.

    Partly backwards-compatible with :mod:`urlparse`.

    Example::

        >>> parse_url('http://google.com/mail/')
        Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
        >>> parse_url('google.com:80')
        Url(scheme=None, host='google.com', port=80, path=None, ...)
        >>> parse_url('/foo?bar')
        Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
    """

    # While this code has overlap with stdlib's urlparse, it is much
    # simplified for our needs and less annoying.
    # Additionally, this implementations does silly things to be optimal
    # on CPython.

    if not url:
        # Empty
        return Url()

    scheme = None
    auth = None
    host = None
    port = None
    path = None
    fragment = None
    query = None

    # Scheme
    if '://' in url:
        scheme, url = url.split('://', 1)

    # Find the earliest Authority Terminator
    # (http://tools.ietf.org/html/rfc3986#section-3.2)
    url, path_, delim = split_first(url, ['/', '?', '#'])

    if delim:
        # Reassemble the path
        path = delim + path_

    # Auth
    if '@' in url:
        # Last '@' denotes end of auth part
        auth, url = url.rsplit('@', 1)

    # IPv6
    if url and url[0] == '[':
        # Bracketed literal: host runs through the closing ']'.
        host, url = url.split(']', 1)
        host += ']'

    # Port
    if ':' in url:
        _host, port = url.split(':', 1)

        if not host:
            # host may already be set by the IPv6 branch above.
            host = _host

        if port:
            # If given, ports must be integers.
            if not port.isdigit():
                raise LocationParseError(url)
            port = int(port)
        else:
            # Blank ports are cool, too. (rfc3986#section-3.2.3)
            port = None

    elif not host and url:
        host = url

    if not path:
        return Url(scheme, auth, host, port, path, query, fragment)

    # Fragment
    if '#' in path:
        path, fragment = path.split('#', 1)

    # Query
    if '?' in path:
        path, query = path.split('?', 1)

    return Url(scheme, auth, host, port, path, query, fragment)
def get_host(url):
    """
    Deprecated. Use :func:`.parse_url` instead.
    """
    parsed = parse_url(url)
    # Default to 'http' when the url carried no explicit scheme.
    scheme = parsed.scheme or 'http'
    return scheme, parsed.hostname, parsed.port
| mit |
hansey/youtube-dl | youtube_dl/extractor/planetaplay.py | 113 | 1921 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import ExtractorError
class PlanetaPlayIE(InfoExtractor):
    """Extractor for planetaplay.com song pages (http://planetaplay.com/?sng=<id>)."""
    _VALID_URL = r'https?://(?:www\.)?planetaplay\.com/\?sng=(?P<id>[0-9]+)'
    _API_URL = 'http://planetaplay.com/action/playlist/?sng={0:}'
    _THUMBNAIL_URL = 'http://planetaplay.com/img/thumb/{thumb:}'
    _TEST = {
        'url': 'http://planetaplay.com/?sng=3586',
        'md5': '9d569dceb7251a4e01355d5aea60f9db',
        'info_dict': {
            'id': '3586',
            'ext': 'flv',
            'title': 'md5:e829428ee28b1deed00de90de49d1da1',
        },
        'skip': 'Not accessible from Travis CI server',
    }
    # format id -> (quality rank, URL template filled from the API 'data' entry)
    _SONG_FORMATS = {
        'lq': (0, 'http://www.planetaplay.com/videoplayback/{med_hash:}'),
        'hq': (1, 'http://www.planetaplay.com/videoplayback/hi/{med_hash:}'),
    }

    def _real_extract(self, url):
        """Fetch the playlist JSON for the song id and build the info dict.

        Raises ExtractorError (expected=True) when the API response
        carries no playlist data.
        """
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        response = self._download_json(
            self._API_URL.format(video_id), video_id)['response']
        try:
            # BUG FIX: the original used response.get('data')[0], which
            # raises an *uncaught* TypeError when 'data' is absent
            # (dict.get returns None).  Index directly and catch every
            # "no playlist" failure mode.
            data = response['data'][0]
        except (KeyError, TypeError, IndexError):
            raise ExtractorError(
                '%s: failed to get the playlist' % self.IE_NAME, expected=True)
        title = '{song_artists:} - {sng_name:}'.format(**data)
        thumbnail = self._THUMBNAIL_URL.format(**data)
        formats = []
        for format_id, (quality, url_template) in self._SONG_FORMATS.items():
            formats.append({
                'format_id': format_id,
                'url': url_template.format(**data),
                'quality': quality,
                'ext': 'flv',
            })
        self._sort_formats(formats)
        return {
            'id': video_id,
            'title': title,
            'formats': formats,
            'thumbnail': thumbnail,
        }
| unlicense |
PongPi/isl-odoo | addons/hr_payroll/wizard/__init__.py | 442 | 1159 | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# d$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_payroll_payslips_by_employees
import hr_payroll_contribution_register_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
josauder/procedural_city_generation/additional_stuff/Singleton.py | 2 | 1569 | class Singleton:
    """
    Singleton object which can only have one instance.

    Instantiated with a module name, e.g. "roadmap", and reads the
    corresponding "roadmap.conf" in procedural_city_generation/inputs,
    promoting each config entry's "value" field to an attribute.
    All attributes are mutable; however, this class should mainly be used
    for immutable numeric values to avoid confusion/difficult-to-trace bugs.
    """
    class __Singleton:
        # Inner class that holds the real state; the outer class is a
        # proxy that forwards every attribute access to the one instance.
        def __init__(self, modulename=None):
            import procedural_city_generation
            import os
            import json
            if modulename:
                # Resolve <package dir>/inputs/<modulename>.conf and load
                # it as JSON; each top-level key maps to {"value": ...}.
                path=os.path.dirname(procedural_city_generation.__file__)
                with open(path+"/inputs/"+modulename+".conf", 'r') as f:
                    d=json.loads(f.read())
                for k, v in d.items():
                    setattr(self, k, v["value"])
            else:
                print( "Warning, Singleton instanciated without parsing a json file. Please specify the modulename parameter to avoid errors")
    # Class-level slot shared by every Singleton() call.
    instance=None
    def __init__(self, modulename=None):
        """
        Creates the shared instance on first call; later calls reuse it
        (their modulename argument is ignored).

        Parameters
        ----------
        modulename : String
        """
        if not Singleton.instance:
            Singleton.instance=Singleton.__Singleton(modulename)
    def __getattr__(self, name):
        # Only invoked for attributes not found normally, i.e. anything
        # other than 'instance': delegate the read to the shared state.
        return getattr(self.instance, name)
    def __setattr__(self, name, value):
        # Every write goes to the shared state, never to the proxy itself.
        setattr(self.instance, name, value)
    def kill(self):
        """
        Deletes the Singleton's instance so the next construction reloads.
        """
        Singleton.instance = None
| mpl-2.0 |
timm/timmnix | pypy3-v5.5.0-linux64/lib-python/3/tkinter/__init__.py | 2 | 163238 | """Wrapper functions for Tcl/Tk.
Tkinter provides classes which allow the display, positioning and
control of widgets. Toplevel widgets are Tk and Toplevel. Other
widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton,
Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox
LabelFrame and PanedWindow.
Properties of the widgets are specified with keyword arguments.
Keyword arguments have the same name as the corresponding resource
under Tk.
Widgets are positioned with one of the geometry managers Place, Pack
or Grid. These managers can be called with methods place, pack, grid
available in every Widget.
Actions are bound to events by resources (e.g. keyword argument
command) or with the method bind.
Example (Hello, World):
import tkinter
from tkinter.constants import *
tk = tkinter.Tk()
frame = tkinter.Frame(tk, relief=RIDGE, borderwidth=2)
frame.pack(fill=BOTH,expand=1)
label = tkinter.Label(frame, text="Hello, World")
label.pack(fill=X, expand=1)
button = tkinter.Button(frame,text="Exit",command=tk.destroy)
button.pack(side=BOTTOM)
tk.mainloop()
"""
import sys
if sys.platform == "win32":
# Attempt to configure Tcl/Tk without requiring PATH
from tkinter import _fix
import warnings
import _tkinter # If this fails your Python may not be configured for Tk
TclError = _tkinter.TclError
from tkinter.constants import *
import re
wantobjects = 1
TkVersion = float(_tkinter.TK_VERSION)
TclVersion = float(_tkinter.TCL_VERSION)
READABLE = _tkinter.READABLE
WRITABLE = _tkinter.WRITABLE
EXCEPTION = _tkinter.EXCEPTION
_magic_re = re.compile(r'([\\{}])')
_space_re = re.compile(r'([\s])', re.ASCII)
def _join(value):
    """Internal function.

    Render each element of *value* as a Tcl word and join them with
    single spaces."""
    return ' '.join(_stringify(element) for element in value)
def _stringify(value):
    """Internal function.

    Convert a Python value to a single Tcl word, brace-quoting or
    backslash-escaping as needed so Tcl parses it back unchanged."""
    if isinstance(value, (list, tuple)):
        if len(value) == 1:
            # A 1-element sequence collapses to its single item ...
            value = _stringify(value[0])
            if value[0] == '{':
                # ... but if that item is itself braced, re-wrap it so Tcl
                # still sees a one-element list rather than the inner list.
                value = '{%s}' % value
        else:
            value = '{%s}' % _join(value)
    else:
        value = str(value)
        if not value:
            # The empty string must become an explicit empty Tcl word.
            value = '{}'
        elif _magic_re.search(value):
            # add '\' before special characters and spaces
            value = _magic_re.sub(r'\\\1', value)
            value = _space_re.sub(r'\\\1', value)
        elif value[0] == '"' or _space_re.search(value):
            # Leading quote or embedded whitespace: brace-quote the word.
            value = '{%s}' % value
    return value
def _flatten(seq):
"""Internal function."""
res = ()
for item in seq:
if isinstance(item, (tuple, list)):
res = res + _flatten(item)
elif item is not None:
res = res + (item,)
return res
try: _flatten = _tkinter._flatten
except AttributeError: pass
def _cnfmerge(cnfs):
"""Internal function."""
if isinstance(cnfs, dict):
return cnfs
elif isinstance(cnfs, (type(None), str)):
return cnfs
else:
cnf = {}
for c in _flatten(cnfs):
try:
cnf.update(c)
except (AttributeError, TypeError) as msg:
print("_cnfmerge: fallback due to:", msg)
for k, v in c.items():
cnf[k] = v
return cnf
try: _cnfmerge = _tkinter._cnfmerge
except AttributeError: pass
class Event:
    """Container for the properties of an event.

    Instances of this type are generated if one of the following events occurs:

    KeyPress, KeyRelease - for keyboard events
    ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events
    Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate,
    Colormap, Gravity, Reparent, Property, Destroy, Activate,
    Deactivate - for window events.

    If a callback function for one of these events is registered
    using bind, bind_all, bind_class, or tag_bind, the callback is
    called with an Event as first argument. It will have the
    following attributes (in braces are the event types for which
    the attribute is valid):

    serial - serial number of event
    num - mouse button pressed (ButtonPress, ButtonRelease)
    focus - whether the window has the focus (Enter, Leave)
    height - height of the exposed window (Configure, Expose)
    width - width of the exposed window (Configure, Expose)
    keycode - keycode of the pressed key (KeyPress, KeyRelease)
    state - state of the event as a number (ButtonPress, ButtonRelease,
                            Enter, KeyPress, KeyRelease,
                            Leave, Motion)
    state - state as a string (Visibility)
    time - when the event occurred
    x - x-position of the mouse
    y - y-position of the mouse
    x_root - x-position of the mouse on the screen
             (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    y_root - y-position of the mouse on the screen
             (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    char - pressed character (KeyPress, KeyRelease)
    send_event - see X/Windows documentation
    keysym - keysym of the event as a string (KeyPress, KeyRelease)
    keysym_num - keysym of the event as a number (KeyPress, KeyRelease)
    type - type of the event as a number
    widget - widget in which the event occurred
    delta - delta of wheel movement (MouseWheel)
    """
    # The attributes above are filled in dynamically by the event
    # dispatch machinery; the class itself defines no behavior.
    pass
_support_default_root = 1
_default_root = None
def NoDefaultRoot():
    """Inhibit setting of default root window.

    Call this function to inhibit that the first instance of
    Tk is used for windows without an explicit parent window.
    """
    global _support_default_root, _default_root
    _support_default_root = 0
    _default_root = None
    # Remove the module-level binding entirely so that any later use of
    # the default root raises a NameError instead of silently using None.
    del _default_root
def _tkerror(err):
    """Internal function.

    Default Tcl background-error handler: deliberately ignores the error."""
    pass
def _exit(code=0):
"""Internal function. Calling it will raise the exception SystemExit."""
try:
code = int(code)
except ValueError:
pass
raise SystemExit(code)
_varnum = 0
class Variable:
    """Class to define value holders for e.g. buttons.

    Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations
    that constrain the type of the value returned from get()."""
    # Value used to seed the Tcl variable when it does not exist yet.
    _default = ""
    # Tcl interpreter handle; stays None until __init__ succeeds, which
    # __del__ checks so a half-constructed object can be collected safely.
    _tk = None
    def __init__(self, master=None, value=None, name=None):
        """Construct a variable

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to "")
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        # check for type of NAME parameter to override weird error message
        # raised from Modules/_tkinter.c:SetVar like:
        # TypeError: setvar() takes exactly 3 arguments (2 given)
        if name is not None and not isinstance(name, str):
            raise TypeError("name must be a string")
        global _varnum
        if not master:
            master = _default_root
        self._master = master
        self._tk = master.tk
        if name:
            self._name = name
        else:
            # Generate a unique Tcl-side name: PY_VAR0, PY_VAR1, ...
            self._name = 'PY_VAR' + repr(_varnum)
            _varnum += 1
        if value is not None:
            self.initialize(value)
        elif not self._tk.getboolean(self._tk.call("info", "exists", self._name)):
            # Variable does not exist in Tcl yet: seed it with the default.
            self.initialize(self._default)
    def __del__(self):
        """Unset the variable in Tcl."""
        if (self._tk is not None and
            self._tk.getboolean(self._tk.call("info", "exists", self._name))):
            self._tk.globalunsetvar(self._name)
    def __str__(self):
        """Return the name of the variable in Tcl."""
        return self._name
    def set(self, value):
        """Set the variable to VALUE."""
        return self._tk.globalsetvar(self._name, value)
    # initialize is an alias of set; __init__ uses it to seed new variables.
    initialize = set
    def get(self):
        """Return value of variable."""
        return self._tk.globalgetvar(self._name)
    def trace_variable(self, mode, callback):
        """Define a trace callback for the variable.

        MODE is one of "r", "w", "u" for read, write, undefine.
        CALLBACK must be a function which is called when
        the variable is read, written or undefined.

        Return the name of the callback.
        """
        cbname = self._master._register(callback)
        self._tk.call("trace", "variable", self._name, mode, cbname)
        return cbname
    trace = trace_variable
    def trace_vdelete(self, mode, cbname):
        """Delete the trace callback for a variable.

        MODE is one of "r", "w", "u" for read, write, undefine.
        CBNAME is the name of the callback returned from trace_variable or trace.
        """
        self._tk.call("trace", "vdelete", self._name, mode, cbname)
        # Also drop the Tcl command registered for the callback so it
        # cannot leak in the interpreter.
        self._master.deletecommand(cbname)
    def trace_vinfo(self):
        """Return all trace callback information."""
        return [self._tk.split(x) for x in self._tk.splitlist(
            self._tk.call("trace", "vinfo", self._name))]
    def __eq__(self, other):
        """Comparison for equality (==).

        Note: if the Variable's master matters to behavior
        also compare self._master == other._master

        NOTE(review): defining __eq__ without __hash__ makes instances
        unhashable in Python 3 — confirm whether that is intended.
        """
        return self.__class__.__name__ == other.__class__.__name__ \
            and self._name == other._name
class StringVar(Variable):
    """Holder for a Tcl variable whose Python value is a string."""
    _default = ""
    def __init__(self, master=None, value=None, name=None):
        """Create a string-valued Tcl variable.

        master -- optional master widget; value -- initial value
        (defaults to ""); name -- optional Tcl name (defaults to
        PY_VARnum).  If NAME names an existing variable and VALUE is
        omitted, the existing value is kept.
        """
        Variable.__init__(self, master, value, name)
    def get(self):
        """Return the variable's current value as a str."""
        current = self._tk.globalgetvar(self._name)
        return current if isinstance(current, str) else str(current)
class IntVar(Variable):
    """Holder for a Tcl variable whose Python value is an integer."""
    _default = 0
    def __init__(self, master=None, value=None, name=None):
        """Create an integer-valued Tcl variable.

        master -- optional master widget; value -- initial value
        (defaults to 0); name -- optional Tcl name (defaults to
        PY_VARnum).  If NAME names an existing variable and VALUE is
        omitted, the existing value is kept.
        """
        Variable.__init__(self, master, value, name)
    def get(self):
        """Return the variable's current value as an int."""
        raw = self._tk.globalgetvar(self._name)
        return getint(raw)
class DoubleVar(Variable):
    """Holder for a Tcl variable whose Python value is a float."""
    _default = 0.0
    def __init__(self, master=None, value=None, name=None):
        """Create a float-valued Tcl variable.

        master -- optional master widget; value -- initial value
        (defaults to 0.0); name -- optional Tcl name (defaults to
        PY_VARnum).  If NAME names an existing variable and VALUE is
        omitted, the existing value is kept.
        """
        Variable.__init__(self, master, value, name)
    def get(self):
        """Return the variable's current value as a float."""
        raw = self._tk.globalgetvar(self._name)
        return getdouble(raw)
class BooleanVar(Variable):
    """Holder for a Tcl variable whose Python value is a boolean."""
    _default = False
    def __init__(self, master=None, value=None, name=None):
        """Create a boolean-valued Tcl variable.

        master -- optional master widget; value -- initial value
        (defaults to False); name -- optional Tcl name (defaults to
        PY_VARnum).  If NAME names an existing variable and VALUE is
        omitted, the existing value is kept.
        """
        Variable.__init__(self, master, value, name)
    def get(self):
        """Return the variable's current value as a bool."""
        raw = self._tk.globalgetvar(self._name)
        try:
            return self._tk.getboolean(raw)
        except TclError:
            # Surface a plain ValueError so callers need not know Tcl.
            raise ValueError("invalid literal for getboolean()")
def mainloop(n=0):
    """Run the main loop of Tcl.

    Delegates to the default root's interpreter; N is forwarded
    unchanged to the underlying tk.mainloop call.  Assumes a Tk root
    has already been created (presumably _default_root is not None —
    otherwise this raises AttributeError)."""
    _default_root.tk.mainloop(n)
getint = int
getdouble = float
def getboolean(s):
    """Convert true and false to integer values 1 and 0.

    Delegates the conversion to the default root's Tcl interpreter;
    a Tcl-level failure is re-raised as ValueError so callers need
    not know about TclError."""
    try:
        return _default_root.tk.getboolean(s)
    except TclError:
        raise ValueError("invalid literal for getboolean()")
# Methods defined on both toplevel and interior widgets
class Misc:
"""Internal class.
Base class which defines methods common for interior widgets."""
# XXX font command?
_tclCommands = None
def destroy(self):
    """Internal function.

    Delete all Tcl commands created for
    this widget in the Tcl interpreter."""
    if self._tclCommands is not None:
        for name in self._tclCommands:
            #print '- Tkinter: deleted command', name
            self.tk.deletecommand(name)
        # Drop the bookkeeping list so a second destroy() is a no-op.
        self._tclCommands = None
def deletecommand(self, name):
    """Internal function.

    Delete the Tcl command provided in NAME."""
    #print '- Tkinter: deleted command', name
    self.tk.deletecommand(name)
    # NOTE(review): if _tclCommands is still None this raises
    # AttributeError, not ValueError — confirm callers always register
    # a command (which creates the list) before deleting one.
    try:
        self._tclCommands.remove(name)
    except ValueError:
        # Command was not tracked in this widget's list; already gone.
        pass
def tk_strictMotif(self, boolean=None):
"""Set Tcl internal variable, whether the look and feel
should adhere to Motif.
A parameter of 1 means adhere to Motif (e.g. no color
change if mouse passes over slider).
Returns the set value."""
return self.tk.getboolean(self.tk.call(
'set', 'tk_strictMotif', boolean))
def tk_bisque(self):
"""Change the color scheme to light brown as used in Tk 3.6 and before."""
self.tk.call('tk_bisque')
def tk_setPalette(self, *args, **kw):
"""Set a new color scheme for all widget elements.
A single color as argument will cause that all colors of Tk
widget elements are derived from this.
Alternatively several keyword parameters and its associated
colors can be given. The following keywords are valid:
activeBackground, foreground, selectColor,
activeForeground, highlightBackground, selectBackground,
background, highlightColor, selectForeground,
disabledForeground, insertBackground, troughColor."""
self.tk.call(('tk_setPalette',)
+ _flatten(args) + _flatten(list(kw.items())))
def tk_menuBar(self, *args):
"""Do not use. Needed in Tk 3.6 and earlier."""
pass # obsolete since Tk 4.0
def wait_variable(self, name='PY_VAR'):
"""Wait until the variable is modified.
A parameter of type IntVar, StringVar, DoubleVar or
BooleanVar must be given."""
self.tk.call('tkwait', 'variable', name)
waitvar = wait_variable # XXX b/w compat
def wait_window(self, window=None):
"""Wait until a WIDGET is destroyed.
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'window', window._w)
def wait_visibility(self, window=None):
"""Wait until the visibility of a WIDGET changes
(e.g. it appears).
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'visibility', window._w)
def setvar(self, name='PY_VAR', value='1'):
"""Set Tcl variable NAME to VALUE."""
self.tk.setvar(name, value)
def getvar(self, name='PY_VAR'):
"""Return value of Tcl variable NAME."""
return self.tk.getvar(name)
getint = int
getdouble = float
def getboolean(self, s):
    """Return a boolean value for Tcl boolean values true and false given as parameter."""
    try:
        return self.tk.getboolean(s)
    except TclError:
        # Translate the Tcl-level failure into a plain ValueError.
        raise ValueError("invalid literal for getboolean()")
def focus_set(self):
"""Direct input focus to this widget.
If the application currently does not have the focus
this widget will get the focus if the application gets
the focus through the window manager."""
self.tk.call('focus', self._w)
focus = focus_set # XXX b/w compat?
def focus_force(self):
"""Direct input focus to this widget even if the
application does not have the focus. Use with
caution!"""
self.tk.call('focus', '-force', self._w)
def focus_get(self):
"""Return the widget which has currently the focus in the
application.
Use focus_displayof to allow working with several
displays. Return None if application does not have
the focus."""
name = self.tk.call('focus')
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_displayof(self):
"""Return the widget which has currently the focus on the
display where this widget is located.
Return None if the application does not have the focus."""
name = self.tk.call('focus', '-displayof', self._w)
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_lastfor(self):
"""Return the widget which would have the focus if top level
for this widget gets the focus from the window manager."""
name = self.tk.call('focus', '-lastfor', self._w)
if name == 'none' or not name: return None
return self._nametowidget(name)
def tk_focusFollowsMouse(self):
"""The widget under mouse will get automatically focus. Can not
be disabled easily."""
self.tk.call('tk_focusFollowsMouse')
def tk_focusNext(self):
"""Return the next widget in the focus order which follows
widget which has currently the focus.
The focus order first goes to the next child, then to
the children of the child recursively and then to the
next sibling which is higher in the stacking order. A
widget is omitted if it has the takefocus resource set
to 0."""
name = self.tk.call('tk_focusNext', self._w)
if not name: return None
return self._nametowidget(name)
def tk_focusPrev(self):
"""Return previous widget in the focus order. See tk_focusNext for details."""
name = self.tk.call('tk_focusPrev', self._w)
if not name: return None
return self._nametowidget(name)
def after(self, ms, func=None, *args):
    """Call function once after given time.

    MS specifies the time in milliseconds. FUNC gives the
    function which shall be called. Additional parameters
    are given as parameters to the function call.  Return
    identifier to cancel scheduling with after_cancel.

    Without FUNC this blocks for MS milliseconds inside Tcl and
    returns None."""
    if not func:
        # I'd rather use time.sleep(ms*0.001)
        self.tk.call('after', ms)
    else:
        def callit():
            try:
                func(*args)
            finally:
                # The timer fires only once: unregister the Tcl command
                # afterwards so it cannot leak in the interpreter.
                try:
                    self.deletecommand(name)
                except TclError:
                    pass
        name = self._register(callit)
        return self.tk.call('after', ms, name)
def after_idle(self, func, *args):
"""Call FUNC once if the Tcl main loop has no event to
process.
Return an identifier to cancel the scheduling with
after_cancel."""
return self.after('idle', func, *args)
def after_cancel(self, id):
    """Cancel scheduling of function identified with ID.

    Identifier returned by after or after_idle must be
    given as first parameter."""
    try:
        data = self.tk.call('after', 'info', id)
        # In Tk 8.3, splitlist returns: (script, type)
        # In Tk 8.4, splitlist may return (script, type) or (script,)
        script = self.tk.splitlist(data)[0]
        self.deletecommand(script)
    except TclError:
        # Timer already fired or ID unknown: nothing left to unregister.
        pass
    self.tk.call('after', 'cancel', id)
def bell(self, displayof=0):
"""Ring a display's bell."""
self.tk.call(('bell',) + self._displayof(displayof))
# Clipboard handling:
def clipboard_get(self, **kw):
    """Retrieve data from the clipboard on window's display.

    The window keyword defaults to the root window of the Tkinter
    application.

    The type keyword specifies the form in which the data is
    to be returned and should be an atom name such as STRING
    or FILE_NAME.  Type defaults to STRING, except on X11, where the default
    is to try UTF8_STRING and fall back to STRING.

    This command is equivalent to:

    selection_get(CLIPBOARD)
    """
    if 'type' not in kw and self._windowingsystem == 'x11':
        try:
            # Prefer UTF8_STRING on X11; fall back to the plain request
            # below when the clipboard owner does not support it.
            kw['type'] = 'UTF8_STRING'
            return self.tk.call(('clipboard', 'get') + self._options(kw))
        except TclError:
            del kw['type']
    return self.tk.call(('clipboard', 'get') + self._options(kw))
def clipboard_clear(self, **kw):
"""Clear the data in the Tk clipboard.
A widget specified for the optional displayof keyword
argument specifies the target display."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('clipboard', 'clear') + self._options(kw))
def clipboard_append(self, string, **kw):
"""Append STRING to the Tk clipboard.
A widget specified at the optional displayof keyword
argument specifies the target display. The clipboard
can be retrieved with selection_get."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('clipboard', 'append') + self._options(kw)
+ ('--', string))
# XXX grab current w/o window argument
def grab_current(self):
"""Return widget which has currently the grab in this application
or None."""
name = self.tk.call('grab', 'current', self._w)
if not name: return None
return self._nametowidget(name)
def grab_release(self):
"""Release grab for this widget if currently set."""
self.tk.call('grab', 'release', self._w)
def grab_set(self):
"""Set grab for this widget.
A grab directs all events to this and descendant
widgets in the application."""
self.tk.call('grab', 'set', self._w)
def grab_set_global(self):
"""Set global grab for this widget.
A global grab directs all events to this and
descendant widgets on the display. Use with caution -
other applications do not get events anymore."""
self.tk.call('grab', 'set', '-global', self._w)
def grab_status(self):
"""Return None, "local" or "global" if this widget has
no, a local or a global grab."""
status = self.tk.call('grab', 'status', self._w)
if status == 'none': status = None
return status
def option_add(self, pattern, value, priority = None):
"""Set a VALUE (second parameter) for an option
PATTERN (first parameter).
An optional third parameter gives the numeric priority
(defaults to 80)."""
self.tk.call('option', 'add', pattern, value, priority)
def option_clear(self):
"""Clear the option database.
It will be reloaded if option_add is called."""
self.tk.call('option', 'clear')
def option_get(self, name, className):
"""Return the value for an option NAME for this widget
with CLASSNAME.
Values with higher priority override lower values."""
return self.tk.call('option', 'get', self._w, name, className)
def option_readfile(self, fileName, priority = None):
"""Read file FILENAME into the option database.
An optional second parameter gives the numeric
priority."""
self.tk.call('option', 'readfile', fileName, priority)
def selection_clear(self, **kw):
"""Clear the current X selection."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('selection', 'clear') + self._options(kw))
def selection_get(self, **kw):
"""Return the contents of the current X selection.
A keyword parameter selection specifies the name of
the selection and defaults to PRIMARY. A keyword
parameter displayof specifies a widget on the display
to use. A keyword parameter type specifies the form of data to be
fetched, defaulting to STRING except on X11, where UTF8_STRING is tried
before STRING."""
if 'displayof' not in kw: kw['displayof'] = self._w
if 'type' not in kw and self._windowingsystem == 'x11':
try:
kw['type'] = 'UTF8_STRING'
return self.tk.call(('selection', 'get') + self._options(kw))
except TclError:
del kw['type']
return self.tk.call(('selection', 'get') + self._options(kw))
def selection_handle(self, command, **kw):
"""Specify a function COMMAND to call if the X
selection owned by this widget is queried by another
application.
This function must return the contents of the
selection. The function will be called with the
arguments OFFSET and LENGTH which allows the chunking
of very long selections. The following keyword
parameters can be provided:
selection - name of the selection (default PRIMARY),
type - type of the selection (e.g. STRING, FILE_NAME)."""
name = self._register(command)
self.tk.call(('selection', 'handle') + self._options(kw)
+ (self._w, name))
def selection_own(self, **kw):
"""Become owner of X selection.
A keyword parameter selection specifies the name of
the selection (default PRIMARY)."""
self.tk.call(('selection', 'own') +
self._options(kw) + (self._w,))
def selection_own_get(self, **kw):
"""Return owner of X selection.
The following keyword parameter can
be provided:
selection - name of the selection (default PRIMARY),
type - type of the selection (e.g. STRING, FILE_NAME)."""
if 'displayof' not in kw: kw['displayof'] = self._w
name = self.tk.call(('selection', 'own') + self._options(kw))
if not name: return None
return self._nametowidget(name)
def send(self, interp, cmd, *args):
"""Send Tcl command CMD to different interpreter INTERP to be executed."""
return self.tk.call(('send', interp, cmd) + args)
def lower(self, belowThis=None):
"""Lower this widget in the stacking order."""
self.tk.call('lower', self._w, belowThis)
def tkraise(self, aboveThis=None):
"""Raise this widget in the stacking order."""
self.tk.call('raise', self._w, aboveThis)
lift = tkraise
def colormodel(self, value=None):
"""Useless. Not implemented in Tk."""
return self.tk.call('tk', 'colormodel', self._w, value)
def winfo_atom(self, name, displayof=0):
"""Return integer which represents atom NAME."""
args = ('winfo', 'atom') + self._displayof(displayof) + (name,)
return getint(self.tk.call(args))
def winfo_atomname(self, id, displayof=0):
"""Return name of atom with identifier ID."""
args = ('winfo', 'atomname') \
+ self._displayof(displayof) + (id,)
return self.tk.call(args)
def winfo_cells(self):
"""Return number of cells in the colormap for this widget."""
return getint(
self.tk.call('winfo', 'cells', self._w))
def winfo_children(self):
"""Return a list of all widgets which are children of this widget."""
result = []
for child in self.tk.splitlist(
self.tk.call('winfo', 'children', self._w)):
try:
# Tcl sometimes returns extra windows, e.g. for
# menus; those need to be skipped
result.append(self._nametowidget(child))
except KeyError:
pass
return result
def winfo_class(self):
"""Return window class name of this widget."""
return self.tk.call('winfo', 'class', self._w)
def winfo_colormapfull(self):
"""Return true if at the last color request the colormap was full."""
return self.tk.getboolean(
self.tk.call('winfo', 'colormapfull', self._w))
def winfo_containing(self, rootX, rootY, displayof=0):
"""Return the widget which is at the root coordinates ROOTX, ROOTY."""
args = ('winfo', 'containing') \
+ self._displayof(displayof) + (rootX, rootY)
name = self.tk.call(args)
if not name: return None
return self._nametowidget(name)
def winfo_depth(self):
"""Return the number of bits per pixel."""
return getint(self.tk.call('winfo', 'depth', self._w))
def winfo_exists(self):
"""Return true if this widget exists."""
return getint(
self.tk.call('winfo', 'exists', self._w))
def winfo_fpixels(self, number):
"""Return the number of pixels for the given distance NUMBER
(e.g. "3c") as float."""
return getdouble(self.tk.call(
'winfo', 'fpixels', self._w, number))
def winfo_geometry(self):
"""Return geometry string for this widget in the form "widthxheight+X+Y"."""
return self.tk.call('winfo', 'geometry', self._w)
def winfo_height(self):
"""Return height of this widget."""
return getint(
self.tk.call('winfo', 'height', self._w))
def winfo_id(self):
"""Return identifier ID for this widget."""
return self.tk.getint(
self.tk.call('winfo', 'id', self._w))
def winfo_interps(self, displayof=0):
"""Return the name of all Tcl interpreters for this display."""
args = ('winfo', 'interps') + self._displayof(displayof)
return self.tk.splitlist(self.tk.call(args))
def winfo_ismapped(self):
"""Return true if this widget is mapped."""
return getint(
self.tk.call('winfo', 'ismapped', self._w))
def winfo_manager(self):
"""Return the window mananger name for this widget."""
return self.tk.call('winfo', 'manager', self._w)
def winfo_name(self):
"""Return the name of this widget."""
return self.tk.call('winfo', 'name', self._w)
def winfo_parent(self):
"""Return the name of the parent of this widget."""
return self.tk.call('winfo', 'parent', self._w)
def winfo_pathname(self, id, displayof=0):
"""Return the pathname of the widget given by ID."""
args = ('winfo', 'pathname') \
+ self._displayof(displayof) + (id,)
return self.tk.call(args)
def winfo_pixels(self, number):
"""Rounded integer value of winfo_fpixels."""
return getint(
self.tk.call('winfo', 'pixels', self._w, number))
# -- Thin wrappers around the Tcl "winfo" command: each query forwards to
# -- self.tk.call('winfo', ...) and converts the string result where needed.
def winfo_pointerx(self):
    """Return the x coordinate of the pointer on the root window."""
    return getint(
        self.tk.call('winfo', 'pointerx', self._w))

def winfo_pointerxy(self):
    """Return a tuple of x and y coordinates of the pointer on the root window."""
    return self._getints(
        self.tk.call('winfo', 'pointerxy', self._w))

def winfo_pointery(self):
    """Return the y coordinate of the pointer on the root window."""
    return getint(
        self.tk.call('winfo', 'pointery', self._w))

def winfo_reqheight(self):
    """Return requested height of this widget."""
    return getint(
        self.tk.call('winfo', 'reqheight', self._w))

def winfo_reqwidth(self):
    """Return requested width of this widget."""
    return getint(
        self.tk.call('winfo', 'reqwidth', self._w))

def winfo_rgb(self, color):
    """Return a tuple of decimal values for red, green, blue for
    COLOR in this widget."""
    return self._getints(
        self.tk.call('winfo', 'rgb', self._w, color))

def winfo_rootx(self):
    """Return x coordinate of upper left corner of this widget on the
    root window."""
    return getint(
        self.tk.call('winfo', 'rootx', self._w))

def winfo_rooty(self):
    """Return y coordinate of upper left corner of this widget on the
    root window."""
    return getint(
        self.tk.call('winfo', 'rooty', self._w))

def winfo_screen(self):
    """Return the screen name of this widget."""
    return self.tk.call('winfo', 'screen', self._w)

def winfo_screencells(self):
    """Return the number of cells in the colormap of the screen
    of this widget."""
    return getint(
        self.tk.call('winfo', 'screencells', self._w))

def winfo_screendepth(self):
    """Return the number of bits per pixel of the root window of the
    screen of this widget."""
    return getint(
        self.tk.call('winfo', 'screendepth', self._w))

def winfo_screenheight(self):
    """Return the number of pixels of the height of the screen of this widget
    in pixels."""
    return getint(
        self.tk.call('winfo', 'screenheight', self._w))

def winfo_screenmmheight(self):
    """Return the number of pixels of the height of the screen of
    this widget in mm."""
    return getint(
        self.tk.call('winfo', 'screenmmheight', self._w))

def winfo_screenmmwidth(self):
    """Return the number of pixels of the width of the screen of
    this widget in mm."""
    return getint(
        self.tk.call('winfo', 'screenmmwidth', self._w))

def winfo_screenvisual(self):
    """Return one of the strings directcolor, grayscale, pseudocolor,
    staticcolor, staticgray, or truecolor for the default
    colormodel of this screen."""
    return self.tk.call('winfo', 'screenvisual', self._w)

def winfo_screenwidth(self):
    """Return the number of pixels of the width of the screen of
    this widget in pixels."""
    return getint(
        self.tk.call('winfo', 'screenwidth', self._w))

def winfo_server(self):
    """Return information of the X-Server of the screen of this widget in
    the form "XmajorRminor vendor vendorVersion"."""
    return self.tk.call('winfo', 'server', self._w)

def winfo_toplevel(self):
    """Return the toplevel widget of this widget."""
    return self._nametowidget(self.tk.call(
        'winfo', 'toplevel', self._w))

def winfo_viewable(self):
    """Return true if the widget and all its higher ancestors are mapped."""
    return getint(
        self.tk.call('winfo', 'viewable', self._w))

def winfo_visual(self):
    """Return one of the strings directcolor, grayscale, pseudocolor,
    staticcolor, staticgray, or truecolor for the
    colormodel of this widget."""
    return self.tk.call('winfo', 'visual', self._w)

def winfo_visualid(self):
    """Return the X identifier for the visual for this widget."""
    return self.tk.call('winfo', 'visualid', self._w)

def winfo_visualsavailable(self, includeids=0):
    """Return a list of all visuals available for the screen
    of this widget.

    Each item in the list consists of a visual name (see winfo_visual), a
    depth and if INCLUDEIDS=1 is given also the X identifier."""
    # Tcl may return either a single item or a list; normalize to a list
    # of split items before parsing.
    data = self.tk.split(
        self.tk.call('winfo', 'visualsavailable', self._w,
                     includeids and 'includeids' or None))
    if isinstance(data, str):
        data = [self.tk.split(data)]
    return [self.__winfo_parseitem(x) for x in data]

def __winfo_parseitem(self, t):
    """Internal function.  Keep the visual name, convert the rest to int."""
    return t[:1] + tuple(map(self.__winfo_getint, t[1:]))

def __winfo_getint(self, x):
    """Internal function.  Base 0 lets Python infer hex/octal prefixes."""
    return int(x, 0)

def winfo_vrootheight(self):
    """Return the height of the virtual root window associated with this
    widget in pixels. If there is no virtual root window return the
    height of the screen."""
    return getint(
        self.tk.call('winfo', 'vrootheight', self._w))

def winfo_vrootwidth(self):
    """Return the width of the virtual root window associated with this
    widget in pixels. If there is no virtual root window return the
    width of the screen."""
    return getint(
        self.tk.call('winfo', 'vrootwidth', self._w))

def winfo_vrootx(self):
    """Return the x offset of the virtual root relative to the root
    window of the screen of this widget."""
    return getint(
        self.tk.call('winfo', 'vrootx', self._w))

def winfo_vrooty(self):
    """Return the y offset of the virtual root relative to the root
    window of the screen of this widget."""
    return getint(
        self.tk.call('winfo', 'vrooty', self._w))

def winfo_width(self):
    """Return the width of this widget."""
    return getint(
        self.tk.call('winfo', 'width', self._w))

def winfo_x(self):
    """Return the x coordinate of the upper left corner of this widget
    in the parent."""
    return getint(
        self.tk.call('winfo', 'x', self._w))

def winfo_y(self):
    """Return the y coordinate of the upper left corner of this widget
    in the parent."""
    return getint(
        self.tk.call('winfo', 'y', self._w))

def update(self):
    """Enter event loop until all pending events have been processed by Tcl."""
    self.tk.call('update')

def update_idletasks(self):
    """Enter event loop until all idle callbacks have been called. This
    will update the display of windows but not process events caused by
    the user."""
    self.tk.call('update', 'idletasks')
def bindtags(self, tagList=None):
    """Set or get the list of bindtags for this widget.

    With no argument return the list of all bindtags associated with
    this widget. With a list of strings as argument the bindtags are
    set to this list. The bindtags determine in which order events are
    processed (see bind)."""
    if tagList is not None:
        # Setter path: hand the new tag list straight to Tcl.
        self.tk.call('bindtags', self._w, tagList)
        return None
    # Getter path: ask Tcl for the current tags and split the result.
    current = self.tk.call('bindtags', self._w)
    return self.tk.splitlist(current)
def _bind(self, what, sequence, func, add, needcleanup=1):
    """Internal function.  Shared implementation of bind/bind_all/bind_class.

    WHAT is a tuple prefix of the Tcl command (e.g. ('bind', widget_path)).
    Dispatch depends on the argument combination:
    - FUNC is a string: pass it verbatim as the Tcl binding script.
    - FUNC is a callable: register it as a Tcl command and bind a script
      that calls it; returns the generated function id for later unbind.
    - only SEQUENCE given: return the current binding for that sequence.
    - nothing given: return the list of bound sequences.
    """
    if isinstance(func, str):
        self.tk.call(what + (sequence, func))
    elif func:
        funcid = self._register(func, self._substitute,
                                needcleanup)
        # The generated script invokes the registered command with the
        # %-substitution fields and breaks out of the binding chain if
        # the Python callback returned the string "break".
        cmd = ('%sif {"[%s %s]" == "break"} break\n'
               %
               (add and '+' or '',  # '+' appends instead of replacing
                funcid, self._subst_format_str))
        self.tk.call(what + (sequence, cmd))
        return funcid
    elif sequence:
        return self.tk.call(what + (sequence,))
    else:
        return self.tk.splitlist(self.tk.call(what))
def bind(self, sequence=None, func=None, add=None):
    """Bind to this widget at event SEQUENCE a call to function FUNC.

    SEQUENCE is a string of concatenated event
    patterns. An event pattern is of the form
    <MODIFIER-MODIFIER-TYPE-DETAIL> where MODIFIER is one
    of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4,
    Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3,
    B3, Alt, Button4, B4, Double, Button5, B5 Triple,
    Mod1, M1. TYPE is one of Activate, Enter, Map,
    ButtonPress, Button, Expose, Motion, ButtonRelease
    FocusIn, MouseWheel, Circulate, FocusOut, Property,
    Colormap, Gravity Reparent, Configure, KeyPress, Key,
    Unmap, Deactivate, KeyRelease Visibility, Destroy,
    Leave and DETAIL is the button number for ButtonPress,
    ButtonRelease and DETAIL is the Keysym for KeyPress and
    KeyRelease. Examples are
    <Control-Button-1> for pressing Control and mouse button 1 or
    <Alt-A> for pressing A and the Alt key (KeyPress can be omitted).
    An event pattern can also be a virtual event of the form
    <<AString>> where AString can be arbitrary. This
    event can be generated by event_generate.
    If events are concatenated they must appear shortly
    after each other.

    FUNC will be called if the event sequence occurs with an
    instance of Event as argument. If the return value of FUNC is
    "break" no further bound function is invoked.

    An additional boolean parameter ADD specifies whether FUNC will
    be called additionally to the other bound function or whether
    it will replace the previous function.

    Bind will return an identifier to allow deletion of the bound function with
    unbind without memory leak.

    If FUNC or SEQUENCE is omitted the bound function or list
    of bound events are returned."""
    # All real work happens in _bind; this just supplies the Tcl prefix.
    return self._bind(('bind', self._w), sequence, func, add)
def unbind(self, sequence, funcid=None):
    """Unbind for this widget for event SEQUENCE the
    function identified with FUNCID."""
    # Clear the Tcl-side binding first, then drop the registered
    # command so the Python callback can be garbage collected.
    self.tk.call('bind', self._w, sequence, '')
    if funcid:
        self.deletecommand(funcid)

def bind_all(self, sequence=None, func=None, add=None):
    """Bind to all widgets at an event SEQUENCE a call to function FUNC.
    An additional boolean parameter ADD specifies whether FUNC will
    be called additionally to the other bound function or whether
    it will replace the previous function. See bind for the return value."""
    # needcleanup=0: 'all' bindings outlive any single widget.
    return self._bind(('bind', 'all'), sequence, func, add, 0)

def unbind_all(self, sequence):
    """Unbind for all widgets for event SEQUENCE all functions."""
    self.tk.call('bind', 'all' , sequence, '')

def bind_class(self, className, sequence=None, func=None, add=None):
    """Bind to widgets with bindtag CLASSNAME at event
    SEQUENCE a call of function FUNC. An additional
    boolean parameter ADD specifies whether FUNC will be
    called additionally to the other bound function or
    whether it will replace the previous function. See bind for
    the return value."""
    return self._bind(('bind', className), sequence, func, add, 0)

def unbind_class(self, className, sequence):
    """Unbind for all widgets with bindtag CLASSNAME for event SEQUENCE
    all functions."""
    self.tk.call('bind', className , sequence, '')

def mainloop(self, n=0):
    """Call the mainloop of Tk."""
    self.tk.mainloop(n)

def quit(self):
    """Quit the Tcl interpreter. All widgets will be destroyed."""
    self.tk.quit()
def _getints(self, string):
    """Internal function.  Convert a Tcl list string to a tuple of ints.

    Returns None (implicitly) when STRING is empty."""
    if string:
        return tuple(map(getint, self.tk.splitlist(string)))

def _getdoubles(self, string):
    """Internal function.  Convert a Tcl list string to a tuple of floats.

    Returns None (implicitly) when STRING is empty."""
    if string:
        return tuple(map(getdouble, self.tk.splitlist(string)))

def _getboolean(self, string):
    """Internal function.  Convert a Tcl boolean string to a Python bool.

    Returns None (implicitly) when STRING is empty."""
    if string:
        return self.tk.getboolean(string)

def _displayof(self, displayof):
    """Internal function.  Build the ('-displayof', window) option pair.

    DISPLAYOF=None means "use this widget"; any other falsy value
    (e.g. 0) suppresses the option entirely."""
    if displayof:
        return ('-displayof', displayof)
    if displayof is None:
        return ('-displayof', self._w)
    return ()
@property
def _windowingsystem(self):
    """Internal function.  Name of the windowing system ('x11', 'win32',
    'aqua'), cached on the root widget after the first Tcl query."""
    try:
        # Fast path: a previous access already cached the value.
        return self._root()._windowingsystem_cached
    except AttributeError:
        ws = self._root()._windowingsystem_cached = \
            self.tk.call('tk', 'windowingsystem')
        return ws
def _options(self, cnf, kw = None):
    """Internal function.  Flatten CNF (dict/tuple) and KW into a tuple of
    Tcl-style ('-option', value) pairs, skipping options whose value is
    None."""
    if kw:
        cnf = _cnfmerge((cnf, kw))
    else:
        cnf = _cnfmerge(cnf)
    res = ()
    for k, v in cnf.items():
        if v is not None:
            # Trailing underscore convention lets callers pass reserved
            # words as keywords (e.g. class_ -> class).
            if k[-1] == '_': k = k[:-1]
            if callable(v):
                v = self._register(v)
            elif isinstance(v, (tuple, list)):
                nv = []
                for item in v:
                    if isinstance(item, int):
                        nv.append(str(item))
                    elif isinstance(item, str):
                        nv.append(_stringify(item))
                    else:
                        # Non-int/str element: give up on conversion and
                        # pass the original sequence through unchanged.
                        break
                else:
                    # for/else: only joined when the loop did NOT break.
                    v = ' '.join(nv)
            res = res + ('-'+k, v)
    return res
def nametowidget(self, name):
    """Return the Tkinter instance of a widget identified by
    its Tcl name NAME."""
    parts = str(name).split('.')
    widget = self
    if not parts[0]:
        # An empty first component means the path began with '.', i.e.
        # it is absolute: resolve it starting from the root widget.
        widget = widget._root()
        parts = parts[1:]
    for component in parts:
        if not component:
            break
        widget = widget.children[component]
    return widget

_nametowidget = nametowidget
def _register(self, func, subst=None, needcleanup=1):
    """Return a newly created Tcl function. If this
    function is called, the Python function FUNC will
    be executed. An optional function SUBST can
    be given which will be executed before FUNC."""
    f = CallWrapper(func, subst, self).__call__
    # Command name: unique id, plus the function's name for readability
    # in Tcl-side error messages when available.
    name = repr(id(f))
    try:
        func = func.__func__
    except AttributeError:
        pass
    try:
        name = name + func.__name__
    except AttributeError:
        pass
    self.tk.createcommand(name, f)
    if needcleanup:
        # Remember the command so it can be deleted when the widget is
        # destroyed, avoiding a Tcl-side leak.
        if self._tclCommands is None:
            self._tclCommands = []
        self._tclCommands.append(name)
    return name

register = _register

def _root(self):
    """Internal function.  Walk up the master chain to the root widget."""
    w = self
    while w.master: w = w.master
    return w
# %-substitution fields requested from Tk for event callbacks; the order
# here must match the unpacking order in _substitute below.
_subst_format = ('%#', '%b', '%f', '%h', '%k',
                 '%s', '%t', '%w', '%x', '%y',
                 '%A', '%E', '%K', '%N', '%W', '%T', '%X', '%Y', '%D')
_subst_format_str = " ".join(_subst_format)

def _substitute(self, *args):
    """Internal function.  Convert the raw %-substitution strings Tk passes
    to an event callback into a single Event instance."""
    if len(args) != len(self._subst_format): return args
    getboolean = self.tk.getboolean

    getint = int
    def getint_event(s):
        """Tk changed behavior in 8.4.2, returning "??" rather more often."""
        try:
            return int(s)
        except ValueError:
            return s

    nsign, b, f, h, k, s, t, w, x, y, A, E, K, N, W, T, X, Y, D = args
    # Missing: (a, c, d, m, o, v, B, R)
    e = Event()
    # serial field: valid for all events
    # number of button: ButtonPress and ButtonRelease events only
    # height field: Configure, ConfigureRequest, Create,
    # ResizeRequest, and Expose events only
    # keycode field: KeyPress and KeyRelease events only
    # time field: "valid for events that contain a time field"
    # width field: Configure, ConfigureRequest, Create, ResizeRequest,
    # and Expose events only
    # x field: "valid for events that contain a x field"
    # y field: "valid for events that contain a y field"
    # keysym as decimal: KeyPress and KeyRelease events only
    # x_root, y_root fields: ButtonPress, ButtonRelease, KeyPress,
    # KeyRelease, and Motion events
    e.serial = getint(nsign)
    e.num = getint_event(b)
    try: e.focus = getboolean(f)
    except TclError: pass
    e.height = getint_event(h)
    e.keycode = getint_event(k)
    e.state = getint_event(s)
    e.time = getint_event(t)
    e.width = getint_event(w)
    e.x = getint_event(x)
    e.y = getint_event(y)
    e.char = A
    try: e.send_event = getboolean(E)
    except TclError: pass
    e.keysym = K
    e.keysym_num = getint_event(N)
    e.type = T
    try:
        e.widget = self._nametowidget(W)
    except KeyError:
        # The widget may already be destroyed; fall back to its path name.
        e.widget = W
    e.x_root = getint_event(X)
    e.y_root = getint_event(Y)
    try:
        e.delta = getint(D)
    except ValueError:
        e.delta = 0
    return (e,)
def _report_exception(self):
    """Internal function.  Route the current exception to the root
    widget's report_callback_exception hook."""
    exc, val, tb = sys.exc_info()
    root = self._root()
    root.report_callback_exception(exc, val, tb)

def _getconfigure(self, *args):
    """Call Tcl configure command and return the result as a dict."""
    cnf = {}
    for x in self.tk.splitlist(self.tk.call(*args)):
        x = self.tk.splitlist(x)
        # Key is the option name with its leading '-' stripped.
        cnf[x[0][1:]] = (x[0][1:],) + x[1:]
    return cnf

def _getconfigure1(self, *args):
    """Call Tcl configure command for a single option and return its
    description tuple (name without leading '-', then the details)."""
    x = self.tk.splitlist(self.tk.call(*args))
    return (x[0][1:],) + x[1:]

def _configure(self, cmd, cnf, kw):
    """Internal function.  Query (cnf None or a string) or set (dict/kw)
    configuration options via the widget's CMD subcommand."""
    if kw:
        cnf = _cnfmerge((cnf, kw))
    elif cnf:
        cnf = _cnfmerge(cnf)
    if cnf is None:
        return self._getconfigure(_flatten((self._w, cmd)))
    if isinstance(cnf, str):
        return self._getconfigure1(_flatten((self._w, cmd, '-'+cnf)))
    self.tk.call(_flatten((self._w, cmd)) + self._options(cnf))
# These used to be defined in Widget:
def configure(self, cnf=None, **kw):
    """Configure resources of a widget.

    The values for resources are specified as keyword
    arguments. To get an overview about
    the allowed keyword arguments call the method keys.
    """
    return self._configure('configure', cnf, kw)

config = configure

def cget(self, key):
    """Return the resource value for a KEY given as string."""
    return self.tk.call(self._w, 'cget', '-' + key)

__getitem__ = cget

def __setitem__(self, key, value):
    """Set the resource KEY to VALUE (widget[key] = value)."""
    self.configure({key: value})

def keys(self):
    """Return a list of all resource names of this widget."""
    return [x[0][1:] for x in
            self.tk.splitlist(self.tk.call(self._w, 'configure'))]

def __str__(self):
    """Return the window path name of this widget."""
    return self._w
# Pack methods that apply to the master
# Sentinel default: lets pack_propagate/grid_propagate distinguish
# "no argument" from any real flag value (including None or 0).
_noarg_ = ['_noarg_']

def pack_propagate(self, flag=_noarg_):
    """Set or get the status for propagation of geometry information.

    A boolean argument specifies whether the geometry information
    of the slaves will determine the size of this widget. If no argument
    is given the current setting will be returned.
    """
    if flag is Misc._noarg_:
        return self._getboolean(self.tk.call(
            'pack', 'propagate', self._w))
    else:
        self.tk.call('pack', 'propagate', self._w, flag)

propagate = pack_propagate

def pack_slaves(self):
    """Return a list of all slaves of this widget
    in its packing order."""
    return [self._nametowidget(x) for x in
            self.tk.splitlist(
                self.tk.call('pack', 'slaves', self._w))]

slaves = pack_slaves

# Place method that applies to the master
def place_slaves(self):
    """Return a list of all slaves of this widget
    in its packing order."""
    return [self._nametowidget(x) for x in
            self.tk.splitlist(
                self.tk.call(
                    'place', 'slaves', self._w))]
# Grid methods that apply to the master
def grid_anchor(self, anchor=None): # new in Tk 8.5
    """The anchor value controls how to place the grid within the
    master when no row/column has any weight.

    The default anchor is nw."""
    self.tk.call('grid', 'anchor', self._w, anchor)

anchor = grid_anchor

def grid_bbox(self, column=None, row=None, col2=None, row2=None):
    """Return a tuple of integer coordinates for the bounding
    box of this widget controlled by the geometry manager grid.

    If COLUMN, ROW is given the bounding box applies from
    the cell with row and column 0 to the specified
    cell. If COL2 and ROW2 are given the bounding box
    starts at that cell.

    The returned integers specify the offset of the upper left
    corner in the master widget and the width and height.
    """
    args = ('grid', 'bbox', self._w)
    if column is not None and row is not None:
        args = args + (column, row)
    if col2 is not None and row2 is not None:
        args = args + (col2, row2)
    # _getints returns None for an empty result; make that explicit.
    return self._getints(self.tk.call(*args)) or None

bbox = grid_bbox
def _gridconvvalue(self, value):
    """Internal function.  Convert a Tcl grid-option value to int or
    float where possible; an empty string becomes None, anything
    unconvertible is returned unchanged."""
    if not isinstance(value, (str, _tkinter.Tcl_Obj)):
        return value
    svalue = str(value)
    if not svalue:
        return None
    try:
        if '.' in svalue:
            return getdouble(svalue)
        return getint(svalue)
    except ValueError:
        return value
def _grid_configure(self, command, index, cnf, kw):
    """Internal function.  Query or set grid row/column options.

    COMMAND is 'rowconfigure' or 'columnconfigure'; INDEX selects the
    row or column.  With a bare option-name string and no KW, return
    that single converted value; with no options at all, return a dict
    of all options; otherwise apply the given options."""
    if isinstance(cnf, str) and not kw:
        # A bare option name: normalize to Tcl's '-option' form,
        # honoring the trailing-underscore keyword convention.
        if cnf[-1:] == '_':
            cnf = cnf[:-1]
        if cnf[:1] != '-':
            cnf = '-'+cnf
        options = (cnf,)
    else:
        options = self._options(cnf, kw)
    if not options:
        # Query everything: Tcl returns alternating -option/value pairs.
        res = self.tk.call('grid',
                           command, self._w, index)
        words = self.tk.splitlist(res)
        # BUG FIX: the local was named 'dict', shadowing the builtin.
        result = {}
        for i in range(0, len(words), 2):
            key = words[i][1:]
            value = words[i+1]
            result[key] = self._gridconvvalue(value)
        return result
    res = self.tk.call(
        ('grid', command, self._w, index)
        + options)
    if len(options) == 1:
        # Single-option query: return just the converted value.
        return self._gridconvvalue(res)
# NOTE(review): cnf={} is a shared mutable default; it appears to be only
# read (merged/forwarded), never mutated, so it is kept for compatibility.
def grid_columnconfigure(self, index, cnf={}, **kw):
    """Configure column INDEX of a grid.

    Valid resources are minsize (minimum size of the column),
    weight (how much does additional space propagate to this column)
    and pad (how much space to let additionally)."""
    return self._grid_configure('columnconfigure', index, cnf, kw)

columnconfigure = grid_columnconfigure

def grid_location(self, x, y):
    """Return a tuple of column and row which identify the cell
    at which the pixel at position X and Y inside the master
    widget is located."""
    return self._getints(
        self.tk.call(
            'grid', 'location', self._w, x, y)) or None

def grid_propagate(self, flag=_noarg_):
    """Set or get the status for propagation of geometry information.

    A boolean argument specifies whether the geometry information
    of the slaves will determine the size of this widget. If no argument
    is given, the current setting will be returned.
    """
    if flag is Misc._noarg_:
        return self._getboolean(self.tk.call(
            'grid', 'propagate', self._w))
    else:
        self.tk.call('grid', 'propagate', self._w, flag)

def grid_rowconfigure(self, index, cnf={}, **kw):
    """Configure row INDEX of a grid.

    Valid resources are minsize (minimum size of the row),
    weight (how much does additional space propagate to this row)
    and pad (how much space to let additionally)."""
    return self._grid_configure('rowconfigure', index, cnf, kw)

rowconfigure = grid_rowconfigure

def grid_size(self):
    """Return a tuple of the number of column and rows in the grid."""
    return self._getints(
        self.tk.call('grid', 'size', self._w)) or None

size = grid_size
def grid_slaves(self, row=None, column=None):
    """Return a list of all slaves of this widget
    in its packing order."""
    opts = []
    if row is not None:
        opts.extend(('-row', row))
    if column is not None:
        opts.extend(('-column', column))
    paths = self.tk.splitlist(self.tk.call(
        ('grid', 'slaves', self._w) + tuple(opts)))
    return [self._nametowidget(path) for path in paths]
# Support for the "event" command, new in Tk 4.2.
# By Case Roole.

def event_add(self, virtual, *sequences):
    """Bind a virtual event VIRTUAL (of the form <<Name>>)
    to an event SEQUENCE such that the virtual event is triggered
    whenever SEQUENCE occurs."""
    args = ('event', 'add', virtual) + sequences
    self.tk.call(args)

def event_delete(self, virtual, *sequences):
    """Unbind a virtual event VIRTUAL from SEQUENCE."""
    args = ('event', 'delete', virtual) + sequences
    self.tk.call(args)

def event_generate(self, sequence, **kw):
    """Generate an event SEQUENCE. Additional
    keyword arguments specify parameter of the event
    (e.g. x, y, rootx, rooty)."""
    args = ('event', 'generate', self._w, sequence)
    for k, v in kw.items():
        args = args + ('-%s' % k, str(v))
    self.tk.call(args)

def event_info(self, virtual=None):
    """Return a list of all virtual events or the information
    about the SEQUENCE bound to the virtual event VIRTUAL."""
    return self.tk.splitlist(
        self.tk.call('event', 'info', virtual))

# Image related commands
def image_names(self):
    """Return a list of all existing image names."""
    return self.tk.splitlist(self.tk.call('image', 'names'))

def image_types(self):
    """Return a list of all available image types (e.g. photo bitmap)."""
    return self.tk.splitlist(self.tk.call('image', 'types'))
class CallWrapper:
    """Internal class. Stores function to call when some user
    defined Tcl function is called e.g. after an event occurred."""

    def __init__(self, func, subst, widget):
        """Store FUNC, SUBST and WIDGET as members."""
        self.func = func
        self.subst = subst
        self.widget = widget

    def __call__(self, *args):
        """Apply first function SUBST to arguments, then FUNC."""
        try:
            prepared = self.subst(*args) if self.subst else args
            return self.func(*prepared)
        except SystemExit:
            # Let a deliberate interpreter exit propagate untouched.
            raise
        except:
            # Any other exception is routed to the widget's reporting
            # hook instead of crossing the Tcl/Python boundary.
            self.widget._report_exception()
class XView:
    """Mix-in class for querying and changing the horizontal position
    of a widget's window."""

    def xview(self, *args):
        """Query and change the horizontal position of the view."""
        result = self.tk.call(self._w, 'xview', *args)
        if args:
            # Setter form: Tcl's result is not meaningful to the caller.
            return None
        return self._getdoubles(result)

    def xview_moveto(self, fraction):
        """Adjusts the view in the window so that FRACTION of the
        total width of the canvas is off-screen to the left."""
        self.tk.call(self._w, 'xview', 'moveto', fraction)

    def xview_scroll(self, number, what):
        """Shift the x-view according to NUMBER which is measured in "units"
        or "pages" (WHAT)."""
        self.tk.call(self._w, 'xview', 'scroll', number, what)
class YView:
    """Mix-in class for querying and changing the vertical position
    of a widget's window."""

    def yview(self, *args):
        """Query and change the vertical position of the view."""
        result = self.tk.call(self._w, 'yview', *args)
        if args:
            # Setter form: Tcl's result is not meaningful to the caller.
            return None
        return self._getdoubles(result)

    def yview_moveto(self, fraction):
        """Adjusts the view in the window so that FRACTION of the
        total height of the canvas is off-screen to the top."""
        self.tk.call(self._w, 'yview', 'moveto', fraction)

    def yview_scroll(self, number, what):
        """Shift the y-view according to NUMBER which is measured in
        "units" or "pages" (WHAT)."""
        self.tk.call(self._w, 'yview', 'scroll', number, what)
class Wm:
    """Provides functions for the communication with the window manager."""

    def wm_aspect(self,
                  minNumer=None, minDenom=None,
                  maxNumer=None, maxDenom=None):
        """Instruct the window manager to set the aspect ratio (width/height)
        of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple
        of the actual values if no argument is given."""
        return self._getints(
            self.tk.call('wm', 'aspect', self._w,
                         minNumer, minDenom,
                         maxNumer, maxDenom))

    aspect = wm_aspect

    def wm_attributes(self, *args):
        """This subcommand returns or sets platform specific attributes.

        The first form returns a list of the platform specific flags and
        their values. The second form returns the value for the specific
        option. The third form sets one or more of the values. The values
        are as follows:

        On Windows, -disabled gets or sets whether the window is in a
        disabled state. -toolwindow gets or sets the style of the window
        to toolwindow (as defined in the MSDN). -topmost gets or sets
        whether this is a topmost window (displays above all other
        windows).

        On Macintosh, XXXXX

        On Unix, there are currently no special attribute values.
        """
        args = ('wm', 'attributes', self._w) + args
        return self.tk.call(args)

    attributes=wm_attributes

    def wm_client(self, name=None):
        """Store NAME in WM_CLIENT_MACHINE property of this widget. Return
        current value."""
        return self.tk.call('wm', 'client', self._w, name)

    client = wm_client

    def wm_colormapwindows(self, *wlist):
        """Store list of window names (WLIST) into WM_COLORMAPWINDOWS property
        of this widget. This list contains windows whose colormaps differ from their
        parents. Return current list of widgets if WLIST is empty."""
        if len(wlist) > 1:
            wlist = (wlist,) # Tk needs a list of windows here
        args = ('wm', 'colormapwindows', self._w) + wlist
        if wlist:
            self.tk.call(args)
        else:
            return [self._nametowidget(x)
                    for x in self.tk.splitlist(self.tk.call(args))]

    colormapwindows = wm_colormapwindows

    def wm_command(self, value=None):
        """Store VALUE in WM_COMMAND property. It is the command
        which shall be used to invoke the application. Return current
        command if VALUE is None."""
        return self.tk.call('wm', 'command', self._w, value)

    command = wm_command

    def wm_deiconify(self):
        """Deiconify this widget. If it was never mapped it will not be mapped.
        On Windows it will raise this widget and give it the focus."""
        return self.tk.call('wm', 'deiconify', self._w)

    deiconify = wm_deiconify

    def wm_focusmodel(self, model=None):
        """Set focus model to MODEL. "active" means that this widget will claim
        the focus itself, "passive" means that the window manager shall give
        the focus. Return current focus model if MODEL is None."""
        return self.tk.call('wm', 'focusmodel', self._w, model)

    focusmodel = wm_focusmodel

    def wm_forget(self, window): # new in Tk 8.5
        """The window will be unmapped from the screen and will no longer
        be managed by wm. toplevel windows will be treated like frame
        windows once they are no longer managed by wm, however, the menu
        option configuration will be remembered and the menus will return
        once the widget is managed again."""
        self.tk.call('wm', 'forget', window)

    forget = wm_forget

    def wm_frame(self):
        """Return identifier for decorative frame of this widget if present."""
        return self.tk.call('wm', 'frame', self._w)

    frame = wm_frame

    def wm_geometry(self, newGeometry=None):
        """Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return
        current value if None is given."""
        return self.tk.call('wm', 'geometry', self._w, newGeometry)

    geometry = wm_geometry

    def wm_grid(self,
                baseWidth=None, baseHeight=None,
                widthInc=None, heightInc=None):
        """Instruct the window manager that this widget shall only be
        resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and
        height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the
        number of grid units requested in Tk_GeometryRequest."""
        return self._getints(self.tk.call(
            'wm', 'grid', self._w,
            baseWidth, baseHeight, widthInc, heightInc))

    grid = wm_grid

    def wm_group(self, pathName=None):
        """Set the group leader widgets for related widgets to PATHNAME. Return
        the group leader of this widget if None is given."""
        return self.tk.call('wm', 'group', self._w, pathName)

    group = wm_group

    def wm_iconbitmap(self, bitmap=None, default=None):
        """Set bitmap for the iconified widget to BITMAP. Return
        the bitmap if None is given.

        Under Windows, the DEFAULT parameter can be used to set the icon
        for the widget and any descendents that don't have an icon set
        explicitly. DEFAULT can be the relative path to a .ico file
        (example: root.iconbitmap(default='myicon.ico') ). See Tk
        documentation for more information."""
        if default:
            return self.tk.call('wm', 'iconbitmap', self._w, '-default', default)
        else:
            return self.tk.call('wm', 'iconbitmap', self._w, bitmap)

    iconbitmap = wm_iconbitmap

    def wm_iconify(self):
        """Display widget as icon."""
        return self.tk.call('wm', 'iconify', self._w)

    iconify = wm_iconify

    def wm_iconmask(self, bitmap=None):
        """Set mask for the icon bitmap of this widget. Return the
        mask if None is given."""
        return self.tk.call('wm', 'iconmask', self._w, bitmap)

    iconmask = wm_iconmask

    def wm_iconname(self, newName=None):
        """Set the name of the icon for this widget. Return the name if
        None is given."""
        return self.tk.call('wm', 'iconname', self._w, newName)

    iconname = wm_iconname

    def wm_iconphoto(self, default=False, *args): # new in Tk 8.5
        """Sets the titlebar icon for this window based on the named photo
        images passed through args. If default is True, this is applied to
        all future created toplevels as well.

        The data in the images is taken as a snapshot at the time of
        invocation. If the images are later changed, this is not reflected
        to the titlebar icons. Multiple images are accepted to allow
        different images sizes to be provided. The window manager may scale
        provided icons to an appropriate size.

        On Windows, the images are packed into a Windows icon structure.
        This will override an icon specified to wm_iconbitmap, and vice
        versa.

        On X, the images are arranged into the _NET_WM_ICON X property,
        which most modern window managers support. An icon specified by
        wm_iconbitmap may exist simultaneously.

        On Macintosh, this currently does nothing."""
        if default:
            self.tk.call('wm', 'iconphoto', self._w, "-default", *args)
        else:
            self.tk.call('wm', 'iconphoto', self._w, *args)

    iconphoto = wm_iconphoto

    def wm_iconposition(self, x=None, y=None):
        """Set the position of the icon of this widget to X and Y. Return
        a tuple of the current values of X and Y if None is given."""
        return self._getints(self.tk.call(
            'wm', 'iconposition', self._w, x, y))

    iconposition = wm_iconposition

    def wm_iconwindow(self, pathName=None):
        """Set widget PATHNAME to be displayed instead of icon. Return the current
        value if None is given."""
        return self.tk.call('wm', 'iconwindow', self._w, pathName)

    iconwindow = wm_iconwindow

    def wm_manage(self, widget): # new in Tk 8.5
        """The widget specified will become a stand alone top-level window.
        The window will be decorated with the window managers title bar,
        etc."""
        self.tk.call('wm', 'manage', widget)

    manage = wm_manage

    def wm_maxsize(self, width=None, height=None):
        """Set max WIDTH and HEIGHT for this widget. If the window is gridded
        the values are given in grid units. Return the current values if None
        is given."""
        return self._getints(self.tk.call(
            'wm', 'maxsize', self._w, width, height))

    maxsize = wm_maxsize

    def wm_minsize(self, width=None, height=None):
        """Set min WIDTH and HEIGHT for this widget. If the window is gridded
        the values are given in grid units. Return the current values if None
        is given."""
        return self._getints(self.tk.call(
            'wm', 'minsize', self._w, width, height))

    minsize = wm_minsize

    def wm_overrideredirect(self, boolean=None):
        """Instruct the window manager to ignore this widget
        if BOOLEAN is given with 1. Return the current value if None
        is given."""
        return self._getboolean(self.tk.call(
            'wm', 'overrideredirect', self._w, boolean))

    overrideredirect = wm_overrideredirect

    def wm_positionfrom(self, who=None):
        """Instruct the window manager that the position of this widget shall
        be defined by the user if WHO is "user", and by its own policy if WHO is
        "program"."""
        return self.tk.call('wm', 'positionfrom', self._w, who)

    positionfrom = wm_positionfrom

    def wm_protocol(self, name=None, func=None):
        """Bind function FUNC to command NAME for this widget.
        Return the function bound to NAME if None is given. NAME could be
        e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW"."""
        if callable(func):
            # Register the Python callable as a Tcl command first.
            command = self._register(func)
        else:
            command = func
        return self.tk.call(
            'wm', 'protocol', self._w, name, command)

    protocol = wm_protocol

    def wm_resizable(self, width=None, height=None):
        """Instruct the window manager whether this widget can be resized
        in WIDTH or HEIGHT. Both values are boolean values."""
        return self.tk.call('wm', 'resizable', self._w, width, height)

    resizable = wm_resizable

    def wm_sizefrom(self, who=None):
        """Instruct the window manager that the size of this widget shall
        be defined by the user if WHO is "user", and by its own policy if WHO is
        "program"."""
        return self.tk.call('wm', 'sizefrom', self._w, who)

    sizefrom = wm_sizefrom

    def wm_state(self, newstate=None):
        """Query or set the state of this widget as one of normal, icon,
        iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only)."""
        return self.tk.call('wm', 'state', self._w, newstate)

    state = wm_state

    def wm_title(self, string=None):
        """Set the title of this widget."""
        return self.tk.call('wm', 'title', self._w, string)

    title = wm_title

    def wm_transient(self, master=None):
        """Instruct the window manager that this widget is transient
        with regard to widget MASTER."""
        return self.tk.call('wm', 'transient', self._w, master)

    transient = wm_transient

    def wm_withdraw(self):
        """Withdraw this widget from the screen such that it is unmapped
        and forgotten by the window manager. Re-draw it with wm_deiconify."""
        return self.tk.call('wm', 'withdraw', self._w)

    withdraw = wm_withdraw
class Tk(Misc, Wm):
    """Toplevel widget of Tk which represents mostly the main window
    of an application. It has an associated Tcl interpreter."""

    # The root window's Tcl path name is always '.'.
    _w = '.'
def __init__(self, screenName=None, baseName=None, className='Tk',
             useTk=1, sync=0, use=None):
    """Return a new Toplevel widget on screen SCREENNAME. A new Tcl interpreter will
    be created. BASENAME will be used for the identification of the profile file (see
    readprofile).
    It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME
    is the name of the widget class."""
    self.master = None
    self.children = {}
    self._tkloaded = 0
    # to avoid recursions in the getattr code in case of failure, we
    # ensure that self.tk is always _something_.
    self.tk = None
    if baseName is None:
        import os
        baseName = os.path.basename(sys.argv[0])
        baseName, ext = os.path.splitext(baseName)
        # Keep unusual extensions as part of the base name; only the
        # standard Python ones are stripped.
        if ext not in ('.py', '.pyc', '.pyo'):
            baseName = baseName + ext
    interactive = 0
    self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use)
    if useTk:
        self._loadtk()
    if not sys.flags.ignore_environment:
        # Issue #16248: Honor the -E flag to avoid code injection.
        self.readprofile(baseName, className)
def loadtk(self):
if not self._tkloaded:
self.tk.loadtk()
self._loadtk()
def _loadtk(self):
self._tkloaded = 1
global _default_root
# Version sanity checks
tk_version = self.tk.getvar('tk_version')
if tk_version != _tkinter.TK_VERSION:
raise RuntimeError("tk.h version (%s) doesn't match libtk.a version (%s)"
% (_tkinter.TK_VERSION, tk_version))
# Under unknown circumstances, tcl_version gets coerced to float
tcl_version = str(self.tk.getvar('tcl_version'))
if tcl_version != _tkinter.TCL_VERSION:
raise RuntimeError("tcl.h version (%s) doesn't match libtcl.a version (%s)" \
% (_tkinter.TCL_VERSION, tcl_version))
if TkVersion < 4.0:
raise RuntimeError("Tk 4.0 or higher is required; found Tk %s"
% str(TkVersion))
# Create and register the tkerror and exit commands
# We need to inline parts of _register here, _ register
# would register differently-named commands.
if self._tclCommands is None:
self._tclCommands = []
self.tk.createcommand('tkerror', _tkerror)
self.tk.createcommand('exit', _exit)
self._tclCommands.append('tkerror')
self._tclCommands.append('exit')
if _support_default_root and not _default_root:
_default_root = self
self.protocol("WM_DELETE_WINDOW", self.destroy)
def destroy(self):
"""Destroy this and all descendants widgets. This will
end the application of this Tcl interpreter."""
for c in list(self.children.values()): c.destroy()
self.tk.call('destroy', self._w)
Misc.destroy(self)
global _default_root
if _support_default_root and _default_root is self:
_default_root = None
def readprofile(self, baseName, className):
"""Internal function. It reads BASENAME.tcl and CLASSNAME.tcl into
the Tcl Interpreter and calls exec on the contents of BASENAME.py and
CLASSNAME.py if such a file exists in the home directory."""
import os
if 'HOME' in os.environ: home = os.environ['HOME']
else: home = os.curdir
class_tcl = os.path.join(home, '.%s.tcl' % className)
class_py = os.path.join(home, '.%s.py' % className)
base_tcl = os.path.join(home, '.%s.tcl' % baseName)
base_py = os.path.join(home, '.%s.py' % baseName)
dir = {'self': self}
exec('from tkinter import *', dir)
if os.path.isfile(class_tcl):
self.tk.call('source', class_tcl)
if os.path.isfile(class_py):
exec(open(class_py).read(), dir)
if os.path.isfile(base_tcl):
self.tk.call('source', base_tcl)
if os.path.isfile(base_py):
exec(open(base_py).read(), dir)
def report_callback_exception(self, exc, val, tb):
"""Internal function. It reports exception on sys.stderr."""
import traceback
sys.stderr.write("Exception in Tkinter callback\n")
sys.last_type = exc
sys.last_value = val
sys.last_traceback = tb
traceback.print_exception(exc, val, tb)
def __getattr__(self, attr):
"Delegate attribute access to the interpreter object"
return getattr(self.tk, attr)
# Ideally, the classes Pack, Place and Grid disappear, the
# pack/place/grid methods are defined on the Widget class, and
# everybody uses w.pack_whatever(...) instead of Pack.whatever(w,
# ...), with pack(), place() and grid() being short for
# pack_configure(), place_configure() and grid_configure(), and
# forget() being short for pack_forget(). As a practical matter, I'm
# afraid that there is too much code out there that may be using the
# Pack, Place or Grid class, so I leave them intact -- but only as
# backwards compatibility features. Also note that those methods that
# take a master as argument (e.g. pack_propagate) have been moved to
# the Misc class (which now incorporates all methods common between
# toplevel and interior widgets). Again, for compatibility, these are
# copied into the Pack, Place or Grid class.
def Tcl(screenName=None, baseName=None, className='Tk', useTk=0):
    """Return a Tk instance created with useTk defaulting to 0, i.e. a
    Tcl interpreter without the Tk GUI subsystem loaded (call loadtk()
    later if the GUI is needed)."""
    return Tk(screenName, baseName, className, useTk)
class Pack:
    """Geometry manager Pack.

    Base class supplying the pack_* methods to every widget."""

    def pack_configure(self, cnf={}, **kw):
        """Pack a widget in the parent widget. Use as options:
        after=widget - pack it after you have packed widget
        anchor=NSEW (or subset) - position widget according to
                                  given direction
        before=widget - pack it before you will pack widget
        expand=bool - expand widget if parent size grows
        fill=NONE or X or Y or BOTH - fill widget if widget grows
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget.
        """
        args = ('pack', 'configure', self._w) + self._options(cnf, kw)
        self.tk.call(args)
    pack = configure = config = pack_configure

    def pack_forget(self):
        """Unmap this widget; it drops out of the packing order."""
        self.tk.call('pack', 'forget', self._w)
    forget = pack_forget

    def pack_info(self):
        """Return a dictionary with the packing options of this widget."""
        words = self.tk.splitlist(self.tk.call('pack', 'info', self._w))
        # Tk reports "-option value" pairs; strip the dash and map any
        # window path name back to its widget object.
        info = {}
        for key, value in zip(words[::2], words[1::2]):
            if str(value)[:1] == '.':
                value = self._nametowidget(value)
            info[key[1:]] = value
        return info
    info = pack_info
    propagate = pack_propagate = Misc.pack_propagate
    slaves = pack_slaves = Misc.pack_slaves
class Place:
    """Geometry manager Place.

    Base class supplying the place_* methods to every widget."""

    def place_configure(self, cnf={}, **kw):
        """Place a widget in the parent widget. Use as options:
        in=master - master relative to which the widget is placed
        in_=master - see 'in' option description
        x=amount - locate anchor of this widget at position x of master
        y=amount - locate anchor of this widget at position y of master
        relx=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to width of master (1.0 is right edge)
        rely=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to height of master (1.0 is bottom edge)
        anchor=NSEW (or subset) - position anchor according to given direction
        width=amount - width of this widget in pixel
        height=amount - height of this widget in pixel
        relwidth=amount - width of this widget between 0.0 and 1.0
                          relative to width of master (1.0 is the same width
                          as the master)
        relheight=amount - height of this widget between 0.0 and 1.0
                           relative to height of master (1.0 is the same
                           height as the master)
        bordermode="inside" or "outside" - whether to take border width of
                                           master widget into account
        """
        args = ('place', 'configure', self._w) + self._options(cnf, kw)
        self.tk.call(args)
    place = configure = config = place_configure

    def place_forget(self):
        """Unmap this widget."""
        self.tk.call('place', 'forget', self._w)
    forget = place_forget

    def place_info(self):
        """Return a dictionary with the placing options of this widget."""
        words = self.tk.splitlist(self.tk.call('place', 'info', self._w))
        # Tk reports "-option value" pairs; strip the dash and map any
        # window path name back to its widget object.
        info = {}
        for key, value in zip(words[::2], words[1::2]):
            if str(value)[:1] == '.':
                value = self._nametowidget(value)
            info[key[1:]] = value
        return info
    info = place_info
    slaves = place_slaves = Misc.place_slaves
class Grid:
    """Geometry manager Grid.

    Base class supplying the grid_* methods to every widget."""
    # Thanks to Masazumi Yoshikawa (yosikawa@isi.edu)

    def grid_configure(self, cnf={}, **kw):
        """Position a widget in the parent widget in a grid. Use as options:
        column=number - use cell identified with given column (starting with 0)
        columnspan=number - this widget will span several columns
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        row=number - use cell identified with given row (starting with 0)
        rowspan=number - this widget will span several rows
        sticky=NSEW - if cell is larger on which sides will this
                      widget stick to the cell boundary
        """
        args = ('grid', 'configure', self._w) + self._options(cnf, kw)
        self.tk.call(args)
    grid = configure = config = grid_configure
    bbox = grid_bbox = Misc.grid_bbox
    columnconfigure = grid_columnconfigure = Misc.grid_columnconfigure

    def grid_forget(self):
        """Unmap this widget."""
        self.tk.call('grid', 'forget', self._w)
    forget = grid_forget

    def grid_remove(self):
        """Unmap this widget but remember the grid options."""
        self.tk.call('grid', 'remove', self._w)

    def grid_info(self):
        """Return a dictionary with the grid options of this widget."""
        words = self.tk.splitlist(self.tk.call('grid', 'info', self._w))
        # Tk reports "-option value" pairs; strip the dash and map any
        # window path name back to its widget object.
        info = {}
        for key, value in zip(words[::2], words[1::2]):
            if str(value)[:1] == '.':
                value = self._nametowidget(value)
            info[key[1:]] = value
        return info
    info = grid_info
    location = grid_location = Misc.grid_location
    propagate = grid_propagate = Misc.grid_propagate
    rowconfigure = grid_rowconfigure = Misc.grid_rowconfigure
    size = grid_size = Misc.grid_size
    slaves = grid_slaves = Misc.grid_slaves
class BaseWidget(Misc):
    """Internal class."""
    def _setup(self, master, cnf):
        """Internal function. Sets up information about children."""
        if _support_default_root:
            global _default_root
            # Lazily create a default root window when no master is given.
            if not master:
                if not _default_root:
                    _default_root = Tk()
                master = _default_root
        self.master = master
        self.tk = master.tk
        name = None
        if 'name' in cnf:
            # An explicit widget name was requested; consume it so it is
            # not passed on to Tk as a resource option.
            name = cnf['name']
            del cnf['name']
        if not name:
            # Fall back to a unique name derived from the object id.
            name = repr(id(self))
        self._name = name
        if master._w=='.':
            self._w = '.' + name
        else:
            self._w = master._w + '.' + name
        self.children = {}
        # A widget already registered under the same name is replaced.
        if self._name in self.master.children:
            self.master.children[self._name].destroy()
        self.master.children[self._name] = self
    def __init__(self, master, widgetName, cnf={}, kw={}, extra=()):
        """Construct a widget with the parent widget MASTER, a name WIDGETNAME
        and appropriate options."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        self.widgetName = widgetName
        BaseWidget._setup(self, master, cnf)
        if self._tclCommands is None:
            self._tclCommands = []
        # Options keyed by a class object are applied after the widget is
        # created, via that class's configure method.
        classes = [(k, v) for k, v in cnf.items() if isinstance(k, type)]
        for k, v in classes:
            del cnf[k]
        self.tk.call(
            (widgetName, self._w) + extra + self._options(cnf))
        for k, v in classes:
            k.configure(self, v)
    def destroy(self):
        """Destroy this and all descendants widgets."""
        for c in list(self.children.values()): c.destroy()
        self.tk.call('destroy', self._w)
        # Unregister from the parent's child table.
        if self._name in self.master.children:
            del self.master.children[self._name]
        Misc.destroy(self)
    def _do(self, name, args=()):
        # XXX Obsolete -- better use self.tk.call directly!
        return self.tk.call((self._w, name) + args)
class Widget(BaseWidget, Pack, Place, Grid):
    """Internal class.

    Base class for a widget which can be positioned with the geometry managers
    Pack, Place or Grid."""
    pass
class Toplevel(BaseWidget, Wm):
    """Toplevel widget, e.g. for dialogs."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a toplevel widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, menu, relief, screen, takefocus,
        use, visual, width."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        # These options must be passed on the Tk command line when the
        # window is created, not configured afterwards.
        extra = ()
        for wmkey in ['screen', 'class_', 'class', 'visual',
                      'colormap']:
            if wmkey in cnf:
                val = cnf[wmkey]
                # TBD: a hack needed because some keys
                # are not valid as keyword arguments
                if wmkey[-1] == '_': opt = '-'+wmkey[:-1]
                else: opt = '-'+wmkey
                extra = extra + (opt, val)
                del cnf[wmkey]
        BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra)
        # Inherit the icon name and title of the root window by default.
        root = self._root()
        self.iconname(root.iconname())
        self.title(root.title())
        self.protocol("WM_DELETE_WINDOW", self.destroy)
class Button(Widget):
    """Button widget."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a button widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, repeatdelay,
            repeatinterval, takefocus, text,
            textvariable, underline, wraplength

        WIDGET-SPECIFIC OPTIONS

            command, compound, default, height,
            overrelief, state, width
        """
        Widget.__init__(self, master, 'button', cnf, kw)

    # Thin wrappers over the Tk library procedures that drive the default
    # button bindings; kept for backwards compatibility.
    def tkButtonEnter(self, *dummy):
        self.tk.call('tkButtonEnter', self._w)

    def tkButtonLeave(self, *dummy):
        self.tk.call('tkButtonLeave', self._w)

    def tkButtonDown(self, *dummy):
        self.tk.call('tkButtonDown', self._w)

    def tkButtonUp(self, *dummy):
        self.tk.call('tkButtonUp', self._w)

    def tkButtonInvoke(self, *dummy):
        self.tk.call('tkButtonInvoke', self._w)

    def flash(self):
        """Flash the button by alternating its active and normal colors.

        The button is left in the same normal/active state it had when
        the command was invoked. Ignored when the button is disabled.
        """
        self.tk.call(self._w, 'flash')

    def invoke(self):
        """Run the command associated with the button and return its result.

        An empty string is returned when no command is attached. Ignored
        when the button is disabled.
        """
        return self.tk.call(self._w, 'invoke')
# Indices:
# XXX I don't like these -- take them away
def AtEnd():
    """Deprecated helper: return the 'end' index string."""
    warnings.warn("tkinter.AtEnd will be removed in 3.4",
                  DeprecationWarning, stacklevel=2)
    return 'end'
def AtInsert(*args):
    """Deprecated helper: return an 'insert' index string, extended by
    any truthy modifier arguments."""
    warnings.warn("tkinter.AtInsert will be removed in 3.4",
                  DeprecationWarning, stacklevel=2)
    return ' '.join(['insert'] + [a for a in args if a])
def AtSelFirst():
    """Deprecated helper: return the 'sel.first' index string."""
    warnings.warn("tkinter.AtSelFirst will be removed in 3.4",
                  DeprecationWarning, stacklevel=2)
    return 'sel.first'
def AtSelLast():
    """Deprecated helper: return the 'sel.last' index string."""
    warnings.warn("tkinter.AtSelLast will be removed in 3.4",
                  DeprecationWarning, stacklevel=2)
    return 'sel.last'
def At(x, y=None):
    """Deprecated helper: return an '@x' or '@x,y' coordinate index
    string."""
    warnings.warn("tkinter.At will be removed in 3.4",
                  DeprecationWarning, stacklevel=2)
    return '@%r' % (x,) if y is None else '@%r,%r' % (x, y)
class Canvas(Widget, XView, YView):
    """Canvas widget to display graphical elements like lines or text."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a canvas widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, closeenough,
        confine, cursor, height, highlightbackground, highlightcolor,
        highlightthickness, insertbackground, insertborderwidth,
        insertofftime, insertontime, insertwidth, offset, relief,
        scrollregion, selectbackground, selectborderwidth, selectforeground,
        state, takefocus, width, xscrollcommand, xscrollincrement,
        yscrollcommand, yscrollincrement."""
        Widget.__init__(self, master, 'canvas', cnf, kw)
    def addtag(self, *args):
        """Internal function. Dispatches the 'addtag' canvas command."""
        self.tk.call((self._w, 'addtag') + args)
    def addtag_above(self, newtag, tagOrId):
        """Add tag NEWTAG to all items above TAGORID."""
        self.addtag(newtag, 'above', tagOrId)
    def addtag_all(self, newtag):
        """Add tag NEWTAG to all items."""
        self.addtag(newtag, 'all')
    def addtag_below(self, newtag, tagOrId):
        """Add tag NEWTAG to all items below TAGORID."""
        self.addtag(newtag, 'below', tagOrId)
    def addtag_closest(self, newtag, x, y, halo=None, start=None):
        """Add tag NEWTAG to item which is closest to pixel at X, Y.
        If several match take the top-most.
        All items closer than HALO are considered overlapping (all are
        closest). If START is specified the next below this tag is taken."""
        self.addtag(newtag, 'closest', x, y, halo, start)
    def addtag_enclosed(self, newtag, x1, y1, x2, y2):
        """Add tag NEWTAG to all items in the rectangle defined
        by X1,Y1,X2,Y2."""
        self.addtag(newtag, 'enclosed', x1, y1, x2, y2)
    def addtag_overlapping(self, newtag, x1, y1, x2, y2):
        """Add tag NEWTAG to all items which overlap the rectangle
        defined by X1,Y1,X2,Y2."""
        self.addtag(newtag, 'overlapping', x1, y1, x2, y2)
    def addtag_withtag(self, newtag, tagOrId):
        """Add tag NEWTAG to all items with TAGORID."""
        self.addtag(newtag, 'withtag', tagOrId)
    def bbox(self, *args):
        """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
        which encloses all items with tags specified as arguments.
        Returns None when no such items exist."""
        return self._getints(
            self.tk.call((self._w, 'bbox') + args)) or None
    def tag_unbind(self, tagOrId, sequence, funcid=None):
        """Unbind for all items with TAGORID for event SEQUENCE the
        function identified with FUNCID."""
        self.tk.call(self._w, 'bind', tagOrId, sequence, '')
        if funcid:
            self.deletecommand(funcid)
    def tag_bind(self, tagOrId, sequence=None, func=None, add=None):
        """Bind to all items with TAGORID at event SEQUENCE a call to function FUNC.
        An additional boolean parameter ADD specifies whether FUNC will be
        called additionally to the other bound function or whether it will
        replace the previous function. See bind for the return value."""
        return self._bind((self._w, 'bind', tagOrId),
                          sequence, func, add)
    def canvasx(self, screenx, gridspacing=None):
        """Return the canvas x coordinate of pixel position SCREENX rounded
        to nearest multiple of GRIDSPACING units."""
        return getdouble(self.tk.call(
            self._w, 'canvasx', screenx, gridspacing))
    def canvasy(self, screeny, gridspacing=None):
        """Return the canvas y coordinate of pixel position SCREENY rounded
        to nearest multiple of GRIDSPACING units."""
        return getdouble(self.tk.call(
            self._w, 'canvasy', screeny, gridspacing))
    def coords(self, *args):
        """Return a list of coordinates for the item given in ARGS."""
        # XXX Should use _flatten on args
        return [getdouble(x) for x in
                self.tk.splitlist(
                    self.tk.call((self._w, 'coords') + args))]
    def _create(self, itemType, args, kw): # Args: (val, val, ..., cnf={})
        """Internal function. Creates an item of ITEMTYPE; a trailing
        dict/tuple in ARGS is treated as the configuration options."""
        args = _flatten(args)
        cnf = args[-1]
        if isinstance(cnf, (dict, tuple)):
            args = args[:-1]
        else:
            cnf = {}
        # Returns the integer item id assigned by Tk.
        return getint(self.tk.call(
            self._w, 'create', itemType,
            *(args + self._options(cnf, kw))))
    def create_arc(self, *args, **kw):
        """Create arc shaped region with coordinates x1,y1,x2,y2."""
        return self._create('arc', args, kw)
    def create_bitmap(self, *args, **kw):
        """Create bitmap with coordinates x1,y1."""
        return self._create('bitmap', args, kw)
    def create_image(self, *args, **kw):
        """Create image item with coordinates x1,y1."""
        return self._create('image', args, kw)
    def create_line(self, *args, **kw):
        """Create line with coordinates x1,y1,...,xn,yn."""
        return self._create('line', args, kw)
    def create_oval(self, *args, **kw):
        """Create oval with coordinates x1,y1,x2,y2."""
        return self._create('oval', args, kw)
    def create_polygon(self, *args, **kw):
        """Create polygon with coordinates x1,y1,...,xn,yn."""
        return self._create('polygon', args, kw)
    def create_rectangle(self, *args, **kw):
        """Create rectangle with coordinates x1,y1,x2,y2."""
        return self._create('rectangle', args, kw)
    def create_text(self, *args, **kw):
        """Create text with coordinates x1,y1."""
        return self._create('text', args, kw)
    def create_window(self, *args, **kw):
        """Create window with coordinates x1,y1,x2,y2."""
        return self._create('window', args, kw)
    def dchars(self, *args):
        """Delete characters of text items identified by tag or id in ARGS (possibly
        several times) from FIRST to LAST character (including)."""
        self.tk.call((self._w, 'dchars') + args)
    def delete(self, *args):
        """Delete items identified by all tag or ids contained in ARGS."""
        self.tk.call((self._w, 'delete') + args)
    def dtag(self, *args):
        """Delete tag or id given as last arguments in ARGS from items
        identified by first argument in ARGS."""
        self.tk.call((self._w, 'dtag') + args)
    def find(self, *args):
        """Internal function. Dispatches the 'find' canvas command and
        returns the matching item ids as a tuple of ints."""
        return self._getints(
            self.tk.call((self._w, 'find') + args)) or ()
    def find_above(self, tagOrId):
        """Return items above TAGORID."""
        return self.find('above', tagOrId)
    def find_all(self):
        """Return all items."""
        return self.find('all')
    def find_below(self, tagOrId):
        """Return all items below TAGORID."""
        return self.find('below', tagOrId)
    def find_closest(self, x, y, halo=None, start=None):
        """Return item which is closest to pixel at X, Y.
        If several match take the top-most.
        All items closer than HALO are considered overlapping (all are
        closest). If START is specified the next below this tag is taken."""
        return self.find('closest', x, y, halo, start)
    def find_enclosed(self, x1, y1, x2, y2):
        """Return all items in rectangle defined
        by X1,Y1,X2,Y2."""
        return self.find('enclosed', x1, y1, x2, y2)
    def find_overlapping(self, x1, y1, x2, y2):
        """Return all items which overlap the rectangle
        defined by X1,Y1,X2,Y2."""
        return self.find('overlapping', x1, y1, x2, y2)
    def find_withtag(self, tagOrId):
        """Return all items with TAGORID."""
        return self.find('withtag', tagOrId)
    def focus(self, *args):
        """Set focus to the first item specified in ARGS."""
        return self.tk.call((self._w, 'focus') + args)
    def gettags(self, *args):
        """Return tags associated with the first item specified in ARGS."""
        return self.tk.splitlist(
            self.tk.call((self._w, 'gettags') + args))
    def icursor(self, *args):
        """Set cursor at position POS in the item identified by TAGORID.
        In ARGS TAGORID must be first."""
        self.tk.call((self._w, 'icursor') + args)
    def index(self, *args):
        """Return position of cursor as integer in item specified in ARGS."""
        return getint(self.tk.call((self._w, 'index') + args))
    def insert(self, *args):
        """Insert TEXT in item TAGORID at position POS. ARGS must
        be TAGORID POS TEXT."""
        self.tk.call((self._w, 'insert') + args)
    def itemcget(self, tagOrId, option):
        """Return the resource value for an OPTION for item TAGORID."""
        return self.tk.call(
            (self._w, 'itemcget') + (tagOrId, '-'+option))
    def itemconfigure(self, tagOrId, cnf=None, **kw):
        """Configure resources of an item TAGORID.

        The values for resources are specified as keyword
        arguments. To get an overview about
        the allowed keyword arguments call the method without arguments.
        """
        return self._configure(('itemconfigure', tagOrId), cnf, kw)
    itemconfig = itemconfigure
    # lower, tkraise/lift hide Misc.lower, Misc.tkraise/lift,
    # so the preferred name for them is tag_lower, tag_raise
    # (similar to tag_bind, and similar to the Text widget);
    # unfortunately can't delete the old ones yet (maybe in 1.6)
    def tag_lower(self, *args):
        """Lower an item TAGORID given in ARGS
        (optional below another item)."""
        self.tk.call((self._w, 'lower') + args)
    lower = tag_lower
    def move(self, *args):
        """Move an item TAGORID given in ARGS."""
        self.tk.call((self._w, 'move') + args)
    def postscript(self, cnf={}, **kw):
        """Print the contents of the canvas to a postscript
        file. Valid options: colormap, colormode, file, fontmap,
        height, pageanchor, pageheight, pagewidth, pagex, pagey,
        rotate, width, x, y."""
        return self.tk.call((self._w, 'postscript') +
                            self._options(cnf, kw))
    def tag_raise(self, *args):
        """Raise an item TAGORID given in ARGS
        (optional above another item)."""
        self.tk.call((self._w, 'raise') + args)
    lift = tkraise = tag_raise
    def scale(self, *args):
        """Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE."""
        self.tk.call((self._w, 'scale') + args)
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y, gain=10):
        """Adjust the view of the canvas to GAIN times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y, gain)
    def select_adjust(self, tagOrId, index):
        """Adjust the end of the selection near the cursor of an item TAGORID to index."""
        self.tk.call(self._w, 'select', 'adjust', tagOrId, index)
    def select_clear(self):
        """Clear the selection if it is in this widget."""
        self.tk.call(self._w, 'select', 'clear')
    def select_from(self, tagOrId, index):
        """Set the fixed end of a selection in item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'from', tagOrId, index)
    def select_item(self):
        """Return the item which has the selection."""
        return self.tk.call(self._w, 'select', 'item') or None
    def select_to(self, tagOrId, index):
        """Set the variable end of a selection in item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'to', tagOrId, index)
    def type(self, tagOrId):
        """Return the type of the item TAGORID."""
        return self.tk.call(self._w, 'type', tagOrId) or None
class Checkbutton(Widget):
    """Checkbutton widget which is either in on- or off-state."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a checkbutton widget with the parent MASTER.

        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, offvalue, onvalue, padx, pady, relief,
        selectcolor, selectimage, state, takefocus, text, textvariable,
        underline, variable, width, wraplength."""
        Widget.__init__(self, master, 'checkbutton', cnf, kw)

    def deselect(self):
        """Switch the button to its off-state."""
        self.tk.call(self._w, 'deselect')

    def flash(self):
        """Briefly flash the button."""
        self.tk.call(self._w, 'flash')

    def invoke(self):
        """Toggle the button, running its command resource if one is set,
        and return the command's result."""
        return self.tk.call(self._w, 'invoke')

    def select(self):
        """Switch the button to its on-state."""
        self.tk.call(self._w, 'select')

    def toggle(self):
        """Flip the button between on- and off-state."""
        self.tk.call(self._w, 'toggle')
class Entry(Widget, XView):
    """Entry widget which allows to display simple text."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct an entry widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, cursor,
        exportselection, fg, font, foreground, highlightbackground,
        highlightcolor, highlightthickness, insertbackground,
        insertborderwidth, insertofftime, insertontime, insertwidth,
        invalidcommand, invcmd, justify, relief, selectbackground,
        selectborderwidth, selectforeground, show, state, takefocus,
        textvariable, validate, validatecommand, vcmd, width,
        xscrollcommand."""
        Widget.__init__(self, master, 'entry', cnf, kw)

    def delete(self, first, last=None):
        """Delete text from FIRST up to, but not including, LAST."""
        self.tk.call(self._w, 'delete', first, last)

    def get(self):
        """Return the entry's current text."""
        return self.tk.call(self._w, 'get')

    def icursor(self, index):
        """Place the insertion cursor at INDEX."""
        self.tk.call(self._w, 'icursor', index)

    def index(self, index):
        """Return the numerical position corresponding to INDEX."""
        return getint(self.tk.call(self._w, 'index', index))

    def insert(self, index, string):
        """Insert STRING into the entry at INDEX."""
        self.tk.call(self._w, 'insert', index, string)

    def scan_mark(self, x):
        """Record X as the anchor for a later scan_dragto."""
        self.tk.call(self._w, 'scan', 'mark', x)

    def scan_dragto(self, x):
        """Shift the view by 10 times the difference between X and the
        position recorded by scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x)

    def selection_adjust(self, index):
        """Extend the end of the selection nearest the cursor to INDEX."""
        self.tk.call(self._w, 'selection', 'adjust', index)
    select_adjust = selection_adjust

    def selection_clear(self):
        """Remove the selection when it lives in this widget."""
        self.tk.call(self._w, 'selection', 'clear')
    select_clear = selection_clear

    def selection_from(self, index):
        """Anchor the fixed end of the selection at INDEX."""
        self.tk.call(self._w, 'selection', 'from', index)
    select_from = selection_from

    def selection_present(self):
        """Return True if any characters in the entry are selected,
        False otherwise."""
        return self.tk.getboolean(
            self.tk.call(self._w, 'selection', 'present'))
    select_present = selection_present

    def selection_range(self, start, end):
        """Select the characters from START up to, but not including, END."""
        self.tk.call(self._w, 'selection', 'range', start, end)
    select_range = selection_range

    def selection_to(self, index):
        """Move the variable end of the selection to INDEX."""
        self.tk.call(self._w, 'selection', 'to', index)
    select_to = selection_to
class Frame(Widget):
    """Frame widget which may contain other widgets and can have a 3D border."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a frame widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, relief, takefocus, visual, width."""
        cnf = _cnfmerge((cnf, kw))
        extra = ()
        # The widget class must be given on the Tk command line at creation
        # time; 'class_' takes precedence over 'class'.
        for key in ('class_', 'class'):
            if key in cnf:
                extra = ('-class', cnf.pop(key))
                break
        Widget.__init__(self, master, 'frame', cnf, {}, extra)
class Label(Widget):
    """Label widget which can display text and bitmaps."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a label widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, takefocus, text,
            textvariable, underline, wraplength

        WIDGET-SPECIFIC OPTIONS

            height, state, width

        """
        Widget.__init__(self, master, 'label', cnf, kw)
class Listbox(Widget, XView, YView):
    """Listbox widget which can display a list of strings."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a listbox widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, cursor,
        exportselection, fg, font, foreground, height, highlightbackground,
        highlightcolor, highlightthickness, relief, selectbackground,
        selectborderwidth, selectforeground, selectmode, setgrid, takefocus,
        width, xscrollcommand, yscrollcommand, listvariable."""
        Widget.__init__(self, master, 'listbox', cnf, kw)
    def activate(self, index):
        """Activate item identified by INDEX."""
        self.tk.call(self._w, 'activate', index)
    def bbox(self, *args):
        """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
        which encloses the item identified by index in ARGS.
        Returns None when the item is not visible."""
        return self._getints(
            self.tk.call((self._w, 'bbox') + args)) or None
    def curselection(self):
        """Return list of indices of currently selected item."""
        # XXX Ought to apply self._getints()...
        return self.tk.splitlist(self.tk.call(
            self._w, 'curselection'))
    def delete(self, first, last=None):
        """Delete items from FIRST to LAST (included)."""
        self.tk.call(self._w, 'delete', first, last)
    def get(self, first, last=None):
        """Get list of items from FIRST to LAST (included)."""
        # Compare against None, not truthiness: a LAST of 0 is a valid
        # index and must still select the ranged form of the command.
        if last is not None:
            return self.tk.splitlist(self.tk.call(
                self._w, 'get', first, last))
        else:
            return self.tk.call(self._w, 'get', first)
    def index(self, index):
        """Return index of item identified with INDEX."""
        i = self.tk.call(self._w, 'index', index)
        if i == 'none': return None
        return getint(i)
    def insert(self, index, *elements):
        """Insert ELEMENTS at INDEX."""
        self.tk.call((self._w, 'insert', index) + elements)
    def nearest(self, y):
        """Get index of item which is nearest to y coordinate Y."""
        return getint(self.tk.call(
            self._w, 'nearest', y))
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y):
        """Adjust the view of the listbox to 10 times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y)
    def see(self, index):
        """Scroll such that INDEX is visible."""
        self.tk.call(self._w, 'see', index)
    def selection_anchor(self, index):
        """Set the fixed end of the selection to INDEX."""
        self.tk.call(self._w, 'selection', 'anchor', index)
    select_anchor = selection_anchor
    def selection_clear(self, first, last=None):
        """Clear the selection from FIRST to LAST (included)."""
        self.tk.call(self._w,
                     'selection', 'clear', first, last)
    select_clear = selection_clear
    def selection_includes(self, index):
        """Return 1 if INDEX is part of the selection."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'selection', 'includes', index))
    select_includes = selection_includes
    def selection_set(self, first, last=None):
        """Set the selection from FIRST to LAST (included) without
        changing the currently selected elements."""
        self.tk.call(self._w, 'selection', 'set', first, last)
    select_set = selection_set
    def size(self):
        """Return the number of elements in the listbox."""
        return getint(self.tk.call(self._w, 'size'))
    def itemcget(self, index, option):
        """Return the resource value for an ITEM and an OPTION."""
        return self.tk.call(
            (self._w, 'itemcget') + (index, '-'+option))
    def itemconfigure(self, index, cnf=None, **kw):
        """Configure resources of an ITEM.

        The values for resources are specified as keyword arguments.
        To get an overview about the allowed keyword arguments
        call the method without arguments.
        Valid resource names: background, bg, foreground, fg,
        selectbackground, selectforeground."""
        return self._configure(('itemconfigure', index), cnf, kw)
    itemconfig = itemconfigure
class Menu(Widget):
    """Menu widget which allows to display menu bars, pull-down menus and pop-up menus."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct menu widget with the parent MASTER.
        Valid resource names: activebackground, activeborderwidth,
        activeforeground, background, bd, bg, borderwidth, cursor,
        disabledforeground, fg, font, foreground, postcommand, relief,
        selectcolor, takefocus, tearoff, tearoffcommand, title, type."""
        Widget.__init__(self, master, 'menu', cnf, kw)
    def tk_bindForTraversal(self):
        pass # obsolete since Tk 4.0
    # NOTE(review): the tk_* methods below forward to legacy Tk helper
    # procedures; presumably internal/obsolete -- confirm before use.
    def tk_mbPost(self):
        self.tk.call('tk_mbPost', self._w)
    def tk_mbUnpost(self):
        self.tk.call('tk_mbUnpost')
    def tk_traverseToMenu(self, char):
        self.tk.call('tk_traverseToMenu', self._w, char)
    def tk_traverseWithinMenu(self, char):
        self.tk.call('tk_traverseWithinMenu', self._w, char)
    def tk_getMenuButtons(self):
        return self.tk.call('tk_getMenuButtons', self._w)
    def tk_nextMenu(self, count):
        self.tk.call('tk_nextMenu', count)
    def tk_nextMenuEntry(self, count):
        self.tk.call('tk_nextMenuEntry', count)
    def tk_invokeMenu(self):
        self.tk.call('tk_invokeMenu', self._w)
    def tk_firstMenu(self):
        self.tk.call('tk_firstMenu', self._w)
    def tk_mbButtonDown(self):
        self.tk.call('tk_mbButtonDown', self._w)
    def tk_popup(self, x, y, entry=""):
        """Post the menu at position X,Y with entry ENTRY."""
        self.tk.call('tk_popup', self._w, x, y, entry)
    def activate(self, index):
        """Activate entry at INDEX."""
        self.tk.call(self._w, 'activate', index)
    def add(self, itemType, cnf={}, **kw):
        """Internal function."""
        self.tk.call((self._w, 'add', itemType) +
                 self._options(cnf, kw))
    def add_cascade(self, cnf={}, **kw):
        """Add hierarchical menu item."""
        self.add('cascade', cnf or kw)
    def add_checkbutton(self, cnf={}, **kw):
        """Add checkbutton menu item."""
        self.add('checkbutton', cnf or kw)
    def add_command(self, cnf={}, **kw):
        """Add command menu item."""
        self.add('command', cnf or kw)
    def add_radiobutton(self, cnf={}, **kw):
        """Add radio menu item."""
        self.add('radiobutton', cnf or kw)
    def add_separator(self, cnf={}, **kw):
        """Add separator."""
        self.add('separator', cnf or kw)
    def insert(self, index, itemType, cnf={}, **kw):
        """Internal function."""
        self.tk.call((self._w, 'insert', index, itemType) +
                 self._options(cnf, kw))
    def insert_cascade(self, index, cnf={}, **kw):
        """Add hierarchical menu item at INDEX."""
        self.insert(index, 'cascade', cnf or kw)
    def insert_checkbutton(self, index, cnf={}, **kw):
        """Add checkbutton menu item at INDEX."""
        self.insert(index, 'checkbutton', cnf or kw)
    def insert_command(self, index, cnf={}, **kw):
        """Add command menu item at INDEX."""
        self.insert(index, 'command', cnf or kw)
    def insert_radiobutton(self, index, cnf={}, **kw):
        """Add radio menu item at INDEX."""
        self.insert(index, 'radiobutton', cnf or kw)
    def insert_separator(self, index, cnf={}, **kw):
        """Add separator at INDEX."""
        self.insert(index, 'separator', cnf or kw)
    def delete(self, index1, index2=None):
        """Delete menu items between INDEX1 and INDEX2 (included)."""
        if index2 is None:
            index2 = index1
        num_index1, num_index2 = self.index(index1), self.index(index2)
        # Unresolvable indices: fall back to an empty range so no
        # callback commands are freed below.
        if (num_index1 is None) or (num_index2 is None):
            num_index1, num_index2 = 0, -1
        # Release the Tcl command registered for each entry's 'command'
        # callback before the entries themselves are deleted.
        for i in range(num_index1, num_index2 + 1):
            if 'command' in self.entryconfig(i):
                c = str(self.entrycget(i, 'command'))
                if c:
                    self.deletecommand(c)
        self.tk.call(self._w, 'delete', index1, index2)
    def entrycget(self, index, option):
        """Return the resource value of a menu item for OPTION at INDEX."""
        return self.tk.call(self._w, 'entrycget', index, '-' + option)
    def entryconfigure(self, index, cnf=None, **kw):
        """Configure a menu item at INDEX."""
        return self._configure(('entryconfigure', index), cnf, kw)
    entryconfig = entryconfigure
    def index(self, index):
        """Return the index of a menu item identified by INDEX."""
        i = self.tk.call(self._w, 'index', index)
        if i == 'none': return None
        return getint(i)
    def invoke(self, index):
        """Invoke a menu item identified by INDEX and execute
        the associated command."""
        return self.tk.call(self._w, 'invoke', index)
    def post(self, x, y):
        """Display a menu at position X,Y."""
        self.tk.call(self._w, 'post', x, y)
    def type(self, index):
        """Return the type of the menu item at INDEX."""
        return self.tk.call(self._w, 'type', index)
    def unpost(self):
        """Unmap a menu."""
        self.tk.call(self._w, 'unpost')
    def xposition(self, index): # new in Tk 8.5
        """Return the x-position of the leftmost pixel of the menu item
        at INDEX."""
        return getint(self.tk.call(self._w, 'xposition', index))
    def yposition(self, index):
        """Return the y-position of the topmost pixel of the menu item at INDEX."""
        return getint(self.tk.call(
            self._w, 'yposition', index))
class Menubutton(Widget):
    """Menubutton widget, obsolete since Tk8.0."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a menubutton widget with the parent MASTER."""
        Widget.__init__(self, master, 'menubutton', cnf, kw)
class Message(Widget):
    """Message widget to display multiline text. Obsolete since Label does it too."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a message widget with the parent MASTER."""
        Widget.__init__(self, master, 'message', cnf, kw)
class Radiobutton(Widget):
    """Radiobutton widget which shows only one of several buttons in on-state."""
    def __init__(self, master=None, cnf={}, **kw):
        """Create a radiobutton widget as a child of MASTER.
        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, padx, pady, relief, selectcolor, selectimage,
        state, takefocus, text, textvariable, underline, value, variable,
        width, wraplength."""
        Widget.__init__(self, master, 'radiobutton', cnf, kw)
    def deselect(self):
        """Switch the button to its off-state."""
        self.tk.call(self._w, 'deselect')
    def flash(self):
        """Flash the button."""
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Toggle the button and run the associated command resource, if any."""
        return self.tk.call(self._w, 'invoke')
    def select(self):
        """Switch the button to its on-state."""
        self.tk.call(self._w, 'select')
class Scale(Widget):
    """Scale widget which can display a numerical scale."""
    def __init__(self, master=None, cnf={}, **kw):
        """Create a scale widget as a child of MASTER.
        Valid resource names: activebackground, background, bigincrement, bd,
        bg, borderwidth, command, cursor, digits, fg, font, foreground, from,
        highlightbackground, highlightcolor, highlightthickness, label,
        length, orient, relief, repeatdelay, repeatinterval, resolution,
        showvalue, sliderlength, sliderrelief, state, takefocus,
        tickinterval, to, troughcolor, variable, width."""
        Widget.__init__(self, master, 'scale', cnf, kw)
    def get(self):
        """Return the current scale value, as an int when possible and
        as a float otherwise."""
        raw = self.tk.call(self._w, 'get')
        try:
            return getint(raw)
        except ValueError:
            return getdouble(raw)
    def set(self, value):
        """Move the scale to VALUE."""
        self.tk.call(self._w, 'set', value)
    def coords(self, value=None):
        """Return (x, y): the point along the centerline of the trough
        corresponding to VALUE, or to the current value when VALUE is
        None."""
        return self._getints(self.tk.call(self._w, 'coords', value))
    def identify(self, x, y):
        """Return the part of the widget under position X,Y.  Valid
        return values are "slider", "trough1" and "trough2"."""
        return self.tk.call(self._w, 'identify', x, y)
class Scrollbar(Widget):
    """Scrollbar widget which displays a slider at a certain position."""
    def __init__(self, master=None, cnf={}, **kw):
        """Create a scrollbar widget as a child of MASTER.
        Valid resource names: activebackground, activerelief,
        background, bd, bg, borderwidth, command, cursor,
        elementborderwidth, highlightbackground,
        highlightcolor, highlightthickness, jump, orient,
        relief, repeatdelay, repeatinterval, takefocus,
        troughcolor, width."""
        Widget.__init__(self, master, 'scrollbar', cnf, kw)
    def activate(self, index):
        """Show the element INDEX ("arrow1", "slider" or "arrow2") using
        the activebackground and activerelief resources."""
        self.tk.call(self._w, 'activate', index)
    def delta(self, deltax, deltay):
        """Return the fractional change of the scrollbar setting that a
        movement of DELTAX or DELTAY pixels would produce."""
        res = self.tk.call(self._w, 'delta', deltax, deltay)
        return getdouble(res)
    def fraction(self, x, y):
        """Return the fractional slider position corresponding to pixel
        coordinates X, Y."""
        res = self.tk.call(self._w, 'fraction', x, y)
        return getdouble(res)
    def identify(self, x, y):
        """Return which element lies under position X, Y: one of
        "arrow1", "slider", "arrow2" or ""."""
        return self.tk.call(self._w, 'identify', x, y)
    def get(self):
        """Return the current fractional (upper, lower) slider position."""
        return self._getdoubles(self.tk.call(self._w, 'get'))
    def set(self, *args):
        """Set the fractional slider position: upper and lower end as
        values between 0 and 1."""
        self.tk.call((self._w, 'set') + args)
class Text(Widget, XView, YView):
    """Text widget which can display text in various forms."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a text widget with the parent MASTER.

        STANDARD OPTIONS

            background, borderwidth, cursor,
            exportselection, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, insertbackground,
            insertborderwidth, insertofftime,
            insertontime, insertwidth, padx, pady,
            relief, selectbackground,
            selectborderwidth, selectforeground,
            setgrid, takefocus,
            xscrollcommand, yscrollcommand,

        WIDGET-SPECIFIC OPTIONS

            autoseparators, height, maxundo,
            spacing1, spacing2, spacing3,
            state, tabs, undo, width, wrap,

        """
        Widget.__init__(self, master, 'text', cnf, kw)
    def bbox(self, *args):
        """Return a tuple of (x,y,width,height) which gives the bounding
        box of the visible part of the character at the index in ARGS."""
        return self._getints(
            self.tk.call((self._w, 'bbox') + args)) or None
    # NOTE(review): the tk_text* methods below forward to legacy Tk
    # helper procedures; presumably internal/obsolete -- confirm.
    def tk_textSelectTo(self, index):
        self.tk.call('tk_textSelectTo', self._w, index)
    def tk_textBackspace(self):
        self.tk.call('tk_textBackspace', self._w)
    def tk_textIndexCloser(self, a, b, c):
        self.tk.call('tk_textIndexCloser', self._w, a, b, c)
    def tk_textResetAnchor(self, index):
        self.tk.call('tk_textResetAnchor', self._w, index)
    def compare(self, index1, op, index2):
        """Return whether between index INDEX1 and index INDEX2 the
        relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'compare', index1, op, index2))
    def count(self, index1, index2, *args): # new in Tk 8.5
        """Counts the number of relevant things between the two indices.
        If index1 is after index2, the result will be a negative number
        (and this holds for each of the possible options).
        The actual items which are counted depends on the options given by
        args. The result is a list of integers, one for the result of each
        counting option given. Valid counting options are "chars",
        "displaychars", "displayindices", "displaylines", "indices",
        "lines", "xpixels" and "ypixels". There is an additional possible
        option "update", which if given then all subsequent options ensure
        that any possible out of date information is recalculated."""
        args = ['-%s' % arg for arg in args if not arg.startswith('-')]
        args += [index1, index2]
        res = self.tk.call(self._w, 'count', *args) or None
        # args holds the two indices plus the options, so len(args) <= 3
        # means a single counting option was given; Tk then returns a
        # bare value, which is normalized here to a 1-tuple.
        if res is not None and len(args) <= 3:
            return (res, )
        else:
            return res
    def debug(self, boolean=None):
        """Turn on the internal consistency checks of the B-Tree inside the text
        widget according to BOOLEAN."""
        if boolean is None:
            return self.tk.getboolean(self.tk.call(self._w, 'debug'))
        self.tk.call(self._w, 'debug', boolean)
    def delete(self, index1, index2=None):
        """Delete the characters between INDEX1 and INDEX2 (not included)."""
        self.tk.call(self._w, 'delete', index1, index2)
    def dlineinfo(self, index):
        """Return tuple (x,y,width,height,baseline) giving the bounding box
        and baseline position of the visible part of the line containing
        the character at INDEX."""
        return self._getints(self.tk.call(self._w, 'dlineinfo', index))
    def dump(self, index1, index2=None, command=None, **kw):
        """Return the contents of the widget between index1 and index2.

        The type of contents returned is filtered based on the keyword
        parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are
        given and true, then the corresponding items are returned. The result
        is a list of triples of the form (key, value, index). If none of the
        keywords are true then 'all' is used by default.

        If the 'command' argument is given, it is called once for each element
        of the list of triples, with the values of each triple serving as the
        arguments to the function. In this case the list is not returned."""
        args = []
        func_name = None
        result = None
        if not command:
            # Never call the dump command without the -command flag, since the
            # output could involve Tcl quoting and would be a pain to parse
            # right. Instead just set the command to build a list of triples
            # as if we had done the parsing.
            result = []
            def append_triple(key, value, index, result=result):
                result.append((key, value, index))
            command = append_triple
        try:
            if not isinstance(command, str):
                func_name = command = self._register(command)
            args += ["-command", command]
            for key in kw:
                if kw[key]: args.append("-" + key)
            args.append(index1)
            if index2:
                args.append(index2)
            self.tk.call(self._w, "dump", *args)
            return result
        finally:
            # Drop the temporary Tcl command registered above, if any.
            if func_name:
                self.deletecommand(func_name)
    ## new in tk8.4
    def edit(self, *args):
        """Internal method
        This method controls the undo mechanism and
        the modified flag. The exact behavior of the
        command depends on the option argument that
        follows the edit argument. The following forms
        of the command are currently supported:
        edit_modified, edit_redo, edit_reset, edit_separator
        and edit_undo
        """
        return self.tk.call(self._w, 'edit', *args)
    def edit_modified(self, arg=None):
        """Get or Set the modified flag
        If arg is not specified, returns the modified
        flag of the widget. The insert, delete, edit undo and
        edit redo commands or the user can set or clear the
        modified flag. If boolean is specified, sets the
        modified flag of the widget to arg.
        """
        return self.edit("modified", arg)
    def edit_redo(self):
        """Redo the last undone edit
        When the undo option is true, reapplies the last
        undone edits provided no other edits were done since
        then. Generates an error when the redo stack is empty.
        Does nothing when the undo option is false.
        """
        return self.edit("redo")
    def edit_reset(self):
        """Clears the undo and redo stacks
        """
        return self.edit("reset")
    def edit_separator(self):
        """Inserts a separator (boundary) on the undo stack.
        Does nothing when the undo option is false
        """
        return self.edit("separator")
    def edit_undo(self):
        """Undoes the last edit action
        If the undo option is true. An edit action is defined
        as all the insert and delete commands that are recorded
        on the undo stack in between two separators. Generates
        an error when the undo stack is empty. Does nothing
        when the undo option is false
        """
        return self.edit("undo")
    def get(self, index1, index2=None):
        """Return the text from INDEX1 to INDEX2 (not included)."""
        return self.tk.call(self._w, 'get', index1, index2)
    # (Image commands are new in 8.0)
    def image_cget(self, index, option):
        """Return the value of OPTION of an embedded image at INDEX."""
        if option[:1] != "-":
            option = "-" + option
        if option[-1:] == "_":
            option = option[:-1]
        return self.tk.call(self._w, "image", "cget", index, option)
    def image_configure(self, index, cnf=None, **kw):
        """Configure an embedded image at INDEX."""
        return self._configure(('image', 'configure', index), cnf, kw)
    def image_create(self, index, cnf={}, **kw):
        """Create an embedded image at INDEX."""
        return self.tk.call(
                 self._w, "image", "create", index,
                 *self._options(cnf, kw))
    def image_names(self):
        """Return all names of embedded images in this widget."""
        return self.tk.call(self._w, "image", "names")
    def index(self, index):
        """Return the index in the form line.char for INDEX."""
        return str(self.tk.call(self._w, 'index', index))
    def insert(self, index, chars, *args):
        """Insert CHARS before the characters at INDEX. An additional
        tag can be given in ARGS. Additional CHARS and tags can follow in ARGS."""
        self.tk.call((self._w, 'insert', index, chars) + args)
    def mark_gravity(self, markName, direction=None):
        """Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT).
        Return the current value if None is given for DIRECTION."""
        return self.tk.call(
            (self._w, 'mark', 'gravity', markName, direction))
    def mark_names(self):
        """Return all mark names."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'mark', 'names'))
    def mark_set(self, markName, index):
        """Set mark MARKNAME before the character at INDEX."""
        self.tk.call(self._w, 'mark', 'set', markName, index)
    def mark_unset(self, *markNames):
        """Delete all marks in MARKNAMES."""
        self.tk.call((self._w, 'mark', 'unset') + markNames)
    def mark_next(self, index):
        """Return the name of the next mark after INDEX."""
        return self.tk.call(self._w, 'mark', 'next', index) or None
    def mark_previous(self, index):
        """Return the name of the previous mark before INDEX."""
        return self.tk.call(self._w, 'mark', 'previous', index) or None
    def peer_create(self, newPathName, cnf={}, **kw): # new in Tk 8.5
        """Creates a peer text widget with the given newPathName, and any
        optional standard configuration options. By default the peer will
        have the same start and end line as the parent widget, but
        these can be overridden with the standard configuration options."""
        self.tk.call(self._w, 'peer', 'create', newPathName,
            *self._options(cnf, kw))
    def peer_names(self): # new in Tk 8.5
        """Returns a list of peers of this widget (this does not include
        the widget itself)."""
        return self.tk.splitlist(self.tk.call(self._w, 'peer', 'names'))
    def replace(self, index1, index2, chars, *args): # new in Tk 8.5
        """Replaces the range of characters between index1 and index2 with
        the given characters and tags specified by args.

        See the method insert for some more information about args, and the
        method delete for information about the indices."""
        self.tk.call(self._w, 'replace', index1, index2, chars, *args)
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y):
        """Adjust the view of the text to 10 times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y)
    def search(self, pattern, index, stopindex=None,
           forwards=None, backwards=None, exact=None,
           regexp=None, nocase=None, count=None, elide=None):
        """Search PATTERN beginning from INDEX until STOPINDEX.
        Return the index of the first character of a match or an
        empty string."""
        args = [self._w, 'search']
        if forwards: args.append('-forwards')
        if backwards: args.append('-backwards')
        if exact: args.append('-exact')
        if regexp: args.append('-regexp')
        if nocase: args.append('-nocase')
        if elide: args.append('-elide')
        if count: args.append('-count'); args.append(count)
        # A pattern starting with '-' would otherwise be parsed as a
        # switch; '--' ends option processing.
        if pattern and pattern[0] == '-': args.append('--')
        args.append(pattern)
        args.append(index)
        if stopindex: args.append(stopindex)
        return str(self.tk.call(tuple(args)))
    def see(self, index):
        """Scroll such that the character at INDEX is visible."""
        self.tk.call(self._w, 'see', index)
    def tag_add(self, tagName, index1, *args):
        """Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS.
        Additional pairs of indices may follow in ARGS."""
        self.tk.call(
            (self._w, 'tag', 'add', tagName, index1) + args)
    def tag_unbind(self, tagName, sequence, funcid=None):
        """Unbind for all characters with TAGNAME for event SEQUENCE the
        function identified with FUNCID."""
        self.tk.call(self._w, 'tag', 'bind', tagName, sequence, '')
        if funcid:
            self.deletecommand(funcid)
    def tag_bind(self, tagName, sequence, func, add=None):
        """Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC.
        An additional boolean parameter ADD specifies whether FUNC will be
        called additionally to the other bound function or whether it will
        replace the previous function. See bind for the return value."""
        return self._bind((self._w, 'tag', 'bind', tagName),
                  sequence, func, add)
    def tag_cget(self, tagName, option):
        """Return the value of OPTION for tag TAGNAME."""
        if option[:1] != '-':
            option = '-' + option
        if option[-1:] == '_':
            option = option[:-1]
        return self.tk.call(self._w, 'tag', 'cget', tagName, option)
    def tag_configure(self, tagName, cnf=None, **kw):
        """Configure a tag TAGNAME."""
        return self._configure(('tag', 'configure', tagName), cnf, kw)
    tag_config = tag_configure
    def tag_delete(self, *tagNames):
        """Delete all tags in TAGNAMES."""
        self.tk.call((self._w, 'tag', 'delete') + tagNames)
    def tag_lower(self, tagName, belowThis=None):
        """Change the priority of tag TAGNAME such that it is lower
        than the priority of BELOWTHIS."""
        self.tk.call(self._w, 'tag', 'lower', tagName, belowThis)
    def tag_names(self, index=None):
        """Return a list of all tag names."""
        return self.tk.splitlist(
            self.tk.call(self._w, 'tag', 'names', index))
    def tag_nextrange(self, tagName, index1, index2=None):
        """Return a list of start and end index for the first sequence of
        characters between INDEX1 and INDEX2 which all have tag TAGNAME.
        The text is searched forward from INDEX1."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'tag', 'nextrange', tagName, index1, index2))
    def tag_prevrange(self, tagName, index1, index2=None):
        """Return a list of start and end index for the first sequence of
        characters between INDEX1 and INDEX2 which all have tag TAGNAME.
        The text is searched backwards from INDEX1."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'tag', 'prevrange', tagName, index1, index2))
    def tag_raise(self, tagName, aboveThis=None):
        """Change the priority of tag TAGNAME such that it is higher
        than the priority of ABOVETHIS."""
        self.tk.call(
            self._w, 'tag', 'raise', tagName, aboveThis)
    def tag_ranges(self, tagName):
        """Return a list of ranges of text which have tag TAGNAME."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'tag', 'ranges', tagName))
    def tag_remove(self, tagName, index1, index2=None):
        """Remove tag TAGNAME from all characters between INDEX1 and INDEX2."""
        self.tk.call(
            self._w, 'tag', 'remove', tagName, index1, index2)
    def window_cget(self, index, option):
        """Return the value of OPTION of an embedded window at INDEX."""
        if option[:1] != '-':
            option = '-' + option
        if option[-1:] == '_':
            option = option[:-1]
        return self.tk.call(self._w, 'window', 'cget', index, option)
    def window_configure(self, index, cnf=None, **kw):
        """Configure an embedded window at INDEX."""
        return self._configure(('window', 'configure', index), cnf, kw)
    window_config = window_configure
    def window_create(self, index, cnf={}, **kw):
        """Create a window at INDEX."""
        self.tk.call(
              (self._w, 'window', 'create', index)
              + self._options(cnf, kw))
    def window_names(self):
        """Return all names of embedded windows in this widget."""
        return self.tk.splitlist(
            self.tk.call(self._w, 'window', 'names'))
    def yview_pickplace(self, *what):
        """Obsolete function, use see."""
        self.tk.call((self._w, 'yview', '-pickplace') + what)
class _setit:
"""Internal class. It wraps the command in the widget OptionMenu."""
def __init__(self, var, value, callback=None):
self.__value = value
self.__var = var
self.__callback = callback
def __call__(self, *args):
self.__var.set(self.__value)
if self.__callback:
self.__callback(self.__value, *args)
class OptionMenu(Menubutton):
    """OptionMenu which allows the user to select a value from a menu."""
    def __init__(self, master, variable, value, *values, **kwargs):
        """Construct an optionmenu widget with the parent MASTER, with
        the resource textvariable set to VARIABLE, the initially selected
        value VALUE, the other menu values VALUES and an additional
        keyword argument command."""
        kw = {"borderwidth": 2, "textvariable": variable,
              "indicatoron": 1, "relief": RAISED, "anchor": "c",
              "highlightthickness": 2}
        Widget.__init__(self, master, "menubutton", kw)
        self.widgetName = 'tk_optionMenu'
        menu = self.__menu = Menu(self, name="menu", tearoff=0)
        self.menuname = menu._w
        # 'command' is the only supported keyword
        callback = kwargs.pop('command', None)
        if kwargs:
            # BUG FIX: dict views are not subscriptable in Python 3, so
            # the old "kwargs.keys()[0]" raised TypeError instead of the
            # intended TclError naming the offending option.
            raise TclError('unknown option -' + next(iter(kwargs)))
        menu.add_command(label=value,
                 command=_setit(variable, value, callback))
        for v in values:
            menu.add_command(label=v,
                     command=_setit(variable, v, callback))
        self["menu"] = menu

    def __getitem__(self, name):
        # Expose the private menu through the 'menu' pseudo-option.
        if name == 'menu':
            return self.__menu
        return Widget.__getitem__(self, name)

    def destroy(self):
        """Destroy this widget and the associated menu."""
        Menubutton.destroy(self)
        self.__menu = None
class Image:
    """Base class for images."""
    # Counter used to synthesize unique default image names.
    _last_id = 0
    def __init__(self, imgtype, name=None, cnf={}, master=None, **kw):
        """Create an image of type IMGTYPE (e.g. "photo" or "bitmap")
        called NAME in MASTER's Tcl interpreter.

        Configuration options may be given in CNF and/or as keyword
        arguments.  When NAME is omitted, a unique "pyimage<n>" name is
        generated.
        """
        self.name = None
        if not master:
            master = _default_root
        if not master:
            raise RuntimeError('Too early to create image')
        self.tk = master.tk
        if not name:
            Image._last_id += 1
            name = "pyimage%r" % (Image._last_id,) # tk itself would use image<x>
            # The following is needed for systems where id(x)
            # can return a negative number, such as Linux/m68k:
            if name[0] == '-': name = '_' + name[1:]
        if kw and cnf: cnf = _cnfmerge((cnf, kw))
        elif kw: cnf = kw
        options = ()
        for k, v in cnf.items():
            if callable(v):
                # NOTE(review): Image itself defines no _register; a
                # callable option value relies on one being available
                # elsewhere -- confirm, otherwise this raises
                # AttributeError.
                v = self._register(v)
            options = options + ('-'+k, v)
        self.tk.call(('image', 'create', imgtype, name,) + options)
        self.name = name
    def __str__(self): return self.name
    def __del__(self):
        if self.name:
            try:
                self.tk.call('image', 'delete', self.name)
            except TclError:
                # May happen if the root was destroyed
                pass
    def __setitem__(self, key, value):
        self.tk.call(self.name, 'configure', '-'+key, value)
    def __getitem__(self, key):
        # NOTE(review): 'configure -key' returns the option's full spec
        # rather than just its value (contrast PhotoImage.__getitem__,
        # which uses cget) -- confirm this is intended.
        return self.tk.call(self.name, 'configure', '-'+key)
    def configure(self, **kw):
        """Configure the image."""
        res = ()
        for k, v in _cnfmerge(kw).items():
            if v is not None:
                # Drop a trailing underscore from option names.
                if k[-1] == '_': k = k[:-1]
                if callable(v):
                    v = self._register(v)
                res = res + ('-'+k, v)
        self.tk.call((self.name, 'config') + res)
    config = configure
    def height(self):
        """Return the height of the image."""
        return getint(
            self.tk.call('image', 'height', self.name))
    def type(self):
        """Return the type of the image, e.g. "photo" or "bitmap"."""
        return self.tk.call('image', 'type', self.name)
    def width(self):
        """Return the width of the image."""
        return getint(
            self.tk.call('image', 'width', self.name))
class PhotoImage(Image):
    """Widget which can display colored images in GIF, PPM/PGM format."""
    def __init__(self, name=None, cnf={}, master=None, **kw):
        """Create an image with NAME.
        Valid resource names: data, format, file, gamma, height, palette,
        width."""
        Image.__init__(self, 'photo', name, cnf, master, **kw)
    def blank(self):
        """Display a transparent image."""
        self.tk.call(self.name, 'blank')
    def cget(self, option):
        """Return the value of OPTION."""
        return self.tk.call(self.name, 'cget', '-' + option)
    # XXX config
    def __getitem__(self, key):
        # Unlike the base class, return only the option's value (cget).
        return self.tk.call(self.name, 'cget', '-' + key)
    # XXX copy -from, -to, ...?
    def copy(self):
        """Return a new PhotoImage with the same image as this widget."""
        # NOTE(review): the new image is created without a master, i.e.
        # on the default root's interpreter -- confirm that is intended.
        destImage = PhotoImage()
        self.tk.call(destImage, 'copy', self.name)
        return destImage
    def zoom(self,x,y=''):
        """Return a new PhotoImage with the same image as this widget
        but zoom it with X and Y."""
        destImage = PhotoImage()
        # An omitted Y defaults to the X factor (uniform zoom).
        if y=='': y=x
        self.tk.call(destImage, 'copy', self.name, '-zoom',x,y)
        return destImage
    def subsample(self,x,y=''):
        """Return a new PhotoImage based on the same image as this widget
        but use only every Xth or Yth pixel."""
        destImage = PhotoImage()
        # An omitted Y defaults to the X factor (uniform subsampling).
        if y=='': y=x
        self.tk.call(destImage, 'copy', self.name, '-subsample',x,y)
        return destImage
    def get(self, x, y):
        """Return the color (red, green, blue) of the pixel at X,Y."""
        return self.tk.call(self.name, 'get', x, y)
    def put(self, data, to=None):
        """Put row formatted colors to image starting from
        position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6))"""
        args = (self.name, 'put', data)
        if to:
            # Also accept a pre-built ('-to', x, y, ...) tuple for TO.
            if to[0] == '-to':
                to = to[1:]
            args = args + ('-to',) + tuple(to)
        self.tk.call(args)
    # XXX read
    def write(self, filename, format=None, from_coords=None):
        """Write image to file FILENAME in FORMAT starting from
        position FROM_COORDS."""
        args = (self.name, 'write', filename)
        if format:
            args = args + ('-format', format)
        if from_coords:
            args = args + ('-from',) + tuple(from_coords)
        self.tk.call(args)
class BitmapImage(Image):
    """Widget which can display a bitmap."""
    def __init__(self, name=None, cnf={}, master=None, **kw):
        """Create a bitmap with NAME.
        Valid resource names: background, data, file, foreground, maskdata, maskfile."""
        # Delegates to Image with the fixed Tk image type 'bitmap'.
        Image.__init__(self, 'bitmap', name, cnf, master, **kw)
def image_names():
    """Return the names of all images defined in the default root's
    Tcl interpreter."""
    tk = _default_root.tk
    return tk.splitlist(tk.call('image', 'names'))
def image_types():
    """Return the image types supported by the default root's
    Tcl interpreter."""
    tk = _default_root.tk
    return tk.splitlist(tk.call('image', 'types'))
class Spinbox(Widget, XView):
"""spinbox widget."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a spinbox widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, background, borderwidth,
            cursor, exportselection, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, insertbackground,
            insertborderwidth, insertofftime,
            insertontime, insertwidth, justify, relief,
            repeatdelay, repeatinterval,
            selectbackground, selectborderwidth,
            selectforeground, takefocus, textvariable,
            xscrollcommand.

        WIDGET-SPECIFIC OPTIONS

            buttonbackground, buttoncursor,
            buttondownrelief, buttonuprelief,
            command, disabledbackground,
            disabledforeground, format, from,
            invalidcommand, increment,
            readonlybackground, state, to,
            validate, validatecommand, values,
            width, wrap,
        """
        Widget.__init__(self, master, 'spinbox', cnf, kw)
def bbox(self, index):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a
rectangle which encloses the character given by index.
The first two elements of the list give the x and y
coordinates of the upper-left corner of the screen
area covered by the character (in pixels relative
to the widget) and the last two elements give the
width and height of the character, in pixels. The
bounding box may refer to a region outside the
visible area of the window.
"""
return self._getints(self.tk.call(self._w, 'bbox', index)) or None
def delete(self, first, last=None):
"""Delete one or more elements of the spinbox.
First is the index of the first character to delete,
and last is the index of the character just after
the last one to delete. If last isn't specified it
defaults to first+1, i.e. a single character is
deleted. This command returns an empty string.
"""
return self.tk.call(self._w, 'delete', first, last)
def get(self):
    """Return the spinbox's current string contents."""
    return self.tk.call(self._w, 'get')

def icursor(self, index):
    """Move the insertion cursor so it is displayed just before the
    character given by *index*.  Returns an empty string.
    """
    return self.tk.call(self._w, 'icursor', index)

def identify(self, x, y):
    """Return the name of the widget component at position (x, y).

    The return value is one of: 'none', 'buttondown', 'buttonup',
    'entry'.
    """
    return self.tk.call(self._w, 'identify', x, y)

def index(self, index):
    """Return the numerical index corresponding to *index*."""
    return self.tk.call(self._w, 'index', index)

def insert(self, index, s):
    """Insert string *s* at *index*.  Returns an empty string."""
    return self.tk.call(self._w, 'insert', index, s)

def invoke(self, element):
    """Invoke the spin *element* ('buttondown' or 'buttonup'),
    triggering the action associated with it.
    """
    return self.tk.call(self._w, 'invoke', element)
def scan(self, *args):
    """Internal helper: forward a 'scan' subcommand to Tk."""
    # Tk's scan commands return an empty result; map that to ().
    return self._getints(
        self.tk.call((self._w, 'scan') + args)) or ()

def scan_mark(self, x):
    """Record *x* and the current view in the spinbox window, for use
    with later scan_dragto calls.

    Typically bound to a mouse button press in the widget.  Returns an
    empty string.
    """
    return self.scan("mark", x)

def scan_dragto(self, x):
    """Adjust the view left or right by 10 times the difference
    between *x* and the x given to the last scan_mark call.

    Typically bound to mouse motion events in the widget, to produce
    the effect of dragging the spinbox content at high speed.  Returns
    an empty string.
    """
    return self.scan("dragto", x)
def selection(self, *args):
    """Internal helper: forward a 'selection' subcommand to Tk."""
    return self._getints(
        self.tk.call((self._w, 'selection') + args)) or ()

def selection_adjust(self, index):
    """Locate the end of the selection nearest to the character given
    by *index* and adjust that end to be at *index* (including but not
    going beyond it).

    The other end of the selection becomes the anchor point for future
    'select to' commands.  If the selection isn't currently in the
    spinbox, a new selection is created between *index* and the most
    recent selection anchor point, inclusive.  Returns an empty
    string.
    """
    return self.selection("adjust", index)

def selection_clear(self):
    """Clear the selection.

    If the selection isn't in this widget the command has no effect.
    Returns an empty string.
    """
    return self.selection("clear")

def selection_element(self, element=None):
    """Set or get the currently selected element.

    If a spinbutton *element* is specified, it will be displayed
    depressed.
    """
    return self.selection("element", element)
###########################################################################
class LabelFrame(Widget):
    """labelframe widget: a frame that can display a label around its
    border."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a labelframe widget with the parent MASTER.

        STANDARD OPTIONS

            borderwidth, cursor, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, padx, pady, relief,
            takefocus, text

        WIDGET-SPECIFIC OPTIONS

            background, class, colormap, container,
            height, labelanchor, labelwidget,
            visual, width
        """
        # NOTE(review): the shared mutable default cnf={} follows the
        # convention used throughout this module; it is assumed to be
        # treated read-only by Widget.__init__.
        Widget.__init__(self, master, 'labelframe', cnf, kw)
########################################################################
class PanedWindow(Widget):
    """panedwindow widget: a container that holds child widgets in
    resizable panes separated by movable sashes."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a panedwindow widget with the parent MASTER.

        STANDARD OPTIONS

            background, borderwidth, cursor, height,
            orient, relief, width

        WIDGET-SPECIFIC OPTIONS

            handlepad, handlesize, opaqueresize,
            sashcursor, sashpad, sashrelief,
            sashwidth, showhandle,
        """
        Widget.__init__(self, master, 'panedwindow', cnf, kw)

    def add(self, child, **kw):
        """Add a child widget to the panedwindow in a new pane.

        The *child* argument is the name of the child widget followed
        by pairs of arguments that specify how to manage the windows.
        The possible options and values are the ones accepted by the
        paneconfigure method.
        """
        self.tk.call((self._w, 'add', child) + self._options(kw))

    def remove(self, child):
        """Remove the pane containing *child* from the panedwindow.

        All geometry management options for *child* will be forgotten.
        """
        self.tk.call(self._w, 'forget', child)

    # Tk calls this subcommand 'forget'; expose both names.
    forget = remove

    def identify(self, x, y):
        """Identify the panedwindow component at point (x, y).

        If the point is over a sash or a sash handle, the result is a
        two element list containing the index of the sash or handle,
        and a word indicating whether it is over a sash or a handle,
        such as {0 sash} or {2 handle}.  If the point is over any
        other part of the panedwindow, the result is an empty list.
        """
        return self.tk.call(self._w, 'identify', x, y)

    def proxy(self, *args):
        """Internal helper: forward a 'proxy' subcommand to Tk."""
        return self._getints(
            self.tk.call((self._w, 'proxy') + args)) or ()

    def proxy_coord(self):
        """Return the x and y pair of the most recent proxy location."""
        return self.proxy("coord")

    def proxy_forget(self):
        """Remove the proxy from the display."""
        return self.proxy("forget")

    def proxy_place(self, x, y):
        """Place the proxy at the given x and y coordinates."""
        return self.proxy("place", x, y)

    def sash(self, *args):
        """Internal helper: forward a 'sash' subcommand to Tk."""
        return self._getints(
            self.tk.call((self._w, 'sash') + args)) or ()

    def sash_coord(self, index):
        """Return the current x and y pair for the sash given by
        *index*.

        Index must be an integer between 0 and 1 less than the number
        of panes in the panedwindow.  The coordinates given are those
        of the top left corner of the region containing the sash.
        """
        return self.sash("coord", index)

    def sash_mark(self, index):
        """Record x and y for the sash given by *index*; used in
        conjunction with later dragto commands to move the sash.
        """
        return self.sash("mark", index)

    def sash_place(self, index, x, y):
        """Place the sash given by *index* at the given coordinates."""
        return self.sash("place", index, x, y)

    def panecget(self, child, option):
        """Query a management option for window *child*.

        *option* may be any value allowed by the paneconfigure
        subcommand.
        """
        return self.tk.call(
            (self._w, 'panecget') + (child, '-'+option))

    def paneconfigure(self, tagOrId, cnf=None, **kw):
        """Query or modify the management options for window *tagOrId*.

        With no option, return a list describing all available options
        for the pane.  With an option name but no value, return a list
        describing that one option.  With one or more option-value
        pairs, modify the pane and return an empty string.

        Supported options (see the Tk panedwindow man page for full
        details):

          after window / before window
            Insert the window after/before the named window, which
            must already be managed by the panedwindow.
          height size / width size
            Outer dimension of the window including its border; an
            empty string means use the window's requested size.  May
            later be adjusted by sash movement.  Accepts any
            Tk_GetPixels value.
          minsize n
            Minimum size of the window in the paned dimension (x for
            horizontal, y for vertical panedwindows).
          padx n / pady n
            Non-negative extra space on each side of the window in the
            X/Y direction.
          sticky style
            A string of zero or more of n, s, e, w (spaces/commas
            ignored) telling which side(s) of its pane the window
            sticks to; opposite letters stretch the window to fill the
            pane in that dimension.
        """
        # No option at all -> full configuration listing.
        if cnf is None and not kw:
            return self._getconfigure(self._w, 'paneconfigure', tagOrId)
        # Single option name -> description of that one option.
        if isinstance(cnf, str) and not kw:
            return self._getconfigure1(
                self._w, 'paneconfigure', tagOrId, '-'+cnf)
        # Otherwise apply the given option-value pairs.
        self.tk.call((self._w, 'paneconfigure', tagOrId) +
                     self._options(cnf, kw))

    # Short alias kept for backward compatibility.
    paneconfig = paneconfigure

    def panes(self):
        """Return an ordered list of the child panes."""
        return self.tk.splitlist(self.tk.call(self._w, 'panes'))
######################################################################
# Extensions:
class Studbutton(Button):
    """Button extension that re-wires the stock Tk button behaviour
    onto a 'studbutton' widget."""

    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'studbutton', cnf, kw)
        # Attach the standard hover/press/release handlers in order.
        for sequence, handler in (
                ('<Any-Enter>', self.tkButtonEnter),
                ('<Any-Leave>', self.tkButtonLeave),
                ('<1>', self.tkButtonDown),
                ('<ButtonRelease-1>', self.tkButtonUp)):
            self.bind(sequence, handler)
class Tributton(Button):
    """Button extension that re-wires the stock Tk button behaviour
    onto a 'tributton' widget and hides its text against the
    background."""

    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'tributton', cnf, kw)
        # Attach the standard hover/press/release handlers in order.
        for sequence, handler in (
                ('<Any-Enter>', self.tkButtonEnter),
                ('<Any-Leave>', self.tkButtonLeave),
                ('<1>', self.tkButtonDown),
                ('<ButtonRelease-1>', self.tkButtonUp)):
            self.bind(sequence, handler)
        # Match foreground and active background to the background
        # colour, exactly as the original widget did.
        self['fg'] = self['bg']
        self['activebackground'] = self['bg']
######################################################################
# Test:
def _test():
    """Interactive smoke test: open a window with a version label and
    two buttons ('Click me!' mutates its own text; 'QUIT' closes)."""
    root = Tk()
    text = "This is Tcl/Tk version %s" % TclVersion
    if TclVersion >= 8.1:
        # Non-ASCII output only works from Tcl/Tk 8.1 onwards.
        text += "\nThis should be a cedilla: \xe7"
    label = Label(root, text=text)
    label.pack()
    # The lambda default binds root early; it wraps the button's own
    # text in brackets every time it is clicked.
    test = Button(root, text="Click me!",
                  command=lambda root=root: root.test.configure(
                      text="[%s]" % root.test['text']))
    test.pack()
    root.test = test
    # NOTE(review): 'quit' shadows the builtin of the same name; local
    # to this function only.
    quit = Button(root, text="QUIT", command=root.destroy)
    quit.pack()
    # The following three commands are needed so the window pops
    # up on top on Windows...
    root.iconify()
    root.update()
    root.deiconify()
    root.mainloop()

if __name__ == '__main__':
    _test()
| mit |
wuzheng-sjtu/FastFPN | libs/boxes/anchor.py | 2 | 4237 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from libs.boxes import cython_anchor
def anchors(scales=[2, 4, 8, 16, 32], ratios=[0.5, 1, 2.0], base=16):
    """Get a set of anchors at one position.

    Returns a (len(scales) * len(ratios), 4) array of (x1, y1, x2, y2)
    boxes built around a `base`-sized reference window.
    """
    # NOTE(review): the mutable list defaults are shared across calls;
    # they are never mutated here, so this is harmless but fragile.
    return generate_anchors(base_size=base, scales=np.asarray(scales, np.int32), ratios=ratios)

def anchors_plane(height, width, stride = 1.0,
                  scales=[2, 4, 8, 16, 32], ratios=[0.5, 1, 2.0], base=16):
    """Get a complete set of anchors in a spatial plane.

    height, width are plane dimensions; stride is presumably the
    feature stride (input pixels per feature-map cell) — TODO confirm
    against cython_anchor.anchors_plane.  The per-position enumeration
    is delegated to the compiled cython_anchor helper.
    """
    # TODO: implement in C, or pre-compute them, or set to a fixed input-shape
    # enum all anchors in a plane
    # scales = kwargs.setdefault('scales', [2, 4, 8, 16, 32])
    # ratios = kwargs.setdefault('ratios', [0.5, 1, 2.0])
    # base = kwargs.setdefault('base', 16)
    anc = anchors(scales, ratios, base)
    all_anchors = cython_anchor.anchors_plane(height, width, stride, anc)
    return all_anchors
# Written by Ross Girshick and Sean Bell
def generate_anchors(base_size=16, ratios=[0.5, 1, 2],
scales=2 ** np.arange(3, 6)):
"""
Generate anchor (reference) windows by enumerating aspect ratios X
scales wrt a reference (0, 0, 15, 15) window.
"""
base_anchor = np.array([1, 1, base_size, base_size]) - 1
ratio_anchors = _ratio_enum(base_anchor, ratios)
anchors = np.vstack([_scale_enum(ratio_anchors[i, :], scales)
for i in xrange(ratio_anchors.shape[0])])
return anchors
def _whctrs(anchor):
"""
Return width, height, x center, and y center for an anchor (window).
"""
w = anchor[2] - anchor[0] + 1
h = anchor[3] - anchor[1] + 1
x_ctr = anchor[0] + 0.5 * (w - 1)
y_ctr = anchor[1] + 0.5 * (h - 1)
return w, h, x_ctr, y_ctr
def _mkanchors(ws, hs, x_ctr, y_ctr):
"""
Given a vector of widths (ws) and heights (hs) around a center
(x_ctr, y_ctr), output a set of anchors (windows).
"""
ws = ws[:, np.newaxis]
hs = hs[:, np.newaxis]
anchors = np.hstack((x_ctr - 0.5 * (ws - 1),
y_ctr - 0.5 * (hs - 1),
x_ctr + 0.5 * (ws - 1),
y_ctr + 0.5 * (hs - 1)))
return anchors
def _ratio_enum(anchor, ratios):
"""
Enumerate a set of anchors for each aspect ratio wrt an anchor.
"""
w, h, x_ctr, y_ctr = _whctrs(anchor)
size = w * h
size_ratios = size / ratios
ws = np.round(np.sqrt(size_ratios))
hs = np.round(ws * ratios)
anchors = _mkanchors(ws, hs, x_ctr, y_ctr)
return anchors
def _scale_enum(anchor, scales):
"""
Enumerate a set of anchors for each scale wrt an anchor.
"""
w, h, x_ctr, y_ctr = _whctrs(anchor)
ws = w * scales
hs = h * scales
anchors = _mkanchors(ws, hs, x_ctr, y_ctr)
return anchors
def _unmap(data, count, inds, fill=0):
""" Unmap a subset of item (data) back to the original set of items (of
size count) """
if len(data.shape) == 1:
ret = np.empty((count,), dtype=np.float32)
ret.fill(fill)
ret[inds] = data
else:
ret = np.empty((count,) + data.shape[1:], dtype=np.float32)
ret.fill(fill)
ret[inds, :] = data
return ret
if __name__ == '__main__':
    # Micro-benchmark: time anchor-plane generation over 10 iterations
    # across four pyramid levels (strides 4, 8, 16, 32) and report the
    # average time and anchor count per iteration.
    import time
    t = time.time()
    a = anchors()
    num_anchors = 0
    # all_anchors = anchors_plane(200, 250, stride=4, boarder=0)
    # num_anchors += all_anchors.shape[0]
    for i in range(10):
        ancs = anchors()
        all_anchors = cython_anchor.anchors_plane(200, 250, 4, ancs)
        num_anchors += all_anchors.shape[0] * all_anchors.shape[1] * all_anchors.shape[2]
        all_anchors = cython_anchor.anchors_plane(100, 125, 8, ancs)
        num_anchors += all_anchors.shape[0] * all_anchors.shape[1] * all_anchors.shape[2]
        all_anchors = cython_anchor.anchors_plane(50, 63, 16, ancs)
        num_anchors += all_anchors.shape[0] * all_anchors.shape[1] * all_anchors.shape[2]
        all_anchors = cython_anchor.anchors_plane(25, 32, 32, ancs)
        num_anchors += all_anchors.shape[0] * all_anchors.shape[1] * all_anchors.shape[2]
    print('average time: %f' % ((time.time() - t) / 10))
    print('anchors: %d' % (num_anchors / 10))
    print(a.shape, '\n', a)
    print (all_anchors.shape)
    # from IPython import embed
    # embed()
| apache-2.0 |
ahmed-mahran/hue | desktop/core/ext-py/pysaml2-2.4.0/src/saml2/extension/pefim.py | 33 | 1972 | #!/usr/bin/env python
import saml2
from saml2 import SamlBase
from xmldsig import X509Data
NAMESPACE = 'urn:net:eustix:names:tc:PEFIM:0.0:assertion'
class SPCertEncType_(SamlBase):
    """The urn:net:eustix:names:tc:PEFIM:0.0:assertion:SPCertEncType
    element.

    Carries zero or more ds:X509Data children holding the service
    provider's certificate data.
    """
    c_tag = 'SPCertEncType'
    c_namespace = NAMESPACE
    # Copy the parent registries so the X509Data child added below
    # stays local to this class.
    c_children = SamlBase.c_children.copy()
    c_attributes = SamlBase.c_attributes.copy()
    c_child_order = SamlBase.c_child_order[:]
    c_cardinality = SamlBase.c_cardinality.copy()
    c_children['{http://www.w3.org/2000/09/xmldsig#}X509Data'] = ('x509_data',
                                                                  [X509Data])

    def __init__(self,
                 x509_data=None,
                 text=None,
                 extension_elements=None,
                 extension_attributes=None):
        SamlBase.__init__(self,
                          text=text,
                          extension_elements=extension_elements,
                          extension_attributes=extension_attributes)
        # List of X509Data children (or None when the element is empty).
        self.x509_data = x509_data
def spcertenc_type__from_string(xml_string):
    """Parse an SPCertEncType element from *xml_string*."""
    return saml2.create_class_from_xml_string(SPCertEncType_, xml_string)

class SPCertEnc(SPCertEncType_):
    """The urn:net:eustix:names:tc:PEFIM:0.0:assertion:SPCertEnc
    element — the concrete element form of SPCertEncType."""
    c_tag = 'SPCertEnc'
    c_namespace = NAMESPACE
    c_children = SPCertEncType_.c_children.copy()
    c_attributes = SPCertEncType_.c_attributes.copy()
    c_child_order = SPCertEncType_.c_child_order[:]
    c_cardinality = SPCertEncType_.c_cardinality.copy()

def spcertenc_from_string(xml_string):
    """Parse an SPCertEnc element from *xml_string*."""
    return saml2.create_class_from_xml_string(SPCertEnc, xml_string)

# Registries used by the pysaml2 extension machinery to look up
# parsers and element classes by tag name.
ELEMENT_FROM_STRING = {
    SPCertEnc.c_tag: spcertenc_from_string,
    SPCertEncType_.c_tag: spcertenc_type__from_string,
}

ELEMENT_BY_TAG = {
    'SPCertEnc': SPCertEnc,
    'SPCertEncType': SPCertEncType_,
}

def factory(tag, **kwargs):
    """Instantiate the element class registered for *tag*."""
    return ELEMENT_BY_TAG[tag](**kwargs)
| apache-2.0 |
MediaSapiens/autonormix | django/contrib/gis/tests/geoapp/test_feeds.py | 16 | 3851 | from xml.dom import minidom
from django.test import Client
from django.utils import unittest
from models import City
class GeoFeedTest(unittest.TestCase):
    """Tests for GeoDjango syndication feeds (GeoRSS over RSSv2/Atom
    and W3C Geo), exercised through the test client against the
    geoapp feed URLs."""

    client = Client()

    def assertChildNodes(self, elem, expected):
        "Taken from regressiontests/syndication/tests.py."
        actual = set([n.nodeName for n in elem.childNodes])
        expected = set(expected)
        self.assertEqual(actual, expected)

    def test_geofeed_rss(self):
        "Tests geographic feeds using GeoRSS over RSSv2."
        # Uses `GEOSGeometry` in `item_geometry`
        doc1 = minidom.parseString(self.client.get('/geoapp/feeds/rss1/').content)
        # Uses a 2-tuple in `item_geometry`
        doc2 = minidom.parseString(self.client.get('/geoapp/feeds/rss2/').content)
        feed1, feed2 = doc1.firstChild, doc2.firstChild

        # Making sure the box got added to the second GeoRSS feed.
        self.assertChildNodes(feed2.getElementsByTagName('channel')[0],
                              ['title', 'link', 'description', 'language',
                               'lastBuildDate', 'item', 'georss:box', 'atom:link']
                              )

        # Incrementing through the feeds.
        for feed in [feed1, feed2]:
            # Ensuring the georss namespace was added to the <rss> element.
            self.assertEqual(feed.getAttribute(u'xmlns:georss'), u'http://www.georss.org/georss')
            chan = feed.getElementsByTagName('channel')[0]
            items = chan.getElementsByTagName('item')
            self.assertEqual(len(items), City.objects.count())

            # Ensuring the georss element was added to each item in the feed.
            for item in items:
                self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'georss:point'])

    def test_geofeed_atom(self):
        "Testing geographic feeds using GeoRSS over Atom."
        doc1 = minidom.parseString(self.client.get('/geoapp/feeds/atom1/').content)
        doc2 = minidom.parseString(self.client.get('/geoapp/feeds/atom2/').content)
        feed1, feed2 = doc1.firstChild, doc2.firstChild

        # Making sure the box got added to the second GeoRSS feed.
        self.assertChildNodes(feed2, ['title', 'link', 'id', 'updated', 'entry', 'georss:box'])

        for feed in [feed1, feed2]:
            # Ensuring the georsss namespace was added to the <feed> element.
            self.assertEqual(feed.getAttribute(u'xmlns:georss'), u'http://www.georss.org/georss')
            entries = feed.getElementsByTagName('entry')
            self.assertEqual(len(entries), City.objects.count())

            # Ensuring the georss element was added to each entry in the feed.
            for entry in entries:
                self.assertChildNodes(entry, ['title', 'link', 'id', 'summary', 'georss:point'])

    def test_geofeed_w3c(self):
        "Testing geographic feeds using W3C Geo."
        doc = minidom.parseString(self.client.get('/geoapp/feeds/w3cgeo1/').content)
        feed = doc.firstChild
        # Ensuring the geo namespace was added to the <feed> element.
        self.assertEqual(feed.getAttribute(u'xmlns:geo'), u'http://www.w3.org/2003/01/geo/wgs84_pos#')
        chan = feed.getElementsByTagName('channel')[0]
        items = chan.getElementsByTagName('item')
        self.assertEqual(len(items), City.objects.count())

        # Ensuring the geo:lat and geo:lon element was added to each item in the feed.
        for item in items:
            self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'geo:lat', 'geo:lon'])

        # Boxes and Polygons aren't allowed in W3C Geo feeds.
        self.assertRaises(ValueError, self.client.get, '/geoapp/feeds/w3cgeo2/') # Box in <channel>
        self.assertRaises(ValueError, self.client.get, '/geoapp/feeds/w3cgeo3/') # Polygons in <entry>
| bsd-3-clause |
Sbalbp/DIRAC | ResourceStatusSystem/Command/GGUSTicketsCommand.py | 7 | 5609 | # $HeadURL: $
''' GGUSTicketsCommand
The GGUSTickets_Command class is a command class to know about
the number of active present opened tickets.
'''
import urllib2
from DIRAC import gLogger, S_ERROR, S_OK
from DIRAC.Core.LCG.GGUSTicketsClient import GGUSTicketsClient
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getGOCSiteName
from DIRAC.ResourceStatusSystem.Client.ResourceManagementClient import ResourceManagementClient
from DIRAC.ResourceStatusSystem.Command.Command import Command
from DIRAC.ResourceStatusSystem.Utilities import CSHelpers
__RCSID__ = '$Id: $'
class GGUSTicketsCommand( Command ):
  '''
  GGUSTickets "master" Command.

  Fetches the list of open GGUS tickets per GOC site, stores them in
  the ResourceManagement cache table and serves them back from that
  cache.
  '''

  def __init__( self, args = None, clients = None ):

    super( GGUSTicketsCommand, self ).__init__( args, clients )

    # Allow dependency injection of the clients for testing; fall back
    # to fresh instances otherwise.
    if 'GGUSTicketsClient' in self.apis:
      self.gClient = self.apis[ 'GGUSTicketsClient' ]
    else:
      self.gClient = GGUSTicketsClient()

    if 'ResourceManagementClient' in self.apis:
      self.rmClient = self.apis[ 'ResourceManagementClient' ]
    else:
      self.rmClient = ResourceManagementClient()

  def _storeCommand( self, result ):
    '''
    Stores the results of doNew method on the database.

    :param result: list of dicts with keys GocSite, Link, OpenTickets,
                   Tickets as built by doNew.
    :return: S_OK, or the first failing S_ERROR from the DB client.
    '''

    for ggus in result:
      resQuery = self.rmClient.addOrModifyGGUSTicketsCache( ggus[ 'GocSite' ],
                                                            ggus[ 'Link' ],
                                                            ggus[ 'OpenTickets' ],
                                                            ggus[ 'Tickets' ] )
      if not resQuery[ 'OK' ]:
        return resQuery

    return S_OK()

  def _prepareCommand( self ):
    '''
    GGUSTicketsCommand requires one argument:
    - name : <str>

    GGUSTickets are associated with gocDB names, so we have to transform the
    diracSiteName into a gocSiteName.
    '''

    if not 'name' in self.args:
      return S_ERROR( '"name" not found in self.args' )
    name = self.args[ 'name' ]

    return getGOCSiteName( name )

  def doNew( self, masterParams = None ):
    '''
    Gets the parameters to run, either from the master method or from its
    own arguments.

    For every elementName ( cannot process bulk queries.. ) contacts the
    ggus client. The server is not very stable, so we protect against crashes.

    If there are ggus tickets, are recorded and then returned.
    '''

    if masterParams is not None:
      gocName = masterParams
      gocNames = [ gocName ]
    else:
      gocName = self._prepareCommand()
      if not gocName[ 'OK' ]:
        return gocName
      gocName = gocName[ 'Value' ]
      gocNames = [ gocName ]

    # The GGUS server is unreliable; shield callers from URL errors.
    try:
      results = self.gClient.getTicketsList( gocName )
    except urllib2.URLError, e:
      return S_ERROR( '%s %s' % ( gocName, e ) )

    if not results[ 'OK' ]:
      return results
    results = results[ 'Value' ]

    uniformResult = []

    for gocSite, ggusResult in results.items():

      # Only keep the site(s) we were asked about.
      if not gocSite in gocNames:
        continue

      ggusDict = {}
      ggusDict[ 'GocSite' ] = gocSite
      ggusDict[ 'Link' ] = ggusResult[ 'URL' ]
      # Remove the URL entry so only per-priority ticket dicts remain.
      del ggusResult[ 'URL' ]

      # Total open tickets = sum of tickets over all priorities.
      openTickets = 0
      for priorityDict in ggusResult.values():
        openTickets += len( priorityDict )

      ggusDict[ 'Tickets' ] = ggusResult
      ggusDict[ 'OpenTickets' ] = openTickets

      uniformResult.append( ggusDict )

    storeRes = self._storeCommand( uniformResult )
    if not storeRes[ 'OK' ]:
      return storeRes

    return S_OK( uniformResult )

  def doCache( self ):
    '''
    Method that reads the cache table and tries to read from it. It will
    return a list of dictionaries if there are results.
    '''

    gocName = self._prepareCommand()
    if not gocName[ 'OK' ]:
      return gocName
    gocName = gocName[ 'Value' ]

    result = self.rmClient.selectGGUSTicketsCache( gocSite = gocName )
    if result[ 'OK' ]:
      # Re-shape the DB rows into a list of column->value dicts.
      result = S_OK( [ dict( zip( result[ 'Columns' ], res ) ) for res in result[ 'Value' ] ] )

    return result

  def doMaster( self ):
    '''
    Master method, which looks little bit spaguetti code, sorry !
    - It gets all gocSites.

    As there is no bulk query, it compares with what we have on the database.
    It queries a portion of them.
    '''

    gocSites = CSHelpers.getGOCSites()
    if not gocSites[ 'OK' ]:
      return gocSites
    gocSites = gocSites[ 'Value' ]

    # resQuery = self.rmClient.selectGGUSTicketsCache( meta = { 'columns' : [ 'GocSite' ] } )
    # if not resQuery[ 'OK' ]:
    #   return resQuery
    # resQuery = [ element[0] for element in resQuery[ 'Value' ] ]
    #
    # gocNamesToQuery = set( gocSites ).difference( set( resQuery ) )

    gLogger.info( 'Processing %s' % ', '.join( gocSites ) )

    for gocNameToQuery in gocSites:

      # if gocNameToQuery is None:
      #   self.metrics[ 'failed' ].append( 'None result' )
      #   continue

      result = self.doNew( gocNameToQuery )
      if not result[ 'OK' ]:
        self.metrics[ 'failed' ].append( result )

    return S_OK( self.metrics )
################################################################################
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF | gpl-3.0 |
Split-Screen/android_kernel_semc_msm7x30 | tools/perf/scripts/python/futex-contention.py | 11261 | 1486 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
# Per-thread tracking of in-flight FUTEX_WAIT calls, plus long-lived
# aggregates reported at the end of the trace.
thread_thislock = {}   # tid -> futex uaddr the thread is waiting on
thread_blocktime = {}  # tid -> timestamp (ns) when the wait began

lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
process_names = {} # long-lived pid-to-execname mapping
# (a redundant duplicate `process_names = {}` initialization was removed)
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
	nr, uaddr, op, val, utime, uaddr2, val3):
	# Record the start of a FUTEX_WAIT so the exit handler can compute
	# how long this thread stayed blocked on uaddr.
	cmd = op & FUTEX_CMD_MASK
	if cmd != FUTEX_WAIT:
		return # we don't care about originators of WAKE events

	process_names[tid] = comm
	thread_thislock[tid] = uaddr
	thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
	nr, ret):
	# Only threads recorded by the enter handler (i.e. FUTEX_WAIT
	# callers) are tracked; all other futex exits are ignored.
	# IDIOM FIX: `dict.has_key` is deprecated in Python 2 and removed
	# in Python 3; the `in` operator is equivalent on both.
	if tid in thread_blocktime:
		elapsed = nsecs(s, ns) - thread_blocktime[tid]
		add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
		del thread_blocktime[tid]
		del thread_thislock[tid]
def trace_begin():
	# Called once by perf before event processing starts.
	print "Press control+C to stop and show the summary"

def trace_end():
	# Called once by perf after the trace ends: dump per-(tid, lock)
	# contention statistics accumulated in lock_waits.
	for (tid, lock) in lock_waits:
		min, max, avg, count = lock_waits[tid, lock]
		print "%s[%d] lock %x contended %d times, %d avg ns" % \
			(process_names[tid], tid, lock, count, avg)
aferr/TemporalPartitioningMemCtl | src/arch/x86/isa/insts/simd64/floating_point/arithmetic/subtraction.py | 91 | 2766 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
# gem5 x86 microcode implementing the 3DNow! PFSUB / PFSUBR packed
# single-precision subtract macro-ops, in register, memory and
# RIP-relative operand forms.  The string is consumed by gem5's
# microcode assembler; its contents are data and must not be edited
# as ordinary Python.
microcode = '''
def macroop PFSUB_MMX_MMX {
msubf mmx, mmx, mmxm, size=4, ext=0
};
def macroop PFSUB_MMX_M {
ldfp ufp1, seg, sib, disp, dataSize=8
msubf mmx, mmx, ufp1, size=4, ext=0
};
def macroop PFSUB_MMX_P {
rdip t7
ldfp ufp1, seg, riprel, disp, dataSize=8
msubf mmx, mmx, ufp1, size=4, ext=0
};
def macroop PFSUBR_MMX_MMX {
msubf mmx, mmxm, mmx, size=4, ext=0
};
def macroop PFSUBR_MMX_M {
ldfp ufp1, seg, sib, disp, dataSize=8
msubf mmx, ufp1, mmx, size=4, ext=0
};
def macroop PFSUBR_MMX_P {
rdip t7
ldfp ufp1, seg, riprel, disp, dataSize=8
msubf mmx, ufp1, mmx, size=4, ext=0
};
'''
| bsd-3-clause |
himmih/cluedo | venv/lib/python2.7/site-packages/jinja2/environment.py | 332 | 48120 | # -*- coding: utf-8 -*-
"""
jinja2.environment
~~~~~~~~~~~~~~~~~~
Provides a class that holds runtime and parsing time options.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
from jinja2 import nodes
from jinja2.defaults import BLOCK_START_STRING, \
BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \
COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \
LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \
DEFAULT_FILTERS, DEFAULT_TESTS, DEFAULT_NAMESPACE, \
KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS
from jinja2.lexer import get_lexer, TokenStream
from jinja2.parser import Parser
from jinja2.nodes import EvalContext
from jinja2.optimizer import optimize
from jinja2.compiler import generate, CodeGenerator
from jinja2.runtime import Undefined, new_context, Context
from jinja2.exceptions import TemplateSyntaxError, TemplateNotFound, \
TemplatesNotFound, TemplateRuntimeError
from jinja2.utils import import_string, LRUCache, Markup, missing, \
concat, consume, internalcode
from jinja2._compat import imap, ifilter, string_types, iteritems, \
text_type, reraise, implements_iterator, implements_to_string, \
get_next, encode_filename, PY2, PYPY
from functools import reduce
# for direct template usage we have up to ten living environments
_spontaneous_environments = LRUCache(10)
# the function to create jinja traceback objects. This is dynamically
# imported on the first exception in the exception handler.
_make_traceback = None
def get_spontaneous_environment(*args):
    """Return a new spontaneous environment.  A spontaneous environment
    is an unnamed and unaccessible (in theory) environment that is used
    for templates generated from a string and not from the file system.

    Environments are memoized on their constructor arguments; when the
    arguments are unhashable a fresh, uncached environment is returned.
    """
    try:
        cached = _spontaneous_environments.get(args)
    except TypeError:
        # Unhashable arguments cannot be used as a cache key.
        return Environment(*args)
    if cached is None:
        cached = Environment(*args)
        cached.shared = True
        _spontaneous_environments[args] = cached
    return cached
def create_cache(size):
    """Return the cache container for the given size.

    size == 0 disables caching (None); a negative size means an
    unbounded dict cache; a positive size yields an LRU cache of that
    capacity.
    """
    if size == 0:
        return None
    return {} if size < 0 else LRUCache(size)


def copy_cache(cache):
    """Create an empty copy of the given cache, preserving its kind
    (None, plain dict, or LRUCache with the same capacity)."""
    if cache is None:
        return None
    # Exact type check on purpose: only a plain dict maps to a dict.
    if type(cache) is dict:
        return {}
    return LRUCache(cache.capacity)
def load_extensions(environment, extensions):
    """Instantiate the given extensions for *environment* and bind them
    to it.

    Entries may be extension classes or dotted import-path strings.
    Returns a dict mapping extension identifier -> instance.
    """
    loaded = {}
    for ext in extensions:
        cls = import_string(ext) if isinstance(ext, string_types) else ext
        loaded[cls.identifier] = cls(environment)
    return loaded
def _environment_sanity_check(environment):
    """Perform a sanity check on the environment.

    Raises AssertionError on an invalid configuration and returns the
    environment unchanged otherwise, so it can be used inline.
    """
    assert issubclass(environment.undefined, Undefined), 'undefined must ' \
        'be a subclass of undefined because filters depend on it.'
    # The lexer cannot disambiguate identical delimiters.
    assert environment.block_start_string != \
        environment.variable_start_string != \
        environment.comment_start_string, 'block, variable and comment ' \
        'start strings must be different'
    assert environment.newline_sequence in ('\r', '\r\n', '\n'), \
        'newline_sequence set to unknown line ending string.'
    return environment
class Environment(object):
    r"""The core component of Jinja is the `Environment`.  It contains
    important shared variables like configuration, filters, tests,
    globals and others.  Instances of this class may be modified if
    they are not shared and if no template was loaded so far.
    Modifications on environments after the first template was loaded
    will lead to surprising effects and undefined behavior.

    Here are the possible initialization parameters:

        `block_start_string`
            The string marking the beginning of a block.  Defaults to ``'{%'``.

        `block_end_string`
            The string marking the end of a block.  Defaults to ``'%}'``.

        `variable_start_string`
            The string marking the beginning of a print statement.
            Defaults to ``'{{'``.

        `variable_end_string`
            The string marking the end of a print statement.  Defaults to
            ``'}}'``.

        `comment_start_string`
            The string marking the beginning of a comment.  Defaults to ``'{#'``.

        `comment_end_string`
            The string marking the end of a comment.  Defaults to ``'#}'``.

        `line_statement_prefix`
            If given and a string, this will be used as prefix for line based
            statements.  See also :ref:`line-statements`.

        `line_comment_prefix`
            If given and a string, this will be used as prefix for line based
            comments.  See also :ref:`line-statements`.

            .. versionadded:: 2.2

        `trim_blocks`
            If this is set to ``True`` the first newline after a block is
            removed (block, not variable tag!).  Defaults to `False`.

        `lstrip_blocks`
            If this is set to ``True`` leading spaces and tabs are stripped
            from the start of a line to a block.  Defaults to `False`.

        `newline_sequence`
            The sequence that starts a newline.  Must be one of ``'\r'``,
            ``'\n'`` or ``'\r\n'``.  The default is ``'\n'`` which is a
            useful default for Linux and OS X systems as well as web
            applications.

        `keep_trailing_newline`
            Preserve the trailing newline when rendering templates.
            The default is ``False``, which causes a single newline,
            if present, to be stripped from the end of the template.

            .. versionadded:: 2.7

        `extensions`
            List of Jinja extensions to use.  This can either be import paths
            as strings or extension classes.  For more information have a
            look at :ref:`the extensions documentation <jinja-extensions>`.

        `optimized`
            should the optimizer be enabled?  Default is `True`.

        `undefined`
            :class:`Undefined` or a subclass of it that is used to represent
            undefined values in the template.

        `finalize`
            A callable that can be used to process the result of a variable
            expression before it is output.  For example one can convert
            `None` implicitly into an empty string here.

        `autoescape`
            If set to true the XML/HTML autoescaping feature is enabled by
            default.  For more details about autoescaping see
            :class:`~jinja2.utils.Markup`.  As of Jinja 2.4 this can also
            be a callable that is passed the template name and has to
            return `True` or `False` depending on autoescape should be
            enabled by default.

            .. versionchanged:: 2.4
               `autoescape` can now be a function

        `loader`
            The template loader for this environment.

        `cache_size`
            The size of the cache.  Per default this is ``400`` which means
            that if more than 400 templates are loaded the loader will clean
            out the least recently used template.  If the cache size is set to
            ``0`` templates are recompiled all the time, if the cache size is
            ``-1`` the cache will not be cleaned.

            .. versionchanged:: 2.8
               The cache size was increased to 400 from a low 50.

        `auto_reload`
            Some loaders load templates from locations where the template
            sources may change (ie: file system or database).  If
            `auto_reload` is set to `True` (default) every time a template is
            requested the loader checks if the source changed and if yes, it
            will reload the template.  For higher performance it's possible to
            disable that.

        `bytecode_cache`
            If set to a bytecode cache object, this object will provide a
            cache for the internal Jinja bytecode so that templates don't
            have to be parsed if they were not changed.

            See :ref:`bytecode-cache` for more information.
    """
    #: if this environment is sandboxed.  Modifying this variable won't make
    #: the environment sandboxed though.  For a real sandboxed environment
    #: have a look at jinja2.sandbox.  This flag alone controls the code
    #: generation by the compiler.
    sandboxed = False
    #: True if the environment is just an overlay
    overlayed = False
    #: the environment this environment is linked to if it is an overlay
    linked_to = None
    #: shared environments have this set to `True`.  A shared environment
    #: must not be modified
    shared = False
    #: these are currently EXPERIMENTAL undocumented features.
    exception_handler = None
    exception_formatter = None
    #: the class that is used for code generation.  See
    #: :class:`~jinja2.compiler.CodeGenerator` for more information.
    code_generator_class = CodeGenerator
    #: the context class that is used for templates.  See
    #: :class:`~jinja2.runtime.Context` for more information.
    context_class = Context
    def __init__(self,
                 block_start_string=BLOCK_START_STRING,
                 block_end_string=BLOCK_END_STRING,
                 variable_start_string=VARIABLE_START_STRING,
                 variable_end_string=VARIABLE_END_STRING,
                 comment_start_string=COMMENT_START_STRING,
                 comment_end_string=COMMENT_END_STRING,
                 line_statement_prefix=LINE_STATEMENT_PREFIX,
                 line_comment_prefix=LINE_COMMENT_PREFIX,
                 trim_blocks=TRIM_BLOCKS,
                 lstrip_blocks=LSTRIP_BLOCKS,
                 newline_sequence=NEWLINE_SEQUENCE,
                 keep_trailing_newline=KEEP_TRAILING_NEWLINE,
                 extensions=(),
                 optimized=True,
                 undefined=Undefined,
                 finalize=None,
                 autoescape=False,
                 loader=None,
                 cache_size=400,
                 auto_reload=True,
                 bytecode_cache=None):
        """Create a new environment.  See the class docstring for the
        meaning of every parameter.
        """
        # !!Important notice!!
        #   The constructor accepts quite a few arguments that should be
        #   passed by keyword rather than position.  However it's important to
        #   not change the order of arguments because it's used at least
        #   internally in those cases:
        #       -   spontaneous environments (i18n extension and Template)
        #       -   unittests
        #   If parameter changes are required only add parameters at the end
        #   and don't change the arguments (or the defaults!) of the arguments
        #   existing already.
        # lexer / parser information
        self.block_start_string = block_start_string
        self.block_end_string = block_end_string
        self.variable_start_string = variable_start_string
        self.variable_end_string = variable_end_string
        self.comment_start_string = comment_start_string
        self.comment_end_string = comment_end_string
        self.line_statement_prefix = line_statement_prefix
        self.line_comment_prefix = line_comment_prefix
        self.trim_blocks = trim_blocks
        self.lstrip_blocks = lstrip_blocks
        self.newline_sequence = newline_sequence
        self.keep_trailing_newline = keep_trailing_newline
        # runtime information
        self.undefined = undefined
        self.optimized = optimized
        self.finalize = finalize
        self.autoescape = autoescape
        # defaults
        self.filters = DEFAULT_FILTERS.copy()
        self.tests = DEFAULT_TESTS.copy()
        self.globals = DEFAULT_NAMESPACE.copy()
        # set the loader provided
        self.loader = loader
        self.cache = create_cache(cache_size)
        self.bytecode_cache = bytecode_cache
        self.auto_reload = auto_reload
        # load extensions
        self.extensions = load_extensions(self, extensions)
        _environment_sanity_check(self)
    def add_extension(self, extension):
        """Adds an extension after the environment was created.

        .. versionadded:: 2.5
        """
        self.extensions.update(load_extensions(self, [extension]))
    def extend(self, **attributes):
        """Add the items to the instance of the environment if they do not exist
        yet.  This is used by :ref:`extensions <writing-extensions>` to register
        callbacks and configuration values without breaking inheritance.
        """
        for key, value in iteritems(attributes):
            if not hasattr(self, key):
                setattr(self, key, value)
    def overlay(self, block_start_string=missing, block_end_string=missing,
                variable_start_string=missing, variable_end_string=missing,
                comment_start_string=missing, comment_end_string=missing,
                line_statement_prefix=missing, line_comment_prefix=missing,
                trim_blocks=missing, lstrip_blocks=missing,
                extensions=missing, optimized=missing,
                undefined=missing, finalize=missing, autoescape=missing,
                loader=missing, cache_size=missing, auto_reload=missing,
                bytecode_cache=missing):
        """Create a new overlay environment that shares all the data with the
        current environment except for cache and the overridden attributes.
        Extensions cannot be removed for an overlayed environment.  An overlayed
        environment automatically gets all the extensions of the environment it
        is linked to plus optional extra extensions.

        Creating overlays should happen after the initial environment was set
        up completely.  Not all attributes are truly linked, some are just
        copied over so modifications on the original environment may not shine
        through.
        """
        # Snapshot of all keyword arguments; must happen before any other
        # local is created so that `locals()` contains only the parameters.
        args = dict(locals())
        del args['self'], args['cache_size'], args['extensions']
        rv = object.__new__(self.__class__)
        rv.__dict__.update(self.__dict__)
        rv.overlayed = True
        rv.linked_to = self
        # only parameters explicitly passed (not `missing`) override the
        # values copied from the original environment
        for key, value in iteritems(args):
            if value is not missing:
                setattr(rv, key, value)
        if cache_size is not missing:
            rv.cache = create_cache(cache_size)
        else:
            rv.cache = copy_cache(self.cache)
        # extensions are re-bound to the overlay so callbacks see `rv`
        rv.extensions = {}
        for key, value in iteritems(self.extensions):
            rv.extensions[key] = value.bind(rv)
        if extensions is not missing:
            rv.extensions.update(load_extensions(rv, extensions))
        return _environment_sanity_check(rv)
    # ``get_lexer`` is presumably a cached module-level factory defined
    # elsewhere in this file -- it derives the lexer from the environment's
    # delimiter configuration.
    lexer = property(get_lexer, doc="The lexer for this environment.")
    def iter_extensions(self):
        """Iterates over the extensions by priority."""
        return iter(sorted(self.extensions.values(),
                           key=lambda x: x.priority))
    def getitem(self, obj, argument):
        """Get an item or attribute of an object but prefer the item."""
        try:
            return obj[argument]
        except (TypeError, LookupError):
            if isinstance(argument, string_types):
                try:
                    # presumably needed for Python 2 where getattr requires
                    # a native str -- TODO confirm
                    attr = str(argument)
                except Exception:
                    pass
                else:
                    try:
                        return getattr(obj, attr)
                    except AttributeError:
                        pass
            return self.undefined(obj=obj, name=argument)
    def getattr(self, obj, attribute):
        """Get an item or attribute of an object but prefer the attribute.
        Unlike :meth:`getitem` the attribute *must* be a bytestring.
        """
        try:
            return getattr(obj, attribute)
        except AttributeError:
            pass
        try:
            return obj[attribute]
        except (TypeError, LookupError, AttributeError):
            return self.undefined(obj=obj, name=attribute)
    def call_filter(self, name, value, args=None, kwargs=None,
                    context=None, eval_ctx=None):
        """Invokes a filter on a value the same way the compiler does it.

        :raises TemplateRuntimeError: if the filter does not exist or a
            context filter is invoked without a context.

        .. versionadded:: 2.7
        """
        func = self.filters.get(name)
        if func is None:
            raise TemplateRuntimeError('no filter named %r' % name)
        args = [value] + list(args or ())
        # context/evalcontext/environment filters receive their extra first
        # argument exactly like compiler-generated calls do
        if getattr(func, 'contextfilter', False):
            if context is None:
                raise TemplateRuntimeError('Attempted to invoke context '
                                           'filter without context')
            args.insert(0, context)
        elif getattr(func, 'evalcontextfilter', False):
            if eval_ctx is None:
                if context is not None:
                    eval_ctx = context.eval_ctx
                else:
                    eval_ctx = EvalContext(self)
            args.insert(0, eval_ctx)
        elif getattr(func, 'environmentfilter', False):
            args.insert(0, self)
        return func(*args, **(kwargs or {}))
    def call_test(self, name, value, args=None, kwargs=None):
        """Invokes a test on a value the same way the compiler does it.

        :raises TemplateRuntimeError: if no test with that name exists.

        .. versionadded:: 2.7
        """
        func = self.tests.get(name)
        if func is None:
            raise TemplateRuntimeError('no test named %r' % name)
        return func(value, *(args or ()), **(kwargs or {}))
    @internalcode
    def parse(self, source, name=None, filename=None):
        """Parse the sourcecode and return the abstract syntax tree.  This
        tree of nodes is used by the compiler to convert the template into
        executable source- or bytecode.  This is useful for debugging or to
        extract information from templates.

        If you are :ref:`developing Jinja2 extensions <writing-extensions>`
        this gives you a good overview of the node tree generated.
        """
        try:
            return self._parse(source, name, filename)
        except TemplateSyntaxError:
            # handle_exception re-raises a rewritten exception here
            exc_info = sys.exc_info()
        self.handle_exception(exc_info, source_hint=source)
    def _parse(self, source, name, filename):
        """Internal parsing function used by `parse` and `compile`."""
        return Parser(self, source, name, encode_filename(filename)).parse()
    def lex(self, source, name=None, filename=None):
        """Lex the given sourcecode and return a generator that yields
        tokens as tuples in the form ``(lineno, token_type, value)``.
        This can be useful for :ref:`extension development <writing-extensions>`
        and debugging templates.

        This does not perform preprocessing.  If you want the preprocessing
        of the extensions to be applied you have to filter source through
        the :meth:`preprocess` method.
        """
        source = text_type(source)
        try:
            return self.lexer.tokeniter(source, name, filename)
        except TemplateSyntaxError:
            exc_info = sys.exc_info()
        self.handle_exception(exc_info, source_hint=source)
    def preprocess(self, source, name=None, filename=None):
        """Preprocesses the source with all extensions.  This is automatically
        called for all parsing and compiling methods but *not* for :meth:`lex`
        because there you usually only want the actual source tokenized.
        """
        # each extension's preprocess() is chained over the source,
        # in priority order
        return reduce(lambda s, e: e.preprocess(s, name, filename),
                      self.iter_extensions(), text_type(source))
    def _tokenize(self, source, name, filename=None, state=None):
        """Called by the parser to do the preprocessing and filtering
        for all the extensions.  Returns a :class:`~jinja2.lexer.TokenStream`.
        """
        source = self.preprocess(source, name, filename)
        stream = self.lexer.tokenize(source, name, filename, state)
        for ext in self.iter_extensions():
            stream = ext.filter_stream(stream)
            # an extension may return a plain iterable; wrap it again
            if not isinstance(stream, TokenStream):
                stream = TokenStream(stream, name, filename)
        return stream
    def _generate(self, source, name, filename, defer_init=False):
        """Internal hook that can be overridden to hook a different generate
        method in.

        .. versionadded:: 2.5
        """
        return generate(source, self, name, filename, defer_init=defer_init)
    def _compile(self, source, filename):
        """Internal hook that can be overridden to hook a different compile
        method in.

        .. versionadded:: 2.5
        """
        return compile(source, filename, 'exec')
    @internalcode
    def compile(self, source, name=None, filename=None, raw=False,
                defer_init=False):
        """Compile a node or template source code.  The `name` parameter is
        the load name of the template after it was joined using
        :meth:`join_path` if necessary, not the filename on the file system.
        the `filename` parameter is the estimated filename of the template on
        the file system.  If the template came from a database or memory this
        can be omitted.

        The return value of this method is a python code object.  If the `raw`
        parameter is `True` the return value will be a string with python
        code equivalent to the bytecode returned otherwise.  This method is
        mainly used internally.

        `defer_init` is use internally to aid the module code generator.  This
        causes the generated code to be able to import without the global
        environment variable to be set.

        .. versionadded:: 2.4
           `defer_init` parameter added.
        """
        source_hint = None
        try:
            if isinstance(source, string_types):
                source_hint = source
                source = self._parse(source, name, filename)
            if self.optimized:
                source = optimize(source, self)
            source = self._generate(source, name, filename,
                                    defer_init=defer_init)
            if raw:
                return source
            if filename is None:
                filename = '<template>'
            else:
                filename = encode_filename(filename)
            return self._compile(source, filename)
        except TemplateSyntaxError:
            exc_info = sys.exc_info()
        self.handle_exception(exc_info, source_hint=source_hint)
    def compile_expression(self, source, undefined_to_none=True):
        """A handy helper method that returns a callable that accepts keyword
        arguments that appear as variables in the expression.  If called it
        returns the result of the expression.

        This is useful if applications want to use the same rules as Jinja
        in template "configuration files" or similar situations.

        Example usage:

        >>> env = Environment()
        >>> expr = env.compile_expression('foo == 42')
        >>> expr(foo=23)
        False
        >>> expr(foo=42)
        True

        Per default the return value is converted to `None` if the
        expression returns an undefined value.  This can be changed
        by setting `undefined_to_none` to `False`.

        >>> env.compile_expression('var')() is None
        True
        >>> env.compile_expression('var', undefined_to_none=False)()
        Undefined

        .. versionadded:: 2.1
        """
        parser = Parser(self, source, state='variable')
        exc_info = None
        try:
            expr = parser.parse_expression()
            if not parser.stream.eos:
                raise TemplateSyntaxError('chunk after expression',
                                          parser.stream.current.lineno,
                                          None, None)
            expr.set_environment(self)
        except TemplateSyntaxError:
            exc_info = sys.exc_info()
        # errors are re-raised outside the except block via handle_exception
        if exc_info is not None:
            self.handle_exception(exc_info, source_hint=source)
        # wrap the expression in a synthetic template that stores its value
        # in the context variable 'result'
        body = [nodes.Assign(nodes.Name('result', 'store'), expr, lineno=1)]
        template = self.from_string(nodes.Template(body, lineno=1))
        return TemplateExpression(template, undefined_to_none)
    def compile_templates(self, target, extensions=None, filter_func=None,
                          zip='deflated', log_function=None,
                          ignore_errors=True, py_compile=False):
        """Finds all the templates the loader can find, compiles them
        and stores them in `target`.  If `zip` is `None`, instead of in a
        zipfile, the templates will be stored in a directory.
        By default a deflate zip algorithm is used.  To switch to
        the stored algorithm, `zip` can be set to ``'stored'``.

        `extensions` and `filter_func` are passed to :meth:`list_templates`.
        Each template returned will be compiled to the target folder or
        zipfile.

        By default template compilation errors are ignored.  In case a
        log function is provided, errors are logged.  If you want template
        syntax errors to abort the compilation you can set `ignore_errors`
        to `False` and you will get an exception on syntax errors.

        If `py_compile` is set to `True` .pyc files will be written to the
        target instead of standard .py files.  This flag does not do anything
        on pypy and Python 3 where pyc files are not picked up by itself and
        don't give much benefit.

        .. versionadded:: 2.4
        """
        from jinja2.loaders import ModuleLoader
        if log_function is None:
            log_function = lambda x: None
        if py_compile:
            if not PY2 or PYPY:
                from warnings import warn
                warn(Warning('py_compile has no effect on pypy or Python 3'))
                py_compile = False
            else:
                import imp
                import marshal
                # magic number + dummy timestamp form the .pyc header
                py_header = imp.get_magic() + \
                    u'\xff\xff\xff\xff'.encode('iso-8859-15')
                # Python 3.3 added a source filesize to the header
                if sys.version_info >= (3, 3):
                    py_header += u'\x00\x00\x00\x00'.encode('iso-8859-15')
        def write_file(filename, data, mode):
            # writes either into the zip archive or into the target folder,
            # depending on the `zip` argument
            if zip:
                info = ZipInfo(filename)
                info.external_attr = 0o755 << 16
                zip_file.writestr(info, data)
            else:
                f = open(os.path.join(target, filename), mode)
                try:
                    f.write(data)
                finally:
                    f.close()
        if zip is not None:
            from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED
            zip_file = ZipFile(target, 'w', dict(deflated=ZIP_DEFLATED,
                                                 stored=ZIP_STORED)[zip])
            log_function('Compiling into Zip archive "%s"' % target)
        else:
            if not os.path.isdir(target):
                os.makedirs(target)
            log_function('Compiling into folder "%s"' % target)
        try:
            for name in self.list_templates(extensions, filter_func):
                source, filename, _ = self.loader.get_source(self, name)
                try:
                    code = self.compile(source, name, filename, True, True)
                except TemplateSyntaxError as e:
                    if not ignore_errors:
                        raise
                    log_function('Could not compile "%s": %s' % (name, e))
                    continue
                filename = ModuleLoader.get_module_filename(name)
                if py_compile:
                    c = self._compile(code, encode_filename(filename))
                    write_file(filename + 'c', py_header +
                               marshal.dumps(c), 'wb')
                    log_function('Byte-compiled "%s" as %s' %
                                 (name, filename + 'c'))
                else:
                    write_file(filename, code, 'w')
                    log_function('Compiled "%s" as %s' % (name, filename))
        finally:
            if zip:
                zip_file.close()
        log_function('Finished compiling templates')
    def list_templates(self, extensions=None, filter_func=None):
        """Returns a list of templates for this environment.  This requires
        that the loader supports the loader's
        :meth:`~BaseLoader.list_templates` method.

        If there are other files in the template folder besides the
        actual templates, the returned list can be filtered.  There are two
        ways: either `extensions` is set to a list of file extensions for
        templates, or a `filter_func` can be provided which is a callable that
        is passed a template name and should return `True` if it should end up
        in the result list.

        If the loader does not support that, a :exc:`TypeError` is raised.

        .. versionadded:: 2.4
        """
        x = self.loader.list_templates()
        if extensions is not None:
            if filter_func is not None:
                raise TypeError('either extensions or filter_func '
                                'can be passed, but not both')
            filter_func = lambda x: '.' in x and \
                x.rsplit('.', 1)[1] in extensions
        if filter_func is not None:
            x = list(ifilter(filter_func, x))
        return x
    def handle_exception(self, exc_info=None, rendered=False, source_hint=None):
        """Exception handling helper.  This is used internally to either raise
        rewritten exceptions or return a rendered traceback for the template.
        """
        global _make_traceback
        if exc_info is None:
            exc_info = sys.exc_info()
        # the debugging module is imported when it's used for the first time.
        # we're doing a lot of stuff there and for applications that do not
        # get any exceptions in template rendering there is no need to load
        # all of that.
        if _make_traceback is None:
            from jinja2.debug import make_traceback as _make_traceback
        traceback = _make_traceback(exc_info, source_hint)
        if rendered and self.exception_formatter is not None:
            return self.exception_formatter(traceback)
        if self.exception_handler is not None:
            self.exception_handler(traceback)
        exc_type, exc_value, tb = traceback.standard_exc_info
        reraise(exc_type, exc_value, tb)
    def join_path(self, template, parent):
        """Join a template with the parent.  By default all the lookups are
        relative to the loader root so this method returns the `template`
        parameter unchanged, but if the paths should be relative to the
        parent template, this function can be used to calculate the real
        template name.

        Subclasses may override this method and implement template path
        joining here.
        """
        return template
    @internalcode
    def _load_template(self, name, globals):
        if self.loader is None:
            raise TypeError('no loader for this environment specified')
        try:
            # use abs path for cache key
            cache_key = self.loader.get_source(self, name)[1]
        except RuntimeError:
            # if loader does not implement get_source()
            cache_key = None
        # if template is not file, use name for cache key
        if cache_key is None:
            cache_key = name
        if self.cache is not None:
            template = self.cache.get(cache_key)
            # a cached template is only reused when auto_reload is off or
            # the loader reports the source as unchanged
            if template is not None and (not self.auto_reload or
                                         template.is_up_to_date):
                return template
        template = self.loader.load(self, name, globals)
        if self.cache is not None:
            self.cache[cache_key] = template
        return template
    @internalcode
    def get_template(self, name, parent=None, globals=None):
        """Load a template from the loader.  If a loader is configured this
        method ask the loader for the template and returns a :class:`Template`.
        If the `parent` parameter is not `None`, :meth:`join_path` is called
        to get the real template name before loading.

        The `globals` parameter can be used to provide template wide globals.
        These variables are available in the context at render time.

        If the template does not exist a :exc:`TemplateNotFound` exception is
        raised.

        .. versionchanged:: 2.4
           If `name` is a :class:`Template` object it is returned from the
           function unchanged.
        """
        if isinstance(name, Template):
            return name
        if parent is not None:
            name = self.join_path(name, parent)
        return self._load_template(name, self.make_globals(globals))
    @internalcode
    def select_template(self, names, parent=None, globals=None):
        """Works like :meth:`get_template` but tries a number of templates
        before it fails.  If it cannot find any of the templates, it will
        raise a :exc:`TemplatesNotFound` exception.

        .. versionadded:: 2.3

        .. versionchanged:: 2.4
           If `names` contains a :class:`Template` object it is returned
           from the function unchanged.
        """
        if not names:
            raise TemplatesNotFound(message=u'Tried to select from an empty list '
                                            u'of templates.')
        globals = self.make_globals(globals)
        for name in names:
            if isinstance(name, Template):
                return name
            if parent is not None:
                name = self.join_path(name, parent)
            try:
                return self._load_template(name, globals)
            except TemplateNotFound:
                pass
        raise TemplatesNotFound(names)
    @internalcode
    def get_or_select_template(self, template_name_or_list,
                               parent=None, globals=None):
        """Does a typecheck and dispatches to :meth:`select_template`
        if an iterable of template names is given, otherwise to
        :meth:`get_template`.

        .. versionadded:: 2.3
        """
        if isinstance(template_name_or_list, string_types):
            return self.get_template(template_name_or_list, parent, globals)
        elif isinstance(template_name_or_list, Template):
            return template_name_or_list
        return self.select_template(template_name_or_list, parent, globals)
    def from_string(self, source, globals=None, template_class=None):
        """Load a template from a string.  This parses the source given and
        returns a :class:`Template` object.
        """
        globals = self.make_globals(globals)
        # ``template_class`` is presumably attached to Environment elsewhere
        # in this module -- TODO confirm
        cls = template_class or self.template_class
        return cls.from_code(self, self.compile(source), globals, None)
    def make_globals(self, d):
        """Return a dict for the globals."""
        if not d:
            return self.globals
        return dict(self.globals, **d)
class Template(object):
    """The central template object.  This class represents a compiled template
    and is used to evaluate it.

    Normally the template object is generated from an :class:`Environment` but
    it also has a constructor that makes it possible to create a template
    instance directly using the constructor.  It takes the same arguments as
    the environment constructor but it's not possible to specify a loader.

    Every template object has a few methods and members that are guaranteed
    to exist.  However it's important that a template object should be
    considered immutable.  Modifications on the object are not supported.

    Template objects created from the constructor rather than an environment
    do have an `environment` attribute that points to a temporary environment
    that is probably shared with other templates created with the constructor
    and compatible settings.

    >>> template = Template('Hello {{ name }}!')
    >>> template.render(name='John Doe') == u'Hello John Doe!'
    True

    >>> stream = template.stream(name='John Doe')
    >>> next(stream) == u'Hello John Doe!'
    True
    >>> next(stream)
    Traceback (most recent call last):
        ...
    StopIteration
    """
    def __new__(cls, source,
                block_start_string=BLOCK_START_STRING,
                block_end_string=BLOCK_END_STRING,
                variable_start_string=VARIABLE_START_STRING,
                variable_end_string=VARIABLE_END_STRING,
                comment_start_string=COMMENT_START_STRING,
                comment_end_string=COMMENT_END_STRING,
                line_statement_prefix=LINE_STATEMENT_PREFIX,
                line_comment_prefix=LINE_COMMENT_PREFIX,
                trim_blocks=TRIM_BLOCKS,
                lstrip_blocks=LSTRIP_BLOCKS,
                newline_sequence=NEWLINE_SEQUENCE,
                keep_trailing_newline=KEEP_TRAILING_NEWLINE,
                extensions=(),
                optimized=True,
                undefined=Undefined,
                finalize=None,
                autoescape=False):
        # templates constructed directly share spontaneous environments
        # that are cached on the full configuration (positional order must
        # match Environment.__init__)
        env = get_spontaneous_environment(
            block_start_string, block_end_string, variable_start_string,
            variable_end_string, comment_start_string, comment_end_string,
            line_statement_prefix, line_comment_prefix, trim_blocks,
            lstrip_blocks, newline_sequence, keep_trailing_newline,
            frozenset(extensions), optimized, undefined, finalize, autoescape,
            None, 0, False, None)
        return env.from_string(source, template_class=cls)
    @classmethod
    def from_code(cls, environment, code, globals, uptodate=None):
        """Creates a template object from compiled code and the globals.  This
        is used by the loaders and environment to create a template object.
        """
        namespace = {
            'environment':  environment,
            '__file__':     code.co_filename
        }
        # executing the compiled template module populates the namespace
        # with 'name', 'blocks', 'root' and 'debug_info'
        exec(code, namespace)
        rv = cls._from_namespace(environment, namespace, globals)
        rv._uptodate = uptodate
        return rv
    @classmethod
    def from_module_dict(cls, environment, module_dict, globals):
        """Creates a template object from a module.  This is used by the
        module loader to create a template object.

        .. versionadded:: 2.4
        """
        return cls._from_namespace(environment, module_dict, globals)
    @classmethod
    def _from_namespace(cls, environment, namespace, globals):
        # builds the template object from a namespace produced either by
        # from_code() or by a module loader; bypasses __new__ on purpose
        t = object.__new__(cls)
        t.environment = environment
        t.globals = globals
        t.name = namespace['name']
        t.filename = namespace['__file__']
        t.blocks = namespace['blocks']
        # render function and module
        t.root_render_func = namespace['root']
        t._module = None
        # debug and loader helpers
        t._debug_info = namespace['debug_info']
        t._uptodate = None
        # store the reference
        namespace['environment'] = environment
        namespace['__jinja_template__'] = t
        return t
    def render(self, *args, **kwargs):
        """This method accepts the same arguments as the `dict` constructor:
        A dict, a dict subclass or some keyword arguments.  If no arguments
        are given the context will be empty.  These two calls do the same::

            template.render(knights='that say nih')
            template.render({'knights': 'that say nih'})

        This will return the rendered template as unicode string.
        """
        vars = dict(*args, **kwargs)
        try:
            return concat(self.root_render_func(self.new_context(vars)))
        except Exception:
            exc_info = sys.exc_info()
        return self.environment.handle_exception(exc_info, True)
    def stream(self, *args, **kwargs):
        """Works exactly like :meth:`generate` but returns a
        :class:`TemplateStream`.
        """
        return TemplateStream(self.generate(*args, **kwargs))
    def generate(self, *args, **kwargs):
        """For very large templates it can be useful to not render the whole
        template at once but evaluate each statement after another and yield
        piece for piece.  This method basically does exactly that and returns
        a generator that yields one item after another as unicode strings.

        It accepts the same arguments as :meth:`render`.
        """
        vars = dict(*args, **kwargs)
        try:
            for event in self.root_render_func(self.new_context(vars)):
                yield event
        except Exception:
            exc_info = sys.exc_info()
        else:
            return
        # only reached on error; handle_exception either re-raises or
        # yields a rendered traceback
        yield self.environment.handle_exception(exc_info, True)
    def new_context(self, vars=None, shared=False, locals=None):
        """Create a new :class:`Context` for this template.  The vars
        provided will be passed to the template.  Per default the globals
        are added to the context.  If shared is set to `True` the data
        is passed as it to the context without adding the globals.

        `locals` can be a dict of local variables for internal usage.
        """
        return new_context(self.environment, self.name, self.blocks,
                           vars, shared, self.globals, locals)
    def make_module(self, vars=None, shared=False, locals=None):
        """This method works like the :attr:`module` attribute when called
        without arguments but it will evaluate the template on every call
        rather than caching it.  It's also possible to provide
        a dict which is then used as context.  The arguments are the same
        as for the :meth:`new_context` method.
        """
        return TemplateModule(self, self.new_context(vars, shared, locals))
    @property
    def module(self):
        """The template as module.  This is used for imports in the
        template runtime but is also useful if one wants to access
        exported template variables from the Python layer:

        >>> t = Template('{% macro foo() %}42{% endmacro %}23')
        >>> str(t.module)
        '23'
        >>> t.module.foo() == u'42'
        True
        """
        # lazily created and cached on first access
        if self._module is not None:
            return self._module
        self._module = rv = self.make_module()
        return rv
    def get_corresponding_lineno(self, lineno):
        """Return the source line number of a line number in the
        generated bytecode as they are not in sync.
        """
        for template_line, code_line in reversed(self.debug_info):
            if code_line <= lineno:
                return template_line
        return 1
    @property
    def is_up_to_date(self):
        """If this variable is `False` there is a newer version available."""
        if self._uptodate is None:
            return True
        return self._uptodate()
    @property
    def debug_info(self):
        """The debug info mapping."""
        # _debug_info is an '&'-separated list of 'template_line=code_line'
        # pairs produced by the code generator
        return [tuple(imap(int, x.split('='))) for x in
                self._debug_info.split('&')]
    def __repr__(self):
        if self.name is None:
            name = 'memory:%x' % id(self)
        else:
            name = repr(self.name)
        return '<%s %s>' % (self.__class__.__name__, name)
@implements_to_string
class TemplateModule(object):
    """An imported template exposed as a module-like object.

    Every name the template exports is available as an attribute on this
    object, and converting it to a unicode- or bytestring renders the
    template's body.
    """
    def __init__(self, template, context):
        # Render eagerly: the exported names only exist once the body ran.
        self._body_stream = list(template.root_render_func(context))
        self.__dict__.update(context.get_exported())
        self.__name__ = template.name
    def __html__(self):
        return Markup(concat(self._body_stream))
    def __str__(self):
        return concat(self._body_stream)
    def __repr__(self):
        name = ('memory:%x' % id(self)
                if self.__name__ is None else repr(self.__name__))
        return '<%s %s>' % (self.__class__.__name__, name)
class TemplateExpression(object):
    """Wrapper returned by :meth:`jinja2.Environment.compile_expression`.

    Calling the instance evaluates the wrapped expression-template and
    returns the value bound to ``result`` in the context.
    """

    def __init__(self, template, undefined_to_none):
        self._template = template
        self._undefined_to_none = undefined_to_none

    def __call__(self, *args, **kwargs):
        ctx = self._template.new_context(dict(*args, **kwargs))
        consume(self._template.root_render_func(ctx))
        result = ctx.vars['result']
        # Optionally normalize Jinja's Undefined sentinel to plain None.
        if self._undefined_to_none and isinstance(result, Undefined):
            result = None
        return result
@implements_iterator
class TemplateStream(object):
    """A template stream works pretty much like an ordinary python generator
    but it can buffer multiple items to reduce the number of total iterations.
    Per default the output is unbuffered which means that for every unbuffered
    instruction in the template one unicode string is yielded.

    If buffering is enabled with a buffer size of 5, five items are combined
    into a new unicode string.  This is mainly useful if you are streaming
    big templates to a client via WSGI which flushes after each iteration.
    """

    def __init__(self, gen):
        self._gen = gen
        self.disable_buffering()

    def dump(self, fp, encoding=None, errors='strict'):
        """Dump the complete stream into a file or file-like object.
        Per default unicode strings are written, if you want to encode
        before writing specify an `encoding`.

        Example usage::

            Template('Hello {{ name }}!').stream(name='foo').dump('hello.html')
        """
        close = False
        # A string argument is treated as a filename; open it ourselves
        # (binary mode, hence the default utf-8 encoding) and close it later.
        if isinstance(fp, string_types):
            if encoding is None:
                encoding = 'utf-8'
            fp = open(fp, 'wb')
            close = True
        try:
            if encoding is not None:
                iterable = (x.encode(encoding, errors) for x in self)
            else:
                iterable = self
            if hasattr(fp, 'writelines'):
                fp.writelines(iterable)
            else:
                for item in iterable:
                    fp.write(item)
        finally:
            # Only close files we opened ourselves.
            if close:
                fp.close()

    def disable_buffering(self):
        """Disable the output buffering."""
        self._next = get_next(self._gen)
        self.buffered = False

    def enable_buffering(self, size=5):
        """Enable buffering.  Buffer `size` items before yielding them."""
        if size <= 1:
            raise ValueError('buffer size too small')

        def generator(next):
            buf = []
            c_size = 0
            push = buf.append

            while 1:
                try:
                    # Collect items until `size` truthy ones are buffered;
                    # empty strings are kept but do not count toward the
                    # buffer size.
                    while c_size < size:
                        c = next()
                        push(c)
                        if c:
                            c_size += 1
                except StopIteration:
                    # Flush whatever is left; stop when nothing was buffered.
                    if not c_size:
                        return
                yield concat(buf)
                del buf[:]
                c_size = 0

        self.buffered = True
        self._next = get_next(generator(get_next(self._gen)))

    def __iter__(self):
        return self

    def __next__(self):
        return self._next()
# hook in default template class.  if anyone reads this comment: ignore that
# it's possible to use custom templates ;-)
# (Template must be assigned after both classes exist, hence this lives at
# the bottom of the module.)
Environment.template_class = Template
| apache-2.0 |
magical/jwt-python | jwt.py | 1 | 6231 | u"""JSON Web Token"""
import base64
import json
import re
import M2Crypto
import hashlib
import hmac
from struct import pack
from itertools import izip
# Accepted values for the optional "typ" header of a token.
JWT_TYPS = (u"JWT", u"http://openid.net/specs/jwt/1.0")


# XXX Should this be a subclass of ValueError?
class Invalid(Exception):
    """The JWT is invalid."""


class BadSyntax(Invalid):
    """The JWT could not be parsed because the syntax is invalid."""

    def __init__(self, value, msg):
        self.value = value  # the offending token or fragment
        self.msg = msg      # human-readable reason

    def __str__(self):
        return "%s: %r" % (self.msg, self.value)


class BadSignature(Invalid):
    """The signature of the JWT is invalid."""


class Expired(Invalid):
    """The JWT claim has expired or is not yet valid."""


class UnknownAlgorithm(Invalid):
    """The JWT uses an unknown signing algorithm"""


class BadType(Invalid):
    """The JWT has an unexpected "typ" value."""
def b64e(b):
    u"""Base64 encode some bytes.

    Uses the url-safe - and _ characters, and doesn't pad with = characters."""
    encoded = base64.urlsafe_b64encode(b)
    return encoded.rstrip(b"=")
_b64_re = re.compile(b"^[A-Za-z0-9_-]*$")


def b64d(b):
    u"""Decode some base64-encoded bytes.

    Raises BadSyntax if the string contains invalid characters or padding."""
    # Python's base64 functions silently ignore invalid characters, so
    # validate explicitly before decoding.
    if not _b64_re.match(b):
        raise BadSyntax(b, "base64-encoded data contains illegal characters")
    remainder = len(b) % 4
    if remainder == 1:
        # NOTE: for some reason b64decode raises *TypeError* if the
        # padding is incorrect.
        raise BadSyntax(b, "incorrect padding")
    padding = {0: b"", 2: b"==", 3: b"="}[remainder]
    return base64.urlsafe_b64decode(b + padding)
def split_token(token):
    """Split a compact JWT into its (header, claim, signature) parts."""
    dots = token.count(b".")
    if dots != 2:
        raise BadSyntax(token, "expected token to contain 2 dots, not %d" % dots)
    return tuple(token.split(b"."))
# Stolen from Werkzeug
def safe_str_cmp(a, b):
    """Compare two strings in constant time.

    Returns True only when *a* and *b* are equal; the comparison walks
    the full (equal) length of the inputs so the timing does not reveal
    where they first differ.
    """
    if len(a) != len(b):
        return False
    r = 0
    # zip() instead of itertools.izip: identical here (lengths are equal)
    # and it works on both Python 2 and Python 3.
    for c, d in zip(a, b):
        r |= ord(c) ^ ord(d)
    return r == 0
def sha256_digest(msg):
    """Raw SHA-256 digest of *msg*."""
    return hashlib.new('sha256', msg).digest()


def sha384_digest(msg):
    """Raw SHA-384 digest of *msg*."""
    return hashlib.new('sha384', msg).digest()


def sha512_digest(msg):
    """Raw SHA-512 digest of *msg*."""
    return hashlib.new('sha512', msg).digest()
def mpint(b):
    """Encode *b* as an mpint: a 4-byte big-endian length prefix followed
    by the bytes, with a leading zero byte so the value reads as
    non-negative."""
    body = b"\x00" + b
    return pack(">L", len(body)) + body
def mp2bin(b):
    """Strip the 4-byte length prefix (and any sign byte) from an mpint."""
    # just ignore the length...
    # NOTE(review): on Python 3, b[4] is an int and never equals the str
    # '\x00', so the sign-byte branch only fires on Python 2 -- this module
    # is Python 2 code.
    if b[4] == '\x00':
        return b[5:]
    return b[4:]
class Signer(object):
    """Abstract base class for signing algorithms.

    Concrete implementations raise :class:`BadSignature` from
    ``verify`` rather than returning False.
    """

    def sign(self, msg, key):
        """Sign ``msg`` with ``key`` and return the signature."""
        raise NotImplementedError

    def verify(self, msg, sig, key):
        """Return True if ``sig`` is a valid signature for ``msg``."""
        raise NotImplementedError


class HMACSigner(Signer):
    """HMAC signer; ``digest`` is a hashlib constructor (e.g. sha256)."""

    def __init__(self, digest):
        self.digest = digest

    def sign(self, msg, key):
        mac = hmac.new(key, msg, digestmod=self.digest)
        return mac.digest()

    def verify(self, msg, sig, key):
        # Recompute and compare in constant time; raise on mismatch.
        expected = self.sign(msg, key)
        if not safe_str_cmp(expected, sig):
            raise BadSignature(repr(sig))
        return
class RSASigner(Signer):
    """RSA signer backed by an M2Crypto RSA key.

    ``digest`` is a function returning the raw digest of a message and
    ``algo`` is the M2Crypto algorithm name (e.g. 'sha256').
    """

    def __init__(self, digest, algo):
        self.digest = digest
        self.algo = algo

    def sign(self, msg, key):
        return key.sign(self.digest(msg), self.algo)

    def verify(self, msg, sig, key):
        try:
            return key.verify(self.digest(msg), sig, self.algo)
        except M2Crypto.RSA.RSAError as e:
            # `except ... as e` (instead of the old comma form) keeps this
            # valid on Python 2.6+ and Python 3.
            raise BadSignature(e)
class ECDSASigner(Signer):
    """ECDSA signer backed by an M2Crypto EC key.

    Signatures are the fixed-width concatenation of r and s, each
    right-justified to 32 bytes.
    """

    def __init__(self, digest):
        self.digest = digest

    def sign(self, msg, key):
        r, s = key.sign_dsa(self.digest(msg))
        return mp2bin(r).rjust(32, '\x00') + mp2bin(s).rjust(32, '\x00')

    def verify(self, msg, sig, key):
        # XXX check sig length
        half = len(sig) // 2
        r = mpint(sig[:half])
        s = mpint(sig[half:])
        try:
            ok = key.verify_dsa(self.digest(msg), r, s)
        except M2Crypto.EC.ECError as e:
            # `except ... as e` (instead of the old comma form) keeps this
            # valid on Python 2.6+ and Python 3.
            raise BadSignature(e)
        if not ok:
            raise BadSignature
# Registry of supported signers, keyed by the JWT "alg" header value.
ALGS = {
    u'HS256': HMACSigner(hashlib.sha256),
    u'HS384': HMACSigner(hashlib.sha384),
    u'HS512': HMACSigner(hashlib.sha512),

    u'RS256': RSASigner(sha256_digest, 'sha256'),
    u'RS384': RSASigner(sha384_digest, 'sha384'),
    u'RS512': RSASigner(sha512_digest, 'sha512'),

    u'ES256': ECDSASigner(sha256_digest),
}
def verify(token, key):
    """Verify the signature of *token* (a byte string) with *key*.

    Returns None on success and raises a subclass of ``Invalid``
    (BadSyntax, BadType, UnknownAlgorithm, BadSignature) on failure.
    """
    # Tokens must be byte strings; `unicode` makes this Python 2 only.
    if isinstance(token, unicode):
        raise TypeError
    header_b64, claim_b64, crypto_b64 = split_token(token)
    header = b64d(header_b64)
    # claim is decoded but otherwise unused here -- presumably to surface
    # base64 errors early; confirm before removing.
    claim = b64d(claim_b64)
    crypto = b64d(crypto_b64)
    header = json.loads(header)
    if u'typ' in header:
        if header[u'typ'] not in JWT_TYPS:
            raise BadType(header)
    alg = header[u'alg']
    if alg not in ALGS:
        raise UnknownAlgorithm(alg)
    # The signature covers the still-encoded header and claim segments.
    sigdata = header_b64 + b'.' + claim_b64
    verifier = ALGS[alg]
    verifier.verify(sigdata, crypto, key)
    return
def check(token, key):
    """Boolean convenience wrapper around :func:`verify`."""
    try:
        verify(token, key)
    except Invalid:
        return False
    return True
def sign(alg, payload, key):
    """Sign the payload with the given algorithm and key.

    Returns a token string.

    NOTE(review): despite the original claim that the payload can be any
    JSON-dumpable object, the payload is base64-encoded as-is (no
    json.dumps) -- callers apparently must pass an already-serialized
    byte string.  Confirm intent before changing."""
    if alg not in ALGS:
        raise UnknownAlgorithm(alg)
    header = {u'alg': alg}
    signer = ALGS[alg]
    # Compact JSON encoding for the header (no whitespace).
    header_b64 = b64e(json.dumps(header, separators=(",", ":")))
    payload_b64 = b64e(payload)
    token = header_b64 + b"." + payload_b64
    # The signature covers header + "." + payload.
    sig = signer.sign(token, key)
    token += b"." + b64e(sig)
    return token
def rsa_load(filename):
    """Read a PEM-encoded RSA key pair from a file."""
    return M2Crypto.RSA.load_key(filename, M2Crypto.util.no_passphrase_callback)


def rsa_loads(key):
    """Read a PEM-encoded RSA key pair from a string."""
    return M2Crypto.RSA.load_key_str(key, M2Crypto.util.no_passphrase_callback)


def ec_load(filename):
    """Read a PEM-encoded EC key pair from a file."""
    return M2Crypto.EC.load_key(filename, M2Crypto.util.no_passphrase_callback)
| mit |
gangadhar-kadam/hrshop | shopping_cart/hooks.py | 1 | 1079 | app_name = "shopping_cart"
# Frappe/ERPNext app metadata.
app_title = "Shopping Cart"
app_publisher = "Web Notes Technologies"
app_description = "Online Shopping Cart integrated with ERPNext"
app_icon = "icon-shopping-cart"
app_color = "#B7E090"
app_email = "info@erpnext.com"
app_url = "https://erpnext.com"
app_version = "0.0.1"

# Bundled assets injected into every website page.
web_include_js = "assets/js/shopping-cart-web.min.js"
web_include_css = "assets/css/shopping-cart-web.css"

# Session lifecycle hooks: keep the cart item count in the session.
on_session_creation = "shopping_cart.utils.set_cart_count"
on_logout = "shopping_cart.utils.clear_cart_count"
update_website_context = "shopping_cart.utils.update_website_context"

# Bean Events
# Revalidate the cart settings whenever documents they depend on change.
doc_events = {
    "Sales Taxes and Charges Master": {
        "on_update": "shopping_cart.shopping_cart.doctype.shopping_cart_settings.shopping_cart_settings.validate_cart_settings"
    },
    "Price List": {
        "on_update": "shopping_cart.shopping_cart.doctype.shopping_cart_settings.shopping_cart_settings.validate_cart_settings"
    },
    # "Quotation": {
    #     "validate": "shopping_cart.shopping_cart.doctype.shopping_cart_settings.shopping_cart_settings.apply_shopping_cart_settings"
    # }
}
| agpl-3.0 |
Joel-U/sparkle | sparkle/gui/plotting/protocoldisplay.py | 2 | 10507 | import numpy as np
from sparkle.QtWrapper import QtCore, QtGui
from sparkle.gui.plotting.pyqtgraph_widgets import FFTWidget, SpecWidget, \
TraceWidget
class ProtocolDisplay(QtGui.QWidget):
    """Data display intended for use during brain recording.

    Shows the stimulus spectrogram, one or more response (spike trace)
    plots and the stimulus FFT in resizable splitters, with all x axes
    kept in sync.
    """
    # Signals re-emitted from the individual trace widgets; the str
    # argument names the response plot that originated them.
    thresholdUpdated = QtCore.Signal(float, str)
    polarityInverted = QtCore.Signal(float, str)
    rasterBoundsUpdated = QtCore.Signal(tuple, str)
    absUpdated = QtCore.Signal(bool, str)

    def __init__(self, response_chan_name='chan0', parent=None):
        super(ProtocolDisplay, self).__init__(parent)
        # name -> TraceWidget for every response channel on display
        self.responsePlots = {}
        self.fftPlot = FFTWidget(self, rotation=90)
        spiketracePlot = TraceWidget(self)
        self.responsePlots[response_chan_name] = spiketracePlot
        self.specPlot = SpecWidget(self)

        self.fftPlot.setToolTip('Stimulus Spectrum')
        spiketracePlot.setToolTip('Spike Trace')
        self.specPlot.setToolTip('Stimulus Spectrogram')

        # custom behaviour for spec view all option
        vb = self.specPlot.getViewBox()
        vb.menu.viewAll.triggered.disconnect()
        vb.menu.viewAll.triggered.connect(self.specAutoRange)
        # self.fftPlot.set_title("Stimulus FFT")
        # spiketracePlot.set_title("Response Trace")
        # self.specPlot.set_title("Stimulus Spectrogram")

        # keep the spectrogram and trace x axes linked
        self.specPlot.plotItem.vb.sigXRangeChanged.connect(self.updateXRange)
        spiketracePlot.plotItem.vb.sigXRangeChanged.connect(self.updateXRange)

        self.specPlot.setMinimumHeight(100)
        spiketracePlot.setMinimumWidth(100)
        spiketracePlot.setMinimumHeight(100)
        self.fftPlot.setMinimumWidth(100)
        self.fftPlot.setMinimumHeight(100)

        splittersw = QtGui.QSplitter(QtCore.Qt.Vertical)
        splitterse = QtGui.QSplitter(QtCore.Qt.Horizontal)

        splittersw.addWidget(self.specPlot)
        splittersw.addWidget(spiketracePlot)
        splitterse.addWidget(splittersw)
        splitterse.addWidget(self.fftPlot)

        # set inital sizes
        splittersw.setSizes([100,500])
        splitterse.setSizes([500,100])

        layout = QtGui.QHBoxLayout()
        layout.setContentsMargins(0,0,0,0)
        layout.addWidget(splitterse)
        self.setLayout(layout)

        #relay threshold signal
        spiketracePlot.thresholdUpdated.connect(self.thresholdUpdated.emit)
        spiketracePlot.polarityInverted.connect(self.polarityInverted.emit)
        spiketracePlot.rasterBoundsUpdated.connect(self.rasterBoundsUpdated.emit)
        spiketracePlot.absUpdated.connect(self.absUpdated.emit)
        self.colormapChanged = self.specPlot.colormapChanged

        # for the purposes of splitter not updating contents...
        self.splittersw = splittersw
        self.badbadbad = 0
        self._ignore_range_signal = False

    def updateSpec(self, *args, **kwargs):
        """Updates the spectrogram. First argument can be a filename,
        or a data array. If no arguments are given, clears the spectrograms.

        For other arguments, see: :meth:`SpecWidget.updateData<sparkle.gui.plotting.pyqtgraph_widgets.SpecWidget.updateData>`
        """
        if args[0] is None:
            self.specPlot.clearImg()
        elif isinstance(args[0], basestring):
            # NOTE(review): `basestring` makes this module Python 2 only.
            self.specPlot.fromFile(*args, **kwargs)
        else:
            self.specPlot.updateData(*args,**kwargs)

    def showSpec(self, fname):
        """Draws the spectrogram if it is currently None"""
        if not self.specPlot.hasImg() and fname is not None:
            self.specPlot.fromFile(fname)

    def updateFft(self, *args, **kwargs):
        """Updates the FFT plot with new data

        For arguments, see: :meth:`FFTWidget.updateData<sparkle.gui.plotting.pyqtgraph_widgets.FFTWidget.updateData>`
        """
        self.fftPlot.updateData(*args, **kwargs)

    def addResponsePlot(self, *names):
        """Adds one TraceWidget per given name and wires it into the
        shared x-range and relayed signals."""
        for name in names:
            plot = TraceWidget(self)
            plot.setTitle(name)
            plot.plotItem.vb.sigXRangeChanged.connect(self.updateXRange)
            self.splittersw.addWidget(plot)
            plot.thresholdUpdated.connect(self.thresholdUpdated.emit)
            plot.polarityInverted.connect(self.polarityInverted.emit)
            plot.rasterBoundsUpdated.connect(self.rasterBoundsUpdated.emit)
            plot.absUpdated.connect(self.absUpdated.emit)
            self.responsePlots[name] = plot

    def removeResponsePlot(self, *names):
        """Disconnects, closes and discards the named response plots."""
        for name in names:
            if name in self.responsePlots:
                plot = self.responsePlots.pop(name)
                plot.thresholdUpdated.disconnect()
                plot.polarityInverted.disconnect()
                plot.rasterBoundsUpdated.disconnect()
                plot.absUpdated.disconnect()
                plot.plotItem.vb.sigXRangeChanged.disconnect()
                plot.close()
                plot.deleteLater()

    def responseNameList(self):
        """Names of the response plots currently on display."""
        return self.responsePlots.keys()

    def responsePlotCount(self):
        """Number of response plots currently on display."""
        return len(self.responsePlots)

    def updateSpiketrace(self, xdata, ydata, plotname=None):
        """Updates the spike trace

        :param xdata: index values
        :type xdata: numpy.ndarray
        :param ydata: values to plot
        :type ydata: numpy.ndarray
        """
        # NOTE(review): dict.keys()[0] works on Python 2 only, and the
        # "first" plot is whichever the dict yields first -- verify.
        if plotname is None:
            plotname = self.responsePlots.keys()[0]
        if len(ydata.shape) == 1:
            self.responsePlots[plotname].updateData(axeskey='response', x=xdata, y=ydata)
        else:
            self.responsePlots[plotname].addTraces(xdata, ydata)

    def clearRaster(self):
        """Clears data from the raster plots"""
        for plot in self.responsePlots.values():
            plot.clearData('raster')

    def addRasterPoints(self, xdata, repnum, plotname=None):
        """Add a list (or numpy array) of points to raster plot,
        in any order.

        :param xdata: bin centers
        :param repnum: rep number (used as the y value for every point)
        """
        if plotname is None:
            plotname = self.responsePlots.keys()[0]
        ydata = np.ones_like(xdata)*repnum
        self.responsePlots[plotname].appendData('raster', xdata, ydata)

    def updateSignal(self, xdata, ydata, plotname=None):
        """Updates the trace of the outgoing signal

        :param xdata: time points of recording
        :param ydata: brain potential at time points
        """
        if plotname is None:
            plotname = self.responsePlots.keys()[0]
        self.responsePlots[plotname].updateData(axeskey='stim', x=xdata, y=ydata)

    def setXlimits(self, lims):
        """Sets the X axis limits of the trace plot

        :param lims: (min, max) of x axis, in same units as data
        :type lims: (float, float)
        """
        # update all "linked", plots
        self.specPlot.setXlim(lims)
        for plot in self.responsePlots.values():
            plot.setXlim(lims)
        # ridiculous...
        # apparently nudging the splitter sizes back and forth forces it
        # to repaint its contents (see badbadbad in __init__)
        sizes = self.splittersw.sizes()
        if len(sizes) > 1:
            if self.badbadbad:
                sizes[0] +=1
                sizes[1] -=1
            else:
                sizes[0] -=1
                sizes[1] +=1
            self.badbadbad = not self.badbadbad
            self.splittersw.setSizes(sizes)
        self._ignore_range_signal = False

    def updateXRange(self, view, lims):
        """Propagates an x-range change from one plot to all the others."""
        if not self._ignore_range_signal:
            # avoid crazy recursion, as we update the other plots
            self._ignore_range_signal = True
            self.setXlimits(lims)

    def setNreps(self, nreps):
        """Sets the number of reps before the raster plot resets"""
        for plot in self.responsePlots.values():
            plot.setNreps(nreps)

    def sizeHint(self):
        """default size?"""
        return QtCore.QSize(500,300)

    def specAutoRange(self):
        """Auto adjusts the visible range of the spectrogram"""
        trace_range = self.responsePlots.values()[0].viewRange()[0]
        vb = self.specPlot.getViewBox()
        vb.autoRange(padding=0)
        self.specPlot.setXlim(trace_range)

    def setAmpConversionFactor(self, scalar):
        """Applies an amplitude conversion factor to every response plot."""
        for plot in self.responsePlots.values():
            plot.setAmpConversionFactor(scalar)

    def setThreshold(self, thresh, plotname):
        """Sets the spike-detection threshold on the named plot."""
        self.responsePlots[plotname].setThreshold(thresh)

    def setRasterBounds(self, bounds, plotname):
        """Sets the raster y-bounds on the named plot."""
        self.responsePlots[plotname].setRasterBounds(bounds)

    def setAbs(self, bounds, plotname):
        """Sets the absolute-value display mode on the named plot."""
        self.responsePlots[plotname].setAbs(bounds)
if __name__ == "__main__":
    # Manual/visual smoke test: load a sample wav plus a recorded sweep
    # and populate every panel of the display.
    import random, time, os, sys
    import numpy as np
    import sparkle.tools.audiotools as audiotools
    import scipy.io.wavfile as wv
    import test.sample as sample
    from scipy.io import loadmat

    app = QtGui.QApplication(sys.argv)
    plot = ProtocolDisplay()
    plot.resize(800, 400)
    plot.show()

    sylpath = sample.samplewav()
    spec, f, bins, fs = audiotools.spectrogram(sylpath)
    # plot.updateSpec(spec, xaxis=bins, yaxis=f)
    plot.updateSpec(sylpath)

    fs, wavdata = wv.read(sylpath)
    freqs, fft = audiotools.calc_spectrum(wavdata,fs)

    # stim_times = np.arange(0,len(wavdata),1/float(len(wavdata)))
    stim_times = np.linspace(0,float(len(wavdata))/fs, len(wavdata))
    marr = loadmat(os.path.join(os.path.abspath(os.path.dirname(__file__)),"singlesweep.mat"), squeeze_me=True)
    resp = abs(marr['sweep'])
    acq_rate = 50000
    resp_times = np.linspace(0,float(len(resp))/acq_rate, len(resp))

    # x = np.arange(len(wavdata))
    # y = random.randint(0,10) * np.sin(x)
    plot.updateSignal(stim_times, wavdata)
    plot.updateSpiketrace(resp_times,resp)
    # for i in range(10):
    #     y = random.randint(0,10) * np.sin(x)
    #     plot.updateFft(x,y)
    #     time.sleep(0.2)
    #     QtGui.QApplication.processEvents()
    plot.updateFft(freqs,fft)

    # Paint three dummy raster rows into the first response plot.
    nbins=20
    bin_centers = np.linspace(0,float(len(resp))/acq_rate, nbins)
    dummy_data = np.ones((nbins/2,))
    dummy_bins = bin_centers[0:-1:2]
    plot.addRasterPoints(dummy_bins, dummy_data)
    dummy_data = np.ones(((nbins/2)-1,))*2
    dummy_bins = bin_centers[1:-2:2]
    plot.addRasterPoints(dummy_bins, dummy_data)
    dummy_data = np.ones(((nbins/2)-1,))*3
    dummy_bins = bin_centers[1:-2:2]
    plot.addRasterPoints(dummy_bins, dummy_data)

    # coerce x ranges to match
    plot.setXlimits([0, resp_times[-1]])
    # NOTE(review): setTscale/setFscale are not defined on ProtocolDisplay
    # above -- confirm they exist elsewhere or this demo code is stale.
    plot.setTscale(0.001)
    plot.setFscale(1000)

    sys.exit(app.exec_())
| gpl-3.0 |
anthonydillon/horizon | openstack_dashboard/dashboards/project/images/images/tabs.py | 89 | 1576 | # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import conf
from django.utils.translation import ugettext_lazy as _
from horizon import tabs
class OverviewTab(tabs.Tab):
    """Overview tab of the image detail page: renders the image and its
    custom properties (with optional configured display titles)."""
    name = _("Overview")
    slug = "overview"
    template_name = "project/images/images/_detail_overview.html"

    def get_context_data(self, request):
        image = self.tab_group.kwargs['image']
        custom_titles = getattr(conf.settings,
                                'IMAGE_CUSTOM_PROPERTY_TITLES', {})
        # The description property is already listed in the Info section,
        # so it is omitted here.
        image_props = [
            (prop, custom_titles.get(prop, prop), val)
            for prop, val in image.properties.items()
            if prop != 'description'
        ]
        return {"image": image,
                "image_props": sorted(image_props, key=lambda p: p[1])}
class ImageDetailTabs(tabs.TabGroup):
    # Single-tab group shown on the image detail page.
    slug = "image_details"
    tabs = (OverviewTab,)
| apache-2.0 |
ludmilamarian/invenio | invenio/modules/jsonalchemy/parser.py | 17 | 37499 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Fields and models configuration loader.
This module uses `pyparsing <http://pyparsing.wikispaces.com/>`_ to read
from thedifferent configuration files the field and model definitions.
Default extensions to both parsers could be added inside
:mod:`invenio.modules.jsonalchemy.jsonext.parsers`
"""
import os
import six
from pyparsing import ParseException, FollowedBy, Suppress, OneOrMore, Word, \
LineEnd, ZeroOrMore, Optional, Literal, alphas, alphanums, \
originalTextFor, nestedExpr, quotedString, removeQuotes, lineEnd, \
empty, col, restOfLine, delimitedList, Each, Keyword, commaSeparatedList, \
Group
from .errors import FieldParserException, ModelParserException
from .registry import fields_definitions, models_definitions, parsers
# Newlines are significant in the indentation-based cfg grammar, so only
# space/CR/tab are treated as skippable whitespace.
# NOTE(review): pyparsing normally configures this through
# ParserElement.setDefaultWhitespaceChars(); setting an attribute on
# ParseException may have no effect -- verify.
ParseException.defaultWhitespaceChars = (' \r\t')

# Shared grammar fragments.
COMMENT = (Literal("#") + restOfLine + LineEnd()).suppress()
IDENT = Word(alphanums + '_')
DICT_DEF = originalTextFor(nestedExpr('{', '}'))
LIST_DEF = originalTextFor(nestedExpr('[', ']'))
DICT_ACCESS = LIST_ACCESS = originalTextFor(IDENT + nestedExpr('[', ']'))
# Any python expression allowed on the right-hand side of a rule: a dict
# or list literal, a subscript access, or the rest of the line verbatim.
PYTHON_ALLOWED_EXPR = (DICT_DEF ^ LIST_DEF ^ DICT_ACCESS ^
                       LIST_ACCESS ^ restOfLine
                       ).setParseAction(lambda toks: toks[0])
def indentedBlock(expr, indent_stack, indent=True):
    """Define space-delimited indentation blocks.

    Helper method for defining space-delimited indentation blocks, such as
    those used to define block statements in Python source code.

    There is also a version in pyparsing but doesn't seem to be working fine
    with JSONAlchemy cfg files.

    NOTE(review): the ``indent`` parameter is shadowed by the local variable
    below and is never honored -- confirm whether an optional-indent mode
    was intended.
    """
    def check_sub_indent(string, location, tokens):
        """Check the indentation."""
        cur_col = col(location, string)
        if cur_col > indent_stack[-1]:
            indent_stack.append(cur_col)
        else:
            raise ParseException(string, location, "not a subentry")

    def check_unindent(string, location, tokens):
        """Check the 'undentation'."""
        if location >= len(string):
            return
        cur_col = col(location, string)
        if not(cur_col < indent_stack[-1] and cur_col <= indent_stack[-2]):
            raise ParseException(string, location, "not an unindent")

    def do_unindent():
        """Unindent."""
        indent_stack.pop()

    indent = lineEnd.suppress() + empty + empty.copy()\
        .setParseAction(check_sub_indent)
    undent = FollowedBy(empty).setParseAction(check_unindent)
    # NOTE(review): setParseAction *replaces* check_unindent set on the
    # line above with do_unindent (pyparsing overwrites actions);
    # addParseAction may have been intended -- verify.
    undent.setParseAction(do_unindent)
    return indent + expr + undent
def _create_field_parser():
    """Create a parser that can handle field definitions.

    BFN like grammar::

        rule       ::= [pid | extend | override]
                       json_id ["," aliases]":"
                       body
        json_id    ::= (letter|"_") (letter|digit|_)*
        aliases    ::= json_id ["," aliases]
        pid        ::= @persistent_identifier( level )
        extend     ::= @extend
        override   ::= @override
        hidden     ::= @hidden
        body       ::= (creator* | derived | calculated) (extensions)*
        creator    ::= [decorators] format "," tag "," expr
        derived    ::= [decorators] expr
        calculated ::= [decorators] expr

    To check the syntactics of the parser extensions or decorators please go to
    :mod:`invenio.modules.jsonalchemy.jsonext.parsers`
    """
    # Shared mutable state used by indentedBlock() to track nesting depth.
    indent_stack = [1]

    # Independent/special decorators that apply to the whole rule.
    persistent_identifier = (
        Keyword('@persistent_identifier').suppress() + nestedExpr()
    ).setResultsName('pid').setParseAction(lambda toks: int(toks[0][0]))
    override = Keyword('@override').suppress()\
        .setResultsName('override')\
        .setParseAction(lambda toks: True)
    extend = Keyword('@extend').suppress()\
        .setResultsName('extend')\
        .setParseAction(lambda toks: True)
    hidden = Keyword('@hidden').suppress()\
        .setResultsName('hidden')\
        .setParseAction(lambda toks: True)
    rule_decorators = (Optional(persistent_identifier) &
                       Optional(override) &
                       Optional(extend) &
                       Optional(hidden))

    # Field definition decorators contributed by registered parser plugins.
    field_decorators = Each(
        [Optional(p.parser.parse_element(indent_stack))
         for p in parsers if issubclass(p.parser,
                                        DecoratorBaseExtensionParser)])

    # Creator rules: "<format>, '<tags>', <python expr>".  The parse action
    # compiles the expression once so evaluation later is cheap.
    creator_body = (
        Optional(field_decorators).setResultsName('decorators') +
        Word(alphas, alphanums + '_') +
        Literal(',').suppress() +
        quotedString.setParseAction(removeQuotes) +
        Literal(',').suppress() +
        PYTHON_ALLOWED_EXPR
    ).setParseAction(lambda toks: {
        'source_format': toks[-3],
        'source_tags': toks[-2].split(' '),
        'function': compile(toks[-1].strip(), '', 'eval'),
        'type': 'creator',
        'decorators': toks.decorators.asDict()}
    ).setResultsName('creator_def', listAllMatches=True)
    creator = (Keyword('creator:').suppress() +
               indentedBlock(OneOrMore(creator_body), indent_stack))

    # Derived and calculated rules share the same body shape: optional
    # decorators followed by a python expression.
    der_calc_body = (Optional(field_decorators).setResultsName('decorators') +
                     PYTHON_ALLOWED_EXPR)
    derived = (
        Keyword('derived:').suppress() +
        indentedBlock(der_calc_body, indent_stack)
    ).setParseAction(lambda toks: {
        'source_format': 'derived',
        'source_tags': None,
        'function': compile(toks[-1].strip(), '', 'eval'),
        'type': 'derived',
        'decorators': toks.decorators.asDict()}).setResultsName('derived_def')
    calculated = (
        Keyword('calculated:').suppress() +
        indentedBlock(der_calc_body, indent_stack)
    ).setParseAction(lambda toks: {
        'source_format': 'calculated',
        'source_tags': None,
        'function': compile(toks[-1].strip(), '', 'eval'),
        'type': 'calculated',
        'decorators': toks.decorators.asDict()
    }).setResultsName('calculated_def')

    # The rule body: one of the three rule types plus any plugin sections.
    rule_sections = [Optional(creator | derived | calculated), ]
    rule_sections.extend([Optional(p.parser.parse_element(indent_stack))
                          for p in parsers
                          if issubclass(p.parser, FieldBaseExtensionParser)])

    # "json_id[, alias, ...]:" header of a rule.
    json_id = (IDENT +
               Optional(Suppress(',') +
                        delimitedList(Word(alphanums + '_'))) +
               Suppress(':')
               ).setResultsName('field')\
        .setParseAction(lambda toks: {'json_id': toks[0],
                                      'aliases': toks[1:]})

    rule = Group(Optional(rule_decorators) +
                 json_id +
                 indentedBlock(Each(rule_sections), indent_stack)
                 )

    return OneOrMore(COMMENT.suppress() | rule)
def _create_model_parser():
    """
    Create a parser that can handle model definitions.

    BFN like grammar::

        TODO

    Note: Unlike the field configuration files where you can specify more than
    one field inside each file for the models only one definition is
    allowed by file.
    """
    def build_dict_for_fields(tokens):
        """Build the dictionary with the field definitions.

        E.g. ``{'field_name': 'json_identifier'}``

        NOTE(review): for ``name = other`` the mapping stored is
        ``{other: name}`` -- confirm the example above matches the
        intended direction.
        """
        dict_ = dict()
        for token in tokens:
            if len(token) == 1:
                # bare name: maps to itself
                dict_[token[0]] = token[0]
            else:
                dict_[token[1]] = token[0]
        return dict_

    indent_stack = [1]

    # "name" or "name = json_id" entries under the fields: section.
    field = Group(Word(alphanums + '_') +
                  Optional(Literal('=').suppress() + Word(alphanums + '_')))
    fields = (Keyword('fields:').suppress() +
              indentedBlock(ZeroOrMore(field), indent_stack)
              ).setParseAction(build_dict_for_fields).setResultsName('fields')
    bases = (Keyword('bases:').suppress() +
             indentedBlock(commaSeparatedList, indent_stack)
             ).setResultsName('bases')

    # Mandatory fields section, optional bases, plus plugin sections.
    sections = [fields, Optional(bases), ]
    sections.extend([Optional(p.parser.parse_element(indent_stack))
                     for p in parsers if issubclass(p.parser,
                                                    ModelBaseExtensionParser)])
    rules = Each(sections)
    return ZeroOrMore(COMMENT) & rules
class FieldParser(object):

    """Field definitions parser."""

    # The caches below live on the class and are keyed by namespace, so
    # they are shared by every FieldParser instance.
    _field_definitions = {}
    """Dictionary containing all the rules needed to create and validate json
    fields"""

    _legacy_field_matchings = {}
    """Dictionary containing matching between the legacy master format and the
    current json"""

    _field_extensions = None
    """Field only parser extensions"""

    _decorator_before_extensions = None
    """Decorator before only parser extensions"""

    _decorator_on_extensions = None
    """Decorator on only parser extensions"""

    _decorator_after_extensions = None
    """Decorator after only parser extensions"""
def __init__(self, namespace):
    """Collect the field-definition files registered for *namespace*."""
    # Autodiscover cfg files for this namespace via the registry.
    self.files = list(fields_definitions(namespace))
    self.__namespace = namespace
@classmethod
def field_extensions(cls):
    """Get the field parser extensions from the parser registry."""
    # Computed lazily on first use and cached on the class.
    if cls._field_extensions is None:
        cls._field_extensions = dict(
            (module.parser.__parsername__, module.parser)
            for module in parsers
            if issubclass(module.parser,
                          FieldBaseExtensionParser))
    return cls._field_extensions

@classmethod
def decorator_before_extensions(cls):
    """Get the before-evaluation decorator extensions from the registry."""
    if cls._decorator_before_extensions is None:
        cls._decorator_before_extensions = dict(
            (module.parser.__parsername__, module.parser)
            for module in parsers
            if issubclass(module.parser,
                          DecoratorBeforeEvalBaseExtensionParser))
    return cls._decorator_before_extensions

@classmethod
def decorator_on_extensions(cls):
    """Get the on-evaluation decorator extensions from the registry."""
    if cls._decorator_on_extensions is None:
        cls._decorator_on_extensions = dict(
            (module.parser.__parsername__, module.parser)
            for module in parsers
            if issubclass(module.parser,
                          DecoratorOnEvalBaseExtensionParser))
    return cls._decorator_on_extensions

@classmethod
def decorator_after_extensions(cls):
    """Get the after-evaluation decorator extensions from the registry."""
    if cls._decorator_after_extensions is None:
        cls._decorator_after_extensions = dict(
            (module.parser.__parsername__, module.parser)
            for module in parsers
            if issubclass(module.parser,
                          DecoratorAfterEvalBaseExtensionParser))
    return cls._decorator_after_extensions
@classmethod
def field_definitions(cls, namespace):
    """
    Get all the field definitions from a given namespace.

    If the namespace does not exist, it tries to create it first
    """
    if namespace not in cls._field_definitions:
        cls.reparse(namespace)
    return cls._field_definitions.get(namespace)

@classmethod
def field_definition_model_based(cls, field_name, model_name, namespace):
    """
    Get the real field definition based on the model name.

    Based on a model name (and namespace) it gets the real field
    definition.
    """
    new_model = ModelParser.resolve_models(model_name, namespace)
    json_id = field_name
    # The model may alias the field name to a different json id.
    for j, f in six.iteritems(new_model['fields']):
        if f == field_name:
            json_id = j
            break
    return cls.field_definitions(namespace).get(json_id, None)

@classmethod
def legacy_field_matchings(cls, namespace):
    """
    Get all the legacy mappings for a given namespace.

    If the namespace does not exist, it tries to create it first

    :see: guess_legacy_field_names()
    """
    if namespace not in cls._legacy_field_matchings:
        cls.reparse(namespace)
    return cls._legacy_field_matchings.get(namespace)

@classmethod
def reparse(cls, namespace):
    """
    Reparse all the fields.

    Invalidate the cached version of all the fields inside the given
    namespace and parse them again.
    """
    cls._field_definitions[namespace] = {}
    # NOTE(review): this clears the legacy matchings for *all* namespaces,
    # not just the one being reparsed -- confirm this is intended.
    cls._legacy_field_matchings = {}
    cls(namespace)._create()
def _create(self):
    """
    Create the fields and legacy fields definitions from configuration.

    Fills up _field_definitions and _legacy_field_matchings dictionary with
    the rules defined inside the configuration files.

    This method should not be used (unless you really know what you are
    doing), use instead :meth:`reparse`
    """
    # Rules marked @extend/@override whose base field has not been seen
    # yet are deferred until every file has been parsed.
    stand_by_rules = []
    for field_file in self.files:
        parser = _create_field_parser()
        try:
            rules = parser.parseFile(field_file, parseAll=True)
        except ParseException as e:
            raise FieldParserException(
                "Cannot parse file '%s',\n%s" % (field_file, str(e)))
        for rule in rules:
            # A plain (non-extend/override) redefinition is an error.
            if (rule.field['json_id'] in
                    self.__class__._field_definitions[self.__namespace])\
                    and not rule.extend and not rule.override:
                raise FieldParserException(
                    "Name error: '%s' field is duplicated '%s'"
                    % (rule.field['json_id'], field_file))
            if (rule.field['json_id'] not in
                    self.__class__._field_definitions[self.__namespace])\
                    and (rule.extend or rule.override):
                stand_by_rules.append(rule)
            else:
                self._create_rule(rule)
    # Second pass: the deferred rules must now have a base definition.
    for rule in stand_by_rules:
        if rule.field['json_id'] not in \
                self.__class__._field_definitions[self.__namespace]:
            raise FieldParserException(
                "Name error: '%s' field is not defined but is "
                "marked as 'extend' or 'override'"
                % (rule.field['json_id'], ))
        self._create_rule(rule)
def _create_rule(self, rule):
    """
    Create the field and legacy definitions.

    The result looks like this.

    .. code-block:: json

        {key: { override: True/False,
                extend: True/False,
                hidden: True/False,
                aliases: [],
                pid: num/None,
                rules: {'master_format_1': [{rule1}, {rule2}, ...],
                        'master_format_2': [....],
                        ......
                        'calculated': [....],
                        'derived': [...]}
                .... extensions ....
              }
        }

    Each of the rule (rule1, rule2, etc.) has the same content.

    .. code-block:: json

        {'source_format' : source_format/calculated/derived,
         'source_tag' : source_tag/None,
         'function' : python code to apply to the master value,
         'decorators' : {}
        }

    :param rule: a single parsed field rule (pyparsing result).
    """
    json_id = rule.field['json_id']
    # TODO: check if pyparsing can handle this!
    all_type_def = []
    if rule.creator_def:
        all_type_def.extend(rule.creator_def.asList())
    if rule.calculated_def:
        all_type_def.append(rule.calculated_def)
    elif rule.derived_def:
        all_type_def.append(rule.derived_def)
    # When extending, start from the rules already registered for the field.
    rules = self.__class__._field_definitions[self.__namespace][json_id]\
        .get('rules', {}) if rule.extend else dict()
    for field_def in all_type_def:
        self.__create_decorators_content(rule, field_def)
        if field_def['source_format'] not in rules:
            rules[field_def['source_format']] = list()
        rules[field_def['source_format']].append(field_def)
    # Every field gets an implicit identity rule for the 'json' master format.
    if 'json' not in rules:
        rules['json'] = [{'source_format': 'json',
                          'source_tags': [json_id],
                          'function': compile('value', '', 'eval'),
                          'type': 'creator',
                          'decorators': {'before': {},
                                         'on': {},
                                         'after': {}
                                         }
                          }]
    rule_dict = dict()
    rule_dict['aliases'] = rule.field['aliases']
    # BUGFIX: was ``rule.pid is not ''`` -- an identity comparison against a
    # string literal (SyntaxWarning on Python 3.8+, correct only by the
    # accident of small-string interning). Use equality instead.
    rule_dict['pid'] = rule.pid if rule.pid != '' else None
    rule_dict['override'] = rule.override if rule.override else False
    rule_dict['extend'] = rule.extend if rule.extend else False
    rule_dict['hidden'] = rule.hidden if rule.hidden else False
    rule_dict['rules'] = rules
    if rule.override:
        self.__class__._field_definitions[self.__namespace][json_id]\
            .update(rule_dict)
    elif rule.extend:
        # Extending merges aliases, ORs the hidden flag and keeps the rest.
        self.__class__._field_definitions[self.__namespace][json_id][
            'aliases'].extend(rule_dict['aliases'])
        self.__class__._field_definitions[self.__namespace][json_id][
            'hidden'] |= rule_dict['hidden']
        self.__class__._field_definitions[self.__namespace][json_id][
            'extend'] = True
    else:
        self.__class__._field_definitions[self.__namespace][json_id] = \
            rule_dict
    self.__resolve_parser_extensions(rule)
def __resolve_parser_extensions(self, rule):
    """
    Apply the incoming rule for each extension.

    For each of the extensions available it tries to apply it to the
    incoming rule, storing the created element under the extension name
    inside the field definition.
    """
    json_id = rule.field['json_id']
    for name, parser in six.iteritems(self.__class__.field_extensions()):
        # Only extensions actually present on the parsed rule are applied.
        if getattr(rule, name, None):
            self.__class__._field_definitions[self.__namespace][
                json_id][name] = parser.create_element(rule,
                                                       self.__namespace)
def __create_decorators_content(self, rule, field_def):
    """Collect every decorator of *field_def* into its evaluation phase.

    Replaces ``field_def['decorators']`` (raw parse results) with a dict of
    the form ``{'before': {...}, 'on': {...}, 'after': {...}}``, each phase
    mapping a decorator name to the element built by its parser.
    """
    phase_registries = (
        ('before', self.__class__.decorator_before_extensions()),
        ('on', self.__class__.decorator_on_extensions()),
        ('after', self.__class__.decorator_after_extensions()),
    )
    collected = {'before': {}, 'on': {}, 'after': {}}
    for phase, registry in phase_registries:
        for name, parser in six.iteritems(registry):
            if name in field_def['decorators']:
                collected[phase][name] = parser.create_element(
                    rule, field_def, field_def['decorators'][name],
                    self.__namespace)
    field_def['decorators'] = collected
class ModelParser(object):
    """Record model parser."""

    _model_definitions = {}
    """Contain all the model definitions ordered by namespace."""

    _parser_extensions = None
    """Model only parser extensions."""

    def __init__(self, namespace):
        """Initialize the model parser with the given namespace."""
        self.files = list(models_definitions(namespace))
        self.__namespace = namespace

    @classmethod
    def parser_extensions(cls):
        """Get only the model parser extensions from the parser registry."""
        if cls._parser_extensions is None:
            cls._parser_extensions = \
                dict((module.parser.__parsername__, module.parser)
                     for module in parsers
                     if issubclass(module.parser, ModelBaseExtensionParser))
        return cls._parser_extensions

    @classmethod
    def model_definitions(cls, namespace):
        """
        Get all the model definitions given a namespace.

        If the namespace does not exist, it tries to create it first.
        """
        if namespace not in cls._model_definitions:
            cls.reparse(namespace)
        return cls._model_definitions.get(namespace)

    @classmethod
    def resolve_models(cls, model_list, namespace):
        """
        Resolve all the field conflicts.

        From a given list of model definitions resolves all the field
        conflicts and returns a new model definition containing all the
        information from the model list.
        The field definitions are resolved from left-to-right.

        :param model_list: It could be also a string, in which case the model
            definition is returned as it is.
        :return: Dictionary containing the union of the model definitions.
        """
        if model_list == '__default__':
            # Identity mapping: every known field maps to itself.
            return {
                'fields': dict(
                    zip(FieldParser.field_definitions(namespace).keys(),
                        FieldParser.field_definitions(namespace).keys())),
                'bases': [],
            }
        if isinstance(model_list, six.string_types):
            try:
                return cls.model_definitions(namespace)[model_list]
            except KeyError:
                # Unknown model name: fall back to the identity mapping.
                return {
                    'fields': dict(
                        zip(FieldParser.field_definitions(namespace).keys(),
                            FieldParser.field_definitions(namespace).keys())),
                    'bases': [],
                }
        new_model = {'fields': dict(), 'bases': list()}
        for model in model_list:
            if model == '__default__':
                new_model['fields'].update(
                    zip(FieldParser.field_definitions(namespace).keys(),
                        FieldParser.field_definitions(namespace).keys()))
            elif model not in cls.model_definitions(namespace):
                new_model['fields'].update(
                    dict(zip(FieldParser.field_definitions(namespace).keys(),
                             FieldParser.field_definitions(namespace).keys())))
            else:
                model_def = cls.model_definitions(namespace).get(model, {})
                new_model['fields'].update(model_def.get('fields', {}))
                new_model['bases'].extend(model_def.get('bases', []))
                # Any extra key is handled by its registered model extension.
                for key, value in six.iteritems(model_def):
                    if key in ('fields', 'bases'):
                        continue
                    new_model[key] = cls.parser_extensions()[key]\
                        .extend_model(new_model.get(key), value)
        return new_model

    @classmethod
    def reparse(cls, namespace):
        """
        Invalidate the cached version of all the models.

        It does it inside the given namespace and parse it again.
        """
        cls._model_definitions[namespace] = {}
        cls(namespace)._create()

    def _create(self):
        """
        Fill up _model_definitions dictionary.

        It uses what is written inside the `*.cfg` model descriptions
        It also resolve inheritance at creation time and name matching for the
        field names present inside the model file

        The result looks like this:

        .. code-block:: json

            {'model': {'fields': {'name_for_field1': json_id1,
                                  'name_for_field2': json_id2,
                                  ....
                                  'name_for_fieldN': fieldN },
                       'bases': [(inherit_from_list), ...]
                      },
             ...
            }

        This method should not be used (unless you really know what your are
        doing), use instead :meth:`reparse`

        :raises: ModelParserException in case of missing model definition
            (helpful if we use inheritance) or in case of unknown field
            name.
        """
        for model_file in self.files:
            parser = _create_model_parser()
            model_name = os.path.basename(model_file).split('.')[0]
            if model_name in \
                    self.__class__._model_definitions[self.__namespace]:
                raise ModelParserException(
                    "Already defined model: %s" % (model_name,))

            self.__class__._model_definitions[self.__namespace][model_name] = {
                'fields': {},
                'bases': [],
            }
            try:
                model_definition = parser.parseFile(model_file, parseAll=True)
            except ParseException as e:
                raise ModelParserException(
                    "Cannot parse file %s,\n%s" % (model_file, str(e)))

            if not model_definition.fields:
                raise ModelParserException("Field definition needed")

            # IDIOM: generator expression instead of a throwaway list.
            # BUGFIX: error message grammar ("is no find" -> "is not found")
            # and proper 1-tuple for the % operator.
            if any(json_id not in
                   FieldParser.field_definitions(self.__namespace)
                   for json_id in model_definition.fields.values()):
                raise ModelParserException(
                    "At least one field is not found in the field "
                    "definitions for file '%s'" % (model_file,))

            self.__class__._model_definitions[self.__namespace][model_name][
                'fields'] = model_definition.fields
            self.__class__._model_definitions[self.__namespace][model_name][
                'bases'] = model_definition.bases.asList() \
                if model_definition.bases else []
            self.__resolve_parser_extensions(model_name, model_definition)

        # Inheritance can only be resolved once every model file is loaded.
        self.__resolve_inheritance()

    def __resolve_inheritance(self):
        """Resolve the inheritance."""
        def resolve_ext_inheritance(ext_name, model_definition):
            # Walk the bases recursively, merging the extension values.
            for inherit_from in model_definition['bases']:
                base_model = self.__class__.model_definitions(
                    self.__namespace)[inherit_from]
                model_definition[ext_name] = \
                    self.__class__.parser_extensions()[ext_name].inherit_model(
                        model_definition.get(ext_name),
                        resolve_ext_inheritance(ext_name, base_model))
            return model_definition.get(ext_name)

        def resolve_field_inheritance(model_definition):
            fields = {}
            for inherit_from in model_definition['bases']:
                base_model = self.__class__.model_definitions(
                    self.__namespace)[inherit_from]
                fields.update(resolve_field_inheritance(base_model))
            if fields:
                # Invert the mappings so the *current* model's field names
                # win over inherited ones on json_id conflicts.
                inverted_fields = dict((v, k)
                                       for k, v in six.iteritems(fields))
                inverted_model_fields = dict((v, k) for k, v in six.iteritems(
                    model_definition['fields']))
                inverted_fields.update(inverted_model_fields)
                fields = dict((v, k)
                              for k, v in six.iteritems(inverted_fields))
            else:
                fields.update(model_definition['fields'])
            return fields

        for model_definition in \
                self.__class__.model_definitions(self.__namespace).values():
            model_definition['fields'] = resolve_field_inheritance(
                model_definition)
            for name, model_ext in \
                    six.iteritems(self.__class__.parser_extensions()):
                model_definition[name] = resolve_ext_inheritance(
                    name, model_definition)

    def __resolve_parser_extensions(self, model_name, model_def):
        """Apply the incoming rule for each available extension."""
        for name, parser in six.iteritems(self.__class__.parser_extensions()):
            if name in model_def:
                self.__class__._model_definitions[self.__namespace][
                    model_name][name] = parser.create_element(
                        model_def, self.__namespace)
def guess_legacy_field_names(fields, master_format, namespace):
    """
    Find the equivalent JSON field for the legacy field(s).

    Using the legacy rules written in the config file (@legacy) tries to find
    the equivalent json field for one or more legacy fields.

    .. doctest::

        >>> guess_legacy_field_names(('100__a', '245'), 'marc', 'recordext')
        {'100__a':['authors[0].full_name'], '245':['title']}
    """
    if isinstance(fields, six.string_types):
        fields = (fields, )
    matches = {}
    for legacy_field in fields:
        try:
            format_map = FieldParser.legacy_field_matchings(
                namespace)[master_format]
            matches[legacy_field] = format_map.get(legacy_field, [])
        except (KeyError, TypeError):
            # Unknown master format or no matchings parsed at all.
            matches[legacy_field] = []
    return matches
def get_producer_rules(field, code, namespace, model=None):
    """
    Get all the producer rules related with the field and code.

    From the field definitions gets all the producer rules related with the
    field and the code (using also the namespace).
    For each producer rule the first element are the 'preconditions' to apply
    the rules and the second one are the actual rules.

    .. doctest::

        >>> get_producer_rules('_first_author', 'json_for_marc', 'recordext')
        [((),
          {'100__a': 'full_name',
           '100__e': 'relator_name',
           '100__h': 'CCID',
           '100__i': 'INSPIRE_number',
           '100__u': 'affiliation'})]
        >>> get_producer_rules('title', 'json_for_marc', 'recordext')
        [[((), {'245__a': 'title', '245__b': 'subtitle', '245__k': 'form'})]

    :param model: list of model names used to resolve the field definition;
        defaults to ``['__default__']``.
    :raises KeyError: if the field has no definition for the given model(s).
    """
    # BUGFIX: the default used to be the mutable literal ['__default__']
    # (pylint W0102); use a None sentinel so no list is shared across calls.
    if model is None:
        model = ['__default__']
    try:
        return FieldParser.field_definition_model_based(
            field, model, namespace).get('producer', {}).get(code, [])
    except AttributeError:
        raise KeyError(field)
class BaseExtensionParser(type):  # pylint: disable=R0921
    """Metaclass for the configuration file extensions."""

    def __new__(mcs, name, bases, dict_):
        """Create the class, deriving ``__parsername__`` from its name.

        E.g. ``FooParser`` registers under ``'foo'`` unless
        ``__parsername__`` is set explicitly in the class body.
        """
        if not dict_.get('__parsername__'):
            dict_['__parsername__'] = name.lower().replace('parser', '')
        return super(BaseExtensionParser, mcs).__new__(mcs, name, bases, dict_)

    @classmethod
    def parse_element(mcs, indent_stack):
        """
        Parse the element.

        Using pyparsing defines a piece of the grammar to parse the
        extension from configuration file

        :return: pyparsing ParseElement
        """
        raise NotImplementedError()

    @classmethod
    def create_element(mcs, *args, **kwargs):
        """
        Create the element.

        Once the extension is parsed defines the actions that have to be taken
        to store inside the field_definitions the information needed or useful.
        """
        raise NotImplementedError()

    @classmethod
    def add_info_to_field(mcs, *args, **kwargs):
        """
        Define which information goes into the meta-metadata dictionary.

        Defines which information goes inside the ``__meta_metadata__``
        dictionary and how.
        """
        raise NotImplementedError()

    @classmethod
    def evaluate(mcs, *args, **kwargs):
        """
        Evaluate the field.

        Once the extension information is added to the field, whenever it gets
        accessed or modified this method is called for each of the extensions
        set in the metadata of this field.
        """
        raise NotImplementedError()
class FieldBaseExtensionParser(six.with_metaclass(BaseExtensionParser)):  # pylint: disable=W0223,W0232,R0903,R0921
    """Base class for field parser extensions."""

    @classmethod
    def add_info_to_field(cls, json_id, info):
        """
        Create the content of ``extension_name``.

        Should create the content of
        ``__meta_metadata__.json.extension_name``.
        """
        raise NotImplementedError()

    @classmethod
    def evaluate(cls, json, field_name, action, args):
        """
        Evaluate the field.

        Depending on the extension perform the actions that it defines using
        the current value as parameter. (It could cause side effects on the
        current json)
        """
        raise NotImplementedError()
class ModelBaseExtensionParser(six.with_metaclass(BaseExtensionParser)):  # pylint: disable=W0223,W0232,R0903,R0921
    """Base class for model parser extensions."""

    @classmethod
    def inherit_model(cls, current_value, base_value):
        """
        Inherit the model from other.

        When a model inherits from other (or several) it should resolve the
        inheritance taking the current value and the base value from the
        extension.
        """
        raise NotImplementedError()

    @classmethod
    def extend_model(cls, current_value, new_value):
        """
        Extend the model.

        When a json object is using several models this method should provide
        the logic to extend the content of the extensions.

        :return: the content of model[extension]
        """
        raise NotImplementedError()

    @classmethod
    def add_info_to_field(cls, info):
        """
        Define which information goes into the model dictionary.

        Defines which information goes inside the
        ``__meta_metadata__.__model__`` dictionary and how.
        """
        raise NotImplementedError()

    @classmethod
    def evaluate(cls, obj, args):
        """
        Get and modify the current object.

        Gets the current object (typically a SmartJson object) and modifies it
        accordingly with the extension nature.
        """
        raise NotImplementedError()
class DecoratorBaseExtensionParser(six.with_metaclass(BaseExtensionParser)):  # pylint: disable=W0223,W0232,R0903
    """Base class for decorator parser extension.

    Marker base: concrete decorators derive from one of the
    before/on/after evaluation subclasses below.
    """

    pass
class DecoratorBeforeEvalBaseExtensionParser(DecoratorBaseExtensionParser):  # pylint: disable=W0223,W0232,R0903,R0921
    """
    Base class for decorator parser extensions.

    These ones will be evaluated *before* any operation on the value.
    """

    @classmethod
    def evaluate(cls, reader, args):
        """Evaluate ``args`` and returns a boolean depending on them."""
        raise NotImplementedError()
class DecoratorOnEvalBaseExtensionParser(DecoratorBaseExtensionParser):  # pylint: disable=W0223,W0232,R0903,R0921
    """
    Base class for decorator parser extensions.

    These ones will be evaluated *while* the rule gets evaluated with the
    input value. (Therefore they have access to ``value``) These decorators
    are only useful for ``creator`` definitions.
    """

    @classmethod
    def evaluate(cls, value, namespace, args):
        """
        Evaluate ``args`` with the master value from the input.

        :returns: a boolean depending on them.
        """
        raise NotImplementedError()
class DecoratorAfterEvalBaseExtensionParser(DecoratorBaseExtensionParser):  # pylint: disable=W0223,W0232,R0903,R0921
    """
    Base class for decorator parser extensions.

    This one will be evaluated *after* the rule gets evaluated and before
    setting the value to the json.
    """

    @classmethod
    def add_info_to_field(cls, json_id, info, args):
        """
        Add a field to the JSON so it can be evaluated.

        When adding a new field to the json, if its definition uses the
        current decorator it adds the needed content in a way that
        ``evaluate`` can use.
        """
        raise NotImplementedError()

    @classmethod
    def evaluate(cls, json, field_name, action, args):
        """
        Evaluate the actions depending on the decorator.

        Depending on the decorator performs the actions that it defines using
        the current value as parameter. (It could cause side effects on the
        current json).
        """
        raise NotImplementedError()
| gpl-2.0 |
xbmc/xbmc-antiquated | xbmc/lib/libPython/Python/Lib/test/test_sys.py | 24 | 8698 | # -*- coding: iso-8859-1 -*-
import unittest, test.test_support
import sys, cStringIO
class SysModuleTest(unittest.TestCase):
    """Tests for the interpreter-facing API of the ``sys`` module (Python 2)."""

    def test_original_displayhook(self):
        import __builtin__
        savestdout = sys.stdout
        out = cStringIO.StringIO()
        sys.stdout = out

        dh = sys.__displayhook__

        self.assertRaises(TypeError, dh)
        if hasattr(__builtin__, "_"):
            del __builtin__._

        # Displaying None prints nothing and must not rebind "_".
        dh(None)
        self.assertEqual(out.getvalue(), "")
        self.assert_(not hasattr(__builtin__, "_"))
        dh(42)
        self.assertEqual(out.getvalue(), "42\n")
        self.assertEqual(__builtin__._, 42)

        # With no sys.stdout the hook must raise RuntimeError.
        del sys.stdout
        self.assertRaises(RuntimeError, dh, 42)

        sys.stdout = savestdout

    def test_lost_displayhook(self):
        olddisplayhook = sys.displayhook
        del sys.displayhook
        code = compile("42", "<string>", "single")
        self.assertRaises(RuntimeError, eval, code)
        sys.displayhook = olddisplayhook

    def test_custom_displayhook(self):
        olddisplayhook = sys.displayhook
        def baddisplayhook(obj):
            raise ValueError
        sys.displayhook = baddisplayhook
        code = compile("42", "<string>", "single")
        self.assertRaises(ValueError, eval, code)
        sys.displayhook = olddisplayhook

    def test_original_excepthook(self):
        savestderr = sys.stderr
        err = cStringIO.StringIO()
        sys.stderr = err

        eh = sys.__excepthook__

        self.assertRaises(TypeError, eh)
        try:
            raise ValueError(42)
        except ValueError, exc:
            eh(*sys.exc_info())

        sys.stderr = savestderr
        self.assert_(err.getvalue().endswith("ValueError: 42\n"))

    # FIXME: testing the code for a lost or replaced excepthook in
    # Python/pythonrun.c::PyErr_PrintEx() is tricky.

    def test_exc_clear(self):
        self.assertRaises(TypeError, sys.exc_clear, 42)

        # Verify that exc_info is present and matches exc, then clear it, and
        # check that it worked.
        def clear_check(exc):
            typ, value, traceback = sys.exc_info()
            self.assert_(typ is not None)
            self.assert_(value is exc)
            self.assert_(traceback is not None)

            sys.exc_clear()

            typ, value, traceback = sys.exc_info()
            self.assert_(typ is None)
            self.assert_(value is None)
            self.assert_(traceback is None)

        def clear():
            try:
                raise ValueError, 42
            except ValueError, exc:
                clear_check(exc)

        # Raise an exception and check that it can be cleared
        clear()

        # Verify that a frame currently handling an exception is
        # unaffected by calling exc_clear in a nested frame.
        try:
            raise ValueError, 13
        except ValueError, exc:
            typ1, value1, traceback1 = sys.exc_info()
            clear()
            typ2, value2, traceback2 = sys.exc_info()

            self.assert_(typ1 is typ2)
            self.assert_(value1 is exc)
            self.assert_(value1 is value2)
            self.assert_(traceback1 is traceback2)

        # Check that an exception can be cleared outside of an except block
        clear_check(exc)

    def test_exit(self):
        self.assertRaises(TypeError, sys.exit, 42, 42)

        # call with integer argument 0
        try:
            sys.exit(0)
        except SystemExit, exc:
            self.assertEquals(exc.code, 0)
        except:
            self.fail("wrong exception")
        else:
            self.fail("no exception")

        # call with integer argument
        try:
            sys.exit(42)
        except SystemExit, exc:
            self.assertEquals(exc.code, 42)
        except:
            self.fail("wrong exception")
        else:
            self.fail("no exception")

        # call with tuple argument with one entry
        # entry will be unpacked
        try:
            sys.exit((42,))
        except SystemExit, exc:
            self.assertEquals(exc.code, 42)
        except:
            self.fail("wrong exception")
        else:
            self.fail("no exception")

        # call with string argument
        try:
            sys.exit("exit")
        except SystemExit, exc:
            self.assertEquals(exc.code, "exit")
        except:
            self.fail("wrong exception")
        else:
            self.fail("no exception")

        # call with tuple argument with two entries
        try:
            sys.exit((17, 23))
        except SystemExit, exc:
            self.assertEquals(exc.code, (17, 23))
        except:
            self.fail("wrong exception")
        else:
            self.fail("no exception")

    def test_getdefaultencoding(self):
        if test.test_support.have_unicode:
            self.assertRaises(TypeError, sys.getdefaultencoding, 42)
        # can't check more than the type, as the user might have changed it
        self.assert_(isinstance(sys.getdefaultencoding(), str))

    # testing sys.settrace() is done in test_trace.py
    # testing sys.setprofile() is done in test_profile.py

    def test_setcheckinterval(self):
        self.assertRaises(TypeError, sys.setcheckinterval)
        orig = sys.getcheckinterval()
        for n in 0, 100, 120, orig: # orig last to restore starting state
            sys.setcheckinterval(n)
            self.assertEquals(sys.getcheckinterval(), n)

    def test_recursionlimit(self):
        self.assertRaises(TypeError, sys.getrecursionlimit, 42)
        oldlimit = sys.getrecursionlimit()
        self.assertRaises(TypeError, sys.setrecursionlimit)
        self.assertRaises(ValueError, sys.setrecursionlimit, -42)
        sys.setrecursionlimit(10000)
        self.assertEqual(sys.getrecursionlimit(), 10000)
        sys.setrecursionlimit(oldlimit)

    def test_getwindowsversion(self):
        # Only present on Windows builds.
        if hasattr(sys, "getwindowsversion"):
            v = sys.getwindowsversion()
            self.assert_(isinstance(v, tuple))
            self.assertEqual(len(v), 5)
            self.assert_(isinstance(v[0], int))
            self.assert_(isinstance(v[1], int))
            self.assert_(isinstance(v[2], int))
            self.assert_(isinstance(v[3], int))
            self.assert_(isinstance(v[4], str))

    def test_dlopenflags(self):
        # Only present on platforms with dlopen() support.
        if hasattr(sys, "setdlopenflags"):
            self.assert_(hasattr(sys, "getdlopenflags"))
            self.assertRaises(TypeError, sys.getdlopenflags, 42)
            oldflags = sys.getdlopenflags()
            self.assertRaises(TypeError, sys.setdlopenflags)
            sys.setdlopenflags(oldflags+1)
            self.assertEqual(sys.getdlopenflags(), oldflags+1)
            sys.setdlopenflags(oldflags)

    def test_refcount(self):
        self.assertRaises(TypeError, sys.getrefcount)
        c = sys.getrefcount(None)
        n = None
        self.assertEqual(sys.getrefcount(None), c+1)
        del n
        self.assertEqual(sys.getrefcount(None), c)
        if hasattr(sys, "gettotalrefcount"):
            # Debug builds only.
            self.assert_(isinstance(sys.gettotalrefcount(), int))

    def test_getframe(self):
        self.assertRaises(TypeError, sys._getframe, 42, 42)
        self.assertRaises(ValueError, sys._getframe, 2000000000)
        self.assert_(
            SysModuleTest.test_getframe.im_func.func_code \
            is sys._getframe().f_code
        )

    def test_attributes(self):
        self.assert_(isinstance(sys.api_version, int))
        self.assert_(isinstance(sys.argv, list))
        self.assert_(sys.byteorder in ("little", "big"))
        self.assert_(isinstance(sys.builtin_module_names, tuple))
        self.assert_(isinstance(sys.copyright, basestring))
        self.assert_(isinstance(sys.exec_prefix, basestring))
        self.assert_(isinstance(sys.executable, basestring))
        self.assert_(isinstance(sys.hexversion, int))
        self.assert_(isinstance(sys.maxint, int))
        self.assert_(isinstance(sys.maxunicode, int))
        self.assert_(isinstance(sys.platform, basestring))
        self.assert_(isinstance(sys.prefix, basestring))
        self.assert_(isinstance(sys.version, basestring))
        vi = sys.version_info
        self.assert_(isinstance(vi, tuple))
        self.assertEqual(len(vi), 5)
        self.assert_(isinstance(vi[0], int))
        self.assert_(isinstance(vi[1], int))
        self.assert_(isinstance(vi[2], int))
        self.assert_(vi[3] in ("alpha", "beta", "candidate", "final"))
        self.assert_(isinstance(vi[4], int))
def test_main():
    """Entry point used by regrtest to run this module's tests."""
    test.test_support.run_unittest(SysModuleTest)

if __name__ == "__main__":
    test_main()
| gpl-2.0 |
servo/servo | tests/wpt/web-platform-tests/webdriver/tests/support/fixtures.py | 3 | 11504 | import copy
import json
import os
import asyncio
import pytest
import webdriver
from six import string_types
from six.moves.urllib.parse import urlunsplit
from tests.support import defaults
from tests.support.helpers import cleanup_session, deep_update
from tests.support.inline import build_inline
from tests.support.http_request import HTTPRequest
from tests.support.sync import Poll
# Module-level cache of the active WebDriver/BiDi session, shared by the
# session/bidi_session fixtures so one session can span many tests.
_current_session = None
# NOTE(review): _custom_session is never read in this chunk -- verify it is
# still used elsewhere before removing.
_custom_session = False
def pytest_configure(config):
    """Register the ``capabilities`` marker so pytest does not warn on it."""
    # register the capabilities marker
    config.addinivalue_line("markers",
                            "capabilities: mark test to use capabilities")
@pytest.fixture
def capabilities():
    """Default capabilities to use for a new WebDriver session.

    Tests override these via the ``capabilities`` marker (see
    ``pytest_generate_tests``).
    """
    return {}
def pytest_generate_tests(metafunc):
    """Parametrize the ``capabilities`` fixture from the test's marker."""
    if "capabilities" in metafunc.fixturenames:
        marker = metafunc.definition.get_closest_marker(name="capabilities")
        if marker:
            metafunc.parametrize("capabilities", marker.args, ids=None)
# Ensure that the event loop is restarted once per session rather than the
# default of once per test; if we don't do this, tests will try to reuse a
# closed event loop and fail with an error that the "future belongs to a
# different loop".
@pytest.fixture(scope="session")
def event_loop():
    """Change event_loop fixture to session level."""
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    # Closed only once the whole test session is finished.
    loop.close()
@pytest.fixture
def add_event_listeners(session):
    """Register listeners for tracked events on element.

    Event types are appended to the page-global ``window.events`` array,
    which tests can inspect afterwards.
    """
    def add_event_listeners(element, tracked_events):
        element.session.execute_script("""
            let element = arguments[0];
            let trackedEvents = arguments[1];

            if (!("events" in window)) {
              window.events = [];
            }

            for (var i = 0; i < trackedEvents.length; i++) {
              element.addEventListener(trackedEvents[i], function (event) {
                window.events.push(event.type);
              });
            }
            """, args=(element, tracked_events))
    return add_event_listeners
@pytest.fixture
def create_cookie(session, url):
    """Factory fixture: set a cookie in the current session and return it."""
    def create_cookie(name, value, **kwargs):
        cookie_path = kwargs.get("path")
        if cookie_path is not None:
            # Navigate first so the cookie's path matches a loaded document.
            session.url = url(cookie_path)

        session.set_cookie(name, value, **kwargs)
        return session.cookies(name)

    return create_cookie
@pytest.fixture
def create_frame(session):
    """Create an `iframe` element in the current browsing context and insert it
    into the document. Return a reference to the newly-created element."""
    def create_frame():
        append = """
            var frame = document.createElement('iframe');
            document.body.appendChild(frame);
            return frame;
        """
        return session.execute_script(append)

    return create_frame
@pytest.fixture
def http(configuration):
    """Plain HTTP client pointed at the WebDriver server under test."""
    host = configuration["host"]
    port = configuration["port"]
    return HTTPRequest(host, port)
@pytest.fixture
def server_config():
    """The wptserve configuration, deserialized from the environment."""
    raw_config = os.environ.get("WD_SERVER_CONFIG")
    return json.loads(raw_config)
@pytest.fixture(scope="session")
def configuration():
    """Host, port and capabilities of the WebDriver server, from env vars."""
    return {
        "host": os.environ.get("WD_HOST", defaults.DRIVER_HOST),
        "port": int(os.environ.get("WD_PORT", str(defaults.DRIVER_PORT))),
        "capabilities": json.loads(os.environ.get("WD_CAPABILITIES", "{}")),
    }
async def reset_current_session_if_necessary(caps, request_bidi):
    """End the cached global session when it cannot be reused.

    The session is discarded when its type (BiDi vs. classic) differs from
    ``request_bidi`` or when its capabilities do not match ``caps``.
    """
    global _current_session

    # If there is a session with different capabilities active or the current session
    # is of different type than the one we would like to create, end it now.
    if _current_session is not None:
        is_bidi = isinstance(_current_session, webdriver.BidiSession)
        if is_bidi != request_bidi or not _current_session.match(caps):
            if is_bidi:
                # BidiSession.end is a coroutine; classic Session.end is not.
                await _current_session.end()
            else:
                _current_session.end()
            _current_session = None
@pytest.fixture(scope="function")
async def session(capabilities, configuration, request):
    """Create and start a session for a test that does not itself test session creation.

    By default the session will stay open after each test, but we always try to start a
    new one and assume that if that fails there is already a valid session. This makes it
    possible to recover from some errors that might leave the session in a bad state, but
    does not demand that we start a new session per test."""
    global _current_session

    # Update configuration capabilities with custom ones from the
    # capabilities fixture, which can be set by tests
    caps = copy.deepcopy(configuration["capabilities"])
    deep_update(caps, capabilities)
    caps = {"alwaysMatch": caps}

    # Drop any cached session of the wrong type or with mismatched capabilities.
    await reset_current_session_if_necessary(caps, False)

    if _current_session is None:
        _current_session = webdriver.Session(
            configuration["host"],
            configuration["port"],
            capabilities=caps)
    try:
        _current_session.start()
    except webdriver.error.SessionNotCreatedException:
        # A live session already exists -- reuse it instead of failing.
        if not _current_session.session_id:
            raise

    # Enforce a fixed default window size and position
    _current_session.window.size = defaults.WINDOW_SIZE
    _current_session.window.position = defaults.WINDOW_POSITION

    yield _current_session

    cleanup_session(_current_session)
@pytest.fixture(scope="function")
async def bidi_session(capabilities, configuration, request):
    """Create and start a bidi session for a test that does not itself test
    bidi session creation.

    By default the session will stay open after each test, but we always try to start a
    new one and assume that if that fails there is already a valid session. This makes it
    possible to recover from some errors that might leave the session in a bad state, but
    does not demand that we start a new session per test."""
    global _current_session

    # Update configuration capabilities with custom ones from the
    # capabilities fixture, which can be set by tests
    caps = copy.deepcopy(configuration["capabilities"])
    deep_update(caps, capabilities)
    caps = {"alwaysMatch": caps}

    # Drop any cached classic session or one with mismatched capabilities.
    await reset_current_session_if_necessary(caps, True)

    if _current_session is None:
        _current_session = webdriver.BidiSession(
            configuration["host"],
            configuration["port"],
            capabilities=caps)
    try:
        await _current_session.start()
    except webdriver.error.SessionNotCreatedException:
        # A live session already exists -- reuse it instead of failing.
        if not _current_session.session_id:
            raise

    # Enforce a fixed default window size and position
    _current_session.window.size = defaults.WINDOW_SIZE
    _current_session.window.position = defaults.WINDOW_POSITION

    yield _current_session

    cleanup_session(_current_session)
@pytest.fixture(scope="function")
def current_session():
    """Return the module-global cached session (``None`` if none started)."""
    return _current_session
@pytest.fixture
def url(server_config):
    """Factory building absolute URLs against the wptserve domains/ports."""
    def url(path, protocol="http", domain="", subdomain="", query="", fragment=""):
        domain = server_config["domains"][domain][subdomain]
        # First configured port for the protocol is used.
        port = server_config["ports"][protocol][0]
        host = "{0}:{1}".format(domain, port)
        return urlunsplit((protocol, host, path, query, fragment))
    return url
@pytest.fixture
def create_dialog(session):
    """Create a dialog (one of "alert", "prompt", or "confirm") and provide a
    function to validate that the dialog has been "handled" (either accepted or
    dismissed) by returning some value."""
    def create_dialog(dialog_type, text=None):
        assert dialog_type in ("alert", "confirm", "prompt"), (
            "Invalid dialog type: '%s'" % dialog_type)

        if text is None:
            text = ""

        assert isinstance(text, string_types), "`text` parameter must be a string"

        # Script completes itself when the user prompt has been opened.
        # For prompt() dialogs, add a value for the 'default' argument,
        # as some user agents (IE, for example) do not produce consistent
        # values for the default.
        session.execute_async_script("""
            let dialog_type = arguments[0];
            let text = arguments[1];

            setTimeout(function() {
              if (dialog_type == 'prompt') {
                window.dialog_return_value = window[dialog_type](text, '');
              } else {
                window.dialog_return_value = window[dialog_type](text);
              }
            }, 0);
            """, args=(dialog_type, text))

        # Poll until the prompt is actually visible to the driver.
        wait = Poll(
            session,
            timeout=15,
            ignored_exceptions=webdriver.NoSuchAlertException,
            message="No user prompt with text '{}' detected".format(text))
        wait.until(lambda s: s.alert.text == text)

    return create_dialog
@pytest.fixture
def closed_frame(session, url):
    """Run the test inside a window whose current frame removed its parent."""
    original_handle = session.window_handle
    new_handle = session.new_window()

    session.window_handle = new_handle

    session.url = url("/webdriver/tests/support/html/frames.html")

    subframe = session.find.css("#sub-frame", all=False)
    session.switch_frame(subframe)

    deleteframe = session.find.css("#delete-frame", all=False)
    session.switch_frame(deleteframe)

    # Clicking this button detaches the frame we are currently switched into.
    button = session.find.css("#remove-parent", all=False)
    button.click()

    yield

    session.window.close()
    assert new_handle not in session.handles, "Unable to close window {}".format(new_handle)

    session.window_handle = original_handle
@pytest.fixture
def closed_window(session, inline):
    """Open a scratch window, capture an element from it, close the window,
    and yield (original_handle, element) so tests can exercise commands
    against a closed top-level browsing context."""
    original_handle = session.window_handle
    new_handle = session.new_window()
    session.window_handle = new_handle
    session.url = inline("<input id='a' value='b'>")
    element = session.find.css("input", all=False)
    # The window is closed *before* yielding: the element reference is
    # intentionally stale when the test body runs.
    session.window.close()
    assert new_handle not in session.handles, "Unable to close window {}".format(new_handle)
    yield (original_handle, element)
    session.window_handle = original_handle
@pytest.fixture
def inline(url):
    """Provide a factory turning a source extract into a well-formed document.

    The returned callable hands the extract to the wptserve inline handler
    (via build_inline) and yields a URL which serves the extract embedded
    in boilerplate appropriate for the requested document type. The media
    type and character set may be customised through keyword arguments,
    which are forwarded to build_inline along with any extra options for
    the url fixture's build_url function. Serving through wptserve (rather
    than data: URLs) keeps this working across all browsers.
    """
    def serve_extract(src, **kwargs):
        # ``url`` is closed over from the fixture argument.
        return build_inline(url, src, **kwargs)

    return serve_extract
@pytest.fixture
def iframe(inline):
    """Embed an inlined document extract as the source of an <iframe>."""
    def wrap_in_iframe(src, **kwargs):
        document_url = inline(src, **kwargs)
        return "<iframe src='{}'></iframe>".format(document_url)

    return wrap_in_iframe
| mpl-2.0 |
mojoboss/scikit-learn | sklearn/utils/tests/test_random.py | 230 | 7344 | from __future__ import division
import numpy as np
import scipy.sparse as sp
from scipy.misc import comb as combinations
from numpy.testing import assert_array_almost_equal
from sklearn.utils.random import sample_without_replacement
from sklearn.utils.random import random_choice_csc
from sklearn.utils.testing import (
assert_raises,
assert_equal,
assert_true)
###############################################################################
# test custom sampling without replacement algorithm
###############################################################################
def test_invalid_sample_without_replacement_algorithm():
    # An unrecognised ``method`` name must be rejected with ValueError.
    assert_raises(ValueError, sample_without_replacement, 5, 4, "unknown")
def test_sample_without_replacement_algorithms():
    """Run the shared sampling checks against every concrete method."""
    methods = ("auto", "tracking_selection", "reservoir_sampling", "pool")
    for m in methods:
        # The method name is bound via a default argument (_method=m)
        # because Python closures capture variables by reference: relying
        # on the loop variable directly is the classic late-binding
        # pitfall and would silently test only the last method if any
        # check helper deferred its calls.
        def sample_without_replacement_method(n_population, n_samples,
                                              random_state=None, _method=m):
            return sample_without_replacement(n_population, n_samples,
                                              method=_method,
                                              random_state=random_state)
        check_edge_case_of_sample_int(sample_without_replacement_method)
        check_sample_int(sample_without_replacement_method)
        check_sample_int_distribution(sample_without_replacement_method)
def check_edge_case_of_sample_int(sample_without_replacement):
    """Exercise boundary inputs of a sample_without_replacement callable."""
    # n_population < n_samples: impossible request, must raise
    assert_raises(ValueError, sample_without_replacement, 0, 1)
    assert_raises(ValueError, sample_without_replacement, 1, 2)
    # n_population == n_samples
    assert_equal(sample_without_replacement(0, 0).shape, (0, ))
    assert_equal(sample_without_replacement(1, 1).shape, (1, ))
    # n_population >= n_samples
    assert_equal(sample_without_replacement(5, 0).shape, (0, ))
    assert_equal(sample_without_replacement(5, 1).shape, (1, ))
    # n_population < 0 or n_samples < 0
    assert_raises(ValueError, sample_without_replacement, -1, 5)
    assert_raises(ValueError, sample_without_replacement, 5, -1)
def check_sample_int(sample_without_replacement):
    # This test is heavily inspired from test_random.py of python-core.
    #
    # For the entire allowable range of 0 <= k <= N, validate that
    # the sample is of the correct length and contains only unique items
    n_population = 100
    for n_samples in range(n_population + 1):
        s = sample_without_replacement(n_population, n_samples)
        assert_equal(len(s), n_samples)
        unique = np.unique(s)
        assert_equal(np.size(unique), n_samples)
        # every sampled index must fall inside [0, n_population)
        assert_true(np.all(unique < n_population))
    # test edge case n_population == n_samples == 0
    assert_equal(np.size(sample_without_replacement(0, 0)), 0)
def check_sample_int_distribution(sample_without_replacement):
    # This test is heavily inspired from test_random.py of python-core.
    #
    # For the entire allowable range of 0 <= k <= N, validate that
    # sample generates all possible permutations
    n_population = 10
    # a large number of trials prevents false negatives without slowing normal
    # case
    n_trials = 10000
    for n_samples in range(n_population):
        # Counting the number of combinations is not as good as counting the
        # number of permutations. However, it works with sampling algorithms
        # that do not provide a random permutation of the subset of integers.
        n_expected = combinations(n_population, n_samples, exact=True)
        output = {}
        for i in range(n_trials):
            output[frozenset(sample_without_replacement(n_population,
                                                        n_samples))] = None
            if len(output) == n_expected:
                break
        else:
            # for/else: only reached when the trial budget was exhausted
            # without observing every expected combination.
            raise AssertionError(
                "number of combinations != number of expected (%s != %s)" %
                (len(output), n_expected))
def test_random_choice_csc(n_samples=10000, random_state=24):
    """Smoke-test random_choice_csc empirical distributions (decimal=1)."""
    # Explicit class probabilities
    classes = [np.array([0, 1]), np.array([0, 1, 2])]
    class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
    got = random_choice_csc(n_samples, classes, class_probabilites,
                            random_state)
    assert_true(sp.issparse(got))
    for k in range(len(classes)):
        p = np.bincount(got.getcol(k).toarray().ravel()) / float(n_samples)
        assert_array_almost_equal(class_probabilites[k], p, decimal=1)
    # Implicit class probabilities (uniform over observed classes)
    classes = [[0, 1], [1, 2]] # test for array-like support
    class_probabilites = [np.array([0.5, 0.5]), np.array([0, 1/2, 1/2])]
    got = random_choice_csc(n_samples=n_samples,
                            classes=classes,
                            random_state=random_state)
    assert_true(sp.issparse(got))
    for k in range(len(classes)):
        p = np.bincount(got.getcol(k).toarray().ravel()) / float(n_samples)
        assert_array_almost_equal(class_probabilites[k], p, decimal=1)
    # Edge case: probabilities of exactly 1.0 and 0.0
    classes = [np.array([0, 1]), np.array([0, 1, 2])]
    class_probabilites = [np.array([1.0, 0.0]), np.array([0.0, 1.0, 0.0])]
    got = random_choice_csc(n_samples, classes, class_probabilites,
                            random_state)
    assert_true(sp.issparse(got))
    for k in range(len(classes)):
        p = np.bincount(got.getcol(k).toarray().ravel(),
                        minlength=len(class_probabilites[k])) / n_samples
        assert_array_almost_equal(class_probabilites[k], p, decimal=1)
    # One class target data
    classes = [[1], [0]] # test for array-like support
    class_probabilites = [np.array([0.0, 1.0]), np.array([1.0])]
    got = random_choice_csc(n_samples=n_samples,
                            classes=classes,
                            random_state=random_state)
    assert_true(sp.issparse(got))
    for k in range(len(classes)):
        p = np.bincount(got.getcol(k).toarray().ravel()) / n_samples
        assert_array_almost_equal(class_probabilites[k], p, decimal=1)
def test_random_choice_csc_errors():
    # the lengths of arrays in classes and class_probabilites are mismatched
    classes = [np.array([0, 1]), np.array([0, 1, 2, 3])]
    class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
    assert_raises(ValueError, random_choice_csc, 4, classes,
                  class_probabilites, 1)
    # the class dtype is not supported (strings)
    classes = [np.array(["a", "1"]), np.array(["z", "1", "2"])]
    class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
    assert_raises(ValueError, random_choice_csc, 4, classes,
                  class_probabilites, 1)
    # the class dtype is not supported (floats)
    classes = [np.array([4.2, 0.1]), np.array([0.1, 0.2, 9.4])]
    class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
    assert_raises(ValueError, random_choice_csc, 4, classes,
                  class_probabilites, 1)
    # Given probabilities don't sum to 1
    classes = [np.array([0, 1]), np.array([0, 1, 2])]
    class_probabilites = [np.array([0.5, 0.6]), np.array([0.6, 0.1, 0.3])]
    assert_raises(ValueError, random_choice_csc, 4, classes,
                  class_probabilites, 1)
| bsd-3-clause |
tarzan0820/addons-yelizariev | crm_phonecall_notification/models.py | 17 | 2078 | # -*- coding: utf-8 -*-
from openerp.osv import osv,fields
from openerp import SUPERUSER_ID
class crm_phonecall(osv.Model):
    """Phonecall extension that subscribes interested users as followers."""
    _inherit = 'crm.phonecall'
    _track = {
        'partner_id': {
            'crm_phonecall_notification.mt_phonecall_new': lambda self, cr, uid, obj, ctx=None: obj.partner_id
        }
    }
    _columns = {
        'name': fields.char('Call Summary', required=True, track_visibility='onchange'),
        'partner_id': fields.many2one('res.partner', 'Contact', track_visibility='onchange'),
    }
    def _add_followers(self, cr, uid, vals, context):
        """Append follower link commands to ``vals`` and return it.

        Followers added: the salesman of the call's partner (if any) and
        the assigned user's partner record. ``(4, id)`` is the ORM "link
        existing record" command.
        """
        vals = vals or {}
        vals['message_follower_ids'] = vals.get('message_follower_ids') or []
        partner_ids = []
        if vals.get('partner_id'):
            # Follow via the customer's salesman, not the customer itself.
            partner = self.pool['res.partner'].browse(cr, uid, vals.get('partner_id'), context=context)
            if partner.user_id and partner.user_id.partner_id:
                partner_ids.append(partner.user_id.partner_id.id)
        if vals.get('user_id'):
            user = self.pool['res.users'].browse(cr, uid, vals.get('user_id'), context=context)
            partner_ids.append(user.partner_id.id)
        for partner_id in partner_ids:
            vals['message_follower_ids'].append((4, partner_id))
        return vals
    def create(self, cr, uid, vals, context=None):
        vals = self._add_followers(cr, uid, vals, context)
        # BUG FIX: ``context`` defaults to None, so the former
        # ``context.copy()`` raised AttributeError; the former unconditional
        # ``del ctx['default_state']`` raised KeyError when the key was
        # absent. ``dict(context or {})`` and ``pop`` handle both.
        ctx = dict(context or {})
        # fix bug:
        # ValueError: Wrong value for mail.mail.state: 'done'
        state = ctx.pop('default_state', None)
        if state and not vals.get('state'):
            vals['state'] = state
        return super(crm_phonecall, self).create(cr, uid, vals, context=ctx)
    def write(self, cr, uid, ids, vals, context=None):
        vals = self._add_followers(cr, uid, vals, context)
        # Same default_state workaround as create(); guard against a None
        # context and replace the bare try/except with an explicit pop.
        ctx = dict(context or {})
        ctx.pop('default_state', None)
        return super(crm_phonecall, self).write(cr, uid, ids, vals, context=ctx)
| lgpl-3.0 |
KSchopmeyer/smipyping | tools/click_help_capture.py | 1 | 8399 | #!/usr/bin/env python
"""
This tool can capture the help outputs from a click application and output
the result either as text or in restructured text format.
It executes the click script --help and recursively scrapes all of the
subcommands from the output, generating an output that is the help text for
every groupt/subcommand in the script.
All output is to stdout.
It will generate either restructured text or pure text output depending on
the variable USE_RST.
the rst output set a section name for each help subject.
This should be usable with different click generated apps by simply changing
the variable SCRIPT_NAME to the name of the target scripere t.
There are no execution inputs since the primary use is to generate information
for review and documentation in a fixed environmet. The two variables:
SCRIPT_NAME - Name of the click script that will be executed to generate
help information
USE_RST - Boolean. If true, generates .rst output. Otherwise it generates
pure formatted text.
"""
from __future__ import print_function, absolute_import
import sys
import subprocess
# Compatibility shim: textwrap.indent() was only added in Python 3.3, so on
# older interpreters fall back to a local implementation with the same
# (text, amount, ch) signature used throughout this module.
try:
    import textwrap
    textwrap.indent # pylint: disable=pointless-statement
except AttributeError: # undefined function (wasn't added until Python 3.3)
    def indent(text, amount, ch=' '): # pylint: disable=invalid-name
        """Prefix every line of *text* with *amount* copies of *ch*."""
        padding = amount * ch
        return ''.join(padding + line for line in text.splitlines(True))
else:
    def indent(text, amount, ch=' '): # pylint: disable=invalid-name
        """Delegate to textwrap.indent with a repeated-character prefix.

        NOTE(review): textwrap.indent skips whitespace-only lines by
        default, unlike the Python 2 fallback above -- confirm callers
        never pass such lines.
        """
        return textwrap.indent(text, amount * ch)
import six
# Output format switch: True emits reStructuredText (with section anchors),
# False emits plain formatted text.
USE_RST = True
# Name of the click script whose help tree is scraped.
SCRIPT_NAME = 'smicli'
# SCRIPT_CMD = SCRIPT_NAME # TODO #103: Reactivate once smicli works on Windows
SCRIPT_CMD = 'python -c "import sys; from smicli import cli; ' \
'sys.argv[0]=\'smicli\'; sys.exit(cli())"'
ERRORS = 0  # not referenced in this module -- TODO confirm before removing
VERBOSE = False  # when True, echo each shell command before running it
def rst_headline(title, level):
    """Render *title* as an rst heading for the given nesting *level*.

    Produces an anchor line (``.. _`title`:``) followed by the heading.
    Level 0 headings are overlined as well as underlined; deeper levels
    get an underline only. The heading character is taken from a fixed
    per-level table, falling back to ``=`` for levels beyond the table.
    """
    # Heading characters by nesting depth.
    markers = ['#', '*', '=', '-', '^', '"']
    try:
        marker_char = markers[level]
    except IndexError:
        marker_char = '='
    anchor = '.. _`{0}`:'.format(title)
    underline = marker_char * len(title)
    if level == 0:
        parts = [anchor, '', underline, title, underline]
    else:
        parts = [anchor, '', title, underline]
    return '\n' + '\n'.join(parts) + '\n'
def print_rst_verbatum_text(text_str):
    """Print *text_str* to stdout as an rst literal (verbatim) block.

    rst requires monospace blocks to be introduced by a line containing
    only ``::`` plus a blank line, with the block's text indented.
    """
    print('::\n')
    # Indent only non-empty lines; blank lines are preserved as-is so
    # paragraph breaks inside the block survive.
    indented = [indent(line, 4) if line else line
                for line in text_str.split('\n')]
    print('%s' % '\n'.join(indented))
# Cache of raw --help output (bytes) keyed by subcommand path; populated by
# get_subcmd_group_names() and consumed by create_help_cmd_list().
HELP_DICT = {}
def cmd_exists(cmd):
    """
    Determine if the command defined by cmd can be executed in a shell.
    Returns a tuple (rc, msg), where rc==0 indicates that the command can be
    executed, and otherwise rc is the command (or shell) return code and
    msg is an error message.
    """
    # Temporary diagnostic block (always enabled): dump the shell PATH to
    # stderr to debug command-not-found issues on Windows. See TODO #103.
    if True: # TODO #103: Debug PATH for pywbemcli not found issue on Windows
        if sys.platform == 'win32':
            echo_cmd = 'echo %PATH%'
        else:
            echo_cmd = 'echo $PATH'
        proc = subprocess.Popen(echo_cmd, shell=True, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        out, _ = proc.communicate()
        print("Debug: %s: %s" % (echo_cmd, out), file=sys.stderr)
    # stderr is folded into stdout so ``msg`` carries whatever the failing
    # command printed.
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    out, _ = proc.communicate()
    rc = proc.returncode
    if rc == 0:
        msg = None
    else:
        msg = out.strip()
    return rc, msg
def get_subcmd_group_names(script_cmd, script_name, cmd):
    """
    Execute the script with defined subcommand and help and get the
    groups defined for that help.
    returns list of subcommands/groups

    Side effect: stores the raw (bytes) help output in HELP_DICT[cmd].
    NOTE(review): ``script_name`` is unused here; kept for signature
    symmetry with callers -- confirm before removing.
    """
    command = '%s %s --help' % (script_cmd, cmd)
    # Disable python warnings for script call.
    if sys.platform != 'win32':
        command = 'export PYTHONWARNINGS="" && %s' % command
    if VERBOSE:
        print('command %s' % command)
    proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    std_out, std_err = proc.communicate()
    exitcode = proc.returncode
    # Cache before decoding: consumers call .decode() themselves.
    HELP_DICT[cmd] = std_out
    if six.PY3:
        std_out = std_out.decode()
        std_err = std_err.decode()
    if exitcode:
        raise RuntimeError("Error: Shell execution of command %r failed with "
                           "rc=%s: %s" % (command, exitcode, std_err))
    if std_err:
        raise RuntimeError("Error: Unexpected stderr from command %r:\n"
                           "%s" % (command, std_err))
    # Split stdout into list of lines
    lines = std_out.split('\n')
    # get first word of all lines after line containing 'Commands:'
    group_list = []
    group_state = False
    for line in lines:
        if group_state and line:
            # split line into list of words and get first word as subcommand
            words = line.split()
            group_list.append(words[0])
        # test for line that matches the word Commands
        if line == 'Commands:':
            group_state = True
    return group_list
def get_subgroup_names(group_name, script_cmd, script_name):
    """Return the fully-qualified subcommand names under *group_name*.

    Runs ``<script> <group_name> --help`` and prefixes every subcommand
    found in its 'Commands:' section with the group name (top level, the
    empty group, yields the bare names).
    """
    subcommands = get_subcmd_group_names(script_cmd, script_name, group_name)
    if group_name:
        return ['%s %s' % (group_name, name) for name in subcommands]
    return list(subcommands)
def create_help_cmd_list(script_cmd, script_name):
    """
    Create the command list.

    Breadth-first walk of the script's group/subcommand tree, then print
    the cached --help output for every node (rst or plain text depending
    on USE_RST). Returns the sorted list of discovered command paths.
    """
    # Result list of assembled help subcmds
    help_groups_result = []
    # start with empty group, the top level (i.e. smicli --help).
    # This is list of names to process and is extended as we process
    # each group.
    group_names = [""]
    help_groups_result.extend(group_names)
    # NOTE: group_names is intentionally extended while being iterated --
    # this is the work queue of the breadth-first traversal.
    for name in group_names:
        return_cmds = get_subgroup_names(name, script_cmd, script_name)
        help_groups_result.extend(return_cmds)
        # extend input list with returned assembled groups
        group_names.extend(return_cmds)
    # sort to match order of click
    help_groups_result.sort()
    if USE_RST:
        print(rst_headline("%s Help Command Details" % script_name, 2))
        print('\nThis section defines the help output for each %s '
              'command group and subcommand.\n' % script_name)
    for name in help_groups_result:
        command = '%s %s --help' % (script_cmd, name)
        command_name = '%s %s --help' % (script_name, name)
        # HELP_DICT was filled (as bytes) during the traversal above.
        out = HELP_DICT[name]
        if USE_RST:
            level = len(command_name.split())
            # Don't put the top level in a separate section
            if name:
                print(rst_headline(command_name, level))
            print('\n%s\n' % '\nThe following defines the help output for the '
                  '`%s` subcommand\n' % command_name)
            print_rst_verbatum_text(out.decode())
        else:
            print('%s\n%s COMMAND: %s' % (('=' * 50), script_name, command_name))
            print(out.decode())
    return help_groups_result
if __name__ == '__main__':
    # Verify that the script exists. Executing with --help loads click
    # script generates help and exits.
    check_cmd = '%s --help' % SCRIPT_CMD
    rc, msg = cmd_exists(check_cmd)
    if rc != 0:
        print("Error: Shell execution of %r returns rc=%s: %s" %
              (check_cmd, rc, msg), file=sys.stderr)
        sys.exit(1)
    # All output (rst or text) goes to stdout; redirect to capture.
    create_help_cmd_list(SCRIPT_CMD, SCRIPT_NAME)
| mit |
dga4654dan/UTM-Demo | V_1_0_2_1/UtmDemo_Sfs_2.9.0/UtmDemo_Sfs_2.9.0_Server/lib/Lib/test/test_userdict.py | 10 | 2034 | # Check every path through every method of UserDict
from test_support import verify, verbose
from UserDict import UserDict, IterableUserDict
# NOTE: Python 2 test script (backtick repr, cmp(), has_key(), xrange,
# print statement). It exercises every code path of UserDict.
d0 = {}
d1 = {"one": 1}
d2 = {"one": 1, "two": 2}
# Test constructors
u = UserDict()
u0 = UserDict(d0)
u1 = UserDict(d1)
u2 = IterableUserDict(d2)
uu = UserDict(u)
uu0 = UserDict(u0)
uu1 = UserDict(u1)
uu2 = UserDict(u2)
# Test __repr__ (backticks are Python 2 shorthand for repr())
verify(str(u0) == str(d0))
verify(repr(u1) == repr(d1))
verify(`u2` == `d2`)
# Test __cmp__ and __len__
all = [d0, d1, d2, u, u0, u1, u2, uu, uu0, uu1, uu2]
for a in all:
    for b in all:
        verify(cmp(a, b) == cmp(len(a), len(b)))
# Test __getitem__
verify(u2["one"] == 1)
try:
    u1["two"]
except KeyError:
    pass
else:
    verify(0, "u1['two'] shouldn't exist")
# Test __setitem__
u3 = UserDict(u2)
u3["two"] = 2
u3["three"] = 3
# Test __delitem__
del u3["three"]
try:
    del u3["three"]
except KeyError:
    pass
else:
    verify(0, "u3['three'] shouldn't exist")
# Test clear
u3.clear()
verify(u3 == {})
# Test copy()
u2a = u2.copy()
verify(u2a == u2)
class MyUserDict(UserDict):
    def display(self): print self
m2 = MyUserDict(u2)
m2a = m2.copy()
verify(m2a == m2)
# SF bug #476616 -- copy() of UserDict subclass shared data
m2['foo'] = 'bar'
verify(m2a != m2)
# Test keys, items, values
verify(u2.keys() == d2.keys())
verify(u2.items() == d2.items())
verify(u2.values() == d2.values())
# Test has_key and "in".
for i in u2.keys():
    verify(u2.has_key(i) == 1)
    verify((i in u2) == 1)
    verify(u1.has_key(i) == d1.has_key(i))
    verify((i in u1) == (i in d1))
    verify(u0.has_key(i) == d0.has_key(i))
    verify((i in u0) == (i in d0))
# Test update
t = UserDict()
t.update(u2)
verify(t == u2)
# Test get
for i in u2.keys():
    verify(u2.get(i) == u2[i])
    verify(u1.get(i) == d1.get(i))
    verify(u0.get(i) == d0.get(i))
# Test "in" iteration.
for i in xrange(20):
    u2[i] = str(i)
ikeys = []
for k in u2:
    ikeys.append(k)
ikeys.sort()
keys = u2.keys()
keys.sort()
verify(ikeys == keys)
| gpl-2.0 |
motion2015/edx-platform | cms/djangoapps/contentstore/tests/test_crud.py | 74 | 12414 | import unittest
from opaque_keys.edx.locator import LocalId
from xmodule import templates
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests import persistent_factories
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore.django import modulestore, clear_existing_modulestores
from xmodule.seq_module import SequenceDescriptor
from xmodule.capa_module import CapaDescriptor
from xmodule.contentstore.django import _CONTENTSTORE
from xmodule.modulestore.exceptions import ItemNotFoundError, DuplicateCourseError
from xmodule.html_module import HtmlDescriptor
class TemplateTests(unittest.TestCase):
    """
    Test finding and using the templates (boilerplates) for xblocks.
    """
    def setUp(self):
        super(TemplateTests, self).setUp()
        clear_existing_modulestores() # redundant w/ cleanup but someone was getting errors
        self.addCleanup(self._drop_mongo_collections)
        self.addCleanup(clear_existing_modulestores)
        # All tests below go straight at the split modulestore.
        self.split_store = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.split)
    @staticmethod
    def _drop_mongo_collections():
        """
        If using a Mongo-backed modulestore & contentstore, drop the collections.
        """
        module_store = modulestore()
        if hasattr(module_store, '_drop_database'):
            module_store._drop_database() # pylint: disable=protected-access
        _CONTENTSTORE.clear()
        if hasattr(module_store, 'close_connections'):
            module_store.close_connections()
    def test_get_templates(self):
        """Sanity-check the template catalog, including the Dropdown boilerplate."""
        found = templates.all_templates()
        self.assertIsNotNone(found.get('course'))
        self.assertIsNotNone(found.get('about'))
        self.assertIsNotNone(found.get('html'))
        self.assertIsNotNone(found.get('problem'))
        self.assertEqual(len(found.get('course')), 0)
        self.assertEqual(len(found.get('about')), 1)
        self.assertGreaterEqual(len(found.get('html')), 2)
        self.assertGreaterEqual(len(found.get('problem')), 10)
        dropdown = None
        for template in found['problem']:
            self.assertIn('metadata', template)
            self.assertIn('display_name', template['metadata'])
            if template['metadata']['display_name'] == 'Dropdown':
                dropdown = template
                break
        self.assertIsNotNone(dropdown)
        self.assertIn('markdown', dropdown['metadata'])
        self.assertIn('data', dropdown)
        self.assertRegexpMatches(dropdown['metadata']['markdown'], r'^Dropdown.*')
        self.assertRegexpMatches(dropdown['data'], r'<problem>\s*<p>Dropdown.*')
    def test_get_some_templates(self):
        """Per-descriptor template lookup: sequences have none, html has several."""
        self.assertEqual(len(SequenceDescriptor.templates()), 0)
        self.assertGreater(len(HtmlDescriptor.templates()), 0)
        self.assertIsNone(SequenceDescriptor.get_template('doesntexist.yaml'))
        self.assertIsNone(HtmlDescriptor.get_template('doesntexist.yaml'))
        self.assertIsNotNone(HtmlDescriptor.get_template('announcement.yaml'))
    def test_factories(self):
        """Course/item factories create persisted split-store objects."""
        test_course = persistent_factories.PersistentCourseFactory.create(
            course='course', run='2014', org='testx',
            display_name='fun test course', user_id='testbot'
        )
        self.assertIsInstance(test_course, CourseDescriptor)
        self.assertEqual(test_course.display_name, 'fun test course')
        index_info = self.split_store.get_course_index_info(test_course.id)
        self.assertEqual(index_info['org'], 'testx')
        self.assertEqual(index_info['course'], 'course')
        self.assertEqual(index_info['run'], '2014')
        test_chapter = persistent_factories.ItemFactory.create(
            display_name='chapter 1',
            parent_location=test_course.location
        )
        self.assertIsInstance(test_chapter, SequenceDescriptor)
        # refetch parent which should now point to child
        test_course = self.split_store.get_course(test_course.id.version_agnostic())
        self.assertIn(test_chapter.location, test_course.children)
        # creating the same org/course/run twice must be rejected
        with self.assertRaises(DuplicateCourseError):
            persistent_factories.PersistentCourseFactory.create(
                course='course', run='2014', org='testx',
                display_name='fun test course', user_id='testbot'
            )
    def test_temporary_xblocks(self):
        """
        Test create_xblock to create non persisted xblocks
        """
        test_course = persistent_factories.PersistentCourseFactory.create(
            course='course', run='2014', org='testx',
            display_name='fun test course', user_id='testbot'
        )
        test_chapter = self.split_store.create_xblock(
            test_course.system, test_course.id, 'chapter', fields={'display_name': 'chapter n'},
            parent_xblock=test_course
        )
        self.assertIsInstance(test_chapter, SequenceDescriptor)
        self.assertEqual(test_chapter.display_name, 'chapter n')
        self.assertIn(test_chapter, test_course.get_children())
        # test w/ a definition (e.g., a problem)
        test_def_content = '<problem>boo</problem>'
        test_problem = self.split_store.create_xblock(
            test_course.system, test_course.id, 'problem', fields={'data': test_def_content},
            parent_xblock=test_chapter
        )
        self.assertIsInstance(test_problem, CapaDescriptor)
        self.assertEqual(test_problem.data, test_def_content)
        self.assertIn(test_problem, test_chapter.get_children())
        test_problem.display_name = 'test problem'
        self.assertEqual(test_problem.display_name, 'test problem')
    def test_persist_dag(self):
        """
        try saving temporary xblocks
        """
        test_course = persistent_factories.PersistentCourseFactory.create(
            course='course', run='2014', org='testx',
            display_name='fun test course', user_id='testbot'
        )
        test_chapter = self.split_store.create_xblock(
            test_course.system, test_course.id, 'chapter', fields={'display_name': 'chapter n'},
            parent_xblock=test_course
        )
        self.assertEqual(test_chapter.display_name, 'chapter n')
        test_def_content = '<problem>boo</problem>'
        # create child
        new_block = self.split_store.create_xblock(
            test_course.system, test_course.id,
            'problem',
            fields={
                'data': test_def_content,
                'display_name': 'problem'
            },
            parent_xblock=test_chapter
        )
        self.assertIsNotNone(new_block.definition_locator)
        # unpersisted blocks carry a LocalId placeholder until saved
        self.assertTrue(isinstance(new_block.definition_locator.definition_id, LocalId))
        # better to pass in persisted parent over the subdag so
        # subdag gets the parent pointer (otherwise 2 ops, persist dag, update parent children,
        # persist parent
        persisted_course = self.split_store.persist_xblock_dag(test_course, 'testbot')
        self.assertEqual(len(persisted_course.children), 1)
        persisted_chapter = persisted_course.get_children()[0]
        self.assertEqual(persisted_chapter.category, 'chapter')
        self.assertEqual(persisted_chapter.display_name, 'chapter n')
        self.assertEqual(len(persisted_chapter.children), 1)
        persisted_problem = persisted_chapter.get_children()[0]
        self.assertEqual(persisted_problem.category, 'problem')
        self.assertEqual(persisted_problem.data, test_def_content)
        # update it
        persisted_problem.display_name = 'altered problem'
        persisted_problem = self.split_store.persist_xblock_dag(persisted_problem, 'testbot')
        self.assertEqual(persisted_problem.display_name, 'altered problem')
    def test_delete_course(self):
        """Deleting a course removes it by id while the version GUID survives."""
        test_course = persistent_factories.PersistentCourseFactory.create(
            course='history', run='doomed', org='edu.harvard',
            display_name='doomed test course',
            user_id='testbot')
        persistent_factories.ItemFactory.create(
            display_name='chapter 1',
            parent_location=test_course.location
        )
        id_locator = test_course.id.for_branch(ModuleStoreEnum.BranchName.draft)
        guid_locator = test_course.location.course_agnostic()
        # verify it can be retrieved by id
        self.assertIsInstance(self.split_store.get_course(id_locator), CourseDescriptor)
        # and by guid -- TODO reenable when split_draft supports getting specific versions
        # self.assertIsInstance(self.split_store.get_item(guid_locator), CourseDescriptor)
        self.split_store.delete_course(id_locator, 'testbot')
        # test can no longer retrieve by id
        self.assertRaises(ItemNotFoundError, self.split_store.get_course, id_locator)
        # but can by guid -- same TODO as above
        # self.assertIsInstance(self.split_store.get_item(guid_locator), CourseDescriptor)
    def test_block_generations(self):
        """
        Test get_block_generations
        """
        test_course = persistent_factories.PersistentCourseFactory.create(
            course='history', run='hist101', org='edu.harvard',
            display_name='history test course',
            user_id='testbot'
        )
        chapter = persistent_factories.ItemFactory.create(
            display_name='chapter 1',
            parent_location=test_course.location,
            user_id='testbot'
        )
        sub = persistent_factories.ItemFactory.create(
            display_name='subsection 1',
            parent_location=chapter.location,
            user_id='testbot',
            category='vertical'
        )
        first_problem = persistent_factories.ItemFactory.create(
            display_name='problem 1', parent_location=sub.location, user_id='testbot', category='problem',
            data="<problem></problem>"
        )
        first_problem.max_attempts = 3
        first_problem.save() # decache the above into the kvs
        updated_problem = self.split_store.update_item(first_problem, 'testbot')
        self.assertIsNotNone(updated_problem.previous_version)
        self.assertEqual(updated_problem.previous_version, first_problem.update_version)
        self.assertNotEqual(updated_problem.update_version, first_problem.update_version)
        self.split_store.delete_item(updated_problem.location, 'testbot')
        second_problem = persistent_factories.ItemFactory.create(
            display_name='problem 2',
            parent_location=sub.location.version_agnostic(),
            user_id='testbot', category='problem',
            data="<problem></problem>"
        )
        # The draft course root has 2 revisions: the published revision, and then the subsequent
        # changes to the draft revision
        version_history = self.split_store.get_block_generations(test_course.location)
        self.assertIsNotNone(version_history)
        self.assertEqual(version_history.locator.version_guid, test_course.location.version_guid)
        self.assertEqual(len(version_history.children), 1)
        self.assertEqual(version_history.children[0].children, [])
        self.assertEqual(version_history.children[0].locator.version_guid, chapter.location.version_guid)
        # sub changed on add, add problem, delete problem, add problem in strict linear seq
        version_history = self.split_store.get_block_generations(sub.location)
        self.assertEqual(len(version_history.children), 1)
        self.assertEqual(len(version_history.children[0].children), 1)
        self.assertEqual(len(version_history.children[0].children[0].children), 1)
        self.assertEqual(len(version_history.children[0].children[0].children[0].children), 0)
        # first and second problem may show as same usage_id; so, need to ensure their histories are right
        version_history = self.split_store.get_block_generations(updated_problem.location)
        self.assertEqual(version_history.locator.version_guid, first_problem.location.version_guid)
        self.assertEqual(len(version_history.children), 1) # updated max_attempts
        self.assertEqual(len(version_history.children[0].children), 0)
        version_history = self.split_store.get_block_generations(second_problem.location)
        self.assertNotEqual(version_history.locator.version_guid, first_problem.location.version_guid)
| agpl-3.0 |
aroche/django | django/core/management/commands/flush.py | 181 | 3879 | from __future__ import unicode_literals
import sys
from importlib import import_module
from django.apps import apps
from django.core.management.base import BaseCommand, CommandError
from django.core.management.color import no_style
from django.core.management.sql import emit_post_migrate_signal, sql_flush
from django.db import DEFAULT_DB_ALIAS, connections, transaction
from django.utils import six
from django.utils.six.moves import input
class Command(BaseCommand):
    help = ('Removes ALL DATA from the database, including data added during '
            'migrations. Does not achieve a "fresh install" state.')
    def add_arguments(self, parser):
        parser.add_argument('--noinput', action='store_false', dest='interactive', default=True,
            help='Tells Django to NOT prompt the user for input of any kind.')
        parser.add_argument('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS,
            help='Nominates a database to flush. Defaults to the "default" database.')
    def handle(self, **options):
        database = options.get('database')
        connection = connections[database]
        verbosity = options.get('verbosity')
        interactive = options.get('interactive')
        # The following are stealth options used by Django's internals.
        reset_sequences = options.get('reset_sequences', True)
        allow_cascade = options.get('allow_cascade', False)
        inhibit_post_migrate = options.get('inhibit_post_migrate', False)
        self.style = no_style()
        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            try:
                import_module('.management', app_config.name)
            except ImportError:
                pass
        sql_list = sql_flush(self.style, connection, only_django=True,
                             reset_sequences=reset_sequences,
                             allow_cascade=allow_cascade)
        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to an empty state.
Are you sure you want to do this?
    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'
        if confirm == 'yes':
            try:
                # Wrap the whole flush in one transaction so a failure leaves
                # the database untouched (when the backend can roll back DDL).
                with transaction.atomic(using=database,
                                        savepoint=connection.features.can_rollback_ddl):
                    with connection.cursor() as cursor:
                        for sql in sql_list:
                            cursor.execute(sql)
            except Exception as e:
                new_msg = (
                    "Database %s couldn't be flushed. Possible reasons:\n"
                    "  * The database isn't running or isn't configured correctly.\n"
                    "  * At least one of the expected database tables doesn't exist.\n"
                    "  * The SQL was invalid.\n"
                    "Hint: Look at the output of 'django-admin sqlflush'. "
                    "That's the SQL this command wasn't able to run.\n"
                    "The full error: %s") % (connection.settings_dict['NAME'], e)
                # re-raise as CommandError while preserving the traceback
                six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])
            # Empty sql_list may signify an empty database and post_migrate would then crash
            if sql_list and not inhibit_post_migrate:
                # Emit the post migrate signal. This allows individual applications to
                # respond as if the database had been migrated from scratch.
                emit_post_migrate_signal(verbosity, interactive, database)
        else:
            self.stdout.write("Flush cancelled.\n")
| bsd-3-clause |
itsjeyd/edx-platform | lms/djangoapps/grades/context.py | 7 | 2823 | """
Grading Context
"""
from collections import defaultdict
from openedx.core.djangoapps.content.block_structure.api import get_course_in_cache
from .scores import possibly_scored
def grading_context_for_course(course):
    """Build a grading context directly from a course object.

    Thin convenience wrapper: fetches the cached block structure for
    ``course`` and hands it to ``grading_context``.
    """
    return grading_context(get_course_in_cache(course.id))
def grading_context(course_structure):
    """
    This returns a dictionary with keys necessary for quickly grading
    a student. They are used by grades.grade()

    The grading context has two keys:
    graded_sections - This contains the sections that are graded, as
        well as all possible children modules that can affect the
        grading. This allows some sections to be skipped if the student
        hasn't seen any part of it.
        The format is a dictionary keyed by section-type. The values are
        arrays of dictionaries containing
            "section_block" : The section block
            "scored_descendant_keys" : An array of usage keys for blocks
                could possibly be in the section, for any student

    all_graded_blocks - This contains a list of all blocks that can
        affect grading a student. This is used to efficiently fetch
        all the xmodule state for a FieldDataCache without walking
        the descriptor tree again.
    """
    all_graded_blocks = []
    all_graded_sections = defaultdict(list)

    # Walk chapters -> sections of the course tree; only graded sections
    # contribute to the context.
    for chapter_key in course_structure.get_children(course_structure.root_block_usage_key):
        for section_key in course_structure.get_children(chapter_key):
            section = course_structure[section_key]
            # The section itself is always the first candidate descendant.
            scored_descendants_of_section = [section]
            if section.graded:
                # Collect every block under the section that might carry a
                # score (filtered by the ``possibly_scored`` predicate).
                for descendant_key in course_structure.post_order_traversal(
                        filter_func=possibly_scored,
                        start_node=section_key,
                ):
                    scored_descendants_of_section.append(
                        course_structure[descendant_key],
                    )

                # include only those blocks that have scores, not if they are just a parent
                section_info = {
                    'section_block': section,
                    'scored_descendants': [
                        child for child in scored_descendants_of_section
                        if getattr(child, 'has_score', None)
                    ]
                }
                # Sections are grouped by their assignment format
                # (e.g. "Homework", "Exam"); missing format maps to ''.
                section_format = getattr(section, 'format', '')
                all_graded_sections[section_format].append(section_info)
                all_graded_blocks.extend(scored_descendants_of_section)

    return {
        'all_graded_sections': all_graded_sections,
        'all_graded_blocks': all_graded_blocks,
    }
| agpl-3.0 |
Odingod/mne-python | mne/simulation/evoked.py | 7 | 3964 | # Authors: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Daniel Strohmeier <daniel.strohmeier@tu-ilmenau.de>
# Martin Luessi <mluessi@nmr.mgh.harvard.edu>
#
# License: BSD (3-clause)
import copy
import numpy as np
from ..io.pick import pick_channels_cov
from ..forward import apply_forward
from ..utils import check_random_state, verbose, _time_mask
@verbose
def generate_evoked(fwd, stc, evoked, cov, snr=3, tmin=None, tmax=None,
                    iir_filter=None, random_state=None, verbose=None):
    """Generate noisy evoked data

    Parameters
    ----------
    fwd : dict
        a forward solution.
    stc : SourceEstimate object
        The source time courses.
    evoked : Evoked object
        An instance of evoked used as template.
    cov : Covariance object
        The noise covariance
    snr : float
        signal to noise ratio in dB. It corresponds to
        10 * log10( var(signal) / var(noise) ).
    tmin : float | None
        start of time interval to estimate SNR. If None first time point
        is used.
    tmax : float
        end of time interval to estimate SNR. If None last time point
        is used.
    iir_filter : None | array
        IIR filter coefficients (denominator) e.g. [1, -1, 0.2].
    random_state : None | int | np.random.RandomState
        To specify the random generator state.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).

    Returns
    -------
    evoked : Evoked object
        The simulated evoked data
    """
    # Project the source estimate through the forward solution to get the
    # clean sensor-space signal.
    evoked = apply_forward(fwd, stc, evoked)  # verbose
    # Draw spatially colored Gaussian noise matching the template's channels.
    noise = generate_noise_evoked(evoked, cov, iir_filter, random_state)
    # Mix signal and noise so that the SNR over [tmin, tmax] equals ``snr``.
    evoked_noise = add_noise_evoked(evoked, noise, snr, tmin=tmin, tmax=tmax)
    return evoked_noise
def generate_noise_evoked(evoked, cov, iir_filter=None, random_state=None):
    """Create zero-mean multivariate Gaussian noise shaped like *evoked*.

    The spatial covariance of the noise is given from the cov matrix.

    Parameters
    ----------
    evoked : evoked object
        an instance of evoked used as template
    cov : Covariance object
        The noise covariance
    iir_filter : None | array
        IIR filter coefficients (denominator)
    random_state : None | int | np.random.RandomState
        To specify the random generator state.

    Returns
    -------
    noise : evoked object
        an instance of evoked
    """
    from scipy.signal import lfilter
    rng = check_random_state(random_state)
    noise = copy.deepcopy(evoked)
    picked_cov = pick_channels_cov(cov, include=noise.info['ch_names'])
    # Expand a stored diagonal into a full matrix when necessary.
    if picked_cov['diag']:
        spatial_cov = np.diag(picked_cov.data)
    else:
        spatial_cov = picked_cov.data
    zero_mean = np.zeros(noise.info['nchan'])
    n_times = evoked.data.shape[1]
    # Draw one sample per time point, then transpose to (channels, times).
    noise.data = rng.multivariate_normal(zero_mean, spatial_cov, n_times).T
    if iir_filter is not None:
        # Temporally color the noise with the given AR (denominator) filter.
        noise.data = lfilter([1], iir_filter, noise.data, axis=-1)
    return noise
def add_noise_evoked(evoked, noise, snr, tmin=None, tmax=None):
    """Return a copy of *evoked* corrupted by *noise* at the requested SNR.

    SNR is computed in the interval from tmin to tmax.

    Parameters
    ----------
    evoked : Evoked object
        An instance of evoked with signal
    noise : Evoked object
        An instance of evoked with noise
    snr : float
        signal to noise ratio in dB. It corresponds to
        10 * log10( var(signal) / var(noise) )
    tmin : float
        start time before event
    tmax : float
        end time after event

    Returns
    -------
    evoked_noise : Evoked object
        An instance of evoked corrupted by noise
    """
    out = copy.deepcopy(evoked)
    tmask = _time_mask(out.times, tmin, tmax)
    signal_power = np.mean((out.data[:, tmask] ** 2).ravel())
    noise_power = np.mean((noise.data ** 2).ravel())
    # Current SNR of the unscaled mixture, in dB.
    current_snr = 10 * np.log10(signal_power / noise_power)
    # NOTE: scales ``noise.data`` in place, mirroring the original behavior.
    noise.data = 10 ** ((current_snr - float(snr)) / 20) * noise.data
    out.data += noise.data
    return out
| bsd-3-clause |
davidovich/pip | tests/lib/venv.py | 30 | 2577 | from __future__ import absolute_import
import os
import sys
import subprocess
import virtualenv as _virtualenv
from .path import Path
# On Python < 3.3 we don't have subprocess.DEVNULL
try:
    DEVNULL = subprocess.DEVNULL
except AttributeError:
    # Fallback for old interpreters: a write-only handle on the null device.
    # NOTE(review): opened once at import time and intentionally never closed.
    DEVNULL = open(os.devnull, "wb")
class VirtualEnvironment(object):
    """
    An abstraction around virtual environments, currently it only uses
    virtualenv but in the future it could use pyvenv.
    """

    def __init__(self, location, *args, **kwargs):
        # ``pip_source_dir`` is required; ``system_site_packages`` optional.
        self.location = Path(location)
        self.pip_source_dir = kwargs.pop("pip_source_dir")
        self._system_site_packages = kwargs.pop("system_site_packages", False)
        home, lib, inc, bin = _virtualenv.path_locations(self.location)
        # workaround for https://github.com/pypa/virtualenv/issues/306
        if hasattr(sys, "pypy_version_info"):
            lib = os.path.join(home, 'lib-python', sys.version[:3])
        self.lib = Path(lib)
        self.bin = Path(bin)
        super(VirtualEnvironment, self).__init__(*args, **kwargs)

    def __repr__(self):
        return "<VirtualEnvironment {0}>".format(self.location)

    @classmethod
    def create(cls, location, clear=False, pip_source_dir=None):
        """Alternate constructor: build the object and create the env on disk."""
        obj = cls(location, pip_source_dir=pip_source_dir)
        obj._create(clear=clear)
        return obj

    def _create(self, clear=False):
        """Create the virtualenv on disk and install the dev pip into it.

        Raises subprocess.CalledProcessError (with the installer's combined
        stdout/stderr attached) if the pip install step fails.
        """
        # Create the actual virtual environment
        _virtualenv.create_environment(
            self.location,
            clear=clear,
            never_download=True,
            no_pip=True,
            no_wheel=True,
        )
        # Install our development version of pip install the virtual
        # environment
        cmd = [self.bin.join("python"), "setup.py", "install", "--no-compile"]
        p = subprocess.Popen(
            cmd,
            cwd=self.pip_source_dir,
            stderr=subprocess.STDOUT,
            stdout=subprocess.PIPE,
        )
        # BUG FIX: the previous code discarded output to DEVNULL and then
        # passed ``p.stdout`` (always None without a pipe) as the error
        # output. Capture the combined output so failures are diagnosable.
        out, _ = p.communicate()
        if p.returncode != 0:
            raise subprocess.CalledProcessError(
                p.returncode,
                cmd,
                output=out,
            )

    def clear(self):
        """Wipe and re-create the environment in place."""
        self._create(clear=True)

    @property
    def system_site_packages(self):
        return self._system_site_packages

    @system_site_packages.setter
    def system_site_packages(self, value):
        # virtualenv toggles global site-packages via a marker file:
        # its absence means "global site-packages enabled".
        marker = self.lib.join("no-global-site-packages.txt")
        if value:
            marker.rm()
        else:
            marker.touch()
        self._system_site_packages = value
| mit |
institution/mpskit | charmap.py | 1 | 2324 | import sys
import os.path
import json
from record import Record
from conf import conf
from fail import fail,warning,printf
# NOTE: charmap will not be applied to "[]" characters and everything inside them
# NOTE: charmap: keep lower and upper case characters separated by 32
# NOTE: charmap: keep digits at their default positions
# NOTE: charmap: keep 010, 013 codes (line-feed and carriage-return)
# NOTE: charmap: in Rex keep "@"
default_charmap = u'''{
"000": "|",
"010": "\\u000A",
"013": "\\u000D",
"064": "@",
"048": "0",
"049": "1",
"050": "2",
"051": "3",
"052": "4",
"053": "5",
"054": "6",
"055": "7",
"056": "8",
"057": "9"
}
'''
charmap_filename = 'charmap-mpskit.json'
def save_default_charmap(cwd):
    """Write the default charmap JSON into *cwd* unless one already exists."""
    target = os.path.join(cwd, charmap_filename)
    if os.path.exists(target):
        printf("charmap already exists at: {}", target)
        return
    with open(target, 'w', encoding='utf-8') as f:
        f.write(default_charmap)
    printf(target)
def find_charmap_path(cwd):
    """Search *cwd* and its ancestors (up to 64 levels) for the charmap file.

    Returns the path of the first match, or None when the filesystem root
    is reached or the depth limit is exhausted.
    """
    current = cwd
    depth = 0
    while depth < 64:
        candidate = os.path.join(current, charmap_filename)
        if os.path.exists(candidate):
            return candidate
        parent = os.path.abspath(os.path.join(current, os.pardir))
        if parent == current:
            # Hit the filesystem root without finding anything.
            return None
        current = parent
        depth += 1
    warning("charmap file not found due to search depth limit; curr_dir: {}", current)
    return None
def load_charmap(cwd):
    """Locate, read and parse the charmap file, populating the
    encode/decode tables on the global ``conf`` object.

    Falls back to the built-in default charmap when no file is found.
    """
    charmap_path = find_charmap_path(cwd)
    if charmap_path is None:
        # warning('cannot locate charmap file: {}', charmap_filename)
        printf('using default charmap')
        # printf('you can create default charmap with: mpskit charmap create')
        # printf('it should be located in game main directory')
        charmap_json = default_charmap
    else:
        printf('charmap-path: {}', charmap_path)
        with open(charmap_path, encoding='utf-8') as f:
            charmap_json = f.read()
    try:
        cm = json.loads(charmap_json)
    except json.decoder.JSONDecodeError as e:
        # ``fail`` aborts; control does not continue past this point.
        fail("while reading charmap file: {}", str(e))
    # Build both directions: game character (by decimal code) -> replacement
    # string, and the inverse for encoding back.
    conf.charmap_decode = {}
    conf.charmap_encode = {}
    for (k, v) in cm.items():
        # Keys are decimal character codes as strings, e.g. "064" -> "@".
        kk = chr(int(k))
        vv = v
        conf.charmap_decode[kk] = vv
        conf.charmap_encode[vv] = kk
    printf('charmap-size: {}', len(conf.charmap_decode))
| agpl-3.0 |
ajdawson/numpy | numpy/ma/__init__.py | 76 | 1576 | """
=============
Masked Arrays
=============
Arrays sometimes contain invalid or missing data. When doing operations
on such arrays, we wish to suppress invalid values, which is the purpose masked
arrays fulfill (an example of typical use is given below).
For example, examine the following array:
>>> x = np.array([2, 1, 3, np.nan, 5, 2, 3, np.nan])
When we try to calculate the mean of the data, the result is undetermined:
>>> np.mean(x)
nan
The mean is calculated using roughly ``np.sum(x)/len(x)``, but since
any number added to ``NaN`` [1]_ produces ``NaN``, this doesn't work. Enter
masked arrays:
>>> m = np.ma.masked_array(x, np.isnan(x))
>>> m
masked_array(data = [2.0 1.0 3.0 -- 5.0 2.0 3.0 --],
mask = [False False False True False False False True],
fill_value=1e+20)
Here, we construct a masked array that suppress all ``NaN`` values. We
may now proceed to calculate the mean of the other values:
>>> np.mean(m)
2.6666666666666665
.. [1] Not-a-Number, a floating point value that is the result of an
invalid operation.
"""
from __future__ import division, absolute_import, print_function
__author__ = "Pierre GF Gerard-Marchant ($Author: jarrod.millman $)"
__version__ = '1.0'
__revision__ = "$Revision: 3473 $"
__date__ = '$Date: 2007-10-29 17:18:13 +0200 (Mon, 29 Oct 2007) $'
from . import core
from .core import *
from . import extras
from .extras import *
__all__ = ['core', 'extras']
__all__ += core.__all__
__all__ += extras.__all__
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
| bsd-3-clause |
eharney/cinder | cinder/tests/unit/attachments/test_attachments_api.py | 1 | 11677 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from cinder import context
from cinder import db
from cinder import exception
from cinder import objects
from cinder import test
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import utils as tests_utils
from cinder.volume import api as volume_api
from cinder.volume import configuration as conf
CONF = cfg.CONF
class AttachmentManagerTestCase(test.TestCase):
    """Attachment related test for volume/api.py."""

    def setUp(self):
        """Setup test class."""
        super(AttachmentManagerTestCase, self).setUp()
        self.configuration = mock.Mock(conf.Configuration)
        self.context = context.get_admin_context()
        self.context.user_id = fake.USER_ID
        self.project_id = fake.PROJECT3_ID
        self.context.project_id = self.project_id
        self.volume_api = volume_api.API()

    @mock.patch('cinder.volume.api.check_policy')
    def test_attachment_create_no_connector(self, mock_policy):
        """Test attachment_create no connector."""
        volume_params = {'status': 'available'}
        vref = tests_utils.create_volume(self.context, **volume_params)
        aref = self.volume_api.attachment_create(self.context,
                                                 vref,
                                                 fake.UUID2)
        # Without a connector the attachment stays in the 'reserved'
        # placeholder state with no attach metadata yet.
        self.assertEqual(fake.UUID2, aref.instance_uuid)
        self.assertIsNone(aref.attach_time)
        self.assertEqual('reserved', aref.attach_status)
        self.assertIsNone(aref.attach_mode)
        self.assertEqual(vref.id, aref.volume_id)
        self.assertEqual({}, aref.connection_info)

    @mock.patch('cinder.volume.api.check_policy')
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.attachment_update')
    def test_attachment_create_with_connector(self,
                                              mock_rpc_attachment_update,
                                              mock_policy):
        """Test attachment_create with connector."""
        volume_params = {'status': 'available'}
        connection_info = {'fake_key': 'fake_value',
                           'fake_key2': ['fake_value1', 'fake_value2']}
        mock_rpc_attachment_update.return_value = connection_info
        vref = tests_utils.create_volume(self.context, **volume_params)
        connector = {'fake': 'connector'}
        attachment = self.volume_api.attachment_create(self.context,
                                                       vref,
                                                       fake.UUID2,
                                                       connector)
        # Providing a connector triggers the RPC update and persists the
        # connection_info returned by the (mocked) driver.
        mock_rpc_attachment_update.assert_called_once_with(self.context,
                                                           mock.ANY,
                                                           connector,
                                                           mock.ANY)
        new_attachment = objects.VolumeAttachment.get_by_id(self.context,
                                                            attachment.id)
        self.assertEqual(connection_info, new_attachment.connection_info)

    @mock.patch('cinder.volume.api.check_policy')
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.attachment_delete')
    def test_attachment_delete_reserved(self,
                                        mock_rpc_attachment_delete,
                                        mock_policy):
        """Test attachment_delete with reserved."""
        volume_params = {'status': 'available'}
        vref = tests_utils.create_volume(self.context, **volume_params)
        aref = self.volume_api.attachment_create(self.context,
                                                 vref,
                                                 fake.UUID2)
        aobj = objects.VolumeAttachment.get_by_id(self.context,
                                                  aref.id)
        self.assertEqual('reserved', aref.attach_status)
        self.assertEqual(vref.id, aref.volume_id)
        self.volume_api.attachment_delete(self.context,
                                          aobj)
        # Since it's just reserved and never finalized, we should never make an
        # rpc call
        mock_rpc_attachment_delete.assert_not_called()

    @mock.patch('cinder.volume.api.check_policy')
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.attachment_delete')
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.attachment_update')
    def test_attachment_create_update_and_delete(
            self,
            mock_rpc_attachment_update,
            mock_rpc_attachment_delete,
            mock_policy):
        """Test attachment_delete."""
        volume_params = {'status': 'available'}
        connection_info = {'fake_key': 'fake_value',
                           'fake_key2': ['fake_value1', 'fake_value2']}
        mock_rpc_attachment_update.return_value = connection_info
        vref = tests_utils.create_volume(self.context, **volume_params)
        aref = self.volume_api.attachment_create(self.context,
                                                 vref,
                                                 fake.UUID2)
        aref = objects.VolumeAttachment.get_by_id(self.context,
                                                  aref.id)
        vref = objects.Volume.get_by_id(self.context,
                                        vref.id)
        connector = {'fake': 'connector'}
        self.volume_api.attachment_update(self.context,
                                          aref,
                                          connector)
        aref = objects.VolumeAttachment.get_by_id(self.context,
                                                  aref.id)
        self.assertEqual(connection_info, aref.connection_info)
        # We mock the actual call that updates the status
        # so force it here
        values = {'volume_id': vref.id,
                  'volume_host': vref.host,
                  'attach_status': 'attached',
                  'instance_uuid': fake.UUID2}
        aref = db.volume_attach(self.context, values)
        aref = objects.VolumeAttachment.get_by_id(self.context,
                                                  aref.id)
        self.assertEqual(vref.id, aref.volume_id)
        self.volume_api.attachment_delete(self.context,
                                          aref)
        # A finalized ('attached') attachment must be torn down over RPC.
        mock_rpc_attachment_delete.assert_called_once_with(self.context,
                                                           aref.id,
                                                           mock.ANY)

    @mock.patch('cinder.volume.api.check_policy')
    def test_additional_attachment_create_no_connector(self, mock_policy):
        """Test attachment_create no connector."""
        volume_params = {'status': 'available'}
        vref = tests_utils.create_volume(self.context, **volume_params)
        aref = self.volume_api.attachment_create(self.context,
                                                 vref,
                                                 fake.UUID2)
        self.assertEqual(fake.UUID2, aref.instance_uuid)
        self.assertIsNone(aref.attach_time)
        self.assertEqual('reserved', aref.attach_status)
        self.assertIsNone(aref.attach_mode)
        self.assertEqual(vref.id, aref.volume_id)
        self.assertEqual({}, aref.connection_info)
        # A second reservation from a *different* instance is rejected...
        self.assertRaises(exception.InvalidVolume,
                          self.volume_api.attachment_create,
                          self.context,
                          vref,
                          fake.UUID1)
        # ...but the same instance may reserve the volume again.
        self.volume_api.attachment_create(self.context,
                                          vref,
                                          fake.UUID2)
        vref = objects.Volume.get_by_id(self.context,
                                        vref.id)
        self.assertEqual(2, len(vref.volume_attachment))

    @mock.patch('cinder.volume.api.check_policy')
    @mock.patch('cinder.volume.rpcapi.VolumeAPI.attachment_update')
    def test_attachment_create_reserve_delete(
            self,
            mock_rpc_attachment_update,
            mock_policy):
        # Verifies that deleting an active attachment while a second
        # reservation is outstanding reverts the volume to 'reserved'.
        volume_params = {'status': 'available'}
        connector = {
            "initiator": "iqn.1993-08.org.debian:01:cad181614cec",
            "ip": "192.168.1.20",
            "platform": "x86_64",
            "host": "tempest-1",
            "os_type": "linux2",
            "multipath": False}
        connection_info = {'fake_key': 'fake_value',
                           'fake_key2': ['fake_value1', 'fake_value2']}
        mock_rpc_attachment_update.return_value = connection_info
        vref = tests_utils.create_volume(self.context, **volume_params)
        aref = self.volume_api.attachment_create(self.context,
                                                 vref,
                                                 fake.UUID2,
                                                 connector=connector)
        vref = objects.Volume.get_by_id(self.context,
                                        vref.id)
        # Need to set the status here because our mock isn't doing it for us
        vref.status = 'in-use'
        vref.save()
        # Now a second attachment acting as a reserve
        self.volume_api.attachment_create(self.context,
                                          vref,
                                          fake.UUID2)
        # We should now be able to delete the original attachment that gave us
        # 'in-use' status, and in turn we should revert to the outstanding
        # attachments reserve
        self.volume_api.attachment_delete(self.context,
                                          aref)
        vref = objects.Volume.get_by_id(self.context,
                                        vref.id)
        self.assertEqual('reserved', vref.status)

    @mock.patch('cinder.volume.api.check_policy')
    def test_reserve_reserve_delete(self, mock_policy):
        """Test that we keep reserved status across multiple reserves."""
        volume_params = {'status': 'available'}
        vref = tests_utils.create_volume(self.context, **volume_params)
        aref = self.volume_api.attachment_create(self.context,
                                                 vref,
                                                 fake.UUID2)
        vref = objects.Volume.get_by_id(self.context,
                                        vref.id)
        self.assertEqual('reserved', vref.status)
        self.volume_api.attachment_create(self.context,
                                          vref,
                                          fake.UUID2)
        vref = objects.Volume.get_by_id(self.context,
                                        vref.id)
        self.assertEqual('reserved', vref.status)
        # Deleting one of two reservations must leave the volume 'reserved'.
        self.volume_api.attachment_delete(self.context,
                                          aref)
        vref = objects.Volume.get_by_id(self.context,
                                        vref.id)
        self.assertEqual('reserved', vref.status)
        self.assertEqual(1, len(vref.volume_attachment))
| apache-2.0 |
tejassp/asadmn-web | webapp/lib/terminal.py | 1 | 5712 | # Copyright 2013-2014 Aerospike, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from inspect import isfunction
def enable_color(is_enable):
    """Switch the module between real ANSI escape output and no-op mode.

    Rebinds every module-level style/color code plus the private helpers
    ``_add_it``/``_remove_it``/``_reset`` used by the public wrappers below.
    """
    global _add_it, _remove_it, _reset
    global sclear, sbold, sdim, snormal, sunderline, sinverse, siclear
    global fgblack, fgred, fggreen, fgyellow, fgblue, fgmagenta, fgcyan, fgwhite
    global bgblack, bgred, bggreen, bgyellow, bgblue, bgmagenta, bgcyan, bgwhite
    global esc, term
    global sclear_code, cur_format
    global color_enabled
    color_enabled = is_enable
    if is_enable:
        # ANSI SGR parameter strings (normal;bright pairs for colors).
        sclear = '0'
        sbold = '1'
        sdim = '2'
        snormal = '22'
        sunderline = '4'
        sinverse = '7'
        siclear = '27'
        fgblack = '30;90'
        fgred = '31;91'
        fggreen = '32;92'
        fgyellow = '33;93'
        fgblue = '34;94'
        fgmagenta = '35;95'
        fgcyan = '36;96'
        fgwhite = '37;97'
        bgblack = '40;100'
        bgred = '41;101'
        bggreen = '42;102'
        bgyellow = '43;103'
        bgblue = '44;104'
        bgmagenta = '45;105'
        bgcyan = '46;106'
        bgwhite = '47;107'
        esc = '\033['
        term = 'm'
        sclear_code = esc + sclear + term
        # Currently-active decorations (order irrelevant for escape output).
        cur_format = set()

        def _add_it(decoration):
            # Activate a decoration; emit the full escape for the active set.
            if decoration in cur_format:
                return ''  # nothing to do
            else:
                cur_format.add(decoration)
                return esc + ';'.join(cur_format) + term

        def _remove_it(decoration, decoration_clear=''):
            # Deactivate a decoration, either via its dedicated "off" code or
            # by clearing and re-emitting what remains active.
            if decoration in cur_format:
                cur_format.remove(decoration)
                if decoration_clear:
                    return esc + decoration_clear + term
                else:
                    return esc + sclear + ';' + ';'.join(cur_format) + term
            else:
                return ''  # nothing to do

        def _reset():
            cur_format.clear()
            return esc + sclear + term
    else:
        # Plain-text mode: every code is empty so concatenation is a no-op.
        sclear = ''
        sbold = ''
        sdim = ''
        snormal = ''
        sunderline = ''
        sinverse = ''
        siclear = ''
        fgblack = ''
        fgred = ''
        fggreen = ''
        fgyellow = ''
        fgblue = ''
        fgmagenta = ''
        fgcyan = ''
        fgwhite = ''
        bgblack = ''
        bgred = ''
        bggreen = ''
        bgyellow = ''
        bgblue = ''
        bgmagenta = ''
        bgcyan = ''
        bgwhite = ''
        sclear_code = ''
        # A list (not a set) so _reset can replay entries in reverse order.
        cur_format = list()

        def _add_it(decoration):
            if decoration in cur_format:
                return ''  # nothing to do
            else:
                cur_format.append(decoration)
                return decoration

        def _remove_it(decoration, decoration_clear=''):
            if decoration in cur_format:
                cur_format.remove(decoration)
                return decoration
            else:
                return ''  # nothing to do

        def _reset():
            cur_format.reverse()
            retval = ''.join(cur_format)
            del(cur_format[:])
            return retval
# Real terminal? Default to colorized output only when stdout is a TTY.
isatty = sys.stdout.isatty()
color_enabled = isatty
enable_color(isatty)
# --- Text style toggles -------------------------------------------------
# Each wrapper returns the escape (or plain) string that turns the given
# decoration on/off, delegating state tracking to _add_it/_remove_it.

def bold():
    return _add_it(sbold)

def unbold():
    return _remove_it(sbold)

def dim():
    return _add_it(sdim)

def undim():
    return _remove_it(sdim)

def underline():
    return _add_it(sunderline)

def ununderline():
    return _remove_it(sunderline)

def inverse():
    return _add_it(sinverse)

def uninverse():
    # Inverse has a dedicated "off" SGR code (27); pass it explicitly.
    return _remove_it(sinverse, siclear)

def reset():
    # Clear every active decoration and return the corresponding string.
    return _reset()
# --- Foreground colors --------------------------------------------------

def fg_black():
    return _add_it(fgblack)

def fg_red():
    return _add_it(fgred)

def fg_green():
    return _add_it(fggreen)

def fg_yellow():
    return _add_it(fgyellow)

def fg_blue():
    return _add_it(fgblue)

def fg_magenta():
    return _add_it(fgmagenta)

def fg_cyan():
    return _add_it(fgcyan)

def fg_white():
    return _add_it(fgwhite)

def fg_clear():
    # Drop every foreground color from the active set, then emit a clear
    # followed by whatever remains active.
    _remove_it(fgblack)
    _remove_it(fgred)
    _remove_it(fggreen)
    _remove_it(fgyellow)
    _remove_it(fgblue)
    _remove_it(fgmagenta)
    _remove_it(fgcyan)
    return sclear_code + _remove_it(fgwhite)
# --- Background colors --------------------------------------------------

def bg_black():
    return _add_it(bgblack)

def bg_red():
    return _add_it(bgred)

def bg_green():
    return _add_it(bggreen)

def bg_yellow():
    return _add_it(bgyellow)

def bg_blue():
    return _add_it(bgblue)

def bg_magenta():
    return _add_it(bgmagenta)

def bg_cyan():
    return _add_it(bgcyan)

def bg_white():
    return _add_it(bgwhite)

def bg_clear():
    # Mirror of fg_clear for the background color family.
    _remove_it(bgblack)
    _remove_it(bgred)
    _remove_it(bggreen)
    _remove_it(bgyellow)
    _remove_it(bgblue)
    _remove_it(bgmagenta)
    _remove_it(bgcyan)
    return sclear_code + _remove_it(bgwhite)
def style(*functions):
    """Invoke each style function in order; return the last one's string.

    With no arguments, returns the empty string.
    """
    result = ''
    for fn in functions:
        result = fn()
    return result
| unlicense |
translate/pootle | tests/models/unit.py | 5 | 13351 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import os
import pytest
from translate.storage.factory import getclass
from translate.storage.pypo import pounit
from translate.storage.statsdb import wordcount as counter
from django.contrib.auth import get_user_model
from pootle.core.delegate import review, wordcount
from pootle.core.plugin import getter
from pootle_fs.utils import FSPlugin
from pootle_store.constants import FUZZY, OBSOLETE, TRANSLATED, UNTRANSLATED
from pootle_store.models import Suggestion, Unit
from pootle_store.syncer import UnitSyncer
User = get_user_model()
def _sync_translations(db_unit):
    """Sync DB-side translations to disk via the FS plugin.

    Returns the re-read on-disk store and the file unit whose id matches
    *db_unit*.
    """
    store = db_unit.store
    tp = store.translation_project
    project = tp.project
    language = tp.language
    plugin = FSPlugin(project)
    plugin.fetch()
    plugin.sync()
    # Re-read the freshly synced file from the plugin's filesystem URL.
    file_store = db_unit.store.deserialize(
        open(os.path.join(
            plugin.fs_url,
            language.code,
            store.name)).read())
    file_unit = file_store.findid(db_unit.getid())
    return file_store, file_unit
@pytest.mark.django_db
def test_getorig(project0_nongnu, af_tutorial_po):
    """Tests that the in-DB Store and on-disk Store match by checking that
    units match in order.
    """
    file_store, __ = _sync_translations(af_tutorial_po.units.first())
    for i, db_unit in enumerate(af_tutorial_po.units.iterator()):
        # File stores carry a header unit at index 0, hence the +1 offset.
        file_unit = file_store.units[i + 1]
        assert db_unit.getid() == file_unit.getid()


@pytest.mark.django_db
def test_convert(project0_nongnu, af_tutorial_po):
    """Tests that in-DB and on-disk units match after format conversion."""
    # db unit doesnt have plural form by default, so add
    plural_unit = af_tutorial_po.units.last()
    plural_unit.target = [u'samaka', u'samak']
    plural_unit.save()
    file_store, __ = _sync_translations(af_tutorial_po.units.first())
    for db_unit in af_tutorial_po.units.iterator():
        file_unit = file_store.findid(db_unit.getid())
        newunit = db_unit.convert(file_store.UnitClass)
        assert str(newunit) == str(file_unit)


@pytest.mark.django_db
def test_sync_target(project0_nongnu, af_tutorial_po):
    """Tests that target changes are properly sync'ed to disk."""
    db_unit = af_tutorial_po.units.first()
    db_unit.target = u'samaka'
    db_unit.save()
    file_store, file_unit = _sync_translations(db_unit)
    assert (
        db_unit.target
        == file_unit.target
        == u'samaka')


@pytest.mark.django_db
def test_empty_plural_target(af_tutorial_po):
    """Tests empty plural targets are not deleted."""
    db_unit = af_tutorial_po.units.get(unitid="%d fish")
    db_unit.target = ["samaka"]
    db_unit.save()
    file_store, file_unit = _sync_translations(db_unit)
    assert file_unit.target == "samaka"
    # Both plural slots must survive even though only one was filled.
    assert len(file_unit.target.strings) == 2
    db_unit.refresh_from_db()
    db_unit.target = ""
    db_unit.save()
    file_store, file_unit = _sync_translations(db_unit)
    assert file_unit.target == ""
    assert len(file_unit.target.strings) == 2


@pytest.mark.django_db
def test_sync_plural_target(af_tutorial_po):
    """Tests plural translations are stored and sync'ed."""
    db_unit = af_tutorial_po.units.get(unitid="%d fish")
    db_unit.target = [u'samaka', u'samak']
    db_unit.save()
    file_store, file_unit = _sync_translations(db_unit)
    assert (
        db_unit.target.strings
        == file_unit.target.strings
        == [u'samaka', u'samak']
        == file_store.units[db_unit.index].target.strings)
    # Scalar access resolves to the first plural form.
    assert (
        db_unit.target
        == file_unit.target
        == u'samaka'
        == file_store.units[db_unit.index].target)
@pytest.mark.django_db
def test_sync_plural_target_dict(af_tutorial_po):
    """Tests plural translations are stored and sync'ed (dict version)."""
    db_unit = af_tutorial_po.units.get(unitid="%d fish")
    db_unit.target = {0: u'samaka', 1: u'samak'}
    db_unit.save()
    file_store, file_unit = _sync_translations(db_unit)
    assert (
        db_unit.target.strings
        == file_unit.target.strings
        == [u'samaka', u'samak']
        == file_store.units[db_unit.index].target.strings)
    assert (
        db_unit.target
        == file_unit.target
        == u'samaka'
        == file_store.units[db_unit.index].target)


@pytest.mark.django_db
def test_sync_fuzzy(project0_nongnu, af_tutorial_po):
    """Tests fuzzy state changes are stored and sync'ed."""
    db_unit = af_tutorial_po.units.first()
    db_unit.target = u'samaka'
    db_unit.markfuzzy()
    db_unit.save()
    file_store, file_unit = _sync_translations(db_unit)
    assert (
        db_unit.isfuzzy()
        == file_unit.isfuzzy()
        is True)
    # Unmarking fuzzy must round-trip to disk as well.
    db_unit.refresh_from_db()
    db_unit.markfuzzy(False)
    db_unit.save()
    file_store, file_unit = _sync_translations(db_unit)
    assert (
        db_unit.isfuzzy()
        == file_unit.isfuzzy()
        is False)


@pytest.mark.django_db
def test_sync_comment(project0_nongnu, af_tutorial_po):
    """Tests translator comments are stored and sync'ed."""
    db_unit = af_tutorial_po.units.first()
    db_unit.translator_comment = u'7amada'
    db_unit.save()
    file_store, file_unit = _sync_translations(db_unit)
    assert (
        db_unit.getnotes(origin='translator')
        == file_unit.getnotes(origin='translator')
        == u'7amada')


@pytest.mark.django_db
def test_add_suggestion(store0, system):
    """Tests adding new suggestions to units."""
    untranslated_unit = store0.units.filter(state=UNTRANSLATED)[0]
    translated_unit = store0.units.filter(state=TRANSLATED)[0]
    suggestion_text = 'foo bar baz'
    initial_suggestions = len(untranslated_unit.get_suggestions())
    suggestions = review.get(Suggestion)()
    # Empty suggestion is not recorded
    sugg, added = suggestions.add(untranslated_unit, "")
    assert sugg is None
    assert not added
    # Existing translation can't be added as a suggestion
    sugg, added = suggestions.add(translated_unit, translated_unit.target)
    assert sugg is None
    assert not added
    # Add new suggestion
    sugg, added = suggestions.add(untranslated_unit, suggestion_text)
    assert sugg is not None
    assert added
    assert len(untranslated_unit.get_suggestions()) == initial_suggestions + 1
    # Already-suggested text can't be suggested again
    assert suggestions.add(untranslated_unit, suggestion_text) == (None, False)
    assert len(untranslated_unit.get_suggestions()) == initial_suggestions + 1
    # Removing a suggestion should allow suggesting the same text again
    review.get(Suggestion)([sugg], system).reject()
    assert len(untranslated_unit.get_suggestions()) == initial_suggestions
    sugg, added = suggestions.add(untranslated_unit, suggestion_text)
    assert sugg is not None
    assert added
    assert len(untranslated_unit.get_suggestions()) == initial_suggestions + 1
@pytest.mark.django_db
def test_accept_suggestion_changes_state(issue_2401_po, system):
"""Tests that accepting a suggestion will change the state of the unit."""
suggestions = review.get(Suggestion)()
# First test with an untranslated unit
unit = issue_2401_po.units[0]
assert unit.state == UNTRANSLATED
suggestion, created_ = suggestions.add(unit, "foo")
assert unit.state == UNTRANSLATED
review.get(Suggestion)([suggestion], system).accept()
assert unit.state == TRANSLATED
# Let's try with a translated unit now
unit = issue_2401_po.units[1]
assert unit.state == TRANSLATED
suggestion, created_ = suggestions.add(unit, "bar")
assert unit.state == TRANSLATED
review.get(Suggestion)([suggestion], system).accept()
assert unit.state == TRANSLATED
# And finally a fuzzy unit
unit = issue_2401_po.units[2]
assert unit.state == FUZZY
suggestion, created_ = suggestions.add(unit, "baz")
assert unit.state == FUZZY
review.get(Suggestion)([suggestion], system).accept()
assert unit.state == TRANSLATED
@pytest.mark.django_db
def test_accept_suggestion_update_wordcount(it_tutorial_po, system):
"""Tests that accepting a suggestion for an untranslated unit will
change the wordcount stats of the unit's store.
"""
orig_translated = it_tutorial_po.data.translated_words
suggestions = review.get(Suggestion)()
untranslated_unit = it_tutorial_po.units[0]
suggestion_text = 'foo bar baz'
sugg, added = suggestions.add(untranslated_unit, suggestion_text)
assert sugg is not None
assert added
assert len(untranslated_unit.get_suggestions()) == 1
assert untranslated_unit.state == UNTRANSLATED
review.get(Suggestion)([sugg], system).accept()
assert untranslated_unit.state == TRANSLATED
assert it_tutorial_po.data.translated_words > orig_translated
@pytest.mark.django_db
def test_unit_repr():
    """String representations delegate to convert() and the unit source."""
    unit = Unit.objects.first()
    assert str(unit) == str(unit.convert())
    # NOTE(review): `unicode` exists on Python 2 only; this suite targets py2.
    assert unicode(unit) == unicode(unit.source)
@pytest.mark.django_db
def test_unit_po_plurals(store_po):
    """A PO unit with msgid_plural is plural both before and after save."""
    unit = Unit(store=store_po)
    unit_po = pounit('bar')
    unit_po.msgid_plural = ['bars']
    unit.update(unit_po)
    assert unit.hasplural()
    unit.save()
    # The plural flag must survive persisting the unit.
    assert unit.hasplural()
@pytest.mark.django_db
def test_unit_ts_plurals(store_po, test_fs):
    """A TS unit with plural forms stays plural across save/reload cycles."""
    with test_fs.open(['data', 'ts', 'add_plurals.ts']) as f:
        file_store = getclass(f)(f.read())
    unit = Unit(store=store_po)
    unit_ts = file_store.units[0]
    unit.update(unit_ts)
    assert unit.hasplural()
    unit.save()
    unit = Unit.objects.get(id=unit.id)
    assert unit.hasplural()
    # Save and reload a second time to ensure the flag is stable.
    unit.save()
    unit = Unit.objects.get(id=unit.id)
    assert unit.hasplural()
def _test_unit_syncer(unit, newunit):
    """Assert that *newunit* mirrors *unit* for all fields a sync copies."""
    assert newunit.source == unit.source
    assert newunit.target == unit.target
    assert newunit.getid() == unit.getid()
    assert newunit.istranslated() == unit.istranslated()
    assert (
        newunit.getnotes(origin="developer")
        == unit.getnotes(origin="developer"))
    assert (
        newunit.getnotes(origin="translator")
        == unit.getnotes(origin="translator"))
    assert newunit.isobsolete() == unit.isobsolete()
    assert newunit.isfuzzy() == unit.isfuzzy()
@pytest.mark.django_db
def test_unit_syncer(unit_syncer):
    """Converting a translated unit yields a translated, clean file unit."""
    unit, unit_class = unit_syncer
    syncer = UnitSyncer(unit)
    newunit = syncer.convert(unit_class)
    assert newunit.istranslated()
    assert not newunit.isfuzzy()
    assert not newunit.isobsolete()
    _test_unit_syncer(unit, newunit)
@pytest.mark.django_db
def test_unit_syncer_fuzzy(unit_syncer):
    """A FUZZY db unit converts to a fuzzy, non-translated file unit."""
    unit, unit_class = unit_syncer
    syncer = UnitSyncer(unit)
    unit.state = FUZZY
    unit.save()
    newunit = syncer.convert(unit_class)
    assert newunit.isfuzzy()
    assert not newunit.isobsolete()
    assert not newunit.istranslated()
    _test_unit_syncer(unit, newunit)
@pytest.mark.django_db
def test_unit_syncer_untranslated(unit_syncer):
    """An UNTRANSLATED db unit converts to an empty, untranslated file unit."""
    unit, unit_class = unit_syncer
    syncer = UnitSyncer(unit)
    unit.state = UNTRANSLATED
    unit.target = ""
    unit.save()
    newunit = syncer.convert(unit_class)
    assert not newunit.isfuzzy()
    assert not newunit.isobsolete()
    assert not newunit.istranslated()
    _test_unit_syncer(unit, newunit)
@pytest.mark.django_db
def test_unit_syncer_obsolete(unit_syncer):
    """An OBSOLETE db unit converts to an obsolete file unit."""
    unit, unit_class = unit_syncer
    syncer = UnitSyncer(unit)
    unit.state = OBSOLETE
    unit.save()
    newunit = syncer.convert(unit_class)
    assert newunit.isobsolete()
    assert not newunit.isfuzzy()
    assert not newunit.istranslated()
    _test_unit_syncer(unit, newunit)
@pytest.mark.django_db
def test_unit_syncer_notes(unit_syncer):
    """Developer and translator notes are carried over by the syncer."""
    unit, unit_class = unit_syncer
    syncer = UnitSyncer(unit)
    unit.addnote(origin="developer", text="hello")
    newunit = syncer.convert(unit_class)
    assert newunit.getnotes(origin="developer") == "hello"
    _test_unit_syncer(unit, newunit)
    # Notes from the translator origin must be preserved as well.
    unit.addnote(origin="translator", text="world")
    newunit = syncer.convert(unit_class)
    assert newunit.getnotes(origin="translator") == "world"
    _test_unit_syncer(unit, newunit)
@pytest.mark.django_db
def test_unit_syncer_locations(unit_syncer):
    """Location comments are carried over by the syncer."""
    unit, unit_class = unit_syncer
    unit.addlocation("FOO")
    syncer = UnitSyncer(unit)
    newunit = syncer.convert(unit_class)
    assert newunit.getlocations() == ["FOO"]
    _test_unit_syncer(unit, newunit)
@pytest.mark.django_db
def test_add_autotranslated_unit(settings, store0, admin, no_wordcount):
    """A unit whose words all count as zero is stored as FUZZY with the
    target pre-filled from the source (auto-translated)."""

    class DummyWordcount(object):
        # Wordcounter that discounts occurrences of the literal 'Pootle'.

        def count(self, value):
            # NOTE(review): `counter` is not defined anywhere in this
            # excerpt; presumably supplied by the test module's imports --
            # verify against the full file.
            return counter(value) - value.count('Pootle')

        def count_words(self, strings):
            return sum(self.count(string) for string in strings)

    wc = DummyWordcount()
    with no_wordcount():

        @getter(wordcount, sender=Unit)
        def temp_wc_getter(**kwargs_):
            return wc

        unit = store0.addunit(
            store0.UnitClass(source_f='Pootle Pootle'),
            user=admin)
        dbunit = store0.units.get(id=unit.id)
        assert dbunit.state == FUZZY
        assert dbunit.target_f == unit.source_f
| gpl-3.0 |
cjds/cron | lib/werkzeug/contrib/fixers.py | 148 | 10197 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.fixers
~~~~~~~~~~~~~~~~~~~~~~~
.. versionadded:: 0.5
This module includes various helpers that fix bugs in web servers. They may
be necessary for some versions of a buggy web server but not others. We try
to stay updated with the status of the bugs as good as possible but you have
to make sure whether they fix the problem you encounter.
If you notice bugs in webservers not fixed in this module consider
contributing a patch.
:copyright: Copyright 2009 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib import unquote
except ImportError:
from urllib.parse import unquote
from werkzeug.http import parse_options_header, parse_cache_control_header, \
parse_set_header
from werkzeug.useragents import UserAgent
from werkzeug.datastructures import Headers, ResponseCacheControl
class CGIRootFix(object):
    """Middleware that repairs ``SCRIPT_NAME``/``PATH_INFO`` for CGI or
    FastCGI deployments where the application root ends up pointing at
    the cgi script's path instead of the path users actually visit.

    .. versionchanged:: 0.9
       Added `app_root` parameter and renamed from `LighttpdCGIRootFix`.

    :param app: the WSGI application
    :param app_root: Defaulting to ``'/'``, you can set this to something
                     else if your app is mounted somewhere else.
    """

    def __init__(self, app, app_root='/'):
        self.app = app
        self.app_root = app_root

    def __call__(self, environ, start_response):
        # Rewrite only for older Lighty versions, or when no server
        # software is advertised at all (e.g. tests that never set the
        # SERVER_SOFTWARE key).
        needs_fix = ('SERVER_SOFTWARE' not in environ
                     or environ['SERVER_SOFTWARE'] < 'lighttpd/1.4.28')
        if needs_fix:
            script_name = environ.get('SCRIPT_NAME', '')
            path_info = environ.get('PATH_INFO', '')
            environ['PATH_INFO'] = script_name + path_info
            environ['SCRIPT_NAME'] = self.app_root.strip('/')
        return self.app(environ, start_response)
# backwards compatibility
LighttpdCGIRootFix = CGIRootFix
class PathInfoFromRequestUriFix(object):
    """Middleware that rebuilds ``PATH_INFO`` from the request URI.

    On Windows, environment variables are limited to the system charset,
    so ``PATH_INFO`` may lose information (a problem e.g. for CGI scripts
    on a Windows Apache).  This fixer recreates ``PATH_INFO`` from
    ``REQUEST_URI``, ``REQUEST_URL`` or ``UNENCODED_URL`` -- whichever the
    webserver provides first.

    :param app: the WSGI application
    """

    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        for candidate in ('REQUEST_URL', 'REQUEST_URI', 'UNENCODED_URL'):
            try:
                request_uri = unquote(environ[candidate])
            except KeyError:
                continue
            script_name = unquote(environ.get('SCRIPT_NAME', ''))
            if request_uri.startswith(script_name):
                # Drop the script prefix and any query string.
                remainder = request_uri[len(script_name):]
                environ['PATH_INFO'] = remainder.split('?', 1)[0]
                break
        return self.app(environ, start_response)
class ProxyFix(object):
    """Middleware that makes an application usable behind an HTTP proxy.

    It rewrites ``REMOTE_ADDR``, ``HTTP_HOST`` and ``wsgi.url_scheme``
    from the ``X-Forwarded-*`` headers so applications that read the WSGI
    environ directly see the client's values rather than the proxy's.

    Set `num_proxies` to the number of proxy servers in front of the app.
    Do not use this middleware in non-proxy setups for security reasons,
    since clients can forge the headers.

    The original values are kept in the environ as
    ``werkzeug.proxy_fix.orig_remote_addr``,
    ``werkzeug.proxy_fix.orig_http_host`` and
    ``werkzeug.proxy_fix.orig_wsgi_url_scheme``.

    :param app: the WSGI application
    :param num_proxies: the number of proxy servers in front of the app.
    """

    def __init__(self, app, num_proxies=1):
        self.app = app
        self.num_proxies = num_proxies

    def get_remote_addr(self, forwarded_for):
        """Select the client address from the X-Forwarded-For chain.

        Picks the entry added by the `num_proxies`-th proxy (counted from
        the end); returns ``None`` when the chain is too short.

        .. versionadded:: 0.8
        """
        if len(forwarded_for) < self.num_proxies:
            return None
        return forwarded_for[-self.num_proxies]

    def __call__(self, environ, start_response):
        forwarded_proto = environ.get('HTTP_X_FORWARDED_PROTO', '')
        forwarded_host = environ.get('HTTP_X_FORWARDED_HOST', '')
        raw_chain = environ.get('HTTP_X_FORWARDED_FOR', '')
        # Preserve the values seen before any rewriting took place.
        environ['werkzeug.proxy_fix.orig_wsgi_url_scheme'] = \
            environ.get('wsgi.url_scheme')
        environ['werkzeug.proxy_fix.orig_remote_addr'] = \
            environ.get('REMOTE_ADDR')
        environ['werkzeug.proxy_fix.orig_http_host'] = \
            environ.get('HTTP_HOST')
        addresses = [part.strip() for part in raw_chain.split(',')]
        addresses = [addr for addr in addresses if addr]
        remote_addr = self.get_remote_addr(addresses)
        if remote_addr is not None:
            environ['REMOTE_ADDR'] = remote_addr
        if forwarded_host:
            environ['HTTP_HOST'] = forwarded_host
        if forwarded_proto:
            environ['wsgi.url_scheme'] = forwarded_proto
        return self.app(environ, start_response)
class HeaderRewriterFix(object):
    """Middleware that removes some response headers and appends others.

    Useful, for example, to strip the `Date` header added by a server, or
    to attach `X-Powered-By` style headers::

        app = HeaderRewriterFix(app, remove_headers=['Date'],
                                add_headers=[('X-Powered-By', 'WSGI')])

    :param app: the WSGI application
    :param remove_headers: a sequence of header keys that should be
                           removed.
    :param add_headers: a sequence of ``(key, value)`` tuples that should
                        be added.
    """

    def __init__(self, app, remove_headers=None, add_headers=None):
        self.app = app
        # Lower-cased for case-insensitive matching against response keys.
        self.remove_headers = set(x.lower() for x in (remove_headers or ()))
        self.add_headers = list(add_headers or ())

    def __call__(self, environ, start_response):
        def rewriting_start_response(status, headers, exc_info=None):
            kept = [(key, value) for key, value in headers
                    if key.lower() not in self.remove_headers]
            kept.extend(self.add_headers)
            return start_response(status, kept, exc_info)
        return self.app(environ, rewriting_start_response)
class InternetExplorerFix(object):
    """This middleware fixes a couple of bugs with Microsoft Internet
    Explorer.  Currently the following fixes are applied:

    -   removing of `Vary` headers for unsupported mimetypes which
        causes troubles with caching.  Can be disabled by passing
        ``fix_vary=False`` to the constructor.
        see: http://support.microsoft.com/kb/824847/en-us

    -   removes offending headers to work around caching bugs in
        Internet Explorer if `Content-Disposition` is set.  Can be
        disabled by passing ``fix_attach=False`` to the constructor.

    If it does not detect affected Internet Explorer versions it won't touch
    the request / response.
    """

    # This code was inspired by Django fixers for the same bugs.  The
    # fix_vary and fix_attach fixers were originally implemented in Django
    # by Michael Axiak and is available as part of the Django project:
    #     http://code.djangoproject.com/ticket/4148

    def __init__(self, app, fix_vary=True, fix_attach=True):
        self.app = app
        self.fix_vary = fix_vary
        self.fix_attach = fix_attach

    def fix_headers(self, environ, headers, status=None):
        # Drop `Vary` for mimetypes IE cannot cache properly.
        if self.fix_vary:
            header = headers.get('content-type', '')
            mimetype, options = parse_options_header(header)
            if mimetype not in ('text/html', 'text/plain', 'text/sgml'):
                headers.pop('vary', None)

        # For attachments, strip cache-busting headers (`Pragma: no-cache`
        # and `Cache-Control: no-cache/no-store`) that break IE downloads.
        if self.fix_attach and 'content-disposition' in headers:
            pragma = parse_set_header(headers.get('pragma', ''))
            pragma.discard('no-cache')
            header = pragma.to_header()
            if not header:
                headers.pop('pragma', '')
            else:
                headers['Pragma'] = header
            header = headers.get('cache-control', '')
            if header:
                cc = parse_cache_control_header(header,
                                                cls=ResponseCacheControl)
                cc.no_cache = None
                cc.no_store = False
                header = cc.to_header()
                if not header:
                    headers.pop('cache-control', '')
                else:
                    headers['Cache-Control'] = header

    def run_fixed(self, environ, start_response):
        # Wrap start_response so the headers can be fixed up in flight.
        def fixing_start_response(status, headers, exc_info=None):
            headers = Headers(headers)
            self.fix_headers(environ, headers, status)
            return start_response(status, headers.to_wsgi_list(), exc_info)
        return self.app(environ, fixing_start_response)

    def __call__(self, environ, start_response):
        ua = UserAgent(environ)
        if ua.browser != 'msie':
            # Not Internet Explorer: pass through untouched.
            return self.app(environ, start_response)
        return self.run_fixed(environ, start_response)
| apache-2.0 |
buildhappy/ice | cpp/test/Ice/operations/run.py | 3 | 1111 | #!/usr/bin/env python
# **********************************************************************
#
# Copyright (c) 2003-2015 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import os, sys

# Candidate relative locations of the Ice toplevel directory, searched
# relative to this script's own location.
path = [ ".", "..", "../..", "../../..", "../../../..", "../../../../.." ]
head = os.path.dirname(sys.argv[0])
if len(head) > 0:
    path = [os.path.join(head, p) for p in path]
# Keep only candidates that actually contain scripts/TestUtil.py.
path = [os.path.abspath(p) for p in path if os.path.exists(os.path.join(p, "scripts", "TestUtil.py")) ]
if len(path) == 0:
    raise RuntimeError("can't find toplevel directory!")
sys.path.append(os.path.join(path[0], "scripts"))
import TestUtil

# Run the operations test suite against three server flavours.
print("tests with regular server.")
TestUtil.clientServerTest(additionalClientOptions = "--Ice.Warn.AMICallback=0")
print("tests with AMD server.")
TestUtil.clientServerTest(additionalClientOptions = "--Ice.Warn.AMICallback=0", server = "serveramd")
print("tests with collocated server.")
TestUtil.collocatedTest()
| gpl-2.0 |
ed-/solum | solum/tests/api/handlers/test_component.py | 2 | 3212 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from solum.api.handlers import component_handler
from solum.tests import base
from solum.tests import fakes
from solum.tests import utils
@mock.patch('solum.objects.registry')
class TestComponentHandler(base.BaseTestCase):
    """Unit tests for ComponentHandler with the object registry mocked."""

    def setUp(self):
        super(TestComponentHandler, self).setUp()
        self.ctx = utils.dummy_context()

    def test_component_get(self, mock_registry):
        # FIX: the return value used to be configured on
        # ``mock_registry.component`` (lowercase) -- a different
        # auto-created mock attribute from the ``Component`` one the
        # handler actually calls -- so the configured value was never
        # returned to the handler.
        mock_registry.Component.get_by_uuid.return_value = {'assembly_id': 42}
        handler = component_handler.ComponentHandler(self.ctx)
        res = handler.get('test_id')
        self.assertIsNotNone(res)
        get_by_uuid = mock_registry.Component.get_by_uuid
        get_by_uuid.assert_called_once_with(self.ctx, 'test_id')

    def test_get_all(self, mock_registry):
        mock_registry.ComponentList.get_all.return_value = {}
        handler = component_handler.ComponentHandler(self.ctx)
        res = handler.get_all()
        self.assertIsNotNone(res)
        mock_registry.ComponentList.get_all.assert_called_once_with(self.ctx)

    def test_update(self, mock_registry):
        data = {'user_id': 'new_user_id',
                'assembly_id': 'new_assembly_id'}
        db_obj = fakes.FakeComponent()
        mock_registry.Component.get_by_uuid.return_value = db_obj
        handler = component_handler.ComponentHandler(self.ctx)
        res = handler.update('test_id', data)
        self.assertEqual(db_obj.user_id, res.user_id)
        db_obj.update.assert_called_once_with(data)
        db_obj.save.assert_called_once_with(self.ctx)
        mock_registry.Component.get_by_uuid.assert_called_once_with(self.ctx,
                                                                    'test_id')

    def test_create(self, mock_registry):
        data = {'name': 'new_name',
                'assembly_id': 'new_assembly_id'}
        db_obj = fakes.FakeComponent()
        mock_registry.Component.return_value = db_obj
        handler = component_handler.ComponentHandler(self.ctx)
        res = handler.create(data)
        db_obj.update.assert_called_once_with(data)
        db_obj.create.assert_called_once_with(self.ctx)
        self.assertEqual(res, db_obj)

    def test_delete(self, mock_registry):
        db_obj = fakes.FakeComponent()
        mock_registry.Component.get_by_uuid.return_value = db_obj
        handler = component_handler.ComponentHandler(self.ctx)
        handler.delete('test_id')
        db_obj.destroy.assert_called_once_with(self.ctx)
        mock_registry.Component.get_by_uuid.assert_called_once_with(self.ctx,
                                                                    'test_id')
| apache-2.0 |
sadanandb/pmt | src/pyasm/application/maya/maya_builder.py | 6 | 5093 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['MayaBuilder']
import os, sys, urllib, xmlrpclib
from xml.dom.minidom import parseString
from pyasm.application.common import SessionBuilder
from maya_environment import MayaEnvironment
from maya_app import Maya, MayaNodeNaming
from maya_anim_file import MayaAnimFile
class MayaBuilder(SessionBuilder):
    '''builds a maya file'''
    # NOTE: the codebase uses `my` in place of the conventional `self`.

    def import_file(my, node_name, path, instantiation='import', use_namespace=True):
        """Import or reference *path* into the current Maya session.

        Returns the name of the top node created for the asset, or the
        last created node when namespaces are not used.
        """
        if node_name and my.app.node_exists(node_name):
            print "WARNING: Node '%s' already exists" % node_name

        naming = MayaNodeNaming(node_name)

        # if there is no instance name, then just import without namespaces
        if not use_namespace:
            old_nodes = my.app.get_top_nodes()

            if instantiation == 'reference':
                # reference needs the node_name as a namespace
                # but it can't be the same as a node already in the session
                created_node = my.app.import_reference(path, node_name)
            else:
                # import works with the default namespace
                created_node = my.app.import_file(path)

            # diff the top nodes to discover what the import created
            new_nodes = my.app.get_top_nodes()
            created_nodes = [val for val in new_nodes if val not in old_nodes]
            if not created_nodes:
                created_nodes = []

            # select all the created nodes, so that it can be added to a
            # set if necessary
            my.app.select_none()
            for created_node in created_nodes:
                my.app.select_add(created_node)
        else:
            instance = naming.get_instance()
            asset_code = naming.get_asset_code()

            # the namespace is the instance name
            namespace = instance
            my.app.add_namespace(namespace)
            my.app.set_namespace(namespace)
            contents = my.app.get_namespace_contents()
            # remove namespace if empty
            my.app.set_namespace()
            if contents == None:
                my.app.remove_namespace(namespace)

            # get all of the namespaces
            old = my.app.get_all_namespaces()
            old_nodes = my.app.get_top_nodes()
            sets = my.app.get_sets()
            if sets:
                old_nodes.extend(sets)

            # import file into namespace
            if instantiation == 'reference':
                my.app.import_reference(path,namespace)
            else:
                my.app.import_file(path,namespace)

            # set the user environment
            sandbox_dir = my.get_sandbox_dir()
            basename = os.path.basename(path)
            # DON'T set the project or rename the file
            #my.app.set_user_environment(sandbox_dir, basename)

            # get the two differences to find out which namespace was created
            new = my.app.get_all_namespaces()
            diff = [val for val in new if val not in old]
            if not diff:
                raise Exception("No namespaces created")

            # diff top nodes (and sets) to find the newly created nodes
            new_nodes = my.app.get_top_nodes()
            sets = my.app.get_sets()
            if sets:
                new_nodes.extend(sets)
            created_nodes = [val for val in new_nodes if val not in old_nodes]

            # get the top node for this asset
            created_node = None
            for created_node in created_nodes:
                # choose the node that contains the asset code
                if created_node.find(":%s" % asset_code) != -1:
                    break

        # select newly created attr
        if created_node:
            my.app.select(created_node)

        return created_node

    def import_anim(my, node_name, path, created_node=""):
        """Apply the animation stored at *path* to the session node.

        Prefers the freshly created node (or its ``_interface`` companion,
        if one exists) over *node_name* when choosing what to select.
        """
        node_naming = my.app.get_node_naming(node_name)
        instance = node_naming.get_instance()

        select = node_name
        # select the node that was created if the variable exists
        if created_node != "" and created_node != node_name:
            select = created_node

        # check to see if this node_name has a corresponding interface
        interface = "%s_interface" % select
        if my.app.node_exists( interface ):
            select = interface

        # select the node
        my.app.select(select)

        # parse the animation
        anim = MayaAnimFile(path)
        anim.parse()

        # put the animation data into a temp file
        tmp = "%s/temp.anim" % my.get_tmpdir()
        file2 = open(tmp, "w")
        file2.write( anim.get_anim(instance) )
        file2.close()

        # import the file just created
        my.app.import_anim(tmp)
        my.app.import_static(anim.get_static(instance), node_name)
| epl-1.0 |
lucashmorais/x-Bench | mozmill-env/python/Lib/encodings/utf_8_sig.py | 412 | 3685 | """ Python 'utf-8-sig' Codec
This work similar to UTF-8 with the following changes:
* On encoding/writing a UTF-8 encoded BOM will be prepended/written as the
first three bytes.
* On decoding/reading if the first three bytes are a UTF-8 encoded BOM, these
bytes will be skipped.
"""
import codecs
### Codec APIs
def encode(input, errors='strict'):
    """Encode *input* as UTF-8, prefixed with the UTF-8 BOM."""
    encoded = codecs.utf_8_encode(input, errors)[0]
    return (codecs.BOM_UTF8 + encoded, len(input))
def decode(input, errors='strict'):
    """Decode UTF-8 *input*, skipping a leading UTF-8 BOM if present.

    The returned consumed count includes the BOM bytes.
    """
    bom_len = 3 if input[:3] == codecs.BOM_UTF8 else 0
    output, consumed = codecs.utf_8_decode(input[bom_len:], errors, True)
    return (output, consumed + bom_len)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental UTF-8 encoder that emits a BOM before the first chunk."""

    def __init__(self, errors='strict'):
        codecs.IncrementalEncoder.__init__(self, errors)
        # 1 until the first chunk has been encoded, 0 afterwards.
        self.first = 1

    def encode(self, input, final=False):
        encoded = codecs.utf_8_encode(input, self.errors)[0]
        if not self.first:
            return encoded
        self.first = 0
        return codecs.BOM_UTF8 + encoded

    def reset(self):
        codecs.IncrementalEncoder.reset(self)
        self.first = 1

    def getstate(self):
        return self.first

    def setstate(self, state):
        self.first = state
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
    """Incremental UTF-8 decoder that strips a leading BOM, if any."""

    def __init__(self, errors='strict'):
        codecs.BufferedIncrementalDecoder.__init__(self, errors)
        # True until we have decided whether the stream starts with a BOM.
        self.first = True

    def _buffer_decode(self, input, errors, final):
        if not self.first:
            return codecs.utf_8_decode(input, errors, final)
        if len(input) < 3:
            if codecs.BOM_UTF8.startswith(input):
                # Could still turn out to be a BOM; wait for more bytes.
                return (u"", 0)
            self.first = None
        else:
            self.first = None
            if input[:3] == codecs.BOM_UTF8:
                output, consumed = codecs.utf_8_decode(input[3:],
                                                       errors, final)
                return (output, consumed + 3)
        return codecs.utf_8_decode(input, errors, final)

    def reset(self):
        codecs.BufferedIncrementalDecoder.reset(self)
        self.first = True
class StreamWriter(codecs.StreamWriter):
    def reset(self):
        codecs.StreamWriter.reset(self)
        try:
            # Remove the instance-level `encode` installed by the first
            # write so the next write emits a fresh BOM again.
            del self.encode
        except AttributeError:
            pass

    def encode(self, input, errors='strict'):
        # First write: shadow this method on the instance with the plain
        # UTF-8 encoder, then delegate to the module-level BOM-prefixing
        # `encode` for this one call.
        self.encode = codecs.utf_8_encode
        return encode(input, errors)
class StreamReader(codecs.StreamReader):
    def reset(self):
        codecs.StreamReader.reset(self)
        try:
            # Remove the instance-level `decode` installed once the BOM
            # decision has been made, so a reset stream re-checks for it.
            del self.decode
        except AttributeError:
            pass

    def decode(self, input, errors='strict'):
        if len(input) < 3:
            if codecs.BOM_UTF8.startswith(input):
                # not enough data to decide if this is a BOM
                # => try again on the next call
                return (u"", 0)
        elif input[:3] == codecs.BOM_UTF8:
            # BOM found: skip it and use the plain decoder from now on.
            self.decode = codecs.utf_8_decode
            (output, consumed) = codecs.utf_8_decode(input[3:],errors)
            return (output, consumed+3)
        # (else) no BOM present
        self.decode = codecs.utf_8_decode
        return codecs.utf_8_decode(input, errors)
### encodings module API
def getregentry():
    # Registration entry consumed by the `encodings` package machinery.
    return codecs.CodecInfo(
        name='utf-8-sig',
        encode=encode,
        decode=decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
| mit |
ty707/airflow | tests/contrib/operators/hipchat_operator.py | 9 | 2204 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import requests
from airflow.contrib.operators.hipchat_operator import \
HipChatAPISendRoomNotificationOperator
from airflow.exceptions import AirflowException
from airflow import configuration
try:
from unittest import mock
except ImportError:
try:
import mock
except ImportError:
mock = None
class HipChatOperatorTest(unittest.TestCase):
    """Tests for HipChatAPISendRoomNotificationOperator with
    ``requests.request`` mocked out."""

    def setUp(self):
        configuration.test_mode()

    @unittest.skipIf(mock is None, 'mock package not present')
    @mock.patch('requests.request')
    def test_execute(self, request_mock):
        # A 200 response means success; execute() must not raise.
        resp = requests.Response()
        resp.status_code = 200
        request_mock.return_value = resp
        operator = HipChatAPISendRoomNotificationOperator(
            task_id='test_hipchat_success',
            owner = 'airflow',
            token='abc123',
            room_id='room_id',
            message='hello world!'
        )
        operator.execute(None)

    @unittest.skipIf(mock is None, 'mock package not present')
    @mock.patch('requests.request')
    def test_execute_error_response(self, request_mock):
        # A non-2xx response must surface as an AirflowException.
        resp = requests.Response()
        resp.status_code = 404
        resp.reason = 'Not Found'
        request_mock.return_value = resp
        operator = HipChatAPISendRoomNotificationOperator(
            task_id='test_hipchat_failure',
            owner='airflow',
            token='abc123',
            room_id='room_id',
            message='hello world!'
        )
        with self.assertRaises(AirflowException):
            operator.execute(None)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
taedori81/e-commerce-template | saleor/core/utils/__init__.py | 12 | 2520 | # coding: utf-8
from __future__ import unicode_literals
import re
from django import forms
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.template.response import TemplateResponse
from django.utils.encoding import iri_to_uri, smart_text
from satchless.process import InvalidData, Step
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
__all__ = ['BaseStep', 'CategoryChoiceField', 'build_absolute_uri']
absolute_http_url_re = re.compile(r"^https?://", re.I)
class CategoryChoiceField(forms.ModelChoiceField):
    """Model choice field that renders MPTT categories as an indented tree."""

    def label_from_instance(self, obj):
        # pylint: disable=W0212
        # Depth of the node in the MPTT tree (roots are at level 0).
        level = getattr(obj, obj._mptt_meta.level_attr)
        indent = max(0, level - 1) * '│'
        if obj.parent:
            # `lft`/`rght` are the MPTT traversal bounds: both gaps being 1
            # means this node is the last leaf under its parent.
            last = ((obj.parent.rght - obj.rght == 1)
                    and (obj.rght - obj.lft == 1))
            if last:
                indent += '└ '
            else:
                indent += '├ '
        return '%s%s' % (indent, smart_text(obj))
class BaseStep(Step):
    """Base class for a step of a multi-step (checkout-like) process.

    Subclasses provide `forms`, `template`, implement `save()` and
    `get_absolute_url()`.  A step is truthy when all its forms validate.
    """

    forms = None
    template = ''
    group = None

    def __init__(self, request):
        self.request = request
        self.forms = {}

    def __nonzero__(self):
        # Truthiness == the step validates cleanly.
        try:
            self.validate()
        except InvalidData:
            return False
        return True

    # FIX: Python 3 uses __bool__ for truth testing and silently ignores
    # __nonzero__; the module already supports Python 3 (see the
    # urllib.parse import fallback), so alias it to keep the truthiness
    # contract on both interpreters.
    __bool__ = __nonzero__

    def save(self):
        """Persist the step's data; must be implemented by subclasses."""
        raise NotImplementedError()

    def forms_are_valid(self):
        """Return True when every bound form in this step validates."""
        for form in self.forms.values():
            if not form.is_valid():
                return False
        return True

    def validate(self):
        """Raise InvalidData when any of the step's forms is invalid."""
        if not self.forms_are_valid():
            raise InvalidData()

    def process(self, extra_context=None):
        """Render the step on GET/invalid forms, otherwise save it.

        Returns a TemplateResponse to re-display the step, or None after
        a successful save (the caller then advances to the next step).
        """
        context = extra_context or {}
        if not self.forms_are_valid() or self.request.method == 'GET':
            context['step'] = self
            return TemplateResponse(self.request, self.template, context)
        self.save()

    def get_absolute_url(self):
        """Return the URL of this step; must be implemented by subclasses."""
        raise NotImplementedError()
def build_absolute_uri(location, is_secure=False):
    """Return *location* as an absolute URI rooted at CANONICAL_HOSTNAME.

    Relative locations are joined onto the canonical host; already-absolute
    http(s) URLs pass through (IRI-encoded) unchanged.

    :raises ImproperlyConfigured: if CANONICAL_HOSTNAME is not set.
    """
    try:
        host = settings.CANONICAL_HOSTNAME
    except AttributeError:
        raise ImproperlyConfigured('You need to specify CANONICAL_HOSTNAME in '
                                   'your Django settings file')
    if not absolute_http_url_re.match(location):
        current_uri = '%s://%s' % ('https' if is_secure else 'http', host)
        location = urljoin(current_uri, location)
    return iri_to_uri(location)
| bsd-3-clause |
dbflute-test/dbflute-test-option-compatible10x | mydbflute/dbflute-1.x/ant/bin/runant.py | 126 | 3285 | #!/usr/bin/python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
runant.py
This script is a translation of the runant.pl written by Steve Loughran.
It runs ant with/out arguments, it should be quite portable (thanks to
the python os library)
This script has been tested with Python2.0/Win2K
created: 2001-04-11
author: Pierre Dittgen pierre.dittgen@criltelecom.com
Assumptions:
- the "java" executable/script is on the command path
"""
import os, os.path, string, sys

# Change it to 1 to get extra debug information
debug = 0

#######################################################################
# NOTE: this script is Python 2 only (print statements, dict.has_key,
# string.join); it mirrors the logic of the runant.pl launcher.

# If ANT_HOME is not set default to script's parent directory
if os.environ.has_key('ANT_HOME'):
    ANT_HOME = os.environ['ANT_HOME']
else:
    ANT_HOME = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))

# set ANT_LIB location
ANT_LIB = os.path.join(ANT_HOME, 'lib')

# set JAVACMD (check variables JAVACMD and JAVA_HOME)
JAVACMD = None
if not os.environ.has_key('JAVACMD'):
    if os.environ.has_key('JAVA_HOME'):
        if not os.path.exists(os.environ['JAVA_HOME']):
            print "Warning: JAVA_HOME is not defined correctly."
        else:
            JAVACMD = os.path.join(os.environ['JAVA_HOME'], 'bin', 'java')
    else:
        print "Warning: JAVA_HOME not set."
else:
    JAVACMD = os.environ['JAVACMD']
# Fall back to whatever `java` is on the PATH.
if not JAVACMD:
    JAVACMD = 'java'

launcher_jar = os.path.join(ANT_LIB, 'ant-launcher.jar')
if not os.path.exists(launcher_jar):
    print 'Unable to locate ant-launcher.jar. Expected to find it in %s' % \
        ANT_LIB

# Build up standard classpath (LOCALCLASSPATH)
LOCALCLASSPATH = launcher_jar
if os.environ.has_key('LOCALCLASSPATH'):
    LOCALCLASSPATH += os.pathsep + os.environ['LOCALCLASSPATH']

ANT_OPTS = ""
if os.environ.has_key('ANT_OPTS'):
    ANT_OPTS = os.environ['ANT_OPTS']

OPTS = ""
if os.environ.has_key('JIKESPATH'):
    OPTS = '-Djikes.class.path=\"%s\"' % os.environ['JIKESPATH']

ANT_ARGS = ""
if os.environ.has_key('ANT_ARGS'):
    ANT_ARGS = os.environ['ANT_ARGS']

CLASSPATH = ""
if os.environ.has_key('CLASSPATH'):
    CLASSPATH = os.environ['CLASSPATH']

# Builds the commandline
cmdline = ('%s %s -classpath %s -Dant.home=%s %s ' + \
           'org.apache.tools.ant.launch.Launcher %s -lib %s %s') \
    % (JAVACMD, ANT_OPTS, LOCALCLASSPATH, ANT_HOME, OPTS, ANT_ARGS, \
       CLASSPATH, string.join(sys.argv[1:], ' '))

if debug:
    print '\n%s\n\n' % (cmdline)
    sys.stdout.flush()

# Run the biniou!
os.system(cmdline)
| apache-2.0 |
muraliselva10/cloudkitty | cloudkitty/api/v1/types.py | 1 | 1852 | # -*- coding: utf-8 -*-
# Copyright 2014 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Stéphane Albert
#
from oslo_utils import uuidutils
from wsme import types as wtypes
from cloudkitty.i18n import _LE
class UuidType(wtypes.UuidType):
    """A simple UUID type."""

    basetype = wtypes.text
    name = 'uuid'

    @staticmethod
    def validate(value):
        # Accept anything oslo's uuidutils considers UUID-like
        # (hyphenated or bare hex forms).
        if not uuidutils.is_uuid_like(value):
            raise ValueError(_LE("Invalid UUID, got '%s'") % value)
        return value
# Code taken from ironic types
class MultiType(wtypes.UserType):
    """A complex type that represents one or more types.

    Used for validating that a value is an instance of one of the types.

    :param *types: Variable-length list of types.
    """

    def __init__(self, *types):
        self.types = types

    def __str__(self):
        return ' | '.join(map(str, self.types))

    def validate(self, value):
        """Return *value* if it matches one of the types, else raise."""
        for t in self.types:
            # Transparently decode bytes when text is acceptable.
            if t is wtypes.text and isinstance(value, wtypes.bytes):
                value = value.decode()
            if isinstance(value, t):
                return value
        else:
            # Loop exhausted without a match (no break above).
            raise ValueError(
                _LE("Wrong type. Expected '%(type)s', got '%(value)s'")
                % {'type': self.types, 'value': type(value)})
| apache-2.0 |
SCOAP3/invenio | invenio/modules/uploader/manage.py | 13 | 1461 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Perform uploader operations."""
import argparse
from invenio.ext.script import Manager
manager = Manager(usage=__doc__)
@manager.option('-f', '-filename', dest='blobs', nargs='+',
                type=argparse.FileType('r'))
def insert(blobs):
    """Upload new records."""
    from .api import run
    # Each blob is an open file handle supplied by argparse; its 'name'
    # attribute (when present) is forwarded so the uploader can track
    # the origin of the record.
    for blob in blobs:
        run('insert', blob.read(), master_format='marc',
            reader_info=dict(schema='xml'),
            filename=getattr(blob, 'name', None))
def main():
    """Execute manager."""
    from invenio.base.factory import create_app
    # Bind a freshly created application to the manager before running
    # the command dispatcher.
    manager.app = create_app()
    manager.run()


if __name__ == '__main__':
    main()
| gpl-2.0 |
florian-dacosta/OCB | addons/warning/warning.py | 73 | 11827 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
from openerp.tools.translate import _
# Available reactions when a flagged partner/product is selected on a
# document: stay silent, show a non-blocking warning, or block the flow.
WARNING_MESSAGE = [
    ('no-message','No Message'),
    ('warning','Warning'),
    ('block','Blocking Message')
]

# Shared tooltip for every warning-selection field declared in this module.
WARNING_HELP = _('Selecting the "Warning" option will notify user with the message, Selecting "Blocking Message" will throw an exception with the message and block the flow. The Message has to be written in the next field.')
class res_partner(osv.osv):
    """Partner extended with per-document warning configuration.

    For each document type (sale order, purchase order, picking,
    invoice) a selection field stores the warning level -- see
    WARNING_MESSAGE -- and a companion text field stores the message
    shown to the user.
    """
    _inherit = 'res.partner'
    _columns = {
        'sale_warn' : fields.selection(WARNING_MESSAGE, 'Sales Order', help=WARNING_HELP, required=True),
        'sale_warn_msg' : fields.text('Message for Sales Order'),
        'purchase_warn' : fields.selection(WARNING_MESSAGE, 'Purchase Order', help=WARNING_HELP, required=True),
        'purchase_warn_msg' : fields.text('Message for Purchase Order'),
        'picking_warn' : fields.selection(WARNING_MESSAGE, 'Stock Picking', help=WARNING_HELP, required=True),
        'picking_warn_msg' : fields.text('Message for Stock Picking'),
        'invoice_warn' : fields.selection(WARNING_MESSAGE, 'Invoice', help=WARNING_HELP, required=True),
        'invoice_warn_msg' : fields.text('Message for Invoice'),
    }
    # Warnings are opt-in: every document type is silent by default.
    _defaults = {
        'sale_warn' : 'no-message',
        'purchase_warn' : 'no-message',
        'picking_warn' : 'no-message',
        'invoice_warn' : 'no-message',
    }
class sale_order(osv.osv):
    """Sales order extended to surface the partner's sale warning."""
    _inherit = 'sale.order'

    def onchange_partner_id(self, cr, uid, ids, part, context=None):
        """Run the standard partner onchange and merge in the partner's
        configured sale warning; a 'block' level warning clears the
        partner instead of proceeding."""
        if not part:
            # No partner selected: reset the partner-dependent fields.
            return {'value': {'partner_invoice_id': False,
                              'partner_shipping_id': False,
                              'payment_term': False}}
        warning = {}
        title = False
        message = False
        partner_obj = self.pool.get('res.partner')
        partner = partner_obj.browse(cr, uid, part, context=context)
        if partner.sale_warn != 'no-message':
            title = _("Warning for %s") % partner.name
            message = partner.sale_warn_msg
            warning = {'title': title, 'message': message}
            if partner.sale_warn == 'block':
                # Blocking warning: refuse the partner altogether.
                return {'value': {'partner_id': False}, 'warning': warning}
        result = super(sale_order, self).onchange_partner_id(
            cr, uid, ids, part, context=context)
        upstream = result.get('warning', False)
        if upstream:
            # Merge our warning with the one produced by the parent class.
            warning['title'] = title and title + ' & ' + upstream['title'] or upstream['title']
            warning['message'] = message and message + ' ' + upstream['message'] or upstream['message']
        return {'value': result.get('value', {}), 'warning': warning}
class purchase_order(osv.osv):
    """Purchase order extended to surface the partner's purchase warning."""
    _inherit = 'purchase.order'

    def onchange_partner_id(self, cr, uid, ids, part, context=None):
        """Run the standard partner onchange and merge in the partner's
        configured purchase warning; a 'block' level warning clears the
        partner instead of proceeding."""
        if not part:
            # No partner selected: reset the dependent address field.
            return {'value': {'partner_address_id': False}}
        warning = {}
        title = False
        message = False
        partner_obj = self.pool.get('res.partner')
        partner = partner_obj.browse(cr, uid, part, context=context)
        if partner.purchase_warn != 'no-message':
            title = _("Warning for %s") % partner.name
            message = partner.purchase_warn_msg
            warning = {'title': title, 'message': message}
            if partner.purchase_warn == 'block':
                # Blocking warning: refuse the partner altogether.
                return {'value': {'partner_id': False}, 'warning': warning}
        result = super(purchase_order, self).onchange_partner_id(
            cr, uid, ids, part, context=context)
        upstream = result.get('warning', False)
        if upstream:
            # Merge our warning with the one produced by the parent class.
            warning['title'] = title and title + ' & ' + upstream['title'] or upstream['title']
            warning['message'] = message and message + ' ' + upstream['message'] or upstream['message']
        return {'value': result.get('value', {}), 'warning': warning}
class account_invoice(osv.osv):
    """Invoice extended to surface the partner's invoice warning."""
    _inherit = 'account.invoice'

    def onchange_partner_id(self, cr, uid, ids, type, partner_id,
            date_invoice=False, payment_term=False,
            partner_bank_id=False, company_id=False,
            context=None):
        """Wrap the standard partner onchange with warning handling.

        Returns the usual onchange dict; when the partner defines an
        invoice warning it is merged under the 'warning' key, and a
        'block' level warning resets partner_id instead of proceeding.
        """
        if not partner_id:
            # No partner: clear the partner-dependent defaults.
            return {'value': {
                'account_id': False,
                'payment_term': False,
                }
            }
        warning = {}
        title = False
        message = False
        partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
        if partner.invoice_warn != 'no-message':
            title = _("Warning for %s") % partner.name
            message = partner.invoice_warn_msg
            warning = {
                'title': title,
                'message': message
            }
            if partner.invoice_warn == 'block':
                # Blocking warning: refuse the partner altogether.
                return {'value': {'partner_id': False}, 'warning': warning}
        result = super(account_invoice, self).onchange_partner_id(cr, uid, ids, type, partner_id,
            date_invoice=date_invoice, payment_term=payment_term,
            partner_bank_id=partner_bank_id, company_id=company_id, context=context)
        if result.get('warning',False):
            # Merge our warning with the one produced by the parent class.
            warning['title'] = title and title +' & '+ result['warning']['title'] or result['warning']['title']
            warning['message'] = message and message + ' ' + result['warning']['message'] or result['warning']['message']
        return {'value': result.get('value',{}), 'warning':warning}
class stock_picking(osv.osv):
    """Stock picking extended to surface the partner's picking warning."""
    _inherit = 'stock.picking'

    def onchange_partner_in(self, cr, uid, ids, partner_id=None, context=None):
        """Run the standard incoming-partner onchange and merge in the
        partner's configured picking warning.

        A 'block' level warning clears the partner and aborts; otherwise
        the warning (if any) is combined with whatever the parent class
        returned.
        """
        if not partner_id:
            return {}
        partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
        warning = {}
        title = False
        message = False
        if partner.picking_warn != 'no-message':
            title = _("Warning for %s") % partner.name
            message = partner.picking_warn_msg
            warning = {
                'title': title,
                'message': message
            }
            if partner.picking_warn == 'block':
                # Blocking warning: refuse the partner altogether.
                return {'value': {'partner_id': False}, 'warning': warning}
        # BUG FIX: the super() call previously named the undefined class
        # 'stock_picking_in', raising NameError whenever execution reached
        # this point; the enclosing class is 'stock_picking'.
        result = super(stock_picking, self).onchange_partner_in(cr, uid, ids, partner_id, context)
        if result.get('warning', False):
            # Merge our warning with the one produced by the parent class.
            warning['title'] = title and title + ' & ' + result['warning']['title'] or result['warning']['title']
            warning['message'] = message and message + ' ' + result['warning']['message'] or result['warning']['message']
        return {'value': result.get('value', {}), 'warning': warning}
class product_product(osv.osv):
    """Product warning configuration for sale and purchase order lines.

    NOTE(review): despite its name this class extends 'product.template',
    not 'product.product' -- confirm this is intentional before renaming.
    """
    _inherit = 'product.template'
    _columns = {
        # Warning level (see WARNING_MESSAGE) plus message text, per flow.
        'sale_line_warn' : fields.selection(WARNING_MESSAGE,'Sales Order Line', help=WARNING_HELP, required=True),
        'sale_line_warn_msg' : fields.text('Message for Sales Order Line'),
        'purchase_line_warn' : fields.selection(WARNING_MESSAGE,'Purchase Order Line', help=WARNING_HELP, required=True),
        'purchase_line_warn_msg' : fields.text('Message for Purchase Order Line'),
    }
    # Warnings are opt-in: silent by default.
    _defaults = {
        'sale_line_warn' : 'no-message',
        'purchase_line_warn' : 'no-message',
    }
class sale_order_line(osv.osv):
    """Sales order line: warn when a flagged product is selected."""
    _inherit = 'sale.order.line'

    def product_id_change_with_wh(self, cr, uid, ids, pricelist, product, qty=0,
            uom=False, qty_uos=0, uos=False, name='', partner_id=False,
            lang=False, update_tax=True, date_order=False, packaging=False,
            fiscal_position=False, flag=False, warehouse_id=False, context=None):
        """Run the standard product onchange and merge in the product's
        configured sale-line warning; a 'block' level warning clears
        product_id instead of proceeding."""
        warning = {}
        if not product:
            # No product selected: reset weight/packaging/uos quantity.
            return {'value': {'th_weight' : 0, 'product_packaging': False,
                'product_uos_qty': qty}, 'domain': {'product_uom': [],
                'product_uos': []}}
        product_info = self.pool.get('product.product').browse(cr, uid, product)
        title = False
        message = False
        if product_info.sale_line_warn != 'no-message':
            title = _("Warning for %s") % product_info.name
            message = product_info.sale_line_warn_msg
            warning['title'] = title
            warning['message'] = message
            if product_info.sale_line_warn == 'block':
                # Blocking warning: refuse the product altogether.
                return {'value': {'product_id': False}, 'warning': warning}
        result = super(sale_order_line, self).product_id_change_with_wh(
            cr, uid, ids, pricelist, product, qty,
            uom, qty_uos, uos, name, partner_id,
            lang, update_tax, date_order, packaging,
            fiscal_position, flag, warehouse_id=warehouse_id, context=context)
        upstream = result.get('warning', False)
        if upstream:
            # Merge our warning with the one produced by the parent class.
            warning['title'] = title and title + ' & ' + upstream['title'] or upstream['title']
            warning['message'] = message and message + '\n\n' + upstream['message'] or upstream['message']
        return {'value': result.get('value', {}), 'warning': warning}
class purchase_order_line(osv.osv):
    """Purchase order line: warn when a flagged product is selected."""
    _inherit = 'purchase.order.line'

    def onchange_product_id(self,cr, uid, ids, pricelist, product, qty, uom,
            partner_id, date_order=False, fiscal_position_id=False, date_planned=False,
            name=False, price_unit=False, state='draft', notes=False, context=None):
        """Wrap the standard product onchange with warning handling.

        When the product defines a purchase-line warning it is merged
        under the 'warning' key of the returned onchange dict; a 'block'
        level warning resets product_id instead of proceeding.
        """
        warning = {}
        if not product:
            # No product selected: keep caller-provided values or fall
            # back to empty defaults.
            return {'value': {'price_unit': price_unit or 0.0, 'name': name or '', 'notes': notes or '', 'product_uom' : uom or False}, 'domain':{'product_uom':[]}}
        product_obj = self.pool.get('product.product')
        product_info = product_obj.browse(cr, uid, product)
        title = False
        message = False
        if product_info.purchase_line_warn != 'no-message':
            title = _("Warning for %s") % product_info.name
            message = product_info.purchase_line_warn_msg
            warning['title'] = title
            warning['message'] = message
            if product_info.purchase_line_warn == 'block':
                # Blocking warning: refuse the product altogether.
                return {'value': {'product_id': False}, 'warning': warning}
        result = super(purchase_order_line, self).onchange_product_id(cr, uid, ids, pricelist, product, qty, uom,
            partner_id, date_order=date_order, fiscal_position_id=fiscal_position_id, date_planned=date_planned, name=name, price_unit=price_unit, state=state, context=context)
        if result.get('warning',False):
            # Merge our warning with the one produced by the parent class.
            warning['title'] = title and title +' & '+result['warning']['title'] or result['warning']['title']
            warning['message'] = message and message +'\n\n'+result['warning']['message'] or result['warning']['message']
        return {'value': result.get('value',{}), 'warning':warning}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.